1 /* Common target dependent code for GDB on ARM systems.
2
3 Copyright (C) 1988-2023 Free Software Foundation, Inc.
4
5 This file is part of GDB.
6
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
19
20 #include "defs.h"
21
22 #include <ctype.h>
23
24 #include "frame.h"
25 #include "language.h"
26 #include "inferior.h"
27 #include "infrun.h"
28 #include "gdbcmd.h"
29 #include "gdbcore.h"
30 #include "dis-asm.h"
31 #include "disasm.h"
32 #include "regcache.h"
33 #include "reggroups.h"
34 #include "target-float.h"
35 #include "value.h"
36 #include "arch-utils.h"
37 #include "osabi.h"
38 #include "frame-unwind.h"
39 #include "frame-base.h"
40 #include "trad-frame.h"
41 #include "objfiles.h"
42 #include "dwarf2.h"
43 #include "dwarf2/frame.h"
44 #include "gdbtypes.h"
45 #include "prologue-value.h"
46 #include "remote.h"
47 #include "target-descriptions.h"
48 #include "user-regs.h"
49 #include "observable.h"
50 #include "count-one-bits.h"
51
52 #include "arch/arm.h"
53 #include "arch/arm-get-next-pcs.h"
54 #include "arm-tdep.h"
55 #include "sim/sim-arm.h"
56
57 #include "elf-bfd.h"
58 #include "coff/internal.h"
59 #include "elf/arm.h"
60
61 #include "record.h"
62 #include "record-full.h"
63 #include <algorithm>
64
65 #include "producer.h"
66
67 #if GDB_SELF_TEST
68 #include "gdbsupport/selftest.h"
69 #endif
70
71 static bool arm_debug;
72
73 /* Print an "arm" debug statement. */
74
75 #define arm_debug_printf(fmt, ...) \
76 debug_prefixed_printf_cond (arm_debug, "arm", fmt, ##__VA_ARGS__)
77
78 /* Macros for setting and testing a bit in a minimal symbol that marks
79 it as a Thumb function. The MSB of the minimal symbol's "info" field
80 is used for this purpose.
81
82 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
83 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
84
85 #define MSYMBOL_SET_SPECIAL(msym) \
86 (msym)->set_target_flag_1 (true)
87
88 #define MSYMBOL_IS_SPECIAL(msym) \
89 (msym)->target_flag_1 ()
90
91 struct arm_mapping_symbol
92 {
93 CORE_ADDR value;
94 char type;
95
96 bool operator< (const arm_mapping_symbol &other) const
97 { return this->value < other.value; }
98 };
99
100 typedef std::vector<arm_mapping_symbol> arm_mapping_symbol_vec;
101
102 struct arm_per_bfd
103 {
104 explicit arm_per_bfd (size_t num_sections)
105 : section_maps (new arm_mapping_symbol_vec[num_sections]),
106 section_maps_sorted (new bool[num_sections] ())
107 {}
108
109 DISABLE_COPY_AND_ASSIGN (arm_per_bfd);
110
111 /* Information about mapping symbols ($a, $d, $t) in the objfile.
112
113 The format is an array of vectors of arm_mapping_symbol: there is one
114 vector for each section of the objfile (the array is indexed by BFD
115 section index).
116
117 For each section, the vector of arm_mapping_symbol is sorted by
118 symbol value (address). */
119 std::unique_ptr<arm_mapping_symbol_vec[]> section_maps;
120
121 /* For each corresponding element of section_maps above, whether that
122 vector has been sorted yet. */
123 std::unique_ptr<bool[]> section_maps_sorted;
124 };
125
126 /* Per-bfd data used for mapping symbols. */
127 static const registry<bfd>::key<arm_per_bfd> arm_bfd_data_key;
128
129 /* The list of available "set arm ..." and "show arm ..." commands. */
130 static struct cmd_list_element *setarmcmdlist = NULL;
131 static struct cmd_list_element *showarmcmdlist = NULL;
132
133 /* The type of floating-point to use. Keep this in sync with enum
134 arm_float_model, and the help string in _initialize_arm_tdep. */
135 static const char *const fp_model_strings[] =
136 {
137 "auto",
138 "softfpa",
139 "fpa",
140 "softvfp",
141 "vfp",
142 NULL
143 };
144
145 /* A variable that can be configured by the user. */
146 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
147 static const char *current_fp_model = "auto";
148
149 /* The ABI to use. Keep this in sync with arm_abi_kind. */
150 static const char *const arm_abi_strings[] =
151 {
152 "auto",
153 "APCS",
154 "AAPCS",
155 NULL
156 };
157
158 /* A variable that can be configured by the user. */
159 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
160 static const char *arm_abi_string = "auto";
161
162 /* The execution mode to assume. */
163 static const char *const arm_mode_strings[] =
164 {
165 "auto",
166 "arm",
167 "thumb",
168 NULL
169 };
170
171 static const char *arm_fallback_mode_string = "auto";
172 static const char *arm_force_mode_string = "auto";
173
174 /* The standard register names, and all the valid aliases for them. Note
175 that `fp', `sp' and `pc' are not added in this alias list, because they
176 have been added as builtin user registers in
177 std-regs.c:_initialize_frame_reg. */
178 static const struct
179 {
180 const char *name;
181 int regnum;
182 } arm_register_aliases[] = {
183 /* Basic register numbers. */
184 { "r0", 0 },
185 { "r1", 1 },
186 { "r2", 2 },
187 { "r3", 3 },
188 { "r4", 4 },
189 { "r5", 5 },
190 { "r6", 6 },
191 { "r7", 7 },
192 { "r8", 8 },
193 { "r9", 9 },
194 { "r10", 10 },
195 { "r11", 11 },
196 { "r12", 12 },
197 { "r13", 13 },
198 { "r14", 14 },
199 { "r15", 15 },
200 /* Synonyms (argument and variable registers). */
201 { "a1", 0 },
202 { "a2", 1 },
203 { "a3", 2 },
204 { "a4", 3 },
205 { "v1", 4 },
206 { "v2", 5 },
207 { "v3", 6 },
208 { "v4", 7 },
209 { "v5", 8 },
210 { "v6", 9 },
211 { "v7", 10 },
212 { "v8", 11 },
213 /* Other platform-specific names for r9. */
214 { "sb", 9 },
215 { "tr", 9 },
216 /* Special names. */
217 { "ip", 12 },
218 { "lr", 14 },
219 /* Names used by GCC (not listed in the ARM EABI). */
220 { "sl", 10 },
221 /* A special name from the older ATPCS. */
222 { "wr", 7 },
223 };
224
225 static const char *const arm_register_names[] =
226 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
227 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
228 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
229 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
230 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
231 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
232 "fps", "cpsr" }; /* 24 25 */
233
234 /* Holds the current set of options to be passed to the disassembler. */
235 static char *arm_disassembler_options;
236
237 /* Valid register name styles. */
238 static const char **valid_disassembly_styles;
239
240 /* Disassembly style to use. Default to "std" register names. */
241 static const char *disassembly_style;
242
243 /* All possible arm target descriptors. */
244 static struct target_desc *tdesc_arm_list[ARM_FP_TYPE_INVALID][2];
245 static struct target_desc *tdesc_arm_mprofile_list[ARM_M_TYPE_INVALID];
246
247 /* This is used to keep the bfd arch_info in sync with the disassembly
248 style. */
249 static void set_disassembly_style_sfunc (const char *, int,
250 struct cmd_list_element *);
251 static void show_disassembly_style_sfunc (struct ui_file *, int,
252 struct cmd_list_element *,
253 const char *);
254
255 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
256 readable_regcache *regcache,
257 int regnum, gdb_byte *buf);
258 static void arm_neon_quad_write (struct gdbarch *gdbarch,
259 struct regcache *regcache,
260 int regnum, const gdb_byte *buf);
261
262 static CORE_ADDR
263 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);
264
265
266 /* get_next_pcs operations. */
267 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
268 arm_get_next_pcs_read_memory_unsigned_integer,
269 arm_get_next_pcs_syscall_next_pc,
270 arm_get_next_pcs_addr_bits_remove,
271 arm_get_next_pcs_is_thumb,
272 NULL,
273 };
274
275 struct arm_prologue_cache
276 {
277 /* The stack pointer at the time this frame was created; i.e. the
278 caller's stack pointer when this function was called. It is used
279 to identify this frame. */
280 CORE_ADDR sp;
281
282 /* Additional stack pointers used by M-profile with Security extension. */
283 /* Use msp_s / psp_s to hold the values of msp / psp when there is
284 no Security extension. */
285 CORE_ADDR msp_s;
286 CORE_ADDR msp_ns;
287 CORE_ADDR psp_s;
288 CORE_ADDR psp_ns;
289
290 /* Active stack pointer. */
291 int active_sp_regnum;
292 int active_msp_regnum;
293 int active_psp_regnum;
294
295 /* The frame base for this frame is just prev_sp - frame size.
296 FRAMESIZE is the distance from the frame pointer to the
297 initial stack pointer. */
298
299 int framesize;
300
301 /* The register used to hold the frame pointer for this frame. */
302 int framereg;
303
304 /* True if the return address is signed, false otherwise. */
305 gdb::optional<bool> ra_signed_state;
306
307 /* Saved register offsets. */
308 trad_frame_saved_reg *saved_regs;
309
310 arm_prologue_cache() = default;
311 };
312
313
314 /* Reconstruct T bit in program status register from LR value. */
315
316 static inline ULONGEST
317 reconstruct_t_bit(struct gdbarch *gdbarch, CORE_ADDR lr, ULONGEST psr)
318 {
319 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
320 if (IS_THUMB_ADDR (lr))
321 psr |= t_bit;
322 else
323 psr &= ~t_bit;
324
325 return psr;
326 }
327
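/* A minimal usage sketch of the helper above (the LR values are
   hypothetical, chosen only for illustration):

     ULONGEST psr = 0;
     psr = reconstruct_t_bit (gdbarch, 0x80001235, psr);
       ... bit 0 of the LR value is set, so PSR now has the T bit set.
     psr = reconstruct_t_bit (gdbarch, 0x80001234, psr);
       ... bit 0 is clear, so the T bit is cleared again.  */
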
328 /* Initialize CACHE fields for which zero is not adequate (CACHE is
329 expected to have been ZALLOC'ed before calling this function). */
330
331 static void
332 arm_cache_init (struct arm_prologue_cache *cache, struct gdbarch *gdbarch)
333 {
334 cache->active_sp_regnum = ARM_SP_REGNUM;
335
336 cache->saved_regs = trad_frame_alloc_saved_regs (gdbarch);
337 }
338
339 /* Similar to the previous function, but extracts GDBARCH from FRAME. */
340
341 static void
342 arm_cache_init (struct arm_prologue_cache *cache, frame_info_ptr frame)
343 {
344 struct gdbarch *gdbarch = get_frame_arch (frame);
345 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
346
347 arm_cache_init (cache, gdbarch);
348 cache->sp = get_frame_register_unsigned (frame, ARM_SP_REGNUM);
349
350 if (tdep->have_sec_ext)
351 {
352 const CORE_ADDR msp_val
353 = get_frame_register_unsigned (frame, tdep->m_profile_msp_regnum);
354 const CORE_ADDR psp_val
355 = get_frame_register_unsigned (frame, tdep->m_profile_psp_regnum);
356
357 cache->msp_s
358 = get_frame_register_unsigned (frame, tdep->m_profile_msp_s_regnum);
359 cache->msp_ns
360 = get_frame_register_unsigned (frame, tdep->m_profile_msp_ns_regnum);
361 cache->psp_s
362 = get_frame_register_unsigned (frame, tdep->m_profile_psp_s_regnum);
363 cache->psp_ns
364 = get_frame_register_unsigned (frame, tdep->m_profile_psp_ns_regnum);
365
366 /* Identify what msp is an alias for (msp_s or msp_ns). */
367 if (msp_val == cache->msp_s)
368 cache->active_msp_regnum = tdep->m_profile_msp_s_regnum;
369 else if (msp_val == cache->msp_ns)
370 cache->active_msp_regnum = tdep->m_profile_msp_ns_regnum;
371 else
372 {
373 warning (_("Invalid state, unable to determine msp alias, assuming "
374 "msp_s."));
375 cache->active_msp_regnum = tdep->m_profile_msp_s_regnum;
376 }
377
378 /* Identify what psp is an alias for (psp_s or psp_ns). */
379 if (psp_val == cache->psp_s)
380 cache->active_psp_regnum = tdep->m_profile_psp_s_regnum;
381 else if (psp_val == cache->psp_ns)
382 cache->active_psp_regnum = tdep->m_profile_psp_ns_regnum;
383 else
384 {
385 warning (_("Invalid state, unable to determine psp alias, assuming "
386 "psp_s."));
387 cache->active_psp_regnum = tdep->m_profile_psp_s_regnum;
388 }
389
390 /* Identify what sp is an alias for (msp_s, msp_ns, psp_s or psp_ns). */
391 if (msp_val == cache->sp)
392 cache->active_sp_regnum = cache->active_msp_regnum;
393 else if (psp_val == cache->sp)
394 cache->active_sp_regnum = cache->active_psp_regnum;
395 else
396 {
397 warning (_("Invalid state, unable to determine sp alias, assuming "
398 "msp."));
399 cache->active_sp_regnum = cache->active_msp_regnum;
400 }
401 }
402 else if (tdep->is_m)
403 {
404 cache->msp_s
405 = get_frame_register_unsigned (frame, tdep->m_profile_msp_regnum);
406 cache->psp_s
407 = get_frame_register_unsigned (frame, tdep->m_profile_psp_regnum);
408
409 /* Identify what sp is an alias for (msp or psp). */
410 if (cache->msp_s == cache->sp)
411 cache->active_sp_regnum = tdep->m_profile_msp_regnum;
412 else if (cache->psp_s == cache->sp)
413 cache->active_sp_regnum = tdep->m_profile_psp_regnum;
414 else
415 {
416 warning (_("Invalid state, unable to determine sp alias, assuming "
417 "msp."));
418 cache->active_sp_regnum = tdep->m_profile_msp_regnum;
419 }
420 }
421 else
422 {
423 cache->msp_s
424 = get_frame_register_unsigned (frame, ARM_SP_REGNUM);
425
426 cache->active_sp_regnum = ARM_SP_REGNUM;
427 }
428 }
429
430 /* Return the requested stack pointer value (in REGNUM), taking into
431 account whether we have a Security extension or an M-profile
432 CPU. */
433
434 static CORE_ADDR
435 arm_cache_get_sp_register (struct arm_prologue_cache *cache,
436 arm_gdbarch_tdep *tdep, int regnum)
437 {
438 if (tdep->have_sec_ext)
439 {
440 if (regnum == tdep->m_profile_msp_s_regnum)
441 return cache->msp_s;
442 if (regnum == tdep->m_profile_msp_ns_regnum)
443 return cache->msp_ns;
444 if (regnum == tdep->m_profile_psp_s_regnum)
445 return cache->psp_s;
446 if (regnum == tdep->m_profile_psp_ns_regnum)
447 return cache->psp_ns;
448 if (regnum == tdep->m_profile_msp_regnum)
449 return arm_cache_get_sp_register (cache, tdep, cache->active_msp_regnum);
450 if (regnum == tdep->m_profile_psp_regnum)
451 return arm_cache_get_sp_register (cache, tdep, cache->active_psp_regnum);
452 if (regnum == ARM_SP_REGNUM)
453 return arm_cache_get_sp_register (cache, tdep, cache->active_sp_regnum);
454 }
455 else if (tdep->is_m)
456 {
457 if (regnum == tdep->m_profile_msp_regnum)
458 return cache->msp_s;
459 if (regnum == tdep->m_profile_psp_regnum)
460 return cache->psp_s;
461 if (regnum == ARM_SP_REGNUM)
462 return arm_cache_get_sp_register (cache, tdep, cache->active_sp_regnum);
463 }
464 else if (regnum == ARM_SP_REGNUM)
465 return cache->sp;
466
467 gdb_assert_not_reached ("Invalid SP selection");
468 }
469
470 /* Return the previous stack address, depending on which SP register
471 is active. */
472
473 static CORE_ADDR
474 arm_cache_get_prev_sp_value (struct arm_prologue_cache *cache, arm_gdbarch_tdep *tdep)
475 {
476 CORE_ADDR val = arm_cache_get_sp_register (cache, tdep, cache->active_sp_regnum);
477 return val;
478 }
479
480 /* Set the active stack pointer to VAL. */
481
482 static void
483 arm_cache_set_active_sp_value (struct arm_prologue_cache *cache,
484 arm_gdbarch_tdep *tdep, CORE_ADDR val)
485 {
486 if (tdep->have_sec_ext)
487 {
488 if (cache->active_sp_regnum == tdep->m_profile_msp_s_regnum)
489 cache->msp_s = val;
490 else if (cache->active_sp_regnum == tdep->m_profile_msp_ns_regnum)
491 cache->msp_ns = val;
492 else if (cache->active_sp_regnum == tdep->m_profile_psp_s_regnum)
493 cache->psp_s = val;
494 else if (cache->active_sp_regnum == tdep->m_profile_psp_ns_regnum)
495 cache->psp_ns = val;
496
497 return;
498 }
499 else if (tdep->is_m)
500 {
501 if (cache->active_sp_regnum == tdep->m_profile_msp_regnum)
502 cache->msp_s = val;
503 else if (cache->active_sp_regnum == tdep->m_profile_psp_regnum)
504 cache->psp_s = val;
505
506 return;
507 }
508 else if (cache->active_sp_regnum == ARM_SP_REGNUM)
509 {
510 cache->sp = val;
511 return;
512 }
513
514 gdb_assert_not_reached ("Invalid SP selection");
515 }
516
517 /* Return true if REGNUM is one of the alternative stack pointers. */
518
519 static bool
520 arm_is_alternative_sp_register (arm_gdbarch_tdep *tdep, int regnum)
521 {
522 if ((regnum == tdep->m_profile_msp_regnum)
523 || (regnum == tdep->m_profile_msp_s_regnum)
524 || (regnum == tdep->m_profile_msp_ns_regnum)
525 || (regnum == tdep->m_profile_psp_regnum)
526 || (regnum == tdep->m_profile_psp_s_regnum)
527 || (regnum == tdep->m_profile_psp_ns_regnum))
528 return true;
529 else
530 return false;
531 }
532
533 /* Set the active stack pointer to SP_REGNUM. */
534
535 static void
536 arm_cache_switch_prev_sp (struct arm_prologue_cache *cache,
537 arm_gdbarch_tdep *tdep, int sp_regnum)
538 {
539 gdb_assert (arm_is_alternative_sp_register (tdep, sp_regnum));
540
541 if (tdep->have_sec_ext)
542 {
543 gdb_assert (sp_regnum != tdep->m_profile_msp_regnum
544 && sp_regnum != tdep->m_profile_psp_regnum);
545
546 if (sp_regnum == tdep->m_profile_msp_s_regnum
547 || sp_regnum == tdep->m_profile_psp_s_regnum)
548 {
549 cache->active_msp_regnum = tdep->m_profile_msp_s_regnum;
550 cache->active_psp_regnum = tdep->m_profile_psp_s_regnum;
551 }
552 else if (sp_regnum == tdep->m_profile_msp_ns_regnum
553 || sp_regnum == tdep->m_profile_psp_ns_regnum)
554 {
555 cache->active_msp_regnum = tdep->m_profile_msp_ns_regnum;
556 cache->active_psp_regnum = tdep->m_profile_psp_ns_regnum;
557 }
558 }
559
560 cache->active_sp_regnum = sp_regnum;
561 }
562
563 namespace {
564
565 /* Abstract class to read ARM instructions from memory. */
566
567 class arm_instruction_reader
568 {
569 public:
570 /* Read a 4-byte instruction from memory using the BYTE_ORDER endianness. */
571 virtual uint32_t read (CORE_ADDR memaddr, bfd_endian byte_order) const = 0;
572 };
573
574 /* Read instructions from target memory. */
575
576 class target_arm_instruction_reader : public arm_instruction_reader
577 {
578 public:
579 uint32_t read (CORE_ADDR memaddr, bfd_endian byte_order) const override
580 {
581 return read_code_unsigned_integer (memaddr, 4, byte_order);
582 }
583 };
584
585 } /* namespace */
586
587 static CORE_ADDR arm_analyze_prologue
588 (struct gdbarch *gdbarch, CORE_ADDR prologue_start, CORE_ADDR prologue_end,
589 struct arm_prologue_cache *cache, const arm_instruction_reader &insn_reader);
590
591 /* Architecture version for displaced stepping. This affects the behaviour of
592 certain instructions, and really should not be hard-wired. */
593
594 #define DISPLACED_STEPPING_ARCH_VERSION 5
595
596 /* See arm-tdep.h. */
597
598 bool arm_apcs_32 = true;
599 bool arm_unwind_secure_frames = true;
600
601 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
602
603 int
604 arm_psr_thumb_bit (struct gdbarch *gdbarch)
605 {
606 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
607
608 if (tdep->is_m)
609 return XPSR_T;
610 else
611 return CPSR_T;
612 }
613
614 /* Determine if the processor is currently executing in Thumb mode. */
615
616 int
617 arm_is_thumb (struct regcache *regcache)
618 {
619 ULONGEST cpsr;
620 ULONGEST t_bit = arm_psr_thumb_bit (regcache->arch ());
621
622 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
623
624 return (cpsr & t_bit) != 0;
625 }
626
627 /* Determine if FRAME is executing in Thumb mode. FRAME must be an ARM
628 frame. */
629
630 int
631 arm_frame_is_thumb (frame_info_ptr frame)
632 {
633 /* Check the architecture of FRAME. */
634 struct gdbarch *gdbarch = get_frame_arch (frame);
635 gdb_assert (gdbarch_bfd_arch_info (gdbarch)->arch == bfd_arch_arm);
636
637 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
638 directly (from a signal frame or dummy frame) or by interpreting
639 the saved LR (from a prologue or DWARF frame). So consult it and
640 trust the unwinders. */
641 CORE_ADDR cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
642
643 /* Find and extract the thumb bit. */
644 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
645 return (cpsr & t_bit) != 0;
646 }
647
648 /* Search for the mapping symbol covering MEMADDR. If one is found,
649 return its type. Otherwise, return 0. If START is non-NULL,
650 set *START to the location of the mapping symbol. */
651
652 static char
653 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
654 {
655 struct obj_section *sec;
656
657 /* If there are mapping symbols, consult them. */
658 sec = find_pc_section (memaddr);
659 if (sec != NULL)
660 {
661 arm_per_bfd *data = arm_bfd_data_key.get (sec->objfile->obfd.get ());
662 if (data != NULL)
663 {
664 unsigned int section_idx = sec->the_bfd_section->index;
665 arm_mapping_symbol_vec &map
666 = data->section_maps[section_idx];
667
668 /* Sort the vector on first use. */
669 if (!data->section_maps_sorted[section_idx])
670 {
671 std::sort (map.begin (), map.end ());
672 data->section_maps_sorted[section_idx] = true;
673 }
674
675 arm_mapping_symbol map_key = { memaddr - sec->addr (), 0 };
676 arm_mapping_symbol_vec::const_iterator it
677 = std::lower_bound (map.begin (), map.end (), map_key);
678
679 /* std::lower_bound finds the earliest ordered insertion
680 point. If the symbol at this position starts at this exact
681 address, we use that; otherwise, the preceding
682 mapping symbol covers this address. */
683 if (it < map.end ())
684 {
685 if (it->value == map_key.value)
686 {
687 if (start)
688 *start = it->value + sec->addr ();
689 return it->type;
690 }
691 }
692
693 if (it > map.begin ())
694 {
695 arm_mapping_symbol_vec::const_iterator prev_it
696 = it - 1;
697
698 if (start)
699 *start = prev_it->value + sec->addr ();
700 return prev_it->type;
701 }
702 }
703 }
704
705 return 0;
706 }
707
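/* For illustration (a hypothetical layout, not taken from any real
   objfile): if a section loaded at 0x10000 has recorded mapping symbols
   $a at offset 0x0, $t at offset 0x40 and $d at offset 0x80, then

     CORE_ADDR start;
     char type = arm_find_mapping_symbol (0x10044, &start);

   yields type == 't' and start == 0x10040, because the $t symbol is the
   closest mapping symbol at or below 0x10044.  */
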
708 /* Determine if the program counter specified in MEMADDR is in a Thumb
709 function. This function should be called for addresses unrelated to
710 any executing frame; otherwise, prefer arm_frame_is_thumb. */
711
712 int
713 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
714 {
715 struct bound_minimal_symbol sym;
716 char type;
717 arm_displaced_step_copy_insn_closure *dsc = nullptr;
718 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
719
720 if (gdbarch_displaced_step_copy_insn_closure_by_addr_p (gdbarch))
721 dsc = ((arm_displaced_step_copy_insn_closure * )
722 gdbarch_displaced_step_copy_insn_closure_by_addr
723 (gdbarch, current_inferior (), memaddr));
724
725 /* When checking the mode of a displaced instruction in the copy area,
726 the mode should be determined by the instruction at the original address. */
727 if (dsc)
728 {
729 displaced_debug_printf ("check mode of %.8lx instead of %.8lx",
730 (unsigned long) dsc->insn_addr,
731 (unsigned long) memaddr);
732 memaddr = dsc->insn_addr;
733 }
734
735 /* If bit 0 of the address is set, assume this is a Thumb address. */
736 if (IS_THUMB_ADDR (memaddr))
737 return 1;
738
739 /* If the user wants to override the symbol table, let them. */
740 if (strcmp (arm_force_mode_string, "arm") == 0)
741 return 0;
742 if (strcmp (arm_force_mode_string, "thumb") == 0)
743 return 1;
744
745 /* ARM v6-M and v7-M are always in Thumb mode. */
746 if (tdep->is_m)
747 return 1;
748
749 /* If there are mapping symbols, consult them. */
750 type = arm_find_mapping_symbol (memaddr, NULL);
751 if (type)
752 return type == 't';
753
754 /* Thumb functions have a "special" bit set in minimal symbols. */
755 sym = lookup_minimal_symbol_by_pc (memaddr);
756 if (sym.minsym)
757 return (MSYMBOL_IS_SPECIAL (sym.minsym));
758
759 /* If the user wants to override the fallback mode, let them. */
760 if (strcmp (arm_fallback_mode_string, "arm") == 0)
761 return 0;
762 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
763 return 1;
764
765 /* If we couldn't find any symbol, but we're talking to a running
766 target, then trust the current value of $cpsr. This lets
767 "display/i $pc" always show the correct mode (though if there is
768 a symbol table we will not reach here, so it still may not be
769 displayed in the mode it will be executed). */
770 if (target_has_registers ())
771 return arm_frame_is_thumb (get_current_frame ());
772
773 /* Otherwise we're out of luck; we assume ARM. */
774 return 0;
775 }
776
777 static inline bool
778 arm_m_addr_is_lockup (CORE_ADDR addr)
779 {
780 switch (addr)
781 {
782 /* Values for lockup state.
783 For more details see "B1.5.15 Unrecoverable exception cases" in
784 both ARMv6-M and ARMv7-M Architecture Reference Manuals, or
785 see "B4.32 Lockup" in ARMv8-M Architecture Reference Manual. */
786 case 0xeffffffe:
787 case 0xfffffffe:
788 case 0xffffffff:
789 return true;
790
791 default:
792 /* Address is not lockup. */
793 return false;
794 }
795 }
796
797 /* Determine if the address specified equals any of these magic return
798 values, called EXC_RETURN, defined by the ARM v6-M, v7-M and v8-M
799 architectures. The lockup magic PC values are also included.
800 Also check for FNC_RETURN if we have the v8-M Security extension.
801
802 From ARMv6-M Reference Manual B1.5.8
803 Table B1-5 Exception return behavior
804
805 EXC_RETURN Return To Return Stack
806 0xFFFFFFF1 Handler mode Main
807 0xFFFFFFF9 Thread mode Main
808 0xFFFFFFFD Thread mode Process
809
810 From ARMv7-M Reference Manual B1.5.8
811 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
812
813 EXC_RETURN Return To Return Stack
814 0xFFFFFFF1 Handler mode Main
815 0xFFFFFFF9 Thread mode Main
816 0xFFFFFFFD Thread mode Process
817
818 Table B1-9 EXC_RETURN definition of exception return behavior, with
819 FP
820
821 EXC_RETURN Return To Return Stack Frame Type
822 0xFFFFFFE1 Handler mode Main Extended
823 0xFFFFFFE9 Thread mode Main Extended
824 0xFFFFFFED Thread mode Process Extended
825 0xFFFFFFF1 Handler mode Main Basic
826 0xFFFFFFF9 Thread mode Main Basic
827 0xFFFFFFFD Thread mode Process Basic
828
829 For more details see "B1.5.8 Exception return behavior"
830 in both ARMv6-M and ARMv7-M Architecture Reference Manuals.
831
832 From ARMv8-M Architecture Technical Reference, D1.2.95
833 FType, Mode and SPSEL bits are to be considered when the Security
834 Extension is not implemented.
835
836 EXC_RETURN Return To Return Stack Frame Type
837 0xFFFFFFA0 Handler mode Main Extended
838 0xFFFFFFA8 Thread mode Main Extended
839 0xFFFFFFAC Thread mode Process Extended
840 0xFFFFFFB0 Handler mode Main Standard
841 0xFFFFFFB8 Thread mode Main Standard
842 0xFFFFFFBC Thread mode Process Standard */
843
844 static int
845 arm_m_addr_is_magic (struct gdbarch *gdbarch, CORE_ADDR addr)
846 {
847 if (arm_m_addr_is_lockup (addr))
848 return 1;
849
850 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
851 if (tdep->have_sec_ext)
852 {
853 switch ((addr & 0xff000000))
854 {
855 case 0xff000000: /* EXC_RETURN pattern. */
856 case 0xfe000000: /* FNC_RETURN pattern. */
857 return 1;
858 default:
859 return 0;
860 }
861 }
862 else
863 {
864 switch (addr)
865 {
866 /* Values from ARMv8-M Architecture Technical Reference. */
867 case 0xffffffa0:
868 case 0xffffffa8:
869 case 0xffffffac:
870 case 0xffffffb0:
871 case 0xffffffb8:
872 case 0xffffffbc:
873 /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
874 the exception return behavior. */
875 case 0xffffffe1:
876 case 0xffffffe9:
877 case 0xffffffed:
878 case 0xfffffff1:
879 case 0xfffffff9:
880 case 0xfffffffd:
881 /* Address is magic. */
882 return 1;
883
884 default:
885 /* Address is not magic. */
886 return 0;
887 }
888 }
889 }
890
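/* A brief example of the checks above, using values from the tables in
   the preceding comment: on an M-profile target without the Security
   extension, arm_m_addr_is_magic (gdbarch, 0xfffffff9) returns 1, since
   0xFFFFFFF9 is the EXC_RETURN for "return to Thread mode, Main stack",
   while an ordinary (hypothetical) code address such as 0x080001a4
   returns 0.  */
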
891 /* Remove useless bits from addresses in a running program. */
892 static CORE_ADDR
893 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
894 {
895 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
896
897 /* On M-profile devices, do not strip the low bit from EXC_RETURN
898 (the magic exception return address). */
899 if (tdep->is_m && arm_m_addr_is_magic (gdbarch, val))
900 return val;
901
902 if (arm_apcs_32)
903 return UNMAKE_THUMB_ADDR (val);
904 else
905 return (val & 0x03fffffc);
906 }
907
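/* Illustration (hypothetical addresses): with arm_apcs_32 set, a Thumb
   address such as 0x00008001 comes back as 0x00008000 (bit 0 stripped),
   whereas on an M-profile target a magic value such as 0xfffffffd is
   returned unchanged so the exception-return unwinder can still
   recognize it.  */
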
908 /* Return 1 if PC is the start of a compiler helper function which
909 can be safely ignored during prologue skipping. IS_THUMB is true
910 if the function is known to be a Thumb function due to the way it
911 is being called. */
912 static int
913 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
914 {
915 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
916 struct bound_minimal_symbol msym;
917
918 msym = lookup_minimal_symbol_by_pc (pc);
919 if (msym.minsym != NULL
920 && msym.value_address () == pc
921 && msym.minsym->linkage_name () != NULL)
922 {
923 const char *name = msym.minsym->linkage_name ();
924
925 /* The GNU linker's Thumb call stub to foo is named
926 __foo_from_thumb. */
927 if (strstr (name, "_from_thumb") != NULL)
928 name += 2;
929
930 /* On soft-float targets, __truncdfsf2 is called to convert promoted
931 arguments to their argument types in non-prototyped
932 functions. */
933 if (startswith (name, "__truncdfsf2"))
934 return 1;
935 if (startswith (name, "__aeabi_d2f"))
936 return 1;
937
938 /* Internal functions related to thread-local storage. */
939 if (startswith (name, "__tls_get_addr"))
940 return 1;
941 if (startswith (name, "__aeabi_read_tp"))
942 return 1;
943 }
944 else
945 {
946 /* If we run against a stripped glibc, we may be unable to identify
947 special functions by name. Check for one important case,
948 __aeabi_read_tp, by comparing the *code* against the default
949 implementation (this is hand-written ARM assembler in glibc). */
950
951 if (!is_thumb
952 && read_code_unsigned_integer (pc, 4, byte_order_for_code)
953 == 0xe3e00a0f /* mov r0, #0xffff0fff */
954 && read_code_unsigned_integer (pc + 4, 4, byte_order_for_code)
955 == 0xe240f01f) /* sub pc, r0, #31 */
956 return 1;
957 }
958
959 return 0;
960 }
961
962 /* Extract the immediate from a movw/movt instruction of encoding T. INSN1
963 is the first 16 bits of the instruction, and INSN2 is the second
964 16 bits. */
965 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
966 ((bits ((insn1), 0, 3) << 12) \
967 | (bits ((insn1), 10, 10) << 11) \
968 | (bits ((insn2), 12, 14) << 8) \
969 | bits ((insn2), 0, 7))
970
971 /* Extract the immediate from a movw/movt instruction of encoding A. INSN
972 is the 32-bit instruction. */
973 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
974 ((bits ((insn), 16, 19) << 12) \
975 | bits ((insn), 0, 11))
976
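/* Worked example of the two macros above (the encodings were computed by
   hand for illustration): the Thumb-2 instruction "movw r0, #0x1234"
   encodes as insn1 = 0xf241, insn2 = 0x2034, and the ARM instruction
   "movw r0, #0x1234" encodes as 0xe3010234, so

     EXTRACT_MOVW_MOVT_IMM_T (0xf241, 0x2034) == 0x1234
     EXTRACT_MOVW_MOVT_IMM_A (0xe3010234) == 0x1234

   A movw/movt pair therefore rebuilds a full 32-bit constant as
   (movt immediate << 16) | movw immediate.  */
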
977 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
978
979 static unsigned int
980 thumb_expand_immediate (unsigned int imm)
981 {
982 unsigned int count = imm >> 7;
983
984 if (count < 8)
985 switch (count / 2)
986 {
987 case 0:
988 return imm & 0xff;
989 case 1:
990 return (imm & 0xff) | ((imm & 0xff) << 16);
991 case 2:
992 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
993 case 3:
994 return (imm & 0xff) | ((imm & 0xff) << 8)
995 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
996 }
997
998 return (0x80 | (imm & 0x7f)) << (32 - count);
999 }
1000
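/* Worked examples of the expansion above, following the ThumbExpandImm
   pseudo-code in the ARM ARM:

     thumb_expand_immediate (0x0ff) == 0x000000ff   plain 8-bit value
     thumb_expand_immediate (0x1ff) == 0x00ff00ff   byte in bytes 0 and 2
     thumb_expand_immediate (0x3ff) == 0xffffffff   byte replicated 4 times
     thumb_expand_immediate (0x4ff) == 0x7f800000   0xff rotated right by 9  */
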
1001 /* Return 1 if the 16-bit Thumb instruction INSN restores SP in the
1002 epilogue, 0 otherwise. */
1003
1004 static int
1005 thumb_instruction_restores_sp (unsigned short insn)
1006 {
1007 return (insn == 0x46bd /* mov sp, r7 */
1008 || (insn & 0xff80) == 0xb000 /* add sp, imm */
1009 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
1010 }
1011
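/* For example, 0xb008 ("add sp, #32") and 0xbd10 ("pop {r4, pc}") both
   match the patterns above, so the prologue scanners treat them as the
   start of an epilogue and stop scanning.  */
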
1012 /* Analyze a Thumb prologue, looking for a recognizable stack frame
1013 and frame pointer. Scan until we encounter a store that could
1014 clobber the stack frame unexpectedly, or an unknown instruction.
1015 Return the last address which is definitely safe to skip for an
1016 initial breakpoint. */
1017
1018 static CORE_ADDR
1019 thumb_analyze_prologue (struct gdbarch *gdbarch,
1020 CORE_ADDR start, CORE_ADDR limit,
1021 struct arm_prologue_cache *cache)
1022 {
1023 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
1024 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1025 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1026 int i;
1027 pv_t regs[16];
1028 CORE_ADDR offset;
1029 CORE_ADDR unrecognized_pc = 0;
1030
1031 for (i = 0; i < 16; i++)
1032 regs[i] = pv_register (i, 0);
1033 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1034
1035 while (start < limit)
1036 {
1037 unsigned short insn;
1038 gdb::optional<bool> ra_signed_state;
1039
1040 insn = read_code_unsigned_integer (start, 2, byte_order_for_code);
1041
1042 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
1043 {
1044 int regno;
1045 int mask;
1046
1047 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
1048 break;
1049
1050 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
1051 whether to save LR (R14). */
1052 mask = (insn & 0xff) | ((insn & 0x100) << 6);
1053
1054 /* Calculate offsets of saved R0-R7 and LR. */
1055 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
1056 if (mask & (1 << regno))
1057 {
1058 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
1059 -4);
1060 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
1061 }
1062 }
1063 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
1064 {
1065 offset = (insn & 0x7f) << 2; /* get scaled offset */
1066 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
1067 -offset);
1068 }
1069 else if (thumb_instruction_restores_sp (insn))
1070 {
1071 /* Don't scan past the epilogue. */
1072 break;
1073 }
1074 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
1075 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
1076 (insn & 0xff) << 2);
1077 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
1078 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
1079 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
1080 bits (insn, 6, 8));
1081 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
1082 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
1083 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
1084 bits (insn, 0, 7));
1085 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
1086 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
1087 && pv_is_constant (regs[bits (insn, 3, 5)]))
1088 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
1089 regs[bits (insn, 6, 8)]);
1090 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
1091 && pv_is_constant (regs[bits (insn, 3, 6)]))
1092 {
1093 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
1094 int rm = bits (insn, 3, 6);
1095 regs[rd] = pv_add (regs[rd], regs[rm]);
1096 }
1097 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
1098 {
1099 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
1100 int src_reg = (insn & 0x78) >> 3;
1101 regs[dst_reg] = regs[src_reg];
1102 }
1103 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
1104 {
1105 /* Handle stores to the stack. Normally pushes are used,
1106 but with GCC -mtpcs-frame, there may be other stores
1107 in the prologue to create the frame. */
1108 int regno = (insn >> 8) & 0x7;
1109 pv_t addr;
1110
1111 offset = (insn & 0xff) << 2;
1112 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
1113
1114 if (stack.store_would_trash (addr))
1115 break;
1116
1117 stack.store (addr, 4, regs[regno]);
1118 }
1119 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
1120 {
1121 int rd = bits (insn, 0, 2);
1122 int rn = bits (insn, 3, 5);
1123 pv_t addr;
1124
1125 offset = bits (insn, 6, 10) << 2;
1126 addr = pv_add_constant (regs[rn], offset);
1127
1128 if (stack.store_would_trash (addr))
1129 break;
1130
1131 stack.store (addr, 4, regs[rd]);
1132 }
1133 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
1134 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
1135 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
1136 /* Ignore stores of argument registers to the stack. */
1137 ;
1138 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
1139 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
1140 /* Ignore block loads from the stack, potentially copying
1141 parameters from memory. */
1142 ;
1143 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
1144 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
1145 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
1146 /* Similarly ignore single loads from the stack. */
1147 ;
1148 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
1149 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
1150 /* Skip register copies, i.e. saves to another register
1151 instead of the stack. */
1152 ;
1153 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
1154 /* Recognize constant loads; even with small stacks these are necessary
1155 on Thumb. */
1156 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
1157 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
1158 {
1159 /* Constant pool loads, for the same reason. */
1160 unsigned int constant;
1161 CORE_ADDR loc;
1162
1163 loc = start + 4 + bits (insn, 0, 7) * 4;
1164 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1165 regs[bits (insn, 8, 10)] = pv_constant (constant);
1166 }
1167 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
1168 {
1169 unsigned short inst2;
1170
1171 inst2 = read_code_unsigned_integer (start + 2, 2,
1172 byte_order_for_code);
1173 uint32_t whole_insn = (insn << 16) | inst2;
1174
1175 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
1176 {
1177 /* BL, BLX. Allow some special function calls when
1178 skipping the prologue; GCC generates these before
1179 storing arguments to the stack. */
1180 CORE_ADDR nextpc;
1181 int j1, j2, imm1, imm2;
1182
1183 imm1 = sbits (insn, 0, 10);
1184 imm2 = bits (inst2, 0, 10);
1185 j1 = bit (inst2, 13);
1186 j2 = bit (inst2, 11);
1187
1188 offset = ((imm1 << 12) + (imm2 << 1));
1189 offset ^= ((!j2) << 22) | ((!j1) << 23);
1190
1191 nextpc = start + 4 + offset;
1192 /* For BLX make sure to clear the low bits. */
1193 if (bit (inst2, 12) == 0)
1194 nextpc = nextpc & 0xfffffffc;
1195
1196 if (!skip_prologue_function (gdbarch, nextpc,
1197 bit (inst2, 12) != 0))
1198 break;
1199 }
1200
1201 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
1202 { registers } */
1203 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1204 {
1205 pv_t addr = regs[bits (insn, 0, 3)];
1206 int regno;
1207
1208 if (stack.store_would_trash (addr))
1209 break;
1210
1211 /* Calculate offsets of saved registers. */
1212 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
1213 if (inst2 & (1 << regno))
1214 {
1215 addr = pv_add_constant (addr, -4);
1216 stack.store (addr, 4, regs[regno]);
1217 }
1218
1219 if (insn & 0x0020)
1220 regs[bits (insn, 0, 3)] = addr;
1221 }
1222
1223 /* vstmdb Rn{!}, { D-registers } (aka vpush). */
1224 else if ((insn & 0xff20) == 0xed20
1225 && (inst2 & 0x0f00) == 0x0b00
1226 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1227 {
1228 /* Address SP points to. */
1229 pv_t addr = regs[bits (insn, 0, 3)];
1230
1231 /* Number of registers saved. */
1232 unsigned int number = bits (inst2, 0, 7) >> 1;
1233
1234 /* First register to save. */
1235 int vd = bits (inst2, 12, 15) | (bits (insn, 6, 6) << 4);
1236
1237 if (stack.store_would_trash (addr))
1238 break;
1239
1240 /* Calculate offsets of saved registers. */
1241 for (; number > 0; number--)
1242 {
1243 addr = pv_add_constant (addr, -8);
1244 stack.store (addr, 8, pv_register (ARM_D0_REGNUM
1245 + vd + number, 0));
1246 }
1247
1248 /* Writeback SP to account for the saved registers. */
1249 regs[bits (insn, 0, 3)] = addr;
1250 }
1251
1252 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
1253 [Rn, #+/-imm]{!} */
1254 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1255 {
1256 int regno1 = bits (inst2, 12, 15);
1257 int regno2 = bits (inst2, 8, 11);
1258 pv_t addr = regs[bits (insn, 0, 3)];
1259
1260 offset = inst2 & 0xff;
1261 if (insn & 0x0080)
1262 addr = pv_add_constant (addr, offset);
1263 else
1264 addr = pv_add_constant (addr, -offset);
1265
1266 if (stack.store_would_trash (addr))
1267 break;
1268
1269 stack.store (addr, 4, regs[regno1]);
1270 stack.store (pv_add_constant (addr, 4),
1271 4, regs[regno2]);
1272
1273 if (insn & 0x0020)
1274 regs[bits (insn, 0, 3)] = addr;
1275 }
1276
1277 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
1278 && (inst2 & 0x0c00) == 0x0c00
1279 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1280 {
1281 int regno = bits (inst2, 12, 15);
1282 pv_t addr = regs[bits (insn, 0, 3)];
1283
1284 offset = inst2 & 0xff;
1285 if (inst2 & 0x0200)
1286 addr = pv_add_constant (addr, offset);
1287 else
1288 addr = pv_add_constant (addr, -offset);
1289
1290 if (stack.store_would_trash (addr))
1291 break;
1292
1293 stack.store (addr, 4, regs[regno]);
1294
1295 if (inst2 & 0x0100)
1296 regs[bits (insn, 0, 3)] = addr;
1297 }
1298
1299 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
1300 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1301 {
1302 int regno = bits (inst2, 12, 15);
1303 pv_t addr;
1304
1305 offset = inst2 & 0xfff;
1306 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
1307
1308 if (stack.store_would_trash (addr))
1309 break;
1310
1311 stack.store (addr, 4, regs[regno]);
1312 }
1313
1314 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
1315 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1316 /* Ignore stores of argument registers to the stack. */
1317 ;
1318
1319 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
1320 && (inst2 & 0x0d00) == 0x0c00
1321 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1322 /* Ignore stores of argument registers to the stack. */
1323 ;
1324
1325 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
1326 { registers } */
1327 && (inst2 & 0x8000) == 0x0000
1328 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1329 /* Ignore block loads from the stack, potentially copying
1330 parameters from memory. */
1331 ;
1332
1333 else if ((insn & 0xff70) == 0xe950 /* ldrd Rt, Rt2,
1334 [Rn, #+/-imm] */
1335 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1336 /* Similarly ignore dual loads from the stack. */
1337 ;
1338
1339 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
1340 && (inst2 & 0x0d00) == 0x0c00
1341 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1342 /* Similarly ignore single loads from the stack. */
1343 ;
1344
1345 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
1346 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1347 /* Similarly ignore single loads from the stack. */
1348 ;
1349
1350 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
1351 && (inst2 & 0x8000) == 0x0000)
1352 {
1353 unsigned int imm = ((bits (insn, 10, 10) << 11)
1354 | (bits (inst2, 12, 14) << 8)
1355 | bits (inst2, 0, 7));
1356
1357 regs[bits (inst2, 8, 11)]
1358 = pv_add_constant (regs[bits (insn, 0, 3)],
1359 thumb_expand_immediate (imm));
1360 }
1361
1362 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
1363 && (inst2 & 0x8000) == 0x0000)
1364 {
1365 unsigned int imm = ((bits (insn, 10, 10) << 11)
1366 | (bits (inst2, 12, 14) << 8)
1367 | bits (inst2, 0, 7));
1368
1369 regs[bits (inst2, 8, 11)]
1370 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
1371 }
1372
1373 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
1374 && (inst2 & 0x8000) == 0x0000)
1375 {
1376 unsigned int imm = ((bits (insn, 10, 10) << 11)
1377 | (bits (inst2, 12, 14) << 8)
1378 | bits (inst2, 0, 7));
1379
1380 regs[bits (inst2, 8, 11)]
1381 = pv_add_constant (regs[bits (insn, 0, 3)],
1382 - (CORE_ADDR) thumb_expand_immediate (imm));
1383 }
1384
1385 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
1386 && (inst2 & 0x8000) == 0x0000)
1387 {
1388 unsigned int imm = ((bits (insn, 10, 10) << 11)
1389 | (bits (inst2, 12, 14) << 8)
1390 | bits (inst2, 0, 7));
1391
1392 regs[bits (inst2, 8, 11)]
1393 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
1394 }
1395
1396 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1397 {
1398 unsigned int imm = ((bits (insn, 10, 10) << 11)
1399 | (bits (inst2, 12, 14) << 8)
1400 | bits (inst2, 0, 7));
1401
1402 regs[bits (inst2, 8, 11)]
1403 = pv_constant (thumb_expand_immediate (imm));
1404 }
1405
1406 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1407 {
1408 unsigned int imm
1409 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1410
1411 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1412 }
1413
1414 else if (insn == 0xea5f /* mov.w Rd,Rm */
1415 && (inst2 & 0xf0f0) == 0)
1416 {
1417 int dst_reg = (inst2 & 0x0f00) >> 8;
1418 int src_reg = inst2 & 0xf;
1419 regs[dst_reg] = regs[src_reg];
1420 }
1421
1422 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1423 {
1424 /* Constant pool loads. */
1425 unsigned int constant;
1426 CORE_ADDR loc;
1427
1428 offset = bits (inst2, 0, 11);
1429 if (insn & 0x0080)
1430 loc = start + 4 + offset;
1431 else
1432 loc = start + 4 - offset;
1433
1434 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1435 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1436 }
1437
1438 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1439 {
1440 /* Constant pool loads. */
1441 unsigned int constant;
1442 CORE_ADDR loc;
1443
1444 offset = bits (inst2, 0, 7) << 2;
1445 if (insn & 0x0080)
1446 loc = start + 4 + offset;
1447 else
1448 loc = start + 4 - offset;
1449
1450 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1451 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1452
1453 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1454 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1455 }
1456 /* Start of ARMv8.1-m PACBTI extension instructions. */
1457 else if (IS_PAC (whole_insn))
1458 {
1459 /* LR and SP are input registers. PAC is in R12. LR is
1460 signed from this point onwards. NOP space. */
1461 ra_signed_state = true;
1462 }
1463 else if (IS_PACBTI (whole_insn))
1464 {
1465 /* LR and SP are input registers. PAC is in R12 and PC is a
1466 valid BTI landing pad. LR is signed from this point onwards.
1467 NOP space. */
1468 ra_signed_state = true;
1469 }
1470 else if (IS_BTI (whole_insn))
1471 {
1472 /* Valid BTI landing pad. NOP space. */
1473 }
1474 else if (IS_PACG (whole_insn))
1475 {
1476 /* Sign Rn using Rm and store the PAC in Rd. Rd is signed from
1477 this point onwards. */
1478 ra_signed_state = true;
1479 }
1480 else if (IS_AUT (whole_insn) || IS_AUTG (whole_insn))
1481 {
1482 /* These instructions appear close to the epilogue, when signed
1483 pointers are getting authenticated. */
1484 ra_signed_state = false;
1485 }
1486 /* End of ARMv8.1-m PACBTI extension instructions */
1487 else if (thumb2_instruction_changes_pc (insn, inst2))
1488 {
1489 /* Don't scan past anything that might change control flow. */
1490 break;
1491 }
1492 else
1493 {
1494 /* The optimizer might shove anything into the prologue,
1495 so we just skip what we don't recognize. */
1496 unrecognized_pc = start;
1497 }
1498
1499 /* Make sure we are dealing with a target that supports ARMv8.1-m
1500 PACBTI. */
1501 if (cache != nullptr && tdep->have_pacbti
1502 && ra_signed_state.has_value ())
1503 {
1504 arm_debug_printf ("Found pacbti instruction at %s",
1505 paddress (gdbarch, start));
1506 arm_debug_printf ("RA is %s",
1507 *ra_signed_state ? "signed" : "not signed");
1508 cache->ra_signed_state = ra_signed_state;
1509 }
1510
1511 start += 2;
1512 }
1513 else if (thumb_instruction_changes_pc (insn))
1514 {
1515 /* Don't scan past anything that might change control flow. */
1516 break;
1517 }
1518 else
1519 {
1520 /* The optimizer might shove anything into the prologue,
1521 so we just skip what we don't recognize. */
1522 unrecognized_pc = start;
1523 }
1524
1525 start += 2;
1526 }
1527
1528 arm_debug_printf ("Prologue scan stopped at %s",
1529 paddress (gdbarch, start));
1530
1531 if (unrecognized_pc == 0)
1532 unrecognized_pc = start;
1533
1534 if (cache == NULL)
1535 return unrecognized_pc;
1536
1537 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1538 {
1539 /* Frame pointer is fp. Frame size is constant. */
1540 cache->framereg = ARM_FP_REGNUM;
1541 cache->framesize = -regs[ARM_FP_REGNUM].k;
1542 }
1543 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1544 {
1545 /* Frame pointer is r7. Frame size is constant. */
1546 cache->framereg = THUMB_FP_REGNUM;
1547 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1548 }
1549 else
1550 {
1551 /* Try the stack pointer... this is a bit desperate. */
1552 cache->framereg = ARM_SP_REGNUM;
1553 cache->framesize = -regs[ARM_SP_REGNUM].k;
1554 }
1555
1556 for (i = 0; i < gdbarch_num_regs (gdbarch); i++)
1557 if (stack.find_reg (gdbarch, i, &offset))
1558 {
1559 cache->saved_regs[i].set_addr (offset);
1560 if (i == ARM_SP_REGNUM)
1561 arm_cache_set_active_sp_value (cache, tdep, offset);
1562 }
1563
1564 return unrecognized_pc;
1565 }
1566
1567
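/* As a concrete (hypothetical) example of the analysis above, a typical
   -mthumb prologue such as

     push  {r4, r7, lr}      save r4, r7 and lr; sp -= 12
     sub   sp, #20           allocate 20 bytes of locals
     add   r7, sp, #0        establish r7 as the frame pointer

   leaves CACHE->framereg == THUMB_FP_REGNUM, CACHE->framesize == 32, and
   records lr, r7 and r4 in CACHE->saved_regs at offsets -4, -8 and -12
   from the caller's stack pointer.  */
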
1568 /* Try to analyze the instructions starting at PC, which load the symbol
1569 __stack_chk_guard. Return the address of the instruction following the
1570 load, set the destination register number in *DESTREG, and set the size
1571 of the loading instructions in *OFFSET. Return 0 if the instructions
1572 are not recognized. */
1573
1574 static CORE_ADDR
1575 arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1576 unsigned int *destreg, int *offset)
1577 {
1578 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1579 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1580 unsigned int low, high, address;
1581
1582 address = 0;
1583 if (is_thumb)
1584 {
1585 unsigned short insn1
1586 = read_code_unsigned_integer (pc, 2, byte_order_for_code);
1587
1588 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1589 {
1590 *destreg = bits (insn1, 8, 10);
1591 *offset = 2;
1592 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1593 address = read_memory_unsigned_integer (address, 4,
1594 byte_order_for_code);
1595 }
1596 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1597 {
1598 unsigned short insn2
1599 = read_code_unsigned_integer (pc + 2, 2, byte_order_for_code);
1600
1601 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1602
1603 insn1
1604 = read_code_unsigned_integer (pc + 4, 2, byte_order_for_code);
1605 insn2
1606 = read_code_unsigned_integer (pc + 6, 2, byte_order_for_code);
1607
1608 /* movt Rd, #const */
1609 if ((insn1 & 0xfbc0) == 0xf2c0)
1610 {
1611 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1612 *destreg = bits (insn2, 8, 11);
1613 *offset = 8;
1614 address = (high << 16 | low);
1615 }
1616 }
1617 }
1618 else
1619 {
1620 unsigned int insn
1621 = read_code_unsigned_integer (pc, 4, byte_order_for_code);
1622
1623 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1624 {
1625 address = bits (insn, 0, 11) + pc + 8;
1626 address = read_memory_unsigned_integer (address, 4,
1627 byte_order_for_code);
1628
1629 *destreg = bits (insn, 12, 15);
1630 *offset = 4;
1631 }
1632 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1633 {
1634 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1635
1636 insn
1637 = read_code_unsigned_integer (pc + 4, 4, byte_order_for_code);
1638
1639 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1640 {
1641 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1642 *destreg = bits (insn, 12, 15);
1643 *offset = 8;
1644 address = (high << 16 | low);
1645 }
1646 }
1647 }
1648
1649 return address;
1650 }
1651
1652 /* Try to skip the sequence of instructions used for the stack protector.
1653 If PC points to the first instruction of this sequence, return the
1654 address of the first instruction after the sequence; otherwise, return
1655 the original PC.
1656
1657 On ARM, this sequence is composed of three main steps:
1658 Step 1: load the symbol __stack_chk_guard,
1659 Step 2: load from the address of __stack_chk_guard,
1660 Step 3: store it somewhere else.
1661
1662 Usually, the instructions for step 2 and step 3 are the same across ARM
1663 architectures: step 2 is one instruction, 'ldr Rx, [Rn, #0]', and step 3
1664 is also one instruction, 'str Rx, [r7, #immd]'. However, the instructions
1665 for step 1 vary between ARM architectures. On ARMv7, they are:
1666
1667 movw Rn, #:lower16:__stack_chk_guard
1668 movt Rn, #:upper16:__stack_chk_guard
1669
1670 On ARMv5t, it is:
1671
1672 ldr Rn, .Label
1673 ....
1674 .Label:
1675 .word __stack_chk_guard
1676
1677 Since ldr/str are very common instructions, we can't use them alone as the
1678 'fingerprint' or 'signature' of the stack protector sequence. Here we use
1679 the sequence {movw/movt, ldr}/ldr/str plus the symbol __stack_chk_guard,
1680 if not stripped, as the 'fingerprint' of a stack protector code sequence. */
1681
1682 static CORE_ADDR
1683 arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1684 {
1685 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1686 unsigned int basereg;
1687 struct bound_minimal_symbol stack_chk_guard;
1688 int offset;
1689 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1690 CORE_ADDR addr;
1691
1692 /* Try to parse the instructions in Step 1. */
1693 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1694 &basereg, &offset);
1695 if (!addr)
1696 return pc;
1697
1698 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1699 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1700 Otherwise, this sequence cannot be for the stack protector. */
1701 if (stack_chk_guard.minsym == NULL
1702 || !startswith (stack_chk_guard.minsym->linkage_name (), "__stack_chk_guard"))
1703 return pc;
1704
1705 if (is_thumb)
1706 {
1707 unsigned int destreg;
1708 unsigned short insn
1709 = read_code_unsigned_integer (pc + offset, 2, byte_order_for_code);
1710
1711 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1712 if ((insn & 0xf800) != 0x6800)
1713 return pc;
1714 if (bits (insn, 3, 5) != basereg)
1715 return pc;
1716 destreg = bits (insn, 0, 2);
1717
1718 insn = read_code_unsigned_integer (pc + offset + 2, 2,
1719 byte_order_for_code);
1720 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1721 if ((insn & 0xf800) != 0x6000)
1722 return pc;
1723 if (destreg != bits (insn, 0, 2))
1724 return pc;
1725 }
1726 else
1727 {
1728 unsigned int destreg;
1729 unsigned int insn
1730 = read_code_unsigned_integer (pc + offset, 4, byte_order_for_code);
1731
1732 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1733 if ((insn & 0x0e500000) != 0x04100000)
1734 return pc;
1735 if (bits (insn, 16, 19) != basereg)
1736 return pc;
1737 destreg = bits (insn, 12, 15);
1738 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1739 insn = read_code_unsigned_integer (pc + offset + 4,
1740 4, byte_order_for_code);
1741 if ((insn & 0x0e500000) != 0x04000000)
1742 return pc;
1743 if (bits (insn, 12, 15) != destreg)
1744 return pc;
1745 }
1746 /* The total size of the two ldr/str instructions is 4 bytes on Thumb-2,
1747 and 8 bytes on ARM. */
1748 if (is_thumb)
1749 return pc + offset + 4;
1750 else
1751 return pc + offset + 8;
1752 }
1753
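/* For example (hypothetical addresses, Thumb-2 code, and assuming
   __stack_chk_guard resolves to a minimal symbol), a protected prologue
   might begin with

     0x80000:  movw  r3, #:lower16:__stack_chk_guard
     0x80004:  movt  r3, #:upper16:__stack_chk_guard
     0x80008:  ldr   r3, [r3, #0]
     0x8000a:  str   r3, [r7, #12]

   in which case arm_skip_stack_protector (0x80000, gdbarch) returns
   0x8000c, i.e. PC + OFFSET (8) + 4.  */
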
1754 /* Advance the PC across any function entry prologue instructions to
1755 reach some "real" code.
1756
1757 The APCS (ARM Procedure Call Standard) defines the following
1758 prologue:
1759
1760 mov ip, sp
1761 [stmfd sp!, {a1,a2,a3,a4}]
1762 stmfd sp!, {...,fp,ip,lr,pc}
1763 [stfe f7, [sp, #-12]!]
1764 [stfe f6, [sp, #-12]!]
1765 [stfe f5, [sp, #-12]!]
1766 [stfe f4, [sp, #-12]!]
1767 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1768
1769 static CORE_ADDR
1770 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1771 {
1772 CORE_ADDR func_addr, func_end_addr, limit_pc;
1773
1774 /* See if we can determine the end of the prologue via the symbol table.
1775 If so, then return either PC, or the PC after the prologue, whichever
1776 is greater. */
1777 bool func_addr_found
1778 = find_pc_partial_function (pc, NULL, &func_addr, &func_end_addr);
1779
1780 /* Whether the function is in Thumb mode or not. */
1781 bool func_is_thumb = false;
1782
1783 if (func_addr_found)
1784 {
1785 CORE_ADDR post_prologue_pc
1786 = skip_prologue_using_sal (gdbarch, func_addr);
1787 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
1788
1789 if (post_prologue_pc)
1790 post_prologue_pc
1791 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1792
1793
1794 /* GCC always emits a line note before the prologue and another
1795 one after, even if the two are at the same address or on the
1796 same line. Take advantage of this so that we do not need to
1797 know every instruction that might appear in the prologue. We
1798 will have producer information for most binaries; if it is
1799 missing (e.g. for -gstabs), assume the GNU tools. */
1800 if (post_prologue_pc
1801 && (cust == NULL
1802 || cust->producer () == NULL
1803 || startswith (cust->producer (), "GNU ")
1804 || producer_is_llvm (cust->producer ())))
1805 return post_prologue_pc;
1806
1807 if (post_prologue_pc != 0)
1808 {
1809 CORE_ADDR analyzed_limit;
1810
1811 /* For non-GCC compilers, make sure the entire line is an
1812 acceptable prologue; GDB will round this function's
1813 return value up to the end of the following line so we
1814 can not skip just part of a line (and we do not want to).
1815
1816 RealView does not treat the prologue specially, but does
1817 associate prologue code with the opening brace; so this
1818 lets us skip the first line if we think it is the opening
1819 brace. */
1820 func_is_thumb = arm_pc_is_thumb (gdbarch, func_addr);
1821 if (func_is_thumb)
1822 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1823 post_prologue_pc, NULL);
1824 else
1825 analyzed_limit
1826 = arm_analyze_prologue (gdbarch, func_addr, post_prologue_pc,
1827 NULL, target_arm_instruction_reader ());
1828
1829 if (analyzed_limit != post_prologue_pc)
1830 return func_addr;
1831
1832 return post_prologue_pc;
1833 }
1834 }
1835
1836 /* Can't determine prologue from the symbol table, need to examine
1837 instructions. */
1838
1839 /* Find an upper limit on the function prologue using the debug
1840 information. If the debug information could not be used to provide
1841 that bound, then use an arbitrary large number as the upper bound. */
1842 /* Like arm_scan_prologue, stop no later than pc + 64. */
1843 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1844 if (limit_pc == 0)
1845 limit_pc = pc + 64; /* Magic. */
1846
1847 /* Set the correct adjustment based on whether the function is in Thumb mode or
1848 not. We use it to get the address of the last instruction in the
1849 function (as opposed to the first address of the next function). */
1850 CORE_ADDR adjustment = func_is_thumb ? 2 : 4;
1851
1852 limit_pc
1853 = func_end_addr == 0 ? limit_pc : std::min (limit_pc,
1854 func_end_addr - adjustment);
1855
1856 /* Check if this is Thumb code. */
1857 if (arm_pc_is_thumb (gdbarch, pc))
1858 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1859 else
1860 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL,
1861 target_arm_instruction_reader ());
1862 }
1863
1864 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1865 This function decodes a Thumb function prologue to determine:
1866 1) the size of the stack frame
1867 2) which registers are saved on it
1868 3) the offsets of saved regs
1869 4) the offset from the stack pointer to the frame pointer
1870
1871 A typical Thumb function prologue would create this stack frame
1872 (offsets relative to FP)
1873 old SP -> 24 stack parameters
1874 20 LR
1875 16 R7
1876 R7 -> 0 local variables (16 bytes)
1877 SP -> -12 additional stack space (12 bytes)
1878 The frame size would thus be 36 bytes, and the frame offset would be
1879 12 bytes. The frame register is R7.
1880
1881 The comments for thumb_analyze_prologue() describe the algorithm we use
1882 to detect the end of the prologue. */
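/* For the example frame above, the arithmetic works out as follows: the
   frame extends from the old SP (FP + 24) down to the current SP (FP - 12),
   giving the 36-byte frame size, and the 12-byte frame offset is the
   distance from the frame register R7 down to SP.  */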
1883
1884 static void
1885 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1886 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1887 {
1888 CORE_ADDR prologue_start;
1889 CORE_ADDR prologue_end;
1890
1891 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1892 &prologue_end))
1893 {
1894 /* See comment in arm_scan_prologue for an explanation of
1895 this heuristic. */
1896 if (prologue_end > prologue_start + 64)
1897 {
1898 prologue_end = prologue_start + 64;
1899 }
1900 }
1901 else
1902 /* We're in the boondocks: we have no idea where the start of the
1903 function is. */
1904 return;
1905
1906 prologue_end = std::min (prologue_end, prev_pc);
1907
1908 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1909 }
1910
1911 /* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1912 otherwise. */
1913
1914 static int
1915 arm_instruction_restores_sp (unsigned int insn)
1916 {
1917 if (bits (insn, 28, 31) != INST_NV)
1918 {
1919 if ((insn & 0x0df0f000) == 0x0080d000
1920 /* ADD SP (register or immediate). */
1921 || (insn & 0x0df0f000) == 0x0040d000
1922 /* SUB SP (register or immediate). */
1923 || (insn & 0x0ffffff0) == 0x01a0d000
1924 /* MOV SP. */
1925 || (insn & 0x0fff0000) == 0x08bd0000
1926 /* POP (LDMIA). */
1927 || (insn & 0x0fff0000) == 0x049d0000)
1928 /* POP of a single register. */
1929 return 1;
1930 }
1931
1932 return 0;
1933 }
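/* For instance (one illustrative encoding, not a full enumeration): the
   epilogue instruction "pop {fp, pc}", i.e. ldmia sp!, {r11, r15}, encodes
   as 0xe8bd8800; its condition field is 0xe (not NV) and
   (insn & 0x0fff0000) == 0x08bd0000, so this function returns 1.  */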
1934
1935 /* Implement immediate value decoding, as described in section A5.2.4
1936 (Modified immediate constants in ARM instructions) of the ARM Architecture
1937 Reference Manual (ARMv7-A and ARMv7-R edition). */
1938
1939 static uint32_t
1940 arm_expand_immediate (uint32_t imm)
1941 {
1942 /* Immediate values are 12 bits long. */
1943 gdb_assert ((imm & 0xfffff000) == 0);
1944
1945 uint32_t unrotated_value = imm & 0xff;
1946 uint32_t rotate_amount = (imm & 0xf00) >> 7;
1947
1948 if (rotate_amount == 0)
1949 return unrotated_value;
1950
1951 return ((unrotated_value >> rotate_amount)
1952 | (unrotated_value << (32 - rotate_amount)));
1953 }
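/* A worked example of the decoding above: the 12-bit immediate 0x4ff has
   rotate field 0x4 (so rotate_amount is 8) and unrotated value 0xff;
   rotating 0xff right by 8 bits yields 0xff000000.  An immediate with a
   zero rotate field, e.g. 0x0ab, simply expands to 0xab.  A minimal
   standalone sketch of the same arithmetic, assuming only the C standard
   library (kept in a comment, independent of GDB's build):

     #include <assert.h>
     #include <stdint.h>

     static uint32_t expand (uint32_t imm)
     {
       uint32_t val = imm & 0xff, rot = (imm & 0xf00) >> 7;
       return rot == 0 ? val : (val >> rot) | (val << (32 - rot));
     }

     int main (void)
     {
       assert (expand (0x4ff) == 0xff000000u);
       assert (expand (0x0ab) == 0xabu);
       return 0;
     }
*/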
1954
1955 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1956 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1957 fill it in. Return the first address not recognized as a prologue
1958 instruction.
1959
1960 We recognize all the instructions typically found in ARM prologues,
1961 plus harmless instructions which can be skipped (either for analysis
1962 purposes, or a more restrictive set that can be skipped when finding
1963 the end of the prologue). */
1964
1965 static CORE_ADDR
1966 arm_analyze_prologue (struct gdbarch *gdbarch,
1967 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1968 struct arm_prologue_cache *cache,
1969 const arm_instruction_reader &insn_reader)
1970 {
1971 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1972 int regno;
1973 CORE_ADDR offset, current_pc;
1974 pv_t regs[ARM_FPS_REGNUM];
1975 CORE_ADDR unrecognized_pc = 0;
1976 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
1977
1978 /* Search the prologue looking for instructions that set up the
1979 frame pointer, adjust the stack pointer, and save registers.
1980
1981 Be careful, however, and if it doesn't look like a prologue,
1982 don't try to scan it. If, for instance, a frameless function
1983 begins with stmfd sp!, then we will tell ourselves there is
1984 a frame, which will confuse stack traceback, as well as "finish"
1985 and other operations that rely on a knowledge of the stack
1986 traceback. */
1987
1988 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1989 regs[regno] = pv_register (regno, 0);
1990 pv_area stack (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1991
1992 for (current_pc = prologue_start;
1993 current_pc < prologue_end;
1994 current_pc += 4)
1995 {
1996 uint32_t insn = insn_reader.read (current_pc, byte_order_for_code);
1997
1998 if (insn == 0xe1a0c00d) /* mov ip, sp */
1999 {
2000 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
2001 continue;
2002 }
2003 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
2004 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
2005 {
2006 uint32_t imm = arm_expand_immediate (insn & 0xfff);
2007 int rd = bits (insn, 12, 15);
2008 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
2009 continue;
2010 }
2011 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
2012 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
2013 {
2014 uint32_t imm = arm_expand_immediate (insn & 0xfff);
2015 int rd = bits (insn, 12, 15);
2016 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
2017 continue;
2018 }
2019 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
2020 [sp, #-4]! */
2021 {
2022 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
2023 break;
2024 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
2025 stack.store (regs[ARM_SP_REGNUM], 4,
2026 regs[bits (insn, 12, 15)]);
2027 continue;
2028 }
2029 else if ((insn & 0xffff0000) == 0xe92d0000)
2030 /* stmfd sp!, {..., fp, ip, lr, pc}
2031 or
2032 stmfd sp!, {a1, a2, a3, a4} */
2033 {
2034 int mask = insn & 0xffff;
2035
2036 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
2037 break;
2038
2039 /* Calculate offsets of saved registers. */
2040 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
2041 if (mask & (1 << regno))
2042 {
2043 regs[ARM_SP_REGNUM]
2044 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
2045 stack.store (regs[ARM_SP_REGNUM], 4, regs[regno]);
2046 }
2047 }
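      /* Example (illustrative encoding only): "stmfd sp!, {r4, fp, ip, lr, pc}"
	 encodes as 0xe92dd810; the mask 0xd810 has bits 4, 11, 12, 14 and 15
	 set, so the loop above records pc, lr, ip, fp and r4 at SP - 4,
	 SP - 8, SP - 12, SP - 16 and SP - 20 respectively, leaving
	 regs[ARM_SP_REGNUM] at SP - 20.  */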
2048 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
2049 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
2050 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
2051 {
2052 /* No need to add this to saved_regs -- it's just an arg reg. */
2053 continue;
2054 }
2055 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
2056 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
2057 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
2058 {
2059 /* No need to add this to saved_regs -- it's just an arg reg. */
2060 continue;
2061 }
2062 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
2063 { registers } */
2064 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
2065 {
2066 /* No need to add this to saved_regs -- it's just arg regs. */
2067 continue;
2068 }
2069 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip, #n */
2070 {
2071 uint32_t imm = arm_expand_immediate (insn & 0xfff);
2072 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
2073 }
2074 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp, #n */
2075 {
2076 uint32_t imm = arm_expand_immediate (insn & 0xfff);
2077 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
2078 }
2079 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
2080 [sp, -#c]! */
2081 && tdep->have_fpa_registers)
2082 {
2083 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
2084 break;
2085
2086 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
2087 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
2088 stack.store (regs[ARM_SP_REGNUM], 12, regs[regno]);
2089 }
2090 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
2091 [sp!] */
2092 && tdep->have_fpa_registers)
2093 {
2094 int n_saved_fp_regs;
2095 unsigned int fp_start_reg, fp_bound_reg;
2096
2097 if (stack.store_would_trash (regs[ARM_SP_REGNUM]))
2098 break;
2099
2100 if ((insn & 0x800) == 0x800) /* N0 is set */
2101 {
2102 if ((insn & 0x40000) == 0x40000) /* N1 is set */
2103 n_saved_fp_regs = 3;
2104 else
2105 n_saved_fp_regs = 1;
2106 }
2107 else
2108 {
2109 if ((insn & 0x40000) == 0x40000) /* N1 is set */
2110 n_saved_fp_regs = 2;
2111 else
2112 n_saved_fp_regs = 4;
2113 }
2114
2115 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
2116 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
2117 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
2118 {
2119 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
2120 stack.store (regs[ARM_SP_REGNUM], 12,
2121 regs[fp_start_reg]);
2122 }
2123 }
2124 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
2125 {
2126 /* Allow some special function calls when skipping the
2127 prologue; GCC generates these before storing arguments to
2128 the stack. */
2129 CORE_ADDR dest = BranchDest (current_pc, insn);
2130
2131 if (skip_prologue_function (gdbarch, dest, 0))
2132 continue;
2133 else
2134 break;
2135 }
2136 else if ((insn & 0xf0000000) != 0xe0000000)
2137 break; /* Condition not true, exit early. */
2138 else if (arm_instruction_changes_pc (insn))
2139 /* Don't scan past anything that might change control flow. */
2140 break;
2141 else if (arm_instruction_restores_sp (insn))
2142 {
2143 /* Don't scan past the epilogue. */
2144 break;
2145 }
2146 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
2147 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
2148 /* Ignore block loads from the stack, potentially copying
2149 parameters from memory. */
2150 continue;
2151 else if ((insn & 0xfc500000) == 0xe4100000
2152 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
2153 /* Similarly ignore single loads from the stack. */
2154 continue;
2155 else if ((insn & 0xffff0ff0) == 0xe1a00000)
2156 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
2157 register instead of the stack. */
2158 continue;
2159 else
2160 {
2161 /* The optimizer might shove anything into the prologue.  If we
2162 are building up the cache (cache != NULL) from scanning the
2163 prologue, just skip what we don't recognize and scan further
2164 to make the cache as complete as possible.  However, if we
2165 are only skipping the prologue, stop immediately at the first
2166 unrecognized instruction. */
2167 unrecognized_pc = current_pc;
2168 if (cache != NULL)
2169 continue;
2170 else
2171 break;
2172 }
2173 }
2174
2175 if (unrecognized_pc == 0)
2176 unrecognized_pc = current_pc;
2177
2178 if (cache)
2179 {
2180 int framereg, framesize;
2181
2182 /* The frame size is just the distance from the frame register
2183 to the original stack pointer. */
2184 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
2185 {
2186 /* Frame pointer is fp. */
2187 framereg = ARM_FP_REGNUM;
2188 framesize = -regs[ARM_FP_REGNUM].k;
2189 }
2190 else
2191 {
2192 /* Try the stack pointer... this is a bit desperate. */
2193 framereg = ARM_SP_REGNUM;
2194 framesize = -regs[ARM_SP_REGNUM].k;
2195 }
2196
2197 cache->framereg = framereg;
2198 cache->framesize = framesize;
2199
2200 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
2201 if (stack.find_reg (gdbarch, regno, &offset))
2202 {
2203 cache->saved_regs[regno].set_addr (offset);
2204 if (regno == ARM_SP_REGNUM)
2205 arm_cache_set_active_sp_value (cache, tdep, offset);
2206 }
2207 }
2208
2209 arm_debug_printf ("Prologue scan stopped at %s",
2210 paddress (gdbarch, unrecognized_pc));
2211
2212 return unrecognized_pc;
2213 }
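/* As a worked example of the scan above (a sketch of typical APCS output
   from GCC, not a claim about any particular compiler version), the
   prologue

     mov   ip, sp
     stmfd sp!, {fp, ip, lr, pc}
     sub   fp, ip, #4
     sub   sp, sp, #16

   leaves regs[ARM_FP_REGNUM] = original SP - 4 and regs[ARM_SP_REGNUM]
   = original SP - 32, so the code above selects ARM_FP_REGNUM as the
   frame register with framesize 4; arm_make_prologue_cache below then
   recovers the caller's SP as unwound_fp + framesize.  */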
2214
2215 static void
2216 arm_scan_prologue (frame_info_ptr this_frame,
2217 struct arm_prologue_cache *cache)
2218 {
2219 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2220 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2221 CORE_ADDR prologue_start, prologue_end;
2222 CORE_ADDR prev_pc = get_frame_pc (this_frame);
2223 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
2224 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
2225
2226 /* Assume there is no frame until proven otherwise. */
2227 cache->framereg = ARM_SP_REGNUM;
2228 cache->framesize = 0;
2229
2230 /* Check for Thumb prologue. */
2231 if (arm_frame_is_thumb (this_frame))
2232 {
2233 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
2234 return;
2235 }
2236
2237 /* Find the function prologue. If we can't find the function in
2238 the symbol table, peek in the stack frame to find the PC. */
2239 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
2240 &prologue_end))
2241 {
2242 /* One way to find the end of the prologue (which works well
2243 for unoptimized code) is to do the following:
2244
2245 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
2246
2247 if (sal.line == 0)
2248 prologue_end = prev_pc;
2249 else if (sal.end < prologue_end)
2250 prologue_end = sal.end;
2251
2252 This mechanism is very accurate so long as the optimizer
2253 doesn't move any instructions from the function body into the
2254 prologue. If this happens, sal.end will be the last
2255 instruction in the first hunk of prologue code just before
2256 the first instruction that the scheduler has moved from
2257 the body to the prologue.
2258
2259 In order to make sure that we scan all of the prologue
2260 instructions, we use a slightly less accurate mechanism which
2261 may scan more than necessary. To help compensate for this
2262 lack of accuracy, the prologue scanning loop below contains
2263 several clauses which will cause the loop to terminate early if
2264 an implausible prologue instruction is encountered.
2265
2266 The expression
2267
2268 prologue_start + 64
2269
2270 is a suitable endpoint since it accounts for the largest
2271 possible prologue plus up to five instructions inserted by
2272 the scheduler. */
2273
2274 if (prologue_end > prologue_start + 64)
2275 {
2276 prologue_end = prologue_start + 64; /* See above. */
2277 }
2278 }
2279 else
2280 {
2281 /* We have no symbol information. Our only option is to assume this
2282 function has a standard stack frame and the normal frame register.
2283 Then, we can find the value of our frame pointer on entrance to
2284 the callee (or at the present moment if this is the innermost frame).
2285 The value stored there should be the address of the stmfd + 8. */
2286 CORE_ADDR frame_loc;
2287 ULONGEST return_value;
2288
2289 /* AAPCS does not use a frame register, so we can abort here. */
2290 if (tdep->arm_abi == ARM_ABI_AAPCS)
2291 return;
2292
2293 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
2294 if (!safe_read_memory_unsigned_integer (frame_loc, 4, byte_order,
2295 &return_value))
2296 return;
2297 else
2298 {
2299 prologue_start = gdbarch_addr_bits_remove
2300 (gdbarch, return_value) - 8;
2301 prologue_end = prologue_start + 64; /* See above. */
2302 }
2303 }
2304
2305 if (prev_pc < prologue_end)
2306 prologue_end = prev_pc;
2307
2308 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache,
2309 target_arm_instruction_reader ());
2310 }
2311
2312 static struct arm_prologue_cache *
2313 arm_make_prologue_cache (frame_info_ptr this_frame)
2314 {
2315 int reg;
2316 struct arm_prologue_cache *cache;
2317 CORE_ADDR unwound_fp, prev_sp;
2318
2319 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2320 arm_cache_init (cache, this_frame);
2321
2322 arm_scan_prologue (this_frame, cache);
2323
2324 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
2325 if (unwound_fp == 0)
2326 return cache;
2327
2328 arm_gdbarch_tdep *tdep
2329 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
2330
2331 prev_sp = unwound_fp + cache->framesize;
2332 arm_cache_set_active_sp_value (cache, tdep, prev_sp);
2333
2334 /* Calculate actual addresses of saved registers using offsets
2335 determined by arm_scan_prologue. */
2336 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2337 if (cache->saved_regs[reg].is_addr ())
2338 cache->saved_regs[reg].set_addr (cache->saved_regs[reg].addr ()
2339 + prev_sp);
2340
2341 return cache;
2342 }
2343
2344 /* Implementation of the stop_reason hook for arm_prologue frames. */
2345
2346 static enum unwind_stop_reason
2347 arm_prologue_unwind_stop_reason (frame_info_ptr this_frame,
2348 void **this_cache)
2349 {
2350 struct arm_prologue_cache *cache;
2351 CORE_ADDR pc;
2352
2353 if (*this_cache == NULL)
2354 *this_cache = arm_make_prologue_cache (this_frame);
2355 cache = (struct arm_prologue_cache *) *this_cache;
2356
2357 /* This is meant to halt the backtrace at "_start". */
2358 pc = get_frame_pc (this_frame);
2359 gdbarch *arch = get_frame_arch (this_frame);
2360 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (arch);
2361 if (pc <= tdep->lowest_pc)
2362 return UNWIND_OUTERMOST;
2363
2364 /* If we've hit a wall, stop. */
2365 if (arm_cache_get_prev_sp_value (cache, tdep) == 0)
2366 return UNWIND_OUTERMOST;
2367
2368 return UNWIND_NO_REASON;
2369 }
2370
2371 /* Our frame ID for a normal frame is the current function's starting PC
2372 and the caller's SP when we were called. */
2373
2374 static void
2375 arm_prologue_this_id (frame_info_ptr this_frame,
2376 void **this_cache,
2377 struct frame_id *this_id)
2378 {
2379 struct arm_prologue_cache *cache;
2380 struct frame_id id;
2381 CORE_ADDR pc, func;
2382
2383 if (*this_cache == NULL)
2384 *this_cache = arm_make_prologue_cache (this_frame);
2385 cache = (struct arm_prologue_cache *) *this_cache;
2386
2387 arm_gdbarch_tdep *tdep
2388 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
2389
2390 /* Use function start address as part of the frame ID. If we cannot
2391 identify the start address (due to missing symbol information),
2392 fall back to just using the current PC. */
2393 pc = get_frame_pc (this_frame);
2394 func = get_frame_func (this_frame);
2395 if (!func)
2396 func = pc;
2397
2398 id = frame_id_build (arm_cache_get_prev_sp_value (cache, tdep), func);
2399 *this_id = id;
2400 }
2401
2402 static struct value *
2403 arm_prologue_prev_register (frame_info_ptr this_frame,
2404 void **this_cache,
2405 int prev_regnum)
2406 {
2407 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2408 struct arm_prologue_cache *cache;
2409 CORE_ADDR sp_value;
2410
2411 if (*this_cache == NULL)
2412 *this_cache = arm_make_prologue_cache (this_frame);
2413 cache = (struct arm_prologue_cache *) *this_cache;
2414
2415 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
2416
2417 /* If this frame has signed the return address, mark it as so. */
2418 if (tdep->have_pacbti && cache->ra_signed_state.has_value ()
2419 && *cache->ra_signed_state)
2420 set_frame_previous_pc_masked (this_frame);
2421
2422 /* If we are asked to unwind the PC, then we need to return the LR
2423 instead. The prologue may save PC, but it will point into this
2424 frame's prologue, not the next frame's resume location. Also
2425 strip the saved T bit. A valid LR may have the low bit set, but
2426 a valid PC never does. */
2427 if (prev_regnum == ARM_PC_REGNUM)
2428 {
2429 CORE_ADDR lr;
2430
2431 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2432 return frame_unwind_got_constant (this_frame, prev_regnum,
2433 arm_addr_bits_remove (gdbarch, lr));
2434 }
2435
2436 /* SP is generally not saved to the stack, but this frame is
2437 identified by the next frame's stack pointer at the time of the call.
2438 The value was already reconstructed into PREV_SP. */
2439 if (prev_regnum == ARM_SP_REGNUM)
2440 return frame_unwind_got_constant (this_frame, prev_regnum,
2441 arm_cache_get_prev_sp_value (cache, tdep));
2442
2443 /* The value might be one of the alternative SP registers; if so,
2444 use the value already constructed. */
2445 if (arm_is_alternative_sp_register (tdep, prev_regnum))
2446 {
2447 sp_value = arm_cache_get_sp_register (cache, tdep, prev_regnum);
2448 return frame_unwind_got_constant (this_frame, prev_regnum, sp_value);
2449 }
2450
2451 /* The CPSR may have been changed by the call instruction and by the
2452 called function. The only bit we can reconstruct is the T bit,
2453 by checking the low bit of LR as of the call. This is a reliable
2454 indicator of Thumb-ness except for some ARM v4T pre-interworking
2455 Thumb code, which could get away with a clear low bit as long as
2456 the called function did not use bx. Guess that all other
2457 bits are unchanged; the condition flags are presumably lost,
2458 but the processor status is likely valid. */
2459 if (prev_regnum == ARM_PS_REGNUM)
2460 {
2461 ULONGEST cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
2462 CORE_ADDR lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2463
2464 cpsr = reconstruct_t_bit (gdbarch, lr, cpsr);
2465 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
2466 }
2467
2468 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2469 prev_regnum);
2470 }
2471
2472 static frame_unwind arm_prologue_unwind = {
2473 "arm prologue",
2474 NORMAL_FRAME,
2475 arm_prologue_unwind_stop_reason,
2476 arm_prologue_this_id,
2477 arm_prologue_prev_register,
2478 NULL,
2479 default_frame_sniffer
2480 };
2481
2482 /* Maintain a list of ARM exception table entries per objfile, similar to the
2483 list of mapping symbols. We only cache entries for standard ARM-defined
2484 personality routines; the cache will contain only the frame unwinding
2485 instructions associated with the entry (not the descriptors). */
2486
2487 struct arm_exidx_entry
2488 {
2489 CORE_ADDR addr;
2490 gdb_byte *entry;
2491
2492 bool operator< (const arm_exidx_entry &other) const
2493 {
2494 return addr < other.addr;
2495 }
2496 };
2497
2498 struct arm_exidx_data
2499 {
2500 std::vector<std::vector<arm_exidx_entry>> section_maps;
2501 };
2502
2503 /* Per-BFD key to store exception handling information. */
2504 static const registry<bfd>::key<arm_exidx_data> arm_exidx_data_key;
2505
2506 static struct obj_section *
2507 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2508 {
2509 for (obj_section *osect : objfile->sections ())
2510 if (bfd_section_flags (osect->the_bfd_section) & SEC_ALLOC)
2511 {
2512 bfd_vma start, size;
2513 start = bfd_section_vma (osect->the_bfd_section);
2514 size = bfd_section_size (osect->the_bfd_section);
2515
2516 if (start <= vma && vma < start + size)
2517 return osect;
2518 }
2519
2520 return NULL;
2521 }
2522
2523 /* Parse contents of exception table and exception index sections
2524 of OBJFILE, and fill in the exception table entry cache.
2525
2526 For each entry that refers to a standard ARM-defined personality
2527 routine, extract the frame unwinding instructions (from either
2528 the index or the table section). The unwinding instructions
2529 are normalized by:
2530 - extracting them from the rest of the table data
2531 - converting to host endianness
2532 - appending the implicit 0xb0 ("Finish") code
2533
2534 The extracted and normalized instructions are stored for later
2535 retrieval by the arm_find_exidx_entry routine. */
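/* For example (one possible encoding, shown only to illustrate the
   normalization): a short-form index word of 0x80a8b0b0 selects the
   pre-defined personality routine and carries the three unwind bytes
   0xa8, 0xb0, 0xb0 in its low 24 bits; the cached entry then becomes
   the byte sequence a8 b0 b0 b0, the final 0xb0 being the implicit
   "Finish" code appended below.  */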
2536
2537 static void
2538 arm_exidx_new_objfile (struct objfile *objfile)
2539 {
2540 struct arm_exidx_data *data;
2541 asection *exidx, *extab;
2542 bfd_vma exidx_vma = 0, extab_vma = 0;
2543 LONGEST i;
2544
2545 /* If we've already touched this file, do nothing. */
2546 if (arm_exidx_data_key.get (objfile->obfd.get ()) != nullptr)
2547 return;
2548
2549 /* Read contents of exception table and index. */
2550 exidx = bfd_get_section_by_name (objfile->obfd.get (),
2551 ELF_STRING_ARM_unwind);
2552 gdb::byte_vector exidx_data;
2553 if (exidx)
2554 {
2555 exidx_vma = bfd_section_vma (exidx);
2556 exidx_data.resize (bfd_section_size (exidx));
2557
2558 if (!bfd_get_section_contents (objfile->obfd.get (), exidx,
2559 exidx_data.data (), 0,
2560 exidx_data.size ()))
2561 return;
2562 }
2563
2564 extab = bfd_get_section_by_name (objfile->obfd.get (), ".ARM.extab");
2565 gdb::byte_vector extab_data;
2566 if (extab)
2567 {
2568 extab_vma = bfd_section_vma (extab);
2569 extab_data.resize (bfd_section_size (extab));
2570
2571 if (!bfd_get_section_contents (objfile->obfd.get (), extab,
2572 extab_data.data (), 0,
2573 extab_data.size ()))
2574 return;
2575 }
2576
2577 /* Allocate exception table data structure. */
2578 data = arm_exidx_data_key.emplace (objfile->obfd.get ());
2579 data->section_maps.resize (objfile->obfd->section_count);
2580
2581 /* Fill in exception table. */
2582 for (i = 0; i < exidx_data.size () / 8; i++)
2583 {
2584 struct arm_exidx_entry new_exidx_entry;
2585 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data.data () + i * 8);
2586 bfd_vma val = bfd_h_get_32 (objfile->obfd,
2587 exidx_data.data () + i * 8 + 4);
2588 bfd_vma addr = 0, word = 0;
2589 int n_bytes = 0, n_words = 0;
2590 struct obj_section *sec;
2591 gdb_byte *entry = NULL;
2592
2593 /* Extract address of start of function. */
2594 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2595 idx += exidx_vma + i * 8;
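      /* The expression above sign-extends a 31-bit PC-relative offset
	 (prel31).  For instance, a stored value of 0x7ffffffc decodes to
	 -4, placing the function 4 bytes before this index entry, while
	 0x00000008 decodes to +8.  (Values chosen purely for
	 illustration.)  */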
2596
2597 /* Find section containing function and compute section offset. */
2598 sec = arm_obj_section_from_vma (objfile, idx);
2599 if (sec == NULL)
2600 continue;
2601 idx -= bfd_section_vma (sec->the_bfd_section);
2602
2603 /* Determine address of exception table entry. */
2604 if (val == 1)
2605 {
2606 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2607 }
2608 else if ((val & 0xff000000) == 0x80000000)
2609 {
2610 /* Exception table entry embedded in .ARM.exidx
2611 -- must be short form. */
2612 word = val;
2613 n_bytes = 3;
2614 }
2615 else if (!(val & 0x80000000))
2616 {
2617 /* Exception table entry in .ARM.extab. */
2618 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2619 addr += exidx_vma + i * 8 + 4;
2620
2621 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_data.size ())
2622 {
2623 word = bfd_h_get_32 (objfile->obfd,
2624 extab_data.data () + addr - extab_vma);
2625 addr += 4;
2626
2627 if ((word & 0xff000000) == 0x80000000)
2628 {
2629 /* Short form. */
2630 n_bytes = 3;
2631 }
2632 else if ((word & 0xff000000) == 0x81000000
2633 || (word & 0xff000000) == 0x82000000)
2634 {
2635 /* Long form. */
2636 n_bytes = 2;
2637 n_words = ((word >> 16) & 0xff);
2638 }
2639 else if (!(word & 0x80000000))
2640 {
2641 bfd_vma pers;
2642 struct obj_section *pers_sec;
2643 int gnu_personality = 0;
2644
2645 /* Custom personality routine. */
2646 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2647 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2648
2649 /* Check whether we've got one of the variants of the
2650 GNU personality routines. */
2651 pers_sec = arm_obj_section_from_vma (objfile, pers);
2652 if (pers_sec)
2653 {
2654 static const char *personality[] =
2655 {
2656 "__gcc_personality_v0",
2657 "__gxx_personality_v0",
2658 "__gcj_personality_v0",
2659 "__gnu_objc_personality_v0",
2660 NULL
2661 };
2662
2663 CORE_ADDR pc = pers + pers_sec->offset ();
2664 int k;
2665
2666 for (k = 0; personality[k]; k++)
2667 if (lookup_minimal_symbol_by_pc_name
2668 (pc, personality[k], objfile))
2669 {
2670 gnu_personality = 1;
2671 break;
2672 }
2673 }
2674
2675 /* If so, the next word contains a word count in the high
2676 byte, followed by the same unwind instructions as the
2677 pre-defined forms. */
2678 if (gnu_personality
2679 && addr + 4 <= extab_vma + extab_data.size ())
2680 {
2681 word = bfd_h_get_32 (objfile->obfd,
2682 (extab_data.data ()
2683 + addr - extab_vma));
2684 addr += 4;
2685 n_bytes = 3;
2686 n_words = ((word >> 24) & 0xff);
2687 }
2688 }
2689 }
2690 }
2691
2692 /* Sanity check address. */
2693 if (n_words)
2694 if (addr < extab_vma
2695 || addr + 4 * n_words > extab_vma + extab_data.size ())
2696 n_words = n_bytes = 0;
2697
2698 /* The unwind instructions reside in WORD (only the N_BYTES least
2699 significant bytes are valid), followed by N_WORDS words in the
2700 extab section starting at ADDR. */
2701 if (n_bytes || n_words)
2702 {
2703 gdb_byte *p = entry
2704 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2705 n_bytes + n_words * 4 + 1);
2706
2707 while (n_bytes--)
2708 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2709
2710 while (n_words--)
2711 {
2712 word = bfd_h_get_32 (objfile->obfd,
2713 extab_data.data () + addr - extab_vma);
2714 addr += 4;
2715
2716 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2717 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2718 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2719 *p++ = (gdb_byte) (word & 0xff);
2720 }
2721
2722 /* Implied "Finish" to terminate the list. */
2723 *p++ = 0xb0;
2724 }
2725
2726 /* Push entry onto vector. They are guaranteed to always
2727 appear in order of increasing addresses. */
2728 new_exidx_entry.addr = idx;
2729 new_exidx_entry.entry = entry;
2730 data->section_maps[sec->the_bfd_section->index].push_back
2731 (new_exidx_entry);
2732 }
2733 }
2734
2735 /* Search for the exception table entry covering MEMADDR. If one is found,
2736 return a pointer to its data. Otherwise, return NULL. If START is non-NULL,
2737 set *START to the start of the region covered by this entry. */
2738
2739 static gdb_byte *
2740 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2741 {
2742 struct obj_section *sec;
2743
2744 sec = find_pc_section (memaddr);
2745 if (sec != NULL)
2746 {
2747 struct arm_exidx_data *data;
2748 struct arm_exidx_entry map_key = { memaddr - sec->addr (), 0 };
2749
2750 data = arm_exidx_data_key.get (sec->objfile->obfd.get ());
2751 if (data != NULL)
2752 {
2753 std::vector<arm_exidx_entry> &map
2754 = data->section_maps[sec->the_bfd_section->index];
2755 if (!map.empty ())
2756 {
2757 auto idx = std::lower_bound (map.begin (), map.end (), map_key);
2758
2759 /* std::lower_bound finds the earliest ordered insertion
2760 point. If the following symbol starts at this exact
2761 address, we use that; otherwise, the preceding
2762 exception table entry covers this address. */
2763 if (idx < map.end ())
2764 {
2765 if (idx->addr == map_key.addr)
2766 {
2767 if (start)
2768 *start = idx->addr + sec->addr ();
2769 return idx->entry;
2770 }
2771 }
2772
2773 if (idx > map.begin ())
2774 {
2775 idx = idx - 1;
2776 if (start)
2777 *start = idx->addr + sec->addr ();
2778 return idx->entry;
2779 }
2780 }
2781 }
2782 }
2783
2784 return NULL;
2785 }
2786
2787 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2788 instruction list from the ARM exception table entry ENTRY, allocate and
2789 return a prologue cache structure describing how to unwind this frame.
2790
2791 Return NULL if the unwinding instruction list contains a "spare",
2792 "reserved" or "refuse to unwind" instruction as defined in section
2793 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2794 for the ARM Architecture" document. */
2795
2796 static struct arm_prologue_cache *
2797 arm_exidx_fill_cache (frame_info_ptr this_frame, gdb_byte *entry)
2798 {
2799 CORE_ADDR vsp = 0;
2800 int vsp_valid = 0;
2801
2802 struct arm_prologue_cache *cache;
2803 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2804 arm_cache_init (cache, this_frame);
2805
2806 for (;;)
2807 {
2808 gdb_byte insn;
2809
2810 /* Whenever we reload SP, we have to retrieve its actual value
2811 in the current frame. */
2812 if (!vsp_valid)
2813 {
2814 if (cache->saved_regs[ARM_SP_REGNUM].is_realreg ())
2815 {
2816 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg ();
2817 vsp = get_frame_register_unsigned (this_frame, reg);
2818 }
2819 else
2820 {
2821 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr ();
2822 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2823 }
2824
2825 vsp_valid = 1;
2826 }
2827
2828 /* Decode next unwind instruction. */
2829 insn = *entry++;
2830
2831 if ((insn & 0xc0) == 0)
2832 {
2833 int offset = insn & 0x3f;
2834 vsp += (offset << 2) + 4;
2835 }
2836 else if ((insn & 0xc0) == 0x40)
2837 {
2838 int offset = insn & 0x3f;
2839 vsp -= (offset << 2) + 4;
2840 }
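      /* Worked examples of the two cases above (example opcodes only):
	 opcode 0x00 adds 4 to vsp, opcode 0x3f adds 0x100, and opcode
	 0x41 subtracts 8.  */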
2841 else if ((insn & 0xf0) == 0x80)
2842 {
2843 int mask = ((insn & 0xf) << 8) | *entry++;
2844 int i;
2845
2846 /* The special case of an all-zero mask identifies
2847 "Refuse to unwind". We return NULL to fall back
2848 to the prologue analyzer. */
2849 if (mask == 0)
2850 return NULL;
2851
2852 /* Pop registers r4..r15 under mask. */
2853 for (i = 0; i < 12; i++)
2854 if (mask & (1 << i))
2855 {
2856 cache->saved_regs[4 + i].set_addr (vsp);
2857 vsp += 4;
2858 }
2859
2860 /* Special-case popping SP -- we need to reload vsp. */
2861 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2862 vsp_valid = 0;
2863 }
2864 else if ((insn & 0xf0) == 0x90)
2865 {
2866 int reg = insn & 0xf;
2867
2868 /* Reserved cases. */
2869 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2870 return NULL;
2871
2872 /* Set SP from another register and mark VSP for reload. */
2873 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2874 vsp_valid = 0;
2875 }
2876 else if ((insn & 0xf0) == 0xa0)
2877 {
2878 int count = insn & 0x7;
2879 int pop_lr = (insn & 0x8) != 0;
2880 int i;
2881
2882 /* Pop r4..r[4+count]. */
2883 for (i = 0; i <= count; i++)
2884 {
2885 cache->saved_regs[4 + i].set_addr (vsp);
2886 vsp += 4;
2887 }
2888
2889 /* If indicated by flag, pop LR as well. */
2890 if (pop_lr)
2891 {
2892 cache->saved_regs[ARM_LR_REGNUM].set_addr (vsp);
2893 vsp += 4;
2894 }
2895 }
2896 else if (insn == 0xb0)
2897 {
2898 /* We could only have updated PC by popping into it; if so, it
2899 will show up as an address. Otherwise, copy LR into PC. */
2900 if (!cache->saved_regs[ARM_PC_REGNUM].is_addr ())
2901 cache->saved_regs[ARM_PC_REGNUM]
2902 = cache->saved_regs[ARM_LR_REGNUM];
2903
2904 /* We're done. */
2905 break;
2906 }
2907 else if (insn == 0xb1)
2908 {
2909 int mask = *entry++;
2910 int i;
2911
2912 /* An all-zero mask or a mask >= 16 is "spare". */
2913 if (mask == 0 || mask >= 16)
2914 return NULL;
2915
2916 /* Pop r0..r3 under mask. */
2917 for (i = 0; i < 4; i++)
2918 if (mask & (1 << i))
2919 {
2920 cache->saved_regs[i].set_addr (vsp);
2921 vsp += 4;
2922 }
2923 }
2924 else if (insn == 0xb2)
2925 {
2926 ULONGEST offset = 0;
2927 unsigned shift = 0;
2928
2929 do
2930 {
2931 offset |= (*entry & 0x7f) << shift;
2932 shift += 7;
2933 }
2934 while (*entry++ & 0x80);
2935
2936 vsp += 0x204 + (offset << 2);
2937 }
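      /* Example of the 0xb2 decoding above (illustrative operand bytes
	 only): the operands 0x81 0x01 form the ULEB128 value 0x81, so
	 vsp is advanced by 0x204 + (0x81 << 2) = 0x408 bytes.  */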
2938 else if (insn == 0xb3)
2939 {
2940 int start = *entry >> 4;
2941 int count = (*entry++) & 0xf;
2942 int i;
2943
2944 /* Only registers D0..D15 are valid here. */
2945 if (start + count >= 16)
2946 return NULL;
2947
2948 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2949 for (i = 0; i <= count; i++)
2950 {
2951 cache->saved_regs[ARM_D0_REGNUM + start + i].set_addr (vsp);
2952 vsp += 8;
2953 }
2954
2955 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2956 vsp += 4;
2957 }
2958 else if ((insn & 0xf8) == 0xb8)
2959 {
2960 int count = insn & 0x7;
2961 int i;
2962
2963 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2964 for (i = 0; i <= count; i++)
2965 {
2966 cache->saved_regs[ARM_D0_REGNUM + 8 + i].set_addr (vsp);
2967 vsp += 8;
2968 }
2969
2970 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2971 vsp += 4;
2972 }
2973 else if (insn == 0xc6)
2974 {
2975 int start = *entry >> 4;
2976 int count = (*entry++) & 0xf;
2977 int i;
2978
2979 /* Only registers WR0..WR15 are valid. */
2980 if (start + count >= 16)
2981 return NULL;
2982
2983 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2984 for (i = 0; i <= count; i++)
2985 {
2986 cache->saved_regs[ARM_WR0_REGNUM + start + i].set_addr (vsp);
2987 vsp += 8;
2988 }
2989 }
2990 else if (insn == 0xc7)
2991 {
2992 int mask = *entry++;
2993 int i;
2994
2995 /* An all-zero mask or a mask >= 16 is "spare". */
2996 if (mask == 0 || mask >= 16)
2997 return NULL;
2998
2999 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
3000 for (i = 0; i < 4; i++)
3001 if (mask & (1 << i))
3002 {
3003 cache->saved_regs[ARM_WCGR0_REGNUM + i].set_addr (vsp);
3004 vsp += 4;
3005 }
3006 }
3007 else if ((insn & 0xf8) == 0xc0)
3008 {
3009 int count = insn & 0x7;
3010 int i;
3011
3012 /* Pop iwmmx registers WR[10]..WR[10+count]. */
3013 for (i = 0; i <= count; i++)
3014 {
3015 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].set_addr (vsp);
3016 vsp += 8;
3017 }
3018 }
3019 else if (insn == 0xc8)
3020 {
3021 int start = *entry >> 4;
3022 int count = (*entry++) & 0xf;
3023 int i;
3024
3025 /* Only registers D16..D31 are valid here. */
3026 if (start + count >= 16)
3027 return NULL;
3028
3029 /* Pop VFP double-precision registers
3030 D[16+start]..D[16+start+count]. */
3031 for (i = 0; i <= count; i++)
3032 {
3033 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].set_addr (vsp);
3034 vsp += 8;
3035 }
3036 }
3037 else if (insn == 0xc9)
3038 {
3039 int start = *entry >> 4;
3040 int count = (*entry++) & 0xf;
3041 int i;
3042
3043 /* Pop VFP double-precision registers D[start]..D[start+count]. */
3044 for (i = 0; i <= count; i++)
3045 {
3046 cache->saved_regs[ARM_D0_REGNUM + start + i].set_addr (vsp);
3047 vsp += 8;
3048 }
3049 }
3050 else if ((insn & 0xf8) == 0xd0)
3051 {
3052 int count = insn & 0x7;
3053 int i;
3054
3055 /* Pop VFP double-precision registers D[8]..D[8+count]. */
3056 for (i = 0; i <= count; i++)
3057 {
3058 cache->saved_regs[ARM_D0_REGNUM + 8 + i].set_addr (vsp);
3059 vsp += 8;
3060 }
3061 }
3062 else
3063 {
3064 /* Everything else is "spare". */
3065 return NULL;
3066 }
3067 }
3068
3069 /* If we restore SP from a register, assume this was the frame register.
3070 Otherwise just fall back to SP as frame register. */
3071 if (cache->saved_regs[ARM_SP_REGNUM].is_realreg ())
3072 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg ();
3073 else
3074 cache->framereg = ARM_SP_REGNUM;
3075
3076 /* Determine offset to previous frame. */
3077 cache->framesize
3078 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
3079
3080 /* We already got the previous SP. */
3081 arm_gdbarch_tdep *tdep
3082 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3083 arm_cache_set_active_sp_value (cache, tdep, vsp);
3084
3085 return cache;
3086 }
3087
3088 /* Unwinding via ARM exception table entries. Note that the sniffer
3089 already computes a filled-in prologue cache, which is then used
3090 with the same arm_prologue_this_id and arm_prologue_prev_register
3091 routines also used for prologue-parsing based unwinding. */
3092
3093 static int
3094 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
3095 frame_info_ptr this_frame,
3096 void **this_prologue_cache)
3097 {
3098 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3099 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3100 CORE_ADDR addr_in_block, exidx_region, func_start;
3101 struct arm_prologue_cache *cache;
3102 gdb_byte *entry;
3103
3104 /* See if we have an ARM exception table entry covering this address. */
3105 addr_in_block = get_frame_address_in_block (this_frame);
3106 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
3107 if (!entry)
3108 return 0;
3109
3110 /* The ARM exception table does not describe unwind information
3111 for arbitrary PC values, but is guaranteed to be correct only
3112 at call sites. We have to decide here whether we want to use
3113 ARM exception table information for this frame, or fall back
3114 to using prologue parsing. (Note that if we have DWARF CFI,
3115 this sniffer isn't even called -- CFI is always preferred.)
3116
3117 Before we make this decision, however, we check whether we
3118 actually have *symbol* information for the current frame.
3119 If not, prologue parsing would not work anyway, so we might
3120 as well use the exception table and hope for the best. */
3121 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
3122 {
3123 int exc_valid = 0;
3124
3125 /* If the next frame is "normal", we are at a call site in this
3126 frame, so exception information is guaranteed to be valid. */
3127 if (get_next_frame (this_frame)
3128 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
3129 exc_valid = 1;
3130
3131 /* Some syscalls keep PC pointing to the SVC instruction itself. */
3132 for (int shift = 0; shift <= 1 && !exc_valid; ++shift)
3133 {
3134 /* We also assume exception information is valid if we're currently
3135 blocked in a system call. The system library is supposed to
3136 ensure this, so that e.g. pthread cancellation works. */
3137 if (arm_frame_is_thumb (this_frame))
3138 {
3139 ULONGEST insn;
3140
3141 if (safe_read_memory_unsigned_integer ((get_frame_pc (this_frame)
3142 - (shift ? 2 : 0)),
3143 2, byte_order_for_code,
3144 &insn)
3145 && (insn & 0xff00) == 0xdf00 /* svc */)
3146 exc_valid = 1;
3147 }
3148 else
3149 {
3150 ULONGEST insn;
3151
3152 if (safe_read_memory_unsigned_integer ((get_frame_pc (this_frame)
3153 - (shift ? 4 : 0)),
3154 4, byte_order_for_code,
3155 &insn)
3156 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
3157 exc_valid = 1;
3158 }
3159 }
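      /* For reference, the encodings tested above are, e.g., 0xdf00
	 ("svc #0") in Thumb state and 0xef000000 ("svc 0") in ARM state;
	 any immediate passes the same masks.  */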
3160
3161 /* Bail out if we don't know that exception information is valid. */
3162 if (!exc_valid)
3163 return 0;
3164
3165 /* The ARM exception index does not mark the *end* of the region
3166 covered by the entry, and some functions will not have any entry.
3167 To correctly recognize the end of the covered region, the linker
3168 should have inserted dummy records with a CANTUNWIND marker.
3169
3170 Unfortunately, current versions of GNU ld do not reliably do
3171 this, and thus we may have found an incorrect entry above.
3172 As a (temporary) sanity check, we only use the entry if it
3173 lies *within* the bounds of the function. Note that this check
3174 might reject perfectly valid entries that just happen to cover
3175 multiple functions; therefore this check ought to be removed
3176 once the linker is fixed. */
3177 if (func_start > exidx_region)
3178 return 0;
3179 }
3180
3181 /* Decode the list of unwinding instructions into a prologue cache.
3182 Note that this may fail due to e.g. a "refuse to unwind" code. */
3183 cache = arm_exidx_fill_cache (this_frame, entry);
3184 if (!cache)
3185 return 0;
3186
3187 *this_prologue_cache = cache;
3188 return 1;
3189 }
3190
3191 struct frame_unwind arm_exidx_unwind = {
3192 "arm exidx",
3193 NORMAL_FRAME,
3194 default_frame_unwind_stop_reason,
3195 arm_prologue_this_id,
3196 arm_prologue_prev_register,
3197 NULL,
3198 arm_exidx_unwind_sniffer
3199 };
3200
3201 static struct arm_prologue_cache *
3202 arm_make_epilogue_frame_cache (frame_info_ptr this_frame)
3203 {
3204 struct arm_prologue_cache *cache;
3205 int reg;
3206
3207 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3208 arm_cache_init (cache, this_frame);
3209
3210 /* Still rely on the offset calculated from prologue. */
3211 arm_scan_prologue (this_frame, cache);
3212
3213 /* Since we are in epilogue, the SP has been restored. */
3214 arm_gdbarch_tdep *tdep
3215 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3216 arm_cache_set_active_sp_value (cache, tdep,
3217 get_frame_register_unsigned (this_frame,
3218 ARM_SP_REGNUM));
3219
3220 /* Calculate actual addresses of saved registers using offsets
3221 determined by arm_scan_prologue. */
3222 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
3223 if (cache->saved_regs[reg].is_addr ())
3224 cache->saved_regs[reg].set_addr (cache->saved_regs[reg].addr ()
3225 + arm_cache_get_prev_sp_value (cache, tdep));
3226
3227 return cache;
3228 }
3229
3230 /* Implementation of function hook 'this_id' in
3231 'struct frame_unwind' for epilogue unwinder. */
3232
3233 static void
3234 arm_epilogue_frame_this_id (frame_info_ptr this_frame,
3235 void **this_cache,
3236 struct frame_id *this_id)
3237 {
3238 struct arm_prologue_cache *cache;
3239 CORE_ADDR pc, func;
3240
3241 if (*this_cache == NULL)
3242 *this_cache = arm_make_epilogue_frame_cache (this_frame);
3243 cache = (struct arm_prologue_cache *) *this_cache;
3244
3245 /* Use function start address as part of the frame ID. If we cannot
3246 identify the start address (due to missing symbol information),
3247 fall back to just using the current PC. */
3248 pc = get_frame_pc (this_frame);
3249 func = get_frame_func (this_frame);
3250 if (func == 0)
3251 func = pc;
3252
3253 arm_gdbarch_tdep *tdep
3254 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3255 *this_id = frame_id_build (arm_cache_get_prev_sp_value (cache, tdep), pc);
3256 }
3257
3258 /* Implementation of function hook 'prev_register' in
3259 'struct frame_unwind' for epilogue unwinder. */
3260
3261 static struct value *
3262 arm_epilogue_frame_prev_register (frame_info_ptr this_frame,
3263 void **this_cache, int regnum)
3264 {
3265 if (*this_cache == NULL)
3266 *this_cache = arm_make_epilogue_frame_cache (this_frame);
3267
3268 return arm_prologue_prev_register (this_frame, this_cache, regnum);
3269 }
3270
3271 static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
3272 CORE_ADDR pc);
3273 static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
3274 CORE_ADDR pc);
3275
3276 /* Implementation of function hook 'sniffer' in
3277 'struct frame_unwind' for epilogue unwinder. */
3278
3279 static int
3280 arm_epilogue_frame_sniffer (const struct frame_unwind *self,
3281 frame_info_ptr this_frame,
3282 void **this_prologue_cache)
3283 {
3284 if (frame_relative_level (this_frame) == 0)
3285 {
3286 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3287 CORE_ADDR pc = get_frame_pc (this_frame);
3288
3289 if (arm_frame_is_thumb (this_frame))
3290 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3291 else
3292 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3293 }
3294 else
3295 return 0;
3296 }
3297
3298 /* Frame unwinder from epilogue. */
3299
3300 static const struct frame_unwind arm_epilogue_frame_unwind =
3301 {
3302 "arm epilogue",
3303 NORMAL_FRAME,
3304 default_frame_unwind_stop_reason,
3305 arm_epilogue_frame_this_id,
3306 arm_epilogue_frame_prev_register,
3307 NULL,
3308 arm_epilogue_frame_sniffer,
3309 };
3310
3311 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
3312 trampoline, return the target PC. Otherwise return 0.
3313
3314 void call0a (char c, short s, int i, long l) {}
3315
3316 int main (void)
3317 {
3318 (*pointer_to_call0a) (c, s, i, l);
3319 }
3320
3321 Instead of calling a stub library function _call_via_xx (xx is
3322 the register name), GCC may inline the trampoline in the object
3323 file as below (register r2 has the address of call0a).
3324
3325 .global main
3326 .type main, %function
3327 ...
3328 bl .L1
3329 ...
3330 .size main, .-main
3331
3332 .L1:
3333 bx r2
3334
3335 The trampoline 'bx r2' doesn't belong to main. */
3336
3337 static CORE_ADDR
3338 arm_skip_bx_reg (frame_info_ptr frame, CORE_ADDR pc)
3339 {
3340 /* The heuristic for recognizing such a trampoline is that FRAME is
3341 executing in Thumb mode and the instruction at PC is 'bx Rm'. */
3342 if (arm_frame_is_thumb (frame))
3343 {
3344 gdb_byte buf[2];
3345
3346 if (target_read_memory (pc, buf, 2) == 0)
3347 {
3348 struct gdbarch *gdbarch = get_frame_arch (frame);
3349 enum bfd_endian byte_order_for_code
3350 = gdbarch_byte_order_for_code (gdbarch);
3351 uint16_t insn
3352 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3353
3354 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3355 {
3356 CORE_ADDR dest
3357 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
3358
3359 /* Clear the LSB so that gdb core sets step-resume
3360 breakpoint at the right address. */
3361 return UNMAKE_THUMB_ADDR (dest);
3362 }
3363 }
3364 }
3365
3366 return 0;
3367 }
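/* For example (an illustrative encoding, with r2 holding the target as in
   the comment above): "bx r2" encodes as 0x4710, which passes
   (insn & 0xff80) == 0x4700 with Rm = bits 3..6 = 2, so this function
   returns the value of r2 with its Thumb bit cleared.  */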
3368
3369 static struct arm_prologue_cache *
3370 arm_make_stub_cache (frame_info_ptr this_frame)
3371 {
3372 struct arm_prologue_cache *cache;
3373
3374 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3375 arm_cache_init (cache, this_frame);
3376
3377 arm_gdbarch_tdep *tdep
3378 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3379 arm_cache_set_active_sp_value (cache, tdep,
3380 get_frame_register_unsigned (this_frame,
3381 ARM_SP_REGNUM));
3382
3383 return cache;
3384 }
3385
3386 /* Our frame ID for a stub frame is the current SP and LR. */
3387
3388 static void
3389 arm_stub_this_id (frame_info_ptr this_frame,
3390 void **this_cache,
3391 struct frame_id *this_id)
3392 {
3393 struct arm_prologue_cache *cache;
3394
3395 if (*this_cache == NULL)
3396 *this_cache = arm_make_stub_cache (this_frame);
3397 cache = (struct arm_prologue_cache *) *this_cache;
3398
3399 arm_gdbarch_tdep *tdep
3400 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3401 *this_id = frame_id_build (arm_cache_get_prev_sp_value (cache, tdep),
3402 get_frame_pc (this_frame));
3403 }
3404
3405 static int
3406 arm_stub_unwind_sniffer (const struct frame_unwind *self,
3407 frame_info_ptr this_frame,
3408 void **this_prologue_cache)
3409 {
3410 CORE_ADDR addr_in_block;
3411 gdb_byte dummy[4];
3412 CORE_ADDR pc, start_addr;
3413 const char *name;
3414
3415 addr_in_block = get_frame_address_in_block (this_frame);
3416 pc = get_frame_pc (this_frame);
3417 if (in_plt_section (addr_in_block)
3418 /* We also use the stub unwinder if the target memory is unreadable
3419 to avoid having the prologue unwinder trying to read it. */
3420 || target_read_memory (pc, dummy, 4) != 0)
3421 return 1;
3422
3423 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
3424 && arm_skip_bx_reg (this_frame, pc) != 0)
3425 return 1;
3426
3427 return 0;
3428 }
3429
3430 struct frame_unwind arm_stub_unwind = {
3431 "arm stub",
3432 NORMAL_FRAME,
3433 default_frame_unwind_stop_reason,
3434 arm_stub_this_id,
3435 arm_prologue_prev_register,
3436 NULL,
3437 arm_stub_unwind_sniffer
3438 };
3439
3440 /* Put here the code to store, into CACHE->saved_regs, the addresses
3441 of the saved registers of frame described by THIS_FRAME. CACHE is
3442 returned. */
3443
3444 static struct arm_prologue_cache *
3445 arm_m_exception_cache (frame_info_ptr this_frame)
3446 {
3447 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3448 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
3449 struct arm_prologue_cache *cache;
3450
3451 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3452 arm_cache_init (cache, this_frame);
3453
3454 /* ARMv7-M Architecture Reference "B1.5.6 Exception entry behavior"
3455 describes which bits in LR define which stack was used prior to the
3456 exception and whether the FPU was in use (causing an extended stack frame). */
3457
3458 /* In the lockup state PC contains a lockup magic value.
3459 The PC value of the next outer frame is irreversibly lost.
3460 The other registers are intact, so LR likely contains the PC of
3461 some frame near the outer one, but we cannot analyze the next
3462 outer frame without knowing its PC, and therefore we do not know
3463 the SP fixup for this frame.
3464 Some heuristics to resynchronize SP might be possible.
3465 For simplicity, just terminate the unwinding to prevent it going
3466 astray and attempting to read data/addresses it shouldn't,
3467 which may cause further issues due to side effects. */
3468 CORE_ADDR pc = get_frame_pc (this_frame);
3469 if (arm_m_addr_is_lockup (pc))
3470 {
3471 /* The lockup can be real only in the innermost frame, since the
3472 CPU is stopped and cannot create more frames.  If we hit the
3473 lockup magic PC in any other frame, it is just a sentinel at
3474 the top of the stack: do not warn in that case. */
3475 if (frame_relative_level (this_frame) == 0)
3476 warning (_("ARM M in lockup state, stack unwinding terminated."));
3477
3478 /* Terminate any further stack unwinding. */
3479 arm_cache_set_active_sp_value (cache, tdep, 0);
3480 return cache;
3481 }
3482
3483 CORE_ADDR lr = get_frame_register_unsigned (this_frame, ARM_LR_REGNUM);
3484
3485 /* ARMv7-M Architecture Reference "A2.3.1 Arm core registers"
3486 states that LR is set to 0xffffffff on reset. ARMv8-M Architecture
3487 Reference "B3.3 Registers" states that LR is set to 0xffffffff on warm
3488 reset if Main Extension is implemented, otherwise the value is unknown. */
3489 if (lr == 0xffffffff)
3490 {
3491 /* Terminate any further stack unwinding. */
3492 arm_cache_set_active_sp_value (cache, tdep, 0);
3493 return cache;
3494 }
3495
3496 /* Check FNC_RETURN indicator bits (24-31). */
3497 bool fnc_return = (((lr >> 24) & 0xff) == 0xfe);
3498 if (fnc_return)
3499 {
3500 /* FNC_RETURN is only valid for targets with Security Extension. */
3501 if (!tdep->have_sec_ext)
3502 {
3503 error (_("While unwinding an exception frame, found unexpected Link "
3504 "Register value %s that requires the security extension, "
3505 "but the extension was not found or is disabled. This "
3506 "should not happen and may be caused by corrupt data or a "
3507 "bug in GDB."), phex (lr, ARM_INT_REGISTER_SIZE));
3508 }
3509
3510 if (!arm_unwind_secure_frames)
3511 {
3512 warning (_("Non-secure to secure stack unwinding disabled."));
3513
3514 /* Terminate any further stack unwinding. */
3515 arm_cache_set_active_sp_value (cache, tdep, 0);
3516 return cache;
3517 }
3518
3519 ULONGEST xpsr = get_frame_register_unsigned (this_frame, ARM_PS_REGNUM);
3520 if ((xpsr & 0x1ff) != 0)
3521 /* Handler mode: This is the mode that exceptions are handled in. */
3522 arm_cache_switch_prev_sp (cache, tdep, tdep->m_profile_msp_s_regnum);
3523 else
3524 /* Thread mode: This is the normal mode that programs run in. */
3525 arm_cache_switch_prev_sp (cache, tdep, tdep->m_profile_psp_s_regnum);
3526
3527 CORE_ADDR unwound_sp = arm_cache_get_prev_sp_value (cache, tdep);
3528
3529 /* Stack layout for a function call from Secure to Non-Secure state
3530 (ARMv8-M section B3.16):
3531
3532 SP Offset
3533
3534 +-------------------+
3535 0x08 | |
3536 +-------------------+ <-- Original SP
3537 0x04 | Partial xPSR |
3538 +-------------------+
3539 0x00 | Return Address |
3540 +===================+ <-- New SP */
3541
3542 cache->saved_regs[ARM_PC_REGNUM].set_addr (unwound_sp + 0x00);
3543 cache->saved_regs[ARM_LR_REGNUM].set_addr (unwound_sp + 0x00);
3544 cache->saved_regs[ARM_PS_REGNUM].set_addr (unwound_sp + 0x04);
3545
3546 arm_cache_set_active_sp_value (cache, tdep, unwound_sp + 0x08);
3547
3548 return cache;
3549 }
3550
3551 /* Check EXC_RETURN indicator bits (24-31). */
3552 bool exc_return = (((lr >> 24) & 0xff) == 0xff);
3553 if (exc_return)
3554 {
3555 int sp_regnum;
3556 bool secure_stack_used = false;
3557 bool default_callee_register_stacking = false;
3558 bool exception_domain_is_secure = false;
3559 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3560
3561 /* Check EXC_RETURN bit SPSEL if Main or Thread (process) stack used. */
3562 bool process_stack_used = (bit (lr, 2) != 0);
3563
3564 if (tdep->have_sec_ext)
3565 {
3566 secure_stack_used = (bit (lr, 6) != 0);
3567 default_callee_register_stacking = (bit (lr, 5) != 0);
3568 exception_domain_is_secure = (bit (lr, 0) != 0);
3569
3570 /* Unwinding from non-secure to secure can trip security
3571 measures. In order to avoid the debugger being
3572 intrusive, rely on the user to configure the requested
3573 mode. */
3574 if (secure_stack_used && !exception_domain_is_secure
3575 && !arm_unwind_secure_frames)
3576 {
3577 warning (_("Non-secure to secure stack unwinding disabled."));
3578
3579 /* Terminate any further stack unwinding. */
3580 arm_cache_set_active_sp_value (cache, tdep, 0);
3581 return cache;
3582 }
3583
3584 if (process_stack_used)
3585 {
3586 if (secure_stack_used)
3587 /* Secure thread (process) stack used, use PSP_S as SP. */
3588 sp_regnum = tdep->m_profile_psp_s_regnum;
3589 else
3590 /* Non-secure thread (process) stack used, use PSP_NS as SP. */
3591 sp_regnum = tdep->m_profile_psp_ns_regnum;
3592 }
3593 else
3594 {
3595 if (secure_stack_used)
3596 /* Secure main stack used, use MSP_S as SP. */
3597 sp_regnum = tdep->m_profile_msp_s_regnum;
3598 else
3599 /* Non-secure main stack used, use MSP_NS as SP. */
3600 sp_regnum = tdep->m_profile_msp_ns_regnum;
3601 }
3602 }
3603 else
3604 {
3605 if (process_stack_used)
3606 /* Thread (process) stack used, use PSP as SP. */
3607 sp_regnum = tdep->m_profile_psp_regnum;
3608 else
3609 /* Main stack used, use MSP as SP. */
3610 sp_regnum = tdep->m_profile_msp_regnum;
3611 }
3612
3613 /* Set the active SP regnum. */
3614 arm_cache_switch_prev_sp (cache, tdep, sp_regnum);
3615
3616 /* Fetch the SP to use for this frame. */
3617 CORE_ADDR unwound_sp = arm_cache_get_prev_sp_value (cache, tdep);
3618
3619 /* Exception entry context stacking is described in the ARMv8-M (section
3620 B3.19) and ARMv7-M (sections B1.5.6 and B1.5.7) Architecture Reference
3621 Manuals.
3622
3623 The following figure shows the structure of the stack frame when
3624 Security and Floating-point extensions are present.
3625
3626 SP Offsets
3627 Without With
3628 Callee Regs Callee Regs
3629 (Secure -> Non-Secure)
3630 +-------------------+
3631 0xA8 | | 0xD0
3632 +===================+ --+ <-- Original SP
3633 0xA4 | S31 | 0xCC |
3634 +-------------------+ |
3635 ... | Additional FP context
3636 +-------------------+ |
3637 0x68 | S16 | 0x90 |
3638 +===================+ --+
3639 0x64 | Reserved | 0x8C |
3640 +-------------------+ |
3641 0x60 | FPSCR | 0x88 |
3642 +-------------------+ |
3643 0x5C | S15 | 0x84 | FP context
3644 +-------------------+ |
3645 ... |
3646 +-------------------+ |
3647 0x20 | S0 | 0x48 |
3648 +===================+ --+
3649 0x1C | xPSR | 0x44 |
3650 +-------------------+ |
3651 0x18 | Return address | 0x40 |
3652 +-------------------+ |
3653 0x14 | LR(R14) | 0x3C |
3654 +-------------------+ |
3655 0x10 | R12 | 0x38 | State context
3656 +-------------------+ |
3657 0x0C | R3 | 0x34 |
3658 +-------------------+ |
3659 ... |
3660 +-------------------+ |
3661 0x00 | R0 | 0x28 |
3662 +===================+ --+
3663 | R11 | 0x24 |
3664 +-------------------+ |
3665 ... |
3666 +-------------------+ | Additional state
3667 | R4 | 0x08 | context when
3668 +-------------------+ | transitioning from
3669 | Reserved | 0x04 | Secure to Non-Secure
3670 +-------------------+ |
3671 | Magic signature | 0x00 |
3672 +===================+ --+ <-- New SP */
3673
3674 uint32_t sp_r0_offset = 0;
3675
3676 /* With the Security extension, the hardware saves R4..R11 too. */
3677 if (tdep->have_sec_ext && secure_stack_used
3678 && (!default_callee_register_stacking || !exception_domain_is_secure))
3679 {
3680 /* Read R4..R11 from the integer callee registers. */
3681 cache->saved_regs[4].set_addr (unwound_sp + 0x08);
3682 cache->saved_regs[5].set_addr (unwound_sp + 0x0C);
3683 cache->saved_regs[6].set_addr (unwound_sp + 0x10);
3684 cache->saved_regs[7].set_addr (unwound_sp + 0x14);
3685 cache->saved_regs[8].set_addr (unwound_sp + 0x18);
3686 cache->saved_regs[9].set_addr (unwound_sp + 0x1C);
3687 cache->saved_regs[10].set_addr (unwound_sp + 0x20);
3688 cache->saved_regs[11].set_addr (unwound_sp + 0x24);
3689 sp_r0_offset = 0x28;
3690 }
3691
3692 /* The hardware saves eight 32-bit words, comprising xPSR,
3693 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
3694 "B1.5.6 Exception entry behavior" in
3695 "ARMv7-M Architecture Reference Manual". */
3696 cache->saved_regs[0].set_addr (unwound_sp + sp_r0_offset);
3697 cache->saved_regs[1].set_addr (unwound_sp + sp_r0_offset + 0x04);
3698 cache->saved_regs[2].set_addr (unwound_sp + sp_r0_offset + 0x08);
3699 cache->saved_regs[3].set_addr (unwound_sp + sp_r0_offset + 0x0C);
3700 cache->saved_regs[ARM_IP_REGNUM].set_addr (unwound_sp + sp_r0_offset
3701 + 0x10);
3702 cache->saved_regs[ARM_LR_REGNUM].set_addr (unwound_sp + sp_r0_offset
3703 + 0x14);
3704 cache->saved_regs[ARM_PC_REGNUM].set_addr (unwound_sp + sp_r0_offset
3705 + 0x18);
3706 cache->saved_regs[ARM_PS_REGNUM].set_addr (unwound_sp + sp_r0_offset
3707 + 0x1C);
3708
3709 /* Check EXC_RETURN bit FTYPE to see if the extended stack frame type
3710 (FPU regs stored) was used. */
3711 bool extended_frame_used = (bit (lr, 4) == 0);
3712 if (extended_frame_used)
3713 {
3714 ULONGEST fpccr;
3715 ULONGEST fpcar;
3716
3717 /* Read FPCCR register. */
3718 if (!safe_read_memory_unsigned_integer (FPCCR, ARM_INT_REGISTER_SIZE,
3719 byte_order, &fpccr))
3720 {
3721 warning (_("Could not fetch required FPCCR content. Further "
3722 "unwinding is impossible."));
3723 arm_cache_set_active_sp_value (cache, tdep, 0);
3724 return cache;
3725 }
3726
3727 /* Read FPCAR register. */
3728 if (!safe_read_memory_unsigned_integer (FPCAR, ARM_INT_REGISTER_SIZE,
3729 byte_order, &fpcar))
3730 {
3731 warning (_("Could not fetch FPCAR content. Further unwinding of "
3732 "FP register values will be unreliable."));
3733 fpcar = 0;
3734 }
3735
3736 bool fpccr_aspen = bit (fpccr, 31);
3737 bool fpccr_lspen = bit (fpccr, 30);
3738 bool fpccr_ts = bit (fpccr, 26);
3739 bool fpccr_lspact = bit (fpccr, 0);
3740
3741 /* The LSPEN and ASPEN bits indicate whether lazy state preservation
3742 for FP registers is enabled or disabled. The LSPACT bit indicates,
3743 together with FPCAR, whether the lazy state preservation feature is
3744 active for the current frame or for another frame.
3745 See "Lazy context save of FP state" in B1.5.7, and also ARM AN298
3746 (Cortex-M4F) for details. */
3747 bool fpcar_points_to_this_frame = ((unwound_sp + sp_r0_offset + 0x20)
3748 == (fpcar & ~0x7));
3749 bool read_fp_regs_from_stack = (!(fpccr_aspen && fpccr_lspen
3750 && fpccr_lspact
3751 && fpcar_points_to_this_frame));
3752
3753 /* Extended stack frame type used. */
3754 if (read_fp_regs_from_stack)
3755 {
3756 CORE_ADDR addr = unwound_sp + sp_r0_offset + 0x20;
3757 for (int i = 0; i < 8; i++)
3758 {
3759 cache->saved_regs[ARM_D0_REGNUM + i].set_addr (addr);
3760 addr += 8;
3761 }
3762 }
3763 cache->saved_regs[ARM_FPSCR_REGNUM].set_addr (unwound_sp
3764 + sp_r0_offset + 0x60);
3765
3766 if (tdep->have_sec_ext && !default_callee_register_stacking
3767 && fpccr_ts)
3768 {
3769 /* Handle floating-point callee saved registers. */
3770 if (read_fp_regs_from_stack)
3771 {
3772 CORE_ADDR addr = unwound_sp + sp_r0_offset + 0x68;
3773 for (int i = 8; i < 16; i++)
3774 {
3775 cache->saved_regs[ARM_D0_REGNUM + i].set_addr (addr);
3776 addr += 8;
3777 }
3778 }
3779
3780 arm_cache_set_active_sp_value (cache, tdep,
3781 unwound_sp + sp_r0_offset + 0xA8);
3782 }
3783 else
3784 {
3785 /* Offset 0x64 is reserved. */
3786 arm_cache_set_active_sp_value (cache, tdep,
3787 unwound_sp + sp_r0_offset + 0x68);
3788 }
3789 }
3790 else
3791 {
3792 /* Standard stack frame type used. */
3793 arm_cache_set_active_sp_value (cache, tdep,
3794 unwound_sp + sp_r0_offset + 0x20);
3795 }
3796
3797 /* If bit 9 of the saved xPSR is set, then there is a four-byte
3798 aligner between the top of the 32-byte stack frame and the
3799 previous context's stack pointer. */
3800 ULONGEST xpsr;
3801 if (!safe_read_memory_unsigned_integer (cache->saved_regs[ARM_PS_REGNUM]
3802 .addr (), ARM_INT_REGISTER_SIZE,
3803 byte_order, &xpsr))
3804 {
3805 warning (_("Could not fetch required XPSR content. Further "
3806 "unwinding is impossible."));
3807 arm_cache_set_active_sp_value (cache, tdep, 0);
3808 return cache;
3809 }
3810
3811 if (bit (xpsr, 9) != 0)
3812 {
3813 CORE_ADDR new_sp = arm_cache_get_prev_sp_value (cache, tdep) + 4;
3814 arm_cache_set_active_sp_value (cache, tdep, new_sp);
3815 }
3816
3817 return cache;
3818 }
3819
3820 internal_error (_("While unwinding an exception frame, "
3821 "found unexpected Link Register value "
3822 "%s. This should not happen and may "
3823 "be caused by corrupt data or a bug in"
3824 " GDB."),
3825 phex (lr, ARM_INT_REGISTER_SIZE));
3826 }
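
/* Illustrative sketch, not part of the unwinder above: how the
   FNC_RETURN/EXC_RETURN indicator byte and the EXC_RETURN flag bits
   tested in arm_m_exception_cache can be decoded from a raw LR value.
   The example_* helpers and the assumption that 'unsigned int' is at
   least 32 bits wide are for exposition only.  */

static inline int
example_lr_is_fnc_return (unsigned int lr)
{
  /* Bits 24-31 equal to 0xfe mark a secure/non-secure function call.  */
  return ((lr >> 24) & 0xff) == 0xfe;
}

static inline int
example_lr_is_exc_return (unsigned int lr)
{
  /* Bits 24-31 equal to 0xff mark an exception return.  */
  return ((lr >> 24) & 0xff) == 0xff;
}

static inline int
example_exc_return_uses_process_stack (unsigned int lr)
{
  /* Bit 2 (SPSEL): 1 = thread (process) stack, 0 = main stack.  */
  return (lr >> 2) & 1;
}

static inline int
example_exc_return_frame_is_extended (unsigned int lr)
{
  /* Bit 4 (FType): 0 means the extended frame with FP state stacked.  */
  return ((lr >> 4) & 1) == 0;
}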
3827
3828 /* Implementation of the stop_reason hook for arm_m_exception frames. */
3829
3830 static enum unwind_stop_reason
3831 arm_m_exception_frame_unwind_stop_reason (frame_info_ptr this_frame,
3832 void **this_cache)
3833 {
3834 struct arm_prologue_cache *cache;
3835 arm_gdbarch_tdep *tdep
3836 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3837
3838 if (*this_cache == NULL)
3839 *this_cache = arm_m_exception_cache (this_frame);
3840 cache = (struct arm_prologue_cache *) *this_cache;
3841
3842 /* If we've hit a wall, stop. */
3843 if (arm_cache_get_prev_sp_value (cache, tdep) == 0)
3844 return UNWIND_OUTERMOST;
3845
3846 return UNWIND_NO_REASON;
3847 }
3848
3849 /* Implementation of function hook 'this_id' in
3850 'struct frame_unwind'. */
3851
3852 static void
3853 arm_m_exception_this_id (frame_info_ptr this_frame,
3854 void **this_cache,
3855 struct frame_id *this_id)
3856 {
3857 struct arm_prologue_cache *cache;
3858
3859 if (*this_cache == NULL)
3860 *this_cache = arm_m_exception_cache (this_frame);
3861 cache = (struct arm_prologue_cache *) *this_cache;
3862
3863 /* Our frame ID for a stub frame is built from the unwound SP and the frame's PC. */
3864 arm_gdbarch_tdep *tdep
3865 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3866 *this_id = frame_id_build (arm_cache_get_prev_sp_value (cache, tdep),
3867 get_frame_pc (this_frame));
3868 }
3869
3870 /* Implementation of function hook 'prev_register' in
3871 'struct frame_unwind'. */
3872
3873 static struct value *
3874 arm_m_exception_prev_register (frame_info_ptr this_frame,
3875 void **this_cache,
3876 int prev_regnum)
3877 {
3878 struct arm_prologue_cache *cache;
3879 CORE_ADDR sp_value;
3880
3881 if (*this_cache == NULL)
3882 *this_cache = arm_m_exception_cache (this_frame);
3883 cache = (struct arm_prologue_cache *) *this_cache;
3884
3885 /* The value was already reconstructed into PREV_SP. */
3886 arm_gdbarch_tdep *tdep
3887 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3888 if (prev_regnum == ARM_SP_REGNUM)
3889 return frame_unwind_got_constant (this_frame, prev_regnum,
3890 arm_cache_get_prev_sp_value (cache, tdep));
3891
3892 /* If we are asked to unwind the PC, strip the saved T bit. */
3893 if (prev_regnum == ARM_PC_REGNUM)
3894 {
3895 struct value *value = trad_frame_get_prev_register (this_frame,
3896 cache->saved_regs,
3897 prev_regnum);
3898 CORE_ADDR pc = value_as_address (value);
3899 return frame_unwind_got_constant (this_frame, prev_regnum,
3900 UNMAKE_THUMB_ADDR (pc));
3901 }
3902
3903 /* The value might be one of the alternative SP registers; if so, use
3904 the value already reconstructed. */
3905 if (arm_is_alternative_sp_register (tdep, prev_regnum))
3906 {
3907 sp_value = arm_cache_get_sp_register (cache, tdep, prev_regnum);
3908 return frame_unwind_got_constant (this_frame, prev_regnum, sp_value);
3909 }
3910
3911 /* If we are asked to unwind the xPSR, set the T bit if the PC is in
3912 Thumb mode. The LR register is unreliable here, as it contains the
3913 FNC_RETURN or EXC_RETURN pattern. */
3914 if (prev_regnum == ARM_PS_REGNUM)
3915 {
3916 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3917 struct value *value = trad_frame_get_prev_register (this_frame,
3918 cache->saved_regs,
3919 ARM_PC_REGNUM);
3920 CORE_ADDR pc = value_as_address (value);
3921 value = trad_frame_get_prev_register (this_frame, cache->saved_regs,
3922 ARM_PS_REGNUM);
3923 ULONGEST xpsr = value_as_long (value);
3924
3925 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3926 xpsr = reconstruct_t_bit (gdbarch, pc, xpsr);
3927 return frame_unwind_got_constant (this_frame, ARM_PS_REGNUM, xpsr);
3928 }
3929
3930 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3931 prev_regnum);
3932 }
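
/* Illustrative sketch, not part of the unwinder: the Thumb-bit handling
   performed above, in isolation.  On M-profile, bit 0 of a code address
   is the Thumb bit and the T flag lives in xPSR bit 24 (on A/R profiles
   it is CPSR bit 5).  The example_* helpers are hypothetical stand-ins
   for UNMAKE_THUMB_ADDR and reconstruct_t_bit.  */

static inline unsigned int
example_strip_thumb_bit (unsigned int pc)
{
  /* Clear bit 0 so the PC is a plain instruction address.  */
  return pc & ~1u;
}

static inline unsigned int
example_reconstruct_m_profile_t_bit (unsigned int xpsr, int pc_is_thumb)
{
  /* Set or clear the xPSR T bit (bit 24) to match the PC's mode.  */
  return pc_is_thumb ? (xpsr | (1u << 24)) : (xpsr & ~(1u << 24));
}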
3933
3934 /* Implementation of function hook 'sniffer' in
3935 'struct frame_unwind'. */
3936
3937 static int
3938 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3939 frame_info_ptr this_frame,
3940 void **this_prologue_cache)
3941 {
3942 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3943 CORE_ADDR this_pc = get_frame_pc (this_frame);
3944
3945 /* No need to check is_m; this sniffer is only registered for
3946 M-profile architectures. */
3947
3948 /* Check if exception frame returns to a magic PC value. */
3949 return arm_m_addr_is_magic (gdbarch, this_pc);
3950 }
3951
3952 /* Frame unwinder for M-profile exceptions (EXC_RETURN on stack),
3953 lockup and secure/nonsecure interstate function calls (FNC_RETURN). */
3954
3955 struct frame_unwind arm_m_exception_unwind =
3956 {
3957 "arm m exception lockup sec_fnc",
3958 SIGTRAMP_FRAME,
3959 arm_m_exception_frame_unwind_stop_reason,
3960 arm_m_exception_this_id,
3961 arm_m_exception_prev_register,
3962 NULL,
3963 arm_m_exception_unwind_sniffer
3964 };
3965
3966 static CORE_ADDR
3967 arm_normal_frame_base (frame_info_ptr this_frame, void **this_cache)
3968 {
3969 struct arm_prologue_cache *cache;
3970
3971 if (*this_cache == NULL)
3972 *this_cache = arm_make_prologue_cache (this_frame);
3973 cache = (struct arm_prologue_cache *) *this_cache;
3974
3975 arm_gdbarch_tdep *tdep
3976 = gdbarch_tdep<arm_gdbarch_tdep> (get_frame_arch (this_frame));
3977 return arm_cache_get_prev_sp_value (cache, tdep) - cache->framesize;
3978 }
3979
3980 struct frame_base arm_normal_base = {
3981 &arm_prologue_unwind,
3982 arm_normal_frame_base,
3983 arm_normal_frame_base,
3984 arm_normal_frame_base
3985 };
3986
3987 struct arm_dwarf2_prev_register_cache
3988 {
3989 /* Cached value of the corresponding stack pointer for the inner frame. */
3990 CORE_ADDR sp;
3991 CORE_ADDR msp;
3992 CORE_ADDR msp_s;
3993 CORE_ADDR msp_ns;
3994 CORE_ADDR psp;
3995 CORE_ADDR psp_s;
3996 CORE_ADDR psp_ns;
3997 };
3998
3999 static struct value *
4000 arm_dwarf2_prev_register (frame_info_ptr this_frame, void **this_cache,
4001 int regnum)
4002 {
4003 struct gdbarch *gdbarch = get_frame_arch (this_frame);
4004 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
4005 CORE_ADDR lr;
4006 ULONGEST cpsr;
4007 arm_dwarf2_prev_register_cache *cache
4008 = ((arm_dwarf2_prev_register_cache *)
4009 dwarf2_frame_get_fn_data (this_frame, this_cache,
4010 arm_dwarf2_prev_register));
4011
4012 if (!cache)
4013 {
4014 const unsigned int size = sizeof (struct arm_dwarf2_prev_register_cache);
4015 cache = ((arm_dwarf2_prev_register_cache *)
4016 dwarf2_frame_allocate_fn_data (this_frame, this_cache,
4017 arm_dwarf2_prev_register, size));
4018
4019 if (tdep->have_sec_ext)
4020 {
4021 cache->sp
4022 = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
4023
4024 cache->msp_s
4025 = get_frame_register_unsigned (this_frame,
4026 tdep->m_profile_msp_s_regnum);
4027 cache->msp_ns
4028 = get_frame_register_unsigned (this_frame,
4029 tdep->m_profile_msp_ns_regnum);
4030 cache->psp_s
4031 = get_frame_register_unsigned (this_frame,
4032 tdep->m_profile_psp_s_regnum);
4033 cache->psp_ns
4034 = get_frame_register_unsigned (this_frame,
4035 tdep->m_profile_psp_ns_regnum);
4036 }
4037 else if (tdep->is_m)
4038 {
4039 cache->sp
4040 = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
4041
4042 cache->msp
4043 = get_frame_register_unsigned (this_frame,
4044 tdep->m_profile_msp_regnum);
4045 cache->psp
4046 = get_frame_register_unsigned (this_frame,
4047 tdep->m_profile_psp_regnum);
4048 }
4049 }
4050
4051 if (regnum == ARM_PC_REGNUM)
4052 {
4053 /* The PC is normally copied from the return column, which
4054 describes saves of LR. However, that version may have an
4055 extra bit set to indicate Thumb state. The bit is not
4056 part of the PC. */
4057
4058 /* Record in the frame whether the return address was signed. */
4059 if (tdep->have_pacbti)
4060 {
4061 CORE_ADDR ra_auth_code
4062 = frame_unwind_register_unsigned (this_frame,
4063 tdep->pacbti_pseudo_base);
4064
4065 if (ra_auth_code != 0)
4066 set_frame_previous_pc_masked (this_frame);
4067 }
4068
4069 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
4070 return frame_unwind_got_constant (this_frame, regnum,
4071 arm_addr_bits_remove (gdbarch, lr));
4072 }
4073 else if (regnum == ARM_PS_REGNUM)
4074 {
4075 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
4076 cpsr = get_frame_register_unsigned (this_frame, regnum);
4077 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
4078 cpsr = reconstruct_t_bit (gdbarch, lr, cpsr);
4079 return frame_unwind_got_constant (this_frame, regnum, cpsr);
4080 }
4081 else if (arm_is_alternative_sp_register (tdep, regnum))
4082 {
4083 /* Handle the alternative SP registers on Cortex-M. */
4084 bool override_with_sp_value = false;
4085 CORE_ADDR val;
4086
4087 if (tdep->have_sec_ext)
4088 {
4089 bool is_msp = (regnum == tdep->m_profile_msp_regnum)
4090 && (cache->msp_s == cache->sp || cache->msp_ns == cache->sp);
4091 bool is_msp_s = (regnum == tdep->m_profile_msp_s_regnum)
4092 && (cache->msp_s == cache->sp);
4093 bool is_msp_ns = (regnum == tdep->m_profile_msp_ns_regnum)
4094 && (cache->msp_ns == cache->sp);
4095 bool is_psp = (regnum == tdep->m_profile_psp_regnum)
4096 && (cache->psp_s == cache->sp || cache->psp_ns == cache->sp);
4097 bool is_psp_s = (regnum == tdep->m_profile_psp_s_regnum)
4098 && (cache->psp_s == cache->sp);
4099 bool is_psp_ns = (regnum == tdep->m_profile_psp_ns_regnum)
4100 && (cache->psp_ns == cache->sp);
4101
4102 override_with_sp_value = is_msp || is_msp_s || is_msp_ns
4103 || is_psp || is_psp_s || is_psp_ns;
4104
4105 }
4106 else if (tdep->is_m)
4107 {
4108 bool is_msp = (regnum == tdep->m_profile_msp_regnum)
4109 && (cache->sp == cache->msp);
4110 bool is_psp = (regnum == tdep->m_profile_psp_regnum)
4111 && (cache->sp == cache->psp);
4112
4113 override_with_sp_value = is_msp || is_psp;
4114 }
4115
4116 if (override_with_sp_value)
4117 {
4118 /* Use value of SP from previous frame. */
4119 frame_info_ptr prev_frame = get_prev_frame (this_frame);
4120 if (prev_frame)
4121 val = get_frame_register_unsigned (prev_frame, ARM_SP_REGNUM);
4122 else
4123 val = get_frame_base (this_frame);
4124 }
4125 else
4126 /* Use value for the register from previous frame. */
4127 val = get_frame_register_unsigned (this_frame, regnum);
4128
4129 return frame_unwind_got_constant (this_frame, regnum, val);
4130 }
4131
4132 internal_error (_("Unexpected register %d"), regnum);
4133 }
4134
4135 /* Implement the stack_frame_destroyed_p gdbarch method. */
4136
4137 static int
4138 thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
4139 {
4140 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4141 unsigned int insn, insn2;
4142 int found_return = 0, found_stack_adjust = 0;
4143 CORE_ADDR func_start, func_end;
4144 CORE_ADDR scan_pc;
4145 gdb_byte buf[4];
4146
4147 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
4148 return 0;
4149
4150 /* The epilogue is a sequence of instructions along the following lines:
4151
4152 - add stack frame size to SP or FP
4153 - [if frame pointer used] restore SP from FP
4154 - restore registers from SP [may include PC]
4155 - a return-type instruction [if PC wasn't already restored]
4156
4157 In a first pass, we scan forward from the current PC and verify the
4158 instructions we find as compatible with this sequence, ending in a
4159 return instruction.
4160
4161 However, this is not sufficient to distinguish indirect function calls
4162 within a function from indirect tail calls in the epilogue in some cases.
4163 Therefore, if we didn't already find any SP-changing instruction during
4164 forward scan, we add a backward scanning heuristic to ensure we actually
4165 are in the epilogue. */
4166
4167 scan_pc = pc;
4168 while (scan_pc < func_end && !found_return)
4169 {
4170 if (target_read_memory (scan_pc, buf, 2))
4171 break;
4172
4173 scan_pc += 2;
4174 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
4175
4176 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
4177 found_return = 1;
4178 else if (insn == 0x46f7) /* mov pc, lr */
4179 found_return = 1;
4180 else if (thumb_instruction_restores_sp (insn))
4181 {
4182 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
4183 found_return = 1;
4184 }
4185 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
4186 {
4187 if (target_read_memory (scan_pc, buf, 2))
4188 break;
4189
4190 scan_pc += 2;
4191 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
4192
4193 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
4194 {
4195 if (insn2 & 0x8000) /* <registers> include PC. */
4196 found_return = 1;
4197 }
4198 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
4199 && (insn2 & 0x0fff) == 0x0b04)
4200 {
4201 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
4202 found_return = 1;
4203 }
4204 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
4205 && (insn2 & 0x0e00) == 0x0a00)
4206 ;
4207 else
4208 break;
4209 }
4210 else
4211 break;
4212 }
4213
4214 if (!found_return)
4215 return 0;
4216
4217 /* Since any instruction in the epilogue sequence, with the possible
4218 exception of return itself, updates the stack pointer, we need to
4219 scan backwards for at most one instruction. Try either a 16-bit or
4220 a 32-bit instruction. This is just a heuristic, so we do not worry
4221 too much about false positives. */
4222
4223 if (pc - 4 < func_start)
4224 return 0;
4225 if (target_read_memory (pc - 4, buf, 4))
4226 return 0;
4227
4228 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
4229 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
4230
4231 if (thumb_instruction_restores_sp (insn2))
4232 found_stack_adjust = 1;
4233 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
4234 found_stack_adjust = 1;
4235 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
4236 && (insn2 & 0x0fff) == 0x0b04)
4237 found_stack_adjust = 1;
4238 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
4239 && (insn2 & 0x0e00) == 0x0a00)
4240 found_stack_adjust = 1;
4241
4242 return found_stack_adjust;
4243 }
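
/* Illustrative sketch, not used by the scan above: the 16-bit Thumb
   return-instruction patterns that thumb_stack_frame_destroyed_p
   recognizes, restated as a standalone predicate.  The function name is
   hypothetical; the masks mirror the ones used in the forward scan.  */

static inline int
example_thumb16_is_return (unsigned int insn)
{
  if ((insn & 0xff80) == 0x4700)	/* bx <Rm>, e.g. 0x4770 is bx lr.  */
    return 1;
  if (insn == 0x46f7)			/* mov pc, lr.  */
    return 1;
  if ((insn & 0xff00) == 0xbd00)	/* pop {<registers>, pc}.  */
    return 1;
  return 0;
}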
4244
4245 static int
4246 arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
4247 {
4248 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4249 unsigned int insn;
4250 int found_return;
4251 CORE_ADDR func_start, func_end;
4252
4253 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
4254 return 0;
4255
4256 /* We are in the epilogue if the previous instruction was a stack
4257 adjustment and the next instruction is a possible return (bx, mov
4258 pc, or pop). We could have to scan backwards to find the stack
4259 adjustment, or forwards to find the return, but this is a decent
4260 approximation. First scan forwards. */
4261
4262 found_return = 0;
4263 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
4264 if (bits (insn, 28, 31) != INST_NV)
4265 {
4266 if ((insn & 0x0ffffff0) == 0x012fff10)
4267 /* BX. */
4268 found_return = 1;
4269 else if ((insn & 0x0ffffff0) == 0x01a0f000)
4270 /* MOV PC. */
4271 found_return = 1;
4272 else if ((insn & 0x0fff0000) == 0x08bd0000
4273 && (insn & 0x0000c000) != 0)
4274 /* POP (LDMIA), including PC or LR. */
4275 found_return = 1;
4276 }
4277
4278 if (!found_return)
4279 return 0;
4280
4281 /* Scan backwards. This is just a heuristic, so do not worry about
4282 false positives from mode changes. */
4283
4284 if (pc < func_start + 4)
4285 return 0;
4286
4287 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
4288 if (arm_instruction_restores_sp (insn))
4289 return 1;
4290
4291 return 0;
4292 }
4293
4294 /* Implement the stack_frame_destroyed_p gdbarch method. */
4295
4296 static int
4297 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
4298 {
4299 if (arm_pc_is_thumb (gdbarch, pc))
4300 return thumb_stack_frame_destroyed_p (gdbarch, pc);
4301 else
4302 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
4303 }
4304
4305 /* When arguments must be pushed onto the stack, they go on in reverse
4306 order. The code below implements a FILO (stack) to do this. */
4307
4308 struct arm_stack_item
4309 {
4310 int len;
4311 struct arm_stack_item *prev;
4312 gdb_byte *data;
4313 };
4314
4315 static struct arm_stack_item *
4316 push_stack_item (struct arm_stack_item *prev, const gdb_byte *contents,
4317 int len)
4318 {
4319 struct arm_stack_item *si;
4320 si = XNEW (struct arm_stack_item);
4321 si->data = (gdb_byte *) xmalloc (len);
4322 si->len = len;
4323 si->prev = prev;
4324 memcpy (si->data, contents, len);
4325 return si;
4326 }
4327
4328 static struct arm_stack_item *
4329 pop_stack_item (struct arm_stack_item *si)
4330 {
4331 struct arm_stack_item *dead = si;
4332 si = si->prev;
4333 xfree (dead->data);
4334 xfree (dead);
4335 return si;
4336 }
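
/* Illustrative usage sketch for the stack-item FILO above: items are
   pushed in argument order and later written to memory while being
   popped, which reverses them so that the first-pushed item ends up at
   the highest address.  The payload bytes here are arbitrary example
   data; the helper is hypothetical and not called anywhere.  */

static void
example_stack_item_usage (void)
{
  struct arm_stack_item *si = NULL;
  const gdb_byte payload[4] = { 1, 2, 3, 4 };

  si = push_stack_item (si, payload, sizeof (payload));
  si = push_stack_item (si, payload, sizeof (payload));

  /* The last item pushed is popped (and freed) first.  */
  while (si != NULL)
    si = pop_stack_item (si);
}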
4337
4338 /* Implement the gdbarch type alignment method, overrides the generic
4339 alignment algorithm for anything that is arm specific. */
4340
4341 static ULONGEST
4342 arm_type_align (gdbarch *gdbarch, struct type *t)
4343 {
4344 t = check_typedef (t);
4345 if (t->code () == TYPE_CODE_ARRAY && t->is_vector ())
4346 {
4347 /* Use the natural alignment for vector types (the same as for the
4348 scalar element type), but cap the alignment at 64 bits. */
4349 if (t->length () > 8)
4350 return 8;
4351 else
4352 return t->length ();
4353 }
4354
4355 /* Allow the common code to calculate the alignment. */
4356 return 0;
4357 }
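
/* Illustrative restatement of the rule above, operating on a raw length
   instead of a struct type: a vector's alignment equals its total size,
   capped at 8 bytes, so a 16-byte NEON quad vector still only requires
   8-byte alignment.  Hypothetical helper, for exposition only.  */

static inline unsigned int
example_vector_alignment (unsigned int length_in_bytes)
{
  return length_in_bytes > 8 ? 8 : length_in_bytes;
}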
4358
4359 /* Possible base types for a candidate for passing and returning in
4360 VFP registers. */
4361
4362 enum arm_vfp_cprc_base_type
4363 {
4364 VFP_CPRC_UNKNOWN,
4365 VFP_CPRC_SINGLE,
4366 VFP_CPRC_DOUBLE,
4367 VFP_CPRC_VEC64,
4368 VFP_CPRC_VEC128
4369 };
4370
4371 /* The length of one element of base type B. */
4372
4373 static unsigned
4374 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
4375 {
4376 switch (b)
4377 {
4378 case VFP_CPRC_SINGLE:
4379 return 4;
4380 case VFP_CPRC_DOUBLE:
4381 return 8;
4382 case VFP_CPRC_VEC64:
4383 return 8;
4384 case VFP_CPRC_VEC128:
4385 return 16;
4386 default:
4387 internal_error (_("Invalid VFP CPRC type: %d."),
4388 (int) b);
4389 }
4390 }
4391
4392 /* The character ('s', 'd' or 'q') for the type of VFP register used
4393 for passing base type B. */
4394
4395 static int
4396 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
4397 {
4398 switch (b)
4399 {
4400 case VFP_CPRC_SINGLE:
4401 return 's';
4402 case VFP_CPRC_DOUBLE:
4403 return 'd';
4404 case VFP_CPRC_VEC64:
4405 return 'd';
4406 case VFP_CPRC_VEC128:
4407 return 'q';
4408 default:
4409 internal_error (_("Invalid VFP CPRC type: %d."),
4410 (int) b);
4411 }
4412 }
4413
4414 /* Determine whether T may be part of a candidate for passing and
4415 returning in VFP registers, ignoring the limit on the total number
4416 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
4417 classification of the first valid component found; if it is not
4418 VFP_CPRC_UNKNOWN, all components must have the same classification
4419 as *BASE_TYPE. If it is found that T contains a type not permitted
4420 for passing and returning in VFP registers, a type differently
4421 classified from *BASE_TYPE, or two types differently classified
4422 from each other, return -1, otherwise return the total number of
4423 base-type elements found (possibly 0 in an empty structure or
4424 array). Vector types are not currently supported, matching the
4425 generic AAPCS support. */
4426
4427 static int
4428 arm_vfp_cprc_sub_candidate (struct type *t,
4429 enum arm_vfp_cprc_base_type *base_type)
4430 {
4431 t = check_typedef (t);
4432 switch (t->code ())
4433 {
4434 case TYPE_CODE_FLT:
4435 switch (t->length ())
4436 {
4437 case 4:
4438 if (*base_type == VFP_CPRC_UNKNOWN)
4439 *base_type = VFP_CPRC_SINGLE;
4440 else if (*base_type != VFP_CPRC_SINGLE)
4441 return -1;
4442 return 1;
4443
4444 case 8:
4445 if (*base_type == VFP_CPRC_UNKNOWN)
4446 *base_type = VFP_CPRC_DOUBLE;
4447 else if (*base_type != VFP_CPRC_DOUBLE)
4448 return -1;
4449 return 1;
4450
4451 default:
4452 return -1;
4453 }
4454 break;
4455
4456 case TYPE_CODE_COMPLEX:
4457 /* Arguments of complex T where T is one of the types float or
4458 double get treated as if they are implemented as:
4459
4460 struct complexT
4461 {
4462 T real;
4463 T imag;
4464 };
4465
4466 */
4467 switch (t->length ())
4468 {
4469 case 8:
4470 if (*base_type == VFP_CPRC_UNKNOWN)
4471 *base_type = VFP_CPRC_SINGLE;
4472 else if (*base_type != VFP_CPRC_SINGLE)
4473 return -1;
4474 return 2;
4475
4476 case 16:
4477 if (*base_type == VFP_CPRC_UNKNOWN)
4478 *base_type = VFP_CPRC_DOUBLE;
4479 else if (*base_type != VFP_CPRC_DOUBLE)
4480 return -1;
4481 return 2;
4482
4483 default:
4484 return -1;
4485 }
4486 break;
4487
4488 case TYPE_CODE_ARRAY:
4489 {
4490 if (t->is_vector ())
4491 {
4492 /* 64-bit and 128-bit containerized vector types are VFP
4493 CPRCs. */
4494 switch (t->length ())
4495 {
4496 case 8:
4497 if (*base_type == VFP_CPRC_UNKNOWN)
4498 *base_type = VFP_CPRC_VEC64;
4499 return 1;
4500 case 16:
4501 if (*base_type == VFP_CPRC_UNKNOWN)
4502 *base_type = VFP_CPRC_VEC128;
4503 return 1;
4504 default:
4505 return -1;
4506 }
4507 }
4508 else
4509 {
4510 int count;
4511 unsigned unitlen;
4512
4513 count = arm_vfp_cprc_sub_candidate (t->target_type (),
4514 base_type);
4515 if (count == -1)
4516 return -1;
4517 if (t->length () == 0)
4518 {
4519 gdb_assert (count == 0);
4520 return 0;
4521 }
4522 else if (count == 0)
4523 return -1;
4524 unitlen = arm_vfp_cprc_unit_length (*base_type);
4525 gdb_assert ((t->length () % unitlen) == 0);
4526 return t->length () / unitlen;
4527 }
4528 }
4529 break;
4530
4531 case TYPE_CODE_STRUCT:
4532 {
4533 int count = 0;
4534 unsigned unitlen;
4535 int i;
4536 for (i = 0; i < t->num_fields (); i++)
4537 {
4538 int sub_count = 0;
4539
4540 if (!t->field (i).is_static ())
4541 sub_count = arm_vfp_cprc_sub_candidate (t->field (i).type (),
4542 base_type);
4543 if (sub_count == -1)
4544 return -1;
4545 count += sub_count;
4546 }
4547 if (t->length () == 0)
4548 {
4549 gdb_assert (count == 0);
4550 return 0;
4551 }
4552 else if (count == 0)
4553 return -1;
4554 unitlen = arm_vfp_cprc_unit_length (*base_type);
4555 if (t->length () != unitlen * count)
4556 return -1;
4557 return count;
4558 }
4559
4560 case TYPE_CODE_UNION:
4561 {
4562 int count = 0;
4563 unsigned unitlen;
4564 int i;
4565 for (i = 0; i < t->num_fields (); i++)
4566 {
4567 int sub_count = arm_vfp_cprc_sub_candidate (t->field (i).type (),
4568 base_type);
4569 if (sub_count == -1)
4570 return -1;
4571 count = (count > sub_count ? count : sub_count);
4572 }
4573 if (t->length () == 0)
4574 {
4575 gdb_assert (count == 0);
4576 return 0;
4577 }
4578 else if (count == 0)
4579 return -1;
4580 unitlen = arm_vfp_cprc_unit_length (*base_type);
4581 if (t->length () != unitlen * count)
4582 return -1;
4583 return count;
4584 }
4585
4586 default:
4587 break;
4588 }
4589
4590 return -1;
4591 }
4592
4593 /* Determine whether T is a VFP co-processor register candidate (CPRC)
4594 if passed to or returned from a non-variadic function with the VFP
4595 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
4596 *BASE_TYPE to the base type for T and *COUNT to the number of
4597 elements of that base type before returning. */
4598
4599 static int
4600 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
4601 int *count)
4602 {
4603 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
4604 int c = arm_vfp_cprc_sub_candidate (t, &b);
4605 if (c <= 0 || c > 4)
4606 return 0;
4607 *base_type = b;
4608 *count = c;
4609 return 1;
4610 }
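
/* Worked example (hand-written, not from the sources) for the
   classification above: a homogeneous aggregate of three floats has
   base type VFP_CPRC_SINGLE and count 3, so arm_vfp_call_candidate
   accepts it and, under the VFP ABI, it is passed in s0-s2.  Five
   floats (count > 4) or a mix of float and double are rejected and
   fall back to core registers and the stack.  The assertion below only
   restates the size relationship the classifier relies on.  */

struct example_hfa_3f { float x, y, z; };

static_assert (sizeof (example_hfa_3f) == 3 * sizeof (float),
	       "a homogeneous float aggregate has no padding");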
4611
4612 /* Return 1 if the VFP ABI should be used for passing arguments to and
4613 returning values from a function of type FUNC_TYPE, 0
4614 otherwise. */
4615
4616 static int
4617 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
4618 {
4619 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
4620
4621 /* Variadic functions always use the base ABI. Assume that functions
4622 without debug info are not variadic. */
4623 if (func_type && check_typedef (func_type)->has_varargs ())
4624 return 0;
4625
4626 /* The VFP ABI is only supported as a variant of AAPCS. */
4627 if (tdep->arm_abi != ARM_ABI_AAPCS)
4628 return 0;
4629
4630 return tdep->fp_model == ARM_FLOAT_VFP;
4631 }
4632
4633 /* We currently only support passing parameters in integer registers, which
4634 conforms with GCC's default model, and VFP argument passing following
4635 the VFP variant of AAPCS. Several other variants exist and
4636 we should probably support some of them based on the selected ABI. */
4637
4638 static CORE_ADDR
4639 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
4640 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
4641 struct value **args, CORE_ADDR sp,
4642 function_call_return_method return_method,
4643 CORE_ADDR struct_addr)
4644 {
4645 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
4646 int argnum;
4647 int argreg;
4648 int nstack;
4649 struct arm_stack_item *si = NULL;
4650 int use_vfp_abi;
4651 struct type *ftype;
4652 unsigned vfp_regs_free = (1 << 16) - 1;
4653 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
4654
4655 /* Determine the type of this function and whether the VFP ABI
4656 applies. */
4657 ftype = check_typedef (function->type ());
4658 if (ftype->code () == TYPE_CODE_PTR)
4659 ftype = check_typedef (ftype->target_type ());
4660 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
4661
4662 /* Set the return address. For the ARM, the return breakpoint is
4663 always at BP_ADDR. */
4664 if (arm_pc_is_thumb (gdbarch, bp_addr))
4665 bp_addr |= 1;
4666 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
4667
4668 /* Walk through the list of args and determine how large a temporary
4669 stack is required. Need to take care here as structs may be
4670 passed on the stack, and we have to push them. */
4671 nstack = 0;
4672
4673 argreg = ARM_A1_REGNUM;
4674 nstack = 0;
4675
4676 /* The struct_return pointer occupies the first parameter
4677 passing register. */
4678 if (return_method == return_method_struct)
4679 {
4680 arm_debug_printf ("struct return in %s = %s",
4681 gdbarch_register_name (gdbarch, argreg),
4682 paddress (gdbarch, struct_addr));
4683
4684 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
4685 argreg++;
4686 }
4687
4688 for (argnum = 0; argnum < nargs; argnum++)
4689 {
4690 int len;
4691 struct type *arg_type;
4692 struct type *target_type;
4693 enum type_code typecode;
4694 const bfd_byte *val;
4695 int align;
4696 enum arm_vfp_cprc_base_type vfp_base_type;
4697 int vfp_base_count;
4698 int may_use_core_reg = 1;
4699
4700 arg_type = check_typedef (args[argnum]->type ());
4701 len = arg_type->length ();
4702 target_type = arg_type->target_type ();
4703 typecode = arg_type->code ();
4704 val = args[argnum]->contents ().data ();
4705
4706 align = type_align (arg_type);
4707 /* Round alignment up to a whole number of words. */
4708 align = (align + ARM_INT_REGISTER_SIZE - 1)
4709 & ~(ARM_INT_REGISTER_SIZE - 1);
4710 /* Different ABIs have different maximum alignments. */
4711 if (tdep->arm_abi == ARM_ABI_APCS)
4712 {
4713 /* The APCS ABI only requires word alignment. */
4714 align = ARM_INT_REGISTER_SIZE;
4715 }
4716 else
4717 {
4718 /* The AAPCS requires at most doubleword alignment. */
4719 if (align > ARM_INT_REGISTER_SIZE * 2)
4720 align = ARM_INT_REGISTER_SIZE * 2;
4721 }
4722
4723 if (use_vfp_abi
4724 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
4725 &vfp_base_count))
4726 {
4727 int regno;
4728 int unit_length;
4729 int shift;
4730 unsigned mask;
4731
4732 /* Because this is a CPRC it cannot go in a core register or
4733 cause a core register to be skipped for alignment.
4734 Either it goes in VFP registers and the rest of this loop
4735 iteration is skipped for this argument, or it goes on the
4736 stack (and the stack alignment code is correct for this
4737 case). */
4738 may_use_core_reg = 0;
4739
4740 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
4741 shift = unit_length / 4;
4742 mask = (1 << (shift * vfp_base_count)) - 1;
4743 for (regno = 0; regno < 16; regno += shift)
4744 if (((vfp_regs_free >> regno) & mask) == mask)
4745 break;
4746
4747 if (regno < 16)
4748 {
4749 int reg_char;
4750 int reg_scaled;
4751 int i;
4752
4753 vfp_regs_free &= ~(mask << regno);
4754 reg_scaled = regno / shift;
4755 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
4756 for (i = 0; i < vfp_base_count; i++)
4757 {
4758 char name_buf[4];
4759 int regnum;
4760 if (reg_char == 'q')
4761 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
4762 val + i * unit_length);
4763 else
4764 {
4765 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
4766 reg_char, reg_scaled + i);
4767 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
4768 strlen (name_buf));
4769 regcache->cooked_write (regnum, val + i * unit_length);
4770 }
4771 }
4772 continue;
4773 }
4774 else
4775 {
4776 /* This CPRC could not go in VFP registers, so all VFP
4777 registers are now marked as used. */
4778 vfp_regs_free = 0;
4779 }
4780 }
4781
4782 /* Push stack padding for doubleword alignment. */
4783 if (nstack & (align - 1))
4784 {
4785 si = push_stack_item (si, val, ARM_INT_REGISTER_SIZE);
4786 nstack += ARM_INT_REGISTER_SIZE;
4787 }
4788
4789 /* Doubleword aligned quantities must go in even register pairs. */
4790 if (may_use_core_reg
4791 && argreg <= ARM_LAST_ARG_REGNUM
4792 && align > ARM_INT_REGISTER_SIZE
4793 && argreg & 1)
4794 argreg++;
4795
4796 /* If the argument is a pointer to a function, and it is a
4797 Thumb function, create a LOCAL copy of the value and set
4798 the THUMB bit in it. */
4799 if (TYPE_CODE_PTR == typecode
4800 && target_type != NULL
4801 && TYPE_CODE_FUNC == check_typedef (target_type)->code ())
4802 {
4803 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
4804 if (arm_pc_is_thumb (gdbarch, regval))
4805 {
4806 bfd_byte *copy = (bfd_byte *) alloca (len);
4807 store_unsigned_integer (copy, len, byte_order,
4808 MAKE_THUMB_ADDR (regval));
4809 val = copy;
4810 }
4811 }
4812
4813 /* Copy the argument to general registers or the stack in
4814 register-sized pieces. Large arguments are split between
4815 registers and stack. */
4816 while (len > 0)
4817 {
4818 int partial_len = len < ARM_INT_REGISTER_SIZE
4819 ? len : ARM_INT_REGISTER_SIZE;
4820 CORE_ADDR regval
4821 = extract_unsigned_integer (val, partial_len, byte_order);
4822
4823 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
4824 {
4825 /* The argument is being passed in a general purpose
4826 register. */
4827 arm_debug_printf ("arg %d in %s = 0x%s", argnum,
4828 gdbarch_register_name (gdbarch, argreg),
4829 phex (regval, ARM_INT_REGISTER_SIZE));
4830
4831 regcache_cooked_write_unsigned (regcache, argreg, regval);
4832 argreg++;
4833 }
4834 else
4835 {
4836 gdb_byte buf[ARM_INT_REGISTER_SIZE];
4837
4838 memset (buf, 0, sizeof (buf));
4839 store_unsigned_integer (buf, partial_len, byte_order, regval);
4840
4841 /* Push the arguments onto the stack. */
4842 arm_debug_printf ("arg %d @ sp + %d", argnum, nstack);
4843 si = push_stack_item (si, buf, ARM_INT_REGISTER_SIZE);
4844 nstack += ARM_INT_REGISTER_SIZE;
4845 }
4846
4847 len -= partial_len;
4848 val += partial_len;
4849 }
4850 }
4851 /* If we have an odd number of words to push, then decrement the stack
4852 by one word now, so first stack argument will be dword aligned. */
4853 if (nstack & 4)
4854 sp -= 4;
4855
4856 while (si)
4857 {
4858 sp -= si->len;
4859 write_memory (sp, si->data, si->len);
4860 si = pop_stack_item (si);
4861 }
4862
4863 /* Finally, update the SP register. */
4864 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
4865
4866 return sp;
4867 }
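
/* Illustrative sketch, not used by arm_push_dummy_call: the AAPCS
   core-register assignment applied above, for one concrete signature.
   For f (int a, long long b, int c): a goes in r0; b is 8-byte aligned,
   so r1 is skipped and b occupies r2/r3; c no longer fits in r0-r3 and
   is pushed on the stack.  The hypothetical helper below reproduces
   only the "skip to an even register for doubleword-aligned arguments"
   step.  */

static inline int
example_align_core_argreg (int argreg, unsigned int align)
{
  /* Doubleword-aligned values must start in an even-numbered core
     register (r0, r2, ...); ARM_INT_REGISTER_SIZE is 4 bytes.  */
  if (align > 4 && (argreg & 1) != 0)
    argreg++;
  return argreg;
}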
4868
4869
4870 /* Always align the frame to an 8-byte boundary. This is required on
4871 some platforms and harmless on the rest. */
4872
4873 static CORE_ADDR
4874 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
4875 {
4876 /* Align the stack to eight bytes. */
4877 return sp & ~ (CORE_ADDR) 7;
4878 }
4879
4880 static void
4881 print_fpu_flags (struct ui_file *file, int flags)
4882 {
4883 if (flags & (1 << 0))
4884 gdb_puts ("IVO ", file);
4885 if (flags & (1 << 1))
4886 gdb_puts ("DVZ ", file);
4887 if (flags & (1 << 2))
4888 gdb_puts ("OFL ", file);
4889 if (flags & (1 << 3))
4890 gdb_puts ("UFL ", file);
4891 if (flags & (1 << 4))
4892 gdb_puts ("INX ", file);
4893 gdb_putc ('\n', file);
4894 }
4895
4896 /* Print interesting information about the floating point processor
4897 (if present) or emulator. */
4898 static void
4899 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
4900 frame_info_ptr frame, const char *args)
4901 {
4902 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
4903 int type;
4904
4905 type = (status >> 24) & 127;
4906 if (status & (1 << 31))
4907 gdb_printf (file, _("Hardware FPU type %d\n"), type);
4908 else
4909 gdb_printf (file, _("Software FPU type %d\n"), type);
4910 /* i18n: [floating point unit] mask */
4911 gdb_puts (_("mask: "), file);
4912 print_fpu_flags (file, status >> 16);
4913 /* i18n: [floating point unit] flags */
4914 gdb_puts (_("flags: "), file);
4915 print_fpu_flags (file, status);
4916 }
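
/* Illustrative sketch of the FPS status-word layout printed above:
   bit 31 distinguishes a hardware from a software FPU, bits 24-30 hold
   the FPU type, bits 16-20 the exception mask and bits 0-4 the
   cumulative exception flags (IVO, DVZ, OFL, UFL, INX).  Hypothetical
   helpers, for exposition only.  */

static inline int
example_fps_is_hardware (unsigned int status)
{
  return (status >> 31) & 1;
}

static inline int
example_fps_fpu_type (unsigned int status)
{
  return (status >> 24) & 127;
}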
4917
4918 /* Construct the ARM extended floating point type. */
4919 static struct type *
4920 arm_ext_type (struct gdbarch *gdbarch)
4921 {
4922 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
4923
4924 if (!tdep->arm_ext_type)
4925 {
4926 type_allocator alloc (gdbarch);
4927 tdep->arm_ext_type
4928 = init_float_type (alloc, -1, "builtin_type_arm_ext",
4929 floatformats_arm_ext);
4930 }
4931
4932 return tdep->arm_ext_type;
4933 }
4934
4935 static struct type *
4936 arm_neon_double_type (struct gdbarch *gdbarch)
4937 {
4938 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
4939
4940 if (tdep->neon_double_type == NULL)
4941 {
4942 struct type *t, *elem;
4943
4944 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
4945 TYPE_CODE_UNION);
4946 elem = builtin_type (gdbarch)->builtin_uint8;
4947 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
4948 elem = builtin_type (gdbarch)->builtin_uint16;
4949 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
4950 elem = builtin_type (gdbarch)->builtin_uint32;
4951 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
4952 elem = builtin_type (gdbarch)->builtin_uint64;
4953 append_composite_type_field (t, "u64", elem);
4954 elem = builtin_type (gdbarch)->builtin_float;
4955 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4956 elem = builtin_type (gdbarch)->builtin_double;
4957 append_composite_type_field (t, "f64", elem);
4958
4959 t->set_is_vector (true);
4960 t->set_name ("neon_d");
4961 tdep->neon_double_type = t;
4962 }
4963
4964 return tdep->neon_double_type;
4965 }
4966
4967 /* FIXME: The vector types are not correctly ordered on big-endian
4968 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4969 bits of d0 - regardless of what unit size is being held in d0. So
4970 the offset of the first uint8 in d0 is 7, but the offset of the
4971 first float is 4. This code works as-is for little-endian
4972 targets. */
4973
4974 static struct type *
4975 arm_neon_quad_type (struct gdbarch *gdbarch)
4976 {
4977 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
4978
4979 if (tdep->neon_quad_type == NULL)
4980 {
4981 struct type *t, *elem;
4982
4983 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4984 TYPE_CODE_UNION);
4985 elem = builtin_type (gdbarch)->builtin_uint8;
4986 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4987 elem = builtin_type (gdbarch)->builtin_uint16;
4988 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4989 elem = builtin_type (gdbarch)->builtin_uint32;
4990 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4991 elem = builtin_type (gdbarch)->builtin_uint64;
4992 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4993 elem = builtin_type (gdbarch)->builtin_float;
4994 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4995 elem = builtin_type (gdbarch)->builtin_double;
4996 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4997
4998 t->set_is_vector (true);
4999 t->set_name ("neon_q");
5000 tdep->neon_quad_type = t;
5001 }
5002
5003 return tdep->neon_quad_type;
5004 }
5005
5006 /* Return true if REGNUM is a Q pseudo register. Return false
5007 otherwise.
5008
5009 REGNUM is the raw register number and not a pseudo-relative register
5010 number. */
5011
5012 static bool
5013 is_q_pseudo (struct gdbarch *gdbarch, int regnum)
5014 {
5015 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
5016
5017 /* Q pseudo registers are available for both NEON (Q0~Q15) and
5018 MVE (Q0~Q7) features. */
5019 if (tdep->have_q_pseudos
5020 && regnum >= tdep->q_pseudo_base
5021 && regnum < (tdep->q_pseudo_base + tdep->q_pseudo_count))
5022 return true;
5023
5024 return false;
5025 }
5026
5027 /* Return true if REGNUM is a VFP S pseudo register. Return false
5028 otherwise.
5029
5030 REGNUM is the raw register number and not a pseudo-relative register
5031 number. */
5032
5033 static bool
5034 is_s_pseudo (struct gdbarch *gdbarch, int regnum)
5035 {
5036 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
5037
5038 if (tdep->have_s_pseudos
5039 && regnum >= tdep->s_pseudo_base
5040 && regnum < (tdep->s_pseudo_base + tdep->s_pseudo_count))
5041 return true;
5042
5043 return false;
5044 }
5045
5046 /* Return true if REGNUM is a MVE pseudo register (P0). Return false
5047 otherwise.
5048
5049 REGNUM is the raw register number and not a pseudo-relative register
5050 number. */
5051
5052 static bool
5053 is_mve_pseudo (struct gdbarch *gdbarch, int regnum)
5054 {
5055 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
5056
5057 if (tdep->have_mve
5058 && regnum >= tdep->mve_pseudo_base
5059 && regnum < tdep->mve_pseudo_base + tdep->mve_pseudo_count)
5060 return true;
5061
5062 return false;
5063 }
5064
5065 /* Return true if REGNUM is a PACBTI pseudo register (ra_auth_code). Return
5066 false otherwise.
5067
5068 REGNUM is the raw register number and not a pseudo-relative register
5069 number. */
5070
5071 static bool
5072 is_pacbti_pseudo (struct gdbarch *gdbarch, int regnum)
5073 {
5074 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
5075
5076 if (tdep->have_pacbti
5077 && regnum >= tdep->pacbti_pseudo_base
5078 && regnum < tdep->pacbti_pseudo_base + tdep->pacbti_pseudo_count)
5079 return true;
5080
5081 return false;
5082 }
5083
5084 /* Return the GDB type object for the "standard" data type of data in
5085 register N. */
5086
5087 static struct type *
5088 arm_register_type (struct gdbarch *gdbarch, int regnum)
5089 {
5090 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
5091
5092 if (is_s_pseudo (gdbarch, regnum))
5093 return builtin_type (gdbarch)->builtin_float;
5094
5095 if (is_q_pseudo (gdbarch, regnum))
5096 return arm_neon_quad_type (gdbarch);
5097
5098 if (is_mve_pseudo (gdbarch, regnum))
5099 return builtin_type (gdbarch)->builtin_int16;
5100
5101 if (is_pacbti_pseudo (gdbarch, regnum))
5102 return builtin_type (gdbarch)->builtin_uint32;
5103
5104 /* If the target description has register information, the only reason
5105 we are in this function is to override the types of the
5106 double-precision registers for NEON. */
5107 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
5108 {
5109 struct type *t = tdesc_register_type (gdbarch, regnum);
5110
5111 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
5112 && t->code () == TYPE_CODE_FLT
5113 && tdep->have_neon)
5114 return arm_neon_double_type (gdbarch);
5115 else
5116 return t;
5117 }
5118
5119 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
5120 {
5121 if (!tdep->have_fpa_registers)
5122 return builtin_type (gdbarch)->builtin_void;
5123
5124 return arm_ext_type (gdbarch);
5125 }
5126 else if (regnum == ARM_SP_REGNUM)
5127 return builtin_type (gdbarch)->builtin_data_ptr;
5128 else if (regnum == ARM_PC_REGNUM)
5129 return builtin_type (gdbarch)->builtin_func_ptr;
5130 else if (regnum >= ARRAY_SIZE (arm_register_names))
5131 /* These registers are only supported on targets which supply
5132 an XML description. */
5133 return builtin_type (gdbarch)->builtin_int0;
5134 else
5135 return builtin_type (gdbarch)->builtin_uint32;
5136 }
5137
5138 /* Map a DWARF register REGNUM onto the appropriate GDB register
5139 number. */
5140
5141 static int
5142 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
5143 {
5144 /* Core integer regs. */
5145 if (reg >= 0 && reg <= 15)
5146 return reg;
5147
5148 /* Legacy FPA encoding. These were once used in a way which
5149 overlapped with VFP register numbering, so their use is
5150 discouraged, but GDB doesn't support the ARM toolchain
5151 which used them for VFP. */
5152 if (reg >= 16 && reg <= 23)
5153 return ARM_F0_REGNUM + reg - 16;
5154
5155 /* New assignments for the FPA registers. */
5156 if (reg >= 96 && reg <= 103)
5157 return ARM_F0_REGNUM + reg - 96;
5158
5159 /* WMMX register assignments. */
5160 if (reg >= 104 && reg <= 111)
5161 return ARM_WCGR0_REGNUM + reg - 104;
5162
5163 if (reg >= 112 && reg <= 127)
5164 return ARM_WR0_REGNUM + reg - 112;
5165
5166 /* PACBTI register containing the Pointer Authentication Code. */
5167 if (reg == ARM_DWARF_RA_AUTH_CODE)
5168 {
5169 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
5170
5171 if (tdep->have_pacbti)
5172 return tdep->pacbti_pseudo_base;
5173
5174 return -1;
5175 }
5176
5177 if (reg >= 192 && reg <= 199)
5178 return ARM_WC0_REGNUM + reg - 192;
5179
5180 /* VFP v2 registers. A double precision value is actually
5181 in d1 rather than s2, but the ABI only defines numbering
5182 for the single precision registers. This will "just work"
5183 in GDB for little endian targets (we'll read eight bytes,
5184 starting in s0 and then progressing to s1), but will be
5185 reversed on big endian targets with VFP. This won't
5186 be a problem for the new Neon quad registers; you're supposed
5187 to use DW_OP_piece for those. */
5188 if (reg >= 64 && reg <= 95)
5189 {
5190 char name_buf[4];
5191
5192 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
5193 return user_reg_map_name_to_regnum (gdbarch, name_buf,
5194 strlen (name_buf));
5195 }
5196
5197 /* VFP v3 / Neon registers. This range is also used for VFP v2
5198 registers, except that it now describes d0 instead of s0. */
5199 if (reg >= 256 && reg <= 287)
5200 {
5201 char name_buf[4];
5202
5203 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
5204 return user_reg_map_name_to_regnum (gdbarch, name_buf,
5205 strlen (name_buf));
5206 }
5207
5208 return -1;
5209 }
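
/* Illustrative checks, not part of the selftests: a few concrete cases
   of the DWARF-to-GDB mapping above.  Core registers map one-to-one,
   DWARF 96-103 are the FPA registers f0-f7, and DWARF 64-95 resolve by
   name to the single-precision VFP registers.  Hypothetical helper,
   never called.  */

static void
example_dwarf_regnum_mapping (struct gdbarch *gdbarch)
{
  gdb_assert (arm_dwarf_reg_to_regnum (gdbarch, 0) == 0);
  gdb_assert (arm_dwarf_reg_to_regnum (gdbarch, 15) == 15);
  gdb_assert (arm_dwarf_reg_to_regnum (gdbarch, 96) == ARM_F0_REGNUM);
}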
5210
5211 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
5212 static int
5213 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
5214 {
5215 int reg = regnum;
5216 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
5217
5218 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
5219 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
5220
5221 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
5222 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
5223
5224 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
5225 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
5226
5227 if (reg < NUM_GREGS)
5228 return SIM_ARM_R0_REGNUM + reg;
5229 reg -= NUM_GREGS;
5230
5231 if (reg < NUM_FREGS)
5232 return SIM_ARM_FP0_REGNUM + reg;
5233 reg -= NUM_FREGS;
5234
5235 if (reg < NUM_SREGS)
5236 return SIM_ARM_FPS_REGNUM + reg;
5237 reg -= NUM_SREGS;
5238
5239 internal_error (_("Bad REGNUM %d"), regnum);
5240 }
5241
5242 static const unsigned char op_lit0 = DW_OP_lit0;
5243
5244 static void
5245 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
5246 struct dwarf2_frame_state_reg *reg,
5247 frame_info_ptr this_frame)
5248 {
5249 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
5250
5251 if (is_pacbti_pseudo (gdbarch, regnum))
5252 {
5253 /* Initialize RA_AUTH_CODE to zero. */
5254 reg->how = DWARF2_FRAME_REG_SAVED_VAL_EXP;
5255 reg->loc.exp.start = &op_lit0;
5256 reg->loc.exp.len = 1;
5257 return;
5258 }
5259
5260 if (regnum == ARM_PC_REGNUM || regnum == ARM_PS_REGNUM)
5261 {
5262 reg->how = DWARF2_FRAME_REG_FN;
5263 reg->loc.fn = arm_dwarf2_prev_register;
5264 }
5265 else if (regnum == ARM_SP_REGNUM)
5266 reg->how = DWARF2_FRAME_REG_CFA;
5267 else if (arm_is_alternative_sp_register (tdep, regnum))
5268 {
5269 /* Handle the alternative SP registers on Cortex-M. */
5270 reg->how = DWARF2_FRAME_REG_FN;
5271 reg->loc.fn = arm_dwarf2_prev_register;
5272 }
5273 }
5274
5275 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
5276 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
5277 NULL if an error occurs. BUF is freed. */
5278
5279 static gdb_byte *
5280 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
5281 int old_len, int new_len)
5282 {
5283 gdb_byte *new_buf;
5284 int bytes_to_read = new_len - old_len;
5285
5286 new_buf = (gdb_byte *) xmalloc (new_len);
5287 memcpy (new_buf + bytes_to_read, buf, old_len);
5288 xfree (buf);
5289 if (target_read_code (endaddr - new_len, new_buf, bytes_to_read) != 0)
5290 {
5291 xfree (new_buf);
5292 return NULL;
5293 }
5294 return new_buf;
5295 }
5296
5297 /* An IT block is at most the 2-byte IT instruction followed by
5298 four 4-byte instructions. The furthest back we must search to
5299 find an IT block that affects the current instruction is thus
5300 2 + 3 * 4 == 14 bytes. */
5301 #define MAX_IT_BLOCK_PREFIX 14
5302
5303 /* Use a quick scan if there are more than this many bytes of
5304 code. */
5305 #define IT_SCAN_THRESHOLD 32
5306
5307 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
5308 A breakpoint in an IT block may not be hit, depending on the
5309 condition flags. */
5310 static CORE_ADDR
5311 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
5312 {
5313 gdb_byte *buf;
5314 char map_type;
5315 CORE_ADDR boundary, func_start;
5316 int buf_len;
5317 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
5318 int i, any, last_it, last_it_count;
5319 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
5320
5321 /* If we are using BKPT breakpoints, none of this is necessary. */
5322 if (tdep->thumb2_breakpoint == NULL)
5323 return bpaddr;
5324
5325 /* ARM mode does not have this problem. */
5326 if (!arm_pc_is_thumb (gdbarch, bpaddr))
5327 return bpaddr;
5328
5329 /* We are setting a breakpoint in Thumb code that could potentially
5330 contain an IT block. The first step is to find how much Thumb
5331 code there is; we do not need to read outside of known Thumb
5332 sequences. */
5333 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
5334 if (map_type == 0)
5335 /* Thumb-2 code must have mapping symbols to have a chance. */
5336 return bpaddr;
5337
5338 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
5339
5340 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL))
5341 {
5342 func_start = gdbarch_addr_bits_remove (gdbarch, func_start);
5343 if (func_start > boundary)
5344 boundary = func_start;
5345 }
5346
5347 /* Search for a candidate IT instruction. We have to do some fancy
5348 footwork to distinguish a real IT instruction from the second
5349 half of a 32-bit instruction, but there is no need for that if
5350 there's no candidate. */
5351 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
5352 if (buf_len == 0)
5353 /* No room for an IT instruction. */
5354 return bpaddr;
5355
5356 buf = (gdb_byte *) xmalloc (buf_len);
5357 if (target_read_code (bpaddr - buf_len, buf, buf_len) != 0)
5358 return bpaddr;
5359 any = 0;
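/* An IT instruction is encoded 0xBFxy, where x is the first condition
and y the mask; a zero mask field instead encodes hint instructions
(NOP, YIELD, ...), hence the test for a nonzero low nibble.  */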
5360 for (i = 0; i < buf_len; i += 2)
5361 {
5362 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5363 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5364 {
5365 any = 1;
5366 break;
5367 }
5368 }
5369
5370 if (any == 0)
5371 {
5372 xfree (buf);
5373 return bpaddr;
5374 }
5375
5376 /* OK, the code bytes before this instruction contain at least one
5377 halfword which resembles an IT instruction. We know that it's
5378 Thumb code, but there are still two possibilities. Either the
5379 halfword really is an IT instruction, or it is the second half of
5380 a 32-bit Thumb instruction. The only way we can tell is to
5381 scan forwards from a known instruction boundary. */
5382 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
5383 {
5384 int definite;
5385
5386 /* There's a lot of code before this instruction. Start with an
5387 optimistic search; it's easy to recognize halfwords that can
5388 not be the start of a 32-bit instruction, and use that to
5389 lock on to the instruction boundaries. */
5390 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
5391 if (buf == NULL)
5392 return bpaddr;
5393 buf_len = IT_SCAN_THRESHOLD;
5394
5395 definite = 0;
5396 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
5397 {
5398 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5399 if (thumb_insn_size (inst1) == 2)
5400 {
5401 definite = 1;
5402 break;
5403 }
5404 }
5405
5406 /* At this point, if DEFINITE, BUF[I] is the first place we
5407 are sure that we know the instruction boundaries, and it is far
5408 enough from BPADDR that we could not miss an IT instruction
5409 affecting BPADDR. If ! DEFINITE, give up - start from a
5410 known boundary. */
5411 if (! definite)
5412 {
5413 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
5414 bpaddr - boundary);
5415 if (buf == NULL)
5416 return bpaddr;
5417 buf_len = bpaddr - boundary;
5418 i = 0;
5419 }
5420 }
5421 else
5422 {
5423 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
5424 if (buf == NULL)
5425 return bpaddr;
5426 buf_len = bpaddr - boundary;
5427 i = 0;
5428 }
5429
5430 /* Scan forwards. Find the last IT instruction before BPADDR. */
5431 last_it = -1;
5432 last_it_count = 0;
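/* The lowest set bit of the IT mask encodes how many following
instructions (1 to 4) the block covers.  LAST_IT_COUNT is decremented
for every instruction scanned after the IT, so a value of at least 1
on reaching BPADDR means BPADDR still falls inside that block.  */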
5433 while (i < buf_len)
5434 {
5435 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5436 last_it_count--;
5437 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5438 {
5439 last_it = i;
5440 if (inst1 & 0x0001)
5441 last_it_count = 4;
5442 else if (inst1 & 0x0002)
5443 last_it_count = 3;
5444 else if (inst1 & 0x0004)
5445 last_it_count = 2;
5446 else
5447 last_it_count = 1;
5448 }
5449 i += thumb_insn_size (inst1);
5450 }
5451
5452 xfree (buf);
5453
5454 if (last_it == -1)
5455 /* There wasn't really an IT instruction after all. */
5456 return bpaddr;
5457
5458 if (last_it_count < 1)
5459 /* It was too far away. */
5460 return bpaddr;
5461
5462 /* This really is a trouble spot. Move the breakpoint to the IT
5463 instruction. */
5464 return bpaddr - buf_len + last_it;
5465 }
5466
5467 /* ARM displaced stepping support.
5468
5469 Generally ARM displaced stepping works as follows:
5470
5471 1. When an instruction is to be single-stepped, it is first decoded by
5472 arm_process_displaced_insn. Depending on the type of instruction, it is
5473 then copied to a scratch location, possibly in a modified form. The
5474 copy_* set of functions performs such modification, as necessary. A
5475 breakpoint is placed after the modified instruction in the scratch space
5476 to return control to GDB. Note in particular that instructions which
5477 modify the PC will no longer do so after modification.
5478
5479 2. The instruction is single-stepped, by setting the PC to the scratch
5480 location address, and resuming. Control returns to GDB when the
5481 breakpoint is hit.
5482
5483 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
5484 function used for the current instruction. This function's job is to
5485 put the CPU/memory state back to what it would have been if the
5486 instruction had been executed unmodified in its original location. */
5487
5488 /* NOP instruction (mov r0, r0). */
5489 #define ARM_NOP 0xe1a00000
5490 #define THUMB_NOP 0x4600
5491
5492 /* Helper for register reads for displaced stepping. In particular, this
5493 returns the PC as it would be seen by the instruction at its original
5494 location. */
5495
5496 ULONGEST
5497 displaced_read_reg (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
5498 int regno)
5499 {
5500 ULONGEST ret;
5501 CORE_ADDR from = dsc->insn_addr;
5502
5503 if (regno == ARM_PC_REGNUM)
5504 {
5505 /* Compute pipeline offset:
5506 - When executing an ARM instruction, PC reads as the address of the
5507 current instruction plus 8.
5508 - When executing a Thumb instruction, PC reads as the address of the
5509 current instruction plus 4. */
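/* For example, an ARM instruction originally at 0x1000 that reads the
PC sees 0x1008, and a Thumb instruction at 0x1000 sees 0x1004,
regardless of where the scratch copy actually executes.  */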
5510
5511 if (!dsc->is_thumb)
5512 from += 8;
5513 else
5514 from += 4;
5515
5516 displaced_debug_printf ("read pc value %.8lx",
5517 (unsigned long) from);
5518 return (ULONGEST) from;
5519 }
5520 else
5521 {
5522 regcache_cooked_read_unsigned (regs, regno, &ret);
5523
5524 displaced_debug_printf ("read r%d value %.8lx",
5525 regno, (unsigned long) ret);
5526
5527 return ret;
5528 }
5529 }
5530
5531 static int
5532 displaced_in_arm_mode (struct regcache *regs)
5533 {
5534 ULONGEST ps;
5535 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
5536
5537 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5538
5539 return (ps & t_bit) == 0;
5540 }
5541
5542 /* Write to the PC as from a branch instruction. */
5543
5544 static void
5545 branch_write_pc (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
5546 ULONGEST val)
5547 {
5548 if (!dsc->is_thumb)
5549 /* Note: If bits 0/1 are set, this branch would be unpredictable for
5550 architecture versions < 6. */
5551 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5552 val & ~(ULONGEST) 0x3);
5553 else
5554 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5555 val & ~(ULONGEST) 0x1);
5556 }
5557
5558 /* Write to the PC as from a branch-exchange instruction. */
5559
5560 static void
5561 bx_write_pc (struct regcache *regs, ULONGEST val)
5562 {
5563 ULONGEST ps;
5564 ULONGEST t_bit = arm_psr_thumb_bit (regs->arch ());
5565
5566 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5567
5568 if ((val & 1) == 1)
5569 {
5570 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
5571 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
5572 }
5573 else if ((val & 2) == 0)
5574 {
5575 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5576 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
5577 }
5578 else
5579 {
5580 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
5581 mode, align dest to 4 bytes). */
5582 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
5583 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5584 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
5585 }
5586 }
5587
5588 /* Write to the PC as if from a load instruction. */
5589
5590 static void
5591 load_write_pc (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
5592 ULONGEST val)
5593 {
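/* From ARMv5T onwards a load to the PC behaves like BX: bit 0 of the
loaded value selects the instruction set.  On earlier architectures it
is treated as a plain branch.  */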
5594 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
5595 bx_write_pc (regs, val);
5596 else
5597 branch_write_pc (regs, dsc, val);
5598 }
5599
5600 /* Write to the PC as if from an ALU instruction. */
5601
5602 static void
5603 alu_write_pc (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
5604 ULONGEST val)
5605 {
5606 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
5607 bx_write_pc (regs, val);
5608 else
5609 branch_write_pc (regs, dsc, val);
5610 }
5611
5612 /* Helper for writing to registers for displaced stepping. Writing to the PC
5613 has varying effects depending on the instruction which does the write:
5614 this is controlled by the WRITE_PC argument. */
5615
5616 void
5617 displaced_write_reg (regcache *regs, arm_displaced_step_copy_insn_closure *dsc,
5618 int regno, ULONGEST val, enum pc_write_style write_pc)
5619 {
5620 if (regno == ARM_PC_REGNUM)
5621 {
5622 displaced_debug_printf ("writing pc %.8lx", (unsigned long) val);
5623
5624 switch (write_pc)
5625 {
5626 case BRANCH_WRITE_PC:
5627 branch_write_pc (regs, dsc, val);
5628 break;
5629
5630 case BX_WRITE_PC:
5631 bx_write_pc (regs, val);
5632 break;
5633
5634 case LOAD_WRITE_PC:
5635 load_write_pc (regs, dsc, val);
5636 break;
5637
5638 case ALU_WRITE_PC:
5639 alu_write_pc (regs, dsc, val);
5640 break;
5641
5642 case CANNOT_WRITE_PC:
5643 warning (_("Instruction wrote to PC in an unexpected way when "
5644 "single-stepping"));
5645 break;
5646
5647 default:
5648 internal_error (_("Invalid argument to displaced_write_reg"));
5649 }
5650
5651 dsc->wrote_to_pc = 1;
5652 }
5653 else
5654 {
5655 displaced_debug_printf ("writing r%d value %.8lx",
5656 regno, (unsigned long) val);
5657 regcache_cooked_write_unsigned (regs, regno, val);
5658 }
5659 }
5660
5661 /* This function is used to concisely determine if an instruction INSN
5662 references PC. Register fields of interest in INSN should have the
5663 corresponding fields of BITMASK set to 0b1111. The function
5664 returns 1 if any of these fields in INSN reference the PC
5665 (also 0b1111, r15), else it returns 0. */
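/* For example, for an ARM data-processing instruction with Rn in bits
16-19 and Rd in bits 12-15, a BITMASK of 0x000ff000 checks both fields
(cf. arm_copy_alu_imm below).  */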
5666
5667 static int
5668 insn_references_pc (uint32_t insn, uint32_t bitmask)
5669 {
5670 uint32_t lowbit = 1;
5671
5672 while (bitmask != 0)
5673 {
5674 uint32_t mask;
5675
5676 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
5677 ;
5678
5679 if (!lowbit)
5680 break;
5681
5682 mask = lowbit * 0xf;
5683
5684 if ((insn & mask) == mask)
5685 return 1;
5686
5687 bitmask &= ~mask;
5688 }
5689
5690 return 0;
5691 }
5692
5693 /* The simplest copy function. Many instructions have the same effect no
5694 matter what address they are executed at: in those cases, use this. */
5695
5696 static int
5697 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn, const char *iname,
5698 arm_displaced_step_copy_insn_closure *dsc)
5699 {
5700 displaced_debug_printf ("copying insn %.8lx, opcode/class '%s' unmodified",
5701 (unsigned long) insn, iname);
5702
5703 dsc->modinsn[0] = insn;
5704
5705 return 0;
5706 }
5707
5708 static int
5709 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
5710 uint16_t insn2, const char *iname,
5711 arm_displaced_step_copy_insn_closure *dsc)
5712 {
5713 displaced_debug_printf ("copying insn %.4x %.4x, opcode/class '%s' "
5714 "unmodified", insn1, insn2, iname);
5715
5716 dsc->modinsn[0] = insn1;
5717 dsc->modinsn[1] = insn2;
5718 dsc->numinsns = 2;
5719
5720 return 0;
5721 }
5722
5723 /* Copy a 16-bit Thumb instruction (Thumb or 16-bit Thumb-2) without any
5724 modification. */
5725 static int
5726 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
5727 const char *iname,
5728 arm_displaced_step_copy_insn_closure *dsc)
5729 {
5730 displaced_debug_printf ("copying insn %.4x, opcode/class '%s' unmodified",
5731 insn, iname);
5732
5733 dsc->modinsn[0] = insn;
5734
5735 return 0;
5736 }
5737
5738 /* Preload instructions with immediate offset. */
5739
5740 static void
5741 cleanup_preload (struct gdbarch *gdbarch, regcache *regs,
5742 arm_displaced_step_copy_insn_closure *dsc)
5743 {
5744 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5745 if (!dsc->u.preload.immed)
5746 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5747 }
5748
5749 static void
5750 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
5751 arm_displaced_step_copy_insn_closure *dsc, unsigned int rn)
5752 {
5753 ULONGEST rn_val;
5754 /* Preload instructions:
5755
5756 {pli/pld} [rn, #+/-imm]
5757 ->
5758 {pli/pld} [r0, #+/-imm]. */
5759
5760 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5761 rn_val = displaced_read_reg (regs, dsc, rn);
5762 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5763 dsc->u.preload.immed = 1;
5764
5765 dsc->cleanup = &cleanup_preload;
5766 }
5767
5768 static int
5769 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5770 arm_displaced_step_copy_insn_closure *dsc)
5771 {
5772 unsigned int rn = bits (insn, 16, 19);
5773
5774 if (!insn_references_pc (insn, 0x000f0000ul))
5775 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
5776
5777 displaced_debug_printf ("copying preload insn %.8lx", (unsigned long) insn);
5778
5779 dsc->modinsn[0] = insn & 0xfff0ffff;
5780
5781 install_preload (gdbarch, regs, dsc, rn);
5782
5783 return 0;
5784 }
5785
5786 static int
5787 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
5788 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
5789 {
5790 unsigned int rn = bits (insn1, 0, 3);
5791 unsigned int u_bit = bit (insn1, 7);
5792 int imm12 = bits (insn2, 0, 11);
5793 ULONGEST pc_val;
5794
5795 if (rn != ARM_PC_REGNUM)
5796 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
5797
5798 /* PC is only allowed to be used in PLI (immediate, literal) Encoding T3
5799 and PLD (literal) Encoding T1. */
5800 displaced_debug_printf ("copying pld/pli pc (0x%x) %c imm12 %.4x",
5801 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
5802 imm12);
5803
5804 if (!u_bit)
5805 imm12 = -1 * imm12;
5806
5807 /* Rewrite instruction {pli/pld} PC imm12 into:
5808 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
5809
5810 {pli/pld} [r0, r1]
5811
5812 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
5813
5814 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5815 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5816
5817 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5818
5819 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
5820 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
5821 dsc->u.preload.immed = 0;
5822
5823 /* {pli/pld} [r0, r1] */
5824 dsc->modinsn[0] = insn1 & 0xfff0;
5825 dsc->modinsn[1] = 0xf001;
5826 dsc->numinsns = 2;
5827
5828 dsc->cleanup = &cleanup_preload;
5829 return 0;
5830 }
5831
5832 /* Preload instructions with register offset. */
5833
5834 static void
5835 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
5836 arm_displaced_step_copy_insn_closure *dsc, unsigned int rn,
5837 unsigned int rm)
5838 {
5839 ULONGEST rn_val, rm_val;
5840
5841 /* Preload register-offset instructions:
5842
5843 {pli/pld} [rn, rm {, shift}]
5844 ->
5845 {pli/pld} [r0, r1 {, shift}]. */
5846
5847 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5848 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5849 rn_val = displaced_read_reg (regs, dsc, rn);
5850 rm_val = displaced_read_reg (regs, dsc, rm);
5851 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5852 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
5853 dsc->u.preload.immed = 0;
5854
5855 dsc->cleanup = &cleanup_preload;
5856 }
5857
5858 static int
5859 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
5860 struct regcache *regs,
5861 arm_displaced_step_copy_insn_closure *dsc)
5862 {
5863 unsigned int rn = bits (insn, 16, 19);
5864 unsigned int rm = bits (insn, 0, 3);
5865
5866
5867 if (!insn_references_pc (insn, 0x000f000ful))
5868 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
5869
5870 displaced_debug_printf ("copying preload insn %.8lx",
5871 (unsigned long) insn);
5872
5873 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
5874
5875 install_preload_reg (gdbarch, regs, dsc, rn, rm);
5876 return 0;
5877 }
5878
5879 /* Copy/cleanup coprocessor load and store instructions. */
5880
5881 static void
5882 cleanup_copro_load_store (struct gdbarch *gdbarch,
5883 struct regcache *regs,
5884 arm_displaced_step_copy_insn_closure *dsc)
5885 {
5886 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
5887
5888 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5889
5890 if (dsc->u.ldst.writeback)
5891 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
5892 }
5893
5894 static void
5895 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5896 arm_displaced_step_copy_insn_closure *dsc,
5897 int writeback, unsigned int rn)
5898 {
5899 ULONGEST rn_val;
5900
5901 /* Coprocessor load/store instructions:
5902
5903 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
5904 ->
5905 {stc/stc2} [r0, #+/-imm].
5906
5907 ldc/ldc2 are handled identically. */
5908
5909 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5910 rn_val = displaced_read_reg (regs, dsc, rn);
5911 /* PC should be 4-byte aligned. */
5912 rn_val = rn_val & 0xfffffffc;
5913 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5914
5915 dsc->u.ldst.writeback = writeback;
5916 dsc->u.ldst.rn = rn;
5917
5918 dsc->cleanup = &cleanup_copro_load_store;
5919 }
5920
5921 static int
5922 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
5923 struct regcache *regs,
5924 arm_displaced_step_copy_insn_closure *dsc)
5925 {
5926 unsigned int rn = bits (insn, 16, 19);
5927
5928 if (!insn_references_pc (insn, 0x000f0000ul))
5929 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
5930
5931 displaced_debug_printf ("copying coprocessor load/store insn %.8lx",
5932 (unsigned long) insn);
5933
5934 dsc->modinsn[0] = insn & 0xfff0ffff;
5935
5936 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
5937
5938 return 0;
5939 }
5940
5941 static int
5942 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
5943 uint16_t insn2, struct regcache *regs,
5944 arm_displaced_step_copy_insn_closure *dsc)
5945 {
5946 unsigned int rn = bits (insn1, 0, 3);
5947
5948 if (rn != ARM_PC_REGNUM)
5949 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
5950 "copro load/store", dsc);
5951
5952 displaced_debug_printf ("copying coprocessor load/store insn %.4x%.4x",
5953 insn1, insn2);
5954
5955 dsc->modinsn[0] = insn1 & 0xfff0;
5956 dsc->modinsn[1] = insn2;
5957 dsc->numinsns = 2;
5958
5959 /* This function is called to copy the LDC/LDC2/VLDR instructions, which
5960 do not support writeback, so pass 0. */
5961 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
5962
5963 return 0;
5964 }
5965
5966 /* Clean up branch instructions (actually perform the branch, by setting
5967 PC). */
5968
5969 static void
5970 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
5971 arm_displaced_step_copy_insn_closure *dsc)
5972 {
5973 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5974 int branch_taken = condition_true (dsc->u.branch.cond, status);
5975 enum pc_write_style write_pc = dsc->u.branch.exchange
5976 ? BX_WRITE_PC : BRANCH_WRITE_PC;
5977
5978 if (!branch_taken)
5979 return;
5980
5981 if (dsc->u.branch.link)
5982 {
5983 /* LR should hold the address of the instruction following the current
5984 one. To avoid confusing later handling of a `bx lr' instruction, set
5985 bit 0 of the LR value if the current instruction is Thumb. */
5986 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
5987
5988 if (dsc->is_thumb)
5989 next_insn_addr |= 0x1;
5990
5991 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
5992 CANNOT_WRITE_PC);
5993 }
5994
5995 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
5996 }
5997
5998 /* Copy B/BL/BLX instructions with immediate destinations. */
5999
6000 static void
6001 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
6002 arm_displaced_step_copy_insn_closure *dsc,
6003 unsigned int cond, int exchange, int link, long offset)
6004 {
6005 /* Implement "BL<cond> <label>" as:
6006
6007 Preparation: cond <- instruction condition
6008 Insn: mov r0, r0 (nop)
6009 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
6010
6011 B<cond> similar, but don't set r14 in cleanup. */
6012
6013 dsc->u.branch.cond = cond;
6014 dsc->u.branch.link = link;
6015 dsc->u.branch.exchange = exchange;
6016
6017 dsc->u.branch.dest = dsc->insn_addr;
6018 if (link && exchange)
6019 /* For BLX, the offset is relative to Align (PC, 4). */
6020 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
6021
6022 if (dsc->is_thumb)
6023 dsc->u.branch.dest += 4 + offset;
6024 else
6025 dsc->u.branch.dest += 8 + offset;
6026
6027 dsc->cleanup = &cleanup_branch;
6028 }
6029 static int
6030 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
6031 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6032 {
6033 unsigned int cond = bits (insn, 28, 31);
6034 int exchange = (cond == 0xf);
6035 int link = exchange || bit (insn, 24);
6036 long offset;
6037
6038 displaced_debug_printf ("copying %s immediate insn %.8lx",
6039 (exchange) ? "blx" : (link) ? "bl" : "b",
6040 (unsigned long) insn);
6041 if (exchange)
6042 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
6043 then arrange the switch into Thumb mode. */
6044 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
6045 else
6046 offset = bits (insn, 0, 23) << 2;
6047
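/* The 24-bit immediate shifted left by two gives a 26-bit offset whose
sign bit is bit 25; sign-extend it manually.  */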
6048 if (bit (offset, 25))
6049 offset = offset | ~0x3ffffff;
6050
6051 dsc->modinsn[0] = ARM_NOP;
6052
6053 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
6054 return 0;
6055 }
6056
6057 static int
6058 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
6059 uint16_t insn2, struct regcache *regs,
6060 arm_displaced_step_copy_insn_closure *dsc)
6061 {
6062 int link = bit (insn2, 14);
6063 int exchange = link && !bit (insn2, 12);
6064 int cond = INST_AL;
6065 long offset = 0;
6066 int j1 = bit (insn2, 13);
6067 int j2 = bit (insn2, 11);
6068 int s = sbits (insn1, 10, 10);
6069 int i1 = !(j1 ^ bit (insn1, 10));
6070 int i2 = !(j2 ^ bit (insn1, 10));
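/* Per the ARM ARM, I1 = NOT(J1 EOR S) and I2 = NOT(J2 EOR S); S is read
sign-extended (sbits) so that ORing it into the top of OFFSET below
sign-extends the final result.  */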
6071
6072 if (!link && !exchange) /* B */
6073 {
6074 offset = (bits (insn2, 0, 10) << 1);
6075 if (bit (insn2, 12)) /* Encoding T4 */
6076 {
6077 offset |= (bits (insn1, 0, 9) << 12)
6078 | (i2 << 22)
6079 | (i1 << 23)
6080 | (s << 24);
6081 cond = INST_AL;
6082 }
6083 else /* Encoding T3 */
6084 {
6085 offset |= (bits (insn1, 0, 5) << 12)
6086 | (j1 << 18)
6087 | (j2 << 19)
6088 | (s << 20);
6089 cond = bits (insn1, 6, 9);
6090 }
6091 }
6092 else
6093 {
6094 offset = (bits (insn1, 0, 9) << 12);
6095 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
6096 offset |= exchange ?
6097 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
6098 }
6099
6100 displaced_debug_printf ("copying %s insn %.4x %.4x with offset %.8lx",
6101 link ? (exchange) ? "blx" : "bl" : "b",
6102 insn1, insn2, offset);
6103
6104 dsc->modinsn[0] = THUMB_NOP;
6105
6106 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
6107 return 0;
6108 }
6109
6110 /* Copy B Thumb instructions. */
6111 static int
6112 thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
6113 arm_displaced_step_copy_insn_closure *dsc)
6114 {
6115 unsigned int cond = 0;
6116 int offset = 0;
6117 unsigned short bit_12_15 = bits (insn, 12, 15);
6118 CORE_ADDR from = dsc->insn_addr;
6119
6120 if (bit_12_15 == 0xd)
6121 {
6122 /* offset = SignExtend (imm8:0, 32) */
6123 offset = sbits ((insn << 1), 0, 8);
6124 cond = bits (insn, 8, 11);
6125 }
6126 else if (bit_12_15 == 0xe) /* Encoding T2 */
6127 {
6128 offset = sbits ((insn << 1), 0, 11);
6129 cond = INST_AL;
6130 }
6131
6132 displaced_debug_printf ("copying b immediate insn %.4x with offset %d",
6133 insn, offset);
6134
6135 dsc->u.branch.cond = cond;
6136 dsc->u.branch.link = 0;
6137 dsc->u.branch.exchange = 0;
6138 dsc->u.branch.dest = from + 4 + offset;
6139
6140 dsc->modinsn[0] = THUMB_NOP;
6141
6142 dsc->cleanup = &cleanup_branch;
6143
6144 return 0;
6145 }
6146
6147 /* Copy BX/BLX with register-specified destinations. */
6148
6149 static void
6150 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
6151 arm_displaced_step_copy_insn_closure *dsc, int link,
6152 unsigned int cond, unsigned int rm)
6153 {
6154 /* Implement "{BX,BLX}<cond> <reg>" as:
6155
6156 Preparation: cond <- instruction condition
6157 Insn: mov r0, r0 (nop)
6158 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
6159
6160 Don't set r14 in cleanup for BX. */
6161
6162 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
6163
6164 dsc->u.branch.cond = cond;
6165 dsc->u.branch.link = link;
6166
6167 dsc->u.branch.exchange = 1;
6168
6169 dsc->cleanup = &cleanup_branch;
6170 }
6171
6172 static int
6173 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
6174 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6175 {
6176 unsigned int cond = bits (insn, 28, 31);
6177 /* BX: x12xxx1x
6178 BLX: x12xxx3x. */
6179 int link = bit (insn, 5);
6180 unsigned int rm = bits (insn, 0, 3);
6181
6182 displaced_debug_printf ("copying insn %.8lx", (unsigned long) insn);
6183
6184 dsc->modinsn[0] = ARM_NOP;
6185
6186 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
6187 return 0;
6188 }
6189
6190 static int
6191 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
6192 struct regcache *regs,
6193 arm_displaced_step_copy_insn_closure *dsc)
6194 {
6195 int link = bit (insn, 7);
6196 unsigned int rm = bits (insn, 3, 6);
6197
6198 displaced_debug_printf ("copying insn %.4x", (unsigned short) insn);
6199
6200 dsc->modinsn[0] = THUMB_NOP;
6201
6202 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
6203
6204 return 0;
6205 }
6206
6207
6208 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
6209
6210 static void
6211 cleanup_alu_imm (struct gdbarch *gdbarch,
6212 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6213 {
6214 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6215 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6216 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6217 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6218 }
6219
6220 static int
6221 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6222 arm_displaced_step_copy_insn_closure *dsc)
6223 {
6224 unsigned int rn = bits (insn, 16, 19);
6225 unsigned int rd = bits (insn, 12, 15);
6226 unsigned int op = bits (insn, 21, 24);
6227 int is_mov = (op == 0xd);
6228 ULONGEST rd_val, rn_val;
6229
6230 if (!insn_references_pc (insn, 0x000ff000ul))
6231 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
6232
6233 displaced_debug_printf ("copying immediate %s insn %.8lx",
6234 is_mov ? "move" : "ALU",
6235 (unsigned long) insn);
6236
6237 /* Instruction is of form:
6238
6239 <op><cond> rd, [rn,] #imm
6240
6241 Rewrite as:
6242
6243 Preparation: tmp1, tmp2 <- r0, r1;
6244 r0, r1 <- rd, rn
6245 Insn: <op><cond> r0, r1, #imm
6246 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6247 */
6248
6249 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6250 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6251 rn_val = displaced_read_reg (regs, dsc, rn);
6252 rd_val = displaced_read_reg (regs, dsc, rd);
6253 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6254 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6255 dsc->rd = rd;
6256
6257 if (is_mov)
6258 dsc->modinsn[0] = insn & 0xfff00fff;
6259 else
6260 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
6261
6262 dsc->cleanup = &cleanup_alu_imm;
6263
6264 return 0;
6265 }
6266
6267 static int
6268 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
6269 uint16_t insn2, struct regcache *regs,
6270 arm_displaced_step_copy_insn_closure *dsc)
6271 {
6272 unsigned int op = bits (insn1, 5, 8);
6273 unsigned int rn, rm, rd;
6274 ULONGEST rd_val, rn_val;
6275
6276 rn = bits (insn1, 0, 3); /* Rn */
6277 rm = bits (insn2, 0, 3); /* Rm */
6278 rd = bits (insn2, 8, 11); /* Rd */
6279
6280 /* This routine is only called for the MOV instruction. */
6281 gdb_assert (op == 0x2 && rn == 0xf);
6282
6283 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
6284 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
6285
6286 displaced_debug_printf ("copying reg %s insn %.4x%.4x", "ALU", insn1, insn2);
6287
6288 /* Instruction is of form:
6289
6290 <op><cond> rd, [rn,] #imm
6291
6292 Rewrite as:
6293
6294 Preparation: tmp1, tmp2 <- r0, r1;
6295 r0, r1 <- rd, rn
6296 Insn: <op><cond> r0, r1, #imm
6297 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6298 */
6299
6300 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6301 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6302 rn_val = displaced_read_reg (regs, dsc, rn);
6303 rd_val = displaced_read_reg (regs, dsc, rd);
6304 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6305 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6306 dsc->rd = rd;
6307
6308 dsc->modinsn[0] = insn1;
6309 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
6310 dsc->numinsns = 2;
6311
6312 dsc->cleanup = &cleanup_alu_imm;
6313
6314 return 0;
6315 }
6316
6317 /* Copy/cleanup arithmetic/logic insns with register RHS. */
6318
6319 static void
6320 cleanup_alu_reg (struct gdbarch *gdbarch,
6321 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6322 {
6323 ULONGEST rd_val;
6324 int i;
6325
6326 rd_val = displaced_read_reg (regs, dsc, 0);
6327
6328 for (i = 0; i < 3; i++)
6329 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6330
6331 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6332 }
6333
6334 static void
6335 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
6336 arm_displaced_step_copy_insn_closure *dsc,
6337 unsigned int rd, unsigned int rn, unsigned int rm)
6338 {
6339 ULONGEST rd_val, rn_val, rm_val;
6340
6341 /* Instruction is of form:
6342
6343 <op><cond> rd, [rn,] rm [, <shift>]
6344
6345 Rewrite as:
6346
6347 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
6348 r0, r1, r2 <- rd, rn, rm
6349 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
6350 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
6351 */
6352
6353 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6354 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6355 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6356 rd_val = displaced_read_reg (regs, dsc, rd);
6357 rn_val = displaced_read_reg (regs, dsc, rn);
6358 rm_val = displaced_read_reg (regs, dsc, rm);
6359 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6360 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6361 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6362 dsc->rd = rd;
6363
6364 dsc->cleanup = &cleanup_alu_reg;
6365 }
6366
6367 static int
6368 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6369 arm_displaced_step_copy_insn_closure *dsc)
6370 {
6371 unsigned int op = bits (insn, 21, 24);
6372 int is_mov = (op == 0xd);
6373
6374 if (!insn_references_pc (insn, 0x000ff00ful))
6375 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
6376
6377 displaced_debug_printf ("copying reg %s insn %.8lx",
6378 is_mov ? "move" : "ALU", (unsigned long) insn);
6379
6380 if (is_mov)
6381 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
6382 else
6383 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
6384
6385 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
6386 bits (insn, 0, 3));
6387 return 0;
6388 }
6389
6390 static int
6391 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
6392 struct regcache *regs,
6393 arm_displaced_step_copy_insn_closure *dsc)
6394 {
6395 unsigned rm, rd;
6396
6397 rm = bits (insn, 3, 6);
6398 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
6399
6400 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
6401 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
6402
6403 displaced_debug_printf ("copying ALU reg insn %.4x", (unsigned short) insn);
6404
6405 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
6406
6407 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
6408
6409 return 0;
6410 }
6411
6412 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
6413
6414 static void
6415 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
6416 struct regcache *regs,
6417 arm_displaced_step_copy_insn_closure *dsc)
6418 {
6419 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6420 int i;
6421
6422 for (i = 0; i < 4; i++)
6423 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6424
6425 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6426 }
6427
6428 static void
6429 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
6430 arm_displaced_step_copy_insn_closure *dsc,
6431 unsigned int rd, unsigned int rn, unsigned int rm,
6432 unsigned rs)
6433 {
6434 int i;
6435 ULONGEST rd_val, rn_val, rm_val, rs_val;
6436
6437 /* Instruction is of form:
6438
6439 <op><cond> rd, [rn,] rm, <shift> rs
6440
6441 Rewrite as:
6442
6443 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
6444 r0, r1, r2, r3 <- rd, rn, rm, rs
6445 Insn: <op><cond> r0, r1, r2, <shift> r3
6446 Cleanup: tmp5 <- r0
6447 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
6448 rd <- tmp5
6449 */
6450
6451 for (i = 0; i < 4; i++)
6452 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6453
6454 rd_val = displaced_read_reg (regs, dsc, rd);
6455 rn_val = displaced_read_reg (regs, dsc, rn);
6456 rm_val = displaced_read_reg (regs, dsc, rm);
6457 rs_val = displaced_read_reg (regs, dsc, rs);
6458 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6459 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6460 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6461 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
6462 dsc->rd = rd;
6463 dsc->cleanup = &cleanup_alu_shifted_reg;
6464 }
6465
6466 static int
6467 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
6468 struct regcache *regs,
6469 arm_displaced_step_copy_insn_closure *dsc)
6470 {
6471 unsigned int op = bits (insn, 21, 24);
6472 int is_mov = (op == 0xd);
6473 unsigned int rd, rn, rm, rs;
6474
6475 if (!insn_references_pc (insn, 0x000fff0ful))
6476 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
6477
6478 displaced_debug_printf ("copying shifted reg %s insn %.8lx",
6479 is_mov ? "move" : "ALU",
6480 (unsigned long) insn);
6481
6482 rn = bits (insn, 16, 19);
6483 rm = bits (insn, 0, 3);
6484 rs = bits (insn, 8, 11);
6485 rd = bits (insn, 12, 15);
6486
6487 if (is_mov)
6488 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
6489 else
6490 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
6491
6492 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
6493
6494 return 0;
6495 }
6496
6497 /* Clean up load instructions. */
6498
6499 static void
6500 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
6501 arm_displaced_step_copy_insn_closure *dsc)
6502 {
6503 ULONGEST rt_val, rt_val2 = 0, rn_val;
6504
6505 rt_val = displaced_read_reg (regs, dsc, 0);
6506 if (dsc->u.ldst.xfersize == 8)
6507 rt_val2 = displaced_read_reg (regs, dsc, 1);
6508 rn_val = displaced_read_reg (regs, dsc, 2);
6509
6510 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6511 if (dsc->u.ldst.xfersize > 4)
6512 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6513 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6514 if (!dsc->u.ldst.immed)
6515 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6516
6517 /* Handle register writeback. */
6518 if (dsc->u.ldst.writeback)
6519 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6520 /* Put result in right place. */
6521 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
6522 if (dsc->u.ldst.xfersize == 8)
6523 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
6524 }
6525
6526 /* Clean up store instructions. */
6527
6528 static void
6529 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
6530 arm_displaced_step_copy_insn_closure *dsc)
6531 {
6532 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
6533
6534 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6535 if (dsc->u.ldst.xfersize > 4)
6536 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6537 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6538 if (!dsc->u.ldst.immed)
6539 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6540 if (!dsc->u.ldst.restore_r4)
6541 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
6542
6543 /* Writeback. */
6544 if (dsc->u.ldst.writeback)
6545 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6546 }
6547
6548 /* Copy "extra" load/store instructions. These are halfword/doubleword
6549 transfers, which have a different encoding to byte/word transfers. */
6550
6551 static int
6552 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
6553 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
6554 {
6555 unsigned int op1 = bits (insn, 20, 24);
6556 unsigned int op2 = bits (insn, 5, 6);
6557 unsigned int rt = bits (insn, 12, 15);
6558 unsigned int rn = bits (insn, 16, 19);
6559 unsigned int rm = bits (insn, 0, 3);
6560 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
6561 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
6562 int immed = (op1 & 0x4) != 0;
6563 int opcode;
6564 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
6565
6566 if (!insn_references_pc (insn, 0x000ff00ful))
6567 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
6568
6569 displaced_debug_printf ("copying %sextra load/store insn %.8lx",
6570 unprivileged ? "unprivileged " : "",
6571 (unsigned long) insn);
6572
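/* The LOAD and BYTESIZE tables above are indexed by a code built from
OP2 (insn bits 5-6, the transfer kind), insn bit 22 (immediate vs.
register offset, OP1 & 0x4) and insn bit 20 (OP1 & 0x1); OP2 == 0
encodings are not extra load/stores, hence the -4 bias and the check
below.  */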
6573 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
6574
6575 if (opcode < 0)
6576 internal_error (_("copy_extra_ld_st: instruction decode error"));
6577
6578 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6579 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6580 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6581 if (!immed)
6582 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6583
6584 rt_val = displaced_read_reg (regs, dsc, rt);
6585 if (bytesize[opcode] == 8)
6586 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
6587 rn_val = displaced_read_reg (regs, dsc, rn);
6588 if (!immed)
6589 rm_val = displaced_read_reg (regs, dsc, rm);
6590
6591 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6592 if (bytesize[opcode] == 8)
6593 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
6594 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6595 if (!immed)
6596 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6597
6598 dsc->rd = rt;
6599 dsc->u.ldst.xfersize = bytesize[opcode];
6600 dsc->u.ldst.rn = rn;
6601 dsc->u.ldst.immed = immed;
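/* Post-indexed forms (bit 24 clear) always write the updated address
back to Rn; pre-indexed forms write back only when the W bit (bit 21)
is set.  */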
6602 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
6603 dsc->u.ldst.restore_r4 = 0;
6604
6605 if (immed)
6606 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
6607 ->
6608 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
6609 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6610 else
6611 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
6612 ->
6613 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
6614 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6615
6616 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
6617
6618 return 0;
6619 }
6620
6621 /* Copy byte/half word/word loads and stores. */
6622
6623 static void
6624 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
6625 arm_displaced_step_copy_insn_closure *dsc, int load,
6626 int immed, int writeback, int size, int usermode,
6627 int rt, int rm, int rn)
6628 {
6629 ULONGEST rt_val, rn_val, rm_val = 0;
6630
6631 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6632 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6633 if (!immed)
6634 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6635 if (!load)
6636 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
6637
6638 rt_val = displaced_read_reg (regs, dsc, rt);
6639 rn_val = displaced_read_reg (regs, dsc, rn);
6640 if (!immed)
6641 rm_val = displaced_read_reg (regs, dsc, rm);
6642
6643 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6644 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6645 if (!immed)
6646 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6647 dsc->rd = rt;
6648 dsc->u.ldst.xfersize = size;
6649 dsc->u.ldst.rn = rn;
6650 dsc->u.ldst.immed = immed;
6651 dsc->u.ldst.writeback = writeback;
6652
6653 /* To write PC we can do:
6654
6655 Before this sequence of instructions:
6656 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
6657 r2 is the Rn value got from displaced_read_reg.
6658
6659 Insn1: push {pc} Write address of STR instruction + offset on stack
6660 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
6661 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
6662 = addr(Insn1) + offset - addr(Insn3) - 8
6663 = offset - 16
6664 Insn4: add r4, r4, #8 r4 = offset - 8
6665 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
6666 = from + offset
6667 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
6668
6669 Otherwise we don't know what value to write for PC, since the offset is
6670 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
6671 of this can be found in Section "Saving from r15" in
6672 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
6673
6674 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6675 }
6676
6677
6678 static int
6679 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
6680 uint16_t insn2, struct regcache *regs,
6681 arm_displaced_step_copy_insn_closure *dsc, int size)
6682 {
6683 unsigned int u_bit = bit (insn1, 7);
6684 unsigned int rt = bits (insn2, 12, 15);
6685 int imm12 = bits (insn2, 0, 11);
6686 ULONGEST pc_val;
6687
6688 displaced_debug_printf ("copying ldr pc (0x%x) R%d %c imm12 %.4x",
6689 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
6690 imm12);
6691
6692 if (!u_bit)
6693 imm12 = -1 * imm12;
6694
6695 /* Rewrite instruction LDR Rt imm12 into:
6696
6697 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
6698
6699 LDR R0, R2, R3,
6700
6701 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
6702
6703
6704 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6705 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6706 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6707
6708 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6709
6710 pc_val = pc_val & 0xfffffffc;
6711
6712 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
6713 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
6714
6715 dsc->rd = rt;
6716
6717 dsc->u.ldst.xfersize = size;
6718 dsc->u.ldst.immed = 0;
6719 dsc->u.ldst.writeback = 0;
6720 dsc->u.ldst.restore_r4 = 0;
6721
6722 /* LDR R0, R2, R3 */
6723 dsc->modinsn[0] = 0xf852;
6724 dsc->modinsn[1] = 0x3;
6725 dsc->numinsns = 2;
6726
6727 dsc->cleanup = &cleanup_load;
6728
6729 return 0;
6730 }
6731
6732 static int
6733 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
6734 uint16_t insn2, struct regcache *regs,
6735 arm_displaced_step_copy_insn_closure *dsc,
6736 int writeback, int immed)
6737 {
6738 unsigned int rt = bits (insn2, 12, 15);
6739 unsigned int rn = bits (insn1, 0, 3);
6740 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
6741 /* In LDR (register), there is also a register Rm, which is not allowed to
6742 be PC, so we don't have to check it. */
6743
6744 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
6745 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
6746 dsc);
6747
6748 displaced_debug_printf ("copying ldr r%d [r%d] insn %.4x%.4x",
6749 rt, rn, insn1, insn2);
6750
6751 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
6752 0, rt, rm, rn);
6753
6754 dsc->u.ldst.restore_r4 = 0;
6755
6756 if (immed)
6757 /* ldr[b]<cond> rt, [rn, #imm], etc.
6758 ->
6759 ldr[b]<cond> r0, [r2, #imm]. */
6760 {
6761 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6762 dsc->modinsn[1] = insn2 & 0x0fff;
6763 }
6764 else
6765 /* ldr[b]<cond> rt, [rn, rm], etc.
6766 ->
6767 ldr[b]<cond> r0, [r2, r3]. */
6768 {
6769 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6770 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
6771 }
6772
6773 dsc->numinsns = 2;
6774
6775 return 0;
6776 }
6777
6778
6779 static int
6780 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
6781 struct regcache *regs,
6782 arm_displaced_step_copy_insn_closure *dsc,
6783 int load, int size, int usermode)
6784 {
6785 int immed = !bit (insn, 25);
6786 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
6787 unsigned int rt = bits (insn, 12, 15);
6788 unsigned int rn = bits (insn, 16, 19);
6789 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
6790
6791 if (!insn_references_pc (insn, 0x000ff00ful))
6792 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
6793
6794 displaced_debug_printf ("copying %s%s r%d [r%d] insn %.8lx",
6795 load ? (size == 1 ? "ldrb" : "ldr")
6796 : (size == 1 ? "strb" : "str"),
6797 usermode ? "t" : "",
6798 rt, rn,
6799 (unsigned long) insn);
6800
6801 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
6802 usermode, rt, rm, rn);
6803
6804 if (load || rt != ARM_PC_REGNUM)
6805 {
6806 dsc->u.ldst.restore_r4 = 0;
6807
6808 if (immed)
6809 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
6810 ->
6811 {ldr,str}[b]<cond> r0, [r2, #imm]. */
6812 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6813 else
6814 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
6815 ->
6816 {ldr,str}[b]<cond> r0, [r2, r3]. */
6817 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6818 }
6819 else
6820 {
6821 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
6822 dsc->u.ldst.restore_r4 = 1;
6823 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
6824 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
6825 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
6826 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
6827 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
6828
6829 /* As above. */
6830 if (immed)
6831 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
6832 else
6833 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
6834
6835 dsc->numinsns = 6;
6836 }
6837
6838 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6839
6840 return 0;
6841 }
6842
6843 /* Cleanup LDM instructions with fully-populated register list. This is an
6844 unfortunate corner case: it's impossible to implement correctly by modifying
6845 the instruction. The issue is as follows: we have an instruction,
6846
6847 ldm rN, {r0-r15}
6848
6849 which we must rewrite to avoid loading PC. A possible solution would be to
6850 do the load in two halves, something like (with suitable cleanup
6851 afterwards):
6852
6853 mov r8, rN
6854 ldm[id][ab] r8!, {r0-r7}
6855 str r7, <temp>
6856 ldm[id][ab] r8, {r7-r14}
6857 <bkpt>
6858
6859 but at present there's no suitable place for <temp>, since the scratch space
6860 is overwritten before the cleanup routine is called. For now, we simply
6861 emulate the instruction. */
6862
6863 static void
6864 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
6865 arm_displaced_step_copy_insn_closure *dsc)
6866 {
6867 int inc = dsc->u.block.increment;
6868 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
6869 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
6870 uint32_t regmask = dsc->u.block.regmask;
6871 int regno = inc ? 0 : 15;
6872 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
6873 int exception_return = dsc->u.block.load && dsc->u.block.user
6874 && (regmask & 0x8000) != 0;
6875 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6876 int do_transfer = condition_true (dsc->u.block.cond, status);
6877 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6878
6879 if (!do_transfer)
6880 return;
6881
6882 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
6883 sensible we can do here. Complain loudly. */
6884 if (exception_return)
6885 error (_("Cannot single-step exception return"));
6886
6887 /* We don't handle any stores here for now. */
6888 gdb_assert (dsc->u.block.load != 0);
6889
6890 displaced_debug_printf ("emulating block transfer: %s %s %s",
6891 dsc->u.block.load ? "ldm" : "stm",
6892 dsc->u.block.increment ? "inc" : "dec",
6893 dsc->u.block.before ? "before" : "after");
6894
6895 while (regmask)
6896 {
6897 uint32_t memword;
6898
6899 if (inc)
6900 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
6901 regno++;
6902 else
6903 while (regno >= 0 && (regmask & (1 << regno)) == 0)
6904 regno--;
6905
6906 xfer_addr += bump_before;
6907
6908 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
6909 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
6910
6911 xfer_addr += bump_after;
6912
6913 regmask &= ~(1 << regno);
6914 }
6915
6916 if (dsc->u.block.writeback)
6917 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
6918 CANNOT_WRITE_PC);
6919 }
6920
6921 /* Clean up an STM which included the PC in the register list. */
6922
6923 static void
6924 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
6925 arm_displaced_step_copy_insn_closure *dsc)
6926 {
6927 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6928 int store_executed = condition_true (dsc->u.block.cond, status);
6929 CORE_ADDR pc_stored_at, transferred_regs
6930 = count_one_bits (dsc->u.block.regmask);
6931 CORE_ADDR stm_insn_addr;
6932 uint32_t pc_val;
6933 long offset;
6934 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6935
6936 /* If condition code fails, there's nothing else to do. */
6937 if (!store_executed)
6938 return;
6939
6940 if (dsc->u.block.increment)
6941 {
6942 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
6943
6944 if (dsc->u.block.before)
6945 pc_stored_at += 4;
6946 }
6947 else
6948 {
6949 pc_stored_at = dsc->u.block.xfer_addr;
6950
6951 if (dsc->u.block.before)
6952 pc_stored_at -= 4;
6953 }
6954
6955 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
6956 stm_insn_addr = dsc->scratch_base;
6957 offset = pc_val - stm_insn_addr;
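/* OFFSET now holds the amount by which the stored value exceeded the
address of the out-of-line STM, i.e. the implementation-defined store
offset for PC (typically 8 or 12).  */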
6958
6959 displaced_debug_printf ("detected PC offset %.8lx for STM instruction",
6960 offset);
6961
6962 /* Rewrite the stored PC to the proper value for the non-displaced original
6963 instruction. */
6964 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
6965 dsc->insn_addr + offset);
6966 }
6967
6968 /* Clean up an LDM which includes the PC in the register list. We clumped all
6969 the registers in the transferred list into a contiguous range r0...rX (to
6970 avoid loading PC directly and losing control of the debugged program), so we
6971 must undo that here. */
6972
6973 static void
6974 cleanup_block_load_pc (struct gdbarch *gdbarch,
6975 struct regcache *regs,
6976 arm_displaced_step_copy_insn_closure *dsc)
6977 {
6978 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6979 int load_executed = condition_true (dsc->u.block.cond, status);
6980 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
6981 unsigned int regs_loaded = count_one_bits (mask);
6982 unsigned int num_to_shuffle = regs_loaded, clobbered;
6983
6984 /* The method employed here will fail if the register list is fully populated
6985 (we need to avoid loading PC directly). */
6986 gdb_assert (num_to_shuffle < 16);
6987
6988 if (!load_executed)
6989 return;
6990
6991 clobbered = (1 << num_to_shuffle) - 1;
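/* For example, for "ldm rN, {r4, r9, pc}" the loads were redirected to
r0-r2: below, r2 (the highest redirected register) is moved to the PC,
r1 to r9 and r0 to r4, while CLOBBERED tracks which of r0-r2 still hold
scratch values and must be restored from DSC->tmp.  */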
6992
6993 while (num_to_shuffle > 0)
6994 {
6995 if ((mask & (1 << write_reg)) != 0)
6996 {
6997 unsigned int read_reg = num_to_shuffle - 1;
6998
6999 if (read_reg != write_reg)
7000 {
7001 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
7002 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
7003 displaced_debug_printf ("LDM: move loaded register r%d to r%d",
7004 read_reg, write_reg);
7005 }
7006 else
7007 displaced_debug_printf ("LDM: register r%d already in the right "
7008 "place", write_reg);
7009
7010 clobbered &= ~(1 << write_reg);
7011
7012 num_to_shuffle--;
7013 }
7014
7015 write_reg--;
7016 }
7017
7018 /* Restore any registers we scribbled over. */
7019 for (write_reg = 0; clobbered != 0; write_reg++)
7020 {
7021 if ((clobbered & (1 << write_reg)) != 0)
7022 {
7023 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
7024 CANNOT_WRITE_PC);
7025 displaced_debug_printf ("LDM: restored clobbered register r%d",
7026 write_reg);
7027 clobbered &= ~(1 << write_reg);
7028 }
7029 }
7030
7031 /* Perform register writeback manually. */
7032 if (dsc->u.block.writeback)
7033 {
7034 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
7035
7036 if (dsc->u.block.increment)
7037 new_rn_val += regs_loaded * 4;
7038 else
7039 new_rn_val -= regs_loaded * 4;
7040
7041 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
7042 CANNOT_WRITE_PC);
7043 }
7044 }
7045
7046 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
7047 in user-level code (in particular exception return, ldm rn, {...pc}^). */
7048
7049 static int
7050 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
7051 struct regcache *regs,
7052 arm_displaced_step_copy_insn_closure *dsc)
7053 {
7054 int load = bit (insn, 20);
7055 int user = bit (insn, 22);
7056 int increment = bit (insn, 23);
7057 int before = bit (insn, 24);
7058 int writeback = bit (insn, 21);
7059 int rn = bits (insn, 16, 19);
7060
7061 /* Block transfers which don't mention PC can be run directly
7062 out-of-line. */
7063 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
7064 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
7065
7066 if (rn == ARM_PC_REGNUM)
7067 {
7068 warning (_("displaced: Unpredictable LDM or STM with "
7069 "base register r15"));
7070 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
7071 }
7072
7073 displaced_debug_printf ("copying block transfer insn %.8lx",
7074 (unsigned long) insn);
7075
7076 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7077 dsc->u.block.rn = rn;
7078
7079 dsc->u.block.load = load;
7080 dsc->u.block.user = user;
7081 dsc->u.block.increment = increment;
7082 dsc->u.block.before = before;
7083 dsc->u.block.writeback = writeback;
7084 dsc->u.block.cond = bits (insn, 28, 31);
7085
7086 dsc->u.block.regmask = insn & 0xffff;
7087
7088 if (load)
7089 {
7090 if ((insn & 0xffff) == 0xffff)
7091 {
7092 /* LDM with a fully-populated register list. This case is
7093 particularly tricky. Implement for now by fully emulating the
7094 instruction (which might not behave perfectly in all cases, but
7095 these instructions should be rare enough for that not to matter
7096 too much). */
7097 dsc->modinsn[0] = ARM_NOP;
7098
7099 dsc->cleanup = &cleanup_block_load_all;
7100 }
7101 else
7102 {
7103 /* LDM of a list of registers which includes PC. Implement by
7104 rewriting the list of registers to be transferred into a
7105 contiguous chunk r0...rX before doing the transfer, then shuffling
7106 registers into the correct places in the cleanup routine. */
7107 unsigned int regmask = insn & 0xffff;
7108 unsigned int num_in_list = count_one_bits (regmask), new_regmask;
7109 unsigned int i;
7110
7111 for (i = 0; i < num_in_list; i++)
7112 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7113
7114 /* Writeback makes things complicated. We need to avoid clobbering
7115 the base register with one of the registers in our modified
7116 register list, but just using a different register can't work in
7117 all cases, e.g.:
7118
7119 ldm r14!, {r0-r13,pc}
7120
7121 which would need to be rewritten as:
7122
7123 ldm rN!, {r0-r14}
7124
7125 but that can't work, because there's no free register for N.
7126
7127 Solve this by turning off the writeback bit, and emulating
7128 writeback manually in the cleanup routine. */
7129
7130 if (writeback)
7131 insn &= ~(1 << 21);
7132
7133 new_regmask = (1 << num_in_list) - 1;
7134
7135 displaced_debug_printf ("LDM r%d%s, {..., pc}: original reg list "
7136 "%.4x, modified list %.4x",
7137 rn, writeback ? "!" : "",
7138 (int) insn & 0xffff, new_regmask);
7139
7140 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
7141
7142 dsc->cleanup = &cleanup_block_load_pc;
7143 }
7144 }
7145 else
7146 {
7147 /* STM of a list of registers which includes PC. Run the instruction
7148 as-is, but out of line: this will store the wrong value for the PC,
7149 so we must manually fix up the memory in the cleanup routine.
7150 Doing things this way has the advantage that we can auto-detect
7151 the offset of the PC write (which is architecture-dependent) in
7152 the cleanup routine. */
7153 dsc->modinsn[0] = insn;
7154
7155 dsc->cleanup = &cleanup_block_store_pc;
7156 }
7157
7158 return 0;
7159 }
7160
7161 static int
7162 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7163 struct regcache *regs,
7164 arm_displaced_step_copy_insn_closure *dsc)
7165 {
7166 int rn = bits (insn1, 0, 3);
7167 int load = bit (insn1, 4);
7168 int writeback = bit (insn1, 5);
7169
7170 /* Block transfers which don't mention PC can be run directly
7171 out-of-line. */
7172 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
7173 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
7174
7175 if (rn == ARM_PC_REGNUM)
7176 {
7177 warning (_("displaced: Unpredictable LDM or STM with "
7178 "base register r15"));
7179 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7180 "unpredictable ldm/stm", dsc);
7181 }
7182
7183 displaced_debug_printf ("copying block transfer insn %.4x%.4x",
7184 insn1, insn2);
7185
7186 /* Clear bit 13, since it should always be zero.  */
7187 dsc->u.block.regmask = (insn2 & 0xdfff);
7188 dsc->u.block.rn = rn;
7189
7190 dsc->u.block.load = load;
7191 dsc->u.block.user = 0;
7192 dsc->u.block.increment = bit (insn1, 7);
7193 dsc->u.block.before = bit (insn1, 8);
7194 dsc->u.block.writeback = writeback;
7195 dsc->u.block.cond = INST_AL;
7196 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7197
7198 if (load)
7199 {
7200 if (dsc->u.block.regmask == 0xffff)
7201 {
7202 /* This cannot happen: bit 13 was cleared above, so regmask != 0xffff.  */
7203 gdb_assert (0);
7204 }
7205 else
7206 {
7207 unsigned int regmask = dsc->u.block.regmask;
7208 unsigned int num_in_list = count_one_bits (regmask), new_regmask;
7209 unsigned int i;
7210
7211 for (i = 0; i < num_in_list; i++)
7212 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7213
7214 if (writeback)
7215 insn1 &= ~(1 << 5);
7216
7217 new_regmask = (1 << num_in_list) - 1;
7218
7219 displaced_debug_printf ("LDM r%d%s, {..., pc}: original reg list "
7220 "%.4x, modified list %.4x",
7221 rn, writeback ? "!" : "",
7222 (int) dsc->u.block.regmask, new_regmask);
7223
7224 dsc->modinsn[0] = insn1;
7225 dsc->modinsn[1] = (new_regmask & 0xffff);
7226 dsc->numinsns = 2;
7227
7228 dsc->cleanup = &cleanup_block_load_pc;
7229 }
7230 }
7231 else
7232 {
7233 dsc->modinsn[0] = insn1;
7234 dsc->modinsn[1] = insn2;
7235 dsc->numinsns = 2;
7236 dsc->cleanup = &cleanup_block_store_pc;
7237 }
7238 return 0;
7239 }
7240
7241 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
7242 This is used to avoid a dependency on BFD's bfd_endian enum. */
7243
7244 ULONGEST
7245 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
7246 int byte_order)
7247 {
7248 return read_memory_unsigned_integer (memaddr, len,
7249 (enum bfd_endian) byte_order);
7250 }
7251
7252 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
7253
7254 CORE_ADDR
7255 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
7256 CORE_ADDR val)
7257 {
7258 return gdbarch_addr_bits_remove (self->regcache->arch (), val);
7259 }
7260
7261 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
7262
7263 static CORE_ADDR
7264 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
7265 {
7266 return 0;
7267 }
7268
7269 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
7270
7271 int
7272 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
7273 {
7274 return arm_is_thumb (self->regcache);
7275 }
7276
7277 /* arm_software_single_step is called just before we want to resume the
7278 inferior, if we want to single-step it but there is no hardware or
7279 kernel single-step support.  We find the targets of the coming
7280 instruction(s) and set breakpoints on them.  */
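/* Roughly speaking: for a conditional branch such as "bne <label>",
   arm_get_next_pcs evaluates the condition against the current flags, so
   the address it returns is either the branch target or the fall-through
   instruction; the caller then plants a software breakpoint at each
   returned address.  */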
7281
7282 std::vector<CORE_ADDR>
7283 arm_software_single_step (struct regcache *regcache)
7284 {
7285 struct gdbarch *gdbarch = regcache->arch ();
7286 struct arm_get_next_pcs next_pcs_ctx;
7287
7288 arm_get_next_pcs_ctor (&next_pcs_ctx,
7289 &arm_get_next_pcs_ops,
7290 gdbarch_byte_order (gdbarch),
7291 gdbarch_byte_order_for_code (gdbarch),
7292 0,
7293 regcache);
7294
7295 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
7296
7297 for (CORE_ADDR &pc_ref : next_pcs)
7298 pc_ref = gdbarch_addr_bits_remove (gdbarch, pc_ref);
7299
7300 return next_pcs;
7301 }
7302
7303 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
7304 for Linux, where some SVC instructions must be treated specially. */
7305
7306 static void
7307 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
7308 arm_displaced_step_copy_insn_closure *dsc)
7309 {
7310 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
7311
7312 displaced_debug_printf ("cleanup for svc, resume at %.8lx",
7313 (unsigned long) resume_addr);
7314
7315 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
7316 }
7317
7318
7319 /* Common copy routine for svc instruction. */
7320
7321 static int
7322 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
7323 arm_displaced_step_copy_insn_closure *dsc)
7324 {
7325 /* Preparation: none.
7326 Insn: unmodified svc.
7327 Cleanup: pc <- insn_addr + insn_size. */
7328
7329 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
7330 instruction. */
7331 dsc->wrote_to_pc = 1;
7332
7333 /* Allow OS-specific code to override SVC handling. */
7334 if (dsc->u.svc.copy_svc_os)
7335 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
7336 else
7337 {
7338 dsc->cleanup = &cleanup_svc;
7339 return 0;
7340 }
7341 }
7342
7343 static int
7344 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
7345 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
7346 {
7347
7348 displaced_debug_printf ("copying svc insn %.8lx",
7349 (unsigned long) insn);
7350
7351 dsc->modinsn[0] = insn;
7352
7353 return install_svc (gdbarch, regs, dsc);
7354 }
7355
7356 static int
7357 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
7358 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
7359 {
7360
7361 displaced_debug_printf ("copying svc insn %.4x", insn);
7362
7363 dsc->modinsn[0] = insn;
7364
7365 return install_svc (gdbarch, regs, dsc);
7366 }
7367
7368 /* Copy undefined instructions. */
7369
7370 static int
7371 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
7372 arm_displaced_step_copy_insn_closure *dsc)
7373 {
7374 displaced_debug_printf ("copying undefined insn %.8lx",
7375 (unsigned long) insn);
7376
7377 dsc->modinsn[0] = insn;
7378
7379 return 0;
7380 }
7381
7382 static int
7383 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7384 arm_displaced_step_copy_insn_closure *dsc)
7385 {
7386
7387 displaced_debug_printf ("copying undefined insn %.4x %.4x",
7388 (unsigned short) insn1, (unsigned short) insn2);
7389
7390 dsc->modinsn[0] = insn1;
7391 dsc->modinsn[1] = insn2;
7392 dsc->numinsns = 2;
7393
7394 return 0;
7395 }
7396
7397 /* Copy unpredictable instructions. */
7398
7399 static int
7400 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
7401 arm_displaced_step_copy_insn_closure *dsc)
7402 {
7403 displaced_debug_printf ("copying unpredictable insn %.8lx",
7404 (unsigned long) insn);
7405
7406 dsc->modinsn[0] = insn;
7407
7408 return 0;
7409 }
7410
7411 /* The decode_* functions are instruction decoding helpers. They mostly follow
7412 the presentation in the ARM ARM. */
7413
7414 static int
7415 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
7416 struct regcache *regs,
7417 arm_displaced_step_copy_insn_closure *dsc)
7418 {
7419 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
7420 unsigned int rn = bits (insn, 16, 19);
7421
7422 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0x1) == 0x0)
7423 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
7424 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0x1) == 0x1)
7425 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
7426 else if ((op1 & 0x60) == 0x20)
7427 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
7428 else if ((op1 & 0x71) == 0x40)
7429 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
7430 dsc);
7431 else if ((op1 & 0x77) == 0x41)
7432 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7433 else if ((op1 & 0x77) == 0x45)
7434 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
7435 else if ((op1 & 0x77) == 0x51)
7436 {
7437 if (rn != 0xf)
7438 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
7439 else
7440 return arm_copy_unpred (gdbarch, insn, dsc);
7441 }
7442 else if ((op1 & 0x77) == 0x55)
7443 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
7444 else if (op1 == 0x57)
7445 switch (op2)
7446 {
7447 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
7448 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
7449 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
7450 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
7451 default: return arm_copy_unpred (gdbarch, insn, dsc);
7452 }
7453 else if ((op1 & 0x63) == 0x43)
7454 return arm_copy_unpred (gdbarch, insn, dsc);
7455 else if ((op2 & 0x1) == 0x0)
7456 switch (op1 & ~0x80)
7457 {
7458 case 0x61:
7459 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7460 case 0x65:
7461 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
7462 case 0x71: case 0x75:
7463 /* pld/pldw reg. */
7464 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
7465 case 0x63: case 0x67: case 0x73: case 0x77:
7466 return arm_copy_unpred (gdbarch, insn, dsc);
7467 default:
7468 return arm_copy_undef (gdbarch, insn, dsc);
7469 }
7470 else
7471 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
7472 }
7473
7474 static int
7475 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
7476 struct regcache *regs,
7477 arm_displaced_step_copy_insn_closure *dsc)
7478 {
7479 if (bit (insn, 27) == 0)
7480 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
7481 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
7482 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
7483 {
7484 case 0x0: case 0x2:
7485 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
7486
7487 case 0x1: case 0x3:
7488 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
7489
7490 case 0x4: case 0x5: case 0x6: case 0x7:
7491 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7492
7493 case 0x8:
7494 switch ((insn & 0xe00000) >> 21)
7495 {
7496 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
7497 /* stc/stc2. */
7498 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7499
7500 case 0x2:
7501 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7502
7503 default:
7504 return arm_copy_undef (gdbarch, insn, dsc);
7505 }
7506
7507 case 0x9:
7508 {
7509 int rn_f = (bits (insn, 16, 19) == 0xf);
7510 switch ((insn & 0xe00000) >> 21)
7511 {
7512 case 0x1: case 0x3:
7513 /* ldc/ldc2 imm (undefined for rn == pc). */
7514 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
7515 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7516
7517 case 0x2:
7518 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7519
7520 case 0x4: case 0x5: case 0x6: case 0x7:
7521 /* ldc/ldc2 lit (undefined for rn != pc). */
7522 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
7523 : arm_copy_undef (gdbarch, insn, dsc);
7524
7525 default:
7526 return arm_copy_undef (gdbarch, insn, dsc);
7527 }
7528 }
7529
7530 case 0xa:
7531 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
7532
7533 case 0xb:
7534 if (bits (insn, 16, 19) == 0xf)
7535 /* ldc/ldc2 lit. */
7536 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7537 else
7538 return arm_copy_undef (gdbarch, insn, dsc);
7539
7540 case 0xc:
7541 if (bit (insn, 4))
7542 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7543 else
7544 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7545
7546 case 0xd:
7547 if (bit (insn, 4))
7548 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7549 else
7550 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7551
7552 default:
7553 return arm_copy_undef (gdbarch, insn, dsc);
7554 }
7555 }
7556
7557 /* Decode miscellaneous instructions in dp/misc encoding space. */
7558
7559 static int
7560 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
7561 struct regcache *regs,
7562 arm_displaced_step_copy_insn_closure *dsc)
7563 {
7564 unsigned int op2 = bits (insn, 4, 6);
7565 unsigned int op = bits (insn, 21, 22);
7566
7567 switch (op2)
7568 {
7569 case 0x0:
7570 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
7571
7572 case 0x1:
7573 if (op == 0x1) /* bx. */
7574 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
7575 else if (op == 0x3)
7576 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
7577 else
7578 return arm_copy_undef (gdbarch, insn, dsc);
7579
7580 case 0x2:
7581 if (op == 0x1)
7582 /* Not really supported. */
7583 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
7584 else
7585 return arm_copy_undef (gdbarch, insn, dsc);
7586
7587 case 0x3:
7588 if (op == 0x1)
7589 return arm_copy_bx_blx_reg (gdbarch, insn,
7590 regs, dsc); /* blx register. */
7591 else
7592 return arm_copy_undef (gdbarch, insn, dsc);
7593
7594 case 0x5:
7595 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
7596
7597 case 0x7:
7598 if (op == 0x1)
7599 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
7600 else if (op == 0x3)
7601 /* Not really supported. */
7602 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
7603 /* Fall through. */
7604
7605 default:
7606 return arm_copy_undef (gdbarch, insn, dsc);
7607 }
7608 }
7609
7610 static int
7611 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
7612 struct regcache *regs,
7613 arm_displaced_step_copy_insn_closure *dsc)
7614 {
7615 if (bit (insn, 25))
7616 switch (bits (insn, 20, 24))
7617 {
7618 case 0x10:
7619 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
7620
7621 case 0x14:
7622 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
7623
7624 case 0x12: case 0x16:
7625 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
7626
7627 default:
7628 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
7629 }
7630 else
7631 {
7632 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
7633
7634 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
7635 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
7636 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
7637 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
7638 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
7639 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
7640 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
7641 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
7642 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
7643 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
7644 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
7645 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
7646 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
7647 /* 2nd arg means "unprivileged". */
7648 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
7649 dsc);
7650 }
7651
7652 /* Should be unreachable. */
7653 return 1;
7654 }
7655
7656 static int
7657 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
7658 struct regcache *regs,
7659 arm_displaced_step_copy_insn_closure *dsc)
7660 {
7661 int a = bit (insn, 25), b = bit (insn, 4);
7662 uint32_t op1 = bits (insn, 20, 24);
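/* A selects between the immediate (A == 0) and register (A == 1) offset
   forms; B is bit 4, which must be clear for the register forms to be
   valid here.  The trailing arguments of arm_copy_ldr_str_ldrb_strb below
   appear to select, in order, load vs. store, the access size in bytes,
   and the unprivileged ("T") variant.  */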
7663
7664 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
7665 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
7666 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
7667 else if ((!a && (op1 & 0x17) == 0x02)
7668 || (a && (op1 & 0x17) == 0x02 && !b))
7669 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
7670 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
7671 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
7672 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
7673 else if ((!a && (op1 & 0x17) == 0x03)
7674 || (a && (op1 & 0x17) == 0x03 && !b))
7675 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
7676 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
7677 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
7678 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
7679 else if ((!a && (op1 & 0x17) == 0x06)
7680 || (a && (op1 & 0x17) == 0x06 && !b))
7681 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
7682 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
7683 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
7684 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
7685 else if ((!a && (op1 & 0x17) == 0x07)
7686 || (a && (op1 & 0x17) == 0x07 && !b))
7687 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
7688
7689 /* Should be unreachable. */
7690 return 1;
7691 }
7692
7693 static int
7694 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
7695 arm_displaced_step_copy_insn_closure *dsc)
7696 {
7697 switch (bits (insn, 20, 24))
7698 {
7699 case 0x00: case 0x01: case 0x02: case 0x03:
7700 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
7701
7702 case 0x04: case 0x05: case 0x06: case 0x07:
7703 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
7704
7705 case 0x08: case 0x09: case 0x0a: case 0x0b:
7706 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
7707 return arm_copy_unmodified (gdbarch, insn,
7708 "decode/pack/unpack/saturate/reverse", dsc);
7709
7710 case 0x18:
7711 if (bits (insn, 5, 7) == 0) /* op2. */
7712 {
7713 if (bits (insn, 12, 15) == 0xf)
7714 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
7715 else
7716 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
7717 }
7718 else
7719 return arm_copy_undef (gdbarch, insn, dsc);
7720
7721 case 0x1a: case 0x1b:
7722 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7723 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
7724 else
7725 return arm_copy_undef (gdbarch, insn, dsc);
7726
7727 case 0x1c: case 0x1d:
7728 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
7729 {
7730 if (bits (insn, 0, 3) == 0xf)
7731 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
7732 else
7733 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
7734 }
7735 else
7736 return arm_copy_undef (gdbarch, insn, dsc);
7737
7738 case 0x1e: case 0x1f:
7739 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7740 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
7741 else
7742 return arm_copy_undef (gdbarch, insn, dsc);
7743 }
7744
7745 /* Should be unreachable. */
7746 return 1;
7747 }
7748
7749 static int
7750 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
7751 struct regcache *regs,
7752 arm_displaced_step_copy_insn_closure *dsc)
7753 {
7754 if (bit (insn, 25))
7755 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7756 else
7757 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
7758 }
7759
7760 static int
7761 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
7762 struct regcache *regs,
7763 arm_displaced_step_copy_insn_closure *dsc)
7764 {
7765 unsigned int opcode = bits (insn, 20, 24);
7766
7767 switch (opcode)
7768 {
7769 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
7770 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
7771
7772 case 0x08: case 0x0a: case 0x0c: case 0x0e:
7773 case 0x12: case 0x16:
7774 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
7775
7776 case 0x09: case 0x0b: case 0x0d: case 0x0f:
7777 case 0x13: case 0x17:
7778 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
7779
7780 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7781 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7782 /* Note: no writeback for these instructions. Bit 25 will always be
7783 zero though (via caller), so the following works OK. */
7784 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7785 }
7786
7787 /* Should be unreachable. */
7788 return 1;
7789 }
7790
7791 /* Decode shifted register instructions. */
7792
7793 static int
7794 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
7795 uint16_t insn2, struct regcache *regs,
7796 arm_displaced_step_copy_insn_closure *dsc)
7797 {
7798 /* PC is only allowed to be used in the MOV instruction.  */
7799
7800 unsigned int op = bits (insn1, 5, 8);
7801 unsigned int rn = bits (insn1, 0, 3);
7802
7803 if (op == 0x2 && rn == 0xf) /* MOV */
7804 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
7805 else
7806 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7807 "dp (shift reg)", dsc);
7808 }
7809
7810
7811 /* Decode extension register load/store. Exactly the same as
7812 arm_decode_ext_reg_ld_st. */
7813
7814 static int
7815 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
7816 uint16_t insn2, struct regcache *regs,
7817 arm_displaced_step_copy_insn_closure *dsc)
7818 {
7819 unsigned int opcode = bits (insn1, 4, 8);
7820
7821 switch (opcode)
7822 {
7823 case 0x04: case 0x05:
7824 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7825 "vfp/neon vmov", dsc);
7826
7827 case 0x08: case 0x0c: /* 01x00 */
7828 case 0x0a: case 0x0e: /* 01x10 */
7829 case 0x12: case 0x16: /* 10x10 */
7830 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7831 "vfp/neon vstm/vpush", dsc);
7832
7833 case 0x09: case 0x0d: /* 01x01 */
7834 case 0x0b: case 0x0f: /* 01x11 */
7835 case 0x13: case 0x17: /* 10x11 */
7836 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7837 "vfp/neon vldm/vpop", dsc);
7838
7839 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7840 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7841 "vstr", dsc);
7842 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7843 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
7844 }
7845
7846 /* Should be unreachable. */
7847 return 1;
7848 }
7849
7850 static int
7851 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
7852 regcache *regs, arm_displaced_step_copy_insn_closure *dsc)
7853 {
7854 unsigned int op1 = bits (insn, 20, 25);
7855 int op = bit (insn, 4);
7856 unsigned int coproc = bits (insn, 8, 11);
7857
7858 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
7859 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
7860 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
7861 && (coproc & 0xe) != 0xa)
7862 /* stc/stc2. */
7863 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7864 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
7865 && (coproc & 0xe) != 0xa)
7866 /* ldc/ldc2 imm/lit. */
7867 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7868 else if ((op1 & 0x3e) == 0x00)
7869 return arm_copy_undef (gdbarch, insn, dsc);
7870 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
7871 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
7872 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
7873 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7874 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
7875 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7876 else if ((op1 & 0x30) == 0x20 && !op)
7877 {
7878 if ((coproc & 0xe) == 0xa)
7879 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
7880 else
7881 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7882 }
7883 else if ((op1 & 0x30) == 0x20 && op)
7884 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
7885 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
7886 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7887 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
7888 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7889 else if ((op1 & 0x30) == 0x30)
7890 return arm_copy_svc (gdbarch, insn, regs, dsc);
7891 else
7892 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
7893 }
7894
7895 static int
7896 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
7897 uint16_t insn2, struct regcache *regs,
7898 arm_displaced_step_copy_insn_closure *dsc)
7899 {
7900 unsigned int coproc = bits (insn2, 8, 11);
7901 unsigned int bit_5_8 = bits (insn1, 5, 8);
7902 unsigned int bit_9 = bit (insn1, 9);
7903 unsigned int bit_4 = bit (insn1, 4);
7904
7905 if (bit_9 == 0)
7906 {
7907 if (bit_5_8 == 2)
7908 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7909 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
7910 dsc);
7911 else if (bit_5_8 == 0) /* UNDEFINED. */
7912 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7913 else
7914 {
7915 /* coproc is 101x: SIMD/VFP extension register load/store.  */
7916 if ((coproc & 0xe) == 0xa)
7917 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
7918 dsc);
7919 else /* coproc is not 101x. */
7920 {
7921 if (bit_4 == 0) /* STC/STC2. */
7922 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7923 "stc/stc2", dsc);
7924 else /* LDC/LDC2 {literal, immediate}. */
7925 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
7926 regs, dsc);
7927 }
7928 }
7929 }
7930 else
7931 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
7932
7933 return 0;
7934 }
7935
7936 static void
7937 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
7938 arm_displaced_step_copy_insn_closure *dsc, int rd)
7939 {
7940 /* ADR Rd, #imm
7941
7942 Rewrite as:
7943
7944 Preparation: Rd <- PC
7945 Insn: ADD Rd, #imm
7946 Cleanup: Null.
7947 */
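/* For illustration (an assumed example): for "adr r5, <label>", the
   preparation below seeds r5 with the PC value the instruction would have
   read at its original location (as returned by displaced_read_reg), and
   the copied ADD/SUB immediate executed in the scratch area then leaves
   the address of <label> in r5 without relying on the scratch-area PC.  */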
7948
7949 /* Rd <- PC */
7950 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7951 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
7952 }
7953
7954 static int
7955 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
7956 arm_displaced_step_copy_insn_closure *dsc,
7957 int rd, unsigned int imm)
7958 {
7959
7960 /* Encoding T2: ADDS Rd, #imm */
7961 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
7962
7963 install_pc_relative (gdbarch, regs, dsc, rd);
7964
7965 return 0;
7966 }
7967
7968 static int
7969 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
7970 struct regcache *regs,
7971 arm_displaced_step_copy_insn_closure *dsc)
7972 {
7973 unsigned int rd = bits (insn, 8, 10);
7974 unsigned int imm8 = bits (insn, 0, 7);
7975
7976 displaced_debug_printf ("copying thumb adr r%d, #%d insn %.4x",
7977 rd, imm8, insn);
7978
7979 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
7980 }
7981
7982 static int
7983 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
7984 uint16_t insn2, struct regcache *regs,
7985 arm_displaced_step_copy_insn_closure *dsc)
7986 {
7987 unsigned int rd = bits (insn2, 8, 11);
7988 /* The immediate is encoded identically in ADR, ADD and SUB, so we simply
7989 extract the raw immediate fields rather than computing the immediate
7990 value.  When generating the replacement ADD or SUB instruction, the
7991 fields can then be ORed straight into the encoding.  */
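/* Concretely, in the T32 ADR/ADD/SUB (immediate) encodings the 12-bit
   immediate is split as i:imm3:imm8, with "i" in bit 10 of the first
   halfword and imm3:imm8 in bits 14-12 and 7-0 of the second halfword;
   imm_i and imm_3_8 below keep those bits in their original positions so
   they can be ORed into the replacement opcode unchanged.  */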
7992 unsigned int imm_3_8 = insn2 & 0x70ff;
7993 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
7994
7995 displaced_debug_printf ("copying thumb adr r%d, #%d:%d insn %.4x%.4x",
7996 rd, imm_i, imm_3_8, insn1, insn2);
7997
7998 if (bit (insn1, 7)) /* ADR encoding T2 (SUB form).  */
7999 {
8000 /* Rewrite as SUB Rd, Rd, #imm (SUB immediate, encoding T3).  */
8001 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
8002 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
8003 }
8004 else /* ADR encoding T3 (ADD form).  */
8005 {
8006 /* Rewrite as ADD Rd, Rd, #imm (ADD immediate, encoding T3).  */
8007 dsc->modinsn[0] = (0xf100 | rd | imm_i);
8008 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
8009 }
8010 dsc->numinsns = 2;
8011
8012 install_pc_relative (gdbarch, regs, dsc, rd);
8013
8014 return 0;
8015 }
8016
8017 static int
8018 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
8019 struct regcache *regs,
8020 arm_displaced_step_copy_insn_closure *dsc)
8021 {
8022 unsigned int rt = bits (insn1, 8, 10);
8023 unsigned int pc;
8024 int imm8 = (bits (insn1, 0, 7) << 2);
8025
8026 /* LDR Rd, [PC, #imm8]
8027
8028 Rewrite as:
8029
8030 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
8031
8032 Insn: LDR R0, [R2, R3];
8033 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
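/* For illustration (an assumed example): "ldr r1, [pc, #8]" becomes the
   scratch sequence below with R2 preloaded with Align(PC, 4) of the
   original instruction and R3 with 8; the out-of-line "ldr r0, [r2, r3]"
   then reads the same literal word, and cleanup_load moves it from r0
   into r1 and restores the scratch registers.  */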
8034
8035 displaced_debug_printf ("copying thumb ldr r%d [pc #%d]", rt, imm8);
8036
8037 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
8038 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
8039 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
8040 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
8041 /* The assembler calculates the required value of the offset from the
8042 Align(PC,4) value of this instruction to the label. */
8043 pc = pc & 0xfffffffc;
8044
8045 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
8046 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
8047
8048 dsc->rd = rt;
8049 dsc->u.ldst.xfersize = 4;
8050 dsc->u.ldst.rn = 0;
8051 dsc->u.ldst.immed = 0;
8052 dsc->u.ldst.writeback = 0;
8053 dsc->u.ldst.restore_r4 = 0;
8054
8055 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
8056
8057 dsc->cleanup = &cleanup_load;
8058
8059 return 0;
8060 }
8061
8062 /* Copy Thumb cbnz/cbz instruction. */
8063
8064 static int
8065 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
8066 struct regcache *regs,
8067 arm_displaced_step_copy_insn_closure *dsc)
8068 {
8069 int non_zero = bit (insn1, 11);
8070 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
8071 CORE_ADDR from = dsc->insn_addr;
8072 int rn = bits (insn1, 0, 2);
8073 int rn_val = displaced_read_reg (regs, dsc, rn);
8074
8075 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
8076 /* CBNZ and CBZ do not affect the condition flags.  If the branch is taken,
8077 set the condition to INST_AL so cleanup_branch knows to redirect the PC;
8078 otherwise leave it false, and cleanup_branch will do nothing.  */
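/* For illustration (an assumed example): for "cbz r3, <label>" with
   r3 == 0 the branch is taken, so the destination recorded below is the
   address of the CBZ plus 4 plus the decoded offset (<label>); with
   r3 != 0 it is simply the next instruction at PC + 2.  Either way the
   copied instruction becomes a Thumb NOP and cleanup_branch writes the
   chosen destination into the PC.  */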
8079 if (dsc->u.branch.cond)
8080 {
8081 dsc->u.branch.cond = INST_AL;
8082 dsc->u.branch.dest = from + 4 + imm5;
8083 }
8084 else
8085 dsc->u.branch.dest = from + 2;
8086
8087 dsc->u.branch.link = 0;
8088 dsc->u.branch.exchange = 0;
8089
8090 displaced_debug_printf ("copying %s [r%d = 0x%x] insn %.4x to %.8lx",
8091 non_zero ? "cbnz" : "cbz",
8092 rn, rn_val, insn1, dsc->u.branch.dest);
8093
8094 dsc->modinsn[0] = THUMB_NOP;
8095
8096 dsc->cleanup = &cleanup_branch;
8097 return 0;
8098 }
8099
8100 /* Copy Table Branch Byte/Halfword.  */
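/* For illustration: "tbb [r0, r1]" loads the byte at r0 + r1 and branches
   forward by twice that value from the address of the instruction plus 4;
   "tbh [r0, r1, lsl #1]" does the same with the halfword entry at
   r0 + 2*r1.  The code below performs that table lookup itself and lets
   cleanup_branch install the computed destination.  */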
8101 static int
8102 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
8103 uint16_t insn2, struct regcache *regs,
8104 arm_displaced_step_copy_insn_closure *dsc)
8105 {
8106 ULONGEST rn_val, rm_val;
8107 int is_tbh = bit (insn2, 4);
8108 CORE_ADDR halfwords = 0;
8109 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8110
8111 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
8112 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
8113
8114 if (is_tbh)
8115 {
8116 gdb_byte buf[2];
8117
8118 target_read_memory (rn_val + 2 * rm_val, buf, 2);
8119 halfwords = extract_unsigned_integer (buf, 2, byte_order);
8120 }
8121 else
8122 {
8123 gdb_byte buf[1];
8124
8125 target_read_memory (rn_val + rm_val, buf, 1);
8126 halfwords = extract_unsigned_integer (buf, 1, byte_order);
8127 }
8128
8129 displaced_debug_printf ("%s base 0x%x offset 0x%x offset 0x%x",
8130 is_tbh ? "tbh" : "tbb",
8131 (unsigned int) rn_val, (unsigned int) rm_val,
8132 (unsigned int) halfwords);
8133
8134 dsc->u.branch.cond = INST_AL;
8135 dsc->u.branch.link = 0;
8136 dsc->u.branch.exchange = 0;
8137 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
8138
8139 dsc->cleanup = &cleanup_branch;
8140
8141 return 0;
8142 }
8143
8144 static void
8145 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
8146 arm_displaced_step_copy_insn_closure *dsc)
8147 {
8148 /* PC <- r7 */
8149 int val = displaced_read_reg (regs, dsc, 7);
8150 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
8151
8152 /* r7 <- r8 */
8153 val = displaced_read_reg (regs, dsc, 8);
8154 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
8155
8156 /* r8 <- tmp[0] */
8157 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
8158
8159 }
8160
8161 static int
8162 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
8163 struct regcache *regs,
8164 arm_displaced_step_copy_insn_closure *dsc)
8165 {
8166 dsc->u.block.regmask = insn1 & 0x00ff;
8167
8168 /* Rewrite the instruction POP {rX, rY, ..., rZ, PC}
8169 as follows:
8170
8171 (1) register list is full, that is, r0-r7 are used.
8172 Prepare: tmp[0] <- r8
8173
8174 POP {r0, r1, ...., r6, r7}; remove PC from reglist
8175 MOV r8, r7; Move value of r7 to r8;
8176 POP {r7}; Store PC value into r7.
8177
8178 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
8179
8180 (2) register list is not full, supposing there are N registers in
8181 register list (except PC, 0 <= N <= 7).
8182 Prepare: for each i, 0 - N, tmp[i] <- ri.
8183
8184 POP {r0, r1, ...., rN};
8185
8186 Cleanup: Set the registers in the original reglist from r0 - rN, then
8187 restore r0 - rN themselves from tmp[] as needed.
8188 */
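/* For illustration of case (2) (an assumed example): "pop {r1, r4, pc}"
   has N = 2, so the instruction is rewritten below as "pop {r0, r1, r2}";
   after it runs out of line, cleanup_block_load_pc moves r0 into r1,
   r1 into r4 and r2 into the PC (with interworking write semantics),
   restoring the clobbered low registers from the tmp[] copies taken
   here.  */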
8189 displaced_debug_printf ("copying thumb pop {%.8x, pc} insn %.4x",
8190 dsc->u.block.regmask, insn1);
8191
8192 if (dsc->u.block.regmask == 0xff)
8193 {
8194 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
8195
8196 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
8197 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
8198 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
8199
8200 dsc->numinsns = 3;
8201 dsc->cleanup = &cleanup_pop_pc_16bit_all;
8202 }
8203 else
8204 {
8205 unsigned int num_in_list = count_one_bits (dsc->u.block.regmask);
8206 unsigned int i;
8207 unsigned int new_regmask;
8208
8209 for (i = 0; i < num_in_list + 1; i++)
8210 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
8211
8212 new_regmask = (1 << (num_in_list + 1)) - 1;
8213
8214 displaced_debug_printf ("POP {..., pc}: original reg list %.4x, "
8215 "modified list %.4x",
8216 (int) dsc->u.block.regmask, new_regmask);
8217
8218 dsc->u.block.regmask |= 0x8000;
8219 dsc->u.block.writeback = 0;
8220 dsc->u.block.cond = INST_AL;
8221
8222 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
8223
8224 dsc->cleanup = &cleanup_block_load_pc;
8225 }
8226
8227 return 0;
8228 }
8229
8230 static void
8231 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8232 struct regcache *regs,
8233 arm_displaced_step_copy_insn_closure *dsc)
8234 {
8235 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
8236 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
8237 int err = 0;
8238
8239 /* 16-bit thumb instructions. */
8240 switch (op_bit_12_15)
8241 {
8242 /* Shift (immediate), add, subtract, move and compare. */
8243 case 0: case 1: case 2: case 3:
8244 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8245 "shift/add/sub/mov/cmp",
8246 dsc);
8247 break;
8248 case 4:
8249 switch (op_bit_10_11)
8250 {
8251 case 0: /* Data-processing */
8252 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8253 "data-processing",
8254 dsc);
8255 break;
8256 case 1: /* Special data instructions and branch and exchange. */
8257 {
8258 unsigned short op = bits (insn1, 7, 9);
8259 if (op == 6 || op == 7) /* BX or BLX */
8260 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
8261 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
8262 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
8263 else
8264 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
8265 dsc);
8266 }
8267 break;
8268 default: /* LDR (literal) */
8269 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
8270 }
8271 break;
8272 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
8273 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
8274 break;
8275 case 10:
8276 if (op_bit_10_11 < 2) /* Generate PC-relative address */
8277 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
8278 else /* Generate SP-relative address */
8279 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
8280 break;
8281 case 11: /* Misc 16-bit instructions */
8282 {
8283 switch (bits (insn1, 8, 11))
8284 {
8285 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
8286 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
8287 break;
8288 case 12: case 13: /* POP */
8289 if (bit (insn1, 8)) /* PC is in register list. */
8290 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
8291 else
8292 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
8293 break;
8294 case 15: /* If-Then, and hints */
8295 if (bits (insn1, 0, 3))
8296 /* If-Then makes up to four following instructions conditional.
8297 The IT instruction itself is not conditional, so handle it as
8298 an ordinary unmodified instruction.  */
8299 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
8300 dsc);
8301 else
8302 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
8303 break;
8304 default:
8305 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
8306 }
8307 }
8308 break;
8309 case 12:
8310 if (op_bit_10_11 < 2) /* Store multiple registers */
8311 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
8312 else /* Load multiple registers */
8313 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
8314 break;
8315 case 13: /* Conditional branch and supervisor call */
8316 if (bits (insn1, 9, 11) != 7) /* conditional branch */
8317 err = thumb_copy_b (gdbarch, insn1, dsc);
8318 else
8319 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
8320 break;
8321 case 14: /* Unconditional branch */
8322 err = thumb_copy_b (gdbarch, insn1, dsc);
8323 break;
8324 default:
8325 err = 1;
8326 }
8327
8328 if (err)
8329 internal_error (_("thumb_process_displaced_16bit_insn: Instruction decode error"));
8330 }
8331
8332 static int
8333 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
8334 uint16_t insn1, uint16_t insn2,
8335 struct regcache *regs,
8336 arm_displaced_step_copy_insn_closure *dsc)
8337 {
8338 int rt = bits (insn2, 12, 15);
8339 int rn = bits (insn1, 0, 3);
8340 int op1 = bits (insn1, 7, 8);
8341
8342 switch (bits (insn1, 5, 6))
8343 {
8344 case 0: /* Load byte and memory hints */
8345 if (rt == 0xf) /* PLD/PLI */
8346 {
8347 if (rn == 0xf)
8348 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
8349 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
8350 else
8351 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8352 "pli/pld", dsc);
8353 }
8354 else
8355 {
8356 if (rn == 0xf) /* LDRB/LDRSB (literal) */
8357 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8358 1);
8359 else
8360 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8361 "ldrb{reg, immediate}/ldrbt",
8362 dsc);
8363 }
8364
8365 break;
8366 case 1: /* Load halfword and memory hints. */
8367 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
8368 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8369 "pld/unalloc memhint", dsc);
8370 else
8371 {
8372 if (rn == 0xf)
8373 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
8374 2);
8375 else
8376 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8377 "ldrh/ldrht", dsc);
8378 }
8379 break;
8380 case 2: /* Load word */
8381 {
8382 int insn2_bit_8_11 = bits (insn2, 8, 11);
8383
8384 if (rn == 0xf)
8385 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
8386 else if (op1 == 0x1) /* Encoding T3 */
8387 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
8388 0, 1);
8389 else /* op1 == 0x0 */
8390 {
8391 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
8392 /* LDR (immediate) */
8393 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8394 dsc, bit (insn2, 8), 1);
8395 else if (insn2_bit_8_11 == 0xe) /* LDRT */
8396 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8397 "ldrt", dsc);
8398 else
8399 /* LDR (register) */
8400 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8401 dsc, 0, 0);
8402 }
8403 break;
8404 }
8405 default:
8406 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
8407 break;
8408 }
8409 return 0;
8410 }
8411
8412 static void
8413 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8414 uint16_t insn2, struct regcache *regs,
8415 arm_displaced_step_copy_insn_closure *dsc)
8416 {
8417 int err = 0;
8418 unsigned short op = bit (insn2, 15);
8419 unsigned int op1 = bits (insn1, 11, 12);
8420
8421 switch (op1)
8422 {
8423 case 1:
8424 {
8425 switch (bits (insn1, 9, 10))
8426 {
8427 case 0:
8428 if (bit (insn1, 6))
8429 {
8430 /* Load/store {dual, exclusive}, table branch. */
8431 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
8432 && bits (insn2, 5, 7) == 0)
8433 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
8434 dsc);
8435 else
8436 /* PC is not allowed to be used in load/store {dual, exclusive}
8437 instructions. */
8438 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8439 "load/store dual/ex", dsc);
8440 }
8441 else /* load/store multiple */
8442 {
8443 switch (bits (insn1, 7, 8))
8444 {
8445 case 0: case 3: /* SRS, RFE */
8446 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8447 "srs/rfe", dsc);
8448 break;
8449 case 1: case 2: /* LDM/STM/PUSH/POP */
8450 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
8451 break;
8452 }
8453 }
8454 break;
8455
8456 case 1:
8457 /* Data-processing (shift register). */
8458 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
8459 dsc);
8460 break;
8461 default: /* Coprocessor instructions. */
8462 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8463 break;
8464 }
8465 break;
8466 }
8467 case 2: /* op1 = 2 */
8468 if (op) /* Branch and misc control. */
8469 {
8470 if (bit (insn2, 14) /* BLX/BL */
8471 || bit (insn2, 12) /* Unconditional branch */
8472 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
8473 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
8474 else
8475 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8476 "misc ctrl", dsc);
8477 }
8478 else
8479 {
8480 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
8481 {
8482 int dp_op = bits (insn1, 4, 8);
8483 int rn = bits (insn1, 0, 3);
8484 if ((dp_op == 0 || dp_op == 0xa) && rn == 0xf)
8485 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
8486 regs, dsc);
8487 else
8488 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8489 "dp/pb", dsc);
8490 }
8491 else /* Data processing (modified immediate) */
8492 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8493 "dp/mi", dsc);
8494 }
8495 break;
8496 case 3: /* op1 = 3 */
8497 switch (bits (insn1, 9, 10))
8498 {
8499 case 0:
8500 if (bit (insn1, 4))
8501 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
8502 regs, dsc);
8503 else /* NEON Load/Store and Store single data item */
8504 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8505 "neon elt/struct load/store",
8506 dsc);
8507 break;
8508 case 1: /* op1 = 3, bits (9, 10) == 1 */
8509 switch (bits (insn1, 7, 8))
8510 {
8511 case 0: case 1: /* Data processing (register) */
8512 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8513 "dp(reg)", dsc);
8514 break;
8515 case 2: /* Multiply and absolute difference */
8516 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8517 "mul/mua/diff", dsc);
8518 break;
8519 case 3: /* Long multiply and divide */
8520 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8521 "lmul/lmua", dsc);
8522 break;
8523 }
8524 break;
8525 default: /* Coprocessor instructions */
8526 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8527 break;
8528 }
8529 break;
8530 default:
8531 err = 1;
8532 }
8533
8534 if (err)
8535 internal_error (_("thumb_process_displaced_32bit_insn: Instruction decode error"));
8536
8537 }
8538
8539 static void
8540 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8541 struct regcache *regs,
8542 arm_displaced_step_copy_insn_closure *dsc)
8543 {
8544 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8545 uint16_t insn1
8546 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
8547
8548 displaced_debug_printf ("process thumb insn %.4x at %.8lx",
8549 insn1, (unsigned long) from);
8550
8551 dsc->is_thumb = 1;
8552 dsc->insn_size = thumb_insn_size (insn1);
8553 if (thumb_insn_size (insn1) == 4)
8554 {
8555 uint16_t insn2
8556 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
8557 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
8558 }
8559 else
8560 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
8561 }
8562
8563 void
8564 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8565 CORE_ADDR to, struct regcache *regs,
8566 arm_displaced_step_copy_insn_closure *dsc)
8567 {
8568 int err = 0;
8569 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8570 uint32_t insn;
8571
8572 /* Most displaced instructions use a 1-instruction scratch space, so set this
8573 here and override below if/when necessary. */
8574 dsc->numinsns = 1;
8575 dsc->insn_addr = from;
8576 dsc->scratch_base = to;
8577 dsc->cleanup = NULL;
8578 dsc->wrote_to_pc = 0;
8579
8580 if (!displaced_in_arm_mode (regs))
8581 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
8582
8583 dsc->is_thumb = 0;
8584 dsc->insn_size = 4;
8585 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
8586 displaced_debug_printf ("stepping insn %.8lx at %.8lx",
8587 (unsigned long) insn, (unsigned long) from);
8588
8589 if ((insn & 0xf0000000) == 0xf0000000)
8590 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
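/* Otherwise dispatch on an index built from instruction bits <27:25>
   (placed in bits 3:1) and bit <4> (placed in bit 0), which distinguishes
   the major ARM encoding groups handled below.  */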
8591 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
8592 {
8593 case 0x0: case 0x1: case 0x2: case 0x3:
8594 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
8595 break;
8596
8597 case 0x4: case 0x5: case 0x6:
8598 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
8599 break;
8600
8601 case 0x7:
8602 err = arm_decode_media (gdbarch, insn, dsc);
8603 break;
8604
8605 case 0x8: case 0x9: case 0xa: case 0xb:
8606 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
8607 break;
8608
8609 case 0xc: case 0xd: case 0xe: case 0xf:
8610 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
8611 break;
8612 }
8613
8614 if (err)
8615 internal_error (_("arm_process_displaced_insn: Instruction decode error"));
8616 }
8617
8618 /* Actually set up the scratch space for a displaced instruction. */
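/* The scratch area is laid out as the (possibly rewritten) instruction
   words from DSC followed immediately by the architecture-appropriate
   breakpoint instruction, so that after the inferior executes the copied
   instruction it traps straight back to GDB for the fixup phase.  */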
8619
8620 void
8621 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
8622 CORE_ADDR to,
8623 arm_displaced_step_copy_insn_closure *dsc)
8624 {
8625 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
8626 unsigned int i, len, offset;
8627 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8628 int size = dsc->is_thumb ? 2 : 4;
8629 const gdb_byte *bkp_insn;
8630
8631 offset = 0;
8632 /* Poke modified instruction(s). */
8633 for (i = 0; i < dsc->numinsns; i++)
8634 {
8635 if (size == 4)
8636 displaced_debug_printf ("writing insn %.8lx at %.8lx",
8637 dsc->modinsn[i], (unsigned long) to + offset);
8638 else if (size == 2)
8639 displaced_debug_printf ("writing insn %.4x at %.8lx",
8640 (unsigned short) dsc->modinsn[i],
8641 (unsigned long) to + offset);
8642
8643 write_memory_unsigned_integer (to + offset, size,
8644 byte_order_for_code,
8645 dsc->modinsn[i]);
8646 offset += size;
8647 }
8648
8649 /* Choose the correct breakpoint instruction. */
8650 if (dsc->is_thumb)
8651 {
8652 bkp_insn = tdep->thumb_breakpoint;
8653 len = tdep->thumb_breakpoint_size;
8654 }
8655 else
8656 {
8657 bkp_insn = tdep->arm_breakpoint;
8658 len = tdep->arm_breakpoint_size;
8659 }
8660
8661 /* Put breakpoint afterwards. */
8662 write_memory (to + offset, bkp_insn, len);
8663
8664 displaced_debug_printf ("copy %s->%s", paddress (gdbarch, from),
8665 paddress (gdbarch, to));
8666 }
8667
8668 /* Entry point for cleaning things up after a displaced instruction has been
8669 single-stepped. */
8670
8671 void
8672 arm_displaced_step_fixup (struct gdbarch *gdbarch,
8673 struct displaced_step_copy_insn_closure *dsc_,
8674 CORE_ADDR from, CORE_ADDR to,
8675 struct regcache *regs, bool completed_p)
8676 {
8677 /* The following block exists as a temporary measure while displaced
8678 stepping is fixed architecture at a time within GDB.
8679
8680 In an earlier implementation of displaced stepping, if GDB thought the
8681 displaced instruction had not been executed then this fix up function
8682 was never called. As a consequence, things that should be fixed by
8683 this function were left in an unfixed state.
8684
8685 However, it's not as simple as always calling this function; this
8686 function needs to be updated to decide what should be fixed up based
8687 on whether the displaced step executed or not, which requires each
8688 architecture to be considered individually.
8689
8690 Until this architecture is updated, this block replicates the old
8691 behaviour; we just restore the program counter register, and leave
8692 everything else unfixed. */
8693 if (!completed_p)
8694 {
8695 CORE_ADDR pc = regcache_read_pc (regs);
8696 pc = from + (pc - to);
8697 regcache_write_pc (regs, pc);
8698 return;
8699 }
8700
8701 arm_displaced_step_copy_insn_closure *dsc
8702 = (arm_displaced_step_copy_insn_closure *) dsc_;
8703
8704 if (dsc->cleanup)
8705 dsc->cleanup (gdbarch, regs, dsc);
8706
8707 if (!dsc->wrote_to_pc)
8708 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
8709 dsc->insn_addr + dsc->insn_size);
8710
8711 }
8712
8713 #include "bfd-in2.h"
8714 #include "libcoff.h"
8715
8716 static int
8717 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
8718 {
8719 gdb_disassemble_info *di
8720 = static_cast<gdb_disassemble_info *> (info->application_data);
8721 struct gdbarch *gdbarch = di->arch ();
8722
8723 if (arm_pc_is_thumb (gdbarch, memaddr))
8724 {
8725 static asymbol *asym;
8726 static combined_entry_type ce;
8727 static struct coff_symbol_struct csym;
8728 static struct bfd fake_bfd;
8729 static bfd_target fake_target;
8730
8731 if (csym.native == NULL)
8732 {
8733 /* Create a fake symbol vector containing a Thumb symbol.
8734 This is solely so that the code in print_insn_little_arm()
8735 and print_insn_big_arm() in opcodes/arm-dis.c will detect
8736 the presence of a Thumb symbol and switch to decoding
8737 Thumb instructions. */
8738
8739 fake_target.flavour = bfd_target_coff_flavour;
8740 fake_bfd.xvec = &fake_target;
8741 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
8742 csym.native = &ce;
8743 csym.symbol.the_bfd = &fake_bfd;
8744 csym.symbol.name = "fake";
8745 asym = (asymbol *) & csym;
8746 }
8747
8748 memaddr = UNMAKE_THUMB_ADDR (memaddr);
8749 info->symbols = &asym;
8750 }
8751 else
8752 info->symbols = NULL;
8753
8754 /* GDB is able to get bfd_mach from the exe_bfd, so info->mach is
8755 accurate; mark the USER_SPECIFIED_MACHINE_TYPE bit.  Otherwise,
8756 opcodes/arm-dis.c:print_insn would reset info->mach, which would
8757 trigger the assert on the mismatch between info->mach and
8758 bfd_get_mach (current_program_space->exec_bfd ()) in
8759 default_print_insn.  */
8760 if (current_program_space->exec_bfd () != NULL
8761 && (current_program_space->exec_bfd ()->arch_info
8762 == gdbarch_bfd_arch_info (gdbarch)))
8763 info->flags |= USER_SPECIFIED_MACHINE_TYPE;
8764
8765 return default_print_insn (memaddr, info);
8766 }
8767
8768 /* The following define instruction sequences that will cause ARM
8769 CPUs to take an undefined instruction trap.  These are used to
8770 signal a breakpoint to GDB.
8771
8772 The newer ARMv4T CPUs are capable of operating in ARM or Thumb
8773 modes. A different instruction is required for each mode. The ARM
8774 CPUs can also be big or little endian.  Thus four different
8775 instructions are needed to support all cases.
8776
8777 Note: ARMv4 defines several new instructions that will take the
8778 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
8779 not in fact add the new instructions. The new undefined
8780 instructions in ARMv4 are all instructions that had no defined
8781 behaviour in earlier chips. There is no guarantee that they will
8782 raise an exception, but may be treated as NOP's. In practice, it
8783 may only safe to rely on instructions matching:
8784
8785 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
8786 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
8787 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
8788
8789 Even this may only be true if the condition predicate is true.  The
8790 following use a condition predicate of ALWAYS, so it is always TRUE.
8791
8792 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
8793 and NetBSD all use a software interrupt rather than an undefined
8794 instruction to force a trap.  This can be handled by the
8795 ABI-specific code during establishment of the gdbarch vector.  */
8796
8797 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
8798 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
8799 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
8800 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
8801
8802 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
8803 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
8804 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
8805 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
8806
8807 /* Implement the breakpoint_kind_from_pc gdbarch method. */
8808
8809 static int
8810 arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
8811 {
8812 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
8813 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8814
8815 if (arm_pc_is_thumb (gdbarch, *pcptr))
8816 {
8817 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
8818
8819 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
8820 check whether we are replacing a 32-bit instruction. */
8821 if (tdep->thumb2_breakpoint != NULL)
8822 {
8823 gdb_byte buf[2];
8824
8825 if (target_read_memory (*pcptr, buf, 2) == 0)
8826 {
8827 unsigned short inst1;
8828
8829 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
8830 if (thumb_insn_size (inst1) == 4)
8831 return ARM_BP_KIND_THUMB2;
8832 }
8833 }
8834
8835 return ARM_BP_KIND_THUMB;
8836 }
8837 else
8838 return ARM_BP_KIND_ARM;
8839
8840 }
8841
8842 /* Implement the sw_breakpoint_from_kind gdbarch method. */
8843
8844 static const gdb_byte *
8845 arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
8846 {
8847 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
8848
8849 switch (kind)
8850 {
8851 case ARM_BP_KIND_ARM:
8852 *size = tdep->arm_breakpoint_size;
8853 return tdep->arm_breakpoint;
8854 case ARM_BP_KIND_THUMB:
8855 *size = tdep->thumb_breakpoint_size;
8856 return tdep->thumb_breakpoint;
8857 case ARM_BP_KIND_THUMB2:
8858 *size = tdep->thumb2_breakpoint_size;
8859 return tdep->thumb2_breakpoint;
8860 default:
8861 gdb_assert_not_reached ("unexpected arm breakpoint kind");
8862 }
8863 }
8864
8865 /* Implement the breakpoint_kind_from_current_state gdbarch method. */
8866
8867 static int
8868 arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
8869 struct regcache *regcache,
8870 CORE_ADDR *pcptr)
8871 {
8872 gdb_byte buf[4];
8873
8874 /* Check that the memory pointed to by PC is readable.  */
8875 if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
8876 {
8877 struct arm_get_next_pcs next_pcs_ctx;
8878
8879 arm_get_next_pcs_ctor (&next_pcs_ctx,
8880 &arm_get_next_pcs_ops,
8881 gdbarch_byte_order (gdbarch),
8882 gdbarch_byte_order_for_code (gdbarch),
8883 0,
8884 regcache);
8885
8886 std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);
8887
8888 /* If *PCPTR matches one of the next PCs computed above by the
8889 software single-step logic, take the Thumb/ARM mode from that
8890 destination address.  */
8891 for (CORE_ADDR pc : next_pcs)
8892 {
8893 if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
8894 {
8895 if (IS_THUMB_ADDR (pc))
8896 {
8897 *pcptr = MAKE_THUMB_ADDR (*pcptr);
8898 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
8899 }
8900 else
8901 return ARM_BP_KIND_ARM;
8902 }
8903 }
8904 }
8905
8906 return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
8907 }
8908
8909 /* Extract from the register cache REGS, containing the (raw) register
8910 state, a function return value of type TYPE, and copy that, in
8911 virtual format, into VALBUF.  */
8912
8913 static void
8914 arm_extract_return_value (struct type *type, struct regcache *regs,
8915 gdb_byte *valbuf)
8916 {
8917 struct gdbarch *gdbarch = regs->arch ();
8918 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8919 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
8920
8921 while (type->code () == TYPE_CODE_RANGE)
8922 type = check_typedef (type->target_type ());
8923
8924 if (TYPE_CODE_FLT == type->code ())
8925 {
8926 switch (tdep->fp_model)
8927 {
8928 case ARM_FLOAT_FPA:
8929 {
8930 /* The value is in register F0 in internal format. We need to
8931 extract the raw value and then convert it to the desired
8932 internal type. */
8933 bfd_byte tmpbuf[ARM_FP_REGISTER_SIZE];
8934
8935 regs->cooked_read (ARM_F0_REGNUM, tmpbuf);
8936 target_float_convert (tmpbuf, arm_ext_type (gdbarch),
8937 valbuf, type);
8938 }
8939 break;
8940
8941 case ARM_FLOAT_SOFT_FPA:
8942 case ARM_FLOAT_SOFT_VFP:
8943 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8944 not using the VFP ABI code. */
8945 case ARM_FLOAT_VFP:
8946 regs->cooked_read (ARM_A1_REGNUM, valbuf);
8947 if (type->length () > 4)
8948 regs->cooked_read (ARM_A1_REGNUM + 1,
8949 valbuf + ARM_INT_REGISTER_SIZE);
8950 break;
8951
8952 default:
8953 internal_error (_("arm_extract_return_value: "
8954 "Floating point model not supported"));
8955 break;
8956 }
8957 }
8958 else if (type->code () == TYPE_CODE_INT
8959 || type->code () == TYPE_CODE_CHAR
8960 || type->code () == TYPE_CODE_BOOL
8961 || type->code () == TYPE_CODE_PTR
8962 || TYPE_IS_REFERENCE (type)
8963 || type->code () == TYPE_CODE_ENUM
8964 || is_fixed_point_type (type))
8965 {
8966 /* If the type is a plain integer, then the access is
8967 straight-forward. Otherwise we have to play around a bit
8968 more. */
8969 int len = type->length ();
8970 int regno = ARM_A1_REGNUM;
8971 ULONGEST tmp;
8972
8973 while (len > 0)
8974 {
8975 /* By using store_unsigned_integer we avoid having to do
8976 anything special for small big-endian values. */
8977 regcache_cooked_read_unsigned (regs, regno++, &tmp);
8978 store_unsigned_integer (valbuf,
8979 (len > ARM_INT_REGISTER_SIZE
8980 ? ARM_INT_REGISTER_SIZE : len),
8981 byte_order, tmp);
8982 len -= ARM_INT_REGISTER_SIZE;
8983 valbuf += ARM_INT_REGISTER_SIZE;
8984 }
8985 }
8986 else
8987 {
8988 /* For a structure or union the behaviour is as if the value had
8989 been stored to word-aligned memory and then loaded into
8990 registers with 32-bit load instruction(s). */
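/* For example, a 6-byte structure comes back with bytes 0-3 in r0 and
bytes 4-5 in the first two bytes of r1's memory image; the copy below
simply truncates the final register read to the remaining length.  */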
8991 int len = type->length ();
8992 int regno = ARM_A1_REGNUM;
8993 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
8994
8995 while (len > 0)
8996 {
8997 regs->cooked_read (regno++, tmpbuf);
8998 memcpy (valbuf, tmpbuf,
8999 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
9000 len -= ARM_INT_REGISTER_SIZE;
9001 valbuf += ARM_INT_REGISTER_SIZE;
9002 }
9003 }
9004 }
9005
9006
9007 /* Will a function return an aggregate type in memory or in a
9008 register? Return 0 if an aggregate type can be returned in a
9009 register, 1 if it must be returned in memory. */
9010
9011 static int
9012 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
9013 {
9014 enum type_code code;
9015
9016 type = check_typedef (type);
9017
9018 /* Simple, non-aggregate types (i.e. not including vectors and
9019 complex) are always returned in a register (or registers). */
9020 code = type->code ();
9021 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
9022 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
9023 return 0;
9024
9025 if (TYPE_HAS_DYNAMIC_LENGTH (type))
9026 return 1;
9027
9028 if (TYPE_CODE_ARRAY == code && type->is_vector ())
9029 {
9030 /* Vector values should be returned using ARM registers if they
9031 are not over 16 bytes. */
9032 return (type->length () > 16);
9033 }
9034
9035 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9036 if (tdep->arm_abi != ARM_ABI_APCS)
9037 {
9038 /* The AAPCS says all aggregates not larger than a word are returned
9039 in a register. */
9040 if (type->length () <= ARM_INT_REGISTER_SIZE
9041 && language_pass_by_reference (type).trivially_copyable)
9042 return 0;
9043
9044 return 1;
9045 }
9046 else
9047 {
9048 int nRc;
9049
9050 /* All aggregate types that won't fit in a register must be returned
9051 in memory. */
9052 if (type->length () > ARM_INT_REGISTER_SIZE
9053 || !language_pass_by_reference (type).trivially_copyable)
9054 return 1;
9055
9056 /* In the ARM ABI, "integer" like aggregate types are returned in
9057 registers. For an aggregate type to be integer like, its size
9058 must be less than or equal to ARM_INT_REGISTER_SIZE and the
9059 offset of each addressable subfield must be zero. Note that bit
9060 fields are not addressable, and all addressable subfields of
9061 unions always start at offset zero.
9062
9063 This function is based on the behaviour of GCC 2.95.1.
9064 See: gcc/arm.c: arm_return_in_memory() for details.
9065
9066 Note: All versions of GCC before GCC 2.95.2 do not set up the
9067 parameters correctly for a function returning the following
9068 structure: struct { float f;}; This should be returned in memory,
9069 not a register. Richard Earnshaw sent me a patch, but I do not
9070 know of any way to detect if a function like the above has been
9071 compiled with the correct calling convention. */
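/* For example, under this definition struct { int i; } and
union { int i; char c; } are integer-like and are returned in r0,
while struct { float f; } (a float subfield) and
struct { short a; short b; } (a subfield at a non-zero offset) are
not, and are returned in memory.  */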
9072
9073 /* Assume all other aggregate types can be returned in a register.
9074 Run a check for structures, unions and arrays. */
9075 nRc = 0;
9076
9077 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
9078 {
9079 int i;
9080 /* Need to check if this struct/union is "integer" like. For
9081 this to be true, its size must be less than or equal to
9082 ARM_INT_REGISTER_SIZE and the offset of each addressable
9083 subfield must be zero. Note that bit fields are not
9084 addressable, and unions always start at offset zero. If any
9085 of the subfields is a floating point type, the struct/union
9086 cannot be an integer type. */
9087
9088 /* For each field in the object, check:
9089 1) Is it FP? --> yes, nRc = 1;
9090 2) Is it at a non-zero offset (bitpos != 0) and
9091 not a bit field (bitsize == 0)?
9092 --> yes, nRc = 1
9093 */
9094
9095 for (i = 0; i < type->num_fields (); i++)
9096 {
9097 enum type_code field_type_code;
9098
9099 field_type_code
9100 = check_typedef (type->field (i).type ())->code ();
9101
9102 /* Is it a floating point type field? */
9103 if (field_type_code == TYPE_CODE_FLT)
9104 {
9105 nRc = 1;
9106 break;
9107 }
9108
9109 /* A subfield at a non-zero offset needs a closer look. */
9110 if (type->field (i).loc_bitpos () != 0)
9111 {
9112 /* Bit fields are not addressable. A field bitsize of zero means
9113 this is not a bit field, so it really is an addressable subfield
9114 at a non-zero offset, and the aggregate is not integer-like. */
9115 if (type->field (i).bitsize () == 0)
9116 {
9117 nRc = 1;
9118 break;
9119 }
9120 }
9121 }
9122 }
9123
9124 return nRc;
9125 }
9126 }
9127
9128 /* Write into appropriate registers a function return value of type
9129 TYPE, given in virtual format. */
9130
9131 static void
9132 arm_store_return_value (struct type *type, struct regcache *regs,
9133 const gdb_byte *valbuf)
9134 {
9135 struct gdbarch *gdbarch = regs->arch ();
9136 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9137
9138 while (type->code () == TYPE_CODE_RANGE)
9139 type = check_typedef (type->target_type ());
9140
9141 if (type->code () == TYPE_CODE_FLT)
9142 {
9143 gdb_byte buf[ARM_FP_REGISTER_SIZE];
9144 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9145
9146 switch (tdep->fp_model)
9147 {
9148 case ARM_FLOAT_FPA:
9149
9150 target_float_convert (valbuf, type, buf, arm_ext_type (gdbarch));
9151 regs->cooked_write (ARM_F0_REGNUM, buf);
9152 break;
9153
9154 case ARM_FLOAT_SOFT_FPA:
9155 case ARM_FLOAT_SOFT_VFP:
9156 /* ARM_FLOAT_VFP can arise if this is a variadic function so
9157 not using the VFP ABI code. */
9158 case ARM_FLOAT_VFP:
9159 regs->cooked_write (ARM_A1_REGNUM, valbuf);
9160 if (type->length () > 4)
9161 regs->cooked_write (ARM_A1_REGNUM + 1,
9162 valbuf + ARM_INT_REGISTER_SIZE);
9163 break;
9164
9165 default:
9166 internal_error (_("arm_store_return_value: Floating "
9167 "point model not supported"));
9168 break;
9169 }
9170 }
9171 else if (type->code () == TYPE_CODE_INT
9172 || type->code () == TYPE_CODE_CHAR
9173 || type->code () == TYPE_CODE_BOOL
9174 || type->code () == TYPE_CODE_PTR
9175 || TYPE_IS_REFERENCE (type)
9176 || type->code () == TYPE_CODE_ENUM
9177 || is_fixed_point_type (type))
9178 {
9179 if (type->length () <= 4)
9180 {
9181 /* Values of one word or less are zero/sign-extended and
9182 returned in r0. */
9183 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
9184
9185 if (is_fixed_point_type (type))
9186 {
9187 gdb_mpz unscaled;
9188 unscaled.read (gdb::make_array_view (valbuf, type->length ()),
9189 byte_order, type->is_unsigned ());
9190 unscaled.write (gdb::make_array_view (tmpbuf, sizeof (tmpbuf)),
9191 byte_order, type->is_unsigned ());
9192 }
9193 else
9194 {
9195 LONGEST val = unpack_long (type, valbuf);
9196 store_signed_integer (tmpbuf, ARM_INT_REGISTER_SIZE, byte_order, val);
9197 }
9198 regs->cooked_write (ARM_A1_REGNUM, tmpbuf);
9199 }
9200 else
9201 {
9202 /* Integral values greater than one word are stored in consecutive
9203 registers starting with r0. This will always be a multiple of
9204 the register size. */
9205 int len = type->length ();
9206 int regno = ARM_A1_REGNUM;
9207
9208 while (len > 0)
9209 {
9210 regs->cooked_write (regno++, valbuf);
9211 len -= ARM_INT_REGISTER_SIZE;
9212 valbuf += ARM_INT_REGISTER_SIZE;
9213 }
9214 }
9215 }
9216 else
9217 {
9218 /* For a structure or union the behaviour is as if the value had
9219 been stored to word-aligned memory and then loaded into
9220 registers with 32-bit load instruction(s). */
9221 int len = type->length ();
9222 int regno = ARM_A1_REGNUM;
9223 bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
9224
9225 while (len > 0)
9226 {
9227 memcpy (tmpbuf, valbuf,
9228 len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
9229 regs->cooked_write (regno++, tmpbuf);
9230 len -= ARM_INT_REGISTER_SIZE;
9231 valbuf += ARM_INT_REGISTER_SIZE;
9232 }
9233 }
9234 }
9235
9236
9237 /* Handle function return values. */
9238
9239 static enum return_value_convention
9240 arm_return_value (struct gdbarch *gdbarch, struct value *function,
9241 struct type *valtype, struct regcache *regcache,
9242 struct value **read_value, const gdb_byte *writebuf)
9243 {
9244 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9245 struct type *func_type = function ? function->type () : NULL;
9246 enum arm_vfp_cprc_base_type vfp_base_type;
9247 int vfp_base_count;
9248
9249 if (arm_vfp_abi_for_function (gdbarch, func_type)
9250 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
9251 {
9252 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
9253 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
9254 int i;
9255
9256 gdb_byte *readbuf = nullptr;
9257 if (read_value != nullptr)
9258 {
9259 *read_value = value::allocate (valtype);
9260 readbuf = (*read_value)->contents_raw ().data ();
9261 }
9262
9263 for (i = 0; i < vfp_base_count; i++)
9264 {
9265 if (reg_char == 'q')
9266 {
9267 if (writebuf)
9268 arm_neon_quad_write (gdbarch, regcache, i,
9269 writebuf + i * unit_length);
9270
9271 if (readbuf)
9272 arm_neon_quad_read (gdbarch, regcache, i,
9273 readbuf + i * unit_length);
9274 }
9275 else
9276 {
9277 char name_buf[4];
9278 int regnum;
9279
9280 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
9281 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9282 strlen (name_buf));
9283 if (writebuf)
9284 regcache->cooked_write (regnum, writebuf + i * unit_length);
9285 if (readbuf)
9286 regcache->cooked_read (regnum, readbuf + i * unit_length);
9287 }
9288 }
9289 return RETURN_VALUE_REGISTER_CONVENTION;
9290 }
9291
9292 if (valtype->code () == TYPE_CODE_STRUCT
9293 || valtype->code () == TYPE_CODE_UNION
9294 || valtype->code () == TYPE_CODE_ARRAY)
9295 {
9296 /* From the AAPCS document:
9297
9298 Result return:
9299
9300 A Composite Type larger than 4 bytes, or whose size cannot be
9301 determined statically by both caller and callee, is stored in memory
9302 at an address passed as an extra argument when the function was
9303 called (Parameter Passing, rule A.4). The memory to be used for the
9304 result may be modified at any point during the function call.
9305
9306 Parameter Passing:
9307
9308 A.4: If the subroutine is a function that returns a result in memory,
9309 then the address for the result is placed in r0 and the NCRN is set
9310 to r1. */
9311 if (tdep->struct_return == pcc_struct_return
9312 || arm_return_in_memory (gdbarch, valtype))
9313 {
9314 if (read_value != nullptr)
9315 {
9316 CORE_ADDR addr;
9317
9318 regcache->cooked_read (ARM_A1_REGNUM, &addr);
9319 *read_value = value_at_non_lval (valtype, addr);
9320 }
9321 return RETURN_VALUE_ABI_RETURNS_ADDRESS;
9322 }
9323 }
9324 else if (valtype->code () == TYPE_CODE_COMPLEX)
9325 {
9326 if (arm_return_in_memory (gdbarch, valtype))
9327 return RETURN_VALUE_STRUCT_CONVENTION;
9328 }
9329
9330 if (writebuf)
9331 arm_store_return_value (valtype, regcache, writebuf);
9332
9333 if (read_value != nullptr)
9334 {
9335 *read_value = value::allocate (valtype);
9336 gdb_byte *readbuf = (*read_value)->contents_raw ().data ();
9337 arm_extract_return_value (valtype, regcache, readbuf);
9338 }
9339
9340 return RETURN_VALUE_REGISTER_CONVENTION;
9341 }
9342
9343
9344 static int
9345 arm_get_longjmp_target (frame_info_ptr frame, CORE_ADDR *pc)
9346 {
9347 struct gdbarch *gdbarch = get_frame_arch (frame);
9348 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9349 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9350 CORE_ADDR jb_addr;
9351 gdb_byte buf[ARM_INT_REGISTER_SIZE];
9352
9353 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
9354
9355 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
9356 ARM_INT_REGISTER_SIZE))
9357 return 0;
9358
9359 *pc = extract_unsigned_integer (buf, ARM_INT_REGISTER_SIZE, byte_order);
9360 return 1;
9361 }
9362 /* A call to the CMSE secure entry function "foo", shown at "a", is
9363 rewritten by GNU ld as shown at "b".
9364 a) bl xxxx <foo>
9365
9366 <foo>
9367 xxxx:
9368
9369 b) bl yyyy <__acle_se_foo>
9370
9371 section .gnu.sgstubs:
9372 <foo>
9373 yyyy: sg // secure gateway
9374 b.w xxxx <__acle_se_foo> // original_branch_dest
9375
9376 <__acle_se_foo>
9377 xxxx:
9378
9379 When control reaches "b", the PC contains "yyyy" (the sg address), which
9380 is a trampoline that does not exist in the source code. This function returns the
9381 target pc "xxxx". For more details please refer to section 5.4
9382 (Entry functions) and section 3.4.4 (C level development flow of secure code)
9383 of "armv8-m-security-extensions-requirements-on-development-tools-engineering-specification"
9384 document on www.developer.arm.com. */
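/* In other words, given the trampoline name FOO, look up the real entry
symbol __acle_se_FOO and return its address, or 0 if no such symbol
exists.  */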
9385
9386 static CORE_ADDR
9387 arm_skip_cmse_entry (CORE_ADDR pc, const char *name, struct objfile *objfile)
9388 {
9389 int target_len = strlen (name) + strlen ("__acle_se_") + 1;
9390 char *target_name = (char *) alloca (target_len);
9391 xsnprintf (target_name, target_len, "%s%s", "__acle_se_", name);
9392
9393 struct bound_minimal_symbol minsym
9394 = lookup_minimal_symbol (target_name, NULL, objfile);
9395
9396 if (minsym.minsym != nullptr)
9397 return minsym.value_address ();
9398
9399 return 0;
9400 }
9401
9402 /* Return true when SEC points to the ".gnu.sgstubs" section. */
9403
9404 static bool
9405 arm_is_sgstubs_section (struct obj_section *sec)
9406 {
9407 return (sec != nullptr
9408 && sec->the_bfd_section != nullptr
9409 && sec->the_bfd_section->name != nullptr
9410 && streq (sec->the_bfd_section->name, ".gnu.sgstubs"));
9411 }
9412
9413 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
9414 return the target PC. Otherwise return 0. */
9415
9416 CORE_ADDR
9417 arm_skip_stub (frame_info_ptr frame, CORE_ADDR pc)
9418 {
9419 const char *name;
9420 int namelen;
9421 CORE_ADDR start_addr;
9422
9423 /* Find the starting address and name of the function containing the PC. */
9424 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
9425 {
9426 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
9427 check here. */
9428 start_addr = arm_skip_bx_reg (frame, pc);
9429 if (start_addr != 0)
9430 return start_addr;
9431
9432 return 0;
9433 }
9434
9435 /* If PC is in a Thumb call or return stub, return the address of the
9436 target PC, which is in a register. The thunk functions are called
9437 _call_via_xx, where xx is the register name. The possible names
9438 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
9439 functions, named __ARM_call_via_r[0-7]. */
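/* For example, a call through r3 is routed via a stub named
_call_via_r3; the last two characters of the stub name identify the
register holding the real target, which is what the table lookup
below relies on.  */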
9440 if (startswith (name, "_call_via_")
9441 || startswith (name, "__ARM_call_via_"))
9442 {
9443 /* Use the name suffix to determine which register contains the
9444 target PC. */
9445 static const char *table[15] =
9446 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
9447 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
9448 };
9449 int regno;
9450 int offset = strlen (name) - 2;
9451
9452 for (regno = 0; regno <= 14; regno++)
9453 if (strcmp (&name[offset], table[regno]) == 0)
9454 return get_frame_register_unsigned (frame, regno);
9455 }
9456
9457 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
9458 non-interworking calls to foo. We could decode the stubs
9459 to find the target but it's easier to use the symbol table. */
9460 namelen = strlen (name);
9461 if (name[0] == '_' && name[1] == '_'
9462 && ((namelen > 2 + strlen ("_from_thumb")
9463 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
9464 || (namelen > 2 + strlen ("_from_arm")
9465 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
9466 {
9467 char *target_name;
9468 int target_len = namelen - 2;
9469 struct bound_minimal_symbol minsym;
9470 struct objfile *objfile;
9471 struct obj_section *sec;
9472
9473 if (name[namelen - 1] == 'b')
9474 target_len -= strlen ("_from_thumb");
9475 else
9476 target_len -= strlen ("_from_arm");
9477
9478 target_name = (char *) alloca (target_len + 1);
9479 memcpy (target_name, name + 2, target_len);
9480 target_name[target_len] = '\0';
9481
9482 sec = find_pc_section (pc);
9483 objfile = (sec == NULL) ? NULL : sec->objfile;
9484 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
9485 if (minsym.minsym != NULL)
9486 return minsym.value_address ();
9487 else
9488 return 0;
9489 }
9490
9491 struct obj_section *section = find_pc_section (pc);
9492
9493 /* Check whether SECTION points to the ".gnu.sgstubs" section. */
9494 if (arm_is_sgstubs_section (section))
9495 return arm_skip_cmse_entry (pc, name, section->objfile);
9496
9497 return 0; /* not a stub */
9498 }
9499
9500 static void
9501 arm_update_current_architecture (void)
9502 {
9503 /* If the current architecture is not ARM, we have nothing to do. */
9504 gdbarch *arch = current_inferior ()->arch ();
9505 if (gdbarch_bfd_arch_info (arch)->arch != bfd_arch_arm)
9506 return;
9507
9508 /* Update the architecture. */
9509 gdbarch_info info;
9510 if (!gdbarch_update_p (info))
9511 internal_error (_("could not update architecture"));
9512 }
9513
9514 static void
9515 set_fp_model_sfunc (const char *args, int from_tty,
9516 struct cmd_list_element *c)
9517 {
9518 int fp_model;
9519
9520 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
9521 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
9522 {
9523 arm_fp_model = (enum arm_float_model) fp_model;
9524 break;
9525 }
9526
9527 if (fp_model == ARM_FLOAT_LAST)
9528 internal_error (_("Invalid fp model accepted: %s."),
9529 current_fp_model);
9530
9531 arm_update_current_architecture ();
9532 }
9533
9534 static void
9535 show_fp_model (struct ui_file *file, int from_tty,
9536 struct cmd_list_element *c, const char *value)
9537 {
9538 gdbarch *arch = current_inferior ()->arch ();
9539 if (arm_fp_model == ARM_FLOAT_AUTO
9540 && gdbarch_bfd_arch_info (arch)->arch == bfd_arch_arm)
9541 {
9542 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (arch);
9543
9544 gdb_printf (file, _("\
9545 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
9546 fp_model_strings[tdep->fp_model]);
9547 }
9548 else
9549 gdb_printf (file, _("\
9550 The current ARM floating point model is \"%s\".\n"),
9551 fp_model_strings[arm_fp_model]);
9552 }
9553
9554 static void
9555 arm_set_abi (const char *args, int from_tty,
9556 struct cmd_list_element *c)
9557 {
9558 int arm_abi;
9559
9560 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
9561 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
9562 {
9563 arm_abi_global = (enum arm_abi_kind) arm_abi;
9564 break;
9565 }
9566
9567 if (arm_abi == ARM_ABI_LAST)
9568 internal_error (_("Invalid ABI accepted: %s."),
9569 arm_abi_string);
9570
9571 arm_update_current_architecture ();
9572 }
9573
9574 static void
9575 arm_show_abi (struct ui_file *file, int from_tty,
9576 struct cmd_list_element *c, const char *value)
9577 {
9578 gdbarch *arch = current_inferior ()->arch ();
9579 if (arm_abi_global == ARM_ABI_AUTO
9580 && gdbarch_bfd_arch_info (arch)->arch == bfd_arch_arm)
9581 {
9582 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (arch);
9583
9584 gdb_printf (file, _("\
9585 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
9586 arm_abi_strings[tdep->arm_abi]);
9587 }
9588 else
9589 gdb_printf (file, _("The current ARM ABI is \"%s\".\n"),
9590 arm_abi_string);
9591 }
9592
9593 static void
9594 arm_show_fallback_mode (struct ui_file *file, int from_tty,
9595 struct cmd_list_element *c, const char *value)
9596 {
9597 gdb_printf (file,
9598 _("The current execution mode assumed "
9599 "(when symbols are unavailable) is \"%s\".\n"),
9600 arm_fallback_mode_string);
9601 }
9602
9603 static void
9604 arm_show_force_mode (struct ui_file *file, int from_tty,
9605 struct cmd_list_element *c, const char *value)
9606 {
9607 gdb_printf (file,
9608 _("The current execution mode assumed "
9609 "(even when symbols are available) is \"%s\".\n"),
9610 arm_force_mode_string);
9611 }
9612
9613 static void
9614 arm_show_unwind_secure_frames (struct ui_file *file, int from_tty,
9615 struct cmd_list_element *c, const char *value)
9616 {
9617 gdb_printf (file,
9618 _("Usage of non-secure to secure exception stack unwinding is %s.\n"),
9619 arm_unwind_secure_frames ? "on" : "off");
9620 }
9621
9622 /* If the user changes the register disassembly style used for info
9623 register and other commands, we have to also switch the style used
9624 in opcodes for disassembly output. This function is run in the "set
9625 arm disassembly" command, and does that. */
9626
9627 static void
9628 set_disassembly_style_sfunc (const char *args, int from_tty,
9629 struct cmd_list_element *c)
9630 {
9631 /* Convert the short style name into the long style name (e.g. reg-names-*)
9632 before calling the generic set_disassembler_options() function. */
9633 std::string long_name = std::string ("reg-names-") + disassembly_style;
9634 set_disassembler_options (&long_name[0]);
9635 }
9636
9637 static void
9638 show_disassembly_style_sfunc (struct ui_file *file, int from_tty,
9639 struct cmd_list_element *c, const char *value)
9640 {
9641 struct gdbarch *gdbarch = get_current_arch ();
9642 char *options = get_disassembler_options (gdbarch);
9643 const char *style = "";
9644 int len = 0;
9645 const char *opt;
9646
9647 FOR_EACH_DISASSEMBLER_OPTION (opt, options)
9648 if (startswith (opt, "reg-names-"))
9649 {
9650 style = &opt[strlen ("reg-names-")];
9651 len = strcspn (style, ",");
9652 }
9653
9654 gdb_printf (file, "The disassembly style is \"%.*s\".\n", len, style);
9655 }
9656 \f
9657 /* Return the ARM register name corresponding to register I. */
9658 static const char *
9659 arm_register_name (struct gdbarch *gdbarch, int i)
9660 {
9661 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9662
9663 if (is_s_pseudo (gdbarch, i))
9664 {
9665 static const char *const s_pseudo_names[] = {
9666 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
9667 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
9668 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
9669 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
9670 };
9671
9672 return s_pseudo_names[i - tdep->s_pseudo_base];
9673 }
9674
9675 if (is_q_pseudo (gdbarch, i))
9676 {
9677 static const char *const q_pseudo_names[] = {
9678 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
9679 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
9680 };
9681
9682 return q_pseudo_names[i - tdep->q_pseudo_base];
9683 }
9684
9685 if (is_mve_pseudo (gdbarch, i))
9686 return "p0";
9687
9688 /* RA_AUTH_CODE is used for unwinding only. Do not assign it a name. */
9689 if (is_pacbti_pseudo (gdbarch, i))
9690 return "";
9691
9692 if (i >= ARRAY_SIZE (arm_register_names))
9693 /* These registers are only supported on targets which supply
9694 an XML description. */
9695 return "";
9696
9697 /* Non-pseudo registers. */
9698 return arm_register_names[i];
9699 }
9700
9701 /* Test whether the coff symbol specific value corresponds to a Thumb
9702 function. */
9703
9704 static int
9705 coff_sym_is_thumb (int val)
9706 {
9707 return (val == C_THUMBEXT
9708 || val == C_THUMBSTAT
9709 || val == C_THUMBEXTFUNC
9710 || val == C_THUMBSTATFUNC
9711 || val == C_THUMBLABEL);
9712 }
9713
9714 /* arm_coff_make_msymbol_special()
9715 arm_elf_make_msymbol_special()
9716
9717 These functions test whether the COFF or ELF symbol corresponds to
9718 an address in thumb code, and set a "special" bit in a minimal
9719 symbol to indicate that it does. */
9720
9721 static void
9722 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
9723 {
9724 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
9725
9726 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
9727 == ST_BRANCH_TO_THUMB)
9728 MSYMBOL_SET_SPECIAL (msym);
9729 }
9730
9731 static void
9732 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
9733 {
9734 if (coff_sym_is_thumb (val))
9735 MSYMBOL_SET_SPECIAL (msym);
9736 }
9737
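/* Implement the record_special_symbol gdbarch method: remember ARM
mapping symbols ($a, $t and $d) in the per-BFD section maps, so that
the instruction-set state of addresses lacking other symbol
information can be looked up later.  */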
9738 static void
9739 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
9740 asymbol *sym)
9741 {
9742 const char *name = bfd_asymbol_name (sym);
9743 struct arm_per_bfd *data;
9744 struct arm_mapping_symbol new_map_sym;
9745
9746 gdb_assert (name[0] == '$');
9747 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
9748 return;
9749
9750 data = arm_bfd_data_key.get (objfile->obfd.get ());
9751 if (data == NULL)
9752 data = arm_bfd_data_key.emplace (objfile->obfd.get (),
9753 objfile->obfd->section_count);
9754 arm_mapping_symbol_vec &map
9755 = data->section_maps[bfd_asymbol_section (sym)->index];
9756
9757 new_map_sym.value = sym->value;
9758 new_map_sym.type = name[1];
9759
9760 /* Insert at the end, the vector will be sorted on first use. */
9761 map.push_back (new_map_sym);
9762 }
9763
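/* Implement the write_pc gdbarch method.  Besides writing PC, in
32-bit APCS mode this also updates the CPSR T bit so that the
processor state (ARM or Thumb) matches the destination address.  */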
9764 static void
9765 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
9766 {
9767 struct gdbarch *gdbarch = regcache->arch ();
9768 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
9769
9770 /* If necessary, set the T bit. */
9771 if (arm_apcs_32)
9772 {
9773 ULONGEST val, t_bit;
9774 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
9775 t_bit = arm_psr_thumb_bit (gdbarch);
9776 if (arm_pc_is_thumb (gdbarch, pc))
9777 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9778 val | t_bit);
9779 else
9780 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9781 val & ~t_bit);
9782 }
9783 }
9784
9785 /* Read the contents of a NEON quad register, by reading from two
9786 double registers. This is used to implement the quad pseudo
9787 registers, and for argument passing in case the quad registers are
9788 missing; vectors are passed in quad registers when using the VFP
9789 ABI, even if a NEON unit is not present. REGNUM is the index of
9790 the quad register, in [0, 15]. */
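/* For example, q1 occupies d2 and d3; the REGNUM << 1 below selects the
low double register and the following one supplies the high half.  */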
9791
9792 static enum register_status
9793 arm_neon_quad_read (struct gdbarch *gdbarch, readable_regcache *regcache,
9794 int regnum, gdb_byte *buf)
9795 {
9796 char name_buf[4];
9797 gdb_byte reg_buf[8];
9798 int double_regnum;
9799 enum register_status status;
9800
9801 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9802 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9803 strlen (name_buf));
9804
9805 status = regcache->raw_read (double_regnum, reg_buf);
9806 if (status != REG_VALID)
9807 return status;
9808 memcpy (buf, reg_buf, 8);
9809
9810 status = regcache->raw_read (double_regnum + 1, reg_buf);
9811 if (status != REG_VALID)
9812 return status;
9813 memcpy (buf + 8, reg_buf, 8);
9814
9815 return REG_VALID;
9816 }
9817
9818 /* Read the contents of the MVE pseudo register REGNUM and store it
9819 in BUF. */
9820
9821 static enum register_status
9822 arm_mve_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
9823 int regnum, gdb_byte *buf)
9824 {
9825 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9826
9827 /* P0 is the first 16 bits of VPR. */
9828 return regcache->raw_read_part (tdep->mve_vpr_regnum, 0, 2, buf);
9829 }
9830
9831 static enum register_status
9832 arm_pseudo_read (struct gdbarch *gdbarch, readable_regcache *regcache,
9833 int regnum, gdb_byte *buf)
9834 {
9835 const int num_regs = gdbarch_num_regs (gdbarch);
9836 char name_buf[4];
9837 gdb_byte reg_buf[8];
9838 int offset, double_regnum;
9839 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9840
9841 gdb_assert (regnum >= num_regs);
9842
9843 if (is_q_pseudo (gdbarch, regnum))
9844 {
9845 /* Quad-precision register. */
9846 return arm_neon_quad_read (gdbarch, regcache,
9847 regnum - tdep->q_pseudo_base, buf);
9848 }
9849 else if (is_mve_pseudo (gdbarch, regnum))
9850 return arm_mve_pseudo_read (gdbarch, regcache, regnum, buf);
9851 else
9852 {
9853 enum register_status status;
9854
9855 regnum -= tdep->s_pseudo_base;
9856 /* Single-precision register. */
9857 gdb_assert (regnum < 32);
9858
9859 /* s0 is always the least significant half of d0. */
9860 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9861 offset = (regnum & 1) ? 0 : 4;
9862 else
9863 offset = (regnum & 1) ? 4 : 0;
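/* For example, s5 maps to the high half of d2: bytes 4-7 of the double
register on a little-endian target, bytes 0-3 on a big-endian one.  */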
9864
9865 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9866 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9867 strlen (name_buf));
9868
9869 status = regcache->raw_read (double_regnum, reg_buf);
9870 if (status == REG_VALID)
9871 memcpy (buf, reg_buf + offset, 4);
9872 return status;
9873 }
9874 }
9875
9876 /* Store the contents of BUF to a NEON quad register, by writing to
9877 two double registers. This is used to implement the quad pseudo
9878 registers, and for argument passing in case the quad registers are
9879 missing; vectors are passed in quad registers when using the VFP
9880 ABI, even if a NEON unit is not present. REGNUM is the index
9881 of the quad register, in [0, 15]. */
9882
9883 static void
9884 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
9885 int regnum, const gdb_byte *buf)
9886 {
9887 char name_buf[4];
9888 int double_regnum;
9889
9890 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9891 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9892 strlen (name_buf));
9893
9894 regcache->raw_write (double_regnum, buf);
9895 regcache->raw_write (double_regnum + 1, buf + 8);
9896 }
9897
9898 /* Store the contents of BUF to the MVE pseudo register REGNUM. */
9899
9900 static void
9901 arm_mve_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
9902 int regnum, const gdb_byte *buf)
9903 {
9904 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9905
9906 /* P0 is the first 16 bits of VPR. */
9907 regcache->raw_write_part (tdep->mve_vpr_regnum, 0, 2, buf);
9908 }
9909
9910 static void
9911 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
9912 int regnum, const gdb_byte *buf)
9913 {
9914 const int num_regs = gdbarch_num_regs (gdbarch);
9915 char name_buf[4];
9916 gdb_byte reg_buf[8];
9917 int offset, double_regnum;
9918 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
9919
9920 gdb_assert (regnum >= num_regs);
9921
9922 if (is_q_pseudo (gdbarch, regnum))
9923 {
9924 /* Quad-precision register. */
9925 arm_neon_quad_write (gdbarch, regcache,
9926 regnum - tdep->q_pseudo_base, buf);
9927 }
9928 else if (is_mve_pseudo (gdbarch, regnum))
9929 arm_mve_pseudo_write (gdbarch, regcache, regnum, buf);
9930 else
9931 {
9932 regnum -= tdep->s_pseudo_base;
9933 /* Single-precision register. */
9934 gdb_assert (regnum < 32);
9935
9936 /* s0 is always the least significant half of d0. */
9937 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9938 offset = (regnum & 1) ? 0 : 4;
9939 else
9940 offset = (regnum & 1) ? 4 : 0;
9941
9942 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9943 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9944 strlen (name_buf));
9945
9946 regcache->raw_read (double_regnum, reg_buf);
9947 memcpy (reg_buf + offset, buf, 4);
9948 regcache->raw_write (double_regnum, reg_buf);
9949 }
9950 }
9951
9952 static struct value *
9953 value_of_arm_user_reg (frame_info_ptr frame, const void *baton)
9954 {
9955 const int *reg_p = (const int *) baton;
9956 return value_of_register (*reg_p, frame);
9957 }
9958 \f
9959 static enum gdb_osabi
9960 arm_elf_osabi_sniffer (bfd *abfd)
9961 {
9962 unsigned int elfosabi;
9963 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
9964
9965 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
9966
9967 if (elfosabi == ELFOSABI_ARM)
9968 /* GNU tools use this value. Check note sections in this case,
9969 as well. */
9970 {
9971 for (asection *sect : gdb_bfd_sections (abfd))
9972 generic_elf_osabi_sniff_abi_tag_sections (abfd, sect, &osabi);
9973 }
9974
9975 /* Anything else will be handled by the generic ELF sniffer. */
9976 return osabi;
9977 }
9978
9979 static int
9980 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
9981 const struct reggroup *group)
9982 {
9983 /* The FPS register's type is INT, but it belongs to float_reggroup.
9984 Besides that, the FPS register also belongs to save_reggroup,
9985 restore_reggroup, and all_reggroup, of course. */
9986 if (regnum == ARM_FPS_REGNUM)
9987 return (group == float_reggroup
9988 || group == save_reggroup
9989 || group == restore_reggroup
9990 || group == all_reggroup);
9991 else
9992 return default_register_reggroup_p (gdbarch, regnum, group);
9993 }
9994
9995 /* For backward-compatibility we allow two 'g' packet lengths with
9996 the remote protocol depending on whether FPA registers are
9997 supplied. M-profile targets do not have FPA registers, but some
9998 stubs already exist in the wild which use a 'g' packet which
9999 supplies them albeit with dummy values. The packet format which
10000 includes FPA registers should be considered deprecated for
10001 M-profile targets. */
10002
10003 static void
10004 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
10005 {
10006 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
10007
10008 if (tdep->is_m)
10009 {
10010 const target_desc *tdesc;
10011
10012 /* If we know from the executable this is an M-profile target,
10013 cater for remote targets whose register set layout is the
10014 same as the FPA layout. */
10015 tdesc = arm_read_mprofile_description (ARM_M_TYPE_WITH_FPA);
10016 register_remote_g_packet_guess (gdbarch,
10017 ARM_CORE_REGS_SIZE + ARM_FP_REGS_SIZE,
10018 tdesc);
10019
10020 /* The regular M-profile layout. */
10021 tdesc = arm_read_mprofile_description (ARM_M_TYPE_M_PROFILE);
10022 register_remote_g_packet_guess (gdbarch, ARM_CORE_REGS_SIZE,
10023 tdesc);
10024
10025 /* M-profile plus M4F VFP. */
10026 tdesc = arm_read_mprofile_description (ARM_M_TYPE_VFP_D16);
10027 register_remote_g_packet_guess (gdbarch,
10028 ARM_CORE_REGS_SIZE + ARM_VFP2_REGS_SIZE,
10029 tdesc);
10030 /* M-profile plus MVE. */
10031 tdesc = arm_read_mprofile_description (ARM_M_TYPE_MVE);
10032 register_remote_g_packet_guess (gdbarch, ARM_CORE_REGS_SIZE
10033 + ARM_VFP2_REGS_SIZE
10034 + ARM_INT_REGISTER_SIZE, tdesc);
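/* The extra ARM_INT_REGISTER_SIZE here corresponds to the 32-bit VPR
register that the MVE description adds on top of the VFP registers.  */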
10035
10036 /* M-profile system (stack pointers). */
10037 tdesc = arm_read_mprofile_description (ARM_M_TYPE_SYSTEM);
10038 register_remote_g_packet_guess (gdbarch, 2 * ARM_INT_REGISTER_SIZE, tdesc);
10039 }
10040
10041 /* Otherwise we don't have a useful guess. */
10042 }
10043
10044 /* Implement the code_of_frame_writable gdbarch method. */
10045
10046 static int
10047 arm_code_of_frame_writable (struct gdbarch *gdbarch, frame_info_ptr frame)
10048 {
10049 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
10050
10051 if (tdep->is_m && get_frame_type (frame) == SIGTRAMP_FRAME)
10052 {
10053 /* M-profile exception frames return to magic PCs, which are not
10054 writable at all. */
10055 return 0;
10056 }
10057 else
10058 return 1;
10059 }
10060
10061 /* Implement gdbarch_gnu_triplet_regexp. If the arch name is arm then allow it
10062 to be suffixed by a version (e.g. armv7hl). */
10063
10064 static const char *
10065 arm_gnu_triplet_regexp (struct gdbarch *gdbarch)
10066 {
10067 if (strcmp (gdbarch_bfd_arch_info (gdbarch)->arch_name, "arm") == 0)
10068 return "arm(v[^- ]*)?";
10069 return gdbarch_bfd_arch_info (gdbarch)->arch_name;
10070 }
10071
10072 /* Implement the "get_pc_address_flags" gdbarch method. */
10073
10074 static std::string
10075 arm_get_pc_address_flags (frame_info_ptr frame, CORE_ADDR pc)
10076 {
10077 if (get_frame_pc_masked (frame))
10078 return "PAC";
10079
10080 return "";
10081 }
10082
10083 /* Initialize the current architecture based on INFO. If possible,
10084 re-use an architecture from ARCHES, which is a list of
10085 architectures already created during this debugging session.
10086
10087 Called e.g. at program startup, when reading a core file, and when
10088 reading a binary file. */
10089
10090 static struct gdbarch *
10091 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
10092 {
10093 struct gdbarch_list *best_arch;
10094 enum arm_abi_kind arm_abi = arm_abi_global;
10095 enum arm_float_model fp_model = arm_fp_model;
10096 tdesc_arch_data_up tdesc_data;
10097 int i;
10098 bool is_m = false;
10099 bool have_sec_ext = false;
10100 int vfp_register_count = 0;
10101 bool have_s_pseudos = false, have_q_pseudos = false;
10102 bool have_wmmx_registers = false;
10103 bool have_neon = false;
10104 bool have_fpa_registers = true;
10105 const struct target_desc *tdesc = info.target_desc;
10106 bool have_vfp = false;
10107 bool have_mve = false;
10108 bool have_pacbti = false;
10109 int mve_vpr_regnum = -1;
10110 int register_count = ARM_NUM_REGS;
10111 bool have_m_profile_msp = false;
10112 int m_profile_msp_regnum = -1;
10113 int m_profile_psp_regnum = -1;
10114 int m_profile_msp_ns_regnum = -1;
10115 int m_profile_psp_ns_regnum = -1;
10116 int m_profile_msp_s_regnum = -1;
10117 int m_profile_psp_s_regnum = -1;
10118 int tls_regnum = 0;
10119
10120 /* If we have an object to base this architecture on, try to determine
10121 its ABI. */
10122
10123 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
10124 {
10125 int ei_osabi, e_flags;
10126
10127 switch (bfd_get_flavour (info.abfd))
10128 {
10129 case bfd_target_coff_flavour:
10130 /* Assume it's an old APCS-style ABI. */
10131 /* XXX WinCE? */
10132 arm_abi = ARM_ABI_APCS;
10133 break;
10134
10135 case bfd_target_elf_flavour:
10136 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
10137 e_flags = elf_elfheader (info.abfd)->e_flags;
10138
10139 if (ei_osabi == ELFOSABI_ARM)
10140 {
10141 /* GNU tools used to use this value, but do not for EABI
10142 objects. There's nowhere to tag an EABI version
10143 anyway, so assume APCS. */
10144 arm_abi = ARM_ABI_APCS;
10145 }
10146 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
10147 {
10148 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
10149
10150 switch (eabi_ver)
10151 {
10152 case EF_ARM_EABI_UNKNOWN:
10153 /* Assume GNU tools. */
10154 arm_abi = ARM_ABI_APCS;
10155 break;
10156
10157 case EF_ARM_EABI_VER4:
10158 case EF_ARM_EABI_VER5:
10159 arm_abi = ARM_ABI_AAPCS;
10160 /* EABI binaries default to VFP float ordering.
10161 They may also contain build attributes that can
10162 be used to identify if the VFP argument-passing
10163 ABI is in use. */
10164 if (fp_model == ARM_FLOAT_AUTO)
10165 {
10166 #ifdef HAVE_ELF
10167 switch (bfd_elf_get_obj_attr_int (info.abfd,
10168 OBJ_ATTR_PROC,
10169 Tag_ABI_VFP_args))
10170 {
10171 case AEABI_VFP_args_base:
10172 /* "The user intended FP parameter/result
10173 passing to conform to AAPCS, base
10174 variant". */
10175 fp_model = ARM_FLOAT_SOFT_VFP;
10176 break;
10177 case AEABI_VFP_args_vfp:
10178 /* "The user intended FP parameter/result
10179 passing to conform to AAPCS, VFP
10180 variant". */
10181 fp_model = ARM_FLOAT_VFP;
10182 break;
10183 case AEABI_VFP_args_toolchain:
10184 /* "The user intended FP parameter/result
10185 passing to conform to tool chain-specific
10186 conventions" - we don't know any such
10187 conventions, so leave it as "auto". */
10188 break;
10189 case AEABI_VFP_args_compatible:
10190 /* "Code is compatible with both the base
10191 and VFP variants; the user did not permit
10192 non-variadic functions to pass FP
10193 parameters/results" - leave it as
10194 "auto". */
10195 break;
10196 default:
10197 /* Attribute value not mentioned in the
10198 November 2012 ABI, so leave it as
10199 "auto". */
10200 break;
10201 }
10202 #else
10203 fp_model = ARM_FLOAT_SOFT_VFP;
10204 #endif
10205 }
10206 break;
10207
10208 default:
10209 /* Leave it as "auto". */
10210 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
10211 break;
10212 }
10213
10214 #ifdef HAVE_ELF
10215 /* Detect M-profile programs. This only works if the
10216 executable file includes build attributes; GCC does
10217 copy them to the executable, but e.g. RealView does
10218 not. */
10219 int attr_arch
10220 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10221 Tag_CPU_arch);
10222 int attr_profile
10223 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10224 Tag_CPU_arch_profile);
10225
10226 /* GCC specifies the profile for v6-M; RealView only
10227 specifies the profile for architectures starting with
10228 V7 (as opposed to architectures with a tag
10229 numerically greater than TAG_CPU_ARCH_V7). */
10230 if (!tdesc_has_registers (tdesc)
10231 && (attr_arch == TAG_CPU_ARCH_V6_M
10232 || attr_arch == TAG_CPU_ARCH_V6S_M
10233 || attr_arch == TAG_CPU_ARCH_V7E_M
10234 || attr_arch == TAG_CPU_ARCH_V8M_BASE
10235 || attr_arch == TAG_CPU_ARCH_V8M_MAIN
10236 || attr_arch == TAG_CPU_ARCH_V8_1M_MAIN
10237 || attr_profile == 'M'))
10238 is_m = true;
10239
10240 /* Look for attributes that indicate support for ARMv8.1-m
10241 PACBTI. */
10242 if (!tdesc_has_registers (tdesc) && is_m)
10243 {
10244 int attr_pac_extension
10245 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10246 Tag_PAC_extension);
10247
10248 int attr_bti_extension
10249 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10250 Tag_BTI_extension);
10251
10252 int attr_pacret_use
10253 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10254 Tag_PACRET_use);
10255
10256 int attr_bti_use
10257 = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10258 Tag_BTI_use);
10259
10260 if (attr_pac_extension != 0 || attr_bti_extension != 0
10261 || attr_pacret_use != 0 || attr_bti_use != 0)
10262 have_pacbti = true;
10263 }
10264 #endif
10265 }
10266
10267 if (fp_model == ARM_FLOAT_AUTO)
10268 {
10269 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
10270 {
10271 case 0:
10272 /* Leave it as "auto". Strictly speaking this case
10273 means FPA, but almost nobody uses that now, and
10274 many toolchains fail to set the appropriate bits
10275 for the floating-point model they use. */
10276 break;
10277 case EF_ARM_SOFT_FLOAT:
10278 fp_model = ARM_FLOAT_SOFT_FPA;
10279 break;
10280 case EF_ARM_VFP_FLOAT:
10281 fp_model = ARM_FLOAT_VFP;
10282 break;
10283 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
10284 fp_model = ARM_FLOAT_SOFT_VFP;
10285 break;
10286 }
10287 }
10288
10289 if (e_flags & EF_ARM_BE8)
10290 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
10291
10292 break;
10293
10294 default:
10295 /* Leave it as "auto". */
10296 break;
10297 }
10298 }
10299
10300 /* Check any target description for validity. */
10301 if (tdesc_has_registers (tdesc))
10302 {
10303 /* For most registers we require GDB's default names; but also allow
10304 the numeric names for sp / lr / pc, as a convenience. */
10305 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
10306 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
10307 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
10308
10309 const struct tdesc_feature *feature;
10310 int valid_p;
10311
10312 feature = tdesc_find_feature (tdesc,
10313 "org.gnu.gdb.arm.core");
10314 if (feature == NULL)
10315 {
10316 feature = tdesc_find_feature (tdesc,
10317 "org.gnu.gdb.arm.m-profile");
10318 if (feature == NULL)
10319 return NULL;
10320 else
10321 is_m = true;
10322 }
10323
10324 tdesc_data = tdesc_data_alloc ();
10325
10326 valid_p = 1;
10327 for (i = 0; i < ARM_SP_REGNUM; i++)
10328 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), i,
10329 arm_register_names[i]);
10330 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
10331 ARM_SP_REGNUM,
10332 arm_sp_names);
10333 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
10334 ARM_LR_REGNUM,
10335 arm_lr_names);
10336 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data.get (),
10337 ARM_PC_REGNUM,
10338 arm_pc_names);
10339 if (is_m)
10340 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10341 ARM_PS_REGNUM, "xpsr");
10342 else
10343 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10344 ARM_PS_REGNUM, "cpsr");
10345
10346 if (!valid_p)
10347 return NULL;
10348
10349 if (is_m)
10350 {
10351 feature = tdesc_find_feature (tdesc,
10352 "org.gnu.gdb.arm.m-system");
10353 if (feature != nullptr)
10354 {
10355 /* MSP */
10356 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10357 register_count, "msp");
10358 if (!valid_p)
10359 {
10360 warning (_("M-profile m-system feature is missing required register msp."));
10361 return nullptr;
10362 }
10363 have_m_profile_msp = true;
10364 m_profile_msp_regnum = register_count++;
10365
10366 /* PSP */
10367 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10368 register_count, "psp");
10369 if (!valid_p)
10370 {
10371 warning (_("M-profile m-system feature is missing required register psp."));
10372 return nullptr;
10373 }
10374 m_profile_psp_regnum = register_count++;
10375 }
10376 }
10377
10378 feature = tdesc_find_feature (tdesc,
10379 "org.gnu.gdb.arm.fpa");
10380 if (feature != NULL)
10381 {
10382 valid_p = 1;
10383 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
10384 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (), i,
10385 arm_register_names[i]);
10386 if (!valid_p)
10387 return NULL;
10388 }
10389 else
10390 have_fpa_registers = false;
10391
10392 feature = tdesc_find_feature (tdesc,
10393 "org.gnu.gdb.xscale.iwmmxt");
10394 if (feature != NULL)
10395 {
10396 static const char *const iwmmxt_names[] = {
10397 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
10398 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
10399 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
10400 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
10401 };
10402
10403 valid_p = 1;
10404 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
10405 valid_p
10406 &= tdesc_numbered_register (feature, tdesc_data.get (), i,
10407 iwmmxt_names[i - ARM_WR0_REGNUM]);
10408
10409 /* Check for the control registers, but do not fail if they
10410 are missing. */
10411 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
10412 tdesc_numbered_register (feature, tdesc_data.get (), i,
10413 iwmmxt_names[i - ARM_WR0_REGNUM]);
10414
10415 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
10416 valid_p
10417 &= tdesc_numbered_register (feature, tdesc_data.get (), i,
10418 iwmmxt_names[i - ARM_WR0_REGNUM]);
10419
10420 if (!valid_p)
10421 return NULL;
10422
10423 have_wmmx_registers = true;
10424 }
10425
10426 /* If we have a VFP unit, check whether the single precision registers
10427 are present. If not, then we will synthesize them as pseudo
10428 registers. */
10429 feature = tdesc_find_feature (tdesc,
10430 "org.gnu.gdb.arm.vfp");
10431 if (feature != NULL)
10432 {
10433 static const char *const vfp_double_names[] = {
10434 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
10435 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
10436 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
10437 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
10438 };
10439
10440 /* Require the double precision registers. There must be either
10441 16 or 32. */
10442 valid_p = 1;
10443 for (i = 0; i < 32; i++)
10444 {
10445 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10446 ARM_D0_REGNUM + i,
10447 vfp_double_names[i]);
10448 if (!valid_p)
10449 break;
10450 }
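/* A failure exactly at i == 16 means the description supplies only
d0-d15, i.e. a D16 VFP unit, which is also acceptable.  */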
10451 if (!valid_p && i == 16)
10452 valid_p = 1;
10453
10454 /* Also require FPSCR. */
10455 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10456 ARM_FPSCR_REGNUM, "fpscr");
10457 if (!valid_p)
10458 return NULL;
10459
10460 have_vfp = true;
10461
10462 if (tdesc_unnumbered_register (feature, "s0") == 0)
10463 have_s_pseudos = true;
10464
10465 vfp_register_count = i;
10466
10467 /* If we have VFP, also check for NEON. The architecture allows
10468 NEON without VFP (integer vector operations only), but GDB
10469 does not support that. */
10470 feature = tdesc_find_feature (tdesc,
10471 "org.gnu.gdb.arm.neon");
10472 if (feature != NULL)
10473 {
10474 /* NEON requires 32 double-precision registers. */
10475 if (i != 32)
10476 return NULL;
10477
10478 /* If there are quad registers defined by the stub, use
10479 their type; otherwise (normally) provide them with
10480 the default type. */
10481 if (tdesc_unnumbered_register (feature, "q0") == 0)
10482 have_q_pseudos = true;
10483 }
10484 }
10485
10486 /* Check for the TLS register feature. */
10487 feature = tdesc_find_feature (tdesc, "org.gnu.gdb.arm.tls");
10488 if (feature != nullptr)
10489 {
10490 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10491 register_count, "tpidruro");
10492 if (!valid_p)
10493 return nullptr;
10494
10495 tls_regnum = register_count;
10496 register_count++;
10497 }
10498
10499 /* Check for MVE after all the checks for GPRs, VFP and NEON.
10500 MVE (Helium) is an M-profile extension. */
10501 if (is_m)
10502 {
10503 /* Do we have the MVE feature? */
10504 feature = tdesc_find_feature (tdesc,"org.gnu.gdb.arm.m-profile-mve");
10505
10506 if (feature != nullptr)
10507 {
10508 /* If we have MVE, we must always have the VPR register. */
10509 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10510 register_count, "vpr");
10511 if (!valid_p)
10512 {
10513 warning (_("MVE feature is missing required register vpr."));
10514 return nullptr;
10515 }
10516
10517 have_mve = true;
10518 mve_vpr_regnum = register_count;
10519 register_count++;
10520
10521 /* We can't have Q pseudo registers available here, as that
10522 would mean we have NEON features, and that is only available
10523 on A and R profiles. */
10524 gdb_assert (!have_q_pseudos);
10525
10526 /* Given we have a M-profile target description, if MVE is
10527 enabled and there are VFP registers, we should have Q
10528 pseudo registers (Q0 ~ Q7). */
10529 if (have_vfp)
10530 have_q_pseudos = true;
10531 }
10532
10533 /* Do we have the ARMv8.1-m PACBTI feature? */
10534 feature = tdesc_find_feature (tdesc,
10535 "org.gnu.gdb.arm.m-profile-pacbti");
10536 if (feature != nullptr)
10537 {
10538 /* By advertising this feature, the target acknowledges the
10539 presence of the ARMv8.1-m PACBTI extensions.
10540
10541 We don't care for any particular registers in this group, so
10542 the target is free to include whatever it deems appropriate.
10543
10544 The expectation is for this feature to include the PAC
10545 keys. */
10546 have_pacbti = true;
10547 }
10548
10549 /* Do we have the Security extension? */
10550 feature = tdesc_find_feature (tdesc,
10551 "org.gnu.gdb.arm.secext");
10552 if (feature != nullptr)
10553 {
10554 /* Secure/Non-secure stack pointers. */
10555 /* MSP_NS */
10556 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10557 register_count, "msp_ns");
10558 if (!valid_p)
10559 {
10560 warning (_("M-profile secext feature is missing required register msp_ns."));
10561 return nullptr;
10562 }
10563 m_profile_msp_ns_regnum = register_count++;
10564
10565 /* PSP_NS */
10566 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10567 register_count, "psp_ns");
10568 if (!valid_p)
10569 {
10570 warning (_("M-profile secext feature is missing required register psp_ns."));
10571 return nullptr;
10572 }
10573 m_profile_psp_ns_regnum = register_count++;
10574
10575 /* MSP_S */
10576 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10577 register_count, "msp_s");
10578 if (!valid_p)
10579 {
10580 warning (_("M-profile secext feature is missing required register msp_s."));
10581 return nullptr;
10582 }
10583 m_profile_msp_s_regnum = register_count++;
10584
10585 /* PSP_S */
10586 valid_p &= tdesc_numbered_register (feature, tdesc_data.get (),
10587 register_count, "psp_s");
10588 if (!valid_p)
10589 {
10590 warning (_("M-profile secext feature is missing required register psp_s."));
10591 return nullptr;
10592 }
10593 m_profile_psp_s_regnum = register_count++;
10594
10595 have_sec_ext = true;
10596 }
10597
10598 }
10599 }
10600
10601 /* If there is already a candidate, use it. */
10602 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
10603 best_arch != NULL;
10604 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
10605 {
10606 arm_gdbarch_tdep *tdep
10607 = gdbarch_tdep<arm_gdbarch_tdep> (best_arch->gdbarch);
10608
10609 if (arm_abi != ARM_ABI_AUTO && arm_abi != tdep->arm_abi)
10610 continue;
10611
10612 if (fp_model != ARM_FLOAT_AUTO && fp_model != tdep->fp_model)
10613 continue;
10614
10615 /* There are various other properties in tdep that we do not
10616 need to check here: those derived from a target description,
10617 since gdbarches with a different target description are
10618 automatically disqualified. */
10619
10620 /* Do check is_m, though, since it might come from the binary. */
10621 if (is_m != tdep->is_m)
10622 continue;
10623
10624 /* Also check for ARMv8.1-m PACBTI support, since it might come from
10625 the binary. */
10626 if (have_pacbti != tdep->have_pacbti)
10627 continue;
10628
10629 /* Found a match. */
10630 break;
10631 }
10632
10633 if (best_arch != NULL)
10634 return best_arch->gdbarch;
10635
10636 gdbarch *gdbarch
10637 = gdbarch_alloc (&info, gdbarch_tdep_up (new arm_gdbarch_tdep));
10638 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
10639
10640 /* Record additional information about the architecture we are defining.
10641 These are gdbarch discriminators, like the OSABI. */
10642 tdep->arm_abi = arm_abi;
10643 tdep->fp_model = fp_model;
10644 tdep->is_m = is_m;
10645 tdep->have_sec_ext = have_sec_ext;
10646 tdep->have_fpa_registers = have_fpa_registers;
10647 tdep->have_wmmx_registers = have_wmmx_registers;
10648 gdb_assert (vfp_register_count == 0
10649 || vfp_register_count == 16
10650 || vfp_register_count == 32);
10651 tdep->vfp_register_count = vfp_register_count;
10652 tdep->have_s_pseudos = have_s_pseudos;
10653 tdep->have_q_pseudos = have_q_pseudos;
10654 tdep->have_neon = have_neon;
10655 tdep->tls_regnum = tls_regnum;
10656
10657 /* Adjust the MVE feature settings. */
10658 if (have_mve)
10659 {
10660 tdep->have_mve = true;
10661 tdep->mve_vpr_regnum = mve_vpr_regnum;
10662 }
10663
10664 /* Adjust the PACBTI feature settings. */
10665 tdep->have_pacbti = have_pacbti;
10666
10667 /* Adjust the M-profile stack pointers settings. */
10668 if (have_m_profile_msp)
10669 {
10670 tdep->m_profile_msp_regnum = m_profile_msp_regnum;
10671 tdep->m_profile_psp_regnum = m_profile_psp_regnum;
10672 tdep->m_profile_msp_ns_regnum = m_profile_msp_ns_regnum;
10673 tdep->m_profile_psp_ns_regnum = m_profile_psp_ns_regnum;
10674 tdep->m_profile_msp_s_regnum = m_profile_msp_s_regnum;
10675 tdep->m_profile_psp_s_regnum = m_profile_psp_s_regnum;
10676 }
10677
10678 arm_register_g_packet_guesses (gdbarch);
10679
10680 /* Breakpoints. */
10681 switch (info.byte_order_for_code)
10682 {
10683 case BFD_ENDIAN_BIG:
10684 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
10685 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
10686 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
10687 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
10688
10689 break;
10690
10691 case BFD_ENDIAN_LITTLE:
10692 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
10693 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
10694 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
10695 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
10696
10697 break;
10698
10699 default:
10700 internal_error (_("arm_gdbarch_init: bad byte order for float format"));
10701 }
10702
10703 /* On ARM targets char defaults to unsigned. */
10704 set_gdbarch_char_signed (gdbarch, 0);
10705
10706 /* wchar_t is unsigned under the AAPCS. */
10707 if (tdep->arm_abi == ARM_ABI_AAPCS)
10708 set_gdbarch_wchar_signed (gdbarch, 0);
10709 else
10710 set_gdbarch_wchar_signed (gdbarch, 1);
10711
10712 /* Compute type alignment. */
10713 set_gdbarch_type_align (gdbarch, arm_type_align);
10714
10715 /* Note: for displaced stepping, this includes the breakpoint, and one word
10716 of additional scratch space. This setting isn't used for anything besides
10717 displaced stepping at present. */
10718 set_gdbarch_displaced_step_buffer_length
10719 (gdbarch, 4 * ARM_DISPLACED_MODIFIED_INSNS);
10720 set_gdbarch_max_insn_length (gdbarch, 4);
10721
10722 /* This should be low enough for everything. */
10723 tdep->lowest_pc = 0x20;
10724 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
10725
10726 /* The default, for both APCS and AAPCS, is to return small
10727 structures in registers. */
10728 tdep->struct_return = reg_struct_return;
10729
10730 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
10731 set_gdbarch_frame_align (gdbarch, arm_frame_align);
10732
10733 if (is_m)
10734 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
10735
10736 set_gdbarch_write_pc (gdbarch, arm_write_pc);
10737
10738 frame_base_set_default (gdbarch, &arm_normal_base);
10739
10740 /* Address manipulation. */
10741 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
10742
10743 /* Advance PC across function entry code. */
10744 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
10745
10746 /* Detect whether PC is at a point where the stack has been destroyed. */
10747 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
10748
10749 /* Skip trampolines. */
10750 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
10751
10752 /* The stack grows downward. */
10753 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
10754
10755 /* Breakpoint manipulation. */
10756 set_gdbarch_breakpoint_kind_from_pc (gdbarch, arm_breakpoint_kind_from_pc);
10757 set_gdbarch_sw_breakpoint_from_kind (gdbarch, arm_sw_breakpoint_from_kind);
10758 set_gdbarch_breakpoint_kind_from_current_state (gdbarch,
10759 arm_breakpoint_kind_from_current_state);
10760
10761 /* Information about registers, etc. */
10762 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
10763 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
10764 set_gdbarch_num_regs (gdbarch, register_count);
10765 set_gdbarch_register_type (gdbarch, arm_register_type);
10766 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
10767
10768 /* This "info float" is FPA-specific. Use the generic version if we
10769 do not have FPA. */
10770 if (tdep->have_fpa_registers)
10771 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
10772
10773 /* Internal <-> external register number maps. */
10774 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
10775 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
10776
10777 set_gdbarch_register_name (gdbarch, arm_register_name);
10778
10779 /* Returning results. */
10780 set_gdbarch_return_value_as_value (gdbarch, arm_return_value);
10781
10782 /* Disassembly. */
10783 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
10784
10785 /* Minsymbol frobbing. */
10786 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
10787 set_gdbarch_coff_make_msymbol_special (gdbarch,
10788 arm_coff_make_msymbol_special);
10789 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
10790
10791 /* Thumb-2 IT block support. */
10792 set_gdbarch_adjust_breakpoint_address (gdbarch,
10793 arm_adjust_breakpoint_address);
10794
10795 /* Virtual tables. */
10796 set_gdbarch_vbit_in_delta (gdbarch, 1);
10797
10798 /* Hook in the ABI-specific overrides, if they have been registered. */
10799 gdbarch_init_osabi (info, gdbarch);
10800
10801 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
10802
10803 /* Add some default predicates. */
10804 if (is_m)
10805 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
10806 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
10807 dwarf2_append_unwinders (gdbarch);
10808 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
10809 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
10810 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
10811
10812 /* Now we have tuned the configuration, set a few final things,
10813 based on what the OS ABI has told us. */
10814
10815 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
10816 binaries are always marked. */
10817 if (tdep->arm_abi == ARM_ABI_AUTO)
10818 tdep->arm_abi = ARM_ABI_APCS;
10819
10820 /* Watchpoints are not steppable. */
10821 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
10822
10823 /* We used to default to FPA for generic ARM, but almost nobody
10824 uses that now, and we now provide a way for the user to force
10825 the model. So default to the most useful variant. */
10826 if (tdep->fp_model == ARM_FLOAT_AUTO)
10827 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
10828
10829 if (tdep->jb_pc >= 0)
10830 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
10831
10832 /* Floating point sizes and format. */
10833 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
10834 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
10835 {
10836 set_gdbarch_double_format
10837 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10838 set_gdbarch_long_double_format
10839 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
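/* The "littlebyte_bigword" format reflects how FPA code laid out doubles:
   bytes within each 32-bit word are little-endian, but the most
   significant word is stored first.  */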
10840 }
10841 else
10842 {
10843 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
10844 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
10845 }
10846
10847 /* Hook used to decorate frames with signed return addresses, only available
10848 for ARMv8.1-m PACBTI. */
10849 if (is_m && have_pacbti)
10850 set_gdbarch_get_pc_address_flags (gdbarch, arm_get_pc_address_flags);
10851
10852 if (tdesc_data != nullptr)
10853 {
10854 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
10855
10856 tdesc_use_registers (gdbarch, tdesc, std::move (tdesc_data));
10857 register_count = gdbarch_num_regs (gdbarch);
10858
10859 /* Override tdesc_register_type to adjust the types of VFP
10860 registers for NEON. */
10861 set_gdbarch_register_type (gdbarch, arm_register_type);
10862 }
10863
10864 /* Initialize the pseudo register data. */
10865 int num_pseudos = 0;
10866 if (tdep->have_s_pseudos)
10867 {
10868 /* VFP single precision pseudo registers (S0~S31). */
10869 tdep->s_pseudo_base = register_count;
10870 tdep->s_pseudo_count = 32;
10871 num_pseudos += tdep->s_pseudo_count;
10872
10873 if (tdep->have_q_pseudos)
10874 {
10875 /* NEON quad precision pseudo registers (Q0~Q15). */
10876 tdep->q_pseudo_base = register_count + num_pseudos;
10877
10878 if (have_neon)
10879 tdep->q_pseudo_count = 16;
10880 else if (have_mve)
10881 tdep->q_pseudo_count = ARM_MVE_NUM_Q_REGS;
10882
10883 num_pseudos += tdep->q_pseudo_count;
10884 }
10885 }
10886
10887 /* Do we have any MVE pseudo registers? */
10888 if (have_mve)
10889 {
10890 tdep->mve_pseudo_base = register_count + num_pseudos;
10891 tdep->mve_pseudo_count = 1;
10892 num_pseudos += tdep->mve_pseudo_count;
10893 }
10894
10895 /* Do we have any ARMv8.1-m PACBTI pseudo registers? */
10896 if (have_pacbti)
10897 {
10898 tdep->pacbti_pseudo_base = register_count + num_pseudos;
10899 tdep->pacbti_pseudo_count = 1;
10900 num_pseudos += tdep->pacbti_pseudo_count;
10901 }
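/* Note: the pseudo registers are numbered immediately after the raw
   registers, in the order S pseudos, Q pseudos, the MVE VPR pseudo and the
   PACBTI pseudo, matching the *_pseudo_base values computed above.  */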
10902
10903 /* Set some pseudo register hooks, if we have pseudo registers. */
10904 if (tdep->have_s_pseudos || have_mve || have_pacbti)
10905 {
10906 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
10907 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
10908 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
10909 }
10910
10911 /* Add standard register aliases. We add aliases even for those
10912 names which are used by the current architecture - it's simpler,
10913 and does no harm, since nothing ever lists user registers. */
10914 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
10915 user_reg_add (gdbarch, arm_register_aliases[i].name,
10916 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
10917
10918 set_gdbarch_disassembler_options (gdbarch, &arm_disassembler_options);
10919 set_gdbarch_valid_disassembler_options (gdbarch, disassembler_options_arm ());
10920
10921 set_gdbarch_gnu_triplet_regexp (gdbarch, arm_gnu_triplet_regexp);
10922
10923 return gdbarch;
10924 }
10925
10926 static void
10927 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
10928 {
10929 arm_gdbarch_tdep *tdep = gdbarch_tdep<arm_gdbarch_tdep> (gdbarch);
10930
10931 if (tdep == NULL)
10932 return;
10933
10934 gdb_printf (file, _("arm_dump_tdep: fp_model = %i\n"),
10935 (int) tdep->fp_model);
10936 gdb_printf (file, _("arm_dump_tdep: have_fpa_registers = %i\n"),
10937 (int) tdep->have_fpa_registers);
10938 gdb_printf (file, _("arm_dump_tdep: have_wmmx_registers = %i\n"),
10939 (int) tdep->have_wmmx_registers);
10940 gdb_printf (file, _("arm_dump_tdep: vfp_register_count = %i\n"),
10941 (int) tdep->vfp_register_count);
10942 gdb_printf (file, _("arm_dump_tdep: have_s_pseudos = %s\n"),
10943 tdep->have_s_pseudos ? "true" : "false");
10944 gdb_printf (file, _("arm_dump_tdep: s_pseudo_base = %i\n"),
10945 (int) tdep->s_pseudo_base);
10946 gdb_printf (file, _("arm_dump_tdep: s_pseudo_count = %i\n"),
10947 (int) tdep->s_pseudo_count);
10948 gdb_printf (file, _("arm_dump_tdep: have_q_pseudos = %s\n"),
10949 tdep->have_q_pseudos ? "true" : "false");
10950 gdb_printf (file, _("arm_dump_tdep: q_pseudo_base = %i\n"),
10951 (int) tdep->q_pseudo_base);
10952 gdb_printf (file, _("arm_dump_tdep: q_pseudo_count = %i\n"),
10953 (int) tdep->q_pseudo_count);
10954 gdb_printf (file, _("arm_dump_tdep: have_neon = %i\n"),
10955 (int) tdep->have_neon);
10956 gdb_printf (file, _("arm_dump_tdep: have_mve = %s\n"),
10957 tdep->have_mve ? "yes" : "no");
10958 gdb_printf (file, _("arm_dump_tdep: mve_vpr_regnum = %i\n"),
10959 tdep->mve_vpr_regnum);
10960 gdb_printf (file, _("arm_dump_tdep: mve_pseudo_base = %i\n"),
10961 tdep->mve_pseudo_base);
10962 gdb_printf (file, _("arm_dump_tdep: mve_pseudo_count = %i\n"),
10963 tdep->mve_pseudo_count);
10964 gdb_printf (file, _("arm_dump_tdep: m_profile_msp_regnum = %i\n"),
10965 tdep->m_profile_msp_regnum);
10966 gdb_printf (file, _("arm_dump_tdep: m_profile_psp_regnum = %i\n"),
10967 tdep->m_profile_psp_regnum);
10968 gdb_printf (file, _("arm_dump_tdep: m_profile_msp_ns_regnum = %i\n"),
10969 tdep->m_profile_msp_ns_regnum);
10970 gdb_printf (file, _("arm_dump_tdep: m_profile_psp_ns_regnum = %i\n"),
10971 tdep->m_profile_psp_ns_regnum);
10972 gdb_printf (file, _("arm_dump_tdep: m_profile_msp_s_regnum = %i\n"),
10973 tdep->m_profile_msp_s_regnum);
10974 gdb_printf (file, _("arm_dump_tdep: m_profile_psp_s_regnum = %i\n"),
10975 tdep->m_profile_psp_s_regnum);
10976 gdb_printf (file, _("arm_dump_tdep: Lowest pc = 0x%lx\n"),
10977 (unsigned long) tdep->lowest_pc);
10978 gdb_printf (file, _("arm_dump_tdep: have_pacbti = %s\n"),
10979 tdep->have_pacbti ? "yes" : "no");
10980 gdb_printf (file, _("arm_dump_tdep: pacbti_pseudo_base = %i\n"),
10981 tdep->pacbti_pseudo_base);
10982 gdb_printf (file, _("arm_dump_tdep: pacbti_pseudo_count = %i\n"),
10983 tdep->pacbti_pseudo_count);
10984 gdb_printf (file, _("arm_dump_tdep: is_m = %s\n"),
10985 tdep->is_m ? "yes" : "no");
10986 }
10987
10988 #if GDB_SELF_TEST
10989 namespace selftests
10990 {
10991 static void arm_record_test (void);
10992 static void arm_analyze_prologue_test ();
10993 }
10994 #endif
10995
10996 void _initialize_arm_tdep ();
10997 void
10998 _initialize_arm_tdep ()
10999 {
11000 long length;
11001 int i, j;
11002 char regdesc[1024], *rdptr = regdesc;
11003 size_t rest = sizeof (regdesc);
11004
11005 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
11006
11007 /* Add ourselves to objfile event chain. */
11008 gdb::observers::new_objfile.attach (arm_exidx_new_objfile, "arm-tdep");
11009
11010 /* Register an ELF OS ABI sniffer for ARM binaries. */
11011 gdbarch_register_osabi_sniffer (bfd_arch_arm,
11012 bfd_target_elf_flavour,
11013 arm_elf_osabi_sniffer);
11014
11015 /* Add root prefix command for all "set arm"/"show arm" commands. */
11016 add_setshow_prefix_cmd ("arm", no_class,
11017 _("Various ARM-specific commands."),
11018 _("Various ARM-specific commands."),
11019 &setarmcmdlist, &showarmcmdlist,
11020 &setlist, &showlist);
11021
11022 arm_disassembler_options = xstrdup ("reg-names-std");
11023 const disasm_options_t *disasm_options
11024 = &disassembler_options_arm ()->options;
11025 int num_disassembly_styles = 0;
11026 for (i = 0; disasm_options->name[i] != NULL; i++)
11027 if (startswith (disasm_options->name[i], "reg-names-"))
11028 num_disassembly_styles++;
11029
11030 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
11031 valid_disassembly_styles = XNEWVEC (const char *,
11032 num_disassembly_styles + 1);
11033 for (i = j = 0; disasm_options->name[i] != NULL; i++)
11034 if (startswith (disasm_options->name[i], "reg-names-"))
11035 {
11036 size_t offset = strlen ("reg-names-");
11037 const char *style = disasm_options->name[i];
11038 valid_disassembly_styles[j++] = &style[offset];
11039 if (strcmp (&style[offset], "std") == 0)
11040 disassembly_style = &style[offset];
11041 length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
11042 disasm_options->description[i]);
11043 rdptr += length;
11044 rest -= length;
11045 }
11046 /* Mark the end of valid options. */
11047 valid_disassembly_styles[num_disassembly_styles] = NULL;
11048
11049 /* Create the help text. */
11050 std::string helptext = string_printf ("%s%s%s",
11051 _("The valid values are:\n"),
11052 regdesc,
11053 _("The default is \"std\"."));
11054
11055 add_setshow_enum_cmd ("disassembler", no_class,
11056 valid_disassembly_styles, &disassembly_style,
11057 _("Set the disassembly style."),
11058 _("Show the disassembly style."),
11059 helptext.c_str (),
11060 set_disassembly_style_sfunc,
11061 show_disassembly_style_sfunc,
11062 &setarmcmdlist, &showarmcmdlist);
11063
11064 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
11065 _("Set usage of ARM 32-bit mode."),
11066 _("Show usage of ARM 32-bit mode."),
11067 _("When off, a 26-bit PC will be used."),
11068 NULL,
11069 NULL, /* FIXME: i18n: Usage of ARM 32-bit
11070 mode is %s. */
11071 &setarmcmdlist, &showarmcmdlist);
11072
11073 /* Add a command to allow the user to force the FPU model. */
11074 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
11075 _("Set the floating point type."),
11076 _("Show the floating point type."),
11077 _("auto - Determine the FP typefrom the OS-ABI.\n\
11078 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
11079 fpa - FPA co-processor (GCC compiled).\n\
11080 softvfp - Software FP with pure-endian doubles.\n\
11081 vfp - VFP co-processor."),
11082 set_fp_model_sfunc, show_fp_model,
11083 &setarmcmdlist, &showarmcmdlist);
11084
11085 /* Add a command to allow the user to force the ABI. */
11086 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
11087 _("Set the ABI."),
11088 _("Show the ABI."),
11089 NULL, arm_set_abi, arm_show_abi,
11090 &setarmcmdlist, &showarmcmdlist);
11091
11092 /* Add two commands to allow the user to force the assumed
11093 execution mode. */
11094 add_setshow_enum_cmd ("fallback-mode", class_support,
11095 arm_mode_strings, &arm_fallback_mode_string,
11096 _("Set the mode assumed when symbols are unavailable."),
11097 _("Show the mode assumed when symbols are unavailable."),
11098 NULL, NULL, arm_show_fallback_mode,
11099 &setarmcmdlist, &showarmcmdlist);
11100 add_setshow_enum_cmd ("force-mode", class_support,
11101 arm_mode_strings, &arm_force_mode_string,
11102 _("Set the mode assumed even when symbols are available."),
11103 _("Show the mode assumed even when symbols are available."),
11104 NULL, NULL, arm_show_force_mode,
11105 &setarmcmdlist, &showarmcmdlist);
11106
11107 /* Add a command to stop triggering security exceptions when
11108 unwinding exception stacks. */
11109 add_setshow_boolean_cmd ("unwind-secure-frames", no_class, &arm_unwind_secure_frames,
11110 _("Set usage of non-secure to secure exception stack unwinding."),
11111 _("Show usage of non-secure to secure exception stack unwinding."),
11112 _("When on, the debugger can trigger memory access traps."),
11113 NULL, arm_show_unwind_secure_frames,
11114 &setarmcmdlist, &showarmcmdlist);
11115
11116 /* Debugging flag. */
11117 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
11118 _("Set ARM debugging."),
11119 _("Show ARM debugging."),
11120 _("When on, arm-specific debugging is enabled."),
11121 NULL,
11122 NULL, /* FIXME: i18n: "ARM debugging is %s." */
11123 &setdebuglist, &showdebuglist);
11124
11125 #if GDB_SELF_TEST
11126 selftests::register_test ("arm-record", selftests::arm_record_test);
11127 selftests::register_test ("arm_analyze_prologue", selftests::arm_analyze_prologue_test);
11128 #endif
11129
11130 }
11131
11132 /* ARM-reversible process record data structures. */
11133
11134 #define ARM_INSN_SIZE_BYTES 4
11135 #define THUMB_INSN_SIZE_BYTES 2
11136 #define THUMB2_INSN_SIZE_BYTES 4
11137
11138
11139 /* Position of the bit within a 32-bit ARM instruction
11140 that defines whether the instruction is a load or store. */
11141 #define INSN_S_L_BIT_NUM 20
11142
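/* Allocate REGS and copy LENGTH register numbers into it from RECORD_BUF;
   a no-op when LENGTH is zero.  */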
11143 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
11144 do \
11145 { \
11146 unsigned int reg_len = LENGTH; \
11147 if (reg_len) \
11148 { \
11149 REGS = XNEWVEC (uint32_t, reg_len); \
11150 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
11151 } \
11152 } \
11153 while (0)
11154
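/* Likewise for memory records: allocate MEMS and copy LENGTH arm_mem_r
   entries (length/address pairs) into it from RECORD_BUF; a no-op when
   LENGTH is zero.  */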
11155 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
11156 do \
11157 { \
11158 unsigned int mem_len = LENGTH; \
11159 if (mem_len) \
11160 { \
11161 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
11162 memcpy(&MEMS->len, &RECORD_BUF[0], \
11163 sizeof(struct arm_mem_r) * LENGTH); \
11164 } \
11165 } \
11166 while (0)
11167
11168 /* Checks whether the insn has already been recorded, i.e. whether it has any register or memory records (boolean expression). */
11169 #define INSN_RECORDED(ARM_RECORD) \
11170 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
11171
11172 /* ARM memory record structure. */
11173 struct arm_mem_r
11174 {
11175 uint32_t len; /* Record length. */
11176 uint32_t addr; /* Memory address. */
11177 };
11178
11179 /* An ARM instruction record contains the opcode of the current insn
11180 and the execution state (before entry to decode_insn()), plus the
11181 list of to-be-modified registers and
11182 memory blocks (on return from decode_insn()). */
11183
11184 struct arm_insn_decode_record
11185 {
11186 struct gdbarch *gdbarch;
11187 struct regcache *regcache;
11188 CORE_ADDR this_addr; /* Address of the insn being decoded. */
11189 uint32_t arm_insn; /* Should accommodate thumb. */
11190 uint32_t cond; /* Condition code. */
11191 uint32_t opcode; /* Insn opcode. */
11192 uint32_t decode; /* Insn decode bits. */
11193 uint32_t mem_rec_count; /* No of mem records. */
11194 uint32_t reg_rec_count; /* No of reg records. */
11195 uint32_t *arm_regs; /* Registers to be saved for this record. */
11196 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
11197 };
11198
11199
11200 /* Check the ARM SBZ ("should be zero") and SBO ("should be one") mandatory fields: the LEN bits of INSN starting at 1-based position BIT_NUM must be all zero (SBO == 0) or all one (SBO != 0). */
11201
11202 static int
11203 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
11204 {
11205 uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));
11206
11207 if (!len)
11208 return 1;
11209
11210 if (!sbo)
11211 ones = ~ones;
11212
11213 while (ones)
11214 {
11215 if (!(ones & sbo))
11216 {
11217 return 0;
11218 }
11219 ones = ones >> 1;
11220 }
11221 return 1;
11222 }
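/* For example, the call sbo_sbz (insn, 17, 4, 1) used below checks that the
   four bits of INSN starting at bit 16 (1-based position 17) are all ones.  */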
11223
11224 enum arm_record_result
11225 {
11226 ARM_RECORD_SUCCESS = 0,
11227 ARM_RECORD_FAILURE = 1
11228 };
11229
11230 enum arm_record_strx_t
11231 {
11232 ARM_RECORD_STRH=1,
11233 ARM_RECORD_STRD
11234 };
11235
11236 enum record_type_t
11237 {
11238 ARM_RECORD=1,
11239 THUMB_RECORD,
11240 THUMB2_RECORD
11241 };
11242
11243
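/* Record the side effects of an STRH/STRD class store: fill RECORD_BUF with
   the registers and RECORD_BUF_MEM with the length/address pairs of the
   memory that the insn described by ARM_INSN_R will modify.  STR_TYPE selects
   halfword (ARM_RECORD_STRH) or doubleword (ARM_RECORD_STRD) handling.  */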
11244 static int
11245 arm_record_strx (arm_insn_decode_record *arm_insn_r, uint32_t *record_buf,
11246 uint32_t *record_buf_mem, arm_record_strx_t str_type)
11247 {
11248
11249 struct regcache *reg_cache = arm_insn_r->regcache;
11250 ULONGEST u_regval[2]= {0};
11251
11252 uint32_t reg_src1 = 0, reg_src2 = 0;
11253 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
11254
11255 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11256 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
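/* OPCODE now holds insn bits 21-24, i.e. the W, I, U and P bits of the
   extra load/store encoding; the cases below use them to select the
   addressing mode.  */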
11257
11258 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11259 {
11260 /* 1) Handle misc store, immediate offset. */
11261 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
11262 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
11263 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11264 regcache_raw_read_unsigned (reg_cache, reg_src1,
11265 &u_regval[0]);
11266 if (ARM_PC_REGNUM == reg_src1)
11267 {
11268 /* If R15 was used as Rn, the value read is the current PC + 8. */
11269 u_regval[0] = u_regval[0] + 8;
11270 }
11271 offset_8 = (immed_high << 4) | immed_low;
11272 /* Calculate target store address. */
11273 if (14 == arm_insn_r->opcode)
11274 {
11275 tgt_mem_addr = u_regval[0] + offset_8;
11276 }
11277 else
11278 {
11279 tgt_mem_addr = u_regval[0] - offset_8;
11280 }
11281 if (ARM_RECORD_STRH == str_type)
11282 {
11283 record_buf_mem[0] = 2;
11284 record_buf_mem[1] = tgt_mem_addr;
11285 arm_insn_r->mem_rec_count = 1;
11286 }
11287 else if (ARM_RECORD_STRD == str_type)
11288 {
11289 record_buf_mem[0] = 4;
11290 record_buf_mem[1] = tgt_mem_addr;
11291 record_buf_mem[2] = 4;
11292 record_buf_mem[3] = tgt_mem_addr + 4;
11293 arm_insn_r->mem_rec_count = 2;
11294 }
11295 }
11296 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
11297 {
11298 /* 2) Store, register offset. */
11299 /* Get Rm. */
11300 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11301 /* Get Rn. */
11302 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11303 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11304 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11305 if (15 == reg_src2)
11306 {
11307 /* If R15 was used as Rn, the value read is the current PC + 8. */
11308 u_regval[0] = u_regval[0] + 8;
11309 }
11310 /* Calculate target store address, Rn +/- Rm, register offset. */
11311 if (12 == arm_insn_r->opcode)
11312 {
11313 tgt_mem_addr = u_regval[0] + u_regval[1];
11314 }
11315 else
11316 {
11317 tgt_mem_addr = u_regval[1] - u_regval[0];
11318 }
11319 if (ARM_RECORD_STRH == str_type)
11320 {
11321 record_buf_mem[0] = 2;
11322 record_buf_mem[1] = tgt_mem_addr;
11323 arm_insn_r->mem_rec_count = 1;
11324 }
11325 else if (ARM_RECORD_STRD == str_type)
11326 {
11327 record_buf_mem[0] = 4;
11328 record_buf_mem[1] = tgt_mem_addr;
11329 record_buf_mem[2] = 4;
11330 record_buf_mem[3] = tgt_mem_addr + 4;
11331 arm_insn_r->mem_rec_count = 2;
11332 }
11333 }
11334 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11335 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
11336 {
11337 /* 3) Store, immediate pre-indexed. */
11338 /* 5) Store, immediate post-indexed. */
11339 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
11340 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
11341 offset_8 = (immed_high << 4) | immed_low;
11342 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11343 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11344 /* Calculate target store address, Rn +/- offset_8, immediate offset. */
11345 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
11346 {
11347 tgt_mem_addr = u_regval[0] + offset_8;
11348 }
11349 else
11350 {
11351 tgt_mem_addr = u_regval[0] - offset_8;
11352 }
11353 if (ARM_RECORD_STRH == str_type)
11354 {
11355 record_buf_mem[0] = 2;
11356 record_buf_mem[1] = tgt_mem_addr;
11357 arm_insn_r->mem_rec_count = 1;
11358 }
11359 else if (ARM_RECORD_STRD == str_type)
11360 {
11361 record_buf_mem[0] = 4;
11362 record_buf_mem[1] = tgt_mem_addr;
11363 record_buf_mem[2] = 4;
11364 record_buf_mem[3] = tgt_mem_addr + 4;
11365 arm_insn_r->mem_rec_count = 2;
11366 }
11367 /* Record Rn also as it changes. */
11368 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
11369 arm_insn_r->reg_rec_count = 1;
11370 }
11371 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
11372 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
11373 {
11374 /* 4) Store, register pre-indexed. */
11375 /* 6) Store, register post-indexed. */
11376 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11377 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11378 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11379 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11380 /* Calculate target store address, Rn +/- Rm, register offset. */
11381 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
11382 {
11383 tgt_mem_addr = u_regval[0] + u_regval[1];
11384 }
11385 else
11386 {
11387 tgt_mem_addr = u_regval[1] - u_regval[0];
11388 }
11389 if (ARM_RECORD_STRH == str_type)
11390 {
11391 record_buf_mem[0] = 2;
11392 record_buf_mem[1] = tgt_mem_addr;
11393 arm_insn_r->mem_rec_count = 1;
11394 }
11395 else if (ARM_RECORD_STRD == str_type)
11396 {
11397 record_buf_mem[0] = 4;
11398 record_buf_mem[1] = tgt_mem_addr;
11399 record_buf_mem[2] = 4;
11400 record_buf_mem[3] = tgt_mem_addr + 4;
11401 arm_insn_r->mem_rec_count = 2;
11402 }
11403 /* Record Rn also as it changes. */
11404 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
11405 arm_insn_r->reg_rec_count = 1;
11406 }
11407 return 0;
11408 }
11409
11410 /* Handling ARM extension space insns. */
11411
11412 static int
11413 arm_record_extension_space (arm_insn_decode_record *arm_insn_r)
11414 {
11415 int ret = 0; /* Return value: -1: record failure; 0: success. */
11416 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
11417 uint32_t record_buf[8], record_buf_mem[8];
11418 uint32_t reg_src1 = 0;
11419 struct regcache *reg_cache = arm_insn_r->regcache;
11420 ULONGEST u_regval = 0;
11421
11422 gdb_assert (!INSN_RECORDED(arm_insn_r));
11423 /* Handle unconditional insn extension space. */
11424
11425 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
11426 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
11427 if (arm_insn_r->cond)
11428 {
11429 /* PLD has no effect on architectural state, it just affects
11430 the caches. */
11431 if (5 == ((opcode1 & 0xE0) >> 5))
11432 {
11433 /* BLX(1) */
11434 record_buf[0] = ARM_PS_REGNUM;
11435 record_buf[1] = ARM_LR_REGNUM;
11436 arm_insn_r->reg_rec_count = 2;
11437 }
11438 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
11439 }
11440
11441
11442 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
11443 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
11444 {
11445 ret = -1;
11446 /* Undefined instruction on ARM V5; need to handle if later
11447 versions define it. */
11448 }
11449
11450 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
11451 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
11452 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
11453
11454 /* Handle arithmetic insn extension space. */
11455 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
11456 && !INSN_RECORDED(arm_insn_r))
11457 {
11458 /* Handle MLA(S) and MUL(S). */
11459 if (in_inclusive_range (insn_op1, 0U, 3U))
11460 {
11461 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11462 record_buf[1] = ARM_PS_REGNUM;
11463 arm_insn_r->reg_rec_count = 2;
11464 }
11465 else if (in_inclusive_range (insn_op1, 4U, 15U))
11466 {
11467 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
11468 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11469 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11470 record_buf[2] = ARM_PS_REGNUM;
11471 arm_insn_r->reg_rec_count = 3;
11472 }
11473 }
11474
11475 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
11476 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
11477 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
11478
11479 /* Handle control insn extension space. */
11480
11481 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
11482 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
11483 {
11484 if (!bit (arm_insn_r->arm_insn,25))
11485 {
11486 if (!bits (arm_insn_r->arm_insn, 4, 7))
11487 {
11488 if ((0 == insn_op1) || (2 == insn_op1))
11489 {
11490 /* MRS. */
11491 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11492 arm_insn_r->reg_rec_count = 1;
11493 }
11494 else if (1 == insn_op1)
11495 {
11496 /* CPSR is going to be changed. */
11497 record_buf[0] = ARM_PS_REGNUM;
11498 arm_insn_r->reg_rec_count = 1;
11499 }
11500 else if (3 == insn_op1)
11501 {
11502 /* SPSR is going to be changed. */
11503 /* We need to get SPSR value, which is yet to be done. */
11504 return -1;
11505 }
11506 }
11507 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
11508 {
11509 if (1 == insn_op1)
11510 {
11511 /* BX. */
11512 record_buf[0] = ARM_PS_REGNUM;
11513 arm_insn_r->reg_rec_count = 1;
11514 }
11515 else if (3 == insn_op1)
11516 {
11517 /* CLZ. */
11518 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11519 arm_insn_r->reg_rec_count = 1;
11520 }
11521 }
11522 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
11523 {
11524 /* BLX. */
11525 record_buf[0] = ARM_PS_REGNUM;
11526 record_buf[1] = ARM_LR_REGNUM;
11527 arm_insn_r->reg_rec_count = 2;
11528 }
11529 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
11530 {
11531 /* QADD, QSUB, QDADD, QDSUB */
11532 record_buf[0] = ARM_PS_REGNUM;
11533 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11534 arm_insn_r->reg_rec_count = 2;
11535 }
11536 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
11537 {
11538 /* BKPT. */
11539 record_buf[0] = ARM_PS_REGNUM;
11540 record_buf[1] = ARM_LR_REGNUM;
11541 arm_insn_r->reg_rec_count = 2;
11542
11543 /* Save SPSR also; how? */
11544 return -1;
11545 }
11546 else if (8 == bits (arm_insn_r->arm_insn, 4, 7)
11547 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
11548 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
11549 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
11550 )
11551 {
11552 if (0 == insn_op1 || 1 == insn_op1)
11553 {
11554 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
11555 /* We don't do the optimization for SMULW<y>, where we
11556 would need only Rd. */
11557 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11558 record_buf[1] = ARM_PS_REGNUM;
11559 arm_insn_r->reg_rec_count = 2;
11560 }
11561 else if (2 == insn_op1)
11562 {
11563 /* SMLAL<x><y>. */
11564 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11565 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11566 arm_insn_r->reg_rec_count = 2;
11567 }
11568 else if (3 == insn_op1)
11569 {
11570 /* SMUL<x><y>. */
11571 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11572 arm_insn_r->reg_rec_count = 1;
11573 }
11574 }
11575 }
11576 else
11577 {
11578 /* MSR : immediate form. */
11579 if (1 == insn_op1)
11580 {
11581 /* CPSR is going to be changed. */
11582 record_buf[0] = ARM_PS_REGNUM;
11583 arm_insn_r->reg_rec_count = 1;
11584 }
11585 else if (3 == insn_op1)
11586 {
11587 /* SPSR is going to be changed. */
11588 /* We need to get the SPSR value, which is yet to be done. */
11589 return -1;
11590 }
11591 }
11592 }
11593
11594 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
11595 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
11596 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
11597
11598 /* Handle load/store insn extension space. */
11599
11600 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
11601 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
11602 && !INSN_RECORDED(arm_insn_r))
11603 {
11604 /* SWP/SWPB. */
11605 if (0 == insn_op1)
11606 {
11607 /* These insns change both registers and memory. */
11608 /* SWP or SWPB insn. */
11609 /* Get memory address given by Rn. */
11610 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11611 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11612 /* SWP insn: swaps a word. */
11613 if (8 == arm_insn_r->opcode)
11614 {
11615 record_buf_mem[0] = 4;
11616 }
11617 else
11618 {
11619 /* SWPB insn, swaps only byte. */
11620 record_buf_mem[0] = 1;
11621 }
11622 record_buf_mem[1] = u_regval;
11623 arm_insn_r->mem_rec_count = 1;
11624 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11625 arm_insn_r->reg_rec_count = 1;
11626 }
11627 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11628 {
11629 /* STRH. */
11630 arm_record_strx (arm_insn_r, &record_buf[0], &record_buf_mem[0],
11631 ARM_RECORD_STRH);
11632 }
11633 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11634 {
11635 /* LDRD. */
11636 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11637 record_buf[1] = record_buf[0] + 1;
11638 arm_insn_r->reg_rec_count = 2;
11639 }
11640 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11641 {
11642 /* STRD. */
11643 arm_record_strx (arm_insn_r, &record_buf[0], &record_buf_mem[0],
11644 ARM_RECORD_STRD);
11645 }
11646 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
11647 {
11648 /* LDRH, LDRSB, LDRSH. */
11649 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11650 arm_insn_r->reg_rec_count = 1;
11651 }
11652
11653 }
11654
11655 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
11656 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
11657 && !INSN_RECORDED(arm_insn_r))
11658 {
11659 ret = -1;
11660 /* Handle coprocessor insn extension space. */
11661 }
11662
11663 /* To be done for ARMv5 and later; as of now we return -1. */
11664 if (-1 == ret)
11665 return ret;
11666
11667 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11668 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11669
11670 return ret;
11671 }
11672
11673 /* Handling opcode 000 insns. */
11674
11675 static int
11676 arm_record_data_proc_misc_ld_str (arm_insn_decode_record *arm_insn_r)
11677 {
11678 struct regcache *reg_cache = arm_insn_r->regcache;
11679 uint32_t record_buf[8], record_buf_mem[8];
11680 ULONGEST u_regval[2] = {0};
11681
11682 uint32_t reg_src1 = 0;
11683 uint32_t opcode1 = 0;
11684
11685 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11686 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11687 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
11688
11689 if (!((opcode1 & 0x19) == 0x10))
11690 {
11691 /* Data-processing (register) and Data-processing (register-shifted
11692 register). */
11693 /* In all 11 shifter-operand modes the insn modifies the destination
11694 register, which is specified by bits 12-15. */
11695 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11696 record_buf[1] = ARM_PS_REGNUM;
11697 arm_insn_r->reg_rec_count = 2;
11698 }
11699 else if ((arm_insn_r->decode < 8) && ((opcode1 & 0x19) == 0x10))
11700 {
11701 /* Miscellaneous instructions */
11702
11703 if (3 == arm_insn_r->decode && 0x12 == opcode1
11704 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11705 {
11706 /* Handle BLX, branch and link/exchange. */
11707 if (9 == arm_insn_r->opcode)
11708 {
11709 /* Branch mode is chosen by setting the T bit of CPSR from bit[0] of Rm,
11710 and R14 stores the return address. */
11711 record_buf[0] = ARM_PS_REGNUM;
11712 record_buf[1] = ARM_LR_REGNUM;
11713 arm_insn_r->reg_rec_count = 2;
11714 }
11715 }
11716 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
11717 {
11718 /* Handle enhanced software breakpoint insn, BKPT. */
11719 /* CPSR is changed so that execution continues in ARM state, with
11720 normal interrupts disabled, entering abort mode. */
11721 /* The PC is set according to the high vector configuration. */
11722 /* If the user hits the breakpoint and then types reverse, we
11723 need to go back with the previous CPSR and
11724 Program Counter. */
11725 record_buf[0] = ARM_PS_REGNUM;
11726 record_buf[1] = ARM_LR_REGNUM;
11727 arm_insn_r->reg_rec_count = 2;
11728
11729 /* Save SPSR also; how? */
11730 return -1;
11731 }
11732 else if (1 == arm_insn_r->decode && 0x12 == opcode1
11733 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11734 {
11735 /* Handle BX, branch and link/exchange. */
11736 /* Branch mode is chosen by setting the T bit of CPSR from bit[0] of Rm. */
11737 record_buf[0] = ARM_PS_REGNUM;
11738 arm_insn_r->reg_rec_count = 1;
11739 }
11740 else if (1 == arm_insn_r->decode && 0x16 == opcode1
11741 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
11742 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
11743 {
11744 /* Count leading zeros: CLZ. */
11745 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11746 arm_insn_r->reg_rec_count = 1;
11747 }
11748 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11749 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11750 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
11751 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0))
11752 {
11753 /* Handle MRS insn. */
11754 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11755 arm_insn_r->reg_rec_count = 1;
11756 }
11757 }
11758 else if (9 == arm_insn_r->decode && opcode1 < 0x10)
11759 {
11760 /* Multiply and multiply-accumulate */
11761
11762 /* Handle multiply instructions. */
11763 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
11764 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
11765 {
11766 /* Handle MLA and MUL. */
11767 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11768 record_buf[1] = ARM_PS_REGNUM;
11769 arm_insn_r->reg_rec_count = 2;
11770 }
11771 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11772 {
11773 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
11774 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11775 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11776 record_buf[2] = ARM_PS_REGNUM;
11777 arm_insn_r->reg_rec_count = 3;
11778 }
11779 }
11780 else if (9 == arm_insn_r->decode && opcode1 > 0x10)
11781 {
11782 /* Synchronization primitives */
11783
11784 /* Handling SWP, SWPB. */
11785 /* These insns change both registers and memory. */
11786 /* SWP or SWPB insn. */
11787
11788 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11789 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11790 /* SWP insn: swaps a word. */
11791 if (8 == arm_insn_r->opcode)
11792 {
11793 record_buf_mem[0] = 4;
11794 }
11795 else
11796 {
11797 /* SWPB insn, swaps only byte. */
11798 record_buf_mem[0] = 1;
11799 }
11800 record_buf_mem[1] = u_regval[0];
11801 arm_insn_r->mem_rec_count = 1;
11802 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11803 arm_insn_r->reg_rec_count = 1;
11804 }
11805 else if (11 == arm_insn_r->decode || 13 == arm_insn_r->decode
11806 || 15 == arm_insn_r->decode)
11807 {
11808 if ((opcode1 & 0x12) == 2)
11809 {
11810 /* Extra load/store (unprivileged) */
11811 return -1;
11812 }
11813 else
11814 {
11815 /* Extra load/store */
11816 switch (bits (arm_insn_r->arm_insn, 5, 6))
11817 {
11818 case 1:
11819 if ((opcode1 & 0x05) == 0x0 || (opcode1 & 0x05) == 0x4)
11820 {
11821 /* STRH (register), STRH (immediate) */
11822 arm_record_strx (arm_insn_r, &record_buf[0],
11823 &record_buf_mem[0], ARM_RECORD_STRH);
11824 }
11825 else if ((opcode1 & 0x05) == 0x1)
11826 {
11827 /* LDRH (register) */
11828 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11829 arm_insn_r->reg_rec_count = 1;
11830
11831 if (bit (arm_insn_r->arm_insn, 21))
11832 {
11833 /* Write back to Rn. */
11834 record_buf[arm_insn_r->reg_rec_count++]
11835 = bits (arm_insn_r->arm_insn, 16, 19);
11836 }
11837 }
11838 else if ((opcode1 & 0x05) == 0x5)
11839 {
11840 /* LDRH (immediate), LDRH (literal) */
11841 int rn = bits (arm_insn_r->arm_insn, 16, 19);
11842
11843 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11844 arm_insn_r->reg_rec_count = 1;
11845
11846 if (rn != 15)
11847 {
11848 /* LDRH (immediate) */
11849 if (bit (arm_insn_r->arm_insn, 21))
11850 {
11851 /* Write back to Rn. */
11852 record_buf[arm_insn_r->reg_rec_count++] = rn;
11853 }
11854 }
11855 }
11856 else
11857 return -1;
11858 break;
11859 case 2:
11860 if ((opcode1 & 0x05) == 0x0)
11861 {
11862 /* LDRD (register) */
11863 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11864 record_buf[1] = record_buf[0] + 1;
11865 arm_insn_r->reg_rec_count = 2;
11866
11867 if (bit (arm_insn_r->arm_insn, 21))
11868 {
11869 /* Write back to Rn. */
11870 record_buf[arm_insn_r->reg_rec_count++]
11871 = bits (arm_insn_r->arm_insn, 16, 19);
11872 }
11873 }
11874 else if ((opcode1 & 0x05) == 0x1)
11875 {
11876 /* LDRSB (register) */
11877 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11878 arm_insn_r->reg_rec_count = 1;
11879
11880 if (bit (arm_insn_r->arm_insn, 21))
11881 {
11882 /* Write back to Rn. */
11883 record_buf[arm_insn_r->reg_rec_count++]
11884 = bits (arm_insn_r->arm_insn, 16, 19);
11885 }
11886 }
11887 else if ((opcode1 & 0x05) == 0x4 || (opcode1 & 0x05) == 0x5)
11888 {
11889 /* LDRD (immediate), LDRD (literal), LDRSB (immediate),
11890 LDRSB (literal) */
11891 int rn = bits (arm_insn_r->arm_insn, 16, 19);
11892
11893 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11894 arm_insn_r->reg_rec_count = 1;
11895
11896 if (rn != 15)
11897 {
11898 /* LDRD (immediate), LDRSB (immediate) */
11899 if (bit (arm_insn_r->arm_insn, 21))
11900 {
11901 /* Write back to Rn. */
11902 record_buf[arm_insn_r->reg_rec_count++] = rn;
11903 }
11904 }
11905 }
11906 else
11907 return -1;
11908 break;
11909 case 3:
11910 if ((opcode1 & 0x05) == 0x0)
11911 {
11912 /* STRD (register) */
11913 arm_record_strx (arm_insn_r, &record_buf[0],
11914 &record_buf_mem[0], ARM_RECORD_STRD);
11915 }
11916 else if ((opcode1 & 0x05) == 0x1)
11917 {
11918 /* LDRSH (register) */
11919 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11920 arm_insn_r->reg_rec_count = 1;
11921
11922 if (bit (arm_insn_r->arm_insn, 21))
11923 {
11924 /* Write back to Rn. */
11925 record_buf[arm_insn_r->reg_rec_count++]
11926 = bits (arm_insn_r->arm_insn, 16, 19);
11927 }
11928 }
11929 else if ((opcode1 & 0x05) == 0x4)
11930 {
11931 /* STRD (immediate) */
11932 arm_record_strx (arm_insn_r, &record_buf[0],
11933 &record_buf_mem[0], ARM_RECORD_STRD);
11934 }
11935 else if ((opcode1 & 0x05) == 0x5)
11936 {
11937 /* LDRSH (immediate), LDRSH (literal) */
11938 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11939 arm_insn_r->reg_rec_count = 1;
11940
11941 if (bit (arm_insn_r->arm_insn, 21))
11942 {
11943 /* Write back to Rn. */
11944 record_buf[arm_insn_r->reg_rec_count++]
11945 = bits (arm_insn_r->arm_insn, 16, 19);
11946 }
11947 }
11948 else
11949 return -1;
11950 break;
11951 default:
11952 return -1;
11953 }
11954 }
11955 }
11956 else
11957 {
11958 return -1;
11959 }
11960
11961 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11962 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11963 return 0;
11964 }
11965
11966 /* Handling opcode 001 insns. */
11967
11968 static int
11969 arm_record_data_proc_imm (arm_insn_decode_record *arm_insn_r)
11970 {
11971 uint32_t record_buf[8], record_buf_mem[8];
11972
11973 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11974 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11975
11976 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11977 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
11978 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11979 )
11980 {
11981 /* Handle MSR insn. */
11982 if (9 == arm_insn_r->opcode)
11983 {
11984 /* CPSR is going to be changed. */
11985 record_buf[0] = ARM_PS_REGNUM;
11986 arm_insn_r->reg_rec_count = 1;
11987 }
11988 else
11989 {
11990 /* SPSR is going to be changed. */
11991 }
11992 }
11993 else if (arm_insn_r->opcode <= 15)
11994 {
11995 /* Normal data processing insns. */
11996 /* In all 11 shifter-operand modes the insn modifies the destination
11997 register, which is specified by bits 12-15. */
11998 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11999 record_buf[1] = ARM_PS_REGNUM;
12000 arm_insn_r->reg_rec_count = 2;
12001 }
12002 else
12003 {
12004 return -1;
12005 }
12006
12007 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12008 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
12009 return 0;
12010 }
12011
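/* Record handler for the ARM media instruction space (instructions with
   bit 4 set that reach here from the opcode 011 decoder): parallel
   add/subtract, packing/saturation/reversal, signed multiplies and
   bit-field insns.  Only destination registers are recorded; these insns
   do not write memory.  */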
12012 static int
12013 arm_record_media (arm_insn_decode_record *arm_insn_r)
12014 {
12015 uint32_t record_buf[8];
12016
12017 switch (bits (arm_insn_r->arm_insn, 22, 24))
12018 {
12019 case 0:
12020 /* Parallel addition and subtraction, signed */
12021 case 1:
12022 /* Parallel addition and subtraction, unsigned */
12023 case 2:
12024 case 3:
12025 /* Packing, unpacking, saturation and reversal */
12026 {
12027 int rd = bits (arm_insn_r->arm_insn, 12, 15);
12028
12029 record_buf[arm_insn_r->reg_rec_count++] = rd;
12030 }
12031 break;
12032
12033 case 4:
12034 case 5:
12035 /* Signed multiplies */
12036 {
12037 int rd = bits (arm_insn_r->arm_insn, 16, 19);
12038 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
12039
12040 record_buf[arm_insn_r->reg_rec_count++] = rd;
12041 if (op1 == 0x0)
12042 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
12043 else if (op1 == 0x4)
12044 record_buf[arm_insn_r->reg_rec_count++]
12045 = bits (arm_insn_r->arm_insn, 12, 15);
12046 }
12047 break;
12048
12049 case 6:
12050 {
12051 if (bit (arm_insn_r->arm_insn, 21)
12052 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
12053 {
12054 /* SBFX */
12055 record_buf[arm_insn_r->reg_rec_count++]
12056 = bits (arm_insn_r->arm_insn, 12, 15);
12057 }
12058 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
12059 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
12060 {
12061 /* USAD8 and USADA8 */
12062 record_buf[arm_insn_r->reg_rec_count++]
12063 = bits (arm_insn_r->arm_insn, 16, 19);
12064 }
12065 }
12066 break;
12067
12068 case 7:
12069 {
12070 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
12071 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
12072 {
12073 /* Permanently UNDEFINED */
12074 return -1;
12075 }
12076 else
12077 {
12078 /* BFC, BFI and UBFX */
12079 record_buf[arm_insn_r->reg_rec_count++]
12080 = bits (arm_insn_r->arm_insn, 12, 15);
12081 }
12082 }
12083 break;
12084
12085 default:
12086 return -1;
12087 }
12088
12089 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12090
12091 return 0;
12092 }
12093
12094 /* Handle ARM mode instructions with opcode 010. */
12095
12096 static int
12097 arm_record_ld_st_imm_offset (arm_insn_decode_record *arm_insn_r)
12098 {
12099 struct regcache *reg_cache = arm_insn_r->regcache;
12100
12101 uint32_t reg_base, reg_dest;
12102 uint32_t offset_12, tgt_mem_addr;
12103 uint32_t record_buf[8], record_buf_mem[8];
12104 unsigned char wback;
12105 ULONGEST u_regval;
12106
12107 /* Calculate wback: base register write-back (P == 0 or W == 1). */
12108 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
12109 || (bit (arm_insn_r->arm_insn, 21) == 1);
12110
12111 arm_insn_r->reg_rec_count = 0;
12112 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
12113
12114 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12115 {
12116 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
12117 and LDRT. */
12118
12119 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
12120 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
12121
12122 /* The LDR instruction is capable of doing branching. If MOV LR, PC
12123 precedes an LDR instruction having R15 as reg_dest, it
12124 emulates a branch and link instruction, and hence we need to save
12125 CPSR and PC as well. */
12126 if (ARM_PC_REGNUM == reg_dest)
12127 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
12128
12129 /* If wback is true, also save the base register, which is going to be
12130 written to. */
12131 if (wback)
12132 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
12133 }
12134 else
12135 {
12136 /* STR (immediate), STRB (immediate), STRBT and STRT. */
12137
12138 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
12139 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
12140
12141 /* Handle bit U. */
12142 if (bit (arm_insn_r->arm_insn, 23))
12143 {
12144 /* U == 1: Add the offset. */
12145 tgt_mem_addr = (uint32_t) u_regval + offset_12;
12146 }
12147 else
12148 {
12149 /* U == 0: Subtract the offset. */
12150 tgt_mem_addr = (uint32_t) u_regval - offset_12;
12151 }
12152
12153 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
12154 bytes. */
12155 if (bit (arm_insn_r->arm_insn, 22))
12156 {
12157 /* STRB and STRBT: 1 byte. */
12158 record_buf_mem[0] = 1;
12159 }
12160 else
12161 {
12162 /* STR and STRT: 4 bytes. */
12163 record_buf_mem[0] = 4;
12164 }
12165
12166 /* Handle bit P. */
12167 if (bit (arm_insn_r->arm_insn, 24))
12168 record_buf_mem[1] = tgt_mem_addr;
12169 else
12170 record_buf_mem[1] = (uint32_t) u_regval;
12171
12172 arm_insn_r->mem_rec_count = 1;
12173
12174 /* If wback is true, also save the base register, which is going to be
12175 written to. */
12176 if (wback)
12177 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
12178 }
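/* For example, "str r1, [r2, #8]" with r2 == 0x2000 records 4 bytes at
   0x2008, while the post-indexed "str r1, [r2], #8" records 4 bytes at
   0x2000 (the unmodified base) and also records r2 itself, since wback is
   set.  */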
12179
12180 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12181 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
12182 return 0;
12183 }
12184
12185 /* Handling opcode 011 insns. */
12186
12187 static int
12188 arm_record_ld_st_reg_offset (arm_insn_decode_record *arm_insn_r)
12189 {
12190 struct regcache *reg_cache = arm_insn_r->regcache;
12191
12192 uint32_t shift_imm = 0;
12193 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
12194 uint32_t offset_12 = 0, tgt_mem_addr = 0;
12195 uint32_t record_buf[8], record_buf_mem[8];
12196
12197 LONGEST s_word;
12198 ULONGEST u_regval[2];
12199
12200 if (bit (arm_insn_r->arm_insn, 4))
12201 return arm_record_media (arm_insn_r);
12202
12203 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
12204 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
12205
12206 /* Handle enhanced store insns and the LDRD DSP insn; the cases below
12207 are ordered according to the addressing modes of the store insns,
12208 as for STRH. */
12209
12210 /* LDR or STR? */
12211 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12212 {
12213 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
12214 /* An LDR insn can branch: if MOV LR, PC
12215 precedes an LDR insn having R15 as the destination register,
12216 it emulates a branch and link insn, and hence we
12217 need to save CPSR and PC as well. */
12218 if (15 != reg_dest)
12219 {
12220 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
12221 arm_insn_r->reg_rec_count = 1;
12222 }
12223 else
12224 {
12225 record_buf[0] = reg_dest;
12226 record_buf[1] = ARM_PS_REGNUM;
12227 arm_insn_r->reg_rec_count = 2;
12228 }
12229 }
12230 else
12231 {
12232 if (! bits (arm_insn_r->arm_insn, 4, 11))
12233 {
12234 /* Store insn, register offset and register pre-indexed,
12235 register post-indexed. */
12236 /* Get Rm. */
12237 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
12238 /* Get Rn. */
12239 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
12240 regcache_raw_read_unsigned (reg_cache, reg_src1,
12241 &u_regval[0]);
12242 regcache_raw_read_unsigned (reg_cache, reg_src2,
12243 &u_regval[1]);
12244 if (15 == reg_src2)
12245 {
12246 /* If R15 was used as Rn, the value read is the current PC + 8. */
12247 /* Pre-indexed mode doesn't reach here; that would be an illegal insn. */
12248 u_regval[0] = u_regval[0] + 8;
12249 }
12250 /* Calculate target store address, Rn +/- Rm, register offset. */
12251 /* U == 1. */
12252 if (bit (arm_insn_r->arm_insn, 23))
12253 {
12254 tgt_mem_addr = u_regval[0] + u_regval[1];
12255 }
12256 else
12257 {
12258 tgt_mem_addr = u_regval[1] - u_regval[0];
12259 }
12260
12261 switch (arm_insn_r->opcode)
12262 {
12263 /* STR. */
12264 case 8:
12265 case 12:
12266 /* STR. */
12267 case 9:
12268 case 13:
12269 /* STRT. */
12270 case 1:
12271 case 5:
12272 /* STR. */
12273 case 0:
12274 case 4:
12275 record_buf_mem[0] = 4;
12276 break;
12277
12278 /* STRB. */
12279 case 10:
12280 case 14:
12281 /* STRB. */
12282 case 11:
12283 case 15:
12284 /* STRBT. */
12285 case 3:
12286 case 7:
12287 /* STRB. */
12288 case 2:
12289 case 6:
12290 record_buf_mem[0] = 1;
12291 break;
12292
12293 default:
12294 gdb_assert_not_reached ("no decoding pattern found");
12295 break;
12296 }
12297 record_buf_mem[1] = tgt_mem_addr;
12298 arm_insn_r->mem_rec_count = 1;
12299
12300 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
12301 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
12302 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
12303 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
12304 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
12305 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
12306 )
12307 {
12308 /* Rn is going to be changed in pre-indexed mode and
12309 post-indexed mode as well. */
12310 record_buf[0] = reg_src2;
12311 arm_insn_r->reg_rec_count = 1;
12312 }
12313 }
12314 else
12315 {
12316 /* Store insn, scaled register offset; scaled pre-indexed. */
12317 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
12318 /* Get Rm. */
12319 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
12320 /* Get Rn. */
12321 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
12322 /* Get shift_imm. */
12323 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
12324 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
12325 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
12326 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
12327 /* Offset_12 used as shift. */
12328 switch (offset_12)
12329 {
12330 case 0:
12331 /* Offset_12 used as index. */
12332 offset_12 = u_regval[0] << shift_imm;
12333 break;
12334
12335 case 1:
12336 offset_12 = (!shift_imm) ? 0 : u_regval[0] >> shift_imm;
12337 break;
12338
12339 case 2:
12340 if (!shift_imm)
12341 {
12342 if (bit (u_regval[0], 31))
12343 {
12344 offset_12 = 0xFFFFFFFF;
12345 }
12346 else
12347 {
12348 offset_12 = 0;
12349 }
12350 }
12351 else
12352 {
12353 /* This is an arithmetic shift. */
12354 offset_12 = s_word >> shift_imm;
12355 }
12356 break;
12357
12358 case 3:
12359 if (!shift_imm)
12360 {
12361 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
12362 &u_regval[1]);
12363 /* Get C flag value and shift it by 31. */
12364 offset_12 = (((bit (u_regval[1], 29)) << 31) \
12365 | (u_regval[0]) >> 1);
12366 }
12367 else
12368 {
12369 offset_12 = (u_regval[0] >> shift_imm)
12370 | (u_regval[0] << (32 - shift_imm));
12372 }
12373 break;
12374
12375 default:
12376 gdb_assert_not_reached ("no decoding pattern found");
12377 break;
12378 }
12379
12380 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
12381 /* bit U set. */
12382 if (bit (arm_insn_r->arm_insn, 23))
12383 {
12384 tgt_mem_addr = u_regval[1] + offset_12;
12385 }
12386 else
12387 {
12388 tgt_mem_addr = u_regval[1] - offset_12;
12389 }
12390
12391 switch (arm_insn_r->opcode)
12392 {
12393 /* STR. */
12394 case 8:
12395 case 12:
12396 /* STR. */
12397 case 9:
12398 case 13:
12399 /* STRT. */
12400 case 1:
12401 case 5:
12402 /* STR. */
12403 case 0:
12404 case 4:
12405 record_buf_mem[0] = 4;
12406 break;
12407
12408 /* STRB. */
12409 case 10:
12410 case 14:
12411 /* STRB. */
12412 case 11:
12413 case 15:
12414 /* STRBT. */
12415 case 3:
12416 case 7:
12417 /* STRB. */
12418 case 2:
12419 case 6:
12420 record_buf_mem[0] = 1;
12421 break;
12422
12423 default:
12424 gdb_assert_not_reached ("no decoding pattern found");
12425 break;
12426 }
12427 record_buf_mem[1] = tgt_mem_addr;
12428 arm_insn_r->mem_rec_count = 1;
12429
12430 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
12431 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
12432 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
12433 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
12434 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
12435 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
12436 )
12437 {
12438               /* Rn is going to be changed in register scaled pre-indexed
12439                  mode and scaled post-indexed mode.  */
12440 record_buf[0] = reg_src2;
12441 arm_insn_r->reg_rec_count = 1;
12442 }
12443 }
12444 }
12445
12446 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12447 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
12448 return 0;
12449 }
12450
12451 /* Handle ARM mode instructions with opcode 100. */
12452
12453 static int
12454 arm_record_ld_st_multiple (arm_insn_decode_record *arm_insn_r)
12455 {
12456 struct regcache *reg_cache = arm_insn_r->regcache;
12457 uint32_t register_count = 0, register_bits;
12458 uint32_t reg_base, addr_mode;
12459 uint32_t record_buf[24], record_buf_mem[48];
12460 uint32_t wback;
12461 ULONGEST u_regval;
12462
12463 /* Fetch the list of registers. */
12464 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
12465 arm_insn_r->reg_rec_count = 0;
12466
12467   /* Fetch the base register that contains the address we are loading
12468      data from or storing data to.  */
12469 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
12470
12471 /* Calculate wback. */
12472 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
12473
12474 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12475 {
12476 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
12477
12478 /* Find out which registers are going to be loaded from memory. */
12479 while (register_bits)
12480 {
12481 if (register_bits & 0x00000001)
12482 record_buf[arm_insn_r->reg_rec_count++] = register_count;
12483 register_bits = register_bits >> 1;
12484 register_count++;
12485 }
12486
12487
12488 /* If wback is true, also save the base register, which is going to be
12489 written to. */
12490 if (wback)
12491 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
12492
12493 /* Save the CPSR register. */
12494 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
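      /* Recording the CPSR here is a conservative choice: LDM variants that
         include the PC in the register list may also change the CPSR, and
         saving it unconditionally keeps the record correct in either case.  */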
12495 }
12496 else
12497 {
12498 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
12499
12500 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
12501
12502 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
12503
12504 /* Find out how many registers are going to be stored to memory. */
12505 while (register_bits)
12506 {
12507 if (register_bits & 0x00000001)
12508 register_count++;
12509 register_bits = register_bits >> 1;
12510 }
12511
12512 switch (addr_mode)
12513 {
12514 /* STMDA (STMED): Decrement after. */
12515 case 0:
12516 record_buf_mem[1] = (uint32_t) u_regval
12517 - register_count * ARM_INT_REGISTER_SIZE + 4;
12518 break;
12519 /* STM (STMIA, STMEA): Increment after. */
12520 case 1:
12521 record_buf_mem[1] = (uint32_t) u_regval;
12522 break;
12523 /* STMDB (STMFD): Decrement before. */
12524 case 2:
12525 record_buf_mem[1] = (uint32_t) u_regval
12526 - register_count * ARM_INT_REGISTER_SIZE;
12527 break;
12528 /* STMIB (STMFA): Increment before. */
12529 case 3:
12530 record_buf_mem[1] = (uint32_t) u_regval + ARM_INT_REGISTER_SIZE;
12531 break;
12532 default:
12533 gdb_assert_not_reached ("no decoding pattern found");
12534 break;
12535 }
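      /* For example, STMDB with Rn = 0x1000 and four registers in the list
         stores 16 bytes at 0xFF0..0xFFF, so the recorded block is 16 bytes
         starting at u_regval - 16.  */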
12536
12537 record_buf_mem[0] = register_count * ARM_INT_REGISTER_SIZE;
12538 arm_insn_r->mem_rec_count = 1;
12539
12540 /* If wback is true, also save the base register, which is going to be
12541 written to. */
12542 if (wback)
12543 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
12544 }
12545
12546 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12547 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
12548 return 0;
12549 }
12550
12551 /* Handling opcode 101 insns. */
12552
12553 static int
12554 arm_record_b_bl (arm_insn_decode_record *arm_insn_r)
12555 {
12556 uint32_t record_buf[8];
12557
12558 /* Handle B, BL, BLX(1) insns. */
12559 /* B simply branches so we do nothing here. */
12560   /* Note: BLX(1) doesn't fall here; instead it falls into the
12561      extension space.  */
12562 if (bit (arm_insn_r->arm_insn, 24))
12563 {
12564 record_buf[0] = ARM_LR_REGNUM;
12565 arm_insn_r->reg_rec_count = 1;
12566 }
12567
12568 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12569
12570 return 0;
12571 }
12572
12573 static int
12574 arm_record_unsupported_insn (arm_insn_decode_record *arm_insn_r)
12575 {
12576 gdb_printf (gdb_stderr,
12577 _("Process record does not support instruction "
12578 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
12579 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
12580
12581 return -1;
12582 }
12583
12584 /* Record handler for vector data transfer instructions. */
12585
12586 static int
12587 arm_record_vdata_transfer_insn (arm_insn_decode_record *arm_insn_r)
12588 {
12589 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
12590 uint32_t record_buf[4];
12591
12592 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
12593 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
12594 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
12595 bit_l = bit (arm_insn_r->arm_insn, 20);
12596 bit_c = bit (arm_insn_r->arm_insn, 8);
12597
12598 /* Handle VMOV instruction. */
12599 if (bit_l && bit_c)
12600 {
12601 record_buf[0] = reg_t;
12602 arm_insn_r->reg_rec_count = 1;
12603 }
12604 else if (bit_l && !bit_c)
12605 {
12606 /* Handle VMOV instruction. */
12607 if (bits_a == 0x00)
12608 {
12609 record_buf[0] = reg_t;
12610 arm_insn_r->reg_rec_count = 1;
12611 }
12612 /* Handle VMRS instruction. */
12613 else if (bits_a == 0x07)
12614 {
12615 if (reg_t == 15)
12616 reg_t = ARM_PS_REGNUM;
12617
12618 record_buf[0] = reg_t;
12619 arm_insn_r->reg_rec_count = 1;
12620 }
12621 }
12622 else if (!bit_l && !bit_c)
12623 {
12624 /* Handle VMOV instruction. */
12625 if (bits_a == 0x00)
12626 {
12627 record_buf[0] = ARM_D0_REGNUM + reg_v;
12628
12629 arm_insn_r->reg_rec_count = 1;
12630 }
12631 /* Handle VMSR instruction. */
12632 else if (bits_a == 0x07)
12633 {
12634 record_buf[0] = ARM_FPSCR_REGNUM;
12635 arm_insn_r->reg_rec_count = 1;
12636 }
12637 }
12638 else if (!bit_l && bit_c)
12639 {
12640 /* Handle VMOV instruction. */
12641 if (!(bits_a & 0x04))
12642 {
12643 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
12644 + ARM_D0_REGNUM;
12645 arm_insn_r->reg_rec_count = 1;
12646 }
12647 /* Handle VDUP instruction. */
12648 else
12649 {
12650 if (bit (arm_insn_r->arm_insn, 21))
12651 {
12652 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
12653 record_buf[0] = reg_v + ARM_D0_REGNUM;
12654 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
12655 arm_insn_r->reg_rec_count = 2;
12656 }
12657 else
12658 {
12659 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
12660 record_buf[0] = reg_v + ARM_D0_REGNUM;
12661 arm_insn_r->reg_rec_count = 1;
12662 }
12663 }
12664 }
12665
12666 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12667 return 0;
12668 }
12669
12670 /* Record handler for extension register load/store instructions. */
12671
12672 static int
12673 arm_record_exreg_ld_st_insn (arm_insn_decode_record *arm_insn_r)
12674 {
12675 uint32_t opcode, single_reg;
12676 uint8_t op_vldm_vstm;
12677 uint32_t record_buf[8], record_buf_mem[128];
12678 ULONGEST u_regval = 0;
12679
12680 struct regcache *reg_cache = arm_insn_r->regcache;
12681
12682 opcode = bits (arm_insn_r->arm_insn, 20, 24);
12683 single_reg = !bit (arm_insn_r->arm_insn, 8);
12684 op_vldm_vstm = opcode & 0x1b;
12685
12686 /* Handle VMOV instructions. */
12687 if ((opcode & 0x1e) == 0x04)
12688 {
12689 if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
12690 {
12691 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
12692 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
12693 arm_insn_r->reg_rec_count = 2;
12694 }
12695 else
12696 {
12697 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
12698 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
12699
12700 if (single_reg)
12701 {
12702 /* The first S register number m is REG_M:M (M is bit 5),
12703 the corresponding D register number is REG_M:M / 2, which
12704 is REG_M. */
12705 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
12706 /* The second S register number is REG_M:M + 1, the
12707 corresponding D register number is (REG_M:M + 1) / 2.
12708 IOW, if bit M is 1, the first and second S registers
12709 are mapped to different D registers, otherwise, they are
12710 in the same D register. */
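              /* For example, with REG_M = 2 and M = 1 the source registers are
                 S5 and S6, which live in D2 and D3, so both D registers are
                 recorded below.  */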
12711 if (bit_m)
12712 {
12713 record_buf[arm_insn_r->reg_rec_count++]
12714 = ARM_D0_REGNUM + reg_m + 1;
12715 }
12716 }
12717 else
12718 {
12719 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
12720 arm_insn_r->reg_rec_count = 1;
12721 }
12722 }
12723 }
12724 /* Handle VSTM and VPUSH instructions. */
12725 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
12726 || op_vldm_vstm == 0x12)
12727 {
12728 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
12729 uint32_t memory_index = 0;
12730
12731 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
12732 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12733 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
12734 imm_off32 = imm_off8 << 2;
12735 memory_count = imm_off8;
12736
12737 if (bit (arm_insn_r->arm_insn, 23))
12738 start_address = u_regval;
12739 else
12740 start_address = u_regval - imm_off32;
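      /* For example, VPUSH {d8-d9} encodes imm8 = 4, so imm_off32 is 16 and,
         with the U bit clear, the block being stored starts at Rn - 16.  */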
12741
12742 if (bit (arm_insn_r->arm_insn, 21))
12743 {
12744 record_buf[0] = reg_rn;
12745 arm_insn_r->reg_rec_count = 1;
12746 }
12747
12748 while (memory_count > 0)
12749 {
12750 if (single_reg)
12751 {
12752 record_buf_mem[memory_index] = 4;
12753 record_buf_mem[memory_index + 1] = start_address;
12754 start_address = start_address + 4;
12755 memory_index = memory_index + 2;
12756 }
12757 else
12758 {
12759 record_buf_mem[memory_index] = 4;
12760 record_buf_mem[memory_index + 1] = start_address;
12761 record_buf_mem[memory_index + 2] = 4;
12762 record_buf_mem[memory_index + 3] = start_address + 4;
12763 start_address = start_address + 8;
12764 memory_index = memory_index + 4;
12765 }
12766 memory_count--;
12767 }
12768 arm_insn_r->mem_rec_count = (memory_index >> 1);
12769 }
12770 /* Handle VLDM instructions. */
12771 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
12772 || op_vldm_vstm == 0x13)
12773 {
12774 uint32_t reg_count, reg_vd;
12775 uint32_t reg_index = 0;
12776 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
12777
12778 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12779 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
12780
12781 /* REG_VD is the first D register number. If the instruction
12782 loads memory to S registers (SINGLE_REG is TRUE), the register
12783 number is (REG_VD << 1 | bit D), so the corresponding D
12784 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
12785 if (!single_reg)
12786 reg_vd = reg_vd | (bit_d << 4);
12787
12788 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
12789 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
12790
12791 /* If the instruction loads memory to D register, REG_COUNT should
12792 be divided by 2, according to the ARM Architecture Reference
12793 Manual. If the instruction loads memory to S register, divide by
12794 2 as well because two S registers are mapped to D register. */
12795 reg_count = reg_count / 2;
12796 if (single_reg && bit_d)
12797 {
12798 /* Increase the register count if S register list starts from
12799 an odd number (bit d is one). */
12800 reg_count++;
12801 }
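      /* For example, a VLDM of S1-S4 encodes imm8 = 4 with bit D set, so
         reg_count becomes 4 / 2 + 1 = 3 and D0, D1 and D2 (which together
         hold S1..S4) are recorded by the loop below.  */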
12802
12803 while (reg_count > 0)
12804 {
12805 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
12806 reg_count--;
12807 }
12808 arm_insn_r->reg_rec_count = reg_index;
12809 }
12810 /* VSTR Vector store register. */
12811 else if ((opcode & 0x13) == 0x10)
12812 {
12813 uint32_t start_address, reg_rn, imm_off32, imm_off8;
12814 uint32_t memory_index = 0;
12815
12816 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
12817 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12818 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
12819 imm_off32 = imm_off8 << 2;
12820
12821 if (bit (arm_insn_r->arm_insn, 23))
12822 start_address = u_regval + imm_off32;
12823 else
12824 start_address = u_regval - imm_off32;
12825
12826 if (single_reg)
12827 {
12828 record_buf_mem[memory_index] = 4;
12829 record_buf_mem[memory_index + 1] = start_address;
12830 arm_insn_r->mem_rec_count = 1;
12831 }
12832 else
12833 {
12834 record_buf_mem[memory_index] = 4;
12835 record_buf_mem[memory_index + 1] = start_address;
12836 record_buf_mem[memory_index + 2] = 4;
12837 record_buf_mem[memory_index + 3] = start_address + 4;
12838 arm_insn_r->mem_rec_count = 2;
12839 }
12840 }
12841 /* VLDR Vector load register. */
12842 else if ((opcode & 0x13) == 0x11)
12843 {
12844 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12845
12846 if (!single_reg)
12847 {
12848 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
12849 record_buf[0] = ARM_D0_REGNUM + reg_vd;
12850 }
12851 else
12852 {
12853 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
12854 /* Record register D rather than pseudo register S. */
12855 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
12856 }
12857 arm_insn_r->reg_rec_count = 1;
12858 }
12859
12860 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12861 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
12862 return 0;
12863 }
12864
12865 /* Record handler for arm/thumb mode VFP data processing instructions. */
12866
12867 static int
12868 arm_record_vfp_data_proc_insn (arm_insn_decode_record *arm_insn_r)
12869 {
12870 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
12871 uint32_t record_buf[4];
12872 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
12873 enum insn_types curr_insn_type = INSN_INV;
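  /* Rough meaning of the insn_types used below, as inferred from the
     recording switch at the end of this function: INSN_T0 records a pair of
     D registers (D and D+1), INSN_T1 a single double-precision D register,
     INSN_T2 the D register containing the destination S register, and
     INSN_T3 only the FPSCR.  */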
12874
12875 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12876 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
12877 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
12878 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
12879 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
12880 bit_d = bit (arm_insn_r->arm_insn, 22);
12881 /* Mask off the "D" bit. */
12882 opc1 = opc1 & ~0x04;
12883
12884 /* Handle VMLA, VMLS. */
12885 if (opc1 == 0x00)
12886 {
12887 if (bit (arm_insn_r->arm_insn, 10))
12888 {
12889 if (bit (arm_insn_r->arm_insn, 6))
12890 curr_insn_type = INSN_T0;
12891 else
12892 curr_insn_type = INSN_T1;
12893 }
12894 else
12895 {
12896 if (dp_op_sz)
12897 curr_insn_type = INSN_T1;
12898 else
12899 curr_insn_type = INSN_T2;
12900 }
12901 }
12902 /* Handle VNMLA, VNMLS, VNMUL. */
12903 else if (opc1 == 0x01)
12904 {
12905 if (dp_op_sz)
12906 curr_insn_type = INSN_T1;
12907 else
12908 curr_insn_type = INSN_T2;
12909 }
12910 /* Handle VMUL. */
12911 else if (opc1 == 0x02 && !(opc3 & 0x01))
12912 {
12913 if (bit (arm_insn_r->arm_insn, 10))
12914 {
12915 if (bit (arm_insn_r->arm_insn, 6))
12916 curr_insn_type = INSN_T0;
12917 else
12918 curr_insn_type = INSN_T1;
12919 }
12920 else
12921 {
12922 if (dp_op_sz)
12923 curr_insn_type = INSN_T1;
12924 else
12925 curr_insn_type = INSN_T2;
12926 }
12927 }
12928 /* Handle VADD, VSUB. */
12929 else if (opc1 == 0x03)
12930 {
12931 if (!bit (arm_insn_r->arm_insn, 9))
12932 {
12933 if (bit (arm_insn_r->arm_insn, 6))
12934 curr_insn_type = INSN_T0;
12935 else
12936 curr_insn_type = INSN_T1;
12937 }
12938 else
12939 {
12940 if (dp_op_sz)
12941 curr_insn_type = INSN_T1;
12942 else
12943 curr_insn_type = INSN_T2;
12944 }
12945 }
12946 /* Handle VDIV. */
12947 else if (opc1 == 0x08)
12948 {
12949 if (dp_op_sz)
12950 curr_insn_type = INSN_T1;
12951 else
12952 curr_insn_type = INSN_T2;
12953 }
12954 /* Handle all other vfp data processing instructions. */
12955 else if (opc1 == 0x0b)
12956 {
12957 /* Handle VMOV. */
12958 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
12959 {
12960 if (bit (arm_insn_r->arm_insn, 4))
12961 {
12962 if (bit (arm_insn_r->arm_insn, 6))
12963 curr_insn_type = INSN_T0;
12964 else
12965 curr_insn_type = INSN_T1;
12966 }
12967 else
12968 {
12969 if (dp_op_sz)
12970 curr_insn_type = INSN_T1;
12971 else
12972 curr_insn_type = INSN_T2;
12973 }
12974 }
12975 /* Handle VNEG and VABS. */
12976 else if ((opc2 == 0x01 && opc3 == 0x01)
12977 || (opc2 == 0x00 && opc3 == 0x03))
12978 {
12979 if (!bit (arm_insn_r->arm_insn, 11))
12980 {
12981 if (bit (arm_insn_r->arm_insn, 6))
12982 curr_insn_type = INSN_T0;
12983 else
12984 curr_insn_type = INSN_T1;
12985 }
12986 else
12987 {
12988 if (dp_op_sz)
12989 curr_insn_type = INSN_T1;
12990 else
12991 curr_insn_type = INSN_T2;
12992 }
12993 }
12994 /* Handle VSQRT. */
12995 else if (opc2 == 0x01 && opc3 == 0x03)
12996 {
12997 if (dp_op_sz)
12998 curr_insn_type = INSN_T1;
12999 else
13000 curr_insn_type = INSN_T2;
13001 }
13002 /* Handle VCVT. */
13003 else if (opc2 == 0x07 && opc3 == 0x03)
13004 {
13005 if (!dp_op_sz)
13006 curr_insn_type = INSN_T1;
13007 else
13008 curr_insn_type = INSN_T2;
13009 }
13010 else if (opc3 & 0x01)
13011 {
13012 /* Handle VCVT. */
13013 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
13014 {
13015 if (!bit (arm_insn_r->arm_insn, 18))
13016 curr_insn_type = INSN_T2;
13017 else
13018 {
13019 if (dp_op_sz)
13020 curr_insn_type = INSN_T1;
13021 else
13022 curr_insn_type = INSN_T2;
13023 }
13024 }
13025 /* Handle VCVT. */
13026 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
13027 {
13028 if (dp_op_sz)
13029 curr_insn_type = INSN_T1;
13030 else
13031 curr_insn_type = INSN_T2;
13032 }
13033 /* Handle VCVTB, VCVTT. */
13034 else if ((opc2 & 0x0e) == 0x02)
13035 curr_insn_type = INSN_T2;
13036 /* Handle VCMP, VCMPE. */
13037 else if ((opc2 & 0x0e) == 0x04)
13038 curr_insn_type = INSN_T3;
13039 }
13040 }
13041
13042 switch (curr_insn_type)
13043 {
13044 case INSN_T0:
13045 reg_vd = reg_vd | (bit_d << 4);
13046 record_buf[0] = reg_vd + ARM_D0_REGNUM;
13047 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
13048 arm_insn_r->reg_rec_count = 2;
13049 break;
13050
13051 case INSN_T1:
13052 reg_vd = reg_vd | (bit_d << 4);
13053 record_buf[0] = reg_vd + ARM_D0_REGNUM;
13054 arm_insn_r->reg_rec_count = 1;
13055 break;
13056
13057 case INSN_T2:
13058 reg_vd = (reg_vd << 1) | bit_d;
13059 record_buf[0] = reg_vd + ARM_D0_REGNUM;
13060 arm_insn_r->reg_rec_count = 1;
13061 break;
13062
13063 case INSN_T3:
13064 record_buf[0] = ARM_FPSCR_REGNUM;
13065 arm_insn_r->reg_rec_count = 1;
13066 break;
13067
13068 default:
13069 gdb_assert_not_reached ("no decoding pattern found");
13070 break;
13071 }
13072
13073 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
13074 return 0;
13075 }
13076
13077 /* Handling opcode 110 insns. */
13078
13079 static int
13080 arm_record_asimd_vfp_coproc (arm_insn_decode_record *arm_insn_r)
13081 {
13082 uint32_t op1, op1_ebit, coproc;
13083
13084 coproc = bits (arm_insn_r->arm_insn, 8, 11);
13085 op1 = bits (arm_insn_r->arm_insn, 20, 25);
13086 op1_ebit = bit (arm_insn_r->arm_insn, 20);
13087
13088 if ((coproc & 0x0e) == 0x0a)
13089 {
13090 /* Handle extension register ld/st instructions. */
13091 if (!(op1 & 0x20))
13092 return arm_record_exreg_ld_st_insn (arm_insn_r);
13093
13094 /* 64-bit transfers between arm core and extension registers. */
13095 if ((op1 & 0x3e) == 0x04)
13096 return arm_record_exreg_ld_st_insn (arm_insn_r);
13097 }
13098 else
13099 {
13100 /* Handle coprocessor ld/st instructions. */
13101 if (!(op1 & 0x3a))
13102 {
13103 /* Store. */
13104 if (!op1_ebit)
13105 return arm_record_unsupported_insn (arm_insn_r);
13106 else
13107 /* Load. */
13108 return arm_record_unsupported_insn (arm_insn_r);
13109 }
13110
13111 /* Move to coprocessor from two arm core registers. */
13112 if (op1 == 0x4)
13113 return arm_record_unsupported_insn (arm_insn_r);
13114
13115 /* Move to two arm core registers from coprocessor. */
13116 if (op1 == 0x5)
13117 {
13118 uint32_t reg_t[2];
13119
13120 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
13121 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
13122 arm_insn_r->reg_rec_count = 2;
13123
13124 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
13125 return 0;
13126 }
13127 }
13128 return arm_record_unsupported_insn (arm_insn_r);
13129 }
13130
13131 /* Handling opcode 111 insns. */
13132
13133 static int
13134 arm_record_coproc_data_proc (arm_insn_decode_record *arm_insn_r)
13135 {
13136 uint32_t op, op1_ebit, coproc, bits_24_25;
13137 arm_gdbarch_tdep *tdep
13138 = gdbarch_tdep<arm_gdbarch_tdep> (arm_insn_r->gdbarch);
13139 struct regcache *reg_cache = arm_insn_r->regcache;
13140
13141 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
13142 coproc = bits (arm_insn_r->arm_insn, 8, 11);
13143 op1_ebit = bit (arm_insn_r->arm_insn, 20);
13144 op = bit (arm_insn_r->arm_insn, 4);
13145 bits_24_25 = bits (arm_insn_r->arm_insn, 24, 25);
13146
13147 /* Handle arm SWI/SVC system call instructions. */
13148 if (bits_24_25 == 0x3)
13149 {
13150 if (tdep->arm_syscall_record != NULL)
13151 {
13152 ULONGEST svc_operand, svc_number;
13153
13154 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
13155
13156 if (svc_operand) /* OABI. */
13157 svc_number = svc_operand - 0x900000;
13158 else /* EABI. */
13159 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
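          /* For example, on ARM GNU/Linux the OABI form "swi 0x900004"
             encodes the write syscall (4) in the operand, while the EABI form
             "svc 0" leaves the operand zero and passes the number in r7.  */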
13160
13161 return tdep->arm_syscall_record (reg_cache, svc_number);
13162 }
13163 else
13164 {
13165 gdb_printf (gdb_stderr, _("no syscall record support\n"));
13166 return -1;
13167 }
13168 }
13169 else if (bits_24_25 == 0x02)
13170 {
13171 if (op)
13172 {
13173 if ((coproc & 0x0e) == 0x0a)
13174 {
13175 /* 8, 16, and 32-bit transfer */
13176 return arm_record_vdata_transfer_insn (arm_insn_r);
13177 }
13178 else
13179 {
13180 if (op1_ebit)
13181 {
13182 /* MRC, MRC2 */
13183 uint32_t record_buf[1];
13184
13185 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
13186 if (record_buf[0] == 15)
13187 record_buf[0] = ARM_PS_REGNUM;
13188
13189 arm_insn_r->reg_rec_count = 1;
13190 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
13191 record_buf);
13192 return 0;
13193 }
13194 else
13195 {
13196 /* MCR, MCR2 */
13197 return -1;
13198 }
13199 }
13200 }
13201 else
13202 {
13203 if ((coproc & 0x0e) == 0x0a)
13204 {
13205 /* VFP data-processing instructions. */
13206 return arm_record_vfp_data_proc_insn (arm_insn_r);
13207 }
13208 else
13209 {
13210 /* CDP, CDP2 */
13211 return -1;
13212 }
13213 }
13214 }
13215 else
13216 {
13217 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 25);
13218
13219 if (op1 == 5)
13220 {
13221 if ((coproc & 0x0e) != 0x0a)
13222 {
13223 /* MRRC, MRRC2 */
13224 return -1;
13225 }
13226 }
13227 else if (op1 == 4 || op1 == 5)
13228 {
13229 if ((coproc & 0x0e) == 0x0a)
13230 {
13231 /* 64-bit transfers between ARM core and extension */
13232 return -1;
13233 }
13234 else if (op1 == 4)
13235 {
13236 /* MCRR, MCRR2 */
13237 return -1;
13238 }
13239 }
13240 else if (op1 == 0 || op1 == 1)
13241 {
13242 /* UNDEFINED */
13243 return -1;
13244 }
13245 else
13246 {
13247 if ((coproc & 0x0e) == 0x0a)
13248 {
13249 /* Extension register load/store */
13250 }
13251 else
13252 {
13253 /* STC, STC2, LDC, LDC2 */
13254 }
13255 return -1;
13256 }
13257 }
13258
13259 return -1;
13260 }
13261
13262 /* Handling opcode 000 insns. */
13263
13264 static int
13265 thumb_record_shift_add_sub (arm_insn_decode_record *thumb_insn_r)
13266 {
13267 uint32_t record_buf[8];
13268 uint32_t reg_src1 = 0;
13269
13270 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
13271
13272 record_buf[0] = ARM_PS_REGNUM;
13273 record_buf[1] = reg_src1;
13274 thumb_insn_r->reg_rec_count = 2;
13275
13276 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13277
13278 return 0;
13279 }
13280
13281
13282 /* Handling opcode 001 insns. */
13283
13284 static int
13285 thumb_record_add_sub_cmp_mov (arm_insn_decode_record *thumb_insn_r)
13286 {
13287 uint32_t record_buf[8];
13288 uint32_t reg_src1 = 0;
13289
13290 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13291
13292 record_buf[0] = ARM_PS_REGNUM;
13293 record_buf[1] = reg_src1;
13294 thumb_insn_r->reg_rec_count = 2;
13295
13296 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13297
13298 return 0;
13299 }
13300
13301 /* Handling opcode 010 insns. */
13302
13303 static int
13304 thumb_record_ld_st_reg_offset (arm_insn_decode_record *thumb_insn_r)
13305 {
13306 struct regcache *reg_cache = thumb_insn_r->regcache;
13307 uint32_t record_buf[8], record_buf_mem[8];
13308
13309 uint32_t reg_src1 = 0, reg_src2 = 0;
13310 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
13311
13312 ULONGEST u_regval[2] = {0};
13313
13314 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
13315
13316 if (bit (thumb_insn_r->arm_insn, 12))
13317 {
13318 /* Handle load/store register offset. */
13319 uint32_t opB = bits (thumb_insn_r->arm_insn, 9, 11);
13320
13321 if (in_inclusive_range (opB, 4U, 7U))
13322 {
13323           /* LDR(2), LDRB(2), LDRH(2), LDRSB, LDRSH.  */
13324           reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
13325 record_buf[0] = reg_src1;
13326 thumb_insn_r->reg_rec_count = 1;
13327 }
13328 else if (in_inclusive_range (opB, 0U, 2U))
13329 {
13330           /* STR(2), STRB(2), STRH(2).  */
13331 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
13332 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
13333 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
13334 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
13335 if (0 == opB)
13336 record_buf_mem[0] = 4; /* STR (2). */
13337 else if (2 == opB)
13338 record_buf_mem[0] = 1; /* STRB (2). */
13339 else if (1 == opB)
13340 record_buf_mem[0] = 2; /* STRH (2). */
13341 record_buf_mem[1] = u_regval[0] + u_regval[1];
13342 thumb_insn_r->mem_rec_count = 1;
13343 }
13344 }
13345 else if (bit (thumb_insn_r->arm_insn, 11))
13346 {
13347 /* Handle load from literal pool. */
13348 /* LDR(3). */
13349 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13350 record_buf[0] = reg_src1;
13351 thumb_insn_r->reg_rec_count = 1;
13352 }
13353 else if (opcode1)
13354 {
13355 /* Special data instructions and branch and exchange */
13356 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
13357 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
13358 if ((3 == opcode2) && (!opcode3))
13359 {
13360 /* Branch with exchange. */
13361 record_buf[0] = ARM_PS_REGNUM;
13362 thumb_insn_r->reg_rec_count = 1;
13363 }
13364 else
13365 {
13366 /* Format 8; special data processing insns. */
13367 record_buf[0] = ARM_PS_REGNUM;
13368 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
13369 | bits (thumb_insn_r->arm_insn, 0, 2));
13370 thumb_insn_r->reg_rec_count = 2;
13371 }
13372 }
13373 else
13374 {
13375 /* Format 5; data processing insns. */
13376 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
13377 if (bit (thumb_insn_r->arm_insn, 7))
13378 {
13379 reg_src1 = reg_src1 + 8;
13380 }
13381 record_buf[0] = ARM_PS_REGNUM;
13382 record_buf[1] = reg_src1;
13383 thumb_insn_r->reg_rec_count = 2;
13384 }
13385
13386 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13387 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
13388 record_buf_mem);
13389
13390 return 0;
13391 }
13392
13393 /* Handling opcode 001 insns. */
13394
13395 static int
13396 thumb_record_ld_st_imm_offset (arm_insn_decode_record *thumb_insn_r)
13397 {
13398 struct regcache *reg_cache = thumb_insn_r->regcache;
13399 uint32_t record_buf[8], record_buf_mem[8];
13400
13401 uint32_t reg_src1 = 0;
13402 uint32_t opcode = 0, immed_5 = 0;
13403
13404 ULONGEST u_regval = 0;
13405
13406 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
13407
13408 if (opcode)
13409 {
13410 /* LDR(1). */
13411 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
13412 record_buf[0] = reg_src1;
13413 thumb_insn_r->reg_rec_count = 1;
13414 }
13415 else
13416 {
13417 /* STR(1). */
13418 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
13419 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
13420 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
13421 record_buf_mem[0] = 4;
13422 record_buf_mem[1] = u_regval + (immed_5 * 4);
13423 thumb_insn_r->mem_rec_count = 1;
13424 }
13425
13426 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13427 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
13428 record_buf_mem);
13429
13430 return 0;
13431 }
13432
13433 /* Handling opcode 100 insns. */
13434
13435 static int
13436 thumb_record_ld_st_stack (arm_insn_decode_record *thumb_insn_r)
13437 {
13438 struct regcache *reg_cache = thumb_insn_r->regcache;
13439 uint32_t record_buf[8], record_buf_mem[8];
13440
13441 uint32_t reg_src1 = 0;
13442 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
13443
13444 ULONGEST u_regval = 0;
13445
13446 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
13447
13448 if (3 == opcode)
13449 {
13450 /* LDR(4). */
13451 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13452 record_buf[0] = reg_src1;
13453 thumb_insn_r->reg_rec_count = 1;
13454 }
13455 else if (1 == opcode)
13456 {
13457 /* LDRH(1). */
13458 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
13459 record_buf[0] = reg_src1;
13460 thumb_insn_r->reg_rec_count = 1;
13461 }
13462 else if (2 == opcode)
13463 {
13464 /* STR(3). */
13465 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
13466 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
13467 record_buf_mem[0] = 4;
13468 record_buf_mem[1] = u_regval + (immed_8 * 4);
13469 thumb_insn_r->mem_rec_count = 1;
13470 }
13471 else if (0 == opcode)
13472 {
13473 /* STRH(1). */
13474 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
13475 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
13476 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
13477 record_buf_mem[0] = 2;
13478 record_buf_mem[1] = u_regval + (immed_5 * 2);
13479 thumb_insn_r->mem_rec_count = 1;
13480 }
13481
13482 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13483 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
13484 record_buf_mem);
13485
13486 return 0;
13487 }
13488
13489 /* Handling opcode 101 insns. */
13490
13491 static int
13492 thumb_record_misc (arm_insn_decode_record *thumb_insn_r)
13493 {
13494 struct regcache *reg_cache = thumb_insn_r->regcache;
13495
13496 uint32_t opcode = 0;
13497 uint32_t register_bits = 0, register_count = 0;
13498 uint32_t index = 0, start_address = 0;
13499 uint32_t record_buf[24], record_buf_mem[48];
13500 uint32_t reg_src1;
13501
13502 ULONGEST u_regval = 0;
13503
13504 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
13505
13506 if (opcode == 0 || opcode == 1)
13507 {
13508 /* ADR and ADD (SP plus immediate) */
13509
13510 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13511 record_buf[0] = reg_src1;
13512 thumb_insn_r->reg_rec_count = 1;
13513 }
13514 else
13515 {
13516 /* Miscellaneous 16-bit instructions */
13517 uint32_t opcode2 = bits (thumb_insn_r->arm_insn, 8, 11);
13518
13519 switch (opcode2)
13520 {
13521 case 6:
13522 /* SETEND and CPS */
13523 break;
13524 case 0:
13525 /* ADD/SUB (SP plus immediate) */
13526 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13527 record_buf[0] = ARM_SP_REGNUM;
13528 thumb_insn_r->reg_rec_count = 1;
13529 break;
13530 case 1: /* fall through */
13531 case 3: /* fall through */
13532 case 9: /* fall through */
13533 case 11:
13534 /* CBNZ, CBZ */
13535 break;
13536 case 2:
13537 /* SXTH, SXTB, UXTH, UXTB */
13538 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
13539 thumb_insn_r->reg_rec_count = 1;
13540 break;
13541 case 4: /* fall through */
13542 case 5:
13543 /* PUSH. */
13544 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
13545 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
13546 while (register_bits)
13547 {
13548 if (register_bits & 0x00000001)
13549 register_count++;
13550 register_bits = register_bits >> 1;
13551 }
13552           start_address = u_regval
13553             - (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
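          /* For example, PUSH {r4-r7} with SP = 0x2000 has register_count = 4
             and bit 8 clear, so start_address is 0x1FF0 and four 4-byte slots
             at 0x1FF0, 0x1FF4, 0x1FF8 and 0x1FFC are recorded by the loop
             below.  */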
13554 thumb_insn_r->mem_rec_count = register_count;
13555 while (register_count)
13556 {
13557 record_buf_mem[(register_count * 2) - 1] = start_address;
13558 record_buf_mem[(register_count * 2) - 2] = 4;
13559 start_address = start_address + 4;
13560 register_count--;
13561 }
13562 record_buf[0] = ARM_SP_REGNUM;
13563 thumb_insn_r->reg_rec_count = 1;
13564 break;
13565 case 10:
13566 /* REV, REV16, REVSH */
13567 record_buf[0] = bits (thumb_insn_r->arm_insn, 0, 2);
13568 thumb_insn_r->reg_rec_count = 1;
13569 break;
13570 case 12: /* fall through */
13571 case 13:
13572 /* POP. */
13573 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
13574 while (register_bits)
13575 {
13576 if (register_bits & 0x00000001)
13577 record_buf[index++] = register_count;
13578 register_bits = register_bits >> 1;
13579 register_count++;
13580 }
13581 record_buf[index++] = ARM_PS_REGNUM;
13582 record_buf[index++] = ARM_SP_REGNUM;
13583 thumb_insn_r->reg_rec_count = index;
13584 break;
13585 case 0xe:
13586 /* BKPT insn. */
13587           /* Handle the enhanced software breakpoint insn, BKPT.  */
13588           /* CPSR is changed so that execution resumes in ARM state with normal
13589              interrupts disabled, entering abort mode.  */
13590           /* The PC is set according to the high vector configuration.  */
13591           /* When the user hits the breakpoint and then reverse-executes, we need
13592              to go back with the previous CPSR and program counter.  */
13593 record_buf[0] = ARM_PS_REGNUM;
13594 record_buf[1] = ARM_LR_REGNUM;
13595 thumb_insn_r->reg_rec_count = 2;
13596 /* We need to save SPSR value, which is not yet done. */
13597 gdb_printf (gdb_stderr,
13598 _("Process record does not support instruction "
13599 "0x%0x at address %s.\n"),
13600 thumb_insn_r->arm_insn,
13601 paddress (thumb_insn_r->gdbarch,
13602 thumb_insn_r->this_addr));
13603 return -1;
13604
13605 case 0xf:
13606 /* If-Then, and hints */
13607 break;
13608 default:
13609 return -1;
13610 };
13611 }
13612
13613 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13614 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
13615 record_buf_mem);
13616
13617 return 0;
13618 }
13619
13620 /* Handling opcode 110 insns. */
13621
13622 static int
13623 thumb_record_ldm_stm_swi (arm_insn_decode_record *thumb_insn_r)
13624 {
13625 arm_gdbarch_tdep *tdep
13626 = gdbarch_tdep<arm_gdbarch_tdep> (thumb_insn_r->gdbarch);
13627 struct regcache *reg_cache = thumb_insn_r->regcache;
13628
13629   uint32_t ret = 0; /* Function return value: -1: record failure; 0: success.  */
13630 uint32_t reg_src1 = 0;
13631 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
13632 uint32_t index = 0, start_address = 0;
13633 uint32_t record_buf[24], record_buf_mem[48];
13634
13635 ULONGEST u_regval = 0;
13636
13637 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
13638 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
13639
13640 if (1 == opcode2)
13641 {
13642
13643 /* LDMIA. */
13644 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
13645 /* Get Rn. */
13646 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13647 while (register_bits)
13648 {
13649 if (register_bits & 0x00000001)
13650 record_buf[index++] = register_count;
13651 register_bits = register_bits >> 1;
13652 register_count++;
13653 }
13654 record_buf[index++] = reg_src1;
13655 thumb_insn_r->reg_rec_count = index;
13656 }
13657 else if (0 == opcode2)
13658 {
13659       /* Handle STMIA.  */
13660 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
13661 /* Get Rn. */
13662 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
13663 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
13664 while (register_bits)
13665 {
13666 if (register_bits & 0x00000001)
13667 register_count++;
13668 register_bits = register_bits >> 1;
13669 }
13670 start_address = u_regval;
13671 thumb_insn_r->mem_rec_count = register_count;
13672 while (register_count)
13673 {
13674 record_buf_mem[(register_count * 2) - 1] = start_address;
13675 record_buf_mem[(register_count * 2) - 2] = 4;
13676 start_address = start_address + 4;
13677 register_count--;
13678 }
13679 }
13680 else if (0x1F == opcode1)
13681 {
13682 /* Handle arm syscall insn. */
13683 if (tdep->arm_syscall_record != NULL)
13684 {
13685 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
13686 ret = tdep->arm_syscall_record (reg_cache, u_regval);
13687 }
13688 else
13689 {
13690 gdb_printf (gdb_stderr, _("no syscall record support\n"));
13691 return -1;
13692 }
13693 }
13694
13695 /* B (1), conditional branch is automatically taken care in process_record,
13696 as PC is saved there. */
13697
13698 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13699 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
13700 record_buf_mem);
13701
13702 return ret;
13703 }
13704
13705 /* Handling opcode 111 insns. */
13706
13707 static int
13708 thumb_record_branch (arm_insn_decode_record *thumb_insn_r)
13709 {
13710 uint32_t record_buf[8];
13711 uint32_t bits_h = 0;
13712
13713 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
13714
13715 if (2 == bits_h || 3 == bits_h)
13716 {
13717 /* BL */
13718 record_buf[0] = ARM_LR_REGNUM;
13719 thumb_insn_r->reg_rec_count = 1;
13720 }
13721 else if (1 == bits_h)
13722 {
13723 /* BLX(1). */
13724 record_buf[0] = ARM_PS_REGNUM;
13725 record_buf[1] = ARM_LR_REGNUM;
13726 thumb_insn_r->reg_rec_count = 2;
13727 }
13728
13729 /* B(2) is automatically taken care in process_record, as PC is
13730 saved there. */
13731
13732 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13733
13734 return 0;
13735 }
13736
13737 /* Handler for thumb2 load/store multiple instructions. */
13738
13739 static int
13740 thumb2_record_ld_st_multiple (arm_insn_decode_record *thumb2_insn_r)
13741 {
13742 struct regcache *reg_cache = thumb2_insn_r->regcache;
13743
13744 uint32_t reg_rn, op;
13745 uint32_t register_bits = 0, register_count = 0;
13746 uint32_t index = 0, start_address = 0;
13747 uint32_t record_buf[24], record_buf_mem[48];
13748
13749 ULONGEST u_regval = 0;
13750
13751 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13752 op = bits (thumb2_insn_r->arm_insn, 23, 24);
13753
13754 if (0 == op || 3 == op)
13755 {
13756 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13757 {
13758 /* Handle RFE instruction. */
13759 record_buf[0] = ARM_PS_REGNUM;
13760 thumb2_insn_r->reg_rec_count = 1;
13761 }
13762 else
13763 {
13764           /* SRS would need the banked SP, which is not handled; record as
                   unsupported.  */
13765 return arm_record_unsupported_insn (thumb2_insn_r);
13766 }
13767 }
13768 else if (1 == op || 2 == op)
13769 {
13770 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13771 {
13772 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
13773 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
13774 while (register_bits)
13775 {
13776 if (register_bits & 0x00000001)
13777 record_buf[index++] = register_count;
13778
13779 register_count++;
13780 register_bits = register_bits >> 1;
13781 }
13782 record_buf[index++] = reg_rn;
13783 record_buf[index++] = ARM_PS_REGNUM;
13784 thumb2_insn_r->reg_rec_count = index;
13785 }
13786 else
13787 {
13788 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
13789 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
13790 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
13791 while (register_bits)
13792 {
13793 if (register_bits & 0x00000001)
13794 register_count++;
13795
13796 register_bits = register_bits >> 1;
13797 }
13798
13799 if (1 == op)
13800 {
13801               /* Start address calculation for STM/STMIA/STMEA (increment after).  */
13802 start_address = u_regval;
13803 }
13804 else if (2 == op)
13805 {
13806               /* Start address calculation for STMDB/STMFD (decrement before).  */
13807 start_address = u_regval - register_count * 4;
13808 }
13809
13810 thumb2_insn_r->mem_rec_count = register_count;
13811 while (register_count)
13812 {
13813 record_buf_mem[register_count * 2 - 1] = start_address;
13814 record_buf_mem[register_count * 2 - 2] = 4;
13815 start_address = start_address + 4;
13816 register_count--;
13817 }
13818 record_buf[0] = reg_rn;
13819 record_buf[1] = ARM_PS_REGNUM;
13820 thumb2_insn_r->reg_rec_count = 2;
13821 }
13822 }
13823
13824 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13825 record_buf_mem);
13826 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13827 record_buf);
13828 return ARM_RECORD_SUCCESS;
13829 }
13830
13831 /* Handler for thumb2 load/store (dual/exclusive) and table branch
13832 instructions. */
13833
13834 static int
13835 thumb2_record_ld_st_dual_ex_tbb (arm_insn_decode_record *thumb2_insn_r)
13836 {
13837 struct regcache *reg_cache = thumb2_insn_r->regcache;
13838
13839 uint32_t reg_rd, reg_rn, offset_imm;
13840 uint32_t reg_dest1, reg_dest2;
13841 uint32_t address, offset_addr;
13842 uint32_t record_buf[8], record_buf_mem[8];
13843 uint32_t op1, op2, op3;
13844
13845 ULONGEST u_regval[2];
13846
13847 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
13848 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
13849 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
13850
13851 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13852 {
13853       if (!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
13854 {
13855 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
13856 record_buf[0] = reg_dest1;
13857 record_buf[1] = ARM_PS_REGNUM;
13858 thumb2_insn_r->reg_rec_count = 2;
13859 }
13860
13861 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
13862 {
13863 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
13864 record_buf[2] = reg_dest2;
13865 thumb2_insn_r->reg_rec_count = 3;
13866 }
13867 }
13868 else
13869 {
13870 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13871 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
13872
13873 if (0 == op1 && 0 == op2)
13874 {
13875 /* Handle STREX. */
13876 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13877 address = u_regval[0] + (offset_imm * 4);
13878 record_buf_mem[0] = 4;
13879 record_buf_mem[1] = address;
13880 thumb2_insn_r->mem_rec_count = 1;
13881 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
13882 record_buf[0] = reg_rd;
13883 thumb2_insn_r->reg_rec_count = 1;
13884 }
13885 else if (1 == op1 && 0 == op2)
13886 {
13887 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
13888 record_buf[0] = reg_rd;
13889 thumb2_insn_r->reg_rec_count = 1;
13890 address = u_regval[0];
13891 record_buf_mem[1] = address;
13892
13893 if (4 == op3)
13894 {
13895 /* Handle STREXB. */
13896 record_buf_mem[0] = 1;
13897 thumb2_insn_r->mem_rec_count = 1;
13898 }
13899 else if (5 == op3)
13900 {
13901 /* Handle STREXH. */
13902               record_buf_mem[0] = 2;
13903 thumb2_insn_r->mem_rec_count = 1;
13904 }
13905 else if (7 == op3)
13906 {
13907 /* Handle STREXD. */
13908 address = u_regval[0];
13909 record_buf_mem[0] = 4;
13910 record_buf_mem[2] = 4;
13911 record_buf_mem[3] = address + 4;
13912 thumb2_insn_r->mem_rec_count = 2;
13913 }
13914 }
13915 else
13916 {
13917 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13918
13919 if (bit (thumb2_insn_r->arm_insn, 24))
13920 {
13921 if (bit (thumb2_insn_r->arm_insn, 23))
13922 offset_addr = u_regval[0] + (offset_imm * 4);
13923 else
13924 offset_addr = u_regval[0] - (offset_imm * 4);
13925
13926 address = offset_addr;
13927 }
13928 else
13929 address = u_regval[0];
13930
13931 record_buf_mem[0] = 4;
13932 record_buf_mem[1] = address;
13933 record_buf_mem[2] = 4;
13934 record_buf_mem[3] = address + 4;
13935 thumb2_insn_r->mem_rec_count = 2;
13936 record_buf[0] = reg_rn;
13937 thumb2_insn_r->reg_rec_count = 1;
13938 }
13939 }
13940
13941 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13942 record_buf);
13943 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13944 record_buf_mem);
13945 return ARM_RECORD_SUCCESS;
13946 }
13947
13948 /* Handler for thumb2 data processing (shift register and modified immediate)
13949 instructions. */
13950
13951 static int
13952 thumb2_record_data_proc_sreg_mimm (arm_insn_decode_record *thumb2_insn_r)
13953 {
13954 uint32_t reg_rd, op;
13955 uint32_t record_buf[8];
13956
13957 op = bits (thumb2_insn_r->arm_insn, 21, 24);
13958 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
13959
13960 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
13961 {
13962 record_buf[0] = ARM_PS_REGNUM;
13963 thumb2_insn_r->reg_rec_count = 1;
13964 }
13965 else
13966 {
13967 record_buf[0] = reg_rd;
13968 record_buf[1] = ARM_PS_REGNUM;
13969 thumb2_insn_r->reg_rec_count = 2;
13970 }
13971
13972 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13973 record_buf);
13974 return ARM_RECORD_SUCCESS;
13975 }
13976
13977 /* Generic handler for thumb2 instructions which affect the destination and
13978    PS registers.  */
13979
13980 static int
13981 thumb2_record_ps_dest_generic (arm_insn_decode_record *thumb2_insn_r)
13982 {
13983 uint32_t reg_rd;
13984 uint32_t record_buf[8];
13985
13986 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
13987
13988 record_buf[0] = reg_rd;
13989 record_buf[1] = ARM_PS_REGNUM;
13990 thumb2_insn_r->reg_rec_count = 2;
13991
13992 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13993 record_buf);
13994 return ARM_RECORD_SUCCESS;
13995 }
13996
13997 /* Handler for thumb2 branch and miscellaneous control instructions. */
13998
13999 static int
14000 thumb2_record_branch_misc_cntrl (arm_insn_decode_record *thumb2_insn_r)
14001 {
14002 uint32_t op, op1, op2;
14003 uint32_t record_buf[8];
14004
14005 op = bits (thumb2_insn_r->arm_insn, 20, 26);
14006 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
14007 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
14008
14009 /* Handle MSR insn. */
14010 if (!(op1 & 0x2) && 0x38 == op)
14011 {
14012 if (!(op2 & 0x3))
14013 {
14014 /* CPSR is going to be changed. */
14015 record_buf[0] = ARM_PS_REGNUM;
14016 thumb2_insn_r->reg_rec_count = 1;
14017 }
14018 else
14019 {
14020           arm_record_unsupported_insn (thumb2_insn_r);
14021 return -1;
14022 }
14023 }
14024 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
14025 {
14026 /* BLX. */
14027 record_buf[0] = ARM_PS_REGNUM;
14028 record_buf[1] = ARM_LR_REGNUM;
14029 thumb2_insn_r->reg_rec_count = 2;
14030 }
14031
14032 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
14033 record_buf);
14034 return ARM_RECORD_SUCCESS;
14035 }
14036
14037 /* Handler for thumb2 store single data item instructions. */
14038
14039 static int
14040 thumb2_record_str_single_data (arm_insn_decode_record *thumb2_insn_r)
14041 {
14042 struct regcache *reg_cache = thumb2_insn_r->regcache;
14043
14044 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
14045 uint32_t address, offset_addr;
14046 uint32_t record_buf[8], record_buf_mem[8];
14047 uint32_t op1, op2;
14048
14049 ULONGEST u_regval[2];
14050
14051 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
14052 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
14053 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
14054 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
14055
14056 if (bit (thumb2_insn_r->arm_insn, 23))
14057 {
14058 /* T2 encoding. */
14059 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
14060 offset_addr = u_regval[0] + offset_imm;
14061 address = offset_addr;
14062 }
14063 else
14064 {
14065 /* T3 encoding. */
14066 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
14067 {
14068           /* Handle STRB, STRH, STR (register).  */
14069 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
14070 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
14071 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
14072 offset_addr = u_regval[1] << shift_imm;
14073 address = u_regval[0] + offset_addr;
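          /* For example, a register-offset store with Rn = 0x1000, Rm = 3 and
             a shift of 2 gives offset_addr = 12 and a recorded address of
             0x100C.  */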
14074 }
14075 else
14076 {
14077 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
14078 if (bit (thumb2_insn_r->arm_insn, 10))
14079 {
14080 if (bit (thumb2_insn_r->arm_insn, 9))
14081 offset_addr = u_regval[0] + offset_imm;
14082 else
14083 offset_addr = u_regval[0] - offset_imm;
14084
14085 address = offset_addr;
14086 }
14087 else
14088 address = u_regval[0];
14089 }
14090 }
14091
14092 switch (op1)
14093 {
14094 /* Store byte instructions. */
14095 case 4:
14096 case 0:
14097 record_buf_mem[0] = 1;
14098 break;
14099 /* Store half word instructions. */
14100 case 1:
14101 case 5:
14102 record_buf_mem[0] = 2;
14103 break;
14104 /* Store word instructions. */
14105 case 2:
14106 case 6:
14107 record_buf_mem[0] = 4;
14108 break;
14109
14110 default:
14111 gdb_assert_not_reached ("no decoding pattern found");
14112 break;
14113 }
14114
14115 record_buf_mem[1] = address;
14116 thumb2_insn_r->mem_rec_count = 1;
14117 record_buf[0] = reg_rn;
14118 thumb2_insn_r->reg_rec_count = 1;
14119
14120 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
14121 record_buf);
14122 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
14123 record_buf_mem);
14124 return ARM_RECORD_SUCCESS;
14125 }
14126
14127 /* Handler for thumb2 load memory hints instructions. */
14128
14129 static int
14130 thumb2_record_ld_mem_hints (arm_insn_decode_record *thumb2_insn_r)
14131 {
14132 uint32_t record_buf[8];
14133 uint32_t reg_rt, reg_rn;
14134
14135 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
14136 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
14137
14138 if (ARM_PC_REGNUM != reg_rt)
14139 {
14140 record_buf[0] = reg_rt;
14141 record_buf[1] = reg_rn;
14142 record_buf[2] = ARM_PS_REGNUM;
14143 thumb2_insn_r->reg_rec_count = 3;
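      /* Rn is recorded in addition to Rt since the writeback addressing forms
         update the base register; recording the CPSR as well is a conservative
         over-approximation that keeps the record safe.  */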
14144
14145 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
14146 record_buf);
14147 return ARM_RECORD_SUCCESS;
14148 }
14149
14150 return ARM_RECORD_FAILURE;
14151 }
14152
14153 /* Handler for thumb2 load word instructions. */
14154
14155 static int
14156 thumb2_record_ld_word (arm_insn_decode_record *thumb2_insn_r)
14157 {
14158 uint32_t record_buf[8];
14159
14160 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
14161 record_buf[1] = ARM_PS_REGNUM;
14162 thumb2_insn_r->reg_rec_count = 2;
14163
14164 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
14165 record_buf);
14166 return ARM_RECORD_SUCCESS;
14167 }
14168
14169 /* Handler for thumb2 long multiply, long multiply accumulate, and
14170 divide instructions. */
14171
14172 static int
14173 thumb2_record_lmul_lmla_div (arm_insn_decode_record *thumb2_insn_r)
14174 {
14175 uint32_t opcode1 = 0, opcode2 = 0;
14176 uint32_t record_buf[8];
14177
14178 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
14179 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
14180
14181 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
14182 {
14183       /* Handle SMULL, UMULL, SMLAL and UMLAL, together with related long
14184          multiply (accumulate) variants.  */
14185 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
14186 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
14187 record_buf[2] = ARM_PS_REGNUM;
14188 thumb2_insn_r->reg_rec_count = 3;
14189 }
14190   else if (1 == opcode1 || 3 == opcode1)
14191 {
14192 /* Handle SDIV and UDIV. */
14193 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
14194 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
14195 record_buf[2] = ARM_PS_REGNUM;
14196 thumb2_insn_r->reg_rec_count = 3;
14197 }
14198 else
14199 return ARM_RECORD_FAILURE;
14200
14201 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
14202 record_buf);
14203 return ARM_RECORD_SUCCESS;
14204 }
14205
14206 /* Record handler for thumb32 coprocessor instructions. */
14207
14208 static int
14209 thumb2_record_coproc_insn (arm_insn_decode_record *thumb2_insn_r)
14210 {
14211 if (bit (thumb2_insn_r->arm_insn, 25))
14212 return arm_record_coproc_data_proc (thumb2_insn_r);
14213 else
14214 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
14215 }
14216
14217 /* Record handler for advanced SIMD structure load/store instructions.  */
14218
14219 static int
14220 thumb2_record_asimd_struct_ld_st (arm_insn_decode_record *thumb2_insn_r)
14221 {
14222 struct regcache *reg_cache = thumb2_insn_r->regcache;
14223 uint32_t l_bit, a_bit, b_bits;
14224 uint32_t record_buf[128], record_buf_mem[128];
14225 uint32_t reg_rn, reg_vd, address, f_elem;
14226 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
14227 uint8_t f_ebytes;
14228
14229 l_bit = bit (thumb2_insn_r->arm_insn, 21);
14230 a_bit = bit (thumb2_insn_r->arm_insn, 23);
14231 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
14232 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
14233 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
14234 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
14235 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
14236 f_elem = 8 / f_ebytes;
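  /* For example, a size field (bits 6-7) of 0b10 gives 4-byte elements, so
     each 8-byte D register holds f_elem = 2 elements.  */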
14237
14238 if (!l_bit)
14239 {
14240 ULONGEST u_regval = 0;
14241 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
14242 address = u_regval;
14243
14244 if (!a_bit)
14245 {
14246 /* Handle VST1. */
14247 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
14248 {
14249 if (b_bits == 0x07)
14250 bf_regs = 1;
14251 else if (b_bits == 0x0a)
14252 bf_regs = 2;
14253 else if (b_bits == 0x06)
14254 bf_regs = 3;
14255 else if (b_bits == 0x02)
14256 bf_regs = 4;
14257 else
14258 bf_regs = 0;
14259
14260 for (index_r = 0; index_r < bf_regs; index_r++)
14261 {
14262 for (index_e = 0; index_e < f_elem; index_e++)
14263 {
14264 record_buf_mem[index_m++] = f_ebytes;
14265 record_buf_mem[index_m++] = address;
14266 address = address + f_ebytes;
14267 thumb2_insn_r->mem_rec_count += 1;
14268 }
14269 }
14270 }
14271 /* Handle VST2. */
14272 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
14273 {
14274 if (b_bits == 0x09 || b_bits == 0x08)
14275 bf_regs = 1;
14276 else if (b_bits == 0x03)
14277 bf_regs = 2;
14278 else
14279 bf_regs = 0;
14280
14281 for (index_r = 0; index_r < bf_regs; index_r++)
14282 for (index_e = 0; index_e < f_elem; index_e++)
14283 {
14284 for (loop_t = 0; loop_t < 2; loop_t++)
14285 {
14286 record_buf_mem[index_m++] = f_ebytes;
14287 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
14288 thumb2_insn_r->mem_rec_count += 1;
14289 }
14290 address = address + (2 * f_ebytes);
14291 }
14292 }
14293 /* Handle VST3. */
14294 else if ((b_bits & 0x0e) == 0x04)
14295 {
14296 for (index_e = 0; index_e < f_elem; index_e++)
14297 {
14298 for (loop_t = 0; loop_t < 3; loop_t++)
14299 {
14300 record_buf_mem[index_m++] = f_ebytes;
14301 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
14302 thumb2_insn_r->mem_rec_count += 1;
14303 }
14304 address = address + (3 * f_ebytes);
14305 }
14306 }
14307 /* Handle VST4. */
14308 else if (!(b_bits & 0x0e))
14309 {
14310 for (index_e = 0; index_e < f_elem; index_e++)
14311 {
14312 for (loop_t = 0; loop_t < 4; loop_t++)
14313 {
14314 record_buf_mem[index_m++] = f_ebytes;
14315 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
14316 thumb2_insn_r->mem_rec_count += 1;
14317 }
14318 address = address + (4 * f_ebytes);
14319 }
14320 }
14321 }
14322 else
14323 {
14324 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
14325
14326 if (bft_size == 0x00)
14327 f_ebytes = 1;
14328 else if (bft_size == 0x01)
14329 f_ebytes = 2;
14330 else if (bft_size == 0x02)
14331 f_ebytes = 4;
14332 else
14333 f_ebytes = 0;
14334
14335 /* Handle VST1. */
14336 if (!(b_bits & 0x0b) || b_bits == 0x08)
14337 thumb2_insn_r->mem_rec_count = 1;
14338 /* Handle VST2. */
14339 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
14340 thumb2_insn_r->mem_rec_count = 2;
14341 /* Handle VST3. */
14342 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
14343 thumb2_insn_r->mem_rec_count = 3;
14344 /* Handle VST4. */
14345 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
14346 thumb2_insn_r->mem_rec_count = 4;
14347
14348 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
14349 {
14350               record_buf_mem[index_m * 2] = f_ebytes;
14351               record_buf_mem[index_m * 2 + 1] = address + (index_m * f_ebytes);
14352 }
14353 }
14354 }
14355 else
14356 {
14357 if (!a_bit)
14358 {
14359 /* Handle VLD1. */
14360 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
14361 thumb2_insn_r->reg_rec_count = 1;
14362 /* Handle VLD2. */
14363 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
14364 thumb2_insn_r->reg_rec_count = 2;
14365 /* Handle VLD3. */
14366 else if ((b_bits & 0x0e) == 0x04)
14367 thumb2_insn_r->reg_rec_count = 3;
14368 /* Handle VLD4. */
14369 else if (!(b_bits & 0x0e))
14370 thumb2_insn_r->reg_rec_count = 4;
14371 }
14372 else
14373 {
14374 /* Handle VLD1. */
14375 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
14376 thumb2_insn_r->reg_rec_count = 1;
14377 /* Handle VLD2. */
14378 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
14379 thumb2_insn_r->reg_rec_count = 2;
14380 /* Handle VLD3. */
14381 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
14382 thumb2_insn_r->reg_rec_count = 3;
14383 /* Handle VLD4. */
14384 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
14385 thumb2_insn_r->reg_rec_count = 4;
14386
14387 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
14388 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
14389 }
14390 }
14391
14392 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
14393 {
14394 record_buf[index_r] = reg_rn;
14395 thumb2_insn_r->reg_rec_count += 1;
14396 }
14397
14398 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
14399 record_buf);
14400 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
14401 record_buf_mem);
14402 return 0;
14403 }
14404
14405 /* Decodes thumb2 instruction type and invokes its record handler. */
14406
14407 static unsigned int
14408 thumb2_record_decode_insn_handler (arm_insn_decode_record *thumb2_insn_r)
14409 {
14410 uint32_t op, op1, op2;
14411
14412 op = bit (thumb2_insn_r->arm_insn, 15);
14413 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
14414 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
14415
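/* As an illustration (see the Thumb-2 selftest further below): for
   mrc 15, 0, r7, cr13, cr0, {3}, whose halfword-swapped encoding is
   0xee1d7f70, OP1 is 0x01 and OP2 is 0x61, so the insn is dispatched
   to thumb2_record_coproc_insn.  */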
14416 if (op1 == 0x01)
14417 {
14418 if (!(op2 & 0x64))
14419 {
14420 /* Load/store multiple instruction. */
14421 return thumb2_record_ld_st_multiple (thumb2_insn_r);
14422 }
14423 else if ((op2 & 0x64) == 0x4)
14424 {
14425 /* Load/store (dual/exclusive) and table branch instruction. */
14426 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
14427 }
14428 else if ((op2 & 0x60) == 0x20)
14429 {
14430 /* Data-processing (shifted register). */
14431 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
14432 }
14433 else if (op2 & 0x40)
14434 {
14435 /* Co-processor instructions. */
14436 return thumb2_record_coproc_insn (thumb2_insn_r);
14437 }
14438 }
14439 else if (op1 == 0x02)
14440 {
14441 if (op)
14442 {
14443 /* Branches and miscellaneous control instructions. */
14444 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
14445 }
14446 else if (op2 & 0x20)
14447 {
14448 /* Data-processing (plain binary immediate) instruction. */
14449 return thumb2_record_ps_dest_generic (thumb2_insn_r);
14450 }
14451 else
14452 {
14453 /* Data-processing (modified immediate). */
14454 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
14455 }
14456 }
14457 else if (op1 == 0x03)
14458 {
14459 if (!(op2 & 0x71))
14460 {
14461 /* Store single data item. */
14462 return thumb2_record_str_single_data (thumb2_insn_r);
14463 }
14464 else if (!((op2 & 0x71) ^ 0x10))
14465 {
14466 /* Advanced SIMD or structure load/store instructions. */
14467 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
14468 }
14469 else if (!((op2 & 0x67) ^ 0x01))
14470 {
14471 /* Load byte, memory hints instruction. */
14472 return thumb2_record_ld_mem_hints (thumb2_insn_r);
14473 }
14474 else if (!((op2 & 0x67) ^ 0x03))
14475 {
14476 /* Load halfword, memory hints instruction. */
14477 return thumb2_record_ld_mem_hints (thumb2_insn_r);
14478 }
14479 else if (!((op2 & 0x67) ^ 0x05))
14480 {
14481 /* Load word instruction. */
14482 return thumb2_record_ld_word (thumb2_insn_r);
14483 }
14484 else if (!((op2 & 0x70) ^ 0x20))
14485 {
14486 /* Data-processing (register) instruction. */
14487 return thumb2_record_ps_dest_generic (thumb2_insn_r);
14488 }
14489 else if (!((op2 & 0x78) ^ 0x30))
14490 {
14491 /* Multiply, multiply accumulate, abs diff instruction. */
14492 return thumb2_record_ps_dest_generic (thumb2_insn_r);
14493 }
14494 else if (!((op2 & 0x78) ^ 0x38))
14495 {
14496 /* Long multiply, long multiply accumulate, and divide. */
14497 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
14498 }
14499 else if (op2 & 0x40)
14500 {
14501 /* Co-processor instructions. */
14502 return thumb2_record_coproc_insn (thumb2_insn_r);
14503 }
14504 }
14505
14506 return -1;
14507 }
14508
14509 namespace {
14510 /* Abstract instruction reader. */
14511
14512 class abstract_instruction_reader
14513 {
14514 public:
14515 /* Read one instruction of size LEN from address MEMADDR, using
14516 BYTE_ORDER endianness. */
14517
14518 virtual ULONGEST read (CORE_ADDR memaddr, const size_t len,
14519 enum bfd_endian byte_order) = 0;
14520 };
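/* Concrete readers supply the instruction bytes: instruction_reader below
   fetches them from the target's code memory, while the selftest readers
   further down serve them from in-memory arrays.  */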
14521
14522 /* Instruction reader from real target. */
14523
14524 class instruction_reader : public abstract_instruction_reader
14525 {
14526 public:
14527 ULONGEST read (CORE_ADDR memaddr, const size_t len,
14528 enum bfd_endian byte_order) override
14529 {
14530 return read_code_unsigned_integer (memaddr, len, byte_order);
14531 }
14532 };
14533
14534 } // namespace
14535
14536 typedef int (*sti_arm_hdl_fp_t) (arm_insn_decode_record*);
14537
14538 /* Decode an ARM/Thumb insn depending on its condition codes and
14539 opcodes, and dispatch it. */
14540
14541 static int
14542 decode_insn (abstract_instruction_reader &reader,
14543 arm_insn_decode_record *arm_record,
14544 record_type_t record_type, uint32_t insn_size)
14545 {
14546
14547 /* Bits 25, 26 and 27 (counting from bit 0) decode the type of an
14548 ARM instruction. */
14549 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
14550 {
14551 arm_record_data_proc_misc_ld_str, /* 000. */
14552 arm_record_data_proc_imm, /* 001. */
14553 arm_record_ld_st_imm_offset, /* 010. */
14554 arm_record_ld_st_reg_offset, /* 011. */
14555 arm_record_ld_st_multiple, /* 100. */
14556 arm_record_b_bl, /* 101. */
14557 arm_record_asimd_vfp_coproc, /* 110. */
14558 arm_record_coproc_data_proc /* 111. */
14559 };
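/* For instance, the ARM instruction 0xe1a05000 (mov r5, r0), used in the
   selftest below, has bits 25-27 equal to 0b000 and is therefore handled
   by arm_record_data_proc_misc_ld_str.  */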
14560
14561 /* Bits 13, 14 and 15 (counting from bit 0) decode the type of a
14562 Thumb instruction. */
14563 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
14564 {
14565 thumb_record_shift_add_sub, /* 000. */
14566 thumb_record_add_sub_cmp_mov, /* 001. */
14567 thumb_record_ld_st_reg_offset, /* 010. */
14568 thumb_record_ld_st_imm_offset, /* 011. */
14569 thumb_record_ld_st_stack, /* 100. */
14570 thumb_record_misc, /* 101. */
14571 thumb_record_ldm_stm_swi, /* 110. */
14572 thumb_record_branch /* 111. */
14573 };
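/* Likewise, the 16-bit Thumb instruction 0xb2db (uxtb r3, r3), used in
   the selftest below, has bits 13-15 equal to 0b101 and is therefore
   handled by thumb_record_misc.  */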
14574
14575 uint32_t ret = 0; /* Return value: negative on failure, 0 on success. */
14576 uint32_t insn_id = 0;
14577 enum bfd_endian code_endian
14578 = gdbarch_byte_order_for_code (arm_record->gdbarch);
14579 arm_record->arm_insn
14580 = reader.read (arm_record->this_addr, insn_size, code_endian);
14581
14582 if (ARM_RECORD == record_type)
14583 {
14584 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
14585 insn_id = bits (arm_record->arm_insn, 25, 27);
14586
14587 if (arm_record->cond == 0xf)
14588 ret = arm_record_extension_space (arm_record);
14589 else
14590 {
14591 /* The insn is not in the extension space, so decode it
14592 according to its type bits. */
14593 ret = arm_handle_insn[insn_id] (arm_record);
14594 }
14595 if (ret != ARM_RECORD_SUCCESS)
14596 {
14597 arm_record_unsupported_insn (arm_record);
14598 ret = -1;
14599 }
14600 }
14601 else if (THUMB_RECORD == record_type)
14602 {
14603 /* Thumb does not have condition codes, so set COND to -1. */
14604 arm_record->cond = -1;
14605 insn_id = bits (arm_record->arm_insn, 13, 15);
14606 ret = thumb_handle_insn[insn_id] (arm_record);
14607 if (ret != ARM_RECORD_SUCCESS)
14608 {
14609 arm_record_unsupported_insn (arm_record);
14610 ret = -1;
14611 }
14612 }
14613 else if (THUMB2_RECORD == record_type)
14614 {
14615 /* Thumb does not have condition codes, so set COND to -1. */
14616 arm_record->cond = -1;
14617
14618 /* Swap the two halfwords of the 32-bit Thumb instruction. */
14619 arm_record->arm_insn
14620 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
14621
14622 ret = thumb2_record_decode_insn_handler (arm_record);
14623
14624 if (ret != ARM_RECORD_SUCCESS)
14625 {
14626 arm_record_unsupported_insn (arm_record);
14627 ret = -1;
14628 }
14629 }
14630 else
14631 {
14632 /* RECORD_TYPE must be one of the types handled above. */
14633 gdb_assert_not_reached ("not a valid instruction, could not decode");
14634 }
14635
14636 return ret;
14637 }
14638
14639 #if GDB_SELF_TEST
14640 namespace selftests {
14641
14642 /* Instruction reader class for selftests.
14643
14644 For 16-bit Thumb instructions, an array of uint16_t should be used.
14645
14646 For 32-bit Thumb instructions and regular 32-bit Arm instructions, an array
14647 of uint32_t should be used. */
14648
14649 template<typename T>
14650 class instruction_reader_selftest : public abstract_instruction_reader
14651 {
14652 public:
14653 template<size_t SIZE>
14654 instruction_reader_selftest (const T (&insns)[SIZE])
14655 : m_insns (insns), m_insns_size (SIZE)
14656 {}
14657
14658 ULONGEST read (CORE_ADDR memaddr, const size_t length,
14659 enum bfd_endian byte_order) override
14660 {
14661 SELF_CHECK (length == sizeof (T));
14662 SELF_CHECK (memaddr % sizeof (T) == 0);
14663 SELF_CHECK ((memaddr / sizeof (T)) < m_insns_size);
14664
14665 return m_insns[memaddr / sizeof (T)];
14666 }
14667
14668 private:
14669 const T *m_insns;
14670 const size_t m_insns_size;
14671 };
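/* Note: the reader above interprets MEMADDR as a byte offset from 0 into
   the backing array (one element every sizeof (T) bytes), so test
   sequences are assumed to start at address 0.  */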
14672
14673 static void
14674 arm_record_test (void)
14675 {
14676 struct gdbarch_info info;
14677 info.bfd_arch_info = bfd_scan_arch ("arm");
14678
14679 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
14680
14681 SELF_CHECK (gdbarch != NULL);
14682
14683 /* 16-bit Thumb instructions. */
14684 {
14685 arm_insn_decode_record arm_record;
14686
14687 memset (&arm_record, 0, sizeof (arm_insn_decode_record));
14688 arm_record.gdbarch = gdbarch;
14689
14690 /* Use the endian-free representation of the instructions here. The test
14691 will handle endianness conversions. */
14692 static const uint16_t insns[] = {
14693 /* db b2 uxtb r3, r3 */
14694 0xb2db,
14695 /* cd 58 ldr r5, [r1, r3] */
14696 0x58cd,
14697 };
14698
14699 instruction_reader_selftest<uint16_t> reader (insns);
14700 int ret = decode_insn (reader, &arm_record, THUMB_RECORD,
14701 THUMB_INSN_SIZE_BYTES);
14702
14703 SELF_CHECK (ret == 0);
14704 SELF_CHECK (arm_record.mem_rec_count == 0);
14705 SELF_CHECK (arm_record.reg_rec_count == 1);
14706 SELF_CHECK (arm_record.arm_regs[0] == 3);
14707
14708 arm_record.this_addr += 2;
14709 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
14710 THUMB_INSN_SIZE_BYTES);
14711
14712 SELF_CHECK (ret == 0);
14713 SELF_CHECK (arm_record.mem_rec_count == 0);
14714 SELF_CHECK (arm_record.reg_rec_count == 1);
14715 SELF_CHECK (arm_record.arm_regs[0] == 5);
14716 }
14717
14718 /* 32-bit Thumb-2 instructions. */
14719 {
14720 arm_insn_decode_record arm_record;
14721
14722 memset (&arm_record, 0, sizeof (arm_insn_decode_record));
14723 arm_record.gdbarch = gdbarch;
14724
14725 /* Use the endian-free representation of the instruction here. The test
14726 will handle endianness conversions. */
14727 static const uint32_t insns[] = {
14728 /* mrc 15, 0, r7, cr13, cr0, {3} */
14729 0x7f70ee1d,
14730 };
14731
14732 instruction_reader_selftest<uint32_t> reader (insns);
14733 int ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
14734 THUMB2_INSN_SIZE_BYTES);
14735
14736 SELF_CHECK (ret == 0);
14737 SELF_CHECK (arm_record.mem_rec_count == 0);
14738 SELF_CHECK (arm_record.reg_rec_count == 1);
14739 SELF_CHECK (arm_record.arm_regs[0] == 7);
14740 }
14741
14742 /* 32-bit instructions. */
14743 {
14744 arm_insn_decode_record arm_record;
14745
14746 memset (&arm_record, 0, sizeof (arm_insn_decode_record));
14747 arm_record.gdbarch = gdbarch;
14748
14749 /* Use the endian-free representation of the instruction here. The test
14750 will handle endianness conversions. */
14751 static const uint32_t insns[] = {
14752 /* mov r5, r0 */
14753 0xe1a05000,
14754 };
14755
14756 instruction_reader_selftest<uint32_t> reader (insns);
14757 int ret = decode_insn (reader, &arm_record, ARM_RECORD,
14758 ARM_INSN_SIZE_BYTES);
14759
14760 SELF_CHECK (ret == 0);
14761 }
14762 }
14763
14764 /* Instruction reader from manually cooked instruction sequences. */
14765
14766 class test_arm_instruction_reader : public arm_instruction_reader
14767 {
14768 public:
14769 explicit test_arm_instruction_reader (gdb::array_view<const uint32_t> insns)
14770 : m_insns (insns)
14771 {}
14772
14773 uint32_t read (CORE_ADDR memaddr, enum bfd_endian byte_order) const override
14774 {
14775 SELF_CHECK (memaddr % 4 == 0);
14776 SELF_CHECK (memaddr / 4 < m_insns.size ());
14777
14778 return m_insns[memaddr / 4];
14779 }
14780
14781 private:
14782 const gdb::array_view<const uint32_t> m_insns;
14783 };
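/* As with instruction_reader_selftest above, MEMADDR is interpreted as a
   byte offset from 0, one 32-bit ARM instruction per word.  */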
14784
14785 static void
14786 arm_analyze_prologue_test ()
14787 {
14788 for (bfd_endian endianness : {BFD_ENDIAN_LITTLE, BFD_ENDIAN_BIG})
14789 {
14790 struct gdbarch_info info;
14791 info.byte_order = endianness;
14792 info.byte_order_for_code = endianness;
14793 info.bfd_arch_info = bfd_scan_arch ("arm");
14794
14795 struct gdbarch *gdbarch = gdbarch_find_by_info (info);
14796
14797 SELF_CHECK (gdbarch != NULL);
14798
14799 /* The "sub" instruction contains an immediate value rotate count of 0,
14800 which resulted in a 32-bit shift of a 32-bit value, caught by
14801 UBSan. */
14802 const uint32_t insns[] = {
14803 0xe92d4ff0, /* push {r4, r5, r6, r7, r8, r9, sl, fp, lr} */
14804 0xe1a05000, /* mov r5, r0 */
14805 0xe5903020, /* ldr r3, [r0, #32] */
14806 0xe24dd044, /* sub sp, sp, #68 ; 0x44 */
14807 };
14808
14809 test_arm_instruction_reader mem_reader (insns);
14810 arm_prologue_cache cache;
14811 arm_cache_init (&cache, gdbarch);
14812
14813 arm_analyze_prologue (gdbarch, 0, sizeof (insns) - 1, &cache, mem_reader);
14814 }
14815 }
14816
14817 } // namespace selftests
14818 #endif /* GDB_SELF_TEST */
14819
14820 /* Cleans up local record registers and memory allocations. */
14821
14822 static void
14823 deallocate_reg_mem (arm_insn_decode_record *record)
14824 {
14825 xfree (record->arm_regs);
14826 xfree (record->arm_mems);
14827 }
14828
14829
14830 /* Parse the current instruction, and record the values of the registers
14831 and memory that the current instruction will change into
14832 "record_arch_list". Return -1 if something is wrong. */
14833
14834 int
14835 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
14836 CORE_ADDR insn_addr)
14837 {
14838
14839 uint32_t no_of_rec = 0;
14840 uint32_t ret = 0; /* Return value: -1 on record failure, 0 on success. */
14841 ULONGEST t_bit = 0, insn_id = 0;
14842
14843 ULONGEST u_regval = 0;
14844
14845 arm_insn_decode_record arm_record;
14846
14847 memset (&arm_record, 0, sizeof (arm_insn_decode_record));
14848 arm_record.regcache = regcache;
14849 arm_record.this_addr = insn_addr;
14850 arm_record.gdbarch = gdbarch;
14851
14852
14853 if (record_debug > 1)
14854 {
14855 gdb_printf (gdb_stdlog, "Process record: arm_process_record "
14856 "addr = %s\n",
14857 paddress (gdbarch, arm_record.this_addr));
14858 }
14859
14860 instruction_reader reader;
14861 enum bfd_endian code_endian
14862 = gdbarch_byte_order_for_code (arm_record.gdbarch);
14863 arm_record.arm_insn
14864 = reader.read (arm_record.this_addr, 2, code_endian);
14865
14866 /* Check whether the insn is a Thumb or an ARM one. */
14867
14868 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
14869 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
14870
14871
14872 if (!(u_regval & t_bit))
14873 {
14874 /* We are decoding an ARM insn. */
14875 ret = decode_insn (reader, &arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
14876 }
14877 else
14878 {
14879 insn_id = bits (arm_record.arm_insn, 11, 15);
14880 /* Is it a 32-bit Thumb-2 insn (bits 15-11 = 0b11101, 0b11110 or 0b11111)? */
14881 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
14882 {
14883 ret = decode_insn (reader, &arm_record, THUMB2_RECORD,
14884 THUMB2_INSN_SIZE_BYTES);
14885 }
14886 else
14887 {
14888 /* We are decoding a Thumb insn. */
14889 ret = decode_insn (reader, &arm_record, THUMB_RECORD,
14890 THUMB_INSN_SIZE_BYTES);
14891 }
14892 }
14893
14894 if (0 == ret)
14895 {
14896 /* Record registers. */
14897 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
14898 if (arm_record.arm_regs)
14899 {
14900 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
14901 {
14902 if (record_full_arch_list_add_reg
14903 (arm_record.regcache, arm_record.arm_regs[no_of_rec]))
14904 ret = -1;
14905 }
14906 }
14907 /* Record memories. */
14908 if (arm_record.arm_mems)
14909 {
14910 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
14911 {
14912 if (record_full_arch_list_add_mem
14913 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
14914 arm_record.arm_mems[no_of_rec].len))
14915 ret = -1;
14916 }
14917 }
14918
14919 if (record_full_arch_list_add_end ())
14920 ret = -1;
14921 }
14922
14923
14924 deallocate_reg_mem (&arm_record);
14925
14926 return ret;
14927 }
14928
14929 /* See arm-tdep.h. */
14930
14931 const target_desc *
14932 arm_read_description (arm_fp_type fp_type, bool tls)
14933 {
14934 struct target_desc *tdesc = tdesc_arm_list[fp_type][tls];
14935
14936 if (tdesc == nullptr)
14937 {
14938 tdesc = arm_create_target_description (fp_type, tls);
14939 tdesc_arm_list[fp_type][tls] = tdesc;
14940 }
14941
14942 return tdesc;
14943 }
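/* A minimal usage sketch (FP_TYPE and HAVE_TLS stand for whatever the
   caller has already determined about the target):

     const target_desc *tdesc = arm_read_description (fp_type, have_tls);

   Later calls with the same arguments return the descriptor cached in
   tdesc_arm_list by the first call.  */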
14944
14945 /* See arm-tdep.h. */
14946
14947 const target_desc *
14948 arm_read_mprofile_description (arm_m_profile_type m_type)
14949 {
14950 struct target_desc *tdesc = tdesc_arm_mprofile_list[m_type];
14951
14952 if (tdesc == nullptr)
14953 {
14954 tdesc = arm_create_mprofile_target_description (m_type);
14955 tdesc_arm_mprofile_list[m_type] = tdesc;
14956 }
14957
14958 return tdesc;
14959 }