gdb/arm-tdep.c (binutils-gdb.git)
1 /* Common target dependent code for GDB on ARM systems.
3 Copyright (C) 1988-2014 Free Software Foundation, Inc.
5 This file is part of GDB.
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
20 #include "defs.h"
22 #include <ctype.h> /* XXX for isupper (). */
24 #include "frame.h"
25 #include "inferior.h"
26 #include "infrun.h"
27 #include "gdbcmd.h"
28 #include "gdbcore.h"
29 #include "dis-asm.h" /* For register styles. */
30 #include "regcache.h"
31 #include "reggroups.h"
32 #include "doublest.h"
33 #include "value.h"
34 #include "arch-utils.h"
35 #include "osabi.h"
36 #include "frame-unwind.h"
37 #include "frame-base.h"
38 #include "trad-frame.h"
39 #include "objfiles.h"
40 #include "dwarf2-frame.h"
41 #include "gdbtypes.h"
42 #include "prologue-value.h"
43 #include "remote.h"
44 #include "target-descriptions.h"
45 #include "user-regs.h"
46 #include "observer.h"
48 #include "arm-tdep.h"
49 #include "gdb/sim-arm.h"
51 #include "elf-bfd.h"
52 #include "coff/internal.h"
53 #include "elf/arm.h"
55 #include "vec.h"
57 #include "record.h"
58 #include "record-full.h"
60 #include "features/arm-with-m.c"
61 #include "features/arm-with-m-fpa-layout.c"
62 #include "features/arm-with-m-vfp-d16.c"
63 #include "features/arm-with-iwmmxt.c"
64 #include "features/arm-with-vfpv2.c"
65 #include "features/arm-with-vfpv3.c"
66 #include "features/arm-with-neon.c"
68 static int arm_debug;
70 /* Macros for setting and testing a bit in a minimal symbol that marks
 71 it as a Thumb function. The MSB of the minimal symbol's "info" field
72 is used for this purpose.
74 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
75 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
77 #define MSYMBOL_SET_SPECIAL(msym) \
78 MSYMBOL_TARGET_FLAG_1 (msym) = 1
80 #define MSYMBOL_IS_SPECIAL(msym) \
81 MSYMBOL_TARGET_FLAG_1 (msym)
83 /* Per-objfile data used for mapping symbols. */
84 static const struct objfile_data *arm_objfile_data_key;
86 struct arm_mapping_symbol
88 bfd_vma value;
89 char type;
91 typedef struct arm_mapping_symbol arm_mapping_symbol_s;
92 DEF_VEC_O(arm_mapping_symbol_s);
94 struct arm_per_objfile
96 VEC(arm_mapping_symbol_s) **section_maps;
99 /* The list of available "set arm ..." and "show arm ..." commands. */
100 static struct cmd_list_element *setarmcmdlist = NULL;
101 static struct cmd_list_element *showarmcmdlist = NULL;
103 /* The type of floating-point to use. Keep this in sync with enum
104 arm_float_model, and the help string in _initialize_arm_tdep. */
105 static const char *const fp_model_strings[] =
107 "auto",
108 "softfpa",
109 "fpa",
110 "softvfp",
111 "vfp",
112 NULL
115 /* A variable that can be configured by the user. */
116 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
117 static const char *current_fp_model = "auto";
119 /* The ABI to use. Keep this in sync with arm_abi_kind. */
120 static const char *const arm_abi_strings[] =
122 "auto",
123 "APCS",
124 "AAPCS",
125 NULL
128 /* A variable that can be configured by the user. */
129 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
130 static const char *arm_abi_string = "auto";
132 /* The execution mode to assume. */
133 static const char *const arm_mode_strings[] =
135 "auto",
136 "arm",
137 "thumb",
138 NULL
141 static const char *arm_fallback_mode_string = "auto";
142 static const char *arm_force_mode_string = "auto";
144 /* Internal override of the execution mode. -1 means no override,
145 0 means override to ARM mode, 1 means override to Thumb mode.
146 The effect is the same as if arm_force_mode has been set by the
147 user (except the internal override has precedence over a user's
148 arm_force_mode override). */
149 static int arm_override_mode = -1;
151 /* Number of different reg name sets (options). */
152 static int num_disassembly_options;
154 /* The standard register names, and all the valid aliases for them. Note
155 that `fp', `sp' and `pc' are not added in this alias list, because they
156 have been added as builtin user registers in
157 std-regs.c:_initialize_frame_reg. */
158 static const struct
160 const char *name;
161 int regnum;
162 } arm_register_aliases[] = {
163 /* Basic register numbers. */
164 { "r0", 0 },
165 { "r1", 1 },
166 { "r2", 2 },
167 { "r3", 3 },
168 { "r4", 4 },
169 { "r5", 5 },
170 { "r6", 6 },
171 { "r7", 7 },
172 { "r8", 8 },
173 { "r9", 9 },
174 { "r10", 10 },
175 { "r11", 11 },
176 { "r12", 12 },
177 { "r13", 13 },
178 { "r14", 14 },
179 { "r15", 15 },
180 /* Synonyms (argument and variable registers). */
181 { "a1", 0 },
182 { "a2", 1 },
183 { "a3", 2 },
184 { "a4", 3 },
185 { "v1", 4 },
186 { "v2", 5 },
187 { "v3", 6 },
188 { "v4", 7 },
189 { "v5", 8 },
190 { "v6", 9 },
191 { "v7", 10 },
192 { "v8", 11 },
193 /* Other platform-specific names for r9. */
194 { "sb", 9 },
195 { "tr", 9 },
196 /* Special names. */
197 { "ip", 12 },
198 { "lr", 14 },
199 /* Names used by GCC (not listed in the ARM EABI). */
200 { "sl", 10 },
201 /* A special name from the older ATPCS. */
202 { "wr", 7 },
205 static const char *const arm_register_names[] =
206 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
207 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
208 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
209 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
210 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
211 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
212 "fps", "cpsr" }; /* 24 25 */
214 /* Valid register name styles. */
215 static const char **valid_disassembly_styles;
217 /* Disassembly style to use. Default to "std" register names. */
218 static const char *disassembly_style;
220 /* This is used to keep the bfd arch_info in sync with the disassembly
221 style. */
222 static void set_disassembly_style_sfunc(char *, int,
223 struct cmd_list_element *);
224 static void set_disassembly_style (void);
226 static void convert_from_extended (const struct floatformat *, const void *,
227 void *, int);
228 static void convert_to_extended (const struct floatformat *, void *,
229 const void *, int);
231 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
232 struct regcache *regcache,
233 int regnum, gdb_byte *buf);
234 static void arm_neon_quad_write (struct gdbarch *gdbarch,
235 struct regcache *regcache,
236 int regnum, const gdb_byte *buf);
238 static int thumb_insn_size (unsigned short inst1);
240 struct arm_prologue_cache
242 /* The stack pointer at the time this frame was created; i.e. the
243 caller's stack pointer when this function was called. It is used
244 to identify this frame. */
245 CORE_ADDR prev_sp;
247 /* The frame base for this frame is just prev_sp - frame size.
248 FRAMESIZE is the distance from the frame pointer to the
249 initial stack pointer. */
251 int framesize;
253 /* The register used to hold the frame pointer for this frame. */
254 int framereg;
256 /* Saved register offsets. */
257 struct trad_frame_saved_reg *saved_regs;
260 static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
261 CORE_ADDR prologue_start,
262 CORE_ADDR prologue_end,
263 struct arm_prologue_cache *cache);
 265 /* Architecture version for displaced stepping. This affects the behaviour of
266 certain instructions, and really should not be hard-wired. */
268 #define DISPLACED_STEPPING_ARCH_VERSION 5
270 /* Addresses for calling Thumb functions have the bit 0 set.
271 Here are some macros to test, set, or clear bit 0 of addresses. */
272 #define IS_THUMB_ADDR(addr) ((addr) & 1)
273 #define MAKE_THUMB_ADDR(addr) ((addr) | 1)
274 #define UNMAKE_THUMB_ADDR(addr) ((addr) & ~1)
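/* Illustrative example: how the Thumb address-bit macros above behave on a
   sample code address.  The function below is a hypothetical sketch (the
   name and the address value are made up) and is kept out of the build with
   #if 0; gdb_assert is used purely for illustration.  */
#if 0
static void
example_thumb_addr_bits (void)
{
  CORE_ADDR addr = 0x8000;			/* an even (ARM) code address */
  CORE_ADDR thumb = MAKE_THUMB_ADDR (addr);	/* 0x8001 */

  /* Bit 0 marks the address as Thumb...  */
  gdb_assert (IS_THUMB_ADDR (thumb));
  gdb_assert (!IS_THUMB_ADDR (addr));

  /* ...and stripping it recovers the real address.  */
  gdb_assert (UNMAKE_THUMB_ADDR (thumb) == addr);
}
#endif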
276 /* Set to true if the 32-bit mode is in use. */
278 int arm_apcs_32 = 1;
280 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
283 arm_psr_thumb_bit (struct gdbarch *gdbarch)
285 if (gdbarch_tdep (gdbarch)->is_m)
286 return XPSR_T;
287 else
288 return CPSR_T;
291 /* Determine if FRAME is executing in Thumb mode. */
294 arm_frame_is_thumb (struct frame_info *frame)
296 CORE_ADDR cpsr;
297 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
299 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
300 directly (from a signal frame or dummy frame) or by interpreting
301 the saved LR (from a prologue or DWARF frame). So consult it and
302 trust the unwinders. */
303 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
305 return (cpsr & t_bit) != 0;
308 /* Callback for VEC_lower_bound. */
310 static inline int
311 arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
312 const struct arm_mapping_symbol *rhs)
314 return lhs->value < rhs->value;
317 /* Search for the mapping symbol covering MEMADDR. If one is found,
318 return its type. Otherwise, return 0. If START is non-NULL,
319 set *START to the location of the mapping symbol. */
321 static char
322 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
324 struct obj_section *sec;
326 /* If there are mapping symbols, consult them. */
327 sec = find_pc_section (memaddr);
328 if (sec != NULL)
330 struct arm_per_objfile *data;
331 VEC(arm_mapping_symbol_s) *map;
332 struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
333 0 };
334 unsigned int idx;
336 data = objfile_data (sec->objfile, arm_objfile_data_key);
337 if (data != NULL)
339 map = data->section_maps[sec->the_bfd_section->index];
340 if (!VEC_empty (arm_mapping_symbol_s, map))
342 struct arm_mapping_symbol *map_sym;
344 idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
345 arm_compare_mapping_symbols);
347 /* VEC_lower_bound finds the earliest ordered insertion
348 point. If the following symbol starts at this exact
349 address, we use that; otherwise, the preceding
350 mapping symbol covers this address. */
351 if (idx < VEC_length (arm_mapping_symbol_s, map))
353 map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
354 if (map_sym->value == map_key.value)
356 if (start)
357 *start = map_sym->value + obj_section_addr (sec);
358 return map_sym->type;
362 if (idx > 0)
364 map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
365 if (start)
366 *start = map_sym->value + obj_section_addr (sec);
367 return map_sym->type;
373 return 0;
376 /* Determine if the program counter specified in MEMADDR is in a Thumb
377 function. This function should be called for addresses unrelated to
378 any executing frame; otherwise, prefer arm_frame_is_thumb. */
381 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
383 struct bound_minimal_symbol sym;
384 char type;
385 struct displaced_step_closure* dsc
386 = get_displaced_step_closure_by_addr(memaddr);
 388 /* If we are checking the mode of a displaced instruction in the copy area,
 389 the mode should be determined by the instruction at the original address. */
390 if (dsc)
392 if (debug_displaced)
393 fprintf_unfiltered (gdb_stdlog,
394 "displaced: check mode of %.8lx instead of %.8lx\n",
395 (unsigned long) dsc->insn_addr,
396 (unsigned long) memaddr);
397 memaddr = dsc->insn_addr;
400 /* If bit 0 of the address is set, assume this is a Thumb address. */
401 if (IS_THUMB_ADDR (memaddr))
402 return 1;
404 /* Respect internal mode override if active. */
405 if (arm_override_mode != -1)
406 return arm_override_mode;
408 /* If the user wants to override the symbol table, let him. */
409 if (strcmp (arm_force_mode_string, "arm") == 0)
410 return 0;
411 if (strcmp (arm_force_mode_string, "thumb") == 0)
412 return 1;
414 /* ARM v6-M and v7-M are always in Thumb mode. */
415 if (gdbarch_tdep (gdbarch)->is_m)
416 return 1;
418 /* If there are mapping symbols, consult them. */
419 type = arm_find_mapping_symbol (memaddr, NULL);
420 if (type)
421 return type == 't';
423 /* Thumb functions have a "special" bit set in minimal symbols. */
424 sym = lookup_minimal_symbol_by_pc (memaddr);
425 if (sym.minsym)
426 return (MSYMBOL_IS_SPECIAL (sym.minsym));
428 /* If the user wants to override the fallback mode, let them. */
429 if (strcmp (arm_fallback_mode_string, "arm") == 0)
430 return 0;
431 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
432 return 1;
434 /* If we couldn't find any symbol, but we're talking to a running
435 target, then trust the current value of $cpsr. This lets
436 "display/i $pc" always show the correct mode (though if there is
437 a symbol table we will not reach here, so it still may not be
438 displayed in the mode it will be executed). */
439 if (target_has_registers)
440 return arm_frame_is_thumb (get_current_frame ());
442 /* Otherwise we're out of luck; we assume ARM. */
443 return 0;
446 /* Remove useless bits from addresses in a running program. */
447 static CORE_ADDR
448 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
450 /* On M-profile devices, do not strip the low bit from EXC_RETURN
451 (the magic exception return address). */
452 if (gdbarch_tdep (gdbarch)->is_m
453 && (val & 0xfffffff0) == 0xfffffff0)
454 return val;
456 if (arm_apcs_32)
457 return UNMAKE_THUMB_ADDR (val);
458 else
459 return (val & 0x03fffffc);
462 /* Return 1 if PC is the start of a compiler helper function which
463 can be safely ignored during prologue skipping. IS_THUMB is true
464 if the function is known to be a Thumb function due to the way it
465 is being called. */
466 static int
467 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
469 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
470 struct bound_minimal_symbol msym;
472 msym = lookup_minimal_symbol_by_pc (pc);
473 if (msym.minsym != NULL
474 && BMSYMBOL_VALUE_ADDRESS (msym) == pc
475 && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
477 const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);
479 /* The GNU linker's Thumb call stub to foo is named
480 __foo_from_thumb. */
481 if (strstr (name, "_from_thumb") != NULL)
482 name += 2;
484 /* On soft-float targets, __truncdfsf2 is called to convert promoted
485 arguments to their argument types in non-prototyped
486 functions. */
487 if (strncmp (name, "__truncdfsf2", strlen ("__truncdfsf2")) == 0)
488 return 1;
489 if (strncmp (name, "__aeabi_d2f", strlen ("__aeabi_d2f")) == 0)
490 return 1;
492 /* Internal functions related to thread-local storage. */
493 if (strncmp (name, "__tls_get_addr", strlen ("__tls_get_addr")) == 0)
494 return 1;
495 if (strncmp (name, "__aeabi_read_tp", strlen ("__aeabi_read_tp")) == 0)
496 return 1;
498 else
500 /* If we run against a stripped glibc, we may be unable to identify
501 special functions by name. Check for one important case,
502 __aeabi_read_tp, by comparing the *code* against the default
503 implementation (this is hand-written ARM assembler in glibc). */
505 if (!is_thumb
506 && read_memory_unsigned_integer (pc, 4, byte_order_for_code)
507 == 0xe3e00a0f /* mov r0, #0xffff0fff */
508 && read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code)
509 == 0xe240f01f) /* sub pc, r0, #31 */
510 return 1;
513 return 0;
516 /* Support routines for instruction parsing. */
517 #define submask(x) ((1L << ((x) + 1)) - 1)
518 #define bit(obj,st) (((obj) >> (st)) & 1)
519 #define bits(obj,st,fn) (((obj) >> (st)) & submask ((fn) - (st)))
520 #define sbits(obj,st,fn) \
521 ((long) (bits(obj,st,fn) | ((long) bit(obj,fn) * ~ submask (fn - st))))
522 #define BranchDest(addr,instr) \
523 ((CORE_ADDR) (((unsigned long) (addr)) + 8 + (sbits (instr, 0, 23) << 2)))
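/* Worked example: how the bit-field macros above decode an ARM
   branch-and-link instruction.  The encoding 0xebfffffe is "bl" with a
   signed 24-bit word offset of -2, i.e. a branch to itself.  This is a
   hypothetical sketch (the function name and the sample address are made
   up), kept out of the build with #if 0; gdb_assert is illustrative.  */
#if 0
static void
example_insn_field_macros (void)
{
  unsigned long insn = 0xebfffffe;	/* bl <self> */
  CORE_ADDR pc = 0x10000;

  gdb_assert (bits (insn, 28, 31) == 0xe);	/* condition field: AL */
  gdb_assert (bits (insn, 24, 27) == 0xb);	/* opcode field: BL */

  /* The 24-bit offset is sign-extended: 0xfffffe means -2 words.  */
  gdb_assert (sbits (insn, 0, 23) == -2);

  /* The destination is PC + 8 (ARM pipeline offset) + offset * 4.  */
  gdb_assert (BranchDest (pc, insn) == pc);
}
#endif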
525 /* Extract the immediate from instruction movw/movt of encoding T. INSN1 is
526 the first 16-bit of instruction, and INSN2 is the second 16-bit of
527 instruction. */
528 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
529 ((bits ((insn1), 0, 3) << 12) \
530 | (bits ((insn1), 10, 10) << 11) \
531 | (bits ((insn2), 12, 14) << 8) \
532 | bits ((insn2), 0, 7))
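/* Worked example: the Thumb-2 instruction "movw r0, #0x1234" encodes as the
   halfword pair 0xf241 0x2034; the macro reassembles the imm4, i, imm3 and
   imm8 fields scattered across the two halfwords into the 16-bit immediate.
   This is a hypothetical sketch (the function name is made up), kept out of
   the build with #if 0.  */
#if 0
static void
example_extract_movw_movt_imm_t (void)
{
  unsigned short insn1 = 0xf241;	/* movw r0, #0x1234, first halfword */
  unsigned short insn2 = 0x2034;	/* second halfword */

  gdb_assert (EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2) == 0x1234);
}
#endif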
534 /* Extract the immediate from instruction movw/movt of encoding A. INSN is
535 the 32-bit instruction. */
536 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
537 ((bits ((insn), 16, 19) << 12) \
538 | bits ((insn), 0, 11))
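/* Worked example: the ARM (encoding A1) instruction "movw r0, #0x1234" is
   0xe3010234; the macro combines the imm4 field (bits 16-19) and the imm12
   field (bits 0-11) back into the 16-bit immediate.  This is a hypothetical
   sketch (the function name is made up), kept out of the build with #if 0.  */
#if 0
static void
example_extract_movw_movt_imm_a (void)
{
  unsigned int insn = 0xe3010234;	/* movw r0, #0x1234 */

  gdb_assert (EXTRACT_MOVW_MOVT_IMM_A (insn) == 0x1234);
}
#endif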
540 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
542 static unsigned int
543 thumb_expand_immediate (unsigned int imm)
545 unsigned int count = imm >> 7;
547 if (count < 8)
548 switch (count / 2)
550 case 0:
551 return imm & 0xff;
552 case 1:
553 return (imm & 0xff) | ((imm & 0xff) << 16);
554 case 2:
555 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
556 case 3:
557 return (imm & 0xff) | ((imm & 0xff) << 8)
558 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
561 return (0x80 | (imm & 0x7f)) << (32 - count);
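/* Worked examples for thumb_expand_immediate: the 12-bit input is the
   i:imm3:imm8 field collected from a 32-bit Thumb-2 data-processing
   instruction, and the expansion follows the architectural ThumbExpandImm
   pseudo-op.  This is a hypothetical sketch (the function name is made up),
   kept out of the build with #if 0.  */
#if 0
static void
example_thumb_expand_immediate (void)
{
  gdb_assert (thumb_expand_immediate (0x0ab) == 0x000000ab);
  gdb_assert (thumb_expand_immediate (0x1ab) == 0x00ab00ab);
  gdb_assert (thumb_expand_immediate (0x2ab) == 0xab00ab00);
  gdb_assert (thumb_expand_immediate (0x3ab) == 0xabababab);

  /* Top five bits >= 8: rotate 0x80|imm7 right by that five-bit count,
     here 0xab rotated right by 9.  */
  gdb_assert (thumb_expand_immediate (0x4ab) == 0x55800000);
}
#endif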
564 /* Return 1 if the 16-bit Thumb instruction INST might change
565 control flow, 0 otherwise. */
567 static int
568 thumb_instruction_changes_pc (unsigned short inst)
570 if ((inst & 0xff00) == 0xbd00) /* pop {rlist, pc} */
571 return 1;
573 if ((inst & 0xf000) == 0xd000) /* conditional branch */
574 return 1;
576 if ((inst & 0xf800) == 0xe000) /* unconditional branch */
577 return 1;
579 if ((inst & 0xff00) == 0x4700) /* bx REG, blx REG */
580 return 1;
582 if ((inst & 0xff87) == 0x4687) /* mov pc, REG */
583 return 1;
585 if ((inst & 0xf500) == 0xb100) /* CBNZ or CBZ. */
586 return 1;
588 return 0;
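/* Illustrative example: a few common 16-bit Thumb encodings and how the test
   above classifies them.  This is a hypothetical sketch (the function name is
   made up), kept out of the build with #if 0.  */
#if 0
static void
example_thumb_instruction_changes_pc (void)
{
  gdb_assert (thumb_instruction_changes_pc (0xbd10));	/* pop {r4, pc} */
  gdb_assert (thumb_instruction_changes_pc (0x4770));	/* bx lr */
  gdb_assert (thumb_instruction_changes_pc (0xd001));	/* beq <label> */

  gdb_assert (!thumb_instruction_changes_pc (0xb580));	/* push {r7, lr} */
  gdb_assert (!thumb_instruction_changes_pc (0x2000));	/* movs r0, #0 */
}
#endif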
591 /* Return 1 if the 32-bit Thumb instruction in INST1 and INST2
592 might change control flow, 0 otherwise. */
594 static int
595 thumb2_instruction_changes_pc (unsigned short inst1, unsigned short inst2)
597 if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
599 /* Branches and miscellaneous control instructions. */
601 if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
603 /* B, BL, BLX. */
604 return 1;
606 else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
608 /* SUBS PC, LR, #imm8. */
609 return 1;
611 else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
613 /* Conditional branch. */
614 return 1;
617 return 0;
620 if ((inst1 & 0xfe50) == 0xe810)
622 /* Load multiple or RFE. */
624 if (bit (inst1, 7) && !bit (inst1, 8))
626 /* LDMIA or POP */
627 if (bit (inst2, 15))
628 return 1;
630 else if (!bit (inst1, 7) && bit (inst1, 8))
632 /* LDMDB */
633 if (bit (inst2, 15))
634 return 1;
636 else if (bit (inst1, 7) && bit (inst1, 8))
638 /* RFEIA */
639 return 1;
641 else if (!bit (inst1, 7) && !bit (inst1, 8))
643 /* RFEDB */
644 return 1;
647 return 0;
650 if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
652 /* MOV PC or MOVS PC. */
653 return 1;
656 if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
658 /* LDR PC. */
659 if (bits (inst1, 0, 3) == 15)
660 return 1;
661 if (bit (inst1, 7))
662 return 1;
663 if (bit (inst2, 11))
664 return 1;
665 if ((inst2 & 0x0fc0) == 0x0000)
666 return 1;
668 return 0;
671 if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
673 /* TBB. */
674 return 1;
677 if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
679 /* TBH. */
680 return 1;
683 return 0;
686 /* Return 1 if the 16-bit Thumb instruction INSN restores SP in
687 epilogue, 0 otherwise. */
689 static int
690 thumb_instruction_restores_sp (unsigned short insn)
692 return (insn == 0x46bd /* mov sp, r7 */
693 || (insn & 0xff80) == 0xb000 /* add sp, imm */
694 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
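/* Illustrative example: typical Thumb epilogue instructions matched by the
   test above.  This is a hypothetical sketch (the function name is made up),
   kept out of the build with #if 0.  */
#if 0
static void
example_thumb_instruction_restores_sp (void)
{
  gdb_assert (thumb_instruction_restores_sp (0x46bd));	/* mov sp, r7 */
  gdb_assert (thumb_instruction_restores_sp (0xb008));	/* add sp, #32 */
  gdb_assert (thumb_instruction_restores_sp (0xbc10));	/* pop {r4} */

  gdb_assert (!thumb_instruction_restores_sp (0xb508));	/* push {r3, lr} */
}
#endif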
697 /* Analyze a Thumb prologue, looking for a recognizable stack frame
698 and frame pointer. Scan until we encounter a store that could
699 clobber the stack frame unexpectedly, or an unknown instruction.
700 Return the last address which is definitely safe to skip for an
701 initial breakpoint. */
703 static CORE_ADDR
704 thumb_analyze_prologue (struct gdbarch *gdbarch,
705 CORE_ADDR start, CORE_ADDR limit,
706 struct arm_prologue_cache *cache)
708 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
709 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
710 int i;
711 pv_t regs[16];
712 struct pv_area *stack;
713 struct cleanup *back_to;
714 CORE_ADDR offset;
715 CORE_ADDR unrecognized_pc = 0;
717 for (i = 0; i < 16; i++)
718 regs[i] = pv_register (i, 0);
719 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
720 back_to = make_cleanup_free_pv_area (stack);
722 while (start < limit)
724 unsigned short insn;
726 insn = read_memory_unsigned_integer (start, 2, byte_order_for_code);
728 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
730 int regno;
731 int mask;
733 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
734 break;
736 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
737 whether to save LR (R14). */
738 mask = (insn & 0xff) | ((insn & 0x100) << 6);
740 /* Calculate offsets of saved R0-R7 and LR. */
741 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
742 if (mask & (1 << regno))
744 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
745 -4);
746 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
749 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
751 offset = (insn & 0x7f) << 2; /* get scaled offset */
752 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
753 -offset);
755 else if (thumb_instruction_restores_sp (insn))
757 /* Don't scan past the epilogue. */
758 break;
760 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
761 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
762 (insn & 0xff) << 2);
763 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
764 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
765 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
766 bits (insn, 6, 8));
767 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
768 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
769 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
770 bits (insn, 0, 7));
771 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
772 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
773 && pv_is_constant (regs[bits (insn, 3, 5)]))
774 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
775 regs[bits (insn, 6, 8)]);
776 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
777 && pv_is_constant (regs[bits (insn, 3, 6)]))
779 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
780 int rm = bits (insn, 3, 6);
781 regs[rd] = pv_add (regs[rd], regs[rm]);
783 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
785 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
786 int src_reg = (insn & 0x78) >> 3;
787 regs[dst_reg] = regs[src_reg];
789 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
791 /* Handle stores to the stack. Normally pushes are used,
792 but with GCC -mtpcs-frame, there may be other stores
793 in the prologue to create the frame. */
794 int regno = (insn >> 8) & 0x7;
795 pv_t addr;
797 offset = (insn & 0xff) << 2;
798 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
800 if (pv_area_store_would_trash (stack, addr))
801 break;
803 pv_area_store (stack, addr, 4, regs[regno]);
805 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
807 int rd = bits (insn, 0, 2);
808 int rn = bits (insn, 3, 5);
809 pv_t addr;
811 offset = bits (insn, 6, 10) << 2;
812 addr = pv_add_constant (regs[rn], offset);
814 if (pv_area_store_would_trash (stack, addr))
815 break;
817 pv_area_store (stack, addr, 4, regs[rd]);
819 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
820 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
821 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
822 /* Ignore stores of argument registers to the stack. */
824 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
825 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
826 /* Ignore block loads from the stack, potentially copying
827 parameters from memory. */
829 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
830 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
831 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
832 /* Similarly ignore single loads from the stack. */
834 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
835 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
836 /* Skip register copies, i.e. saves to another register
837 instead of the stack. */
839 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
840 /* Recognize constant loads; even with small stacks these are necessary
841 on Thumb. */
842 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
843 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
845 /* Constant pool loads, for the same reason. */
846 unsigned int constant;
847 CORE_ADDR loc;
849 loc = start + 4 + bits (insn, 0, 7) * 4;
850 constant = read_memory_unsigned_integer (loc, 4, byte_order);
851 regs[bits (insn, 8, 10)] = pv_constant (constant);
853 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
855 unsigned short inst2;
857 inst2 = read_memory_unsigned_integer (start + 2, 2,
858 byte_order_for_code);
860 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
862 /* BL, BLX. Allow some special function calls when
863 skipping the prologue; GCC generates these before
864 storing arguments to the stack. */
865 CORE_ADDR nextpc;
866 int j1, j2, imm1, imm2;
868 imm1 = sbits (insn, 0, 10);
869 imm2 = bits (inst2, 0, 10);
870 j1 = bit (inst2, 13);
871 j2 = bit (inst2, 11);
873 offset = ((imm1 << 12) + (imm2 << 1));
874 offset ^= ((!j2) << 22) | ((!j1) << 23);
876 nextpc = start + 4 + offset;
877 /* For BLX make sure to clear the low bits. */
878 if (bit (inst2, 12) == 0)
879 nextpc = nextpc & 0xfffffffc;
881 if (!skip_prologue_function (gdbarch, nextpc,
882 bit (inst2, 12) != 0))
883 break;
886 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
887 { registers } */
888 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
890 pv_t addr = regs[bits (insn, 0, 3)];
891 int regno;
893 if (pv_area_store_would_trash (stack, addr))
894 break;
896 /* Calculate offsets of saved registers. */
897 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
898 if (inst2 & (1 << regno))
900 addr = pv_add_constant (addr, -4);
901 pv_area_store (stack, addr, 4, regs[regno]);
904 if (insn & 0x0020)
905 regs[bits (insn, 0, 3)] = addr;
908 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
909 [Rn, #+/-imm]{!} */
910 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
912 int regno1 = bits (inst2, 12, 15);
913 int regno2 = bits (inst2, 8, 11);
914 pv_t addr = regs[bits (insn, 0, 3)];
916 offset = inst2 & 0xff;
917 if (insn & 0x0080)
918 addr = pv_add_constant (addr, offset);
919 else
920 addr = pv_add_constant (addr, -offset);
922 if (pv_area_store_would_trash (stack, addr))
923 break;
925 pv_area_store (stack, addr, 4, regs[regno1]);
926 pv_area_store (stack, pv_add_constant (addr, 4),
927 4, regs[regno2]);
929 if (insn & 0x0020)
930 regs[bits (insn, 0, 3)] = addr;
933 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
934 && (inst2 & 0x0c00) == 0x0c00
935 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
937 int regno = bits (inst2, 12, 15);
938 pv_t addr = regs[bits (insn, 0, 3)];
940 offset = inst2 & 0xff;
941 if (inst2 & 0x0200)
942 addr = pv_add_constant (addr, offset);
943 else
944 addr = pv_add_constant (addr, -offset);
946 if (pv_area_store_would_trash (stack, addr))
947 break;
949 pv_area_store (stack, addr, 4, regs[regno]);
951 if (inst2 & 0x0100)
952 regs[bits (insn, 0, 3)] = addr;
955 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
956 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
958 int regno = bits (inst2, 12, 15);
959 pv_t addr;
961 offset = inst2 & 0xfff;
962 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
964 if (pv_area_store_would_trash (stack, addr))
965 break;
967 pv_area_store (stack, addr, 4, regs[regno]);
970 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
971 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
972 /* Ignore stores of argument registers to the stack. */
975 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
976 && (inst2 & 0x0d00) == 0x0c00
977 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
978 /* Ignore stores of argument registers to the stack. */
981 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
982 { registers } */
983 && (inst2 & 0x8000) == 0x0000
984 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
985 /* Ignore block loads from the stack, potentially copying
986 parameters from memory. */
989 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
990 [Rn, #+/-imm] */
991 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
992 /* Similarly ignore dual loads from the stack. */
995 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
996 && (inst2 & 0x0d00) == 0x0c00
997 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
998 /* Similarly ignore single loads from the stack. */
1001 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
1002 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
1003 /* Similarly ignore single loads from the stack. */
1006 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
1007 && (inst2 & 0x8000) == 0x0000)
1009 unsigned int imm = ((bits (insn, 10, 10) << 11)
1010 | (bits (inst2, 12, 14) << 8)
1011 | bits (inst2, 0, 7));
1013 regs[bits (inst2, 8, 11)]
1014 = pv_add_constant (regs[bits (insn, 0, 3)],
1015 thumb_expand_immediate (imm));
1018 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
1019 && (inst2 & 0x8000) == 0x0000)
1021 unsigned int imm = ((bits (insn, 10, 10) << 11)
1022 | (bits (inst2, 12, 14) << 8)
1023 | bits (inst2, 0, 7));
1025 regs[bits (inst2, 8, 11)]
1026 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
1029 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
1030 && (inst2 & 0x8000) == 0x0000)
1032 unsigned int imm = ((bits (insn, 10, 10) << 11)
1033 | (bits (inst2, 12, 14) << 8)
1034 | bits (inst2, 0, 7));
1036 regs[bits (inst2, 8, 11)]
1037 = pv_add_constant (regs[bits (insn, 0, 3)],
1038 - (CORE_ADDR) thumb_expand_immediate (imm));
1041 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
1042 && (inst2 & 0x8000) == 0x0000)
1044 unsigned int imm = ((bits (insn, 10, 10) << 11)
1045 | (bits (inst2, 12, 14) << 8)
1046 | bits (inst2, 0, 7));
1048 regs[bits (inst2, 8, 11)]
1049 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
1052 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1054 unsigned int imm = ((bits (insn, 10, 10) << 11)
1055 | (bits (inst2, 12, 14) << 8)
1056 | bits (inst2, 0, 7));
1058 regs[bits (inst2, 8, 11)]
1059 = pv_constant (thumb_expand_immediate (imm));
1062 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1064 unsigned int imm
1065 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1067 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1070 else if (insn == 0xea5f /* mov.w Rd,Rm */
1071 && (inst2 & 0xf0f0) == 0)
1073 int dst_reg = (inst2 & 0x0f00) >> 8;
1074 int src_reg = inst2 & 0xf;
1075 regs[dst_reg] = regs[src_reg];
1078 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1080 /* Constant pool loads. */
1081 unsigned int constant;
1082 CORE_ADDR loc;
1084 offset = bits (inst2, 0, 11);
1085 if (insn & 0x0080)
1086 loc = start + 4 + offset;
1087 else
1088 loc = start + 4 - offset;
1090 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1091 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1094 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1096 /* Constant pool loads. */
1097 unsigned int constant;
1098 CORE_ADDR loc;
1100 offset = bits (inst2, 0, 7) << 2;
1101 if (insn & 0x0080)
1102 loc = start + 4 + offset;
1103 else
1104 loc = start + 4 - offset;
1106 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1107 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1109 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1110 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1113 else if (thumb2_instruction_changes_pc (insn, inst2))
1115 /* Don't scan past anything that might change control flow. */
1116 break;
1118 else
1120 /* The optimizer might shove anything into the prologue,
1121 so we just skip what we don't recognize. */
1122 unrecognized_pc = start;
1125 start += 2;
1127 else if (thumb_instruction_changes_pc (insn))
1129 /* Don't scan past anything that might change control flow. */
1130 break;
1132 else
1134 /* The optimizer might shove anything into the prologue,
1135 so we just skip what we don't recognize. */
1136 unrecognized_pc = start;
1139 start += 2;
1142 if (arm_debug)
1143 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1144 paddress (gdbarch, start));
1146 if (unrecognized_pc == 0)
1147 unrecognized_pc = start;
1149 if (cache == NULL)
1151 do_cleanups (back_to);
1152 return unrecognized_pc;
1155 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1157 /* Frame pointer is fp. Frame size is constant. */
1158 cache->framereg = ARM_FP_REGNUM;
1159 cache->framesize = -regs[ARM_FP_REGNUM].k;
1161 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1163 /* Frame pointer is r7. Frame size is constant. */
1164 cache->framereg = THUMB_FP_REGNUM;
1165 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1167 else
1169 /* Try the stack pointer... this is a bit desperate. */
1170 cache->framereg = ARM_SP_REGNUM;
1171 cache->framesize = -regs[ARM_SP_REGNUM].k;
1174 for (i = 0; i < 16; i++)
1175 if (pv_area_find_reg (stack, gdbarch, i, &offset))
1176 cache->saved_regs[i].addr = offset;
1178 do_cleanups (back_to);
1179 return unrecognized_pc;
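/* Worked example (illustrative): for a typical GCC-generated Thumb prologue
   such as

	push	{r7, lr}	; 0xb580
	sub	sp, #16		; 0xb084
	add	r7, sp, #0	; 0xaf00

   the scan above leaves cache->framereg = THUMB_FP_REGNUM (r7) and
   cache->framesize = 24 (8 bytes of pushed registers plus 16 bytes of
   locals), with the saved r7 and lr recorded at offsets -8 and -4 from the
   stack pointer at function entry.  */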
 1183 /* Try to analyze the instructions starting from PC, which load the symbol
 1184 __stack_chk_guard. If they are recognized, return the address of the first
 1185 instruction after the load, set the destination register number in *DESTREG,
 1186 and set the size in bytes of the loading instructions in *OFFSET. Return 0
 1187 if the instructions are not recognized. */
1189 static CORE_ADDR
1190 arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1191 unsigned int *destreg, int *offset)
1193 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1194 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1195 unsigned int low, high, address;
1197 address = 0;
1198 if (is_thumb)
1200 unsigned short insn1
1201 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);
1203 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1205 *destreg = bits (insn1, 8, 10);
1206 *offset = 2;
1207 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1208 address = read_memory_unsigned_integer (address, 4,
1209 byte_order_for_code);
1211 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1213 unsigned short insn2
1214 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);
1216 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1218 insn1
1219 = read_memory_unsigned_integer (pc + 4, 2, byte_order_for_code);
1220 insn2
1221 = read_memory_unsigned_integer (pc + 6, 2, byte_order_for_code);
1223 /* movt Rd, #const */
1224 if ((insn1 & 0xfbc0) == 0xf2c0)
1226 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1227 *destreg = bits (insn2, 8, 11);
1228 *offset = 8;
1229 address = (high << 16 | low);
1233 else
1235 unsigned int insn
1236 = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
1238 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1240 address = bits (insn, 0, 11) + pc + 8;
1241 address = read_memory_unsigned_integer (address, 4,
1242 byte_order_for_code);
1244 *destreg = bits (insn, 12, 15);
1245 *offset = 4;
1247 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1249 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1251 insn
1252 = read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code);
1254 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1256 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1257 *destreg = bits (insn, 12, 15);
1258 *offset = 8;
1259 address = (high << 16 | low);
1264 return address;
1267 /* Try to skip a sequence of instructions used for stack protector. If PC
 1268 points to the first instruction of this sequence, return the address of the
 1269 first instruction after this sequence; otherwise, return the original PC.
 1271 On ARM, this sequence of instructions is mainly composed of three steps:
1272 Step 1: load symbol __stack_chk_guard,
1273 Step 2: load from address of __stack_chk_guard,
1274 Step 3: store it to somewhere else.
 1276 The instructions in step 2 and step 3 are usually the same across ARM
 1277 architectures: in step 2 it is the single instruction 'ldr Rx, [Rn, #0]', and
 1278 in step 3 it is the single instruction 'str Rx, [r7, #immd]'. However, the
 1279 instructions in step 1 vary between ARM architectures. On ARMv7,
 1280 they are,
1282 movw Rn, #:lower16:__stack_chk_guard
1283 movt Rn, #:upper16:__stack_chk_guard
1285 On ARMv5t, it is,
1287 ldr Rn, .Label
1288 ....
 1289 .Label:
1290 .word __stack_chk_guard
 1292 Since ldr/str are very common instructions, we can't use them alone as the
 1293 'fingerprint' or 'signature' of a stack protector sequence. Here we choose the
 1294 sequence {movw/movt, ldr}/ldr/str plus the symbol __stack_chk_guard, if not
 1295 stripped, as the 'fingerprint' of a stack protector code sequence. */
1297 static CORE_ADDR
1298 arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1300 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1301 unsigned int basereg;
1302 struct bound_minimal_symbol stack_chk_guard;
1303 int offset;
1304 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1305 CORE_ADDR addr;
1307 /* Try to parse the instructions in Step 1. */
1308 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1309 &basereg, &offset);
1310 if (!addr)
1311 return pc;
1313 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1314 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1315 Otherwise, this sequence cannot be for stack protector. */
1316 if (stack_chk_guard.minsym == NULL
1317 || strncmp (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym),
1318 "__stack_chk_guard",
1319 strlen ("__stack_chk_guard")) != 0)
1320 return pc;
1322 if (is_thumb)
1324 unsigned int destreg;
1325 unsigned short insn
1326 = read_memory_unsigned_integer (pc + offset, 2, byte_order_for_code);
1328 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1329 if ((insn & 0xf800) != 0x6800)
1330 return pc;
1331 if (bits (insn, 3, 5) != basereg)
1332 return pc;
1333 destreg = bits (insn, 0, 2);
1335 insn = read_memory_unsigned_integer (pc + offset + 2, 2,
1336 byte_order_for_code);
1337 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1338 if ((insn & 0xf800) != 0x6000)
1339 return pc;
1340 if (destreg != bits (insn, 0, 2))
1341 return pc;
1343 else
1345 unsigned int destreg;
1346 unsigned int insn
1347 = read_memory_unsigned_integer (pc + offset, 4, byte_order_for_code);
1349 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1350 if ((insn & 0x0e500000) != 0x04100000)
1351 return pc;
1352 if (bits (insn, 16, 19) != basereg)
1353 return pc;
1354 destreg = bits (insn, 12, 15);
1355 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1356 insn = read_memory_unsigned_integer (pc + offset + 4,
1357 4, byte_order_for_code);
1358 if ((insn & 0x0e500000) != 0x04000000)
1359 return pc;
1360 if (bits (insn, 12, 15) != destreg)
1361 return pc;
 1363 /* The total size of the two ldr/str instructions is 4 bytes on Thumb-2,
 1364 and 8 bytes on ARM. */
1365 if (is_thumb)
1366 return pc + offset + 4;
1367 else
1368 return pc + offset + 8;
1371 /* Advance the PC across any function entry prologue instructions to
1372 reach some "real" code.
1374 The APCS (ARM Procedure Call Standard) defines the following
1375 prologue:
1377 mov ip, sp
1378 [stmfd sp!, {a1,a2,a3,a4}]
1379 stmfd sp!, {...,fp,ip,lr,pc}
1380 [stfe f7, [sp, #-12]!]
1381 [stfe f6, [sp, #-12]!]
1382 [stfe f5, [sp, #-12]!]
1383 [stfe f4, [sp, #-12]!]
1384 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1386 static CORE_ADDR
1387 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1389 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1390 unsigned long inst;
1391 CORE_ADDR skip_pc;
1392 CORE_ADDR func_addr, limit_pc;
1394 /* See if we can determine the end of the prologue via the symbol table.
1395 If so, then return either PC, or the PC after the prologue, whichever
1396 is greater. */
1397 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1399 CORE_ADDR post_prologue_pc
1400 = skip_prologue_using_sal (gdbarch, func_addr);
1401 struct symtab *s = find_pc_symtab (func_addr);
1403 if (post_prologue_pc)
1404 post_prologue_pc
1405 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1408 /* GCC always emits a line note before the prologue and another
1409 one after, even if the two are at the same address or on the
1410 same line. Take advantage of this so that we do not need to
1411 know every instruction that might appear in the prologue. We
1412 will have producer information for most binaries; if it is
 1413 missing (e.g. for -gstabs), assume the GNU tools. */
1414 if (post_prologue_pc
1415 && (s == NULL
1416 || s->producer == NULL
1417 || strncmp (s->producer, "GNU ", sizeof ("GNU ") - 1) == 0
1418 || strncmp (s->producer, "clang ", sizeof ("clang ") - 1) == 0))
1419 return post_prologue_pc;
1421 if (post_prologue_pc != 0)
1423 CORE_ADDR analyzed_limit;
1425 /* For non-GCC compilers, make sure the entire line is an
1426 acceptable prologue; GDB will round this function's
1427 return value up to the end of the following line so we
1428 can not skip just part of a line (and we do not want to).
1430 RealView does not treat the prologue specially, but does
1431 associate prologue code with the opening brace; so this
1432 lets us skip the first line if we think it is the opening
1433 brace. */
1434 if (arm_pc_is_thumb (gdbarch, func_addr))
1435 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1436 post_prologue_pc, NULL);
1437 else
1438 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1439 post_prologue_pc, NULL);
1441 if (analyzed_limit != post_prologue_pc)
1442 return func_addr;
1444 return post_prologue_pc;
1448 /* Can't determine prologue from the symbol table, need to examine
1449 instructions. */
1451 /* Find an upper limit on the function prologue using the debug
1452 information. If the debug information could not be used to provide
 1453 that bound, then use an arbitrarily large number as the upper bound.
1454 /* Like arm_scan_prologue, stop no later than pc + 64. */
1455 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1456 if (limit_pc == 0)
1457 limit_pc = pc + 64; /* Magic. */
1460 /* Check if this is Thumb code. */
1461 if (arm_pc_is_thumb (gdbarch, pc))
1462 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1464 for (skip_pc = pc; skip_pc < limit_pc; skip_pc += 4)
1466 inst = read_memory_unsigned_integer (skip_pc, 4, byte_order_for_code);
1468 /* "mov ip, sp" is no longer a required part of the prologue. */
1469 if (inst == 0xe1a0c00d) /* mov ip, sp */
1470 continue;
1472 if ((inst & 0xfffff000) == 0xe28dc000) /* add ip, sp #n */
1473 continue;
1475 if ((inst & 0xfffff000) == 0xe24dc000) /* sub ip, sp #n */
1476 continue;
1478 /* Some prologues begin with "str lr, [sp, #-4]!". */
1479 if (inst == 0xe52de004) /* str lr, [sp, #-4]! */
1480 continue;
1482 if ((inst & 0xfffffff0) == 0xe92d0000) /* stmfd sp!,{a1,a2,a3,a4} */
1483 continue;
1485 if ((inst & 0xfffff800) == 0xe92dd800) /* stmfd sp!,{fp,ip,lr,pc} */
1486 continue;
1488 /* Any insns after this point may float into the code, if it makes
1489 for better instruction scheduling, so we skip them only if we
1490 find them, but still consider the function to be frame-ful. */
1492 /* We may have either one sfmfd instruction here, or several stfe
1493 insns, depending on the version of floating point code we
1494 support. */
1495 if ((inst & 0xffbf0fff) == 0xec2d0200) /* sfmfd fn, <cnt>, [sp]! */
1496 continue;
1498 if ((inst & 0xffff8fff) == 0xed6d0103) /* stfe fn, [sp, #-12]! */
1499 continue;
1501 if ((inst & 0xfffff000) == 0xe24cb000) /* sub fp, ip, #nn */
1502 continue;
1504 if ((inst & 0xfffff000) == 0xe24dd000) /* sub sp, sp, #nn */
1505 continue;
1507 if ((inst & 0xffffc000) == 0xe54b0000 /* strb r(0123),[r11,#-nn] */
1508 || (inst & 0xffffc0f0) == 0xe14b00b0 /* strh r(0123),[r11,#-nn] */
1509 || (inst & 0xffffc000) == 0xe50b0000) /* str r(0123),[r11,#-nn] */
1510 continue;
1512 if ((inst & 0xffffc000) == 0xe5cd0000 /* strb r(0123),[sp,#nn] */
1513 || (inst & 0xffffc0f0) == 0xe1cd00b0 /* strh r(0123),[sp,#nn] */
1514 || (inst & 0xffffc000) == 0xe58d0000) /* str r(0123),[sp,#nn] */
1515 continue;
1517 /* Un-recognized instruction; stop scanning. */
1518 break;
1521 return skip_pc; /* End of prologue. */
1524 /* *INDENT-OFF* */
1525 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1526 This function decodes a Thumb function prologue to determine:
1527 1) the size of the stack frame
1528 2) which registers are saved on it
1529 3) the offsets of saved regs
1530 4) the offset from the stack pointer to the frame pointer
1532 A typical Thumb function prologue would create this stack frame
1533 (offsets relative to FP)
1534 old SP -> 24 stack parameters
1535 20 LR
1536 16 R7
1537 R7 -> 0 local variables (16 bytes)
1538 SP -> -12 additional stack space (12 bytes)
1539 The frame size would thus be 36 bytes, and the frame offset would be
1540 12 bytes. The frame register is R7.
 1542 The comments for thumb_analyze_prologue() describe the algorithm we use
 1543 to detect the end of the prologue. */
1544 /* *INDENT-ON* */
1546 static void
1547 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1548 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1550 CORE_ADDR prologue_start;
1551 CORE_ADDR prologue_end;
1553 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1554 &prologue_end))
1556 /* See comment in arm_scan_prologue for an explanation of
 1557 this heuristic. */
1558 if (prologue_end > prologue_start + 64)
1560 prologue_end = prologue_start + 64;
1563 else
1564 /* We're in the boondocks: we have no idea where the start of the
1565 function is. */
1566 return;
1568 prologue_end = min (prologue_end, prev_pc);
1570 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1573 /* Return 1 if THIS_INSTR might change control flow, 0 otherwise. */
1575 static int
1576 arm_instruction_changes_pc (uint32_t this_instr)
1578 if (bits (this_instr, 28, 31) == INST_NV)
1579 /* Unconditional instructions. */
1580 switch (bits (this_instr, 24, 27))
1582 case 0xa:
1583 case 0xb:
1584 /* Branch with Link and change to Thumb. */
1585 return 1;
1586 case 0xc:
1587 case 0xd:
1588 case 0xe:
1589 /* Coprocessor register transfer. */
1590 if (bits (this_instr, 12, 15) == 15)
1591 error (_("Invalid update to pc in instruction"));
1592 return 0;
1593 default:
1594 return 0;
1596 else
1597 switch (bits (this_instr, 25, 27))
1599 case 0x0:
1600 if (bits (this_instr, 23, 24) == 2 && bit (this_instr, 20) == 0)
1602 /* Multiplies and extra load/stores. */
1603 if (bit (this_instr, 4) == 1 && bit (this_instr, 7) == 1)
1604 /* Neither multiplies nor extension load/stores are allowed
1605 to modify PC. */
1606 return 0;
1608 /* Otherwise, miscellaneous instructions. */
1610 /* BX <reg>, BXJ <reg>, BLX <reg> */
1611 if (bits (this_instr, 4, 27) == 0x12fff1
1612 || bits (this_instr, 4, 27) == 0x12fff2
1613 || bits (this_instr, 4, 27) == 0x12fff3)
1614 return 1;
1616 /* Other miscellaneous instructions are unpredictable if they
1617 modify PC. */
1618 return 0;
1620 /* Data processing instruction. Fall through. */
1622 case 0x1:
1623 if (bits (this_instr, 12, 15) == 15)
1624 return 1;
1625 else
1626 return 0;
1628 case 0x2:
1629 case 0x3:
1630 /* Media instructions and architecturally undefined instructions. */
1631 if (bits (this_instr, 25, 27) == 3 && bit (this_instr, 4) == 1)
1632 return 0;
1634 /* Stores. */
1635 if (bit (this_instr, 20) == 0)
1636 return 0;
1638 /* Loads. */
1639 if (bits (this_instr, 12, 15) == ARM_PC_REGNUM)
1640 return 1;
1641 else
1642 return 0;
1644 case 0x4:
1645 /* Load/store multiple. */
1646 if (bit (this_instr, 20) == 1 && bit (this_instr, 15) == 1)
1647 return 1;
1648 else
1649 return 0;
1651 case 0x5:
1652 /* Branch and branch with link. */
1653 return 1;
1655 case 0x6:
1656 case 0x7:
1657 /* Coprocessor transfers or SWIs can not affect PC. */
1658 return 0;
1660 default:
1661 internal_error (__FILE__, __LINE__, _("bad value in switch"));
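/* Illustrative example: a few common ARM encodings and how the test above
   classifies them.  This is a hypothetical sketch (the function name is made
   up), kept out of the build with #if 0.  */
#if 0
static void
example_arm_instruction_changes_pc (void)
{
  gdb_assert (arm_instruction_changes_pc (0xe12fff1e));  /* bx lr */
  gdb_assert (arm_instruction_changes_pc (0xe8bd8800));  /* ldmfd sp!, {fp, pc} */
  gdb_assert (arm_instruction_changes_pc (0xeb000000));  /* bl <label> */

  gdb_assert (!arm_instruction_changes_pc (0xe92d4010)); /* stmfd sp!, {r4, lr} */
  gdb_assert (!arm_instruction_changes_pc (0xe1a00001)); /* mov r0, r1 */
}
#endif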
1665 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1666 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1667 fill it in. Return the first address not recognized as a prologue
1668 instruction.
1670 We recognize all the instructions typically found in ARM prologues,
1671 plus harmless instructions which can be skipped (either for analysis
1672 purposes, or a more restrictive set that can be skipped when finding
1673 the end of the prologue). */
1675 static CORE_ADDR
1676 arm_analyze_prologue (struct gdbarch *gdbarch,
1677 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1678 struct arm_prologue_cache *cache)
1680 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1681 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1682 int regno;
1683 CORE_ADDR offset, current_pc;
1684 pv_t regs[ARM_FPS_REGNUM];
1685 struct pv_area *stack;
1686 struct cleanup *back_to;
1687 int framereg, framesize;
1688 CORE_ADDR unrecognized_pc = 0;
1690 /* Search the prologue looking for instructions that set up the
1691 frame pointer, adjust the stack pointer, and save registers.
1693 Be careful, however, and if it doesn't look like a prologue,
1694 don't try to scan it. If, for instance, a frameless function
1695 begins with stmfd sp!, then we will tell ourselves there is
1696 a frame, which will confuse stack traceback, as well as "finish"
1697 and other operations that rely on a knowledge of the stack
1698 traceback. */
1700 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1701 regs[regno] = pv_register (regno, 0);
1702 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1703 back_to = make_cleanup_free_pv_area (stack);
1705 for (current_pc = prologue_start;
1706 current_pc < prologue_end;
1707 current_pc += 4)
1709 unsigned int insn
1710 = read_memory_unsigned_integer (current_pc, 4, byte_order_for_code);
1712 if (insn == 0xe1a0c00d) /* mov ip, sp */
1714 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1715 continue;
1717 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1718 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1720 unsigned imm = insn & 0xff; /* immediate value */
1721 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1722 int rd = bits (insn, 12, 15);
1723 imm = (imm >> rot) | (imm << (32 - rot));
1724 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1725 continue;
1727 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1728 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1730 unsigned imm = insn & 0xff; /* immediate value */
1731 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1732 int rd = bits (insn, 12, 15);
1733 imm = (imm >> rot) | (imm << (32 - rot));
1734 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1735 continue;
1737 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1738 [sp, #-4]! */
1740 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1741 break;
1742 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1743 pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
1744 regs[bits (insn, 12, 15)]);
1745 continue;
1747 else if ((insn & 0xffff0000) == 0xe92d0000)
1748 /* stmfd sp!, {..., fp, ip, lr, pc}
1750 stmfd sp!, {a1, a2, a3, a4} */
1752 int mask = insn & 0xffff;
1754 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1755 break;
1757 /* Calculate offsets of saved registers. */
1758 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1759 if (mask & (1 << regno))
1761 regs[ARM_SP_REGNUM]
1762 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1763 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
1766 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1767 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1768 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1770 /* No need to add this to saved_regs -- it's just an arg reg. */
1771 continue;
1773 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1774 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1775 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1777 /* No need to add this to saved_regs -- it's just an arg reg. */
1778 continue;
1780 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1781 { registers } */
1782 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1784 /* No need to add this to saved_regs -- it's just arg regs. */
1785 continue;
1787 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1789 unsigned imm = insn & 0xff; /* immediate value */
1790 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1791 imm = (imm >> rot) | (imm << (32 - rot));
1792 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1794 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1796 unsigned imm = insn & 0xff; /* immediate value */
1797 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1798 imm = (imm >> rot) | (imm << (32 - rot));
1799 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1801 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1802 [sp, -#c]! */
1803 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1805 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1806 break;
1808 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1809 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1810 pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
1812 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1813 [sp!] */
1814 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1816 int n_saved_fp_regs;
1817 unsigned int fp_start_reg, fp_bound_reg;
1819 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1820 break;
1822 if ((insn & 0x800) == 0x800) /* N0 is set */
1824 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1825 n_saved_fp_regs = 3;
1826 else
1827 n_saved_fp_regs = 1;
1829 else
1831 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1832 n_saved_fp_regs = 2;
1833 else
1834 n_saved_fp_regs = 4;
1837 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1838 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1839 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1841 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1842 pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
1843 regs[fp_start_reg++]);
1846 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1848 /* Allow some special function calls when skipping the
1849 prologue; GCC generates these before storing arguments to
1850 the stack. */
1851 CORE_ADDR dest = BranchDest (current_pc, insn);
1853 if (skip_prologue_function (gdbarch, dest, 0))
1854 continue;
1855 else
1856 break;
1858 else if ((insn & 0xf0000000) != 0xe0000000)
1859 break; /* Condition not true, exit early. */
1860 else if (arm_instruction_changes_pc (insn))
1861 /* Don't scan past anything that might change control flow. */
1862 break;
1863 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1864 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1865 /* Ignore block loads from the stack, potentially copying
1866 parameters from memory. */
1867 continue;
1868 else if ((insn & 0xfc500000) == 0xe4100000
1869 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1870 /* Similarly ignore single loads from the stack. */
1871 continue;
1872 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1873 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1874 register instead of the stack. */
1875 continue;
1876 else
1878 /* The optimizer might shove anything into the prologue,
1879 so we just skip what we don't recognize. */
1880 unrecognized_pc = current_pc;
1881 continue;
1885 if (unrecognized_pc == 0)
1886 unrecognized_pc = current_pc;
1888 /* The frame size is just the distance from the frame register
1889 to the original stack pointer. */
1890 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1892 /* Frame pointer is fp. */
1893 framereg = ARM_FP_REGNUM;
1894 framesize = -regs[ARM_FP_REGNUM].k;
1896 else
1898 /* Try the stack pointer... this is a bit desperate. */
1899 framereg = ARM_SP_REGNUM;
1900 framesize = -regs[ARM_SP_REGNUM].k;
1903 if (cache)
1905 cache->framereg = framereg;
1906 cache->framesize = framesize;
1908 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1909 if (pv_area_find_reg (stack, gdbarch, regno, &offset))
1910 cache->saved_regs[regno].addr = offset;
1913 if (arm_debug)
1914 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1915 paddress (gdbarch, unrecognized_pc));
1917 do_cleanups (back_to);
1918 return unrecognized_pc;
1921 static void
1922 arm_scan_prologue (struct frame_info *this_frame,
1923 struct arm_prologue_cache *cache)
1925 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1926 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1927 int regno;
1928 CORE_ADDR prologue_start, prologue_end, current_pc;
1929 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1930 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1931 pv_t regs[ARM_FPS_REGNUM];
1932 struct pv_area *stack;
1933 struct cleanup *back_to;
1934 CORE_ADDR offset;
1936 /* Assume there is no frame until proven otherwise. */
1937 cache->framereg = ARM_SP_REGNUM;
1938 cache->framesize = 0;
1940 /* Check for Thumb prologue. */
1941 if (arm_frame_is_thumb (this_frame))
1943 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1944 return;
1947 /* Find the function prologue. If we can't find the function in
1948 the symbol table, peek in the stack frame to find the PC. */
1949 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1950 &prologue_end))
1952 /* One way to find the end of the prologue (which works well
1953 for unoptimized code) is to do the following:
1955 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1957 if (sal.line == 0)
1958 prologue_end = prev_pc;
1959 else if (sal.end < prologue_end)
1960 prologue_end = sal.end;
1962 This mechanism is very accurate so long as the optimizer
1963 doesn't move any instructions from the function body into the
1964 prologue. If this happens, sal.end will be the last
1965 instruction in the first hunk of prologue code just before
1966 the first instruction that the scheduler has moved from
1967 the body to the prologue.
1969 In order to make sure that we scan all of the prologue
1970 instructions, we use a slightly less accurate mechanism which
1971 may scan more than necessary. To help compensate for this
1972 lack of accuracy, the prologue scanning loop below contains
1973 several clauses that will cause the loop to terminate early if
1974 an implausible prologue instruction is encountered.
1976 The expression
1978 prologue_start + 64
1980 is a suitable endpoint since it accounts for the largest
1981 possible prologue plus up to five instructions inserted by
1982 the scheduler. */
1984 if (prologue_end > prologue_start + 64)
1986 prologue_end = prologue_start + 64; /* See above. */
1989 else
1991 /* We have no symbol information. Our only option is to assume this
1992 function has a standard stack frame and the normal frame register.
1993 Then, we can find the value of our frame pointer on entrance to
1994 the callee (or at the present moment if this is the innermost frame).
1995 The value stored there should be the address of the stmfd + 8. */
1996 CORE_ADDR frame_loc;
1997 LONGEST return_value;
1999 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
2000 if (!safe_read_memory_integer (frame_loc, 4, byte_order, &return_value))
2001 return;
2002 else
2004 prologue_start = gdbarch_addr_bits_remove
2005 (gdbarch, return_value) - 8;
2006 prologue_end = prologue_start + 64; /* See above. */
2010 if (prev_pc < prologue_end)
2011 prologue_end = prev_pc;
2013 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
2016 static struct arm_prologue_cache *
2017 arm_make_prologue_cache (struct frame_info *this_frame)
2019 int reg;
2020 struct arm_prologue_cache *cache;
2021 CORE_ADDR unwound_fp;
2023 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2024 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2026 arm_scan_prologue (this_frame, cache);
2028 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
2029 if (unwound_fp == 0)
2030 return cache;
2032 cache->prev_sp = unwound_fp + cache->framesize;
2034 /* Calculate actual addresses of saved registers using offsets
2035 determined by arm_scan_prologue. */
2036 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2037 if (trad_frame_addr_p (cache->saved_regs, reg))
2038 cache->saved_regs[reg].addr += cache->prev_sp;
2040 return cache;
2043 /* Our frame ID for a normal frame is the current function's starting PC
2044 and the caller's SP when we were called. */
2046 static void
2047 arm_prologue_this_id (struct frame_info *this_frame,
2048 void **this_cache,
2049 struct frame_id *this_id)
2051 struct arm_prologue_cache *cache;
2052 struct frame_id id;
2053 CORE_ADDR pc, func;
2055 if (*this_cache == NULL)
2056 *this_cache = arm_make_prologue_cache (this_frame);
2057 cache = *this_cache;
2059 /* This is meant to halt the backtrace at "_start". */
2060 pc = get_frame_pc (this_frame);
2061 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
2062 return;
2064 /* If we've hit a wall, stop. */
2065 if (cache->prev_sp == 0)
2066 return;
2068 /* Use function start address as part of the frame ID. If we cannot
2069 identify the start address (due to missing symbol information),
2070 fall back to just using the current PC. */
2071 func = get_frame_func (this_frame);
2072 if (!func)
2073 func = pc;
2075 id = frame_id_build (cache->prev_sp, func);
2076 *this_id = id;
2079 static struct value *
2080 arm_prologue_prev_register (struct frame_info *this_frame,
2081 void **this_cache,
2082 int prev_regnum)
2084 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2085 struct arm_prologue_cache *cache;
2087 if (*this_cache == NULL)
2088 *this_cache = arm_make_prologue_cache (this_frame);
2089 cache = *this_cache;
2091 /* If we are asked to unwind the PC, then we need to return the LR
2092 instead. The prologue may save PC, but it will point into this
2093 frame's prologue, not the next frame's resume location. Also
2094 strip the saved T bit. A valid LR may have the low bit set, but
2095 a valid PC never does. */
2096 if (prev_regnum == ARM_PC_REGNUM)
2098 CORE_ADDR lr;
2100 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2101 return frame_unwind_got_constant (this_frame, prev_regnum,
2102 arm_addr_bits_remove (gdbarch, lr));
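/* For example, a saved LR of 0x8401 (low bit set, marking a Thumb
   return address) unwinds to a caller PC of 0x8400 once
   arm_addr_bits_remove has stripped the Thumb bit; the addresses here
   are purely illustrative.  */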
2105 /* SP is generally not saved to the stack, but this frame is
2106 identified by the next frame's stack pointer at the time of the call.
2107 The value was already reconstructed into PREV_SP. */
2108 if (prev_regnum == ARM_SP_REGNUM)
2109 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
2111 /* The CPSR may have been changed by the call instruction and by the
2112 called function. The only bit we can reconstruct is the T bit,
2113 by checking the low bit of LR as of the call. This is a reliable
2114 indicator of Thumb-ness except for some ARM v4T pre-interworking
2115 Thumb code, which could get away with a clear low bit as long as
2116 the called function did not use bx. Guess that all other
2117 bits are unchanged; the condition flags are presumably lost,
2118 but the processor status is likely valid. */
2119 if (prev_regnum == ARM_PS_REGNUM)
2121 CORE_ADDR lr, cpsr;
2122 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
2124 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
2125 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2126 if (IS_THUMB_ADDR (lr))
2127 cpsr |= t_bit;
2128 else
2129 cpsr &= ~t_bit;
2130 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
2133 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2134 prev_regnum);
2137 struct frame_unwind arm_prologue_unwind = {
2138 NORMAL_FRAME,
2139 default_frame_unwind_stop_reason,
2140 arm_prologue_this_id,
2141 arm_prologue_prev_register,
2142 NULL,
2143 default_frame_sniffer
2146 /* Maintain a list of ARM exception table entries per objfile, similar to the
2147 list of mapping symbols. We only cache entries for standard ARM-defined
2148 personality routines; the cache will contain only the frame unwinding
2149 instructions associated with the entry (not the descriptors). */
2151 static const struct objfile_data *arm_exidx_data_key;
2153 struct arm_exidx_entry
2155 bfd_vma addr;
2156 gdb_byte *entry;
2158 typedef struct arm_exidx_entry arm_exidx_entry_s;
2159 DEF_VEC_O(arm_exidx_entry_s);
2161 struct arm_exidx_data
2163 VEC(arm_exidx_entry_s) **section_maps;
2166 static void
2167 arm_exidx_data_free (struct objfile *objfile, void *arg)
2169 struct arm_exidx_data *data = arg;
2170 unsigned int i;
2172 for (i = 0; i < objfile->obfd->section_count; i++)
2173 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
2176 static inline int
2177 arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2178 const struct arm_exidx_entry *rhs)
2180 return lhs->addr < rhs->addr;
2183 static struct obj_section *
2184 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2186 struct obj_section *osect;
2188 ALL_OBJFILE_OSECTIONS (objfile, osect)
2189 if (bfd_get_section_flags (objfile->obfd,
2190 osect->the_bfd_section) & SEC_ALLOC)
2192 bfd_vma start, size;
2193 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2194 size = bfd_get_section_size (osect->the_bfd_section);
2196 if (start <= vma && vma < start + size)
2197 return osect;
2200 return NULL;
2203 /* Parse contents of exception table and exception index sections
2204 of OBJFILE, and fill in the exception table entry cache.
2206 For each entry that refers to a standard ARM-defined personality
2207 routine, extract the frame unwinding instructions (from either
2208 the index or the table section). The unwinding instructions
2209 are normalized by:
2210 - extracting them from the rest of the table data
2211 - converting to host endianness
2212 - appending the implicit 0xb0 ("Finish") code
2214 The extracted and normalized instructions are stored for later
2215 retrieval by the arm_find_exidx_entry routine. */
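/* For instance, a hypothetical short-form index word of 0x80a8b0b0
   yields the unwind bytes 0xa8 0xb0 0xb0; with the implicit "Finish"
   appended, the cached entry is 0xa8 0xb0 0xb0 0xb0, i.e.
   "pop {r4, lr}" followed by "finish".  */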
2217 static void
2218 arm_exidx_new_objfile (struct objfile *objfile)
2220 struct cleanup *cleanups;
2221 struct arm_exidx_data *data;
2222 asection *exidx, *extab;
2223 bfd_vma exidx_vma = 0, extab_vma = 0;
2224 bfd_size_type exidx_size = 0, extab_size = 0;
2225 gdb_byte *exidx_data = NULL, *extab_data = NULL;
2226 LONGEST i;
2228 /* If we've already touched this file, do nothing. */
2229 if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2230 return;
2231 cleanups = make_cleanup (null_cleanup, NULL);
2233 /* Read contents of exception table and index. */
2234 exidx = bfd_get_section_by_name (objfile->obfd, ".ARM.exidx");
2235 if (exidx)
2237 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2238 exidx_size = bfd_get_section_size (exidx);
2239 exidx_data = xmalloc (exidx_size);
2240 make_cleanup (xfree, exidx_data);
2242 if (!bfd_get_section_contents (objfile->obfd, exidx,
2243 exidx_data, 0, exidx_size))
2245 do_cleanups (cleanups);
2246 return;
2250 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2251 if (extab)
2253 extab_vma = bfd_section_vma (objfile->obfd, extab);
2254 extab_size = bfd_get_section_size (extab);
2255 extab_data = xmalloc (extab_size);
2256 make_cleanup (xfree, extab_data);
2258 if (!bfd_get_section_contents (objfile->obfd, extab,
2259 extab_data, 0, extab_size))
2261 do_cleanups (cleanups);
2262 return;
2266 /* Allocate exception table data structure. */
2267 data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2268 set_objfile_data (objfile, arm_exidx_data_key, data);
2269 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2270 objfile->obfd->section_count,
2271 VEC(arm_exidx_entry_s) *);
2273 /* Fill in exception table. */
2274 for (i = 0; i < exidx_size / 8; i++)
2276 struct arm_exidx_entry new_exidx_entry;
2277 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
2278 bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
2279 bfd_vma addr = 0, word = 0;
2280 int n_bytes = 0, n_words = 0;
2281 struct obj_section *sec;
2282 gdb_byte *entry = NULL;
2284 /* Extract address of start of function. */
2285 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2286 idx += exidx_vma + i * 8;
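/* The expression above sign-extends a 31-bit place-relative offset
   (prel31).  For example, if the index word were 0x7ffffff8 and the
   entry lived at address 0x8000, the offset would be -8 and the
   function would start at 0x7ff8 (example addresses only).  */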
2288 /* Find section containing function and compute section offset. */
2289 sec = arm_obj_section_from_vma (objfile, idx);
2290 if (sec == NULL)
2291 continue;
2292 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2294 /* Determine address of exception table entry. */
2295 if (val == 1)
2297 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2299 else if ((val & 0xff000000) == 0x80000000)
2301 /* Exception table entry embedded in .ARM.exidx
2302 -- must be short form. */
2303 word = val;
2304 n_bytes = 3;
2306 else if (!(val & 0x80000000))
2308 /* Exception table entry in .ARM.extab. */
2309 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2310 addr += exidx_vma + i * 8 + 4;
2312 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
2314 word = bfd_h_get_32 (objfile->obfd,
2315 extab_data + addr - extab_vma);
2316 addr += 4;
2318 if ((word & 0xff000000) == 0x80000000)
2320 /* Short form. */
2321 n_bytes = 3;
2323 else if ((word & 0xff000000) == 0x81000000
2324 || (word & 0xff000000) == 0x82000000)
2326 /* Long form. */
2327 n_bytes = 2;
2328 n_words = ((word >> 16) & 0xff);
2330 else if (!(word & 0x80000000))
2332 bfd_vma pers;
2333 struct obj_section *pers_sec;
2334 int gnu_personality = 0;
2336 /* Custom personality routine. */
2337 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2338 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2340 /* Check whether we've got one of the variants of the
2341 GNU personality routines. */
2342 pers_sec = arm_obj_section_from_vma (objfile, pers);
2343 if (pers_sec)
2345 static const char *personality[] =
2347 "__gcc_personality_v0",
2348 "__gxx_personality_v0",
2349 "__gcj_personality_v0",
2350 "__gnu_objc_personality_v0",
2351 NULL
2354 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2355 int k;
2357 for (k = 0; personality[k]; k++)
2358 if (lookup_minimal_symbol_by_pc_name
2359 (pc, personality[k], objfile))
2361 gnu_personality = 1;
2362 break;
2366 /* If so, the next word contains a word count in the high
2367 byte, followed by the same unwind instructions as the
2368 pre-defined forms. */
2369 if (gnu_personality
2370 && addr + 4 <= extab_vma + extab_size)
2372 word = bfd_h_get_32 (objfile->obfd,
2373 extab_data + addr - extab_vma);
2374 addr += 4;
2375 n_bytes = 3;
2376 n_words = ((word >> 24) & 0xff);
2382 /* Sanity check address. */
2383 if (n_words)
2384 if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
2385 n_words = n_bytes = 0;
2387 /* The unwind instructions reside in WORD (only the N_BYTES least
2388 significant bytes are valid), followed by N_WORDS words in the
2389 extab section starting at ADDR. */
2390 if (n_bytes || n_words)
2392 gdb_byte *p = entry = obstack_alloc (&objfile->objfile_obstack,
2393 n_bytes + n_words * 4 + 1);
2395 while (n_bytes--)
2396 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2398 while (n_words--)
2400 word = bfd_h_get_32 (objfile->obfd,
2401 extab_data + addr - extab_vma);
2402 addr += 4;
2404 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2405 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2406 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2407 *p++ = (gdb_byte) (word & 0xff);
2410 /* Implied "Finish" to terminate the list. */
2411 *p++ = 0xb0;
2414 /* Push entry onto vector. They are guaranteed to always
2415 appear in order of increasing addresses. */
2416 new_exidx_entry.addr = idx;
2417 new_exidx_entry.entry = entry;
2418 VEC_safe_push (arm_exidx_entry_s,
2419 data->section_maps[sec->the_bfd_section->index],
2420 &new_exidx_entry);
2423 do_cleanups (cleanups);
2426 /* Search for the exception table entry covering MEMADDR. If one is found,
2427 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2428 set *START to the start of the region covered by this entry. */
2430 static gdb_byte *
2431 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2433 struct obj_section *sec;
2435 sec = find_pc_section (memaddr);
2436 if (sec != NULL)
2438 struct arm_exidx_data *data;
2439 VEC(arm_exidx_entry_s) *map;
2440 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2441 unsigned int idx;
2443 data = objfile_data (sec->objfile, arm_exidx_data_key);
2444 if (data != NULL)
2446 map = data->section_maps[sec->the_bfd_section->index];
2447 if (!VEC_empty (arm_exidx_entry_s, map))
2449 struct arm_exidx_entry *map_sym;
2451 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2452 arm_compare_exidx_entries);
2454 /* VEC_lower_bound finds the earliest ordered insertion
2455 point. If the following symbol starts at this exact
2456 address, we use that; otherwise, the preceding
2457 exception table entry covers this address. */
2458 if (idx < VEC_length (arm_exidx_entry_s, map))
2460 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2461 if (map_sym->addr == map_key.addr)
2463 if (start)
2464 *start = map_sym->addr + obj_section_addr (sec);
2465 return map_sym->entry;
2469 if (idx > 0)
2471 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2472 if (start)
2473 *start = map_sym->addr + obj_section_addr (sec);
2474 return map_sym->entry;
2480 return NULL;
2483 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2484 instruction list from the ARM exception table entry ENTRY, allocate and
2485 return a prologue cache structure describing how to unwind this frame.
2487 Return NULL if the unwinding instruction list contains a "spare",
2488 "reserved" or "refuse to unwind" instruction as defined in section
2489 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2490 for the ARM Architecture" document. */
2492 static struct arm_prologue_cache *
2493 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2495 CORE_ADDR vsp = 0;
2496 int vsp_valid = 0;
2498 struct arm_prologue_cache *cache;
2499 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2500 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2502 for (;;)
2504 gdb_byte insn;
2506 /* Whenever we reload SP, we actually have to retrieve its
2507 actual value in the current frame. */
2508 if (!vsp_valid)
2510 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2512 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2513 vsp = get_frame_register_unsigned (this_frame, reg);
2515 else
2517 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2518 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2521 vsp_valid = 1;
2524 /* Decode next unwind instruction. */
2525 insn = *entry++;
2527 if ((insn & 0xc0) == 0)
2529 int offset = insn & 0x3f;
2530 vsp += (offset << 2) + 4;
2532 else if ((insn & 0xc0) == 0x40)
2534 int offset = insn & 0x3f;
2535 vsp -= (offset << 2) + 4;
2537 else if ((insn & 0xf0) == 0x80)
2539 int mask = ((insn & 0xf) << 8) | *entry++;
2540 int i;
2542 /* The special case of an all-zero mask identifies
2543 "Refuse to unwind". We return NULL to fall back
2544 to the prologue analyzer. */
2545 if (mask == 0)
2546 return NULL;
2548 /* Pop registers r4..r15 under mask. */
2549 for (i = 0; i < 12; i++)
2550 if (mask & (1 << i))
2552 cache->saved_regs[4 + i].addr = vsp;
2553 vsp += 4;
2556 /* Special-case popping SP -- we need to reload vsp. */
2557 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2558 vsp_valid = 0;
2560 else if ((insn & 0xf0) == 0x90)
2562 int reg = insn & 0xf;
2564 /* Reserved cases. */
2565 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2566 return NULL;
2568 /* Set SP from another register and mark VSP for reload. */
2569 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2570 vsp_valid = 0;
2572 else if ((insn & 0xf0) == 0xa0)
2574 int count = insn & 0x7;
2575 int pop_lr = (insn & 0x8) != 0;
2576 int i;
2578 /* Pop r4..r[4+count]. */
2579 for (i = 0; i <= count; i++)
2581 cache->saved_regs[4 + i].addr = vsp;
2582 vsp += 4;
2585 /* If indicated by flag, pop LR as well. */
2586 if (pop_lr)
2588 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2589 vsp += 4;
2592 else if (insn == 0xb0)
2594 /* We could only have updated PC by popping into it; if so, it
2595 will show up as an address. Otherwise, copy LR into PC. */
2596 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2597 cache->saved_regs[ARM_PC_REGNUM]
2598 = cache->saved_regs[ARM_LR_REGNUM];
2600 /* We're done. */
2601 break;
2603 else if (insn == 0xb1)
2605 int mask = *entry++;
2606 int i;
2608 /* An all-zero mask or a mask >= 16 is "spare". */
2609 if (mask == 0 || mask >= 16)
2610 return NULL;
2612 /* Pop r0..r3 under mask. */
2613 for (i = 0; i < 4; i++)
2614 if (mask & (1 << i))
2616 cache->saved_regs[i].addr = vsp;
2617 vsp += 4;
2620 else if (insn == 0xb2)
2622 ULONGEST offset = 0;
2623 unsigned shift = 0;
2627 offset |= (*entry & 0x7f) << shift;
2628 shift += 7;
2630 while (*entry++ & 0x80);
2632 vsp += 0x204 + (offset << 2);
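/* The operand is a ULEB128 value built from 7-bit groups.  As an
   illustration, a single operand byte of 0x10 gives offset = 0x10,
   so vsp advances by 0x204 + (0x10 << 2) = 0x244 bytes.  */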
2634 else if (insn == 0xb3)
2636 int start = *entry >> 4;
2637 int count = (*entry++) & 0xf;
2638 int i;
2640 /* Only registers D0..D15 are valid here. */
2641 if (start + count >= 16)
2642 return NULL;
2644 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2645 for (i = 0; i <= count; i++)
2647 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2648 vsp += 8;
2651 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2652 vsp += 4;
2654 else if ((insn & 0xf8) == 0xb8)
2656 int count = insn & 0x7;
2657 int i;
2659 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2660 for (i = 0; i <= count; i++)
2662 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2663 vsp += 8;
2666 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2667 vsp += 4;
2669 else if (insn == 0xc6)
2671 int start = *entry >> 4;
2672 int count = (*entry++) & 0xf;
2673 int i;
2675 /* Only registers WR0..WR15 are valid. */
2676 if (start + count >= 16)
2677 return NULL;
2679 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2680 for (i = 0; i <= count; i++)
2682 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2683 vsp += 8;
2686 else if (insn == 0xc7)
2688 int mask = *entry++;
2689 int i;
2691 /* An all-zero mask or a mask >= 16 is "spare". */
2692 if (mask == 0 || mask >= 16)
2693 return NULL;
2695 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2696 for (i = 0; i < 4; i++)
2697 if (mask & (1 << i))
2699 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2700 vsp += 4;
2703 else if ((insn & 0xf8) == 0xc0)
2705 int count = insn & 0x7;
2706 int i;
2708 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2709 for (i = 0; i <= count; i++)
2711 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2712 vsp += 8;
2715 else if (insn == 0xc8)
2717 int start = *entry >> 4;
2718 int count = (*entry++) & 0xf;
2719 int i;
2721 /* Only registers D0..D31 are valid. */
2722 if (start + count >= 16)
2723 return NULL;
2725 /* Pop VFP double-precision registers
2726 D[16+start]..D[16+start+count]. */
2727 for (i = 0; i <= count; i++)
2729 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2730 vsp += 8;
2733 else if (insn == 0xc9)
2735 int start = *entry >> 4;
2736 int count = (*entry++) & 0xf;
2737 int i;
2739 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2740 for (i = 0; i <= count; i++)
2742 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2743 vsp += 8;
2746 else if ((insn & 0xf8) == 0xd0)
2748 int count = insn & 0x7;
2749 int i;
2751 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2752 for (i = 0; i <= count; i++)
2754 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2755 vsp += 8;
2758 else
2760 /* Everything else is "spare". */
2761 return NULL;
2765 /* If we restore SP from a register, assume this was the frame register.
2766 Otherwise just fall back to SP as frame register. */
2767 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2768 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2769 else
2770 cache->framereg = ARM_SP_REGNUM;
2772 /* Determine offset to previous frame. */
2773 cache->framesize
2774 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2776 /* We already got the previous SP. */
2777 cache->prev_sp = vsp;
2779 return cache;
2782 /* Unwinding via ARM exception table entries. Note that the sniffer
2783 already computes a filled-in prologue cache, which is then used
2784 with the same arm_prologue_this_id and arm_prologue_prev_register
2785 routines also used for prologue-parsing based unwinding. */
2787 static int
2788 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2789 struct frame_info *this_frame,
2790 void **this_prologue_cache)
2792 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2793 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2794 CORE_ADDR addr_in_block, exidx_region, func_start;
2795 struct arm_prologue_cache *cache;
2796 gdb_byte *entry;
2798 /* See if we have an ARM exception table entry covering this address. */
2799 addr_in_block = get_frame_address_in_block (this_frame);
2800 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2801 if (!entry)
2802 return 0;
2804 /* The ARM exception table does not describe unwind information
2805 for arbitrary PC values, but is guaranteed to be correct only
2806 at call sites. We have to decide here whether we want to use
2807 ARM exception table information for this frame, or fall back
2808 to using prologue parsing. (Note that if we have DWARF CFI,
2809 this sniffer isn't even called -- CFI is always preferred.)
2811 Before we make this decision, however, we check whether we
2812 actually have *symbol* information for the current frame.
2813 If not, prologue parsing would not work anyway, so we might
2814 as well use the exception table and hope for the best. */
2815 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2817 int exc_valid = 0;
2819 /* If the next frame is "normal", we are at a call site in this
2820 frame, so exception information is guaranteed to be valid. */
2821 if (get_next_frame (this_frame)
2822 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2823 exc_valid = 1;
2825 /* We also assume exception information is valid if we're currently
2826 blocked in a system call. The system library is supposed to
2827 ensure this, so that e.g. pthread cancellation works. */
2828 if (arm_frame_is_thumb (this_frame))
2830 LONGEST insn;
2832 if (safe_read_memory_integer (get_frame_pc (this_frame) - 2, 2,
2833 byte_order_for_code, &insn)
2834 && (insn & 0xff00) == 0xdf00 /* svc */)
2835 exc_valid = 1;
2837 else
2839 LONGEST insn;
2841 if (safe_read_memory_integer (get_frame_pc (this_frame) - 4, 4,
2842 byte_order_for_code, &insn)
2843 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2844 exc_valid = 1;
2847 /* Bail out if we don't know that exception information is valid. */
2848 if (!exc_valid)
2849 return 0;
2851 /* The ARM exception index does not mark the *end* of the region
2852 covered by the entry, and some functions will not have any entry.
2853 To correctly recognize the end of the covered region, the linker
2854 should have inserted dummy records with a CANTUNWIND marker.
2856 Unfortunately, current versions of GNU ld do not reliably do
2857 this, and thus we may have found an incorrect entry above.
2858 As a (temporary) sanity check, we only use the entry if it
2859 lies *within* the bounds of the function. Note that this check
2860 might reject perfectly valid entries that just happen to cover
2861 multiple functions; therefore this check ought to be removed
2862 once the linker is fixed. */
2863 if (func_start > exidx_region)
2864 return 0;
2867 /* Decode the list of unwinding instructions into a prologue cache.
2868 Note that this may fail due to e.g. a "refuse to unwind" code. */
2869 cache = arm_exidx_fill_cache (this_frame, entry);
2870 if (!cache)
2871 return 0;
2873 *this_prologue_cache = cache;
2874 return 1;
2877 struct frame_unwind arm_exidx_unwind = {
2878 NORMAL_FRAME,
2879 default_frame_unwind_stop_reason,
2880 arm_prologue_this_id,
2881 arm_prologue_prev_register,
2882 NULL,
2883 arm_exidx_unwind_sniffer
2886 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2887 trampoline, return the target PC. Otherwise return 0.
2889 void call0a (char c, short s, int i, long l) {}
2891 int main (void)
2893 (*pointer_to_call0a) (c, s, i, l);
2896 Instead of calling a stub library function _call_via_xx (xx is
2897 the register name), GCC may inline the trampoline in the object
2898 file as below (register r2 has the address of call0a).
2900 .global main
2901 .type main, %function
2903 bl .L1
2905 .size main, .-main
2907 .L1:
2908 bx r2
2910 The trampoline 'bx r2' doesn't belong to main. */
2912 static CORE_ADDR
2913 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2915 /* The heuristic for recognizing such a trampoline is that FRAME is
2916 executing in Thumb mode and the instruction at PC is 'bx Rm'. */
2917 if (arm_frame_is_thumb (frame))
2919 gdb_byte buf[2];
2921 if (target_read_memory (pc, buf, 2) == 0)
2923 struct gdbarch *gdbarch = get_frame_arch (frame);
2924 enum bfd_endian byte_order_for_code
2925 = gdbarch_byte_order_for_code (gdbarch);
2926 uint16_t insn
2927 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2929 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2931 CORE_ADDR dest
2932 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2934 /* Clear the LSB so that gdb core sets step-resume
2935 breakpoint at the right address. */
2936 return UNMAKE_THUMB_ADDR (dest);
2941 return 0;
2944 static struct arm_prologue_cache *
2945 arm_make_stub_cache (struct frame_info *this_frame)
2947 struct arm_prologue_cache *cache;
2949 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2950 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2952 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2954 return cache;
2957 /* Our frame ID for a stub frame is the current SP and LR. */
2959 static void
2960 arm_stub_this_id (struct frame_info *this_frame,
2961 void **this_cache,
2962 struct frame_id *this_id)
2964 struct arm_prologue_cache *cache;
2966 if (*this_cache == NULL)
2967 *this_cache = arm_make_stub_cache (this_frame);
2968 cache = *this_cache;
2970 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2973 static int
2974 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2975 struct frame_info *this_frame,
2976 void **this_prologue_cache)
2978 CORE_ADDR addr_in_block;
2979 gdb_byte dummy[4];
2980 CORE_ADDR pc, start_addr;
2981 const char *name;
2983 addr_in_block = get_frame_address_in_block (this_frame);
2984 pc = get_frame_pc (this_frame);
2985 if (in_plt_section (addr_in_block)
2986 /* We also use the stub unwinder if the target memory is unreadable
2987 to avoid having the prologue unwinder try to read it. */
2988 || target_read_memory (pc, dummy, 4) != 0)
2989 return 1;
2991 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2992 && arm_skip_bx_reg (this_frame, pc) != 0)
2993 return 1;
2995 return 0;
2998 struct frame_unwind arm_stub_unwind = {
2999 NORMAL_FRAME,
3000 default_frame_unwind_stop_reason,
3001 arm_stub_this_id,
3002 arm_prologue_prev_register,
3003 NULL,
3004 arm_stub_unwind_sniffer
3007 /* Store, into CACHE->saved_regs, the addresses of the registers
3008 saved by the exception frame described by THIS_FRAME. CACHE is
3009 returned. */
3011 static struct arm_prologue_cache *
3012 arm_m_exception_cache (struct frame_info *this_frame)
3014 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3015 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3016 struct arm_prologue_cache *cache;
3017 CORE_ADDR unwound_sp;
3018 LONGEST xpsr;
3020 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3021 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
3023 unwound_sp = get_frame_register_unsigned (this_frame,
3024 ARM_SP_REGNUM);
3026 /* The hardware saves eight 32-bit words, comprising xPSR,
3027 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
3028 "B1.5.6 Exception entry behavior" in
3029 "ARMv7-M Architecture Reference Manual". */
3030 cache->saved_regs[0].addr = unwound_sp;
3031 cache->saved_regs[1].addr = unwound_sp + 4;
3032 cache->saved_regs[2].addr = unwound_sp + 8;
3033 cache->saved_regs[3].addr = unwound_sp + 12;
3034 cache->saved_regs[12].addr = unwound_sp + 16;
3035 cache->saved_regs[14].addr = unwound_sp + 20;
3036 cache->saved_regs[15].addr = unwound_sp + 24;
3037 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
3039 /* If bit 9 of the saved xPSR is set, then there is a four-byte
3040 aligner between the top of the 32-byte stack frame and the
3041 previous context's stack pointer. */
3042 cache->prev_sp = unwound_sp + 32;
3043 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
3044 && (xpsr & (1 << 9)) != 0)
3045 cache->prev_sp += 4;
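/* As an illustration with a made-up stack pointer: if the unwound SP
   is 0x2000ffe0, then R0..R3, R12, LR, ReturnAddress and xPSR sit at
   0x2000ffe0 .. 0x2000fffc, and the previous SP is 0x20010000, or
   0x20010004 if the xPSR aligner bit (bit 9) is set.  */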
3047 return cache;
3050 /* Implementation of function hook 'this_id' in
3051 'struct frame_unwind'. */
3053 static void
3054 arm_m_exception_this_id (struct frame_info *this_frame,
3055 void **this_cache,
3056 struct frame_id *this_id)
3058 struct arm_prologue_cache *cache;
3060 if (*this_cache == NULL)
3061 *this_cache = arm_m_exception_cache (this_frame);
3062 cache = *this_cache;
3064 /* Our frame ID for an exception frame is the previous SP and the frame's PC. */
3065 *this_id = frame_id_build (cache->prev_sp,
3066 get_frame_pc (this_frame));
3069 /* Implementation of function hook 'prev_register' in
3070 'struct frame_unwind'. */
3072 static struct value *
3073 arm_m_exception_prev_register (struct frame_info *this_frame,
3074 void **this_cache,
3075 int prev_regnum)
3077 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3078 struct arm_prologue_cache *cache;
3080 if (*this_cache == NULL)
3081 *this_cache = arm_m_exception_cache (this_frame);
3082 cache = *this_cache;
3084 /* The value was already reconstructed into PREV_SP. */
3085 if (prev_regnum == ARM_SP_REGNUM)
3086 return frame_unwind_got_constant (this_frame, prev_regnum,
3087 cache->prev_sp);
3089 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3090 prev_regnum);
3093 /* Implementation of function hook 'sniffer' in
3094 'struct frame_unwind'. */
3096 static int
3097 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3098 struct frame_info *this_frame,
3099 void **this_prologue_cache)
3101 CORE_ADDR this_pc = get_frame_pc (this_frame);
3103 /* No need to check is_m; this sniffer is only registered for
3104 M-profile architectures. */
3106 /* Exception frames return to one of these magic PCs. Other values
3107 are not defined as of v7-M. See details in "B1.5.8 Exception
3108 return behavior" in "ARMv7-M Architecture Reference Manual". */
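/* Specifically, 0xfffffff1 is a return to Handler mode, 0xfffffff9 a
   return to Thread mode using the main stack, and 0xfffffffd a return
   to Thread mode using the process stack.  */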
3109 if (this_pc == 0xfffffff1 || this_pc == 0xfffffff9
3110 || this_pc == 0xfffffffd)
3111 return 1;
3113 return 0;
3116 /* Frame unwinder for M-profile exceptions. */
3118 struct frame_unwind arm_m_exception_unwind =
3120 SIGTRAMP_FRAME,
3121 default_frame_unwind_stop_reason,
3122 arm_m_exception_this_id,
3123 arm_m_exception_prev_register,
3124 NULL,
3125 arm_m_exception_unwind_sniffer
3128 static CORE_ADDR
3129 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3131 struct arm_prologue_cache *cache;
3133 if (*this_cache == NULL)
3134 *this_cache = arm_make_prologue_cache (this_frame);
3135 cache = *this_cache;
3137 return cache->prev_sp - cache->framesize;
3140 struct frame_base arm_normal_base = {
3141 &arm_prologue_unwind,
3142 arm_normal_frame_base,
3143 arm_normal_frame_base,
3144 arm_normal_frame_base
3147 /* Assuming THIS_FRAME is a dummy, return the frame ID of that
3148 dummy frame. The frame ID's base needs to match the TOS value
3149 saved by save_dummy_frame_tos() and returned from
3150 arm_push_dummy_call, and the PC needs to match the dummy frame's
3151 breakpoint. */
3153 static struct frame_id
3154 arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
3156 return frame_id_build (get_frame_register_unsigned (this_frame,
3157 ARM_SP_REGNUM),
3158 get_frame_pc (this_frame));
3161 /* Given THIS_FRAME, find the previous frame's resume PC (which will
3162 be used to construct the previous frame's ID, after looking up the
3163 containing function). */
3165 static CORE_ADDR
3166 arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
3168 CORE_ADDR pc;
3169 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
3170 return arm_addr_bits_remove (gdbarch, pc);
3173 static CORE_ADDR
3174 arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
3176 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
3179 static struct value *
3180 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3181 int regnum)
3183 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3184 CORE_ADDR lr, cpsr;
3185 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
3187 switch (regnum)
3189 case ARM_PC_REGNUM:
3190 /* The PC is normally copied from the return column, which
3191 describes saves of LR. However, that version may have an
3192 extra bit set to indicate Thumb state. The bit is not
3193 part of the PC. */
3194 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3195 return frame_unwind_got_constant (this_frame, regnum,
3196 arm_addr_bits_remove (gdbarch, lr));
3198 case ARM_PS_REGNUM:
3199 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3200 cpsr = get_frame_register_unsigned (this_frame, regnum);
3201 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3202 if (IS_THUMB_ADDR (lr))
3203 cpsr |= t_bit;
3204 else
3205 cpsr &= ~t_bit;
3206 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3208 default:
3209 internal_error (__FILE__, __LINE__,
3210 _("Unexpected register %d"), regnum);
3214 static void
3215 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3216 struct dwarf2_frame_state_reg *reg,
3217 struct frame_info *this_frame)
3219 switch (regnum)
3221 case ARM_PC_REGNUM:
3222 case ARM_PS_REGNUM:
3223 reg->how = DWARF2_FRAME_REG_FN;
3224 reg->loc.fn = arm_dwarf2_prev_register;
3225 break;
3226 case ARM_SP_REGNUM:
3227 reg->how = DWARF2_FRAME_REG_CFA;
3228 break;
3232 /* Return true if we are in the function's epilogue, i.e. after the
3233 instruction that destroyed the function's stack frame. */
3235 static int
3236 thumb_in_function_epilogue_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3238 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3239 unsigned int insn, insn2;
3240 int found_return = 0, found_stack_adjust = 0;
3241 CORE_ADDR func_start, func_end;
3242 CORE_ADDR scan_pc;
3243 gdb_byte buf[4];
3245 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3246 return 0;
3248 /* The epilogue is a sequence of instructions along the following lines:
3250 - add stack frame size to SP or FP
3251 - [if frame pointer used] restore SP from FP
3252 - restore registers from SP [may include PC]
3253 - a return-type instruction [if PC wasn't already restored]
3255 In a first pass, we scan forward from the current PC and verify the
3256 instructions we find as compatible with this sequence, ending in a
3257 return instruction.
3259 However, this is not sufficient to distinguish indirect function calls
3260 within a function from indirect tail calls in the epilogue in some cases.
3261 Therefore, if we didn't already find any SP-changing instruction during
3262 forward scan, we add a backward scanning heuristic to ensure we actually
3263 are in the epilogue. */
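/* As a sketch, a typical Thumb epilogue such as "add sp, #16" followed
   by "pop {r4, pc}" satisfies both passes: the forward scan recognizes
   the pop including PC as a return, and the backward scan recognizes
   the SP adjustment immediately before it.  */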
3265 scan_pc = pc;
3266 while (scan_pc < func_end && !found_return)
3268 if (target_read_memory (scan_pc, buf, 2))
3269 break;
3271 scan_pc += 2;
3272 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3274 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3275 found_return = 1;
3276 else if (insn == 0x46f7) /* mov pc, lr */
3277 found_return = 1;
3278 else if (thumb_instruction_restores_sp (insn))
3280 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
3281 found_return = 1;
3283 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
3285 if (target_read_memory (scan_pc, buf, 2))
3286 break;
3288 scan_pc += 2;
3289 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3291 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3293 if (insn2 & 0x8000) /* <registers> include PC. */
3294 found_return = 1;
3296 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3297 && (insn2 & 0x0fff) == 0x0b04)
3299 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3300 found_return = 1;
3302 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3303 && (insn2 & 0x0e00) == 0x0a00)
3305 else
3306 break;
3308 else
3309 break;
3312 if (!found_return)
3313 return 0;
3315 /* Since any instruction in the epilogue sequence, with the possible
3316 exception of return itself, updates the stack pointer, we need to
3317 scan backwards for at most one instruction. Try either a 16-bit or
3318 a 32-bit instruction. This is just a heuristic, so we do not worry
3319 too much about false positives. */
3321 if (pc - 4 < func_start)
3322 return 0;
3323 if (target_read_memory (pc - 4, buf, 4))
3324 return 0;
3326 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3327 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3329 if (thumb_instruction_restores_sp (insn2))
3330 found_stack_adjust = 1;
3331 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3332 found_stack_adjust = 1;
3333 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3334 && (insn2 & 0x0fff) == 0x0b04)
3335 found_stack_adjust = 1;
3336 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3337 && (insn2 & 0x0e00) == 0x0a00)
3338 found_stack_adjust = 1;
3340 return found_stack_adjust;
3343 /* Return true if we are in the function's epilogue, i.e. after the
3344 instruction that destroyed the function's stack frame. */
3346 static int
3347 arm_in_function_epilogue_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3349 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3350 unsigned int insn;
3351 int found_return, found_stack_adjust;
3352 CORE_ADDR func_start, func_end;
3354 if (arm_pc_is_thumb (gdbarch, pc))
3355 return thumb_in_function_epilogue_p (gdbarch, pc);
3357 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3358 return 0;
3360 /* We are in the epilogue if the previous instruction was a stack
3361 adjustment and the next instruction is a possible return (bx, mov
3362 pc, or pop). We could have to scan backwards to find the stack
3363 adjustment, or forwards to find the return, but this is a decent
3364 approximation. First scan forwards. */
3366 found_return = 0;
3367 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3368 if (bits (insn, 28, 31) != INST_NV)
3370 if ((insn & 0x0ffffff0) == 0x012fff10)
3371 /* BX. */
3372 found_return = 1;
3373 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3374 /* MOV PC. */
3375 found_return = 1;
3376 else if ((insn & 0x0fff0000) == 0x08bd0000
3377 && (insn & 0x0000c000) != 0)
3378 /* POP (LDMIA), including PC or LR. */
3379 found_return = 1;
3382 if (!found_return)
3383 return 0;
3385 /* Scan backwards. This is just a heuristic, so do not worry about
3386 false positives from mode changes. */
3388 if (pc < func_start + 4)
3389 return 0;
3391 found_stack_adjust = 0;
3392 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3393 if (bits (insn, 28, 31) != INST_NV)
3395 if ((insn & 0x0df0f000) == 0x0080d000)
3396 /* ADD SP (register or immediate). */
3397 found_stack_adjust = 1;
3398 else if ((insn & 0x0df0f000) == 0x0040d000)
3399 /* SUB SP (register or immediate). */
3400 found_stack_adjust = 1;
3401 else if ((insn & 0x0ffffff0) == 0x01a0d000)
3402 /* MOV SP. */
3403 found_stack_adjust = 1;
3404 else if ((insn & 0x0fff0000) == 0x08bd0000)
3405 /* POP (LDMIA). */
3406 found_stack_adjust = 1;
3407 else if ((insn & 0x0fff0000) == 0x049d0000)
3408 /* POP of a single register. */
3409 found_stack_adjust = 1;
3412 if (found_stack_adjust)
3413 return 1;
3415 return 0;
3419 /* When arguments must be pushed onto the stack, they go on in reverse
3420 order. The code below implements a FILO (stack) to do this. */
3422 struct stack_item
3424 int len;
3425 struct stack_item *prev;
3426 void *data;
3429 static struct stack_item *
3430 push_stack_item (struct stack_item *prev, const void *contents, int len)
3432 struct stack_item *si;
3433 si = xmalloc (sizeof (struct stack_item));
3434 si->data = xmalloc (len);
3435 si->len = len;
3436 si->prev = prev;
3437 memcpy (si->data, contents, len);
3438 return si;
3441 static struct stack_item *
3442 pop_stack_item (struct stack_item *si)
3444 struct stack_item *dead = si;
3445 si = si->prev;
3446 xfree (dead->data);
3447 xfree (dead);
3448 return si;
3452 /* Return the alignment (in bytes) of the given type. */
3454 static int
3455 arm_type_align (struct type *t)
3457 int n;
3458 int align;
3459 int falign;
3461 t = check_typedef (t);
3462 switch (TYPE_CODE (t))
3464 default:
3465 /* Should never happen. */
3466 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
3467 return 4;
3469 case TYPE_CODE_PTR:
3470 case TYPE_CODE_ENUM:
3471 case TYPE_CODE_INT:
3472 case TYPE_CODE_FLT:
3473 case TYPE_CODE_SET:
3474 case TYPE_CODE_RANGE:
3475 case TYPE_CODE_REF:
3476 case TYPE_CODE_CHAR:
3477 case TYPE_CODE_BOOL:
3478 return TYPE_LENGTH (t);
3480 case TYPE_CODE_ARRAY:
3481 case TYPE_CODE_COMPLEX:
3482 /* TODO: What about vector types? */
3483 return arm_type_align (TYPE_TARGET_TYPE (t));
3485 case TYPE_CODE_STRUCT:
3486 case TYPE_CODE_UNION:
3487 align = 1;
3488 for (n = 0; n < TYPE_NFIELDS (t); n++)
3490 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3491 if (falign > align)
3492 align = falign;
3494 return align;
3498 /* Possible base types for a candidate for passing and returning in
3499 VFP registers. */
3501 enum arm_vfp_cprc_base_type
3503 VFP_CPRC_UNKNOWN,
3504 VFP_CPRC_SINGLE,
3505 VFP_CPRC_DOUBLE,
3506 VFP_CPRC_VEC64,
3507 VFP_CPRC_VEC128
3510 /* The length of one element of base type B. */
3512 static unsigned
3513 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3515 switch (b)
3517 case VFP_CPRC_SINGLE:
3518 return 4;
3519 case VFP_CPRC_DOUBLE:
3520 return 8;
3521 case VFP_CPRC_VEC64:
3522 return 8;
3523 case VFP_CPRC_VEC128:
3524 return 16;
3525 default:
3526 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3527 (int) b);
3531 /* The character ('s', 'd' or 'q') for the type of VFP register used
3532 for passing base type B. */
3534 static int
3535 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3537 switch (b)
3539 case VFP_CPRC_SINGLE:
3540 return 's';
3541 case VFP_CPRC_DOUBLE:
3542 return 'd';
3543 case VFP_CPRC_VEC64:
3544 return 'd';
3545 case VFP_CPRC_VEC128:
3546 return 'q';
3547 default:
3548 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3549 (int) b);
3553 /* Determine whether T may be part of a candidate for passing and
3554 returning in VFP registers, ignoring the limit on the total number
3555 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3556 classification of the first valid component found; if it is not
3557 VFP_CPRC_UNKNOWN, all components must have the same classification
3558 as *BASE_TYPE. If it is found that T contains a type not permitted
3559 for passing and returning in VFP registers, a type differently
3560 classified from *BASE_TYPE, or two types differently classified
3561 from each other, return -1, otherwise return the total number of
3562 base-type elements found (possibly 0 in an empty structure or
3563 array). Vector types are not currently supported, matching the
3564 generic AAPCS support. */
3566 static int
3567 arm_vfp_cprc_sub_candidate (struct type *t,
3568 enum arm_vfp_cprc_base_type *base_type)
3570 t = check_typedef (t);
3571 switch (TYPE_CODE (t))
3573 case TYPE_CODE_FLT:
3574 switch (TYPE_LENGTH (t))
3576 case 4:
3577 if (*base_type == VFP_CPRC_UNKNOWN)
3578 *base_type = VFP_CPRC_SINGLE;
3579 else if (*base_type != VFP_CPRC_SINGLE)
3580 return -1;
3581 return 1;
3583 case 8:
3584 if (*base_type == VFP_CPRC_UNKNOWN)
3585 *base_type = VFP_CPRC_DOUBLE;
3586 else if (*base_type != VFP_CPRC_DOUBLE)
3587 return -1;
3588 return 1;
3590 default:
3591 return -1;
3593 break;
3595 case TYPE_CODE_COMPLEX:
3596 /* Arguments of complex T where T is one of the types float or
3597 double get treated as if they are implemented as:
3599 struct complexT
3601 T real;
3602 T imag;
3606 switch (TYPE_LENGTH (t))
3608 case 8:
3609 if (*base_type == VFP_CPRC_UNKNOWN)
3610 *base_type = VFP_CPRC_SINGLE;
3611 else if (*base_type != VFP_CPRC_SINGLE)
3612 return -1;
3613 return 2;
3615 case 16:
3616 if (*base_type == VFP_CPRC_UNKNOWN)
3617 *base_type = VFP_CPRC_DOUBLE;
3618 else if (*base_type != VFP_CPRC_DOUBLE)
3619 return -1;
3620 return 2;
3622 default:
3623 return -1;
3625 break;
3627 case TYPE_CODE_ARRAY:
3629 int count;
3630 unsigned unitlen;
3631 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t), base_type);
3632 if (count == -1)
3633 return -1;
3634 if (TYPE_LENGTH (t) == 0)
3636 gdb_assert (count == 0);
3637 return 0;
3639 else if (count == 0)
3640 return -1;
3641 unitlen = arm_vfp_cprc_unit_length (*base_type);
3642 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3643 return TYPE_LENGTH (t) / unitlen;
3645 break;
3647 case TYPE_CODE_STRUCT:
3649 int count = 0;
3650 unsigned unitlen;
3651 int i;
3652 for (i = 0; i < TYPE_NFIELDS (t); i++)
3654 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3655 base_type);
3656 if (sub_count == -1)
3657 return -1;
3658 count += sub_count;
3660 if (TYPE_LENGTH (t) == 0)
3662 gdb_assert (count == 0);
3663 return 0;
3665 else if (count == 0)
3666 return -1;
3667 unitlen = arm_vfp_cprc_unit_length (*base_type);
3668 if (TYPE_LENGTH (t) != unitlen * count)
3669 return -1;
3670 return count;
3673 case TYPE_CODE_UNION:
3675 int count = 0;
3676 unsigned unitlen;
3677 int i;
3678 for (i = 0; i < TYPE_NFIELDS (t); i++)
3680 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3681 base_type);
3682 if (sub_count == -1)
3683 return -1;
3684 count = (count > sub_count ? count : sub_count);
3686 if (TYPE_LENGTH (t) == 0)
3688 gdb_assert (count == 0);
3689 return 0;
3691 else if (count == 0)
3692 return -1;
3693 unitlen = arm_vfp_cprc_unit_length (*base_type);
3694 if (TYPE_LENGTH (t) != unitlen * count)
3695 return -1;
3696 return count;
3699 default:
3700 break;
3703 return -1;
3706 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3707 if passed to or returned from a non-variadic function with the VFP
3708 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3709 *BASE_TYPE to the base type for T and *COUNT to the number of
3710 elements of that base type before returning. */
3712 static int
3713 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3714 int *count)
3716 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3717 int c = arm_vfp_cprc_sub_candidate (t, &b);
3718 if (c <= 0 || c > 4)
3719 return 0;
3720 *base_type = b;
3721 *count = c;
3722 return 1;
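/* For example, a "struct { float x, y, z; }" is a CPRC with base type
   VFP_CPRC_SINGLE and a count of 3 (passed in s0-s2 under the VFP ABI),
   while a structure of five doubles exceeds the four-element limit and
   is not a CPRC.  */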
3725 /* Return 1 if the VFP ABI should be used for passing arguments to and
3726 returning values from a function of type FUNC_TYPE, 0
3727 otherwise. */
3729 static int
3730 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3732 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3733 /* Variadic functions always use the base ABI. Assume that functions
3734 without debug info are not variadic. */
3735 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3736 return 0;
3737 /* The VFP ABI is only supported as a variant of AAPCS. */
3738 if (tdep->arm_abi != ARM_ABI_AAPCS)
3739 return 0;
3740 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3743 /* We currently only support passing parameters in integer registers, which
3744 conforms with GCC's default model, and VFP argument passing following
3745 the VFP variant of AAPCS. Several other variants exist and
3746 we should probably support some of them based on the selected ABI. */
3748 static CORE_ADDR
3749 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3750 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3751 struct value **args, CORE_ADDR sp, int struct_return,
3752 CORE_ADDR struct_addr)
3754 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3755 int argnum;
3756 int argreg;
3757 int nstack;
3758 struct stack_item *si = NULL;
3759 int use_vfp_abi;
3760 struct type *ftype;
3761 unsigned vfp_regs_free = (1 << 16) - 1;
3763 /* Determine the type of this function and whether the VFP ABI
3764 applies. */
3765 ftype = check_typedef (value_type (function));
3766 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3767 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3768 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3770 /* Set the return address. For the ARM, the return breakpoint is
3771 always at BP_ADDR. */
3772 if (arm_pc_is_thumb (gdbarch, bp_addr))
3773 bp_addr |= 1;
3774 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3776 /* Walk through the list of args and determine how large a temporary
3777 stack is required. Need to take care here as structs may be
3778 passed on the stack, and we have to push them. */
3779 nstack = 0;
3781 argreg = ARM_A1_REGNUM;
3782 nstack = 0;
3784 /* The struct_return pointer occupies the first parameter
3785 passing register. */
3786 if (struct_return)
3788 if (arm_debug)
3789 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3790 gdbarch_register_name (gdbarch, argreg),
3791 paddress (gdbarch, struct_addr));
3792 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3793 argreg++;
3796 for (argnum = 0; argnum < nargs; argnum++)
3798 int len;
3799 struct type *arg_type;
3800 struct type *target_type;
3801 enum type_code typecode;
3802 const bfd_byte *val;
3803 int align;
3804 enum arm_vfp_cprc_base_type vfp_base_type;
3805 int vfp_base_count;
3806 int may_use_core_reg = 1;
3808 arg_type = check_typedef (value_type (args[argnum]));
3809 len = TYPE_LENGTH (arg_type);
3810 target_type = TYPE_TARGET_TYPE (arg_type);
3811 typecode = TYPE_CODE (arg_type);
3812 val = value_contents (args[argnum]);
3814 align = arm_type_align (arg_type);
3815 /* Round alignment up to a whole number of words. */
3816 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3817 /* Different ABIs have different maximum alignments. */
3818 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3820 /* The APCS ABI only requires word alignment. */
3821 align = INT_REGISTER_SIZE;
3823 else
3825 /* The AAPCS requires at most doubleword alignment. */
3826 if (align > INT_REGISTER_SIZE * 2)
3827 align = INT_REGISTER_SIZE * 2;
3830 if (use_vfp_abi
3831 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3832 &vfp_base_count))
3834 int regno;
3835 int unit_length;
3836 int shift;
3837 unsigned mask;
3839 /* Because this is a CPRC it cannot go in a core register or
3840 cause a core register to be skipped for alignment.
3841 Either it goes in VFP registers and the rest of this loop
3842 iteration is skipped for this argument, or it goes on the
3843 stack (and the stack alignment code is correct for this
3844 case). */
3845 may_use_core_reg = 0;
3847 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3848 shift = unit_length / 4;
3849 mask = (1 << (shift * vfp_base_count)) - 1;
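/* For a hypothetical argument of two doubles: unit_length is 8, so
   shift is 2 and mask is 0xf, and the search below looks for four
   consecutive free single-precision slots on an even boundary, i.e.
   an aligned d-register pair such as d0/d1.  */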
3850 for (regno = 0; regno < 16; regno += shift)
3851 if (((vfp_regs_free >> regno) & mask) == mask)
3852 break;
3854 if (regno < 16)
3856 int reg_char;
3857 int reg_scaled;
3858 int i;
3860 vfp_regs_free &= ~(mask << regno);
3861 reg_scaled = regno / shift;
3862 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3863 for (i = 0; i < vfp_base_count; i++)
3865 char name_buf[4];
3866 int regnum;
3867 if (reg_char == 'q')
3868 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3869 val + i * unit_length);
3870 else
3872 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3873 reg_char, reg_scaled + i);
3874 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3875 strlen (name_buf));
3876 regcache_cooked_write (regcache, regnum,
3877 val + i * unit_length);
3880 continue;
3882 else
3884 /* This CPRC could not go in VFP registers, so all VFP
3885 registers are now marked as used. */
3886 vfp_regs_free = 0;
3890 /* Push stack padding for doubleword alignment. */
3891 if (nstack & (align - 1))
3893 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3894 nstack += INT_REGISTER_SIZE;
3897 /* Doubleword aligned quantities must go in even register pairs. */
3898 if (may_use_core_reg
3899 && argreg <= ARM_LAST_ARG_REGNUM
3900 && align > INT_REGISTER_SIZE
3901 && argreg & 1)
3902 argreg++;
3904 /* If the argument is a pointer to a function, and it is a
3905 Thumb function, create a LOCAL copy of the value and set
3906 the THUMB bit in it. */
3907 if (TYPE_CODE_PTR == typecode
3908 && target_type != NULL
3909 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3911 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3912 if (arm_pc_is_thumb (gdbarch, regval))
3914 bfd_byte *copy = alloca (len);
3915 store_unsigned_integer (copy, len, byte_order,
3916 MAKE_THUMB_ADDR (regval));
3917 val = copy;
3921 /* Copy the argument to general registers or the stack in
3922 register-sized pieces. Large arguments are split between
3923 registers and stack. */
3924 while (len > 0)
3926 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
3928 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3930 /* The argument is being passed in a general purpose
3931 register. */
3932 CORE_ADDR regval
3933 = extract_unsigned_integer (val, partial_len, byte_order);
3934 if (byte_order == BFD_ENDIAN_BIG)
3935 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
3936 if (arm_debug)
3937 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3938 argnum,
3939 gdbarch_register_name
3940 (gdbarch, argreg),
3941 phex (regval, INT_REGISTER_SIZE));
3942 regcache_cooked_write_unsigned (regcache, argreg, regval);
3943 argreg++;
3945 else
3947 /* Push the arguments onto the stack. */
3948 if (arm_debug)
3949 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3950 argnum, nstack);
3951 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3952 nstack += INT_REGISTER_SIZE;
3955 len -= partial_len;
3956 val += partial_len;
3959 /* If we have an odd number of words to push, then decrement the stack
3960 by one word now, so the first stack argument will be doubleword aligned. */
3961 if (nstack & 4)
3962 sp -= 4;
3964 while (si)
3966 sp -= si->len;
3967 write_memory (sp, si->data, si->len);
3968 si = pop_stack_item (si);
3971 /* Finally, update the SP register. */
3972 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3974 return sp;
3978 /* Always align the frame to an 8-byte boundary. This is required on
3979 some platforms and harmless on the rest. */
3981 static CORE_ADDR
3982 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3984 /* Align the stack to eight bytes. */
3985 return sp & ~ (CORE_ADDR) 7;
3988 static void
3989 print_fpu_flags (struct ui_file *file, int flags)
3991 if (flags & (1 << 0))
3992 fputs_filtered ("IVO ", file);
3993 if (flags & (1 << 1))
3994 fputs_filtered ("DVZ ", file);
3995 if (flags & (1 << 2))
3996 fputs_filtered ("OFL ", file);
3997 if (flags & (1 << 3))
3998 fputs_filtered ("UFL ", file);
3999 if (flags & (1 << 4))
4000 fputs_filtered ("INX ", file);
4001 fputc_filtered ('\n', file);
4004 /* Print interesting information about the floating point processor
4005 (if present) or emulator. */
4006 static void
4007 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
4008 struct frame_info *frame, const char *args)
4010 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
4011 int type;
4013 type = (status >> 24) & 127;
4014 if (status & (1 << 31))
4015 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
4016 else
4017 fprintf_filtered (file, _("Software FPU type %d\n"), type);
4018 /* i18n: [floating point unit] mask */
4019 fputs_filtered (_("mask: "), file);
4020 print_fpu_flags (file, status >> 16);
4021 /* i18n: [floating point unit] flags */
4022 fputs_filtered (_("flags: "), file);
4023 print_fpu_flags (file, status);
4026 /* Construct the ARM extended floating point type. */
4027 static struct type *
4028 arm_ext_type (struct gdbarch *gdbarch)
4030 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4032 if (!tdep->arm_ext_type)
4033 tdep->arm_ext_type
4034 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
4035 floatformats_arm_ext);
4037 return tdep->arm_ext_type;
4040 static struct type *
4041 arm_neon_double_type (struct gdbarch *gdbarch)
4043 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4045 if (tdep->neon_double_type == NULL)
4047 struct type *t, *elem;
4049 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
4050 TYPE_CODE_UNION);
4051 elem = builtin_type (gdbarch)->builtin_uint8;
4052 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
4053 elem = builtin_type (gdbarch)->builtin_uint16;
4054 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
4055 elem = builtin_type (gdbarch)->builtin_uint32;
4056 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
4057 elem = builtin_type (gdbarch)->builtin_uint64;
4058 append_composite_type_field (t, "u64", elem);
4059 elem = builtin_type (gdbarch)->builtin_float;
4060 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
4061 elem = builtin_type (gdbarch)->builtin_double;
4062 append_composite_type_field (t, "f64", elem);
4064 TYPE_VECTOR (t) = 1;
4065 TYPE_NAME (t) = "neon_d";
4066 tdep->neon_double_type = t;
4069 return tdep->neon_double_type;
4072 /* FIXME: The vector types are not correctly ordered on big-endian
4073 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4074 bits of d0 - regardless of what unit size is being held in d0. So
4075 the offset of the first uint8 in d0 is 7, but the offset of the
4076 first float is 4. This code works as-is for little-endian
4077 targets. */
4079 static struct type *
4080 arm_neon_quad_type (struct gdbarch *gdbarch)
4082 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4084 if (tdep->neon_quad_type == NULL)
4086 struct type *t, *elem;
4088 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4089 TYPE_CODE_UNION);
4090 elem = builtin_type (gdbarch)->builtin_uint8;
4091 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4092 elem = builtin_type (gdbarch)->builtin_uint16;
4093 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4094 elem = builtin_type (gdbarch)->builtin_uint32;
4095 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4096 elem = builtin_type (gdbarch)->builtin_uint64;
4097 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4098 elem = builtin_type (gdbarch)->builtin_float;
4099 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4100 elem = builtin_type (gdbarch)->builtin_double;
4101 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4103 TYPE_VECTOR (t) = 1;
4104 TYPE_NAME (t) = "neon_q";
4105 tdep->neon_quad_type = t;
4108 return tdep->neon_quad_type;
4111 /* Return the GDB type object for the "standard" data type of data in
4112 register N. */
4114 static struct type *
4115 arm_register_type (struct gdbarch *gdbarch, int regnum)
4117 int num_regs = gdbarch_num_regs (gdbarch);
4119 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
4120 && regnum >= num_regs && regnum < num_regs + 32)
4121 return builtin_type (gdbarch)->builtin_float;
4123 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
4124 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
4125 return arm_neon_quad_type (gdbarch);
4127 /* If the target description has register information, we are only
4128 in this function so that we can override the types of
4129 double-precision registers for NEON. */
4130 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4132 struct type *t = tdesc_register_type (gdbarch, regnum);
4134 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4135 && TYPE_CODE (t) == TYPE_CODE_FLT
4136 && gdbarch_tdep (gdbarch)->have_neon)
4137 return arm_neon_double_type (gdbarch);
4138 else
4139 return t;
4142 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
4144 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4145 return builtin_type (gdbarch)->builtin_void;
4147 return arm_ext_type (gdbarch);
4149 else if (regnum == ARM_SP_REGNUM)
4150 return builtin_type (gdbarch)->builtin_data_ptr;
4151 else if (regnum == ARM_PC_REGNUM)
4152 return builtin_type (gdbarch)->builtin_func_ptr;
4153 else if (regnum >= ARRAY_SIZE (arm_register_names))
4154 /* These registers are only supported on targets which supply
4155 an XML description. */
4156 return builtin_type (gdbarch)->builtin_int0;
4157 else
4158 return builtin_type (gdbarch)->builtin_uint32;
4161 /* Map a DWARF register REGNUM onto the appropriate GDB register
4162 number. */
4164 static int
4165 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4167 /* Core integer regs. */
4168 if (reg >= 0 && reg <= 15)
4169 return reg;
4171 /* Legacy FPA encoding. These were once used in a way which
4172 overlapped with VFP register numbering, so their use is
4173 discouraged, but GDB doesn't support the ARM toolchain
4174 which used them for VFP. */
4175 if (reg >= 16 && reg <= 23)
4176 return ARM_F0_REGNUM + reg - 16;
4178 /* New assignments for the FPA registers. */
4179 if (reg >= 96 && reg <= 103)
4180 return ARM_F0_REGNUM + reg - 96;
4182 /* WMMX register assignments. */
4183 if (reg >= 104 && reg <= 111)
4184 return ARM_WCGR0_REGNUM + reg - 104;
4186 if (reg >= 112 && reg <= 127)
4187 return ARM_WR0_REGNUM + reg - 112;
4189 if (reg >= 192 && reg <= 199)
4190 return ARM_WC0_REGNUM + reg - 192;
4192 /* VFP v2 registers. A double precision value is actually
4193 in d1 rather than s2, but the ABI only defines numbering
4194 for the single precision registers. This will "just work"
4195 in GDB for little endian targets (we'll read eight bytes,
4196 starting in s0 and then progressing to s1), but will be
4197 reversed on big endian targets with VFP. This won't
4198 be a problem for the new Neon quad registers; you're supposed
4199 to use DW_OP_piece for those. */
4200 if (reg >= 64 && reg <= 95)
4202 char name_buf[4];
4204 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4205 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4206 strlen (name_buf));
4209 /* VFP v3 / Neon registers. This range is also used for VFP v2
4210 registers, except that it now describes d0 instead of s0. */
4211 if (reg >= 256 && reg <= 287)
4213 char name_buf[4];
4215 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4216 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4217 strlen (name_buf));
4220 return -1;
4223 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4224 static int
4225 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4227 int reg = regnum;
4228 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4230 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4231 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4233 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4234 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4236 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4237 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4239 if (reg < NUM_GREGS)
4240 return SIM_ARM_R0_REGNUM + reg;
4241 reg -= NUM_GREGS;
4243 if (reg < NUM_FREGS)
4244 return SIM_ARM_FP0_REGNUM + reg;
4245 reg -= NUM_FREGS;
4247 if (reg < NUM_SREGS)
4248 return SIM_ARM_FPS_REGNUM + reg;
4249 reg -= NUM_SREGS;
4251 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4254 /* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4255 convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
4256 It is thought that this is the floating-point register format on
4257 little-endian systems. */
4259 static void
4260 convert_from_extended (const struct floatformat *fmt, const void *ptr,
4261 void *dbl, int endianess)
4263 DOUBLEST d;
4265 if (endianess == BFD_ENDIAN_BIG)
4266 floatformat_to_doublest (&floatformat_arm_ext_big, ptr, &d);
4267 else
4268 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword,
4269 ptr, &d);
4270 floatformat_from_doublest (fmt, &d, dbl);
4273 static void
4274 convert_to_extended (const struct floatformat *fmt, void *dbl, const void *ptr,
4275 int endianess)
4277 DOUBLEST d;
4279 floatformat_to_doublest (fmt, ptr, &d);
4280 if (endianess == BFD_ENDIAN_BIG)
4281 floatformat_from_doublest (&floatformat_arm_ext_big, &d, dbl);
4282 else
4283 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword,
4284 &d, dbl);
4287 static int
4288 condition_true (unsigned long cond, unsigned long status_reg)
4290 if (cond == INST_AL || cond == INST_NV)
4291 return 1;
4293 switch (cond)
4295 case INST_EQ:
4296 return ((status_reg & FLAG_Z) != 0);
4297 case INST_NE:
4298 return ((status_reg & FLAG_Z) == 0);
4299 case INST_CS:
4300 return ((status_reg & FLAG_C) != 0);
4301 case INST_CC:
4302 return ((status_reg & FLAG_C) == 0);
4303 case INST_MI:
4304 return ((status_reg & FLAG_N) != 0);
4305 case INST_PL:
4306 return ((status_reg & FLAG_N) == 0);
4307 case INST_VS:
4308 return ((status_reg & FLAG_V) != 0);
4309 case INST_VC:
4310 return ((status_reg & FLAG_V) == 0);
4311 case INST_HI:
4312 return ((status_reg & (FLAG_C | FLAG_Z)) == FLAG_C);
4313 case INST_LS:
4314 return ((status_reg & (FLAG_C | FLAG_Z)) != FLAG_C);
4315 case INST_GE:
4316 return (((status_reg & FLAG_N) == 0) == ((status_reg & FLAG_V) == 0));
4317 case INST_LT:
4318 return (((status_reg & FLAG_N) == 0) != ((status_reg & FLAG_V) == 0));
4319 case INST_GT:
4320 return (((status_reg & FLAG_Z) == 0)
4321 && (((status_reg & FLAG_N) == 0)
4322 == ((status_reg & FLAG_V) == 0)));
4323 case INST_LE:
4324 return (((status_reg & FLAG_Z) != 0)
4325 || (((status_reg & FLAG_N) == 0)
4326 != ((status_reg & FLAG_V) == 0)));
4328 return 1;
4331 static unsigned long
4332 shifted_reg_val (struct frame_info *frame, unsigned long inst, int carry,
4333 unsigned long pc_val, unsigned long status_reg)
4335 unsigned long res, shift;
4336 int rm = bits (inst, 0, 3);
4337 unsigned long shifttype = bits (inst, 5, 6);
4339 if (bit (inst, 4))
4341 int rs = bits (inst, 8, 11);
4342 shift = (rs == 15 ? pc_val + 8
4343 : get_frame_register_unsigned (frame, rs)) & 0xFF;
4345 else
4346 shift = bits (inst, 7, 11);
4348 res = (rm == ARM_PC_REGNUM
4349 ? (pc_val + (bit (inst, 4) ? 12 : 8))
4350 : get_frame_register_unsigned (frame, rm));
4352 switch (shifttype)
4354 case 0: /* LSL */
4355 res = shift >= 32 ? 0 : res << shift;
4356 break;
4358 case 1: /* LSR */
4359 res = shift >= 32 ? 0 : res >> shift;
4360 break;
4362 case 2: /* ASR */
4363 if (shift >= 32)
4364 shift = 31;
4365 res = ((res & 0x80000000L)
4366 ? ~((~res) >> shift) : res >> shift);
4367 break;
4369 case 3: /* ROR/RRX */
4370 shift &= 31;
4371 if (shift == 0)
4372 res = (res >> 1) | (carry ? 0x80000000L : 0);
4373 else
4374 res = (res >> shift) | (res << (32 - shift));
4375 break;
4378 return res & 0xffffffff;
4381 /* Return number of 1-bits in VAL. */
4383 static int
4384 bitcount (unsigned long val)
4386 int nbits;
4387 for (nbits = 0; val != 0; nbits++)
4388 val &= val - 1; /* Delete rightmost 1-bit in val. */
4389 return nbits;
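/* For example, bitcount (0x29) clears 1-bits in the order 0x29 ->
   0x28 -> 0x20 -> 0x00 and so returns 3.  */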
4392 /* Return the size in bytes of the complete Thumb instruction whose
4393 first halfword is INST1. */
4395 static int
4396 thumb_insn_size (unsigned short inst1)
4398 if ((inst1 & 0xe000) == 0xe000 && (inst1 & 0x1800) != 0)
4399 return 4;
4400 else
4401 return 2;
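/* For example, 0xf7ff (the first halfword of a 32-bit BL) yields 4,
   while 0x4770 ("bx lr") yields 2.  */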
4404 static int
4405 thumb_advance_itstate (unsigned int itstate)
4407 /* Preserve IT[7:5], the first three bits of the condition. Shift
4408 the upcoming condition flags left by one bit. */
4409 itstate = (itstate & 0xe0) | ((itstate << 1) & 0x1f);
4411 /* If we have finished the IT block, clear the state. */
4412 if ((itstate & 0x0f) == 0)
4413 itstate = 0;
4415 return itstate;
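/* For example, an ITSTATE of 0xa8 (low four bits 0x8, i.e. the last
   instruction of an IT block) advances to (0xa8 & 0xe0) | ((0xa8 << 1)
   & 0x1f) == 0xb0, whose low four bits are zero, so the state is then
   cleared to 0.  */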
4418 /* Find the next PC after the current instruction executes. In some
4419 cases we can not statically determine the answer (see the IT state
4420 handling in this function); in that case, a breakpoint may be
4421 inserted in addition to the returned PC, which will be used to set
4422 another breakpoint by our caller. */
4424 static CORE_ADDR
4425 thumb_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
4427 struct gdbarch *gdbarch = get_frame_arch (frame);
4428 struct address_space *aspace = get_frame_address_space (frame);
4429 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
4430 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4431 unsigned long pc_val = ((unsigned long) pc) + 4; /* PC after prefetch */
4432 unsigned short inst1;
4433 CORE_ADDR nextpc = pc + 2; /* Default is next instruction. */
4434 unsigned long offset;
4435 ULONGEST status, itstate;
4437 nextpc = MAKE_THUMB_ADDR (nextpc);
4438 pc_val = MAKE_THUMB_ADDR (pc_val);
4440 inst1 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);
4442 /* Thumb-2 conditional execution support. There are eight bits in
4443 the CPSR which describe conditional execution state. Once
4444 reconstructed (they're in a funny order), the low five bits
4445 describe the low bit of the condition for each instruction and
4446 how many instructions remain. The high three bits describe the
4447 base condition. One of the low four bits will be set if an IT
4448 block is active. These bits read as zero on earlier
4449 processors. */
4450 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
4451 itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
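/* (In the CPSR itself, IT[1:0] live in bits 26:25 and IT[7:2] in bits
   15:10; the two shifted-and-masked terms above reassemble them into a
   single byte.)  */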
4453 /* If-Then handling. On GNU/Linux, where this routine is used, we
4454 use an undefined instruction as a breakpoint. Unlike BKPT, IT
4455 can disable execution of the undefined instruction. So we might
4456 miss the breakpoint if we set it on a skipped conditional
4457 instruction. Because conditional instructions can change the
4458 flags, affecting the execution of further instructions, we may
4459 need to set two breakpoints. */
4461 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint != NULL)
4463 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4465 /* An IT instruction. Because this instruction does not
4466 modify the flags, we can accurately predict the next
4467 executed instruction. */
4468 itstate = inst1 & 0x00ff;
4469 pc += thumb_insn_size (inst1);
4471 while (itstate != 0 && ! condition_true (itstate >> 4, status))
4473 inst1 = read_memory_unsigned_integer (pc, 2,
4474 byte_order_for_code);
4475 pc += thumb_insn_size (inst1);
4476 itstate = thumb_advance_itstate (itstate);
4479 return MAKE_THUMB_ADDR (pc);
4481 else if (itstate != 0)
4483 /* We are in a conditional block. Check the condition. */
4484 if (! condition_true (itstate >> 4, status))
4486 /* Advance to the next executed instruction. */
4487 pc += thumb_insn_size (inst1);
4488 itstate = thumb_advance_itstate (itstate);
4490 while (itstate != 0 && ! condition_true (itstate >> 4, status))
4492 inst1 = read_memory_unsigned_integer (pc, 2,
4493 byte_order_for_code);
4494 pc += thumb_insn_size (inst1);
4495 itstate = thumb_advance_itstate (itstate);
4498 return MAKE_THUMB_ADDR (pc);
4500 else if ((itstate & 0x0f) == 0x08)
4502 /* This is the last instruction of the conditional
4503 block, and it is executed. We can handle it normally
4504 because the following instruction is not conditional,
4505 and we must handle it normally because it is
4506 permitted to branch. Fall through. */
4508 else
4510 int cond_negated;
4512 /* There are conditional instructions after this one.
4513 If this instruction modifies the flags, then we can
4514 not predict what the next executed instruction will
4515 be. Fortunately, this instruction is architecturally
4516 forbidden to branch; we know it will fall through.
4517 Start by skipping past it. */
4518 pc += thumb_insn_size (inst1);
4519 itstate = thumb_advance_itstate (itstate);
4521 /* Set a breakpoint on the following instruction. */
4522 gdb_assert ((itstate & 0x0f) != 0);
4523 arm_insert_single_step_breakpoint (gdbarch, aspace,
4524 MAKE_THUMB_ADDR (pc));
4525 cond_negated = (itstate >> 4) & 1;
4527 /* Skip all following instructions with the same
4528 condition. If there is a later instruction in the IT
4529 block with the opposite condition, set the other
4530 breakpoint there. If not, then set a breakpoint on
4531 the instruction after the IT block. */
4534 inst1 = read_memory_unsigned_integer (pc, 2,
4535 byte_order_for_code);
4536 pc += thumb_insn_size (inst1);
4537 itstate = thumb_advance_itstate (itstate);
4539 while (itstate != 0 && ((itstate >> 4) & 1) == cond_negated);
4541 return MAKE_THUMB_ADDR (pc);
4545 else if (itstate & 0x0f)
4547 /* We are in a conditional block. Check the condition. */
4548 int cond = itstate >> 4;
4550 if (! condition_true (cond, status))
4551 /* Advance to the next instruction. All the 32-bit
4552 instructions share a common prefix. */
4553 return MAKE_THUMB_ADDR (pc + thumb_insn_size (inst1));
4555 /* Otherwise, handle the instruction normally. */
4558 if ((inst1 & 0xff00) == 0xbd00) /* pop {rlist, pc} */
4560 CORE_ADDR sp;
4562 /* Fetch the saved PC from the stack. It's stored above
4563 all of the other registers. */
4564 offset = bitcount (bits (inst1, 0, 7)) * INT_REGISTER_SIZE;
4565 sp = get_frame_register_unsigned (frame, ARM_SP_REGNUM);
4566 nextpc = read_memory_unsigned_integer (sp + offset, 4, byte_order);
4568 else if ((inst1 & 0xf000) == 0xd000) /* conditional branch */
4570 unsigned long cond = bits (inst1, 8, 11);
4571 if (cond == 0x0f) /* 0x0f = SWI */
4573 struct gdbarch_tdep *tdep;
4574 tdep = gdbarch_tdep (gdbarch);
4576 if (tdep->syscall_next_pc != NULL)
4577 nextpc = tdep->syscall_next_pc (frame);
4580 else if (cond != 0x0f && condition_true (cond, status))
4581 nextpc = pc_val + (sbits (inst1, 0, 7) << 1);
4583 else if ((inst1 & 0xf800) == 0xe000) /* unconditional branch */
4585 nextpc = pc_val + (sbits (inst1, 0, 10) << 1);
4587 else if (thumb_insn_size (inst1) == 4) /* 32-bit instruction */
4589 unsigned short inst2;
4590 inst2 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);
4592 /* Default to the next instruction. */
4593 nextpc = pc + 4;
4594 nextpc = MAKE_THUMB_ADDR (nextpc);
4596 if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
4598 /* Branches and miscellaneous control instructions. */
4600 if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
4602 /* B, BL, BLX. */
4603 int j1, j2, imm1, imm2;
4605 imm1 = sbits (inst1, 0, 10);
4606 imm2 = bits (inst2, 0, 10);
4607 j1 = bit (inst2, 13);
4608 j2 = bit (inst2, 11);
4610 offset = ((imm1 << 12) + (imm2 << 1));
4611 offset ^= ((!j2) << 22) | ((!j1) << 23);
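/* The exclusive-ORs above recover I1 = NOT(J1 EOR S) and I2 =
   NOT(J2 EOR S) from the Thumb-2 B/BL/BLX encoding; the sign bit S is
   already folded into IMM1 by the sign extension.  */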
4613 nextpc = pc_val + offset;
4614 /* For BLX make sure to clear the low bits. */
4615 if (bit (inst2, 12) == 0)
4616 nextpc = nextpc & 0xfffffffc;
4618 else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
4620 /* SUBS PC, LR, #imm8. */
4621 nextpc = get_frame_register_unsigned (frame, ARM_LR_REGNUM);
4622 nextpc -= inst2 & 0x00ff;
4624 else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
4626 /* Conditional branch. */
4627 if (condition_true (bits (inst1, 6, 9), status))
4629 int sign, j1, j2, imm1, imm2;
4631 sign = sbits (inst1, 10, 10);
4632 imm1 = bits (inst1, 0, 5);
4633 imm2 = bits (inst2, 0, 10);
4634 j1 = bit (inst2, 13);
4635 j2 = bit (inst2, 11);
4637 offset = (sign << 20) + (j2 << 19) + (j1 << 18);
4638 offset += (imm1 << 12) + (imm2 << 1);
4640 nextpc = pc_val + offset;
4644 else if ((inst1 & 0xfe50) == 0xe810)
4646 /* Load multiple or RFE. */
4647 int rn, offset, load_pc = 1;
4649 rn = bits (inst1, 0, 3);
4650 if (bit (inst1, 7) && !bit (inst1, 8))
4652 /* LDMIA or POP */
4653 if (!bit (inst2, 15))
4654 load_pc = 0;
4655 offset = bitcount (inst2) * 4 - 4;
4657 else if (!bit (inst1, 7) && bit (inst1, 8))
4659 /* LDMDB */
4660 if (!bit (inst2, 15))
4661 load_pc = 0;
4662 offset = -4;
4664 else if (bit (inst1, 7) && bit (inst1, 8))
4666 /* RFEIA */
4667 offset = 0;
4669 else if (!bit (inst1, 7) && !bit (inst1, 8))
4671 /* RFEDB */
4672 offset = -8;
4674 else
4675 load_pc = 0;
4677 if (load_pc)
4679 CORE_ADDR addr = get_frame_register_unsigned (frame, rn);
4680 nextpc = get_frame_memory_unsigned (frame, addr + offset, 4);
4683 else if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
4685 /* MOV PC or MOVS PC. */
4686 nextpc = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4687 nextpc = MAKE_THUMB_ADDR (nextpc);
4689 else if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
4691 /* LDR PC. */
4692 CORE_ADDR base;
4693 int rn, load_pc = 1;
4695 rn = bits (inst1, 0, 3);
4696 base = get_frame_register_unsigned (frame, rn);
4697 if (rn == ARM_PC_REGNUM)
4699 base = (base + 4) & ~(CORE_ADDR) 0x3;
4700 if (bit (inst1, 7))
4701 base += bits (inst2, 0, 11);
4702 else
4703 base -= bits (inst2, 0, 11);
4705 else if (bit (inst1, 7))
4706 base += bits (inst2, 0, 11);
4707 else if (bit (inst2, 11))
4709 if (bit (inst2, 10))
4711 if (bit (inst2, 9))
4712 base += bits (inst2, 0, 7);
4713 else
4714 base -= bits (inst2, 0, 7);
4717 else if ((inst2 & 0x0fc0) == 0x0000)
4719 int shift = bits (inst2, 4, 5), rm = bits (inst2, 0, 3);
4720 base += get_frame_register_unsigned (frame, rm) << shift;
4722 else
4723 /* Reserved. */
4724 load_pc = 0;
4726 if (load_pc)
4727 nextpc = get_frame_memory_unsigned (frame, base, 4);
4729 else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
4731 /* TBB. */
4732 CORE_ADDR tbl_reg, table, offset, length;
4734 tbl_reg = bits (inst1, 0, 3);
4735 if (tbl_reg == 0x0f)
4736 table = pc + 4; /* Regcache copy of PC isn't right yet. */
4737 else
4738 table = get_frame_register_unsigned (frame, tbl_reg);
4740 offset = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4741 length = 2 * get_frame_memory_unsigned (frame, table + offset, 1);
4742 nextpc = pc_val + length;
4744 else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
4746 /* TBH. */
4747 CORE_ADDR tbl_reg, table, offset, length;
4749 tbl_reg = bits (inst1, 0, 3);
4750 if (tbl_reg == 0x0f)
4751 table = pc + 4; /* Regcache copy of PC isn't right yet. */
4752 else
4753 table = get_frame_register_unsigned (frame, tbl_reg);
4755 offset = 2 * get_frame_register_unsigned (frame, bits (inst2, 0, 3));
4756 length = 2 * get_frame_memory_unsigned (frame, table + offset, 2);
4757 nextpc = pc_val + length;
4760 else if ((inst1 & 0xff00) == 0x4700) /* bx REG, blx REG */
4762 if (bits (inst1, 3, 6) == 0x0f)
4763 nextpc = UNMAKE_THUMB_ADDR (pc_val);
4764 else
4765 nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
4767 else if ((inst1 & 0xff87) == 0x4687) /* mov pc, REG */
4769 if (bits (inst1, 3, 6) == 0x0f)
4770 nextpc = pc_val;
4771 else
4772 nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
4774 nextpc = MAKE_THUMB_ADDR (nextpc);
4776 else if ((inst1 & 0xf500) == 0xb100)
4778 /* CBNZ or CBZ. */
4779 int imm = (bit (inst1, 9) << 6) + (bits (inst1, 3, 7) << 1);
4780 ULONGEST reg = get_frame_register_unsigned (frame, bits (inst1, 0, 2));
4782 if (bit (inst1, 11) && reg != 0)
4783 nextpc = pc_val + imm;
4784 else if (!bit (inst1, 11) && reg == 0)
4785 nextpc = pc_val + imm;
4787 return nextpc;
4790 /* Get the raw next address. PC is the current program counter, in
4791 FRAME, which is assumed to be executing in ARM mode.
4793 The value returned has the execution state of the next instruction
4794 encoded in it. Use IS_THUMB_ADDR () to see whether the instruction is
4795 in Thumb-State, and gdbarch_addr_bits_remove () to get the plain memory
4796 address. */
4798 static CORE_ADDR
4799 arm_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
4801 struct gdbarch *gdbarch = get_frame_arch (frame);
4802 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
4803 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
4804 unsigned long pc_val;
4805 unsigned long this_instr;
4806 unsigned long status;
4807 CORE_ADDR nextpc;
4809 pc_val = (unsigned long) pc;
4810 this_instr = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
4812 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
4813 nextpc = (CORE_ADDR) (pc_val + 4); /* Default case */
4815 if (bits (this_instr, 28, 31) == INST_NV)
4816 switch (bits (this_instr, 24, 27))
4818 case 0xa:
4819 case 0xb:
4821 /* Branch with Link and change to Thumb. */
4822 nextpc = BranchDest (pc, this_instr);
4823 nextpc |= bit (this_instr, 24) << 1;
4824 nextpc = MAKE_THUMB_ADDR (nextpc);
4825 break;
4827 case 0xc:
4828 case 0xd:
4829 case 0xe:
4830 /* Coprocessor register transfer. */
4831 if (bits (this_instr, 12, 15) == 15)
4832 error (_("Invalid update to pc in instruction"));
4833 break;
4835 else if (condition_true (bits (this_instr, 28, 31), status))
4837 switch (bits (this_instr, 24, 27))
4839 case 0x0:
4840 case 0x1: /* data processing */
4841 case 0x2:
4842 case 0x3:
4844 unsigned long operand1, operand2, result = 0;
4845 unsigned long rn;
4846 int c;
4848 if (bits (this_instr, 12, 15) != 15)
4849 break;
4851 if (bits (this_instr, 22, 25) == 0
4852 && bits (this_instr, 4, 7) == 9) /* multiply */
4853 error (_("Invalid update to pc in instruction"));
4855 /* BX <reg>, BLX <reg> */
4856 if (bits (this_instr, 4, 27) == 0x12fff1
4857 || bits (this_instr, 4, 27) == 0x12fff3)
4859 rn = bits (this_instr, 0, 3);
4860 nextpc = ((rn == ARM_PC_REGNUM)
4861 ? (pc_val + 8)
4862 : get_frame_register_unsigned (frame, rn));
4864 return nextpc;
4867 /* Multiply into PC. */
4868 c = (status & FLAG_C) ? 1 : 0;
4869 rn = bits (this_instr, 16, 19);
4870 operand1 = ((rn == ARM_PC_REGNUM)
4871 ? (pc_val + 8)
4872 : get_frame_register_unsigned (frame, rn));
4874 if (bit (this_instr, 25))
4876 unsigned long immval = bits (this_instr, 0, 7);
4877 unsigned long rotate = 2 * bits (this_instr, 8, 11);
4878 operand2 = ((immval >> rotate) | (immval << (32 - rotate)))
4879 & 0xffffffff;
4881 else /* operand 2 is a shifted register. */
4882 operand2 = shifted_reg_val (frame, this_instr, c,
4883 pc_val, status);
4885 switch (bits (this_instr, 21, 24))
4887 case 0x0: /*and */
4888 result = operand1 & operand2;
4889 break;
4891 case 0x1: /*eor */
4892 result = operand1 ^ operand2;
4893 break;
4895 case 0x2: /*sub */
4896 result = operand1 - operand2;
4897 break;
4899 case 0x3: /*rsb */
4900 result = operand2 - operand1;
4901 break;
4903 case 0x4: /*add */
4904 result = operand1 + operand2;
4905 break;
4907 case 0x5: /*adc */
4908 result = operand1 + operand2 + c;
4909 break;
4911 case 0x6: /*sbc */
4912 result = operand1 - operand2 + c;
4913 break;
4915 case 0x7: /*rsc */
4916 result = operand2 - operand1 + c;
4917 break;
4919 case 0x8:
4920 case 0x9:
4921 case 0xa:
4922 case 0xb: /* tst, teq, cmp, cmn */
4923 result = (unsigned long) nextpc;
4924 break;
4926 case 0xc: /*orr */
4927 result = operand1 | operand2;
4928 break;
4930 case 0xd: /*mov */
4931 /* Always step into a function. */
4932 result = operand2;
4933 break;
4935 case 0xe: /*bic */
4936 result = operand1 & ~operand2;
4937 break;
4939 case 0xf: /*mvn */
4940 result = ~operand2;
4941 break;
4944 /* In 26-bit APCS the bottom two bits of the result are
4945 ignored, and we always end up in ARM state. */
4946 if (!arm_apcs_32)
4947 nextpc = arm_addr_bits_remove (gdbarch, result);
4948 else
4949 nextpc = result;
4951 break;
4954 case 0x4:
4955 case 0x5: /* data transfer */
4956 case 0x6:
4957 case 0x7:
4958 if (bit (this_instr, 20))
4960 /* load */
4961 if (bits (this_instr, 12, 15) == 15)
4963 /* rd == pc */
4964 unsigned long rn;
4965 unsigned long base;
4967 if (bit (this_instr, 22))
4968 error (_("Invalid update to pc in instruction"));
4970 /* byte write to PC */
4971 rn = bits (this_instr, 16, 19);
4972 base = ((rn == ARM_PC_REGNUM)
4973 ? (pc_val + 8)
4974 : get_frame_register_unsigned (frame, rn));
4976 if (bit (this_instr, 24))
4978 /* pre-indexed */
4979 int c = (status & FLAG_C) ? 1 : 0;
4980 unsigned long offset =
4981 (bit (this_instr, 25)
4982 ? shifted_reg_val (frame, this_instr, c, pc_val, status)
4983 : bits (this_instr, 0, 11));
4985 if (bit (this_instr, 23))
4986 base += offset;
4987 else
4988 base -= offset;
4990 nextpc =
4991 (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR) base,
4992 4, byte_order);
4995 break;
4997 case 0x8:
4998 case 0x9: /* block transfer */
4999 if (bit (this_instr, 20))
5001 /* LDM */
5002 if (bit (this_instr, 15))
5004 /* loading pc */
5005 int offset = 0;
5006 unsigned long rn_val
5007 = get_frame_register_unsigned (frame,
5008 bits (this_instr, 16, 19));
5010 if (bit (this_instr, 23))
5012 /* up */
5013 unsigned long reglist = bits (this_instr, 0, 14);
5014 offset = bitcount (reglist) * 4;
5015 if (bit (this_instr, 24)) /* pre */
5016 offset += 4;
5018 else if (bit (this_instr, 24))
5019 offset = -4;
5021 nextpc =
5022 (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR)
5023 (rn_val + offset),
5024 4, byte_order);
5027 break;
5029 case 0xb: /* branch & link */
5030 case 0xa: /* branch */
5032 nextpc = BranchDest (pc, this_instr);
5033 break;
5036 case 0xc:
5037 case 0xd:
5038 case 0xe: /* coproc ops */
5039 break;
5040 case 0xf: /* SWI */
5042 struct gdbarch_tdep *tdep;
5043 tdep = gdbarch_tdep (gdbarch);
5045 if (tdep->syscall_next_pc != NULL)
5046 nextpc = tdep->syscall_next_pc (frame);
5049 break;
5051 default:
5052 fprintf_filtered (gdb_stderr, _("Bad bit-field extraction\n"));
5053 return (pc);
5057 return nextpc;
5060 /* Determine next PC after current instruction executes. Will call either
5061 arm_get_next_pc_raw or thumb_get_next_pc_raw. Error out if infinite
5062 loop is detected. */
5064 CORE_ADDR
5065 arm_get_next_pc (struct frame_info *frame, CORE_ADDR pc)
5067 CORE_ADDR nextpc;
5069 if (arm_frame_is_thumb (frame))
5070 nextpc = thumb_get_next_pc_raw (frame, pc);
5071 else
5072 nextpc = arm_get_next_pc_raw (frame, pc);
5074 return nextpc;
5077 /* Like insert_single_step_breakpoint, but make sure we use a breakpoint
5078 of the appropriate mode (as encoded in the PC value), even if this
5079 differs from what would be expected according to the symbol tables. */
5081 void
5082 arm_insert_single_step_breakpoint (struct gdbarch *gdbarch,
5083 struct address_space *aspace,
5084 CORE_ADDR pc)
5086 struct cleanup *old_chain
5087 = make_cleanup_restore_integer (&arm_override_mode);
5089 arm_override_mode = IS_THUMB_ADDR (pc);
5090 pc = gdbarch_addr_bits_remove (gdbarch, pc);
5092 insert_single_step_breakpoint (gdbarch, aspace, pc);
5094 do_cleanups (old_chain);
5097 /* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
5098 instruction and ending with a STREX{,B,H,D} instruction. If such a sequence
5099 is found, attempt to step through it. A breakpoint is placed at the end of
5100 the sequence. */
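/* A typical sequence (illustrative) looks like:
     ldrex r1, [r0]
     add r1, r1, #1
     strex r2, r1, [r0]
     cmp r2, #0
     bne <retry>
   A breakpoint taken between the LDREX and the STREX would clear the
   exclusive monitor and make the STREX fail on every retry, so the
   whole sequence must be stepped over in one go.  */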
5102 static int
5103 thumb_deal_with_atomic_sequence_raw (struct frame_info *frame)
5105 struct gdbarch *gdbarch = get_frame_arch (frame);
5106 struct address_space *aspace = get_frame_address_space (frame);
5107 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
5108 CORE_ADDR pc = get_frame_pc (frame);
5109 CORE_ADDR breaks[2] = {-1, -1};
5110 CORE_ADDR loc = pc;
5111 unsigned short insn1, insn2;
5112 int insn_count;
5113 int index;
5114 int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed). */
5115 const int atomic_sequence_length = 16; /* Instruction sequence length. */
5116 ULONGEST status, itstate;
5118 /* We currently do not support atomic sequences within an IT block. */
5119 status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
5120 itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
5121 if (itstate & 0x0f)
5122 return 0;
5124 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction. */
5125 insn1 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5126 loc += 2;
5127 if (thumb_insn_size (insn1) != 4)
5128 return 0;
5130 insn2 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5131 loc += 2;
5132 if (!((insn1 & 0xfff0) == 0xe850
5133 || ((insn1 & 0xfff0) == 0xe8d0 && (insn2 & 0x00c0) == 0x0040)))
5134 return 0;
5136 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
5137 instructions. */
5138 for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
5140 insn1 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5141 loc += 2;
5143 if (thumb_insn_size (insn1) != 4)
5145 /* Assume that there is at most one conditional branch in the
5146 atomic sequence. If a conditional branch is found, put a
5147 breakpoint in its destination address. */
5148 if ((insn1 & 0xf000) == 0xd000 && bits (insn1, 8, 11) != 0x0f)
5150 if (last_breakpoint > 0)
5151 return 0; /* More than one conditional branch found,
5152 fallback to the standard code. */
5154 breaks[1] = loc + 2 + (sbits (insn1, 0, 7) << 1);
5155 last_breakpoint++;
5158 /* We do not support atomic sequences that use any *other*
5159 instructions but conditional branches to change the PC.
5160 Fall back to standard code to avoid losing control of
5161 execution. */
5162 else if (thumb_instruction_changes_pc (insn1))
5163 return 0;
5165 else
5167 insn2 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
5168 loc += 2;
5170 /* Assume that there is at most one conditional branch in the
5171 atomic sequence. If a conditional branch is found, put a
5172 breakpoint in its destination address. */
5173 if ((insn1 & 0xf800) == 0xf000
5174 && (insn2 & 0xd000) == 0x8000
5175 && (insn1 & 0x0380) != 0x0380)
5177 int sign, j1, j2, imm1, imm2;
5178 unsigned int offset;
5180 sign = sbits (insn1, 10, 10);
5181 imm1 = bits (insn1, 0, 5);
5182 imm2 = bits (insn2, 0, 10);
5183 j1 = bit (insn2, 13);
5184 j2 = bit (insn2, 11);
5186 offset = (sign << 20) + (j2 << 19) + (j1 << 18);
5187 offset += (imm1 << 12) + (imm2 << 1);
5189 if (last_breakpoint > 0)
5190 return 0; /* More than one conditional branch found,
5191 fallback to the standard code. */
5193 breaks[1] = loc + offset;
5194 last_breakpoint++;
5197 /* We do not support atomic sequences that use any *other*
5198 instructions but conditional branches to change the PC.
5199 Fall back to standard code to avoid losing control of
5200 execution. */
5201 else if (thumb2_instruction_changes_pc (insn1, insn2))
5202 return 0;
5204 /* If we find a strex{,b,h,d}, we're done. */
5205 if ((insn1 & 0xfff0) == 0xe840
5206 || ((insn1 & 0xfff0) == 0xe8c0 && (insn2 & 0x00c0) == 0x0040))
5207 break;
5211 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
5212 if (insn_count == atomic_sequence_length)
5213 return 0;
5215 /* Insert a breakpoint right after the end of the atomic sequence. */
5216 breaks[0] = loc;
5218 /* Check for duplicated breakpoints. Check also for a breakpoint
5219 placed (branch instruction's destination) anywhere in sequence. */
5220 if (last_breakpoint
5221 && (breaks[1] == breaks[0]
5222 || (breaks[1] >= pc && breaks[1] < loc)))
5223 last_breakpoint = 0;
5225 /* Effectively inserts the breakpoints. */
5226 for (index = 0; index <= last_breakpoint; index++)
5227 arm_insert_single_step_breakpoint (gdbarch, aspace,
5228 MAKE_THUMB_ADDR (breaks[index]));
5230 return 1;
5233 static int
5234 arm_deal_with_atomic_sequence_raw (struct frame_info *frame)
5236 struct gdbarch *gdbarch = get_frame_arch (frame);
5237 struct address_space *aspace = get_frame_address_space (frame);
5238 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
5239 CORE_ADDR pc = get_frame_pc (frame);
5240 CORE_ADDR breaks[2] = {-1, -1};
5241 CORE_ADDR loc = pc;
5242 unsigned int insn;
5243 int insn_count;
5244 int index;
5245 int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed). */
5246 const int atomic_sequence_length = 16; /* Instruction sequence length. */
5248 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.
5249 Note that we do not currently support conditionally executed atomic
5250 instructions. */
5251 insn = read_memory_unsigned_integer (loc, 4, byte_order_for_code);
5252 loc += 4;
5253 if ((insn & 0xff9000f0) != 0xe1900090)
5254 return 0;
5256 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
5257 instructions. */
5258 for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
5260 insn = read_memory_unsigned_integer (loc, 4, byte_order_for_code);
5261 loc += 4;
5263 /* Assume that there is at most one conditional branch in the atomic
5264 sequence. If a conditional branch is found, put a breakpoint in
5265 its destination address. */
5266 if (bits (insn, 24, 27) == 0xa)
5268 if (last_breakpoint > 0)
5269 return 0; /* More than one conditional branch found, fallback
5270 to the standard single-step code. */
5272 breaks[1] = BranchDest (loc - 4, insn);
5273 last_breakpoint++;
5276 /* We do not support atomic sequences that use any *other* instructions
5277 but conditional branches to change the PC. Fall back to standard
5278 code to avoid losing control of execution. */
5279 else if (arm_instruction_changes_pc (insn))
5280 return 0;
5282 /* If we find a strex{,b,h,d}, we're done. */
5283 if ((insn & 0xff9000f0) == 0xe1800090)
5284 break;
5287 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
5288 if (insn_count == atomic_sequence_length)
5289 return 0;
5291 /* Insert a breakpoint right after the end of the atomic sequence. */
5292 breaks[0] = loc;
5294 /* Check for duplicated breakpoints. Check also for a breakpoint
5295 placed (branch instruction's destination) anywhere in sequence. */
5296 if (last_breakpoint
5297 && (breaks[1] == breaks[0]
5298 || (breaks[1] >= pc && breaks[1] < loc)))
5299 last_breakpoint = 0;
5301 /* Effectively inserts the breakpoints. */
5302 for (index = 0; index <= last_breakpoint; index++)
5303 arm_insert_single_step_breakpoint (gdbarch, aspace, breaks[index]);
5305 return 1;
5309 arm_deal_with_atomic_sequence (struct frame_info *frame)
5311 if (arm_frame_is_thumb (frame))
5312 return thumb_deal_with_atomic_sequence_raw (frame);
5313 else
5314 return arm_deal_with_atomic_sequence_raw (frame);
5317 /* single_step() is called just before we want to resume the inferior,
5318 if we want to single-step it but there is no hardware or kernel
5319 single-step support. We find the target of the coming instruction
5320 and breakpoint it. */
5323 arm_software_single_step (struct frame_info *frame)
5325 struct gdbarch *gdbarch = get_frame_arch (frame);
5326 struct address_space *aspace = get_frame_address_space (frame);
5327 CORE_ADDR next_pc;
5329 if (arm_deal_with_atomic_sequence (frame))
5330 return 1;
5332 next_pc = arm_get_next_pc (frame, get_frame_pc (frame));
5333 arm_insert_single_step_breakpoint (gdbarch, aspace, next_pc);
5335 return 1;
5338 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
5339 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
5340 NULL if an error occurs. BUF is freed. */
5342 static gdb_byte *
5343 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
5344 int old_len, int new_len)
5346 gdb_byte *new_buf;
5347 int bytes_to_read = new_len - old_len;
5349 new_buf = xmalloc (new_len);
5350 memcpy (new_buf + bytes_to_read, buf, old_len);
5351 xfree (buf);
5352 if (target_read_memory (endaddr - new_len, new_buf, bytes_to_read) != 0)
5354 xfree (new_buf);
5355 return NULL;
5357 return new_buf;
5360 /* An IT block is at most the 2-byte IT instruction followed by
5361 four 4-byte instructions. The furthest back we must search to
5362 find an IT block that affects the current instruction is thus
5363 2 + 3 * 4 == 14 bytes. */
5364 #define MAX_IT_BLOCK_PREFIX 14
5366 /* Use a quick scan if there are more than this many bytes of
5367 code. */
5368 #define IT_SCAN_THRESHOLD 32
5370 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
5371 A breakpoint in an IT block may not be hit, depending on the
5372 condition flags. */
5373 static CORE_ADDR
5374 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
5376 gdb_byte *buf;
5377 char map_type;
5378 CORE_ADDR boundary, func_start;
5379 int buf_len;
5380 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
5381 int i, any, last_it, last_it_count;
5383 /* If we are using BKPT breakpoints, none of this is necessary. */
5384 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
5385 return bpaddr;
5387 /* ARM mode does not have this problem. */
5388 if (!arm_pc_is_thumb (gdbarch, bpaddr))
5389 return bpaddr;
5391 /* We are setting a breakpoint in Thumb code that could potentially
5392 contain an IT block. The first step is to find how much Thumb
5393 code there is; we do not need to read outside of known Thumb
5394 sequences. */
5395 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
5396 if (map_type == 0)
5397 /* Thumb-2 code must have mapping symbols to have a chance. */
5398 return bpaddr;
5400 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
5402 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
5403 && func_start > boundary)
5404 boundary = func_start;
5406 /* Search for a candidate IT instruction. We have to do some fancy
5407 footwork to distinguish a real IT instruction from the second
5408 half of a 32-bit instruction, but there is no need for that if
5409 there's no candidate. */
5410 buf_len = min (bpaddr - boundary, MAX_IT_BLOCK_PREFIX);
5411 if (buf_len == 0)
5412 /* No room for an IT instruction. */
5413 return bpaddr;
5415 buf = xmalloc (buf_len);
5416 if (target_read_memory (bpaddr - buf_len, buf, buf_len) != 0)
5417 return bpaddr;
5418 any = 0;
5419 for (i = 0; i < buf_len; i += 2)
5421 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5422 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5424 any = 1;
5425 break;
5428 if (any == 0)
5430 xfree (buf);
5431 return bpaddr;
5434 /* OK, the code bytes before this instruction contain at least one
5435 halfword which resembles an IT instruction. We know that it's
5436 Thumb code, but there are still two possibilities. Either the
5437 halfword really is an IT instruction, or it is the second half of
5438 a 32-bit Thumb instruction. The only way we can tell is to
5439 scan forwards from a known instruction boundary. */
5440 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
5442 int definite;
5444 /* There's a lot of code before this instruction. Start with an
5445 optimistic search; it's easy to recognize halfwords that can
5446 not be the start of a 32-bit instruction, and use that to
5447 lock on to the instruction boundaries. */
5448 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
5449 if (buf == NULL)
5450 return bpaddr;
5451 buf_len = IT_SCAN_THRESHOLD;
5453 definite = 0;
5454 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
5456 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5457 if (thumb_insn_size (inst1) == 2)
5459 definite = 1;
5460 break;
5464 /* At this point, if DEFINITE, BUF[I] is the first place we
5465 are sure that we know the instruction boundaries, and it is far
5466 enough from BPADDR that we could not miss an IT instruction
5467 affecting BPADDR. If ! DEFINITE, give up - start from a
5468 known boundary. */
5469 if (! definite)
5471 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
5472 bpaddr - boundary);
5473 if (buf == NULL)
5474 return bpaddr;
5475 buf_len = bpaddr - boundary;
5476 i = 0;
5479 else
5481 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
5482 if (buf == NULL)
5483 return bpaddr;
5484 buf_len = bpaddr - boundary;
5485 i = 0;
5488 /* Scan forwards. Find the last IT instruction before BPADDR. */
5489 last_it = -1;
5490 last_it_count = 0;
5491 while (i < buf_len)
5493 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5494 last_it_count--;
5495 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5497 last_it = i;
5498 if (inst1 & 0x0001)
5499 last_it_count = 4;
5500 else if (inst1 & 0x0002)
5501 last_it_count = 3;
5502 else if (inst1 & 0x0004)
5503 last_it_count = 2;
5504 else
5505 last_it_count = 1;
5507 i += thumb_insn_size (inst1);
5510 xfree (buf);
5512 if (last_it == -1)
5513 /* There wasn't really an IT instruction after all. */
5514 return bpaddr;
5516 if (last_it_count < 1)
5517 /* It was too far away. */
5518 return bpaddr;
5520 /* This really is a trouble spot. Move the breakpoint to the IT
5521 instruction. */
5522 return bpaddr - buf_len + last_it;
5525 /* ARM displaced stepping support.
5527 Generally ARM displaced stepping works as follows:
5529 1. When an instruction is to be single-stepped, it is first decoded by
5530 arm_process_displaced_insn (called from arm_displaced_step_copy_insn).
5531 Depending on the type of instruction, it is then copied to a scratch
5532 location, possibly in a modified form. The copy_* set of functions
5533 performs such modification, as necessary. A breakpoint is placed after
5534 the modified instruction in the scratch space to return control to GDB.
5535 Note in particular that instructions which modify the PC will no longer
5536 do so after modification.
5538 2. The instruction is single-stepped, by setting the PC to the scratch
5539 location address, and resuming. Control returns to GDB when the
5540 breakpoint is hit.
5542 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
5543 function used for the current instruction. This function's job is to
5544 put the CPU/memory state back to what it would have been if the
5545 instruction had been executed unmodified in its original location. */
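/* As an illustration of steps 1 and 3: a load whose destination is
   the PC is typically rewritten to load into a scratch register
   instead, and the matching cleanup_* routine then transfers that
   value into the real PC (see load_write_pc below).  */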
5547 /* NOP instruction (mov r0, r0). */
5548 #define ARM_NOP 0xe1a00000
5549 #define THUMB_NOP 0x4600
5551 /* Helper for register reads for displaced stepping. In particular, this
5552 returns the PC as it would be seen by the instruction at its original
5553 location. */
5555 ULONGEST
5556 displaced_read_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5557 int regno)
5559 ULONGEST ret;
5560 CORE_ADDR from = dsc->insn_addr;
5562 if (regno == ARM_PC_REGNUM)
5564 /* Compute pipeline offset:
5565 - When executing an ARM instruction, PC reads as the address of the
5566 current instruction plus 8.
5567 - When executing a Thumb instruction, PC reads as the address of the
5568 current instruction plus 4. */
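/* For example, an instruction at 0x8000 reads the PC as 0x8008 when
   executing in ARM state and as 0x8004 in Thumb state.  */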
5570 if (!dsc->is_thumb)
5571 from += 8;
5572 else
5573 from += 4;
5575 if (debug_displaced)
5576 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
5577 (unsigned long) from);
5578 return (ULONGEST) from;
5580 else
5582 regcache_cooked_read_unsigned (regs, regno, &ret);
5583 if (debug_displaced)
5584 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
5585 regno, (unsigned long) ret);
5586 return ret;
5590 static int
5591 displaced_in_arm_mode (struct regcache *regs)
5593 ULONGEST ps;
5594 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5596 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5598 return (ps & t_bit) == 0;
5601 /* Write to the PC as from a branch instruction. */
5603 static void
5604 branch_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5605 ULONGEST val)
5607 if (!dsc->is_thumb)
5608 /* Note: If bits 0/1 are set, this branch would be unpredictable for
5609 architecture versions < 6. */
5610 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5611 val & ~(ULONGEST) 0x3);
5612 else
5613 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5614 val & ~(ULONGEST) 0x1);
5617 /* Write to the PC as from a branch-exchange instruction. */
5619 static void
5620 bx_write_pc (struct regcache *regs, ULONGEST val)
5622 ULONGEST ps;
5623 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5625 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5627 if ((val & 1) == 1)
5629 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
5630 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
5632 else if ((val & 2) == 0)
5634 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5635 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
5637 else
5639 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
5640 mode, align dest to 4 bytes). */
5641 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
5642 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5643 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
5647 /* Write to the PC as if from a load instruction. */
5649 static void
5650 load_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5651 ULONGEST val)
5653 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
5654 bx_write_pc (regs, val);
5655 else
5656 branch_write_pc (regs, dsc, val);
5659 /* Write to the PC as if from an ALU instruction. */
5661 static void
5662 alu_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5663 ULONGEST val)
5665 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
5666 bx_write_pc (regs, val);
5667 else
5668 branch_write_pc (regs, dsc, val);
5671 /* Helper for writing to registers for displaced stepping. Writing to the PC
5672 has varying effects depending on the instruction which does the write:
5673 this is controlled by the WRITE_PC argument. */
5675 void
5676 displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5677 int regno, ULONGEST val, enum pc_write_style write_pc)
5679 if (regno == ARM_PC_REGNUM)
5681 if (debug_displaced)
5682 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
5683 (unsigned long) val);
5684 switch (write_pc)
5686 case BRANCH_WRITE_PC:
5687 branch_write_pc (regs, dsc, val);
5688 break;
5690 case BX_WRITE_PC:
5691 bx_write_pc (regs, val);
5692 break;
5694 case LOAD_WRITE_PC:
5695 load_write_pc (regs, dsc, val);
5696 break;
5698 case ALU_WRITE_PC:
5699 alu_write_pc (regs, dsc, val);
5700 break;
5702 case CANNOT_WRITE_PC:
5703 warning (_("Instruction wrote to PC in an unexpected way when "
5704 "single-stepping"));
5705 break;
5707 default:
5708 internal_error (__FILE__, __LINE__,
5709 _("Invalid argument to displaced_write_reg"));
5712 dsc->wrote_to_pc = 1;
5714 else
5716 if (debug_displaced)
5717 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
5718 regno, (unsigned long) val);
5719 regcache_cooked_write_unsigned (regs, regno, val);
5723 /* This function is used to concisely determine if an instruction INSN
5724 references PC. Register fields of interest in INSN should have the
5725 corresponding fields of BITMASK set to 0b1111. The function
5726 returns 1 if any of these fields in INSN reference the PC
5727 (also 0b1111, r15), else it returns 0. */
5729 static int
5730 insn_references_pc (uint32_t insn, uint32_t bitmask)
5732 uint32_t lowbit = 1;
5734 while (bitmask != 0)
5736 uint32_t mask;
5738 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
5741 if (!lowbit)
5742 break;
5744 mask = lowbit * 0xf;
5746 if ((insn & mask) == mask)
5747 return 1;
5749 bitmask &= ~mask;
5752 return 0;
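/* For example, with BITMASK 0x000f0000 (the Rn field), the ARM
   instruction 0xe59f0008 ("ldr r0, [pc, #8]") makes this function
   return 1, because bits 16-19 of the instruction are 0b1111.  */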
5755 /* The simplest copy function. Many instructions have the same effect no
5756 matter what address they are executed at: in those cases, use this. */
5758 static int
5759 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
5760 const char *iname, struct displaced_step_closure *dsc)
5762 if (debug_displaced)
5763 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
5764 "opcode/class '%s' unmodified\n", (unsigned long) insn,
5765 iname);
5767 dsc->modinsn[0] = insn;
5769 return 0;
5772 static int
5773 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
5774 uint16_t insn2, const char *iname,
5775 struct displaced_step_closure *dsc)
5777 if (debug_displaced)
5778 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
5779 "opcode/class '%s' unmodified\n", insn1, insn2,
5780 iname);
5782 dsc->modinsn[0] = insn1;
5783 dsc->modinsn[1] = insn2;
5784 dsc->numinsns = 2;
5786 return 0;
5789 /* Copy a 16-bit Thumb (Thumb and 16-bit Thumb-2) instruction without any
5790 modification. */
5791 static int
5792 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, unsigned int insn,
5793 const char *iname,
5794 struct displaced_step_closure *dsc)
5796 if (debug_displaced)
5797 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
5798 "opcode/class '%s' unmodified\n", insn,
5799 iname);
5801 dsc->modinsn[0] = insn;
5803 return 0;
5806 /* Preload instructions with immediate offset. */
5808 static void
5809 cleanup_preload (struct gdbarch *gdbarch,
5810 struct regcache *regs, struct displaced_step_closure *dsc)
5812 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5813 if (!dsc->u.preload.immed)
5814 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5817 static void
5818 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
5819 struct displaced_step_closure *dsc, unsigned int rn)
5821 ULONGEST rn_val;
5822 /* Preload instructions:
5824 {pli/pld} [rn, #+/-imm]
5826 {pli/pld} [r0, #+/-imm]. */
5828 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5829 rn_val = displaced_read_reg (regs, dsc, rn);
5830 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5831 dsc->u.preload.immed = 1;
5833 dsc->cleanup = &cleanup_preload;
5836 static int
5837 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5838 struct displaced_step_closure *dsc)
5840 unsigned int rn = bits (insn, 16, 19);
5842 if (!insn_references_pc (insn, 0x000f0000ul))
5843 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
5845 if (debug_displaced)
5846 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5847 (unsigned long) insn);
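/* Clear the Rn field so that the copied insn addresses [r0, ...]; for
   example "pld [pc, #16]" (0xf5dff010) becomes "pld [r0, #16]"
   (0xf5d0f010).  install_preload arranges for r0 to hold the original
   Rn (here PC) value beforehand.  */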
5849 dsc->modinsn[0] = insn & 0xfff0ffff;
5851 install_preload (gdbarch, regs, dsc, rn);
5853 return 0;
5856 static int
5857 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
5858 struct regcache *regs, struct displaced_step_closure *dsc)
5860 unsigned int rn = bits (insn1, 0, 3);
5861 unsigned int u_bit = bit (insn1, 7);
5862 int imm12 = bits (insn2, 0, 11);
5863 ULONGEST pc_val;
5865 if (rn != ARM_PC_REGNUM)
5866 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
5868 /* PC is only allowed to be used in PLI (immediate, literal) Encoding T3, and
5869 PLD (literal) Encoding T1. */
5870 if (debug_displaced)
5871 fprintf_unfiltered (gdb_stdlog,
5872 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
5873 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
5874 imm12);
5876 if (!u_bit)
5877 imm12 = -1 * imm12;
5879 /* Rewrite instruction {pli/pld} PC imm12 into:
5880 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
5882 {pli/pld} [r0, r1]
5884 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
5886 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5887 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5889 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5891 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
5892 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
5893 dsc->u.preload.immed = 0;
5895 /* {pli/pld} [r0, r1] */
5896 dsc->modinsn[0] = insn1 & 0xfff0;
5897 dsc->modinsn[1] = 0xf001;
5898 dsc->numinsns = 2;
5900 dsc->cleanup = &cleanup_preload;
5901 return 0;
5904 /* Preload instructions with register offset. */
5906 static void
5907 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
5908 struct displaced_step_closure *dsc, unsigned int rn,
5909 unsigned int rm)
5911 ULONGEST rn_val, rm_val;
5913 /* Preload register-offset instructions:
5915 {pli/pld} [rn, rm {, shift}]
5917 {pli/pld} [r0, r1 {, shift}]. */
5919 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5920 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5921 rn_val = displaced_read_reg (regs, dsc, rn);
5922 rm_val = displaced_read_reg (regs, dsc, rm);
5923 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5924 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
5925 dsc->u.preload.immed = 0;
5927 dsc->cleanup = &cleanup_preload;
5930 static int
5931 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
5932 struct regcache *regs,
5933 struct displaced_step_closure *dsc)
5935 unsigned int rn = bits (insn, 16, 19);
5936 unsigned int rm = bits (insn, 0, 3);
5939 if (!insn_references_pc (insn, 0x000f000ful))
5940 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
5942 if (debug_displaced)
5943 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5944 (unsigned long) insn);
5946 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
5948 install_preload_reg (gdbarch, regs, dsc, rn, rm);
5949 return 0;
5952 /* Copy/cleanup coprocessor load and store instructions. */
5954 static void
5955 cleanup_copro_load_store (struct gdbarch *gdbarch,
5956 struct regcache *regs,
5957 struct displaced_step_closure *dsc)
5959 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
5961 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5963 if (dsc->u.ldst.writeback)
5964 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
5967 static void
5968 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5969 struct displaced_step_closure *dsc,
5970 int writeback, unsigned int rn)
5972 ULONGEST rn_val;
5974 /* Coprocessor load/store instructions:
5976 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
5978 {stc/stc2} [r0, #+/-imm].
5980 ldc/ldc2 are handled identically. */
5982 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5983 rn_val = displaced_read_reg (regs, dsc, rn);
5984 /* PC should be 4-byte aligned. */
5985 rn_val = rn_val & 0xfffffffc;
5986 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5988 dsc->u.ldst.writeback = writeback;
5989 dsc->u.ldst.rn = rn;
5991 dsc->cleanup = &cleanup_copro_load_store;
5994 static int
5995 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
5996 struct regcache *regs,
5997 struct displaced_step_closure *dsc)
5999 unsigned int rn = bits (insn, 16, 19);
6001 if (!insn_references_pc (insn, 0x000f0000ul))
6002 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
6004 if (debug_displaced)
6005 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
6006 "load/store insn %.8lx\n", (unsigned long) insn);
6008 dsc->modinsn[0] = insn & 0xfff0ffff;
6010 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
6012 return 0;
6015 static int
6016 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
6017 uint16_t insn2, struct regcache *regs,
6018 struct displaced_step_closure *dsc)
6020 unsigned int rn = bits (insn1, 0, 3);
6022 if (rn != ARM_PC_REGNUM)
6023 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6024 "copro load/store", dsc);
6026 if (debug_displaced)
6027 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
6028 "load/store insn %.4x%.4x\n", insn1, insn2);
6030 dsc->modinsn[0] = insn1 & 0xfff0;
6031 dsc->modinsn[1] = insn2;
6032 dsc->numinsns = 2;
6034 /* This function is called for copying the LDC/LDC2/VLDR instructions,
6035 which do not support writeback here, so pass 0. */
6036 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
6038 return 0;
6041 /* Clean up branch instructions (actually perform the branch, by setting
6042 PC). */
6044 static void
6045 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
6046 struct displaced_step_closure *dsc)
6048 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6049 int branch_taken = condition_true (dsc->u.branch.cond, status);
6050 enum pc_write_style write_pc = dsc->u.branch.exchange
6051 ? BX_WRITE_PC : BRANCH_WRITE_PC;
6053 if (!branch_taken)
6054 return;
6056 if (dsc->u.branch.link)
6058 /* The value of LR should be the address of the insn following the current
6059 one.  In order not to confuse the logic handling a later `bx lr' insn,
6060 if the current insn mode is Thumb, bit 0 of the LR value should be set to 1. */
6061 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
6063 if (dsc->is_thumb)
6064 next_insn_addr |= 0x1;
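/* For example, a 32-bit Thumb "bl" at 0x8000 leaves LR = 0x8005: the
   address of the following insn, with bit 0 set to indicate Thumb
   state.  */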
6066 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
6067 CANNOT_WRITE_PC);
6070 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
6073 /* Copy B/BL/BLX instructions with immediate destinations. */
6075 static void
6076 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
6077 struct displaced_step_closure *dsc,
6078 unsigned int cond, int exchange, int link, long offset)
6080 /* Implement "BL<cond> <label>" as:
6082 Preparation: cond <- instruction condition
6083 Insn: mov r0, r0 (nop)
6084 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
6086 B<cond> similar, but don't set r14 in cleanup. */
6088 dsc->u.branch.cond = cond;
6089 dsc->u.branch.link = link;
6090 dsc->u.branch.exchange = exchange;
6092 dsc->u.branch.dest = dsc->insn_addr;
6093 if (link && exchange)
6094 /* For BLX, offset is computed from the Align (PC, 4). */
6095 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
6097 if (dsc->is_thumb)
6098 dsc->u.branch.dest += 4 + offset;
6099 else
6100 dsc->u.branch.dest += 8 + offset;
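/* For example, an ARM "bl" at 0x8000 whose 24-bit offset field encodes 4
   resolves to dest = 0x8000 + 8 + 4 = 0x800c, matching the architectural
   reading of the branch as relative to the insn address plus 8.  */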
6102 dsc->cleanup = &cleanup_branch;
6104 static int
6105 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
6106 struct regcache *regs, struct displaced_step_closure *dsc)
6108 unsigned int cond = bits (insn, 28, 31);
6109 int exchange = (cond == 0xf);
6110 int link = exchange || bit (insn, 24);
6111 long offset;
6113 if (debug_displaced)
6114 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
6115 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
6116 (unsigned long) insn);
6117 if (exchange)
6118 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
6119 then arrange the switch into Thumb mode. */
6120 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
6121 else
6122 offset = bits (insn, 0, 23) << 2;
6124 if (bit (offset, 25))
6125 offset = offset | ~0x3ffffff;
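/* The 26-bit offset is sign-extended above: for example imm24 = 0xffffff
   gives offset 0x03fffffc before extension and -4 (0xfffffffc) after.  */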
6127 dsc->modinsn[0] = ARM_NOP;
6129 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
6130 return 0;
6133 static int
6134 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
6135 uint16_t insn2, struct regcache *regs,
6136 struct displaced_step_closure *dsc)
6138 int link = bit (insn2, 14);
6139 int exchange = link && !bit (insn2, 12);
6140 int cond = INST_AL;
6141 long offset = 0;
6142 int j1 = bit (insn2, 13);
6143 int j2 = bit (insn2, 11);
6144 int s = sbits (insn1, 10, 10);
6145 int i1 = !(j1 ^ bit (insn1, 10));
6146 int i2 = !(j2 ^ bit (insn1, 10));
6148 if (!link && !exchange) /* B */
6150 offset = (bits (insn2, 0, 10) << 1);
6151 if (bit (insn2, 12)) /* Encoding T4 */
6153 offset |= (bits (insn1, 0, 9) << 12)
6154 | (i2 << 22)
6155 | (i1 << 23)
6156 | (s << 24);
6157 cond = INST_AL;
6159 else /* Encoding T3 */
6161 offset |= (bits (insn1, 0, 5) << 12)
6162 | (j1 << 18)
6163 | (j2 << 19)
6164 | (s << 20);
6165 cond = bits (insn1, 6, 9);
6168 else
6170 offset = (bits (insn1, 0, 9) << 12);
6171 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
6172 offset |= exchange ?
6173 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
6176 if (debug_displaced)
6177 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
6178 "%.4x %.4x with offset %.8lx\n",
6179 link ? (exchange) ? "blx" : "bl" : "b",
6180 insn1, insn2, offset);
6182 dsc->modinsn[0] = THUMB_NOP;
6184 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
6185 return 0;
6188 /* Copy B Thumb instructions. */
6189 static int
6190 thumb_copy_b (struct gdbarch *gdbarch, unsigned short insn,
6191 struct displaced_step_closure *dsc)
6193 unsigned int cond = 0;
6194 int offset = 0;
6195 unsigned short bit_12_15 = bits (insn, 12, 15);
6196 CORE_ADDR from = dsc->insn_addr;
6198 if (bit_12_15 == 0xd)
6200 /* offset = SignExtend (imm8:0, 32) */
6201 offset = sbits ((insn << 1), 0, 8);
6202 cond = bits (insn, 8, 11);
6204 else if (bit_12_15 == 0xe) /* Encoding T2 */
6206 offset = sbits ((insn << 1), 0, 11);
6207 cond = INST_AL;
6210 if (debug_displaced)
6211 fprintf_unfiltered (gdb_stdlog,
6212 "displaced: copying b immediate insn %.4x "
6213 "with offset %d\n", insn, offset);
6215 dsc->u.branch.cond = cond;
6216 dsc->u.branch.link = 0;
6217 dsc->u.branch.exchange = 0;
6218 dsc->u.branch.dest = from + 4 + offset;
6220 dsc->modinsn[0] = THUMB_NOP;
6222 dsc->cleanup = &cleanup_branch;
6224 return 0;
6227 /* Copy BX/BLX with register-specified destinations. */
6229 static void
6230 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
6231 struct displaced_step_closure *dsc, int link,
6232 unsigned int cond, unsigned int rm)
6234 /* Implement "{BX,BLX}<cond> <reg>" as:
6236 Preparation: cond <- instruction condition
6237 Insn: mov r0, r0 (nop)
6238 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
6240 Don't set r14 in cleanup for BX. */
6242 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
6244 dsc->u.branch.cond = cond;
6245 dsc->u.branch.link = link;
6247 dsc->u.branch.exchange = 1;
6249 dsc->cleanup = &cleanup_branch;
6252 static int
6253 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
6254 struct regcache *regs, struct displaced_step_closure *dsc)
6256 unsigned int cond = bits (insn, 28, 31);
6257 /* BX: x12xxx1x
6258 BLX: x12xxx3x. */
6259 int link = bit (insn, 5);
6260 unsigned int rm = bits (insn, 0, 3);
6262 if (debug_displaced)
6263 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
6264 (unsigned long) insn);
6266 dsc->modinsn[0] = ARM_NOP;
6268 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
6269 return 0;
6272 static int
6273 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
6274 struct regcache *regs,
6275 struct displaced_step_closure *dsc)
6277 int link = bit (insn, 7);
6278 unsigned int rm = bits (insn, 3, 6);
6280 if (debug_displaced)
6281 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
6282 (unsigned short) insn);
6284 dsc->modinsn[0] = THUMB_NOP;
6286 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
6288 return 0;
6292 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
6294 static void
6295 cleanup_alu_imm (struct gdbarch *gdbarch,
6296 struct regcache *regs, struct displaced_step_closure *dsc)
6298 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6299 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6300 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6301 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6304 static int
6305 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6306 struct displaced_step_closure *dsc)
6308 unsigned int rn = bits (insn, 16, 19);
6309 unsigned int rd = bits (insn, 12, 15);
6310 unsigned int op = bits (insn, 21, 24);
6311 int is_mov = (op == 0xd);
6312 ULONGEST rd_val, rn_val;
6314 if (!insn_references_pc (insn, 0x000ff000ul))
6315 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
6317 if (debug_displaced)
6318 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
6319 "%.8lx\n", is_mov ? "move" : "ALU",
6320 (unsigned long) insn);
6322 /* Instruction is of form:
6324 <op><cond> rd, [rn,] #imm
6326 Rewrite as:
6328 Preparation: tmp1, tmp2 <- r0, r1;
6329 r0, r1 <- rd, rn
6330 Insn: <op><cond> r0, r1, #imm
6331 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6334 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6335 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6336 rn_val = displaced_read_reg (regs, dsc, rn);
6337 rd_val = displaced_read_reg (regs, dsc, rd);
6338 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6339 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6340 dsc->rd = rd;
6342 if (is_mov)
6343 dsc->modinsn[0] = insn & 0xfff00fff;
6344 else
6345 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
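/* For example "add pc, pc, #4" (0xe28ff004) becomes "add r0, r1, #4"
   (0xe2810004); r0 and r1 were primed with the original rd and rn values
   above, and cleanup_alu_imm (set below) copies r0 back into rd (here
   the PC) using ALU_WRITE_PC.  */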
6347 dsc->cleanup = &cleanup_alu_imm;
6349 return 0;
6352 static int
6353 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
6354 uint16_t insn2, struct regcache *regs,
6355 struct displaced_step_closure *dsc)
6357 unsigned int op = bits (insn1, 5, 8);
6358 unsigned int rn, rm, rd;
6359 ULONGEST rd_val, rn_val;
6361 rn = bits (insn1, 0, 3); /* Rn */
6362 rm = bits (insn2, 0, 3); /* Rm */
6363 rd = bits (insn2, 8, 11); /* Rd */
6365 /* This routine is only called for instruction MOV. */
6366 gdb_assert (op == 0x2 && rn == 0xf);
6368 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
6369 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
6371 if (debug_displaced)
6372 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
6373 "ALU", insn1, insn2);
6375 /* Instruction is of form:
6377 <op><cond> rd, [rn,] #imm
6379 Rewrite as:
6381 Preparation: tmp1, tmp2 <- r0, r1;
6382 r0, r1 <- rd, rn
6383 Insn: <op><cond> r0, r1, #imm
6384 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6387 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6388 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6389 rn_val = displaced_read_reg (regs, dsc, rn);
6390 rd_val = displaced_read_reg (regs, dsc, rd);
6391 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6392 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6393 dsc->rd = rd;
6395 dsc->modinsn[0] = insn1;
6396 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
6397 dsc->numinsns = 2;
6399 dsc->cleanup = &cleanup_alu_imm;
6401 return 0;
6404 /* Copy/cleanup arithmetic/logic insns with register RHS. */
6406 static void
6407 cleanup_alu_reg (struct gdbarch *gdbarch,
6408 struct regcache *regs, struct displaced_step_closure *dsc)
6410 ULONGEST rd_val;
6411 int i;
6413 rd_val = displaced_read_reg (regs, dsc, 0);
6415 for (i = 0; i < 3; i++)
6416 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6418 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6421 static void
6422 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
6423 struct displaced_step_closure *dsc,
6424 unsigned int rd, unsigned int rn, unsigned int rm)
6426 ULONGEST rd_val, rn_val, rm_val;
6428 /* Instruction is of form:
6430 <op><cond> rd, [rn,] rm [, <shift>]
6432 Rewrite as:
6434 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
6435 r0, r1, r2 <- rd, rn, rm
6436 Insn: <op><cond> r0, r1, r2 [, <shift>]
6437 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
6440 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6441 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6442 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6443 rd_val = displaced_read_reg (regs, dsc, rd);
6444 rn_val = displaced_read_reg (regs, dsc, rn);
6445 rm_val = displaced_read_reg (regs, dsc, rm);
6446 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6447 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6448 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6449 dsc->rd = rd;
6451 dsc->cleanup = &cleanup_alu_reg;
6454 static int
6455 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
6456 struct displaced_step_closure *dsc)
6458 unsigned int op = bits (insn, 21, 24);
6459 int is_mov = (op == 0xd);
6461 if (!insn_references_pc (insn, 0x000ff00ful))
6462 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
6464 if (debug_displaced)
6465 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
6466 is_mov ? "move" : "ALU", (unsigned long) insn);
6468 if (is_mov)
6469 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
6470 else
6471 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
6473 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
6474 bits (insn, 0, 3));
6475 return 0;
6478 static int
6479 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
6480 struct regcache *regs,
6481 struct displaced_step_closure *dsc)
6483 unsigned rn, rm, rd;
6485 rd = bits (insn, 3, 6);
6486 rn = (bit (insn, 7) << 3) | bits (insn, 0, 2);
6487 rm = 2;
6489 if (rd != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
6490 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
6492 if (debug_displaced)
6493 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x\n",
6494 "ALU", (unsigned short) insn);
6496 dsc->modinsn[0] = ((insn & 0xff00) | 0x08);
6498 install_alu_reg (gdbarch, regs, dsc, rd, rn, rm);
6500 return 0;
6503 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
6505 static void
6506 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
6507 struct regcache *regs,
6508 struct displaced_step_closure *dsc)
6510 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6511 int i;
6513 for (i = 0; i < 4; i++)
6514 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6516 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6519 static void
6520 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
6521 struct displaced_step_closure *dsc,
6522 unsigned int rd, unsigned int rn, unsigned int rm,
6523 unsigned rs)
6525 int i;
6526 ULONGEST rd_val, rn_val, rm_val, rs_val;
6528 /* Instruction is of form:
6530 <op><cond> rd, [rn,] rm, <shift> rs
6532 Rewrite as:
6534 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
6535 r0, r1, r2, r3 <- rd, rn, rm, rs
6536 Insn: <op><cond> r0, r1, r2, <shift> r3
6537 Cleanup: tmp5 <- r0
6538 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
6539 rd <- tmp5
6542 for (i = 0; i < 4; i++)
6543 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6545 rd_val = displaced_read_reg (regs, dsc, rd);
6546 rn_val = displaced_read_reg (regs, dsc, rn);
6547 rm_val = displaced_read_reg (regs, dsc, rm);
6548 rs_val = displaced_read_reg (regs, dsc, rs);
6549 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6550 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6551 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6552 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
6553 dsc->rd = rd;
6554 dsc->cleanup = &cleanup_alu_shifted_reg;
6557 static int
6558 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
6559 struct regcache *regs,
6560 struct displaced_step_closure *dsc)
6562 unsigned int op = bits (insn, 21, 24);
6563 int is_mov = (op == 0xd);
6564 unsigned int rd, rn, rm, rs;
6566 if (!insn_references_pc (insn, 0x000fff0ful))
6567 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
6569 if (debug_displaced)
6570 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
6571 "%.8lx\n", is_mov ? "move" : "ALU",
6572 (unsigned long) insn);
6574 rn = bits (insn, 16, 19);
6575 rm = bits (insn, 0, 3);
6576 rs = bits (insn, 8, 11);
6577 rd = bits (insn, 12, 15);
6579 if (is_mov)
6580 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
6581 else
6582 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
6584 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
6586 return 0;
6589 /* Clean up load instructions. */
6591 static void
6592 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
6593 struct displaced_step_closure *dsc)
6595 ULONGEST rt_val, rt_val2 = 0, rn_val;
6597 rt_val = displaced_read_reg (regs, dsc, 0);
6598 if (dsc->u.ldst.xfersize == 8)
6599 rt_val2 = displaced_read_reg (regs, dsc, 1);
6600 rn_val = displaced_read_reg (regs, dsc, 2);
6602 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6603 if (dsc->u.ldst.xfersize > 4)
6604 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6605 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6606 if (!dsc->u.ldst.immed)
6607 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6609 /* Handle register writeback. */
6610 if (dsc->u.ldst.writeback)
6611 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6612 /* Put result in right place. */
6613 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
6614 if (dsc->u.ldst.xfersize == 8)
6615 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
6618 /* Clean up store instructions. */
6620 static void
6621 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
6622 struct displaced_step_closure *dsc)
6624 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
6626 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6627 if (dsc->u.ldst.xfersize > 4)
6628 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6629 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6630 if (!dsc->u.ldst.immed)
6631 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6632 if (!dsc->u.ldst.restore_r4)
6633 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
6635 /* Writeback. */
6636 if (dsc->u.ldst.writeback)
6637 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6640 /* Copy "extra" load/store instructions. These are halfword/doubleword
6641 transfers, which have a different encoding to byte/word transfers. */
6643 static int
6644 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
6645 struct regcache *regs, struct displaced_step_closure *dsc)
6647 unsigned int op1 = bits (insn, 20, 24);
6648 unsigned int op2 = bits (insn, 5, 6);
6649 unsigned int rt = bits (insn, 12, 15);
6650 unsigned int rn = bits (insn, 16, 19);
6651 unsigned int rm = bits (insn, 0, 3);
6652 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
6653 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
6654 int immed = (op1 & 0x4) != 0;
6655 int opcode;
6656 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
6658 if (!insn_references_pc (insn, 0x000ff00ful))
6659 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
6661 if (debug_displaced)
6662 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
6663 "insn %.8lx\n", unpriveleged ? "unpriveleged " : "",
6664 (unsigned long) insn);
6666 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
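/* For example "ldrh rt, [rn, #imm]" (op2 == 0x1, L bit set, immediate
   form) should yield opcode 3, i.e. a load with a bytesize of 2.  */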
6668 if (opcode < 0)
6669 internal_error (__FILE__, __LINE__,
6670 _("copy_extra_ld_st: instruction decode error"));
6672 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6673 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6674 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6675 if (!immed)
6676 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6678 rt_val = displaced_read_reg (regs, dsc, rt);
6679 if (bytesize[opcode] == 8)
6680 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
6681 rn_val = displaced_read_reg (regs, dsc, rn);
6682 if (!immed)
6683 rm_val = displaced_read_reg (regs, dsc, rm);
6685 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6686 if (bytesize[opcode] == 8)
6687 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
6688 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6689 if (!immed)
6690 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6692 dsc->rd = rt;
6693 dsc->u.ldst.xfersize = bytesize[opcode];
6694 dsc->u.ldst.rn = rn;
6695 dsc->u.ldst.immed = immed;
6696 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
6697 dsc->u.ldst.restore_r4 = 0;
6699 if (immed)
6700 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
6702 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
6703 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6704 else
6705 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
6707 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
6708 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6710 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
6712 return 0;
6715 /* Copy byte/half word/word loads and stores. */
6717 static void
6718 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
6719 struct displaced_step_closure *dsc, int load,
6720 int immed, int writeback, int size, int usermode,
6721 int rt, int rm, int rn)
6723 ULONGEST rt_val, rn_val, rm_val = 0;
6725 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6726 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6727 if (!immed)
6728 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6729 if (!load)
6730 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
6732 rt_val = displaced_read_reg (regs, dsc, rt);
6733 rn_val = displaced_read_reg (regs, dsc, rn);
6734 if (!immed)
6735 rm_val = displaced_read_reg (regs, dsc, rm);
6737 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
6738 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
6739 if (!immed)
6740 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
6741 dsc->rd = rt;
6742 dsc->u.ldst.xfersize = size;
6743 dsc->u.ldst.rn = rn;
6744 dsc->u.ldst.immed = immed;
6745 dsc->u.ldst.writeback = writeback;
6747 /* To write PC we can do:
6749 Before this sequence of instructions:
6750 r0 is the PC value obtained from displaced_read_reg, so r0 = from + 8;
6751 r2 is the Rn value obtained from displaced_read_reg.
6753 Insn1: push {pc} Write address of STR instruction + offset on stack
6754 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
6755 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
6756 = addr(Insn1) + offset - addr(Insn3) - 8
6757 = offset - 16
6758 Insn4: add r4, r4, #8 r4 = offset - 8
6759 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
6760 = from + offset
6761 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
6763 Otherwise we don't know what value to write for PC, since the offset is
6764 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
6765 of this can be found in Section "Saving from r15" in
6766 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
6768 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6772 static int
6773 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
6774 uint16_t insn2, struct regcache *regs,
6775 struct displaced_step_closure *dsc, int size)
6777 unsigned int u_bit = bit (insn1, 7);
6778 unsigned int rt = bits (insn2, 12, 15);
6779 int imm12 = bits (insn2, 0, 11);
6780 ULONGEST pc_val;
6782 if (debug_displaced)
6783 fprintf_unfiltered (gdb_stdlog,
6784 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
6785 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
6786 imm12);
6788 if (!u_bit)
6789 imm12 = -1 * imm12;
6791 /* Rewrite instruction LDR Rt imm12 into:
6793 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
6795 LDR R0, R2, R3,
6797 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
6800 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6801 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6802 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
6804 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6806 pc_val = pc_val & 0xfffffffc;
6808 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
6809 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
6811 dsc->rd = rt;
6813 dsc->u.ldst.xfersize = size;
6814 dsc->u.ldst.immed = 0;
6815 dsc->u.ldst.writeback = 0;
6816 dsc->u.ldst.restore_r4 = 0;
6818 /* LDR R0, R2, R3 */
6819 dsc->modinsn[0] = 0xf852;
6820 dsc->modinsn[1] = 0x3;
6821 dsc->numinsns = 2;
6823 dsc->cleanup = &cleanup_load;
6825 return 0;
6828 static int
6829 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
6830 uint16_t insn2, struct regcache *regs,
6831 struct displaced_step_closure *dsc,
6832 int writeback, int immed)
6834 unsigned int rt = bits (insn2, 12, 15);
6835 unsigned int rn = bits (insn1, 0, 3);
6836 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
6837 /* In LDR (register), there is also a register Rm, which is not allowed to
6838 be PC, so we don't have to check it. */
6840 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
6841 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
6842 dsc);
6844 if (debug_displaced)
6845 fprintf_unfiltered (gdb_stdlog,
6846 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
6847 rt, rn, insn1, insn2);
6849 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
6850 0, rt, rm, rn);
6852 dsc->u.ldst.restore_r4 = 0;
6854 if (immed)
6855 /* ldr[b]<cond> rt, [rn, #imm], etc.
6857 ldr[b]<cond> r0, [r2, #imm]. */
6859 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6860 dsc->modinsn[1] = insn2 & 0x0fff;
6862 else
6863 /* ldr[b]<cond> rt, [rn, rm], etc.
6865 ldr[b]<cond> r0, [r2, r3]. */
6867 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
6868 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
6871 dsc->numinsns = 2;
6873 return 0;
6877 static int
6878 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
6879 struct regcache *regs,
6880 struct displaced_step_closure *dsc,
6881 int load, int size, int usermode)
6883 int immed = !bit (insn, 25);
6884 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
6885 unsigned int rt = bits (insn, 12, 15);
6886 unsigned int rn = bits (insn, 16, 19);
6887 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
6889 if (!insn_references_pc (insn, 0x000ff00ful))
6890 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
6892 if (debug_displaced)
6893 fprintf_unfiltered (gdb_stdlog,
6894 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
6895 load ? (size == 1 ? "ldrb" : "ldr")
6896 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
6897 rt, rn,
6898 (unsigned long) insn);
6900 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
6901 usermode, rt, rm, rn);
6903 if (load || rt != ARM_PC_REGNUM)
6905 dsc->u.ldst.restore_r4 = 0;
6907 if (immed)
6908 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
6910 {ldr,str}[b]<cond> r0, [r2, #imm]. */
6911 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
6912 else
6913 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
6915 {ldr,str}[b]<cond> r0, [r2, r3]. */
6916 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
6918 else
6920 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
6921 dsc->u.ldst.restore_r4 = 1;
6922 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
6923 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
6924 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
6925 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
6926 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
6928 /* As above. */
6929 if (immed)
6930 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
6931 else
6932 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
6934 dsc->numinsns = 6;
6937 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
6939 return 0;
6942 /* Cleanup LDM instructions with fully-populated register list. This is an
6943 unfortunate corner case: it's impossible to implement correctly by modifying
6944 the instruction. The issue is as follows: we have an instruction,
6946 ldm rN, {r0-r15}
6948 which we must rewrite to avoid loading PC. A possible solution would be to
6949 do the load in two halves, something like (with suitable cleanup
6950 afterwards):
6952 mov r8, rN
6953 ldm[id][ab] r8!, {r0-r7}
6954 str r7, <temp>
6955 ldm[id][ab] r8, {r7-r14}
6956 <bkpt>
6958 but at present there's no suitable place for <temp>, since the scratch space
6959 is overwritten before the cleanup routine is called. For now, we simply
6960 emulate the instruction. */
6962 static void
6963 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
6964 struct displaced_step_closure *dsc)
6966 int inc = dsc->u.block.increment;
6967 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
6968 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
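/* For an increment-before (IB) transfer BUMP_BEFORE is 4 and BUMP_AFTER
   is 0, so words are read from xfer_addr + 4, xfer_addr + 8, and so on;
   a decrement-after (DA) transfer reads from xfer_addr, xfer_addr - 4,
   etc., assigning the highest-numbered registers first.  */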
6969 uint32_t regmask = dsc->u.block.regmask;
6970 int regno = inc ? 0 : 15;
6971 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
6972 int exception_return = dsc->u.block.load && dsc->u.block.user
6973 && (regmask & 0x8000) != 0;
6974 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
6975 int do_transfer = condition_true (dsc->u.block.cond, status);
6976 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
6978 if (!do_transfer)
6979 return;
6981 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
6982 sensible we can do here. Complain loudly. */
6983 if (exception_return)
6984 error (_("Cannot single-step exception return"));
6986 /* We don't handle any stores here for now. */
6987 gdb_assert (dsc->u.block.load != 0);
6989 if (debug_displaced)
6990 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
6991 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
6992 dsc->u.block.increment ? "inc" : "dec",
6993 dsc->u.block.before ? "before" : "after");
6995 while (regmask)
6997 uint32_t memword;
6999 if (inc)
7000 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
7001 regno++;
7002 else
7003 while (regno >= 0 && (regmask & (1 << regno)) == 0)
7004 regno--;
7006 xfer_addr += bump_before;
7008 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
7009 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
7011 xfer_addr += bump_after;
7013 regmask &= ~(1 << regno);
7016 if (dsc->u.block.writeback)
7017 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
7018 CANNOT_WRITE_PC);
7021 /* Clean up an STM which included the PC in the register list. */
7023 static void
7024 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
7025 struct displaced_step_closure *dsc)
7027 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
7028 int store_executed = condition_true (dsc->u.block.cond, status);
7029 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
7030 CORE_ADDR stm_insn_addr;
7031 uint32_t pc_val;
7032 long offset;
7033 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7035 /* If condition code fails, there's nothing else to do. */
7036 if (!store_executed)
7037 return;
7039 if (dsc->u.block.increment)
7041 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
7043 if (dsc->u.block.before)
7044 pc_stored_at += 4;
7046 else
7048 pc_stored_at = dsc->u.block.xfer_addr;
7050 if (dsc->u.block.before)
7051 pc_stored_at -= 4;
7054 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
7055 stm_insn_addr = dsc->scratch_base;
7056 offset = pc_val - stm_insn_addr;
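/* OFFSET is now whatever the hardware added when storing the PC
   (typically 8 or 12, see install_load_store above); applying the same
   offset to the original insn address below reconstructs the value the
   non-displaced STM would have stored.  */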
7058 if (debug_displaced)
7059 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
7060 "STM instruction\n", offset);
7062 /* Rewrite the stored PC to the proper value for the non-displaced original
7063 instruction. */
7064 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
7065 dsc->insn_addr + offset);
7068 /* Clean up an LDM which includes the PC in the register list. We clumped all
7069 the registers in the transferred list into a contiguous range r0...rX (to
7070 avoid loading PC directly and losing control of the debugged program), so we
7071 must undo that here. */
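/* For instance, "ldm r3, {r4, r9, pc}" is executed out of line as
   "ldm r3, {r0, r1, r2}"; this cleanup then moves r2 into the PC, r1
   into r9 and r0 into r4, and finally restores the clobbered r0-r2 from
   dsc->tmp[].  */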
7073 static void
7074 cleanup_block_load_pc (struct gdbarch *gdbarch,
7075 struct regcache *regs,
7076 struct displaced_step_closure *dsc)
7078 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
7079 int load_executed = condition_true (dsc->u.block.cond, status);
7080 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
7081 unsigned int regs_loaded = bitcount (mask);
7082 unsigned int num_to_shuffle = regs_loaded, clobbered;
7084 /* The method employed here will fail if the register list is fully populated
7085 (we need to avoid loading PC directly). */
7086 gdb_assert (num_to_shuffle < 16);
7088 if (!load_executed)
7089 return;
7091 clobbered = (1 << num_to_shuffle) - 1;
7093 while (num_to_shuffle > 0)
7095 if ((mask & (1 << write_reg)) != 0)
7097 unsigned int read_reg = num_to_shuffle - 1;
7099 if (read_reg != write_reg)
7101 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
7102 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
7103 if (debug_displaced)
7104 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
7105 "loaded register r%d to r%d\n"), read_reg,
7106 write_reg);
7108 else if (debug_displaced)
7109 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
7110 "r%d already in the right place\n"),
7111 write_reg);
7113 clobbered &= ~(1 << write_reg);
7115 num_to_shuffle--;
7118 write_reg--;
7121 /* Restore any registers we scribbled over. */
7122 for (write_reg = 0; clobbered != 0; write_reg++)
7124 if ((clobbered & (1 << write_reg)) != 0)
7126 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
7127 CANNOT_WRITE_PC);
7128 if (debug_displaced)
7129 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
7130 "clobbered register r%d\n"), write_reg);
7131 clobbered &= ~(1 << write_reg);
7135 /* Perform register writeback manually. */
7136 if (dsc->u.block.writeback)
7138 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
7140 if (dsc->u.block.increment)
7141 new_rn_val += regs_loaded * 4;
7142 else
7143 new_rn_val -= regs_loaded * 4;
7145 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
7146 CANNOT_WRITE_PC);
7150 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
7151 in user-level code (in particular exception return, ldm rn, {...pc}^). */
7153 static int
7154 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
7155 struct regcache *regs,
7156 struct displaced_step_closure *dsc)
7158 int load = bit (insn, 20);
7159 int user = bit (insn, 22);
7160 int increment = bit (insn, 23);
7161 int before = bit (insn, 24);
7162 int writeback = bit (insn, 21);
7163 int rn = bits (insn, 16, 19);
7165 /* Block transfers which don't mention PC can be run directly
7166 out-of-line. */
7167 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
7168 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
7170 if (rn == ARM_PC_REGNUM)
7172 warning (_("displaced: Unpredictable LDM or STM with "
7173 "base register r15"));
7174 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
7177 if (debug_displaced)
7178 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
7179 "%.8lx\n", (unsigned long) insn);
7181 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7182 dsc->u.block.rn = rn;
7184 dsc->u.block.load = load;
7185 dsc->u.block.user = user;
7186 dsc->u.block.increment = increment;
7187 dsc->u.block.before = before;
7188 dsc->u.block.writeback = writeback;
7189 dsc->u.block.cond = bits (insn, 28, 31);
7191 dsc->u.block.regmask = insn & 0xffff;
7193 if (load)
7195 if ((insn & 0xffff) == 0xffff)
7197 /* LDM with a fully-populated register list. This case is
7198 particularly tricky. Implement for now by fully emulating the
7199 instruction (which might not behave perfectly in all cases, but
7200 these instructions should be rare enough for that not to matter
7201 too much). */
7202 dsc->modinsn[0] = ARM_NOP;
7204 dsc->cleanup = &cleanup_block_load_all;
7206 else
7208 /* LDM of a list of registers which includes PC. Implement by
7209 rewriting the list of registers to be transferred into a
7210 contiguous chunk r0...rX before doing the transfer, then shuffling
7211 registers into the correct places in the cleanup routine. */
7212 unsigned int regmask = insn & 0xffff;
7213 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
7214 unsigned int to = 0, from = 0, i, new_rn;
7216 for (i = 0; i < num_in_list; i++)
7217 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7219 /* Writeback makes things complicated. We need to avoid clobbering
7220 the base register with one of the registers in our modified
7221 register list, but just using a different register can't work in
7222 all cases, e.g.:
7224 ldm r14!, {r0-r13,pc}
7226 which would need to be rewritten as:
7228 ldm rN!, {r0-r14}
7230 but that can't work, because there's no free register for N.
7232 Solve this by turning off the writeback bit, and emulating
7233 writeback manually in the cleanup routine. */
7235 if (writeback)
7236 insn &= ~(1 << 21);
7238 new_regmask = (1 << num_in_list) - 1;
7240 if (debug_displaced)
7241 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
7242 "{..., pc}: original reg list %.4x, modified "
7243 "list %.4x\n"), rn, writeback ? "!" : "",
7244 (int) insn & 0xffff, new_regmask);
7246 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
7248 dsc->cleanup = &cleanup_block_load_pc;
7251 else
7253 /* STM of a list of registers which includes PC. Run the instruction
7254 as-is, but out of line: this will store the wrong value for the PC,
7255 so we must manually fix up the memory in the cleanup routine.
7256 Doing things this way has the advantage that we can auto-detect
7257 the offset of the PC write (which is architecture-dependent) in
7258 the cleanup routine. */
7259 dsc->modinsn[0] = insn;
7261 dsc->cleanup = &cleanup_block_store_pc;
7264 return 0;
7267 static int
7268 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7269 struct regcache *regs,
7270 struct displaced_step_closure *dsc)
7272 int rn = bits (insn1, 0, 3);
7273 int load = bit (insn1, 4);
7274 int writeback = bit (insn1, 5);
7276 /* Block transfers which don't mention PC can be run directly
7277 out-of-line. */
7278 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
7279 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
7281 if (rn == ARM_PC_REGNUM)
7283 warning (_("displaced: Unpredictable LDM or STM with "
7284 "base register r15"));
7285 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7286 "unpredictable ldm/stm", dsc);
7289 if (debug_displaced)
7290 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
7291 "%.4x%.4x\n", insn1, insn2);
7293 /* Clear bit 13, since it should always be zero. */
7294 dsc->u.block.regmask = (insn2 & 0xdfff);
7295 dsc->u.block.rn = rn;
7297 dsc->u.block.load = load;
7298 dsc->u.block.user = 0;
7299 dsc->u.block.increment = bit (insn1, 7);
7300 dsc->u.block.before = bit (insn1, 8);
7301 dsc->u.block.writeback = writeback;
7302 dsc->u.block.cond = INST_AL;
7303 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7305 if (load)
7307 if (dsc->u.block.regmask == 0xffff)
7309 /* This cannot happen: bit 13 was cleared above, so the mask is never 0xffff. */
7310 gdb_assert (0);
7312 else
7314 unsigned int regmask = dsc->u.block.regmask;
7315 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
7316 unsigned int to = 0, from = 0, i, new_rn;
7318 for (i = 0; i < num_in_list; i++)
7319 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7321 if (writeback)
7322 insn1 &= ~(1 << 5);
7324 new_regmask = (1 << num_in_list) - 1;
7326 if (debug_displaced)
7327 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
7328 "{..., pc}: original reg list %.4x, modified "
7329 "list %.4x\n"), rn, writeback ? "!" : "",
7330 (int) dsc->u.block.regmask, new_regmask);
7332 dsc->modinsn[0] = insn1;
7333 dsc->modinsn[1] = (new_regmask & 0xffff);
7334 dsc->numinsns = 2;
7336 dsc->cleanup = &cleanup_block_load_pc;
7339 else
7341 dsc->modinsn[0] = insn1;
7342 dsc->modinsn[1] = insn2;
7343 dsc->numinsns = 2;
7344 dsc->cleanup = &cleanup_block_store_pc;
7346 return 0;
7349 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
7350 for Linux, where some SVC instructions must be treated specially. */
7352 static void
7353 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
7354 struct displaced_step_closure *dsc)
7356 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
7358 if (debug_displaced)
7359 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
7360 "%.8lx\n", (unsigned long) resume_addr);
7362 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
7366 /* Common copy routine for SVC instructions. */
7368 static int
7369 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
7370 struct displaced_step_closure *dsc)
7372 /* Preparation: none.
7373 Insn: unmodified svc.
7374 Cleanup: pc <- insn_addr + insn_size. */
7376 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
7377 instruction. */
7378 dsc->wrote_to_pc = 1;
7380 /* Allow OS-specific code to override SVC handling. */
7381 if (dsc->u.svc.copy_svc_os)
7382 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
7383 else
7385 dsc->cleanup = &cleanup_svc;
7386 return 0;
7390 static int
7391 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
7392 struct regcache *regs, struct displaced_step_closure *dsc)
7395 if (debug_displaced)
7396 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
7397 (unsigned long) insn);
7399 dsc->modinsn[0] = insn;
7401 return install_svc (gdbarch, regs, dsc);
7404 static int
7405 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
7406 struct regcache *regs, struct displaced_step_closure *dsc)
7409 if (debug_displaced)
7410 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
7411 insn);
7413 dsc->modinsn[0] = insn;
7415 return install_svc (gdbarch, regs, dsc);
7418 /* Copy undefined instructions. */
7420 static int
7421 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
7422 struct displaced_step_closure *dsc)
7424 if (debug_displaced)
7425 fprintf_unfiltered (gdb_stdlog,
7426 "displaced: copying undefined insn %.8lx\n",
7427 (unsigned long) insn);
7429 dsc->modinsn[0] = insn;
7431 return 0;
7434 static int
7435 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7436 struct displaced_step_closure *dsc)
7439 if (debug_displaced)
7440 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
7441 "%.4x %.4x\n", (unsigned short) insn1,
7442 (unsigned short) insn2);
7444 dsc->modinsn[0] = insn1;
7445 dsc->modinsn[1] = insn2;
7446 dsc->numinsns = 2;
7448 return 0;
7451 /* Copy unpredictable instructions. */
7453 static int
7454 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
7455 struct displaced_step_closure *dsc)
7457 if (debug_displaced)
7458 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
7459 "%.8lx\n", (unsigned long) insn);
7461 dsc->modinsn[0] = insn;
7463 return 0;
7466 /* The decode_* functions are instruction decoding helpers. They mostly follow
7467 the presentation in the ARM ARM. */
7469 static int
7470 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
7471 struct regcache *regs,
7472 struct displaced_step_closure *dsc)
7474 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
7475 unsigned int rn = bits (insn, 16, 19);
7477 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
7478 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
7479 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
7480 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
7481 else if ((op1 & 0x60) == 0x20)
7482 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
7483 else if ((op1 & 0x71) == 0x40)
7484 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
7485 dsc);
7486 else if ((op1 & 0x77) == 0x41)
7487 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7488 else if ((op1 & 0x77) == 0x45)
7489 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
7490 else if ((op1 & 0x77) == 0x51)
7492 if (rn != 0xf)
7493 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
7494 else
7495 return arm_copy_unpred (gdbarch, insn, dsc);
7497 else if ((op1 & 0x77) == 0x55)
7498 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
7499 else if (op1 == 0x57)
7500 switch (op2)
7502 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
7503 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
7504 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
7505 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
7506 default: return arm_copy_unpred (gdbarch, insn, dsc);
7508 else if ((op1 & 0x63) == 0x43)
7509 return arm_copy_unpred (gdbarch, insn, dsc);
7510 else if ((op2 & 0x1) == 0x0)
7511 switch (op1 & ~0x80)
7513 case 0x61:
7514 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
7515 case 0x65:
7516 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
7517 case 0x71: case 0x75:
7518 /* pld/pldw reg. */
7519 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
7520 case 0x63: case 0x67: case 0x73: case 0x77:
7521 return arm_copy_unpred (gdbarch, insn, dsc);
7522 default:
7523 return arm_copy_undef (gdbarch, insn, dsc);
7525 else
7526 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
7529 static int
7530 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
7531 struct regcache *regs,
7532 struct displaced_step_closure *dsc)
7534 if (bit (insn, 27) == 0)
7535 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
7536 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
7537 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
7539 case 0x0: case 0x2:
7540 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
7542 case 0x1: case 0x3:
7543 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
7545 case 0x4: case 0x5: case 0x6: case 0x7:
7546 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7548 case 0x8:
7549 switch ((insn & 0xe00000) >> 21)
7551 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
7552 /* stc/stc2. */
7553 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7555 case 0x2:
7556 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7558 default:
7559 return arm_copy_undef (gdbarch, insn, dsc);
7562 case 0x9:
7564 int rn_f = (bits (insn, 16, 19) == 0xf);
7565 switch ((insn & 0xe00000) >> 21)
7567 case 0x1: case 0x3:
7568 /* ldc/ldc2 imm (undefined for rn == pc). */
7569 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
7570 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7572 case 0x2:
7573 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7575 case 0x4: case 0x5: case 0x6: case 0x7:
7576 /* ldc/ldc2 lit (undefined for rn != pc). */
7577 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
7578 : arm_copy_undef (gdbarch, insn, dsc);
7580 default:
7581 return arm_copy_undef (gdbarch, insn, dsc);
7585 case 0xa:
7586 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
7588 case 0xb:
7589 if (bits (insn, 16, 19) == 0xf)
7590 /* ldc/ldc2 lit. */
7591 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7592 else
7593 return arm_copy_undef (gdbarch, insn, dsc);
7595 case 0xc:
7596 if (bit (insn, 4))
7597 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7598 else
7599 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7601 case 0xd:
7602 if (bit (insn, 4))
7603 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7604 else
7605 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7607 default:
7608 return arm_copy_undef (gdbarch, insn, dsc);
7612 /* Decode miscellaneous instructions in dp/misc encoding space. */
7614 static int
7615 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
7616 struct regcache *regs,
7617 struct displaced_step_closure *dsc)
7619 unsigned int op2 = bits (insn, 4, 6);
7620 unsigned int op = bits (insn, 21, 22);
7621 unsigned int op1 = bits (insn, 16, 19);
7623 switch (op2)
7625 case 0x0:
7626 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
7628 case 0x1:
7629 if (op == 0x1) /* bx. */
7630 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
7631 else if (op == 0x3)
7632 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
7633 else
7634 return arm_copy_undef (gdbarch, insn, dsc);
7636 case 0x2:
7637 if (op == 0x1)
7638 /* Not really supported. */
7639 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
7640 else
7641 return arm_copy_undef (gdbarch, insn, dsc);
7643 case 0x3:
7644 if (op == 0x1)
7645 return arm_copy_bx_blx_reg (gdbarch, insn,
7646 regs, dsc); /* blx register. */
7647 else
7648 return arm_copy_undef (gdbarch, insn, dsc);
7650 case 0x5:
7651 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
7653 case 0x7:
7654 if (op == 0x1)
7655 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
7656 else if (op == 0x3)
7657 /* Not really supported. */
7658 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
7660 default:
7661 return arm_copy_undef (gdbarch, insn, dsc);
7665 static int
7666 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
7667 struct regcache *regs,
7668 struct displaced_step_closure *dsc)
7670 if (bit (insn, 25))
7671 switch (bits (insn, 20, 24))
7673 case 0x10:
7674 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
7676 case 0x14:
7677 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
7679 case 0x12: case 0x16:
7680 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
7682 default:
7683 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
7685 else
7687 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
7689 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
7690 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
7691 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
7692 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
7693 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
7694 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
7695 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
7696 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
7697 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
7698 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
7699 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
7700 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
7701 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
7703 /* 2nd arg means "unprivileged". */
7703 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
7704 dsc);
7707 /* Should be unreachable. */
7708 return 1;
7711 static int
7712 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
7713 struct regcache *regs,
7714 struct displaced_step_closure *dsc)
7716 int a = bit (insn, 25), b = bit (insn, 4);
7717 uint32_t op1 = bits (insn, 20, 24);
7718 int rn_f = bits (insn, 16, 19) == 0xf;
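 /* The three trailing arguments in the calls below select load (1) versus
    store (0), the transfer size in bytes (4 or 1), and the unprivileged
    ("T") variant, in that order, as the call sites and the op1 bit tests
    suggest (see arm_copy_ldr_str_ldrb_strb).  */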
7720 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
7721 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
7722 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
7723 else if ((!a && (op1 & 0x17) == 0x02)
7724 || (a && (op1 & 0x17) == 0x02 && !b))
7725 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
7726 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
7727 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
7728 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
7729 else if ((!a && (op1 & 0x17) == 0x03)
7730 || (a && (op1 & 0x17) == 0x03 && !b))
7731 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
7732 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
7733 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
7734 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
7735 else if ((!a && (op1 & 0x17) == 0x06)
7736 || (a && (op1 & 0x17) == 0x06 && !b))
7737 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
7738 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
7739 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
7740 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
7741 else if ((!a && (op1 & 0x17) == 0x07)
7742 || (a && (op1 & 0x17) == 0x07 && !b))
7743 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
7745 /* Should be unreachable. */
7746 return 1;
7749 static int
7750 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
7751 struct displaced_step_closure *dsc)
7753 switch (bits (insn, 20, 24))
7755 case 0x00: case 0x01: case 0x02: case 0x03:
7756 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
7758 case 0x04: case 0x05: case 0x06: case 0x07:
7759 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
7761 case 0x08: case 0x09: case 0x0a: case 0x0b:
7762 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
7763 return arm_copy_unmodified (gdbarch, insn,
7764 "decode/pack/unpack/saturate/reverse", dsc);
7766 case 0x18:
7767 if (bits (insn, 5, 7) == 0) /* op2. */
7769 if (bits (insn, 12, 15) == 0xf)
7770 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
7771 else
7772 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
7774 else
7775 return arm_copy_undef (gdbarch, insn, dsc);
7777 case 0x1a: case 0x1b:
7778 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7779 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
7780 else
7781 return arm_copy_undef (gdbarch, insn, dsc);
7783 case 0x1c: case 0x1d:
7784 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
7786 if (bits (insn, 0, 3) == 0xf)
7787 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
7788 else
7789 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
7791 else
7792 return arm_copy_undef (gdbarch, insn, dsc);
7794 case 0x1e: case 0x1f:
7795 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
7796 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
7797 else
7798 return arm_copy_undef (gdbarch, insn, dsc);
7801 /* Should be unreachable. */
7802 return 1;
7805 static int
7806 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, int32_t insn,
7807 struct regcache *regs,
7808 struct displaced_step_closure *dsc)
7810 if (bit (insn, 25))
7811 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
7812 else
7813 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
7816 static int
7817 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
7818 struct regcache *regs,
7819 struct displaced_step_closure *dsc)
7821 unsigned int opcode = bits (insn, 20, 24);
7823 switch (opcode)
7825 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
7826 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
7828 case 0x08: case 0x0a: case 0x0c: case 0x0e:
7829 case 0x12: case 0x16:
7830 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
7832 case 0x09: case 0x0b: case 0x0d: case 0x0f:
7833 case 0x13: case 0x17:
7834 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
7836 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7837 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7838 /* Note: no writeback for these instructions. Bit 25 will always be
7839 zero though (via caller), so the following works OK. */
7840 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7843 /* Should be unreachable. */
7844 return 1;
7847 /* Decode shifted register instructions. */
7849 static int
7850 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
7851 uint16_t insn2, struct regcache *regs,
7852 struct displaced_step_closure *dsc)
7854 /* The PC is only allowed to be used in a MOV instruction. */
7856 unsigned int op = bits (insn1, 5, 8);
7857 unsigned int rn = bits (insn1, 0, 3);
7859 if (op == 0x2 && rn == 0xf) /* MOV */
7860 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
7861 else
7862 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7863 "dp (shift reg)", dsc);
7867 /* Decode extension register load/store. Exactly the same as
7868 arm_decode_ext_reg_ld_st. */
7870 static int
7871 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
7872 uint16_t insn2, struct regcache *regs,
7873 struct displaced_step_closure *dsc)
7875 unsigned int opcode = bits (insn1, 4, 8);
7877 switch (opcode)
7879 case 0x04: case 0x05:
7880 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7881 "vfp/neon vmov", dsc);
7883 case 0x08: case 0x0c: /* 01x00 */
7884 case 0x0a: case 0x0e: /* 01x10 */
7885 case 0x12: case 0x16: /* 10x10 */
7886 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7887 "vfp/neon vstm/vpush", dsc);
7889 case 0x09: case 0x0d: /* 01x01 */
7890 case 0x0b: case 0x0f: /* 01x11 */
7891 case 0x13: case 0x17: /* 10x11 */
7892 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7893 "vfp/neon vldm/vpop", dsc);
7895 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
7896 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7897 "vstr", dsc);
7898 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
7899 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
7902 /* Should be unreachable. */
7903 return 1;
7906 static int
7907 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn, CORE_ADDR to,
7908 struct regcache *regs, struct displaced_step_closure *dsc)
7910 unsigned int op1 = bits (insn, 20, 25);
7911 int op = bit (insn, 4);
7912 unsigned int coproc = bits (insn, 8, 11);
7913 unsigned int rn = bits (insn, 16, 19);
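 /* op1 (bits 20-25), op (bit 4) and the coprocessor number distinguish the
    extension register load/store, coprocessor load/store, register transfer,
    coprocessor data-processing and SVC groups handled below.  */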
7915 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
7916 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
7917 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
7918 && (coproc & 0xe) != 0xa)
7919 /* stc/stc2. */
7920 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7921 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
7922 && (coproc & 0xe) != 0xa)
7923 /* ldc/ldc2 imm/lit. */
7924 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7925 else if ((op1 & 0x3e) == 0x00)
7926 return arm_copy_undef (gdbarch, insn, dsc);
7927 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
7928 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
7929 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
7930 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7931 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
7932 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7933 else if ((op1 & 0x30) == 0x20 && !op)
7935 if ((coproc & 0xe) == 0xa)
7936 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
7937 else
7938 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7940 else if ((op1 & 0x30) == 0x20 && op)
7941 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
7942 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
7943 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7944 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
7945 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7946 else if ((op1 & 0x30) == 0x30)
7947 return arm_copy_svc (gdbarch, insn, regs, dsc);
7948 else
7949 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
7952 static int
7953 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
7954 uint16_t insn2, struct regcache *regs,
7955 struct displaced_step_closure *dsc)
7957 unsigned int coproc = bits (insn2, 8, 11);
7958 unsigned int op1 = bits (insn1, 4, 9);
7959 unsigned int bit_5_8 = bits (insn1, 5, 8);
7960 unsigned int bit_9 = bit (insn1, 9);
7961 unsigned int bit_4 = bit (insn1, 4);
7962 unsigned int rn = bits (insn1, 0, 3);
7964 if (bit_9 == 0)
7966 if (bit_5_8 == 2)
7967 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7968 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
7969 dsc);
7970 else if (bit_5_8 == 0) /* UNDEFINED. */
7971 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7972 else
7974 /* coproc is 101x: SIMD/VFP ext register load/store. */
7975 if ((coproc & 0xe) == 0xa)
7976 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
7977 dsc);
7978 else /* coproc is not 101x. */
7980 if (bit_4 == 0) /* STC/STC2. */
7981 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7982 "stc/stc2", dsc);
7983 else /* LDC/LDC2 {literal, immediate}. */
7984 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
7985 regs, dsc);
7989 else
7990 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
7992 return 0;
7995 static void
7996 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
7997 struct displaced_step_closure *dsc, int rd)
7999 /* ADR Rd, #imm
8001 Rewrite as:
8003 Preparation: Rd <- PC
8004 Insn: ADD Rd, #imm
8005 Cleanup: Null.
8008 /* Rd <- PC */
8009 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
8010 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
8013 static int
8014 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
8015 struct displaced_step_closure *dsc,
8016 int rd, unsigned int imm)
8019 /* Encoding T2: ADDS Rd, #imm */
8020 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
8022 install_pc_relative (gdbarch, regs, dsc, rd);
8024 return 0;
8027 static int
8028 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
8029 struct regcache *regs,
8030 struct displaced_step_closure *dsc)
8032 unsigned int rd = bits (insn, 8, 10);
8033 unsigned int imm8 = bits (insn, 0, 7);
8035 if (debug_displaced)
8036 fprintf_unfiltered (gdb_stdlog,
8037 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
8038 rd, imm8, insn);
8040 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
8043 static int
8044 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
8045 uint16_t insn2, struct regcache *regs,
8046 struct displaced_step_closure *dsc)
8048 unsigned int rd = bits (insn2, 8, 11);
8049 /* The immediate has the same encoding in ADR, ADD and SUB, so simply
8050 extract the raw immediate encoding rather than computing its value. When
8051 generating the ADD or SUB instruction, the immediate can then be ORed
8052 straight into the encoding. */
8053 unsigned int imm_3_8 = insn2 & 0x70ff;
8054 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
8056 if (debug_displaced)
8057 fprintf_unfiltered (gdb_stdlog,
8058 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
8059 rd, imm_i, imm_3_8, insn1, insn2);
8061 if (bit (insn1, 7)) /* ADR encoding T2 (subtracting form). */
8063 /* Generate SUB Rd, Rd, #imm (SUB immediate, encoding T3). */
8064 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
8065 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
8067 else /* ADR encoding T3 (adding form). */
8069 /* Generate ADD Rd, Rd, #imm (ADD immediate, encoding T3). */
8070 dsc->modinsn[0] = (0xf100 | rd | imm_i);
8071 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
8073 dsc->numinsns = 2;
8075 install_pc_relative (gdbarch, regs, dsc, rd);
8077 return 0;
8080 static int
8081 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, unsigned short insn1,
8082 struct regcache *regs,
8083 struct displaced_step_closure *dsc)
8085 unsigned int rt = bits (insn1, 8, 10);
8086 unsigned int pc;
8087 int imm8 = (bits (insn1, 0, 7) << 2);
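 /* The encoded 8-bit immediate is a word offset, so scale it to bytes.  */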
8088 CORE_ADDR from = dsc->insn_addr;
8090 /* LDR Rd, #imm8
8092 Rewrite as:
8094 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
8096 Insn: LDR R0, [R2, R3];
8097 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
8099 if (debug_displaced)
8100 fprintf_unfiltered (gdb_stdlog,
8101 "displaced: copying thumb ldr r%d [pc #%d]\n"
8102 , rt, imm8);
8104 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
8105 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
8106 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
8107 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
8108 /* The assembler calculates the required value of the offset from the
8109 Align(PC,4) value of this instruction to the label. */
8110 pc = pc & 0xfffffffc;
8112 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
8113 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
8115 dsc->rd = rt;
8116 dsc->u.ldst.xfersize = 4;
8117 dsc->u.ldst.rn = 0;
8118 dsc->u.ldst.immed = 0;
8119 dsc->u.ldst.writeback = 0;
8120 dsc->u.ldst.restore_r4 = 0;
8122 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
8124 dsc->cleanup = &cleanup_load;
8126 return 0;
8129 /* Copy Thumb cbnz/cbz instruction. */
8131 static int
8132 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
8133 struct regcache *regs,
8134 struct displaced_step_closure *dsc)
8136 int non_zero = bit (insn1, 11);
8137 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
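 /* Despite its name, imm5 holds the full branch offset: bit 9 of the
    instruction supplies offset bit 6, bits 3-7 supply offset bits 1-5,
    and offset bit 0 is always zero.  */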
8138 CORE_ADDR from = dsc->insn_addr;
8139 int rn = bits (insn1, 0, 2);
8140 int rn_val = displaced_read_reg (regs, dsc, rn);
8142 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
8143 /* CBNZ and CBZ do not affect the condition flags. If the condition is
8144 true, set it to INST_AL so that cleanup_branch knows the branch is taken;
8145 otherwise leave it as is and cleanup_branch will do nothing. */
8146 if (dsc->u.branch.cond)
8148 dsc->u.branch.cond = INST_AL;
8149 dsc->u.branch.dest = from + 4 + imm5;
8151 else
8152 dsc->u.branch.dest = from + 2;
8154 dsc->u.branch.link = 0;
8155 dsc->u.branch.exchange = 0;
8157 if (debug_displaced)
8158 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
8159 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
8160 rn, rn_val, insn1, dsc->u.branch.dest);
8162 dsc->modinsn[0] = THUMB_NOP;
8164 dsc->cleanup = &cleanup_branch;
8165 return 0;
8168 /* Copy Table Branch Byte/Halfword */
8169 static int
8170 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
8171 uint16_t insn2, struct regcache *regs,
8172 struct displaced_step_closure *dsc)
8174 ULONGEST rn_val, rm_val;
8175 int is_tbh = bit (insn2, 4);
8176 CORE_ADDR halfwords = 0;
8177 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8179 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
8180 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
8182 if (is_tbh)
8184 gdb_byte buf[2];
8186 target_read_memory (rn_val + 2 * rm_val, buf, 2);
8187 halfwords = extract_unsigned_integer (buf, 2, byte_order);
8189 else
8191 gdb_byte buf[1];
8193 target_read_memory (rn_val + rm_val, buf, 1);
8194 halfwords = extract_unsigned_integer (buf, 1, byte_order);
8197 if (debug_displaced)
8198 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
8199 " table value 0x%x\n", is_tbh ? "tbh" : "tbb",
8200 (unsigned int) rn_val, (unsigned int) rm_val,
8201 (unsigned int) halfwords);
8203 dsc->u.branch.cond = INST_AL;
8204 dsc->u.branch.link = 0;
8205 dsc->u.branch.exchange = 0;
8206 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
8208 dsc->cleanup = &cleanup_branch;
8210 return 0;
8213 static void
8214 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
8215 struct displaced_step_closure *dsc)
8217 /* PC <- r7 */
8218 int val = displaced_read_reg (regs, dsc, 7);
8219 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
8221 /* r7 <- r8 */
8222 val = displaced_read_reg (regs, dsc, 8);
8223 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
8225 /* r8 <- tmp[0] */
8226 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
8230 static int
8231 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, unsigned short insn1,
8232 struct regcache *regs,
8233 struct displaced_step_closure *dsc)
8235 dsc->u.block.regmask = insn1 & 0x00ff;
8237 /* Rewrite instruction: POP {rX, rY, ..., rZ, PC}
8238 to:
8240 (1) register list is full, that is, r0-r7 are used.
8241 Prepare: tmp[0] <- r8
8243 POP {r0, r1, ...., r6, r7}; remove PC from reglist
8244 MOV r8, r7; Move value of r7 to r8;
8245 POP {r7}; Store PC value into r7.
8247 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
8249 (2) register list is not full, supposing there are N registers in
8250 register list (except PC, 0 <= N <= 7).
8251 Prepare: for each i, 0 - N, tmp[i] <- ri.
8253 POP {r0, r1, ...., rN};
8255 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
8256 from tmp[] properly.
8258 if (debug_displaced)
8259 fprintf_unfiltered (gdb_stdlog,
8260 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
8261 dsc->u.block.regmask, insn1);
8263 if (dsc->u.block.regmask == 0xff)
8265 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
8267 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
8268 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
8269 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
8271 dsc->numinsns = 3;
8272 dsc->cleanup = &cleanup_pop_pc_16bit_all;
8274 else
8276 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
8277 unsigned int new_regmask, bit = 1;
8278 unsigned int to = 0, from = 0, i, new_rn;
8280 for (i = 0; i < num_in_list + 1; i++)
8281 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
8283 new_regmask = (1 << (num_in_list + 1)) - 1;
8285 if (debug_displaced)
8286 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
8287 "{..., pc}: original reg list %.4x,"
8288 " modified list %.4x\n"),
8289 (int) dsc->u.block.regmask, new_regmask);
8291 dsc->u.block.regmask |= 0x8000;
8292 dsc->u.block.writeback = 0;
8293 dsc->u.block.cond = INST_AL;
8295 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
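 /* Clear the PC bit (bit 8) and the original register list from the 16-bit
    POP encoding and substitute the contiguous low-register mask computed
    above.  */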
8297 dsc->cleanup = &cleanup_block_load_pc;
8300 return 0;
8303 static void
8304 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8305 struct regcache *regs,
8306 struct displaced_step_closure *dsc)
8308 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
8309 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
8310 int err = 0;
8312 /* 16-bit thumb instructions. */
8313 switch (op_bit_12_15)
8315 /* Shift (immediate), add, subtract, move and compare. */
8316 case 0: case 1: case 2: case 3:
8317 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8318 "shift/add/sub/mov/cmp",
8319 dsc);
8320 break;
8321 case 4:
8322 switch (op_bit_10_11)
8324 case 0: /* Data-processing */
8325 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
8326 "data-processing",
8327 dsc);
8328 break;
8329 case 1: /* Special data instructions and branch and exchange. */
8331 unsigned short op = bits (insn1, 7, 9);
8332 if (op == 6 || op == 7) /* BX or BLX */
8333 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
8334 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
8335 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
8336 else
8337 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
8338 dsc);
8340 break;
8341 default: /* LDR (literal) */
8342 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
8344 break;
8345 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
8346 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
8347 break;
8348 case 10:
8349 if (op_bit_10_11 < 2) /* Generate PC-relative address */
8350 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
8351 else /* Generate SP-relative address */
8352 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
8353 break;
8354 case 11: /* Misc 16-bit instructions */
8356 switch (bits (insn1, 8, 11))
8358 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
8359 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
8360 break;
8361 case 12: case 13: /* POP */
8362 if (bit (insn1, 8)) /* PC is in register list. */
8363 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
8364 else
8365 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
8366 break;
8367 case 15: /* If-Then, and hints */
8368 if (bits (insn1, 0, 3))
8369 /* If-Then makes up to four following instructions conditional.
8370 The IT instruction itself is not conditional, so handle it as an
8371 ordinary unmodified instruction. */
8372 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
8373 dsc);
8374 else
8375 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
8376 break;
8377 default:
8378 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
8381 break;
8382 case 12:
8383 if (op_bit_10_11 < 2) /* Store multiple registers */
8384 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
8385 else /* Load multiple registers */
8386 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
8387 break;
8388 case 13: /* Conditional branch and supervisor call */
8389 if (bits (insn1, 9, 11) != 7) /* conditional branch */
8390 err = thumb_copy_b (gdbarch, insn1, dsc);
8391 else
8392 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
8393 break;
8394 case 14: /* Unconditional branch */
8395 err = thumb_copy_b (gdbarch, insn1, dsc);
8396 break;
8397 default:
8398 err = 1;
8401 if (err)
8402 internal_error (__FILE__, __LINE__,
8403 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
8406 static int
8407 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
8408 uint16_t insn1, uint16_t insn2,
8409 struct regcache *regs,
8410 struct displaced_step_closure *dsc)
8412 int rt = bits (insn2, 12, 15);
8413 int rn = bits (insn1, 0, 3);
8414 int op1 = bits (insn1, 7, 8);
8415 int err = 0;
8417 switch (bits (insn1, 5, 6))
8419 case 0: /* Load byte and memory hints */
8420 if (rt == 0xf) /* PLD/PLI */
8422 if (rn == 0xf)
8423 /* PLD literal or Encoding T3 of PLI (immediate, literal). */
8424 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
8425 else
8426 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8427 "pli/pld", dsc);
8429 else
8431 if (rn == 0xf) /* LDRB/LDRSB (literal) */
8432 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 1);
8434 else
8435 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8436 "ldrb{reg, immediate}/ldrbt",
8437 dsc);
8440 break;
8441 case 1: /* Load halfword and memory hints. */
8442 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
8443 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8444 "pld/unalloc memhint", dsc);
8445 else
8447 if (rn == 0xf)
8448 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 2);
8450 else
8451 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8452 "ldrh/ldrht", dsc);
8454 break;
8455 case 2: /* Load word */
8457 int insn2_bit_8_11 = bits (insn2, 8, 11);
8459 if (rn == 0xf)
8460 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
8461 else if (op1 == 0x1) /* Encoding T3 */
8462 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
8463 0, 1);
8464 else /* op1 == 0x0 */
8466 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
8467 /* LDR (immediate) */
8468 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8469 dsc, bit (insn2, 8), 1);
8470 else if (insn2_bit_8_11 == 0xe) /* LDRT */
8471 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8472 "ldrt", dsc);
8473 else
8474 /* LDR (register) */
8475 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
8476 dsc, 0, 0);
8478 break;
8480 default:
8481 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
8482 break;
8484 return 0;
8487 static void
8488 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
8489 uint16_t insn2, struct regcache *regs,
8490 struct displaced_step_closure *dsc)
8492 int err = 0;
8493 unsigned short op = bit (insn2, 15);
8494 unsigned int op1 = bits (insn1, 11, 12);
8496 switch (op1)
8498 case 1:
8500 switch (bits (insn1, 9, 10))
8502 case 0:
8503 if (bit (insn1, 6))
8505 /* Load/store {dual, exclusive}, table branch. */
8506 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
8507 && bits (insn2, 5, 7) == 0)
8508 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
8509 dsc);
8510 else
8511 /* PC is not allowed to be used in load/store {dual, exclusive}
8512 instructions. */
8513 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8514 "load/store dual/ex", dsc);
8516 else /* load/store multiple */
8518 switch (bits (insn1, 7, 8))
8520 case 0: case 3: /* SRS, RFE */
8521 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8522 "srs/rfe", dsc);
8523 break;
8524 case 1: case 2: /* LDM/STM/PUSH/POP */
8525 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
8526 break;
8529 break;
8531 case 1:
8532 /* Data-processing (shift register). */
8533 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
8534 dsc);
8535 break;
8536 default: /* Coprocessor instructions. */
8537 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8538 break;
8540 break;
8542 case 2: /* op1 = 2 */
8543 if (op) /* Branch and misc control. */
8545 if (bit (insn2, 14) /* BLX/BL */
8546 || bit (insn2, 12) /* Unconditional branch */
8547 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
8548 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
8549 else
8550 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8551 "misc ctrl", dsc);
8553 else
8555 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
8557 int op = bits (insn1, 4, 8);
8558 int rn = bits (insn1, 0, 3);
8559 if ((op == 0 || op == 0xa) && rn == 0xf)
8560 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
8561 regs, dsc);
8562 else
8563 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8564 "dp/pb", dsc);
8566 else /* Data processing (modified immediate) */
8567 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8568 "dp/mi", dsc);
8570 break;
8571 case 3: /* op1 = 3 */
8572 switch (bits (insn1, 9, 10))
8574 case 0:
8575 if (bit (insn1, 4))
8576 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
8577 regs, dsc);
8578 else /* NEON Load/Store and Store single data item */
8579 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8580 "neon elt/struct load/store",
8581 dsc);
8582 break;
8583 case 1: /* op1 = 3, bits (9, 10) == 1 */
8584 switch (bits (insn1, 7, 8))
8586 case 0: case 1: /* Data processing (register) */
8587 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8588 "dp(reg)", dsc);
8589 break;
8590 case 2: /* Multiply and absolute difference */
8591 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8592 "mul/mua/diff", dsc);
8593 break;
8594 case 3: /* Long multiply and divide */
8595 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
8596 "lmul/lmua", dsc);
8597 break;
8599 break;
8600 default: /* Coprocessor instructions */
8601 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
8602 break;
8604 break;
8605 default:
8606 err = 1;
8609 if (err)
8610 internal_error (__FILE__, __LINE__,
8611 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
8615 static void
8616 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8617 CORE_ADDR to, struct regcache *regs,
8618 struct displaced_step_closure *dsc)
8620 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8621 uint16_t insn1
8622 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
8624 if (debug_displaced)
8625 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
8626 "at %.8lx\n", insn1, (unsigned long) from);
8628 dsc->is_thumb = 1;
8629 dsc->insn_size = thumb_insn_size (insn1);
8630 if (thumb_insn_size (insn1) == 4)
8632 uint16_t insn2
8633 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
8634 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
8636 else
8637 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
8640 void
8641 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8642 CORE_ADDR to, struct regcache *regs,
8643 struct displaced_step_closure *dsc)
8645 int err = 0;
8646 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8647 uint32_t insn;
8649 /* Most displaced instructions use a 1-instruction scratch space, so set this
8650 here and override below if/when necessary. */
8651 dsc->numinsns = 1;
8652 dsc->insn_addr = from;
8653 dsc->scratch_base = to;
8654 dsc->cleanup = NULL;
8655 dsc->wrote_to_pc = 0;
8657 if (!displaced_in_arm_mode (regs))
8658 return thumb_process_displaced_insn (gdbarch, from, to, regs, dsc);
8660 dsc->is_thumb = 0;
8661 dsc->insn_size = 4;
8662 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
8663 if (debug_displaced)
8664 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
8665 "at %.8lx\n", (unsigned long) insn,
8666 (unsigned long) from);
8668 if ((insn & 0xf0000000) == 0xf0000000)
8669 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
8670 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
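 /* The switch value packs instruction bits 27:25 into bits 3:1 and bit 4
    into bit 0, mirroring the top-level opcode split in the ARM instruction
    encoding table.  */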
8672 case 0x0: case 0x1: case 0x2: case 0x3:
8673 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
8674 break;
8676 case 0x4: case 0x5: case 0x6:
8677 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
8678 break;
8680 case 0x7:
8681 err = arm_decode_media (gdbarch, insn, dsc);
8682 break;
8684 case 0x8: case 0x9: case 0xa: case 0xb:
8685 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
8686 break;
8688 case 0xc: case 0xd: case 0xe: case 0xf:
8689 err = arm_decode_svc_copro (gdbarch, insn, to, regs, dsc);
8690 break;
8693 if (err)
8694 internal_error (__FILE__, __LINE__,
8695 _("arm_process_displaced_insn: Instruction decode error"));
8698 /* Actually set up the scratch space for a displaced instruction. */
8700 void
8701 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
8702 CORE_ADDR to, struct displaced_step_closure *dsc)
8704 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8705 unsigned int i, len, offset;
8706 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8707 int size = dsc->is_thumb? 2 : 4;
8708 const gdb_byte *bkp_insn;
8710 offset = 0;
8711 /* Poke modified instruction(s). */
8712 for (i = 0; i < dsc->numinsns; i++)
8714 if (debug_displaced)
8716 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
8717 if (size == 4)
8718 fprintf_unfiltered (gdb_stdlog, "%.8lx",
8719 dsc->modinsn[i]);
8720 else if (size == 2)
8721 fprintf_unfiltered (gdb_stdlog, "%.4x",
8722 (unsigned short)dsc->modinsn[i]);
8724 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
8725 (unsigned long) to + offset);
8728 write_memory_unsigned_integer (to + offset, size,
8729 byte_order_for_code,
8730 dsc->modinsn[i]);
8731 offset += size;
8734 /* Choose the correct breakpoint instruction. */
8735 if (dsc->is_thumb)
8737 bkp_insn = tdep->thumb_breakpoint;
8738 len = tdep->thumb_breakpoint_size;
8740 else
8742 bkp_insn = tdep->arm_breakpoint;
8743 len = tdep->arm_breakpoint_size;
8746 /* Put breakpoint afterwards. */
8747 write_memory (to + offset, bkp_insn, len);
8749 if (debug_displaced)
8750 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
8751 paddress (gdbarch, from), paddress (gdbarch, to));
8754 /* Entry point for copying an instruction into scratch space for displaced
8755 stepping. */
8757 struct displaced_step_closure *
8758 arm_displaced_step_copy_insn (struct gdbarch *gdbarch,
8759 CORE_ADDR from, CORE_ADDR to,
8760 struct regcache *regs)
8762 struct displaced_step_closure *dsc
8763 = xmalloc (sizeof (struct displaced_step_closure));
8764 arm_process_displaced_insn (gdbarch, from, to, regs, dsc);
8765 arm_displaced_init_closure (gdbarch, from, to, dsc);
8767 return dsc;
8770 /* Entry point for cleaning things up after a displaced instruction has been
8771 single-stepped. */
8773 void
8774 arm_displaced_step_fixup (struct gdbarch *gdbarch,
8775 struct displaced_step_closure *dsc,
8776 CORE_ADDR from, CORE_ADDR to,
8777 struct regcache *regs)
8779 if (dsc->cleanup)
8780 dsc->cleanup (gdbarch, regs, dsc);
8782 if (!dsc->wrote_to_pc)
8783 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
8784 dsc->insn_addr + dsc->insn_size);
8788 #include "bfd-in2.h"
8789 #include "libcoff.h"
8791 static int
8792 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
8794 struct gdbarch *gdbarch = info->application_data;
8796 if (arm_pc_is_thumb (gdbarch, memaddr))
8798 static asymbol *asym;
8799 static combined_entry_type ce;
8800 static struct coff_symbol_struct csym;
8801 static struct bfd fake_bfd;
8802 static bfd_target fake_target;
8804 if (csym.native == NULL)
8806 /* Create a fake symbol vector containing a Thumb symbol.
8807 This is solely so that the code in print_insn_little_arm()
8808 and print_insn_big_arm() in opcodes/arm-dis.c will detect
8809 the presence of a Thumb symbol and switch to decoding
8810 Thumb instructions. */
8812 fake_target.flavour = bfd_target_coff_flavour;
8813 fake_bfd.xvec = &fake_target;
8814 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
8815 csym.native = &ce;
8816 csym.symbol.the_bfd = &fake_bfd;
8817 csym.symbol.name = "fake";
8818 asym = (asymbol *) & csym;
8821 memaddr = UNMAKE_THUMB_ADDR (memaddr);
8822 info->symbols = &asym;
8824 else
8825 info->symbols = NULL;
8827 if (info->endian == BFD_ENDIAN_BIG)
8828 return print_insn_big_arm (memaddr, info);
8829 else
8830 return print_insn_little_arm (memaddr, info);
8833 /* The following define instruction sequences that will cause ARM
8834 cpu's to take an undefined instruction trap. These are used to
8835 signal a breakpoint to GDB.
8837 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
8838 modes. A different instruction is required for each mode. The ARM
8839 cpu's can also be big or little endian. Thus four different
8840 instructions are needed to support all cases.
8842 Note: ARMv4 defines several new instructions that will take the
8843 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
8844 not in fact add the new instructions. The new undefined
8845 instructions in ARMv4 are all instructions that had no defined
8846 behaviour in earlier chips. There is no guarantee that they will
8847 raise an exception, but may be treated as NOPs. In practice, it
8848 may only be safe to rely on instructions matching:
8850 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
8851 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
8852 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
8854 Even this may only be true if the condition predicate is true. The
8855 following use a condition predicate of ALWAYS so it is always TRUE.
8857 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
8858 and NetBSD all use a software interrupt rather than an undefined
8859 instruction to force a trap. This can be handled by the
8860 ABI-specific code during establishment of the gdbarch vector. */
8862 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
8863 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
8864 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
8865 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
8867 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
8868 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
8869 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
8870 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
8872 /* Determine the type and size of breakpoint to insert at PCPTR. Uses
8873 the program counter value to determine whether a 16-bit or 32-bit
8874 breakpoint should be used. It returns a pointer to a string of
8875 bytes that encode a breakpoint instruction, stores the length of
8876 the string to *lenptr, and adjusts the program counter (if
8877 necessary) to point to the actual memory location where the
8878 breakpoint should be inserted. */
8880 static const unsigned char *
8881 arm_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr, int *lenptr)
8883 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8884 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8886 if (arm_pc_is_thumb (gdbarch, *pcptr))
8888 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
8890 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
8891 check whether we are replacing a 32-bit instruction. */
8892 if (tdep->thumb2_breakpoint != NULL)
8894 gdb_byte buf[2];
8895 if (target_read_memory (*pcptr, buf, 2) == 0)
8897 unsigned short inst1;
8898 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
8899 if (thumb_insn_size (inst1) == 4)
8901 *lenptr = tdep->thumb2_breakpoint_size;
8902 return tdep->thumb2_breakpoint;
8907 *lenptr = tdep->thumb_breakpoint_size;
8908 return tdep->thumb_breakpoint;
8910 else
8912 *lenptr = tdep->arm_breakpoint_size;
8913 return tdep->arm_breakpoint;
8917 static void
8918 arm_remote_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr,
8919 int *kindptr)
8921 arm_breakpoint_from_pc (gdbarch, pcptr, kindptr);
8923 if (arm_pc_is_thumb (gdbarch, *pcptr) && *kindptr == 4)
8924 /* The documented magic value for a 32-bit Thumb-2 breakpoint, so
8925 that this is not confused with a 32-bit ARM breakpoint. */
8926 *kindptr = 3;
8929 /* Extract from an array REGBUF containing the (raw) register state a
8930 function return value of type TYPE, and copy that, in virtual
8931 format, into VALBUF. */
8933 static void
8934 arm_extract_return_value (struct type *type, struct regcache *regs,
8935 gdb_byte *valbuf)
8937 struct gdbarch *gdbarch = get_regcache_arch (regs);
8938 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8940 if (TYPE_CODE_FLT == TYPE_CODE (type))
8942 switch (gdbarch_tdep (gdbarch)->fp_model)
8944 case ARM_FLOAT_FPA:
8946 /* The value is in register F0 in internal format. We need to
8947 extract the raw value and then convert it to the desired
8948 internal type. */
8949 bfd_byte tmpbuf[FP_REGISTER_SIZE];
8951 regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
8952 convert_from_extended (floatformat_from_type (type), tmpbuf,
8953 valbuf, gdbarch_byte_order (gdbarch));
8955 break;
8957 case ARM_FLOAT_SOFT_FPA:
8958 case ARM_FLOAT_SOFT_VFP:
8959 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8960 not using the VFP ABI code. */
8961 case ARM_FLOAT_VFP:
8962 regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
8963 if (TYPE_LENGTH (type) > 4)
8964 regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
8965 valbuf + INT_REGISTER_SIZE);
8966 break;
8968 default:
8969 internal_error (__FILE__, __LINE__,
8970 _("arm_extract_return_value: "
8971 "Floating point model not supported"));
8972 break;
8975 else if (TYPE_CODE (type) == TYPE_CODE_INT
8976 || TYPE_CODE (type) == TYPE_CODE_CHAR
8977 || TYPE_CODE (type) == TYPE_CODE_BOOL
8978 || TYPE_CODE (type) == TYPE_CODE_PTR
8979 || TYPE_CODE (type) == TYPE_CODE_REF
8980 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8982 /* If the type is a plain integer, then the access is
8983 straightforward. Otherwise we have to play around a bit
8984 more. */
8985 int len = TYPE_LENGTH (type);
8986 int regno = ARM_A1_REGNUM;
8987 ULONGEST tmp;
8989 while (len > 0)
8991 /* By using store_unsigned_integer we avoid having to do
8992 anything special for small big-endian values. */
8993 regcache_cooked_read_unsigned (regs, regno++, &tmp);
8994 store_unsigned_integer (valbuf,
8995 (len > INT_REGISTER_SIZE
8996 ? INT_REGISTER_SIZE : len),
8997 byte_order, tmp);
8998 len -= INT_REGISTER_SIZE;
8999 valbuf += INT_REGISTER_SIZE;
9002 else
9004 /* For a structure or union the behaviour is as if the value had
9005 been stored to word-aligned memory and then loaded into
9006 registers with 32-bit load instruction(s). */
9007 int len = TYPE_LENGTH (type);
9008 int regno = ARM_A1_REGNUM;
9009 bfd_byte tmpbuf[INT_REGISTER_SIZE];
9011 while (len > 0)
9013 regcache_cooked_read (regs, regno++, tmpbuf);
9014 memcpy (valbuf, tmpbuf,
9015 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
9016 len -= INT_REGISTER_SIZE;
9017 valbuf += INT_REGISTER_SIZE;
9023 /* Will a function return an aggregate type in memory or in a
9024 register? Return 0 if an aggregate type can be returned in a
9025 register, 1 if it must be returned in memory. */
9027 static int
9028 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
9030 int nRc;
9031 enum type_code code;
9033 CHECK_TYPEDEF (type);
9035 /* In the ARM ABI, "integer" like aggregate types are returned in
9036 registers. For an aggregate type to be integer like, its size
9037 must be less than or equal to INT_REGISTER_SIZE and the
9038 offset of each addressable subfield must be zero. Note that bit
9039 fields are not addressable, and all addressable subfields of
9040 unions always start at offset zero.
9042 This function is based on the behaviour of GCC 2.95.1.
9043 See: gcc/arm.c: arm_return_in_memory() for details.
9045 Note: All versions of GCC before GCC 2.95.2 do not set up the
9046 parameters correctly for a function returning the following
9047 structure: struct { float f;}; This should be returned in memory,
9048 not a register. Richard Earnshaw sent me a patch, but I do not
9049 know of any way to detect if a function like the above has been
9050 compiled with the correct calling convention. */
9052 /* All aggregate types that won't fit in a register must be returned
9053 in memory. */
9054 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
9056 return 1;
9059 /* The AAPCS says all aggregates not larger than a word are returned
9060 in a register. */
9061 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
9062 return 0;
9064 /* The only aggregate types that can be returned in a register are
9065 structs and unions. Arrays must be returned in memory. */
9066 code = TYPE_CODE (type);
9067 if ((TYPE_CODE_STRUCT != code) && (TYPE_CODE_UNION != code))
9069 return 1;
9072 /* Assume all other aggregate types can be returned in a register.
9073 Run a check for structures, unions and arrays. */
9074 nRc = 0;
9076 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
9078 int i;
9079 /* Need to check if this struct/union is "integer" like. For
9080 this to be true, its size must be less than or equal to
9081 INT_REGISTER_SIZE and the offset of each addressable
9082 subfield must be zero. Note that bit fields are not
9083 addressable, and unions always start at offset zero. If any
9084 of the subfields is a floating point type, the struct/union
9085 cannot be an integer type. */
9087 /* For each field in the object, check:
9088 1) Is it FP? --> yes, nRc = 1;
9089 2) Is it addressable (bitpos != 0) and
9090 not packed (bitsize == 0)?
9091 --> yes, nRc = 1
9094 for (i = 0; i < TYPE_NFIELDS (type); i++)
9096 enum type_code field_type_code;
9097 field_type_code = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
9098 i)));
9100 /* Is it a floating point type field? */
9101 if (field_type_code == TYPE_CODE_FLT)
9103 nRc = 1;
9104 break;
9107 /* If bitpos != 0, then we have to care about it. */
9108 if (TYPE_FIELD_BITPOS (type, i) != 0)
9110 /* Bitfields are not addressable. If the field bitsize is
9111 zero, then the field is not packed. Hence it cannot be
9112 a bitfield or any other packed type. */
9113 if (TYPE_FIELD_BITSIZE (type, i) == 0)
9115 nRc = 1;
9116 break;
9122 return nRc;
9125 /* Write into appropriate registers a function return value of type
9126 TYPE, given in virtual format. */
9128 static void
9129 arm_store_return_value (struct type *type, struct regcache *regs,
9130 const gdb_byte *valbuf)
9132 struct gdbarch *gdbarch = get_regcache_arch (regs);
9133 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9135 if (TYPE_CODE (type) == TYPE_CODE_FLT)
9137 gdb_byte buf[MAX_REGISTER_SIZE];
9139 switch (gdbarch_tdep (gdbarch)->fp_model)
9141 case ARM_FLOAT_FPA:
9143 convert_to_extended (floatformat_from_type (type), buf, valbuf,
9144 gdbarch_byte_order (gdbarch));
9145 regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
9146 break;
9148 case ARM_FLOAT_SOFT_FPA:
9149 case ARM_FLOAT_SOFT_VFP:
9150 /* ARM_FLOAT_VFP can arise if this is a variadic function so
9151 not using the VFP ABI code. */
9152 case ARM_FLOAT_VFP:
9153 regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
9154 if (TYPE_LENGTH (type) > 4)
9155 regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
9156 valbuf + INT_REGISTER_SIZE);
9157 break;
9159 default:
9160 internal_error (__FILE__, __LINE__,
9161 _("arm_store_return_value: Floating "
9162 "point model not supported"));
9163 break;
9166 else if (TYPE_CODE (type) == TYPE_CODE_INT
9167 || TYPE_CODE (type) == TYPE_CODE_CHAR
9168 || TYPE_CODE (type) == TYPE_CODE_BOOL
9169 || TYPE_CODE (type) == TYPE_CODE_PTR
9170 || TYPE_CODE (type) == TYPE_CODE_REF
9171 || TYPE_CODE (type) == TYPE_CODE_ENUM)
9173 if (TYPE_LENGTH (type) <= 4)
9175 /* Values of one word or less are zero/sign-extended and
9176 returned in r0. */
9177 bfd_byte tmpbuf[INT_REGISTER_SIZE];
9178 LONGEST val = unpack_long (type, valbuf);
9180 store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
9181 regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
9183 else
9185 /* Integral values greater than one word are stored in consecutive
9186 registers starting with r0. This will always be a multiple of
9187 the register size. */
9188 int len = TYPE_LENGTH (type);
9189 int regno = ARM_A1_REGNUM;
9191 while (len > 0)
9193 regcache_cooked_write (regs, regno++, valbuf);
9194 len -= INT_REGISTER_SIZE;
9195 valbuf += INT_REGISTER_SIZE;
9199 else
9201 /* For a structure or union the behaviour is as if the value had
9202 been stored to word-aligned memory and then loaded into
9203 registers with 32-bit load instruction(s). */
9204 int len = TYPE_LENGTH (type);
9205 int regno = ARM_A1_REGNUM;
9206 bfd_byte tmpbuf[INT_REGISTER_SIZE];
9208 while (len > 0)
9210 memcpy (tmpbuf, valbuf,
9211 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
9212 regcache_cooked_write (regs, regno++, tmpbuf);
9213 len -= INT_REGISTER_SIZE;
9214 valbuf += INT_REGISTER_SIZE;
9220 /* Handle function return values. */
9222 static enum return_value_convention
9223 arm_return_value (struct gdbarch *gdbarch, struct value *function,
9224 struct type *valtype, struct regcache *regcache,
9225 gdb_byte *readbuf, const gdb_byte *writebuf)
9227 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9228 struct type *func_type = function ? value_type (function) : NULL;
9229 enum arm_vfp_cprc_base_type vfp_base_type;
9230 int vfp_base_count;
9232 if (arm_vfp_abi_for_function (gdbarch, func_type)
9233 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
9235 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
9236 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
9237 int i;
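 /* reg_char names the VFP/NEON register bank used ('s', 'd' or 'q') and
    unit_length is the size in bytes of one such register; the value is
    transferred one register at a time below.  */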
9238 for (i = 0; i < vfp_base_count; i++)
9240 if (reg_char == 'q')
9242 if (writebuf)
9243 arm_neon_quad_write (gdbarch, regcache, i,
9244 writebuf + i * unit_length);
9246 if (readbuf)
9247 arm_neon_quad_read (gdbarch, regcache, i,
9248 readbuf + i * unit_length);
9250 else
9252 char name_buf[4];
9253 int regnum;
9255 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
9256 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9257 strlen (name_buf));
9258 if (writebuf)
9259 regcache_cooked_write (regcache, regnum,
9260 writebuf + i * unit_length);
9261 if (readbuf)
9262 regcache_cooked_read (regcache, regnum,
9263 readbuf + i * unit_length);
9266 return RETURN_VALUE_REGISTER_CONVENTION;
9269 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
9270 || TYPE_CODE (valtype) == TYPE_CODE_UNION
9271 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
9273 if (tdep->struct_return == pcc_struct_return
9274 || arm_return_in_memory (gdbarch, valtype))
9275 return RETURN_VALUE_STRUCT_CONVENTION;
9278 /* AAPCS returns complex types longer than a register in memory. */
9279 if (tdep->arm_abi != ARM_ABI_APCS
9280 && TYPE_CODE (valtype) == TYPE_CODE_COMPLEX
9281 && TYPE_LENGTH (valtype) > INT_REGISTER_SIZE)
9282 return RETURN_VALUE_STRUCT_CONVENTION;
9284 if (writebuf)
9285 arm_store_return_value (valtype, regcache, writebuf);
9287 if (readbuf)
9288 arm_extract_return_value (valtype, regcache, readbuf);
9290 return RETURN_VALUE_REGISTER_CONVENTION;
9294 static int
9295 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
9297 struct gdbarch *gdbarch = get_frame_arch (frame);
9298 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9299 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9300 CORE_ADDR jb_addr;
9301 gdb_byte buf[INT_REGISTER_SIZE];
9303 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
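 /* r0 holds the address of the jmp_buf; the saved PC lives at offset
    jb_pc * jb_elt_size within it, both values being supplied by the
    OS ABI initialization.  */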
9305 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
9306 INT_REGISTER_SIZE))
9307 return 0;
9309 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
9310 return 1;
9313 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
9314 return the target PC. Otherwise return 0. */
9316 CORE_ADDR
9317 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
9319 const char *name;
9320 int namelen;
9321 CORE_ADDR start_addr;
9323 /* Find the starting address and name of the function containing the PC. */
9324 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
9326 /* Trampoline 'bx reg' doesn't belong to any function. Do the
9327 check for it here. */
9328 start_addr = arm_skip_bx_reg (frame, pc);
9329 if (start_addr != 0)
9330 return start_addr;
9332 return 0;
9335 /* If PC is in a Thumb call or return stub, return the address of the
9336 target PC, which is in a register. The thunk functions are called
9337 _call_via_xx, where xx is the register name. The possible names
9338 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
9339 functions, named __ARM_call_via_r[0-7]. */
9340 if (strncmp (name, "_call_via_", 10) == 0
9341 || strncmp (name, "__ARM_call_via_", strlen ("__ARM_call_via_")) == 0)
9343 /* Use the name suffix to determine which register contains the
9344 target PC. */
9345 static char *table[15] =
9346 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
9347 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
9349 int regno;
9350 int offset = strlen (name) - 2;
9352 for (regno = 0; regno <= 14; regno++)
9353 if (strcmp (&name[offset], table[regno]) == 0)
9354 return get_frame_register_unsigned (frame, regno);
9357 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
9358 non-interworking calls to foo. We could decode the stubs
9359 to find the target but it's easier to use the symbol table. */
9360 namelen = strlen (name);
9361 if (name[0] == '_' && name[1] == '_'
9362 && ((namelen > 2 + strlen ("_from_thumb")
9363 && strncmp (name + namelen - strlen ("_from_thumb"), "_from_thumb",
9364 strlen ("_from_thumb")) == 0)
9365 || (namelen > 2 + strlen ("_from_arm")
9366 && strncmp (name + namelen - strlen ("_from_arm"), "_from_arm",
9367 strlen ("_from_arm")) == 0)))
9369 char *target_name;
9370 int target_len = namelen - 2;
9371 struct bound_minimal_symbol minsym;
9372 struct objfile *objfile;
9373 struct obj_section *sec;
9375 if (name[namelen - 1] == 'b')
9376 target_len -= strlen ("_from_thumb");
9377 else
9378 target_len -= strlen ("_from_arm");
9380 target_name = alloca (target_len + 1);
9381 memcpy (target_name, name + 2, target_len);
9382 target_name[target_len] = '\0';
9384 sec = find_pc_section (pc);
9385 objfile = (sec == NULL) ? NULL : sec->objfile;
9386 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
9387 if (minsym.minsym != NULL)
9388 return BMSYMBOL_VALUE_ADDRESS (minsym);
9389 else
9390 return 0;
9393 return 0; /* not a stub */
9396 static void
9397 set_arm_command (char *args, int from_tty)
9399 printf_unfiltered (_("\
9400 \"set arm\" must be followed by an appropriate subcommand.\n"));
9401 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
9404 static void
9405 show_arm_command (char *args, int from_tty)
9407 cmd_show_list (showarmcmdlist, from_tty, "");
9410 static void
9411 arm_update_current_architecture (void)
9413 struct gdbarch_info info;
9415 /* If the current architecture is not ARM, we have nothing to do. */
9416 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
9417 return;
9419 /* Update the architecture. */
9420 gdbarch_info_init (&info);
9422 if (!gdbarch_update_p (info))
9423 internal_error (__FILE__, __LINE__, _("could not update architecture"));
9426 static void
9427 set_fp_model_sfunc (char *args, int from_tty,
9428 struct cmd_list_element *c)
9430 enum arm_float_model fp_model;
9432 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
9433 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
9435 arm_fp_model = fp_model;
9436 break;
9439 if (fp_model == ARM_FLOAT_LAST)
9440 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
9441 current_fp_model);
9443 arm_update_current_architecture ();
9446 static void
9447 show_fp_model (struct ui_file *file, int from_tty,
9448 struct cmd_list_element *c, const char *value)
9450 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9452 if (arm_fp_model == ARM_FLOAT_AUTO
9453 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
9454 fprintf_filtered (file, _("\
9455 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
9456 fp_model_strings[tdep->fp_model]);
9457 else
9458 fprintf_filtered (file, _("\
9459 The current ARM floating point model is \"%s\".\n"),
9460 fp_model_strings[arm_fp_model]);
9463 static void
9464 arm_set_abi (char *args, int from_tty,
9465 struct cmd_list_element *c)
9467 enum arm_abi_kind arm_abi;
9469 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
9470 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
9472 arm_abi_global = arm_abi;
9473 break;
9476 if (arm_abi == ARM_ABI_LAST)
9477 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
9478 arm_abi_string);
9480 arm_update_current_architecture ();
9483 static void
9484 arm_show_abi (struct ui_file *file, int from_tty,
9485 struct cmd_list_element *c, const char *value)
9487 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9489 if (arm_abi_global == ARM_ABI_AUTO
9490 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
9491 fprintf_filtered (file, _("\
9492 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
9493 arm_abi_strings[tdep->arm_abi]);
9494 else
9495 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
9496 arm_abi_string);
9499 static void
9500 arm_show_fallback_mode (struct ui_file *file, int from_tty,
9501 struct cmd_list_element *c, const char *value)
9503 fprintf_filtered (file,
9504 _("The current execution mode assumed "
9505 "(when symbols are unavailable) is \"%s\".\n"),
9506 arm_fallback_mode_string);
9509 static void
9510 arm_show_force_mode (struct ui_file *file, int from_tty,
9511 struct cmd_list_element *c, const char *value)
9513 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9515 fprintf_filtered (file,
9516 _("The current execution mode assumed "
9517 "(even when symbols are available) is \"%s\".\n"),
9518 arm_force_mode_string);
9521 /* If the user changes the register disassembly style used for info
9522 register and other commands, we have to also switch the style used
9523 in opcodes for disassembly output. This function is run in the "set
9524 arm disassembly" command, and does that. */
9526 static void
9527 set_disassembly_style_sfunc (char *args, int from_tty,
9528 struct cmd_list_element *c)
9530 set_disassembly_style ();
9533 /* Return the ARM register name corresponding to register I. */
9534 static const char *
9535 arm_register_name (struct gdbarch *gdbarch, int i)
9537 const int num_regs = gdbarch_num_regs (gdbarch);
9539 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
9540 && i >= num_regs && i < num_regs + 32)
9542 static const char *const vfp_pseudo_names[] = {
9543 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
9544 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
9545 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
9546 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
9549 return vfp_pseudo_names[i - num_regs];
9552 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
9553 && i >= num_regs + 32 && i < num_regs + 32 + 16)
9555 static const char *const neon_pseudo_names[] = {
9556 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
9557 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
9560 return neon_pseudo_names[i - num_regs - 32];
9563 if (i >= ARRAY_SIZE (arm_register_names))
9564 /* These registers are only supported on targets which supply
9565 an XML description. */
9566 return "";
9568 return arm_register_names[i];
9571 static void
9572 set_disassembly_style (void)
9574 int current;
9576 /* Find the style that the user wants. */
9577 for (current = 0; current < num_disassembly_options; current++)
9578 if (disassembly_style == valid_disassembly_styles[current])
9579 break;
9580 gdb_assert (current < num_disassembly_options);
9582 /* Synchronize the disassembler. */
9583 set_arm_regname_option (current);
9586 /* Test whether the coff symbol specific value corresponds to a Thumb
9587 function. */
9589 static int
9590 coff_sym_is_thumb (int val)
9592 return (val == C_THUMBEXT
9593 || val == C_THUMBSTAT
9594 || val == C_THUMBEXTFUNC
9595 || val == C_THUMBSTATFUNC
9596 || val == C_THUMBLABEL);
9599 /* arm_coff_make_msymbol_special()
9600 arm_elf_make_msymbol_special()
9602 These functions test whether the COFF or ELF symbol corresponds to
9603 an address in thumb code, and set a "special" bit in a minimal
9604 symbol to indicate that it does. */
9606 static void
9607 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
9609 if (ARM_SYM_BRANCH_TYPE (&((elf_symbol_type *)sym)->internal_elf_sym)
9610 == ST_BRANCH_TO_THUMB)
9611 MSYMBOL_SET_SPECIAL (msym);
9614 static void
9615 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
9617 if (coff_sym_is_thumb (val))
9618 MSYMBOL_SET_SPECIAL (msym);
9621 static void
9622 arm_objfile_data_free (struct objfile *objfile, void *arg)
9624 struct arm_per_objfile *data = arg;
9625 unsigned int i;
9627 for (i = 0; i < objfile->obfd->section_count; i++)
9628 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
9631 static void
9632 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
9633 asymbol *sym)
9635 const char *name = bfd_asymbol_name (sym);
9636 struct arm_per_objfile *data;
9637 VEC(arm_mapping_symbol_s) **map_p;
9638 struct arm_mapping_symbol new_map_sym;
9640 gdb_assert (name[0] == '$');
9641 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
9642 return;
9644 data = objfile_data (objfile, arm_objfile_data_key);
9645 if (data == NULL)
9647 data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
9648 struct arm_per_objfile);
9649 set_objfile_data (objfile, arm_objfile_data_key, data);
9650 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
9651 objfile->obfd->section_count,
9652 VEC(arm_mapping_symbol_s) *);
9654 map_p = &data->section_maps[bfd_get_section (sym)->index];
9656 new_map_sym.value = sym->value;
9657 new_map_sym.type = name[1];
9659 /* Assume that most mapping symbols appear in order of increasing
9660 value. If they were randomly distributed, it would be faster to
9661 always push here and then sort at first use. */
9662 if (!VEC_empty (arm_mapping_symbol_s, *map_p))
9664 struct arm_mapping_symbol *prev_map_sym;
9666 prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
9667 if (prev_map_sym->value >= sym->value)
9669 unsigned int idx;
9670 idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
9671 arm_compare_mapping_symbols);
9672 VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
9673 return;
9677 VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
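/* For illustration, the ARM ELF convention is that "$a" marks the start
   of a run of ARM code, "$t" the start of Thumb code and "$d" the start
   of literal data.  A section laid out as

       $a  0x0000   ARM code
       $d  0x0020   literal pool
       $t  0x0028   Thumb code

   therefore yields the sorted vector { {0x0000,'a'}, {0x0020,'d'},
   {0x0028,'t'} }, which the mapping-symbol lookup code earlier in this
   file can binary-search when classifying an address.  */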
9680 static void
9681 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
9683 struct gdbarch *gdbarch = get_regcache_arch (regcache);
9684 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
9686 /* If necessary, set the T bit. */
9687 if (arm_apcs_32)
9689 ULONGEST val, t_bit;
9690 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
9691 t_bit = arm_psr_thumb_bit (gdbarch);
9692 if (arm_pc_is_thumb (gdbarch, pc))
9693 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9694 val | t_bit);
9695 else
9696 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9697 val & ~t_bit);
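/* For illustration, arm_psr_thumb_bit returns CPSR bit 5 (mask 0x20) on
   A/R-profile cores and XPSR bit 24 on M-profile cores, so with the
   default "set arm apcs32 on" the code above effectively does

       status |=  t_bit;   when the new PC is in Thumb code
       status &= ~t_bit;   when the new PC is in ARM code

   leaving the remaining status bits untouched.  */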
9701 /* Read the contents of a NEON quad register, by reading from two
9702 double registers. This is used to implement the quad pseudo
9703 registers, and for argument passing in case the quad registers are
9704 missing; vectors are passed in quad registers when using the VFP
9705 ABI, even if a NEON unit is not present. REGNUM is the index of
9706 the quad register, in [0, 15]. */
9708 static enum register_status
9709 arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
9710 int regnum, gdb_byte *buf)
9712 char name_buf[4];
9713 gdb_byte reg_buf[8];
9714 int offset, double_regnum;
9715 enum register_status status;
9717 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9718 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9719 strlen (name_buf));
9721 /* d0 is always the least significant half of q0. */
9722 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9723 offset = 8;
9724 else
9725 offset = 0;
9727 status = regcache_raw_read (regcache, double_regnum, reg_buf);
9728 if (status != REG_VALID)
9729 return status;
9730 memcpy (buf + offset, reg_buf, 8);
9732 offset = 8 - offset;
9733 status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
9734 if (status != REG_VALID)
9735 return status;
9736 memcpy (buf + offset, reg_buf, 8);
9738 return REG_VALID;
9741 static enum register_status
9742 arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
9743 int regnum, gdb_byte *buf)
9745 const int num_regs = gdbarch_num_regs (gdbarch);
9746 char name_buf[4];
9747 gdb_byte reg_buf[8];
9748 int offset, double_regnum;
9750 gdb_assert (regnum >= num_regs);
9751 regnum -= num_regs;
9753 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
9754 /* Quad-precision register. */
9755 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
9756 else
9758 enum register_status status;
9760 /* Single-precision register. */
9761 gdb_assert (regnum < 32);
9763 /* s0 is always the least significant half of d0. */
9764 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9765 offset = (regnum & 1) ? 0 : 4;
9766 else
9767 offset = (regnum & 1) ? 4 : 0;
9769 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9770 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9771 strlen (name_buf));
9773 status = regcache_raw_read (regcache, double_regnum, reg_buf);
9774 if (status == REG_VALID)
9775 memcpy (buf, reg_buf + offset, 4);
9776 return status;
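/* For illustration, the VFP/NEON register banks alias each other, which
   is what the pseudo register read/write code here relies on:

       q0 = d1:d0 = s3:s2:s1:s0
       q1 = d3:d2 = s7:s6:s5:s4
       ...

   so sN is fetched by reading d(N/2) and taking the low half for even N
   or the high half for odd N, and qN by concatenating d(2N) and d(2N+1);
   on big-endian targets the two halves are simply swapped.  */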
9780 /* Store the contents of BUF to a NEON quad register, by writing to
9781 two double registers. This is used to implement the quad pseudo
9782 registers, and for argument passing in case the quad registers are
9783 missing; vectors are passed in quad registers when using the VFP
9784 ABI, even if a NEON unit is not present. REGNUM is the index
9785 of the quad register, in [0, 15]. */
9787 static void
9788 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
9789 int regnum, const gdb_byte *buf)
9791 char name_buf[4];
9792 int offset, double_regnum;
9794 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9795 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9796 strlen (name_buf));
9798 /* d0 is always the least significant half of q0. */
9799 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9800 offset = 8;
9801 else
9802 offset = 0;
9804 regcache_raw_write (regcache, double_regnum, buf + offset);
9805 offset = 8 - offset;
9806 regcache_raw_write (regcache, double_regnum + 1, buf + offset);
9809 static void
9810 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
9811 int regnum, const gdb_byte *buf)
9813 const int num_regs = gdbarch_num_regs (gdbarch);
9814 char name_buf[4];
9815 gdb_byte reg_buf[8];
9816 int offset, double_regnum;
9818 gdb_assert (regnum >= num_regs);
9819 regnum -= num_regs;
9821 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
9822 /* Quad-precision register. */
9823 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
9824 else
9826 /* Single-precision register. */
9827 gdb_assert (regnum < 32);
9829 /* s0 is always the least significant half of d0. */
9830 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9831 offset = (regnum & 1) ? 0 : 4;
9832 else
9833 offset = (regnum & 1) ? 4 : 0;
9835 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
9836 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9837 strlen (name_buf));
9839 regcache_raw_read (regcache, double_regnum, reg_buf);
9840 memcpy (reg_buf + offset, buf, 4);
9841 regcache_raw_write (regcache, double_regnum, reg_buf);
9845 static struct value *
9846 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
9848 const int *reg_p = baton;
9849 return value_of_register (*reg_p, frame);
9852 static enum gdb_osabi
9853 arm_elf_osabi_sniffer (bfd *abfd)
9855 unsigned int elfosabi;
9856 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
9858 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
9860 if (elfosabi == ELFOSABI_ARM)
9861 /* GNU tools use this value. Check note sections in this case,
9862 as well. */
9863 bfd_map_over_sections (abfd,
9864 generic_elf_osabi_sniff_abi_tag_sections,
9865 &osabi);
9867 /* Anything else will be handled by the generic ELF sniffer. */
9868 return osabi;
9871 static int
9872 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
9873 struct reggroup *group)
9875 /* The FPS register's type is int, but it belongs to float_reggroup.  Besides
9876 this, the FPS register belongs to save_reggroup, restore_reggroup, and
9877 all_reggroup, of course.  */
9878 if (regnum == ARM_FPS_REGNUM)
9879 return (group == float_reggroup
9880 || group == save_reggroup
9881 || group == restore_reggroup
9882 || group == all_reggroup);
9883 else
9884 return default_register_reggroup_p (gdbarch, regnum, group);
9888 /* For backward-compatibility we allow two 'g' packet lengths with
9889 the remote protocol depending on whether FPA registers are
9890 supplied. M-profile targets do not have FPA registers, but some
9891 stubs already exist in the wild which use a 'g' packet which
9892 supplies them albeit with dummy values. The packet format which
9893 includes FPA registers should be considered deprecated for
9894 M-profile targets. */
9896 static void
9897 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
9899 if (gdbarch_tdep (gdbarch)->is_m)
9901 /* If we know from the executable this is an M-profile target,
9902 cater for remote targets whose register set layout is the
9903 same as the FPA layout. */
9904 register_remote_g_packet_guess (gdbarch,
9905 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
9906 (16 * INT_REGISTER_SIZE)
9907 + (8 * FP_REGISTER_SIZE)
9908 + (2 * INT_REGISTER_SIZE),
9909 tdesc_arm_with_m_fpa_layout);
9911 /* The regular M-profile layout. */
9912 register_remote_g_packet_guess (gdbarch,
9913 /* r0-r12,sp,lr,pc; xpsr */
9914 (16 * INT_REGISTER_SIZE)
9915 + INT_REGISTER_SIZE,
9916 tdesc_arm_with_m);
9918 /* M-profile plus M4F VFP. */
9919 register_remote_g_packet_guess (gdbarch,
9920 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
9921 (16 * INT_REGISTER_SIZE)
9922 + (16 * VFP_REGISTER_SIZE)
9923 + (2 * INT_REGISTER_SIZE),
9924 tdesc_arm_with_m_vfp_d16);
9927 /* Otherwise we don't have a useful guess. */
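/* For illustration, assuming the usual INT_REGISTER_SIZE of 4,
   FP_REGISTER_SIZE of 12 and VFP_REGISTER_SIZE of 8 bytes, the three
   guesses above correspond to 'g' packets describing 168, 68 and 200
   bytes of register data respectively; a stub whose reply matches one of
   those lengths is paired with the corresponding target description.  */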
9931 /* Initialize the current architecture based on INFO. If possible,
9932 re-use an architecture from ARCHES, which is a list of
9933 architectures already created during this debugging session.
9935 Called e.g. at program startup, when reading a core file, and when
9936 reading a binary file. */
9938 static struct gdbarch *
9939 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
9941 struct gdbarch_tdep *tdep;
9942 struct gdbarch *gdbarch;
9943 struct gdbarch_list *best_arch;
9944 enum arm_abi_kind arm_abi = arm_abi_global;
9945 enum arm_float_model fp_model = arm_fp_model;
9946 struct tdesc_arch_data *tdesc_data = NULL;
9947 int i, is_m = 0;
9948 int have_vfp_registers = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
9949 int have_neon = 0;
9950 int have_fpa_registers = 1;
9951 const struct target_desc *tdesc = info.target_desc;
9953 /* If we have an object to base this architecture on, try to determine
9954 its ABI. */
9956 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
9958 int ei_osabi, e_flags;
9960 switch (bfd_get_flavour (info.abfd))
9962 case bfd_target_aout_flavour:
9963 /* Assume it's an old APCS-style ABI. */
9964 arm_abi = ARM_ABI_APCS;
9965 break;
9967 case bfd_target_coff_flavour:
9968 /* Assume it's an old APCS-style ABI. */
9969 /* XXX WinCE? */
9970 arm_abi = ARM_ABI_APCS;
9971 break;
9973 case bfd_target_elf_flavour:
9974 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
9975 e_flags = elf_elfheader (info.abfd)->e_flags;
9977 if (ei_osabi == ELFOSABI_ARM)
9979 /* GNU tools used to use this value, but do not for EABI
9980 objects. There's nowhere to tag an EABI version
9981 anyway, so assume APCS. */
9982 arm_abi = ARM_ABI_APCS;
9984 else if (ei_osabi == ELFOSABI_NONE)
9986 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
9987 int attr_arch, attr_profile;
9989 switch (eabi_ver)
9991 case EF_ARM_EABI_UNKNOWN:
9992 /* Assume GNU tools. */
9993 arm_abi = ARM_ABI_APCS;
9994 break;
9996 case EF_ARM_EABI_VER4:
9997 case EF_ARM_EABI_VER5:
9998 arm_abi = ARM_ABI_AAPCS;
9999 /* EABI binaries default to VFP float ordering.
10000 They may also contain build attributes that can
10001 be used to identify if the VFP argument-passing
10002 ABI is in use. */
10003 if (fp_model == ARM_FLOAT_AUTO)
10005 #ifdef HAVE_ELF
10006 switch (bfd_elf_get_obj_attr_int (info.abfd,
10007 OBJ_ATTR_PROC,
10008 Tag_ABI_VFP_args))
10010 case 0:
10011 /* "The user intended FP parameter/result
10012 passing to conform to AAPCS, base
10013 variant". */
10014 fp_model = ARM_FLOAT_SOFT_VFP;
10015 break;
10016 case 1:
10017 /* "The user intended FP parameter/result
10018 passing to conform to AAPCS, VFP
10019 variant". */
10020 fp_model = ARM_FLOAT_VFP;
10021 break;
10022 case 2:
10023 /* "The user intended FP parameter/result
10024 passing to conform to tool chain-specific
10025 conventions" - we don't know any such
10026 conventions, so leave it as "auto". */
10027 break;
10028 default:
10029 /* Attribute value not mentioned in the
10030 October 2008 ABI, so leave it as
10031 "auto". */
10032 break;
10034 #else
10035 fp_model = ARM_FLOAT_SOFT_VFP;
10036 #endif
10038 break;
10040 default:
10041 /* Leave it as "auto". */
10042 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
10043 break;
10046 #ifdef HAVE_ELF
10047 /* Detect M-profile programs. This only works if the
10048 executable file includes build attributes; GCC does
10049 copy them to the executable, but e.g. RealView does
10050 not. */
10051 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10052 Tag_CPU_arch);
10053 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
10054 OBJ_ATTR_PROC,
10055 Tag_CPU_arch_profile);
10056 /* GCC specifies the profile for v6-M; RealView only
10057 specifies the profile for architectures starting with
10058 V7 (as opposed to architectures with a tag
10059 numerically greater than TAG_CPU_ARCH_V7). */
10060 if (!tdesc_has_registers (tdesc)
10061 && (attr_arch == TAG_CPU_ARCH_V6_M
10062 || attr_arch == TAG_CPU_ARCH_V6S_M
10063 || attr_profile == 'M'))
10064 is_m = 1;
10065 #endif
10068 if (fp_model == ARM_FLOAT_AUTO)
10070 int e_flags = elf_elfheader (info.abfd)->e_flags;
10072 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
10074 case 0:
10075 /* Leave it as "auto". Strictly speaking this case
10076 means FPA, but almost nobody uses that now, and
10077 many toolchains fail to set the appropriate bits
10078 for the floating-point model they use. */
10079 break;
10080 case EF_ARM_SOFT_FLOAT:
10081 fp_model = ARM_FLOAT_SOFT_FPA;
10082 break;
10083 case EF_ARM_VFP_FLOAT:
10084 fp_model = ARM_FLOAT_VFP;
10085 break;
10086 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
10087 fp_model = ARM_FLOAT_SOFT_VFP;
10088 break;
10092 if (e_flags & EF_ARM_BE8)
10093 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
10095 break;
10097 default:
10098 /* Leave it as "auto". */
10099 break;
10103 /* Check any target description for validity. */
10104 if (tdesc_has_registers (tdesc))
10106 /* For most registers we require GDB's default names; but also allow
10107 the numeric names for sp / lr / pc, as a convenience. */
10108 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
10109 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
10110 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
10112 const struct tdesc_feature *feature;
10113 int valid_p;
10115 feature = tdesc_find_feature (tdesc,
10116 "org.gnu.gdb.arm.core");
10117 if (feature == NULL)
10119 feature = tdesc_find_feature (tdesc,
10120 "org.gnu.gdb.arm.m-profile");
10121 if (feature == NULL)
10122 return NULL;
10123 else
10124 is_m = 1;
10127 tdesc_data = tdesc_data_alloc ();
10129 valid_p = 1;
10130 for (i = 0; i < ARM_SP_REGNUM; i++)
10131 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
10132 arm_register_names[i]);
10133 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10134 ARM_SP_REGNUM,
10135 arm_sp_names);
10136 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10137 ARM_LR_REGNUM,
10138 arm_lr_names);
10139 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10140 ARM_PC_REGNUM,
10141 arm_pc_names);
10142 if (is_m)
10143 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10144 ARM_PS_REGNUM, "xpsr");
10145 else
10146 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10147 ARM_PS_REGNUM, "cpsr");
10149 if (!valid_p)
10151 tdesc_data_cleanup (tdesc_data);
10152 return NULL;
10155 feature = tdesc_find_feature (tdesc,
10156 "org.gnu.gdb.arm.fpa");
10157 if (feature != NULL)
10159 valid_p = 1;
10160 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
10161 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
10162 arm_register_names[i]);
10163 if (!valid_p)
10165 tdesc_data_cleanup (tdesc_data);
10166 return NULL;
10169 else
10170 have_fpa_registers = 0;
10172 feature = tdesc_find_feature (tdesc,
10173 "org.gnu.gdb.xscale.iwmmxt");
10174 if (feature != NULL)
10176 static const char *const iwmmxt_names[] = {
10177 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
10178 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
10179 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
10180 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
10183 valid_p = 1;
10184 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
10185 valid_p
10186 &= tdesc_numbered_register (feature, tdesc_data, i,
10187 iwmmxt_names[i - ARM_WR0_REGNUM]);
10189 /* Check for the control registers, but do not fail if they
10190 are missing. */
10191 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
10192 tdesc_numbered_register (feature, tdesc_data, i,
10193 iwmmxt_names[i - ARM_WR0_REGNUM]);
10195 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
10196 valid_p
10197 &= tdesc_numbered_register (feature, tdesc_data, i,
10198 iwmmxt_names[i - ARM_WR0_REGNUM]);
10200 if (!valid_p)
10202 tdesc_data_cleanup (tdesc_data);
10203 return NULL;
10207 /* If we have a VFP unit, check whether the single precision registers
10208 are present. If not, then we will synthesize them as pseudo
10209 registers. */
10210 feature = tdesc_find_feature (tdesc,
10211 "org.gnu.gdb.arm.vfp");
10212 if (feature != NULL)
10214 static const char *const vfp_double_names[] = {
10215 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
10216 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
10217 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
10218 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
10221 /* Require the double precision registers. There must be either
10222 16 or 32. */
10223 valid_p = 1;
10224 for (i = 0; i < 32; i++)
10226 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10227 ARM_D0_REGNUM + i,
10228 vfp_double_names[i]);
10229 if (!valid_p)
10230 break;
10232 if (!valid_p && i == 16)
10233 valid_p = 1;
10235 /* Also require FPSCR. */
10236 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10237 ARM_FPSCR_REGNUM, "fpscr");
10238 if (!valid_p)
10240 tdesc_data_cleanup (tdesc_data);
10241 return NULL;
10244 if (tdesc_unnumbered_register (feature, "s0") == 0)
10245 have_vfp_pseudos = 1;
10247 have_vfp_registers = 1;
10249 /* If we have VFP, also check for NEON. The architecture allows
10250 NEON without VFP (integer vector operations only), but GDB
10251 does not support that. */
10252 feature = tdesc_find_feature (tdesc,
10253 "org.gnu.gdb.arm.neon");
10254 if (feature != NULL)
10256 /* NEON requires 32 double-precision registers. */
10257 if (i != 32)
10259 tdesc_data_cleanup (tdesc_data);
10260 return NULL;
10263 /* If there are quad registers defined by the stub, use
10264 their type; otherwise (normally) provide them with
10265 the default type. */
10266 if (tdesc_unnumbered_register (feature, "q0") == 0)
10267 have_neon_pseudos = 1;
10269 have_neon = 1;
10274 /* If there is already a candidate, use it. */
10275 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
10276 best_arch != NULL;
10277 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
10279 if (arm_abi != ARM_ABI_AUTO
10280 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
10281 continue;
10283 if (fp_model != ARM_FLOAT_AUTO
10284 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
10285 continue;
10287 /* There are various other properties in tdep that we do not
10288 need to check here: those derived from a target description,
10289 since gdbarches with a different target description are
10290 automatically disqualified. */
10292 /* Do check is_m, though, since it might come from the binary. */
10293 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
10294 continue;
10296 /* Found a match. */
10297 break;
10300 if (best_arch != NULL)
10302 if (tdesc_data != NULL)
10303 tdesc_data_cleanup (tdesc_data);
10304 return best_arch->gdbarch;
10307 tdep = xcalloc (1, sizeof (struct gdbarch_tdep));
10308 gdbarch = gdbarch_alloc (&info, tdep);
10310 /* Record additional information about the architecture we are defining.
10311 These are gdbarch discriminators, like the OSABI. */
10312 tdep->arm_abi = arm_abi;
10313 tdep->fp_model = fp_model;
10314 tdep->is_m = is_m;
10315 tdep->have_fpa_registers = have_fpa_registers;
10316 tdep->have_vfp_registers = have_vfp_registers;
10317 tdep->have_vfp_pseudos = have_vfp_pseudos;
10318 tdep->have_neon_pseudos = have_neon_pseudos;
10319 tdep->have_neon = have_neon;
10321 arm_register_g_packet_guesses (gdbarch);
10323 /* Breakpoints. */
10324 switch (info.byte_order_for_code)
10326 case BFD_ENDIAN_BIG:
10327 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
10328 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
10329 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
10330 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
10332 break;
10334 case BFD_ENDIAN_LITTLE:
10335 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
10336 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
10337 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
10338 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
10340 break;
10342 default:
10343 internal_error (__FILE__, __LINE__,
10344 _("arm_gdbarch_init: bad byte order for float format"));
10347 /* On ARM targets char defaults to unsigned. */
10348 set_gdbarch_char_signed (gdbarch, 0);
10350 /* Note: for displaced stepping, this includes the breakpoint, and one word
10351 of additional scratch space.  This setting isn't used for anything besides
10352 displaced stepping at present.  */
10353 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
10355 /* This should be low enough for everything. */
10356 tdep->lowest_pc = 0x20;
10357 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
10359 /* The default, for both APCS and AAPCS, is to return small
10360 structures in registers. */
10361 tdep->struct_return = reg_struct_return;
10363 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
10364 set_gdbarch_frame_align (gdbarch, arm_frame_align);
10366 set_gdbarch_write_pc (gdbarch, arm_write_pc);
10368 /* Frame handling. */
10369 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
10370 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
10371 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
10373 frame_base_set_default (gdbarch, &arm_normal_base);
10375 /* Address manipulation. */
10376 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
10378 /* Advance PC across function entry code. */
10379 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
10381 /* Detect whether PC is in function epilogue. */
10382 set_gdbarch_in_function_epilogue_p (gdbarch, arm_in_function_epilogue_p);
10384 /* Skip trampolines. */
10385 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
10387 /* The stack grows downward. */
10388 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
10390 /* Breakpoint manipulation. */
10391 set_gdbarch_breakpoint_from_pc (gdbarch, arm_breakpoint_from_pc);
10392 set_gdbarch_remote_breakpoint_from_pc (gdbarch,
10393 arm_remote_breakpoint_from_pc);
10395 /* Information about registers, etc. */
10396 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
10397 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
10398 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
10399 set_gdbarch_register_type (gdbarch, arm_register_type);
10400 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
10402 /* This "info float" is FPA-specific. Use the generic version if we
10403 do not have FPA. */
10404 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
10405 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
10407 /* Internal <-> external register number maps. */
10408 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
10409 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
10411 set_gdbarch_register_name (gdbarch, arm_register_name);
10413 /* Returning results. */
10414 set_gdbarch_return_value (gdbarch, arm_return_value);
10416 /* Disassembly. */
10417 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
10419 /* Minsymbol frobbing. */
10420 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
10421 set_gdbarch_coff_make_msymbol_special (gdbarch,
10422 arm_coff_make_msymbol_special);
10423 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
10425 /* Thumb-2 IT block support. */
10426 set_gdbarch_adjust_breakpoint_address (gdbarch,
10427 arm_adjust_breakpoint_address);
10429 /* Virtual tables. */
10430 set_gdbarch_vbit_in_delta (gdbarch, 1);
10432 /* Hook in the ABI-specific overrides, if they have been registered. */
10433 gdbarch_init_osabi (info, gdbarch);
10435 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
10437 /* Add some default predicates. */
10438 if (is_m)
10439 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
10440 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
10441 dwarf2_append_unwinders (gdbarch);
10442 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
10443 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
10445 /* Now we have tuned the configuration, set a few final things,
10446 based on what the OS ABI has told us. */
10448 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
10449 binaries are always marked. */
10450 if (tdep->arm_abi == ARM_ABI_AUTO)
10451 tdep->arm_abi = ARM_ABI_APCS;
10453 /* Watchpoints are not steppable. */
10454 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
10456 /* We used to default to FPA for generic ARM, but almost nobody
10457 uses that now, and we now provide a way for the user to force
10458 the model. So default to the most useful variant. */
10459 if (tdep->fp_model == ARM_FLOAT_AUTO)
10460 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
10462 if (tdep->jb_pc >= 0)
10463 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
10465 /* Floating point sizes and format. */
10466 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
10467 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
10469 set_gdbarch_double_format
10470 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10471 set_gdbarch_long_double_format
10472 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10474 else
10476 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
10477 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
10480 if (have_vfp_pseudos)
10482 /* NOTE: These are the only pseudo registers used by
10483 the ARM target at the moment. If more are added, a
10484 little more care in numbering will be needed. */
10486 int num_pseudos = 32;
10487 if (have_neon_pseudos)
10488 num_pseudos += 16;
10489 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
10490 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
10491 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
10494 if (tdesc_data)
10496 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
10498 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
10500 /* Override tdesc_register_type to adjust the types of VFP
10501 registers for NEON. */
10502 set_gdbarch_register_type (gdbarch, arm_register_type);
10505 /* Add standard register aliases. We add aliases even for those
10506 names which are used by the current architecture - it's simpler,
10507 and does no harm, since nothing ever lists user registers. */
10508 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
10509 user_reg_add (gdbarch, arm_register_aliases[i].name,
10510 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
10512 return gdbarch;
10515 static void
10516 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
10518 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
10520 if (tdep == NULL)
10521 return;
10523 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
10524 (unsigned long) tdep->lowest_pc);
10527 extern initialize_file_ftype _initialize_arm_tdep; /* -Wmissing-prototypes */
10529 void
10530 _initialize_arm_tdep (void)
10532 struct ui_file *stb;
10533 long length;
10534 struct cmd_list_element *new_set, *new_show;
10535 const char *setname;
10536 const char *setdesc;
10537 const char *const *regnames;
10538 int numregs, i, j;
10539 static char *helptext;
10540 char regdesc[1024], *rdptr = regdesc;
10541 size_t rest = sizeof (regdesc);
10543 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
10545 arm_objfile_data_key
10546 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
10548 /* Add ourselves to objfile event chain. */
10549 observer_attach_new_objfile (arm_exidx_new_objfile);
10550 arm_exidx_data_key
10551 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
10553 /* Register an ELF OS ABI sniffer for ARM binaries. */
10554 gdbarch_register_osabi_sniffer (bfd_arch_arm,
10555 bfd_target_elf_flavour,
10556 arm_elf_osabi_sniffer);
10558 /* Initialize the standard target descriptions. */
10559 initialize_tdesc_arm_with_m ();
10560 initialize_tdesc_arm_with_m_fpa_layout ();
10561 initialize_tdesc_arm_with_m_vfp_d16 ();
10562 initialize_tdesc_arm_with_iwmmxt ();
10563 initialize_tdesc_arm_with_vfpv2 ();
10564 initialize_tdesc_arm_with_vfpv3 ();
10565 initialize_tdesc_arm_with_neon ();
10567 /* Get the number of possible sets of register names defined in opcodes. */
10568 num_disassembly_options = get_arm_regname_num_options ();
10570 /* Add root prefix command for all "set arm"/"show arm" commands. */
10571 add_prefix_cmd ("arm", no_class, set_arm_command,
10572 _("Various ARM-specific commands."),
10573 &setarmcmdlist, "set arm ", 0, &setlist);
10575 add_prefix_cmd ("arm", no_class, show_arm_command,
10576 _("Various ARM-specific commands."),
10577 &showarmcmdlist, "show arm ", 0, &showlist);
10579 /* Sync the opcode insn printer with our register viewer. */
10580 parse_arm_disassembler_option ("reg-names-std");
10582 /* Initialize the array that will be passed to
10583 add_setshow_enum_cmd(). */
10584 valid_disassembly_styles
10585 = xmalloc ((num_disassembly_options + 1) * sizeof (char *));
10586 for (i = 0; i < num_disassembly_options; i++)
10588 numregs = get_arm_regnames (i, &setname, &setdesc, &regnames);
10589 valid_disassembly_styles[i] = setname;
10590 length = snprintf (rdptr, rest, "%s - %s\n", setname, setdesc);
10591 rdptr += length;
10592 rest -= length;
10593 /* When we find the default names, tell the disassembler to use
10594 them. */
10595 if (!strcmp (setname, "std"))
10597 disassembly_style = setname;
10598 set_arm_regname_option (i);
10601 /* Mark the end of valid options. */
10602 valid_disassembly_styles[num_disassembly_options] = NULL;
10604 /* Create the help text. */
10605 stb = mem_fileopen ();
10606 fprintf_unfiltered (stb, "%s%s%s",
10607 _("The valid values are:\n"),
10608 regdesc,
10609 _("The default is \"std\"."));
10610 helptext = ui_file_xstrdup (stb, NULL);
10611 ui_file_delete (stb);
10613 add_setshow_enum_cmd("disassembler", no_class,
10614 valid_disassembly_styles, &disassembly_style,
10615 _("Set the disassembly style."),
10616 _("Show the disassembly style."),
10617 helptext,
10618 set_disassembly_style_sfunc,
10619 NULL, /* FIXME: i18n: The disassembly style is
10620 \"%s\". */
10621 &setarmcmdlist, &showarmcmdlist);
10623 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
10624 _("Set usage of ARM 32-bit mode."),
10625 _("Show usage of ARM 32-bit mode."),
10626 _("When off, a 26-bit PC will be used."),
10627 NULL,
10628 NULL, /* FIXME: i18n: Usage of ARM 32-bit
10629 mode is %s. */
10630 &setarmcmdlist, &showarmcmdlist);
10632 /* Add a command to allow the user to force the FPU model. */
10633 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
10634 _("Set the floating point type."),
10635 _("Show the floating point type."),
10636 _("auto - Determine the FP typefrom the OS-ABI.\n\
10637 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
10638 fpa - FPA co-processor (GCC compiled).\n\
10639 softvfp - Software FP with pure-endian doubles.\n\
10640 vfp - VFP co-processor."),
10641 set_fp_model_sfunc, show_fp_model,
10642 &setarmcmdlist, &showarmcmdlist);
10644 /* Add a command to allow the user to force the ABI. */
10645 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
10646 _("Set the ABI."),
10647 _("Show the ABI."),
10648 NULL, arm_set_abi, arm_show_abi,
10649 &setarmcmdlist, &showarmcmdlist);
10651 /* Add two commands to allow the user to force the assumed
10652 execution mode. */
10653 add_setshow_enum_cmd ("fallback-mode", class_support,
10654 arm_mode_strings, &arm_fallback_mode_string,
10655 _("Set the mode assumed when symbols are unavailable."),
10656 _("Show the mode assumed when symbols are unavailable."),
10657 NULL, NULL, arm_show_fallback_mode,
10658 &setarmcmdlist, &showarmcmdlist);
10659 add_setshow_enum_cmd ("force-mode", class_support,
10660 arm_mode_strings, &arm_force_mode_string,
10661 _("Set the mode assumed even when symbols are available."),
10662 _("Show the mode assumed even when symbols are available."),
10663 NULL, NULL, arm_show_force_mode,
10664 &setarmcmdlist, &showarmcmdlist);
10666 /* Debugging flag. */
10667 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
10668 _("Set ARM debugging."),
10669 _("Show ARM debugging."),
10670 _("When on, arm-specific debugging is enabled."),
10671 NULL,
10672 NULL, /* FIXME: i18n: "ARM debugging is %s".  */
10673 &setdebuglist, &showdebuglist);
10676 /* ARM-reversible process record data structures. */
10678 #define ARM_INSN_SIZE_BYTES 4
10679 #define THUMB_INSN_SIZE_BYTES 2
10680 #define THUMB2_INSN_SIZE_BYTES 4
10683 /* Position of the bit within a 32-bit ARM instruction
10684 that defines whether the instruction is a load or store. */
10685 #define INSN_S_L_BIT_NUM 20
10687 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
10688 do \
10690 unsigned int reg_len = LENGTH; \
10691 if (reg_len) \
10693 REGS = XNEWVEC (uint32_t, reg_len); \
10694 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
10697 while (0)
10699 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
10700 do \
10702 unsigned int mem_len = LENGTH; \
10703 if (mem_len) \
10705 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
10706 memcpy(&MEMS->len, &RECORD_BUF[0], \
10707 sizeof(struct arm_mem_r) * LENGTH); \
10710 while (0)
10712 /* Checks whether the insn is already recorded or yet to be decoded (boolean expression). */
10713 #define INSN_RECORDED(ARM_RECORD) \
10714 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
10716 /* ARM memory record structure. */
10717 struct arm_mem_r
10719 uint32_t len; /* Record length. */
10720 uint32_t addr; /* Memory address. */
10723 /* An ARM instruction record contains the opcode and execution state
10724 of the current insn (before entry to decode_insn ()), and the list
10725 of to-be-modified registers and memory blocks (on return from
10726 decode_insn ()). */
10728 typedef struct insn_decode_record_t
10730 struct gdbarch *gdbarch;
10731 struct regcache *regcache;
10732 CORE_ADDR this_addr; /* Address of the insn being decoded. */
10733 uint32_t arm_insn; /* Should accommodate thumb. */
10734 uint32_t cond; /* Condition code. */
10735 uint32_t opcode; /* Insn opcode. */
10736 uint32_t decode; /* Insn decode bits. */
10737 uint32_t mem_rec_count; /* No of mem records. */
10738 uint32_t reg_rec_count; /* No of reg records. */
10739 uint32_t *arm_regs; /* Registers to be saved for this record. */
10740 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
10741 } insn_decode_record;
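/* For illustration, a decode routine below first fills local
   record_buf[] / record_buf_mem[] arrays, e.g. for an insn that
   clobbers r3 and the flags and stores a word at ADDR:

       record_buf[0] = 3;
       record_buf[1] = ARM_PS_REGNUM;
       arm_insn_r->reg_rec_count = 2;
       record_buf_mem[0] = 4;      <- length in bytes
       record_buf_mem[1] = ADDR;   <- address
       arm_insn_r->mem_rec_count = 1;

   and then REG_ALLOC / MEM_ALLOC copy those arrays into the arm_regs and
   arm_mems members of the record.  */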
10744 /* Checks ARM SBZ and SBO mandatory fields. */
10746 static int
10747 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
10749 uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));
10751 if (!len)
10752 return 1;
10754 if (!sbo)
10755 ones = ~ones;
10757 while (ones)
10759 if (!(ones & sbo))
10761 return 0;
10763 ones = ones >> 1;
10765 return 1;
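/* For illustration, BIT_NUM above is 1-based: sbo_sbz (insn, 9, 12, 1)
   checks that the 12-bit field occupying bits 8..19 of INSN is all ones
   ("should be one"), while passing 0 for SBO requests an all-zero
   ("should be zero") field instead.  */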
10768 enum arm_record_result
10770 ARM_RECORD_SUCCESS = 0,
10771 ARM_RECORD_FAILURE = 1
10774 typedef enum
10776 ARM_RECORD_STRH=1,
10777 ARM_RECORD_STRD
10778 } arm_record_strx_t;
10780 typedef enum
10782 ARM_RECORD=1,
10783 THUMB_RECORD,
10784 THUMB2_RECORD
10785 } record_type_t;
10788 static int
10789 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
10790 uint32_t *record_buf_mem, arm_record_strx_t str_type)
10793 struct regcache *reg_cache = arm_insn_r->regcache;
10794 ULONGEST u_regval[2]= {0};
10796 uint32_t reg_src1 = 0, reg_src2 = 0;
10797 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
10798 uint32_t opcode1 = 0;
10800 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10801 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10802 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10805 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10807 /* 1) Handle misc store, immediate offset. */
10808 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10809 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10810 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10811 regcache_raw_read_unsigned (reg_cache, reg_src1,
10812 &u_regval[0]);
10813 if (ARM_PC_REGNUM == reg_src1)
10815 /* R15 was used as Rn, hence the value is the current PC+8. */
10816 u_regval[0] = u_regval[0] + 8;
10818 offset_8 = (immed_high << 4) | immed_low;
10819 /* Calculate target store address. */
10820 if (14 == arm_insn_r->opcode)
10822 tgt_mem_addr = u_regval[0] + offset_8;
10824 else
10826 tgt_mem_addr = u_regval[0] - offset_8;
10828 if (ARM_RECORD_STRH == str_type)
10830 record_buf_mem[0] = 2;
10831 record_buf_mem[1] = tgt_mem_addr;
10832 arm_insn_r->mem_rec_count = 1;
10834 else if (ARM_RECORD_STRD == str_type)
10836 record_buf_mem[0] = 4;
10837 record_buf_mem[1] = tgt_mem_addr;
10838 record_buf_mem[2] = 4;
10839 record_buf_mem[3] = tgt_mem_addr + 4;
10840 arm_insn_r->mem_rec_count = 2;
10843 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
10845 /* 2) Store, register offset. */
10846 /* Get Rm. */
10847 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10848 /* Get Rn. */
10849 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10850 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10851 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10852 if (15 == reg_src2)
10854 /* R15 was used as Rn (read into u_regval[1] above), hence current PC+8. */
10855 u_regval[1] = u_regval[1] + 8;
10857 /* Calculate target store address, Rn +/- Rm, register offset. */
10858 if (12 == arm_insn_r->opcode)
10860 tgt_mem_addr = u_regval[0] + u_regval[1];
10862 else
10864 tgt_mem_addr = u_regval[1] - u_regval[0];
10866 if (ARM_RECORD_STRH == str_type)
10868 record_buf_mem[0] = 2;
10869 record_buf_mem[1] = tgt_mem_addr;
10870 arm_insn_r->mem_rec_count = 1;
10872 else if (ARM_RECORD_STRD == str_type)
10874 record_buf_mem[0] = 4;
10875 record_buf_mem[1] = tgt_mem_addr;
10876 record_buf_mem[2] = 4;
10877 record_buf_mem[3] = tgt_mem_addr + 4;
10878 arm_insn_r->mem_rec_count = 2;
10881 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10882 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10884 /* 3) Store, immediate pre-indexed. */
10885 /* 5) Store, immediate post-indexed. */
10886 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10887 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10888 offset_8 = (immed_high << 4) | immed_low;
10889 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10890 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10891 /* Calculate target store address, Rn +/- offset_8, immediate offset. */
10892 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10894 tgt_mem_addr = u_regval[0] + offset_8;
10896 else
10898 tgt_mem_addr = u_regval[0] - offset_8;
10900 if (ARM_RECORD_STRH == str_type)
10902 record_buf_mem[0] = 2;
10903 record_buf_mem[1] = tgt_mem_addr;
10904 arm_insn_r->mem_rec_count = 1;
10906 else if (ARM_RECORD_STRD == str_type)
10908 record_buf_mem[0] = 4;
10909 record_buf_mem[1] = tgt_mem_addr;
10910 record_buf_mem[2] = 4;
10911 record_buf_mem[3] = tgt_mem_addr + 4;
10912 arm_insn_r->mem_rec_count = 2;
10914 /* Record Rn also as it changes. */
10915 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10916 arm_insn_r->reg_rec_count = 1;
10918 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
10919 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10921 /* 4) Store, register pre-indexed. */
10922 /* 6) Store, register post -indexed. */
10923 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10924 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10925 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10926 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10927 /* Calculate target store address, Rn +/- Rm, register offset. */
10928 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10930 tgt_mem_addr = u_regval[0] + u_regval[1];
10932 else
10934 tgt_mem_addr = u_regval[1] - u_regval[0];
10936 if (ARM_RECORD_STRH == str_type)
10938 record_buf_mem[0] = 2;
10939 record_buf_mem[1] = tgt_mem_addr;
10940 arm_insn_r->mem_rec_count = 1;
10942 else if (ARM_RECORD_STRD == str_type)
10944 record_buf_mem[0] = 4;
10945 record_buf_mem[1] = tgt_mem_addr;
10946 record_buf_mem[2] = 4;
10947 record_buf_mem[3] = tgt_mem_addr + 4;
10948 arm_insn_r->mem_rec_count = 2;
10950 /* Record Rn also as it changes. */
10951 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10952 arm_insn_r->reg_rec_count = 1;
10954 return 0;
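/* For illustration, for an STRH the routine above records a 2-byte memory
   range at the computed target address, and for an STRD two 4-byte ranges
   at the target address and target address + 4; the pre- and post-indexed
   forms additionally record Rn, since write-back modifies it.  */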
10957 /* Handling ARM extension space insns. */
10959 static int
10960 arm_record_extension_space (insn_decode_record *arm_insn_r)
10962 uint32_t ret = 0; /* Return value: -1:record failure ; 0:success */
10963 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
10964 uint32_t record_buf[8], record_buf_mem[8];
10965 uint32_t reg_src1 = 0;
10966 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
10967 struct regcache *reg_cache = arm_insn_r->regcache;
10968 ULONGEST u_regval = 0;
10970 gdb_assert (!INSN_RECORDED(arm_insn_r));
10971 /* Handle unconditional insn extension space. */
10973 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
10974 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10975 if (arm_insn_r->cond)
10977 /* PLD has no effect on architectural state, it just affects
10978 the caches. */
10979 if (5 == ((opcode1 & 0xE0) >> 5))
10981 /* BLX(1) */
10982 record_buf[0] = ARM_PS_REGNUM;
10983 record_buf[1] = ARM_LR_REGNUM;
10984 arm_insn_r->reg_rec_count = 2;
10986 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
10990 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10991 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
10993 ret = -1;
10994 /* Undefined instruction on ARM V5; need to handle if later
10995 versions define it. */
10998 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
10999 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
11000 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
11002 /* Handle arithmetic insn extension space. */
11003 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
11004 && !INSN_RECORDED(arm_insn_r))
11006 /* Handle MLA(S) and MUL(S). */
11007 if (0 <= insn_op1 && 3 >= insn_op1)
11009 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11010 record_buf[1] = ARM_PS_REGNUM;
11011 arm_insn_r->reg_rec_count = 2;
11013 else if (4 <= insn_op1 && 15 >= insn_op1)
11015 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
11016 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11017 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11018 record_buf[2] = ARM_PS_REGNUM;
11019 arm_insn_r->reg_rec_count = 3;
11023 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
11024 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
11025 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
11027 /* Handle control insn extension space. */
11029 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
11030 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
11032 if (!bit (arm_insn_r->arm_insn,25))
11034 if (!bits (arm_insn_r->arm_insn, 4, 7))
11036 if ((0 == insn_op1) || (2 == insn_op1))
11038 /* MRS. */
11039 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11040 arm_insn_r->reg_rec_count = 1;
11042 else if (1 == insn_op1)
11044 /* CPSR is going to be changed. */
11045 record_buf[0] = ARM_PS_REGNUM;
11046 arm_insn_r->reg_rec_count = 1;
11048 else if (3 == insn_op1)
11050 /* SPSR is going to be changed. */
11051 /* We need to get SPSR value, which is yet to be done. */
11052 printf_unfiltered (_("Process record does not support "
11053 "instruction 0x%0x at address %s.\n"),
11054 arm_insn_r->arm_insn,
11055 paddress (arm_insn_r->gdbarch,
11056 arm_insn_r->this_addr));
11057 return -1;
11060 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
11062 if (1 == insn_op1)
11064 /* BX. */
11065 record_buf[0] = ARM_PS_REGNUM;
11066 arm_insn_r->reg_rec_count = 1;
11068 else if (3 == insn_op1)
11070 /* CLZ. */
11071 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11072 arm_insn_r->reg_rec_count = 1;
11075 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
11077 /* BLX. */
11078 record_buf[0] = ARM_PS_REGNUM;
11079 record_buf[1] = ARM_LR_REGNUM;
11080 arm_insn_r->reg_rec_count = 2;
11082 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
11084 /* QADD, QSUB, QDADD, QDSUB */
11085 record_buf[0] = ARM_PS_REGNUM;
11086 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11087 arm_insn_r->reg_rec_count = 2;
11089 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
11091 /* BKPT. */
11092 record_buf[0] = ARM_PS_REGNUM;
11093 record_buf[1] = ARM_LR_REGNUM;
11094 arm_insn_r->reg_rec_count = 2;
11096 /* Save SPSR also; how? */
11097 printf_unfiltered (_("Process record does not support "
11098 "instruction 0x%0x at address %s.\n"),
11099 arm_insn_r->arm_insn,
11100 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11101 return -1;
11103 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
11104 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
11105 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
11106 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
11109 if (0 == insn_op1 || 1 == insn_op1)
11111 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
11112 /* We don't do optimization for SMULW<y> where we
11113 need only Rd. */
11114 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11115 record_buf[1] = ARM_PS_REGNUM;
11116 arm_insn_r->reg_rec_count = 2;
11118 else if (2 == insn_op1)
11120 /* SMLAL<x><y>. */
11121 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11122 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11123 arm_insn_r->reg_rec_count = 2;
11125 else if (3 == insn_op1)
11127 /* SMUL<x><y>. */
11128 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11129 arm_insn_r->reg_rec_count = 1;
11133 else
11135 /* MSR: immediate form. */
11136 if (1 == insn_op1)
11138 /* CPSR is going to be changed. */
11139 record_buf[0] = ARM_PS_REGNUM;
11140 arm_insn_r->reg_rec_count = 1;
11142 else if (3 == insn_op1)
11144 /* SPSR is going to be changed. */
11145 /* We need to get the SPSR value, which is yet to be done. */
11146 printf_unfiltered (_("Process record does not support "
11147 "instruction 0x%0x at address %s.\n"),
11148 arm_insn_r->arm_insn,
11149 paddress (arm_insn_r->gdbarch,
11150 arm_insn_r->this_addr));
11151 return -1;
11156 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
11157 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
11158 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
11160 /* Handle load/store insn extension space. */
11162 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
11163 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
11164 && !INSN_RECORDED(arm_insn_r))
11166 /* SWP/SWPB. */
11167 if (0 == insn_op1)
11169 /* These insns change registers and memory as well. */
11170 /* SWP or SWPB insn. */
11171 /* Get memory address given by Rn. */
11172 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11173 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11174 /* SWP insn, swaps a word. */
11175 if (8 == arm_insn_r->opcode)
11177 record_buf_mem[0] = 4;
11179 else
11181 /* SWPB insn, swaps only a byte. */
11182 record_buf_mem[0] = 1;
11184 record_buf_mem[1] = u_regval;
11185 arm_insn_r->mem_rec_count = 1;
11186 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11187 arm_insn_r->reg_rec_count = 1;
11189 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11191 /* STRH. */
11192 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11193 ARM_RECORD_STRH);
11195 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11197 /* LDRD. */
11198 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11199 record_buf[1] = record_buf[0] + 1;
11200 arm_insn_r->reg_rec_count = 2;
11202 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11204 /* STRD. */
11205 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11206 ARM_RECORD_STRD);
11208 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
11210 /* LDRH, LDRSB, LDRSH. */
11211 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11212 arm_insn_r->reg_rec_count = 1;
11217 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
11218 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
11219 && !INSN_RECORDED(arm_insn_r))
11221 ret = -1;
11222 /* Handle coprocessor insn extension space. */
11225 /* To be done for ARMv5 and later; as of now we return -1. */
11226 if (-1 == ret)
11227 printf_unfiltered (_("Process record does not support instruction 0x%0x "
11228 "at address %s.\n"),arm_insn_r->arm_insn,
11229 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11232 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11233 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11235 return ret;
11238 /* Handling opcode 000 insns. */
11240 static int
11241 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
11243 struct regcache *reg_cache = arm_insn_r->regcache;
11244 uint32_t record_buf[8], record_buf_mem[8];
11245 ULONGEST u_regval[2] = {0};
11247 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
11248 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
11249 uint32_t opcode1 = 0;
11251 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11252 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11253 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
11255 /* Data processing insn / multiply insn. */
11256 if (9 == arm_insn_r->decode
11257 && ((4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11258 || (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)))
11260 /* Handle multiply instructions. */
11261 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
11262 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
11264 /* Handle MLA and MUL. */
11265 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11266 record_buf[1] = ARM_PS_REGNUM;
11267 arm_insn_r->reg_rec_count = 2;
11269 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11271 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
11272 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11273 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11274 record_buf[2] = ARM_PS_REGNUM;
11275 arm_insn_r->reg_rec_count = 3;
11278 else if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11279 && (11 == arm_insn_r->decode || 13 == arm_insn_r->decode))
11281 /* Handle misc load insns; the 20th bit (L) is 1. */
11282 /* The LDR insn has a capability to do branching: if
11283 MOV LR, PC is preceded by an LDR insn having Rn as R15,
11284 then it emulates a branch and link insn, and hence we
11285 need to save CPSR and PC as well.  I am not sure this is the right
11286 place; an opcode = 010 LDR insn makes this happen, if R15 was
11287 used. */
11288 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11289 if (15 != reg_dest)
11291 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11292 arm_insn_r->reg_rec_count = 1;
11294 else
11296 record_buf[0] = reg_dest;
11297 record_buf[1] = ARM_PS_REGNUM;
11298 arm_insn_r->reg_rec_count = 2;
11301 else if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11302 && sbo_sbz (arm_insn_r->arm_insn, 5, 12, 0)
11303 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11304 && 2 == bits (arm_insn_r->arm_insn, 20, 21))
11306 /* Handle MSR insn. */
11307 if (9 == arm_insn_r->opcode)
11309 /* CPSR is going to be changed. */
11310 record_buf[0] = ARM_PS_REGNUM;
11311 arm_insn_r->reg_rec_count = 1;
11313 else
11315 /* SPSR is going to be changed. */
11316 /* How to read SPSR value? */
11317 printf_unfiltered (_("Process record does not support instruction "
11318 "0x%0x at address %s.\n"),
11319 arm_insn_r->arm_insn,
11320 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11321 return -1;
11324 else if (9 == arm_insn_r->decode
11325 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11326 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11328 /* Handling SWP, SWPB. */
11329 /* These insns change registers and memory as well. */
11330 /* SWP or SWPB insn. */
11332 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11333 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11334 /* SWP insn, swaps a word. */
11335 if (8 == arm_insn_r->opcode)
11337 record_buf_mem[0] = 4;
11339 else
11341 /* SWPB insn, swaps only a byte. */
11342 record_buf_mem[0] = 1;
11344 record_buf_mem[1] = u_regval[0];
11345 arm_insn_r->mem_rec_count = 1;
11346 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11347 arm_insn_r->reg_rec_count = 1;
11349 else if (3 == arm_insn_r->decode && 0x12 == opcode1
11350 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11352 /* Handle BLX, branch and link/exchange. */
11353 if (9 == arm_insn_r->opcode)
11355 /* Branch is chosen by setting T bit of CPSR from bit[0] of Rm,
11356 and R14 stores the return address. */
11357 record_buf[0] = ARM_PS_REGNUM;
11358 record_buf[1] = ARM_LR_REGNUM;
11359 arm_insn_r->reg_rec_count = 2;
11362 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
11364 /* Handle enhanced software breakpoint insn, BKPT. */
11365 /* The CPSR is changed so that execution continues in ARM state,
11366 with normal interrupts disabled, in abort mode. */
11367 /* The PC is set according to the high vector configuration. */
11368 /* If the user hits the breakpoint and then reverses execution,
11369 we need to go back with the previous CPSR and
11370 Program Counter. */
11371 record_buf[0] = ARM_PS_REGNUM;
11372 record_buf[1] = ARM_LR_REGNUM;
11373 arm_insn_r->reg_rec_count = 2;
11375 /* The SPSR should also be saved; this is not yet supported. */
11376 printf_unfiltered (_("Process record does not support instruction "
11377 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11378 paddress (arm_insn_r->gdbarch,
11379 arm_insn_r->this_addr));
11380 return -1;
11382 else if (11 == arm_insn_r->decode
11383 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11385 /* Handle enhanced store insns and DSP insns (e.g. LDRD). */
11387 /* Handle STR(x) insn. */
11388 arm_record_strx (arm_insn_r, &record_buf[0], &record_buf_mem[0],
11389 ARM_RECORD_STRH);
11391 else if (1 == arm_insn_r->decode && 0x12 == opcode1
11392 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11394 /* Handle BX, branch and exchange. */
11395 /* Bit 0 of Rm is copied into the T bit of the CPSR. */
11396 record_buf[0] = ARM_PS_REGNUM;
11397 arm_insn_r->reg_rec_count = 1;
11399 else if (1 == arm_insn_r->decode && 0x16 == opcode1
11400 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
11401 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
11403 /* Count leading zeros: CLZ. */
11404 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11405 arm_insn_r->reg_rec_count = 1;
11407 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11408 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11409 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
11410 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)
11413 /* Handle MRS insn. */
11414 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11415 arm_insn_r->reg_rec_count = 1;
11417 else if (arm_insn_r->opcode <= 15)
11419 /* Normal data processing insns. */
11420 /* In all shifter-operand addressing modes, these insns modify the
11421 destination register, specified by bits 12-15, and the flags. */
11422 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11423 record_buf[1] = ARM_PS_REGNUM;
11424 arm_insn_r->reg_rec_count = 2;
11426 else
11428 return -1;
11431 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11432 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11433 return 0;
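/* Illustrative sketch, not part of the original source: every handler in this
   file follows the same convention.  record_buf collects the numbers of the
   registers the instruction may modify, record_buf_mem collects (length,
   address) pairs for memory the instruction may write, and REG_ALLOC /
   MEM_ALLOC then transfer them into the insn_decode_record.  A hypothetical
   handler for an insn that clobbers R3 and stores 4 bytes at ADDR would
   therefore end with:

     record_buf[0] = 3;                    -- register R3
     arm_insn_r->reg_rec_count = 1;
     record_buf_mem[0] = 4;                -- length in bytes
     record_buf_mem[1] = addr;             -- target address
     arm_insn_r->mem_rec_count = 1;
     REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
     MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count,
                record_buf_mem);  */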
11436 /* Handling opcode 001 insns. */
11438 static int
11439 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
11441 uint32_t record_buf[8], record_buf_mem[8];
11443 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11444 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11446 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11447 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
11448 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11451 /* Handle MSR insn. */
11452 if (9 == arm_insn_r->opcode)
11454 /* CPSR is going to be changed. */
11455 record_buf[0] = ARM_PS_REGNUM;
11456 arm_insn_r->reg_rec_count = 1;
11458 else
11460 /* SPSR is going to be changed. */
11463 else if (arm_insn_r->opcode <= 15)
11465 /* Normal data processing insns. */
11466 /* In all shifter-operand addressing modes, these insns modify the
11467 destination register, specified by bits 12-15, and the flags. */
11468 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11469 record_buf[1] = ARM_PS_REGNUM;
11470 arm_insn_r->reg_rec_count = 2;
11472 else
11474 return -1;
11477 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11478 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11479 return 0;
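/* For reference (a sketch based on the bit-field helpers used above, not
   part of the original source): bits (insn, st, fn) extracts the inclusive
   bit range [st, fn] of INSN, essentially (insn >> st) with the low
   (fn - st + 1) bits kept.  For the data-processing immediate encoding
   0xE2810001 (ADD r0, r1, #1):

     bits (0xE2810001, 21, 24) == 0x4   -- opcode field, ADD
     bits (0xE2810001, 12, 15) == 0x0   -- Rd, the recorded destination
     bit  (0xE2810001, INSN_S_L_BIT_NUM) == 0   -- the S/L bit is clear  */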
11482 /* Handle ARM mode instructions with opcode 010. */
11484 static int
11485 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
11487 struct regcache *reg_cache = arm_insn_r->regcache;
11489 uint32_t reg_base, reg_dest;
11490 uint32_t offset_12, tgt_mem_addr;
11491 uint32_t record_buf[8], record_buf_mem[8];
11492 unsigned char wback;
11493 ULONGEST u_regval;
11495 /* Calculate wback. */
11496 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
11497 || (bit (arm_insn_r->arm_insn, 21) == 1);
11499 arm_insn_r->reg_rec_count = 0;
11500 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
11502 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11504 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
11505 and LDRT. */
11507 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11508 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
11510 /* The LDR instruction is capable of doing branching. If MOV LR, PC
11511 precedes an LDR instruction having R15 as its destination, it
11512 emulates a branch and link instruction, and hence we need to save
11513 CPSR and PC as well. */
11514 if (ARM_PC_REGNUM == reg_dest)
11515 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
11517 /* If wback is true, also save the base register, which is going to be
11518 written to. */
11519 if (wback)
11520 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11522 else
11524 /* STR (immediate), STRB (immediate), STRBT and STRT. */
11526 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
11527 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
11529 /* Handle bit U. */
11530 if (bit (arm_insn_r->arm_insn, 23))
11532 /* U == 1: Add the offset. */
11533 tgt_mem_addr = (uint32_t) u_regval + offset_12;
11535 else
11537 /* U == 0: subtract the offset. */
11538 tgt_mem_addr = (uint32_t) u_regval - offset_12;
11541 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
11542 bytes. */
11543 if (bit (arm_insn_r->arm_insn, 22))
11545 /* STRB and STRBT: 1 byte. */
11546 record_buf_mem[0] = 1;
11548 else
11550 /* STR and STRT: 4 bytes. */
11551 record_buf_mem[0] = 4;
11554 /* Handle bit P. */
11555 if (bit (arm_insn_r->arm_insn, 24))
11556 record_buf_mem[1] = tgt_mem_addr;
11557 else
11558 record_buf_mem[1] = (uint32_t) u_regval;
11560 arm_insn_r->mem_rec_count = 1;
11562 /* If wback is true, also save the base register, which is going to be
11563 written to. */
11564 if (wback)
11565 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11568 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11569 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11570 return 0;
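/* Worked example (illustrative, not from the original source): for
   STR r1, [r0, #8]!  (pre-indexed with writeback, P = 1, W = 1) the code
   above computes wback = (P == 0) || (W == 1) = true, records a 4-byte
   store at r0 + 8 (bit 22 clear, bit 23 set), and additionally records
   reg_base = r0 because the base register is written back.  For a plain
   STR r1, [r0, #8] (P = 1, W = 0) wback is false and only the memory
   range is recorded.  */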
11573 /* Handling opcode 011 insns. */
11575 static int
11576 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
11578 struct regcache *reg_cache = arm_insn_r->regcache;
11580 uint32_t shift_imm = 0;
11581 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
11582 uint32_t offset_12 = 0, tgt_mem_addr = 0;
11583 uint32_t record_buf[8], record_buf_mem[8];
11585 LONGEST s_word;
11586 ULONGEST u_regval[2];
11588 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11589 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11591 /* Handle enhanced store insns and LDRD DSP insn,
11592 order begins according to addressing modes for store insns
11593 STRH insn. */
11595 /* LDR or STR? */
11596 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11598 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11599 /* An LDR insn can effectively branch: if it is preceded by
11600 MOV LR, PC and loads into R15,
11601 it emulates a branch and link insn, and hence we
11602 need to save the CPSR and PC as well. */
11603 if (15 != reg_dest)
11605 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11606 arm_insn_r->reg_rec_count = 1;
11608 else
11610 record_buf[0] = reg_dest;
11611 record_buf[1] = ARM_PS_REGNUM;
11612 arm_insn_r->reg_rec_count = 2;
11615 else
11617 if (! bits (arm_insn_r->arm_insn, 4, 11))
11619 /* Store insn, register offset and register pre-indexed,
11620 register post-indexed. */
11621 /* Get Rm. */
11622 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11623 /* Get Rn. */
11624 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11625 regcache_raw_read_unsigned (reg_cache, reg_src1,
11626 &u_regval[0]);
11627 regcache_raw_read_unsigned (reg_cache, reg_src2,
11628 &u_regval[1]);
11629 if (15 == reg_src2)
11631 /* If R15 was used as Rn, the value used is the current PC+8. */
11632 /* Pre-indexed mode doesn't reach here; that would be an illegal insn. */
11633 u_regval[1] = u_regval[1] + 8;
11635 /* Calculate target store address, Rn +/- Rm, register offset. */
11636 /* U == 1. */
11637 if (bit (arm_insn_r->arm_insn, 23))
11639 tgt_mem_addr = u_regval[0] + u_regval[1];
11641 else
11643 tgt_mem_addr = u_regval[1] - u_regval[0];
11646 switch (arm_insn_r->opcode)
11648 /* STR. */
11649 case 8:
11650 case 12:
11651 /* STR. */
11652 case 9:
11653 case 13:
11654 /* STRT. */
11655 case 1:
11656 case 5:
11657 /* STR. */
11658 case 0:
11659 case 4:
11660 record_buf_mem[0] = 4;
11661 break;
11663 /* STRB. */
11664 case 10:
11665 case 14:
11666 /* STRB. */
11667 case 11:
11668 case 15:
11669 /* STRBT. */
11670 case 3:
11671 case 7:
11672 /* STRB. */
11673 case 2:
11674 case 6:
11675 record_buf_mem[0] = 1;
11676 break;
11678 default:
11679 gdb_assert_not_reached ("no decoding pattern found");
11680 break;
11682 record_buf_mem[1] = tgt_mem_addr;
11683 arm_insn_r->mem_rec_count = 1;
11685 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11686 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11687 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11688 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11689 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11690 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11693 /* Rn is going to be changed in pre-indexed mode and
11694 post-indexed mode as well. */
11695 record_buf[0] = reg_src2;
11696 arm_insn_r->reg_rec_count = 1;
11699 else
11701 /* Store insn, scaled register offset; scaled pre-indexed. */
11702 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
11703 /* Get Rm. */
11704 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11705 /* Get Rn. */
11706 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11707 /* Get shift_imm. */
11708 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
11709 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11710 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
11711 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11712 /* Offset_12 now holds the shift type from bits 5-6. */
11713 switch (offset_12)
11715 case 0:
11716 /* LSL: logical shift left. */
11717 offset_12 = u_regval[0] << shift_imm;
11718 break;
11720 case 1:
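/* LSR: logical shift right (shift_imm == 0 encodes LSR #32). */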
11721 offset_12 = (!shift_imm) ? 0 : u_regval[0] >> shift_imm;
11722 break;
11724 case 2:
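/* ASR: arithmetic shift right (shift_imm == 0 encodes ASR #32). */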
11725 if (!shift_imm)
11727 if (bit (u_regval[0], 31))
11729 offset_12 = 0xFFFFFFFF;
11731 else
11733 offset_12 = 0;
11736 else
11738 /* This is arithmetic shift. */
11739 offset_12 = s_word >> shift_imm;
11741 break;
11743 case 3:
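/* ROR, or RRX when shift_imm == 0. */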
11744 if (!shift_imm)
11746 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
11747 &u_regval[1]);
11748 /* Get C flag value and shift it by 31. */
11749 offset_12 = (((bit (u_regval[1], 29)) << 31) \
11750 | (u_regval[0]) >> 1);
11752 else
11754 offset_12 = (u_regval[0] >> shift_imm) \
11755 | (u_regval[0] <<
11756 (32 - shift_imm));
11758 break;
11760 default:
11761 gdb_assert_not_reached ("no decoding pattern found");
11762 break;
11765 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11766 /* bit U set. */
11767 if (bit (arm_insn_r->arm_insn, 23))
11769 tgt_mem_addr = u_regval[1] + offset_12;
11771 else
11773 tgt_mem_addr = u_regval[1] - offset_12;
11776 switch (arm_insn_r->opcode)
11778 /* STR. */
11779 case 8:
11780 case 12:
11781 /* STR. */
11782 case 9:
11783 case 13:
11784 /* STRT. */
11785 case 1:
11786 case 5:
11787 /* STR. */
11788 case 0:
11789 case 4:
11790 record_buf_mem[0] = 4;
11791 break;
11793 /* STRB. */
11794 case 10:
11795 case 14:
11796 /* STRB. */
11797 case 11:
11798 case 15:
11799 /* STRBT. */
11800 case 3:
11801 case 7:
11802 /* STRB. */
11803 case 2:
11804 case 6:
11805 record_buf_mem[0] = 1;
11806 break;
11808 default:
11809 gdb_assert_not_reached ("no decoding pattern found");
11810 break;
11812 record_buf_mem[1] = tgt_mem_addr;
11813 arm_insn_r->mem_rec_count = 1;
11815 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11816 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11817 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11818 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11819 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11820 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11823 /* Rn is going to be changed in scaled register pre-indexed
11824 mode, and in scaled post-indexed mode. */
11825 record_buf[0] = reg_src2;
11826 arm_insn_r->reg_rec_count = 1;
11831 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11832 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11833 return 0;
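/* Illustrative sketch of the scaled-offset computation above, assuming the
   standard ARM addressing-mode-1 shift semantics.  Bits 5-6 select the
   shift applied to Rm before it is combined with Rn:

     0 (LSL):  offset = Rm << shift_imm
     1 (LSR):  offset = (shift_imm == 0) ? 0 : Rm >> shift_imm
     2 (ASR):  offset = (int32_t) Rm >> shift_imm   (shift_imm == 0 means 32)
     3 (ROR):  offset = (Rm >> shift_imm) | (Rm << (32 - shift_imm)),
               or RRX when shift_imm == 0: offset = (C << 31) | (Rm >> 1)

   E.g. STR r1, [r0, r2, LSL #2] with r2 == 3 stores at r0 + 12.  */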
11836 /* Handle ARM mode instructions with opcode 100. */
11838 static int
11839 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
11841 struct regcache *reg_cache = arm_insn_r->regcache;
11842 uint32_t register_count = 0, register_bits;
11843 uint32_t reg_base, addr_mode;
11844 uint32_t record_buf[24], record_buf_mem[48];
11845 uint32_t wback;
11846 ULONGEST u_regval;
11848 /* Fetch the list of registers. */
11849 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
11850 arm_insn_r->reg_rec_count = 0;
11852 /* Fetch the base register that contains the address we are loading
11853 from or storing to. */
11854 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
11856 /* Calculate wback. */
11857 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
11859 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11861 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
11863 /* Find out which registers are going to be loaded from memory. */
11864 while (register_bits)
11866 if (register_bits & 0x00000001)
11867 record_buf[arm_insn_r->reg_rec_count++] = register_count;
11868 register_bits = register_bits >> 1;
11869 register_count++;
11873 /* If wback is true, also save the base register, which is going to be
11874 written to. */
11875 if (wback)
11876 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11878 /* Save the CPSR register. */
11879 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
11881 else
11883 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
11885 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
11887 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
11889 /* Find out how many registers are going to be stored to memory. */
11890 while (register_bits)
11892 if (register_bits & 0x00000001)
11893 register_count++;
11894 register_bits = register_bits >> 1;
11897 switch (addr_mode)
11899 /* STMDA (STMED): Decrement after. */
11900 case 0:
11901 record_buf_mem[1] = (uint32_t) u_regval
11902 - register_count * INT_REGISTER_SIZE + 4;
11903 break;
11904 /* STM (STMIA, STMEA): Increment after. */
11905 case 1:
11906 record_buf_mem[1] = (uint32_t) u_regval;
11907 break;
11908 /* STMDB (STMFD): Decrement before. */
11909 case 2:
11910 record_buf_mem[1] = (uint32_t) u_regval
11911 - register_count * INT_REGISTER_SIZE;
11912 break;
11913 /* STMIB (STMFA): Increment before. */
11914 case 3:
11915 record_buf_mem[1] = (uint32_t) u_regval + INT_REGISTER_SIZE;
11916 break;
11917 default:
11918 gdb_assert_not_reached ("no decoding pattern found");
11919 break;
11922 record_buf_mem[0] = register_count * INT_REGISTER_SIZE;
11923 arm_insn_r->mem_rec_count = 1;
11925 /* If wback is true, also save the base register, which is going to be
11926 written to. */
11927 if (wback)
11928 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11931 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11932 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11933 return 0;
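/* Worked example (illustrative): for STMDB sp!, {r4, r5, lr} with
   sp == 0x1000, register_count is 3 and addr_mode is 2 (decrement before),
   so the code above records register_count * INT_REGISTER_SIZE = 12 bytes
   starting at 0x1000 - 12 = 0xFF4, plus the base register sp because
   wback (bit 21) is set.  */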
11936 /* Handling opcode 101 insns. */
11938 static int
11939 arm_record_b_bl (insn_decode_record *arm_insn_r)
11941 uint32_t record_buf[8];
11943 /* Handle B, BL, BLX(1) insns. */
11944 /* B simply branches so we do nothing here. */
11945 /* Note: BLX(1) doesn't fall here but instead falls into the
11946 extension space. */
11947 if (bit (arm_insn_r->arm_insn, 24))
11949 record_buf[0] = ARM_LR_REGNUM;
11950 arm_insn_r->reg_rec_count = 1;
11953 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11955 return 0;
11958 /* Handling opcode 110 insns. */
11960 static int
11961 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11963 printf_unfiltered (_("Process record does not support instruction "
11964 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11965 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11967 return -1;
11970 /* Record handler for vector data transfer instructions. */
11972 static int
11973 arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11975 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11976 uint32_t record_buf[4];
11978 const int num_regs = gdbarch_num_regs (arm_insn_r->gdbarch);
11979 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
11980 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11981 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11982 bit_l = bit (arm_insn_r->arm_insn, 20);
11983 bit_c = bit (arm_insn_r->arm_insn, 8);
11985 /* Handle VMOV instruction. */
11986 if (bit_l && bit_c)
11988 record_buf[0] = reg_t;
11989 arm_insn_r->reg_rec_count = 1;
11991 else if (bit_l && !bit_c)
11993 /* Handle VMOV instruction. */
11994 if (bits_a == 0x00)
11996 if (bit (arm_insn_r->arm_insn, 20))
11997 record_buf[0] = reg_t;
11998 else
11999 record_buf[0] = num_regs + (bit (arm_insn_r->arm_insn, 7) |
12000 (reg_v << 1));
12002 arm_insn_r->reg_rec_count = 1;
12004 /* Handle VMRS instruction. */
12005 else if (bits_a == 0x07)
12007 if (reg_t == 15)
12008 reg_t = ARM_PS_REGNUM;
12010 record_buf[0] = reg_t;
12011 arm_insn_r->reg_rec_count = 1;
12014 else if (!bit_l && !bit_c)
12016 /* Handle VMOV instruction. */
12017 if (bits_a == 0x00)
12019 if (bit (arm_insn_r->arm_insn, 20))
12020 record_buf[0] = reg_t;
12021 else
12022 record_buf[0] = num_regs + (bit (arm_insn_r->arm_insn, 7) |
12023 (reg_v << 1));
12025 arm_insn_r->reg_rec_count = 1;
12027 /* Handle VMSR instruction. */
12028 else if (bits_a == 0x07)
12030 record_buf[0] = ARM_FPSCR_REGNUM;
12031 arm_insn_r->reg_rec_count = 1;
12034 else if (!bit_l && bit_c)
12036 /* Handle VMOV instruction. */
12037 if (!(bits_a & 0x04))
12039 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
12040 + ARM_D0_REGNUM;
12041 arm_insn_r->reg_rec_count = 1;
12043 /* Handle VDUP instruction. */
12044 else
12046 if (bit (arm_insn_r->arm_insn, 21))
12048 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
12049 record_buf[0] = reg_v + ARM_D0_REGNUM;
12050 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
12051 arm_insn_r->reg_rec_count = 2;
12053 else
12055 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
12056 record_buf[0] = reg_v + ARM_D0_REGNUM;
12057 arm_insn_r->reg_rec_count = 1;
12062 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12063 return 0;
12066 /* Record handler for extension register load/store instructions. */
12068 static int
12069 arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
12071 uint32_t opcode, single_reg;
12072 uint8_t op_vldm_vstm;
12073 uint32_t record_buf[8], record_buf_mem[128];
12074 ULONGEST u_regval = 0;
12076 struct regcache *reg_cache = arm_insn_r->regcache;
12077 const int num_regs = gdbarch_num_regs (arm_insn_r->gdbarch);
12079 opcode = bits (arm_insn_r->arm_insn, 20, 24);
12080 single_reg = bit (arm_insn_r->arm_insn, 8);
12081 op_vldm_vstm = opcode & 0x1b;
12083 /* Handle VMOV instructions. */
12084 if ((opcode & 0x1e) == 0x04)
12086 if (bit (arm_insn_r->arm_insn, 4))
12088 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
12089 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
12090 arm_insn_r->reg_rec_count = 2;
12092 else
12094 uint8_t reg_m = (bits (arm_insn_r->arm_insn, 0, 3) << 1)
12095 | bit (arm_insn_r->arm_insn, 5);
12097 if (!single_reg)
12099 record_buf[0] = num_regs + reg_m;
12100 record_buf[1] = num_regs + reg_m + 1;
12101 arm_insn_r->reg_rec_count = 2;
12103 else
12105 record_buf[0] = reg_m + ARM_D0_REGNUM;
12106 arm_insn_r->reg_rec_count = 1;
12110 /* Handle VSTM and VPUSH instructions. */
12111 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
12112 || op_vldm_vstm == 0x12)
12114 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
12115 uint32_t memory_index = 0;
12117 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
12118 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12119 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
12120 imm_off32 = imm_off8 << 2;
12121 memory_count = imm_off8;
12123 if (bit (arm_insn_r->arm_insn, 23))
12124 start_address = u_regval;
12125 else
12126 start_address = u_regval - imm_off32;
12128 if (bit (arm_insn_r->arm_insn, 21))
12130 record_buf[0] = reg_rn;
12131 arm_insn_r->reg_rec_count = 1;
12134 while (memory_count > 0)
12136 if (!single_reg)
12138 record_buf_mem[memory_index] = start_address;
12139 record_buf_mem[memory_index + 1] = 4;
12140 start_address = start_address + 4;
12141 memory_index = memory_index + 2;
12143 else
12145 record_buf_mem[memory_index] = start_address;
12146 record_buf_mem[memory_index + 1] = 4;
12147 record_buf_mem[memory_index + 2] = start_address + 4;
12148 record_buf_mem[memory_index + 3] = 4;
12149 start_address = start_address + 8;
12150 memory_index = memory_index + 4;
12152 memory_count--;
12154 arm_insn_r->mem_rec_count = (memory_index >> 1);
12156 /* Handle VLDM instructions. */
12157 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
12158 || op_vldm_vstm == 0x13)
12160 uint32_t reg_count, reg_vd;
12161 uint32_t reg_index = 0;
12163 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12164 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
12166 if (single_reg)
12167 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
12168 else
12169 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
12171 if (bit (arm_insn_r->arm_insn, 21))
12172 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
12174 while (reg_count > 0)
12176 if (single_reg)
12177 record_buf[reg_index++] = num_regs + reg_vd + reg_count - 1;
12178 else
12179 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
12181 reg_count--;
12183 arm_insn_r->reg_rec_count = reg_index;
12185 /* VSTR Vector store register. */
12186 else if ((opcode & 0x13) == 0x10)
12188 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
12189 uint32_t memory_index = 0;
12191 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
12192 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12193 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
12194 imm_off32 = imm_off8 << 2;
12195 memory_count = imm_off8;
12197 if (bit (arm_insn_r->arm_insn, 23))
12198 start_address = u_regval + imm_off32;
12199 else
12200 start_address = u_regval - imm_off32;
12202 if (single_reg)
12204 record_buf_mem[memory_index] = start_address;
12205 record_buf_mem[memory_index + 1] = 4;
12206 arm_insn_r->mem_rec_count = 1;
12208 else
12210 record_buf_mem[memory_index] = start_address;
12211 record_buf_mem[memory_index + 1] = 4;
12212 record_buf_mem[memory_index + 2] = start_address + 4;
12213 record_buf_mem[memory_index + 3] = 4;
12214 arm_insn_r->mem_rec_count = 2;
12217 /* VLDR Vector load register. */
12218 else if ((opcode & 0x13) == 0x11)
12220 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12222 if (!single_reg)
12224 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
12225 record_buf[0] = ARM_D0_REGNUM + reg_vd;
12227 else
12229 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
12230 record_buf[0] = num_regs + reg_vd;
12232 arm_insn_r->reg_rec_count = 1;
12235 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12236 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
12237 return 0;
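/* Note (assumption based on the architectural description of VLDM/VSTM/VSTR):
   the 8-bit immediate in these encodings is a word count, so the byte offset
   applied to the base register is imm8 * 4, i.e. imm_off8 << 2 as used above.
   For VSTMDB r0!, {d0-d1} (imm8 == 4) the stored range therefore starts at
   r0 - 16.  */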
12240 /* Record handler for arm/thumb mode VFP data processing instructions. */
12242 static int
12243 arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
12245 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
12246 uint32_t record_buf[4];
12247 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
12248 enum insn_types curr_insn_type = INSN_INV;
12250 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12251 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
12252 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
12253 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
12254 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
12255 bit_d = bit (arm_insn_r->arm_insn, 22);
12256 opc1 = opc1 & ~0x04;
12258 /* Handle VMLA, VMLS. */
12259 if (opc1 == 0x00)
12261 if (bit (arm_insn_r->arm_insn, 10))
12263 if (bit (arm_insn_r->arm_insn, 6))
12264 curr_insn_type = INSN_T0;
12265 else
12266 curr_insn_type = INSN_T1;
12268 else
12270 if (dp_op_sz)
12271 curr_insn_type = INSN_T1;
12272 else
12273 curr_insn_type = INSN_T2;
12276 /* Handle VNMLA, VNMLS, VNMUL. */
12277 else if (opc1 == 0x01)
12279 if (dp_op_sz)
12280 curr_insn_type = INSN_T1;
12281 else
12282 curr_insn_type = INSN_T2;
12284 /* Handle VMUL. */
12285 else if (opc1 == 0x02 && !(opc3 & 0x01))
12287 if (bit (arm_insn_r->arm_insn, 10))
12289 if (bit (arm_insn_r->arm_insn, 6))
12290 curr_insn_type = INSN_T0;
12291 else
12292 curr_insn_type = INSN_T1;
12294 else
12296 if (dp_op_sz)
12297 curr_insn_type = INSN_T1;
12298 else
12299 curr_insn_type = INSN_T2;
12302 /* Handle VADD, VSUB. */
12303 else if (opc1 == 0x03)
12305 if (!bit (arm_insn_r->arm_insn, 9))
12307 if (bit (arm_insn_r->arm_insn, 6))
12308 curr_insn_type = INSN_T0;
12309 else
12310 curr_insn_type = INSN_T1;
12312 else
12314 if (dp_op_sz)
12315 curr_insn_type = INSN_T1;
12316 else
12317 curr_insn_type = INSN_T2;
12320 /* Handle VDIV. */
12321 else if (opc1 == 0x0b)
12323 if (dp_op_sz)
12324 curr_insn_type = INSN_T1;
12325 else
12326 curr_insn_type = INSN_T2;
12328 /* Handle all other vfp data processing instructions. */
12329 else if (opc1 == 0x0b)
12331 /* Handle VMOV. */
12332 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
12334 if (bit (arm_insn_r->arm_insn, 4))
12336 if (bit (arm_insn_r->arm_insn, 6))
12337 curr_insn_type = INSN_T0;
12338 else
12339 curr_insn_type = INSN_T1;
12341 else
12343 if (dp_op_sz)
12344 curr_insn_type = INSN_T1;
12345 else
12346 curr_insn_type = INSN_T2;
12349 /* Handle VNEG and VABS. */
12350 else if ((opc2 == 0x01 && opc3 == 0x01)
12351 || (opc2 == 0x00 && opc3 == 0x03))
12353 if (!bit (arm_insn_r->arm_insn, 11))
12355 if (bit (arm_insn_r->arm_insn, 6))
12356 curr_insn_type = INSN_T0;
12357 else
12358 curr_insn_type = INSN_T1;
12360 else
12362 if (dp_op_sz)
12363 curr_insn_type = INSN_T1;
12364 else
12365 curr_insn_type = INSN_T2;
12368 /* Handle VSQRT. */
12369 else if (opc2 == 0x01 && opc3 == 0x03)
12371 if (dp_op_sz)
12372 curr_insn_type = INSN_T1;
12373 else
12374 curr_insn_type = INSN_T2;
12376 /* Handle VCVT. */
12377 else if (opc2 == 0x07 && opc3 == 0x03)
12379 if (!dp_op_sz)
12380 curr_insn_type = INSN_T1;
12381 else
12382 curr_insn_type = INSN_T2;
12384 else if (opc3 & 0x01)
12386 /* Handle VCVT. */
12387 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
12389 if (!bit (arm_insn_r->arm_insn, 18))
12390 curr_insn_type = INSN_T2;
12391 else
12393 if (dp_op_sz)
12394 curr_insn_type = INSN_T1;
12395 else
12396 curr_insn_type = INSN_T2;
12399 /* Handle VCVT. */
12400 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
12402 if (dp_op_sz)
12403 curr_insn_type = INSN_T1;
12404 else
12405 curr_insn_type = INSN_T2;
12407 /* Handle VCVTB, VCVTT. */
12408 else if ((opc2 & 0x0e) == 0x02)
12409 curr_insn_type = INSN_T2;
12410 /* Handle VCMP, VCMPE. */
12411 else if ((opc2 & 0x0e) == 0x04)
12412 curr_insn_type = INSN_T3;
12416 switch (curr_insn_type)
12418 case INSN_T0:
12419 reg_vd = reg_vd | (bit_d << 4);
12420 record_buf[0] = reg_vd + ARM_D0_REGNUM;
12421 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
12422 arm_insn_r->reg_rec_count = 2;
12423 break;
12425 case INSN_T1:
12426 reg_vd = reg_vd | (bit_d << 4);
12427 record_buf[0] = reg_vd + ARM_D0_REGNUM;
12428 arm_insn_r->reg_rec_count = 1;
12429 break;
12431 case INSN_T2:
12432 reg_vd = (reg_vd << 1) | bit_d;
12433 record_buf[0] = reg_vd + ARM_D0_REGNUM;
12434 arm_insn_r->reg_rec_count = 1;
12435 break;
12437 case INSN_T3:
12438 record_buf[0] = ARM_FPSCR_REGNUM;
12439 arm_insn_r->reg_rec_count = 1;
12440 break;
12442 default:
12443 gdb_assert_not_reached ("no decoding pattern found");
12444 break;
12447 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12448 return 0;
12451 /* Handling opcode 110 insns. */
12453 static int
12454 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
12456 uint32_t op, op1, op1_sbit, op1_ebit, coproc;
12458 coproc = bits (arm_insn_r->arm_insn, 8, 11);
12459 op1 = bits (arm_insn_r->arm_insn, 20, 25);
12460 op1_ebit = bit (arm_insn_r->arm_insn, 20);
12462 if ((coproc & 0x0e) == 0x0a)
12464 /* Handle extension register ld/st instructions. */
12465 if (!(op1 & 0x20))
12466 return arm_record_exreg_ld_st_insn (arm_insn_r);
12468 /* 64-bit transfers between arm core and extension registers. */
12469 if ((op1 & 0x3e) == 0x04)
12470 return arm_record_exreg_ld_st_insn (arm_insn_r);
12472 else
12474 /* Handle coprocessor ld/st instructions. */
12475 if (!(op1 & 0x3a))
12477 /* Store. */
12478 if (!op1_ebit)
12479 return arm_record_unsupported_insn (arm_insn_r);
12480 else
12481 /* Load. */
12482 return arm_record_unsupported_insn (arm_insn_r);
12485 /* Move to coprocessor from two arm core registers. */
12486 if (op1 == 0x4)
12487 return arm_record_unsupported_insn (arm_insn_r);
12489 /* Move to two arm core registers from coprocessor. */
12490 if (op1 == 0x5)
12492 uint32_t reg_t[2];
12494 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
12495 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
12496 arm_insn_r->reg_rec_count = 2;
12498 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
12499 return 0;
12502 return arm_record_unsupported_insn (arm_insn_r);
12505 /* Handling opcode 111 insns. */
12507 static int
12508 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
12510 uint32_t op, op1_sbit, op1_ebit, coproc;
12511 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
12512 struct regcache *reg_cache = arm_insn_r->regcache;
12513 ULONGEST u_regval = 0;
12515 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
12516 coproc = bits (arm_insn_r->arm_insn, 8, 11);
12517 op1_sbit = bit (arm_insn_r->arm_insn, 24);
12518 op1_ebit = bit (arm_insn_r->arm_insn, 20);
12519 op = bit (arm_insn_r->arm_insn, 4);
12521 /* Handle arm SWI/SVC system call instructions. */
12522 if (op1_sbit)
12524 if (tdep->arm_syscall_record != NULL)
12526 ULONGEST svc_operand, svc_number;
12528 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
12530 if (svc_operand) /* OABI. */
12531 svc_number = svc_operand - 0x900000;
12532 else /* EABI. */
12533 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
12535 return tdep->arm_syscall_record (reg_cache, svc_number);
12537 else
12539 printf_unfiltered (_("no syscall record support\n"));
12540 return -1;
12544 if ((coproc & 0x0e) == 0x0a)
12546 /* VFP data-processing instructions. */
12547 if (!op1_sbit && !op)
12548 return arm_record_vfp_data_proc_insn (arm_insn_r);
12550 /* Advanced SIMD, VFP instructions. */
12551 if (!op1_sbit && op)
12552 return arm_record_vdata_transfer_insn (arm_insn_r);
12554 else
12556 /* Coprocessor data operations. */
12557 if (!op1_sbit && !op)
12558 return arm_record_unsupported_insn (arm_insn_r);
12560 /* Move to Coprocessor from ARM core register. */
12561 if (!op1_sbit && !op1_ebit && op)
12562 return arm_record_unsupported_insn (arm_insn_r);
12564 /* Move to arm core register from coprocessor. */
12565 if (!op1_sbit && op1_ebit && op)
12567 uint32_t record_buf[1];
12569 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
12570 if (record_buf[0] == 15)
12571 record_buf[0] = ARM_PS_REGNUM;
12573 arm_insn_r->reg_rec_count = 1;
12574 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
12575 record_buf);
12576 return 0;
12580 return arm_record_unsupported_insn (arm_insn_r);
12583 /* Handling opcode 000 insns. */
12585 static int
12586 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
12588 uint32_t record_buf[8];
12589 uint32_t reg_src1 = 0;
12591 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12593 record_buf[0] = ARM_PS_REGNUM;
12594 record_buf[1] = reg_src1;
12595 thumb_insn_r->reg_rec_count = 2;
12597 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12599 return 0;
12603 /* Handling opcode 001 insns. */
12605 static int
12606 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
12608 uint32_t record_buf[8];
12609 uint32_t reg_src1 = 0;
12611 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12613 record_buf[0] = ARM_PS_REGNUM;
12614 record_buf[1] = reg_src1;
12615 thumb_insn_r->reg_rec_count = 2;
12617 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12619 return 0;
12622 /* Handling opcode 010 insns. */
12624 static int
12625 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
12627 struct regcache *reg_cache = thumb_insn_r->regcache;
12628 uint32_t record_buf[8], record_buf_mem[8];
12630 uint32_t reg_src1 = 0, reg_src2 = 0;
12631 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
12633 ULONGEST u_regval[2] = {0};
12635 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
12637 if (bit (thumb_insn_r->arm_insn, 12))
12639 /* Handle load/store register offset. */
12640 opcode2 = bits (thumb_insn_r->arm_insn, 9, 10);
12641 if (opcode2 >= 12 && opcode2 <= 15)
12643 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
12644 reg_src1 = bits (thumb_insn_r->arm_insn,0, 2);
12645 record_buf[0] = reg_src1;
12646 thumb_insn_r->reg_rec_count = 1;
12648 else if (opcode2 >= 8 && opcode2 <= 10)
12650 /* STR(2), STRB(2), STRH(2) . */
12651 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12652 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
12653 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
12654 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
12655 if (8 == opcode2)
12656 record_buf_mem[0] = 4; /* STR (2). */
12657 else if (10 == opcode2)
12658 record_buf_mem[0] = 1; /* STRB (2). */
12659 else if (9 == opcode2)
12660 record_buf_mem[0] = 2; /* STRH (2). */
12661 record_buf_mem[1] = u_regval[0] + u_regval[1];
12662 thumb_insn_r->mem_rec_count = 1;
12665 else if (bit (thumb_insn_r->arm_insn, 11))
12667 /* Handle load from literal pool. */
12668 /* LDR(3). */
12669 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12670 record_buf[0] = reg_src1;
12671 thumb_insn_r->reg_rec_count = 1;
12673 else if (opcode1)
12675 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
12676 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
12677 if ((3 == opcode2) && (!opcode3))
12679 /* Branch with exchange. */
12680 record_buf[0] = ARM_PS_REGNUM;
12681 thumb_insn_r->reg_rec_count = 1;
12683 else
12685 /* Format 8; special data processing insns. */
12686 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12687 record_buf[0] = ARM_PS_REGNUM;
12688 record_buf[1] = reg_src1;
12689 thumb_insn_r->reg_rec_count = 2;
12692 else
12694 /* Format 5; data processing insns. */
12695 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12696 if (bit (thumb_insn_r->arm_insn, 7))
12698 reg_src1 = reg_src1 + 8;
12700 record_buf[0] = ARM_PS_REGNUM;
12701 record_buf[1] = reg_src1;
12702 thumb_insn_r->reg_rec_count = 2;
12705 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12706 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12707 record_buf_mem);
12709 return 0;
12712 /* Handling opcode 001 insns. */
12714 static int
12715 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
12717 struct regcache *reg_cache = thumb_insn_r->regcache;
12718 uint32_t record_buf[8], record_buf_mem[8];
12720 uint32_t reg_src1 = 0;
12721 uint32_t opcode = 0, immed_5 = 0;
12723 ULONGEST u_regval = 0;
12725 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12727 if (opcode)
12729 /* LDR(1). */
12730 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12731 record_buf[0] = reg_src1;
12732 thumb_insn_r->reg_rec_count = 1;
12734 else
12736 /* STR(1). */
12737 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12738 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12739 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12740 record_buf_mem[0] = 4;
12741 record_buf_mem[1] = u_regval + (immed_5 * 4);
12742 thumb_insn_r->mem_rec_count = 1;
12745 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12746 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12747 record_buf_mem);
12749 return 0;
12752 /* Handling opcode 100 insns. */
12754 static int
12755 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
12757 struct regcache *reg_cache = thumb_insn_r->regcache;
12758 uint32_t record_buf[8], record_buf_mem[8];
12760 uint32_t reg_src1 = 0;
12761 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
12763 ULONGEST u_regval = 0;
12765 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12767 if (3 == opcode)
12769 /* LDR(4). */
12770 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12771 record_buf[0] = reg_src1;
12772 thumb_insn_r->reg_rec_count = 1;
12774 else if (1 == opcode)
12776 /* LDRH(1). */
12777 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12778 record_buf[0] = reg_src1;
12779 thumb_insn_r->reg_rec_count = 1;
12781 else if (2 == opcode)
12783 /* STR(3). */
12784 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
12785 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12786 record_buf_mem[0] = 4;
12787 record_buf_mem[1] = u_regval + (immed_8 * 4);
12788 thumb_insn_r->mem_rec_count = 1;
12790 else if (0 == opcode)
12792 /* STRH(1). */
12793 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12794 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12795 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12796 record_buf_mem[0] = 2;
12797 record_buf_mem[1] = u_regval + (immed_5 * 2);
12798 thumb_insn_r->mem_rec_count = 1;
12801 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12802 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12803 record_buf_mem);
12805 return 0;
12808 /* Handling opcode 101 insns. */
12810 static int
12811 thumb_record_misc (insn_decode_record *thumb_insn_r)
12813 struct regcache *reg_cache = thumb_insn_r->regcache;
12815 uint32_t opcode = 0, opcode1 = 0, opcode2 = 0;
12816 uint32_t register_bits = 0, register_count = 0;
12817 uint32_t register_list[8] = {0}, index = 0, start_address = 0;
12818 uint32_t record_buf[24], record_buf_mem[48];
12819 uint32_t reg_src1;
12821 ULONGEST u_regval = 0;
12823 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12824 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12825 opcode2 = bits (thumb_insn_r->arm_insn, 9, 12);
12827 if (14 == opcode2)
12829 /* POP. */
12830 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12831 while (register_bits)
12833 if (register_bits & 0x00000001)
12834 record_buf[index++] = register_count;
12835 register_bits = register_bits >> 1;
12836 register_count++;
12838 record_buf[index++] = ARM_PS_REGNUM;
12839 record_buf[index++] = ARM_SP_REGNUM;
12840 thumb_insn_r->reg_rec_count = index;
12842 else if (10 == opcode2)
12844 /* PUSH. */
12845 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12846 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12847 while (register_bits)
12849 if (register_bits & 0x00000001)
12850 register_count++;
12851 register_bits = register_bits >> 1;
12853 start_address = u_regval - \
12854 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
12855 thumb_insn_r->mem_rec_count = register_count;
12856 while (register_count)
12858 record_buf_mem[(register_count * 2) - 1] = start_address;
12859 record_buf_mem[(register_count * 2) - 2] = 4;
12860 start_address = start_address + 4;
12861 register_count--;
12863 record_buf[0] = ARM_SP_REGNUM;
12864 thumb_insn_r->reg_rec_count = 1;
12866 else if (0x1E == opcode1)
12868 /* BKPT insn. */
12869 /* Handle enhanced software breakpoint insn, BKPT. */
12870 /* The CPSR is changed so that execution continues in ARM state,
12871 with normal interrupts disabled, in abort mode. */
12872 /* The PC is set according to the high vector configuration. */
12873 /* If the user hits the breakpoint and then reverses execution, we need
12874 to go back with the previous CPSR and Program Counter. */
12875 record_buf[0] = ARM_PS_REGNUM;
12876 record_buf[1] = ARM_LR_REGNUM;
12877 thumb_insn_r->reg_rec_count = 2;
12878 /* We need to save SPSR value, which is not yet done. */
12879 printf_unfiltered (_("Process record does not support instruction "
12880 "0x%0x at address %s.\n"),
12881 thumb_insn_r->arm_insn,
12882 paddress (thumb_insn_r->gdbarch,
12883 thumb_insn_r->this_addr));
12884 return -1;
12886 else if ((0 == opcode) || (1 == opcode))
12888 /* ADD(5), ADD(6). */
12889 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12890 record_buf[0] = reg_src1;
12891 thumb_insn_r->reg_rec_count = 1;
12893 else if (2 == opcode)
12895 /* ADD(7), SUB(4). */
12896 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12897 record_buf[0] = ARM_SP_REGNUM;
12898 thumb_insn_r->reg_rec_count = 1;
12901 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12902 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12903 record_buf_mem);
12905 return 0;
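/* Worked example (illustrative): for the Thumb PUSH {r4, r5, lr} insn,
   bit 8 (the LR bit) is set and register_bits has two bits set, so the
   code above computes start_address = sp - 4 * (1 + 2) = sp - 12 and
   records one 4-byte slot per register in the low register list upward
   from there, plus the SP register, which the insn decrements.  */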
12908 /* Handling opcode 110 insns. */
12910 static int
12911 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12913 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12914 struct regcache *reg_cache = thumb_insn_r->regcache;
12916 uint32_t ret = 0; /* function return value: -1:record failure ; 0:success */
12917 uint32_t reg_src1 = 0;
12918 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
12919 uint32_t register_list[8] = {0}, index = 0, start_address = 0;
12920 uint32_t record_buf[24], record_buf_mem[48];
12922 ULONGEST u_regval = 0;
12924 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12925 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12927 if (1 == opcode2)
12930 /* LDMIA. */
12931 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12932 /* Get Rn. */
12933 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12934 while (register_bits)
12936 if (register_bits & 0x00000001)
12937 record_buf[index++] = register_count;
12938 register_bits = register_bits >> 1;
12939 register_count++;
12941 record_buf[index++] = reg_src1;
12942 thumb_insn_r->reg_rec_count = index;
12944 else if (0 == opcode2)
12946 /* Handle STMIA. */
12947 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12948 /* Get Rn. */
12949 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12950 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12951 while (register_bits)
12953 if (register_bits & 0x00000001)
12954 register_count++;
12955 register_bits = register_bits >> 1;
12957 start_address = u_regval;
12958 thumb_insn_r->mem_rec_count = register_count;
12959 while (register_count)
12961 record_buf_mem[(register_count * 2) - 1] = start_address;
12962 record_buf_mem[(register_count * 2) - 2] = 4;
12963 start_address = start_address + 4;
12964 register_count--;
12967 else if (0x1F == opcode1)
12969 /* Handle arm syscall insn. */
12970 if (tdep->arm_syscall_record != NULL)
12972 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12973 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12975 else
12977 printf_unfiltered (_("no syscall record support\n"));
12978 return -1;
12982 /* B (1), conditional branch is automatically taken care of in process_record,
12983 as PC is saved there. */
12985 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12986 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12987 record_buf_mem);
12989 return ret;
12992 /* Handling opcode 111 insns. */
12994 static int
12995 thumb_record_branch (insn_decode_record *thumb_insn_r)
12997 uint32_t record_buf[8];
12998 uint32_t bits_h = 0;
13000 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
13002 if (2 == bits_h || 3 == bits_h)
13004 /* BL */
13005 record_buf[0] = ARM_LR_REGNUM;
13006 thumb_insn_r->reg_rec_count = 1;
13008 else if (1 == bits_h)
13010 /* BLX(1). */
13011 record_buf[0] = ARM_PS_REGNUM;
13012 record_buf[1] = ARM_LR_REGNUM;
13013 thumb_insn_r->reg_rec_count = 2;
13016 /* B(2) is automatically taken care of in process_record, as PC is
13017 saved there. */
13019 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13021 return 0;
13024 /* Handler for thumb2 load/store multiple instructions. */
13026 static int
13027 thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
13029 struct regcache *reg_cache = thumb2_insn_r->regcache;
13031 uint32_t reg_rn, op;
13032 uint32_t register_bits = 0, register_count = 0;
13033 uint32_t index = 0, start_address = 0;
13034 uint32_t record_buf[24], record_buf_mem[48];
13036 ULONGEST u_regval = 0;
13038 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13039 op = bits (thumb2_insn_r->arm_insn, 23, 24);
13041 if (0 == op || 3 == op)
13043 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13045 /* Handle RFE instruction. */
13046 record_buf[0] = ARM_PS_REGNUM;
13047 thumb2_insn_r->reg_rec_count = 1;
13049 else
13051 /* Handle SRS instruction after reading banked SP. */
13052 return arm_record_unsupported_insn (thumb2_insn_r);
13055 else if (1 == op || 2 == op)
13057 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13059 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
13060 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
13061 while (register_bits)
13063 if (register_bits & 0x00000001)
13064 record_buf[index++] = register_count;
13066 register_count++;
13067 register_bits = register_bits >> 1;
13069 record_buf[index++] = reg_rn;
13070 record_buf[index++] = ARM_PS_REGNUM;
13071 thumb2_insn_r->reg_rec_count = index;
13073 else
13075 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
13076 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
13077 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
13078 while (register_bits)
13080 if (register_bits & 0x00000001)
13081 register_count++;
13083 register_bits = register_bits >> 1;
13086 if (1 == op)
13088 /* Start address calculation for STM/STMIA/STMEA. */
13089 start_address = u_regval;
13091 else if (2 == op)
13093 /* Start address calculation for STMDB/STMFD. */
13094 start_address = u_regval - register_count * 4;
13097 thumb2_insn_r->mem_rec_count = register_count;
13098 while (register_count)
13100 record_buf_mem[register_count * 2 - 1] = start_address;
13101 record_buf_mem[register_count * 2 - 2] = 4;
13102 start_address = start_address + 4;
13103 register_count--;
13105 record_buf[0] = reg_rn;
13106 record_buf[1] = ARM_PS_REGNUM;
13107 thumb2_insn_r->reg_rec_count = 2;
13111 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13112 record_buf_mem);
13113 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13114 record_buf);
13115 return ARM_RECORD_SUCCESS;
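/* Worked example (illustrative): for the Thumb-2 LDMIA r0!, {r1, r2, pc}
   insn, register_bits is 0x8006, so the loop above records r1, r2 and r15,
   then the base register r0 and the CPSR, for a reg_rec_count of 5.  */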
13118 /* Handler for thumb2 load/store (dual/exclusive) and table branch
13119 instructions. */
13121 static int
13122 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
13124 struct regcache *reg_cache = thumb2_insn_r->regcache;
13126 uint32_t reg_rd, reg_rn, offset_imm;
13127 uint32_t reg_dest1, reg_dest2;
13128 uint32_t address, offset_addr;
13129 uint32_t record_buf[8], record_buf_mem[8];
13130 uint32_t op1, op2, op3;
13131 LONGEST s_word;
13133 ULONGEST u_regval[2];
13135 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
13136 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
13137 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
13139 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13141 if (!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
13143 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
13144 record_buf[0] = reg_dest1;
13145 record_buf[1] = ARM_PS_REGNUM;
13146 thumb2_insn_r->reg_rec_count = 2;
13149 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
13151 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
13152 record_buf[2] = reg_dest2;
13153 thumb2_insn_r->reg_rec_count = 3;
13156 else
13158 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13159 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
13161 if (0 == op1 && 0 == op2)
13163 /* Handle STREX. */
13164 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13165 address = u_regval[0] + (offset_imm * 4);
13166 record_buf_mem[0] = 4;
13167 record_buf_mem[1] = address;
13168 thumb2_insn_r->mem_rec_count = 1;
13169 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
13170 record_buf[0] = reg_rd;
13171 thumb2_insn_r->reg_rec_count = 1;
13173 else if (1 == op1 && 0 == op2)
13175 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
13176 record_buf[0] = reg_rd;
13177 thumb2_insn_r->reg_rec_count = 1;
13178 address = u_regval[0];
13179 record_buf_mem[1] = address;
13181 if (4 == op3)
13183 /* Handle STREXB. */
13184 record_buf_mem[0] = 1;
13185 thumb2_insn_r->mem_rec_count = 1;
13187 else if (5 == op3)
13189 /* Handle STREXH. */
13190 record_buf_mem[0] = 2;
13191 thumb2_insn_r->mem_rec_count = 1;
13193 else if (7 == op3)
13195 /* Handle STREXD. */
13196 address = u_regval[0];
13197 record_buf_mem[0] = 4;
13198 record_buf_mem[2] = 4;
13199 record_buf_mem[3] = address + 4;
13200 thumb2_insn_r->mem_rec_count = 2;
13203 else
13205 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13207 if (bit (thumb2_insn_r->arm_insn, 24))
13209 if (bit (thumb2_insn_r->arm_insn, 23))
13210 offset_addr = u_regval[0] + (offset_imm * 4);
13211 else
13212 offset_addr = u_regval[0] - (offset_imm * 4);
13214 address = offset_addr;
13216 else
13217 address = u_regval[0];
13219 record_buf_mem[0] = 4;
13220 record_buf_mem[1] = address;
13221 record_buf_mem[2] = 4;
13222 record_buf_mem[3] = address + 4;
13223 thumb2_insn_r->mem_rec_count = 2;
13224 record_buf[0] = reg_rn;
13225 thumb2_insn_r->reg_rec_count = 1;
13229 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13230 record_buf);
13231 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13232 record_buf_mem);
13233 return ARM_RECORD_SUCCESS;
13236 /* Handler for thumb2 data processing (shift register and modified immediate)
13237 instructions. */
13239 static int
13240 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
13242 uint32_t reg_rd, op;
13243 uint32_t record_buf[8];
13245 op = bits (thumb2_insn_r->arm_insn, 21, 24);
13246 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
13248 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
13250 record_buf[0] = ARM_PS_REGNUM;
13251 thumb2_insn_r->reg_rec_count = 1;
13253 else
13255 record_buf[0] = reg_rd;
13256 record_buf[1] = ARM_PS_REGNUM;
13257 thumb2_insn_r->reg_rec_count = 2;
13260 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13261 record_buf);
13262 return ARM_RECORD_SUCCESS;
13265 /* Generic handler for thumb2 instructions which effect destination and PS
13266 registers. */
13268 static int
13269 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
13271 uint32_t reg_rd;
13272 uint32_t record_buf[8];
13274 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
13276 record_buf[0] = reg_rd;
13277 record_buf[1] = ARM_PS_REGNUM;
13278 thumb2_insn_r->reg_rec_count = 2;
13280 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13281 record_buf);
13282 return ARM_RECORD_SUCCESS;
13285 /* Handler for thumb2 branch and miscellaneous control instructions. */
13287 static int
13288 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
13290 uint32_t op, op1, op2;
13291 uint32_t record_buf[8];
13293 op = bits (thumb2_insn_r->arm_insn, 20, 26);
13294 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
13295 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
13297 /* Handle MSR insn. */
13298 if (!(op1 & 0x2) && 0x38 == op)
13300 if (!(op2 & 0x3))
13302 /* CPSR is going to be changed. */
13303 record_buf[0] = ARM_PS_REGNUM;
13304 thumb2_insn_r->reg_rec_count = 1;
13306 else
13308 arm_record_unsupported_insn (thumb2_insn_r);
13309 return -1;
13312 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
13314 /* BLX. */
13315 record_buf[0] = ARM_PS_REGNUM;
13316 record_buf[1] = ARM_LR_REGNUM;
13317 thumb2_insn_r->reg_rec_count = 2;
13320 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13321 record_buf);
13322 return ARM_RECORD_SUCCESS;
13325 /* Handler for thumb2 store single data item instructions. */
13327 static int
13328 thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
13330 struct regcache *reg_cache = thumb2_insn_r->regcache;
13332 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
13333 uint32_t address, offset_addr;
13334 uint32_t record_buf[8], record_buf_mem[8];
13335 uint32_t op1, op2;
13337 ULONGEST u_regval[2];
13339 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
13340 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
13341 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13342 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
13344 if (bit (thumb2_insn_r->arm_insn, 23))
13346 /* T2 encoding. */
13347 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
13348 offset_addr = u_regval[0] + offset_imm;
13349 address = offset_addr;
13351 else
13353 /* T3 encoding. */
13354 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
13356 /* Handle STRB (register). */
13357 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
13358 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
13359 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
13360 offset_addr = u_regval[1] << shift_imm;
13361 address = u_regval[0] + offset_addr;
13363 else
13365 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13366 if (bit (thumb2_insn_r->arm_insn, 10))
13368 if (bit (thumb2_insn_r->arm_insn, 9))
13369 offset_addr = u_regval[0] + offset_imm;
13370 else
13371 offset_addr = u_regval[0] - offset_imm;
13373 address = offset_addr;
13375 else
13376 address = u_regval[0];
13380 switch (op1)
13382 /* Store byte instructions. */
13383 case 4:
13384 case 0:
13385 record_buf_mem[0] = 1;
13386 break;
13387 /* Store half word instructions. */
13388 case 1:
13389 case 5:
13390 record_buf_mem[0] = 2;
13391 break;
13392 /* Store word instructions. */
13393 case 2:
13394 case 6:
13395 record_buf_mem[0] = 4;
13396 break;
13398 default:
13399 gdb_assert_not_reached ("no decoding pattern found");
13400 break;
13403 record_buf_mem[1] = address;
13404 thumb2_insn_r->mem_rec_count = 1;
13405 record_buf[0] = reg_rn;
13406 thumb2_insn_r->reg_rec_count = 1;
13408 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13409 record_buf);
13410 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13411 record_buf_mem);
13412 return ARM_RECORD_SUCCESS;
13415 /* Handler for thumb2 load memory hints instructions. */
13417 static int
13418 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
13420 uint32_t record_buf[8];
13421 uint32_t reg_rt, reg_rn;
13423 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
13424 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13426 if (ARM_PC_REGNUM != reg_rt)
13428 record_buf[0] = reg_rt;
13429 record_buf[1] = reg_rn;
13430 record_buf[2] = ARM_PS_REGNUM;
13431 thumb2_insn_r->reg_rec_count = 3;
13433 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13434 record_buf);
13435 return ARM_RECORD_SUCCESS;
13438 return ARM_RECORD_FAILURE;
13441 /* Handler for thumb2 load word instructions. */
13443 static int
13444 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
13446 uint32_t opcode1 = 0, opcode2 = 0;
13447 uint32_t record_buf[8];
13449 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
13450 record_buf[1] = ARM_PS_REGNUM;
13451 thumb2_insn_r->reg_rec_count = 2;
13453 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13454 record_buf);
13455 return ARM_RECORD_SUCCESS;
13458 /* Handler for thumb2 long multiply, long multiply accumulate, and
13459 divide instructions. */
13461 static int
13462 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
13464 uint32_t opcode1 = 0, opcode2 = 0;
13465 uint32_t record_buf[8];
13466 uint32_t reg_src1 = 0;
13468 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
13469 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
13471 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
13473 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
13475 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
13476 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
13477 record_buf[2] = ARM_PS_REGNUM;
13478 thumb2_insn_r->reg_rec_count = 3;
13480 else if (1 == opcode1 || 3 == opcode1)
13482 /* Handle SDIV and UDIV. */
13483 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
13484 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
13485 record_buf[2] = ARM_PS_REGNUM;
13486 thumb2_insn_r->reg_rec_count = 3;
13488 else
13489 return ARM_RECORD_FAILURE;
13491 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13492 record_buf);
13493 return ARM_RECORD_SUCCESS;
13496 /* Record handler for thumb32 coprocessor instructions. */
13498 static int
13499 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
13501 if (bit (thumb2_insn_r->arm_insn, 25))
13502 return arm_record_coproc_data_proc (thumb2_insn_r);
13503 else
13504 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
13507 /* Record handler for advanced SIMD structure load/store instructions. */
13509 static int
13510 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
13512 struct regcache *reg_cache = thumb2_insn_r->regcache;
13513 uint32_t l_bit, a_bit, b_bits;
13514 uint32_t record_buf[128], record_buf_mem[128];
13515 uint32_t reg_rn, reg_vd, address, f_esize, f_elem;
13516 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
13517 uint8_t f_ebytes;
13519 l_bit = bit (thumb2_insn_r->arm_insn, 21);
13520 a_bit = bit (thumb2_insn_r->arm_insn, 23);
13521 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
13522 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13523 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
13524 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
13525 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
13526 f_esize = 8 * f_ebytes;
13527 f_elem = 8 / f_ebytes;
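/* Worked example (illustrative): size bits [7:6] == 0b10 give
   f_ebytes = 4, hence f_esize = 32 and f_elem = 2, i.e. two 32-bit
   elements per 64-bit D register.  */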
13529 if (!l_bit)
13531 ULONGEST u_regval = 0;
13532 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
13533 address = u_regval;
13535 if (!a_bit)
13537 /* Handle VST1. */
13538 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
13540 if (b_bits == 0x07)
13541 bf_regs = 1;
13542 else if (b_bits == 0x0a)
13543 bf_regs = 2;
13544 else if (b_bits == 0x06)
13545 bf_regs = 3;
13546 else if (b_bits == 0x02)
13547 bf_regs = 4;
13548 else
13549 bf_regs = 0;
13551 for (index_r = 0; index_r < bf_regs; index_r++)
13553 for (index_e = 0; index_e < f_elem; index_e++)
13555 record_buf_mem[index_m++] = f_ebytes;
13556 record_buf_mem[index_m++] = address;
13557 address = address + f_ebytes;
13558 thumb2_insn_r->mem_rec_count += 1;
13562 /* Handle VST2. */
13563 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
13565 if (b_bits == 0x09 || b_bits == 0x08)
13566 bf_regs = 1;
13567 else if (b_bits == 0x03)
13568 bf_regs = 2;
13569 else
13570 bf_regs = 0;
13572 for (index_r = 0; index_r < bf_regs; index_r++)
13573 for (index_e = 0; index_e < f_elem; index_e++)
13575 for (loop_t = 0; loop_t < 2; loop_t++)
13577 record_buf_mem[index_m++] = f_ebytes;
13578 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
13579 thumb2_insn_r->mem_rec_count += 1;
13581 address = address + (2 * f_ebytes);
13584 /* Handle VST3. */
13585 else if ((b_bits & 0x0e) == 0x04)
13587 for (index_e = 0; index_e < f_elem; index_e++)
13589 for (loop_t = 0; loop_t < 3; loop_t++)
13591 record_buf_mem[index_m++] = f_ebytes;
13592 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
13593 thumb2_insn_r->mem_rec_count += 1;
13595 address = address + (3 * f_ebytes);
13598 /* Handle VST4. */
13599 else if (!(b_bits & 0x0e))
13601 for (index_e = 0; index_e < f_elem; index_e++)
13603 for (loop_t = 0; loop_t < 4; loop_t++)
13605 record_buf_mem[index_m++] = f_ebytes;
13606 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
13607 thumb2_insn_r->mem_rec_count += 1;
13609 address = address + (4 * f_ebytes);
13613 else
13615 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
13617 if (bft_size == 0x00)
13618 f_ebytes = 1;
13619 else if (bft_size == 0x01)
13620 f_ebytes = 2;
13621 else if (bft_size == 0x02)
13622 f_ebytes = 4;
13623 else
13624 f_ebytes = 0;
13626 /* Handle VST1. */
13627 if (!(b_bits & 0x0b) || b_bits == 0x08)
13628 thumb2_insn_r->mem_rec_count = 1;
13629 /* Handle VST2. */
13630 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
13631 thumb2_insn_r->mem_rec_count = 2;
13632 /* Handle VST3. */
13633 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
13634 thumb2_insn_r->mem_rec_count = 3;
13635 /* Handle VST4. */
13636 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
13637 thumb2_insn_r->mem_rec_count = 4;
13639 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
13641 record_buf_mem[index_m * 2] = f_ebytes;
13642 record_buf_mem[index_m * 2 + 1] = address + (index_m * f_ebytes);
13646 else
13648 if (!a_bit)
13650 /* Handle VLD1. */
13651 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
13652 thumb2_insn_r->reg_rec_count = 1;
13653 /* Handle VLD2. */
13654 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
13655 thumb2_insn_r->reg_rec_count = 2;
13656 /* Handle VLD3. */
13657 else if ((b_bits & 0x0e) == 0x04)
13658 thumb2_insn_r->reg_rec_count = 3;
13659 /* Handle VLD4. */
13660 else if (!(b_bits & 0x0e))
13661 thumb2_insn_r->reg_rec_count = 4;
13663 else
13665 /* Handle VLD1. */
13666 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
13667 thumb2_insn_r->reg_rec_count = 1;
13668 /* Handle VLD2. */
13669 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
13670 thumb2_insn_r->reg_rec_count = 2;
13671 /* Handle VLD3. */
13672 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
13673 thumb2_insn_r->reg_rec_count = 3;
13674 /* Handle VLD4. */
13675 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
13676 thumb2_insn_r->reg_rec_count = 4;
13678 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
13679 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
13683 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
13685 record_buf[index_r] = reg_rn;
13686 thumb2_insn_r->reg_rec_count += 1;
13689 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13690 record_buf);
13691 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13692 record_buf_mem);
13693 return 0;
13696 /* Decodes thumb2 instruction type and invokes its record handler. */
13698 static unsigned int
13699 thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
13701 uint32_t op, op1, op2;
13703 op = bit (thumb2_insn_r->arm_insn, 15);
13704 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
13705 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
13707 if (op1 == 0x01)
13709 if (!(op2 & 0x64))
13711 /* Load/store multiple instruction. */
13712 return thumb2_record_ld_st_multiple (thumb2_insn_r);
13714 else if (!((op2 & 0x64) ^ 0x04))
13716 /* Load/store (dual/exclusive) and table branch instruction. */
13717 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
13719 else if (!((op2 & 0x20) ^ 0x20))
13721 /* Data-processing (shifted register). */
13722 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
13724 else if (op2 & 0x40)
13726 /* Co-processor instructions. */
13727 return thumb2_record_coproc_insn (thumb2_insn_r);
13730 else if (op1 == 0x02)
13732 if (op)
13734 /* Branches and miscellaneous control instructions. */
13735 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
13737 else if (op2 & 0x20)
13739 /* Data-processing (plain binary immediate) instruction. */
13740 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13742 else
13744 /* Data-processing (modified immediate). */
13745 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
13748 else if (op1 == 0x03)
13750 if (!(op2 & 0x71))
13752 /* Store single data item. */
13753 return thumb2_record_str_single_data (thumb2_insn_r);
13755 else if (!((op2 & 0x71) ^ 0x10))
13757 /* Advanced SIMD or structure load/store instructions. */
13758 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
13760 else if (!((op2 & 0x67) ^ 0x01))
13762 /* Load byte, memory hints instruction. */
13763 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13765 else if (!((op2 & 0x67) ^ 0x03))
13767 /* Load halfword, memory hints instruction. */
13768 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13770 else if (!((op2 & 0x67) ^ 0x05))
13772 /* Load word instruction. */
13773 return thumb2_record_ld_word (thumb2_insn_r);
13775 else if (!((op2 & 0x70) ^ 0x20))
13777 /* Data-processing (register) instruction. */
13778 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13780 else if (!((op2 & 0x78) ^ 0x30))
13782 /* Multiply, multiply accumulate, abs diff instruction. */
13783 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13785 else if (!((op2 & 0x78) ^ 0x38))
13787 /* Long multiply, long multiply accumulate, and divide. */
13788 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
13790 else if (op2 & 0x40)
13792 /* Co-processor instructions. */
13793 return thumb2_record_coproc_insn (thumb2_insn_r);
13797 return -1;
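/* Worked example (illustrative, little-endian target assumed): for
   LDR.W r0, [r1, #4] the halfwords are 0xf8d1 and 0x0004; after the
   halfword swap done by the caller (decode_insn, below) the insn is
   0xf8d10004, giving op1 = 0x3 and op2 = 0x0d, and
   ((op2 & 0x67) ^ 0x05) == 0 selects thumb2_record_ld_word.  */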
13800 /* Extracts arm/thumb/thumb2 insn depending on the size, and returns 0 on success
13801 and a positive value on failure. */
13803 static int
13804 extract_arm_insn (insn_decode_record *insn_record, uint32_t insn_size)
13806 gdb_byte buf[insn_size];
13808 memset (&buf[0], 0, insn_size);
13810 if (target_read_memory (insn_record->this_addr, &buf[0], insn_size))
13811 return 1;
13812 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
13813 insn_size,
13814 gdbarch_byte_order_for_code (insn_record->gdbarch));
13815 return 0;
13818 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
13820 /* Decode arm/thumb insn depending on condition codes and opcodes, and
13821 dispatch it. */
13823 static int
13824 decode_insn (insn_decode_record *arm_record, record_type_t record_type,
13825 uint32_t insn_size)
13828 /* (Starting from numerical 0); bits 25, 26, 27 decode the type of arm instruction. */
13829 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
13831 arm_record_data_proc_misc_ld_str, /* 000. */
13832 arm_record_data_proc_imm, /* 001. */
13833 arm_record_ld_st_imm_offset, /* 010. */
13834 arm_record_ld_st_reg_offset, /* 011. */
13835 arm_record_ld_st_multiple, /* 100. */
13836 arm_record_b_bl, /* 101. */
13837 arm_record_asimd_vfp_coproc, /* 110. */
13838 arm_record_coproc_data_proc /* 111. */
13841 /* (Starting from numerical 0); bits 13, 14, 15 decode the type of thumb instruction. */
13842 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
13844 thumb_record_shift_add_sub, /* 000. */
13845 thumb_record_add_sub_cmp_mov, /* 001. */
13846 thumb_record_ld_st_reg_offset, /* 010. */
13847 thumb_record_ld_st_imm_offset, /* 011. */
13848 thumb_record_ld_st_stack, /* 100. */
13849 thumb_record_misc, /* 101. */
13850 thumb_record_ldm_stm_swi, /* 110. */
13851 thumb_record_branch /* 111. */
13854 uint32_t ret = 0; /* Return value: negative on failure, 0 on success. */
13855 uint32_t insn_id = 0;
13857 if (extract_arm_insn (arm_record, insn_size))
13859 if (record_debug)
13861 printf_unfiltered (_("Process record: error reading memory at "
13862 "addr %s len = %d.\n"),
13863 paddress (arm_record->gdbarch, arm_record->this_addr), insn_size);
13865 return -1;
13867 else if (ARM_RECORD == record_type)
13869 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13870 insn_id = bits (arm_record->arm_insn, 25, 27);
13871 ret = arm_record_extension_space (arm_record);
13872 /* If this insn has fallen into extension space
13873 then we need not decode it anymore. */
13874 if (ret != -1 && !INSN_RECORDED(arm_record))
13876 ret = arm_handle_insn[insn_id] (arm_record);
13879 else if (THUMB_RECORD == record_type)
13881 /* As thumb does not have condition codes, we set it to a negative value. */
13882 arm_record->cond = -1;
13883 insn_id = bits (arm_record->arm_insn, 13, 15);
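/* Illustrative example: a 16-bit Thumb PUSH {r0, lr} is encoded as
   0xb501; bits 13-15 are 0x5, so thumb_record_misc handles it.  */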
13884 ret = thumb_handle_insn[insn_id] (arm_record);
13886 else if (THUMB2_RECORD == record_type)
13888 /* As thumb does not have condition codes, we set it to a negative value. */
13889 arm_record->cond = -1;
13891 /* Swap the first half of the 32-bit thumb instruction with the second half. */
13892 arm_record->arm_insn
13893 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
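/* Illustrative example: a fetched value of 0xaaaabbbb becomes
   0xbbbbaaaa after the swap; on a little-endian target this leaves the
   first halfword of the encoding in bits 16-31, which is what the
   thumb2 record handlers assume.  */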
13895 insn_id = thumb2_record_decode_insn_handler (arm_record);
13897 if (insn_id != ARM_RECORD_SUCCESS)
13899 arm_record_unsupported_insn (arm_record);
13900 ret = -1;
13903 else
13905 /* Throw assertion. */
13906 gdb_assert_not_reached ("not a valid instruction, could not decode");
13909 return ret;
13913 /* Cleans up local record registers and memory allocations. */
13915 static void
13916 deallocate_reg_mem (insn_decode_record *record)
13918 xfree (record->arm_regs);
13919 xfree (record->arm_mems);
13923 /* Parse the current instruction and record the values of the registers and
13924 memory that will be changed by the current instruction to "record_arch_list".
13925 Return -1 if something is wrong. */
13927 int
13928 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13929 CORE_ADDR insn_addr)
13932 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
13933 uint32_t no_of_rec = 0;
13934 uint32_t ret = 0; /* Return value: -1 on record failure, 0 on success. */
13935 ULONGEST t_bit = 0, insn_id = 0;
13937 ULONGEST u_regval = 0;
13939 insn_decode_record arm_record;
13941 memset (&arm_record, 0, sizeof (insn_decode_record));
13942 arm_record.regcache = regcache;
13943 arm_record.this_addr = insn_addr;
13944 arm_record.gdbarch = gdbarch;
13947 if (record_debug > 1)
13949 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
13950 "addr = %s\n",
13951 paddress (gdbarch, arm_record.this_addr));
13954 if (extract_arm_insn (&arm_record, 2))
13956 if (record_debug)
13958 printf_unfiltered (_("Process record: error reading memory at "
13959 "addr %s len = %d.\n"),
13960 paddress (arm_record.gdbarch,
13961 arm_record.this_addr), 2);
13963 return -1;
13966 /* Check whether the insn is a thumb or an arm one. */
13968 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13969 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13972 if (!(u_regval & t_bit))
13974 /* We are decoding arm insn. */
13975 ret = decode_insn (&arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13977 else
13979 insn_id = bits (arm_record.arm_insn, 11, 15);
13980 /* Is it a 32-bit thumb2 insn? Only those have bits 11-15 equal to 0x1d, 0x1e or 0x1f. */
13981 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
13983 ret = decode_insn (&arm_record, THUMB2_RECORD,
13984 THUMB2_INSN_SIZE_BYTES);
13986 else
13988 /* We are decoding thumb insn. */
13989 ret = decode_insn (&arm_record, THUMB_RECORD, THUMB_INSN_SIZE_BYTES);
13993 if (0 == ret)
13995 /* Record registers. */
13996 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
13997 if (arm_record.arm_regs)
13999 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
14001 if (record_full_arch_list_add_reg
14002 (arm_record.regcache, arm_record.arm_regs[no_of_rec]))
14003 ret = -1;
14006 /* Record memories. */
14007 if (arm_record.arm_mems)
14009 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
14011 if (record_full_arch_list_add_mem
14012 ((CORE_ADDR) arm_record.arm_mems[no_of_rec].addr,
14013 arm_record.arm_mems[no_of_rec].len))
14014 ret = -1;
14018 if (record_full_arch_list_add_end ())
14019 ret = -1;
14023 deallocate_reg_mem (&arm_record);
14025 return ret;
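/* Minimal usage sketch (illustrative): the recorder is wired into the
   architecture vector elsewhere in this file, roughly as

     set_gdbarch_process_record (gdbarch, arm_process_record);

   after which "record full" calls arm_process_record for each
   instruction before it executes, logging the registers and memory it
   will modify.  */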