1 /* Common target dependent code for GDB on ARM systems.
3 Copyright (C) 1988-2016 Free Software Foundation, Inc.
5 This file is part of GDB.
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
20 #include "defs.h"
22 #include <ctype.h> /* XXX for isupper (). */
24 #include "frame.h"
25 #include "inferior.h"
26 #include "infrun.h"
27 #include "gdbcmd.h"
28 #include "gdbcore.h"
29 #include "dis-asm.h" /* For register styles. */
30 #include "regcache.h"
31 #include "reggroups.h"
32 #include "doublest.h"
33 #include "value.h"
34 #include "arch-utils.h"
35 #include "osabi.h"
36 #include "frame-unwind.h"
37 #include "frame-base.h"
38 #include "trad-frame.h"
39 #include "objfiles.h"
40 #include "dwarf2-frame.h"
41 #include "gdbtypes.h"
42 #include "prologue-value.h"
43 #include "remote.h"
44 #include "target-descriptions.h"
45 #include "user-regs.h"
46 #include "observer.h"
48 #include "arch/arm.h"
49 #include "arch/arm-get-next-pcs.h"
50 #include "arm-tdep.h"
51 #include "gdb/sim-arm.h"
53 #include "elf-bfd.h"
54 #include "coff/internal.h"
55 #include "elf/arm.h"
57 #include "vec.h"
59 #include "record.h"
60 #include "record-full.h"
61 #include <algorithm>
63 #include "features/arm-with-m.c"
64 #include "features/arm-with-m-fpa-layout.c"
65 #include "features/arm-with-m-vfp-d16.c"
66 #include "features/arm-with-iwmmxt.c"
67 #include "features/arm-with-vfpv2.c"
68 #include "features/arm-with-vfpv3.c"
69 #include "features/arm-with-neon.c"
71 static int arm_debug;
73 /* Macros for setting and testing a bit in a minimal symbol that marks
74 it as a Thumb function. The MSB of the minimal symbol's "info" field
75 is used for this purpose.
77 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
78 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
80 #define MSYMBOL_SET_SPECIAL(msym) \
81 MSYMBOL_TARGET_FLAG_1 (msym) = 1
83 #define MSYMBOL_IS_SPECIAL(msym) \
84 MSYMBOL_TARGET_FLAG_1 (msym)
86 /* Per-objfile data used for mapping symbols. */
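/* Each arm_mapping_symbol records the offset of an ELF mapping symbol
   ($a, $t or $d, marking ARM code, Thumb code and data respectively)
   within its section.  The per-section vectors are kept sorted by offset
   so that arm_find_mapping_symbol can binary-search them with
   VEC_lower_bound.  */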
87 static const struct objfile_data *arm_objfile_data_key;
89 struct arm_mapping_symbol
91 bfd_vma value;
92 char type;
94 typedef struct arm_mapping_symbol arm_mapping_symbol_s;
95 DEF_VEC_O(arm_mapping_symbol_s);
97 struct arm_per_objfile
99 VEC(arm_mapping_symbol_s) **section_maps;
102 /* The list of available "set arm ..." and "show arm ..." commands. */
103 static struct cmd_list_element *setarmcmdlist = NULL;
104 static struct cmd_list_element *showarmcmdlist = NULL;
106 /* The type of floating-point to use. Keep this in sync with enum
107 arm_float_model, and the help string in _initialize_arm_tdep. */
108 static const char *const fp_model_strings[] =
110 "auto",
111 "softfpa",
112 "fpa",
113 "softvfp",
114 "vfp",
115 NULL
118 /* A variable that can be configured by the user. */
119 static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
120 static const char *current_fp_model = "auto";
122 /* The ABI to use. Keep this in sync with arm_abi_kind. */
123 static const char *const arm_abi_strings[] =
125 "auto",
126 "APCS",
127 "AAPCS",
128 NULL
131 /* A variable that can be configured by the user. */
132 static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
133 static const char *arm_abi_string = "auto";
135 /* The execution mode to assume. */
136 static const char *const arm_mode_strings[] =
138 "auto",
139 "arm",
140 "thumb",
141 NULL
144 static const char *arm_fallback_mode_string = "auto";
145 static const char *arm_force_mode_string = "auto";
147 /* Internal override of the execution mode. -1 means no override,
148 0 means override to ARM mode, 1 means override to Thumb mode.
149 The effect is the same as if arm_force_mode has been set by the
150 user (except the internal override has precedence over a user's
151 arm_force_mode override). */
152 static int arm_override_mode = -1;
154 /* Number of different reg name sets (options). */
155 static int num_disassembly_options;
157 /* The standard register names, and all the valid aliases for them. Note
158 that `fp', `sp' and `pc' are not added in this alias list, because they
159 have been added as builtin user registers in
160 std-regs.c:_initialize_frame_reg. */
161 static const struct
163 const char *name;
164 int regnum;
165 } arm_register_aliases[] = {
166 /* Basic register numbers. */
167 { "r0", 0 },
168 { "r1", 1 },
169 { "r2", 2 },
170 { "r3", 3 },
171 { "r4", 4 },
172 { "r5", 5 },
173 { "r6", 6 },
174 { "r7", 7 },
175 { "r8", 8 },
176 { "r9", 9 },
177 { "r10", 10 },
178 { "r11", 11 },
179 { "r12", 12 },
180 { "r13", 13 },
181 { "r14", 14 },
182 { "r15", 15 },
183 /* Synonyms (argument and variable registers). */
184 { "a1", 0 },
185 { "a2", 1 },
186 { "a3", 2 },
187 { "a4", 3 },
188 { "v1", 4 },
189 { "v2", 5 },
190 { "v3", 6 },
191 { "v4", 7 },
192 { "v5", 8 },
193 { "v6", 9 },
194 { "v7", 10 },
195 { "v8", 11 },
196 /* Other platform-specific names for r9. */
197 { "sb", 9 },
198 { "tr", 9 },
199 /* Special names. */
200 { "ip", 12 },
201 { "lr", 14 },
202 /* Names used by GCC (not listed in the ARM EABI). */
203 { "sl", 10 },
204 /* A special name from the older ATPCS. */
205 { "wr", 7 },
208 static const char *const arm_register_names[] =
209 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
210 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
211 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
212 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
213 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
214 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
215 "fps", "cpsr" }; /* 24 25 */
217 /* Valid register name styles. */
218 static const char **valid_disassembly_styles;
220 /* Disassembly style to use. Default to "std" register names. */
221 static const char *disassembly_style;
223 /* This is used to keep the bfd arch_info in sync with the disassembly
224 style. */
225 static void set_disassembly_style_sfunc(char *, int,
226 struct cmd_list_element *);
227 static void set_disassembly_style (void);
229 static void convert_from_extended (const struct floatformat *, const void *,
230 void *, int);
231 static void convert_to_extended (const struct floatformat *, void *,
232 const void *, int);
234 static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
235 struct regcache *regcache,
236 int regnum, gdb_byte *buf);
237 static void arm_neon_quad_write (struct gdbarch *gdbarch,
238 struct regcache *regcache,
239 int regnum, const gdb_byte *buf);
241 static CORE_ADDR
242 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self);
245 /* get_next_pcs operations. */
246 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops = {
247 arm_get_next_pcs_read_memory_unsigned_integer,
248 arm_get_next_pcs_syscall_next_pc,
249 arm_get_next_pcs_addr_bits_remove,
250 arm_get_next_pcs_is_thumb,
251 NULL,
254 struct arm_prologue_cache
256 /* The stack pointer at the time this frame was created; i.e. the
257 caller's stack pointer when this function was called. It is used
258 to identify this frame. */
259 CORE_ADDR prev_sp;
261 /* The frame base for this frame is just prev_sp - frame size.
262 FRAMESIZE is the distance from the frame pointer to the
263 initial stack pointer. */
265 int framesize;
267 /* The register used to hold the frame pointer for this frame. */
268 int framereg;
270 /* Saved register offsets. */
271 struct trad_frame_saved_reg *saved_regs;
274 static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
275 CORE_ADDR prologue_start,
276 CORE_ADDR prologue_end,
277 struct arm_prologue_cache *cache);
279 /* Architecture version for displaced stepping.  This affects the behaviour of
280 certain instructions, and really should not be hard-wired. */
282 #define DISPLACED_STEPPING_ARCH_VERSION 5
284 /* Set to true if the 32-bit mode is in use. */
286 int arm_apcs_32 = 1;
288 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
291 arm_psr_thumb_bit (struct gdbarch *gdbarch)
293 if (gdbarch_tdep (gdbarch)->is_m)
294 return XPSR_T;
295 else
296 return CPSR_T;
299 /* Determine if the processor is currently executing in Thumb mode. */
302 arm_is_thumb (struct regcache *regcache)
304 ULONGEST cpsr;
305 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regcache));
307 cpsr = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
309 return (cpsr & t_bit) != 0;
312 /* Determine if FRAME is executing in Thumb mode. */
315 arm_frame_is_thumb (struct frame_info *frame)
317 CORE_ADDR cpsr;
318 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
320 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
321 directly (from a signal frame or dummy frame) or by interpreting
322 the saved LR (from a prologue or DWARF frame). So consult it and
323 trust the unwinders. */
324 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
326 return (cpsr & t_bit) != 0;
329 /* Callback for VEC_lower_bound. */
331 static inline int
332 arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
333 const struct arm_mapping_symbol *rhs)
335 return lhs->value < rhs->value;
338 /* Search for the mapping symbol covering MEMADDR. If one is found,
339 return its type. Otherwise, return 0. If START is non-NULL,
340 set *START to the location of the mapping symbol. */
342 static char
343 arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
345 struct obj_section *sec;
347 /* If there are mapping symbols, consult them. */
348 sec = find_pc_section (memaddr);
349 if (sec != NULL)
351 struct arm_per_objfile *data;
352 VEC(arm_mapping_symbol_s) *map;
353 struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
354 0 };
355 unsigned int idx;
357 data = (struct arm_per_objfile *) objfile_data (sec->objfile,
358 arm_objfile_data_key);
359 if (data != NULL)
361 map = data->section_maps[sec->the_bfd_section->index];
362 if (!VEC_empty (arm_mapping_symbol_s, map))
364 struct arm_mapping_symbol *map_sym;
366 idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
367 arm_compare_mapping_symbols);
369 /* VEC_lower_bound finds the earliest ordered insertion
370 point. If the following symbol starts at this exact
371 address, we use that; otherwise, the preceding
372 mapping symbol covers this address. */
373 if (idx < VEC_length (arm_mapping_symbol_s, map))
375 map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
376 if (map_sym->value == map_key.value)
378 if (start)
379 *start = map_sym->value + obj_section_addr (sec);
380 return map_sym->type;
384 if (idx > 0)
386 map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
387 if (start)
388 *start = map_sym->value + obj_section_addr (sec);
389 return map_sym->type;
395 return 0;
398 /* Determine if the program counter specified in MEMADDR is in a Thumb
399 function. This function should be called for addresses unrelated to
400 any executing frame; otherwise, prefer arm_frame_is_thumb. */
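/* The checks below are applied in priority order: a pending displaced-step
   closure, the Thumb bit of the address itself, the internal and user mode
   overrides, M-profile targets (always Thumb), mapping symbols, the minimal
   symbol's "special" bit, the user's fallback mode, and finally the live
   CPSR if the target is running.  */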
403 arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
405 struct bound_minimal_symbol sym;
406 char type;
407 struct displaced_step_closure* dsc
408 = get_displaced_step_closure_by_addr(memaddr);
410 /* If checking the mode of a displaced instruction in the copy area, the
411 mode should be determined by the instruction at the original address. */
412 if (dsc)
414 if (debug_displaced)
415 fprintf_unfiltered (gdb_stdlog,
416 "displaced: check mode of %.8lx instead of %.8lx\n",
417 (unsigned long) dsc->insn_addr,
418 (unsigned long) memaddr);
419 memaddr = dsc->insn_addr;
422 /* If bit 0 of the address is set, assume this is a Thumb address. */
423 if (IS_THUMB_ADDR (memaddr))
424 return 1;
426 /* Respect internal mode override if active. */
427 if (arm_override_mode != -1)
428 return arm_override_mode;
430 /* If the user wants to override the symbol table, let them. */
431 if (strcmp (arm_force_mode_string, "arm") == 0)
432 return 0;
433 if (strcmp (arm_force_mode_string, "thumb") == 0)
434 return 1;
436 /* ARM v6-M and v7-M are always in Thumb mode. */
437 if (gdbarch_tdep (gdbarch)->is_m)
438 return 1;
440 /* If there are mapping symbols, consult them. */
441 type = arm_find_mapping_symbol (memaddr, NULL);
442 if (type)
443 return type == 't';
445 /* Thumb functions have a "special" bit set in minimal symbols. */
446 sym = lookup_minimal_symbol_by_pc (memaddr);
447 if (sym.minsym)
448 return (MSYMBOL_IS_SPECIAL (sym.minsym));
450 /* If the user wants to override the fallback mode, let them. */
451 if (strcmp (arm_fallback_mode_string, "arm") == 0)
452 return 0;
453 if (strcmp (arm_fallback_mode_string, "thumb") == 0)
454 return 1;
456 /* If we couldn't find any symbol, but we're talking to a running
457 target, then trust the current value of $cpsr. This lets
458 "display/i $pc" always show the correct mode (though if there is
459 a symbol table we will not reach here, so it still may not be
460 displayed in the mode it will be executed). */
461 if (target_has_registers)
462 return arm_frame_is_thumb (get_current_frame ());
464 /* Otherwise we're out of luck; we assume ARM. */
465 return 0;
468 /* Remove useless bits from addresses in a running program. */
469 static CORE_ADDR
470 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
472 /* On M-profile devices, do not strip the low bit from EXC_RETURN
473 (the magic exception return address). */
474 if (gdbarch_tdep (gdbarch)->is_m
475 && (val & 0xfffffff0) == 0xfffffff0)
476 return val;
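/* In 32-bit (APCS-32) mode only a stray Thumb bit needs stripping.  In the
   legacy 26-bit mode R15 also holds the PSR: the mode bits live in bits 0-1
   and the flags and interrupt masks in bits 26-31, so only bits 2-25 form
   the program counter.  */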
478 if (arm_apcs_32)
479 return UNMAKE_THUMB_ADDR (val);
480 else
481 return (val & 0x03fffffc);
484 /* Return 1 if PC is the start of a compiler helper function which
485 can be safely ignored during prologue skipping. IS_THUMB is true
486 if the function is known to be a Thumb function due to the way it
487 is being called. */
488 static int
489 skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
491 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
492 struct bound_minimal_symbol msym;
494 msym = lookup_minimal_symbol_by_pc (pc);
495 if (msym.minsym != NULL
496 && BMSYMBOL_VALUE_ADDRESS (msym) == pc
497 && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
499 const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);
501 /* The GNU linker's Thumb call stub to foo is named
502 __foo_from_thumb. */
503 if (strstr (name, "_from_thumb") != NULL)
504 name += 2;
506 /* On soft-float targets, __truncdfsf2 is called to convert promoted
507 arguments to their argument types in non-prototyped
508 functions. */
509 if (startswith (name, "__truncdfsf2"))
510 return 1;
511 if (startswith (name, "__aeabi_d2f"))
512 return 1;
514 /* Internal functions related to thread-local storage. */
515 if (startswith (name, "__tls_get_addr"))
516 return 1;
517 if (startswith (name, "__aeabi_read_tp"))
518 return 1;
520 else
522 /* If we run against a stripped glibc, we may be unable to identify
523 special functions by name. Check for one important case,
524 __aeabi_read_tp, by comparing the *code* against the default
525 implementation (this is hand-written ARM assembler in glibc). */
527 if (!is_thumb
528 && read_memory_unsigned_integer (pc, 4, byte_order_for_code)
529 == 0xe3e00a0f /* mov r0, #0xffff0fff */
530 && read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code)
531 == 0xe240f01f) /* sub pc, r0, #31 */
532 return 1;
535 return 0;
538 /* Extract the immediate from a movw/movt instruction of encoding T.  INSN1
539 is the first 16-bit halfword of the instruction and INSN2 is the second
540 16-bit halfword. */
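/* In encoding T the 16-bit value is assembled as imm4:i:imm3:imm8, where
   imm4 is INSN1 bits 0-3, i is INSN1 bit 10, imm3 is INSN2 bits 12-14 and
   imm8 is INSN2 bits 0-7; the macro below simply reassembles those
   fields.  */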
541 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
542 ((bits ((insn1), 0, 3) << 12) \
543 | (bits ((insn1), 10, 10) << 11) \
544 | (bits ((insn2), 12, 14) << 8) \
545 | bits ((insn2), 0, 7))
547 /* Extract the immediate from a movw/movt instruction of encoding A.  INSN
548 is the 32-bit instruction. */
549 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
550 ((bits ((insn), 16, 19) << 12) \
551 | bits ((insn), 0, 11))
553 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
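/* IMM is the 12-bit value i:imm3:imm8 taken from the instruction.  When the
   top two bits of the five-bit rotation field (imm<11:7>) are zero, the low
   byte is replicated according to imm<9:8>; otherwise the value
   0x80 | imm<6:0> is rotated right by the rotation amount.  */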
555 static unsigned int
556 thumb_expand_immediate (unsigned int imm)
558 unsigned int count = imm >> 7;
560 if (count < 8)
561 switch (count / 2)
563 case 0:
564 return imm & 0xff;
565 case 1:
566 return (imm & 0xff) | ((imm & 0xff) << 16);
567 case 2:
568 return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
569 case 3:
570 return (imm & 0xff) | ((imm & 0xff) << 8)
571 | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
574 return (0x80 | (imm & 0x7f)) << (32 - count);
577 /* Return 1 if the 16-bit Thumb instruction INSN restores SP in the
578 epilogue, 0 otherwise. */
580 static int
581 thumb_instruction_restores_sp (unsigned short insn)
583 return (insn == 0x46bd /* mov sp, r7 */
584 || (insn & 0xff80) == 0xb000 /* add sp, imm */
585 || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
588 /* Analyze a Thumb prologue, looking for a recognizable stack frame
589 and frame pointer. Scan until we encounter a store that could
590 clobber the stack frame unexpectedly, or an unknown instruction.
591 Return the last address which is definitely safe to skip for an
592 initial breakpoint. */
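/* The analysis below uses GDB's prologue-value machinery: each register
   starts out as a symbolic expression in terms of its value on entry,
   stores are recorded in a pv_area keyed by the entry SP, and at the end
   pv_area_find_reg recovers the stack offsets of any saved registers.  */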
594 static CORE_ADDR
595 thumb_analyze_prologue (struct gdbarch *gdbarch,
596 CORE_ADDR start, CORE_ADDR limit,
597 struct arm_prologue_cache *cache)
599 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
600 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
601 int i;
602 pv_t regs[16];
603 struct pv_area *stack;
604 struct cleanup *back_to;
605 CORE_ADDR offset;
606 CORE_ADDR unrecognized_pc = 0;
608 for (i = 0; i < 16; i++)
609 regs[i] = pv_register (i, 0);
610 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
611 back_to = make_cleanup_free_pv_area (stack);
613 while (start < limit)
615 unsigned short insn;
617 insn = read_memory_unsigned_integer (start, 2, byte_order_for_code);
619 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
621 int regno;
622 int mask;
624 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
625 break;
627 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
628 whether to save LR (R14). */
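/* Shifting bit 8 up to bit 14 lets the loop below treat LR exactly
   like R0-R7.  */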
629 mask = (insn & 0xff) | ((insn & 0x100) << 6);
631 /* Calculate offsets of saved R0-R7 and LR. */
632 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
633 if (mask & (1 << regno))
635 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
636 -4);
637 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
640 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
642 offset = (insn & 0x7f) << 2; /* get scaled offset */
643 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
644 -offset);
646 else if (thumb_instruction_restores_sp (insn))
648 /* Don't scan past the epilogue. */
649 break;
651 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
652 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
653 (insn & 0xff) << 2);
654 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
655 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
656 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
657 bits (insn, 6, 8));
658 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
659 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
660 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
661 bits (insn, 0, 7));
662 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
663 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
664 && pv_is_constant (regs[bits (insn, 3, 5)]))
665 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
666 regs[bits (insn, 6, 8)]);
667 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
668 && pv_is_constant (regs[bits (insn, 3, 6)]))
670 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
671 int rm = bits (insn, 3, 6);
672 regs[rd] = pv_add (regs[rd], regs[rm]);
674 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
676 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
677 int src_reg = (insn & 0x78) >> 3;
678 regs[dst_reg] = regs[src_reg];
680 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
682 /* Handle stores to the stack. Normally pushes are used,
683 but with GCC -mtpcs-frame, there may be other stores
684 in the prologue to create the frame. */
685 int regno = (insn >> 8) & 0x7;
686 pv_t addr;
688 offset = (insn & 0xff) << 2;
689 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
691 if (pv_area_store_would_trash (stack, addr))
692 break;
694 pv_area_store (stack, addr, 4, regs[regno]);
696 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
698 int rd = bits (insn, 0, 2);
699 int rn = bits (insn, 3, 5);
700 pv_t addr;
702 offset = bits (insn, 6, 10) << 2;
703 addr = pv_add_constant (regs[rn], offset);
705 if (pv_area_store_would_trash (stack, addr))
706 break;
708 pv_area_store (stack, addr, 4, regs[rd]);
710 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
711 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
712 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
713 /* Ignore stores of argument registers to the stack. */
715 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
716 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
717 /* Ignore block loads from the stack, potentially copying
718 parameters from memory. */
720 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
721 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
722 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
723 /* Similarly ignore single loads from the stack. */
725 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
726 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
727 /* Skip register copies, i.e. saves to another register
728 instead of the stack. */
730 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
731 /* Recognize constant loads; even with small stacks these are necessary
732 on Thumb. */
733 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
734 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
736 /* Constant pool loads, for the same reason. */
737 unsigned int constant;
738 CORE_ADDR loc;
740 loc = start + 4 + bits (insn, 0, 7) * 4;
741 constant = read_memory_unsigned_integer (loc, 4, byte_order);
742 regs[bits (insn, 8, 10)] = pv_constant (constant);
744 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
746 unsigned short inst2;
748 inst2 = read_memory_unsigned_integer (start + 2, 2,
749 byte_order_for_code);
751 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
753 /* BL, BLX. Allow some special function calls when
754 skipping the prologue; GCC generates these before
755 storing arguments to the stack. */
756 CORE_ADDR nextpc;
757 int j1, j2, imm1, imm2;
759 imm1 = sbits (insn, 0, 10);
760 imm2 = bits (inst2, 0, 10);
761 j1 = bit (inst2, 13);
762 j2 = bit (inst2, 11);
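/* IMM1 is the sign-extended S:imm10 field, so bits 22 and 23 of OFFSET
   start out as copies of S; XORing them with !J2 and !J1 turns them into
   I2 = NOT(J2 EOR S) and I1 = NOT(J1 EOR S), which is how the T1/T2
   branch encodings define those bits.  */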
764 offset = ((imm1 << 12) + (imm2 << 1));
765 offset ^= ((!j2) << 22) | ((!j1) << 23);
767 nextpc = start + 4 + offset;
768 /* For BLX make sure to clear the low bits. */
769 if (bit (inst2, 12) == 0)
770 nextpc = nextpc & 0xfffffffc;
772 if (!skip_prologue_function (gdbarch, nextpc,
773 bit (inst2, 12) != 0))
774 break;
777 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
778 { registers } */
779 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
781 pv_t addr = regs[bits (insn, 0, 3)];
782 int regno;
784 if (pv_area_store_would_trash (stack, addr))
785 break;
787 /* Calculate offsets of saved registers. */
788 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
789 if (inst2 & (1 << regno))
791 addr = pv_add_constant (addr, -4);
792 pv_area_store (stack, addr, 4, regs[regno]);
795 if (insn & 0x0020)
796 regs[bits (insn, 0, 3)] = addr;
799 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
800 [Rn, #+/-imm]{!} */
801 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
803 int regno1 = bits (inst2, 12, 15);
804 int regno2 = bits (inst2, 8, 11);
805 pv_t addr = regs[bits (insn, 0, 3)];
807 offset = inst2 & 0xff;
808 if (insn & 0x0080)
809 addr = pv_add_constant (addr, offset);
810 else
811 addr = pv_add_constant (addr, -offset);
813 if (pv_area_store_would_trash (stack, addr))
814 break;
816 pv_area_store (stack, addr, 4, regs[regno1]);
817 pv_area_store (stack, pv_add_constant (addr, 4),
818 4, regs[regno2]);
820 if (insn & 0x0020)
821 regs[bits (insn, 0, 3)] = addr;
824 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
825 && (inst2 & 0x0c00) == 0x0c00
826 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
828 int regno = bits (inst2, 12, 15);
829 pv_t addr = regs[bits (insn, 0, 3)];
831 offset = inst2 & 0xff;
832 if (inst2 & 0x0200)
833 addr = pv_add_constant (addr, offset);
834 else
835 addr = pv_add_constant (addr, -offset);
837 if (pv_area_store_would_trash (stack, addr))
838 break;
840 pv_area_store (stack, addr, 4, regs[regno]);
842 if (inst2 & 0x0100)
843 regs[bits (insn, 0, 3)] = addr;
846 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
847 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
849 int regno = bits (inst2, 12, 15);
850 pv_t addr;
852 offset = inst2 & 0xfff;
853 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
855 if (pv_area_store_would_trash (stack, addr))
856 break;
858 pv_area_store (stack, addr, 4, regs[regno]);
861 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
862 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
863 /* Ignore stores of argument registers to the stack. */
866 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
867 && (inst2 & 0x0d00) == 0x0c00
868 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
869 /* Ignore stores of argument registers to the stack. */
872 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
873 { registers } */
874 && (inst2 & 0x8000) == 0x0000
875 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
876 /* Ignore block loads from the stack, potentially copying
877 parameters from memory. */
880 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
881 [Rn, #+/-imm] */
882 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
883 /* Similarly ignore dual loads from the stack. */
886 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
887 && (inst2 & 0x0d00) == 0x0c00
888 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
889 /* Similarly ignore single loads from the stack. */
892 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
893 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
894 /* Similarly ignore single loads from the stack. */
897 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
898 && (inst2 & 0x8000) == 0x0000)
900 unsigned int imm = ((bits (insn, 10, 10) << 11)
901 | (bits (inst2, 12, 14) << 8)
902 | bits (inst2, 0, 7));
904 regs[bits (inst2, 8, 11)]
905 = pv_add_constant (regs[bits (insn, 0, 3)],
906 thumb_expand_immediate (imm));
909 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
910 && (inst2 & 0x8000) == 0x0000)
912 unsigned int imm = ((bits (insn, 10, 10) << 11)
913 | (bits (inst2, 12, 14) << 8)
914 | bits (inst2, 0, 7));
916 regs[bits (inst2, 8, 11)]
917 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
920 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
921 && (inst2 & 0x8000) == 0x0000)
923 unsigned int imm = ((bits (insn, 10, 10) << 11)
924 | (bits (inst2, 12, 14) << 8)
925 | bits (inst2, 0, 7));
927 regs[bits (inst2, 8, 11)]
928 = pv_add_constant (regs[bits (insn, 0, 3)],
929 - (CORE_ADDR) thumb_expand_immediate (imm));
932 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
933 && (inst2 & 0x8000) == 0x0000)
935 unsigned int imm = ((bits (insn, 10, 10) << 11)
936 | (bits (inst2, 12, 14) << 8)
937 | bits (inst2, 0, 7));
939 regs[bits (inst2, 8, 11)]
940 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
943 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
945 unsigned int imm = ((bits (insn, 10, 10) << 11)
946 | (bits (inst2, 12, 14) << 8)
947 | bits (inst2, 0, 7));
949 regs[bits (inst2, 8, 11)]
950 = pv_constant (thumb_expand_immediate (imm));
953 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
955 unsigned int imm
956 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
958 regs[bits (inst2, 8, 11)] = pv_constant (imm);
961 else if (insn == 0xea5f /* mov.w Rd,Rm */
962 && (inst2 & 0xf0f0) == 0)
964 int dst_reg = (inst2 & 0x0f00) >> 8;
965 int src_reg = inst2 & 0xf;
966 regs[dst_reg] = regs[src_reg];
969 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
971 /* Constant pool loads. */
972 unsigned int constant;
973 CORE_ADDR loc;
975 offset = bits (inst2, 0, 11);
976 if (insn & 0x0080)
977 loc = start + 4 + offset;
978 else
979 loc = start + 4 - offset;
981 constant = read_memory_unsigned_integer (loc, 4, byte_order);
982 regs[bits (inst2, 12, 15)] = pv_constant (constant);
985 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
987 /* Constant pool loads. */
988 unsigned int constant;
989 CORE_ADDR loc;
991 offset = bits (inst2, 0, 7) << 2;
992 if (insn & 0x0080)
993 loc = start + 4 + offset;
994 else
995 loc = start + 4 - offset;
997 constant = read_memory_unsigned_integer (loc, 4, byte_order);
998 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1000 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1001 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1004 else if (thumb2_instruction_changes_pc (insn, inst2))
1006 /* Don't scan past anything that might change control flow. */
1007 break;
1009 else
1011 /* The optimizer might shove anything into the prologue,
1012 so we just skip what we don't recognize. */
1013 unrecognized_pc = start;
1016 start += 2;
1018 else if (thumb_instruction_changes_pc (insn))
1020 /* Don't scan past anything that might change control flow. */
1021 break;
1023 else
1025 /* The optimizer might shove anything into the prologue,
1026 so we just skip what we don't recognize. */
1027 unrecognized_pc = start;
1030 start += 2;
1033 if (arm_debug)
1034 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1035 paddress (gdbarch, start));
1037 if (unrecognized_pc == 0)
1038 unrecognized_pc = start;
1040 if (cache == NULL)
1042 do_cleanups (back_to);
1043 return unrecognized_pc;
1046 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1048 /* Frame pointer is fp. Frame size is constant. */
1049 cache->framereg = ARM_FP_REGNUM;
1050 cache->framesize = -regs[ARM_FP_REGNUM].k;
1052 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1054 /* Frame pointer is r7. Frame size is constant. */
1055 cache->framereg = THUMB_FP_REGNUM;
1056 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1058 else
1060 /* Try the stack pointer... this is a bit desperate. */
1061 cache->framereg = ARM_SP_REGNUM;
1062 cache->framesize = -regs[ARM_SP_REGNUM].k;
1065 for (i = 0; i < 16; i++)
1066 if (pv_area_find_reg (stack, gdbarch, i, &offset))
1067 cache->saved_regs[i].addr = offset;
1069 do_cleanups (back_to);
1070 return unrecognized_pc;
1074 /* Try to analyze the instructions starting from PC, which load the symbol
1075 __stack_chk_guard.  Return the address of the instruction following the
1076 load, set the destination register number in *DESTREG, and set the size
1077 of the loading instructions in *OFFSET.  Return 0 if the instructions are
1078 not recognized. */
1080 static CORE_ADDR
1081 arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
1082 unsigned int *destreg, int *offset)
1084 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1085 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1086 unsigned int low, high, address;
1088 address = 0;
1089 if (is_thumb)
1091 unsigned short insn1
1092 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);
1094 if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
1096 *destreg = bits (insn1, 8, 10);
1097 *offset = 2;
1098 address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
1099 address = read_memory_unsigned_integer (address, 4,
1100 byte_order_for_code);
1102 else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
1104 unsigned short insn2
1105 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);
1107 low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1109 insn1
1110 = read_memory_unsigned_integer (pc + 4, 2, byte_order_for_code);
1111 insn2
1112 = read_memory_unsigned_integer (pc + 6, 2, byte_order_for_code);
1114 /* movt Rd, #const */
1115 if ((insn1 & 0xfbc0) == 0xf2c0)
1117 high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
1118 *destreg = bits (insn2, 8, 11);
1119 *offset = 8;
1120 address = (high << 16 | low);
1124 else
1126 unsigned int insn
1127 = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
1129 if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1131 address = bits (insn, 0, 11) + pc + 8;
1132 address = read_memory_unsigned_integer (address, 4,
1133 byte_order_for_code);
1135 *destreg = bits (insn, 12, 15);
1136 *offset = 4;
1138 else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1140 low = EXTRACT_MOVW_MOVT_IMM_A (insn);
1142 insn
1143 = read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code);
1145 if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1147 high = EXTRACT_MOVW_MOVT_IMM_A (insn);
1148 *destreg = bits (insn, 12, 15);
1149 *offset = 8;
1150 address = (high << 16 | low);
1155 return address;
1158 /* Try to skip the sequence of instructions used for the stack protector.  If
1159 PC points to the first instruction of this sequence, return the address of
1160 the first instruction after the sequence; otherwise, return the original PC.
1162 On ARM, this sequence is mainly composed of three steps:
1163 Step 1: load symbol __stack_chk_guard,
1164 Step 2: load from address of __stack_chk_guard,
1165 Step 3: store it to somewhere else.
1167 The instructions in step 2 and step 3 are usually the same across ARM
1168 architectures: step 2 is the single instruction 'ldr Rx, [Rn, #0]', and
1169 step 3 is the single instruction 'str Rx, [r7, #immd]'.  However, the
1170 instructions in step 1 vary between ARM architectures.  On ARMv7 they
1171 are,
1173 movw Rn, #:lower16:__stack_chk_guard
1174 movt Rn, #:upper16:__stack_chk_guard
1176 On ARMv5t, it is,
1178 ldr Rn, .Label
1179 ....
1180 .Label:
1181 .word __stack_chk_guard
1183 Since ldr/str are very common instructions, we can't use them alone as the
1184 'fingerprint' or 'signature' of a stack protector sequence.  Here we use the
1185 sequence {movw/movt, ldr}/ldr/str plus the symbol __stack_chk_guard, if not
1186 stripped, as the 'fingerprint' of a stack protector code sequence. */
1188 static CORE_ADDR
1189 arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
1191 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1192 unsigned int basereg;
1193 struct bound_minimal_symbol stack_chk_guard;
1194 int offset;
1195 int is_thumb = arm_pc_is_thumb (gdbarch, pc);
1196 CORE_ADDR addr;
1198 /* Try to parse the instructions in Step 1. */
1199 addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
1200 &basereg, &offset);
1201 if (!addr)
1202 return pc;
1204 stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
1205 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1206 Otherwise, this sequence cannot be for the stack protector. */
1207 if (stack_chk_guard.minsym == NULL
1208 || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym), "__stack_chk_guard"))
1209 return pc;
1211 if (is_thumb)
1213 unsigned int destreg;
1214 unsigned short insn
1215 = read_memory_unsigned_integer (pc + offset, 2, byte_order_for_code);
1217 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1218 if ((insn & 0xf800) != 0x6800)
1219 return pc;
1220 if (bits (insn, 3, 5) != basereg)
1221 return pc;
1222 destreg = bits (insn, 0, 2);
1224 insn = read_memory_unsigned_integer (pc + offset + 2, 2,
1225 byte_order_for_code);
1226 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1227 if ((insn & 0xf800) != 0x6000)
1228 return pc;
1229 if (destreg != bits (insn, 0, 2))
1230 return pc;
1232 else
1234 unsigned int destreg;
1235 unsigned int insn
1236 = read_memory_unsigned_integer (pc + offset, 4, byte_order_for_code);
1238 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1239 if ((insn & 0x0e500000) != 0x04100000)
1240 return pc;
1241 if (bits (insn, 16, 19) != basereg)
1242 return pc;
1243 destreg = bits (insn, 12, 15);
1244 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1245 insn = read_memory_unsigned_integer (pc + offset + 4,
1246 4, byte_order_for_code);
1247 if ((insn & 0x0e500000) != 0x04000000)
1248 return pc;
1249 if (bits (insn, 12, 15) != destreg)
1250 return pc;
1252 /* The total size of the two ldr/str instructions is 4 bytes on Thumb-2
1253 and 8 bytes on ARM. */
1254 if (is_thumb)
1255 return pc + offset + 4;
1256 else
1257 return pc + offset + 8;
1260 /* Advance the PC across any function entry prologue instructions to
1261 reach some "real" code.
1263 The APCS (ARM Procedure Call Standard) defines the following
1264 prologue:
1266 mov ip, sp
1267 [stmfd sp!, {a1,a2,a3,a4}]
1268 stmfd sp!, {...,fp,ip,lr,pc}
1269 [stfe f7, [sp, #-12]!]
1270 [stfe f6, [sp, #-12]!]
1271 [stfe f5, [sp, #-12]!]
1272 [stfe f4, [sp, #-12]!]
1273 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1275 static CORE_ADDR
1276 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1278 CORE_ADDR func_addr, limit_pc;
1280 /* See if we can determine the end of the prologue via the symbol table.
1281 If so, then return either PC, or the PC after the prologue, whichever
1282 is greater. */
1283 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1285 CORE_ADDR post_prologue_pc
1286 = skip_prologue_using_sal (gdbarch, func_addr);
1287 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
1289 if (post_prologue_pc)
1290 post_prologue_pc
1291 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1294 /* GCC always emits a line note before the prologue and another
1295 one after, even if the two are at the same address or on the
1296 same line. Take advantage of this so that we do not need to
1297 know every instruction that might appear in the prologue. We
1298 will have producer information for most binaries; if it is
1299 missing (e.g. for -gstabs), assume the GNU tools. */
1300 if (post_prologue_pc
1301 && (cust == NULL
1302 || COMPUNIT_PRODUCER (cust) == NULL
1303 || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
1304 || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
1305 return post_prologue_pc;
1307 if (post_prologue_pc != 0)
1309 CORE_ADDR analyzed_limit;
1311 /* For non-GCC compilers, make sure the entire line is an
1312 acceptable prologue; GDB will round this function's
1313 return value up to the end of the following line so we
1314 can not skip just part of a line (and we do not want to).
1316 RealView does not treat the prologue specially, but does
1317 associate prologue code with the opening brace; so this
1318 lets us skip the first line if we think it is the opening
1319 brace. */
1320 if (arm_pc_is_thumb (gdbarch, func_addr))
1321 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1322 post_prologue_pc, NULL);
1323 else
1324 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1325 post_prologue_pc, NULL);
1327 if (analyzed_limit != post_prologue_pc)
1328 return func_addr;
1330 return post_prologue_pc;
1334 /* Can't determine prologue from the symbol table, need to examine
1335 instructions. */
1337 /* Find an upper limit on the function prologue using the debug
1338 information. If the debug information could not be used to provide
1339 that bound, then use an arbitrary large number as the upper bound. */
1340 /* Like arm_scan_prologue, stop no later than pc + 64. */
1341 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1342 if (limit_pc == 0)
1343 limit_pc = pc + 64; /* Magic. */
1346 /* Check if this is Thumb code. */
1347 if (arm_pc_is_thumb (gdbarch, pc))
1348 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1349 else
1350 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1353 /* *INDENT-OFF* */
1354 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1355 This function decodes a Thumb function prologue to determine:
1356 1) the size of the stack frame
1357 2) which registers are saved on it
1358 3) the offsets of saved regs
1359 4) the offset from the stack pointer to the frame pointer
1361 A typical Thumb function prologue would create this stack frame
1362 (offsets relative to FP)
1363 old SP -> 24 stack parameters
1364 20 LR
1365 16 R7
1366 R7 -> 0 local variables (16 bytes)
1367 SP -> -12 additional stack space (12 bytes)
1368 The frame size would thus be 36 bytes, and the frame offset would be
1369 12 bytes. The frame register is R7.
1371 The comments for thumb_analyze_prologue() describe the algorithm we
1372 use to detect the end of the prologue. */
1373 /* *INDENT-ON* */
1375 static void
1376 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1377 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1379 CORE_ADDR prologue_start;
1380 CORE_ADDR prologue_end;
1382 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1383 &prologue_end))
1385 /* See the comment in arm_scan_prologue for an explanation of
1386 this heuristic. */
1387 if (prologue_end > prologue_start + 64)
1389 prologue_end = prologue_start + 64;
1392 else
1393 /* We're in the boondocks: we have no idea where the start of the
1394 function is. */
1395 return;
1397 prologue_end = std::min (prologue_end, prev_pc);
1399 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1402 /* Return 1 if the ARM instruction INSN restores SP in the epilogue,
1403 0 otherwise. */
1405 static int
1406 arm_instruction_restores_sp (unsigned int insn)
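/* The encodings matched below all require a normal condition field; a
   condition of 0xF (INST_NV) selects the unconditional instruction space,
   which is not checked here.  */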
1408 if (bits (insn, 28, 31) != INST_NV)
1410 if ((insn & 0x0df0f000) == 0x0080d000
1411 /* ADD SP (register or immediate). */
1412 || (insn & 0x0df0f000) == 0x0040d000
1413 /* SUB SP (register or immediate). */
1414 || (insn & 0x0ffffff0) == 0x01a0d000
1415 /* MOV SP. */
1416 || (insn & 0x0fff0000) == 0x08bd0000
1417 /* POP (LDMIA). */
1418 || (insn & 0x0fff0000) == 0x049d0000)
1419 /* POP of a single register. */
1420 return 1;
1423 return 0;
1426 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1427 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1428 fill it in. Return the first address not recognized as a prologue
1429 instruction.
1431 We recognize all the instructions typically found in ARM prologues,
1432 plus harmless instructions which can be skipped (either for analysis
1433 purposes, or a more restrictive set that can be skipped when finding
1434 the end of the prologue). */
1436 static CORE_ADDR
1437 arm_analyze_prologue (struct gdbarch *gdbarch,
1438 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1439 struct arm_prologue_cache *cache)
1441 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1442 int regno;
1443 CORE_ADDR offset, current_pc;
1444 pv_t regs[ARM_FPS_REGNUM];
1445 struct pv_area *stack;
1446 struct cleanup *back_to;
1447 CORE_ADDR unrecognized_pc = 0;
1449 /* Search the prologue looking for instructions that set up the
1450 frame pointer, adjust the stack pointer, and save registers.
1452 Be careful, however, and if it doesn't look like a prologue,
1453 don't try to scan it. If, for instance, a frameless function
1454 begins with stmfd sp!, then we will tell ourselves there is
1455 a frame, which will confuse stack traceback, as well as "finish"
1456 and other operations that rely on a knowledge of the stack
1457 traceback. */
1459 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1460 regs[regno] = pv_register (regno, 0);
1461 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1462 back_to = make_cleanup_free_pv_area (stack);
1464 for (current_pc = prologue_start;
1465 current_pc < prologue_end;
1466 current_pc += 4)
1468 unsigned int insn
1469 = read_memory_unsigned_integer (current_pc, 4, byte_order_for_code);
1471 if (insn == 0xe1a0c00d) /* mov ip, sp */
1473 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1474 continue;
1476 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1477 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1479 unsigned imm = insn & 0xff; /* immediate value */
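/* The 4-bit rotate field (bits 8-11) encodes half the rotation, so
   shifting right by 7 rather than 8 yields the actual rotate-right
   amount used below.  */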
1480 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1481 int rd = bits (insn, 12, 15);
1482 imm = (imm >> rot) | (imm << (32 - rot));
1483 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1484 continue;
1486 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1487 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1489 unsigned imm = insn & 0xff; /* immediate value */
1490 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1491 int rd = bits (insn, 12, 15);
1492 imm = (imm >> rot) | (imm << (32 - rot));
1493 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1494 continue;
1496 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1497 [sp, #-4]! */
1499 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1500 break;
1501 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1502 pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
1503 regs[bits (insn, 12, 15)]);
1504 continue;
1506 else if ((insn & 0xffff0000) == 0xe92d0000)
1507 /* stmfd sp!, {..., fp, ip, lr, pc}
1509 stmfd sp!, {a1, a2, a3, a4} */
1511 int mask = insn & 0xffff;
1513 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1514 break;
1516 /* Calculate offsets of saved registers. */
1517 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1518 if (mask & (1 << regno))
1520 regs[ARM_SP_REGNUM]
1521 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1522 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
1525 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1526 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1527 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1529 /* No need to add this to saved_regs -- it's just an arg reg. */
1530 continue;
1532 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1533 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1534 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1536 /* No need to add this to saved_regs -- it's just an arg reg. */
1537 continue;
1539 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1540 { registers } */
1541 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1543 /* No need to add this to saved_regs -- it's just arg regs. */
1544 continue;
1546 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1548 unsigned imm = insn & 0xff; /* immediate value */
1549 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1550 imm = (imm >> rot) | (imm << (32 - rot));
1551 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1553 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1555 unsigned imm = insn & 0xff; /* immediate value */
1556 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1557 imm = (imm >> rot) | (imm << (32 - rot));
1558 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1560 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1561 [sp, -#c]! */
1562 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1564 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1565 break;
1567 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1568 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1569 pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
1571 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1572 [sp!] */
1573 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1575 int n_saved_fp_regs;
1576 unsigned int fp_start_reg, fp_bound_reg;
1578 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1579 break;
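/* The FPA register count is encoded in the two non-contiguous bits N1:N0,
   with 0b01 meaning one register, 0b10 two, 0b11 three and 0b00 four.  */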
1581 if ((insn & 0x800) == 0x800) /* N0 is set */
1583 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1584 n_saved_fp_regs = 3;
1585 else
1586 n_saved_fp_regs = 1;
1588 else
1590 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1591 n_saved_fp_regs = 2;
1592 else
1593 n_saved_fp_regs = 4;
1596 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1597 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1598 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1600 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1601 pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
1602 regs[fp_start_reg++]);
1605 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1607 /* Allow some special function calls when skipping the
1608 prologue; GCC generates these before storing arguments to
1609 the stack. */
1610 CORE_ADDR dest = BranchDest (current_pc, insn);
1612 if (skip_prologue_function (gdbarch, dest, 0))
1613 continue;
1614 else
1615 break;
1617 else if ((insn & 0xf0000000) != 0xe0000000)
1618 break; /* Condition not true, exit early. */
1619 else if (arm_instruction_changes_pc (insn))
1620 /* Don't scan past anything that might change control flow. */
1621 break;
1622 else if (arm_instruction_restores_sp (insn))
1624 /* Don't scan past the epilogue. */
1625 break;
1627 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1628 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1629 /* Ignore block loads from the stack, potentially copying
1630 parameters from memory. */
1631 continue;
1632 else if ((insn & 0xfc500000) == 0xe4100000
1633 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1634 /* Similarly ignore single loads from the stack. */
1635 continue;
1636 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1637 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1638 register instead of the stack. */
1639 continue;
1640 else
1642 /* The optimizer might shove anything into the prologue.  If we are
1643 building up the cache (cache != NULL) from scanning the prologue,
1644 we just skip what we don't recognize and scan further to make the
1645 cache as complete as possible.  However, if we are only skipping the
1646 prologue, we stop immediately on the first unrecognized
1647 instruction. */
1648 unrecognized_pc = current_pc;
1649 if (cache != NULL)
1650 continue;
1651 else
1652 break;
1656 if (unrecognized_pc == 0)
1657 unrecognized_pc = current_pc;
1659 if (cache)
1661 int framereg, framesize;
1663 /* The frame size is just the distance from the frame register
1664 to the original stack pointer. */
1665 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1667 /* Frame pointer is fp. */
1668 framereg = ARM_FP_REGNUM;
1669 framesize = -regs[ARM_FP_REGNUM].k;
1671 else
1673 /* Try the stack pointer... this is a bit desperate. */
1674 framereg = ARM_SP_REGNUM;
1675 framesize = -regs[ARM_SP_REGNUM].k;
1678 cache->framereg = framereg;
1679 cache->framesize = framesize;
1681 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1682 if (pv_area_find_reg (stack, gdbarch, regno, &offset))
1683 cache->saved_regs[regno].addr = offset;
1686 if (arm_debug)
1687 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1688 paddress (gdbarch, unrecognized_pc));
1690 do_cleanups (back_to);
1691 return unrecognized_pc;
1694 static void
1695 arm_scan_prologue (struct frame_info *this_frame,
1696 struct arm_prologue_cache *cache)
1698 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1699 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1700 CORE_ADDR prologue_start, prologue_end;
1701 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1702 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1704 /* Assume there is no frame until proven otherwise. */
1705 cache->framereg = ARM_SP_REGNUM;
1706 cache->framesize = 0;
1708 /* Check for Thumb prologue. */
1709 if (arm_frame_is_thumb (this_frame))
1711 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1712 return;
1715 /* Find the function prologue. If we can't find the function in
1716 the symbol table, peek in the stack frame to find the PC. */
1717 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1718 &prologue_end))
1720 /* One way to find the end of the prologue (which works well
1721 for unoptimized code) is to do the following:
1723 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1725 if (sal.line == 0)
1726 prologue_end = prev_pc;
1727 else if (sal.end < prologue_end)
1728 prologue_end = sal.end;
1730 This mechanism is very accurate so long as the optimizer
1731 doesn't move any instructions from the function body into the
1732 prologue. If this happens, sal.end will be the last
1733 instruction in the first hunk of prologue code just before
1734 the first instruction that the scheduler has moved from
1735 the body to the prologue.
1737 In order to make sure that we scan all of the prologue
1738 instructions, we use a slightly less accurate mechanism which
1739 may scan more than necessary. To help compensate for this
1740 lack of accuracy, the prologue scanning loop below contains
1741 several clauses which'll cause the loop to terminate early if
1742 an implausible prologue instruction is encountered.
1744 The expression
1746 prologue_start + 64
1748 is a suitable endpoint since it accounts for the largest
1749 possible prologue plus up to five instructions inserted by
1750 the scheduler. */
1752 if (prologue_end > prologue_start + 64)
1754 prologue_end = prologue_start + 64; /* See above. */
1757 else
1759 /* We have no symbol information. Our only option is to assume this
1760 function has a standard stack frame and the normal frame register.
1761 Then, we can find the value of our frame pointer on entrance to
1762 the callee (or at the present moment if this is the innermost frame).
1763 The value stored there should be the address of the stmfd + 8. */
1764 CORE_ADDR frame_loc;
1765 LONGEST return_value;
1767 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1768 if (!safe_read_memory_integer (frame_loc, 4, byte_order, &return_value))
1769 return;
1770 else
1772 prologue_start = gdbarch_addr_bits_remove
1773 (gdbarch, return_value) - 8;
1774 prologue_end = prologue_start + 64; /* See above. */
1778 if (prev_pc < prologue_end)
1779 prologue_end = prev_pc;
1781 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1784 static struct arm_prologue_cache *
1785 arm_make_prologue_cache (struct frame_info *this_frame)
1787 int reg;
1788 struct arm_prologue_cache *cache;
1789 CORE_ADDR unwound_fp;
1791 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
1792 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
1794 arm_scan_prologue (this_frame, cache);
1796 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
1797 if (unwound_fp == 0)
1798 return cache;
1800 cache->prev_sp = unwound_fp + cache->framesize;
1802 /* Calculate actual addresses of saved registers using offsets
1803 determined by arm_scan_prologue. */
1804 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
1805 if (trad_frame_addr_p (cache->saved_regs, reg))
1806 cache->saved_regs[reg].addr += cache->prev_sp;
1808 return cache;
1811 /* Implementation of the stop_reason hook for arm_prologue frames. */
1813 static enum unwind_stop_reason
1814 arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
1815 void **this_cache)
1817 struct arm_prologue_cache *cache;
1818 CORE_ADDR pc;
1820 if (*this_cache == NULL)
1821 *this_cache = arm_make_prologue_cache (this_frame);
1822 cache = (struct arm_prologue_cache *) *this_cache;
1824 /* This is meant to halt the backtrace at "_start". */
1825 pc = get_frame_pc (this_frame);
1826 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
1827 return UNWIND_OUTERMOST;
1829 /* If we've hit a wall, stop. */
1830 if (cache->prev_sp == 0)
1831 return UNWIND_OUTERMOST;
1833 return UNWIND_NO_REASON;
1836 /* Our frame ID for a normal frame is the current function's starting PC
1837 and the caller's SP when we were called. */
1839 static void
1840 arm_prologue_this_id (struct frame_info *this_frame,
1841 void **this_cache,
1842 struct frame_id *this_id)
1844 struct arm_prologue_cache *cache;
1845 struct frame_id id;
1846 CORE_ADDR pc, func;
1848 if (*this_cache == NULL)
1849 *this_cache = arm_make_prologue_cache (this_frame);
1850 cache = (struct arm_prologue_cache *) *this_cache;
1852 /* Use function start address as part of the frame ID. If we cannot
1853 identify the start address (due to missing symbol information),
1854 fall back to just using the current PC. */
1855 pc = get_frame_pc (this_frame);
1856 func = get_frame_func (this_frame);
1857 if (!func)
1858 func = pc;
1860 id = frame_id_build (cache->prev_sp, func);
1861 *this_id = id;
1864 static struct value *
1865 arm_prologue_prev_register (struct frame_info *this_frame,
1866 void **this_cache,
1867 int prev_regnum)
1869 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1870 struct arm_prologue_cache *cache;
1872 if (*this_cache == NULL)
1873 *this_cache = arm_make_prologue_cache (this_frame);
1874 cache = (struct arm_prologue_cache *) *this_cache;
1876 /* If we are asked to unwind the PC, then we need to return the LR
1877 instead. The prologue may save PC, but it will point into this
1878 frame's prologue, not the next frame's resume location. Also
1879 strip the saved T bit. A valid LR may have the low bit set, but
1880 a valid PC never does. */
1881 if (prev_regnum == ARM_PC_REGNUM)
1883 CORE_ADDR lr;
1885 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1886 return frame_unwind_got_constant (this_frame, prev_regnum,
1887 arm_addr_bits_remove (gdbarch, lr));
1890 /* SP is generally not saved to the stack, but this frame is
1891 identified by the next frame's stack pointer at the time of the call.
1892 The value was already reconstructed into PREV_SP. */
1893 if (prev_regnum == ARM_SP_REGNUM)
1894 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
1896 /* The CPSR may have been changed by the call instruction and by the
1897 called function. The only bit we can reconstruct is the T bit,
1898 by checking the low bit of LR as of the call. This is a reliable
1899 indicator of Thumb-ness except for some ARM v4T pre-interworking
1900 Thumb code, which could get away with a clear low bit as long as
1901 the called function did not use bx. Guess that all other
1902 bits are unchanged; the condition flags are presumably lost,
1903 but the processor status is likely valid. */
1904 if (prev_regnum == ARM_PS_REGNUM)
1906 CORE_ADDR lr, cpsr;
1907 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
1909 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
1910 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
1911 if (IS_THUMB_ADDR (lr))
1912 cpsr |= t_bit;
1913 else
1914 cpsr &= ~t_bit;
1915 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
1918 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
1919 prev_regnum);
1922 struct frame_unwind arm_prologue_unwind = {
1923 NORMAL_FRAME,
1924 arm_prologue_unwind_stop_reason,
1925 arm_prologue_this_id,
1926 arm_prologue_prev_register,
1927 NULL,
1928 default_frame_sniffer
1931 /* Maintain a list of ARM exception table entries per objfile, similar to the
1932 list of mapping symbols. We only cache entries for standard ARM-defined
1933 personality routines; the cache will contain only the frame unwinding
1934 instructions associated with the entry (not the descriptors). */
1936 static const struct objfile_data *arm_exidx_data_key;
1938 struct arm_exidx_entry
1940 bfd_vma addr;
1941 gdb_byte *entry;
1943 typedef struct arm_exidx_entry arm_exidx_entry_s;
1944 DEF_VEC_O(arm_exidx_entry_s);
1946 struct arm_exidx_data
1948 VEC(arm_exidx_entry_s) **section_maps;
1951 static void
1952 arm_exidx_data_free (struct objfile *objfile, void *arg)
1954 struct arm_exidx_data *data = (struct arm_exidx_data *) arg;
1955 unsigned int i;
1957 for (i = 0; i < objfile->obfd->section_count; i++)
1958 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
1961 static inline int
1962 arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
1963 const struct arm_exidx_entry *rhs)
1965 return lhs->addr < rhs->addr;
1968 static struct obj_section *
1969 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
1971 struct obj_section *osect;
1973 ALL_OBJFILE_OSECTIONS (objfile, osect)
1974 if (bfd_get_section_flags (objfile->obfd,
1975 osect->the_bfd_section) & SEC_ALLOC)
1977 bfd_vma start, size;
1978 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
1979 size = bfd_get_section_size (osect->the_bfd_section);
1981 if (start <= vma && vma < start + size)
1982 return osect;
1985 return NULL;
1988 /* Parse contents of exception table and exception index sections
1989 of OBJFILE, and fill in the exception table entry cache.
1991 For each entry that refers to a standard ARM-defined personality
1992 routine, extract the frame unwinding instructions (from either
1993 the index or the table section). The unwinding instructions
1994 are normalized by:
1995 - extracting them from the rest of the table data
1996 - converting to host endianness
1997 - appending the implicit 0xb0 ("Finish") code
1999 The extracted and normalized instructions are stored for later
2000 retrieval by the arm_find_exidx_entry routine. */
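/* For reference: each .ARM.exidx entry is a pair of 32-bit words.  The
   first word is a prel31 offset to the start of the function covered;
   the second is either the literal 1 (EXIDX_CANTUNWIND), an inline
   "short form" entry (top bit set), or a prel31 offset into
   .ARM.extab.  The loop below decodes exactly these three cases.  */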
2002 static void
2003 arm_exidx_new_objfile (struct objfile *objfile)
2005 struct cleanup *cleanups;
2006 struct arm_exidx_data *data;
2007 asection *exidx, *extab;
2008 bfd_vma exidx_vma = 0, extab_vma = 0;
2009 bfd_size_type exidx_size = 0, extab_size = 0;
2010 gdb_byte *exidx_data = NULL, *extab_data = NULL;
2011 LONGEST i;
2013 /* If we've already touched this file, do nothing. */
2014 if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
2015 return;
2016 cleanups = make_cleanup (null_cleanup, NULL);
2018 /* Read contents of exception table and index. */
2019 exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
2020 if (exidx)
2022 exidx_vma = bfd_section_vma (objfile->obfd, exidx);
2023 exidx_size = bfd_get_section_size (exidx);
2024 exidx_data = (gdb_byte *) xmalloc (exidx_size);
2025 make_cleanup (xfree, exidx_data);
2027 if (!bfd_get_section_contents (objfile->obfd, exidx,
2028 exidx_data, 0, exidx_size))
2030 do_cleanups (cleanups);
2031 return;
2035 extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
2036 if (extab)
2038 extab_vma = bfd_section_vma (objfile->obfd, extab);
2039 extab_size = bfd_get_section_size (extab);
2040 extab_data = (gdb_byte *) xmalloc (extab_size);
2041 make_cleanup (xfree, extab_data);
2043 if (!bfd_get_section_contents (objfile->obfd, extab,
2044 extab_data, 0, extab_size))
2046 do_cleanups (cleanups);
2047 return;
2051 /* Allocate exception table data structure. */
2052 data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
2053 set_objfile_data (objfile, arm_exidx_data_key, data);
2054 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
2055 objfile->obfd->section_count,
2056 VEC(arm_exidx_entry_s) *);
2058 /* Fill in exception table. */
2059 for (i = 0; i < exidx_size / 8; i++)
2061 struct arm_exidx_entry new_exidx_entry;
2062 bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
2063 bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
2064 bfd_vma addr = 0, word = 0;
2065 int n_bytes = 0, n_words = 0;
2066 struct obj_section *sec;
2067 gdb_byte *entry = NULL;
2069 /* Extract address of start of function. */
2070 idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2071 idx += exidx_vma + i * 8;
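/* Illustration of the prel31 decoding above: if the stored word is
   0x7ffffffc, the mask and XOR/subtract sign-extension yield -4, so
   the function starts 4 bytes before this index entry, i.e. at
   exidx_vma + i * 8 - 4.  */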
2073 /* Find section containing function and compute section offset. */
2074 sec = arm_obj_section_from_vma (objfile, idx);
2075 if (sec == NULL)
2076 continue;
2077 idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);
2079 /* Determine address of exception table entry. */
2080 if (val == 1)
2082 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2084 else if ((val & 0xff000000) == 0x80000000)
2086 /* Exception table entry embedded in .ARM.exidx
2087 -- must be short form. */
2088 word = val;
2089 n_bytes = 3;
2091 else if (!(val & 0x80000000))
2093 /* Exception table entry in .ARM.extab. */
2094 addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2095 addr += exidx_vma + i * 8 + 4;
2097 if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
2099 word = bfd_h_get_32 (objfile->obfd,
2100 extab_data + addr - extab_vma);
2101 addr += 4;
2103 if ((word & 0xff000000) == 0x80000000)
2105 /* Short form. */
2106 n_bytes = 3;
2108 else if ((word & 0xff000000) == 0x81000000
2109 || (word & 0xff000000) == 0x82000000)
2111 /* Long form. */
2112 n_bytes = 2;
2113 n_words = ((word >> 16) & 0xff);
2115 else if (!(word & 0x80000000))
2117 bfd_vma pers;
2118 struct obj_section *pers_sec;
2119 int gnu_personality = 0;
2121 /* Custom personality routine. */
2122 pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
2123 pers = UNMAKE_THUMB_ADDR (pers + addr - 4);
2125 /* Check whether we've got one of the variants of the
2126 GNU personality routines. */
2127 pers_sec = arm_obj_section_from_vma (objfile, pers);
2128 if (pers_sec)
2130 static const char *personality[] =
2132 "__gcc_personality_v0",
2133 "__gxx_personality_v0",
2134 "__gcj_personality_v0",
2135 "__gnu_objc_personality_v0",
2136 NULL
2139 CORE_ADDR pc = pers + obj_section_offset (pers_sec);
2140 int k;
2142 for (k = 0; personality[k]; k++)
2143 if (lookup_minimal_symbol_by_pc_name
2144 (pc, personality[k], objfile))
2146 gnu_personality = 1;
2147 break;
2151 /* If so, the next word contains a word count in the high
2152 byte, followed by the same unwind instructions as the
2153 pre-defined forms. */
2154 if (gnu_personality
2155 && addr + 4 <= extab_vma + extab_size)
2157 word = bfd_h_get_32 (objfile->obfd,
2158 extab_data + addr - extab_vma);
2159 addr += 4;
2160 n_bytes = 3;
2161 n_words = ((word >> 24) & 0xff);
2167 /* Sanity check address. */
2168 if (n_words)
2169 if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
2170 n_words = n_bytes = 0;
2172 /* The unwind instructions reside in WORD (only the N_BYTES least
2173 significant bytes are valid), followed by N_WORDS words in the
2174 extab section starting at ADDR. */
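/* As an illustration, a short-form index word of 0x80a8b0b0 gives
   N_BYTES == 3 and the normalized byte sequence 0xa8 0xb0 0xb0, to
   which the code below appends the implicit 0xb0 terminator.  */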
2175 if (n_bytes || n_words)
2177 gdb_byte *p = entry
2178 = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
2179 n_bytes + n_words * 4 + 1);
2181 while (n_bytes--)
2182 *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);
2184 while (n_words--)
2186 word = bfd_h_get_32 (objfile->obfd,
2187 extab_data + addr - extab_vma);
2188 addr += 4;
2190 *p++ = (gdb_byte) ((word >> 24) & 0xff);
2191 *p++ = (gdb_byte) ((word >> 16) & 0xff);
2192 *p++ = (gdb_byte) ((word >> 8) & 0xff);
2193 *p++ = (gdb_byte) (word & 0xff);
2196 /* Implied "Finish" to terminate the list. */
2197 *p++ = 0xb0;
2200 /* Push entry onto vector. They are guaranteed to always
2201 appear in order of increasing addresses. */
2202 new_exidx_entry.addr = idx;
2203 new_exidx_entry.entry = entry;
2204 VEC_safe_push (arm_exidx_entry_s,
2205 data->section_maps[sec->the_bfd_section->index],
2206 &new_exidx_entry);
2209 do_cleanups (cleanups);
2212 /* Search for the exception table entry covering MEMADDR. If one is found,
2213 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2214 set *START to the start of the region covered by this entry. */
2216 static gdb_byte *
2217 arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
2219 struct obj_section *sec;
2221 sec = find_pc_section (memaddr);
2222 if (sec != NULL)
2224 struct arm_exidx_data *data;
2225 VEC(arm_exidx_entry_s) *map;
2226 struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
2227 unsigned int idx;
2229 data = ((struct arm_exidx_data *)
2230 objfile_data (sec->objfile, arm_exidx_data_key));
2231 if (data != NULL)
2233 map = data->section_maps[sec->the_bfd_section->index];
2234 if (!VEC_empty (arm_exidx_entry_s, map))
2236 struct arm_exidx_entry *map_sym;
2238 idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
2239 arm_compare_exidx_entries);
2241 /* VEC_lower_bound finds the earliest ordered insertion
2242 point. If the following symbol starts at this exact
2243 address, we use that; otherwise, the preceding
2244 exception table entry covers this address. */
2245 if (idx < VEC_length (arm_exidx_entry_s, map))
2247 map_sym = VEC_index (arm_exidx_entry_s, map, idx);
2248 if (map_sym->addr == map_key.addr)
2250 if (start)
2251 *start = map_sym->addr + obj_section_addr (sec);
2252 return map_sym->entry;
2256 if (idx > 0)
2258 map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
2259 if (start)
2260 *start = map_sym->addr + obj_section_addr (sec);
2261 return map_sym->entry;
2267 return NULL;
2270 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2271 instruction list from the ARM exception table entry ENTRY, allocate and
2272 return a prologue cache structure describing how to unwind this frame.
2274 Return NULL if the unwinding instruction list contains a "spare",
2275 "reserved" or "refuse to unwind" instruction as defined in section
2276 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2277 for the ARM Architecture" document. */
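/* Worked example (illustrative only): the normalized sequence
   0xa8 0xb0 0xb0 0xb0 decodes as "pop {r4, lr}" followed by "finish",
   so r4 is found at VSP, LR at VSP + 4, the previous SP is VSP + 8,
   and the saved PC is taken from LR.  */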
2279 static struct arm_prologue_cache *
2280 arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
2282 CORE_ADDR vsp = 0;
2283 int vsp_valid = 0;
2285 struct arm_prologue_cache *cache;
2286 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2287 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2289 for (;;)
2291 gdb_byte insn;
2293 /* Whenever we reload SP, we actually have to retrieve its
2294 actual value in the current frame. */
2295 if (!vsp_valid)
2297 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2299 int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2300 vsp = get_frame_register_unsigned (this_frame, reg);
2302 else
2304 CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
2305 vsp = get_frame_memory_unsigned (this_frame, addr, 4);
2308 vsp_valid = 1;
2311 /* Decode next unwind instruction. */
2312 insn = *entry++;
2314 if ((insn & 0xc0) == 0)
2316 int offset = insn & 0x3f;
2317 vsp += (offset << 2) + 4;
2319 else if ((insn & 0xc0) == 0x40)
2321 int offset = insn & 0x3f;
2322 vsp -= (offset << 2) + 4;
2324 else if ((insn & 0xf0) == 0x80)
2326 int mask = ((insn & 0xf) << 8) | *entry++;
2327 int i;
2329 /* The special case of an all-zero mask identifies
2330 "Refuse to unwind". We return NULL to fall back
2331 to the prologue analyzer. */
2332 if (mask == 0)
2333 return NULL;
2335 /* Pop registers r4..r15 under mask. */
2336 for (i = 0; i < 12; i++)
2337 if (mask & (1 << i))
2339 cache->saved_regs[4 + i].addr = vsp;
2340 vsp += 4;
2343 /* Special-case popping SP -- we need to reload vsp. */
2344 if (mask & (1 << (ARM_SP_REGNUM - 4)))
2345 vsp_valid = 0;
2347 else if ((insn & 0xf0) == 0x90)
2349 int reg = insn & 0xf;
2351 /* Reserved cases. */
2352 if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
2353 return NULL;
2355 /* Set SP from another register and mark VSP for reload. */
2356 cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
2357 vsp_valid = 0;
2359 else if ((insn & 0xf0) == 0xa0)
2361 int count = insn & 0x7;
2362 int pop_lr = (insn & 0x8) != 0;
2363 int i;
2365 /* Pop r4..r[4+count]. */
2366 for (i = 0; i <= count; i++)
2368 cache->saved_regs[4 + i].addr = vsp;
2369 vsp += 4;
2372 /* If indicated by flag, pop LR as well. */
2373 if (pop_lr)
2375 cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
2376 vsp += 4;
2379 else if (insn == 0xb0)
2381 /* We could only have updated PC by popping into it; if so, it
2382 will show up as an address. Otherwise, copy LR into PC. */
2383 if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
2384 cache->saved_regs[ARM_PC_REGNUM]
2385 = cache->saved_regs[ARM_LR_REGNUM];
2387 /* We're done. */
2388 break;
2390 else if (insn == 0xb1)
2392 int mask = *entry++;
2393 int i;
2395 /* All-zero mask and mask >= 16 is "spare". */
2396 if (mask == 0 || mask >= 16)
2397 return NULL;
2399 /* Pop r0..r3 under mask. */
2400 for (i = 0; i < 4; i++)
2401 if (mask & (1 << i))
2403 cache->saved_regs[i].addr = vsp;
2404 vsp += 4;
2407 else if (insn == 0xb2)
2409 ULONGEST offset = 0;
2410 unsigned shift = 0;
2414 offset |= (*entry & 0x7f) << shift;
2415 shift += 7;
2417 while (*entry++ & 0x80);
2419 vsp += 0x204 + (offset << 2);
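/* For example (illustration only), a single operand byte of 0x10
   gives OFFSET == 0x10, so VSP is advanced by 0x204 + 0x40 == 0x244
   bytes.  */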
2421 else if (insn == 0xb3)
2423 int start = *entry >> 4;
2424 int count = (*entry++) & 0xf;
2425 int i;
2427 /* Only registers D0..D15 are valid here. */
2428 if (start + count >= 16)
2429 return NULL;
2431 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2432 for (i = 0; i <= count; i++)
2434 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2435 vsp += 8;
2438 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2439 vsp += 4;
2441 else if ((insn & 0xf8) == 0xb8)
2443 int count = insn & 0x7;
2444 int i;
2446 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2447 for (i = 0; i <= count; i++)
2449 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2450 vsp += 8;
2453 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2454 vsp += 4;
2456 else if (insn == 0xc6)
2458 int start = *entry >> 4;
2459 int count = (*entry++) & 0xf;
2460 int i;
2462 /* Only registers WR0..WR15 are valid. */
2463 if (start + count >= 16)
2464 return NULL;
2466 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2467 for (i = 0; i <= count; i++)
2469 cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
2470 vsp += 8;
2473 else if (insn == 0xc7)
2475 int mask = *entry++;
2476 int i;
2478 /* All-zero mask and mask >= 16 is "spare". */
2479 if (mask == 0 || mask >= 16)
2480 return NULL;
2482 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2483 for (i = 0; i < 4; i++)
2484 if (mask & (1 << i))
2486 cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
2487 vsp += 4;
2490 else if ((insn & 0xf8) == 0xc0)
2492 int count = insn & 0x7;
2493 int i;
2495 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2496 for (i = 0; i <= count; i++)
2498 cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
2499 vsp += 8;
2502 else if (insn == 0xc8)
2504 int start = *entry >> 4;
2505 int count = (*entry++) & 0xf;
2506 int i;
2508 /* Only registers D0..D31 are valid. */
2509 if (start + count >= 16)
2510 return NULL;
2512 /* Pop VFP double-precision registers
2513 D[16+start]..D[16+start+count]. */
2514 for (i = 0; i <= count; i++)
2516 cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
2517 vsp += 8;
2520 else if (insn == 0xc9)
2522 int start = *entry >> 4;
2523 int count = (*entry++) & 0xf;
2524 int i;
2526 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2527 for (i = 0; i <= count; i++)
2529 cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
2530 vsp += 8;
2533 else if ((insn & 0xf8) == 0xd0)
2535 int count = insn & 0x7;
2536 int i;
2538 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2539 for (i = 0; i <= count; i++)
2541 cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
2542 vsp += 8;
2545 else
2547 /* Everything else is "spare". */
2548 return NULL;
2552 /* If we restore SP from a register, assume this was the frame register.
2553 Otherwise just fall back to SP as frame register. */
2554 if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
2555 cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
2556 else
2557 cache->framereg = ARM_SP_REGNUM;
2559 /* Determine offset to previous frame. */
2560 cache->framesize
2561 = vsp - get_frame_register_unsigned (this_frame, cache->framereg);
2563 /* We already got the previous SP. */
2564 cache->prev_sp = vsp;
2566 return cache;
2569 /* Unwinding via ARM exception table entries. Note that the sniffer
2570 already computes a filled-in prologue cache, which is then used
2571 with the same arm_prologue_this_id and arm_prologue_prev_register
2572 routines also used for prologue-parsing based unwinding. */
2574 static int
2575 arm_exidx_unwind_sniffer (const struct frame_unwind *self,
2576 struct frame_info *this_frame,
2577 void **this_prologue_cache)
2579 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2580 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
2581 CORE_ADDR addr_in_block, exidx_region, func_start;
2582 struct arm_prologue_cache *cache;
2583 gdb_byte *entry;
2585 /* See if we have an ARM exception table entry covering this address. */
2586 addr_in_block = get_frame_address_in_block (this_frame);
2587 entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
2588 if (!entry)
2589 return 0;
2591 /* The ARM exception table does not describe unwind information
2592 for arbitrary PC values, but is guaranteed to be correct only
2593 at call sites. We have to decide here whether we want to use
2594 ARM exception table information for this frame, or fall back
2595 to using prologue parsing. (Note that if we have DWARF CFI,
2596 this sniffer isn't even called -- CFI is always preferred.)
2598 Before we make this decision, however, we check whether we
2599 actually have *symbol* information for the current frame.
2600 If not, prologue parsing would not work anyway, so we might
2601 as well use the exception table and hope for the best. */
2602 if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
2604 int exc_valid = 0;
2606 /* If the next frame is "normal", we are at a call site in this
2607 frame, so exception information is guaranteed to be valid. */
2608 if (get_next_frame (this_frame)
2609 && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
2610 exc_valid = 1;
2612 /* We also assume exception information is valid if we're currently
2613 blocked in a system call. The system library is supposed to
2614 ensure this, so that e.g. pthread cancellation works. */
2615 if (arm_frame_is_thumb (this_frame))
2617 LONGEST insn;
2619 if (safe_read_memory_integer (get_frame_pc (this_frame) - 2, 2,
2620 byte_order_for_code, &insn)
2621 && (insn & 0xff00) == 0xdf00 /* svc */)
2622 exc_valid = 1;
2624 else
2626 LONGEST insn;
2628 if (safe_read_memory_integer (get_frame_pc (this_frame) - 4, 4,
2629 byte_order_for_code, &insn)
2630 && (insn & 0x0f000000) == 0x0f000000 /* svc */)
2631 exc_valid = 1;
2634 /* Bail out if we don't know that exception information is valid. */
2635 if (!exc_valid)
2636 return 0;
2638 /* The ARM exception index does not mark the *end* of the region
2639 covered by the entry, and some functions will not have any entry.
2640 To correctly recognize the end of the covered region, the linker
2641 should have inserted dummy records with a CANTUNWIND marker.
2643 Unfortunately, current versions of GNU ld do not reliably do
2644 this, and thus we may have found an incorrect entry above.
2645 As a (temporary) sanity check, we only use the entry if it
2646 lies *within* the bounds of the function. Note that this check
2647 might reject perfectly valid entries that just happen to cover
2648 multiple functions; therefore this check ought to be removed
2649 once the linker is fixed. */
2650 if (func_start > exidx_region)
2651 return 0;
2654 /* Decode the list of unwinding instructions into a prologue cache.
2655 Note that this may fail due to e.g. a "refuse to unwind" code. */
2656 cache = arm_exidx_fill_cache (this_frame, entry);
2657 if (!cache)
2658 return 0;
2660 *this_prologue_cache = cache;
2661 return 1;
2664 struct frame_unwind arm_exidx_unwind = {
2665 NORMAL_FRAME,
2666 default_frame_unwind_stop_reason,
2667 arm_prologue_this_id,
2668 arm_prologue_prev_register,
2669 NULL,
2670 arm_exidx_unwind_sniffer
2673 static struct arm_prologue_cache *
2674 arm_make_epilogue_frame_cache (struct frame_info *this_frame)
2676 struct arm_prologue_cache *cache;
2677 int reg;
2679 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2680 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2682 /* Still rely on the offsets calculated from the prologue. */
2683 arm_scan_prologue (this_frame, cache);
2685 /* Since we are in epilogue, the SP has been restored. */
2686 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2688 /* Calculate actual addresses of saved registers using offsets
2689 determined by arm_scan_prologue. */
2690 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2691 if (trad_frame_addr_p (cache->saved_regs, reg))
2692 cache->saved_regs[reg].addr += cache->prev_sp;
2694 return cache;
2697 /* Implementation of function hook 'this_id' in
2698 'struct frame_unwind' for epilogue unwinder. */
2700 static void
2701 arm_epilogue_frame_this_id (struct frame_info *this_frame,
2702 void **this_cache,
2703 struct frame_id *this_id)
2705 struct arm_prologue_cache *cache;
2706 CORE_ADDR pc, func;
2708 if (*this_cache == NULL)
2709 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2710 cache = (struct arm_prologue_cache *) *this_cache;
2712 /* Use function start address as part of the frame ID. If we cannot
2713 identify the start address (due to missing symbol information),
2714 fall back to just using the current PC. */
2715 pc = get_frame_pc (this_frame);
2716 func = get_frame_func (this_frame);
2717 if (func == 0)
2718 func = pc;
2720 (*this_id) = frame_id_build (cache->prev_sp, func);
2723 /* Implementation of function hook 'prev_register' in
2724 'struct frame_unwind' for epilogue unwinder. */
2726 static struct value *
2727 arm_epilogue_frame_prev_register (struct frame_info *this_frame,
2728 void **this_cache, int regnum)
2730 if (*this_cache == NULL)
2731 *this_cache = arm_make_epilogue_frame_cache (this_frame);
2733 return arm_prologue_prev_register (this_frame, this_cache, regnum);
2736 static int arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch,
2737 CORE_ADDR pc);
2738 static int thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch,
2739 CORE_ADDR pc);
2741 /* Implementation of function hook 'sniffer' in
2742 'struct frame_unwind' for epilogue unwinder. */
2744 static int
2745 arm_epilogue_frame_sniffer (const struct frame_unwind *self,
2746 struct frame_info *this_frame,
2747 void **this_prologue_cache)
2749 if (frame_relative_level (this_frame) == 0)
2751 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2752 CORE_ADDR pc = get_frame_pc (this_frame);
2754 if (arm_frame_is_thumb (this_frame))
2755 return thumb_stack_frame_destroyed_p (gdbarch, pc);
2756 else
2757 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
2759 else
2760 return 0;
2763 /* Frame unwinder from epilogue. */
2765 static const struct frame_unwind arm_epilogue_frame_unwind =
2767 NORMAL_FRAME,
2768 default_frame_unwind_stop_reason,
2769 arm_epilogue_frame_this_id,
2770 arm_epilogue_frame_prev_register,
2771 NULL,
2772 arm_epilogue_frame_sniffer,
2775 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2776 trampoline, return the target PC. Otherwise return 0.
2778 void call0a (char c, short s, int i, long l) {}
2780 int main (void)
2782 (*pointer_to_call0a) (c, s, i, l);
2785 Instead of calling a stub library function _call_via_xx (xx is
2786 the register name), GCC may inline the trampoline in the object
2787 file as below (register r2 has the address of call0a).
2789 .global main
2790 .type main, %function
2792 bl .L1
2794 .size main, .-main
2796 .L1:
2797 bx r2
2799 The trampoline 'bx r2' doesn't belong to main. */
2801 static CORE_ADDR
2802 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2804 /* The heuristic for recognizing such a trampoline is that FRAME is
2805 executing in Thumb mode and the instruction at PC is 'bx Rm'. */
2806 if (arm_frame_is_thumb (frame))
2808 gdb_byte buf[2];
2810 if (target_read_memory (pc, buf, 2) == 0)
2812 struct gdbarch *gdbarch = get_frame_arch (frame);
2813 enum bfd_endian byte_order_for_code
2814 = gdbarch_byte_order_for_code (gdbarch);
2815 uint16_t insn
2816 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2818 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2820 CORE_ADDR dest
2821 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2823 /* Clear the LSB so that gdb core sets step-resume
2824 breakpoint at the right address. */
2825 return UNMAKE_THUMB_ADDR (dest);
2830 return 0;
2833 static struct arm_prologue_cache *
2834 arm_make_stub_cache (struct frame_info *this_frame)
2836 struct arm_prologue_cache *cache;
2838 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2839 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2841 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2843 return cache;
2846 /* Our frame ID for a stub frame is the current SP and LR. */
2848 static void
2849 arm_stub_this_id (struct frame_info *this_frame,
2850 void **this_cache,
2851 struct frame_id *this_id)
2853 struct arm_prologue_cache *cache;
2855 if (*this_cache == NULL)
2856 *this_cache = arm_make_stub_cache (this_frame);
2857 cache = (struct arm_prologue_cache *) *this_cache;
2859 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2862 static int
2863 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2864 struct frame_info *this_frame,
2865 void **this_prologue_cache)
2867 CORE_ADDR addr_in_block;
2868 gdb_byte dummy[4];
2869 CORE_ADDR pc, start_addr;
2870 const char *name;
2872 addr_in_block = get_frame_address_in_block (this_frame);
2873 pc = get_frame_pc (this_frame);
2874 if (in_plt_section (addr_in_block)
2875 /* We also use the stub unwinder if the target memory is unreadable
2876 to avoid having the prologue unwinder try to read it. */
2877 || target_read_memory (pc, dummy, 4) != 0)
2878 return 1;
2880 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2881 && arm_skip_bx_reg (this_frame, pc) != 0)
2882 return 1;
2884 return 0;
2887 struct frame_unwind arm_stub_unwind = {
2888 NORMAL_FRAME,
2889 default_frame_unwind_stop_reason,
2890 arm_stub_this_id,
2891 arm_prologue_prev_register,
2892 NULL,
2893 arm_stub_unwind_sniffer
2896 /* Put here the code to store, into CACHE->saved_regs, the addresses
2897 of the saved registers of frame described by THIS_FRAME. CACHE is
2898 returned. */
2900 static struct arm_prologue_cache *
2901 arm_m_exception_cache (struct frame_info *this_frame)
2903 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2904 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
2905 struct arm_prologue_cache *cache;
2906 CORE_ADDR unwound_sp;
2907 LONGEST xpsr;
2909 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2910 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2912 unwound_sp = get_frame_register_unsigned (this_frame,
2913 ARM_SP_REGNUM);
2915 /* The hardware saves eight 32-bit words, comprising xPSR,
2916 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
2917 "B1.5.6 Exception entry behavior" in
2918 "ARMv7-M Architecture Reference Manual". */
2919 cache->saved_regs[0].addr = unwound_sp;
2920 cache->saved_regs[1].addr = unwound_sp + 4;
2921 cache->saved_regs[2].addr = unwound_sp + 8;
2922 cache->saved_regs[3].addr = unwound_sp + 12;
2923 cache->saved_regs[12].addr = unwound_sp + 16;
2924 cache->saved_regs[14].addr = unwound_sp + 20;
2925 cache->saved_regs[15].addr = unwound_sp + 24;
2926 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
2928 /* If bit 9 of the saved xPSR is set, then there is a four-byte
2929 aligner between the top of the 32-byte stack frame and the
2930 previous context's stack pointer. */
2931 cache->prev_sp = unwound_sp + 32;
2932 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
2933 && (xpsr & (1 << 9)) != 0)
2934 cache->prev_sp += 4;
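/* Illustration: with UNWOUND_SP == 0x2000ffc0 and xPSR bit 9 set, the
   caller's SP is 0x2000ffc0 + 32 + 4 == 0x2000ffe4.  */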
2936 return cache;
2939 /* Implementation of function hook 'this_id' in
2940 'struct frame_unwind'. */
2942 static void
2943 arm_m_exception_this_id (struct frame_info *this_frame,
2944 void **this_cache,
2945 struct frame_id *this_id)
2947 struct arm_prologue_cache *cache;
2949 if (*this_cache == NULL)
2950 *this_cache = arm_m_exception_cache (this_frame);
2951 cache = (struct arm_prologue_cache *) *this_cache;
2953 /* Our frame ID for a stub frame is the current SP and LR. */
2954 *this_id = frame_id_build (cache->prev_sp,
2955 get_frame_pc (this_frame));
2958 /* Implementation of function hook 'prev_register' in
2959 'struct frame_unwind'. */
2961 static struct value *
2962 arm_m_exception_prev_register (struct frame_info *this_frame,
2963 void **this_cache,
2964 int prev_regnum)
2966 struct arm_prologue_cache *cache;
2968 if (*this_cache == NULL)
2969 *this_cache = arm_m_exception_cache (this_frame);
2970 cache = (struct arm_prologue_cache *) *this_cache;
2972 /* The value was already reconstructed into PREV_SP. */
2973 if (prev_regnum == ARM_SP_REGNUM)
2974 return frame_unwind_got_constant (this_frame, prev_regnum,
2975 cache->prev_sp);
2977 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2978 prev_regnum);
2981 /* Implementation of function hook 'sniffer' in
2982 'struct frame_unwind'. */
2984 static int
2985 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
2986 struct frame_info *this_frame,
2987 void **this_prologue_cache)
2989 CORE_ADDR this_pc = get_frame_pc (this_frame);
2991 /* No need to check is_m; this sniffer is only registered for
2992 M-profile architectures. */
2994 /* Exception frames return to one of these magic PCs. Other values
2995 are not defined as of v7-M. See details in "B1.5.8 Exception
2996 return behavior" in "ARMv7-M Architecture Reference Manual". */
2997 if (this_pc == 0xfffffff1 || this_pc == 0xfffffff9
2998 || this_pc == 0xfffffffd)
2999 return 1;
3001 return 0;
3004 /* Frame unwinder for M-profile exceptions. */
3006 struct frame_unwind arm_m_exception_unwind =
3008 SIGTRAMP_FRAME,
3009 default_frame_unwind_stop_reason,
3010 arm_m_exception_this_id,
3011 arm_m_exception_prev_register,
3012 NULL,
3013 arm_m_exception_unwind_sniffer
3016 static CORE_ADDR
3017 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3019 struct arm_prologue_cache *cache;
3021 if (*this_cache == NULL)
3022 *this_cache = arm_make_prologue_cache (this_frame);
3023 cache = (struct arm_prologue_cache *) *this_cache;
3025 return cache->prev_sp - cache->framesize;
3028 struct frame_base arm_normal_base = {
3029 &arm_prologue_unwind,
3030 arm_normal_frame_base,
3031 arm_normal_frame_base,
3032 arm_normal_frame_base
3035 /* Assuming THIS_FRAME is a dummy, return the frame ID of that
3036 dummy frame. The frame ID's base needs to match the TOS value
3037 saved by save_dummy_frame_tos() and returned from
3038 arm_push_dummy_call, and the PC needs to match the dummy frame's
3039 breakpoint. */
3041 static struct frame_id
3042 arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
3044 return frame_id_build (get_frame_register_unsigned (this_frame,
3045 ARM_SP_REGNUM),
3046 get_frame_pc (this_frame));
3049 /* Given THIS_FRAME, find the previous frame's resume PC (which will
3050 be used to construct the previous frame's ID, after looking up the
3051 containing function). */
3053 static CORE_ADDR
3054 arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
3056 CORE_ADDR pc;
3057 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
3058 return arm_addr_bits_remove (gdbarch, pc);
3061 static CORE_ADDR
3062 arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
3064 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
3067 static struct value *
3068 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3069 int regnum)
3071 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3072 CORE_ADDR lr, cpsr;
3073 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
3075 switch (regnum)
3077 case ARM_PC_REGNUM:
3078 /* The PC is normally copied from the return column, which
3079 describes saves of LR. However, that version may have an
3080 extra bit set to indicate Thumb state. The bit is not
3081 part of the PC. */
3082 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3083 return frame_unwind_got_constant (this_frame, regnum,
3084 arm_addr_bits_remove (gdbarch, lr));
3086 case ARM_PS_REGNUM:
3087 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3088 cpsr = get_frame_register_unsigned (this_frame, regnum);
3089 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3090 if (IS_THUMB_ADDR (lr))
3091 cpsr |= t_bit;
3092 else
3093 cpsr &= ~t_bit;
3094 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3096 default:
3097 internal_error (__FILE__, __LINE__,
3098 _("Unexpected register %d"), regnum);
3102 static void
3103 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3104 struct dwarf2_frame_state_reg *reg,
3105 struct frame_info *this_frame)
3107 switch (regnum)
3109 case ARM_PC_REGNUM:
3110 case ARM_PS_REGNUM:
3111 reg->how = DWARF2_FRAME_REG_FN;
3112 reg->loc.fn = arm_dwarf2_prev_register;
3113 break;
3114 case ARM_SP_REGNUM:
3115 reg->how = DWARF2_FRAME_REG_CFA;
3116 break;
3120 /* Implement the stack_frame_destroyed_p gdbarch method. */
3122 static int
3123 thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3125 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3126 unsigned int insn, insn2;
3127 int found_return = 0, found_stack_adjust = 0;
3128 CORE_ADDR func_start, func_end;
3129 CORE_ADDR scan_pc;
3130 gdb_byte buf[4];
3132 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3133 return 0;
3135 /* The epilogue is a sequence of instructions along the following lines:
3137 - add stack frame size to SP or FP
3138 - [if frame pointer used] restore SP from FP
3139 - restore registers from SP [may include PC]
3140 - a return-type instruction [if PC wasn't already restored]
3142 In a first pass, we scan forward from the current PC and verify the
3143 instructions we find as compatible with this sequence, ending in a
3144 return instruction.
3146 However, this is not sufficient to distinguish indirect function calls
3147 within a function from indirect tail calls in the epilogue in some cases.
3148 Therefore, if we didn't already find any SP-changing instruction during
3149 forward scan, we add a backward scanning heuristic to ensure we actually
3150 are in the epilogue. */
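/* A typical Thumb epilogue matched by the scan below looks like
   "add sp, #16" followed by "pop {r4, r5, pc}", or a register restore
   followed by "bx lr"; these are illustrative shapes only, not an
   exhaustive list.  */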
3152 scan_pc = pc;
3153 while (scan_pc < func_end && !found_return)
3155 if (target_read_memory (scan_pc, buf, 2))
3156 break;
3158 scan_pc += 2;
3159 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3161 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
3162 found_return = 1;
3163 else if (insn == 0x46f7) /* mov pc, lr */
3164 found_return = 1;
3165 else if (thumb_instruction_restores_sp (insn))
3167 if ((insn & 0xff00) == 0xbd00) /* pop <registers, PC> */
3168 found_return = 1;
3170 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instruction */
3172 if (target_read_memory (scan_pc, buf, 2))
3173 break;
3175 scan_pc += 2;
3176 insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);
3178 if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3180 if (insn2 & 0x8000) /* <registers> include PC. */
3181 found_return = 1;
3183 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3184 && (insn2 & 0x0fff) == 0x0b04)
3186 if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC. */
3187 found_return = 1;
3189 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3190 && (insn2 & 0x0e00) == 0x0a00)
3192 else
3193 break;
3195 else
3196 break;
3199 if (!found_return)
3200 return 0;
3202 /* Since any instruction in the epilogue sequence, with the possible
3203 exception of return itself, updates the stack pointer, we need to
3204 scan backwards for at most one instruction. Try either a 16-bit or
3205 a 32-bit instruction. This is just a heuristic, so we do not worry
3206 too much about false positives. */
3208 if (pc - 4 < func_start)
3209 return 0;
3210 if (target_read_memory (pc - 4, buf, 4))
3211 return 0;
3213 insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
3214 insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);
3216 if (thumb_instruction_restores_sp (insn2))
3217 found_stack_adjust = 1;
3218 else if (insn == 0xe8bd) /* ldm.w sp!, <registers> */
3219 found_stack_adjust = 1;
3220 else if (insn == 0xf85d /* ldr.w <Rt>, [sp], #4 */
3221 && (insn2 & 0x0fff) == 0x0b04)
3222 found_stack_adjust = 1;
3223 else if ((insn & 0xffbf) == 0xecbd /* vldm sp!, <list> */
3224 && (insn2 & 0x0e00) == 0x0a00)
3225 found_stack_adjust = 1;
3227 return found_stack_adjust;
3230 static int
3231 arm_stack_frame_destroyed_p_1 (struct gdbarch *gdbarch, CORE_ADDR pc)
3233 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
3234 unsigned int insn;
3235 int found_return;
3236 CORE_ADDR func_start, func_end;
3238 if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
3239 return 0;
3241 /* We are in the epilogue if the previous instruction was a stack
3242 adjustment and the next instruction is a possible return (bx, mov
3243 pc, or pop). We could have to scan backwards to find the stack
3244 adjustment, or forwards to find the return, but this is a decent
3245 approximation. First scan forwards. */
3247 found_return = 0;
3248 insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
3249 if (bits (insn, 28, 31) != INST_NV)
3251 if ((insn & 0x0ffffff0) == 0x012fff10)
3252 /* BX. */
3253 found_return = 1;
3254 else if ((insn & 0x0ffffff0) == 0x01a0f000)
3255 /* MOV PC. */
3256 found_return = 1;
3257 else if ((insn & 0x0fff0000) == 0x08bd0000
3258 && (insn & 0x0000c000) != 0)
3259 /* POP (LDMIA), including PC or LR. */
3260 found_return = 1;
3263 if (!found_return)
3264 return 0;
3266 /* Scan backwards. This is just a heuristic, so do not worry about
3267 false positives from mode changes. */
3269 if (pc < func_start + 4)
3270 return 0;
3272 insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
3273 if (arm_instruction_restores_sp (insn))
3274 return 1;
3276 return 0;
3279 /* Implement the stack_frame_destroyed_p gdbarch method. */
3281 static int
3282 arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
3284 if (arm_pc_is_thumb (gdbarch, pc))
3285 return thumb_stack_frame_destroyed_p (gdbarch, pc);
3286 else
3287 return arm_stack_frame_destroyed_p_1 (gdbarch, pc);
3290 /* When arguments must be pushed onto the stack, they go on in reverse
3291 order. The code below implements a FILO (first-in, last-out) stack to do this. */
3293 struct stack_item
3295 int len;
3296 struct stack_item *prev;
3297 gdb_byte *data;
3300 static struct stack_item *
3301 push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
3303 struct stack_item *si;
3304 si = XNEW (struct stack_item);
3305 si->data = (gdb_byte *) xmalloc (len);
3306 si->len = len;
3307 si->prev = prev;
3308 memcpy (si->data, contents, len);
3309 return si;
3312 static struct stack_item *
3313 pop_stack_item (struct stack_item *si)
3315 struct stack_item *dead = si;
3316 si = si->prev;
3317 xfree (dead->data);
3318 xfree (dead);
3319 return si;
3323 /* Return the alignment (in bytes) of the given type. */
3325 static int
3326 arm_type_align (struct type *t)
3328 int n;
3329 int align;
3330 int falign;
3332 t = check_typedef (t);
3333 switch (TYPE_CODE (t))
3335 default:
3336 /* Should never happen. */
3337 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
3338 return 4;
3340 case TYPE_CODE_PTR:
3341 case TYPE_CODE_ENUM:
3342 case TYPE_CODE_INT:
3343 case TYPE_CODE_FLT:
3344 case TYPE_CODE_SET:
3345 case TYPE_CODE_RANGE:
3346 case TYPE_CODE_REF:
3347 case TYPE_CODE_CHAR:
3348 case TYPE_CODE_BOOL:
3349 return TYPE_LENGTH (t);
3351 case TYPE_CODE_ARRAY:
3352 if (TYPE_VECTOR (t))
3354 /* Use the natural alignment for vector types (the same as for the
3355 element scalar type), but the maximum alignment is 64 bits. */
3356 if (TYPE_LENGTH (t) > 8)
3357 return 8;
3358 else
3359 return TYPE_LENGTH (t);
3361 else
3362 return arm_type_align (TYPE_TARGET_TYPE (t));
3363 case TYPE_CODE_COMPLEX:
3364 return arm_type_align (TYPE_TARGET_TYPE (t));
3366 case TYPE_CODE_STRUCT:
3367 case TYPE_CODE_UNION:
3368 align = 1;
3369 for (n = 0; n < TYPE_NFIELDS (t); n++)
3371 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3372 if (falign > align)
3373 align = falign;
3375 return align;
3379 /* Possible base types for a candidate for passing and returning in
3380 VFP registers. */
3382 enum arm_vfp_cprc_base_type
3384 VFP_CPRC_UNKNOWN,
3385 VFP_CPRC_SINGLE,
3386 VFP_CPRC_DOUBLE,
3387 VFP_CPRC_VEC64,
3388 VFP_CPRC_VEC128
3391 /* The length of one element of base type B. */
3393 static unsigned
3394 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3396 switch (b)
3398 case VFP_CPRC_SINGLE:
3399 return 4;
3400 case VFP_CPRC_DOUBLE:
3401 return 8;
3402 case VFP_CPRC_VEC64:
3403 return 8;
3404 case VFP_CPRC_VEC128:
3405 return 16;
3406 default:
3407 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3408 (int) b);
3412 /* The character ('s', 'd' or 'q') for the type of VFP register used
3413 for passing base type B. */
3415 static int
3416 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3418 switch (b)
3420 case VFP_CPRC_SINGLE:
3421 return 's';
3422 case VFP_CPRC_DOUBLE:
3423 return 'd';
3424 case VFP_CPRC_VEC64:
3425 return 'd';
3426 case VFP_CPRC_VEC128:
3427 return 'q';
3428 default:
3429 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3430 (int) b);
3434 /* Determine whether T may be part of a candidate for passing and
3435 returning in VFP registers, ignoring the limit on the total number
3436 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3437 classification of the first valid component found; if it is not
3438 VFP_CPRC_UNKNOWN, all components must have the same classification
3439 as *BASE_TYPE. If it is found that T contains a type not permitted
3440 for passing and returning in VFP registers, a type differently
3441 classified from *BASE_TYPE, or two types differently classified
3442 from each other, return -1, otherwise return the total number of
3443 base-type elements found (possibly 0 in an empty structure or
3444 array). Vector types are not currently supported, matching the
3445 generic AAPCS support. */
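/* For example (illustrative only): "struct { double x, y; }"
   classifies as two VFP_CPRC_DOUBLE elements and so is a CPRC, while
   "struct { float f; double d; }" mixes base types and yields -1.  */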
3447 static int
3448 arm_vfp_cprc_sub_candidate (struct type *t,
3449 enum arm_vfp_cprc_base_type *base_type)
3451 t = check_typedef (t);
3452 switch (TYPE_CODE (t))
3454 case TYPE_CODE_FLT:
3455 switch (TYPE_LENGTH (t))
3457 case 4:
3458 if (*base_type == VFP_CPRC_UNKNOWN)
3459 *base_type = VFP_CPRC_SINGLE;
3460 else if (*base_type != VFP_CPRC_SINGLE)
3461 return -1;
3462 return 1;
3464 case 8:
3465 if (*base_type == VFP_CPRC_UNKNOWN)
3466 *base_type = VFP_CPRC_DOUBLE;
3467 else if (*base_type != VFP_CPRC_DOUBLE)
3468 return -1;
3469 return 1;
3471 default:
3472 return -1;
3474 break;
3476 case TYPE_CODE_COMPLEX:
3477 /* Arguments of complex T where T is one of the types float or
3478 double get treated as if they are implemented as:
3480 struct complexT
3482 T real;
3483 T imag;
3487 switch (TYPE_LENGTH (t))
3489 case 8:
3490 if (*base_type == VFP_CPRC_UNKNOWN)
3491 *base_type = VFP_CPRC_SINGLE;
3492 else if (*base_type != VFP_CPRC_SINGLE)
3493 return -1;
3494 return 2;
3496 case 16:
3497 if (*base_type == VFP_CPRC_UNKNOWN)
3498 *base_type = VFP_CPRC_DOUBLE;
3499 else if (*base_type != VFP_CPRC_DOUBLE)
3500 return -1;
3501 return 2;
3503 default:
3504 return -1;
3506 break;
3508 case TYPE_CODE_ARRAY:
3510 if (TYPE_VECTOR (t))
3512 /* 64-bit and 128-bit containerized vector types are VFP
3513 CPRCs. */
3514 switch (TYPE_LENGTH (t))
3516 case 8:
3517 if (*base_type == VFP_CPRC_UNKNOWN)
3518 *base_type = VFP_CPRC_VEC64;
3519 return 1;
3520 case 16:
3521 if (*base_type == VFP_CPRC_UNKNOWN)
3522 *base_type = VFP_CPRC_VEC128;
3523 return 1;
3524 default:
3525 return -1;
3528 else
3530 int count;
3531 unsigned unitlen;
3533 count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
3534 base_type);
3535 if (count == -1)
3536 return -1;
3537 if (TYPE_LENGTH (t) == 0)
3539 gdb_assert (count == 0);
3540 return 0;
3542 else if (count == 0)
3543 return -1;
3544 unitlen = arm_vfp_cprc_unit_length (*base_type);
3545 gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
3546 return TYPE_LENGTH (t) / unitlen;
3549 break;
3551 case TYPE_CODE_STRUCT:
3553 int count = 0;
3554 unsigned unitlen;
3555 int i;
3556 for (i = 0; i < TYPE_NFIELDS (t); i++)
3558 int sub_count = 0;
3560 if (!field_is_static (&TYPE_FIELD (t, i)))
3561 sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3562 base_type);
3563 if (sub_count == -1)
3564 return -1;
3565 count += sub_count;
3567 if (TYPE_LENGTH (t) == 0)
3569 gdb_assert (count == 0);
3570 return 0;
3572 else if (count == 0)
3573 return -1;
3574 unitlen = arm_vfp_cprc_unit_length (*base_type);
3575 if (TYPE_LENGTH (t) != unitlen * count)
3576 return -1;
3577 return count;
3580 case TYPE_CODE_UNION:
3582 int count = 0;
3583 unsigned unitlen;
3584 int i;
3585 for (i = 0; i < TYPE_NFIELDS (t); i++)
3587 int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
3588 base_type);
3589 if (sub_count == -1)
3590 return -1;
3591 count = (count > sub_count ? count : sub_count);
3593 if (TYPE_LENGTH (t) == 0)
3595 gdb_assert (count == 0);
3596 return 0;
3598 else if (count == 0)
3599 return -1;
3600 unitlen = arm_vfp_cprc_unit_length (*base_type);
3601 if (TYPE_LENGTH (t) != unitlen * count)
3602 return -1;
3603 return count;
3606 default:
3607 break;
3610 return -1;
3613 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3614 if passed to or returned from a non-variadic function with the VFP
3615 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3616 *BASE_TYPE to the base type for T and *COUNT to the number of
3617 elements of that base type before returning. */
3619 static int
3620 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3621 int *count)
3623 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3624 int c = arm_vfp_cprc_sub_candidate (t, &b);
3625 if (c <= 0 || c > 4)
3626 return 0;
3627 *base_type = b;
3628 *count = c;
3629 return 1;
3632 /* Return 1 if the VFP ABI should be used for passing arguments to and
3633 returning values from a function of type FUNC_TYPE, 0
3634 otherwise. */
3636 static int
3637 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3639 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3640 /* Variadic functions always use the base ABI. Assume that functions
3641 without debug info are not variadic. */
3642 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3643 return 0;
3644 /* The VFP ABI is only supported as a variant of AAPCS. */
3645 if (tdep->arm_abi != ARM_ABI_AAPCS)
3646 return 0;
3647 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3650 /* We currently only support passing parameters in integer registers, which
3651 conforms with GCC's default model, and VFP argument passing following
3652 the VFP variant of AAPCS. Several other variants exist and
3653 we should probably support some of them based on the selected ABI. */
3655 static CORE_ADDR
3656 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3657 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3658 struct value **args, CORE_ADDR sp, int struct_return,
3659 CORE_ADDR struct_addr)
3661 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3662 int argnum;
3663 int argreg;
3664 int nstack;
3665 struct stack_item *si = NULL;
3666 int use_vfp_abi;
3667 struct type *ftype;
3668 unsigned vfp_regs_free = (1 << 16) - 1;
3670 /* Determine the type of this function and whether the VFP ABI
3671 applies. */
3672 ftype = check_typedef (value_type (function));
3673 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3674 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3675 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3677 /* Set the return address. For the ARM, the return breakpoint is
3678 always at BP_ADDR. */
3679 if (arm_pc_is_thumb (gdbarch, bp_addr))
3680 bp_addr |= 1;
3681 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3683 /* Walk through the list of args and determine how large a temporary
3684 stack is required. Need to take care here as structs may be
3685 passed on the stack, and we have to push them. */
3686 nstack = 0;
3688 argreg = ARM_A1_REGNUM;
3689 nstack = 0;
3691 /* The struct_return pointer occupies the first parameter
3692 passing register. */
3693 if (struct_return)
3695 if (arm_debug)
3696 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3697 gdbarch_register_name (gdbarch, argreg),
3698 paddress (gdbarch, struct_addr));
3699 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3700 argreg++;
3703 for (argnum = 0; argnum < nargs; argnum++)
3705 int len;
3706 struct type *arg_type;
3707 struct type *target_type;
3708 enum type_code typecode;
3709 const bfd_byte *val;
3710 int align;
3711 enum arm_vfp_cprc_base_type vfp_base_type;
3712 int vfp_base_count;
3713 int may_use_core_reg = 1;
3715 arg_type = check_typedef (value_type (args[argnum]));
3716 len = TYPE_LENGTH (arg_type);
3717 target_type = TYPE_TARGET_TYPE (arg_type);
3718 typecode = TYPE_CODE (arg_type);
3719 val = value_contents (args[argnum]);
3721 align = arm_type_align (arg_type);
3722 /* Round alignment up to a whole number of words. */
3723 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3724 /* Different ABIs have different maximum alignments. */
3725 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3727 /* The APCS ABI only requires word alignment. */
3728 align = INT_REGISTER_SIZE;
3730 else
3732 /* The AAPCS requires at most doubleword alignment. */
3733 if (align > INT_REGISTER_SIZE * 2)
3734 align = INT_REGISTER_SIZE * 2;
3737 if (use_vfp_abi
3738 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3739 &vfp_base_count))
3741 int regno;
3742 int unit_length;
3743 int shift;
3744 unsigned mask;
3746 /* Because this is a CPRC it cannot go in a core register or
3747 cause a core register to be skipped for alignment.
3748 Either it goes in VFP registers and the rest of this loop
3749 iteration is skipped for this argument, or it goes on the
3750 stack (and the stack alignment code is correct for this
3751 case). */
3752 may_use_core_reg = 0;
3754 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3755 shift = unit_length / 4;
3756 mask = (1 << (shift * vfp_base_count)) - 1;
3757 for (regno = 0; regno < 16; regno += shift)
3758 if (((vfp_regs_free >> regno) & mask) == mask)
3759 break;
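/* Example of the search above (illustrative): for a single "double"
   argument, UNIT_LENGTH is 8, SHIFT is 2 and MASK is 0x3, so with all
   VFP registers free the loop stops at REGNO == 0 and the value is
   written to d0 (REG_SCALED == 0).  */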
3761 if (regno < 16)
3763 int reg_char;
3764 int reg_scaled;
3765 int i;
3767 vfp_regs_free &= ~(mask << regno);
3768 reg_scaled = regno / shift;
3769 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3770 for (i = 0; i < vfp_base_count; i++)
3772 char name_buf[4];
3773 int regnum;
3774 if (reg_char == 'q')
3775 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3776 val + i * unit_length);
3777 else
3779 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3780 reg_char, reg_scaled + i);
3781 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3782 strlen (name_buf));
3783 regcache_cooked_write (regcache, regnum,
3784 val + i * unit_length);
3787 continue;
3789 else
3791 /* This CPRC could not go in VFP registers, so all VFP
3792 registers are now marked as used. */
3793 vfp_regs_free = 0;
3797 /* Push stack padding for doubleword alignment. */
3798 if (nstack & (align - 1))
3800 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3801 nstack += INT_REGISTER_SIZE;
3804 /* Doubleword aligned quantities must go in even register pairs. */
3805 if (may_use_core_reg
3806 && argreg <= ARM_LAST_ARG_REGNUM
3807 && align > INT_REGISTER_SIZE
3808 && argreg & 1)
3809 argreg++;
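/* E.g. (illustrative): with an "int" already in r0, a following
   "long long" under AAPCS skips r1 and is passed in r2/r3.  */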
3811 /* If the argument is a pointer to a function, and it is a
3812 Thumb function, create a LOCAL copy of the value and set
3813 the THUMB bit in it. */
3814 if (TYPE_CODE_PTR == typecode
3815 && target_type != NULL
3816 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3818 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3819 if (arm_pc_is_thumb (gdbarch, regval))
3821 bfd_byte *copy = (bfd_byte *) alloca (len);
3822 store_unsigned_integer (copy, len, byte_order,
3823 MAKE_THUMB_ADDR (regval));
3824 val = copy;
3828 /* Copy the argument to general registers or the stack in
3829 register-sized pieces. Large arguments are split between
3830 registers and stack. */
3831 while (len > 0)
3833 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
3834 CORE_ADDR regval
3835 = extract_unsigned_integer (val, partial_len, byte_order);
3837 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3839 /* The argument is being passed in a general purpose
3840 register. */
3841 if (byte_order == BFD_ENDIAN_BIG)
3842 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
3843 if (arm_debug)
3844 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3845 argnum,
3846 gdbarch_register_name
3847 (gdbarch, argreg),
3848 phex (regval, INT_REGISTER_SIZE));
3849 regcache_cooked_write_unsigned (regcache, argreg, regval);
3850 argreg++;
3852 else
3854 gdb_byte buf[INT_REGISTER_SIZE];
3856 memset (buf, 0, sizeof (buf));
3857 store_unsigned_integer (buf, partial_len, byte_order, regval);
3859 /* Push the arguments onto the stack. */
3860 if (arm_debug)
3861 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3862 argnum, nstack);
3863 si = push_stack_item (si, buf, INT_REGISTER_SIZE);
3864 nstack += INT_REGISTER_SIZE;
3867 len -= partial_len;
3868 val += partial_len;
3871 /* If we have an odd number of words to push, then decrement the stack
3872 by one word now, so first stack argument will be dword aligned. */
3873 if (nstack & 4)
3874 sp -= 4;
3876 while (si)
3878 sp -= si->len;
3879 write_memory (sp, si->data, si->len);
3880 si = pop_stack_item (si);
3883 /* Finally, update the SP register. */
3884 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3886 return sp;
3890 /* Always align the frame to an 8-byte boundary. This is required on
3891 some platforms and harmless on the rest. */
3893 static CORE_ADDR
3894 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3896 /* Align the stack to eight bytes. */
3897 return sp & ~ (CORE_ADDR) 7;
3900 static void
3901 print_fpu_flags (struct ui_file *file, int flags)
3903 if (flags & (1 << 0))
3904 fputs_filtered ("IVO ", file);
3905 if (flags & (1 << 1))
3906 fputs_filtered ("DVZ ", file);
3907 if (flags & (1 << 2))
3908 fputs_filtered ("OFL ", file);
3909 if (flags & (1 << 3))
3910 fputs_filtered ("UFL ", file);
3911 if (flags & (1 << 4))
3912 fputs_filtered ("INX ", file);
3913 fputc_filtered ('\n', file);
3916 /* Print interesting information about the floating point processor
3917 (if present) or emulator. */
3918 static void
3919 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3920 struct frame_info *frame, const char *args)
3922 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3923 int type;
3925 type = (status >> 24) & 127;
3926 if (status & (1 << 31))
3927 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
3928 else
3929 fprintf_filtered (file, _("Software FPU type %d\n"), type);
3930 /* i18n: [floating point unit] mask */
3931 fputs_filtered (_("mask: "), file);
3932 print_fpu_flags (file, status >> 16);
3933 /* i18n: [floating point unit] flags */
3934 fputs_filtered (_("flags: "), file);
3935 print_fpu_flags (file, status);
3938 /* Construct the ARM extended floating point type. */
3939 static struct type *
3940 arm_ext_type (struct gdbarch *gdbarch)
3942 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3944 if (!tdep->arm_ext_type)
3945 tdep->arm_ext_type
3946 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
3947 floatformats_arm_ext);
3949 return tdep->arm_ext_type;
3952 static struct type *
3953 arm_neon_double_type (struct gdbarch *gdbarch)
3955 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3957 if (tdep->neon_double_type == NULL)
3959 struct type *t, *elem;
3961 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
3962 TYPE_CODE_UNION);
3963 elem = builtin_type (gdbarch)->builtin_uint8;
3964 append_composite_type_field (t, "u8", init_vector_type (elem, 8));
3965 elem = builtin_type (gdbarch)->builtin_uint16;
3966 append_composite_type_field (t, "u16", init_vector_type (elem, 4));
3967 elem = builtin_type (gdbarch)->builtin_uint32;
3968 append_composite_type_field (t, "u32", init_vector_type (elem, 2));
3969 elem = builtin_type (gdbarch)->builtin_uint64;
3970 append_composite_type_field (t, "u64", elem);
3971 elem = builtin_type (gdbarch)->builtin_float;
3972 append_composite_type_field (t, "f32", init_vector_type (elem, 2));
3973 elem = builtin_type (gdbarch)->builtin_double;
3974 append_composite_type_field (t, "f64", elem);
3976 TYPE_VECTOR (t) = 1;
3977 TYPE_NAME (t) = "neon_d";
3978 tdep->neon_double_type = t;
3981 return tdep->neon_double_type;
3984 /* FIXME: The vector types are not correctly ordered on big-endian
3985 targets. Just as s0 is the low bits of d0, d0[0] is also the low
3986 bits of d0 - regardless of what unit size is being held in d0. So
3987 the offset of the first uint8 in d0 is 7, but the offset of the
3988 first float is 4. This code works as-is for little-endian
3989 targets. */
3991 static struct type *
3992 arm_neon_quad_type (struct gdbarch *gdbarch)
3994 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3996 if (tdep->neon_quad_type == NULL)
3998 struct type *t, *elem;
4000 t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
4001 TYPE_CODE_UNION);
4002 elem = builtin_type (gdbarch)->builtin_uint8;
4003 append_composite_type_field (t, "u8", init_vector_type (elem, 16));
4004 elem = builtin_type (gdbarch)->builtin_uint16;
4005 append_composite_type_field (t, "u16", init_vector_type (elem, 8));
4006 elem = builtin_type (gdbarch)->builtin_uint32;
4007 append_composite_type_field (t, "u32", init_vector_type (elem, 4));
4008 elem = builtin_type (gdbarch)->builtin_uint64;
4009 append_composite_type_field (t, "u64", init_vector_type (elem, 2));
4010 elem = builtin_type (gdbarch)->builtin_float;
4011 append_composite_type_field (t, "f32", init_vector_type (elem, 4));
4012 elem = builtin_type (gdbarch)->builtin_double;
4013 append_composite_type_field (t, "f64", init_vector_type (elem, 2));
4015 TYPE_VECTOR (t) = 1;
4016 TYPE_NAME (t) = "neon_q";
4017 tdep->neon_quad_type = t;
4020 return tdep->neon_quad_type;
4023 /* Return the GDB type object for the "standard" data type of data in
4024 register N. */
4026 static struct type *
4027 arm_register_type (struct gdbarch *gdbarch, int regnum)
4029 int num_regs = gdbarch_num_regs (gdbarch);
4031 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
4032 && regnum >= num_regs && regnum < num_regs + 32)
4033 return builtin_type (gdbarch)->builtin_float;
4035 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
4036 && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
4037 return arm_neon_quad_type (gdbarch);
4039 /* If the target description has register information, we are only
4040 in this function so that we can override the types of
4041 double-precision registers for NEON. */
4042 if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
4044 struct type *t = tdesc_register_type (gdbarch, regnum);
4046 if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
4047 && TYPE_CODE (t) == TYPE_CODE_FLT
4048 && gdbarch_tdep (gdbarch)->have_neon)
4049 return arm_neon_double_type (gdbarch);
4050 else
4051 return t;
4054 if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
4056 if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
4057 return builtin_type (gdbarch)->builtin_void;
4059 return arm_ext_type (gdbarch);
4061 else if (regnum == ARM_SP_REGNUM)
4062 return builtin_type (gdbarch)->builtin_data_ptr;
4063 else if (regnum == ARM_PC_REGNUM)
4064 return builtin_type (gdbarch)->builtin_func_ptr;
4065 else if (regnum >= ARRAY_SIZE (arm_register_names))
4066 /* These registers are only supported on targets which supply
4067 an XML description. */
4068 return builtin_type (gdbarch)->builtin_int0;
4069 else
4070 return builtin_type (gdbarch)->builtin_uint32;
4073 /* Map a DWARF register REGNUM onto the appropriate GDB register
4074 number. */
4076 static int
4077 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4079 /* Core integer regs. */
4080 if (reg >= 0 && reg <= 15)
4081 return reg;
4083 /* Legacy FPA encoding. These were once used in a way which
4084 overlapped with VFP register numbering, so their use is
4085 discouraged, but GDB doesn't support the ARM toolchain
4086 which used them for VFP. */
4087 if (reg >= 16 && reg <= 23)
4088 return ARM_F0_REGNUM + reg - 16;
4090 /* New assignments for the FPA registers. */
4091 if (reg >= 96 && reg <= 103)
4092 return ARM_F0_REGNUM + reg - 96;
4094 /* WMMX register assignments. */
4095 if (reg >= 104 && reg <= 111)
4096 return ARM_WCGR0_REGNUM + reg - 104;
4098 if (reg >= 112 && reg <= 127)
4099 return ARM_WR0_REGNUM + reg - 112;
4101 if (reg >= 192 && reg <= 199)
4102 return ARM_WC0_REGNUM + reg - 192;
4104 /* VFP v2 registers. A double precision value is actually
4105 in d1 rather than s2, but the ABI only defines numbering
4106 for the single precision registers. This will "just work"
4107 in GDB for little endian targets (we'll read eight bytes,
4108 starting in s0 and then progressing to s1), but will be
4109 reversed on big endian targets with VFP. This won't
4110 be a problem for the new Neon quad registers; you're supposed
4111 to use DW_OP_piece for those. */
4112 if (reg >= 64 && reg <= 95)
4114 char name_buf[4];
4116 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4117 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4118 strlen (name_buf));
4121 /* VFP v3 / Neon registers. This range is also used for VFP v2
4122 registers, except that it now describes d0 instead of s0. */
4123 if (reg >= 256 && reg <= 287)
4125 char name_buf[4];
4127 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4128 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4129 strlen (name_buf));
4132 return -1;
4135 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4136 static int
4137 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4139 int reg = regnum;
4140 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4142 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4143 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4145 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4146 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4148 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4149 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4151 if (reg < NUM_GREGS)
4152 return SIM_ARM_R0_REGNUM + reg;
4153 reg -= NUM_GREGS;
4155 if (reg < NUM_FREGS)
4156 return SIM_ARM_FP0_REGNUM + reg;
4157 reg -= NUM_FREGS;
4159 if (reg < NUM_SREGS)
4160 return SIM_ARM_FPS_REGNUM + reg;
4161 reg -= NUM_SREGS;
4163 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4166 /* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4167 convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
4168 It is thought that this is the floating-point register format on
4169 little-endian systems. */
4171 static void
4172 convert_from_extended (const struct floatformat *fmt, const void *ptr,
4173 void *dbl, int endianess)
4175 DOUBLEST d;
4177 if (endianess == BFD_ENDIAN_BIG)
4178 floatformat_to_doublest (&floatformat_arm_ext_big, ptr, &d);
4179 else
4180 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword,
4181 ptr, &d);
4182 floatformat_from_doublest (fmt, &d, dbl);
4185 static void
4186 convert_to_extended (const struct floatformat *fmt, void *dbl, const void *ptr,
4187 int endianess)
4189 DOUBLEST d;
4191 floatformat_to_doublest (fmt, ptr, &d);
4192 if (endianess == BFD_ENDIAN_BIG)
4193 floatformat_from_doublest (&floatformat_arm_ext_big, &d, dbl);
4194 else
4195 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword,
4196 &d, dbl);
4199 /* Like insert_single_step_breakpoint, but make sure we use a breakpoint
4200 of the appropriate mode (as encoded in the PC value), even if this
4201 differs from what would be expected according to the symbol tables. */
4203 void
4204 arm_insert_single_step_breakpoint (struct gdbarch *gdbarch,
4205 struct address_space *aspace,
4206 CORE_ADDR pc)
4208 struct cleanup *old_chain
4209 = make_cleanup_restore_integer (&arm_override_mode);
4211 arm_override_mode = IS_THUMB_ADDR (pc);
4212 pc = gdbarch_addr_bits_remove (gdbarch, pc);
4214 insert_single_step_breakpoint (gdbarch, aspace, pc);
4216 do_cleanups (old_chain);
4219 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4220 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4221 NULL if an error occurs. BUF is freed. */
4223 static gdb_byte *
4224 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
4225 int old_len, int new_len)
4227 gdb_byte *new_buf;
4228 int bytes_to_read = new_len - old_len;
4230 new_buf = (gdb_byte *) xmalloc (new_len);
4231 memcpy (new_buf + bytes_to_read, buf, old_len);
4232 xfree (buf);
4233 if (target_read_memory (endaddr - new_len, new_buf, bytes_to_read) != 0)
4235 xfree (new_buf);
4236 return NULL;
4238 return new_buf;
4241 /* An IT block is at most the 2-byte IT instruction followed by
4242 four 4-byte instructions. The furthest back we must search to
4243 find an IT block that affects the current instruction is thus
4244 2 + 3 * 4 == 14 bytes. */
4245 #define MAX_IT_BLOCK_PREFIX 14
4247 /* Use a quick scan if there are more than this many bytes of
4248 code. */
4249 #define IT_SCAN_THRESHOLD 32
4251 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4252 A breakpoint in an IT block may not be hit, depending on the
4253 condition flags. */
4254 static CORE_ADDR
4255 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
4257 gdb_byte *buf;
4258 char map_type;
4259 CORE_ADDR boundary, func_start;
4260 int buf_len;
4261 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
4262 int i, any, last_it, last_it_count;
4264 /* If we are using BKPT breakpoints, none of this is necessary. */
4265 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
4266 return bpaddr;
4268 /* ARM mode does not have this problem. */
4269 if (!arm_pc_is_thumb (gdbarch, bpaddr))
4270 return bpaddr;
4272 /* We are setting a breakpoint in Thumb code that could potentially
4273 contain an IT block. The first step is to find how much Thumb
4274 code there is; we do not need to read outside of known Thumb
4275 sequences. */
4276 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
4277 if (map_type == 0)
4278 /* Thumb-2 code must have mapping symbols to have a chance. */
4279 return bpaddr;
4281 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
4283 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
4284 && func_start > boundary)
4285 boundary = func_start;
4287 /* Search for a candidate IT instruction. We have to do some fancy
4288 footwork to distinguish a real IT instruction from the second
4289 half of a 32-bit instruction, but there is no need for that if
4290 there's no candidate. */
4291 buf_len = std::min (bpaddr - boundary, (CORE_ADDR) MAX_IT_BLOCK_PREFIX);
4292 if (buf_len == 0)
4293 /* No room for an IT instruction. */
4294 return bpaddr;
4296 buf = (gdb_byte *) xmalloc (buf_len);
4297 if (target_read_memory (bpaddr - buf_len, buf, buf_len) != 0)
4298 return bpaddr;
4299 any = 0;
4300 for (i = 0; i < buf_len; i += 2)
4302 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
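/* 0xbfxx with a non-zero mask field (bits 0-3) is an IT instruction;
   encodings with a zero mask are instead the NOP-compatible hints
   (NOP, YIELD, WFE, WFI, SEV).  */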
4303 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4305 any = 1;
4306 break;
4310 if (any == 0)
4312 xfree (buf);
4313 return bpaddr;
4316 /* OK, the code bytes before this instruction contain at least one
4317 halfword which resembles an IT instruction. We know that it's
4318 Thumb code, but there are still two possibilities. Either the
4319 halfword really is an IT instruction, or it is the second half of
4320 a 32-bit Thumb instruction. The only way we can tell is to
4321 scan forwards from a known instruction boundary. */
4322 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
4324 int definite;
4326 /* There's a lot of code before this instruction. Start with an
4327 optimistic search; it's easy to recognize halfwords that can
4328 not be the start of a 32-bit instruction, and use that to
4329 lock on to the instruction boundaries. */
4330 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
4331 if (buf == NULL)
4332 return bpaddr;
4333 buf_len = IT_SCAN_THRESHOLD;
4335 definite = 0;
4336 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
4338 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4339 if (thumb_insn_size (inst1) == 2)
4341 definite = 1;
4342 break;
4346 /* At this point, if DEFINITE, BUF[I] is the first place we
4347 are sure that we know the instruction boundaries, and it is far
4348 enough from BPADDR that we could not miss an IT instruction
4349 affecting BPADDR. If ! DEFINITE, give up - start from a
4350 known boundary. */
4351 if (! definite)
4353 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
4354 bpaddr - boundary);
4355 if (buf == NULL)
4356 return bpaddr;
4357 buf_len = bpaddr - boundary;
4358 i = 0;
4361 else
4363 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
4364 if (buf == NULL)
4365 return bpaddr;
4366 buf_len = bpaddr - boundary;
4367 i = 0;
4370 /* Scan forwards. Find the last IT instruction before BPADDR. */
4371 last_it = -1;
4372 last_it_count = 0;
4373 while (i < buf_len)
4375 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
4376 last_it_count--;
4377 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
4379 last_it = i;
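/* The position of the lowest set bit in the 4-bit IT mask gives the
   length of the block: xxx1 -> 4 instructions, xx10 -> 3, x100 -> 2,
   1000 -> 1.  */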
4380 if (inst1 & 0x0001)
4381 last_it_count = 4;
4382 else if (inst1 & 0x0002)
4383 last_it_count = 3;
4384 else if (inst1 & 0x0004)
4385 last_it_count = 2;
4386 else
4387 last_it_count = 1;
4389 i += thumb_insn_size (inst1);
4392 xfree (buf);
4394 if (last_it == -1)
4395 /* There wasn't really an IT instruction after all. */
4396 return bpaddr;
4398 if (last_it_count < 1)
4399 /* It was too far away. */
4400 return bpaddr;
4402 /* This really is a trouble spot. Move the breakpoint to the IT
4403 instruction. */
4404 return bpaddr - buf_len + last_it;
4407 /* ARM displaced stepping support.
4409 Generally ARM displaced stepping works as follows:
4411 1. When an instruction is to be single-stepped, it is first decoded by
4412 arm_process_displaced_insn. Depending on the type of instruction, it is
4413 then copied to a scratch location, possibly in a modified form. The
4414 copy_* set of functions performs such modification, as necessary. A
4415 breakpoint is placed after the modified instruction in the scratch space
4416 to return control to GDB. Note in particular that instructions which
4417 modify the PC will no longer do so after modification.
4419 2. The instruction is single-stepped, by setting the PC to the scratch
4420 location address, and resuming. Control returns to GDB when the
4421 breakpoint is hit.
4423 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4424 function used for the current instruction. This function's job is to
4425 put the CPU/memory state back to what it would have been if the
4426 instruction had been executed unmodified in its original location. */
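/* As an illustration of the scheme (the register numbers below are
   examples only): an instruction such as "add r2, pc, #4" at address
   0x1000 is copied to the scratch space rewritten to read its PC
   operand from a scratch register that has been preloaded with the
   value the PC would have had at the original location (0x1008 in ARM
   state).  The copy therefore produces the same result even though it
   executes at a different address, and the cleanup routine restores
   the scratch registers afterwards.  */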
4428 /* NOP instruction (mov r0, r0). */
4429 #define ARM_NOP 0xe1a00000
4430 #define THUMB_NOP 0x4600
4432 /* Helper for register reads for displaced stepping. In particular, this
4433 returns the PC as it would be seen by the instruction at its original
4434 location. */
4436 ULONGEST
4437 displaced_read_reg (struct regcache *regs, struct displaced_step_closure *dsc,
4438 int regno)
4440 ULONGEST ret;
4441 CORE_ADDR from = dsc->insn_addr;
4443 if (regno == ARM_PC_REGNUM)
4445 /* Compute pipeline offset:
4446 - When executing an ARM instruction, PC reads as the address of the
4447 current instruction plus 8.
4448 - When executing a Thumb instruction, PC reads as the address of the
4449 current instruction plus 4. */
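/* For example, an ARM instruction at 0x8000 reads the PC as 0x8008,
   while a Thumb instruction at the same address reads it as 0x8004.  */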
4451 if (!dsc->is_thumb)
4452 from += 8;
4453 else
4454 from += 4;
4456 if (debug_displaced)
4457 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
4458 (unsigned long) from);
4459 return (ULONGEST) from;
4461 else
4463 regcache_cooked_read_unsigned (regs, regno, &ret);
4464 if (debug_displaced)
4465 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
4466 regno, (unsigned long) ret);
4467 return ret;
4471 static int
4472 displaced_in_arm_mode (struct regcache *regs)
4474 ULONGEST ps;
4475 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
4477 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4479 return (ps & t_bit) == 0;
4482 /* Write to the PC as from a branch instruction. */
4484 static void
4485 branch_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4486 ULONGEST val)
4488 if (!dsc->is_thumb)
4489 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4490 architecture versions < 6. */
4491 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4492 val & ~(ULONGEST) 0x3);
4493 else
4494 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
4495 val & ~(ULONGEST) 0x1);
4498 /* Write to the PC as from a branch-exchange instruction. */
4500 static void
4501 bx_write_pc (struct regcache *regs, ULONGEST val)
4503 ULONGEST ps;
4504 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
4506 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
4508 if ((val & 1) == 1)
4510 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
4511 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
4513 else if ((val & 2) == 0)
4515 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4516 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
4518 else
4520 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4521 mode, align dest to 4 bytes). */
4522 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
4523 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
4524 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
4528 /* Write to the PC as if from a load instruction. */
4530 static void
4531 load_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4532 ULONGEST val)
4534 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
4535 bx_write_pc (regs, val);
4536 else
4537 branch_write_pc (regs, dsc, val);
4540 /* Write to the PC as if from an ALU instruction. */
4542 static void
4543 alu_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
4544 ULONGEST val)
4546 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
4547 bx_write_pc (regs, val);
4548 else
4549 branch_write_pc (regs, dsc, val);
4552 /* Helper for writing to registers for displaced stepping. Writing to the PC
4553 has varying effects depending on the instruction which does the write:
4554 this is controlled by the WRITE_PC argument. */
4556 void
4557 displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
4558 int regno, ULONGEST val, enum pc_write_style write_pc)
4560 if (regno == ARM_PC_REGNUM)
4562 if (debug_displaced)
4563 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
4564 (unsigned long) val);
4565 switch (write_pc)
4567 case BRANCH_WRITE_PC:
4568 branch_write_pc (regs, dsc, val);
4569 break;
4571 case BX_WRITE_PC:
4572 bx_write_pc (regs, val);
4573 break;
4575 case LOAD_WRITE_PC:
4576 load_write_pc (regs, dsc, val);
4577 break;
4579 case ALU_WRITE_PC:
4580 alu_write_pc (regs, dsc, val);
4581 break;
4583 case CANNOT_WRITE_PC:
4584 warning (_("Instruction wrote to PC in an unexpected way when "
4585 "single-stepping"));
4586 break;
4588 default:
4589 internal_error (__FILE__, __LINE__,
4590 _("Invalid argument to displaced_write_reg"));
4593 dsc->wrote_to_pc = 1;
4595 else
4597 if (debug_displaced)
4598 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
4599 regno, (unsigned long) val);
4600 regcache_cooked_write_unsigned (regs, regno, val);
4604 /* This function is used to concisely determine if an instruction INSN
4605 references PC. Register fields of interest in INSN should have the
4606 corresponding fields of BITMASK set to 0b1111. The function
4607 returns 1 if any of these fields in INSN reference the PC
4608 (also 0b1111, r15), else it returns 0. */
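/* For example, insn_references_pc (insn, 0x000f0000ul) tests whether the
   Rn field in bits 16-19 of INSN is 0xf, i.e. whether Rn is the PC.  */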
4610 static int
4611 insn_references_pc (uint32_t insn, uint32_t bitmask)
4613 uint32_t lowbit = 1;
4615 while (bitmask != 0)
4617 uint32_t mask;
4619 for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
4622 if (!lowbit)
4623 break;
4625 mask = lowbit * 0xf;
4627 if ((insn & mask) == mask)
4628 return 1;
4630 bitmask &= ~mask;
4633 return 0;
4636 /* The simplest copy function. Many instructions have the same effect no
4637 matter what address they are executed at: in those cases, use this. */
4639 static int
4640 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
4641 const char *iname, struct displaced_step_closure *dsc)
4643 if (debug_displaced)
4644 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
4645 "opcode/class '%s' unmodified\n", (unsigned long) insn,
4646 iname);
4648 dsc->modinsn[0] = insn;
4650 return 0;
4653 static int
4654 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
4655 uint16_t insn2, const char *iname,
4656 struct displaced_step_closure *dsc)
4658 if (debug_displaced)
4659 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
4660 "opcode/class '%s' unmodified\n", insn1, insn2,
4661 iname);
4663 dsc->modinsn[0] = insn1;
4664 dsc->modinsn[1] = insn2;
4665 dsc->numinsns = 2;
4667 return 0;
4670 /* Copy a 16-bit Thumb (Thumb and 16-bit Thumb-2) instruction without any
4671 modification. */
4672 static int
4673 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, uint16_t insn,
4674 const char *iname,
4675 struct displaced_step_closure *dsc)
4677 if (debug_displaced)
4678 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
4679 "opcode/class '%s' unmodified\n", insn,
4680 iname);
4682 dsc->modinsn[0] = insn;
4684 return 0;
4687 /* Preload instructions with immediate offset. */
4689 static void
4690 cleanup_preload (struct gdbarch *gdbarch,
4691 struct regcache *regs, struct displaced_step_closure *dsc)
4693 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4694 if (!dsc->u.preload.immed)
4695 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
4698 static void
4699 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
4700 struct displaced_step_closure *dsc, unsigned int rn)
4702 ULONGEST rn_val;
4703 /* Preload instructions:
4705 {pli/pld} [rn, #+/-imm]
4707 {pli/pld} [r0, #+/-imm]. */
4709 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4710 rn_val = displaced_read_reg (regs, dsc, rn);
4711 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4712 dsc->u.preload.immed = 1;
4714 dsc->cleanup = &cleanup_preload;
4717 static int
4718 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
4719 struct displaced_step_closure *dsc)
4721 unsigned int rn = bits (insn, 16, 19);
4723 if (!insn_references_pc (insn, 0x000f0000ul))
4724 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
4726 if (debug_displaced)
4727 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4728 (unsigned long) insn);
4730 dsc->modinsn[0] = insn & 0xfff0ffff;
4732 install_preload (gdbarch, regs, dsc, rn);
4734 return 0;
4737 static int
4738 thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
4739 struct regcache *regs, struct displaced_step_closure *dsc)
4741 unsigned int rn = bits (insn1, 0, 3);
4742 unsigned int u_bit = bit (insn1, 7);
4743 int imm12 = bits (insn2, 0, 11);
4744 ULONGEST pc_val;
4746 if (rn != ARM_PC_REGNUM)
4747 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);
4749 /* PC is only allowed to be used in PLI (immediate, literal) Encoding T3, and
4750 PLD (literal) Encoding T1. */
4751 if (debug_displaced)
4752 fprintf_unfiltered (gdb_stdlog,
4753 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
4754 (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
4755 imm12);
4757 if (!u_bit)
4758 imm12 = -1 * imm12;
4760 /* Rewrite instruction {pli/pld} PC imm12 into:
4761 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4763 {pli/pld} [r0, r1]
4765 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4767 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4768 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4770 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
4772 displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
4773 displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
4774 dsc->u.preload.immed = 0;
4776 /* {pli/pld} [r0, r1] */
4777 dsc->modinsn[0] = insn1 & 0xfff0;
4778 dsc->modinsn[1] = 0xf001;
4779 dsc->numinsns = 2;
4781 dsc->cleanup = &cleanup_preload;
4782 return 0;
4785 /* Preload instructions with register offset. */
4787 static void
4788 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
4789 struct displaced_step_closure *dsc, unsigned int rn,
4790 unsigned int rm)
4792 ULONGEST rn_val, rm_val;
4794 /* Preload register-offset instructions:
4796 {pli/pld} [rn, rm {, shift}]
4798 {pli/pld} [r0, r1 {, shift}]. */
4800 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4801 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
4802 rn_val = displaced_read_reg (regs, dsc, rn);
4803 rm_val = displaced_read_reg (regs, dsc, rm);
4804 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4805 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
4806 dsc->u.preload.immed = 0;
4808 dsc->cleanup = &cleanup_preload;
4811 static int
4812 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
4813 struct regcache *regs,
4814 struct displaced_step_closure *dsc)
4816 unsigned int rn = bits (insn, 16, 19);
4817 unsigned int rm = bits (insn, 0, 3);
4820 if (!insn_references_pc (insn, 0x000f000ful))
4821 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
4823 if (debug_displaced)
4824 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
4825 (unsigned long) insn);
4827 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
4829 install_preload_reg (gdbarch, regs, dsc, rn, rm);
4830 return 0;
4833 /* Copy/cleanup coprocessor load and store instructions. */
4835 static void
4836 cleanup_copro_load_store (struct gdbarch *gdbarch,
4837 struct regcache *regs,
4838 struct displaced_step_closure *dsc)
4840 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
4842 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
4844 if (dsc->u.ldst.writeback)
4845 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
4848 static void
4849 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
4850 struct displaced_step_closure *dsc,
4851 int writeback, unsigned int rn)
4853 ULONGEST rn_val;
4855 /* Coprocessor load/store instructions:
4857 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4859 {stc/stc2} [r0, #+/-imm].
4861 ldc/ldc2 are handled identically. */
4863 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
4864 rn_val = displaced_read_reg (regs, dsc, rn);
4865 /* PC should be 4-byte aligned. */
4866 rn_val = rn_val & 0xfffffffc;
4867 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
4869 dsc->u.ldst.writeback = writeback;
4870 dsc->u.ldst.rn = rn;
4872 dsc->cleanup = &cleanup_copro_load_store;
4875 static int
4876 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
4877 struct regcache *regs,
4878 struct displaced_step_closure *dsc)
4880 unsigned int rn = bits (insn, 16, 19);
4882 if (!insn_references_pc (insn, 0x000f0000ul))
4883 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
4885 if (debug_displaced)
4886 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4887 "load/store insn %.8lx\n", (unsigned long) insn);
4889 dsc->modinsn[0] = insn & 0xfff0ffff;
4891 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
4893 return 0;
4896 static int
4897 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
4898 uint16_t insn2, struct regcache *regs,
4899 struct displaced_step_closure *dsc)
4901 unsigned int rn = bits (insn1, 0, 3);
4903 if (rn != ARM_PC_REGNUM)
4904 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
4905 "copro load/store", dsc);
4907 if (debug_displaced)
4908 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
4909 "load/store insn %.4x%.4x\n", insn1, insn2);
4911 dsc->modinsn[0] = insn1 & 0xfff0;
4912 dsc->modinsn[1] = insn2;
4913 dsc->numinsns = 2;
4915 /* This function is called to copy the LDC/LDC2/VLDR instructions, which
4916 don't support writeback, so pass 0. */
4917 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
4919 return 0;
4922 /* Clean up branch instructions (actually perform the branch, by setting
4923 PC). */
4925 static void
4926 cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
4927 struct displaced_step_closure *dsc)
4929 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
4930 int branch_taken = condition_true (dsc->u.branch.cond, status);
4931 enum pc_write_style write_pc = dsc->u.branch.exchange
4932 ? BX_WRITE_PC : BRANCH_WRITE_PC;
4934 if (!branch_taken)
4935 return;
4937 if (dsc->u.branch.link)
4939 /* The value of LR should be the address of the next insn. In order
4940 not to confuse logic handling a later `bx lr' insn, if the current
4941 insn mode is Thumb, bit 0 of the LR value should be set to 1. */
4942 ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;
4944 if (dsc->is_thumb)
4945 next_insn_addr |= 0x1;
4947 displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
4948 CANNOT_WRITE_PC);
4951 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
4954 /* Copy B/BL/BLX instructions with immediate destinations. */
4956 static void
4957 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
4958 struct displaced_step_closure *dsc,
4959 unsigned int cond, int exchange, int link, long offset)
4961 /* Implement "BL<cond> <label>" as:
4963 Preparation: cond <- instruction condition
4964 Insn: mov r0, r0 (nop)
4965 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4967 B<cond> similar, but don't set r14 in cleanup. */
4969 dsc->u.branch.cond = cond;
4970 dsc->u.branch.link = link;
4971 dsc->u.branch.exchange = exchange;
4973 dsc->u.branch.dest = dsc->insn_addr;
4974 if (link && exchange)
4975 /* For BLX, the offset is computed from Align (PC, 4). */
4976 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
4978 if (dsc->is_thumb)
4979 dsc->u.branch.dest += 4 + offset;
4980 else
4981 dsc->u.branch.dest += 8 + offset;
4983 dsc->cleanup = &cleanup_branch;
4985 static int
4986 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
4987 struct regcache *regs, struct displaced_step_closure *dsc)
4989 unsigned int cond = bits (insn, 28, 31);
4990 int exchange = (cond == 0xf);
4991 int link = exchange || bit (insn, 24);
4992 long offset;
4994 if (debug_displaced)
4995 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
4996 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
4997 (unsigned long) insn);
4998 if (exchange)
4999 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
5000 then arrange the switch into Thumb mode. */
5001 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
5002 else
5003 offset = bits (insn, 0, 23) << 2;
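/* Bit 25 of the shifted value is the sign bit of the original 24-bit
   immediate; propagate it through the upper bits to get a signed byte
   offset.  */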
5005 if (bit (offset, 25))
5006 offset = offset | ~0x3ffffff;
5008 dsc->modinsn[0] = ARM_NOP;
5010 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5011 return 0;
5014 static int
5015 thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
5016 uint16_t insn2, struct regcache *regs,
5017 struct displaced_step_closure *dsc)
5019 int link = bit (insn2, 14);
5020 int exchange = link && !bit (insn2, 12);
5021 int cond = INST_AL;
5022 long offset = 0;
5023 int j1 = bit (insn2, 13);
5024 int j2 = bit (insn2, 11);
5025 int s = sbits (insn1, 10, 10);
5026 int i1 = !(j1 ^ bit (insn1, 10));
5027 int i2 = !(j2 ^ bit (insn1, 10));
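/* I1 and I2 are derived as in the architecture manual:
   I1 = NOT (J1 EOR S), I2 = NOT (J2 EOR S).  */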
5029 if (!link && !exchange) /* B */
5031 offset = (bits (insn2, 0, 10) << 1);
5032 if (bit (insn2, 12)) /* Encoding T4 */
5034 offset |= (bits (insn1, 0, 9) << 12)
5035 | (i2 << 22)
5036 | (i1 << 23)
5037 | (s << 24);
5038 cond = INST_AL;
5040 else /* Encoding T3 */
5042 offset |= (bits (insn1, 0, 5) << 12)
5043 | (j1 << 18)
5044 | (j2 << 19)
5045 | (s << 20);
5046 cond = bits (insn1, 6, 9);
5049 else
5051 offset = (bits (insn1, 0, 9) << 12);
5052 offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
5053 offset |= exchange ?
5054 (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
5057 if (debug_displaced)
5058 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
5059 "%.4x %.4x with offset %.8lx\n",
5060 link ? (exchange) ? "blx" : "bl" : "b",
5061 insn1, insn2, offset);
5063 dsc->modinsn[0] = THUMB_NOP;
5065 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
5066 return 0;
5069 /* Copy B Thumb instructions. */
5070 static int
5071 thumb_copy_b (struct gdbarch *gdbarch, uint16_t insn,
5072 struct displaced_step_closure *dsc)
5074 unsigned int cond = 0;
5075 int offset = 0;
5076 unsigned short bit_12_15 = bits (insn, 12, 15);
5077 CORE_ADDR from = dsc->insn_addr;
5079 if (bit_12_15 == 0xd)
5081 /* offset = SignExtend (imm8:0, 32) */
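/* Shifting the instruction left by one both doubles imm8 and appends
   the implicit zero bit, so bits 0-8 of the shifted value are
   imm8:'0'.  */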
5082 offset = sbits ((insn << 1), 0, 8);
5083 cond = bits (insn, 8, 11);
5085 else if (bit_12_15 == 0xe) /* Encoding T2 */
5087 offset = sbits ((insn << 1), 0, 11);
5088 cond = INST_AL;
5091 if (debug_displaced)
5092 fprintf_unfiltered (gdb_stdlog,
5093 "displaced: copying b immediate insn %.4x "
5094 "with offset %d\n", insn, offset);
5096 dsc->u.branch.cond = cond;
5097 dsc->u.branch.link = 0;
5098 dsc->u.branch.exchange = 0;
5099 dsc->u.branch.dest = from + 4 + offset;
5101 dsc->modinsn[0] = THUMB_NOP;
5103 dsc->cleanup = &cleanup_branch;
5105 return 0;
5108 /* Copy BX/BLX with register-specified destinations. */
5110 static void
5111 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
5112 struct displaced_step_closure *dsc, int link,
5113 unsigned int cond, unsigned int rm)
5115 /* Implement {BX,BLX}<cond> <reg>" as:
5117 Preparation: cond <- instruction condition
5118 Insn: mov r0, r0 (nop)
5119 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5121 Don't set r14 in cleanup for BX. */
5123 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
5125 dsc->u.branch.cond = cond;
5126 dsc->u.branch.link = link;
5128 dsc->u.branch.exchange = 1;
5130 dsc->cleanup = &cleanup_branch;
5133 static int
5134 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
5135 struct regcache *regs, struct displaced_step_closure *dsc)
5137 unsigned int cond = bits (insn, 28, 31);
5138 /* BX: x12xxx1x
5139 BLX: x12xxx3x. */
5140 int link = bit (insn, 5);
5141 unsigned int rm = bits (insn, 0, 3);
5143 if (debug_displaced)
5144 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
5145 (unsigned long) insn);
5147 dsc->modinsn[0] = ARM_NOP;
5149 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
5150 return 0;
5153 static int
5154 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
5155 struct regcache *regs,
5156 struct displaced_step_closure *dsc)
5158 int link = bit (insn, 7);
5159 unsigned int rm = bits (insn, 3, 6);
5161 if (debug_displaced)
5162 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
5163 (unsigned short) insn);
5165 dsc->modinsn[0] = THUMB_NOP;
5167 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
5169 return 0;
5173 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5175 static void
5176 cleanup_alu_imm (struct gdbarch *gdbarch,
5177 struct regcache *regs, struct displaced_step_closure *dsc)
5179 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5180 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5181 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5182 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5185 static int
5186 arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5187 struct displaced_step_closure *dsc)
5189 unsigned int rn = bits (insn, 16, 19);
5190 unsigned int rd = bits (insn, 12, 15);
5191 unsigned int op = bits (insn, 21, 24);
5192 int is_mov = (op == 0xd);
5193 ULONGEST rd_val, rn_val;
5195 if (!insn_references_pc (insn, 0x000ff000ul))
5196 return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);
5198 if (debug_displaced)
5199 fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
5200 "%.8lx\n", is_mov ? "move" : "ALU",
5201 (unsigned long) insn);
5203 /* Instruction is of form:
5205 <op><cond> rd, [rn,] #imm
5207 Rewrite as:
5209 Preparation: tmp1, tmp2 <- r0, r1;
5210 r0, r1 <- rd, rn
5211 Insn: <op><cond> r0, r1, #imm
5212 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5215 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5216 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5217 rn_val = displaced_read_reg (regs, dsc, rn);
5218 rd_val = displaced_read_reg (regs, dsc, rd);
5219 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5220 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5221 dsc->rd = rd;
5223 if (is_mov)
5224 dsc->modinsn[0] = insn & 0xfff00fff;
5225 else
5226 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;
5228 dsc->cleanup = &cleanup_alu_imm;
5230 return 0;
5233 static int
5234 thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
5235 uint16_t insn2, struct regcache *regs,
5236 struct displaced_step_closure *dsc)
5238 unsigned int op = bits (insn1, 5, 8);
5239 unsigned int rn, rm, rd;
5240 ULONGEST rd_val, rn_val;
5242 rn = bits (insn1, 0, 3); /* Rn */
5243 rm = bits (insn2, 0, 3); /* Rm */
5244 rd = bits (insn2, 8, 11); /* Rd */
5246 /* This routine is only called for instruction MOV. */
5247 gdb_assert (op == 0x2 && rn == 0xf);
5249 if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
5250 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);
5252 if (debug_displaced)
5253 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
5254 "ALU", insn1, insn2);
5256 /* Instruction is of form:
5258 <op><cond> rd, [rn,] #imm
5260 Rewrite as:
5262 Preparation: tmp1, tmp2 <- r0, r1;
5263 r0, r1 <- rd, rn
5264 Insn: <op><cond> r0, r1, #imm
5265 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5268 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5269 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5270 rn_val = displaced_read_reg (regs, dsc, rn);
5271 rd_val = displaced_read_reg (regs, dsc, rd);
5272 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5273 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5274 dsc->rd = rd;
5276 dsc->modinsn[0] = insn1;
5277 dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
5278 dsc->numinsns = 2;
5280 dsc->cleanup = &cleanup_alu_imm;
5282 return 0;
5285 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5287 static void
5288 cleanup_alu_reg (struct gdbarch *gdbarch,
5289 struct regcache *regs, struct displaced_step_closure *dsc)
5291 ULONGEST rd_val;
5292 int i;
5294 rd_val = displaced_read_reg (regs, dsc, 0);
5296 for (i = 0; i < 3; i++)
5297 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5299 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5302 static void
5303 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
5304 struct displaced_step_closure *dsc,
5305 unsigned int rd, unsigned int rn, unsigned int rm)
5307 ULONGEST rd_val, rn_val, rm_val;
5309 /* Instruction is of form:
5311 <op><cond> rd, [rn,] rm [, <shift>]
5313 Rewrite as:
5315 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5316 r0, r1, r2 <- rd, rn, rm
5317 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
5318 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5321 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5322 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5323 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5324 rd_val = displaced_read_reg (regs, dsc, rd);
5325 rn_val = displaced_read_reg (regs, dsc, rn);
5326 rm_val = displaced_read_reg (regs, dsc, rm);
5327 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5328 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5329 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5330 dsc->rd = rd;
5332 dsc->cleanup = &cleanup_alu_reg;
5335 static int
5336 arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5337 struct displaced_step_closure *dsc)
5339 unsigned int op = bits (insn, 21, 24);
5340 int is_mov = (op == 0xd);
5342 if (!insn_references_pc (insn, 0x000ff00ful))
5343 return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);
5345 if (debug_displaced)
5346 fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
5347 is_mov ? "move" : "ALU", (unsigned long) insn);
5349 if (is_mov)
5350 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
5351 else
5352 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;
5354 install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
5355 bits (insn, 0, 3));
5356 return 0;
5359 static int
5360 thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
5361 struct regcache *regs,
5362 struct displaced_step_closure *dsc)
5364 unsigned rm, rd;
5366 rm = bits (insn, 3, 6);
5367 rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);
5369 if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
5370 return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);
5372 if (debug_displaced)
5373 fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
5374 (unsigned short) insn);
5376 dsc->modinsn[0] = ((insn & 0xff00) | 0x10);
5378 install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);
5380 return 0;
5383 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5385 static void
5386 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
5387 struct regcache *regs,
5388 struct displaced_step_closure *dsc)
5390 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
5391 int i;
5393 for (i = 0; i < 4; i++)
5394 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
5396 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
5399 static void
5400 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
5401 struct displaced_step_closure *dsc,
5402 unsigned int rd, unsigned int rn, unsigned int rm,
5403 unsigned rs)
5405 int i;
5406 ULONGEST rd_val, rn_val, rm_val, rs_val;
5408 /* Instruction is of form:
5410 <op><cond> rd, [rn,] rm, <shift> rs
5412 Rewrite as:
5414 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5415 r0, r1, r2, r3 <- rd, rn, rm, rs
5416 Insn: <op><cond> r0, r1, r2, <shift> r3
5417 Cleanup: tmp5 <- r0
5418 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5419 rd <- tmp5
5422 for (i = 0; i < 4; i++)
5423 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
5425 rd_val = displaced_read_reg (regs, dsc, rd);
5426 rn_val = displaced_read_reg (regs, dsc, rn);
5427 rm_val = displaced_read_reg (regs, dsc, rm);
5428 rs_val = displaced_read_reg (regs, dsc, rs);
5429 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
5430 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
5431 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
5432 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
5433 dsc->rd = rd;
5434 dsc->cleanup = &cleanup_alu_shifted_reg;
5437 static int
5438 arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
5439 struct regcache *regs,
5440 struct displaced_step_closure *dsc)
5442 unsigned int op = bits (insn, 21, 24);
5443 int is_mov = (op == 0xd);
5444 unsigned int rd, rn, rm, rs;
5446 if (!insn_references_pc (insn, 0x000fff0ful))
5447 return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);
5449 if (debug_displaced)
5450 fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
5451 "%.8lx\n", is_mov ? "move" : "ALU",
5452 (unsigned long) insn);
5454 rn = bits (insn, 16, 19);
5455 rm = bits (insn, 0, 3);
5456 rs = bits (insn, 8, 11);
5457 rd = bits (insn, 12, 15);
5459 if (is_mov)
5460 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
5461 else
5462 dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;
5464 install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);
5466 return 0;
5469 /* Clean up load instructions. */
5471 static void
5472 cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
5473 struct displaced_step_closure *dsc)
5475 ULONGEST rt_val, rt_val2 = 0, rn_val;
5477 rt_val = displaced_read_reg (regs, dsc, 0);
5478 if (dsc->u.ldst.xfersize == 8)
5479 rt_val2 = displaced_read_reg (regs, dsc, 1);
5480 rn_val = displaced_read_reg (regs, dsc, 2);
5482 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5483 if (dsc->u.ldst.xfersize > 4)
5484 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5485 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5486 if (!dsc->u.ldst.immed)
5487 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5489 /* Handle register writeback. */
5490 if (dsc->u.ldst.writeback)
5491 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5492 /* Put result in right place. */
5493 displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
5494 if (dsc->u.ldst.xfersize == 8)
5495 displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
5498 /* Clean up store instructions. */
5500 static void
5501 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
5502 struct displaced_step_closure *dsc)
5504 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
5506 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5507 if (dsc->u.ldst.xfersize > 4)
5508 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5509 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
5510 if (!dsc->u.ldst.immed)
5511 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
5512 if (!dsc->u.ldst.restore_r4)
5513 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
5515 /* Writeback. */
5516 if (dsc->u.ldst.writeback)
5517 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
5520 /* Copy "extra" load/store instructions. These are halfword/doubleword
5521 transfers, which have a different encoding to byte/word transfers. */
5523 static int
5524 arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unprivileged,
5525 struct regcache *regs, struct displaced_step_closure *dsc)
5527 unsigned int op1 = bits (insn, 20, 24);
5528 unsigned int op2 = bits (insn, 5, 6);
5529 unsigned int rt = bits (insn, 12, 15);
5530 unsigned int rn = bits (insn, 16, 19);
5531 unsigned int rm = bits (insn, 0, 3);
5532 char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5533 char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5534 int immed = (op1 & 0x4) != 0;
5535 int opcode;
5536 ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;
5538 if (!insn_references_pc (insn, 0x000ff00ful))
5539 return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);
5541 if (debug_displaced)
5542 fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
5543 "insn %.8lx\n", unprivileged ? "unprivileged " : "",
5544 (unsigned long) insn);
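/* Fold op2 (halfword, doubleword/signed byte, or doubleword/signed
   halfword form), the immediate bit and the load bit into an index
   0-11 for the LOAD and BYTESIZE tables above.  */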
5546 opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;
5548 if (opcode < 0)
5549 internal_error (__FILE__, __LINE__,
5550 _("copy_extra_ld_st: instruction decode error"));
5552 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5553 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5554 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5555 if (!immed)
5556 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5558 rt_val = displaced_read_reg (regs, dsc, rt);
5559 if (bytesize[opcode] == 8)
5560 rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
5561 rn_val = displaced_read_reg (regs, dsc, rn);
5562 if (!immed)
5563 rm_val = displaced_read_reg (regs, dsc, rm);
5565 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5566 if (bytesize[opcode] == 8)
5567 displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
5568 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5569 if (!immed)
5570 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5572 dsc->rd = rt;
5573 dsc->u.ldst.xfersize = bytesize[opcode];
5574 dsc->u.ldst.rn = rn;
5575 dsc->u.ldst.immed = immed;
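/* P == 0 (post-indexed) or W == 1 requests base register writeback.  */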
5576 dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
5577 dsc->u.ldst.restore_r4 = 0;
5579 if (immed)
5580 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5582 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5583 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5584 else
5585 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5587 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5588 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5590 dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;
5592 return 0;
5595 /* Copy byte/half word/word loads and stores. */
5597 static void
5598 install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5599 struct displaced_step_closure *dsc, int load,
5600 int immed, int writeback, int size, int usermode,
5601 int rt, int rm, int rn)
5603 ULONGEST rt_val, rn_val, rm_val = 0;
5605 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5606 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5607 if (!immed)
5608 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5609 if (!load)
5610 dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);
5612 rt_val = displaced_read_reg (regs, dsc, rt);
5613 rn_val = displaced_read_reg (regs, dsc, rn);
5614 if (!immed)
5615 rm_val = displaced_read_reg (regs, dsc, rm);
5617 displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
5618 displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
5619 if (!immed)
5620 displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
5621 dsc->rd = rt;
5622 dsc->u.ldst.xfersize = size;
5623 dsc->u.ldst.rn = rn;
5624 dsc->u.ldst.immed = immed;
5625 dsc->u.ldst.writeback = writeback;
5627 /* To write PC we can do:
5629 Before this sequence of instructions:
5630 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
5631 r2 is the Rn value got from displaced_read_reg.
5633 Insn1: push {pc} Write address of STR instruction + offset on stack
5634 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5635 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5636 = addr(Insn1) + offset - addr(Insn3) - 8
5637 = offset - 16
5638 Insn4: add r4, r4, #8 r4 = offset - 8
5639 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5640 = from + offset
5641 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
5643 Otherwise we don't know what value to write for PC, since the offset is
5644 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5645 of this can be found in Section "Saving from r15" in
5646 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
5648 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5652 static int
5653 thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
5654 uint16_t insn2, struct regcache *regs,
5655 struct displaced_step_closure *dsc, int size)
5657 unsigned int u_bit = bit (insn1, 7);
5658 unsigned int rt = bits (insn2, 12, 15);
5659 int imm12 = bits (insn2, 0, 11);
5660 ULONGEST pc_val;
5662 if (debug_displaced)
5663 fprintf_unfiltered (gdb_stdlog,
5664 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
5665 (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
5666 imm12);
5668 if (!u_bit)
5669 imm12 = -1 * imm12;
5671 /* Rewrite instruction LDR Rt imm12 into:
5673 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
5675 LDR R0, R2, R3,
5677 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
5680 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5681 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
5682 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
5684 pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
5686 pc_val = pc_val & 0xfffffffc;
5688 displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
5689 displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);
5691 dsc->rd = rt;
5693 dsc->u.ldst.xfersize = size;
5694 dsc->u.ldst.immed = 0;
5695 dsc->u.ldst.writeback = 0;
5696 dsc->u.ldst.restore_r4 = 0;
5698 /* LDR R0, R2, R3 */
5699 dsc->modinsn[0] = 0xf852;
5700 dsc->modinsn[1] = 0x3;
5701 dsc->numinsns = 2;
5703 dsc->cleanup = &cleanup_load;
5705 return 0;
5708 static int
5709 thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
5710 uint16_t insn2, struct regcache *regs,
5711 struct displaced_step_closure *dsc,
5712 int writeback, int immed)
5714 unsigned int rt = bits (insn2, 12, 15);
5715 unsigned int rn = bits (insn1, 0, 3);
5716 unsigned int rm = bits (insn2, 0, 3); /* Only valid if !immed. */
5717 /* In LDR (register), there is also a register Rm, which is not allowed to
5718 be PC, so we don't have to check it. */
5720 if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
5721 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
5722 dsc);
5724 if (debug_displaced)
5725 fprintf_unfiltered (gdb_stdlog,
5726 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
5727 rt, rn, insn1, insn2);
5729 install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
5730 0, rt, rm, rn);
5732 dsc->u.ldst.restore_r4 = 0;
5734 if (immed)
5735 /* ldr[b]<cond> rt, [rn, #imm], etc.
5737 ldr[b]<cond> r0, [r2, #imm]. */
5739 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5740 dsc->modinsn[1] = insn2 & 0x0fff;
5742 else
5743 /* ldr[b]<cond> rt, [rn, rm], etc.
5745 ldr[b]<cond> r0, [r2, r3]. */
5747 dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
5748 dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
5751 dsc->numinsns = 2;
5753 return 0;
5757 static int
5758 arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
5759 struct regcache *regs,
5760 struct displaced_step_closure *dsc,
5761 int load, int size, int usermode)
5763 int immed = !bit (insn, 25);
5764 int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
5765 unsigned int rt = bits (insn, 12, 15);
5766 unsigned int rn = bits (insn, 16, 19);
5767 unsigned int rm = bits (insn, 0, 3); /* Only valid if !immed. */
5769 if (!insn_references_pc (insn, 0x000ff00ful))
5770 return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);
5772 if (debug_displaced)
5773 fprintf_unfiltered (gdb_stdlog,
5774 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
5775 load ? (size == 1 ? "ldrb" : "ldr")
5776 : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
5777 rt, rn,
5778 (unsigned long) insn);
5780 install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
5781 usermode, rt, rm, rn);
5783 if (load || rt != ARM_PC_REGNUM)
5785 dsc->u.ldst.restore_r4 = 0;
5787 if (immed)
5788 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5790 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5791 dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
5792 else
5793 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5795 {ldr,str}[b]<cond> r0, [r2, r3]. */
5796 dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
5798 else
5800 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5801 dsc->u.ldst.restore_r4 = 1;
5802 dsc->modinsn[0] = 0xe92d8000; /* push {pc} */
5803 dsc->modinsn[1] = 0xe8bd0010; /* pop {r4} */
5804 dsc->modinsn[2] = 0xe044400f; /* sub r4, r4, pc. */
5805 dsc->modinsn[3] = 0xe2844008; /* add r4, r4, #8. */
5806 dsc->modinsn[4] = 0xe0800004; /* add r0, r0, r4. */
5808 /* As above. */
5809 if (immed)
5810 dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
5811 else
5812 dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;
5814 dsc->numinsns = 6;
5817 dsc->cleanup = load ? &cleanup_load : &cleanup_store;
5819 return 0;
5822 /* Cleanup LDM instructions with fully-populated register list. This is an
5823 unfortunate corner case: it's impossible to implement correctly by modifying
5824 the instruction. The issue is as follows: we have an instruction,
5826 ldm rN, {r0-r15}
5828 which we must rewrite to avoid loading PC. A possible solution would be to
5829 do the load in two halves, something like (with suitable cleanup
5830 afterwards):
5832 mov r8, rN
5833 ldm[id][ab] r8!, {r0-r7}
5834 str r7, <temp>
5835 ldm[id][ab] r8, {r7-r14}
5836 <bkpt>
5838 but at present there's no suitable place for <temp>, since the scratch space
5839 is overwritten before the cleanup routine is called. For now, we simply
5840 emulate the instruction. */
5842 static void
5843 cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
5844 struct displaced_step_closure *dsc)
5846 int inc = dsc->u.block.increment;
5847 int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
5848 int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
5849 uint32_t regmask = dsc->u.block.regmask;
5850 int regno = inc ? 0 : 15;
5851 CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
5852 int exception_return = dsc->u.block.load && dsc->u.block.user
5853 && (regmask & 0x8000) != 0;
5854 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5855 int do_transfer = condition_true (dsc->u.block.cond, status);
5856 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5858 if (!do_transfer)
5859 return;
5861 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5862 sensible we can do here. Complain loudly. */
5863 if (exception_return)
5864 error (_("Cannot single-step exception return"));
5866 /* We don't handle any stores here for now. */
5867 gdb_assert (dsc->u.block.load != 0);
5869 if (debug_displaced)
5870 fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
5871 "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
5872 dsc->u.block.increment ? "inc" : "dec",
5873 dsc->u.block.before ? "before" : "after");
5875 while (regmask)
5877 uint32_t memword;
5879 if (inc)
5880 while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
5881 regno++;
5882 else
5883 while (regno >= 0 && (regmask & (1 << regno)) == 0)
5884 regno--;
5886 xfer_addr += bump_before;
5888 memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
5889 displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);
5891 xfer_addr += bump_after;
5893 regmask &= ~(1 << regno);
5896 if (dsc->u.block.writeback)
5897 displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
5898 CANNOT_WRITE_PC);
5901 /* Clean up an STM which included the PC in the register list. */
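/* Worked example (illustrative addresses, not from the original sources):
if the copied STM executed at scratch address 0x20000 and the word it
stored for PC turns out to be 0x20008, then offset below is 8, and the
stored value is rewritten to insn_addr + 8 -- the value the original STM
would have stored on this particular core. */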
5903 static void
5904 cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
5905 struct displaced_step_closure *dsc)
5907 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5908 int store_executed = condition_true (dsc->u.block.cond, status);
5909 CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
5910 CORE_ADDR stm_insn_addr;
5911 uint32_t pc_val;
5912 long offset;
5913 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
5915 /* If condition code fails, there's nothing else to do. */
5916 if (!store_executed)
5917 return;
5919 if (dsc->u.block.increment)
5921 pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;
5923 if (dsc->u.block.before)
5924 pc_stored_at += 4;
5926 else
5928 pc_stored_at = dsc->u.block.xfer_addr;
5930 if (dsc->u.block.before)
5931 pc_stored_at -= 4;
5934 pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
5935 stm_insn_addr = dsc->scratch_base;
5936 offset = pc_val - stm_insn_addr;
5938 if (debug_displaced)
5939 fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
5940 "STM instruction\n", offset);
5942 /* Rewrite the stored PC to the proper value for the non-displaced original
5943 instruction. */
5944 write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
5945 dsc->insn_addr + offset);
5948 /* Clean up an LDM which includes the PC in the register list. We clumped all
5949 the registers in the transferred list into a contiguous range r0...rX (to
5950 avoid loading PC directly and losing control of the debugged program), so we
5951 must undo that here. */
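/* Worked example (illustrative, not from the original sources): for
ldm r6, {r1, r4, pc} the copy routines below rewrite the register list to
{r0, r1, r2}.  The loop that follows then works downwards from PC: it
moves r2 into PC, r1 into r4 and r0 into r1, after which any clobbered
low registers that were not shuffled away (here r0 and r2) are restored
from dsc->tmp[]. */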
5953 static void
5954 cleanup_block_load_pc (struct gdbarch *gdbarch,
5955 struct regcache *regs,
5956 struct displaced_step_closure *dsc)
5958 uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
5959 int load_executed = condition_true (dsc->u.block.cond, status);
5960 unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
5961 unsigned int regs_loaded = bitcount (mask);
5962 unsigned int num_to_shuffle = regs_loaded, clobbered;
5964 /* The method employed here will fail if the register list is fully populated
5965 (we need to avoid loading PC directly). */
5966 gdb_assert (num_to_shuffle < 16);
5968 if (!load_executed)
5969 return;
5971 clobbered = (1 << num_to_shuffle) - 1;
5973 while (num_to_shuffle > 0)
5975 if ((mask & (1 << write_reg)) != 0)
5977 unsigned int read_reg = num_to_shuffle - 1;
5979 if (read_reg != write_reg)
5981 ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
5982 displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
5983 if (debug_displaced)
5984 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
5985 "loaded register r%d to r%d\n"), read_reg,
5986 write_reg);
5988 else if (debug_displaced)
5989 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
5990 "r%d already in the right place\n"),
5991 write_reg);
5993 clobbered &= ~(1 << write_reg);
5995 num_to_shuffle--;
5998 write_reg--;
6001 /* Restore any registers we scribbled over. */
6002 for (write_reg = 0; clobbered != 0; write_reg++)
6004 if ((clobbered & (1 << write_reg)) != 0)
6006 displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
6007 CANNOT_WRITE_PC);
6008 if (debug_displaced)
6009 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
6010 "clobbered register r%d\n"), write_reg);
6011 clobbered &= ~(1 << write_reg);
6015 /* Perform register writeback manually. */
6016 if (dsc->u.block.writeback)
6018 ULONGEST new_rn_val = dsc->u.block.xfer_addr;
6020 if (dsc->u.block.increment)
6021 new_rn_val += regs_loaded * 4;
6022 else
6023 new_rn_val -= regs_loaded * 4;
6025 displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
6026 CANNOT_WRITE_PC);
6030 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
6031 in user-level code (in particular exception return, ldm rn, {...pc}^). */
6033 static int
6034 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
6035 struct regcache *regs,
6036 struct displaced_step_closure *dsc)
6038 int load = bit (insn, 20);
6039 int user = bit (insn, 22);
6040 int increment = bit (insn, 23);
6041 int before = bit (insn, 24);
6042 int writeback = bit (insn, 21);
6043 int rn = bits (insn, 16, 19);
6045 /* Block transfers which don't mention PC can be run directly
6046 out-of-line. */
6047 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
6048 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
6050 if (rn == ARM_PC_REGNUM)
6052 warning (_("displaced: Unpredictable LDM or STM with "
6053 "base register r15"));
6054 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
6057 if (debug_displaced)
6058 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6059 "%.8lx\n", (unsigned long) insn);
6061 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6062 dsc->u.block.rn = rn;
6064 dsc->u.block.load = load;
6065 dsc->u.block.user = user;
6066 dsc->u.block.increment = increment;
6067 dsc->u.block.before = before;
6068 dsc->u.block.writeback = writeback;
6069 dsc->u.block.cond = bits (insn, 28, 31);
6071 dsc->u.block.regmask = insn & 0xffff;
6073 if (load)
6075 if ((insn & 0xffff) == 0xffff)
6077 /* LDM with a fully-populated register list. This case is
6078 particularly tricky. Implement for now by fully emulating the
6079 instruction (which might not behave perfectly in all cases, but
6080 these instructions should be rare enough for that not to matter
6081 too much). */
6082 dsc->modinsn[0] = ARM_NOP;
6084 dsc->cleanup = &cleanup_block_load_all;
6086 else
6088 /* LDM of a list of registers which includes PC. Implement by
6089 rewriting the list of registers to be transferred into a
6090 contiguous chunk r0...rX before doing the transfer, then shuffling
6091 registers into the correct places in the cleanup routine. */
6092 unsigned int regmask = insn & 0xffff;
6093 unsigned int num_in_list = bitcount (regmask), new_regmask;
6094 unsigned int i;
6096 for (i = 0; i < num_in_list; i++)
6097 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6099 /* Writeback makes things complicated. We need to avoid clobbering
6100 the base register with one of the registers in our modified
6101 register list, but just using a different register can't work in
6102 all cases, e.g.:
6104 ldm r14!, {r0-r13,pc}
6106 which would need to be rewritten as:
6108 ldm rN!, {r0-r14}
6110 but that can't work, because there's no free register for N.
6112 Solve this by turning off the writeback bit, and emulating
6113 writeback manually in the cleanup routine. */
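/* For instance (illustrative values): for ldm r6!, {r1, r4, pc} the
original mask is 0x8012 and num_in_list is 3, so new_regmask below is
0x0007 and the instruction actually executed is ldm r6, {r0, r1, r2},
with the writeback bit cleared here and writeback emulated afterwards in
cleanup_block_load_pc. */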
6115 if (writeback)
6116 insn &= ~(1 << 21);
6118 new_regmask = (1 << num_in_list) - 1;
6120 if (debug_displaced)
6121 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6122 "{..., pc}: original reg list %.4x, modified "
6123 "list %.4x\n"), rn, writeback ? "!" : "",
6124 (int) insn & 0xffff, new_regmask);
6126 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
6128 dsc->cleanup = &cleanup_block_load_pc;
6131 else
6133 /* STM of a list of registers which includes PC. Run the instruction
6134 as-is, but out of line: this will store the wrong value for the PC,
6135 so we must manually fix up the memory in the cleanup routine.
6136 Doing things this way has the advantage that we can auto-detect
6137 the offset of the PC write (which is architecture-dependent) in
6138 the cleanup routine. */
6139 dsc->modinsn[0] = insn;
6141 dsc->cleanup = &cleanup_block_store_pc;
6144 return 0;
6147 static int
6148 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6149 struct regcache *regs,
6150 struct displaced_step_closure *dsc)
6152 int rn = bits (insn1, 0, 3);
6153 int load = bit (insn1, 4);
6154 int writeback = bit (insn1, 5);
6156 /* Block transfers which don't mention PC can be run directly
6157 out-of-line. */
6158 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
6159 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
6161 if (rn == ARM_PC_REGNUM)
6163 warning (_("displaced: Unpredictable LDM or STM with "
6164 "base register r15"));
6165 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6166 "unpredictable ldm/stm", dsc);
6169 if (debug_displaced)
6170 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
6171 "%.4x%.4x\n", insn1, insn2);
6173 /* Clear bit 13, since it should always be zero. */
6174 dsc->u.block.regmask = (insn2 & 0xdfff);
6175 dsc->u.block.rn = rn;
6177 dsc->u.block.load = load;
6178 dsc->u.block.user = 0;
6179 dsc->u.block.increment = bit (insn1, 7);
6180 dsc->u.block.before = bit (insn1, 8);
6181 dsc->u.block.writeback = writeback;
6182 dsc->u.block.cond = INST_AL;
6183 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
6185 if (load)
6187 if (dsc->u.block.regmask == 0xffff)
6189 /* This branch can never be taken: bit 13 of the register mask is cleared above, so the mask cannot equal 0xffff. */
6190 gdb_assert (0);
6192 else
6194 unsigned int regmask = dsc->u.block.regmask;
6195 unsigned int num_in_list = bitcount (regmask), new_regmask;
6196 unsigned int i;
6198 for (i = 0; i < num_in_list; i++)
6199 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6201 if (writeback)
6202 insn1 &= ~(1 << 5);
6204 new_regmask = (1 << num_in_list) - 1;
6206 if (debug_displaced)
6207 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
6208 "{..., pc}: original reg list %.4x, modified "
6209 "list %.4x\n"), rn, writeback ? "!" : "",
6210 (int) dsc->u.block.regmask, new_regmask);
6212 dsc->modinsn[0] = insn1;
6213 dsc->modinsn[1] = (new_regmask & 0xffff);
6214 dsc->numinsns = 2;
6216 dsc->cleanup = &cleanup_block_load_pc;
6219 else
6221 dsc->modinsn[0] = insn1;
6222 dsc->modinsn[1] = insn2;
6223 dsc->numinsns = 2;
6224 dsc->cleanup = &cleanup_block_store_pc;
6226 return 0;
6229 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6230 This is used to avoid a dependency on BFD's bfd_endian enum. */
6232 ULONGEST
6233 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr, int len,
6234 int byte_order)
6236 return read_memory_unsigned_integer (memaddr, len,
6237 (enum bfd_endian) byte_order);
6240 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6242 CORE_ADDR
6243 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs *self,
6244 CORE_ADDR val)
6246 return gdbarch_addr_bits_remove (get_regcache_arch (self->regcache), val);
6249 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
6251 static CORE_ADDR
6252 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs *self)
6254 return 0;
6257 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6259 int
6260 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs *self)
6262 return arm_is_thumb (self->regcache);
6265 /* single_step() is called just before we want to resume the inferior,
6266 if we want to single-step it but there is no hardware or kernel
6267 single-step support. We find the target of the coming instructions
6268 and breakpoint them. */
6270 int
6271 arm_software_single_step (struct frame_info *frame)
6273 struct regcache *regcache = get_current_regcache ();
6274 struct gdbarch *gdbarch = get_regcache_arch (regcache);
6275 struct address_space *aspace = get_regcache_aspace (regcache);
6276 struct arm_get_next_pcs next_pcs_ctx;
6277 CORE_ADDR pc;
6278 int i;
6279 VEC (CORE_ADDR) *next_pcs = NULL;
6280 struct cleanup *old_chain = make_cleanup (VEC_cleanup (CORE_ADDR), &next_pcs);
6282 arm_get_next_pcs_ctor (&next_pcs_ctx,
6283 &arm_get_next_pcs_ops,
6284 gdbarch_byte_order (gdbarch),
6285 gdbarch_byte_order_for_code (gdbarch),
6287 regcache);
6289 next_pcs = arm_get_next_pcs (&next_pcs_ctx);
6291 for (i = 0; VEC_iterate (CORE_ADDR, next_pcs, i, pc); i++)
6292 arm_insert_single_step_breakpoint (gdbarch, aspace, pc);
6294 do_cleanups (old_chain);
6296 return 1;
6299 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6300 for Linux, where some SVC instructions must be treated specially. */
6302 static void
6303 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
6304 struct displaced_step_closure *dsc)
6306 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
6308 if (debug_displaced)
6309 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
6310 "%.8lx\n", (unsigned long) resume_addr);
6312 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
6316 /* Common copy routine for the svc instruction. */
6318 static int
6319 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
6320 struct displaced_step_closure *dsc)
6322 /* Preparation: none.
6323 Insn: unmodified svc.
6324 Cleanup: pc <- insn_addr + insn_size. */
6326 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6327 instruction. */
6328 dsc->wrote_to_pc = 1;
6330 /* Allow OS-specific code to override SVC handling. */
6331 if (dsc->u.svc.copy_svc_os)
6332 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
6333 else
6335 dsc->cleanup = &cleanup_svc;
6336 return 0;
6340 static int
6341 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
6342 struct regcache *regs, struct displaced_step_closure *dsc)
6345 if (debug_displaced)
6346 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
6347 (unsigned long) insn);
6349 dsc->modinsn[0] = insn;
6351 return install_svc (gdbarch, regs, dsc);
6354 static int
6355 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
6356 struct regcache *regs, struct displaced_step_closure *dsc)
6359 if (debug_displaced)
6360 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
6361 insn);
6363 dsc->modinsn[0] = insn;
6365 return install_svc (gdbarch, regs, dsc);
6368 /* Copy undefined instructions. */
6370 static int
6371 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
6372 struct displaced_step_closure *dsc)
6374 if (debug_displaced)
6375 fprintf_unfiltered (gdb_stdlog,
6376 "displaced: copying undefined insn %.8lx\n",
6377 (unsigned long) insn);
6379 dsc->modinsn[0] = insn;
6381 return 0;
6384 static int
6385 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
6386 struct displaced_step_closure *dsc)
6389 if (debug_displaced)
6390 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
6391 "%.4x %.4x\n", (unsigned short) insn1,
6392 (unsigned short) insn2);
6394 dsc->modinsn[0] = insn1;
6395 dsc->modinsn[1] = insn2;
6396 dsc->numinsns = 2;
6398 return 0;
6401 /* Copy unpredictable instructions. */
6403 static int
6404 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
6405 struct displaced_step_closure *dsc)
6407 if (debug_displaced)
6408 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
6409 "%.8lx\n", (unsigned long) insn);
6411 dsc->modinsn[0] = insn;
6413 return 0;
6416 /* The decode_* functions are instruction decoding helpers. They mostly follow
6417 the presentation in the ARM ARM. */
6419 static int
6420 arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
6421 struct regcache *regs,
6422 struct displaced_step_closure *dsc)
6424 unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
6425 unsigned int rn = bits (insn, 16, 19);
6427 if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
6428 return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
6429 else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
6430 return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
6431 else if ((op1 & 0x60) == 0x20)
6432 return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
6433 else if ((op1 & 0x71) == 0x40)
6434 return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
6435 dsc);
6436 else if ((op1 & 0x77) == 0x41)
6437 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6438 else if ((op1 & 0x77) == 0x45)
6439 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pli. */
6440 else if ((op1 & 0x77) == 0x51)
6442 if (rn != 0xf)
6443 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6444 else
6445 return arm_copy_unpred (gdbarch, insn, dsc);
6447 else if ((op1 & 0x77) == 0x55)
6448 return arm_copy_preload (gdbarch, insn, regs, dsc); /* pld/pldw. */
6449 else if (op1 == 0x57)
6450 switch (op2)
6452 case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
6453 case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
6454 case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
6455 case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
6456 default: return arm_copy_unpred (gdbarch, insn, dsc);
6458 else if ((op1 & 0x63) == 0x43)
6459 return arm_copy_unpred (gdbarch, insn, dsc);
6460 else if ((op2 & 0x1) == 0x0)
6461 switch (op1 & ~0x80)
6463 case 0x61:
6464 return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
6465 case 0x65:
6466 return arm_copy_preload_reg (gdbarch, insn, regs, dsc); /* pli reg. */
6467 case 0x71: case 0x75:
6468 /* pld/pldw reg. */
6469 return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
6470 case 0x63: case 0x67: case 0x73: case 0x77:
6471 return arm_copy_unpred (gdbarch, insn, dsc);
6472 default:
6473 return arm_copy_undef (gdbarch, insn, dsc);
6475 else
6476 return arm_copy_undef (gdbarch, insn, dsc); /* Probably unreachable. */
6479 static int
6480 arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
6481 struct regcache *regs,
6482 struct displaced_step_closure *dsc)
6484 if (bit (insn, 27) == 0)
6485 return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
6486 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
6487 else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
6489 case 0x0: case 0x2:
6490 return arm_copy_unmodified (gdbarch, insn, "srs", dsc);
6492 case 0x1: case 0x3:
6493 return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);
6495 case 0x4: case 0x5: case 0x6: case 0x7:
6496 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6498 case 0x8:
6499 switch ((insn & 0xe00000) >> 21)
6501 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6502 /* stc/stc2. */
6503 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6505 case 0x2:
6506 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6508 default:
6509 return arm_copy_undef (gdbarch, insn, dsc);
6512 case 0x9:
6514 int rn_f = (bits (insn, 16, 19) == 0xf);
6515 switch ((insn & 0xe00000) >> 21)
6517 case 0x1: case 0x3:
6518 /* ldc/ldc2 imm (undefined for rn == pc). */
6519 return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
6520 : arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6522 case 0x2:
6523 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6525 case 0x4: case 0x5: case 0x6: case 0x7:
6526 /* ldc/ldc2 lit (undefined for rn != pc). */
6527 return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
6528 : arm_copy_undef (gdbarch, insn, dsc);
6530 default:
6531 return arm_copy_undef (gdbarch, insn, dsc);
6535 case 0xa:
6536 return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);
6538 case 0xb:
6539 if (bits (insn, 16, 19) == 0xf)
6540 /* ldc/ldc2 lit. */
6541 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6542 else
6543 return arm_copy_undef (gdbarch, insn, dsc);
6545 case 0xc:
6546 if (bit (insn, 4))
6547 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6548 else
6549 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6551 case 0xd:
6552 if (bit (insn, 4))
6553 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6554 else
6555 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6557 default:
6558 return arm_copy_undef (gdbarch, insn, dsc);
6562 /* Decode miscellaneous instructions in dp/misc encoding space. */
6564 static int
6565 arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
6566 struct regcache *regs,
6567 struct displaced_step_closure *dsc)
6569 unsigned int op2 = bits (insn, 4, 6);
6570 unsigned int op = bits (insn, 21, 22);
6572 switch (op2)
6574 case 0x0:
6575 return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);
6577 case 0x1:
6578 if (op == 0x1) /* bx. */
6579 return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
6580 else if (op == 0x3)
6581 return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
6582 else
6583 return arm_copy_undef (gdbarch, insn, dsc);
6585 case 0x2:
6586 if (op == 0x1)
6587 /* Not really supported. */
6588 return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
6589 else
6590 return arm_copy_undef (gdbarch, insn, dsc);
6592 case 0x3:
6593 if (op == 0x1)
6594 return arm_copy_bx_blx_reg (gdbarch, insn,
6595 regs, dsc); /* blx register. */
6596 else
6597 return arm_copy_undef (gdbarch, insn, dsc);
6599 case 0x5:
6600 return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);
6602 case 0x7:
6603 if (op == 0x1)
6604 return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
6605 else if (op == 0x3)
6606 /* Not really supported. */
6607 return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
6609 default:
6610 return arm_copy_undef (gdbarch, insn, dsc);
6614 static int
6615 arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
6616 struct regcache *regs,
6617 struct displaced_step_closure *dsc)
6619 if (bit (insn, 25))
6620 switch (bits (insn, 20, 24))
6622 case 0x10:
6623 return arm_copy_unmodified (gdbarch, insn, "movw", dsc);
6625 case 0x14:
6626 return arm_copy_unmodified (gdbarch, insn, "movt", dsc);
6628 case 0x12: case 0x16:
6629 return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);
6631 default:
6632 return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
6634 else
6636 uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);
6638 if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
6639 return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
6640 else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
6641 return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
6642 else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
6643 return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
6644 else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
6645 return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
6646 else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
6647 return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
6648 else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
6649 return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
6650 else if (op2 == 0xb || (op2 & 0xd) == 0xd)
6651 /* 2nd arg means "unprivileged". */
6652 return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
6653 dsc);
6656 /* Should be unreachable. */
6657 return 1;
6660 static int
6661 arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
6662 struct regcache *regs,
6663 struct displaced_step_closure *dsc)
6665 int a = bit (insn, 25), b = bit (insn, 4);
6666 uint32_t op1 = bits (insn, 20, 24);
6668 if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
6669 || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
6670 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
6671 else if ((!a && (op1 & 0x17) == 0x02)
6672 || (a && (op1 & 0x17) == 0x02 && !b))
6673 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
6674 else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
6675 || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
6676 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
6677 else if ((!a && (op1 & 0x17) == 0x03)
6678 || (a && (op1 & 0x17) == 0x03 && !b))
6679 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
6680 else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
6681 || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
6682 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
6683 else if ((!a && (op1 & 0x17) == 0x06)
6684 || (a && (op1 & 0x17) == 0x06 && !b))
6685 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
6686 else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
6687 || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
6688 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
6689 else if ((!a && (op1 & 0x17) == 0x07)
6690 || (a && (op1 & 0x17) == 0x07 && !b))
6691 return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);
6693 /* Should be unreachable. */
6694 return 1;
6697 static int
6698 arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
6699 struct displaced_step_closure *dsc)
6701 switch (bits (insn, 20, 24))
6703 case 0x00: case 0x01: case 0x02: case 0x03:
6704 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);
6706 case 0x04: case 0x05: case 0x06: case 0x07:
6707 return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);
6709 case 0x08: case 0x09: case 0x0a: case 0x0b:
6710 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
6711 return arm_copy_unmodified (gdbarch, insn,
6712 "decode/pack/unpack/saturate/reverse", dsc);
6714 case 0x18:
6715 if (bits (insn, 5, 7) == 0) /* op2. */
6717 if (bits (insn, 12, 15) == 0xf)
6718 return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
6719 else
6720 return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
6722 else
6723 return arm_copy_undef (gdbarch, insn, dsc);
6725 case 0x1a: case 0x1b:
6726 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6727 return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
6728 else
6729 return arm_copy_undef (gdbarch, insn, dsc);
6731 case 0x1c: case 0x1d:
6732 if (bits (insn, 5, 6) == 0x0) /* op2[1:0]. */
6734 if (bits (insn, 0, 3) == 0xf)
6735 return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
6736 else
6737 return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
6739 else
6740 return arm_copy_undef (gdbarch, insn, dsc);
6742 case 0x1e: case 0x1f:
6743 if (bits (insn, 5, 6) == 0x2) /* op2[1:0]. */
6744 return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
6745 else
6746 return arm_copy_undef (gdbarch, insn, dsc);
6749 /* Should be unreachable. */
6750 return 1;
6753 static int
6754 arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, uint32_t insn,
6755 struct regcache *regs,
6756 struct displaced_step_closure *dsc)
6758 if (bit (insn, 25))
6759 return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
6760 else
6761 return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
6764 static int
6765 arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
6766 struct regcache *regs,
6767 struct displaced_step_closure *dsc)
6769 unsigned int opcode = bits (insn, 20, 24);
6771 switch (opcode)
6773 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
6774 return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);
6776 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6777 case 0x12: case 0x16:
6778 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);
6780 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6781 case 0x13: case 0x17:
6782 return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);
6784 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6785 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6786 /* Note: no writeback for these instructions. Bit 25 will always be
6787 zero though (via caller), so the following works OK. */
6788 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6791 /* Should be unreachable. */
6792 return 1;
6795 /* Decode shifted register instructions. */
6797 static int
6798 thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
6799 uint16_t insn2, struct regcache *regs,
6800 struct displaced_step_closure *dsc)
6802 /* PC is only allowed to be used in instruction MOV. */
6804 unsigned int op = bits (insn1, 5, 8);
6805 unsigned int rn = bits (insn1, 0, 3);
6807 if (op == 0x2 && rn == 0xf) /* MOV */
6808 return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
6809 else
6810 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6811 "dp (shift reg)", dsc);
6815 /* Decode extension register load/store. Exactly the same as
6816 arm_decode_ext_reg_ld_st. */
6818 static int
6819 thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
6820 uint16_t insn2, struct regcache *regs,
6821 struct displaced_step_closure *dsc)
6823 unsigned int opcode = bits (insn1, 4, 8);
6825 switch (opcode)
6827 case 0x04: case 0x05:
6828 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6829 "vfp/neon vmov", dsc);
6831 case 0x08: case 0x0c: /* 01x00 */
6832 case 0x0a: case 0x0e: /* 01x10 */
6833 case 0x12: case 0x16: /* 10x10 */
6834 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6835 "vfp/neon vstm/vpush", dsc);
6837 case 0x09: case 0x0d: /* 01x01 */
6838 case 0x0b: case 0x0f: /* 01x11 */
6839 case 0x13: case 0x17: /* 10x11 */
6840 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6841 "vfp/neon vldm/vpop", dsc);
6843 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6844 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6845 "vstr", dsc);
6846 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6847 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
6850 /* Should be unreachable. */
6851 return 1;
6854 static int
6855 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn,
6856 struct regcache *regs, struct displaced_step_closure *dsc)
6858 unsigned int op1 = bits (insn, 20, 25);
6859 int op = bit (insn, 4);
6860 unsigned int coproc = bits (insn, 8, 11);
6862 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
6863 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
6864 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
6865 && (coproc & 0xe) != 0xa)
6866 /* stc/stc2. */
6867 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6868 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
6869 && (coproc & 0xe) != 0xa)
6870 /* ldc/ldc2 imm/lit. */
6871 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
6872 else if ((op1 & 0x3e) == 0x00)
6873 return arm_copy_undef (gdbarch, insn, dsc);
6874 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
6875 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
6876 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
6877 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
6878 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
6879 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
6880 else if ((op1 & 0x30) == 0x20 && !op)
6882 if ((coproc & 0xe) == 0xa)
6883 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
6884 else
6885 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
6887 else if ((op1 & 0x30) == 0x20 && op)
6888 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
6889 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
6890 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
6891 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
6892 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
6893 else if ((op1 & 0x30) == 0x30)
6894 return arm_copy_svc (gdbarch, insn, regs, dsc);
6895 else
6896 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
6899 static int
6900 thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
6901 uint16_t insn2, struct regcache *regs,
6902 struct displaced_step_closure *dsc)
6904 unsigned int coproc = bits (insn2, 8, 11);
6905 unsigned int bit_5_8 = bits (insn1, 5, 8);
6906 unsigned int bit_9 = bit (insn1, 9);
6907 unsigned int bit_4 = bit (insn1, 4);
6909 if (bit_9 == 0)
6911 if (bit_5_8 == 2)
6912 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6913 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
6914 dsc);
6915 else if (bit_5_8 == 0) /* UNDEFINED. */
6916 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
6917 else
6919 /* coproc is 101x.  SIMD/VFP, ext registers load/store. */
6920 if ((coproc & 0xe) == 0xa)
6921 return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
6922 dsc);
6923 else /* coproc is not 101x. */
6925 if (bit_4 == 0) /* STC/STC2. */
6926 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6927 "stc/stc2", dsc);
6928 else /* LDC/LDC2 {literal, immediate}. */
6929 return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
6930 regs, dsc);
6934 else
6935 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);
6937 return 0;
6940 static void
6941 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
6942 struct displaced_step_closure *dsc, int rd)
6944 /* ADR Rd, #imm
6946 Rewrite as:
6948 Preparation: Rd <- PC
6949 Insn: ADD Rd, #imm
6950 Cleanup: Null.
6953 /* Rd <- PC */
6954 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
6955 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
6958 static int
6959 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
6960 struct displaced_step_closure *dsc,
6961 int rd, unsigned int imm)
6964 /* Encoding T2: ADDS Rd, #imm */
6965 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
6967 install_pc_relative (gdbarch, regs, dsc, rd);
6969 return 0;
6972 static int
6973 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
6974 struct regcache *regs,
6975 struct displaced_step_closure *dsc)
6977 unsigned int rd = bits (insn, 8, 10);
6978 unsigned int imm8 = bits (insn, 0, 7);
6980 if (debug_displaced)
6981 fprintf_unfiltered (gdb_stdlog,
6982 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
6983 rd, imm8, insn);
6985 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
6988 static int
6989 thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
6990 uint16_t insn2, struct regcache *regs,
6991 struct displaced_step_closure *dsc)
6993 unsigned int rd = bits (insn2, 8, 11);
6994 /* The immediate field has the same encoding in ADR, ADD and SUB, so we
6995 simply extract the raw immediate encoding rather than computing the
6996 immediate value.  When generating the ADD or SUB instruction, we can
6997 then OR the immediate directly into the encoding. */
6998 unsigned int imm_3_8 = insn2 & 0x70ff;
6999 unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10. */
7001 if (debug_displaced)
7002 fprintf_unfiltered (gdb_stdlog,
7003 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
7004 rd, imm_i, imm_3_8, insn1, insn2);
7006 if (bit (insn1, 7)) /* Encoding T2 */
7008 /* Encoding T3: SUB Rd, Rd, #imm */
7009 dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
7010 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7012 else /* Encoding T3 */
7014 /* Encoding T3: ADD Rd, Rd, #imm */
7015 dsc->modinsn[0] = (0xf100 | rd | imm_i);
7016 dsc->modinsn[1] = ((rd << 8) | imm_3_8);
7018 dsc->numinsns = 2;
7020 install_pc_relative (gdbarch, regs, dsc, rd);
7022 return 0;
7025 static int
7026 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, uint16_t insn1,
7027 struct regcache *regs,
7028 struct displaced_step_closure *dsc)
7030 unsigned int rt = bits (insn1, 8, 10);
7031 unsigned int pc;
7032 int imm8 = (bits (insn1, 0, 7) << 2);
7034 /* LDR Rd, #imm8
7036 Rewrite as:
7038 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
7040 Insn: LDR R0, [R2, R3];
7041 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
7043 if (debug_displaced)
7044 fprintf_unfiltered (gdb_stdlog,
7045 "displaced: copying thumb ldr r%d [pc #%d]\n"
7046 , rt, imm8);
7048 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
7049 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
7050 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
7051 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7052 /* The assembler calculates the required value of the offset from the
7053 Align(PC,4) value of this instruction to the label. */
7054 pc = pc & 0xfffffffc;
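/* For example (illustrative): for a 16-bit LDR literal at 0x8002, the PC
value read above is 0x8006 (from + 4 in Thumb state), which is aligned
down to 0x8004 here; the rewritten load below then accesses
0x8004 + imm8 (the byte offset computed above). */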
7056 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
7057 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
7059 dsc->rd = rt;
7060 dsc->u.ldst.xfersize = 4;
7061 dsc->u.ldst.rn = 0;
7062 dsc->u.ldst.immed = 0;
7063 dsc->u.ldst.writeback = 0;
7064 dsc->u.ldst.restore_r4 = 0;
7066 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
7068 dsc->cleanup = &cleanup_load;
7070 return 0;
7073 /* Copy Thumb cbnz/cbz instruction. */
7075 static int
7076 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
7077 struct regcache *regs,
7078 struct displaced_step_closure *dsc)
7080 int non_zero = bit (insn1, 11);
7081 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
7082 CORE_ADDR from = dsc->insn_addr;
7083 int rn = bits (insn1, 0, 2);
7084 int rn_val = displaced_read_reg (regs, dsc, rn);
7086 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
7087 /* CBNZ and CBZ do not affect the condition flags.  If the condition is
7088 true, set it to INST_AL so that cleanup_branch knows the branch is
7089 taken; otherwise leave it as-is and cleanup_branch will do nothing. */
7090 if (dsc->u.branch.cond)
7092 dsc->u.branch.cond = INST_AL;
7093 dsc->u.branch.dest = from + 4 + imm5;
7095 else
7096 dsc->u.branch.dest = from + 2;
7098 dsc->u.branch.link = 0;
7099 dsc->u.branch.exchange = 0;
7101 if (debug_displaced)
7102 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
7103 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
7104 rn, rn_val, insn1, dsc->u.branch.dest);
7106 dsc->modinsn[0] = THUMB_NOP;
7108 dsc->cleanup = &cleanup_branch;
7109 return 0;
7112 /* Copy Table Branch Byte/Halfword. */
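/* Worked example (illustrative values, not from the original sources): for
tbb [r4, r1] with r4 = 0x9000 and r1 = 2, the byte at 0x9002 is read; if
it is 0x0b, the branch target computed below is insn_addr + 4 + 2 * 0x0b,
i.e. 22 bytes past the address following the TBB.  For tbh the table
entry is instead a halfword read from r4 + 2 * r1. */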
7113 static int
7114 thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
7115 uint16_t insn2, struct regcache *regs,
7116 struct displaced_step_closure *dsc)
7118 ULONGEST rn_val, rm_val;
7119 int is_tbh = bit (insn2, 4);
7120 CORE_ADDR halfwords = 0;
7121 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7123 rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
7124 rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));
7126 if (is_tbh)
7128 gdb_byte buf[2];
7130 target_read_memory (rn_val + 2 * rm_val, buf, 2);
7131 halfwords = extract_unsigned_integer (buf, 2, byte_order);
7133 else
7135 gdb_byte buf[1];
7137 target_read_memory (rn_val + rm_val, buf, 1);
7138 halfwords = extract_unsigned_integer (buf, 1, byte_order);
7141 if (debug_displaced)
7142 fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x index 0x%x"
7143 " offset 0x%x\n", is_tbh ? "tbh" : "tbb",
7144 (unsigned int) rn_val, (unsigned int) rm_val,
7145 (unsigned int) halfwords);
7147 dsc->u.branch.cond = INST_AL;
7148 dsc->u.branch.link = 0;
7149 dsc->u.branch.exchange = 0;
7150 dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;
7152 dsc->cleanup = &cleanup_branch;
7154 return 0;
7157 static void
7158 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
7159 struct displaced_step_closure *dsc)
7161 /* PC <- r7 */
7162 int val = displaced_read_reg (regs, dsc, 7);
7163 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
7165 /* r7 <- r8 */
7166 val = displaced_read_reg (regs, dsc, 8);
7167 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
7169 /* r8 <- tmp[0] */
7170 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
7174 static int
7175 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
7176 struct regcache *regs,
7177 struct displaced_step_closure *dsc)
7179 dsc->u.block.regmask = insn1 & 0x00ff;
7181 /* Rewrite the instruction POP {rX, rY, ..., rZ, PC}
7182 as follows:
7184 (1) register list is full, that is, r0-r7 are used.
7185 Prepare: tmp[0] <- r8
7187 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7188 MOV r8, r7; Move value of r7 to r8;
7189 POP {r7}; Pop the saved PC value into r7.
7191 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
7193 (2) register list is not full, supposing there are N registers in
7194 register list (except PC, 0 <= N <= 7).
7195 Prepare: for each i, 0 - N, tmp[i] <- ri.
7197 POP {r0, r1, ...., rN};
7199 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
7200 from tmp[] properly. */
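/* For example (illustrative): for POP {r1, r3, pc} the register mask is
0x0a and num_in_list is 2, so new_regmask below is 0x07 and the modified
instruction is POP {r0, r1, r2}; the mask is then extended with 0x8000 so
that cleanup_block_load_pc shuffles the loaded values back into r1, r3
and PC. */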
7202 if (debug_displaced)
7203 fprintf_unfiltered (gdb_stdlog,
7204 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
7205 dsc->u.block.regmask, insn1);
7207 if (dsc->u.block.regmask == 0xff)
7209 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
7211 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
7212 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
7213 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
7215 dsc->numinsns = 3;
7216 dsc->cleanup = &cleanup_pop_pc_16bit_all;
7218 else
7220 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
7221 unsigned int i;
7222 unsigned int new_regmask;
7224 for (i = 0; i < num_in_list + 1; i++)
7225 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7227 new_regmask = (1 << (num_in_list + 1)) - 1;
7229 if (debug_displaced)
7230 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
7231 "{..., pc}: original reg list %.4x,"
7232 " modified list %.4x\n"),
7233 (int) dsc->u.block.regmask, new_regmask);
7235 dsc->u.block.regmask |= 0x8000;
7236 dsc->u.block.writeback = 0;
7237 dsc->u.block.cond = INST_AL;
7239 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
7241 dsc->cleanup = &cleanup_block_load_pc;
7244 return 0;
7247 static void
7248 thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7249 struct regcache *regs,
7250 struct displaced_step_closure *dsc)
7252 unsigned short op_bit_12_15 = bits (insn1, 12, 15);
7253 unsigned short op_bit_10_11 = bits (insn1, 10, 11);
7254 int err = 0;
7256 /* 16-bit thumb instructions. */
7257 switch (op_bit_12_15)
7259 /* Shift (immediate), add, subtract, move and compare. */
7260 case 0: case 1: case 2: case 3:
7261 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7262 "shift/add/sub/mov/cmp",
7263 dsc);
7264 break;
7265 case 4:
7266 switch (op_bit_10_11)
7268 case 0: /* Data-processing */
7269 err = thumb_copy_unmodified_16bit (gdbarch, insn1,
7270 "data-processing",
7271 dsc);
7272 break;
7273 case 1: /* Special data instructions and branch and exchange. */
7275 unsigned short op = bits (insn1, 7, 9);
7276 if (op == 6 || op == 7) /* BX or BLX */
7277 err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
7278 else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7279 err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
7280 else
7281 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
7282 dsc);
7284 break;
7285 default: /* LDR (literal) */
7286 err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
7288 break;
7289 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7290 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
7291 break;
7292 case 10:
7293 if (op_bit_10_11 < 2) /* Generate PC-relative address */
7294 err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
7295 else /* Generate SP-relative address */
7296 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
7297 break;
7298 case 11: /* Misc 16-bit instructions */
7300 switch (bits (insn1, 8, 11))
7302 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7303 err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
7304 break;
7305 case 12: case 13: /* POP */
7306 if (bit (insn1, 8)) /* PC is in register list. */
7307 err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
7308 else
7309 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
7310 break;
7311 case 15: /* If-Then, and hints */
7312 if (bits (insn1, 0, 3))
7313 /* If-Then makes up to four following instructions conditional.
7314 The IT instruction itself is not conditional, so handle it as a
7315 common unmodified instruction. */
7316 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
7317 dsc);
7318 else
7319 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
7320 break;
7321 default:
7322 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
7325 break;
7326 case 12:
7327 if (op_bit_10_11 < 2) /* Store multiple registers */
7328 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
7329 else /* Load multiple registers */
7330 err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
7331 break;
7332 case 13: /* Conditional branch and supervisor call */
7333 if (bits (insn1, 9, 11) != 7) /* conditional branch */
7334 err = thumb_copy_b (gdbarch, insn1, dsc);
7335 else
7336 err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
7337 break;
7338 case 14: /* Unconditional branch */
7339 err = thumb_copy_b (gdbarch, insn1, dsc);
7340 break;
7341 default:
7342 err = 1;
7345 if (err)
7346 internal_error (__FILE__, __LINE__,
7347 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7350 static int
7351 decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
7352 uint16_t insn1, uint16_t insn2,
7353 struct regcache *regs,
7354 struct displaced_step_closure *dsc)
7356 int rt = bits (insn2, 12, 15);
7357 int rn = bits (insn1, 0, 3);
7358 int op1 = bits (insn1, 7, 8);
7360 switch (bits (insn1, 5, 6))
7362 case 0: /* Load byte and memory hints */
7363 if (rt == 0xf) /* PLD/PLI */
7365 if (rn == 0xf)
7366 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
7367 return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
7368 else
7369 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7370 "pli/pld", dsc);
7372 else
7374 if (rn == 0xf) /* LDRB/LDRSB (literal) */
7375 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7376 1);
7377 else
7378 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7379 "ldrb{reg, immediate}/ldrbt",
7380 dsc);
7383 break;
7384 case 1: /* Load halfword and memory hints. */
7385 if (rt == 0xf) /* PLD{W} and Unalloc memory hint. */
7386 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7387 "pld/unalloc memhint", dsc);
7388 else
7390 if (rn == 0xf)
7391 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
7392 2);
7393 else
7394 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7395 "ldrh/ldrht", dsc);
7397 break;
7398 case 2: /* Load word */
7400 int insn2_bit_8_11 = bits (insn2, 8, 11);
7402 if (rn == 0xf)
7403 return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
7404 else if (op1 == 0x1) /* Encoding T3 */
7405 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
7406 0, 1);
7407 else /* op1 == 0x0 */
7409 if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
7410 /* LDR (immediate) */
7411 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7412 dsc, bit (insn2, 8), 1);
7413 else if (insn2_bit_8_11 == 0xe) /* LDRT */
7414 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7415 "ldrt", dsc);
7416 else
7417 /* LDR (register) */
7418 return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
7419 dsc, 0, 0);
7421 break;
7423 default:
7424 return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
7425 break;
7427 return 0;
7430 static void
7431 thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
7432 uint16_t insn2, struct regcache *regs,
7433 struct displaced_step_closure *dsc)
7435 int err = 0;
7436 unsigned short op = bit (insn2, 15);
7437 unsigned int op1 = bits (insn1, 11, 12);
7439 switch (op1)
7441 case 1:
7443 switch (bits (insn1, 9, 10))
7445 case 0:
7446 if (bit (insn1, 6))
7448 /* Load/store {dual, exclusive}, table branch. */
7449 if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
7450 && bits (insn2, 5, 7) == 0)
7451 err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
7452 dsc);
7453 else
7454 /* PC is not allowed to be used in load/store {dual, exclusive}
7455 instructions. */
7456 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7457 "load/store dual/ex", dsc);
7459 else /* load/store multiple */
7461 switch (bits (insn1, 7, 8))
7463 case 0: case 3: /* SRS, RFE */
7464 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7465 "srs/rfe", dsc);
7466 break;
7467 case 1: case 2: /* LDM/STM/PUSH/POP */
7468 err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
7469 break;
7472 break;
7474 case 1:
7475 /* Data-processing (shift register). */
7476 err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
7477 dsc);
7478 break;
7479 default: /* Coprocessor instructions. */
7480 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7481 break;
7483 break;
7485 case 2: /* op1 = 2 */
7486 if (op) /* Branch and misc control. */
7488 if (bit (insn2, 14) /* BLX/BL */
7489 || bit (insn2, 12) /* Unconditional branch */
7490 || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
7491 err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
7492 else
7493 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7494 "misc ctrl", dsc);
7496 else
7498 if (bit (insn1, 9)) /* Data processing (plain binary imm). */
7500 int op = bits (insn1, 4, 8);
7501 int rn = bits (insn1, 0, 3);
7502 if ((op == 0 || op == 0xa) && rn == 0xf)
7503 err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
7504 regs, dsc);
7505 else
7506 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7507 "dp/pb", dsc);
7509 else /* Data processing (modified immediate) */
7510 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7511 "dp/mi", dsc);
7513 break;
7514 case 3: /* op1 = 3 */
7515 switch (bits (insn1, 9, 10))
7517 case 0:
7518 if (bit (insn1, 4))
7519 err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
7520 regs, dsc);
7521 else /* NEON Load/Store and Store single data item */
7522 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7523 "neon elt/struct load/store",
7524 dsc);
7525 break;
7526 case 1: /* op1 = 3, bits (9, 10) == 1 */
7527 switch (bits (insn1, 7, 8))
7529 case 0: case 1: /* Data processing (register) */
7530 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7531 "dp(reg)", dsc);
7532 break;
7533 case 2: /* Multiply and absolute difference */
7534 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7535 "mul/mua/diff", dsc);
7536 break;
7537 case 3: /* Long multiply and divide */
7538 err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7539 "lmul/lmua", dsc);
7540 break;
7542 break;
7543 default: /* Coprocessor instructions */
7544 err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
7545 break;
7547 break;
7548 default:
7549 err = 1;
7552 if (err)
7553 internal_error (__FILE__, __LINE__,
7554 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
7558 static void
7559 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7560 struct regcache *regs,
7561 struct displaced_step_closure *dsc)
7563 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7564 uint16_t insn1
7565 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
7567 if (debug_displaced)
7568 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
7569 "at %.8lx\n", insn1, (unsigned long) from);
7571 dsc->is_thumb = 1;
7572 dsc->insn_size = thumb_insn_size (insn1);
7573 if (thumb_insn_size (insn1) == 4)
7575 uint16_t insn2
7576 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
7577 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
7579 else
7580 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
7583 void
7584 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
7585 CORE_ADDR to, struct regcache *regs,
7586 struct displaced_step_closure *dsc)
7588 int err = 0;
7589 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7590 uint32_t insn;
7592 /* Most displaced instructions use a 1-instruction scratch space, so set this
7593 here and override below if/when necessary. */
7594 dsc->numinsns = 1;
7595 dsc->insn_addr = from;
7596 dsc->scratch_base = to;
7597 dsc->cleanup = NULL;
7598 dsc->wrote_to_pc = 0;
7600 if (!displaced_in_arm_mode (regs))
7601 return thumb_process_displaced_insn (gdbarch, from, regs, dsc);
7603 dsc->is_thumb = 0;
7604 dsc->insn_size = 4;
7605 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
7606 if (debug_displaced)
7607 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
7608 "at %.8lx\n", (unsigned long) insn,
7609 (unsigned long) from);
7611 if ((insn & 0xf0000000) == 0xf0000000)
7612 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
7613 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
7615 case 0x0: case 0x1: case 0x2: case 0x3:
7616 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
7617 break;
7619 case 0x4: case 0x5: case 0x6:
7620 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
7621 break;
7623 case 0x7:
7624 err = arm_decode_media (gdbarch, insn, dsc);
7625 break;
7627 case 0x8: case 0x9: case 0xa: case 0xb:
7628 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
7629 break;
7631 case 0xc: case 0xd: case 0xe: case 0xf:
7632 err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
7633 break;
7636 if (err)
7637 internal_error (__FILE__, __LINE__,
7638 _("arm_process_displaced_insn: Instruction decode error"));
7641 /* Actually set up the scratch space for a displaced instruction. */
7643 void
7644 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
7645 CORE_ADDR to, struct displaced_step_closure *dsc)
7647 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7648 unsigned int i, len, offset;
7649 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7650 int size = dsc->is_thumb? 2 : 4;
7651 const gdb_byte *bkp_insn;
7653 offset = 0;
7654 /* Poke modified instruction(s). */
7655 for (i = 0; i < dsc->numinsns; i++)
7657 if (debug_displaced)
7659 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
7660 if (size == 4)
7661 fprintf_unfiltered (gdb_stdlog, "%.8lx",
7662 dsc->modinsn[i]);
7663 else if (size == 2)
7664 fprintf_unfiltered (gdb_stdlog, "%.4x",
7665 (unsigned short)dsc->modinsn[i]);
7667 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
7668 (unsigned long) to + offset);
7671 write_memory_unsigned_integer (to + offset, size,
7672 byte_order_for_code,
7673 dsc->modinsn[i]);
7674 offset += size;
7677 /* Choose the correct breakpoint instruction. */
7678 if (dsc->is_thumb)
7680 bkp_insn = tdep->thumb_breakpoint;
7681 len = tdep->thumb_breakpoint_size;
7683 else
7685 bkp_insn = tdep->arm_breakpoint;
7686 len = tdep->arm_breakpoint_size;
7689 /* Put breakpoint afterwards. */
7690 write_memory (to + offset, bkp_insn, len);
7692 if (debug_displaced)
7693 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
7694 paddress (gdbarch, from), paddress (gdbarch, to));
7697 /* Entry point for cleaning things up after a displaced instruction has been
7698 single-stepped. */
7700 void
7701 arm_displaced_step_fixup (struct gdbarch *gdbarch,
7702 struct displaced_step_closure *dsc,
7703 CORE_ADDR from, CORE_ADDR to,
7704 struct regcache *regs)
7706 if (dsc->cleanup)
7707 dsc->cleanup (gdbarch, regs, dsc);
7709 if (!dsc->wrote_to_pc)
7710 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
7711 dsc->insn_addr + dsc->insn_size);
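/* Illustrative sketch (not part of the original file) of how the three
   displaced-stepping entry points above fit together.  The wrapper and
   its caller are hypothetical; the real driver is GDB's generic
   displaced-stepping machinery, which also allocates DSC and the
   scratch area at TO.  */
#if 0
static void
example_displaced_step (struct gdbarch *gdbarch, struct regcache *regs,
			CORE_ADDR from, CORE_ADDR to,
			struct displaced_step_closure *dsc)
{
  /* Decode the original instruction at FROM and prepare modified
     copies in DSC, choosing ARM or Thumb from the current mode.  */
  arm_process_displaced_insn (gdbarch, from, to, regs, dsc);

  /* Write the modified instruction(s), followed by a breakpoint, into
     the scratch area at TO.  */
  arm_displaced_init_closure (gdbarch, from, to, dsc);

  /* ... the inferior then single-steps the copy at TO ...  */

  /* Run the per-instruction cleanup and resynchronize the PC.  */
  arm_displaced_step_fixup (gdbarch, dsc, from, to, regs);
}
#endif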
7715 #include "bfd-in2.h"
7716 #include "libcoff.h"
7718 static int
7719 gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
7721 struct gdbarch *gdbarch = (struct gdbarch *) info->application_data;
7723 if (arm_pc_is_thumb (gdbarch, memaddr))
7725 static asymbol *asym;
7726 static combined_entry_type ce;
7727 static struct coff_symbol_struct csym;
7728 static struct bfd fake_bfd;
7729 static bfd_target fake_target;
7731 if (csym.native == NULL)
7733 /* Create a fake symbol vector containing a Thumb symbol.
7734 This is solely so that the code in print_insn_little_arm()
7735 and print_insn_big_arm() in opcodes/arm-dis.c will detect
7736 the presence of a Thumb symbol and switch to decoding
7737 Thumb instructions. */
7739 fake_target.flavour = bfd_target_coff_flavour;
7740 fake_bfd.xvec = &fake_target;
7741 ce.u.syment.n_sclass = C_THUMBEXTFUNC;
7742 csym.native = &ce;
7743 csym.symbol.the_bfd = &fake_bfd;
7744 csym.symbol.name = "fake";
7745 asym = (asymbol *) & csym;
7748 memaddr = UNMAKE_THUMB_ADDR (memaddr);
7749 info->symbols = &asym;
7751 else
7752 info->symbols = NULL;
7754 if (info->endian == BFD_ENDIAN_BIG)
7755 return print_insn_big_arm (memaddr, info);
7756 else
7757 return print_insn_little_arm (memaddr, info);
7760 /* The following define instruction sequences that will cause ARM
7761 CPUs to take an undefined instruction trap. These are used to
7762 signal a breakpoint to GDB.
7764 The newer ARMv4T CPUs are capable of operating in ARM or Thumb
7765 modes. A different instruction is required for each mode. The ARM
7766 CPUs can also be big or little endian. Thus four different
7767 instructions are needed to support all cases.
7769 Note: ARMv4 defines several new instructions that will take the
7770 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7771 not in fact add the new instructions. The new undefined
7772 instructions in ARMv4 are all instructions that had no defined
7773 behaviour in earlier chips. There is no guarantee that they will
7774 raise an exception; they may instead be treated as NOPs. In practice,
7775 it may only be safe to rely on instructions matching:
7777 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7778 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7779 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7781 Even this may only be true if the condition predicate is true. The
7782 following use a condition predicate of ALWAYS so it is always TRUE.
7784 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7785 and NetBSD all use a software interrupt rather than an undefined
7786 instruction to force a trap. This can be handled by the
7787 ABI-specific code during establishment of the gdbarch vector. */
7789 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7790 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7791 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7792 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
7794 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
7795 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
7796 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
7797 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
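/* Illustrative check (not part of the original file): the little-endian
   ARM breakpoint bytes above assemble to the word 0xe7ffdefe, which has
   an ALWAYS (0xe) condition and matches the "cccc 011x ... 1 xxxx"
   undefined-instruction pattern described in the comment.  */
#if 0
static void
example_arm_le_breakpoint_word (void)
{
  const gdb_byte bytes[] = ARM_LE_BREAKPOINT;
  uint32_t word = (bytes[0] | (bytes[1] << 8) | (bytes[2] << 16)
		   | ((uint32_t) bytes[3] << 24));

  gdb_assert (word == 0xe7ffdefe);
}
#endif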
7799 /* Determine the type and size of breakpoint to insert at PCPTR. Uses
7800 the program counter value to determine whether a 16-bit or 32-bit
7801 breakpoint should be used. It returns a pointer to a string of
7802 bytes that encode a breakpoint instruction, stores the length of
7803 the string to *lenptr, and adjusts the program counter (if
7804 necessary) to point to the actual memory location where the
7805 breakpoint should be inserted. */
7807 static const unsigned char *
7808 arm_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr, int *lenptr)
7810 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
7811 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
7813 if (arm_pc_is_thumb (gdbarch, *pcptr))
7815 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
7817 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7818 check whether we are replacing a 32-bit instruction. */
7819 if (tdep->thumb2_breakpoint != NULL)
7821 gdb_byte buf[2];
7822 if (target_read_memory (*pcptr, buf, 2) == 0)
7824 unsigned short inst1;
7825 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
7826 if (thumb_insn_size (inst1) == 4)
7828 *lenptr = tdep->thumb2_breakpoint_size;
7829 return tdep->thumb2_breakpoint;
7834 *lenptr = tdep->thumb_breakpoint_size;
7835 return tdep->thumb_breakpoint;
7837 else
7839 *lenptr = tdep->arm_breakpoint_size;
7840 return tdep->arm_breakpoint;
7844 static void
7845 arm_remote_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr,
7846 int *kindptr)
7848 arm_breakpoint_from_pc (gdbarch, pcptr, kindptr);
7850 if (arm_pc_is_thumb (gdbarch, *pcptr) && *kindptr == 4)
7851 /* The documented magic value for a 32-bit Thumb-2 breakpoint, so
7852 that this is not confused with a 32-bit ARM breakpoint. */
7853 *kindptr = 3;
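/* Illustrative usage sketch (not part of the original file).  With the
   default breakpoint tables set up in arm_gdbarch_init below, the
   resulting kind is 2 for a 16-bit Thumb breakpoint and 4 for an ARM
   breakpoint; 3 is used when an OS ABI supplies a separate 32-bit
   Thumb-2 breakpoint.  */
#if 0
static int
example_breakpoint_kind (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  int kind;

  arm_remote_breakpoint_from_pc (gdbarch, &pc, &kind);
  return kind;
}
#endif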
7856 /* Extract from an array REGBUF containing the (raw) register state a
7857 function return value of type TYPE, and copy that, in virtual
7858 format, into VALBUF. */
7860 static void
7861 arm_extract_return_value (struct type *type, struct regcache *regs,
7862 gdb_byte *valbuf)
7864 struct gdbarch *gdbarch = get_regcache_arch (regs);
7865 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
7867 if (TYPE_CODE_FLT == TYPE_CODE (type))
7869 switch (gdbarch_tdep (gdbarch)->fp_model)
7871 case ARM_FLOAT_FPA:
7873 /* The value is in register F0 in internal format. We need to
7874 extract the raw value and then convert it to the desired
7875 internal type. */
7876 bfd_byte tmpbuf[FP_REGISTER_SIZE];
7878 regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
7879 convert_from_extended (floatformat_from_type (type), tmpbuf,
7880 valbuf, gdbarch_byte_order (gdbarch));
7882 break;
7884 case ARM_FLOAT_SOFT_FPA:
7885 case ARM_FLOAT_SOFT_VFP:
7886 /* ARM_FLOAT_VFP can arise if this is a variadic function so
7887 not using the VFP ABI code. */
7888 case ARM_FLOAT_VFP:
7889 regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
7890 if (TYPE_LENGTH (type) > 4)
7891 regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
7892 valbuf + INT_REGISTER_SIZE);
7893 break;
7895 default:
7896 internal_error (__FILE__, __LINE__,
7897 _("arm_extract_return_value: "
7898 "Floating point model not supported"));
7899 break;
7902 else if (TYPE_CODE (type) == TYPE_CODE_INT
7903 || TYPE_CODE (type) == TYPE_CODE_CHAR
7904 || TYPE_CODE (type) == TYPE_CODE_BOOL
7905 || TYPE_CODE (type) == TYPE_CODE_PTR
7906 || TYPE_CODE (type) == TYPE_CODE_REF
7907 || TYPE_CODE (type) == TYPE_CODE_ENUM)
7909 /* If the type is a plain integer, then the access is
7910 straightforward. Otherwise we have to play around a bit
7911 more. */
7912 int len = TYPE_LENGTH (type);
7913 int regno = ARM_A1_REGNUM;
7914 ULONGEST tmp;
7916 while (len > 0)
7918 /* By using store_unsigned_integer we avoid having to do
7919 anything special for small big-endian values. */
7920 regcache_cooked_read_unsigned (regs, regno++, &tmp);
7921 store_unsigned_integer (valbuf,
7922 (len > INT_REGISTER_SIZE
7923 ? INT_REGISTER_SIZE : len),
7924 byte_order, tmp);
7925 len -= INT_REGISTER_SIZE;
7926 valbuf += INT_REGISTER_SIZE;
7929 else
7931 /* For a structure or union the behaviour is as if the value had
7932 been stored to word-aligned memory and then loaded into
7933 registers with 32-bit load instruction(s). */
7934 int len = TYPE_LENGTH (type);
7935 int regno = ARM_A1_REGNUM;
7936 bfd_byte tmpbuf[INT_REGISTER_SIZE];
7938 while (len > 0)
7940 regcache_cooked_read (regs, regno++, tmpbuf);
7941 memcpy (valbuf, tmpbuf,
7942 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
7943 len -= INT_REGISTER_SIZE;
7944 valbuf += INT_REGISTER_SIZE;
7950 /* Will a function return an aggregate type in memory or in a
7951 register? Return 0 if an aggregate type can be returned in a
7952 register, 1 if it must be returned in memory. */
7954 static int
7955 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
7957 enum type_code code;
7959 type = check_typedef (type);
7961 /* Simple, non-aggregate types (i.e. not including vectors and
7962 complex) are always returned in a register (or registers). */
7963 code = TYPE_CODE (type);
7964 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
7965 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
7966 return 0;
7968 if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
7970 /* Vector values should be returned using ARM registers if they
7971 are not over 16 bytes. */
7972 return (TYPE_LENGTH (type) > 16);
7975 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
7977 /* The AAPCS says all aggregates not larger than a word are returned
7978 in a register. */
7979 if (TYPE_LENGTH (type) <= INT_REGISTER_SIZE)
7980 return 0;
7982 return 1;
7984 else
7986 int nRc;
7988 /* All aggregate types that won't fit in a register must be returned
7989 in memory. */
7990 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
7991 return 1;
7993 /* In the ARM ABI, "integer" like aggregate types are returned in
7994 registers. For an aggregate type to be integer like, its size
7995 must be less than or equal to INT_REGISTER_SIZE and the
7996 offset of each addressable subfield must be zero. Note that bit
7997 fields are not addressable, and all addressable subfields of
7998 unions always start at offset zero.
8000 This function is based on the behaviour of GCC 2.95.1.
8001 See: gcc/arm.c: arm_return_in_memory() for details.
8003 Note: Versions of GCC before GCC 2.95.2 do not set up the
8004 parameters correctly for a function returning the following
8005 structure: struct { float f; }; This should be returned in memory,
8006 not in a register. Richard Earnshaw sent me a patch, but I do not
8007 know of any way to detect whether a function like the above has been
8008 compiled with the correct calling convention. */
8010 /* Assume all other aggregate types can be returned in a register.
8011 Run a check for structures, unions and arrays. */
8012 nRc = 0;
8014 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
8016 int i;
8017 /* Need to check if this struct/union is "integer" like. For
8018 this to be true, its size must be less than or equal to
8019 INT_REGISTER_SIZE and the offset of each addressable
8020 subfield must be zero. Note that bit fields are not
8021 addressable, and unions always start at offset zero. If any
8022 of the subfields is a floating point type, the struct/union
8023 cannot be an integer type. */
8025 /* For each field in the object, check:
8026 1) Is it FP? --> yes, nRc = 1;
8027 2) Is it addressable (bitpos != 0) and
8028 not packed (bitsize == 0)?
8029 --> yes, nRc = 1
8032 for (i = 0; i < TYPE_NFIELDS (type); i++)
8034 enum type_code field_type_code;
8036 field_type_code
8037 = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
8038 i)));
8040 /* Is it a floating point type field? */
8041 if (field_type_code == TYPE_CODE_FLT)
8043 nRc = 1;
8044 break;
8047 /* If bitpos != 0, then we have to care about it. */
8048 if (TYPE_FIELD_BITPOS (type, i) != 0)
8050 /* Bitfields are not addressable. If the field bitsize is
8051 zero, then the field is not packed. Hence it cannot be
8052 a bitfield or any other packed type. */
8053 if (TYPE_FIELD_BITSIZE (type, i) == 0)
8055 nRc = 1;
8056 break;
8062 return nRc;
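/* Hypothetical examples (not part of the original file) of how the
   older APCS rules in the else branch above classify small aggregates;
   the type names are made up.  */
#if 0
struct apcs_reg_return { char c; };        /* "Integer like": in r0.  */
struct apcs_mem_return_fp { float f; };    /* FP field: in memory.  */
struct apcs_mem_return_big { int a, b; };  /* Wider than a word: in memory.  */
#endif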
8066 /* Write into appropriate registers a function return value of type
8067 TYPE, given in virtual format. */
8069 static void
8070 arm_store_return_value (struct type *type, struct regcache *regs,
8071 const gdb_byte *valbuf)
8073 struct gdbarch *gdbarch = get_regcache_arch (regs);
8074 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8076 if (TYPE_CODE (type) == TYPE_CODE_FLT)
8078 gdb_byte buf[MAX_REGISTER_SIZE];
8080 switch (gdbarch_tdep (gdbarch)->fp_model)
8082 case ARM_FLOAT_FPA:
8084 convert_to_extended (floatformat_from_type (type), buf, valbuf,
8085 gdbarch_byte_order (gdbarch));
8086 regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
8087 break;
8089 case ARM_FLOAT_SOFT_FPA:
8090 case ARM_FLOAT_SOFT_VFP:
8091 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8092 not using the VFP ABI code. */
8093 case ARM_FLOAT_VFP:
8094 regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
8095 if (TYPE_LENGTH (type) > 4)
8096 regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
8097 valbuf + INT_REGISTER_SIZE);
8098 break;
8100 default:
8101 internal_error (__FILE__, __LINE__,
8102 _("arm_store_return_value: Floating "
8103 "point model not supported"));
8104 break;
8107 else if (TYPE_CODE (type) == TYPE_CODE_INT
8108 || TYPE_CODE (type) == TYPE_CODE_CHAR
8109 || TYPE_CODE (type) == TYPE_CODE_BOOL
8110 || TYPE_CODE (type) == TYPE_CODE_PTR
8111 || TYPE_CODE (type) == TYPE_CODE_REF
8112 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8114 if (TYPE_LENGTH (type) <= 4)
8116 /* Values of one word or less are zero/sign-extended and
8117 returned in r0. */
8118 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8119 LONGEST val = unpack_long (type, valbuf);
8121 store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
8122 regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
8124 else
8126 /* Integral values greater than one word are stored in consecutive
8127 registers starting with r0. This will always be a multiple of
8128 the register size. */
8129 int len = TYPE_LENGTH (type);
8130 int regno = ARM_A1_REGNUM;
8132 while (len > 0)
8134 regcache_cooked_write (regs, regno++, valbuf);
8135 len -= INT_REGISTER_SIZE;
8136 valbuf += INT_REGISTER_SIZE;
8140 else
8142 /* For a structure or union the behaviour is as if the value had
8143 been stored to word-aligned memory and then loaded into
8144 registers with 32-bit load instruction(s). */
8145 int len = TYPE_LENGTH (type);
8146 int regno = ARM_A1_REGNUM;
8147 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8149 while (len > 0)
8151 memcpy (tmpbuf, valbuf,
8152 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
8153 regcache_cooked_write (regs, regno++, tmpbuf);
8154 len -= INT_REGISTER_SIZE;
8155 valbuf += INT_REGISTER_SIZE;
8161 /* Handle function return values. */
8163 static enum return_value_convention
8164 arm_return_value (struct gdbarch *gdbarch, struct value *function,
8165 struct type *valtype, struct regcache *regcache,
8166 gdb_byte *readbuf, const gdb_byte *writebuf)
8168 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8169 struct type *func_type = function ? value_type (function) : NULL;
8170 enum arm_vfp_cprc_base_type vfp_base_type;
8171 int vfp_base_count;
8173 if (arm_vfp_abi_for_function (gdbarch, func_type)
8174 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
8176 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
8177 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
8178 int i;
8179 for (i = 0; i < vfp_base_count; i++)
8181 if (reg_char == 'q')
8183 if (writebuf)
8184 arm_neon_quad_write (gdbarch, regcache, i,
8185 writebuf + i * unit_length);
8187 if (readbuf)
8188 arm_neon_quad_read (gdbarch, regcache, i,
8189 readbuf + i * unit_length);
8191 else
8193 char name_buf[4];
8194 int regnum;
8196 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
8197 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8198 strlen (name_buf));
8199 if (writebuf)
8200 regcache_cooked_write (regcache, regnum,
8201 writebuf + i * unit_length);
8202 if (readbuf)
8203 regcache_cooked_read (regcache, regnum,
8204 readbuf + i * unit_length);
8207 return RETURN_VALUE_REGISTER_CONVENTION;
8210 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
8211 || TYPE_CODE (valtype) == TYPE_CODE_UNION
8212 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
8214 if (tdep->struct_return == pcc_struct_return
8215 || arm_return_in_memory (gdbarch, valtype))
8216 return RETURN_VALUE_STRUCT_CONVENTION;
8218 else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
8220 if (arm_return_in_memory (gdbarch, valtype))
8221 return RETURN_VALUE_STRUCT_CONVENTION;
8224 if (writebuf)
8225 arm_store_return_value (valtype, regcache, writebuf);
8227 if (readbuf)
8228 arm_extract_return_value (valtype, regcache, readbuf);
8230 return RETURN_VALUE_REGISTER_CONVENTION;
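/* Hypothetical example (not part of the original file): under the VFP
   ABI, a homogeneous floating-point aggregate such as this is a VFP
   CPRC candidate with base type float and count 2, so the branch above
   returns it in s0 and s1 rather than in the core registers.  */
#if 0
struct vfp_cprc_example { float x, y; };
#endif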
8234 static int
8235 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
8237 struct gdbarch *gdbarch = get_frame_arch (frame);
8238 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8239 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8240 CORE_ADDR jb_addr;
8241 gdb_byte buf[INT_REGISTER_SIZE];
8243 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
8245 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
8246 INT_REGISTER_SIZE))
8247 return 0;
8249 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
8250 return 1;
8253 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8254 return the target PC. Otherwise return 0. */
8256 CORE_ADDR
8257 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
8259 const char *name;
8260 int namelen;
8261 CORE_ADDR start_addr;
8263 /* Find the starting address and name of the function containing the PC. */
8264 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
8266 /* Trampoline 'bx reg' doesn't belong to any function. Do the
8267 check here. */
8268 start_addr = arm_skip_bx_reg (frame, pc);
8269 if (start_addr != 0)
8270 return start_addr;
8272 return 0;
8275 /* If PC is in a Thumb call or return stub, return the address of the
8276 target PC, which is in a register. The thunk functions are called
8277 _call_via_xx, where xx is the register name. The possible names
8278 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8279 functions, named __ARM_call_via_r[0-7]. */
8280 if (startswith (name, "_call_via_")
8281 || startswith (name, "__ARM_call_via_"))
8283 /* Use the name suffix to determine which register contains the
8284 target PC. */
8285 static char *table[15] =
8286 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8287 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8289 int regno;
8290 int offset = strlen (name) - 2;
8292 for (regno = 0; regno <= 14; regno++)
8293 if (strcmp (&name[offset], table[regno]) == 0)
8294 return get_frame_register_unsigned (frame, regno);
8297 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8298 non-interworking calls to foo. We could decode the stubs
8299 to find the target but it's easier to use the symbol table. */
8300 namelen = strlen (name);
8301 if (name[0] == '_' && name[1] == '_'
8302 && ((namelen > 2 + strlen ("_from_thumb")
8303 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
8304 || (namelen > 2 + strlen ("_from_arm")
8305 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
8307 char *target_name;
8308 int target_len = namelen - 2;
8309 struct bound_minimal_symbol minsym;
8310 struct objfile *objfile;
8311 struct obj_section *sec;
8313 if (name[namelen - 1] == 'b')
8314 target_len -= strlen ("_from_thumb");
8315 else
8316 target_len -= strlen ("_from_arm");
8318 target_name = (char *) alloca (target_len + 1);
8319 memcpy (target_name, name + 2, target_len);
8320 target_name[target_len] = '\0';
8322 sec = find_pc_section (pc);
8323 objfile = (sec == NULL) ? NULL : sec->objfile;
8324 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
8325 if (minsym.minsym != NULL)
8326 return BMSYMBOL_VALUE_ADDRESS (minsym);
8327 else
8328 return 0;
8331 return 0; /* not a stub */
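/* Worked example (not part of the original file; the symbol name is
   made up) of the suffix stripping done above for GNU ld interworking
   stubs.  */
#if 0
static void
example_strip_interworking_stub_name (void)
{
  const char *name = "__foo_from_thumb";
  int namelen = strlen (name);
  int target_len = namelen - 2 - strlen ("_from_thumb");

  /* TARGET_LEN is 3; copying NAME + 2 for that many bytes yields
     "foo", which is then looked up with lookup_minimal_symbol.  */
  gdb_assert (target_len == 3);
}
#endif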
8334 static void
8335 set_arm_command (char *args, int from_tty)
8337 printf_unfiltered (_("\
8338 \"set arm\" must be followed by an appropriate subcommand.\n"));
8339 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
8342 static void
8343 show_arm_command (char *args, int from_tty)
8345 cmd_show_list (showarmcmdlist, from_tty, "");
8348 static void
8349 arm_update_current_architecture (void)
8351 struct gdbarch_info info;
8353 /* If the current architecture is not ARM, we have nothing to do. */
8354 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
8355 return;
8357 /* Update the architecture. */
8358 gdbarch_info_init (&info);
8360 if (!gdbarch_update_p (info))
8361 internal_error (__FILE__, __LINE__, _("could not update architecture"));
8364 static void
8365 set_fp_model_sfunc (char *args, int from_tty,
8366 struct cmd_list_element *c)
8368 int fp_model;
8370 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
8371 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
8373 arm_fp_model = (enum arm_float_model) fp_model;
8374 break;
8377 if (fp_model == ARM_FLOAT_LAST)
8378 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
8379 current_fp_model);
8381 arm_update_current_architecture ();
8384 static void
8385 show_fp_model (struct ui_file *file, int from_tty,
8386 struct cmd_list_element *c, const char *value)
8388 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8390 if (arm_fp_model == ARM_FLOAT_AUTO
8391 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8392 fprintf_filtered (file, _("\
8393 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8394 fp_model_strings[tdep->fp_model]);
8395 else
8396 fprintf_filtered (file, _("\
8397 The current ARM floating point model is \"%s\".\n"),
8398 fp_model_strings[arm_fp_model]);
8401 static void
8402 arm_set_abi (char *args, int from_tty,
8403 struct cmd_list_element *c)
8405 int arm_abi;
8407 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
8408 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
8410 arm_abi_global = (enum arm_abi_kind) arm_abi;
8411 break;
8414 if (arm_abi == ARM_ABI_LAST)
8415 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
8416 arm_abi_string);
8418 arm_update_current_architecture ();
8421 static void
8422 arm_show_abi (struct ui_file *file, int from_tty,
8423 struct cmd_list_element *c, const char *value)
8425 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
8427 if (arm_abi_global == ARM_ABI_AUTO
8428 && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
8429 fprintf_filtered (file, _("\
8430 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8431 arm_abi_strings[tdep->arm_abi]);
8432 else
8433 fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
8434 arm_abi_string);
8437 static void
8438 arm_show_fallback_mode (struct ui_file *file, int from_tty,
8439 struct cmd_list_element *c, const char *value)
8441 fprintf_filtered (file,
8442 _("The current execution mode assumed "
8443 "(when symbols are unavailable) is \"%s\".\n"),
8444 arm_fallback_mode_string);
8447 static void
8448 arm_show_force_mode (struct ui_file *file, int from_tty,
8449 struct cmd_list_element *c, const char *value)
8451 fprintf_filtered (file,
8452 _("The current execution mode assumed "
8453 "(even when symbols are available) is \"%s\".\n"),
8454 arm_force_mode_string);
8457 /* If the user changes the register disassembly style used for info
8458 register and other commands, we have to also switch the style used
8459 in opcodes for disassembly output. This function is run in the "set
8460 arm disassembly" command, and does that. */
8462 static void
8463 set_disassembly_style_sfunc (char *args, int from_tty,
8464 struct cmd_list_element *c)
8466 set_disassembly_style ();
8469 /* Return the ARM register name corresponding to register I. */
8470 static const char *
8471 arm_register_name (struct gdbarch *gdbarch, int i)
8473 const int num_regs = gdbarch_num_regs (gdbarch);
8475 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
8476 && i >= num_regs && i < num_regs + 32)
8478 static const char *const vfp_pseudo_names[] = {
8479 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8480 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8481 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8482 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8485 return vfp_pseudo_names[i - num_regs];
8488 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
8489 && i >= num_regs + 32 && i < num_regs + 32 + 16)
8491 static const char *const neon_pseudo_names[] = {
8492 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8493 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8496 return neon_pseudo_names[i - num_regs - 32];
8499 if (i >= ARRAY_SIZE (arm_register_names))
8500 /* These registers are only supported on targets which supply
8501 an XML description. */
8502 return "";
8504 return arm_register_names[i];
8507 static void
8508 set_disassembly_style (void)
8510 int current;
8512 /* Find the style that the user wants. */
8513 for (current = 0; current < num_disassembly_options; current++)
8514 if (disassembly_style == valid_disassembly_styles[current])
8515 break;
8516 gdb_assert (current < num_disassembly_options);
8518 /* Synchronize the disassembler. */
8519 set_arm_regname_option (current);
8522 /* Test whether the COFF symbol-specific value corresponds to a Thumb
8523 function. */
8525 static int
8526 coff_sym_is_thumb (int val)
8528 return (val == C_THUMBEXT
8529 || val == C_THUMBSTAT
8530 || val == C_THUMBEXTFUNC
8531 || val == C_THUMBSTATFUNC
8532 || val == C_THUMBLABEL);
8535 /* arm_coff_make_msymbol_special()
8536 arm_elf_make_msymbol_special()
8538 These functions test whether the COFF or ELF symbol corresponds to
8539 an address in thumb code, and set a "special" bit in a minimal
8540 symbol to indicate that it does. */
8542 static void
8543 arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
8545 elf_symbol_type *elfsym = (elf_symbol_type *) sym;
8547 if (ARM_GET_SYM_BRANCH_TYPE (elfsym->internal_elf_sym.st_target_internal)
8548 == ST_BRANCH_TO_THUMB)
8549 MSYMBOL_SET_SPECIAL (msym);
8552 static void
8553 arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
8555 if (coff_sym_is_thumb (val))
8556 MSYMBOL_SET_SPECIAL (msym);
8559 static void
8560 arm_objfile_data_free (struct objfile *objfile, void *arg)
8562 struct arm_per_objfile *data = (struct arm_per_objfile *) arg;
8563 unsigned int i;
8565 for (i = 0; i < objfile->obfd->section_count; i++)
8566 VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
8569 static void
8570 arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
8571 asymbol *sym)
8573 const char *name = bfd_asymbol_name (sym);
8574 struct arm_per_objfile *data;
8575 VEC(arm_mapping_symbol_s) **map_p;
8576 struct arm_mapping_symbol new_map_sym;
8578 gdb_assert (name[0] == '$');
8579 if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
8580 return;
8582 data = (struct arm_per_objfile *) objfile_data (objfile,
8583 arm_objfile_data_key);
8584 if (data == NULL)
8586 data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
8587 struct arm_per_objfile);
8588 set_objfile_data (objfile, arm_objfile_data_key, data);
8589 data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
8590 objfile->obfd->section_count,
8591 VEC(arm_mapping_symbol_s) *);
8593 map_p = &data->section_maps[bfd_get_section (sym)->index];
8595 new_map_sym.value = sym->value;
8596 new_map_sym.type = name[1];
8598 /* Assume that most mapping symbols appear in order of increasing
8599 value. If they were randomly distributed, it would be faster to
8600 always push here and then sort at first use. */
8601 if (!VEC_empty (arm_mapping_symbol_s, *map_p))
8603 struct arm_mapping_symbol *prev_map_sym;
8605 prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
8606 if (prev_map_sym->value >= sym->value)
8608 unsigned int idx;
8609 idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
8610 arm_compare_mapping_symbols);
8611 VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
8612 return;
8616 VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
8619 static void
8620 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
8622 struct gdbarch *gdbarch = get_regcache_arch (regcache);
8623 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
8625 /* If necessary, set the T bit. */
8626 if (arm_apcs_32)
8628 ULONGEST val, t_bit;
8629 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
8630 t_bit = arm_psr_thumb_bit (gdbarch);
8631 if (arm_pc_is_thumb (gdbarch, pc))
8632 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8633 val | t_bit);
8634 else
8635 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
8636 val & ~t_bit);
8640 /* Read the contents of a NEON quad register, by reading from two
8641 double registers. This is used to implement the quad pseudo
8642 registers, and for argument passing in case the quad registers are
8643 missing; vectors are passed in quad registers when using the VFP
8644 ABI, even if a NEON unit is not present. REGNUM is the index of
8645 the quad register, in [0, 15]. */
8647 static enum register_status
8648 arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
8649 int regnum, gdb_byte *buf)
8651 char name_buf[4];
8652 gdb_byte reg_buf[8];
8653 int offset, double_regnum;
8654 enum register_status status;
8656 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8657 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8658 strlen (name_buf));
8660 /* d0 is always the least significant half of q0. */
8661 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8662 offset = 8;
8663 else
8664 offset = 0;
8666 status = regcache_raw_read (regcache, double_regnum, reg_buf);
8667 if (status != REG_VALID)
8668 return status;
8669 memcpy (buf + offset, reg_buf, 8);
8671 offset = 8 - offset;
8672 status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
8673 if (status != REG_VALID)
8674 return status;
8675 memcpy (buf + offset, reg_buf, 8);
8677 return REG_VALID;
8680 static enum register_status
8681 arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
8682 int regnum, gdb_byte *buf)
8684 const int num_regs = gdbarch_num_regs (gdbarch);
8685 char name_buf[4];
8686 gdb_byte reg_buf[8];
8687 int offset, double_regnum;
8689 gdb_assert (regnum >= num_regs);
8690 regnum -= num_regs;
8692 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8693 /* Quad-precision register. */
8694 return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
8695 else
8697 enum register_status status;
8699 /* Single-precision register. */
8700 gdb_assert (regnum < 32);
8702 /* s0 is always the least significant half of d0. */
8703 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8704 offset = (regnum & 1) ? 0 : 4;
8705 else
8706 offset = (regnum & 1) ? 4 : 0;
8708 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8709 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8710 strlen (name_buf));
8712 status = regcache_raw_read (regcache, double_regnum, reg_buf);
8713 if (status == REG_VALID)
8714 memcpy (buf, reg_buf + offset, 4);
8715 return status;
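/* Illustrative sketch (not part of the original file), mirroring the
   arithmetic used by arm_pseudo_read above on a little-endian target:
   pseudo register sN lives in half of d(N/2), and pseudo register qN
   is the pair d(2N), d(2N+1).  */
#if 0
static void
example_locate_vfp_pseudo_s_reg (int n, int *double_regnum, int *byte_offset)
{
  *double_regnum = n >> 1;          /* sN is packed into d(N/2).  */
  *byte_offset = (n & 1) ? 4 : 0;   /* Odd sN in the high word (LE).  */
}
#endif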
8719 /* Store the contents of BUF to a NEON quad register, by writing to
8720 two double registers. This is used to implement the quad pseudo
8721 registers, and for argument passing in case the quad registers are
8722 missing; vectors are passed in quad registers when using the VFP
8723 ABI, even if a NEON unit is not present. REGNUM is the index
8724 of the quad register, in [0, 15]. */
8726 static void
8727 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
8728 int regnum, const gdb_byte *buf)
8730 char name_buf[4];
8731 int offset, double_regnum;
8733 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
8734 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8735 strlen (name_buf));
8737 /* d0 is always the least significant half of q0. */
8738 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8739 offset = 8;
8740 else
8741 offset = 0;
8743 regcache_raw_write (regcache, double_regnum, buf + offset);
8744 offset = 8 - offset;
8745 regcache_raw_write (regcache, double_regnum + 1, buf + offset);
8748 static void
8749 arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
8750 int regnum, const gdb_byte *buf)
8752 const int num_regs = gdbarch_num_regs (gdbarch);
8753 char name_buf[4];
8754 gdb_byte reg_buf[8];
8755 int offset, double_regnum;
8757 gdb_assert (regnum >= num_regs);
8758 regnum -= num_regs;
8760 if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
8761 /* Quad-precision register. */
8762 arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
8763 else
8765 /* Single-precision register. */
8766 gdb_assert (regnum < 32);
8768 /* s0 is always the least significant half of d0. */
8769 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
8770 offset = (regnum & 1) ? 0 : 4;
8771 else
8772 offset = (regnum & 1) ? 4 : 0;
8774 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
8775 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
8776 strlen (name_buf));
8778 regcache_raw_read (regcache, double_regnum, reg_buf);
8779 memcpy (reg_buf + offset, buf, 4);
8780 regcache_raw_write (regcache, double_regnum, reg_buf);
8784 static struct value *
8785 value_of_arm_user_reg (struct frame_info *frame, const void *baton)
8787 const int *reg_p = (const int *) baton;
8788 return value_of_register (*reg_p, frame);
8791 static enum gdb_osabi
8792 arm_elf_osabi_sniffer (bfd *abfd)
8794 unsigned int elfosabi;
8795 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
8797 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
8799 if (elfosabi == ELFOSABI_ARM)
8800 /* GNU tools use this value. Check note sections in this case,
8801 as well. */
8802 bfd_map_over_sections (abfd,
8803 generic_elf_osabi_sniff_abi_tag_sections,
8804 &osabi);
8806 /* Anything else will be handled by the generic ELF sniffer. */
8807 return osabi;
8810 static int
8811 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
8812 struct reggroup *group)
8814 /* The FPS register's type is INT, but it belongs to float_reggroup.
8815 Besides this, the FPS register belongs to save_reggroup,
8816 restore_reggroup, and all_reggroup, of course. */
8817 if (regnum == ARM_FPS_REGNUM)
8818 return (group == float_reggroup
8819 || group == save_reggroup
8820 || group == restore_reggroup
8821 || group == all_reggroup);
8822 else
8823 return default_register_reggroup_p (gdbarch, regnum, group);
8827 /* For backward-compatibility we allow two 'g' packet lengths with
8828 the remote protocol depending on whether FPA registers are
8829 supplied. M-profile targets do not have FPA registers, but some
8830 stubs already exist in the wild that use a 'g' packet that
8831 supplies them, albeit with dummy values. The packet format that
8832 includes FPA registers should be considered deprecated for
8833 M-profile targets. */
8835 static void
8836 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
8838 if (gdbarch_tdep (gdbarch)->is_m)
8840 /* If we know from the executable this is an M-profile target,
8841 cater for remote targets whose register set layout is the
8842 same as the FPA layout. */
8843 register_remote_g_packet_guess (gdbarch,
8844 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
8845 (16 * INT_REGISTER_SIZE)
8846 + (8 * FP_REGISTER_SIZE)
8847 + (2 * INT_REGISTER_SIZE),
8848 tdesc_arm_with_m_fpa_layout);
8850 /* The regular M-profile layout. */
8851 register_remote_g_packet_guess (gdbarch,
8852 /* r0-r12,sp,lr,pc; xpsr */
8853 (16 * INT_REGISTER_SIZE)
8854 + INT_REGISTER_SIZE,
8855 tdesc_arm_with_m);
8857 /* M-profile plus M4F VFP. */
8858 register_remote_g_packet_guess (gdbarch,
8859 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
8860 (16 * INT_REGISTER_SIZE)
8861 + (16 * VFP_REGISTER_SIZE)
8862 + (2 * INT_REGISTER_SIZE),
8863 tdesc_arm_with_m_vfp_d16);
8866 /* Otherwise we don't have a useful guess. */
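/* Illustrative arithmetic (not part of the original file) for the three
   'g' packet sizes guessed above.  Assuming the usual 4-byte integer,
   12-byte FPA and 8-byte VFP register sizes, they come to 168, 68 and
   200 bytes respectively.  */
#if 0
static int
example_m_profile_fpa_layout_g_packet_size (void)
{
  return (16 * INT_REGISTER_SIZE)	/* r0-r12, sp, lr, pc */
	 + (8 * FP_REGISTER_SIZE)	/* f0-f7 */
	 + (2 * INT_REGISTER_SIZE);	/* fps, xpsr */
}
#endif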
8869 /* Implement the code_of_frame_writable gdbarch method. */
8871 static int
8872 arm_code_of_frame_writable (struct gdbarch *gdbarch, struct frame_info *frame)
8874 if (gdbarch_tdep (gdbarch)->is_m
8875 && get_frame_type (frame) == SIGTRAMP_FRAME)
8877 /* M-profile exception frames return to some magic PCs, which
8878 aren't writable at all. */
8879 return 0;
8881 else
8882 return 1;
8886 /* Initialize the current architecture based on INFO. If possible,
8887 re-use an architecture from ARCHES, which is a list of
8888 architectures already created during this debugging session.
8890 Called e.g. at program startup, when reading a core file, and when
8891 reading a binary file. */
8893 static struct gdbarch *
8894 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
8896 struct gdbarch_tdep *tdep;
8897 struct gdbarch *gdbarch;
8898 struct gdbarch_list *best_arch;
8899 enum arm_abi_kind arm_abi = arm_abi_global;
8900 enum arm_float_model fp_model = arm_fp_model;
8901 struct tdesc_arch_data *tdesc_data = NULL;
8902 int i, is_m = 0;
8903 int vfp_register_count = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
8904 int have_wmmx_registers = 0;
8905 int have_neon = 0;
8906 int have_fpa_registers = 1;
8907 const struct target_desc *tdesc = info.target_desc;
8909 /* If we have an object to base this architecture on, try to determine
8910 its ABI. */
8912 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
8914 int ei_osabi, e_flags;
8916 switch (bfd_get_flavour (info.abfd))
8918 case bfd_target_aout_flavour:
8919 /* Assume it's an old APCS-style ABI. */
8920 arm_abi = ARM_ABI_APCS;
8921 break;
8923 case bfd_target_coff_flavour:
8924 /* Assume it's an old APCS-style ABI. */
8925 /* XXX WinCE? */
8926 arm_abi = ARM_ABI_APCS;
8927 break;
8929 case bfd_target_elf_flavour:
8930 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
8931 e_flags = elf_elfheader (info.abfd)->e_flags;
8933 if (ei_osabi == ELFOSABI_ARM)
8935 /* GNU tools used to use this value, but do not for EABI
8936 objects. There's nowhere to tag an EABI version
8937 anyway, so assume APCS. */
8938 arm_abi = ARM_ABI_APCS;
8940 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
8942 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
8943 int attr_arch, attr_profile;
8945 switch (eabi_ver)
8947 case EF_ARM_EABI_UNKNOWN:
8948 /* Assume GNU tools. */
8949 arm_abi = ARM_ABI_APCS;
8950 break;
8952 case EF_ARM_EABI_VER4:
8953 case EF_ARM_EABI_VER5:
8954 arm_abi = ARM_ABI_AAPCS;
8955 /* EABI binaries default to VFP float ordering.
8956 They may also contain build attributes that can
8957 be used to identify if the VFP argument-passing
8958 ABI is in use. */
8959 if (fp_model == ARM_FLOAT_AUTO)
8961 #ifdef HAVE_ELF
8962 switch (bfd_elf_get_obj_attr_int (info.abfd,
8963 OBJ_ATTR_PROC,
8964 Tag_ABI_VFP_args))
8966 case AEABI_VFP_args_base:
8967 /* "The user intended FP parameter/result
8968 passing to conform to AAPCS, base
8969 variant". */
8970 fp_model = ARM_FLOAT_SOFT_VFP;
8971 break;
8972 case AEABI_VFP_args_vfp:
8973 /* "The user intended FP parameter/result
8974 passing to conform to AAPCS, VFP
8975 variant". */
8976 fp_model = ARM_FLOAT_VFP;
8977 break;
8978 case AEABI_VFP_args_toolchain:
8979 /* "The user intended FP parameter/result
8980 passing to conform to tool chain-specific
8981 conventions" - we don't know any such
8982 conventions, so leave it as "auto". */
8983 break;
8984 case AEABI_VFP_args_compatible:
8985 /* "Code is compatible with both the base
8986 and VFP variants; the user did not permit
8987 non-variadic functions to pass FP
8988 parameters/results" - leave it as
8989 "auto". */
8990 break;
8991 default:
8992 /* Attribute value not mentioned in the
8993 November 2012 ABI, so leave it as
8994 "auto". */
8995 break;
8997 #else
8998 fp_model = ARM_FLOAT_SOFT_VFP;
8999 #endif
9001 break;
9003 default:
9004 /* Leave it as "auto". */
9005 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
9006 break;
9009 #ifdef HAVE_ELF
9010 /* Detect M-profile programs. This only works if the
9011 executable file includes build attributes; GCC does
9012 copy them to the executable, but e.g. RealView does
9013 not. */
9014 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
9015 Tag_CPU_arch);
9016 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
9017 OBJ_ATTR_PROC,
9018 Tag_CPU_arch_profile);
9019 /* GCC specifies the profile for v6-M; RealView only
9020 specifies the profile for architectures starting with
9021 V7 (as opposed to architectures with a tag
9022 numerically greater than TAG_CPU_ARCH_V7). */
9023 if (!tdesc_has_registers (tdesc)
9024 && (attr_arch == TAG_CPU_ARCH_V6_M
9025 || attr_arch == TAG_CPU_ARCH_V6S_M
9026 || attr_profile == 'M'))
9027 is_m = 1;
9028 #endif
9031 if (fp_model == ARM_FLOAT_AUTO)
9033 int e_flags = elf_elfheader (info.abfd)->e_flags;
9035 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
9037 case 0:
9038 /* Leave it as "auto". Strictly speaking this case
9039 means FPA, but almost nobody uses that now, and
9040 many toolchains fail to set the appropriate bits
9041 for the floating-point model they use. */
9042 break;
9043 case EF_ARM_SOFT_FLOAT:
9044 fp_model = ARM_FLOAT_SOFT_FPA;
9045 break;
9046 case EF_ARM_VFP_FLOAT:
9047 fp_model = ARM_FLOAT_VFP;
9048 break;
9049 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
9050 fp_model = ARM_FLOAT_SOFT_VFP;
9051 break;
9055 if (e_flags & EF_ARM_BE8)
9056 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
9058 break;
9060 default:
9061 /* Leave it as "auto". */
9062 break;
9066 /* Check any target description for validity. */
9067 if (tdesc_has_registers (tdesc))
9069 /* For most registers we require GDB's default names; but also allow
9070 the numeric names for sp / lr / pc, as a convenience. */
9071 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
9072 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
9073 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
9075 const struct tdesc_feature *feature;
9076 int valid_p;
9078 feature = tdesc_find_feature (tdesc,
9079 "org.gnu.gdb.arm.core");
9080 if (feature == NULL)
9082 feature = tdesc_find_feature (tdesc,
9083 "org.gnu.gdb.arm.m-profile");
9084 if (feature == NULL)
9085 return NULL;
9086 else
9087 is_m = 1;
9090 tdesc_data = tdesc_data_alloc ();
9092 valid_p = 1;
9093 for (i = 0; i < ARM_SP_REGNUM; i++)
9094 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9095 arm_register_names[i]);
9096 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9097 ARM_SP_REGNUM,
9098 arm_sp_names);
9099 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9100 ARM_LR_REGNUM,
9101 arm_lr_names);
9102 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
9103 ARM_PC_REGNUM,
9104 arm_pc_names);
9105 if (is_m)
9106 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9107 ARM_PS_REGNUM, "xpsr");
9108 else
9109 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9110 ARM_PS_REGNUM, "cpsr");
9112 if (!valid_p)
9114 tdesc_data_cleanup (tdesc_data);
9115 return NULL;
9118 feature = tdesc_find_feature (tdesc,
9119 "org.gnu.gdb.arm.fpa");
9120 if (feature != NULL)
9122 valid_p = 1;
9123 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
9124 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
9125 arm_register_names[i]);
9126 if (!valid_p)
9128 tdesc_data_cleanup (tdesc_data);
9129 return NULL;
9132 else
9133 have_fpa_registers = 0;
9135 feature = tdesc_find_feature (tdesc,
9136 "org.gnu.gdb.xscale.iwmmxt");
9137 if (feature != NULL)
9139 static const char *const iwmmxt_names[] = {
9140 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9141 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9142 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9143 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9146 valid_p = 1;
9147 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
9148 valid_p
9149 &= tdesc_numbered_register (feature, tdesc_data, i,
9150 iwmmxt_names[i - ARM_WR0_REGNUM]);
9152 /* Check for the control registers, but do not fail if they
9153 are missing. */
9154 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
9155 tdesc_numbered_register (feature, tdesc_data, i,
9156 iwmmxt_names[i - ARM_WR0_REGNUM]);
9158 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
9159 valid_p
9160 &= tdesc_numbered_register (feature, tdesc_data, i,
9161 iwmmxt_names[i - ARM_WR0_REGNUM]);
9163 if (!valid_p)
9165 tdesc_data_cleanup (tdesc_data);
9166 return NULL;
9169 have_wmmx_registers = 1;
9172 /* If we have a VFP unit, check whether the single precision registers
9173 are present. If not, then we will synthesize them as pseudo
9174 registers. */
9175 feature = tdesc_find_feature (tdesc,
9176 "org.gnu.gdb.arm.vfp");
9177 if (feature != NULL)
9179 static const char *const vfp_double_names[] = {
9180 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9181 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9182 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9183 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9186 /* Require the double precision registers. There must be either
9187 16 or 32. */
9188 valid_p = 1;
9189 for (i = 0; i < 32; i++)
9191 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9192 ARM_D0_REGNUM + i,
9193 vfp_double_names[i]);
9194 if (!valid_p)
9195 break;
9197 if (!valid_p && i == 16)
9198 valid_p = 1;
9200 /* Also require FPSCR. */
9201 valid_p &= tdesc_numbered_register (feature, tdesc_data,
9202 ARM_FPSCR_REGNUM, "fpscr");
9203 if (!valid_p)
9205 tdesc_data_cleanup (tdesc_data);
9206 return NULL;
9209 if (tdesc_unnumbered_register (feature, "s0") == 0)
9210 have_vfp_pseudos = 1;
9212 vfp_register_count = i;
9214 /* If we have VFP, also check for NEON. The architecture allows
9215 NEON without VFP (integer vector operations only), but GDB
9216 does not support that. */
9217 feature = tdesc_find_feature (tdesc,
9218 "org.gnu.gdb.arm.neon");
9219 if (feature != NULL)
9221 /* NEON requires 32 double-precision registers. */
9222 if (i != 32)
9224 tdesc_data_cleanup (tdesc_data);
9225 return NULL;
9228 /* If there are quad registers defined by the stub, use
9229 their type; otherwise (normally) provide them with
9230 the default type. */
9231 if (tdesc_unnumbered_register (feature, "q0") == 0)
9232 have_neon_pseudos = 1;
9234 have_neon = 1;
9239 /* If there is already a candidate, use it. */
9240 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
9241 best_arch != NULL;
9242 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
9244 if (arm_abi != ARM_ABI_AUTO
9245 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
9246 continue;
9248 if (fp_model != ARM_FLOAT_AUTO
9249 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
9250 continue;
9252 /* There are various other properties in tdep that we do not
9253 need to check here: those derived from a target description,
9254 since gdbarches with a different target description are
9255 automatically disqualified. */
9257 /* Do check is_m, though, since it might come from the binary. */
9258 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
9259 continue;
9261 /* Found a match. */
9262 break;
9265 if (best_arch != NULL)
9267 if (tdesc_data != NULL)
9268 tdesc_data_cleanup (tdesc_data);
9269 return best_arch->gdbarch;
9272 tdep = XCNEW (struct gdbarch_tdep);
9273 gdbarch = gdbarch_alloc (&info, tdep);
9275 /* Record additional information about the architecture we are defining.
9276 These are gdbarch discriminators, like the OSABI. */
9277 tdep->arm_abi = arm_abi;
9278 tdep->fp_model = fp_model;
9279 tdep->is_m = is_m;
9280 tdep->have_fpa_registers = have_fpa_registers;
9281 tdep->have_wmmx_registers = have_wmmx_registers;
9282 gdb_assert (vfp_register_count == 0
9283 || vfp_register_count == 16
9284 || vfp_register_count == 32);
9285 tdep->vfp_register_count = vfp_register_count;
9286 tdep->have_vfp_pseudos = have_vfp_pseudos;
9287 tdep->have_neon_pseudos = have_neon_pseudos;
9288 tdep->have_neon = have_neon;
9290 arm_register_g_packet_guesses (gdbarch);
9292 /* Breakpoints. */
9293 switch (info.byte_order_for_code)
9295 case BFD_ENDIAN_BIG:
9296 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
9297 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
9298 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
9299 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
9301 break;
9303 case BFD_ENDIAN_LITTLE:
9304 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
9305 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
9306 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
9307 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
9309 break;
9311 default:
9312 internal_error (__FILE__, __LINE__,
9313 _("arm_gdbarch_init: bad byte order for float format"));
9316 /* On ARM targets char defaults to unsigned. */
9317 set_gdbarch_char_signed (gdbarch, 0);
9319 /* Note: for displaced stepping, this includes the breakpoint, and one word
9320 of additional scratch space. This setting isn't used for anything besides
9321 displaced stepping at present. */
9322 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
9324 /* This should be low enough for everything. */
9325 tdep->lowest_pc = 0x20;
9326 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
9328 /* The default, for both APCS and AAPCS, is to return small
9329 structures in registers. */
9330 tdep->struct_return = reg_struct_return;
9332 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
9333 set_gdbarch_frame_align (gdbarch, arm_frame_align);
9335 if (is_m)
9336 set_gdbarch_code_of_frame_writable (gdbarch, arm_code_of_frame_writable);
9338 set_gdbarch_write_pc (gdbarch, arm_write_pc);
9340 /* Frame handling. */
9341 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
9342 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
9343 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
9345 frame_base_set_default (gdbarch, &arm_normal_base);
9347 /* Address manipulation. */
9348 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
9350 /* Advance PC across function entry code. */
9351 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
9353 /* Detect whether PC is at a point where the stack has been destroyed. */
9354 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
9356 /* Skip trampolines. */
9357 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
9359 /* The stack grows downward. */
9360 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
9362 /* Breakpoint manipulation. */
9363 set_gdbarch_breakpoint_from_pc (gdbarch, arm_breakpoint_from_pc);
9364 set_gdbarch_remote_breakpoint_from_pc (gdbarch,
9365 arm_remote_breakpoint_from_pc);
9367 /* Information about registers, etc. */
9368 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
9369 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
9370 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
9371 set_gdbarch_register_type (gdbarch, arm_register_type);
9372 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
9374 /* This "info float" is FPA-specific. Use the generic version if we
9375 do not have FPA. */
9376 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
9377 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
9379 /* Internal <-> external register number maps. */
9380 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
9381 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
9383 set_gdbarch_register_name (gdbarch, arm_register_name);
9385 /* Returning results. */
9386 set_gdbarch_return_value (gdbarch, arm_return_value);
9388 /* Disassembly. */
9389 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
9391 /* Minsymbol frobbing. */
9392 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
9393 set_gdbarch_coff_make_msymbol_special (gdbarch,
9394 arm_coff_make_msymbol_special);
9395 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
9397 /* Thumb-2 IT block support. */
9398 set_gdbarch_adjust_breakpoint_address (gdbarch,
9399 arm_adjust_breakpoint_address);
9401 /* Virtual tables. */
9402 set_gdbarch_vbit_in_delta (gdbarch, 1);
9404 /* Hook in the ABI-specific overrides, if they have been registered. */
9405 gdbarch_init_osabi (info, gdbarch);
9407 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
9409 /* Add some default predicates. */
9410 if (is_m)
9411 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
9412 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
9413 dwarf2_append_unwinders (gdbarch);
9414 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
9415 frame_unwind_append_unwinder (gdbarch, &arm_epilogue_frame_unwind);
9416 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
9418 /* Now that we have tuned the configuration, set a few final things,
9419 based on what the OS ABI has told us. */
9421 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9422 binaries are always marked. */
9423 if (tdep->arm_abi == ARM_ABI_AUTO)
9424 tdep->arm_abi = ARM_ABI_APCS;
9426 /* Watchpoints are not steppable. */
9427 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
9429 /* We used to default to FPA for generic ARM, but almost nobody
9430 uses that now, and we now provide a way for the user to force
9431 the model. So default to the most useful variant. */
9432 if (tdep->fp_model == ARM_FLOAT_AUTO)
9433 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
9435 if (tdep->jb_pc >= 0)
9436 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
9438 /* Floating point sizes and format. */
9439 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
9440 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
9442 set_gdbarch_double_format
9443 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9444 set_gdbarch_long_double_format
9445 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
9447 else
9449 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
9450 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
9453 if (have_vfp_pseudos)
9455 /* NOTE: These are the only pseudo registers used by
9456 the ARM target at the moment. If more are added, a
9457 little more care in numbering will be needed. */
9459 int num_pseudos = 32;
9460 if (have_neon_pseudos)
9461 num_pseudos += 16;
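/* The 32 pseudo registers below are the single-precision S0-S31 views
   of the VFP D registers; the additional 16 used when NEON is present
   are the quad Q0-Q15 views.  */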
9462 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
9463 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
9464 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
9467 if (tdesc_data)
9469 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
9471 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
9473 /* Override tdesc_register_type to adjust the types of VFP
9474 registers for NEON. */
9475 set_gdbarch_register_type (gdbarch, arm_register_type);
9478 /* Add standard register aliases. We add aliases even for those
9479 names which are used by the current architecture - it's simpler,
9480 and does no harm, since nothing ever lists user registers. */
9481 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
9482 user_reg_add (gdbarch, arm_register_aliases[i].name,
9483 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
9485 return gdbarch;
9488 static void
9489 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
9491 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9493 if (tdep == NULL)
9494 return;
9496 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
9497 (unsigned long) tdep->lowest_pc);
9500 extern initialize_file_ftype _initialize_arm_tdep; /* -Wmissing-prototypes */
9502 void
9503 _initialize_arm_tdep (void)
9505 struct ui_file *stb;
9506 long length;
9507 const char *setname;
9508 const char *setdesc;
9509 const char *const *regnames;
9510 int i;
9511 static char *helptext;
9512 char regdesc[1024], *rdptr = regdesc;
9513 size_t rest = sizeof (regdesc);
9515 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
9517 arm_objfile_data_key
9518 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
9520 /* Add ourselves to objfile event chain. */
9521 observer_attach_new_objfile (arm_exidx_new_objfile);
9522 arm_exidx_data_key
9523 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
9525 /* Register an ELF OS ABI sniffer for ARM binaries. */
9526 gdbarch_register_osabi_sniffer (bfd_arch_arm,
9527 bfd_target_elf_flavour,
9528 arm_elf_osabi_sniffer);
9530 /* Initialize the standard target descriptions. */
9531 initialize_tdesc_arm_with_m ();
9532 initialize_tdesc_arm_with_m_fpa_layout ();
9533 initialize_tdesc_arm_with_m_vfp_d16 ();
9534 initialize_tdesc_arm_with_iwmmxt ();
9535 initialize_tdesc_arm_with_vfpv2 ();
9536 initialize_tdesc_arm_with_vfpv3 ();
9537 initialize_tdesc_arm_with_neon ();
9539 /* Get the number of possible sets of register names defined in opcodes. */
9540 num_disassembly_options = get_arm_regname_num_options ();
9542 /* Add root prefix command for all "set arm"/"show arm" commands. */
9543 add_prefix_cmd ("arm", no_class, set_arm_command,
9544 _("Various ARM-specific commands."),
9545 &setarmcmdlist, "set arm ", 0, &setlist);
9547 add_prefix_cmd ("arm", no_class, show_arm_command,
9548 _("Various ARM-specific commands."),
9549 &showarmcmdlist, "show arm ", 0, &showlist);
9551 /* Sync the opcode insn printer with our register viewer. */
9552 parse_arm_disassembler_option ("reg-names-std");
9554 /* Initialize the array that will be passed to
9555 add_setshow_enum_cmd(). */
9556 valid_disassembly_styles = XNEWVEC (const char *,
9557 num_disassembly_options + 1);
9558 for (i = 0; i < num_disassembly_options; i++)
9560 get_arm_regnames (i, &setname, &setdesc, &regnames);
9561 valid_disassembly_styles[i] = setname;
9562 length = snprintf (rdptr, rest, "%s - %s\n", setname, setdesc);
9563 rdptr += length;
9564 rest -= length;
9565 /* When we find the default names, tell the disassembler to use
9566 them. */
9567 if (!strcmp (setname, "std"))
9569 disassembly_style = setname;
9570 set_arm_regname_option (i);
9573 /* Mark the end of valid options. */
9574 valid_disassembly_styles[num_disassembly_options] = NULL;
9576 /* Create the help text. */
9577 stb = mem_fileopen ();
9578 fprintf_unfiltered (stb, "%s%s%s",
9579 _("The valid values are:\n"),
9580 regdesc,
9581 _("The default is \"std\"."));
9582 helptext = ui_file_xstrdup (stb, NULL);
9583 ui_file_delete (stb);
9585 add_setshow_enum_cmd("disassembler", no_class,
9586 valid_disassembly_styles, &disassembly_style,
9587 _("Set the disassembly style."),
9588 _("Show the disassembly style."),
9589 helptext,
9590 set_disassembly_style_sfunc,
9591 NULL, /* FIXME: i18n: The disassembly style is
9592 \"%s\". */
9593 &setarmcmdlist, &showarmcmdlist);
9595 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
9596 _("Set usage of ARM 32-bit mode."),
9597 _("Show usage of ARM 32-bit mode."),
9598 _("When off, a 26-bit PC will be used."),
9599 NULL,
9600 NULL, /* FIXME: i18n: Usage of ARM 32-bit
9601 mode is %s. */
9602 &setarmcmdlist, &showarmcmdlist);
9604 /* Add a command to allow the user to force the FPU model. */
9605 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
9606 _("Set the floating point type."),
9607 _("Show the floating point type."),
9608 _("auto - Determine the FP typefrom the OS-ABI.\n\
9609 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9610 fpa - FPA co-processor (GCC compiled).\n\
9611 softvfp - Software FP with pure-endian doubles.\n\
9612 vfp - VFP co-processor."),
9613 set_fp_model_sfunc, show_fp_model,
9614 &setarmcmdlist, &showarmcmdlist);
9616 /* Add a command to allow the user to force the ABI. */
9617 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
9618 _("Set the ABI."),
9619 _("Show the ABI."),
9620 NULL, arm_set_abi, arm_show_abi,
9621 &setarmcmdlist, &showarmcmdlist);
9623 /* Add two commands to allow the user to force the assumed
9624 execution mode. */
9625 add_setshow_enum_cmd ("fallback-mode", class_support,
9626 arm_mode_strings, &arm_fallback_mode_string,
9627 _("Set the mode assumed when symbols are unavailable."),
9628 _("Show the mode assumed when symbols are unavailable."),
9629 NULL, NULL, arm_show_fallback_mode,
9630 &setarmcmdlist, &showarmcmdlist);
9631 add_setshow_enum_cmd ("force-mode", class_support,
9632 arm_mode_strings, &arm_force_mode_string,
9633 _("Set the mode assumed even when symbols are available."),
9634 _("Show the mode assumed even when symbols are available."),
9635 NULL, NULL, arm_show_force_mode,
9636 &setarmcmdlist, &showarmcmdlist);
9638 /* Debugging flag. */
9639 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
9640 _("Set ARM debugging."),
9641 _("Show ARM debugging."),
9642 _("When on, arm-specific debugging is enabled."),
9643 NULL,
9644 NULL, /* FIXME: i18n: "ARM debugging is %s." */
9645 &setdebuglist, &showdebuglist);
9648 /* ARM-reversible process record data structures. */
9650 #define ARM_INSN_SIZE_BYTES 4
9651 #define THUMB_INSN_SIZE_BYTES 2
9652 #define THUMB2_INSN_SIZE_BYTES 4
9655 /* Position of the bit within a 32-bit ARM instruction
9656 that defines whether the instruction is a load or store. */
9657 #define INSN_S_L_BIT_NUM 20
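/* A set bit selects a load, a clear bit selects a store.  */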
9659 #define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
9660 do \
9662 unsigned int reg_len = LENGTH; \
9663 if (reg_len) \
9665 REGS = XNEWVEC (uint32_t, reg_len); \
9666 memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
9669 while (0)
9671 #define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
9672 do \
9674 unsigned int mem_len = LENGTH; \
9675 if (mem_len) \
9677 MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
9678 memcpy(&MEMS->len, &RECORD_BUF[0], \
9679 sizeof(struct arm_mem_r) * LENGTH); \
9682 while (0)
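/* Both helpers copy the caller's scratch RECORD_BUF into a freshly
   allocated array: REG_ALLOC expects LENGTH register numbers, while
   MEM_ALLOC expects LENGTH (length, address) pairs stored as
   consecutive uint32_t values.  */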
9684 /* Checks whether the insn has already been recorded, i.e. whether any register or memory records have been collected for it (boolean expression). */
9685 #define INSN_RECORDED(ARM_RECORD) \
9686 (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9688 /* ARM memory record structure. */
9689 struct arm_mem_r
9691 uint32_t len; /* Record length. */
9692 uint32_t addr; /* Memory address. */
9695 /* An ARM instruction record contains the opcode and execution
9696 state of the current insn (before entry to decode_insn ()),
9697 and the list of to-be-modified registers and
9698 memory blocks (on return from decode_insn ()). */
9700 typedef struct insn_decode_record_t
9702 struct gdbarch *gdbarch;
9703 struct regcache *regcache;
9704 CORE_ADDR this_addr; /* Address of the insn being decoded. */
9705 uint32_t arm_insn; /* Should accommodate thumb. */
9706 uint32_t cond; /* Condition code. */
9707 uint32_t opcode; /* Insn opcode. */
9708 uint32_t decode; /* Insn decode bits. */
9709 uint32_t mem_rec_count; /* No of mem records. */
9710 uint32_t reg_rec_count; /* No of reg records. */
9711 uint32_t *arm_regs; /* Registers to be saved for this record. */
9712 struct arm_mem_r *arm_mems; /* Memory to be saved for this record. */
9713 } insn_decode_record;
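/* A decode routine fills ARM_REGS with REG_REC_COUNT register numbers
   and ARM_MEMS with MEM_REC_COUNT (length, address) records describing
   everything the instruction may modify; the arrays are allocated with
   REG_ALLOC and MEM_ALLOC above.  */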
9716 /* Checks ARM SBZ and SBO mandatory fields. */
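/* For example, callers use sbo_sbz (insn, 13, 4, 1) to require that the
   four bits starting at bit 12 are ones, and pass 0 as the last argument
   to require that a field is all zeros.  */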
9718 static int
9719 sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
9721 uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));
9723 if (!len)
9724 return 1;
9726 if (!sbo)
9727 ones = ~ones;
9729 while (ones)
9731 if (!(ones & sbo))
9733 return 0;
9735 ones = ones >> 1;
9737 return 1;
9740 enum arm_record_result
9742 ARM_RECORD_SUCCESS = 0,
9743 ARM_RECORD_FAILURE = 1
9746 typedef enum
9748 ARM_RECORD_STRH=1,
9749 ARM_RECORD_STRD
9750 } arm_record_strx_t;
9752 typedef enum
9754 ARM_RECORD=1,
9755 THUMB_RECORD,
9756 THUMB2_RECORD
9757 } record_type_t;
9760 static int
9761 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
9762 uint32_t *record_buf_mem, arm_record_strx_t str_type)
9765 struct regcache *reg_cache = arm_insn_r->regcache;
9766 ULONGEST u_regval[2] = {0};
9768 uint32_t reg_src1 = 0, reg_src2 = 0;
9769 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
9771 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
9772 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
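/* OPCODE now holds bits 24:21 of the insn, i.e. the P, U, I and W
   flags that select the addressing mode of these misc (extra)
   load/store encodings.  */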
9774 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
9776 /* 1) Handle misc store, immediate offset. */
9777 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9778 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9779 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9780 regcache_raw_read_unsigned (reg_cache, reg_src1,
9781 &u_regval[0]);
9782 if (ARM_PC_REGNUM == reg_src1)
9784 /* If R15 was used as Rn, the value read is the current PC + 8. */
9785 u_regval[0] = u_regval[0] + 8;
9787 offset_8 = (immed_high << 4) | immed_low;
9788 /* Calculate target store address. */
9789 if (14 == arm_insn_r->opcode)
9791 tgt_mem_addr = u_regval[0] + offset_8;
9793 else
9795 tgt_mem_addr = u_regval[0] - offset_8;
9797 if (ARM_RECORD_STRH == str_type)
9799 record_buf_mem[0] = 2;
9800 record_buf_mem[1] = tgt_mem_addr;
9801 arm_insn_r->mem_rec_count = 1;
9803 else if (ARM_RECORD_STRD == str_type)
9805 record_buf_mem[0] = 4;
9806 record_buf_mem[1] = tgt_mem_addr;
9807 record_buf_mem[2] = 4;
9808 record_buf_mem[3] = tgt_mem_addr + 4;
9809 arm_insn_r->mem_rec_count = 2;
9812 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
9814 /* 2) Store, register offset. */
9815 /* Get Rm. */
9816 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9817 /* Get Rn. */
9818 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9819 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9820 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9821 if (15 == reg_src2)
9823 /* If R15 was used as Rn, the value read is the current PC + 8. */
9824 u_regval[0] = u_regval[0] + 8;
9826 /* Calculate target store address, Rn +/- Rm, register offset. */
9827 if (12 == arm_insn_r->opcode)
9829 tgt_mem_addr = u_regval[0] + u_regval[1];
9831 else
9833 tgt_mem_addr = u_regval[1] - u_regval[0];
9835 if (ARM_RECORD_STRH == str_type)
9837 record_buf_mem[0] = 2;
9838 record_buf_mem[1] = tgt_mem_addr;
9839 arm_insn_r->mem_rec_count = 1;
9841 else if (ARM_RECORD_STRD == str_type)
9843 record_buf_mem[0] = 4;
9844 record_buf_mem[1] = tgt_mem_addr;
9845 record_buf_mem[2] = 4;
9846 record_buf_mem[3] = tgt_mem_addr + 4;
9847 arm_insn_r->mem_rec_count = 2;
9850 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
9851 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9853 /* 3) Store, immediate pre-indexed. */
9854 /* 5) Store, immediate post-indexed. */
9855 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
9856 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
9857 offset_8 = (immed_high << 4) | immed_low;
9858 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
9859 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9860 /* Calculate target store address, Rn +/- Rm, register offset. */
9861 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
9863 tgt_mem_addr = u_regval[0] + offset_8;
9865 else
9867 tgt_mem_addr = u_regval[0] - offset_8;
9869 if (ARM_RECORD_STRH == str_type)
9871 record_buf_mem[0] = 2;
9872 record_buf_mem[1] = tgt_mem_addr;
9873 arm_insn_r->mem_rec_count = 1;
9875 else if (ARM_RECORD_STRD == str_type)
9877 record_buf_mem[0] = 4;
9878 record_buf_mem[1] = tgt_mem_addr;
9879 record_buf_mem[2] = 4;
9880 record_buf_mem[3] = tgt_mem_addr + 4;
9881 arm_insn_r->mem_rec_count = 2;
9883 /* Record Rn also as it changes. */
9884 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9885 arm_insn_r->reg_rec_count = 1;
9887 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
9888 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9890 /* 4) Store, register pre-indexed. */
9891 /* 6) Store, register post-indexed. */
9892 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
9893 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
9894 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
9895 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
9896 /* Calculate target store address, Rn +/- Rm, register offset. */
9897 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
9899 tgt_mem_addr = u_regval[0] + u_regval[1];
9901 else
9903 tgt_mem_addr = u_regval[1] - u_regval[0];
9905 if (ARM_RECORD_STRH == str_type)
9907 record_buf_mem[0] = 2;
9908 record_buf_mem[1] = tgt_mem_addr;
9909 arm_insn_r->mem_rec_count = 1;
9911 else if (ARM_RECORD_STRD == str_type)
9913 record_buf_mem[0] = 4;
9914 record_buf_mem[1] = tgt_mem_addr;
9915 record_buf_mem[2] = 4;
9916 record_buf_mem[3] = tgt_mem_addr + 4;
9917 arm_insn_r->mem_rec_count = 2;
9919 /* Record Rn also as it changes. */
9920 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
9921 arm_insn_r->reg_rec_count = 1;
9923 return 0;
9926 /* Handling ARM extension space insns. */
9928 static int
9929 arm_record_extension_space (insn_decode_record *arm_insn_r)
9931 uint32_t ret = 0; /* Return value: -1: record failure; 0: success. */
9932 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
9933 uint32_t record_buf[8], record_buf_mem[8];
9934 uint32_t reg_src1 = 0;
9935 struct regcache *reg_cache = arm_insn_r->regcache;
9936 ULONGEST u_regval = 0;
9938 gdb_assert (!INSN_RECORDED(arm_insn_r));
9939 /* Handle unconditional insn extension space. */
9941 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
9942 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9943 if (arm_insn_r->cond)
9945 /* PLD has no effect on architectural state, it just affects
9946 the caches. */
9947 if (5 == ((opcode1 & 0xE0) >> 5))
9949 /* BLX(1) */
9950 record_buf[0] = ARM_PS_REGNUM;
9951 record_buf[1] = ARM_LR_REGNUM;
9952 arm_insn_r->reg_rec_count = 2;
9954 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
9958 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
9959 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
9961 ret = -1;
9962 /* Undefined instruction on ARM V5; need to handle if later
9963 versions define it. */
9966 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
9967 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
9968 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
9970 /* Handle arithmetic insn extension space. */
9971 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
9972 && !INSN_RECORDED(arm_insn_r))
9974 /* Handle MLA(S) and MUL(S). */
9975 if (0 <= insn_op1 && 3 >= insn_op1)
9977 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
9978 record_buf[1] = ARM_PS_REGNUM;
9979 arm_insn_r->reg_rec_count = 2;
9981 else if (4 <= insn_op1 && 15 >= insn_op1)
9983 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
9984 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
9985 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
9986 record_buf[2] = ARM_PS_REGNUM;
9987 arm_insn_r->reg_rec_count = 3;
9991 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
9992 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
9993 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
9995 /* Handle control insn extension space. */
9997 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
9998 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
10000 if (!bit (arm_insn_r->arm_insn,25))
10002 if (!bits (arm_insn_r->arm_insn, 4, 7))
10004 if ((0 == insn_op1) || (2 == insn_op1))
10006 /* MRS. */
10007 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10008 arm_insn_r->reg_rec_count = 1;
10010 else if (1 == insn_op1)
10012 /* CPSR is going to be changed. */
10013 record_buf[0] = ARM_PS_REGNUM;
10014 arm_insn_r->reg_rec_count = 1;
10016 else if (3 == insn_op1)
10018 /* SPSR is going to be changed. */
10019 /* We need to get SPSR value, which is yet to be done. */
10020 return -1;
10023 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
10025 if (1 == insn_op1)
10027 /* BX. */
10028 record_buf[0] = ARM_PS_REGNUM;
10029 arm_insn_r->reg_rec_count = 1;
10031 else if (3 == insn_op1)
10033 /* CLZ. */
10034 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10035 arm_insn_r->reg_rec_count = 1;
10038 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
10040 /* BLX. */
10041 record_buf[0] = ARM_PS_REGNUM;
10042 record_buf[1] = ARM_LR_REGNUM;
10043 arm_insn_r->reg_rec_count = 2;
10045 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
10047 /* QADD, QSUB, QDADD, QDSUB */
10048 record_buf[0] = ARM_PS_REGNUM;
10049 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10050 arm_insn_r->reg_rec_count = 2;
10052 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
10054 /* BKPT. */
10055 record_buf[0] = ARM_PS_REGNUM;
10056 record_buf[1] = ARM_LR_REGNUM;
10057 arm_insn_r->reg_rec_count = 2;
10059 /* Save SPSR also; how? */
10060 return -1;
10062 else if (8 == bits (arm_insn_r->arm_insn, 4, 7)
10063 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
10064 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
10065 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
10068 if (0 == insn_op1 || 1 == insn_op1)
10070 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
10071 /* We don't optimize for SMULW<y>, where only Rd
10072 would need to be recorded. */
10073 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10074 record_buf[1] = ARM_PS_REGNUM;
10075 arm_insn_r->reg_rec_count = 2;
10077 else if (2 == insn_op1)
10079 /* SMLAL<x><y>. */
10080 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10081 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
10082 arm_insn_r->reg_rec_count = 2;
10084 else if (3 == insn_op1)
10086 /* SMUL<x><y>. */
10087 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10088 arm_insn_r->reg_rec_count = 1;
10092 else
10094 /* MSR : immediate form. */
10095 if (1 == insn_op1)
10098 /* CPSR is going to be changed. */
10098 record_buf[0] = ARM_PS_REGNUM;
10099 arm_insn_r->reg_rec_count = 1;
10101 else if (3 == insn_op1)
10103 /* SPSR is going to be changed. */
10104 /* We need to get SPSR value, which is yet to be done. */
10105 return -1;
10110 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10111 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
10112 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
10114 /* Handle load/store insn extension space. */
10116 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
10117 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
10118 && !INSN_RECORDED(arm_insn_r))
10120 /* SWP/SWPB. */
10121 if (0 == insn_op1)
10123 /* These insns change both registers and memory. */
10124 /* SWP or SWPB insn. */
10125 /* Get memory address given by Rn. */
10126 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10127 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
10128 /* SWP insn: swaps a word. */
10129 if (8 == arm_insn_r->opcode)
10131 record_buf_mem[0] = 4;
10133 else
10135 /* SWPB insn, swaps only byte. */
10136 record_buf_mem[0] = 1;
10138 record_buf_mem[1] = u_regval;
10139 arm_insn_r->mem_rec_count = 1;
10140 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10141 arm_insn_r->reg_rec_count = 1;
10143 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10145 /* STRH. */
10146 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10147 ARM_RECORD_STRH);
10149 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10151 /* LDRD. */
10152 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10153 record_buf[1] = record_buf[0] + 1;
10154 arm_insn_r->reg_rec_count = 2;
10156 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
10158 /* STRD. */
10159 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10160 ARM_RECORD_STRD);
10162 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
10164 /* LDRH, LDRSB, LDRSH. */
10165 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10166 arm_insn_r->reg_rec_count = 1;
10171 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
10172 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
10173 && !INSN_RECORDED(arm_insn_r))
10175 ret = -1;
10176 /* Handle coprocessor insn extension space. */
10179 /* To be done for ARMv5 and later; as of now we return -1. */
10180 if (-1 == ret)
10181 return ret;
10183 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10184 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10186 return ret;
10189 /* Handling opcode 000 insns. */
10191 static int
10192 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
10194 struct regcache *reg_cache = arm_insn_r->regcache;
10195 uint32_t record_buf[8], record_buf_mem[8];
10196 ULONGEST u_regval[2] = {0};
10198 uint32_t reg_src1 = 0, reg_dest = 0;
10199 uint32_t opcode1 = 0;
10201 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10202 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10203 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10205 /* Data processing insn /multiply insn. */
10206 if (9 == arm_insn_r->decode
10207 && ((4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10208 || (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)))
10210 /* Handle multiply instructions. */
10211 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
10212 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
10214 /* Handle MLA and MUL. */
10215 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10216 record_buf[1] = ARM_PS_REGNUM;
10217 arm_insn_r->reg_rec_count = 2;
10219 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
10221 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
10222 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10223 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10224 record_buf[2] = ARM_PS_REGNUM;
10225 arm_insn_r->reg_rec_count = 3;
10228 else if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10229 && (11 == arm_insn_r->decode || 13 == arm_insn_r->decode))
10231 /* Handle misc load insns; bit 20 (L) is set. */
10232 /* LDR insn is capable of branching: if MOV LR, PC is preceded
10233 by an LDR insn with R15 as Rn, the pair emulates a branch and
10234 link insn, and hence we need to save CPSR and PC as well.
10235 I am not sure this is the right place; the opcode = 010 LDR
10236 insn makes this happen if R15 was used. */
10238 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10239 if (15 != reg_dest)
10241 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10242 arm_insn_r->reg_rec_count = 1;
10244 else
10246 record_buf[0] = reg_dest;
10247 record_buf[1] = ARM_PS_REGNUM;
10248 arm_insn_r->reg_rec_count = 2;
10251 else if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10252 && sbo_sbz (arm_insn_r->arm_insn, 5, 12, 0)
10253 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10254 && 2 == bits (arm_insn_r->arm_insn, 20, 21))
10256 /* Handle MSR insn. */
10257 if (9 == arm_insn_r->opcode)
10260 /* CPSR is going to be changed. */
10260 record_buf[0] = ARM_PS_REGNUM;
10261 arm_insn_r->reg_rec_count = 1;
10263 else
10265 /* SPSR is going to be changed. */
10266 /* How to read SPSR value? */
10267 return -1;
10270 else if (9 == arm_insn_r->decode
10271 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10272 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10274 /* Handling SWP, SWPB. */
10275 /* These insns change both registers and memory. */
10276 /* SWP or SWPB insn. */
10278 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10279 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10280 /* SWP insn: swaps a word. */
10281 if (8 == arm_insn_r->opcode)
10283 record_buf_mem[0] = 4;
10285 else
10287 /* SWPB insn, swaps only byte. */
10288 record_buf_mem[0] = 1;
10290 record_buf_mem[1] = u_regval[0];
10291 arm_insn_r->mem_rec_count = 1;
10292 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10293 arm_insn_r->reg_rec_count = 1;
10295 else if (3 == arm_insn_r->decode && 0x12 == opcode1
10296 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10298 /* Handle BLX, branch and link/exchange. */
10299 if (9 == arm_insn_r->opcode)
10301 /* The branch state is chosen by bit[0] of Rm, which sets the T bit of CPSR,
10302 and R14 stores the return address. */
10303 record_buf[0] = ARM_PS_REGNUM;
10304 record_buf[1] = ARM_LR_REGNUM;
10305 arm_insn_r->reg_rec_count = 2;
10308 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
10310 /* Handle enhanced software breakpoint insn, BKPT. */
10311 /* CPSR is changed so that execution continues in ARM state with
10312 normal interrupts disabled, entering Abort mode. */
10313 /* The PC is set according to the high vector configuration. */
10314 /* If the user hits a breakpoint and then reverses execution, we
10315 need to go back with the previous CPSR and
10316 Program Counter. */
10317 record_buf[0] = ARM_PS_REGNUM;
10318 record_buf[1] = ARM_LR_REGNUM;
10319 arm_insn_r->reg_rec_count = 2;
10321 /* Save SPSR also; how? */
10322 return -1;
10324 else if (11 == arm_insn_r->decode
10325 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10327 /* Handle enhanced store insns and DSP insns (e.g. LDRD). */
10329 /* Handle str(x) insn */
10330 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
10331 ARM_RECORD_STRH);
10333 else if (1 == arm_insn_r->decode && 0x12 == opcode1
10334 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
10336 /* Handle BX, branch and exchange. */
10337 /* The branch state is chosen by bit[0] of Rm, which sets the T bit of CPSR. */
10338 record_buf[0] = ARM_PS_REGNUM;
10339 arm_insn_r->reg_rec_count = 1;
10341 else if (1 == arm_insn_r->decode && 0x16 == opcode1
10342 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
10343 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
10345 /* Count leading zeros: CLZ. */
10346 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10347 arm_insn_r->reg_rec_count = 1;
10349 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
10350 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10351 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
10352 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)
10355 /* Handle MRS insn. */
10356 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10357 arm_insn_r->reg_rec_count = 1;
10359 else if (arm_insn_r->opcode <= 15)
10361 /* Normal data processing insns. */
10362 /* In all 11 shifter-operand addressing modes, the insn modifies the
10363 destination register, which is specified by bits 12-15. */
10364 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10365 record_buf[1] = ARM_PS_REGNUM;
10366 arm_insn_r->reg_rec_count = 2;
10368 else
10370 return -1;
10373 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10374 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10375 return 0;
10378 /* Handling opcode 001 insns. */
10380 static int
10381 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
10383 uint32_t record_buf[8], record_buf_mem[8];
10385 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10386 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10388 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
10389 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
10390 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
10393 /* Handle MSR insn. */
10394 if (9 == arm_insn_r->opcode)
10397 /* CPSR is going to be changed. */
10397 record_buf[0] = ARM_PS_REGNUM;
10398 arm_insn_r->reg_rec_count = 1;
10400 else
10402 /* SPSR is going to be changed. */
10405 else if (arm_insn_r->opcode <= 15)
10407 /* Normal data processing insns. */
10408 /* In all 11 shifter-operand addressing modes, the insn modifies the
10409 destination register, which is specified by bits 12-15. */
10410 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10411 record_buf[1] = ARM_PS_REGNUM;
10412 arm_insn_r->reg_rec_count = 2;
10414 else
10416 return -1;
10419 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10420 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10421 return 0;
10424 static int
10425 arm_record_media (insn_decode_record *arm_insn_r)
10427 uint32_t record_buf[8];
10429 switch (bits (arm_insn_r->arm_insn, 22, 24))
10431 case 0:
10432 /* Parallel addition and subtraction, signed */
10433 case 1:
10434 /* Parallel addition and subtraction, unsigned */
10435 case 2:
10436 case 3:
10437 /* Packing, unpacking, saturation and reversal */
10439 int rd = bits (arm_insn_r->arm_insn, 12, 15);
10441 record_buf[arm_insn_r->reg_rec_count++] = rd;
10443 break;
10445 case 4:
10446 case 5:
10447 /* Signed multiplies */
10449 int rd = bits (arm_insn_r->arm_insn, 16, 19);
10450 unsigned int op1 = bits (arm_insn_r->arm_insn, 20, 22);
10452 record_buf[arm_insn_r->reg_rec_count++] = rd;
10453 if (op1 == 0x0)
10454 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10455 else if (op1 == 0x4)
10456 record_buf[arm_insn_r->reg_rec_count++]
10457 = bits (arm_insn_r->arm_insn, 12, 15);
10459 break;
10461 case 6:
10463 if (bit (arm_insn_r->arm_insn, 21)
10464 && bits (arm_insn_r->arm_insn, 5, 6) == 0x2)
10466 /* SBFX */
10467 record_buf[arm_insn_r->reg_rec_count++]
10468 = bits (arm_insn_r->arm_insn, 12, 15);
10470 else if (bits (arm_insn_r->arm_insn, 20, 21) == 0x0
10471 && bits (arm_insn_r->arm_insn, 5, 7) == 0x0)
10473 /* USAD8 and USADA8 */
10474 record_buf[arm_insn_r->reg_rec_count++]
10475 = bits (arm_insn_r->arm_insn, 16, 19);
10478 break;
10480 case 7:
10482 if (bits (arm_insn_r->arm_insn, 20, 21) == 0x3
10483 && bits (arm_insn_r->arm_insn, 5, 7) == 0x7)
10485 /* Permanently UNDEFINED */
10486 return -1;
10488 else
10490 /* BFC, BFI and UBFX */
10491 record_buf[arm_insn_r->reg_rec_count++]
10492 = bits (arm_insn_r->arm_insn, 12, 15);
10495 break;
10497 default:
10498 return -1;
10501 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10503 return 0;
10506 /* Handle ARM mode instructions with opcode 010. */
10508 static int
10509 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
10511 struct regcache *reg_cache = arm_insn_r->regcache;
10513 uint32_t reg_base, reg_dest;
10514 uint32_t offset_12, tgt_mem_addr;
10515 uint32_t record_buf[8], record_buf_mem[8];
10516 unsigned char wback;
10517 ULONGEST u_regval;
10519 /* Calculate wback. */
10520 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
10521 || (bit (arm_insn_r->arm_insn, 21) == 1);
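/* P == 0 (bit 24 clear) selects post-indexed addressing and W == 1
   (bit 21 set) requests writeback; in either case the base register
   is updated.  */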
10523 arm_insn_r->reg_rec_count = 0;
10524 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10526 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10528 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10529 and LDRT. */
10531 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10532 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
10534 /* The LDR instruction is capable of doing branching. If MOV LR, PC
10535 precedes an LDR instruction having R15 as reg_base, it
10536 emulates a branch and link instruction, and hence we need to save
10537 CPSR and PC as well. */
10538 if (ARM_PC_REGNUM == reg_dest)
10539 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10541 /* If wback is true, also save the base register, which is going to be
10542 written to. */
10543 if (wback)
10544 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10546 else
10548 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10550 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
10551 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10553 /* Handle bit U. */
10554 if (bit (arm_insn_r->arm_insn, 23))
10556 /* U == 1: Add the offset. */
10557 tgt_mem_addr = (uint32_t) u_regval + offset_12;
10559 else
10561 /* U == 0: subtract the offset. */
10562 tgt_mem_addr = (uint32_t) u_regval - offset_12;
10565 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10566 bytes. */
10567 if (bit (arm_insn_r->arm_insn, 22))
10569 /* STRB and STRBT: 1 byte. */
10570 record_buf_mem[0] = 1;
10572 else
10574 /* STR and STRT: 4 bytes. */
10575 record_buf_mem[0] = 4;
10578 /* Handle bit P. */
10579 if (bit (arm_insn_r->arm_insn, 24))
10580 record_buf_mem[1] = tgt_mem_addr;
10581 else
10582 record_buf_mem[1] = (uint32_t) u_regval;
10584 arm_insn_r->mem_rec_count = 1;
10586 /* If wback is true, also save the base register, which is going to be
10587 written to. */
10588 if (wback)
10589 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10592 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10593 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10594 return 0;
10597 /* Handling opcode 011 insns. */
10599 static int
10600 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
10602 struct regcache *reg_cache = arm_insn_r->regcache;
10604 uint32_t shift_imm = 0;
10605 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
10606 uint32_t offset_12 = 0, tgt_mem_addr = 0;
10607 uint32_t record_buf[8], record_buf_mem[8];
10609 LONGEST s_word;
10610 ULONGEST u_regval[2];
10612 if (bit (arm_insn_r->arm_insn, 4))
10613 return arm_record_media (arm_insn_r);
10615 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10616 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10618 /* Handle enhanced store insns and the LDRD DSP insn; the cases
10619 are ordered according to the addressing modes of the store
10620 insns, as for STRH. */
10622 /* LDR or STR? */
10623 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10625 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
10626 /* LDR insn is capable of branching: if MOV LR, PC is preceded
10627 by an LDR insn with R15 as Rn, the pair emulates a branch and
10628 link insn, and hence we need to save CPSR and PC as well. */
10630 if (15 != reg_dest)
10632 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10633 arm_insn_r->reg_rec_count = 1;
10635 else
10637 record_buf[0] = reg_dest;
10638 record_buf[1] = ARM_PS_REGNUM;
10639 arm_insn_r->reg_rec_count = 2;
10642 else
10644 if (! bits (arm_insn_r->arm_insn, 4, 11))
10646 /* Store insn, register offset and register pre-indexed,
10647 register post-indexed. */
10648 /* Get Rm. */
10649 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10650 /* Get Rn. */
10651 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10652 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10654 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10656 if (15 == reg_src2)
10658 /* If R15 was used as Rn, hence current PC+8. */
10659 /* Pre-indexed mode doesn't reach here; illegal insn. */
10660 u_regval[0] = u_regval[0] + 8;
10662 /* Calculate target store address, Rn +/- Rm, register offset. */
10663 /* U == 1. */
10664 if (bit (arm_insn_r->arm_insn, 23))
10666 tgt_mem_addr = u_regval[0] + u_regval[1];
10668 else
10670 tgt_mem_addr = u_regval[1] - u_regval[0];
10673 switch (arm_insn_r->opcode)
10675 /* STR. */
10676 case 8:
10677 case 12:
10678 /* STR. */
10679 case 9:
10680 case 13:
10681 /* STRT. */
10682 case 1:
10683 case 5:
10684 /* STR. */
10685 case 0:
10686 case 4:
10687 record_buf_mem[0] = 4;
10688 break;
10690 /* STRB. */
10691 case 10:
10692 case 14:
10693 /* STRB. */
10694 case 11:
10695 case 15:
10696 /* STRBT. */
10697 case 3:
10698 case 7:
10699 /* STRB. */
10700 case 2:
10701 case 6:
10702 record_buf_mem[0] = 1;
10703 break;
10705 default:
10706 gdb_assert_not_reached ("no decoding pattern found");
10707 break;
10709 record_buf_mem[1] = tgt_mem_addr;
10710 arm_insn_r->mem_rec_count = 1;
10712 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10713 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10714 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10715 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10716 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10717 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10720 /* Rn is going to be changed in pre-indexed mode and
10721 post-indexed mode as well. */
10722 record_buf[0] = reg_src2;
10723 arm_insn_r->reg_rec_count = 1;
10726 else
10728 /* Store insn, scaled register offset; scaled pre-indexed. */
10729 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
10730 /* Get Rm. */
10731 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10732 /* Get Rn. */
10733 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10734 /* Get shift_imm. */
10735 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
10736 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10737 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
10738 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10739 /* At this point offset_12 holds the shift type from bits 5-6. */
10740 switch (offset_12)
10742 case 0:
10743 /* LSL: offset_12 becomes the scaled index. */
10744 offset_12 = u_regval[0] << shift_imm;
10745 break;
10747 case 1:
10748 offset_12 = (!shift_imm) ? 0 : u_regval[0] >> shift_imm;
10749 break;
10751 case 2:
10752 if (!shift_imm)
10754 if (bit (u_regval[0], 31))
10756 offset_12 = 0xFFFFFFFF;
10758 else
10760 offset_12 = 0;
10763 else
10765 /* This is arithmetic shift. */
10766 offset_12 = s_word >> shift_imm;
10768 break;
10770 case 3:
10771 if (!shift_imm)
10773 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
10774 &u_regval[1]);
10775 /* Get C flag value and shift it by 31. */
10776 offset_12 = (((bit (u_regval[1], 29)) << 31) \
10777 | (u_regval[0]) >> 1);
10779 else
10781 offset_12 = (u_regval[0] >> shift_imm) \
10782 | (u_regval[0] <<
10783 (32 - shift_imm)); /* Rotate right by SHIFT_IMM bits. */
10785 break;
10787 default:
10788 gdb_assert_not_reached ("no decoding pattern found");
10789 break;
10792 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10793 /* bit U set. */
10794 if (bit (arm_insn_r->arm_insn, 23))
10796 tgt_mem_addr = u_regval[1] + offset_12;
10798 else
10800 tgt_mem_addr = u_regval[1] - offset_12;
10803 switch (arm_insn_r->opcode)
10805 /* STR. */
10806 case 8:
10807 case 12:
10808 /* STR. */
10809 case 9:
10810 case 13:
10811 /* STRT. */
10812 case 1:
10813 case 5:
10814 /* STR. */
10815 case 0:
10816 case 4:
10817 record_buf_mem[0] = 4;
10818 break;
10820 /* STRB. */
10821 case 10:
10822 case 14:
10823 /* STRB. */
10824 case 11:
10825 case 15:
10826 /* STRBT. */
10827 case 3:
10828 case 7:
10829 /* STRB. */
10830 case 2:
10831 case 6:
10832 record_buf_mem[0] = 1;
10833 break;
10835 default:
10836 gdb_assert_not_reached ("no decoding pattern found");
10837 break;
10839 record_buf_mem[1] = tgt_mem_addr;
10840 arm_insn_r->mem_rec_count = 1;
10842 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
10843 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10844 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
10845 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
10846 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
10847 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
10850 /* Rn is going to be changed in register scaled pre-indexed
10851 mode and scaled post-indexed mode as well. */
10852 record_buf[0] = reg_src2;
10853 arm_insn_r->reg_rec_count = 1;
10858 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10859 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10860 return 0;
10863 /* Handle ARM mode instructions with opcode 100. */
10865 static int
10866 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
10868 struct regcache *reg_cache = arm_insn_r->regcache;
10869 uint32_t register_count = 0, register_bits;
10870 uint32_t reg_base, addr_mode;
10871 uint32_t record_buf[24], record_buf_mem[48];
10872 uint32_t wback;
10873 ULONGEST u_regval;
10875 /* Fetch the list of registers. */
10876 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
10877 arm_insn_r->reg_rec_count = 0;
10879 /* Fetch the base register that contains the address of the
10880 memory block being transferred. */
10881 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
10883 /* Calculate wback. */
10884 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
10886 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
10888 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
10890 /* Find out which registers are going to be loaded from memory. */
10891 while (register_bits)
10893 if (register_bits & 0x00000001)
10894 record_buf[arm_insn_r->reg_rec_count++] = register_count;
10895 register_bits = register_bits >> 1;
10896 register_count++;
10900 /* If wback is true, also save the base register, which is going to be
10901 written to. */
10902 if (wback)
10903 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10905 /* Save the CPSR register. */
10906 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
10908 else
10910 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
10912 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
10914 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
10916 /* Find out how many registers are going to be stored to memory. */
10917 while (register_bits)
10919 if (register_bits & 0x00000001)
10920 register_count++;
10921 register_bits = register_bits >> 1;
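/* Compute the lowest address the store-multiple writes to, so that
   record_buf_mem[1] holds the start address and record_buf_mem[0]
   the total number of bytes stored.  */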
10924 switch (addr_mode)
10926 /* STMDA (STMED): Decrement after. */
10927 case 0:
10928 record_buf_mem[1] = (uint32_t) u_regval
10929 - register_count * INT_REGISTER_SIZE + 4;
10930 break;
10931 /* STM (STMIA, STMEA): Increment after. */
10932 case 1:
10933 record_buf_mem[1] = (uint32_t) u_regval;
10934 break;
10935 /* STMDB (STMFD): Decrement before. */
10936 case 2:
10937 record_buf_mem[1] = (uint32_t) u_regval
10938 - register_count * INT_REGISTER_SIZE;
10939 break;
10940 /* STMIB (STMFA): Increment before. */
10941 case 3:
10942 record_buf_mem[1] = (uint32_t) u_regval + INT_REGISTER_SIZE;
10943 break;
10944 default:
10945 gdb_assert_not_reached ("no decoding pattern found");
10946 break;
10949 record_buf_mem[0] = register_count * INT_REGISTER_SIZE;
10950 arm_insn_r->mem_rec_count = 1;
10952 /* If wback is true, also save the base register, which is going to be
10953 written to. */
10954 if (wback)
10955 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
10958 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10959 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
10960 return 0;
10963 /* Handling opcode 101 insns. */
10965 static int
10966 arm_record_b_bl (insn_decode_record *arm_insn_r)
10968 uint32_t record_buf[8];
10970 /* Handle B, BL, BLX(1) insns. */
10971 /* B simply branches so we do nothing here. */
10972 /* Note: BLX(1) doesn't fall here but instead it falls into
10973 extension space. */
10974 if (bit (arm_insn_r->arm_insn, 24))
10976 record_buf[0] = ARM_LR_REGNUM;
10977 arm_insn_r->reg_rec_count = 1;
10980 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
10982 return 0;
10985 static int
10986 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
10988 printf_unfiltered (_("Process record does not support instruction "
10989 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
10990 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
10992 return -1;
10995 /* Record handler for vector data transfer instructions. */
10997 static int
10998 arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
11000 uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
11001 uint32_t record_buf[4];
11003 reg_t = bits (arm_insn_r->arm_insn, 12, 15);
11004 reg_v = bits (arm_insn_r->arm_insn, 21, 23);
11005 bits_a = bits (arm_insn_r->arm_insn, 21, 23);
11006 bit_l = bit (arm_insn_r->arm_insn, 20);
11007 bit_c = bit (arm_insn_r->arm_insn, 8);
11009 /* Handle VMOV instruction. */
11010 if (bit_l && bit_c)
11012 record_buf[0] = reg_t;
11013 arm_insn_r->reg_rec_count = 1;
11015 else if (bit_l && !bit_c)
11017 /* Handle VMOV instruction. */
11018 if (bits_a == 0x00)
11020 record_buf[0] = reg_t;
11021 arm_insn_r->reg_rec_count = 1;
11023 /* Handle VMRS instruction. */
11024 else if (bits_a == 0x07)
11026 if (reg_t == 15)
11027 reg_t = ARM_PS_REGNUM;
11029 record_buf[0] = reg_t;
11030 arm_insn_r->reg_rec_count = 1;
11033 else if (!bit_l && !bit_c)
11035 /* Handle VMOV instruction. */
11036 if (bits_a == 0x00)
11038 record_buf[0] = ARM_D0_REGNUM + reg_v;
11040 arm_insn_r->reg_rec_count = 1;
11042 /* Handle VMSR instruction. */
11043 else if (bits_a == 0x07)
11045 record_buf[0] = ARM_FPSCR_REGNUM;
11046 arm_insn_r->reg_rec_count = 1;
11049 else if (!bit_l && bit_c)
11051 /* Handle VMOV instruction. */
11052 if (!(bits_a & 0x04))
11054 record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
11055 + ARM_D0_REGNUM;
11056 arm_insn_r->reg_rec_count = 1;
11058 /* Handle VDUP instruction. */
11059 else
11061 if (bit (arm_insn_r->arm_insn, 21))
11063 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11064 record_buf[0] = reg_v + ARM_D0_REGNUM;
11065 record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
11066 arm_insn_r->reg_rec_count = 2;
11068 else
11070 reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
11071 record_buf[0] = reg_v + ARM_D0_REGNUM;
11072 arm_insn_r->reg_rec_count = 1;
11077 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11078 return 0;
11081 /* Record handler for extension register load/store instructions. */
11083 static int
11084 arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
11086 uint32_t opcode, single_reg;
11087 uint8_t op_vldm_vstm;
11088 uint32_t record_buf[8], record_buf_mem[128];
11089 ULONGEST u_regval = 0;
11091 struct regcache *reg_cache = arm_insn_r->regcache;
11093 opcode = bits (arm_insn_r->arm_insn, 20, 24);
11094 single_reg = !bit (arm_insn_r->arm_insn, 8);
11095 op_vldm_vstm = opcode & 0x1b;
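/* OPCODE is bits 24:20 (P:U:D:W:L); masking with 0x1b drops the D bit,
   which only extends the register number, so the addressing-mode tests
   below ignore it.  */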
11097 /* Handle VMOV instructions. */
11098 if ((opcode & 0x1e) == 0x04)
11100 if (bit (arm_insn_r->arm_insn, 20)) /* to_arm_registers bit 20? */
11102 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11103 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11104 arm_insn_r->reg_rec_count = 2;
11106 else
11108 uint8_t reg_m = bits (arm_insn_r->arm_insn, 0, 3);
11109 uint8_t bit_m = bit (arm_insn_r->arm_insn, 5);
11111 if (single_reg)
11113 /* The first S register number m is REG_M:M (M is bit 5),
11114 the corresponding D register number is REG_M:M / 2, which
11115 is REG_M. */
11116 record_buf[arm_insn_r->reg_rec_count++] = ARM_D0_REGNUM + reg_m;
11117 /* The second S register number is REG_M:M + 1, the
11118 corresponding D register number is (REG_M:M + 1) / 2.
11119 IOW, if bit M is 1, the first and second S registers
11120 are mapped to different D registers, otherwise, they are
11121 in the same D register. */
11122 if (bit_m)
11124 record_buf[arm_insn_r->reg_rec_count++]
11125 = ARM_D0_REGNUM + reg_m + 1;
11128 else
11130 record_buf[0] = ((bit_m << 4) + reg_m + ARM_D0_REGNUM);
11131 arm_insn_r->reg_rec_count = 1;
11135 /* Handle VSTM and VPUSH instructions. */
11136 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
11137 || op_vldm_vstm == 0x12)
11139 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
11140 uint32_t memory_index = 0;
11142 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11143 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11144 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11145 imm_off32 = imm_off8 << 2;
11146 memory_count = imm_off8;
11148 if (bit (arm_insn_r->arm_insn, 23))
11149 start_address = u_regval;
11150 else
11151 start_address = u_regval - imm_off32;
11153 if (bit (arm_insn_r->arm_insn, 21))
11155 record_buf[0] = reg_rn;
11156 arm_insn_r->reg_rec_count = 1;
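/* Walk the transfer area and emit (length, address) pairs into
   record_buf_mem: one 4-byte pair per step for single precision,
   two consecutive 4-byte pairs per step for double precision.  */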
11159 while (memory_count > 0)
11161 if (single_reg)
11163 record_buf_mem[memory_index] = 4;
11164 record_buf_mem[memory_index + 1] = start_address;
11165 start_address = start_address + 4;
11166 memory_index = memory_index + 2;
11168 else
11170 record_buf_mem[memory_index] = 4;
11171 record_buf_mem[memory_index + 1] = start_address;
11172 record_buf_mem[memory_index + 2] = 4;
11173 record_buf_mem[memory_index + 3] = start_address + 4;
11174 start_address = start_address + 8;
11175 memory_index = memory_index + 4;
11177 memory_count--;
11179 arm_insn_r->mem_rec_count = (memory_index >> 1);
11181 /* Handle VLDM instructions. */
11182 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
11183 || op_vldm_vstm == 0x13)
11185 uint32_t reg_count, reg_vd;
11186 uint32_t reg_index = 0;
11187 uint32_t bit_d = bit (arm_insn_r->arm_insn, 22);
11189 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11190 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
11192 /* REG_VD is the first D register number. If the instruction
11193 loads memory to S registers (SINGLE_REG is TRUE), the register
11194 number is (REG_VD << 1 | bit D), so the corresponding D
11195 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
11196 if (!single_reg)
11197 reg_vd = reg_vd | (bit_d << 4);
11199 if (bit (arm_insn_r->arm_insn, 21) /* write back */)
11200 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
11202 /* If the instruction loads memory into D registers, REG_COUNT should
11203 be divided by 2, according to the ARM Architecture Reference
11204 Manual. If the instruction loads memory into S registers, divide by
11205 2 as well, because two S registers are mapped to one D register. */
11206 reg_count = reg_count / 2;
11207 if (single_reg && bit_d)
11209 /* Increase the register count if S register list starts from
11210 an odd number (bit d is one). */
11211 reg_count++;
11214 while (reg_count > 0)
11216 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
11217 reg_count--;
11219 arm_insn_r->reg_rec_count = reg_index;
11221 /* VSTR Vector store register. */
11222 else if ((opcode & 0x13) == 0x10)
11224 uint32_t start_address, reg_rn, imm_off32, imm_off8;
11225 uint32_t memory_index = 0;
11227 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
11228 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
11229 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
11230 imm_off32 = imm_off8 << 2;
11232 if (bit (arm_insn_r->arm_insn, 23))
11233 start_address = u_regval + imm_off32;
11234 else
11235 start_address = u_regval - imm_off32;
11237 if (single_reg)
11239 record_buf_mem[memory_index] = 4;
11240 record_buf_mem[memory_index + 1] = start_address;
11241 arm_insn_r->mem_rec_count = 1;
11243 else
11245 record_buf_mem[memory_index] = 4;
11246 record_buf_mem[memory_index + 1] = start_address;
11247 record_buf_mem[memory_index + 2] = 4;
11248 record_buf_mem[memory_index + 3] = start_address + 4;
11249 arm_insn_r->mem_rec_count = 2;
11252 /* VLDR Vector load register. */
11253 else if ((opcode & 0x13) == 0x11)
11255 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11257 if (!single_reg)
11259 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
11260 record_buf[0] = ARM_D0_REGNUM + reg_vd;
11262 else
11264 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
11265 /* Record register D rather than pseudo register S. */
11266 record_buf[0] = ARM_D0_REGNUM + reg_vd / 2;
11268 arm_insn_r->reg_rec_count = 1;
11271 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11272 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11273 return 0;
11276 /* Record handler for arm/thumb mode VFP data processing instructions. */
11278 static int
11279 arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
11281 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
11282 uint32_t record_buf[4];
11283 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
11284 enum insn_types curr_insn_type = INSN_INV;
11286 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
11287 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
11288 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
11289 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
11290 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
11291 bit_d = bit (arm_insn_r->arm_insn, 22);
11292 opc1 = opc1 & ~0x04; /* Mask off the D bit (bit 22). */
11294 /* Handle VMLA, VMLS. */
11295 if (opc1 == 0x00)
11297 if (bit (arm_insn_r->arm_insn, 10))
11299 if (bit (arm_insn_r->arm_insn, 6))
11300 curr_insn_type = INSN_T0;
11301 else
11302 curr_insn_type = INSN_T1;
11304 else
11306 if (dp_op_sz)
11307 curr_insn_type = INSN_T1;
11308 else
11309 curr_insn_type = INSN_T2;
11312 /* Handle VNMLA, VNMLS, VNMUL. */
11313 else if (opc1 == 0x01)
11315 if (dp_op_sz)
11316 curr_insn_type = INSN_T1;
11317 else
11318 curr_insn_type = INSN_T2;
11320 /* Handle VMUL. */
11321 else if (opc1 == 0x02 && !(opc3 & 0x01))
11323 if (bit (arm_insn_r->arm_insn, 10))
11325 if (bit (arm_insn_r->arm_insn, 6))
11326 curr_insn_type = INSN_T0;
11327 else
11328 curr_insn_type = INSN_T1;
11330 else
11332 if (dp_op_sz)
11333 curr_insn_type = INSN_T1;
11334 else
11335 curr_insn_type = INSN_T2;
11338 /* Handle VADD, VSUB. */
11339 else if (opc1 == 0x03)
11341 if (!bit (arm_insn_r->arm_insn, 9))
11343 if (bit (arm_insn_r->arm_insn, 6))
11344 curr_insn_type = INSN_T0;
11345 else
11346 curr_insn_type = INSN_T1;
11348 else
11350 if (dp_op_sz)
11351 curr_insn_type = INSN_T1;
11352 else
11353 curr_insn_type = INSN_T2;
11356 /* Handle VDIV. */
11357 else if (opc1 == 0x08)
11359 if (dp_op_sz)
11360 curr_insn_type = INSN_T1;
11361 else
11362 curr_insn_type = INSN_T2;
11364 /* Handle all other vfp data processing instructions. */
11365 else if (opc1 == 0x0b)
11367 /* Handle VMOV. */
11368 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
11370 if (bit (arm_insn_r->arm_insn, 4))
11372 if (bit (arm_insn_r->arm_insn, 6))
11373 curr_insn_type = INSN_T0;
11374 else
11375 curr_insn_type = INSN_T1;
11377 else
11379 if (dp_op_sz)
11380 curr_insn_type = INSN_T1;
11381 else
11382 curr_insn_type = INSN_T2;
11385 /* Handle VNEG and VABS. */
11386 else if ((opc2 == 0x01 && opc3 == 0x01)
11387 || (opc2 == 0x00 && opc3 == 0x03))
11389 if (!bit (arm_insn_r->arm_insn, 11))
11391 if (bit (arm_insn_r->arm_insn, 6))
11392 curr_insn_type = INSN_T0;
11393 else
11394 curr_insn_type = INSN_T1;
11396 else
11398 if (dp_op_sz)
11399 curr_insn_type = INSN_T1;
11400 else
11401 curr_insn_type = INSN_T2;
11404 /* Handle VSQRT. */
11405 else if (opc2 == 0x01 && opc3 == 0x03)
11407 if (dp_op_sz)
11408 curr_insn_type = INSN_T1;
11409 else
11410 curr_insn_type = INSN_T2;
11412 /* Handle VCVT. */
11413 else if (opc2 == 0x07 && opc3 == 0x03)
11415 if (!dp_op_sz)
11416 curr_insn_type = INSN_T1;
11417 else
11418 curr_insn_type = INSN_T2;
11420 else if (opc3 & 0x01)
11422 /* Handle VCVT. */
11423 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
11425 if (!bit (arm_insn_r->arm_insn, 18))
11426 curr_insn_type = INSN_T2;
11427 else
11429 if (dp_op_sz)
11430 curr_insn_type = INSN_T1;
11431 else
11432 curr_insn_type = INSN_T2;
11435 /* Handle VCVT. */
11436 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
11438 if (dp_op_sz)
11439 curr_insn_type = INSN_T1;
11440 else
11441 curr_insn_type = INSN_T2;
11443 /* Handle VCVTB, VCVTT. */
11444 else if ((opc2 & 0x0e) == 0x02)
11445 curr_insn_type = INSN_T2;
11446 /* Handle VCMP, VCMPE. */
11447 else if ((opc2 & 0x0e) == 0x04)
11448 curr_insn_type = INSN_T3;
11452 switch (curr_insn_type)
11454 case INSN_T0:
11455 reg_vd = reg_vd | (bit_d << 4);
11456 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11457 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
11458 arm_insn_r->reg_rec_count = 2;
11459 break;
11461 case INSN_T1:
11462 reg_vd = reg_vd | (bit_d << 4);
11463 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11464 arm_insn_r->reg_rec_count = 1;
11465 break;
11467 case INSN_T2:
11468 reg_vd = (reg_vd << 1) | bit_d;
11469 record_buf[0] = reg_vd + ARM_D0_REGNUM;
11470 arm_insn_r->reg_rec_count = 1;
11471 break;
11473 case INSN_T3:
11474 record_buf[0] = ARM_FPSCR_REGNUM;
11475 arm_insn_r->reg_rec_count = 1;
11476 break;
11478 default:
11479 gdb_assert_not_reached ("no decoding pattern found");
11480 break;
11483 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11484 return 0;
11487 /* Handling opcode 110 insns. */
11489 static int
11490 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
11492 uint32_t op1, op1_ebit, coproc;
11494 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11495 op1 = bits (arm_insn_r->arm_insn, 20, 25);
11496 op1_ebit = bit (arm_insn_r->arm_insn, 20);
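/* Coprocessor numbers 10 and 11 (0b101x) carry the VFP/Advanced SIMD
   extension registers; any other coprocessor number is treated as a
   generic coprocessor below.  */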
11498 if ((coproc & 0x0e) == 0x0a)
11500 /* Handle extension register ld/st instructions. */
11501 if (!(op1 & 0x20))
11502 return arm_record_exreg_ld_st_insn (arm_insn_r);
11504 /* 64-bit transfers between arm core and extension registers. */
11505 if ((op1 & 0x3e) == 0x04)
11506 return arm_record_exreg_ld_st_insn (arm_insn_r);
11508 else
11510 /* Handle coprocessor ld/st instructions. */
11511 if (!(op1 & 0x3a))
11513 /* Store. */
11514 if (!op1_ebit)
11515 return arm_record_unsupported_insn (arm_insn_r);
11516 else
11517 /* Load. */
11518 return arm_record_unsupported_insn (arm_insn_r);
11521 /* Move to coprocessor from two arm core registers. */
11522 if (op1 == 0x4)
11523 return arm_record_unsupported_insn (arm_insn_r);
11525 /* Move to two arm core registers from coprocessor. */
11526 if (op1 == 0x5)
11528 uint32_t reg_t[2];
11530 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
11531 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
11532 arm_insn_r->reg_rec_count = 2;
11534 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
11535 return 0;
11538 return arm_record_unsupported_insn (arm_insn_r);
11541 /* Handling opcode 111 insns. */
11543 static int
11544 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
11546 uint32_t op, op1_sbit, op1_ebit, coproc;
11547 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
11548 struct regcache *reg_cache = arm_insn_r->regcache;
11550 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
11551 coproc = bits (arm_insn_r->arm_insn, 8, 11);
11552 op1_sbit = bit (arm_insn_r->arm_insn, 24);
11553 op1_ebit = bit (arm_insn_r->arm_insn, 20);
11554 op = bit (arm_insn_r->arm_insn, 4);
11556 /* Handle arm SWI/SVC system call instructions. */
11557 if (op1_sbit)
11559 if (tdep->arm_syscall_record != NULL)
11561 ULONGEST svc_operand, svc_number;
11563 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
11565 if (svc_operand) /* OABI. */
11566 svc_number = svc_operand - 0x900000;
11567 else /* EABI. */
11568 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
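/* E.g. an OABI "swi 0x900004" (write) carries the number in the SWI
   immediate, giving 0x900004 - 0x900000 = 4, while an EABI "svc 0" has
   a zero immediate and the syscall number is read from r7 instead.  */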
11570 return tdep->arm_syscall_record (reg_cache, svc_number);
11572 else
11574 printf_unfiltered (_("no syscall record support\n"));
11575 return -1;
11579 if ((coproc & 0x0e) == 0x0a)
11581 /* VFP data-processing instructions. */
11582 if (!op1_sbit && !op)
11583 return arm_record_vfp_data_proc_insn (arm_insn_r);
11585 /* Advanced SIMD, VFP instructions. */
11586 if (!op1_sbit && op)
11587 return arm_record_vdata_transfer_insn (arm_insn_r);
11589 else
11591 /* Coprocessor data operations. */
11592 if (!op1_sbit && !op)
11593 return arm_record_unsupported_insn (arm_insn_r);
11595 /* Move to Coprocessor from ARM core register. */
11596 if (!op1_sbit && !op1_ebit && op)
11597 return arm_record_unsupported_insn (arm_insn_r);
11599 /* Move to arm core register from coprocessor. */
11600 if (!op1_sbit && op1_ebit && op)
11602 uint32_t record_buf[1];
11604 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11605 if (record_buf[0] == 15)
11606 record_buf[0] = ARM_PS_REGNUM;
11608 arm_insn_r->reg_rec_count = 1;
11609 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
11610 record_buf);
11611 return 0;
11615 return arm_record_unsupported_insn (arm_insn_r);
11618 /* Handling opcode 000 insns. */
11620 static int
11621 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
11623 uint32_t record_buf[8];
11624 uint32_t reg_src1 = 0;
11626 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11628 record_buf[0] = ARM_PS_REGNUM;
11629 record_buf[1] = reg_src1;
11630 thumb_insn_r->reg_rec_count = 2;
11632 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11634 return 0;
11638 /* Handling opcode 001 insns. */
11640 static int
11641 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
11643 uint32_t record_buf[8];
11644 uint32_t reg_src1 = 0;
11646 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11648 record_buf[0] = ARM_PS_REGNUM;
11649 record_buf[1] = reg_src1;
11650 thumb_insn_r->reg_rec_count = 2;
11652 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11654 return 0;
11657 /* Handling opcode 010 insns. */
11659 static int
11660 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
11662 struct regcache *reg_cache = thumb_insn_r->regcache;
11663 uint32_t record_buf[8], record_buf_mem[8];
11665 uint32_t reg_src1 = 0, reg_src2 = 0;
11666 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
11668 ULONGEST u_regval[2] = {0};
11670 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
11672 if (bit (thumb_insn_r->arm_insn, 12))
11674 /* Handle load/store register offset. */
11675 opcode2 = bits (thumb_insn_r->arm_insn, 9, 12);
11676 if (opcode2 >= 11 && opcode2 <= 15)
11678 /* LDR(2), LDRB(2), LDRH(2), LDRSB, LDRSH. */
11679 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11680 record_buf[0] = reg_src1;
11681 thumb_insn_r->reg_rec_count = 1;
11683 else if (opcode2 >= 8 && opcode2 <= 10)
11685 /* STR(2), STRB(2), STRH(2). */
11686 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11687 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
11688 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11689 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11690 if (8 == opcode2)
11691 record_buf_mem[0] = 4; /* STR (2). */
11692 else if (10 == opcode2)
11693 record_buf_mem[0] = 1; /* STRB (2). */
11694 else if (9 == opcode2)
11695 record_buf_mem[0] = 2; /* STRH (2). */
11696 record_buf_mem[1] = u_regval[0] + u_regval[1];
11697 thumb_insn_r->mem_rec_count = 1;
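/* Note: throughout these handlers record_buf_mem is filled with
   (length, address) pairs and mem_rec_count counts the pairs, which
   MEM_ALLOC then attaches to the per-insn memory record list.  */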
11700 else if (bit (thumb_insn_r->arm_insn, 11))
11702 /* Handle load from literal pool. */
11703 /* LDR(3). */
11704 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11705 record_buf[0] = reg_src1;
11706 thumb_insn_r->reg_rec_count = 1;
11708 else if (opcode1)
11710 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
11711 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
11712 if ((3 == opcode2) && (!opcode3))
11714 /* Branch with exchange. */
11715 record_buf[0] = ARM_PS_REGNUM;
11716 thumb_insn_r->reg_rec_count = 1;
11718 else
11720 /* Format 8; special data processing insns. */
11721 record_buf[0] = ARM_PS_REGNUM;
11722 record_buf[1] = (bit (thumb_insn_r->arm_insn, 7) << 3
11723 | bits (thumb_insn_r->arm_insn, 0, 2));
11724 thumb_insn_r->reg_rec_count = 2;
11727 else
11729 /* Format 5; data processing insns. */
11730 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11731 if (bit (thumb_insn_r->arm_insn, 7))
11733 reg_src1 = reg_src1 + 8;
11735 record_buf[0] = ARM_PS_REGNUM;
11736 record_buf[1] = reg_src1;
11737 thumb_insn_r->reg_rec_count = 2;
11740 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11741 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11742 record_buf_mem);
11744 return 0;
11747 /* Handling opcode 011 insns. */
11749 static int
11750 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
11752 struct regcache *reg_cache = thumb_insn_r->regcache;
11753 uint32_t record_buf[8], record_buf_mem[8];
11755 uint32_t reg_src1 = 0;
11756 uint32_t opcode = 0, immed_5 = 0;
11758 ULONGEST u_regval = 0;
11760 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11762 if (opcode)
11764 /* LDR(1). */
11765 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11766 record_buf[0] = reg_src1;
11767 thumb_insn_r->reg_rec_count = 1;
11769 else
11771 /* STR(1). */
11772 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11773 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11774 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11775 record_buf_mem[0] = 4;
11776 record_buf_mem[1] = u_regval + (immed_5 * 4);
11777 thumb_insn_r->mem_rec_count = 1;
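/* For example, "str r2, [r3, #8]" has immed_5 = 2, so the 4 bytes at
   r3 + 8 are recorded as changed.  */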
11780 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11781 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11782 record_buf_mem);
11784 return 0;
11787 /* Handling opcode 100 insns. */
11789 static int
11790 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
11792 struct regcache *reg_cache = thumb_insn_r->regcache;
11793 uint32_t record_buf[8], record_buf_mem[8];
11795 uint32_t reg_src1 = 0;
11796 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
11798 ULONGEST u_regval = 0;
11800 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11802 if (3 == opcode)
11804 /* LDR(4). */
11805 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11806 record_buf[0] = reg_src1;
11807 thumb_insn_r->reg_rec_count = 1;
11809 else if (1 == opcode)
11811 /* LDRH(1). */
11812 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
11813 record_buf[0] = reg_src1;
11814 thumb_insn_r->reg_rec_count = 1;
11816 else if (2 == opcode)
11818 /* STR(3). */
11819 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
11820 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11821 record_buf_mem[0] = 4;
11822 record_buf_mem[1] = u_regval + (immed_8 * 4);
11823 thumb_insn_r->mem_rec_count = 1;
11825 else if (0 == opcode)
11827 /* STRH(1). */
11828 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
11829 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
11830 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11831 record_buf_mem[0] = 2;
11832 record_buf_mem[1] = u_regval + (immed_5 * 2);
11833 thumb_insn_r->mem_rec_count = 1;
11836 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11837 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11838 record_buf_mem);
11840 return 0;
11843 /* Handling opcode 101 insns. */
11845 static int
11846 thumb_record_misc (insn_decode_record *thumb_insn_r)
11848 struct regcache *reg_cache = thumb_insn_r->regcache;
11850 uint32_t opcode = 0, opcode1 = 0, opcode2 = 0;
11851 uint32_t register_bits = 0, register_count = 0;
11852 uint32_t index = 0, start_address = 0;
11853 uint32_t record_buf[24], record_buf_mem[48];
11854 uint32_t reg_src1;
11856 ULONGEST u_regval = 0;
11858 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
11859 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
11860 opcode2 = bits (thumb_insn_r->arm_insn, 9, 12);
11862 if (14 == opcode2)
11864 /* POP. */
11865 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11866 while (register_bits)
11868 if (register_bits & 0x00000001)
11869 record_buf[index++] = register_count;
11870 register_bits = register_bits >> 1;
11871 register_count++;
11873 record_buf[index++] = ARM_PS_REGNUM;
11874 record_buf[index++] = ARM_SP_REGNUM;
11875 thumb_insn_r->reg_rec_count = index;
11877 else if (10 == opcode2)
11879 /* PUSH. */
11880 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11881 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
11882 while (register_bits)
11884 if (register_bits & 0x00000001)
11885 register_count++;
11886 register_bits = register_bits >> 1;
11888 start_address = u_regval
11889 - (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
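/* E.g. for "push {r4, r5, lr}" register_count is 2 and bit 8 (the LR
   bit) is set, so start_address = SP - 12, the lowest address the push
   writes to.  */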
11890 thumb_insn_r->mem_rec_count = register_count;
11891 while (register_count)
11893 record_buf_mem[(register_count * 2) - 1] = start_address;
11894 record_buf_mem[(register_count * 2) - 2] = 4;
11895 start_address = start_address + 4;
11896 register_count--;
11898 record_buf[0] = ARM_SP_REGNUM;
11899 thumb_insn_r->reg_rec_count = 1;
11901 else if (0x1E == opcode1)
11903 /* BKPT insn. */
11904 /* Handle enhanced software breakpoint insn, BKPT. */
11905 /* CPSR is changed so that execution continues in ARM state with normal
11906 interrupts disabled, entering abort mode. */
11907 /* The PC is set according to the high vector configuration. */
11908 /* If the user hits the breakpoint and then reverse-steps, we need to go
11909 back with the previous CPSR and Program Counter. */
11910 record_buf[0] = ARM_PS_REGNUM;
11911 record_buf[1] = ARM_LR_REGNUM;
11912 thumb_insn_r->reg_rec_count = 2;
11913 /* We would also need to save the SPSR value, which is not yet done. */
11914 printf_unfiltered (_("Process record does not support instruction "
11915 "0x%0x at address %s.\n"),
11916 thumb_insn_r->arm_insn,
11917 paddress (thumb_insn_r->gdbarch,
11918 thumb_insn_r->this_addr));
11919 return -1;
11921 else if ((0 == opcode) || (1 == opcode))
11923 /* ADD(5), ADD(6). */
11924 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11925 record_buf[0] = reg_src1;
11926 thumb_insn_r->reg_rec_count = 1;
11928 else if (2 == opcode)
11930 /* ADD(7), SUB(4). */
11931 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11932 record_buf[0] = ARM_SP_REGNUM;
11933 thumb_insn_r->reg_rec_count = 1;
11936 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
11937 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
11938 record_buf_mem);
11940 return 0;
11943 /* Handling opcode 110 insns. */
11945 static int
11946 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
11948 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
11949 struct regcache *reg_cache = thumb_insn_r->regcache;
11951 uint32_t ret = 0; /* Function return value: -1: record failure; 0: success. */
11952 uint32_t reg_src1 = 0;
11953 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
11954 uint32_t index = 0, start_address = 0;
11955 uint32_t record_buf[24], record_buf_mem[48];
11957 ULONGEST u_regval = 0;
11959 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
11960 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
11962 if (1 == opcode2)
11965 /* LDMIA. */
11966 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11967 /* Get Rn. */
11968 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11969 while (register_bits)
11971 if (register_bits & 0x00000001)
11972 record_buf[index++] = register_count;
11973 register_bits = register_bits >> 1;
11974 register_count++;
11976 record_buf[index++] = reg_src1;
11977 thumb_insn_r->reg_rec_count = index;
11979 else if (0 == opcode2)
11981 /* Handle STMIA. */
11982 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
11983 /* Get Rn. */
11984 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
11985 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11986 while (register_bits)
11988 if (register_bits & 0x00000001)
11989 register_count++;
11990 register_bits = register_bits >> 1;
11992 start_address = u_regval;
11993 thumb_insn_r->mem_rec_count = register_count;
11994 while (register_count)
11996 record_buf_mem[(register_count * 2) - 1] = start_address;
11997 record_buf_mem[(register_count * 2) - 2] = 4;
11998 start_address = start_address + 4;
11999 register_count--;
12002 else if (0x1F == opcode1)
12004 /* Handle the SWI/SVC system call insn. */
12005 if (tdep->arm_syscall_record != NULL)
12007 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12008 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12010 else
12012 printf_unfiltered (_("no syscall record support\n"));
12013 return -1;
12017 /* B(1), the conditional branch, is automatically taken care of in
12018 process_record, as the PC is saved there. */
12020 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12021 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12022 record_buf_mem);
12024 return ret;
12027 /* Handling opcode 111 insns. */
12029 static int
12030 thumb_record_branch (insn_decode_record *thumb_insn_r)
12032 uint32_t record_buf[8];
12033 uint32_t bits_h = 0;
12035 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12037 if (2 == bits_h || 3 == bits_h)
12039 /* BL */
12040 record_buf[0] = ARM_LR_REGNUM;
12041 thumb_insn_r->reg_rec_count = 1;
12043 else if (1 == bits_h)
12045 /* BLX(1). */
12046 record_buf[0] = ARM_PS_REGNUM;
12047 record_buf[1] = ARM_LR_REGNUM;
12048 thumb_insn_r->reg_rec_count = 2;
12051 /* B(2) is automatically taken care of in process_record, as the PC is
12052 saved there. */
12054 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12056 return 0;
12059 /* Handler for thumb2 load/store multiple instructions. */
12061 static int
12062 thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
12064 struct regcache *reg_cache = thumb2_insn_r->regcache;
12066 uint32_t reg_rn, op;
12067 uint32_t register_bits = 0, register_count = 0;
12068 uint32_t index = 0, start_address = 0;
12069 uint32_t record_buf[24], record_buf_mem[48];
12071 ULONGEST u_regval = 0;
12073 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12074 op = bits (thumb2_insn_r->arm_insn, 23, 24);
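/* op (bits 24:23): 0b00 and 0b11 select SRS/RFE, 0b01 selects
   LDMIA/STMIA and 0b10 selects LDMDB/STMDB; the L bit distinguishes
   loads from stores.  */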
12076 if (0 == op || 3 == op)
12078 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12080 /* Handle RFE instruction. */
12081 record_buf[0] = ARM_PS_REGNUM;
12082 thumb2_insn_r->reg_rec_count = 1;
12084 else
12086 /* Handle SRS instruction after reading banked SP. */
12087 return arm_record_unsupported_insn (thumb2_insn_r);
12090 else if (1 == op || 2 == op)
12092 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12094 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12095 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12096 while (register_bits)
12098 if (register_bits & 0x00000001)
12099 record_buf[index++] = register_count;
12101 register_count++;
12102 register_bits = register_bits >> 1;
12104 record_buf[index++] = reg_rn;
12105 record_buf[index++] = ARM_PS_REGNUM;
12106 thumb2_insn_r->reg_rec_count = index;
12108 else
12110 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12111 register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
12112 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12113 while (register_bits)
12115 if (register_bits & 0x00000001)
12116 register_count++;
12118 register_bits = register_bits >> 1;
12121 if (1 == op)
12123 /* Start address calculation for STMIA/STMEA. */
12124 start_address = u_regval;
12126 else if (2 == op)
12128 /* Start address calculation for STMDB/STMFD. */
12129 start_address = u_regval - register_count * 4;
12132 thumb2_insn_r->mem_rec_count = register_count;
12133 while (register_count)
12135 record_buf_mem[register_count * 2 - 1] = start_address;
12136 record_buf_mem[register_count * 2 - 2] = 4;
12137 start_address = start_address + 4;
12138 register_count--;
12140 record_buf[0] = reg_rn;
12141 record_buf[1] = ARM_PS_REGNUM;
12142 thumb2_insn_r->reg_rec_count = 2;
12146 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12147 record_buf_mem);
12148 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12149 record_buf);
12150 return ARM_RECORD_SUCCESS;
12153 /* Handler for thumb2 load/store (dual/exclusive) and table branch
12154 instructions. */
12156 static int
12157 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
12159 struct regcache *reg_cache = thumb2_insn_r->regcache;
12161 uint32_t reg_rd, reg_rn, offset_imm;
12162 uint32_t reg_dest1, reg_dest2;
12163 uint32_t address, offset_addr;
12164 uint32_t record_buf[8], record_buf_mem[8];
12165 uint32_t op1, op2, op3;
12167 ULONGEST u_regval[2];
12169 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
12170 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
12171 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
12173 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
12175 if (!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
12177 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
12178 record_buf[0] = reg_dest1;
12179 record_buf[1] = ARM_PS_REGNUM;
12180 thumb2_insn_r->reg_rec_count = 2;
12183 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
12185 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12186 record_buf[2] = reg_dest2;
12187 thumb2_insn_r->reg_rec_count = 3;
12190 else
12192 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12193 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12195 if (0 == op1 && 0 == op2)
12197 /* Handle STREX. */
12198 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12199 address = u_regval[0] + (offset_imm * 4);
12200 record_buf_mem[0] = 4;
12201 record_buf_mem[1] = address;
12202 thumb2_insn_r->mem_rec_count = 1;
12203 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12204 record_buf[0] = reg_rd;
12205 thumb2_insn_r->reg_rec_count = 1;
12207 else if (1 == op1 && 0 == op2)
12209 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
12210 record_buf[0] = reg_rd;
12211 thumb2_insn_r->reg_rec_count = 1;
12212 address = u_regval[0];
12213 record_buf_mem[1] = address;
12215 if (4 == op3)
12217 /* Handle STREXB. */
12218 record_buf_mem[0] = 1;
12219 thumb2_insn_r->mem_rec_count = 1;
12221 else if (5 == op3)
12223 /* Handle STREXH. */
12224 record_buf_mem[0] = 2;
12225 thumb2_insn_r->mem_rec_count = 1;
12227 else if (7 == op3)
12229 /* Handle STREXD. */
12230 address = u_regval[0];
12231 record_buf_mem[0] = 4;
12232 record_buf_mem[2] = 4;
12233 record_buf_mem[3] = address + 4;
12234 thumb2_insn_r->mem_rec_count = 2;
12237 else
12239 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12241 if (bit (thumb2_insn_r->arm_insn, 24))
12243 if (bit (thumb2_insn_r->arm_insn, 23))
12244 offset_addr = u_regval[0] + (offset_imm * 4);
12245 else
12246 offset_addr = u_regval[0] - (offset_imm * 4);
12248 address = offset_addr;
12250 else
12251 address = u_regval[0];
12253 record_buf_mem[0] = 4;
12254 record_buf_mem[1] = address;
12255 record_buf_mem[2] = 4;
12256 record_buf_mem[3] = address + 4;
12257 thumb2_insn_r->mem_rec_count = 2;
12258 record_buf[0] = reg_rn;
12259 thumb2_insn_r->reg_rec_count = 1;
12263 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12264 record_buf);
12265 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12266 record_buf_mem);
12267 return ARM_RECORD_SUCCESS;
12270 /* Handler for thumb2 data processing (shift register and modified immediate)
12271 instructions. */
12273 static int
12274 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
12276 uint32_t reg_rd, op;
12277 uint32_t record_buf[8];
12279 op = bits (thumb2_insn_r->arm_insn, 21, 24);
12280 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12282 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
12284 record_buf[0] = ARM_PS_REGNUM;
12285 thumb2_insn_r->reg_rec_count = 1;
12287 else
12289 record_buf[0] = reg_rd;
12290 record_buf[1] = ARM_PS_REGNUM;
12291 thumb2_insn_r->reg_rec_count = 2;
12294 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12295 record_buf);
12296 return ARM_RECORD_SUCCESS;
12299 /* Generic handler for thumb2 instructions which affect the destination and
12300 PS registers. */
12302 static int
12303 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
12305 uint32_t reg_rd;
12306 uint32_t record_buf[8];
12308 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
12310 record_buf[0] = reg_rd;
12311 record_buf[1] = ARM_PS_REGNUM;
12312 thumb2_insn_r->reg_rec_count = 2;
12314 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12315 record_buf);
12316 return ARM_RECORD_SUCCESS;
12319 /* Handler for thumb2 branch and miscellaneous control instructions. */
12321 static int
12322 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
12324 uint32_t op, op1, op2;
12325 uint32_t record_buf[8];
12327 op = bits (thumb2_insn_r->arm_insn, 20, 26);
12328 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
12329 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
12331 /* Handle MSR insn. */
12332 if (!(op1 & 0x2) && 0x38 == op)
12334 if (!(op2 & 0x3))
12336 /* CPSR is going to be changed. */
12337 record_buf[0] = ARM_PS_REGNUM;
12338 thumb2_insn_r->reg_rec_count = 1;
12340 else
12342 arm_record_unsupported_insn (thumb2_insn_r);
12343 return -1;
12346 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
12348 /* BLX. */
12349 record_buf[0] = ARM_PS_REGNUM;
12350 record_buf[1] = ARM_LR_REGNUM;
12351 thumb2_insn_r->reg_rec_count = 2;
12354 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12355 record_buf);
12356 return ARM_RECORD_SUCCESS;
12359 /* Handler for thumb2 store single data item instructions. */
12361 static int
12362 thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
12364 struct regcache *reg_cache = thumb2_insn_r->regcache;
12366 uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
12367 uint32_t address, offset_addr;
12368 uint32_t record_buf[8], record_buf_mem[8];
12369 uint32_t op1, op2;
12371 ULONGEST u_regval[2];
12373 op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
12374 op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
12375 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12376 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
12378 if (bit (thumb2_insn_r->arm_insn, 23))
12380 /* T2 encoding. */
12381 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
12382 offset_addr = u_regval[0] + offset_imm;
12383 address = offset_addr;
12385 else
12387 /* T3 encoding. */
12388 if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
12390 /* Handle STRB (register). */
12391 reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
12392 regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
12393 shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
12394 offset_addr = u_regval[1] << shift_imm;
12395 address = u_regval[0] + offset_addr;
12397 else
12399 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
12400 if (bit (thumb2_insn_r->arm_insn, 10))
12402 if (bit (thumb2_insn_r->arm_insn, 9))
12403 offset_addr = u_regval[0] + offset_imm;
12404 else
12405 offset_addr = u_regval[0] - offset_imm;
12407 address = offset_addr;
12409 else
12410 address = u_regval[0];
12414 switch (op1)
12416 /* Store byte instructions. */
12417 case 4:
12418 case 0:
12419 record_buf_mem[0] = 1;
12420 break;
12421 /* Store half word instructions. */
12422 case 1:
12423 case 5:
12424 record_buf_mem[0] = 2;
12425 break;
12426 /* Store word instructions. */
12427 case 2:
12428 case 6:
12429 record_buf_mem[0] = 4;
12430 break;
12432 default:
12433 gdb_assert_not_reached ("no decoding pattern found");
12434 break;
12437 record_buf_mem[1] = address;
12438 thumb2_insn_r->mem_rec_count = 1;
12439 record_buf[0] = reg_rn;
12440 thumb2_insn_r->reg_rec_count = 1;
12442 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12443 record_buf);
12444 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12445 record_buf_mem);
12446 return ARM_RECORD_SUCCESS;
12449 /* Handler for thumb2 load memory hints instructions. */
12451 static int
12452 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
12454 uint32_t record_buf[8];
12455 uint32_t reg_rt, reg_rn;
12457 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
12458 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12460 if (ARM_PC_REGNUM != reg_rt)
12462 record_buf[0] = reg_rt;
12463 record_buf[1] = reg_rn;
12464 record_buf[2] = ARM_PS_REGNUM;
12465 thumb2_insn_r->reg_rec_count = 3;
12467 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12468 record_buf);
12469 return ARM_RECORD_SUCCESS;
12472 return ARM_RECORD_FAILURE;
12475 /* Handler for thumb2 load word instructions. */
12477 static int
12478 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
12480 uint32_t record_buf[8];
12482 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
12483 record_buf[1] = ARM_PS_REGNUM;
12484 thumb2_insn_r->reg_rec_count = 2;
12486 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12487 record_buf);
12488 return ARM_RECORD_SUCCESS;
12491 /* Handler for thumb2 long multiply, long multiply accumulate, and
12492 divide instructions. */
12494 static int
12495 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
12497 uint32_t opcode1 = 0, opcode2 = 0;
12498 uint32_t record_buf[8];
12500 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
12501 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
12503 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
12505 /* Handle SMULL, UMULL, SMLAL, UMLAL (long multiply and long
12506 multiply accumulate). */
12507 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12508 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12509 record_buf[2] = ARM_PS_REGNUM;
12510 thumb2_insn_r->reg_rec_count = 3;
12512 else if (1 == opcode1 || 3 == opcode1)
12514 /* Handle SDIV and UDIV. */
12515 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
12516 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
12517 record_buf[2] = ARM_PS_REGNUM;
12518 thumb2_insn_r->reg_rec_count = 3;
12520 else
12521 return ARM_RECORD_FAILURE;
12523 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12524 record_buf);
12525 return ARM_RECORD_SUCCESS;
12528 /* Record handler for thumb32 coprocessor instructions. */
12530 static int
12531 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
12533 if (bit (thumb2_insn_r->arm_insn, 25))
12534 return arm_record_coproc_data_proc (thumb2_insn_r);
12535 else
12536 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
12539 /* Record handler for advanced SIMD structure load/store instructions. */
12541 static int
12542 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
12544 struct regcache *reg_cache = thumb2_insn_r->regcache;
12545 uint32_t l_bit, a_bit, b_bits;
12546 uint32_t record_buf[128], record_buf_mem[128];
12547 uint32_t reg_rn, reg_vd, address, f_elem;
12548 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
12549 uint8_t f_ebytes;
12551 l_bit = bit (thumb2_insn_r->arm_insn, 21);
12552 a_bit = bit (thumb2_insn_r->arm_insn, 23);
12553 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
12554 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
12555 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
12556 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
12557 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
12558 f_elem = 8 / f_ebytes;
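/* f_ebytes is the element size in bytes taken from the size field
   (bits 7:6) and f_elem is the number of such elements in one 64-bit
   D register, e.g. size 0b01 gives 2-byte elements and f_elem = 4.  */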
12560 if (!l_bit)
12562 ULONGEST u_regval = 0;
12563 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12564 address = u_regval;
12566 if (!a_bit)
12568 /* Handle VST1. */
12569 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12571 if (b_bits == 0x07)
12572 bf_regs = 1;
12573 else if (b_bits == 0x0a)
12574 bf_regs = 2;
12575 else if (b_bits == 0x06)
12576 bf_regs = 3;
12577 else if (b_bits == 0x02)
12578 bf_regs = 4;
12579 else
12580 bf_regs = 0;
12582 for (index_r = 0; index_r < bf_regs; index_r++)
12584 for (index_e = 0; index_e < f_elem; index_e++)
12586 record_buf_mem[index_m++] = f_ebytes;
12587 record_buf_mem[index_m++] = address;
12588 address = address + f_ebytes;
12589 thumb2_insn_r->mem_rec_count += 1;
12593 /* Handle VST2. */
12594 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12596 if (b_bits == 0x09 || b_bits == 0x08)
12597 bf_regs = 1;
12598 else if (b_bits == 0x03)
12599 bf_regs = 2;
12600 else
12601 bf_regs = 0;
12603 for (index_r = 0; index_r < bf_regs; index_r++)
12604 for (index_e = 0; index_e < f_elem; index_e++)
12606 for (loop_t = 0; loop_t < 2; loop_t++)
12608 record_buf_mem[index_m++] = f_ebytes;
12609 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12610 thumb2_insn_r->mem_rec_count += 1;
12612 address = address + (2 * f_ebytes);
12615 /* Handle VST3. */
12616 else if ((b_bits & 0x0e) == 0x04)
12618 for (index_e = 0; index_e < f_elem; index_e++)
12620 for (loop_t = 0; loop_t < 3; loop_t++)
12622 record_buf_mem[index_m++] = f_ebytes;
12623 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12624 thumb2_insn_r->mem_rec_count += 1;
12626 address = address + (3 * f_ebytes);
12629 /* Handle VST4. */
12630 else if (!(b_bits & 0x0e))
12632 for (index_e = 0; index_e < f_elem; index_e++)
12634 for (loop_t = 0; loop_t < 4; loop_t++)
12636 record_buf_mem[index_m++] = f_ebytes;
12637 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
12638 thumb2_insn_r->mem_rec_count += 1;
12640 address = address + (4 * f_ebytes);
12644 else
12646 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
12648 if (bft_size == 0x00)
12649 f_ebytes = 1;
12650 else if (bft_size == 0x01)
12651 f_ebytes = 2;
12652 else if (bft_size == 0x02)
12653 f_ebytes = 4;
12654 else
12655 f_ebytes = 0;
12657 /* Handle VST1. */
12658 if (!(b_bits & 0x0b) || b_bits == 0x08)
12659 thumb2_insn_r->mem_rec_count = 1;
12660 /* Handle VST2. */
12661 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
12662 thumb2_insn_r->mem_rec_count = 2;
12663 /* Handle VST3. */
12664 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
12665 thumb2_insn_r->mem_rec_count = 3;
12666 /* Handle VST4. */
12667 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
12668 thumb2_insn_r->mem_rec_count = 4;
12670 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
12672 record_buf_mem[index_m * 2] = f_ebytes;
12673 record_buf_mem[index_m * 2 + 1] = address + (index_m * f_ebytes);
12677 else
12679 if (!a_bit)
12681 /* Handle VLD1. */
12682 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
12683 thumb2_insn_r->reg_rec_count = 1;
12684 /* Handle VLD2. */
12685 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
12686 thumb2_insn_r->reg_rec_count = 2;
12687 /* Handle VLD3. */
12688 else if ((b_bits & 0x0e) == 0x04)
12689 thumb2_insn_r->reg_rec_count = 3;
12690 /* Handle VLD4. */
12691 else if (!(b_bits & 0x0e))
12692 thumb2_insn_r->reg_rec_count = 4;
12694 else
12696 /* Handle VLD1. */
12697 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
12698 thumb2_insn_r->reg_rec_count = 1;
12699 /* Handle VLD2. */
12700 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
12701 thumb2_insn_r->reg_rec_count = 2;
12702 /* Handle VLD3. */
12703 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
12704 thumb2_insn_r->reg_rec_count = 3;
12705 /* Handle VLD4. */
12706 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
12707 thumb2_insn_r->reg_rec_count = 4;
12709 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
12710 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
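/* If Rm (bits 3:0) is not 0b1111 the addressing mode has writeback, so
   the base register Rn changes as well and must be recorded.  */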
12714 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
12716 record_buf[index_r] = reg_rn;
12717 thumb2_insn_r->reg_rec_count += 1;
12720 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
12721 record_buf);
12722 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
12723 record_buf_mem);
12724 return 0;
12727 /* Decodes thumb2 instruction type and invokes its record handler. */
12729 static unsigned int
12730 thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
12732 uint32_t op, op1, op2;
12734 op = bit (thumb2_insn_r->arm_insn, 15);
12735 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
12736 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
12738 if (op1 == 0x01)
12740 if (!(op2 & 0x64))
12742 /* Load/store multiple instruction. */
12743 return thumb2_record_ld_st_multiple (thumb2_insn_r);
12745 else if (!((op2 & 0x64) ^ 0x04))
12747 /* Load/store (dual/exclusive) and table branch instruction. */
12748 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
12750 else if (!((op2 & 0x20) ^ 0x20))
12752 /* Data-processing (shifted register). */
12753 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12755 else if (op2 & 0x40)
12757 /* Co-processor instructions. */
12758 return thumb2_record_coproc_insn (thumb2_insn_r);
12761 else if (op1 == 0x02)
12763 if (op)
12765 /* Branches and miscellaneous control instructions. */
12766 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
12768 else if (op2 & 0x20)
12770 /* Data-processing (plain binary immediate) instruction. */
12771 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12773 else
12775 /* Data-processing (modified immediate). */
12776 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
12779 else if (op1 == 0x03)
12781 if (!(op2 & 0x71))
12783 /* Store single data item. */
12784 return thumb2_record_str_single_data (thumb2_insn_r);
12786 else if (!((op2 & 0x71) ^ 0x10))
12788 /* Advanced SIMD or structure load/store instructions. */
12789 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
12791 else if (!((op2 & 0x67) ^ 0x01))
12793 /* Load byte, memory hints instruction. */
12794 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12796 else if (!((op2 & 0x67) ^ 0x03))
12798 /* Load halfword, memory hints instruction. */
12799 return thumb2_record_ld_mem_hints (thumb2_insn_r);
12801 else if (!((op2 & 0x67) ^ 0x05))
12803 /* Load word instruction. */
12804 return thumb2_record_ld_word (thumb2_insn_r);
12806 else if (!((op2 & 0x70) ^ 0x20))
12808 /* Data-processing (register) instruction. */
12809 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12811 else if (!((op2 & 0x78) ^ 0x30))
12813 /* Multiply, multiply accumulate, abs diff instruction. */
12814 return thumb2_record_ps_dest_generic (thumb2_insn_r);
12816 else if (!((op2 & 0x78) ^ 0x38))
12818 /* Long multiply, long multiply accumulate, and divide. */
12819 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
12821 else if (op2 & 0x40)
12823 /* Co-processor instructions. */
12824 return thumb2_record_coproc_insn (thumb2_insn_r);
12828 return -1;
12831 /* Extract an arm/thumb/thumb2 insn depending on its size; return 0 on
12832 success and a positive value on failure. */
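/* The caller passes insn_size 2 when probing a Thumb halfword and 4 for
   a full ARM or 32-bit Thumb-2 instruction.  The byte order for code is
   used because it can differ from the data byte order, e.g. on BE8
   targets instructions are little-endian while data is big-endian.  */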
12834 static int
12835 extract_arm_insn (insn_decode_record *insn_record, uint32_t insn_size)
12837 gdb_byte buf[insn_size];
12839 memset (&buf[0], 0, insn_size);
12841 if (target_read_memory (insn_record->this_addr, &buf[0], insn_size))
12842 return 1;
12843 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
12844 insn_size,
12845 gdbarch_byte_order_for_code (insn_record->gdbarch));
12846 return 0;
12849 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
12851 /* Decode an arm/thumb insn depending on its condition code and opcode
12852 bits, and dispatch it to the matching record handler. */
12854 static int
12855 decode_insn (insn_decode_record *arm_record, record_type_t record_type,
12856 uint32_t insn_size)
12859 /* (Starting from numerical 0), bits 25, 26, 27 decode the type of the arm
12860 instruction. */
12861 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
12863 arm_record_data_proc_misc_ld_str, /* 000. */
12864 arm_record_data_proc_imm, /* 001. */
12865 arm_record_ld_st_imm_offset, /* 010. */
12866 arm_record_ld_st_reg_offset, /* 011. */
12867 arm_record_ld_st_multiple, /* 100. */
12868 arm_record_b_bl, /* 101. */
12869 arm_record_asimd_vfp_coproc, /* 110. */
12870 arm_record_coproc_data_proc /* 111. */
12873 /* (Starting from numerical 0), bits 13, 14, 15 decode the type of the thumb
12874 instruction. */
12875 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
12877 thumb_record_shift_add_sub, /* 000. */
12878 thumb_record_add_sub_cmp_mov, /* 001. */
12879 thumb_record_ld_st_reg_offset, /* 010. */
12880 thumb_record_ld_st_imm_offset, /* 011. */
12881 thumb_record_ld_st_stack, /* 100. */
12882 thumb_record_misc, /* 101. */
12883 thumb_record_ldm_stm_swi, /* 110. */
12884 thumb_record_branch /* 111. */
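/* For example, the ARM insn 0xe5901000 (ldr r1, [r0]) has bits 27-25
   = 010, so it is dispatched to arm_record_ld_st_imm_offset, while the
   Thumb insn 0x6808 (ldr r0, [r1]) has bits 15-13 = 011 and goes to
   thumb_record_ld_st_imm_offset.  */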
12887 uint32_t ret = 0; /* Return value: negative: failure; 0: success. */
12888 uint32_t insn_id = 0;
12890 if (extract_arm_insn (arm_record, insn_size))
12892 if (record_debug)
12894 printf_unfiltered (_("Process record: error reading memory at "
12895 "addr %s len = %d.\n"),
12896 paddress (arm_record->gdbarch,
12897 arm_record->this_addr), insn_size);
12899 return -1;
12901 else if (ARM_RECORD == record_type)
12903 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
12904 insn_id = bits (arm_record->arm_insn, 25, 27);
12906 if (arm_record->cond == 0xf)
12907 ret = arm_record_extension_space (arm_record);
12908 else
12910 /* This insn did not fall into the extension space, so decode it
12911 by its primary opcode bits. */
12912 ret = arm_handle_insn[insn_id] (arm_record);
12914 if (ret != ARM_RECORD_SUCCESS)
12916 arm_record_unsupported_insn (arm_record);
12917 ret = -1;
12920 else if (THUMB_RECORD == record_type)
12922 /* As thumb does not have condition codes, we set cond to a negative value. */
12923 arm_record->cond = -1;
12924 insn_id = bits (arm_record->arm_insn, 13, 15);
12925 ret = thumb_handle_insn[insn_id] (arm_record);
12926 if (ret != ARM_RECORD_SUCCESS)
12928 arm_record_unsupported_insn (arm_record);
12929 ret = -1;
12932 else if (THUMB2_RECORD == record_type)
12934 /* As thumb does not have condition codes, we set cond to a negative value. */
12935 arm_record->cond = -1;
12937 /* Swap first half of 32bit thumb instruction with second half. */
12938 arm_record->arm_insn
12939 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
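/* On a little-endian code stream the 4-byte read yields
   (second_halfword << 16) | first_halfword, so the swap places the
   first halfword in bits 31-16, where op1 (bits 28:27 of the swapped
   word, i.e. bits 12:11 of the first halfword) is decoded from.  */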
12941 ret = thumb2_record_decode_insn_handler (arm_record);
12943 if (ret != ARM_RECORD_SUCCESS)
12945 arm_record_unsupported_insn (arm_record);
12946 ret = -1;
12949 else
12951 /* Throw assertion. */
12952 gdb_assert_not_reached ("not a valid instruction, could not decode");
12955 return ret;
12959 /* Cleans up local record registers and memory allocations. */
12961 static void
12962 deallocate_reg_mem (insn_decode_record *record)
12964 xfree (record->arm_regs);
12965 xfree (record->arm_mems);
12969 /* Parse the current instruction and record the values of the registers and
12970 memory that will be changed by the current instruction to "record_arch_list".
12971 Return -1 if something is wrong. */
12974 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
12975 CORE_ADDR insn_addr)
12978 uint32_t no_of_rec = 0;
12979 uint32_t ret = 0; /* Return value: -1: record failure; 0: success. */
12980 ULONGEST t_bit = 0, insn_id = 0;
12982 ULONGEST u_regval = 0;
12984 insn_decode_record arm_record;
12986 memset (&arm_record, 0, sizeof (insn_decode_record));
12987 arm_record.regcache = regcache;
12988 arm_record.this_addr = insn_addr;
12989 arm_record.gdbarch = gdbarch;
12992 if (record_debug > 1)
12994 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
12995 "addr = %s\n",
12996 paddress (gdbarch, arm_record.this_addr));
12999 if (extract_arm_insn (&arm_record, 2))
13001 if (record_debug)
13003 printf_unfiltered (_("Process record: error reading memory at "
13004 "addr %s len = %d.\n"),
13005 paddress (arm_record.gdbarch,
13006 arm_record.this_addr), 2);
13008 return -1;
13011 /* Check whether the insn is a thumb or an arm one. */
13013 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13014 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13017 if (!(u_regval & t_bit))
13019 /* We are decoding arm insn. */
13020 ret = decode_insn (&arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13022 else
13024 insn_id = bits (arm_record.arm_insn, 11, 15);
13025 /* Is it a thumb2 insn? */
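/* A 32-bit Thumb-2 instruction starts with a halfword whose bits 15-11
   are 0b11101, 0b11110 or 0b11111, i.e. 0x1D, 0x1E or 0x1F.  */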
13026 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
13028 ret = decode_insn (&arm_record, THUMB2_RECORD,
13029 THUMB2_INSN_SIZE_BYTES);
13031 else
13033 /* We are decoding thumb insn. */
13034 ret = decode_insn (&arm_record, THUMB_RECORD, THUMB_INSN_SIZE_BYTES);
13038 if (0 == ret)
13040 /* Record registers. */
13041 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
13042 if (arm_record.arm_regs)
13044 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13046 if (record_full_arch_list_add_reg
13047 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
13048 ret = -1;
13051 /* Record memories. */
13052 if (arm_record.arm_mems)
13054 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
13056 if (record_full_arch_list_add_mem
13057 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
13058 arm_record.arm_mems[no_of_rec].len))
13059 ret = -1;
13063 if (record_full_arch_list_add_end ())
13064 ret = -1;
13068 deallocate_reg_mem (&arm_record);
13070 return ret;