/* Output routines for GCC for ARM.
   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
   2001, 2002, 2003 Free Software Foundation, Inc.
   Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
   and Martin Simmons (@harleqn.co.uk).
   More major hacks by Richard Earnshaw (rearnsha@arm.com).

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 2, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING.  If not, write to
   the Free Software Foundation, 59 Temple Place - Suite 330,
   Boston, MA 02111-1307, USA.  */
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "tm.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "obstack.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "real.h"
35 #include "insn-config.h"
36 #include "conditions.h"
37 #include "output.h"
38 #include "insn-attr.h"
39 #include "flags.h"
40 #include "reload.h"
41 #include "function.h"
42 #include "expr.h"
43 #include "optabs.h"
44 #include "toplev.h"
45 #include "recog.h"
46 #include "ggc.h"
47 #include "except.h"
48 #include "c-pragma.h"
49 #include "integrate.h"
50 #include "tm_p.h"
51 #include "target.h"
52 #include "target-def.h"
54 /* Forward definitions of types. */
55 typedef struct minipool_node Mnode;
56 typedef struct minipool_fixup Mfix;
const char extra_reg_names1[][16] =
{ "mv0", "mv1", "mv2", "mv3", "mv4", "mv5", "mv6", "mv7",
  "mv8", "mv9", "mv10", "mv11", "mv12", "mv13", "mv14", "mv15"
};
#define extra_reg_names1 bogus1_regnames

const struct attribute_spec arm_attribute_table[];
/* Forward function declarations.  */
static void arm_add_gc_roots (void);
static int arm_gen_constant (enum rtx_code, enum machine_mode, HOST_WIDE_INT,
                             rtx, rtx, int, int);
static unsigned bit_count (unsigned long);
static int arm_address_register_rtx_p (rtx, int);
static int arm_legitimate_index_p (enum machine_mode, rtx, int);
static int thumb_base_register_rtx_p (rtx, enum machine_mode, int);
inline static int thumb_index_register_rtx_p (rtx, int);
static int const_ok_for_op (HOST_WIDE_INT, enum rtx_code);
static int eliminate_lr2ip (rtx *);
static rtx emit_multi_reg_push (int);
static rtx emit_sfm (int, int);
#ifndef AOF_ASSEMBLER
static bool arm_assemble_integer (rtx, unsigned int, int);
#endif
static const char *fp_const_from_val (REAL_VALUE_TYPE *);
static arm_cc get_arm_condition_code (rtx);
static void init_fpa_table (void);
static HOST_WIDE_INT int_log2 (HOST_WIDE_INT);
static rtx is_jump_table (rtx);
static const char *output_multi_immediate (rtx *, const char *, const char *,
                                           int, HOST_WIDE_INT);
static void print_multi_reg (FILE *, const char *, int, int);
static const char *shift_op (rtx, HOST_WIDE_INT *);
static struct machine_function *arm_init_machine_status (void);
static int number_of_first_bit_set (int);
static void replace_symbols_in_block (tree, rtx, rtx);
static void thumb_exit (FILE *, int, rtx);
static void thumb_pushpop (FILE *, int, int);
static const char *thumb_condition_code (rtx, int);
static rtx is_jump_table (rtx);
static HOST_WIDE_INT get_jump_table_size (rtx);
static Mnode *move_minipool_fix_forward_ref (Mnode *, Mnode *, HOST_WIDE_INT);
static Mnode *add_minipool_forward_ref (Mfix *);
static Mnode *move_minipool_fix_backward_ref (Mnode *, Mnode *, HOST_WIDE_INT);
static Mnode *add_minipool_backward_ref (Mfix *);
static void assign_minipool_offsets (Mfix *);
static void arm_print_value (FILE *, rtx);
static void dump_minipool (rtx);
static int arm_barrier_cost (rtx);
static Mfix *create_fix_barrier (Mfix *, HOST_WIDE_INT);
static void push_minipool_barrier (rtx, HOST_WIDE_INT);
static void push_minipool_fix (rtx, HOST_WIDE_INT, rtx *, enum machine_mode,
                               rtx);
static void arm_reorg (void);
static bool note_invalid_constants (rtx, HOST_WIDE_INT, int);
static int current_file_function_operand (rtx);
static unsigned long arm_compute_save_reg0_reg12_mask (void);
static unsigned long arm_compute_save_reg_mask (void);
static unsigned long arm_isr_value (tree);
static unsigned long arm_compute_func_type (void);
static tree arm_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
static tree arm_handle_isr_attribute (tree *, tree, tree, int, bool *);
static void arm_output_function_epilogue (FILE *, HOST_WIDE_INT);
static void arm_output_function_prologue (FILE *, HOST_WIDE_INT);
static void thumb_output_function_prologue (FILE *, HOST_WIDE_INT);
static int arm_comp_type_attributes (tree, tree);
static void arm_set_default_type_attributes (tree);
static int arm_adjust_cost (rtx, rtx, rtx, int);
static int arm_use_dfa_pipeline_interface (void);
static int count_insns_for_constant (HOST_WIDE_INT, int);
static int arm_get_strip_length (int);
static bool arm_function_ok_for_sibcall (tree, tree);
static void arm_internal_label (FILE *, const char *, unsigned long);
static void arm_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
                                 tree);
static int arm_rtx_costs_1 (rtx, enum rtx_code, enum rtx_code);
static bool arm_rtx_costs (rtx, int, int, int *);
static int arm_address_cost (rtx);
static bool arm_memory_load_p (rtx);
static bool arm_cirrus_insn_p (rtx);
static void cirrus_reorg (rtx);
#ifdef OBJECT_FORMAT_ELF
static void arm_elf_asm_named_section (const char *, unsigned int);
#endif
#ifndef ARM_PE
static void arm_encode_section_info (tree, rtx, int);
#endif
#ifdef AOF_ASSEMBLER
static void aof_globalize_label (FILE *, const char *);
static void aof_dump_imports (FILE *);
static void aof_dump_pic_table (FILE *);
static void aof_file_end (void);
#endif
/* Initialize the GCC target structure.  */
#ifdef TARGET_DLLIMPORT_DECL_ATTRIBUTES
#undef  TARGET_MERGE_DECL_ATTRIBUTES
#define TARGET_MERGE_DECL_ATTRIBUTES merge_dllimport_decl_attributes
#endif

#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE arm_attribute_table

#ifdef AOF_ASSEMBLER
#undef  TARGET_ASM_BYTE_OP
#define TARGET_ASM_BYTE_OP "\tDCB\t"
#undef  TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\tDCW\t"
#undef  TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\tDCD\t"
#undef  TARGET_ASM_GLOBALIZE_LABEL
#define TARGET_ASM_GLOBALIZE_LABEL aof_globalize_label
#undef  TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END aof_file_end
#else
#undef  TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP NULL
#undef  TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER arm_assemble_integer
#endif

#undef  TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE arm_output_function_prologue

#undef  TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE arm_output_function_epilogue

#undef  TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES arm_comp_type_attributes

#undef  TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES arm_set_default_type_attributes

#undef  TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST arm_adjust_cost

#undef  TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
#define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE arm_use_dfa_pipeline_interface

#undef  TARGET_ENCODE_SECTION_INFO
#ifdef ARM_PE
#define TARGET_ENCODE_SECTION_INFO arm_pe_encode_section_info
#else
#define TARGET_ENCODE_SECTION_INFO arm_encode_section_info
#endif

#undef  TARGET_STRIP_NAME_ENCODING
#define TARGET_STRIP_NAME_ENCODING arm_strip_name_encoding

#undef  TARGET_ASM_INTERNAL_LABEL
#define TARGET_ASM_INTERNAL_LABEL arm_internal_label

#undef  TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL arm_function_ok_for_sibcall

#undef  TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK arm_output_mi_thunk
#undef  TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall

#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS arm_rtx_costs
#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST arm_address_cost

#undef  TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG arm_reorg

struct gcc_target targetm = TARGET_INITIALIZER;
/* Obstack for minipool constant handling.  */
static struct obstack minipool_obstack;
static char *minipool_startobj;

/* The maximum number of insns skipped which
   will be conditionalised if possible.  */
static int max_insns_skipped = 5;

extern FILE *asm_out_file;

/* True if we are currently building a constant table.  */
int making_const_table;

/* Define the information needed to generate branch insns.  This is
   stored from the compare operation.  */
rtx arm_compare_op0, arm_compare_op1;

/* What type of floating point are we tuning for?  */
enum fputype arm_fpu_tune;

/* What type of floating point instructions are available?  */
enum fputype arm_fpu_arch;

/* What program mode is the cpu running in?  26-bit mode or 32-bit mode.  */
enum prog_mode_type arm_prgmode;

/* Set by the -mfp=... option.  */
const char *target_fp_name = NULL;

/* Used to parse the -mstructure_size_boundary command line option.  */
const char *structure_size_string = NULL;
int arm_structure_size_boundary = DEFAULT_STRUCTURE_SIZE_BOUNDARY;

/* Bit values used to identify processor capabilities.  */
#define FL_CO_PROC    (1 << 0)        /* Has external co-processor bus.  */
#define FL_FAST_MULT  (1 << 1)        /* Fast multiply.  */
#define FL_MODE26     (1 << 2)        /* 26-bit mode support.  */
#define FL_MODE32     (1 << 3)        /* 32-bit mode support.  */
#define FL_ARCH4      (1 << 4)        /* Architecture rel 4.  */
#define FL_ARCH5      (1 << 5)        /* Architecture rel 5.  */
#define FL_THUMB      (1 << 6)        /* Thumb aware.  */
#define FL_LDSCHED    (1 << 7)        /* Load scheduling necessary.  */
#define FL_STRONG     (1 << 8)        /* StrongARM.  */
#define FL_ARCH5E     (1 << 9)        /* DSP extensions to v5.  */
#define FL_XSCALE     (1 << 10)       /* XScale.  */
#define FL_CIRRUS     (1 << 11)       /* Cirrus/DSP.  */

/* The bits in this mask specify which
   instructions we are allowed to generate.  */
static unsigned long insn_flags = 0;

/* The bits in this mask specify which instruction scheduling options should
   be used.  Note - there is an overlap with FL_FAST_MULT.  For some
   hardware we want to be able to generate the multiply instructions, but to
   tune as if they were not present in the architecture.  */
static unsigned long tune_flags = 0;

/* The following are used in the arm.md file as equivalents to bits
   in the above two flag variables.  */

/* Nonzero if this is an "M" variant of the processor.  */
int arm_fast_multiply = 0;

/* Nonzero if this chip supports the ARM Architecture 4 extensions.  */
int arm_arch4 = 0;

/* Nonzero if this chip supports the ARM Architecture 5 extensions.  */
int arm_arch5 = 0;

/* Nonzero if this chip supports the ARM Architecture 5E extensions.  */
int arm_arch5e = 0;

/* Nonzero if this chip can benefit from load scheduling.  */
int arm_ld_sched = 0;

/* Nonzero if this chip is a StrongARM.  */
int arm_is_strong = 0;

/* Nonzero if this chip is an XScale.  */
int arm_arch_xscale = 0;

/* Nonzero if tuning for XScale.  */
int arm_tune_xscale = 0;

/* Nonzero if this chip is an ARM6 or an ARM7.  */
int arm_is_6_or_7 = 0;

/* Nonzero if this chip is a Cirrus/DSP.  */
int arm_is_cirrus = 0;

/* Nonzero if generating Thumb instructions.  */
int thumb_code = 0;

/* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
   must report the mode of the memory reference from PRINT_OPERAND to
   PRINT_OPERAND_ADDRESS.  */
enum machine_mode output_memory_reference_mode;

/* The register number to be used for the PIC offset register.  */
const char *arm_pic_register_string = NULL;
int arm_pic_register = INVALID_REGNUM;

/* Set to 1 when a return insn is output; this means that the epilogue
   is not needed.  */
int return_used_this_function;

/* Set to 1 after arm_reorg has started.  Reset to 0 at the start of
   the next function.  */
static int after_arm_reorg = 0;

/* The maximum number of insns to be used when loading a constant.  */
static int arm_constant_limit = 3;

/* For an explanation of these variables, see final_prescan_insn below.  */
int arm_ccfsm_state;
enum arm_cond_code arm_current_cc;
rtx arm_target_insn;
int arm_target_label;

/* The condition codes of the ARM, and the inverse function.  */
static const char * const arm_condition_codes[] =
{
  "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
  "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
};

#define streq(string1, string2) (strcmp (string1, string2) == 0)
/* Initialization code.  */

struct processors
{
  const char *const name;
  const unsigned long flags;
};

/* Not all of these give usefully different compilation alternatives,
   but there is no simple way of generalizing them.  */
static const struct processors all_cores[] =
{
  /* ARM Cores */

  {"arm2", FL_CO_PROC | FL_MODE26 },
  {"arm250", FL_CO_PROC | FL_MODE26 },
  {"arm3", FL_CO_PROC | FL_MODE26 },
  {"arm6", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm60", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm600", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm610", FL_MODE26 | FL_MODE32 },
  {"arm620", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  /* arm7m doesn't exist on its own, but only with D, (and I), but
     those don't alter the code, so arm7m is sometimes used.  */
  {"arm7m", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7d", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dm", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7di", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dmi", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm70", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700i", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm710", FL_MODE26 | FL_MODE32 },
  {"arm710t", FL_MODE26 | FL_MODE32 | FL_THUMB },
  {"arm720", FL_MODE26 | FL_MODE32 },
  {"arm720t", FL_MODE26 | FL_MODE32 | FL_THUMB },
  {"arm740t", FL_MODE26 | FL_MODE32 | FL_THUMB },
  {"arm710c", FL_MODE26 | FL_MODE32 },
  {"arm7100", FL_MODE26 | FL_MODE32 },
  {"arm7500", FL_MODE26 | FL_MODE32 },
  /* Doesn't have an external co-proc, but does have embedded fpa.  */
  {"arm7500fe", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7tdmi", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  {"arm8", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm810", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm9", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm920", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm920t", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm940t", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm9tdmi", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm9e", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"ep9312", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_CIRRUS },
  {"strongarm", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm110", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm1100", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm1110", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"arm10tdmi", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_ARCH5 },
  {"arm1020t", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_ARCH5 },
  {"xscale", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_STRONG | FL_ARCH5 | FL_ARCH5E | FL_XSCALE },

  {NULL, 0}
};

static const struct processors all_architectures[] =
{
  /* ARM Architectures */

  { "armv2", FL_CO_PROC | FL_MODE26 },
  { "armv2a", FL_CO_PROC | FL_MODE26 },
  { "armv3", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  { "armv3m", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  { "armv4", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 },
  /* Strictly, FL_MODE26 is a permitted option for v4t, but there are no
     implementations that support it, so we will leave it out for now.  */
  { "armv4t", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  { "armv5", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
  { "armv5t", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
  { "armv5te", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 | FL_ARCH5E },
  { "ep9312", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_CIRRUS },
  { NULL, 0 }
};
/* This is a magic structure.  The 'string' field is magically filled in
   with a pointer to the value specified by the user on the command line
   assuming that the user has specified such a value.  */

struct arm_cpu_select arm_select[] =
{
  /* string       name            processors  */
  { NULL,       "-mcpu=",       all_cores  },
  { NULL,       "-march=",      all_architectures },
  { NULL,       "-mtune=",      all_cores }
};
/* Return the number of bits set in VALUE.  */
static unsigned
bit_count (unsigned long value)
{
  unsigned long count = 0;

  while (value)
    {
      count++;
      value &= value - 1;  /* Clear the least-significant set bit.  */
    }

  return count;
}
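
/* For illustration: with VALUE == 0x29 (binary 101001) the loop runs
   three times, computing 0x28, then 0x20, then 0, so COUNT is 3.
   Each "value &= value - 1" step clears exactly one set bit, so the
   loop iterates once per set bit rather than once per bit position.  */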
/* Fix up any incompatible options that the user has specified.
   This has now turned into a maze.  */
void
arm_override_options (void)
{
  unsigned i;

  /* Set up the flags based on the cpu/architecture selected by the user.  */
  for (i = ARRAY_SIZE (arm_select); i--;)
    {
      struct arm_cpu_select *ptr = arm_select + i;

      if (ptr->string != NULL && ptr->string[0] != '\0')
        {
          const struct processors *sel;

          for (sel = ptr->processors; sel->name != NULL; sel++)
            if (streq (ptr->string, sel->name))
              {
                if (i == 2)
                  tune_flags = sel->flags;
                else
                  {
                    /* If we have been given an architecture and a processor
                       make sure that they are compatible.  We only generate
                       a warning though, and we prefer the CPU over the
                       architecture.  */
                    if (insn_flags != 0 && (insn_flags ^ sel->flags))
                      warning ("switch -mcpu=%s conflicts with -march= switch",
                               ptr->string);

                    insn_flags = sel->flags;
                  }

                break;
              }

          if (sel->name == NULL)
            error ("bad value (%s) for %s switch", ptr->string, ptr->name);
        }
    }

  /* If the user did not specify a processor, choose one for them.  */
  if (insn_flags == 0)
    {
      const struct processors *sel;
      unsigned int sought;
      static const struct cpu_default
      {
        const int cpu;
        const char *const name;
      }
      cpu_defaults[] =
      {
        { TARGET_CPU_arm2,      "arm2" },
        { TARGET_CPU_arm6,      "arm6" },
        { TARGET_CPU_arm610,    "arm610" },
        { TARGET_CPU_arm710,    "arm710" },
        { TARGET_CPU_arm7m,     "arm7m" },
        { TARGET_CPU_arm7500fe, "arm7500fe" },
        { TARGET_CPU_arm7tdmi,  "arm7tdmi" },
        { TARGET_CPU_arm8,      "arm8" },
        { TARGET_CPU_arm810,    "arm810" },
        { TARGET_CPU_arm9,      "arm9" },
        { TARGET_CPU_strongarm, "strongarm" },
        { TARGET_CPU_xscale,    "xscale" },
        { TARGET_CPU_ep9312,    "ep9312" },
        { TARGET_CPU_generic,   "arm" },
        { 0, 0 }
      };
      const struct cpu_default *def;

      /* Find the default.  */
      for (def = cpu_defaults; def->name; def++)
        if (def->cpu == TARGET_CPU_DEFAULT)
          break;

      /* Make sure we found the default CPU.  */
      if (def->name == NULL)
        abort ();

      /* Find the default CPU's flags.  */
      for (sel = all_cores; sel->name != NULL; sel++)
        if (streq (def->name, sel->name))
          break;

      if (sel->name == NULL)
        abort ();

      insn_flags = sel->flags;

      /* Now check to see if the user has specified some command line
         switches that require certain abilities from the cpu.  */
      sought = 0;

      if (TARGET_INTERWORK || TARGET_THUMB)
        {
          sought |= (FL_THUMB | FL_MODE32);

          /* Force apcs-32 to be used for interworking.  */
          target_flags |= ARM_FLAG_APCS_32;

          /* There are no ARM processors that support both APCS-26 and
             interworking.  Therefore we force FL_MODE26 to be removed
             from insn_flags here (if it was set), so that the search
             below will always be able to find a compatible processor.  */
          insn_flags &= ~FL_MODE26;
        }
      else if (!TARGET_APCS_32)
        sought |= FL_MODE26;

      if (sought != 0 && ((sought & insn_flags) != sought))
        {
          /* Try to locate a CPU type that supports all of the abilities
             of the default CPU, plus the extra abilities requested by
             the user.  */
          for (sel = all_cores; sel->name != NULL; sel++)
            if ((sel->flags & sought) == (sought | insn_flags))
              break;

          if (sel->name == NULL)
            {
              unsigned current_bit_count = 0;
              const struct processors *best_fit = NULL;

              /* Ideally we would like to issue an error message here
                 saying that it was not possible to find a CPU compatible
                 with the default CPU, but which also supports the command
                 line options specified by the programmer, and so they
                 ought to use the -mcpu=<name> command line option to
                 override the default CPU type.

                 Unfortunately this does not work with multilibing.  We
                 need to be able to support multilibs for -mapcs-26 and for
                 -mthumb-interwork and there is no CPU that can support both
                 options.  Instead if we cannot find a cpu that has both the
                 characteristics of the default cpu and the given command line
                 options we scan the array again looking for a best match.  */
              for (sel = all_cores; sel->name != NULL; sel++)
                if ((sel->flags & sought) == sought)
                  {
                    unsigned count;

                    count = bit_count (sel->flags & insn_flags);

                    if (count >= current_bit_count)
                      {
                        best_fit = sel;
                        current_bit_count = count;
                      }
                  }

              if (best_fit == NULL)
                abort ();
              else
                sel = best_fit;
            }

          insn_flags = sel->flags;
        }
    }

  /* If tuning has not been specified, tune for whichever processor or
     architecture has been selected.  */
  if (tune_flags == 0)
    tune_flags = insn_flags;

  /* Make sure that the processor choice does not conflict with any of the
     other command line choices.  */
  if (TARGET_APCS_32 && !(insn_flags & FL_MODE32))
    {
      /* If APCS-32 was not the default then it must have been set by the
         user, so issue a warning message.  If the user has specified
         "-mapcs-32 -mcpu=arm2" then we lose here.  */
      if ((TARGET_DEFAULT & ARM_FLAG_APCS_32) == 0)
        warning ("target CPU does not support APCS-32");
      target_flags &= ~ARM_FLAG_APCS_32;
    }
  else if (!TARGET_APCS_32 && !(insn_flags & FL_MODE26))
    {
      warning ("target CPU does not support APCS-26");
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_INTERWORK && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support interworking");
      target_flags &= ~ARM_FLAG_INTERWORK;
    }

  if (TARGET_THUMB && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support THUMB instructions");
      target_flags &= ~ARM_FLAG_THUMB;
    }

  if (TARGET_APCS_FRAME && TARGET_THUMB)
    {
      /* warning ("ignoring -mapcs-frame because -mthumb was used"); */
      target_flags &= ~ARM_FLAG_APCS_FRAME;
    }

  /* TARGET_BACKTRACE calls leaf_function_p, which causes a crash if done
     from here where no function is being compiled currently.  */
  if ((target_flags & (THUMB_FLAG_LEAF_BACKTRACE | THUMB_FLAG_BACKTRACE))
      && TARGET_ARM)
    warning ("enabling backtrace support is only meaningful when compiling for the Thumb");

  if (TARGET_ARM && TARGET_CALLEE_INTERWORKING)
    warning ("enabling callee interworking support is only meaningful when compiling for the Thumb");

  if (TARGET_ARM && TARGET_CALLER_INTERWORKING)
    warning ("enabling caller interworking support is only meaningful when compiling for the Thumb");

  /* If interworking is enabled then APCS-32 must be selected as well.  */
  if (TARGET_INTERWORK)
    {
      if (!TARGET_APCS_32)
        warning ("interworking forces APCS-32 to be used");
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_APCS_STACK && !TARGET_APCS_FRAME)
    {
      warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
      target_flags |= ARM_FLAG_APCS_FRAME;
    }

  if (TARGET_POKE_FUNCTION_NAME)
    target_flags |= ARM_FLAG_APCS_FRAME;

  if (TARGET_APCS_REENT && flag_pic)
    error ("-fpic and -mapcs-reent are incompatible");

  if (TARGET_APCS_REENT)
    warning ("APCS reentrant code not supported.  Ignored");

  /* If this target is normally configured to use APCS frames, warn if they
     are turned off and debugging is turned on.  */
  if (TARGET_ARM
      && write_symbols != NO_DEBUG
      && !TARGET_APCS_FRAME
      && (TARGET_DEFAULT & ARM_FLAG_APCS_FRAME))
    warning ("-g with -mno-apcs-frame may not give sensible debugging");

  /* If stack checking is disabled, we can use r10 as the PIC register,
     which keeps r9 available.  */
  if (flag_pic)
    arm_pic_register = TARGET_APCS_STACK ? 9 : 10;

  if (TARGET_APCS_FLOAT)
    warning ("passing floating point arguments in fp regs not yet supported");

  /* Initialize boolean versions of the flags, for use in the arm.md file.  */
  arm_fast_multiply = (insn_flags & FL_FAST_MULT) != 0;
  arm_arch4 = (insn_flags & FL_ARCH4) != 0;
  arm_arch5 = (insn_flags & FL_ARCH5) != 0;
  arm_arch5e = (insn_flags & FL_ARCH5E) != 0;
  arm_arch_xscale = (insn_flags & FL_XSCALE) != 0;

  arm_ld_sched = (tune_flags & FL_LDSCHED) != 0;
  arm_is_strong = (tune_flags & FL_STRONG) != 0;
  thumb_code = (TARGET_ARM == 0);
  arm_is_6_or_7 = (((tune_flags & (FL_MODE26 | FL_MODE32))
                    && !(tune_flags & FL_ARCH4))) != 0;
  arm_tune_xscale = (tune_flags & FL_XSCALE) != 0;
  arm_is_cirrus = (tune_flags & FL_CIRRUS) != 0;

  if (arm_is_cirrus)
    {
      arm_fpu_tune = FPUTYPE_MAVERICK;

      /* Ignore -mhard-float if -mcpu=ep9312.  */
      if (TARGET_HARD_FLOAT)
        target_flags ^= ARM_FLAG_SOFT_FLOAT;
    }
  else
    /* Default value for floating point code... if no co-processor
       bus, then schedule for emulated floating point.  Otherwise,
       assume the user has an FPA.
       Note: this does not prevent use of floating point instructions,
       -msoft-float does that.  */
    arm_fpu_tune = (tune_flags & FL_CO_PROC) ? FPUTYPE_FPA : FPUTYPE_FPA_EMU3;

  if (target_fp_name)
    {
      if (streq (target_fp_name, "2"))
        arm_fpu_arch = FPUTYPE_FPA_EMU2;
      else if (streq (target_fp_name, "3"))
        arm_fpu_arch = FPUTYPE_FPA_EMU3;
      else
        error ("invalid floating point emulation option: -mfpe-%s",
               target_fp_name);
    }
  else
    arm_fpu_arch = FPUTYPE_DEFAULT;

  if (TARGET_FPE)
    {
      if (arm_fpu_tune == FPUTYPE_FPA_EMU3)
        arm_fpu_tune = FPUTYPE_FPA_EMU2;
      else if (arm_fpu_tune == FPUTYPE_MAVERICK)
        warning ("-mfpe switch not supported by ep9312 target cpu - ignored.");
      else if (arm_fpu_tune != FPUTYPE_FPA)
        arm_fpu_tune = FPUTYPE_FPA_EMU2;
    }

  /* For arm2/3 there is no need to do any scheduling if there is only
     a floating point emulator, or we are doing software floating-point.  */
  if ((TARGET_SOFT_FLOAT || arm_fpu_tune != FPUTYPE_FPA)
      && (tune_flags & FL_MODE32) == 0)
    flag_schedule_insns = flag_schedule_insns_after_reload = 0;

  arm_prgmode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;

  if (structure_size_string != NULL)
    {
      int size = strtol (structure_size_string, NULL, 0);

      if (size == 8 || size == 32)
        arm_structure_size_boundary = size;
      else
        warning ("structure size boundary can only be set to 8 or 32");
    }

  if (arm_pic_register_string != NULL)
    {
      int pic_register = decode_reg_name (arm_pic_register_string);

      if (!flag_pic)
        warning ("-mpic-register= is useless without -fpic");

      /* Prevent the user from choosing an obviously stupid PIC register.  */
      else if (pic_register < 0 || call_used_regs[pic_register]
               || pic_register == HARD_FRAME_POINTER_REGNUM
               || pic_register == STACK_POINTER_REGNUM
               || pic_register >= PC_REGNUM)
        error ("unable to use '%s' for PIC register", arm_pic_register_string);
      else
        arm_pic_register = pic_register;
    }

  if (TARGET_THUMB && flag_schedule_insns)
    {
      /* Don't warn since it's on by default in -O2.  */
      flag_schedule_insns = 0;
    }

  /* If optimizing for space, don't synthesize constants.
     For processors with load scheduling, it never costs more than 2 cycles
     to load a constant, and the load scheduler may well reduce that to 1.  */
  if (optimize_size || (tune_flags & FL_LDSCHED))
    arm_constant_limit = 1;

  if (arm_arch_xscale)
    arm_constant_limit = 2;

  /* If optimizing for size, bump the number of instructions that we
     are prepared to conditionally execute (even on a StrongARM).
     Otherwise for the StrongARM, which has early execution of branches,
     a sequence that is worth skipping is shorter.  */
  if (optimize_size)
    max_insns_skipped = 6;
  else if (arm_is_strong)
    max_insns_skipped = 3;

  /* Register global variables with the garbage collector.  */
  arm_add_gc_roots ();
}
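
/* For illustration (command lines only, not exhaustive): with
   "-march=armv5 -mcpu=arm710" the loop above first sets insn_flags
   from the armv5 entry (i == 1) and then reaches the arm710 entry
   (i == 0) with differing flags, so the "switch -mcpu=%s conflicts
   with -march= switch" warning is given and the CPU's flags win.
   A consistent pair such as "-march=armv4t -mcpu=arm7tdmi" selects
   identical flags twice and is silent.  */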
static void
arm_add_gc_roots (void)
{
  gcc_obstack_init (&minipool_obstack);
  minipool_startobj = (char *) obstack_alloc (&minipool_obstack, 0);
}

/* A table of known ARM exception types.
   For use with the interrupt function attribute.  */

typedef struct
{
  const char *const arg;
  const unsigned long return_value;
}
isr_attribute_arg;

static const isr_attribute_arg isr_attribute_args [] =
{
  { "IRQ",   ARM_FT_ISR },
  { "irq",   ARM_FT_ISR },
  { "FIQ",   ARM_FT_FIQ },
  { "fiq",   ARM_FT_FIQ },
  { "ABORT", ARM_FT_ISR },
  { "abort", ARM_FT_ISR },
  { "ABORT", ARM_FT_ISR },
  { "abort", ARM_FT_ISR },
  { "UNDEF", ARM_FT_EXCEPTION },
  { "undef", ARM_FT_EXCEPTION },
  { "SWI",   ARM_FT_EXCEPTION },
  { "swi",   ARM_FT_EXCEPTION },
  { NULL,    ARM_FT_NORMAL }
};
/* Returns the (interrupt) function type of the current
   function, or ARM_FT_UNKNOWN if the type cannot be determined.  */

static unsigned long
arm_isr_value (tree argument)
{
  const isr_attribute_arg *ptr;
  const char *arg;

  /* No argument - default to IRQ.  */
  if (argument == NULL_TREE)
    return ARM_FT_ISR;

  /* Get the value of the argument.  */
  if (TREE_VALUE (argument) == NULL_TREE
      || TREE_CODE (TREE_VALUE (argument)) != STRING_CST)
    return ARM_FT_UNKNOWN;

  arg = TREE_STRING_POINTER (TREE_VALUE (argument));

  /* Check it against the list of known arguments.  */
  for (ptr = isr_attribute_args; ptr->arg != NULL; ptr++)
    if (streq (arg, ptr->arg))
      return ptr->return_value;

  /* An unrecognized interrupt type.  */
  return ARM_FT_UNKNOWN;
}
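
/* The attribute strings above appear in user code as, for example
   (illustrative only):

       void handler (void) __attribute__ ((interrupt ("IRQ")));

   An "interrupt" or "isr" attribute given with no argument defaults
   to ARM_FT_ISR, as handled at the top of arm_isr_value.  */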
/* Computes the type of the current function.  */

static unsigned long
arm_compute_func_type (void)
{
  unsigned long type = ARM_FT_UNKNOWN;
  tree a;
  tree attr;

  if (TREE_CODE (current_function_decl) != FUNCTION_DECL)
    abort ();

  /* Decide if the current function is volatile.  Such functions
     never return, and many memory cycles can be saved by not storing
     register values that will never be needed again.  This optimization
     was added to speed up context switching in a kernel application.  */
  if (optimize > 0
      && current_function_nothrow
      && TREE_THIS_VOLATILE (current_function_decl))
    type |= ARM_FT_VOLATILE;

  if (current_function_needs_context)
    type |= ARM_FT_NESTED;

  attr = DECL_ATTRIBUTES (current_function_decl);

  a = lookup_attribute ("naked", attr);
  if (a != NULL_TREE)
    type |= ARM_FT_NAKED;

  if (cfun->machine->eh_epilogue_sp_ofs != NULL_RTX)
    type |= ARM_FT_EXCEPTION_HANDLER;
  else
    {
      a = lookup_attribute ("isr", attr);
      if (a == NULL_TREE)
        a = lookup_attribute ("interrupt", attr);

      if (a == NULL_TREE)
        type |= TARGET_INTERWORK ? ARM_FT_INTERWORKED : ARM_FT_NORMAL;
      else
        type |= arm_isr_value (TREE_VALUE (a));
    }

  return type;
}

/* Returns the type of the current function.  */

unsigned long
arm_current_func_type (void)
{
  if (ARM_FUNC_TYPE (cfun->machine->func_type) == ARM_FT_UNKNOWN)
    cfun->machine->func_type = arm_compute_func_type ();

  return cfun->machine->func_type;
}
/* Return 1 if it is possible to return using a single instruction.  */

int
use_return_insn (int iscond)
{
  int regno;
  unsigned int func_type;
  unsigned long saved_int_regs;

  /* Never use a return instruction before reload has run.  */
  if (!reload_completed)
    return 0;

  func_type = arm_current_func_type ();

  /* Naked functions and volatile functions need special
     consideration.  */
  if (func_type & (ARM_FT_VOLATILE | ARM_FT_NAKED))
    return 0;

  /* So do interrupt functions that use the frame pointer.  */
  if (IS_INTERRUPT (func_type) && frame_pointer_needed)
    return 0;

  /* As do variadic functions.  */
  if (current_function_pretend_args_size
      || cfun->machine->uses_anonymous_args
      /* Or if the function calls __builtin_eh_return ().  */
      || ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER
      /* Or if there is no frame pointer and there is a stack adjustment.  */
      || ((arm_get_frame_size () + current_function_outgoing_args_size != 0)
          && !frame_pointer_needed))
    return 0;

  saved_int_regs = arm_compute_save_reg_mask ();

  /* Can't be done if interworking with Thumb, and any registers have been
     stacked.  */
  if (TARGET_INTERWORK && saved_int_regs != 0)
    return 0;

  /* On StrongARM, conditional returns are expensive if they aren't
     taken and multiple registers have been stacked.  */
  if (iscond && arm_is_strong)
    {
      /* Conditional return when just the LR is stored is a simple
         conditional-load instruction, that's not expensive.  */
      if (saved_int_regs != 0 && saved_int_regs != (1 << LR_REGNUM))
        return 0;

      if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
        return 0;
    }

  /* If there are saved registers but the LR isn't saved, then we need
     two instructions for the return.  */
  if (saved_int_regs && !(saved_int_regs & (1 << LR_REGNUM)))
    return 0;

  /* Can't be done if any of the FPA regs are pushed,
     since this also requires an insn.  */
  if (TARGET_HARD_FLOAT)
    for (regno = FIRST_ARM_FP_REGNUM; regno <= LAST_ARM_FP_REGNUM; regno++)
      if (regs_ever_live[regno] && !call_used_regs[regno])
        return 0;

  return 1;
}
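
/* When use_return_insn returns 1 the whole epilogue can be a single
   instruction: typically "mov pc, lr" when nothing was pushed, or a
   single "ldmfd sp!, {..., pc}" that restores the saved registers and
   loads the return address directly into the PC.  (Illustrative; the
   exact insn is chosen by the output routines elsewhere in this
   file.)  */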
/* Return TRUE if int I is a valid immediate ARM constant.  */

int
const_ok_for_arm (HOST_WIDE_INT i)
{
  unsigned HOST_WIDE_INT mask = ~(unsigned HOST_WIDE_INT)0xFF;

  /* For machines with >32 bit HOST_WIDE_INT, the bits above bit 31 must
     be all zero, or all one.  */
  if ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff) != 0
      && ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff)
          != ((~(unsigned HOST_WIDE_INT) 0)
              & ~(unsigned HOST_WIDE_INT) 0xffffffff)))
    return FALSE;

  /* Fast return for 0 and powers of 2.  */
  if ((i & (i - 1)) == 0)
    return TRUE;

  do
    {
      if ((i & mask & (unsigned HOST_WIDE_INT) 0xffffffff) == 0)
        return TRUE;
      mask =
          (mask << 2) | ((mask & (unsigned HOST_WIDE_INT) 0xffffffff)
                         >> (32 - 2)) | ~(unsigned HOST_WIDE_INT) 0xffffffff;
    }
  while (mask != ~(unsigned HOST_WIDE_INT) 0xFF);

  return FALSE;
}
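
/* The loop above implements the ARM rule that a data-processing
   immediate is an 8-bit value rotated right by an even amount.  For
   illustration: 0xff, 0x3fc (0xff rotated right by 30) and 0xf000000f
   (0xff rotated right by 4, wrapping around the word) are all valid,
   while 0x101 and 0xfff span more than eight significant bits and are
   not.  */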
/* Return true if I is a valid constant for the operation CODE.  */
static int
const_ok_for_op (HOST_WIDE_INT i, enum rtx_code code)
{
  if (const_ok_for_arm (i))
    return 1;

  switch (code)
    {
    case PLUS:
      return const_ok_for_arm (ARM_SIGN_EXTEND (-i));

    case MINUS:         /* Should only occur with (MINUS I reg) => rsb */
    case XOR:
    case IOR:
      return 0;

    case AND:
      return const_ok_for_arm (ARM_SIGN_EXTEND (~i));

    default:
      abort ();
    }
}
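
/* For illustration: PLUS with -1 is accepted because the negated value
   1 is a valid immediate (the add can become a sub), and AND with
   0xffffff00 is accepted because the inverted value 0xff is valid
   (the and can become a bic).  */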
/* Emit a sequence of insns to handle a large constant.
   CODE is the code of the operation required, it can be any of SET, PLUS,
   IOR, AND, XOR, MINUS;
   MODE is the mode in which the operation is being performed;
   VAL is the integer to operate on;
   SOURCE is the other operand (a register, or a null-pointer for SET);
   SUBTARGETS means it is safe to create scratch registers if that will
   either produce a simpler sequence, or we will want to cse the values.
   Return value is the number of insns emitted.  */

int
arm_split_constant (enum rtx_code code, enum machine_mode mode,
                    HOST_WIDE_INT val, rtx target, rtx source, int subtargets)
{
  if (subtargets || code == SET
      || (GET_CODE (target) == REG && GET_CODE (source) == REG
          && REGNO (target) != REGNO (source)))
    {
      /* After arm_reorg has been called, we can't fix up expensive
         constants by pushing them into memory so we must synthesize
         them in-line, regardless of the cost.  This is only likely to
         be more costly on chips that have load delay slots and we are
         compiling without running the scheduler (so no splitting
         occurred before the final instruction emission).

         Ref: gcc -O1 -mcpu=strongarm gcc.c-torture/compile/980506-2.c
      */
      if (!after_arm_reorg
          && (arm_gen_constant (code, mode, val, target, source, 1, 0)
              > arm_constant_limit + (code != SET)))
        {
          if (code == SET)
            {
              /* Currently SET is the only monadic value for CODE, all
                 the rest are dyadic.  */
              emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (val)));
              return 1;
            }
          else
            {
              rtx temp = subtargets ? gen_reg_rtx (mode) : target;

              emit_insn (gen_rtx_SET (VOIDmode, temp, GEN_INT (val)));
              /* For MINUS, the value is subtracted from, since we never
                 have subtraction of a constant.  */
              if (code == MINUS)
                emit_insn (gen_rtx_SET (VOIDmode, target,
                                        gen_rtx_MINUS (mode, temp, source)));
              else
                emit_insn (gen_rtx_SET (VOIDmode, target,
                                        gen_rtx (code, mode, source, temp)));
              return 2;
            }
        }
    }

  return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
}
static int
count_insns_for_constant (HOST_WIDE_INT remainder, int i)
{
  HOST_WIDE_INT temp1;
  int num_insns = 0;
  do
    {
      int end;

      if (i <= 0)
        i += 32;
      if (remainder & (3 << (i - 2)))
        {
          end = i - 8;
          if (end < 0)
            end += 32;
          temp1 = remainder & ((0x0ff << end)
                               | ((i < end) ? (0xff >> (32 - end)) : 0));
          remainder &= ~temp1;
          num_insns++;
          i -= 6;
        }
      i -= 2;
    } while (remainder);
  return num_insns;
}
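
/* This mirrors the do-while emission loop at the bottom of
   arm_gen_constant: each iteration consumes one 8-bit chunk (on an
   even-bit boundary) of the remaining constant.  For illustration,
   0xff needs one insn while 0xffff needs two (0xff00 plus 0xff).  */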
/* As above, but extra parameter GENERATE which, if clear, suppresses
   RTL generation.  */

static int
arm_gen_constant (enum rtx_code code, enum machine_mode mode,
                  HOST_WIDE_INT val, rtx target, rtx source, int subtargets,
                  int generate)
{
  int can_invert = 0;
  int can_negate = 0;
  int can_negate_initial = 0;
  int can_shift = 0;
  int i;
  int num_bits_set = 0;
  int set_sign_bit_copies = 0;
  int clear_sign_bit_copies = 0;
  int clear_zero_bit_copies = 0;
  int set_zero_bit_copies = 0;
  int insns = 0;
  unsigned HOST_WIDE_INT temp1, temp2;
  unsigned HOST_WIDE_INT remainder = val & 0xffffffff;

  /* Find out which operations are safe for a given CODE.  Also do a quick
     check for degenerate cases; these can occur when DImode operations
     are split.  */
  switch (code)
    {
    case SET:
      can_invert = 1;
      can_shift = 1;
      can_negate = 1;
      break;

    case PLUS:
      can_negate = 1;
      can_negate_initial = 1;
      break;

    case IOR:
      if (remainder == 0xffffffff)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    GEN_INT (ARM_SIGN_EXTEND (val))));
          return 1;
        }
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      break;

    case AND:
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, const0_rtx));
          return 1;
        }
      if (remainder == 0xffffffff)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      can_invert = 1;
      break;

    case XOR:
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      if (remainder == 0xffffffff)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_NOT (mode, source)));
          return 1;
        }

      /* We don't know how to handle this yet below.  */
      abort ();

    case MINUS:
      /* We treat MINUS as (val - source), since (source - val) is always
         passed as (source + (-val)).  */
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_NEG (mode, source)));
          return 1;
        }
      if (const_ok_for_arm (val))
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_MINUS (mode, GEN_INT (val),
                                                   source)));
          return 1;
        }
      can_negate = 1;

      break;

    default:
      abort ();
    }

  /* If we can do it in one insn get out quickly.  */
  if (const_ok_for_arm (val)
      || (can_negate_initial && const_ok_for_arm (-val))
      || (can_invert && const_ok_for_arm (~val)))
    {
      if (generate)
        emit_insn (gen_rtx_SET (VOIDmode, target,
                                (source ? gen_rtx (code, mode, source,
                                                   GEN_INT (val))
                                 : GEN_INT (val))));
      return 1;
    }

  /* Calculate a few attributes that may be useful for specific
     optimizations.  */
  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) == 0)
        clear_sign_bit_copies++;
      else
        break;
    }

  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) != 0)
        set_sign_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) == 0)
        clear_zero_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) != 0)
        set_zero_bit_copies++;
      else
        break;
    }

  switch (code)
    {
    case SET:
      /* See if we can do this by sign_extending a constant that is known
         to be negative.  This is a good way of doing it, since the shift
         may well merge into a subsequent insn.  */
      if (set_sign_bit_copies > 1)
        {
          if (const_ok_for_arm
              (temp1 = ARM_SIGN_EXTEND (remainder
                                        << (set_sign_bit_copies - 1))))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx_SET (VOIDmode, new_src,
                                          GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
          /* For an inverted constant, we will need to set the low bits,
             these will be shifted out of harm's way.  */
          temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
          if (const_ok_for_arm (~temp1))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx_SET (VOIDmode, new_src,
                                          GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
        }

      /* See if we can generate this by setting the bottom (or the top)
         16 bits, and then shifting these into the other half of the
         word.  We only look for the simplest cases, to do more would cost
         too much.  Be careful, however, not to generate this when the
         alternative would take fewer insns.  */
      if (val & 0xffff0000)
        {
          temp1 = remainder & 0xffff0000;
          temp2 = remainder & 0x0000ffff;

          /* Overlaps outside this range are best done using other methods.  */
          for (i = 9; i < 24; i++)
            {
              if ((((temp2 | (temp2 << i)) & 0xffffffff) == remainder)
                  && !const_ok_for_arm (temp2))
                {
                  rtx new_src = (subtargets
                                 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
                                 : target);
                  insns = arm_gen_constant (code, mode, temp2, new_src,
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn (gen_rtx_SET
                               (VOIDmode, target,
                                gen_rtx_IOR (mode,
                                             gen_rtx_ASHIFT (mode, source,
                                                             GEN_INT (i)),
                                             source)));
                  return insns + 1;
                }
            }

          /* Don't duplicate cases already considered.  */
          for (i = 17; i < 24; i++)
            {
              if (((temp1 | (temp1 >> i)) == remainder)
                  && !const_ok_for_arm (temp1))
                {
                  rtx new_src = (subtargets
                                 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
                                 : target);
                  insns = arm_gen_constant (code, mode, temp1, new_src,
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn
                      (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_IOR
                                    (mode,
                                     gen_rtx_LSHIFTRT (mode, source,
                                                       GEN_INT (i)),
                                     source)));
                  return insns + 1;
                }
            }
        }
      break;

    case IOR:
    case XOR:
      /* If we have IOR or XOR, and the constant can be loaded in a
         single instruction, and we can find a temporary to put it in,
         then this can be done in two instructions instead of 3-4.  */
      if (subtargets
          /* TARGET can't be NULL if SUBTARGETS is 0.  */
          || (reload_completed && !reg_mentioned_p (target, source)))
        {
          if (const_ok_for_arm (ARM_SIGN_EXTEND (~val)))
            {
              if (generate)
                {
                  rtx sub = subtargets ? gen_reg_rtx (mode) : target;

                  emit_insn (gen_rtx_SET (VOIDmode, sub, GEN_INT (val)));
                  emit_insn (gen_rtx_SET (VOIDmode, target,
                                          gen_rtx (code, mode, source, sub)));
                }
              return 2;
            }
        }

      if (code == XOR)
        break;

      if (set_sign_bit_copies > 8
          && (val & (-1 << (32 - set_sign_bit_copies))) == val)
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (set_sign_bit_copies);

              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_ASHIFT (mode,
                                                                   source,
                                                                   shift))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_LSHIFTRT (mode, sub,
                                                                     shift))));
            }
          return 2;
        }

      if (set_zero_bit_copies > 8
          && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (set_zero_bit_copies);

              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_LSHIFTRT (mode,
                                                                     source,
                                                                     shift))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_ASHIFT (mode, sub,
                                                                   shift))));
            }
          return 2;
        }

      if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~val)))
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode, source)));
              source = sub;
              if (subtargets)
                sub = gen_reg_rtx (mode);
              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_AND (mode, source,
                                                   GEN_INT (temp1))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode, sub)));
            }
          return 3;
        }
      break;

    case AND:
      /* See if two shifts will do 2 or more insn's worth of work.  */
      if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
        {
          HOST_WIDE_INT shift_mask = ((0xffffffff
                                       << (32 - clear_sign_bit_copies))
                                      & 0xffffffff);

          if ((remainder | shift_mask) != 0xffffffff)
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            new_src, source, subtargets, 1);
                  source = new_src;
                }
              else
                {
                  rtx targ = subtargets ? NULL_RTX : target;
                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            targ, source, subtargets, 0);
                }
            }

          if (generate)
            {
              rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (clear_sign_bit_copies);

              emit_insn (gen_ashlsi3 (new_src, source, shift));
              emit_insn (gen_lshrsi3 (target, new_src, shift));
            }

          return insns + 2;
        }

      if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
        {
          HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;

          if ((remainder | shift_mask) != 0xffffffff)
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;

                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            new_src, source, subtargets, 1);
                  source = new_src;
                }
              else
                {
                  rtx targ = subtargets ? NULL_RTX : target;

                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            targ, source, subtargets, 0);
                }
            }

          if (generate)
            {
              rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (clear_zero_bit_copies);

              emit_insn (gen_lshrsi3 (new_src, source, shift));
              emit_insn (gen_ashlsi3 (target, new_src, shift));
            }

          return insns + 2;
        }

      break;

    default:
      break;
    }

  for (i = 0; i < 32; i++)
    if (remainder & (1 << i))
      num_bits_set++;

  if (code == AND || (can_invert && num_bits_set > 16))
    remainder = (~remainder) & 0xffffffff;
  else if (code == PLUS && num_bits_set > 16)
    remainder = (-remainder) & 0xffffffff;
  else
    {
      can_invert = 0;
      can_negate = 0;
    }

  /* Now try and find a way of doing the job in either two or three
     instructions.
     We start by looking for the largest block of zeros that are aligned on
     a 2-bit boundary, we then fill up the temps, wrapping around to the
     top of the word when we drop off the bottom.
     In the worst case this code should produce no more than four insns.  */
  {
    int best_start = 0;
    int best_consecutive_zeros = 0;

    for (i = 0; i < 32; i += 2)
      {
        int consecutive_zeros = 0;

        if (!(remainder & (3 << i)))
          {
            while ((i < 32) && !(remainder & (3 << i)))
              {
                consecutive_zeros += 2;
                i += 2;
              }
            if (consecutive_zeros > best_consecutive_zeros)
              {
                best_consecutive_zeros = consecutive_zeros;
                best_start = i - consecutive_zeros;
              }
            i -= 2;
          }
      }

    /* So long as it won't require any more insns to do so, it's
       desirable to emit a small constant (in bits 0...9) in the last
       insn.  This way there is more chance that it can be combined with
       a later addressing insn to form a pre-indexed load or store
       operation.  Consider:

               *((volatile int *)0xe0000100) = 1;
               *((volatile int *)0xe0000110) = 2;

       We want this to wind up as:

                mov rA, #0xe0000000
                mov rB, #1
                str rB, [rA, #0x100]
                mov rB, #2
                str rB, [rA, #0x110]

       rather than having to synthesize both large constants from scratch.

       Therefore, we calculate how many insns would be required to emit
       the constant starting from `best_start', and also starting from
       zero (i.e. with bit 31 first to be output).  If `best_start' doesn't
       yield a shorter sequence, we may as well use zero.  */
    if (best_start != 0
        && ((((unsigned HOST_WIDE_INT) 1) << best_start) < remainder)
        && (count_insns_for_constant (remainder, 0) <=
            count_insns_for_constant (remainder, best_start)))
      best_start = 0;

    /* Now start emitting the insns.  */
    i = best_start;
    do
      {
        int end;

        if (i <= 0)
          i += 32;
        if (remainder & (3 << (i - 2)))
          {
            end = i - 8;
            if (end < 0)
              end += 32;
            temp1 = remainder & ((0x0ff << end)
                                 | ((i < end) ? (0xff >> (32 - end)) : 0));
            remainder &= ~temp1;

            if (generate)
              {
                rtx new_src, temp1_rtx;

                if (code == SET || code == MINUS)
                  {
                    new_src = (subtargets ? gen_reg_rtx (mode) : target);
                    if (can_invert && code != MINUS)
                      temp1 = ~temp1;
                  }
                else
                  {
                    if (remainder && subtargets)
                      new_src = gen_reg_rtx (mode);
                    else
                      new_src = target;
                    if (can_invert)
                      temp1 = ~temp1;
                    else if (can_negate)
                      temp1 = -temp1;
                  }

                temp1 = trunc_int_for_mode (temp1, mode);
                temp1_rtx = GEN_INT (temp1);

                if (code == SET)
                  ;
                else if (code == MINUS)
                  temp1_rtx = gen_rtx_MINUS (mode, temp1_rtx, source);
                else
                  temp1_rtx = gen_rtx_fmt_ee (code, mode, source, temp1_rtx);

                emit_insn (gen_rtx_SET (VOIDmode, new_src, temp1_rtx));
                source = new_src;
              }

            if (code == SET)
              {
                can_invert = 0;
                code = PLUS;
              }
            else if (code == MINUS)
              code = PLUS;

            insns++;
            i -= 6;
          }
        i -= 2;
      }
    while (remainder);
  }

  return insns;
}
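
/* A worked example (illustrative): for code == SET and
   VAL == 0x00ff00ff neither the value, its negation nor its inverse
   is a valid immediate, so the emission loop above produces two
   insns, a move of the 0x00ff0000 chunk followed by an add of the
   0x000000ff chunk.  By contrast VAL == 0xffffff00 is handled by the
   one-insn can_invert path near the top, since ~VAL == 0xff is a
   valid immediate (an mvn).  */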
/* Canonicalize a comparison so that we are more likely to recognize it.
   This can be done for a few constant compares, where we can make the
   immediate value easier to load.  */

enum rtx_code
arm_canonicalize_comparison (enum rtx_code code, rtx *op1)
{
  unsigned HOST_WIDE_INT i = INTVAL (*op1);

  switch (code)
    {
    case EQ:
    case NE:
      return code;

    case GT:
    case LE:
      if (i != ((((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1)) - 1)
          && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
        {
          *op1 = GEN_INT (i + 1);
          return code == GT ? GE : LT;
        }
      break;

    case GE:
    case LT:
      if (i != (((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1))
          && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
        {
          *op1 = GEN_INT (i - 1);
          return code == GE ? GT : LE;
        }
      break;

    case GTU:
    case LEU:
      if (i != ~((unsigned HOST_WIDE_INT) 0)
          && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
        {
          *op1 = GEN_INT (i + 1);
          return code == GTU ? GEU : LTU;
        }
      break;

    case GEU:
    case LTU:
      if (i != 0
          && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
        {
          *op1 = GEN_INT (i - 1);
          return code == GEU ? GTU : LEU;
        }
      break;

    default:
      abort ();
    }

  return code;
}
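
/* For illustration: a comparison such as (x <= 0xfff) uses LE with
   0xfff, which is not a valid immediate, but adding one gives 0x1000
   (a valid immediate), so the comparison is rewritten as
   (x < 0x1000): LE becomes LT and *op1 becomes 0x1000.  */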
/* Decide whether a type should be returned in memory (true)
   or in a register (false).  This is called by the macro
   RETURN_IN_MEMORY.  */

int
arm_return_in_memory (tree type)
{
  HOST_WIDE_INT size;

  if (!AGGREGATE_TYPE_P (type))
    /* All simple types are returned in registers.  */
    return 0;

  size = int_size_in_bytes (type);

  if (TARGET_ATPCS)
    {
      /* ATPCS returns aggregate types in memory only if they are
         larger than a word (or are variable size).  */
      return (size < 0 || size > UNITS_PER_WORD);
    }

  /* For the arm-wince targets we choose to be compatible with Microsoft's
     ARM and Thumb compilers, which always return aggregates in memory.  */
#ifndef ARM_WINCE
  /* All structures/unions bigger than one word are returned in memory.
     Also catch the case where int_size_in_bytes returns -1.  In this case
     the aggregate is either huge or of variable size, and in either case
     we will want to return it via memory and not in a register.  */
  if (size < 0 || size > UNITS_PER_WORD)
    return 1;

  if (TREE_CODE (type) == RECORD_TYPE)
    {
      tree field;

      /* For a struct the APCS says that we only return in a register
         if the type is 'integer like' and every addressable element
         has an offset of zero.  For practical purposes this means
         that the structure can have at most one non bit-field element
         and that this element must be the first one in the structure.  */

      /* Find the first field, ignoring non FIELD_DECL things which will
         have been created by C++.  */
      for (field = TYPE_FIELDS (type);
           field && TREE_CODE (field) != FIELD_DECL;
           field = TREE_CHAIN (field))
        continue;

      if (field == NULL)
        return 0; /* An empty structure.  Allowed by an extension to ANSI C.  */

      /* Check that the first field is valid for returning in a register.  */

      /* ... Floats are not allowed.  */
      if (FLOAT_TYPE_P (TREE_TYPE (field)))
        return 1;

      /* ... Aggregates that are not themselves valid for returning in
         a register are not allowed.  */
      if (RETURN_IN_MEMORY (TREE_TYPE (field)))
        return 1;

      /* Now check the remaining fields, if any.  Only bitfields are allowed,
         since they are not addressable.  */
      for (field = TREE_CHAIN (field);
           field;
           field = TREE_CHAIN (field))
        {
          if (TREE_CODE (field) != FIELD_DECL)
            continue;

          if (!DECL_BIT_FIELD_TYPE (field))
            return 1;
        }

      return 0;
    }

  if (TREE_CODE (type) == UNION_TYPE)
    {
      tree field;

      /* Unions can be returned in registers if every element is
         integral, or can be returned in an integer register.  */
      for (field = TYPE_FIELDS (type);
           field;
           field = TREE_CHAIN (field))
        {
          if (TREE_CODE (field) != FIELD_DECL)
            continue;

          if (FLOAT_TYPE_P (TREE_TYPE (field)))
            return 1;

          if (RETURN_IN_MEMORY (TREE_TYPE (field)))
            return 1;
        }

      return 0;
    }
#endif /* not ARM_WINCE */

  /* Return all other types in memory.  */
  return 1;
}
1914 /* Indicate whether or not words of a double are in big-endian order. */
1917 arm_float_words_big_endian (void)
1919 if (TARGET_CIRRUS)
1920 return 0;
1922 /* For FPA, float words are always big-endian. For VFP, float words
1923 follow the memory system mode. */
1925 if (TARGET_HARD_FLOAT)
1927 /* FIXME: TARGET_HARD_FLOAT currently implies FPA. */
1928 return 1;
1931 if (TARGET_VFP)
1932 return (TARGET_BIG_END ? 1 : 0);
1934 return 1;
1937 /* Initialize a variable CUM of type CUMULATIVE_ARGS
1938 for a call to a function whose data type is FNTYPE.
1939 For a library call, FNTYPE is NULL. */
1940 void
1941 arm_init_cumulative_args (CUMULATIVE_ARGS *pcum, tree fntype,
1942 rtx libname ATTRIBUTE_UNUSED,
1943 tree fndecl ATTRIBUTE_UNUSED)
1945 /* On the ARM, the offset starts at 0. */
1946 pcum->nregs = ((fntype && aggregate_value_p (TREE_TYPE (fntype))) ? 1 : 0);
1948 pcum->call_cookie = CALL_NORMAL;
1950 if (TARGET_LONG_CALLS)
1951 pcum->call_cookie = CALL_LONG;
1953 /* Check for long call/short call attributes. The attributes
1954 override any command line option. */
1955 if (fntype)
1957 if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (fntype)))
1958 pcum->call_cookie = CALL_SHORT;
1959 else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (fntype)))
1960 pcum->call_cookie = CALL_LONG;
1964 /* Determine where to put an argument to a function.
1965 Value is zero to push the argument on the stack,
1966 or a hard register in which to store the argument.
1968 MODE is the argument's machine mode.
1969 TYPE is the data type of the argument (as a tree).
1970 This is null for libcalls where that information may
1971 not be available.
1972 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1973 the preceding args and about the function being called.
1974 NAMED is nonzero if this argument is a named parameter
1975 (otherwise it is an extra parameter matching an ellipsis). */
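/* For example (illustrative): for "int f (int a, int b)" A is passed in r0
and B in r1; if F instead returned a structure in memory, r0 would hold
the implicit return-value pointer and A would move to r1. */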
1978 arm_function_arg (CUMULATIVE_ARGS *pcum, enum machine_mode mode,
1979 tree type ATTRIBUTE_UNUSED, int named)
1981 if (mode == VOIDmode)
1982 /* Compute operand 2 of the call insn. */
1983 return GEN_INT (pcum->call_cookie);
1985 if (!named || pcum->nregs >= NUM_ARG_REGS)
1986 return NULL_RTX;
1988 return gen_rtx_REG (mode, pcum->nregs);
1991 /* Variable sized types are passed by reference. This is a GCC
1992 extension to the ARM ABI. */
1995 arm_function_arg_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
1996 enum machine_mode mode ATTRIBUTE_UNUSED,
1997 tree type, int named ATTRIBUTE_UNUSED)
1999 return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
2002 /* Implement va_arg. */
2005 arm_va_arg (tree valist, tree type)
2007 /* Variable sized types are passed by reference. */
2008 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2010 rtx addr = std_expand_builtin_va_arg (valist, build_pointer_type (type));
2011 return gen_rtx_MEM (ptr_mode, force_reg (Pmode, addr));
2014 return std_expand_builtin_va_arg (valist, type);
2017 /* Encode the current state of the #pragma [no_]long_calls. */
2018 typedef enum
2020 OFF, /* No #pragma [no_]long_calls is in effect. */
2021 LONG, /* #pragma long_calls is in effect. */
2022 SHORT /* #pragma no_long_calls is in effect. */
2023 } arm_pragma_enum;
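/* Example usage (illustrative):
#pragma long_calls
void far_away (void); -- acquires the long_call attribute
#pragma long_calls_off */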
2025 static arm_pragma_enum arm_pragma_long_calls = OFF;
2027 void
2028 arm_pr_long_calls (struct cpp_reader * pfile ATTRIBUTE_UNUSED)
2030 arm_pragma_long_calls = LONG;
2033 void
2034 arm_pr_no_long_calls (struct cpp_reader * pfile ATTRIBUTE_UNUSED)
2036 arm_pragma_long_calls = SHORT;
2039 void
2040 arm_pr_long_calls_off (struct cpp_reader * pfile ATTRIBUTE_UNUSED)
2042 arm_pragma_long_calls = OFF;
2045 /* Table of machine attributes. */
2046 const struct attribute_spec arm_attribute_table[] =
2048 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
2049 /* Function calls made to this symbol must be done indirectly, because
2050 it may lie outside of the 26 bit addressing range of a normal function
2051 call. */
2052 { "long_call", 0, 0, false, true, true, NULL },
2053 /* Whereas these functions are always known to reside within the 26 bit
2054 addressing range. */
2055 { "short_call", 0, 0, false, true, true, NULL },
2056 /* Interrupt Service Routines have special prologue and epilogue requirements. */
2057 { "isr", 0, 1, false, false, false, arm_handle_isr_attribute },
2058 { "interrupt", 0, 1, false, false, false, arm_handle_isr_attribute },
2059 { "naked", 0, 0, true, false, false, arm_handle_fndecl_attribute },
2060 #ifdef ARM_PE
2061 /* ARM/PE has three new attributes:
2062 interfacearm - ?
2063 dllexport - for exporting a function/variable that will live in a dll
2064 dllimport - for importing a function/variable from a dll
2066 Microsoft allows multiple declspecs in one __declspec, separating
2067 them with spaces. We do NOT support this. Instead, use __declspec
2068 multiple times.
2070 { "dllimport", 0, 0, true, false, false, NULL },
2071 { "dllexport", 0, 0, true, false, false, NULL },
2072 { "interfacearm", 0, 0, true, false, false, arm_handle_fndecl_attribute },
2073 #endif
2074 { NULL, 0, 0, false, false, false, NULL }
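/* Example declarations using the attributes above (illustrative):
void report (void) __attribute__ ((long_call));
void handler (void) __attribute__ ((interrupt ("IRQ"))); */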
2077 /* Handle an attribute requiring a FUNCTION_DECL;
2078 arguments as in struct attribute_spec.handler. */
2079 static tree
2080 arm_handle_fndecl_attribute (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
2081 int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
2083 if (TREE_CODE (*node) != FUNCTION_DECL)
2085 warning ("`%s' attribute only applies to functions",
2086 IDENTIFIER_POINTER (name));
2087 *no_add_attrs = true;
2090 return NULL_TREE;
2093 /* Handle an "interrupt" or "isr" attribute;
2094 arguments as in struct attribute_spec.handler. */
2095 static tree
2096 arm_handle_isr_attribute (tree *node, tree name, tree args, int flags,
2097 bool *no_add_attrs)
2099 if (DECL_P (*node))
2101 if (TREE_CODE (*node) != FUNCTION_DECL)
2103 warning ("`%s' attribute only applies to functions",
2104 IDENTIFIER_POINTER (name));
2105 *no_add_attrs = true;
2107 /* FIXME: the argument, if any, is checked for type attributes;
2108 should it be checked for decl ones? */
2110 else
2112 if (TREE_CODE (*node) == FUNCTION_TYPE
2113 || TREE_CODE (*node) == METHOD_TYPE)
2115 if (arm_isr_value (args) == ARM_FT_UNKNOWN)
2117 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
2118 *no_add_attrs = true;
2121 else if (TREE_CODE (*node) == POINTER_TYPE
2122 && (TREE_CODE (TREE_TYPE (*node)) == FUNCTION_TYPE
2123 || TREE_CODE (TREE_TYPE (*node)) == METHOD_TYPE)
2124 && arm_isr_value (args) != ARM_FT_UNKNOWN)
2126 *node = build_type_copy (*node);
2127 TREE_TYPE (*node) = build_type_attribute_variant
2128 (TREE_TYPE (*node),
2129 tree_cons (name, args, TYPE_ATTRIBUTES (TREE_TYPE (*node))));
2130 *no_add_attrs = true;
2132 else
2134 /* Possibly pass this attribute on from the type to a decl. */
2135 if (flags & ((int) ATTR_FLAG_DECL_NEXT
2136 | (int) ATTR_FLAG_FUNCTION_NEXT
2137 | (int) ATTR_FLAG_ARRAY_NEXT))
2139 *no_add_attrs = true;
2140 return tree_cons (name, args, NULL_TREE);
2142 else
2144 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
2149 return NULL_TREE;
2152 /* Return 0 if the attributes for two types are incompatible, 1 if they
2153 are compatible, and 2 if they are nearly compatible (which causes a
2154 warning to be generated). */
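/* For example, a long_call function type and a short_call function type
compare as incompatible (0), while two types that both lack call
attributes compare as compatible (1). */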
2155 static int
2156 arm_comp_type_attributes (tree type1, tree type2)
2158 int l1, l2, s1, s2;
2160 /* Check for mismatch of non-default calling convention. */
2161 if (TREE_CODE (type1) != FUNCTION_TYPE)
2162 return 1;
2164 /* Check for mismatched call attributes. */
2165 l1 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type1)) != NULL;
2166 l2 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type2)) != NULL;
2167 s1 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type1)) != NULL;
2168 s2 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type2)) != NULL;
2170 /* Only bother to check if an attribute is defined. */
2171 if (l1 | l2 | s1 | s2)
2173 /* If one type has an attribute, the other must have the same attribute. */
2174 if ((l1 != l2) || (s1 != s2))
2175 return 0;
2177 /* Disallow mixed attributes. */
2178 if ((l1 & s2) || (l2 & s1))
2179 return 0;
2182 /* Check for mismatched ISR attribute. */
2183 l1 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type1)) != NULL;
2184 if (! l1)
2185 l1 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type1)) != NULL;
2186 l2 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type2)) != NULL;
2187 if (! l2)
2188 l2 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type2)) != NULL;
2189 if (l1 != l2)
2190 return 0;
2192 return 1;
2195 /* Encode long_call or short_call attribute by prefixing
2196 symbol name in DECL with a special character FLAG. */
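/* For example (illustrative): with FLAG being SHORT_CALL_FLAG_CHAR, the
assembler name "foo" becomes "<flag>foo"; ENCODED_SHORT_CALL_ATTR_P
recognizes the prefix later on. */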
2197 void
2198 arm_encode_call_attribute (tree decl, int flag)
2200 const char * str = XSTR (XEXP (DECL_RTL (decl), 0), 0);
2201 int len = strlen (str);
2202 char * newstr;
2204 /* Do not allow weak functions to be treated as short call. */
2205 if (DECL_WEAK (decl) && flag == SHORT_CALL_FLAG_CHAR)
2206 return;
2208 newstr = alloca (len + 2);
2209 newstr[0] = flag;
2210 strcpy (newstr + 1, str);
2212 newstr = (char *) ggc_alloc_string (newstr, len + 1);
2213 XSTR (XEXP (DECL_RTL (decl), 0), 0) = newstr;
2216 /* Assigns default attributes to newly defined type. This is used to
2217 set short_call/long_call attributes for function types of
2218 functions defined inside corresponding #pragma scopes. */
2219 static void
2220 arm_set_default_type_attributes (tree type)
2222 /* Add __attribute__ ((long_call)) to all functions, when
2223 inside #pragma long_calls or __attribute__ ((short_call)),
2224 when inside #pragma no_long_calls. */
2225 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
2227 tree type_attr_list, attr_name;
2228 type_attr_list = TYPE_ATTRIBUTES (type);
2230 if (arm_pragma_long_calls == LONG)
2231 attr_name = get_identifier ("long_call");
2232 else if (arm_pragma_long_calls == SHORT)
2233 attr_name = get_identifier ("short_call");
2234 else
2235 return;
2237 type_attr_list = tree_cons (attr_name, NULL_TREE, type_attr_list);
2238 TYPE_ATTRIBUTES (type) = type_attr_list;
2242 /* Return 1 if the operand is a SYMBOL_REF for a function known to be
2243 defined within the current compilation unit. If this cannot be
2244 determined, then 0 is returned. */
2245 static int
2246 current_file_function_operand (rtx sym_ref)
2248 /* This is a bit of a fib. A function will have a short call flag
2249 applied to its name if it has the short call attribute, or it has
2250 already been defined within the current compilation unit. */
2251 if (ENCODED_SHORT_CALL_ATTR_P (XSTR (sym_ref, 0)))
2252 return 1;
2254 /* The current function is always defined within the current compilation
2255 unit. If it is a weak definition, however, then this may not be the real
2256 definition of the function, and so we have to say no. */
2257 if (sym_ref == XEXP (DECL_RTL (current_function_decl), 0)
2258 && !DECL_WEAK (current_function_decl))
2259 return 1;
2261 /* We cannot make the determination - default to returning 0. */
2262 return 0;
2265 /* Return nonzero if a 32 bit "long_call" should be generated for
2266 this call. We generate a long_call if the function:
2268 a. has an __attribute__ ((long_call))
2269 or b. is within the scope of a #pragma long_calls
2270 or c. the -mlong-calls command line switch has been specified
2272 However we do not generate a long call if the function:
2274 d. has an __attribute__ ((short_call))
2275 or e. is inside the scope of a #pragma no_long_calls
2276 or f. has an __attribute__ ((section))
2277 or g. is defined within the current compilation unit.
2279 This function will be called by C fragments contained in the machine
2280 description file. CALL_REF and CALL_COOKIE correspond to the matched
2281 rtl operands. CALL_SYMBOL is used to distinguish between
2282 two different callers of the function. It is set to 1 in the
2283 "call_symbol" and "call_symbol_value" patterns and to 0 in the "call"
2284 and "call_value" patterns. This is because of the difference in the
2285 SYM_REFs passed by these patterns. */
2287 arm_is_longcall_p (rtx sym_ref, int call_cookie, int call_symbol)
2289 if (!call_symbol)
2291 if (GET_CODE (sym_ref) != MEM)
2292 return 0;
2294 sym_ref = XEXP (sym_ref, 0);
2297 if (GET_CODE (sym_ref) != SYMBOL_REF)
2298 return 0;
2300 if (call_cookie & CALL_SHORT)
2301 return 0;
2303 if (TARGET_LONG_CALLS && flag_function_sections)
2304 return 1;
2306 if (current_file_function_operand (sym_ref))
2307 return 0;
2309 return (call_cookie & CALL_LONG)
2310 || ENCODED_LONG_CALL_ATTR_P (XSTR (sym_ref, 0))
2311 || TARGET_LONG_CALLS;
2314 /* Return nonzero if it is ok to make a tail-call to DECL. */
2315 static bool
2316 arm_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
2318 int call_type = TARGET_LONG_CALLS ? CALL_LONG : CALL_NORMAL;
2320 /* Never tailcall something for which we have no decl, or if we
2321 are in Thumb mode. */
2322 if (decl == NULL || TARGET_THUMB)
2323 return false;
2325 /* Get the calling method. */
2326 if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
2327 call_type = CALL_SHORT;
2328 else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
2329 call_type = CALL_LONG;
2331 /* Cannot tail-call to long calls, since these are out of range of
2332 a branch instruction. However, if not compiling PIC, we know
2333 we can reach the symbol if it is in this compilation unit. */
2334 if (call_type == CALL_LONG && (flag_pic || !TREE_ASM_WRITTEN (decl)))
2335 return false;
2337 /* If we are interworking and the function is not declared static
2338 then we can't tail-call it unless we know that it exists in this
2339 compilation unit (since it might be a Thumb routine). */
2340 if (TARGET_INTERWORK && TREE_PUBLIC (decl) && !TREE_ASM_WRITTEN (decl))
2341 return false;
2343 /* Never tailcall from an ISR routine - it needs a special exit sequence. */
2344 if (IS_INTERRUPT (arm_current_func_type ()))
2345 return false;
2347 /* Everything else is ok. */
2348 return true;
2352 /* Addressing mode support functions. */
2354 /* Return non-zero if X is a legitimate immediate operand when compiling
2355 for PIC. */
2357 legitimate_pic_operand_p (rtx x)
2359 if (CONSTANT_P (x)
2360 && flag_pic
2361 && (GET_CODE (x) == SYMBOL_REF
2362 || (GET_CODE (x) == CONST
2363 && GET_CODE (XEXP (x, 0)) == PLUS
2364 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)))
2365 return 0;
2367 return 1;
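/* Legitimize a PIC reference in ORIG, putting the result in REG if given.
For a global SYMBOL_REF the generated sequence is, in outline
(illustrative):
ldr REG, <pool entry: GOT offset of the symbol>
ldr REG, [PIC_REG, REG] @ load the address from the GOT
Label refs and locally-defined short-call symbols skip the GOT load and
simply add PIC_REG to the offset. */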
2371 legitimize_pic_address (rtx orig, enum machine_mode mode, rtx reg)
2373 if (GET_CODE (orig) == SYMBOL_REF
2374 || GET_CODE (orig) == LABEL_REF)
2376 #ifndef AOF_ASSEMBLER
2377 rtx pic_ref, address;
2378 #endif
2379 rtx insn;
2380 int subregs = 0;
2382 if (reg == 0)
2384 if (no_new_pseudos)
2385 abort ();
2386 else
2387 reg = gen_reg_rtx (Pmode);
2389 subregs = 1;
2392 #ifdef AOF_ASSEMBLER
2393 /* The AOF assembler can generate relocations for these directly, and
2394 understands that the PIC register has to be added into the offset. */
2395 insn = emit_insn (gen_pic_load_addr_based (reg, orig));
2396 #else
2397 if (subregs)
2398 address = gen_reg_rtx (Pmode);
2399 else
2400 address = reg;
2402 if (TARGET_ARM)
2403 emit_insn (gen_pic_load_addr_arm (address, orig));
2404 else
2405 emit_insn (gen_pic_load_addr_thumb (address, orig));
2407 if ((GET_CODE (orig) == LABEL_REF
2408 || (GET_CODE (orig) == SYMBOL_REF
2409 && ENCODED_SHORT_CALL_ATTR_P (XSTR (orig, 0))))
2410 && NEED_GOT_RELOC)
2411 pic_ref = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, address);
2412 else
2414 pic_ref = gen_rtx_MEM (Pmode,
2415 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
2416 address));
2417 RTX_UNCHANGING_P (pic_ref) = 1;
2420 insn = emit_move_insn (reg, pic_ref);
2421 #endif
2422 current_function_uses_pic_offset_table = 1;
2423 /* Put a REG_EQUAL note on this insn, so that it can be optimized
2424 by the loop optimizer. */
2425 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, orig,
2426 REG_NOTES (insn));
2427 return reg;
2429 else if (GET_CODE (orig) == CONST)
2431 rtx base, offset;
2433 if (GET_CODE (XEXP (orig, 0)) == PLUS
2434 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
2435 return orig;
2437 if (reg == 0)
2439 if (no_new_pseudos)
2440 abort ();
2441 else
2442 reg = gen_reg_rtx (Pmode);
2445 if (GET_CODE (XEXP (orig, 0)) == PLUS)
2447 base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
2448 offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
2449 base == reg ? 0 : reg);
2451 else
2452 abort ();
2454 if (GET_CODE (offset) == CONST_INT)
2456 /* The base register doesn't really matter, we only want to
2457 test the index for the appropriate mode. */
2458 if (!arm_legitimate_index_p (mode, offset, 0))
2460 if (!no_new_pseudos)
2461 offset = force_reg (Pmode, offset);
2462 else
2463 abort ();
2466 if (GET_CODE (offset) == CONST_INT)
2467 return plus_constant (base, INTVAL (offset));
2470 if (GET_MODE_SIZE (mode) > 4
2471 && (GET_MODE_CLASS (mode) == MODE_INT
2472 || TARGET_SOFT_FLOAT))
2474 emit_insn (gen_addsi3 (reg, base, offset));
2475 return reg;
2478 return gen_rtx_PLUS (Pmode, base, offset);
2481 return orig;
2484 /* Generate code to load the PIC register. PROLOGUE is true if
2485 called from arm_expand_prologue (in which case we want the
2486 generated insns at the start of the function); false if called
2487 by an exception receiver that needs the PIC register reloaded
2488 (in which case the insns are just dumped at the current location). */
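/* The ARM sequence emitted below looks like this (illustrative):
ldr PIC_REG, L_off
L1: add PIC_REG, pc, PIC_REG
where L_off holds _GLOBAL_OFFSET_TABLE_ - (L1 + 8), cancelling out the
'dot + 8' that the PC reads as during the add. */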
2489 void
2490 arm_finalize_pic (int prologue ATTRIBUTE_UNUSED)
2492 #ifndef AOF_ASSEMBLER
2493 rtx l1, pic_tmp, pic_tmp2, seq, pic_rtx;
2494 rtx global_offset_table;
2496 if (current_function_uses_pic_offset_table == 0 || TARGET_SINGLE_PIC_BASE)
2497 return;
2499 if (!flag_pic)
2500 abort ();
2502 start_sequence ();
2503 l1 = gen_label_rtx ();
2505 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2506 /* On the ARM the PC register contains 'dot + 8' at the time of the
2507 addition, on the Thumb it is 'dot + 4'. */
2508 pic_tmp = plus_constant (gen_rtx_LABEL_REF (Pmode, l1), TARGET_ARM ? 8 : 4);
2509 if (GOT_PCREL)
2510 pic_tmp2 = gen_rtx_CONST (VOIDmode,
2511 gen_rtx_PLUS (Pmode, global_offset_table, pc_rtx));
2512 else
2513 pic_tmp2 = gen_rtx_CONST (VOIDmode, global_offset_table);
2515 pic_rtx = gen_rtx_CONST (Pmode, gen_rtx_MINUS (Pmode, pic_tmp2, pic_tmp));
2517 if (TARGET_ARM)
2519 emit_insn (gen_pic_load_addr_arm (pic_offset_table_rtx, pic_rtx));
2520 emit_insn (gen_pic_add_dot_plus_eight (pic_offset_table_rtx, l1));
2522 else
2524 emit_insn (gen_pic_load_addr_thumb (pic_offset_table_rtx, pic_rtx));
2525 emit_insn (gen_pic_add_dot_plus_four (pic_offset_table_rtx, l1));
2528 seq = get_insns ();
2529 end_sequence ();
2530 if (prologue)
2531 emit_insn_after (seq, get_insns ());
2532 else
2533 emit_insn (seq);
2535 /* Need to emit this whether or not we obey regdecls,
2536 since setjmp/longjmp can cause life info to screw up. */
2537 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
2538 #endif /* AOF_ASSEMBLER */
2541 /* Return nonzero if X is valid as an ARM state addressing register. */
2542 static int
2543 arm_address_register_rtx_p (rtx x, int strict_p)
2545 int regno;
2547 if (GET_CODE (x) != REG)
2548 return 0;
2550 regno = REGNO (x);
2552 if (strict_p)
2553 return ARM_REGNO_OK_FOR_BASE_P (regno);
2555 return (regno <= LAST_ARM_REGNUM
2556 || regno >= FIRST_PSEUDO_REGISTER
2557 || regno == FRAME_POINTER_REGNUM
2558 || regno == ARG_POINTER_REGNUM);
2561 /* Return nonzero if X is a valid ARM state address operand. */
2563 arm_legitimate_address_p (enum machine_mode mode, rtx x, int strict_p)
2565 if (arm_address_register_rtx_p (x, strict_p))
2566 return 1;
2568 else if (GET_CODE (x) == POST_INC || GET_CODE (x) == PRE_DEC)
2569 return arm_address_register_rtx_p (XEXP (x, 0), strict_p);
2571 else if ((GET_CODE (x) == POST_MODIFY || GET_CODE (x) == PRE_MODIFY)
2572 && GET_MODE_SIZE (mode) <= 4
2573 && arm_address_register_rtx_p (XEXP (x, 0), strict_p)
2574 && GET_CODE (XEXP (x, 1)) == PLUS
2575 && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
2576 return arm_legitimate_index_p (mode, XEXP (XEXP (x, 1), 1), strict_p);
2578 /* After reload constants split into minipools will have addresses
2579 from a LABEL_REF. */
2580 else if (GET_MODE_SIZE (mode) >= 4 && reload_completed
2581 && (GET_CODE (x) == LABEL_REF
2582 || (GET_CODE (x) == CONST
2583 && GET_CODE (XEXP (x, 0)) == PLUS
2584 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF
2585 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
2586 return 1;
2588 else if (mode == TImode)
2589 return 0;
2591 else if (mode == DImode || (TARGET_SOFT_FLOAT && mode == DFmode))
2593 if (GET_CODE (x) == PLUS
2594 && arm_address_register_rtx_p (XEXP (x, 0), strict_p)
2595 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2597 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
2599 if (val == 4 || val == -4 || val == -8)
2600 return 1;
2604 else if (GET_CODE (x) == PLUS)
2606 rtx xop0 = XEXP (x, 0);
2607 rtx xop1 = XEXP (x, 1);
2609 return ((arm_address_register_rtx_p (xop0, strict_p)
2610 && arm_legitimate_index_p (mode, xop1, strict_p))
2611 || (arm_address_register_rtx_p (xop1, strict_p)
2612 && arm_legitimate_index_p (mode, xop0, strict_p)));
2615 #if 0
2616 /* Reload currently can't handle MINUS, so disable this for now */
2617 else if (GET_CODE (x) == MINUS)
2619 rtx xop0 = XEXP (x, 0);
2620 rtx xop1 = XEXP (x, 1);
2622 return (arm_address_register_rtx_p (xop0, strict_p)
2623 && arm_legitimate_index_p (mode, xop1, strict_p));
2625 #endif
2627 else if (GET_MODE_CLASS (mode) != MODE_FLOAT
2628 && GET_CODE (x) == SYMBOL_REF
2629 && CONSTANT_POOL_ADDRESS_P (x)
2630 && ! (flag_pic
2631 && symbol_mentioned_p (get_pool_constant (x))))
2632 return 1;
2634 else if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == POST_DEC)
2635 && (GET_MODE_SIZE (mode) <= 4)
2636 && arm_address_register_rtx_p (XEXP (x, 0), strict_p))
2637 return 1;
2639 return 0;
2642 /* Return nonzero if INDEX is valid for an address index operand in
2643 ARM state. */
2644 static int
2645 arm_legitimate_index_p (enum machine_mode mode, rtx index, int strict_p)
2647 HOST_WIDE_INT range;
2648 enum rtx_code code = GET_CODE (index);
2650 if (TARGET_HARD_FLOAT && GET_MODE_CLASS (mode) == MODE_FLOAT)
2651 return (code == CONST_INT && INTVAL (index) < 1024
2652 && INTVAL (index) > -1024
2653 && (INTVAL (index) & 3) == 0);
2655 if (TARGET_CIRRUS
2656 && (GET_MODE_CLASS (mode) == MODE_FLOAT || mode == DImode))
2657 return (code == CONST_INT
2658 && INTVAL (index) < 255
2659 && INTVAL (index) > -255);
2661 if (arm_address_register_rtx_p (index, strict_p)
2662 && GET_MODE_SIZE (mode) <= 4)
2663 return 1;
2665 /* XXX What about ldrsb? */
2666 if (GET_MODE_SIZE (mode) <= 4 && code == MULT
2667 && (!arm_arch4 || (mode) != HImode))
2669 rtx xiop0 = XEXP (index, 0);
2670 rtx xiop1 = XEXP (index, 1);
2672 return ((arm_address_register_rtx_p (xiop0, strict_p)
2673 && power_of_two_operand (xiop1, SImode))
2674 || (arm_address_register_rtx_p (xiop1, strict_p)
2675 && power_of_two_operand (xiop0, SImode)));
2678 if (GET_MODE_SIZE (mode) <= 4
2679 && (code == LSHIFTRT || code == ASHIFTRT
2680 || code == ASHIFT || code == ROTATERT)
2681 && (!arm_arch4 || (mode) != HImode))
2683 rtx op = XEXP (index, 1);
2685 return (arm_address_register_rtx_p (XEXP (index, 0), strict_p)
2686 && GET_CODE (op) == CONST_INT
2687 && INTVAL (op) > 0
2688 && INTVAL (op) <= 31);
2691 /* XXX For ARM v4 we may be doing a sign-extend operation during the
2692 load, but that has a restricted addressing range and we are unable
2693 to tell here whether that is the case. To be safe we restrict all
2694 loads to that range. */
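/* e.g. an SImode "ldr r0, [r1, #4095]" is accepted, while an HImode
access on ARMv4 must use ldrh/ldrsh, whose immediate is limited to
+/-255. */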
2695 range = ((mode) == HImode || (mode) == QImode)
2696 ? (arm_arch4 ? 256 : 4095) : 4096;
2698 return (code == CONST_INT
2699 && INTVAL (index) < range
2700 && INTVAL (index) > -range);
2703 /* Return nonzero if X is valid as an ARM state addressing register. */
2704 static int
2705 thumb_base_register_rtx_p (rtx x, enum machine_mode mode, int strict_p)
2707 int regno;
2709 if (GET_CODE (x) != REG)
2710 return 0;
2712 regno = REGNO (x);
2714 if (strict_p)
2715 return THUMB_REGNO_MODE_OK_FOR_BASE_P (regno, mode);
2717 return (regno <= LAST_LO_REGNUM
2718 || regno >= FIRST_PSEUDO_REGISTER
2719 || regno == FRAME_POINTER_REGNUM
2720 || (GET_MODE_SIZE (mode) >= 4
2721 && (regno == STACK_POINTER_REGNUM
2722 || x == hard_frame_pointer_rtx
2723 || x == arg_pointer_rtx)));
2726 /* Return nonzero if x is a legitimate index register. This is the case
2727 for any base register that can access a QImode object. */
2728 inline static int
2729 thumb_index_register_rtx_p (rtx x, int strict_p)
2731 return thumb_base_register_rtx_p (x, QImode, strict_p);
2734 /* Return nonzero if x is a legitimate Thumb-state address.
2736 The AP may be eliminated to either the SP or the FP, so we use the
2737 least common denominator, e.g. SImode, and offsets from 0 to 64.
2739 ??? Verify whether the above is the right approach.
2741 ??? Also, the FP may be eliminated to the SP, so perhaps that
2742 needs special handling also.
2744 ??? Look at how the mips16 port solves this problem. It probably uses
2745 better ways to solve some of these problems.
2747 Although it is not incorrect, we don't accept QImode and HImode
2748 addresses based on the frame pointer or arg pointer until the
2749 reload pass starts. This is so that eliminating such addresses
2750 into stack based ones won't produce impossible code. */
2752 thumb_legitimate_address_p (enum machine_mode mode, rtx x, int strict_p)
2754 /* ??? Not clear if this is right. Experiment. */
2755 if (GET_MODE_SIZE (mode) < 4
2756 && !(reload_in_progress || reload_completed)
2757 && (reg_mentioned_p (frame_pointer_rtx, x)
2758 || reg_mentioned_p (arg_pointer_rtx, x)
2759 || reg_mentioned_p (virtual_incoming_args_rtx, x)
2760 || reg_mentioned_p (virtual_outgoing_args_rtx, x)
2761 || reg_mentioned_p (virtual_stack_dynamic_rtx, x)
2762 || reg_mentioned_p (virtual_stack_vars_rtx, x)))
2763 return 0;
2765 /* Accept any base register. SP only in SImode or larger. */
2766 else if (thumb_base_register_rtx_p (x, mode, strict_p))
2767 return 1;
2769 /* This is PC relative data before arm_reorg runs. */
2770 else if (GET_MODE_SIZE (mode) >= 4 && CONSTANT_P (x)
2771 && GET_CODE (x) == SYMBOL_REF
2772 && CONSTANT_POOL_ADDRESS_P (x) && ! flag_pic)
2773 return 1;
2775 /* This is PC relative data after arm_reorg runs. */
2776 else if (GET_MODE_SIZE (mode) >= 4 && reload_completed
2777 && (GET_CODE (x) == LABEL_REF
2778 || (GET_CODE (x) == CONST
2779 && GET_CODE (XEXP (x, 0)) == PLUS
2780 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF
2781 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
2782 return 1;
2784 /* Post-inc indexing only supported for SImode and larger. */
2785 else if (GET_CODE (x) == POST_INC && GET_MODE_SIZE (mode) >= 4
2786 && thumb_index_register_rtx_p (XEXP (x, 0), strict_p))
2787 return 1;
2789 else if (GET_CODE (x) == PLUS)
2791 /* REG+REG address can be any two index registers. */
2792 /* We disallow FRAME+REG addressing since we know that FRAME
2793 will be replaced with STACK, and SP relative addressing only
2794 permits SP+OFFSET. */
2795 if (GET_MODE_SIZE (mode) <= 4
2796 && XEXP (x, 0) != frame_pointer_rtx
2797 && XEXP (x, 1) != frame_pointer_rtx
2798 && XEXP (x, 0) != virtual_stack_vars_rtx
2799 && XEXP (x, 1) != virtual_stack_vars_rtx
2800 && thumb_index_register_rtx_p (XEXP (x, 0), strict_p)
2801 && thumb_index_register_rtx_p (XEXP (x, 1), strict_p))
2802 return 1;
2804 /* REG+const has 5-7 bit offset for non-SP registers. */
2805 else if ((thumb_index_register_rtx_p (XEXP (x, 0), strict_p)
2806 || XEXP (x, 0) == arg_pointer_rtx)
2807 && GET_CODE (XEXP (x, 1)) == CONST_INT
2808 && thumb_legitimate_offset_p (mode, INTVAL (XEXP (x, 1))))
2809 return 1;
2811 /* REG+const has 10 bit offset for SP, but only SImode and
2812 larger are supported. */
2813 /* ??? Should probably check for DI/DFmode overflow here
2814 just like GO_IF_LEGITIMATE_OFFSET does. */
2815 else if (GET_CODE (XEXP (x, 0)) == REG
2816 && REGNO (XEXP (x, 0)) == STACK_POINTER_REGNUM
2817 && GET_MODE_SIZE (mode) >= 4
2818 && GET_CODE (XEXP (x, 1)) == CONST_INT
2819 && INTVAL (XEXP (x, 1)) >= 0
2820 && INTVAL (XEXP (x, 1)) + GET_MODE_SIZE (mode) <= 1024
2821 && (INTVAL (XEXP (x, 1)) & 3) == 0)
2822 return 1;
2824 else if (GET_CODE (XEXP (x, 0)) == REG
2825 && REGNO (XEXP (x, 0)) == FRAME_POINTER_REGNUM
2826 && GET_MODE_SIZE (mode) >= 4
2827 && GET_CODE (XEXP (x, 1)) == CONST_INT
2828 && (INTVAL (XEXP (x, 1)) & 3) == 0)
2829 return 1;
2832 else if (GET_MODE_CLASS (mode) != MODE_FLOAT
2833 && GET_CODE (x) == SYMBOL_REF
2834 && CONSTANT_POOL_ADDRESS_P (x)
2835 && !(flag_pic
2836 && symbol_mentioned_p (get_pool_constant (x))))
2837 return 1;
2839 return 0;
2842 /* Return nonzero if VAL can be used as an offset in a Thumb-state address
2843 instruction of mode MODE. */
2845 thumb_legitimate_offset_p (enum machine_mode mode, HOST_WIDE_INT val)
2847 switch (GET_MODE_SIZE (mode))
2849 case 1:
2850 return val >= 0 && val < 32;
2852 case 2:
2853 return val >= 0 && val < 64 && (val & 1) == 0;
2855 default:
2856 return (val >= 0
2857 && (val + GET_MODE_SIZE (mode)) <= 128
2858 && (val & 3) == 0);
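/* In instruction terms this permits (illustrative): "ldrb rD, [rN, #0-31]",
"ldrh rD, [rN, #0-62]" (even offsets) and "ldr rD, [rN, #0-124]"
(word-aligned offsets), matching the Thumb load/store encodings. */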
2862 /* Try machine-dependent ways of modifying an illegitimate address
2863 to be legitimate. If we find one, return the new, valid address. */
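/* For example, (plus REG (const_int 0x123456)) is not a valid SImode
address; below it is split into TMP = REG + 0x123000 followed by the
legitimate address (plus TMP 0x456). */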
2865 arm_legitimize_address (rtx x, rtx orig_x, enum machine_mode mode)
2867 if (GET_CODE (x) == PLUS)
2869 rtx xop0 = XEXP (x, 0);
2870 rtx xop1 = XEXP (x, 1);
2872 if (CONSTANT_P (xop0) && !symbol_mentioned_p (xop0))
2873 xop0 = force_reg (SImode, xop0);
2875 if (CONSTANT_P (xop1) && !symbol_mentioned_p (xop1))
2876 xop1 = force_reg (SImode, xop1);
2878 if (ARM_BASE_REGISTER_RTX_P (xop0)
2879 && GET_CODE (xop1) == CONST_INT)
2881 HOST_WIDE_INT n, low_n;
2882 rtx base_reg, val;
2883 n = INTVAL (xop1);
2885 if (mode == DImode || (TARGET_SOFT_FLOAT && mode == DFmode))
2887 low_n = n & 0x0f;
2888 n &= ~0x0f;
2889 if (low_n > 4)
2891 n += 16;
2892 low_n -= 16;
2895 else
2897 low_n = ((mode) == TImode ? 0
2898 : n >= 0 ? (n & 0xfff) : -((-n) & 0xfff));
2899 n -= low_n;
2902 base_reg = gen_reg_rtx (SImode);
2903 val = force_operand (gen_rtx_PLUS (SImode, xop0,
2904 GEN_INT (n)), NULL_RTX);
2905 emit_move_insn (base_reg, val);
2906 x = (low_n == 0 ? base_reg
2907 : gen_rtx_PLUS (SImode, base_reg, GEN_INT (low_n)));
2909 else if (xop0 != XEXP (x, 0) || xop1 != XEXP (x, 1))
2910 x = gen_rtx_PLUS (SImode, xop0, xop1);
2913 /* XXX We don't allow MINUS any more -- see comment in
2914 arm_legitimate_address_p (). */
2915 else if (GET_CODE (x) == MINUS)
2917 rtx xop0 = XEXP (x, 0);
2918 rtx xop1 = XEXP (x, 1);
2920 if (CONSTANT_P (xop0))
2921 xop0 = force_reg (SImode, xop0);
2923 if (CONSTANT_P (xop1) && ! symbol_mentioned_p (xop1))
2924 xop1 = force_reg (SImode, xop1);
2926 if (xop0 != XEXP (x, 0) || xop1 != XEXP (x, 1))
2927 x = gen_rtx_MINUS (SImode, xop0, xop1);
2930 if (flag_pic)
2932 /* We need to find and carefully transform any SYMBOL and LABEL
2933 references; so go back to the original address expression. */
2934 rtx new_x = legitimize_pic_address (orig_x, mode, NULL_RTX);
2936 if (new_x != orig_x)
2937 x = new_x;
2940 return x;
2945 #define REG_OR_SUBREG_REG(X) \
2946 (GET_CODE (X) == REG \
2947 || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))
2949 #define REG_OR_SUBREG_RTX(X) \
2950 (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))
2952 #ifndef COSTS_N_INSNS
2953 #define COSTS_N_INSNS(N) ((N) * 4 - 2)
2954 #endif
2955 /* Worker routine for arm_rtx_costs. */
2956 static inline int
2957 arm_rtx_costs_1 (rtx x, enum rtx_code code, enum rtx_code outer)
2959 enum machine_mode mode = GET_MODE (x);
2960 enum rtx_code subcode;
2961 int extra_cost;
2963 if (TARGET_THUMB)
2965 switch (code)
2967 case ASHIFT:
2968 case ASHIFTRT:
2969 case LSHIFTRT:
2970 case ROTATERT:
2971 case PLUS:
2972 case MINUS:
2973 case COMPARE:
2974 case NEG:
2975 case NOT:
2976 return COSTS_N_INSNS (1);
2978 case MULT:
2979 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
2981 int cycles = 0;
2982 unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));
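/* Thumb multiplies terminate early; model the cost as roughly one cycle
per two significant bits of the constant multiplier (a rough
approximation). */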
2984 while (i)
2986 i >>= 2;
2987 cycles++;
2989 return COSTS_N_INSNS (2) + cycles;
2991 return COSTS_N_INSNS (1) + 16;
2993 case SET:
2994 return (COSTS_N_INSNS (1)
2995 + 4 * ((GET_CODE (SET_SRC (x)) == MEM)
2996 + (GET_CODE (SET_DEST (x)) == MEM)));
2998 case CONST_INT:
2999 if (outer == SET)
3001 if ((unsigned HOST_WIDE_INT) INTVAL (x) < 256)
3002 return 0;
3003 if (thumb_shiftable_const (INTVAL (x)))
3004 return COSTS_N_INSNS (2);
3005 return COSTS_N_INSNS (3);
3007 else if (outer == PLUS
3008 && INTVAL (x) < 256 && INTVAL (x) > -256)
3009 return 0;
3010 else if (outer == COMPARE
3011 && (unsigned HOST_WIDE_INT) INTVAL (x) < 256)
3012 return 0;
3013 else if (outer == ASHIFT || outer == ASHIFTRT
3014 || outer == LSHIFTRT)
3015 return 0;
3016 return COSTS_N_INSNS (2);
3018 case CONST:
3019 case CONST_DOUBLE:
3020 case LABEL_REF:
3021 case SYMBOL_REF:
3022 return COSTS_N_INSNS (3);
3024 case UDIV:
3025 case UMOD:
3026 case DIV:
3027 case MOD:
3028 return 100;
3030 case TRUNCATE:
3031 return 99;
3033 case AND:
3034 case XOR:
3035 case IOR:
3036 /* XXX guess. */
3037 return 8;
3039 case ADDRESSOF:
3040 case MEM:
3041 /* XXX another guess. */
3042 /* Memory costs quite a lot for the first word, but subsequent words
3043 load at the equivalent of a single insn each. */
3044 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
3045 + ((GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
3046 ? 4 : 0));
3048 case IF_THEN_ELSE:
3049 /* XXX a guess. */
3050 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
3051 return 14;
3052 return 2;
3054 case ZERO_EXTEND:
3055 /* XXX still guessing. */
3056 switch (GET_MODE (XEXP (x, 0)))
3058 case QImode:
3059 return (1 + (mode == DImode ? 4 : 0)
3060 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3062 case HImode:
3063 return (4 + (mode == DImode ? 4 : 0)
3064 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3066 case SImode:
3067 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3069 default:
3070 return 99;
3073 default:
3074 return 99;
3075 #if 0
3076 case FFS:
3077 case FLOAT:
3078 case FIX:
3079 case UNSIGNED_FIX:
3080 /* XXX guess */
3081 fprintf (stderr, "unexpected code for thumb in rtx_costs: %s\n",
3082 rtx_name[code]);
3083 abort ();
3084 #endif
3088 switch (code)
3090 case MEM:
3091 /* Memory costs quite a lot for the first word, but subsequent words
3092 load at the equivalent of a single insn each. */
3093 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
3094 + (GET_CODE (x) == SYMBOL_REF
3095 && CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));
3097 case DIV:
3098 case MOD:
3099 return 100;
3101 case ROTATE:
3102 if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
3103 return 4;
3104 /* Fall through */
3105 case ROTATERT:
3106 if (mode != SImode)
3107 return 8;
3108 /* Fall through */
3109 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
3110 if (mode == DImode)
3111 return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
3112 + ((GET_CODE (XEXP (x, 0)) == REG
3113 || (GET_CODE (XEXP (x, 0)) == SUBREG
3114 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
3115 ? 0 : 8));
3116 return (1 + ((GET_CODE (XEXP (x, 0)) == REG
3117 || (GET_CODE (XEXP (x, 0)) == SUBREG
3118 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
3119 ? 0 : 4)
3120 + ((GET_CODE (XEXP (x, 1)) == REG
3121 || (GET_CODE (XEXP (x, 1)) == SUBREG
3122 && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
3123 || (GET_CODE (XEXP (x, 1)) == CONST_INT))
3124 ? 0 : 4));
3126 case MINUS:
3127 if (mode == DImode)
3128 return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
3129 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
3130 || (GET_CODE (XEXP (x, 0)) == CONST_INT
3131 && const_ok_for_arm (INTVAL (XEXP (x, 0)))))
3132 ? 0 : 8));
3134 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3135 return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
3136 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
3137 && const_double_rtx_ok_for_fpa (XEXP (x, 1))))
3138 ? 0 : 8)
3139 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
3140 || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
3141 && const_double_rtx_ok_for_fpa (XEXP (x, 0))))
3142 ? 0 : 8));
3144 if (((GET_CODE (XEXP (x, 0)) == CONST_INT
3145 && const_ok_for_arm (INTVAL (XEXP (x, 0)))
3146 && REG_OR_SUBREG_REG (XEXP (x, 1))))
3147 || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
3148 || subcode == ASHIFTRT || subcode == LSHIFTRT
3149 || subcode == ROTATE || subcode == ROTATERT
3150 || (subcode == MULT
3151 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3152 && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
3153 (INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
3154 && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
3155 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
3156 || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
3157 && REG_OR_SUBREG_REG (XEXP (x, 0))))
3158 return 1;
3159 /* Fall through */
3161 case PLUS:
3162 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3163 return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
3164 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
3165 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
3166 && const_double_rtx_ok_for_fpa (XEXP (x, 1))))
3167 ? 0 : 8));
3169 /* Fall through */
3170 case AND: case XOR: case IOR:
3171 extra_cost = 0;
3173 /* Normally the frame registers will be split into reg+const during
3174 reload, so it is a bad idea to combine them with other instructions,
3175 since then they might not be moved outside of loops. As a compromise
3176 we allow integration with ops that have a constant as their second
3177 operand. */
3178 if ((REG_OR_SUBREG_REG (XEXP (x, 0))
3179 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
3180 && GET_CODE (XEXP (x, 1)) != CONST_INT)
3181 || (REG_OR_SUBREG_REG (XEXP (x, 1))
3182 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 1)))))
3183 extra_cost = 4;
3185 if (mode == DImode)
3186 return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
3187 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
3188 || (GET_CODE (XEXP (x, 1)) == CONST_INT
3189 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
3190 ? 0 : 8));
3192 if (REG_OR_SUBREG_REG (XEXP (x, 0)))
3193 return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
3194 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
3195 || (GET_CODE (XEXP (x, 1)) == CONST_INT
3196 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
3197 ? 0 : 4));
3199 else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
3200 return (1 + extra_cost
3201 + ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
3202 || subcode == LSHIFTRT || subcode == ASHIFTRT
3203 || subcode == ROTATE || subcode == ROTATERT
3204 || (subcode == MULT
3205 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3206 && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
3207 (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0)))
3208 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
3209 && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
3210 || GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
3211 ? 0 : 4));
3213 return 8;
3215 case MULT:
3216 /* There is no point basing this on the tuning, since it is always the
3217 fast variant if it exists at all. */
3218 if (arm_fast_multiply && mode == DImode
3219 && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
3220 && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
3221 || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
3222 return 8;
3224 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3225 || mode == DImode)
3226 return 30;
3228 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
3230 unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
3231 & (unsigned HOST_WIDE_INT) 0xffffffff);
3232 int add_cost = const_ok_for_arm (i) ? 4 : 8;
3233 int j;
3235 /* Tune as appropriate. */
3236 int booth_unit_size = ((tune_flags & FL_FAST_MULT) ? 8 : 2);
3238 for (j = 0; i && j < 32; j += booth_unit_size)
3240 i >>= booth_unit_size;
3241 add_cost += 2;
3244 return add_cost;
3247 return (((tune_flags & FL_FAST_MULT) ? 8 : 30)
3248 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
3249 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));
3251 case TRUNCATE:
3252 if (arm_fast_multiply && mode == SImode
3253 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
3254 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
3255 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
3256 == GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)))
3257 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ZERO_EXTEND
3258 || GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SIGN_EXTEND))
3259 return 8;
3260 return 99;
3262 case NEG:
3263 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3264 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
3265 /* Fall through */
3266 case NOT:
3267 if (mode == DImode)
3268 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
3270 return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
3272 case IF_THEN_ELSE:
3273 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
3274 return 14;
3275 return 2;
3277 case COMPARE:
3278 return 1;
3280 case ABS:
3281 return 4 + (mode == DImode ? 4 : 0);
3283 case SIGN_EXTEND:
3284 if (GET_MODE (XEXP (x, 0)) == QImode)
3285 return (4 + (mode == DImode ? 4 : 0)
3286 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3287 /* Fall through */
3288 case ZERO_EXTEND:
3289 switch (GET_MODE (XEXP (x, 0)))
3291 case QImode:
3292 return (1 + (mode == DImode ? 4 : 0)
3293 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3295 case HImode:
3296 return (4 + (mode == DImode ? 4 : 0)
3297 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3299 case SImode:
3300 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3302 default:
3303 break;
3305 abort ();
3307 case CONST_INT:
3308 if (const_ok_for_arm (INTVAL (x)))
3309 return outer == SET ? 2 : -1;
3310 else if (outer == AND
3311 && const_ok_for_arm (~INTVAL (x)))
3312 return -1;
3313 else if ((outer == COMPARE
3314 || outer == PLUS || outer == MINUS)
3315 && const_ok_for_arm (-INTVAL (x)))
3316 return -1;
3317 else
3318 return 5;
3320 case CONST:
3321 case LABEL_REF:
3322 case SYMBOL_REF:
3323 return 6;
3325 case CONST_DOUBLE:
3326 if (const_double_rtx_ok_for_fpa (x))
3327 return outer == SET ? 2 : -1;
3328 else if ((outer == COMPARE || outer == PLUS)
3329 && neg_const_double_rtx_ok_for_fpa (x))
3330 return -1;
3331 return 7;
3333 default:
3334 return 99;
3338 static bool
3339 arm_rtx_costs (rtx x, int code, int outer_code, int *total)
3341 *total = arm_rtx_costs_1 (x, code, outer_code);
3342 return true;
3345 /* All address computations that can be done are free, but rtx cost returns
3346 the same for practically all of them. So we weight the different types
3347 of address here in the order (most pref first):
3348 PRE/POST_INC/DEC, SHIFT or NON-INT sum, INT sum, REG, MEM or LABEL. */
3349 static int
3350 arm_address_cost (rtx x)
3352 #define ARM_ADDRESS_COST(X) \
3353 (10 - ((GET_CODE (X) == MEM || GET_CODE (X) == LABEL_REF \
3354 || GET_CODE (X) == SYMBOL_REF) \
3355 ? 0 \
3356 : ((GET_CODE (X) == PRE_INC || GET_CODE (X) == PRE_DEC \
3357 || GET_CODE (X) == POST_INC || GET_CODE (X) == POST_DEC) \
3358 ? 10 \
3359 : (((GET_CODE (X) == PLUS || GET_CODE (X) == MINUS) \
3360 ? 6 + (GET_CODE (XEXP (X, 1)) == CONST_INT ? 2 \
3361 : ((GET_RTX_CLASS (GET_CODE (XEXP (X, 0))) == '2' \
3362 || GET_RTX_CLASS (GET_CODE (XEXP (X, 0))) == 'c' \
3363 || GET_RTX_CLASS (GET_CODE (XEXP (X, 1))) == '2' \
3364 || GET_RTX_CLASS (GET_CODE (XEXP (X, 1))) == 'c') \
3365 ? 1 : 0)) \
3366 : 4)))))
3368 #define THUMB_ADDRESS_COST(X) \
3369 ((GET_CODE (X) == REG \
3370 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 0)) == REG \
3371 && GET_CODE (XEXP (X, 1)) == CONST_INT)) \
3372 ? 1 : 2)
3374 return (TARGET_ARM ? ARM_ADDRESS_COST (x) : THUMB_ADDRESS_COST (x));
3377 static int
3378 arm_use_dfa_pipeline_interface (void)
3380 return true;
3383 static int
3384 arm_adjust_cost (rtx insn, rtx link, rtx dep, int cost)
3386 rtx i_pat, d_pat;
3388 /* Some true dependencies can have a higher cost depending
3389 on precisely how certain input operands are used. */
3390 if (arm_tune_xscale
3391 && REG_NOTE_KIND (link) == 0
3392 && recog_memoized (insn) >= 0
3393 && recog_memoized (dep) >= 0)
3395 int shift_opnum = get_attr_shift (insn);
3396 enum attr_type attr_type = get_attr_type (dep);
3398 /* If nonzero, SHIFT_OPNUM contains the operand number of a shifted
3399 operand for INSN. If we have a shifted input operand and the
3400 instruction we depend on is another ALU instruction, then we may
3401 have to account for an additional stall. */
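/* e.g. on XScale an ALU result feeding the shifted operand of
"add r0, r1, r2, lsl #2" incurs an extra stall (illustrative). */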
3402 if (shift_opnum != 0 && attr_type == TYPE_NORMAL)
3404 rtx shifted_operand;
3405 int opno;
3407 /* Get the shifted operand. */
3408 extract_insn (insn);
3409 shifted_operand = recog_data.operand[shift_opnum];
3411 /* Iterate over all the operands in DEP. If we write an operand
3412 that overlaps with SHIFTED_OPERAND, then we have to increase the
3413 cost of this dependency. */
3414 extract_insn (dep);
3415 preprocess_constraints ();
3416 for (opno = 0; opno < recog_data.n_operands; opno++)
3418 /* We can ignore strict inputs. */
3419 if (recog_data.operand_type[opno] == OP_IN)
3420 continue;
3422 if (reg_overlap_mentioned_p (recog_data.operand[opno],
3423 shifted_operand))
3424 return 2;
3429 /* XXX This is not strictly true for the FPA. */
3430 if (REG_NOTE_KIND (link) == REG_DEP_ANTI
3431 || REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
3432 return 0;
3434 /* Call insns don't incur a stall, even if they follow a load. */
3435 if (REG_NOTE_KIND (link) == 0
3436 && GET_CODE (insn) == CALL_INSN)
3437 return 1;
3439 if ((i_pat = single_set (insn)) != NULL
3440 && GET_CODE (SET_SRC (i_pat)) == MEM
3441 && (d_pat = single_set (dep)) != NULL
3442 && GET_CODE (SET_DEST (d_pat)) == MEM)
3444 rtx src_mem = XEXP (SET_SRC (i_pat), 0);
3445 /* This is a load after a store; there is no conflict if the load reads
3446 from a cached area. Assume that loads from the stack, and from the
3447 constant pool are cached, and that others will miss. This is a
3448 hack. */
3450 if ((GET_CODE (src_mem) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (src_mem))
3451 || reg_mentioned_p (stack_pointer_rtx, src_mem)
3452 || reg_mentioned_p (frame_pointer_rtx, src_mem)
3453 || reg_mentioned_p (hard_frame_pointer_rtx, src_mem))
3454 return 1;
3457 return cost;
3460 static int fpa_consts_inited = 0;
3462 static const char * const strings_fpa[8] =
3464 "0", "1", "2", "3",
3465 "4", "5", "0.5", "10"
3468 static REAL_VALUE_TYPE values_fpa[8];
3470 static void
3471 init_fpa_table (void)
3473 int i;
3474 REAL_VALUE_TYPE r;
3476 for (i = 0; i < 8; i++)
3478 r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
3479 values_fpa[i] = r;
3482 fpa_consts_inited = 1;
3485 /* Return TRUE if rtx X is a valid immediate FPA constant. */
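/* e.g. 0.5 and 10.0 are in strings_fpa and so can appear as FPA
instruction immediates, while 1.5 cannot and must be loaded from
memory (illustrative). */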
3487 const_double_rtx_ok_for_fpa (rtx x)
3489 REAL_VALUE_TYPE r;
3490 int i;
3492 if (!fpa_consts_inited)
3493 init_fpa_table ();
3495 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3496 if (REAL_VALUE_MINUS_ZERO (r))
3497 return 0;
3499 for (i = 0; i < 8; i++)
3500 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
3501 return 1;
3503 return 0;
3506 /* Return TRUE if rtx X is a valid immediate FPA constant when negated. */
3508 neg_const_double_rtx_ok_for_fpa (rtx x)
3510 REAL_VALUE_TYPE r;
3511 int i;
3513 if (!fpa_consts_inited)
3514 init_fpa_table ();
3516 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3517 r = REAL_VALUE_NEGATE (r);
3518 if (REAL_VALUE_MINUS_ZERO (r))
3519 return 0;
3521 for (i = 0; i < 8; i++)
3522 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
3523 return 1;
3525 return 0;
3528 /* Predicates for `match_operand' and `match_operator'. */
3530 /* s_register_operand is the same as register_operand, but it doesn't accept
3531 (SUBREG (MEM)...).
3533 This function exists because at the time it was put in it led to better
3534 code. SUBREG(MEM) always needs a reload in the places where
3535 s_register_operand is used, and this seemed to lead to excessive
3536 reloading. */
3538 s_register_operand (rtx op, enum machine_mode mode)
3540 if (GET_MODE (op) != mode && mode != VOIDmode)
3541 return 0;
3543 if (GET_CODE (op) == SUBREG)
3544 op = SUBREG_REG (op);
3546 /* We don't consider registers whose class is NO_REGS
3547 to be a register operand. */
3548 /* XXX might have to check for lo regs only for thumb ??? */
3549 return (GET_CODE (op) == REG
3550 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3551 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
3554 /* A hard register operand (even before reload). */
3556 arm_hard_register_operand (rtx op, enum machine_mode mode)
3558 if (GET_MODE (op) != mode && mode != VOIDmode)
3559 return 0;
3561 return (GET_CODE (op) == REG
3562 && REGNO (op) < FIRST_PSEUDO_REGISTER);
3565 /* Only accept reg, subreg(reg), const_int. */
3567 reg_or_int_operand (rtx op, enum machine_mode mode)
3569 if (GET_CODE (op) == CONST_INT)
3570 return 1;
3572 if (GET_MODE (op) != mode && mode != VOIDmode)
3573 return 0;
3575 if (GET_CODE (op) == SUBREG)
3576 op = SUBREG_REG (op);
3578 /* We don't consider registers whose class is NO_REGS
3579 to be a register operand. */
3580 return (GET_CODE (op) == REG
3581 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3582 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
3585 /* Return 1 if OP is an item in memory, given that we are in reload. */
3587 arm_reload_memory_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3589 int regno = true_regnum (op);
3591 return (!CONSTANT_P (op)
3592 && (regno == -1
3593 || (GET_CODE (op) == REG
3594 && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
3597 /* Return 1 if OP is a valid memory address, but not valid for a signed byte
3598 memory access (architecture V4).
3599 MODE is QImode if called when computing constraints, or VOIDmode when
3600 emitting patterns. In this latter case we cannot use memory_operand()
3601 because it will fail on badly formed MEMs, which is precisely what we are
3602 trying to catch. */
3604 bad_signed_byte_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3606 #if 0
3607 if ((mode == QImode && !memory_operand (op, mode)) || GET_CODE (op) != MEM)
3608 return 0;
3609 #endif
3610 if (GET_CODE (op) != MEM)
3611 return 0;
3613 op = XEXP (op, 0);
3615 /* A sum of anything more complex than reg + reg or reg + const is bad. */
3616 if ((GET_CODE (op) == PLUS || GET_CODE (op) == MINUS)
3617 && (!s_register_operand (XEXP (op, 0), VOIDmode)
3618 || (!s_register_operand (XEXP (op, 1), VOIDmode)
3619 && GET_CODE (XEXP (op, 1)) != CONST_INT)))
3620 return 1;
3622 /* Big constants are also bad. */
3623 if (GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT
3624 && (INTVAL (XEXP (op, 1)) > 0xff
3625 || -INTVAL (XEXP (op, 1)) > 0xff))
3626 return 1;
3628 /* Everything else is good, or will automatically be made so. */
3629 return 0;
3632 /* Return TRUE for valid operands for the rhs of an ARM instruction. */
3634 arm_rhs_operand (rtx op, enum machine_mode mode)
3636 return (s_register_operand (op, mode)
3637 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
3640 /* Return TRUE for valid operands for the
3641 rhs of an ARM instruction, or a load. */
3643 arm_rhsm_operand (rtx op, enum machine_mode mode)
3645 return (s_register_operand (op, mode)
3646 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
3647 || memory_operand (op, mode));
3650 /* Return TRUE for valid operands for the rhs of an ARM instruction, or a
3651 constant that is valid when negated. */
3653 arm_add_operand (rtx op, enum machine_mode mode)
3655 if (TARGET_THUMB)
3656 return thumb_cmp_operand (op, mode);
3658 return (s_register_operand (op, mode)
3659 || (GET_CODE (op) == CONST_INT
3660 && (const_ok_for_arm (INTVAL (op))
3661 || const_ok_for_arm (-INTVAL (op)))));
3665 arm_not_operand (rtx op, enum machine_mode mode)
3667 return (s_register_operand (op, mode)
3668 || (GET_CODE (op) == CONST_INT
3669 && (const_ok_for_arm (INTVAL (op))
3670 || const_ok_for_arm (~INTVAL (op)))));
3673 /* Return TRUE if the operand is a memory reference which contains an
3674 offsettable address. */
3676 offsettable_memory_operand (rtx op, enum machine_mode mode)
3678 if (mode == VOIDmode)
3679 mode = GET_MODE (op);
3681 return (mode == GET_MODE (op)
3682 && GET_CODE (op) == MEM
3683 && offsettable_address_p (reload_completed | reload_in_progress,
3684 mode, XEXP (op, 0)));
3687 /* Return TRUE if the operand is a memory reference which is, or can be
3688 made word aligned by adjusting the offset. */
3690 alignable_memory_operand (rtx op, enum machine_mode mode)
3692 rtx reg;
3694 if (mode == VOIDmode)
3695 mode = GET_MODE (op);
3697 if (mode != GET_MODE (op) || GET_CODE (op) != MEM)
3698 return 0;
3700 op = XEXP (op, 0);
3702 return ((GET_CODE (reg = op) == REG
3703 || (GET_CODE (op) == SUBREG
3704 && GET_CODE (reg = SUBREG_REG (op)) == REG)
3705 || (GET_CODE (op) == PLUS
3706 && GET_CODE (XEXP (op, 1)) == CONST_INT
3707 && (GET_CODE (reg = XEXP (op, 0)) == REG
3708 || (GET_CODE (XEXP (op, 0)) == SUBREG
3709 && GET_CODE (reg = SUBREG_REG (XEXP (op, 0))) == REG))))
3710 && REGNO_POINTER_ALIGN (REGNO (reg)) >= 32);
3713 /* Similar to s_register_operand, but does not allow hard integer
3714 registers. */
3716 f_register_operand (rtx op, enum machine_mode mode)
3718 if (GET_MODE (op) != mode && mode != VOIDmode)
3719 return 0;
3721 if (GET_CODE (op) == SUBREG)
3722 op = SUBREG_REG (op);
3724 /* We don't consider registers whose class is NO_REGS
3725 to be a register operand. */
3726 return (GET_CODE (op) == REG
3727 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3728 || REGNO_REG_CLASS (REGNO (op)) == FPA_REGS));
3731 /* Return TRUE for valid operands for the rhs of an FPA instruction. */
3733 fpa_rhs_operand (rtx op, enum machine_mode mode)
3735 if (s_register_operand (op, mode))
3736 return TRUE;
3738 if (GET_MODE (op) != mode && mode != VOIDmode)
3739 return FALSE;
3741 if (GET_CODE (op) == CONST_DOUBLE)
3742 return const_double_rtx_ok_for_fpa (op);
3744 return FALSE;
3748 fpa_add_operand (rtx op, enum machine_mode mode)
3750 if (s_register_operand (op, mode))
3751 return TRUE;
3753 if (GET_MODE (op) != mode && mode != VOIDmode)
3754 return FALSE;
3756 if (GET_CODE (op) == CONST_DOUBLE)
3757 return (const_double_rtx_ok_for_fpa (op)
3758 || neg_const_double_rtx_ok_for_fpa (op));
3760 return FALSE;
3763 /* Return nonzero if OP is a valid Cirrus memory address pattern. */
3765 cirrus_memory_offset (rtx op)
3767 /* Reject eliminable registers. */
3768 if (! (reload_in_progress || reload_completed)
3769 && ( reg_mentioned_p (frame_pointer_rtx, op)
3770 || reg_mentioned_p (arg_pointer_rtx, op)
3771 || reg_mentioned_p (virtual_incoming_args_rtx, op)
3772 || reg_mentioned_p (virtual_outgoing_args_rtx, op)
3773 || reg_mentioned_p (virtual_stack_dynamic_rtx, op)
3774 || reg_mentioned_p (virtual_stack_vars_rtx, op)))
3775 return 0;
3777 if (GET_CODE (op) == MEM)
3779 rtx ind;
3781 ind = XEXP (op, 0);
3783 /* Match: (mem (reg)). */
3784 if (GET_CODE (ind) == REG)
3785 return 1;
3787 /* Match:
3788 (mem (plus (reg)
3789 (const))). */
3790 if (GET_CODE (ind) == PLUS
3791 && GET_CODE (XEXP (ind, 0)) == REG
3792 && REG_MODE_OK_FOR_BASE_P (XEXP (ind, 0), VOIDmode)
3793 && GET_CODE (XEXP (ind, 1)) == CONST_INT)
3794 return 1;
3797 return 0;
3800 /* Return nonzero if OP is a Cirrus or general register. */
3802 cirrus_register_operand (rtx op, enum machine_mode mode)
3804 if (GET_MODE (op) != mode && mode != VOIDmode)
3805 return FALSE;
3807 if (GET_CODE (op) == SUBREG)
3808 op = SUBREG_REG (op);
3810 return (GET_CODE (op) == REG
3811 && (REGNO_REG_CLASS (REGNO (op)) == CIRRUS_REGS
3812 || REGNO_REG_CLASS (REGNO (op)) == GENERAL_REGS));
3815 /* Return nonzero if OP is a Cirrus FP register. */
3817 cirrus_fp_register (rtx op, enum machine_mode mode)
3819 if (GET_MODE (op) != mode && mode != VOIDmode)
3820 return FALSE;
3822 if (GET_CODE (op) == SUBREG)
3823 op = SUBREG_REG (op);
3825 return (GET_CODE (op) == REG
3826 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3827 || REGNO_REG_CLASS (REGNO (op)) == CIRRUS_REGS));
3830 /* Return nonzero if OP is a 6-bit constant (0..63). */
3832 cirrus_shift_const (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3834 return (GET_CODE (op) == CONST_INT
3835 && INTVAL (op) >= 0
3836 && INTVAL (op) < 64);
3839 /* Returns TRUE if INSN is an "LDR REG, ADDR" instruction.
3840 Used by the Cirrus Maverick code, which has to work around
3841 a hardware bug triggered by such instructions. */
3842 static bool
3843 arm_memory_load_p (rtx insn)
3845 rtx body, lhs, rhs;
3847 if (insn == NULL_RTX || GET_CODE (insn) != INSN)
3848 return false;
3850 body = PATTERN (insn);
3852 if (GET_CODE (body) != SET)
3853 return false;
3855 lhs = XEXP (body, 0);
3856 rhs = XEXP (body, 1);
3858 lhs = REG_OR_SUBREG_RTX (lhs);
3860 /* If the destination is not a general purpose
3861 register we do not have to worry. */
3862 if (GET_CODE (lhs) != REG
3863 || REGNO_REG_CLASS (REGNO (lhs)) != GENERAL_REGS)
3864 return false;
3866 /* As well as loads from memory we also have to react
3867 to loads of invalid constants which will be turned
3868 into loads from the minipool. */
3869 return (GET_CODE (rhs) == MEM
3870 || GET_CODE (rhs) == SYMBOL_REF
3871 || note_invalid_constants (insn, -1, false));
3874 /* Return TRUE if INSN is a Cirrus instruction. */
3875 static bool
3876 arm_cirrus_insn_p (rtx insn)
3878 enum attr_cirrus attr;
3880 /* get_attr aborts on USE and CLOBBER. */
3881 if (!insn
3882 || GET_CODE (insn) != INSN
3883 || GET_CODE (PATTERN (insn)) == USE
3884 || GET_CODE (PATTERN (insn)) == CLOBBER)
3885 return 0;
3887 attr = get_attr_cirrus (insn);
3889 return attr != CIRRUS_NOT;
3892 /* Cirrus reorg for invalid instruction combinations. */
3893 static void
3894 cirrus_reorg (rtx first)
3896 enum attr_cirrus attr;
3897 rtx body = PATTERN (first);
3898 rtx t;
3899 int nops;
3901 /* Any branch must be followed by 2 non-Cirrus instructions. */
3902 if (GET_CODE (first) == JUMP_INSN && GET_CODE (body) != RETURN)
3904 nops = 0;
3905 t = next_nonnote_insn (first);
3907 if (arm_cirrus_insn_p (t))
3908 ++ nops;
3910 if (arm_cirrus_insn_p (next_nonnote_insn (t)))
3911 ++ nops;
3913 while (nops --)
3914 emit_insn_after (gen_nop (), first);
3916 return;
3919 /* (float (blah)) is in parallel with a clobber. */
3920 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
3921 body = XVECEXP (body, 0, 0);
3923 if (GET_CODE (body) == SET)
3925 rtx lhs = XEXP (body, 0), rhs = XEXP (body, 1);
3927 /* cfldrd, cfldr64, cfstrd, cfstr64 must
3928 be followed by a non-Cirrus insn. */
3929 if (get_attr_cirrus (first) == CIRRUS_DOUBLE)
3931 if (arm_cirrus_insn_p (next_nonnote_insn (first)))
3932 emit_insn_after (gen_nop (), first);
3934 return;
3936 else if (arm_memory_load_p (first))
3938 unsigned int arm_regno;
3940 /* Any ldr/cfmvdlr, ldr/cfmvdhr, ldr/cfmvsr, ldr/cfmv64lr,
3941 ldr/cfmv64hr combination where the Rd field is the same
3942 in both instructions must be split with a non-Cirrus
3943 insn. Example:
3945 ldr r0, blah
3947 cfmvsr mvf0, r0. */
3949 /* Get the ARM register number for the ldr insn. */
3950 if (GET_CODE (lhs) == REG)
3951 arm_regno = REGNO (lhs);
3952 else if (GET_CODE (rhs) == REG)
3953 arm_regno = REGNO (rhs);
3954 else
3955 abort ();
3957 /* Next insn. */
3958 first = next_nonnote_insn (first);
3960 if (! arm_cirrus_insn_p (first))
3961 return;
3963 body = PATTERN (first);
3965 /* (float (blah)) is in parallel with a clobber. */
3966 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0))
3967 body = XVECEXP (body, 0, 0);
3969 if (GET_CODE (body) == FLOAT)
3970 body = XEXP (body, 0);
3972 if (get_attr_cirrus (first) == CIRRUS_MOVE
3973 && GET_CODE (XEXP (body, 1)) == REG
3974 && arm_regno == REGNO (XEXP (body, 1)))
3975 emit_insn_after (gen_nop (), first);
3977 return;
3981 /* get_attr aborts on USE and CLOBBER. */
3982 if (!first
3983 || GET_CODE (first) != INSN
3984 || GET_CODE (PATTERN (first)) == USE
3985 || GET_CODE (PATTERN (first)) == CLOBBER)
3986 return;
3988 attr = get_attr_cirrus (first);
3990 /* Any coprocessor compare instruction (cfcmps, cfcmpd, ...)
3991 must be followed by a non-coprocessor instruction. */
3992 if (attr == CIRRUS_COMPARE)
3994 nops = 0;
3996 t = next_nonnote_insn (first);
3998 if (arm_cirrus_insn_p (t))
3999 ++ nops;
4001 if (arm_cirrus_insn_p (next_nonnote_insn (t)))
4002 ++ nops;
4004 while (nops --)
4005 emit_insn_after (gen_nop (), first);
4007 return;
4011 /* Return nonzero if OP is a constant power of two. */
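/* A quick illustration of the bit trick used below: a power of two
   has exactly one bit set, so subtracting one flips that bit and sets
   every bit below it; hence 8 & 7 == 01000 & 00111 == 0, whereas
   12 & 11 == 01100 & 01011 == 01000 != 0.  The explicit test for a
   nonzero value is needed because 0 & -1 == 0 would otherwise pass.  */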
4013 power_of_two_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
4015 if (GET_CODE (op) == CONST_INT)
4017 HOST_WIDE_INT value = INTVAL (op);
4019 return value != 0 && (value & (value - 1)) == 0;
4022 return FALSE;
4025 /* Return TRUE for a valid operand of a DImode operation.
4026 Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
4027 Note that this disallows MEM(REG+REG), but allows
4028 MEM(PRE/POST_INC/DEC(REG)). */
4030 di_operand (rtx op, enum machine_mode mode)
4032 if (s_register_operand (op, mode))
4033 return TRUE;
4035 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
4036 return FALSE;
4038 if (GET_CODE (op) == SUBREG)
4039 op = SUBREG_REG (op);
4041 switch (GET_CODE (op))
4043 case CONST_DOUBLE:
4044 case CONST_INT:
4045 return TRUE;
4047 case MEM:
4048 return memory_address_p (DImode, XEXP (op, 0));
4050 default:
4051 return FALSE;
4055 /* Like di_operand, but don't accept constants. */
4057 nonimmediate_di_operand (rtx op, enum machine_mode mode)
4059 if (s_register_operand (op, mode))
4060 return TRUE;
4062 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
4063 return FALSE;
4065 if (GET_CODE (op) == SUBREG)
4066 op = SUBREG_REG (op);
4068 if (GET_CODE (op) == MEM)
4069 return memory_address_p (DImode, XEXP (op, 0));
4071 return FALSE;
4074 /* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
4075 Either: REG, SUBREG, CONST_DOUBLE or MEM(DFmode_address).
4076 Note that this disallows MEM(REG+REG), but allows
4077 MEM(PRE/POST_INC/DEC(REG)). */
4079 soft_df_operand (rtx op, enum machine_mode mode)
4081 if (s_register_operand (op, mode))
4082 return TRUE;
4084 if (mode != VOIDmode && GET_MODE (op) != mode)
4085 return FALSE;
4087 if (GET_CODE (op) == SUBREG && CONSTANT_P (SUBREG_REG (op)))
4088 return FALSE;
4090 if (GET_CODE (op) == SUBREG)
4091 op = SUBREG_REG (op);
4093 switch (GET_CODE (op))
4095 case CONST_DOUBLE:
4096 return TRUE;
4098 case MEM:
4099 return memory_address_p (DFmode, XEXP (op, 0));
4101 default:
4102 return FALSE;
4106 /* Like soft_df_operand, but don't accept constants. */
4108 nonimmediate_soft_df_operand (rtx op, enum machine_mode mode)
4110 if (s_register_operand (op, mode))
4111 return TRUE;
4113 if (mode != VOIDmode && GET_MODE (op) != mode)
4114 return FALSE;
4116 if (GET_CODE (op) == SUBREG)
4117 op = SUBREG_REG (op);
4119 if (GET_CODE (op) == MEM)
4120 return memory_address_p (DFmode, XEXP (op, 0));
4121 return FALSE;
4124 /* Return TRUE for valid index operands. */
4126 index_operand (rtx op, enum machine_mode mode)
4128 return (s_register_operand (op, mode)
4129 || (immediate_operand (op, mode)
4130 && (GET_CODE (op) != CONST_INT
4131 || (INTVAL (op) < 4096 && INTVAL (op) > -4096))));
4134 /* Return TRUE for valid shifts by a constant. This also accepts any
4135 power of two on the (somewhat overly relaxed) assumption that the
4136 shift operator in this case was a mult. */
4138 const_shift_operand (rtx op, enum machine_mode mode)
4140 return (power_of_two_operand (op, mode)
4141 || (immediate_operand (op, mode)
4142 && (GET_CODE (op) != CONST_INT
4143 || (INTVAL (op) < 32 && INTVAL (op) > 0))));
4146 /* Return TRUE for arithmetic operators which can be combined with a multiply
4147 (shift). */
4149 shiftable_operator (rtx x, enum machine_mode mode)
4151 enum rtx_code code;
4153 if (GET_MODE (x) != mode)
4154 return FALSE;
4156 code = GET_CODE (x);
4158 return (code == PLUS || code == MINUS
4159 || code == IOR || code == XOR || code == AND);
4162 /* Return TRUE for binary logical operators. */
4164 logical_binary_operator (rtx x, enum machine_mode mode)
4166 enum rtx_code code;
4168 if (GET_MODE (x) != mode)
4169 return FALSE;
4171 code = GET_CODE (x);
4173 return (code == IOR || code == XOR || code == AND);
4176 /* Return TRUE for shift operators. */
4178 shift_operator (rtx x, enum machine_mode mode)
4180 enum rtx_code code;
4182 if (GET_MODE (x) != mode)
4183 return FALSE;
4185 code = GET_CODE (x);
4187 if (code == MULT)
4188 return power_of_two_operand (XEXP (x, 1), mode);
4190 return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
4191 || code == ROTATERT);
4194 /* Return TRUE if X is EQ or NE. */
4196 equality_operator (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
4198 return GET_CODE (x) == EQ || GET_CODE (x) == NE;
4201 /* Return TRUE if X is a comparison operator other than LTGT or UNEQ. */
4203 arm_comparison_operator (rtx x, enum machine_mode mode)
4205 return (comparison_operator (x, mode)
4206 && GET_CODE (x) != LTGT
4207 && GET_CODE (x) != UNEQ);
4210 /* Return TRUE for SMIN, SMAX, UMIN and UMAX operators. */
4212 minmax_operator (rtx x, enum machine_mode mode)
4214 enum rtx_code code = GET_CODE (x);
4216 if (GET_MODE (x) != mode)
4217 return FALSE;
4219 return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
4222 /* Return TRUE if this is the condition code register; if we aren't given
4223 a mode, accept any register whose mode is in class MODE_CC. */
4225 cc_register (rtx x, enum machine_mode mode)
4227 if (mode == VOIDmode)
4229 mode = GET_MODE (x);
4231 if (GET_MODE_CLASS (mode) != MODE_CC)
4232 return FALSE;
4235 if ( GET_MODE (x) == mode
4236 && GET_CODE (x) == REG
4237 && REGNO (x) == CC_REGNUM)
4238 return TRUE;
4240 return FALSE;
4243 /* Return TRUE if this is the condition code register; if we aren't given
4244 a mode, accept any register whose mode is in class MODE_CC and
4245 indicates a dominance expression. */
4247 dominant_cc_register (rtx x, enum machine_mode mode)
4249 if (mode == VOIDmode)
4251 mode = GET_MODE (x);
4253 if (GET_MODE_CLASS (mode) != MODE_CC)
4254 return FALSE;
4257 if (mode != CC_DNEmode && mode != CC_DEQmode
4258 && mode != CC_DLEmode && mode != CC_DLTmode
4259 && mode != CC_DGEmode && mode != CC_DGTmode
4260 && mode != CC_DLEUmode && mode != CC_DLTUmode
4261 && mode != CC_DGEUmode && mode != CC_DGTUmode)
4262 return FALSE;
4264 return cc_register (x, mode);
4267 /* Return TRUE if X references a SYMBOL_REF. */
4269 symbol_mentioned_p (rtx x)
4271 const char * fmt;
4272 int i;
4274 if (GET_CODE (x) == SYMBOL_REF)
4275 return 1;
4277 fmt = GET_RTX_FORMAT (GET_CODE (x));
4279 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4281 if (fmt[i] == 'E')
4283 int j;
4285 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4286 if (symbol_mentioned_p (XVECEXP (x, i, j)))
4287 return 1;
4289 else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
4290 return 1;
4293 return 0;
4296 /* Return TRUE if X references a LABEL_REF. */
4298 label_mentioned_p (rtx x)
4300 const char * fmt;
4301 int i;
4303 if (GET_CODE (x) == LABEL_REF)
4304 return 1;
4306 fmt = GET_RTX_FORMAT (GET_CODE (x));
4307 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4309 if (fmt[i] == 'E')
4311 int j;
4313 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4314 if (label_mentioned_p (XVECEXP (x, i, j)))
4315 return 1;
4317 else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
4318 return 1;
4321 return 0;
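/* Map a min/max rtx code to the comparison code under which the first
   operand is selected: SMAX -> GE, SMIN -> LE, UMIN -> LEU and
   UMAX -> GEU.  Aborts on any other code.  */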
4324 enum rtx_code
4325 minmax_code (rtx x)
4327 enum rtx_code code = GET_CODE (x);
4329 if (code == SMAX)
4330 return GE;
4331 else if (code == SMIN)
4332 return LE;
4333 else if (code == UMIN)
4334 return LEU;
4335 else if (code == UMAX)
4336 return GEU;
4338 abort ();
4341 /* Return 1 if memory locations are adjacent. */
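/* For example, (mem (plus (reg r3) (const_int 4))) and
   (mem (plus (reg r3) (const_int 8))) are adjacent, in either order,
   provided both offsets are valid add/sub immediates.  */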
4343 adjacent_mem_locations (rtx a, rtx b)
4345 if ((GET_CODE (XEXP (a, 0)) == REG
4346 || (GET_CODE (XEXP (a, 0)) == PLUS
4347 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
4348 && (GET_CODE (XEXP (b, 0)) == REG
4349 || (GET_CODE (XEXP (b, 0)) == PLUS
4350 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
4352 int val0 = 0, val1 = 0;
4353 int reg0, reg1;
4355 if (GET_CODE (XEXP (a, 0)) == PLUS)
4357 reg0 = REGNO (XEXP (XEXP (a, 0), 0));
4358 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
4360 else
4361 reg0 = REGNO (XEXP (a, 0));
4363 if (GET_CODE (XEXP (b, 0)) == PLUS)
4365 reg1 = REGNO (XEXP (XEXP (b, 0), 0));
4366 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
4368 else
4369 reg1 = REGNO (XEXP (b, 0));
4371 /* Don't accept any offset that will require multiple
4372 instructions to handle, since this would cause the
4373 arith_adjacentmem pattern to output an overlong sequence. */
4374 if (!const_ok_for_op (val0, PLUS) || !const_ok_for_op (val1, PLUS))
4375 return 0;
4377 return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
4379 return 0;
4382 /* Return 1 if OP is a load multiple operation. It is known to be a
4383 PARALLEL and the first element will be tested. */
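/* A sketch of the kind of PARALLEL accepted here (non-write-back form):

   (parallel [(set (reg:SI 0) (mem:SI (reg:SI 4)))
              (set (reg:SI 1) (mem:SI (plus:SI (reg:SI 4)
                                               (const_int 4))))])

   Destination register numbers must increase by one per element and
   the offsets by four; a leading SET of the base register marks the
   write-back form.  */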
4385 load_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
4387 HOST_WIDE_INT count = XVECLEN (op, 0);
4388 int dest_regno;
4389 rtx src_addr;
4390 HOST_WIDE_INT i = 1, base = 0;
4391 rtx elt;
4393 if (count <= 1
4394 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
4395 return 0;
4397 /* Check to see if this might be a write-back. */
4398 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
4400 i++;
4401 base = 1;
4403 /* Now check it more carefully. */
4404 if (GET_CODE (SET_DEST (elt)) != REG
4405 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
4406 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
4407 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
4408 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
4409 return 0;
4412 /* Perform a quick check so we don't blow up below. */
4413 if (count <= i
4414 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
4415 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
4416 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
4417 return 0;
4419 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
4420 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);
4422 for (; i < count; i++)
4424 elt = XVECEXP (op, 0, i);
4426 if (GET_CODE (elt) != SET
4427 || GET_CODE (SET_DEST (elt)) != REG
4428 || GET_MODE (SET_DEST (elt)) != SImode
4429 || REGNO (SET_DEST (elt)) != (unsigned int)(dest_regno + i - base)
4430 || GET_CODE (SET_SRC (elt)) != MEM
4431 || GET_MODE (SET_SRC (elt)) != SImode
4432 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
4433 || !rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
4434 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
4435 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
4436 return 0;
4439 return 1;
4442 /* Return 1 if OP is a store multiple operation. It is known to be a
4443 PARALLEL and the first element will be tested. */
4445 store_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
4447 HOST_WIDE_INT count = XVECLEN (op, 0);
4448 int src_regno;
4449 rtx dest_addr;
4450 HOST_WIDE_INT i = 1, base = 0;
4451 rtx elt;
4453 if (count <= 1
4454 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
4455 return 0;
4457 /* Check to see if this might be a write-back. */
4458 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
4460 i++;
4461 base = 1;
4463 /* Now check it more carefully. */
4464 if (GET_CODE (SET_DEST (elt)) != REG
4465 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
4466 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
4467 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
4468 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
4469 return 0;
4472 /* Perform a quick check so we don't blow up below. */
4473 if (count <= i
4474 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
4475 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
4476 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
4477 return 0;
4479 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
4480 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);
4482 for (; i < count; i++)
4484 elt = XVECEXP (op, 0, i);
4486 if (GET_CODE (elt) != SET
4487 || GET_CODE (SET_SRC (elt)) != REG
4488 || GET_MODE (SET_SRC (elt)) != SImode
4489 || REGNO (SET_SRC (elt)) != (unsigned int)(src_regno + i - base)
4490 || GET_CODE (SET_DEST (elt)) != MEM
4491 || GET_MODE (SET_DEST (elt)) != SImode
4492 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
4493 || !rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
4494 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
4495 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
4496 return 0;
4499 return 1;
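/* Decide whether OPERANDS -- NOPS destination registers followed by
   NOPS memory references -- can be combined into a single load
   multiple.  If BASE is non-null, the sorted register numbers are
   returned in REGS, the base register number in *BASE and the lowest
   offset in *LOAD_OFFSET.  The return value selects the addressing
   mode: 1 = ldmia, 2 = ldmib, 3 = ldmda, 4 = ldmdb, 5 = adjust the
   base first and use ldmia, 0 = not possible.  */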
4503 load_multiple_sequence (rtx *operands, int nops, int *regs, int *base,
4504 HOST_WIDE_INT *load_offset)
4506 int unsorted_regs[4];
4507 HOST_WIDE_INT unsorted_offsets[4];
4508 int order[4];
4509 int base_reg = -1;
4510 int i;
4512 /* Can only handle 2, 3, or 4 insns at present,
4513 though could be easily extended if required. */
4514 if (nops < 2 || nops > 4)
4515 abort ();
4517 /* Loop over the operands and check that the memory references are
4518 suitable (i.e. immediate offsets from the same base register). At
4519 the same time, extract the target register, and the memory
4520 offsets. */
4521 for (i = 0; i < nops; i++)
4523 rtx reg;
4524 rtx offset;
4526 /* Convert a subreg of a mem into the mem itself. */
4527 if (GET_CODE (operands[nops + i]) == SUBREG)
4528 operands[nops + i] = alter_subreg (operands + (nops + i));
4530 if (GET_CODE (operands[nops + i]) != MEM)
4531 abort ();
4533 /* Don't reorder volatile memory references; it doesn't seem worth
4534 looking for the case where the order is ok anyway. */
4535 if (MEM_VOLATILE_P (operands[nops + i]))
4536 return 0;
4538 offset = const0_rtx;
4540 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
4541 || (GET_CODE (reg) == SUBREG
4542 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4543 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
4544 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
4545 == REG)
4546 || (GET_CODE (reg) == SUBREG
4547 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4548 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
4549 == CONST_INT)))
4551 if (i == 0)
4553 base_reg = REGNO (reg);
4554 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
4555 ? REGNO (operands[i])
4556 : REGNO (SUBREG_REG (operands[i])));
4557 order[0] = 0;
4559 else
4561 if (base_reg != (int) REGNO (reg))
4562 /* Not addressed from the same base register. */
4563 return 0;
4565 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
4566 ? REGNO (operands[i])
4567 : REGNO (SUBREG_REG (operands[i])));
4568 if (unsorted_regs[i] < unsorted_regs[order[0]])
4569 order[0] = i;
4572 /* If it isn't an integer register, or if it overwrites the
4573 base register but isn't the last insn in the list, then
4574 we can't do this. */
4575 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14
4576 || (i != nops - 1 && unsorted_regs[i] == base_reg))
4577 return 0;
4579 unsorted_offsets[i] = INTVAL (offset);
4581 else
4582 /* Not a suitable memory address. */
4583 return 0;
4586 /* All the useful information has now been extracted from the
4587 operands into unsorted_regs and unsorted_offsets; additionally,
4588 order[0] has been set to the lowest numbered register in the
4589 list. Sort the registers into order, and check that the memory
4590 offsets are ascending and adjacent. */
4592 for (i = 1; i < nops; i++)
4594 int j;
4596 order[i] = order[i - 1];
4597 for (j = 0; j < nops; j++)
4598 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
4599 && (order[i] == order[i - 1]
4600 || unsorted_regs[j] < unsorted_regs[order[i]]))
4601 order[i] = j;
4603 /* Have we found a suitable register? If not, one must be used more
4604 than once. */
4605 if (order[i] == order[i - 1])
4606 return 0;
4608 /* Is the memory address adjacent and ascending? */
4609 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
4610 return 0;
4613 if (base)
4615 *base = base_reg;
4617 for (i = 0; i < nops; i++)
4618 regs[i] = unsorted_regs[order[i]];
4620 *load_offset = unsorted_offsets[order[0]];
4623 if (unsorted_offsets[order[0]] == 0)
4624 return 1; /* ldmia */
4626 if (unsorted_offsets[order[0]] == 4)
4627 return 2; /* ldmib */
4629 if (unsorted_offsets[order[nops - 1]] == 0)
4630 return 3; /* ldmda */
4632 if (unsorted_offsets[order[nops - 1]] == -4)
4633 return 4; /* ldmdb */
4635 /* For ARM8,9 & StrongARM, 2 ldr instructions are faster than an ldm
4636 if the offset isn't small enough. The reason 2 ldrs are faster
4637 is because these ARMs are able to do more than one cache access
4638 in a single cycle. The ARM9 and StrongARM have Harvard caches,
4639 whilst the ARM8 has a double bandwidth cache. This means that
4640 these cores can do both an instruction fetch and a data fetch in
4641 a single cycle, so the trick of calculating the address into a
4642 scratch register (one of the result regs) and then doing a load
4643 multiple actually becomes slower (and no smaller in code size).
4644 That is the transformation
4646 ldr rd1, [rbase + offset]
4647 ldr rd2, [rbase + offset + 4]
4649 to
4651 add rd1, rbase, offset
4652 ldmia rd1, {rd1, rd2}
4654 produces worse code -- '3 cycles + any stalls on rd2' instead of
4655 '2 cycles + any stalls on rd2'. On ARMs with only one cache
4656 access per cycle, the first sequence could never complete in less
4657 than 6 cycles, whereas the ldm sequence would only take 5 and
4658 would make better use of sequential accesses if not hitting the
4659 cache.
4661 We cheat here and test 'arm_ld_sched' which we currently know to
4662 only be true for the ARM8, ARM9 and StrongARM. If this ever
4663 changes, then the test below needs to be reworked. */
4664 if (nops == 2 && arm_ld_sched)
4665 return 0;
4667 /* Can't do it without setting up the offset, only do this if it takes
4668 no more than one insn. */
4669 return (const_ok_for_arm (unsorted_offsets[order[0]])
4670 || const_ok_for_arm (-unsorted_offsets[order[0]])) ? 5 : 0;
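/* Emit the load multiple chosen by load_multiple_sequence for
   OPERANDS.  For instance, operands describing ldr r1, [r4, #4] and
   ldr r0, [r4] are emitted as the single instruction
   ldmia r4, {r0, r1}.  */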
4673 const char *
4674 emit_ldm_seq (rtx *operands, int nops)
4676 int regs[4];
4677 int base_reg;
4678 HOST_WIDE_INT offset;
4679 char buf[100];
4680 int i;
4682 switch (load_multiple_sequence (operands, nops, regs, &base_reg, &offset))
4684 case 1:
4685 strcpy (buf, "ldm%?ia\t");
4686 break;
4688 case 2:
4689 strcpy (buf, "ldm%?ib\t");
4690 break;
4692 case 3:
4693 strcpy (buf, "ldm%?da\t");
4694 break;
4696 case 4:
4697 strcpy (buf, "ldm%?db\t");
4698 break;
4700 case 5:
4701 if (offset >= 0)
4702 sprintf (buf, "add%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
4703 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
4704 (long) offset);
4705 else
4706 sprintf (buf, "sub%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
4707 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
4708 (long) -offset);
4709 output_asm_insn (buf, operands);
4710 base_reg = regs[0];
4711 strcpy (buf, "ldm%?ia\t");
4712 break;
4714 default:
4715 abort ();
4718 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
4719 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
4721 for (i = 1; i < nops; i++)
4722 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
4723 reg_names[regs[i]]);
4725 strcat (buf, "}\t%@ phole ldm");
4727 output_asm_insn (buf, operands);
4728 return "";
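/* Analogue of load_multiple_sequence for stores: return the stm
   addressing mode (1 = stmia, 2 = stmib, 3 = stmda, 4 = stmdb) that
   can store OPERANDS in a single instruction, or 0 if none can.
   There is no mode 5 here, presumably because none of the source
   registers may be clobbered to hold an adjusted base address.  */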
4732 store_multiple_sequence (rtx *operands, int nops, int *regs, int *base,
4733 HOST_WIDE_INT * load_offset)
4735 int unsorted_regs[4];
4736 HOST_WIDE_INT unsorted_offsets[4];
4737 int order[4];
4738 int base_reg = -1;
4739 int i;
4741 /* Can only handle 2, 3, or 4 insns at present, though could be easily
4742 extended if required. */
4743 if (nops < 2 || nops > 4)
4744 abort ();
4746 /* Loop over the operands and check that the memory references are
4747 suitable (i.e. immediate offsets from the same base register). At
4748 the same time, extract the target register, and the memory
4749 offsets. */
4750 for (i = 0; i < nops; i++)
4752 rtx reg;
4753 rtx offset;
4755 /* Convert a subreg of a mem into the mem itself. */
4756 if (GET_CODE (operands[nops + i]) == SUBREG)
4757 operands[nops + i] = alter_subreg (operands + (nops + i));
4759 if (GET_CODE (operands[nops + i]) != MEM)
4760 abort ();
4762 /* Don't reorder volatile memory references; it doesn't seem worth
4763 looking for the case where the order is ok anyway. */
4764 if (MEM_VOLATILE_P (operands[nops + i]))
4765 return 0;
4767 offset = const0_rtx;
4769 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
4770 || (GET_CODE (reg) == SUBREG
4771 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4772 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
4773 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
4774 == REG)
4775 || (GET_CODE (reg) == SUBREG
4776 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4777 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
4778 == CONST_INT)))
4780 if (i == 0)
4782 base_reg = REGNO (reg);
4783 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
4784 ? REGNO (operands[i])
4785 : REGNO (SUBREG_REG (operands[i])));
4786 order[0] = 0;
4788 else
4790 if (base_reg != (int) REGNO (reg))
4791 /* Not addressed from the same base register. */
4792 return 0;
4794 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
4795 ? REGNO (operands[i])
4796 : REGNO (SUBREG_REG (operands[i])));
4797 if (unsorted_regs[i] < unsorted_regs[order[0]])
4798 order[0] = i;
4801 /* If it isn't an integer register, then we can't do this. */
4802 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14)
4803 return 0;
4805 unsorted_offsets[i] = INTVAL (offset);
4807 else
4808 /* Not a suitable memory address. */
4809 return 0;
4812 /* All the useful information has now been extracted from the
4813 operands into unsorted_regs and unsorted_offsets; additionally,
4814 order[0] has been set to the lowest numbered register in the
4815 list. Sort the registers into order, and check that the memory
4816 offsets are ascending and adjacent. */
4818 for (i = 1; i < nops; i++)
4820 int j;
4822 order[i] = order[i - 1];
4823 for (j = 0; j < nops; j++)
4824 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
4825 && (order[i] == order[i - 1]
4826 || unsorted_regs[j] < unsorted_regs[order[i]]))
4827 order[i] = j;
4829 /* Have we found a suitable register? If not, one must be used more
4830 than once. */
4831 if (order[i] == order[i - 1])
4832 return 0;
4834 /* Is the memory address adjacent and ascending? */
4835 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
4836 return 0;
4839 if (base)
4841 *base = base_reg;
4843 for (i = 0; i < nops; i++)
4844 regs[i] = unsorted_regs[order[i]];
4846 *load_offset = unsorted_offsets[order[0]];
4849 if (unsorted_offsets[order[0]] == 0)
4850 return 1; /* stmia */
4852 if (unsorted_offsets[order[0]] == 4)
4853 return 2; /* stmib */
4855 if (unsorted_offsets[order[nops - 1]] == 0)
4856 return 3; /* stmda */
4858 if (unsorted_offsets[order[nops - 1]] == -4)
4859 return 4; /* stmdb */
4861 return 0;
4864 const char *
4865 emit_stm_seq (rtx *operands, int nops)
4867 int regs[4];
4868 int base_reg;
4869 HOST_WIDE_INT offset;
4870 char buf[100];
4871 int i;
4873 switch (store_multiple_sequence (operands, nops, regs, &base_reg, &offset))
4875 case 1:
4876 strcpy (buf, "stm%?ia\t");
4877 break;
4879 case 2:
4880 strcpy (buf, "stm%?ib\t");
4881 break;
4883 case 3:
4884 strcpy (buf, "stm%?da\t");
4885 break;
4887 case 4:
4888 strcpy (buf, "stm%?db\t");
4889 break;
4891 default:
4892 abort ();
4895 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
4896 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
4898 for (i = 1; i < nops; i++)
4899 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
4900 reg_names[regs[i]]);
4902 strcat (buf, "}\t%@ phole stm");
4904 output_asm_insn (buf, operands);
4905 return "";
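/* Return 1 if OP is the PARALLEL generated for a multi-register push,
   i.e. its first element is a SET whose source is an
   UNSPEC_PUSH_MULT.  */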
4909 multi_register_push (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
4911 if (GET_CODE (op) != PARALLEL
4912 || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
4913 || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
4914 || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != UNSPEC_PUSH_MULT))
4915 return 0;
4917 return 1;
4920 /* Routines for use in generating RTL. */
4923 arm_gen_load_multiple (int base_regno, int count, rtx from, int up,
4924 int write_back, int unchanging_p, int in_struct_p,
4925 int scalar_p)
4927 int i = 0, j;
4928 rtx result;
4929 int sign = up ? 1 : -1;
4930 rtx mem;
4932 /* XScale has load-store double instructions, but they have stricter
4933 alignment requirements than load-store multiple, so we can not
4934 use them.
4936 For XScale ldm requires 2 + NREGS cycles to complete and blocks
4937 the pipeline until completion.
4939 NREGS CYCLES
4940 1 3
4941 2 4
4942 3 5
4943 4 6
4945 An ldr instruction takes 1-3 cycles, but does not block the
4946 pipeline.
4948 NREGS CYCLES
4949 1 1-3
4950 2 2-6
4951 3 3-9
4952 4 4-12
4954 Best case ldr will always win. However, the more ldr instructions
4955 we issue, the less likely we are to be able to schedule them well.
4956 Using ldr instructions also increases code size.
4958 As a compromise, we use ldr for counts of 1 or 2 regs, and ldm
4959 for counts of 3 or 4 regs. */
4960 if (arm_tune_xscale && count <= 2 && ! optimize_size)
4962 rtx seq;
4964 start_sequence ();
4966 for (i = 0; i < count; i++)
4968 mem = gen_rtx_MEM (SImode, plus_constant (from, i * 4 * sign));
4969 RTX_UNCHANGING_P (mem) = unchanging_p;
4970 MEM_IN_STRUCT_P (mem) = in_struct_p;
4971 MEM_SCALAR_P (mem) = scalar_p;
4972 emit_move_insn (gen_rtx_REG (SImode, base_regno + i), mem);
4975 if (write_back)
4976 emit_move_insn (from, plus_constant (from, count * 4 * sign));
4978 seq = get_insns ();
4979 end_sequence ();
4981 return seq;
4984 result = gen_rtx_PARALLEL (VOIDmode,
4985 rtvec_alloc (count + (write_back ? 1 : 0)));
4986 if (write_back)
4988 XVECEXP (result, 0, 0)
4989 = gen_rtx_SET (GET_MODE (from), from,
4990 plus_constant (from, count * 4 * sign));
4991 i = 1;
4992 count++;
4995 for (j = 0; i < count; i++, j++)
4997 mem = gen_rtx_MEM (SImode, plus_constant (from, j * 4 * sign));
4998 RTX_UNCHANGING_P (mem) = unchanging_p;
4999 MEM_IN_STRUCT_P (mem) = in_struct_p;
5000 MEM_SCALAR_P (mem) = scalar_p;
5001 XVECEXP (result, 0, i)
5002 = gen_rtx_SET (VOIDmode, gen_rtx_REG (SImode, base_regno + j), mem);
5005 return result;
5009 arm_gen_store_multiple (int base_regno, int count, rtx to, int up,
5010 int write_back, int unchanging_p, int in_struct_p,
5011 int scalar_p)
5013 int i = 0, j;
5014 rtx result;
5015 int sign = up ? 1 : -1;
5016 rtx mem;
5018 /* See arm_gen_load_multiple for discussion of
5019 the pros/cons of ldm/stm usage for XScale. */
5020 if (arm_tune_xscale && count <= 2 && ! optimize_size)
5022 rtx seq;
5024 start_sequence ();
5026 for (i = 0; i < count; i++)
5028 mem = gen_rtx_MEM (SImode, plus_constant (to, i * 4 * sign));
5029 RTX_UNCHANGING_P (mem) = unchanging_p;
5030 MEM_IN_STRUCT_P (mem) = in_struct_p;
5031 MEM_SCALAR_P (mem) = scalar_p;
5032 emit_move_insn (mem, gen_rtx_REG (SImode, base_regno + i));
5035 if (write_back)
5036 emit_move_insn (to, plus_constant (to, count * 4 * sign));
5038 seq = get_insns ();
5039 end_sequence ();
5041 return seq;
5044 result = gen_rtx_PARALLEL (VOIDmode,
5045 rtvec_alloc (count + (write_back ? 1 : 0)));
5046 if (write_back)
5048 XVECEXP (result, 0, 0)
5049 = gen_rtx_SET (GET_MODE (to), to,
5050 plus_constant (to, count * 4 * sign));
5051 i = 1;
5052 count++;
5055 for (j = 0; i < count; i++, j++)
5057 mem = gen_rtx_MEM (SImode, plus_constant (to, j * 4 * sign));
5058 RTX_UNCHANGING_P (mem) = unchanging_p;
5059 MEM_IN_STRUCT_P (mem) = in_struct_p;
5060 MEM_SCALAR_P (mem) = scalar_p;
5062 XVECEXP (result, 0, i)
5063 = gen_rtx_SET (VOIDmode, mem, gen_rtx_REG (SImode, base_regno + j));
5066 return result;
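/* Expand a constant-size block copy (movstrqi): operands[0] and
   operands[1] are the destination and source MEMs, operands[2] the
   byte count and operands[3] the alignment.  Only copies of at most
   64 bytes with word-aligned addresses are handled; return 1 on
   success, or 0 to make the caller fall back on a library call.  */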
5070 arm_gen_movstrqi (rtx *operands)
5072 HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
5073 int i;
5074 rtx src, dst;
5075 rtx st_src, st_dst, fin_src, fin_dst;
5076 rtx part_bytes_reg = NULL;
5077 rtx mem;
5078 int dst_unchanging_p, dst_in_struct_p, src_unchanging_p, src_in_struct_p;
5079 int dst_scalar_p, src_scalar_p;
5081 if (GET_CODE (operands[2]) != CONST_INT
5082 || GET_CODE (operands[3]) != CONST_INT
5083 || INTVAL (operands[2]) > 64
5084 || INTVAL (operands[3]) & 3)
5085 return 0;
5087 st_dst = XEXP (operands[0], 0);
5088 st_src = XEXP (operands[1], 0);
5090 dst_unchanging_p = RTX_UNCHANGING_P (operands[0]);
5091 dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
5092 dst_scalar_p = MEM_SCALAR_P (operands[0]);
5093 src_unchanging_p = RTX_UNCHANGING_P (operands[1]);
5094 src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);
5095 src_scalar_p = MEM_SCALAR_P (operands[1]);
5097 fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
5098 fin_src = src = copy_to_mode_reg (SImode, st_src);
5100 in_words_to_go = ARM_NUM_INTS (INTVAL (operands[2]));
5101 out_words_to_go = INTVAL (operands[2]) / 4;
5102 last_bytes = INTVAL (operands[2]) & 3;
5104 if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
5105 part_bytes_reg = gen_rtx_REG (SImode, (in_words_to_go - 1) & 3);
5107 for (i = 0; in_words_to_go >= 2; i+=4)
5109 if (in_words_to_go > 4)
5110 emit_insn (arm_gen_load_multiple (0, 4, src, TRUE, TRUE,
5111 src_unchanging_p,
5112 src_in_struct_p,
5113 src_scalar_p));
5114 else
5115 emit_insn (arm_gen_load_multiple (0, in_words_to_go, src, TRUE,
5116 FALSE, src_unchanging_p,
5117 src_in_struct_p, src_scalar_p));
5119 if (out_words_to_go)
5121 if (out_words_to_go > 4)
5122 emit_insn (arm_gen_store_multiple (0, 4, dst, TRUE, TRUE,
5123 dst_unchanging_p,
5124 dst_in_struct_p,
5125 dst_scalar_p));
5126 else if (out_words_to_go != 1)
5127 emit_insn (arm_gen_store_multiple (0, out_words_to_go,
5128 dst, TRUE,
5129 (last_bytes == 0
5130 ? FALSE : TRUE),
5131 dst_unchanging_p,
5132 dst_in_struct_p,
5133 dst_scalar_p));
5134 else
5136 mem = gen_rtx_MEM (SImode, dst);
5137 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
5138 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
5139 MEM_SCALAR_P (mem) = dst_scalar_p;
5140 emit_move_insn (mem, gen_rtx_REG (SImode, 0));
5141 if (last_bytes != 0)
5142 emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
5146 in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
5147 out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
5150 /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do. */
5151 if (out_words_to_go)
5153 rtx sreg;
5155 mem = gen_rtx_MEM (SImode, src);
5156 RTX_UNCHANGING_P (mem) = src_unchanging_p;
5157 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
5158 MEM_SCALAR_P (mem) = src_scalar_p;
5159 emit_move_insn (sreg = gen_reg_rtx (SImode), mem);
5160 emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));
5162 mem = gen_rtx_MEM (SImode, dst);
5163 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
5164 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
5165 MEM_SCALAR_P (mem) = dst_scalar_p;
5166 emit_move_insn (mem, sreg);
5167 emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
5168 in_words_to_go--;
5170 if (in_words_to_go) /* Sanity check */
5171 abort ();
5174 if (in_words_to_go)
5176 if (in_words_to_go < 0)
5177 abort ();
5179 mem = gen_rtx_MEM (SImode, src);
5180 RTX_UNCHANGING_P (mem) = src_unchanging_p;
5181 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
5182 MEM_SCALAR_P (mem) = src_scalar_p;
5183 part_bytes_reg = copy_to_mode_reg (SImode, mem);
5186 if (last_bytes && part_bytes_reg == NULL)
5187 abort ();
5189 if (BYTES_BIG_ENDIAN && last_bytes)
5191 rtx tmp = gen_reg_rtx (SImode);
5193 /* The bytes we want are in the top end of the word. */
5194 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
5195 GEN_INT (8 * (4 - last_bytes))));
5196 part_bytes_reg = tmp;
5198 while (last_bytes)
5200 mem = gen_rtx_MEM (QImode, plus_constant (dst, last_bytes - 1));
5201 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
5202 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
5203 MEM_SCALAR_P (mem) = dst_scalar_p;
5204 emit_move_insn (mem, gen_lowpart (QImode, part_bytes_reg));
5206 if (--last_bytes)
5208 tmp = gen_reg_rtx (SImode);
5209 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
5210 part_bytes_reg = tmp;
5215 else
5217 if (last_bytes > 1)
5219 mem = gen_rtx_MEM (HImode, dst);
5220 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
5221 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
5222 MEM_SCALAR_P (mem) = dst_scalar_p;
5223 emit_move_insn (mem, gen_lowpart (HImode, part_bytes_reg));
5224 last_bytes -= 2;
5225 if (last_bytes)
5227 rtx tmp = gen_reg_rtx (SImode);
5229 emit_insn (gen_addsi3 (dst, dst, GEN_INT (2)));
5230 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (16)));
5231 part_bytes_reg = tmp;
5235 if (last_bytes)
5237 mem = gen_rtx_MEM (QImode, dst);
5238 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
5239 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
5240 MEM_SCALAR_P (mem) = dst_scalar_p;
5241 emit_move_insn (mem, gen_lowpart (QImode, part_bytes_reg));
5245 return 1;
5248 /* Generate a memory reference for a half word, such that it will be loaded
5249 into the top 16 bits of the word. We can assume that the address is
5250 known to be alignable and of the form reg, or plus (reg, const). */
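/* For instance, on a little-endian target the halfword at BASE+2 is
   already the top 16 bits of the word at BASE, so the SImode load is
   returned unrotated, while the halfword at BASE+0 gets the ROTATE by
   16 built below; big-endian targets are the mirror image.  */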
5253 arm_gen_rotated_half_load (rtx memref)
5255 HOST_WIDE_INT offset = 0;
5256 rtx base = XEXP (memref, 0);
5258 if (GET_CODE (base) == PLUS)
5260 offset = INTVAL (XEXP (base, 1));
5261 base = XEXP (base, 0);
5264 /* If we aren't allowed to generate unaligned addresses, then fail. */
5265 if (TARGET_MMU_TRAPS
5266 && ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 0)))
5267 return NULL;
5269 base = gen_rtx_MEM (SImode, plus_constant (base, offset & ~2));
5271 if ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 2))
5272 return base;
5274 return gen_rtx_ROTATE (SImode, base, GEN_INT (16));
5277 /* Select a dominance comparison mode if possible for a test of the general
5278 form (OP (COND_OR (X) (Y)) (const_int 0)). We support three forms.
5279 COND_OR == DOM_CC_X_AND_Y => (X && Y)
5280 COND_OR == DOM_CC_NX_OR_Y => ((! X) || Y)
5281 COND_OR == DOM_CC_X_OR_Y => (X || Y)
5282 In all cases OP will be either EQ or NE, but we don't need to know which
5283 here. If we are unable to support a dominance comparison, we return
5284 CCmode. This will then fail to match for the RTL expressions that
5285 generate this call. */
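/* For example, in a test of the form (x == y) || (x >= y) the EQ
   condition is subsumed by GE, so the pair collapses to a single
   comparison in CC_DGEmode; for an unrelated pair such as LT with
   GTU no dominance holds and CCmode is returned.  */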
5286 enum machine_mode
5287 arm_select_dominance_cc_mode (rtx x, rtx y, HOST_WIDE_INT cond_or)
5289 enum rtx_code cond1, cond2;
5290 int swapped = 0;
5292 /* Currently we will probably get the wrong result if the individual
5293 comparisons are not simple. This also ensures that it is safe to
5294 reverse a comparison if necessary. */
5295 if ((arm_select_cc_mode (cond1 = GET_CODE (x), XEXP (x, 0), XEXP (x, 1))
5296 != CCmode)
5297 || (arm_select_cc_mode (cond2 = GET_CODE (y), XEXP (y, 0), XEXP (y, 1))
5298 != CCmode))
5299 return CCmode;
5301 /* The if_then_else variant of this tests the second condition if the
5302 first passes, but is true if the first fails. Reverse the first
5303 condition to get a true "inclusive-or" expression. */
5304 if (cond_or == DOM_CC_NX_OR_Y)
5305 cond1 = reverse_condition (cond1);
5307 /* If the comparisons are not equal, and one doesn't dominate the other,
5308 then we can't do this. */
5309 if (cond1 != cond2
5310 && !comparison_dominates_p (cond1, cond2)
5311 && (swapped = 1, !comparison_dominates_p (cond2, cond1)))
5312 return CCmode;
5314 if (swapped)
5316 enum rtx_code temp = cond1;
5317 cond1 = cond2;
5318 cond2 = temp;
5321 switch (cond1)
5323 case EQ:
5324 if (cond2 == EQ || cond_or == DOM_CC_X_AND_Y)
5325 return CC_DEQmode;
5327 switch (cond2)
5329 case LE: return CC_DLEmode;
5330 case LEU: return CC_DLEUmode;
5331 case GE: return CC_DGEmode;
5332 case GEU: return CC_DGEUmode;
5333 default: break;
5336 break;
5338 case LT:
5339 if (cond2 == LT || cond_or == DOM_CC_X_AND_Y)
5340 return CC_DLTmode;
5341 if (cond2 == LE)
5342 return CC_DLEmode;
5343 if (cond2 == NE)
5344 return CC_DNEmode;
5345 break;
5347 case GT:
5348 if (cond2 == GT || cond_or == DOM_CC_X_AND_Y)
5349 return CC_DGTmode;
5350 if (cond2 == GE)
5351 return CC_DGEmode;
5352 if (cond2 == NE)
5353 return CC_DNEmode;
5354 break;
5356 case LTU:
5357 if (cond2 == LTU || cond_or == DOM_CC_X_AND_Y)
5358 return CC_DLTUmode;
5359 if (cond2 == LEU)
5360 return CC_DLEUmode;
5361 if (cond2 == NE)
5362 return CC_DNEmode;
5363 break;
5365 case GTU:
5366 if (cond2 == GTU || cond_or == DOM_CC_X_AND_Y)
5367 return CC_DGTUmode;
5368 if (cond2 == GEU)
5369 return CC_DGEUmode;
5370 if (cond2 == NE)
5371 return CC_DNEmode;
5372 break;
5374 /* The remaining cases only occur when both comparisons are the
5375 same. */
5376 case NE:
5377 return CC_DNEmode;
5379 case LE:
5380 return CC_DLEmode;
5382 case GE:
5383 return CC_DGEmode;
5385 case LEU:
5386 return CC_DLEUmode;
5388 case GEU:
5389 return CC_DGEUmode;
5391 default:
5392 break;
5395 abort ();
5398 enum machine_mode
5399 arm_select_cc_mode (enum rtx_code op, rtx x, rtx y)
5401 /* All floating point compares return CCFP if it is an equality
5402 comparison, and CCFPE otherwise. */
5403 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
5405 switch (op)
5407 case EQ:
5408 case NE:
5409 case UNORDERED:
5410 case ORDERED:
5411 case UNLT:
5412 case UNLE:
5413 case UNGT:
5414 case UNGE:
5415 case UNEQ:
5416 case LTGT:
5417 return CCFPmode;
5419 case LT:
5420 case LE:
5421 case GT:
5422 case GE:
5423 if (TARGET_CIRRUS)
5424 return CCFPmode;
5425 return CCFPEmode;
5427 default:
5428 abort ();
5432 /* A compare with a shifted operand. Because of canonicalization, the
5433 comparison will have to be swapped when we emit the assembler. */
5434 if (GET_MODE (y) == SImode && GET_CODE (y) == REG
5435 && (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
5436 || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
5437 || GET_CODE (x) == ROTATERT))
5438 return CC_SWPmode;
5440 /* This is a special case that is used by combine to allow a
5441 comparison of a shifted byte load to be split into a zero-extend
5442 followed by a comparison of the shifted integer (only valid for
5443 equalities and unsigned inequalities). */
5444 if (GET_MODE (x) == SImode
5445 && GET_CODE (x) == ASHIFT
5446 && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 24
5447 && GET_CODE (XEXP (x, 0)) == SUBREG
5448 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == MEM
5449 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == QImode
5450 && (op == EQ || op == NE
5451 || op == GEU || op == GTU || op == LTU || op == LEU)
5452 && GET_CODE (y) == CONST_INT)
5453 return CC_Zmode;
5455 /* A construct for a conditional compare: if the false arm contains
5456 0, then both conditions must be true; otherwise either condition
5457 must be true. Not all conditions are possible, so CCmode is
5458 returned if it can't be done. */
5459 if (GET_CODE (x) == IF_THEN_ELSE
5460 && (XEXP (x, 2) == const0_rtx
5461 || XEXP (x, 2) == const1_rtx)
5462 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
5463 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
5464 return arm_select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
5465 INTVAL (XEXP (x, 2)));
5467 /* Alternate canonicalizations of the above. These are somewhat cleaner. */
5468 if (GET_CODE (x) == AND
5469 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
5470 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
5471 return arm_select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
5472 DOM_CC_X_AND_Y);
5474 if (GET_CODE (x) == IOR
5475 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
5476 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
5477 return arm_select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
5478 DOM_CC_X_OR_Y);
5480 /* An operation that sets the condition codes as a side-effect; the
5481 V flag is not set correctly, so we can only use comparisons where
5482 this doesn't matter. (For LT and GE we can use "mi" and "pl"
5483 instead.) */
5484 if (GET_MODE (x) == SImode
5485 && y == const0_rtx
5486 && (op == EQ || op == NE || op == LT || op == GE)
5487 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
5488 || GET_CODE (x) == AND || GET_CODE (x) == IOR
5489 || GET_CODE (x) == XOR || GET_CODE (x) == MULT
5490 || GET_CODE (x) == NOT || GET_CODE (x) == NEG
5491 || GET_CODE (x) == LSHIFTRT
5492 || GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
5493 || GET_CODE (x) == ROTATERT || GET_CODE (x) == ZERO_EXTRACT))
5494 return CC_NOOVmode;
5496 if (GET_MODE (x) == QImode && (op == EQ || op == NE))
5497 return CC_Zmode;
5499 if (GET_MODE (x) == SImode && (op == LTU || op == GEU)
5500 && GET_CODE (x) == PLUS
5501 && (rtx_equal_p (XEXP (x, 0), y) || rtx_equal_p (XEXP (x, 1), y)))
5502 return CC_Cmode;
5504 return CCmode;
5507 /* X and Y are two things to compare using CODE. Emit the compare insn
5508 and return the rtx for the condition code register in the proper mode. */
5511 arm_gen_compare_reg (enum rtx_code code, rtx x, rtx y)
5513 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
5514 rtx cc_reg = gen_rtx_REG (mode, CC_REGNUM);
5516 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
5517 gen_rtx_COMPARE (mode, x, y)));
5519 return cc_reg;
5522 /* Generate a sequence of insns that will generate the correct return
5523 address mask depending on the physical architecture that the program
5524 is running on. */
5526 arm_gen_return_addr_mask (void)
5528 rtx reg = gen_reg_rtx (Pmode);
5530 emit_insn (gen_return_addr_mask (reg));
5531 return reg;
5534 void
5535 arm_reload_in_hi (rtx *operands)
5537 rtx ref = operands[1];
5538 rtx base, scratch;
5539 HOST_WIDE_INT offset = 0;
5541 if (GET_CODE (ref) == SUBREG)
5543 offset = SUBREG_BYTE (ref);
5544 ref = SUBREG_REG (ref);
5547 if (GET_CODE (ref) == REG)
5549 /* We have a pseudo which has been spilled onto the stack; there
5550 are two cases here: the first where there is a simple
5551 stack-slot replacement and a second where the stack-slot is
5552 out of range, or is used as a subreg. */
5553 if (reg_equiv_mem[REGNO (ref)])
5555 ref = reg_equiv_mem[REGNO (ref)];
5556 base = find_replacement (&XEXP (ref, 0));
5558 else
5559 /* The slot is out of range, or was dressed up in a SUBREG. */
5560 base = reg_equiv_address[REGNO (ref)];
5562 else
5563 base = find_replacement (&XEXP (ref, 0));
5565 /* Handle the case where the address is too complex to be offset by 1. */
5566 if (GET_CODE (base) == MINUS
5567 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
5569 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5571 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
5572 base = base_plus;
5574 else if (GET_CODE (base) == PLUS)
5576 /* The addend must be CONST_INT, or we would have dealt with it above. */
5577 HOST_WIDE_INT hi, lo;
5579 offset += INTVAL (XEXP (base, 1));
5580 base = XEXP (base, 0);
5582 /* Rework the address into a legal sequence of insns. */
5583 /* Valid range for lo is -4095 -> 4095 */
5584 lo = (offset >= 0
5585 ? (offset & 0xfff)
5586 : -((-offset) & 0xfff));
5588 /* Corner case: if lo is the max offset, then we would be out of range
5589 once we have added the additional 1 below, so bump the msb into the
5590 pre-loading insn(s). */
5591 if (lo == 4095)
5592 lo &= 0x7ff;
5594 hi = ((((offset - lo) & (HOST_WIDE_INT) 0xffffffff)
5595 ^ (HOST_WIDE_INT) 0x80000000)
5596 - (HOST_WIDE_INT) 0x80000000);
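/* A worked example: offset 0x1005 splits as lo = 5, hi = 0x1000,
   while offset -0x1005 splits as lo = -5, hi = -0x1000.  The
   xor/subtract pair sign-extends (offset - lo) from 32 bits, so HI
   stays correct when HOST_WIDE_INT is wider than 32 bits.  */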
5598 if (hi + lo != offset)
5599 abort ();
5601 if (hi != 0)
5603 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5605 /* Get the base address; addsi3 knows how to handle constants
5606 that require more than one insn. */
5607 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
5608 base = base_plus;
5609 offset = lo;
5613 /* Operands[2] may overlap operands[0] (though it won't overlap
5614 operands[1]); that's why we asked for a DImode reg -- so we can
5615 use the half that does not overlap. */
5616 if (REGNO (operands[2]) == REGNO (operands[0]))
5617 scratch = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5618 else
5619 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
5621 emit_insn (gen_zero_extendqisi2 (scratch,
5622 gen_rtx_MEM (QImode,
5623 plus_constant (base,
5624 offset))));
5625 emit_insn (gen_zero_extendqisi2 (gen_rtx_SUBREG (SImode, operands[0], 0),
5626 gen_rtx_MEM (QImode,
5627 plus_constant (base,
5628 offset + 1))));
5629 if (!BYTES_BIG_ENDIAN)
5630 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
5631 gen_rtx_IOR (SImode,
5632 gen_rtx_ASHIFT
5633 (SImode,
5634 gen_rtx_SUBREG (SImode, operands[0], 0),
5635 GEN_INT (8)),
5636 scratch)));
5637 else
5638 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
5639 gen_rtx_IOR (SImode,
5640 gen_rtx_ASHIFT (SImode, scratch,
5641 GEN_INT (8)),
5642 gen_rtx_SUBREG (SImode, operands[0],
5643 0))));
5646 /* Handle storing a half-word to memory during reload by synthesising it as two
5647 byte stores. Take care not to clobber the input values until after we
5648 have moved them somewhere safe. This code assumes that if the DImode
5649 scratch in operands[2] overlaps either the input value or output address
5650 in some way, then that value must die in this insn (we absolutely need
5651 two scratch registers for some corner cases). */
5652 void
5653 arm_reload_out_hi (rtx *operands)
5655 rtx ref = operands[0];
5656 rtx outval = operands[1];
5657 rtx base, scratch;
5658 HOST_WIDE_INT offset = 0;
5660 if (GET_CODE (ref) == SUBREG)
5662 offset = SUBREG_BYTE (ref);
5663 ref = SUBREG_REG (ref);
5666 if (GET_CODE (ref) == REG)
5668 /* We have a pseudo which has been spilled onto the stack; there
5669 are two cases here: the first where there is a simple
5670 stack-slot replacement and a second where the stack-slot is
5671 out of range, or is used as a subreg. */
5672 if (reg_equiv_mem[REGNO (ref)])
5674 ref = reg_equiv_mem[REGNO (ref)];
5675 base = find_replacement (&XEXP (ref, 0));
5677 else
5678 /* The slot is out of range, or was dressed up in a SUBREG. */
5679 base = reg_equiv_address[REGNO (ref)];
5681 else
5682 base = find_replacement (&XEXP (ref, 0));
5684 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
5686 /* Handle the case where the address is too complex to be offset by 1. */
5687 if (GET_CODE (base) == MINUS
5688 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
5690 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5692 /* Be careful not to destroy OUTVAL. */
5693 if (reg_overlap_mentioned_p (base_plus, outval))
5695 /* Updating base_plus might destroy outval, see if we can
5696 swap the scratch and base_plus. */
5697 if (!reg_overlap_mentioned_p (scratch, outval))
5699 rtx tmp = scratch;
5700 scratch = base_plus;
5701 base_plus = tmp;
5703 else
5705 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
5707 /* Be conservative and copy OUTVAL into the scratch now,
5708 this should only be necessary if outval is a subreg
5709 of something larger than a word. */
5710 /* XXX Might this clobber base? I can't see how it can,
5711 since scratch is known to overlap with OUTVAL, and
5712 must be wider than a word. */
5713 emit_insn (gen_movhi (scratch_hi, outval));
5714 outval = scratch_hi;
5718 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
5719 base = base_plus;
5721 else if (GET_CODE (base) == PLUS)
5723 /* The addend must be CONST_INT, or we would have dealt with it above. */
5724 HOST_WIDE_INT hi, lo;
5726 offset += INTVAL (XEXP (base, 1));
5727 base = XEXP (base, 0);
5729 /* Rework the address into a legal sequence of insns. */
5730 /* Valid range for lo is -4095 -> 4095 */
5731 lo = (offset >= 0
5732 ? (offset & 0xfff)
5733 : -((-offset) & 0xfff));
5735 /* Corner case: if lo is the max offset, then we would be out of range
5736 once we have added the additional 1 below, so bump the msb into the
5737 pre-loading insn(s). */
5738 if (lo == 4095)
5739 lo &= 0x7ff;
5741 hi = ((((offset - lo) & (HOST_WIDE_INT) 0xffffffff)
5742 ^ (HOST_WIDE_INT) 0x80000000)
5743 - (HOST_WIDE_INT) 0x80000000);
5745 if (hi + lo != offset)
5746 abort ();
5748 if (hi != 0)
5750 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5752 /* Be careful not to destroy OUTVAL. */
5753 if (reg_overlap_mentioned_p (base_plus, outval))
5755 /* Updating base_plus might destroy outval, see if we
5756 can swap the scratch and base_plus. */
5757 if (!reg_overlap_mentioned_p (scratch, outval))
5759 rtx tmp = scratch;
5760 scratch = base_plus;
5761 base_plus = tmp;
5763 else
5765 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
5767 /* Be conservative and copy outval into scratch now,
5768 this should only be necessary if outval is a
5769 subreg of something larger than a word. */
5770 /* XXX Might this clobber base? I can't see how it
5771 can, since scratch is known to overlap with
5772 outval. */
5773 emit_insn (gen_movhi (scratch_hi, outval));
5774 outval = scratch_hi;
5778 /* Get the base address; addsi3 knows how to handle constants
5779 that require more than one insn. */
5780 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
5781 base = base_plus;
5782 offset = lo;
5786 if (BYTES_BIG_ENDIAN)
5788 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
5789 plus_constant (base, offset + 1)),
5790 gen_lowpart (QImode, outval)));
5791 emit_insn (gen_lshrsi3 (scratch,
5792 gen_rtx_SUBREG (SImode, outval, 0),
5793 GEN_INT (8)));
5794 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
5795 gen_lowpart (QImode, scratch)));
5797 else
5799 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
5800 gen_lowpart (QImode, outval)));
5801 emit_insn (gen_lshrsi3 (scratch,
5802 gen_rtx_SUBREG (SImode, outval, 0),
5803 GEN_INT (8)));
5804 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
5805 plus_constant (base, offset + 1)),
5806 gen_lowpart (QImode, scratch)));
5810 /* Print a symbolic form of X to the debug file, F. */
5811 static void
5812 arm_print_value (FILE *f, rtx x)
5814 switch (GET_CODE (x))
5816 case CONST_INT:
5817 fprintf (f, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
5818 return;
5820 case CONST_DOUBLE:
5821 fprintf (f, "<0x%lx,0x%lx>", (long)XWINT (x, 2), (long)XWINT (x, 3));
5822 return;
5824 case CONST_STRING:
5825 fprintf (f, "\"%s\"", XSTR (x, 0));
5826 return;
5828 case SYMBOL_REF:
5829 fprintf (f, "`%s'", XSTR (x, 0));
5830 return;
5832 case LABEL_REF:
5833 fprintf (f, "L%d", INSN_UID (XEXP (x, 0)));
5834 return;
5836 case CONST:
5837 arm_print_value (f, XEXP (x, 0));
5838 return;
5840 case PLUS:
5841 arm_print_value (f, XEXP (x, 0));
5842 fprintf (f, "+");
5843 arm_print_value (f, XEXP (x, 1));
5844 return;
5846 case PC:
5847 fprintf (f, "pc");
5848 return;
5850 default:
5851 fprintf (f, "????");
5852 return;
5856 /* Routines for manipulation of the constant pool. */
5858 /* ARM instructions cannot load a large constant directly into a
5859 register; they have to come from a pc-relative load. The constant
5860 must therefore be placed in the addressable range of the
5861 pc-relative load. Depending on the precise pc-relative load
5862 instruction, the range is somewhere between 256 bytes and 4k. This
5863 means that we often have to dump a constant inside a function, and
5864 generate code to branch around it.
5866 It is important to minimize this, since the branches will slow
5867 things down and make the code larger.
5869 Normally we can hide the table after an existing unconditional
5870 branch so that there is no interruption of the flow, but in the
5871 worst case the code looks like this:
5873 ldr rn, L1
5875 b L2
5876 align
5877 L1: .long value
5881 ldr rn, L3
5883 b L4
5884 align
5885 L3: .long value
5889 We fix this by performing a scan after scheduling, which notices
5890 which instructions need to have their operands fetched from the
5891 constant table and builds the table.
5893 The algorithm starts by building a table of all the constants that
5894 need fixing up and all the natural barriers in the function (places
5895 where a constant table can be dropped without breaking the flow).
5896 For each fixup we note how far the pc-relative replacement will be
5897 able to reach and the offset of the instruction into the function.
5899 Having built the table we then group the fixes together to form
5900 tables that are as large as possible (subject to addressing
5901 constraints) and emit each table of constants after the last
5902 barrier that is within range of all the instructions in the group.
5903 If a group does not contain a barrier, then we forcibly create one
5904 by inserting a jump instruction into the flow. Once the table has
5905 been inserted, the insns are then modified to reference the
5906 relevant entry in the pool.
5908 Possible enhancements to the algorithm (not implemented) are:
5910 1) For some processors and object formats, there may be benefit in
5911 aligning the pools to the start of cache lines; this alignment
5912 would need to be taken into account when calculating addressability
5913 of a pool. */
5915 /* These typedefs are located at the start of this file, so that
5916 they can be used in the prototypes there. This comment is to
5917 remind readers of that fact so that the following structures
5918 can be understood more easily.
5920 typedef struct minipool_node Mnode;
5921 typedef struct minipool_fixup Mfix; */
5923 struct minipool_node
5925 /* Doubly linked chain of entries. */
5926 Mnode * next;
5927 Mnode * prev;
5928 /* The maximum offset into the code at which this entry can be placed. While
5929 pushing fixes for forward references, all entries are sorted in order
5930 of increasing max_address. */
5931 HOST_WIDE_INT max_address;
5932 /* Similarly for an entry inserted for a backwards ref. */
5933 HOST_WIDE_INT min_address;
5934 /* The number of fixes referencing this entry. This can become zero
5935 if we "unpush" an entry. In this case we ignore the entry when we
5936 come to emit the code. */
5937 int refcount;
5938 /* The offset from the start of the minipool. */
5939 HOST_WIDE_INT offset;
5940 /* The value in the table. */
5941 rtx value;
5942 /* The mode of value. */
5943 enum machine_mode mode;
5944 int fix_size; /* The size of the value, padded to a word (MINIPOOL_FIX_SIZE). */
5947 struct minipool_fixup
5949 Mfix * next; /* Singly linked chain of fixes. */
5950 rtx insn; /* The insn requiring the fix. */
5951 HOST_WIDE_INT address; /* The insn's offset from the start of the function. */
5952 rtx * loc; /* Pointer to the part of the insn to be fixed. */
5953 enum machine_mode mode; /* The mode of the value to be loaded. */
5954 int fix_size; /* Its size, padded to a word (MINIPOOL_FIX_SIZE). */
5955 rtx value; /* The constant that must be loaded. */
5956 Mnode * minipool; /* The pool entry serving this fix, once placed. */
5957 HOST_WIDE_INT forwards; /* Maximum forward reach of the pc-relative load. */
5958 HOST_WIDE_INT backwards; /* Maximum backward reach of the load. */
5961 /* Fixes less than a word need padding out to a word boundary. */
5962 #define MINIPOOL_FIX_SIZE(mode) \
5963 (GET_MODE_SIZE ((mode)) >= 4 ? GET_MODE_SIZE ((mode)) : 4)
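/* For example, MINIPOOL_FIX_SIZE (QImode) and MINIPOOL_FIX_SIZE (HImode)
   are both 4, while MINIPOOL_FIX_SIZE (DImode) is 8.  */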
5965 static Mnode * minipool_vector_head;
5966 static Mnode * minipool_vector_tail;
5967 static rtx minipool_vector_label;
5969 /* The linked list of all minipool fixes required for this function. */
5970 Mfix * minipool_fix_head;
5971 Mfix * minipool_fix_tail;
5972 /* The fix entry for the current minipool, once it has been placed. */
5973 Mfix * minipool_barrier;
5975 /* Determines if INSN is the start of a jump table. Returns the end
5976 of the TABLE or NULL_RTX. */
5977 static rtx
5978 is_jump_table (rtx insn)
5980 rtx table;
5982 if (GET_CODE (insn) == JUMP_INSN
5983 && JUMP_LABEL (insn) != NULL
5984 && ((table = next_real_insn (JUMP_LABEL (insn)))
5985 == next_real_insn (insn))
5986 && table != NULL
5987 && GET_CODE (table) == JUMP_INSN
5988 && (GET_CODE (PATTERN (table)) == ADDR_VEC
5989 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
5990 return table;
5992 return NULL_RTX;
5995 #ifndef JUMP_TABLES_IN_TEXT_SECTION
5996 #define JUMP_TABLES_IN_TEXT_SECTION 0
5997 #endif
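/* Return the number of bytes occupied by the jump table INSN, or zero
   if the table lives outside the text section and so takes no room
   there.  */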
5999 static HOST_WIDE_INT
6000 get_jump_table_size (rtx insn)
6002 /* ADDR_VECs only take room if read-only data goes into the text
6003 section. */
6004 if (JUMP_TABLES_IN_TEXT_SECTION
6005 #if !defined(READONLY_DATA_SECTION) && !defined(READONLY_DATA_SECTION_ASM_OP)
6006 || 1
6007 #endif
6010 rtx body = PATTERN (insn);
6011 int elt = GET_CODE (body) == ADDR_DIFF_VEC ? 1 : 0;
6013 return GET_MODE_SIZE (GET_MODE (body)) * XVECLEN (body, elt);
6016 return 0;
6019 /* Move a minipool fix MP from its current location to before MAX_MP.
6020 If MAX_MP is NULL, then MP doesn't need moving, but the addressing
6021 constraints may need updating. */
6022 static Mnode *
6023 move_minipool_fix_forward_ref (Mnode *mp, Mnode *max_mp,
6024 HOST_WIDE_INT max_address)
6026 /* This should never be true and the code below assumes these are
6027 different. */
6028 if (mp == max_mp)
6029 abort ();
6031 if (max_mp == NULL)
6033 if (max_address < mp->max_address)
6034 mp->max_address = max_address;
6036 else
6038 if (max_address > max_mp->max_address - mp->fix_size)
6039 mp->max_address = max_mp->max_address - mp->fix_size;
6040 else
6041 mp->max_address = max_address;
6043 /* Unlink MP from its current position. Since max_mp is non-null,
6044 mp->prev must be non-null. */
6045 mp->prev->next = mp->next;
6046 if (mp->next != NULL)
6047 mp->next->prev = mp->prev;
6048 else
6049 minipool_vector_tail = mp->prev;
6051 /* Re-insert it before MAX_MP. */
6052 mp->next = max_mp;
6053 mp->prev = max_mp->prev;
6054 max_mp->prev = mp;
6056 if (mp->prev != NULL)
6057 mp->prev->next = mp;
6058 else
6059 minipool_vector_head = mp;
6062 /* Save the new entry. */
6063 max_mp = mp;
6065 /* Scan over the preceding entries and adjust their addresses as
6066 required. */
6067 while (mp->prev != NULL
6068 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
6070 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
6071 mp = mp->prev;
6074 return max_mp;
6077 /* Add a constant to the minipool for a forward reference. Returns the
6078 node added or NULL if the constant will not fit in this pool. */
6079 static Mnode *
6080 add_minipool_forward_ref (Mfix *fix)
6082 /* If set, max_mp is the first pool_entry that has a lower
6083 constraint than the one we are trying to add. */
6084 Mnode * max_mp = NULL;
6085 HOST_WIDE_INT max_address = fix->address + fix->forwards;
6086 Mnode * mp;
6088 /* If this fix's address is greater than the max address of the first
6089 entry, then we can't put the fix in this pool. We subtract the
6090 size of the current fix to ensure that if the table is fully
6091 packed we still have enough room to insert this value by shuffling
6092 the other fixes forwards. */
6093 if (minipool_vector_head &&
6094 fix->address >= minipool_vector_head->max_address - fix->fix_size)
6095 return NULL;
6097 /* Scan the pool to see if a constant with the same value has
6098 already been added. While we are doing this, also note the
6099 location where we must insert the constant if it doesn't already
6100 exist. */
6101 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
6103 if (GET_CODE (fix->value) == GET_CODE (mp->value)
6104 && fix->mode == mp->mode
6105 && (GET_CODE (fix->value) != CODE_LABEL
6106 || (CODE_LABEL_NUMBER (fix->value)
6107 == CODE_LABEL_NUMBER (mp->value)))
6108 && rtx_equal_p (fix->value, mp->value))
6110 /* More than one fix references this entry. */
6111 mp->refcount++;
6112 return move_minipool_fix_forward_ref (mp, max_mp, max_address);
6115 /* Note the insertion point if necessary. */
6116 if (max_mp == NULL
6117 && mp->max_address > max_address)
6118 max_mp = mp;
6121 /* The value is not currently in the minipool, so we need to create
6122 a new entry for it. If MAX_MP is NULL, the entry will be put on
6123 the end of the list since the placement is less constrained than
6124 any existing entry. Otherwise, we insert the new fix before
6125 MAX_MP and, if necessary, adjust the constraints on the other
6126 entries. */
6127 mp = xmalloc (sizeof (* mp));
6128 mp->fix_size = fix->fix_size;
6129 mp->mode = fix->mode;
6130 mp->value = fix->value;
6131 mp->refcount = 1;
6132 /* Not yet required for a backwards ref. */
6133 mp->min_address = -65536;
6135 if (max_mp == NULL)
6137 mp->max_address = max_address;
6138 mp->next = NULL;
6139 mp->prev = minipool_vector_tail;
6141 if (mp->prev == NULL)
6143 minipool_vector_head = mp;
6144 minipool_vector_label = gen_label_rtx ();
6146 else
6147 mp->prev->next = mp;
6149 minipool_vector_tail = mp;
6151 else
6153 if (max_address > max_mp->max_address - mp->fix_size)
6154 mp->max_address = max_mp->max_address - mp->fix_size;
6155 else
6156 mp->max_address = max_address;
6158 mp->next = max_mp;
6159 mp->prev = max_mp->prev;
6160 max_mp->prev = mp;
6161 if (mp->prev != NULL)
6162 mp->prev->next = mp;
6163 else
6164 minipool_vector_head = mp;
6167 /* Save the new entry. */
6168 max_mp = mp;
6170 /* Scan over the preceding entries and adjust their addresses as
6171 required. */
6172 while (mp->prev != NULL
6173 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
6175 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
6176 mp = mp->prev;
6179 return max_mp;
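/* Move a minipool fix MP from its current location to after MIN_MP.
   If MIN_MP is NULL, then MP doesn't need moving, but its minimum
   address constraint may need updating.  The offsets of every entry
   in the pool are then recomputed.  */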
6182 static Mnode *
6183 move_minipool_fix_backward_ref (Mnode *mp, Mnode *min_mp,
6184 HOST_WIDE_INT min_address)
6186 HOST_WIDE_INT offset;
6188 /* This should never be true, and the code below assumes these are
6189 different. */
6190 if (mp == min_mp)
6191 abort ();
6193 if (min_mp == NULL)
6195 if (min_address > mp->min_address)
6196 mp->min_address = min_address;
6198 else
6200 /* We will adjust this below if it is too loose. */
6201 mp->min_address = min_address;
6203 /* Unlink MP from its current position. Since min_mp is non-null,
6204 mp->next must be non-null. */
6205 mp->next->prev = mp->prev;
6206 if (mp->prev != NULL)
6207 mp->prev->next = mp->next;
6208 else
6209 minipool_vector_head = mp->next;
6211 /* Reinsert it after MIN_MP. */
6212 mp->prev = min_mp;
6213 mp->next = min_mp->next;
6214 min_mp->next = mp;
6215 if (mp->next != NULL)
6216 mp->next->prev = mp;
6217 else
6218 minipool_vector_tail = mp;
6221 min_mp = mp;
6223 offset = 0;
6224 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
6226 mp->offset = offset;
6227 if (mp->refcount > 0)
6228 offset += mp->fix_size;
6230 if (mp->next && mp->next->min_address < mp->min_address + mp->fix_size)
6231 mp->next->min_address = mp->min_address + mp->fix_size;
6234 return min_mp;
6237 /* Add a constant to the minipool for a backward reference. Returns the
6238 node added or NULL if the constant will not fit in this pool.
6240 Note that the code for insertion for a backwards reference can be
6241 somewhat confusing because the calculated offsets for each fix do
6242 not take into account the size of the pool (which is still under
6243 construction). */
6244 static Mnode *
6245 add_minipool_backward_ref (Mfix *fix)
6247 /* If set, min_mp is the last pool_entry that has a lower constraint
6248 than the one we are trying to add. */
6249 Mnode *min_mp = NULL;
6250 /* This can be negative, since it is only a constraint. */
6251 HOST_WIDE_INT min_address = fix->address - fix->backwards;
6252 Mnode *mp;
6254 /* If we can't reach the current pool from this insn, or if we can't
6255 insert this entry at the end of the pool without pushing other
6256 fixes out of range, then we don't try. This ensures that we
6257 can't fail later on. */
6258 if (min_address >= minipool_barrier->address
6259 || (minipool_vector_tail->min_address + fix->fix_size
6260 >= minipool_barrier->address))
6261 return NULL;
6263 /* Scan the pool to see if a constant with the same value has
6264 already been added. While we are doing this, also note the
6265 location where we must insert the constant if it doesn't already
6266 exist. */
6267 for (mp = minipool_vector_tail; mp != NULL; mp = mp->prev)
6269 if (GET_CODE (fix->value) == GET_CODE (mp->value)
6270 && fix->mode == mp->mode
6271 && (GET_CODE (fix->value) != CODE_LABEL
6272 || (CODE_LABEL_NUMBER (fix->value)
6273 == CODE_LABEL_NUMBER (mp->value)))
6274 && rtx_equal_p (fix->value, mp->value)
6275 /* Check that there is enough slack to move this entry to the
6276 end of the table (this is conservative). */
6277 && (mp->max_address
6278 > (minipool_barrier->address
6279 + minipool_vector_tail->offset
6280 + minipool_vector_tail->fix_size)))
6282 mp->refcount++;
6283 return move_minipool_fix_backward_ref (mp, min_mp, min_address);
6286 if (min_mp != NULL)
6287 mp->min_address += fix->fix_size;
6288 else
6290 /* Note the insertion point if necessary. */
6291 if (mp->min_address < min_address)
6292 min_mp = mp;
6293 else if (mp->max_address
6294 < minipool_barrier->address + mp->offset + fix->fix_size)
6296 /* Inserting before this entry would push the fix beyond
6297 its maximum address (which can happen if we have
6298 re-located a forwards fix); force the new fix to come
6299 after it. */
6300 min_mp = mp;
6301 min_address = mp->min_address + fix->fix_size;
6306 /* We need to create a new entry. */
6307 mp = xmalloc (sizeof (* mp));
6308 mp->fix_size = fix->fix_size;
6309 mp->mode = fix->mode;
6310 mp->value = fix->value;
6311 mp->refcount = 1;
6312 mp->max_address = minipool_barrier->address + 65536;
6314 mp->min_address = min_address;
6316 if (min_mp == NULL)
6318 mp->prev = NULL;
6319 mp->next = minipool_vector_head;
6321 if (mp->next == NULL)
6323 minipool_vector_tail = mp;
6324 minipool_vector_label = gen_label_rtx ();
6326 else
6327 mp->next->prev = mp;
6329 minipool_vector_head = mp;
6331 else
6333 mp->next = min_mp->next;
6334 mp->prev = min_mp;
6335 min_mp->next = mp;
6337 if (mp->next != NULL)
6338 mp->next->prev = mp;
6339 else
6340 minipool_vector_tail = mp;
6343 /* Save the new entry. */
6344 min_mp = mp;
6346 if (mp->prev)
6347 mp = mp->prev;
6348 else
6349 mp->offset = 0;
6351 /* Scan over the following entries and adjust their offsets. */
6352 while (mp->next != NULL)
6354 if (mp->next->min_address < mp->min_address + mp->fix_size)
6355 mp->next->min_address = mp->min_address + mp->fix_size;
6357 if (mp->refcount)
6358 mp->next->offset = mp->offset + mp->fix_size;
6359 else
6360 mp->next->offset = mp->offset;
6362 mp = mp->next;
6365 return min_mp;
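/* Record BARRIER as the fix entry for the current minipool, and assign
   an offset within the pool to every entry that is still referenced.  */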
6368 static void
6369 assign_minipool_offsets (Mfix *barrier)
6371 HOST_WIDE_INT offset = 0;
6372 Mnode *mp;
6374 minipool_barrier = barrier;
6376 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
6378 mp->offset = offset;
6380 if (mp->refcount > 0)
6381 offset += mp->fix_size;
6385 /* Output the literal table. */
6386 static void
6387 dump_minipool (rtx scan)
6389 Mnode *mp;
6390 Mnode *nmp;
6392 if (rtl_dump_file)
6393 fprintf (rtl_dump_file,
6394 ";; Emitting minipool after insn %u; address %ld\n",
6395 INSN_UID (scan), (unsigned long) minipool_barrier->address);
6397 scan = emit_label_after (gen_label_rtx (), scan);
6398 scan = emit_insn_after (gen_align_4 (), scan);
6399 scan = emit_label_after (minipool_vector_label, scan);
6401 for (mp = minipool_vector_head; mp != NULL; mp = nmp)
6403 if (mp->refcount > 0)
6405 if (rtl_dump_file)
6407 fprintf (rtl_dump_file,
6408 ";; Offset %u, min %ld, max %ld ",
6409 (unsigned) mp->offset, (unsigned long) mp->min_address,
6410 (unsigned long) mp->max_address);
6411 arm_print_value (rtl_dump_file, mp->value);
6412 fputc ('\n', rtl_dump_file);
6415 switch (mp->fix_size)
6417 #ifdef HAVE_consttable_1
6418 case 1:
6419 scan = emit_insn_after (gen_consttable_1 (mp->value), scan);
6420 break;
6422 #endif
6423 #ifdef HAVE_consttable_2
6424 case 2:
6425 scan = emit_insn_after (gen_consttable_2 (mp->value), scan);
6426 break;
6428 #endif
6429 #ifdef HAVE_consttable_4
6430 case 4:
6431 scan = emit_insn_after (gen_consttable_4 (mp->value), scan);
6432 break;
6434 #endif
6435 #ifdef HAVE_consttable_8
6436 case 8:
6437 scan = emit_insn_after (gen_consttable_8 (mp->value), scan);
6438 break;
6440 #endif
6441 default:
6442 abort ();
6443 break;
6447 nmp = mp->next;
6448 free (mp);
6451 minipool_vector_head = minipool_vector_tail = NULL;
6452 scan = emit_insn_after (gen_consttable_end (), scan);
6453 scan = emit_barrier_after (scan);
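/* The emitted data looks roughly like this (illustrative only; the
   exact text comes from the consttable_* patterns in the machine
   description):

	.LX0:			@ fresh label from gen_label_rtx
		.align	2
	.LX1:			@ minipool_vector_label
		.word	<4-byte entry>
		.word	<8-byte entry, low word>
		.word	<8-byte entry, high word>

   with a barrier after it so that nothing falls through into the data.  */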
6456 /* Return the cost of forcibly inserting a barrier after INSN. */
6457 static int
6458 arm_barrier_cost (rtx insn)
6460 /* Basing the location of the pool on the loop depth is preferable,
6461 but at the moment, the basic block information seems to be
6462 corrupt by this stage of the compilation. */
6463 int base_cost = 50;
6464 rtx next = next_nonnote_insn (insn);
6466 if (next != NULL && GET_CODE (next) == CODE_LABEL)
6467 base_cost -= 20;
6469 switch (GET_CODE (insn))
6471 case CODE_LABEL:
6472 /* It will always be better to place the table before the label, rather
6473 than after it. */
6474 return 50;
6476 case INSN:
6477 case CALL_INSN:
6478 return base_cost;
6480 case JUMP_INSN:
6481 return base_cost - 10;
6483 default:
6484 return base_cost + 10;
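/* For example, a JUMP_INSN immediately followed by a CODE_LABEL costs
   20 (50 - 20 - 10), making such a spot the preferred place to force a
   barrier, since create_fix_barrier selects the lowest cost.  */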
6488 /* Find the best place in the insn stream in the range
6489 (FIX->address,MAX_ADDRESS) to forcibly insert a minipool barrier.
6490 Create the barrier by inserting a jump and add a new fix entry for
6491 it. */
6492 static Mfix *
6493 create_fix_barrier (Mfix *fix, HOST_WIDE_INT max_address)
6495 HOST_WIDE_INT count = 0;
6496 rtx barrier;
6497 rtx from = fix->insn;
6498 rtx selected = from;
6499 int selected_cost;
6500 HOST_WIDE_INT selected_address;
6501 Mfix * new_fix;
6502 HOST_WIDE_INT max_count = max_address - fix->address;
6503 rtx label = gen_label_rtx ();
6505 selected_cost = arm_barrier_cost (from);
6506 selected_address = fix->address;
6508 while (from && count < max_count)
6510 rtx tmp;
6511 int new_cost;
6513 /* This code shouldn't have been called if there was a natural barrier
6514 within range. */
6515 if (GET_CODE (from) == BARRIER)
6516 abort ();
6518 /* Count the length of this insn. */
6519 count += get_attr_length (from);
6521 /* If there is a jump table, add its length. */
6522 tmp = is_jump_table (from);
6523 if (tmp != NULL)
6525 count += get_jump_table_size (tmp);
6527 /* Jump tables aren't in a basic block, so base the cost on
6528 the dispatch insn. If we select this location, we will
6529 still put the pool after the table. */
6530 new_cost = arm_barrier_cost (from);
6532 if (count < max_count && new_cost <= selected_cost)
6534 selected = tmp;
6535 selected_cost = new_cost;
6536 selected_address = fix->address + count;
6539 /* Continue after the dispatch table. */
6540 from = NEXT_INSN (tmp);
6541 continue;
6544 new_cost = arm_barrier_cost (from);
6546 if (count < max_count && new_cost <= selected_cost)
6548 selected = from;
6549 selected_cost = new_cost;
6550 selected_address = fix->address + count;
6553 from = NEXT_INSN (from);
6556 /* Create a new JUMP_INSN that branches around a barrier. */
6557 from = emit_jump_insn_after (gen_jump (label), selected);
6558 JUMP_LABEL (from) = label;
6559 barrier = emit_barrier_after (from);
6560 emit_label_after (label, barrier);
6562 /* Create a minipool barrier entry for the new barrier. */
6563 new_fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* new_fix));
6564 new_fix->insn = barrier;
6565 new_fix->address = selected_address;
6566 new_fix->next = fix->next;
6567 fix->next = new_fix;
6569 return new_fix;
6572 /* Record that there is a natural barrier in the insn stream at
6573 ADDRESS. */
6574 static void
6575 push_minipool_barrier (rtx insn, HOST_WIDE_INT address)
6577 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
6579 fix->insn = insn;
6580 fix->address = address;
6582 fix->next = NULL;
6583 if (minipool_fix_head != NULL)
6584 minipool_fix_tail->next = fix;
6585 else
6586 minipool_fix_head = fix;
6588 minipool_fix_tail = fix;
6591 /* Record INSN, which will need fixing up to load a value from the
6592 minipool. ADDRESS is the offset of the insn from the start of the
6593 function; LOC is a pointer to the part of the insn which requires
6594 fixing; VALUE is the constant that must be loaded, which is of type
6595 MODE. */
6596 static void
6597 push_minipool_fix (rtx insn, HOST_WIDE_INT address, rtx *loc,
6598 enum machine_mode mode, rtx value)
6600 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
6602 #ifdef AOF_ASSEMBLER
6603 /* PIC symbol references need to be converted into offsets into the
6604 based area. */
6605 /* XXX This shouldn't be done here. */
6606 if (flag_pic && GET_CODE (value) == SYMBOL_REF)
6607 value = aof_pic_entry (value);
6608 #endif /* AOF_ASSEMBLER */
6610 fix->insn = insn;
6611 fix->address = address;
6612 fix->loc = loc;
6613 fix->mode = mode;
6614 fix->fix_size = MINIPOOL_FIX_SIZE (mode);
6615 fix->value = value;
6616 fix->forwards = get_attr_pool_range (insn);
6617 fix->backwards = get_attr_neg_pool_range (insn);
6618 fix->minipool = NULL;
6620 /* If an insn doesn't have a range defined for it, then it isn't
6621 expecting to be reworked by this code. Better to abort now than
6622 to generate duff assembly code. */
6623 if (fix->forwards == 0 && fix->backwards == 0)
6624 abort ();
6626 if (rtl_dump_file)
6628 fprintf (rtl_dump_file,
6629 ";; %smode fixup for i%d; addr %lu, range (%ld,%ld): ",
6630 GET_MODE_NAME (mode),
6631 INSN_UID (insn), (unsigned long) address,
6632 -1 * (long)fix->backwards, (long)fix->forwards);
6633 arm_print_value (rtl_dump_file, fix->value);
6634 fprintf (rtl_dump_file, "\n");
6637 /* Add it to the chain of fixes. */
6638 fix->next = NULL;
6640 if (minipool_fix_head != NULL)
6641 minipool_fix_tail->next = fix;
6642 else
6643 minipool_fix_head = fix;
6645 minipool_fix_tail = fix;
6648 /* Scan INSN and note any of its operands that need fixing.
6649 If DO_PUSHES is false we do not actually push any of the fixups
6650 needed. The function returns TRUE if any fixups were needed/pushed.
6651 This is used by arm_memory_load_p() which needs to know about loads
6652 of constants that will be converted into minipool loads. */
6653 static bool
6654 note_invalid_constants (rtx insn, HOST_WIDE_INT address, int do_pushes)
6656 bool result = false;
6657 int opno;
6659 extract_insn (insn);
6661 if (!constrain_operands (1))
6662 fatal_insn_not_found (insn);
6664 /* Fill in recog_op_alt with information about the constraints of this insn. */
6665 preprocess_constraints ();
6667 for (opno = 0; opno < recog_data.n_operands; opno++)
6669 /* Things we need to fix can only occur in inputs. */
6670 if (recog_data.operand_type[opno] != OP_IN)
6671 continue;
6673 /* If this alternative is a memory reference, then any mention
6674 of constants in this alternative is really to fool reload
6675 into allowing us to accept one there. We need to fix them up
6676 now so that we output the right code. */
6677 if (recog_op_alt[opno][which_alternative].memory_ok)
6679 rtx op = recog_data.operand[opno];
6681 if (CONSTANT_P (op))
6683 if (do_pushes)
6684 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
6685 recog_data.operand_mode[opno], op);
6686 result = true;
6688 else if (GET_CODE (op) == MEM
6689 && GET_CODE (XEXP (op, 0)) == SYMBOL_REF
6690 && CONSTANT_POOL_ADDRESS_P (XEXP (op, 0)))
6692 if (do_pushes)
6693 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
6694 recog_data.operand_mode[opno],
6695 get_pool_constant (XEXP (op, 0)));
6697 result = true;
6702 return result;
6705 /* GCC puts the pool in the wrong place for ARM, since we can only
6706 load addresses a limited distance around the pc. We do some
6707 special munging to move the constant pool values to the correct
6708 point in the code. */
6709 static void
6710 arm_reorg (void)
6712 rtx insn;
6713 HOST_WIDE_INT address = 0;
6714 Mfix * fix;
6716 minipool_fix_head = minipool_fix_tail = NULL;
6718 /* The first insn must always be a note, or the code below won't
6719 scan it properly. */
6720 insn = get_insns ();
6721 if (GET_CODE (insn) != NOTE)
6722 abort ();
6724 /* Scan all the insns and record the operands that will need fixing. */
6725 for (insn = next_nonnote_insn (insn); insn; insn = next_nonnote_insn (insn))
6727 if (TARGET_CIRRUS_FIX_INVALID_INSNS
6728 && (arm_cirrus_insn_p (insn)
6729 || GET_CODE (insn) == JUMP_INSN
6730 || arm_memory_load_p (insn)))
6731 cirrus_reorg (insn);
6733 if (GET_CODE (insn) == BARRIER)
6734 push_minipool_barrier (insn, address);
6735 else if (INSN_P (insn))
6737 rtx table;
6739 note_invalid_constants (insn, address, true);
6740 address += get_attr_length (insn);
6742 /* If the insn is a vector jump, add the size of the table
6743 and skip the table. */
6744 if ((table = is_jump_table (insn)) != NULL)
6746 address += get_jump_table_size (table);
6747 insn = table;
6752 fix = minipool_fix_head;
6754 /* Now scan the fixups and perform the required changes. */
6755 while (fix)
6757 Mfix * ftmp;
6758 Mfix * fdel;
6759 Mfix * last_added_fix;
6760 Mfix * last_barrier = NULL;
6761 Mfix * this_fix;
6763 /* Skip any further barriers before the next fix. */
6764 while (fix && GET_CODE (fix->insn) == BARRIER)
6765 fix = fix->next;
6767 /* No more fixes. */
6768 if (fix == NULL)
6769 break;
6771 last_added_fix = NULL;
6773 for (ftmp = fix; ftmp; ftmp = ftmp->next)
6775 if (GET_CODE (ftmp->insn) == BARRIER)
6777 if (ftmp->address >= minipool_vector_head->max_address)
6778 break;
6780 last_barrier = ftmp;
6782 else if ((ftmp->minipool = add_minipool_forward_ref (ftmp)) == NULL)
6783 break;
6785 last_added_fix = ftmp; /* Keep track of the last fix added. */
6788 /* If we found a barrier, drop back to that; any fixes that we
6789 could have reached but come after the barrier will now go in
6790 the next mini-pool. */
6791 if (last_barrier != NULL)
6793 /* Reduce the refcount for those fixes that won't go into this
6794 pool after all. */
6795 for (fdel = last_barrier->next;
6796 fdel && fdel != ftmp;
6797 fdel = fdel->next)
6799 fdel->minipool->refcount--;
6800 fdel->minipool = NULL;
6803 ftmp = last_barrier;
6805 else
6807 /* ftmp is the first fix that we can't fit into this pool and
6808 there are no natural barriers that we could use. Insert a
6809 new barrier in the code somewhere between the previous
6810 fix and this one, and arrange to jump around it. */
6811 HOST_WIDE_INT max_address;
6813 /* The last item on the list of fixes must be a barrier, so
6814 we can never run off the end of the list of fixes without
6815 last_barrier being set. */
6816 if (ftmp == NULL)
6817 abort ();
6819 max_address = minipool_vector_head->max_address;
6820 /* Check that there isn't another fix that is in range that
6821 we couldn't fit into this pool because the pool was
6822 already too large: we need to put the pool before such an
6823 instruction. */
6824 if (ftmp->address < max_address)
6825 max_address = ftmp->address;
6827 last_barrier = create_fix_barrier (last_added_fix, max_address);
6830 assign_minipool_offsets (last_barrier);
6832 while (ftmp)
6834 if (GET_CODE (ftmp->insn) != BARRIER
6835 && ((ftmp->minipool = add_minipool_backward_ref (ftmp))
6836 == NULL))
6837 break;
6839 ftmp = ftmp->next;
6842 /* Scan over the fixes we have identified for this pool, fixing them
6843 up and adding the constants to the pool itself. */
6844 for (this_fix = fix; this_fix && ftmp != this_fix;
6845 this_fix = this_fix->next)
6846 if (GET_CODE (this_fix->insn) != BARRIER)
6848 rtx addr
6849 = plus_constant (gen_rtx_LABEL_REF (VOIDmode,
6850 minipool_vector_label),
6851 this_fix->minipool->offset);
6852 *this_fix->loc = gen_rtx_MEM (this_fix->mode, addr);
6855 dump_minipool (last_barrier->insn);
6856 fix = ftmp;
6859 /* From now on we must synthesize any constants that we can't handle
6860 directly. This can happen if the RTL gets split during final
6861 instruction generation. */
6862 after_arm_reorg = 1;
6864 /* Free the minipool memory. */
6865 obstack_free (&minipool_obstack, minipool_startobj);
6868 /* Routines to output assembly language. */
6870 /* If the rtx is the correct value then return the string of the number.
6871 In this way we can ensure that valid double constants are generated even
6872 when cross compiling. */
6873 const char *
6874 fp_immediate_constant (rtx x)
6876 REAL_VALUE_TYPE r;
6877 int i;
6879 if (!fpa_consts_inited)
6880 init_fpa_table ();
6882 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
6883 for (i = 0; i < 8; i++)
6884 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
6885 return strings_fpa[i];
6887 abort ();
6890 /* As for fp_immediate_constant, but value is passed directly, not in rtx. */
6891 static const char *
6892 fp_const_from_val (REAL_VALUE_TYPE *r)
6894 int i;
6896 if (!fpa_consts_inited)
6897 init_fpa_table ();
6899 for (i = 0; i < 8; i++)
6900 if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
6901 return strings_fpa[i];
6903 abort ();
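/* The eight FPA immediate constants set up by init_fpa_table are
   0, 1, 2, 3, 4, 5, 0.5 and 10; any other value aborts above.  */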
6906 /* Output the operands of a LDM/STM instruction to STREAM.
6907 MASK is the ARM register set mask of which only bits 0-15 are important.
6908 REG is the base register, either the frame pointer or the stack pointer,
6909 INSTR is the possibly suffixed load or store instruction. */
6910 static void
6911 print_multi_reg (FILE *stream, const char *instr, int reg, int mask)
6913 int i;
6914 int not_first = FALSE;
6916 fputc ('\t', stream);
6917 asm_fprintf (stream, instr, reg);
6918 fputs (", {", stream);
6920 for (i = 0; i <= LAST_ARM_REGNUM; i++)
6921 if (mask & (1 << i))
6923 if (not_first)
6924 fprintf (stream, ", ");
6926 asm_fprintf (stream, "%r", i);
6927 not_first = TRUE;
6930 fprintf (stream, "}");
6932 /* Add a ^ character for the 26-bit ABI, but only if we were loading
6933 the PC. Otherwise we would generate an UNPREDICTABLE instruction.
6934 Strictly speaking the instruction would be unpredictable only if
6935 we were writing back the base register as well, but since we never
6936 want to generate an LDM type 2 instruction (register bank switching)
6937 which is what you get if the PC is not being loaded, we do not need
6938 to check for writeback. */
6939 if (! TARGET_APCS_32
6940 && ((mask & (1 << PC_REGNUM)) != 0))
6941 fprintf (stream, "^");
6943 fprintf (stream, "\n");
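/* For example (a sketch with hypothetical arguments):

	print_multi_reg (stream, "ldmfd\t%r!", SP_REGNUM, 0x8010);

   emits "ldmfd sp!, {r4, pc}", plus a trailing "^" under the 26-bit
   ABI because the PC is in the mask.  */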
6946 /* Output a 'call' insn. */
6947 const char *
6948 output_call (rtx *operands)
6950 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
6952 if (REGNO (operands[0]) == LR_REGNUM)
6954 operands[0] = gen_rtx_REG (SImode, IP_REGNUM);
6955 output_asm_insn ("mov%?\t%0, %|lr", operands);
6958 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
6960 if (TARGET_INTERWORK)
6961 output_asm_insn ("bx%?\t%0", operands);
6962 else
6963 output_asm_insn ("mov%?\t%|pc, %0", operands);
6965 return "";
6968 static int
6969 eliminate_lr2ip (rtx *x)
6971 int something_changed = 0;
6972 rtx x0 = * x;
6973 int code = GET_CODE (x0);
6974 int i, j;
6975 const char * fmt;
6977 switch (code)
6979 case REG:
6980 if (REGNO (x0) == LR_REGNUM)
6982 *x = gen_rtx_REG (SImode, IP_REGNUM);
6983 return 1;
6985 return 0;
6986 default:
6987 /* Scan through the sub-elements and change any references there. */
6988 fmt = GET_RTX_FORMAT (code);
6990 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6991 if (fmt[i] == 'e')
6992 something_changed |= eliminate_lr2ip (&XEXP (x0, i));
6993 else if (fmt[i] == 'E')
6994 for (j = 0; j < XVECLEN (x0, i); j++)
6995 something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));
6997 return something_changed;
7001 /* Output a 'call' insn that is a reference in memory. */
7002 const char *
7003 output_call_mem (rtx *operands)
7005 operands[0] = copy_rtx (operands[0]); /* Be ultra careful. */
7006 /* Handle calls using lr by using ip (which may be clobbered in subr anyway). */
7007 if (eliminate_lr2ip (&operands[0]))
7008 output_asm_insn ("mov%?\t%|ip, %|lr", operands);
7010 if (TARGET_INTERWORK)
7012 output_asm_insn ("ldr%?\t%|ip, %0", operands);
7013 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
7014 output_asm_insn ("bx%?\t%|ip", operands);
7016 else
7018 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
7019 output_asm_insn ("ldr%?\t%|pc, %0", operands);
7022 return "";
7026 /* Output a move of a long double from arm registers to an fpa register.
7027 OPERANDS[0] is an fpa register.
7028 OPERANDS[1] is the first register of an arm register pair. */
7029 const char *
7030 output_mov_long_double_fpa_from_arm (rtx *operands)
7032 int arm_reg0 = REGNO (operands[1]);
7033 rtx ops[3];
7035 if (arm_reg0 == IP_REGNUM)
7036 abort ();
7038 ops[0] = gen_rtx_REG (SImode, arm_reg0);
7039 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
7040 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
7042 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
7043 output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);
7045 return "";
7048 /* Output a move of a long double from an fpa register to arm registers.
7049 OPERANDS[0] is the first register of an arm register pair.
7050 OPERANDS[1] is an fpa register. */
7051 const char *
7052 output_mov_long_double_arm_from_fpa (rtx *operands)
7054 int arm_reg0 = REGNO (operands[0]);
7055 rtx ops[3];
7057 if (arm_reg0 == IP_REGNUM)
7058 abort ();
7060 ops[0] = gen_rtx_REG (SImode, arm_reg0);
7061 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
7062 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
7064 output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
7065 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
7066 return "";
7069 /* Output a move from arm registers to arm registers of a long double.
7070 OPERANDS[0] is the destination.
7071 OPERANDS[1] is the source. */
7072 const char *
7073 output_mov_long_double_arm_from_arm (rtx *operands)
7075 /* We have to be careful here because the two might overlap. */
7076 int dest_start = REGNO (operands[0]);
7077 int src_start = REGNO (operands[1]);
7078 rtx ops[2];
7079 int i;
7081 if (dest_start < src_start)
7083 for (i = 0; i < 3; i++)
7085 ops[0] = gen_rtx_REG (SImode, dest_start + i);
7086 ops[1] = gen_rtx_REG (SImode, src_start + i);
7087 output_asm_insn ("mov%?\t%0, %1", ops);
7090 else
7092 for (i = 2; i >= 0; i--)
7094 ops[0] = gen_rtx_REG (SImode, dest_start + i);
7095 ops[1] = gen_rtx_REG (SImode, src_start + i);
7096 output_asm_insn ("mov%?\t%0, %1", ops);
7100 return "";
7104 /* Output a move of a double from arm registers to an fpa register.
7105 OPERANDS[0] is an fpa register.
7106 OPERANDS[1] is the first register of an arm register pair. */
7107 const char *
7108 output_mov_double_fpa_from_arm (rtx *operands)
7110 int arm_reg0 = REGNO (operands[1]);
7111 rtx ops[2];
7113 if (arm_reg0 == IP_REGNUM)
7114 abort ();
7116 ops[0] = gen_rtx_REG (SImode, arm_reg0);
7117 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
7118 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
7119 output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
7120 return "";
7123 /* Output a move of a double from an fpa register to arm registers.
7124 OPERANDS[0] is the first register of an arm register pair.
7125 OPERANDS[1] is an fpa register. */
7126 const char *
7127 output_mov_double_arm_from_fpa (rtx *operands)
7129 int arm_reg0 = REGNO (operands[0]);
7130 rtx ops[2];
7132 if (arm_reg0 == IP_REGNUM)
7133 abort ();
7135 ops[0] = gen_rtx_REG (SImode, arm_reg0);
7136 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
7137 output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
7138 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
7139 return "";
7142 /* Output a move between double words.
7143 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
7144 or MEM<-REG and all MEMs must be offsettable addresses. */
7145 const char *
7146 output_move_double (rtx *operands)
7148 enum rtx_code code0 = GET_CODE (operands[0]);
7149 enum rtx_code code1 = GET_CODE (operands[1]);
7150 rtx otherops[3];
7152 if (code0 == REG)
7154 int reg0 = REGNO (operands[0]);
7156 otherops[0] = gen_rtx_REG (SImode, 1 + reg0);
7158 if (code1 == REG)
7160 int reg1 = REGNO (operands[1]);
7161 if (reg1 == IP_REGNUM)
7162 abort ();
7164 /* Ensure the second source is not overwritten. */
7165 if (reg1 == reg0 + (WORDS_BIG_ENDIAN ? -1 : 1))
7166 output_asm_insn ("mov%?\t%Q0, %Q1\n\tmov%?\t%R0, %R1", operands);
7167 else
7168 output_asm_insn ("mov%?\t%R0, %R1\n\tmov%?\t%Q0, %Q1", operands);
7170 else if (code1 == CONST_DOUBLE)
7172 if (GET_MODE (operands[1]) == DFmode)
7174 REAL_VALUE_TYPE r;
7175 long l[2];
7177 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
7178 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
7179 otherops[1] = GEN_INT (l[1]);
7180 operands[1] = GEN_INT (l[0]);
7182 else if (GET_MODE (operands[1]) != VOIDmode)
7183 abort ();
7184 else if (WORDS_BIG_ENDIAN)
7186 otherops[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
7187 operands[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
7189 else
7191 otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
7192 operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
7195 output_mov_immediate (operands);
7196 output_mov_immediate (otherops);
7198 else if (code1 == CONST_INT)
7200 #if HOST_BITS_PER_WIDE_INT > 32
7201 /* If HOST_WIDE_INT is more than 32 bits, the intval tells us
7202 what the upper word is. */
7203 if (WORDS_BIG_ENDIAN)
7205 otherops[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
7206 operands[1] = GEN_INT (INTVAL (operands[1]) >> 32);
7208 else
7210 otherops[1] = GEN_INT (INTVAL (operands[1]) >> 32);
7211 operands[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
7213 #else
7214 /* Sign extend the intval into the high-order word. */
7215 if (WORDS_BIG_ENDIAN)
7217 otherops[1] = operands[1];
7218 operands[1] = (INTVAL (operands[1]) < 0
7219 ? constm1_rtx : const0_rtx);
7221 else
7222 otherops[1] = INTVAL (operands[1]) < 0 ? constm1_rtx : const0_rtx;
7223 #endif
7224 output_mov_immediate (otherops);
7225 output_mov_immediate (operands);
7227 else if (code1 == MEM)
7229 switch (GET_CODE (XEXP (operands[1], 0)))
7231 case REG:
7232 output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
7233 break;
7235 case PRE_INC:
7236 abort (); /* Should never happen now. */
7237 break;
7239 case PRE_DEC:
7240 output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
7241 break;
7243 case POST_INC:
7244 output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
7245 break;
7247 case POST_DEC:
7248 abort (); /* Should never happen now. */
7249 break;
7251 case LABEL_REF:
7252 case CONST:
7253 output_asm_insn ("adr%?\t%0, %1", operands);
7254 output_asm_insn ("ldm%?ia\t%0, %M0", operands);
7255 break;
7257 default:
7258 if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1),
7259 GET_MODE (XEXP (XEXP (operands[1], 0), 1))))
7261 otherops[0] = operands[0];
7262 otherops[1] = XEXP (XEXP (operands[1], 0), 0);
7263 otherops[2] = XEXP (XEXP (operands[1], 0), 1);
7265 if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
7267 if (GET_CODE (otherops[2]) == CONST_INT)
7269 switch ((int) INTVAL (otherops[2]))
7271 case -8:
7272 output_asm_insn ("ldm%?db\t%1, %M0", otherops);
7273 return "";
7274 case -4:
7275 output_asm_insn ("ldm%?da\t%1, %M0", otherops);
7276 return "";
7277 case 4:
7278 output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
7279 return "";
7282 if (!(const_ok_for_arm (INTVAL (otherops[2]))))
7283 output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
7284 else
7285 output_asm_insn ("add%?\t%0, %1, %2", otherops);
7287 else
7288 output_asm_insn ("add%?\t%0, %1, %2", otherops);
7290 else
7291 output_asm_insn ("sub%?\t%0, %1, %2", otherops);
7293 return "ldm%?ia\t%0, %M0";
7295 else
7297 otherops[1] = adjust_address (operands[1], VOIDmode, 4);
7298 /* Take care of overlapping base/data reg. */
7299 if (reg_mentioned_p (operands[0], operands[1]))
7301 output_asm_insn ("ldr%?\t%0, %1", otherops);
7302 output_asm_insn ("ldr%?\t%0, %1", operands);
7304 else
7306 output_asm_insn ("ldr%?\t%0, %1", operands);
7307 output_asm_insn ("ldr%?\t%0, %1", otherops);
7312 else
7313 abort (); /* Constraints should prevent this. */
7315 else if (code0 == MEM && code1 == REG)
7317 if (REGNO (operands[1]) == IP_REGNUM)
7318 abort ();
7320 switch (GET_CODE (XEXP (operands[0], 0)))
7322 case REG:
7323 output_asm_insn ("stm%?ia\t%m0, %M1", operands);
7324 break;
7326 case PRE_INC:
7327 abort (); /* Should never happen now. */
7328 break;
7330 case PRE_DEC:
7331 output_asm_insn ("stm%?db\t%m0!, %M1", operands);
7332 break;
7334 case POST_INC:
7335 output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
7336 break;
7338 case POST_DEC:
7339 abort (); /* Should never happen now. */
7340 break;
7342 case PLUS:
7343 if (GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT)
7345 switch ((int) INTVAL (XEXP (XEXP (operands[0], 0), 1)))
7347 case -8:
7348 output_asm_insn ("stm%?db\t%m0, %M1", operands);
7349 return "";
7351 case -4:
7352 output_asm_insn ("stm%?da\t%m0, %M1", operands);
7353 return "";
7355 case 4:
7356 output_asm_insn ("stm%?ib\t%m0, %M1", operands);
7357 return "";
7360 /* Fall through */
7362 default:
7363 otherops[0] = adjust_address (operands[0], VOIDmode, 4);
7364 otherops[1] = gen_rtx_REG (SImode, 1 + REGNO (operands[1]));
7365 output_asm_insn ("str%?\t%1, %0", operands);
7366 output_asm_insn ("str%?\t%1, %0", otherops);
7369 else
7370 /* Constraints should prevent this. */
7371 abort ();
7373 return "";
7377 /* Output an arbitrary MOV reg, #n.
7378 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
7379 const char *
7380 output_mov_immediate (rtx *operands)
7382 HOST_WIDE_INT n = INTVAL (operands[1]);
7384 /* Try to use one MOV. */
7385 if (const_ok_for_arm (n))
7386 output_asm_insn ("mov%?\t%0, %1", operands);
7388 /* Try to use one MVN. */
7389 else if (const_ok_for_arm (~n))
7391 operands[1] = GEN_INT (~n);
7392 output_asm_insn ("mvn%?\t%0, %1", operands);
7394 else
7396 int n_ones = 0;
7397 int i;
7399 /* If all else fails, make it out of ORRs or BICs as appropriate. */
7400 for (i = 0; i < 32; i ++)
7401 if (n & 1 << i)
7402 n_ones ++;
7404 if (n_ones > 16) /* Shorter to use MVN with BIC in this case. */
7405 output_multi_immediate (operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1, ~ n);
7406 else
7407 output_multi_immediate (operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1, n);
7410 return "";
7413 /* Output an ADD r, s, #n where n may be too big for one instruction.
7414 If adding zero to one register, output nothing. */
7415 const char *
7416 output_add_immediate (rtx *operands)
7418 HOST_WIDE_INT n = INTVAL (operands[2]);
7420 if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
7422 if (n < 0)
7423 output_multi_immediate (operands,
7424 "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
7425 -n);
7426 else
7427 output_multi_immediate (operands,
7428 "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
7432 return "";
7435 /* Output a multiple immediate operation.
7436 OPERANDS is the vector of operands referred to in the output patterns.
7437 INSTR1 is the output pattern to use for the first constant.
7438 INSTR2 is the output pattern to use for subsequent constants.
7439 IMMED_OP is the index of the constant slot in OPERANDS.
7440 N is the constant value. */
7441 static const char *
7442 output_multi_immediate (rtx *operands, const char *instr1, const char *instr2,
7443 int immed_op, HOST_WIDE_INT n)
7445 #if HOST_BITS_PER_WIDE_INT > 32
7446 n &= 0xffffffff;
7447 #endif
7449 if (n == 0)
7451 /* Quick and easy output. */
7452 operands[immed_op] = const0_rtx;
7453 output_asm_insn (instr1, operands);
7455 else
7457 int i;
7458 const char * instr = instr1;
7460 /* Note that n is never zero here (which would give no output). */
7461 for (i = 0; i < 32; i += 2)
7463 if (n & (3 << i))
7465 operands[immed_op] = GEN_INT (n & (255 << i));
7466 output_asm_insn (instr, operands);
7467 instr = instr2;
7468 i += 6;
7473 return "";
7476 /* Return the appropriate ARM instruction for the operation code.
7477 The returned result should not be overwritten. OP is the rtx of the
7478 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
7479 was shifted. */
7480 const char *
7481 arithmetic_instr (rtx op, int shift_first_arg)
7483 switch (GET_CODE (op))
7485 case PLUS:
7486 return "add";
7488 case MINUS:
7489 return shift_first_arg ? "rsb" : "sub";
7491 case IOR:
7492 return "orr";
7494 case XOR:
7495 return "eor";
7497 case AND:
7498 return "and";
7500 default:
7501 abort ();
7505 /* Ensure valid constant shifts and return the appropriate shift mnemonic
7506 for the operation code. The returned result should not be overwritten.
7507 OP is the rtx code of the shift.
7508 On exit, *AMOUNTP will be -1 if the shift is by a register, or the
7509 constant amount if the shift is by a constant. */
7510 static const char *
7511 shift_op (rtx op, HOST_WIDE_INT *amountp)
7513 const char * mnem;
7514 enum rtx_code code = GET_CODE (op);
7516 if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
7517 *amountp = -1;
7518 else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
7519 *amountp = INTVAL (XEXP (op, 1));
7520 else
7521 abort ();
7523 switch (code)
7525 case ASHIFT:
7526 mnem = "asl";
7527 break;
7529 case ASHIFTRT:
7530 mnem = "asr";
7531 break;
7533 case LSHIFTRT:
7534 mnem = "lsr";
7535 break;
7537 case ROTATERT:
7538 mnem = "ror";
7539 break;
7541 case MULT:
7542 /* We never have to worry about the amount being other than a
7543 power of 2, since this case can never be reloaded from a reg. */
7544 if (*amountp != -1)
7545 *amountp = int_log2 (*amountp);
7546 else
7547 abort ();
7548 return "asl";
7550 default:
7551 abort ();
7554 if (*amountp != -1)
7556 /* This is not 100% correct, but follows from the desire to merge
7557 multiplication by a power of 2 with the recognizer for a
7558 shift. >=32 is not a valid shift for "asl", so we must try and
7559 output a shift that produces the correct arithmetical result.
7560 Using lsr #32 is identical except for the fact that the carry bit
7561 is not set correctly if we set the flags; but we never use the
7562 carry bit from such an operation, so we can ignore that. */
7563 if (code == ROTATERT)
7564 /* Rotate is just modulo 32. */
7565 *amountp &= 31;
7566 else if (*amountp != (*amountp & 31))
7568 if (code == ASHIFT)
7569 mnem = "lsr";
7570 *amountp = 32;
7573 /* Shifts of 0 are no-ops. */
7574 if (*amountp == 0)
7575 return NULL;
7578 return mnem;
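/* For example, (mult x 8) yields "asl" with *AMOUNTP set to 3 via
   int_log2, while (lshiftrt x (reg)) yields "lsr" with *AMOUNTP set
   to -1 to indicate a register-specified shift.  */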
7581 /* Obtain the shift count corresponding to POWER, a power of two. */
7583 static HOST_WIDE_INT
7584 int_log2 (HOST_WIDE_INT power)
7586 HOST_WIDE_INT shift = 0;
7588 while ((((HOST_WIDE_INT) 1 << shift) & power) == 0)
7590 if (shift > 31)
7591 abort ();
7592 shift++;
7595 return shift;
7598 /* Output a .ascii pseudo-op, keeping track of lengths. This is because
7599 /bin/as is horribly restrictive. */
7600 #define MAX_ASCII_LEN 51
7602 void
7603 output_ascii_pseudo_op (FILE *stream, const unsigned char *p, int len)
7605 int i;
7606 int len_so_far = 0;
7608 fputs ("\t.ascii\t\"", stream);
7610 for (i = 0; i < len; i++)
7612 int c = p[i];
7614 if (len_so_far >= MAX_ASCII_LEN)
7616 fputs ("\"\n\t.ascii\t\"", stream);
7617 len_so_far = 0;
7620 switch (c)
7622 case TARGET_TAB:
7623 fputs ("\\t", stream);
7624 len_so_far += 2;
7625 break;
7627 case TARGET_FF:
7628 fputs ("\\f", stream);
7629 len_so_far += 2;
7630 break;
7632 case TARGET_BS:
7633 fputs ("\\b", stream);
7634 len_so_far += 2;
7635 break;
7637 case TARGET_CR:
7638 fputs ("\\r", stream);
7639 len_so_far += 2;
7640 break;
7642 case TARGET_NEWLINE:
7643 fputs ("\\n", stream);
7644 c = p [i + 1];
7645 if ((c >= ' ' && c <= '~')
7646 || c == TARGET_TAB)
7647 /* This is a good place for a line break. */
7648 len_so_far = MAX_ASCII_LEN;
7649 else
7650 len_so_far += 2;
7651 break;
7653 case '\"':
7654 case '\\':
7655 putc ('\\', stream);
7656 len_so_far++;
7657 /* drop through. */
7659 default:
7660 if (c >= ' ' && c <= '~')
7662 putc (c, stream);
7663 len_so_far++;
7665 else
7667 fprintf (stream, "\\%03o", c);
7668 len_so_far += 4;
7670 break;
7674 fputs ("\"\n", stream);
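/* A long string is thus split across several .ascii directives of at
   most MAX_ASCII_LEN positions each, with breaks preferred just after
   a newline so the assembly output stays readable.  */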
7677 /* Compute the register save mask for registers 0 through 12
7678 inclusive. This code is used by both arm_compute_save_reg_mask
7679 and arm_compute_initial_elimination_offset. */
7680 static unsigned long
7681 arm_compute_save_reg0_reg12_mask (void)
7683 unsigned long func_type = arm_current_func_type ();
7684 unsigned int save_reg_mask = 0;
7685 unsigned int reg;
7687 if (IS_INTERRUPT (func_type))
7689 unsigned int max_reg;
7690 /* Interrupt functions must not corrupt any registers,
7691 even call clobbered ones. If this is a leaf function
7692 we can just examine the registers used by the RTL, but
7693 otherwise we have to assume that whatever function is
7694 called might clobber anything, and so we have to save
7695 all the call-clobbered registers as well. */
7696 if (ARM_FUNC_TYPE (func_type) == ARM_FT_FIQ)
7697 /* FIQ handlers have registers r8 - r12 banked, so
7698 we only need to check r0 - r7. Normal ISRs only
7699 bank r14 and r15, so we must check up to r12.
7700 r13 is the stack pointer which is always preserved,
7701 so we do not need to consider it here. */
7702 max_reg = 7;
7703 else
7704 max_reg = 12;
7706 for (reg = 0; reg <= max_reg; reg++)
7707 if (regs_ever_live[reg]
7708 || (! current_function_is_leaf && call_used_regs [reg]))
7709 save_reg_mask |= (1 << reg);
7711 else
7713 /* In the normal case we only need to save those registers
7714 which are call saved and which are used by this function. */
7715 for (reg = 0; reg <= 10; reg++)
7716 if (regs_ever_live[reg] && ! call_used_regs [reg])
7717 save_reg_mask |= (1 << reg);
7719 /* Handle the frame pointer as a special case. */
7720 if (! TARGET_APCS_FRAME
7721 && ! frame_pointer_needed
7722 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
7723 && ! call_used_regs[HARD_FRAME_POINTER_REGNUM])
7724 save_reg_mask |= 1 << HARD_FRAME_POINTER_REGNUM;
7726 /* If we aren't loading the PIC register,
7727 don't stack it even though it may be live. */
7728 if (flag_pic
7729 && ! TARGET_SINGLE_PIC_BASE
7730 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
7731 save_reg_mask |= 1 << PIC_OFFSET_TABLE_REGNUM;
7734 return save_reg_mask;
7737 /* Compute a bit mask of which registers need to be
7738 saved on the stack for the current function. */
7740 static unsigned long
7741 arm_compute_save_reg_mask (void)
7743 unsigned int save_reg_mask = 0;
7744 unsigned long func_type = arm_current_func_type ();
7746 if (IS_NAKED (func_type))
7747 /* This should never really happen. */
7748 return 0;
7750 /* If we are creating a stack frame, then we must save the frame pointer,
7751 IP (which will hold the old stack pointer), LR and the PC. */
7752 if (frame_pointer_needed)
7753 save_reg_mask |=
7754 (1 << ARM_HARD_FRAME_POINTER_REGNUM)
7755 | (1 << IP_REGNUM)
7756 | (1 << LR_REGNUM)
7757 | (1 << PC_REGNUM);
7759 /* Volatile functions do not return, so there
7760 is no need to save any other registers. */
7761 if (IS_VOLATILE (func_type))
7762 return save_reg_mask;
7764 save_reg_mask |= arm_compute_save_reg0_reg12_mask ();
7766 /* Decide if we need to save the link register.
7767 Interrupt routines have their own banked link register,
7768 so they never need to save it.
7769 Otherwise if we do not use the link register we do not need to save
7770 it. If we are pushing other registers onto the stack however, we
7771 can save an instruction in the epilogue by pushing the link register
7772 now and then popping it back into the PC. This incurs extra memory
7773 accesses though, so we only do it when optimising for size, and only
7774 if we know that we will not need a fancy return sequence. */
7775 if (regs_ever_live [LR_REGNUM]
7776 || (save_reg_mask
7777 && optimize_size
7778 && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL))
7779 save_reg_mask |= 1 << LR_REGNUM;
7781 if (cfun->machine->lr_save_eliminated)
7782 save_reg_mask &= ~ (1 << LR_REGNUM);
7784 return save_reg_mask;
7787 /* Generate a function exit sequence. If REALLY_RETURN is true, then do
7788 everything bar the final return instruction. */
7789 const char *
7790 output_return_instruction (rtx operand, int really_return, int reverse)
7792 char conditional[10];
7793 char instr[100];
7794 int reg;
7795 unsigned long live_regs_mask;
7796 unsigned long func_type;
7798 func_type = arm_current_func_type ();
7800 if (IS_NAKED (func_type))
7801 return "";
7803 if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
7805 /* If this function was declared non-returning, and we have found a tail
7806 call, then we have to trust that the called function won't return. */
7807 if (really_return)
7809 rtx ops[2];
7811 /* Otherwise, trap an attempted return by aborting. */
7812 ops[0] = operand;
7813 ops[1] = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)"
7814 : "abort");
7815 assemble_external_libcall (ops[1]);
7816 output_asm_insn (reverse ? "bl%D0\t%a1" : "bl%d0\t%a1", ops);
7819 return "";
7822 if (current_function_calls_alloca && !really_return)
7823 abort ();
7825 sprintf (conditional, "%%?%%%c0", reverse ? 'D' : 'd');
7827 return_used_this_function = 1;
7829 live_regs_mask = arm_compute_save_reg_mask ();
7831 if (live_regs_mask)
7833 const char * return_reg;
7835 /* If we do not have any special requirements for function exit
7836 (e.g. interworking or ISR) then we can load the return address
7837 directly into the PC. Otherwise we must load it into LR. */
7838 if (really_return
7839 && ! TARGET_INTERWORK)
7840 return_reg = reg_names[PC_REGNUM];
7841 else
7842 return_reg = reg_names[LR_REGNUM];
7844 if ((live_regs_mask & (1 << IP_REGNUM)) == (1 << IP_REGNUM))
7845 /* There are two possible reasons for the IP register being saved.
7846 Either a stack frame was created, in which case IP contains the
7847 old stack pointer, or an ISR routine corrupted it. If this is an
7848 ISR routine then just restore IP, otherwise restore IP into SP. */
7849 if (! IS_INTERRUPT (func_type))
7851 live_regs_mask &= ~ (1 << IP_REGNUM);
7852 live_regs_mask |= (1 << SP_REGNUM);
7855 /* On some ARM architectures it is faster to use LDR rather than
7856 LDM to load a single register. On other architectures, the
7857 cost is the same. In 26 bit mode, or for exception handlers,
7858 we have to use LDM to load the PC so that the CPSR is also
7859 restored. */
7860 for (reg = 0; reg <= LAST_ARM_REGNUM; reg++)
7862 if (live_regs_mask == (unsigned int)(1 << reg))
7863 break;
7865 if (reg <= LAST_ARM_REGNUM
7866 && (reg != LR_REGNUM
7867 || ! really_return
7868 || (TARGET_APCS_32 && ! IS_INTERRUPT (func_type))))
7870 sprintf (instr, "ldr%s\t%%|%s, [%%|sp], #4", conditional,
7871 (reg == LR_REGNUM) ? return_reg : reg_names[reg]);
7873 else
7875 char *p;
7876 int first = 1;
7878 /* Generate the load multiple instruction to restore the registers. */
7879 if (frame_pointer_needed)
7880 sprintf (instr, "ldm%sea\t%%|fp, {", conditional);
7881 else if (live_regs_mask & (1 << SP_REGNUM))
7882 sprintf (instr, "ldm%sfd\t%%|sp, {", conditional);
7883 else
7884 sprintf (instr, "ldm%sfd\t%%|sp!, {", conditional);
7886 p = instr + strlen (instr);
7888 for (reg = 0; reg <= SP_REGNUM; reg++)
7889 if (live_regs_mask & (1 << reg))
7891 int l = strlen (reg_names[reg]);
7893 if (first)
7894 first = 0;
7895 else
7897 memcpy (p, ", ", 2);
7898 p += 2;
7901 memcpy (p, "%|", 2);
7902 memcpy (p + 2, reg_names[reg], l);
7903 p += l + 2;
7906 if (live_regs_mask & (1 << LR_REGNUM))
7908 sprintf (p, "%s%%|%s}", first ? "" : ", ", return_reg);
7909 /* Decide if we need to add the ^ symbol to the end of the
7910 register list. This causes the saved condition codes
7911 register to be copied into the current condition codes
7912 register. We do the copy if we are conforming to the 32-bit
7913 ABI and this is an interrupt function, or if we are
7914 conforming to the 26-bit ABI. There is a special case for
7915 the 26-bit ABI however, which is if we are writing back the
7916 stack pointer but not loading the PC. In this case adding
7917 the ^ symbol would create a type 2 LDM instruction, where
7918 writeback is UNPREDICTABLE. We are safe in leaving the ^
7919 character off in this case however, since the actual return
7920 instruction will be a MOVS which will restore the CPSR. */
7921 if ((TARGET_APCS_32 && IS_INTERRUPT (func_type))
7922 || (! TARGET_APCS_32 && really_return))
7923 strcat (p, "^");
7925 else
7926 strcpy (p, "}");
7929 output_asm_insn (instr, & operand);
7931 /* See if we need to generate an extra instruction to
7932 perform the actual function return. */
7933 if (really_return
7934 && func_type != ARM_FT_INTERWORKED
7935 && (live_regs_mask & (1 << LR_REGNUM)) != 0)
7937 /* The return has already been handled
7938 by loading the LR into the PC. */
7939 really_return = 0;
7943 if (really_return)
7945 switch ((int) ARM_FUNC_TYPE (func_type))
7947 case ARM_FT_ISR:
7948 case ARM_FT_FIQ:
7949 sprintf (instr, "sub%ss\t%%|pc, %%|lr, #4", conditional);
7950 break;
7952 case ARM_FT_INTERWORKED:
7953 sprintf (instr, "bx%s\t%%|lr", conditional);
7954 break;
7956 case ARM_FT_EXCEPTION:
7957 sprintf (instr, "mov%ss\t%%|pc, %%|lr", conditional);
7958 break;
7960 default:
7961 /* ARMv5 implementations always provide BX, so interworking
7962 is the default unless APCS-26 is in use. */
7963 if ((insn_flags & FL_ARCH5) != 0 && TARGET_APCS_32)
7964 sprintf (instr, "bx%s\t%%|lr", conditional);
7965 else
7966 sprintf (instr, "mov%s%s\t%%|pc, %%|lr",
7967 conditional, TARGET_APCS_32 ? "" : "s");
7968 break;
7971 output_asm_insn (instr, & operand);
7974 return "";
7977 /* Write the function name into the code section, directly preceding
7978 the function prologue.
7980 Code will be output similar to this:
7982 .ascii "arm_poke_function_name", 0
7983 .align
7985 .word 0xff000000 + (t1 - t0)
7986 arm_poke_function_name
7987 mov ip, sp
7988 stmfd sp!, {fp, ip, lr, pc}
7989 sub fp, ip, #4
7991 When performing a stack backtrace, code can inspect the value
7992 of 'pc' stored at 'fp' + 0. If the trace function then looks
7993 at location pc - 12 and the top 8 bits are set, then we know
7994 that there is a function name embedded immediately preceding this
7995 location, and that its length is ((pc[-3]) & ~0xff000000).
7997 We assume that pc is declared as a pointer to an unsigned long.
7999 It is of no benefit to output the function name if we are assembling
8000 a leaf function. These function types will not contain a stack
8001 backtrace structure, therefore it is not possible to determine the
8002 function name. */
8003 void
8004 arm_poke_function_name (FILE *stream, const char *name)
8006 unsigned long alignlength;
8007 unsigned long length;
8008 rtx x;
8010 length = strlen (name) + 1;
8011 alignlength = ROUND_UP_WORD (length);
8013 ASM_OUTPUT_ASCII (stream, name, length);
8014 ASM_OUTPUT_ALIGN (stream, 2);
8015 x = GEN_INT ((unsigned HOST_WIDE_INT) 0xff000000 + alignlength);
8016 assemble_aligned_integer (UNITS_PER_WORD, x);
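/* A minimal sketch (not part of GCC) of how a stack-walking routine
   might recover the embedded name, assuming the layout emitted above
   and that the padded length occupies the low 24 bits of the marker
   word.  The function find_embedded_name is hypothetical.  */
#if 0
static const char *
find_embedded_name (const unsigned long *entry)
{
  /* The marker word sits immediately before the function's entry point.  */
  unsigned long marker = entry[-1];

  if ((marker & 0xff000000) != 0xff000000)
    return 0; /* No name was poked in.  */

  /* The padded, NUL-terminated name ends just before the marker word.  */
  return (const char *) &entry[-1] - (marker & 0x00ffffff);
}
#endif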
8019 /* Place some comments into the assembler stream
8020 describing the current function. */
8021 static void
8022 arm_output_function_prologue (FILE *f, HOST_WIDE_INT frame_size)
8024 unsigned long func_type;
8026 if (!TARGET_ARM)
8028 thumb_output_function_prologue (f, frame_size);
8029 return;
8032 /* Sanity check. */
8033 if (arm_ccfsm_state || arm_target_insn)
8034 abort ();
8036 func_type = arm_current_func_type ();
8038 switch ((int) ARM_FUNC_TYPE (func_type))
8040 default:
8041 case ARM_FT_NORMAL:
8042 break;
8043 case ARM_FT_INTERWORKED:
8044 asm_fprintf (f, "\t%@ Function supports interworking.\n");
8045 break;
8046 case ARM_FT_EXCEPTION_HANDLER:
8047 asm_fprintf (f, "\t%@ C++ Exception Handler.\n");
8048 break;
8049 case ARM_FT_ISR:
8050 asm_fprintf (f, "\t%@ Interrupt Service Routine.\n");
8051 break;
8052 case ARM_FT_FIQ:
8053 asm_fprintf (f, "\t%@ Fast Interrupt Service Routine.\n");
8054 break;
8055 case ARM_FT_EXCEPTION:
8056 asm_fprintf (f, "\t%@ ARM Exception Handler.\n");
8057 break;
8060 if (IS_NAKED (func_type))
8061 asm_fprintf (f, "\t%@ Naked Function: prologue and epilogue provided by programmer.\n");
8063 if (IS_VOLATILE (func_type))
8064 asm_fprintf (f, "\t%@ Volatile: function does not return.\n");
8066 if (IS_NESTED (func_type))
8067 asm_fprintf (f, "\t%@ Nested: function declared inside another function.\n");
8069 asm_fprintf (f, "\t%@ args = %d, pretend = %d, frame = %wd\n",
8070 current_function_args_size,
8071 current_function_pretend_args_size, frame_size);
8073 asm_fprintf (f, "\t%@ frame_needed = %d, uses_anonymous_args = %d\n",
8074 frame_pointer_needed,
8075 cfun->machine->uses_anonymous_args);
8077 if (cfun->machine->lr_save_eliminated)
8078 asm_fprintf (f, "\t%@ link register save eliminated.\n");
8080 #ifdef AOF_ASSEMBLER
8081 if (flag_pic)
8082 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, PIC_OFFSET_TABLE_REGNUM);
8083 #endif
8085 return_used_this_function = 0;
8088 const char *
8089 arm_output_epilogue (int really_return)
8091 int reg;
8092 unsigned long saved_regs_mask;
8093 unsigned long func_type;
8094 /* Floats_offset is the offset from the "virtual" frame. In an APCS
8095 frame that is $fp + 4 for a non-variadic function. */
8096 int floats_offset = 0;
8097 rtx operands[3];
8098 int frame_size = arm_get_frame_size ();
8099 FILE * f = asm_out_file;
8100 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
8102 /* If we have already generated the return instruction
8103 then it is futile to generate anything else. */
8104 if (use_return_insn (FALSE) && return_used_this_function)
8105 return "";
8107 func_type = arm_current_func_type ();
8109 if (IS_NAKED (func_type))
8110 /* Naked functions don't have epilogues. */
8111 return "";
8113 if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
8115 rtx op;
8117 /* A volatile function should never return. Call abort. */
8118 op = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)" : "abort");
8119 assemble_external_libcall (op);
8120 output_asm_insn ("bl\t%a0", &op);
8122 return "";
8125 if (ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER
8126 && ! really_return)
8127 /* If we are throwing an exception, then we really must
8128 be doing a return, so we can't tail-call. */
8129 abort ();
8131 saved_regs_mask = arm_compute_save_reg_mask ();
8133 /* XXX We should adjust floats_offset for any anonymous args, and then
8134 re-adjust vfp_offset below to compensate. */
8136 /* Compute how far away the floats will be. */
8137 for (reg = 0; reg <= LAST_ARM_REGNUM; reg ++)
8138 if (saved_regs_mask & (1 << reg))
8139 floats_offset += 4;
8141 if (frame_pointer_needed)
8143 int vfp_offset = 4;
8145 if (arm_fpu_arch == FPUTYPE_FPA_EMU2)
8147 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
8148 if (regs_ever_live[reg] && !call_used_regs[reg])
8150 floats_offset += 12;
8151 asm_fprintf (f, "\tldfe\t%r, [%r, #-%d]\n",
8152 reg, FP_REGNUM, floats_offset - vfp_offset);
8155 else
8157 int start_reg = LAST_ARM_FP_REGNUM;
8159 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
8161 if (regs_ever_live[reg] && !call_used_regs[reg])
8163 floats_offset += 12;
8165 /* We can't unstack more than four registers at once. */
8166 if (start_reg - reg == 3)
8168 asm_fprintf (f, "\tlfm\t%r, 4, [%r, #-%d]\n",
8169 reg, FP_REGNUM, floats_offset - vfp_offset);
8170 start_reg = reg - 1;
8173 else
8175 if (reg != start_reg)
8176 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
8177 reg + 1, start_reg - reg,
8178 FP_REGNUM, floats_offset - vfp_offset);
8179 start_reg = reg - 1;
8183 /* Just in case the last register checked also needs unstacking. */
8184 if (reg != start_reg)
8185 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
8186 reg + 1, start_reg - reg,
8187 FP_REGNUM, floats_offset - vfp_offset);
8190 /* saved_regs_mask should contain the IP, which at the time of stack
8191 frame generation actually contains the old stack pointer. So a
8192 quick way to unwind the stack is just to pop the IP register directly
8193 into the stack pointer. */
8194 if ((saved_regs_mask & (1 << IP_REGNUM)) == 0)
8195 abort ();
8196 saved_regs_mask &= ~ (1 << IP_REGNUM);
8197 saved_regs_mask |= (1 << SP_REGNUM);
8199 /* There are two registers left in saved_regs_mask - LR and PC. We
8200 only need to restore the LR register (the return address), but to
8201 save time we can load it directly into the PC, unless we need a
8202 special function exit sequence, or we are not really returning. */
8203 if (really_return && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL)
8204 /* Delete the LR from the register mask, so that the LR on
8205 the stack is loaded into the PC in the register mask. */
8206 saved_regs_mask &= ~ (1 << LR_REGNUM);
8207 else
8208 saved_regs_mask &= ~ (1 << PC_REGNUM);
8210 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, saved_regs_mask);
8212 if (IS_INTERRUPT (func_type))
8213 /* Interrupt handlers will have pushed the
8214 IP onto the stack, so restore it now. */
8215 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, 1 << IP_REGNUM);
8217 else
8219 /* Restore stack pointer if necessary. */
8220 if (frame_size + current_function_outgoing_args_size != 0)
8222 operands[0] = operands[1] = stack_pointer_rtx;
8223 operands[2] = GEN_INT (frame_size
8224 + current_function_outgoing_args_size);
8225 output_add_immediate (operands);
8228 if (arm_fpu_arch == FPUTYPE_FPA_EMU2)
8230 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
8231 if (regs_ever_live[reg] && !call_used_regs[reg])
8232 asm_fprintf (f, "\tldfe\t%r, [%r], #12\n",
8233 reg, SP_REGNUM);
8235 else
8237 int start_reg = FIRST_ARM_FP_REGNUM;
8239 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
8241 if (regs_ever_live[reg] && !call_used_regs[reg])
8243 if (reg - start_reg == 3)
8245 asm_fprintf (f, "\tlfmfd\t%r, 4, [%r]!\n",
8246 start_reg, SP_REGNUM);
8247 start_reg = reg + 1;
8250 else
8252 if (reg != start_reg)
8253 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
8254 start_reg, reg - start_reg,
8255 SP_REGNUM);
8257 start_reg = reg + 1;
8261 /* Just in case the last register checked also needs unstacking. */
8262 if (reg != start_reg)
8263 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
8264 start_reg, reg - start_reg, SP_REGNUM);
8267 /* If we can, restore the LR into the PC. */
8268 if (ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
8269 && really_return
8270 && current_function_pretend_args_size == 0
8271 && saved_regs_mask & (1 << LR_REGNUM))
8273 saved_regs_mask &= ~ (1 << LR_REGNUM);
8274 saved_regs_mask |= (1 << PC_REGNUM);
8277 /* Load the registers off the stack. If we only have one register
8278 to load use the LDR instruction - it is faster. */
8279 if (saved_regs_mask == (1 << LR_REGNUM))
8281 /* The exception handler ignores the LR, so we do
8282 not really need to load it off the stack. */
8283 if (eh_ofs)
8284 asm_fprintf (f, "\tadd\t%r, %r, #4\n", SP_REGNUM, SP_REGNUM);
8285 else
8286 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM, SP_REGNUM);
8288 else if (saved_regs_mask)
8290 if (saved_regs_mask & (1 << SP_REGNUM))
8291 /* Note - write back to the stack register is not enabled
8292 (ie "ldmfd sp!..."). We know that the stack pointer is
8293 in the list of registers and if we add writeback the
8294 instruction becomes UNPREDICTABLE. */
8295 print_multi_reg (f, "ldmfd\t%r", SP_REGNUM, saved_regs_mask);
8296 else
8297 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, saved_regs_mask);
8300 if (current_function_pretend_args_size)
8302 /* Unwind the pre-pushed regs. */
8303 operands[0] = operands[1] = stack_pointer_rtx;
8304 operands[2] = GEN_INT (current_function_pretend_args_size);
8305 output_add_immediate (operands);
8309 #if 0
8310 if (ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER)
8311 /* Adjust the stack to remove the exception handler stuff. */
8312 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
8313 REGNO (eh_ofs));
8314 #endif
8316 if (! really_return
8317 || (ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
8318 && current_function_pretend_args_size == 0
8319 && saved_regs_mask & (1 << PC_REGNUM)))
8320 return "";
8322 /* Generate the return instruction. */
8323 switch ((int) ARM_FUNC_TYPE (func_type))
8325 case ARM_FT_EXCEPTION_HANDLER:
8326 /* Even in 26-bit mode we do a mov (rather than a movs)
8327 because we don't have the PSR bits set in the address. */
8328 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, EXCEPTION_LR_REGNUM);
8329 break;
8331 case ARM_FT_ISR:
8332 case ARM_FT_FIQ:
8333 asm_fprintf (f, "\tsubs\t%r, %r, #4\n", PC_REGNUM, LR_REGNUM);
8334 break;
8336 case ARM_FT_EXCEPTION:
8337 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
8338 break;
8340 case ARM_FT_INTERWORKED:
8341 asm_fprintf (f, "\tbx\t%r\n", LR_REGNUM);
8342 break;
8344 default:
8345 if (frame_pointer_needed)
8346 /* If we used the frame pointer then the return address
8347 will have been loaded off the stack directly into the
8348 PC, so there is no need to issue a MOV instruction
8349 here. */
8351 else if (current_function_pretend_args_size == 0
8352 && (saved_regs_mask & (1 << LR_REGNUM)))
8353 /* Similarly we may have been able to load LR into the PC
8354 even if we did not create a stack frame. */
8356 else if (TARGET_APCS_32)
8357 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, LR_REGNUM);
8358 else
8359 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
8360 break;
8363 return "";
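/* Added illustration: for a normal function with a frame pointer the
   code above typically emits a single

        ldmea   fp, {..., fp, sp, pc}

   which restores the call-saved registers, unwinds SP from the saved
   IP slot and returns, all in one instruction.  */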
8366 static void
8367 arm_output_function_epilogue (FILE *file ATTRIBUTE_UNUSED,
8368 HOST_WIDE_INT frame_size)
8370 if (TARGET_THUMB)
8372 /* ??? Probably not safe to set this here, since it assumes that a
8373 function will be emitted as assembly immediately after we generate
8374 RTL for it. This does not happen for inline functions. */
8375 return_used_this_function = 0;
8377 else
8379 /* We need to take into account any stack-frame rounding. */
8380 frame_size = arm_get_frame_size ();
8382 if (use_return_insn (FALSE)
8383 && return_used_this_function
8384 && (frame_size + current_function_outgoing_args_size) != 0
8385 && !frame_pointer_needed)
8386 abort ();
8388 /* Reset the ARM-specific per-function variables. */
8389 after_arm_reorg = 0;
8393 /* Generate and emit an insn that we will recognize as a push_multi.
8394 Unfortunately, since this insn does not reflect the actual semantics of
8395 the operation very well, we need to annotate the insn for the benefit
8396 of DWARF2 frame unwind information. */
8397 static rtx
8398 emit_multi_reg_push (int mask)
8400 int num_regs = 0;
8401 int num_dwarf_regs;
8402 int i, j;
8403 rtx par;
8404 rtx dwarf;
8405 int dwarf_par_index;
8406 rtx tmp, reg;
8408 for (i = 0; i <= LAST_ARM_REGNUM; i++)
8409 if (mask & (1 << i))
8410 num_regs++;
8412 if (num_regs == 0 || num_regs > 16)
8413 abort ();
8415 /* We don't record the PC in the dwarf frame information. */
8416 num_dwarf_regs = num_regs;
8417 if (mask & (1 << PC_REGNUM))
8418 num_dwarf_regs--;
8420 /* For the body of the insn we are going to generate an UNSPEC in
8421 parallel with several USEs. This allows the insn to be recognized
8422 by the push_multi pattern in the arm.md file. The insn looks
8423 something like this:
8425 (parallel [
8426 (set (mem:BLK (pre_dec:BLK (reg:SI sp)))
8427 (unspec:BLK [(reg:SI r4)] UNSPEC_PUSH_MULT))
8428 (use (reg:SI 11 fp))
8429 (use (reg:SI 12 ip))
8430 (use (reg:SI 14 lr))
8431 (use (reg:SI 15 pc))
8432 ])
8434 For the frame note however, we try to be more explicit and actually
8435 show each register being stored into the stack frame, plus a (single)
8436 decrement of the stack pointer. We do it this way in order to be
8437 friendly to the stack unwinding code, which only wants to see a single
8438 stack decrement per instruction. The RTL we generate for the note looks
8439 something like this:
8441 (sequence [
8442 (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int -20)))
8443 (set (mem:SI (reg:SI sp)) (reg:SI r4))
8444 (set (mem:SI (plus:SI (reg:SI sp) (const_int 4))) (reg:SI fp))
8445 (set (mem:SI (plus:SI (reg:SI sp) (const_int 8))) (reg:SI ip))
8446 (set (mem:SI (plus:SI (reg:SI sp) (const_int 12))) (reg:SI lr))
8447 ])
8449 This sequence is used both by the code to support stack unwinding for
8450 exception handlers and the code to generate dwarf2 frame debugging. */
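/* Added worked example, derived from the code below: a MASK of 0xd810
   selects {r4, fp, ip, lr, pc}, giving num_regs == 5 and
   num_dwarf_regs == 4 (the PC is not recorded).  The dwarf SEQUENCE
   then holds the single stack decrement of -20 shown above, followed
   by the four frame-related stores of r4, fp, ip and lr.  */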
8452 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_regs));
8453 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (num_dwarf_regs + 1));
8454 dwarf_par_index = 1;
8456 for (i = 0; i <= LAST_ARM_REGNUM; i++)
8458 if (mask & (1 << i))
8460 reg = gen_rtx_REG (SImode, i);
8462 XVECEXP (par, 0, 0)
8463 = gen_rtx_SET (VOIDmode,
8464 gen_rtx_MEM (BLKmode,
8465 gen_rtx_PRE_DEC (BLKmode,
8466 stack_pointer_rtx)),
8467 gen_rtx_UNSPEC (BLKmode,
8468 gen_rtvec (1, reg),
8469 UNSPEC_PUSH_MULT));
8471 if (i != PC_REGNUM)
8473 tmp = gen_rtx_SET (VOIDmode,
8474 gen_rtx_MEM (SImode, stack_pointer_rtx),
8475 reg);
8476 RTX_FRAME_RELATED_P (tmp) = 1;
8477 XVECEXP (dwarf, 0, dwarf_par_index) = tmp;
8478 dwarf_par_index++;
8481 break;
8485 for (j = 1, i++; j < num_regs; i++)
8487 if (mask & (1 << i))
8489 reg = gen_rtx_REG (SImode, i);
8491 XVECEXP (par, 0, j) = gen_rtx_USE (VOIDmode, reg);
8493 if (i != PC_REGNUM)
8495 tmp = gen_rtx_SET (VOIDmode,
8496 gen_rtx_MEM (SImode,
8497 plus_constant (stack_pointer_rtx,
8498 4 * j)),
8499 reg);
8500 RTX_FRAME_RELATED_P (tmp) = 1;
8501 XVECEXP (dwarf, 0, dwarf_par_index++) = tmp;
8504 j++;
8508 par = emit_insn (par);
8510 tmp = gen_rtx_SET (SImode,
8511 stack_pointer_rtx,
8512 gen_rtx_PLUS (SImode,
8513 stack_pointer_rtx,
8514 GEN_INT (-4 * num_regs)));
8515 RTX_FRAME_RELATED_P (tmp) = 1;
8516 XVECEXP (dwarf, 0, 0) = tmp;
8518 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
8519 REG_NOTES (par));
8520 return par;
8523 static rtx
8524 emit_sfm (int base_reg, int count)
8526 rtx par;
8527 rtx dwarf;
8528 rtx tmp, reg;
8529 int i;
8531 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
8532 dwarf = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
8534 reg = gen_rtx_REG (XFmode, base_reg++);
8536 XVECEXP (par, 0, 0)
8537 = gen_rtx_SET (VOIDmode,
8538 gen_rtx_MEM (BLKmode,
8539 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
8540 gen_rtx_UNSPEC (BLKmode,
8541 gen_rtvec (1, reg),
8542 UNSPEC_PUSH_MULT));
8543 tmp
8544 = gen_rtx_SET (VOIDmode,
8545 gen_rtx_MEM (XFmode,
8546 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
8547 reg);
8548 RTX_FRAME_RELATED_P (tmp) = 1;
8549 XVECEXP (dwarf, 0, count - 1) = tmp;
8551 for (i = 1; i < count; i++)
8553 reg = gen_rtx_REG (XFmode, base_reg++);
8554 XVECEXP (par, 0, i) = gen_rtx_USE (VOIDmode, reg);
8556 tmp = gen_rtx_SET (VOIDmode,
8557 gen_rtx_MEM (XFmode,
8558 gen_rtx_PRE_DEC (BLKmode,
8559 stack_pointer_rtx)),
8560 reg);
8561 RTX_FRAME_RELATED_P (tmp) = 1;
8562 XVECEXP (dwarf, 0, count - i - 1) = tmp;
8565 par = emit_insn (par);
8566 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
8567 REG_NOTES (par));
8568 return par;
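/* Added note, inferred from the code above: SFM, like STM, stores the
   lowest-numbered register at the lowest address, and each XFmode slot
   occupies 12 bytes.  The dwarf note is therefore filled from index
   COUNT - 1 downwards, so that BASE_REG ends up last.  For example,
   assuming the usual numbering where hard register 16 is f0,
   emit_sfm (16, 4) saves f0-f3 and decrements the stack pointer by
   48 bytes in total.  */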
8571 /* Compute the distance from register FROM to register TO.
8572 These can be the arg pointer (26), the soft frame pointer (25),
8573 the stack pointer (13) or the hard frame pointer (11).
8574 Typical stack layout looks like this:
8576     old stack pointer -> |    |
8577                           ----
8578                          |    | \
8579                          |    |   saved arguments for
8580                          |    |   vararg functions
8581                          |    | /
8582                           --
8583 hard FP & arg pointer -> |    | \
8584                          |    |   stack
8585                          |    |   frame
8586                          |    | /
8587                           --
8588                          |    | \
8589                          |    |   call saved
8590                          |    |   registers
8591    soft frame pointer -> |    | /
8592                           --
8593                          |    | \
8594                          |    |   local
8595                          |    |   variables
8596                          |    | /
8597                           --
8598                          |    | \
8599                          |    |   outgoing
8600                          |    |   arguments
8601 current stack pointer -> |    | /
8602                           --
8604 For a given function some or all of these stack components
8605 may not be needed, giving rise to the possibility of
8606 eliminating some of the registers.
8608 The values returned by this function must reflect the behavior
8609 of arm_expand_prologue() and arm_compute_save_reg_mask().
8611 The sign of the number returned reflects the direction of stack
8612 growth, so the values are positive for all eliminations except
8613 from the soft frame pointer to the hard frame pointer. */
8614 unsigned int
8615 arm_compute_initial_elimination_offset (unsigned int from, unsigned int to)
8617 unsigned int local_vars = arm_get_frame_size ();
8618 unsigned int outgoing_args = current_function_outgoing_args_size;
8619 unsigned int stack_frame;
8620 unsigned int call_saved_registers;
8621 unsigned long func_type;
8623 func_type = arm_current_func_type ();
8625 /* Volatile functions never return, so there is
8626 no need to save call saved registers. */
8627 call_saved_registers = 0;
8628 if (! IS_VOLATILE (func_type))
8630 unsigned int reg_mask;
8631 unsigned int reg;
8633 /* Make sure that we compute which registers will be saved
8634 on the stack using the same algorithm that is used by
8635 arm_compute_save_reg_mask(). */
8636 reg_mask = arm_compute_save_reg0_reg12_mask ();
8638 /* Now count the number of bits set in reg_mask.
8639 For each set bit we need 4 bytes of stack space. */
8640 while (reg_mask)
8642 call_saved_registers += 4;
8643 reg_mask = reg_mask & ~ (reg_mask & - reg_mask);
8646 if ((regs_ever_live[LR_REGNUM]
8647 /* If optimizing for size, then we save the link register if
8648 any other integer register is saved. This gives a smaller
8649 return sequence. */
8650 || (optimize_size && call_saved_registers > 0))
8651 /* But if a stack frame is going to be created, the LR will
8652 be saved as part of that, so we do not need to allow for
8653 it here. */
8654 && ! frame_pointer_needed)
8655 call_saved_registers += 4;
8657 /* If the hard floating point registers are going to be
8658 used then they must be saved on the stack as well.
8659 Each register occupies 12 bytes of stack space. */
8660 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg ++)
8661 if (regs_ever_live[reg] && ! call_used_regs[reg])
8662 call_saved_registers += 12;
8665 /* The stack frame contains 4 registers - the old frame pointer,
8666 the old stack pointer, the return address and PC of the start
8667 of the function. */
8668 stack_frame = frame_pointer_needed ? 16 : 0;
8670 /* OK, now we have enough information to compute the distances.
8671 There must be an entry in these switch tables for each pair
8672 of registers in ELIMINABLE_REGS, even if some of the entries
8673 seem to be redundant or useless. */
8674 switch (from)
8676 case ARG_POINTER_REGNUM:
8677 switch (to)
8679 case THUMB_HARD_FRAME_POINTER_REGNUM:
8680 return 0;
8682 case FRAME_POINTER_REGNUM:
8683 /* This is the reverse of the soft frame pointer
8684 to hard frame pointer elimination below. */
8685 if (call_saved_registers == 0 && stack_frame == 0)
8686 return 0;
8687 return (call_saved_registers + stack_frame - 4);
8689 case ARM_HARD_FRAME_POINTER_REGNUM:
8690 /* If there is no stack frame then the hard
8691 frame pointer and the arg pointer coincide. */
8692 if (stack_frame == 0 && call_saved_registers != 0)
8693 return 0;
8694 /* FIXME: Not sure about this. Maybe we should always return 0 ? */
8695 return (frame_pointer_needed
8696 && current_function_needs_context
8697 && ! cfun->machine->uses_anonymous_args) ? 4 : 0;
8699 case STACK_POINTER_REGNUM:
8700 /* If nothing has been pushed on the stack at all
8701 then this will return -4. This *is* correct! */
8702 return call_saved_registers + stack_frame + local_vars + outgoing_args - 4;
8704 default:
8705 abort ();
8707 break;
8709 case FRAME_POINTER_REGNUM:
8710 switch (to)
8712 case THUMB_HARD_FRAME_POINTER_REGNUM:
8713 return 0;
8715 case ARM_HARD_FRAME_POINTER_REGNUM:
8716 /* The hard frame pointer points to the top entry in the
8717 stack frame. The soft frame pointer to the bottom entry
8718 in the stack frame. If there is no stack frame at all,
8719 then they are identical. */
8720 if (call_saved_registers == 0 && stack_frame == 0)
8721 return 0;
8722 return - (call_saved_registers + stack_frame - 4);
8724 case STACK_POINTER_REGNUM:
8725 return local_vars + outgoing_args;
8727 default:
8728 abort ();
8730 break;
8732 default:
8733 /* You cannot eliminate from the stack pointer.
8734 In theory you could eliminate from the hard frame
8735 pointer to the stack pointer, but this will never
8736 happen, since if a stack frame is not needed the
8737 hard frame pointer will never be used. */
8738 abort ();
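/* Added worked example of the arithmetic above: if
   call_saved_registers == 20, stack_frame == 16 (frame pointer
   needed), local_vars == 8 and outgoing_args == 0, then eliminating
   ARG_POINTER to STACK_POINTER yields 20 + 16 + 8 + 0 - 4 == 40,
   while FRAME_POINTER to ARM_HARD_FRAME_POINTER yields
   - (20 + 16 - 4) == -32.  */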
8742 /* Calculate the size of the stack frame, taking into account any
8743 padding that is required to ensure stack-alignment. */
8744 HOST_WIDE_INT
8745 arm_get_frame_size (void)
8747 int regno;
8749 int base_size = ROUND_UP_WORD (get_frame_size ());
8750 int entry_size = 0;
8751 unsigned long func_type = arm_current_func_type ();
8752 int leaf;
8754 if (! TARGET_ARM)
8755 abort ();
8757 if (! TARGET_ATPCS)
8758 return base_size;
8760 /* We need to know if we are a leaf function. Unfortunately, it
8761 is possible to be called after start_sequence has been called,
8762 which causes get_insns to return the insns for the sequence,
8763 not the function, which will cause leaf_function_p to return
8764 the incorrect result.
8766 To work around this, we cache the computed frame size. This
8767 works because we will only be calling RTL expanders that need
8768 to know about leaf functions once reload has completed, and the
8769 frame size cannot be changed after that time, so we can safely
8770 use the cached value. */
8772 if (reload_completed)
8773 return cfun->machine->frame_size;
8775 leaf = leaf_function_p ();
8777 /* A leaf function does not need any stack alignment if it has nothing
8778 on the stack. */
8779 if (leaf && base_size == 0)
8781 cfun->machine->frame_size = 0;
8782 return 0;
8785 /* We know that SP will be word aligned on entry, and we must
8786 preserve that condition at any subroutine call. But those are
8787 the only constraints. */
8789 /* Space for variadic functions. */
8790 if (current_function_pretend_args_size)
8791 entry_size += current_function_pretend_args_size;
8793 /* Space for saved registers. */
8794 entry_size += bit_count (arm_compute_save_reg_mask ()) * 4;
8796 /* Space for saved FPA registers. */
8797 if (! IS_VOLATILE (func_type))
8799 for (regno = FIRST_ARM_FP_REGNUM; regno <= LAST_ARM_FP_REGNUM; regno++)
8800 if (regs_ever_live[regno] && ! call_used_regs[regno])
8801 entry_size += 12;
8804 if ((entry_size + base_size + current_function_outgoing_args_size) & 7)
8805 base_size += 4;
8806 if ((entry_size + base_size + current_function_outgoing_args_size) & 7)
8807 abort ();
8809 cfun->machine->frame_size = base_size;
8811 return base_size;
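/* Added worked example: under TARGET_ATPCS, if entry_size == 12,
   base_size == 8 and there are no outgoing arguments, the total of 20
   is not 8-byte aligned, so base_size is padded to 12; the new total
   of 24 then passes the alignment check above.  */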
8814 /* Generate the prologue instructions for entry into an ARM function. */
8815 void
8816 arm_expand_prologue (void)
8818 int reg;
8819 rtx amount;
8820 rtx insn;
8821 rtx ip_rtx;
8822 unsigned long live_regs_mask;
8823 unsigned long func_type;
8824 int fp_offset = 0;
8825 int saved_pretend_args = 0;
8826 unsigned int args_to_push;
8828 func_type = arm_current_func_type ();
8830 /* Naked functions don't have prologues. */
8831 if (IS_NAKED (func_type))
8832 return;
8834 /* Make a copy of current_function_pretend_args_size as we may need to modify it locally. */
8835 args_to_push = current_function_pretend_args_size;
8837 /* Compute which registers we will have to save onto the stack. */
8838 live_regs_mask = arm_compute_save_reg_mask ();
8840 ip_rtx = gen_rtx_REG (SImode, IP_REGNUM);
8842 if (frame_pointer_needed)
8844 if (IS_INTERRUPT (func_type))
8846 /* Interrupt functions must not corrupt any registers.
8847 Creating a frame pointer however, corrupts the IP
8848 register, so we must push it first. */
8849 insn = emit_multi_reg_push (1 << IP_REGNUM);
8851 /* Do not set RTX_FRAME_RELATED_P on this insn.
8852 The dwarf stack unwinding code only wants to see one
8853 stack decrement per function, and this is not it. If
8854 this instruction is labeled as being part of the frame
8855 creation sequence then dwarf2out_frame_debug_expr will
8856 abort when it encounters the assignment of IP to FP
8857 later on, since the use of SP here establishes SP as
8858 the CFA register and not IP.
8860 Anyway this instruction is not really part of the stack
8861 frame creation although it is part of the prologue. */
8863 else if (IS_NESTED (func_type))
8865 /* The static chain register is the same as the IP register
8866 used as a scratch register during stack frame creation.
8867 To get around this we need to find somewhere to store IP
8868 whilst the frame is being created. We try the following
8869 places in order:
8871 1. The last argument register.
8872 2. A slot on the stack above the frame. (This only
8873 works if the function is not a varargs function).
8874 3. Register r3, after pushing the argument registers
8875 onto the stack.
8877 Note - we only need to tell the dwarf2 backend about the SP
8878 adjustment in the second variant; the static chain register
8879 doesn't need to be unwound, as it doesn't contain a value
8880 inherited from the caller. */
8882 if (regs_ever_live[3] == 0)
8884 insn = gen_rtx_REG (SImode, 3);
8885 insn = gen_rtx_SET (SImode, insn, ip_rtx);
8886 insn = emit_insn (insn);
8888 else if (args_to_push == 0)
8890 rtx dwarf;
8891 insn = gen_rtx_PRE_DEC (SImode, stack_pointer_rtx);
8892 insn = gen_rtx_MEM (SImode, insn);
8893 insn = gen_rtx_SET (VOIDmode, insn, ip_rtx);
8894 insn = emit_insn (insn);
8896 fp_offset = 4;
8898 /* Just tell the dwarf backend that we adjusted SP. */
8899 dwarf = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
8900 gen_rtx_PLUS (SImode, stack_pointer_rtx,
8901 GEN_INT (-fp_offset)));
8902 RTX_FRAME_RELATED_P (insn) = 1;
8903 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8904 dwarf, REG_NOTES (insn));
8906 else
8908 /* Store the args on the stack. */
8909 if (cfun->machine->uses_anonymous_args)
8910 insn = emit_multi_reg_push
8911 ((0xf0 >> (args_to_push / 4)) & 0xf);
8912 else
8913 insn = emit_insn
8914 (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
8915 GEN_INT (- args_to_push)));
8917 RTX_FRAME_RELATED_P (insn) = 1;
8919 saved_pretend_args = 1;
8920 fp_offset = args_to_push;
8921 args_to_push = 0;
8923 /* Now reuse r3 to preserve IP. */
8924 insn = gen_rtx_REG (SImode, 3);
8925 insn = gen_rtx_SET (SImode, insn, ip_rtx);
8926 (void) emit_insn (insn);
8930 if (fp_offset)
8932 insn = gen_rtx_PLUS (SImode, stack_pointer_rtx, GEN_INT (fp_offset));
8933 insn = gen_rtx_SET (SImode, ip_rtx, insn);
8935 else
8936 insn = gen_movsi (ip_rtx, stack_pointer_rtx);
8938 insn = emit_insn (insn);
8939 RTX_FRAME_RELATED_P (insn) = 1;
8942 if (args_to_push)
8944 /* Push the argument registers, or reserve space for them. */
8945 if (cfun->machine->uses_anonymous_args)
8946 insn = emit_multi_reg_push
8947 ((0xf0 >> (args_to_push / 4)) & 0xf);
8948 else
8949 insn = emit_insn
8950 (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
8951 GEN_INT (- args_to_push)));
8952 RTX_FRAME_RELATED_P (insn) = 1;
8955 /* If this is an interrupt service routine, and the link register
8956 is going to be pushed, and we are not creating a stack frame,
8957 (which would involve an extra push of IP and a pop in the epilogue)
8958 subtracting four from LR now will mean that the function return
8959 can be done with a single instruction. */
8960 if ((func_type == ARM_FT_ISR || func_type == ARM_FT_FIQ)
8961 && (live_regs_mask & (1 << LR_REGNUM)) != 0
8962 && ! frame_pointer_needed)
8963 emit_insn (gen_rtx_SET (SImode,
8964 gen_rtx_REG (SImode, LR_REGNUM),
8965 gen_rtx_PLUS (SImode,
8966 gen_rtx_REG (SImode, LR_REGNUM),
8967 GEN_INT (-4))));
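/* Added note: after this adjustment the epilogue can return from the
   ISR with a single "ldmfd sp!, {..., pc}^", popping the pre-biased
   address straight into the PC, instead of popping LR and then
   executing a separate "subs pc, lr, #4".  */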
8969 if (live_regs_mask)
8971 insn = emit_multi_reg_push (live_regs_mask);
8972 RTX_FRAME_RELATED_P (insn) = 1;
8975 if (! IS_VOLATILE (func_type))
8977 /* Save any floating point call-saved registers used by this
8978 function. */
8979 if (arm_fpu_arch == FPUTYPE_FPA_EMU2)
8981 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
8982 if (regs_ever_live[reg] && !call_used_regs[reg])
8984 insn = gen_rtx_PRE_DEC (XFmode, stack_pointer_rtx);
8985 insn = gen_rtx_MEM (XFmode, insn);
8986 insn = emit_insn (gen_rtx_SET (VOIDmode, insn,
8987 gen_rtx_REG (XFmode, reg)));
8988 RTX_FRAME_RELATED_P (insn) = 1;
8991 else
8993 int start_reg = LAST_ARM_FP_REGNUM;
8995 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
8997 if (regs_ever_live[reg] && !call_used_regs[reg])
8999 if (start_reg - reg == 3)
9001 insn = emit_sfm (reg, 4);
9002 RTX_FRAME_RELATED_P (insn) = 1;
9003 start_reg = reg - 1;
9006 else
9008 if (start_reg != reg)
9010 insn = emit_sfm (reg + 1, start_reg - reg);
9011 RTX_FRAME_RELATED_P (insn) = 1;
9013 start_reg = reg - 1;
9017 if (start_reg != reg)
9019 insn = emit_sfm (reg + 1, start_reg - reg);
9020 RTX_FRAME_RELATED_P (insn) = 1;
9025 if (frame_pointer_needed)
9027 /* Create the new frame pointer. */
9028 insn = GEN_INT (-(4 + args_to_push + fp_offset));
9029 insn = emit_insn (gen_addsi3 (hard_frame_pointer_rtx, ip_rtx, insn));
9030 RTX_FRAME_RELATED_P (insn) = 1;
9032 if (IS_NESTED (func_type))
9034 /* Recover the static chain register. */
9035 if (regs_ever_live [3] == 0
9036 || saved_pretend_args)
9037 insn = gen_rtx_REG (SImode, 3);
9038 else /* if (current_function_pretend_args_size == 0) */
9040 insn = gen_rtx_PLUS (SImode, hard_frame_pointer_rtx,
9041 GEN_INT (4));
9042 insn = gen_rtx_MEM (SImode, insn);
9045 emit_insn (gen_rtx_SET (SImode, ip_rtx, insn));
9046 /* Add a USE to stop propagate_one_insn() from barfing. */
9047 emit_insn (gen_prologue_use (ip_rtx));
9051 amount = GEN_INT (-(arm_get_frame_size ()
9052 + current_function_outgoing_args_size));
9054 if (amount != const0_rtx)
9056 /* This add can produce multiple insns for a large constant, so we
9057 need to get tricky. */
9058 rtx last = get_last_insn ();
9059 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
9060 amount));
9061 do
9063 last = last ? NEXT_INSN (last) : get_insns ();
9064 RTX_FRAME_RELATED_P (last) = 1;
9066 while (last != insn);
9068 /* If the frame pointer is needed, emit a special barrier that
9069 will prevent the scheduler from moving stores to the frame
9070 before the stack adjustment. */
9071 if (frame_pointer_needed)
9072 insn = emit_insn (gen_stack_tie (stack_pointer_rtx,
9073 hard_frame_pointer_rtx));
9076 /* If we are profiling, make sure no instructions are scheduled before
9077 the call to mcount. Similarly if the user has requested no
9078 scheduling in the prologue. */
9079 if (current_function_profile || TARGET_NO_SCHED_PRO)
9080 emit_insn (gen_blockage ());
9082 /* If the link register is being kept alive, with the return address in it,
9083 then make sure that it does not get reused by the ce2 pass. */
9084 if ((live_regs_mask & (1 << LR_REGNUM)) == 0)
9086 emit_insn (gen_prologue_use (gen_rtx_REG (SImode, LR_REGNUM)));
9087 cfun->machine->lr_save_eliminated = 1;
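/* Added illustration: for a typical APCS frame the code above emits a
   prologue of the shape

        mov     ip, sp
        stmfd   sp!, {..., fp, ip, lr, pc}
        sub     fp, ip, #4
        sub     sp, sp, #<locals + outgoing args>

   matching the stack layout documented before
   arm_compute_initial_elimination_offset().  */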
9091 /* If CODE is 'd', then X is a condition operand and the instruction
9092 should only be executed if the condition is true.
9093 If CODE is 'D', then X is a condition operand and the instruction
9094 should only be executed if the condition is false: however, if the mode
9095 of the comparison is CCFPEmode, then always execute the instruction -- we
9096 do this because in these circumstances !GE does not necessarily imply LT;
9097 in these cases the instruction pattern will take care to make sure that
9098 an instruction containing %d will follow, thereby undoing the effects of
9099 doing this instruction unconditionally.
9100 If CODE is 'N' then X is a floating point operand that must be negated
9101 before output.
9102 If CODE is 'B' then output a bitwise inverted value of X (a const int).
9103 If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */
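/* Added examples of the simpler codes: for X == (const_int 5), %B
   prints -6 (the bitwise inverse); for X == (reg:DI r4), %M prints
   "{r4-r5}".  The 'Q', 'R' and 'H' codes are explained further
   below.  */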
9104 void
9105 arm_print_operand (FILE *stream, rtx x, int code)
9107 switch (code)
9109 case '@':
9110 fputs (ASM_COMMENT_START, stream);
9111 return;
9113 case '_':
9114 fputs (user_label_prefix, stream);
9115 return;
9117 case '|':
9118 fputs (REGISTER_PREFIX, stream);
9119 return;
9121 case '?':
9122 if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
9124 if (TARGET_THUMB || current_insn_predicate != NULL)
9125 abort ();
9127 fputs (arm_condition_codes[arm_current_cc], stream);
9129 else if (current_insn_predicate)
9131 enum arm_cond_code code;
9133 if (TARGET_THUMB)
9134 abort ();
9136 code = get_arm_condition_code (current_insn_predicate);
9137 fputs (arm_condition_codes[code], stream);
9139 return;
9141 case 'N':
9143 REAL_VALUE_TYPE r;
9144 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
9145 r = REAL_VALUE_NEGATE (r);
9146 fprintf (stream, "%s", fp_const_from_val (&r));
9148 return;
9150 case 'B':
9151 if (GET_CODE (x) == CONST_INT)
9153 HOST_WIDE_INT val;
9154 val = ARM_SIGN_EXTEND (~INTVAL (x));
9155 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
9157 else
9159 putc ('~', stream);
9160 output_addr_const (stream, x);
9162 return;
9164 case 'i':
9165 fprintf (stream, "%s", arithmetic_instr (x, 1));
9166 return;
9168 /* Truncate Cirrus shift counts. */
9169 case 's':
9170 if (GET_CODE (x) == CONST_INT)
9172 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0x3f);
9173 return;
9175 arm_print_operand (stream, x, 0);
9176 return;
9178 case 'I':
9179 fprintf (stream, "%s", arithmetic_instr (x, 0));
9180 return;
9182 case 'S':
9184 HOST_WIDE_INT val;
9185 const char * shift = shift_op (x, &val);
9187 if (shift)
9189 fprintf (stream, ", %s ", shift);
9190 if (val == -1)
9191 arm_print_operand (stream, XEXP (x, 1), 0);
9192 else
9193 fprintf (stream, "#" HOST_WIDE_INT_PRINT_DEC, val);
9196 return;
9198 /* An explanation of the 'Q', 'R' and 'H' register operands:
9200 In a pair of registers containing a DI or DF value the 'Q'
9201 operand returns the register number of the register containing
9202 the least significant part of the value. The 'R' operand returns
9203 the register number of the register containing the most
9204 significant part of the value.
9206 The 'H' operand returns the higher of the two register numbers.
9207 On a run where WORDS_BIG_ENDIAN is true the 'H' operand is the
9208 same as the 'Q' operand, since the most significant part of the
9209 value is held in the lower number register. The reverse is true
9210 on systems where WORDS_BIG_ENDIAN is false.
9212 The purpose of these operands is to distinguish between cases
9213 where the endian-ness of the values is important (for example
9214 when they are added together), and cases where the endian-ness
9215 is irrelevant, but the order of register operations is important.
9216 For example when loading a value from memory into a register
9217 pair, the endian-ness does not matter. Provided that the value
9218 from the lower memory address is put into the lower numbered
9219 register, and the value from the higher address is put into the
9220 higher numbered register, the load will work regardless of whether
9221 the value being loaded is big-wordian or little-wordian. The
9222 order of the two register loads can matter however, if the address
9223 of the memory location is actually held in one of the registers
9224 being overwritten by the load. */
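/* Added worked example: for a DImode value held in r4/r5 on a target
   where WORDS_BIG_ENDIAN is false, 'Q' prints r4 (the least
   significant word), 'R' prints r5 (the most significant word) and
   'H' always prints the higher-numbered register, r5.  */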
9225 case 'Q':
9226 if (REGNO (x) > LAST_ARM_REGNUM)
9227 abort ();
9228 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 1 : 0));
9229 return;
9231 case 'R':
9232 if (REGNO (x) > LAST_ARM_REGNUM)
9233 abort ();
9234 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 0 : 1));
9235 return;
9237 case 'H':
9238 if (REGNO (x) > LAST_ARM_REGNUM)
9239 abort ();
9240 asm_fprintf (stream, "%r", REGNO (x) + 1);
9241 return;
9243 case 'm':
9244 asm_fprintf (stream, "%r",
9245 GET_CODE (XEXP (x, 0)) == REG
9246 ? REGNO (XEXP (x, 0)) : REGNO (XEXP (XEXP (x, 0), 0)));
9247 return;
9249 case 'M':
9250 asm_fprintf (stream, "{%r-%r}",
9251 REGNO (x),
9252 REGNO (x) + ARM_NUM_REGS (GET_MODE (x)) - 1);
9253 return;
9255 case 'd':
9256 /* CONST_TRUE_RTX means always -- that's the default. */
9257 if (x == const_true_rtx)
9258 return;
9260 if (TARGET_ARM)
9261 fputs (arm_condition_codes[get_arm_condition_code (x)],
9262 stream);
9263 else
9264 fputs (thumb_condition_code (x, 0), stream);
9265 return;
9267 case 'D':
9268 /* CONST_TRUE_RTX means not always -- ie never. We shouldn't ever
9269 want to do that. */
9270 if (x == const_true_rtx)
9271 abort ();
9273 if (TARGET_ARM)
9274 fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
9275 (get_arm_condition_code (x))],
9276 stream);
9277 else
9278 fputs (thumb_condition_code (x, 1), stream);
9279 return;
9282 /* Cirrus registers can be accessed in a variety of ways:
9283 single floating point (f)
9284 double floating point (d)
9285 32-bit integer (fx)
9286 64-bit integer (dx). */
9287 case 'W': /* Cirrus register in F mode. */
9288 case 'X': /* Cirrus register in D mode. */
9289 case 'Y': /* Cirrus register in FX mode. */
9290 case 'Z': /* Cirrus register in DX mode. */
9291 if (GET_CODE (x) != REG || REGNO_REG_CLASS (REGNO (x)) != CIRRUS_REGS)
9292 abort ();
9294 fprintf (stream, "mv%s%s",
9295 code == 'W' ? "f"
9296 : code == 'X' ? "d"
9297 : code == 'Y' ? "fx" : "dx", reg_names[REGNO (x)] + 2);
9299 return;
9301 /* Print a Cirrus register using the mode implied by the register's own mode. */
9302 case 'V':
9304 int mode = GET_MODE (x);
9306 if (GET_CODE (x) != REG || REGNO_REG_CLASS (REGNO (x)) != CIRRUS_REGS)
9307 abort ();
9309 fprintf (stream, "mv%s%s",
9310 mode == DFmode ? "d"
9311 : mode == SImode ? "fx"
9312 : mode == DImode ? "dx"
9313 : "f", reg_names[REGNO (x)] + 2);
9315 return;
9318 default:
9319 if (x == 0)
9320 abort ();
9322 if (GET_CODE (x) == REG)
9323 asm_fprintf (stream, "%r", REGNO (x));
9324 else if (GET_CODE (x) == MEM)
9326 output_memory_reference_mode = GET_MODE (x);
9327 output_address (XEXP (x, 0));
9329 else if (GET_CODE (x) == CONST_DOUBLE)
9330 fprintf (stream, "#%s", fp_immediate_constant (x));
9331 else if (GET_CODE (x) == NEG)
9332 abort (); /* This should never happen now. */
9333 else
9335 fputc ('#', stream);
9336 output_addr_const (stream, x);
9341 #ifndef AOF_ASSEMBLER
9342 /* Target hook for assembling integer objects. The ARM version needs to
9343 handle word-sized values specially. */
9344 static bool
9345 arm_assemble_integer (rtx x, unsigned int size, int aligned_p)
9347 if (size == UNITS_PER_WORD && aligned_p)
9349 fputs ("\t.word\t", asm_out_file);
9350 output_addr_const (asm_out_file, x);
9352 /* Mark symbols as position independent. We only do this in the
9353 .text segment, not in the .data segment. */
9354 if (NEED_GOT_RELOC && flag_pic && making_const_table &&
9355 (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF))
9357 if (GET_CODE (x) == SYMBOL_REF
9358 && (CONSTANT_POOL_ADDRESS_P (x)
9359 || ENCODED_SHORT_CALL_ATTR_P (XSTR (x, 0))))
9360 fputs ("(GOTOFF)", asm_out_file);
9361 else if (GET_CODE (x) == LABEL_REF)
9362 fputs ("(GOTOFF)", asm_out_file);
9363 else
9364 fputs ("(GOT)", asm_out_file);
9366 fputc ('\n', asm_out_file);
9367 return true;
9370 return default_assemble_integer (x, size, aligned_p);
9372 #endif
9374 /* A finite state machine takes care of noticing whether or not instructions
9375 can be conditionally executed, and thus decrease execution time and code
9376 size by deleting branch instructions. The fsm is controlled by
9377 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
9379 /* The states of the fsm controlling condition codes are:
9380 0: normal, do nothing special
9381 1: make ASM_OUTPUT_OPCODE not output this instruction
9382 2: make ASM_OUTPUT_OPCODE not output this instruction
9383 3: make instructions conditional
9384 4: make instructions conditional
9386 State transitions (state->state by whom under condition):
9387 0 -> 1 final_prescan_insn if the `target' is a label
9388 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
9389 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
9390 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
9391 3 -> 0 (*targetm.asm_out.internal_label) if the `target' label is reached
9392 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
9393 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
9394 (the target insn is arm_target_insn).
9396 If the jump clobbers the conditions then we use states 2 and 4.
9398 A similar thing can be done with conditional return insns.
9400 XXX In case the `target' is an unconditional branch, this conditionalising
9401 of the instructions always reduces code size, but not always execution
9402 time. But then, I want to reduce the code size to somewhere near what
9403 /bin/cc produces. */
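/* Added illustration of the transformation this fsm performs: a
   sequence such as

        cmp     r0, #0
        beq     .L1
        add     r1, r1, #1
     .L1:

   is emitted instead as

        cmp     r0, #0
        addne   r1, r1, #1

   deleting the branch and conditionalizing the skipped instruction.  */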
9405 /* Returns the index of the ARM condition code string in
9406 `arm_condition_codes'. COMPARISON should be an rtx like
9407 `(eq (...) (...))'. */
9408 static enum arm_cond_code
9409 get_arm_condition_code (rtx comparison)
9411 enum machine_mode mode = GET_MODE (XEXP (comparison, 0));
9412 int code;
9413 enum rtx_code comp_code = GET_CODE (comparison);
9415 if (GET_MODE_CLASS (mode) != MODE_CC)
9416 mode = SELECT_CC_MODE (comp_code, XEXP (comparison, 0),
9417 XEXP (comparison, 1));
9419 switch (mode)
9421 case CC_DNEmode: code = ARM_NE; goto dominance;
9422 case CC_DEQmode: code = ARM_EQ; goto dominance;
9423 case CC_DGEmode: code = ARM_GE; goto dominance;
9424 case CC_DGTmode: code = ARM_GT; goto dominance;
9425 case CC_DLEmode: code = ARM_LE; goto dominance;
9426 case CC_DLTmode: code = ARM_LT; goto dominance;
9427 case CC_DGEUmode: code = ARM_CS; goto dominance;
9428 case CC_DGTUmode: code = ARM_HI; goto dominance;
9429 case CC_DLEUmode: code = ARM_LS; goto dominance;
9430 case CC_DLTUmode: code = ARM_CC;
9432 dominance:
9433 if (comp_code != EQ && comp_code != NE)
9434 abort ();
9436 if (comp_code == EQ)
9437 return ARM_INVERSE_CONDITION_CODE (code);
9438 return code;
9440 case CC_NOOVmode:
9441 switch (comp_code)
9443 case NE: return ARM_NE;
9444 case EQ: return ARM_EQ;
9445 case GE: return ARM_PL;
9446 case LT: return ARM_MI;
9447 default: abort ();
9450 case CC_Zmode:
9451 switch (comp_code)
9453 case NE: return ARM_NE;
9454 case EQ: return ARM_EQ;
9455 default: abort ();
9458 case CCFPEmode:
9459 case CCFPmode:
9460 /* These encodings assume that AC=1 in the FPA system control
9461 byte. This allows us to handle all cases except UNEQ and
9462 LTGT. */
9463 switch (comp_code)
9465 case GE: return ARM_GE;
9466 case GT: return ARM_GT;
9467 case LE: return ARM_LS;
9468 case LT: return ARM_MI;
9469 case NE: return ARM_NE;
9470 case EQ: return ARM_EQ;
9471 case ORDERED: return ARM_VC;
9472 case UNORDERED: return ARM_VS;
9473 case UNLT: return ARM_LT;
9474 case UNLE: return ARM_LE;
9475 case UNGT: return ARM_HI;
9476 case UNGE: return ARM_PL;
9477 /* UNEQ and LTGT do not have a representation. */
9478 case UNEQ: /* Fall through. */
9479 case LTGT: /* Fall through. */
9480 default: abort ();
9483 case CC_SWPmode:
9484 switch (comp_code)
9486 case NE: return ARM_NE;
9487 case EQ: return ARM_EQ;
9488 case GE: return ARM_LE;
9489 case GT: return ARM_LT;
9490 case LE: return ARM_GE;
9491 case LT: return ARM_GT;
9492 case GEU: return ARM_LS;
9493 case GTU: return ARM_CC;
9494 case LEU: return ARM_CS;
9495 case LTU: return ARM_HI;
9496 default: abort ();
9499 case CC_Cmode:
9500 switch (comp_code)
9502 case LTU: return ARM_CS;
9503 case GEU: return ARM_CC;
9504 default: abort ();
9507 case CCmode:
9508 switch (comp_code)
9510 case NE: return ARM_NE;
9511 case EQ: return ARM_EQ;
9512 case GE: return ARM_GE;
9513 case GT: return ARM_GT;
9514 case LE: return ARM_LE;
9515 case LT: return ARM_LT;
9516 case GEU: return ARM_CS;
9517 case GTU: return ARM_HI;
9518 case LEU: return ARM_LS;
9519 case LTU: return ARM_CC;
9520 default: abort ();
9523 default: abort ();
9526 abort ();
9529 void
9530 arm_final_prescan_insn (rtx insn)
9532 /* BODY will hold the body of INSN. */
9533 rtx body = PATTERN (insn);
9535 /* This will be 1 if trying to repeat the trick, and things need to be
9536 reversed if it appears to fail. */
9537 int reverse = 0;
9539 /* JUMP_CLOBBERS being one implies that the condition codes are clobbered
9540 if the branch is taken, even if the rtl suggests otherwise. It also
9541 means that we have to grub around within the jump expression to find
9542 out what the conditions are when the jump isn't taken. */
9543 int jump_clobbers = 0;
9545 /* If we start with a return insn, we only succeed if we find another one. */
9546 int seeking_return = 0;
9548 /* START_INSN will hold the insn from where we start looking. This is the
9549 first insn after the following code_label if REVERSE is true. */
9550 rtx start_insn = insn;
9552 /* If in state 4, check if the target branch is reached, in order to
9553 change back to state 0. */
9554 if (arm_ccfsm_state == 4)
9556 if (insn == arm_target_insn)
9558 arm_target_insn = NULL;
9559 arm_ccfsm_state = 0;
9561 return;
9564 /* If in state 3, it is possible to repeat the trick, if this insn is an
9565 unconditional branch to a label, and immediately following this branch
9566 is the previous target label which is only used once, and the label this
9567 branch jumps to is not too far off. */
9568 if (arm_ccfsm_state == 3)
9570 if (simplejump_p (insn))
9572 start_insn = next_nonnote_insn (start_insn);
9573 if (GET_CODE (start_insn) == BARRIER)
9575 /* XXX Isn't this always a barrier? */
9576 start_insn = next_nonnote_insn (start_insn);
9578 if (GET_CODE (start_insn) == CODE_LABEL
9579 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
9580 && LABEL_NUSES (start_insn) == 1)
9581 reverse = TRUE;
9582 else
9583 return;
9585 else if (GET_CODE (body) == RETURN)
9587 start_insn = next_nonnote_insn (start_insn);
9588 if (GET_CODE (start_insn) == BARRIER)
9589 start_insn = next_nonnote_insn (start_insn);
9590 if (GET_CODE (start_insn) == CODE_LABEL
9591 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
9592 && LABEL_NUSES (start_insn) == 1)
9594 reverse = TRUE;
9595 seeking_return = 1;
9597 else
9598 return;
9600 else
9601 return;
9604 if (arm_ccfsm_state != 0 && !reverse)
9605 abort ();
9606 if (GET_CODE (insn) != JUMP_INSN)
9607 return;
9609 /* This jump might be paralleled with a clobber of the condition codes;
9610 the jump should always come first. */
9611 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
9612 body = XVECEXP (body, 0, 0);
9614 #if 0
9615 /* If this is a conditional return then we don't want to know */
9616 if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
9617 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
9618 && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
9619 || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
9620 return;
9621 #endif
9623 if (reverse
9624 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
9625 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
9627 int insns_skipped;
9628 int fail = FALSE, succeed = FALSE;
9629 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
9630 int then_not_else = TRUE;
9631 rtx this_insn = start_insn, label = 0;
9633 /* If the jump cannot be done with one instruction, we cannot
9634 conditionally execute the instruction in the inverse case. */
9635 if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
9637 jump_clobbers = 1;
9638 return;
9641 /* Register the insn jumped to. */
9642 if (reverse)
9644 if (!seeking_return)
9645 label = XEXP (SET_SRC (body), 0);
9647 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
9648 label = XEXP (XEXP (SET_SRC (body), 1), 0);
9649 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
9651 label = XEXP (XEXP (SET_SRC (body), 2), 0);
9652 then_not_else = FALSE;
9654 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
9655 seeking_return = 1;
9656 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
9658 seeking_return = 1;
9659 then_not_else = FALSE;
9661 else
9662 abort ();
9664 /* See how many insns this branch skips, and what kind of insns. If all
9665 insns are okay, and the label or unconditional branch to the same
9666 label is not too far away, succeed. */
9667 for (insns_skipped = 0;
9668 !fail && !succeed && insns_skipped++ < max_insns_skipped;)
9670 rtx scanbody;
9672 this_insn = next_nonnote_insn (this_insn);
9673 if (!this_insn)
9674 break;
9676 switch (GET_CODE (this_insn))
9678 case CODE_LABEL:
9679 /* Succeed if it is the target label, otherwise fail since
9680 control falls in from somewhere else. */
9681 if (this_insn == label)
9683 if (jump_clobbers)
9685 arm_ccfsm_state = 2;
9686 this_insn = next_nonnote_insn (this_insn);
9688 else
9689 arm_ccfsm_state = 1;
9690 succeed = TRUE;
9692 else
9693 fail = TRUE;
9694 break;
9696 case BARRIER:
9697 /* Succeed if the following insn is the target label.
9698 Otherwise fail.
9699 If return insns are used then the last insn in a function
9700 will be a barrier. */
9701 this_insn = next_nonnote_insn (this_insn);
9702 if (this_insn && this_insn == label)
9704 if (jump_clobbers)
9706 arm_ccfsm_state = 2;
9707 this_insn = next_nonnote_insn (this_insn);
9709 else
9710 arm_ccfsm_state = 1;
9711 succeed = TRUE;
9713 else
9714 fail = TRUE;
9715 break;
9717 case CALL_INSN:
9718 /* If using 32-bit addresses the cc is not preserved over
9719 calls. */
9720 if (TARGET_APCS_32)
9722 /* Succeed if the following insn is the target label,
9723 or if the following two insns are a barrier and
9724 the target label. */
9725 this_insn = next_nonnote_insn (this_insn);
9726 if (this_insn && GET_CODE (this_insn) == BARRIER)
9727 this_insn = next_nonnote_insn (this_insn);
9729 if (this_insn && this_insn == label
9730 && insns_skipped < max_insns_skipped)
9732 if (jump_clobbers)
9734 arm_ccfsm_state = 2;
9735 this_insn = next_nonnote_insn (this_insn);
9737 else
9738 arm_ccfsm_state = 1;
9739 succeed = TRUE;
9741 else
9742 fail = TRUE;
9744 break;
9746 case JUMP_INSN:
9747 /* If this is an unconditional branch to the same label, succeed.
9748 If it is to another label, do nothing. If it is conditional,
9749 fail. */
9750 /* XXX Probably, the tests for SET and the PC are
9751 unnecessary. */
9753 scanbody = PATTERN (this_insn);
9754 if (GET_CODE (scanbody) == SET
9755 && GET_CODE (SET_DEST (scanbody)) == PC)
9757 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
9758 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
9760 arm_ccfsm_state = 2;
9761 succeed = TRUE;
9763 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
9764 fail = TRUE;
9766 /* Fail if a conditional return is undesirable (eg on a
9767 StrongARM), but still allow this if optimizing for size. */
9768 else if (GET_CODE (scanbody) == RETURN
9769 && !use_return_insn (TRUE)
9770 && !optimize_size)
9771 fail = TRUE;
9772 else if (GET_CODE (scanbody) == RETURN
9773 && seeking_return)
9775 arm_ccfsm_state = 2;
9776 succeed = TRUE;
9778 else if (GET_CODE (scanbody) == PARALLEL)
9780 switch (get_attr_conds (this_insn))
9782 case CONDS_NOCOND:
9783 break;
9784 default:
9785 fail = TRUE;
9786 break;
9789 else
9790 fail = TRUE; /* Unrecognized jump (eg epilogue). */
9792 break;
9794 case INSN:
9795 /* Instructions using or affecting the condition codes make it
9796 fail. */
9797 scanbody = PATTERN (this_insn);
9798 if (!(GET_CODE (scanbody) == SET
9799 || GET_CODE (scanbody) == PARALLEL)
9800 || get_attr_conds (this_insn) != CONDS_NOCOND)
9801 fail = TRUE;
9803 /* A conditional Cirrus instruction must be followed by
9804 a non-Cirrus instruction. However, since this function
9805 conditionalizes instructions, and since by the time we
9806 get here we can no longer add instructions (nops),
9807 because shorten_branches() has already been called, we
9808 simply disable the conditionalizing of Cirrus
9809 instructions, to be safe. */
9810 if (GET_CODE (scanbody) != USE
9811 && GET_CODE (scanbody) != CLOBBER
9812 && get_attr_cirrus (this_insn) != CIRRUS_NOT)
9813 fail = TRUE;
9814 break;
9816 default:
9817 break;
9820 if (succeed)
9822 if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
9823 arm_target_label = CODE_LABEL_NUMBER (label);
9824 else if (seeking_return || arm_ccfsm_state == 2)
9826 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
9828 this_insn = next_nonnote_insn (this_insn);
9829 if (this_insn && (GET_CODE (this_insn) == BARRIER
9830 || GET_CODE (this_insn) == CODE_LABEL))
9831 abort ();
9833 if (!this_insn)
9835 /* Oh dear! We ran off the end; give up. */
9836 recog (PATTERN (insn), insn, NULL);
9837 arm_ccfsm_state = 0;
9838 arm_target_insn = NULL;
9839 return;
9841 arm_target_insn = this_insn;
9843 else
9844 abort ();
9845 if (jump_clobbers)
9847 if (reverse)
9848 abort ();
9849 arm_current_cc =
9850 get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
9851 0), 0), 1));
9852 if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
9853 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
9854 if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
9855 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
9857 else
9859 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
9860 what it was. */
9861 if (!reverse)
9862 arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
9863 0));
9866 if (reverse || then_not_else)
9867 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
9870 /* Restore recog_data (getting the attributes of other insns can
9871 destroy this array, but final.c assumes that it remains intact
9872 across this call; since the insn has been recognized already we
9873 call recog directly). */
9874 recog (PATTERN (insn), insn, NULL);
9878 /* Returns true if REGNO is a valid register
9879 for holding a quantity of type MODE. */
9880 int
9881 arm_hard_regno_mode_ok (unsigned int regno, enum machine_mode mode)
9883 if (GET_MODE_CLASS (mode) == MODE_CC)
9884 return regno == CC_REGNUM;
9886 if (TARGET_THUMB)
9887 /* For the Thumb we only allow values bigger than SImode in
9888 registers 0 - 6, so that there is always a second low
9889 register available to hold the upper part of the value.
9890 We probably ought to ensure that the register is the
9891 start of an even numbered register pair. */
9892 return (ARM_NUM_REGS (mode) < 2) || (regno < LAST_LO_REGNUM);
9894 if (IS_CIRRUS_REGNUM (regno))
9895 /* We have outlawed SI values in Cirrus registers because they
9896 reside in the lower 32 bits, but SF values reside in the
9897 upper 32 bits. This causes gcc all sorts of grief. We can't
9898 even split the registers into pairs because Cirrus SI values
9899 get sign extended to 64bits-- aldyh. */
9900 return (GET_MODE_CLASS (mode) == MODE_FLOAT) || (mode == DImode);
9902 if (regno <= LAST_ARM_REGNUM)
9903 /* We allow any value to be stored in the general registers. */
9904 return 1;
9906 if ( regno == FRAME_POINTER_REGNUM
9907 || regno == ARG_POINTER_REGNUM)
9908 /* We only allow integers in the fake hard registers. */
9909 return GET_MODE_CLASS (mode) == MODE_INT;
9911 /* The only registers left are the FPA registers
9912 which we only allow to hold FP values. */
9913 return GET_MODE_CLASS (mode) == MODE_FLOAT
9914 && regno >= FIRST_ARM_FP_REGNUM
9915 && regno <= LAST_ARM_FP_REGNUM;
9918 enum reg_class
9919 arm_regno_class (int regno)
9921 if (TARGET_THUMB)
9923 if (regno == STACK_POINTER_REGNUM)
9924 return STACK_REG;
9925 if (regno == CC_REGNUM)
9926 return CC_REG;
9927 if (regno < 8)
9928 return LO_REGS;
9929 return HI_REGS;
9932 if ( regno <= LAST_ARM_REGNUM
9933 || regno == FRAME_POINTER_REGNUM
9934 || regno == ARG_POINTER_REGNUM)
9935 return GENERAL_REGS;
9937 if (regno == CC_REGNUM)
9938 return NO_REGS;
9940 if (IS_CIRRUS_REGNUM (regno))
9941 return CIRRUS_REGS;
9943 return FPA_REGS;
9946 /* Handle a special case when computing the offset
9947 of an argument from the frame pointer. */
9948 int
9949 arm_debugger_arg_offset (int value, rtx addr)
9951 rtx insn;
9953 /* We are only interested if dbxout_parms() failed to compute the offset. */
9954 if (value != 0)
9955 return 0;
9957 /* We can only cope with the case where the address is held in a register. */
9958 if (GET_CODE (addr) != REG)
9959 return 0;
9961 /* If we are using the frame pointer to point at the argument, then
9962 an offset of 0 is correct. */
9963 if (REGNO (addr) == (unsigned) HARD_FRAME_POINTER_REGNUM)
9964 return 0;
9966 /* If we are using the stack pointer to point at the
9967 argument, then an offset of 0 is correct. */
9968 if ((TARGET_THUMB || !frame_pointer_needed)
9969 && REGNO (addr) == SP_REGNUM)
9970 return 0;
9972 /* Oh dear. The argument is pointed to by a register rather
9973 than being held in a register, or being stored at a known
9974 offset from the frame pointer. Since GDB only understands
9975 those two kinds of argument we must translate the address
9976 held in the register into an offset from the frame pointer.
9977 We do this by searching through the insns for the function
9978 looking to see where this register gets its value. If the
9979 register is initialized from the frame pointer plus an offset
9980 then we are in luck and we can continue, otherwise we give up.
9982 This code is exercised by producing debugging information
9983 for a function with arguments like this:
9985 double func (double a, double b, int c, double d) {return d;}
9987 Without this code the stab for parameter 'd' will be set to
9988 an offset of 0 from the frame pointer, rather than 8. */
9990 /* The if() statement says:
9992 If the insn is a normal instruction
9993 and if the insn is setting the value in a register
9994 and if the register being set is the register holding the address of the argument
9995 and if the address is computed by an addition
9996 that involves adding to a register
9997 which is the frame pointer
9998 a constant integer
10000 then... */
10002 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
10004 if ( GET_CODE (insn) == INSN
10005 && GET_CODE (PATTERN (insn)) == SET
10006 && REGNO (XEXP (PATTERN (insn), 0)) == REGNO (addr)
10007 && GET_CODE (XEXP (PATTERN (insn), 1)) == PLUS
10008 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 0)) == REG
10009 && REGNO (XEXP (XEXP (PATTERN (insn), 1), 0)) == (unsigned) HARD_FRAME_POINTER_REGNUM
10010 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 1)) == CONST_INT
10013 value = INTVAL (XEXP (XEXP (PATTERN (insn), 1), 1));
10015 break;
10019 if (value == 0)
10021 debug_rtx (addr);
10022 warning ("unable to compute real location of stacked parameter");
10023 value = 8; /* XXX magic hack */
10026 return value;
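/* The pattern matched by the loop above is, schematically:

     (set (reg N) (plus (reg HARD_FRAME_POINTER_REGNUM) (const_int OFFSET)))

   where operand 0 of the SET is the register holding the argument's
   address; OFFSET is then the value reported to the debugger.  */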
10029 /* Recursively search through all of the blocks in a function
10030 checking to see if any of the variables created in that
10031 function match the RTX called 'orig'. If they do then
10032 replace them with the RTX called 'new'. */
10033 static void
10034 replace_symbols_in_block (tree block, rtx orig, rtx new)
10036 for (; block; block = BLOCK_CHAIN (block))
10038 tree sym;
10040 if (!TREE_USED (block))
10041 continue;
10043 for (sym = BLOCK_VARS (block); sym; sym = TREE_CHAIN (sym))
10045 if ( (DECL_NAME (sym) == 0 && TREE_CODE (sym) != TYPE_DECL)
10046 || DECL_IGNORED_P (sym)
10047 || TREE_CODE (sym) != VAR_DECL
10048 || DECL_EXTERNAL (sym)
10049 || !rtx_equal_p (DECL_RTL (sym), orig)
10051 continue;
10053 SET_DECL_RTL (sym, new);
10056 replace_symbols_in_block (BLOCK_SUBBLOCKS (block), orig, new);
10060 /* Return the number (counting from 0) of
10061 the least significant set bit in MASK. */
10063 inline static int
10064 number_of_first_bit_set (int mask)
10066 int bit;
10068 for (bit = 0;
10069 (mask & (1 << bit)) == 0;
10070 ++bit)
10071 continue;
10073 return bit;
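/* For example, number_of_first_bit_set (0x28) == 3, since bit 3 is the
   lowest bit set in binary 101000.  Note that a MASK of zero would walk
   the shift past bit 31, so callers must pass a non-zero mask -- which
   the callers below are careful to do.  */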
10076 /* Generate code to return from a thumb function.
10077 If 'reg_containing_return_addr' is -1, then the return address is
10078 actually on the stack, at the stack pointer. */
10079 static void
10080 thumb_exit (FILE *f, int reg_containing_return_addr, rtx eh_ofs)
10082 unsigned regs_available_for_popping;
10083 unsigned regs_to_pop;
10084 int pops_needed;
10085 unsigned available;
10086 unsigned required;
10087 int mode;
10088 int size;
10089 int restore_a4 = FALSE;
10091 /* Compute the registers we need to pop. */
10092 regs_to_pop = 0;
10093 pops_needed = 0;
10095 /* There is an assumption here that if eh_ofs is not NULL, the
10096 normal return address will have been pushed. */
10097 if (reg_containing_return_addr == -1 || eh_ofs)
10099 /* When we are generating a return for __builtin_eh_return,
10100 reg_containing_return_addr must specify the return regno. */
10101 if (eh_ofs && reg_containing_return_addr == -1)
10102 abort ();
10104 regs_to_pop |= 1 << LR_REGNUM;
10105 ++pops_needed;
10108 if (TARGET_BACKTRACE)
10110 /* Restore the (ARM) frame pointer and stack pointer. */
10111 regs_to_pop |= (1 << ARM_HARD_FRAME_POINTER_REGNUM) | (1 << SP_REGNUM);
10112 pops_needed += 2;
10115 /* If there is nothing to pop then just emit the BX instruction and
10116 return. */
10117 if (pops_needed == 0)
10119 if (eh_ofs)
10120 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
10122 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
10123 return;
10125 /* Otherwise if we are not supporting interworking and we have not created
10126 a backtrace structure and the function was not entered in ARM mode then
10127 just pop the return address straight into the PC. */
10128 else if (!TARGET_INTERWORK
10129 && !TARGET_BACKTRACE
10130 && !is_called_in_ARM_mode (current_function_decl))
10132 if (eh_ofs)
10134 asm_fprintf (f, "\tadd\t%r, #4\n", SP_REGNUM);
10135 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
10136 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
10138 else
10139 asm_fprintf (f, "\tpop\t{%r}\n", PC_REGNUM);
10141 return;
10144 /* Find out how many of the (return) argument registers we can corrupt. */
10145 regs_available_for_popping = 0;
10147 /* If returning via __builtin_eh_return, the bottom three registers
10148 all contain information needed for the return. */
10149 if (eh_ofs)
10150 size = 12;
10151 else
10153 #ifdef RTX_CODE
10154 /* Deduce the registers used from the function's
10155 return value. This is more reliable than examining
10156 regs_ever_live[] because that will be set if the register is
10157 ever used in the function, not just if the register is used
10158 to hold a return value. */
10160 if (current_function_return_rtx != 0)
10161 mode = GET_MODE (current_function_return_rtx);
10162 else
10163 #endif
10164 mode = DECL_MODE (DECL_RESULT (current_function_decl));
10166 size = GET_MODE_SIZE (mode);
10168 if (size == 0)
10170 /* In a void function we can use any argument register.
10171 In a function that returns a structure on the stack
10172 we can use the second and third argument registers. */
10173 if (mode == VOIDmode)
10174 regs_available_for_popping =
10175 (1 << ARG_REGISTER (1))
10176 | (1 << ARG_REGISTER (2))
10177 | (1 << ARG_REGISTER (3));
10178 else
10179 regs_available_for_popping =
10180 (1 << ARG_REGISTER (2))
10181 | (1 << ARG_REGISTER (3));
10183 else if (size <= 4)
10184 regs_available_for_popping =
10185 (1 << ARG_REGISTER (2))
10186 | (1 << ARG_REGISTER (3));
10187 else if (size <= 8)
10188 regs_available_for_popping =
10189 (1 << ARG_REGISTER (3));
10192 /* Match registers to be popped with registers into which we pop them. */
10193 for (available = regs_available_for_popping,
10194 required = regs_to_pop;
10195 required != 0 && available != 0;
10196 available &= ~(available & - available),
10197 required &= ~(required & - required))
10198 -- pops_needed;
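/* Note for the loop above: in two's complement X & -X isolates the
   lowest set bit of X, so each step expression strips one register from
   its mask; the loop therefore decrements pops_needed once for every
   register to be popped that can be paired with an available
   register.  */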
10200 /* If we have any popping registers left over, remove them. */
10201 if (available > 0)
10202 regs_available_for_popping &= ~available;
10204 /* Otherwise if we need another popping register we can use
10205 the fourth argument register. */
10206 else if (pops_needed)
10208 /* If we have not found any free argument registers and
10209 reg a4 contains the return address, we must move it. */
10210 if (regs_available_for_popping == 0
10211 && reg_containing_return_addr == LAST_ARG_REGNUM)
10213 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
10214 reg_containing_return_addr = LR_REGNUM;
10216 else if (size > 12)
10218 /* Register a4 is being used to hold part of the return value,
10219 but we have dire need of a free, low register. */
10220 restore_a4 = TRUE;
10222 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, LAST_ARG_REGNUM);
10225 if (reg_containing_return_addr != LAST_ARG_REGNUM)
10227 /* The fourth argument register is available. */
10228 regs_available_for_popping |= 1 << LAST_ARG_REGNUM;
10230 --pops_needed;
10234 /* Pop as many registers as we can. */
10235 thumb_pushpop (f, regs_available_for_popping, FALSE);
10237 /* Process the registers we popped. */
10238 if (reg_containing_return_addr == -1)
10240 /* The return address was popped into the lowest numbered register. */
10241 regs_to_pop &= ~(1 << LR_REGNUM);
10243 reg_containing_return_addr =
10244 number_of_first_bit_set (regs_available_for_popping);
10246 /* Remove this register from the mask of available registers, so that
10247 the return address will not be corrupted by further pops. */
10248 regs_available_for_popping &= ~(1 << reg_containing_return_addr);
10251 /* If we popped other registers then handle them here. */
10252 if (regs_available_for_popping)
10254 int frame_pointer;
10256 /* Work out which register currently contains the frame pointer. */
10257 frame_pointer = number_of_first_bit_set (regs_available_for_popping);
10259 /* Move it into the correct place. */
10260 asm_fprintf (f, "\tmov\t%r, %r\n",
10261 ARM_HARD_FRAME_POINTER_REGNUM, frame_pointer);
10263 /* (Temporarily) remove it from the mask of popped registers. */
10264 regs_available_for_popping &= ~(1 << frame_pointer);
10265 regs_to_pop &= ~(1 << ARM_HARD_FRAME_POINTER_REGNUM);
10267 if (regs_available_for_popping)
10269 int stack_pointer;
10271 /* We popped the stack pointer as well,
10272 find the register that contains it. */
10273 stack_pointer = number_of_first_bit_set (regs_available_for_popping);
10275 /* Move it into the stack register. */
10276 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, stack_pointer);
10278 /* At this point we have popped all necessary registers, so
10279 do not worry about restoring regs_available_for_popping
10280 to its correct value:
10282 assert (pops_needed == 0)
10283 assert (regs_available_for_popping == (1 << frame_pointer))
10284 assert (regs_to_pop == (1 << STACK_POINTER)) */
10286 else
10288 /* Since we have just moved the popped value into the frame
10289 pointer, the popping register is available for reuse, and
10290 we know that we still have the stack pointer left to pop. */
10291 regs_available_for_popping |= (1 << frame_pointer);
10295 /* If we still have registers left on the stack, but we no longer have
10296 any registers into which we can pop them, then we must move the return
10297 address into the link register and make available the register that
10298 contained it. */
10299 if (regs_available_for_popping == 0 && pops_needed > 0)
10301 regs_available_for_popping |= 1 << reg_containing_return_addr;
10303 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM,
10304 reg_containing_return_addr);
10306 reg_containing_return_addr = LR_REGNUM;
10309 /* If we have registers left on the stack then pop some more.
10310 We know that at most we will want to pop FP and SP. */
10311 if (pops_needed > 0)
10313 int popped_into;
10314 int move_to;
10316 thumb_pushpop (f, regs_available_for_popping, FALSE);
10318 /* We have popped either FP or SP.
10319 Move whichever one it is into the correct register. */
10320 popped_into = number_of_first_bit_set (regs_available_for_popping);
10321 move_to = number_of_first_bit_set (regs_to_pop);
10323 asm_fprintf (f, "\tmov\t%r, %r\n", move_to, popped_into);
10325 regs_to_pop &= ~(1 << move_to);
10327 --pops_needed;
10330 /* If we still have not popped everything then we must have only
10331 had one register available to us and we are now popping the SP. */
10332 if (pops_needed > 0)
10334 int popped_into;
10336 thumb_pushpop (f, regs_available_for_popping, FALSE);
10338 popped_into = number_of_first_bit_set (regs_available_for_popping);
10340 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, popped_into);
10342 /* assert (regs_to_pop == (1 << STACK_POINTER))
10343 assert (pops_needed == 1) */
10347 /* If necessary restore the a4 register. */
10348 if (restore_a4)
10350 if (reg_containing_return_addr != LR_REGNUM)
10352 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
10353 reg_containing_return_addr = LR_REGNUM;
10356 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
10359 if (eh_ofs)
10360 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
10362 /* Return to caller. */
10363 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
10366 /* Emit code to push or pop registers to or from the stack. */
10367 static void
10368 thumb_pushpop (FILE *f, int mask, int push)
10370 int regno;
10371 int lo_mask = mask & 0xFF;
10373 if (lo_mask == 0 && !push && (mask & (1 << 15)))
10375 /* Special case. Do not generate a POP PC statement here, do it in
10376 thumb_exit (). */
10377 thumb_exit (f, -1, NULL_RTX);
10378 return;
10381 fprintf (f, "\t%s\t{", push ? "push" : "pop");
10383 /* Look at the low registers first. */
10384 for (regno = 0; regno <= LAST_LO_REGNUM; regno++, lo_mask >>= 1)
10386 if (lo_mask & 1)
10388 asm_fprintf (f, "%r", regno);
10390 if ((lo_mask & ~1) != 0)
10391 fprintf (f, ", ");
10395 if (push && (mask & (1 << LR_REGNUM)))
10397 /* Catch pushing the LR. */
10398 if (mask & 0xFF)
10399 fprintf (f, ", ");
10401 asm_fprintf (f, "%r", LR_REGNUM);
10403 else if (!push && (mask & (1 << PC_REGNUM)))
10405 /* Catch popping the PC. */
10406 if (TARGET_INTERWORK || TARGET_BACKTRACE)
10408 /* The PC is never popped directly; instead
10409 it is popped into r3 and then BX is used. */
10410 fprintf (f, "}\n");
10412 thumb_exit (f, -1, NULL_RTX);
10414 return;
10416 else
10418 if (mask & 0xFF)
10419 fprintf (f, ", ");
10421 asm_fprintf (f, "%r", PC_REGNUM);
10425 fprintf (f, "}\n");
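/* As a sketch of the output (assuming LR_REGNUM is 14, so a MASK of
   0x401F names r0-r4 plus the link register), a push is emitted as:

     push {r0, r1, r2, r3, r4, lr}  */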
10428 void
10429 thumb_final_prescan_insn (rtx insn)
10431 if (flag_print_asm_name)
10432 asm_fprintf (asm_out_file, "%@ 0x%04x\n",
10433 INSN_ADDRESSES (INSN_UID (insn)));
10437 thumb_shiftable_const (unsigned HOST_WIDE_INT val)
10439 unsigned HOST_WIDE_INT mask = 0xff;
10440 int i;
10442 if (val == 0) /* XXX */
10443 return 0;
10445 for (i = 0; i < 25; i++)
10446 if ((val & (mask << i)) == val)
10447 return 1;
10449 return 0;
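/* That is, the constant must be an 8-bit value shifted left by 0-24
   bits: 0xFF000000 (0xFF << 24) is accepted, while 0x101 spans nine
   bits and fits no single window, so it is rejected.  */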
10452 /* Returns nonzero if the current function contains,
10453 or might contain, a far jump. */
10455 thumb_far_jump_used_p (int in_prologue)
10457 rtx insn;
10459 /* This test is only important for leaf functions. */
10460 /* assert (!leaf_function_p ()); */
10462 /* If we have already decided that far jumps may be used,
10463 do not bother checking again, and always return true even if
10464 it turns out that they are not being used. Once we have made
10465 the decision that far jumps are present (and that hence the link
10466 register will be pushed onto the stack) we cannot go back on it. */
10467 if (cfun->machine->far_jump_used)
10468 return 1;
10470 /* If this function is not being called from the prologue/epilogue
10471 generation code then it must be being called from the
10472 INITIAL_ELIMINATION_OFFSET macro. */
10473 if (!in_prologue)
10475 /* In this case we know that we are being asked about the elimination
10476 of the arg pointer register. If that register is not being used,
10477 then there are no arguments on the stack, and we do not have to
10478 worry that a far jump might force the prologue to push the link
10479 register, changing the stack offsets. In this case we can just
10480 return false, since the presence of far jumps in the function will
10481 not affect stack offsets.
10483 If the arg pointer is live (or if it was live, but has now been
10484 eliminated and so set to dead) then we do have to test to see if
10485 the function might contain a far jump. This test can lead to some
10486 false negatives, since before reload is completed the length of
10487 branch instructions is not known, so gcc defaults to returning their
10488 longest length, which in turn sets the far jump attribute to true.
10490 A false negative will not result in bad code being generated, but it
10491 will result in a needless push and pop of the link register. We
10492 hope that this does not occur too often. */
10493 if (regs_ever_live [ARG_POINTER_REGNUM])
10494 cfun->machine->arg_pointer_live = 1;
10495 else if (!cfun->machine->arg_pointer_live)
10496 return 0;
10499 /* Check to see if the function contains a branch
10500 insn with the far jump attribute set. */
10501 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
10503 if (GET_CODE (insn) == JUMP_INSN
10504 /* Ignore tablejump patterns. */
10505 && GET_CODE (PATTERN (insn)) != ADDR_VEC
10506 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
10507 && get_attr_far_jump (insn) == FAR_JUMP_YES
10510 /* Record the fact that we have decided that
10511 the function does use far jumps. */
10512 cfun->machine->far_jump_used = 1;
10513 return 1;
10517 return 0;
10520 /* Return nonzero if FUNC must be entered in ARM mode. */
10522 is_called_in_ARM_mode (tree func)
10524 if (TREE_CODE (func) != FUNCTION_DECL)
10525 abort ();
10527 /* Ignore the problem of functions whose address is taken. */
10528 if (TARGET_CALLEE_INTERWORKING && TREE_PUBLIC (func))
10529 return TRUE;
10531 #ifdef ARM_PE
10532 return lookup_attribute ("interfacearm", DECL_ATTRIBUTES (func)) != NULL_TREE;
10533 #else
10534 return FALSE;
10535 #endif
10538 /* The bits which aren't usefully expanded as rtl. */
10539 const char *
10540 thumb_unexpanded_epilogue (void)
10542 int regno;
10543 int live_regs_mask = 0;
10544 int high_regs_pushed = 0;
10545 int leaf_function = leaf_function_p ();
10546 int had_to_push_lr;
10547 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
10549 if (return_used_this_function)
10550 return "";
10552 if (IS_NAKED (arm_current_func_type ()))
10553 return "";
10555 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
10556 if (THUMB_REG_PUSHED_P (regno))
10557 live_regs_mask |= 1 << regno;
10559 for (regno = 8; regno < 13; regno++)
10560 if (THUMB_REG_PUSHED_P (regno))
10561 high_regs_pushed++;
10563 /* The prologue may have pushed some high registers to use as
10564 work registers. E.g. the testsuite file:
10565 gcc/testsuite/gcc/gcc.c-torture/execute/complex-2.c
10566 compiles to produce:
10567 push {r4, r5, r6, r7, lr}
10568 mov r7, r9
10569 mov r6, r8
10570 push {r6, r7}
10571 as part of the prologue. We have to undo that pushing here. */
10573 if (high_regs_pushed)
10575 int mask = live_regs_mask;
10576 int next_hi_reg;
10577 int size;
10578 int mode;
10580 #ifdef RTX_CODE
10581 /* Deduce the registers used from the function's return value.
10582 This is more reliable than examining regs_ever_live[] because that
10583 will be set if the register is ever used in the function, not just if
10584 the register is used to hold a return value. */
10586 if (current_function_return_rtx != 0)
10587 mode = GET_MODE (current_function_return_rtx);
10588 else
10589 #endif
10590 mode = DECL_MODE (DECL_RESULT (current_function_decl));
10592 size = GET_MODE_SIZE (mode);
10594 /* Unless we are returning a type of size > 12, register r3 is
10595 available. */
10596 if (size < 13)
10597 mask |= 1 << 3;
10599 if (mask == 0)
10600 /* Oh dear! We have no low registers into which we can pop
10601 high registers! */
10602 internal_error
10603 ("no low registers available for popping high registers");
10605 for (next_hi_reg = 8; next_hi_reg < 13; next_hi_reg++)
10606 if (THUMB_REG_PUSHED_P (next_hi_reg))
10607 break;
10609 while (high_regs_pushed)
10611 /* Find lo register(s) into which the high register(s) can
10612 be popped. */
10613 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
10615 if (mask & (1 << regno))
10616 high_regs_pushed--;
10617 if (high_regs_pushed == 0)
10618 break;
10621 mask &= (2 << regno) - 1; /* A noop if regno == 8 */
10623 /* Pop the values into the low register(s). */
10624 thumb_pushpop (asm_out_file, mask, 0);
10626 /* Move the value(s) into the high registers. */
10627 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
10629 if (mask & (1 << regno))
10631 asm_fprintf (asm_out_file, "\tmov\t%r, %r\n", next_hi_reg,
10632 regno);
10634 for (next_hi_reg++; next_hi_reg < 13; next_hi_reg++)
10635 if (THUMB_REG_PUSHED_P (next_hi_reg))
10636 break;
10642 had_to_push_lr = (live_regs_mask || !leaf_function
10643 || thumb_far_jump_used_p (1));
10645 if (TARGET_BACKTRACE
10646 && ((live_regs_mask & 0xFF) == 0)
10647 && regs_ever_live [LAST_ARG_REGNUM] != 0)
10649 /* The stack backtrace structure creation code had to
10650 push R7 in order to get a work register, so we pop
10651 it now. */
10652 live_regs_mask |= (1 << LAST_LO_REGNUM);
10655 if (current_function_pretend_args_size == 0 || TARGET_BACKTRACE)
10657 if (had_to_push_lr
10658 && !is_called_in_ARM_mode (current_function_decl)
10659 && !eh_ofs)
10660 live_regs_mask |= 1 << PC_REGNUM;
10662 /* Either no argument registers were pushed or a backtrace
10663 structure was created which includes an adjusted stack
10664 pointer, so just pop everything. */
10665 if (live_regs_mask)
10666 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
10668 if (eh_ofs)
10669 thumb_exit (asm_out_file, 2, eh_ofs);
10670 /* We have either just popped the return address into the
10671 PC or it was kept in LR for the entire function or
10672 it is still on the stack because we do not want to
10673 return by doing a pop {pc}. */
10674 else if ((live_regs_mask & (1 << PC_REGNUM)) == 0)
10675 thumb_exit (asm_out_file,
10676 (had_to_push_lr
10677 && is_called_in_ARM_mode (current_function_decl)) ?
10678 -1 : LR_REGNUM, NULL_RTX);
10680 else
10682 /* Pop everything but the return address. */
10683 live_regs_mask &= ~(1 << PC_REGNUM);
10685 if (live_regs_mask)
10686 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
10688 if (had_to_push_lr)
10689 /* Get the return address into a temporary register. */
10690 thumb_pushpop (asm_out_file, 1 << LAST_ARG_REGNUM, 0);
10692 /* Remove the argument registers that were pushed onto the stack. */
10693 asm_fprintf (asm_out_file, "\tadd\t%r, %r, #%d\n",
10694 SP_REGNUM, SP_REGNUM,
10695 current_function_pretend_args_size);
10697 if (eh_ofs)
10698 thumb_exit (asm_out_file, 2, eh_ofs);
10699 else
10700 thumb_exit (asm_out_file,
10701 had_to_push_lr ? LAST_ARG_REGNUM : LR_REGNUM, NULL_RTX);
10704 return "";
10707 /* Functions to save and restore machine-specific function data. */
10708 static struct machine_function *
10709 arm_init_machine_status (void)
10711 struct machine_function *machine;
10712 machine = (machine_function *) ggc_alloc_cleared (sizeof (machine_function));
10714 #if ARM_FT_UNKNOWN != 0
10715 machine->func_type = ARM_FT_UNKNOWN;
10716 #endif
10717 return machine;
10720 /* Return an RTX indicating where the return address to the
10721 calling function can be found. */
10723 arm_return_addr (int count, rtx frame ATTRIBUTE_UNUSED)
10725 if (count != 0)
10726 return NULL_RTX;
10728 if (TARGET_APCS_32)
10729 return get_hard_reg_initial_val (Pmode, LR_REGNUM);
10730 else
10732 rtx lr = gen_rtx_AND (Pmode, gen_rtx_REG (Pmode, LR_REGNUM),
10733 GEN_INT (RETURN_ADDR_MASK26));
10734 return get_func_hard_reg_initial_val (cfun, lr);
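/* In 26-bit mode the PC and the status bits share one register, so the
   saved LR is masked with RETURN_ADDR_MASK26 above to strip the
   condition-code and mode bits before the result is usable as an
   address.  */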
10738 /* Do anything needed before RTL is emitted for each function. */
10739 void
10740 arm_init_expanders (void)
10742 /* Arrange to initialize and mark the machine per-function status. */
10743 init_machine_status = arm_init_machine_status;
10746 HOST_WIDE_INT
10747 thumb_get_frame_size (void)
10749 int regno;
10751 int base_size = ROUND_UP_WORD (get_frame_size ());
10752 int count_regs = 0;
10753 int entry_size = 0;
10754 int leaf;
10756 if (! TARGET_THUMB)
10757 abort ();
10759 if (! TARGET_ATPCS)
10760 return base_size;
10762 /* We need to know if we are a leaf function. Unfortunately, it
10763 is possible to be called after start_sequence has been called,
10764 which causes get_insns to return the insns for the sequence,
10765 not the function, which will cause leaf_function_p to return
10766 the incorrect result.
10768 To work around this, we cache the computed frame size. This
10769 works because we will only be calling RTL expanders that need
10770 to know about leaf functions once reload has completed, and the
10771 frame size cannot be changed after that time, so we can safely
10772 use the cached value. */
10774 if (reload_completed)
10775 return cfun->machine->frame_size;
10777 leaf = leaf_function_p ();
10779 /* A leaf function does not need any stack alignment if it has nothing
10780 on the stack. */
10781 if (leaf && base_size == 0)
10783 cfun->machine->frame_size = 0;
10784 return 0;
10787 /* We know that SP will be word aligned on entry, and we must
10788 preserve that condition at any subroutine call. But those are
10789 the only constraints. */
10791 /* Space for variadic functions. */
10792 if (current_function_pretend_args_size)
10793 entry_size += current_function_pretend_args_size;
10795 /* Space for pushed lo registers. */
10796 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
10797 if (THUMB_REG_PUSHED_P (regno))
10798 count_regs++;
10800 /* Space for backtrace structure. */
10801 if (TARGET_BACKTRACE)
10803 if (count_regs == 0 && regs_ever_live[LAST_ARG_REGNUM] != 0)
10804 entry_size += 20;
10805 else
10806 entry_size += 16;
10809 if (count_regs || !leaf || thumb_far_jump_used_p (1))
10810 count_regs++; /* LR */
10812 entry_size += count_regs * 4;
10813 count_regs = 0;
10815 /* Space for pushed hi regs. */
10816 for (regno = 8; regno < 13; regno++)
10817 if (THUMB_REG_PUSHED_P (regno))
10818 count_regs++;
10820 entry_size += count_regs * 4;
10822 if ((entry_size + base_size + current_function_outgoing_args_size) & 7)
10823 base_size += 4;
10824 if ((entry_size + base_size + current_function_outgoing_args_size) & 7)
10825 abort ();
10827 cfun->machine->frame_size = base_size;
10829 return base_size;
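/* A worked example of the ATPCS path: a non-leaf function with 4 bytes
   of locals, two pushed low registers and no outgoing arguments gets
   entry_size = (2 + 1) * 4 = 12 (two registers plus LR), and 12 + 4 is
   already a multiple of 8, so base_size stays 4.  With 8 bytes of
   locals the sum would be 20, and base_size is bumped to 12 to restore
   8-byte alignment.  */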
10832 /* Generate the rest of a function's prologue. */
10833 void
10834 thumb_expand_prologue (void)
10836 HOST_WIDE_INT amount = (thumb_get_frame_size ()
10837 + current_function_outgoing_args_size);
10838 unsigned long func_type;
10840 func_type = arm_current_func_type ();
10842 /* Naked functions don't have prologues. */
10843 if (IS_NAKED (func_type))
10844 return;
10846 if (IS_INTERRUPT (func_type))
10848 error ("interrupt Service Routines cannot be coded in Thumb mode");
10849 return;
10852 if (frame_pointer_needed)
10853 emit_insn (gen_movsi (hard_frame_pointer_rtx, stack_pointer_rtx));
10855 if (amount)
10857 amount = ROUND_UP_WORD (amount);
10859 if (amount < 512)
10860 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
10861 GEN_INT (- amount)));
10862 else
10864 int regno;
10865 rtx reg;
10867 /* The stack decrement is too big for an immediate value in a single
10868 insn. In theory we could issue multiple subtracts, but after
10869 three of them it becomes more space efficient to place the full
10870 value in the constant pool and load into a register. (Also the
10871 ARM debugger really likes to see only one stack decrement per
10872 function). So instead we look for a scratch register into which
10873 we can load the decrement, and then we subtract this from the
10874 stack pointer. Unfortunately on the thumb the only available
10875 scratch registers are the argument registers, and we cannot use
10876 these as they may hold arguments to the function. Instead we
10877 attempt to locate a call preserved register which is used by this
10878 function. If we can find one, then we know that it will have
10879 been pushed at the start of the prologue and so we can corrupt
10880 it now. */
10881 for (regno = LAST_ARG_REGNUM + 1; regno <= LAST_LO_REGNUM; regno++)
10882 if (THUMB_REG_PUSHED_P (regno)
10883 && !(frame_pointer_needed
10884 && (regno == THUMB_HARD_FRAME_POINTER_REGNUM)))
10885 break;
10887 if (regno > LAST_LO_REGNUM) /* Very unlikely. */
10889 rtx spare = gen_rtx (REG, SImode, IP_REGNUM);
10891 /* Choose an arbitrary, non-argument low register. */
10892 reg = gen_rtx (REG, SImode, LAST_LO_REGNUM);
10894 /* Save it by copying it into a high, scratch register. */
10895 emit_insn (gen_movsi (spare, reg));
10896 /* Add a USE to stop propagate_one_insn() from barfing. */
10897 emit_insn (gen_prologue_use (spare));
10899 /* Decrement the stack. */
10900 emit_insn (gen_movsi (reg, GEN_INT (- amount)));
10901 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
10902 reg));
10904 /* Restore the low register's original value. */
10905 emit_insn (gen_movsi (reg, spare));
10907 /* Emit a USE of the restored scratch register, so that flow
10908 analysis will not consider the restore redundant. The
10909 register won't be used again in this function and isn't
10910 restored by the epilogue. */
10911 emit_insn (gen_prologue_use (reg));
10913 else
10915 reg = gen_rtx (REG, SImode, regno);
10917 emit_insn (gen_movsi (reg, GEN_INT (- amount)));
10918 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
10919 reg));
10924 if (current_function_profile || TARGET_NO_SCHED_PRO)
10925 emit_insn (gen_blockage ());
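/* Roughly, then, a large frame such as 1024 bytes is decremented via
   the scavenged register, along the lines of:

     ldr  rN, =constant    ; rN holds -1024, loaded from the pool
     add  sp, sp, rN

   with the extra save/restore through IP_REGNUM only in the unlikely
   case that no pushed low register was found.  */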
10928 void
10929 thumb_expand_epilogue (void)
10931 HOST_WIDE_INT amount = (thumb_get_frame_size ()
10932 + current_function_outgoing_args_size);
10934 /* Naked functions don't have epilogues. */
10935 if (IS_NAKED (arm_current_func_type ()))
10936 return;
10938 if (frame_pointer_needed)
10939 emit_insn (gen_movsi (stack_pointer_rtx, hard_frame_pointer_rtx));
10940 else if (amount)
10942 amount = ROUND_UP_WORD (amount);
10944 if (amount < 512)
10945 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
10946 GEN_INT (amount)));
10947 else
10949 /* r3 is always free in the epilogue. */
10950 rtx reg = gen_rtx (REG, SImode, LAST_ARG_REGNUM);
10952 emit_insn (gen_movsi (reg, GEN_INT (amount)));
10953 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
10957 /* Emit a USE (stack_pointer_rtx), so that
10958 the stack adjustment will not be deleted. */
10959 emit_insn (gen_prologue_use (stack_pointer_rtx));
10961 if (current_function_profile || TARGET_NO_SCHED_PRO)
10962 emit_insn (gen_blockage ());
10965 static void
10966 thumb_output_function_prologue (FILE *f, HOST_WIDE_INT size ATTRIBUTE_UNUSED)
10968 int live_regs_mask = 0;
10969 int high_regs_pushed = 0;
10970 int regno;
10972 if (IS_NAKED (arm_current_func_type ()))
10973 return;
10975 if (is_called_in_ARM_mode (current_function_decl))
10977 const char * name;
10979 if (GET_CODE (DECL_RTL (current_function_decl)) != MEM)
10980 abort ();
10981 if (GET_CODE (XEXP (DECL_RTL (current_function_decl), 0)) != SYMBOL_REF)
10982 abort ();
10983 name = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
10985 /* Generate code sequence to switch us into Thumb mode. */
10986 /* The .code 32 directive has already been emitted by
10987 ASM_DECLARE_FUNCTION_NAME. */
10988 asm_fprintf (f, "\torr\t%r, %r, #1\n", IP_REGNUM, PC_REGNUM);
10989 asm_fprintf (f, "\tbx\t%r\n", IP_REGNUM);
10991 /* Generate a label, so that the debugger will notice the
10992 change in instruction sets. This label is also used by
10993 the assembler to bypass the ARM code when this function
10994 is called from a Thumb encoded function elsewhere in the
10995 same file. Hence the definition of STUB_NAME here must
10996 agree with the definition in gas/config/tc-arm.c */
10998 #define STUB_NAME ".real_start_of"
11000 fprintf (f, "\t.code\t16\n");
11001 #ifdef ARM_PE
11002 if (arm_dllexport_name_p (name))
11003 name = arm_strip_name_encoding (name);
11004 #endif
11005 asm_fprintf (f, "\t.globl %s%U%s\n", STUB_NAME, name);
11006 fprintf (f, "\t.thumb_func\n");
11007 asm_fprintf (f, "%s%U%s:\n", STUB_NAME, name);
11010 if (current_function_pretend_args_size)
11012 if (cfun->machine->uses_anonymous_args)
11014 int num_pushes;
11016 fprintf (f, "\tpush\t{");
11018 num_pushes = ARM_NUM_INTS (current_function_pretend_args_size);
11020 for (regno = LAST_ARG_REGNUM + 1 - num_pushes;
11021 regno <= LAST_ARG_REGNUM;
11022 regno++)
11023 asm_fprintf (f, "%r%s", regno,
11024 regno == LAST_ARG_REGNUM ? "" : ", ");
11026 fprintf (f, "}\n");
11028 else
11029 asm_fprintf (f, "\tsub\t%r, %r, #%d\n",
11030 SP_REGNUM, SP_REGNUM,
11031 current_function_pretend_args_size);
11034 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
11035 if (THUMB_REG_PUSHED_P (regno))
11036 live_regs_mask |= 1 << regno;
11038 if (live_regs_mask || !leaf_function_p () || thumb_far_jump_used_p (1))
11039 live_regs_mask |= 1 << LR_REGNUM;
11041 if (TARGET_BACKTRACE)
11043 int offset;
11044 int work_register = 0;
11045 int wr;
11047 /* We have been asked to create a stack backtrace structure.
11048 The code looks like this:
11050 0 .align 2
11051 0 func:
11052 0 sub SP, #16 Reserve space for 4 registers.
11053 2 push {R7} Get a work register.
11054 4 add R7, SP, #20 Get the stack pointer before the push.
11055 6 str R7, [SP, #8] Store the stack pointer (before reserving the space).
11056 8 mov R7, PC Get hold of the start of this code plus 12.
11057 10 str R7, [SP, #16] Store it.
11058 12 mov R7, FP Get hold of the current frame pointer.
11059 14 str R7, [SP, #4] Store it.
11060 16 mov R7, LR Get hold of the current return address.
11061 18 str R7, [SP, #12] Store it.
11062 20 add R7, SP, #16 Point at the start of the backtrace structure.
11063 22 mov FP, R7 Put this value into the frame pointer. */
11065 if ((live_regs_mask & 0xFF) == 0)
11067 /* See if the a4 register is free. */
11069 if (regs_ever_live [LAST_ARG_REGNUM] == 0)
11070 work_register = LAST_ARG_REGNUM;
11071 else /* We must push a register of our own */
11072 live_regs_mask |= (1 << LAST_LO_REGNUM);
11075 if (work_register == 0)
11077 /* Select a register from the list that will be pushed to
11078 use as our work register. */
11079 for (work_register = (LAST_LO_REGNUM + 1); work_register--;)
11080 if ((1 << work_register) & live_regs_mask)
11081 break;
11084 asm_fprintf
11085 (f, "\tsub\t%r, %r, #16\t%@ Create stack backtrace structure\n",
11086 SP_REGNUM, SP_REGNUM);
11088 if (live_regs_mask)
11089 thumb_pushpop (f, live_regs_mask, 1);
11091 for (offset = 0, wr = 1 << 15; wr != 0; wr >>= 1)
11092 if (wr & live_regs_mask)
11093 offset += 4;
11095 asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
11096 offset + 16 + current_function_pretend_args_size);
11098 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
11099 offset + 4);
11101 /* Make sure that the instruction fetching the PC is in the right place
11102 to calculate "start of backtrace creation code + 12". */
11103 if (live_regs_mask)
11105 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
11106 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
11107 offset + 12);
11108 asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
11109 ARM_HARD_FRAME_POINTER_REGNUM);
11110 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
11111 offset);
11113 else
11115 asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
11116 ARM_HARD_FRAME_POINTER_REGNUM);
11117 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
11118 offset);
11119 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
11120 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
11121 offset + 12);
11124 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, LR_REGNUM);
11125 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
11126 offset + 8);
11127 asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
11128 offset + 12);
11129 asm_fprintf (f, "\tmov\t%r, %r\t\t%@ Backtrace structure created\n",
11130 ARM_HARD_FRAME_POINTER_REGNUM, work_register);
11132 else if (live_regs_mask)
11133 thumb_pushpop (f, live_regs_mask, 1);
11135 for (regno = 8; regno < 13; regno++)
11136 if (THUMB_REG_PUSHED_P (regno))
11137 high_regs_pushed++;
11139 if (high_regs_pushed)
11141 int pushable_regs = 0;
11142 int mask = live_regs_mask & 0xff;
11143 int next_hi_reg;
11145 for (next_hi_reg = 12; next_hi_reg > LAST_LO_REGNUM; next_hi_reg--)
11146 if (THUMB_REG_PUSHED_P (next_hi_reg))
11147 break;
11149 pushable_regs = mask;
11151 if (pushable_regs == 0)
11153 /* Desperation time -- this probably will never happen. */
11154 if (THUMB_REG_PUSHED_P (LAST_ARG_REGNUM))
11155 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, LAST_ARG_REGNUM);
11156 mask = 1 << LAST_ARG_REGNUM;
11159 while (high_regs_pushed > 0)
11161 for (regno = LAST_LO_REGNUM; regno >= 0; regno--)
11163 if (mask & (1 << regno))
11165 asm_fprintf (f, "\tmov\t%r, %r\n", regno, next_hi_reg);
11167 high_regs_pushed--;
11169 if (high_regs_pushed)
11171 for (next_hi_reg--; next_hi_reg > LAST_LO_REGNUM;
11172 next_hi_reg--)
11173 if (THUMB_REG_PUSHED_P (next_hi_reg))
11174 break;
11176 else
11178 mask &= ~((1 << regno) - 1);
11179 break;
11184 thumb_pushpop (f, mask, 1);
11187 if (pushable_regs == 0
11188 && (THUMB_REG_PUSHED_P (LAST_ARG_REGNUM)))
11189 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
11193 /* Handle the case of a double word load into a low register from
11194 a computed memory address. The computed address may involve a
11195 register which is overwritten by the load. */
11196 const char *
11197 thumb_load_double_from_address (rtx *operands)
11199 rtx addr;
11200 rtx base;
11201 rtx offset;
11202 rtx arg1;
11203 rtx arg2;
11205 if (GET_CODE (operands[0]) != REG)
11206 abort ();
11208 if (GET_CODE (operands[1]) != MEM)
11209 abort ();
11211 /* Get the memory address. */
11212 addr = XEXP (operands[1], 0);
11214 /* Work out how the memory address is computed. */
11215 switch (GET_CODE (addr))
11217 case REG:
11218 operands[2] = gen_rtx (MEM, SImode,
11219 plus_constant (XEXP (operands[1], 0), 4));
11221 if (REGNO (operands[0]) == REGNO (addr))
11223 output_asm_insn ("ldr\t%H0, %2", operands);
11224 output_asm_insn ("ldr\t%0, %1", operands);
11226 else
11228 output_asm_insn ("ldr\t%0, %1", operands);
11229 output_asm_insn ("ldr\t%H0, %2", operands);
11231 break;
11233 case CONST:
11234 /* Compute <address> + 4 for the high order load. */
11235 operands[2] = gen_rtx (MEM, SImode,
11236 plus_constant (XEXP (operands[1], 0), 4));
11238 output_asm_insn ("ldr\t%0, %1", operands);
11239 output_asm_insn ("ldr\t%H0, %2", operands);
11240 break;
11242 case PLUS:
11243 arg1 = XEXP (addr, 0);
11244 arg2 = XEXP (addr, 1);
11246 if (CONSTANT_P (arg1))
11247 base = arg2, offset = arg1;
11248 else
11249 base = arg1, offset = arg2;
11251 if (GET_CODE (base) != REG)
11252 abort ();
11254 /* Catch the case of <address> = <reg> + <reg> */
11255 if (GET_CODE (offset) == REG)
11257 int reg_offset = REGNO (offset);
11258 int reg_base = REGNO (base);
11259 int reg_dest = REGNO (operands[0]);
11261 /* Add the base and offset registers together into the
11262 higher destination register. */
11263 asm_fprintf (asm_out_file, "\tadd\t%r, %r, %r",
11264 reg_dest + 1, reg_base, reg_offset);
11266 /* Load the lower destination register from the address in
11267 the higher destination register. */
11268 asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #0]",
11269 reg_dest, reg_dest + 1);
11271 /* Load the higher destination register from its own address
11272 plus 4. */
11273 asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #4]",
11274 reg_dest + 1, reg_dest + 1);
11276 else
11278 /* Compute <address> + 4 for the high order load. */
11279 operands[2] = gen_rtx (MEM, SImode,
11280 plus_constant (XEXP (operands[1], 0), 4));
11282 /* If the computed address is held in the low order register
11283 then load the high order register first, otherwise always
11284 load the low order register first. */
11285 if (REGNO (operands[0]) == REGNO (base))
11287 output_asm_insn ("ldr\t%H0, %2", operands);
11288 output_asm_insn ("ldr\t%0, %1", operands);
11290 else
11292 output_asm_insn ("ldr\t%0, %1", operands);
11293 output_asm_insn ("ldr\t%H0, %2", operands);
11296 break;
11298 case LABEL_REF:
11299 /* With no registers to worry about we can just load the value
11300 directly. */
11301 operands[2] = gen_rtx (MEM, SImode,
11302 plus_constant (XEXP (operands[1], 0), 4));
11304 output_asm_insn ("ldr\t%H0, %2", operands);
11305 output_asm_insn ("ldr\t%0, %1", operands);
11306 break;
11308 default:
11309 abort ();
11310 break;
11313 return "";
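/* For the register + register case, given operands[0] = r2 and
   operands[1] = [r2 + r3], the code above emits:

     add  r3, r2, r3
     ldr  r2, [r3, #0]
     ldr  r3, [r3, #4]

   using the high half of the destination pair as the address temporary,
   so the doomed base register is consumed before it is overwritten.  */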
11316 const char *
11317 thumb_output_move_mem_multiple (int n, rtx *operands)
11319 rtx tmp;
11321 switch (n)
11323 case 2:
11324 if (REGNO (operands[4]) > REGNO (operands[5]))
11326 tmp = operands[4];
11327 operands[4] = operands[5];
11328 operands[5] = tmp;
11330 output_asm_insn ("ldmia\t%1!, {%4, %5}", operands);
11331 output_asm_insn ("stmia\t%0!, {%4, %5}", operands);
11332 break;
11334 case 3:
11335 if (REGNO (operands[4]) > REGNO (operands[5]))
11337 tmp = operands[4];
11338 operands[4] = operands[5];
11339 operands[5] = tmp;
11341 if (REGNO (operands[5]) > REGNO (operands[6]))
11343 tmp = operands[5];
11344 operands[5] = operands[6];
11345 operands[6] = tmp;
11347 if (REGNO (operands[4]) > REGNO (operands[5]))
11349 tmp = operands[4];
11350 operands[4] = operands[5];
11351 operands[5] = tmp;
11354 output_asm_insn ("ldmia\t%1!, {%4, %5, %6}", operands);
11355 output_asm_insn ("stmia\t%0!, {%4, %5, %6}", operands);
11356 break;
11358 default:
11359 abort ();
11362 return "";
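/* The three exchanges in the 3-register case above are a bubble sort:
   they guarantee REGNO (%4) < REGNO (%5) < REGNO (%6), which matters
   because ldmia/stmia require their register lists to be in ascending
   order.  */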
11365 /* Routines for generating rtl. */
11366 void
11367 thumb_expand_movstrqi (rtx *operands)
11369 rtx out = copy_to_mode_reg (SImode, XEXP (operands[0], 0));
11370 rtx in = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
11371 HOST_WIDE_INT len = INTVAL (operands[2]);
11372 HOST_WIDE_INT offset = 0;
11374 while (len >= 12)
11376 emit_insn (gen_movmem12b (out, in, out, in));
11377 len -= 12;
11380 if (len >= 8)
11382 emit_insn (gen_movmem8b (out, in, out, in));
11383 len -= 8;
11386 if (len >= 4)
11388 rtx reg = gen_reg_rtx (SImode);
11389 emit_insn (gen_movsi (reg, gen_rtx (MEM, SImode, in)));
11390 emit_insn (gen_movsi (gen_rtx (MEM, SImode, out), reg));
11391 len -= 4;
11392 offset += 4;
11395 if (len >= 2)
11397 rtx reg = gen_reg_rtx (HImode);
11398 emit_insn (gen_movhi (reg, gen_rtx (MEM, HImode,
11399 plus_constant (in, offset))));
11400 emit_insn (gen_movhi (gen_rtx (MEM, HImode, plus_constant (out, offset)),
11401 reg));
11402 len -= 2;
11403 offset += 2;
11406 if (len)
11408 rtx reg = gen_reg_rtx (QImode);
11409 emit_insn (gen_movqi (reg, gen_rtx (MEM, QImode,
11410 plus_constant (in, offset))));
11411 emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (out, offset)),
11412 reg));
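/* For example, a 23-byte copy expands to one 12-byte and one 8-byte
   block move (the patterns post-increment IN and OUT themselves),
   followed by a halfword at offset 0 and a byte at offset 2 from the
   updated pointers.  */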
11417 thumb_cmp_operand (rtx op, enum machine_mode mode)
11419 return ((GET_CODE (op) == CONST_INT
11420 && (unsigned HOST_WIDE_INT) (INTVAL (op)) < 256)
11421 || register_operand (op, mode));
11424 static const char *
11425 thumb_condition_code (rtx x, int invert)
11427 static const char * const conds[] =
11429 "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
11430 "hi", "ls", "ge", "lt", "gt", "le"
11432 int val;
11434 switch (GET_CODE (x))
11436 case EQ: val = 0; break;
11437 case NE: val = 1; break;
11438 case GEU: val = 2; break;
11439 case LTU: val = 3; break;
11440 case GTU: val = 8; break;
11441 case LEU: val = 9; break;
11442 case GE: val = 10; break;
11443 case LT: val = 11; break;
11444 case GT: val = 12; break;
11445 case LE: val = 13; break;
11446 default:
11447 abort ();
11450 return conds[val ^ invert];
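/* The table above is arranged in complementary pairs (eq/ne, cs/cc,
   ...), so XORing the index with INVERT == 1 yields the inverse
   condition: GE (index 10, "ge") inverts to "lt".  */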
11453 /* Handle storing a half-word to memory during reload. */
11454 void
11455 thumb_reload_out_hi (rtx *operands)
11457 emit_insn (gen_thumb_movhi_clobber (operands[0], operands[1], operands[2]));
11460 /* Handle reading a half-word from memory during reload. */
11461 void
11462 thumb_reload_in_hi (rtx *operands ATTRIBUTE_UNUSED)
11464 abort ();
11467 /* Return the length of a function name prefix
11468 that starts with the character 'c'. */
11469 static int
11470 arm_get_strip_length (int c)
11472 switch (c)
11474 ARM_NAME_ENCODING_LENGTHS
11475 default: return 0;
11479 /* Return a pointer to a function's name with any
11480 and all prefix encodings stripped from it. */
11481 const char *
11482 arm_strip_name_encoding (const char *name)
11484 int skip;
11486 while ((skip = arm_get_strip_length (* name)))
11487 name += skip;
11489 return name;
11492 /* If there is a '*' anywhere in the name's prefix, then
11493 emit the stripped name verbatim, otherwise prepend an
11494 underscore if leading underscores are being used. */
11495 void
11496 arm_asm_output_labelref (FILE *stream, const char *name)
11498 int skip;
11499 int verbatim = 0;
11501 while ((skip = arm_get_strip_length (* name)))
11503 verbatim |= (*name == '*');
11504 name += skip;
11507 if (verbatim)
11508 fputs (name, stream);
11509 else
11510 asm_fprintf (stream, "%U%s", name);
11513 rtx aof_pic_label;
11515 #ifdef AOF_ASSEMBLER
11516 /* Special functions only needed when producing AOF syntax assembler. */
11518 struct pic_chain
11520 struct pic_chain * next;
11521 const char * symname;
11524 static struct pic_chain * aof_pic_chain = NULL;
11527 aof_pic_entry (rtx x)
11529 struct pic_chain ** chainp;
11530 int offset;
11532 if (aof_pic_label == NULL_RTX)
11534 aof_pic_label = gen_rtx_SYMBOL_REF (Pmode, "x$adcons");
11537 for (offset = 0, chainp = &aof_pic_chain; *chainp;
11538 offset += 4, chainp = &(*chainp)->next)
11539 if ((*chainp)->symname == XSTR (x, 0))
11540 return plus_constant (aof_pic_label, offset);
11542 *chainp = (struct pic_chain *) xmalloc (sizeof (struct pic_chain));
11543 (*chainp)->next = NULL;
11544 (*chainp)->symname = XSTR (x, 0);
11545 return plus_constant (aof_pic_label, offset);
11548 void
11549 aof_dump_pic_table (FILE *f)
11551 struct pic_chain * chain;
11553 if (aof_pic_chain == NULL)
11554 return;
11556 asm_fprintf (f, "\tAREA |%r$$adcons|, BASED %r\n",
11557 PIC_OFFSET_TABLE_REGNUM,
11558 PIC_OFFSET_TABLE_REGNUM);
11559 fputs ("|x$adcons|\n", f);
11561 for (chain = aof_pic_chain; chain; chain = chain->next)
11563 fputs ("\tDCD\t", f);
11564 assemble_name (f, chain->symname);
11565 fputs ("\n", f);
11569 int arm_text_section_count = 1;
11571 char *
11572 aof_text_section (void)
11574 static char buf[100];
11575 sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
11576 arm_text_section_count++);
11577 if (flag_pic)
11578 strcat (buf, ", PIC, REENTRANT");
11579 return buf;
11582 static int arm_data_section_count = 1;
11584 char *
11585 aof_data_section (void)
11587 static char buf[100];
11588 sprintf (buf, "\tAREA |C$$data%d|, DATA", arm_data_section_count++);
11589 return buf;
11592 /* The AOF assembler is religiously strict about declarations of
11593 imported and exported symbols, so that it is impossible to declare
11594 a function as imported near the beginning of the file, and then to
11595 export it later on. It is, however, possible to delay the decision
11596 until all the functions in the file have been compiled. To get
11597 around this, we maintain a list of the imports and exports, and
11598 delete from it any that are subsequently defined. At the end of
11599 compilation we spit the remainder of the list out before the END
11600 directive. */
11602 struct import
11604 struct import * next;
11605 const char * name;
11608 static struct import * imports_list = NULL;
11610 void
11611 aof_add_import (const char *name)
11613 struct import * new;
11615 for (new = imports_list; new; new = new->next)
11616 if (new->name == name)
11617 return;
11619 new = (struct import *) xmalloc (sizeof (struct import));
11620 new->next = imports_list;
11621 imports_list = new;
11622 new->name = name;
11625 void
11626 aof_delete_import (const char *name)
11628 struct import ** old;
11630 for (old = &imports_list; *old; old = & (*old)->next)
11632 if ((*old)->name == name)
11634 *old = (*old)->next;
11635 return;
11640 int arm_main_function = 0;
11642 static void
11643 aof_dump_imports (FILE *f)
11645 /* The AOF assembler needs this to cause the startup code to be extracted
11646 from the library. Bringing in __main causes the whole thing to work
11647 automagically. */
11648 if (arm_main_function)
11650 text_section ();
11651 fputs ("\tIMPORT __main\n", f);
11652 fputs ("\tDCD __main\n", f);
11655 /* Now dump the remaining imports. */
11656 while (imports_list)
11658 fprintf (f, "\tIMPORT\t");
11659 assemble_name (f, imports_list->name);
11660 fputc ('\n', f);
11661 imports_list = imports_list->next;
11665 static void
11666 aof_globalize_label (FILE *stream, const char *name)
11668 default_globalize_label (stream, name);
11669 if (! strcmp (name, "main"))
11670 arm_main_function = 1;
11673 static void
11674 aof_file_end (void)
11676 if (flag_pic)
11677 aof_dump_pic_table (asm_out_file);
11678 aof_dump_imports (asm_out_file);
11679 fputs ("\tEND\n", asm_out_file);
11681 #endif /* AOF_ASSEMBLER */
11683 #ifdef OBJECT_FORMAT_ELF
11684 /* Switch to an arbitrary section NAME with attributes as specified
11685 by FLAGS. ALIGN specifies any known alignment requirements for
11686 the section; 0 if the default should be used.
11688 Differs from the default elf version only in the prefix character
11689 used before the section type. */
11691 static void
11692 arm_elf_asm_named_section (const char *name, unsigned int flags)
11694 char flagchars[10], *f = flagchars;
11696 if (! named_section_first_declaration (name))
11698 fprintf (asm_out_file, "\t.section\t%s\n", name);
11699 return;
11702 if (!(flags & SECTION_DEBUG))
11703 *f++ = 'a';
11704 if (flags & SECTION_WRITE)
11705 *f++ = 'w';
11706 if (flags & SECTION_CODE)
11707 *f++ = 'x';
11708 if (flags & SECTION_SMALL)
11709 *f++ = 's';
11710 if (flags & SECTION_MERGE)
11711 *f++ = 'M';
11712 if (flags & SECTION_STRINGS)
11713 *f++ = 'S';
11714 if (flags & SECTION_TLS)
11715 *f++ = 'T';
11716 *f = '\0';
11718 fprintf (asm_out_file, "\t.section\t%s,\"%s\"", name, flagchars);
11720 if (!(flags & SECTION_NOTYPE))
11722 const char *type;
11724 if (flags & SECTION_BSS)
11725 type = "nobits";
11726 else
11727 type = "progbits";
11729 fprintf (asm_out_file, ",%%%s", type);
11731 if (flags & SECTION_ENTSIZE)
11732 fprintf (asm_out_file, ",%d", flags & SECTION_ENTSIZE);
11735 putc ('\n', asm_out_file);
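/* As a sketch of the output: a hypothetical writable data section
   ".foo" comes out as

     .section .foo,"aw",%progbits

   where the '%' before the type is the ARM-specific difference from
   the default ELF hook, which uses '@'.  */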
11737 #endif
11739 #ifndef ARM_PE
11740 /* Symbols in the text segment can be accessed without indirecting via the
11741 constant pool; it may take an extra binary operation, but this is still
11742 faster than indirecting via memory. Don't do this when not optimizing,
11743 since we won't be calculating all of the offsets necessary to do this
11744 simplification. */
11746 static void
11747 arm_encode_section_info (tree decl, rtx rtl, int first)
11749 /* This doesn't work with AOF syntax, since the string table may be in
11750 a different AREA. */
11751 #ifndef AOF_ASSEMBLER
11752 if (optimize > 0 && TREE_CONSTANT (decl)
11753 && (!flag_writable_strings || TREE_CODE (decl) != STRING_CST))
11754 SYMBOL_REF_FLAG (XEXP (rtl, 0)) = 1;
11755 #endif
11757 /* If we are referencing a function that is weak then encode a long call
11758 flag in the function name, otherwise if the function is static or
11759 known to be defined in this file then encode a short call flag. */
11760 if (first && TREE_CODE_CLASS (TREE_CODE (decl)) == 'd')
11762 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_WEAK (decl))
11763 arm_encode_call_attribute (decl, LONG_CALL_FLAG_CHAR);
11764 else if (! TREE_PUBLIC (decl))
11765 arm_encode_call_attribute (decl, SHORT_CALL_FLAG_CHAR);
11768 #endif /* !ARM_PE */
11770 static void
11771 arm_internal_label (FILE *stream, const char *prefix, unsigned long labelno)
11773 if (arm_ccfsm_state == 3 && (unsigned) arm_target_label == labelno
11774 && !strcmp (prefix, "L"))
11776 arm_ccfsm_state = 0;
11777 arm_target_insn = NULL;
11779 default_internal_label (stream, prefix, labelno);
11782 /* Output code to add DELTA to the first argument, and then jump
11783 to FUNCTION. Used for C++ multiple inheritance. */
11784 static void
11785 arm_output_mi_thunk (FILE *file, tree thunk ATTRIBUTE_UNUSED,
11786 HOST_WIDE_INT delta,
11787 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
11788 tree function)
11790 int mi_delta = delta;
11791 const char *const mi_op = mi_delta < 0 ? "sub" : "add";
11792 int shift = 0;
11793 int this_regno = (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)))
11794 ? 1 : 0);
11795 if (mi_delta < 0)
11796 mi_delta = - mi_delta;
11797 while (mi_delta != 0)
11799 if ((mi_delta & (3 << shift)) == 0)
11800 shift += 2;
11801 else
11803 asm_fprintf (file, "\t%s\t%r, %r, #%d\n",
11804 mi_op, this_regno, this_regno,
11805 mi_delta & (0xff << shift));
11806 mi_delta &= ~(0xff << shift);
11807 shift += 8;
11810 fputs ("\tb\t", file);
11811 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
11812 if (NEED_PLT_RELOC)
11813 fputs ("(PLT)", file);
11814 fputc ('\n', file);
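/* The loop above peels DELTA into add/sub immediates of the form
   (8-bit value << even shift), the same shape as an ARM immediate
   operand.  For example, DELTA == 0x401 comes out as:

     add  r0, r0, #1
     add  r0, r0, #1024

   (with r1 in place of r0 when the function returns an aggregate,
   since r0 then carries the return-value address).  */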