output_return_instruction: Simplyify test for avoiding type 2 LDMs.
[official-gcc.git] / gcc / config / arm / arm.c
blob216e5a270c878502cc0a16b10241f2a8a1011d42
1 /* Output routines for GCC for ARM.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003
3 Free Software Foundation, Inc.
4 Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
5 and Martin Simmons (@harleqn.co.uk).
6 More major hacks by Richard Earnshaw (rearnsha@arm.com).
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify it
11 under the terms of the GNU General Public License as published
12 by the Free Software Foundation; either version 2, or (at your
13 option) any later version.
15 GCC is distributed in the hope that it will be useful, but WITHOUT
16 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
17 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
18 License for more details.
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING. If not, write to
22 the Free Software Foundation, 59 Temple Place - Suite 330,
23 Boston, MA 02111-1307, USA. */
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "tm.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "obstack.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "real.h"
35 #include "insn-config.h"
36 #include "conditions.h"
37 #include "output.h"
38 #include "insn-attr.h"
39 #include "flags.h"
40 #include "reload.h"
41 #include "function.h"
42 #include "expr.h"
43 #include "optabs.h"
44 #include "toplev.h"
45 #include "recog.h"
46 #include "ggc.h"
47 #include "except.h"
48 #include "c-pragma.h"
49 #include "integrate.h"
50 #include "tm_p.h"
51 #include "target.h"
52 #include "target-def.h"
54 /* Forward definitions of types. */
55 typedef struct minipool_node Mnode;
56 typedef struct minipool_fixup Mfix;
58 /* In order to improve the layout of the prototypes below
59 some short type abbreviations are defined here. */
60 #define Hint HOST_WIDE_INT
61 #define Mmode enum machine_mode
62 #define Ulong unsigned long
63 #define Ccstar const char *
65 const char extra_reg_names1[][16] =
66 { "mv0", "mv1", "mv2", "mv3", "mv4", "mv5", "mv6", "mv7",
67 "mv8", "mv9", "mv10", "mv11", "mv12", "mv13", "mv14", "mv15"
69 #define extra_reg_names1 bogus1_regnames
71 const struct attribute_spec arm_attribute_table[];
73 /* Forward function declarations. */
74 static void arm_add_gc_roots PARAMS ((void));
75 static int arm_gen_constant PARAMS ((enum rtx_code, Mmode, Hint, rtx, rtx, int, int));
76 static unsigned bit_count PARAMS ((Ulong));
77 static int arm_address_register_rtx_p PARAMS ((rtx, int));
78 static int arm_legitimate_index_p PARAMS ((Mmode, rtx, int));
79 static int thumb_base_register_rtx_p PARAMS ((rtx, Mmode, int));
80 inline static int thumb_index_register_rtx_p PARAMS ((rtx, int));
81 static int const_ok_for_op PARAMS ((Hint, enum rtx_code));
82 static int eliminate_lr2ip PARAMS ((rtx *));
83 static rtx emit_multi_reg_push PARAMS ((int));
84 static rtx emit_sfm PARAMS ((int, int));
85 #ifndef AOF_ASSEMBLER
86 static bool arm_assemble_integer PARAMS ((rtx, unsigned int, int));
87 #endif
88 static Ccstar fp_const_from_val PARAMS ((REAL_VALUE_TYPE *));
89 static arm_cc get_arm_condition_code PARAMS ((rtx));
90 static void init_fpa_table PARAMS ((void));
91 static Hint int_log2 PARAMS ((Hint));
92 static rtx is_jump_table PARAMS ((rtx));
93 static Ccstar output_multi_immediate PARAMS ((rtx *, Ccstar, Ccstar, int, Hint));
94 static void print_multi_reg PARAMS ((FILE *, Ccstar, int, int));
95 static Mmode select_dominance_cc_mode PARAMS ((rtx, rtx, Hint));
96 static Ccstar shift_op PARAMS ((rtx, Hint *));
97 static struct machine_function * arm_init_machine_status PARAMS ((void));
98 static int number_of_first_bit_set PARAMS ((int));
99 static void replace_symbols_in_block PARAMS ((tree, rtx, rtx));
100 static void thumb_exit PARAMS ((FILE *, int, rtx));
101 static void thumb_pushpop PARAMS ((FILE *, int, int));
102 static Ccstar thumb_condition_code PARAMS ((rtx, int));
103 static rtx is_jump_table PARAMS ((rtx));
104 static Hint get_jump_table_size PARAMS ((rtx));
105 static Mnode * move_minipool_fix_forward_ref PARAMS ((Mnode *, Mnode *, Hint));
106 static Mnode * add_minipool_forward_ref PARAMS ((Mfix *));
107 static Mnode * move_minipool_fix_backward_ref PARAMS ((Mnode *, Mnode *, Hint));
108 static Mnode * add_minipool_backward_ref PARAMS ((Mfix *));
109 static void assign_minipool_offsets PARAMS ((Mfix *));
110 static void arm_print_value PARAMS ((FILE *, rtx));
111 static void dump_minipool PARAMS ((rtx));
112 static int arm_barrier_cost PARAMS ((rtx));
113 static Mfix * create_fix_barrier PARAMS ((Mfix *, Hint));
114 static void push_minipool_barrier PARAMS ((rtx, Hint));
115 static void push_minipool_fix PARAMS ((rtx, Hint, rtx *, Mmode, rtx));
116 static bool note_invalid_constants PARAMS ((rtx, Hint, int));
117 static int current_file_function_operand PARAMS ((rtx));
118 static Ulong arm_compute_save_reg0_reg12_mask PARAMS ((void));
119 static Ulong arm_compute_save_reg_mask PARAMS ((void));
120 static Ulong arm_isr_value PARAMS ((tree));
121 static Ulong arm_compute_func_type PARAMS ((void));
122 static tree arm_handle_fndecl_attribute PARAMS ((tree *, tree, tree, int, bool *));
123 static tree arm_handle_isr_attribute PARAMS ((tree *, tree, tree, int, bool *));
124 static void arm_output_function_epilogue PARAMS ((FILE *, Hint));
125 static void arm_output_function_prologue PARAMS ((FILE *, Hint));
126 static void thumb_output_function_prologue PARAMS ((FILE *, Hint));
127 static int arm_comp_type_attributes PARAMS ((tree, tree));
128 static void arm_set_default_type_attributes PARAMS ((tree));
129 static int arm_adjust_cost PARAMS ((rtx, rtx, rtx, int));
130 static int count_insns_for_constant PARAMS ((Hint, int));
131 static int arm_get_strip_length PARAMS ((int));
132 static bool arm_function_ok_for_sibcall PARAMS ((tree, tree));
133 static void arm_internal_label PARAMS ((FILE *, Ccstar, Ulong));
134 static void arm_output_mi_thunk PARAMS ((FILE *, tree, Hint, Hint, tree));
135 static int arm_rtx_costs_1 PARAMS ((rtx, enum rtx_code, enum rtx_code));
136 static bool arm_rtx_costs PARAMS ((rtx, int, int, int *));
137 static int arm_address_cost PARAMS ((rtx));
138 static bool arm_memory_load_p PARAMS ((rtx));
139 static bool arm_cirrus_insn_p PARAMS ((rtx));
140 static void cirrus_reorg PARAMS ((rtx));
141 #ifdef OBJECT_FORMAT_ELF
142 static void arm_elf_asm_named_section PARAMS ((Ccstar, unsigned int));
143 #endif
144 #ifndef ARM_PE
145 static void arm_encode_section_info PARAMS ((tree, int));
146 #endif
147 #ifdef AOF_ASSEMBLER
148 static void aof_globalize_label PARAMS ((FILE *, Ccstar));
149 #endif
151 #undef Hint
152 #undef Mmode
153 #undef Ulong
154 #undef Ccstar
156 /* Initialize the GCC target structure. */
157 #ifdef TARGET_DLLIMPORT_DECL_ATTRIBUTES
158 #undef TARGET_MERGE_DECL_ATTRIBUTES
159 #define TARGET_MERGE_DECL_ATTRIBUTES merge_dllimport_decl_attributes
160 #endif
162 #undef TARGET_ATTRIBUTE_TABLE
163 #define TARGET_ATTRIBUTE_TABLE arm_attribute_table
165 #ifdef AOF_ASSEMBLER
166 #undef TARGET_ASM_BYTE_OP
167 #define TARGET_ASM_BYTE_OP "\tDCB\t"
168 #undef TARGET_ASM_ALIGNED_HI_OP
169 #define TARGET_ASM_ALIGNED_HI_OP "\tDCW\t"
170 #undef TARGET_ASM_ALIGNED_SI_OP
171 #define TARGET_ASM_ALIGNED_SI_OP "\tDCD\t"
172 #undef TARGET_ASM_GLOBALIZE_LABEL
173 #define TARGET_ASM_GLOBALIZE_LABEL aof_globalize_label
174 #else
175 #undef TARGET_ASM_ALIGNED_SI_OP
176 #define TARGET_ASM_ALIGNED_SI_OP NULL
177 #undef TARGET_ASM_INTEGER
178 #define TARGET_ASM_INTEGER arm_assemble_integer
179 #endif
181 #undef TARGET_ASM_FUNCTION_PROLOGUE
182 #define TARGET_ASM_FUNCTION_PROLOGUE arm_output_function_prologue
184 #undef TARGET_ASM_FUNCTION_EPILOGUE
185 #define TARGET_ASM_FUNCTION_EPILOGUE arm_output_function_epilogue
187 #undef TARGET_COMP_TYPE_ATTRIBUTES
188 #define TARGET_COMP_TYPE_ATTRIBUTES arm_comp_type_attributes
190 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
191 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES arm_set_default_type_attributes
193 #undef TARGET_SCHED_ADJUST_COST
194 #define TARGET_SCHED_ADJUST_COST arm_adjust_cost
196 #undef TARGET_ENCODE_SECTION_INFO
197 #ifdef ARM_PE
198 #define TARGET_ENCODE_SECTION_INFO arm_pe_encode_section_info
199 #else
200 #define TARGET_ENCODE_SECTION_INFO arm_encode_section_info
201 #endif
203 #undef TARGET_STRIP_NAME_ENCODING
204 #define TARGET_STRIP_NAME_ENCODING arm_strip_name_encoding
206 #undef TARGET_ASM_INTERNAL_LABEL
207 #define TARGET_ASM_INTERNAL_LABEL arm_internal_label
209 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
210 #define TARGET_FUNCTION_OK_FOR_SIBCALL arm_function_ok_for_sibcall
212 #undef TARGET_ASM_OUTPUT_MI_THUNK
213 #define TARGET_ASM_OUTPUT_MI_THUNK arm_output_mi_thunk
214 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
215 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
217 #undef TARGET_RTX_COSTS
218 #define TARGET_RTX_COSTS arm_rtx_costs
219 #undef TARGET_ADDRESS_COST
220 #define TARGET_ADDRESS_COST arm_address_cost
222 struct gcc_target targetm = TARGET_INITIALIZER;
224 /* Obstack for minipool constant handling. */
225 static struct obstack minipool_obstack;
226 static char * minipool_startobj;
228 /* The maximum number of insns skipped which
229 will be conditionalised if possible. */
230 static int max_insns_skipped = 5;
232 extern FILE * asm_out_file;
234 /* True if we are currently building a constant table. */
235 int making_const_table;
237 /* Define the information needed to generate branch insns. This is
238 stored from the compare operation. */
239 rtx arm_compare_op0, arm_compare_op1;
241 /* What type of floating point are we tuning for? */
242 enum fputype arm_fpu_tune;
244 /* What type of floating point instructions are available? */
245 enum fputype arm_fpu_arch;
247 /* What program mode is the cpu running in? 26-bit mode or 32-bit mode. */
248 enum prog_mode_type arm_prgmode;
250 /* Set by the -mfp=... option. */
251 const char * target_fp_name = NULL;
253 /* Used to parse -mstructure_size_boundary command line option. */
254 const char * structure_size_string = NULL;
255 int arm_structure_size_boundary = DEFAULT_STRUCTURE_SIZE_BOUNDARY;
257 /* Bit values used to identify processor capabilities. */
258 #define FL_CO_PROC (1 << 0) /* Has external co-processor bus */
259 #define FL_FAST_MULT (1 << 1) /* Fast multiply */
260 #define FL_MODE26 (1 << 2) /* 26-bit mode support */
261 #define FL_MODE32 (1 << 3) /* 32-bit mode support */
262 #define FL_ARCH4 (1 << 4) /* Architecture rel 4 */
263 #define FL_ARCH5 (1 << 5) /* Architecture rel 5 */
264 #define FL_THUMB (1 << 6) /* Thumb aware */
265 #define FL_LDSCHED (1 << 7) /* Load scheduling necessary */
266 #define FL_STRONG (1 << 8) /* StrongARM */
267 #define FL_ARCH5E (1 << 9) /* DSP extensions to v5 */
268 #define FL_XSCALE (1 << 10) /* XScale */
269 #define FL_CIRRUS (1 << 11) /* Cirrus/DSP. */
271 /* The bits in this mask specify which
272 instructions we are allowed to generate. */
273 static unsigned long insn_flags = 0;
275 /* The bits in this mask specify which instruction scheduling options should
276 be used. Note - there is an overlap with the FL_FAST_MULT. For some
277 hardware we want to be able to generate the multiply instructions, but to
278 tune as if they were not present in the architecture. */
279 static unsigned long tune_flags = 0;
281 /* The following are used in the arm.md file as equivalents to bits
282 in the above two flag variables. */
284 /* Nonzero if this is an "M" variant of the processor. */
285 int arm_fast_multiply = 0;
287 /* Nonzero if this chip supports the ARM Architecture 4 extensions. */
288 int arm_arch4 = 0;
290 /* Nonzero if this chip supports the ARM Architecture 5 extensions. */
291 int arm_arch5 = 0;
293 /* Nonzero if this chip supports the ARM Architecture 5E extensions. */
294 int arm_arch5e = 0;
296 /* Nonzero if this chip can benefit from load scheduling. */
297 int arm_ld_sched = 0;
299 /* Nonzero if this chip is a StrongARM. */
300 int arm_is_strong = 0;
302 /* Nonzero if this chip is an XScale. */
303 int arm_is_xscale = 0;
305 /* Nonzero if this chip is an ARM6 or an ARM7. */
306 int arm_is_6_or_7 = 0;
308 /* Nonzero if this chip is a Cirrus/DSP. */
309 int arm_is_cirrus = 0;
311 /* Nonzero if generating Thumb instructions. */
312 int thumb_code = 0;
314 /* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
315 must report the mode of the memory reference from PRINT_OPERAND to
316 PRINT_OPERAND_ADDRESS. */
317 enum machine_mode output_memory_reference_mode;
319 /* The register number to be used for the PIC offset register. */
320 const char * arm_pic_register_string = NULL;
321 int arm_pic_register = INVALID_REGNUM;
323 /* Set to 1 when a return insn is output, this means that the epilogue
324 is not needed. */
325 int return_used_this_function;
327 /* Set to 1 after arm_reorg has started. Reset to start at the start of
328 the next function. */
329 static int after_arm_reorg = 0;
331 /* The maximum number of insns to be used when loading a constant. */
332 static int arm_constant_limit = 3;
334 /* For an explanation of these variables, see final_prescan_insn below. */
335 int arm_ccfsm_state;
336 enum arm_cond_code arm_current_cc;
337 rtx arm_target_insn;
338 int arm_target_label;
340 /* The condition codes of the ARM, and the inverse function. */
341 static const char * const arm_condition_codes[] =
343 "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
344 "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
347 #define streq(string1, string2) (strcmp (string1, string2) == 0)
349 /* Initialization code. */
351 struct processors
353 const char *const name;
354 const unsigned long flags;
357 /* Not all of these give usefully different compilation alternatives,
358 but there is no simple way of generalizing them. */
359 static const struct processors all_cores[] =
361 /* ARM Cores */
363 {"arm2", FL_CO_PROC | FL_MODE26 },
364 {"arm250", FL_CO_PROC | FL_MODE26 },
365 {"arm3", FL_CO_PROC | FL_MODE26 },
366 {"arm6", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
367 {"arm60", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
368 {"arm600", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
369 {"arm610", FL_MODE26 | FL_MODE32 },
370 {"arm620", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
371 {"arm7", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
372 /* arm7m doesn't exist on its own, but only with D, (and I), but
373 those don't alter the code, so arm7m is sometimes used. */
374 {"arm7m", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
375 {"arm7d", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
376 {"arm7dm", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
377 {"arm7di", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
378 {"arm7dmi", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
379 {"arm70", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
380 {"arm700", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
381 {"arm700i", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
382 {"arm710", FL_MODE26 | FL_MODE32 },
383 {"arm710t", FL_MODE26 | FL_MODE32 | FL_THUMB },
384 {"arm720", FL_MODE26 | FL_MODE32 },
385 {"arm720t", FL_MODE26 | FL_MODE32 | FL_THUMB },
386 {"arm740t", FL_MODE26 | FL_MODE32 | FL_THUMB },
387 {"arm710c", FL_MODE26 | FL_MODE32 },
388 {"arm7100", FL_MODE26 | FL_MODE32 },
389 {"arm7500", FL_MODE26 | FL_MODE32 },
390 /* Doesn't have an external co-proc, but does have embedded fpa. */
391 {"arm7500fe", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
392 {"arm7tdmi", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
393 {"arm8", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
394 {"arm810", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
395 {"arm9", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
396 {"arm920", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
397 {"arm920t", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
398 {"arm940t", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
399 {"arm9tdmi", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
400 {"arm9e", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
401 {"ep9312", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_CIRRUS },
402 {"strongarm", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
403 {"strongarm110", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
404 {"strongarm1100", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
405 {"strongarm1110", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
406 {"arm10tdmi", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_ARCH5 },
407 {"arm1020t", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_ARCH5 },
408 {"xscale", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_STRONG | FL_ARCH5 | FL_ARCH5E | FL_XSCALE },
410 {NULL, 0}
413 static const struct processors all_architectures[] =
415 /* ARM Architectures */
417 { "armv2", FL_CO_PROC | FL_MODE26 },
418 { "armv2a", FL_CO_PROC | FL_MODE26 },
419 { "armv3", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
420 { "armv3m", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
421 { "armv4", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 },
422 /* Strictly, FL_MODE26 is a permitted option for v4t, but there are no
423 implementations that support it, so we will leave it out for now. */
424 { "armv4t", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
425 { "armv5", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
426 { "armv5t", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
427 { "armv5te", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 | FL_ARCH5E },
428 { "ep9312", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_CIRRUS },
429 { NULL, 0 }
432 /* This is a magic stucture. The 'string' field is magically filled in
433 with a pointer to the value specified by the user on the command line
434 assuming that the user has specified such a value. */
436 struct arm_cpu_select arm_select[] =
438 /* string name processors */
439 { NULL, "-mcpu=", all_cores },
440 { NULL, "-march=", all_architectures },
441 { NULL, "-mtune=", all_cores }
444 /* Return the number of bits set in VALUE. */
445 static unsigned
446 bit_count (value)
447 unsigned long value;
449 unsigned long count = 0;
451 while (value)
453 count++;
454 value &= value - 1; /* Clear the least-significant set bit. */
457 return count;
460 /* Fix up any incompatible options that the user has specified.
461 This has now turned into a maze. */
462 void
463 arm_override_options ()
465 unsigned i;
467 /* Set up the flags based on the cpu/architecture selected by the user. */
468 for (i = ARRAY_SIZE (arm_select); i--;)
470 struct arm_cpu_select * ptr = arm_select + i;
472 if (ptr->string != NULL && ptr->string[0] != '\0')
474 const struct processors * sel;
476 for (sel = ptr->processors; sel->name != NULL; sel++)
477 if (streq (ptr->string, sel->name))
479 if (i == 2)
480 tune_flags = sel->flags;
481 else
483 /* If we have been given an architecture and a processor
484 make sure that they are compatible. We only generate
485 a warning though, and we prefer the CPU over the
486 architecture. */
487 if (insn_flags != 0 && (insn_flags ^ sel->flags))
488 warning ("switch -mcpu=%s conflicts with -march= switch",
489 ptr->string);
491 insn_flags = sel->flags;
494 break;
497 if (sel->name == NULL)
498 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
502 /* If the user did not specify a processor, choose one for them. */
503 if (insn_flags == 0)
505 const struct processors * sel;
506 unsigned int sought;
507 static const struct cpu_default
509 const int cpu;
510 const char *const name;
512 cpu_defaults[] =
514 { TARGET_CPU_arm2, "arm2" },
515 { TARGET_CPU_arm6, "arm6" },
516 { TARGET_CPU_arm610, "arm610" },
517 { TARGET_CPU_arm710, "arm710" },
518 { TARGET_CPU_arm7m, "arm7m" },
519 { TARGET_CPU_arm7500fe, "arm7500fe" },
520 { TARGET_CPU_arm7tdmi, "arm7tdmi" },
521 { TARGET_CPU_arm8, "arm8" },
522 { TARGET_CPU_arm810, "arm810" },
523 { TARGET_CPU_arm9, "arm9" },
524 { TARGET_CPU_strongarm, "strongarm" },
525 { TARGET_CPU_xscale, "xscale" },
526 { TARGET_CPU_ep9312, "ep9312" },
527 { TARGET_CPU_generic, "arm" },
528 { 0, 0 }
530 const struct cpu_default * def;
532 /* Find the default. */
533 for (def = cpu_defaults; def->name; def++)
534 if (def->cpu == TARGET_CPU_DEFAULT)
535 break;
537 /* Make sure we found the default CPU. */
538 if (def->name == NULL)
539 abort ();
541 /* Find the default CPU's flags. */
542 for (sel = all_cores; sel->name != NULL; sel++)
543 if (streq (def->name, sel->name))
544 break;
546 if (sel->name == NULL)
547 abort ();
549 insn_flags = sel->flags;
551 /* Now check to see if the user has specified some command line
552 switch that require certain abilities from the cpu. */
553 sought = 0;
555 if (TARGET_INTERWORK || TARGET_THUMB)
557 sought |= (FL_THUMB | FL_MODE32);
559 /* Force apcs-32 to be used for interworking. */
560 target_flags |= ARM_FLAG_APCS_32;
562 /* There are no ARM processors that support both APCS-26 and
563 interworking. Therefore we force FL_MODE26 to be removed
564 from insn_flags here (if it was set), so that the search
565 below will always be able to find a compatible processor. */
566 insn_flags &= ~FL_MODE26;
568 else if (!TARGET_APCS_32)
569 sought |= FL_MODE26;
571 if (sought != 0 && ((sought & insn_flags) != sought))
573 /* Try to locate a CPU type that supports all of the abilities
574 of the default CPU, plus the extra abilities requested by
575 the user. */
576 for (sel = all_cores; sel->name != NULL; sel++)
577 if ((sel->flags & sought) == (sought | insn_flags))
578 break;
580 if (sel->name == NULL)
582 unsigned current_bit_count = 0;
583 const struct processors * best_fit = NULL;
585 /* Ideally we would like to issue an error message here
586 saying that it was not possible to find a CPU compatible
587 with the default CPU, but which also supports the command
588 line options specified by the programmer, and so they
589 ought to use the -mcpu=<name> command line option to
590 override the default CPU type.
592 Unfortunately this does not work with multilibing. We
593 need to be able to support multilibs for -mapcs-26 and for
594 -mthumb-interwork and there is no CPU that can support both
595 options. Instead if we cannot find a cpu that has both the
596 characteristics of the default cpu and the given command line
597 options we scan the array again looking for a best match. */
598 for (sel = all_cores; sel->name != NULL; sel++)
599 if ((sel->flags & sought) == sought)
601 unsigned count;
603 count = bit_count (sel->flags & insn_flags);
605 if (count >= current_bit_count)
607 best_fit = sel;
608 current_bit_count = count;
612 if (best_fit == NULL)
613 abort ();
614 else
615 sel = best_fit;
618 insn_flags = sel->flags;
622 /* If tuning has not been specified, tune for whichever processor or
623 architecture has been selected. */
624 if (tune_flags == 0)
625 tune_flags = insn_flags;
627 /* Make sure that the processor choice does not conflict with any of the
628 other command line choices. */
629 if (TARGET_APCS_32 && !(insn_flags & FL_MODE32))
631 /* If APCS-32 was not the default then it must have been set by the
632 user, so issue a warning message. If the user has specified
633 "-mapcs-32 -mcpu=arm2" then we loose here. */
634 if ((TARGET_DEFAULT & ARM_FLAG_APCS_32) == 0)
635 warning ("target CPU does not support APCS-32" );
636 target_flags &= ~ARM_FLAG_APCS_32;
638 else if (!TARGET_APCS_32 && !(insn_flags & FL_MODE26))
640 warning ("target CPU does not support APCS-26" );
641 target_flags |= ARM_FLAG_APCS_32;
644 if (TARGET_INTERWORK && !(insn_flags & FL_THUMB))
646 warning ("target CPU does not support interworking" );
647 target_flags &= ~ARM_FLAG_INTERWORK;
650 if (TARGET_THUMB && !(insn_flags & FL_THUMB))
652 warning ("target CPU does not support THUMB instructions");
653 target_flags &= ~ARM_FLAG_THUMB;
656 if (TARGET_APCS_FRAME && TARGET_THUMB)
658 /* warning ("ignoring -mapcs-frame because -mthumb was used"); */
659 target_flags &= ~ARM_FLAG_APCS_FRAME;
662 /* TARGET_BACKTRACE calls leaf_function_p, which causes a crash if done
663 from here where no function is being compiled currently. */
664 if ((target_flags & (THUMB_FLAG_LEAF_BACKTRACE | THUMB_FLAG_BACKTRACE))
665 && TARGET_ARM)
666 warning ("enabling backtrace support is only meaningful when compiling for the Thumb");
668 if (TARGET_ARM && TARGET_CALLEE_INTERWORKING)
669 warning ("enabling callee interworking support is only meaningful when compiling for the Thumb");
671 if (TARGET_ARM && TARGET_CALLER_INTERWORKING)
672 warning ("enabling caller interworking support is only meaningful when compiling for the Thumb");
674 /* If interworking is enabled then APCS-32 must be selected as well. */
675 if (TARGET_INTERWORK)
677 if (!TARGET_APCS_32)
678 warning ("interworking forces APCS-32 to be used" );
679 target_flags |= ARM_FLAG_APCS_32;
682 if (TARGET_APCS_STACK && !TARGET_APCS_FRAME)
684 warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
685 target_flags |= ARM_FLAG_APCS_FRAME;
688 if (TARGET_POKE_FUNCTION_NAME)
689 target_flags |= ARM_FLAG_APCS_FRAME;
691 if (TARGET_APCS_REENT && flag_pic)
692 error ("-fpic and -mapcs-reent are incompatible");
694 if (TARGET_APCS_REENT)
695 warning ("APCS reentrant code not supported. Ignored");
697 /* If this target is normally configured to use APCS frames, warn if they
698 are turned off and debugging is turned on. */
699 if (TARGET_ARM
700 && write_symbols != NO_DEBUG
701 && !TARGET_APCS_FRAME
702 && (TARGET_DEFAULT & ARM_FLAG_APCS_FRAME))
703 warning ("-g with -mno-apcs-frame may not give sensible debugging");
705 /* If stack checking is disabled, we can use r10 as the PIC register,
706 which keeps r9 available. */
707 if (flag_pic)
708 arm_pic_register = TARGET_APCS_STACK ? 9 : 10;
710 if (TARGET_APCS_FLOAT)
711 warning ("passing floating point arguments in fp regs not yet supported");
713 /* Initialize boolean versions of the flags, for use in the arm.md file. */
714 arm_fast_multiply = (insn_flags & FL_FAST_MULT) != 0;
715 arm_arch4 = (insn_flags & FL_ARCH4) != 0;
716 arm_arch5 = (insn_flags & FL_ARCH5) != 0;
717 arm_arch5e = (insn_flags & FL_ARCH5E) != 0;
718 arm_is_xscale = (insn_flags & FL_XSCALE) != 0;
720 arm_ld_sched = (tune_flags & FL_LDSCHED) != 0;
721 arm_is_strong = (tune_flags & FL_STRONG) != 0;
722 thumb_code = (TARGET_ARM == 0);
723 arm_is_6_or_7 = (((tune_flags & (FL_MODE26 | FL_MODE32))
724 && !(tune_flags & FL_ARCH4))) != 0;
725 arm_is_cirrus = (tune_flags & FL_CIRRUS) != 0;
727 if (arm_is_cirrus)
729 arm_fpu_tune = FPUTYPE_MAVERICK;
731 /* Ignore -mhard-float if -mcpu=ep9312. */
732 if (TARGET_HARD_FLOAT)
733 target_flags ^= ARM_FLAG_SOFT_FLOAT;
735 else
736 /* Default value for floating point code... if no co-processor
737 bus, then schedule for emulated floating point. Otherwise,
738 assume the user has an FPA.
739 Note: this does not prevent use of floating point instructions,
740 -msoft-float does that. */
741 arm_fpu_tune = (tune_flags & FL_CO_PROC) ? FPUTYPE_FPA : FPUTYPE_FPA_EMU3;
743 if (target_fp_name)
745 if (streq (target_fp_name, "2"))
746 arm_fpu_arch = FPUTYPE_FPA_EMU2;
747 else if (streq (target_fp_name, "3"))
748 arm_fpu_arch = FPUTYPE_FPA_EMU3;
749 else
750 error ("invalid floating point emulation option: -mfpe-%s",
751 target_fp_name);
753 else
754 arm_fpu_arch = FPUTYPE_DEFAULT;
756 if (TARGET_FPE)
758 if (arm_fpu_tune == FPUTYPE_FPA_EMU3)
759 arm_fpu_tune = FPUTYPE_FPA_EMU2;
760 else if (arm_fpu_tune == FPUTYPE_MAVERICK)
761 warning ("-mfpe switch not supported by ep9312 target cpu - ignored.");
762 else if (arm_fpu_tune != FPUTYPE_FPA)
763 arm_fpu_tune = FPUTYPE_FPA_EMU2;
766 /* For arm2/3 there is no need to do any scheduling if there is only
767 a floating point emulator, or we are doing software floating-point. */
768 if ((TARGET_SOFT_FLOAT || arm_fpu_tune != FPUTYPE_FPA)
769 && (tune_flags & FL_MODE32) == 0)
770 flag_schedule_insns = flag_schedule_insns_after_reload = 0;
772 arm_prgmode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;
774 if (structure_size_string != NULL)
776 int size = strtol (structure_size_string, NULL, 0);
778 if (size == 8 || size == 32)
779 arm_structure_size_boundary = size;
780 else
781 warning ("structure size boundary can only be set to 8 or 32");
784 if (arm_pic_register_string != NULL)
786 int pic_register = decode_reg_name (arm_pic_register_string);
788 if (!flag_pic)
789 warning ("-mpic-register= is useless without -fpic");
791 /* Prevent the user from choosing an obviously stupid PIC register. */
792 else if (pic_register < 0 || call_used_regs[pic_register]
793 || pic_register == HARD_FRAME_POINTER_REGNUM
794 || pic_register == STACK_POINTER_REGNUM
795 || pic_register >= PC_REGNUM)
796 error ("unable to use '%s' for PIC register", arm_pic_register_string);
797 else
798 arm_pic_register = pic_register;
801 if (TARGET_THUMB && flag_schedule_insns)
803 /* Don't warn since it's on by default in -O2. */
804 flag_schedule_insns = 0;
807 /* If optimizing for space, don't synthesize constants.
808 For processors with load scheduling, it never costs more than 2 cycles
809 to load a constant, and the load scheduler may well reduce that to 1. */
810 if (optimize_size || (tune_flags & FL_LDSCHED))
811 arm_constant_limit = 1;
813 if (arm_is_xscale)
814 arm_constant_limit = 2;
816 /* If optimizing for size, bump the number of instructions that we
817 are prepared to conditionally execute (even on a StrongARM).
818 Otherwise for the StrongARM, which has early execution of branches,
819 a sequence that is worth skipping is shorter. */
820 if (optimize_size)
821 max_insns_skipped = 6;
822 else if (arm_is_strong)
823 max_insns_skipped = 3;
825 /* Register global variables with the garbage collector. */
826 arm_add_gc_roots ();
829 static void
830 arm_add_gc_roots ()
832 gcc_obstack_init(&minipool_obstack);
833 minipool_startobj = (char *) obstack_alloc (&minipool_obstack, 0);
836 /* A table of known ARM exception types.
837 For use with the interrupt function attribute. */
839 typedef struct
841 const char *const arg;
842 const unsigned long return_value;
844 isr_attribute_arg;
846 static const isr_attribute_arg isr_attribute_args [] =
848 { "IRQ", ARM_FT_ISR },
849 { "irq", ARM_FT_ISR },
850 { "FIQ", ARM_FT_FIQ },
851 { "fiq", ARM_FT_FIQ },
852 { "ABORT", ARM_FT_ISR },
853 { "abort", ARM_FT_ISR },
854 { "ABORT", ARM_FT_ISR },
855 { "abort", ARM_FT_ISR },
856 { "UNDEF", ARM_FT_EXCEPTION },
857 { "undef", ARM_FT_EXCEPTION },
858 { "SWI", ARM_FT_EXCEPTION },
859 { "swi", ARM_FT_EXCEPTION },
860 { NULL, ARM_FT_NORMAL }
863 /* Returns the (interrupt) function type of the current
864 function, or ARM_FT_UNKNOWN if the type cannot be determined. */
866 static unsigned long
867 arm_isr_value (argument)
868 tree argument;
870 const isr_attribute_arg * ptr;
871 const char * arg;
873 /* No argument - default to IRQ. */
874 if (argument == NULL_TREE)
875 return ARM_FT_ISR;
877 /* Get the value of the argument. */
878 if (TREE_VALUE (argument) == NULL_TREE
879 || TREE_CODE (TREE_VALUE (argument)) != STRING_CST)
880 return ARM_FT_UNKNOWN;
882 arg = TREE_STRING_POINTER (TREE_VALUE (argument));
884 /* Check it against the list of known arguments. */
885 for (ptr = isr_attribute_args; ptr->arg != NULL; ptr ++)
886 if (streq (arg, ptr->arg))
887 return ptr->return_value;
889 /* An unrecognized interrupt type. */
890 return ARM_FT_UNKNOWN;
893 /* Computes the type of the current function. */
895 static unsigned long
896 arm_compute_func_type ()
898 unsigned long type = ARM_FT_UNKNOWN;
899 tree a;
900 tree attr;
902 if (TREE_CODE (current_function_decl) != FUNCTION_DECL)
903 abort ();
905 /* Decide if the current function is volatile. Such functions
906 never return, and many memory cycles can be saved by not storing
907 register values that will never be needed again. This optimization
908 was added to speed up context switching in a kernel application. */
909 if (optimize > 0
910 && current_function_nothrow
911 && TREE_THIS_VOLATILE (current_function_decl))
912 type |= ARM_FT_VOLATILE;
914 if (current_function_needs_context)
915 type |= ARM_FT_NESTED;
917 attr = DECL_ATTRIBUTES (current_function_decl);
919 a = lookup_attribute ("naked", attr);
920 if (a != NULL_TREE)
921 type |= ARM_FT_NAKED;
923 if (cfun->machine->eh_epilogue_sp_ofs != NULL_RTX)
924 type |= ARM_FT_EXCEPTION_HANDLER;
925 else
927 a = lookup_attribute ("isr", attr);
928 if (a == NULL_TREE)
929 a = lookup_attribute ("interrupt", attr);
931 if (a == NULL_TREE)
932 type |= TARGET_INTERWORK ? ARM_FT_INTERWORKED : ARM_FT_NORMAL;
933 else
934 type |= arm_isr_value (TREE_VALUE (a));
937 return type;
940 /* Returns the type of the current function. */
942 unsigned long
943 arm_current_func_type ()
945 if (ARM_FUNC_TYPE (cfun->machine->func_type) == ARM_FT_UNKNOWN)
946 cfun->machine->func_type = arm_compute_func_type ();
948 return cfun->machine->func_type;
951 /* Return 1 if it is possible to return using a single instruction. */
954 use_return_insn (iscond)
955 int iscond;
957 int regno;
958 unsigned int func_type;
959 unsigned long saved_int_regs;
961 /* Never use a return instruction before reload has run. */
962 if (!reload_completed)
963 return 0;
965 func_type = arm_current_func_type ();
967 /* Naked functions and volatile functions need special
968 consideration. */
969 if (func_type & (ARM_FT_VOLATILE | ARM_FT_NAKED))
970 return 0;
972 /* So do interrupt functions that use the frame pointer. */
973 if (IS_INTERRUPT (func_type) && frame_pointer_needed)
974 return 0;
976 /* As do variadic functions. */
977 if (current_function_pretend_args_size
978 || cfun->machine->uses_anonymous_args
979 /* Of if the function calls __builtin_eh_return () */
980 || ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER
981 /* Or if there is no frame pointer and there is a stack adjustment. */
982 || ((arm_get_frame_size () + current_function_outgoing_args_size != 0)
983 && !frame_pointer_needed))
984 return 0;
986 saved_int_regs = arm_compute_save_reg_mask ();
988 /* Can't be done if interworking with Thumb, and any registers have been
989 stacked. */
990 if (TARGET_INTERWORK && saved_int_regs != 0)
991 return 0;
993 /* On StrongARM, conditional returns are expensive if they aren't
994 taken and multiple registers have been stacked. */
995 if (iscond && arm_is_strong)
997 /* Conditional return when just the LR is stored is a simple
998 conditional-load instruction, that's not expensive. */
999 if (saved_int_regs != 0 && saved_int_regs != (1 << LR_REGNUM))
1000 return 0;
1002 if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
1003 return 0;
1006 /* If there are saved registers but the LR isn't saved, then we need
1007 two instructions for the return. */
1008 if (saved_int_regs && !(saved_int_regs & (1 << LR_REGNUM)))
1009 return 0;
1011 /* Can't be done if any of the FPA regs are pushed,
1012 since this also requires an insn. */
1013 if (TARGET_HARD_FLOAT)
1014 for (regno = FIRST_ARM_FP_REGNUM; regno <= LAST_ARM_FP_REGNUM; regno++)
1015 if (regs_ever_live[regno] && !call_used_regs[regno])
1016 return 0;
1018 return 1;
1021 /* Return TRUE if int I is a valid immediate ARM constant. */
1024 const_ok_for_arm (i)
1025 HOST_WIDE_INT i;
1027 unsigned HOST_WIDE_INT mask = ~(unsigned HOST_WIDE_INT)0xFF;
1029 /* For machines with >32 bit HOST_WIDE_INT, the bits above bit 31 must
1030 be all zero, or all one. */
1031 if ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff) != 0
1032 && ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff)
1033 != ((~(unsigned HOST_WIDE_INT) 0)
1034 & ~(unsigned HOST_WIDE_INT) 0xffffffff)))
1035 return FALSE;
1037 /* Fast return for 0 and powers of 2 */
1038 if ((i & (i - 1)) == 0)
1039 return TRUE;
1043 if ((i & mask & (unsigned HOST_WIDE_INT) 0xffffffff) == 0)
1044 return TRUE;
1045 mask =
1046 (mask << 2) | ((mask & (unsigned HOST_WIDE_INT) 0xffffffff)
1047 >> (32 - 2)) | ~(unsigned HOST_WIDE_INT) 0xffffffff;
1049 while (mask != ~(unsigned HOST_WIDE_INT) 0xFF);
1051 return FALSE;
1054 /* Return true if I is a valid constant for the operation CODE. */
1055 static int
1056 const_ok_for_op (i, code)
1057 HOST_WIDE_INT i;
1058 enum rtx_code code;
1060 if (const_ok_for_arm (i))
1061 return 1;
1063 switch (code)
1065 case PLUS:
1066 return const_ok_for_arm (ARM_SIGN_EXTEND (-i));
1068 case MINUS: /* Should only occur with (MINUS I reg) => rsb */
1069 case XOR:
1070 case IOR:
1071 return 0;
1073 case AND:
1074 return const_ok_for_arm (ARM_SIGN_EXTEND (~i));
1076 default:
1077 abort ();
1081 /* Emit a sequence of insns to handle a large constant.
1082 CODE is the code of the operation required, it can be any of SET, PLUS,
1083 IOR, AND, XOR, MINUS;
1084 MODE is the mode in which the operation is being performed;
1085 VAL is the integer to operate on;
1086 SOURCE is the other operand (a register, or a null-pointer for SET);
1087 SUBTARGETS means it is safe to create scratch registers if that will
1088 either produce a simpler sequence, or we will want to cse the values.
1089 Return value is the number of insns emitted. */
1092 arm_split_constant (code, mode, val, target, source, subtargets)
1093 enum rtx_code code;
1094 enum machine_mode mode;
1095 HOST_WIDE_INT val;
1096 rtx target;
1097 rtx source;
1098 int subtargets;
1100 if (subtargets || code == SET
1101 || (GET_CODE (target) == REG && GET_CODE (source) == REG
1102 && REGNO (target) != REGNO (source)))
1104 /* After arm_reorg has been called, we can't fix up expensive
1105 constants by pushing them into memory so we must synthesize
1106 them in-line, regardless of the cost. This is only likely to
1107 be more costly on chips that have load delay slots and we are
1108 compiling without running the scheduler (so no splitting
1109 occurred before the final instruction emission).
1111 Ref: gcc -O1 -mcpu=strongarm gcc.c-torture/compile/980506-2.c
1113 if (!after_arm_reorg
1114 && (arm_gen_constant (code, mode, val, target, source, 1, 0)
1115 > arm_constant_limit + (code != SET)))
1117 if (code == SET)
1119 /* Currently SET is the only monadic value for CODE, all
1120 the rest are diadic. */
1121 emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (val)));
1122 return 1;
1124 else
1126 rtx temp = subtargets ? gen_reg_rtx (mode) : target;
1128 emit_insn (gen_rtx_SET (VOIDmode, temp, GEN_INT (val)));
1129 /* For MINUS, the value is subtracted from, since we never
1130 have subtraction of a constant. */
1131 if (code == MINUS)
1132 emit_insn (gen_rtx_SET (VOIDmode, target,
1133 gen_rtx_MINUS (mode, temp, source)));
1134 else
1135 emit_insn (gen_rtx_SET (VOIDmode, target,
1136 gen_rtx (code, mode, source, temp)));
1137 return 2;
1142 return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
1145 static int
1146 count_insns_for_constant (remainder, i)
1147 HOST_WIDE_INT remainder;
1148 int i;
1150 HOST_WIDE_INT temp1;
1151 int num_insns = 0;
1154 int end;
1156 if (i <= 0)
1157 i += 32;
1158 if (remainder & (3 << (i - 2)))
1160 end = i - 8;
1161 if (end < 0)
1162 end += 32;
1163 temp1 = remainder & ((0x0ff << end)
1164 | ((i < end) ? (0xff >> (32 - end)) : 0));
1165 remainder &= ~temp1;
1166 num_insns++;
1167 i -= 6;
1169 i -= 2;
1170 } while (remainder);
1171 return num_insns;
1174 /* As above, but extra parameter GENERATE which, if clear, suppresses
1175 RTL generation. */
1177 static int
1178 arm_gen_constant (code, mode, val, target, source, subtargets, generate)
1179 enum rtx_code code;
1180 enum machine_mode mode;
1181 HOST_WIDE_INT val;
1182 rtx target;
1183 rtx source;
1184 int subtargets;
1185 int generate;
1187 int can_invert = 0;
1188 int can_negate = 0;
1189 int can_negate_initial = 0;
1190 int can_shift = 0;
1191 int i;
1192 int num_bits_set = 0;
1193 int set_sign_bit_copies = 0;
1194 int clear_sign_bit_copies = 0;
1195 int clear_zero_bit_copies = 0;
1196 int set_zero_bit_copies = 0;
1197 int insns = 0;
1198 unsigned HOST_WIDE_INT temp1, temp2;
1199 unsigned HOST_WIDE_INT remainder = val & 0xffffffff;
1201 /* Find out which operations are safe for a given CODE. Also do a quick
1202 check for degenerate cases; these can occur when DImode operations
1203 are split. */
1204 switch (code)
1206 case SET:
1207 can_invert = 1;
1208 can_shift = 1;
1209 can_negate = 1;
1210 break;
1212 case PLUS:
1213 can_negate = 1;
1214 can_negate_initial = 1;
1215 break;
1217 case IOR:
1218 if (remainder == 0xffffffff)
1220 if (generate)
1221 emit_insn (gen_rtx_SET (VOIDmode, target,
1222 GEN_INT (ARM_SIGN_EXTEND (val))));
1223 return 1;
1225 if (remainder == 0)
1227 if (reload_completed && rtx_equal_p (target, source))
1228 return 0;
1229 if (generate)
1230 emit_insn (gen_rtx_SET (VOIDmode, target, source));
1231 return 1;
1233 break;
1235 case AND:
1236 if (remainder == 0)
1238 if (generate)
1239 emit_insn (gen_rtx_SET (VOIDmode, target, const0_rtx));
1240 return 1;
1242 if (remainder == 0xffffffff)
1244 if (reload_completed && rtx_equal_p (target, source))
1245 return 0;
1246 if (generate)
1247 emit_insn (gen_rtx_SET (VOIDmode, target, source));
1248 return 1;
1250 can_invert = 1;
1251 break;
1253 case XOR:
1254 if (remainder == 0)
1256 if (reload_completed && rtx_equal_p (target, source))
1257 return 0;
1258 if (generate)
1259 emit_insn (gen_rtx_SET (VOIDmode, target, source));
1260 return 1;
1262 if (remainder == 0xffffffff)
1264 if (generate)
1265 emit_insn (gen_rtx_SET (VOIDmode, target,
1266 gen_rtx_NOT (mode, source)));
1267 return 1;
1270 /* We don't know how to handle this yet below. */
1271 abort ();
1273 case MINUS:
1274 /* We treat MINUS as (val - source), since (source - val) is always
1275 passed as (source + (-val)). */
1276 if (remainder == 0)
1278 if (generate)
1279 emit_insn (gen_rtx_SET (VOIDmode, target,
1280 gen_rtx_NEG (mode, source)));
1281 return 1;
1283 if (const_ok_for_arm (val))
1285 if (generate)
1286 emit_insn (gen_rtx_SET (VOIDmode, target,
1287 gen_rtx_MINUS (mode, GEN_INT (val),
1288 source)));
1289 return 1;
1291 can_negate = 1;
1293 break;
1295 default:
1296 abort ();
1299 /* If we can do it in one insn get out quickly. */
1300 if (const_ok_for_arm (val)
1301 || (can_negate_initial && const_ok_for_arm (-val))
1302 || (can_invert && const_ok_for_arm (~val)))
1304 if (generate)
1305 emit_insn (gen_rtx_SET (VOIDmode, target,
1306 (source ? gen_rtx (code, mode, source,
1307 GEN_INT (val))
1308 : GEN_INT (val))));
1309 return 1;
1312 /* Calculate a few attributes that may be useful for specific
1313 optimizations. */
1314 for (i = 31; i >= 0; i--)
1316 if ((remainder & (1 << i)) == 0)
1317 clear_sign_bit_copies++;
1318 else
1319 break;
1322 for (i = 31; i >= 0; i--)
1324 if ((remainder & (1 << i)) != 0)
1325 set_sign_bit_copies++;
1326 else
1327 break;
1330 for (i = 0; i <= 31; i++)
1332 if ((remainder & (1 << i)) == 0)
1333 clear_zero_bit_copies++;
1334 else
1335 break;
1338 for (i = 0; i <= 31; i++)
1340 if ((remainder & (1 << i)) != 0)
1341 set_zero_bit_copies++;
1342 else
1343 break;
1346 switch (code)
1348 case SET:
1349 /* See if we can do this by sign_extending a constant that is known
1350 to be negative. This is a good, way of doing it, since the shift
1351 may well merge into a subsequent insn. */
1352 if (set_sign_bit_copies > 1)
1354 if (const_ok_for_arm
1355 (temp1 = ARM_SIGN_EXTEND (remainder
1356 << (set_sign_bit_copies - 1))))
1358 if (generate)
1360 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
1361 emit_insn (gen_rtx_SET (VOIDmode, new_src,
1362 GEN_INT (temp1)));
1363 emit_insn (gen_ashrsi3 (target, new_src,
1364 GEN_INT (set_sign_bit_copies - 1)));
1366 return 2;
1368 /* For an inverted constant, we will need to set the low bits,
1369 these will be shifted out of harm's way. */
1370 temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
1371 if (const_ok_for_arm (~temp1))
1373 if (generate)
1375 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
1376 emit_insn (gen_rtx_SET (VOIDmode, new_src,
1377 GEN_INT (temp1)));
1378 emit_insn (gen_ashrsi3 (target, new_src,
1379 GEN_INT (set_sign_bit_copies - 1)));
1381 return 2;
1385 /* See if we can generate this by setting the bottom (or the top)
1386 16 bits, and then shifting these into the other half of the
1387 word. We only look for the simplest cases, to do more would cost
1388 too much. Be careful, however, not to generate this when the
1389 alternative would take fewer insns. */
1390 if (val & 0xffff0000)
1392 temp1 = remainder & 0xffff0000;
1393 temp2 = remainder & 0x0000ffff;
1395 /* Overlaps outside this range are best done using other methods. */
1396 for (i = 9; i < 24; i++)
1398 if ((((temp2 | (temp2 << i)) & 0xffffffff) == remainder)
1399 && !const_ok_for_arm (temp2))
1401 rtx new_src = (subtargets
1402 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
1403 : target);
1404 insns = arm_gen_constant (code, mode, temp2, new_src,
1405 source, subtargets, generate);
1406 source = new_src;
1407 if (generate)
1408 emit_insn (gen_rtx_SET
1409 (VOIDmode, target,
1410 gen_rtx_IOR (mode,
1411 gen_rtx_ASHIFT (mode, source,
1412 GEN_INT (i)),
1413 source)));
1414 return insns + 1;
1418 /* Don't duplicate cases already considered. */
1419 for (i = 17; i < 24; i++)
1421 if (((temp1 | (temp1 >> i)) == remainder)
1422 && !const_ok_for_arm (temp1))
1424 rtx new_src = (subtargets
1425 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
1426 : target);
1427 insns = arm_gen_constant (code, mode, temp1, new_src,
1428 source, subtargets, generate);
1429 source = new_src;
1430 if (generate)
1431 emit_insn
1432 (gen_rtx_SET (VOIDmode, target,
1433 gen_rtx_IOR
1434 (mode,
1435 gen_rtx_LSHIFTRT (mode, source,
1436 GEN_INT (i)),
1437 source)));
1438 return insns + 1;
1442 break;
1444 case IOR:
1445 case XOR:
1446 /* If we have IOR or XOR, and the constant can be loaded in a
1447 single instruction, and we can find a temporary to put it in,
1448 then this can be done in two instructions instead of 3-4. */
1449 if (subtargets
1450 /* TARGET can't be NULL if SUBTARGETS is 0 */
1451 || (reload_completed && !reg_mentioned_p (target, source)))
1453 if (const_ok_for_arm (ARM_SIGN_EXTEND (~val)))
1455 if (generate)
1457 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
1459 emit_insn (gen_rtx_SET (VOIDmode, sub, GEN_INT (val)));
1460 emit_insn (gen_rtx_SET (VOIDmode, target,
1461 gen_rtx (code, mode, source, sub)));
1463 return 2;
1467 if (code == XOR)
1468 break;
1470 if (set_sign_bit_copies > 8
1471 && (val & (-1 << (32 - set_sign_bit_copies))) == val)
1473 if (generate)
1475 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
1476 rtx shift = GEN_INT (set_sign_bit_copies);
1478 emit_insn (gen_rtx_SET (VOIDmode, sub,
1479 gen_rtx_NOT (mode,
1480 gen_rtx_ASHIFT (mode,
1481 source,
1482 shift))));
1483 emit_insn (gen_rtx_SET (VOIDmode, target,
1484 gen_rtx_NOT (mode,
1485 gen_rtx_LSHIFTRT (mode, sub,
1486 shift))));
1488 return 2;
1491 if (set_zero_bit_copies > 8
1492 && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
1494 if (generate)
1496 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
1497 rtx shift = GEN_INT (set_zero_bit_copies);
1499 emit_insn (gen_rtx_SET (VOIDmode, sub,
1500 gen_rtx_NOT (mode,
1501 gen_rtx_LSHIFTRT (mode,
1502 source,
1503 shift))));
1504 emit_insn (gen_rtx_SET (VOIDmode, target,
1505 gen_rtx_NOT (mode,
1506 gen_rtx_ASHIFT (mode, sub,
1507 shift))));
1509 return 2;
1512 if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~val)))
1514 if (generate)
1516 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
1517 emit_insn (gen_rtx_SET (VOIDmode, sub,
1518 gen_rtx_NOT (mode, source)));
1519 source = sub;
1520 if (subtargets)
1521 sub = gen_reg_rtx (mode);
1522 emit_insn (gen_rtx_SET (VOIDmode, sub,
1523 gen_rtx_AND (mode, source,
1524 GEN_INT (temp1))));
1525 emit_insn (gen_rtx_SET (VOIDmode, target,
1526 gen_rtx_NOT (mode, sub)));
1528 return 3;
1530 break;
1532 case AND:
1533 /* See if two shifts will do 2 or more insn's worth of work. */
1534 if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
1536 HOST_WIDE_INT shift_mask = ((0xffffffff
1537 << (32 - clear_sign_bit_copies))
1538 & 0xffffffff);
1540 if ((remainder | shift_mask) != 0xffffffff)
1542 if (generate)
1544 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
1545 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
1546 new_src, source, subtargets, 1);
1547 source = new_src;
1549 else
1551 rtx targ = subtargets ? NULL_RTX : target;
1552 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
1553 targ, source, subtargets, 0);
1557 if (generate)
1559 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
1560 rtx shift = GEN_INT (clear_sign_bit_copies);
1562 emit_insn (gen_ashlsi3 (new_src, source, shift));
1563 emit_insn (gen_lshrsi3 (target, new_src, shift));
1566 return insns + 2;
1569 if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
1571 HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;
1573 if ((remainder | shift_mask) != 0xffffffff)
1575 if (generate)
1577 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
1579 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
1580 new_src, source, subtargets, 1);
1581 source = new_src;
1583 else
1585 rtx targ = subtargets ? NULL_RTX : target;
1587 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
1588 targ, source, subtargets, 0);
1592 if (generate)
1594 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
1595 rtx shift = GEN_INT (clear_zero_bit_copies);
1597 emit_insn (gen_lshrsi3 (new_src, source, shift));
1598 emit_insn (gen_ashlsi3 (target, new_src, shift));
1601 return insns + 2;
1604 break;
1606 default:
1607 break;
1610 for (i = 0; i < 32; i++)
1611 if (remainder & (1 << i))
1612 num_bits_set++;
1614 if (code == AND || (can_invert && num_bits_set > 16))
1615 remainder = (~remainder) & 0xffffffff;
1616 else if (code == PLUS && num_bits_set > 16)
1617 remainder = (-remainder) & 0xffffffff;
1618 else
1620 can_invert = 0;
1621 can_negate = 0;
1624 /* Now try and find a way of doing the job in either two or three
1625 instructions.
1626 We start by looking for the largest block of zeros that are aligned on
1627 a 2-bit boundary, we then fill up the temps, wrapping around to the
1628 top of the word when we drop off the bottom.
1629 In the worst case this code should produce no more than four insns. */
1631 int best_start = 0;
1632 int best_consecutive_zeros = 0;
1634 for (i = 0; i < 32; i += 2)
1636 int consecutive_zeros = 0;
1638 if (!(remainder & (3 << i)))
1640 while ((i < 32) && !(remainder & (3 << i)))
1642 consecutive_zeros += 2;
1643 i += 2;
1645 if (consecutive_zeros > best_consecutive_zeros)
1647 best_consecutive_zeros = consecutive_zeros;
1648 best_start = i - consecutive_zeros;
1650 i -= 2;
1654 /* So long as it won't require any more insns to do so, it's
1655 desirable to emit a small constant (in bits 0...9) in the last
1656 insn. This way there is more chance that it can be combined with
1657 a later addressing insn to form a pre-indexed load or store
1658 operation. Consider:
1660 *((volatile int *)0xe0000100) = 1;
1661 *((volatile int *)0xe0000110) = 2;
1663 We want this to wind up as:
1665 mov rA, #0xe0000000
1666 mov rB, #1
1667 str rB, [rA, #0x100]
1668 mov rB, #2
1669 str rB, [rA, #0x110]
1671 rather than having to synthesize both large constants from scratch.
1673 Therefore, we calculate how many insns would be required to emit
1674 the constant starting from `best_start', and also starting from
1675 zero (ie with bit 31 first to be output). If `best_start' doesn't
1676 yield a shorter sequence, we may as well use zero. */
1677 if (best_start != 0
1678 && ((((unsigned HOST_WIDE_INT) 1) << best_start) < remainder)
1679 && (count_insns_for_constant (remainder, 0) <=
1680 count_insns_for_constant (remainder, best_start)))
1681 best_start = 0;
1683 /* Now start emitting the insns. */
1684 i = best_start;
1687 int end;
1689 if (i <= 0)
1690 i += 32;
1691 if (remainder & (3 << (i - 2)))
1693 end = i - 8;
1694 if (end < 0)
1695 end += 32;
1696 temp1 = remainder & ((0x0ff << end)
1697 | ((i < end) ? (0xff >> (32 - end)) : 0));
1698 remainder &= ~temp1;
1700 if (generate)
1702 rtx new_src, temp1_rtx;
1704 if (code == SET || code == MINUS)
1706 new_src = (subtargets ? gen_reg_rtx (mode) : target);
1707 if (can_invert && code != MINUS)
1708 temp1 = ~temp1;
1710 else
1712 if (remainder && subtargets)
1713 new_src = gen_reg_rtx (mode);
1714 else
1715 new_src = target;
1716 if (can_invert)
1717 temp1 = ~temp1;
1718 else if (can_negate)
1719 temp1 = -temp1;
1722 temp1 = trunc_int_for_mode (temp1, mode);
1723 temp1_rtx = GEN_INT (temp1);
1725 if (code == SET)
1727 else if (code == MINUS)
1728 temp1_rtx = gen_rtx_MINUS (mode, temp1_rtx, source);
1729 else
1730 temp1_rtx = gen_rtx_fmt_ee (code, mode, source, temp1_rtx);
1732 emit_insn (gen_rtx_SET (VOIDmode, new_src, temp1_rtx));
1733 source = new_src;
1736 if (code == SET)
1738 can_invert = 0;
1739 code = PLUS;
1741 else if (code == MINUS)
1742 code = PLUS;
1744 insns++;
1745 i -= 6;
1747 i -= 2;
1749 while (remainder);
1752 return insns;
1755 /* Canonicalize a comparison so that we are more likely to recognize it.
1756 This can be done for a few constant compares, where we can make the
1757 immediate value easier to load. */
1759 enum rtx_code
1760 arm_canonicalize_comparison (code, op1)
1761 enum rtx_code code;
1762 rtx * op1;
1764 unsigned HOST_WIDE_INT i = INTVAL (*op1);
1766 switch (code)
1768 case EQ:
1769 case NE:
1770 return code;
1772 case GT:
1773 case LE:
1774 if (i != ((((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1)) - 1)
1775 && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
1777 *op1 = GEN_INT (i + 1);
1778 return code == GT ? GE : LT;
1780 break;
1782 case GE:
1783 case LT:
1784 if (i != (((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1))
1785 && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
1787 *op1 = GEN_INT (i - 1);
1788 return code == GE ? GT : LE;
1790 break;
1792 case GTU:
1793 case LEU:
1794 if (i != ~((unsigned HOST_WIDE_INT) 0)
1795 && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
1797 *op1 = GEN_INT (i + 1);
1798 return code == GTU ? GEU : LTU;
1800 break;
1802 case GEU:
1803 case LTU:
1804 if (i != 0
1805 && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
1807 *op1 = GEN_INT (i - 1);
1808 return code == GEU ? GTU : LEU;
1810 break;
1812 default:
1813 abort ();
1816 return code;
1819 /* Decide whether a type should be returned in memory (true)
1820 or in a register (false). This is called by the macro
1821 RETURN_IN_MEMORY. */
1824 arm_return_in_memory (type)
1825 tree type;
1827 HOST_WIDE_INT size;
1829 if (!AGGREGATE_TYPE_P (type))
1830 /* All simple types are returned in registers. */
1831 return 0;
1833 size = int_size_in_bytes (type);
1835 if (TARGET_ATPCS)
1837 /* ATPCS returns aggregate types in memory only if they are
1838 larger than a word (or are variable size). */
1839 return (size < 0 || size > UNITS_PER_WORD);
1842 /* For the arm-wince targets we choose to be compatible with Microsoft's
1843 ARM and Thumb compilers, which always return aggregates in memory. */
1844 #ifndef ARM_WINCE
1845 /* All structures/unions bigger than one word are returned in memory.
1846 Also catch the case where int_size_in_bytes returns -1. In this case
1847 the aggregate is either huge or of variable size, and in either case
1848 we will want to return it via memory and not in a register. */
1849 if (size < 0 || size > UNITS_PER_WORD)
1850 return 1;
1852 if (TREE_CODE (type) == RECORD_TYPE)
1854 tree field;
1856 /* For a struct the APCS says that we only return in a register
1857 if the type is 'integer like' and every addressable element
1858 has an offset of zero. For practical purposes this means
1859 that the structure can have at most one non bit-field element
1860 and that this element must be the first one in the structure. */
1862 /* Find the first field, ignoring non FIELD_DECL things which will
1863 have been created by C++. */
1864 for (field = TYPE_FIELDS (type);
1865 field && TREE_CODE (field) != FIELD_DECL;
1866 field = TREE_CHAIN (field))
1867 continue;
1869 if (field == NULL)
1870 return 0; /* An empty structure. Allowed by an extension to ANSI C. */
1872 /* Check that the first field is valid for returning in a register. */
1874 /* ... Floats are not allowed */
1875 if (FLOAT_TYPE_P (TREE_TYPE (field)))
1876 return 1;
1878 /* ... Aggregates that are not themselves valid for returning in
1879 a register are not allowed. */
1880 if (RETURN_IN_MEMORY (TREE_TYPE (field)))
1881 return 1;
1883 /* Now check the remaining fields, if any. Only bitfields are allowed,
1884 since they are not addressable. */
1885 for (field = TREE_CHAIN (field);
1886 field;
1887 field = TREE_CHAIN (field))
1889 if (TREE_CODE (field) != FIELD_DECL)
1890 continue;
1892 if (!DECL_BIT_FIELD_TYPE (field))
1893 return 1;
1896 return 0;
1899 if (TREE_CODE (type) == UNION_TYPE)
1901 tree field;
1903 /* Unions can be returned in registers if every element is
1904 integral, or can be returned in an integer register. */
1905 for (field = TYPE_FIELDS (type);
1906 field;
1907 field = TREE_CHAIN (field))
1909 if (TREE_CODE (field) != FIELD_DECL)
1910 continue;
1912 if (FLOAT_TYPE_P (TREE_TYPE (field)))
1913 return 1;
1915 if (RETURN_IN_MEMORY (TREE_TYPE (field)))
1916 return 1;
1919 return 0;
1921 #endif /* not ARM_WINCE */
1923 /* Return all other types in memory. */
1924 return 1;
1927 /* Indicate whether or not words of a double are in big-endian order. */
1930 arm_float_words_big_endian ()
1932 if (TARGET_CIRRUS)
1933 return 0;
1935 /* For FPA, float words are always big-endian. For VFP, floats words
1936 follow the memory system mode. */
1938 if (TARGET_HARD_FLOAT)
1940 /* FIXME: TARGET_HARD_FLOAT currently implies FPA. */
1941 return 1;
1944 if (TARGET_VFP)
1945 return (TARGET_BIG_END ? 1 : 0);
1947 return 1;
1950 /* Initialize a variable CUM of type CUMULATIVE_ARGS
1951 for a call to a function whose data type is FNTYPE.
1952 For a library call, FNTYPE is NULL. */
1953 void
1954 arm_init_cumulative_args (pcum, fntype, libname, fndecl)
1955 CUMULATIVE_ARGS * pcum;
1956 tree fntype;
1957 rtx libname ATTRIBUTE_UNUSED;
1958 tree fndecl ATTRIBUTE_UNUSED;
1960 /* On the ARM, the offset starts at 0. */
1961 pcum->nregs = ((fntype && aggregate_value_p (TREE_TYPE (fntype))) ? 1 : 0);
1963 pcum->call_cookie = CALL_NORMAL;
1965 if (TARGET_LONG_CALLS)
1966 pcum->call_cookie = CALL_LONG;
1968 /* Check for long call/short call attributes. The attributes
1969 override any command line option. */
1970 if (fntype)
1972 if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (fntype)))
1973 pcum->call_cookie = CALL_SHORT;
1974 else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (fntype)))
1975 pcum->call_cookie = CALL_LONG;
1979 /* Determine where to put an argument to a function.
1980 Value is zero to push the argument on the stack,
1981 or a hard register in which to store the argument.
1983 MODE is the argument's machine mode.
1984 TYPE is the data type of the argument (as a tree).
1985 This is null for libcalls where that information may
1986 not be available.
1987 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1988 the preceding args and about the function being called.
1989 NAMED is nonzero if this argument is a named parameter
1990 (otherwise it is an extra parameter matching an ellipsis). */
1993 arm_function_arg (pcum, mode, type, named)
1994 CUMULATIVE_ARGS * pcum;
1995 enum machine_mode mode;
1996 tree type ATTRIBUTE_UNUSED;
1997 int named;
1999 if (mode == VOIDmode)
2000 /* Compute operand 2 of the call insn. */
2001 return GEN_INT (pcum->call_cookie);
2003 if (!named || pcum->nregs >= NUM_ARG_REGS)
2004 return NULL_RTX;
2006 return gen_rtx_REG (mode, pcum->nregs);
2009 /* Variable sized types are passed by reference. This is a GCC
2010 extension to the ARM ABI. */
2013 arm_function_arg_pass_by_reference (cum, mode, type, named)
2014 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
2015 enum machine_mode mode ATTRIBUTE_UNUSED;
2016 tree type;
2017 int named ATTRIBUTE_UNUSED;
2019 return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
2022 /* Implement va_arg. */
2025 arm_va_arg (valist, type)
2026 tree valist, type;
2028 /* Variable sized types are passed by reference. */
2029 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2031 rtx addr = std_expand_builtin_va_arg (valist, build_pointer_type (type));
2032 return gen_rtx_MEM (ptr_mode, force_reg (Pmode, addr));
2035 return std_expand_builtin_va_arg (valist, type);
2038 /* Encode the current state of the #pragma [no_]long_calls. */
2039 typedef enum
2041 OFF, /* No #pramgma [no_]long_calls is in effect. */
2042 LONG, /* #pragma long_calls is in effect. */
2043 SHORT /* #pragma no_long_calls is in effect. */
2044 } arm_pragma_enum;
2046 static arm_pragma_enum arm_pragma_long_calls = OFF;
2048 void
2049 arm_pr_long_calls (pfile)
2050 struct cpp_reader * pfile ATTRIBUTE_UNUSED;
2052 arm_pragma_long_calls = LONG;
2055 void
2056 arm_pr_no_long_calls (pfile)
2057 struct cpp_reader * pfile ATTRIBUTE_UNUSED;
2059 arm_pragma_long_calls = SHORT;
2062 void
2063 arm_pr_long_calls_off (pfile)
2064 struct cpp_reader * pfile ATTRIBUTE_UNUSED;
2066 arm_pragma_long_calls = OFF;
2069 /* Table of machine attributes. */
2070 const struct attribute_spec arm_attribute_table[] =
2072 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
2073 /* Function calls made to this symbol must be done indirectly, because
2074 it may lie outside of the 26 bit addressing range of a normal function
2075 call. */
2076 { "long_call", 0, 0, false, true, true, NULL },
2077 /* Whereas these functions are always known to reside within the 26 bit
2078 addressing range. */
2079 { "short_call", 0, 0, false, true, true, NULL },
2080 /* Interrupt Service Routines have special prologue and epilogue requirements. */
2081 { "isr", 0, 1, false, false, false, arm_handle_isr_attribute },
2082 { "interrupt", 0, 1, false, false, false, arm_handle_isr_attribute },
2083 { "naked", 0, 0, true, false, false, arm_handle_fndecl_attribute },
2084 #ifdef ARM_PE
2085 /* ARM/PE has three new attributes:
2086 interfacearm - ?
2087 dllexport - for exporting a function/variable that will live in a dll
2088 dllimport - for importing a function/variable from a dll
2090 Microsoft allows multiple declspecs in one __declspec, separating
2091 them with spaces. We do NOT support this. Instead, use __declspec
2092 multiple times.
2094 { "dllimport", 0, 0, true, false, false, NULL },
2095 { "dllexport", 0, 0, true, false, false, NULL },
2096 { "interfacearm", 0, 0, true, false, false, arm_handle_fndecl_attribute },
2097 #endif
2098 { NULL, 0, 0, false, false, false, NULL }
2101 /* Handle an attribute requiring a FUNCTION_DECL;
2102 arguments as in struct attribute_spec.handler. */
2104 static tree
2105 arm_handle_fndecl_attribute (node, name, args, flags, no_add_attrs)
2106 tree * node;
2107 tree name;
2108 tree args ATTRIBUTE_UNUSED;
2109 int flags ATTRIBUTE_UNUSED;
2110 bool * no_add_attrs;
2112 if (TREE_CODE (*node) != FUNCTION_DECL)
2114 warning ("`%s' attribute only applies to functions",
2115 IDENTIFIER_POINTER (name));
2116 *no_add_attrs = true;
2119 return NULL_TREE;
2122 /* Handle an "interrupt" or "isr" attribute;
2123 arguments as in struct attribute_spec.handler. */
2125 static tree
2126 arm_handle_isr_attribute (node, name, args, flags, no_add_attrs)
2127 tree * node;
2128 tree name;
2129 tree args;
2130 int flags;
2131 bool * no_add_attrs;
2133 if (DECL_P (*node))
2135 if (TREE_CODE (*node) != FUNCTION_DECL)
2137 warning ("`%s' attribute only applies to functions",
2138 IDENTIFIER_POINTER (name));
2139 *no_add_attrs = true;
2141 /* FIXME: the argument if any is checked for type attributes;
2142 should it be checked for decl ones? */
2144 else
2146 if (TREE_CODE (*node) == FUNCTION_TYPE
2147 || TREE_CODE (*node) == METHOD_TYPE)
2149 if (arm_isr_value (args) == ARM_FT_UNKNOWN)
2151 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
2152 *no_add_attrs = true;
2155 else if (TREE_CODE (*node) == POINTER_TYPE
2156 && (TREE_CODE (TREE_TYPE (*node)) == FUNCTION_TYPE
2157 || TREE_CODE (TREE_TYPE (*node)) == METHOD_TYPE)
2158 && arm_isr_value (args) != ARM_FT_UNKNOWN)
2160 *node = build_type_copy (*node);
2161 TREE_TYPE (*node) = build_type_attribute_variant
2162 (TREE_TYPE (*node),
2163 tree_cons (name, args, TYPE_ATTRIBUTES (TREE_TYPE (*node))));
2164 *no_add_attrs = true;
2166 else
2168 /* Possibly pass this attribute on from the type to a decl. */
2169 if (flags & ((int) ATTR_FLAG_DECL_NEXT
2170 | (int) ATTR_FLAG_FUNCTION_NEXT
2171 | (int) ATTR_FLAG_ARRAY_NEXT))
2173 *no_add_attrs = true;
2174 return tree_cons (name, args, NULL_TREE);
2176 else
2178 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
2183 return NULL_TREE;
2186 /* Return 0 if the attributes for two types are incompatible, 1 if they
2187 are compatible, and 2 if they are nearly compatible (which causes a
2188 warning to be generated). */
2190 static int
2191 arm_comp_type_attributes (type1, type2)
2192 tree type1;
2193 tree type2;
2195 int l1, l2, s1, s2;
2197 /* Check for mismatch of non-default calling convention. */
2198 if (TREE_CODE (type1) != FUNCTION_TYPE)
2199 return 1;
2201 /* Check for mismatched call attributes. */
2202 l1 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type1)) != NULL;
2203 l2 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type2)) != NULL;
2204 s1 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type1)) != NULL;
2205 s2 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type2)) != NULL;
2207 /* Only bother to check if an attribute is defined. */
2208 if (l1 | l2 | s1 | s2)
2210 /* If one type has an attribute, the other must have the same attribute. */
2211 if ((l1 != l2) || (s1 != s2))
2212 return 0;
2214 /* Disallow mixed attributes. */
2215 if ((l1 & s2) || (l2 & s1))
2216 return 0;
2219 /* Check for mismatched ISR attribute. */
2220 l1 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type1)) != NULL;
2221 if (! l1)
2222 l1 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type1)) != NULL;
2223 l2 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type2)) != NULL;
2224 if (! l2)
2225 l1 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type2)) != NULL;
2226 if (l1 != l2)
2227 return 0;
2229 return 1;
2232 /* Encode long_call or short_call attribute by prefixing
2233 symbol name in DECL with a special character FLAG. */
2235 void
2236 arm_encode_call_attribute (decl, flag)
2237 tree decl;
2238 int flag;
2240 const char * str = XSTR (XEXP (DECL_RTL (decl), 0), 0);
2241 int len = strlen (str);
2242 char * newstr;
2244 /* Do not allow weak functions to be treated as short call. */
2245 if (DECL_WEAK (decl) && flag == SHORT_CALL_FLAG_CHAR)
2246 return;
2248 newstr = alloca (len + 2);
2249 newstr[0] = flag;
2250 strcpy (newstr + 1, str);
2252 newstr = (char *) ggc_alloc_string (newstr, len + 1);
2253 XSTR (XEXP (DECL_RTL (decl), 0), 0) = newstr;
2256 /* Assigns default attributes to newly defined type. This is used to
2257 set short_call/long_call attributes for function types of
2258 functions defined inside corresponding #pragma scopes. */
2260 static void
2261 arm_set_default_type_attributes (type)
2262 tree type;
2264 /* Add __attribute__ ((long_call)) to all functions, when
2265 inside #pragma long_calls or __attribute__ ((short_call)),
2266 when inside #pragma no_long_calls. */
2267 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
2269 tree type_attr_list, attr_name;
2270 type_attr_list = TYPE_ATTRIBUTES (type);
2272 if (arm_pragma_long_calls == LONG)
2273 attr_name = get_identifier ("long_call");
2274 else if (arm_pragma_long_calls == SHORT)
2275 attr_name = get_identifier ("short_call");
2276 else
2277 return;
2279 type_attr_list = tree_cons (attr_name, NULL_TREE, type_attr_list);
2280 TYPE_ATTRIBUTES (type) = type_attr_list;
2284 /* Return 1 if the operand is a SYMBOL_REF for a function known to be
2285 defined within the current compilation unit. If this cannot be
2286 determined, then 0 is returned. */
2288 static int
2289 current_file_function_operand (sym_ref)
2290 rtx sym_ref;
2292 /* This is a bit of a fib. A function will have a short call flag
2293 applied to its name if it has the short call attribute, or it has
2294 already been defined within the current compilation unit. */
2295 if (ENCODED_SHORT_CALL_ATTR_P (XSTR (sym_ref, 0)))
2296 return 1;
2298 /* The current function is always defined within the current compilation
2299 unit. if it s a weak definition however, then this may not be the real
2300 definition of the function, and so we have to say no. */
2301 if (sym_ref == XEXP (DECL_RTL (current_function_decl), 0)
2302 && !DECL_WEAK (current_function_decl))
2303 return 1;
2305 /* We cannot make the determination - default to returning 0. */
2306 return 0;
2309 /* Return nonzero if a 32 bit "long_call" should be generated for
2310 this call. We generate a long_call if the function:
2312 a. has an __attribute__((long call))
2313 or b. is within the scope of a #pragma long_calls
2314 or c. the -mlong-calls command line switch has been specified
2316 However we do not generate a long call if the function:
2318 d. has an __attribute__ ((short_call))
2319 or e. is inside the scope of a #pragma no_long_calls
2320 or f. has an __attribute__ ((section))
2321 or g. is defined within the current compilation unit.
2323 This function will be called by C fragments contained in the machine
2324 description file. CALL_REF and CALL_COOKIE correspond to the matched
2325 rtl operands. CALL_SYMBOL is used to distinguish between
2326 two different callers of the function. It is set to 1 in the
2327 "call_symbol" and "call_symbol_value" patterns and to 0 in the "call"
2328 and "call_value" patterns. This is because of the difference in the
2329 SYM_REFs passed by these patterns. */
2332 arm_is_longcall_p (sym_ref, call_cookie, call_symbol)
2333 rtx sym_ref;
2334 int call_cookie;
2335 int call_symbol;
2337 if (!call_symbol)
2339 if (GET_CODE (sym_ref) != MEM)
2340 return 0;
2342 sym_ref = XEXP (sym_ref, 0);
2345 if (GET_CODE (sym_ref) != SYMBOL_REF)
2346 return 0;
2348 if (call_cookie & CALL_SHORT)
2349 return 0;
2351 if (TARGET_LONG_CALLS && flag_function_sections)
2352 return 1;
2354 if (current_file_function_operand (sym_ref))
2355 return 0;
2357 return (call_cookie & CALL_LONG)
2358 || ENCODED_LONG_CALL_ATTR_P (XSTR (sym_ref, 0))
2359 || TARGET_LONG_CALLS;
2362 /* Return nonzero if it is ok to make a tail-call to DECL. */
2364 static bool
2365 arm_function_ok_for_sibcall (decl, exp)
2366 tree decl;
2367 tree exp ATTRIBUTE_UNUSED;
2369 int call_type = TARGET_LONG_CALLS ? CALL_LONG : CALL_NORMAL;
2371 /* Never tailcall something for which we have no decl, or if we
2372 are in Thumb mode. */
2373 if (decl == NULL || TARGET_THUMB)
2374 return false;
2376 /* Get the calling method. */
2377 if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
2378 call_type = CALL_SHORT;
2379 else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
2380 call_type = CALL_LONG;
2382 /* Cannot tail-call to long calls, since these are out of range of
2383 a branch instruction. However, if not compiling PIC, we know
2384 we can reach the symbol if it is in this compilation unit. */
2385 if (call_type == CALL_LONG && (flag_pic || !TREE_ASM_WRITTEN (decl)))
2386 return false;
2388 /* If we are interworking and the function is not declared static
2389 then we can't tail-call it unless we know that it exists in this
2390 compilation unit (since it might be a Thumb routine). */
2391 if (TARGET_INTERWORK && TREE_PUBLIC (decl) && !TREE_ASM_WRITTEN (decl))
2392 return false;
2394 /* Never tailcall from an ISR routine - it needs a special exit sequence. */
2395 if (IS_INTERRUPT (arm_current_func_type ()))
2396 return false;
2398 /* Everything else is ok. */
2399 return true;
2403 /* Addressing mode support functions. */
2405 /* Return non-zero if X is a legitimate immediate operand when compiling
2406 for PIC. */
2408 legitimate_pic_operand_p (x)
2409 rtx x;
2411 if (CONSTANT_P (x)
2412 && flag_pic
2413 && (GET_CODE (x) == SYMBOL_REF
2414 || (GET_CODE (x) == CONST
2415 && GET_CODE (XEXP (x, 0)) == PLUS
2416 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)))
2417 return 0;
2419 return 1;
2423 legitimize_pic_address (orig, mode, reg)
2424 rtx orig;
2425 enum machine_mode mode;
2426 rtx reg;
2428 if (GET_CODE (orig) == SYMBOL_REF
2429 || GET_CODE (orig) == LABEL_REF)
2431 #ifndef AOF_ASSEMBLER
2432 rtx pic_ref, address;
2433 #endif
2434 rtx insn;
2435 int subregs = 0;
2437 if (reg == 0)
2439 if (no_new_pseudos)
2440 abort ();
2441 else
2442 reg = gen_reg_rtx (Pmode);
2444 subregs = 1;
2447 #ifdef AOF_ASSEMBLER
2448 /* The AOF assembler can generate relocations for these directly, and
2449 understands that the PIC register has to be added into the offset. */
2450 insn = emit_insn (gen_pic_load_addr_based (reg, orig));
2451 #else
2452 if (subregs)
2453 address = gen_reg_rtx (Pmode);
2454 else
2455 address = reg;
2457 if (TARGET_ARM)
2458 emit_insn (gen_pic_load_addr_arm (address, orig));
2459 else
2460 emit_insn (gen_pic_load_addr_thumb (address, orig));
2462 if ((GET_CODE (orig) == LABEL_REF
2463 || (GET_CODE (orig) == SYMBOL_REF &&
2464 ENCODED_SHORT_CALL_ATTR_P (XSTR (orig, 0))))
2465 && NEED_GOT_RELOC)
2466 pic_ref = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, address);
2467 else
2469 pic_ref = gen_rtx_MEM (Pmode,
2470 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
2471 address));
2472 RTX_UNCHANGING_P (pic_ref) = 1;
2475 insn = emit_move_insn (reg, pic_ref);
2476 #endif
2477 current_function_uses_pic_offset_table = 1;
2478 /* Put a REG_EQUAL note on this insn, so that it can be optimized
2479 by loop. */
2480 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, orig,
2481 REG_NOTES (insn));
2482 return reg;
2484 else if (GET_CODE (orig) == CONST)
2486 rtx base, offset;
2488 if (GET_CODE (XEXP (orig, 0)) == PLUS
2489 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
2490 return orig;
2492 if (reg == 0)
2494 if (no_new_pseudos)
2495 abort ();
2496 else
2497 reg = gen_reg_rtx (Pmode);
2500 if (GET_CODE (XEXP (orig, 0)) == PLUS)
2502 base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
2503 offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
2504 base == reg ? 0 : reg);
2506 else
2507 abort ();
2509 if (GET_CODE (offset) == CONST_INT)
2511 /* The base register doesn't really matter, we only want to
2512 test the index for the appropriate mode. */
2513 if (!arm_legitimate_index_p (mode, offset, 0))
2515 if (!no_new_pseudos)
2516 offset = force_reg (Pmode, offset);
2517 else
2518 abort ();
2521 if (GET_CODE (offset) == CONST_INT)
2522 return plus_constant (base, INTVAL (offset));
2525 if (GET_MODE_SIZE (mode) > 4
2526 && (GET_MODE_CLASS (mode) == MODE_INT
2527 || TARGET_SOFT_FLOAT))
2529 emit_insn (gen_addsi3 (reg, base, offset));
2530 return reg;
2533 return gen_rtx_PLUS (Pmode, base, offset);
2536 return orig;
2539 /* Generate code to load the PIC register. PROLOGUE is true if
2540 called from arm_expand_prologue (in which case we want the
2541 generated insns at the start of the function); false if called
2542 by an exception receiver that needs the PIC register reloaded
2543 (in which case the insns are just dumped at the current location). */
2545 void
2546 arm_finalize_pic (prologue)
2547 int prologue ATTRIBUTE_UNUSED;
2549 #ifndef AOF_ASSEMBLER
2550 rtx l1, pic_tmp, pic_tmp2, seq, pic_rtx;
2551 rtx global_offset_table;
2553 if (current_function_uses_pic_offset_table == 0 || TARGET_SINGLE_PIC_BASE)
2554 return;
2556 if (!flag_pic)
2557 abort ();
2559 start_sequence ();
2560 l1 = gen_label_rtx ();
2562 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2563 /* On the ARM the PC register contains 'dot + 8' at the time of the
2564 addition, on the Thumb it is 'dot + 4'. */
2565 pic_tmp = plus_constant (gen_rtx_LABEL_REF (Pmode, l1), TARGET_ARM ? 8 : 4);
2566 if (GOT_PCREL)
2567 pic_tmp2 = gen_rtx_CONST (VOIDmode,
2568 gen_rtx_PLUS (Pmode, global_offset_table, pc_rtx));
2569 else
2570 pic_tmp2 = gen_rtx_CONST (VOIDmode, global_offset_table);
2572 pic_rtx = gen_rtx_CONST (Pmode, gen_rtx_MINUS (Pmode, pic_tmp2, pic_tmp));
2574 if (TARGET_ARM)
2576 emit_insn (gen_pic_load_addr_arm (pic_offset_table_rtx, pic_rtx));
2577 emit_insn (gen_pic_add_dot_plus_eight (pic_offset_table_rtx, l1));
2579 else
2581 emit_insn (gen_pic_load_addr_thumb (pic_offset_table_rtx, pic_rtx));
2582 emit_insn (gen_pic_add_dot_plus_four (pic_offset_table_rtx, l1));
2585 seq = get_insns ();
2586 end_sequence ();
2587 if (prologue)
2588 emit_insn_after (seq, get_insns ());
2589 else
2590 emit_insn (seq);
2592 /* Need to emit this whether or not we obey regdecls,
2593 since setjmp/longjmp can cause life info to screw up. */
2594 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
2595 #endif /* AOF_ASSEMBLER */
2598 /* Return nonzero if X is valid as an ARM state addressing register. */
2599 static int
2600 arm_address_register_rtx_p (x, strict_p)
2601 rtx x;
2602 int strict_p;
2604 int regno;
2606 if (GET_CODE (x) != REG)
2607 return 0;
2609 regno = REGNO (x);
2611 if (strict_p)
2612 return ARM_REGNO_OK_FOR_BASE_P (regno);
2614 return (regno <= LAST_ARM_REGNUM
2615 || regno >= FIRST_PSEUDO_REGISTER
2616 || regno == FRAME_POINTER_REGNUM
2617 || regno == ARG_POINTER_REGNUM);
2620 /* Return nonzero if X is a valid ARM state address operand. */
2622 arm_legitimate_address_p (mode, x, strict_p)
2623 enum machine_mode mode;
2624 rtx x;
2625 int strict_p;
2627 if (arm_address_register_rtx_p (x, strict_p))
2628 return 1;
2630 else if (GET_CODE (x) == POST_INC || GET_CODE (x) == PRE_DEC)
2631 return arm_address_register_rtx_p (XEXP (x, 0), strict_p);
2633 else if ((GET_CODE (x) == POST_MODIFY || GET_CODE (x) == PRE_MODIFY)
2634 && GET_MODE_SIZE (mode) <= 4
2635 && arm_address_register_rtx_p (XEXP (x, 0), strict_p)
2636 && GET_CODE (XEXP (x, 1)) == PLUS
2637 && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
2638 return arm_legitimate_index_p (mode, XEXP (XEXP (x, 1), 1), strict_p);
2640 /* After reload constants split into minipools will have addresses
2641 from a LABEL_REF. */
2642 else if (GET_MODE_SIZE (mode) >= 4 && reload_completed
2643 && (GET_CODE (x) == LABEL_REF
2644 || (GET_CODE (x) == CONST
2645 && GET_CODE (XEXP (x, 0)) == PLUS
2646 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF
2647 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
2648 return 1;
2650 else if (mode == TImode)
2651 return 0;
2653 else if (mode == DImode || (TARGET_SOFT_FLOAT && mode == DFmode))
2655 if (GET_CODE (x) == PLUS
2656 && arm_address_register_rtx_p (XEXP (x, 0), strict_p)
2657 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2659 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
2661 if (val == 4 || val == -4 || val == -8)
2662 return 1;
2666 else if (GET_CODE (x) == PLUS)
2668 rtx xop0 = XEXP (x, 0);
2669 rtx xop1 = XEXP (x, 1);
2671 return ((arm_address_register_rtx_p (xop0, strict_p)
2672 && arm_legitimate_index_p (mode, xop1, strict_p))
2673 || (arm_address_register_rtx_p (xop1, strict_p)
2674 && arm_legitimate_index_p (mode, xop0, strict_p)));
2677 #if 0
2678 /* Reload currently can't handle MINUS, so disable this for now */
2679 else if (GET_CODE (x) == MINUS)
2681 rtx xop0 = XEXP (x, 0);
2682 rtx xop1 = XEXP (x, 1);
2684 return (arm_address_register_rtx_p (xop0, strict_p)
2685 && arm_legitimate_index_p (mode, xop1, strict_p));
2687 #endif
2689 else if (GET_MODE_CLASS (mode) != MODE_FLOAT
2690 && GET_CODE (x) == SYMBOL_REF
2691 && CONSTANT_POOL_ADDRESS_P (x)
2692 && ! (flag_pic
2693 && symbol_mentioned_p (get_pool_constant (x))))
2694 return 1;
2696 else if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == POST_DEC)
2697 && (GET_MODE_SIZE (mode) <= 4)
2698 && arm_address_register_rtx_p (XEXP (x, 0), strict_p))
2699 return 1;
2701 return 0;
2704 /* Return nonzero if INDEX is valid for an address index operand in
2705 ARM state. */
2706 static int
2707 arm_legitimate_index_p (mode, index, strict_p)
2708 enum machine_mode mode;
2709 rtx index;
2710 int strict_p;
2712 HOST_WIDE_INT range;
2713 enum rtx_code code = GET_CODE (index);
2715 if (TARGET_HARD_FLOAT && GET_MODE_CLASS (mode) == MODE_FLOAT)
2716 return (code == CONST_INT && INTVAL (index) < 1024
2717 && INTVAL (index) > -1024
2718 && (INTVAL (index) & 3) == 0);
2720 if (TARGET_CIRRUS
2721 && (GET_MODE_CLASS (mode) == MODE_FLOAT || mode == DImode))
2722 return (code == CONST_INT
2723 && INTVAL (index) < 255
2724 && INTVAL (index) > -255);
2726 if (arm_address_register_rtx_p (index, strict_p)
2727 && GET_MODE_SIZE (mode) <= 4)
2728 return 1;
2730 /* XXX What about ldrsb? */
2731 if (GET_MODE_SIZE (mode) <= 4 && code == MULT
2732 && (!arm_arch4 || (mode) != HImode))
2734 rtx xiop0 = XEXP (index, 0);
2735 rtx xiop1 = XEXP (index, 1);
2737 return ((arm_address_register_rtx_p (xiop0, strict_p)
2738 && power_of_two_operand (xiop1, SImode))
2739 || (arm_address_register_rtx_p (xiop1, strict_p)
2740 && power_of_two_operand (xiop0, SImode)));
2743 if (GET_MODE_SIZE (mode) <= 4
2744 && (code == LSHIFTRT || code == ASHIFTRT
2745 || code == ASHIFT || code == ROTATERT)
2746 && (!arm_arch4 || (mode) != HImode))
2748 rtx op = XEXP (index, 1);
2750 return (arm_address_register_rtx_p (XEXP (index, 0), strict_p)
2751 && GET_CODE (op) == CONST_INT
2752 && INTVAL (op) > 0
2753 && INTVAL (op) <= 31);
2756 /* XXX For ARM v4 we may be doing a sign-extend operation during the
2757 load, but that has a restricted addressing range and we are unable
2758 to tell here whether that is the case. To be safe we restrict all
2759 loads to that range. */
2760 range = ((mode) == HImode || (mode) == QImode)
2761 ? (arm_arch4 ? 256 : 4095) : 4096;
2763 return (code == CONST_INT
2764 && INTVAL (index) < range
2765 && INTVAL (index) > -range);
2768 /* Return nonzero if X is valid as an ARM state addressing register. */
2769 static int
2770 thumb_base_register_rtx_p (x, mode, strict_p)
2771 rtx x;
2772 enum machine_mode mode;
2773 int strict_p;
2775 int regno;
2777 if (GET_CODE (x) != REG)
2778 return 0;
2780 regno = REGNO (x);
2782 if (strict_p)
2783 return THUMB_REGNO_MODE_OK_FOR_BASE_P (regno, mode);
2785 return (regno <= LAST_LO_REGNUM
2786 || regno >= FIRST_PSEUDO_REGISTER
2787 || regno == FRAME_POINTER_REGNUM
2788 || (GET_MODE_SIZE (mode) >= 4
2789 && (regno == STACK_POINTER_REGNUM
2790 || x == hard_frame_pointer_rtx
2791 || x == arg_pointer_rtx)));
2794 /* Return nonzero if x is a legitimate index register. This is the case
2795 for any base register that can access a QImode object. */
2796 inline static int
2797 thumb_index_register_rtx_p (x, strict_p)
2798 rtx x;
2799 int strict_p;
2801 return thumb_base_register_rtx_p (x, QImode, strict_p);
2804 /* Return nonzero if x is a legitimate Thumb-state address.
2806 The AP may be eliminated to either the SP or the FP, so we use the
2807 least common denominator, e.g. SImode, and offsets from 0 to 64.
2809 ??? Verify whether the above is the right approach.
2811 ??? Also, the FP may be eliminated to the SP, so perhaps that
2812 needs special handling also.
2814 ??? Look at how the mips16 port solves this problem. It probably uses
2815 better ways to solve some of these problems.
2817 Although it is not incorrect, we don't accept QImode and HImode
2818 addresses based on the frame pointer or arg pointer until the
2819 reload pass starts. This is so that eliminating such addresses
2820 into stack based ones won't produce impossible code. */
2822 thumb_legitimate_address_p (mode, x, strict_p)
2823 enum machine_mode mode;
2824 rtx x;
2825 int strict_p;
2827 /* ??? Not clear if this is right. Experiment. */
2828 if (GET_MODE_SIZE (mode) < 4
2829 && !(reload_in_progress || reload_completed)
2830 && (reg_mentioned_p (frame_pointer_rtx, x)
2831 || reg_mentioned_p (arg_pointer_rtx, x)
2832 || reg_mentioned_p (virtual_incoming_args_rtx, x)
2833 || reg_mentioned_p (virtual_outgoing_args_rtx, x)
2834 || reg_mentioned_p (virtual_stack_dynamic_rtx, x)
2835 || reg_mentioned_p (virtual_stack_vars_rtx, x)))
2836 return 0;
2838 /* Accept any base register. SP only in SImode or larger. */
2839 else if (thumb_base_register_rtx_p (x, mode, strict_p))
2840 return 1;
2842 /* This is PC relative data before MACHINE_DEPENDENT_REORG runs. */
2843 else if (GET_MODE_SIZE (mode) >= 4 && CONSTANT_P (x)
2844 && GET_CODE (x) == SYMBOL_REF
2845 && CONSTANT_POOL_ADDRESS_P (x) && ! flag_pic)
2846 return 1;
2848 /* This is PC relative data after MACHINE_DEPENDENT_REORG runs. */
2849 else if (GET_MODE_SIZE (mode) >= 4 && reload_completed
2850 && (GET_CODE (x) == LABEL_REF
2851 || (GET_CODE (x) == CONST
2852 && GET_CODE (XEXP (x, 0)) == PLUS
2853 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF
2854 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
2855 return 1;
2857 /* Post-inc indexing only supported for SImode and larger. */
2858 else if (GET_CODE (x) == POST_INC && GET_MODE_SIZE (mode) >= 4
2859 && thumb_index_register_rtx_p (XEXP (x, 0), strict_p))
2860 return 1;
2862 else if (GET_CODE (x) == PLUS)
2864 /* REG+REG address can be any two index registers. */
2865 /* We disallow FRAME+REG addressing since we know that FRAME
2866 will be replaced with STACK, and SP relative addressing only
2867 permits SP+OFFSET. */
2868 if (GET_MODE_SIZE (mode) <= 4
2869 && XEXP (x, 0) != frame_pointer_rtx
2870 && XEXP (x, 1) != frame_pointer_rtx
2871 && XEXP (x, 0) != virtual_stack_vars_rtx
2872 && XEXP (x, 1) != virtual_stack_vars_rtx
2873 && thumb_index_register_rtx_p (XEXP (x, 0), strict_p)
2874 && thumb_index_register_rtx_p (XEXP (x, 1), strict_p))
2875 return 1;
2877 /* REG+const has 5-7 bit offset for non-SP registers. */
2878 else if ((thumb_index_register_rtx_p (XEXP (x, 0), strict_p)
2879 || XEXP (x, 0) == arg_pointer_rtx)
2880 && GET_CODE (XEXP (x, 1)) == CONST_INT
2881 && thumb_legitimate_offset_p (mode, INTVAL (XEXP (x, 1))))
2882 return 1;
2884 /* REG+const has 10 bit offset for SP, but only SImode and
2885 larger is supported. */
2886 /* ??? Should probably check for DI/DFmode overflow here
2887 just like GO_IF_LEGITIMATE_OFFSET does. */
2888 else if (GET_CODE (XEXP (x, 0)) == REG
2889 && REGNO (XEXP (x, 0)) == STACK_POINTER_REGNUM
2890 && GET_MODE_SIZE (mode) >= 4
2891 && GET_CODE (XEXP (x, 1)) == CONST_INT
2892 && INTVAL (XEXP (x, 1)) >= 0
2893 && INTVAL (XEXP (x, 1)) + GET_MODE_SIZE (mode) <= 1024
2894 && (INTVAL (XEXP (x, 1)) & 3) == 0)
2895 return 1;
2897 else if (GET_CODE (XEXP (x, 0)) == REG
2898 && REGNO (XEXP (x, 0)) == FRAME_POINTER_REGNUM
2899 && GET_MODE_SIZE (mode) >= 4
2900 && GET_CODE (XEXP (x, 1)) == CONST_INT
2901 && (INTVAL (XEXP (x, 1)) & 3) == 0)
2902 return 1;
2905 else if (GET_MODE_CLASS (mode) != MODE_FLOAT
2906 && GET_CODE (x) == SYMBOL_REF
2907 && CONSTANT_POOL_ADDRESS_P (x)
2908 && !(flag_pic
2909 && symbol_mentioned_p (get_pool_constant (x))))
2910 return 1;
2912 return 0;
2915 /* Return nonzero if VAL can be used as an offset in a Thumb-state address
2916 instruction of mode MODE. */
2918 thumb_legitimate_offset_p (mode, val)
2919 enum machine_mode mode;
2920 HOST_WIDE_INT val;
2922 switch (GET_MODE_SIZE (mode))
2924 case 1:
2925 return val >= 0 && val < 32;
2927 case 2:
2928 return val >= 0 && val < 64 && (val & 1) == 0;
2930 default:
2931 return (val >= 0
2932 && (val + GET_MODE_SIZE (mode)) <= 128
2933 && (val & 3) == 0);
2937 /* Try machine-dependent ways of modifying an illegitimate address
2938 to be legitimate. If we find one, return the new, valid address. */
2941 arm_legitimize_address (x, orig_x, mode)
2942 rtx x;
2943 rtx orig_x;
2944 enum machine_mode mode;
2946 if (GET_CODE (x) == PLUS)
2948 rtx xop0 = XEXP (x, 0);
2949 rtx xop1 = XEXP (x, 1);
2951 if (CONSTANT_P (xop0) && !symbol_mentioned_p (xop0))
2952 xop0 = force_reg (SImode, xop0);
2954 if (CONSTANT_P (xop1) && !symbol_mentioned_p (xop1))
2955 xop1 = force_reg (SImode, xop1);
2957 if (ARM_BASE_REGISTER_RTX_P (xop0)
2958 && GET_CODE (xop1) == CONST_INT)
2960 HOST_WIDE_INT n, low_n;
2961 rtx base_reg, val;
2962 n = INTVAL (xop1);
2964 if (mode == DImode || (TARGET_SOFT_FLOAT && mode == DFmode))
2966 low_n = n & 0x0f;
2967 n &= ~0x0f;
2968 if (low_n > 4)
2970 n += 16;
2971 low_n -= 16;
2974 else
2976 low_n = ((mode) == TImode ? 0
2977 : n >= 0 ? (n & 0xfff) : -((-n) & 0xfff));
2978 n -= low_n;
2981 base_reg = gen_reg_rtx (SImode);
2982 val = force_operand (gen_rtx_PLUS (SImode, xop0,
2983 GEN_INT (n)), NULL_RTX);
2984 emit_move_insn (base_reg, val);
2985 x = (low_n == 0 ? base_reg
2986 : gen_rtx_PLUS (SImode, base_reg, GEN_INT (low_n)));
2988 else if (xop0 != XEXP (x, 0) || xop1 != XEXP (x, 1))
2989 x = gen_rtx_PLUS (SImode, xop0, xop1);
2992 /* XXX We don't allow MINUS any more -- see comment in
2993 arm_legitimate_address_p (). */
2994 else if (GET_CODE (x) == MINUS)
2996 rtx xop0 = XEXP (x, 0);
2997 rtx xop1 = XEXP (x, 1);
2999 if (CONSTANT_P (xop0))
3000 xop0 = force_reg (SImode, xop0);
3002 if (CONSTANT_P (xop1) && ! symbol_mentioned_p (xop1))
3003 xop1 = force_reg (SImode, xop1);
3005 if (xop0 != XEXP (x, 0) || xop1 != XEXP (x, 1))
3006 x = gen_rtx_MINUS (SImode, xop0, xop1);
3009 if (flag_pic)
3011 /* We need to find and carefully transform any SYMBOL and LABEL
3012 references; so go back to the original address expression. */
3013 rtx new_x = legitimize_pic_address (orig_x, mode, NULL_RTX);
3015 if (new_x != orig_x)
3016 x = new_x;
3019 return x;
3024 #define REG_OR_SUBREG_REG(X) \
3025 (GET_CODE (X) == REG \
3026 || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))
3028 #define REG_OR_SUBREG_RTX(X) \
3029 (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))
3031 #ifndef COSTS_N_INSNS
3032 #define COSTS_N_INSNS(N) ((N) * 4 - 2)
3033 #endif
3035 static inline int
3036 arm_rtx_costs_1 (x, code, outer)
3037 rtx x;
3038 enum rtx_code code;
3039 enum rtx_code outer;
3041 enum machine_mode mode = GET_MODE (x);
3042 enum rtx_code subcode;
3043 int extra_cost;
3045 if (TARGET_THUMB)
3047 switch (code)
3049 case ASHIFT:
3050 case ASHIFTRT:
3051 case LSHIFTRT:
3052 case ROTATERT:
3053 case PLUS:
3054 case MINUS:
3055 case COMPARE:
3056 case NEG:
3057 case NOT:
3058 return COSTS_N_INSNS (1);
3060 case MULT:
3061 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
3063 int cycles = 0;
3064 unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));
3066 while (i)
3068 i >>= 2;
3069 cycles++;
3071 return COSTS_N_INSNS (2) + cycles;
3073 return COSTS_N_INSNS (1) + 16;
3075 case SET:
3076 return (COSTS_N_INSNS (1)
3077 + 4 * ((GET_CODE (SET_SRC (x)) == MEM)
3078 + GET_CODE (SET_DEST (x)) == MEM));
3080 case CONST_INT:
3081 if (outer == SET)
3083 if ((unsigned HOST_WIDE_INT) INTVAL (x) < 256)
3084 return 0;
3085 if (thumb_shiftable_const (INTVAL (x)))
3086 return COSTS_N_INSNS (2);
3087 return COSTS_N_INSNS (3);
3089 else if (outer == PLUS
3090 && INTVAL (x) < 256 && INTVAL (x) > -256)
3091 return 0;
3092 else if (outer == COMPARE
3093 && (unsigned HOST_WIDE_INT) INTVAL (x) < 256)
3094 return 0;
3095 else if (outer == ASHIFT || outer == ASHIFTRT
3096 || outer == LSHIFTRT)
3097 return 0;
3098 return COSTS_N_INSNS (2);
3100 case CONST:
3101 case CONST_DOUBLE:
3102 case LABEL_REF:
3103 case SYMBOL_REF:
3104 return COSTS_N_INSNS (3);
3106 case UDIV:
3107 case UMOD:
3108 case DIV:
3109 case MOD:
3110 return 100;
3112 case TRUNCATE:
3113 return 99;
3115 case AND:
3116 case XOR:
3117 case IOR:
3118 /* XXX guess. */
3119 return 8;
3121 case ADDRESSOF:
3122 case MEM:
3123 /* XXX another guess. */
3124 /* Memory costs quite a lot for the first word, but subsequent words
3125 load at the equivalent of a single insn each. */
3126 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
3127 + ((GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
3128 ? 4 : 0));
3130 case IF_THEN_ELSE:
3131 /* XXX a guess. */
3132 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
3133 return 14;
3134 return 2;
3136 case ZERO_EXTEND:
3137 /* XXX still guessing. */
3138 switch (GET_MODE (XEXP (x, 0)))
3140 case QImode:
3141 return (1 + (mode == DImode ? 4 : 0)
3142 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3144 case HImode:
3145 return (4 + (mode == DImode ? 4 : 0)
3146 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3148 case SImode:
3149 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3151 default:
3152 return 99;
3155 default:
3156 return 99;
3157 #if 0
3158 case FFS:
3159 case FLOAT:
3160 case FIX:
3161 case UNSIGNED_FIX:
3162 /* XXX guess */
3163 fprintf (stderr, "unexpected code for thumb in rtx_costs: %s\n",
3164 rtx_name[code]);
3165 abort ();
3166 #endif
3170 switch (code)
3172 case MEM:
3173 /* Memory costs quite a lot for the first word, but subsequent words
3174 load at the equivalent of a single insn each. */
3175 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
3176 + (GET_CODE (x) == SYMBOL_REF
3177 && CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));
3179 case DIV:
3180 case MOD:
3181 return 100;
3183 case ROTATE:
3184 if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
3185 return 4;
3186 /* Fall through */
3187 case ROTATERT:
3188 if (mode != SImode)
3189 return 8;
3190 /* Fall through */
3191 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
3192 if (mode == DImode)
3193 return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
3194 + ((GET_CODE (XEXP (x, 0)) == REG
3195 || (GET_CODE (XEXP (x, 0)) == SUBREG
3196 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
3197 ? 0 : 8));
3198 return (1 + ((GET_CODE (XEXP (x, 0)) == REG
3199 || (GET_CODE (XEXP (x, 0)) == SUBREG
3200 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
3201 ? 0 : 4)
3202 + ((GET_CODE (XEXP (x, 1)) == REG
3203 || (GET_CODE (XEXP (x, 1)) == SUBREG
3204 && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
3205 || (GET_CODE (XEXP (x, 1)) == CONST_INT))
3206 ? 0 : 4));
3208 case MINUS:
3209 if (mode == DImode)
3210 return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
3211 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
3212 || (GET_CODE (XEXP (x, 0)) == CONST_INT
3213 && const_ok_for_arm (INTVAL (XEXP (x, 0)))))
3214 ? 0 : 8));
3216 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3217 return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
3218 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
3219 && const_double_rtx_ok_for_fpa (XEXP (x, 1))))
3220 ? 0 : 8)
3221 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
3222 || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
3223 && const_double_rtx_ok_for_fpa (XEXP (x, 0))))
3224 ? 0 : 8));
3226 if (((GET_CODE (XEXP (x, 0)) == CONST_INT
3227 && const_ok_for_arm (INTVAL (XEXP (x, 0)))
3228 && REG_OR_SUBREG_REG (XEXP (x, 1))))
3229 || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
3230 || subcode == ASHIFTRT || subcode == LSHIFTRT
3231 || subcode == ROTATE || subcode == ROTATERT
3232 || (subcode == MULT
3233 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3234 && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
3235 (INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
3236 && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
3237 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
3238 || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
3239 && REG_OR_SUBREG_REG (XEXP (x, 0))))
3240 return 1;
3241 /* Fall through */
3243 case PLUS:
3244 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3245 return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
3246 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
3247 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
3248 && const_double_rtx_ok_for_fpa (XEXP (x, 1))))
3249 ? 0 : 8));
3251 /* Fall through */
3252 case AND: case XOR: case IOR:
3253 extra_cost = 0;
3255 /* Normally the frame registers will be spilt into reg+const during
3256 reload, so it is a bad idea to combine them with other instructions,
3257 since then they might not be moved outside of loops. As a compromise
3258 we allow integration with ops that have a constant as their second
3259 operand. */
3260 if ((REG_OR_SUBREG_REG (XEXP (x, 0))
3261 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
3262 && GET_CODE (XEXP (x, 1)) != CONST_INT)
3263 || (REG_OR_SUBREG_REG (XEXP (x, 0))
3264 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))))
3265 extra_cost = 4;
3267 if (mode == DImode)
3268 return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
3269 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
3270 || (GET_CODE (XEXP (x, 1)) == CONST_INT
3271 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
3272 ? 0 : 8));
3274 if (REG_OR_SUBREG_REG (XEXP (x, 0)))
3275 return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
3276 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
3277 || (GET_CODE (XEXP (x, 1)) == CONST_INT
3278 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
3279 ? 0 : 4));
3281 else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
3282 return (1 + extra_cost
3283 + ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
3284 || subcode == LSHIFTRT || subcode == ASHIFTRT
3285 || subcode == ROTATE || subcode == ROTATERT
3286 || (subcode == MULT
3287 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3288 && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
3289 (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0)))
3290 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
3291 && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
3292 || GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
3293 ? 0 : 4));
3295 return 8;
3297 case MULT:
3298 /* There is no point basing this on the tuning, since it is always the
3299 fast variant if it exists at all. */
3300 if (arm_fast_multiply && mode == DImode
3301 && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
3302 && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
3303 || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
3304 return 8;
3306 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3307 || mode == DImode)
3308 return 30;
3310 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
3312 unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
3313 & (unsigned HOST_WIDE_INT) 0xffffffff);
3314 int add_cost = const_ok_for_arm (i) ? 4 : 8;
3315 int j;
3317 /* Tune as appropriate. */
3318 int booth_unit_size = ((tune_flags & FL_FAST_MULT) ? 8 : 2);
3320 for (j = 0; i && j < 32; j += booth_unit_size)
3322 i >>= booth_unit_size;
3323 add_cost += 2;
3326 return add_cost;
3329 return (((tune_flags & FL_FAST_MULT) ? 8 : 30)
3330 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
3331 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));
3333 case TRUNCATE:
3334 if (arm_fast_multiply && mode == SImode
3335 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
3336 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
3337 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
3338 == GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)))
3339 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ZERO_EXTEND
3340 || GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SIGN_EXTEND))
3341 return 8;
3342 return 99;
3344 case NEG:
3345 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3346 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
3347 /* Fall through */
3348 case NOT:
3349 if (mode == DImode)
3350 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
3352 return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
3354 case IF_THEN_ELSE:
3355 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
3356 return 14;
3357 return 2;
3359 case COMPARE:
3360 return 1;
3362 case ABS:
3363 return 4 + (mode == DImode ? 4 : 0);
3365 case SIGN_EXTEND:
3366 if (GET_MODE (XEXP (x, 0)) == QImode)
3367 return (4 + (mode == DImode ? 4 : 0)
3368 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3369 /* Fall through */
3370 case ZERO_EXTEND:
3371 switch (GET_MODE (XEXP (x, 0)))
3373 case QImode:
3374 return (1 + (mode == DImode ? 4 : 0)
3375 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3377 case HImode:
3378 return (4 + (mode == DImode ? 4 : 0)
3379 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3381 case SImode:
3382 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3384 default:
3385 break;
3387 abort ();
3389 case CONST_INT:
3390 if (const_ok_for_arm (INTVAL (x)))
3391 return outer == SET ? 2 : -1;
3392 else if (outer == AND
3393 && const_ok_for_arm (~INTVAL (x)))
3394 return -1;
3395 else if ((outer == COMPARE
3396 || outer == PLUS || outer == MINUS)
3397 && const_ok_for_arm (-INTVAL (x)))
3398 return -1;
3399 else
3400 return 5;
3402 case CONST:
3403 case LABEL_REF:
3404 case SYMBOL_REF:
3405 return 6;
3407 case CONST_DOUBLE:
3408 if (const_double_rtx_ok_for_fpa (x))
3409 return outer == SET ? 2 : -1;
3410 else if ((outer == COMPARE || outer == PLUS)
3411 && neg_const_double_rtx_ok_for_fpa (x))
3412 return -1;
3413 return 7;
3415 default:
3416 return 99;
3420 static bool
3421 arm_rtx_costs (x, code, outer_code, total)
3422 rtx x;
3423 int code, outer_code;
3424 int *total;
3426 *total = arm_rtx_costs_1 (x, code, outer_code);
3427 return true;
3430 /* All address computations that can be done are free, but rtx cost returns
3431 the same for practically all of them. So we weight the different types
3432 of address here in the order (most pref first):
3433 PRE/POST_INC/DEC, SHIFT or NON-INT sum, INT sum, REG, MEM or LABEL. */
3435 static int
3436 arm_address_cost (X)
3437 rtx X;
3439 #define ARM_ADDRESS_COST(X) \
3440 (10 - ((GET_CODE (X) == MEM || GET_CODE (X) == LABEL_REF \
3441 || GET_CODE (X) == SYMBOL_REF) \
3442 ? 0 \
3443 : ((GET_CODE (X) == PRE_INC || GET_CODE (X) == PRE_DEC \
3444 || GET_CODE (X) == POST_INC || GET_CODE (X) == POST_DEC) \
3445 ? 10 \
3446 : (((GET_CODE (X) == PLUS || GET_CODE (X) == MINUS) \
3447 ? 6 + (GET_CODE (XEXP (X, 1)) == CONST_INT ? 2 \
3448 : ((GET_RTX_CLASS (GET_CODE (XEXP (X, 0))) == '2' \
3449 || GET_RTX_CLASS (GET_CODE (XEXP (X, 0))) == 'c' \
3450 || GET_RTX_CLASS (GET_CODE (XEXP (X, 1))) == '2' \
3451 || GET_RTX_CLASS (GET_CODE (XEXP (X, 1))) == 'c') \
3452 ? 1 : 0)) \
3453 : 4)))))
3455 #define THUMB_ADDRESS_COST(X) \
3456 ((GET_CODE (X) == REG \
3457 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 0)) == REG \
3458 && GET_CODE (XEXP (X, 1)) == CONST_INT)) \
3459 ? 1 : 2)
3461 return (TARGET_ARM ? ARM_ADDRESS_COST (X) : THUMB_ADDRESS_COST (X));
3464 static int
3465 arm_adjust_cost (insn, link, dep, cost)
3466 rtx insn;
3467 rtx link;
3468 rtx dep;
3469 int cost;
3471 rtx i_pat, d_pat;
3473 /* Some true dependencies can have a higher cost depending
3474 on precisely how certain input operands are used. */
3475 if (arm_is_xscale
3476 && REG_NOTE_KIND (link) == 0
3477 && recog_memoized (insn) < 0
3478 && recog_memoized (dep) < 0)
3480 int shift_opnum = get_attr_shift (insn);
3481 enum attr_type attr_type = get_attr_type (dep);
3483 /* If nonzero, SHIFT_OPNUM contains the operand number of a shifted
3484 operand for INSN. If we have a shifted input operand and the
3485 instruction we depend on is another ALU instruction, then we may
3486 have to account for an additional stall. */
3487 if (shift_opnum != 0 && attr_type == TYPE_NORMAL)
3489 rtx shifted_operand;
3490 int opno;
3492 /* Get the shifted operand. */
3493 extract_insn (insn);
3494 shifted_operand = recog_data.operand[shift_opnum];
3496 /* Iterate over all the operands in DEP. If we write an operand
3497 that overlaps with SHIFTED_OPERAND, then we have increase the
3498 cost of this dependency. */
3499 extract_insn (dep);
3500 preprocess_constraints ();
3501 for (opno = 0; opno < recog_data.n_operands; opno++)
3503 /* We can ignore strict inputs. */
3504 if (recog_data.operand_type[opno] == OP_IN)
3505 continue;
3507 if (reg_overlap_mentioned_p (recog_data.operand[opno],
3508 shifted_operand))
3509 return 2;
3514 /* XXX This is not strictly true for the FPA. */
3515 if (REG_NOTE_KIND (link) == REG_DEP_ANTI
3516 || REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
3517 return 0;
3519 /* Call insns don't incur a stall, even if they follow a load. */
3520 if (REG_NOTE_KIND (link) == 0
3521 && GET_CODE (insn) == CALL_INSN)
3522 return 1;
3524 if ((i_pat = single_set (insn)) != NULL
3525 && GET_CODE (SET_SRC (i_pat)) == MEM
3526 && (d_pat = single_set (dep)) != NULL
3527 && GET_CODE (SET_DEST (d_pat)) == MEM)
3529 rtx src_mem = XEXP (SET_SRC (i_pat), 0);
3530 /* This is a load after a store, there is no conflict if the load reads
3531 from a cached area. Assume that loads from the stack, and from the
3532 constant pool are cached, and that others will miss. This is a
3533 hack. */
3535 if ((GET_CODE (src_mem) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (src_mem))
3536 || reg_mentioned_p (stack_pointer_rtx, src_mem)
3537 || reg_mentioned_p (frame_pointer_rtx, src_mem)
3538 || reg_mentioned_p (hard_frame_pointer_rtx, src_mem))
3539 return 1;
3542 return cost;
3545 /* This code has been fixed for cross compilation. */
3547 static int fpa_consts_inited = 0;
3549 static const char * const strings_fpa[8] =
3551 "0", "1", "2", "3",
3552 "4", "5", "0.5", "10"
3555 static REAL_VALUE_TYPE values_fpa[8];
3557 static void
3558 init_fpa_table ()
3560 int i;
3561 REAL_VALUE_TYPE r;
3563 for (i = 0; i < 8; i++)
3565 r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
3566 values_fpa[i] = r;
3569 fpa_consts_inited = 1;
3572 /* Return TRUE if rtx X is a valid immediate FPA constant. */
3575 const_double_rtx_ok_for_fpa (x)
3576 rtx x;
3578 REAL_VALUE_TYPE r;
3579 int i;
3581 if (!fpa_consts_inited)
3582 init_fpa_table ();
3584 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3585 if (REAL_VALUE_MINUS_ZERO (r))
3586 return 0;
3588 for (i = 0; i < 8; i++)
3589 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
3590 return 1;
3592 return 0;
3595 /* Return TRUE if rtx X is a valid immediate FPA constant. */
3598 neg_const_double_rtx_ok_for_fpa (x)
3599 rtx x;
3601 REAL_VALUE_TYPE r;
3602 int i;
3604 if (!fpa_consts_inited)
3605 init_fpa_table ();
3607 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3608 r = REAL_VALUE_NEGATE (r);
3609 if (REAL_VALUE_MINUS_ZERO (r))
3610 return 0;
3612 for (i = 0; i < 8; i++)
3613 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
3614 return 1;
3616 return 0;
3619 /* Predicates for `match_operand' and `match_operator'. */
3621 /* s_register_operand is the same as register_operand, but it doesn't accept
3622 (SUBREG (MEM)...).
3624 This function exists because at the time it was put in it led to better
3625 code. SUBREG(MEM) always needs a reload in the places where
3626 s_register_operand is used, and this seemed to lead to excessive
3627 reloading. */
3630 s_register_operand (op, mode)
3631 rtx op;
3632 enum machine_mode mode;
3634 if (GET_MODE (op) != mode && mode != VOIDmode)
3635 return 0;
3637 if (GET_CODE (op) == SUBREG)
3638 op = SUBREG_REG (op);
3640 /* We don't consider registers whose class is NO_REGS
3641 to be a register operand. */
3642 /* XXX might have to check for lo regs only for thumb ??? */
3643 return (GET_CODE (op) == REG
3644 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3645 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
3648 /* A hard register operand (even before reload. */
3651 arm_hard_register_operand (op, mode)
3652 rtx op;
3653 enum machine_mode mode;
3655 if (GET_MODE (op) != mode && mode != VOIDmode)
3656 return 0;
3658 return (GET_CODE (op) == REG
3659 && REGNO (op) < FIRST_PSEUDO_REGISTER);
3662 /* Only accept reg, subreg(reg), const_int. */
3665 reg_or_int_operand (op, mode)
3666 rtx op;
3667 enum machine_mode mode;
3669 if (GET_CODE (op) == CONST_INT)
3670 return 1;
3672 if (GET_MODE (op) != mode && mode != VOIDmode)
3673 return 0;
3675 if (GET_CODE (op) == SUBREG)
3676 op = SUBREG_REG (op);
3678 /* We don't consider registers whose class is NO_REGS
3679 to be a register operand. */
3680 return (GET_CODE (op) == REG
3681 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3682 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
3685 /* Return 1 if OP is an item in memory, given that we are in reload. */
3688 arm_reload_memory_operand (op, mode)
3689 rtx op;
3690 enum machine_mode mode ATTRIBUTE_UNUSED;
3692 int regno = true_regnum (op);
3694 return (!CONSTANT_P (op)
3695 && (regno == -1
3696 || (GET_CODE (op) == REG
3697 && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
3700 /* Return 1 if OP is a valid memory address, but not valid for a signed byte
3701 memory access (architecture V4).
3702 MODE is QImode if called when computing constraints, or VOIDmode when
3703 emitting patterns. In this latter case we cannot use memory_operand()
3704 because it will fail on badly formed MEMs, which is precisely what we are
3705 trying to catch. */
3708 bad_signed_byte_operand (op, mode)
3709 rtx op;
3710 enum machine_mode mode ATTRIBUTE_UNUSED;
3712 #if 0
3713 if ((mode == QImode && !memory_operand (op, mode)) || GET_CODE (op) != MEM)
3714 return 0;
3715 #endif
3716 if (GET_CODE (op) != MEM)
3717 return 0;
3719 op = XEXP (op, 0);
3721 /* A sum of anything more complex than reg + reg or reg + const is bad. */
3722 if ((GET_CODE (op) == PLUS || GET_CODE (op) == MINUS)
3723 && (!s_register_operand (XEXP (op, 0), VOIDmode)
3724 || (!s_register_operand (XEXP (op, 1), VOIDmode)
3725 && GET_CODE (XEXP (op, 1)) != CONST_INT)))
3726 return 1;
3728 /* Big constants are also bad. */
3729 if (GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT
3730 && (INTVAL (XEXP (op, 1)) > 0xff
3731 || -INTVAL (XEXP (op, 1)) > 0xff))
3732 return 1;
3734 /* Everything else is good, or can will automatically be made so. */
3735 return 0;
3738 /* Return TRUE for valid operands for the rhs of an ARM instruction. */
3741 arm_rhs_operand (op, mode)
3742 rtx op;
3743 enum machine_mode mode;
3745 return (s_register_operand (op, mode)
3746 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
3749 /* Return TRUE for valid operands for the
3750 rhs of an ARM instruction, or a load. */
3753 arm_rhsm_operand (op, mode)
3754 rtx op;
3755 enum machine_mode mode;
3757 return (s_register_operand (op, mode)
3758 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
3759 || memory_operand (op, mode));
3762 /* Return TRUE for valid operands for the rhs of an ARM instruction, or if a
3763 constant that is valid when negated. */
3766 arm_add_operand (op, mode)
3767 rtx op;
3768 enum machine_mode mode;
3770 if (TARGET_THUMB)
3771 return thumb_cmp_operand (op, mode);
3773 return (s_register_operand (op, mode)
3774 || (GET_CODE (op) == CONST_INT
3775 && (const_ok_for_arm (INTVAL (op))
3776 || const_ok_for_arm (-INTVAL (op)))));
3780 arm_not_operand (op, mode)
3781 rtx op;
3782 enum machine_mode mode;
3784 return (s_register_operand (op, mode)
3785 || (GET_CODE (op) == CONST_INT
3786 && (const_ok_for_arm (INTVAL (op))
3787 || const_ok_for_arm (~INTVAL (op)))));
3790 /* Return TRUE if the operand is a memory reference which contains an
3791 offsettable address. */
3794 offsettable_memory_operand (op, mode)
3795 rtx op;
3796 enum machine_mode mode;
3798 if (mode == VOIDmode)
3799 mode = GET_MODE (op);
3801 return (mode == GET_MODE (op)
3802 && GET_CODE (op) == MEM
3803 && offsettable_address_p (reload_completed | reload_in_progress,
3804 mode, XEXP (op, 0)));
3807 /* Return TRUE if the operand is a memory reference which is, or can be
3808 made word aligned by adjusting the offset. */
3811 alignable_memory_operand (op, mode)
3812 rtx op;
3813 enum machine_mode mode;
3815 rtx reg;
3817 if (mode == VOIDmode)
3818 mode = GET_MODE (op);
3820 if (mode != GET_MODE (op) || GET_CODE (op) != MEM)
3821 return 0;
3823 op = XEXP (op, 0);
3825 return ((GET_CODE (reg = op) == REG
3826 || (GET_CODE (op) == SUBREG
3827 && GET_CODE (reg = SUBREG_REG (op)) == REG)
3828 || (GET_CODE (op) == PLUS
3829 && GET_CODE (XEXP (op, 1)) == CONST_INT
3830 && (GET_CODE (reg = XEXP (op, 0)) == REG
3831 || (GET_CODE (XEXP (op, 0)) == SUBREG
3832 && GET_CODE (reg = SUBREG_REG (XEXP (op, 0))) == REG))))
3833 && REGNO_POINTER_ALIGN (REGNO (reg)) >= 32);
3836 /* Similar to s_register_operand, but does not allow hard integer
3837 registers. */
3840 f_register_operand (op, mode)
3841 rtx op;
3842 enum machine_mode mode;
3844 if (GET_MODE (op) != mode && mode != VOIDmode)
3845 return 0;
3847 if (GET_CODE (op) == SUBREG)
3848 op = SUBREG_REG (op);
3850 /* We don't consider registers whose class is NO_REGS
3851 to be a register operand. */
3852 return (GET_CODE (op) == REG
3853 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3854 || REGNO_REG_CLASS (REGNO (op)) == FPA_REGS));
3857 /* Return TRUE for valid operands for the rhs of an FPA instruction. */
3860 fpa_rhs_operand (op, mode)
3861 rtx op;
3862 enum machine_mode mode;
3864 if (s_register_operand (op, mode))
3865 return TRUE;
3867 if (GET_MODE (op) != mode && mode != VOIDmode)
3868 return FALSE;
3870 if (GET_CODE (op) == CONST_DOUBLE)
3871 return const_double_rtx_ok_for_fpa (op);
3873 return FALSE;
3877 fpa_add_operand (op, mode)
3878 rtx op;
3879 enum machine_mode mode;
3881 if (s_register_operand (op, mode))
3882 return TRUE;
3884 if (GET_MODE (op) != mode && mode != VOIDmode)
3885 return FALSE;
3887 if (GET_CODE (op) == CONST_DOUBLE)
3888 return (const_double_rtx_ok_for_fpa (op)
3889 || neg_const_double_rtx_ok_for_fpa (op));
3891 return FALSE;
3894 /* Return nonzero if OP is a valid Cirrus memory address pattern. */
3897 cirrus_memory_offset (op)
3898 rtx op;
3900 /* Reject eliminable registers. */
3901 if (! (reload_in_progress || reload_completed)
3902 && ( reg_mentioned_p (frame_pointer_rtx, op)
3903 || reg_mentioned_p (arg_pointer_rtx, op)
3904 || reg_mentioned_p (virtual_incoming_args_rtx, op)
3905 || reg_mentioned_p (virtual_outgoing_args_rtx, op)
3906 || reg_mentioned_p (virtual_stack_dynamic_rtx, op)
3907 || reg_mentioned_p (virtual_stack_vars_rtx, op)))
3908 return 0;
3910 if (GET_CODE (op) == MEM)
3912 rtx ind;
3914 ind = XEXP (op, 0);
3916 /* Match: (mem (reg)). */
3917 if (GET_CODE (ind) == REG)
3918 return 1;
3920 /* Match:
3921 (mem (plus (reg)
3922 (const))). */
3923 if (GET_CODE (ind) == PLUS
3924 && GET_CODE (XEXP (ind, 0)) == REG
3925 && REG_MODE_OK_FOR_BASE_P (XEXP (ind, 0), VOIDmode)
3926 && GET_CODE (XEXP (ind, 1)) == CONST_INT)
3927 return 1;
3930 return 0;
3933 /* Return nonzero if OP is a Cirrus or general register. */
3936 cirrus_register_operand (op, mode)
3937 rtx op;
3938 enum machine_mode mode;
3940 if (GET_MODE (op) != mode && mode != VOIDmode)
3941 return FALSE;
3943 if (GET_CODE (op) == SUBREG)
3944 op = SUBREG_REG (op);
3946 return (GET_CODE (op) == REG
3947 && (REGNO_REG_CLASS (REGNO (op)) == CIRRUS_REGS
3948 || REGNO_REG_CLASS (REGNO (op)) == GENERAL_REGS));
3951 /* Return nonzero if OP is a cirrus FP register. */
3954 cirrus_fp_register (op, mode)
3955 rtx op;
3956 enum machine_mode mode;
3958 if (GET_MODE (op) != mode && mode != VOIDmode)
3959 return FALSE;
3961 if (GET_CODE (op) == SUBREG)
3962 op = SUBREG_REG (op);
3964 return (GET_CODE (op) == REG
3965 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3966 || REGNO_REG_CLASS (REGNO (op)) == CIRRUS_REGS));
3969 /* Return nonzero if OP is a 6bit constant (0..63). */
3972 cirrus_shift_const (op, mode)
3973 rtx op;
3974 enum machine_mode mode ATTRIBUTE_UNUSED;
3976 return (GET_CODE (op) == CONST_INT
3977 && INTVAL (op) >= 0
3978 && INTVAL (op) < 64);
3981 /* Returns TRUE if INSN is an "LDR REG, ADDR" instruction.
3982 Use by the Cirrus Maverick code which has to workaround
3983 a hardware bug triggered by such instructions. */
3985 static bool
3986 arm_memory_load_p (insn)
3987 rtx insn;
3989 rtx body, lhs, rhs;;
3991 if (insn == NULL_RTX || GET_CODE (insn) != INSN)
3992 return false;
3994 body = PATTERN (insn);
3996 if (GET_CODE (body) != SET)
3997 return false;
3999 lhs = XEXP (body, 0);
4000 rhs = XEXP (body, 1);
4002 lhs = REG_OR_SUBREG_RTX (lhs);
4004 /* If the destination is not a general purpose
4005 register we do not have to worry. */
4006 if (GET_CODE (lhs) != REG
4007 || REGNO_REG_CLASS (REGNO (lhs)) != GENERAL_REGS)
4008 return false;
4010 /* As well as loads from memory we also have to react
4011 to loads of invalid constants which will be turned
4012 into loads from the minipool. */
4013 return (GET_CODE (rhs) == MEM
4014 || GET_CODE (rhs) == SYMBOL_REF
4015 || note_invalid_constants (insn, -1, false));
4018 /* Return TRUE if INSN is a Cirrus instruction. */
4020 static bool
4021 arm_cirrus_insn_p (insn)
4022 rtx insn;
4024 enum attr_cirrus attr;
4026 /* get_attr aborts on USE and CLOBBER. */
4027 if (!insn
4028 || GET_CODE (insn) != INSN
4029 || GET_CODE (PATTERN (insn)) == USE
4030 || GET_CODE (PATTERN (insn)) == CLOBBER)
4031 return 0;
4033 attr = get_attr_cirrus (insn);
4035 return attr != CIRRUS_NOT;
4038 /* Cirrus reorg for invalid instruction combinations. */
4040 static void
4041 cirrus_reorg (first)
4042 rtx first;
4044 enum attr_cirrus attr;
4045 rtx body = PATTERN (first);
4046 rtx t;
4047 int nops;
4049 /* Any branch must be followed by 2 non Cirrus instructions. */
4050 if (GET_CODE (first) == JUMP_INSN && GET_CODE (body) != RETURN)
4052 nops = 0;
4053 t = next_nonnote_insn (first);
4055 if (arm_cirrus_insn_p (t))
4056 ++ nops;
4058 if (arm_cirrus_insn_p (next_nonnote_insn (t)))
4059 ++ nops;
4061 while (nops --)
4062 emit_insn_after (gen_nop (), first);
4064 return;
4067 /* (float (blah)) is in parallel with a clobber. */
4068 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
4069 body = XVECEXP (body, 0, 0);
4071 if (GET_CODE (body) == SET)
4073 rtx lhs = XEXP (body, 0), rhs = XEXP (body, 1);
4075 /* cfldrd, cfldr64, cfstrd, cfstr64 must
4076 be followed by a non Cirrus insn. */
4077 if (get_attr_cirrus (first) == CIRRUS_DOUBLE)
4079 if (arm_cirrus_insn_p (next_nonnote_insn (first)))
4080 emit_insn_after (gen_nop (), first);
4082 return;
4084 else if (arm_memory_load_p (first))
4086 unsigned int arm_regno;
4088 /* Any ldr/cfmvdlr, ldr/cfmvdhr, ldr/cfmvsr, ldr/cfmv64lr,
4089 ldr/cfmv64hr combination where the Rd field is the same
4090 in both instructions must be split with a non Cirrus
4091 insn. Example:
4093 ldr r0, blah
4095 cfmvsr mvf0, r0. */
4097 /* Get Arm register number for ldr insn. */
4098 if (GET_CODE (lhs) == REG)
4099 arm_regno = REGNO (lhs);
4100 else if (GET_CODE (rhs) == REG)
4101 arm_regno = REGNO (rhs);
4102 else
4103 abort ();
4105 /* Next insn. */
4106 first = next_nonnote_insn (first);
4108 if (! arm_cirrus_insn_p (first))
4109 return;
4111 body = PATTERN (first);
4113 /* (float (blah)) is in parallel with a clobber. */
4114 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0))
4115 body = XVECEXP (body, 0, 0);
4117 if (GET_CODE (body) == FLOAT)
4118 body = XEXP (body, 0);
4120 if (get_attr_cirrus (first) == CIRRUS_MOVE
4121 && GET_CODE (XEXP (body, 1)) == REG
4122 && arm_regno == REGNO (XEXP (body, 1)))
4123 emit_insn_after (gen_nop (), first);
4125 return;
4129 /* get_attr aborts on USE and CLOBBER. */
4130 if (!first
4131 || GET_CODE (first) != INSN
4132 || GET_CODE (PATTERN (first)) == USE
4133 || GET_CODE (PATTERN (first)) == CLOBBER)
4134 return;
4136 attr = get_attr_cirrus (first);
4138 /* Any coprocessor compare instruction (cfcmps, cfcmpd, ...)
4139 must be followed by a non-coprocessor instruction. */
4140 if (attr == CIRRUS_COMPARE)
4142 nops = 0;
4144 t = next_nonnote_insn (first);
4146 if (arm_cirrus_insn_p (t))
4147 ++ nops;
4149 if (arm_cirrus_insn_p (next_nonnote_insn (t)))
4150 ++ nops;
4152 while (nops --)
4153 emit_insn_after (gen_nop (), first);
4155 return;
4159 /* Return nonzero if OP is a constant power of two. */
4162 power_of_two_operand (op, mode)
4163 rtx op;
4164 enum machine_mode mode ATTRIBUTE_UNUSED;
4166 if (GET_CODE (op) == CONST_INT)
4168 HOST_WIDE_INT value = INTVAL (op);
4170 return value != 0 && (value & (value - 1)) == 0;
4173 return FALSE;
4176 /* Return TRUE for a valid operand of a DImode operation.
4177 Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
4178 Note that this disallows MEM(REG+REG), but allows
4179 MEM(PRE/POST_INC/DEC(REG)). */
4182 di_operand (op, mode)
4183 rtx op;
4184 enum machine_mode mode;
4186 if (s_register_operand (op, mode))
4187 return TRUE;
4189 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
4190 return FALSE;
4192 if (GET_CODE (op) == SUBREG)
4193 op = SUBREG_REG (op);
4195 switch (GET_CODE (op))
4197 case CONST_DOUBLE:
4198 case CONST_INT:
4199 return TRUE;
4201 case MEM:
4202 return memory_address_p (DImode, XEXP (op, 0));
4204 default:
4205 return FALSE;
4209 /* Like di_operand, but don't accept constants. */
4212 nonimmediate_di_operand (op, mode)
4213 rtx op;
4214 enum machine_mode mode;
4216 if (s_register_operand (op, mode))
4217 return TRUE;
4219 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
4220 return FALSE;
4222 if (GET_CODE (op) == SUBREG)
4223 op = SUBREG_REG (op);
4225 if (GET_CODE (op) == MEM)
4226 return memory_address_p (DImode, XEXP (op, 0));
4228 return FALSE;
4231 /* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
4232 Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
4233 Note that this disallows MEM(REG+REG), but allows
4234 MEM(PRE/POST_INC/DEC(REG)). */
4237 soft_df_operand (op, mode)
4238 rtx op;
4239 enum machine_mode mode;
4241 if (s_register_operand (op, mode))
4242 return TRUE;
4244 if (mode != VOIDmode && GET_MODE (op) != mode)
4245 return FALSE;
4247 if (GET_CODE (op) == SUBREG && CONSTANT_P (SUBREG_REG (op)))
4248 return FALSE;
4250 if (GET_CODE (op) == SUBREG)
4251 op = SUBREG_REG (op);
4253 switch (GET_CODE (op))
4255 case CONST_DOUBLE:
4256 return TRUE;
4258 case MEM:
4259 return memory_address_p (DFmode, XEXP (op, 0));
4261 default:
4262 return FALSE;
4266 /* Like soft_df_operand, but don't accept constants. */
4269 nonimmediate_soft_df_operand (op, mode)
4270 rtx op;
4271 enum machine_mode mode;
4273 if (s_register_operand (op, mode))
4274 return TRUE;
4276 if (mode != VOIDmode && GET_MODE (op) != mode)
4277 return FALSE;
4279 if (GET_CODE (op) == SUBREG)
4280 op = SUBREG_REG (op);
4282 if (GET_CODE (op) == MEM)
4283 return memory_address_p (DFmode, XEXP (op, 0));
4284 return FALSE;
4287 /* Return TRUE for valid index operands. */
4290 index_operand (op, mode)
4291 rtx op;
4292 enum machine_mode mode;
4294 return (s_register_operand (op, mode)
4295 || (immediate_operand (op, mode)
4296 && (GET_CODE (op) != CONST_INT
4297 || (INTVAL (op) < 4096 && INTVAL (op) > -4096))));
4300 /* Return TRUE for valid shifts by a constant. This also accepts any
4301 power of two on the (somewhat overly relaxed) assumption that the
4302 shift operator in this case was a mult. */
4305 const_shift_operand (op, mode)
4306 rtx op;
4307 enum machine_mode mode;
4309 return (power_of_two_operand (op, mode)
4310 || (immediate_operand (op, mode)
4311 && (GET_CODE (op) != CONST_INT
4312 || (INTVAL (op) < 32 && INTVAL (op) > 0))));
4315 /* Return TRUE for arithmetic operators which can be combined with a multiply
4316 (shift). */
4319 shiftable_operator (x, mode)
4320 rtx x;
4321 enum machine_mode mode;
4323 enum rtx_code code;
4325 if (GET_MODE (x) != mode)
4326 return FALSE;
4328 code = GET_CODE (x);
4330 return (code == PLUS || code == MINUS
4331 || code == IOR || code == XOR || code == AND);
4334 /* Return TRUE for binary logical operators. */
4337 logical_binary_operator (x, mode)
4338 rtx x;
4339 enum machine_mode mode;
4341 enum rtx_code code;
4343 if (GET_MODE (x) != mode)
4344 return FALSE;
4346 code = GET_CODE (x);
4348 return (code == IOR || code == XOR || code == AND);
4351 /* Return TRUE for shift operators. */
4354 shift_operator (x, mode)
4355 rtx x;
4356 enum machine_mode mode;
4358 enum rtx_code code;
4360 if (GET_MODE (x) != mode)
4361 return FALSE;
4363 code = GET_CODE (x);
4365 if (code == MULT)
4366 return power_of_two_operand (XEXP (x, 1), mode);
4368 return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
4369 || code == ROTATERT);
4372 /* Return TRUE if x is EQ or NE. */
4375 equality_operator (x, mode)
4376 rtx x;
4377 enum machine_mode mode ATTRIBUTE_UNUSED;
4379 return GET_CODE (x) == EQ || GET_CODE (x) == NE;
4382 /* Return TRUE if x is a comparison operator other than LTGT or UNEQ. */
4385 arm_comparison_operator (x, mode)
4386 rtx x;
4387 enum machine_mode mode;
4389 return (comparison_operator (x, mode)
4390 && GET_CODE (x) != LTGT
4391 && GET_CODE (x) != UNEQ);
4394 /* Return TRUE for SMIN SMAX UMIN UMAX operators. */
4397 minmax_operator (x, mode)
4398 rtx x;
4399 enum machine_mode mode;
4401 enum rtx_code code = GET_CODE (x);
4403 if (GET_MODE (x) != mode)
4404 return FALSE;
4406 return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
4409 /* Return TRUE if this is the condition code register, if we aren't given
4410 a mode, accept any class CCmode register. */
4413 cc_register (x, mode)
4414 rtx x;
4415 enum machine_mode mode;
4417 if (mode == VOIDmode)
4419 mode = GET_MODE (x);
4421 if (GET_MODE_CLASS (mode) != MODE_CC)
4422 return FALSE;
4425 if ( GET_MODE (x) == mode
4426 && GET_CODE (x) == REG
4427 && REGNO (x) == CC_REGNUM)
4428 return TRUE;
4430 return FALSE;
4433 /* Return TRUE if this is the condition code register, if we aren't given
4434 a mode, accept any class CCmode register which indicates a dominance
4435 expression. */
4438 dominant_cc_register (x, mode)
4439 rtx x;
4440 enum machine_mode mode;
4442 if (mode == VOIDmode)
4444 mode = GET_MODE (x);
4446 if (GET_MODE_CLASS (mode) != MODE_CC)
4447 return FALSE;
4450 if ( mode != CC_DNEmode && mode != CC_DEQmode
4451 && mode != CC_DLEmode && mode != CC_DLTmode
4452 && mode != CC_DGEmode && mode != CC_DGTmode
4453 && mode != CC_DLEUmode && mode != CC_DLTUmode
4454 && mode != CC_DGEUmode && mode != CC_DGTUmode)
4455 return FALSE;
4457 return cc_register (x, mode);
4460 /* Return TRUE if X references a SYMBOL_REF. */
4463 symbol_mentioned_p (x)
4464 rtx x;
4466 const char * fmt;
4467 int i;
4469 if (GET_CODE (x) == SYMBOL_REF)
4470 return 1;
4472 fmt = GET_RTX_FORMAT (GET_CODE (x));
4474 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4476 if (fmt[i] == 'E')
4478 int j;
4480 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4481 if (symbol_mentioned_p (XVECEXP (x, i, j)))
4482 return 1;
4484 else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
4485 return 1;
4488 return 0;
4491 /* Return TRUE if X references a LABEL_REF. */
4494 label_mentioned_p (x)
4495 rtx x;
4497 const char * fmt;
4498 int i;
4500 if (GET_CODE (x) == LABEL_REF)
4501 return 1;
4503 fmt = GET_RTX_FORMAT (GET_CODE (x));
4504 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4506 if (fmt[i] == 'E')
4508 int j;
4510 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4511 if (label_mentioned_p (XVECEXP (x, i, j)))
4512 return 1;
4514 else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
4515 return 1;
4518 return 0;
4521 enum rtx_code
4522 minmax_code (x)
4523 rtx x;
4525 enum rtx_code code = GET_CODE (x);
4527 if (code == SMAX)
4528 return GE;
4529 else if (code == SMIN)
4530 return LE;
4531 else if (code == UMIN)
4532 return LEU;
4533 else if (code == UMAX)
4534 return GEU;
4536 abort ();
4539 /* Return 1 if memory locations are adjacent. */
4542 adjacent_mem_locations (a, b)
4543 rtx a, b;
4545 if ((GET_CODE (XEXP (a, 0)) == REG
4546 || (GET_CODE (XEXP (a, 0)) == PLUS
4547 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
4548 && (GET_CODE (XEXP (b, 0)) == REG
4549 || (GET_CODE (XEXP (b, 0)) == PLUS
4550 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
4552 int val0 = 0, val1 = 0;
4553 int reg0, reg1;
4555 if (GET_CODE (XEXP (a, 0)) == PLUS)
4557 reg0 = REGNO (XEXP (XEXP (a, 0), 0));
4558 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
4560 else
4561 reg0 = REGNO (XEXP (a, 0));
4563 if (GET_CODE (XEXP (b, 0)) == PLUS)
4565 reg1 = REGNO (XEXP (XEXP (b, 0), 0));
4566 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
4568 else
4569 reg1 = REGNO (XEXP (b, 0));
4571 return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
4573 return 0;
4576 /* Return 1 if OP is a load multiple operation. It is known to be
4577 parallel and the first section will be tested. */
4580 load_multiple_operation (op, mode)
4581 rtx op;
4582 enum machine_mode mode ATTRIBUTE_UNUSED;
4584 HOST_WIDE_INT count = XVECLEN (op, 0);
4585 int dest_regno;
4586 rtx src_addr;
4587 HOST_WIDE_INT i = 1, base = 0;
4588 rtx elt;
4590 if (count <= 1
4591 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
4592 return 0;
4594 /* Check to see if this might be a write-back. */
4595 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
4597 i++;
4598 base = 1;
4600 /* Now check it more carefully. */
4601 if (GET_CODE (SET_DEST (elt)) != REG
4602 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
4603 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
4604 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
4605 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
4606 return 0;
4609 /* Perform a quick check so we don't blow up below. */
4610 if (count <= i
4611 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
4612 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
4613 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
4614 return 0;
4616 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
4617 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);
4619 for (; i < count; i++)
4621 elt = XVECEXP (op, 0, i);
4623 if (GET_CODE (elt) != SET
4624 || GET_CODE (SET_DEST (elt)) != REG
4625 || GET_MODE (SET_DEST (elt)) != SImode
4626 || REGNO (SET_DEST (elt)) != (unsigned int)(dest_regno + i - base)
4627 || GET_CODE (SET_SRC (elt)) != MEM
4628 || GET_MODE (SET_SRC (elt)) != SImode
4629 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
4630 || !rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
4631 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
4632 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
4633 return 0;
4636 return 1;
4639 /* Return 1 if OP is a store multiple operation. It is known to be
4640 parallel and the first section will be tested. */
4643 store_multiple_operation (op, mode)
4644 rtx op;
4645 enum machine_mode mode ATTRIBUTE_UNUSED;
4647 HOST_WIDE_INT count = XVECLEN (op, 0);
4648 int src_regno;
4649 rtx dest_addr;
4650 HOST_WIDE_INT i = 1, base = 0;
4651 rtx elt;
4653 if (count <= 1
4654 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
4655 return 0;
4657 /* Check to see if this might be a write-back. */
4658 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
4660 i++;
4661 base = 1;
4663 /* Now check it more carefully. */
4664 if (GET_CODE (SET_DEST (elt)) != REG
4665 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
4666 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
4667 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
4668 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
4669 return 0;
4672 /* Perform a quick check so we don't blow up below. */
4673 if (count <= i
4674 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
4675 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
4676 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
4677 return 0;
4679 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
4680 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);
4682 for (; i < count; i++)
4684 elt = XVECEXP (op, 0, i);
4686 if (GET_CODE (elt) != SET
4687 || GET_CODE (SET_SRC (elt)) != REG
4688 || GET_MODE (SET_SRC (elt)) != SImode
4689 || REGNO (SET_SRC (elt)) != (unsigned int)(src_regno + i - base)
4690 || GET_CODE (SET_DEST (elt)) != MEM
4691 || GET_MODE (SET_DEST (elt)) != SImode
4692 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
4693 || !rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
4694 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
4695 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
4696 return 0;
4699 return 1;
4703 load_multiple_sequence (operands, nops, regs, base, load_offset)
4704 rtx * operands;
4705 int nops;
4706 int * regs;
4707 int * base;
4708 HOST_WIDE_INT * load_offset;
4710 int unsorted_regs[4];
4711 HOST_WIDE_INT unsorted_offsets[4];
4712 int order[4];
4713 int base_reg = -1;
4714 int i;
4716 /* Can only handle 2, 3, or 4 insns at present,
4717 though could be easily extended if required. */
4718 if (nops < 2 || nops > 4)
4719 abort ();
4721 /* Loop over the operands and check that the memory references are
4722 suitable (ie immediate offsets from the same base register). At
4723 the same time, extract the target register, and the memory
4724 offsets. */
4725 for (i = 0; i < nops; i++)
4727 rtx reg;
4728 rtx offset;
4730 /* Convert a subreg of a mem into the mem itself. */
4731 if (GET_CODE (operands[nops + i]) == SUBREG)
4732 operands[nops + i] = alter_subreg (operands + (nops + i));
4734 if (GET_CODE (operands[nops + i]) != MEM)
4735 abort ();
4737 /* Don't reorder volatile memory references; it doesn't seem worth
4738 looking for the case where the order is ok anyway. */
4739 if (MEM_VOLATILE_P (operands[nops + i]))
4740 return 0;
4742 offset = const0_rtx;
4744 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
4745 || (GET_CODE (reg) == SUBREG
4746 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4747 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
4748 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
4749 == REG)
4750 || (GET_CODE (reg) == SUBREG
4751 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4752 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
4753 == CONST_INT)))
4755 if (i == 0)
4757 base_reg = REGNO (reg);
4758 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
4759 ? REGNO (operands[i])
4760 : REGNO (SUBREG_REG (operands[i])));
4761 order[0] = 0;
4763 else
4765 if (base_reg != (int) REGNO (reg))
4766 /* Not addressed from the same base register. */
4767 return 0;
4769 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
4770 ? REGNO (operands[i])
4771 : REGNO (SUBREG_REG (operands[i])));
4772 if (unsorted_regs[i] < unsorted_regs[order[0]])
4773 order[0] = i;
4776 /* If it isn't an integer register, or if it overwrites the
4777 base register but isn't the last insn in the list, then
4778 we can't do this. */
4779 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14
4780 || (i != nops - 1 && unsorted_regs[i] == base_reg))
4781 return 0;
4783 unsorted_offsets[i] = INTVAL (offset);
4785 else
4786 /* Not a suitable memory address. */
4787 return 0;
4790 /* All the useful information has now been extracted from the
4791 operands into unsorted_regs and unsorted_offsets; additionally,
4792 order[0] has been set to the lowest numbered register in the
4793 list. Sort the registers into order, and check that the memory
4794 offsets are ascending and adjacent. */
4796 for (i = 1; i < nops; i++)
4798 int j;
4800 order[i] = order[i - 1];
4801 for (j = 0; j < nops; j++)
4802 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
4803 && (order[i] == order[i - 1]
4804 || unsorted_regs[j] < unsorted_regs[order[i]]))
4805 order[i] = j;
4807 /* Have we found a suitable register? if not, one must be used more
4808 than once. */
4809 if (order[i] == order[i - 1])
4810 return 0;
4812 /* Is the memory address adjacent and ascending? */
4813 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
4814 return 0;
4817 if (base)
4819 *base = base_reg;
4821 for (i = 0; i < nops; i++)
4822 regs[i] = unsorted_regs[order[i]];
4824 *load_offset = unsorted_offsets[order[0]];
4827 if (unsorted_offsets[order[0]] == 0)
4828 return 1; /* ldmia */
4830 if (unsorted_offsets[order[0]] == 4)
4831 return 2; /* ldmib */
4833 if (unsorted_offsets[order[nops - 1]] == 0)
4834 return 3; /* ldmda */
4836 if (unsorted_offsets[order[nops - 1]] == -4)
4837 return 4; /* ldmdb */
4839 /* For ARM8,9 & StrongARM, 2 ldr instructions are faster than an ldm
4840 if the offset isn't small enough. The reason 2 ldrs are faster
4841 is because these ARMs are able to do more than one cache access
4842 in a single cycle. The ARM9 and StrongARM have Harvard caches,
4843 whilst the ARM8 has a double bandwidth cache. This means that
4844 these cores can do both an instruction fetch and a data fetch in
4845 a single cycle, so the trick of calculating the address into a
4846 scratch register (one of the result regs) and then doing a load
4847 multiple actually becomes slower (and no smaller in code size).
4848 That is the transformation
4850 ldr rd1, [rbase + offset]
4851 ldr rd2, [rbase + offset + 4]
4855 add rd1, rbase, offset
4856 ldmia rd1, {rd1, rd2}
4858 produces worse code -- '3 cycles + any stalls on rd2' instead of
4859 '2 cycles + any stalls on rd2'. On ARMs with only one cache
4860 access per cycle, the first sequence could never complete in less
4861 than 6 cycles, whereas the ldm sequence would only take 5 and
4862 would make better use of sequential accesses if not hitting the
4863 cache.
4865 We cheat here and test 'arm_ld_sched' which we currently know to
4866 only be true for the ARM8, ARM9 and StrongARM. If this ever
4867 changes, then the test below needs to be reworked. */
4868 if (nops == 2 && arm_ld_sched)
4869 return 0;
4871 /* Can't do it without setting up the offset, only do this if it takes
4872 no more than one insn. */
4873 return (const_ok_for_arm (unsorted_offsets[order[0]])
4874 || const_ok_for_arm (-unsorted_offsets[order[0]])) ? 5 : 0;
4877 const char *
4878 emit_ldm_seq (operands, nops)
4879 rtx * operands;
4880 int nops;
4882 int regs[4];
4883 int base_reg;
4884 HOST_WIDE_INT offset;
4885 char buf[100];
4886 int i;
4888 switch (load_multiple_sequence (operands, nops, regs, &base_reg, &offset))
4890 case 1:
4891 strcpy (buf, "ldm%?ia\t");
4892 break;
4894 case 2:
4895 strcpy (buf, "ldm%?ib\t");
4896 break;
4898 case 3:
4899 strcpy (buf, "ldm%?da\t");
4900 break;
4902 case 4:
4903 strcpy (buf, "ldm%?db\t");
4904 break;
4906 case 5:
4907 if (offset >= 0)
4908 sprintf (buf, "add%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
4909 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
4910 (long) offset);
4911 else
4912 sprintf (buf, "sub%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
4913 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
4914 (long) -offset);
4915 output_asm_insn (buf, operands);
4916 base_reg = regs[0];
4917 strcpy (buf, "ldm%?ia\t");
4918 break;
4920 default:
4921 abort ();
4924 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
4925 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
4927 for (i = 1; i < nops; i++)
4928 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
4929 reg_names[regs[i]]);
4931 strcat (buf, "}\t%@ phole ldm");
4933 output_asm_insn (buf, operands);
4934 return "";
4938 store_multiple_sequence (operands, nops, regs, base, load_offset)
4939 rtx * operands;
4940 int nops;
4941 int * regs;
4942 int * base;
4943 HOST_WIDE_INT * load_offset;
4945 int unsorted_regs[4];
4946 HOST_WIDE_INT unsorted_offsets[4];
4947 int order[4];
4948 int base_reg = -1;
4949 int i;
4951 /* Can only handle 2, 3, or 4 insns at present, though could be easily
4952 extended if required. */
4953 if (nops < 2 || nops > 4)
4954 abort ();
4956 /* Loop over the operands and check that the memory references are
4957 suitable (ie immediate offsets from the same base register). At
4958 the same time, extract the target register, and the memory
4959 offsets. */
4960 for (i = 0; i < nops; i++)
4962 rtx reg;
4963 rtx offset;
4965 /* Convert a subreg of a mem into the mem itself. */
4966 if (GET_CODE (operands[nops + i]) == SUBREG)
4967 operands[nops + i] = alter_subreg (operands + (nops + i));
4969 if (GET_CODE (operands[nops + i]) != MEM)
4970 abort ();
4972 /* Don't reorder volatile memory references; it doesn't seem worth
4973 looking for the case where the order is ok anyway. */
4974 if (MEM_VOLATILE_P (operands[nops + i]))
4975 return 0;
4977 offset = const0_rtx;
4979 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
4980 || (GET_CODE (reg) == SUBREG
4981 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4982 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
4983 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
4984 == REG)
4985 || (GET_CODE (reg) == SUBREG
4986 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4987 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
4988 == CONST_INT)))
4990 if (i == 0)
4992 base_reg = REGNO (reg);
4993 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
4994 ? REGNO (operands[i])
4995 : REGNO (SUBREG_REG (operands[i])));
4996 order[0] = 0;
4998 else
5000 if (base_reg != (int) REGNO (reg))
5001 /* Not addressed from the same base register. */
5002 return 0;
5004 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
5005 ? REGNO (operands[i])
5006 : REGNO (SUBREG_REG (operands[i])));
5007 if (unsorted_regs[i] < unsorted_regs[order[0]])
5008 order[0] = i;
5011 /* If it isn't an integer register, then we can't do this. */
5012 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14)
5013 return 0;
5015 unsorted_offsets[i] = INTVAL (offset);
5017 else
5018 /* Not a suitable memory address. */
5019 return 0;
5022 /* All the useful information has now been extracted from the
5023 operands into unsorted_regs and unsorted_offsets; additionally,
5024 order[0] has been set to the lowest numbered register in the
5025 list. Sort the registers into order, and check that the memory
5026 offsets are ascending and adjacent. */
5028 for (i = 1; i < nops; i++)
5030 int j;
5032 order[i] = order[i - 1];
5033 for (j = 0; j < nops; j++)
5034 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
5035 && (order[i] == order[i - 1]
5036 || unsorted_regs[j] < unsorted_regs[order[i]]))
5037 order[i] = j;
5039 /* Have we found a suitable register? if not, one must be used more
5040 than once. */
5041 if (order[i] == order[i - 1])
5042 return 0;
5044 /* Is the memory address adjacent and ascending? */
5045 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
5046 return 0;
5049 if (base)
5051 *base = base_reg;
5053 for (i = 0; i < nops; i++)
5054 regs[i] = unsorted_regs[order[i]];
5056 *load_offset = unsorted_offsets[order[0]];
5059 if (unsorted_offsets[order[0]] == 0)
5060 return 1; /* stmia */
5062 if (unsorted_offsets[order[0]] == 4)
5063 return 2; /* stmib */
5065 if (unsorted_offsets[order[nops - 1]] == 0)
5066 return 3; /* stmda */
5068 if (unsorted_offsets[order[nops - 1]] == -4)
5069 return 4; /* stmdb */
5071 return 0;
5074 const char *
5075 emit_stm_seq (operands, nops)
5076 rtx * operands;
5077 int nops;
5079 int regs[4];
5080 int base_reg;
5081 HOST_WIDE_INT offset;
5082 char buf[100];
5083 int i;
5085 switch (store_multiple_sequence (operands, nops, regs, &base_reg, &offset))
5087 case 1:
5088 strcpy (buf, "stm%?ia\t");
5089 break;
5091 case 2:
5092 strcpy (buf, "stm%?ib\t");
5093 break;
5095 case 3:
5096 strcpy (buf, "stm%?da\t");
5097 break;
5099 case 4:
5100 strcpy (buf, "stm%?db\t");
5101 break;
5103 default:
5104 abort ();
5107 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
5108 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
5110 for (i = 1; i < nops; i++)
5111 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
5112 reg_names[regs[i]]);
5114 strcat (buf, "}\t%@ phole stm");
5116 output_asm_insn (buf, operands);
5117 return "";
5121 multi_register_push (op, mode)
5122 rtx op;
5123 enum machine_mode mode ATTRIBUTE_UNUSED;
5125 if (GET_CODE (op) != PARALLEL
5126 || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
5127 || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
5128 || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != UNSPEC_PUSH_MULT))
5129 return 0;
5131 return 1;
5134 /* Routines for use in generating RTL. */
5137 arm_gen_load_multiple (base_regno, count, from, up, write_back, unchanging_p,
5138 in_struct_p, scalar_p)
5139 int base_regno;
5140 int count;
5141 rtx from;
5142 int up;
5143 int write_back;
5144 int unchanging_p;
5145 int in_struct_p;
5146 int scalar_p;
5148 int i = 0, j;
5149 rtx result;
5150 int sign = up ? 1 : -1;
5151 rtx mem;
5153 /* XScale has load-store double instructions, but they have stricter
5154 alignment requirements than load-store multiple, so we can not
5155 use them.
5157 For XScale ldm requires 2 + NREGS cycles to complete and blocks
5158 the pipeline until completion.
5160 NREGS CYCLES
5166 An ldr instruction takes 1-3 cycles, but does not block the
5167 pipeline.
5169 NREGS CYCLES
5170 1 1-3
5171 2 2-6
5172 3 3-9
5173 4 4-12
5175 Best case ldr will always win. However, the more ldr instructions
5176 we issue, the less likely we are to be able to schedule them well.
5177 Using ldr instructions also increases code size.
5179 As a compromise, we use ldr for counts of 1 or 2 regs, and ldm
5180 for counts of 3 or 4 regs. */
5181 if (arm_is_xscale && count <= 2 && ! optimize_size)
5183 rtx seq;
5185 start_sequence ();
5187 for (i = 0; i < count; i++)
5189 mem = gen_rtx_MEM (SImode, plus_constant (from, i * 4 * sign));
5190 RTX_UNCHANGING_P (mem) = unchanging_p;
5191 MEM_IN_STRUCT_P (mem) = in_struct_p;
5192 MEM_SCALAR_P (mem) = scalar_p;
5193 emit_move_insn (gen_rtx_REG (SImode, base_regno + i), mem);
5196 if (write_back)
5197 emit_move_insn (from, plus_constant (from, count * 4 * sign));
5199 seq = get_insns ();
5200 end_sequence ();
5202 return seq;
5205 result = gen_rtx_PARALLEL (VOIDmode,
5206 rtvec_alloc (count + (write_back ? 1 : 0)));
5207 if (write_back)
5209 XVECEXP (result, 0, 0)
5210 = gen_rtx_SET (GET_MODE (from), from,
5211 plus_constant (from, count * 4 * sign));
5212 i = 1;
5213 count++;
5216 for (j = 0; i < count; i++, j++)
5218 mem = gen_rtx_MEM (SImode, plus_constant (from, j * 4 * sign));
5219 RTX_UNCHANGING_P (mem) = unchanging_p;
5220 MEM_IN_STRUCT_P (mem) = in_struct_p;
5221 MEM_SCALAR_P (mem) = scalar_p;
5222 XVECEXP (result, 0, i)
5223 = gen_rtx_SET (VOIDmode, gen_rtx_REG (SImode, base_regno + j), mem);
5226 return result;
5230 arm_gen_store_multiple (base_regno, count, to, up, write_back, unchanging_p,
5231 in_struct_p, scalar_p)
5232 int base_regno;
5233 int count;
5234 rtx to;
5235 int up;
5236 int write_back;
5237 int unchanging_p;
5238 int in_struct_p;
5239 int scalar_p;
5241 int i = 0, j;
5242 rtx result;
5243 int sign = up ? 1 : -1;
5244 rtx mem;
5246 /* See arm_gen_load_multiple for discussion of
5247 the pros/cons of ldm/stm usage for XScale. */
5248 if (arm_is_xscale && count <= 2 && ! optimize_size)
5250 rtx seq;
5252 start_sequence ();
5254 for (i = 0; i < count; i++)
5256 mem = gen_rtx_MEM (SImode, plus_constant (to, i * 4 * sign));
5257 RTX_UNCHANGING_P (mem) = unchanging_p;
5258 MEM_IN_STRUCT_P (mem) = in_struct_p;
5259 MEM_SCALAR_P (mem) = scalar_p;
5260 emit_move_insn (mem, gen_rtx_REG (SImode, base_regno + i));
5263 if (write_back)
5264 emit_move_insn (to, plus_constant (to, count * 4 * sign));
5266 seq = get_insns ();
5267 end_sequence ();
5269 return seq;
5272 result = gen_rtx_PARALLEL (VOIDmode,
5273 rtvec_alloc (count + (write_back ? 1 : 0)));
5274 if (write_back)
5276 XVECEXP (result, 0, 0)
5277 = gen_rtx_SET (GET_MODE (to), to,
5278 plus_constant (to, count * 4 * sign));
5279 i = 1;
5280 count++;
5283 for (j = 0; i < count; i++, j++)
5285 mem = gen_rtx_MEM (SImode, plus_constant (to, j * 4 * sign));
5286 RTX_UNCHANGING_P (mem) = unchanging_p;
5287 MEM_IN_STRUCT_P (mem) = in_struct_p;
5288 MEM_SCALAR_P (mem) = scalar_p;
5290 XVECEXP (result, 0, i)
5291 = gen_rtx_SET (VOIDmode, mem, gen_rtx_REG (SImode, base_regno + j));
5294 return result;
5298 arm_gen_movstrqi (operands)
5299 rtx * operands;
5301 HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
5302 int i;
5303 rtx src, dst;
5304 rtx st_src, st_dst, fin_src, fin_dst;
5305 rtx part_bytes_reg = NULL;
5306 rtx mem;
5307 int dst_unchanging_p, dst_in_struct_p, src_unchanging_p, src_in_struct_p;
5308 int dst_scalar_p, src_scalar_p;
5310 if (GET_CODE (operands[2]) != CONST_INT
5311 || GET_CODE (operands[3]) != CONST_INT
5312 || INTVAL (operands[2]) > 64
5313 || INTVAL (operands[3]) & 3)
5314 return 0;
5316 st_dst = XEXP (operands[0], 0);
5317 st_src = XEXP (operands[1], 0);
5319 dst_unchanging_p = RTX_UNCHANGING_P (operands[0]);
5320 dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
5321 dst_scalar_p = MEM_SCALAR_P (operands[0]);
5322 src_unchanging_p = RTX_UNCHANGING_P (operands[1]);
5323 src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);
5324 src_scalar_p = MEM_SCALAR_P (operands[1]);
5326 fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
5327 fin_src = src = copy_to_mode_reg (SImode, st_src);
5329 in_words_to_go = ARM_NUM_INTS (INTVAL (operands[2]));
5330 out_words_to_go = INTVAL (operands[2]) / 4;
5331 last_bytes = INTVAL (operands[2]) & 3;
5333 if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
5334 part_bytes_reg = gen_rtx_REG (SImode, (in_words_to_go - 1) & 3);
5336 for (i = 0; in_words_to_go >= 2; i+=4)
5338 if (in_words_to_go > 4)
5339 emit_insn (arm_gen_load_multiple (0, 4, src, TRUE, TRUE,
5340 src_unchanging_p,
5341 src_in_struct_p,
5342 src_scalar_p));
5343 else
5344 emit_insn (arm_gen_load_multiple (0, in_words_to_go, src, TRUE,
5345 FALSE, src_unchanging_p,
5346 src_in_struct_p, src_scalar_p));
5348 if (out_words_to_go)
5350 if (out_words_to_go > 4)
5351 emit_insn (arm_gen_store_multiple (0, 4, dst, TRUE, TRUE,
5352 dst_unchanging_p,
5353 dst_in_struct_p,
5354 dst_scalar_p));
5355 else if (out_words_to_go != 1)
5356 emit_insn (arm_gen_store_multiple (0, out_words_to_go,
5357 dst, TRUE,
5358 (last_bytes == 0
5359 ? FALSE : TRUE),
5360 dst_unchanging_p,
5361 dst_in_struct_p,
5362 dst_scalar_p));
5363 else
5365 mem = gen_rtx_MEM (SImode, dst);
5366 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
5367 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
5368 MEM_SCALAR_P (mem) = dst_scalar_p;
5369 emit_move_insn (mem, gen_rtx_REG (SImode, 0));
5370 if (last_bytes != 0)
5371 emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
5375 in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
5376 out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
5379 /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do. */
5380 if (out_words_to_go)
5382 rtx sreg;
5384 mem = gen_rtx_MEM (SImode, src);
5385 RTX_UNCHANGING_P (mem) = src_unchanging_p;
5386 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
5387 MEM_SCALAR_P (mem) = src_scalar_p;
5388 emit_move_insn (sreg = gen_reg_rtx (SImode), mem);
5389 emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));
5391 mem = gen_rtx_MEM (SImode, dst);
5392 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
5393 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
5394 MEM_SCALAR_P (mem) = dst_scalar_p;
5395 emit_move_insn (mem, sreg);
5396 emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
5397 in_words_to_go--;
5399 if (in_words_to_go) /* Sanity check */
5400 abort ();
5403 if (in_words_to_go)
5405 if (in_words_to_go < 0)
5406 abort ();
5408 mem = gen_rtx_MEM (SImode, src);
5409 RTX_UNCHANGING_P (mem) = src_unchanging_p;
5410 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
5411 MEM_SCALAR_P (mem) = src_scalar_p;
5412 part_bytes_reg = copy_to_mode_reg (SImode, mem);
5415 if (last_bytes && part_bytes_reg == NULL)
5416 abort ();
5418 if (BYTES_BIG_ENDIAN && last_bytes)
5420 rtx tmp = gen_reg_rtx (SImode);
5422 /* The bytes we want are in the top end of the word. */
5423 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
5424 GEN_INT (8 * (4 - last_bytes))));
5425 part_bytes_reg = tmp;
5427 while (last_bytes)
5429 mem = gen_rtx_MEM (QImode, plus_constant (dst, last_bytes - 1));
5430 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
5431 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
5432 MEM_SCALAR_P (mem) = dst_scalar_p;
5433 emit_move_insn (mem, gen_lowpart (QImode, part_bytes_reg));
5435 if (--last_bytes)
5437 tmp = gen_reg_rtx (SImode);
5438 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
5439 part_bytes_reg = tmp;
5444 else
5446 if (last_bytes > 1)
5448 mem = gen_rtx_MEM (HImode, dst);
5449 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
5450 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
5451 MEM_SCALAR_P (mem) = dst_scalar_p;
5452 emit_move_insn (mem, gen_lowpart (HImode, part_bytes_reg));
5453 last_bytes -= 2;
5454 if (last_bytes)
5456 rtx tmp = gen_reg_rtx (SImode);
5458 emit_insn (gen_addsi3 (dst, dst, GEN_INT (2)));
5459 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (16)));
5460 part_bytes_reg = tmp;
5464 if (last_bytes)
5466 mem = gen_rtx_MEM (QImode, dst);
5467 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
5468 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
5469 MEM_SCALAR_P (mem) = dst_scalar_p;
5470 emit_move_insn (mem, gen_lowpart (QImode, part_bytes_reg));
5474 return 1;
5477 /* Generate a memory reference for a half word, such that it will be loaded
5478 into the top 16 bits of the word. We can assume that the address is
5479 known to be alignable and of the form reg, or plus (reg, const). */
5482 arm_gen_rotated_half_load (memref)
5483 rtx memref;
5485 HOST_WIDE_INT offset = 0;
5486 rtx base = XEXP (memref, 0);
5488 if (GET_CODE (base) == PLUS)
5490 offset = INTVAL (XEXP (base, 1));
5491 base = XEXP (base, 0);
5494 /* If we aren't allowed to generate unaligned addresses, then fail. */
5495 if (TARGET_MMU_TRAPS
5496 && ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 0)))
5497 return NULL;
5499 base = gen_rtx_MEM (SImode, plus_constant (base, offset & ~2));
5501 if ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 2))
5502 return base;
5504 return gen_rtx_ROTATE (SImode, base, GEN_INT (16));
5507 /* Select a dominance comparison mode if possible. We support three forms.
5508 COND_OR == 0 => (X && Y)
5509 COND_OR == 1 => ((! X( || Y)
5510 COND_OR == 2 => (X || Y)
5511 If we are unable to support a dominance comparison we return CC mode.
5512 This will then fail to match for the RTL expressions that generate this
5513 call. */
5515 static enum machine_mode
5516 select_dominance_cc_mode (x, y, cond_or)
5517 rtx x;
5518 rtx y;
5519 HOST_WIDE_INT cond_or;
5521 enum rtx_code cond1, cond2;
5522 int swapped = 0;
5524 /* Currently we will probably get the wrong result if the individual
5525 comparisons are not simple. This also ensures that it is safe to
5526 reverse a comparison if necessary. */
5527 if ((arm_select_cc_mode (cond1 = GET_CODE (x), XEXP (x, 0), XEXP (x, 1))
5528 != CCmode)
5529 || (arm_select_cc_mode (cond2 = GET_CODE (y), XEXP (y, 0), XEXP (y, 1))
5530 != CCmode))
5531 return CCmode;
5533 /* The if_then_else variant of this tests the second condition if the
5534 first passes, but is true if the first fails. Reverse the first
5535 condition to get a true "inclusive-or" expression. */
5536 if (cond_or == 1)
5537 cond1 = reverse_condition (cond1);
5539 /* If the comparisons are not equal, and one doesn't dominate the other,
5540 then we can't do this. */
5541 if (cond1 != cond2
5542 && !comparison_dominates_p (cond1, cond2)
5543 && (swapped = 1, !comparison_dominates_p (cond2, cond1)))
5544 return CCmode;
5546 if (swapped)
5548 enum rtx_code temp = cond1;
5549 cond1 = cond2;
5550 cond2 = temp;
5553 switch (cond1)
5555 case EQ:
5556 if (cond2 == EQ || !cond_or)
5557 return CC_DEQmode;
5559 switch (cond2)
5561 case LE: return CC_DLEmode;
5562 case LEU: return CC_DLEUmode;
5563 case GE: return CC_DGEmode;
5564 case GEU: return CC_DGEUmode;
5565 default: break;
5568 break;
5570 case LT:
5571 if (cond2 == LT || !cond_or)
5572 return CC_DLTmode;
5573 if (cond2 == LE)
5574 return CC_DLEmode;
5575 if (cond2 == NE)
5576 return CC_DNEmode;
5577 break;
5579 case GT:
5580 if (cond2 == GT || !cond_or)
5581 return CC_DGTmode;
5582 if (cond2 == GE)
5583 return CC_DGEmode;
5584 if (cond2 == NE)
5585 return CC_DNEmode;
5586 break;
5588 case LTU:
5589 if (cond2 == LTU || !cond_or)
5590 return CC_DLTUmode;
5591 if (cond2 == LEU)
5592 return CC_DLEUmode;
5593 if (cond2 == NE)
5594 return CC_DNEmode;
5595 break;
5597 case GTU:
5598 if (cond2 == GTU || !cond_or)
5599 return CC_DGTUmode;
5600 if (cond2 == GEU)
5601 return CC_DGEUmode;
5602 if (cond2 == NE)
5603 return CC_DNEmode;
5604 break;
5606 /* The remaining cases only occur when both comparisons are the
5607 same. */
5608 case NE:
5609 return CC_DNEmode;
5611 case LE:
5612 return CC_DLEmode;
5614 case GE:
5615 return CC_DGEmode;
5617 case LEU:
5618 return CC_DLEUmode;
5620 case GEU:
5621 return CC_DGEUmode;
5623 default:
5624 break;
5627 abort ();
5630 enum machine_mode
5631 arm_select_cc_mode (op, x, y)
5632 enum rtx_code op;
5633 rtx x;
5634 rtx y;
5636 /* All floating point compares return CCFP if it is an equality
5637 comparison, and CCFPE otherwise. */
5638 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
5640 switch (op)
5642 case EQ:
5643 case NE:
5644 case UNORDERED:
5645 case ORDERED:
5646 case UNLT:
5647 case UNLE:
5648 case UNGT:
5649 case UNGE:
5650 case UNEQ:
5651 case LTGT:
5652 return CCFPmode;
5654 case LT:
5655 case LE:
5656 case GT:
5657 case GE:
5658 if (TARGET_CIRRUS)
5659 return CCFPmode;
5660 return CCFPEmode;
5662 default:
5663 abort ();
5667 /* A compare with a shifted operand. Because of canonicalization, the
5668 comparison will have to be swapped when we emit the assembler. */
5669 if (GET_MODE (y) == SImode && GET_CODE (y) == REG
5670 && (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
5671 || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
5672 || GET_CODE (x) == ROTATERT))
5673 return CC_SWPmode;
5675 /* This is a special case that is used by combine to allow a
5676 comparison of a shifted byte load to be split into a zero-extend
5677 followed by a comparison of the shifted integer (only valid for
5678 equalities and unsigned inequalities). */
5679 if (GET_MODE (x) == SImode
5680 && GET_CODE (x) == ASHIFT
5681 && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 24
5682 && GET_CODE (XEXP (x, 0)) == SUBREG
5683 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == MEM
5684 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == QImode
5685 && (op == EQ || op == NE
5686 || op == GEU || op == GTU || op == LTU || op == LEU)
5687 && GET_CODE (y) == CONST_INT)
5688 return CC_Zmode;
5690 /* A construct for a conditional compare, if the false arm contains
5691 0, then both conditions must be true, otherwise either condition
5692 must be true. Not all conditions are possible, so CCmode is
5693 returned if it can't be done. */
5694 if (GET_CODE (x) == IF_THEN_ELSE
5695 && (XEXP (x, 2) == const0_rtx
5696 || XEXP (x, 2) == const1_rtx)
5697 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
5698 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
5699 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
5700 INTVAL (XEXP (x, 2)));
5702 /* Alternate canonicalizations of the above. These are somewhat cleaner. */
5703 if (GET_CODE (x) == AND
5704 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
5705 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
5706 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1), 0);
5708 if (GET_CODE (x) == IOR
5709 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
5710 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
5711 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1), 2);
5713 /* An operation that sets the condition codes as a side-effect, the
5714 V flag is not set correctly, so we can only use comparisons where
5715 this doesn't matter. (For LT and GE we can use "mi" and "pl"
5716 instead. */
5717 if (GET_MODE (x) == SImode
5718 && y == const0_rtx
5719 && (op == EQ || op == NE || op == LT || op == GE)
5720 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
5721 || GET_CODE (x) == AND || GET_CODE (x) == IOR
5722 || GET_CODE (x) == XOR || GET_CODE (x) == MULT
5723 || GET_CODE (x) == NOT || GET_CODE (x) == NEG
5724 || GET_CODE (x) == LSHIFTRT
5725 || GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
5726 || GET_CODE (x) == ROTATERT || GET_CODE (x) == ZERO_EXTRACT))
5727 return CC_NOOVmode;
5729 if (GET_MODE (x) == QImode && (op == EQ || op == NE))
5730 return CC_Zmode;
5732 if (GET_MODE (x) == SImode && (op == LTU || op == GEU)
5733 && GET_CODE (x) == PLUS
5734 && (rtx_equal_p (XEXP (x, 0), y) || rtx_equal_p (XEXP (x, 1), y)))
5735 return CC_Cmode;
5737 return CCmode;
5740 /* X and Y are two things to compare using CODE. Emit the compare insn and
5741 return the rtx for register 0 in the proper mode. FP means this is a
5742 floating point compare: I don't think that it is needed on the arm. */
5745 arm_gen_compare_reg (code, x, y)
5746 enum rtx_code code;
5747 rtx x, y;
5749 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
5750 rtx cc_reg = gen_rtx_REG (mode, CC_REGNUM);
5752 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
5753 gen_rtx_COMPARE (mode, x, y)));
5755 return cc_reg;
5758 /* Generate a sequence of insns that will generate the correct return
5759 address mask depending on the physical architecture that the program
5760 is running on. */
5763 arm_gen_return_addr_mask ()
5765 rtx reg = gen_reg_rtx (Pmode);
5767 emit_insn (gen_return_addr_mask (reg));
5768 return reg;
5771 void
5772 arm_reload_in_hi (operands)
5773 rtx * operands;
5775 rtx ref = operands[1];
5776 rtx base, scratch;
5777 HOST_WIDE_INT offset = 0;
5779 if (GET_CODE (ref) == SUBREG)
5781 offset = SUBREG_BYTE (ref);
5782 ref = SUBREG_REG (ref);
5785 if (GET_CODE (ref) == REG)
5787 /* We have a pseudo which has been spilt onto the stack; there
5788 are two cases here: the first where there is a simple
5789 stack-slot replacement and a second where the stack-slot is
5790 out of range, or is used as a subreg. */
5791 if (reg_equiv_mem[REGNO (ref)])
5793 ref = reg_equiv_mem[REGNO (ref)];
5794 base = find_replacement (&XEXP (ref, 0));
5796 else
5797 /* The slot is out of range, or was dressed up in a SUBREG. */
5798 base = reg_equiv_address[REGNO (ref)];
5800 else
5801 base = find_replacement (&XEXP (ref, 0));
5803 /* Handle the case where the address is too complex to be offset by 1. */
5804 if (GET_CODE (base) == MINUS
5805 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
5807 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5809 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
5810 base = base_plus;
5812 else if (GET_CODE (base) == PLUS)
5814 /* The addend must be CONST_INT, or we would have dealt with it above. */
5815 HOST_WIDE_INT hi, lo;
5817 offset += INTVAL (XEXP (base, 1));
5818 base = XEXP (base, 0);
5820 /* Rework the address into a legal sequence of insns. */
5821 /* Valid range for lo is -4095 -> 4095 */
5822 lo = (offset >= 0
5823 ? (offset & 0xfff)
5824 : -((-offset) & 0xfff));
5826 /* Corner case, if lo is the max offset then we would be out of range
5827 once we have added the additional 1 below, so bump the msb into the
5828 pre-loading insn(s). */
5829 if (lo == 4095)
5830 lo &= 0x7ff;
5832 hi = ((((offset - lo) & (HOST_WIDE_INT) 0xffffffff)
5833 ^ (HOST_WIDE_INT) 0x80000000)
5834 - (HOST_WIDE_INT) 0x80000000);
5836 if (hi + lo != offset)
5837 abort ();
5839 if (hi != 0)
5841 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5843 /* Get the base address; addsi3 knows how to handle constants
5844 that require more than one insn. */
5845 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
5846 base = base_plus;
5847 offset = lo;
5851 /* Operands[2] may overlap operands[0] (though it won't overlap
5852 operands[1]), that's why we asked for a DImode reg -- so we can
5853 use the bit that does not overlap. */
5854 if (REGNO (operands[2]) == REGNO (operands[0]))
5855 scratch = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5856 else
5857 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
5859 emit_insn (gen_zero_extendqisi2 (scratch,
5860 gen_rtx_MEM (QImode,
5861 plus_constant (base,
5862 offset))));
5863 emit_insn (gen_zero_extendqisi2 (gen_rtx_SUBREG (SImode, operands[0], 0),
5864 gen_rtx_MEM (QImode,
5865 plus_constant (base,
5866 offset + 1))));
5867 if (!BYTES_BIG_ENDIAN)
5868 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
5869 gen_rtx_IOR (SImode,
5870 gen_rtx_ASHIFT
5871 (SImode,
5872 gen_rtx_SUBREG (SImode, operands[0], 0),
5873 GEN_INT (8)),
5874 scratch)));
5875 else
5876 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
5877 gen_rtx_IOR (SImode,
5878 gen_rtx_ASHIFT (SImode, scratch,
5879 GEN_INT (8)),
5880 gen_rtx_SUBREG (SImode, operands[0],
5881 0))));
5884 /* Handle storing a half-word to memory during reload by synthesising as two
5885 byte stores. Take care not to clobber the input values until after we
5886 have moved them somewhere safe. This code assumes that if the DImode
5887 scratch in operands[2] overlaps either the input value or output address
5888 in some way, then that value must die in this insn (we absolutely need
5889 two scratch registers for some corner cases). */
5891 void
5892 arm_reload_out_hi (operands)
5893 rtx * operands;
5895 rtx ref = operands[0];
5896 rtx outval = operands[1];
5897 rtx base, scratch;
5898 HOST_WIDE_INT offset = 0;
5900 if (GET_CODE (ref) == SUBREG)
5902 offset = SUBREG_BYTE (ref);
5903 ref = SUBREG_REG (ref);
5906 if (GET_CODE (ref) == REG)
5908 /* We have a pseudo which has been spilt onto the stack; there
5909 are two cases here: the first where there is a simple
5910 stack-slot replacement and a second where the stack-slot is
5911 out of range, or is used as a subreg. */
5912 if (reg_equiv_mem[REGNO (ref)])
5914 ref = reg_equiv_mem[REGNO (ref)];
5915 base = find_replacement (&XEXP (ref, 0));
5917 else
5918 /* The slot is out of range, or was dressed up in a SUBREG. */
5919 base = reg_equiv_address[REGNO (ref)];
5921 else
5922 base = find_replacement (&XEXP (ref, 0));
5924 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
5926 /* Handle the case where the address is too complex to be offset by 1. */
5927 if (GET_CODE (base) == MINUS
5928 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
5930 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5932 /* Be careful not to destroy OUTVAL. */
5933 if (reg_overlap_mentioned_p (base_plus, outval))
5935 /* Updating base_plus might destroy outval, see if we can
5936 swap the scratch and base_plus. */
5937 if (!reg_overlap_mentioned_p (scratch, outval))
5939 rtx tmp = scratch;
5940 scratch = base_plus;
5941 base_plus = tmp;
5943 else
5945 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
5947 /* Be conservative and copy OUTVAL into the scratch now,
5948 this should only be necessary if outval is a subreg
5949 of something larger than a word. */
5950 /* XXX Might this clobber base? I can't see how it can,
5951 since scratch is known to overlap with OUTVAL, and
5952 must be wider than a word. */
5953 emit_insn (gen_movhi (scratch_hi, outval));
5954 outval = scratch_hi;
5958 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
5959 base = base_plus;
5961 else if (GET_CODE (base) == PLUS)
5963 /* The addend must be CONST_INT, or we would have dealt with it above. */
5964 HOST_WIDE_INT hi, lo;
5966 offset += INTVAL (XEXP (base, 1));
5967 base = XEXP (base, 0);
5969 /* Rework the address into a legal sequence of insns. */
5970 /* Valid range for lo is -4095 -> 4095 */
5971 lo = (offset >= 0
5972 ? (offset & 0xfff)
5973 : -((-offset) & 0xfff));
5975 /* Corner case, if lo is the max offset then we would be out of range
5976 once we have added the additional 1 below, so bump the msb into the
5977 pre-loading insn(s). */
5978 if (lo == 4095)
5979 lo &= 0x7ff;
5981 hi = ((((offset - lo) & (HOST_WIDE_INT) 0xffffffff)
5982 ^ (HOST_WIDE_INT) 0x80000000)
5983 - (HOST_WIDE_INT) 0x80000000);
5985 if (hi + lo != offset)
5986 abort ();
5988 if (hi != 0)
5990 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5992 /* Be careful not to destroy OUTVAL. */
5993 if (reg_overlap_mentioned_p (base_plus, outval))
5995 /* Updating base_plus might destroy outval, see if we
5996 can swap the scratch and base_plus. */
5997 if (!reg_overlap_mentioned_p (scratch, outval))
5999 rtx tmp = scratch;
6000 scratch = base_plus;
6001 base_plus = tmp;
6003 else
6005 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
6007 /* Be conservative and copy outval into scratch now,
6008 this should only be necessary if outval is a
6009 subreg of something larger than a word. */
6010 /* XXX Might this clobber base? I can't see how it
6011 can, since scratch is known to overlap with
6012 outval. */
6013 emit_insn (gen_movhi (scratch_hi, outval));
6014 outval = scratch_hi;
6018 /* Get the base address; addsi3 knows how to handle constants
6019 that require more than one insn. */
6020 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
6021 base = base_plus;
6022 offset = lo;
6026 if (BYTES_BIG_ENDIAN)
6028 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
6029 plus_constant (base, offset + 1)),
6030 gen_lowpart (QImode, outval)));
6031 emit_insn (gen_lshrsi3 (scratch,
6032 gen_rtx_SUBREG (SImode, outval, 0),
6033 GEN_INT (8)));
6034 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
6035 gen_lowpart (QImode, scratch)));
6037 else
6039 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
6040 gen_lowpart (QImode, outval)));
6041 emit_insn (gen_lshrsi3 (scratch,
6042 gen_rtx_SUBREG (SImode, outval, 0),
6043 GEN_INT (8)));
6044 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
6045 plus_constant (base, offset + 1)),
6046 gen_lowpart (QImode, scratch)));
6050 /* Print a symbolic form of X to the debug file, F. */
6052 static void
6053 arm_print_value (f, x)
6054 FILE * f;
6055 rtx x;
6057 switch (GET_CODE (x))
6059 case CONST_INT:
6060 fprintf (f, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
6061 return;
6063 case CONST_DOUBLE:
6064 fprintf (f, "<0x%lx,0x%lx>", (long)XWINT (x, 2), (long)XWINT (x, 3));
6065 return;
6067 case CONST_STRING:
6068 fprintf (f, "\"%s\"", XSTR (x, 0));
6069 return;
6071 case SYMBOL_REF:
6072 fprintf (f, "`%s'", XSTR (x, 0));
6073 return;
6075 case LABEL_REF:
6076 fprintf (f, "L%d", INSN_UID (XEXP (x, 0)));
6077 return;
6079 case CONST:
6080 arm_print_value (f, XEXP (x, 0));
6081 return;
6083 case PLUS:
6084 arm_print_value (f, XEXP (x, 0));
6085 fprintf (f, "+");
6086 arm_print_value (f, XEXP (x, 1));
6087 return;
6089 case PC:
6090 fprintf (f, "pc");
6091 return;
6093 default:
6094 fprintf (f, "????");
6095 return;
6099 /* Routines for manipulation of the constant pool. */
6101 /* Arm instructions cannot load a large constant directly into a
6102 register; they have to come from a pc relative load. The constant
6103 must therefore be placed in the addressable range of the pc
6104 relative load. Depending on the precise pc relative load
6105 instruction the range is somewhere between 256 bytes and 4k. This
6106 means that we often have to dump a constant inside a function, and
6107 generate code to branch around it.
6109 It is important to minimize this, since the branches will slow
6110 things down and make the code larger.
6112 Normally we can hide the table after an existing unconditional
6113 branch so that there is no interruption of the flow, but in the
6114 worst case the code looks like this:
6116 ldr rn, L1
6118 b L2
6119 align
6120 L1: .long value
6124 ldr rn, L3
6126 b L4
6127 align
6128 L3: .long value
6132 We fix this by performing a scan after scheduling, which notices
6133 which instructions need to have their operands fetched from the
6134 constant table and builds the table.
6136 The algorithm starts by building a table of all the constants that
6137 need fixing up and all the natural barriers in the function (places
6138 where a constant table can be dropped without breaking the flow).
6139 For each fixup we note how far the pc-relative replacement will be
6140 able to reach and the offset of the instruction into the function.
6142 Having built the table we then group the fixes together to form
6143 tables that are as large as possible (subject to addressing
6144 constraints) and emit each table of constants after the last
6145 barrier that is within range of all the instructions in the group.
6146 If a group does not contain a barrier, then we forcibly create one
6147 by inserting a jump instruction into the flow. Once the table has
6148 been inserted, the insns are then modified to reference the
6149 relevant entry in the pool.
6151 Possible enhancements to the algorithm (not implemented) are:
6153 1) For some processors and object formats, there may be benefit in
6154 aligning the pools to the start of cache lines; this alignment
6155 would need to be taken into account when calculating addressability
6156 of a pool. */
6158 /* These typedefs are located at the start of this file, so that
6159 they can be used in the prototypes there. This comment is to
6160 remind readers of that fact so that the following structures
6161 can be understood more easily.
6163 typedef struct minipool_node Mnode;
6164 typedef struct minipool_fixup Mfix; */
6166 struct minipool_node
6168 /* Doubly linked chain of entries. */
6169 Mnode * next;
6170 Mnode * prev;
6171 /* The maximum offset into the code that this entry can be placed. While
6172 pushing fixes for forward references, all entries are sorted in order
6173 of increasing max_address. */
6174 HOST_WIDE_INT max_address;
6175 /* Similarly for an entry inserted for a backwards ref. */
6176 HOST_WIDE_INT min_address;
6177 /* The number of fixes referencing this entry. This can become zero
6178 if we "unpush" an entry. In this case we ignore the entry when we
6179 come to emit the code. */
6180 int refcount;
6181 /* The offset from the start of the minipool. */
6182 HOST_WIDE_INT offset;
6183 /* The value in table. */
6184 rtx value;
6185 /* The mode of value. */
6186 enum machine_mode mode;
6187 int fix_size;
6190 struct minipool_fixup
6192 Mfix * next;
6193 rtx insn;
6194 HOST_WIDE_INT address;
6195 rtx * loc;
6196 enum machine_mode mode;
6197 int fix_size;
6198 rtx value;
6199 Mnode * minipool;
6200 HOST_WIDE_INT forwards;
6201 HOST_WIDE_INT backwards;
6204 /* Fixes less than a word need padding out to a word boundary. */
6205 #define MINIPOOL_FIX_SIZE(mode) \
6206 (GET_MODE_SIZE ((mode)) >= 4 ? GET_MODE_SIZE ((mode)) : 4)
6208 static Mnode * minipool_vector_head;
6209 static Mnode * minipool_vector_tail;
6210 static rtx minipool_vector_label;
6212 /* The linked list of all minipool fixes required for this function. */
6213 Mfix * minipool_fix_head;
6214 Mfix * minipool_fix_tail;
6215 /* The fix entry for the current minipool, once it has been placed. */
6216 Mfix * minipool_barrier;
6218 /* Determines if INSN is the start of a jump table. Returns the end
6219 of the TABLE or NULL_RTX. */
6221 static rtx
6222 is_jump_table (insn)
6223 rtx insn;
6225 rtx table;
6227 if (GET_CODE (insn) == JUMP_INSN
6228 && JUMP_LABEL (insn) != NULL
6229 && ((table = next_real_insn (JUMP_LABEL (insn)))
6230 == next_real_insn (insn))
6231 && table != NULL
6232 && GET_CODE (table) == JUMP_INSN
6233 && (GET_CODE (PATTERN (table)) == ADDR_VEC
6234 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
6235 return table;
6237 return NULL_RTX;
6240 #ifndef JUMP_TABLES_IN_TEXT_SECTION
6241 #define JUMP_TABLES_IN_TEXT_SECTION 0
6242 #endif
6244 static HOST_WIDE_INT
6245 get_jump_table_size (insn)
6246 rtx insn;
6248 /* ADDR_VECs only take room if read-only data does into the text
6249 section. */
6250 if (JUMP_TABLES_IN_TEXT_SECTION
6251 #if !defined(READONLY_DATA_SECTION) && !defined(READONLY_DATA_SECTION_ASM_OP)
6252 || 1
6253 #endif
6256 rtx body = PATTERN (insn);
6257 int elt = GET_CODE (body) == ADDR_DIFF_VEC ? 1 : 0;
6259 return GET_MODE_SIZE (GET_MODE (body)) * XVECLEN (body, elt);
6262 return 0;
6265 /* Move a minipool fix MP from its current location to before MAX_MP.
6266 If MAX_MP is NULL, then MP doesn't need moving, but the addressing
6267 contrains may need updating. */
6269 static Mnode *
6270 move_minipool_fix_forward_ref (mp, max_mp, max_address)
6271 Mnode * mp;
6272 Mnode * max_mp;
6273 HOST_WIDE_INT max_address;
6275 /* This should never be true and the code below assumes these are
6276 different. */
6277 if (mp == max_mp)
6278 abort ();
6280 if (max_mp == NULL)
6282 if (max_address < mp->max_address)
6283 mp->max_address = max_address;
6285 else
6287 if (max_address > max_mp->max_address - mp->fix_size)
6288 mp->max_address = max_mp->max_address - mp->fix_size;
6289 else
6290 mp->max_address = max_address;
6292 /* Unlink MP from its current position. Since max_mp is non-null,
6293 mp->prev must be non-null. */
6294 mp->prev->next = mp->next;
6295 if (mp->next != NULL)
6296 mp->next->prev = mp->prev;
6297 else
6298 minipool_vector_tail = mp->prev;
6300 /* Re-insert it before MAX_MP. */
6301 mp->next = max_mp;
6302 mp->prev = max_mp->prev;
6303 max_mp->prev = mp;
6305 if (mp->prev != NULL)
6306 mp->prev->next = mp;
6307 else
6308 minipool_vector_head = mp;
6311 /* Save the new entry. */
6312 max_mp = mp;
6314 /* Scan over the preceding entries and adjust their addresses as
6315 required. */
6316 while (mp->prev != NULL
6317 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
6319 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
6320 mp = mp->prev;
6323 return max_mp;
6326 /* Add a constant to the minipool for a forward reference. Returns the
6327 node added or NULL if the constant will not fit in this pool. */
6329 static Mnode *
6330 add_minipool_forward_ref (fix)
6331 Mfix * fix;
6333 /* If set, max_mp is the first pool_entry that has a lower
6334 constraint than the one we are trying to add. */
6335 Mnode * max_mp = NULL;
6336 HOST_WIDE_INT max_address = fix->address + fix->forwards;
6337 Mnode * mp;
6339 /* If this fix's address is greater than the address of the first
6340 entry, then we can't put the fix in this pool. We subtract the
6341 size of the current fix to ensure that if the table is fully
6342 packed we still have enough room to insert this value by suffling
6343 the other fixes forwards. */
6344 if (minipool_vector_head &&
6345 fix->address >= minipool_vector_head->max_address - fix->fix_size)
6346 return NULL;
6348 /* Scan the pool to see if a constant with the same value has
6349 already been added. While we are doing this, also note the
6350 location where we must insert the constant if it doesn't already
6351 exist. */
6352 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
6354 if (GET_CODE (fix->value) == GET_CODE (mp->value)
6355 && fix->mode == mp->mode
6356 && (GET_CODE (fix->value) != CODE_LABEL
6357 || (CODE_LABEL_NUMBER (fix->value)
6358 == CODE_LABEL_NUMBER (mp->value)))
6359 && rtx_equal_p (fix->value, mp->value))
6361 /* More than one fix references this entry. */
6362 mp->refcount++;
6363 return move_minipool_fix_forward_ref (mp, max_mp, max_address);
6366 /* Note the insertion point if necessary. */
6367 if (max_mp == NULL
6368 && mp->max_address > max_address)
6369 max_mp = mp;
6372 /* The value is not currently in the minipool, so we need to create
6373 a new entry for it. If MAX_MP is NULL, the entry will be put on
6374 the end of the list since the placement is less constrained than
6375 any existing entry. Otherwise, we insert the new fix before
6376 MAX_MP and, if necessary, adjust the constraints on the other
6377 entries. */
6378 mp = xmalloc (sizeof (* mp));
6379 mp->fix_size = fix->fix_size;
6380 mp->mode = fix->mode;
6381 mp->value = fix->value;
6382 mp->refcount = 1;
6383 /* Not yet required for a backwards ref. */
6384 mp->min_address = -65536;
6386 if (max_mp == NULL)
6388 mp->max_address = max_address;
6389 mp->next = NULL;
6390 mp->prev = minipool_vector_tail;
6392 if (mp->prev == NULL)
6394 minipool_vector_head = mp;
6395 minipool_vector_label = gen_label_rtx ();
6397 else
6398 mp->prev->next = mp;
6400 minipool_vector_tail = mp;
6402 else
6404 if (max_address > max_mp->max_address - mp->fix_size)
6405 mp->max_address = max_mp->max_address - mp->fix_size;
6406 else
6407 mp->max_address = max_address;
6409 mp->next = max_mp;
6410 mp->prev = max_mp->prev;
6411 max_mp->prev = mp;
6412 if (mp->prev != NULL)
6413 mp->prev->next = mp;
6414 else
6415 minipool_vector_head = mp;
6418 /* Save the new entry. */
6419 max_mp = mp;
6421 /* Scan over the preceding entries and adjust their addresses as
6422 required. */
6423 while (mp->prev != NULL
6424 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
6426 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
6427 mp = mp->prev;
6430 return max_mp;
6433 static Mnode *
6434 move_minipool_fix_backward_ref (mp, min_mp, min_address)
6435 Mnode * mp;
6436 Mnode * min_mp;
6437 HOST_WIDE_INT min_address;
6439 HOST_WIDE_INT offset;
6441 /* This should never be true, and the code below assumes these are
6442 different. */
6443 if (mp == min_mp)
6444 abort ();
6446 if (min_mp == NULL)
6448 if (min_address > mp->min_address)
6449 mp->min_address = min_address;
6451 else
6453 /* We will adjust this below if it is too loose. */
6454 mp->min_address = min_address;
6456 /* Unlink MP from its current position. Since min_mp is non-null,
6457 mp->next must be non-null. */
6458 mp->next->prev = mp->prev;
6459 if (mp->prev != NULL)
6460 mp->prev->next = mp->next;
6461 else
6462 minipool_vector_head = mp->next;
6464 /* Reinsert it after MIN_MP. */
6465 mp->prev = min_mp;
6466 mp->next = min_mp->next;
6467 min_mp->next = mp;
6468 if (mp->next != NULL)
6469 mp->next->prev = mp;
6470 else
6471 minipool_vector_tail = mp;
6474 min_mp = mp;
6476 offset = 0;
6477 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
6479 mp->offset = offset;
6480 if (mp->refcount > 0)
6481 offset += mp->fix_size;
6483 if (mp->next && mp->next->min_address < mp->min_address + mp->fix_size)
6484 mp->next->min_address = mp->min_address + mp->fix_size;
6487 return min_mp;
6490 /* Add a constant to the minipool for a backward reference. Returns the
6491 node added or NULL if the constant will not fit in this pool.
6493 Note that the code for insertion for a backwards reference can be
6494 somewhat confusing because the calculated offsets for each fix do
6495 not take into account the size of the pool (which is still under
6496 construction. */
6498 static Mnode *
6499 add_minipool_backward_ref (fix)
6500 Mfix * fix;
6502 /* If set, min_mp is the last pool_entry that has a lower constraint
6503 than the one we are trying to add. */
6504 Mnode * min_mp = NULL;
6505 /* This can be negative, since it is only a constraint. */
6506 HOST_WIDE_INT min_address = fix->address - fix->backwards;
6507 Mnode * mp;
6509 /* If we can't reach the current pool from this insn, or if we can't
6510 insert this entry at the end of the pool without pushing other
6511 fixes out of range, then we don't try. This ensures that we
6512 can't fail later on. */
6513 if (min_address >= minipool_barrier->address
6514 || (minipool_vector_tail->min_address + fix->fix_size
6515 >= minipool_barrier->address))
6516 return NULL;
6518 /* Scan the pool to see if a constant with the same value has
6519 already been added. While we are doing this, also note the
6520 location where we must insert the constant if it doesn't already
6521 exist. */
6522 for (mp = minipool_vector_tail; mp != NULL; mp = mp->prev)
6524 if (GET_CODE (fix->value) == GET_CODE (mp->value)
6525 && fix->mode == mp->mode
6526 && (GET_CODE (fix->value) != CODE_LABEL
6527 || (CODE_LABEL_NUMBER (fix->value)
6528 == CODE_LABEL_NUMBER (mp->value)))
6529 && rtx_equal_p (fix->value, mp->value)
6530 /* Check that there is enough slack to move this entry to the
6531 end of the table (this is conservative). */
6532 && (mp->max_address
6533 > (minipool_barrier->address
6534 + minipool_vector_tail->offset
6535 + minipool_vector_tail->fix_size)))
6537 mp->refcount++;
6538 return move_minipool_fix_backward_ref (mp, min_mp, min_address);
6541 if (min_mp != NULL)
6542 mp->min_address += fix->fix_size;
6543 else
6545 /* Note the insertion point if necessary. */
6546 if (mp->min_address < min_address)
6547 min_mp = mp;
6548 else if (mp->max_address
6549 < minipool_barrier->address + mp->offset + fix->fix_size)
6551 /* Inserting before this entry would push the fix beyond
6552 its maximum address (which can happen if we have
6553 re-located a forwards fix); force the new fix to come
6554 after it. */
6555 min_mp = mp;
6556 min_address = mp->min_address + fix->fix_size;
6561 /* We need to create a new entry. */
6562 mp = xmalloc (sizeof (* mp));
6563 mp->fix_size = fix->fix_size;
6564 mp->mode = fix->mode;
6565 mp->value = fix->value;
6566 mp->refcount = 1;
6567 mp->max_address = minipool_barrier->address + 65536;
6569 mp->min_address = min_address;
6571 if (min_mp == NULL)
6573 mp->prev = NULL;
6574 mp->next = minipool_vector_head;
6576 if (mp->next == NULL)
6578 minipool_vector_tail = mp;
6579 minipool_vector_label = gen_label_rtx ();
6581 else
6582 mp->next->prev = mp;
6584 minipool_vector_head = mp;
6586 else
6588 mp->next = min_mp->next;
6589 mp->prev = min_mp;
6590 min_mp->next = mp;
6592 if (mp->next != NULL)
6593 mp->next->prev = mp;
6594 else
6595 minipool_vector_tail = mp;
6598 /* Save the new entry. */
6599 min_mp = mp;
6601 if (mp->prev)
6602 mp = mp->prev;
6603 else
6604 mp->offset = 0;
6606 /* Scan over the following entries and adjust their offsets. */
6607 while (mp->next != NULL)
6609 if (mp->next->min_address < mp->min_address + mp->fix_size)
6610 mp->next->min_address = mp->min_address + mp->fix_size;
6612 if (mp->refcount)
6613 mp->next->offset = mp->offset + mp->fix_size;
6614 else
6615 mp->next->offset = mp->offset;
6617 mp = mp->next;
6620 return min_mp;
6623 static void
6624 assign_minipool_offsets (barrier)
6625 Mfix * barrier;
6627 HOST_WIDE_INT offset = 0;
6628 Mnode * mp;
6630 minipool_barrier = barrier;
6632 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
6634 mp->offset = offset;
6636 if (mp->refcount > 0)
6637 offset += mp->fix_size;
6641 /* Output the literal table */
6642 static void
6643 dump_minipool (scan)
6644 rtx scan;
6646 Mnode * mp;
6647 Mnode * nmp;
6649 if (rtl_dump_file)
6650 fprintf (rtl_dump_file,
6651 ";; Emitting minipool after insn %u; address %ld\n",
6652 INSN_UID (scan), (unsigned long) minipool_barrier->address);
6654 scan = emit_label_after (gen_label_rtx (), scan);
6655 scan = emit_insn_after (gen_align_4 (), scan);
6656 scan = emit_label_after (minipool_vector_label, scan);
6658 for (mp = minipool_vector_head; mp != NULL; mp = nmp)
6660 if (mp->refcount > 0)
6662 if (rtl_dump_file)
6664 fprintf (rtl_dump_file,
6665 ";; Offset %u, min %ld, max %ld ",
6666 (unsigned) mp->offset, (unsigned long) mp->min_address,
6667 (unsigned long) mp->max_address);
6668 arm_print_value (rtl_dump_file, mp->value);
6669 fputc ('\n', rtl_dump_file);
6672 switch (mp->fix_size)
6674 #ifdef HAVE_consttable_1
6675 case 1:
6676 scan = emit_insn_after (gen_consttable_1 (mp->value), scan);
6677 break;
6679 #endif
6680 #ifdef HAVE_consttable_2
6681 case 2:
6682 scan = emit_insn_after (gen_consttable_2 (mp->value), scan);
6683 break;
6685 #endif
6686 #ifdef HAVE_consttable_4
6687 case 4:
6688 scan = emit_insn_after (gen_consttable_4 (mp->value), scan);
6689 break;
6691 #endif
6692 #ifdef HAVE_consttable_8
6693 case 8:
6694 scan = emit_insn_after (gen_consttable_8 (mp->value), scan);
6695 break;
6697 #endif
6698 default:
6699 abort ();
6700 break;
6704 nmp = mp->next;
6705 free (mp);
6708 minipool_vector_head = minipool_vector_tail = NULL;
6709 scan = emit_insn_after (gen_consttable_end (), scan);
6710 scan = emit_barrier_after (scan);
6713 /* Return the cost of forcibly inserting a barrier after INSN. */
6715 static int
6716 arm_barrier_cost (insn)
6717 rtx insn;
6719 /* Basing the location of the pool on the loop depth is preferable,
6720 but at the moment, the basic block information seems to be
6721 corrupt by this stage of the compilation. */
6722 int base_cost = 50;
6723 rtx next = next_nonnote_insn (insn);
6725 if (next != NULL && GET_CODE (next) == CODE_LABEL)
6726 base_cost -= 20;
6728 switch (GET_CODE (insn))
6730 case CODE_LABEL:
6731 /* It will always be better to place the table before the label, rather
6732 than after it. */
6733 return 50;
6735 case INSN:
6736 case CALL_INSN:
6737 return base_cost;
6739 case JUMP_INSN:
6740 return base_cost - 10;
6742 default:
6743 return base_cost + 10;
6747 /* Find the best place in the insn stream in the range
6748 (FIX->address,MAX_ADDRESS) to forcibly insert a minipool barrier.
6749 Create the barrier by inserting a jump and add a new fix entry for
6750 it. */
6752 static Mfix *
6753 create_fix_barrier (fix, max_address)
6754 Mfix * fix;
6755 HOST_WIDE_INT max_address;
6757 HOST_WIDE_INT count = 0;
6758 rtx barrier;
6759 rtx from = fix->insn;
6760 rtx selected = from;
6761 int selected_cost;
6762 HOST_WIDE_INT selected_address;
6763 Mfix * new_fix;
6764 HOST_WIDE_INT max_count = max_address - fix->address;
6765 rtx label = gen_label_rtx ();
6767 selected_cost = arm_barrier_cost (from);
6768 selected_address = fix->address;
6770 while (from && count < max_count)
6772 rtx tmp;
6773 int new_cost;
6775 /* This code shouldn't have been called if there was a natural barrier
6776 within range. */
6777 if (GET_CODE (from) == BARRIER)
6778 abort ();
6780 /* Count the length of this insn. */
6781 count += get_attr_length (from);
6783 /* If there is a jump table, add its length. */
6784 tmp = is_jump_table (from);
6785 if (tmp != NULL)
6787 count += get_jump_table_size (tmp);
6789 /* Jump tables aren't in a basic block, so base the cost on
6790 the dispatch insn. If we select this location, we will
6791 still put the pool after the table. */
6792 new_cost = arm_barrier_cost (from);
6794 if (count < max_count && new_cost <= selected_cost)
6796 selected = tmp;
6797 selected_cost = new_cost;
6798 selected_address = fix->address + count;
6801 /* Continue after the dispatch table. */
6802 from = NEXT_INSN (tmp);
6803 continue;
6806 new_cost = arm_barrier_cost (from);
6808 if (count < max_count && new_cost <= selected_cost)
6810 selected = from;
6811 selected_cost = new_cost;
6812 selected_address = fix->address + count;
6815 from = NEXT_INSN (from);
6818 /* Create a new JUMP_INSN that branches around a barrier. */
6819 from = emit_jump_insn_after (gen_jump (label), selected);
6820 JUMP_LABEL (from) = label;
6821 barrier = emit_barrier_after (from);
6822 emit_label_after (label, barrier);
6824 /* Create a minipool barrier entry for the new barrier. */
6825 new_fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* new_fix));
6826 new_fix->insn = barrier;
6827 new_fix->address = selected_address;
6828 new_fix->next = fix->next;
6829 fix->next = new_fix;
6831 return new_fix;
6834 /* Record that there is a natural barrier in the insn stream at
6835 ADDRESS. */
6836 static void
6837 push_minipool_barrier (insn, address)
6838 rtx insn;
6839 HOST_WIDE_INT address;
6841 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
6843 fix->insn = insn;
6844 fix->address = address;
6846 fix->next = NULL;
6847 if (minipool_fix_head != NULL)
6848 minipool_fix_tail->next = fix;
6849 else
6850 minipool_fix_head = fix;
6852 minipool_fix_tail = fix;
6855 /* Record INSN, which will need fixing up to load a value from the
6856 minipool. ADDRESS is the offset of the insn since the start of the
6857 function; LOC is a pointer to the part of the insn which requires
6858 fixing; VALUE is the constant that must be loaded, which is of type
6859 MODE. */
6860 static void
6861 push_minipool_fix (insn, address, loc, mode, value)
6862 rtx insn;
6863 HOST_WIDE_INT address;
6864 rtx * loc;
6865 enum machine_mode mode;
6866 rtx value;
6868 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
6870 #ifdef AOF_ASSEMBLER
6871 /* PIC symbol refereneces need to be converted into offsets into the
6872 based area. */
6873 /* XXX This shouldn't be done here. */
6874 if (flag_pic && GET_CODE (value) == SYMBOL_REF)
6875 value = aof_pic_entry (value);
6876 #endif /* AOF_ASSEMBLER */
6878 fix->insn = insn;
6879 fix->address = address;
6880 fix->loc = loc;
6881 fix->mode = mode;
6882 fix->fix_size = MINIPOOL_FIX_SIZE (mode);
6883 fix->value = value;
6884 fix->forwards = get_attr_pool_range (insn);
6885 fix->backwards = get_attr_neg_pool_range (insn);
6886 fix->minipool = NULL;
6888 /* If an insn doesn't have a range defined for it, then it isn't
6889 expecting to be reworked by this code. Better to abort now than
6890 to generate duff assembly code. */
6891 if (fix->forwards == 0 && fix->backwards == 0)
6892 abort ();
6894 if (rtl_dump_file)
6896 fprintf (rtl_dump_file,
6897 ";; %smode fixup for i%d; addr %lu, range (%ld,%ld): ",
6898 GET_MODE_NAME (mode),
6899 INSN_UID (insn), (unsigned long) address,
6900 -1 * (long)fix->backwards, (long)fix->forwards);
6901 arm_print_value (rtl_dump_file, fix->value);
6902 fprintf (rtl_dump_file, "\n");
6905 /* Add it to the chain of fixes. */
6906 fix->next = NULL;
6908 if (minipool_fix_head != NULL)
6909 minipool_fix_tail->next = fix;
6910 else
6911 minipool_fix_head = fix;
6913 minipool_fix_tail = fix;
6916 /* Scan INSN and note any of its operands that need fixing.
6917 If DO_PUSHES is false we do not actually push any of the fixups
6918 needed. The function returns TRUE is any fixups were needed/pushed.
6919 This is used by arm_memory_load_p() which needs to know about loads
6920 of constants that will be converted into minipool loads. */
6922 static bool
6923 note_invalid_constants (insn, address, do_pushes)
6924 rtx insn;
6925 HOST_WIDE_INT address;
6926 int do_pushes;
6928 bool result = false;
6929 int opno;
6931 extract_insn (insn);
6933 if (!constrain_operands (1))
6934 fatal_insn_not_found (insn);
6936 /* Fill in recog_op_alt with information about the constraints of this insn. */
6937 preprocess_constraints ();
6939 for (opno = 0; opno < recog_data.n_operands; opno++)
6941 /* Things we need to fix can only occur in inputs. */
6942 if (recog_data.operand_type[opno] != OP_IN)
6943 continue;
6945 /* If this alternative is a memory reference, then any mention
6946 of constants in this alternative is really to fool reload
6947 into allowing us to accept one there. We need to fix them up
6948 now so that we output the right code. */
6949 if (recog_op_alt[opno][which_alternative].memory_ok)
6951 rtx op = recog_data.operand[opno];
6953 if (CONSTANT_P (op))
6955 if (do_pushes)
6956 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
6957 recog_data.operand_mode[opno], op);
6958 result = true;
6960 else if (GET_CODE (op) == MEM
6961 && GET_CODE (XEXP (op, 0)) == SYMBOL_REF
6962 && CONSTANT_POOL_ADDRESS_P (XEXP (op, 0)))
6964 if (do_pushes)
6965 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
6966 recog_data.operand_mode[opno],
6967 get_pool_constant (XEXP (op, 0)));
6969 result = true;
6974 return result;
6977 void
6978 arm_reorg (first)
6979 rtx first;
6981 rtx insn;
6982 HOST_WIDE_INT address = 0;
6983 Mfix * fix;
6985 minipool_fix_head = minipool_fix_tail = NULL;
6987 /* The first insn must always be a note, or the code below won't
6988 scan it properly. */
6989 if (GET_CODE (first) != NOTE)
6990 abort ();
6992 /* Scan all the insns and record the operands that will need fixing. */
6993 for (insn = next_nonnote_insn (first); insn; insn = next_nonnote_insn (insn))
6995 if (TARGET_CIRRUS_FIX_INVALID_INSNS
6996 && (arm_cirrus_insn_p (insn)
6997 || GET_CODE (insn) == JUMP_INSN
6998 || arm_memory_load_p (insn)))
6999 cirrus_reorg (insn);
7001 if (GET_CODE (insn) == BARRIER)
7002 push_minipool_barrier (insn, address);
7003 else if (INSN_P (insn))
7005 rtx table;
7007 note_invalid_constants (insn, address, true);
7008 address += get_attr_length (insn);
7010 /* If the insn is a vector jump, add the size of the table
7011 and skip the table. */
7012 if ((table = is_jump_table (insn)) != NULL)
7014 address += get_jump_table_size (table);
7015 insn = table;
7020 fix = minipool_fix_head;
7022 /* Now scan the fixups and perform the required changes. */
7023 while (fix)
7025 Mfix * ftmp;
7026 Mfix * fdel;
7027 Mfix * last_added_fix;
7028 Mfix * last_barrier = NULL;
7029 Mfix * this_fix;
7031 /* Skip any further barriers before the next fix. */
7032 while (fix && GET_CODE (fix->insn) == BARRIER)
7033 fix = fix->next;
7035 /* No more fixes. */
7036 if (fix == NULL)
7037 break;
7039 last_added_fix = NULL;
7041 for (ftmp = fix; ftmp; ftmp = ftmp->next)
7043 if (GET_CODE (ftmp->insn) == BARRIER)
7045 if (ftmp->address >= minipool_vector_head->max_address)
7046 break;
7048 last_barrier = ftmp;
7050 else if ((ftmp->minipool = add_minipool_forward_ref (ftmp)) == NULL)
7051 break;
7053 last_added_fix = ftmp; /* Keep track of the last fix added. */
7056 /* If we found a barrier, drop back to that; any fixes that we
7057 could have reached but come after the barrier will now go in
7058 the next mini-pool. */
7059 if (last_barrier != NULL)
7061 /* Reduce the refcount for those fixes that won't go into this
7062 pool after all. */
7063 for (fdel = last_barrier->next;
7064 fdel && fdel != ftmp;
7065 fdel = fdel->next)
7067 fdel->minipool->refcount--;
7068 fdel->minipool = NULL;
7071 ftmp = last_barrier;
7073 else
7075 /* ftmp is first fix that we can't fit into this pool and
7076 there no natural barriers that we could use. Insert a
7077 new barrier in the code somewhere between the previous
7078 fix and this one, and arrange to jump around it. */
7079 HOST_WIDE_INT max_address;
7081 /* The last item on the list of fixes must be a barrier, so
7082 we can never run off the end of the list of fixes without
7083 last_barrier being set. */
7084 if (ftmp == NULL)
7085 abort ();
7087 max_address = minipool_vector_head->max_address;
7088 /* Check that there isn't another fix that is in range that
7089 we couldn't fit into this pool because the pool was
7090 already too large: we need to put the pool before such an
7091 instruction. */
7092 if (ftmp->address < max_address)
7093 max_address = ftmp->address;
7095 last_barrier = create_fix_barrier (last_added_fix, max_address);
7098 assign_minipool_offsets (last_barrier);
7100 while (ftmp)
7102 if (GET_CODE (ftmp->insn) != BARRIER
7103 && ((ftmp->minipool = add_minipool_backward_ref (ftmp))
7104 == NULL))
7105 break;
7107 ftmp = ftmp->next;
7110 /* Scan over the fixes we have identified for this pool, fixing them
7111 up and adding the constants to the pool itself. */
7112 for (this_fix = fix; this_fix && ftmp != this_fix;
7113 this_fix = this_fix->next)
7114 if (GET_CODE (this_fix->insn) != BARRIER)
7116 rtx addr
7117 = plus_constant (gen_rtx_LABEL_REF (VOIDmode,
7118 minipool_vector_label),
7119 this_fix->minipool->offset);
7120 *this_fix->loc = gen_rtx_MEM (this_fix->mode, addr);
7123 dump_minipool (last_barrier->insn);
7124 fix = ftmp;
7127 /* From now on we must synthesize any constants that we can't handle
7128 directly. This can happen if the RTL gets split during final
7129 instruction generation. */
7130 after_arm_reorg = 1;
7132 /* Free the minipool memory. */
7133 obstack_free (&minipool_obstack, minipool_startobj);
7136 /* Routines to output assembly language. */
7138 /* If the rtx is the correct value then return the string of the number.
7139 In this way we can ensure that valid double constants are generated even
7140 when cross compiling. */
7142 const char *
7143 fp_immediate_constant (x)
7144 rtx x;
7146 REAL_VALUE_TYPE r;
7147 int i;
7149 if (!fpa_consts_inited)
7150 init_fpa_table ();
7152 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
7153 for (i = 0; i < 8; i++)
7154 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
7155 return strings_fpa[i];
7157 abort ();
7160 /* As for fp_immediate_constant, but value is passed directly, not in rtx. */
7162 static const char *
7163 fp_const_from_val (r)
7164 REAL_VALUE_TYPE * r;
7166 int i;
7168 if (!fpa_consts_inited)
7169 init_fpa_table ();
7171 for (i = 0; i < 8; i++)
7172 if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
7173 return strings_fpa[i];
7175 abort ();
7178 /* Output the operands of a LDM/STM instruction to STREAM.
7179 MASK is the ARM register set mask of which only bits 0-15 are important.
7180 REG is the base register, either the frame pointer or the stack pointer,
7181 INSTR is the possibly suffixed load or store instruction. */
7183 static void
7184 print_multi_reg (stream, instr, reg, mask)
7185 FILE * stream;
7186 const char * instr;
7187 int reg;
7188 int mask;
7190 int i;
7191 int not_first = FALSE;
7193 fputc ('\t', stream);
7194 asm_fprintf (stream, instr, reg);
7195 fputs (", {", stream);
7197 for (i = 0; i <= LAST_ARM_REGNUM; i++)
7198 if (mask & (1 << i))
7200 if (not_first)
7201 fprintf (stream, ", ");
7203 asm_fprintf (stream, "%r", i);
7204 not_first = TRUE;
7207 fprintf (stream, "}");
7209 /* Add a ^ character for the 26-bit ABI, but only if we were loading
7210 the PC. Otherwise we would generate an UNPREDICTABLE instruction.
7211 Strictly speaking the instruction would be unpredicatble only if
7212 we were writing back the base register as well, but since we never
7213 want to generate an LDM type 2 instruction (register bank switching)
7214 which is what you get if the PC is not being loaded, we do not need
7215 to check for writeback. */
7216 if (! TARGET_APCS_32
7217 && ((mask & (1 << PC_REGNUM)) != 0))
7218 fprintf (stream, "^");
7220 fprintf (stream, "\n");
7223 /* Output a 'call' insn. */
7225 const char *
7226 output_call (operands)
7227 rtx * operands;
7229 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
7231 if (REGNO (operands[0]) == LR_REGNUM)
7233 operands[0] = gen_rtx_REG (SImode, IP_REGNUM);
7234 output_asm_insn ("mov%?\t%0, %|lr", operands);
7237 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
7239 if (TARGET_INTERWORK)
7240 output_asm_insn ("bx%?\t%0", operands);
7241 else
7242 output_asm_insn ("mov%?\t%|pc, %0", operands);
7244 return "";
7247 static int
7248 eliminate_lr2ip (x)
7249 rtx * x;
7251 int something_changed = 0;
7252 rtx x0 = * x;
7253 int code = GET_CODE (x0);
7254 int i, j;
7255 const char * fmt;
7257 switch (code)
7259 case REG:
7260 if (REGNO (x0) == LR_REGNUM)
7262 *x = gen_rtx_REG (SImode, IP_REGNUM);
7263 return 1;
7265 return 0;
7266 default:
7267 /* Scan through the sub-elements and change any references there. */
7268 fmt = GET_RTX_FORMAT (code);
7270 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7271 if (fmt[i] == 'e')
7272 something_changed |= eliminate_lr2ip (&XEXP (x0, i));
7273 else if (fmt[i] == 'E')
7274 for (j = 0; j < XVECLEN (x0, i); j++)
7275 something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));
7277 return something_changed;
7281 /* Output a 'call' insn that is a reference in memory. */
7283 const char *
7284 output_call_mem (operands)
7285 rtx * operands;
7287 operands[0] = copy_rtx (operands[0]); /* Be ultra careful. */
7288 /* Handle calls using lr by using ip (which may be clobbered in subr anyway). */
7289 if (eliminate_lr2ip (&operands[0]))
7290 output_asm_insn ("mov%?\t%|ip, %|lr", operands);
7292 if (TARGET_INTERWORK)
7294 output_asm_insn ("ldr%?\t%|ip, %0", operands);
7295 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
7296 output_asm_insn ("bx%?\t%|ip", operands);
7298 else
7300 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
7301 output_asm_insn ("ldr%?\t%|pc, %0", operands);
7304 return "";
7308 /* Output a move from arm registers to an fpa registers.
7309 OPERANDS[0] is an fpa register.
7310 OPERANDS[1] is the first registers of an arm register pair. */
7312 const char *
7313 output_mov_long_double_fpa_from_arm (operands)
7314 rtx * operands;
7316 int arm_reg0 = REGNO (operands[1]);
7317 rtx ops[3];
7319 if (arm_reg0 == IP_REGNUM)
7320 abort ();
7322 ops[0] = gen_rtx_REG (SImode, arm_reg0);
7323 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
7324 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
7326 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
7327 output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);
7329 return "";
7332 /* Output a move from an fpa register to arm registers.
7333 OPERANDS[0] is the first registers of an arm register pair.
7334 OPERANDS[1] is an fpa register. */
7336 const char *
7337 output_mov_long_double_arm_from_fpa (operands)
7338 rtx * operands;
7340 int arm_reg0 = REGNO (operands[0]);
7341 rtx ops[3];
7343 if (arm_reg0 == IP_REGNUM)
7344 abort ();
7346 ops[0] = gen_rtx_REG (SImode, arm_reg0);
7347 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
7348 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
7350 output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
7351 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
7352 return "";
7355 /* Output a move from arm registers to arm registers of a long double
7356 OPERANDS[0] is the destination.
7357 OPERANDS[1] is the source. */
7359 const char *
7360 output_mov_long_double_arm_from_arm (operands)
7361 rtx * operands;
7363 /* We have to be careful here because the two might overlap. */
7364 int dest_start = REGNO (operands[0]);
7365 int src_start = REGNO (operands[1]);
7366 rtx ops[2];
7367 int i;
7369 if (dest_start < src_start)
7371 for (i = 0; i < 3; i++)
7373 ops[0] = gen_rtx_REG (SImode, dest_start + i);
7374 ops[1] = gen_rtx_REG (SImode, src_start + i);
7375 output_asm_insn ("mov%?\t%0, %1", ops);
7378 else
7380 for (i = 2; i >= 0; i--)
7382 ops[0] = gen_rtx_REG (SImode, dest_start + i);
7383 ops[1] = gen_rtx_REG (SImode, src_start + i);
7384 output_asm_insn ("mov%?\t%0, %1", ops);
7388 return "";
7392 /* Output a move from arm registers to an fpa registers.
7393 OPERANDS[0] is an fpa register.
7394 OPERANDS[1] is the first registers of an arm register pair. */
7396 const char *
7397 output_mov_double_fpa_from_arm (operands)
7398 rtx * operands;
7400 int arm_reg0 = REGNO (operands[1]);
7401 rtx ops[2];
7403 if (arm_reg0 == IP_REGNUM)
7404 abort ();
7406 ops[0] = gen_rtx_REG (SImode, arm_reg0);
7407 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
7408 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
7409 output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
7410 return "";
7413 /* Output a move from an fpa register to arm registers.
7414 OPERANDS[0] is the first registers of an arm register pair.
7415 OPERANDS[1] is an fpa register. */
7417 const char *
7418 output_mov_double_arm_from_fpa (operands)
7419 rtx * operands;
7421 int arm_reg0 = REGNO (operands[0]);
7422 rtx ops[2];
7424 if (arm_reg0 == IP_REGNUM)
7425 abort ();
7427 ops[0] = gen_rtx_REG (SImode, arm_reg0);
7428 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
7429 output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
7430 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
7431 return "";
7434 /* Output a move between double words.
7435 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
7436 or MEM<-REG and all MEMs must be offsettable addresses. */
7438 const char *
7439 output_move_double (operands)
7440 rtx * operands;
7442 enum rtx_code code0 = GET_CODE (operands[0]);
7443 enum rtx_code code1 = GET_CODE (operands[1]);
7444 rtx otherops[3];
7446 if (code0 == REG)
7448 int reg0 = REGNO (operands[0]);
7450 otherops[0] = gen_rtx_REG (SImode, 1 + reg0);
7452 if (code1 == REG)
7454 int reg1 = REGNO (operands[1]);
7455 if (reg1 == IP_REGNUM)
7456 abort ();
7458 /* Ensure the second source is not overwritten. */
7459 if (reg1 == reg0 + (WORDS_BIG_ENDIAN ? -1 : 1))
7460 output_asm_insn ("mov%?\t%Q0, %Q1\n\tmov%?\t%R0, %R1", operands);
7461 else
7462 output_asm_insn ("mov%?\t%R0, %R1\n\tmov%?\t%Q0, %Q1", operands);
7464 else if (code1 == CONST_DOUBLE)
7466 if (GET_MODE (operands[1]) == DFmode)
7468 REAL_VALUE_TYPE r;
7469 long l[2];
7471 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
7472 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
7473 otherops[1] = GEN_INT (l[1]);
7474 operands[1] = GEN_INT (l[0]);
7476 else if (GET_MODE (operands[1]) != VOIDmode)
7477 abort ();
7478 else if (WORDS_BIG_ENDIAN)
7480 otherops[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
7481 operands[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
7483 else
7485 otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
7486 operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
7489 output_mov_immediate (operands);
7490 output_mov_immediate (otherops);
7492 else if (code1 == CONST_INT)
7494 #if HOST_BITS_PER_WIDE_INT > 32
7495 /* If HOST_WIDE_INT is more than 32 bits, the intval tells us
7496 what the upper word is. */
7497 if (WORDS_BIG_ENDIAN)
7499 otherops[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
7500 operands[1] = GEN_INT (INTVAL (operands[1]) >> 32);
7502 else
7504 otherops[1] = GEN_INT (INTVAL (operands[1]) >> 32);
7505 operands[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
7507 #else
7508 /* Sign extend the intval into the high-order word. */
7509 if (WORDS_BIG_ENDIAN)
7511 otherops[1] = operands[1];
7512 operands[1] = (INTVAL (operands[1]) < 0
7513 ? constm1_rtx : const0_rtx);
7515 else
7516 otherops[1] = INTVAL (operands[1]) < 0 ? constm1_rtx : const0_rtx;
7517 #endif
7518 output_mov_immediate (otherops);
7519 output_mov_immediate (operands);
7521 else if (code1 == MEM)
7523 switch (GET_CODE (XEXP (operands[1], 0)))
7525 case REG:
7526 output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
7527 break;
7529 case PRE_INC:
7530 abort (); /* Should never happen now. */
7531 break;
7533 case PRE_DEC:
7534 output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
7535 break;
7537 case POST_INC:
7538 output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
7539 break;
7541 case POST_DEC:
7542 abort (); /* Should never happen now. */
7543 break;
7545 case LABEL_REF:
7546 case CONST:
7547 output_asm_insn ("adr%?\t%0, %1", operands);
7548 output_asm_insn ("ldm%?ia\t%0, %M0", operands);
7549 break;
7551 default:
7552 if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1),
7553 GET_MODE (XEXP (XEXP (operands[1], 0), 1))))
7555 otherops[0] = operands[0];
7556 otherops[1] = XEXP (XEXP (operands[1], 0), 0);
7557 otherops[2] = XEXP (XEXP (operands[1], 0), 1);
7559 if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
7561 if (GET_CODE (otherops[2]) == CONST_INT)
7563 switch ((int) INTVAL (otherops[2]))
7565 case -8:
7566 output_asm_insn ("ldm%?db\t%1, %M0", otherops);
7567 return "";
7568 case -4:
7569 output_asm_insn ("ldm%?da\t%1, %M0", otherops);
7570 return "";
7571 case 4:
7572 output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
7573 return "";
7576 if (!(const_ok_for_arm (INTVAL (otherops[2]))))
7577 output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
7578 else
7579 output_asm_insn ("add%?\t%0, %1, %2", otherops);
7581 else
7582 output_asm_insn ("add%?\t%0, %1, %2", otherops);
7584 else
7585 output_asm_insn ("sub%?\t%0, %1, %2", otherops);
7587 return "ldm%?ia\t%0, %M0";
7589 else
7591 otherops[1] = adjust_address (operands[1], VOIDmode, 4);
7592 /* Take care of overlapping base/data reg. */
7593 if (reg_mentioned_p (operands[0], operands[1]))
7595 output_asm_insn ("ldr%?\t%0, %1", otherops);
7596 output_asm_insn ("ldr%?\t%0, %1", operands);
7598 else
7600 output_asm_insn ("ldr%?\t%0, %1", operands);
7601 output_asm_insn ("ldr%?\t%0, %1", otherops);
7606 else
7607 abort (); /* Constraints should prevent this. */
7609 else if (code0 == MEM && code1 == REG)
7611 if (REGNO (operands[1]) == IP_REGNUM)
7612 abort ();
7614 switch (GET_CODE (XEXP (operands[0], 0)))
7616 case REG:
7617 output_asm_insn ("stm%?ia\t%m0, %M1", operands);
7618 break;
7620 case PRE_INC:
7621 abort (); /* Should never happen now. */
7622 break;
7624 case PRE_DEC:
7625 output_asm_insn ("stm%?db\t%m0!, %M1", operands);
7626 break;
7628 case POST_INC:
7629 output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
7630 break;
7632 case POST_DEC:
7633 abort (); /* Should never happen now. */
7634 break;
7636 case PLUS:
7637 if (GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT)
7639 switch ((int) INTVAL (XEXP (XEXP (operands[0], 0), 1)))
7641 case -8:
7642 output_asm_insn ("stm%?db\t%m0, %M1", operands);
7643 return "";
7645 case -4:
7646 output_asm_insn ("stm%?da\t%m0, %M1", operands);
7647 return "";
7649 case 4:
7650 output_asm_insn ("stm%?ib\t%m0, %M1", operands);
7651 return "";
7654 /* Fall through */
7656 default:
7657 otherops[0] = adjust_address (operands[0], VOIDmode, 4);
7658 otherops[1] = gen_rtx_REG (SImode, 1 + REGNO (operands[1]));
7659 output_asm_insn ("str%?\t%1, %0", operands);
7660 output_asm_insn ("str%?\t%1, %0", otherops);
7663 else
7664 /* Constraints should prevent this. */
7665 abort ();
7667 return "";
7671 /* Output an arbitrary MOV reg, #n.
7672 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
7674 const char *
7675 output_mov_immediate (operands)
7676 rtx * operands;
7678 HOST_WIDE_INT n = INTVAL (operands[1]);
7680 /* Try to use one MOV. */
7681 if (const_ok_for_arm (n))
7682 output_asm_insn ("mov%?\t%0, %1", operands);
7684 /* Try to use one MVN. */
7685 else if (const_ok_for_arm (~n))
7687 operands[1] = GEN_INT (~n);
7688 output_asm_insn ("mvn%?\t%0, %1", operands);
7690 else
7692 int n_ones = 0;
7693 int i;
7695 /* If all else fails, make it out of ORRs or BICs as appropriate. */
7696 for (i = 0; i < 32; i ++)
7697 if (n & 1 << i)
7698 n_ones ++;
7700 if (n_ones > 16) /* Shorter to use MVN with BIC in this case. */
7701 output_multi_immediate (operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1, ~ n);
7702 else
7703 output_multi_immediate (operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1, n);
7706 return "";
7709 /* Output an ADD r, s, #n where n may be too big for one instruction.
7710 If adding zero to one register, output nothing. */
7712 const char *
7713 output_add_immediate (operands)
7714 rtx * operands;
7716 HOST_WIDE_INT n = INTVAL (operands[2]);
7718 if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
7720 if (n < 0)
7721 output_multi_immediate (operands,
7722 "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
7723 -n);
7724 else
7725 output_multi_immediate (operands,
7726 "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
7730 return "";
7733 /* Output a multiple immediate operation.
7734 OPERANDS is the vector of operands referred to in the output patterns.
7735 INSTR1 is the output pattern to use for the first constant.
7736 INSTR2 is the output pattern to use for subsequent constants.
7737 IMMED_OP is the index of the constant slot in OPERANDS.
7738 N is the constant value. */
7740 static const char *
7741 output_multi_immediate (operands, instr1, instr2, immed_op, n)
7742 rtx * operands;
7743 const char * instr1;
7744 const char * instr2;
7745 int immed_op;
7746 HOST_WIDE_INT n;
7748 #if HOST_BITS_PER_WIDE_INT > 32
7749 n &= 0xffffffff;
7750 #endif
7752 if (n == 0)
7754 /* Quick and easy output. */
7755 operands[immed_op] = const0_rtx;
7756 output_asm_insn (instr1, operands);
7758 else
7760 int i;
7761 const char * instr = instr1;
7763 /* Note that n is never zero here (which would give no output). */
7764 for (i = 0; i < 32; i += 2)
7766 if (n & (3 << i))
7768 operands[immed_op] = GEN_INT (n & (255 << i));
7769 output_asm_insn (instr, operands);
7770 instr = instr2;
7771 i += 6;
7776 return "";
7779 /* Return the appropriate ARM instruction for the operation code.
7780 The returned result should not be overwritten. OP is the rtx of the
7781 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
7782 was shifted. */
7784 const char *
7785 arithmetic_instr (op, shift_first_arg)
7786 rtx op;
7787 int shift_first_arg;
7789 switch (GET_CODE (op))
7791 case PLUS:
7792 return "add";
7794 case MINUS:
7795 return shift_first_arg ? "rsb" : "sub";
7797 case IOR:
7798 return "orr";
7800 case XOR:
7801 return "eor";
7803 case AND:
7804 return "and";
7806 default:
7807 abort ();
7811 /* Ensure valid constant shifts and return the appropriate shift mnemonic
7812 for the operation code. The returned result should not be overwritten.
7813 OP is the rtx code of the shift.
7814 On exit, *AMOUNTP will be -1 if the shift is by a register, or a constant
7815 shift. */
7817 static const char *
7818 shift_op (op, amountp)
7819 rtx op;
7820 HOST_WIDE_INT *amountp;
7822 const char * mnem;
7823 enum rtx_code code = GET_CODE (op);
7825 if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
7826 *amountp = -1;
7827 else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
7828 *amountp = INTVAL (XEXP (op, 1));
7829 else
7830 abort ();
7832 switch (code)
7834 case ASHIFT:
7835 mnem = "asl";
7836 break;
7838 case ASHIFTRT:
7839 mnem = "asr";
7840 break;
7842 case LSHIFTRT:
7843 mnem = "lsr";
7844 break;
7846 case ROTATERT:
7847 mnem = "ror";
7848 break;
7850 case MULT:
7851 /* We never have to worry about the amount being other than a
7852 power of 2, since this case can never be reloaded from a reg. */
7853 if (*amountp != -1)
7854 *amountp = int_log2 (*amountp);
7855 else
7856 abort ();
7857 return "asl";
7859 default:
7860 abort ();
7863 if (*amountp != -1)
7865 /* This is not 100% correct, but follows from the desire to merge
7866 multiplication by a power of 2 with the recognizer for a
7867 shift. >=32 is not a valid shift for "asl", so we must try and
7868 output a shift that produces the correct arithmetical result.
7869 Using lsr #32 is identical except for the fact that the carry bit
7870 is not set correctly if we set the flags; but we never use the
7871 carry bit from such an operation, so we can ignore that. */
7872 if (code == ROTATERT)
7873 /* Rotate is just modulo 32. */
7874 *amountp &= 31;
7875 else if (*amountp != (*amountp & 31))
7877 if (code == ASHIFT)
7878 mnem = "lsr";
7879 *amountp = 32;
7882 /* Shifts of 0 are no-ops. */
7883 if (*amountp == 0)
7884 return NULL;
7887 return mnem;
7890 /* Obtain the shift from the POWER of two. */
7892 static HOST_WIDE_INT
7893 int_log2 (power)
7894 HOST_WIDE_INT power;
7896 HOST_WIDE_INT shift = 0;
7898 while ((((HOST_WIDE_INT) 1 << shift) & power) == 0)
7900 if (shift > 31)
7901 abort ();
7902 shift ++;
7905 return shift;
7908 /* Output a .ascii pseudo-op, keeping track of lengths. This is because
7909 /bin/as is horribly restrictive. */
7910 #define MAX_ASCII_LEN 51
7912 void
7913 output_ascii_pseudo_op (stream, p, len)
7914 FILE * stream;
7915 const unsigned char * p;
7916 int len;
7918 int i;
7919 int len_so_far = 0;
7921 fputs ("\t.ascii\t\"", stream);
7923 for (i = 0; i < len; i++)
7925 int c = p[i];
7927 if (len_so_far >= MAX_ASCII_LEN)
7929 fputs ("\"\n\t.ascii\t\"", stream);
7930 len_so_far = 0;
7933 switch (c)
7935 case TARGET_TAB:
7936 fputs ("\\t", stream);
7937 len_so_far += 2;
7938 break;
7940 case TARGET_FF:
7941 fputs ("\\f", stream);
7942 len_so_far += 2;
7943 break;
7945 case TARGET_BS:
7946 fputs ("\\b", stream);
7947 len_so_far += 2;
7948 break;
7950 case TARGET_CR:
7951 fputs ("\\r", stream);
7952 len_so_far += 2;
7953 break;
7955 case TARGET_NEWLINE:
7956 fputs ("\\n", stream);
7957 c = p [i + 1];
7958 if ((c >= ' ' && c <= '~')
7959 || c == TARGET_TAB)
7960 /* This is a good place for a line break. */
7961 len_so_far = MAX_ASCII_LEN;
7962 else
7963 len_so_far += 2;
7964 break;
7966 case '\"':
7967 case '\\':
7968 putc ('\\', stream);
7969 len_so_far++;
7970 /* drop through. */
7972 default:
7973 if (c >= ' ' && c <= '~')
7975 putc (c, stream);
7976 len_so_far++;
7978 else
7980 fprintf (stream, "\\%03o", c);
7981 len_so_far += 4;
7983 break;
7987 fputs ("\"\n", stream);
7990 /* Compute the register sabe mask for registers 0 through 12
7991 inclusive. This code is used by both arm_compute_save_reg_mask
7992 and arm_compute_initial_elimination_offset. */
7994 static unsigned long
7995 arm_compute_save_reg0_reg12_mask ()
7997 unsigned long func_type = arm_current_func_type ();
7998 unsigned int save_reg_mask = 0;
7999 unsigned int reg;
8001 if (IS_INTERRUPT (func_type))
8003 unsigned int max_reg;
8004 /* Interrupt functions must not corrupt any registers,
8005 even call clobbered ones. If this is a leaf function
8006 we can just examine the registers used by the RTL, but
8007 otherwise we have to assume that whatever function is
8008 called might clobber anything, and so we have to save
8009 all the call-clobbered registers as well. */
8010 if (ARM_FUNC_TYPE (func_type) == ARM_FT_FIQ)
8011 /* FIQ handlers have registers r8 - r12 banked, so
8012 we only need to check r0 - r7, Normal ISRs only
8013 bank r14 and r15, so we must check up to r12.
8014 r13 is the stack pointer which is always preserved,
8015 so we do not need to consider it here. */
8016 max_reg = 7;
8017 else
8018 max_reg = 12;
8020 for (reg = 0; reg <= max_reg; reg++)
8021 if (regs_ever_live[reg]
8022 || (! current_function_is_leaf && call_used_regs [reg]))
8023 save_reg_mask |= (1 << reg);
8025 else
8027 /* In the normal case we only need to save those registers
8028 which are call saved and which are used by this function. */
8029 for (reg = 0; reg <= 10; reg++)
8030 if (regs_ever_live[reg] && ! call_used_regs [reg])
8031 save_reg_mask |= (1 << reg);
8033 /* Handle the frame pointer as a special case. */
8034 if (! TARGET_APCS_FRAME
8035 && ! frame_pointer_needed
8036 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
8037 && ! call_used_regs[HARD_FRAME_POINTER_REGNUM])
8038 save_reg_mask |= 1 << HARD_FRAME_POINTER_REGNUM;
8040 /* If we aren't loading the PIC register,
8041 don't stack it even though it may be live. */
8042 if (flag_pic
8043 && ! TARGET_SINGLE_PIC_BASE
8044 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
8045 save_reg_mask |= 1 << PIC_OFFSET_TABLE_REGNUM;
8048 return save_reg_mask;
8051 /* Compute a bit mask of which registers need to be
8052 saved on the stack for the current function. */
8054 static unsigned long
8055 arm_compute_save_reg_mask ()
8057 unsigned int save_reg_mask = 0;
8058 unsigned long func_type = arm_current_func_type ();
8060 if (IS_NAKED (func_type))
8061 /* This should never really happen. */
8062 return 0;
8064 /* If we are creating a stack frame, then we must save the frame pointer,
8065 IP (which will hold the old stack pointer), LR and the PC. */
8066 if (frame_pointer_needed)
8067 save_reg_mask |=
8068 (1 << ARM_HARD_FRAME_POINTER_REGNUM)
8069 | (1 << IP_REGNUM)
8070 | (1 << LR_REGNUM)
8071 | (1 << PC_REGNUM);
8073 /* Volatile functions do not return, so there
8074 is no need to save any other registers. */
8075 if (IS_VOLATILE (func_type))
8076 return save_reg_mask;
8078 save_reg_mask |= arm_compute_save_reg0_reg12_mask ();
8080 /* Decide if we need to save the link register.
8081 Interrupt routines have their own banked link register,
8082 so they never need to save it.
8083 Otherwise if we do not use the link register we do not need to save
8084 it. If we are pushing other registers onto the stack however, we
8085 can save an instruction in the epilogue by pushing the link register
8086 now and then popping it back into the PC. This incurs extra memory
8087 accesses though, so we only do it when optimising for size, and only
8088 if we know that we will not need a fancy return sequence. */
8089 if (regs_ever_live [LR_REGNUM]
8090 || (save_reg_mask
8091 && optimize_size
8092 && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL))
8093 save_reg_mask |= 1 << LR_REGNUM;
8095 if (cfun->machine->lr_save_eliminated)
8096 save_reg_mask &= ~ (1 << LR_REGNUM);
8098 return save_reg_mask;
8101 /* Generate a function exit sequence. If REALLY_RETURN is true, then do
8102 everything bar the final return instruction. */
8104 const char *
8105 output_return_instruction (operand, really_return, reverse)
8106 rtx operand;
8107 int really_return;
8108 int reverse;
8110 char conditional[10];
8111 char instr[100];
8112 int reg;
8113 unsigned long live_regs_mask;
8114 unsigned long func_type;
8116 func_type = arm_current_func_type ();
8118 if (IS_NAKED (func_type))
8119 return "";
8121 if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
8123 /* If this function was declared non-returning, and we have found a tail
8124 call, then we have to trust that the called function won't return. */
8125 if (really_return)
8127 rtx ops[2];
8129 /* Otherwise, trap an attempted return by aborting. */
8130 ops[0] = operand;
8131 ops[1] = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)"
8132 : "abort");
8133 assemble_external_libcall (ops[1]);
8134 output_asm_insn (reverse ? "bl%D0\t%a1" : "bl%d0\t%a1", ops);
8137 return "";
8140 if (current_function_calls_alloca && !really_return)
8141 abort ();
8143 sprintf (conditional, "%%?%%%c0", reverse ? 'D' : 'd');
8145 return_used_this_function = 1;
8147 live_regs_mask = arm_compute_save_reg_mask ();
8149 if (live_regs_mask)
8151 const char * return_reg;
8153 /* If we do not have any special requirements for function exit
8154 (eg interworking, or ISR) then we can load the return address
8155 directly into the PC. Otherwise we must load it into LR. */
8156 if (really_return
8157 && ! TARGET_INTERWORK)
8158 return_reg = reg_names[PC_REGNUM];
8159 else
8160 return_reg = reg_names[LR_REGNUM];
8162 if ((live_regs_mask & (1 << IP_REGNUM)) == (1 << IP_REGNUM))
8163 /* There are two possible reasons for the IP register being saved.
8164 Either a stack frame was created, in which case IP contains the
8165 old stack pointer, or an ISR routine corrupted it. If this in an
8166 ISR routine then just restore IP, otherwise restore IP into SP. */
8167 if (! IS_INTERRUPT (func_type))
8169 live_regs_mask &= ~ (1 << IP_REGNUM);
8170 live_regs_mask |= (1 << SP_REGNUM);
8173 /* On some ARM architectures it is faster to use LDR rather than
8174 LDM to load a single register. On other architectures, the
8175 cost is the same. In 26 bit mode, or for exception handlers,
8176 we have to use LDM to load the PC so that the CPSR is also
8177 restored. */
8178 for (reg = 0; reg <= LAST_ARM_REGNUM; reg++)
8180 if (live_regs_mask == (unsigned int)(1 << reg))
8181 break;
8183 if (reg <= LAST_ARM_REGNUM
8184 && (reg != LR_REGNUM
8185 || ! really_return
8186 || (TARGET_APCS_32 && ! IS_INTERRUPT (func_type))))
8188 sprintf (instr, "ldr%s\t%%|%s, [%%|sp], #4", conditional,
8189 (reg == LR_REGNUM) ? return_reg : reg_names[reg]);
8191 else
8193 char *p;
8194 int first = 1;
8196 /* Generate the load multiple instruction to restore the registers. */
8197 if (frame_pointer_needed)
8198 sprintf (instr, "ldm%sea\t%%|fp, {", conditional);
8199 else if (live_regs_mask & (1 << SP_REGNUM))
8200 sprintf (instr, "ldm%sfd\t%%|sp, {", conditional);
8201 else
8202 sprintf (instr, "ldm%sfd\t%%|sp!, {", conditional);
8204 p = instr + strlen (instr);
8206 for (reg = 0; reg <= SP_REGNUM; reg++)
8207 if (live_regs_mask & (1 << reg))
8209 int l = strlen (reg_names[reg]);
8211 if (first)
8212 first = 0;
8213 else
8215 memcpy (p, ", ", 2);
8216 p += 2;
8219 memcpy (p, "%|", 2);
8220 memcpy (p + 2, reg_names[reg], l);
8221 p += l + 2;
8224 if (live_regs_mask & (1 << LR_REGNUM))
8226 sprintf (p, "%s%%|%s}", first ? "" : ", ", return_reg);
8227 /* Decide if we need to add the ^ symbol to the end of the
8228 register list. This causes the saved condition codes
8229 register to be copied into the current condition codes
8230 register. We do the copy if we are conforming to the 32-bit
8231 ABI and this is an interrupt function, or if we are
8232 conforming to the 26-bit ABI. There is a special case for
8233 the 26-bit ABI however, which is if we are writing back the
8234 stack pointer but not loading the PC. In this case adding
8235 the ^ symbol would create a type 2 LDM instruction, where
8236 writeback is UNPREDICTABLE. We are safe in leaving the ^
8237 character off in this case however, since the actual return
8238 instruction will be a MOVS which will restore the CPSR. */
8239 if ((TARGET_APCS_32 && IS_INTERRUPT (func_type))
8240 || (! TARGET_APCS_32 && really_return))
8241 strcat (p, "^");
8243 else
8244 strcpy (p, "}");
8247 output_asm_insn (instr, & operand);
8249 /* See if we need to generate an extra instruction to
8250 perform the actual function return. */
8251 if (really_return
8252 && func_type != ARM_FT_INTERWORKED
8253 && (live_regs_mask & (1 << LR_REGNUM)) != 0)
8255 /* The return has already been handled
8256 by loading the LR into the PC. */
8257 really_return = 0;
8261 if (really_return)
8263 switch ((int) ARM_FUNC_TYPE (func_type))
8265 case ARM_FT_ISR:
8266 case ARM_FT_FIQ:
8267 sprintf (instr, "sub%ss\t%%|pc, %%|lr, #4", conditional);
8268 break;
8270 case ARM_FT_INTERWORKED:
8271 sprintf (instr, "bx%s\t%%|lr", conditional);
8272 break;
8274 case ARM_FT_EXCEPTION:
8275 sprintf (instr, "mov%ss\t%%|pc, %%|lr", conditional);
8276 break;
8278 default:
8279 /* ARMv5 implementations always provide BX, so interworking
8280 is the default unless APCS-26 is in use. */
8281 if ((insn_flags & FL_ARCH5) != 0 && TARGET_APCS_32)
8282 sprintf (instr, "bx%s\t%%|lr", conditional);
8283 else
8284 sprintf (instr, "mov%s%s\t%%|pc, %%|lr",
8285 conditional, TARGET_APCS_32 ? "" : "s");
8286 break;
8289 output_asm_insn (instr, & operand);
8292 return "";
8295 /* Write the function name into the code section, directly preceding
8296 the function prologue.
8298 Code will be output similar to this:
8300 .ascii "arm_poke_function_name", 0
8301 .align
8303 .word 0xff000000 + (t1 - t0)
8304 arm_poke_function_name
8305 mov ip, sp
8306 stmfd sp!, {fp, ip, lr, pc}
8307 sub fp, ip, #4
8309 When performing a stack backtrace, code can inspect the value
8310 of 'pc' stored at 'fp' + 0. If the trace function then looks
8311 at location pc - 12 and the top 8 bits are set, then we know
8312 that there is a function name embedded immediately preceding this
8313 location and has length ((pc[-3]) & 0xff000000).
8315 We assume that pc is declared as a pointer to an unsigned long.
8317 It is of no benefit to output the function name if we are assembling
8318 a leaf function. These function types will not contain a stack
8319 backtrace structure, therefore it is not possible to determine the
8320 function name. */
8322 void
8323 arm_poke_function_name (stream, name)
8324 FILE * stream;
8325 const char * name;
8327 unsigned long alignlength;
8328 unsigned long length;
8329 rtx x;
8331 length = strlen (name) + 1;
8332 alignlength = ROUND_UP_WORD (length);
8334 ASM_OUTPUT_ASCII (stream, name, length);
8335 ASM_OUTPUT_ALIGN (stream, 2);
8336 x = GEN_INT ((unsigned HOST_WIDE_INT) 0xff000000 + alignlength);
8337 assemble_aligned_integer (UNITS_PER_WORD, x);
8340 /* Place some comments into the assembler stream
8341 describing the current function. */
8343 static void
8344 arm_output_function_prologue (f, frame_size)
8345 FILE * f;
8346 HOST_WIDE_INT frame_size;
8348 unsigned long func_type;
8350 if (!TARGET_ARM)
8352 thumb_output_function_prologue (f, frame_size);
8353 return;
8356 /* Sanity check. */
8357 if (arm_ccfsm_state || arm_target_insn)
8358 abort ();
8360 func_type = arm_current_func_type ();
8362 switch ((int) ARM_FUNC_TYPE (func_type))
8364 default:
8365 case ARM_FT_NORMAL:
8366 break;
8367 case ARM_FT_INTERWORKED:
8368 asm_fprintf (f, "\t%@ Function supports interworking.\n");
8369 break;
8370 case ARM_FT_EXCEPTION_HANDLER:
8371 asm_fprintf (f, "\t%@ C++ Exception Handler.\n");
8372 break;
8373 case ARM_FT_ISR:
8374 asm_fprintf (f, "\t%@ Interrupt Service Routine.\n");
8375 break;
8376 case ARM_FT_FIQ:
8377 asm_fprintf (f, "\t%@ Fast Interrupt Service Routine.\n");
8378 break;
8379 case ARM_FT_EXCEPTION:
8380 asm_fprintf (f, "\t%@ ARM Exception Handler.\n");
8381 break;
8384 if (IS_NAKED (func_type))
8385 asm_fprintf (f, "\t%@ Naked Function: prologue and epilogue provided by programmer.\n");
8387 if (IS_VOLATILE (func_type))
8388 asm_fprintf (f, "\t%@ Volatile: function does not return.\n");
8390 if (IS_NESTED (func_type))
8391 asm_fprintf (f, "\t%@ Nested: function declared inside another function.\n");
8393 asm_fprintf (f, "\t%@ args = %d, pretend = %d, frame = %d\n",
8394 current_function_args_size,
8395 current_function_pretend_args_size, frame_size);
8397 asm_fprintf (f, "\t%@ frame_needed = %d, uses_anonymous_args = %d\n",
8398 frame_pointer_needed,
8399 cfun->machine->uses_anonymous_args);
8401 if (cfun->machine->lr_save_eliminated)
8402 asm_fprintf (f, "\t%@ link register save eliminated.\n");
8404 #ifdef AOF_ASSEMBLER
8405 if (flag_pic)
8406 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, PIC_OFFSET_TABLE_REGNUM);
8407 #endif
8409 return_used_this_function = 0;
8412 const char *
8413 arm_output_epilogue (really_return)
8414 int really_return;
8416 int reg;
8417 unsigned long saved_regs_mask;
8418 unsigned long func_type;
8419 /* Floats_offset is the offset from the "virtual" frame. In an APCS
8420 frame that is $fp + 4 for a non-variadic function. */
8421 int floats_offset = 0;
8422 rtx operands[3];
8423 int frame_size = arm_get_frame_size ();
8424 FILE * f = asm_out_file;
8425 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
8427 /* If we have already generated the return instruction
8428 then it is futile to generate anything else. */
8429 if (use_return_insn (FALSE) && return_used_this_function)
8430 return "";
8432 func_type = arm_current_func_type ();
8434 if (IS_NAKED (func_type))
8435 /* Naked functions don't have epilogues. */
8436 return "";
8438 if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
8440 rtx op;
8442 /* A volatile function should never return. Call abort. */
8443 op = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)" : "abort");
8444 assemble_external_libcall (op);
8445 output_asm_insn ("bl\t%a0", &op);
8447 return "";
8450 if (ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER
8451 && ! really_return)
8452 /* If we are throwing an exception, then we really must
8453 be doing a return, so we can't tail-call. */
8454 abort ();
8456 saved_regs_mask = arm_compute_save_reg_mask ();
8458 /* XXX We should adjust floats_offset for any anonymous args, and then
8459 re-adjust vfp_offset below to compensate. */
8461 /* Compute how far away the floats will be. */
8462 for (reg = 0; reg <= LAST_ARM_REGNUM; reg ++)
8463 if (saved_regs_mask & (1 << reg))
8464 floats_offset += 4;
8466 if (frame_pointer_needed)
8468 int vfp_offset = 4;
8470 if (arm_fpu_arch == FPUTYPE_FPA_EMU2)
8472 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
8473 if (regs_ever_live[reg] && !call_used_regs[reg])
8475 floats_offset += 12;
8476 asm_fprintf (f, "\tldfe\t%r, [%r, #-%d]\n",
8477 reg, FP_REGNUM, floats_offset - vfp_offset);
8480 else
8482 int start_reg = LAST_ARM_FP_REGNUM;
8484 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
8486 if (regs_ever_live[reg] && !call_used_regs[reg])
8488 floats_offset += 12;
8490 /* We can't unstack more than four registers at once. */
8491 if (start_reg - reg == 3)
8493 asm_fprintf (f, "\tlfm\t%r, 4, [%r, #-%d]\n",
8494 reg, FP_REGNUM, floats_offset - vfp_offset);
8495 start_reg = reg - 1;
8498 else
8500 if (reg != start_reg)
8501 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
8502 reg + 1, start_reg - reg,
8503 FP_REGNUM, floats_offset - vfp_offset);
8504 start_reg = reg - 1;
8508 /* Just in case the last register checked also needs unstacking. */
8509 if (reg != start_reg)
8510 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
8511 reg + 1, start_reg - reg,
8512 FP_REGNUM, floats_offset - vfp_offset);
8515 /* saved_regs_mask should contain the IP, which at the time of stack
8516 frame generation actually contains the old stack pointer. So a
8517 quick way to unwind the stack is just pop the IP register directly
8518 into the stack pointer. */
8519 if ((saved_regs_mask & (1 << IP_REGNUM)) == 0)
8520 abort ();
8521 saved_regs_mask &= ~ (1 << IP_REGNUM);
8522 saved_regs_mask |= (1 << SP_REGNUM);
8524 /* There are two registers left in saved_regs_mask - LR and PC. We
8525 only need to restore the LR register (the return address), but to
8526 save time we can load it directly into the PC, unless we need a
8527 special function exit sequence, or we are not really returning. */
8528 if (really_return && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL)
8529 /* Delete the LR from the register mask, so that the LR on
8530 the stack is loaded into the PC in the register mask. */
8531 saved_regs_mask &= ~ (1 << LR_REGNUM);
8532 else
8533 saved_regs_mask &= ~ (1 << PC_REGNUM);
8535 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, saved_regs_mask);
8537 if (IS_INTERRUPT (func_type))
8538 /* Interrupt handlers will have pushed the
8539 IP onto the stack, so restore it now. */
8540 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, 1 << IP_REGNUM);
8542 else
8544 /* Restore stack pointer if necessary. */
8545 if (frame_size + current_function_outgoing_args_size != 0)
8547 operands[0] = operands[1] = stack_pointer_rtx;
8548 operands[2] = GEN_INT (frame_size
8549 + current_function_outgoing_args_size);
8550 output_add_immediate (operands);
8553 if (arm_fpu_arch == FPUTYPE_FPA_EMU2)
8555 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
8556 if (regs_ever_live[reg] && !call_used_regs[reg])
8557 asm_fprintf (f, "\tldfe\t%r, [%r], #12\n",
8558 reg, SP_REGNUM);
8560 else
8562 int start_reg = FIRST_ARM_FP_REGNUM;
8564 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
8566 if (regs_ever_live[reg] && !call_used_regs[reg])
8568 if (reg - start_reg == 3)
8570 asm_fprintf (f, "\tlfmfd\t%r, 4, [%r]!\n",
8571 start_reg, SP_REGNUM);
8572 start_reg = reg + 1;
8575 else
8577 if (reg != start_reg)
8578 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
8579 start_reg, reg - start_reg,
8580 SP_REGNUM);
8582 start_reg = reg + 1;
8586 /* Just in case the last register checked also needs unstacking. */
8587 if (reg != start_reg)
8588 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
8589 start_reg, reg - start_reg, SP_REGNUM);
8592 /* If we can, restore the LR into the PC. */
8593 if (ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
8594 && really_return
8595 && current_function_pretend_args_size == 0
8596 && saved_regs_mask & (1 << LR_REGNUM))
8598 saved_regs_mask &= ~ (1 << LR_REGNUM);
8599 saved_regs_mask |= (1 << PC_REGNUM);
8602 /* Load the registers off the stack. If we only have one register
8603 to load use the LDR instruction - it is faster. */
8604 if (saved_regs_mask == (1 << LR_REGNUM))
8606 /* The exception handler ignores the LR, so we do
8607 not really need to load it off the stack. */
8608 if (eh_ofs)
8609 asm_fprintf (f, "\tadd\t%r, %r, #4\n", SP_REGNUM, SP_REGNUM);
8610 else
8611 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM, SP_REGNUM);
8613 else if (saved_regs_mask)
8615 if (saved_regs_mask & (1 << SP_REGNUM))
8616 /* Note - write back to the stack register is not enabled
8617 (ie "ldmfd sp!..."). We know that the stack pointer is
8618 in the list of registers and if we add writeback the
8619 instruction becomes UNPREDICTABLE. */
8620 print_multi_reg (f, "ldmfd\t%r", SP_REGNUM, saved_regs_mask);
8621 else
8622 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, saved_regs_mask);
8625 if (current_function_pretend_args_size)
8627 /* Unwind the pre-pushed regs. */
8628 operands[0] = operands[1] = stack_pointer_rtx;
8629 operands[2] = GEN_INT (current_function_pretend_args_size);
8630 output_add_immediate (operands);
8634 #if 0
8635 if (ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER)
8636 /* Adjust the stack to remove the exception handler stuff. */
8637 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
8638 REGNO (eh_ofs));
8639 #endif
8641 if (! really_return
8642 || (ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
8643 && current_function_pretend_args_size == 0
8644 && saved_regs_mask & (1 << PC_REGNUM)))
8645 return "";
8647 /* Generate the return instruction. */
8648 switch ((int) ARM_FUNC_TYPE (func_type))
8650 case ARM_FT_EXCEPTION_HANDLER:
8651 /* Even in 26-bit mode we do a mov (rather than a movs)
8652 because we don't have the PSR bits set in the address. */
8653 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, EXCEPTION_LR_REGNUM);
8654 break;
8656 case ARM_FT_ISR:
8657 case ARM_FT_FIQ:
8658 asm_fprintf (f, "\tsubs\t%r, %r, #4\n", PC_REGNUM, LR_REGNUM);
8659 break;
8661 case ARM_FT_EXCEPTION:
8662 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
8663 break;
8665 case ARM_FT_INTERWORKED:
8666 asm_fprintf (f, "\tbx\t%r\n", LR_REGNUM);
8667 break;
8669 default:
8670 if (frame_pointer_needed)
8671 /* If we used the frame pointer then the return address
8672 will have been loaded off the stack directly into the
8673 PC, so there is no need to issue a MOV instruction
8674 here. */
8676 else if (current_function_pretend_args_size == 0
8677 && (saved_regs_mask & (1 << LR_REGNUM)))
8678 /* Similarly we may have been able to load LR into the PC
8679 even if we did not create a stack frame. */
8681 else if (TARGET_APCS_32)
8682 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, LR_REGNUM);
8683 else
8684 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
8685 break;
8688 return "";
8691 static void
8692 arm_output_function_epilogue (file, frame_size)
8693 FILE *file ATTRIBUTE_UNUSED;
8694 HOST_WIDE_INT frame_size;
8696 if (TARGET_THUMB)
8698 /* ??? Probably not safe to set this here, since it assumes that a
8699 function will be emitted as assembly immediately after we generate
8700 RTL for it. This does not happen for inline functions. */
8701 return_used_this_function = 0;
8703 else
8705 /* We need to take into account any stack-frame rounding. */
8706 frame_size = arm_get_frame_size ();
8708 if (use_return_insn (FALSE)
8709 && return_used_this_function
8710 && (frame_size + current_function_outgoing_args_size) != 0
8711 && !frame_pointer_needed)
8712 abort ();
8714 /* Reset the ARM-specific per-function variables. */
8715 after_arm_reorg = 0;
8719 /* Generate and emit an insn that we will recognize as a push_multi.
8720 Unfortunately, since this insn does not reflect very well the actual
8721 semantics of the operation, we need to annotate the insn for the benefit
8722 of DWARF2 frame unwind information. */
8724 static rtx
8725 emit_multi_reg_push (mask)
8726 int mask;
8728 int num_regs = 0;
8729 int num_dwarf_regs;
8730 int i, j;
8731 rtx par;
8732 rtx dwarf;
8733 int dwarf_par_index;
8734 rtx tmp, reg;
8736 for (i = 0; i <= LAST_ARM_REGNUM; i++)
8737 if (mask & (1 << i))
8738 num_regs++;
8740 if (num_regs == 0 || num_regs > 16)
8741 abort ();
8743 /* We don't record the PC in the dwarf frame information. */
8744 num_dwarf_regs = num_regs;
8745 if (mask & (1 << PC_REGNUM))
8746 num_dwarf_regs--;
8748 /* For the body of the insn we are going to generate an UNSPEC in
8749 parallel with several USEs. This allows the insn to be recognized
8750 by the push_multi pattern in the arm.md file. The insn looks
8751 something like this:
8753 (parallel [
8754 (set (mem:BLK (pre_dec:BLK (reg:SI sp)))
8755 (unspec:BLK [(reg:SI r4)] UNSPEC_PUSH_MULT))
8756 (use (reg:SI 11 fp))
8757 (use (reg:SI 12 ip))
8758 (use (reg:SI 14 lr))
8759 (use (reg:SI 15 pc))
8762 For the frame note however, we try to be more explicit and actually
8763 show each register being stored into the stack frame, plus a (single)
8764 decrement of the stack pointer. We do it this way in order to be
8765 friendly to the stack unwinding code, which only wants to see a single
8766 stack decrement per instruction. The RTL we generate for the note looks
8767 something like this:
8769 (sequence [
8770 (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int -20)))
8771 (set (mem:SI (reg:SI sp)) (reg:SI r4))
8772 (set (mem:SI (plus:SI (reg:SI sp) (const_int 4))) (reg:SI fp))
8773 (set (mem:SI (plus:SI (reg:SI sp) (const_int 8))) (reg:SI ip))
8774 (set (mem:SI (plus:SI (reg:SI sp) (const_int 12))) (reg:SI lr))
8777 This sequence is used both by the code to support stack unwinding for
8778 exceptions handlers and the code to generate dwarf2 frame debugging. */
8780 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_regs));
8781 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (num_dwarf_regs + 1));
8782 dwarf_par_index = 1;
8784 for (i = 0; i <= LAST_ARM_REGNUM; i++)
8786 if (mask & (1 << i))
8788 reg = gen_rtx_REG (SImode, i);
8790 XVECEXP (par, 0, 0)
8791 = gen_rtx_SET (VOIDmode,
8792 gen_rtx_MEM (BLKmode,
8793 gen_rtx_PRE_DEC (BLKmode,
8794 stack_pointer_rtx)),
8795 gen_rtx_UNSPEC (BLKmode,
8796 gen_rtvec (1, reg),
8797 UNSPEC_PUSH_MULT));
8799 if (i != PC_REGNUM)
8801 tmp = gen_rtx_SET (VOIDmode,
8802 gen_rtx_MEM (SImode, stack_pointer_rtx),
8803 reg);
8804 RTX_FRAME_RELATED_P (tmp) = 1;
8805 XVECEXP (dwarf, 0, dwarf_par_index) = tmp;
8806 dwarf_par_index++;
8809 break;
8813 for (j = 1, i++; j < num_regs; i++)
8815 if (mask & (1 << i))
8817 reg = gen_rtx_REG (SImode, i);
8819 XVECEXP (par, 0, j) = gen_rtx_USE (VOIDmode, reg);
8821 if (i != PC_REGNUM)
8823 tmp = gen_rtx_SET (VOIDmode,
8824 gen_rtx_MEM (SImode,
8825 plus_constant (stack_pointer_rtx,
8826 4 * j)),
8827 reg);
8828 RTX_FRAME_RELATED_P (tmp) = 1;
8829 XVECEXP (dwarf, 0, dwarf_par_index++) = tmp;
8832 j++;
8836 par = emit_insn (par);
8838 tmp = gen_rtx_SET (SImode,
8839 stack_pointer_rtx,
8840 gen_rtx_PLUS (SImode,
8841 stack_pointer_rtx,
8842 GEN_INT (-4 * num_regs)));
8843 RTX_FRAME_RELATED_P (tmp) = 1;
8844 XVECEXP (dwarf, 0, 0) = tmp;
8846 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
8847 REG_NOTES (par));
8848 return par;
8851 static rtx
8852 emit_sfm (base_reg, count)
8853 int base_reg;
8854 int count;
8856 rtx par;
8857 rtx dwarf;
8858 rtx tmp, reg;
8859 int i;
8861 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
8862 dwarf = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
8864 reg = gen_rtx_REG (XFmode, base_reg++);
8866 XVECEXP (par, 0, 0)
8867 = gen_rtx_SET (VOIDmode,
8868 gen_rtx_MEM (BLKmode,
8869 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
8870 gen_rtx_UNSPEC (BLKmode,
8871 gen_rtvec (1, reg),
8872 UNSPEC_PUSH_MULT));
8874 = gen_rtx_SET (VOIDmode,
8875 gen_rtx_MEM (XFmode,
8876 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
8877 reg);
8878 RTX_FRAME_RELATED_P (tmp) = 1;
8879 XVECEXP (dwarf, 0, count - 1) = tmp;
8881 for (i = 1; i < count; i++)
8883 reg = gen_rtx_REG (XFmode, base_reg++);
8884 XVECEXP (par, 0, i) = gen_rtx_USE (VOIDmode, reg);
8886 tmp = gen_rtx_SET (VOIDmode,
8887 gen_rtx_MEM (XFmode,
8888 gen_rtx_PRE_DEC (BLKmode,
8889 stack_pointer_rtx)),
8890 reg);
8891 RTX_FRAME_RELATED_P (tmp) = 1;
8892 XVECEXP (dwarf, 0, count - i - 1) = tmp;
8895 par = emit_insn (par);
8896 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
8897 REG_NOTES (par));
8898 return par;
8901 /* Compute the distance from register FROM to register TO.
8902 These can be the arg pointer (26), the soft frame pointer (25),
8903 the stack pointer (13) or the hard frame pointer (11).
8904 Typical stack layout looks like this:
8906 old stack pointer -> | |
8907 ----
8908 | | \
8909 | | saved arguments for
8910 | | vararg functions
8911 | | /
8913 hard FP & arg pointer -> | | \
8914 | | stack
8915 | | frame
8916 | | /
8918 | | \
8919 | | call saved
8920 | | registers
8921 soft frame pointer -> | | /
8923 | | \
8924 | | local
8925 | | variables
8926 | | /
8928 | | \
8929 | | outgoing
8930 | | arguments
8931 current stack pointer -> | | /
8934 For a given function some or all of these stack components
8935 may not be needed, giving rise to the possibility of
8936 eliminating some of the registers.
8938 The values returned by this function must reflect the behavior
8939 of arm_expand_prologue() and arm_compute_save_reg_mask().
8941 The sign of the number returned reflects the direction of stack
8942 growth, so the values are positive for all eliminations except
8943 from the soft frame pointer to the hard frame pointer. */
8945 unsigned int
8946 arm_compute_initial_elimination_offset (from, to)
8947 unsigned int from;
8948 unsigned int to;
8950 unsigned int local_vars = arm_get_frame_size ();
8951 unsigned int outgoing_args = current_function_outgoing_args_size;
8952 unsigned int stack_frame;
8953 unsigned int call_saved_registers;
8954 unsigned long func_type;
8956 func_type = arm_current_func_type ();
8958 /* Volatile functions never return, so there is
8959 no need to save call saved registers. */
8960 call_saved_registers = 0;
8961 if (! IS_VOLATILE (func_type))
8963 unsigned int reg_mask;
8964 unsigned int reg;
8966 /* Make sure that we compute which registers will be saved
8967 on the stack using the same algorithm that is used by
8968 arm_compute_save_reg_mask(). */
8969 reg_mask = arm_compute_save_reg0_reg12_mask ();
8971 /* Now count the number of bits set in save_reg_mask.
8972 For each set bit we need 4 bytes of stack space. */
8973 while (reg_mask)
8975 call_saved_registers += 4;
8976 reg_mask = reg_mask & ~ (reg_mask & - reg_mask);
8979 if ((regs_ever_live[LR_REGNUM]
8980 /* If optimizing for size, then we save the link register if
8981 any other integer register is saved. This gives a smaller
8982 return sequence. */
8983 || (optimize_size && call_saved_registers > 0))
8984 /* But if a stack frame is going to be created, the LR will
8985 be saved as part of that, so we do not need to allow for
8986 it here. */
8987 && ! frame_pointer_needed)
8988 call_saved_registers += 4;
8990 /* If the hard floating point registers are going to be
8991 used then they must be saved on the stack as well.
8992 Each register occupies 12 bytes of stack space. */
8993 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg ++)
8994 if (regs_ever_live[reg] && ! call_used_regs[reg])
8995 call_saved_registers += 12;
8998 /* The stack frame contains 4 registers - the old frame pointer,
8999 the old stack pointer, the return address and PC of the start
9000 of the function. */
9001 stack_frame = frame_pointer_needed ? 16 : 0;
9003 /* OK, now we have enough information to compute the distances.
9004 There must be an entry in these switch tables for each pair
9005 of registers in ELIMINABLE_REGS, even if some of the entries
9006 seem to be redundant or useless. */
9007 switch (from)
9009 case ARG_POINTER_REGNUM:
9010 switch (to)
9012 case THUMB_HARD_FRAME_POINTER_REGNUM:
9013 return 0;
9015 case FRAME_POINTER_REGNUM:
9016 /* This is the reverse of the soft frame pointer
9017 to hard frame pointer elimination below. */
9018 if (call_saved_registers == 0 && stack_frame == 0)
9019 return 0;
9020 return (call_saved_registers + stack_frame - 4);
9022 case ARM_HARD_FRAME_POINTER_REGNUM:
9023 /* If there is no stack frame then the hard
9024 frame pointer and the arg pointer coincide. */
9025 if (stack_frame == 0 && call_saved_registers != 0)
9026 return 0;
9027 /* FIXME: Not sure about this. Maybe we should always return 0 ? */
9028 return (frame_pointer_needed
9029 && current_function_needs_context
9030 && ! cfun->machine->uses_anonymous_args) ? 4 : 0;
9032 case STACK_POINTER_REGNUM:
9033 /* If nothing has been pushed on the stack at all
9034 then this will return -4. This *is* correct! */
9035 return call_saved_registers + stack_frame + local_vars + outgoing_args - 4;
9037 default:
9038 abort ();
9040 break;
9042 case FRAME_POINTER_REGNUM:
9043 switch (to)
9045 case THUMB_HARD_FRAME_POINTER_REGNUM:
9046 return 0;
9048 case ARM_HARD_FRAME_POINTER_REGNUM:
9049 /* The hard frame pointer points to the top entry in the
9050 stack frame. The soft frame pointer to the bottom entry
9051 in the stack frame. If there is no stack frame at all,
9052 then they are identical. */
9053 if (call_saved_registers == 0 && stack_frame == 0)
9054 return 0;
9055 return - (call_saved_registers + stack_frame - 4);
9057 case STACK_POINTER_REGNUM:
9058 return local_vars + outgoing_args;
9060 default:
9061 abort ();
9063 break;
9065 default:
9066 /* You cannot eliminate from the stack pointer.
9067 In theory you could eliminate from the hard frame
9068 pointer to the stack pointer, but this will never
9069 happen, since if a stack frame is not needed the
9070 hard frame pointer will never be used. */
9071 abort ();
9075 /* Calculate the size of the stack frame, taking into account any
9076 padding that is required to ensure stack-alignment. */
9078 HOST_WIDE_INT
9079 arm_get_frame_size ()
9081 int regno;
9083 int base_size = ROUND_UP_WORD (get_frame_size ());
9084 int entry_size = 0;
9085 unsigned long func_type = arm_current_func_type ();
9086 int leaf;
9088 if (! TARGET_ARM)
9089 abort();
9091 if (! TARGET_ATPCS)
9092 return base_size;
9094 /* We need to know if we are a leaf function. Unfortunately, it
9095 is possible to be called after start_sequence has been called,
9096 which causes get_insns to return the insns for the sequence,
9097 not the function, which will cause leaf_function_p to return
9098 the incorrect result.
9100 To work around this, we cache the computed frame size. This
9101 works because we will only be calling RTL expanders that need
9102 to know about leaf functions once reload has completed, and the
9103 frame size cannot be changed after that time, so we can safely
9104 use the cached value. */
9106 if (reload_completed)
9107 return cfun->machine->frame_size;
9109 leaf = leaf_function_p ();
9111 /* A leaf function does not need any stack alignment if it has nothing
9112 on the stack. */
9113 if (leaf && base_size == 0)
9115 cfun->machine->frame_size = 0;
9116 return 0;
9119 /* We know that SP will be word aligned on entry, and we must
9120 preserve that condition at any subroutine call. But those are
9121 the only constraints. */
9123 /* Space for variadic functions. */
9124 if (current_function_pretend_args_size)
9125 entry_size += current_function_pretend_args_size;
9127 /* Space for saved registers. */
9128 entry_size += bit_count (arm_compute_save_reg_mask ()) * 4;
9130 /* Space for saved FPA registers. */
9131 if (! IS_VOLATILE (func_type))
9133 for (regno = FIRST_ARM_FP_REGNUM; regno <= LAST_ARM_FP_REGNUM; regno++)
9134 if (regs_ever_live[regno] && ! call_used_regs[regno])
9135 entry_size += 12;
9138 if ((entry_size + base_size + current_function_outgoing_args_size) & 7)
9139 base_size += 4;
9140 if ((entry_size + base_size + current_function_outgoing_args_size) & 7)
9141 abort ();
9143 cfun->machine->frame_size = base_size;
9145 return base_size;
9148 /* Generate the prologue instructions for entry into an ARM function. */
9150 void
9151 arm_expand_prologue ()
9153 int reg;
9154 rtx amount;
9155 rtx insn;
9156 rtx ip_rtx;
9157 unsigned long live_regs_mask;
9158 unsigned long func_type;
9159 int fp_offset = 0;
9160 int saved_pretend_args = 0;
9161 unsigned int args_to_push;
9163 func_type = arm_current_func_type ();
9165 /* Naked functions don't have prologues. */
9166 if (IS_NAKED (func_type))
9167 return;
9169 /* Make a copy of c_f_p_a_s as we may need to modify it locally. */
9170 args_to_push = current_function_pretend_args_size;
9172 /* Compute which register we will have to save onto the stack. */
9173 live_regs_mask = arm_compute_save_reg_mask ();
9175 ip_rtx = gen_rtx_REG (SImode, IP_REGNUM);
9177 if (frame_pointer_needed)
9179 if (IS_INTERRUPT (func_type))
9181 /* Interrupt functions must not corrupt any registers.
9182 Creating a frame pointer however, corrupts the IP
9183 register, so we must push it first. */
9184 insn = emit_multi_reg_push (1 << IP_REGNUM);
9186 /* Do not set RTX_FRAME_RELATED_P on this insn.
9187 The dwarf stack unwinding code only wants to see one
9188 stack decrement per function, and this is not it. If
9189 this instruction is labeled as being part of the frame
9190 creation sequence then dwarf2out_frame_debug_expr will
9191 abort when it encounters the assignment of IP to FP
9192 later on, since the use of SP here establishes SP as
9193 the CFA register and not IP.
9195 Anyway this instruction is not really part of the stack
9196 frame creation although it is part of the prologue. */
9198 else if (IS_NESTED (func_type))
9200 /* The Static chain register is the same as the IP register
9201 used as a scratch register during stack frame creation.
9202 To get around this need to find somewhere to store IP
9203 whilst the frame is being created. We try the following
9204 places in order:
9206 1. The last argument register.
9207 2. A slot on the stack above the frame. (This only
9208 works if the function is not a varargs function).
9209 3. Register r3, after pushing the argument registers
9210 onto the stack.
9212 Note - we only need to tell the dwarf2 backend about the SP
9213 adjustment in the second variant; the static chain register
9214 doesn't need to be unwound, as it doesn't contain a value
9215 inherited from the caller. */
9217 if (regs_ever_live[3] == 0)
9219 insn = gen_rtx_REG (SImode, 3);
9220 insn = gen_rtx_SET (SImode, insn, ip_rtx);
9221 insn = emit_insn (insn);
9223 else if (args_to_push == 0)
9225 rtx dwarf;
9226 insn = gen_rtx_PRE_DEC (SImode, stack_pointer_rtx);
9227 insn = gen_rtx_MEM (SImode, insn);
9228 insn = gen_rtx_SET (VOIDmode, insn, ip_rtx);
9229 insn = emit_insn (insn);
9231 fp_offset = 4;
9233 /* Just tell the dwarf backend that we adjusted SP. */
9234 dwarf = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
9235 gen_rtx_PLUS (SImode, stack_pointer_rtx,
9236 GEN_INT (-fp_offset)));
9237 RTX_FRAME_RELATED_P (insn) = 1;
9238 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
9239 dwarf, REG_NOTES (insn));
9241 else
9243 /* Store the args on the stack. */
9244 if (cfun->machine->uses_anonymous_args)
9245 insn = emit_multi_reg_push
9246 ((0xf0 >> (args_to_push / 4)) & 0xf);
9247 else
9248 insn = emit_insn
9249 (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
9250 GEN_INT (- args_to_push)));
9252 RTX_FRAME_RELATED_P (insn) = 1;
9254 saved_pretend_args = 1;
9255 fp_offset = args_to_push;
9256 args_to_push = 0;
9258 /* Now reuse r3 to preserve IP. */
9259 insn = gen_rtx_REG (SImode, 3);
9260 insn = gen_rtx_SET (SImode, insn, ip_rtx);
9261 (void) emit_insn (insn);
9265 if (fp_offset)
9267 insn = gen_rtx_PLUS (SImode, stack_pointer_rtx, GEN_INT (fp_offset));
9268 insn = gen_rtx_SET (SImode, ip_rtx, insn);
9270 else
9271 insn = gen_movsi (ip_rtx, stack_pointer_rtx);
9273 insn = emit_insn (insn);
9274 RTX_FRAME_RELATED_P (insn) = 1;
9277 if (args_to_push)
9279 /* Push the argument registers, or reserve space for them. */
9280 if (cfun->machine->uses_anonymous_args)
9281 insn = emit_multi_reg_push
9282 ((0xf0 >> (args_to_push / 4)) & 0xf);
9283 else
9284 insn = emit_insn
9285 (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
9286 GEN_INT (- args_to_push)));
9287 RTX_FRAME_RELATED_P (insn) = 1;
9290 /* If this is an interrupt service routine, and the link register
9291 is going to be pushed, and we are not creating a stack frame,
9292 (which would involve an extra push of IP and a pop in the epilogue)
9293 subtracting four from LR now will mean that the function return
9294 can be done with a single instruction. */
9295 if ((func_type == ARM_FT_ISR || func_type == ARM_FT_FIQ)
9296 && (live_regs_mask & (1 << LR_REGNUM)) != 0
9297 && ! frame_pointer_needed)
9298 emit_insn (gen_rtx_SET (SImode,
9299 gen_rtx_REG (SImode, LR_REGNUM),
9300 gen_rtx_PLUS (SImode,
9301 gen_rtx_REG (SImode, LR_REGNUM),
9302 GEN_INT (-4))));
9304 if (live_regs_mask)
9306 insn = emit_multi_reg_push (live_regs_mask);
9307 RTX_FRAME_RELATED_P (insn) = 1;
9310 if (! IS_VOLATILE (func_type))
9312 /* Save any floating point call-saved registers used by this
9313 function. */
9314 if (arm_fpu_arch == FPUTYPE_FPA_EMU2)
9316 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
9317 if (regs_ever_live[reg] && !call_used_regs[reg])
9319 insn = gen_rtx_PRE_DEC (XFmode, stack_pointer_rtx);
9320 insn = gen_rtx_MEM (XFmode, insn);
9321 insn = emit_insn (gen_rtx_SET (VOIDmode, insn,
9322 gen_rtx_REG (XFmode, reg)));
9323 RTX_FRAME_RELATED_P (insn) = 1;
9326 else
9328 int start_reg = LAST_ARM_FP_REGNUM;
9330 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
9332 if (regs_ever_live[reg] && !call_used_regs[reg])
9334 if (start_reg - reg == 3)
9336 insn = emit_sfm (reg, 4);
9337 RTX_FRAME_RELATED_P (insn) = 1;
9338 start_reg = reg - 1;
9341 else
9343 if (start_reg != reg)
9345 insn = emit_sfm (reg + 1, start_reg - reg);
9346 RTX_FRAME_RELATED_P (insn) = 1;
9348 start_reg = reg - 1;
9352 if (start_reg != reg)
9354 insn = emit_sfm (reg + 1, start_reg - reg);
9355 RTX_FRAME_RELATED_P (insn) = 1;
9360 if (frame_pointer_needed)
9362 /* Create the new frame pointer. */
9363 insn = GEN_INT (-(4 + args_to_push + fp_offset));
9364 insn = emit_insn (gen_addsi3 (hard_frame_pointer_rtx, ip_rtx, insn));
9365 RTX_FRAME_RELATED_P (insn) = 1;
9367 if (IS_NESTED (func_type))
9369 /* Recover the static chain register. */
9370 if (regs_ever_live [3] == 0
9371 || saved_pretend_args)
9372 insn = gen_rtx_REG (SImode, 3);
9373 else /* if (current_function_pretend_args_size == 0) */
9375 insn = gen_rtx_PLUS (SImode, hard_frame_pointer_rtx,
9376 GEN_INT (4));
9377 insn = gen_rtx_MEM (SImode, insn);
9380 emit_insn (gen_rtx_SET (SImode, ip_rtx, insn));
9381 /* Add a USE to stop propagate_one_insn() from barfing. */
9382 emit_insn (gen_prologue_use (ip_rtx));
9386 amount = GEN_INT (-(arm_get_frame_size ()
9387 + current_function_outgoing_args_size));
9389 if (amount != const0_rtx)
9391 /* This add can produce multiple insns for a large constant, so we
9392 need to get tricky. */
9393 rtx last = get_last_insn ();
9394 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
9395 amount));
9398 last = last ? NEXT_INSN (last) : get_insns ();
9399 RTX_FRAME_RELATED_P (last) = 1;
9401 while (last != insn);
9403 /* If the frame pointer is needed, emit a special barrier that
9404 will prevent the scheduler from moving stores to the frame
9405 before the stack adjustment. */
9406 if (frame_pointer_needed)
9407 insn = emit_insn (gen_stack_tie (stack_pointer_rtx,
9408 hard_frame_pointer_rtx));
9411 /* If we are profiling, make sure no instructions are scheduled before
9412 the call to mcount. Similarly if the user has requested no
9413 scheduling in the prolog. */
9414 if (current_function_profile || TARGET_NO_SCHED_PRO)
9415 emit_insn (gen_blockage ());
9417 /* If the link register is being kept alive, with the return address in it,
9418 then make sure that it does not get reused by the ce2 pass. */
9419 if ((live_regs_mask & (1 << LR_REGNUM)) == 0)
9421 emit_insn (gen_prologue_use (gen_rtx_REG (SImode, LR_REGNUM)));
9422 cfun->machine->lr_save_eliminated = 1;
9426 /* If CODE is 'd', then the X is a condition operand and the instruction
9427 should only be executed if the condition is true.
9428 if CODE is 'D', then the X is a condition operand and the instruction
9429 should only be executed if the condition is false: however, if the mode
9430 of the comparison is CCFPEmode, then always execute the instruction -- we
9431 do this because in these circumstances !GE does not necessarily imply LT;
9432 in these cases the instruction pattern will take care to make sure that
9433 an instruction containing %d will follow, thereby undoing the effects of
9434 doing this instruction unconditionally.
9435 If CODE is 'N' then X is a floating point operand that must be negated
9436 before output.
9437 If CODE is 'B' then output a bitwise inverted value of X (a const int).
9438 If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */
9440 void
9441 arm_print_operand (stream, x, code)
9442 FILE * stream;
9443 rtx x;
9444 int code;
9446 switch (code)
9448 case '@':
9449 fputs (ASM_COMMENT_START, stream);
9450 return;
9452 case '_':
9453 fputs (user_label_prefix, stream);
9454 return;
9456 case '|':
9457 fputs (REGISTER_PREFIX, stream);
9458 return;
9460 case '?':
9461 if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
9463 if (TARGET_THUMB || current_insn_predicate != NULL)
9464 abort ();
9466 fputs (arm_condition_codes[arm_current_cc], stream);
9468 else if (current_insn_predicate)
9470 enum arm_cond_code code;
9472 if (TARGET_THUMB)
9473 abort ();
9475 code = get_arm_condition_code (current_insn_predicate);
9476 fputs (arm_condition_codes[code], stream);
9478 return;
9480 case 'N':
9482 REAL_VALUE_TYPE r;
9483 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
9484 r = REAL_VALUE_NEGATE (r);
9485 fprintf (stream, "%s", fp_const_from_val (&r));
9487 return;
9489 case 'B':
9490 if (GET_CODE (x) == CONST_INT)
9492 HOST_WIDE_INT val;
9493 val = ARM_SIGN_EXTEND (~INTVAL (x));
9494 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
9496 else
9498 putc ('~', stream);
9499 output_addr_const (stream, x);
9501 return;
9503 case 'i':
9504 fprintf (stream, "%s", arithmetic_instr (x, 1));
9505 return;
9507 /* Truncate Cirrus shift counts. */
9508 case 's':
9509 if (GET_CODE (x) == CONST_INT)
9511 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0x3f);
9512 return;
9514 arm_print_operand (stream, x, 0);
9515 return;
9517 case 'I':
9518 fprintf (stream, "%s", arithmetic_instr (x, 0));
9519 return;
9521 case 'S':
9523 HOST_WIDE_INT val;
9524 const char * shift = shift_op (x, &val);
9526 if (shift)
9528 fprintf (stream, ", %s ", shift_op (x, &val));
9529 if (val == -1)
9530 arm_print_operand (stream, XEXP (x, 1), 0);
9531 else
9533 fputc ('#', stream);
9534 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
9538 return;
9540 /* An explanation of the 'Q', 'R' and 'H' register operands:
9542 In a pair of registers containing a DI or DF value the 'Q'
9543 operand returns the register number of the register containing
9544 the least signficant part of the value. The 'R' operand returns
9545 the register number of the register containing the most
9546 significant part of the value.
9548 The 'H' operand returns the higher of the two register numbers.
9549 On a run where WORDS_BIG_ENDIAN is true the 'H' operand is the
9550 same as the 'Q' operand, since the most signficant part of the
9551 value is held in the lower number register. The reverse is true
9552 on systems where WORDS_BIG_ENDIAN is false.
9554 The purpose of these operands is to distinguish between cases
9555 where the endian-ness of the values is important (for example
9556 when they are added together), and cases where the endian-ness
9557 is irrelevant, but the order of register operations is important.
9558 For example when loading a value from memory into a register
9559 pair, the endian-ness does not matter. Provided that the value
9560 from the lower memory address is put into the lower numbered
9561 register, and the value from the higher address is put into the
9562 higher numbered register, the load will work regardless of whether
9563 the value being loaded is big-wordian or little-wordian. The
9564 order of the two register loads can matter however, if the address
9565 of the memory location is actually held in one of the registers
9566 being overwritten by the load. */
9567 case 'Q':
9568 if (REGNO (x) > LAST_ARM_REGNUM)
9569 abort ();
9570 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 1 : 0));
9571 return;
9573 case 'R':
9574 if (REGNO (x) > LAST_ARM_REGNUM)
9575 abort ();
9576 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 0 : 1));
9577 return;
9579 case 'H':
9580 if (REGNO (x) > LAST_ARM_REGNUM)
9581 abort ();
9582 asm_fprintf (stream, "%r", REGNO (x) + 1);
9583 return;
9585 case 'm':
9586 asm_fprintf (stream, "%r",
9587 GET_CODE (XEXP (x, 0)) == REG
9588 ? REGNO (XEXP (x, 0)) : REGNO (XEXP (XEXP (x, 0), 0)));
9589 return;
9591 case 'M':
9592 asm_fprintf (stream, "{%r-%r}",
9593 REGNO (x),
9594 REGNO (x) + ARM_NUM_REGS (GET_MODE (x)) - 1);
9595 return;
9597 case 'd':
9598 /* CONST_TRUE_RTX means always -- that's the default. */
9599 if (x == const_true_rtx)
9600 return;
9602 if (TARGET_ARM)
9603 fputs (arm_condition_codes[get_arm_condition_code (x)],
9604 stream);
9605 else
9606 fputs (thumb_condition_code (x, 0), stream);
9607 return;
9609 case 'D':
9610 /* CONST_TRUE_RTX means not always -- ie never. We shouldn't ever
9611 want to do that. */
9612 if (x == const_true_rtx)
9613 abort ();
9615 if (TARGET_ARM)
9616 fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
9617 (get_arm_condition_code (x))],
9618 stream);
9619 else
9620 fputs (thumb_condition_code (x, 1), stream);
9621 return;
9624 /* Cirrus registers can be accessed in a variety of ways:
9625 single floating point (f)
9626 double floating point (d)
9627 32bit integer (fx)
9628 64bit integer (dx). */
9629 case 'W': /* Cirrus register in F mode. */
9630 case 'X': /* Cirrus register in D mode. */
9631 case 'Y': /* Cirrus register in FX mode. */
9632 case 'Z': /* Cirrus register in DX mode. */
9633 if (GET_CODE (x) != REG || REGNO_REG_CLASS (REGNO (x)) != CIRRUS_REGS)
9634 abort ();
9636 fprintf (stream, "mv%s%s",
9637 code == 'W' ? "f"
9638 : code == 'X' ? "d"
9639 : code == 'Y' ? "fx" : "dx", reg_names[REGNO (x)] + 2);
9641 return;
9643 /* Print cirrus register in the mode specified by the register's mode. */
9644 case 'V':
9646 int mode = GET_MODE (x);
9648 if (GET_CODE (x) != REG || REGNO_REG_CLASS (REGNO (x)) != CIRRUS_REGS)
9649 abort ();
9651 fprintf (stream, "mv%s%s",
9652 mode == DFmode ? "d"
9653 : mode == SImode ? "fx"
9654 : mode == DImode ? "dx"
9655 : "f", reg_names[REGNO (x)] + 2);
9657 return;
9660 default:
9661 if (x == 0)
9662 abort ();
9664 if (GET_CODE (x) == REG)
9665 asm_fprintf (stream, "%r", REGNO (x));
9666 else if (GET_CODE (x) == MEM)
9668 output_memory_reference_mode = GET_MODE (x);
9669 output_address (XEXP (x, 0));
9671 else if (GET_CODE (x) == CONST_DOUBLE)
9672 fprintf (stream, "#%s", fp_immediate_constant (x));
9673 else if (GET_CODE (x) == NEG)
9674 abort (); /* This should never happen now. */
9675 else
9677 fputc ('#', stream);
9678 output_addr_const (stream, x);
9683 #ifndef AOF_ASSEMBLER
9684 /* Target hook for assembling integer objects. The ARM version needs to
9685 handle word-sized values specially. */
9687 static bool
9688 arm_assemble_integer (x, size, aligned_p)
9689 rtx x;
9690 unsigned int size;
9691 int aligned_p;
9693 if (size == UNITS_PER_WORD && aligned_p)
9695 fputs ("\t.word\t", asm_out_file);
9696 output_addr_const (asm_out_file, x);
9698 /* Mark symbols as position independent. We only do this in the
9699 .text segment, not in the .data segment. */
9700 if (NEED_GOT_RELOC && flag_pic && making_const_table &&
9701 (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF))
9703 if (GET_CODE (x) == SYMBOL_REF
9704 && (CONSTANT_POOL_ADDRESS_P (x)
9705 || ENCODED_SHORT_CALL_ATTR_P (XSTR (x, 0))))
9706 fputs ("(GOTOFF)", asm_out_file);
9707 else if (GET_CODE (x) == LABEL_REF)
9708 fputs ("(GOTOFF)", asm_out_file);
9709 else
9710 fputs ("(GOT)", asm_out_file);
9712 fputc ('\n', asm_out_file);
9713 return true;
9716 return default_assemble_integer (x, size, aligned_p);
9718 #endif
9720 /* A finite state machine takes care of noticing whether or not instructions
9721 can be conditionally executed, and thus decrease execution time and code
9722 size by deleting branch instructions. The fsm is controlled by
9723 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
9725 /* The state of the fsm controlling condition codes are:
9726 0: normal, do nothing special
9727 1: make ASM_OUTPUT_OPCODE not output this instruction
9728 2: make ASM_OUTPUT_OPCODE not output this instruction
9729 3: make instructions conditional
9730 4: make instructions conditional
9732 State transitions (state->state by whom under condition):
9733 0 -> 1 final_prescan_insn if the `target' is a label
9734 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
9735 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
9736 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
9737 3 -> 0 (*targetm.asm_out.internal_label) if the `target' label is reached
9738 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
9739 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
9740 (the target insn is arm_target_insn).
9742 If the jump clobbers the conditions then we use states 2 and 4.
9744 A similar thing can be done with conditional return insns.
9746 XXX In case the `target' is an unconditional branch, this conditionalising
9747 of the instructions always reduces code size, but not always execution
9748 time. But then, I want to reduce the code size to somewhere near what
9749 /bin/cc produces. */
9751 /* Returns the index of the ARM condition code string in
9752 `arm_condition_codes'. COMPARISON should be an rtx like
9753 `(eq (...) (...))'. */
9755 static enum arm_cond_code
9756 get_arm_condition_code (comparison)
9757 rtx comparison;
9759 enum machine_mode mode = GET_MODE (XEXP (comparison, 0));
9760 int code;
9761 enum rtx_code comp_code = GET_CODE (comparison);
9763 if (GET_MODE_CLASS (mode) != MODE_CC)
9764 mode = SELECT_CC_MODE (comp_code, XEXP (comparison, 0),
9765 XEXP (comparison, 1));
9767 switch (mode)
9769 case CC_DNEmode: code = ARM_NE; goto dominance;
9770 case CC_DEQmode: code = ARM_EQ; goto dominance;
9771 case CC_DGEmode: code = ARM_GE; goto dominance;
9772 case CC_DGTmode: code = ARM_GT; goto dominance;
9773 case CC_DLEmode: code = ARM_LE; goto dominance;
9774 case CC_DLTmode: code = ARM_LT; goto dominance;
9775 case CC_DGEUmode: code = ARM_CS; goto dominance;
9776 case CC_DGTUmode: code = ARM_HI; goto dominance;
9777 case CC_DLEUmode: code = ARM_LS; goto dominance;
9778 case CC_DLTUmode: code = ARM_CC;
9780 dominance:
9781 if (comp_code != EQ && comp_code != NE)
9782 abort ();
9784 if (comp_code == EQ)
9785 return ARM_INVERSE_CONDITION_CODE (code);
9786 return code;
9788 case CC_NOOVmode:
9789 switch (comp_code)
9791 case NE: return ARM_NE;
9792 case EQ: return ARM_EQ;
9793 case GE: return ARM_PL;
9794 case LT: return ARM_MI;
9795 default: abort ();
9798 case CC_Zmode:
9799 switch (comp_code)
9801 case NE: return ARM_NE;
9802 case EQ: return ARM_EQ;
9803 default: abort ();
9806 case CCFPEmode:
9807 case CCFPmode:
9808 /* These encodings assume that AC=1 in the FPA system control
9809 byte. This allows us to handle all cases except UNEQ and
9810 LTGT. */
9811 switch (comp_code)
9813 case GE: return ARM_GE;
9814 case GT: return ARM_GT;
9815 case LE: return ARM_LS;
9816 case LT: return ARM_MI;
9817 case NE: return ARM_NE;
9818 case EQ: return ARM_EQ;
9819 case ORDERED: return ARM_VC;
9820 case UNORDERED: return ARM_VS;
9821 case UNLT: return ARM_LT;
9822 case UNLE: return ARM_LE;
9823 case UNGT: return ARM_HI;
9824 case UNGE: return ARM_PL;
9825 /* UNEQ and LTGT do not have a representation. */
9826 case UNEQ: /* Fall through. */
9827 case LTGT: /* Fall through. */
9828 default: abort ();
9831 case CC_SWPmode:
9832 switch (comp_code)
9834 case NE: return ARM_NE;
9835 case EQ: return ARM_EQ;
9836 case GE: return ARM_LE;
9837 case GT: return ARM_LT;
9838 case LE: return ARM_GE;
9839 case LT: return ARM_GT;
9840 case GEU: return ARM_LS;
9841 case GTU: return ARM_CC;
9842 case LEU: return ARM_CS;
9843 case LTU: return ARM_HI;
9844 default: abort ();
9847 case CC_Cmode:
9848 switch (comp_code)
9850 case LTU: return ARM_CS;
9851 case GEU: return ARM_CC;
9852 default: abort ();
9855 case CCmode:
9856 switch (comp_code)
9858 case NE: return ARM_NE;
9859 case EQ: return ARM_EQ;
9860 case GE: return ARM_GE;
9861 case GT: return ARM_GT;
9862 case LE: return ARM_LE;
9863 case LT: return ARM_LT;
9864 case GEU: return ARM_CS;
9865 case GTU: return ARM_HI;
9866 case LEU: return ARM_LS;
9867 case LTU: return ARM_CC;
9868 default: abort ();
9871 default: abort ();
9874 abort ();
9878 void
9879 arm_final_prescan_insn (insn)
9880 rtx insn;
9882 /* BODY will hold the body of INSN. */
9883 rtx body = PATTERN (insn);
9885 /* This will be 1 if trying to repeat the trick, and things need to be
9886 reversed if it appears to fail. */
9887 int reverse = 0;
9889 /* JUMP_CLOBBERS will be one implies that the conditions if a branch is
9890 taken are clobbered, even if the rtl suggests otherwise. It also
9891 means that we have to grub around within the jump expression to find
9892 out what the conditions are when the jump isn't taken. */
9893 int jump_clobbers = 0;
9895 /* If we start with a return insn, we only succeed if we find another one. */
9896 int seeking_return = 0;
9898 /* START_INSN will hold the insn from where we start looking. This is the
9899 first insn after the following code_label if REVERSE is true. */
9900 rtx start_insn = insn;
9902 /* If in state 4, check if the target branch is reached, in order to
9903 change back to state 0. */
9904 if (arm_ccfsm_state == 4)
9906 if (insn == arm_target_insn)
9908 arm_target_insn = NULL;
9909 arm_ccfsm_state = 0;
9911 return;
9914 /* If in state 3, it is possible to repeat the trick, if this insn is an
9915 unconditional branch to a label, and immediately following this branch
9916 is the previous target label which is only used once, and the label this
9917 branch jumps to is not too far off. */
9918 if (arm_ccfsm_state == 3)
9920 if (simplejump_p (insn))
9922 start_insn = next_nonnote_insn (start_insn);
9923 if (GET_CODE (start_insn) == BARRIER)
9925 /* XXX Isn't this always a barrier? */
9926 start_insn = next_nonnote_insn (start_insn);
9928 if (GET_CODE (start_insn) == CODE_LABEL
9929 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
9930 && LABEL_NUSES (start_insn) == 1)
9931 reverse = TRUE;
9932 else
9933 return;
9935 else if (GET_CODE (body) == RETURN)
9937 start_insn = next_nonnote_insn (start_insn);
9938 if (GET_CODE (start_insn) == BARRIER)
9939 start_insn = next_nonnote_insn (start_insn);
9940 if (GET_CODE (start_insn) == CODE_LABEL
9941 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
9942 && LABEL_NUSES (start_insn) == 1)
9944 reverse = TRUE;
9945 seeking_return = 1;
9947 else
9948 return;
9950 else
9951 return;
9954 if (arm_ccfsm_state != 0 && !reverse)
9955 abort ();
9956 if (GET_CODE (insn) != JUMP_INSN)
9957 return;
9959 /* This jump might be paralleled with a clobber of the condition codes
9960 the jump should always come first */
9961 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
9962 body = XVECEXP (body, 0, 0);
9964 #if 0
9965 /* If this is a conditional return then we don't want to know */
9966 if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
9967 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
9968 && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
9969 || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
9970 return;
9971 #endif
9973 if (reverse
9974 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
9975 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
9977 int insns_skipped;
9978 int fail = FALSE, succeed = FALSE;
9979 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
9980 int then_not_else = TRUE;
9981 rtx this_insn = start_insn, label = 0;
9983 /* If the jump cannot be done with one instruction, we cannot
9984 conditionally execute the instruction in the inverse case. */
9985 if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
9987 jump_clobbers = 1;
9988 return;
9991 /* Register the insn jumped to. */
9992 if (reverse)
9994 if (!seeking_return)
9995 label = XEXP (SET_SRC (body), 0);
9997 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
9998 label = XEXP (XEXP (SET_SRC (body), 1), 0);
9999 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
10001 label = XEXP (XEXP (SET_SRC (body), 2), 0);
10002 then_not_else = FALSE;
10004 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
10005 seeking_return = 1;
10006 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
10008 seeking_return = 1;
10009 then_not_else = FALSE;
10011 else
10012 abort ();
10014 /* See how many insns this branch skips, and what kind of insns. If all
10015 insns are okay, and the label or unconditional branch to the same
10016 label is not too far away, succeed. */
10017 for (insns_skipped = 0;
10018 !fail && !succeed && insns_skipped++ < max_insns_skipped;)
10020 rtx scanbody;
10022 this_insn = next_nonnote_insn (this_insn);
10023 if (!this_insn)
10024 break;
10026 switch (GET_CODE (this_insn))
10028 case CODE_LABEL:
10029 /* Succeed if it is the target label, otherwise fail since
10030 control falls in from somewhere else. */
10031 if (this_insn == label)
10033 if (jump_clobbers)
10035 arm_ccfsm_state = 2;
10036 this_insn = next_nonnote_insn (this_insn);
10038 else
10039 arm_ccfsm_state = 1;
10040 succeed = TRUE;
10042 else
10043 fail = TRUE;
10044 break;
10046 case BARRIER:
10047 /* Succeed if the following insn is the target label.
10048 Otherwise fail.
10049 If return insns are used then the last insn in a function
10050 will be a barrier. */
10051 this_insn = next_nonnote_insn (this_insn);
10052 if (this_insn && this_insn == label)
10054 if (jump_clobbers)
10056 arm_ccfsm_state = 2;
10057 this_insn = next_nonnote_insn (this_insn);
10059 else
10060 arm_ccfsm_state = 1;
10061 succeed = TRUE;
10063 else
10064 fail = TRUE;
10065 break;
10067 case CALL_INSN:
10068 /* If using 32-bit addresses the cc is not preserved over
10069 calls. */
10070 if (TARGET_APCS_32)
10072 /* Succeed if the following insn is the target label,
10073 or if the following two insns are a barrier and
10074 the target label. */
10075 this_insn = next_nonnote_insn (this_insn);
10076 if (this_insn && GET_CODE (this_insn) == BARRIER)
10077 this_insn = next_nonnote_insn (this_insn);
10079 if (this_insn && this_insn == label
10080 && insns_skipped < max_insns_skipped)
10082 if (jump_clobbers)
10084 arm_ccfsm_state = 2;
10085 this_insn = next_nonnote_insn (this_insn);
10087 else
10088 arm_ccfsm_state = 1;
10089 succeed = TRUE;
10091 else
10092 fail = TRUE;
10094 break;
10096 case JUMP_INSN:
10097 /* If this is an unconditional branch to the same label, succeed.
10098 If it is to another label, do nothing. If it is conditional,
10099 fail. */
10100 /* XXX Probably, the tests for SET and the PC are unnecessary. */
10102 scanbody = PATTERN (this_insn);
10103 if (GET_CODE (scanbody) == SET
10104 && GET_CODE (SET_DEST (scanbody)) == PC)
10106 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
10107 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
10109 arm_ccfsm_state = 2;
10110 succeed = TRUE;
10112 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
10113 fail = TRUE;
10115 /* Fail if a conditional return is undesirable (eg on a
10116 StrongARM), but still allow this if optimizing for size. */
10117 else if (GET_CODE (scanbody) == RETURN
10118 && !use_return_insn (TRUE)
10119 && !optimize_size)
10120 fail = TRUE;
10121 else if (GET_CODE (scanbody) == RETURN
10122 && seeking_return)
10124 arm_ccfsm_state = 2;
10125 succeed = TRUE;
10127 else if (GET_CODE (scanbody) == PARALLEL)
10129 switch (get_attr_conds (this_insn))
10131 case CONDS_NOCOND:
10132 break;
10133 default:
10134 fail = TRUE;
10135 break;
10138 else
10139 fail = TRUE; /* Unrecognized jump (eg epilogue). */
10141 break;
10143 case INSN:
10144 /* Instructions using or affecting the condition codes make it
10145 fail. */
10146 scanbody = PATTERN (this_insn);
10147 if (!(GET_CODE (scanbody) == SET
10148 || GET_CODE (scanbody) == PARALLEL)
10149 || get_attr_conds (this_insn) != CONDS_NOCOND)
10150 fail = TRUE;
10152 /* A conditional cirrus instruction must be followed by
10153 a non Cirrus instruction. However, since we
10154 conditionalize instructions in this function and by
10155 the time we get here we can't add instructions
10156 (nops), because shorten_branches() has already been
10157 called, we will disable conditionalizing Cirrus
10158 instructions to be safe. */
10159 if (GET_CODE (scanbody) != USE
10160 && GET_CODE (scanbody) != CLOBBER
10161 && get_attr_cirrus (this_insn) != CIRRUS_NOT)
10162 fail = TRUE;
10163 break;
10165 default:
10166 break;
10169 if (succeed)
10171 if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
10172 arm_target_label = CODE_LABEL_NUMBER (label);
10173 else if (seeking_return || arm_ccfsm_state == 2)
10175 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
10177 this_insn = next_nonnote_insn (this_insn);
10178 if (this_insn && (GET_CODE (this_insn) == BARRIER
10179 || GET_CODE (this_insn) == CODE_LABEL))
10180 abort ();
10182 if (!this_insn)
10184 /* Oh, dear! we ran off the end.. give up */
10185 recog (PATTERN (insn), insn, NULL);
10186 arm_ccfsm_state = 0;
10187 arm_target_insn = NULL;
10188 return;
10190 arm_target_insn = this_insn;
10192 else
10193 abort ();
10194 if (jump_clobbers)
10196 if (reverse)
10197 abort ();
10198 arm_current_cc =
10199 get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
10200 0), 0), 1));
10201 if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
10202 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
10203 if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
10204 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
10206 else
10208 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
10209 what it was. */
10210 if (!reverse)
10211 arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
10212 0));
10215 if (reverse || then_not_else)
10216 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
10219 /* Restore recog_data (getting the attributes of other insns can
10220 destroy this array, but final.c assumes that it remains intact
10221 across this call; since the insn has been recognized already we
10222 call recog direct). */
10223 recog (PATTERN (insn), insn, NULL);
10227 /* Returns true if REGNO is a valid register
10228 for holding a quantity of tyoe MODE. */
10231 arm_hard_regno_mode_ok (regno, mode)
10232 unsigned int regno;
10233 enum machine_mode mode;
10235 if (GET_MODE_CLASS (mode) == MODE_CC)
10236 return regno == CC_REGNUM;
10238 if (TARGET_THUMB)
10239 /* For the Thumb we only allow values bigger than SImode in
10240 registers 0 - 6, so that there is always a second low
10241 register available to hold the upper part of the value.
10242 We probably we ought to ensure that the register is the
10243 start of an even numbered register pair. */
10244 return (ARM_NUM_REGS (mode) < 2) || (regno < LAST_LO_REGNUM);
10246 if (IS_CIRRUS_REGNUM (regno))
10247 /* We have outlawed SI values in Cirrus registers because they
10248 reside in the lower 32 bits, but SF values reside in the
10249 upper 32 bits. This causes gcc all sorts of grief. We can't
10250 even split the registers into pairs because Cirrus SI values
10251 get sign extended to 64bits-- aldyh. */
10252 return (GET_MODE_CLASS (mode) == MODE_FLOAT) || (mode == DImode);
10254 if (regno <= LAST_ARM_REGNUM)
10255 /* We allow any value to be stored in the general regisetrs. */
10256 return 1;
10258 if ( regno == FRAME_POINTER_REGNUM
10259 || regno == ARG_POINTER_REGNUM)
10260 /* We only allow integers in the fake hard registers. */
10261 return GET_MODE_CLASS (mode) == MODE_INT;
10263 /* The only registers left are the FPA registers
10264 which we only allow to hold FP values. */
10265 return GET_MODE_CLASS (mode) == MODE_FLOAT
10266 && regno >= FIRST_ARM_FP_REGNUM
10267 && regno <= LAST_ARM_FP_REGNUM;
10271 arm_regno_class (regno)
10272 int regno;
10274 if (TARGET_THUMB)
10276 if (regno == STACK_POINTER_REGNUM)
10277 return STACK_REG;
10278 if (regno == CC_REGNUM)
10279 return CC_REG;
10280 if (regno < 8)
10281 return LO_REGS;
10282 return HI_REGS;
10285 if ( regno <= LAST_ARM_REGNUM
10286 || regno == FRAME_POINTER_REGNUM
10287 || regno == ARG_POINTER_REGNUM)
10288 return GENERAL_REGS;
10290 if (regno == CC_REGNUM)
10291 return NO_REGS;
10293 if (IS_CIRRUS_REGNUM (regno))
10294 return CIRRUS_REGS;
10296 return FPA_REGS;
10299 /* Handle a special case when computing the offset
10300 of an argument from the frame pointer. */
10303 arm_debugger_arg_offset (value, addr)
10304 int value;
10305 rtx addr;
10307 rtx insn;
10309 /* We are only interested if dbxout_parms() failed to compute the offset. */
10310 if (value != 0)
10311 return 0;
10313 /* We can only cope with the case where the address is held in a register. */
10314 if (GET_CODE (addr) != REG)
10315 return 0;
10317 /* If we are using the frame pointer to point at the argument, then
10318 an offset of 0 is correct. */
10319 if (REGNO (addr) == (unsigned) HARD_FRAME_POINTER_REGNUM)
10320 return 0;
10322 /* If we are using the stack pointer to point at the
10323 argument, then an offset of 0 is correct. */
10324 if ((TARGET_THUMB || !frame_pointer_needed)
10325 && REGNO (addr) == SP_REGNUM)
10326 return 0;
10328 /* Oh dear. The argument is pointed to by a register rather
10329 than being held in a register, or being stored at a known
10330 offset from the frame pointer. Since GDB only understands
10331 those two kinds of argument we must translate the address
10332 held in the register into an offset from the frame pointer.
10333 We do this by searching through the insns for the function
10334 looking to see where this register gets its value. If the
10335 register is initialized from the frame pointer plus an offset
10336 then we are in luck and we can continue, otherwise we give up.
10338 This code is exercised by producing debugging information
10339 for a function with arguments like this:
10341 double func (double a, double b, int c, double d) {return d;}
10343 Without this code the stab for parameter 'd' will be set to
10344 an offset of 0 from the frame pointer, rather than 8. */
10346 /* The if() statement says:
10348 If the insn is a normal instruction
10349 and if the insn is setting the value in a register
10350 and if the register being set is the register holding the address of the argument
10351 and if the address is computing by an addition
10352 that involves adding to a register
10353 which is the frame pointer
10354 a constant integer
10356 then... */
10358 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
10360 if ( GET_CODE (insn) == INSN
10361 && GET_CODE (PATTERN (insn)) == SET
10362 && REGNO (XEXP (PATTERN (insn), 0)) == REGNO (addr)
10363 && GET_CODE (XEXP (PATTERN (insn), 1)) == PLUS
10364 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 0)) == REG
10365 && REGNO (XEXP (XEXP (PATTERN (insn), 1), 0)) == (unsigned) HARD_FRAME_POINTER_REGNUM
10366 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 1)) == CONST_INT
10369 value = INTVAL (XEXP (XEXP (PATTERN (insn), 1), 1));
10371 break;
10375 if (value == 0)
10377 debug_rtx (addr);
10378 warning ("unable to compute real location of stacked parameter");
10379 value = 8; /* XXX magic hack */
10382 return value;
10385 /* Recursively search through all of the blocks in a function
10386 checking to see if any of the variables created in that
10387 function match the RTX called 'orig'. If they do then
10388 replace them with the RTX called 'new'. */
10390 static void
10391 replace_symbols_in_block (block, orig, new)
10392 tree block;
10393 rtx orig;
10394 rtx new;
10396 for (; block; block = BLOCK_CHAIN (block))
10398 tree sym;
10400 if (!TREE_USED (block))
10401 continue;
10403 for (sym = BLOCK_VARS (block); sym; sym = TREE_CHAIN (sym))
10405 if ( (DECL_NAME (sym) == 0 && TREE_CODE (sym) != TYPE_DECL)
10406 || DECL_IGNORED_P (sym)
10407 || TREE_CODE (sym) != VAR_DECL
10408 || DECL_EXTERNAL (sym)
10409 || !rtx_equal_p (DECL_RTL (sym), orig)
10411 continue;
10413 SET_DECL_RTL (sym, new);
10416 replace_symbols_in_block (BLOCK_SUBBLOCKS (block), orig, new);
10420 /* Return the number (counting from 0) of
10421 the least significant set bit in MASK. */
10423 #ifdef __GNUC__
10424 inline
10425 #endif
10426 static int
10427 number_of_first_bit_set (mask)
10428 int mask;
10430 int bit;
10432 for (bit = 0;
10433 (mask & (1 << bit)) == 0;
10434 ++bit)
10435 continue;
10437 return bit;
10440 /* Generate code to return from a thumb function.
10441 If 'reg_containing_return_addr' is -1, then the return address is
10442 actually on the stack, at the stack pointer. */
10443 static void
10444 thumb_exit (f, reg_containing_return_addr, eh_ofs)
10445 FILE * f;
10446 int reg_containing_return_addr;
10447 rtx eh_ofs;
10449 unsigned regs_available_for_popping;
10450 unsigned regs_to_pop;
10451 int pops_needed;
10452 unsigned available;
10453 unsigned required;
10454 int mode;
10455 int size;
10456 int restore_a4 = FALSE;
10458 /* Compute the registers we need to pop. */
10459 regs_to_pop = 0;
10460 pops_needed = 0;
10462 /* There is an assumption here, that if eh_ofs is not NULL, the
10463 normal return address will have been pushed. */
10464 if (reg_containing_return_addr == -1 || eh_ofs)
10466 /* When we are generating a return for __builtin_eh_return,
10467 reg_containing_return_addr must specify the return regno. */
10468 if (eh_ofs && reg_containing_return_addr == -1)
10469 abort ();
10471 regs_to_pop |= 1 << LR_REGNUM;
10472 ++pops_needed;
10475 if (TARGET_BACKTRACE)
10477 /* Restore the (ARM) frame pointer and stack pointer. */
10478 regs_to_pop |= (1 << ARM_HARD_FRAME_POINTER_REGNUM) | (1 << SP_REGNUM);
10479 pops_needed += 2;
10482 /* If there is nothing to pop then just emit the BX instruction and
10483 return. */
10484 if (pops_needed == 0)
10486 if (eh_ofs)
10487 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
10489 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
10490 return;
10492 /* Otherwise if we are not supporting interworking and we have not created
10493 a backtrace structure and the function was not entered in ARM mode then
10494 just pop the return address straight into the PC. */
10495 else if (!TARGET_INTERWORK
10496 && !TARGET_BACKTRACE
10497 && !is_called_in_ARM_mode (current_function_decl))
10499 if (eh_ofs)
10501 asm_fprintf (f, "\tadd\t%r, #4\n", SP_REGNUM);
10502 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
10503 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
10505 else
10506 asm_fprintf (f, "\tpop\t{%r}\n", PC_REGNUM);
10508 return;
10511 /* Find out how many of the (return) argument registers we can corrupt. */
10512 regs_available_for_popping = 0;
10514 /* If returning via __builtin_eh_return, the bottom three registers
10515 all contain information needed for the return. */
10516 if (eh_ofs)
10517 size = 12;
10518 else
10520 #ifdef RTX_CODE
10521 /* If we can deduce the registers used from the function's
10522 return value. This is more reliable that examining
10523 regs_ever_live[] because that will be set if the register is
10524 ever used in the function, not just if the register is used
10525 to hold a return value. */
10527 if (current_function_return_rtx != 0)
10528 mode = GET_MODE (current_function_return_rtx);
10529 else
10530 #endif
10531 mode = DECL_MODE (DECL_RESULT (current_function_decl));
10533 size = GET_MODE_SIZE (mode);
10535 if (size == 0)
10537 /* In a void function we can use any argument register.
10538 In a function that returns a structure on the stack
10539 we can use the second and third argument registers. */
10540 if (mode == VOIDmode)
10541 regs_available_for_popping =
10542 (1 << ARG_REGISTER (1))
10543 | (1 << ARG_REGISTER (2))
10544 | (1 << ARG_REGISTER (3));
10545 else
10546 regs_available_for_popping =
10547 (1 << ARG_REGISTER (2))
10548 | (1 << ARG_REGISTER (3));
10550 else if (size <= 4)
10551 regs_available_for_popping =
10552 (1 << ARG_REGISTER (2))
10553 | (1 << ARG_REGISTER (3));
10554 else if (size <= 8)
10555 regs_available_for_popping =
10556 (1 << ARG_REGISTER (3));
10559 /* Match registers to be popped with registers into which we pop them. */
10560 for (available = regs_available_for_popping,
10561 required = regs_to_pop;
10562 required != 0 && available != 0;
10563 available &= ~(available & - available),
10564 required &= ~(required & - required))
10565 -- pops_needed;
10567 /* If we have any popping registers left over, remove them. */
10568 if (available > 0)
10569 regs_available_for_popping &= ~available;
10571 /* Otherwise if we need another popping register we can use
10572 the fourth argument register. */
10573 else if (pops_needed)
10575 /* If we have not found any free argument registers and
10576 reg a4 contains the return address, we must move it. */
10577 if (regs_available_for_popping == 0
10578 && reg_containing_return_addr == LAST_ARG_REGNUM)
10580 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
10581 reg_containing_return_addr = LR_REGNUM;
10583 else if (size > 12)
10585 /* Register a4 is being used to hold part of the return value,
10586 but we have dire need of a free, low register. */
10587 restore_a4 = TRUE;
10589 asm_fprintf (f, "\tmov\t%r, %r\n",IP_REGNUM, LAST_ARG_REGNUM);
10592 if (reg_containing_return_addr != LAST_ARG_REGNUM)
10594 /* The fourth argument register is available. */
10595 regs_available_for_popping |= 1 << LAST_ARG_REGNUM;
10597 --pops_needed;
10601 /* Pop as many registers as we can. */
10602 thumb_pushpop (f, regs_available_for_popping, FALSE);
10604 /* Process the registers we popped. */
10605 if (reg_containing_return_addr == -1)
10607 /* The return address was popped into the lowest numbered register. */
10608 regs_to_pop &= ~(1 << LR_REGNUM);
10610 reg_containing_return_addr =
10611 number_of_first_bit_set (regs_available_for_popping);
10613 /* Remove this register for the mask of available registers, so that
10614 the return address will not be corrupted by further pops. */
10615 regs_available_for_popping &= ~(1 << reg_containing_return_addr);
10618 /* If we popped other registers then handle them here. */
10619 if (regs_available_for_popping)
10621 int frame_pointer;
10623 /* Work out which register currently contains the frame pointer. */
10624 frame_pointer = number_of_first_bit_set (regs_available_for_popping);
10626 /* Move it into the correct place. */
10627 asm_fprintf (f, "\tmov\t%r, %r\n",
10628 ARM_HARD_FRAME_POINTER_REGNUM, frame_pointer);
10630 /* (Temporarily) remove it from the mask of popped registers. */
10631 regs_available_for_popping &= ~(1 << frame_pointer);
10632 regs_to_pop &= ~(1 << ARM_HARD_FRAME_POINTER_REGNUM);
10634 if (regs_available_for_popping)
10636 int stack_pointer;
10638 /* We popped the stack pointer as well,
10639 find the register that contains it. */
10640 stack_pointer = number_of_first_bit_set (regs_available_for_popping);
10642 /* Move it into the stack register. */
10643 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, stack_pointer);
10645 /* At this point we have popped all necessary registers, so
10646 do not worry about restoring regs_available_for_popping
10647 to its correct value:
10649 assert (pops_needed == 0)
10650 assert (regs_available_for_popping == (1 << frame_pointer))
10651 assert (regs_to_pop == (1 << STACK_POINTER)) */
10653 else
10655 /* Since we have just move the popped value into the frame
10656 pointer, the popping register is available for reuse, and
10657 we know that we still have the stack pointer left to pop. */
10658 regs_available_for_popping |= (1 << frame_pointer);
10662 /* If we still have registers left on the stack, but we no longer have
10663 any registers into which we can pop them, then we must move the return
10664 address into the link register and make available the register that
10665 contained it. */
10666 if (regs_available_for_popping == 0 && pops_needed > 0)
10668 regs_available_for_popping |= 1 << reg_containing_return_addr;
10670 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM,
10671 reg_containing_return_addr);
10673 reg_containing_return_addr = LR_REGNUM;
10676 /* If we have registers left on the stack then pop some more.
10677 We know that at most we will want to pop FP and SP. */
10678 if (pops_needed > 0)
10680 int popped_into;
10681 int move_to;
10683 thumb_pushpop (f, regs_available_for_popping, FALSE);
10685 /* We have popped either FP or SP.
10686 Move whichever one it is into the correct register. */
10687 popped_into = number_of_first_bit_set (regs_available_for_popping);
10688 move_to = number_of_first_bit_set (regs_to_pop);
10690 asm_fprintf (f, "\tmov\t%r, %r\n", move_to, popped_into);
10692 regs_to_pop &= ~(1 << move_to);
10694 --pops_needed;
10697 /* If we still have not popped everything then we must have only
10698 had one register available to us and we are now popping the SP. */
10699 if (pops_needed > 0)
10701 int popped_into;
10703 thumb_pushpop (f, regs_available_for_popping, FALSE);
10705 popped_into = number_of_first_bit_set (regs_available_for_popping);
10707 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, popped_into);
10709 assert (regs_to_pop == (1 << STACK_POINTER))
10710 assert (pops_needed == 1)
10714 /* If necessary restore the a4 register. */
10715 if (restore_a4)
10717 if (reg_containing_return_addr != LR_REGNUM)
10719 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
10720 reg_containing_return_addr = LR_REGNUM;
10723 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
10726 if (eh_ofs)
10727 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
10729 /* Return to caller. */
10730 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
10733 /* Emit code to push or pop registers to or from the stack. */
10735 static void
10736 thumb_pushpop (f, mask, push)
10737 FILE * f;
10738 int mask;
10739 int push;
10741 int regno;
10742 int lo_mask = mask & 0xFF;
10744 if (lo_mask == 0 && !push && (mask & (1 << 15)))
10746 /* Special case. Do not generate a POP PC statement here, do it in
10747 thumb_exit() */
10748 thumb_exit (f, -1, NULL_RTX);
10749 return;
10752 fprintf (f, "\t%s\t{", push ? "push" : "pop");
10754 /* Look at the low registers first. */
10755 for (regno = 0; regno <= LAST_LO_REGNUM; regno++, lo_mask >>= 1)
10757 if (lo_mask & 1)
10759 asm_fprintf (f, "%r", regno);
10761 if ((lo_mask & ~1) != 0)
10762 fprintf (f, ", ");
10766 if (push && (mask & (1 << LR_REGNUM)))
10768 /* Catch pushing the LR. */
10769 if (mask & 0xFF)
10770 fprintf (f, ", ");
10772 asm_fprintf (f, "%r", LR_REGNUM);
10774 else if (!push && (mask & (1 << PC_REGNUM)))
10776 /* Catch popping the PC. */
10777 if (TARGET_INTERWORK || TARGET_BACKTRACE)
10779 /* The PC is never poped directly, instead
10780 it is popped into r3 and then BX is used. */
10781 fprintf (f, "}\n");
10783 thumb_exit (f, -1, NULL_RTX);
10785 return;
10787 else
10789 if (mask & 0xFF)
10790 fprintf (f, ", ");
10792 asm_fprintf (f, "%r", PC_REGNUM);
10796 fprintf (f, "}\n");
10799 void
10800 thumb_final_prescan_insn (insn)
10801 rtx insn;
10803 if (flag_print_asm_name)
10804 asm_fprintf (asm_out_file, "%@ 0x%04x\n",
10805 INSN_ADDRESSES (INSN_UID (insn)));
10809 thumb_shiftable_const (val)
10810 unsigned HOST_WIDE_INT val;
10812 unsigned HOST_WIDE_INT mask = 0xff;
10813 int i;
10815 if (val == 0) /* XXX */
10816 return 0;
10818 for (i = 0; i < 25; i++)
10819 if ((val & (mask << i)) == val)
10820 return 1;
10822 return 0;
10825 /* Returns nonzero if the current function contains,
10826 or might contain a far jump. */
10829 thumb_far_jump_used_p (in_prologue)
10830 int in_prologue;
10832 rtx insn;
10834 /* This test is only important for leaf functions. */
10835 /* assert (!leaf_function_p ()); */
10837 /* If we have already decided that far jumps may be used,
10838 do not bother checking again, and always return true even if
10839 it turns out that they are not being used. Once we have made
10840 the decision that far jumps are present (and that hence the link
10841 register will be pushed onto the stack) we cannot go back on it. */
10842 if (cfun->machine->far_jump_used)
10843 return 1;
10845 /* If this function is not being called from the prologue/epilogue
10846 generation code then it must be being called from the
10847 INITIAL_ELIMINATION_OFFSET macro. */
10848 if (!in_prologue)
10850 /* In this case we know that we are being asked about the elimination
10851 of the arg pointer register. If that register is not being used,
10852 then there are no arguments on the stack, and we do not have to
10853 worry that a far jump might force the prologue to push the link
10854 register, changing the stack offsets. In this case we can just
10855 return false, since the presence of far jumps in the function will
10856 not affect stack offsets.
10858 If the arg pointer is live (or if it was live, but has now been
10859 eliminated and so set to dead) then we do have to test to see if
10860 the function might contain a far jump. This test can lead to some
10861 false negatives, since before reload is completed, then length of
10862 branch instructions is not known, so gcc defaults to returning their
10863 longest length, which in turn sets the far jump attribute to true.
10865 A false negative will not result in bad code being generated, but it
10866 will result in a needless push and pop of the link register. We
10867 hope that this does not occur too often. */
10868 if (regs_ever_live [ARG_POINTER_REGNUM])
10869 cfun->machine->arg_pointer_live = 1;
10870 else if (!cfun->machine->arg_pointer_live)
10871 return 0;
10874 /* Check to see if the function contains a branch
10875 insn with the far jump attribute set. */
10876 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
10878 if (GET_CODE (insn) == JUMP_INSN
10879 /* Ignore tablejump patterns. */
10880 && GET_CODE (PATTERN (insn)) != ADDR_VEC
10881 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
10882 && get_attr_far_jump (insn) == FAR_JUMP_YES
10885 /* Record the fact that we have decied that
10886 the function does use far jumps. */
10887 cfun->machine->far_jump_used = 1;
10888 return 1;
10892 return 0;
10895 /* Return nonzero if FUNC must be entered in ARM mode. */
10898 is_called_in_ARM_mode (func)
10899 tree func;
10901 if (TREE_CODE (func) != FUNCTION_DECL)
10902 abort ();
10904 /* Ignore the problem about functions whoes address is taken. */
10905 if (TARGET_CALLEE_INTERWORKING && TREE_PUBLIC (func))
10906 return TRUE;
10908 #ifdef ARM_PE
10909 return lookup_attribute ("interfacearm", DECL_ATTRIBUTES (func)) != NULL_TREE;
10910 #else
10911 return FALSE;
10912 #endif
10915 /* The bits which aren't usefully expanded as rtl. */
10917 const char *
10918 thumb_unexpanded_epilogue ()
10920 int regno;
10921 int live_regs_mask = 0;
10922 int high_regs_pushed = 0;
10923 int leaf_function = leaf_function_p ();
10924 int had_to_push_lr;
10925 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
10927 if (return_used_this_function)
10928 return "";
10930 if (IS_NAKED (arm_current_func_type ()))
10931 return "";
10933 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
10934 if (THUMB_REG_PUSHED_P (regno))
10935 live_regs_mask |= 1 << regno;
10937 for (regno = 8; regno < 13; regno++)
10938 if (THUMB_REG_PUSHED_P (regno))
10939 high_regs_pushed++;
10941 /* The prolog may have pushed some high registers to use as
10942 work registers. eg the testuite file:
10943 gcc/testsuite/gcc/gcc.c-torture/execute/complex-2.c
10944 compiles to produce:
10945 push {r4, r5, r6, r7, lr}
10946 mov r7, r9
10947 mov r6, r8
10948 push {r6, r7}
10949 as part of the prolog. We have to undo that pushing here. */
10951 if (high_regs_pushed)
10953 int mask = live_regs_mask;
10954 int next_hi_reg;
10955 int size;
10956 int mode;
10958 #ifdef RTX_CODE
10959 /* If we can deduce the registers used from the function's return value.
10960 This is more reliable that examining regs_ever_live[] because that
10961 will be set if the register is ever used in the function, not just if
10962 the register is used to hold a return value. */
10964 if (current_function_return_rtx != 0)
10965 mode = GET_MODE (current_function_return_rtx);
10966 else
10967 #endif
10968 mode = DECL_MODE (DECL_RESULT (current_function_decl));
10970 size = GET_MODE_SIZE (mode);
10972 /* Unless we are returning a type of size > 12 register r3 is
10973 available. */
10974 if (size < 13)
10975 mask |= 1 << 3;
10977 if (mask == 0)
10978 /* Oh dear! We have no low registers into which we can pop
10979 high registers! */
10980 internal_error
10981 ("no low registers available for popping high registers");
10983 for (next_hi_reg = 8; next_hi_reg < 13; next_hi_reg++)
10984 if (THUMB_REG_PUSHED_P (next_hi_reg))
10985 break;
10987 while (high_regs_pushed)
10989 /* Find lo register(s) into which the high register(s) can
10990 be popped. */
10991 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
10993 if (mask & (1 << regno))
10994 high_regs_pushed--;
10995 if (high_regs_pushed == 0)
10996 break;
10999 mask &= (2 << regno) - 1; /* A noop if regno == 8 */
11001 /* Pop the values into the low register(s). */
11002 thumb_pushpop (asm_out_file, mask, 0);
11004 /* Move the value(s) into the high registers. */
11005 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
11007 if (mask & (1 << regno))
11009 asm_fprintf (asm_out_file, "\tmov\t%r, %r\n", next_hi_reg,
11010 regno);
11012 for (next_hi_reg++; next_hi_reg < 13; next_hi_reg++)
11013 if (THUMB_REG_PUSHED_P (next_hi_reg))
11014 break;
11020 had_to_push_lr = (live_regs_mask || !leaf_function
11021 || thumb_far_jump_used_p (1));
11023 if (TARGET_BACKTRACE
11024 && ((live_regs_mask & 0xFF) == 0)
11025 && regs_ever_live [LAST_ARG_REGNUM] != 0)
11027 /* The stack backtrace structure creation code had to
11028 push R7 in order to get a work register, so we pop
11029 it now. */
11030 live_regs_mask |= (1 << LAST_LO_REGNUM);
11033 if (current_function_pretend_args_size == 0 || TARGET_BACKTRACE)
11035 if (had_to_push_lr
11036 && !is_called_in_ARM_mode (current_function_decl)
11037 && !eh_ofs)
11038 live_regs_mask |= 1 << PC_REGNUM;
11040 /* Either no argument registers were pushed or a backtrace
11041 structure was created which includes an adjusted stack
11042 pointer, so just pop everything. */
11043 if (live_regs_mask)
11044 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
11046 if (eh_ofs)
11047 thumb_exit (asm_out_file, 2, eh_ofs);
11048 /* We have either just popped the return address into the
11049 PC or it is was kept in LR for the entire function or
11050 it is still on the stack because we do not want to
11051 return by doing a pop {pc}. */
11052 else if ((live_regs_mask & (1 << PC_REGNUM)) == 0)
11053 thumb_exit (asm_out_file,
11054 (had_to_push_lr
11055 && is_called_in_ARM_mode (current_function_decl)) ?
11056 -1 : LR_REGNUM, NULL_RTX);
11058 else
11060 /* Pop everything but the return address. */
11061 live_regs_mask &= ~(1 << PC_REGNUM);
11063 if (live_regs_mask)
11064 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
11066 if (had_to_push_lr)
11067 /* Get the return address into a temporary register. */
11068 thumb_pushpop (asm_out_file, 1 << LAST_ARG_REGNUM, 0);
11070 /* Remove the argument registers that were pushed onto the stack. */
11071 asm_fprintf (asm_out_file, "\tadd\t%r, %r, #%d\n",
11072 SP_REGNUM, SP_REGNUM,
11073 current_function_pretend_args_size);
11075 if (eh_ofs)
11076 thumb_exit (asm_out_file, 2, eh_ofs);
11077 else
11078 thumb_exit (asm_out_file,
11079 had_to_push_lr ? LAST_ARG_REGNUM : LR_REGNUM, NULL_RTX);
11082 return "";
11085 /* Functions to save and restore machine-specific function data. */
11087 static struct machine_function *
11088 arm_init_machine_status ()
11090 struct machine_function *machine;
11091 machine = (machine_function *) ggc_alloc_cleared (sizeof (machine_function));
11093 #if ARM_FT_UNKNOWN != 0
11094 machine->func_type = ARM_FT_UNKNOWN;
11095 #endif
11096 return machine;
11099 /* Return an RTX indicating where the return address to the
11100 calling function can be found. */
11103 arm_return_addr (count, frame)
11104 int count;
11105 rtx frame ATTRIBUTE_UNUSED;
11107 if (count != 0)
11108 return NULL_RTX;
11110 if (TARGET_APCS_32)
11111 return get_hard_reg_initial_val (Pmode, LR_REGNUM);
11112 else
11114 rtx lr = gen_rtx_AND (Pmode, gen_rtx_REG (Pmode, LR_REGNUM),
11115 GEN_INT (RETURN_ADDR_MASK26));
11116 return get_func_hard_reg_initial_val (cfun, lr);
11120 /* Do anything needed before RTL is emitted for each function. */
11122 void
11123 arm_init_expanders ()
11125 /* Arrange to initialize and mark the machine per-function status. */
11126 init_machine_status = arm_init_machine_status;
11129 HOST_WIDE_INT
11130 thumb_get_frame_size ()
11132 int regno;
11134 int base_size = ROUND_UP_WORD (get_frame_size ());
11135 int count_regs = 0;
11136 int entry_size = 0;
11137 int leaf;
11139 if (! TARGET_THUMB)
11140 abort ();
11142 if (! TARGET_ATPCS)
11143 return base_size;
11145 /* We need to know if we are a leaf function. Unfortunately, it
11146 is possible to be called after start_sequence has been called,
11147 which causes get_insns to return the insns for the sequence,
11148 not the function, which will cause leaf_function_p to return
11149 the incorrect result.
11151 To work around this, we cache the computed frame size. This
11152 works because we will only be calling RTL expanders that need
11153 to know about leaf functions once reload has completed, and the
11154 frame size cannot be changed after that time, so we can safely
11155 use the cached value. */
11157 if (reload_completed)
11158 return cfun->machine->frame_size;
11160 leaf = leaf_function_p ();
11162 /* A leaf function does not need any stack alignment if it has nothing
11163 on the stack. */
11164 if (leaf && base_size == 0)
11166 cfun->machine->frame_size = 0;
11167 return 0;
11170 /* We know that SP will be word aligned on entry, and we must
11171 preserve that condition at any subroutine call. But those are
11172 the only constraints. */
11174 /* Space for variadic functions. */
11175 if (current_function_pretend_args_size)
11176 entry_size += current_function_pretend_args_size;
11178 /* Space for pushed lo registers. */
11179 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
11180 if (THUMB_REG_PUSHED_P (regno))
11181 count_regs++;
11183 /* Space for backtrace structure. */
11184 if (TARGET_BACKTRACE)
11186 if (count_regs == 0 && regs_ever_live[LAST_ARG_REGNUM] != 0)
11187 entry_size += 20;
11188 else
11189 entry_size += 16;
11192 if (count_regs || !leaf || thumb_far_jump_used_p (1))
11193 count_regs++; /* LR */
11195 entry_size += count_regs * 4;
11196 count_regs = 0;
11198 /* Space for pushed hi regs. */
11199 for (regno = 8; regno < 13; regno++)
11200 if (THUMB_REG_PUSHED_P (regno))
11201 count_regs++;
11203 entry_size += count_regs * 4;
11205 if ((entry_size + base_size + current_function_outgoing_args_size) & 7)
11206 base_size += 4;
11207 if ((entry_size + base_size + current_function_outgoing_args_size) & 7)
11208 abort ();
11210 cfun->machine->frame_size = base_size;
11212 return base_size;
11215 /* Generate the rest of a function's prologue. */
11217 void
11218 thumb_expand_prologue ()
11220 HOST_WIDE_INT amount = (thumb_get_frame_size ()
11221 + current_function_outgoing_args_size);
11222 unsigned long func_type;
11224 func_type = arm_current_func_type ();
11226 /* Naked functions don't have prologues. */
11227 if (IS_NAKED (func_type))
11228 return;
11230 if (IS_INTERRUPT (func_type))
11232 error ("interrupt Service Routines cannot be coded in Thumb mode");
11233 return;
11236 if (frame_pointer_needed)
11237 emit_insn (gen_movsi (hard_frame_pointer_rtx, stack_pointer_rtx));
11239 if (amount)
11241 amount = ROUND_UP_WORD (amount);
11243 if (amount < 512)
11244 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
11245 GEN_INT (- amount)));
11246 else
11248 int regno;
11249 rtx reg;
11251 /* The stack decrement is too big for an immediate value in a single
11252 insn. In theory we could issue multiple subtracts, but after
11253 three of them it becomes more space efficient to place the full
11254 value in the constant pool and load into a register. (Also the
11255 ARM debugger really likes to see only one stack decrement per
11256 function). So instead we look for a scratch register into which
11257 we can load the decrement, and then we subtract this from the
11258 stack pointer. Unfortunately on the thumb the only available
11259 scratch registers are the argument registers, and we cannot use
11260 these as they may hold arguments to the function. Instead we
11261 attempt to locate a call preserved register which is used by this
11262 function. If we can find one, then we know that it will have
11263 been pushed at the start of the prologue and so we can corrupt
11264 it now. */
11265 for (regno = LAST_ARG_REGNUM + 1; regno <= LAST_LO_REGNUM; regno++)
11266 if (THUMB_REG_PUSHED_P (regno)
11267 && !(frame_pointer_needed
11268 && (regno == THUMB_HARD_FRAME_POINTER_REGNUM)))
11269 break;
11271 if (regno > LAST_LO_REGNUM) /* Very unlikely. */
11273 rtx spare = gen_rtx (REG, SImode, IP_REGNUM);
11275 /* Choose an arbitrary, non-argument low register. */
11276 reg = gen_rtx (REG, SImode, LAST_LO_REGNUM);
11278 /* Save it by copying it into a high, scratch register. */
11279 emit_insn (gen_movsi (spare, reg));
11280 /* Add a USE to stop propagate_one_insn() from barfing. */
11281 emit_insn (gen_prologue_use (spare));
11283 /* Decrement the stack. */
11284 emit_insn (gen_movsi (reg, GEN_INT (- amount)));
11285 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
11286 reg));
11288 /* Restore the low register's original value. */
11289 emit_insn (gen_movsi (reg, spare));
11291 /* Emit a USE of the restored scratch register, so that flow
11292 analysis will not consider the restore redundant. The
11293 register won't be used again in this function and isn't
11294 restored by the epilogue. */
11295 emit_insn (gen_prologue_use (reg));
11297 else
11299 reg = gen_rtx (REG, SImode, regno);
11301 emit_insn (gen_movsi (reg, GEN_INT (- amount)));
11302 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
11303 reg));
11308 if (current_function_profile || TARGET_NO_SCHED_PRO)
11309 emit_insn (gen_blockage ());
11312 void
11313 thumb_expand_epilogue ()
11315 HOST_WIDE_INT amount = (thumb_get_frame_size ()
11316 + current_function_outgoing_args_size);
11318 /* Naked functions don't have prologues. */
11319 if (IS_NAKED (arm_current_func_type ()))
11320 return;
11322 if (frame_pointer_needed)
11323 emit_insn (gen_movsi (stack_pointer_rtx, hard_frame_pointer_rtx));
11324 else if (amount)
11326 amount = ROUND_UP_WORD (amount);
11328 if (amount < 512)
11329 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
11330 GEN_INT (amount)));
11331 else
11333 /* r3 is always free in the epilogue. */
11334 rtx reg = gen_rtx (REG, SImode, LAST_ARG_REGNUM);
11336 emit_insn (gen_movsi (reg, GEN_INT (amount)));
11337 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
11341 /* Emit a USE (stack_pointer_rtx), so that
11342 the stack adjustment will not be deleted. */
11343 emit_insn (gen_prologue_use (stack_pointer_rtx));
11345 if (current_function_profile || TARGET_NO_SCHED_PRO)
11346 emit_insn (gen_blockage ());
11349 static void
11350 thumb_output_function_prologue (f, size)
11351 FILE * f;
11352 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
11354 int live_regs_mask = 0;
11355 int high_regs_pushed = 0;
11356 int regno;
11358 if (IS_NAKED (arm_current_func_type ()))
11359 return;
11361 if (is_called_in_ARM_mode (current_function_decl))
11363 const char * name;
11365 if (GET_CODE (DECL_RTL (current_function_decl)) != MEM)
11366 abort ();
11367 if (GET_CODE (XEXP (DECL_RTL (current_function_decl), 0)) != SYMBOL_REF)
11368 abort ();
11369 name = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
11371 /* Generate code sequence to switch us into Thumb mode. */
11372 /* The .code 32 directive has already been emitted by
11373 ASM_DECLARE_FUNCTION_NAME. */
11374 asm_fprintf (f, "\torr\t%r, %r, #1\n", IP_REGNUM, PC_REGNUM);
11375 asm_fprintf (f, "\tbx\t%r\n", IP_REGNUM);
11377 /* Generate a label, so that the debugger will notice the
11378 change in instruction sets. This label is also used by
11379 the assembler to bypass the ARM code when this function
11380 is called from a Thumb encoded function elsewhere in the
11381 same file. Hence the definition of STUB_NAME here must
11382 agree with the definition in gas/config/tc-arm.c */
11384 #define STUB_NAME ".real_start_of"
11386 fprintf (f, "\t.code\t16\n");
11387 #ifdef ARM_PE
11388 if (arm_dllexport_name_p (name))
11389 name = arm_strip_name_encoding (name);
11390 #endif
11391 asm_fprintf (f, "\t.globl %s%U%s\n", STUB_NAME, name);
11392 fprintf (f, "\t.thumb_func\n");
11393 asm_fprintf (f, "%s%U%s:\n", STUB_NAME, name);
11396 if (current_function_pretend_args_size)
11398 if (cfun->machine->uses_anonymous_args)
11400 int num_pushes;
11402 fprintf (f, "\tpush\t{");
11404 num_pushes = ARM_NUM_INTS (current_function_pretend_args_size);
11406 for (regno = LAST_ARG_REGNUM + 1 - num_pushes;
11407 regno <= LAST_ARG_REGNUM;
11408 regno++)
11409 asm_fprintf (f, "%r%s", regno,
11410 regno == LAST_ARG_REGNUM ? "" : ", ");
11412 fprintf (f, "}\n");
11414 else
11415 asm_fprintf (f, "\tsub\t%r, %r, #%d\n",
11416 SP_REGNUM, SP_REGNUM,
11417 current_function_pretend_args_size);
11420 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
11421 if (THUMB_REG_PUSHED_P (regno))
11422 live_regs_mask |= 1 << regno;
11424 if (live_regs_mask || !leaf_function_p () || thumb_far_jump_used_p (1))
11425 live_regs_mask |= 1 << LR_REGNUM;
11427 if (TARGET_BACKTRACE)
11429 int offset;
11430 int work_register = 0;
11431 int wr;
11433 /* We have been asked to create a stack backtrace structure.
11434 The code looks like this:
11436 0 .align 2
11437 0 func:
11438 0 sub SP, #16 Reserve space for 4 registers.
11439 2 push {R7} Get a work register.
11440 4 add R7, SP, #20 Get the stack pointer before the push.
11441 6 str R7, [SP, #8] Store the stack pointer (before reserving the space).
11442 8 mov R7, PC Get hold of the start of this code plus 12.
11443 10 str R7, [SP, #16] Store it.
11444 12 mov R7, FP Get hold of the current frame pointer.
11445 14 str R7, [SP, #4] Store it.
11446 16 mov R7, LR Get hold of the current return address.
11447 18 str R7, [SP, #12] Store it.
11448 20 add R7, SP, #16 Point at the start of the backtrace structure.
11449 22 mov FP, R7 Put this value into the frame pointer. */
11451 if ((live_regs_mask & 0xFF) == 0)
11453 /* See if the a4 register is free. */
11455 if (regs_ever_live [LAST_ARG_REGNUM] == 0)
11456 work_register = LAST_ARG_REGNUM;
11457 else /* We must push a register of our own */
11458 live_regs_mask |= (1 << LAST_LO_REGNUM);
11461 if (work_register == 0)
11463 /* Select a register from the list that will be pushed to
11464 use as our work register. */
11465 for (work_register = (LAST_LO_REGNUM + 1); work_register--;)
11466 if ((1 << work_register) & live_regs_mask)
11467 break;
11470 asm_fprintf
11471 (f, "\tsub\t%r, %r, #16\t%@ Create stack backtrace structure\n",
11472 SP_REGNUM, SP_REGNUM);
11474 if (live_regs_mask)
11475 thumb_pushpop (f, live_regs_mask, 1);
11477 for (offset = 0, wr = 1 << 15; wr != 0; wr >>= 1)
11478 if (wr & live_regs_mask)
11479 offset += 4;
11481 asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
11482 offset + 16 + current_function_pretend_args_size);
11484 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
11485 offset + 4);
11487 /* Make sure that the instruction fetching the PC is in the right place
11488 to calculate "start of backtrace creation code + 12". */
11489 if (live_regs_mask)
11491 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
11492 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
11493 offset + 12);
11494 asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
11495 ARM_HARD_FRAME_POINTER_REGNUM);
11496 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
11497 offset);
11499 else
11501 asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
11502 ARM_HARD_FRAME_POINTER_REGNUM);
11503 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
11504 offset);
11505 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
11506 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
11507 offset + 12);
11510 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, LR_REGNUM);
11511 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
11512 offset + 8);
11513 asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
11514 offset + 12);
11515 asm_fprintf (f, "\tmov\t%r, %r\t\t%@ Backtrace structure created\n",
11516 ARM_HARD_FRAME_POINTER_REGNUM, work_register);
11518 else if (live_regs_mask)
11519 thumb_pushpop (f, live_regs_mask, 1);
11521 for (regno = 8; regno < 13; regno++)
11522 if (THUMB_REG_PUSHED_P (regno))
11523 high_regs_pushed++;
11525 if (high_regs_pushed)
11527 int pushable_regs = 0;
11528 int mask = live_regs_mask & 0xff;
11529 int next_hi_reg;
11531 for (next_hi_reg = 12; next_hi_reg > LAST_LO_REGNUM; next_hi_reg--)
11532 if (THUMB_REG_PUSHED_P (next_hi_reg))
11533 break;
11535 pushable_regs = mask;
11537 if (pushable_regs == 0)
11539 /* Desperation time -- this probably will never happen. */
11540 if (THUMB_REG_PUSHED_P (LAST_ARG_REGNUM))
11541 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, LAST_ARG_REGNUM);
11542 mask = 1 << LAST_ARG_REGNUM;
11545 while (high_regs_pushed > 0)
11547 for (regno = LAST_LO_REGNUM; regno >= 0; regno--)
11549 if (mask & (1 << regno))
11551 asm_fprintf (f, "\tmov\t%r, %r\n", regno, next_hi_reg);
11553 high_regs_pushed--;
11555 if (high_regs_pushed)
11557 for (next_hi_reg--; next_hi_reg > LAST_LO_REGNUM;
11558 next_hi_reg--)
11559 if (THUMB_REG_PUSHED_P (next_hi_reg))
11560 break;
11562 else
11564 mask &= ~((1 << regno) - 1);
11565 break;
11570 thumb_pushpop (f, mask, 1);
11573 if (pushable_regs == 0
11574 && (THUMB_REG_PUSHED_P (LAST_ARG_REGNUM)))
11575 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
11579 /* Handle the case of a double word load into a low register from
11580 a computed memory address. The computed address may involve a
11581 register which is overwritten by the load. */
11583 const char *
11584 thumb_load_double_from_address (operands)
11585 rtx *operands;
11587 rtx addr;
11588 rtx base;
11589 rtx offset;
11590 rtx arg1;
11591 rtx arg2;
11593 if (GET_CODE (operands[0]) != REG)
11594 abort ();
11596 if (GET_CODE (operands[1]) != MEM)
11597 abort ();
11599 /* Get the memory address. */
11600 addr = XEXP (operands[1], 0);
11602 /* Work out how the memory address is computed. */
11603 switch (GET_CODE (addr))
11605 case REG:
11606 operands[2] = gen_rtx (MEM, SImode,
11607 plus_constant (XEXP (operands[1], 0), 4));
11609 if (REGNO (operands[0]) == REGNO (addr))
11611 output_asm_insn ("ldr\t%H0, %2", operands);
11612 output_asm_insn ("ldr\t%0, %1", operands);
11614 else
11616 output_asm_insn ("ldr\t%0, %1", operands);
11617 output_asm_insn ("ldr\t%H0, %2", operands);
11619 break;
11621 case CONST:
11622 /* Compute <address> + 4 for the high order load. */
11623 operands[2] = gen_rtx (MEM, SImode,
11624 plus_constant (XEXP (operands[1], 0), 4));
11626 output_asm_insn ("ldr\t%0, %1", operands);
11627 output_asm_insn ("ldr\t%H0, %2", operands);
11628 break;
11630 case PLUS:
11631 arg1 = XEXP (addr, 0);
11632 arg2 = XEXP (addr, 1);
11634 if (CONSTANT_P (arg1))
11635 base = arg2, offset = arg1;
11636 else
11637 base = arg1, offset = arg2;
11639 if (GET_CODE (base) != REG)
11640 abort ();
11642 /* Catch the case of <address> = <reg> + <reg> */
11643 if (GET_CODE (offset) == REG)
11645 int reg_offset = REGNO (offset);
11646 int reg_base = REGNO (base);
11647 int reg_dest = REGNO (operands[0]);
11649 /* Add the base and offset registers together into the
11650 higher destination register. */
11651 asm_fprintf (asm_out_file, "\tadd\t%r, %r, %r",
11652 reg_dest + 1, reg_base, reg_offset);
11654 /* Load the lower destination register from the address in
11655 the higher destination register. */
11656 asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #0]",
11657 reg_dest, reg_dest + 1);
11659 /* Load the higher destination register from its own address
11660 plus 4. */
11661 asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #4]",
11662 reg_dest + 1, reg_dest + 1);
11664 else
11666 /* Compute <address> + 4 for the high order load. */
11667 operands[2] = gen_rtx (MEM, SImode,
11668 plus_constant (XEXP (operands[1], 0), 4));
11670 /* If the computed address is held in the low order register
11671 then load the high order register first, otherwise always
11672 load the low order register first. */
11673 if (REGNO (operands[0]) == REGNO (base))
11675 output_asm_insn ("ldr\t%H0, %2", operands);
11676 output_asm_insn ("ldr\t%0, %1", operands);
11678 else
11680 output_asm_insn ("ldr\t%0, %1", operands);
11681 output_asm_insn ("ldr\t%H0, %2", operands);
11684 break;
11686 case LABEL_REF:
11687 /* With no registers to worry about we can just load the value
11688 directly. */
11689 operands[2] = gen_rtx (MEM, SImode,
11690 plus_constant (XEXP (operands[1], 0), 4));
11692 output_asm_insn ("ldr\t%H0, %2", operands);
11693 output_asm_insn ("ldr\t%0, %1", operands);
11694 break;
11696 default:
11697 abort ();
11698 break;
11701 return "";
11705 const char *
11706 thumb_output_move_mem_multiple (n, operands)
11707 int n;
11708 rtx * operands;
11710 rtx tmp;
11712 switch (n)
11714 case 2:
11715 if (REGNO (operands[4]) > REGNO (operands[5]))
11717 tmp = operands[4];
11718 operands[4] = operands[5];
11719 operands[5] = tmp;
11721 output_asm_insn ("ldmia\t%1!, {%4, %5}", operands);
11722 output_asm_insn ("stmia\t%0!, {%4, %5}", operands);
11723 break;
11725 case 3:
11726 if (REGNO (operands[4]) > REGNO (operands[5]))
11728 tmp = operands[4];
11729 operands[4] = operands[5];
11730 operands[5] = tmp;
11732 if (REGNO (operands[5]) > REGNO (operands[6]))
11734 tmp = operands[5];
11735 operands[5] = operands[6];
11736 operands[6] = tmp;
11738 if (REGNO (operands[4]) > REGNO (operands[5]))
11740 tmp = operands[4];
11741 operands[4] = operands[5];
11742 operands[5] = tmp;
11745 output_asm_insn ("ldmia\t%1!, {%4, %5, %6}", operands);
11746 output_asm_insn ("stmia\t%0!, {%4, %5, %6}", operands);
11747 break;
11749 default:
11750 abort ();
11753 return "";
11756 /* Routines for generating rtl. */
11758 void
11759 thumb_expand_movstrqi (operands)
11760 rtx * operands;
11762 rtx out = copy_to_mode_reg (SImode, XEXP (operands[0], 0));
11763 rtx in = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
11764 HOST_WIDE_INT len = INTVAL (operands[2]);
11765 HOST_WIDE_INT offset = 0;
11767 while (len >= 12)
11769 emit_insn (gen_movmem12b (out, in, out, in));
11770 len -= 12;
11773 if (len >= 8)
11775 emit_insn (gen_movmem8b (out, in, out, in));
11776 len -= 8;
11779 if (len >= 4)
11781 rtx reg = gen_reg_rtx (SImode);
11782 emit_insn (gen_movsi (reg, gen_rtx (MEM, SImode, in)));
11783 emit_insn (gen_movsi (gen_rtx (MEM, SImode, out), reg));
11784 len -= 4;
11785 offset += 4;
11788 if (len >= 2)
11790 rtx reg = gen_reg_rtx (HImode);
11791 emit_insn (gen_movhi (reg, gen_rtx (MEM, HImode,
11792 plus_constant (in, offset))));
11793 emit_insn (gen_movhi (gen_rtx (MEM, HImode, plus_constant (out, offset)),
11794 reg));
11795 len -= 2;
11796 offset += 2;
11799 if (len)
11801 rtx reg = gen_reg_rtx (QImode);
11802 emit_insn (gen_movqi (reg, gen_rtx (MEM, QImode,
11803 plus_constant (in, offset))));
11804 emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (out, offset)),
11805 reg));
11810 thumb_cmp_operand (op, mode)
11811 rtx op;
11812 enum machine_mode mode;
11814 return ((GET_CODE (op) == CONST_INT
11815 && (unsigned HOST_WIDE_INT) (INTVAL (op)) < 256)
11816 || register_operand (op, mode));
11819 static const char *
11820 thumb_condition_code (x, invert)
11821 rtx x;
11822 int invert;
11824 static const char * const conds[] =
11826 "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
11827 "hi", "ls", "ge", "lt", "gt", "le"
11829 int val;
11831 switch (GET_CODE (x))
11833 case EQ: val = 0; break;
11834 case NE: val = 1; break;
11835 case GEU: val = 2; break;
11836 case LTU: val = 3; break;
11837 case GTU: val = 8; break;
11838 case LEU: val = 9; break;
11839 case GE: val = 10; break;
11840 case LT: val = 11; break;
11841 case GT: val = 12; break;
11842 case LE: val = 13; break;
11843 default:
11844 abort ();
11847 return conds[val ^ invert];
11850 /* Handle storing a half-word to memory during reload. */
11852 void
11853 thumb_reload_out_hi (operands)
11854 rtx * operands;
11856 emit_insn (gen_thumb_movhi_clobber (operands[0], operands[1], operands[2]));
11859 /* Handle storing a half-word to memory during reload. */
11861 void
11862 thumb_reload_in_hi (operands)
11863 rtx * operands ATTRIBUTE_UNUSED;
11865 abort ();
11868 /* Return the length of a function name prefix
11869 that starts with the character 'c'. */
11871 static int
11872 arm_get_strip_length (c)
11873 int c;
11875 switch (c)
11877 ARM_NAME_ENCODING_LENGTHS
11878 default: return 0;
11882 /* Return a pointer to a function's name with any
11883 and all prefix encodings stripped from it. */
11885 const char *
11886 arm_strip_name_encoding (name)
11887 const char * name;
11889 int skip;
11891 while ((skip = arm_get_strip_length (* name)))
11892 name += skip;
11894 return name;
11897 /* If there is a '*' anywhere in the name's prefix, then
11898 emit the stripped name verbatim, otherwise prepend an
11899 underscore if leading underscores are being used. */
11901 void
11902 arm_asm_output_labelref (stream, name)
11903 FILE * stream;
11904 const char * name;
11906 int skip;
11907 int verbatim = 0;
11909 while ((skip = arm_get_strip_length (* name)))
11911 verbatim |= (*name == '*');
11912 name += skip;
11915 if (verbatim)
11916 fputs (name, stream);
11917 else
11918 asm_fprintf (stream, "%U%s", name);
11921 rtx aof_pic_label;
11923 #ifdef AOF_ASSEMBLER
11924 /* Special functions only needed when producing AOF syntax assembler. */
11926 struct pic_chain
11928 struct pic_chain * next;
11929 const char * symname;
11932 static struct pic_chain * aof_pic_chain = NULL;
11935 aof_pic_entry (x)
11936 rtx x;
11938 struct pic_chain ** chainp;
11939 int offset;
11941 if (aof_pic_label == NULL_RTX)
11943 aof_pic_label = gen_rtx_SYMBOL_REF (Pmode, "x$adcons");
11946 for (offset = 0, chainp = &aof_pic_chain; *chainp;
11947 offset += 4, chainp = &(*chainp)->next)
11948 if ((*chainp)->symname == XSTR (x, 0))
11949 return plus_constant (aof_pic_label, offset);
11951 *chainp = (struct pic_chain *) xmalloc (sizeof (struct pic_chain));
11952 (*chainp)->next = NULL;
11953 (*chainp)->symname = XSTR (x, 0);
11954 return plus_constant (aof_pic_label, offset);
11957 void
11958 aof_dump_pic_table (f)
11959 FILE * f;
11961 struct pic_chain * chain;
11963 if (aof_pic_chain == NULL)
11964 return;
11966 asm_fprintf (f, "\tAREA |%r$$adcons|, BASED %r\n",
11967 PIC_OFFSET_TABLE_REGNUM,
11968 PIC_OFFSET_TABLE_REGNUM);
11969 fputs ("|x$adcons|\n", f);
11971 for (chain = aof_pic_chain; chain; chain = chain->next)
11973 fputs ("\tDCD\t", f);
11974 assemble_name (f, chain->symname);
11975 fputs ("\n", f);
11979 int arm_text_section_count = 1;
11981 char *
11982 aof_text_section ()
11984 static char buf[100];
11985 sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
11986 arm_text_section_count++);
11987 if (flag_pic)
11988 strcat (buf, ", PIC, REENTRANT");
11989 return buf;
11992 static int arm_data_section_count = 1;
11994 char *
11995 aof_data_section ()
11997 static char buf[100];
11998 sprintf (buf, "\tAREA |C$$data%d|, DATA", arm_data_section_count++);
11999 return buf;
12002 /* The AOF assembler is religiously strict about declarations of
12003 imported and exported symbols, so that it is impossible to declare
12004 a function as imported near the beginning of the file, and then to
12005 export it later on. It is, however, possible to delay the decision
12006 until all the functions in the file have been compiled. To get
12007 around this, we maintain a list of the imports and exports, and
12008 delete from it any that are subsequently defined. At the end of
12009 compilation we spit the remainder of the list out before the END
12010 directive. */
12012 struct import
12014 struct import * next;
12015 const char * name;
12018 static struct import * imports_list = NULL;
12020 void
12021 aof_add_import (name)
12022 const char * name;
12024 struct import * new;
12026 for (new = imports_list; new; new = new->next)
12027 if (new->name == name)
12028 return;
12030 new = (struct import *) xmalloc (sizeof (struct import));
12031 new->next = imports_list;
12032 imports_list = new;
12033 new->name = name;
12036 void
12037 aof_delete_import (name)
12038 const char * name;
12040 struct import ** old;
12042 for (old = &imports_list; *old; old = & (*old)->next)
12044 if ((*old)->name == name)
12046 *old = (*old)->next;
12047 return;
12052 int arm_main_function = 0;
12054 void
12055 aof_dump_imports (f)
12056 FILE * f;
12058 /* The AOF assembler needs this to cause the startup code to be extracted
12059 from the library. Brining in __main causes the whole thing to work
12060 automagically. */
12061 if (arm_main_function)
12063 text_section ();
12064 fputs ("\tIMPORT __main\n", f);
12065 fputs ("\tDCD __main\n", f);
12068 /* Now dump the remaining imports. */
12069 while (imports_list)
12071 fprintf (f, "\tIMPORT\t");
12072 assemble_name (f, imports_list->name);
12073 fputc ('\n', f);
12074 imports_list = imports_list->next;
12078 static void
12079 aof_globalize_label (stream, name)
12080 FILE *stream;
12081 const char *name;
12083 default_globalize_label (stream, name);
12084 if (! strcmp (name, "main"))
12085 arm_main_function = 1;
12087 #endif /* AOF_ASSEMBLER */
12089 #ifdef OBJECT_FORMAT_ELF
12090 /* Switch to an arbitrary section NAME with attributes as specified
12091 by FLAGS. ALIGN specifies any known alignment requirements for
12092 the section; 0 if the default should be used.
12094 Differs from the default elf version only in the prefix character
12095 used before the section type. */
12097 static void
12098 arm_elf_asm_named_section (name, flags)
12099 const char *name;
12100 unsigned int flags;
12102 char flagchars[10], *f = flagchars;
12104 if (! named_section_first_declaration (name))
12106 fprintf (asm_out_file, "\t.section\t%s\n", name);
12107 return;
12110 if (!(flags & SECTION_DEBUG))
12111 *f++ = 'a';
12112 if (flags & SECTION_WRITE)
12113 *f++ = 'w';
12114 if (flags & SECTION_CODE)
12115 *f++ = 'x';
12116 if (flags & SECTION_SMALL)
12117 *f++ = 's';
12118 if (flags & SECTION_MERGE)
12119 *f++ = 'M';
12120 if (flags & SECTION_STRINGS)
12121 *f++ = 'S';
12122 if (flags & SECTION_TLS)
12123 *f++ = 'T';
12124 *f = '\0';
12126 fprintf (asm_out_file, "\t.section\t%s,\"%s\"", name, flagchars);
12128 if (!(flags & SECTION_NOTYPE))
12130 const char *type;
12132 if (flags & SECTION_BSS)
12133 type = "nobits";
12134 else
12135 type = "progbits";
12137 fprintf (asm_out_file, ",%%%s", type);
12139 if (flags & SECTION_ENTSIZE)
12140 fprintf (asm_out_file, ",%d", flags & SECTION_ENTSIZE);
12143 putc ('\n', asm_out_file);
12145 #endif
12147 #ifndef ARM_PE
12148 /* Symbols in the text segment can be accessed without indirecting via the
12149 constant pool; it may take an extra binary operation, but this is still
12150 faster than indirecting via memory. Don't do this when not optimizing,
12151 since we won't be calculating al of the offsets necessary to do this
12152 simplification. */
12154 static void
12155 arm_encode_section_info (decl, first)
12156 tree decl;
12157 int first;
12159 /* This doesn't work with AOF syntax, since the string table may be in
12160 a different AREA. */
12161 #ifndef AOF_ASSEMBLER
12162 if (optimize > 0 && TREE_CONSTANT (decl)
12163 && (!flag_writable_strings || TREE_CODE (decl) != STRING_CST))
12165 rtx rtl = (TREE_CODE_CLASS (TREE_CODE (decl)) != 'd'
12166 ? TREE_CST_RTL (decl) : DECL_RTL (decl));
12167 SYMBOL_REF_FLAG (XEXP (rtl, 0)) = 1;
12169 #endif
12171 /* If we are referencing a function that is weak then encode a long call
12172 flag in the function name, otherwise if the function is static or
12173 or known to be defined in this file then encode a short call flag. */
12174 if (first && TREE_CODE_CLASS (TREE_CODE (decl)) == 'd')
12176 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_WEAK (decl))
12177 arm_encode_call_attribute (decl, LONG_CALL_FLAG_CHAR);
12178 else if (! TREE_PUBLIC (decl))
12179 arm_encode_call_attribute (decl, SHORT_CALL_FLAG_CHAR);
12182 #endif /* !ARM_PE */
12184 static void
12185 arm_internal_label (stream, prefix, labelno)
12186 FILE *stream;
12187 const char *prefix;
12188 unsigned long labelno;
12190 if (arm_ccfsm_state == 3 && (unsigned) arm_target_label == labelno
12191 && !strcmp (prefix, "L"))
12193 arm_ccfsm_state = 0;
12194 arm_target_insn = NULL;
12196 default_internal_label (stream, prefix, labelno);
12199 /* Output code to add DELTA to the first argument, and then jump
12200 to FUNCTION. Used for C++ multiple inheritance. */
12202 static void
12203 arm_output_mi_thunk (file, thunk, delta, vcall_offset, function)
12204 FILE *file;
12205 tree thunk ATTRIBUTE_UNUSED;
12206 HOST_WIDE_INT delta;
12207 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED;
12208 tree function;
12210 int mi_delta = delta;
12211 const char *const mi_op = mi_delta < 0 ? "sub" : "add";
12212 int shift = 0;
12213 int this_regno = (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)))
12214 ? 1 : 0);
12215 if (mi_delta < 0)
12216 mi_delta = - mi_delta;
12217 while (mi_delta != 0)
12219 if ((mi_delta & (3 << shift)) == 0)
12220 shift += 2;
12221 else
12223 asm_fprintf (file, "\t%s\t%r, %r, #%d\n",
12224 mi_op, this_regno, this_regno,
12225 mi_delta & (0xff << shift));
12226 mi_delta &= ~(0xff << shift);
12227 shift += 8;
12230 fputs ("\tb\t", file);
12231 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
12232 if (NEED_PLT_RELOC)
12233 fputs ("(PLT)", file);
12234 fputc ('\n', file);