/* Output routines for GCC for ARM.
   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
   2001, 2002, 2003  Free Software Foundation, Inc.
   Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
   and Martin Simmons (@harleqn.co.uk).
   More major hacks by Richard Earnshaw (rearnsha@arm.com).

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 2, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING.  If not, write to
   the Free Software Foundation, 59 Temple Place - Suite 330,
   Boston, MA 02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "reload.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "toplev.h"
#include "recog.h"
#include "ggc.h"
#include "except.h"
#include "c-pragma.h"
#include "integrate.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"

/* Forward definitions of types.  */
typedef struct minipool_node    Mnode;
typedef struct minipool_fixup   Mfix;

/* In order to improve the layout of the prototypes below
   some short type abbreviations are defined here.  */
#define Hint    HOST_WIDE_INT
#define Mmode   enum machine_mode
#define Ulong   unsigned long
#define Ccstar  const char *
const char extra_reg_names1[][16] =
{ "mv0", "mv1", "mv2", "mv3", "mv4", "mv5", "mv6", "mv7",
  "mv8", "mv9", "mv10", "mv11", "mv12", "mv13", "mv14", "mv15"
};
#define extra_reg_names1 bogus1_regnames

const struct attribute_spec arm_attribute_table[];
/* Forward function declarations.  */
static void arm_add_gc_roots            PARAMS ((void));
static int arm_gen_constant             PARAMS ((enum rtx_code, Mmode, Hint, rtx, rtx, int, int));
static unsigned bit_count               PARAMS ((Ulong));
static int arm_address_register_rtx_p   PARAMS ((rtx, int));
static int arm_legitimate_index_p       PARAMS ((enum machine_mode, rtx, int));
static int thumb_base_register_rtx_p    PARAMS ((rtx, enum machine_mode, int));
inline static int thumb_index_register_rtx_p PARAMS ((rtx, int));
static int const_ok_for_op              PARAMS ((Hint, enum rtx_code));
static int eliminate_lr2ip              PARAMS ((rtx *));
static rtx emit_multi_reg_push          PARAMS ((int));
static rtx emit_sfm                     PARAMS ((int, int));
#ifndef AOF_ASSEMBLER
static bool arm_assemble_integer        PARAMS ((rtx, unsigned int, int));
#endif
static Ccstar fp_const_from_val         PARAMS ((REAL_VALUE_TYPE *));
static arm_cc get_arm_condition_code    PARAMS ((rtx));
static void init_fpa_table              PARAMS ((void));
static Hint int_log2                    PARAMS ((Hint));
static rtx is_jump_table                PARAMS ((rtx));
static Ccstar output_multi_immediate    PARAMS ((rtx *, Ccstar, Ccstar, int, Hint));
static void print_multi_reg             PARAMS ((FILE *, Ccstar, int, int));
static Mmode select_dominance_cc_mode   PARAMS ((rtx, rtx, Hint));
static Ccstar shift_op                  PARAMS ((rtx, Hint *));
static struct machine_function * arm_init_machine_status PARAMS ((void));
static int number_of_first_bit_set      PARAMS ((int));
static void replace_symbols_in_block    PARAMS ((tree, rtx, rtx));
static void thumb_exit                  PARAMS ((FILE *, int, rtx));
static void thumb_pushpop               PARAMS ((FILE *, int, int));
static Ccstar thumb_condition_code      PARAMS ((rtx, int));
static rtx is_jump_table                PARAMS ((rtx));
static Hint get_jump_table_size         PARAMS ((rtx));
static Mnode * move_minipool_fix_forward_ref PARAMS ((Mnode *, Mnode *, Hint));
static Mnode * add_minipool_forward_ref PARAMS ((Mfix *));
static Mnode * move_minipool_fix_backward_ref PARAMS ((Mnode *, Mnode *, Hint));
static Mnode * add_minipool_backward_ref PARAMS ((Mfix *));
static void assign_minipool_offsets     PARAMS ((Mfix *));
static void arm_print_value             PARAMS ((FILE *, rtx));
static void dump_minipool               PARAMS ((rtx));
static int arm_barrier_cost             PARAMS ((rtx));
static Mfix * create_fix_barrier        PARAMS ((Mfix *, Hint));
static void push_minipool_barrier       PARAMS ((rtx, Hint));
static void push_minipool_fix           PARAMS ((rtx, Hint, rtx *, Mmode, rtx));
static void note_invalid_constants      PARAMS ((rtx, Hint));
static int current_file_function_operand PARAMS ((rtx));
static Ulong arm_compute_save_reg0_reg12_mask PARAMS ((void));
static Ulong arm_compute_save_reg_mask  PARAMS ((void));
static Ulong arm_isr_value              PARAMS ((tree));
static Ulong arm_compute_func_type      PARAMS ((void));
static tree arm_handle_fndecl_attribute PARAMS ((tree *, tree, tree, int, bool *));
static tree arm_handle_isr_attribute    PARAMS ((tree *, tree, tree, int, bool *));
static void arm_output_function_epilogue PARAMS ((FILE *, Hint));
static void arm_output_function_prologue PARAMS ((FILE *, Hint));
static void thumb_output_function_prologue PARAMS ((FILE *, Hint));
static int arm_comp_type_attributes     PARAMS ((tree, tree));
static void arm_set_default_type_attributes PARAMS ((tree));
static int arm_adjust_cost              PARAMS ((rtx, rtx, rtx, int));
static int count_insns_for_constant     PARAMS ((HOST_WIDE_INT, int));
static int arm_get_strip_length         PARAMS ((int));
static bool arm_function_ok_for_sibcall PARAMS ((tree, tree));
#ifdef OBJECT_FORMAT_ELF
static void arm_elf_asm_named_section   PARAMS ((const char *, unsigned int));
#endif
#ifndef ARM_PE
static void arm_encode_section_info     PARAMS ((tree, int));
#endif
#ifdef AOF_ASSEMBLER
static void aof_globalize_label         PARAMS ((FILE *, const char *));
#endif
static void arm_internal_label          PARAMS ((FILE *, const char *, unsigned long));
static void arm_output_mi_thunk         PARAMS ((FILE *, tree, HOST_WIDE_INT,
                                                 HOST_WIDE_INT, tree));
static int arm_rtx_costs_1              PARAMS ((rtx, enum rtx_code, enum rtx_code));
static bool arm_rtx_costs               PARAMS ((rtx, int, int, int *));
static int arm_address_cost             PARAMS ((rtx));
static int is_load_address              PARAMS ((rtx));
static int is_cirrus_insn               PARAMS ((rtx));
static void cirrus_reorg                PARAMS ((rtx));

#undef Hint
#undef Mmode
#undef Ulong
#undef Ccstar
/* Initialize the GCC target structure.  */
#ifdef TARGET_DLLIMPORT_DECL_ATTRIBUTES
#undef  TARGET_MERGE_DECL_ATTRIBUTES
#define TARGET_MERGE_DECL_ATTRIBUTES merge_dllimport_decl_attributes
#endif

#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE arm_attribute_table

#ifdef AOF_ASSEMBLER
#undef  TARGET_ASM_BYTE_OP
#define TARGET_ASM_BYTE_OP "\tDCB\t"
#undef  TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\tDCW\t"
#undef  TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\tDCD\t"
#undef  TARGET_ASM_GLOBALIZE_LABEL
#define TARGET_ASM_GLOBALIZE_LABEL aof_globalize_label
#else
#undef  TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP NULL
#undef  TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER arm_assemble_integer
#endif

#undef  TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE arm_output_function_prologue

#undef  TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE arm_output_function_epilogue

#undef  TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES arm_comp_type_attributes

#undef  TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES arm_set_default_type_attributes

#undef  TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST arm_adjust_cost

#undef TARGET_ENCODE_SECTION_INFO
#ifdef ARM_PE
#define TARGET_ENCODE_SECTION_INFO arm_pe_encode_section_info
#else
#define TARGET_ENCODE_SECTION_INFO arm_encode_section_info
#endif

#undef  TARGET_STRIP_NAME_ENCODING
#define TARGET_STRIP_NAME_ENCODING arm_strip_name_encoding

#undef  TARGET_ASM_INTERNAL_LABEL
#define TARGET_ASM_INTERNAL_LABEL arm_internal_label

#undef  TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL arm_function_ok_for_sibcall

#undef  TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK arm_output_mi_thunk
#undef  TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall

#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS arm_rtx_costs
#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST arm_address_cost

struct gcc_target targetm = TARGET_INITIALIZER;
/* Obstack for minipool constant handling.  */
static struct obstack minipool_obstack;
static char *         minipool_startobj;

/* The maximum number of insns skipped which
   will be conditionalised if possible.  */
static int max_insns_skipped = 5;

extern FILE * asm_out_file;

/* True if we are currently building a constant table.  */
int making_const_table;

/* Define the information needed to generate branch insns.  This is
   stored from the compare operation.  */
rtx arm_compare_op0, arm_compare_op1;

/* What type of floating point are we tuning for?  */
enum floating_point_type arm_fpu;

/* What type of floating point instructions are available?  */
enum floating_point_type arm_fpu_arch;

/* What program mode is the cpu running in?  26-bit mode or 32-bit mode.  */
enum prog_mode_type arm_prgmode;

/* Set by the -mfp=... option.  */
const char * target_fp_name = NULL;

/* Used to parse -mstructure_size_boundary command line option.  */
const char * structure_size_string = NULL;
int arm_structure_size_boundary = DEFAULT_STRUCTURE_SIZE_BOUNDARY;

/* Bit values used to identify processor capabilities.  */
#define FL_CO_PROC    (1 << 0)        /* Has external co-processor bus.  */
#define FL_FAST_MULT  (1 << 1)        /* Fast multiply.  */
#define FL_MODE26     (1 << 2)        /* 26-bit mode support.  */
#define FL_MODE32     (1 << 3)        /* 32-bit mode support.  */
#define FL_ARCH4      (1 << 4)        /* Architecture rel 4.  */
#define FL_ARCH5      (1 << 5)        /* Architecture rel 5.  */
#define FL_THUMB      (1 << 6)        /* Thumb aware.  */
#define FL_LDSCHED    (1 << 7)        /* Load scheduling necessary.  */
#define FL_STRONG     (1 << 8)        /* StrongARM.  */
#define FL_ARCH5E     (1 << 9)        /* DSP extensions to v5.  */
#define FL_XSCALE     (1 << 10)       /* XScale.  */
#define FL_CIRRUS     (1 << 11)       /* Cirrus/DSP.  */

/* The bits in this mask specify which
   instructions we are allowed to generate.  */
static unsigned long insn_flags = 0;

/* The bits in this mask specify which instruction scheduling options should
   be used.  Note - there is an overlap with FL_FAST_MULT.  For some
   hardware we want to be able to generate the multiply instructions, but to
   tune as if they were not present in the architecture.  */
static unsigned long tune_flags = 0;

/* The following are used in the arm.md file as equivalents to bits
   in the above two flag variables.  */

/* Nonzero if this is an "M" variant of the processor.  */
int arm_fast_multiply = 0;

/* Nonzero if this chip supports the ARM Architecture 4 extensions.  */
int arm_arch4 = 0;

/* Nonzero if this chip supports the ARM Architecture 5 extensions.  */
int arm_arch5 = 0;

/* Nonzero if this chip supports the ARM Architecture 5E extensions.  */
int arm_arch5e = 0;

/* Nonzero if this chip can benefit from load scheduling.  */
int arm_ld_sched = 0;

/* Nonzero if this chip is a StrongARM.  */
int arm_is_strong = 0;

/* Nonzero if this chip is an XScale.  */
int arm_is_xscale = 0;

/* Nonzero if this chip is an ARM6 or an ARM7.  */
int arm_is_6_or_7 = 0;

/* Nonzero if this chip is a Cirrus/DSP.  */
int arm_is_cirrus = 0;

/* Nonzero if generating Thumb instructions.  */
int thumb_code = 0;

/* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
   must report the mode of the memory reference from PRINT_OPERAND to
   PRINT_OPERAND_ADDRESS.  */
enum machine_mode output_memory_reference_mode;

/* The register number to be used for the PIC offset register.  */
const char * arm_pic_register_string = NULL;
int arm_pic_register = INVALID_REGNUM;

/* Set to 1 when a return insn is output; this means that the epilogue
   is not needed.  */
int return_used_this_function;

/* Set to 1 after arm_reorg has started.  Reset at the start of
   the next function.  */
static int after_arm_reorg = 0;

/* The maximum number of insns to be used when loading a constant.  */
static int arm_constant_limit = 3;

/* For an explanation of these variables, see final_prescan_insn below.  */
int arm_ccfsm_state;
enum arm_cond_code arm_current_cc;
rtx arm_target_insn;
int arm_target_label;

/* The condition codes of the ARM, and the inverse function.  */
static const char * const arm_condition_codes[] =
{
  "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
  "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
};

#define streq(string1, string2) (strcmp (string1, string2) == 0)
/* Initialization code.  */

struct processors
{
  const char *const name;
  const unsigned long flags;
};

/* Not all of these give usefully different compilation alternatives,
   but there is no simple way of generalizing them.  */
static const struct processors all_cores[] =
{
  /* ARM Cores */

  {"arm2",          FL_CO_PROC | FL_MODE26 },
  {"arm250",        FL_CO_PROC | FL_MODE26 },
  {"arm3",          FL_CO_PROC | FL_MODE26 },
  {"arm6",          FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm60",         FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm600",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm610",                     FL_MODE26 | FL_MODE32 },
  {"arm620",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7",          FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  /* arm7m doesn't exist on its own, but only with D, (and I), but
     those don't alter the code, so arm7m is sometimes used.  */
  {"arm7m",         FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7d",         FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dm",        FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7di",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dmi",       FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm70",         FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700i",       FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm710",                     FL_MODE26 | FL_MODE32 },
  {"arm710t",                    FL_MODE26 | FL_MODE32 | FL_THUMB },
  {"arm720",                     FL_MODE26 | FL_MODE32 },
  {"arm720t",                    FL_MODE26 | FL_MODE32 | FL_THUMB },
  {"arm740t",                    FL_MODE26 | FL_MODE32 | FL_THUMB },
  {"arm710c",                    FL_MODE26 | FL_MODE32 },
  {"arm7100",                    FL_MODE26 | FL_MODE32 },
  {"arm7500",                    FL_MODE26 | FL_MODE32 },
  /* Doesn't have an external co-proc, but does have embedded fpu.  */
  {"arm7500fe",     FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7tdmi",      FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  {"arm8",          FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm810",        FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm9",          FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm920",        FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm920t",       FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm940t",       FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm9tdmi",      FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm9e",         FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"ep9312",        FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_CIRRUS },
  {"strongarm",     FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm110",  FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm1100", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm1110", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"arm10tdmi",     FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_ARCH5 },
  {"arm1020t",      FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_ARCH5 },
  {"xscale",        FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_STRONG | FL_ARCH5 | FL_ARCH5E | FL_XSCALE },

  {NULL, 0}
};

static const struct processors all_architectures[] =
{
  /* ARM Architectures */

  { "armv2",    FL_CO_PROC | FL_MODE26 },
  { "armv2a",   FL_CO_PROC | FL_MODE26 },
  { "armv3",    FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  { "armv3m",   FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  { "armv4",    FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 },
  /* Strictly, FL_MODE26 is a permitted option for v4t, but there are no
     implementations that support it, so we will leave it out for now.  */
  { "armv4t",   FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  { "armv5",    FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
  { "armv5t",   FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
  { "armv5te",  FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 | FL_ARCH5E },
  { "ep9312",   FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_CIRRUS },
  { NULL, 0 }
};
/* This is a magic structure.  The 'string' field is magically filled in
   with a pointer to the value specified by the user on the command line,
   assuming that the user has specified such a value.  */

struct arm_cpu_select arm_select[] =
{
  /* string       name            processors  */
  { NULL,         "-mcpu=",       all_cores  },
  { NULL,         "-march=",      all_architectures },
  { NULL,         "-mtune=",      all_cores }
};
/* Return the number of bits set in VALUE.  */
static unsigned
bit_count (value)
     unsigned long value;
{
  unsigned long count = 0;

  while (value)
    {
      count++;
      value &= value - 1;  /* Clear the least-significant set bit.  */
    }

  return count;
}
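
/* Example of the loop above (Kernighan's method): `value & (value - 1)'
   clears only the least-significant set bit, so the loop iterates once
   per set bit rather than once per bit position:

       0x29 (101001) -> 0x28 (101000) -> 0x20 (100000) -> 0

   giving bit_count (0x29) == 3.  */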

/* Fix up any incompatible options that the user has specified.
   This has now turned into a maze.  */
void
arm_override_options ()
{
  unsigned i;

  /* Set up the flags based on the cpu/architecture selected by the user.  */
  for (i = ARRAY_SIZE (arm_select); i--;)
    {
      struct arm_cpu_select * ptr = arm_select + i;

      if (ptr->string != NULL && ptr->string[0] != '\0')
        {
          const struct processors * sel;

          for (sel = ptr->processors; sel->name != NULL; sel++)
            if (streq (ptr->string, sel->name))
              {
                if (i == 2)
                  tune_flags = sel->flags;
                else
                  {
                    /* If we have been given an architecture and a processor
                       make sure that they are compatible.  We only generate
                       a warning though, and we prefer the CPU over the
                       architecture.  */
                    if (insn_flags != 0 && (insn_flags ^ sel->flags))
                      warning ("switch -mcpu=%s conflicts with -march= switch",
                               ptr->string);

                    insn_flags = sel->flags;
                  }

                break;
              }

          if (sel->name == NULL)
            error ("bad value (%s) for %s switch", ptr->string, ptr->name);
        }
    }

  /* If the user did not specify a processor, choose one for them.  */
  if (insn_flags == 0)
    {
      const struct processors * sel;
      unsigned int sought;
      static const struct cpu_default
      {
        const int cpu;
        const char *const name;
      }
      cpu_defaults[] =
      {
        { TARGET_CPU_arm2,      "arm2" },
        { TARGET_CPU_arm6,      "arm6" },
        { TARGET_CPU_arm610,    "arm610" },
        { TARGET_CPU_arm710,    "arm710" },
        { TARGET_CPU_arm7m,     "arm7m" },
        { TARGET_CPU_arm7500fe, "arm7500fe" },
        { TARGET_CPU_arm7tdmi,  "arm7tdmi" },
        { TARGET_CPU_arm8,      "arm8" },
        { TARGET_CPU_arm810,    "arm810" },
        { TARGET_CPU_arm9,      "arm9" },
        { TARGET_CPU_strongarm, "strongarm" },
        { TARGET_CPU_xscale,    "xscale" },
        { TARGET_CPU_ep9312,    "ep9312" },
        { TARGET_CPU_generic,   "arm" },
        { 0, 0 }
      };
      const struct cpu_default * def;

      /* Find the default.  */
      for (def = cpu_defaults; def->name; def++)
        if (def->cpu == TARGET_CPU_DEFAULT)
          break;

      /* Make sure we found the default CPU.  */
      if (def->name == NULL)
        abort ();

      /* Find the default CPU's flags.  */
      for (sel = all_cores; sel->name != NULL; sel++)
        if (streq (def->name, sel->name))
          break;

      if (sel->name == NULL)
        abort ();

      insn_flags = sel->flags;
      /* Now check to see if the user has specified any command line
         switches that require certain abilities from the CPU.  */
      sought = 0;

      if (TARGET_INTERWORK || TARGET_THUMB)
        {
          sought |= (FL_THUMB | FL_MODE32);

          /* Force apcs-32 to be used for interworking.  */
          target_flags |= ARM_FLAG_APCS_32;

          /* There are no ARM processors that support both APCS-26 and
             interworking.  Therefore we force FL_MODE26 to be removed
             from insn_flags here (if it was set), so that the search
             below will always be able to find a compatible processor.  */
          insn_flags &= ~FL_MODE26;
        }
      else if (!TARGET_APCS_32)
        sought |= FL_MODE26;

      if (sought != 0 && ((sought & insn_flags) != sought))
        {
          /* Try to locate a CPU type that supports all of the abilities
             of the default CPU, plus the extra abilities requested by
             the user.  */
          for (sel = all_cores; sel->name != NULL; sel++)
            if ((sel->flags & sought) == (sought | insn_flags))
              break;

          if (sel->name == NULL)
            {
              unsigned current_bit_count = 0;
              const struct processors * best_fit = NULL;

              /* Ideally we would like to issue an error message here
                 saying that it was not possible to find a CPU compatible
                 with the default CPU, but which also supports the command
                 line options specified by the programmer, and so they
                 ought to use the -mcpu=<name> command line option to
                 override the default CPU type.

                 Unfortunately this does not work with multilibing.  We
                 need to be able to support multilibs for -mapcs-26 and for
                 -mthumb-interwork and there is no CPU that can support both
                 options.  Instead if we cannot find a cpu that has both the
                 characteristics of the default cpu and the given command line
                 options we scan the array again looking for a best match.  */
              for (sel = all_cores; sel->name != NULL; sel++)
                if ((sel->flags & sought) == sought)
                  {
                    unsigned count;

                    count = bit_count (sel->flags & insn_flags);

                    if (count >= current_bit_count)
                      {
                        best_fit = sel;
                        current_bit_count = count;
                      }
                  }

              if (best_fit == NULL)
                abort ();
              else
                sel = best_fit;
            }

          insn_flags = sel->flags;
        }
    }

  /* If tuning has not been specified, tune for whichever processor or
     architecture has been selected.  */
  if (tune_flags == 0)
    tune_flags = insn_flags;

  /* Make sure that the processor choice does not conflict with any of the
     other command line choices.  */
  if (TARGET_APCS_32 && !(insn_flags & FL_MODE32))
    {
      /* If APCS-32 was not the default then it must have been set by the
         user, so issue a warning message.  If the user has specified
         "-mapcs-32 -mcpu=arm2" then we lose here.  */
      if ((TARGET_DEFAULT & ARM_FLAG_APCS_32) == 0)
        warning ("target CPU does not support APCS-32");
      target_flags &= ~ARM_FLAG_APCS_32;
    }
  else if (!TARGET_APCS_32 && !(insn_flags & FL_MODE26))
    {
      warning ("target CPU does not support APCS-26");
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_INTERWORK && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support interworking");
      target_flags &= ~ARM_FLAG_INTERWORK;
    }

  if (TARGET_THUMB && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support THUMB instructions");
      target_flags &= ~ARM_FLAG_THUMB;
    }

  if (TARGET_APCS_FRAME && TARGET_THUMB)
    {
      /* warning ("ignoring -mapcs-frame because -mthumb was used"); */
      target_flags &= ~ARM_FLAG_APCS_FRAME;
    }

  /* TARGET_BACKTRACE calls leaf_function_p, which causes a crash if done
     from here where no function is being compiled currently.  */
  if ((target_flags & (THUMB_FLAG_LEAF_BACKTRACE | THUMB_FLAG_BACKTRACE))
      && TARGET_ARM)
    warning ("enabling backtrace support is only meaningful when compiling for the Thumb");

  if (TARGET_ARM && TARGET_CALLEE_INTERWORKING)
    warning ("enabling callee interworking support is only meaningful when compiling for the Thumb");

  if (TARGET_ARM && TARGET_CALLER_INTERWORKING)
    warning ("enabling caller interworking support is only meaningful when compiling for the Thumb");

  /* If interworking is enabled then APCS-32 must be selected as well.  */
  if (TARGET_INTERWORK)
    {
      if (!TARGET_APCS_32)
        warning ("interworking forces APCS-32 to be used");
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_APCS_STACK && !TARGET_APCS_FRAME)
    {
      warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
      target_flags |= ARM_FLAG_APCS_FRAME;
    }

  if (TARGET_POKE_FUNCTION_NAME)
    target_flags |= ARM_FLAG_APCS_FRAME;

  if (TARGET_APCS_REENT && flag_pic)
    error ("-fpic and -mapcs-reent are incompatible");

  if (TARGET_APCS_REENT)
    warning ("APCS reentrant code not supported.  Ignored");

  /* If this target is normally configured to use APCS frames, warn if they
     are turned off and debugging is turned on.  */
  if (TARGET_ARM
      && write_symbols != NO_DEBUG
      && !TARGET_APCS_FRAME
      && (TARGET_DEFAULT & ARM_FLAG_APCS_FRAME))
    warning ("-g with -mno-apcs-frame may not give sensible debugging");

  /* If stack checking is disabled, we can use r10 as the PIC register,
     which keeps r9 available.  */
  if (flag_pic)
    arm_pic_register = TARGET_APCS_STACK ? 9 : 10;

  if (TARGET_APCS_FLOAT)
    warning ("passing floating point arguments in fp regs not yet supported");

  /* Initialize boolean versions of the flags, for use in the arm.md file.  */
  arm_fast_multiply = (insn_flags & FL_FAST_MULT) != 0;
  arm_arch4         = (insn_flags & FL_ARCH4) != 0;
  arm_arch5         = (insn_flags & FL_ARCH5) != 0;
  arm_arch5e        = (insn_flags & FL_ARCH5E) != 0;
  arm_is_xscale     = (insn_flags & FL_XSCALE) != 0;

  arm_ld_sched      = (tune_flags & FL_LDSCHED) != 0;
  arm_is_strong     = (tune_flags & FL_STRONG) != 0;
  thumb_code        = (TARGET_ARM == 0);
  arm_is_6_or_7     = (((tune_flags & (FL_MODE26 | FL_MODE32))
                       && !(tune_flags & FL_ARCH4))) != 0;
  arm_is_cirrus     = (tune_flags & FL_CIRRUS) != 0;

  if (arm_is_cirrus)
    {
      arm_fpu = FP_CIRRUS;

      /* Ignore -mhard-float if -mcpu=ep9312.  */
      if (TARGET_HARD_FLOAT)
        target_flags ^= ARM_FLAG_SOFT_FLOAT;
    }
  else
    /* Default value for floating point code... if no co-processor
       bus, then schedule for emulated floating point.  Otherwise,
       assume the user has an FPA.
       Note: this does not prevent use of floating point instructions,
       -msoft-float does that.  */
    arm_fpu = (tune_flags & FL_CO_PROC) ? FP_HARD : FP_SOFT3;

  if (target_fp_name)
    {
      if (streq (target_fp_name, "2"))
        arm_fpu_arch = FP_SOFT2;
      else if (streq (target_fp_name, "3"))
        arm_fpu_arch = FP_SOFT3;
      else
        error ("invalid floating point emulation option: -mfpe-%s",
               target_fp_name);
    }
  else
    arm_fpu_arch = FP_DEFAULT;

  if (TARGET_FPE)
    {
      if (arm_fpu == FP_SOFT3)
        arm_fpu = FP_SOFT2;
      else if (arm_fpu == FP_CIRRUS)
        warning ("-mpfpe switch not supported by ep9312 target cpu - ignored.");
      else if (arm_fpu != FP_HARD)
        arm_fpu = FP_SOFT2;
    }

  /* For arm2/3 there is no need to do any scheduling if there is only
     a floating point emulator, or we are doing software floating-point.  */
  if ((TARGET_SOFT_FLOAT || arm_fpu != FP_HARD)
      && (tune_flags & FL_MODE32) == 0)
    flag_schedule_insns = flag_schedule_insns_after_reload = 0;

  arm_prgmode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;

  if (structure_size_string != NULL)
    {
      int size = strtol (structure_size_string, NULL, 0);

      if (size == 8 || size == 32)
        arm_structure_size_boundary = size;
      else
        warning ("structure size boundary can only be set to 8 or 32");
    }

  if (arm_pic_register_string != NULL)
    {
      int pic_register = decode_reg_name (arm_pic_register_string);

      if (!flag_pic)
        warning ("-mpic-register= is useless without -fpic");

      /* Prevent the user from choosing an obviously stupid PIC register.  */
      else if (pic_register < 0 || call_used_regs[pic_register]
               || pic_register == HARD_FRAME_POINTER_REGNUM
               || pic_register == STACK_POINTER_REGNUM
               || pic_register >= PC_REGNUM)
        error ("unable to use '%s' for PIC register", arm_pic_register_string);
      else
        arm_pic_register = pic_register;
    }

  if (TARGET_THUMB && flag_schedule_insns)
    {
      /* Don't warn since it's on by default in -O2.  */
      flag_schedule_insns = 0;
    }

  /* If optimizing for space, don't synthesize constants.
     For processors with load scheduling, it never costs more than 2 cycles
     to load a constant, and the load scheduler may well reduce that to 1.  */
  if (optimize_size || (tune_flags & FL_LDSCHED))
    arm_constant_limit = 1;

  if (arm_is_xscale)
    arm_constant_limit = 2;

  /* If optimizing for size, bump the number of instructions that we
     are prepared to conditionally execute (even on a StrongARM).
     Otherwise for the StrongARM, which has early execution of branches,
     a sequence that is worth skipping is shorter.  */
  if (optimize_size)
    max_insns_skipped = 6;
  else if (arm_is_strong)
    max_insns_skipped = 3;

  /* Register global variables with the garbage collector.  */
  arm_add_gc_roots ();
}

static void
arm_add_gc_roots ()
{
  gcc_obstack_init (&minipool_obstack);
  minipool_startobj = (char *) obstack_alloc (&minipool_obstack, 0);
}

/* A table of known ARM exception types.
   For use with the interrupt function attribute.  */

typedef struct
{
  const char *const arg;
  const unsigned long return_value;
}
isr_attribute_arg;

static const isr_attribute_arg isr_attribute_args [] =
{
  { "IRQ",   ARM_FT_ISR },
  { "irq",   ARM_FT_ISR },
  { "FIQ",   ARM_FT_FIQ },
  { "fiq",   ARM_FT_FIQ },
  { "ABORT", ARM_FT_ISR },
  { "abort", ARM_FT_ISR },
  { "ABORT", ARM_FT_ISR },
  { "abort", ARM_FT_ISR },
  { "UNDEF", ARM_FT_EXCEPTION },
  { "undef", ARM_FT_EXCEPTION },
  { "SWI",   ARM_FT_EXCEPTION },
  { "swi",   ARM_FT_EXCEPTION },
  { NULL,    ARM_FT_NORMAL }
};
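
/* For illustration, these names are matched against function
   declarations such as:

       void handler (void) __attribute__ ((interrupt ("IRQ")));

   The "isr" attribute is accepted as a synonym for "interrupt" (see
   arm_compute_func_type below), and a missing argument defaults to
   ARM_FT_ISR in arm_isr_value.  */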

/* Returns the (interrupt) function type of the current
   function, or ARM_FT_UNKNOWN if the type cannot be determined.  */

static unsigned long
arm_isr_value (argument)
     tree argument;
{
  const isr_attribute_arg * ptr;
  const char * arg;

  /* No argument - default to IRQ.  */
  if (argument == NULL_TREE)
    return ARM_FT_ISR;

  /* Get the value of the argument.  */
  if (TREE_VALUE (argument) == NULL_TREE
      || TREE_CODE (TREE_VALUE (argument)) != STRING_CST)
    return ARM_FT_UNKNOWN;

  arg = TREE_STRING_POINTER (TREE_VALUE (argument));

  /* Check it against the list of known arguments.  */
  for (ptr = isr_attribute_args; ptr->arg != NULL; ptr++)
    if (streq (arg, ptr->arg))
      return ptr->return_value;

  /* An unrecognized interrupt type.  */
  return ARM_FT_UNKNOWN;
}

/* Computes the type of the current function.  */

static unsigned long
arm_compute_func_type ()
{
  unsigned long type = ARM_FT_UNKNOWN;
  tree a;
  tree attr;

  if (TREE_CODE (current_function_decl) != FUNCTION_DECL)
    abort ();

  /* Decide if the current function is volatile.  Such functions
     never return, and many memory cycles can be saved by not storing
     register values that will never be needed again.  This optimization
     was added to speed up context switching in a kernel application.  */
  if (optimize > 0
      && current_function_nothrow
      && TREE_THIS_VOLATILE (current_function_decl))
    type |= ARM_FT_VOLATILE;

  if (current_function_needs_context)
    type |= ARM_FT_NESTED;

  attr = DECL_ATTRIBUTES (current_function_decl);

  a = lookup_attribute ("naked", attr);
  if (a != NULL_TREE)
    type |= ARM_FT_NAKED;

  if (cfun->machine->eh_epilogue_sp_ofs != NULL_RTX)
    type |= ARM_FT_EXCEPTION_HANDLER;
  else
    {
      a = lookup_attribute ("isr", attr);
      if (a == NULL_TREE)
        a = lookup_attribute ("interrupt", attr);

      if (a == NULL_TREE)
        type |= TARGET_INTERWORK ? ARM_FT_INTERWORKED : ARM_FT_NORMAL;
      else
        type |= arm_isr_value (TREE_VALUE (a));
    }

  return type;
}

/* Returns the type of the current function.  */

unsigned long
arm_current_func_type ()
{
  if (ARM_FUNC_TYPE (cfun->machine->func_type) == ARM_FT_UNKNOWN)
    cfun->machine->func_type = arm_compute_func_type ();

  return cfun->machine->func_type;
}

/* Return 1 if it is possible to return using a single instruction.  */

int
use_return_insn (iscond)
     int iscond;
{
  int regno;
  unsigned int func_type;
  unsigned long saved_int_regs;

  /* Never use a return instruction before reload has run.  */
  if (!reload_completed)
    return 0;

  func_type = arm_current_func_type ();

  /* Naked functions and volatile functions need special
     consideration.  */
  if (func_type & (ARM_FT_VOLATILE | ARM_FT_NAKED))
    return 0;

  /* So do interrupt functions that use the frame pointer.  */
  if (IS_INTERRUPT (func_type) && frame_pointer_needed)
    return 0;

  /* As do variadic functions.  */
  if (current_function_pretend_args_size
      || cfun->machine->uses_anonymous_args
      /* Or if the function calls __builtin_eh_return ().  */
      || ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER
      /* Or if there is no frame pointer and there is a stack adjustment.  */
      || ((arm_get_frame_size () + current_function_outgoing_args_size != 0)
          && !frame_pointer_needed))
    return 0;

  saved_int_regs = arm_compute_save_reg_mask ();

  /* Can't be done if interworking with Thumb, and any registers have been
     stacked.  */
  if (TARGET_INTERWORK && saved_int_regs != 0)
    return 0;

  /* On StrongARM, conditional returns are expensive if they aren't
     taken and multiple registers have been stacked.  */
  if (iscond && arm_is_strong)
    {
      /* Conditional return when just the LR is stored is a simple
         conditional-load instruction, that's not expensive.  */
      if (saved_int_regs != 0 && saved_int_regs != (1 << LR_REGNUM))
        return 0;

      if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
        return 0;
    }

  /* If there are saved registers but the LR isn't saved, then we need
     two instructions for the return.  */
  if (saved_int_regs && !(saved_int_regs & (1 << LR_REGNUM)))
    return 0;

  /* Can't be done if any of the FPU regs are pushed,
     since this also requires an insn.  */
  if (TARGET_HARD_FLOAT)
    for (regno = FIRST_ARM_FP_REGNUM; regno <= LAST_ARM_FP_REGNUM; regno++)
      if (regs_ever_live[regno] && !call_used_regs[regno])
        return 0;

  return 1;
}

/* Return TRUE if int I is a valid immediate ARM constant.  */

int
const_ok_for_arm (i)
     HOST_WIDE_INT i;
{
  unsigned HOST_WIDE_INT mask = ~(unsigned HOST_WIDE_INT) 0xFF;

  /* For machines with >32 bit HOST_WIDE_INT, the bits above bit 31 must
     be all zero, or all one.  */
  if ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff) != 0
      && ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff)
          != ((~(unsigned HOST_WIDE_INT) 0)
              & ~(unsigned HOST_WIDE_INT) 0xffffffff)))
    return FALSE;

  /* Fast return for 0 and powers of 2.  */
  if ((i & (i - 1)) == 0)
    return TRUE;

  do
    {
      if ((i & mask & (unsigned HOST_WIDE_INT) 0xffffffff) == 0)
        return TRUE;
      mask =
          (mask << 2) | ((mask & (unsigned HOST_WIDE_INT) 0xffffffff)
                         >> (32 - 2)) | ~(unsigned HOST_WIDE_INT) 0xffffffff;
    }
  while (mask != ~(unsigned HOST_WIDE_INT) 0xFF);

  return FALSE;
}
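
/* Two worked examples for the loop above.  An ARM data-processing
   immediate is an 8-bit value rotated right by an even amount, so:

       0x000000FF   valid    (8-bit value, no rotation)
       0x000003FC   valid    (0xFF rotated right by 30)
       0x00000101   invalid  (the set bits do not fit in any 8-bit
                              window at an even rotation)

   The do-while loop simply tries every even rotation of the 8-bit
   mask until it wraps around to its initial value.  */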

/* Return true if I is a valid constant for the operation CODE.  */
static int
const_ok_for_op (i, code)
     HOST_WIDE_INT i;
     enum rtx_code code;
{
  if (const_ok_for_arm (i))
    return 1;

  switch (code)
    {
    case PLUS:
      return const_ok_for_arm (ARM_SIGN_EXTEND (-i));

    case MINUS:         /* Should only occur with (MINUS I reg) => rsb */
    case XOR:
    case IOR:
      return 0;

    case AND:
      return const_ok_for_arm (ARM_SIGN_EXTEND (~i));

    default:
      abort ();
    }
}
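
/* For example, with CODE == PLUS the constant -2 fails const_ok_for_arm
   but 2 succeeds, so `x + (-2)' can be emitted as `sub rD, rN, #2'.
   Likewise for AND, 0xFFFFFF00 fails but ~0xFFFFFF00 == 0xFF succeeds,
   matching the bic (bit-clear) instruction.  */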

/* Emit a sequence of insns to handle a large constant.
   CODE is the code of the operation required, it can be any of SET, PLUS,
   IOR, AND, XOR, MINUS;
   MODE is the mode in which the operation is being performed;
   VAL is the integer to operate on;
   SOURCE is the other operand (a register, or a null-pointer for SET);
   SUBTARGETS means it is safe to create scratch registers if that will
   either produce a simpler sequence, or we will want to cse the values.
   Return value is the number of insns emitted.  */

int
arm_split_constant (code, mode, val, target, source, subtargets)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
{
  if (subtargets || code == SET
      || (GET_CODE (target) == REG && GET_CODE (source) == REG
          && REGNO (target) != REGNO (source)))
    {
      /* After arm_reorg has been called, we can't fix up expensive
         constants by pushing them into memory so we must synthesize
         them in-line, regardless of the cost.  This is only likely to
         be more costly on chips that have load delay slots and we are
         compiling without running the scheduler (so no splitting
         occurred before the final instruction emission).

         Ref: gcc -O1 -mcpu=strongarm gcc.c-torture/compile/980506-2.c  */
      if (!after_arm_reorg
          && (arm_gen_constant (code, mode, val, target, source, 1, 0)
              > arm_constant_limit + (code != SET)))
        {
          if (code == SET)
            {
              /* Currently SET is the only monadic value for CODE, all
                 the rest are dyadic.  */
              emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (val)));
              return 1;
            }
          else
            {
              rtx temp = subtargets ? gen_reg_rtx (mode) : target;

              emit_insn (gen_rtx_SET (VOIDmode, temp, GEN_INT (val)));
              /* For MINUS, the value is subtracted from, since we never
                 have subtraction of a constant.  */
              if (code == MINUS)
                emit_insn (gen_rtx_SET (VOIDmode, target,
                                        gen_rtx_MINUS (mode, temp, source)));
              else
                emit_insn (gen_rtx_SET (VOIDmode, target,
                                        gen_rtx (code, mode, source, temp)));
              return 2;
            }
        }
    }

  return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
}

static int
count_insns_for_constant (remainder, i)
     HOST_WIDE_INT remainder;
     int i;
{
  HOST_WIDE_INT temp1;
  int num_insns = 0;

  do
    {
      int end;

      if (i <= 0)
        i += 32;
      if (remainder & (3 << (i - 2)))
        {
          end = i - 8;
          if (end < 0)
            end += 32;
          temp1 = remainder & ((0x0ff << end)
                               | ((i < end) ? (0xff >> (32 - end)) : 0));
          remainder &= ~temp1;
          num_insns++;
          i -= 6;
        }
      i -= 2;
    } while (remainder);
  return num_insns;
}
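
/* Worked example: count_insns_for_constant (0x00FF00FF, 0) == 2.  The
   scan starts at the top of the word and peels off one 8-bit chunk at
   an even position per iteration: first 0x00FF0000, then 0x000000FF.  */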

/* As above, but extra parameter GENERATE which, if clear, suppresses
   RTL generation.  */

static int
arm_gen_constant (code, mode, val, target, source, subtargets, generate)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
     int generate;
{
  int can_invert = 0;
  int can_negate = 0;
  int can_negate_initial = 0;
  int can_shift = 0;
  int i;
  int num_bits_set = 0;
  int set_sign_bit_copies = 0;
  int clear_sign_bit_copies = 0;
  int clear_zero_bit_copies = 0;
  int set_zero_bit_copies = 0;
  int insns = 0;
  unsigned HOST_WIDE_INT temp1, temp2;
  unsigned HOST_WIDE_INT remainder = val & 0xffffffff;

  /* Find out which operations are safe for a given CODE.  Also do a quick
     check for degenerate cases; these can occur when DImode operations
     are split.  */
  switch (code)
    {
    case SET:
      can_invert = 1;
      can_shift = 1;
      can_negate = 1;
      break;

    case PLUS:
      can_negate = 1;
      can_negate_initial = 1;
      break;

    case IOR:
      if (remainder == 0xffffffff)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    GEN_INT (ARM_SIGN_EXTEND (val))));
          return 1;
        }
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      break;

    case AND:
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, const0_rtx));
          return 1;
        }
      if (remainder == 0xffffffff)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      can_invert = 1;
      break;

    case XOR:
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      if (remainder == 0xffffffff)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_NOT (mode, source)));
          return 1;
        }

      /* We don't know how to handle this yet below.  */
      abort ();

    case MINUS:
      /* We treat MINUS as (val - source), since (source - val) is always
         passed as (source + (-val)).  */
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_NEG (mode, source)));
          return 1;
        }
      if (const_ok_for_arm (val))
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_MINUS (mode, GEN_INT (val),
                                                   source)));
          return 1;
        }
      can_negate = 1;

      break;

    default:
      abort ();
    }

  /* If we can do it in one insn, get out quickly.  */
  if (const_ok_for_arm (val)
      || (can_negate_initial && const_ok_for_arm (-val))
      || (can_invert && const_ok_for_arm (~val)))
    {
      if (generate)
        emit_insn (gen_rtx_SET (VOIDmode, target,
                                (source ? gen_rtx (code, mode, source,
                                                   GEN_INT (val))
                                 : GEN_INT (val))));
      return 1;
    }

  /* Calculate a few attributes that may be useful for specific
     optimizations.  */
  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) == 0)
        clear_sign_bit_copies++;
      else
        break;
    }

  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) != 0)
        set_sign_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) == 0)
        clear_zero_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) != 0)
        set_zero_bit_copies++;
      else
        break;
    }
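
  /* For example, remainder == 0x0000ff00 gives clear_sign_bit_copies == 16
     (bits 31..16 clear), set_sign_bit_copies == 0, clear_zero_bit_copies == 8
     (bits 7..0 clear) and set_zero_bit_copies == 0.  The cases below use
     these counts to choose shift-based strategies.  */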

  switch (code)
    {
    case SET:
      /* See if we can do this by sign_extending a constant that is known
         to be negative.  This is a good way of doing it, since the shift
         may well merge into a subsequent insn.  */
      if (set_sign_bit_copies > 1)
        {
          if (const_ok_for_arm
              (temp1 = ARM_SIGN_EXTEND (remainder
                                        << (set_sign_bit_copies - 1))))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx_SET (VOIDmode, new_src,
                                          GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
          /* For an inverted constant, we will need to set the low bits,
             these will be shifted out of harm's way.  */
          temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
          if (const_ok_for_arm (~temp1))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx_SET (VOIDmode, new_src,
                                          GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
        }

      /* See if we can generate this by setting the bottom (or the top)
         16 bits, and then shifting these into the other half of the
         word.  We only look for the simplest cases, to do more would cost
         too much.  Be careful, however, not to generate this when the
         alternative would take fewer insns.  */
      if (val & 0xffff0000)
        {
          temp1 = remainder & 0xffff0000;
          temp2 = remainder & 0x0000ffff;

          /* Overlaps outside this range are best done using other methods.  */
          for (i = 9; i < 24; i++)
            {
              if ((((temp2 | (temp2 << i)) & 0xffffffff) == remainder)
                  && !const_ok_for_arm (temp2))
                {
                  rtx new_src = (subtargets
                                 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
                                 : target);
                  insns = arm_gen_constant (code, mode, temp2, new_src,
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn (gen_rtx_SET
                               (VOIDmode, target,
                                gen_rtx_IOR (mode,
                                             gen_rtx_ASHIFT (mode, source,
                                                             GEN_INT (i)),
                                             source)));
                  return insns + 1;
                }
            }

          /* Don't duplicate cases already considered.  */
          for (i = 17; i < 24; i++)
            {
              if (((temp1 | (temp1 >> i)) == remainder)
                  && !const_ok_for_arm (temp1))
                {
                  rtx new_src = (subtargets
                                 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
                                 : target);
                  insns = arm_gen_constant (code, mode, temp1, new_src,
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn
                      (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_IOR
                                    (mode,
                                     gen_rtx_LSHIFTRT (mode, source,
                                                       GEN_INT (i)),
                                     source)));
                  return insns + 1;
                }
            }
        }
      break;

    case IOR:
    case XOR:
      /* If we have IOR or XOR, and the constant can be loaded in a
         single instruction, and we can find a temporary to put it in,
         then this can be done in two instructions instead of 3-4.  */
      if (subtargets
          /* TARGET can't be NULL if SUBTARGETS is 0.  */
          || (reload_completed && !reg_mentioned_p (target, source)))
        {
          if (const_ok_for_arm (ARM_SIGN_EXTEND (~val)))
            {
              if (generate)
                {
                  rtx sub = subtargets ? gen_reg_rtx (mode) : target;

                  emit_insn (gen_rtx_SET (VOIDmode, sub, GEN_INT (val)));
                  emit_insn (gen_rtx_SET (VOIDmode, target,
                                          gen_rtx (code, mode, source, sub)));
                }
              return 2;
            }
        }

      if (code == XOR)
        break;

      if (set_sign_bit_copies > 8
          && (val & (-1 << (32 - set_sign_bit_copies))) == val)
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (set_sign_bit_copies);

              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_ASHIFT (mode,
                                                                   source,
                                                                   shift))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_LSHIFTRT (mode, sub,
                                                                     shift))));
            }
          return 2;
        }

      if (set_zero_bit_copies > 8
          && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (set_zero_bit_copies);

              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_LSHIFTRT (mode,
                                                                     source,
                                                                     shift))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_ASHIFT (mode, sub,
                                                                   shift))));
            }
          return 2;
        }

      if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~val)))
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode, source)));
              source = sub;
              if (subtargets)
                sub = gen_reg_rtx (mode);
              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_AND (mode, source,
                                                   GEN_INT (temp1))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode, sub)));
            }
          return 3;
        }
      break;

    case AND:
      /* See if two shifts will do 2 or more insns' worth of work.  */
      if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
        {
          HOST_WIDE_INT shift_mask = ((0xffffffff
                                       << (32 - clear_sign_bit_copies))
                                      & 0xffffffff);

          if ((remainder | shift_mask) != 0xffffffff)
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            new_src, source, subtargets, 1);
                  source = new_src;
                }
              else
                {
                  rtx targ = subtargets ? NULL_RTX : target;

                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            targ, source, subtargets, 0);
                }
            }

          if (generate)
            {
              rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (clear_sign_bit_copies);

              emit_insn (gen_ashlsi3 (new_src, source, shift));
              emit_insn (gen_lshrsi3 (target, new_src, shift));
            }

          return insns + 2;
        }

      if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
        {
          HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;

          if ((remainder | shift_mask) != 0xffffffff)
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;

                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            new_src, source, subtargets, 1);
                  source = new_src;
                }
              else
                {
                  rtx targ = subtargets ? NULL_RTX : target;

                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            targ, source, subtargets, 0);
                }
            }

          if (generate)
            {
              rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (clear_zero_bit_copies);

              emit_insn (gen_lshrsi3 (new_src, source, shift));
              emit_insn (gen_ashlsi3 (target, new_src, shift));
            }

          return insns + 2;
        }

      break;

    default:
      break;
    }

  for (i = 0; i < 32; i++)
    if (remainder & (1 << i))
      num_bits_set++;

  if (code == AND || (can_invert && num_bits_set > 16))
    remainder = (~remainder) & 0xffffffff;
  else if (code == PLUS && num_bits_set > 16)
    remainder = (-remainder) & 0xffffffff;
  else
    {
      can_invert = 0;
      can_negate = 0;
    }

  /* Now try and find a way of doing the job in either two or three
     instructions.
     We start by looking for the largest block of zeros that are aligned on
     a 2-bit boundary; we then fill up the temps, wrapping around to the
     top of the word when we drop off the bottom.
     In the worst case this code should produce no more than four insns.  */
  {
    int best_start = 0;
    int best_consecutive_zeros = 0;

    for (i = 0; i < 32; i += 2)
      {
        int consecutive_zeros = 0;

        if (!(remainder & (3 << i)))
          {
            while ((i < 32) && !(remainder & (3 << i)))
              {
                consecutive_zeros += 2;
                i += 2;
              }
            if (consecutive_zeros > best_consecutive_zeros)
              {
                best_consecutive_zeros = consecutive_zeros;
                best_start = i - consecutive_zeros;
              }
            i -= 2;
          }
      }

    /* So long as it won't require any more insns to do so, it's
       desirable to emit a small constant (in bits 0...9) in the last
       insn.  This way there is more chance that it can be combined with
       a later addressing insn to form a pre-indexed load or store
       operation.  Consider:

               *((volatile int *)0xe0000100) = 1;
               *((volatile int *)0xe0000110) = 2;

       We want this to wind up as:

                mov rA, #0xe0000000
                mov rB, #1
                str rB, [rA, #0x100]
                mov rB, #2
                str rB, [rA, #0x110]

       rather than having to synthesize both large constants from scratch.
       Therefore, we calculate how many insns would be required to emit
       the constant starting from `best_start', and also starting from
       zero (i.e. with bit 31 first to be output).  If `best_start' doesn't
       yield a shorter sequence, we may as well use zero.  */
    if (best_start != 0
        && ((((unsigned HOST_WIDE_INT) 1) << best_start) < remainder)
        && (count_insns_for_constant (remainder, 0) <=
            count_insns_for_constant (remainder, best_start)))
      best_start = 0;

    /* Now start emitting the insns.  */
    i = best_start;
    do
      {
        int end;

        if (i <= 0)
          i += 32;
        if (remainder & (3 << (i - 2)))
          {
            end = i - 8;
            if (end < 0)
              end += 32;
            temp1 = remainder & ((0x0ff << end)
                                 | ((i < end) ? (0xff >> (32 - end)) : 0));
            remainder &= ~temp1;

            if (generate)
              {
                rtx new_src, temp1_rtx;

                if (code == SET || code == MINUS)
                  {
                    new_src = (subtargets ? gen_reg_rtx (mode) : target);
                    if (can_invert && code != MINUS)
                      temp1 = ~temp1;
                  }
                else
                  {
                    if (remainder && subtargets)
                      new_src = gen_reg_rtx (mode);
                    else
                      new_src = target;
                    if (can_invert)
                      temp1 = ~temp1;
                    else if (can_negate)
                      temp1 = -temp1;
                  }

                temp1 = trunc_int_for_mode (temp1, mode);
                temp1_rtx = GEN_INT (temp1);

                if (code == SET)
                  ;
                else if (code == MINUS)
                  temp1_rtx = gen_rtx_MINUS (mode, temp1_rtx, source);
                else
                  temp1_rtx = gen_rtx_fmt_ee (code, mode, source, temp1_rtx);

                emit_insn (gen_rtx_SET (VOIDmode, new_src, temp1_rtx));
                source = new_src;
              }

            if (code == SET)
              {
                can_invert = 0;
                code = PLUS;
              }
            else if (code == MINUS)
              code = PLUS;

            insns++;
            i -= 6;
          }
        i -= 2;
      }
    while (remainder);
  }

  return insns;
}
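
/* A worked example of the emission loop above: for SET of the constant
   0x12345678 (13 bits set, so neither the inversion nor the negation
   path applies), chunking starts at the top of the word and emits, in
   effect:

       mov     rD, #0x12000000
       add     rD, rD, #0x00344000
       add     rD, rD, #0x00001640
       add     rD, rD, #0x00000038

   Four insns, each immediate being an 8-bit value at an even rotation;
   after the first insn CODE is switched from SET to PLUS, as above.
   (rD here stands for whatever target register was chosen.)  */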

/* Canonicalize a comparison so that we are more likely to recognize it.
   This can be done for a few constant compares, where we can make the
   immediate value easier to load.  */

enum rtx_code
arm_canonicalize_comparison (code, op1)
     enum rtx_code code;
     rtx * op1;
{
  unsigned HOST_WIDE_INT i = INTVAL (*op1);

  switch (code)
    {
    case EQ:
    case NE:
      return code;

    case GT:
    case LE:
      if (i != ((((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1)) - 1)
          && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
        {
          *op1 = GEN_INT (i + 1);
          return code == GT ? GE : LT;
        }
      break;

    case GE:
    case LT:
      if (i != (((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1))
          && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
        {
          *op1 = GEN_INT (i - 1);
          return code == GE ? GT : LE;
        }
      break;

    case GTU:
    case LEU:
      if (i != ~((unsigned HOST_WIDE_INT) 0)
          && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
        {
          *op1 = GEN_INT (i + 1);
          return code == GTU ? GEU : LTU;
        }
      break;

    case GEU:
    case LTU:
      if (i != 0
          && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
        {
          *op1 = GEN_INT (i - 1);
          return code == GEU ? GTU : LEU;
        }
      break;

    default:
      abort ();
    }

  return code;
}
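
/* Worked example: `x > 0xffffff' cannot use its constant directly,
   since 0xffffff (24 set bits) is not a valid ARM immediate, but
   i + 1 == 0x1000000 is, so GT is rewritten as `x >= 0x1000000' (GE)
   and the comparison constant fits a single cmp.  */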

/* Decide whether a type should be returned in memory (true)
   or in a register (false).  This is called by the macro
   RETURN_IN_MEMORY.  */

int
arm_return_in_memory (type)
     tree type;
{
  HOST_WIDE_INT size;

  if (!AGGREGATE_TYPE_P (type))
    /* All simple types are returned in registers.  */
    return 0;

  size = int_size_in_bytes (type);

  if (TARGET_ATPCS)
    {
      /* ATPCS returns aggregate types in memory only if they are
         larger than a word (or are variable size).  */
      return (size < 0 || size > UNITS_PER_WORD);
    }

  /* For the arm-wince targets we choose to be compatible with Microsoft's
     ARM and Thumb compilers, which always return aggregates in memory.  */
#ifndef ARM_WINCE
  /* All structures/unions bigger than one word are returned in memory.
     Also catch the case where int_size_in_bytes returns -1.  In this case
     the aggregate is either huge or of variable size, and in either case
     we will want to return it via memory and not in a register.  */
  if (size < 0 || size > UNITS_PER_WORD)
    return 1;

  if (TREE_CODE (type) == RECORD_TYPE)
    {
      tree field;

      /* For a struct the APCS says that we only return in a register
         if the type is 'integer like' and every addressable element
         has an offset of zero.  For practical purposes this means
         that the structure can have at most one non bit-field element
         and that this element must be the first one in the structure.  */

      /* Find the first field, ignoring non FIELD_DECL things which will
         have been created by C++.  */
      for (field = TYPE_FIELDS (type);
           field && TREE_CODE (field) != FIELD_DECL;
           field = TREE_CHAIN (field))
        continue;

      if (field == NULL)
        return 0; /* An empty structure.  Allowed by an extension to ANSI C.  */

      /* Check that the first field is valid for returning in a register.  */

      /* ... Floats are not allowed.  */
      if (FLOAT_TYPE_P (TREE_TYPE (field)))
        return 1;

      /* ... Aggregates that are not themselves valid for returning in
         a register are not allowed.  */
      if (RETURN_IN_MEMORY (TREE_TYPE (field)))
        return 1;

      /* Now check the remaining fields, if any.  Only bitfields are allowed,
         since they are not addressable.  */
      for (field = TREE_CHAIN (field);
           field;
           field = TREE_CHAIN (field))
        {
          if (TREE_CODE (field) != FIELD_DECL)
            continue;

          if (!DECL_BIT_FIELD_TYPE (field))
            return 1;
        }

      return 0;
    }

  if (TREE_CODE (type) == UNION_TYPE)
    {
      tree field;

      /* Unions can be returned in registers if every element is
         integral, or can be returned in an integer register.  */
      for (field = TYPE_FIELDS (type);
           field;
           field = TREE_CHAIN (field))
        {
          if (TREE_CODE (field) != FIELD_DECL)
            continue;

          if (FLOAT_TYPE_P (TREE_TYPE (field)))
            return 1;

          if (RETURN_IN_MEMORY (TREE_TYPE (field)))
            return 1;
        }

      return 0;
    }
#endif /* not ARM_WINCE */

  /* Return all other types in memory.  */
  return 1;
}
1933 /* Indicate whether or not words of a double are in big-endian order. */
1936 arm_float_words_big_endian ()
1938 if (TARGET_CIRRUS)
1939 return 0;
1941 /* For FPA, float words are always big-endian. For VFP, float words
1942 follow the memory system mode. */
1944 if (TARGET_HARD_FLOAT)
1946 /* FIXME: TARGET_HARD_FLOAT currently implies FPA. */
1947 return 1;
1950 if (TARGET_VFP)
1951 return (TARGET_BIG_END ? 1 : 0);
1953 return 1;
1956 /* Initialize a variable CUM of type CUMULATIVE_ARGS
1957 for a call to a function whose data type is FNTYPE.
1958 For a library call, FNTYPE is NULL. */
1959 void
1960 arm_init_cumulative_args (pcum, fntype, libname, fndecl)
1961 CUMULATIVE_ARGS * pcum;
1962 tree fntype;
1963 rtx libname ATTRIBUTE_UNUSED;
1964 tree fndecl ATTRIBUTE_UNUSED;
1966 /* On the ARM, the offset starts at 0. */
1967 pcum->nregs = ((fntype && aggregate_value_p (TREE_TYPE (fntype))) ? 1 : 0);
1969 pcum->call_cookie = CALL_NORMAL;
1971 if (TARGET_LONG_CALLS)
1972 pcum->call_cookie = CALL_LONG;
1974 /* Check for long call/short call attributes. The attributes
1975 override any command line option. */
1976 if (fntype)
1978 if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (fntype)))
1979 pcum->call_cookie = CALL_SHORT;
1980 else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (fntype)))
1981 pcum->call_cookie = CALL_LONG;
1985 /* Determine where to put an argument to a function.
1986 Value is zero to push the argument on the stack,
1987 or a hard register in which to store the argument.
1989 MODE is the argument's machine mode.
1990 TYPE is the data type of the argument (as a tree).
1991 This is null for libcalls where that information may
1992 not be available.
1993 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1994 the preceding args and about the function being called.
1995 NAMED is nonzero if this argument is a named parameter
1996 (otherwise it is an extra parameter matching an ellipsis). */
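/* For example, for a call to f (int a, int b), A is passed in r0 and B
   in r1; NUM_ARG_REGS is 4, so the first four argument words use r0-r3
   and a fifth integer argument would be pushed on the stack.  */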
1999 arm_function_arg (pcum, mode, type, named)
2000 CUMULATIVE_ARGS * pcum;
2001 enum machine_mode mode;
2002 tree type ATTRIBUTE_UNUSED;
2003 int named;
2005 if (mode == VOIDmode)
2006 /* Compute operand 2 of the call insn. */
2007 return GEN_INT (pcum->call_cookie);
2009 if (!named || pcum->nregs >= NUM_ARG_REGS)
2010 return NULL_RTX;
2012 return gen_rtx_REG (mode, pcum->nregs);
2015 /* Variable sized types are passed by reference. This is a GCC
2016 extension to the ARM ABI. */
2019 arm_function_arg_pass_by_reference (cum, mode, type, named)
2020 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
2021 enum machine_mode mode ATTRIBUTE_UNUSED;
2022 tree type;
2023 int named ATTRIBUTE_UNUSED;
2025 return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
2028 /* Implement va_arg. */
2031 arm_va_arg (valist, type)
2032 tree valist, type;
2034 /* Variable sized types are passed by reference. */
2035 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2037 rtx addr = std_expand_builtin_va_arg (valist, build_pointer_type (type));
2038 return gen_rtx_MEM (ptr_mode, force_reg (Pmode, addr));
2041 return std_expand_builtin_va_arg (valist, type);
2044 /* Encode the current state of the #pragma [no_]long_calls. */
2045 typedef enum
2047 OFF, /* No #pragma [no_]long_calls is in effect. */
2048 LONG, /* #pragma long_calls is in effect. */
2049 SHORT /* #pragma no_long_calls is in effect. */
2050 } arm_pragma_enum;
2052 static arm_pragma_enum arm_pragma_long_calls = OFF;
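/* Illustrative usage of the pragmas handled below:
     #pragma long_calls
     void far_away (void);   -- calls use a long call sequence
     #pragma long_calls_off
   The setting applies to function types declared while it is in
   effect.  */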
2054 void
2055 arm_pr_long_calls (pfile)
2056 struct cpp_reader * pfile ATTRIBUTE_UNUSED;
2058 arm_pragma_long_calls = LONG;
2061 void
2062 arm_pr_no_long_calls (pfile)
2063 struct cpp_reader * pfile ATTRIBUTE_UNUSED;
2065 arm_pragma_long_calls = SHORT;
2068 void
2069 arm_pr_long_calls_off (pfile)
2070 struct cpp_reader * pfile ATTRIBUTE_UNUSED;
2072 arm_pragma_long_calls = OFF;
2075 /* Table of machine attributes. */
2076 const struct attribute_spec arm_attribute_table[] =
2078 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
2079 /* Function calls made to this symbol must be done indirectly, because
2080 it may lie outside of the 26 bit addressing range of a normal function
2081 call. */
2082 { "long_call", 0, 0, false, true, true, NULL },
2083 /* Whereas these functions are always known to reside within the 26 bit
2084 addressing range. */
2085 { "short_call", 0, 0, false, true, true, NULL },
2086 /* Interrupt Service Routines have special prologue and epilogue requirements. */
2087 { "isr", 0, 1, false, false, false, arm_handle_isr_attribute },
2088 { "interrupt", 0, 1, false, false, false, arm_handle_isr_attribute },
2089 { "naked", 0, 0, true, false, false, arm_handle_fndecl_attribute },
2090 #ifdef ARM_PE
2091 /* ARM/PE has three new attributes:
2092 interfacearm - ?
2093 dllexport - for exporting a function/variable that will live in a dll
2094 dllimport - for importing a function/variable from a dll
2096 Microsoft allows multiple declspecs in one __declspec, separating
2097 them with spaces. We do NOT support this. Instead, use __declspec
2098 multiple times.
2100 { "dllimport", 0, 0, true, false, false, NULL },
2101 { "dllexport", 0, 0, true, false, false, NULL },
2102 { "interfacearm", 0, 0, true, false, false, arm_handle_fndecl_attribute },
2103 #endif
2104 { NULL, 0, 0, false, false, false, NULL }
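/* Illustrative uses of the attributes in this table:
     void f (void) __attribute__ ((long_call));   -- always called
                                                      indirectly
     void g (void) __attribute__ ((isr ("IRQ"))); -- gets an IRQ
                                                      prologue/epilogue  */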
2107 /* Handle an attribute requiring a FUNCTION_DECL;
2108 arguments as in struct attribute_spec.handler. */
2110 static tree
2111 arm_handle_fndecl_attribute (node, name, args, flags, no_add_attrs)
2112 tree * node;
2113 tree name;
2114 tree args ATTRIBUTE_UNUSED;
2115 int flags ATTRIBUTE_UNUSED;
2116 bool * no_add_attrs;
2118 if (TREE_CODE (*node) != FUNCTION_DECL)
2120 warning ("`%s' attribute only applies to functions",
2121 IDENTIFIER_POINTER (name));
2122 *no_add_attrs = true;
2125 return NULL_TREE;
2128 /* Handle an "interrupt" or "isr" attribute;
2129 arguments as in struct attribute_spec.handler. */
2131 static tree
2132 arm_handle_isr_attribute (node, name, args, flags, no_add_attrs)
2133 tree * node;
2134 tree name;
2135 tree args;
2136 int flags;
2137 bool * no_add_attrs;
2139 if (DECL_P (*node))
2141 if (TREE_CODE (*node) != FUNCTION_DECL)
2143 warning ("`%s' attribute only applies to functions",
2144 IDENTIFIER_POINTER (name));
2145 *no_add_attrs = true;
2147 /* FIXME: the argument if any is checked for type attributes;
2148 should it be checked for decl ones? */
2150 else
2152 if (TREE_CODE (*node) == FUNCTION_TYPE
2153 || TREE_CODE (*node) == METHOD_TYPE)
2155 if (arm_isr_value (args) == ARM_FT_UNKNOWN)
2157 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
2158 *no_add_attrs = true;
2161 else if (TREE_CODE (*node) == POINTER_TYPE
2162 && (TREE_CODE (TREE_TYPE (*node)) == FUNCTION_TYPE
2163 || TREE_CODE (TREE_TYPE (*node)) == METHOD_TYPE)
2164 && arm_isr_value (args) != ARM_FT_UNKNOWN)
2166 *node = build_type_copy (*node);
2167 TREE_TYPE (*node) = build_type_attribute_variant
2168 (TREE_TYPE (*node),
2169 tree_cons (name, args, TYPE_ATTRIBUTES (TREE_TYPE (*node))));
2170 *no_add_attrs = true;
2172 else
2174 /* Possibly pass this attribute on from the type to a decl. */
2175 if (flags & ((int) ATTR_FLAG_DECL_NEXT
2176 | (int) ATTR_FLAG_FUNCTION_NEXT
2177 | (int) ATTR_FLAG_ARRAY_NEXT))
2179 *no_add_attrs = true;
2180 return tree_cons (name, args, NULL_TREE);
2182 else
2184 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
2189 return NULL_TREE;
2192 /* Return 0 if the attributes for two types are incompatible, 1 if they
2193 are compatible, and 2 if they are nearly compatible (which causes a
2194 warning to be generated). */
2196 static int
2197 arm_comp_type_attributes (type1, type2)
2198 tree type1;
2199 tree type2;
2201 int l1, l2, s1, s2;
2203 /* Check for mismatch of non-default calling convention. */
2204 if (TREE_CODE (type1) != FUNCTION_TYPE)
2205 return 1;
2207 /* Check for mismatched call attributes. */
2208 l1 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type1)) != NULL;
2209 l2 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type2)) != NULL;
2210 s1 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type1)) != NULL;
2211 s2 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type2)) != NULL;
2213 /* Only bother to check if an attribute is defined. */
2214 if (l1 | l2 | s1 | s2)
2216 /* If one type has an attribute, the other must have the same attribute. */
2217 if ((l1 != l2) || (s1 != s2))
2218 return 0;
2220 /* Disallow mixed attributes. */
2221 if ((l1 & s2) || (l2 & s1))
2222 return 0;
2225 /* Check for mismatched ISR attribute. */
2226 l1 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type1)) != NULL;
2227 if (! l1)
2228 l1 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type1)) != NULL;
2229 l2 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type2)) != NULL;
2230 if (! l2)
2231 l2 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type2)) != NULL;
2232 if (l1 != l2)
2233 return 0;
2235 return 1;
2238 /* Encode long_call or short_call attribute by prefixing
2239 symbol name in DECL with a special character FLAG. */
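/* For instance, a short-call function "foo" becomes "<flag>foo"
   internally, where <flag> is SHORT_CALL_FLAG_CHAR from arm.h; the
   prefix is stripped again when the assembler name is output.  */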
2241 void
2242 arm_encode_call_attribute (decl, flag)
2243 tree decl;
2244 int flag;
2246 const char * str = XSTR (XEXP (DECL_RTL (decl), 0), 0);
2247 int len = strlen (str);
2248 char * newstr;
2250 /* Do not allow weak functions to be treated as short call. */
2251 if (DECL_WEAK (decl) && flag == SHORT_CALL_FLAG_CHAR)
2252 return;
2254 newstr = alloca (len + 2);
2255 newstr[0] = flag;
2256 strcpy (newstr + 1, str);
2258 newstr = (char *) ggc_alloc_string (newstr, len + 1);
2259 XSTR (XEXP (DECL_RTL (decl), 0), 0) = newstr;
2262 /* Assigns default attributes to newly defined type. This is used to
2263 set short_call/long_call attributes for function types of
2264 functions defined inside corresponding #pragma scopes. */
2266 static void
2267 arm_set_default_type_attributes (type)
2268 tree type;
2270 /* Add __attribute__ ((long_call)) to all functions, when
2271 inside #pragma long_calls or __attribute__ ((short_call)),
2272 when inside #pragma no_long_calls. */
2273 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
2275 tree type_attr_list, attr_name;
2276 type_attr_list = TYPE_ATTRIBUTES (type);
2278 if (arm_pragma_long_calls == LONG)
2279 attr_name = get_identifier ("long_call");
2280 else if (arm_pragma_long_calls == SHORT)
2281 attr_name = get_identifier ("short_call");
2282 else
2283 return;
2285 type_attr_list = tree_cons (attr_name, NULL_TREE, type_attr_list);
2286 TYPE_ATTRIBUTES (type) = type_attr_list;
2290 /* Return 1 if the operand is a SYMBOL_REF for a function known to be
2291 defined within the current compilation unit. If this cannot be
2292 determined, then 0 is returned. */
2294 static int
2295 current_file_function_operand (sym_ref)
2296 rtx sym_ref;
2298 /* This is a bit of a fib. A function will have a short call flag
2299 applied to its name if it has the short call attribute, or it has
2300 already been defined within the current compilation unit. */
2301 if (ENCODED_SHORT_CALL_ATTR_P (XSTR (sym_ref, 0)))
2302 return 1;
2304 /* The current function is always defined within the current compilation
2305 unit. If it is a weak definition, however, then this may not be the real
2306 definition of the function, and so we have to say no. */
2307 if (sym_ref == XEXP (DECL_RTL (current_function_decl), 0)
2308 && !DECL_WEAK (current_function_decl))
2309 return 1;
2311 /* We cannot make the determination - default to returning 0. */
2312 return 0;
2315 /* Return nonzero if a 32 bit "long_call" should be generated for
2316 this call. We generate a long_call if the function:
2318 a. has an __attribute__ ((long_call))
2319 or b. is within the scope of a #pragma long_calls
2320 or c. the -mlong-calls command line switch has been specified
2322 However we do not generate a long call if the function:
2324 d. has an __attribute__ ((short_call))
2325 or e. is inside the scope of a #pragma no_long_calls
2326 or f. has an __attribute__ ((section))
2327 or g. is defined within the current compilation unit.
2329 This function will be called by C fragments contained in the machine
2330 description file. CALL_REF and CALL_COOKIE correspond to the matched
2331 rtl operands. CALL_SYMBOL is used to distinguish between
2332 two different callers of the function. It is set to 1 in the
2333 "call_symbol" and "call_symbol_value" patterns and to 0 in the "call"
2334 and "call_value" patterns. This is because of the difference in the
2335 SYM_REFs passed by these patterns. */
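/* Illustratively, a long call must load the target address into a
   register and branch through it, because a plain BL only has a 26-bit
   (+/-32MB) range; a short call is a single BL.  */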
2338 arm_is_longcall_p (sym_ref, call_cookie, call_symbol)
2339 rtx sym_ref;
2340 int call_cookie;
2341 int call_symbol;
2343 if (!call_symbol)
2345 if (GET_CODE (sym_ref) != MEM)
2346 return 0;
2348 sym_ref = XEXP (sym_ref, 0);
2351 if (GET_CODE (sym_ref) != SYMBOL_REF)
2352 return 0;
2354 if (call_cookie & CALL_SHORT)
2355 return 0;
2357 if (TARGET_LONG_CALLS && flag_function_sections)
2358 return 1;
2360 if (current_file_function_operand (sym_ref))
2361 return 0;
2363 return (call_cookie & CALL_LONG)
2364 || ENCODED_LONG_CALL_ATTR_P (XSTR (sym_ref, 0))
2365 || TARGET_LONG_CALLS;
2368 /* Return nonzero if it is ok to make a tail-call to DECL. */
2370 static bool
2371 arm_function_ok_for_sibcall (decl, exp)
2372 tree decl;
2373 tree exp ATTRIBUTE_UNUSED;
2375 int call_type = TARGET_LONG_CALLS ? CALL_LONG : CALL_NORMAL;
2377 /* Never tailcall something for which we have no decl, or if we
2378 are in Thumb mode. */
2379 if (decl == NULL || TARGET_THUMB)
2380 return false;
2382 /* Get the calling method. */
2383 if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
2384 call_type = CALL_SHORT;
2385 else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
2386 call_type = CALL_LONG;
2388 /* Cannot tail-call to long calls, since these are out of range of
2389 a branch instruction. However, if not compiling PIC, we know
2390 we can reach the symbol if it is in this compilation unit. */
2391 if (call_type == CALL_LONG && (flag_pic || !TREE_ASM_WRITTEN (decl)))
2392 return false;
2394 /* If we are interworking and the function is not declared static
2395 then we can't tail-call it unless we know that it exists in this
2396 compilation unit (since it might be a Thumb routine). */
2397 if (TARGET_INTERWORK && TREE_PUBLIC (decl) && !TREE_ASM_WRITTEN (decl))
2398 return false;
2400 /* Never tailcall from an ISR routine - it needs a special exit sequence. */
2401 if (IS_INTERRUPT (arm_current_func_type ()))
2402 return false;
2404 /* Everything else is ok. */
2405 return true;
2409 /* Addressing mode support functions. */
2411 /* Return non-zero if X is a legitimate immediate operand when compiling
2412 for PIC. */
2414 legitimate_pic_operand_p (x)
2415 rtx x;
2417 if (CONSTANT_P (x)
2418 && flag_pic
2419 && (GET_CODE (x) == SYMBOL_REF
2420 || (GET_CODE (x) == CONST
2421 && GET_CODE (XEXP (x, 0)) == PLUS
2422 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)))
2423 return 0;
2425 return 1;
2429 legitimize_pic_address (orig, mode, reg)
2430 rtx orig;
2431 enum machine_mode mode;
2432 rtx reg;
2434 if (GET_CODE (orig) == SYMBOL_REF
2435 || GET_CODE (orig) == LABEL_REF)
2437 #ifndef AOF_ASSEMBLER
2438 rtx pic_ref, address;
2439 #endif
2440 rtx insn;
2441 int subregs = 0;
2443 if (reg == 0)
2445 if (no_new_pseudos)
2446 abort ();
2447 else
2448 reg = gen_reg_rtx (Pmode);
2450 subregs = 1;
2453 #ifdef AOF_ASSEMBLER
2454 /* The AOF assembler can generate relocations for these directly, and
2455 understands that the PIC register has to be added into the offset. */
2456 insn = emit_insn (gen_pic_load_addr_based (reg, orig));
2457 #else
2458 if (subregs)
2459 address = gen_reg_rtx (Pmode);
2460 else
2461 address = reg;
2463 if (TARGET_ARM)
2464 emit_insn (gen_pic_load_addr_arm (address, orig));
2465 else
2466 emit_insn (gen_pic_load_addr_thumb (address, orig));
2468 if ((GET_CODE (orig) == LABEL_REF
2469 || (GET_CODE (orig) == SYMBOL_REF &&
2470 ENCODED_SHORT_CALL_ATTR_P (XSTR (orig, 0))))
2471 && NEED_GOT_RELOC)
2472 pic_ref = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, address);
2473 else
2475 pic_ref = gen_rtx_MEM (Pmode,
2476 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
2477 address));
2478 RTX_UNCHANGING_P (pic_ref) = 1;
2481 insn = emit_move_insn (reg, pic_ref);
2482 #endif
2483 current_function_uses_pic_offset_table = 1;
2484 /* Put a REG_EQUAL note on this insn, so that it can be optimized
2485 by loop. */
2486 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, orig,
2487 REG_NOTES (insn));
2488 return reg;
2490 else if (GET_CODE (orig) == CONST)
2492 rtx base, offset;
2494 if (GET_CODE (XEXP (orig, 0)) == PLUS
2495 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
2496 return orig;
2498 if (reg == 0)
2500 if (no_new_pseudos)
2501 abort ();
2502 else
2503 reg = gen_reg_rtx (Pmode);
2506 if (GET_CODE (XEXP (orig, 0)) == PLUS)
2508 base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
2509 offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
2510 base == reg ? 0 : reg);
2512 else
2513 abort ();
2515 if (GET_CODE (offset) == CONST_INT)
2517 /* The base register doesn't really matter, we only want to
2518 test the index for the appropriate mode. */
2519 if (!arm_legitimate_index_p (mode, offset, 0))
2521 if (!no_new_pseudos)
2522 offset = force_reg (Pmode, offset);
2523 else
2524 abort ();
2527 if (GET_CODE (offset) == CONST_INT)
2528 return plus_constant (base, INTVAL (offset));
2531 if (GET_MODE_SIZE (mode) > 4
2532 && (GET_MODE_CLASS (mode) == MODE_INT
2533 || TARGET_SOFT_FLOAT))
2535 emit_insn (gen_addsi3 (reg, base, offset));
2536 return reg;
2539 return gen_rtx_PLUS (Pmode, base, offset);
2542 return orig;
2545 /* Generate code to load the PIC register. PROLOGUE is true if
2546 called from arm_expand_prologue (in which case we want the
2547 generated insns at the start of the function); false if called
2548 by an exception receiver that needs the PIC register reloaded
2549 (in which case the insns are just dumped at the current location). */
2551 void
2552 arm_finalize_pic (prologue)
2553 int prologue ATTRIBUTE_UNUSED;
2555 #ifndef AOF_ASSEMBLER
2556 rtx l1, pic_tmp, pic_tmp2, seq, pic_rtx;
2557 rtx global_offset_table;
2559 if (current_function_uses_pic_offset_table == 0 || TARGET_SINGLE_PIC_BASE)
2560 return;
2562 if (!flag_pic)
2563 abort ();
2565 start_sequence ();
2566 l1 = gen_label_rtx ();
2568 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2569 /* On the ARM the PC register contains 'dot + 8' at the time of the
2570 addition, on the Thumb it is 'dot + 4'. */
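/* An illustrative ARM sequence (assuming ELF-style GOT_PCREL):
        ldr  sl, .Loff     @ sl = _GLOBAL_OFFSET_TABLE_ - (.LPIC + 8)
   .LPIC:
        add  sl, pc, sl    @ pc reads as .LPIC + 8, so sl -> GOT  */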
2571 pic_tmp = plus_constant (gen_rtx_LABEL_REF (Pmode, l1), TARGET_ARM ? 8 : 4);
2572 if (GOT_PCREL)
2573 pic_tmp2 = gen_rtx_CONST (VOIDmode,
2574 gen_rtx_PLUS (Pmode, global_offset_table, pc_rtx));
2575 else
2576 pic_tmp2 = gen_rtx_CONST (VOIDmode, global_offset_table);
2578 pic_rtx = gen_rtx_CONST (Pmode, gen_rtx_MINUS (Pmode, pic_tmp2, pic_tmp));
2580 if (TARGET_ARM)
2582 emit_insn (gen_pic_load_addr_arm (pic_offset_table_rtx, pic_rtx));
2583 emit_insn (gen_pic_add_dot_plus_eight (pic_offset_table_rtx, l1));
2585 else
2587 emit_insn (gen_pic_load_addr_thumb (pic_offset_table_rtx, pic_rtx));
2588 emit_insn (gen_pic_add_dot_plus_four (pic_offset_table_rtx, l1));
2591 seq = get_insns ();
2592 end_sequence ();
2593 if (prologue)
2594 emit_insn_after (seq, get_insns ());
2595 else
2596 emit_insn (seq);
2598 /* Need to emit this whether or not we obey regdecls,
2599 since setjmp/longjmp can cause life info to screw up. */
2600 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
2601 #endif /* AOF_ASSEMBLER */
2604 /* Return nonzero if X is valid as an ARM state addressing register. */
2605 static int
2606 arm_address_register_rtx_p (x, strict_p)
2607 rtx x;
2608 int strict_p;
2610 int regno;
2612 if (GET_CODE (x) != REG)
2613 return 0;
2615 regno = REGNO (x);
2617 if (strict_p)
2618 return ARM_REGNO_OK_FOR_BASE_P (regno);
2620 return (regno <= LAST_ARM_REGNUM
2621 || regno >= FIRST_PSEUDO_REGISTER
2622 || regno == FRAME_POINTER_REGNUM
2623 || regno == ARG_POINTER_REGNUM);
2626 /* Return nonzero if X is a valid ARM state address operand. */
2628 arm_legitimate_address_p (mode, x, strict_p)
2629 enum machine_mode mode;
2630 rtx x;
2631 int strict_p;
2633 if (arm_address_register_rtx_p (x, strict_p))
2634 return 1;
2636 else if (GET_CODE (x) == POST_INC || GET_CODE (x) == PRE_DEC)
2637 return arm_address_register_rtx_p (XEXP (x, 0), strict_p);
2639 else if ((GET_CODE (x) == POST_MODIFY || GET_CODE (x) == PRE_MODIFY)
2640 && GET_MODE_SIZE (mode) <= 4
2641 && arm_address_register_rtx_p (XEXP (x, 0), strict_p)
2642 && GET_CODE (XEXP (x, 1)) == PLUS
2643 && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
2644 return arm_legitimate_index_p (mode, XEXP (XEXP (x, 1), 1), strict_p);
2646 /* After reload, constants split into minipools will have addresses
2647 derived from a LABEL_REF. */
2648 else if (GET_MODE_SIZE (mode) >= 4 && reload_completed
2649 && (GET_CODE (x) == LABEL_REF
2650 || (GET_CODE (x) == CONST
2651 && GET_CODE (XEXP (x, 0)) == PLUS
2652 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF
2653 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
2654 return 1;
2656 else if (mode == TImode)
2657 return 0;
2659 else if (mode == DImode || (TARGET_SOFT_FLOAT && mode == DFmode))
2661 if (GET_CODE (x) == PLUS
2662 && arm_address_register_rtx_p (XEXP (x, 0), strict_p)
2663 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2665 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
2667 if (val == 4 || val == -4 || val == -8)
2668 return 1;
2672 else if (GET_CODE (x) == PLUS)
2674 rtx xop0 = XEXP (x, 0);
2675 rtx xop1 = XEXP (x, 1);
2677 return ((arm_address_register_rtx_p (xop0, strict_p)
2678 && arm_legitimate_index_p (mode, xop1, strict_p))
2679 || (arm_address_register_rtx_p (xop1, strict_p)
2680 && arm_legitimate_index_p (mode, xop0, strict_p)));
2683 #if 0
2684 /* Reload currently can't handle MINUS, so disable this for now. */
2685 else if (GET_CODE (x) == MINUS)
2687 rtx xop0 = XEXP (x, 0);
2688 rtx xop1 = XEXP (x, 1);
2690 return (arm_address_register_rtx_p (xop0, strict_p)
2691 && arm_legitimate_index_p (mode, xop1, strict_p));
2693 #endif
2695 else if (GET_MODE_CLASS (mode) != MODE_FLOAT
2696 && GET_CODE (x) == SYMBOL_REF
2697 && CONSTANT_POOL_ADDRESS_P (x)
2698 && ! (flag_pic
2699 && symbol_mentioned_p (get_pool_constant (x))))
2700 return 1;
2702 else if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == POST_DEC)
2703 && (GET_MODE_SIZE (mode) <= 4)
2704 && arm_address_register_rtx_p (XEXP (x, 0), strict_p))
2705 return 1;
2707 return 0;
2710 /* Return nonzero if INDEX is valid for an address index operand in
2711 ARM state. */
2712 static int
2713 arm_legitimate_index_p (mode, index, strict_p)
2714 enum machine_mode mode;
2715 rtx index;
2716 int strict_p;
2718 HOST_WIDE_INT range;
2719 enum rtx_code code = GET_CODE (index);
2721 if (TARGET_HARD_FLOAT && GET_MODE_CLASS (mode) == MODE_FLOAT)
2722 return (code == CONST_INT && INTVAL (index) < 1024
2723 && INTVAL (index) > -1024
2724 && (INTVAL (index) & 3) == 0);
2726 if (TARGET_CIRRUS
2727 && (GET_MODE_CLASS (mode) == MODE_FLOAT || mode == DImode))
2728 return (code == CONST_INT
2729 && INTVAL (index) < 255
2730 && INTVAL (index) > -255);
2732 if (arm_address_register_rtx_p (index, strict_p)
2733 && GET_MODE_SIZE (mode) <= 4)
2734 return 1;
2736 /* XXX What about ldrsb? */
2737 if (GET_MODE_SIZE (mode) <= 4 && code == MULT
2738 && (!arm_arch4 || (mode) != HImode))
2740 rtx xiop0 = XEXP (index, 0);
2741 rtx xiop1 = XEXP (index, 1);
2743 return ((arm_address_register_rtx_p (xiop0, strict_p)
2744 && power_of_two_operand (xiop1, SImode))
2745 || (arm_address_register_rtx_p (xiop1, strict_p)
2746 && power_of_two_operand (xiop0, SImode)));
2749 if (GET_MODE_SIZE (mode) <= 4
2750 && (code == LSHIFTRT || code == ASHIFTRT
2751 || code == ASHIFT || code == ROTATERT)
2752 && (!arm_arch4 || (mode) != HImode))
2754 rtx op = XEXP (index, 1);
2756 return (arm_address_register_rtx_p (XEXP (index, 0), strict_p)
2757 && GET_CODE (op) == CONST_INT
2758 && INTVAL (op) > 0
2759 && INTVAL (op) <= 31);
2762 /* XXX For ARM v4 we may be doing a sign-extend operation during the
2763 load, but that has a restricted addressing range and we are unable
2764 to tell here whether that is the case. To be safe we restrict all
2765 loads to that range. */
2766 range = ((mode) == HImode || (mode) == QImode)
2767 ? (arm_arch4 ? 256 : 4095) : 4096;
2769 return (code == CONST_INT
2770 && INTVAL (index) < range
2771 && INTVAL (index) > -range);
2774 /* Return nonzero if X is valid as a Thumb state base register. */
2775 static int
2776 thumb_base_register_rtx_p (x, mode, strict_p)
2777 rtx x;
2778 enum machine_mode mode;
2779 int strict_p;
2781 int regno;
2783 if (GET_CODE (x) != REG)
2784 return 0;
2786 regno = REGNO (x);
2788 if (strict_p)
2789 return THUMB_REGNO_MODE_OK_FOR_BASE_P (regno, mode);
2791 return (regno <= LAST_LO_REGNUM
2792 || regno >= FIRST_PSEUDO_REGISTER
2793 || regno == FRAME_POINTER_REGNUM
2794 || (GET_MODE_SIZE (mode) >= 4
2795 && (regno == STACK_POINTER_REGNUM
2796 || x == hard_frame_pointer_rtx
2797 || x == arg_pointer_rtx)));
2800 /* Return nonzero if x is a legitimate index register. This is the case
2801 for any base register that can access a QImode object. */
2802 inline static int
2803 thumb_index_register_rtx_p (x, strict_p)
2804 rtx x;
2805 int strict_p;
2807 return thumb_base_register_rtx_p (x, QImode, strict_p);
2810 /* Return nonzero if x is a legitimate Thumb-state address.
2812 The AP may be eliminated to either the SP or the FP, so we use the
2813 least common denominator, e.g. SImode, and offsets from 0 to 64.
2815 ??? Verify whether the above is the right approach.
2817 ??? Also, the FP may be eliminated to the SP, so perhaps that
2818 needs special handling also.
2820 ??? Look at how the mips16 port solves this problem. It probably uses
2821 better ways to solve some of these problems.
2823 Although it is not incorrect, we don't accept QImode and HImode
2824 addresses based on the frame pointer or arg pointer until the
2825 reload pass starts. This is so that eliminating such addresses
2826 into stack based ones won't produce impossible code. */
2828 thumb_legitimate_address_p (mode, x, strict_p)
2829 enum machine_mode mode;
2830 rtx x;
2831 int strict_p;
2833 /* ??? Not clear if this is right. Experiment. */
2834 if (GET_MODE_SIZE (mode) < 4
2835 && !(reload_in_progress || reload_completed)
2836 && (reg_mentioned_p (frame_pointer_rtx, x)
2837 || reg_mentioned_p (arg_pointer_rtx, x)
2838 || reg_mentioned_p (virtual_incoming_args_rtx, x)
2839 || reg_mentioned_p (virtual_outgoing_args_rtx, x)
2840 || reg_mentioned_p (virtual_stack_dynamic_rtx, x)
2841 || reg_mentioned_p (virtual_stack_vars_rtx, x)))
2842 return 0;
2844 /* Accept any base register. SP only in SImode or larger. */
2845 else if (thumb_base_register_rtx_p (x, mode, strict_p))
2846 return 1;
2848 /* This is PC relative data before MACHINE_DEPENDENT_REORG runs. */
2849 else if (GET_MODE_SIZE (mode) >= 4 && CONSTANT_P (x)
2850 && GET_CODE (x) == SYMBOL_REF
2851 && CONSTANT_POOL_ADDRESS_P (x) && ! flag_pic)
2852 return 1;
2854 /* This is PC relative data after MACHINE_DEPENDENT_REORG runs. */
2855 else if (GET_MODE_SIZE (mode) >= 4 && reload_completed
2856 && (GET_CODE (x) == LABEL_REF
2857 || (GET_CODE (x) == CONST
2858 && GET_CODE (XEXP (x, 0)) == PLUS
2859 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF
2860 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
2861 return 1;
2863 /* Post-inc indexing only supported for SImode and larger. */
2864 else if (GET_CODE (x) == POST_INC && GET_MODE_SIZE (mode) >= 4
2865 && thumb_index_register_rtx_p (XEXP (x, 0), strict_p))
2866 return 1;
2868 else if (GET_CODE (x) == PLUS)
2870 /* REG+REG address can be any two index registers. */
2871 /* We disallow FRAME+REG addressing since we know that FRAME
2872 will be replaced with STACK, and SP relative addressing only
2873 permits SP+OFFSET. */
2874 if (GET_MODE_SIZE (mode) <= 4
2875 && XEXP (x, 0) != frame_pointer_rtx
2876 && XEXP (x, 1) != frame_pointer_rtx
2877 && XEXP (x, 0) != virtual_stack_vars_rtx
2878 && XEXP (x, 1) != virtual_stack_vars_rtx
2879 && thumb_index_register_rtx_p (XEXP (x, 0), strict_p)
2880 && thumb_index_register_rtx_p (XEXP (x, 1), strict_p))
2881 return 1;
2883 /* REG+const has 5-7 bit offset for non-SP registers. */
2884 else if ((thumb_index_register_rtx_p (XEXP (x, 0), strict_p)
2885 || XEXP (x, 0) == arg_pointer_rtx)
2886 && GET_CODE (XEXP (x, 1)) == CONST_INT
2887 && thumb_legitimate_offset_p (mode, INTVAL (XEXP (x, 1))))
2888 return 1;
2890 /* REG+const has 10 bit offset for SP, but only SImode and
2891 larger is supported. */
2892 /* ??? Should probably check for DI/DFmode overflow here
2893 just like GO_IF_LEGITIMATE_OFFSET does. */
2894 else if (GET_CODE (XEXP (x, 0)) == REG
2895 && REGNO (XEXP (x, 0)) == STACK_POINTER_REGNUM
2896 && GET_MODE_SIZE (mode) >= 4
2897 && GET_CODE (XEXP (x, 1)) == CONST_INT
2898 && INTVAL (XEXP (x, 1)) >= 0
2899 && INTVAL (XEXP (x, 1)) + GET_MODE_SIZE (mode) <= 1024
2900 && (INTVAL (XEXP (x, 1)) & 3) == 0)
2901 return 1;
2903 else if (GET_CODE (XEXP (x, 0)) == REG
2904 && REGNO (XEXP (x, 0)) == FRAME_POINTER_REGNUM
2905 && GET_MODE_SIZE (mode) >= 4
2906 && GET_CODE (XEXP (x, 1)) == CONST_INT
2907 && (INTVAL (XEXP (x, 1)) & 3) == 0)
2908 return 1;
2911 else if (GET_MODE_CLASS (mode) != MODE_FLOAT
2912 && GET_CODE (x) == SYMBOL_REF
2913 && CONSTANT_POOL_ADDRESS_P (x)
2914 && !(flag_pic
2915 && symbol_mentioned_p (get_pool_constant (x))))
2916 return 1;
2918 return 0;
2921 /* Return nonzero if VAL can be used as an offset in a Thumb-state address
2922 instruction of mode MODE. */
2924 thumb_legitimate_offset_p (mode, val)
2925 enum machine_mode mode;
2926 HOST_WIDE_INT val;
2928 switch (GET_MODE_SIZE (mode))
2930 case 1:
2931 return val >= 0 && val < 32;
2933 case 2:
2934 return val >= 0 && val < 64 && (val & 1) == 0;
2936 default:
2937 return (val >= 0
2938 && (val + GET_MODE_SIZE (mode)) <= 128
2939 && (val & 3) == 0);
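/* For example, a HImode (2 byte) access may use offsets 0, 2, ..., 62,
   matching the 5-bit scaled immediate of the Thumb ldrh/strh
   encodings, while an SImode access may use 0, 4, ..., 124.  */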
2943 /* Try machine-dependent ways of modifying an illegitimate address
2944 to be legitimate. If we find one, return the new, valid address. */
2947 arm_legitimize_address (x, orig_x, mode)
2948 rtx x;
2949 rtx orig_x;
2950 enum machine_mode mode;
2952 if (GET_CODE (x) == PLUS)
2954 rtx xop0 = XEXP (x, 0);
2955 rtx xop1 = XEXP (x, 1);
2957 if (CONSTANT_P (xop0) && !symbol_mentioned_p (xop0))
2958 xop0 = force_reg (SImode, xop0);
2960 if (CONSTANT_P (xop1) && !symbol_mentioned_p (xop1))
2961 xop1 = force_reg (SImode, xop1);
2963 if (ARM_BASE_REGISTER_RTX_P (xop0)
2964 && GET_CODE (xop1) == CONST_INT)
2966 HOST_WIDE_INT n, low_n;
2967 rtx base_reg, val;
2968 n = INTVAL (xop1);
2970 if (mode == DImode || (TARGET_SOFT_FLOAT && mode == DFmode))
2972 low_n = n & 0x0f;
2973 n &= ~0x0f;
2974 if (low_n > 4)
2976 n += 16;
2977 low_n -= 16;
2980 else
2982 low_n = ((mode) == TImode ? 0
2983 : n >= 0 ? (n & 0xfff) : -((-n) & 0xfff));
2984 n -= low_n;
2987 base_reg = gen_reg_rtx (SImode);
2988 val = force_operand (gen_rtx_PLUS (SImode, xop0,
2989 GEN_INT (n)), NULL_RTX);
2990 emit_move_insn (base_reg, val);
2991 x = (low_n == 0 ? base_reg
2992 : gen_rtx_PLUS (SImode, base_reg, GEN_INT (low_n)));
2994 else if (xop0 != XEXP (x, 0) || xop1 != XEXP (x, 1))
2995 x = gen_rtx_PLUS (SImode, xop0, xop1);
2998 /* XXX We don't allow MINUS any more -- see comment in
2999 arm_legitimate_address_p (). */
3000 else if (GET_CODE (x) == MINUS)
3002 rtx xop0 = XEXP (x, 0);
3003 rtx xop1 = XEXP (x, 1);
3005 if (CONSTANT_P (xop0))
3006 xop0 = force_reg (SImode, xop0);
3008 if (CONSTANT_P (xop1) && ! symbol_mentioned_p (xop1))
3009 xop1 = force_reg (SImode, xop1);
3011 if (xop0 != XEXP (x, 0) || xop1 != XEXP (x, 1))
3012 x = gen_rtx_MINUS (SImode, xop0, xop1);
3015 if (flag_pic)
3017 /* We need to find and carefully transform any SYMBOL and LABEL
3018 references, so go back to the original address expression. */
3019 rtx new_x = legitimize_pic_address (orig_x, mode, NULL_RTX);
3021 if (new_x != orig_x)
3022 x = new_x;
3025 return x;
3030 #define REG_OR_SUBREG_REG(X) \
3031 (GET_CODE (X) == REG \
3032 || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))
3034 #define REG_OR_SUBREG_RTX(X) \
3035 (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))
3037 #ifndef COSTS_N_INSNS
3038 #define COSTS_N_INSNS(N) ((N) * 4 - 2)
3039 #endif
3041 static inline int
3042 arm_rtx_costs_1 (x, code, outer)
3043 rtx x;
3044 enum rtx_code code;
3045 enum rtx_code outer;
3047 enum machine_mode mode = GET_MODE (x);
3048 enum rtx_code subcode;
3049 int extra_cost;
3051 if (TARGET_THUMB)
3053 switch (code)
3055 case ASHIFT:
3056 case ASHIFTRT:
3057 case LSHIFTRT:
3058 case ROTATERT:
3059 case PLUS:
3060 case MINUS:
3061 case COMPARE:
3062 case NEG:
3063 case NOT:
3064 return COSTS_N_INSNS (1);
3066 case MULT:
3067 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
3069 int cycles = 0;
3070 unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));
3072 while (i)
3074 i >>= 2;
3075 cycles++;
3077 return COSTS_N_INSNS (2) + cycles;
3079 return COSTS_N_INSNS (1) + 16;
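/* Worked example: for a multiplier of 0x21 the loop above iterates
   three times (0x21 -> 0x8 -> 0x2 -> 0), modelling a multiplier that
   retires two bits per cycle, giving COSTS_N_INSNS (2) + 3.  */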
3081 case SET:
3082 return (COSTS_N_INSNS (1)
3083 + 4 * ((GET_CODE (SET_SRC (x)) == MEM)
3084 + (GET_CODE (SET_DEST (x)) == MEM)));
3086 case CONST_INT:
3087 if (outer == SET)
3089 if ((unsigned HOST_WIDE_INT) INTVAL (x) < 256)
3090 return 0;
3091 if (thumb_shiftable_const (INTVAL (x)))
3092 return COSTS_N_INSNS (2);
3093 return COSTS_N_INSNS (3);
3095 else if (outer == PLUS
3096 && INTVAL (x) < 256 && INTVAL (x) > -256)
3097 return 0;
3098 else if (outer == COMPARE
3099 && (unsigned HOST_WIDE_INT) INTVAL (x) < 256)
3100 return 0;
3101 else if (outer == ASHIFT || outer == ASHIFTRT
3102 || outer == LSHIFTRT)
3103 return 0;
3104 return COSTS_N_INSNS (2);
3106 case CONST:
3107 case CONST_DOUBLE:
3108 case LABEL_REF:
3109 case SYMBOL_REF:
3110 return COSTS_N_INSNS (3);
3112 case UDIV:
3113 case UMOD:
3114 case DIV:
3115 case MOD:
3116 return 100;
3118 case TRUNCATE:
3119 return 99;
3121 case AND:
3122 case XOR:
3123 case IOR:
3124 /* XXX guess. */
3125 return 8;
3127 case ADDRESSOF:
3128 case MEM:
3129 /* XXX another guess. */
3130 /* Memory costs quite a lot for the first word, but subsequent words
3131 load at the equivalent of a single insn each. */
3132 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
3133 + ((GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
3134 ? 4 : 0));
3136 case IF_THEN_ELSE:
3137 /* XXX a guess. */
3138 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
3139 return 14;
3140 return 2;
3142 case ZERO_EXTEND:
3143 /* XXX still guessing. */
3144 switch (GET_MODE (XEXP (x, 0)))
3146 case QImode:
3147 return (1 + (mode == DImode ? 4 : 0)
3148 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3150 case HImode:
3151 return (4 + (mode == DImode ? 4 : 0)
3152 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3154 case SImode:
3155 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3157 default:
3158 return 99;
3161 default:
3162 return 99;
3163 #if 0
3164 case FFS:
3165 case FLOAT:
3166 case FIX:
3167 case UNSIGNED_FIX:
3168 /* XXX guess */
3169 fprintf (stderr, "unexpected code for thumb in rtx_costs: %s\n",
3170 rtx_name[code]);
3171 abort ();
3172 #endif
3176 switch (code)
3178 case MEM:
3179 /* Memory costs quite a lot for the first word, but subsequent words
3180 load at the equivalent of a single insn each. */
3181 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
3182 + (GET_CODE (x) == SYMBOL_REF
3183 && CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));
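/* E.g. an SImode load costs 10, while a DImode load costs 14: the
   second word is charged at 4, the single-insn equivalent here.  */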
3185 case DIV:
3186 case MOD:
3187 return 100;
3189 case ROTATE:
3190 if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
3191 return 4;
3192 /* Fall through */
3193 case ROTATERT:
3194 if (mode != SImode)
3195 return 8;
3196 /* Fall through */
3197 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
3198 if (mode == DImode)
3199 return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
3200 + ((GET_CODE (XEXP (x, 0)) == REG
3201 || (GET_CODE (XEXP (x, 0)) == SUBREG
3202 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
3203 ? 0 : 8));
3204 return (1 + ((GET_CODE (XEXP (x, 0)) == REG
3205 || (GET_CODE (XEXP (x, 0)) == SUBREG
3206 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
3207 ? 0 : 4)
3208 + ((GET_CODE (XEXP (x, 1)) == REG
3209 || (GET_CODE (XEXP (x, 1)) == SUBREG
3210 && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
3211 || (GET_CODE (XEXP (x, 1)) == CONST_INT))
3212 ? 0 : 4));
3214 case MINUS:
3215 if (mode == DImode)
3216 return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
3217 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
3218 || (GET_CODE (XEXP (x, 0)) == CONST_INT
3219 && const_ok_for_arm (INTVAL (XEXP (x, 0)))))
3220 ? 0 : 8));
3222 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3223 return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
3224 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
3225 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
3226 ? 0 : 8)
3227 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
3228 || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
3229 && const_double_rtx_ok_for_fpu (XEXP (x, 0))))
3230 ? 0 : 8));
3232 if (((GET_CODE (XEXP (x, 0)) == CONST_INT
3233 && const_ok_for_arm (INTVAL (XEXP (x, 0)))
3234 && REG_OR_SUBREG_REG (XEXP (x, 1))))
3235 || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
3236 || subcode == ASHIFTRT || subcode == LSHIFTRT
3237 || subcode == ROTATE || subcode == ROTATERT
3238 || (subcode == MULT
3239 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3240 && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
3241 (INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
3242 && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
3243 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
3244 || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
3245 && REG_OR_SUBREG_REG (XEXP (x, 0))))
3246 return 1;
3247 /* Fall through */
3249 case PLUS:
3250 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3251 return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
3252 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
3253 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
3254 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
3255 ? 0 : 8));
3257 /* Fall through */
3258 case AND: case XOR: case IOR:
3259 extra_cost = 0;
3261 /* Normally the frame registers will be split into reg+const during
3262 reload, so it is a bad idea to combine them with other instructions,
3263 since then they might not be moved outside of loops. As a compromise
3264 we allow integration with ops that have a constant as their second
3265 operand. */
3266 if ((REG_OR_SUBREG_REG (XEXP (x, 0))
3267 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
3268 && GET_CODE (XEXP (x, 1)) != CONST_INT)
3269 || (REG_OR_SUBREG_REG (XEXP (x, 1))
3270 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 1)))))
3271 extra_cost = 4;
3273 if (mode == DImode)
3274 return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
3275 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
3276 || (GET_CODE (XEXP (x, 1)) == CONST_INT
3277 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
3278 ? 0 : 8));
3280 if (REG_OR_SUBREG_REG (XEXP (x, 0)))
3281 return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
3282 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
3283 || (GET_CODE (XEXP (x, 1)) == CONST_INT
3284 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
3285 ? 0 : 4));
3287 else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
3288 return (1 + extra_cost
3289 + ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
3290 || subcode == LSHIFTRT || subcode == ASHIFTRT
3291 || subcode == ROTATE || subcode == ROTATERT
3292 || (subcode == MULT
3293 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3294 && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
3295 (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0)))
3296 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
3297 && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
3298 || GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
3299 ? 0 : 4));
3301 return 8;
3303 case MULT:
3304 /* There is no point basing this on the tuning, since it is always the
3305 fast variant if it exists at all. */
3306 if (arm_fast_multiply && mode == DImode
3307 && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
3308 && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
3309 || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
3310 return 8;
3312 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3313 || mode == DImode)
3314 return 30;
3316 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
3318 unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
3319 & (unsigned HOST_WIDE_INT) 0xffffffff);
3320 int add_cost = const_ok_for_arm (i) ? 4 : 8;
3321 int j;
3323 /* Tune as appropriate. */
3324 int booth_unit_size = ((tune_flags & FL_FAST_MULT) ? 8 : 2);
3326 for (j = 0; i && j < 32; j += booth_unit_size)
3328 i >>= booth_unit_size;
3329 add_cost += 2;
3332 return add_cost;
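/* Worked example: with an 8-bit Booth unit (FL_FAST_MULT) a multiplier
   of 0x12345678 takes four steps, adding 2 per step to ADD_COST; a
   2-bit unit would need fifteen steps for the same constant.  */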
3335 return (((tune_flags & FL_FAST_MULT) ? 8 : 30)
3336 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
3337 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));
3339 case TRUNCATE:
3340 if (arm_fast_multiply && mode == SImode
3341 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
3342 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
3343 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
3344 == GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)))
3345 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ZERO_EXTEND
3346 || GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SIGN_EXTEND))
3347 return 8;
3348 return 99;
3350 case NEG:
3351 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3352 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
3353 /* Fall through */
3354 case NOT:
3355 if (mode == DImode)
3356 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
3358 return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
3360 case IF_THEN_ELSE:
3361 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
3362 return 14;
3363 return 2;
3365 case COMPARE:
3366 return 1;
3368 case ABS:
3369 return 4 + (mode == DImode ? 4 : 0);
3371 case SIGN_EXTEND:
3372 if (GET_MODE (XEXP (x, 0)) == QImode)
3373 return (4 + (mode == DImode ? 4 : 0)
3374 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3375 /* Fall through */
3376 case ZERO_EXTEND:
3377 switch (GET_MODE (XEXP (x, 0)))
3379 case QImode:
3380 return (1 + (mode == DImode ? 4 : 0)
3381 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3383 case HImode:
3384 return (4 + (mode == DImode ? 4 : 0)
3385 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3387 case SImode:
3388 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3390 default:
3391 break;
3393 abort ();
3395 case CONST_INT:
3396 if (const_ok_for_arm (INTVAL (x)))
3397 return outer == SET ? 2 : -1;
3398 else if (outer == AND
3399 && const_ok_for_arm (~INTVAL (x)))
3400 return -1;
3401 else if ((outer == COMPARE
3402 || outer == PLUS || outer == MINUS)
3403 && const_ok_for_arm (-INTVAL (x)))
3404 return -1;
3405 else
3406 return 5;
3408 case CONST:
3409 case LABEL_REF:
3410 case SYMBOL_REF:
3411 return 6;
3413 case CONST_DOUBLE:
3414 if (const_double_rtx_ok_for_fpu (x))
3415 return outer == SET ? 2 : -1;
3416 else if ((outer == COMPARE || outer == PLUS)
3417 && neg_const_double_rtx_ok_for_fpu (x))
3418 return -1;
3419 return 7;
3421 default:
3422 return 99;
3426 static bool
3427 arm_rtx_costs (x, code, outer_code, total)
3428 rtx x;
3429 int code, outer_code;
3430 int *total;
3432 *total = arm_rtx_costs_1 (x, code, outer_code);
3433 return true;
3436 /* All address computations that can be done are free, but rtx cost returns
3437 the same for practically all of them. So we weight the different types
3438 of address here in the order (most pref first):
3439 PRE/POST_INC/DEC, SHIFT or NON-INT sum, INT sum, REG, MEM or LABEL. */
3441 static int
3442 arm_address_cost (X)
3443 rtx X;
3445 #define ARM_ADDRESS_COST(X) \
3446 (10 - ((GET_CODE (X) == MEM || GET_CODE (X) == LABEL_REF \
3447 || GET_CODE (X) == SYMBOL_REF) \
3448 ? 0 \
3449 : ((GET_CODE (X) == PRE_INC || GET_CODE (X) == PRE_DEC \
3450 || GET_CODE (X) == POST_INC || GET_CODE (X) == POST_DEC) \
3451 ? 10 \
3452 : (((GET_CODE (X) == PLUS || GET_CODE (X) == MINUS) \
3453 ? 6 + (GET_CODE (XEXP (X, 1)) == CONST_INT ? 2 \
3454 : ((GET_RTX_CLASS (GET_CODE (XEXP (X, 0))) == '2' \
3455 || GET_RTX_CLASS (GET_CODE (XEXP (X, 0))) == 'c' \
3456 || GET_RTX_CLASS (GET_CODE (XEXP (X, 1))) == '2' \
3457 || GET_RTX_CLASS (GET_CODE (XEXP (X, 1))) == 'c') \
3458 ? 1 : 0)) \
3459 : 4)))))
3461 #define THUMB_ADDRESS_COST(X) \
3462 ((GET_CODE (X) == REG \
3463 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 0)) == REG \
3464 && GET_CODE (XEXP (X, 1)) == CONST_INT)) \
3465 ? 1 : 2)
3467 return (TARGET_ARM ? ARM_ADDRESS_COST (X) : THUMB_ADDRESS_COST (X));
3470 static int
3471 arm_adjust_cost (insn, link, dep, cost)
3472 rtx insn;
3473 rtx link;
3474 rtx dep;
3475 int cost;
3477 rtx i_pat, d_pat;
3479 /* Some true dependencies can have a higher cost depending
3480 on precisely how certain input operands are used. */
3481 if (arm_is_xscale
3482 && REG_NOTE_KIND (link) == 0
3483 && recog_memoized (insn) < 0
3484 && recog_memoized (dep) < 0)
3486 int shift_opnum = get_attr_shift (insn);
3487 enum attr_type attr_type = get_attr_type (dep);
3489 /* If nonzero, SHIFT_OPNUM contains the operand number of a shifted
3490 operand for INSN. If we have a shifted input operand and the
3491 instruction we depend on is another ALU instruction, then we may
3492 have to account for an additional stall. */
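/* For example (illustrative XScale pair):
        add  r1, r2, r3          @ ALU result in r1 ...
        add  r0, r4, r1, lsl #2  @ ... feeds the shifter of another
   ALU op, so the dependency below is charged an extra cycle.  */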
3493 if (shift_opnum != 0 && attr_type == TYPE_NORMAL)
3495 rtx shifted_operand;
3496 int opno;
3498 /* Get the shifted operand. */
3499 extract_insn (insn);
3500 shifted_operand = recog_data.operand[shift_opnum];
3502 /* Iterate over all the operands in DEP. If we write an operand
3503 that overlaps with SHIFTED_OPERAND, then we have to increase the
3504 cost of this dependency. */
3505 extract_insn (dep);
3506 preprocess_constraints ();
3507 for (opno = 0; opno < recog_data.n_operands; opno++)
3509 /* We can ignore strict inputs. */
3510 if (recog_data.operand_type[opno] == OP_IN)
3511 continue;
3513 if (reg_overlap_mentioned_p (recog_data.operand[opno],
3514 shifted_operand))
3515 return 2;
3520 /* XXX This is not strictly true for the FPA. */
3521 if (REG_NOTE_KIND (link) == REG_DEP_ANTI
3522 || REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
3523 return 0;
3525 /* Call insns don't incur a stall, even if they follow a load. */
3526 if (REG_NOTE_KIND (link) == 0
3527 && GET_CODE (insn) == CALL_INSN)
3528 return 1;
3530 if ((i_pat = single_set (insn)) != NULL
3531 && GET_CODE (SET_SRC (i_pat)) == MEM
3532 && (d_pat = single_set (dep)) != NULL
3533 && GET_CODE (SET_DEST (d_pat)) == MEM)
3535 rtx src_mem = XEXP (SET_SRC (i_pat), 0);
3536 /* This is a load after a store, there is no conflict if the load reads
3537 from a cached area. Assume that loads from the stack, and from the
3538 constant pool are cached, and that others will miss. This is a
3539 hack. */
3541 if ((GET_CODE (src_mem) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (src_mem))
3542 || reg_mentioned_p (stack_pointer_rtx, src_mem)
3543 || reg_mentioned_p (frame_pointer_rtx, src_mem)
3544 || reg_mentioned_p (hard_frame_pointer_rtx, src_mem))
3545 return 1;
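/* E.g. "str r0, [sp, #8]" followed by "ldr r1, [sp, #12]" is treated
   as hitting the cache (a stack access), so no extra latency is
   charged; a load through an arbitrary pointer is assumed to miss.  */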
3548 return cost;
3551 /* This code has been fixed for cross compilation. */
3553 static int fpa_consts_inited = 0;
3555 static const char * const strings_fpa[8] =
3557 "0", "1", "2", "3",
3558 "4", "5", "0.5", "10"
3561 static REAL_VALUE_TYPE values_fpa[8];
3563 static void
3564 init_fpa_table ()
3566 int i;
3567 REAL_VALUE_TYPE r;
3569 for (i = 0; i < 8; i++)
3571 r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
3572 values_fpa[i] = r;
3575 fpa_consts_inited = 1;
3578 /* Return TRUE if rtx X is a valid immediate FPU constant. */
3581 const_double_rtx_ok_for_fpu (x)
3582 rtx x;
3584 REAL_VALUE_TYPE r;
3585 int i;
3587 if (!fpa_consts_inited)
3588 init_fpa_table ();
3590 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3591 if (REAL_VALUE_MINUS_ZERO (r))
3592 return 0;
3594 for (i = 0; i < 8; i++)
3595 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
3596 return 1;
3598 return 0;
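/* Consequently only the eight values in strings_fpa above can be used
   as FPA immediates: e.g. 0.5 is valid, while 7.5 and -0.0 are not and
   must be loaded from memory instead.  */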
3601 /* Return TRUE if rtx X, when negated, is a valid immediate FPU constant. */
3604 neg_const_double_rtx_ok_for_fpu (x)
3605 rtx x;
3607 REAL_VALUE_TYPE r;
3608 int i;
3610 if (!fpa_consts_inited)
3611 init_fpa_table ();
3613 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3614 r = REAL_VALUE_NEGATE (r);
3615 if (REAL_VALUE_MINUS_ZERO (r))
3616 return 0;
3618 for (i = 0; i < 8; i++)
3619 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
3620 return 1;
3622 return 0;
3625 /* Predicates for `match_operand' and `match_operator'. */
3627 /* s_register_operand is the same as register_operand, but it doesn't accept
3628 (SUBREG (MEM)...).
3630 This function exists because at the time it was put in it led to better
3631 code. SUBREG(MEM) always needs a reload in the places where
3632 s_register_operand is used, and this seemed to lead to excessive
3633 reloading. */
3636 s_register_operand (op, mode)
3637 rtx op;
3638 enum machine_mode mode;
3640 if (GET_MODE (op) != mode && mode != VOIDmode)
3641 return 0;
3643 if (GET_CODE (op) == SUBREG)
3644 op = SUBREG_REG (op);
3646 /* We don't consider registers whose class is NO_REGS
3647 to be a register operand. */
3648 /* XXX might have to check for lo regs only for thumb ??? */
3649 return (GET_CODE (op) == REG
3650 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3651 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
3654 /* A hard register operand (even before reload). */
3657 arm_hard_register_operand (op, mode)
3658 rtx op;
3659 enum machine_mode mode;
3661 if (GET_MODE (op) != mode && mode != VOIDmode)
3662 return 0;
3664 return (GET_CODE (op) == REG
3665 && REGNO (op) < FIRST_PSEUDO_REGISTER);
3668 /* Only accept reg, subreg(reg), const_int. */
3671 reg_or_int_operand (op, mode)
3672 rtx op;
3673 enum machine_mode mode;
3675 if (GET_CODE (op) == CONST_INT)
3676 return 1;
3678 if (GET_MODE (op) != mode && mode != VOIDmode)
3679 return 0;
3681 if (GET_CODE (op) == SUBREG)
3682 op = SUBREG_REG (op);
3684 /* We don't consider registers whose class is NO_REGS
3685 to be a register operand. */
3686 return (GET_CODE (op) == REG
3687 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3688 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
3691 /* Return 1 if OP is an item in memory, given that we are in reload. */
3694 arm_reload_memory_operand (op, mode)
3695 rtx op;
3696 enum machine_mode mode ATTRIBUTE_UNUSED;
3698 int regno = true_regnum (op);
3700 return (!CONSTANT_P (op)
3701 && (regno == -1
3702 || (GET_CODE (op) == REG
3703 && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
3706 /* Return 1 if OP is a valid memory address, but not valid for a signed byte
3707 memory access (architecture V4).
3708 MODE is QImode if called when computing constraints, or VOIDmode when
3709 emitting patterns. In this latter case we cannot use memory_operand()
3710 because it will fail on badly formed MEMs, which is precisely what we are
3711 trying to catch. */
3714 bad_signed_byte_operand (op, mode)
3715 rtx op;
3716 enum machine_mode mode ATTRIBUTE_UNUSED;
3718 #if 0
3719 if ((mode == QImode && !memory_operand (op, mode)) || GET_CODE (op) != MEM)
3720 return 0;
3721 #endif
3722 if (GET_CODE (op) != MEM)
3723 return 0;
3725 op = XEXP (op, 0);
3727 /* A sum of anything more complex than reg + reg or reg + const is bad. */
3728 if ((GET_CODE (op) == PLUS || GET_CODE (op) == MINUS)
3729 && (!s_register_operand (XEXP (op, 0), VOIDmode)
3730 || (!s_register_operand (XEXP (op, 1), VOIDmode)
3731 && GET_CODE (XEXP (op, 1)) != CONST_INT)))
3732 return 1;
3734 /* Big constants are also bad. */
3735 if (GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT
3736 && (INTVAL (XEXP (op, 1)) > 0xff
3737 || -INTVAL (XEXP (op, 1)) > 0xff))
3738 return 1;
3740 /* Everything else is good, or will automatically be made so. */
3741 return 0;
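/* For example, "ldrsb r0, [r1, r2, lsl #2]" does not exist on ARMv4:
   the signed byte loads accept only a plain register offset or an
   immediate in the range -255..255, which is what is rejected above.  */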
3744 /* Return TRUE for valid operands for the rhs of an ARM instruction. */
3747 arm_rhs_operand (op, mode)
3748 rtx op;
3749 enum machine_mode mode;
3751 return (s_register_operand (op, mode)
3752 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
3755 /* Return TRUE for valid operands for the
3756 rhs of an ARM instruction, or a load. */
3759 arm_rhsm_operand (op, mode)
3760 rtx op;
3761 enum machine_mode mode;
3763 return (s_register_operand (op, mode)
3764 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
3765 || memory_operand (op, mode));
3768 /* Return TRUE for valid operands for the rhs of an ARM instruction, or if a
3769 constant that is valid when negated. */
3772 arm_add_operand (op, mode)
3773 rtx op;
3774 enum machine_mode mode;
3776 if (TARGET_THUMB)
3777 return thumb_cmp_operand (op, mode);
3779 return (s_register_operand (op, mode)
3780 || (GET_CODE (op) == CONST_INT
3781 && (const_ok_for_arm (INTVAL (op))
3782 || const_ok_for_arm (-INTVAL (op)))));
3786 arm_not_operand (op, mode)
3787 rtx op;
3788 enum machine_mode mode;
3790 return (s_register_operand (op, mode)
3791 || (GET_CODE (op) == CONST_INT
3792 && (const_ok_for_arm (INTVAL (op))
3793 || const_ok_for_arm (~INTVAL (op)))));
3796 /* Return TRUE if the operand is a memory reference which contains an
3797 offsettable address. */
3800 offsettable_memory_operand (op, mode)
3801 rtx op;
3802 enum machine_mode mode;
3804 if (mode == VOIDmode)
3805 mode = GET_MODE (op);
3807 return (mode == GET_MODE (op)
3808 && GET_CODE (op) == MEM
3809 && offsettable_address_p (reload_completed | reload_in_progress,
3810 mode, XEXP (op, 0)));
3813 /* Return TRUE if the operand is a memory reference which is, or can be
3814 made word aligned by adjusting the offset. */
3817 alignable_memory_operand (op, mode)
3818 rtx op;
3819 enum machine_mode mode;
3821 rtx reg;
3823 if (mode == VOIDmode)
3824 mode = GET_MODE (op);
3826 if (mode != GET_MODE (op) || GET_CODE (op) != MEM)
3827 return 0;
3829 op = XEXP (op, 0);
3831 return ((GET_CODE (reg = op) == REG
3832 || (GET_CODE (op) == SUBREG
3833 && GET_CODE (reg = SUBREG_REG (op)) == REG)
3834 || (GET_CODE (op) == PLUS
3835 && GET_CODE (XEXP (op, 1)) == CONST_INT
3836 && (GET_CODE (reg = XEXP (op, 0)) == REG
3837 || (GET_CODE (XEXP (op, 0)) == SUBREG
3838 && GET_CODE (reg = SUBREG_REG (XEXP (op, 0))) == REG))))
3839 && REGNO_POINTER_ALIGN (REGNO (reg)) >= 32);
3842 /* Similar to s_register_operand, but does not allow hard integer
3843 registers. */
3846 f_register_operand (op, mode)
3847 rtx op;
3848 enum machine_mode mode;
3850 if (GET_MODE (op) != mode && mode != VOIDmode)
3851 return 0;
3853 if (GET_CODE (op) == SUBREG)
3854 op = SUBREG_REG (op);
3856 /* We don't consider registers whose class is NO_REGS
3857 to be a register operand. */
3858 return (GET_CODE (op) == REG
3859 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3860 || REGNO_REG_CLASS (REGNO (op)) == FPU_REGS));
3863 /* Return TRUE for valid operands for the rhs of an FPU instruction. */
3866 fpu_rhs_operand (op, mode)
3867 rtx op;
3868 enum machine_mode mode;
3870 if (s_register_operand (op, mode))
3871 return TRUE;
3873 if (GET_MODE (op) != mode && mode != VOIDmode)
3874 return FALSE;
3876 if (GET_CODE (op) == CONST_DOUBLE)
3877 return const_double_rtx_ok_for_fpu (op);
3879 return FALSE;
3883 fpu_add_operand (op, mode)
3884 rtx op;
3885 enum machine_mode mode;
3887 if (s_register_operand (op, mode))
3888 return TRUE;
3890 if (GET_MODE (op) != mode && mode != VOIDmode)
3891 return FALSE;
3893 if (GET_CODE (op) == CONST_DOUBLE)
3894 return (const_double_rtx_ok_for_fpu (op)
3895 || neg_const_double_rtx_ok_for_fpu (op));
3897 return FALSE;
3900 /* Return nonzero if OP is a valid Cirrus memory address pattern. */
3903 cirrus_memory_offset (op)
3904 rtx op;
3906 /* Reject eliminable registers. */
3907 if (! (reload_in_progress || reload_completed)
3908 && ( reg_mentioned_p (frame_pointer_rtx, op)
3909 || reg_mentioned_p (arg_pointer_rtx, op)
3910 || reg_mentioned_p (virtual_incoming_args_rtx, op)
3911 || reg_mentioned_p (virtual_outgoing_args_rtx, op)
3912 || reg_mentioned_p (virtual_stack_dynamic_rtx, op)
3913 || reg_mentioned_p (virtual_stack_vars_rtx, op)))
3914 return 0;
3916 if (GET_CODE (op) == MEM)
3918 rtx ind;
3920 ind = XEXP (op, 0);
3922 /* Match: (mem (reg)). */
3923 if (GET_CODE (ind) == REG)
3924 return 1;
3926 /* Match:
3927 (mem (plus (reg)
3928 (const))). */
3929 if (GET_CODE (ind) == PLUS
3930 && GET_CODE (XEXP (ind, 0)) == REG
3931 && REG_MODE_OK_FOR_BASE_P (XEXP (ind, 0), VOIDmode)
3932 && GET_CODE (XEXP (ind, 1)) == CONST_INT)
3933 return 1;
3936 return 0;
3939 /* Return nonzero if OP is a Cirrus or general register. */
3942 cirrus_register_operand (op, mode)
3943 rtx op;
3944 enum machine_mode mode;
3946 if (GET_MODE (op) != mode && mode != VOIDmode)
3947 return FALSE;
3949 if (GET_CODE (op) == SUBREG)
3950 op = SUBREG_REG (op);
3952 return (GET_CODE (op) == REG
3953 && (REGNO_REG_CLASS (REGNO (op)) == CIRRUS_REGS
3954 || REGNO_REG_CLASS (REGNO (op)) == GENERAL_REGS));
3957 /* Return nonzero if OP is a Cirrus FP register. */
3960 cirrus_fp_register (op, mode)
3961 rtx op;
3962 enum machine_mode mode;
3964 if (GET_MODE (op) != mode && mode != VOIDmode)
3965 return FALSE;
3967 if (GET_CODE (op) == SUBREG)
3968 op = SUBREG_REG (op);
3970 return (GET_CODE (op) == REG
3971 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3972 || REGNO_REG_CLASS (REGNO (op)) == CIRRUS_REGS));
3975 /* Return nonzero if OP is a 6-bit constant (0..63). */
3978 cirrus_shift_const (op, mode)
3979 rtx op;
3980 enum machine_mode mode ATTRIBUTE_UNUSED;
3982 return (GET_CODE (op) == CONST_INT
3983 && INTVAL (op) >= 0
3984 && INTVAL (op) < 64);
3987 /* Return nonzero if INSN is an LDR Rd,ADDR instruction loading a general register. */
3989 static int
3990 is_load_address (insn)
3991 rtx insn;
3993 rtx body, lhs, rhs;
3995 if (!insn)
3996 return 0;
3998 if (GET_CODE (insn) != INSN)
3999 return 0;
4001 body = PATTERN (insn);
4003 if (GET_CODE (body) != SET)
4004 return 0;
4006 lhs = XEXP (body, 0);
4007 rhs = XEXP (body, 1);
4009 return (GET_CODE (lhs) == REG
4010 && REGNO_REG_CLASS (REGNO (lhs)) == GENERAL_REGS
4011 && (GET_CODE (rhs) == MEM
4012 || GET_CODE (rhs) == SYMBOL_REF));
4015 /* Return nonzero if INSN is a Cirrus instruction. */
4017 static int
4018 is_cirrus_insn (insn)
4019 rtx insn;
4021 enum attr_cirrus attr;
4023 /* get_attr aborts on USE and CLOBBER. */
4024 if (!insn
4025 || GET_CODE (insn) != INSN
4026 || GET_CODE (PATTERN (insn)) == USE
4027 || GET_CODE (PATTERN (insn)) == CLOBBER)
4028 return 0;
4030 attr = get_attr_cirrus (insn);
4032 return attr != CIRRUS_NO;
4035 /* Cirrus reorg for invalid instruction combinations. */
4037 static void
4038 cirrus_reorg (first)
4039 rtx first;
4041 enum attr_cirrus attr;
4042 rtx body = PATTERN (first);
4043 rtx t;
4044 int nops;
4046 /* Any branch must be followed by 2 non-Cirrus instructions. */
4047 if (GET_CODE (first) == JUMP_INSN && GET_CODE (body) != RETURN)
4049 nops = 0;
4050 t = next_nonnote_insn (first);
4052 if (is_cirrus_insn (t))
4053 ++ nops;
4055 if (is_cirrus_insn (next_nonnote_insn (t)))
4056 ++ nops;
4058 while (nops --)
4059 emit_insn_after (gen_nop (), first);
4061 return;
4064 /* (float (blah)) is in parallel with a clobber. */
4065 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
4066 body = XVECEXP (body, 0, 0);
4068 if (GET_CODE (body) == SET)
4070 rtx lhs = XEXP (body, 0), rhs = XEXP (body, 1);
4072 /* cfldrd, cfldr64, cfstrd, cfstr64 must
4073 be followed by a non-Cirrus insn. */
4074 if (get_attr_cirrus (first) == CIRRUS_DOUBLE)
4076 if (is_cirrus_insn (next_nonnote_insn (first)))
4077 emit_insn_after (gen_nop (), first);
4079 return;
4081 else if (is_load_address (first))
4083 unsigned int arm_regno;
4085 /* Any ldr/cfmvdlr, ldr/cfmvdhr, ldr/cfmvsr, ldr/cfmv64lr,
4086 ldr/cfmv64hr combination where the Rd field is the same
4087 in both instructions must be split with a non-Cirrus
4088 insn. Example:
4090 ldr r0, blah
4092 cfmvsr mvf0, r0. */
4094 /* Get Arm register number for ldr insn. */
4095 if (GET_CODE (lhs) == REG)
4096 arm_regno = REGNO (lhs);
4097 else if (GET_CODE (rhs) == REG)
4098 arm_regno = REGNO (rhs);
4099 else
4100 abort ();
4102 /* Next insn. */
4103 first = next_nonnote_insn (first);
4105 if (!is_cirrus_insn (first))
4106 return;
4108 body = PATTERN (first);
4110 /* (float (blah)) is in parallel with a clobber. */
4111 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0))
4112 body = XVECEXP (body, 0, 0);
4114 if (GET_CODE (body) == FLOAT)
4115 body = XEXP (body, 0);
4117 if (get_attr_cirrus (first) == CIRRUS_MOVE
4118 && GET_CODE (XEXP (body, 1)) == REG
4119 && arm_regno == REGNO (XEXP (body, 1)))
4120 emit_insn_after (gen_nop (), first);
4122 return;
4126 /* get_attr aborts on USE and CLOBBER. */
4127 if (!first
4128 || GET_CODE (first) != INSN
4129 || GET_CODE (PATTERN (first)) == USE
4130 || GET_CODE (PATTERN (first)) == CLOBBER)
4131 return;
4133 attr = get_attr_cirrus (first);
4135 /* Any coprocessor compare instruction (cfcmps, cfcmpd, ...)
4136 must be followed by a non-coprocessor instruction. */
4137 if (attr == CIRRUS_COMPARE)
4139 nops = 0;
4141 t = next_nonnote_insn (first);
4143 if (is_cirrus_insn (t))
4144 ++ nops;
4146 if (is_cirrus_insn (next_nonnote_insn (t)))
4147 ++ nops;
4149 while (nops --)
4150 emit_insn_after (gen_nop (), first);
4152 return;
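/* A sketch of the compare rule above: if the two insns after a
   coprocessor compare are, say,
      cfcmps mvf0, mvf1, mvf2
      cfadds mvf3, mvf4, mvf5
      cfsubs mvf6, mvf7, mvf8
   then nops == 2 and two nops are emitted directly after the
   compare, restoring the required separation.  */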
4156 /* Return nonzero if OP is a constant power of two. */
4159 power_of_two_operand (op, mode)
4160 rtx op;
4161 enum machine_mode mode ATTRIBUTE_UNUSED;
4163 if (GET_CODE (op) == CONST_INT)
4165 HOST_WIDE_INT value = INTVAL (op);
4167 return value != 0 && (value & (value - 1)) == 0;
4170 return FALSE;
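/* A worked instance of the test above: for value == 8 (binary 1000),
   value - 1 == 7 (0111) and 8 & 7 == 0, so 8 is accepted; for
   value == 12 (1100), 12 & 11 == 8 != 0, so 12 is rejected.
   Clearing the lowest set bit yields zero exactly when a single
   bit was set.  */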
4173 /* Return TRUE for a valid operand of a DImode operation.
4174 Either: REG, SUBREG, CONST_INT, CONST_DOUBLE or MEM(DImode_address).
4175 Note that this disallows MEM(REG+REG), but allows
4176 MEM(PRE/POST_INC/DEC(REG)). */
4179 di_operand (op, mode)
4180 rtx op;
4181 enum machine_mode mode;
4183 if (s_register_operand (op, mode))
4184 return TRUE;
4186 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
4187 return FALSE;
4189 if (GET_CODE (op) == SUBREG)
4190 op = SUBREG_REG (op);
4192 switch (GET_CODE (op))
4194 case CONST_DOUBLE:
4195 case CONST_INT:
4196 return TRUE;
4198 case MEM:
4199 return memory_address_p (DImode, XEXP (op, 0));
4201 default:
4202 return FALSE;
4206 /* Like di_operand, but don't accept constants. */
4209 nonimmediate_di_operand (op, mode)
4210 rtx op;
4211 enum machine_mode mode;
4213 if (s_register_operand (op, mode))
4214 return TRUE;
4216 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
4217 return FALSE;
4219 if (GET_CODE (op) == SUBREG)
4220 op = SUBREG_REG (op);
4222 if (GET_CODE (op) == MEM)
4223 return memory_address_p (DImode, XEXP (op, 0));
4225 return FALSE;
4228 /* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
4229 Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
4230 Note that this disallows MEM(REG+REG), but allows
4231 MEM(PRE/POST_INC/DEC(REG)). */
4234 soft_df_operand (op, mode)
4235 rtx op;
4236 enum machine_mode mode;
4238 if (s_register_operand (op, mode))
4239 return TRUE;
4241 if (mode != VOIDmode && GET_MODE (op) != mode)
4242 return FALSE;
4244 if (GET_CODE (op) == SUBREG && CONSTANT_P (SUBREG_REG (op)))
4245 return FALSE;
4247 if (GET_CODE (op) == SUBREG)
4248 op = SUBREG_REG (op);
4250 switch (GET_CODE (op))
4252 case CONST_DOUBLE:
4253 return TRUE;
4255 case MEM:
4256 return memory_address_p (DFmode, XEXP (op, 0));
4258 default:
4259 return FALSE;
4263 /* Like soft_df_operand, but don't accept constants. */
4266 nonimmediate_soft_df_operand (op, mode)
4267 rtx op;
4268 enum machine_mode mode;
4270 if (s_register_operand (op, mode))
4271 return TRUE;
4273 if (mode != VOIDmode && GET_MODE (op) != mode)
4274 return FALSE;
4276 if (GET_CODE (op) == SUBREG)
4277 op = SUBREG_REG (op);
4279 if (GET_CODE (op) == MEM)
4280 return memory_address_p (DFmode, XEXP (op, 0));
4281 return FALSE;
4284 /* Return TRUE for valid index operands. */
4287 index_operand (op, mode)
4288 rtx op;
4289 enum machine_mode mode;
4291 return (s_register_operand (op, mode)
4292 || (immediate_operand (op, mode)
4293 && (GET_CODE (op) != CONST_INT
4294 || (INTVAL (op) < 4096 && INTVAL (op) > -4096))));
4297 /* Return TRUE for valid shifts by a constant. This also accepts any
4298 power of two on the (somewhat overly relaxed) assumption that the
4299 shift operator in this case was a mult. */
4302 const_shift_operand (op, mode)
4303 rtx op;
4304 enum machine_mode mode;
4306 return (power_of_two_operand (op, mode)
4307 || (immediate_operand (op, mode)
4308 && (GET_CODE (op) != CONST_INT
4309 || (INTVAL (op) < 32 && INTVAL (op) > 0))));
4312 /* Return TRUE for arithmetic operators which can be combined with a multiply
4313 (shift). */
4316 shiftable_operator (x, mode)
4317 rtx x;
4318 enum machine_mode mode;
4320 enum rtx_code code;
4322 if (GET_MODE (x) != mode)
4323 return FALSE;
4325 code = GET_CODE (x);
4327 return (code == PLUS || code == MINUS
4328 || code == IOR || code == XOR || code == AND);
4331 /* Return TRUE for binary logical operators. */
4334 logical_binary_operator (x, mode)
4335 rtx x;
4336 enum machine_mode mode;
4338 enum rtx_code code;
4340 if (GET_MODE (x) != mode)
4341 return FALSE;
4343 code = GET_CODE (x);
4345 return (code == IOR || code == XOR || code == AND);
4348 /* Return TRUE for shift operators. */
4351 shift_operator (x, mode)
4352 rtx x;
4353 enum machine_mode mode;
4355 enum rtx_code code;
4357 if (GET_MODE (x) != mode)
4358 return FALSE;
4360 code = GET_CODE (x);
4362 if (code == MULT)
4363 return power_of_two_operand (XEXP (x, 1), mode);
4365 return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
4366 || code == ROTATERT);
4369 /* Return TRUE if x is EQ or NE. */
4372 equality_operator (x, mode)
4373 rtx x;
4374 enum machine_mode mode ATTRIBUTE_UNUSED;
4376 return GET_CODE (x) == EQ || GET_CODE (x) == NE;
4379 /* Return TRUE if x is a comparison operator other than LTGT or UNEQ. */
4382 arm_comparison_operator (x, mode)
4383 rtx x;
4384 enum machine_mode mode;
4386 return (comparison_operator (x, mode)
4387 && GET_CODE (x) != LTGT
4388 && GET_CODE (x) != UNEQ);
4391 /* Return TRUE for SMIN SMAX UMIN UMAX operators. */
4394 minmax_operator (x, mode)
4395 rtx x;
4396 enum machine_mode mode;
4398 enum rtx_code code = GET_CODE (x);
4400 if (GET_MODE (x) != mode)
4401 return FALSE;
4403 return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
4406 /* Return TRUE if this is the condition code register; if we aren't
4407 given a mode, accept any register whose mode is in class MODE_CC. */
4410 cc_register (x, mode)
4411 rtx x;
4412 enum machine_mode mode;
4414 if (mode == VOIDmode)
4416 mode = GET_MODE (x);
4418 if (GET_MODE_CLASS (mode) != MODE_CC)
4419 return FALSE;
4422 if ( GET_MODE (x) == mode
4423 && GET_CODE (x) == REG
4424 && REGNO (x) == CC_REGNUM)
4425 return TRUE;
4427 return FALSE;
4430 /* Return TRUE if this is the condition code register; if we aren't
4431 given a mode, accept any register whose mode is in class MODE_CC and
4432 indicates a dominance expression. */
4435 dominant_cc_register (x, mode)
4436 rtx x;
4437 enum machine_mode mode;
4439 if (mode == VOIDmode)
4441 mode = GET_MODE (x);
4443 if (GET_MODE_CLASS (mode) != MODE_CC)
4444 return FALSE;
4447 if ( mode != CC_DNEmode && mode != CC_DEQmode
4448 && mode != CC_DLEmode && mode != CC_DLTmode
4449 && mode != CC_DGEmode && mode != CC_DGTmode
4450 && mode != CC_DLEUmode && mode != CC_DLTUmode
4451 && mode != CC_DGEUmode && mode != CC_DGTUmode)
4452 return FALSE;
4454 return cc_register (x, mode);
4457 /* Return TRUE if X references a SYMBOL_REF. */
4460 symbol_mentioned_p (x)
4461 rtx x;
4463 const char * fmt;
4464 int i;
4466 if (GET_CODE (x) == SYMBOL_REF)
4467 return 1;
4469 fmt = GET_RTX_FORMAT (GET_CODE (x));
4471 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4473 if (fmt[i] == 'E')
4475 int j;
4477 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4478 if (symbol_mentioned_p (XVECEXP (x, i, j)))
4479 return 1;
4481 else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
4482 return 1;
4485 return 0;
4488 /* Return TRUE if X references a LABEL_REF. */
4491 label_mentioned_p (x)
4492 rtx x;
4494 const char * fmt;
4495 int i;
4497 if (GET_CODE (x) == LABEL_REF)
4498 return 1;
4500 fmt = GET_RTX_FORMAT (GET_CODE (x));
4501 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4503 if (fmt[i] == 'E')
4505 int j;
4507 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4508 if (label_mentioned_p (XVECEXP (x, i, j)))
4509 return 1;
4511 else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
4512 return 1;
4515 return 0;
4518 enum rtx_code
4519 minmax_code (x)
4520 rtx x;
4522 enum rtx_code code = GET_CODE (x);
4524 if (code == SMAX)
4525 return GE;
4526 else if (code == SMIN)
4527 return LE;
4528 else if (code == UMIN)
4529 return LEU;
4530 else if (code == UMAX)
4531 return GEU;
4533 abort ();
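/* The code returned is the condition under which the first operand is
   selected: for (smax a b), for instance, the result is "a" exactly
   when a >= b, hence GE; the other three cases follow the same
   pattern.  */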
4536 /* Return 1 if memory locations are adjacent. */
4539 adjacent_mem_locations (a, b)
4540 rtx a, b;
4542 if ((GET_CODE (XEXP (a, 0)) == REG
4543 || (GET_CODE (XEXP (a, 0)) == PLUS
4544 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
4545 && (GET_CODE (XEXP (b, 0)) == REG
4546 || (GET_CODE (XEXP (b, 0)) == PLUS
4547 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
4549 int val0 = 0, val1 = 0;
4550 int reg0, reg1;
4552 if (GET_CODE (XEXP (a, 0)) == PLUS)
4554 reg0 = REGNO (XEXP (XEXP (a, 0), 0));
4555 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
4557 else
4558 reg0 = REGNO (XEXP (a, 0));
4560 if (GET_CODE (XEXP (b, 0)) == PLUS)
4562 reg1 = REGNO (XEXP (XEXP (b, 0), 0));
4563 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
4565 else
4566 reg1 = REGNO (XEXP (b, 0));
4568 return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
4570 return 0;
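/* For instance, (mem:SI (reg:SI 4)) and
   (mem:SI (plus:SI (reg:SI 4) (const_int 4))) are adjacent: the base
   registers match and the offsets differ by exactly one word, in
   either direction.  */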
4573 /* Return 1 if OP is a load multiple operation. It is known to be
4574 parallel and the first section will be tested. */
4577 load_multiple_operation (op, mode)
4578 rtx op;
4579 enum machine_mode mode ATTRIBUTE_UNUSED;
4581 HOST_WIDE_INT count = XVECLEN (op, 0);
4582 int dest_regno;
4583 rtx src_addr;
4584 HOST_WIDE_INT i = 1, base = 0;
4585 rtx elt;
4587 if (count <= 1
4588 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
4589 return 0;
4591 /* Check to see if this might be a write-back. */
4592 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
4594 i++;
4595 base = 1;
4597 /* Now check it more carefully. */
4598 if (GET_CODE (SET_DEST (elt)) != REG
4599 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
4600 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
4601 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
4602 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
4603 return 0;
4606 /* Perform a quick check so we don't blow up below. */
4607 if (count <= i
4608 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
4609 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
4610 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
4611 return 0;
4613 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
4614 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);
4616 for (; i < count; i++)
4618 elt = XVECEXP (op, 0, i);
4620 if (GET_CODE (elt) != SET
4621 || GET_CODE (SET_DEST (elt)) != REG
4622 || GET_MODE (SET_DEST (elt)) != SImode
4623 || REGNO (SET_DEST (elt)) != (unsigned int)(dest_regno + i - base)
4624 || GET_CODE (SET_SRC (elt)) != MEM
4625 || GET_MODE (SET_SRC (elt)) != SImode
4626 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
4627 || !rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
4628 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
4629 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
4630 return 0;
4633 return 1;
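/* A matching (non write-back) PARALLEL for three registers would look
   roughly like:
      (parallel
        [(set (reg:SI 4) (mem:SI (reg:SI 1)))
         (set (reg:SI 5) (mem:SI (plus:SI (reg:SI 1) (const_int 4))))
         (set (reg:SI 6) (mem:SI (plus:SI (reg:SI 1) (const_int 8))))])
   i.e. consecutive registers loaded from consecutive word offsets.  */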
4636 /* Return 1 if OP is a store multiple operation. It is known to be
4637 parallel and the first section will be tested. */
4640 store_multiple_operation (op, mode)
4641 rtx op;
4642 enum machine_mode mode ATTRIBUTE_UNUSED;
4644 HOST_WIDE_INT count = XVECLEN (op, 0);
4645 int src_regno;
4646 rtx dest_addr;
4647 HOST_WIDE_INT i = 1, base = 0;
4648 rtx elt;
4650 if (count <= 1
4651 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
4652 return 0;
4654 /* Check to see if this might be a write-back. */
4655 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
4657 i++;
4658 base = 1;
4660 /* Now check it more carefully. */
4661 if (GET_CODE (SET_DEST (elt)) != REG
4662 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
4663 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
4664 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
4665 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
4666 return 0;
4669 /* Perform a quick check so we don't blow up below. */
4670 if (count <= i
4671 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
4672 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
4673 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
4674 return 0;
4676 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
4677 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);
4679 for (; i < count; i++)
4681 elt = XVECEXP (op, 0, i);
4683 if (GET_CODE (elt) != SET
4684 || GET_CODE (SET_SRC (elt)) != REG
4685 || GET_MODE (SET_SRC (elt)) != SImode
4686 || REGNO (SET_SRC (elt)) != (unsigned int)(src_regno + i - base)
4687 || GET_CODE (SET_DEST (elt)) != MEM
4688 || GET_MODE (SET_DEST (elt)) != SImode
4689 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
4690 || !rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
4691 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
4692 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
4693 return 0;
4696 return 1;
4700 load_multiple_sequence (operands, nops, regs, base, load_offset)
4701 rtx * operands;
4702 int nops;
4703 int * regs;
4704 int * base;
4705 HOST_WIDE_INT * load_offset;
4707 int unsorted_regs[4];
4708 HOST_WIDE_INT unsorted_offsets[4];
4709 int order[4];
4710 int base_reg = -1;
4711 int i;
4713 /* Can only handle 2, 3, or 4 insns at present,
4714 though could be easily extended if required. */
4715 if (nops < 2 || nops > 4)
4716 abort ();
4718 /* Loop over the operands and check that the memory references are
4719 suitable (i.e. immediate offsets from the same base register). At
4720 the same time, extract the target register, and the memory
4721 offsets. */
4722 for (i = 0; i < nops; i++)
4724 rtx reg;
4725 rtx offset;
4727 /* Convert a subreg of a mem into the mem itself. */
4728 if (GET_CODE (operands[nops + i]) == SUBREG)
4729 operands[nops + i] = alter_subreg (operands + (nops + i));
4731 if (GET_CODE (operands[nops + i]) != MEM)
4732 abort ();
4734 /* Don't reorder volatile memory references; it doesn't seem worth
4735 looking for the case where the order is ok anyway. */
4736 if (MEM_VOLATILE_P (operands[nops + i]))
4737 return 0;
4739 offset = const0_rtx;
4741 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
4742 || (GET_CODE (reg) == SUBREG
4743 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4744 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
4745 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
4746 == REG)
4747 || (GET_CODE (reg) == SUBREG
4748 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4749 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
4750 == CONST_INT)))
4752 if (i == 0)
4754 base_reg = REGNO (reg);
4755 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
4756 ? REGNO (operands[i])
4757 : REGNO (SUBREG_REG (operands[i])));
4758 order[0] = 0;
4760 else
4762 if (base_reg != (int) REGNO (reg))
4763 /* Not addressed from the same base register. */
4764 return 0;
4766 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
4767 ? REGNO (operands[i])
4768 : REGNO (SUBREG_REG (operands[i])));
4769 if (unsorted_regs[i] < unsorted_regs[order[0]])
4770 order[0] = i;
4773 /* If it isn't an integer register, or if it overwrites the
4774 base register but isn't the last insn in the list, then
4775 we can't do this. */
4776 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14
4777 || (i != nops - 1 && unsorted_regs[i] == base_reg))
4778 return 0;
4780 unsorted_offsets[i] = INTVAL (offset);
4782 else
4783 /* Not a suitable memory address. */
4784 return 0;
4787 /* All the useful information has now been extracted from the
4788 operands into unsorted_regs and unsorted_offsets; additionally,
4789 order[0] has been set to the lowest numbered register in the
4790 list. Sort the registers into order, and check that the memory
4791 offsets are ascending and adjacent. */
4793 for (i = 1; i < nops; i++)
4795 int j;
4797 order[i] = order[i - 1];
4798 for (j = 0; j < nops; j++)
4799 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
4800 && (order[i] == order[i - 1]
4801 || unsorted_regs[j] < unsorted_regs[order[i]]))
4802 order[i] = j;
4804 /* Have we found a suitable register? If not, one must be used more
4805 than once. */
4806 if (order[i] == order[i - 1])
4807 return 0;
4809 /* Is the memory address adjacent and ascending? */
4810 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
4811 return 0;
4814 if (base)
4816 *base = base_reg;
4818 for (i = 0; i < nops; i++)
4819 regs[i] = unsorted_regs[order[i]];
4821 *load_offset = unsorted_offsets[order[0]];
4824 if (unsorted_offsets[order[0]] == 0)
4825 return 1; /* ldmia */
4827 if (unsorted_offsets[order[0]] == 4)
4828 return 2; /* ldmib */
4830 if (unsorted_offsets[order[nops - 1]] == 0)
4831 return 3; /* ldmda */
4833 if (unsorted_offsets[order[nops - 1]] == -4)
4834 return 4; /* ldmdb */
4836 /* For the ARM8, ARM9 & StrongARM, 2 ldr instructions are faster than an ldm
4837 if the offset isn't small enough. The reason 2 ldrs are faster
4838 is because these ARMs are able to do more than one cache access
4839 in a single cycle. The ARM9 and StrongARM have Harvard caches,
4840 whilst the ARM8 has a double bandwidth cache. This means that
4841 these cores can do both an instruction fetch and a data fetch in
4842 a single cycle, so the trick of calculating the address into a
4843 scratch register (one of the result regs) and then doing a load
4844 multiple actually becomes slower (and no smaller in code size).
4845 That is the transformation
4847 ldr rd1, [rbase + offset]
4848 ldr rd2, [rbase + offset + 4]
4852 add rd1, rbase, offset
4853 ldmia rd1, {rd1, rd2}
4855 produces worse code -- '3 cycles + any stalls on rd2' instead of
4856 '2 cycles + any stalls on rd2'. On ARMs with only one cache
4857 access per cycle, the first sequence could never complete in less
4858 than 6 cycles, whereas the ldm sequence would only take 5 and
4859 would make better use of sequential accesses if not hitting the
4860 cache.
4862 We cheat here and test 'arm_ld_sched' which we currently know to
4863 only be true for the ARM8, ARM9 and StrongARM. If this ever
4864 changes, then the test below needs to be reworked. */
4865 if (nops == 2 && arm_ld_sched)
4866 return 0;
4868 /* Can't do it without setting up the offset, only do this if it takes
4869 no more than one insn. */
4870 return (const_ok_for_arm (unsorted_offsets[order[0]])
4871 || const_ok_for_arm (-unsorted_offsets[order[0]])) ? 5 : 0;
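/* As an illustration of the return codes: for operands equivalent to
      ldr r3, [r5, #4]
      ldr r4, [r5, #8]
   the registers are ascending and the lowest offset is 4, so the
   value 2 is returned, and emit_ldm_seq below turns the pair into a
   single "ldmib r5, {r3, r4}".  */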
4874 const char *
4875 emit_ldm_seq (operands, nops)
4876 rtx * operands;
4877 int nops;
4879 int regs[4];
4880 int base_reg;
4881 HOST_WIDE_INT offset;
4882 char buf[100];
4883 int i;
4885 switch (load_multiple_sequence (operands, nops, regs, &base_reg, &offset))
4887 case 1:
4888 strcpy (buf, "ldm%?ia\t");
4889 break;
4891 case 2:
4892 strcpy (buf, "ldm%?ib\t");
4893 break;
4895 case 3:
4896 strcpy (buf, "ldm%?da\t");
4897 break;
4899 case 4:
4900 strcpy (buf, "ldm%?db\t");
4901 break;
4903 case 5:
4904 if (offset >= 0)
4905 sprintf (buf, "add%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
4906 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
4907 (long) offset);
4908 else
4909 sprintf (buf, "sub%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
4910 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
4911 (long) -offset);
4912 output_asm_insn (buf, operands);
4913 base_reg = regs[0];
4914 strcpy (buf, "ldm%?ia\t");
4915 break;
4917 default:
4918 abort ();
4921 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
4922 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
4924 for (i = 1; i < nops; i++)
4925 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
4926 reg_names[regs[i]]);
4928 strcat (buf, "}\t%@ phole ldm");
4930 output_asm_insn (buf, operands);
4931 return "";
4935 store_multiple_sequence (operands, nops, regs, base, load_offset)
4936 rtx * operands;
4937 int nops;
4938 int * regs;
4939 int * base;
4940 HOST_WIDE_INT * load_offset;
4942 int unsorted_regs[4];
4943 HOST_WIDE_INT unsorted_offsets[4];
4944 int order[4];
4945 int base_reg = -1;
4946 int i;
4948 /* Can only handle 2, 3, or 4 insns at present, though could be easily
4949 extended if required. */
4950 if (nops < 2 || nops > 4)
4951 abort ();
4953 /* Loop over the operands and check that the memory references are
4954 suitable (i.e. immediate offsets from the same base register). At
4955 the same time, extract the target register, and the memory
4956 offsets. */
4957 for (i = 0; i < nops; i++)
4959 rtx reg;
4960 rtx offset;
4962 /* Convert a subreg of a mem into the mem itself. */
4963 if (GET_CODE (operands[nops + i]) == SUBREG)
4964 operands[nops + i] = alter_subreg (operands + (nops + i));
4966 if (GET_CODE (operands[nops + i]) != MEM)
4967 abort ();
4969 /* Don't reorder volatile memory references; it doesn't seem worth
4970 looking for the case where the order is ok anyway. */
4971 if (MEM_VOLATILE_P (operands[nops + i]))
4972 return 0;
4974 offset = const0_rtx;
4976 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
4977 || (GET_CODE (reg) == SUBREG
4978 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4979 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
4980 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
4981 == REG)
4982 || (GET_CODE (reg) == SUBREG
4983 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4984 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
4985 == CONST_INT)))
4987 if (i == 0)
4989 base_reg = REGNO (reg);
4990 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
4991 ? REGNO (operands[i])
4992 : REGNO (SUBREG_REG (operands[i])));
4993 order[0] = 0;
4995 else
4997 if (base_reg != (int) REGNO (reg))
4998 /* Not addressed from the same base register. */
4999 return 0;
5001 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
5002 ? REGNO (operands[i])
5003 : REGNO (SUBREG_REG (operands[i])));
5004 if (unsorted_regs[i] < unsorted_regs[order[0]])
5005 order[0] = i;
5008 /* If it isn't an integer register, then we can't do this. */
5009 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14)
5010 return 0;
5012 unsorted_offsets[i] = INTVAL (offset);
5014 else
5015 /* Not a suitable memory address. */
5016 return 0;
5019 /* All the useful information has now been extracted from the
5020 operands into unsorted_regs and unsorted_offsets; additionally,
5021 order[0] has been set to the lowest numbered register in the
5022 list. Sort the registers into order, and check that the memory
5023 offsets are ascending and adjacent. */
5025 for (i = 1; i < nops; i++)
5027 int j;
5029 order[i] = order[i - 1];
5030 for (j = 0; j < nops; j++)
5031 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
5032 && (order[i] == order[i - 1]
5033 || unsorted_regs[j] < unsorted_regs[order[i]]))
5034 order[i] = j;
5036 /* Have we found a suitable register? If not, one must be used more
5037 than once. */
5038 if (order[i] == order[i - 1])
5039 return 0;
5041 /* Is the memory address adjacent and ascending? */
5042 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
5043 return 0;
5046 if (base)
5048 *base = base_reg;
5050 for (i = 0; i < nops; i++)
5051 regs[i] = unsorted_regs[order[i]];
5053 *load_offset = unsorted_offsets[order[0]];
5056 if (unsorted_offsets[order[0]] == 0)
5057 return 1; /* stmia */
5059 if (unsorted_offsets[order[0]] == 4)
5060 return 2; /* stmib */
5062 if (unsorted_offsets[order[nops - 1]] == 0)
5063 return 3; /* stmda */
5065 if (unsorted_offsets[order[nops - 1]] == -4)
5066 return 4; /* stmdb */
5068 return 0;
5071 const char *
5072 emit_stm_seq (operands, nops)
5073 rtx * operands;
5074 int nops;
5076 int regs[4];
5077 int base_reg;
5078 HOST_WIDE_INT offset;
5079 char buf[100];
5080 int i;
5082 switch (store_multiple_sequence (operands, nops, regs, &base_reg, &offset))
5084 case 1:
5085 strcpy (buf, "stm%?ia\t");
5086 break;
5088 case 2:
5089 strcpy (buf, "stm%?ib\t");
5090 break;
5092 case 3:
5093 strcpy (buf, "stm%?da\t");
5094 break;
5096 case 4:
5097 strcpy (buf, "stm%?db\t");
5098 break;
5100 default:
5101 abort ();
5104 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
5105 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
5107 for (i = 1; i < nops; i++)
5108 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
5109 reg_names[regs[i]]);
5111 strcat (buf, "}\t%@ phole stm");
5113 output_asm_insn (buf, operands);
5114 return "";
5118 multi_register_push (op, mode)
5119 rtx op;
5120 enum machine_mode mode ATTRIBUTE_UNUSED;
5122 if (GET_CODE (op) != PARALLEL
5123 || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
5124 || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
5125 || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != UNSPEC_PUSH_MULT))
5126 return 0;
5128 return 1;
5131 /* Routines for use in generating RTL. */
5134 arm_gen_load_multiple (base_regno, count, from, up, write_back, unchanging_p,
5135 in_struct_p, scalar_p)
5136 int base_regno;
5137 int count;
5138 rtx from;
5139 int up;
5140 int write_back;
5141 int unchanging_p;
5142 int in_struct_p;
5143 int scalar_p;
5145 int i = 0, j;
5146 rtx result;
5147 int sign = up ? 1 : -1;
5148 rtx mem;
5150 /* XScale has load-store double instructions, but they have stricter
5151 alignment requirements than load-store multiple, so we cannot
5152 use them.
5154 For XScale ldm requires 2 + NREGS cycles to complete and blocks
5155 the pipeline until completion.
5157 NREGS CYCLES
5158 1 3
5159 2 4
5160 3 5
5161 4 6
5163 An ldr instruction takes 1-3 cycles, but does not block the
5164 pipeline.
5166 NREGS CYCLES
5167 1 1-3
5168 2 2-6
5169 3 3-9
5170 4 4-12
5172 Best case ldr will always win. However, the more ldr instructions
5173 we issue, the less likely we are to be able to schedule them well.
5174 Using ldr instructions also increases code size.
5176 As a compromise, we use ldr for counts of 1 or 2 regs, and ldm
5177 for counts of 3 or 4 regs. */
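/* For count == 2, for example, this trades a blocking 4-cycle ldm for
   a pair of non-blocking ldrs (2-6 cycles in total) that the
   scheduler is free to overlap with other work.  */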
5178 if (arm_is_xscale && count <= 2 && ! optimize_size)
5180 rtx seq;
5182 start_sequence ();
5184 for (i = 0; i < count; i++)
5186 mem = gen_rtx_MEM (SImode, plus_constant (from, i * 4 * sign));
5187 RTX_UNCHANGING_P (mem) = unchanging_p;
5188 MEM_IN_STRUCT_P (mem) = in_struct_p;
5189 MEM_SCALAR_P (mem) = scalar_p;
5190 emit_move_insn (gen_rtx_REG (SImode, base_regno + i), mem);
5193 if (write_back)
5194 emit_move_insn (from, plus_constant (from, count * 4 * sign));
5196 seq = get_insns ();
5197 end_sequence ();
5199 return seq;
5202 result = gen_rtx_PARALLEL (VOIDmode,
5203 rtvec_alloc (count + (write_back ? 1 : 0)));
5204 if (write_back)
5206 XVECEXP (result, 0, 0)
5207 = gen_rtx_SET (GET_MODE (from), from,
5208 plus_constant (from, count * 4 * sign));
5209 i = 1;
5210 count++;
5213 for (j = 0; i < count; i++, j++)
5215 mem = gen_rtx_MEM (SImode, plus_constant (from, j * 4 * sign));
5216 RTX_UNCHANGING_P (mem) = unchanging_p;
5217 MEM_IN_STRUCT_P (mem) = in_struct_p;
5218 MEM_SCALAR_P (mem) = scalar_p;
5219 XVECEXP (result, 0, i)
5220 = gen_rtx_SET (VOIDmode, gen_rtx_REG (SImode, base_regno + j), mem);
5223 return result;
5227 arm_gen_store_multiple (base_regno, count, to, up, write_back, unchanging_p,
5228 in_struct_p, scalar_p)
5229 int base_regno;
5230 int count;
5231 rtx to;
5232 int up;
5233 int write_back;
5234 int unchanging_p;
5235 int in_struct_p;
5236 int scalar_p;
5238 int i = 0, j;
5239 rtx result;
5240 int sign = up ? 1 : -1;
5241 rtx mem;
5243 /* See arm_gen_load_multiple for discussion of
5244 the pros/cons of ldm/stm usage for XScale. */
5245 if (arm_is_xscale && count <= 2 && ! optimize_size)
5247 rtx seq;
5249 start_sequence ();
5251 for (i = 0; i < count; i++)
5253 mem = gen_rtx_MEM (SImode, plus_constant (to, i * 4 * sign));
5254 RTX_UNCHANGING_P (mem) = unchanging_p;
5255 MEM_IN_STRUCT_P (mem) = in_struct_p;
5256 MEM_SCALAR_P (mem) = scalar_p;
5257 emit_move_insn (mem, gen_rtx_REG (SImode, base_regno + i));
5260 if (write_back)
5261 emit_move_insn (to, plus_constant (to, count * 4 * sign));
5263 seq = get_insns ();
5264 end_sequence ();
5266 return seq;
5269 result = gen_rtx_PARALLEL (VOIDmode,
5270 rtvec_alloc (count + (write_back ? 1 : 0)));
5271 if (write_back)
5273 XVECEXP (result, 0, 0)
5274 = gen_rtx_SET (GET_MODE (to), to,
5275 plus_constant (to, count * 4 * sign));
5276 i = 1;
5277 count++;
5280 for (j = 0; i < count; i++, j++)
5282 mem = gen_rtx_MEM (SImode, plus_constant (to, j * 4 * sign));
5283 RTX_UNCHANGING_P (mem) = unchanging_p;
5284 MEM_IN_STRUCT_P (mem) = in_struct_p;
5285 MEM_SCALAR_P (mem) = scalar_p;
5287 XVECEXP (result, 0, i)
5288 = gen_rtx_SET (VOIDmode, mem, gen_rtx_REG (SImode, base_regno + j));
5291 return result;
5295 arm_gen_movstrqi (operands)
5296 rtx * operands;
5298 HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
5299 int i;
5300 rtx src, dst;
5301 rtx st_src, st_dst, fin_src, fin_dst;
5302 rtx part_bytes_reg = NULL;
5303 rtx mem;
5304 int dst_unchanging_p, dst_in_struct_p, src_unchanging_p, src_in_struct_p;
5305 int dst_scalar_p, src_scalar_p;
5307 if (GET_CODE (operands[2]) != CONST_INT
5308 || GET_CODE (operands[3]) != CONST_INT
5309 || INTVAL (operands[2]) > 64
5310 || INTVAL (operands[3]) & 3)
5311 return 0;
5313 st_dst = XEXP (operands[0], 0);
5314 st_src = XEXP (operands[1], 0);
5316 dst_unchanging_p = RTX_UNCHANGING_P (operands[0]);
5317 dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
5318 dst_scalar_p = MEM_SCALAR_P (operands[0]);
5319 src_unchanging_p = RTX_UNCHANGING_P (operands[1]);
5320 src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);
5321 src_scalar_p = MEM_SCALAR_P (operands[1]);
5323 fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
5324 fin_src = src = copy_to_mode_reg (SImode, st_src);
5326 in_words_to_go = ARM_NUM_INTS (INTVAL (operands[2]));
5327 out_words_to_go = INTVAL (operands[2]) / 4;
5328 last_bytes = INTVAL (operands[2]) & 3;
5330 if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
5331 part_bytes_reg = gen_rtx_REG (SImode, (in_words_to_go - 1) & 3);
5333 for (i = 0; in_words_to_go >= 2; i+=4)
5335 if (in_words_to_go > 4)
5336 emit_insn (arm_gen_load_multiple (0, 4, src, TRUE, TRUE,
5337 src_unchanging_p,
5338 src_in_struct_p,
5339 src_scalar_p));
5340 else
5341 emit_insn (arm_gen_load_multiple (0, in_words_to_go, src, TRUE,
5342 FALSE, src_unchanging_p,
5343 src_in_struct_p, src_scalar_p));
5345 if (out_words_to_go)
5347 if (out_words_to_go > 4)
5348 emit_insn (arm_gen_store_multiple (0, 4, dst, TRUE, TRUE,
5349 dst_unchanging_p,
5350 dst_in_struct_p,
5351 dst_scalar_p));
5352 else if (out_words_to_go != 1)
5353 emit_insn (arm_gen_store_multiple (0, out_words_to_go,
5354 dst, TRUE,
5355 (last_bytes == 0
5356 ? FALSE : TRUE),
5357 dst_unchanging_p,
5358 dst_in_struct_p,
5359 dst_scalar_p));
5360 else
5362 mem = gen_rtx_MEM (SImode, dst);
5363 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
5364 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
5365 MEM_SCALAR_P (mem) = dst_scalar_p;
5366 emit_move_insn (mem, gen_rtx_REG (SImode, 0));
5367 if (last_bytes != 0)
5368 emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
5372 in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
5373 out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
5376 /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do. */
5377 if (out_words_to_go)
5379 rtx sreg;
5381 mem = gen_rtx_MEM (SImode, src);
5382 RTX_UNCHANGING_P (mem) = src_unchanging_p;
5383 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
5384 MEM_SCALAR_P (mem) = src_scalar_p;
5385 emit_move_insn (sreg = gen_reg_rtx (SImode), mem);
5386 emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));
5388 mem = gen_rtx_MEM (SImode, dst);
5389 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
5390 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
5391 MEM_SCALAR_P (mem) = dst_scalar_p;
5392 emit_move_insn (mem, sreg);
5393 emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
5394 in_words_to_go--;
5396 if (in_words_to_go) /* Sanity check */
5397 abort ();
5400 if (in_words_to_go)
5402 if (in_words_to_go < 0)
5403 abort ();
5405 mem = gen_rtx_MEM (SImode, src);
5406 RTX_UNCHANGING_P (mem) = src_unchanging_p;
5407 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
5408 MEM_SCALAR_P (mem) = src_scalar_p;
5409 part_bytes_reg = copy_to_mode_reg (SImode, mem);
5412 if (last_bytes && part_bytes_reg == NULL)
5413 abort ();
5415 if (BYTES_BIG_ENDIAN && last_bytes)
5417 rtx tmp = gen_reg_rtx (SImode);
5419 /* The bytes we want are in the top end of the word. */
5420 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
5421 GEN_INT (8 * (4 - last_bytes))));
5422 part_bytes_reg = tmp;
5424 while (last_bytes)
5426 mem = gen_rtx_MEM (QImode, plus_constant (dst, last_bytes - 1));
5427 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
5428 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
5429 MEM_SCALAR_P (mem) = dst_scalar_p;
5430 emit_move_insn (mem, gen_lowpart (QImode, part_bytes_reg));
5432 if (--last_bytes)
5434 tmp = gen_reg_rtx (SImode);
5435 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
5436 part_bytes_reg = tmp;
5441 else
5443 if (last_bytes > 1)
5445 mem = gen_rtx_MEM (HImode, dst);
5446 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
5447 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
5448 MEM_SCALAR_P (mem) = dst_scalar_p;
5449 emit_move_insn (mem, gen_lowpart (HImode, part_bytes_reg));
5450 last_bytes -= 2;
5451 if (last_bytes)
5453 rtx tmp = gen_reg_rtx (SImode);
5455 emit_insn (gen_addsi3 (dst, dst, GEN_INT (2)));
5456 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (16)));
5457 part_bytes_reg = tmp;
5461 if (last_bytes)
5463 mem = gen_rtx_MEM (QImode, dst);
5464 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
5465 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
5466 MEM_SCALAR_P (mem) = dst_scalar_p;
5467 emit_move_insn (mem, gen_lowpart (QImode, part_bytes_reg));
5471 return 1;
5474 /* Generate a memory reference for a half word, such that it will be loaded
5475 into the top 16 bits of the word. We can assume that the address is
5476 known to be alignable and of the form reg, or plus (reg, const). */
5479 arm_gen_rotated_half_load (memref)
5480 rtx memref;
5482 HOST_WIDE_INT offset = 0;
5483 rtx base = XEXP (memref, 0);
5485 if (GET_CODE (base) == PLUS)
5487 offset = INTVAL (XEXP (base, 1));
5488 base = XEXP (base, 0);
5491 /* If we aren't allowed to generate unaligned addresses, then fail. */
5492 if (TARGET_MMU_TRAPS
5493 && ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 0)))
5494 return NULL;
5496 base = gen_rtx_MEM (SImode, plus_constant (base, offset & ~2));
5498 if ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 2))
5499 return base;
5501 return gen_rtx_ROTATE (SImode, base, GEN_INT (16));
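/* On a little-endian target, for example, a halfword at offset 2
   already occupies the top 16 bits of the word at offset 0, so the
   SImode load is returned unrotated; a halfword at offset 0 sits in
   the low 16 bits and is rotated up by 16.  */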
5504 /* Select a dominance comparison mode if possible. We support three forms.
5505 COND_OR == 0 => (X && Y)
5506 COND_OR == 1 => ((! X) || Y)
5507 COND_OR == 2 => (X || Y)
5508 If we are unable to support a dominance comparison, we return CCmode.
5509 This will then fail to match for the RTL expressions that generate this
5510 call. */
5512 static enum machine_mode
5513 select_dominance_cc_mode (x, y, cond_or)
5514 rtx x;
5515 rtx y;
5516 HOST_WIDE_INT cond_or;
5518 enum rtx_code cond1, cond2;
5519 int swapped = 0;
5521 /* Currently we will probably get the wrong result if the individual
5522 comparisons are not simple. This also ensures that it is safe to
5523 reverse a comparison if necessary. */
5524 if ((arm_select_cc_mode (cond1 = GET_CODE (x), XEXP (x, 0), XEXP (x, 1))
5525 != CCmode)
5526 || (arm_select_cc_mode (cond2 = GET_CODE (y), XEXP (y, 0), XEXP (y, 1))
5527 != CCmode))
5528 return CCmode;
5530 /* The if_then_else variant of this tests the second condition if the
5531 first passes, but is true if the first fails. Reverse the first
5532 condition to get a true "inclusive-or" expression. */
5533 if (cond_or == 1)
5534 cond1 = reverse_condition (cond1);
5536 /* If the comparisons are not equal, and one doesn't dominate the other,
5537 then we can't do this. */
5538 if (cond1 != cond2
5539 && !comparison_dominates_p (cond1, cond2)
5540 && (swapped = 1, !comparison_dominates_p (cond2, cond1)))
5541 return CCmode;
5543 if (swapped)
5545 enum rtx_code temp = cond1;
5546 cond1 = cond2;
5547 cond2 = temp;
5550 switch (cond1)
5552 case EQ:
5553 if (cond2 == EQ || !cond_or)
5554 return CC_DEQmode;
5556 switch (cond2)
5558 case LE: return CC_DLEmode;
5559 case LEU: return CC_DLEUmode;
5560 case GE: return CC_DGEmode;
5561 case GEU: return CC_DGEUmode;
5562 default: break;
5565 break;
5567 case LT:
5568 if (cond2 == LT || !cond_or)
5569 return CC_DLTmode;
5570 if (cond2 == LE)
5571 return CC_DLEmode;
5572 if (cond2 == NE)
5573 return CC_DNEmode;
5574 break;
5576 case GT:
5577 if (cond2 == GT || !cond_or)
5578 return CC_DGTmode;
5579 if (cond2 == GE)
5580 return CC_DGEmode;
5581 if (cond2 == NE)
5582 return CC_DNEmode;
5583 break;
5585 case LTU:
5586 if (cond2 == LTU || !cond_or)
5587 return CC_DLTUmode;
5588 if (cond2 == LEU)
5589 return CC_DLEUmode;
5590 if (cond2 == NE)
5591 return CC_DNEmode;
5592 break;
5594 case GTU:
5595 if (cond2 == GTU || !cond_or)
5596 return CC_DGTUmode;
5597 if (cond2 == GEU)
5598 return CC_DGEUmode;
5599 if (cond2 == NE)
5600 return CC_DNEmode;
5601 break;
5603 /* The remaining cases only occur when both comparisons are the
5604 same. */
5605 case NE:
5606 return CC_DNEmode;
5608 case LE:
5609 return CC_DLEmode;
5611 case GE:
5612 return CC_DGEmode;
5614 case LEU:
5615 return CC_DLEUmode;
5617 case GEU:
5618 return CC_DGEUmode;
5620 default:
5621 break;
5624 abort ();
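/* For example, for (a < b || a != b) the component comparisons are LT
   and NE; LT dominates NE (LT true implies NE true), so the pair is
   encoded as CC_DNEmode and a single compare suffices.  */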
5627 enum machine_mode
5628 arm_select_cc_mode (op, x, y)
5629 enum rtx_code op;
5630 rtx x;
5631 rtx y;
5633 /* All floating point compares return CCFP if it is an equality
5634 comparison, and CCFPE otherwise. */
5635 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
5637 switch (op)
5639 case EQ:
5640 case NE:
5641 case UNORDERED:
5642 case ORDERED:
5643 case UNLT:
5644 case UNLE:
5645 case UNGT:
5646 case UNGE:
5647 case UNEQ:
5648 case LTGT:
5649 return CCFPmode;
5651 case LT:
5652 case LE:
5653 case GT:
5654 case GE:
5655 if (TARGET_CIRRUS)
5656 return CCFPmode;
5657 return CCFPEmode;
5659 default:
5660 abort ();
5664 /* A compare with a shifted operand. Because of canonicalization, the
5665 comparison will have to be swapped when we emit the assembler. */
5666 if (GET_MODE (y) == SImode && GET_CODE (y) == REG
5667 && (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
5668 || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
5669 || GET_CODE (x) == ROTATERT))
5670 return CC_SWPmode;
5672 /* This is a special case that is used by combine to allow a
5673 comparison of a shifted byte load to be split into a zero-extend
5674 followed by a comparison of the shifted integer (only valid for
5675 equalities and unsigned inequalities). */
5676 if (GET_MODE (x) == SImode
5677 && GET_CODE (x) == ASHIFT
5678 && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 24
5679 && GET_CODE (XEXP (x, 0)) == SUBREG
5680 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == MEM
5681 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == QImode
5682 && (op == EQ || op == NE
5683 || op == GEU || op == GTU || op == LTU || op == LEU)
5684 && GET_CODE (y) == CONST_INT)
5685 return CC_Zmode;
5687 /* A construct for a conditional compare: if the false arm contains
5688 0, then both conditions must be true; otherwise either condition
5689 must be true. Not all conditions are possible, so CCmode is
5690 returned if it can't be done. */
5691 if (GET_CODE (x) == IF_THEN_ELSE
5692 && (XEXP (x, 2) == const0_rtx
5693 || XEXP (x, 2) == const1_rtx)
5694 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
5695 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
5696 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
5697 INTVAL (XEXP (x, 2)));
5699 /* Alternate canonicalizations of the above. These are somewhat cleaner. */
5700 if (GET_CODE (x) == AND
5701 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
5702 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
5703 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1), 0);
5705 if (GET_CODE (x) == IOR
5706 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
5707 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
5708 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1), 2);
5710 /* For an operation that sets the condition codes as a side-effect, the
5711 V flag is not set correctly, so we can only use comparisons where
5712 this doesn't matter. (For LT and GE we can use "mi" and "pl"
5713 instead.) */
5714 if (GET_MODE (x) == SImode
5715 && y == const0_rtx
5716 && (op == EQ || op == NE || op == LT || op == GE)
5717 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
5718 || GET_CODE (x) == AND || GET_CODE (x) == IOR
5719 || GET_CODE (x) == XOR || GET_CODE (x) == MULT
5720 || GET_CODE (x) == NOT || GET_CODE (x) == NEG
5721 || GET_CODE (x) == LSHIFTRT
5722 || GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
5723 || GET_CODE (x) == ROTATERT || GET_CODE (x) == ZERO_EXTRACT))
5724 return CC_NOOVmode;
5726 if (GET_MODE (x) == QImode && (op == EQ || op == NE))
5727 return CC_Zmode;
5729 if (GET_MODE (x) == SImode && (op == LTU || op == GEU)
5730 && GET_CODE (x) == PLUS
5731 && (rtx_equal_p (XEXP (x, 0), y) || rtx_equal_p (XEXP (x, 1), y)))
5732 return CC_Cmode;
5734 return CCmode;
5737 /* X and Y are two things to compare using CODE. Emit the compare insn and
5738 return the rtx for register 0 in the proper mode. FP means this is a
5739 floating point compare: I don't think that it is needed on the arm. */
5742 arm_gen_compare_reg (code, x, y)
5743 enum rtx_code code;
5744 rtx x, y;
5746 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
5747 rtx cc_reg = gen_rtx_REG (mode, CC_REGNUM);
5749 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
5750 gen_rtx_COMPARE (mode, x, y)));
5752 return cc_reg;
5755 /* Generate a sequence of insns that will generate the correct return
5756 address mask depending on the physical architecture that the program
5757 is running on. */
5760 arm_gen_return_addr_mask ()
5762 rtx reg = gen_reg_rtx (Pmode);
5764 emit_insn (gen_return_addr_mask (reg));
5765 return reg;
5768 void
5769 arm_reload_in_hi (operands)
5770 rtx * operands;
5772 rtx ref = operands[1];
5773 rtx base, scratch;
5774 HOST_WIDE_INT offset = 0;
5776 if (GET_CODE (ref) == SUBREG)
5778 offset = SUBREG_BYTE (ref);
5779 ref = SUBREG_REG (ref);
5782 if (GET_CODE (ref) == REG)
5784 /* We have a pseudo which has been spilt onto the stack; there
5785 are two cases here: the first where there is a simple
5786 stack-slot replacement and a second where the stack-slot is
5787 out of range, or is used as a subreg. */
5788 if (reg_equiv_mem[REGNO (ref)])
5790 ref = reg_equiv_mem[REGNO (ref)];
5791 base = find_replacement (&XEXP (ref, 0));
5793 else
5794 /* The slot is out of range, or was dressed up in a SUBREG. */
5795 base = reg_equiv_address[REGNO (ref)];
5797 else
5798 base = find_replacement (&XEXP (ref, 0));
5800 /* Handle the case where the address is too complex to be offset by 1. */
5801 if (GET_CODE (base) == MINUS
5802 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
5804 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5806 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
5807 base = base_plus;
5809 else if (GET_CODE (base) == PLUS)
5811 /* The addend must be CONST_INT, or we would have dealt with it above. */
5812 HOST_WIDE_INT hi, lo;
5814 offset += INTVAL (XEXP (base, 1));
5815 base = XEXP (base, 0);
5817 /* Rework the address into a legal sequence of insns. */
5818 /* Valid range for lo is -4095 -> 4095 */
5819 lo = (offset >= 0
5820 ? (offset & 0xfff)
5821 : -((-offset) & 0xfff));
5823 /* Corner case: if lo is the max offset, then we would be out of range
5824 once we have added the additional 1 below, so bump the msb into the
5825 pre-loading insn(s). */
5826 if (lo == 4095)
5827 lo &= 0x7ff;
5829 hi = ((((offset - lo) & (HOST_WIDE_INT) 0xffffffff)
5830 ^ (HOST_WIDE_INT) 0x80000000)
5831 - (HOST_WIDE_INT) 0x80000000);
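/* For instance, offset == 0x1234 splits into lo == 0x234 and
   hi == 0x1000, while offset == -0x1234 gives lo == -0x234 and
   hi == -0x1000; the xor/subtract pair just sign-extends hi from 32
   bits on hosts where HOST_WIDE_INT is wider.  */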
5833 if (hi + lo != offset)
5834 abort ();
5836 if (hi != 0)
5838 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5840 /* Get the base address; addsi3 knows how to handle constants
5841 that require more than one insn. */
5842 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
5843 base = base_plus;
5844 offset = lo;
5848 /* Operands[2] may overlap operands[0] (though it won't overlap
5849 operands[1]); that's why we asked for a DImode reg -- so we can
5850 use the half that does not overlap. */
5851 if (REGNO (operands[2]) == REGNO (operands[0]))
5852 scratch = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5853 else
5854 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
5856 emit_insn (gen_zero_extendqisi2 (scratch,
5857 gen_rtx_MEM (QImode,
5858 plus_constant (base,
5859 offset))));
5860 emit_insn (gen_zero_extendqisi2 (gen_rtx_SUBREG (SImode, operands[0], 0),
5861 gen_rtx_MEM (QImode,
5862 plus_constant (base,
5863 offset + 1))));
5864 if (!BYTES_BIG_ENDIAN)
5865 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
5866 gen_rtx_IOR (SImode,
5867 gen_rtx_ASHIFT
5868 (SImode,
5869 gen_rtx_SUBREG (SImode, operands[0], 0),
5870 GEN_INT (8)),
5871 scratch)));
5872 else
5873 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
5874 gen_rtx_IOR (SImode,
5875 gen_rtx_ASHIFT (SImode, scratch,
5876 GEN_INT (8)),
5877 gen_rtx_SUBREG (SImode, operands[0],
5878 0))));
5881 /* Handle storing a half-word to memory during reload by synthesising as two
5882 byte stores. Take care not to clobber the input values until after we
5883 have moved them somewhere safe. This code assumes that if the DImode
5884 scratch in operands[2] overlaps either the input value or output address
5885 in some way, then that value must die in this insn (we absolutely need
5886 two scratch registers for some corner cases). */
5888 void
5889 arm_reload_out_hi (operands)
5890 rtx * operands;
5892 rtx ref = operands[0];
5893 rtx outval = operands[1];
5894 rtx base, scratch;
5895 HOST_WIDE_INT offset = 0;
5897 if (GET_CODE (ref) == SUBREG)
5899 offset = SUBREG_BYTE (ref);
5900 ref = SUBREG_REG (ref);
5903 if (GET_CODE (ref) == REG)
5905 /* We have a pseudo which has been spilt onto the stack; there
5906 are two cases here: the first where there is a simple
5907 stack-slot replacement and a second where the stack-slot is
5908 out of range, or is used as a subreg. */
5909 if (reg_equiv_mem[REGNO (ref)])
5911 ref = reg_equiv_mem[REGNO (ref)];
5912 base = find_replacement (&XEXP (ref, 0));
5914 else
5915 /* The slot is out of range, or was dressed up in a SUBREG. */
5916 base = reg_equiv_address[REGNO (ref)];
5918 else
5919 base = find_replacement (&XEXP (ref, 0));
5921 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
5923 /* Handle the case where the address is too complex to be offset by 1. */
5924 if (GET_CODE (base) == MINUS
5925 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
5927 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5929 /* Be careful not to destroy OUTVAL. */
5930 if (reg_overlap_mentioned_p (base_plus, outval))
5932 /* Updating base_plus might destroy outval; see if we can
5933 swap the scratch and base_plus. */
5934 if (!reg_overlap_mentioned_p (scratch, outval))
5936 rtx tmp = scratch;
5937 scratch = base_plus;
5938 base_plus = tmp;
5940 else
5942 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
5944 /* Be conservative and copy OUTVAL into the scratch now;
5945 this should only be necessary if outval is a subreg
5946 of something larger than a word. */
5947 /* XXX Might this clobber base? I can't see how it can,
5948 since scratch is known to overlap with OUTVAL, and
5949 must be wider than a word. */
5950 emit_insn (gen_movhi (scratch_hi, outval));
5951 outval = scratch_hi;
5955 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
5956 base = base_plus;
5958 else if (GET_CODE (base) == PLUS)
5960 /* The addend must be CONST_INT, or we would have dealt with it above. */
5961 HOST_WIDE_INT hi, lo;
5963 offset += INTVAL (XEXP (base, 1));
5964 base = XEXP (base, 0);
5966 /* Rework the address into a legal sequence of insns. */
5967 /* Valid range for lo is -4095 -> 4095 */
5968 lo = (offset >= 0
5969 ? (offset & 0xfff)
5970 : -((-offset) & 0xfff));
5972 /* Corner case: if lo is the max offset, then we would be out of range
5973 once we have added the additional 1 below, so bump the msb into the
5974 pre-loading insn(s). */
5975 if (lo == 4095)
5976 lo &= 0x7ff;
5978 hi = ((((offset - lo) & (HOST_WIDE_INT) 0xffffffff)
5979 ^ (HOST_WIDE_INT) 0x80000000)
5980 - (HOST_WIDE_INT) 0x80000000);
5982 if (hi + lo != offset)
5983 abort ();
5985 if (hi != 0)
5987 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5989 /* Be careful not to destroy OUTVAL. */
5990 if (reg_overlap_mentioned_p (base_plus, outval))
5992 /* Updating base_plus might destroy outval; see if we
5993 can swap the scratch and base_plus. */
5994 if (!reg_overlap_mentioned_p (scratch, outval))
5996 rtx tmp = scratch;
5997 scratch = base_plus;
5998 base_plus = tmp;
6000 else
6002 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
6004 /* Be conservative and copy outval into scratch now;
6005 this should only be necessary if outval is a
6006 subreg of something larger than a word. */
6007 /* XXX Might this clobber base? I can't see how it
6008 can, since scratch is known to overlap with
6009 outval. */
6010 emit_insn (gen_movhi (scratch_hi, outval));
6011 outval = scratch_hi;
6015 /* Get the base address; addsi3 knows how to handle constants
6016 that require more than one insn. */
6017 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
6018 base = base_plus;
6019 offset = lo;
6023 if (BYTES_BIG_ENDIAN)
6025 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
6026 plus_constant (base, offset + 1)),
6027 gen_lowpart (QImode, outval)));
6028 emit_insn (gen_lshrsi3 (scratch,
6029 gen_rtx_SUBREG (SImode, outval, 0),
6030 GEN_INT (8)));
6031 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
6032 gen_lowpart (QImode, scratch)));
6034 else
6036 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
6037 gen_lowpart (QImode, outval)));
6038 emit_insn (gen_lshrsi3 (scratch,
6039 gen_rtx_SUBREG (SImode, outval, 0),
6040 GEN_INT (8)));
6041 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
6042 plus_constant (base, offset + 1)),
6043 gen_lowpart (QImode, scratch)));
6047 /* Print a symbolic form of X to the debug file, F. */
6049 static void
6050 arm_print_value (f, x)
6051 FILE * f;
6052 rtx x;
6054 switch (GET_CODE (x))
6056 case CONST_INT:
6057 fprintf (f, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
6058 return;
6060 case CONST_DOUBLE:
6061 fprintf (f, "<0x%lx,0x%lx>", (long)XWINT (x, 2), (long)XWINT (x, 3));
6062 return;
6064 case CONST_STRING:
6065 fprintf (f, "\"%s\"", XSTR (x, 0));
6066 return;
6068 case SYMBOL_REF:
6069 fprintf (f, "`%s'", XSTR (x, 0));
6070 return;
6072 case LABEL_REF:
6073 fprintf (f, "L%d", INSN_UID (XEXP (x, 0)));
6074 return;
6076 case CONST:
6077 arm_print_value (f, XEXP (x, 0));
6078 return;
6080 case PLUS:
6081 arm_print_value (f, XEXP (x, 0));
6082 fprintf (f, "+");
6083 arm_print_value (f, XEXP (x, 1));
6084 return;
6086 case PC:
6087 fprintf (f, "pc");
6088 return;
6090 default:
6091 fprintf (f, "????");
6092 return;
6096 /* Routines for manipulation of the constant pool. */
6098 /* Arm instructions cannot load a large constant directly into a
6099 register; they have to come from a pc relative load. The constant
6100 must therefore be placed in the addressable range of the pc
6101 relative load. Depending on the precise pc relative load
6102 instruction the range is somewhere between 256 bytes and 4k. This
6103 means that we often have to dump a constant inside a function, and
6104 generate code to branch around it.
6106 It is important to minimize this, since the branches will slow
6107 things down and make the code larger.
6109 Normally we can hide the table after an existing unconditional
6110 branch so that there is no interruption of the flow, but in the
6111 worst case the code looks like this:
6113 ldr rn, L1
6115 b L2
6116 align
6117 L1: .long value
6121 ldr rn, L3
6123 b L4
6124 align
6125 L3: .long value
6129 We fix this by performing a scan after scheduling, which notices
6130 which instructions need to have their operands fetched from the
6131 constant table and builds the table.
6133 The algorithm starts by building a table of all the constants that
6134 need fixing up and all the natural barriers in the function (places
6135 where a constant table can be dropped without breaking the flow).
6136 For each fixup we note how far the pc-relative replacement will be
6137 able to reach and the offset of the instruction into the function.
6139 Having built the table we then group the fixes together to form
6140 tables that are as large as possible (subject to addressing
6141 constraints) and emit each table of constants after the last
6142 barrier that is within range of all the instructions in the group.
6143 If a group does not contain a barrier, then we forcibly create one
6144 by inserting a jump instruction into the flow. Once the table has
6145 been inserted, the insns are then modified to reference the
6146 relevant entry in the pool.
6148 Possible enhancements to the algorithm (not implemented) are:
6150 1) For some processors and object formats, there may be benefit in
6151 aligning the pools to the start of cache lines; this alignment
6152 would need to be taken into account when calculating addressability
6153 of a pool. */
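/* As an illustrative sketch (not taken from any particular dump), the
worst case shown above would be rewritten by this pass along these
lines, with both constants sharing one pool behind a single branch:

ldr rn, L5
...
ldr rm, L5+4
...
b L6 ; forced barrier: jump around the pool
align
L5: .long value
.long value2
L6: ...

The label names here are hypothetical; the real pool label is created
by gen_label_rtx and the entries are emitted by dump_minipool. */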
6155 /* These typedefs are located at the start of this file, so that
6156 they can be used in the prototypes there. This comment is to
6157 remind readers of that fact so that the following structures
6158 can be understood more easily.
6160 typedef struct minipool_node Mnode;
6161 typedef struct minipool_fixup Mfix; */
6163 struct minipool_node
6165 /* Doubly linked chain of entries. */
6166 Mnode * next;
6167 Mnode * prev;
6168 /* The maximum offset into the code at which this entry can be placed. While
6169 pushing fixes for forward references, all entries are sorted in order
6170 of increasing max_address. */
6171 HOST_WIDE_INT max_address;
6172 /* Similarly for an entry inserted for a backwards ref. */
6173 HOST_WIDE_INT min_address;
6174 /* The number of fixes referencing this entry. This can become zero
6175 if we "unpush" an entry. In this case we ignore the entry when we
6176 come to emit the code. */
6177 int refcount;
6178 /* The offset from the start of the minipool. */
6179 HOST_WIDE_INT offset;
6180 /* The value in the table. */
6181 rtx value;
6182 /* The mode of value. */
6183 enum machine_mode mode;
6184 int fix_size; /* The number of bytes the value occupies in the pool. */
6187 struct minipool_fixup
6189 Mfix * next; /* The next fix in the chain. */
6190 rtx insn; /* The insn requiring the fix, or the barrier. */
6191 HOST_WIDE_INT address; /* Offset of the insn from the function start. */
6192 rtx * loc; /* Pointer to the part of the insn needing the fix. */
6193 enum machine_mode mode; /* The mode of the value to be loaded. */
6194 int fix_size; /* Bytes the value occupies in the pool. */
6195 rtx value; /* The constant that must be loaded. */
6196 Mnode * minipool; /* The pool entry allocated for this fix. */
6197 HOST_WIDE_INT forwards; /* Max forward reach of the pc-relative load. */
6198 HOST_WIDE_INT backwards; /* Max backward reach of the pc-relative load. */
6201 /* Fixes less than a word need padding out to a word boundary. */
6202 #define MINIPOOL_FIX_SIZE(mode) \
6203 (GET_MODE_SIZE ((mode)) >= 4 ? GET_MODE_SIZE ((mode)) : 4)
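/* For example, MINIPOOL_FIX_SIZE (HImode) is 4, since GET_MODE_SIZE
(HImode) is only 2, whereas MINIPOOL_FIX_SIZE (DImode) is simply
GET_MODE_SIZE (DImode), i.e. 8. */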
6205 static Mnode * minipool_vector_head;
6206 static Mnode * minipool_vector_tail;
6207 static rtx minipool_vector_label;
6209 /* The linked list of all minipool fixes required for this function. */
6210 Mfix * minipool_fix_head;
6211 Mfix * minipool_fix_tail;
6212 /* The fix entry for the current minipool, once it has been placed. */
6213 Mfix * minipool_barrier;
6215 /* Determines if INSN is the start of a jump table. Returns the end
6216 of the TABLE or NULL_RTX. */
6218 static rtx
6219 is_jump_table (insn)
6220 rtx insn;
6222 rtx table;
6224 if (GET_CODE (insn) == JUMP_INSN
6225 && JUMP_LABEL (insn) != NULL
6226 && ((table = next_real_insn (JUMP_LABEL (insn)))
6227 == next_real_insn (insn))
6228 && table != NULL
6229 && GET_CODE (table) == JUMP_INSN
6230 && (GET_CODE (PATTERN (table)) == ADDR_VEC
6231 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
6232 return table;
6234 return NULL_RTX;
6237 #ifndef JUMP_TABLES_IN_TEXT_SECTION
6238 #define JUMP_TABLES_IN_TEXT_SECTION 0
6239 #endif
6241 static HOST_WIDE_INT
6242 get_jump_table_size (insn)
6243 rtx insn;
6245 /* ADDR_VECs only take room if read-only data goes into the text
6246 section. */
6247 if (JUMP_TABLES_IN_TEXT_SECTION
6248 #if !defined(READONLY_DATA_SECTION) && !defined(READONLY_DATA_SECTION_ASM_OP)
6249 || 1
6250 #endif
6253 rtx body = PATTERN (insn);
6254 int elt = GET_CODE (body) == ADDR_DIFF_VEC ? 1 : 0;
6256 return GET_MODE_SIZE (GET_MODE (body)) * XVECLEN (body, elt);
6259 return 0;
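/* For instance (purely illustrative): an ADDR_DIFF_VEC in HImode with
ten entries would contribute GET_MODE_SIZE (HImode) * 10 = 20 bytes
when jump tables are placed in the text section, and nothing
otherwise. */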
6262 /* Move a minipool fix MP from its current location to before MAX_MP.
6263 If MAX_MP is NULL, then MP doesn't need moving, but the addressing
6264 constraints may need updating. */
6266 static Mnode *
6267 move_minipool_fix_forward_ref (mp, max_mp, max_address)
6268 Mnode * mp;
6269 Mnode * max_mp;
6270 HOST_WIDE_INT max_address;
6272 /* This should never be true and the code below assumes these are
6273 different. */
6274 if (mp == max_mp)
6275 abort ();
6277 if (max_mp == NULL)
6279 if (max_address < mp->max_address)
6280 mp->max_address = max_address;
6282 else
6284 if (max_address > max_mp->max_address - mp->fix_size)
6285 mp->max_address = max_mp->max_address - mp->fix_size;
6286 else
6287 mp->max_address = max_address;
6289 /* Unlink MP from its current position. Since max_mp is non-null,
6290 mp->prev must be non-null. */
6291 mp->prev->next = mp->next;
6292 if (mp->next != NULL)
6293 mp->next->prev = mp->prev;
6294 else
6295 minipool_vector_tail = mp->prev;
6297 /* Re-insert it before MAX_MP. */
6298 mp->next = max_mp;
6299 mp->prev = max_mp->prev;
6300 max_mp->prev = mp;
6302 if (mp->prev != NULL)
6303 mp->prev->next = mp;
6304 else
6305 minipool_vector_head = mp;
6308 /* Save the new entry. */
6309 max_mp = mp;
6311 /* Scan over the preceding entries and adjust their addresses as
6312 required. */
6313 while (mp->prev != NULL
6314 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
6316 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
6317 mp = mp->prev;
6320 return max_mp;
6323 /* Add a constant to the minipool for a forward reference. Returns the
6324 node added or NULL if the constant will not fit in this pool. */
6326 static Mnode *
6327 add_minipool_forward_ref (fix)
6328 Mfix * fix;
6330 /* If set, max_mp is the first pool_entry that has a lower
6331 constraint than the one we are trying to add. */
6332 Mnode * max_mp = NULL;
6333 HOST_WIDE_INT max_address = fix->address + fix->forwards;
6334 Mnode * mp;
6336 /* If this fix's address is greater than the address of the first
6337 entry, then we can't put the fix in this pool. We subtract the
6338 size of the current fix to ensure that if the table is fully
6339 packed we still have enough room to insert this value by shuffling
6340 the other fixes forwards. */
6341 if (minipool_vector_head &&
6342 fix->address >= minipool_vector_head->max_address - fix->fix_size)
6343 return NULL;
6345 /* Scan the pool to see if a constant with the same value has
6346 already been added. While we are doing this, also note the
6347 location where we must insert the constant if it doesn't already
6348 exist. */
6349 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
6351 if (GET_CODE (fix->value) == GET_CODE (mp->value)
6352 && fix->mode == mp->mode
6353 && (GET_CODE (fix->value) != CODE_LABEL
6354 || (CODE_LABEL_NUMBER (fix->value)
6355 == CODE_LABEL_NUMBER (mp->value)))
6356 && rtx_equal_p (fix->value, mp->value))
6358 /* More than one fix references this entry. */
6359 mp->refcount++;
6360 return move_minipool_fix_forward_ref (mp, max_mp, max_address);
6363 /* Note the insertion point if necessary. */
6364 if (max_mp == NULL
6365 && mp->max_address > max_address)
6366 max_mp = mp;
6369 /* The value is not currently in the minipool, so we need to create
6370 a new entry for it. If MAX_MP is NULL, the entry will be put on
6371 the end of the list since the placement is less constrained than
6372 any existing entry. Otherwise, we insert the new fix before
6373 MAX_MP and, if necessary, adjust the constraints on the other
6374 entries. */
6375 mp = xmalloc (sizeof (* mp));
6376 mp->fix_size = fix->fix_size;
6377 mp->mode = fix->mode;
6378 mp->value = fix->value;
6379 mp->refcount = 1;
6380 /* Not yet required for a backwards ref. */
6381 mp->min_address = -65536;
6383 if (max_mp == NULL)
6385 mp->max_address = max_address;
6386 mp->next = NULL;
6387 mp->prev = minipool_vector_tail;
6389 if (mp->prev == NULL)
6391 minipool_vector_head = mp;
6392 minipool_vector_label = gen_label_rtx ();
6394 else
6395 mp->prev->next = mp;
6397 minipool_vector_tail = mp;
6399 else
6401 if (max_address > max_mp->max_address - mp->fix_size)
6402 mp->max_address = max_mp->max_address - mp->fix_size;
6403 else
6404 mp->max_address = max_address;
6406 mp->next = max_mp;
6407 mp->prev = max_mp->prev;
6408 max_mp->prev = mp;
6409 if (mp->prev != NULL)
6410 mp->prev->next = mp;
6411 else
6412 minipool_vector_head = mp;
6415 /* Save the new entry. */
6416 max_mp = mp;
6418 /* Scan over the preceding entries and adjust their addresses as
6419 required. */
6420 while (mp->prev != NULL
6421 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
6423 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
6424 mp = mp->prev;
6427 return max_mp;
6430 static Mnode *
6431 move_minipool_fix_backward_ref (mp, min_mp, min_address)
6432 Mnode * mp;
6433 Mnode * min_mp;
6434 HOST_WIDE_INT min_address;
6436 HOST_WIDE_INT offset;
6438 /* This should never be true, and the code below assumes these are
6439 different. */
6440 if (mp == min_mp)
6441 abort ();
6443 if (min_mp == NULL)
6445 if (min_address > mp->min_address)
6446 mp->min_address = min_address;
6448 else
6450 /* We will adjust this below if it is too loose. */
6451 mp->min_address = min_address;
6453 /* Unlink MP from its current position. Since min_mp is non-null,
6454 mp->next must be non-null. */
6455 mp->next->prev = mp->prev;
6456 if (mp->prev != NULL)
6457 mp->prev->next = mp->next;
6458 else
6459 minipool_vector_head = mp->next;
6461 /* Reinsert it after MIN_MP. */
6462 mp->prev = min_mp;
6463 mp->next = min_mp->next;
6464 min_mp->next = mp;
6465 if (mp->next != NULL)
6466 mp->next->prev = mp;
6467 else
6468 minipool_vector_tail = mp;
6471 min_mp = mp;
6473 offset = 0;
6474 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
6476 mp->offset = offset;
6477 if (mp->refcount > 0)
6478 offset += mp->fix_size;
6480 if (mp->next && mp->next->min_address < mp->min_address + mp->fix_size)
6481 mp->next->min_address = mp->min_address + mp->fix_size;
6484 return min_mp;
6487 /* Add a constant to the minipool for a backward reference. Returns the
6488 node added or NULL if the constant will not fit in this pool.
6490 Note that the code for insertion for a backwards reference can be
6491 somewhat confusing because the calculated offsets for each fix do
6492 not take into account the size of the pool (which is still under
6493 construction). */
6495 static Mnode *
6496 add_minipool_backward_ref (fix)
6497 Mfix * fix;
6499 /* If set, min_mp is the last pool_entry that has a lower constraint
6500 than the one we are trying to add. */
6501 Mnode * min_mp = NULL;
6502 /* This can be negative, since it is only a constraint. */
6503 HOST_WIDE_INT min_address = fix->address - fix->backwards;
6504 Mnode * mp;
6506 /* If we can't reach the current pool from this insn, or if we can't
6507 insert this entry at the end of the pool without pushing other
6508 fixes out of range, then we don't try. This ensures that we
6509 can't fail later on. */
6510 if (min_address >= minipool_barrier->address
6511 || (minipool_vector_tail->min_address + fix->fix_size
6512 >= minipool_barrier->address))
6513 return NULL;
6515 /* Scan the pool to see if a constant with the same value has
6516 already been added. While we are doing this, also note the
6517 location where we must insert the constant if it doesn't already
6518 exist. */
6519 for (mp = minipool_vector_tail; mp != NULL; mp = mp->prev)
6521 if (GET_CODE (fix->value) == GET_CODE (mp->value)
6522 && fix->mode == mp->mode
6523 && (GET_CODE (fix->value) != CODE_LABEL
6524 || (CODE_LABEL_NUMBER (fix->value)
6525 == CODE_LABEL_NUMBER (mp->value)))
6526 && rtx_equal_p (fix->value, mp->value)
6527 /* Check that there is enough slack to move this entry to the
6528 end of the table (this is conservative). */
6529 && (mp->max_address
6530 > (minipool_barrier->address
6531 + minipool_vector_tail->offset
6532 + minipool_vector_tail->fix_size)))
6534 mp->refcount++;
6535 return move_minipool_fix_backward_ref (mp, min_mp, min_address);
6538 if (min_mp != NULL)
6539 mp->min_address += fix->fix_size;
6540 else
6542 /* Note the insertion point if necessary. */
6543 if (mp->min_address < min_address)
6544 min_mp = mp;
6545 else if (mp->max_address
6546 < minipool_barrier->address + mp->offset + fix->fix_size)
6548 /* Inserting before this entry would push the fix beyond
6549 its maximum address (which can happen if we have
6550 re-located a forwards fix); force the new fix to come
6551 after it. */
6552 min_mp = mp;
6553 min_address = mp->min_address + fix->fix_size;
6558 /* We need to create a new entry. */
6559 mp = xmalloc (sizeof (* mp));
6560 mp->fix_size = fix->fix_size;
6561 mp->mode = fix->mode;
6562 mp->value = fix->value;
6563 mp->refcount = 1;
6564 mp->max_address = minipool_barrier->address + 65536;
6566 mp->min_address = min_address;
6568 if (min_mp == NULL)
6570 mp->prev = NULL;
6571 mp->next = minipool_vector_head;
6573 if (mp->next == NULL)
6575 minipool_vector_tail = mp;
6576 minipool_vector_label = gen_label_rtx ();
6578 else
6579 mp->next->prev = mp;
6581 minipool_vector_head = mp;
6583 else
6585 mp->next = min_mp->next;
6586 mp->prev = min_mp;
6587 min_mp->next = mp;
6589 if (mp->next != NULL)
6590 mp->next->prev = mp;
6591 else
6592 minipool_vector_tail = mp;
6595 /* Save the new entry. */
6596 min_mp = mp;
6598 if (mp->prev)
6599 mp = mp->prev;
6600 else
6601 mp->offset = 0;
6603 /* Scan over the following entries and adjust their offsets. */
6604 while (mp->next != NULL)
6606 if (mp->next->min_address < mp->min_address + mp->fix_size)
6607 mp->next->min_address = mp->min_address + mp->fix_size;
6609 if (mp->refcount)
6610 mp->next->offset = mp->offset + mp->fix_size;
6611 else
6612 mp->next->offset = mp->offset;
6614 mp = mp->next;
6617 return min_mp;
6620 static void
6621 assign_minipool_offsets (barrier)
6622 Mfix * barrier;
6624 HOST_WIDE_INT offset = 0;
6625 Mnode * mp;
6627 minipool_barrier = barrier;
6629 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
6631 mp->offset = offset;
6633 if (mp->refcount > 0)
6634 offset += mp->fix_size;
6638 /* Output the literal table. */
6639 static void
6640 dump_minipool (scan)
6641 rtx scan;
6643 Mnode * mp;
6644 Mnode * nmp;
6646 if (rtl_dump_file)
6647 fprintf (rtl_dump_file,
6648 ";; Emitting minipool after insn %u; address %ld\n",
6649 INSN_UID (scan), (unsigned long) minipool_barrier->address);
6651 scan = emit_label_after (gen_label_rtx (), scan);
6652 scan = emit_insn_after (gen_align_4 (), scan);
6653 scan = emit_label_after (minipool_vector_label, scan);
6655 for (mp = minipool_vector_head; mp != NULL; mp = nmp)
6657 if (mp->refcount > 0)
6659 if (rtl_dump_file)
6661 fprintf (rtl_dump_file,
6662 ";; Offset %u, min %ld, max %ld ",
6663 (unsigned) mp->offset, (unsigned long) mp->min_address,
6664 (unsigned long) mp->max_address);
6665 arm_print_value (rtl_dump_file, mp->value);
6666 fputc ('\n', rtl_dump_file);
6669 switch (mp->fix_size)
6671 #ifdef HAVE_consttable_1
6672 case 1:
6673 scan = emit_insn_after (gen_consttable_1 (mp->value), scan);
6674 break;
6676 #endif
6677 #ifdef HAVE_consttable_2
6678 case 2:
6679 scan = emit_insn_after (gen_consttable_2 (mp->value), scan);
6680 break;
6682 #endif
6683 #ifdef HAVE_consttable_4
6684 case 4:
6685 scan = emit_insn_after (gen_consttable_4 (mp->value), scan);
6686 break;
6688 #endif
6689 #ifdef HAVE_consttable_8
6690 case 8:
6691 scan = emit_insn_after (gen_consttable_8 (mp->value), scan);
6692 break;
6694 #endif
6695 default:
6696 abort ();
6697 break;
6701 nmp = mp->next;
6702 free (mp);
6705 minipool_vector_head = minipool_vector_tail = NULL;
6706 scan = emit_insn_after (gen_consttable_end (), scan);
6707 scan = emit_barrier_after (scan);
6710 /* Return the cost of forcibly inserting a barrier after INSN. */
6712 static int
6713 arm_barrier_cost (insn)
6714 rtx insn;
6716 /* Basing the location of the pool on the loop depth is preferable,
6717 but at the moment, the basic block information seems to be
6718 corrupted by this stage of the compilation. */
6719 int base_cost = 50;
6720 rtx next = next_nonnote_insn (insn);
6722 if (next != NULL && GET_CODE (next) == CODE_LABEL)
6723 base_cost -= 20;
6725 switch (GET_CODE (insn))
6727 case CODE_LABEL:
6728 /* It will always be better to place the table before the label, rather
6729 than after it. */
6730 return 50;
6732 case INSN:
6733 case CALL_INSN:
6734 return base_cost;
6736 case JUMP_INSN:
6737 return base_cost - 10;
6739 default:
6740 return base_cost + 10;
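/* To make the weighting concrete: a plain INSN or CALL_INSN costs 50
and a JUMP_INSN 40, with 20 knocked off when the next insn is a
CODE_LABEL, while a CODE_LABEL itself always costs 50. Since
create_fix_barrier below keeps the lowest cost seen, the pool tends
to be placed just before a label rather than in the middle of
straight-line code. */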
6744 /* Find the best place in the insn stream in the range
6745 (FIX->address,MAX_ADDRESS) to forcibly insert a minipool barrier.
6746 Create the barrier by inserting a jump and add a new fix entry for
6747 it. */
6749 static Mfix *
6750 create_fix_barrier (fix, max_address)
6751 Mfix * fix;
6752 HOST_WIDE_INT max_address;
6754 HOST_WIDE_INT count = 0;
6755 rtx barrier;
6756 rtx from = fix->insn;
6757 rtx selected = from;
6758 int selected_cost;
6759 HOST_WIDE_INT selected_address;
6760 Mfix * new_fix;
6761 HOST_WIDE_INT max_count = max_address - fix->address;
6762 rtx label = gen_label_rtx ();
6764 selected_cost = arm_barrier_cost (from);
6765 selected_address = fix->address;
6767 while (from && count < max_count)
6769 rtx tmp;
6770 int new_cost;
6772 /* This code shouldn't have been called if there was a natural barrier
6773 within range. */
6774 if (GET_CODE (from) == BARRIER)
6775 abort ();
6777 /* Count the length of this insn. */
6778 count += get_attr_length (from);
6780 /* If there is a jump table, add its length. */
6781 tmp = is_jump_table (from);
6782 if (tmp != NULL)
6784 count += get_jump_table_size (tmp);
6786 /* Jump tables aren't in a basic block, so base the cost on
6787 the dispatch insn. If we select this location, we will
6788 still put the pool after the table. */
6789 new_cost = arm_barrier_cost (from);
6791 if (count < max_count && new_cost <= selected_cost)
6793 selected = tmp;
6794 selected_cost = new_cost;
6795 selected_address = fix->address + count;
6798 /* Continue after the dispatch table. */
6799 from = NEXT_INSN (tmp);
6800 continue;
6803 new_cost = arm_barrier_cost (from);
6805 if (count < max_count && new_cost <= selected_cost)
6807 selected = from;
6808 selected_cost = new_cost;
6809 selected_address = fix->address + count;
6812 from = NEXT_INSN (from);
6815 /* Create a new JUMP_INSN that branches around a barrier. */
6816 from = emit_jump_insn_after (gen_jump (label), selected);
6817 JUMP_LABEL (from) = label;
6818 barrier = emit_barrier_after (from);
6819 emit_label_after (label, barrier);
6821 /* Create a minipool barrier entry for the new barrier. */
6822 new_fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* new_fix));
6823 new_fix->insn = barrier;
6824 new_fix->address = selected_address;
6825 new_fix->next = fix->next;
6826 fix->next = new_fix;
6828 return new_fix;
6831 /* Record that there is a natural barrier in the insn stream at
6832 ADDRESS. */
6833 static void
6834 push_minipool_barrier (insn, address)
6835 rtx insn;
6836 HOST_WIDE_INT address;
6838 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
6840 fix->insn = insn;
6841 fix->address = address;
6843 fix->next = NULL;
6844 if (minipool_fix_head != NULL)
6845 minipool_fix_tail->next = fix;
6846 else
6847 minipool_fix_head = fix;
6849 minipool_fix_tail = fix;
6852 /* Record INSN, which will need fixing up to load a value from the
6853 minipool. ADDRESS is the offset of the insn since the start of the
6854 function; LOC is a pointer to the part of the insn which requires
6855 fixing; VALUE is the constant that must be loaded, which is of type
6856 MODE. */
6857 static void
6858 push_minipool_fix (insn, address, loc, mode, value)
6859 rtx insn;
6860 HOST_WIDE_INT address;
6861 rtx * loc;
6862 enum machine_mode mode;
6863 rtx value;
6865 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
6867 #ifdef AOF_ASSEMBLER
6868 /* PIC symbol references need to be converted into offsets into the
6869 based area. */
6870 /* XXX This shouldn't be done here. */
6871 if (flag_pic && GET_CODE (value) == SYMBOL_REF)
6872 value = aof_pic_entry (value);
6873 #endif /* AOF_ASSEMBLER */
6875 fix->insn = insn;
6876 fix->address = address;
6877 fix->loc = loc;
6878 fix->mode = mode;
6879 fix->fix_size = MINIPOOL_FIX_SIZE (mode);
6880 fix->value = value;
6881 fix->forwards = get_attr_pool_range (insn);
6882 fix->backwards = get_attr_neg_pool_range (insn);
6883 fix->minipool = NULL;
6885 /* If an insn doesn't have a range defined for it, then it isn't
6886 expecting to be reworked by this code. Better to abort now than
6887 to generate duff assembly code. */
6888 if (fix->forwards == 0 && fix->backwards == 0)
6889 abort ();
6891 if (rtl_dump_file)
6893 fprintf (rtl_dump_file,
6894 ";; %smode fixup for i%d; addr %lu, range (%ld,%ld): ",
6895 GET_MODE_NAME (mode),
6896 INSN_UID (insn), (unsigned long) address,
6897 -1 * (long)fix->backwards, (long)fix->forwards);
6898 arm_print_value (rtl_dump_file, fix->value);
6899 fprintf (rtl_dump_file, "\n");
6902 /* Add it to the chain of fixes. */
6903 fix->next = NULL;
6905 if (minipool_fix_head != NULL)
6906 minipool_fix_tail->next = fix;
6907 else
6908 minipool_fix_head = fix;
6910 minipool_fix_tail = fix;
6913 /* Scan INSN and note any of its operands that need fixing. */
6915 static void
6916 note_invalid_constants (insn, address)
6917 rtx insn;
6918 HOST_WIDE_INT address;
6920 int opno;
6922 extract_insn (insn);
6924 if (!constrain_operands (1))
6925 fatal_insn_not_found (insn);
6927 /* Fill in recog_op_alt with information about the constraints of this
6928 insn. */
6929 preprocess_constraints ();
6931 for (opno = 0; opno < recog_data.n_operands; opno++)
6933 /* Things we need to fix can only occur in inputs. */
6934 if (recog_data.operand_type[opno] != OP_IN)
6935 continue;
6937 /* If this alternative is a memory reference, then any mention
6938 of constants in this alternative is really to fool reload
6939 into allowing us to accept one there. We need to fix them up
6940 now so that we output the right code. */
6941 if (recog_op_alt[opno][which_alternative].memory_ok)
6943 rtx op = recog_data.operand[opno];
6945 if (CONSTANT_P (op))
6946 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
6947 recog_data.operand_mode[opno], op);
6948 #if 0
6949 /* RWE: Now we look correctly at the operands for the insn;
6950 this shouldn't be needed any more. */
6951 #ifndef AOF_ASSEMBLER
6952 /* XXX Is this still needed? */
6953 else if (GET_CODE (op) == UNSPEC && XINT (op, 1) == UNSPEC_PIC_SYM)
6954 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
6955 recog_data.operand_mode[opno],
6956 XVECEXP (op, 0, 0));
6957 #endif
6958 #endif
6959 else if (GET_CODE (op) == MEM
6960 && GET_CODE (XEXP (op, 0)) == SYMBOL_REF
6961 && CONSTANT_POOL_ADDRESS_P (XEXP (op, 0)))
6962 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
6963 recog_data.operand_mode[opno],
6964 get_pool_constant (XEXP (op, 0)));
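/* This is the entry point for the pass described at the start of this
section: scan all the insns, record the constants that need to be
rewritten, group them into minipools and emit the pools. */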
6969 void
6970 arm_reorg (first)
6971 rtx first;
6973 rtx insn;
6974 HOST_WIDE_INT address = 0;
6975 Mfix * fix;
6977 minipool_fix_head = minipool_fix_tail = NULL;
6979 /* The first insn must always be a note, or the code below won't
6980 scan it properly. */
6981 if (GET_CODE (first) != NOTE)
6982 abort ();
6984 /* Scan all the insns and record the operands that will need fixing. */
6985 for (insn = next_nonnote_insn (first); insn; insn = next_nonnote_insn (insn))
6987 if (TARGET_CIRRUS_FIX_INVALID_INSNS
6988 && (is_cirrus_insn (insn)
6989 || GET_CODE (insn) == JUMP_INSN
6990 || is_load_address (insn)))
6991 cirrus_reorg (insn);
6993 if (GET_CODE (insn) == BARRIER)
6994 push_minipool_barrier (insn, address);
6995 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
6996 || GET_CODE (insn) == JUMP_INSN)
6998 rtx table;
7000 note_invalid_constants (insn, address);
7001 address += get_attr_length (insn);
7003 /* If the insn is a vector jump, add the size of the table
7004 and skip the table. */
7005 if ((table = is_jump_table (insn)) != NULL)
7007 address += get_jump_table_size (table);
7008 insn = table;
7013 fix = minipool_fix_head;
7015 /* Now scan the fixups and perform the required changes. */
7016 while (fix)
7018 Mfix * ftmp;
7019 Mfix * fdel;
7020 Mfix * last_added_fix;
7021 Mfix * last_barrier = NULL;
7022 Mfix * this_fix;
7024 /* Skip any further barriers before the next fix. */
7025 while (fix && GET_CODE (fix->insn) == BARRIER)
7026 fix = fix->next;
7028 /* No more fixes. */
7029 if (fix == NULL)
7030 break;
7032 last_added_fix = NULL;
7034 for (ftmp = fix; ftmp; ftmp = ftmp->next)
7036 if (GET_CODE (ftmp->insn) == BARRIER)
7038 if (ftmp->address >= minipool_vector_head->max_address)
7039 break;
7041 last_barrier = ftmp;
7043 else if ((ftmp->minipool = add_minipool_forward_ref (ftmp)) == NULL)
7044 break;
7046 last_added_fix = ftmp; /* Keep track of the last fix added. */
7049 /* If we found a barrier, drop back to that; any fixes that we
7050 could have reached but come after the barrier will now go in
7051 the next mini-pool. */
7052 if (last_barrier != NULL)
7054 /* Reduce the refcount for those fixes that won't go into this
7055 pool after all. */
7056 for (fdel = last_barrier->next;
7057 fdel && fdel != ftmp;
7058 fdel = fdel->next)
7060 fdel->minipool->refcount--;
7061 fdel->minipool = NULL;
7064 ftmp = last_barrier;
7066 else
7068 /* ftmp is the first fix that we can't fit into this pool and
7069 there are no natural barriers that we could use. Insert a
7070 new barrier in the code somewhere between the previous
7071 fix and this one, and arrange to jump around it. */
7072 HOST_WIDE_INT max_address;
7074 /* The last item on the list of fixes must be a barrier, so
7075 we can never run off the end of the list of fixes without
7076 last_barrier being set. */
7077 if (ftmp == NULL)
7078 abort ();
7080 max_address = minipool_vector_head->max_address;
7081 /* Check that there isn't another fix that is in range that
7082 we couldn't fit into this pool because the pool was
7083 already too large: we need to put the pool before such an
7084 instruction. */
7085 if (ftmp->address < max_address)
7086 max_address = ftmp->address;
7088 last_barrier = create_fix_barrier (last_added_fix, max_address);
7091 assign_minipool_offsets (last_barrier);
7093 while (ftmp)
7095 if (GET_CODE (ftmp->insn) != BARRIER
7096 && ((ftmp->minipool = add_minipool_backward_ref (ftmp))
7097 == NULL))
7098 break;
7100 ftmp = ftmp->next;
7103 /* Scan over the fixes we have identified for this pool, fixing them
7104 up and adding the constants to the pool itself. */
7105 for (this_fix = fix; this_fix && ftmp != this_fix;
7106 this_fix = this_fix->next)
7107 if (GET_CODE (this_fix->insn) != BARRIER)
7109 rtx addr
7110 = plus_constant (gen_rtx_LABEL_REF (VOIDmode,
7111 minipool_vector_label),
7112 this_fix->minipool->offset);
7113 *this_fix->loc = gen_rtx_MEM (this_fix->mode, addr);
7116 dump_minipool (last_barrier->insn);
7117 fix = ftmp;
7120 /* From now on we must synthesize any constants that we can't handle
7121 directly. This can happen if the RTL gets split during final
7122 instruction generation. */
7123 after_arm_reorg = 1;
7125 /* Free the minipool memory. */
7126 obstack_free (&minipool_obstack, minipool_startobj);
7129 /* Routines to output assembly language. */
7131 /* If the rtx is a valid FPA immediate constant then return the string of the number.
7132 In this way we can ensure that valid double constants are generated even
7133 when cross compiling. */
7135 const char *
7136 fp_immediate_constant (x)
7137 rtx x;
7139 REAL_VALUE_TYPE r;
7140 int i;
7142 if (!fpa_consts_inited)
7143 init_fpa_table ();
7145 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
7146 for (i = 0; i < 8; i++)
7147 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
7148 return strings_fpa[i];
7150 abort ();
7153 /* As for fp_immediate_constant, but value is passed directly, not in rtx. */
7155 static const char *
7156 fp_const_from_val (r)
7157 REAL_VALUE_TYPE * r;
7159 int i;
7161 if (!fpa_consts_inited)
7162 init_fpa_table ();
7164 for (i = 0; i < 8; i++)
7165 if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
7166 return strings_fpa[i];
7168 abort ();
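/* For reference, the eight FPA immediates that init_fpa_table sets up
are believed to be 0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 0.5 and 10.0; any
other value reaching either of the routines above is a bug, hence
the aborts. */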
7171 /* Output the operands of a LDM/STM instruction to STREAM.
7172 MASK is the ARM register set mask of which only bits 0-15 are important.
7173 REG is the base register, either the frame pointer or the stack pointer,
7174 INSTR is the possibly suffixed load or store instruction. */
7176 static void
7177 print_multi_reg (stream, instr, reg, mask)
7178 FILE * stream;
7179 const char * instr;
7180 int reg;
7181 int mask;
7183 int i;
7184 int not_first = FALSE;
7186 fputc ('\t', stream);
7187 asm_fprintf (stream, instr, reg);
7188 fputs (", {", stream);
7190 for (i = 0; i <= LAST_ARM_REGNUM; i++)
7191 if (mask & (1 << i))
7193 if (not_first)
7194 fprintf (stream, ", ");
7196 asm_fprintf (stream, "%r", i);
7197 not_first = TRUE;
7200 fprintf (stream, "}%s\n", TARGET_APCS_32 ? "" : "^");
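/* A hedged example of the output: with INSTR "ldm%?fd\t%r!", REG
SP_REGNUM and MASK 0x8010 (r4 and pc), this would emit

ldmfd sp!, {r4, pc}

with a trailing ^ appended in 26-bit (non-APCS-32) mode. The mask
and instruction string here are illustrative only. */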
7203 /* Output a 'call' insn. */
7205 const char *
7206 output_call (operands)
7207 rtx * operands;
7209 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
7211 if (REGNO (operands[0]) == LR_REGNUM)
7213 operands[0] = gen_rtx_REG (SImode, IP_REGNUM);
7214 output_asm_insn ("mov%?\t%0, %|lr", operands);
7217 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
7219 if (TARGET_INTERWORK)
7220 output_asm_insn ("bx%?\t%0", operands);
7221 else
7222 output_asm_insn ("mov%?\t%|pc, %0", operands);
7224 return "";
7227 static int
7228 eliminate_lr2ip (x)
7229 rtx * x;
7231 int something_changed = 0;
7232 rtx x0 = * x;
7233 int code = GET_CODE (x0);
7234 int i, j;
7235 const char * fmt;
7237 switch (code)
7239 case REG:
7240 if (REGNO (x0) == LR_REGNUM)
7242 *x = gen_rtx_REG (SImode, IP_REGNUM);
7243 return 1;
7245 return 0;
7246 default:
7247 /* Scan through the sub-elements and change any references there. */
7248 fmt = GET_RTX_FORMAT (code);
7250 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7251 if (fmt[i] == 'e')
7252 something_changed |= eliminate_lr2ip (&XEXP (x0, i));
7253 else if (fmt[i] == 'E')
7254 for (j = 0; j < XVECLEN (x0, i); j++)
7255 something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));
7257 return something_changed;
7261 /* Output a 'call' insn that is a reference in memory. */
7263 const char *
7264 output_call_mem (operands)
7265 rtx * operands;
7267 operands[0] = copy_rtx (operands[0]); /* Be ultra careful. */
7268 /* Handle calls using lr by using ip (which may be clobbered in subr anyway). */
7269 if (eliminate_lr2ip (&operands[0]))
7270 output_asm_insn ("mov%?\t%|ip, %|lr", operands);
7272 if (TARGET_INTERWORK)
7274 output_asm_insn ("ldr%?\t%|ip, %0", operands);
7275 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
7276 output_asm_insn ("bx%?\t%|ip", operands);
7278 else
7280 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
7281 output_asm_insn ("ldr%?\t%|pc, %0", operands);
7284 return "";
7288 /* Output a move from arm registers to an fpu register.
7289 OPERANDS[0] is an fpu register.
7290 OPERANDS[1] is the first register of an arm register pair. */
7292 const char *
7293 output_mov_long_double_fpu_from_arm (operands)
7294 rtx * operands;
7296 int arm_reg0 = REGNO (operands[1]);
7297 rtx ops[3];
7299 if (arm_reg0 == IP_REGNUM)
7300 abort ();
7302 ops[0] = gen_rtx_REG (SImode, arm_reg0);
7303 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
7304 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
7306 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
7307 output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);
7309 return "";
7312 /* Output a move from an fpu register to arm registers.
7313 OPERANDS[0] is the first register of an arm register pair.
7314 OPERANDS[1] is an fpu register. */
7316 const char *
7317 output_mov_long_double_arm_from_fpu (operands)
7318 rtx * operands;
7320 int arm_reg0 = REGNO (operands[0]);
7321 rtx ops[3];
7323 if (arm_reg0 == IP_REGNUM)
7324 abort ();
7326 ops[0] = gen_rtx_REG (SImode, arm_reg0);
7327 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
7328 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
7330 output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
7331 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
7332 return "";
7335 /* Output a move from arm registers to arm registers of a long double.
7336 OPERANDS[0] is the destination.
7337 OPERANDS[1] is the source. */
7339 const char *
7340 output_mov_long_double_arm_from_arm (operands)
7341 rtx * operands;
7343 /* We have to be careful here because the two might overlap. */
7344 int dest_start = REGNO (operands[0]);
7345 int src_start = REGNO (operands[1]);
7346 rtx ops[2];
7347 int i;
7349 if (dest_start < src_start)
7351 for (i = 0; i < 3; i++)
7353 ops[0] = gen_rtx_REG (SImode, dest_start + i);
7354 ops[1] = gen_rtx_REG (SImode, src_start + i);
7355 output_asm_insn ("mov%?\t%0, %1", ops);
7358 else
7360 for (i = 2; i >= 0; i--)
7362 ops[0] = gen_rtx_REG (SImode, dest_start + i);
7363 ops[1] = gen_rtx_REG (SImode, src_start + i);
7364 output_asm_insn ("mov%?\t%0, %1", ops);
7368 return "";
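/* For instance, copying {r2, r3, r4} into {r1, r2, r3} must go lowest
first (r1 <- r2, r2 <- r3, r3 <- r4), while copying {r1, r2, r3}
into {r2, r3, r4} must go highest first (r4 <- r3, r3 <- r2,
r2 <- r1); any other order would clobber a source register before
it had been read. */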
7372 /* Output a move from arm registers to an fpu register.
7373 OPERANDS[0] is an fpu register.
7374 OPERANDS[1] is the first register of an arm register pair. */
7376 const char *
7377 output_mov_double_fpu_from_arm (operands)
7378 rtx * operands;
7380 int arm_reg0 = REGNO (operands[1]);
7381 rtx ops[2];
7383 if (arm_reg0 == IP_REGNUM)
7384 abort ();
7386 ops[0] = gen_rtx_REG (SImode, arm_reg0);
7387 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
7388 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
7389 output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
7390 return "";
7393 /* Output a move from an fpu register to arm registers.
7394 OPERANDS[0] is the first register of an arm register pair.
7395 OPERANDS[1] is an fpu register. */
7397 const char *
7398 output_mov_double_arm_from_fpu (operands)
7399 rtx * operands;
7401 int arm_reg0 = REGNO (operands[0]);
7402 rtx ops[2];
7404 if (arm_reg0 == IP_REGNUM)
7405 abort ();
7407 ops[0] = gen_rtx_REG (SImode, arm_reg0);
7408 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
7409 output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
7410 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
7411 return "";
7414 /* Output a move between double words.
7415 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
7416 or MEM<-REG and all MEMs must be offsettable addresses. */
7418 const char *
7419 output_move_double (operands)
7420 rtx * operands;
7422 enum rtx_code code0 = GET_CODE (operands[0]);
7423 enum rtx_code code1 = GET_CODE (operands[1]);
7424 rtx otherops[3];
7426 if (code0 == REG)
7428 int reg0 = REGNO (operands[0]);
7430 otherops[0] = gen_rtx_REG (SImode, 1 + reg0);
7432 if (code1 == REG)
7434 int reg1 = REGNO (operands[1]);
7435 if (reg1 == IP_REGNUM)
7436 abort ();
7438 /* Ensure the second source is not overwritten. */
7439 if (reg1 == reg0 + (WORDS_BIG_ENDIAN ? -1 : 1))
7440 output_asm_insn ("mov%?\t%Q0, %Q1\n\tmov%?\t%R0, %R1", operands);
7441 else
7442 output_asm_insn ("mov%?\t%R0, %R1\n\tmov%?\t%Q0, %Q1", operands);
7444 else if (code1 == CONST_DOUBLE)
7446 if (GET_MODE (operands[1]) == DFmode)
7448 REAL_VALUE_TYPE r;
7449 long l[2];
7451 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
7452 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
7453 otherops[1] = GEN_INT (l[1]);
7454 operands[1] = GEN_INT (l[0]);
7456 else if (GET_MODE (operands[1]) != VOIDmode)
7457 abort ();
7458 else if (WORDS_BIG_ENDIAN)
7460 otherops[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
7461 operands[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
7463 else
7465 otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
7466 operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
7469 output_mov_immediate (operands);
7470 output_mov_immediate (otherops);
7472 else if (code1 == CONST_INT)
7474 #if HOST_BITS_PER_WIDE_INT > 32
7475 /* If HOST_WIDE_INT is more than 32 bits, the intval tells us
7476 what the upper word is. */
7477 if (WORDS_BIG_ENDIAN)
7479 otherops[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
7480 operands[1] = GEN_INT (INTVAL (operands[1]) >> 32);
7482 else
7484 otherops[1] = GEN_INT (INTVAL (operands[1]) >> 32);
7485 operands[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
7487 #else
7488 /* Sign extend the intval into the high-order word. */
7489 if (WORDS_BIG_ENDIAN)
7491 otherops[1] = operands[1];
7492 operands[1] = (INTVAL (operands[1]) < 0
7493 ? constm1_rtx : const0_rtx);
7495 else
7496 otherops[1] = INTVAL (operands[1]) < 0 ? constm1_rtx : const0_rtx;
7497 #endif
7498 output_mov_immediate (otherops);
7499 output_mov_immediate (operands);
7501 else if (code1 == MEM)
7503 switch (GET_CODE (XEXP (operands[1], 0)))
7505 case REG:
7506 output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
7507 break;
7509 case PRE_INC:
7510 abort (); /* Should never happen now. */
7511 break;
7513 case PRE_DEC:
7514 output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
7515 break;
7517 case POST_INC:
7518 output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
7519 break;
7521 case POST_DEC:
7522 abort (); /* Should never happen now. */
7523 break;
7525 case LABEL_REF:
7526 case CONST:
7527 output_asm_insn ("adr%?\t%0, %1", operands);
7528 output_asm_insn ("ldm%?ia\t%0, %M0", operands);
7529 break;
7531 default:
7532 if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1),
7533 GET_MODE (XEXP (XEXP (operands[1], 0), 1))))
7535 otherops[0] = operands[0];
7536 otherops[1] = XEXP (XEXP (operands[1], 0), 0);
7537 otherops[2] = XEXP (XEXP (operands[1], 0), 1);
7539 if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
7541 if (GET_CODE (otherops[2]) == CONST_INT)
7543 switch ((int) INTVAL (otherops[2]))
7545 case -8:
7546 output_asm_insn ("ldm%?db\t%1, %M0", otherops);
7547 return "";
7548 case -4:
7549 output_asm_insn ("ldm%?da\t%1, %M0", otherops);
7550 return "";
7551 case 4:
7552 output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
7553 return "";
7556 if (!(const_ok_for_arm (INTVAL (otherops[2]))))
7557 output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
7558 else
7559 output_asm_insn ("add%?\t%0, %1, %2", otherops);
7561 else
7562 output_asm_insn ("add%?\t%0, %1, %2", otherops);
7564 else
7565 output_asm_insn ("sub%?\t%0, %1, %2", otherops);
7567 return "ldm%?ia\t%0, %M0";
7569 else
7571 otherops[1] = adjust_address (operands[1], VOIDmode, 4);
7572 /* Take care of overlapping base/data reg. */
7573 if (reg_mentioned_p (operands[0], operands[1]))
7575 output_asm_insn ("ldr%?\t%0, %1", otherops);
7576 output_asm_insn ("ldr%?\t%0, %1", operands);
7578 else
7580 output_asm_insn ("ldr%?\t%0, %1", operands);
7581 output_asm_insn ("ldr%?\t%0, %1", otherops);
7586 else
7587 abort (); /* Constraints should prevent this. */
7589 else if (code0 == MEM && code1 == REG)
7591 if (REGNO (operands[1]) == IP_REGNUM)
7592 abort ();
7594 switch (GET_CODE (XEXP (operands[0], 0)))
7596 case REG:
7597 output_asm_insn ("stm%?ia\t%m0, %M1", operands);
7598 break;
7600 case PRE_INC:
7601 abort (); /* Should never happen now. */
7602 break;
7604 case PRE_DEC:
7605 output_asm_insn ("stm%?db\t%m0!, %M1", operands);
7606 break;
7608 case POST_INC:
7609 output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
7610 break;
7612 case POST_DEC:
7613 abort (); /* Should never happen now. */
7614 break;
7616 case PLUS:
7617 if (GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT)
7619 switch ((int) INTVAL (XEXP (XEXP (operands[0], 0), 1)))
7621 case -8:
7622 output_asm_insn ("stm%?db\t%m0, %M1", operands);
7623 return "";
7625 case -4:
7626 output_asm_insn ("stm%?da\t%m0, %M1", operands);
7627 return "";
7629 case 4:
7630 output_asm_insn ("stm%?ib\t%m0, %M1", operands);
7631 return "";
7634 /* Fall through */
7636 default:
7637 otherops[0] = adjust_address (operands[0], VOIDmode, 4);
7638 otherops[1] = gen_rtx_REG (SImode, 1 + REGNO (operands[1]));
7639 output_asm_insn ("str%?\t%1, %0", operands);
7640 output_asm_insn ("str%?\t%1, %0", otherops);
7643 else
7644 /* Constraints should prevent this. */
7645 abort ();
7647 return "";
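/* The special-cased offsets above rely on the load/store multiple
addressing modes: for a two-register value, an access at base+4 is
equivalent to ldm/stm-ib (increment before), at base-4 to da
(decrement after), and at base-8 to db (decrement before), so those
three constants need no separate address arithmetic. */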
7651 /* Output an arbitrary MOV reg, #n.
7652 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
7654 const char *
7655 output_mov_immediate (operands)
7656 rtx * operands;
7658 HOST_WIDE_INT n = INTVAL (operands[1]);
7660 /* Try to use one MOV. */
7661 if (const_ok_for_arm (n))
7662 output_asm_insn ("mov%?\t%0, %1", operands);
7664 /* Try to use one MVN. */
7665 else if (const_ok_for_arm (~n))
7667 operands[1] = GEN_INT (~n);
7668 output_asm_insn ("mvn%?\t%0, %1", operands);
7670 else
7672 int n_ones = 0;
7673 int i;
7675 /* If all else fails, make it out of ORRs or BICs as appropriate. */
7676 for (i = 0; i < 32; i ++)
7677 if (n & 1 << i)
7678 n_ones ++;
7680 if (n_ones > 16) /* Shorter to use MVN with BIC in this case. */
7681 output_multi_immediate (operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1, ~ n);
7682 else
7683 output_multi_immediate (operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1, n);
7686 return "";
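/* Hedged examples of what the above produces: n = 0x1F00 fits a single
rotated 8-bit immediate, giving "mov r0, #7936"; n = 0xFFFFFF00 does
not, but ~n = 0xFF does, giving "mvn r0, #255"; and a value such as
0x12345678 fits neither form, so it is built up by
output_multi_immediate below from a MOV followed by ORRs (or an MVN
followed by BICs when the value is mostly ones). */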
7689 /* Output an ADD r, s, #n where n may be too big for one instruction.
7690 If adding zero to one register, output nothing. */
7692 const char *
7693 output_add_immediate (operands)
7694 rtx * operands;
7696 HOST_WIDE_INT n = INTVAL (operands[2]);
7698 if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
7700 if (n < 0)
7701 output_multi_immediate (operands,
7702 "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
7703 -n);
7704 else
7705 output_multi_immediate (operands,
7706 "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
7710 return "";
7713 /* Output a multiple immediate operation.
7714 OPERANDS is the vector of operands referred to in the output patterns.
7715 INSTR1 is the output pattern to use for the first constant.
7716 INSTR2 is the output pattern to use for subsequent constants.
7717 IMMED_OP is the index of the constant slot in OPERANDS.
7718 N is the constant value. */
7720 static const char *
7721 output_multi_immediate (operands, instr1, instr2, immed_op, n)
7722 rtx * operands;
7723 const char * instr1;
7724 const char * instr2;
7725 int immed_op;
7726 HOST_WIDE_INT n;
7728 #if HOST_BITS_PER_WIDE_INT > 32
7729 n &= 0xffffffff;
7730 #endif
7732 if (n == 0)
7734 /* Quick and easy output. */
7735 operands[immed_op] = const0_rtx;
7736 output_asm_insn (instr1, operands);
7738 else
7740 int i;
7741 const char * instr = instr1;
7743 /* Note that n is never zero here (which would give no output). */
7744 for (i = 0; i < 32; i += 2)
7746 if (n & (3 << i))
7748 operands[immed_op] = GEN_INT (n & (255 << i));
7749 output_asm_insn (instr, operands);
7750 instr = instr2;
7751 i += 6;
7756 return "";
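/* A worked example: for N = 0x00FF00FF with the MOV/ORR patterns, the
loop first finds set bits at i = 0 and emits "mov r0, #255"
(N & 0xFF), skips past that byte, then finds set bits at i = 16 and
emits "orr r0, r0, #16711680" (N & 0xFF0000). Each chunk spans at
most 8 bits starting at an even bit position, so it is always a
valid rotated 8-bit ARM immediate. (r0 stands for whatever register
operand 0 happens to be.) */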
7759 /* Return the appropriate ARM instruction for the operation code.
7760 The returned result should not be overwritten. OP is the rtx of the
7761 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
7762 was shifted. */
7764 const char *
7765 arithmetic_instr (op, shift_first_arg)
7766 rtx op;
7767 int shift_first_arg;
7769 switch (GET_CODE (op))
7771 case PLUS:
7772 return "add";
7774 case MINUS:
7775 return shift_first_arg ? "rsb" : "sub";
7777 case IOR:
7778 return "orr";
7780 case XOR:
7781 return "eor";
7783 case AND:
7784 return "and";
7786 default:
7787 abort ();
7791 /* Ensure valid constant shifts and return the appropriate shift mnemonic
7792 for the operation code. The returned result should not be overwritten.
7793 OP is the rtx code of the shift.
7794 On exit, *AMOUNTP will be -1 if the shift is by a register, or the
7795 constant amount of the shift otherwise. */
7797 static const char *
7798 shift_op (op, amountp)
7799 rtx op;
7800 HOST_WIDE_INT *amountp;
7802 const char * mnem;
7803 enum rtx_code code = GET_CODE (op);
7805 if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
7806 *amountp = -1;
7807 else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
7808 *amountp = INTVAL (XEXP (op, 1));
7809 else
7810 abort ();
7812 switch (code)
7814 case ASHIFT:
7815 mnem = "asl";
7816 break;
7818 case ASHIFTRT:
7819 mnem = "asr";
7820 break;
7822 case LSHIFTRT:
7823 mnem = "lsr";
7824 break;
7826 case ROTATERT:
7827 mnem = "ror";
7828 break;
7830 case MULT:
7831 /* We never have to worry about the amount being other than a
7832 power of 2, since this case can never be reloaded from a reg. */
7833 if (*amountp != -1)
7834 *amountp = int_log2 (*amountp);
7835 else
7836 abort ();
7837 return "asl";
7839 default:
7840 abort ();
7843 if (*amountp != -1)
7845 /* This is not 100% correct, but follows from the desire to merge
7846 multiplication by a power of 2 with the recognizer for a
7847 shift. >=32 is not a valid shift for "asl", so we must try to
7848 output a shift that produces the correct arithmetical result.
7849 Using lsr #32 is identical except for the fact that the carry bit
7850 is not set correctly if we set the flags; but we never use the
7851 carry bit from such an operation, so we can ignore that. */
7852 if (code == ROTATERT)
7853 /* Rotate is just modulo 32. */
7854 *amountp &= 31;
7855 else if (*amountp != (*amountp & 31))
7857 if (code == ASHIFT)
7858 mnem = "lsr";
7859 *amountp = 32;
7862 /* Shifts of 0 are no-ops. */
7863 if (*amountp == 0)
7864 return NULL;
7867 return mnem;
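/* Hedged examples of the mapping: (ashift x (const_int 3)) yields
"asl" with *AMOUNTP 3; (mult x (const_int 8)) also yields "asl" with
*AMOUNTP int_log2 (8) = 3; (ashift x (const_int 33)) is rewritten as
"lsr" with *AMOUNTP 32, as explained above; a shift by a register
yields the mnemonic with *AMOUNTP -1; and a shift by zero returns
NULL, since it is a no-op. */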
7870 /* Obtain the shift count from the POWER of two. */
7872 static HOST_WIDE_INT
7873 int_log2 (power)
7874 HOST_WIDE_INT power;
7876 HOST_WIDE_INT shift = 0;
7878 while ((((HOST_WIDE_INT) 1 << shift) & power) == 0)
7880 if (shift > 31)
7881 abort ();
7882 shift ++;
7885 return shift;
7888 /* Output a .ascii pseudo-op, keeping track of lengths. This is because
7889 /bin/as is horribly restrictive. */
7890 #define MAX_ASCII_LEN 51
7892 void
7893 output_ascii_pseudo_op (stream, p, len)
7894 FILE * stream;
7895 const unsigned char * p;
7896 int len;
7898 int i;
7899 int len_so_far = 0;
7901 fputs ("\t.ascii\t\"", stream);
7903 for (i = 0; i < len; i++)
7905 int c = p[i];
7907 if (len_so_far >= MAX_ASCII_LEN)
7909 fputs ("\"\n\t.ascii\t\"", stream);
7910 len_so_far = 0;
7913 switch (c)
7915 case TARGET_TAB:
7916 fputs ("\\t", stream);
7917 len_so_far += 2;
7918 break;
7920 case TARGET_FF:
7921 fputs ("\\f", stream);
7922 len_so_far += 2;
7923 break;
7925 case TARGET_BS:
7926 fputs ("\\b", stream);
7927 len_so_far += 2;
7928 break;
7930 case TARGET_CR:
7931 fputs ("\\r", stream);
7932 len_so_far += 2;
7933 break;
7935 case TARGET_NEWLINE:
7936 fputs ("\\n", stream);
7937 c = (i + 1 < len) ? p [i + 1] : 0; /* Don't read past the end. */
7938 if ((c >= ' ' && c <= '~')
7939 || c == TARGET_TAB)
7940 /* This is a good place for a line break. */
7941 len_so_far = MAX_ASCII_LEN;
7942 else
7943 len_so_far += 2;
7944 break;
7946 case '\"':
7947 case '\\':
7948 putc ('\\', stream);
7949 len_so_far++;
7950 /* drop through. */
7952 default:
7953 if (c >= ' ' && c <= '~')
7955 putc (c, stream);
7956 len_so_far++;
7958 else
7960 fprintf (stream, "\\%03o", c);
7961 len_so_far += 4;
7963 break;
7967 fputs ("\"\n", stream);
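/* For example, given the six bytes 'a', '"', TAB, 0x7f, 'b', NUL and
the usual execution character set, the routine would emit (modulo
the MAX_ASCII_LEN line splitting):

.ascii "a\"\t\177b\000"

where the unprintable bytes fall back to three-digit octal escapes. */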
7970 /* Compute the register save mask for registers 0 through 12
7971 inclusive. This code is used by both arm_compute_save_reg_mask
7972 and arm_compute_initial_elimination_offset. */
7974 static unsigned long
7975 arm_compute_save_reg0_reg12_mask ()
7977 unsigned long func_type = arm_current_func_type ();
7978 unsigned int save_reg_mask = 0;
7979 unsigned int reg;
7981 if (IS_INTERRUPT (func_type))
7983 unsigned int max_reg;
7984 /* Interrupt functions must not corrupt any registers,
7985 even call clobbered ones. If this is a leaf function
7986 we can just examine the registers used by the RTL, but
7987 otherwise we have to assume that whatever function is
7988 called might clobber anything, and so we have to save
7989 all the call-clobbered registers as well. */
7990 if (ARM_FUNC_TYPE (func_type) == ARM_FT_FIQ)
7991 /* FIQ handlers have registers r8 - r12 banked, so
7992 we only need to check r0 - r7. Normal ISRs only
7993 bank r14 and r15, so we must check up to r12.
7994 r13 is the stack pointer which is always preserved,
7995 so we do not need to consider it here. */
7996 max_reg = 7;
7997 else
7998 max_reg = 12;
8000 for (reg = 0; reg <= max_reg; reg++)
8001 if (regs_ever_live[reg]
8002 || (! current_function_is_leaf && call_used_regs [reg]))
8003 save_reg_mask |= (1 << reg);
8005 else
8007 /* In the normal case we only need to save those registers
8008 which are call saved and which are used by this function. */
8009 for (reg = 0; reg <= 10; reg++)
8010 if (regs_ever_live[reg] && ! call_used_regs [reg])
8011 save_reg_mask |= (1 << reg);
8013 /* Handle the frame pointer as a special case. */
8014 if (! TARGET_APCS_FRAME
8015 && ! frame_pointer_needed
8016 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
8017 && ! call_used_regs[HARD_FRAME_POINTER_REGNUM])
8018 save_reg_mask |= 1 << HARD_FRAME_POINTER_REGNUM;
8020 /* If we aren't loading the PIC register,
8021 don't stack it even though it may be live. */
8022 if (flag_pic
8023 && ! TARGET_SINGLE_PIC_BASE
8024 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
8025 save_reg_mask |= 1 << PIC_OFFSET_TABLE_REGNUM;
8028 return save_reg_mask;
8031 /* Compute a bit mask of which registers need to be
8032 saved on the stack for the current function. */
8034 static unsigned long
8035 arm_compute_save_reg_mask ()
8037 unsigned int save_reg_mask = 0;
8038 unsigned long func_type = arm_current_func_type ();
8040 if (IS_NAKED (func_type))
8041 /* This should never really happen. */
8042 return 0;
8044 /* If we are creating a stack frame, then we must save the frame pointer,
8045 IP (which will hold the old stack pointer), LR and the PC. */
8046 if (frame_pointer_needed)
8047 save_reg_mask |=
8048 (1 << ARM_HARD_FRAME_POINTER_REGNUM)
8049 | (1 << IP_REGNUM)
8050 | (1 << LR_REGNUM)
8051 | (1 << PC_REGNUM);
8053 /* Volatile functions do not return, so there
8054 is no need to save any other registers. */
8055 if (IS_VOLATILE (func_type))
8056 return save_reg_mask;
8058 save_reg_mask |= arm_compute_save_reg0_reg12_mask ();
8060 /* Decide if we need to save the link register.
8061 Interrupt routines have their own banked link register,
8062 so they never need to save it.
8063 Otherwise if we do not use the link register we do not need to save
8064 it. If we are pushing other registers onto the stack however, we
8065 can save an instruction in the epilogue by pushing the link register
8066 now and then popping it back into the PC. This incurs extra memory
8067 accesses though, so we only do it when optimising for size, and only
8068 if we know that we will not need a fancy return sequence. */
8069 if (regs_ever_live [LR_REGNUM]
8070 || (save_reg_mask
8071 && optimize_size
8072 && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL))
8073 save_reg_mask |= 1 << LR_REGNUM;
8075 if (cfun->machine->lr_save_eliminated)
8076 save_reg_mask &= ~ (1 << LR_REGNUM);
8078 return save_reg_mask;
8081 /* Generate a function exit sequence. If REALLY_RETURN is true, then do
8082 everything bar the final return instruction. */
8084 const char *
8085 output_return_instruction (operand, really_return, reverse)
8086 rtx operand;
8087 int really_return;
8088 int reverse;
8090 char conditional[10];
8091 char instr[100];
8092 int reg;
8093 unsigned long live_regs_mask;
8094 unsigned long func_type;
8096 func_type = arm_current_func_type ();
8098 if (IS_NAKED (func_type))
8099 return "";
8101 if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
8103 /* If this function was declared non-returning, and we have found a tail
8104 call, then we have to trust that the called function won't return. */
8105 if (really_return)
8107 rtx ops[2];
8109 /* Otherwise, trap an attempted return by aborting. */
8110 ops[0] = operand;
8111 ops[1] = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)"
8112 : "abort");
8113 assemble_external_libcall (ops[1]);
8114 output_asm_insn (reverse ? "bl%D0\t%a1" : "bl%d0\t%a1", ops);
8117 return "";
8120 if (current_function_calls_alloca && !really_return)
8121 abort ();
8123 sprintf (conditional, "%%?%%%c0", reverse ? 'D' : 'd');
8125 return_used_this_function = 1;
8127 live_regs_mask = arm_compute_save_reg_mask ();
8129 if (live_regs_mask)
8131 const char * return_reg;
8133 /* If we do not have any special requirements for function exit
8134 (e.g. interworking or an ISR) then we can load the return address
8135 directly into the PC. Otherwise we must load it into LR. */
8136 if (really_return
8137 && ! TARGET_INTERWORK)
8138 return_reg = reg_names[PC_REGNUM];
8139 else
8140 return_reg = reg_names[LR_REGNUM];
8142 if ((live_regs_mask & (1 << IP_REGNUM)) == (1 << IP_REGNUM))
8143 /* There are two possible reasons for the IP register being saved.
8144 Either a stack frame was created, in which case IP contains the
8145 old stack pointer, or an ISR routine corrupted it. If this is an
8146 ISR routine then just restore IP, otherwise restore IP into SP. */
8147 if (! IS_INTERRUPT (func_type))
8149 live_regs_mask &= ~ (1 << IP_REGNUM);
8150 live_regs_mask |= (1 << SP_REGNUM);
8153 /* On some ARM architectures it is faster to use LDR rather than
8154 LDM to load a single register. On other architectures, the
8155 cost is the same. In 26 bit mode, or for exception handlers,
8156 we have to use LDM to load the PC so that the CPSR is also
8157 restored. */
8158 for (reg = 0; reg <= LAST_ARM_REGNUM; reg++)
8160 if (live_regs_mask == (unsigned int)(1 << reg))
8161 break;
8163 if (reg <= LAST_ARM_REGNUM
8164 && (reg != LR_REGNUM
8165 || ! really_return
8166 || (TARGET_APCS_32 && ! IS_INTERRUPT (func_type))))
8168 sprintf (instr, "ldr%s\t%%|%s, [%%|sp], #4", conditional,
8169 (reg == LR_REGNUM) ? return_reg : reg_names[reg]);
8171 else
8173 char *p;
8174 int first = 1;
8176 /* Generate the load multiple instruction to restore the registers. */
8177 if (frame_pointer_needed)
8178 sprintf (instr, "ldm%sea\t%%|fp, {", conditional);
8179 else if (live_regs_mask & (1 << SP_REGNUM))
8180 sprintf (instr, "ldm%sfd\t%%|sp, {", conditional);
8181 else
8182 sprintf (instr, "ldm%sfd\t%%|sp!, {", conditional);
8184 p = instr + strlen (instr);
8186 for (reg = 0; reg <= SP_REGNUM; reg++)
8187 if (live_regs_mask & (1 << reg))
8189 int l = strlen (reg_names[reg]);
8191 if (first)
8192 first = 0;
8193 else
8195 memcpy (p, ", ", 2);
8196 p += 2;
8199 memcpy (p, "%|", 2);
8200 memcpy (p + 2, reg_names[reg], l);
8201 p += l + 2;
8204 if (live_regs_mask & (1 << LR_REGNUM))
8206 int l = strlen (return_reg);
8208 if (! first)
8210 memcpy (p, ", ", 2);
8211 p += 2;
8214 memcpy (p, "%|", 2);
8215 memcpy (p + 2, return_reg, l);
8216 strcpy (p + 2 + l, ((TARGET_APCS_32
8217 && !IS_INTERRUPT (func_type))
8218 || !really_return)
8219 ? "}" : "}^");
8221 else
8222 strcpy (p, "}");
8225 output_asm_insn (instr, & operand);
8227 /* See if we need to generate an extra instruction to
8228 perform the actual function return. */
8229 if (really_return
8230 && func_type != ARM_FT_INTERWORKED
8231 && (live_regs_mask & (1 << LR_REGNUM)) != 0)
8233 /* The return has already been handled
8234 by loading the LR into the PC. */
8235 really_return = 0;
8239 if (really_return)
8241 switch ((int) ARM_FUNC_TYPE (func_type))
8243 case ARM_FT_ISR:
8244 case ARM_FT_FIQ:
8245 sprintf (instr, "sub%ss\t%%|pc, %%|lr, #4", conditional);
8246 break;
8248 case ARM_FT_INTERWORKED:
8249 sprintf (instr, "bx%s\t%%|lr", conditional);
8250 break;
8252 case ARM_FT_EXCEPTION:
8253 sprintf (instr, "mov%ss\t%%|pc, %%|lr", conditional);
8254 break;
8256 default:
8257 /* ARMv5 implementations always provide BX, so interworking
8258 is the default unless APCS-26 is in use. */
8259 if ((insn_flags & FL_ARCH5) != 0 && TARGET_APCS_32)
8260 sprintf (instr, "bx%s\t%%|lr", conditional);
8261 else
8262 sprintf (instr, "mov%s%s\t%%|pc, %%|lr",
8263 conditional, TARGET_APCS_32 ? "" : "s");
8264 break;
8267 output_asm_insn (instr, & operand);
8270 return "";
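/* By way of a hedged example: for a normal APCS-32 function that saved
only {r4, lr}, the code above would emit something like

ldmfd sp!, {r4, pc}

restoring the saved register and returning in one instruction,
whereas an ARM_FT_INTERWORKED function would restore into lr and
finish with "bx lr". */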
8273 /* Write the function name into the code section, directly preceding
8274 the function prologue.
8276 Code will be output similar to this:
8278 .ascii "arm_poke_function_name", 0
8279 .align
8281 .word 0xff000000 + (t1 - t0)
8282 arm_poke_function_name
8283 mov ip, sp
8284 stmfd sp!, {fp, ip, lr, pc}
8285 sub fp, ip, #4
8287 When performing a stack backtrace, code can inspect the value
8288 of 'pc' stored at 'fp' + 0. If the trace function then looks
8289 at location pc - 12 and the top 8 bits are set, then we know
8290 that there is a function name embedded immediately preceding this
8291 location, whose length is given by (pc[-3]) & 0x00ffffff.
8293 We assume that pc is declared as a pointer to an unsigned long.
8295 It is of no benefit to output the function name if we are assembling
8296 a leaf function. These function types will not contain a stack
8297 backtrace structure, therefore it is not possible to determine the
8298 function name. */
8300 void
8301 arm_poke_function_name (stream, name)
8302 FILE * stream;
8303 const char * name;
8305 unsigned long alignlength;
8306 unsigned long length;
8307 rtx x;
8309 length = strlen (name) + 1;
8310 alignlength = ROUND_UP_WORD (length);
8312 ASM_OUTPUT_ASCII (stream, name, length);
8313 ASM_OUTPUT_ALIGN (stream, 2);
8314 x = GEN_INT ((unsigned HOST_WIDE_INT) 0xff000000 + alignlength);
8315 assemble_aligned_integer (UNITS_PER_WORD, x);
8318 /* Place some comments into the assembler stream
8319 describing the current function. */
8321 static void
8322 arm_output_function_prologue (f, frame_size)
8323 FILE * f;
8324 HOST_WIDE_INT frame_size;
8326 unsigned long func_type;
8328 if (!TARGET_ARM)
8330 thumb_output_function_prologue (f, frame_size);
8331 return;
8334 /* Sanity check. */
8335 if (arm_ccfsm_state || arm_target_insn)
8336 abort ();
8338 func_type = arm_current_func_type ();
8340 switch ((int) ARM_FUNC_TYPE (func_type))
8342 default:
8343 case ARM_FT_NORMAL:
8344 break;
8345 case ARM_FT_INTERWORKED:
8346 asm_fprintf (f, "\t%@ Function supports interworking.\n");
8347 break;
8348 case ARM_FT_EXCEPTION_HANDLER:
8349 asm_fprintf (f, "\t%@ C++ Exception Handler.\n");
8350 break;
8351 case ARM_FT_ISR:
8352 asm_fprintf (f, "\t%@ Interrupt Service Routine.\n");
8353 break;
8354 case ARM_FT_FIQ:
8355 asm_fprintf (f, "\t%@ Fast Interrupt Service Routine.\n");
8356 break;
8357 case ARM_FT_EXCEPTION:
8358 asm_fprintf (f, "\t%@ ARM Exception Handler.\n");
8359 break;
8362 if (IS_NAKED (func_type))
8363 asm_fprintf (f, "\t%@ Naked Function: prologue and epilogue provided by programmer.\n");
8365 if (IS_VOLATILE (func_type))
8366 asm_fprintf (f, "\t%@ Volatile: function does not return.\n");
8368 if (IS_NESTED (func_type))
8369 asm_fprintf (f, "\t%@ Nested: function declared inside another function.\n");
8371 asm_fprintf (f, "\t%@ args = %d, pretend = %d, frame = %d\n",
8372 current_function_args_size,
8373 current_function_pretend_args_size, frame_size);
8375 asm_fprintf (f, "\t%@ frame_needed = %d, uses_anonymous_args = %d\n",
8376 frame_pointer_needed,
8377 cfun->machine->uses_anonymous_args);
8379 if (cfun->machine->lr_save_eliminated)
8380 asm_fprintf (f, "\t%@ link register save eliminated.\n");
8382 #ifdef AOF_ASSEMBLER
8383 if (flag_pic)
8384 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, PIC_OFFSET_TABLE_REGNUM);
8385 #endif
8387 return_used_this_function = 0;
8390 const char *
8391 arm_output_epilogue (really_return)
8392 int really_return;
8394 int reg;
8395 unsigned long saved_regs_mask;
8396 unsigned long func_type;
8397 /* Floats_offset is the offset from the "virtual" frame. In an APCS
8398 frame that is $fp + 4 for a non-variadic function. */
8399 int floats_offset = 0;
8400 rtx operands[3];
8401 int frame_size = arm_get_frame_size ();
8402 FILE * f = asm_out_file;
8403 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
8405 /* If we have already generated the return instruction
8406 then it is futile to generate anything else. */
8407 if (use_return_insn (FALSE) && return_used_this_function)
8408 return "";
8410 func_type = arm_current_func_type ();
8412 if (IS_NAKED (func_type))
8413 /* Naked functions don't have epilogues. */
8414 return "";
8416 if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
8418 rtx op;
8420 /* A volatile function should never return. Call abort. */
8421 op = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)" : "abort");
8422 assemble_external_libcall (op);
8423 output_asm_insn ("bl\t%a0", &op);
8425 return "";
8428 if (ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER
8429 && ! really_return)
8430 /* If we are throwing an exception, then we really must
8431 be doing a return, so we can't tail-call. */
8432 abort ();
8434 saved_regs_mask = arm_compute_save_reg_mask ();
8436 /* XXX We should adjust floats_offset for any anonymous args, and then
8437 re-adjust vfp_offset below to compensate. */
8439 /* Compute how far away the floats will be. */
8440 for (reg = 0; reg <= LAST_ARM_REGNUM; reg ++)
8441 if (saved_regs_mask & (1 << reg))
8442 floats_offset += 4;
8444 if (frame_pointer_needed)
8446 int vfp_offset = 4;
8448 if (arm_fpu_arch == FP_SOFT2)
8450 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
8451 if (regs_ever_live[reg] && !call_used_regs[reg])
8453 floats_offset += 12;
8454 asm_fprintf (f, "\tldfe\t%r, [%r, #-%d]\n",
8455 reg, FP_REGNUM, floats_offset - vfp_offset);
8458 else
8460 int start_reg = LAST_ARM_FP_REGNUM;
8462 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
8464 if (regs_ever_live[reg] && !call_used_regs[reg])
8466 floats_offset += 12;
8468 /* We can't unstack more than four registers at once. */
8469 if (start_reg - reg == 3)
8471 asm_fprintf (f, "\tlfm\t%r, 4, [%r, #-%d]\n",
8472 reg, FP_REGNUM, floats_offset - vfp_offset);
8473 start_reg = reg - 1;
8476 else
8478 if (reg != start_reg)
8479 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
8480 reg + 1, start_reg - reg,
8481 FP_REGNUM, floats_offset - vfp_offset);
8482 start_reg = reg - 1;
8486 /* Just in case the last register checked also needs unstacking. */
8487 if (reg != start_reg)
8488 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
8489 reg + 1, start_reg - reg,
8490 FP_REGNUM, floats_offset - vfp_offset);
8493 /* saved_regs_mask should contain the IP, which at the time of stack
8494 frame generation actually contains the old stack pointer. So a
8495 quick way to unwind the stack is just to pop the IP register directly
8496 into the stack pointer. */
8497 if ((saved_regs_mask & (1 << IP_REGNUM)) == 0)
8498 abort ();
8499 saved_regs_mask &= ~ (1 << IP_REGNUM);
8500 saved_regs_mask |= (1 << SP_REGNUM);
8502 /* There are two registers left in saved_regs_mask - LR and PC. We
8503 only need to restore the LR register (the return address), but to
8504 save time we can load it directly into the PC, unless we need a
8505 special function exit sequence, or we are not really returning. */
8506 if (really_return && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL)
8507 /* Delete the LR from the register mask, so that the LR on
8508 the stack is loaded into the PC instead. */
8509 saved_regs_mask &= ~ (1 << LR_REGNUM);
8510 else
8511 saved_regs_mask &= ~ (1 << PC_REGNUM);
8513 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, saved_regs_mask);
8515 if (IS_INTERRUPT (func_type))
8516 /* Interrupt handlers will have pushed the
8517 IP onto the stack, so restore it now. */
8518 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, 1 << IP_REGNUM);
8520 else
8522 /* Restore stack pointer if necessary. */
8523 if (frame_size + current_function_outgoing_args_size != 0)
8525 operands[0] = operands[1] = stack_pointer_rtx;
8526 operands[2] = GEN_INT (frame_size
8527 + current_function_outgoing_args_size);
8528 output_add_immediate (operands);
8531 if (arm_fpu_arch == FP_SOFT2)
8533 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
8534 if (regs_ever_live[reg] && !call_used_regs[reg])
8535 asm_fprintf (f, "\tldfe\t%r, [%r], #12\n",
8536 reg, SP_REGNUM);
8538 else
8540 int start_reg = FIRST_ARM_FP_REGNUM;
8542 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
8544 if (regs_ever_live[reg] && !call_used_regs[reg])
8546 if (reg - start_reg == 3)
8548 asm_fprintf (f, "\tlfmfd\t%r, 4, [%r]!\n",
8549 start_reg, SP_REGNUM);
8550 start_reg = reg + 1;
8553 else
8555 if (reg != start_reg)
8556 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
8557 start_reg, reg - start_reg,
8558 SP_REGNUM);
8560 start_reg = reg + 1;
8564 /* Just in case the last register checked also needs unstacking. */
8565 if (reg != start_reg)
8566 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
8567 start_reg, reg - start_reg, SP_REGNUM);
8570 /* If we can, restore the LR into the PC. */
8571 if (ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
8572 && really_return
8573 && current_function_pretend_args_size == 0
8574 && saved_regs_mask & (1 << LR_REGNUM))
8576 saved_regs_mask &= ~ (1 << LR_REGNUM);
8577 saved_regs_mask |= (1 << PC_REGNUM);
8580 /* Load the registers off the stack. If we only have one register
8581 to load use the LDR instruction - it is faster. */
8582 if (saved_regs_mask == (1 << LR_REGNUM))
8584 /* The exception handler ignores the LR, so we do
8585 not really need to load it off the stack. */
8586 if (eh_ofs)
8587 asm_fprintf (f, "\tadd\t%r, %r, #4\n", SP_REGNUM, SP_REGNUM);
8588 else
8589 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM, SP_REGNUM);
8591 else if (saved_regs_mask)
8593 if (saved_regs_mask & (1 << SP_REGNUM))
8594 /* Note - write back to the stack register is not enabled
8595 (ie "ldmfd sp!..."). We know that the stack pointer is
8596 in the list of registers and if we add writeback the
8597 instruction becomes UNPREDICTABLE. */
8598 print_multi_reg (f, "ldmfd\t%r", SP_REGNUM, saved_regs_mask);
8599 else
8600 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, saved_regs_mask);
8603 if (current_function_pretend_args_size)
8605 /* Unwind the pre-pushed regs. */
8606 operands[0] = operands[1] = stack_pointer_rtx;
8607 operands[2] = GEN_INT (current_function_pretend_args_size);
8608 output_add_immediate (operands);
8612 #if 0
8613 if (ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER)
8614 /* Adjust the stack to remove the exception handler stuff. */
8615 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
8616 REGNO (eh_ofs));
8617 #endif
8619 if (! really_return
8620 || (ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
8621 && current_function_pretend_args_size == 0
8622 && saved_regs_mask & (1 << PC_REGNUM)))
8623 return "";
8625 /* Generate the return instruction. */
8626 switch ((int) ARM_FUNC_TYPE (func_type))
8628 case ARM_FT_EXCEPTION_HANDLER:
8629 /* Even in 26-bit mode we do a mov (rather than a movs)
8630 because we don't have the PSR bits set in the address. */
8631 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, EXCEPTION_LR_REGNUM);
8632 break;
8634 case ARM_FT_ISR:
8635 case ARM_FT_FIQ:
8636 asm_fprintf (f, "\tsubs\t%r, %r, #4\n", PC_REGNUM, LR_REGNUM);
8637 break;
8639 case ARM_FT_EXCEPTION:
8640 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
8641 break;
8643 case ARM_FT_INTERWORKED:
8644 asm_fprintf (f, "\tbx\t%r\n", LR_REGNUM);
8645 break;
8647 default:
8648 if (frame_pointer_needed)
8649 /* If we used the frame pointer then the return address
8650 will have been loaded off the stack directly into the
8651 PC, so there is no need to issue a MOV instruction
8652 here. */
8654 else if (current_function_pretend_args_size == 0
8655 && (saved_regs_mask & (1 << LR_REGNUM)))
8656 /* Similarly we may have been able to load LR into the PC
8657 even if we did not create a stack frame. */
8659 else if (TARGET_APCS_32)
8660 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, LR_REGNUM);
8661 else
8662 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
8663 break;
8666 return "";
8669 static void
8670 arm_output_function_epilogue (file, frame_size)
8671 FILE *file ATTRIBUTE_UNUSED;
8672 HOST_WIDE_INT frame_size;
8674 if (TARGET_THUMB)
8676 /* ??? Probably not safe to set this here, since it assumes that a
8677 function will be emitted as assembly immediately after we generate
8678 RTL for it. This does not happen for inline functions. */
8679 return_used_this_function = 0;
8681 else
8683 /* We need to take into account any stack-frame rounding. */
8684 frame_size = arm_get_frame_size ();
8686 if (use_return_insn (FALSE)
8687 && return_used_this_function
8688 && (frame_size + current_function_outgoing_args_size) != 0
8689 && !frame_pointer_needed)
8690 abort ();
8692 /* Reset the ARM-specific per-function variables. */
8693 after_arm_reorg = 0;
8697 /* Generate and emit an insn that we will recognize as a push_multi.
8698 Unfortunately, since this insn does not reflect very well the actual
8699 semantics of the operation, we need to annotate the insn for the benefit
8700 of DWARF2 frame unwind information. */
8702 static rtx
8703 emit_multi_reg_push (mask)
8704 int mask;
8706 int num_regs = 0;
8707 int num_dwarf_regs;
8708 int i, j;
8709 rtx par;
8710 rtx dwarf;
8711 int dwarf_par_index;
8712 rtx tmp, reg;
8714 for (i = 0; i <= LAST_ARM_REGNUM; i++)
8715 if (mask & (1 << i))
8716 num_regs++;
8718 if (num_regs == 0 || num_regs > 16)
8719 abort ();
8721 /* We don't record the PC in the dwarf frame information. */
8722 num_dwarf_regs = num_regs;
8723 if (mask & (1 << PC_REGNUM))
8724 num_dwarf_regs--;
8726 /* For the body of the insn we are going to generate an UNSPEC in
8727 parallel with several USEs. This allows the insn to be recognized
8728 by the push_multi pattern in the arm.md file. The insn looks
8729 something like this:
8731 (parallel [
8732 (set (mem:BLK (pre_dec:BLK (reg:SI sp)))
8733 (unspec:BLK [(reg:SI r4)] UNSPEC_PUSH_MULT))
8734 (use (reg:SI 11 fp))
8735 (use (reg:SI 12 ip))
8736 (use (reg:SI 14 lr))
8737 (use (reg:SI 15 pc))
8740 For the frame note however, we try to be more explicit and actually
8741 show each register being stored into the stack frame, plus a (single)
8742 decrement of the stack pointer. We do it this way in order to be
8743 friendly to the stack unwinding code, which only wants to see a single
8744 stack decrement per instruction. The RTL we generate for the note looks
8745 something like this:
8747 (sequence [
8748 (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int -20)))
8749 (set (mem:SI (reg:SI sp)) (reg:SI r4))
8750 (set (mem:SI (plus:SI (reg:SI sp) (const_int 4))) (reg:SI fp))
8751 (set (mem:SI (plus:SI (reg:SI sp) (const_int 8))) (reg:SI ip))
8752 (set (mem:SI (plus:SI (reg:SI sp) (const_int 12))) (reg:SI lr))
8755 This sequence is used both by the code to support stack unwinding for
8756 exception handlers and the code to generate dwarf2 frame debugging. */
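/* On the assembler side, the parallel above would typically be output
by the push_multi pattern as a single store-multiple instruction,
e.g. "stmfd sp!, {r4, fp, ip, lr, pc}" for this example mask. */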
8758 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_regs));
8759 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (num_dwarf_regs + 1));
8760 dwarf_par_index = 1;
8762 for (i = 0; i <= LAST_ARM_REGNUM; i++)
8764 if (mask & (1 << i))
8766 reg = gen_rtx_REG (SImode, i);
8768 XVECEXP (par, 0, 0)
8769 = gen_rtx_SET (VOIDmode,
8770 gen_rtx_MEM (BLKmode,
8771 gen_rtx_PRE_DEC (BLKmode,
8772 stack_pointer_rtx)),
8773 gen_rtx_UNSPEC (BLKmode,
8774 gen_rtvec (1, reg),
8775 UNSPEC_PUSH_MULT));
8777 if (i != PC_REGNUM)
8779 tmp = gen_rtx_SET (VOIDmode,
8780 gen_rtx_MEM (SImode, stack_pointer_rtx),
8781 reg);
8782 RTX_FRAME_RELATED_P (tmp) = 1;
8783 XVECEXP (dwarf, 0, dwarf_par_index) = tmp;
8784 dwarf_par_index++;
8787 break;
8791 for (j = 1, i++; j < num_regs; i++)
8793 if (mask & (1 << i))
8795 reg = gen_rtx_REG (SImode, i);
8797 XVECEXP (par, 0, j) = gen_rtx_USE (VOIDmode, reg);
8799 if (i != PC_REGNUM)
8801 tmp = gen_rtx_SET (VOIDmode,
8802 gen_rtx_MEM (SImode,
8803 plus_constant (stack_pointer_rtx,
8804 4 * j)),
8805 reg);
8806 RTX_FRAME_RELATED_P (tmp) = 1;
8807 XVECEXP (dwarf, 0, dwarf_par_index++) = tmp;
8810 j++;
8814 par = emit_insn (par);
8816 tmp = gen_rtx_SET (SImode,
8817 stack_pointer_rtx,
8818 gen_rtx_PLUS (SImode,
8819 stack_pointer_rtx,
8820 GEN_INT (-4 * num_regs)));
8821 RTX_FRAME_RELATED_P (tmp) = 1;
8822 XVECEXP (dwarf, 0, 0) = tmp;
8824 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
8825 REG_NOTES (par));
8826 return par;
8829 static rtx
8830 emit_sfm (base_reg, count)
8831 int base_reg;
8832 int count;
8834 rtx par;
8835 rtx dwarf;
8836 rtx tmp, reg;
8837 int i;
8839 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
8840 dwarf = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
8842 reg = gen_rtx_REG (XFmode, base_reg++);
8844 XVECEXP (par, 0, 0)
8845 = gen_rtx_SET (VOIDmode,
8846 gen_rtx_MEM (BLKmode,
8847 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
8848 gen_rtx_UNSPEC (BLKmode,
8849 gen_rtvec (1, reg),
8850 UNSPEC_PUSH_MULT));
8851 tmp
8852 = gen_rtx_SET (VOIDmode,
8853 gen_rtx_MEM (XFmode,
8854 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
8855 reg);
8856 RTX_FRAME_RELATED_P (tmp) = 1;
8857 XVECEXP (dwarf, 0, count - 1) = tmp;
8859 for (i = 1; i < count; i++)
8861 reg = gen_rtx_REG (XFmode, base_reg++);
8862 XVECEXP (par, 0, i) = gen_rtx_USE (VOIDmode, reg);
8864 tmp = gen_rtx_SET (VOIDmode,
8865 gen_rtx_MEM (XFmode,
8866 gen_rtx_PRE_DEC (BLKmode,
8867 stack_pointer_rtx)),
8868 reg);
8869 RTX_FRAME_RELATED_P (tmp) = 1;
8870 XVECEXP (dwarf, 0, count - i - 1) = tmp;
8873 par = emit_insn (par);
8874 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
8875 REG_NOTES (par));
8876 return par;
8879 /* Compute the distance from register FROM to register TO.
8880 These can be the arg pointer (26), the soft frame pointer (25),
8881 the stack pointer (13) or the hard frame pointer (11).
8882 Typical stack layout looks like this:
8884 old stack pointer -> | |
8885 ----
8886 | | \
8887 | | saved arguments for
8888 | | vararg functions
8889 | | /
8890 --
8891 hard FP & arg pointer -> | | \
8892 | | stack
8893 | | frame
8894 | | /
8895 --
8896 | | \
8897 | | call saved
8898 | | registers
8899 soft frame pointer -> | | /
8900 --
8901 | | \
8902 | | local
8903 | | variables
8904 | | /
8905 --
8906 | | \
8907 | | outgoing
8908 | | arguments
8909 current stack pointer -> | | /
8910 --
8912 For a given function some or all of these stack components
8913 may not be needed, giving rise to the possibility of
8914 eliminating some of the registers.
8916 The values returned by this function must reflect the behavior
8917 of arm_expand_prologue() and arm_compute_save_reg_mask().
8919 The sign of the number returned reflects the direction of stack
8920 growth, so the values are positive for all eliminations except
8921 from the soft frame pointer to the hard frame pointer. */
8923 unsigned int
8924 arm_compute_initial_elimination_offset (from, to)
8925 unsigned int from;
8926 unsigned int to;
8928 unsigned int local_vars = arm_get_frame_size ();
8929 unsigned int outgoing_args = current_function_outgoing_args_size;
8930 unsigned int stack_frame;
8931 unsigned int call_saved_registers;
8932 unsigned long func_type;
8934 func_type = arm_current_func_type ();
8936 /* Volatile functions never return, so there is
8937 no need to save call saved registers. */
8938 call_saved_registers = 0;
8939 if (! IS_VOLATILE (func_type))
8941 unsigned int reg_mask;
8942 unsigned int reg;
8944 /* Make sure that we compute which registers will be saved
8945 on the stack using the same algorithm that is used by
8946 arm_compute_save_reg_mask(). */
8947 reg_mask = arm_compute_save_reg0_reg12_mask ();
8949 /* Now count the number of bits set in save_reg_mask.
8950 For each set bit we need 4 bytes of stack space. */
8951 while (reg_mask)
8953 call_saved_registers += 4;
8954 reg_mask = reg_mask & ~ (reg_mask & - reg_mask);
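/* Note: (reg_mask & - reg_mask) isolates the lowest set bit of
reg_mask, so each iteration of this loop clears exactly one
register from the mask. */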
8957 if ((regs_ever_live[LR_REGNUM]
8958 /* If optimizing for size, then we save the link register if
8959 any other integer register is saved. This gives a smaller
8960 return sequence. */
8961 || (optimize_size && call_saved_registers > 0))
8962 /* But if a stack frame is going to be created, the LR will
8963 be saved as part of that, so we do not need to allow for
8964 it here. */
8965 && ! frame_pointer_needed)
8966 call_saved_registers += 4;
8968 /* If the hard floating point registers are going to be
8969 used then they must be saved on the stack as well.
8970 Each register occupies 12 bytes of stack space. */
8971 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg ++)
8972 if (regs_ever_live[reg] && ! call_used_regs[reg])
8973 call_saved_registers += 12;
8976 /* The stack frame contains 4 registers - the old frame pointer,
8977 the old stack pointer, the return address and PC of the start
8978 of the function. */
8979 stack_frame = frame_pointer_needed ? 16 : 0;
8981 /* OK, now we have enough information to compute the distances.
8982 There must be an entry in these switch tables for each pair
8983 of registers in ELIMINABLE_REGS, even if some of the entries
8984 seem to be redundant or useless. */
8985 switch (from)
8987 case ARG_POINTER_REGNUM:
8988 switch (to)
8990 case THUMB_HARD_FRAME_POINTER_REGNUM:
8991 return 0;
8993 case FRAME_POINTER_REGNUM:
8994 /* This is the reverse of the soft frame pointer
8995 to hard frame pointer elimination below. */
8996 if (call_saved_registers == 0 && stack_frame == 0)
8997 return 0;
8998 return (call_saved_registers + stack_frame - 4);
9000 case ARM_HARD_FRAME_POINTER_REGNUM:
9001 /* If there is no stack frame then the hard
9002 frame pointer and the arg pointer coincide. */
9003 if (stack_frame == 0 && call_saved_registers != 0)
9004 return 0;
9005 /* FIXME: Not sure about this. Maybe we should always return 0 ? */
9006 return (frame_pointer_needed
9007 && current_function_needs_context
9008 && ! cfun->machine->uses_anonymous_args) ? 4 : 0;
9010 case STACK_POINTER_REGNUM:
9011 /* If nothing has been pushed on the stack at all
9012 then this will return -4. This *is* correct! */
9013 return call_saved_registers + stack_frame + local_vars + outgoing_args - 4;
9015 default:
9016 abort ();
9018 break;
9020 case FRAME_POINTER_REGNUM:
9021 switch (to)
9023 case THUMB_HARD_FRAME_POINTER_REGNUM:
9024 return 0;
9026 case ARM_HARD_FRAME_POINTER_REGNUM:
9027 /* The hard frame pointer points to the top entry in the
9028 stack frame. The soft frame pointer to the bottom entry
9029 in the stack frame. If there is no stack frame at all,
9030 then they are identical. */
9031 if (call_saved_registers == 0 && stack_frame == 0)
9032 return 0;
9033 return - (call_saved_registers + stack_frame - 4);
9035 case STACK_POINTER_REGNUM:
9036 return local_vars + outgoing_args;
9038 default:
9039 abort ();
9041 break;
9043 default:
9044 /* You cannot eliminate from the stack pointer.
9045 In theory you could eliminate from the hard frame
9046 pointer to the stack pointer, but this will never
9047 happen, since if a stack frame is not needed the
9048 hard frame pointer will never be used. */
9049 abort ();
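/* A worked example of the formulas above, with hypothetical sizes:
for 20 bytes of call-saved registers, a 16 byte stack frame,
8 bytes of local variables and no outgoing arguments we get

ARG_POINTER   -> STACK_POINTER : 20 + 16 + 8 + 0 - 4 = 40
ARG_POINTER   -> FRAME_POINTER : 20 + 16 - 4         = 32
FRAME_POINTER -> STACK_POINTER : 8 + 0               = 8   */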
9053 /* Calculate the size of the stack frame, taking into account any
9054 padding that is required to ensure stack-alignment. */
9056 HOST_WIDE_INT
9057 arm_get_frame_size ()
9059 int regno;
9061 int base_size = ROUND_UP_WORD (get_frame_size ());
9062 int entry_size = 0;
9063 unsigned long func_type = arm_current_func_type ();
9064 int leaf;
9066 if (! TARGET_ARM)
9067 abort ();
9069 if (! TARGET_ATPCS)
9070 return base_size;
9072 /* We need to know if we are a leaf function. Unfortunately, it
9073 is possible to be called after start_sequence has been called,
9074 which causes get_insns to return the insns for the sequence,
9075 not the function, which will cause leaf_function_p to return
9076 the incorrect result.
9078 To work around this, we cache the computed frame size. This
9079 works because we will only be calling RTL expanders that need
9080 to know about leaf functions once reload has completed, and the
9081 frame size cannot be changed after that time, so we can safely
9082 use the cached value. */
9084 if (reload_completed)
9085 return cfun->machine->frame_size;
9087 leaf = leaf_function_p ();
9089 /* A leaf function does not need any stack alignment if it has nothing
9090 on the stack. */
9091 if (leaf && base_size == 0)
9093 cfun->machine->frame_size = 0;
9094 return 0;
9097 /* We know that SP will be word aligned on entry, and we must
9098 preserve that condition at any subroutine call. But those are
9099 the only constraints. */
9101 /* Space for variadic functions. */
9102 if (current_function_pretend_args_size)
9103 entry_size += current_function_pretend_args_size;
9105 /* Space for saved registers. */
9106 entry_size += bit_count (arm_compute_save_reg_mask ()) * 4;
9108 /* Space for saved FPA registers. */
9109 if (! IS_VOLATILE (func_type))
9111 for (regno = FIRST_ARM_FP_REGNUM; regno <= LAST_ARM_FP_REGNUM; regno++)
9112 if (regs_ever_live[regno] && ! call_used_regs[regno])
9113 entry_size += 12;
9116 if ((entry_size + base_size + current_function_outgoing_args_size) & 7)
9117 base_size += 4;
9118 if ((entry_size + base_size + current_function_outgoing_args_size) & 7)
9119 abort ();
9121 cfun->machine->frame_size = base_size;
9123 return base_size;
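/* An illustration with hypothetical sizes: under TARGET_ATPCS, if
base_size is 20 and entry_size plus the outgoing argument space
is 32, the total of 52 is not a multiple of 8, so base_size is
padded to 24; the total becomes 56 and SP stays 8-byte aligned
across calls. */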
9126 /* Generate the prologue instructions for entry into an ARM function. */
9128 void
9129 arm_expand_prologue ()
9131 int reg;
9132 rtx amount;
9133 rtx insn;
9134 rtx ip_rtx;
9135 unsigned long live_regs_mask;
9136 unsigned long func_type;
9137 int fp_offset = 0;
9138 int saved_pretend_args = 0;
9139 unsigned int args_to_push;
9141 func_type = arm_current_func_type ();
9143 /* Naked functions don't have prologues. */
9144 if (IS_NAKED (func_type))
9145 return;
9147 /* Make a copy of c_f_p_a_s as we may need to modify it locally. */
9148 args_to_push = current_function_pretend_args_size;
9150 /* Compute which register we will have to save onto the stack. */
9151 live_regs_mask = arm_compute_save_reg_mask ();
9153 ip_rtx = gen_rtx_REG (SImode, IP_REGNUM);
9155 if (frame_pointer_needed)
9157 if (IS_INTERRUPT (func_type))
9159 /* Interrupt functions must not corrupt any registers.
9160 Creating a frame pointer, however, corrupts the IP
9161 register, so we must push it first. */
9162 insn = emit_multi_reg_push (1 << IP_REGNUM);
9164 /* Do not set RTX_FRAME_RELATED_P on this insn.
9165 The dwarf stack unwinding code only wants to see one
9166 stack decrement per function, and this is not it. If
9167 this instruction is labeled as being part of the frame
9168 creation sequence then dwarf2out_frame_debug_expr will
9169 abort when it encounters the assignment of IP to FP
9170 later on, since the use of SP here establishes SP as
9171 the CFA register and not IP.
9173 Anyway this instruction is not really part of the stack
9174 frame creation although it is part of the prologue. */
9176 else if (IS_NESTED (func_type))
9178 /* The static chain register is the same as the IP register
9179 used as a scratch register during stack frame creation.
9180 To get around this we need to find somewhere to store IP
9181 whilst the frame is being created. We try the following
9182 places in order:
9184 1. The last argument register.
9185 2. A slot on the stack above the frame. (This only
9186 works if the function is not a varargs function).
9187 3. Register r3, after pushing the argument registers
9188 onto the stack.
9190 Note - we only need to tell the dwarf2 backend about the SP
9191 adjustment in the second variant; the static chain register
9192 doesn't need to be unwound, as it doesn't contain a value
9193 inherited from the caller. */
9195 if (regs_ever_live[3] == 0)
9197 insn = gen_rtx_REG (SImode, 3);
9198 insn = gen_rtx_SET (SImode, insn, ip_rtx);
9199 insn = emit_insn (insn);
9201 else if (args_to_push == 0)
9203 rtx dwarf;
9204 insn = gen_rtx_PRE_DEC (SImode, stack_pointer_rtx);
9205 insn = gen_rtx_MEM (SImode, insn);
9206 insn = gen_rtx_SET (VOIDmode, insn, ip_rtx);
9207 insn = emit_insn (insn);
9209 fp_offset = 4;
9211 /* Just tell the dwarf backend that we adjusted SP. */
9212 dwarf = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
9213 gen_rtx_PLUS (SImode, stack_pointer_rtx,
9214 GEN_INT (-fp_offset)));
9215 RTX_FRAME_RELATED_P (insn) = 1;
9216 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
9217 dwarf, REG_NOTES (insn));
9219 else
9221 /* Store the args on the stack. */
9222 if (cfun->machine->uses_anonymous_args)
9223 insn = emit_multi_reg_push
9224 ((0xf0 >> (args_to_push / 4)) & 0xf);
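/* For instance, with 8 bytes of pretend args the expression above
yields (0xf0 >> 2) & 0xf == 0xc, a mask selecting {r2, r3}. */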
9225 else
9226 insn = emit_insn
9227 (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
9228 GEN_INT (- args_to_push)));
9230 RTX_FRAME_RELATED_P (insn) = 1;
9232 saved_pretend_args = 1;
9233 fp_offset = args_to_push;
9234 args_to_push = 0;
9236 /* Now reuse r3 to preserve IP. */
9237 insn = gen_rtx_REG (SImode, 3);
9238 insn = gen_rtx_SET (SImode, insn, ip_rtx);
9239 (void) emit_insn (insn);
9243 if (fp_offset)
9245 insn = gen_rtx_PLUS (SImode, stack_pointer_rtx, GEN_INT (fp_offset));
9246 insn = gen_rtx_SET (SImode, ip_rtx, insn);
9248 else
9249 insn = gen_movsi (ip_rtx, stack_pointer_rtx);
9251 insn = emit_insn (insn);
9252 RTX_FRAME_RELATED_P (insn) = 1;
9255 if (args_to_push)
9257 /* Push the argument registers, or reserve space for them. */
9258 if (cfun->machine->uses_anonymous_args)
9259 insn = emit_multi_reg_push
9260 ((0xf0 >> (args_to_push / 4)) & 0xf);
9261 else
9262 insn = emit_insn
9263 (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
9264 GEN_INT (- args_to_push)));
9265 RTX_FRAME_RELATED_P (insn) = 1;
9268 /* If this is an interrupt service routine, and the link register
9269 is going to be pushed, and we are not creating a stack frame,
9270 (which would involve an extra push of IP and a pop in the epilogue)
9271 subtracting four from LR now will mean that the function return
9272 can be done with a single instruction. */
9273 if ((func_type == ARM_FT_ISR || func_type == ARM_FT_FIQ)
9274 && (live_regs_mask & (1 << LR_REGNUM)) != 0
9275 && ! frame_pointer_needed)
9276 emit_insn (gen_rtx_SET (SImode,
9277 gen_rtx_REG (SImode, LR_REGNUM),
9278 gen_rtx_PLUS (SImode,
9279 gen_rtx_REG (SImode, LR_REGNUM),
9280 GEN_INT (-4))));
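/* The SET emitted above would typically assemble to a single
"sub lr, lr, #4", matching the "subs pc, lr, #4" return emitted
for ISRs by the epilogue code. */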
9282 if (live_regs_mask)
9284 insn = emit_multi_reg_push (live_regs_mask);
9285 RTX_FRAME_RELATED_P (insn) = 1;
9288 if (! IS_VOLATILE (func_type))
9290 /* Save any floating point call-saved registers used by this function. */
9291 if (arm_fpu_arch == FP_SOFT2)
9293 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg --)
9294 if (regs_ever_live[reg] && !call_used_regs[reg])
9296 insn = gen_rtx_PRE_DEC (XFmode, stack_pointer_rtx);
9297 insn = gen_rtx_MEM (XFmode, insn);
9298 insn = emit_insn (gen_rtx_SET (VOIDmode, insn,
9299 gen_rtx_REG (XFmode, reg)));
9300 RTX_FRAME_RELATED_P (insn) = 1;
9303 else
9305 int start_reg = LAST_ARM_FP_REGNUM;
9307 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg --)
9309 if (regs_ever_live[reg] && !call_used_regs[reg])
9311 if (start_reg - reg == 3)
9313 insn = emit_sfm (reg, 4);
9314 RTX_FRAME_RELATED_P (insn) = 1;
9315 start_reg = reg - 1;
9318 else
9320 if (start_reg != reg)
9322 insn = emit_sfm (reg + 1, start_reg - reg);
9323 RTX_FRAME_RELATED_P (insn) = 1;
9325 start_reg = reg - 1;
9329 if (start_reg != reg)
9331 insn = emit_sfm (reg + 1, start_reg - reg);
9332 RTX_FRAME_RELATED_P (insn) = 1;
9337 if (frame_pointer_needed)
9339 /* Create the new frame pointer. */
9340 insn = GEN_INT (-(4 + args_to_push + fp_offset));
9341 insn = emit_insn (gen_addsi3 (hard_frame_pointer_rtx, ip_rtx, insn));
9342 RTX_FRAME_RELATED_P (insn) = 1;
9344 if (IS_NESTED (func_type))
9346 /* Recover the static chain register. */
9347 if (regs_ever_live [3] == 0
9348 || saved_pretend_args)
9349 insn = gen_rtx_REG (SImode, 3);
9350 else /* if (current_function_pretend_args_size == 0) */
9352 insn = gen_rtx_PLUS (SImode, hard_frame_pointer_rtx, GEN_INT (4));
9353 insn = gen_rtx_MEM (SImode, insn);
9356 emit_insn (gen_rtx_SET (SImode, ip_rtx, insn));
9357 /* Add a USE to stop propagate_one_insn() from barfing. */
9358 emit_insn (gen_prologue_use (ip_rtx));
9362 amount = GEN_INT (-(arm_get_frame_size ()
9363 + current_function_outgoing_args_size));
9365 if (amount != const0_rtx)
9367 /* This add can produce multiple insns for a large constant, so we
9368 need to get tricky. */
9369 rtx last = get_last_insn ();
9370 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
9371 amount));
9374 last = last ? NEXT_INSN (last) : get_insns ();
9375 RTX_FRAME_RELATED_P (last) = 1;
9377 while (last != insn);
9379 /* If the frame pointer is needed, emit a special barrier that
9380 will prevent the scheduler from moving stores to the frame
9381 before the stack adjustment. */
9382 if (frame_pointer_needed)
9383 insn = emit_insn (gen_stack_tie (stack_pointer_rtx,
9384 hard_frame_pointer_rtx));
9387 /* If we are profiling, make sure no instructions are scheduled before
9388 the call to mcount. Similarly if the user has requested no
9389 scheduling in the prologue. */
9390 if (current_function_profile || TARGET_NO_SCHED_PRO)
9391 emit_insn (gen_blockage ());
9393 /* If the link register is being kept alive, with the return address in it,
9394 then make sure that it does not get reused by the ce2 pass. */
9395 if ((live_regs_mask & (1 << LR_REGNUM)) == 0)
9397 emit_insn (gen_prologue_use (gen_rtx_REG (SImode, LR_REGNUM)));
9398 cfun->machine->lr_save_eliminated = 1;
9402 /* If CODE is 'd', then the X is a condition operand and the instruction
9403 should only be executed if the condition is true.
9404 If CODE is 'D', then the X is a condition operand and the instruction
9405 should only be executed if the condition is false: however, if the mode
9406 of the comparison is CCFPEmode, then always execute the instruction -- we
9407 do this because in these circumstances !GE does not necessarily imply LT;
9408 in these cases the instruction pattern will take care to make sure that
9409 an instruction containing %d will follow, thereby undoing the effects of
9410 doing this instruction unconditionally.
9411 If CODE is 'N' then X is a floating point operand that must be negated
9412 before output.
9413 If CODE is 'B' then output a bitwise inverted value of X (a const int).
9414 If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */
9416 void
9417 arm_print_operand (stream, x, code)
9418 FILE * stream;
9419 rtx x;
9420 int code;
9422 switch (code)
9424 case '@':
9425 fputs (ASM_COMMENT_START, stream);
9426 return;
9428 case '_':
9429 fputs (user_label_prefix, stream);
9430 return;
9432 case '|':
9433 fputs (REGISTER_PREFIX, stream);
9434 return;
9436 case '?':
9437 if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
9439 if (TARGET_THUMB || current_insn_predicate != NULL)
9440 abort ();
9442 fputs (arm_condition_codes[arm_current_cc], stream);
9444 else if (current_insn_predicate)
9446 enum arm_cond_code code;
9448 if (TARGET_THUMB)
9449 abort ();
9451 code = get_arm_condition_code (current_insn_predicate);
9452 fputs (arm_condition_codes[code], stream);
9454 return;
9456 case 'N':
9458 REAL_VALUE_TYPE r;
9459 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
9460 r = REAL_VALUE_NEGATE (r);
9461 fprintf (stream, "%s", fp_const_from_val (&r));
9463 return;
9465 case 'B':
9466 if (GET_CODE (x) == CONST_INT)
9468 HOST_WIDE_INT val;
9469 val = ARM_SIGN_EXTEND (~INTVAL (x));
9470 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
9472 else
9474 putc ('~', stream);
9475 output_addr_const (stream, x);
9477 return;
9479 case 'i':
9480 fprintf (stream, "%s", arithmetic_instr (x, 1));
9481 return;
9483 /* Truncate Cirrus shift counts. */
9484 case 's':
9485 if (GET_CODE (x) == CONST_INT)
9487 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0x3f);
9488 return;
9490 arm_print_operand (stream, x, 0);
9491 return;
9493 case 'I':
9494 fprintf (stream, "%s", arithmetic_instr (x, 0));
9495 return;
9497 case 'S':
9499 HOST_WIDE_INT val;
9500 const char * shift = shift_op (x, &val);
9502 if (shift)
9504 fprintf (stream, ", %s ", shift_op (x, &val));
9505 if (val == -1)
9506 arm_print_operand (stream, XEXP (x, 1), 0);
9507 else
9509 fputc ('#', stream);
9510 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
9514 return;
9516 /* An explanation of the 'Q', 'R' and 'H' register operands:
9518 In a pair of registers containing a DI or DF value the 'Q'
9519 operand returns the register number of the register containing
9520 the least significant part of the value. The 'R' operand returns
9521 the register number of the register containing the most
9522 significant part of the value.
9524 The 'H' operand returns the higher of the two register numbers.
9525 On a run where WORDS_BIG_ENDIAN is true the 'H' operand is the
9526 same as the 'Q' operand, since the most significant part of the
9527 value is held in the lower-numbered register. The reverse is true
9528 on systems where WORDS_BIG_ENDIAN is false.
9530 The purpose of these operands is to distinguish between cases
9531 where the endian-ness of the values is important (for example
9532 when they are added together), and cases where the endian-ness
9533 is irrelevant, but the order of register operations is important.
9534 For example when loading a value from memory into a register
9535 pair, the endian-ness does not matter. Provided that the value
9536 from the lower memory address is put into the lower numbered
9537 register, and the value from the higher address is put into the
9538 higher numbered register, the load will work regardless of whether
9539 the value being loaded is big-wordian or little-wordian. The
9540 order of the two register loads can matter however, if the address
9541 of the memory location is actually held in one of the registers
9542 being overwritten by the load. */
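/* An illustrative example: for a DImode value held in {r0, r1} with
WORDS_BIG_ENDIAN false, %Q prints r0 (the least significant word),
%R prints r1 (the most significant word) and %H prints r1. With
WORDS_BIG_ENDIAN true, %Q prints r1 and %R prints r0, while %H
still prints r1. */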
9543 case 'Q':
9544 if (REGNO (x) > LAST_ARM_REGNUM)
9545 abort ();
9546 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 1 : 0));
9547 return;
9549 case 'R':
9550 if (REGNO (x) > LAST_ARM_REGNUM)
9551 abort ();
9552 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 0 : 1));
9553 return;
9555 case 'H':
9556 if (REGNO (x) > LAST_ARM_REGNUM)
9557 abort ();
9558 asm_fprintf (stream, "%r", REGNO (x) + 1);
9559 return;
9561 case 'm':
9562 asm_fprintf (stream, "%r",
9563 GET_CODE (XEXP (x, 0)) == REG
9564 ? REGNO (XEXP (x, 0)) : REGNO (XEXP (XEXP (x, 0), 0)));
9565 return;
9567 case 'M':
9568 asm_fprintf (stream, "{%r-%r}",
9569 REGNO (x),
9570 REGNO (x) + ARM_NUM_REGS (GET_MODE (x)) - 1);
9571 return;
9573 case 'd':
9574 /* CONST_TRUE_RTX means always -- that's the default. */
9575 if (x == const_true_rtx)
9576 return;
9578 if (TARGET_ARM)
9579 fputs (arm_condition_codes[get_arm_condition_code (x)],
9580 stream);
9581 else
9582 fputs (thumb_condition_code (x, 0), stream);
9583 return;
9585 case 'D':
9586 /* CONST_TRUE_RTX means not always -- ie never. We shouldn't ever
9587 want to do that. */
9588 if (x == const_true_rtx)
9589 abort ();
9591 if (TARGET_ARM)
9592 fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
9593 (get_arm_condition_code (x))],
9594 stream);
9595 else
9596 fputs (thumb_condition_code (x, 1), stream);
9597 return;
9600 /* Cirrus registers can be accessed in a variety of ways:
9601 single floating point (f)
9602 double floating point (d)
9603 32bit integer (fx)
9604 64bit integer (dx). */
9605 case 'W': /* Cirrus register in F mode. */
9606 case 'X': /* Cirrus register in D mode. */
9607 case 'Y': /* Cirrus register in FX mode. */
9608 case 'Z': /* Cirrus register in DX mode. */
9609 if (GET_CODE (x) != REG || REGNO_REG_CLASS (REGNO (x)) != CIRRUS_REGS)
9610 abort ();
9612 fprintf (stream, "mv%s%s",
9613 code == 'W' ? "f"
9614 : code == 'X' ? "d"
9615 : code == 'Y' ? "fx" : "dx", reg_names[REGNO (x)] + 2);
9617 return;
9619 /* Print cirrus register in the mode specified by the register's mode. */
9620 case 'V':
9622 int mode = GET_MODE (x);
9624 if (GET_CODE (x) != REG || REGNO_REG_CLASS (REGNO (x)) != CIRRUS_REGS)
9625 abort ();
9627 fprintf (stream, "mv%s%s",
9628 mode == DFmode ? "d"
9629 : mode == SImode ? "fx"
9630 : mode == DImode ? "dx"
9631 : "f", reg_names[REGNO (x)] + 2);
9633 return;
9636 default:
9637 if (x == 0)
9638 abort ();
9640 if (GET_CODE (x) == REG)
9641 asm_fprintf (stream, "%r", REGNO (x));
9642 else if (GET_CODE (x) == MEM)
9644 output_memory_reference_mode = GET_MODE (x);
9645 output_address (XEXP (x, 0));
9647 else if (GET_CODE (x) == CONST_DOUBLE)
9648 fprintf (stream, "#%s", fp_immediate_constant (x));
9649 else if (GET_CODE (x) == NEG)
9650 abort (); /* This should never happen now. */
9651 else
9653 fputc ('#', stream);
9654 output_addr_const (stream, x);
9659 #ifndef AOF_ASSEMBLER
9660 /* Target hook for assembling integer objects. The ARM version needs to
9661 handle word-sized values specially. */
9663 static bool
9664 arm_assemble_integer (x, size, aligned_p)
9665 rtx x;
9666 unsigned int size;
9667 int aligned_p;
9669 if (size == UNITS_PER_WORD && aligned_p)
9671 fputs ("\t.word\t", asm_out_file);
9672 output_addr_const (asm_out_file, x);
9674 /* Mark symbols as position independent. We only do this in the
9675 .text segment, not in the .data segment. */
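/* Illustrative output (symbol and label names hypothetical):
".word foo(GOT)" for an ordinary global symbol, or
".word .LC0(GOTOFF)" for a constant pool entry. */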
9676 if (NEED_GOT_RELOC && flag_pic && making_const_table &&
9677 (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF))
9679 if (GET_CODE (x) == SYMBOL_REF
9680 && (CONSTANT_POOL_ADDRESS_P (x)
9681 || ENCODED_SHORT_CALL_ATTR_P (XSTR (x, 0))))
9682 fputs ("(GOTOFF)", asm_out_file);
9683 else if (GET_CODE (x) == LABEL_REF)
9684 fputs ("(GOTOFF)", asm_out_file);
9685 else
9686 fputs ("(GOT)", asm_out_file);
9688 fputc ('\n', asm_out_file);
9689 return true;
9692 return default_assemble_integer (x, size, aligned_p);
9694 #endif
9696 /* A finite state machine takes care of noticing whether or not instructions
9697 can be conditionally executed, and thus decrease execution time and code
9698 size by deleting branch instructions. The fsm is controlled by
9699 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
9701 /* The state of the fsm controlling condition codes are:
9702 0: normal, do nothing special
9703 1: make ASM_OUTPUT_OPCODE not output this instruction
9704 2: make ASM_OUTPUT_OPCODE not output this instruction
9705 3: make instructions conditional
9706 4: make instructions conditional
9708 State transitions (state->state by whom under condition):
9709 0 -> 1 final_prescan_insn if the `target' is a label
9710 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
9711 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
9712 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
9713 3 -> 0 (*targetm.asm_out.internal_label) if the `target' label is reached
9714 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
9715 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
9716 (the target insn is arm_target_insn).
9718 If the jump clobbers the conditions then we use states 2 and 4.
9720 A similar thing can be done with conditional return insns.
9722 XXX In case the `target' is an unconditional branch, this conditionalising
9723 of the instructions always reduces code size, but not always execution
9724 time. But then, I want to reduce the code size to somewhere near what
9725 /bin/cc produces. */
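/* As an illustration of the transformation (register and label names
are hypothetical), the fsm turns

beq	.L1
mov	r0, #1
.L1:

into the single conditional instruction

movne	r0, #1

by suppressing the branch and predicating the skipped insn. */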
9727 /* Returns the index of the ARM condition code string in
9728 `arm_condition_codes'. COMPARISON should be an rtx like
9729 `(eq (...) (...))'. */
9731 static enum arm_cond_code
9732 get_arm_condition_code (comparison)
9733 rtx comparison;
9735 enum machine_mode mode = GET_MODE (XEXP (comparison, 0));
9736 int code;
9737 enum rtx_code comp_code = GET_CODE (comparison);
9739 if (GET_MODE_CLASS (mode) != MODE_CC)
9740 mode = SELECT_CC_MODE (comp_code, XEXP (comparison, 0),
9741 XEXP (comparison, 1));
9743 switch (mode)
9745 case CC_DNEmode: code = ARM_NE; goto dominance;
9746 case CC_DEQmode: code = ARM_EQ; goto dominance;
9747 case CC_DGEmode: code = ARM_GE; goto dominance;
9748 case CC_DGTmode: code = ARM_GT; goto dominance;
9749 case CC_DLEmode: code = ARM_LE; goto dominance;
9750 case CC_DLTmode: code = ARM_LT; goto dominance;
9751 case CC_DGEUmode: code = ARM_CS; goto dominance;
9752 case CC_DGTUmode: code = ARM_HI; goto dominance;
9753 case CC_DLEUmode: code = ARM_LS; goto dominance;
9754 case CC_DLTUmode: code = ARM_CC;
9756 dominance:
9757 if (comp_code != EQ && comp_code != NE)
9758 abort ();
9760 if (comp_code == EQ)
9761 return ARM_INVERSE_CONDITION_CODE (code);
9762 return code;
9764 case CC_NOOVmode:
9765 switch (comp_code)
9767 case NE: return ARM_NE;
9768 case EQ: return ARM_EQ;
9769 case GE: return ARM_PL;
9770 case LT: return ARM_MI;
9771 default: abort ();
9774 case CC_Zmode:
9775 switch (comp_code)
9777 case NE: return ARM_NE;
9778 case EQ: return ARM_EQ;
9779 default: abort ();
9782 case CCFPEmode:
9783 case CCFPmode:
9784 /* These encodings assume that AC=1 in the FPA system control
9785 byte. This allows us to handle all cases except UNEQ and
9786 LTGT. */
9787 switch (comp_code)
9789 case GE: return ARM_GE;
9790 case GT: return ARM_GT;
9791 case LE: return ARM_LS;
9792 case LT: return ARM_MI;
9793 case NE: return ARM_NE;
9794 case EQ: return ARM_EQ;
9795 case ORDERED: return ARM_VC;
9796 case UNORDERED: return ARM_VS;
9797 case UNLT: return ARM_LT;
9798 case UNLE: return ARM_LE;
9799 case UNGT: return ARM_HI;
9800 case UNGE: return ARM_PL;
9801 /* UNEQ and LTGT do not have a representation. */
9802 case UNEQ: /* Fall through. */
9803 case LTGT: /* Fall through. */
9804 default: abort ();
9807 case CC_SWPmode:
9808 switch (comp_code)
9810 case NE: return ARM_NE;
9811 case EQ: return ARM_EQ;
9812 case GE: return ARM_LE;
9813 case GT: return ARM_LT;
9814 case LE: return ARM_GE;
9815 case LT: return ARM_GT;
9816 case GEU: return ARM_LS;
9817 case GTU: return ARM_CC;
9818 case LEU: return ARM_CS;
9819 case LTU: return ARM_HI;
9820 default: abort ();
9823 case CC_Cmode:
9824 switch (comp_code)
9826 case LTU: return ARM_CS;
9827 case GEU: return ARM_CC;
9828 default: abort ();
9831 case CCmode:
9832 switch (comp_code)
9834 case NE: return ARM_NE;
9835 case EQ: return ARM_EQ;
9836 case GE: return ARM_GE;
9837 case GT: return ARM_GT;
9838 case LE: return ARM_LE;
9839 case LT: return ARM_LT;
9840 case GEU: return ARM_CS;
9841 case GTU: return ARM_HI;
9842 case LEU: return ARM_LS;
9843 case LTU: return ARM_CC;
9844 default: abort ();
9847 default: abort ();
9850 abort ();
9854 void
9855 arm_final_prescan_insn (insn)
9856 rtx insn;
9858 /* BODY will hold the body of INSN. */
9859 rtx body = PATTERN (insn);
9861 /* This will be 1 if trying to repeat the trick, and things need to be
9862 reversed if it appears to fail. */
9863 int reverse = 0;
9865 /* If JUMP_CLOBBERS is one, then the conditions are clobbered if a
9866 branch is taken, even if the rtl suggests otherwise. It also
9867 means that we have to grub around within the jump expression to find
9868 out what the conditions are when the jump isn't taken. */
9869 int jump_clobbers = 0;
9871 /* If we start with a return insn, we only succeed if we find another one. */
9872 int seeking_return = 0;
9874 /* START_INSN will hold the insn from where we start looking. This is the
9875 first insn after the following code_label if REVERSE is true. */
9876 rtx start_insn = insn;
9878 /* If in state 4, check if the target branch is reached, in order to
9879 change back to state 0. */
9880 if (arm_ccfsm_state == 4)
9882 if (insn == arm_target_insn)
9884 arm_target_insn = NULL;
9885 arm_ccfsm_state = 0;
9887 return;
9890 /* If in state 3, it is possible to repeat the trick, if this insn is an
9891 unconditional branch to a label, and immediately following this branch
9892 is the previous target label which is only used once, and the label this
9893 branch jumps to is not too far off. */
9894 if (arm_ccfsm_state == 3)
9896 if (simplejump_p (insn))
9898 start_insn = next_nonnote_insn (start_insn);
9899 if (GET_CODE (start_insn) == BARRIER)
9901 /* XXX Isn't this always a barrier? */
9902 start_insn = next_nonnote_insn (start_insn);
9904 if (GET_CODE (start_insn) == CODE_LABEL
9905 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
9906 && LABEL_NUSES (start_insn) == 1)
9907 reverse = TRUE;
9908 else
9909 return;
9911 else if (GET_CODE (body) == RETURN)
9913 start_insn = next_nonnote_insn (start_insn);
9914 if (GET_CODE (start_insn) == BARRIER)
9915 start_insn = next_nonnote_insn (start_insn);
9916 if (GET_CODE (start_insn) == CODE_LABEL
9917 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
9918 && LABEL_NUSES (start_insn) == 1)
9920 reverse = TRUE;
9921 seeking_return = 1;
9923 else
9924 return;
9926 else
9927 return;
9930 if (arm_ccfsm_state != 0 && !reverse)
9931 abort ();
9932 if (GET_CODE (insn) != JUMP_INSN)
9933 return;
9935 /* This jump might be paralleled with a clobber of the condition codes;
9936 the jump should always come first. */
9937 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
9938 body = XVECEXP (body, 0, 0);
9940 #if 0
9941 /* If this is a conditional return then we don't want to know */
9942 if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
9943 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
9944 && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
9945 || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
9946 return;
9947 #endif
9949 if (reverse
9950 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
9951 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
9953 int insns_skipped;
9954 int fail = FALSE, succeed = FALSE;
9955 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
9956 int then_not_else = TRUE;
9957 rtx this_insn = start_insn, label = 0;
9959 /* If the jump cannot be done with one instruction, we cannot
9960 conditionally execute the instruction in the inverse case. */
9961 if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
9963 jump_clobbers = 1;
9964 return;
9967 /* Register the insn jumped to. */
9968 if (reverse)
9970 if (!seeking_return)
9971 label = XEXP (SET_SRC (body), 0);
9973 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
9974 label = XEXP (XEXP (SET_SRC (body), 1), 0);
9975 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
9977 label = XEXP (XEXP (SET_SRC (body), 2), 0);
9978 then_not_else = FALSE;
9980 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
9981 seeking_return = 1;
9982 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
9984 seeking_return = 1;
9985 then_not_else = FALSE;
9987 else
9988 abort ();
9990 /* See how many insns this branch skips, and what kind of insns. If all
9991 insns are okay, and the label or unconditional branch to the same
9992 label is not too far away, succeed. */
9993 for (insns_skipped = 0;
9994 !fail && !succeed && insns_skipped++ < max_insns_skipped;)
9996 rtx scanbody;
9998 this_insn = next_nonnote_insn (this_insn);
9999 if (!this_insn)
10000 break;
10002 switch (GET_CODE (this_insn))
10004 case CODE_LABEL:
10005 /* Succeed if it is the target label, otherwise fail since
10006 control falls in from somewhere else. */
10007 if (this_insn == label)
10009 if (jump_clobbers)
10011 arm_ccfsm_state = 2;
10012 this_insn = next_nonnote_insn (this_insn);
10014 else
10015 arm_ccfsm_state = 1;
10016 succeed = TRUE;
10018 else
10019 fail = TRUE;
10020 break;
10022 case BARRIER:
10023 /* Succeed if the following insn is the target label.
10024 Otherwise fail.
10025 If return insns are used then the last insn in a function
10026 will be a barrier. */
10027 this_insn = next_nonnote_insn (this_insn);
10028 if (this_insn && this_insn == label)
10030 if (jump_clobbers)
10032 arm_ccfsm_state = 2;
10033 this_insn = next_nonnote_insn (this_insn);
10035 else
10036 arm_ccfsm_state = 1;
10037 succeed = TRUE;
10039 else
10040 fail = TRUE;
10041 break;
10043 case CALL_INSN:
10044 /* If using 32-bit addresses the cc is not preserved over
10045 calls. */
10046 if (TARGET_APCS_32)
10048 /* Succeed if the following insn is the target label,
10049 or if the following two insns are a barrier and
10050 the target label. */
10051 this_insn = next_nonnote_insn (this_insn);
10052 if (this_insn && GET_CODE (this_insn) == BARRIER)
10053 this_insn = next_nonnote_insn (this_insn);
10055 if (this_insn && this_insn == label
10056 && insns_skipped < max_insns_skipped)
10058 if (jump_clobbers)
10060 arm_ccfsm_state = 2;
10061 this_insn = next_nonnote_insn (this_insn);
10063 else
10064 arm_ccfsm_state = 1;
10065 succeed = TRUE;
10067 else
10068 fail = TRUE;
10070 break;
10072 case JUMP_INSN:
10073 /* If this is an unconditional branch to the same label, succeed.
10074 If it is to another label, do nothing. If it is conditional,
10075 fail. */
10076 /* XXX Probably, the tests for SET and the PC are unnecessary. */
10078 scanbody = PATTERN (this_insn);
10079 if (GET_CODE (scanbody) == SET
10080 && GET_CODE (SET_DEST (scanbody)) == PC)
10082 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
10083 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
10085 arm_ccfsm_state = 2;
10086 succeed = TRUE;
10088 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
10089 fail = TRUE;
10091 /* Fail if a conditional return is undesirable (eg on a
10092 StrongARM), but still allow this if optimizing for size. */
10093 else if (GET_CODE (scanbody) == RETURN
10094 && !use_return_insn (TRUE)
10095 && !optimize_size)
10096 fail = TRUE;
10097 else if (GET_CODE (scanbody) == RETURN
10098 && seeking_return)
10100 arm_ccfsm_state = 2;
10101 succeed = TRUE;
10103 else if (GET_CODE (scanbody) == PARALLEL)
10105 switch (get_attr_conds (this_insn))
10107 case CONDS_NOCOND:
10108 break;
10109 default:
10110 fail = TRUE;
10111 break;
10114 else
10115 fail = TRUE; /* Unrecognized jump (eg epilogue). */
10117 break;
10119 case INSN:
10120 /* Instructions using or affecting the condition codes make it
10121 fail. */
10122 scanbody = PATTERN (this_insn);
10123 if (!(GET_CODE (scanbody) == SET
10124 || GET_CODE (scanbody) == PARALLEL)
10125 || get_attr_conds (this_insn) != CONDS_NOCOND)
10126 fail = TRUE;
10128 /* A conditional cirrus instruction must be followed by
10129 a non-Cirrus instruction. However, since we
10130 conditionalize instructions in this function and by
10131 the time we get here we can't add instructions
10132 (nops), because shorten_branches() has already been
10133 called, we will disable conditionalizing Cirrus
10134 instructions to be safe. */
10135 if (GET_CODE (scanbody) != USE
10136 && GET_CODE (scanbody) != CLOBBER
10137 && get_attr_cirrus (this_insn) != CIRRUS_NO)
10138 fail = TRUE;
10139 break;
10141 default:
10142 break;
10145 if (succeed)
10147 if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
10148 arm_target_label = CODE_LABEL_NUMBER (label);
10149 else if (seeking_return || arm_ccfsm_state == 2)
10151 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
10153 this_insn = next_nonnote_insn (this_insn);
10154 if (this_insn && (GET_CODE (this_insn) == BARRIER
10155 || GET_CODE (this_insn) == CODE_LABEL))
10156 abort ();
10158 if (!this_insn)
10160 /* Oh, dear! We ran off the end... give up. */
10161 recog (PATTERN (insn), insn, NULL);
10162 arm_ccfsm_state = 0;
10163 arm_target_insn = NULL;
10164 return;
10166 arm_target_insn = this_insn;
10168 else
10169 abort ();
10170 if (jump_clobbers)
10172 if (reverse)
10173 abort ();
10174 arm_current_cc =
10175 get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
10176 0), 0), 1));
10177 if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
10178 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
10179 if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
10180 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
10182 else
10184 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
10185 what it was. */
10186 if (!reverse)
10187 arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
10188 0));
10191 if (reverse || then_not_else)
10192 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
10195 /* Restore recog_data (getting the attributes of other insns can
10196 destroy this array, but final.c assumes that it remains intact
10197 across this call; since the insn has been recognized already we
10198 call recog direct). */
10199 recog (PATTERN (insn), insn, NULL);
10203 /* Returns true if REGNO is a valid register
10204 for holding a quantity of type MODE. */
10206 int
10207 arm_hard_regno_mode_ok (regno, mode)
10208 unsigned int regno;
10209 enum machine_mode mode;
10211 if (GET_MODE_CLASS (mode) == MODE_CC)
10212 return regno == CC_REGNUM;
10214 if (TARGET_THUMB)
10215 /* For the Thumb we only allow values bigger than SImode in
10216 registers 0 - 6, so that there is always a second low
10217 register available to hold the upper part of the value.
10218 We probably ought to ensure that the register is the
10219 start of an even numbered register pair. */
10220 return (ARM_NUM_REGS (mode) < 2) || (regno < LAST_LO_REGNUM);
10222 if (IS_CIRRUS_REGNUM (regno))
10223 /* We have outlawed SI values in Cirrus registers because they
10224 reside in the lower 32 bits, but SF values reside in the
10225 upper 32 bits. This causes gcc all sorts of grief. We can't
10226 even split the registers into pairs because Cirrus SI values
10227 get sign extended to 64bits-- aldyh. */
10228 return (GET_MODE_CLASS (mode) == MODE_FLOAT) || (mode == DImode);
10230 if (regno <= LAST_ARM_REGNUM)
10231 /* We allow any value to be stored in the general registers. */
10232 return 1;
10234 if ( regno == FRAME_POINTER_REGNUM
10235 || regno == ARG_POINTER_REGNUM)
10236 /* We only allow integers in the fake hard registers. */
10237 return GET_MODE_CLASS (mode) == MODE_INT;
10239 /* The only registers left are the FPU registers
10240 which we only allow to hold FP values. */
10241 return GET_MODE_CLASS (mode) == MODE_FLOAT
10242 && regno >= FIRST_ARM_FP_REGNUM
10243 && regno <= LAST_ARM_FP_REGNUM;
10246 enum reg_class
10247 arm_regno_class (regno)
10248 int regno;
10250 if (TARGET_THUMB)
10252 if (regno == STACK_POINTER_REGNUM)
10253 return STACK_REG;
10254 if (regno == CC_REGNUM)
10255 return CC_REG;
10256 if (regno < 8)
10257 return LO_REGS;
10258 return HI_REGS;
10261 if ( regno <= LAST_ARM_REGNUM
10262 || regno == FRAME_POINTER_REGNUM
10263 || regno == ARG_POINTER_REGNUM)
10264 return GENERAL_REGS;
10266 if (regno == CC_REGNUM)
10267 return NO_REGS;
10269 if (IS_CIRRUS_REGNUM (regno))
10270 return CIRRUS_REGS;
10272 return FPU_REGS;
10275 /* Handle a special case when computing the offset
10276 of an argument from the frame pointer. */
10278 int
10279 arm_debugger_arg_offset (value, addr)
10280 int value;
10281 rtx addr;
10283 rtx insn;
10285 /* We are only interested if dbxout_parms() failed to compute the offset. */
10286 if (value != 0)
10287 return 0;
10289 /* We can only cope with the case where the address is held in a register. */
10290 if (GET_CODE (addr) != REG)
10291 return 0;
10293 /* If we are using the frame pointer to point at the argument, then
10294 an offset of 0 is correct. */
10295 if (REGNO (addr) == (unsigned) HARD_FRAME_POINTER_REGNUM)
10296 return 0;
10298 /* If we are using the stack pointer to point at the
10299 argument, then an offset of 0 is correct. */
10300 if ((TARGET_THUMB || !frame_pointer_needed)
10301 && REGNO (addr) == SP_REGNUM)
10302 return 0;
10304 /* Oh dear. The argument is pointed to by a register rather
10305 than being held in a register, or being stored at a known
10306 offset from the frame pointer. Since GDB only understands
10307 those two kinds of argument we must translate the address
10308 held in the register into an offset from the frame pointer.
10309 We do this by searching through the insns for the function
10310 looking to see where this register gets its value. If the
10311 register is initialized from the frame pointer plus an offset
10312 then we are in luck and we can continue, otherwise we give up.
10314 This code is exercised by producing debugging information
10315 for a function with arguments like this:
10317 double func (double a, double b, int c, double d) {return d;}
10319 Without this code the stab for parameter 'd' will be set to
10320 an offset of 0 from the frame pointer, rather than 8. */
10322 /* The if() statement says:
10324 If the insn is a normal instruction
10325 and if the insn is setting the value in a register
10326 and if the register being set is the register holding the address of the argument
10327 and if the address is computed by an addition
10328 that involves adding to a register
10329 which is the frame pointer
10330 a constant integer
10332 then... */
10334 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
10336 if ( GET_CODE (insn) == INSN
10337 && GET_CODE (PATTERN (insn)) == SET
10338 && REGNO (XEXP (PATTERN (insn), 0)) == REGNO (addr)
10339 && GET_CODE (XEXP (PATTERN (insn), 1)) == PLUS
10340 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 0)) == REG
10341 && REGNO (XEXP (XEXP (PATTERN (insn), 1), 0)) == (unsigned) HARD_FRAME_POINTER_REGNUM
10342 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 1)) == CONST_INT
10345 value = INTVAL (XEXP (XEXP (PATTERN (insn), 1), 1));
10347 break;
10351 if (value == 0)
10353 debug_rtx (addr);
10354 warning ("unable to compute real location of stacked parameter");
10355 value = 8; /* XXX magic hack */
10358 return value;
10361 /* Recursively search through all of the blocks in a function
10362 checking to see if any of the variables created in that
10363 function match the RTX called 'orig'. If they do then
10364 replace them with the RTX called 'new'. */
10366 static void
10367 replace_symbols_in_block (block, orig, new)
10368 tree block;
10369 rtx orig;
10370 rtx new;
10372 for (; block; block = BLOCK_CHAIN (block))
10374 tree sym;
10376 if (!TREE_USED (block))
10377 continue;
10379 for (sym = BLOCK_VARS (block); sym; sym = TREE_CHAIN (sym))
10381 if ( (DECL_NAME (sym) == 0 && TREE_CODE (sym) != TYPE_DECL)
10382 || DECL_IGNORED_P (sym)
10383 || TREE_CODE (sym) != VAR_DECL
10384 || DECL_EXTERNAL (sym)
10385 || !rtx_equal_p (DECL_RTL (sym), orig)
10387 continue;
10389 SET_DECL_RTL (sym, new);
10392 replace_symbols_in_block (BLOCK_SUBBLOCKS (block), orig, new);
10396 /* Return the number (counting from 0) of
10397 the least significant set bit in MASK. */
10399 #ifdef __GNUC__
10400 inline
10401 #endif
10402 static int
10403 number_of_first_bit_set (mask)
10404 int mask;
10406 int bit;
10408 for (bit = 0;
10409 (mask & (1 << bit)) == 0;
10410 ++bit)
10411 continue;
10413 return bit;
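/* Worked example (editorial note): for MASK = 0x18 (binary 11000)
   the loop exits with bit == 3, since bits 0-2 are clear.  The same
   value satisfies the identity MASK & -MASK == 1 << bit, which
   isolates the lowest set bit.  */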
10416 /* Generate code to return from a thumb function.
10417 If 'reg_containing_return_addr' is -1, then the return address is
10418 actually on the stack, at the stack pointer. */
10419 static void
10420 thumb_exit (f, reg_containing_return_addr, eh_ofs)
10421 FILE * f;
10422 int reg_containing_return_addr;
10423 rtx eh_ofs;
10425 unsigned regs_available_for_popping;
10426 unsigned regs_to_pop;
10427 int pops_needed;
10428 unsigned available;
10429 unsigned required;
10430 int mode;
10431 int size;
10432 int restore_a4 = FALSE;
10434 /* Compute the registers we need to pop. */
10435 regs_to_pop = 0;
10436 pops_needed = 0;
10438 /* There is an assumption here, that if eh_ofs is not NULL, the
10439 normal return address will have been pushed. */
10440 if (reg_containing_return_addr == -1 || eh_ofs)
10442 /* When we are generating a return for __builtin_eh_return,
10443 reg_containing_return_addr must specify the return regno. */
10444 if (eh_ofs && reg_containing_return_addr == -1)
10445 abort ();
10447 regs_to_pop |= 1 << LR_REGNUM;
10448 ++pops_needed;
10451 if (TARGET_BACKTRACE)
10453 /* Restore the (ARM) frame pointer and stack pointer. */
10454 regs_to_pop |= (1 << ARM_HARD_FRAME_POINTER_REGNUM) | (1 << SP_REGNUM);
10455 pops_needed += 2;
10458 /* If there is nothing to pop then just emit the BX instruction and
10459 return. */
10460 if (pops_needed == 0)
10462 if (eh_ofs)
10463 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
10465 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
10466 return;
10468 /* Otherwise if we are not supporting interworking and we have not created
10469 a backtrace structure and the function was not entered in ARM mode then
10470 just pop the return address straight into the PC. */
10471 else if (!TARGET_INTERWORK
10472 && !TARGET_BACKTRACE
10473 && !is_called_in_ARM_mode (current_function_decl))
10475 if (eh_ofs)
10477 asm_fprintf (f, "\tadd\t%r, #4\n", SP_REGNUM);
10478 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
10479 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
10481 else
10482 asm_fprintf (f, "\tpop\t{%r}\n", PC_REGNUM);
10484 return;
10487 /* Find out how many of the (return) argument registers we can corrupt. */
10488 regs_available_for_popping = 0;
10490 /* If returning via __builtin_eh_return, the bottom three registers
10491 all contain information needed for the return. */
10492 if (eh_ofs)
10493 size = 12;
10494 else
10496 #ifdef RTX_CODE
10497 /* See if we can deduce the registers used from the function's
10498 return value. This is more reliable than examining
10499 regs_ever_live[] because that will be set if the register is
10500 ever used in the function, not just if the register is used
10501 to hold a return value. */
10503 if (current_function_return_rtx != 0)
10504 mode = GET_MODE (current_function_return_rtx);
10505 else
10506 #endif
10507 mode = DECL_MODE (DECL_RESULT (current_function_decl));
10509 size = GET_MODE_SIZE (mode);
10511 if (size == 0)
10513 /* In a void function we can use any argument register.
10514 In a function that returns a structure on the stack
10515 we can use the second and third argument registers. */
10516 if (mode == VOIDmode)
10517 regs_available_for_popping =
10518 (1 << ARG_REGISTER (1))
10519 | (1 << ARG_REGISTER (2))
10520 | (1 << ARG_REGISTER (3));
10521 else
10522 regs_available_for_popping =
10523 (1 << ARG_REGISTER (2))
10524 | (1 << ARG_REGISTER (3));
10526 else if (size <= 4)
10527 regs_available_for_popping =
10528 (1 << ARG_REGISTER (2))
10529 | (1 << ARG_REGISTER (3));
10530 else if (size <= 8)
10531 regs_available_for_popping =
10532 (1 << ARG_REGISTER (3));
10535 /* Match registers to be popped with registers into which we pop them. */
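/* Editorial note: X & -X isolates the lowest set bit of X, so each
   iteration of the loop below discards one register from each mask.
   POPS_NEEDED is therefore decremented once for every required pop
   that can be satisfied by an available register.  */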
10536 for (available = regs_available_for_popping,
10537 required = regs_to_pop;
10538 required != 0 && available != 0;
10539 available &= ~(available & - available),
10540 required &= ~(required & - required))
10541 -- pops_needed;
10543 /* If we have any popping registers left over, remove them. */
10544 if (available > 0)
10545 regs_available_for_popping &= ~available;
10547 /* Otherwise if we need another popping register we can use
10548 the fourth argument register. */
10549 else if (pops_needed)
10551 /* If we have not found any free argument registers and
10552 reg a4 contains the return address, we must move it. */
10553 if (regs_available_for_popping == 0
10554 && reg_containing_return_addr == LAST_ARG_REGNUM)
10556 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
10557 reg_containing_return_addr = LR_REGNUM;
10559 else if (size > 12)
10561 /* Register a4 is being used to hold part of the return value,
10562 but we have dire need of a free, low register. */
10563 restore_a4 = TRUE;
10565 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, LAST_ARG_REGNUM);
10568 if (reg_containing_return_addr != LAST_ARG_REGNUM)
10570 /* The fourth argument register is available. */
10571 regs_available_for_popping |= 1 << LAST_ARG_REGNUM;
10573 --pops_needed;
10577 /* Pop as many registers as we can. */
10578 thumb_pushpop (f, regs_available_for_popping, FALSE);
10580 /* Process the registers we popped. */
10581 if (reg_containing_return_addr == -1)
10583 /* The return address was popped into the lowest numbered register. */
10584 regs_to_pop &= ~(1 << LR_REGNUM);
10586 reg_containing_return_addr =
10587 number_of_first_bit_set (regs_available_for_popping);
10589 /* Remove this register from the mask of available registers, so that
10590 the return address will not be corrupted by further pops. */
10591 regs_available_for_popping &= ~(1 << reg_containing_return_addr);
10594 /* If we popped other registers then handle them here. */
10595 if (regs_available_for_popping)
10597 int frame_pointer;
10599 /* Work out which register currently contains the frame pointer. */
10600 frame_pointer = number_of_first_bit_set (regs_available_for_popping);
10602 /* Move it into the correct place. */
10603 asm_fprintf (f, "\tmov\t%r, %r\n",
10604 ARM_HARD_FRAME_POINTER_REGNUM, frame_pointer);
10606 /* (Temporarily) remove it from the mask of popped registers. */
10607 regs_available_for_popping &= ~(1 << frame_pointer);
10608 regs_to_pop &= ~(1 << ARM_HARD_FRAME_POINTER_REGNUM);
10610 if (regs_available_for_popping)
10612 int stack_pointer;
10614 /* We popped the stack pointer as well;
10615 find the register that contains it. */
10616 stack_pointer = number_of_first_bit_set (regs_available_for_popping);
10618 /* Move it into the stack register. */
10619 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, stack_pointer);
10621 /* At this point we have popped all necessary registers, so
10622 do not worry about restoring regs_available_for_popping
10623 to its correct value:
10625 assert (pops_needed == 0)
10626 assert (regs_available_for_popping == (1 << frame_pointer))
10627 assert (regs_to_pop == (1 << STACK_POINTER)) */
10629 else
10631 /* Since we have just moved the popped value into the frame
10632 pointer, the popping register is available for reuse, and
10633 we know that we still have the stack pointer left to pop. */
10634 regs_available_for_popping |= (1 << frame_pointer);
10638 /* If we still have registers left on the stack, but we no longer have
10639 any registers into which we can pop them, then we must move the return
10640 address into the link register and make available the register that
10641 contained it. */
10642 if (regs_available_for_popping == 0 && pops_needed > 0)
10644 regs_available_for_popping |= 1 << reg_containing_return_addr;
10646 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM,
10647 reg_containing_return_addr);
10649 reg_containing_return_addr = LR_REGNUM;
10652 /* If we have registers left on the stack then pop some more.
10653 We know that at most we will want to pop FP and SP. */
10654 if (pops_needed > 0)
10656 int popped_into;
10657 int move_to;
10659 thumb_pushpop (f, regs_available_for_popping, FALSE);
10661 /* We have popped either FP or SP.
10662 Move whichever one it is into the correct register. */
10663 popped_into = number_of_first_bit_set (regs_available_for_popping);
10664 move_to = number_of_first_bit_set (regs_to_pop);
10666 asm_fprintf (f, "\tmov\t%r, %r\n", move_to, popped_into);
10668 regs_to_pop &= ~(1 << move_to);
10670 --pops_needed;
10673 /* If we still have not popped everything then we must have only
10674 had one register available to us and we are now popping the SP. */
10675 if (pops_needed > 0)
10677 int popped_into;
10679 thumb_pushpop (f, regs_available_for_popping, FALSE);
10681 popped_into = number_of_first_bit_set (regs_available_for_popping);
10683 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, popped_into);
10685 /* assert (regs_to_pop == (1 << STACK_POINTER))
10686 assert (pops_needed == 1) */
10690 /* If necessary restore the a4 register. */
10691 if (restore_a4)
10693 if (reg_containing_return_addr != LR_REGNUM)
10695 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
10696 reg_containing_return_addr = LR_REGNUM;
10699 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
10702 if (eh_ofs)
10703 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
10705 /* Return to caller. */
10706 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
10709 /* Emit code to push or pop registers to or from the stack. */
10711 static void
10712 thumb_pushpop (f, mask, push)
10713 FILE * f;
10714 int mask;
10715 int push;
10717 int regno;
10718 int lo_mask = mask & 0xFF;
10720 if (lo_mask == 0 && !push && (mask & (1 << 15)))
10722 /* Special case. Do not generate a POP PC statement here, do it in
10723 thumb_exit() */
10724 thumb_exit (f, -1, NULL_RTX);
10725 return;
10728 fprintf (f, "\t%s\t{", push ? "push" : "pop");
10730 /* Look at the low registers first. */
10731 for (regno = 0; regno <= LAST_LO_REGNUM; regno++, lo_mask >>= 1)
10733 if (lo_mask & 1)
10735 asm_fprintf (f, "%r", regno);
10737 if ((lo_mask & ~1) != 0)
10738 fprintf (f, ", ");
10742 if (push && (mask & (1 << LR_REGNUM)))
10744 /* Catch pushing the LR. */
10745 if (mask & 0xFF)
10746 fprintf (f, ", ");
10748 asm_fprintf (f, "%r", LR_REGNUM);
10750 else if (!push && (mask & (1 << PC_REGNUM)))
10752 /* Catch popping the PC. */
10753 if (TARGET_INTERWORK || TARGET_BACKTRACE)
10755 /* The PC is never popped directly; instead
10756 it is popped into r3 and then BX is used. */
10757 fprintf (f, "}\n");
10759 thumb_exit (f, -1, NULL_RTX);
10761 return;
10763 else
10765 if (mask & 0xFF)
10766 fprintf (f, ", ");
10768 asm_fprintf (f, "%r", PC_REGNUM);
10772 fprintf (f, "}\n");
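/* Illustrative example (editorial note): with PUSH true and
   MASK == (1 << 4) | (1 << 5) | (1 << LR_REGNUM), the code above
   emits:

     push {r4, r5, lr}

   With PUSH false and PC_REGNUM in the mask on an interworking or
   backtrace target, the pop is instead routed through thumb_exit.  */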
10775 void
10776 thumb_final_prescan_insn (insn)
10777 rtx insn;
10779 if (flag_print_asm_name)
10780 asm_fprintf (asm_out_file, "%@ 0x%04x\n",
10781 INSN_ADDRESSES (INSN_UID (insn)));
10785 thumb_shiftable_const (val)
10786 unsigned HOST_WIDE_INT val;
10788 unsigned HOST_WIDE_INT mask = 0xff;
10789 int i;
10791 if (val == 0) /* XXX */
10792 return 0;
10794 for (i = 0; i < 25; i++)
10795 if ((val & (mask << i)) == val)
10796 return 1;
10798 return 0;
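/* Worked examples (editorial note): 0x000FF000 is accepted (it is
   0xFF << 12, so the test succeeds at i == 12), whereas 0x101 is
   rejected because its set bits span nine positions and cannot fit
   inside any single 0xFF window.  */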
10801 /* Returns nonzero if the current function contains,
10802 or might contain a far jump. */
10805 thumb_far_jump_used_p (in_prologue)
10806 int in_prologue;
10808 rtx insn;
10810 /* This test is only important for leaf functions. */
10811 /* assert (!leaf_function_p ()); */
10813 /* If we have already decided that far jumps may be used,
10814 do not bother checking again, and always return true even if
10815 it turns out that they are not being used. Once we have made
10816 the decision that far jumps are present (and that hence the link
10817 register will be pushed onto the stack) we cannot go back on it. */
10818 if (cfun->machine->far_jump_used)
10819 return 1;
10821 /* If this function is not being called from the prologue/epilogue
10822 generation code then it must be being called from the
10823 INITIAL_ELIMINATION_OFFSET macro. */
10824 if (!in_prologue)
10826 /* In this case we know that we are being asked about the elimination
10827 of the arg pointer register. If that register is not being used,
10828 then there are no arguments on the stack, and we do not have to
10829 worry that a far jump might force the prologue to push the link
10830 register, changing the stack offsets. In this case we can just
10831 return false, since the presence of far jumps in the function will
10832 not affect stack offsets.
10834 If the arg pointer is live (or if it was live, but has now been
10835 eliminated and so set to dead) then we do have to test to see if
10836 the function might contain a far jump. This test can lead to some
10837 false negatives, since before reload is completed the length of
10838 branch instructions is not known, so gcc defaults to returning their
10839 longest length, which in turn sets the far jump attribute to true.
10841 A false negative will not result in bad code being generated, but it
10842 will result in a needless push and pop of the link register. We
10843 hope that this does not occur too often. */
10844 if (regs_ever_live [ARG_POINTER_REGNUM])
10845 cfun->machine->arg_pointer_live = 1;
10846 else if (!cfun->machine->arg_pointer_live)
10847 return 0;
10850 /* Check to see if the function contains a branch
10851 insn with the far jump attribute set. */
10852 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
10854 if (GET_CODE (insn) == JUMP_INSN
10855 /* Ignore tablejump patterns. */
10856 && GET_CODE (PATTERN (insn)) != ADDR_VEC
10857 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
10858 && get_attr_far_jump (insn) == FAR_JUMP_YES
10861 /* Record the fact that we have decided that
10862 the function does use far jumps. */
10863 cfun->machine->far_jump_used = 1;
10864 return 1;
10868 return 0;
10871 /* Return nonzero if FUNC must be entered in ARM mode. */
10874 is_called_in_ARM_mode (func)
10875 tree func;
10877 if (TREE_CODE (func) != FUNCTION_DECL)
10878 abort ();
10880 /* Ignore the problem about functions whose address is taken. */
10881 if (TARGET_CALLEE_INTERWORKING && TREE_PUBLIC (func))
10882 return TRUE;
10884 #ifdef ARM_PE
10885 return lookup_attribute ("interfacearm", DECL_ATTRIBUTES (func)) != NULL_TREE;
10886 #else
10887 return FALSE;
10888 #endif
10891 /* The bits which aren't usefully expanded as rtl. */
10893 const char *
10894 thumb_unexpanded_epilogue ()
10896 int regno;
10897 int live_regs_mask = 0;
10898 int high_regs_pushed = 0;
10899 int leaf_function = leaf_function_p ();
10900 int had_to_push_lr;
10901 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
10903 if (return_used_this_function)
10904 return "";
10906 if (IS_NAKED (arm_current_func_type ()))
10907 return "";
10909 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
10910 if (THUMB_REG_PUSHED_P (regno))
10911 live_regs_mask |= 1 << regno;
10913 for (regno = 8; regno < 13; regno++)
10914 if (THUMB_REG_PUSHED_P (regno))
10915 high_regs_pushed++;
10917 /* The prologue may have pushed some high registers to use as
10918 work registers. For example, the testsuite file:
10919 gcc/testsuite/gcc/gcc.c-torture/execute/complex-2.c
10920 compiles to produce:
10921 push {r4, r5, r6, r7, lr}
10922 mov r7, r9
10923 mov r6, r8
10924 push {r6, r7}
10925 as part of the prologue. We have to undo that pushing here. */
10927 if (high_regs_pushed)
10929 int mask = live_regs_mask;
10930 int next_hi_reg;
10931 int size;
10932 int mode;
10934 #ifdef RTX_CODE
10935 /* See if we can deduce the registers used from the function's return value.
10936 This is more reliable than examining regs_ever_live[] because that
10937 will be set if the register is ever used in the function, not just if
10938 the register is used to hold a return value. */
10940 if (current_function_return_rtx != 0)
10941 mode = GET_MODE (current_function_return_rtx);
10942 else
10943 #endif
10944 mode = DECL_MODE (DECL_RESULT (current_function_decl));
10946 size = GET_MODE_SIZE (mode);
10948 /* Unless we are returning a type of size > 12, register r3 is
10949 available. */
10950 if (size < 13)
10951 mask |= 1 << 3;
10953 if (mask == 0)
10954 /* Oh dear! We have no low registers into which we can pop
10955 high registers! */
10956 internal_error
10957 ("no low registers available for popping high registers");
10959 for (next_hi_reg = 8; next_hi_reg < 13; next_hi_reg++)
10960 if (THUMB_REG_PUSHED_P (next_hi_reg))
10961 break;
10963 while (high_regs_pushed)
10965 /* Find lo register(s) into which the high register(s) can
10966 be popped. */
10967 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
10969 if (mask & (1 << regno))
10970 high_regs_pushed--;
10971 if (high_regs_pushed == 0)
10972 break;
10975 mask &= (2 << regno) - 1; /* A noop if regno == 8 */
10977 /* Pop the values into the low register(s). */
10978 thumb_pushpop (asm_out_file, mask, 0);
10980 /* Move the value(s) into the high registers. */
10981 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
10983 if (mask & (1 << regno))
10985 asm_fprintf (asm_out_file, "\tmov\t%r, %r\n", next_hi_reg,
10986 regno);
10988 for (next_hi_reg++; next_hi_reg < 13; next_hi_reg++)
10989 if (THUMB_REG_PUSHED_P (next_hi_reg))
10990 break;
10996 had_to_push_lr = (live_regs_mask || !leaf_function
10997 || thumb_far_jump_used_p (1));
10999 if (TARGET_BACKTRACE
11000 && ((live_regs_mask & 0xFF) == 0)
11001 && regs_ever_live [LAST_ARG_REGNUM] != 0)
11003 /* The stack backtrace structure creation code had to
11004 push R7 in order to get a work register, so we pop
11005 it now. */
11006 live_regs_mask |= (1 << LAST_LO_REGNUM);
11009 if (current_function_pretend_args_size == 0 || TARGET_BACKTRACE)
11011 if (had_to_push_lr
11012 && !is_called_in_ARM_mode (current_function_decl)
11013 && !eh_ofs)
11014 live_regs_mask |= 1 << PC_REGNUM;
11016 /* Either no argument registers were pushed or a backtrace
11017 structure was created which includes an adjusted stack
11018 pointer, so just pop everything. */
11019 if (live_regs_mask)
11020 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
11022 if (eh_ofs)
11023 thumb_exit (asm_out_file, 2, eh_ofs);
11024 /* We have either just popped the return address into the
11025 PC, or it was kept in LR for the entire function, or
11026 it is still on the stack because we do not want to
11027 return by doing a pop {pc}. */
11028 else if ((live_regs_mask & (1 << PC_REGNUM)) == 0)
11029 thumb_exit (asm_out_file,
11030 (had_to_push_lr
11031 && is_called_in_ARM_mode (current_function_decl)) ?
11032 -1 : LR_REGNUM, NULL_RTX);
11034 else
11036 /* Pop everything but the return address. */
11037 live_regs_mask &= ~(1 << PC_REGNUM);
11039 if (live_regs_mask)
11040 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
11042 if (had_to_push_lr)
11043 /* Get the return address into a temporary register. */
11044 thumb_pushpop (asm_out_file, 1 << LAST_ARG_REGNUM, 0);
11046 /* Remove the argument registers that were pushed onto the stack. */
11047 asm_fprintf (asm_out_file, "\tadd\t%r, %r, #%d\n",
11048 SP_REGNUM, SP_REGNUM,
11049 current_function_pretend_args_size);
11051 if (eh_ofs)
11052 thumb_exit (asm_out_file, 2, eh_ofs);
11053 else
11054 thumb_exit (asm_out_file,
11055 had_to_push_lr ? LAST_ARG_REGNUM : LR_REGNUM, NULL_RTX);
11058 return "";
11061 /* Functions to save and restore machine-specific function data. */
11063 static struct machine_function *
11064 arm_init_machine_status ()
11066 struct machine_function *machine;
11067 machine = (machine_function *) ggc_alloc_cleared (sizeof (machine_function));
11069 #if ARM_FT_UNKNOWN != 0
11070 machine->func_type = ARM_FT_UNKNOWN;
11071 #endif
11072 return machine;
11075 /* Return an RTX indicating where the return address to the
11076 calling function can be found. */
11079 arm_return_addr (count, frame)
11080 int count;
11081 rtx frame ATTRIBUTE_UNUSED;
11083 if (count != 0)
11084 return NULL_RTX;
11086 if (TARGET_APCS_32)
11087 return get_hard_reg_initial_val (Pmode, LR_REGNUM);
11088 else
11090 rtx lr = gen_rtx_AND (Pmode, gen_rtx_REG (Pmode, LR_REGNUM),
11091 GEN_INT (RETURN_ADDR_MASK26));
11092 return get_func_hard_reg_initial_val (cfun, lr);
11096 /* Do anything needed before RTL is emitted for each function. */
11098 void
11099 arm_init_expanders ()
11101 /* Arrange to initialize and mark the machine per-function status. */
11102 init_machine_status = arm_init_machine_status;
11105 HOST_WIDE_INT
11106 thumb_get_frame_size ()
11108 int regno;
11110 int base_size = ROUND_UP_WORD (get_frame_size ());
11111 int count_regs = 0;
11112 int entry_size = 0;
11113 int leaf;
11115 if (! TARGET_THUMB)
11116 abort ();
11118 if (! TARGET_ATPCS)
11119 return base_size;
11121 /* We need to know if we are a leaf function. Unfortunately, it
11122 is possible to be called after start_sequence has been called,
11123 which causes get_insns to return the insns for the sequence,
11124 not the function, which will cause leaf_function_p to return
11125 the incorrect result.
11127 To work around this, we cache the computed frame size. This
11128 works because we will only be calling RTL expanders that need
11129 to know about leaf functions once reload has completed, and the
11130 frame size cannot be changed after that time, so we can safely
11131 use the cached value. */
11133 if (reload_completed)
11134 return cfun->machine->frame_size;
11136 leaf = leaf_function_p ();
11138 /* A leaf function does not need any stack alignment if it has nothing
11139 on the stack. */
11140 if (leaf && base_size == 0)
11142 cfun->machine->frame_size = 0;
11143 return 0;
11146 /* We know that SP will be word aligned on entry, and we must
11147 preserve that condition at any subroutine call. But those are
11148 the only constraints. */
11150 /* Space for variadic functions. */
11151 if (current_function_pretend_args_size)
11152 entry_size += current_function_pretend_args_size;
11154 /* Space for pushed lo registers. */
11155 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
11156 if (THUMB_REG_PUSHED_P (regno))
11157 count_regs++;
11159 /* Space for backtrace structure. */
11160 if (TARGET_BACKTRACE)
11162 if (count_regs == 0 && regs_ever_live[LAST_ARG_REGNUM] != 0)
11163 entry_size += 20;
11164 else
11165 entry_size += 16;
11168 if (count_regs || !leaf || thumb_far_jump_used_p (1))
11169 count_regs++; /* LR */
11171 entry_size += count_regs * 4;
11172 count_regs = 0;
11174 /* Space for pushed hi regs. */
11175 for (regno = 8; regno < 13; regno++)
11176 if (THUMB_REG_PUSHED_P (regno))
11177 count_regs++;
11179 entry_size += count_regs * 4;
11181 if ((entry_size + base_size + current_function_outgoing_args_size) & 7)
11182 base_size += 4;
11183 if ((entry_size + base_size + current_function_outgoing_args_size) & 7)
11184 abort ();
11186 cfun->machine->frame_size = base_size;
11188 return base_size;
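/* Worked example (editorial note): with 4 bytes of locals
   (base_size == 4), one pushed low register plus LR
   (entry_size == 8), and no pretend args, backtrace structure or
   outgoing arguments, the sum 12 is not 8-byte aligned, so
   base_size is padded to 8, giving a 16-byte frame that satisfies
   the ATPCS alignment rule.  */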
11191 /* Generate the rest of a function's prologue. */
11193 void
11194 thumb_expand_prologue ()
11196 HOST_WIDE_INT amount = (thumb_get_frame_size ()
11197 + current_function_outgoing_args_size);
11198 unsigned long func_type;
11200 func_type = arm_current_func_type ();
11202 /* Naked functions don't have prologues. */
11203 if (IS_NAKED (func_type))
11204 return;
11206 if (IS_INTERRUPT (func_type))
11208 error ("interrupt service routines cannot be coded in Thumb mode");
11209 return;
11212 if (frame_pointer_needed)
11213 emit_insn (gen_movsi (hard_frame_pointer_rtx, stack_pointer_rtx));
11215 if (amount)
11217 amount = ROUND_UP_WORD (amount);
11219 if (amount < 512)
11220 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
11221 GEN_INT (- amount)));
11222 else
11224 int regno;
11225 rtx reg;
11227 /* The stack decrement is too big for an immediate value in a single
11228 insn. In theory we could issue multiple subtracts, but after
11229 three of them it becomes more space efficient to place the full
11230 value in the constant pool and load into a register. (Also the
11231 ARM debugger really likes to see only one stack decrement per
11232 function). So instead we look for a scratch register into which
11233 we can load the decrement, and then we subtract this from the
11234 stack pointer. Unfortunately, on the Thumb the only available
11235 scratch registers are the argument registers, and we cannot use
11236 these as they may hold arguments to the function. Instead we
11237 attempt to locate a call preserved register which is used by this
11238 function. If we can find one, then we know that it will have
11239 been pushed at the start of the prologue and so we can corrupt
11240 it now. */
11241 for (regno = LAST_ARG_REGNUM + 1; regno <= LAST_LO_REGNUM; regno++)
11242 if (THUMB_REG_PUSHED_P (regno)
11243 && !(frame_pointer_needed
11244 && (regno == THUMB_HARD_FRAME_POINTER_REGNUM)))
11245 break;
11247 if (regno > LAST_LO_REGNUM) /* Very unlikely. */
11249 rtx spare = gen_rtx (REG, SImode, IP_REGNUM);
11251 /* Choose an arbitrary, non-argument low register. */
11252 reg = gen_rtx (REG, SImode, LAST_LO_REGNUM);
11254 /* Save it by copying it into a high, scratch register. */
11255 emit_insn (gen_movsi (spare, reg));
11256 /* Add a USE to stop propagate_one_insn() from barfing. */
11257 emit_insn (gen_prologue_use (spare));
11259 /* Decrement the stack. */
11260 emit_insn (gen_movsi (reg, GEN_INT (- amount)));
11261 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
11262 reg));
11264 /* Restore the low register's original value. */
11265 emit_insn (gen_movsi (reg, spare));
11267 /* Emit a USE of the restored scratch register, so that flow
11268 analysis will not consider the restore redundant. The
11269 register won't be used again in this function and isn't
11270 restored by the epilogue. */
11271 emit_insn (gen_prologue_use (reg));
11273 else
11275 reg = gen_rtx (REG, SImode, regno);
11277 emit_insn (gen_movsi (reg, GEN_INT (- amount)));
11278 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
11279 reg));
11284 if (current_function_profile || TARGET_NO_SCHED_PRO)
11285 emit_insn (gen_blockage ());
11288 void
11289 thumb_expand_epilogue ()
11291 HOST_WIDE_INT amount = (thumb_get_frame_size ()
11292 + current_function_outgoing_args_size);
11294 /* Naked functions don't have epilogues. */
11295 if (IS_NAKED (arm_current_func_type ()))
11296 return;
11298 if (frame_pointer_needed)
11299 emit_insn (gen_movsi (stack_pointer_rtx, hard_frame_pointer_rtx));
11300 else if (amount)
11302 amount = ROUND_UP_WORD (amount);
11304 if (amount < 512)
11305 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
11306 GEN_INT (amount)));
11307 else
11309 /* r3 is always free in the epilogue. */
11310 rtx reg = gen_rtx (REG, SImode, LAST_ARG_REGNUM);
11312 emit_insn (gen_movsi (reg, GEN_INT (amount)));
11313 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
11317 /* Emit a USE (stack_pointer_rtx), so that
11318 the stack adjustment will not be deleted. */
11319 emit_insn (gen_prologue_use (stack_pointer_rtx));
11321 if (current_function_profile || TARGET_NO_SCHED_PRO)
11322 emit_insn (gen_blockage ());
11325 static void
11326 thumb_output_function_prologue (f, size)
11327 FILE * f;
11328 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
11330 int live_regs_mask = 0;
11331 int high_regs_pushed = 0;
11332 int regno;
11334 if (IS_NAKED (arm_current_func_type ()))
11335 return;
11337 if (is_called_in_ARM_mode (current_function_decl))
11339 const char * name;
11341 if (GET_CODE (DECL_RTL (current_function_decl)) != MEM)
11342 abort ();
11343 if (GET_CODE (XEXP (DECL_RTL (current_function_decl), 0)) != SYMBOL_REF)
11344 abort ();
11345 name = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
11347 /* Generate code sequence to switch us into Thumb mode. */
11348 /* The .code 32 directive has already been emitted by
11349 ASM_DECLARE_FUNCTION_NAME. */
11350 asm_fprintf (f, "\torr\t%r, %r, #1\n", IP_REGNUM, PC_REGNUM);
11351 asm_fprintf (f, "\tbx\t%r\n", IP_REGNUM);
11353 /* Generate a label, so that the debugger will notice the
11354 change in instruction sets. This label is also used by
11355 the assembler to bypass the ARM code when this function
11356 is called from a Thumb encoded function elsewhere in the
11357 same file. Hence the definition of STUB_NAME here must
11358 agree with the definition in gas/config/tc-arm.c */
11360 #define STUB_NAME ".real_start_of"
11362 fprintf (f, "\t.code\t16\n");
11363 #ifdef ARM_PE
11364 if (arm_dllexport_name_p (name))
11365 name = arm_strip_name_encoding (name);
11366 #endif
11367 asm_fprintf (f, "\t.globl %s%U%s\n", STUB_NAME, name);
11368 fprintf (f, "\t.thumb_func\n");
11369 asm_fprintf (f, "%s%U%s:\n", STUB_NAME, name);
11372 if (current_function_pretend_args_size)
11374 if (cfun->machine->uses_anonymous_args)
11376 int num_pushes;
11378 fprintf (f, "\tpush\t{");
11380 num_pushes = ARM_NUM_INTS (current_function_pretend_args_size);
11382 for (regno = LAST_ARG_REGNUM + 1 - num_pushes;
11383 regno <= LAST_ARG_REGNUM;
11384 regno++)
11385 asm_fprintf (f, "%r%s", regno,
11386 regno == LAST_ARG_REGNUM ? "" : ", ");
11388 fprintf (f, "}\n");
11390 else
11391 asm_fprintf (f, "\tsub\t%r, %r, #%d\n",
11392 SP_REGNUM, SP_REGNUM,
11393 current_function_pretend_args_size);
11396 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
11397 if (THUMB_REG_PUSHED_P (regno))
11398 live_regs_mask |= 1 << regno;
11400 if (live_regs_mask || !leaf_function_p () || thumb_far_jump_used_p (1))
11401 live_regs_mask |= 1 << LR_REGNUM;
11403 if (TARGET_BACKTRACE)
11405 int offset;
11406 int work_register = 0;
11407 int wr;
11409 /* We have been asked to create a stack backtrace structure.
11410 The code looks like this:
11412 0 .align 2
11413 0 func:
11414 0 sub SP, #16 Reserve space for 4 registers.
11415 2 push {R7} Get a work register.
11416 4 add R7, SP, #20 Get the stack pointer before the push.
11417 6 str R7, [SP, #8] Store the stack pointer (before reserving the space).
11418 8 mov R7, PC Get hold of the start of this code plus 12.
11419 10 str R7, [SP, #16] Store it.
11420 12 mov R7, FP Get hold of the current frame pointer.
11421 14 str R7, [SP, #4] Store it.
11422 16 mov R7, LR Get hold of the current return address.
11423 18 str R7, [SP, #12] Store it.
11424 20 add R7, SP, #16 Point at the start of the backtrace structure.
11425 22 mov FP, R7 Put this value into the frame pointer. */
11427 if ((live_regs_mask & 0xFF) == 0)
11429 /* See if the a4 register is free. */
11431 if (regs_ever_live [LAST_ARG_REGNUM] == 0)
11432 work_register = LAST_ARG_REGNUM;
11433 else /* We must push a register of our own */
11434 live_regs_mask |= (1 << LAST_LO_REGNUM);
11437 if (work_register == 0)
11439 /* Select a register from the list that will be pushed to
11440 use as our work register. */
11441 for (work_register = (LAST_LO_REGNUM + 1); work_register--;)
11442 if ((1 << work_register) & live_regs_mask)
11443 break;
11446 asm_fprintf
11447 (f, "\tsub\t%r, %r, #16\t%@ Create stack backtrace structure\n",
11448 SP_REGNUM, SP_REGNUM);
11450 if (live_regs_mask)
11451 thumb_pushpop (f, live_regs_mask, 1);
11453 for (offset = 0, wr = 1 << 15; wr != 0; wr >>= 1)
11454 if (wr & live_regs_mask)
11455 offset += 4;
11457 asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
11458 offset + 16 + current_function_pretend_args_size);
11460 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
11461 offset + 4);
11463 /* Make sure that the instruction fetching the PC is in the right place
11464 to calculate "start of backtrace creation code + 12". */
11465 if (live_regs_mask)
11467 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
11468 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
11469 offset + 12);
11470 asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
11471 ARM_HARD_FRAME_POINTER_REGNUM);
11472 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
11473 offset);
11475 else
11477 asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
11478 ARM_HARD_FRAME_POINTER_REGNUM);
11479 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
11480 offset);
11481 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
11482 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
11483 offset + 12);
11486 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, LR_REGNUM);
11487 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
11488 offset + 8);
11489 asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
11490 offset + 12);
11491 asm_fprintf (f, "\tmov\t%r, %r\t\t%@ Backtrace structure created\n",
11492 ARM_HARD_FRAME_POINTER_REGNUM, work_register);
11494 else if (live_regs_mask)
11495 thumb_pushpop (f, live_regs_mask, 1);
11497 for (regno = 8; regno < 13; regno++)
11498 if (THUMB_REG_PUSHED_P (regno))
11499 high_regs_pushed++;
11501 if (high_regs_pushed)
11503 int pushable_regs = 0;
11504 int mask = live_regs_mask & 0xff;
11505 int next_hi_reg;
11507 for (next_hi_reg = 12; next_hi_reg > LAST_LO_REGNUM; next_hi_reg--)
11508 if (THUMB_REG_PUSHED_P (next_hi_reg))
11509 break;
11511 pushable_regs = mask;
11513 if (pushable_regs == 0)
11515 /* Desperation time -- this probably will never happen. */
11516 if (THUMB_REG_PUSHED_P (LAST_ARG_REGNUM))
11517 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, LAST_ARG_REGNUM);
11518 mask = 1 << LAST_ARG_REGNUM;
11521 while (high_regs_pushed > 0)
11523 for (regno = LAST_LO_REGNUM; regno >= 0; regno--)
11525 if (mask & (1 << regno))
11527 asm_fprintf (f, "\tmov\t%r, %r\n", regno, next_hi_reg);
11529 high_regs_pushed--;
11531 if (high_regs_pushed)
11533 for (next_hi_reg--; next_hi_reg > LAST_LO_REGNUM;
11534 next_hi_reg--)
11535 if (THUMB_REG_PUSHED_P (next_hi_reg))
11536 break;
11538 else
11540 mask &= ~((1 << regno) - 1);
11541 break;
11546 thumb_pushpop (f, mask, 1);
11549 if (pushable_regs == 0
11550 && (THUMB_REG_PUSHED_P (LAST_ARG_REGNUM)))
11551 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
11555 /* Handle the case of a double word load into a low register from
11556 a computed memory address. The computed address may involve a
11557 register which is overwritten by the load. */
11559 const char *
11560 thumb_load_double_from_address (operands)
11561 rtx *operands;
11563 rtx addr;
11564 rtx base;
11565 rtx offset;
11566 rtx arg1;
11567 rtx arg2;
11569 if (GET_CODE (operands[0]) != REG)
11570 abort ();
11572 if (GET_CODE (operands[1]) != MEM)
11573 abort ();
11575 /* Get the memory address. */
11576 addr = XEXP (operands[1], 0);
11578 /* Work out how the memory address is computed. */
11579 switch (GET_CODE (addr))
11581 case REG:
11582 operands[2] = gen_rtx (MEM, SImode,
11583 plus_constant (XEXP (operands[1], 0), 4));
11585 if (REGNO (operands[0]) == REGNO (addr))
11587 output_asm_insn ("ldr\t%H0, %2", operands);
11588 output_asm_insn ("ldr\t%0, %1", operands);
11590 else
11592 output_asm_insn ("ldr\t%0, %1", operands);
11593 output_asm_insn ("ldr\t%H0, %2", operands);
11595 break;
11597 case CONST:
11598 /* Compute <address> + 4 for the high order load. */
11599 operands[2] = gen_rtx (MEM, SImode,
11600 plus_constant (XEXP (operands[1], 0), 4));
11602 output_asm_insn ("ldr\t%0, %1", operands);
11603 output_asm_insn ("ldr\t%H0, %2", operands);
11604 break;
11606 case PLUS:
11607 arg1 = XEXP (addr, 0);
11608 arg2 = XEXP (addr, 1);
11610 if (CONSTANT_P (arg1))
11611 base = arg2, offset = arg1;
11612 else
11613 base = arg1, offset = arg2;
11615 if (GET_CODE (base) != REG)
11616 abort ();
11618 /* Catch the case of <address> = <reg> + <reg> */
11619 if (GET_CODE (offset) == REG)
11621 int reg_offset = REGNO (offset);
11622 int reg_base = REGNO (base);
11623 int reg_dest = REGNO (operands[0]);
11625 /* Add the base and offset registers together into the
11626 higher destination register. */
11627 asm_fprintf (asm_out_file, "\tadd\t%r, %r, %r",
11628 reg_dest + 1, reg_base, reg_offset);
11630 /* Load the lower destination register from the address in
11631 the higher destination register. */
11632 asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #0]",
11633 reg_dest, reg_dest + 1);
11635 /* Load the higher destination register from its own address
11636 plus 4. */
11637 asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #4]",
11638 reg_dest + 1, reg_dest + 1);
11640 else
11642 /* Compute <address> + 4 for the high order load. */
11643 operands[2] = gen_rtx (MEM, SImode,
11644 plus_constant (XEXP (operands[1], 0), 4));
11646 /* If the computed address is held in the low order register
11647 then load the high order register first, otherwise always
11648 load the low order register first. */
11649 if (REGNO (operands[0]) == REGNO (base))
11651 output_asm_insn ("ldr\t%H0, %2", operands);
11652 output_asm_insn ("ldr\t%0, %1", operands);
11654 else
11656 output_asm_insn ("ldr\t%0, %1", operands);
11657 output_asm_insn ("ldr\t%H0, %2", operands);
11660 break;
11662 case LABEL_REF:
11663 /* With no registers to worry about we can just load the value
11664 directly. */
11665 operands[2] = gen_rtx (MEM, SImode,
11666 plus_constant (XEXP (operands[1], 0), 4));
11668 output_asm_insn ("ldr\t%H0, %2", operands);
11669 output_asm_insn ("ldr\t%0, %1", operands);
11670 break;
11672 default:
11673 abort ();
11674 break;
11677 return "";
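/* Illustrative example (editorial note): for a DImode load into
   r0/r1 from the address held in r0 itself (the REG case above with
   REGNO (operands[0]) == REGNO (addr)), the high word (%H0) is
   fetched first so that the base register is not clobbered:

     ldr r1, [r0, #4]
     ldr r0, [r0]
*/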
11681 const char *
11682 thumb_output_move_mem_multiple (n, operands)
11683 int n;
11684 rtx * operands;
11686 rtx tmp;
11688 switch (n)
11690 case 2:
11691 if (REGNO (operands[4]) > REGNO (operands[5]))
11693 tmp = operands[4];
11694 operands[4] = operands[5];
11695 operands[5] = tmp;
11697 output_asm_insn ("ldmia\t%1!, {%4, %5}", operands);
11698 output_asm_insn ("stmia\t%0!, {%4, %5}", operands);
11699 break;
11701 case 3:
11702 if (REGNO (operands[4]) > REGNO (operands[5]))
11704 tmp = operands[4];
11705 operands[4] = operands[5];
11706 operands[5] = tmp;
11708 if (REGNO (operands[5]) > REGNO (operands[6]))
11710 tmp = operands[5];
11711 operands[5] = operands[6];
11712 operands[6] = tmp;
11714 if (REGNO (operands[4]) > REGNO (operands[5]))
11716 tmp = operands[4];
11717 operands[4] = operands[5];
11718 operands[5] = tmp;
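/* Editorial note: the three compare-and-swap steps above sort
   operands[4..6] into ascending register order, which the ldmia
   and stmia register lists below require.  */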
11721 output_asm_insn ("ldmia\t%1!, {%4, %5, %6}", operands);
11722 output_asm_insn ("stmia\t%0!, {%4, %5, %6}", operands);
11723 break;
11725 default:
11726 abort ();
11729 return "";
11732 /* Routines for generating rtl. */
11734 void
11735 thumb_expand_movstrqi (operands)
11736 rtx * operands;
11738 rtx out = copy_to_mode_reg (SImode, XEXP (operands[0], 0));
11739 rtx in = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
11740 HOST_WIDE_INT len = INTVAL (operands[2]);
11741 HOST_WIDE_INT offset = 0;
11743 while (len >= 12)
11745 emit_insn (gen_movmem12b (out, in, out, in));
11746 len -= 12;
11749 if (len >= 8)
11751 emit_insn (gen_movmem8b (out, in, out, in));
11752 len -= 8;
11755 if (len >= 4)
11757 rtx reg = gen_reg_rtx (SImode);
11758 emit_insn (gen_movsi (reg, gen_rtx (MEM, SImode, in)));
11759 emit_insn (gen_movsi (gen_rtx (MEM, SImode, out), reg));
11760 len -= 4;
11761 offset += 4;
11764 if (len >= 2)
11766 rtx reg = gen_reg_rtx (HImode);
11767 emit_insn (gen_movhi (reg, gen_rtx (MEM, HImode,
11768 plus_constant (in, offset))));
11769 emit_insn (gen_movhi (gen_rtx (MEM, HImode, plus_constant (out, offset)),
11770 reg));
11771 len -= 2;
11772 offset += 2;
11775 if (len)
11777 rtx reg = gen_reg_rtx (QImode);
11778 emit_insn (gen_movqi (reg, gen_rtx (MEM, QImode,
11779 plus_constant (in, offset))));
11780 emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (out, offset)),
11781 reg));
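/* Worked example (editorial note): for a 7-byte copy the tests
   above emit one SImode move (bytes 0-3, advancing OFFSET to 4),
   one HImode move at offset 4, and one QImode move at offset 6.  */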
11786 thumb_cmp_operand (op, mode)
11787 rtx op;
11788 enum machine_mode mode;
11790 return ((GET_CODE (op) == CONST_INT
11791 && (unsigned HOST_WIDE_INT) (INTVAL (op)) < 256)
11792 || register_operand (op, mode));
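/* Editorial note: the unsigned cast means only constants 0-255 are
   accepted; GEN_INT (-1) fails the test because it becomes a very
   large unsigned value.  This matches the range of the Thumb 8-bit
   CMP immediate.  */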
11795 static const char *
11796 thumb_condition_code (x, invert)
11797 rtx x;
11798 int invert;
11800 static const char * const conds[] =
11802 "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
11803 "hi", "ls", "ge", "lt", "gt", "le"
11805 int val;
11807 switch (GET_CODE (x))
11809 case EQ: val = 0; break;
11810 case NE: val = 1; break;
11811 case GEU: val = 2; break;
11812 case LTU: val = 3; break;
11813 case GTU: val = 8; break;
11814 case LEU: val = 9; break;
11815 case GE: val = 10; break;
11816 case LT: val = 11; break;
11817 case GT: val = 12; break;
11818 case LE: val = 13; break;
11819 default:
11820 abort ();
11823 return conds[val ^ invert];
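/* Editorial note: the table is laid out in complementary pairs
   (eq/ne, cs/cc, ...), so XORing the index with INVERT (0 or 1)
   yields the opposite condition; e.g. GE (index 10) inverts to
   LT (index 11).  */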
11826 /* Handle storing a half-word to memory during reload. */
11828 void
11829 thumb_reload_out_hi (operands)
11830 rtx * operands;
11832 emit_insn (gen_thumb_movhi_clobber (operands[0], operands[1], operands[2]));
11835 /* Handle reading a half-word from memory during reload. */
11837 void
11838 thumb_reload_in_hi (operands)
11839 rtx * operands ATTRIBUTE_UNUSED;
11841 abort ();
11844 /* Return the length of a function name prefix
11845 that starts with the character 'c'. */
11847 static int
11848 arm_get_strip_length (c)
11849 int c;
11851 switch (c)
11853 ARM_NAME_ENCODING_LENGTHS
11854 default: return 0;
11858 /* Return a pointer to a function's name with any
11859 and all prefix encodings stripped from it. */
11861 const char *
11862 arm_strip_name_encoding (name)
11863 const char * name;
11865 int skip;
11867 while ((skip = arm_get_strip_length (* name)))
11868 name += skip;
11870 return name;
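/* Illustrative example (editorial note, assuming '*' appears in
   ARM_NAME_ENCODING_LENGTHS with a strip length of 1, as
   arm_asm_output_labelref below relies on): a name such as "*foo"
   strips to "foo".  */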
11873 /* If there is a '*' anywhere in the name's prefix, then
11874 emit the stripped name verbatim, otherwise prepend an
11875 underscore if leading underscores are being used. */
11877 void
11878 arm_asm_output_labelref (stream, name)
11879 FILE * stream;
11880 const char * name;
11882 int skip;
11883 int verbatim = 0;
11885 while ((skip = arm_get_strip_length (* name)))
11887 verbatim |= (*name == '*');
11888 name += skip;
11891 if (verbatim)
11892 fputs (name, stream);
11893 else
11894 asm_fprintf (stream, "%U%s", name);
11897 rtx aof_pic_label;
11899 #ifdef AOF_ASSEMBLER
11900 /* Special functions only needed when producing AOF syntax assembler. */
11902 struct pic_chain
11904 struct pic_chain * next;
11905 const char * symname;
11908 static struct pic_chain * aof_pic_chain = NULL;
11911 aof_pic_entry (x)
11912 rtx x;
11914 struct pic_chain ** chainp;
11915 int offset;
11917 if (aof_pic_label == NULL_RTX)
11919 aof_pic_label = gen_rtx_SYMBOL_REF (Pmode, "x$adcons");
11922 for (offset = 0, chainp = &aof_pic_chain; *chainp;
11923 offset += 4, chainp = &(*chainp)->next)
11924 if ((*chainp)->symname == XSTR (x, 0))
11925 return plus_constant (aof_pic_label, offset);
11927 *chainp = (struct pic_chain *) xmalloc (sizeof (struct pic_chain));
11928 (*chainp)->next = NULL;
11929 (*chainp)->symname = XSTR (x, 0);
11930 return plus_constant (aof_pic_label, offset);
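/* Editorial note: each distinct symbol occupies one 4-byte DCD
   slot, so the Nth symbol added to the chain is addressed as
   x$adcons + 4 * N (see aof_dump_pic_table below).  */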
11933 void
11934 aof_dump_pic_table (f)
11935 FILE * f;
11937 struct pic_chain * chain;
11939 if (aof_pic_chain == NULL)
11940 return;
11942 asm_fprintf (f, "\tAREA |%r$$adcons|, BASED %r\n",
11943 PIC_OFFSET_TABLE_REGNUM,
11944 PIC_OFFSET_TABLE_REGNUM);
11945 fputs ("|x$adcons|\n", f);
11947 for (chain = aof_pic_chain; chain; chain = chain->next)
11949 fputs ("\tDCD\t", f);
11950 assemble_name (f, chain->symname);
11951 fputs ("\n", f);
11955 int arm_text_section_count = 1;
11957 char *
11958 aof_text_section ()
11960 static char buf[100];
11961 sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
11962 arm_text_section_count++);
11963 if (flag_pic)
11964 strcat (buf, ", PIC, REENTRANT");
11965 return buf;
11968 static int arm_data_section_count = 1;
11970 char *
11971 aof_data_section ()
11973 static char buf[100];
11974 sprintf (buf, "\tAREA |C$$data%d|, DATA", arm_data_section_count++);
11975 return buf;
11978 /* The AOF assembler is religiously strict about declarations of
11979 imported and exported symbols, so that it is impossible to declare
11980 a function as imported near the beginning of the file, and then to
11981 export it later on. It is, however, possible to delay the decision
11982 until all the functions in the file have been compiled. To get
11983 around this, we maintain a list of the imports and exports, and
11984 delete from it any that are subsequently defined. At the end of
11985 compilation we spit the remainder of the list out before the END
11986 directive. */
11988 struct import
11990 struct import * next;
11991 const char * name;
11994 static struct import * imports_list = NULL;
11996 void
11997 aof_add_import (name)
11998 const char * name;
12000 struct import * new;
12002 for (new = imports_list; new; new = new->next)
12003 if (new->name == name)
12004 return;
12006 new = (struct import *) xmalloc (sizeof (struct import));
12007 new->next = imports_list;
12008 imports_list = new;
12009 new->name = name;
12012 void
12013 aof_delete_import (name)
12014 const char * name;
12016 struct import ** old;
12018 for (old = &imports_list; *old; old = & (*old)->next)
12020 if ((*old)->name == name)
12022 *old = (*old)->next;
12023 return;
12028 int arm_main_function = 0;
12030 void
12031 aof_dump_imports (f)
12032 FILE * f;
12034 /* The AOF assembler needs this to cause the startup code to be extracted
12035 from the library. Bringing in __main causes the whole thing to work
12036 automagically. */
12037 if (arm_main_function)
12039 text_section ();
12040 fputs ("\tIMPORT __main\n", f);
12041 fputs ("\tDCD __main\n", f);
12044 /* Now dump the remaining imports. */
12045 while (imports_list)
12047 fprintf (f, "\tIMPORT\t");
12048 assemble_name (f, imports_list->name);
12049 fputc ('\n', f);
12050 imports_list = imports_list->next;
12054 static void
12055 aof_globalize_label (stream, name)
12056 FILE *stream;
12057 const char *name;
12059 default_globalize_label (stream, name);
12060 if (! strcmp (name, "main"))
12061 arm_main_function = 1;
12063 #endif /* AOF_ASSEMBLER */
12065 #ifdef OBJECT_FORMAT_ELF
12066 /* Switch to an arbitrary section NAME with attributes as specified
12067 by FLAGS. ALIGN specifies any known alignment requirements for
12068 the section; 0 if the default should be used.
12070 Differs from the default elf version only in the prefix character
12071 used before the section type. */
12073 static void
12074 arm_elf_asm_named_section (name, flags)
12075 const char *name;
12076 unsigned int flags;
12078 char flagchars[10], *f = flagchars;
12080 if (! named_section_first_declaration (name))
12082 fprintf (asm_out_file, "\t.section\t%s\n", name);
12083 return;
12086 if (!(flags & SECTION_DEBUG))
12087 *f++ = 'a';
12088 if (flags & SECTION_WRITE)
12089 *f++ = 'w';
12090 if (flags & SECTION_CODE)
12091 *f++ = 'x';
12092 if (flags & SECTION_SMALL)
12093 *f++ = 's';
12094 if (flags & SECTION_MERGE)
12095 *f++ = 'M';
12096 if (flags & SECTION_STRINGS)
12097 *f++ = 'S';
12098 if (flags & SECTION_TLS)
12099 *f++ = 'T';
12100 *f = '\0';
12102 fprintf (asm_out_file, "\t.section\t%s,\"%s\"", name, flagchars);
12104 if (!(flags & SECTION_NOTYPE))
12106 const char *type;
12108 if (flags & SECTION_BSS)
12109 type = "nobits";
12110 else
12111 type = "progbits";
12113 fprintf (asm_out_file, ",%%%s", type);
12115 if (flags & SECTION_ENTSIZE)
12116 fprintf (asm_out_file, ",%d", flags & SECTION_ENTSIZE);
12119 putc ('\n', asm_out_file);
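/* Illustrative example (editorial note; the section name is
   hypothetical): a first declaration of a writable, non-debug data
   section named ".mydata" (flags == SECTION_WRITE) produces:

     .section .mydata,"aw",%progbits

   using '%' rather than the default '@' as the type prefix.  */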
12121 #endif
12123 #ifndef ARM_PE
12124 /* Symbols in the text segment can be accessed without indirecting via the
12125 constant pool; it may take an extra binary operation, but this is still
12126 faster than indirecting via memory. Don't do this when not optimizing,
12127 since we won't be calculating all of the offsets necessary to do this
12128 simplification. */
12130 static void
12131 arm_encode_section_info (decl, first)
12132 tree decl;
12133 int first;
12135 /* This doesn't work with AOF syntax, since the string table may be in
12136 a different AREA. */
12137 #ifndef AOF_ASSEMBLER
12138 if (optimize > 0 && TREE_CONSTANT (decl)
12139 && (!flag_writable_strings || TREE_CODE (decl) != STRING_CST))
12141 rtx rtl = (TREE_CODE_CLASS (TREE_CODE (decl)) != 'd'
12142 ? TREE_CST_RTL (decl) : DECL_RTL (decl));
12143 SYMBOL_REF_FLAG (XEXP (rtl, 0)) = 1;
12145 #endif
12147 /* If we are referencing a function that is weak then encode a long call
12148 flag in the function name, otherwise if the function is static
12149 or known to be defined in this file then encode a short call flag. */
12150 if (first && TREE_CODE_CLASS (TREE_CODE (decl)) == 'd')
12152 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_WEAK (decl))
12153 arm_encode_call_attribute (decl, LONG_CALL_FLAG_CHAR);
12154 else if (! TREE_PUBLIC (decl))
12155 arm_encode_call_attribute (decl, SHORT_CALL_FLAG_CHAR);
12158 #endif /* !ARM_PE */
12160 static void
12161 arm_internal_label (stream, prefix, labelno)
12162 FILE *stream;
12163 const char *prefix;
12164 unsigned long labelno;
12166 if (arm_ccfsm_state == 3 && (unsigned) arm_target_label == labelno
12167 && !strcmp (prefix, "L"))
12169 arm_ccfsm_state = 0;
12170 arm_target_insn = NULL;
12172 default_internal_label (stream, prefix, labelno);
12175 /* Output code to add DELTA to the first argument, and then jump
12176 to FUNCTION. Used for C++ multiple inheritance. */
12178 static void
12179 arm_output_mi_thunk (file, thunk, delta, vcall_offset, function)
12180 FILE *file;
12181 tree thunk ATTRIBUTE_UNUSED;
12182 HOST_WIDE_INT delta;
12183 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED;
12184 tree function;
12186 int mi_delta = delta;
12187 const char *const mi_op = mi_delta < 0 ? "sub" : "add";
12188 int shift = 0;
12189 int this_regno = (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)))
12190 ? 1 : 0);
12191 if (mi_delta < 0)
12192 mi_delta = - mi_delta;
12193 while (mi_delta != 0)
12195 if ((mi_delta & (3 << shift)) == 0)
12196 shift += 2;
12197 else
12199 asm_fprintf (file, "\t%s\t%r, %r, #%d\n",
12200 mi_op, this_regno, this_regno,
12201 mi_delta & (0xff << shift));
12202 mi_delta &= ~(0xff << shift);
12203 shift += 8;
12206 fputs ("\tb\t", file);
12207 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
12208 if (NEED_PLT_RELOC)
12209 fputs ("(PLT)", file);
12210 fputc ('\n', file);
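/* Worked example (editorial note): with THIS in r0 (a non-aggregate
   return) and DELTA == 0x1234, the loop above decomposes the addend
   into shifted 8-bit chunks and emits:

     add r0, r0, #564     @ 0x234
     add r0, r0, #4096    @ 0x1000

   before the final branch to FUNCTION.  */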