/* Output routines for GCC for ARM.
   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002
   Free Software Foundation, Inc.
   Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
   and Martin Simmons (@harleqn.co.uk).
   More major hacks by Richard Earnshaw (rearnsha@arm.com).

   This file is part of GNU CC.

   GNU CC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 2, or (at your option)
   any later version.

   GNU CC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GNU CC; see the file COPYING.  If not, write to
   the Free Software Foundation, 59 Temple Place - Suite 330,
   Boston, MA 02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "reload.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "toplev.h"
#include "recog.h"
#include "ggc.h"
#include "except.h"
#include "c-pragma.h"
#include "integrate.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"
/* Forward definitions of types.  */
typedef struct minipool_node    Mnode;
typedef struct minipool_fixup   Mfix;

/* In order to improve the layout of the prototypes below
   some short type abbreviations are defined here.  */
#define Hint    HOST_WIDE_INT
#define Mmode   enum machine_mode
#define Ulong   unsigned long
#define Ccstar  const char *

const struct attribute_spec arm_attribute_table[];

/* Forward function declarations.  */
static void arm_add_gc_roots PARAMS ((void));
static int arm_gen_constant PARAMS ((enum rtx_code, Mmode, Hint, rtx, rtx, int, int));
static unsigned bit_count PARAMS ((Ulong));
static int arm_address_register_rtx_p PARAMS ((rtx, int));
static int arm_legitimate_index_p PARAMS ((enum machine_mode,
					   rtx, int));
static int const_ok_for_op PARAMS ((Hint, enum rtx_code));
static int eliminate_lr2ip PARAMS ((rtx *));
static rtx emit_multi_reg_push PARAMS ((int));
static rtx emit_sfm PARAMS ((int, int));
#ifndef AOF_ASSEMBLER
static bool arm_assemble_integer PARAMS ((rtx, unsigned int, int));
#endif
static Ccstar fp_const_from_val PARAMS ((REAL_VALUE_TYPE *));
static arm_cc get_arm_condition_code PARAMS ((rtx));
static void init_fpa_table PARAMS ((void));
static Hint int_log2 PARAMS ((Hint));
static rtx is_jump_table PARAMS ((rtx));
static Ccstar output_multi_immediate PARAMS ((rtx *, Ccstar, Ccstar, int, Hint));
static void print_multi_reg PARAMS ((FILE *, Ccstar, int, int));
static Mmode select_dominance_cc_mode PARAMS ((rtx, rtx, Hint));
static Ccstar shift_op PARAMS ((rtx, Hint *));
static struct machine_function * arm_init_machine_status PARAMS ((void));
static int number_of_first_bit_set PARAMS ((int));
static void replace_symbols_in_block PARAMS ((tree, rtx, rtx));
static void thumb_exit PARAMS ((FILE *, int, rtx));
static void thumb_pushpop PARAMS ((FILE *, int, int));
static Ccstar thumb_condition_code PARAMS ((rtx, int));
static rtx is_jump_table PARAMS ((rtx));
static Hint get_jump_table_size PARAMS ((rtx));
static Mnode * move_minipool_fix_forward_ref PARAMS ((Mnode *, Mnode *, Hint));
static Mnode * add_minipool_forward_ref PARAMS ((Mfix *));
static Mnode * move_minipool_fix_backward_ref PARAMS ((Mnode *, Mnode *, Hint));
static Mnode * add_minipool_backward_ref PARAMS ((Mfix *));
static void assign_minipool_offsets PARAMS ((Mfix *));
static void arm_print_value PARAMS ((FILE *, rtx));
static void dump_minipool PARAMS ((rtx));
static int arm_barrier_cost PARAMS ((rtx));
static Mfix * create_fix_barrier PARAMS ((Mfix *, Hint));
static void push_minipool_barrier PARAMS ((rtx, Hint));
static void push_minipool_fix PARAMS ((rtx, Hint, rtx *, Mmode, rtx));
static void note_invalid_constants PARAMS ((rtx, Hint));
static int current_file_function_operand PARAMS ((rtx));
static Ulong arm_compute_save_reg0_reg12_mask PARAMS ((void));
static Ulong arm_compute_save_reg_mask PARAMS ((void));
static Ulong arm_isr_value PARAMS ((tree));
static Ulong arm_compute_func_type PARAMS ((void));
static tree arm_handle_fndecl_attribute PARAMS ((tree *, tree, tree, int, bool *));
static tree arm_handle_isr_attribute PARAMS ((tree *, tree, tree, int, bool *));
static void arm_output_function_epilogue PARAMS ((FILE *, Hint));
static void arm_output_function_prologue PARAMS ((FILE *, Hint));
static void thumb_output_function_prologue PARAMS ((FILE *, Hint));
static int arm_comp_type_attributes PARAMS ((tree, tree));
static void arm_set_default_type_attributes PARAMS ((tree));
static int arm_adjust_cost PARAMS ((rtx, rtx, rtx, int));
static int count_insns_for_constant PARAMS ((HOST_WIDE_INT, int));
static int arm_get_strip_length PARAMS ((int));
static bool arm_function_ok_for_sibcall PARAMS ((tree, tree));
#ifdef OBJECT_FORMAT_ELF
static void arm_elf_asm_named_section PARAMS ((const char *, unsigned int));
#endif
#ifndef ARM_PE
static void arm_encode_section_info PARAMS ((tree, int));
#endif
#ifdef AOF_ASSEMBLER
static void aof_globalize_label PARAMS ((FILE *, const char *));
#endif
static void arm_internal_label PARAMS ((FILE *, const char *, unsigned long));
static void arm_output_mi_thunk PARAMS ((FILE *, tree,
					 HOST_WIDE_INT,
					 HOST_WIDE_INT, tree));

#undef Hint
#undef Mmode
#undef Ulong
#undef Ccstar
/* Initialize the GCC target structure.  */
#ifdef TARGET_DLLIMPORT_DECL_ATTRIBUTES
#undef  TARGET_MERGE_DECL_ATTRIBUTES
#define TARGET_MERGE_DECL_ATTRIBUTES merge_dllimport_decl_attributes
#endif

#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE arm_attribute_table

#ifdef AOF_ASSEMBLER
#undef  TARGET_ASM_BYTE_OP
#define TARGET_ASM_BYTE_OP "\tDCB\t"
#undef  TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\tDCW\t"
#undef  TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\tDCD\t"
#undef  TARGET_ASM_GLOBALIZE_LABEL
#define TARGET_ASM_GLOBALIZE_LABEL aof_globalize_label
#else
#undef  TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP NULL
#undef  TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER arm_assemble_integer
#endif

#undef  TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE arm_output_function_prologue

#undef  TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE arm_output_function_epilogue

#undef  TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES arm_comp_type_attributes

#undef  TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES arm_set_default_type_attributes

#undef  TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS arm_init_builtins

#undef  TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN arm_expand_builtin

#undef  TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST arm_adjust_cost

#undef  TARGET_ENCODE_SECTION_INFO
#ifdef ARM_PE
#define TARGET_ENCODE_SECTION_INFO arm_pe_encode_section_info
#else
#define TARGET_ENCODE_SECTION_INFO arm_encode_section_info
#endif

#undef  TARGET_STRIP_NAME_ENCODING
#define TARGET_STRIP_NAME_ENCODING arm_strip_name_encoding

#undef  TARGET_ASM_INTERNAL_LABEL
#define TARGET_ASM_INTERNAL_LABEL arm_internal_label

#undef  TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL arm_function_ok_for_sibcall

#undef  TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK arm_output_mi_thunk
#undef  TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall

struct gcc_target targetm = TARGET_INITIALIZER;
/* Obstack for minipool constant handling.  */
static struct obstack minipool_obstack;
static char *         minipool_startobj;

/* The maximum number of insns skipped which
   will be conditionalised if possible.  */
static int max_insns_skipped = 5;

extern FILE * asm_out_file;

/* True if we are currently building a constant table.  */
int making_const_table;

/* Define the information needed to generate branch insns.  This is
   stored from the compare operation.  */
rtx arm_compare_op0, arm_compare_op1;

/* What type of floating point are we tuning for?  */
enum floating_point_type arm_fpu;

/* What type of floating point instructions are available?  */
enum floating_point_type arm_fpu_arch;

/* What program mode is the cpu running in?  26-bit mode or 32-bit mode.  */
enum prog_mode_type arm_prgmode;

/* Set by the -mfp=... option.  */
const char * target_fp_name = NULL;

/* Used to parse the -mstructure_size_boundary command line option.  */
const char * structure_size_string = NULL;
int arm_structure_size_boundary = DEFAULT_STRUCTURE_SIZE_BOUNDARY;

/* Bit values used to identify processor capabilities.  */
#define FL_CO_PROC    (1 << 0)        /* Has external co-processor bus.  */
#define FL_FAST_MULT  (1 << 1)        /* Fast multiply.  */
#define FL_MODE26     (1 << 2)        /* 26-bit mode support.  */
#define FL_MODE32     (1 << 3)        /* 32-bit mode support.  */
#define FL_ARCH4      (1 << 4)        /* Architecture rel 4.  */
#define FL_ARCH5      (1 << 5)        /* Architecture rel 5.  */
#define FL_THUMB      (1 << 6)        /* Thumb aware.  */
#define FL_LDSCHED    (1 << 7)        /* Load scheduling necessary.  */
#define FL_STRONG     (1 << 8)        /* StrongARM.  */
#define FL_ARCH5E     (1 << 9)        /* DSP extensions to v5.  */
#define FL_XSCALE     (1 << 10)       /* XScale.  */

/* The bits in this mask specify which
   instructions we are allowed to generate.  */
static unsigned long insn_flags = 0;

/* The bits in this mask specify which instruction scheduling options should
   be used.  Note - there is an overlap with FL_FAST_MULT.  For some
   hardware we want to be able to generate the multiply instructions, but to
   tune as if they were not present in the architecture.  */
static unsigned long tune_flags = 0;

/* The following are used in the arm.md file as equivalents to bits
   in the above two flag variables.  */

/* Nonzero if this is an "M" variant of the processor.  */
int arm_fast_multiply = 0;

/* Nonzero if this chip supports the ARM Architecture 4 extensions.  */
int arm_arch4 = 0;

/* Nonzero if this chip supports the ARM Architecture 5 extensions.  */
int arm_arch5 = 0;

/* Nonzero if this chip supports the ARM Architecture 5E extensions.  */
int arm_arch5e = 0;

/* Nonzero if this chip can benefit from load scheduling.  */
int arm_ld_sched = 0;

/* Nonzero if this chip is a StrongARM.  */
int arm_is_strong = 0;

/* Nonzero if this chip is an XScale.  */
int arm_is_xscale = 0;

/* Nonzero if this chip is an ARM6 or an ARM7.  */
int arm_is_6_or_7 = 0;

/* Nonzero if generating Thumb instructions.  */
int thumb_code = 0;

/* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
   must report the mode of the memory reference from PRINT_OPERAND to
   PRINT_OPERAND_ADDRESS.  */
enum machine_mode output_memory_reference_mode;

/* The register number to be used for the PIC offset register.  */
const char * arm_pic_register_string = NULL;
int arm_pic_register = INVALID_REGNUM;

/* Set to 1 when a return insn is output; this means that the epilogue
   is not needed.  */
int return_used_this_function;

/* Set to 1 after arm_reorg has started.  Reset at the start of
   the next function.  */
static int after_arm_reorg = 0;

/* The maximum number of insns to be used when loading a constant.  */
static int arm_constant_limit = 3;

/* For an explanation of these variables, see final_prescan_insn below.  */
int arm_ccfsm_state;
enum arm_cond_code arm_current_cc;
rtx arm_target_insn;
int arm_target_label;

/* The condition codes of the ARM, and the inverse function.  */
static const char * const arm_condition_codes[] =
{
  "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
  "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
};

#define streq(string1, string2) (strcmp (string1, string2) == 0)
/* Initialization code.  */

struct processors
{
  const char *const name;
  const unsigned long flags;
};

/* Not all of these give usefully different compilation alternatives,
   but there is no simple way of generalizing them.  */
static const struct processors all_cores[] =
{
  /* ARM Cores */

  {"arm2",          FL_CO_PROC | FL_MODE26 },
  {"arm250",        FL_CO_PROC | FL_MODE26 },
  {"arm3",          FL_CO_PROC | FL_MODE26 },
  {"arm6",          FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm60",         FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm600",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm610",                     FL_MODE26 | FL_MODE32 },
  {"arm620",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7",          FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  /* arm7m doesn't exist on its own, but only with D, (and I), but
     those don't alter the code, so arm7m is sometimes used.  */
  {"arm7m",         FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7d",         FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dm",        FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7di",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dmi",       FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm70",         FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700i",       FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm710",                     FL_MODE26 | FL_MODE32 },
  {"arm710t",                    FL_MODE26 | FL_MODE32 | FL_THUMB },
  {"arm720",                     FL_MODE26 | FL_MODE32 },
  {"arm720t",                    FL_MODE26 | FL_MODE32 | FL_THUMB },
  {"arm740t",                    FL_MODE26 | FL_MODE32 | FL_THUMB },
  {"arm710c",                    FL_MODE26 | FL_MODE32 },
  {"arm7100",                    FL_MODE26 | FL_MODE32 },
  {"arm7500",                    FL_MODE26 | FL_MODE32 },
  /* Doesn't have an external co-proc, but does have embedded fpu.  */
  {"arm7500fe",     FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7tdmi",      FL_CO_PROC |             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  {"arm8",                       FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm810",                     FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm9",                                   FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm920",                                 FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm920t",                                FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm940t",                                FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm9tdmi",                               FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm9e",                                  FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"strongarm",                  FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm110",               FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm1100",              FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm1110",              FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"arm10tdmi",                              FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_ARCH5 },
  {"arm1020t",                               FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_ARCH5 },
  {"xscale",                                 FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_STRONG | FL_ARCH5 | FL_ARCH5E | FL_XSCALE },

  {NULL, 0}
};

static const struct processors all_architectures[] =
{
  /* ARM Architectures */

  { "armv2",     FL_CO_PROC | FL_MODE26 },
  { "armv2a",    FL_CO_PROC | FL_MODE26 },
  { "armv3",     FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  { "armv3m",    FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  { "armv4",     FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 },
  /* Strictly, FL_MODE26 is a permitted option for v4t, but there are no
     implementations that support it, so we will leave it out for now.  */
  { "armv4t",    FL_CO_PROC |             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  { "armv5",     FL_CO_PROC |             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
  { "armv5t",    FL_CO_PROC |             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
  { "armv5te",   FL_CO_PROC |             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 | FL_ARCH5E },
  { NULL, 0 }
};

/* This is a magic structure.  The 'string' field is magically filled in
   with a pointer to the value specified by the user on the command line,
   assuming that the user has specified such a value.  */

struct arm_cpu_select arm_select[] =
{
  /* string       name            processors  */
  { NULL,       "-mcpu=",       all_cores  },
  { NULL,       "-march=",      all_architectures },
  { NULL,       "-mtune=",      all_cores }
};
/* Return the number of bits set in VALUE.  */
static unsigned
bit_count (value)
     unsigned long value;
{
  unsigned long count = 0;

  while (value)
    {
      count++;
      value &= value - 1;  /* Clear the least-significant set bit.  */
    }

  return count;
}
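
/* For example, bit_count (0x29) loops just three times:
   0x29 & 0x28 == 0x28, 0x28 & 0x27 == 0x20, 0x20 & 0x1f == 0;
   one iteration per set bit rather than one per bit position.  */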
/* Fix up any incompatible options that the user has specified.
   This has now turned into a maze.  */
void
arm_override_options ()
{
  unsigned i;

  /* Set up the flags based on the cpu/architecture selected by the user.  */
  for (i = ARRAY_SIZE (arm_select); i--;)
    {
      struct arm_cpu_select * ptr = arm_select + i;

      if (ptr->string != NULL && ptr->string[0] != '\0')
        {
          const struct processors * sel;

          for (sel = ptr->processors; sel->name != NULL; sel++)
            if (streq (ptr->string, sel->name))
              {
                if (i == 2)
                  tune_flags = sel->flags;
                else
                  {
                    /* If we have been given an architecture and a processor
                       make sure that they are compatible.  We only generate
                       a warning though, and we prefer the CPU over the
                       architecture.  */
                    if (insn_flags != 0 && (insn_flags ^ sel->flags))
                      warning ("switch -mcpu=%s conflicts with -march= switch",
                               ptr->string);

                    insn_flags = sel->flags;
                  }

                break;
              }

          if (sel->name == NULL)
            error ("bad value (%s) for %s switch", ptr->string, ptr->name);
        }
    }

  /* If the user did not specify a processor, choose one for them.  */
  if (insn_flags == 0)
    {
      const struct processors * sel;
      unsigned int sought;
      static const struct cpu_default
      {
        const int cpu;
        const char *const name;
      }
      cpu_defaults[] =
      {
        { TARGET_CPU_arm2,      "arm2" },
        { TARGET_CPU_arm6,      "arm6" },
        { TARGET_CPU_arm610,    "arm610" },
        { TARGET_CPU_arm710,    "arm710" },
        { TARGET_CPU_arm7m,     "arm7m" },
        { TARGET_CPU_arm7500fe, "arm7500fe" },
        { TARGET_CPU_arm7tdmi,  "arm7tdmi" },
        { TARGET_CPU_arm8,      "arm8" },
        { TARGET_CPU_arm810,    "arm810" },
        { TARGET_CPU_arm9,      "arm9" },
        { TARGET_CPU_strongarm, "strongarm" },
        { TARGET_CPU_xscale,    "xscale" },
        { TARGET_CPU_generic,   "arm" },
        { 0, 0 }
      };
      const struct cpu_default * def;

      /* Find the default.  */
      for (def = cpu_defaults; def->name; def++)
        if (def->cpu == TARGET_CPU_DEFAULT)
          break;

      /* Make sure we found the default CPU.  */
      if (def->name == NULL)
        abort ();

      /* Find the default CPU's flags.  */
      for (sel = all_cores; sel->name != NULL; sel++)
        if (streq (def->name, sel->name))
          break;

      if (sel->name == NULL)
        abort ();

      insn_flags = sel->flags;

      /* Now check to see if the user has specified some command line
         switches that require certain abilities from the cpu.  */
      sought = 0;

      if (TARGET_INTERWORK || TARGET_THUMB)
        {
          sought |= (FL_THUMB | FL_MODE32);

          /* Force apcs-32 to be used for interworking.  */
          target_flags |= ARM_FLAG_APCS_32;

          /* There are no ARM processors that support both APCS-26 and
             interworking.  Therefore we force FL_MODE26 to be removed
             from insn_flags here (if it was set), so that the search
             below will always be able to find a compatible processor.  */
          insn_flags &= ~FL_MODE26;
        }
      else if (!TARGET_APCS_32)
        sought |= FL_MODE26;

      if (sought != 0 && ((sought & insn_flags) != sought))
        {
          /* Try to locate a CPU type that supports all of the abilities
             of the default CPU, plus the extra abilities requested by
             the user.  */
          for (sel = all_cores; sel->name != NULL; sel++)
            if ((sel->flags & sought) == (sought | insn_flags))
              break;

          if (sel->name == NULL)
            {
              unsigned current_bit_count = 0;
              const struct processors * best_fit = NULL;

              /* Ideally we would like to issue an error message here
                 saying that it was not possible to find a CPU compatible
                 with the default CPU, but which also supports the command
                 line options specified by the programmer, and so they
                 ought to use the -mcpu=<name> command line option to
                 override the default CPU type.

                 Unfortunately this does not work with multilibing.  We
                 need to be able to support multilibs for -mapcs-26 and for
                 -mthumb-interwork and there is no CPU that can support both
                 options.  Instead if we cannot find a cpu that has both the
                 characteristics of the default cpu and the given command line
                 options we scan the array again looking for a best match.  */
              for (sel = all_cores; sel->name != NULL; sel++)
                if ((sel->flags & sought) == sought)
                  {
                    unsigned count;

                    count = bit_count (sel->flags & insn_flags);

                    if (count >= current_bit_count)
                      {
                        best_fit = sel;
                        current_bit_count = count;
                      }
                  }

              if (best_fit == NULL)
                abort ();
              else
                sel = best_fit;
            }

          insn_flags = sel->flags;
        }
    }

  /* If tuning has not been specified, tune for whichever processor or
     architecture has been selected.  */
  if (tune_flags == 0)
    tune_flags = insn_flags;

  /* Make sure that the processor choice does not conflict with any of the
     other command line choices.  */
  if (TARGET_APCS_32 && !(insn_flags & FL_MODE32))
    {
      /* If APCS-32 was not the default then it must have been set by the
         user, so issue a warning message.  If the user has specified
         "-mapcs-32 -mcpu=arm2" then we lose here.  */
      if ((TARGET_DEFAULT & ARM_FLAG_APCS_32) == 0)
        warning ("target CPU does not support APCS-32" );
      target_flags &= ~ARM_FLAG_APCS_32;
    }
  else if (!TARGET_APCS_32 && !(insn_flags & FL_MODE26))
    {
      warning ("target CPU does not support APCS-26" );
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_INTERWORK && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support interworking" );
      target_flags &= ~ARM_FLAG_INTERWORK;
    }

  if (TARGET_THUMB && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support THUMB instructions");
      target_flags &= ~ARM_FLAG_THUMB;
    }

  if (TARGET_APCS_FRAME && TARGET_THUMB)
    {
      /* warning ("ignoring -mapcs-frame because -mthumb was used"); */
      target_flags &= ~ARM_FLAG_APCS_FRAME;
    }

  /* TARGET_BACKTRACE calls leaf_function_p, which causes a crash if done
     from here where no function is being compiled currently.  */
  if ((target_flags & (THUMB_FLAG_LEAF_BACKTRACE | THUMB_FLAG_BACKTRACE))
      && TARGET_ARM)
    warning ("enabling backtrace support is only meaningful when compiling for the Thumb");

  if (TARGET_ARM && TARGET_CALLEE_INTERWORKING)
    warning ("enabling callee interworking support is only meaningful when compiling for the Thumb");

  if (TARGET_ARM && TARGET_CALLER_INTERWORKING)
    warning ("enabling caller interworking support is only meaningful when compiling for the Thumb");

  /* If interworking is enabled then APCS-32 must be selected as well.  */
  if (TARGET_INTERWORK)
    {
      if (!TARGET_APCS_32)
        warning ("interworking forces APCS-32 to be used" );
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_APCS_STACK && !TARGET_APCS_FRAME)
    {
      warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
      target_flags |= ARM_FLAG_APCS_FRAME;
    }

  if (TARGET_POKE_FUNCTION_NAME)
    target_flags |= ARM_FLAG_APCS_FRAME;

  if (TARGET_APCS_REENT && flag_pic)
    error ("-fpic and -mapcs-reent are incompatible");

  if (TARGET_APCS_REENT)
    warning ("APCS reentrant code not supported.  Ignored");

  /* If this target is normally configured to use APCS frames, warn if they
     are turned off and debugging is turned on.  */
  if (TARGET_ARM
      && write_symbols != NO_DEBUG
      && !TARGET_APCS_FRAME
      && (TARGET_DEFAULT & ARM_FLAG_APCS_FRAME))
    warning ("-g with -mno-apcs-frame may not give sensible debugging");

  /* If stack checking is disabled, we can use r10 as the PIC register,
     which keeps r9 available.  */
  if (flag_pic)
    arm_pic_register = TARGET_APCS_STACK ? 9 : 10;

  if (TARGET_APCS_FLOAT)
    warning ("passing floating point arguments in fp regs not yet supported");

  /* Initialize boolean versions of the flags, for use in the arm.md file.  */
  arm_fast_multiply = (insn_flags & FL_FAST_MULT) != 0;
  arm_arch4         = (insn_flags & FL_ARCH4) != 0;
  arm_arch5         = (insn_flags & FL_ARCH5) != 0;
  arm_arch5e        = (insn_flags & FL_ARCH5E) != 0;
  arm_is_xscale     = (insn_flags & FL_XSCALE) != 0;

  arm_ld_sched      = (tune_flags & FL_LDSCHED) != 0;
  arm_is_strong     = (tune_flags & FL_STRONG) != 0;
  thumb_code        = (TARGET_ARM == 0);
  arm_is_6_or_7     = (((tune_flags & (FL_MODE26 | FL_MODE32))
                        && !(tune_flags & FL_ARCH4))) != 0;

  /* Default value for floating point code... if no co-processor
     bus, then schedule for emulated floating point.  Otherwise,
     assume the user has an FPA.
     Note: this does not prevent use of floating point instructions,
     -msoft-float does that.  */
  arm_fpu = (tune_flags & FL_CO_PROC) ? FP_HARD : FP_SOFT3;

  if (target_fp_name)
    {
      if (streq (target_fp_name, "2"))
        arm_fpu_arch = FP_SOFT2;
      else if (streq (target_fp_name, "3"))
        arm_fpu_arch = FP_SOFT3;
      else
        error ("invalid floating point emulation option: -mfpe-%s",
               target_fp_name);
    }
  else
    arm_fpu_arch = FP_DEFAULT;

  if (TARGET_FPE && arm_fpu != FP_HARD)
    arm_fpu = FP_SOFT2;

  /* For arm2/3 there is no need to do any scheduling if there is only
     a floating point emulator, or we are doing software floating-point.  */
  if ((TARGET_SOFT_FLOAT || arm_fpu != FP_HARD)
      && (tune_flags & FL_MODE32) == 0)
    flag_schedule_insns = flag_schedule_insns_after_reload = 0;

  arm_prgmode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;

  if (structure_size_string != NULL)
    {
      int size = strtol (structure_size_string, NULL, 0);

      if (size == 8 || size == 32)
        arm_structure_size_boundary = size;
      else
        warning ("structure size boundary can only be set to 8 or 32");
    }

  if (arm_pic_register_string != NULL)
    {
      int pic_register = decode_reg_name (arm_pic_register_string);

      if (!flag_pic)
        warning ("-mpic-register= is useless without -fpic");

      /* Prevent the user from choosing an obviously stupid PIC register.  */
      else if (pic_register < 0 || call_used_regs[pic_register]
               || pic_register == HARD_FRAME_POINTER_REGNUM
               || pic_register == STACK_POINTER_REGNUM
               || pic_register >= PC_REGNUM)
        error ("unable to use '%s' for PIC register", arm_pic_register_string);
      else
        arm_pic_register = pic_register;
    }

  if (TARGET_THUMB && flag_schedule_insns)
    {
      /* Don't warn since it's on by default in -O2.  */
      flag_schedule_insns = 0;
    }

  /* If optimizing for space, don't synthesize constants.
     For processors with load scheduling, it never costs more than 2 cycles
     to load a constant, and the load scheduler may well reduce that to 1.  */
  if (optimize_size || (tune_flags & FL_LDSCHED))
    arm_constant_limit = 1;

  if (arm_is_xscale)
    arm_constant_limit = 2;

  /* If optimizing for size, bump the number of instructions that we
     are prepared to conditionally execute (even on a StrongARM).
     Otherwise for the StrongARM, which has early execution of branches,
     a sequence that is worth skipping is shorter.  */
  if (optimize_size)
    max_insns_skipped = 6;
  else if (arm_is_strong)
    max_insns_skipped = 3;

  /* Register global variables with the garbage collector.  */
  arm_add_gc_roots ();
}
static void
arm_add_gc_roots ()
{
  gcc_obstack_init (&minipool_obstack);
  minipool_startobj = (char *) obstack_alloc (&minipool_obstack, 0);
}
/* A table of known ARM exception types.
   For use with the interrupt function attribute.  */

typedef struct
{
  const char *const arg;
  const unsigned long return_value;
}
isr_attribute_arg;

static const isr_attribute_arg isr_attribute_args [] =
{
  { "IRQ",   ARM_FT_ISR },
  { "irq",   ARM_FT_ISR },
  { "FIQ",   ARM_FT_FIQ },
  { "fiq",   ARM_FT_FIQ },
  { "ABORT", ARM_FT_ISR },
  { "abort", ARM_FT_ISR },
  { "ABORT", ARM_FT_ISR },
  { "abort", ARM_FT_ISR },
  { "UNDEF", ARM_FT_EXCEPTION },
  { "undef", ARM_FT_EXCEPTION },
  { "SWI",   ARM_FT_EXCEPTION },
  { "swi",   ARM_FT_EXCEPTION },
  { NULL,    ARM_FT_NORMAL }
};
/* Returns the (interrupt) function type of the current
   function, or ARM_FT_UNKNOWN if the type cannot be determined.  */

static unsigned long
arm_isr_value (argument)
     tree argument;
{
  const isr_attribute_arg * ptr;
  const char * arg;

  /* No argument - default to IRQ.  */
  if (argument == NULL_TREE)
    return ARM_FT_ISR;

  /* Get the value of the argument.  */
  if (TREE_VALUE (argument) == NULL_TREE
      || TREE_CODE (TREE_VALUE (argument)) != STRING_CST)
    return ARM_FT_UNKNOWN;

  arg = TREE_STRING_POINTER (TREE_VALUE (argument));

  /* Check it against the list of known arguments.  */
  for (ptr = isr_attribute_args; ptr->arg != NULL; ptr ++)
    if (streq (arg, ptr->arg))
      return ptr->return_value;

  /* An unrecognized interrupt type.  */
  return ARM_FT_UNKNOWN;
}
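
/* As an illustration, a (hypothetical) handler declared as

     void handler (void) __attribute__ ((interrupt ("IRQ")));

   arrives here with ARGUMENT being the STRING_CST "IRQ" and is mapped
   to ARM_FT_ISR by the table above; a bare `interrupt' attribute with
   no argument likewise defaults to ARM_FT_ISR.  */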
/* Computes the type of the current function.  */

static unsigned long
arm_compute_func_type ()
{
  unsigned long type = ARM_FT_UNKNOWN;
  tree a;
  tree attr;

  if (TREE_CODE (current_function_decl) != FUNCTION_DECL)
    abort ();

  /* Decide if the current function is volatile.  Such functions
     never return, and many memory cycles can be saved by not storing
     register values that will never be needed again.  This optimization
     was added to speed up context switching in a kernel application.  */
  if (optimize > 0
      && current_function_nothrow
      && TREE_THIS_VOLATILE (current_function_decl))
    type |= ARM_FT_VOLATILE;

  if (current_function_needs_context)
    type |= ARM_FT_NESTED;

  attr = DECL_ATTRIBUTES (current_function_decl);

  a = lookup_attribute ("naked", attr);
  if (a != NULL_TREE)
    type |= ARM_FT_NAKED;

  if (cfun->machine->eh_epilogue_sp_ofs != NULL_RTX)
    type |= ARM_FT_EXCEPTION_HANDLER;
  else
    {
      a = lookup_attribute ("isr", attr);
      if (a == NULL_TREE)
        a = lookup_attribute ("interrupt", attr);

      if (a == NULL_TREE)
        type |= TARGET_INTERWORK ? ARM_FT_INTERWORKED : ARM_FT_NORMAL;
      else
        type |= arm_isr_value (TREE_VALUE (a));
    }

  return type;
}

/* Returns the type of the current function.  */

unsigned long
arm_current_func_type ()
{
  if (ARM_FUNC_TYPE (cfun->machine->func_type) == ARM_FT_UNKNOWN)
    cfun->machine->func_type = arm_compute_func_type ();

  return cfun->machine->func_type;
}
/* Return 1 if it is possible to return using a single instruction.  */

int
use_return_insn (iscond)
     int iscond;
{
  int regno;
  unsigned int func_type;
  unsigned long saved_int_regs;

  /* Never use a return instruction before reload has run.  */
  if (!reload_completed)
    return 0;

  func_type = arm_current_func_type ();

  /* Naked functions and volatile functions need special
     consideration.  */
  if (func_type & (ARM_FT_VOLATILE | ARM_FT_NAKED))
    return 0;

  /* As do variadic functions.  */
  if (current_function_pretend_args_size
      || cfun->machine->uses_anonymous_args
      /* Or if the function calls __builtin_eh_return ().  */
      || ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER
      /* Or if there is no frame pointer and there is a stack adjustment.  */
      || ((arm_get_frame_size () + current_function_outgoing_args_size != 0)
          && !frame_pointer_needed))
    return 0;

  saved_int_regs = arm_compute_save_reg_mask ();

  /* Can't be done if interworking with Thumb, and any registers have been
     stacked.  */
  if (TARGET_INTERWORK && saved_int_regs != 0)
    return 0;

  /* On StrongARM, conditional returns are expensive if they aren't
     taken and multiple registers have been stacked.  */
  if (iscond && arm_is_strong)
    {
      /* Conditional return when just the LR is stored is a simple
         conditional-load instruction, that's not expensive.  */
      if (saved_int_regs != 0 && saved_int_regs != (1 << LR_REGNUM))
        return 0;

      if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
        return 0;
    }

  /* If there are saved registers but the LR isn't saved, then we need
     two instructions for the return.  */
  if (saved_int_regs && !(saved_int_regs & (1 << LR_REGNUM)))
    return 0;

  /* Can't be done if any of the FPU regs are pushed,
     since this also requires an insn.  */
  if (TARGET_HARD_FLOAT)
    for (regno = FIRST_ARM_FP_REGNUM; regno <= LAST_ARM_FP_REGNUM; regno++)
      if (regs_ever_live[regno] && !call_used_regs[regno])
        return 0;

  return 1;
}
/* Return TRUE if int I is a valid immediate ARM constant.  */

int
const_ok_for_arm (i)
     HOST_WIDE_INT i;
{
  unsigned HOST_WIDE_INT mask = ~(unsigned HOST_WIDE_INT)0xFF;

  /* For machines with >32 bit HOST_WIDE_INT, the bits above bit 31 must
     be all zero, or all one.  */
  if ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff) != 0
      && ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff)
          != ((~(unsigned HOST_WIDE_INT) 0)
              & ~(unsigned HOST_WIDE_INT) 0xffffffff)))
    return FALSE;

  /* Fast return for 0 and powers of 2.  */
  if ((i & (i - 1)) == 0)
    return TRUE;

  do
    {
      if ((i & mask & (unsigned HOST_WIDE_INT) 0xffffffff) == 0)
        return TRUE;
      mask =
          (mask << 2) | ((mask & (unsigned HOST_WIDE_INT) 0xffffffff)
                         >> (32 - 2)) | ~(unsigned HOST_WIDE_INT) 0xffffffff;
    }
  while (mask != ~(unsigned HOST_WIDE_INT) 0xFF);

  return FALSE;
}
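
/* An ARM data-processing immediate is an 8-bit value rotated right by
   an even number of bits, so, for example, 0xff, 0x00ff0000 and
   0xff000000 are all valid, while 0x101 (spans more than 8 bits) and
   0x01fe0000 (would need an odd rotation) are not, and must instead be
   synthesized by arm_split_constant / arm_gen_constant below.  */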
/* Return true if I is a valid constant for the operation CODE.  */
static int
const_ok_for_op (i, code)
     HOST_WIDE_INT i;
     enum rtx_code code;
{
  if (const_ok_for_arm (i))
    return 1;

  switch (code)
    {
    case PLUS:
      return const_ok_for_arm (ARM_SIGN_EXTEND (-i));

    case MINUS:         /* Should only occur with (MINUS I reg) => rsb */
    case XOR:
    case IOR:
      return 0;

    case AND:
      return const_ok_for_arm (ARM_SIGN_EXTEND (~i));

    default:
      abort ();
    }
}
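
/* For example, (plus reg #-256) can be output as "sub reg, reg, #256",
   and (and reg #0xffffff00) as "bic reg, reg, #0xff"; this is why PLUS
   also accepts the negated constant and AND the inverted one.  */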
/* Emit a sequence of insns to handle a large constant.
   CODE is the code of the operation required, it can be any of SET, PLUS,
   IOR, AND, XOR, MINUS;
   MODE is the mode in which the operation is being performed;
   VAL is the integer to operate on;
   SOURCE is the other operand (a register, or a null-pointer for SET);
   SUBTARGETS means it is safe to create scratch registers if that will
   either produce a simpler sequence, or we will want to cse the values.
   Return value is the number of insns emitted.  */

int
arm_split_constant (code, mode, val, target, source, subtargets)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
{
  if (subtargets || code == SET
      || (GET_CODE (target) == REG && GET_CODE (source) == REG
          && REGNO (target) != REGNO (source)))
    {
      /* After arm_reorg has been called, we can't fix up expensive
         constants by pushing them into memory so we must synthesize
         them in-line, regardless of the cost.  This is only likely to
         be more costly on chips that have load delay slots and we are
         compiling without running the scheduler (so no splitting
         occurred before the final instruction emission).

         Ref: gcc -O1 -mcpu=strongarm gcc.c-torture/compile/980506-2.c
      */
      if (!after_arm_reorg
          && (arm_gen_constant (code, mode, val, target, source, 1, 0)
              > arm_constant_limit + (code != SET)))
        {
          if (code == SET)
            {
              /* Currently SET is the only monadic value for CODE, all
                 the rest are dyadic.  */
              emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (val)));
              return 1;
            }
          else
            {
              rtx temp = subtargets ? gen_reg_rtx (mode) : target;

              emit_insn (gen_rtx_SET (VOIDmode, temp, GEN_INT (val)));
              /* For MINUS, the value is subtracted from, since we never
                 have subtraction of a constant.  */
              if (code == MINUS)
                emit_insn (gen_rtx_SET (VOIDmode, target,
                                        gen_rtx_MINUS (mode, temp, source)));
              else
                emit_insn (gen_rtx_SET (VOIDmode, target,
                                        gen_rtx (code, mode, source, temp)));
              return 2;
            }
        }
    }

  return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
}
static int
count_insns_for_constant (remainder, i)
     HOST_WIDE_INT remainder;
     int i;
{
  HOST_WIDE_INT temp1;
  int num_insns = 0;

  do
    {
      int end;

      if (i <= 0)
        i += 32;
      if (remainder & (3 << (i - 2)))
        {
          end = i - 8;
          if (end < 0)
            end += 32;
          temp1 = remainder & ((0x0ff << end)
                               | ((i < end) ? (0xff >> (32 - end)) : 0));
          remainder &= ~temp1;
          num_insns++;
          i -= 6;
        }
      i -= 2;
    } while (remainder);
  return num_insns;
}
/* As above, but extra parameter GENERATE which, if clear, suppresses
   RTL generation.  */

static int
arm_gen_constant (code, mode, val, target, source, subtargets, generate)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
     int generate;
{
  int can_invert = 0;
  int can_negate = 0;
  int can_negate_initial = 0;
  int can_shift = 0;
  int i;
  int num_bits_set = 0;
  int set_sign_bit_copies = 0;
  int clear_sign_bit_copies = 0;
  int clear_zero_bit_copies = 0;
  int set_zero_bit_copies = 0;
  int insns = 0;
  unsigned HOST_WIDE_INT temp1, temp2;
  unsigned HOST_WIDE_INT remainder = val & 0xffffffff;

  /* Find out which operations are safe for a given CODE.  Also do a quick
     check for degenerate cases; these can occur when DImode operations
     are split.  */
  switch (code)
    {
    case SET:
      can_invert = 1;
      can_shift = 1;
      can_negate = 1;
      break;

    case PLUS:
      can_negate = 1;
      can_negate_initial = 1;
      break;

    case IOR:
      if (remainder == 0xffffffff)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    GEN_INT (ARM_SIGN_EXTEND (val))));
          return 1;
        }
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      break;

    case AND:
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, const0_rtx));
          return 1;
        }
      if (remainder == 0xffffffff)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      can_invert = 1;
      break;

    case XOR:
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      if (remainder == 0xffffffff)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_NOT (mode, source)));
          return 1;
        }

      /* We don't know how to handle this yet below.  */
      abort ();

    case MINUS:
      /* We treat MINUS as (val - source), since (source - val) is always
         passed as (source + (-val)).  */
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_NEG (mode, source)));
          return 1;
        }
      if (const_ok_for_arm (val))
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_MINUS (mode, GEN_INT (val),
                                                   source)));
          return 1;
        }
      can_negate = 1;

      break;

    default:
      abort ();
    }

  /* If we can do it in one insn get out quickly.  */
  if (const_ok_for_arm (val)
      || (can_negate_initial && const_ok_for_arm (-val))
      || (can_invert && const_ok_for_arm (~val)))
    {
      if (generate)
        emit_insn (gen_rtx_SET (VOIDmode, target,
                                (source ? gen_rtx (code, mode, source,
                                                   GEN_INT (val))
                                 : GEN_INT (val))));
      return 1;
    }

  /* Calculate a few attributes that may be useful for specific
     optimizations.  */
  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) == 0)
        clear_sign_bit_copies++;
      else
        break;
    }

  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) != 0)
        set_sign_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) == 0)
        clear_zero_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) != 0)
        set_zero_bit_copies++;
      else
        break;
    }

  switch (code)
    {
    case SET:
      /* See if we can do this by sign_extending a constant that is known
         to be negative.  This is a good way of doing it, since the shift
         may well merge into a subsequent insn.  */
      if (set_sign_bit_copies > 1)
        {
          if (const_ok_for_arm
              (temp1 = ARM_SIGN_EXTEND (remainder
                                        << (set_sign_bit_copies - 1))))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx_SET (VOIDmode, new_src,
                                          GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
          /* For an inverted constant, we will need to set the low bits,
             these will be shifted out of harm's way.  */
          temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
          if (const_ok_for_arm (~temp1))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx_SET (VOIDmode, new_src,
                                          GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
        }

      /* See if we can generate this by setting the bottom (or the top)
         16 bits, and then shifting these into the other half of the
         word.  We only look for the simplest cases, to do more would cost
         too much.  Be careful, however, not to generate this when the
         alternative would take fewer insns.  */
      if (val & 0xffff0000)
        {
          temp1 = remainder & 0xffff0000;
          temp2 = remainder & 0x0000ffff;

          /* Overlaps outside this range are best done using other methods.  */
          for (i = 9; i < 24; i++)
            {
              if ((((temp2 | (temp2 << i)) & 0xffffffff) == remainder)
                  && !const_ok_for_arm (temp2))
                {
                  rtx new_src = (subtargets
                                 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
                                 : target);
                  insns = arm_gen_constant (code, mode, temp2, new_src,
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn (gen_rtx_SET
                               (VOIDmode, target,
                                gen_rtx_IOR (mode,
                                             gen_rtx_ASHIFT (mode, source,
                                                             GEN_INT (i)),
                                             source)));
                  return insns + 1;
                }
            }

          /* Don't duplicate cases already considered.  */
          for (i = 17; i < 24; i++)
            {
              if (((temp1 | (temp1 >> i)) == remainder)
                  && !const_ok_for_arm (temp1))
                {
                  rtx new_src = (subtargets
                                 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
                                 : target);
                  insns = arm_gen_constant (code, mode, temp1, new_src,
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn
                      (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_IOR
                                    (mode,
                                     gen_rtx_LSHIFTRT (mode, source,
                                                       GEN_INT (i)),
                                     source)));
                  return insns + 1;
                }
            }
        }
      break;

    case IOR:
    case XOR:
      /* If we have IOR or XOR, and the constant can be loaded in a
         single instruction, and we can find a temporary to put it in,
         then this can be done in two instructions instead of 3-4.  */
      if (subtargets
          /* TARGET can't be NULL if SUBTARGETS is 0.  */
          || (reload_completed && !reg_mentioned_p (target, source)))
        {
          if (const_ok_for_arm (ARM_SIGN_EXTEND (~val)))
            {
              if (generate)
                {
                  rtx sub = subtargets ? gen_reg_rtx (mode) : target;

                  emit_insn (gen_rtx_SET (VOIDmode, sub, GEN_INT (val)));
                  emit_insn (gen_rtx_SET (VOIDmode, target,
                                          gen_rtx (code, mode, source, sub)));
                }
              return 2;
            }
        }

      if (code == XOR)
        break;

      if (set_sign_bit_copies > 8
          && (val & (-1 << (32 - set_sign_bit_copies))) == val)
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (set_sign_bit_copies);

              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_ASHIFT (mode,
                                                                   source,
                                                                   shift))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_LSHIFTRT (mode, sub,
                                                                     shift))));
            }
          return 2;
        }

      if (set_zero_bit_copies > 8
          && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (set_zero_bit_copies);

              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_LSHIFTRT (mode,
                                                                     source,
                                                                     shift))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_ASHIFT (mode, sub,
                                                                   shift))));
            }
          return 2;
        }

      if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~val)))
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode, source)));
              source = sub;
              if (subtargets)
                sub = gen_reg_rtx (mode);
              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_AND (mode, source,
                                                   GEN_INT (temp1))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode, sub)));
            }
          return 3;
        }
      break;

    case AND:
      /* See if two shifts will do 2 or more insn's worth of work.  */
      if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
        {
          HOST_WIDE_INT shift_mask = ((0xffffffff
                                       << (32 - clear_sign_bit_copies))
                                      & 0xffffffff);

          if ((remainder | shift_mask) != 0xffffffff)
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            new_src, source, subtargets, 1);
                  source = new_src;
                }
              else
                {
                  rtx targ = subtargets ? NULL_RTX : target;
                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            targ, source, subtargets, 0);
                }
            }

          if (generate)
            {
              rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (clear_sign_bit_copies);

              emit_insn (gen_ashlsi3 (new_src, source, shift));
              emit_insn (gen_lshrsi3 (target, new_src, shift));
            }

          return insns + 2;
        }

      if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
        {
          HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;

          if ((remainder | shift_mask) != 0xffffffff)
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;

                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            new_src, source, subtargets, 1);
                  source = new_src;
                }
              else
                {
                  rtx targ = subtargets ? NULL_RTX : target;

                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            targ, source, subtargets, 0);
                }
            }

          if (generate)
            {
              rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (clear_zero_bit_copies);

              emit_insn (gen_lshrsi3 (new_src, source, shift));
              emit_insn (gen_ashlsi3 (target, new_src, shift));
            }

          return insns + 2;
        }

      break;

    default:
      break;
    }

  for (i = 0; i < 32; i++)
    if (remainder & (1 << i))
      num_bits_set++;

  if (code == AND || (can_invert && num_bits_set > 16))
    remainder = (~remainder) & 0xffffffff;
  else if (code == PLUS && num_bits_set > 16)
    remainder = (-remainder) & 0xffffffff;
  else
    {
      can_invert = 0;
      can_negate = 0;
    }

  /* Now try and find a way of doing the job in either two or three
     instructions.
     We start by looking for the largest block of zeros that are aligned on
     a 2-bit boundary, we then fill up the temps, wrapping around to the
     top of the word when we drop off the bottom.
     In the worst case this code should produce no more than four insns.  */
  {
    int best_start = 0;
    int best_consecutive_zeros = 0;

    for (i = 0; i < 32; i += 2)
      {
        int consecutive_zeros = 0;

        if (!(remainder & (3 << i)))
          {
            while ((i < 32) && !(remainder & (3 << i)))
              {
                consecutive_zeros += 2;
                i += 2;
              }
            if (consecutive_zeros > best_consecutive_zeros)
              {
                best_consecutive_zeros = consecutive_zeros;
                best_start = i - consecutive_zeros;
              }
            i -= 2;
          }
      }

    /* So long as it won't require any more insns to do so, it's
       desirable to emit a small constant (in bits 0...9) in the last
       insn.  This way there is more chance that it can be combined with
       a later addressing insn to form a pre-indexed load or store
       operation.  Consider:

               *((volatile int *)0xe0000100) = 1;
               *((volatile int *)0xe0000110) = 2;

       We want this to wind up as:

                mov rA, #0xe0000000
                mov rB, #1
                str rB, [rA, #0x100]
                mov rB, #2
                str rB, [rA, #0x110]

       rather than having to synthesize both large constants from scratch.

       Therefore, we calculate how many insns would be required to emit
       the constant starting from `best_start', and also starting from
       zero (i.e. with bit 31 first to be output).  If `best_start' doesn't
       yield a shorter sequence, we may as well use zero.  */
    if (best_start != 0
        && ((((unsigned HOST_WIDE_INT) 1) << best_start) < remainder)
        && (count_insns_for_constant (remainder, 0) <=
            count_insns_for_constant (remainder, best_start)))
      best_start = 0;

    /* Now start emitting the insns.  */
    i = best_start;
    do
      {
        int end;

        if (i <= 0)
          i += 32;
        if (remainder & (3 << (i - 2)))
          {
            end = i - 8;
            if (end < 0)
              end += 32;
            temp1 = remainder & ((0x0ff << end)
                                 | ((i < end) ? (0xff >> (32 - end)) : 0));
            remainder &= ~temp1;

            if (generate)
              {
                rtx new_src, temp1_rtx;

                if (code == SET || code == MINUS)
                  {
                    new_src = (subtargets ? gen_reg_rtx (mode) : target);
                    if (can_invert && code != MINUS)
                      temp1 = ~temp1;
                  }
                else
                  {
                    if (remainder && subtargets)
                      new_src = gen_reg_rtx (mode);
                    else
                      new_src = target;
                    if (can_invert)
                      temp1 = ~temp1;
                    else if (can_negate)
                      temp1 = -temp1;
                  }

                temp1 = trunc_int_for_mode (temp1, mode);
                temp1_rtx = GEN_INT (temp1);

                if (code == SET)
                  ;
                else if (code == MINUS)
                  temp1_rtx = gen_rtx_MINUS (mode, temp1_rtx, source);
                else
                  temp1_rtx = gen_rtx_fmt_ee (code, mode, source, temp1_rtx);

                emit_insn (gen_rtx_SET (VOIDmode, new_src, temp1_rtx));
                source = new_src;
              }

            if (code == SET)
              {
                can_invert = 0;
                code = PLUS;
              }
            else if (code == MINUS)
              code = PLUS;

            insns++;
            i -= 6;
          }
        i -= 2;
      }
    while (remainder);
  }

  return insns;
}
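
/* As a worked example, (set r0 #0x00ff00ff) has no single-insn
   encoding, so the loop above emits the value in 8-bit chunks, each a
   valid rotated immediate, e.g.:

        mov     r0, #0x00ff0000
        add     r0, r0, #0x000000ff

   After the first chunk is emitted CODE is switched from SET to PLUS,
   so every later chunk is simply added into the partial result.  */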
/* Canonicalize a comparison so that we are more likely to recognize it.
   This can be done for a few constant compares, where we can make the
   immediate value easier to load.  */

enum rtx_code
arm_canonicalize_comparison (code, op1)
     enum rtx_code code;
     rtx * op1;
{
  unsigned HOST_WIDE_INT i = INTVAL (*op1);

  switch (code)
    {
    case EQ:
    case NE:
      return code;

    case GT:
    case LE:
      if (i != ((((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1)) - 1)
          && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
        {
          *op1 = GEN_INT (i + 1);
          return code == GT ? GE : LT;
        }
      break;

    case GE:
    case LT:
      if (i != (((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1))
          && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
        {
          *op1 = GEN_INT (i - 1);
          return code == GE ? GT : LE;
        }
      break;

    case GTU:
    case LEU:
      if (i != ~((unsigned HOST_WIDE_INT) 0)
          && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
        {
          *op1 = GEN_INT (i + 1);
          return code == GTU ? GEU : LTU;
        }
      break;

    case GEU:
    case LTU:
      if (i != 0
          && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
        {
          *op1 = GEN_INT (i - 1);
          return code == GEU ? GTU : LEU;
        }
      break;

    default:
      abort ();
    }

  return code;
}
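
/* For example, 1023 is not a valid immediate (see const_ok_for_arm
   above) but 1024 is, so (GT x 1023) is canonicalized to (GE x 1024)
   and the comparison can then use the constant directly instead of
   first loading 1023 into a register.  */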
/* Decide whether a type should be returned in memory (true)
   or in a register (false).  This is called by the macro
   RETURN_IN_MEMORY.  */

int
arm_return_in_memory (type)
     tree type;
{
  HOST_WIDE_INT size;

  if (!AGGREGATE_TYPE_P (type))
    /* All simple types are returned in registers.  */
    return 0;

  size = int_size_in_bytes (type);

  if (TARGET_ATPCS)
    {
      /* ATPCS returns aggregate types in memory only if they are
         larger than a word (or are variable size).  */
      return (size < 0 || size > UNITS_PER_WORD);
    }

  /* For the arm-wince targets we choose to be compatible with Microsoft's
     ARM and Thumb compilers, which always return aggregates in memory.  */
#ifndef ARM_WINCE
  /* All structures/unions bigger than one word are returned in memory.
     Also catch the case where int_size_in_bytes returns -1.  In this case
     the aggregate is either huge or of variable size, and in either case
     we will want to return it via memory and not in a register.  */
  if (size < 0 || size > UNITS_PER_WORD)
    return 1;

  if (TREE_CODE (type) == RECORD_TYPE)
    {
      tree field;

      /* For a struct the APCS says that we only return in a register
         if the type is 'integer like' and every addressable element
         has an offset of zero.  For practical purposes this means
         that the structure can have at most one non bit-field element
         and that this element must be the first one in the structure.  */

      /* Find the first field, ignoring non FIELD_DECL things which will
         have been created by C++.  */
      for (field = TYPE_FIELDS (type);
           field && TREE_CODE (field) != FIELD_DECL;
           field = TREE_CHAIN (field))
        continue;

      if (field == NULL)
        return 0; /* An empty structure.  Allowed by an extension to ANSI C.  */

      /* Check that the first field is valid for returning in a register.  */

      /* ... Floats are not allowed.  */
      if (FLOAT_TYPE_P (TREE_TYPE (field)))
        return 1;

      /* ... Aggregates that are not themselves valid for returning in
         a register are not allowed.  */
      if (RETURN_IN_MEMORY (TREE_TYPE (field)))
        return 1;

      /* Now check the remaining fields, if any.  Only bitfields are allowed,
         since they are not addressable.  */
      for (field = TREE_CHAIN (field);
           field;
           field = TREE_CHAIN (field))
        {
          if (TREE_CODE (field) != FIELD_DECL)
            continue;

          if (!DECL_BIT_FIELD_TYPE (field))
            return 1;
        }

      return 0;
    }

  if (TREE_CODE (type) == UNION_TYPE)
    {
      tree field;

      /* Unions can be returned in registers if every element is
         integral, or can be returned in an integer register.  */
      for (field = TYPE_FIELDS (type);
           field;
           field = TREE_CHAIN (field))
        {
          if (TREE_CODE (field) != FIELD_DECL)
            continue;

          if (FLOAT_TYPE_P (TREE_TYPE (field)))
            return 1;

          if (RETURN_IN_MEMORY (TREE_TYPE (field)))
            return 1;
        }

      return 0;
    }
#endif /* not ARM_WINCE */

  /* Return all other types in memory.  */
  return 1;
}
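
/* For example, under the APCS rules implemented above (the ATPCS and
   ARM_WINCE cases aside), struct { short s; } is returned in a
   register, since it fits in a word and its only field is integral,
   while struct { float f; } and struct { int a, b; } are returned in
   memory: the former because its first field is a float, the latter
   because it is bigger than one word.  */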
/* Indicate whether or not words of a double are in big-endian order.  */

int
arm_float_words_big_endian ()
{

  /* For FPA, float words are always big-endian.  For VFP, float words
     follow the memory system mode.  */

  if (TARGET_HARD_FLOAT)
    {
      /* FIXME: TARGET_HARD_FLOAT currently implies FPA.  */
      return 1;
    }

  if (TARGET_VFP)
    return (TARGET_BIG_END ? 1 : 0);

  return 1;
}
/* Initialize a variable CUM of type CUMULATIVE_ARGS
   for a call to a function whose data type is FNTYPE.
   For a library call, FNTYPE is NULL.  */
void
arm_init_cumulative_args (pcum, fntype, libname, indirect)
     CUMULATIVE_ARGS * pcum;
     tree fntype;
     rtx libname  ATTRIBUTE_UNUSED;
     int indirect ATTRIBUTE_UNUSED;
{
  /* On the ARM, the offset starts at 0.  */
  pcum->nregs = ((fntype && aggregate_value_p (TREE_TYPE (fntype))) ? 1 : 0);

  pcum->call_cookie = CALL_NORMAL;

  if (TARGET_LONG_CALLS)
    pcum->call_cookie = CALL_LONG;

  /* Check for long call/short call attributes.  The attributes
     override any command line option.  */
  if (fntype)
    {
      if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (fntype)))
        pcum->call_cookie = CALL_SHORT;
      else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (fntype)))
        pcum->call_cookie = CALL_LONG;
    }
}
1939 /* Determine where to put an argument to a function.
1940 Value is zero to push the argument on the stack,
1941 or a hard register in which to store the argument.
1943 MODE is the argument's machine mode.
1944 TYPE is the data type of the argument (as a tree).
1945 This is null for libcalls where that information may
1946 not be available.
1947 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1948 the preceding args and about the function being called.
1949 NAMED is nonzero if this argument is a named parameter
1950 (otherwise it is an extra parameter matching an ellipsis). */
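/* As an illustrative sketch (not part of the original comment): for
extern int f (int a, int b, int c, int d, int e);
the four named integer arguments a..d occupy the argument registers
r0-r3, so this function returns (reg:SI 0) through (reg:SI 3) for
them, and NULL_RTX for e, which is therefore pushed on the stack.  */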
1953 arm_function_arg (pcum, mode, type, named)
1954 CUMULATIVE_ARGS * pcum;
1955 enum machine_mode mode;
1956 tree type ATTRIBUTE_UNUSED;
1957 int named;
1959 if (mode == VOIDmode)
1960 /* Compute operand 2 of the call insn. */
1961 return GEN_INT (pcum->call_cookie);
1963 if (!named || pcum->nregs >= NUM_ARG_REGS)
1964 return NULL_RTX;
1966 return gen_rtx_REG (mode, pcum->nregs);
1969 /* Variable sized types are passed by reference. This is a GCC
1970 extension to the ARM ABI. */
1973 arm_function_arg_pass_by_reference (cum, mode, type, named)
1974 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
1975 enum machine_mode mode ATTRIBUTE_UNUSED;
1976 tree type;
1977 int named ATTRIBUTE_UNUSED;
1979 return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
1982 /* Implement va_arg. */
1985 arm_va_arg (valist, type)
1986 tree valist, type;
1988 /* Variable sized types are passed by reference. */
1989 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
1991 rtx addr = std_expand_builtin_va_arg (valist, build_pointer_type (type));
1992 return gen_rtx_MEM (ptr_mode, force_reg (Pmode, addr));
1995 return std_expand_builtin_va_arg (valist, type);
1998 /* Encode the current state of the #pragma [no_]long_calls. */
1999 typedef enum
2001 OFF, /* No #pragma [no_]long_calls is in effect. */
2002 LONG, /* #pragma long_calls is in effect. */
2003 SHORT /* #pragma no_long_calls is in effect. */
2004 } arm_pragma_enum;
2006 static arm_pragma_enum arm_pragma_long_calls = OFF;
2008 void
2009 arm_pr_long_calls (pfile)
2010 struct cpp_reader * pfile ATTRIBUTE_UNUSED;
2012 arm_pragma_long_calls = LONG;
2015 void
2016 arm_pr_no_long_calls (pfile)
2017 struct cpp_reader * pfile ATTRIBUTE_UNUSED;
2019 arm_pragma_long_calls = SHORT;
2022 void
2023 arm_pr_long_calls_off (pfile)
2024 struct cpp_reader * pfile ATTRIBUTE_UNUSED;
2026 arm_pragma_long_calls = OFF;
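/* Example usage of these pragmas (illustrative only):
#pragma long_calls
extern void far_away (void);   -- gets the long_call attribute
#pragma no_long_calls
extern void near_by (void);    -- gets the short_call attribute
#pragma long_calls_off
extern void other (void);      -- back to the command line default
See arm_set_default_type_attributes below for where this state is
applied.  */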
2029 /* Table of machine attributes. */
2030 const struct attribute_spec arm_attribute_table[] =
2032 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
2033 /* Function calls made to this symbol must be done indirectly, because
2034 it may lie outside of the 26 bit addressing range of a normal function
2035 call. */
2036 { "long_call", 0, 0, false, true, true, NULL },
2037 /* Whereas these functions are always known to reside within the 26 bit
2038 addressing range. */
2039 { "short_call", 0, 0, false, true, true, NULL },
2040 /* Interrupt Service Routines have special prologue and epilogue requirements. */
2041 { "isr", 0, 1, false, false, false, arm_handle_isr_attribute },
2042 { "interrupt", 0, 1, false, false, false, arm_handle_isr_attribute },
2043 { "naked", 0, 0, true, false, false, arm_handle_fndecl_attribute },
2044 #ifdef ARM_PE
2045 /* ARM/PE has three new attributes:
2046 interfacearm - ?
2047 dllexport - for exporting a function/variable that will live in a dll
2048 dllimport - for importing a function/variable from a dll
2050 Microsoft allows multiple declspecs in one __declspec, separating
2051 them with spaces. We do NOT support this. Instead, use __declspec
2052 multiple times.
2054 { "dllimport", 0, 0, true, false, false, NULL },
2055 { "dllexport", 0, 0, true, false, false, NULL },
2056 { "interfacearm", 0, 0, true, false, false, arm_handle_fndecl_attribute },
2057 #endif
2058 { NULL, 0, 0, false, false, false, NULL }
2061 /* Handle an attribute requiring a FUNCTION_DECL;
2062 arguments as in struct attribute_spec.handler. */
2064 static tree
2065 arm_handle_fndecl_attribute (node, name, args, flags, no_add_attrs)
2066 tree * node;
2067 tree name;
2068 tree args ATTRIBUTE_UNUSED;
2069 int flags ATTRIBUTE_UNUSED;
2070 bool * no_add_attrs;
2072 if (TREE_CODE (*node) != FUNCTION_DECL)
2074 warning ("`%s' attribute only applies to functions",
2075 IDENTIFIER_POINTER (name));
2076 *no_add_attrs = true;
2079 return NULL_TREE;
2082 /* Handle an "interrupt" or "isr" attribute;
2083 arguments as in struct attribute_spec.handler. */
2085 static tree
2086 arm_handle_isr_attribute (node, name, args, flags, no_add_attrs)
2087 tree * node;
2088 tree name;
2089 tree args;
2090 int flags;
2091 bool * no_add_attrs;
2093 if (DECL_P (*node))
2095 if (TREE_CODE (*node) != FUNCTION_DECL)
2097 warning ("`%s' attribute only applies to functions",
2098 IDENTIFIER_POINTER (name));
2099 *no_add_attrs = true;
2101 /* FIXME: the argument if any is checked for type attributes;
2102 should it be checked for decl ones? */
2104 else
2106 if (TREE_CODE (*node) == FUNCTION_TYPE
2107 || TREE_CODE (*node) == METHOD_TYPE)
2109 if (arm_isr_value (args) == ARM_FT_UNKNOWN)
2111 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
2112 *no_add_attrs = true;
2115 else if (TREE_CODE (*node) == POINTER_TYPE
2116 && (TREE_CODE (TREE_TYPE (*node)) == FUNCTION_TYPE
2117 || TREE_CODE (TREE_TYPE (*node)) == METHOD_TYPE)
2118 && arm_isr_value (args) != ARM_FT_UNKNOWN)
2120 *node = build_type_copy (*node);
2121 TREE_TYPE (*node) = build_type_attribute_variant
2122 (TREE_TYPE (*node),
2123 tree_cons (name, args, TYPE_ATTRIBUTES (TREE_TYPE (*node))));
2124 *no_add_attrs = true;
2126 else
2128 /* Possibly pass this attribute on from the type to a decl. */
2129 if (flags & ((int) ATTR_FLAG_DECL_NEXT
2130 | (int) ATTR_FLAG_FUNCTION_NEXT
2131 | (int) ATTR_FLAG_ARRAY_NEXT))
2133 *no_add_attrs = true;
2134 return tree_cons (name, args, NULL_TREE);
2136 else
2138 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
2143 return NULL_TREE;
2146 /* Return 0 if the attributes for two types are incompatible, 1 if they
2147 are compatible, and 2 if they are nearly compatible (which causes a
2148 warning to be generated). */
2150 static int
2151 arm_comp_type_attributes (type1, type2)
2152 tree type1;
2153 tree type2;
2155 int l1, l2, s1, s2;
2157 /* Check for mismatch of non-default calling convention. */
2158 if (TREE_CODE (type1) != FUNCTION_TYPE)
2159 return 1;
2161 /* Check for mismatched call attributes. */
2162 l1 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type1)) != NULL;
2163 l2 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type2)) != NULL;
2164 s1 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type1)) != NULL;
2165 s2 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type2)) != NULL;
2167 /* Only bother to check if an attribute is defined. */
2168 if (l1 | l2 | s1 | s2)
2170 /* If one type has an attribute, the other must have the same attribute. */
2171 if ((l1 != l2) || (s1 != s2))
2172 return 0;
2174 /* Disallow mixed attributes. */
2175 if ((l1 & s2) || (l2 & s1))
2176 return 0;
2179 /* Check for mismatched ISR attribute. */
2180 l1 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type1)) != NULL;
2181 if (! l1)
2182 l1 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type1)) != NULL;
2183 l2 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type2)) != NULL;
2184 if (! l2)
2185 l2 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type2)) != NULL;
2186 if (l1 != l2)
2187 return 0;
2189 return 1;
2192 /* Encode long_call or short_call attribute by prefixing
2193 symbol name in DECL with a special character FLAG. */
2195 void
2196 arm_encode_call_attribute (decl, flag)
2197 tree decl;
2198 int flag;
2200 const char * str = XSTR (XEXP (DECL_RTL (decl), 0), 0);
2201 int len = strlen (str);
2202 char * newstr;
2204 /* Do not allow weak functions to be treated as short call. */
2205 if (DECL_WEAK (decl) && flag == SHORT_CALL_FLAG_CHAR)
2206 return;
2208 newstr = alloca (len + 2);
2209 newstr[0] = flag;
2210 strcpy (newstr + 1, str);
2212 newstr = (char *) ggc_alloc_string (newstr, len + 1);
2213 XSTR (XEXP (DECL_RTL (decl), 0), 0) = newstr;
2216 /* Assigns default attributes to a newly defined type. This is used to
2217 set short_call/long_call attributes for function types of
2218 functions defined inside corresponding #pragma scopes. */
2220 static void
2221 arm_set_default_type_attributes (type)
2222 tree type;
2224 /* Add __attribute__ ((long_call)) to all functions when
2225 inside #pragma long_calls, or __attribute__ ((short_call))
2226 when inside #pragma no_long_calls. */
2227 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
2229 tree type_attr_list, attr_name;
2230 type_attr_list = TYPE_ATTRIBUTES (type);
2232 if (arm_pragma_long_calls == LONG)
2233 attr_name = get_identifier ("long_call");
2234 else if (arm_pragma_long_calls == SHORT)
2235 attr_name = get_identifier ("short_call");
2236 else
2237 return;
2239 type_attr_list = tree_cons (attr_name, NULL_TREE, type_attr_list);
2240 TYPE_ATTRIBUTES (type) = type_attr_list;
2244 /* Return 1 if the operand is a SYMBOL_REF for a function known to be
2245 defined within the current compilation unit. If this cannot be
2246 determined, then 0 is returned. */
2248 static int
2249 current_file_function_operand (sym_ref)
2250 rtx sym_ref;
2252 /* This is a bit of a fib. A function will have a short call flag
2253 applied to its name if it has the short call attribute, or it has
2254 already been defined within the current compilation unit. */
2255 if (ENCODED_SHORT_CALL_ATTR_P (XSTR (sym_ref, 0)))
2256 return 1;
2258 /* The current function is always defined within the current compilation
2259 unit. If it is a weak definition, however, then this may not be the real
2260 definition of the function, and so we have to say no. */
2261 if (sym_ref == XEXP (DECL_RTL (current_function_decl), 0)
2262 && !DECL_WEAK (current_function_decl))
2263 return 1;
2265 /* We cannot make the determination - default to returning 0. */
2266 return 0;
2269 /* Return nonzero if a 32 bit "long_call" should be generated for
2270 this call. We generate a long_call if the function:
2272 a. has an __attribute__ ((long_call))
2273 or b. is within the scope of a #pragma long_calls
2274 or c. the -mlong-calls command line switch has been specified
2276 However we do not generate a long call if the function:
2278 d. has an __attribute__ ((short_call))
2279 or e. is inside the scope of a #pragma no_long_calls
2280 or f. has an __attribute__ ((section))
2281 or g. is defined within the current compilation unit.
2283 This function will be called by C fragments contained in the machine
2284 description file. CALL_REF and CALL_COOKIE correspond to the matched
2285 rtl operands. CALL_SYMBOL is used to distinguish between
2286 two different callers of the function. It is set to 1 in the
2287 "call_symbol" and "call_symbol_value" patterns and to 0 in the "call"
2288 and "call_value" patterns. This is because of the difference in the
2289 SYM_REFs passed by these patterns. */
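/* For example (illustrative only):
void f (void) __attribute__ ((long_call));
forces calls to f to be expanded as indirect calls through a register,
just as -mlong-calls does for every function that is not known to be
reachable by an ordinary branch.  */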
2292 arm_is_longcall_p (sym_ref, call_cookie, call_symbol)
2293 rtx sym_ref;
2294 int call_cookie;
2295 int call_symbol;
2297 if (!call_symbol)
2299 if (GET_CODE (sym_ref) != MEM)
2300 return 0;
2302 sym_ref = XEXP (sym_ref, 0);
2305 if (GET_CODE (sym_ref) != SYMBOL_REF)
2306 return 0;
2308 if (call_cookie & CALL_SHORT)
2309 return 0;
2311 if (TARGET_LONG_CALLS && flag_function_sections)
2312 return 1;
2314 if (current_file_function_operand (sym_ref))
2315 return 0;
2317 return (call_cookie & CALL_LONG)
2318 || ENCODED_LONG_CALL_ATTR_P (XSTR (sym_ref, 0))
2319 || TARGET_LONG_CALLS;
2322 /* Return nonzero if it is ok to make a tail-call to DECL. */
2324 static bool
2325 arm_function_ok_for_sibcall (decl, exp)
2326 tree decl;
2327 tree exp ATTRIBUTE_UNUSED;
2329 int call_type = TARGET_LONG_CALLS ? CALL_LONG : CALL_NORMAL;
2331 /* Never tailcall something for which we have no decl, or if we
2332 are in Thumb mode. */
2333 if (decl == NULL || TARGET_THUMB)
2334 return false;
2336 /* Get the calling method. */
2337 if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
2338 call_type = CALL_SHORT;
2339 else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
2340 call_type = CALL_LONG;
2342 /* Cannot tail-call to long calls, since these are out of range of
2343 a branch instruction. However, if not compiling PIC, we know
2344 we can reach the symbol if it is in this compilation unit. */
2345 if (call_type == CALL_LONG && (flag_pic || !TREE_ASM_WRITTEN (decl)))
2346 return false;
2348 /* If we are interworking and the function is not declared static
2349 then we can't tail-call it unless we know that it exists in this
2350 compilation unit (since it might be a Thumb routine). */
2351 if (TARGET_INTERWORK && TREE_PUBLIC (decl) && !TREE_ASM_WRITTEN (decl))
2352 return false;
2354 /* Never tailcall from an ISR routine - it needs a special exit sequence. */
2355 if (IS_INTERRUPT (arm_current_func_type ()))
2356 return false;
2358 /* Everything else is ok. */
2359 return true;
2363 /* Addressing mode support functions. */
2365 /* Return non-zero if X is a legitimate immediate operand when compiling
2366 for PIC. */
2368 legitimate_pic_operand_p (x)
2369 rtx x;
2371 if (CONSTANT_P (x)
2372 && flag_pic
2373 && (GET_CODE (x) == SYMBOL_REF
2374 || (GET_CODE (x) == CONST
2375 && GET_CODE (XEXP (x, 0)) == PLUS
2376 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)))
2377 return 0;
2379 return 1;
2383 legitimize_pic_address (orig, mode, reg)
2384 rtx orig;
2385 enum machine_mode mode;
2386 rtx reg;
2388 if (GET_CODE (orig) == SYMBOL_REF
2389 || GET_CODE (orig) == LABEL_REF)
2391 #ifndef AOF_ASSEMBLER
2392 rtx pic_ref, address;
2393 #endif
2394 rtx insn;
2395 int subregs = 0;
2397 if (reg == 0)
2399 if (no_new_pseudos)
2400 abort ();
2401 else
2402 reg = gen_reg_rtx (Pmode);
2404 subregs = 1;
2407 #ifdef AOF_ASSEMBLER
2408 /* The AOF assembler can generate relocations for these directly, and
2409 understands that the PIC register has to be added into the offset. */
2410 insn = emit_insn (gen_pic_load_addr_based (reg, orig));
2411 #else
2412 if (subregs)
2413 address = gen_reg_rtx (Pmode);
2414 else
2415 address = reg;
2417 if (TARGET_ARM)
2418 emit_insn (gen_pic_load_addr_arm (address, orig));
2419 else
2420 emit_insn (gen_pic_load_addr_thumb (address, orig));
2422 if ((GET_CODE (orig) == LABEL_REF
2423 || (GET_CODE (orig) == SYMBOL_REF
2424 && ENCODED_SHORT_CALL_ATTR_P (XSTR (orig, 0))))
2425 && NEED_GOT_RELOC)
2426 pic_ref = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, address);
2427 else
2429 pic_ref = gen_rtx_MEM (Pmode,
2430 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
2431 address));
2432 RTX_UNCHANGING_P (pic_ref) = 1;
2435 insn = emit_move_insn (reg, pic_ref);
2436 #endif
2437 current_function_uses_pic_offset_table = 1;
2438 /* Put a REG_EQUAL note on this insn, so that it can be optimized
2439 by loop. */
2440 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, orig,
2441 REG_NOTES (insn));
2442 return reg;
2444 else if (GET_CODE (orig) == CONST)
2446 rtx base, offset;
2448 if (GET_CODE (XEXP (orig, 0)) == PLUS
2449 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
2450 return orig;
2452 if (reg == 0)
2454 if (no_new_pseudos)
2455 abort ();
2456 else
2457 reg = gen_reg_rtx (Pmode);
2460 if (GET_CODE (XEXP (orig, 0)) == PLUS)
2462 base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
2463 offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
2464 base == reg ? 0 : reg);
2466 else
2467 abort ();
2469 if (GET_CODE (offset) == CONST_INT)
2471 /* The base register doesn't really matter; we only want to
2472 test the index for the appropriate mode. */
2473 if (!arm_legitimate_index_p (mode, offset, 0))
2475 if (!no_new_pseudos)
2476 offset = force_reg (Pmode, offset);
2477 else
2478 abort ();
2481 if (GET_CODE (offset) == CONST_INT)
2482 return plus_constant (base, INTVAL (offset));
2485 if (GET_MODE_SIZE (mode) > 4
2486 && (GET_MODE_CLASS (mode) == MODE_INT
2487 || TARGET_SOFT_FLOAT))
2489 emit_insn (gen_addsi3 (reg, base, offset));
2490 return reg;
2493 return gen_rtx_PLUS (Pmode, base, offset);
2496 return orig;
2499 /* Generate code to load the PIC register. PROLOGUE is true if
2500 called from arm_expand_prologue (in which case we want the
2501 generated insns at the start of the function); false if called
2502 by an exception receiver that needs the PIC register reloaded
2503 (in which case the insns are just dumped at the current location). */
2505 void
2506 arm_finalize_pic (prologue)
2507 int prologue ATTRIBUTE_UNUSED;
2509 #ifndef AOF_ASSEMBLER
2510 rtx l1, pic_tmp, pic_tmp2, seq, pic_rtx;
2511 rtx global_offset_table;
2513 if (current_function_uses_pic_offset_table == 0 || TARGET_SINGLE_PIC_BASE)
2514 return;
2516 if (!flag_pic)
2517 abort ();
2519 start_sequence ();
2520 l1 = gen_label_rtx ();
2522 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2523 /* On the ARM the PC register contains 'dot + 8' at the time of the
2524 addition; on the Thumb it is 'dot + 4'. */
2525 pic_tmp = plus_constant (gen_rtx_LABEL_REF (Pmode, l1), TARGET_ARM ? 8 : 4);
2526 if (GOT_PCREL)
2527 pic_tmp2 = gen_rtx_CONST (VOIDmode,
2528 gen_rtx_PLUS (Pmode, global_offset_table, pc_rtx));
2529 else
2530 pic_tmp2 = gen_rtx_CONST (VOIDmode, global_offset_table);
2532 pic_rtx = gen_rtx_CONST (Pmode, gen_rtx_MINUS (Pmode, pic_tmp2, pic_tmp));
2534 if (TARGET_ARM)
2536 emit_insn (gen_pic_load_addr_arm (pic_offset_table_rtx, pic_rtx));
2537 emit_insn (gen_pic_add_dot_plus_eight (pic_offset_table_rtx, l1));
2539 else
2541 emit_insn (gen_pic_load_addr_thumb (pic_offset_table_rtx, pic_rtx));
2542 emit_insn (gen_pic_add_dot_plus_four (pic_offset_table_rtx, l1));
2545 seq = get_insns ();
2546 end_sequence ();
2547 if (prologue)
2548 emit_insn_after (seq, get_insns ());
2549 else
2550 emit_insn (seq);
2552 /* Need to emit this whether or not we obey regdecls,
2553 since setjmp/longjmp can cause life info to screw up. */
2554 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
2555 #endif /* AOF_ASSEMBLER */
2558 /* Return nonzero if X is valid as an ARM state addressing register. */
2559 static int
2560 arm_address_register_rtx_p (x, strict_p)
2561 rtx x;
2562 int strict_p;
2564 int regno;
2566 if (GET_CODE (x) != REG)
2567 return 0;
2569 regno = REGNO (x);
2571 if (strict_p)
2572 return ARM_REGNO_OK_FOR_BASE_P (regno);
2574 return (regno <= LAST_ARM_REGNUM
2575 || regno >= FIRST_PSEUDO_REGISTER
2576 || regno == FRAME_POINTER_REGNUM
2577 || regno == ARG_POINTER_REGNUM);
2580 /* Return nonzero if X is a valid ARM state address operand. */
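/* An illustrative sample (not an exhaustive list) of SImode addresses
the code below accepts, in RTL form:
(reg r0)                       -- ldr rN, [r0]
(plus (reg r0) (const_int 4))  -- ldr rN, [r0, #4]
(plus (reg r0) (reg r1))       -- ldr rN, [r0, r1]
(post_inc (reg r0))            -- ldr rN, [r0], #4.  */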
2582 arm_legitimate_address_p (mode, x, strict_p)
2583 enum machine_mode mode;
2584 rtx x;
2585 int strict_p;
2587 if (arm_address_register_rtx_p (x, strict_p))
2588 return 1;
2590 else if (GET_CODE (x) == POST_INC || GET_CODE (x) == PRE_DEC)
2591 return arm_address_register_rtx_p (XEXP (x, 0), strict_p);
2593 else if ((GET_CODE (x) == POST_MODIFY || GET_CODE (x) == PRE_MODIFY)
2594 && GET_MODE_SIZE (mode) <= 4
2595 && arm_address_register_rtx_p (XEXP (x, 0), strict_p)
2596 && GET_CODE (XEXP (x, 1)) == PLUS
2597 && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
2598 return arm_legitimate_index_p (mode, XEXP (XEXP (x, 1), 1), strict_p);
2600 /* After reload constants split into minipools will have addresses
2601 from a LABEL_REF. */
2602 else if (GET_MODE_SIZE (mode) >= 4 && reload_completed
2603 && (GET_CODE (x) == LABEL_REF
2604 || (GET_CODE (x) == CONST
2605 && GET_CODE (XEXP (x, 0)) == PLUS
2606 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF
2607 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
2608 return 1;
2610 else if (mode == TImode)
2611 return 0;
2613 else if (mode == DImode || (TARGET_SOFT_FLOAT && mode == DFmode))
2615 if (GET_CODE (x) == PLUS
2616 && arm_address_register_rtx_p (XEXP (x, 0), strict_p)
2617 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2619 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
2621 if (val == 4 || val == -4 || val == -8)
2622 return 1;
2626 else if (GET_CODE (x) == PLUS)
2628 rtx xop0 = XEXP (x, 0);
2629 rtx xop1 = XEXP (x, 1);
2631 return ((arm_address_register_rtx_p (xop0, strict_p)
2632 && arm_legitimate_index_p (mode, xop1, strict_p))
2633 || (arm_address_register_rtx_p (xop1, strict_p)
2634 && arm_legitimate_index_p (mode, xop0, strict_p)));
2637 #if 0
2638 /* Reload currently can't handle MINUS, so disable this for now */
2639 else if (GET_CODE (x) == MINUS)
2641 rtx xop0 = XEXP (x, 0);
2642 rtx xop1 = XEXP (x, 1);
2644 return (arm_address_register_rtx_p (xop0, strict_p)
2645 && arm_legitimate_index_p (mode, xop1, strict_p));
2647 #endif
2649 else if (GET_MODE_CLASS (mode) != MODE_FLOAT
2650 && GET_CODE (x) == SYMBOL_REF
2651 && CONSTANT_POOL_ADDRESS_P (x)
2652 && ! (flag_pic
2653 && symbol_mentioned_p (get_pool_constant (x))))
2654 return 1;
2656 else if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == POST_DEC)
2657 && (GET_MODE_SIZE (mode) <= 4)
2658 && arm_address_register_rtx_p (XEXP (x, 0), strict_p))
2659 return 1;
2661 return 0;
2664 /* Return nonzero if INDEX is valid for an address index operand in
2665 ARM state. */
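/* For instance (illustrative only), with SImode the following index
forms are accepted:
(const_int 20)                 -- ldr rN, [rB, #20]
(reg r1)                       -- ldr rN, [rB, r1]
(mult (reg r1) (const_int 4))  -- ldr rN, [rB, r1, lsl #2]
whereas HImode on ARMv4 allows a register or a small constant but no
shifted index, since the sign-extending loads lack that form.  */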
2666 static int
2667 arm_legitimate_index_p (mode, index, strict_p)
2668 enum machine_mode mode;
2669 rtx index;
2670 int strict_p;
2672 HOST_WIDE_INT range;
2673 enum rtx_code code = GET_CODE (index);
2675 if (TARGET_HARD_FLOAT && GET_MODE_CLASS (mode) == MODE_FLOAT)
2676 return (code == CONST_INT && INTVAL (index) < 1024
2677 && INTVAL (index) > -1024
2678 && (INTVAL (index) & 3) == 0);
2680 if (arm_address_register_rtx_p (index, strict_p)
2681 && GET_MODE_SIZE (mode) <= 4)
2682 return 1;
2684 /* XXX What about ldrsb? */
2685 if (GET_MODE_SIZE (mode) <= 4 && code == MULT
2686 && (!arm_arch4 || (mode) != HImode))
2688 rtx xiop0 = XEXP (index, 0);
2689 rtx xiop1 = XEXP (index, 1);
2691 return ((arm_address_register_rtx_p (xiop0, strict_p)
2692 && power_of_two_operand (xiop1, SImode))
2693 || (arm_address_register_rtx_p (xiop1, strict_p)
2694 && power_of_two_operand (xiop0, SImode)));
2697 if (GET_MODE_SIZE (mode) <= 4
2698 && (code == LSHIFTRT || code == ASHIFTRT
2699 || code == ASHIFT || code == ROTATERT)
2700 && (!arm_arch4 || (mode) != HImode))
2702 rtx op = XEXP (index, 1);
2704 return (arm_address_register_rtx_p (XEXP (index, 0), strict_p)
2705 && GET_CODE (op) == CONST_INT
2706 && INTVAL (op) > 0
2707 && INTVAL (op) <= 31);
2710 /* XXX For ARM v4 we may be doing a sign-extend operation during the
2711 load, but that has a restricted addressing range and we are unable
2712 to tell here whether that is the case. To be safe we restrict all
2713 loads to that range. */
2714 range = ((mode) == HImode || (mode) == QImode)
2715 ? (arm_arch4 ? 256 : 4095) : 4096;
2717 return (code == CONST_INT
2718 && INTVAL (index) < range
2719 && INTVAL (index) > -range);
2723 #define REG_OR_SUBREG_REG(X) \
2724 (GET_CODE (X) == REG \
2725 || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))
2727 #define REG_OR_SUBREG_RTX(X) \
2728 (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))
2730 #ifndef COSTS_N_INSNS
2731 #define COSTS_N_INSNS(N) ((N) * 4 - 2)
2732 #endif
2735 arm_rtx_costs (x, code, outer)
2736 rtx x;
2737 enum rtx_code code;
2738 enum rtx_code outer;
2740 enum machine_mode mode = GET_MODE (x);
2741 enum rtx_code subcode;
2742 int extra_cost;
2744 if (TARGET_THUMB)
2746 switch (code)
2748 case ASHIFT:
2749 case ASHIFTRT:
2750 case LSHIFTRT:
2751 case ROTATERT:
2752 case PLUS:
2753 case MINUS:
2754 case COMPARE:
2755 case NEG:
2756 case NOT:
2757 return COSTS_N_INSNS (1);
2759 case MULT:
2760 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
2762 int cycles = 0;
2763 unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));
2765 while (i)
2767 i >>= 2;
2768 cycles++;
2770 return COSTS_N_INSNS (2) + cycles;
2772 return COSTS_N_INSNS (1) + 16;
2774 case SET:
2775 return (COSTS_N_INSNS (1)
2776 + 4 * ((GET_CODE (SET_SRC (x)) == MEM)
2777 + (GET_CODE (SET_DEST (x)) == MEM)));
2779 case CONST_INT:
2780 if (outer == SET)
2782 if ((unsigned HOST_WIDE_INT) INTVAL (x) < 256)
2783 return 0;
2784 if (thumb_shiftable_const (INTVAL (x)))
2785 return COSTS_N_INSNS (2);
2786 return COSTS_N_INSNS (3);
2788 else if (outer == PLUS
2789 && INTVAL (x) < 256 && INTVAL (x) > -256)
2790 return 0;
2791 else if (outer == COMPARE
2792 && (unsigned HOST_WIDE_INT) INTVAL (x) < 256)
2793 return 0;
2794 else if (outer == ASHIFT || outer == ASHIFTRT
2795 || outer == LSHIFTRT)
2796 return 0;
2797 return COSTS_N_INSNS (2);
2799 case CONST:
2800 case CONST_DOUBLE:
2801 case LABEL_REF:
2802 case SYMBOL_REF:
2803 return COSTS_N_INSNS (3);
2805 case UDIV:
2806 case UMOD:
2807 case DIV:
2808 case MOD:
2809 return 100;
2811 case TRUNCATE:
2812 return 99;
2814 case AND:
2815 case XOR:
2816 case IOR:
2817 /* XXX guess. */
2818 return 8;
2820 case ADDRESSOF:
2821 case MEM:
2822 /* XXX another guess. */
2823 /* Memory costs quite a lot for the first word, but subsequent words
2824 load at the equivalent of a single insn each. */
2825 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
2826 + ((GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
2827 ? 4 : 0));
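/* Worked example (illustrative): a DImode value is two words, so the
expression above yields 10 + 4 * ((8 - 1) / 4) = 14, against 10 for
a single SImode word.  */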
2829 case IF_THEN_ELSE:
2830 /* XXX a guess. */
2831 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
2832 return 14;
2833 return 2;
2835 case ZERO_EXTEND:
2836 /* XXX still guessing. */
2837 switch (GET_MODE (XEXP (x, 0)))
2839 case QImode:
2840 return (1 + (mode == DImode ? 4 : 0)
2841 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2843 case HImode:
2844 return (4 + (mode == DImode ? 4 : 0)
2845 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2847 case SImode:
2848 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2850 default:
2851 return 99;
2854 default:
2855 return 99;
2856 #if 0
2857 case FFS:
2858 case FLOAT:
2859 case FIX:
2860 case UNSIGNED_FIX:
2861 /* XXX guess */
2862 fprintf (stderr, "unexpected code for thumb in rtx_costs: %s\n",
2863 rtx_name[code]);
2864 abort ();
2865 #endif
2869 switch (code)
2871 case MEM:
2872 /* Memory costs quite a lot for the first word, but subsequent words
2873 load at the equivalent of a single insn each. */
2874 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
2875 + (GET_CODE (x) == SYMBOL_REF
2876 && CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));
2878 case DIV:
2879 case MOD:
2880 return 100;
2882 case ROTATE:
2883 if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
2884 return 4;
2885 /* Fall through */
2886 case ROTATERT:
2887 if (mode != SImode)
2888 return 8;
2889 /* Fall through */
2890 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
2891 if (mode == DImode)
2892 return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
2893 + ((GET_CODE (XEXP (x, 0)) == REG
2894 || (GET_CODE (XEXP (x, 0)) == SUBREG
2895 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
2896 ? 0 : 8));
2897 return (1 + ((GET_CODE (XEXP (x, 0)) == REG
2898 || (GET_CODE (XEXP (x, 0)) == SUBREG
2899 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
2900 ? 0 : 4)
2901 + ((GET_CODE (XEXP (x, 1)) == REG
2902 || (GET_CODE (XEXP (x, 1)) == SUBREG
2903 && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
2904 || (GET_CODE (XEXP (x, 1)) == CONST_INT))
2905 ? 0 : 4));
2907 case MINUS:
2908 if (mode == DImode)
2909 return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
2910 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
2911 || (GET_CODE (XEXP (x, 0)) == CONST_INT
2912 && const_ok_for_arm (INTVAL (XEXP (x, 0)))))
2913 ? 0 : 8));
2915 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2916 return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2917 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
2918 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
2919 ? 0 : 8)
2920 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
2921 || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
2922 && const_double_rtx_ok_for_fpu (XEXP (x, 0))))
2923 ? 0 : 8));
2925 if (((GET_CODE (XEXP (x, 0)) == CONST_INT
2926 && const_ok_for_arm (INTVAL (XEXP (x, 0)))
2927 && REG_OR_SUBREG_REG (XEXP (x, 1))))
2928 || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
2929 || subcode == ASHIFTRT || subcode == LSHIFTRT
2930 || subcode == ROTATE || subcode == ROTATERT
2931 || (subcode == MULT
2932 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
2933 && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
2934 (INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
2935 && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
2936 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
2937 || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
2938 && REG_OR_SUBREG_REG (XEXP (x, 0))))
2939 return 1;
2940 /* Fall through */
2942 case PLUS:
2943 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2944 return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
2945 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2946 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
2947 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
2948 ? 0 : 8));
2950 /* Fall through */
2951 case AND: case XOR: case IOR:
2952 extra_cost = 0;
2954 /* Normally the frame registers will be spilt into reg+const during
2955 reload, so it is a bad idea to combine them with other instructions,
2956 since then they might not be moved outside of loops. As a compromise
2957 we allow integration with ops that have a constant as their second
2958 operand. */
2959 if ((REG_OR_SUBREG_REG (XEXP (x, 0))
2960 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
2961 && GET_CODE (XEXP (x, 1)) != CONST_INT)
2962 || (REG_OR_SUBREG_REG (XEXP (x, 1))
2963 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 1)))))
2964 extra_cost = 4;
2966 if (mode == DImode)
2967 return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
2968 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2969 || (GET_CODE (XEXP (x, 1)) == CONST_INT
2970 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
2971 ? 0 : 8));
2973 if (REG_OR_SUBREG_REG (XEXP (x, 0)))
2974 return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
2975 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2976 || (GET_CODE (XEXP (x, 1)) == CONST_INT
2977 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
2978 ? 0 : 4));
2980 else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
2981 return (1 + extra_cost
2982 + ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
2983 || subcode == LSHIFTRT || subcode == ASHIFTRT
2984 || subcode == ROTATE || subcode == ROTATERT
2985 || (subcode == MULT
2986 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2987 && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
2988 (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0)))
2989 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
2990 && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
2991 || GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
2992 ? 0 : 4));
2994 return 8;
2996 case MULT:
2997 /* There is no point basing this on the tuning, since it is always the
2998 fast variant if it exists at all. */
2999 if (arm_fast_multiply && mode == DImode
3000 && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
3001 && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
3002 || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
3003 return 8;
3005 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3006 || mode == DImode)
3007 return 30;
3009 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
3011 unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
3012 & (unsigned HOST_WIDE_INT) 0xffffffff);
3013 int add_cost = const_ok_for_arm (i) ? 4 : 8;
3014 int j;
3016 /* Tune as appropriate. */
3017 int booth_unit_size = ((tune_flags & FL_FAST_MULT) ? 8 : 2);
3019 for (j = 0; i && j < 32; j += booth_unit_size)
3021 i >>= booth_unit_size;
3022 add_cost += 2;
3025 return add_cost;
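/* Worked example (illustrative): multiplying by 10 (binary 1010)
when const_ok_for_arm holds starts add_cost at 4; with the two bit
Booth unit of a non-FL_FAST_MULT core the loop then runs twice
(10 >> 2 = 2, 2 >> 2 = 0), giving a final cost of 8.  */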
3028 return (((tune_flags & FL_FAST_MULT) ? 8 : 30)
3029 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
3030 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));
3032 case TRUNCATE:
3033 if (arm_fast_multiply && mode == SImode
3034 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
3035 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
3036 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
3037 == GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)))
3038 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ZERO_EXTEND
3039 || GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SIGN_EXTEND))
3040 return 8;
3041 return 99;
3043 case NEG:
3044 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3045 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
3046 /* Fall through */
3047 case NOT:
3048 if (mode == DImode)
3049 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
3051 return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
3053 case IF_THEN_ELSE:
3054 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
3055 return 14;
3056 return 2;
3058 case COMPARE:
3059 return 1;
3061 case ABS:
3062 return 4 + (mode == DImode ? 4 : 0);
3064 case SIGN_EXTEND:
3065 if (GET_MODE (XEXP (x, 0)) == QImode)
3066 return (4 + (mode == DImode ? 4 : 0)
3067 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3068 /* Fall through */
3069 case ZERO_EXTEND:
3070 switch (GET_MODE (XEXP (x, 0)))
3072 case QImode:
3073 return (1 + (mode == DImode ? 4 : 0)
3074 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3076 case HImode:
3077 return (4 + (mode == DImode ? 4 : 0)
3078 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3080 case SImode:
3081 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3083 default:
3084 break;
3086 abort ();
3088 case CONST_INT:
3089 if (const_ok_for_arm (INTVAL (x)))
3090 return outer == SET ? 2 : -1;
3091 else if (outer == AND
3092 && const_ok_for_arm (~INTVAL (x)))
3093 return -1;
3094 else if ((outer == COMPARE
3095 || outer == PLUS || outer == MINUS)
3096 && const_ok_for_arm (-INTVAL (x)))
3097 return -1;
3098 else
3099 return 5;
3101 case CONST:
3102 case LABEL_REF:
3103 case SYMBOL_REF:
3104 return 6;
3106 case CONST_DOUBLE:
3107 if (const_double_rtx_ok_for_fpu (x))
3108 return outer == SET ? 2 : -1;
3109 else if ((outer == COMPARE || outer == PLUS)
3110 && neg_const_double_rtx_ok_for_fpu (x))
3111 return -1;
3112 return 7;
3114 default:
3115 return 99;
3119 static int
3120 arm_adjust_cost (insn, link, dep, cost)
3121 rtx insn;
3122 rtx link;
3123 rtx dep;
3124 int cost;
3126 rtx i_pat, d_pat;
3128 /* Some true dependencies can have a higher cost depending
3129 on precisely how certain input operands are used. */
3130 if (arm_is_xscale
3131 && REG_NOTE_KIND (link) == 0
3132 && recog_memoized (insn) >= 0
3133 && recog_memoized (dep) >= 0)
3135 int shift_opnum = get_attr_shift (insn);
3136 enum attr_type attr_type = get_attr_type (dep);
3138 /* If nonzero, SHIFT_OPNUM contains the operand number of a shifted
3139 operand for INSN. If we have a shifted input operand and the
3140 instruction we depend on is another ALU instruction, then we may
3141 have to account for an additional stall. */
3142 if (shift_opnum != 0 && attr_type == TYPE_NORMAL)
3144 rtx shifted_operand;
3145 int opno;
3147 /* Get the shifted operand. */
3148 extract_insn (insn);
3149 shifted_operand = recog_data.operand[shift_opnum];
3151 /* Iterate over all the operands in DEP. If we write an operand
3152 that overlaps with SHIFTED_OPERAND, then we have to increase the
3153 cost of this dependency. */
3154 extract_insn (dep);
3155 preprocess_constraints ();
3156 for (opno = 0; opno < recog_data.n_operands; opno++)
3158 /* We can ignore strict inputs. */
3159 if (recog_data.operand_type[opno] == OP_IN)
3160 continue;
3162 if (reg_overlap_mentioned_p (recog_data.operand[opno],
3163 shifted_operand))
3164 return 2;
3169 /* XXX This is not strictly true for the FPA. */
3170 if (REG_NOTE_KIND (link) == REG_DEP_ANTI
3171 || REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
3172 return 0;
3174 /* Call insns don't incur a stall, even if they follow a load. */
3175 if (REG_NOTE_KIND (link) == 0
3176 && GET_CODE (insn) == CALL_INSN)
3177 return 1;
3179 if ((i_pat = single_set (insn)) != NULL
3180 && GET_CODE (SET_SRC (i_pat)) == MEM
3181 && (d_pat = single_set (dep)) != NULL
3182 && GET_CODE (SET_DEST (d_pat)) == MEM)
3184 rtx src_mem = XEXP (SET_SRC (i_pat), 0);
3185 /* This is a load after a store; there is no conflict if the load reads
3186 from a cached area. Assume that loads from the stack and from the
3187 constant pool are cached, and that others will miss. This is a
3188 hack. */
3190 if ((GET_CODE (src_mem) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (src_mem))
3191 || reg_mentioned_p (stack_pointer_rtx, src_mem)
3192 || reg_mentioned_p (frame_pointer_rtx, src_mem)
3193 || reg_mentioned_p (hard_frame_pointer_rtx, src_mem))
3194 return 1;
3197 return cost;
3200 /* This code has been fixed for cross compilation. */
3202 static int fpa_consts_inited = 0;
3204 static const char * const strings_fpa[8] =
3206 "0", "1", "2", "3",
3207 "4", "5", "0.5", "10"
3210 static REAL_VALUE_TYPE values_fpa[8];
3212 static void
3213 init_fpa_table ()
3215 int i;
3216 REAL_VALUE_TYPE r;
3218 for (i = 0; i < 8; i++)
3220 r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
3221 values_fpa[i] = r;
3224 fpa_consts_inited = 1;
3227 /* Return TRUE if rtx X is a valid immediate FPU constant. */
3230 const_double_rtx_ok_for_fpu (x)
3231 rtx x;
3233 REAL_VALUE_TYPE r;
3234 int i;
3236 if (!fpa_consts_inited)
3237 init_fpa_table ();
3239 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3240 if (REAL_VALUE_MINUS_ZERO (r))
3241 return 0;
3243 for (i = 0; i < 8; i++)
3244 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
3245 return 1;
3247 return 0;
3250 /* Return TRUE if rtx X is a valid immediate FPU constant when negated. */
3253 neg_const_double_rtx_ok_for_fpu (x)
3254 rtx x;
3256 REAL_VALUE_TYPE r;
3257 int i;
3259 if (!fpa_consts_inited)
3260 init_fpa_table ();
3262 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3263 r = REAL_VALUE_NEGATE (r);
3264 if (REAL_VALUE_MINUS_ZERO (r))
3265 return 0;
3267 for (i = 0; i < 8; i++)
3268 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
3269 return 1;
3271 return 0;
3274 /* Predicates for `match_operand' and `match_operator'. */
3276 /* s_register_operand is the same as register_operand, but it doesn't accept
3277 (SUBREG (MEM)...).
3279 This function exists because at the time it was put in it led to better
3280 code. SUBREG(MEM) always needs a reload in the places where
3281 s_register_operand is used, and this seemed to lead to excessive
3282 reloading. */
3285 s_register_operand (op, mode)
3286 rtx op;
3287 enum machine_mode mode;
3289 if (GET_MODE (op) != mode && mode != VOIDmode)
3290 return 0;
3292 if (GET_CODE (op) == SUBREG)
3293 op = SUBREG_REG (op);
3295 /* We don't consider registers whose class is NO_REGS
3296 to be a register operand. */
3297 /* XXX might have to check for lo regs only for thumb ??? */
3298 return (GET_CODE (op) == REG
3299 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3300 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
3303 /* A hard register operand (even before reload). */
3306 arm_hard_register_operand (op, mode)
3307 rtx op;
3308 enum machine_mode mode;
3310 if (GET_MODE (op) != mode && mode != VOIDmode)
3311 return 0;
3313 return (GET_CODE (op) == REG
3314 && REGNO (op) < FIRST_PSEUDO_REGISTER);
3317 /* Only accept reg, subreg(reg), const_int. */
3320 reg_or_int_operand (op, mode)
3321 rtx op;
3322 enum machine_mode mode;
3324 if (GET_CODE (op) == CONST_INT)
3325 return 1;
3327 if (GET_MODE (op) != mode && mode != VOIDmode)
3328 return 0;
3330 if (GET_CODE (op) == SUBREG)
3331 op = SUBREG_REG (op);
3333 /* We don't consider registers whose class is NO_REGS
3334 to be a register operand. */
3335 return (GET_CODE (op) == REG
3336 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3337 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
3340 /* Return 1 if OP is an item in memory, given that we are in reload. */
3343 arm_reload_memory_operand (op, mode)
3344 rtx op;
3345 enum machine_mode mode ATTRIBUTE_UNUSED;
3347 int regno = true_regnum (op);
3349 return (!CONSTANT_P (op)
3350 && (regno == -1
3351 || (GET_CODE (op) == REG
3352 && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
3355 /* Return 1 if OP is a valid memory address, but not valid for a signed byte
3356 memory access (architecture V4).
3357 MODE is QImode if called when computing constraints, or VOIDmode when
3358 emitting patterns. In this latter case we cannot use memory_operand()
3359 because it will fail on badly formed MEMs, which is precisely what we are
3360 trying to catch. */
3363 bad_signed_byte_operand (op, mode)
3364 rtx op;
3365 enum machine_mode mode ATTRIBUTE_UNUSED;
3367 #if 0
3368 if ((mode == QImode && !memory_operand (op, mode)) || GET_CODE (op) != MEM)
3369 return 0;
3370 #endif
3371 if (GET_CODE (op) != MEM)
3372 return 0;
3374 op = XEXP (op, 0);
3376 /* A sum of anything more complex than reg + reg or reg + const is bad. */
3377 if ((GET_CODE (op) == PLUS || GET_CODE (op) == MINUS)
3378 && (!s_register_operand (XEXP (op, 0), VOIDmode)
3379 || (!s_register_operand (XEXP (op, 1), VOIDmode)
3380 && GET_CODE (XEXP (op, 1)) != CONST_INT)))
3381 return 1;
3383 /* Big constants are also bad. */
3384 if (GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT
3385 && (INTVAL (XEXP (op, 1)) > 0xff
3386 || -INTVAL (XEXP (op, 1)) > 0xff))
3387 return 1;
3389 /* Everything else is good, or will automatically be made so. */
3390 return 0;
3393 /* Return TRUE for valid operands for the rhs of an ARM instruction. */
3396 arm_rhs_operand (op, mode)
3397 rtx op;
3398 enum machine_mode mode;
3400 return (s_register_operand (op, mode)
3401 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
3404 /* Return TRUE for valid operands for the
3405 rhs of an ARM instruction, or a load. */
3408 arm_rhsm_operand (op, mode)
3409 rtx op;
3410 enum machine_mode mode;
3412 return (s_register_operand (op, mode)
3413 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
3414 || memory_operand (op, mode));
3417 /* Return TRUE for valid operands for the rhs of an ARM instruction, or a
3418 constant that is valid when negated. */
3421 arm_add_operand (op, mode)
3422 rtx op;
3423 enum machine_mode mode;
3425 if (TARGET_THUMB)
3426 return thumb_cmp_operand (op, mode);
3428 return (s_register_operand (op, mode)
3429 || (GET_CODE (op) == CONST_INT
3430 && (const_ok_for_arm (INTVAL (op))
3431 || const_ok_for_arm (-INTVAL (op)))));
3435 arm_not_operand (op, mode)
3436 rtx op;
3437 enum machine_mode mode;
3439 return (s_register_operand (op, mode)
3440 || (GET_CODE (op) == CONST_INT
3441 && (const_ok_for_arm (INTVAL (op))
3442 || const_ok_for_arm (~INTVAL (op)))));
3445 /* Return TRUE if the operand is a memory reference which contains an
3446 offsettable address. */
3449 offsettable_memory_operand (op, mode)
3450 rtx op;
3451 enum machine_mode mode;
3453 if (mode == VOIDmode)
3454 mode = GET_MODE (op);
3456 return (mode == GET_MODE (op)
3457 && GET_CODE (op) == MEM
3458 && offsettable_address_p (reload_completed | reload_in_progress,
3459 mode, XEXP (op, 0)));
3462 /* Return TRUE if the operand is a memory reference which is, or can be
3463 made word aligned by adjusting the offset. */
3466 alignable_memory_operand (op, mode)
3467 rtx op;
3468 enum machine_mode mode;
3470 rtx reg;
3472 if (mode == VOIDmode)
3473 mode = GET_MODE (op);
3475 if (mode != GET_MODE (op) || GET_CODE (op) != MEM)
3476 return 0;
3478 op = XEXP (op, 0);
3480 return ((GET_CODE (reg = op) == REG
3481 || (GET_CODE (op) == SUBREG
3482 && GET_CODE (reg = SUBREG_REG (op)) == REG)
3483 || (GET_CODE (op) == PLUS
3484 && GET_CODE (XEXP (op, 1)) == CONST_INT
3485 && (GET_CODE (reg = XEXP (op, 0)) == REG
3486 || (GET_CODE (XEXP (op, 0)) == SUBREG
3487 && GET_CODE (reg = SUBREG_REG (XEXP (op, 0))) == REG))))
3488 && REGNO_POINTER_ALIGN (REGNO (reg)) >= 32);
3491 /* Similar to s_register_operand, but does not allow hard integer
3492 registers. */
3495 f_register_operand (op, mode)
3496 rtx op;
3497 enum machine_mode mode;
3499 if (GET_MODE (op) != mode && mode != VOIDmode)
3500 return 0;
3502 if (GET_CODE (op) == SUBREG)
3503 op = SUBREG_REG (op);
3505 /* We don't consider registers whose class is NO_REGS
3506 to be a register operand. */
3507 return (GET_CODE (op) == REG
3508 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3509 || REGNO_REG_CLASS (REGNO (op)) == FPU_REGS));
3512 /* Return TRUE for valid operands for the rhs of an FPU instruction. */
3515 fpu_rhs_operand (op, mode)
3516 rtx op;
3517 enum machine_mode mode;
3519 if (s_register_operand (op, mode))
3520 return TRUE;
3522 if (GET_MODE (op) != mode && mode != VOIDmode)
3523 return FALSE;
3525 if (GET_CODE (op) == CONST_DOUBLE)
3526 return const_double_rtx_ok_for_fpu (op);
3528 return FALSE;
3532 fpu_add_operand (op, mode)
3533 rtx op;
3534 enum machine_mode mode;
3536 if (s_register_operand (op, mode))
3537 return TRUE;
3539 if (GET_MODE (op) != mode && mode != VOIDmode)
3540 return FALSE;
3542 if (GET_CODE (op) == CONST_DOUBLE)
3543 return (const_double_rtx_ok_for_fpu (op)
3544 || neg_const_double_rtx_ok_for_fpu (op));
3546 return FALSE;
3549 /* Return nonzero if OP is a constant power of two. */
3552 power_of_two_operand (op, mode)
3553 rtx op;
3554 enum machine_mode mode ATTRIBUTE_UNUSED;
3556 if (GET_CODE (op) == CONST_INT)
3558 HOST_WIDE_INT value = INTVAL (op);
3560 return value != 0 && (value & (value - 1)) == 0;
3563 return FALSE;
3566 /* Return TRUE for a valid operand of a DImode operation.
3567 Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
3568 Note that this disallows MEM(REG+REG), but allows
3569 MEM(PRE/POST_INC/DEC(REG)). */
3572 di_operand (op, mode)
3573 rtx op;
3574 enum machine_mode mode;
3576 if (s_register_operand (op, mode))
3577 return TRUE;
3579 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
3580 return FALSE;
3582 if (GET_CODE (op) == SUBREG)
3583 op = SUBREG_REG (op);
3585 switch (GET_CODE (op))
3587 case CONST_DOUBLE:
3588 case CONST_INT:
3589 return TRUE;
3591 case MEM:
3592 return memory_address_p (DImode, XEXP (op, 0));
3594 default:
3595 return FALSE;
3599 /* Like di_operand, but don't accept constants. */
3602 nonimmediate_di_operand (op, mode)
3603 rtx op;
3604 enum machine_mode mode;
3606 if (s_register_operand (op, mode))
3607 return TRUE;
3609 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
3610 return FALSE;
3612 if (GET_CODE (op) == SUBREG)
3613 op = SUBREG_REG (op);
3615 if (GET_CODE (op) == MEM)
3616 return memory_address_p (DImode, XEXP (op, 0));
3618 return FALSE;
3621 /* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
3622 Either: REG, SUBREG, CONST_DOUBLE or MEM(DFmode_address).
3623 Note that this disallows MEM(REG+REG), but allows
3624 MEM(PRE/POST_INC/DEC(REG)). */
3627 soft_df_operand (op, mode)
3628 rtx op;
3629 enum machine_mode mode;
3631 if (s_register_operand (op, mode))
3632 return TRUE;
3634 if (mode != VOIDmode && GET_MODE (op) != mode)
3635 return FALSE;
3637 if (GET_CODE (op) == SUBREG && CONSTANT_P (SUBREG_REG (op)))
3638 return FALSE;
3640 if (GET_CODE (op) == SUBREG)
3641 op = SUBREG_REG (op);
3643 switch (GET_CODE (op))
3645 case CONST_DOUBLE:
3646 return TRUE;
3648 case MEM:
3649 return memory_address_p (DFmode, XEXP (op, 0));
3651 default:
3652 return FALSE;
3656 /* Like soft_df_operand, but don't accept constants. */
3659 nonimmediate_soft_df_operand (op, mode)
3660 rtx op;
3661 enum machine_mode mode;
3663 if (s_register_operand (op, mode))
3664 return TRUE;
3666 if (mode != VOIDmode && GET_MODE (op) != mode)
3667 return FALSE;
3669 if (GET_CODE (op) == SUBREG)
3670 op = SUBREG_REG (op);
3672 if (GET_CODE (op) == MEM)
3673 return memory_address_p (DFmode, XEXP (op, 0));
3674 return FALSE;
3677 /* Return TRUE for valid index operands. */
3680 index_operand (op, mode)
3681 rtx op;
3682 enum machine_mode mode;
3684 return (s_register_operand (op, mode)
3685 || (immediate_operand (op, mode)
3686 && (GET_CODE (op) != CONST_INT
3687 || (INTVAL (op) < 4096 && INTVAL (op) > -4096))));
3690 /* Return TRUE for valid shifts by a constant. This also accepts any
3691 power of two on the (somewhat overly relaxed) assumption that the
3692 shift operator in this case was a mult. */
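/* For example (illustrative): for (ashift (reg r1) (const_int 2)) the
shift count 2 is accepted directly, while for the canonical address
form (mult (reg r1) (const_int 4)) the "count" 4 is accepted because
it is a power of two standing for a shift by 2.  */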
3695 const_shift_operand (op, mode)
3696 rtx op;
3697 enum machine_mode mode;
3699 return (power_of_two_operand (op, mode)
3700 || (immediate_operand (op, mode)
3701 && (GET_CODE (op) != CONST_INT
3702 || (INTVAL (op) < 32 && INTVAL (op) > 0))));
3705 /* Return TRUE for arithmetic operators which can be combined with a multiply
3706 (shift). */
3709 shiftable_operator (x, mode)
3710 rtx x;
3711 enum machine_mode mode;
3713 enum rtx_code code;
3715 if (GET_MODE (x) != mode)
3716 return FALSE;
3718 code = GET_CODE (x);
3720 return (code == PLUS || code == MINUS
3721 || code == IOR || code == XOR || code == AND);
3724 /* Return TRUE for binary logical operators. */
3727 logical_binary_operator (x, mode)
3728 rtx x;
3729 enum machine_mode mode;
3731 enum rtx_code code;
3733 if (GET_MODE (x) != mode)
3734 return FALSE;
3736 code = GET_CODE (x);
3738 return (code == IOR || code == XOR || code == AND);
3741 /* Return TRUE for shift operators. */
3744 shift_operator (x, mode)
3745 rtx x;
3746 enum machine_mode mode;
3748 enum rtx_code code;
3750 if (GET_MODE (x) != mode)
3751 return FALSE;
3753 code = GET_CODE (x);
3755 if (code == MULT)
3756 return power_of_two_operand (XEXP (x, 1), mode);
3758 return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
3759 || code == ROTATERT);
3762 /* Return TRUE if x is EQ or NE. */
3765 equality_operator (x, mode)
3766 rtx x;
3767 enum machine_mode mode ATTRIBUTE_UNUSED;
3769 return GET_CODE (x) == EQ || GET_CODE (x) == NE;
3772 /* Return TRUE if x is a comparison operator other than LTGT or UNEQ. */
3775 arm_comparison_operator (x, mode)
3776 rtx x;
3777 enum machine_mode mode;
3779 return (comparison_operator (x, mode)
3780 && GET_CODE (x) != LTGT
3781 && GET_CODE (x) != UNEQ);
3784 /* Return TRUE for SMIN SMAX UMIN UMAX operators. */
3787 minmax_operator (x, mode)
3788 rtx x;
3789 enum machine_mode mode;
3791 enum rtx_code code = GET_CODE (x);
3793 if (GET_MODE (x) != mode)
3794 return FALSE;
3796 return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
3799 /* Return TRUE if this is the condition code register; if we aren't given
3800 a mode, accept any class CCmode register. */
3803 cc_register (x, mode)
3804 rtx x;
3805 enum machine_mode mode;
3807 if (mode == VOIDmode)
3809 mode = GET_MODE (x);
3811 if (GET_MODE_CLASS (mode) != MODE_CC)
3812 return FALSE;
3815 if ( GET_MODE (x) == mode
3816 && GET_CODE (x) == REG
3817 && REGNO (x) == CC_REGNUM)
3818 return TRUE;
3820 return FALSE;
3823 /* Return TRUE if this is the condition code register; if we aren't given
3824 a mode, accept any class CCmode register which indicates a dominance
3825 expression. */
3828 dominant_cc_register (x, mode)
3829 rtx x;
3830 enum machine_mode mode;
3832 if (mode == VOIDmode)
3834 mode = GET_MODE (x);
3836 if (GET_MODE_CLASS (mode) != MODE_CC)
3837 return FALSE;
3840 if ( mode != CC_DNEmode && mode != CC_DEQmode
3841 && mode != CC_DLEmode && mode != CC_DLTmode
3842 && mode != CC_DGEmode && mode != CC_DGTmode
3843 && mode != CC_DLEUmode && mode != CC_DLTUmode
3844 && mode != CC_DGEUmode && mode != CC_DGTUmode)
3845 return FALSE;
3847 return cc_register (x, mode);
3850 /* Return TRUE if X references a SYMBOL_REF. */
3853 symbol_mentioned_p (x)
3854 rtx x;
3856 const char * fmt;
3857 int i;
3859 if (GET_CODE (x) == SYMBOL_REF)
3860 return 1;
3862 fmt = GET_RTX_FORMAT (GET_CODE (x));
3864 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3866 if (fmt[i] == 'E')
3868 int j;
3870 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3871 if (symbol_mentioned_p (XVECEXP (x, i, j)))
3872 return 1;
3874 else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
3875 return 1;
3878 return 0;
3881 /* Return TRUE if X references a LABEL_REF. */
3884 label_mentioned_p (x)
3885 rtx x;
3887 const char * fmt;
3888 int i;
3890 if (GET_CODE (x) == LABEL_REF)
3891 return 1;
3893 fmt = GET_RTX_FORMAT (GET_CODE (x));
3894 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3896 if (fmt[i] == 'E')
3898 int j;
3900 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3901 if (label_mentioned_p (XVECEXP (x, i, j)))
3902 return 1;
3904 else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
3905 return 1;
3908 return 0;
3911 enum rtx_code
3912 minmax_code (x)
3913 rtx x;
3915 enum rtx_code code = GET_CODE (x);
3917 if (code == SMAX)
3918 return GE;
3919 else if (code == SMIN)
3920 return LE;
3921 else if (code == UMIN)
3922 return LEU;
3923 else if (code == UMAX)
3924 return GEU;
3926 abort ();
3929 /* Return 1 if memory locations are adjacent. */
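/* Illustrative example: (mem (plus (reg r4) (const_int 8))) and
(mem (plus (reg r4) (const_int 12))) are adjacent (same base register,
one word apart); offsets 8 and 20 from r4, or offset 8 from two
different base registers, are not.  */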
3932 adjacent_mem_locations (a, b)
3933 rtx a, b;
3935 if ((GET_CODE (XEXP (a, 0)) == REG
3936 || (GET_CODE (XEXP (a, 0)) == PLUS
3937 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
3938 && (GET_CODE (XEXP (b, 0)) == REG
3939 || (GET_CODE (XEXP (b, 0)) == PLUS
3940 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
3942 int val0 = 0, val1 = 0;
3943 int reg0, reg1;
3945 if (GET_CODE (XEXP (a, 0)) == PLUS)
3947 reg0 = REGNO (XEXP (XEXP (a, 0), 0));
3948 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
3950 else
3951 reg0 = REGNO (XEXP (a, 0));
3953 if (GET_CODE (XEXP (b, 0)) == PLUS)
3955 reg1 = REGNO (XEXP (XEXP (b, 0), 0));
3956 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
3958 else
3959 reg1 = REGNO (XEXP (b, 0));
3961 return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
3963 return 0;
3966 /* Return 1 if OP is a load multiple operation. It is known to be
3967 parallel and the first section will be tested. */
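/* A minimal sketch of a PARALLEL this accepts (illustrative only):
(parallel
[(set (reg:SI 4) (mem:SI (reg:SI 0)))
(set (reg:SI 5) (mem:SI (plus:SI (reg:SI 0) (const_int 4))))])
i.e. an ldmia of r4 and r5 from the address in r0; the destination
registers must ascend and the offsets must step by four.  */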
3970 load_multiple_operation (op, mode)
3971 rtx op;
3972 enum machine_mode mode ATTRIBUTE_UNUSED;
3974 HOST_WIDE_INT count = XVECLEN (op, 0);
3975 int dest_regno;
3976 rtx src_addr;
3977 HOST_WIDE_INT i = 1, base = 0;
3978 rtx elt;
3980 if (count <= 1
3981 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
3982 return 0;
3984 /* Check to see if this might be a write-back. */
3985 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
3987 i++;
3988 base = 1;
3990 /* Now check it more carefully. */
3991 if (GET_CODE (SET_DEST (elt)) != REG
3992 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
3993 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
3994 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
3995 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
3996 return 0;
3999 /* Perform a quick check so we don't blow up below. */
4000 if (count <= i
4001 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
4002 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
4003 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
4004 return 0;
4006 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
4007 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);
4009 for (; i < count; i++)
4011 elt = XVECEXP (op, 0, i);
4013 if (GET_CODE (elt) != SET
4014 || GET_CODE (SET_DEST (elt)) != REG
4015 || GET_MODE (SET_DEST (elt)) != SImode
4016 || REGNO (SET_DEST (elt)) != (unsigned int)(dest_regno + i - base)
4017 || GET_CODE (SET_SRC (elt)) != MEM
4018 || GET_MODE (SET_SRC (elt)) != SImode
4019 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
4020 || !rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
4021 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
4022 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
4023 return 0;
4026 return 1;
4029 /* Return 1 if OP is a store multiple operation. It is known to be
4030 parallel and the first section will be tested. */
4033 store_multiple_operation (op, mode)
4034 rtx op;
4035 enum machine_mode mode ATTRIBUTE_UNUSED;
4037 HOST_WIDE_INT count = XVECLEN (op, 0);
4038 int src_regno;
4039 rtx dest_addr;
4040 HOST_WIDE_INT i = 1, base = 0;
4041 rtx elt;
4043 if (count <= 1
4044 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
4045 return 0;
4047 /* Check to see if this might be a write-back. */
4048 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
4050 i++;
4051 base = 1;
4053 /* Now check it more carefully. */
4054 if (GET_CODE (SET_DEST (elt)) != REG
4055 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
4056 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
4057 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
4058 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
4059 return 0;
4062 /* Perform a quick check so we don't blow up below. */
4063 if (count <= i
4064 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
4065 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
4066 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
4067 return 0;
4069 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
4070 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);
4072 for (; i < count; i++)
4074 elt = XVECEXP (op, 0, i);
4076 if (GET_CODE (elt) != SET
4077 || GET_CODE (SET_SRC (elt)) != REG
4078 || GET_MODE (SET_SRC (elt)) != SImode
4079 || REGNO (SET_SRC (elt)) != (unsigned int)(src_regno + i - base)
4080 || GET_CODE (SET_DEST (elt)) != MEM
4081 || GET_MODE (SET_DEST (elt)) != SImode
4082 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
4083 || !rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
4084 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
4085 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
4086 return 0;
4089 return 1;
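/* Illustrative counterpart to the load case above: a store multiple
   simply swaps the REG and MEM sides of each SET, and a matched
   instance would typically end up emitted as something like

     stmia r0!, {r4, r5, r6}

   (a sketch; the exact operands depend on the insn being matched).  */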
4093 load_multiple_sequence (operands, nops, regs, base, load_offset)
4094 rtx * operands;
4095 int nops;
4096 int * regs;
4097 int * base;
4098 HOST_WIDE_INT * load_offset;
4100 int unsorted_regs[4];
4101 HOST_WIDE_INT unsorted_offsets[4];
4102 int order[4];
4103 int base_reg = -1;
4104 int i;
4106 /* Can only handle 2, 3, or 4 insns at present,
4107 though could be easily extended if required. */
4108 if (nops < 2 || nops > 4)
4109 abort ();
4111 /* Loop over the operands and check that the memory references are
4112 suitable (i.e. immediate offsets from the same base register). At
4113 the same time, extract the target register, and the memory
4114 offsets. */
4115 for (i = 0; i < nops; i++)
4117 rtx reg;
4118 rtx offset;
4120 /* Convert a subreg of a mem into the mem itself. */
4121 if (GET_CODE (operands[nops + i]) == SUBREG)
4122 operands[nops + i] = alter_subreg (operands + (nops + i));
4124 if (GET_CODE (operands[nops + i]) != MEM)
4125 abort ();
4127 /* Don't reorder volatile memory references; it doesn't seem worth
4128 looking for the case where the order is ok anyway. */
4129 if (MEM_VOLATILE_P (operands[nops + i]))
4130 return 0;
4132 offset = const0_rtx;
4134 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
4135 || (GET_CODE (reg) == SUBREG
4136 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4137 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
4138 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
4139 == REG)
4140 || (GET_CODE (reg) == SUBREG
4141 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4142 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
4143 == CONST_INT)))
4145 if (i == 0)
4147 base_reg = REGNO (reg);
4148 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
4149 ? REGNO (operands[i])
4150 : REGNO (SUBREG_REG (operands[i])));
4151 order[0] = 0;
4153 else
4155 if (base_reg != (int) REGNO (reg))
4156 /* Not addressed from the same base register. */
4157 return 0;
4159 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
4160 ? REGNO (operands[i])
4161 : REGNO (SUBREG_REG (operands[i])));
4162 if (unsorted_regs[i] < unsorted_regs[order[0]])
4163 order[0] = i;
4166 /* If it isn't an integer register, or if it overwrites the
4167 base register but isn't the last insn in the list, then
4168 we can't do this. */
4169 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14
4170 || (i != nops - 1 && unsorted_regs[i] == base_reg))
4171 return 0;
4173 unsorted_offsets[i] = INTVAL (offset);
4175 else
4176 /* Not a suitable memory address. */
4177 return 0;
4180 /* All the useful information has now been extracted from the
4181 operands into unsorted_regs and unsorted_offsets; additionally,
4182 order[0] has been set to the lowest numbered register in the
4183 list. Sort the registers into order, and check that the memory
4184 offsets are ascending and adjacent. */
4186 for (i = 1; i < nops; i++)
4188 int j;
4190 order[i] = order[i - 1];
4191 for (j = 0; j < nops; j++)
4192 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
4193 && (order[i] == order[i - 1]
4194 || unsorted_regs[j] < unsorted_regs[order[i]]))
4195 order[i] = j;
4197 /* Have we found a suitable register? If not, one must be used more
4198 than once. */
4199 if (order[i] == order[i - 1])
4200 return 0;
4202 /* Is the memory address adjacent and ascending? */
4203 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
4204 return 0;
4207 if (base)
4209 *base = base_reg;
4211 for (i = 0; i < nops; i++)
4212 regs[i] = unsorted_regs[order[i]];
4214 *load_offset = unsorted_offsets[order[0]];
4217 if (unsorted_offsets[order[0]] == 0)
4218 return 1; /* ldmia */
4220 if (unsorted_offsets[order[0]] == 4)
4221 return 2; /* ldmib */
4223 if (unsorted_offsets[order[nops - 1]] == 0)
4224 return 3; /* ldmda */
4226 if (unsorted_offsets[order[nops - 1]] == -4)
4227 return 4; /* ldmdb */
4229 /* For ARM8,9 & StrongARM, 2 ldr instructions are faster than an ldm
4230 if the offset isn't small enough. The reason 2 ldrs are faster
4231 is that these ARMs are able to do more than one cache access
4232 in a single cycle. The ARM9 and StrongARM have Harvard caches,
4233 whilst the ARM8 has a double bandwidth cache. This means that
4234 these cores can do both an instruction fetch and a data fetch in
4235 a single cycle, so the trick of calculating the address into a
4236 scratch register (one of the result regs) and then doing a load
4237 multiple actually becomes slower (and no smaller in code size).
4238 That is the transformation
4240 ldr rd1, [rbase + offset]
4241 ldr rd2, [rbase + offset + 4]
4243 to
4245 add rd1, rbase, offset
4246 ldmia rd1, {rd1, rd2}
4248 produces worse code -- '3 cycles + any stalls on rd2' instead of
4249 '2 cycles + any stalls on rd2'. On ARMs with only one cache
4250 access per cycle, the first sequence could never complete in less
4251 than 6 cycles, whereas the ldm sequence would only take 5 and
4252 would make better use of sequential accesses if not hitting the
4253 cache.
4255 We cheat here and test 'arm_ld_sched' which we currently know to
4256 only be true for the ARM8, ARM9 and StrongARM. If this ever
4257 changes, then the test below needs to be reworked. */
4258 if (nops == 2 && arm_ld_sched)
4259 return 0;
4261 /* Can't do it without setting up the offset; only do this if it takes
4262 no more than one insn. */
4263 return (const_ok_for_arm (unsorted_offsets[order[0]])
4264 || const_ok_for_arm (-unsorted_offsets[order[0]])) ? 5 : 0;
4267 const char *
4268 emit_ldm_seq (operands, nops)
4269 rtx * operands;
4270 int nops;
4272 int regs[4];
4273 int base_reg;
4274 HOST_WIDE_INT offset;
4275 char buf[100];
4276 int i;
4278 switch (load_multiple_sequence (operands, nops, regs, &base_reg, &offset))
4280 case 1:
4281 strcpy (buf, "ldm%?ia\t");
4282 break;
4284 case 2:
4285 strcpy (buf, "ldm%?ib\t");
4286 break;
4288 case 3:
4289 strcpy (buf, "ldm%?da\t");
4290 break;
4292 case 4:
4293 strcpy (buf, "ldm%?db\t");
4294 break;
4296 case 5:
4297 if (offset >= 0)
4298 sprintf (buf, "add%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
4299 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
4300 (long) offset);
4301 else
4302 sprintf (buf, "sub%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
4303 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
4304 (long) -offset);
4305 output_asm_insn (buf, operands);
4306 base_reg = regs[0];
4307 strcpy (buf, "ldm%?ia\t");
4308 break;
4310 default:
4311 abort ();
4314 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
4315 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
4317 for (i = 1; i < nops; i++)
4318 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
4319 reg_names[regs[i]]);
4321 strcat (buf, "}\t%@ phole ldm");
4323 output_asm_insn (buf, operands);
4324 return "";
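/* An illustrative example (not an exhaustive list of cases): for three
   registers loaded from [r0, #0], [r0, #4] and [r0, #8],
   load_multiple_sequence returns 1 and the code above emits

     ldmia r0, {r4, r5, r6} @ phole ldm

   whereas a base offset such as #16 selects case 5, so an add (or a
   sub, for a negative offset) is emitted first to form the address.  */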
4328 store_multiple_sequence (operands, nops, regs, base, load_offset)
4329 rtx * operands;
4330 int nops;
4331 int * regs;
4332 int * base;
4333 HOST_WIDE_INT * load_offset;
4335 int unsorted_regs[4];
4336 HOST_WIDE_INT unsorted_offsets[4];
4337 int order[4];
4338 int base_reg = -1;
4339 int i;
4341 /* Can only handle 2, 3, or 4 insns at present, though could be easily
4342 extended if required. */
4343 if (nops < 2 || nops > 4)
4344 abort ();
4346 /* Loop over the operands and check that the memory references are
4347 suitable (i.e. immediate offsets from the same base register). At
4348 the same time, extract the target register, and the memory
4349 offsets. */
4350 for (i = 0; i < nops; i++)
4352 rtx reg;
4353 rtx offset;
4355 /* Convert a subreg of a mem into the mem itself. */
4356 if (GET_CODE (operands[nops + i]) == SUBREG)
4357 operands[nops + i] = alter_subreg (operands + (nops + i));
4359 if (GET_CODE (operands[nops + i]) != MEM)
4360 abort ();
4362 /* Don't reorder volatile memory references; it doesn't seem worth
4363 looking for the case where the order is ok anyway. */
4364 if (MEM_VOLATILE_P (operands[nops + i]))
4365 return 0;
4367 offset = const0_rtx;
4369 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
4370 || (GET_CODE (reg) == SUBREG
4371 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4372 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
4373 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
4374 == REG)
4375 || (GET_CODE (reg) == SUBREG
4376 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4377 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
4378 == CONST_INT)))
4380 if (i == 0)
4382 base_reg = REGNO (reg);
4383 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
4384 ? REGNO (operands[i])
4385 : REGNO (SUBREG_REG (operands[i])));
4386 order[0] = 0;
4388 else
4390 if (base_reg != (int) REGNO (reg))
4391 /* Not addressed from the same base register. */
4392 return 0;
4394 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
4395 ? REGNO (operands[i])
4396 : REGNO (SUBREG_REG (operands[i])));
4397 if (unsorted_regs[i] < unsorted_regs[order[0]])
4398 order[0] = i;
4401 /* If it isn't an integer register, then we can't do this. */
4402 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14)
4403 return 0;
4405 unsorted_offsets[i] = INTVAL (offset);
4407 else
4408 /* Not a suitable memory address. */
4409 return 0;
4412 /* All the useful information has now been extracted from the
4413 operands into unsorted_regs and unsorted_offsets; additionally,
4414 order[0] has been set to the lowest numbered register in the
4415 list. Sort the registers into order, and check that the memory
4416 offsets are ascending and adjacent. */
4418 for (i = 1; i < nops; i++)
4420 int j;
4422 order[i] = order[i - 1];
4423 for (j = 0; j < nops; j++)
4424 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
4425 && (order[i] == order[i - 1]
4426 || unsorted_regs[j] < unsorted_regs[order[i]]))
4427 order[i] = j;
4429 /* Have we found a suitable register? If not, one must be used more
4430 than once. */
4431 if (order[i] == order[i - 1])
4432 return 0;
4434 /* Is the memory address adjacent and ascending? */
4435 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
4436 return 0;
4439 if (base)
4441 *base = base_reg;
4443 for (i = 0; i < nops; i++)
4444 regs[i] = unsorted_regs[order[i]];
4446 *load_offset = unsorted_offsets[order[0]];
4449 if (unsorted_offsets[order[0]] == 0)
4450 return 1; /* stmia */
4452 if (unsorted_offsets[order[0]] == 4)
4453 return 2; /* stmib */
4455 if (unsorted_offsets[order[nops - 1]] == 0)
4456 return 3; /* stmda */
4458 if (unsorted_offsets[order[nops - 1]] == -4)
4459 return 4; /* stmdb */
4461 return 0;
4464 const char *
4465 emit_stm_seq (operands, nops)
4466 rtx * operands;
4467 int nops;
4469 int regs[4];
4470 int base_reg;
4471 HOST_WIDE_INT offset;
4472 char buf[100];
4473 int i;
4475 switch (store_multiple_sequence (operands, nops, regs, &base_reg, &offset))
4477 case 1:
4478 strcpy (buf, "stm%?ia\t");
4479 break;
4481 case 2:
4482 strcpy (buf, "stm%?ib\t");
4483 break;
4485 case 3:
4486 strcpy (buf, "stm%?da\t");
4487 break;
4489 case 4:
4490 strcpy (buf, "stm%?db\t");
4491 break;
4493 default:
4494 abort ();
4497 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
4498 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
4500 for (i = 1; i < nops; i++)
4501 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
4502 reg_names[regs[i]]);
4504 strcat (buf, "}\t%@ phole stm");
4506 output_asm_insn (buf, operands);
4507 return "";
4511 multi_register_push (op, mode)
4512 rtx op;
4513 enum machine_mode mode ATTRIBUTE_UNUSED;
4515 if (GET_CODE (op) != PARALLEL
4516 || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
4517 || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
4518 || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != UNSPEC_PUSH_MULT))
4519 return 0;
4521 return 1;
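/* For illustration: the first element of a push-multiple PARALLEL, as
   tested above, looks roughly like

     (set (mem:BLK ...) (unspec:BLK [(reg:SI ...)] UNSPEC_PUSH_MULT))

   This shape is a sketch based on how emit_multi_reg_push builds the
   pattern; the SET/UNSPEC/UNSPEC_PUSH_MULT structure is all that the
   predicate actually checks.  */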
4524 /* Routines for use in generating RTL. */
4527 arm_gen_load_multiple (base_regno, count, from, up, write_back, unchanging_p,
4528 in_struct_p, scalar_p)
4529 int base_regno;
4530 int count;
4531 rtx from;
4532 int up;
4533 int write_back;
4534 int unchanging_p;
4535 int in_struct_p;
4536 int scalar_p;
4538 int i = 0, j;
4539 rtx result;
4540 int sign = up ? 1 : -1;
4541 rtx mem;
4543 /* XScale has load-store double instructions, but they have stricter
4544 alignment requirements than load-store multiple, so we cannot
4545 use them.
4547 For XScale ldm requires 2 + NREGS cycles to complete and blocks
4548 the pipeline until completion.
4550 NREGS CYCLES
4551 1 3
4552 2 4
4553 3 5
4554 4 6
4556 An ldr instruction takes 1-3 cycles, but does not block the
4557 pipeline.
4559 NREGS CYCLES
4560 1 1-3
4561 2 2-6
4562 3 3-9
4563 4 4-12
4565 Best case ldr will always win. However, the more ldr instructions
4566 we issue, the less likely we are to be able to schedule them well.
4567 Using ldr instructions also increases code size.
4569 As a compromise, we use ldr for counts of 1 or 2 regs, and ldm
4570 for counts of 3 or 4 regs. */
4571 if (arm_is_xscale && count <= 2 && ! optimize_size)
4573 rtx seq;
4575 start_sequence ();
4577 for (i = 0; i < count; i++)
4579 mem = gen_rtx_MEM (SImode, plus_constant (from, i * 4 * sign));
4580 RTX_UNCHANGING_P (mem) = unchanging_p;
4581 MEM_IN_STRUCT_P (mem) = in_struct_p;
4582 MEM_SCALAR_P (mem) = scalar_p;
4583 emit_move_insn (gen_rtx_REG (SImode, base_regno + i), mem);
4586 if (write_back)
4587 emit_move_insn (from, plus_constant (from, count * 4 * sign));
4589 seq = get_insns ();
4590 end_sequence ();
4592 return seq;
4595 result = gen_rtx_PARALLEL (VOIDmode,
4596 rtvec_alloc (count + (write_back ? 1 : 0)));
4597 if (write_back)
4599 XVECEXP (result, 0, 0)
4600 = gen_rtx_SET (GET_MODE (from), from,
4601 plus_constant (from, count * 4 * sign));
4602 i = 1;
4603 count++;
4606 for (j = 0; i < count; i++, j++)
4608 mem = gen_rtx_MEM (SImode, plus_constant (from, j * 4 * sign));
4609 RTX_UNCHANGING_P (mem) = unchanging_p;
4610 MEM_IN_STRUCT_P (mem) = in_struct_p;
4611 MEM_SCALAR_P (mem) = scalar_p;
4612 XVECEXP (result, 0, i)
4613 = gen_rtx_SET (VOIDmode, gen_rtx_REG (SImode, base_regno + j), mem);
4616 return result;
4620 arm_gen_store_multiple (base_regno, count, to, up, write_back, unchanging_p,
4621 in_struct_p, scalar_p)
4622 int base_regno;
4623 int count;
4624 rtx to;
4625 int up;
4626 int write_back;
4627 int unchanging_p;
4628 int in_struct_p;
4629 int scalar_p;
4631 int i = 0, j;
4632 rtx result;
4633 int sign = up ? 1 : -1;
4634 rtx mem;
4636 /* See arm_gen_load_multiple for discussion of
4637 the pros/cons of ldm/stm usage for XScale. */
4638 if (arm_is_xscale && count <= 2 && ! optimize_size)
4640 rtx seq;
4642 start_sequence ();
4644 for (i = 0; i < count; i++)
4646 mem = gen_rtx_MEM (SImode, plus_constant (to, i * 4 * sign));
4647 RTX_UNCHANGING_P (mem) = unchanging_p;
4648 MEM_IN_STRUCT_P (mem) = in_struct_p;
4649 MEM_SCALAR_P (mem) = scalar_p;
4650 emit_move_insn (mem, gen_rtx_REG (SImode, base_regno + i));
4653 if (write_back)
4654 emit_move_insn (to, plus_constant (to, count * 4 * sign));
4656 seq = get_insns ();
4657 end_sequence ();
4659 return seq;
4662 result = gen_rtx_PARALLEL (VOIDmode,
4663 rtvec_alloc (count + (write_back ? 1 : 0)));
4664 if (write_back)
4666 XVECEXP (result, 0, 0)
4667 = gen_rtx_SET (GET_MODE (to), to,
4668 plus_constant (to, count * 4 * sign));
4669 i = 1;
4670 count++;
4673 for (j = 0; i < count; i++, j++)
4675 mem = gen_rtx_MEM (SImode, plus_constant (to, j * 4 * sign));
4676 RTX_UNCHANGING_P (mem) = unchanging_p;
4677 MEM_IN_STRUCT_P (mem) = in_struct_p;
4678 MEM_SCALAR_P (mem) = scalar_p;
4680 XVECEXP (result, 0, i)
4681 = gen_rtx_SET (VOIDmode, mem, gen_rtx_REG (SImode, base_regno + j));
4684 return result;
4688 arm_gen_movstrqi (operands)
4689 rtx * operands;
4691 HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
4692 int i;
4693 rtx src, dst;
4694 rtx st_src, st_dst, fin_src, fin_dst;
4695 rtx part_bytes_reg = NULL;
4696 rtx mem;
4697 int dst_unchanging_p, dst_in_struct_p, src_unchanging_p, src_in_struct_p;
4698 int dst_scalar_p, src_scalar_p;
4700 if (GET_CODE (operands[2]) != CONST_INT
4701 || GET_CODE (operands[3]) != CONST_INT
4702 || INTVAL (operands[2]) > 64
4703 || INTVAL (operands[3]) & 3)
4704 return 0;
4706 st_dst = XEXP (operands[0], 0);
4707 st_src = XEXP (operands[1], 0);
4709 dst_unchanging_p = RTX_UNCHANGING_P (operands[0]);
4710 dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
4711 dst_scalar_p = MEM_SCALAR_P (operands[0]);
4712 src_unchanging_p = RTX_UNCHANGING_P (operands[1]);
4713 src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);
4714 src_scalar_p = MEM_SCALAR_P (operands[1]);
4716 fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
4717 fin_src = src = copy_to_mode_reg (SImode, st_src);
4719 in_words_to_go = ARM_NUM_INTS (INTVAL (operands[2]));
4720 out_words_to_go = INTVAL (operands[2]) / 4;
4721 last_bytes = INTVAL (operands[2]) & 3;
4723 if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
4724 part_bytes_reg = gen_rtx_REG (SImode, (in_words_to_go - 1) & 3);
4726 for (i = 0; in_words_to_go >= 2; i+=4)
4728 if (in_words_to_go > 4)
4729 emit_insn (arm_gen_load_multiple (0, 4, src, TRUE, TRUE,
4730 src_unchanging_p,
4731 src_in_struct_p,
4732 src_scalar_p));
4733 else
4734 emit_insn (arm_gen_load_multiple (0, in_words_to_go, src, TRUE,
4735 FALSE, src_unchanging_p,
4736 src_in_struct_p, src_scalar_p));
4738 if (out_words_to_go)
4740 if (out_words_to_go > 4)
4741 emit_insn (arm_gen_store_multiple (0, 4, dst, TRUE, TRUE,
4742 dst_unchanging_p,
4743 dst_in_struct_p,
4744 dst_scalar_p));
4745 else if (out_words_to_go != 1)
4746 emit_insn (arm_gen_store_multiple (0, out_words_to_go,
4747 dst, TRUE,
4748 (last_bytes == 0
4749 ? FALSE : TRUE),
4750 dst_unchanging_p,
4751 dst_in_struct_p,
4752 dst_scalar_p));
4753 else
4755 mem = gen_rtx_MEM (SImode, dst);
4756 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4757 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4758 MEM_SCALAR_P (mem) = dst_scalar_p;
4759 emit_move_insn (mem, gen_rtx_REG (SImode, 0));
4760 if (last_bytes != 0)
4761 emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
4765 in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
4766 out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
4769 /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do. */
4770 if (out_words_to_go)
4772 rtx sreg;
4774 mem = gen_rtx_MEM (SImode, src);
4775 RTX_UNCHANGING_P (mem) = src_unchanging_p;
4776 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
4777 MEM_SCALAR_P (mem) = src_scalar_p;
4778 emit_move_insn (sreg = gen_reg_rtx (SImode), mem);
4779 emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));
4781 mem = gen_rtx_MEM (SImode, dst);
4782 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4783 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4784 MEM_SCALAR_P (mem) = dst_scalar_p;
4785 emit_move_insn (mem, sreg);
4786 emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
4787 in_words_to_go--;
4789 if (in_words_to_go) /* Sanity check */
4790 abort ();
4793 if (in_words_to_go)
4795 if (in_words_to_go < 0)
4796 abort ();
4798 mem = gen_rtx_MEM (SImode, src);
4799 RTX_UNCHANGING_P (mem) = src_unchanging_p;
4800 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
4801 MEM_SCALAR_P (mem) = src_scalar_p;
4802 part_bytes_reg = copy_to_mode_reg (SImode, mem);
4805 if (last_bytes && part_bytes_reg == NULL)
4806 abort ();
4808 if (BYTES_BIG_ENDIAN && last_bytes)
4810 rtx tmp = gen_reg_rtx (SImode);
4812 /* The bytes we want are in the top end of the word. */
4813 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
4814 GEN_INT (8 * (4 - last_bytes))));
4815 part_bytes_reg = tmp;
4817 while (last_bytes)
4819 mem = gen_rtx_MEM (QImode, plus_constant (dst, last_bytes - 1));
4820 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4821 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4822 MEM_SCALAR_P (mem) = dst_scalar_p;
4823 emit_move_insn (mem, gen_lowpart (QImode, part_bytes_reg));
4825 if (--last_bytes)
4827 tmp = gen_reg_rtx (SImode);
4828 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
4829 part_bytes_reg = tmp;
4834 else
4836 if (last_bytes > 1)
4838 mem = gen_rtx_MEM (HImode, dst);
4839 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4840 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4841 MEM_SCALAR_P (mem) = dst_scalar_p;
4842 emit_move_insn (mem, gen_lowpart (HImode, part_bytes_reg));
4843 last_bytes -= 2;
4844 if (last_bytes)
4846 rtx tmp = gen_reg_rtx (SImode);
4848 emit_insn (gen_addsi3 (dst, dst, GEN_INT (2)));
4849 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (16)));
4850 part_bytes_reg = tmp;
4854 if (last_bytes)
4856 mem = gen_rtx_MEM (QImode, dst);
4857 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4858 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4859 MEM_SCALAR_P (mem) = dst_scalar_p;
4860 emit_move_insn (mem, gen_lowpart (QImode, part_bytes_reg));
4864 return 1;
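/* A worked example of the expansion above (register numbers are
   schematic): for a 10-byte, word-aligned copy we get
   in_words_to_go = 3, out_words_to_go = 2 and last_bytes = 2, so on a
   little-endian target the emitted sequence is roughly

     ldmia src, {r0, r1, r2}
     stmia dst!, {r0, r1}
     strh  r2, [dst]

   i.e. the two whole words are stored with a store multiple and the
   trailing half-word comes from the low end of the last word
   loaded.  */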
4867 /* Generate a memory reference for a half word, such that it will be loaded
4868 into the top 16 bits of the word. We can assume that the address is
4869 known to be alignable and of the form reg, or plus (reg, const). */
4872 arm_gen_rotated_half_load (memref)
4873 rtx memref;
4875 HOST_WIDE_INT offset = 0;
4876 rtx base = XEXP (memref, 0);
4878 if (GET_CODE (base) == PLUS)
4880 offset = INTVAL (XEXP (base, 1));
4881 base = XEXP (base, 0);
4884 /* If we aren't allowed to generate unaligned addresses, then fail. */
4885 if (TARGET_MMU_TRAPS
4886 && ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 0)))
4887 return NULL;
4889 base = gen_rtx_MEM (SImode, plus_constant (base, offset & ~2));
4891 if ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 2))
4892 return base;
4894 return gen_rtx_ROTATE (SImode, base, GEN_INT (16));
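/* Worked example (little-endian): for the half-word at [rb, #2] the
   code above loads the word at [rb, #0]; bytes 2 and 3 already occupy
   the top half of that word, so the MEM is returned unrotated.  For
   the half-word at [rb, #0] the same word is loaded, but the wanted
   bytes sit in the bottom half, so the result is wrapped in
   (rotate ... 16) to move them to the top.  */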
4897 /* Select a dominance comparison mode if possible. We support three forms.
4898 COND_OR == 0 => (X && Y)
4899 COND_OR == 1 => ((!X) || Y)
4900 COND_OR == 2 => (X || Y)
4901 If we are unable to support a dominance comparison we return CC mode.
4902 This will then fail to match for the RTL expressions that generate this
4903 call. */
4905 static enum machine_mode
4906 select_dominance_cc_mode (x, y, cond_or)
4907 rtx x;
4908 rtx y;
4909 HOST_WIDE_INT cond_or;
4911 enum rtx_code cond1, cond2;
4912 int swapped = 0;
4914 /* Currently we will probably get the wrong result if the individual
4915 comparisons are not simple. This also ensures that it is safe to
4916 reverse a comparison if necessary. */
4917 if ((arm_select_cc_mode (cond1 = GET_CODE (x), XEXP (x, 0), XEXP (x, 1))
4918 != CCmode)
4919 || (arm_select_cc_mode (cond2 = GET_CODE (y), XEXP (y, 0), XEXP (y, 1))
4920 != CCmode))
4921 return CCmode;
4923 /* The if_then_else variant of this tests the second condition if the
4924 first passes, but is true if the first fails. Reverse the first
4925 condition to get a true "inclusive-or" expression. */
4926 if (cond_or == 1)
4927 cond1 = reverse_condition (cond1);
4929 /* If the comparisons are not equal, and one doesn't dominate the other,
4930 then we can't do this. */
4931 if (cond1 != cond2
4932 && !comparison_dominates_p (cond1, cond2)
4933 && (swapped = 1, !comparison_dominates_p (cond2, cond1)))
4934 return CCmode;
4936 if (swapped)
4938 enum rtx_code temp = cond1;
4939 cond1 = cond2;
4940 cond2 = temp;
4943 switch (cond1)
4945 case EQ:
4946 if (cond2 == EQ || !cond_or)
4947 return CC_DEQmode;
4949 switch (cond2)
4951 case LE: return CC_DLEmode;
4952 case LEU: return CC_DLEUmode;
4953 case GE: return CC_DGEmode;
4954 case GEU: return CC_DGEUmode;
4955 default: break;
4958 break;
4960 case LT:
4961 if (cond2 == LT || !cond_or)
4962 return CC_DLTmode;
4963 if (cond2 == LE)
4964 return CC_DLEmode;
4965 if (cond2 == NE)
4966 return CC_DNEmode;
4967 break;
4969 case GT:
4970 if (cond2 == GT || !cond_or)
4971 return CC_DGTmode;
4972 if (cond2 == GE)
4973 return CC_DGEmode;
4974 if (cond2 == NE)
4975 return CC_DNEmode;
4976 break;
4978 case LTU:
4979 if (cond2 == LTU || !cond_or)
4980 return CC_DLTUmode;
4981 if (cond2 == LEU)
4982 return CC_DLEUmode;
4983 if (cond2 == NE)
4984 return CC_DNEmode;
4985 break;
4987 case GTU:
4988 if (cond2 == GTU || !cond_or)
4989 return CC_DGTUmode;
4990 if (cond2 == GEU)
4991 return CC_DGEUmode;
4992 if (cond2 == NE)
4993 return CC_DNEmode;
4994 break;
4996 /* The remaining cases only occur when both comparisons are the
4997 same. */
4998 case NE:
4999 return CC_DNEmode;
5001 case LE:
5002 return CC_DLEmode;
5004 case GE:
5005 return CC_DGEmode;
5007 case LEU:
5008 return CC_DLEUmode;
5010 case GEU:
5011 return CC_DGEUmode;
5013 default:
5014 break;
5017 abort ();
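/* An illustrative instance of the selection above: for
   (ge x y) || (gt x y) with COND_OR == 2, GE does not dominate GT but
   GT dominates GE, so the conditions are swapped and case GT returns
   CC_DGEmode; testing the "ge" condition of that mode then covers the
   whole expression.  */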
5020 enum machine_mode
5021 arm_select_cc_mode (op, x, y)
5022 enum rtx_code op;
5023 rtx x;
5024 rtx y;
5026 /* All floating point compares return CCFP if it is an equality
5027 comparison, and CCFPE otherwise. */
5028 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
5030 switch (op)
5032 case EQ:
5033 case NE:
5034 case UNORDERED:
5035 case ORDERED:
5036 case UNLT:
5037 case UNLE:
5038 case UNGT:
5039 case UNGE:
5040 case UNEQ:
5041 case LTGT:
5042 return CCFPmode;
5044 case LT:
5045 case LE:
5046 case GT:
5047 case GE:
5048 return CCFPEmode;
5050 default:
5051 abort ();
5055 /* A compare with a shifted operand. Because of canonicalization, the
5056 comparison will have to be swapped when we emit the assembler. */
5057 if (GET_MODE (y) == SImode && GET_CODE (y) == REG
5058 && (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
5059 || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
5060 || GET_CODE (x) == ROTATERT))
5061 return CC_SWPmode;
5063 /* This is a special case that is used by combine to allow a
5064 comparison of a shifted byte load to be split into a zero-extend
5065 followed by a comparison of the shifted integer (only valid for
5066 equalities and unsigned inequalities). */
5067 if (GET_MODE (x) == SImode
5068 && GET_CODE (x) == ASHIFT
5069 && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 24
5070 && GET_CODE (XEXP (x, 0)) == SUBREG
5071 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == MEM
5072 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == QImode
5073 && (op == EQ || op == NE
5074 || op == GEU || op == GTU || op == LTU || op == LEU)
5075 && GET_CODE (y) == CONST_INT)
5076 return CC_Zmode;
5078 /* A construct for a conditional compare. If the false arm contains
5079 0, then both conditions must be true; otherwise either condition
5080 must be true. Not all conditions are possible, so CCmode is
5081 returned if it can't be done. */
5082 if (GET_CODE (x) == IF_THEN_ELSE
5083 && (XEXP (x, 2) == const0_rtx
5084 || XEXP (x, 2) == const1_rtx)
5085 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
5086 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
5087 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
5088 INTVAL (XEXP (x, 2)));
5090 /* Alternate canonicalizations of the above. These are somewhat cleaner. */
5091 if (GET_CODE (x) == AND
5092 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
5093 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
5094 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1), 0);
5096 if (GET_CODE (x) == IOR
5097 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
5098 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
5099 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1), 2);
5101 /* For an operation that sets the condition codes as a side-effect, the
5102 V flag is not set correctly, so we can only use comparisons where
5103 this doesn't matter. (For LT and GE we can use "mi" and "pl"
5104 instead.) */
5105 if (GET_MODE (x) == SImode
5106 && y == const0_rtx
5107 && (op == EQ || op == NE || op == LT || op == GE)
5108 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
5109 || GET_CODE (x) == AND || GET_CODE (x) == IOR
5110 || GET_CODE (x) == XOR || GET_CODE (x) == MULT
5111 || GET_CODE (x) == NOT || GET_CODE (x) == NEG
5112 || GET_CODE (x) == LSHIFTRT
5113 || GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
5114 || GET_CODE (x) == ROTATERT || GET_CODE (x) == ZERO_EXTRACT))
5115 return CC_NOOVmode;
5117 if (GET_MODE (x) == QImode && (op == EQ || op == NE))
5118 return CC_Zmode;
5120 if (GET_MODE (x) == SImode && (op == LTU || op == GEU)
5121 && GET_CODE (x) == PLUS
5122 && (rtx_equal_p (XEXP (x, 0), y) || rtx_equal_p (XEXP (x, 1), y)))
5123 return CC_Cmode;
5125 return CCmode;
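/* Two concrete selections from the code above, by way of example:
   comparing a QImode value for equality yields CC_Zmode, since only
   the Z flag is meaningful; and comparing (plus x y) against y with
   LTU or GEU yields CC_Cmode, since such an unsigned overflow test
   only needs the carry flag.  */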
5128 /* X and Y are two things to compare using CODE. Emit the compare insn and
5129 return the rtx for register 0 in the proper mode. FP means this is a
5130 floating point compare: I don't think that it is needed on the arm. */
5133 arm_gen_compare_reg (code, x, y)
5134 enum rtx_code code;
5135 rtx x, y;
5137 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
5138 rtx cc_reg = gen_rtx_REG (mode, CC_REGNUM);
5140 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
5141 gen_rtx_COMPARE (mode, x, y)));
5143 return cc_reg;
5146 /* Generate a sequence of insns that will generate the correct return
5147 address mask depending on the physical architecture that the program
5148 is running on. */
5151 arm_gen_return_addr_mask ()
5153 rtx reg = gen_reg_rtx (Pmode);
5155 emit_insn (gen_return_addr_mask (reg));
5156 return reg;
5159 void
5160 arm_reload_in_hi (operands)
5161 rtx * operands;
5163 rtx ref = operands[1];
5164 rtx base, scratch;
5165 HOST_WIDE_INT offset = 0;
5167 if (GET_CODE (ref) == SUBREG)
5169 offset = SUBREG_BYTE (ref);
5170 ref = SUBREG_REG (ref);
5173 if (GET_CODE (ref) == REG)
5175 /* We have a pseudo which has been spilt onto the stack; there
5176 are two cases here: the first where there is a simple
5177 stack-slot replacement and a second where the stack-slot is
5178 out of range, or is used as a subreg. */
5179 if (reg_equiv_mem[REGNO (ref)])
5181 ref = reg_equiv_mem[REGNO (ref)];
5182 base = find_replacement (&XEXP (ref, 0));
5184 else
5185 /* The slot is out of range, or was dressed up in a SUBREG. */
5186 base = reg_equiv_address[REGNO (ref)];
5188 else
5189 base = find_replacement (&XEXP (ref, 0));
5191 /* Handle the case where the address is too complex to be offset by 1. */
5192 if (GET_CODE (base) == MINUS
5193 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
5195 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5197 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
5198 base = base_plus;
5200 else if (GET_CODE (base) == PLUS)
5202 /* The addend must be CONST_INT, or we would have dealt with it above. */
5203 HOST_WIDE_INT hi, lo;
5205 offset += INTVAL (XEXP (base, 1));
5206 base = XEXP (base, 0);
5208 /* Rework the address into a legal sequence of insns. */
5209 /* Valid range for lo is -4095 -> 4095 */
5210 lo = (offset >= 0
5211 ? (offset & 0xfff)
5212 : -((-offset) & 0xfff));
5214 /* Corner case, if lo is the max offset then we would be out of range
5215 once we have added the additional 1 below, so bump the msb into the
5216 pre-loading insn(s). */
5217 if (lo == 4095)
5218 lo &= 0x7ff;
5220 hi = ((((offset - lo) & (HOST_WIDE_INT) 0xffffffff)
5221 ^ (HOST_WIDE_INT) 0x80000000)
5222 - (HOST_WIDE_INT) 0x80000000);
5224 if (hi + lo != offset)
5225 abort ();
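/* Worked example of the split above: for an offset of 4095 the corner
   case fires, leaving lo = 2047 and hi = 2048; the two byte loads
   below then use offsets 2047 and 2048 from the adjusted base, both
   comfortably inside the +/- 4095 single-insn range.  */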
5227 if (hi != 0)
5229 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5231 /* Get the base address; addsi3 knows how to handle constants
5232 that require more than one insn. */
5233 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
5234 base = base_plus;
5235 offset = lo;
5239 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
5240 emit_insn (gen_zero_extendqisi2 (scratch,
5241 gen_rtx_MEM (QImode,
5242 plus_constant (base,
5243 offset))));
5244 emit_insn (gen_zero_extendqisi2 (gen_rtx_SUBREG (SImode, operands[0], 0),
5245 gen_rtx_MEM (QImode,
5246 plus_constant (base,
5247 offset + 1))));
5248 if (!BYTES_BIG_ENDIAN)
5249 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
5250 gen_rtx_IOR (SImode,
5251 gen_rtx_ASHIFT
5252 (SImode,
5253 gen_rtx_SUBREG (SImode, operands[0], 0),
5254 GEN_INT (8)),
5255 scratch)));
5256 else
5257 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
5258 gen_rtx_IOR (SImode,
5259 gen_rtx_ASHIFT (SImode, scratch,
5260 GEN_INT (8)),
5261 gen_rtx_SUBREG (SImode, operands[0],
5262 0))));
5265 /* Handle storing a half-word to memory during reload by synthesising as two
5266 byte stores. Take care not to clobber the input values until after we
5267 have moved them somewhere safe. This code assumes that if the DImode
5268 scratch in operands[2] overlaps either the input value or output address
5269 in some way, then that value must die in this insn (we absolutely need
5270 two scratch registers for some corner cases). */
5272 void
5273 arm_reload_out_hi (operands)
5274 rtx * operands;
5276 rtx ref = operands[0];
5277 rtx outval = operands[1];
5278 rtx base, scratch;
5279 HOST_WIDE_INT offset = 0;
5281 if (GET_CODE (ref) == SUBREG)
5283 offset = SUBREG_BYTE (ref);
5284 ref = SUBREG_REG (ref);
5287 if (GET_CODE (ref) == REG)
5289 /* We have a pseudo which has been spilt onto the stack; there
5290 are two cases here: the first where there is a simple
5291 stack-slot replacement and a second where the stack-slot is
5292 out of range, or is used as a subreg. */
5293 if (reg_equiv_mem[REGNO (ref)])
5295 ref = reg_equiv_mem[REGNO (ref)];
5296 base = find_replacement (&XEXP (ref, 0));
5298 else
5299 /* The slot is out of range, or was dressed up in a SUBREG. */
5300 base = reg_equiv_address[REGNO (ref)];
5302 else
5303 base = find_replacement (&XEXP (ref, 0));
5305 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
5307 /* Handle the case where the address is too complex to be offset by 1. */
5308 if (GET_CODE (base) == MINUS
5309 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
5311 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5313 /* Be careful not to destroy OUTVAL. */
5314 if (reg_overlap_mentioned_p (base_plus, outval))
5316 /* Updating base_plus might destroy outval, see if we can
5317 swap the scratch and base_plus. */
5318 if (!reg_overlap_mentioned_p (scratch, outval))
5320 rtx tmp = scratch;
5321 scratch = base_plus;
5322 base_plus = tmp;
5324 else
5326 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
5328 /* Be conservative and copy OUTVAL into the scratch now,
5329 this should only be necessary if outval is a subreg
5330 of something larger than a word. */
5331 /* XXX Might this clobber base? I can't see how it can,
5332 since scratch is known to overlap with OUTVAL, and
5333 must be wider than a word. */
5334 emit_insn (gen_movhi (scratch_hi, outval));
5335 outval = scratch_hi;
5339 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
5340 base = base_plus;
5342 else if (GET_CODE (base) == PLUS)
5344 /* The addend must be CONST_INT, or we would have dealt with it above. */
5345 HOST_WIDE_INT hi, lo;
5347 offset += INTVAL (XEXP (base, 1));
5348 base = XEXP (base, 0);
5350 /* Rework the address into a legal sequence of insns. */
5351 /* Valid range for lo is -4095 -> 4095 */
5352 lo = (offset >= 0
5353 ? (offset & 0xfff)
5354 : -((-offset) & 0xfff));
5356 /* Corner case, if lo is the max offset then we would be out of range
5357 once we have added the additional 1 below, so bump the msb into the
5358 pre-loading insn(s). */
5359 if (lo == 4095)
5360 lo &= 0x7ff;
5362 hi = ((((offset - lo) & (HOST_WIDE_INT) 0xffffffff)
5363 ^ (HOST_WIDE_INT) 0x80000000)
5364 - (HOST_WIDE_INT) 0x80000000);
5366 if (hi + lo != offset)
5367 abort ();
5369 if (hi != 0)
5371 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5373 /* Be careful not to destroy OUTVAL. */
5374 if (reg_overlap_mentioned_p (base_plus, outval))
5376 /* Updating base_plus might destroy outval, see if we
5377 can swap the scratch and base_plus. */
5378 if (!reg_overlap_mentioned_p (scratch, outval))
5380 rtx tmp = scratch;
5381 scratch = base_plus;
5382 base_plus = tmp;
5384 else
5386 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
5388 /* Be conservative and copy outval into scratch now,
5389 this should only be necessary if outval is a
5390 subreg of something larger than a word. */
5391 /* XXX Might this clobber base? I can't see how it
5392 can, since scratch is known to overlap with
5393 outval. */
5394 emit_insn (gen_movhi (scratch_hi, outval));
5395 outval = scratch_hi;
5399 /* Get the base address; addsi3 knows how to handle constants
5400 that require more than one insn. */
5401 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
5402 base = base_plus;
5403 offset = lo;
5407 if (BYTES_BIG_ENDIAN)
5409 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
5410 plus_constant (base, offset + 1)),
5411 gen_lowpart (QImode, outval)));
5412 emit_insn (gen_lshrsi3 (scratch,
5413 gen_rtx_SUBREG (SImode, outval, 0),
5414 GEN_INT (8)));
5415 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
5416 gen_lowpart (QImode, scratch)));
5418 else
5420 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
5421 gen_lowpart (QImode, outval)));
5422 emit_insn (gen_lshrsi3 (scratch,
5423 gen_rtx_SUBREG (SImode, outval, 0),
5424 GEN_INT (8)));
5425 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
5426 plus_constant (base, offset + 1)),
5427 gen_lowpart (QImode, scratch)));
5431 /* Print a symbolic form of X to the debug file, F. */
5433 static void
5434 arm_print_value (f, x)
5435 FILE * f;
5436 rtx x;
5438 switch (GET_CODE (x))
5440 case CONST_INT:
5441 fprintf (f, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
5442 return;
5444 case CONST_DOUBLE:
5445 fprintf (f, "<0x%lx,0x%lx>", (long)XWINT (x, 2), (long)XWINT (x, 3));
5446 return;
5448 case CONST_STRING:
5449 fprintf (f, "\"%s\"", XSTR (x, 0));
5450 return;
5452 case SYMBOL_REF:
5453 fprintf (f, "`%s'", XSTR (x, 0));
5454 return;
5456 case LABEL_REF:
5457 fprintf (f, "L%d", INSN_UID (XEXP (x, 0)));
5458 return;
5460 case CONST:
5461 arm_print_value (f, XEXP (x, 0));
5462 return;
5464 case PLUS:
5465 arm_print_value (f, XEXP (x, 0));
5466 fprintf (f, "+");
5467 arm_print_value (f, XEXP (x, 1));
5468 return;
5470 case PC:
5471 fprintf (f, "pc");
5472 return;
5474 default:
5475 fprintf (f, "????");
5476 return;
5480 /* Routines for manipulation of the constant pool. */
5482 /* Arm instructions cannot load a large constant directly into a
5483 register; they have to come from a pc relative load. The constant
5484 must therefore be placed in the addressable range of the pc
5485 relative load. Depending on the precise pc relative load
5486 instruction the range is somewhere between 256 bytes and 4k. This
5487 means that we often have to dump a constant inside a function, and
5488 generate code to branch around it.
5490 It is important to minimize this, since the branches will slow
5491 things down and make the code larger.
5493 Normally we can hide the table after an existing unconditional
5494 branch so that there is no interruption of the flow, but in the
5495 worst case the code looks like this:
5497 ldr rn, L1
5498 ...
5499 b L2
5500 align
5501 L1: .long value
5502 L2:
5503 ...
5505 ldr rn, L3
5506 ...
5507 b L4
5508 align
5509 L3: .long value
5510 L4:
5511 ...
5513 We fix this by performing a scan after scheduling, which notices
5514 which instructions need to have their operands fetched from the
5515 constant table and builds the table.
5517 The algorithm starts by building a table of all the constants that
5518 need fixing up and all the natural barriers in the function (places
5519 where a constant table can be dropped without breaking the flow).
5520 For each fixup we note how far the pc-relative replacement will be
5521 able to reach and the offset of the instruction into the function.
5523 Having built the table we then group the fixes together to form
5524 tables that are as large as possible (subject to addressing
5525 constraints) and emit each table of constants after the last
5526 barrier that is within range of all the instructions in the group.
5527 If a group does not contain a barrier, then we forcibly create one
5528 by inserting a jump instruction into the flow. Once the table has
5529 been inserted, the insns are then modified to reference the
5530 relevant entry in the pool.
5532 Possible enhancements to the algorithm (not implemented) are:
5534 1) For some processors and object formats, there may be benefit in
5535 aligning the pools to the start of cache lines; this alignment
5536 would need to be taken into account when calculating addressability
5537 of a pool. */
5539 /* These typedefs are located at the start of this file, so that
5540 they can be used in the prototypes there. This comment is to
5541 remind readers of that fact so that the following structures
5542 can be understood more easily.
5544 typedef struct minipool_node Mnode;
5545 typedef struct minipool_fixup Mfix; */
5547 struct minipool_node
5549 /* Doubly linked chain of entries. */
5550 Mnode * next;
5551 Mnode * prev;
5552 /* The maximum offset into the code that this entry can be placed. While
5553 pushing fixes for forward references, all entries are sorted in order
5554 of increasing max_address. */
5555 HOST_WIDE_INT max_address;
5556 /* Similarly for an entry inserted for a backwards ref. */
5557 HOST_WIDE_INT min_address;
5558 /* The number of fixes referencing this entry. This can become zero
5559 if we "unpush" an entry. In this case we ignore the entry when we
5560 come to emit the code. */
5561 int refcount;
5562 /* The offset from the start of the minipool. */
5563 HOST_WIDE_INT offset;
5564 /* The value in table. */
5565 rtx value;
5566 /* The mode of value. */
5567 enum machine_mode mode;
5568 int fix_size;
5571 struct minipool_fixup
5573 Mfix * next;
5574 rtx insn;
5575 HOST_WIDE_INT address;
5576 rtx * loc;
5577 enum machine_mode mode;
5578 int fix_size;
5579 rtx value;
5580 Mnode * minipool;
5581 HOST_WIDE_INT forwards;
5582 HOST_WIDE_INT backwards;
5585 /* Fixes less than a word need padding out to a word boundary. */
5586 #define MINIPOOL_FIX_SIZE(mode) \
5587 (GET_MODE_SIZE ((mode)) >= 4 ? GET_MODE_SIZE ((mode)) : 4)
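/* So, for example, a QImode or HImode fix still occupies 4 bytes in
   the pool, while a DImode fix occupies 8.  */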
5589 static Mnode * minipool_vector_head;
5590 static Mnode * minipool_vector_tail;
5591 static rtx minipool_vector_label;
5593 /* The linked list of all minipool fixes required for this function. */
5594 Mfix * minipool_fix_head;
5595 Mfix * minipool_fix_tail;
5596 /* The fix entry for the current minipool, once it has been placed. */
5597 Mfix * minipool_barrier;
5599 /* Determines if INSN is the start of a jump table. Returns the end
5600 of the TABLE or NULL_RTX. */
5602 static rtx
5603 is_jump_table (insn)
5604 rtx insn;
5606 rtx table;
5608 if (GET_CODE (insn) == JUMP_INSN
5609 && JUMP_LABEL (insn) != NULL
5610 && ((table = next_real_insn (JUMP_LABEL (insn)))
5611 == next_real_insn (insn))
5612 && table != NULL
5613 && GET_CODE (table) == JUMP_INSN
5614 && (GET_CODE (PATTERN (table)) == ADDR_VEC
5615 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
5616 return table;
5618 return NULL_RTX;
5621 #ifndef JUMP_TABLES_IN_TEXT_SECTION
5622 #define JUMP_TABLES_IN_TEXT_SECTION 0
5623 #endif
5625 static HOST_WIDE_INT
5626 get_jump_table_size (insn)
5627 rtx insn;
5629 /* ADDR_VECs only take room if read-only data goes into the text
5630 section. */
5631 if (JUMP_TABLES_IN_TEXT_SECTION
5632 #if !defined(READONLY_DATA_SECTION) && !defined(READONLY_DATA_SECTION_ASM_OP)
5633 || 1
5634 #endif
5637 rtx body = PATTERN (insn);
5638 int elt = GET_CODE (body) == ADDR_DIFF_VEC ? 1 : 0;
5640 return GET_MODE_SIZE (GET_MODE (body)) * XVECLEN (body, elt);
5643 return 0;
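/* For example: an ADDR_DIFF_VEC in HImode with ten entries contributes
   2 * 10 = 20 bytes to the text section, whereas if read-only data
   goes into a separate section the table takes no text space and 0 is
   returned.  */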
5646 /* Move a minipool fix MP from its current location to before MAX_MP.
5647 If MAX_MP is NULL, then MP doesn't need moving, but the addressing
5648 constraints may need updating. */
5650 static Mnode *
5651 move_minipool_fix_forward_ref (mp, max_mp, max_address)
5652 Mnode * mp;
5653 Mnode * max_mp;
5654 HOST_WIDE_INT max_address;
5656 /* This should never be true and the code below assumes these are
5657 different. */
5658 if (mp == max_mp)
5659 abort ();
5661 if (max_mp == NULL)
5663 if (max_address < mp->max_address)
5664 mp->max_address = max_address;
5666 else
5668 if (max_address > max_mp->max_address - mp->fix_size)
5669 mp->max_address = max_mp->max_address - mp->fix_size;
5670 else
5671 mp->max_address = max_address;
5673 /* Unlink MP from its current position. Since max_mp is non-null,
5674 mp->prev must be non-null. */
5675 mp->prev->next = mp->next;
5676 if (mp->next != NULL)
5677 mp->next->prev = mp->prev;
5678 else
5679 minipool_vector_tail = mp->prev;
5681 /* Re-insert it before MAX_MP. */
5682 mp->next = max_mp;
5683 mp->prev = max_mp->prev;
5684 max_mp->prev = mp;
5686 if (mp->prev != NULL)
5687 mp->prev->next = mp;
5688 else
5689 minipool_vector_head = mp;
5692 /* Save the new entry. */
5693 max_mp = mp;
5695 /* Scan over the preceding entries and adjust their addresses as
5696 required. */
5697 while (mp->prev != NULL
5698 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
5700 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
5701 mp = mp->prev;
5704 return max_mp;
5707 /* Add a constant to the minipool for a forward reference. Returns the
5708 node added or NULL if the constant will not fit in this pool. */
5710 static Mnode *
5711 add_minipool_forward_ref (fix)
5712 Mfix * fix;
5714 /* If set, max_mp is the first pool_entry that has a lower
5715 constraint than the one we are trying to add. */
5716 Mnode * max_mp = NULL;
5717 HOST_WIDE_INT max_address = fix->address + fix->forwards;
5718 Mnode * mp;
5720 /* If this fix's address is greater than the address of the first
5721 entry, then we can't put the fix in this pool. We subtract the
5722 size of the current fix to ensure that if the table is fully
5723 packed we still have enough room to insert this value by shuffling
5724 the other fixes forwards. */
5725 if (minipool_vector_head &&
5726 fix->address >= minipool_vector_head->max_address - fix->fix_size)
5727 return NULL;
5729 /* Scan the pool to see if a constant with the same value has
5730 already been added. While we are doing this, also note the
5731 location where we must insert the constant if it doesn't already
5732 exist. */
5733 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
5735 if (GET_CODE (fix->value) == GET_CODE (mp->value)
5736 && fix->mode == mp->mode
5737 && (GET_CODE (fix->value) != CODE_LABEL
5738 || (CODE_LABEL_NUMBER (fix->value)
5739 == CODE_LABEL_NUMBER (mp->value)))
5740 && rtx_equal_p (fix->value, mp->value))
5742 /* More than one fix references this entry. */
5743 mp->refcount++;
5744 return move_minipool_fix_forward_ref (mp, max_mp, max_address);
5747 /* Note the insertion point if necessary. */
5748 if (max_mp == NULL
5749 && mp->max_address > max_address)
5750 max_mp = mp;
5753 /* The value is not currently in the minipool, so we need to create
5754 a new entry for it. If MAX_MP is NULL, the entry will be put on
5755 the end of the list since the placement is less constrained than
5756 any existing entry. Otherwise, we insert the new fix before
5757 MAX_MP and, if necessary, adjust the constraints on the other
5758 entries. */
5759 mp = xmalloc (sizeof (* mp));
5760 mp->fix_size = fix->fix_size;
5761 mp->mode = fix->mode;
5762 mp->value = fix->value;
5763 mp->refcount = 1;
5764 /* Not yet required for a backwards ref. */
5765 mp->min_address = -65536;
5767 if (max_mp == NULL)
5769 mp->max_address = max_address;
5770 mp->next = NULL;
5771 mp->prev = minipool_vector_tail;
5773 if (mp->prev == NULL)
5775 minipool_vector_head = mp;
5776 minipool_vector_label = gen_label_rtx ();
5778 else
5779 mp->prev->next = mp;
5781 minipool_vector_tail = mp;
5783 else
5785 if (max_address > max_mp->max_address - mp->fix_size)
5786 mp->max_address = max_mp->max_address - mp->fix_size;
5787 else
5788 mp->max_address = max_address;
5790 mp->next = max_mp;
5791 mp->prev = max_mp->prev;
5792 max_mp->prev = mp;
5793 if (mp->prev != NULL)
5794 mp->prev->next = mp;
5795 else
5796 minipool_vector_head = mp;
5799 /* Save the new entry. */
5800 max_mp = mp;
5802 /* Scan over the preceding entries and adjust their addresses as
5803 required. */
5804 while (mp->prev != NULL
5805 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
5807 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
5808 mp = mp->prev;
5811 return max_mp;
5814 static Mnode *
5815 move_minipool_fix_backward_ref (mp, min_mp, min_address)
5816 Mnode * mp;
5817 Mnode * min_mp;
5818 HOST_WIDE_INT min_address;
5820 HOST_WIDE_INT offset;
5822 /* This should never be true, and the code below assumes these are
5823 different. */
5824 if (mp == min_mp)
5825 abort ();
5827 if (min_mp == NULL)
5829 if (min_address > mp->min_address)
5830 mp->min_address = min_address;
5832 else
5834 /* We will adjust this below if it is too loose. */
5835 mp->min_address = min_address;
5837 /* Unlink MP from its current position. Since min_mp is non-null,
5838 mp->next must be non-null. */
5839 mp->next->prev = mp->prev;
5840 if (mp->prev != NULL)
5841 mp->prev->next = mp->next;
5842 else
5843 minipool_vector_head = mp->next;
5845 /* Reinsert it after MIN_MP. */
5846 mp->prev = min_mp;
5847 mp->next = min_mp->next;
5848 min_mp->next = mp;
5849 if (mp->next != NULL)
5850 mp->next->prev = mp;
5851 else
5852 minipool_vector_tail = mp;
5855 min_mp = mp;
5857 offset = 0;
5858 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
5860 mp->offset = offset;
5861 if (mp->refcount > 0)
5862 offset += mp->fix_size;
5864 if (mp->next && mp->next->min_address < mp->min_address + mp->fix_size)
5865 mp->next->min_address = mp->min_address + mp->fix_size;
5868 return min_mp;
5871 /* Add a constant to the minipool for a backward reference. Returns the
5872 node added or NULL if the constant will not fit in this pool.
5874 Note that the code for insertion for a backwards reference can be
5875 somewhat confusing because the calculated offsets for each fix do
5876 not take into account the size of the pool (which is still under
5877 construction). */
5879 static Mnode *
5880 add_minipool_backward_ref (fix)
5881 Mfix * fix;
5883 /* If set, min_mp is the last pool_entry that has a lower constraint
5884 than the one we are trying to add. */
5885 Mnode * min_mp = NULL;
5886 /* This can be negative, since it is only a constraint. */
5887 HOST_WIDE_INT min_address = fix->address - fix->backwards;
5888 Mnode * mp;
5890 /* If we can't reach the current pool from this insn, or if we can't
5891 insert this entry at the end of the pool without pushing other
5892 fixes out of range, then we don't try. This ensures that we
5893 can't fail later on. */
5894 if (min_address >= minipool_barrier->address
5895 || (minipool_vector_tail->min_address + fix->fix_size
5896 >= minipool_barrier->address))
5897 return NULL;
5899 /* Scan the pool to see if a constant with the same value has
5900 already been added. While we are doing this, also note the
5901 location where we must insert the constant if it doesn't already
5902 exist. */
5903 for (mp = minipool_vector_tail; mp != NULL; mp = mp->prev)
5905 if (GET_CODE (fix->value) == GET_CODE (mp->value)
5906 && fix->mode == mp->mode
5907 && (GET_CODE (fix->value) != CODE_LABEL
5908 || (CODE_LABEL_NUMBER (fix->value)
5909 == CODE_LABEL_NUMBER (mp->value)))
5910 && rtx_equal_p (fix->value, mp->value)
5911 /* Check that there is enough slack to move this entry to the
5912 end of the table (this is conservative). */
5913 && (mp->max_address
5914 > (minipool_barrier->address
5915 + minipool_vector_tail->offset
5916 + minipool_vector_tail->fix_size)))
5918 mp->refcount++;
5919 return move_minipool_fix_backward_ref (mp, min_mp, min_address);
5922 if (min_mp != NULL)
5923 mp->min_address += fix->fix_size;
5924 else
5926 /* Note the insertion point if necessary. */
5927 if (mp->min_address < min_address)
5928 min_mp = mp;
5929 else if (mp->max_address
5930 < minipool_barrier->address + mp->offset + fix->fix_size)
5932 /* Inserting before this entry would push the fix beyond
5933 its maximum address (which can happen if we have
5934 re-located a forwards fix); force the new fix to come
5935 after it. */
5936 min_mp = mp;
5937 min_address = mp->min_address + fix->fix_size;
5942 /* We need to create a new entry. */
5943 mp = xmalloc (sizeof (* mp));
5944 mp->fix_size = fix->fix_size;
5945 mp->mode = fix->mode;
5946 mp->value = fix->value;
5947 mp->refcount = 1;
5948 mp->max_address = minipool_barrier->address + 65536;
5950 mp->min_address = min_address;
5952 if (min_mp == NULL)
5954 mp->prev = NULL;
5955 mp->next = minipool_vector_head;
5957 if (mp->next == NULL)
5959 minipool_vector_tail = mp;
5960 minipool_vector_label = gen_label_rtx ();
5962 else
5963 mp->next->prev = mp;
5965 minipool_vector_head = mp;
5967 else
5969 mp->next = min_mp->next;
5970 mp->prev = min_mp;
5971 min_mp->next = mp;
5973 if (mp->next != NULL)
5974 mp->next->prev = mp;
5975 else
5976 minipool_vector_tail = mp;
5979 /* Save the new entry. */
5980 min_mp = mp;
5982 if (mp->prev)
5983 mp = mp->prev;
5984 else
5985 mp->offset = 0;
5987 /* Scan over the following entries and adjust their offsets. */
5988 while (mp->next != NULL)
5990 if (mp->next->min_address < mp->min_address + mp->fix_size)
5991 mp->next->min_address = mp->min_address + mp->fix_size;
5993 if (mp->refcount)
5994 mp->next->offset = mp->offset + mp->fix_size;
5995 else
5996 mp->next->offset = mp->offset;
5998 mp = mp->next;
6001 return min_mp;
6004 static void
6005 assign_minipool_offsets (barrier)
6006 Mfix * barrier;
6008 HOST_WIDE_INT offset = 0;
6009 Mnode * mp;
6011 minipool_barrier = barrier;
6013 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
6015 mp->offset = offset;
6017 if (mp->refcount > 0)
6018 offset += mp->fix_size;
6022 /* Output the literal table */
6023 static void
6024 dump_minipool (scan)
6025 rtx scan;
6027 Mnode * mp;
6028 Mnode * nmp;
6030 if (rtl_dump_file)
6031 fprintf (rtl_dump_file,
6032 ";; Emitting minipool after insn %u; address %ld\n",
6033 INSN_UID (scan), (unsigned long) minipool_barrier->address);
6035 scan = emit_label_after (gen_label_rtx (), scan);
6036 scan = emit_insn_after (gen_align_4 (), scan);
6037 scan = emit_label_after (minipool_vector_label, scan);
6039 for (mp = minipool_vector_head; mp != NULL; mp = nmp)
6041 if (mp->refcount > 0)
6043 if (rtl_dump_file)
6045 fprintf (rtl_dump_file,
6046 ";; Offset %u, min %ld, max %ld ",
6047 (unsigned) mp->offset, (unsigned long) mp->min_address,
6048 (unsigned long) mp->max_address);
6049 arm_print_value (rtl_dump_file, mp->value);
6050 fputc ('\n', rtl_dump_file);
6053 switch (mp->fix_size)
6055 #ifdef HAVE_consttable_1
6056 case 1:
6057 scan = emit_insn_after (gen_consttable_1 (mp->value), scan);
6058 break;
6060 #endif
6061 #ifdef HAVE_consttable_2
6062 case 2:
6063 scan = emit_insn_after (gen_consttable_2 (mp->value), scan);
6064 break;
6066 #endif
6067 #ifdef HAVE_consttable_4
6068 case 4:
6069 scan = emit_insn_after (gen_consttable_4 (mp->value), scan);
6070 break;
6072 #endif
6073 #ifdef HAVE_consttable_8
6074 case 8:
6075 scan = emit_insn_after (gen_consttable_8 (mp->value), scan);
6076 break;
6078 #endif
6079 default:
6080 abort ();
6081 break;
6085 nmp = mp->next;
6086 free (mp);
6089 minipool_vector_head = minipool_vector_tail = NULL;
6090 scan = emit_insn_after (gen_consttable_end (), scan);
6091 scan = emit_barrier_after (scan);
6094 /* Return the cost of forcibly inserting a barrier after INSN. */
6096 static int
6097 arm_barrier_cost (insn)
6098 rtx insn;
6100 /* Basing the location of the pool on the loop depth is preferable,
6101 but at the moment, the basic block information seems to be
6102 corrupt by this stage of the compilation. */
6103 int base_cost = 50;
6104 rtx next = next_nonnote_insn (insn);
6106 if (next != NULL && GET_CODE (next) == CODE_LABEL)
6107 base_cost -= 20;
6109 switch (GET_CODE (insn))
6111 case CODE_LABEL:
6112 /* It will always be better to place the table before the label, rather
6113 than after it. */
6114 return 50;
6116 case INSN:
6117 case CALL_INSN:
6118 return base_cost;
6120 case JUMP_INSN:
6121 return base_cost - 10;
6123 default:
6124 return base_cost + 10;
6128 /* Find the best place in the insn stream in the range
6129 (FIX->address,MAX_ADDRESS) to forcibly insert a minipool barrier.
6130 Create the barrier by inserting a jump and add a new fix entry for
6131 it. */
6133 static Mfix *
6134 create_fix_barrier (fix, max_address)
6135 Mfix * fix;
6136 HOST_WIDE_INT max_address;
6138 HOST_WIDE_INT count = 0;
6139 rtx barrier;
6140 rtx from = fix->insn;
6141 rtx selected = from;
6142 int selected_cost;
6143 HOST_WIDE_INT selected_address;
6144 Mfix * new_fix;
6145 HOST_WIDE_INT max_count = max_address - fix->address;
6146 rtx label = gen_label_rtx ();
6148 selected_cost = arm_barrier_cost (from);
6149 selected_address = fix->address;
6151 while (from && count < max_count)
6153 rtx tmp;
6154 int new_cost;
6156 /* This code shouldn't have been called if there was a natural barrier
6157 within range. */
6158 if (GET_CODE (from) == BARRIER)
6159 abort ();
6161 /* Count the length of this insn. */
6162 count += get_attr_length (from);
6164 /* If there is a jump table, add its length. */
6165 tmp = is_jump_table (from);
6166 if (tmp != NULL)
6168 count += get_jump_table_size (tmp);
6170 /* Jump tables aren't in a basic block, so base the cost on
6171 the dispatch insn. If we select this location, we will
6172 still put the pool after the table. */
6173 new_cost = arm_barrier_cost (from);
6175 if (count < max_count && new_cost <= selected_cost)
6177 selected = tmp;
6178 selected_cost = new_cost;
6179 selected_address = fix->address + count;
6182 /* Continue after the dispatch table. */
6183 from = NEXT_INSN (tmp);
6184 continue;
6187 new_cost = arm_barrier_cost (from);
6189 if (count < max_count && new_cost <= selected_cost)
6191 selected = from;
6192 selected_cost = new_cost;
6193 selected_address = fix->address + count;
6196 from = NEXT_INSN (from);
6199 /* Create a new JUMP_INSN that branches around a barrier. */
6200 from = emit_jump_insn_after (gen_jump (label), selected);
6201 JUMP_LABEL (from) = label;
6202 barrier = emit_barrier_after (from);
6203 emit_label_after (label, barrier);
6205 /* Create a minipool barrier entry for the new barrier. */
6206 new_fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* new_fix));
6207 new_fix->insn = barrier;
6208 new_fix->address = selected_address;
6209 new_fix->next = fix->next;
6210 fix->next = new_fix;
6212 return new_fix;
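/* Editorial note: the code above produces a sequence of the shape

	b	.Lnnn		@ the new jump insn
	@ barrier - dump_minipool will later emit the pool here
   .Lnnn:			@ execution resumes here

   (.Lnnn is a hypothetical label), so the constant pool ends up in
   the shadow of an unconditional branch. */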
6215 /* Record that there is a natural barrier in the insn stream at
6216 ADDRESS. */
6217 static void
6218 push_minipool_barrier (insn, address)
6219 rtx insn;
6220 HOST_WIDE_INT address;
6222 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
6224 fix->insn = insn;
6225 fix->address = address;
6227 fix->next = NULL;
6228 if (minipool_fix_head != NULL)
6229 minipool_fix_tail->next = fix;
6230 else
6231 minipool_fix_head = fix;
6233 minipool_fix_tail = fix;
6236 /* Record INSN, which will need fixing up to load a value from the
6237 minipool. ADDRESS is the offset of the insn since the start of the
6238 function; LOC is a pointer to the part of the insn which requires
6239 fixing; VALUE is the constant that must be loaded, which is of type
6240 MODE. */
6241 static void
6242 push_minipool_fix (insn, address, loc, mode, value)
6243 rtx insn;
6244 HOST_WIDE_INT address;
6245 rtx * loc;
6246 enum machine_mode mode;
6247 rtx value;
6249 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
6251 #ifdef AOF_ASSEMBLER
6252 /* PIC symbol references need to be converted into offsets into the
6253 based area. */
6254 /* XXX This shouldn't be done here. */
6255 if (flag_pic && GET_CODE (value) == SYMBOL_REF)
6256 value = aof_pic_entry (value);
6257 #endif /* AOF_ASSEMBLER */
6259 fix->insn = insn;
6260 fix->address = address;
6261 fix->loc = loc;
6262 fix->mode = mode;
6263 fix->fix_size = MINIPOOL_FIX_SIZE (mode);
6264 fix->value = value;
6265 fix->forwards = get_attr_pool_range (insn);
6266 fix->backwards = get_attr_neg_pool_range (insn);
6267 fix->minipool = NULL;
6269 /* If an insn doesn't have a range defined for it, then it isn't
6270 expecting to be reworked by this code. Better to abort now than
6271 to generate duff assembly code. */
6272 if (fix->forwards == 0 && fix->backwards == 0)
6273 abort ();
6275 if (rtl_dump_file)
6277 fprintf (rtl_dump_file,
6278 ";; %smode fixup for i%d; addr %lu, range (%ld,%ld): ",
6279 GET_MODE_NAME (mode),
6280 INSN_UID (insn), (unsigned long) address,
6281 -1 * (long)fix->backwards, (long)fix->forwards);
6282 arm_print_value (rtl_dump_file, fix->value);
6283 fprintf (rtl_dump_file, "\n");
6286 /* Add it to the chain of fixes. */
6287 fix->next = NULL;
6289 if (minipool_fix_head != NULL)
6290 minipool_fix_tail->next = fix;
6291 else
6292 minipool_fix_head = fix;
6294 minipool_fix_tail = fix;
6297 /* Scan INSN and note any of its operands that need fixing. */
6299 static void
6300 note_invalid_constants (insn, address)
6301 rtx insn;
6302 HOST_WIDE_INT address;
6304 int opno;
6306 extract_insn (insn);
6308 if (!constrain_operands (1))
6309 fatal_insn_not_found (insn);
6311 /* Fill in recog_op_alt with information about the constraints of this
6312 insn. */
6313 preprocess_constraints ();
6315 for (opno = 0; opno < recog_data.n_operands; opno++)
6317 /* Things we need to fix can only occur in inputs. */
6318 if (recog_data.operand_type[opno] != OP_IN)
6319 continue;
6321 /* If this alternative is a memory reference, then any mention
6322 of constants in this alternative is really to fool reload
6323 into allowing us to accept one there. We need to fix them up
6324 now so that we output the right code. */
6325 if (recog_op_alt[opno][which_alternative].memory_ok)
6327 rtx op = recog_data.operand[opno];
6329 if (CONSTANT_P (op))
6330 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
6331 recog_data.operand_mode[opno], op);
6332 #if 0
6333 /* RWE: Now we look correctly at the operands for the insn,
6334 this shouldn't be needed any more. */
6335 #ifndef AOF_ASSEMBLER
6336 /* XXX Is this still needed? */
6337 else if (GET_CODE (op) == UNSPEC && XINT (op, 1) == UNSPEC_PIC_SYM)
6338 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
6339 recog_data.operand_mode[opno],
6340 XVECEXP (op, 0, 0));
6341 #endif
6342 #endif
6343 else if (GET_CODE (op) == MEM
6344 && GET_CODE (XEXP (op, 0)) == SYMBOL_REF
6345 && CONSTANT_POOL_ADDRESS_P (XEXP (op, 0)))
6346 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
6347 recog_data.operand_mode[opno],
6348 get_pool_constant (XEXP (op, 0)));
6353 void
6354 arm_reorg (first)
6355 rtx first;
6357 rtx insn;
6358 HOST_WIDE_INT address = 0;
6359 Mfix * fix;
6361 minipool_fix_head = minipool_fix_tail = NULL;
6363 /* The first insn must always be a note, or the code below won't
6364 scan it properly. */
6365 if (GET_CODE (first) != NOTE)
6366 abort ();
6368 /* Scan all the insns and record the operands that will need fixing. */
6369 for (insn = next_nonnote_insn (first); insn; insn = next_nonnote_insn (insn))
6371 if (GET_CODE (insn) == BARRIER)
6372 push_minipool_barrier (insn, address);
6373 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
6374 || GET_CODE (insn) == JUMP_INSN)
6376 rtx table;
6378 note_invalid_constants (insn, address);
6379 address += get_attr_length (insn);
6381 /* If the insn is a vector jump, add the size of the table
6382 and skip the table. */
6383 if ((table = is_jump_table (insn)) != NULL)
6385 address += get_jump_table_size (table);
6386 insn = table;
6391 fix = minipool_fix_head;
6393 /* Now scan the fixups and perform the required changes. */
6394 while (fix)
6396 Mfix * ftmp;
6397 Mfix * fdel;
6398 Mfix * last_added_fix;
6399 Mfix * last_barrier = NULL;
6400 Mfix * this_fix;
6402 /* Skip any further barriers before the next fix. */
6403 while (fix && GET_CODE (fix->insn) == BARRIER)
6404 fix = fix->next;
6406 /* No more fixes. */
6407 if (fix == NULL)
6408 break;
6410 last_added_fix = NULL;
6412 for (ftmp = fix; ftmp; ftmp = ftmp->next)
6414 if (GET_CODE (ftmp->insn) == BARRIER)
6416 if (ftmp->address >= minipool_vector_head->max_address)
6417 break;
6419 last_barrier = ftmp;
6421 else if ((ftmp->minipool = add_minipool_forward_ref (ftmp)) == NULL)
6422 break;
6424 last_added_fix = ftmp; /* Keep track of the last fix added. */
6427 /* If we found a barrier, drop back to that; any fixes that we
6428 could have reached but come after the barrier will now go in
6429 the next mini-pool. */
6430 if (last_barrier != NULL)
6432 /* Reduce the refcount for those fixes that won't go into this
6433 pool after all. */
6434 for (fdel = last_barrier->next;
6435 fdel && fdel != ftmp;
6436 fdel = fdel->next)
6438 fdel->minipool->refcount--;
6439 fdel->minipool = NULL;
6442 ftmp = last_barrier;
6444 else
6446 /* ftmp is the first fix that we can't fit into this pool, and
6447 there are no natural barriers that we could use. Insert a
6448 new barrier in the code somewhere between the previous
6449 fix and this one, and arrange to jump around it. */
6450 HOST_WIDE_INT max_address;
6452 /* The last item on the list of fixes must be a barrier, so
6453 we can never run off the end of the list of fixes without
6454 last_barrier being set. */
6455 if (ftmp == NULL)
6456 abort ();
6458 max_address = minipool_vector_head->max_address;
6459 /* Check that there isn't another fix that is in range that
6460 we couldn't fit into this pool because the pool was
6461 already too large: we need to put the pool before such an
6462 instruction. */
6463 if (ftmp->address < max_address)
6464 max_address = ftmp->address;
6466 last_barrier = create_fix_barrier (last_added_fix, max_address);
6469 assign_minipool_offsets (last_barrier);
6471 while (ftmp)
6473 if (GET_CODE (ftmp->insn) != BARRIER
6474 && ((ftmp->minipool = add_minipool_backward_ref (ftmp))
6475 == NULL))
6476 break;
6478 ftmp = ftmp->next;
6481 /* Scan over the fixes we have identified for this pool, fixing them
6482 up and adding the constants to the pool itself. */
6483 for (this_fix = fix; this_fix && ftmp != this_fix;
6484 this_fix = this_fix->next)
6485 if (GET_CODE (this_fix->insn) != BARRIER)
6487 rtx addr
6488 = plus_constant (gen_rtx_LABEL_REF (VOIDmode,
6489 minipool_vector_label),
6490 this_fix->minipool->offset);
6491 *this_fix->loc = gen_rtx_MEM (this_fix->mode, addr);
6494 dump_minipool (last_barrier->insn);
6495 fix = ftmp;
6498 /* From now on we must synthesize any constants that we can't handle
6499 directly. This can happen if the RTL gets split during final
6500 instruction generation. */
6501 after_arm_reorg = 1;
6503 /* Free the minipool memory. */
6504 obstack_free (&minipool_obstack, minipool_startobj);
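/* Editorial sketch of the net effect of arm_reorg, for a
   hypothetical constant that no single mov/mvn can build:

	before:	(set (reg:SI r0) (const_int 0x12345678))
	after:	(set (reg:SI r0) (mem:SI (label_ref .LCP0)))

   plus a pool entry for 0x12345678 dumped within the load's
   pool_range (.LCP0 stands for minipool_vector_label). */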
6507 /* Routines to output assembly language. */
6509 /* If the rtx is the correct value then return the string of the number.
6510 In this way we can ensure that valid double constants are generated even
6511 when cross compiling. */
6513 const char *
6514 fp_immediate_constant (x)
6515 rtx x;
6517 REAL_VALUE_TYPE r;
6518 int i;
6520 if (!fpa_consts_inited)
6521 init_fpa_table ();
6523 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
6524 for (i = 0; i < 8; i++)
6525 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
6526 return strings_fpa[i];
6528 abort ();
6531 /* As for fp_immediate_constant, but value is passed directly, not in rtx. */
6533 static const char *
6534 fp_const_from_val (r)
6535 REAL_VALUE_TYPE * r;
6537 int i;
6539 if (!fpa_consts_inited)
6540 init_fpa_table ();
6542 for (i = 0; i < 8; i++)
6543 if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
6544 return strings_fpa[i];
6546 abort ();
6549 /* Output the operands of a LDM/STM instruction to STREAM.
6550 MASK is the ARM register set mask of which only bits 0-15 are important.
6551 REG is the base register, either the frame pointer or the stack pointer.
6552 INSTR is the possibly suffixed load or store instruction. */
6554 static void
6555 print_multi_reg (stream, instr, reg, mask)
6556 FILE * stream;
6557 const char * instr;
6558 int reg;
6559 int mask;
6561 int i;
6562 int not_first = FALSE;
6564 fputc ('\t', stream);
6565 asm_fprintf (stream, instr, reg);
6566 fputs (", {", stream);
6568 for (i = 0; i <= LAST_ARM_REGNUM; i++)
6569 if (mask & (1 << i))
6571 if (not_first)
6572 fprintf (stream, ", ");
6574 asm_fprintf (stream, "%r", i);
6575 not_first = TRUE;
6578 fprintf (stream, "}%s\n", TARGET_APCS_32 ? "" : "^");
6581 /* Output a 'call' insn. */
6583 const char *
6584 output_call (operands)
6585 rtx * operands;
6587 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
6589 if (REGNO (operands[0]) == LR_REGNUM)
6591 operands[0] = gen_rtx_REG (SImode, IP_REGNUM);
6592 output_asm_insn ("mov%?\t%0, %|lr", operands);
6595 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
6597 if (TARGET_INTERWORK)
6598 output_asm_insn ("bx%?\t%0", operands);
6599 else
6600 output_asm_insn ("mov%?\t%|pc, %0", operands);
6602 return "";
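/* Editorial example: for a call through r2 the code above emits
   "mov lr, pc" followed by "bx r2" (interworking) or "mov pc, r2";
   a call through lr is first moved into ip and the call made
   through ip instead. */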
6605 static int
6606 eliminate_lr2ip (x)
6607 rtx * x;
6609 int something_changed = 0;
6610 rtx x0 = * x;
6611 int code = GET_CODE (x0);
6612 int i, j;
6613 const char * fmt;
6615 switch (code)
6617 case REG:
6618 if (REGNO (x0) == LR_REGNUM)
6620 *x = gen_rtx_REG (SImode, IP_REGNUM);
6621 return 1;
6623 return 0;
6624 default:
6625 /* Scan through the sub-elements and change any references there. */
6626 fmt = GET_RTX_FORMAT (code);
6628 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6629 if (fmt[i] == 'e')
6630 something_changed |= eliminate_lr2ip (&XEXP (x0, i));
6631 else if (fmt[i] == 'E')
6632 for (j = 0; j < XVECLEN (x0, i); j++)
6633 something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));
6635 return something_changed;
6639 /* Output a 'call' insn that is a reference in memory. */
6641 const char *
6642 output_call_mem (operands)
6643 rtx * operands;
6645 operands[0] = copy_rtx (operands[0]); /* Be ultra careful. */
6646 /* Handle calls using lr by using ip (which may be clobbered in subr anyway). */
6647 if (eliminate_lr2ip (&operands[0]))
6648 output_asm_insn ("mov%?\t%|ip, %|lr", operands);
6650 if (TARGET_INTERWORK)
6652 output_asm_insn ("ldr%?\t%|ip, %0", operands);
6653 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
6654 output_asm_insn ("bx%?\t%|ip", operands);
6656 else
6658 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
6659 output_asm_insn ("ldr%?\t%|pc, %0", operands);
6662 return "";
6666 /* Output a move from arm registers to an fpu register.
6667 OPERANDS[0] is an fpu register.
6668 OPERANDS[1] is the first register of an arm register pair. */
6670 const char *
6671 output_mov_long_double_fpu_from_arm (operands)
6672 rtx * operands;
6674 int arm_reg0 = REGNO (operands[1]);
6675 rtx ops[3];
6677 if (arm_reg0 == IP_REGNUM)
6678 abort ();
6680 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6681 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6682 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
6684 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
6685 output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);
6687 return "";
6690 /* Output a move from an fpu register to arm registers.
6691 OPERANDS[0] is the first register of an arm register pair.
6692 OPERANDS[1] is an fpu register. */
6694 const char *
6695 output_mov_long_double_arm_from_fpu (operands)
6696 rtx * operands;
6698 int arm_reg0 = REGNO (operands[0]);
6699 rtx ops[3];
6701 if (arm_reg0 == IP_REGNUM)
6702 abort ();
6704 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6705 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6706 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
6708 output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
6709 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
6710 return "";
6713 /* Output a move of a long double from arm registers to arm registers.
6714 OPERANDS[0] is the destination.
6715 OPERANDS[1] is the source. */
6717 const char *
6718 output_mov_long_double_arm_from_arm (operands)
6719 rtx * operands;
6721 /* We have to be careful here because the two might overlap. */
6722 int dest_start = REGNO (operands[0]);
6723 int src_start = REGNO (operands[1]);
6724 rtx ops[2];
6725 int i;
6727 if (dest_start < src_start)
6729 for (i = 0; i < 3; i++)
6731 ops[0] = gen_rtx_REG (SImode, dest_start + i);
6732 ops[1] = gen_rtx_REG (SImode, src_start + i);
6733 output_asm_insn ("mov%?\t%0, %1", ops);
6736 else
6738 for (i = 2; i >= 0; i--)
6740 ops[0] = gen_rtx_REG (SImode, dest_start + i);
6741 ops[1] = gen_rtx_REG (SImode, src_start + i);
6742 output_asm_insn ("mov%?\t%0, %1", ops);
6746 return "";
6750 /* Output a move from arm registers to an fpu register.
6751 OPERANDS[0] is an fpu register.
6752 OPERANDS[1] is the first register of an arm register pair. */
6754 const char *
6755 output_mov_double_fpu_from_arm (operands)
6756 rtx * operands;
6758 int arm_reg0 = REGNO (operands[1]);
6759 rtx ops[2];
6761 if (arm_reg0 == IP_REGNUM)
6762 abort ();
6764 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6765 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6766 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
6767 output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
6768 return "";
6771 /* Output a move from an fpu register to arm registers.
6772 OPERANDS[0] is the first register of an arm register pair.
6773 OPERANDS[1] is an fpu register. */
6775 const char *
6776 output_mov_double_arm_from_fpu (operands)
6777 rtx * operands;
6779 int arm_reg0 = REGNO (operands[0]);
6780 rtx ops[2];
6782 if (arm_reg0 == IP_REGNUM)
6783 abort ();
6785 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6786 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6787 output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
6788 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
6789 return "";
6792 /* Output a move between double words.
6793 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
6794 or MEM<-REG and all MEMs must be offsettable addresses. */
6796 const char *
6797 output_move_double (operands)
6798 rtx * operands;
6800 enum rtx_code code0 = GET_CODE (operands[0]);
6801 enum rtx_code code1 = GET_CODE (operands[1]);
6802 rtx otherops[3];
6804 if (code0 == REG)
6806 int reg0 = REGNO (operands[0]);
6808 otherops[0] = gen_rtx_REG (SImode, 1 + reg0);
6810 if (code1 == REG)
6812 int reg1 = REGNO (operands[1]);
6813 if (reg1 == IP_REGNUM)
6814 abort ();
6816 /* Ensure the second source is not overwritten. */
6817 if (reg1 == reg0 + (WORDS_BIG_ENDIAN ? -1 : 1))
6818 output_asm_insn ("mov%?\t%Q0, %Q1\n\tmov%?\t%R0, %R1", operands);
6819 else
6820 output_asm_insn ("mov%?\t%R0, %R1\n\tmov%?\t%Q0, %Q1", operands);
6822 else if (code1 == CONST_DOUBLE)
6824 if (GET_MODE (operands[1]) == DFmode)
6826 REAL_VALUE_TYPE r;
6827 long l[2];
6829 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
6830 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
6831 otherops[1] = GEN_INT (l[1]);
6832 operands[1] = GEN_INT (l[0]);
6834 else if (GET_MODE (operands[1]) != VOIDmode)
6835 abort ();
6836 else if (WORDS_BIG_ENDIAN)
6838 otherops[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
6839 operands[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
6841 else
6843 otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
6844 operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
6847 output_mov_immediate (operands);
6848 output_mov_immediate (otherops);
6850 else if (code1 == CONST_INT)
6852 #if HOST_BITS_PER_WIDE_INT > 32
6853 /* If HOST_WIDE_INT is more than 32 bits, the intval tells us
6854 what the upper word is. */
6855 if (WORDS_BIG_ENDIAN)
6857 otherops[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
6858 operands[1] = GEN_INT (INTVAL (operands[1]) >> 32);
6860 else
6862 otherops[1] = GEN_INT (INTVAL (operands[1]) >> 32);
6863 operands[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
6865 #else
6866 /* Sign extend the intval into the high-order word. */
6867 if (WORDS_BIG_ENDIAN)
6869 otherops[1] = operands[1];
6870 operands[1] = (INTVAL (operands[1]) < 0
6871 ? constm1_rtx : const0_rtx);
6873 else
6874 otherops[1] = INTVAL (operands[1]) < 0 ? constm1_rtx : const0_rtx;
6875 #endif
6876 output_mov_immediate (otherops);
6877 output_mov_immediate (operands);
6879 else if (code1 == MEM)
6881 switch (GET_CODE (XEXP (operands[1], 0)))
6883 case REG:
6884 output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
6885 break;
6887 case PRE_INC:
6888 abort (); /* Should never happen now. */
6889 break;
6891 case PRE_DEC:
6892 output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
6893 break;
6895 case POST_INC:
6896 output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
6897 break;
6899 case POST_DEC:
6900 abort (); /* Should never happen now. */
6901 break;
6903 case LABEL_REF:
6904 case CONST:
6905 output_asm_insn ("adr%?\t%0, %1", operands);
6906 output_asm_insn ("ldm%?ia\t%0, %M0", operands);
6907 break;
6909 default:
6910 if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1),
6911 GET_MODE (XEXP (XEXP (operands[1], 0), 1))))
6913 otherops[0] = operands[0];
6914 otherops[1] = XEXP (XEXP (operands[1], 0), 0);
6915 otherops[2] = XEXP (XEXP (operands[1], 0), 1);
6917 if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
6919 if (GET_CODE (otherops[2]) == CONST_INT)
6921 switch (INTVAL (otherops[2]))
6923 case -8:
6924 output_asm_insn ("ldm%?db\t%1, %M0", otherops);
6925 return "";
6926 case -4:
6927 output_asm_insn ("ldm%?da\t%1, %M0", otherops);
6928 return "";
6929 case 4:
6930 output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
6931 return "";
6934 if (!(const_ok_for_arm (INTVAL (otherops[2]))))
6935 output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
6936 else
6937 output_asm_insn ("add%?\t%0, %1, %2", otherops);
6939 else
6940 output_asm_insn ("add%?\t%0, %1, %2", otherops);
6942 else
6943 output_asm_insn ("sub%?\t%0, %1, %2", otherops);
6945 return "ldm%?ia\t%0, %M0";
6947 else
6949 otherops[1] = adjust_address (operands[1], VOIDmode, 4);
6950 /* Take care of overlapping base/data reg. */
6951 if (reg_mentioned_p (operands[0], operands[1]))
6953 output_asm_insn ("ldr%?\t%0, %1", otherops);
6954 output_asm_insn ("ldr%?\t%0, %1", operands);
6956 else
6958 output_asm_insn ("ldr%?\t%0, %1", operands);
6959 output_asm_insn ("ldr%?\t%0, %1", otherops);
6964 else
6965 abort (); /* Constraints should prevent this. */
6967 else if (code0 == MEM && code1 == REG)
6969 if (REGNO (operands[1]) == IP_REGNUM)
6970 abort ();
6972 switch (GET_CODE (XEXP (operands[0], 0)))
6974 case REG:
6975 output_asm_insn ("stm%?ia\t%m0, %M1", operands);
6976 break;
6978 case PRE_INC:
6979 abort (); /* Should never happen now. */
6980 break;
6982 case PRE_DEC:
6983 output_asm_insn ("stm%?db\t%m0!, %M1", operands);
6984 break;
6986 case POST_INC:
6987 output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
6988 break;
6990 case POST_DEC:
6991 abort (); /* Should never happen now. */
6992 break;
6994 case PLUS:
6995 if (GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT)
6997 switch (INTVAL (XEXP (XEXP (operands[0], 0), 1)))
6999 case -8:
7000 output_asm_insn ("stm%?db\t%m0, %M1", operands);
7001 return "";
7003 case -4:
7004 output_asm_insn ("stm%?da\t%m0, %M1", operands);
7005 return "";
7007 case 4:
7008 output_asm_insn ("stm%?ib\t%m0, %M1", operands);
7009 return "";
7012 /* Fall through. */
7014 default:
7015 otherops[0] = adjust_address (operands[0], VOIDmode, 4);
7016 otherops[1] = gen_rtx_REG (SImode, 1 + REGNO (operands[1]));
7017 output_asm_insn ("str%?\t%1, %0", operands);
7018 output_asm_insn ("str%?\t%1, %0", otherops);
7021 else
7022 /* Constraints should prevent this. */
7023 abort ();
7025 return "";
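/* Editorial example: a DImode load of r0/r1 from the address
   (plus r2 (const_int 4)) matches the INTVAL == 4 case above and
   emits the single instruction "ldmib r2, {r0, r1}" instead of an
   add followed by "ldmia". */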
7029 /* Output an arbitrary MOV reg, #n.
7030 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
7032 const char *
7033 output_mov_immediate (operands)
7034 rtx * operands;
7036 HOST_WIDE_INT n = INTVAL (operands[1]);
7038 /* Try to use one MOV. */
7039 if (const_ok_for_arm (n))
7040 output_asm_insn ("mov%?\t%0, %1", operands);
7042 /* Try to use one MVN. */
7043 else if (const_ok_for_arm (~n))
7045 operands[1] = GEN_INT (~n);
7046 output_asm_insn ("mvn%?\t%0, %1", operands);
7048 else
7050 int n_ones = 0;
7051 int i;
7053 /* If all else fails, make it out of ORRs or BICs as appropriate. */
7054 for (i = 0; i < 32; i ++)
7055 if (n & 1 << i)
7056 n_ones ++;
7058 if (n_ones > 16) /* Shorter to use MVN with BIC in this case. */
7059 output_multi_immediate (operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1, ~ n);
7060 else
7061 output_multi_immediate (operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1, n);
7064 return "";
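/* Editorial examples: 255 is a valid ARM immediate, so a single
   "mov r0, #255" suffices; 0xffffff00 is not, but its complement
   255 is, so "mvn r0, #255" is emitted; anything else falls
   through to output_multi_immediate (defined below). */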
7067 /* Output an ADD r, s, #n where n may be too big for one instruction.
7068 If adding zero to one register, output nothing. */
7070 const char *
7071 output_add_immediate (operands)
7072 rtx * operands;
7074 HOST_WIDE_INT n = INTVAL (operands[2]);
7076 if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
7078 if (n < 0)
7079 output_multi_immediate (operands,
7080 "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
7081 -n);
7082 else
7083 output_multi_immediate (operands,
7084 "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
7088 return "";
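/* Editorial example: adding 4100 (0x1004), which is not a valid
   ARM immediate, is split into two instructions:

	add	r0, r1, #4
	add	r0, r0, #4096
*/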
7091 /* Output a multiple immediate operation.
7092 OPERANDS is the vector of operands referred to in the output patterns.
7093 INSTR1 is the output pattern to use for the first constant.
7094 INSTR2 is the output pattern to use for subsequent constants.
7095 IMMED_OP is the index of the constant slot in OPERANDS.
7096 N is the constant value. */
7098 static const char *
7099 output_multi_immediate (operands, instr1, instr2, immed_op, n)
7100 rtx * operands;
7101 const char * instr1;
7102 const char * instr2;
7103 int immed_op;
7104 HOST_WIDE_INT n;
7106 #if HOST_BITS_PER_WIDE_INT > 32
7107 n &= 0xffffffff;
7108 #endif
7110 if (n == 0)
7112 /* Quick and easy output. */
7113 operands[immed_op] = const0_rtx;
7114 output_asm_insn (instr1, operands);
7116 else
7118 int i;
7119 const char * instr = instr1;
7121 /* Note that n is never zero here (which would give no output). */
7122 for (i = 0; i < 32; i += 2)
7124 if (n & (3 << i))
7126 operands[immed_op] = GEN_INT (n & (255 << i));
7127 output_asm_insn (instr, operands);
7128 instr = instr2;
7129 i += 6;
7134 return "";
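/* Editorial trace of the scan above for n = 0x12345678 (13 set
   bits, so output_mov_immediate picks the MOV/ORR form): the loop
   steps two bits at a time and emits an 8-bit chunk whenever it
   finds a set bit, giving

	mov	r0, #0x278		@ found at i = 2
	orr	r0, r0, #0x5400		@ i = 10
	orr	r0, r0, #0x2340000	@ i = 18
	orr	r0, r0, #0x10000000	@ i = 28

   the "i += 6" plus the loop increment skips past each chunk just
   emitted. */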
7137 /* Return the appropriate ARM instruction for the operation code.
7138 The returned result should not be overwritten. OP is the rtx of the
7139 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
7140 was shifted. */
7142 const char *
7143 arithmetic_instr (op, shift_first_arg)
7144 rtx op;
7145 int shift_first_arg;
7147 switch (GET_CODE (op))
7149 case PLUS:
7150 return "add";
7152 case MINUS:
7153 return shift_first_arg ? "rsb" : "sub";
7155 case IOR:
7156 return "orr";
7158 case XOR:
7159 return "eor";
7161 case AND:
7162 return "and";
7164 default:
7165 abort ();
7169 /* Ensure valid constant shifts and return the appropriate shift mnemonic
7170 for the operation code. The returned result should not be overwritten.
7171 OP is the rtx code of the shift.
7172 On exit, *AMOUNTP will be -1 if the shift is by a register, or the
7173 constant shift amount otherwise. */
7175 static const char *
7176 shift_op (op, amountp)
7177 rtx op;
7178 HOST_WIDE_INT *amountp;
7180 const char * mnem;
7181 enum rtx_code code = GET_CODE (op);
7183 if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
7184 *amountp = -1;
7185 else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
7186 *amountp = INTVAL (XEXP (op, 1));
7187 else
7188 abort ();
7190 switch (code)
7192 case ASHIFT:
7193 mnem = "asl";
7194 break;
7196 case ASHIFTRT:
7197 mnem = "asr";
7198 break;
7200 case LSHIFTRT:
7201 mnem = "lsr";
7202 break;
7204 case ROTATERT:
7205 mnem = "ror";
7206 break;
7208 case MULT:
7209 /* We never have to worry about the amount being other than a
7210 power of 2, since this case can never be reloaded from a reg. */
7211 if (*amountp != -1)
7212 *amountp = int_log2 (*amountp);
7213 else
7214 abort ();
7215 return "asl";
7217 default:
7218 abort ();
7221 if (*amountp != -1)
7223 /* This is not 100% correct, but follows from the desire to merge
7224 multiplication by a power of 2 with the recognizer for a
7225 shift. >=32 is not a valid shift for "asl", so we must try and
7226 output a shift that produces the correct arithmetical result.
7227 Using lsr #32 is identical except for the fact that the carry bit
7228 is not set correctly if we set the flags; but we never use the
7229 carry bit from such an operation, so we can ignore that. */
7230 if (code == ROTATERT)
7231 /* Rotate is just modulo 32. */
7232 *amountp &= 31;
7233 else if (*amountp != (*amountp & 31))
7235 if (code == ASHIFT)
7236 mnem = "lsr";
7237 *amountp = 32;
7240 /* Shifts of 0 are no-ops. */
7241 if (*amountp == 0)
7242 return NULL;
7245 return mnem;
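/* Editorial examples: (mult x 8) prints as "asl #3" via int_log2;
   (rotatert x 33) is reduced modulo 32 to "ror #1"; and an
   unencodable (ashift x 34) is rewritten as "lsr #32", which still
   yields the arithmetically correct result of zero. */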
7248 /* Obtain the shift from the POWER of two. */
7250 static HOST_WIDE_INT
7251 int_log2 (power)
7252 HOST_WIDE_INT power;
7254 HOST_WIDE_INT shift = 0;
7256 while ((((HOST_WIDE_INT) 1 << shift) & power) == 0)
7258 if (shift > 31)
7259 abort ();
7260 shift ++;
7263 return shift;
7266 /* Output a .ascii pseudo-op, keeping track of lengths. This is because
7267 /bin/as is horribly restrictive. */
7268 #define MAX_ASCII_LEN 51
7270 void
7271 output_ascii_pseudo_op (stream, p, len)
7272 FILE * stream;
7273 const unsigned char * p;
7274 int len;
7276 int i;
7277 int len_so_far = 0;
7279 fputs ("\t.ascii\t\"", stream);
7281 for (i = 0; i < len; i++)
7283 int c = p[i];
7285 if (len_so_far >= MAX_ASCII_LEN)
7287 fputs ("\"\n\t.ascii\t\"", stream);
7288 len_so_far = 0;
7291 switch (c)
7293 case TARGET_TAB:
7294 fputs ("\\t", stream);
7295 len_so_far += 2;
7296 break;
7298 case TARGET_FF:
7299 fputs ("\\f", stream);
7300 len_so_far += 2;
7301 break;
7303 case TARGET_BS:
7304 fputs ("\\b", stream);
7305 len_so_far += 2;
7306 break;
7308 case TARGET_CR:
7309 fputs ("\\r", stream);
7310 len_so_far += 2;
7311 break;
7313 case TARGET_NEWLINE:
7314 fputs ("\\n", stream);
7315 c = p [i + 1];
7316 if ((c >= ' ' && c <= '~')
7317 || c == TARGET_TAB)
7318 /* This is a good place for a line break. */
7319 len_so_far = MAX_ASCII_LEN;
7320 else
7321 len_so_far += 2;
7322 break;
7324 case '\"':
7325 case '\\':
7326 putc ('\\', stream);
7327 len_so_far++;
7328 /* drop through. */
7330 default:
7331 if (c >= ' ' && c <= '~')
7333 putc (c, stream);
7334 len_so_far++;
7336 else
7338 fprintf (stream, "\\%03o", c);
7339 len_so_far += 4;
7341 break;
7345 fputs ("\"\n", stream);
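/* Editorial example: the string "hi\tthere\n" comes out as

	.ascii	"hi\tthere\n"

   while a string longer than MAX_ASCII_LEN is split across several
   .ascii directives, preferring to break just after a newline. */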
7348 /* Compute the register save mask for registers 0 through 12
7349 inclusive. This code is used by both arm_compute_save_reg_mask
7350 and arm_compute_initial_elimination_offset. */
7352 static unsigned long
7353 arm_compute_save_reg0_reg12_mask ()
7355 unsigned long func_type = arm_current_func_type ();
7356 unsigned int save_reg_mask = 0;
7357 unsigned int reg;
7359 if (IS_INTERRUPT (func_type))
7361 unsigned int max_reg;
7362 /* Interrupt functions must not corrupt any registers,
7363 even call clobbered ones. If this is a leaf function
7364 we can just examine the registers used by the RTL, but
7365 otherwise we have to assume that whatever function is
7366 called might clobber anything, and so we have to save
7367 all the call-clobbered registers as well. */
7368 if (ARM_FUNC_TYPE (func_type) == ARM_FT_FIQ)
7369 /* FIQ handlers have registers r8 - r12 banked, so
7370 we only need to check r0 - r7. Normal ISRs only
7371 bank r14 and r15, so we must check up to r12.
7372 r13 is the stack pointer which is always preserved,
7373 so we do not need to consider it here. */
7374 max_reg = 7;
7375 else
7376 max_reg = 12;
7378 for (reg = 0; reg <= max_reg; reg++)
7379 if (regs_ever_live[reg]
7380 || (! current_function_is_leaf && call_used_regs [reg]))
7381 save_reg_mask |= (1 << reg);
7383 else
7385 /* In the normal case we only need to save those registers
7386 which are call saved and which are used by this function. */
7387 for (reg = 0; reg <= 10; reg++)
7388 if (regs_ever_live[reg] && ! call_used_regs [reg])
7389 save_reg_mask |= (1 << reg);
7391 /* Handle the frame pointer as a special case. */
7392 if (! TARGET_APCS_FRAME
7393 && ! frame_pointer_needed
7394 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
7395 && ! call_used_regs[HARD_FRAME_POINTER_REGNUM])
7396 save_reg_mask |= 1 << HARD_FRAME_POINTER_REGNUM;
7398 /* If we aren't loading the PIC register,
7399 don't stack it even though it may be live. */
7400 if (flag_pic
7401 && ! TARGET_SINGLE_PIC_BASE
7402 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
7403 save_reg_mask |= 1 << PIC_OFFSET_TABLE_REGNUM;
7406 return save_reg_mask;
7409 /* Compute a bit mask of which registers need to be
7410 saved on the stack for the current function. */
7412 static unsigned long
7413 arm_compute_save_reg_mask ()
7415 unsigned int save_reg_mask = 0;
7416 unsigned long func_type = arm_current_func_type ();
7418 if (IS_NAKED (func_type))
7419 /* This should never really happen. */
7420 return 0;
7422 /* If we are creating a stack frame, then we must save the frame pointer,
7423 IP (which will hold the old stack pointer), LR and the PC. */
7424 if (frame_pointer_needed)
7425 save_reg_mask |=
7426 (1 << ARM_HARD_FRAME_POINTER_REGNUM)
7427 | (1 << IP_REGNUM)
7428 | (1 << LR_REGNUM)
7429 | (1 << PC_REGNUM);
7431 /* Volatile functions do not return, so there
7432 is no need to save any other registers. */
7433 if (IS_VOLATILE (func_type))
7434 return save_reg_mask;
7436 save_reg_mask |= arm_compute_save_reg0_reg12_mask ();
7438 /* Decide if we need to save the link register.
7439 Interrupt routines have their own banked link register,
7440 so they never need to save it.
7441 Otherwise if we do not use the link register we do not need to save
7442 it. If we are pushing other registers onto the stack however, we
7443 can save an instruction in the epilogue by pushing the link register
7444 now and then popping it back into the PC. This incurs extra memory
7445 accesses though, so we only do it when optimising for size, and only
7446 if we know that we will not need a fancy return sequence. */
7447 if (regs_ever_live [LR_REGNUM]
7448 || (save_reg_mask
7449 && optimize_size
7450 && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL))
7451 save_reg_mask |= 1 << LR_REGNUM;
7453 if (cfun->machine->lr_save_eliminated)
7454 save_reg_mask &= ~ (1 << LR_REGNUM);
7456 return save_reg_mask;
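/* Editorial example: for a normal function that needs a stack frame
   and uses the call-saved r4, the mask computed above covers r4, fp,
   ip, lr and pc: the registers pushed by the classic APCS prologue
   "stmfd sp!, {r4, fp, ip, lr, pc}". */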
7459 /* Generate a function exit sequence. If REALLY_RETURN is true, then do
7460 everything bar the final return instruction. */
7462 const char *
7463 output_return_instruction (operand, really_return, reverse)
7464 rtx operand;
7465 int really_return;
7466 int reverse;
7468 char conditional[10];
7469 char instr[100];
7470 int reg;
7471 unsigned long live_regs_mask;
7472 unsigned long func_type;
7474 func_type = arm_current_func_type ();
7476 if (IS_NAKED (func_type))
7477 return "";
7479 if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
7481 /* If this function was declared non-returning, and we have found a tail
7482 call, then we have to trust that the called function won't return. */
7483 if (really_return)
7485 rtx ops[2];
7487 /* Otherwise, trap an attempted return by aborting. */
7488 ops[0] = operand;
7489 ops[1] = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)"
7490 : "abort");
7491 assemble_external_libcall (ops[1]);
7492 output_asm_insn (reverse ? "bl%D0\t%a1" : "bl%d0\t%a1", ops);
7495 return "";
7498 if (current_function_calls_alloca && !really_return)
7499 abort ();
7501 sprintf (conditional, "%%?%%%c0", reverse ? 'D' : 'd');
7503 return_used_this_function = 1;
7505 live_regs_mask = arm_compute_save_reg_mask ();
7507 if (live_regs_mask)
7509 const char * return_reg;
7511 /* If we do not have any special requirements for function exit
7512 (e.g. interworking or an ISR) then we can load the return address
7513 directly into the PC. Otherwise we must load it into LR. */
7514 if (really_return
7515 && ! TARGET_INTERWORK)
7516 return_reg = reg_names[PC_REGNUM];
7517 else
7518 return_reg = reg_names[LR_REGNUM];
7520 if ((live_regs_mask & (1 << IP_REGNUM)) == (1 << IP_REGNUM))
7521 /* There are two possible reasons for the IP register being saved.
7522 Either a stack frame was created, in which case IP contains the
7523 old stack pointer, or an ISR routine corrupted it. If this is an
7524 ISR routine then just restore IP, otherwise restore IP into SP. */
7525 if (! IS_INTERRUPT (func_type))
7527 live_regs_mask &= ~ (1 << IP_REGNUM);
7528 live_regs_mask |= (1 << SP_REGNUM);
7531 /* On some ARM architectures it is faster to use LDR rather than
7532 LDM to load a single register. On other architectures, the
7533 cost is the same. In 26 bit mode, or for exception handlers,
7534 we have to use LDM to load the PC so that the CPSR is also
7535 restored. */
7536 for (reg = 0; reg <= LAST_ARM_REGNUM; reg++)
7538 if (live_regs_mask == (unsigned int)(1 << reg))
7539 break;
7541 if (reg <= LAST_ARM_REGNUM
7542 && (reg != LR_REGNUM
7543 || ! really_return
7544 || (TARGET_APCS_32 && ! IS_INTERRUPT (func_type))))
7546 sprintf (instr, "ldr%s\t%%|%s, [%%|sp], #4", conditional,
7547 (reg == LR_REGNUM) ? return_reg : reg_names[reg]);
7549 else
7551 char *p;
7552 int first = 1;
7554 /* Generate the load multiple instruction to restore the registers. */
7555 if (frame_pointer_needed)
7556 sprintf (instr, "ldm%sea\t%%|fp, {", conditional);
7557 else if (live_regs_mask & (1 << SP_REGNUM))
7558 sprintf (instr, "ldm%sfd\t%%|sp, {", conditional);
7559 else
7560 sprintf (instr, "ldm%sfd\t%%|sp!, {", conditional);
7562 p = instr + strlen (instr);
7564 for (reg = 0; reg <= SP_REGNUM; reg++)
7565 if (live_regs_mask & (1 << reg))
7567 int l = strlen (reg_names[reg]);
7569 if (first)
7570 first = 0;
7571 else
7573 memcpy (p, ", ", 2);
7574 p += 2;
7577 memcpy (p, "%|", 2);
7578 memcpy (p + 2, reg_names[reg], l);
7579 p += l + 2;
7582 if (live_regs_mask & (1 << LR_REGNUM))
7584 int l = strlen (return_reg);
7586 if (! first)
7588 memcpy (p, ", ", 2);
7589 p += 2;
7592 memcpy (p, "%|", 2);
7593 memcpy (p + 2, return_reg, l);
7594 strcpy (p + 2 + l, ((TARGET_APCS_32
7595 && !IS_INTERRUPT (func_type))
7596 || !really_return)
7597 ? "}" : "}^");
7599 else
7600 strcpy (p, "}");
7603 output_asm_insn (instr, & operand);
7605 /* See if we need to generate an extra instruction to
7606 perform the actual function return. */
7607 if (really_return
7608 && func_type != ARM_FT_INTERWORKED
7609 && (live_regs_mask & (1 << LR_REGNUM)) != 0)
7611 /* The return has already been handled
7612 by loading the LR into the PC. */
7613 really_return = 0;
7617 if (really_return)
7619 switch ((int) ARM_FUNC_TYPE (func_type))
7621 case ARM_FT_ISR:
7622 case ARM_FT_FIQ:
7623 sprintf (instr, "sub%ss\t%%|pc, %%|lr, #4", conditional);
7624 break;
7626 case ARM_FT_INTERWORKED:
7627 sprintf (instr, "bx%s\t%%|lr", conditional);
7628 break;
7630 case ARM_FT_EXCEPTION:
7631 sprintf (instr, "mov%ss\t%%|pc, %%|lr", conditional);
7632 break;
7634 default:
7635 /* ARMv5 implementations always provide BX, so interworking
7636 is the default unless APCS-26 is in use. */
7637 if ((insn_flags & FL_ARCH5) != 0 && TARGET_APCS_32)
7638 sprintf (instr, "bx%s\t%%|lr", conditional);
7639 else
7640 sprintf (instr, "mov%s%s\t%%|pc, %%|lr",
7641 conditional, TARGET_APCS_32 ? "" : "s");
7642 break;
7645 output_asm_insn (instr, & operand);
7648 return "";
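/* Editorial example: for a frame-pointer function that saved
   {r4, fp, ip, lr, pc}, the code above swaps IP's slot for SP and
   loads the saved LR straight into the PC, returning with the
   single instruction "ldmea fp, {r4, fp, sp, pc}". */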
7651 /* Write the function name into the code section, directly preceding
7652 the function prologue.
7654 Code will be output similar to this:
7655 t0
7656 .ascii "arm_poke_function_name", 0
7657 .align
7658 t1
7659 .word 0xff000000 + (t1 - t0)
7660 arm_poke_function_name
7661 mov ip, sp
7662 stmfd sp!, {fp, ip, lr, pc}
7663 sub fp, ip, #4
7665 When performing a stack backtrace, code can inspect the value
7666 of 'pc' stored at 'fp' + 0. If the trace function then looks
7667 at location pc - 12 and the top 8 bits are set, then we know
7668 that there is a function name embedded immediately preceding this
7669 location, and that its length is given by pc[-3] & ~0xff000000.
7671 We assume that pc is declared as a pointer to an unsigned long.
7673 It is of no benefit to output the function name if we are assembling
7674 a leaf function. These function types will not contain a stack
7675 backtrace structure, so it is not possible to determine the
7676 function name. */
7678 void
7679 arm_poke_function_name (stream, name)
7680 FILE * stream;
7681 const char * name;
7683 unsigned long alignlength;
7684 unsigned long length;
7685 rtx x;
7687 length = strlen (name) + 1;
7688 alignlength = ROUND_UP_WORD (length);
7690 ASM_OUTPUT_ASCII (stream, name, length);
7691 ASM_OUTPUT_ALIGN (stream, 2);
7692 x = GEN_INT ((unsigned HOST_WIDE_INT) 0xff000000 + alignlength);
7693 assemble_aligned_integer (UNITS_PER_WORD, x);
7696 /* Place some comments into the assembler stream
7697 describing the current function. */
7699 static void
7700 arm_output_function_prologue (f, frame_size)
7701 FILE * f;
7702 HOST_WIDE_INT frame_size;
7704 unsigned long func_type;
7706 if (!TARGET_ARM)
7708 thumb_output_function_prologue (f, frame_size);
7709 return;
7712 /* Sanity check. */
7713 if (arm_ccfsm_state || arm_target_insn)
7714 abort ();
7716 func_type = arm_current_func_type ();
7718 switch ((int) ARM_FUNC_TYPE (func_type))
7720 default:
7721 case ARM_FT_NORMAL:
7722 break;
7723 case ARM_FT_INTERWORKED:
7724 asm_fprintf (f, "\t%@ Function supports interworking.\n");
7725 break;
7726 case ARM_FT_EXCEPTION_HANDLER:
7727 asm_fprintf (f, "\t%@ C++ Exception Handler.\n");
7728 break;
7729 case ARM_FT_ISR:
7730 asm_fprintf (f, "\t%@ Interrupt Service Routine.\n");
7731 break;
7732 case ARM_FT_FIQ:
7733 asm_fprintf (f, "\t%@ Fast Interrupt Service Routine.\n");
7734 break;
7735 case ARM_FT_EXCEPTION:
7736 asm_fprintf (f, "\t%@ ARM Exception Handler.\n");
7737 break;
7740 if (IS_NAKED (func_type))
7741 asm_fprintf (f, "\t%@ Naked Function: prologue and epilogue provided by programmer.\n");
7743 if (IS_VOLATILE (func_type))
7744 asm_fprintf (f, "\t%@ Volatile: function does not return.\n");
7746 if (IS_NESTED (func_type))
7747 asm_fprintf (f, "\t%@ Nested: function declared inside another function.\n");
7749 asm_fprintf (f, "\t%@ args = %d, pretend = %d, frame = %d\n",
7750 current_function_args_size,
7751 current_function_pretend_args_size, frame_size);
7753 asm_fprintf (f, "\t%@ frame_needed = %d, uses_anonymous_args = %d\n",
7754 frame_pointer_needed,
7755 cfun->machine->uses_anonymous_args);
7757 if (cfun->machine->lr_save_eliminated)
7758 asm_fprintf (f, "\t%@ link register save eliminated.\n");
7760 #ifdef AOF_ASSEMBLER
7761 if (flag_pic)
7762 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, PIC_OFFSET_TABLE_REGNUM);
7763 #endif
7765 return_used_this_function = 0;
7768 const char *
7769 arm_output_epilogue (really_return)
7770 int really_return;
7772 int reg;
7773 unsigned long saved_regs_mask;
7774 unsigned long func_type;
7775 /* Floats_offset is the offset from the "virtual" frame. In an APCS
7776 frame that is $fp + 4 for a non-variadic function. */
7777 int floats_offset = 0;
7778 rtx operands[3];
7779 int frame_size = arm_get_frame_size ();
7780 FILE * f = asm_out_file;
7781 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
7783 /* If we have already generated the return instruction
7784 then it is futile to generate anything else. */
7785 if (use_return_insn (FALSE) && return_used_this_function)
7786 return "";
7788 func_type = arm_current_func_type ();
7790 if (IS_NAKED (func_type))
7791 /* Naked functions don't have epilogues. */
7792 return "";
7794 if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
7796 rtx op;
7798 /* A volatile function should never return. Call abort. */
7799 op = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)" : "abort");
7800 assemble_external_libcall (op);
7801 output_asm_insn ("bl\t%a0", &op);
7803 return "";
7806 if (ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER
7807 && ! really_return)
7808 /* If we are throwing an exception, then we really must
7809 be doing a return, so we can't tail-call. */
7810 abort ();
7812 saved_regs_mask = arm_compute_save_reg_mask ();
7814 /* XXX We should adjust floats_offset for any anonymous args, and then
7815 re-adjust vfp_offset below to compensate. */
7817 /* Compute how far away the floats will be. */
7818 for (reg = 0; reg <= LAST_ARM_REGNUM; reg ++)
7819 if (saved_regs_mask & (1 << reg))
7820 floats_offset += 4;
7822 if (frame_pointer_needed)
7824 int vfp_offset = 4;
7826 if (arm_fpu_arch == FP_SOFT2)
7828 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
7829 if (regs_ever_live[reg] && !call_used_regs[reg])
7831 floats_offset += 12;
7832 asm_fprintf (f, "\tldfe\t%r, [%r, #-%d]\n",
7833 reg, FP_REGNUM, floats_offset - vfp_offset);
7836 else
7838 int start_reg = LAST_ARM_FP_REGNUM;
7840 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
7842 if (regs_ever_live[reg] && !call_used_regs[reg])
7844 floats_offset += 12;
7846 /* We can't unstack more than four registers at once. */
7847 if (start_reg - reg == 3)
7849 asm_fprintf (f, "\tlfm\t%r, 4, [%r, #-%d]\n",
7850 reg, FP_REGNUM, floats_offset - vfp_offset);
7851 start_reg = reg - 1;
7854 else
7856 if (reg != start_reg)
7857 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
7858 reg + 1, start_reg - reg,
7859 FP_REGNUM, floats_offset - vfp_offset);
7860 start_reg = reg - 1;
7864 /* Just in case the last register checked also needs unstacking. */
7865 if (reg != start_reg)
7866 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
7867 reg + 1, start_reg - reg,
7868 FP_REGNUM, floats_offset - vfp_offset);
7871 /* saved_regs_mask should contain the IP, which at the time of stack
7872 frame generation actually contains the old stack pointer. So a
7873 quick way to unwind the stack is just to pop the IP register directly
7874 into the stack pointer. */
7875 if ((saved_regs_mask & (1 << IP_REGNUM)) == 0)
7876 abort ();
7877 saved_regs_mask &= ~ (1 << IP_REGNUM);
7878 saved_regs_mask |= (1 << SP_REGNUM);
7880 /* There are two registers left in saved_regs_mask - LR and PC. We
7881 only need to restore the LR register (the return address), but to
7882 save time we can load it directly into the PC, unless we need a
7883 special function exit sequence, or we are not really returning. */
7884 if (really_return && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL)
7885 /* Delete the LR from the register mask, so that the LR on
7886 the stack is loaded into the PC in the register mask. */
7887 saved_regs_mask &= ~ (1 << LR_REGNUM);
7888 else
7889 saved_regs_mask &= ~ (1 << PC_REGNUM);
7891 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, saved_regs_mask);
7893 if (IS_INTERRUPT (func_type))
7894 /* Interrupt handlers will have pushed the
7895 IP onto the stack, so restore it now. */
7896 print_multi_reg (f, "ldmfd\t%r", SP_REGNUM, 1 << IP_REGNUM);
7898 else
7900 /* Restore stack pointer if necessary. */
7901 if (frame_size + current_function_outgoing_args_size != 0)
7903 operands[0] = operands[1] = stack_pointer_rtx;
7904 operands[2] = GEN_INT (frame_size
7905 + current_function_outgoing_args_size);
7906 output_add_immediate (operands);
7909 if (arm_fpu_arch == FP_SOFT2)
7911 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
7912 if (regs_ever_live[reg] && !call_used_regs[reg])
7913 asm_fprintf (f, "\tldfe\t%r, [%r], #12\n",
7914 reg, SP_REGNUM);
7916 else
7918 int start_reg = FIRST_ARM_FP_REGNUM;
7920 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
7922 if (regs_ever_live[reg] && !call_used_regs[reg])
7924 if (reg - start_reg == 3)
7926 asm_fprintf (f, "\tlfmfd\t%r, 4, [%r]!\n",
7927 start_reg, SP_REGNUM);
7928 start_reg = reg + 1;
7931 else
7933 if (reg != start_reg)
7934 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
7935 start_reg, reg - start_reg,
7936 SP_REGNUM);
7938 start_reg = reg + 1;
7942 /* Just in case the last register checked also needs unstacking. */
7943 if (reg != start_reg)
7944 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
7945 start_reg, reg - start_reg, SP_REGNUM);
7948 /* If we can, restore the LR into the PC. */
7949 if (ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
7950 && really_return
7951 && current_function_pretend_args_size == 0
7952 && saved_regs_mask & (1 << LR_REGNUM))
7954 saved_regs_mask &= ~ (1 << LR_REGNUM);
7955 saved_regs_mask |= (1 << PC_REGNUM);
7958 /* Load the registers off the stack. If we only have one register
7959 to load use the LDR instruction - it is faster. */
7960 if (saved_regs_mask == (1 << LR_REGNUM))
7962 /* The exception handler ignores the LR, so we do
7963 not really need to load it off the stack. */
7964 if (eh_ofs)
7965 asm_fprintf (f, "\tadd\t%r, %r, #4\n", SP_REGNUM, SP_REGNUM);
7966 else
7967 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM, SP_REGNUM);
7969 else if (saved_regs_mask)
7971 if (saved_regs_mask & (1 << SP_REGNUM))
7972 /* Note - write back to the stack register is not enabled
7973 (ie "ldmfd sp!..."). We know that the stack pointer is
7974 in the list of registers and if we add writeback the
7975 instruction becomes UNPREDICTABLE. */
7976 print_multi_reg (f, "ldmfd\t%r", SP_REGNUM, saved_regs_mask);
7977 else
7978 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, saved_regs_mask);
7981 if (current_function_pretend_args_size)
7983 /* Unwind the pre-pushed regs. */
7984 operands[0] = operands[1] = stack_pointer_rtx;
7985 operands[2] = GEN_INT (current_function_pretend_args_size);
7986 output_add_immediate (operands);
7990 #if 0
7991 if (ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER)
7992 /* Adjust the stack to remove the exception handler stuff. */
7993 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
7994 REGNO (eh_ofs));
7995 #endif
7997 if (! really_return
7998 || (ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
7999 && current_function_pretend_args_size == 0
8000 && saved_regs_mask & (1 << PC_REGNUM)))
8001 return "";
8003 /* Generate the return instruction. */
8004 switch ((int) ARM_FUNC_TYPE (func_type))
8006 case ARM_FT_EXCEPTION_HANDLER:
8007 /* Even in 26-bit mode we do a mov (rather than a movs)
8008 because we don't have the PSR bits set in the address. */
8009 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, EXCEPTION_LR_REGNUM);
8010 break;
8012 case ARM_FT_ISR:
8013 case ARM_FT_FIQ:
8014 asm_fprintf (f, "\tsubs\t%r, %r, #4\n", PC_REGNUM, LR_REGNUM);
8015 break;
8017 case ARM_FT_EXCEPTION:
8018 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
8019 break;
8021 case ARM_FT_INTERWORKED:
8022 asm_fprintf (f, "\tbx\t%r\n", LR_REGNUM);
8023 break;
8025 default:
8026 if (frame_pointer_needed)
8027 /* If we used the frame pointer then the return address
8028 will have been loaded off the stack directly into the
8029 PC, so there is no need to issue a MOV instruction
8030 here. */
8032 else if (current_function_pretend_args_size == 0
8033 && (saved_regs_mask & (1 << LR_REGNUM)))
8034 /* Similarly we may have been able to load LR into the PC
8035 even if we did not create a stack frame. */
8037 else if (TARGET_APCS_32)
8038 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, LR_REGNUM);
8039 else
8040 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
8041 break;
8044 return "";
8047 static void
8048 arm_output_function_epilogue (file, frame_size)
8049 FILE *file ATTRIBUTE_UNUSED;
8050 HOST_WIDE_INT frame_size;
8052 if (TARGET_THUMB)
8054 /* ??? Probably not safe to set this here, since it assumes that a
8055 function will be emitted as assembly immediately after we generate
8056 RTL for it. This does not happen for inline functions. */
8057 return_used_this_function = 0;
8059 else
8061 /* We need to take into account any stack-frame rounding. */
8062 frame_size = arm_get_frame_size ();
8064 if (use_return_insn (FALSE)
8065 && return_used_this_function
8066 && (frame_size + current_function_outgoing_args_size) != 0
8067 && !frame_pointer_needed)
8068 abort ();
8070 /* Reset the ARM-specific per-function variables. */
8071 after_arm_reorg = 0;
8075 /* Generate and emit an insn that we will recognize as a push_multi.
8076 Unfortunately, since this insn does not reflect the actual
8077 semantics of the operation very well, we need to annotate the insn for the benefit
8078 of DWARF2 frame unwind information. */
8080 static rtx
8081 emit_multi_reg_push (mask)
8082 int mask;
8084 int num_regs = 0;
8085 int num_dwarf_regs;
8086 int i, j;
8087 rtx par;
8088 rtx dwarf;
8089 int dwarf_par_index;
8090 rtx tmp, reg;
8092 for (i = 0; i <= LAST_ARM_REGNUM; i++)
8093 if (mask & (1 << i))
8094 num_regs++;
8096 if (num_regs == 0 || num_regs > 16)
8097 abort ();
8099 /* We don't record the PC in the dwarf frame information. */
8100 num_dwarf_regs = num_regs;
8101 if (mask & (1 << PC_REGNUM))
8102 num_dwarf_regs--;
8104 /* For the body of the insn we are going to generate an UNSPEC in
8105 parallel with several USEs. This allows the insn to be recognized
8106 by the push_multi pattern in the arm.md file. The insn looks
8107 something like this:
8109 (parallel [
8110 (set (mem:BLK (pre_dec:BLK (reg:SI sp)))
8111 (unspec:BLK [(reg:SI r4)] UNSPEC_PUSH_MULT))
8112 (use (reg:SI 11 fp))
8113 (use (reg:SI 12 ip))
8114 (use (reg:SI 14 lr))
8115 (use (reg:SI 15 pc))
8116 ])
8118 For the frame note however, we try to be more explicit and actually
8119 show each register being stored into the stack frame, plus a (single)
8120 decrement of the stack pointer. We do it this way in order to be
8121 friendly to the stack unwinding code, which only wants to see a single
8122 stack decrement per instruction. The RTL we generate for the note looks
8123 something like this:
8125 (sequence [
8126 (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int -20)))
8127 (set (mem:SI (reg:SI sp)) (reg:SI r4))
8128 (set (mem:SI (plus:SI (reg:SI sp) (const_int 4))) (reg:SI fp))
8129 (set (mem:SI (plus:SI (reg:SI sp) (const_int 8))) (reg:SI ip))
8130 (set (mem:SI (plus:SI (reg:SI sp) (const_int 12))) (reg:SI lr))
8131 ])
8133 This sequence is used both by the code to support stack unwinding for
8134 exceptions handlers and the code to generate dwarf2 frame debugging. */
8136 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_regs));
8137 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (num_dwarf_regs + 1));
8138 dwarf_par_index = 1;
8140 for (i = 0; i <= LAST_ARM_REGNUM; i++)
8142 if (mask & (1 << i))
8144 reg = gen_rtx_REG (SImode, i);
8146 XVECEXP (par, 0, 0)
8147 = gen_rtx_SET (VOIDmode,
8148 gen_rtx_MEM (BLKmode,
8149 gen_rtx_PRE_DEC (BLKmode,
8150 stack_pointer_rtx)),
8151 gen_rtx_UNSPEC (BLKmode,
8152 gen_rtvec (1, reg),
8153 UNSPEC_PUSH_MULT));
8155 if (i != PC_REGNUM)
8157 tmp = gen_rtx_SET (VOIDmode,
8158 gen_rtx_MEM (SImode, stack_pointer_rtx),
8159 reg);
8160 RTX_FRAME_RELATED_P (tmp) = 1;
8161 XVECEXP (dwarf, 0, dwarf_par_index) = tmp;
8162 dwarf_par_index++;
8165 break;
8169 for (j = 1, i++; j < num_regs; i++)
8171 if (mask & (1 << i))
8173 reg = gen_rtx_REG (SImode, i);
8175 XVECEXP (par, 0, j) = gen_rtx_USE (VOIDmode, reg);
8177 if (i != PC_REGNUM)
8179 tmp = gen_rtx_SET (VOIDmode,
8180 gen_rtx_MEM (SImode,
8181 plus_constant (stack_pointer_rtx,
8182 4 * j)),
8183 reg);
8184 RTX_FRAME_RELATED_P (tmp) = 1;
8185 XVECEXP (dwarf, 0, dwarf_par_index++) = tmp;
8188 j++;
8192 par = emit_insn (par);
8194 tmp = gen_rtx_SET (SImode,
8195 stack_pointer_rtx,
8196 gen_rtx_PLUS (SImode,
8197 stack_pointer_rtx,
8198 GEN_INT (-4 * num_regs)));
8199 RTX_FRAME_RELATED_P (tmp) = 1;
8200 XVECEXP (dwarf, 0, 0) = tmp;
8202 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
8203 REG_NOTES (par));
8204 return par;
8207 static rtx
8208 emit_sfm (base_reg, count)
8209 int base_reg;
8210 int count;
8212 rtx par;
8213 rtx dwarf;
8214 rtx tmp, reg;
8215 int i;
8217 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
8218 dwarf = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
8220 reg = gen_rtx_REG (XFmode, base_reg++);
8222 XVECEXP (par, 0, 0)
8223 = gen_rtx_SET (VOIDmode,
8224 gen_rtx_MEM (BLKmode,
8225 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
8226 gen_rtx_UNSPEC (BLKmode,
8227 gen_rtvec (1, reg),
8228 UNSPEC_PUSH_MULT));
8229 tmp
8230 = gen_rtx_SET (VOIDmode,
8231 gen_rtx_MEM (XFmode,
8232 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
8233 reg);
8234 RTX_FRAME_RELATED_P (tmp) = 1;
8235 XVECEXP (dwarf, 0, count - 1) = tmp;
8237 for (i = 1; i < count; i++)
8239 reg = gen_rtx_REG (XFmode, base_reg++);
8240 XVECEXP (par, 0, i) = gen_rtx_USE (VOIDmode, reg);
8242 tmp = gen_rtx_SET (VOIDmode,
8243 gen_rtx_MEM (XFmode,
8244 gen_rtx_PRE_DEC (BLKmode,
8245 stack_pointer_rtx)),
8246 reg);
8247 RTX_FRAME_RELATED_P (tmp) = 1;
8248 XVECEXP (dwarf, 0, count - i - 1) = tmp;
8251 par = emit_insn (par);
8252 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
8253 REG_NOTES (par));
8254 return par;
8257 /* Compute the distance from register FROM to register TO.
8258 These can be the arg pointer (26), the soft frame pointer (25),
8259 the stack pointer (13) or the hard frame pointer (11).
8260 Typical stack layout looks like this:
8262 old stack pointer -> | |
8263 ----
8264 | | \
8265 | | saved arguments for
8266 | | vararg functions
8267 | | /
8268 --
8269 hard FP & arg pointer -> | | \
8270 | | stack
8271 | | frame
8272 | | /
8273 --
8274 | | \
8275 | | call saved
8276 | | registers
8277 soft frame pointer -> | | /
8278 --
8279 | | \
8280 | | local
8281 | | variables
8282 | | /
8283 --
8284 | | \
8285 | | outgoing
8286 | | arguments
8287 current stack pointer -> | | /
8288 --
8290 For a given function some or all of these stack components
8291 may not be needed, giving rise to the possibility of
8292 eliminating some of the registers.
8294 The values returned by this function must reflect the behavior
8295 of arm_expand_prologue() and arm_compute_save_reg_mask().
8297 The sign of the number returned reflects the direction of stack
8298 growth, so the values are positive for all eliminations except
8299 from the soft frame pointer to the hard frame pointer. */
8301 unsigned int
8302 arm_compute_initial_elimination_offset (from, to)
8303 unsigned int from;
8304 unsigned int to;
8306 unsigned int local_vars = arm_get_frame_size ();
8307 unsigned int outgoing_args = current_function_outgoing_args_size;
8308 unsigned int stack_frame;
8309 unsigned int call_saved_registers;
8310 unsigned long func_type;
8312 func_type = arm_current_func_type ();
8314 /* Volatile functions never return, so there is
8315 no need to save call saved registers. */
8316 call_saved_registers = 0;
8317 if (! IS_VOLATILE (func_type))
8319 unsigned int reg_mask;
8320 unsigned int reg;
8322 /* Make sure that we compute which registers will be saved
8323 on the stack using the same algorithm that is used by
8324 arm_compute_save_reg_mask(). */
8325 reg_mask = arm_compute_save_reg0_reg12_mask ();
8327 /* Now count the number of bits set in save_reg_mask.
8328 For each set bit we need 4 bytes of stack space. */
8329 while (reg_mask)
8331 call_saved_registers += 4;
8332 reg_mask = reg_mask & ~ (reg_mask & - reg_mask);
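/* Worked example: reg_mask & - reg_mask isolates the least significant
   set bit (a two's-complement identity), so the statement above clears
   exactly one bit per iteration.  With reg_mask = 0x0870 (say r4, r5,
   r6 and r11) the loop runs four times and call_saved_registers
   becomes 16.  */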
8335 if (regs_ever_live[LR_REGNUM]
8336 /* If a stack frame is going to be created, the LR will
8337 be saved as part of that, so we do not need to allow
8338 for it here. */
8339 && ! frame_pointer_needed)
8340 call_saved_registers += 4;
8342 /* If the hard floating point registers are going to be
8343 used then they must be saved on the stack as well.
8344 Each register occupies 12 bytes of stack space. */
8345 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg ++)
8346 if (regs_ever_live[reg] && ! call_used_regs[reg])
8347 call_saved_registers += 12;
8350 /* The stack frame contains 4 registers - the old frame pointer,
8351 the old stack pointer, the return address and PC of the start
8352 of the function. */
8353 stack_frame = frame_pointer_needed ? 16 : 0;
8355 /* OK, now we have enough information to compute the distances.
8356 There must be an entry in these switch tables for each pair
8357 of registers in ELIMINABLE_REGS, even if some of the entries
8358 seem to be redundant or useless. */
8359 switch (from)
8361 case ARG_POINTER_REGNUM:
8362 switch (to)
8364 case THUMB_HARD_FRAME_POINTER_REGNUM:
8365 return 0;
8367 case FRAME_POINTER_REGNUM:
8368 /* This is the reverse of the soft frame pointer
8369 to hard frame pointer elimination below. */
8370 if (call_saved_registers == 0 && stack_frame == 0)
8371 return 0;
8372 return (call_saved_registers + stack_frame - 4);
8374 case ARM_HARD_FRAME_POINTER_REGNUM:
8375 /* If there is no stack frame then the hard
8376 frame pointer and the arg pointer coincide. */
8377 if (stack_frame == 0 && call_saved_registers != 0)
8378 return 0;
8379 /* FIXME: Not sure about this. Maybe we should always return 0 ? */
8380 return (frame_pointer_needed
8381 && current_function_needs_context
8382 && ! cfun->machine->uses_anonymous_args) ? 4 : 0;
8384 case STACK_POINTER_REGNUM:
8385 /* If nothing has been pushed on the stack at all
8386 then this will return -4. This *is* correct! */
8387 return call_saved_registers + stack_frame + local_vars + outgoing_args - 4;
8389 default:
8390 abort ();
8392 break;
8394 case FRAME_POINTER_REGNUM:
8395 switch (to)
8397 case THUMB_HARD_FRAME_POINTER_REGNUM:
8398 return 0;
8400 case ARM_HARD_FRAME_POINTER_REGNUM:
8401 /* The hard frame pointer points to the top entry in the
8402 stack frame. The soft frame pointer points to the bottom entry
8403 in the stack frame. If there is no stack frame at all,
8404 then they are identical. */
8405 if (call_saved_registers == 0 && stack_frame == 0)
8406 return 0;
8407 return - (call_saved_registers + stack_frame - 4);
8409 case STACK_POINTER_REGNUM:
8410 return local_vars + outgoing_args;
8412 default:
8413 abort ();
8415 break;
8417 default:
8418 /* You cannot eliminate from the stack pointer.
8419 In theory you could eliminate from the hard frame
8420 pointer to the stack pointer, but this will never
8421 happen, since if a stack frame is not needed the
8422 hard frame pointer will never be used. */
8423 abort ();
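/* A worked example of the above, with assumed figures: a function that
   saves r4-r6 and LR (call_saved_registers = 16), needs no frame
   pointer (stack_frame = 0), and has 8 bytes of locals and no outgoing
   arguments gets an ARG_POINTER to STACK_POINTER offset of
   16 + 0 + 8 + 0 - 4 = 20.  */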
8427 /* Calculate the size of the stack frame, taking into account any
8428 padding that is required to ensure stack-alignment. */
8430 HOST_WIDE_INT
8431 arm_get_frame_size ()
8433 int regno;
8435 int base_size = ROUND_UP_WORD (get_frame_size ());
8436 int entry_size = 0;
8437 unsigned long func_type = arm_current_func_type ();
8438 int leaf;
8440 if (! TARGET_ARM)
8441 abort();
8443 if (! TARGET_ATPCS)
8444 return base_size;
8446 /* We need to know if we are a leaf function. Unfortunately, it
8447 is possible to be called after start_sequence has been called,
8448 which causes get_insns to return the insns for the sequence,
8449 not the function, which will cause leaf_function_p to return
8450 the incorrect result.
8452 To work around this, we cache the computed frame size. This
8453 works because we will only be calling RTL expanders that need
8454 to know about leaf functions once reload has completed, and the
8455 frame size cannot be changed after that time, so we can safely
8456 use the cached value. */
8458 if (reload_completed)
8459 return cfun->machine->frame_size;
8461 leaf = leaf_function_p ();
8463 /* A leaf function does not need any stack alignment if it has nothing
8464 on the stack. */
8465 if (leaf && base_size == 0)
8467 cfun->machine->frame_size = 0;
8468 return 0;
8471 /* We know that SP will be word aligned on entry, and we must
8472 preserve that condition at any subroutine call. But those are
8473 the only constraints. */
8475 /* Space for variadic functions. */
8476 if (current_function_pretend_args_size)
8477 entry_size += current_function_pretend_args_size;
8479 /* Space for saved registers. */
8480 entry_size += bit_count (arm_compute_save_reg_mask ()) * 4;
8482 /* Space for saved FPA registers. */
8483 if (! IS_VOLATILE (func_type))
8485 for (regno = FIRST_ARM_FP_REGNUM; regno <= LAST_ARM_FP_REGNUM; regno++)
8486 if (regs_ever_live[regno] && ! call_used_regs[regno])
8487 entry_size += 12;
8490 if ((entry_size + base_size + current_function_outgoing_args_size) & 7)
8491 base_size += 4;
8492 if ((entry_size + base_size + current_function_outgoing_args_size) & 7)
8493 abort ();
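/* For example, with entry_size = 20, base_size = 8 and no outgoing
   arguments the total is 28, which is not a multiple of 8, so 4 bytes
   of padding are added to base_size, making the total 32.  The second
   test checks that a single word of padding was sufficient.  */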
8495 cfun->machine->frame_size = base_size;
8497 return base_size;
8500 /* Generate the prologue instructions for entry into an ARM function. */
8502 void
8503 arm_expand_prologue ()
8505 int reg;
8506 rtx amount;
8507 rtx insn;
8508 rtx ip_rtx;
8509 unsigned long live_regs_mask;
8510 unsigned long func_type;
8511 int fp_offset = 0;
8512 int saved_pretend_args = 0;
8513 unsigned int args_to_push;
8515 func_type = arm_current_func_type ();
8517 /* Naked functions don't have prologues. */
8518 if (IS_NAKED (func_type))
8519 return;
8521 /* Make a copy of current_function_pretend_args_size as we may need to modify it locally. */
8522 args_to_push = current_function_pretend_args_size;
8524 /* Compute which register we will have to save onto the stack. */
8525 live_regs_mask = arm_compute_save_reg_mask ();
8527 ip_rtx = gen_rtx_REG (SImode, IP_REGNUM);
8529 if (frame_pointer_needed)
8531 if (IS_INTERRUPT (func_type))
8533 /* Interrupt functions must not corrupt any registers.
8534 Creating a frame pointer, however, corrupts the IP
8535 register, so we must push it first. */
8536 insn = emit_multi_reg_push (1 << IP_REGNUM);
8538 /* Do not set RTX_FRAME_RELATED_P on this insn.
8539 The dwarf stack unwinding code only wants to see one
8540 stack decrement per function, and this is not it. If
8541 this instruction is labeled as being part of the frame
8542 creation sequence then dwarf2out_frame_debug_expr will
8543 abort when it encounters the assignment of IP to FP
8544 later on, since the use of SP here establishes SP as
8545 the CFA register and not IP.
8547 Anyway this instruction is not really part of the stack
8548 frame creation although it is part of the prologue. */
8550 else if (IS_NESTED (func_type))
8552 /* The static chain register is the same as the IP register
8553 used as a scratch register during stack frame creation.
8554 To get around this we need to find somewhere to store IP
8555 whilst the frame is being created. We try the following
8556 places in order:
8558 1. The last argument register.
8559 2. A slot on the stack above the frame. (This only
8560 works if the function is not a varargs function).
8561 3. Register r3, after pushing the argument registers
8562 onto the stack.
8564 Note - we only need to tell the dwarf2 backend about the SP
8565 adjustment in the second variant; the static chain register
8566 doesn't need to be unwound, as it doesn't contain a value
8567 inherited from the caller. */
8569 if (regs_ever_live[3] == 0)
8571 insn = gen_rtx_REG (SImode, 3);
8572 insn = gen_rtx_SET (SImode, insn, ip_rtx);
8573 insn = emit_insn (insn);
8575 else if (args_to_push == 0)
8577 rtx dwarf;
8578 insn = gen_rtx_PRE_DEC (SImode, stack_pointer_rtx);
8579 insn = gen_rtx_MEM (SImode, insn);
8580 insn = gen_rtx_SET (VOIDmode, insn, ip_rtx);
8581 insn = emit_insn (insn);
8583 fp_offset = 4;
8585 /* Just tell the dwarf backend that we adjusted SP. */
8586 dwarf = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
8587 gen_rtx_PLUS (SImode, stack_pointer_rtx,
8588 GEN_INT (-fp_offset)));
8589 RTX_FRAME_RELATED_P (insn) = 1;
8590 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8591 dwarf, REG_NOTES (insn));
8593 else
8595 /* Store the args on the stack. */
8596 if (cfun->machine->uses_anonymous_args)
8597 insn = emit_multi_reg_push
8598 ((0xf0 >> (args_to_push / 4)) & 0xf);
8599 else
8600 insn = emit_insn
8601 (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
8602 GEN_INT (- args_to_push)));
8604 RTX_FRAME_RELATED_P (insn) = 1;
8606 saved_pretend_args = 1;
8607 fp_offset = args_to_push;
8608 args_to_push = 0;
8610 /* Now reuse r3 to preserve IP. */
8611 insn = gen_rtx_REG (SImode, 3);
8612 insn = gen_rtx_SET (SImode, insn, ip_rtx);
8613 (void) emit_insn (insn);
8617 if (fp_offset)
8619 insn = gen_rtx_PLUS (SImode, stack_pointer_rtx, GEN_INT (fp_offset));
8620 insn = gen_rtx_SET (SImode, ip_rtx, insn);
8622 else
8623 insn = gen_movsi (ip_rtx, stack_pointer_rtx);
8625 insn = emit_insn (insn);
8626 RTX_FRAME_RELATED_P (insn) = 1;
8629 if (args_to_push)
8631 /* Push the argument registers, or reserve space for them. */
8632 if (cfun->machine->uses_anonymous_args)
8633 insn = emit_multi_reg_push
8634 ((0xf0 >> (args_to_push / 4)) & 0xf);
8635 else
8636 insn = emit_insn
8637 (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
8638 GEN_INT (- args_to_push)));
8639 RTX_FRAME_RELATED_P (insn) = 1;
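/* The mask expression above selects the argument registers holding
   anonymous arguments.  For example, if args_to_push is 8 then
   (0xf0 >> 2) & 0xf is 0xc, the mask for r2 and r3, so only the last
   two argument registers are pushed.  */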
8642 /* If this is an interrupt service routine, and the link register is
8643 going to be pushed, subtracting four now will mean that the
8644 function return can be done with a single instruction. */
8645 if ((func_type == ARM_FT_ISR || func_type == ARM_FT_FIQ)
8646 && (live_regs_mask & (1 << LR_REGNUM)) != 0)
8648 emit_insn (gen_rtx_SET (SImode,
8649 gen_rtx_REG (SImode, LR_REGNUM),
8650 gen_rtx_PLUS (SImode,
8651 gen_rtx_REG (SImode, LR_REGNUM),
8652 GEN_INT (-4))));
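/* (An IRQ/FIQ handler is entered with LR holding the return address
   plus 4; biasing LR by -4 here means the epilogue can return by
   popping straight into the PC instead of needing a separate
   SUBS PC, LR, #4.)  */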
8655 if (live_regs_mask)
8657 insn = emit_multi_reg_push (live_regs_mask);
8658 RTX_FRAME_RELATED_P (insn) = 1;
8661 if (! IS_VOLATILE (func_type))
8663 /* Save any floating point call-saved registers used by this function. */
8664 if (arm_fpu_arch == FP_SOFT2)
8666 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg --)
8667 if (regs_ever_live[reg] && !call_used_regs[reg])
8669 insn = gen_rtx_PRE_DEC (XFmode, stack_pointer_rtx);
8670 insn = gen_rtx_MEM (XFmode, insn);
8671 insn = emit_insn (gen_rtx_SET (VOIDmode, insn,
8672 gen_rtx_REG (XFmode, reg)));
8673 RTX_FRAME_RELATED_P (insn) = 1;
8676 else
8678 int start_reg = LAST_ARM_FP_REGNUM;
8680 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg --)
8682 if (regs_ever_live[reg] && !call_used_regs[reg])
8684 if (start_reg - reg == 3)
8686 insn = emit_sfm (reg, 4);
8687 RTX_FRAME_RELATED_P (insn) = 1;
8688 start_reg = reg - 1;
8691 else
8693 if (start_reg != reg)
8695 insn = emit_sfm (reg + 1, start_reg - reg);
8696 RTX_FRAME_RELATED_P (insn) = 1;
8698 start_reg = reg - 1;
8702 if (start_reg != reg)
8704 insn = emit_sfm (reg + 1, start_reg - reg);
8705 RTX_FRAME_RELATED_P (insn) = 1;
8710 if (frame_pointer_needed)
8712 /* Create the new frame pointer. */
8713 insn = GEN_INT (-(4 + args_to_push + fp_offset));
8714 insn = emit_insn (gen_addsi3 (hard_frame_pointer_rtx, ip_rtx, insn));
8715 RTX_FRAME_RELATED_P (insn) = 1;
8717 if (IS_NESTED (func_type))
8719 /* Recover the static chain register. */
8720 if (regs_ever_live [3] == 0
8721 || saved_pretend_args)
8722 insn = gen_rtx_REG (SImode, 3);
8723 else /* if (current_function_pretend_args_size == 0) */
8725 insn = gen_rtx_PLUS (SImode, hard_frame_pointer_rtx, GEN_INT (4));
8726 insn = gen_rtx_MEM (SImode, insn);
8729 emit_insn (gen_rtx_SET (SImode, ip_rtx, insn));
8730 /* Add a USE to stop propagate_one_insn() from barfing. */
8731 emit_insn (gen_prologue_use (ip_rtx));
8735 amount = GEN_INT (-(arm_get_frame_size ()
8736 + current_function_outgoing_args_size));
8738 if (amount != const0_rtx)
8740 /* This add can produce multiple insns for a large constant, so we
8741 need to get tricky. */
8742 rtx last = get_last_insn ();
8743 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
8744 amount));
8745 do
8747 last = last ? NEXT_INSN (last) : get_insns ();
8748 RTX_FRAME_RELATED_P (last) = 1;
8750 while (last != insn);
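/* For example (illustrative constant): an adjustment of 4104 bytes is
   not a valid ARM immediate, so gen_addsi3 may emit two instructions,
   e.g. "sub sp, sp, #4096" followed by "sub sp, sp, #8"; the loop
   above marks each of them as frame related.  */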
8752 /* If the frame pointer is needed, emit a special barrier that
8753 will prevent the scheduler from moving stores to the frame
8754 before the stack adjustment. */
8755 if (frame_pointer_needed)
8756 insn = emit_insn (gen_stack_tie (stack_pointer_rtx,
8757 hard_frame_pointer_rtx));
8760 /* If we are profiling, make sure no instructions are scheduled before
8761 the call to mcount. Similarly if the user has requested no
8762 scheduling in the prolog. */
8763 if (current_function_profile || TARGET_NO_SCHED_PRO)
8764 emit_insn (gen_blockage ());
8766 /* If the link register is being kept alive, with the return address in it,
8767 then make sure that it does not get reused by the ce2 pass. */
8768 if ((live_regs_mask & (1 << LR_REGNUM)) == 0)
8770 emit_insn (gen_prologue_use (gen_rtx_REG (SImode, LR_REGNUM)));
8771 cfun->machine->lr_save_eliminated = 1;
8775 /* If CODE is 'd', then the X is a condition operand and the instruction
8776 should only be executed if the condition is true.
8777 if CODE is 'D', then the X is a condition operand and the instruction
8778 should only be executed if the condition is false: however, if the mode
8779 of the comparison is CCFPEmode, then always execute the instruction -- we
8780 do this because in these circumstances !GE does not necessarily imply LT;
8781 in these cases the instruction pattern will take care to make sure that
8782 an instruction containing %d will follow, thereby undoing the effects of
8783 doing this instruction unconditionally.
8784 If CODE is 'N' then X is a floating point operand that must be negated
8785 before output.
8786 If CODE is 'B' then output a bitwise inverted value of X (a const int).
8787 If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */
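/* Two illustrative cases: with CODE 'B' and X the constant 5, the
   output is -6, the sign-extended bitwise inverse; with CODE 'M' and
   X a DImode value in r0, the output is "{r0-r1}", since
   ARM_NUM_REGS (DImode) is 2.  */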
8789 void
8790 arm_print_operand (stream, x, code)
8791 FILE * stream;
8792 rtx x;
8793 int code;
8795 switch (code)
8797 case '@':
8798 fputs (ASM_COMMENT_START, stream);
8799 return;
8801 case '_':
8802 fputs (user_label_prefix, stream);
8803 return;
8805 case '|':
8806 fputs (REGISTER_PREFIX, stream);
8807 return;
8809 case '?':
8810 if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
8812 if (TARGET_THUMB || current_insn_predicate != NULL)
8813 abort ();
8815 fputs (arm_condition_codes[arm_current_cc], stream);
8817 else if (current_insn_predicate)
8819 enum arm_cond_code code;
8821 if (TARGET_THUMB)
8822 abort ();
8824 code = get_arm_condition_code (current_insn_predicate);
8825 fputs (arm_condition_codes[code], stream);
8827 return;
8829 case 'N':
8831 REAL_VALUE_TYPE r;
8832 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
8833 r = REAL_VALUE_NEGATE (r);
8834 fprintf (stream, "%s", fp_const_from_val (&r));
8836 return;
8838 case 'B':
8839 if (GET_CODE (x) == CONST_INT)
8841 HOST_WIDE_INT val;
8842 val = ARM_SIGN_EXTEND (~INTVAL (x));
8843 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
8845 else
8847 putc ('~', stream);
8848 output_addr_const (stream, x);
8850 return;
8852 case 'i':
8853 fprintf (stream, "%s", arithmetic_instr (x, 1));
8854 return;
8856 case 'I':
8857 fprintf (stream, "%s", arithmetic_instr (x, 0));
8858 return;
8860 case 'S':
8862 HOST_WIDE_INT val;
8863 const char * shift = shift_op (x, &val);
8865 if (shift)
8867 fprintf (stream, ", %s ", shift_op (x, &val));
8868 if (val == -1)
8869 arm_print_operand (stream, XEXP (x, 1), 0);
8870 else
8872 fputc ('#', stream);
8873 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
8877 return;
8879 /* An explanation of the 'Q', 'R' and 'H' register operands:
8881 In a pair of registers containing a DI or DF value the 'Q'
8882 operand returns the register number of the register containing
8883 the least significant part of the value. The 'R' operand returns
8884 the register number of the register containing the most
8885 significant part of the value.
8887 The 'H' operand returns the higher of the two register numbers.
8888 On a run where WORDS_BIG_ENDIAN is true the 'H' operand is the
8890 same as the 'Q' operand, since the most significant part of the
8890 value is held in the lower number register. The reverse is true
8891 on systems where WORDS_BIG_ENDIAN is false.
8893 The purpose of these operands is to distinguish between cases
8894 where the endian-ness of the values is important (for example
8895 when they are added together), and cases where the endian-ness
8896 is irrelevant, but the order of register operations is important.
8897 For example when loading a value from memory into a register
8898 pair, the endian-ness does not matter. Provided that the value
8899 from the lower memory address is put into the lower numbered
8900 register, and the value from the higher address is put into the
8901 higher numbered register, the load will work regardless of whether
8902 the value being loaded is big-wordian or little-wordian. The
8903 order of the two register loads can matter however, if the address
8904 of the memory location is actually held in one of the registers
8905 being overwritten by the load. */
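/* For example, with a little-endian DImode value in r0/r1, 'Q' prints
   r0 while 'R' and 'H' print r1; on a WORDS_BIG_ENDIAN target 'Q' and
   'H' both print r1 and 'R' prints r0.  */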
8906 case 'Q':
8907 if (REGNO (x) > LAST_ARM_REGNUM)
8908 abort ();
8909 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 1 : 0));
8910 return;
8912 case 'R':
8913 if (REGNO (x) > LAST_ARM_REGNUM)
8914 abort ();
8915 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 0 : 1));
8916 return;
8918 case 'H':
8919 if (REGNO (x) > LAST_ARM_REGNUM)
8920 abort ();
8921 asm_fprintf (stream, "%r", REGNO (x) + 1);
8922 return;
8924 case 'm':
8925 asm_fprintf (stream, "%r",
8926 GET_CODE (XEXP (x, 0)) == REG
8927 ? REGNO (XEXP (x, 0)) : REGNO (XEXP (XEXP (x, 0), 0)));
8928 return;
8930 case 'M':
8931 asm_fprintf (stream, "{%r-%r}",
8932 REGNO (x),
8933 REGNO (x) + ARM_NUM_REGS (GET_MODE (x)) - 1);
8934 return;
8936 case 'd':
8937 /* CONST_TRUE_RTX means always -- that's the default. */
8938 if (x == const_true_rtx)
8939 return;
8941 if (TARGET_ARM)
8942 fputs (arm_condition_codes[get_arm_condition_code (x)],
8943 stream);
8944 else
8945 fputs (thumb_condition_code (x, 0), stream);
8946 return;
8948 case 'D':
8949 /* CONST_TRUE_RTX means not always -- i.e. never. We shouldn't ever
8950 want to do that. */
8951 if (x == const_true_rtx)
8952 abort ();
8954 if (TARGET_ARM)
8955 fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
8956 (get_arm_condition_code (x))],
8957 stream);
8958 else
8959 fputs (thumb_condition_code (x, 1), stream);
8960 return;
8962 default:
8963 if (x == 0)
8964 abort ();
8966 if (GET_CODE (x) == REG)
8967 asm_fprintf (stream, "%r", REGNO (x));
8968 else if (GET_CODE (x) == MEM)
8970 output_memory_reference_mode = GET_MODE (x);
8971 output_address (XEXP (x, 0));
8973 else if (GET_CODE (x) == CONST_DOUBLE)
8974 fprintf (stream, "#%s", fp_immediate_constant (x));
8975 else if (GET_CODE (x) == NEG)
8976 abort (); /* This should never happen now. */
8977 else
8979 fputc ('#', stream);
8980 output_addr_const (stream, x);
8985 #ifndef AOF_ASSEMBLER
8986 /* Target hook for assembling integer objects. The ARM version needs to
8987 handle word-sized values specially. */
8989 static bool
8990 arm_assemble_integer (x, size, aligned_p)
8991 rtx x;
8992 unsigned int size;
8993 int aligned_p;
8995 if (size == UNITS_PER_WORD && aligned_p)
8997 fputs ("\t.word\t", asm_out_file);
8998 output_addr_const (asm_out_file, x);
9000 /* Mark symbols as position independent. We only do this in the
9001 .text segment, not in the .data segment. */
9002 if (NEED_GOT_RELOC && flag_pic && making_const_table &&
9003 (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF))
9005 if (GET_CODE (x) == SYMBOL_REF
9006 && (CONSTANT_POOL_ADDRESS_P (x)
9007 || ENCODED_SHORT_CALL_ATTR_P (XSTR (x, 0))))
9008 fputs ("(GOTOFF)", asm_out_file);
9009 else if (GET_CODE (x) == LABEL_REF)
9010 fputs ("(GOTOFF)", asm_out_file);
9011 else
9012 fputs ("(GOT)", asm_out_file);
9014 fputc ('\n', asm_out_file);
9015 return true;
9018 return default_assemble_integer (x, size, aligned_p);
9020 #endif
9022 /* A finite state machine takes care of noticing whether or not instructions
9023 can be conditionally executed, and thus decrease execution time and code
9024 size by deleting branch instructions. The fsm is controlled by
9025 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
9027 /* The states of the fsm controlling condition codes are:
9028 0: normal, do nothing special
9029 1: make ASM_OUTPUT_OPCODE not output this instruction
9030 2: make ASM_OUTPUT_OPCODE not output this instruction
9031 3: make instructions conditional
9032 4: make instructions conditional
9034 State transitions (state->state by whom under condition):
9035 0 -> 1 final_prescan_insn if the `target' is a label
9036 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
9037 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
9038 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
9039 3 -> 0 (*targetm.asm_out.internal_label) if the `target' label is reached
9040 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
9041 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
9042 (the target insn is arm_target_insn).
9044 If the jump clobbers the conditions then we use states 2 and 4.
9046 A similar thing can be done with conditional return insns.
9048 XXX In case the `target' is an unconditional branch, this conditionalising
9049 of the instructions always reduces code size, but not always execution
9050 time. But then, I want to reduce the code size to somewhere near what
9051 /bin/cc produces. */
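/* A sketch of the transformation, in illustrative assembly:

        cmp   r0, #0                      cmp   r0, #0
        bne   .L1            becomes      moveq r1, #1
        mov   r1, #1                   .L1:
     .L1:

   The conditional branch is deleted and the instruction it skipped is
   executed under the inverse condition.  */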
9053 /* Returns the index of the ARM condition code string in
9054 `arm_condition_codes'. COMPARISON should be an rtx like
9055 `(eq (...) (...))'. */
9057 static enum arm_cond_code
9058 get_arm_condition_code (comparison)
9059 rtx comparison;
9061 enum machine_mode mode = GET_MODE (XEXP (comparison, 0));
9062 int code;
9063 enum rtx_code comp_code = GET_CODE (comparison);
9065 if (GET_MODE_CLASS (mode) != MODE_CC)
9066 mode = SELECT_CC_MODE (comp_code, XEXP (comparison, 0),
9067 XEXP (comparison, 1));
9069 switch (mode)
9071 case CC_DNEmode: code = ARM_NE; goto dominance;
9072 case CC_DEQmode: code = ARM_EQ; goto dominance;
9073 case CC_DGEmode: code = ARM_GE; goto dominance;
9074 case CC_DGTmode: code = ARM_GT; goto dominance;
9075 case CC_DLEmode: code = ARM_LE; goto dominance;
9076 case CC_DLTmode: code = ARM_LT; goto dominance;
9077 case CC_DGEUmode: code = ARM_CS; goto dominance;
9078 case CC_DGTUmode: code = ARM_HI; goto dominance;
9079 case CC_DLEUmode: code = ARM_LS; goto dominance;
9080 case CC_DLTUmode: code = ARM_CC;
9082 dominance:
9083 if (comp_code != EQ && comp_code != NE)
9084 abort ();
9086 if (comp_code == EQ)
9087 return ARM_INVERSE_CONDITION_CODE (code);
9088 return code;
9090 case CC_NOOVmode:
9091 switch (comp_code)
9093 case NE: return ARM_NE;
9094 case EQ: return ARM_EQ;
9095 case GE: return ARM_PL;
9096 case LT: return ARM_MI;
9097 default: abort ();
9100 case CC_Zmode:
9101 switch (comp_code)
9103 case NE: return ARM_NE;
9104 case EQ: return ARM_EQ;
9105 default: abort ();
9108 case CCFPEmode:
9109 case CCFPmode:
9110 /* These encodings assume that AC=1 in the FPA system control
9111 byte. This allows us to handle all cases except UNEQ and
9112 LTGT. */
9113 switch (comp_code)
9115 case GE: return ARM_GE;
9116 case GT: return ARM_GT;
9117 case LE: return ARM_LS;
9118 case LT: return ARM_MI;
9119 case NE: return ARM_NE;
9120 case EQ: return ARM_EQ;
9121 case ORDERED: return ARM_VC;
9122 case UNORDERED: return ARM_VS;
9123 case UNLT: return ARM_LT;
9124 case UNLE: return ARM_LE;
9125 case UNGT: return ARM_HI;
9126 case UNGE: return ARM_PL;
9127 /* UNEQ and LTGT do not have a representation. */
9128 case UNEQ: /* Fall through. */
9129 case LTGT: /* Fall through. */
9130 default: abort ();
9133 case CC_SWPmode:
9134 switch (comp_code)
9136 case NE: return ARM_NE;
9137 case EQ: return ARM_EQ;
9138 case GE: return ARM_LE;
9139 case GT: return ARM_LT;
9140 case LE: return ARM_GE;
9141 case LT: return ARM_GT;
9142 case GEU: return ARM_LS;
9143 case GTU: return ARM_CC;
9144 case LEU: return ARM_CS;
9145 case LTU: return ARM_HI;
9146 default: abort ();
9149 case CC_Cmode:
9150 switch (comp_code)
9152 case LTU: return ARM_CS;
9153 case GEU: return ARM_CC;
9154 default: abort ();
9157 case CCmode:
9158 switch (comp_code)
9160 case NE: return ARM_NE;
9161 case EQ: return ARM_EQ;
9162 case GE: return ARM_GE;
9163 case GT: return ARM_GT;
9164 case LE: return ARM_LE;
9165 case LT: return ARM_LT;
9166 case GEU: return ARM_CS;
9167 case GTU: return ARM_HI;
9168 case LEU: return ARM_LS;
9169 case LTU: return ARM_CC;
9170 default: abort ();
9173 default: abort ();
9176 abort ();
9180 void
9181 arm_final_prescan_insn (insn)
9182 rtx insn;
9184 /* BODY will hold the body of INSN. */
9185 rtx body = PATTERN (insn);
9187 /* This will be 1 if we are trying to repeat the trick (see below), and
9188 things need to be reversed if the attempt appears to fail. */
9189 int reverse = 0;
9191 /* A nonzero JUMP_CLOBBERS means that the conditions are clobbered if the
9192 branch is taken, even if the rtl suggests otherwise. It also
9193 means that we have to grub around within the jump expression to find
9194 out what the conditions are when the jump isn't taken. */
9195 int jump_clobbers = 0;
9197 /* If we start with a return insn, we only succeed if we find another one. */
9198 int seeking_return = 0;
9200 /* START_INSN will hold the insn from where we start looking. This is the
9201 first insn after the following code_label if REVERSE is true. */
9202 rtx start_insn = insn;
9204 /* If in state 4, check if the target branch is reached, in order to
9205 change back to state 0. */
9206 if (arm_ccfsm_state == 4)
9208 if (insn == arm_target_insn)
9210 arm_target_insn = NULL;
9211 arm_ccfsm_state = 0;
9213 return;
9216 /* If in state 3, it is possible to repeat the trick, if this insn is an
9217 unconditional branch to a label, and immediately following this branch
9218 is the previous target label which is only used once, and the label this
9219 branch jumps to is not too far off. */
9220 if (arm_ccfsm_state == 3)
9222 if (simplejump_p (insn))
9224 start_insn = next_nonnote_insn (start_insn);
9225 if (GET_CODE (start_insn) == BARRIER)
9227 /* XXX Isn't this always a barrier? */
9228 start_insn = next_nonnote_insn (start_insn);
9230 if (GET_CODE (start_insn) == CODE_LABEL
9231 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
9232 && LABEL_NUSES (start_insn) == 1)
9233 reverse = TRUE;
9234 else
9235 return;
9237 else if (GET_CODE (body) == RETURN)
9239 start_insn = next_nonnote_insn (start_insn);
9240 if (GET_CODE (start_insn) == BARRIER)
9241 start_insn = next_nonnote_insn (start_insn);
9242 if (GET_CODE (start_insn) == CODE_LABEL
9243 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
9244 && LABEL_NUSES (start_insn) == 1)
9246 reverse = TRUE;
9247 seeking_return = 1;
9249 else
9250 return;
9252 else
9253 return;
9256 if (arm_ccfsm_state != 0 && !reverse)
9257 abort ();
9258 if (GET_CODE (insn) != JUMP_INSN)
9259 return;
9261 /* This jump might be paralleled with a clobber of the condition codes;
9262 the jump should always come first. */
9263 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
9264 body = XVECEXP (body, 0, 0);
9266 #if 0
9267 /* If this is a conditional return then we don't want to know */
9268 if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
9269 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
9270 && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
9271 || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
9272 return;
9273 #endif
9275 if (reverse
9276 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
9277 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
9279 int insns_skipped;
9280 int fail = FALSE, succeed = FALSE;
9281 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
9282 int then_not_else = TRUE;
9283 rtx this_insn = start_insn, label = 0;
9285 /* If the jump cannot be done with one instruction, we cannot
9286 conditionally execute the instruction in the inverse case. */
9287 if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
9289 jump_clobbers = 1;
9290 return;
9293 /* Register the insn jumped to. */
9294 if (reverse)
9296 if (!seeking_return)
9297 label = XEXP (SET_SRC (body), 0);
9299 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
9300 label = XEXP (XEXP (SET_SRC (body), 1), 0);
9301 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
9303 label = XEXP (XEXP (SET_SRC (body), 2), 0);
9304 then_not_else = FALSE;
9306 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
9307 seeking_return = 1;
9308 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
9310 seeking_return = 1;
9311 then_not_else = FALSE;
9313 else
9314 abort ();
9316 /* See how many insns this branch skips, and what kind of insns. If all
9317 insns are okay, and the label or unconditional branch to the same
9318 label is not too far away, succeed. */
9319 for (insns_skipped = 0;
9320 !fail && !succeed && insns_skipped++ < max_insns_skipped;)
9322 rtx scanbody;
9324 this_insn = next_nonnote_insn (this_insn);
9325 if (!this_insn)
9326 break;
9328 switch (GET_CODE (this_insn))
9330 case CODE_LABEL:
9331 /* Succeed if it is the target label, otherwise fail since
9332 control falls in from somewhere else. */
9333 if (this_insn == label)
9335 if (jump_clobbers)
9337 arm_ccfsm_state = 2;
9338 this_insn = next_nonnote_insn (this_insn);
9340 else
9341 arm_ccfsm_state = 1;
9342 succeed = TRUE;
9344 else
9345 fail = TRUE;
9346 break;
9348 case BARRIER:
9349 /* Succeed if the following insn is the target label.
9350 Otherwise fail.
9351 If return insns are used then the last insn in a function
9352 will be a barrier. */
9353 this_insn = next_nonnote_insn (this_insn);
9354 if (this_insn && this_insn == label)
9356 if (jump_clobbers)
9358 arm_ccfsm_state = 2;
9359 this_insn = next_nonnote_insn (this_insn);
9361 else
9362 arm_ccfsm_state = 1;
9363 succeed = TRUE;
9365 else
9366 fail = TRUE;
9367 break;
9369 case CALL_INSN:
9370 /* If using 32-bit addresses the cc is not preserved over
9371 calls. */
9372 if (TARGET_APCS_32)
9374 /* Succeed if the following insn is the target label,
9375 or if the following two insns are a barrier and
9376 the target label. */
9377 this_insn = next_nonnote_insn (this_insn);
9378 if (this_insn && GET_CODE (this_insn) == BARRIER)
9379 this_insn = next_nonnote_insn (this_insn);
9381 if (this_insn && this_insn == label
9382 && insns_skipped < max_insns_skipped)
9384 if (jump_clobbers)
9386 arm_ccfsm_state = 2;
9387 this_insn = next_nonnote_insn (this_insn);
9389 else
9390 arm_ccfsm_state = 1;
9391 succeed = TRUE;
9393 else
9394 fail = TRUE;
9396 break;
9398 case JUMP_INSN:
9399 /* If this is an unconditional branch to the same label, succeed.
9400 If it is to another label, do nothing. If it is conditional,
9401 fail. */
9402 /* XXX Probably, the tests for SET and the PC are unnecessary. */
9404 scanbody = PATTERN (this_insn);
9405 if (GET_CODE (scanbody) == SET
9406 && GET_CODE (SET_DEST (scanbody)) == PC)
9408 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
9409 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
9411 arm_ccfsm_state = 2;
9412 succeed = TRUE;
9414 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
9415 fail = TRUE;
9417 /* Fail if a conditional return is undesirable (eg on a
9418 StrongARM), but still allow this if optimizing for size. */
9419 else if (GET_CODE (scanbody) == RETURN
9420 && !use_return_insn (TRUE)
9421 && !optimize_size)
9422 fail = TRUE;
9423 else if (GET_CODE (scanbody) == RETURN
9424 && seeking_return)
9426 arm_ccfsm_state = 2;
9427 succeed = TRUE;
9429 else if (GET_CODE (scanbody) == PARALLEL)
9431 switch (get_attr_conds (this_insn))
9433 case CONDS_NOCOND:
9434 break;
9435 default:
9436 fail = TRUE;
9437 break;
9440 else
9441 fail = TRUE; /* Unrecognized jump (eg epilogue). */
9443 break;
9445 case INSN:
9446 /* Instructions using or affecting the condition codes make it
9447 fail. */
9448 scanbody = PATTERN (this_insn);
9449 if (!(GET_CODE (scanbody) == SET
9450 || GET_CODE (scanbody) == PARALLEL)
9451 || get_attr_conds (this_insn) != CONDS_NOCOND)
9452 fail = TRUE;
9453 break;
9455 default:
9456 break;
9459 if (succeed)
9461 if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
9462 arm_target_label = CODE_LABEL_NUMBER (label);
9463 else if (seeking_return || arm_ccfsm_state == 2)
9465 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
9467 this_insn = next_nonnote_insn (this_insn);
9468 if (this_insn && (GET_CODE (this_insn) == BARRIER
9469 || GET_CODE (this_insn) == CODE_LABEL))
9470 abort ();
9472 if (!this_insn)
9474 /* Oh, dear! We ran off the end; give up. */
9475 recog (PATTERN (insn), insn, NULL);
9476 arm_ccfsm_state = 0;
9477 arm_target_insn = NULL;
9478 return;
9480 arm_target_insn = this_insn;
9482 else
9483 abort ();
9484 if (jump_clobbers)
9486 if (reverse)
9487 abort ();
9488 arm_current_cc =
9489 get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
9490 0), 0), 1));
9491 if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
9492 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
9493 if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
9494 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
9496 else
9498 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
9499 what it was. */
9500 if (!reverse)
9501 arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
9502 0));
9505 if (reverse || then_not_else)
9506 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
9509 /* Restore recog_data (getting the attributes of other insns can
9510 destroy this array, but final.c assumes that it remains intact
9511 across this call; since the insn has been recognized already we
9512 call recog directly). */
9513 recog (PATTERN (insn), insn, NULL);
9517 /* Returns true if REGNO is a valid register
9518 for holding a quantity of type MODE. */
9520 int
9521 arm_hard_regno_mode_ok (regno, mode)
9522 unsigned int regno;
9523 enum machine_mode mode;
9525 if (GET_MODE_CLASS (mode) == MODE_CC)
9526 return regno == CC_REGNUM;
9528 if (TARGET_THUMB)
9529 /* For the Thumb we only allow values bigger than SImode in
9530 registers 0 - 6, so that there is always a second low
9531 register available to hold the upper part of the value.
9532 We probably ought to ensure that the register is the
9533 start of an even numbered register pair. */
9534 return (ARM_NUM_REGS (mode) < 2) || (regno < LAST_LO_REGNUM);
9536 if (regno <= LAST_ARM_REGNUM)
9537 /* We allow any value to be stored in the general registers. */
9538 return 1;
9540 if ( regno == FRAME_POINTER_REGNUM
9541 || regno == ARG_POINTER_REGNUM)
9542 /* We only allow integers in the fake hard registers. */
9543 return GET_MODE_CLASS (mode) == MODE_INT;
9545 /* The only registers left are the FPU registers
9546 which we only allow to hold FP values. */
9547 return GET_MODE_CLASS (mode) == MODE_FLOAT
9548 && regno >= FIRST_ARM_FP_REGNUM
9549 && regno <= LAST_ARM_FP_REGNUM;
9552 enum reg_class
9553 arm_regno_class (regno)
9554 int regno;
9556 if (TARGET_THUMB)
9558 if (regno == STACK_POINTER_REGNUM)
9559 return STACK_REG;
9560 if (regno == CC_REGNUM)
9561 return CC_REG;
9562 if (regno < 8)
9563 return LO_REGS;
9564 return HI_REGS;
9567 if ( regno <= LAST_ARM_REGNUM
9568 || regno == FRAME_POINTER_REGNUM
9569 || regno == ARG_POINTER_REGNUM)
9570 return GENERAL_REGS;
9572 if (regno == CC_REGNUM)
9573 return NO_REGS;
9575 return FPU_REGS;
9578 /* Handle a special case when computing the offset
9579 of an argument from the frame pointer. */
9581 int
9582 arm_debugger_arg_offset (value, addr)
9583 int value;
9584 rtx addr;
9586 rtx insn;
9588 /* We are only interested if dbxout_parms() failed to compute the offset. */
9589 if (value != 0)
9590 return 0;
9592 /* We can only cope with the case where the address is held in a register. */
9593 if (GET_CODE (addr) != REG)
9594 return 0;
9596 /* If we are using the frame pointer to point at the argument, then
9597 an offset of 0 is correct. */
9598 if (REGNO (addr) == (unsigned) HARD_FRAME_POINTER_REGNUM)
9599 return 0;
9601 /* If we are using the stack pointer to point at the
9602 argument, then an offset of 0 is correct. */
9603 if ((TARGET_THUMB || !frame_pointer_needed)
9604 && REGNO (addr) == SP_REGNUM)
9605 return 0;
9607 /* Oh dear. The argument is pointed to by a register rather
9608 than being held in a register, or being stored at a known
9609 offset from the frame pointer. Since GDB only understands
9610 those two kinds of argument we must translate the address
9611 held in the register into an offset from the frame pointer.
9612 We do this by searching through the insns for the function
9613 looking to see where this register gets its value. If the
9614 register is initialized from the frame pointer plus an offset
9615 then we are in luck and we can continue, otherwise we give up.
9617 This code is exercised by producing debugging information
9618 for a function with arguments like this:
9620 double func (double a, double b, int c, double d) {return d;}
9622 Without this code the stab for parameter 'd' will be set to
9623 an offset of 0 from the frame pointer, rather than 8. */
9625 /* The if() statement says:
9627 If the insn is a normal instruction
9628 and if the insn is setting the value in a register
9629 and if the register being set is the register holding the address of the argument
9630 and if the address is computed by an addition
9631 that involves adding to a register
9632 which is the frame pointer
9633 a constant integer
9635 then... */
9637 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9639 if ( GET_CODE (insn) == INSN
9640 && GET_CODE (PATTERN (insn)) == SET
9641 && REGNO (XEXP (PATTERN (insn), 0)) == REGNO (addr)
9642 && GET_CODE (XEXP (PATTERN (insn), 1)) == PLUS
9643 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 0)) == REG
9644 && REGNO (XEXP (XEXP (PATTERN (insn), 1), 0)) == (unsigned) HARD_FRAME_POINTER_REGNUM
9645 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 1)) == CONST_INT
9648 value = INTVAL (XEXP (XEXP (PATTERN (insn), 1), 1));
9650 break;
9654 if (value == 0)
9656 debug_rtx (addr);
9657 warning ("unable to compute real location of stacked parameter");
9658 value = 8; /* XXX magic hack */
9661 return value;
9664 #define def_builtin(NAME, TYPE, CODE) \
9665 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, NULL, NULL_TREE)
9667 void
9668 arm_init_builtins ()
9670 tree endlink = void_list_node;
9671 tree int_endlink = tree_cons (NULL_TREE, integer_type_node, endlink);
9672 tree pchar_type_node = build_pointer_type (char_type_node);
9674 tree int_ftype_int, void_ftype_pchar;
9676 /* void func (char *) */
9677 void_ftype_pchar
9678 = build_function_type_list (void_type_node, pchar_type_node, NULL_TREE);
9680 /* int func (int) */
9681 int_ftype_int
9682 = build_function_type (integer_type_node, int_endlink);
9684 /* Initialize arm V5 builtins. */
9685 if (arm_arch5)
9686 def_builtin ("__builtin_clz", int_ftype_int, ARM_BUILTIN_CLZ);
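/* With this in place, on an arm_arch5 target user code such as
   "int f (int x) { return __builtin_clz (x); }" can expand to a
   single CLZ instruction via arm_expand_builtin below.  */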
9689 /* Expand an expression EXP that calls a built-in function,
9690 with result going to TARGET if that's convenient
9691 (and in mode MODE if that's convenient).
9692 SUBTARGET may be used as the target for computing one of EXP's operands.
9693 IGNORE is nonzero if the value is to be ignored. */
9695 rtx
9696 arm_expand_builtin (exp, target, subtarget, mode, ignore)
9697 tree exp;
9698 rtx target;
9699 rtx subtarget ATTRIBUTE_UNUSED;
9700 enum machine_mode mode ATTRIBUTE_UNUSED;
9701 int ignore ATTRIBUTE_UNUSED;
9703 enum insn_code icode;
9704 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9705 tree arglist = TREE_OPERAND (exp, 1);
9706 tree arg0;
9707 rtx op0, pat;
9708 enum machine_mode tmode, mode0;
9709 int fcode = DECL_FUNCTION_CODE (fndecl);
9711 switch (fcode)
9713 default:
9714 break;
9716 case ARM_BUILTIN_CLZ:
9717 icode = CODE_FOR_clz;
9718 arg0 = TREE_VALUE (arglist);
9719 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
9720 tmode = insn_data[icode].operand[0].mode;
9721 mode0 = insn_data[icode].operand[1].mode;
9723 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
9724 op0 = copy_to_mode_reg (mode0, op0);
9725 if (target == 0
9726 || GET_MODE (target) != tmode
9727 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
9728 target = gen_reg_rtx (tmode);
9729 pat = GEN_FCN (icode) (target, op0);
9730 if (! pat)
9731 return 0;
9732 emit_insn (pat);
9733 return target;
9736 /* @@@ Should really do something sensible here. */
9737 return NULL_RTX;
9740 /* Recursively search through all of the blocks in a function
9741 checking to see if any of the variables created in that
9742 function match the RTX called 'orig'. If they do then
9743 replace them with the RTX called 'new'. */
9745 static void
9746 replace_symbols_in_block (block, orig, new)
9747 tree block;
9748 rtx orig;
9749 rtx new;
9751 for (; block; block = BLOCK_CHAIN (block))
9753 tree sym;
9755 if (!TREE_USED (block))
9756 continue;
9758 for (sym = BLOCK_VARS (block); sym; sym = TREE_CHAIN (sym))
9760 if ( (DECL_NAME (sym) == 0 && TREE_CODE (sym) != TYPE_DECL)
9761 || DECL_IGNORED_P (sym)
9762 || TREE_CODE (sym) != VAR_DECL
9763 || DECL_EXTERNAL (sym)
9764 || !rtx_equal_p (DECL_RTL (sym), orig)
9766 continue;
9768 SET_DECL_RTL (sym, new);
9771 replace_symbols_in_block (BLOCK_SUBBLOCKS (block), orig, new);
9775 /* Return the number (counting from 0) of
9776 the least significant set bit in MASK. */
9778 #ifdef __GNUC__
9779 inline
9780 #endif
9781 static int
9782 number_of_first_bit_set (mask)
9783 int mask;
9785 int bit;
9787 for (bit = 0;
9788 (mask & (1 << bit)) == 0;
9789 ++bit)
9790 continue;
9792 return bit;
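/* For example, number_of_first_bit_set (0x18) returns 3.  Note that
   the loop does not terminate for a zero MASK; callers must ensure
   at least one bit is set.  */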
9795 /* Generate code to return from a thumb function.
9796 If 'reg_containing_return_addr' is -1, then the return address is
9797 actually on the stack, at the stack pointer. */
9798 static void
9799 thumb_exit (f, reg_containing_return_addr, eh_ofs)
9800 FILE * f;
9801 int reg_containing_return_addr;
9802 rtx eh_ofs;
9804 unsigned regs_available_for_popping;
9805 unsigned regs_to_pop;
9806 int pops_needed;
9807 unsigned available;
9808 unsigned required;
9809 int mode;
9810 int size;
9811 int restore_a4 = FALSE;
9813 /* Compute the registers we need to pop. */
9814 regs_to_pop = 0;
9815 pops_needed = 0;
9817 /* There is an assumption here, that if eh_ofs is not NULL, the
9818 normal return address will have been pushed. */
9819 if (reg_containing_return_addr == -1 || eh_ofs)
9821 /* When we are generating a return for __builtin_eh_return,
9822 reg_containing_return_addr must specify the return regno. */
9823 if (eh_ofs && reg_containing_return_addr == -1)
9824 abort ();
9826 regs_to_pop |= 1 << LR_REGNUM;
9827 ++pops_needed;
9830 if (TARGET_BACKTRACE)
9832 /* Restore the (ARM) frame pointer and stack pointer. */
9833 regs_to_pop |= (1 << ARM_HARD_FRAME_POINTER_REGNUM) | (1 << SP_REGNUM);
9834 pops_needed += 2;
9837 /* If there is nothing to pop then just emit the BX instruction and
9838 return. */
9839 if (pops_needed == 0)
9841 if (eh_ofs)
9842 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
9844 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
9845 return;
9847 /* Otherwise if we are not supporting interworking and we have not created
9848 a backtrace structure and the function was not entered in ARM mode then
9849 just pop the return address straight into the PC. */
9850 else if (!TARGET_INTERWORK
9851 && !TARGET_BACKTRACE
9852 && !is_called_in_ARM_mode (current_function_decl))
9854 if (eh_ofs)
9856 asm_fprintf (f, "\tadd\t%r, #4\n", SP_REGNUM);
9857 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
9858 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
9860 else
9861 asm_fprintf (f, "\tpop\t{%r}\n", PC_REGNUM);
9863 return;
9866 /* Find out how many of the (return) argument registers we can corrupt. */
9867 regs_available_for_popping = 0;
9869 /* If returning via __builtin_eh_return, the bottom three registers
9870 all contain information needed for the return. */
9871 if (eh_ofs)
9872 size = 12;
9873 else
9875 #ifdef RTX_CODE
9876 /* We can deduce the registers used from the function's
9877 return value. This is more reliable than examining
9878 regs_ever_live[] because that will be set if the register is
9879 ever used in the function, not just if the register is used
9880 to hold a return value. */
9882 if (current_function_return_rtx != 0)
9883 mode = GET_MODE (current_function_return_rtx);
9884 else
9885 #endif
9886 mode = DECL_MODE (DECL_RESULT (current_function_decl));
9888 size = GET_MODE_SIZE (mode);
9890 if (size == 0)
9892 /* In a void function we can use any argument register.
9893 In a function that returns a structure on the stack
9894 we can use the second and third argument registers. */
9895 if (mode == VOIDmode)
9896 regs_available_for_popping =
9897 (1 << ARG_REGISTER (1))
9898 | (1 << ARG_REGISTER (2))
9899 | (1 << ARG_REGISTER (3));
9900 else
9901 regs_available_for_popping =
9902 (1 << ARG_REGISTER (2))
9903 | (1 << ARG_REGISTER (3));
9905 else if (size <= 4)
9906 regs_available_for_popping =
9907 (1 << ARG_REGISTER (2))
9908 | (1 << ARG_REGISTER (3));
9909 else if (size <= 8)
9910 regs_available_for_popping =
9911 (1 << ARG_REGISTER (3));
9914 /* Match registers to be popped with registers into which we pop them. */
9915 for (available = regs_available_for_popping,
9916 required = regs_to_pop;
9917 required != 0 && available != 0;
9918 available &= ~(available & - available),
9919 required &= ~(required & - required))
9920 -- pops_needed;
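/* Worked example (assumed masks): if regs_to_pop is just the LR and
   three argument registers are available for popping, the first
   iteration pairs them off and pops_needed drops to zero; the two
   surplus popping registers are then stripped from the mask below.  */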
9922 /* If we have any popping registers left over, remove them. */
9923 if (available > 0)
9924 regs_available_for_popping &= ~available;
9926 /* Otherwise if we need another popping register we can use
9927 the fourth argument register. */
9928 else if (pops_needed)
9930 /* If we have not found any free argument registers and
9931 reg a4 contains the return address, we must move it. */
9932 if (regs_available_for_popping == 0
9933 && reg_containing_return_addr == LAST_ARG_REGNUM)
9935 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
9936 reg_containing_return_addr = LR_REGNUM;
9938 else if (size > 12)
9940 /* Register a4 is being used to hold part of the return value,
9941 but we have dire need of a free, low register. */
9942 restore_a4 = TRUE;
9944 asm_fprintf (f, "\tmov\t%r, %r\n",IP_REGNUM, LAST_ARG_REGNUM);
9947 if (reg_containing_return_addr != LAST_ARG_REGNUM)
9949 /* The fourth argument register is available. */
9950 regs_available_for_popping |= 1 << LAST_ARG_REGNUM;
9952 --pops_needed;
9956 /* Pop as many registers as we can. */
9957 thumb_pushpop (f, regs_available_for_popping, FALSE);
9959 /* Process the registers we popped. */
9960 if (reg_containing_return_addr == -1)
9962 /* The return address was popped into the lowest numbered register. */
9963 regs_to_pop &= ~(1 << LR_REGNUM);
9965 reg_containing_return_addr =
9966 number_of_first_bit_set (regs_available_for_popping);
9968 /* Remove this register from the mask of available registers, so that
9969 the return address will not be corrupted by further pops. */
9970 regs_available_for_popping &= ~(1 << reg_containing_return_addr);
9973 /* If we popped other registers then handle them here. */
9974 if (regs_available_for_popping)
9976 int frame_pointer;
9978 /* Work out which register currently contains the frame pointer. */
9979 frame_pointer = number_of_first_bit_set (regs_available_for_popping);
9981 /* Move it into the correct place. */
9982 asm_fprintf (f, "\tmov\t%r, %r\n",
9983 ARM_HARD_FRAME_POINTER_REGNUM, frame_pointer);
9985 /* (Temporarily) remove it from the mask of popped registers. */
9986 regs_available_for_popping &= ~(1 << frame_pointer);
9987 regs_to_pop &= ~(1 << ARM_HARD_FRAME_POINTER_REGNUM);
9989 if (regs_available_for_popping)
9991 int stack_pointer;
9993 /* We popped the stack pointer as well,
9994 find the register that contains it. */
9995 stack_pointer = number_of_first_bit_set (regs_available_for_popping);
9997 /* Move it into the stack register. */
9998 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, stack_pointer);
10000 /* At this point we have popped all necessary registers, so
10001 do not worry about restoring regs_available_for_popping
10002 to its correct value:
10004 assert (pops_needed == 0)
10005 assert (regs_available_for_popping == (1 << frame_pointer))
10006 assert (regs_to_pop == (1 << STACK_POINTER)) */
10008 else
10010 /* Since we have just moved the popped value into the frame
10011 pointer, the popping register is available for reuse, and
10012 we know that we still have the stack pointer left to pop. */
10013 regs_available_for_popping |= (1 << frame_pointer);
10017 /* If we still have registers left on the stack, but we no longer have
10018 any registers into which we can pop them, then we must move the return
10019 address into the link register and make available the register that
10020 contained it. */
10021 if (regs_available_for_popping == 0 && pops_needed > 0)
10023 regs_available_for_popping |= 1 << reg_containing_return_addr;
10025 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM,
10026 reg_containing_return_addr);
10028 reg_containing_return_addr = LR_REGNUM;
10031 /* If we have registers left on the stack then pop some more.
10032 We know that at most we will want to pop FP and SP. */
10033 if (pops_needed > 0)
10035 int popped_into;
10036 int move_to;
10038 thumb_pushpop (f, regs_available_for_popping, FALSE);
10040 /* We have popped either FP or SP.
10041 Move whichever one it is into the correct register. */
10042 popped_into = number_of_first_bit_set (regs_available_for_popping);
10043 move_to = number_of_first_bit_set (regs_to_pop);
10045 asm_fprintf (f, "\tmov\t%r, %r\n", move_to, popped_into);
10047 regs_to_pop &= ~(1 << move_to);
10049 --pops_needed;
10052 /* If we still have not popped everything then we must have only
10053 had one register available to us and we are now popping the SP. */
10054 if (pops_needed > 0)
10056 int popped_into;
10058 thumb_pushpop (f, regs_available_for_popping, FALSE);
10060 popped_into = number_of_first_bit_set (regs_available_for_popping);
10062 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, popped_into);
10064 /* assert (regs_to_pop == (1 << STACK_POINTER))
10065 assert (pops_needed == 1) */
10069 /* If necessary restore the a4 register. */
10070 if (restore_a4)
10072 if (reg_containing_return_addr != LR_REGNUM)
10074 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
10075 reg_containing_return_addr = LR_REGNUM;
10078 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
10081 if (eh_ofs)
10082 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
10084 /* Return to caller. */
10085 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
10088 /* Emit code to push or pop registers to or from the stack. */
10090 static void
10091 thumb_pushpop (f, mask, push)
10092 FILE * f;
10093 int mask;
10094 int push;
10096 int regno;
10097 int lo_mask = mask & 0xFF;
10099 if (lo_mask == 0 && !push && (mask & (1 << 15)))
10101 /* Special case. Do not generate a POP PC statement here, do it in
10102 thumb_exit() */
10103 thumb_exit (f, -1, NULL_RTX);
10104 return;
10107 fprintf (f, "\t%s\t{", push ? "push" : "pop");
10109 /* Look at the low registers first. */
10110 for (regno = 0; regno <= LAST_LO_REGNUM; regno++, lo_mask >>= 1)
10112 if (lo_mask & 1)
10114 asm_fprintf (f, "%r", regno);
10116 if ((lo_mask & ~1) != 0)
10117 fprintf (f, ", ");
10121 if (push && (mask & (1 << LR_REGNUM)))
10123 /* Catch pushing the LR. */
10124 if (mask & 0xFF)
10125 fprintf (f, ", ");
10127 asm_fprintf (f, "%r", LR_REGNUM);
10129 else if (!push && (mask & (1 << PC_REGNUM)))
10131 /* Catch popping the PC. */
10132 if (TARGET_INTERWORK || TARGET_BACKTRACE)
10134 /* The PC is never popped directly; instead
10135 it is popped into r3 and then BX is used. */
10136 fprintf (f, "}\n");
10138 thumb_exit (f, -1, NULL_RTX);
10140 return;
10142 else
10144 if (mask & 0xFF)
10145 fprintf (f, ", ");
10147 asm_fprintf (f, "%r", PC_REGNUM);
10151 fprintf (f, "}\n");
10154 void
10155 thumb_final_prescan_insn (insn)
10156 rtx insn;
10158 if (flag_print_asm_name)
10159 asm_fprintf (asm_out_file, "%@ 0x%04x\n",
10160 INSN_ADDRESSES (INSN_UID (insn)));
10163 int
10164 thumb_shiftable_const (val)
10165 unsigned HOST_WIDE_INT val;
10167 unsigned HOST_WIDE_INT mask = 0xff;
10168 int i;
10170 if (val == 0) /* XXX */
10171 return 0;
10173 for (i = 0; i < 25; i++)
10174 if ((val & (mask << i)) == val)
10175 return 1;
10177 return 0;
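/* For example, 0x1FE (0xFF << 1) is shiftable, but 0x101 is not: its
   set bits span nine positions, so no shift of an 8-bit mask can
   cover them.  */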
10180 /* Returns nonzero if the current function contains,
10181 or might contain a far jump. */
10183 int
10184 thumb_far_jump_used_p (in_prologue)
10185 int in_prologue;
10187 rtx insn;
10189 /* This test is only important for leaf functions. */
10190 /* assert (!leaf_function_p ()); */
10192 /* If we have already decided that far jumps may be used,
10193 do not bother checking again, and always return true even if
10194 it turns out that they are not being used. Once we have made
10195 the decision that far jumps are present (and that hence the link
10196 register will be pushed onto the stack) we cannot go back on it. */
10197 if (cfun->machine->far_jump_used)
10198 return 1;
10200 /* If this function is not being called from the prologue/epilogue
10201 generation code then it must be being called from the
10202 INITIAL_ELIMINATION_OFFSET macro. */
10203 if (!in_prologue)
10205 /* In this case we know that we are being asked about the elimination
10206 of the arg pointer register. If that register is not being used,
10207 then there are no arguments on the stack, and we do not have to
10208 worry that a far jump might force the prologue to push the link
10209 register, changing the stack offsets. In this case we can just
10210 return false, since the presence of far jumps in the function will
10211 not affect stack offsets.
10213 If the arg pointer is live (or if it was live, but has now been
10214 eliminated and so set to dead) then we do have to test to see if
10215 the function might contain a far jump. This test can lead to some
10216 false negatives, since before reload is completed the length of
10217 branch instructions is not known, so gcc defaults to returning their
10218 longest length, which in turn sets the far jump attribute to true.
10220 A false negative will not result in bad code being generated, but it
10221 will result in a needless push and pop of the link register. We
10222 hope that this does not occur too often. */
10223 if (regs_ever_live [ARG_POINTER_REGNUM])
10224 cfun->machine->arg_pointer_live = 1;
10225 else if (!cfun->machine->arg_pointer_live)
10226 return 0;
10229 /* Check to see if the function contains a branch
10230 insn with the far jump attribute set. */
10231 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
10233 if (GET_CODE (insn) == JUMP_INSN
10234 /* Ignore tablejump patterns. */
10235 && GET_CODE (PATTERN (insn)) != ADDR_VEC
10236 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
10237 && get_attr_far_jump (insn) == FAR_JUMP_YES
10240 /* Record the fact that we have decided that
10241 the function does use far jumps. */
10242 cfun->machine->far_jump_used = 1;
10243 return 1;
10247 return 0;
10250 /* Return nonzero if FUNC must be entered in ARM mode. */
10252 int
10253 is_called_in_ARM_mode (func)
10254 tree func;
10256 if (TREE_CODE (func) != FUNCTION_DECL)
10257 abort ();
10259 /* Ignore the problem about functions whose address is taken. */
10260 if (TARGET_CALLEE_INTERWORKING && TREE_PUBLIC (func))
10261 return TRUE;
10263 #ifdef ARM_PE
10264 return lookup_attribute ("interfacearm", DECL_ATTRIBUTES (func)) != NULL_TREE;
10265 #else
10266 return FALSE;
10267 #endif
10270 /* The bits which aren't usefully expanded as rtl. */
10272 const char *
10273 thumb_unexpanded_epilogue ()
10275 int regno;
10276 int live_regs_mask = 0;
10277 int high_regs_pushed = 0;
10278 int leaf_function = leaf_function_p ();
10279 int had_to_push_lr;
10280 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
10282 if (return_used_this_function)
10283 return "";
10285 if (IS_NAKED (arm_current_func_type ()))
10286 return "";
10288 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
10289 if (THUMB_REG_PUSHED_P (regno))
10290 live_regs_mask |= 1 << regno;
10292 for (regno = 8; regno < 13; regno++)
10293 if (THUMB_REG_PUSHED_P (regno))
10294 high_regs_pushed++;
10296 /* The prologue may have pushed some high registers to use as
10297 work registers. For example, the testsuite file
10298 gcc/testsuite/gcc.c-torture/execute/complex-2.c
10299 compiles to produce:
10300 push {r4, r5, r6, r7, lr}
10301 mov r7, r9
10302 mov r6, r8
10303 push {r6, r7}
10304 as part of the prologue. We have to undo that pushing here. */
10306 if (high_regs_pushed)
10308 int mask = live_regs_mask;
10309 int next_hi_reg;
10310 int size;
10311 int mode;
10313 #ifdef RTX_CODE
10314 /* Where possible, deduce the registers used from the function's return
10315 value. This is more reliable than examining regs_ever_live[], because
10316 that will be set if the register is ever used in the function, not just
10317 if the register is used to hold a return value. */
10319 if (current_function_return_rtx != 0)
10320 mode = GET_MODE (current_function_return_rtx);
10321 else
10322 #endif
10323 mode = DECL_MODE (DECL_RESULT (current_function_decl));
10325 size = GET_MODE_SIZE (mode);
10327 /* Unless we are returning a type of size greater than 12 bytes,
10328 register r3 is available. */
10329 if (size < 13)
10330 mask |= 1 << 3;
10332 if (mask == 0)
10333 /* Oh dear! We have no low registers into which we can pop
10334 high registers! */
10335 internal_error
10336 ("no low registers available for popping high registers");
10338 for (next_hi_reg = 8; next_hi_reg < 13; next_hi_reg++)
10339 if (THUMB_REG_PUSHED_P (next_hi_reg))
10340 break;
10342 while (high_regs_pushed)
10344 /* Find lo register(s) into which the high register(s) can
10345 be popped. */
10346 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
10348 if (mask & (1 << regno))
10349 high_regs_pushed--;
10350 if (high_regs_pushed == 0)
10351 break;
10354 mask &= (2 << regno) - 1; /* A noop if regno == 8 */
10356 /* Pop the values into the low register(s). */
10357 thumb_pushpop (asm_out_file, mask, 0);
10359 /* Move the value(s) into the high registers. */
10360 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
10362 if (mask & (1 << regno))
10364 asm_fprintf (asm_out_file, "\tmov\t%r, %r\n", next_hi_reg,
10365 regno);
10367 for (next_hi_reg++; next_hi_reg < 13; next_hi_reg++)
10368 if (THUMB_REG_PUSHED_P (next_hi_reg))
10369 break;
10375 had_to_push_lr = (live_regs_mask || !leaf_function
10376 || thumb_far_jump_used_p (1));
10378 if (TARGET_BACKTRACE
10379 && ((live_regs_mask & 0xFF) == 0)
10380 && regs_ever_live [LAST_ARG_REGNUM] != 0)
10382 /* The stack backtrace structure creation code had to
10383 push R7 in order to get a work register, so we pop
10384 it now. */
10385 live_regs_mask |= (1 << LAST_LO_REGNUM);
10388 if (current_function_pretend_args_size == 0 || TARGET_BACKTRACE)
10390 if (had_to_push_lr
10391 && !is_called_in_ARM_mode (current_function_decl)
10392 && !eh_ofs)
10393 live_regs_mask |= 1 << PC_REGNUM;
10395 /* Either no argument registers were pushed or a backtrace
10396 structure was created which includes an adjusted stack
10397 pointer, so just pop everything. */
10398 if (live_regs_mask)
10399 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
10401 if (eh_ofs)
10402 thumb_exit (asm_out_file, 2, eh_ofs);
10403 /* We have either just popped the return address into the
10404 PC, or it was kept in LR for the entire function, or
10405 it is still on the stack because we do not want to
10406 return by doing a pop {pc}. */
10407 else if ((live_regs_mask & (1 << PC_REGNUM)) == 0)
10408 thumb_exit (asm_out_file,
10409 (had_to_push_lr
10410 && is_called_in_ARM_mode (current_function_decl)) ?
10411 -1 : LR_REGNUM, NULL_RTX);
10413 else
10415 /* Pop everything but the return address. */
10416 live_regs_mask &= ~(1 << PC_REGNUM);
10418 if (live_regs_mask)
10419 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
10421 if (had_to_push_lr)
10422 /* Get the return address into a temporary register. */
10423 thumb_pushpop (asm_out_file, 1 << LAST_ARG_REGNUM, 0);
10425 /* Remove the argument registers that were pushed onto the stack. */
10426 asm_fprintf (asm_out_file, "\tadd\t%r, %r, #%d\n",
10427 SP_REGNUM, SP_REGNUM,
10428 current_function_pretend_args_size);
10430 if (eh_ofs)
10431 thumb_exit (asm_out_file, 2, eh_ofs);
10432 else
10433 thumb_exit (asm_out_file,
10434 had_to_push_lr ? LAST_ARG_REGNUM : LR_REGNUM, NULL_RTX);
10437 return "";
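/* As a sketch of the high-register restore above: if r8 and r9 were
   saved and r0/r1 are usable, the emitted sequence would look like
	pop	{r0, r1}
	mov	r8, r0
	mov	r9, r1
   since the Thumb pop instruction cannot target high registers.  */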
10440 /* Functions to save and restore machine-specific function data. */
10442 static struct machine_function *
10443 arm_init_machine_status ()
10445 struct machine_function *machine;
10446 machine = (machine_function *) ggc_alloc_cleared (sizeof (machine_function));
10448 #if ARM_FT_UNKNOWN != 0
10449 machine->func_type = ARM_FT_UNKNOWN;
10450 #endif
10451 return machine;
10454 /* Return an RTX indicating where the return address to the
10455 calling function can be found. */
10458 arm_return_addr (count, frame)
10459 int count;
10460 rtx frame ATTRIBUTE_UNUSED;
10462 if (count != 0)
10463 return NULL_RTX;
10465 if (TARGET_APCS_32)
10466 return get_hard_reg_initial_val (Pmode, LR_REGNUM);
10467 else
10469 rtx lr = gen_rtx_AND (Pmode, gen_rtx_REG (Pmode, LR_REGNUM),
10470 GEN_INT (RETURN_ADDR_MASK26));
10471 return get_func_hard_reg_initial_val (cfun, lr);
10475 /* Do anything needed before RTL is emitted for each function. */
10477 void
10478 arm_init_expanders ()
10480 /* Arrange to initialize and mark the machine per-function status. */
10481 init_machine_status = arm_init_machine_status;
10484 HOST_WIDE_INT
10485 thumb_get_frame_size ()
10487 int regno;
10489 int base_size = ROUND_UP_WORD (get_frame_size ());
10490 int count_regs = 0;
10491 int entry_size = 0;
10492 int leaf;
10494 if (! TARGET_THUMB)
10495 abort ();
10497 if (! TARGET_ATPCS)
10498 return base_size;
10500 /* We need to know if we are a leaf function. Unfortunately, it
10501 is possible to be called after start_sequence has been called,
10502 which causes get_insns to return the insns for the sequence,
10503 not the function, which will cause leaf_function_p to return
10504 the incorrect result.
10506 To work around this, we cache the computed frame size. This
10507 works because we will only be calling RTL expanders that need
10508 to know about leaf functions once reload has completed, and the
10509 frame size cannot be changed after that time, so we can safely
10510 use the cached value. */
10512 if (reload_completed)
10513 return cfun->machine->frame_size;
10515 leaf = leaf_function_p ();
10517 /* A leaf function does not need any stack alignment if it has nothing
10518 on the stack. */
10519 if (leaf && base_size == 0)
10521 cfun->machine->frame_size = 0;
10522 return 0;
10525 /* We know that SP will be word aligned on entry, and we must
10526 preserve that condition at any subroutine call. But those are
10527 the only constraints. */
10529 /* Space for variadic functions. */
10530 if (current_function_pretend_args_size)
10531 entry_size += current_function_pretend_args_size;
10533 /* Space for pushed lo registers. */
10534 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
10535 if (THUMB_REG_PUSHED_P (regno))
10536 count_regs++;
10538 /* Space for backtrace structure. */
10539 if (TARGET_BACKTRACE)
10541 if (count_regs == 0 && regs_ever_live[LAST_ARG_REGNUM] != 0)
10542 entry_size += 20;
10543 else
10544 entry_size += 16;
10547 if (count_regs || !leaf || thumb_far_jump_used_p (1))
10548 count_regs++; /* LR */
10550 entry_size += count_regs * 4;
10551 count_regs = 0;
10553 /* Space for pushed hi regs. */
10554 for (regno = 8; regno < 13; regno++)
10555 if (THUMB_REG_PUSHED_P (regno))
10556 count_regs++;
10558 entry_size += count_regs * 4;
10560 if ((entry_size + base_size + current_function_outgoing_args_size) & 7)
10561 base_size += 4;
10562 if ((entry_size + base_size + current_function_outgoing_args_size) & 7)
10563 abort ();
10565 cfun->machine->frame_size = base_size;
10567 return base_size;
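/* A worked example of the ATPCS alignment above: with 20 bytes of
   locals, three low registers plus LR pushed (entry_size == 16) and no
   outgoing arguments, the total of 36 is not a multiple of 8, so
   base_size is padded to 24, giving a total of 40 and restoring the
   required 8-byte stack alignment.  */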
10570 /* Generate the rest of a function's prologue. */
10572 void
10573 thumb_expand_prologue ()
10575 HOST_WIDE_INT amount = (thumb_get_frame_size ()
10576 + current_function_outgoing_args_size);
10577 unsigned long func_type;
10579 func_type = arm_current_func_type ();
10581 /* Naked functions don't have prologues. */
10582 if (IS_NAKED (func_type))
10583 return;
10585 if (IS_INTERRUPT (func_type))
10587 error ("interrupt service routines cannot be coded in Thumb mode");
10588 return;
10591 if (frame_pointer_needed)
10592 emit_insn (gen_movsi (hard_frame_pointer_rtx, stack_pointer_rtx));
10594 if (amount)
10596 amount = ROUND_UP_WORD (amount);
10598 if (amount < 512)
10599 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
10600 GEN_INT (- amount)));
10601 else
10603 int regno;
10604 rtx reg;
10606 /* The stack decrement is too big for an immediate value in a single
10607 insn. In theory we could issue multiple subtracts, but after
10608 three of them it becomes more space efficient to place the full
10609 value in the constant pool and load into a register. (Also the
10610 ARM debugger really likes to see only one stack decrement per
10611 function). So instead we look for a scratch register into which
10612 we can load the decrement, and then we subtract this from the
10613 stack pointer. Unfortunately, on the Thumb the only available
10614 scratch registers are the argument registers, and we cannot use
10615 these as they may hold arguments to the function. Instead we
10616 attempt to locate a call preserved register which is used by this
10617 function. If we can find one, then we know that it will have
10618 been pushed at the start of the prologue and so we can corrupt
10619 it now. */
10620 for (regno = LAST_ARG_REGNUM + 1; regno <= LAST_LO_REGNUM; regno++)
10621 if (THUMB_REG_PUSHED_P (regno)
10622 && !(frame_pointer_needed
10623 && (regno == THUMB_HARD_FRAME_POINTER_REGNUM)))
10624 break;
10626 if (regno > LAST_LO_REGNUM) /* Very unlikely. */
10628 rtx spare = gen_rtx (REG, SImode, IP_REGNUM);
10630 /* Choose an arbitrary, non-argument low register. */
10631 reg = gen_rtx (REG, SImode, LAST_LO_REGNUM);
10633 /* Save it by copying it into a high, scratch register. */
10634 emit_insn (gen_movsi (spare, reg));
10635 /* Add a USE to stop propagate_one_insn() from barfing. */
10636 emit_insn (gen_prologue_use (spare));
10638 /* Decrement the stack. */
10639 emit_insn (gen_movsi (reg, GEN_INT (- amount)));
10640 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
10641 reg));
10643 /* Restore the low register's original value. */
10644 emit_insn (gen_movsi (reg, spare));
10646 /* Emit a USE of the restored scratch register, so that flow
10647 analysis will not consider the restore redundant. The
10648 register won't be used again in this function and isn't
10649 restored by the epilogue. */
10650 emit_insn (gen_prologue_use (reg));
10652 else
10654 reg = gen_rtx (REG, SImode, regno);
10656 emit_insn (gen_movsi (reg, GEN_INT (- amount)));
10657 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
10658 reg));
10663 if (current_function_profile || TARGET_NO_SCHED_PRO)
10664 emit_insn (gen_blockage ());
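/* Schematically, a large frame with r4 saved in the prologue is
   allocated as something like
	ldr	r4, .LCn	@ .LCn holds -<frame size>
	add	sp, sp, r4
   (the constant comes from the literal pool), while small frames use a
   single immediate subtract.  */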
10667 void
10668 thumb_expand_epilogue ()
10670 HOST_WIDE_INT amount = (thumb_get_frame_size ()
10671 + current_function_outgoing_args_size);
10673 /* Naked functions don't have epilogues. */
10674 if (IS_NAKED (arm_current_func_type ()))
10675 return;
10677 if (frame_pointer_needed)
10678 emit_insn (gen_movsi (stack_pointer_rtx, hard_frame_pointer_rtx));
10679 else if (amount)
10681 amount = ROUND_UP_WORD (amount);
10683 if (amount < 512)
10684 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
10685 GEN_INT (amount)));
10686 else
10688 /* r3 is always free in the epilogue. */
10689 rtx reg = gen_rtx (REG, SImode, LAST_ARG_REGNUM);
10691 emit_insn (gen_movsi (reg, GEN_INT (amount)));
10692 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
10696 /* Emit a USE (stack_pointer_rtx), so that
10697 the stack adjustment will not be deleted. */
10698 emit_insn (gen_prologue_use (stack_pointer_rtx));
10700 if (current_function_profile || TARGET_NO_SCHED_PRO)
10701 emit_insn (gen_blockage ());
10704 static void
10705 thumb_output_function_prologue (f, size)
10706 FILE * f;
10707 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
10709 int live_regs_mask = 0;
10710 int high_regs_pushed = 0;
10711 int regno;
10713 if (IS_NAKED (arm_current_func_type ()))
10714 return;
10716 if (is_called_in_ARM_mode (current_function_decl))
10718 const char * name;
10720 if (GET_CODE (DECL_RTL (current_function_decl)) != MEM)
10721 abort ();
10722 if (GET_CODE (XEXP (DECL_RTL (current_function_decl), 0)) != SYMBOL_REF)
10723 abort ();
10724 name = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
10726 /* Generate code sequence to switch us into Thumb mode. */
10727 /* The .code 32 directive has already been emitted by
10728 ASM_DECLARE_FUNCTION_NAME. */
10729 asm_fprintf (f, "\torr\t%r, %r, #1\n", IP_REGNUM, PC_REGNUM);
10730 asm_fprintf (f, "\tbx\t%r\n", IP_REGNUM);
10732 /* Generate a label, so that the debugger will notice the
10733 change in instruction sets. This label is also used by
10734 the assembler to bypass the ARM code when this function
10735 is called from a Thumb encoded function elsewhere in the
10736 same file. Hence the definition of STUB_NAME here must
10737 agree with the definition in gas/config/tc-arm.c */
10739 #define STUB_NAME ".real_start_of"
10741 fprintf (f, "\t.code\t16\n");
10742 #ifdef ARM_PE
10743 if (arm_dllexport_name_p (name))
10744 name = arm_strip_name_encoding (name);
10745 #endif
10746 asm_fprintf (f, "\t.globl %s%U%s\n", STUB_NAME, name);
10747 fprintf (f, "\t.thumb_func\n");
10748 asm_fprintf (f, "%s%U%s:\n", STUB_NAME, name);
10751 if (current_function_pretend_args_size)
10753 if (cfun->machine->uses_anonymous_args)
10755 int num_pushes;
10757 fprintf (f, "\tpush\t{");
10759 num_pushes = ARM_NUM_INTS (current_function_pretend_args_size);
10761 for (regno = LAST_ARG_REGNUM + 1 - num_pushes;
10762 regno <= LAST_ARG_REGNUM;
10763 regno++)
10764 asm_fprintf (f, "%r%s", regno,
10765 regno == LAST_ARG_REGNUM ? "" : ", ");
10767 fprintf (f, "}\n");
10769 else
10770 asm_fprintf (f, "\tsub\t%r, %r, #%d\n",
10771 SP_REGNUM, SP_REGNUM,
10772 current_function_pretend_args_size);
10775 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
10776 if (THUMB_REG_PUSHED_P (regno))
10777 live_regs_mask |= 1 << regno;
10779 if (live_regs_mask || !leaf_function_p () || thumb_far_jump_used_p (1))
10780 live_regs_mask |= 1 << LR_REGNUM;
10782 if (TARGET_BACKTRACE)
10784 int offset;
10785 int work_register = 0;
10786 int wr;
10788 /* We have been asked to create a stack backtrace structure.
10789 The code looks like this:
10791 0 .align 2
10792 0 func:
10793 0 sub SP, #16 Reserve space for 4 registers.
10794 2 push {R7} Get a work register.
10795 4 add R7, SP, #20 Get the stack pointer before the push.
10796 6 str R7, [SP, #8] Store the stack pointer (before reserving the space).
10797 8 mov R7, PC Get hold of the start of this code plus 12.
10798 10 str R7, [SP, #16] Store it.
10799 12 mov R7, FP Get hold of the current frame pointer.
10800 14 str R7, [SP, #4] Store it.
10801 16 mov R7, LR Get hold of the current return address.
10802 18 str R7, [SP, #12] Store it.
10803 20 add R7, SP, #16 Point at the start of the backtrace structure.
10804 22 mov FP, R7 Put this value into the frame pointer. */
10806 if ((live_regs_mask & 0xFF) == 0)
10808 /* See if the a4 register is free. */
10810 if (regs_ever_live [LAST_ARG_REGNUM] == 0)
10811 work_register = LAST_ARG_REGNUM;
10812 else /* We must push a register of our own */
10813 live_regs_mask |= (1 << LAST_LO_REGNUM);
10816 if (work_register == 0)
10818 /* Select a register from the list that will be pushed to
10819 use as our work register. */
10820 for (work_register = (LAST_LO_REGNUM + 1); work_register--;)
10821 if ((1 << work_register) & live_regs_mask)
10822 break;
10825 asm_fprintf
10826 (f, "\tsub\t%r, %r, #16\t%@ Create stack backtrace structure\n",
10827 SP_REGNUM, SP_REGNUM);
10829 if (live_regs_mask)
10830 thumb_pushpop (f, live_regs_mask, 1);
10832 for (offset = 0, wr = 1 << 15; wr != 0; wr >>= 1)
10833 if (wr & live_regs_mask)
10834 offset += 4;
10836 asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
10837 offset + 16 + current_function_pretend_args_size);
10839 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
10840 offset + 4);
10842 /* Make sure that the instruction fetching the PC is in the right place
10843 to calculate "start of backtrace creation code + 12". */
10844 if (live_regs_mask)
10846 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
10847 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
10848 offset + 12);
10849 asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
10850 ARM_HARD_FRAME_POINTER_REGNUM);
10851 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
10852 offset);
10854 else
10856 asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
10857 ARM_HARD_FRAME_POINTER_REGNUM);
10858 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
10859 offset);
10860 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
10861 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
10862 offset + 12);
10865 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, LR_REGNUM);
10866 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
10867 offset + 8);
10868 asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
10869 offset + 12);
10870 asm_fprintf (f, "\tmov\t%r, %r\t\t%@ Backtrace structure created\n",
10871 ARM_HARD_FRAME_POINTER_REGNUM, work_register);
10873 else if (live_regs_mask)
10874 thumb_pushpop (f, live_regs_mask, 1);
10876 for (regno = 8; regno < 13; regno++)
10877 if (THUMB_REG_PUSHED_P (regno))
10878 high_regs_pushed++;
10880 if (high_regs_pushed)
10882 int pushable_regs = 0;
10883 int mask = live_regs_mask & 0xff;
10884 int next_hi_reg;
10886 for (next_hi_reg = 12; next_hi_reg > LAST_LO_REGNUM; next_hi_reg--)
10887 if (THUMB_REG_PUSHED_P (next_hi_reg))
10888 break;
10890 pushable_regs = mask;
10892 if (pushable_regs == 0)
10894 /* Desperation time -- this probably will never happen. */
10895 if (THUMB_REG_PUSHED_P (LAST_ARG_REGNUM))
10896 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, LAST_ARG_REGNUM);
10897 mask = 1 << LAST_ARG_REGNUM;
10900 while (high_regs_pushed > 0)
10902 for (regno = LAST_LO_REGNUM; regno >= 0; regno--)
10904 if (mask & (1 << regno))
10906 asm_fprintf (f, "\tmov\t%r, %r\n", regno, next_hi_reg);
10908 high_regs_pushed--;
10910 if (high_regs_pushed)
10912 for (next_hi_reg--; next_hi_reg > LAST_LO_REGNUM;
10913 next_hi_reg--)
10914 if (THUMB_REG_PUSHED_P (next_hi_reg))
10915 break;
10917 else
10919 mask &= ~((1 << regno) - 1);
10920 break;
10925 thumb_pushpop (f, mask, 1);
10928 if (pushable_regs == 0
10929 && (THUMB_REG_PUSHED_P (LAST_ARG_REGNUM)))
10930 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
10934 /* Handle the case of a double word load into a low register from
10935 a computed memory address. The computed address may involve a
10936 register which is overwritten by the load. */
10938 const char *
10939 thumb_load_double_from_address (operands)
10940 rtx *operands;
10942 rtx addr;
10943 rtx base;
10944 rtx offset;
10945 rtx arg1;
10946 rtx arg2;
10948 if (GET_CODE (operands[0]) != REG)
10949 abort ();
10951 if (GET_CODE (operands[1]) != MEM)
10952 abort ();
10954 /* Get the memory address. */
10955 addr = XEXP (operands[1], 0);
10957 /* Work out how the memory address is computed. */
10958 switch (GET_CODE (addr))
10960 case REG:
10961 operands[2] = gen_rtx (MEM, SImode,
10962 plus_constant (XEXP (operands[1], 0), 4));
10964 if (REGNO (operands[0]) == REGNO (addr))
10966 output_asm_insn ("ldr\t%H0, %2", operands);
10967 output_asm_insn ("ldr\t%0, %1", operands);
10969 else
10971 output_asm_insn ("ldr\t%0, %1", operands);
10972 output_asm_insn ("ldr\t%H0, %2", operands);
10974 break;
10976 case CONST:
10977 /* Compute <address> + 4 for the high order load. */
10978 operands[2] = gen_rtx (MEM, SImode,
10979 plus_constant (XEXP (operands[1], 0), 4));
10981 output_asm_insn ("ldr\t%0, %1", operands);
10982 output_asm_insn ("ldr\t%H0, %2", operands);
10983 break;
10985 case PLUS:
10986 arg1 = XEXP (addr, 0);
10987 arg2 = XEXP (addr, 1);
10989 if (CONSTANT_P (arg1))
10990 base = arg2, offset = arg1;
10991 else
10992 base = arg1, offset = arg2;
10994 if (GET_CODE (base) != REG)
10995 abort ();
10997 /* Catch the case of <address> = <reg> + <reg> */
10998 if (GET_CODE (offset) == REG)
11000 int reg_offset = REGNO (offset);
11001 int reg_base = REGNO (base);
11002 int reg_dest = REGNO (operands[0]);
11004 /* Add the base and offset registers together into the
11005 higher destination register. */
11006 asm_fprintf (asm_out_file, "\tadd\t%r, %r, %r\n",
11007 reg_dest + 1, reg_base, reg_offset);
11009 /* Load the lower destination register from the address in
11010 the higher destination register. */
11011 asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #0]\n",
11012 reg_dest, reg_dest + 1);
11014 /* Load the higher destination register from its own address
11015 plus 4. */
11016 asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #4]\n",
11017 reg_dest + 1, reg_dest + 1);
11019 else
11021 /* Compute <address> + 4 for the high order load. */
11022 operands[2] = gen_rtx (MEM, SImode,
11023 plus_constant (XEXP (operands[1], 0), 4));
11025 /* If the computed address is held in the low order register
11026 then load the high order register first, otherwise always
11027 load the low order register first. */
11028 if (REGNO (operands[0]) == REGNO (base))
11030 output_asm_insn ("ldr\t%H0, %2", operands);
11031 output_asm_insn ("ldr\t%0, %1", operands);
11033 else
11035 output_asm_insn ("ldr\t%0, %1", operands);
11036 output_asm_insn ("ldr\t%H0, %2", operands);
11039 break;
11041 case LABEL_REF:
11042 /* With no registers to worry about we can just load the value
11043 directly. */
11044 operands[2] = gen_rtx (MEM, SImode,
11045 plus_constant (XEXP (operands[1], 0), 4));
11047 output_asm_insn ("ldr\t%H0, %2", operands);
11048 output_asm_insn ("ldr\t%0, %1", operands);
11049 break;
11051 default:
11052 abort ();
11053 break;
11056 return "";
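/* Example of the overlap handling above: for a doubleword load where
   the destination pair is r0/r1 and the base register is also r0, the
   high word is fetched first:
	ldr	r1, [r0, #4]
	ldr	r0, [r0]
   so the base is not clobbered before its final use.  */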
11060 const char *
11061 thumb_output_move_mem_multiple (n, operands)
11062 int n;
11063 rtx * operands;
11065 rtx tmp;
11067 switch (n)
11069 case 2:
11070 if (REGNO (operands[4]) > REGNO (operands[5]))
11072 tmp = operands[4];
11073 operands[4] = operands[5];
11074 operands[5] = tmp;
11076 output_asm_insn ("ldmia\t%1!, {%4, %5}", operands);
11077 output_asm_insn ("stmia\t%0!, {%4, %5}", operands);
11078 break;
11080 case 3:
11081 if (REGNO (operands[4]) > REGNO (operands[5]))
11083 tmp = operands[4];
11084 operands[4] = operands[5];
11085 operands[5] = tmp;
11087 if (REGNO (operands[5]) > REGNO (operands[6]))
11089 tmp = operands[5];
11090 operands[5] = operands[6];
11091 operands[6] = tmp;
11093 if (REGNO (operands[4]) > REGNO (operands[5]))
11095 tmp = operands[4];
11096 operands[4] = operands[5];
11097 operands[5] = tmp;
11100 output_asm_insn ("ldmia\t%1!, {%4, %5, %6}", operands);
11101 output_asm_insn ("stmia\t%0!, {%4, %5, %6}", operands);
11102 break;
11104 default:
11105 abort ();
11108 return "";
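/* The swapping above exists because ldmia/stmia require ascending
   register order in their lists.  For instance, if operands 4 and 5
   arrive as r5 and r4, they are exchanged so that the output is
	ldmia	r1!, {r4, r5}
	stmia	r0!, {r4, r5}  */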
11111 /* Routines for generating rtl. */
11113 void
11114 thumb_expand_movstrqi (operands)
11115 rtx * operands;
11117 rtx out = copy_to_mode_reg (SImode, XEXP (operands[0], 0));
11118 rtx in = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
11119 HOST_WIDE_INT len = INTVAL (operands[2]);
11120 HOST_WIDE_INT offset = 0;
11122 while (len >= 12)
11124 emit_insn (gen_movmem12b (out, in, out, in));
11125 len -= 12;
11128 if (len >= 8)
11130 emit_insn (gen_movmem8b (out, in, out, in));
11131 len -= 8;
11134 if (len >= 4)
11136 rtx reg = gen_reg_rtx (SImode);
11137 emit_insn (gen_movsi (reg, gen_rtx (MEM, SImode, in)));
11138 emit_insn (gen_movsi (gen_rtx (MEM, SImode, out), reg));
11139 len -= 4;
11140 offset += 4;
11143 if (len >= 2)
11145 rtx reg = gen_reg_rtx (HImode);
11146 emit_insn (gen_movhi (reg, gen_rtx (MEM, HImode,
11147 plus_constant (in, offset))));
11148 emit_insn (gen_movhi (gen_rtx (MEM, HImode, plus_constant (out, offset)),
11149 reg));
11150 len -= 2;
11151 offset += 2;
11154 if (len)
11156 rtx reg = gen_reg_rtx (QImode);
11157 emit_insn (gen_movqi (reg, gen_rtx (MEM, QImode,
11158 plus_constant (in, offset))));
11159 emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (out, offset)),
11160 reg));
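/* A worked example of the schedule above: for a 23 byte copy the
   expansion is one 12-byte ldmia/stmia pair, one 8-byte pair, then a
   halfword move and a final byte move (12 + 8 + 2 + 1 == 23).  */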
11165 thumb_cmp_operand (op, mode)
11166 rtx op;
11167 enum machine_mode mode;
11169 return ((GET_CODE (op) == CONST_INT
11170 && (unsigned HOST_WIDE_INT) (INTVAL (op)) < 256)
11171 || register_operand (op, mode));
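/* This mirrors the Thumb CMP immediate encoding, which accepts only an
   8-bit unsigned constant: (const_int 255) is usable directly, while
   (const_int 256) must first be loaded into a register.  */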
11174 static const char *
11175 thumb_condition_code (x, invert)
11176 rtx x;
11177 int invert;
11179 static const char * const conds[] =
11181 "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
11182 "hi", "ls", "ge", "lt", "gt", "le"
11184 int val;
11186 switch (GET_CODE (x))
11188 case EQ: val = 0; break;
11189 case NE: val = 1; break;
11190 case GEU: val = 2; break;
11191 case LTU: val = 3; break;
11192 case GTU: val = 8; break;
11193 case LEU: val = 9; break;
11194 case GE: val = 10; break;
11195 case LT: val = 11; break;
11196 case GT: val = 12; break;
11197 case LE: val = 13; break;
11198 default:
11199 abort ();
11202 return conds[val ^ invert];
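/* The table is arranged so that each condition is adjacent to its
   logical inverse, which is what makes the XOR with INVERT work: GTU
   maps to "hi" (index 8), and inverting it yields "ls" (index 9).  */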
11205 /* Handle storing a half-word to memory during reload. */
11207 void
11208 thumb_reload_out_hi (operands)
11209 rtx * operands;
11211 emit_insn (gen_thumb_movhi_clobber (operands[0], operands[1], operands[2]));
11214 /* Handle storing a half-word to memory during reload. */
11216 void
11217 thumb_reload_in_hi (operands)
11218 rtx * operands ATTRIBUTE_UNUSED;
11220 abort ();
11223 /* Return the length of a function name prefix
11224 that starts with the character 'c'. */
11226 static int
11227 arm_get_strip_length (c)
11228 int c;
11230 switch (c)
11232 ARM_NAME_ENCODING_LENGTHS
11233 default: return 0;
11237 /* Return a pointer to a function's name with any
11238 and all prefix encodings stripped from it. */
11240 const char *
11241 arm_strip_name_encoding (name)
11242 const char * name;
11244 int skip;
11246 while ((skip = arm_get_strip_length (* name)))
11247 name += skip;
11249 return name;
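/* Hypothetical example: a symbol encoded with the long call flag
   character (see LONG_CALL_FLAG_CHAR and ARM_NAME_ENCODING_LENGTHS in
   the backend headers) followed by "foo" is stripped one prefix at a
   time until only "foo" remains.  */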
11252 /* If there is a '*' anywhere in the name's prefix, then
11253 emit the stripped name verbatim, otherwise prepend an
11254 underscore if leading underscores are being used. */
11256 void
11257 arm_asm_output_labelref (stream, name)
11258 FILE * stream;
11259 const char * name;
11261 int skip;
11262 int verbatim = 0;
11264 while ((skip = arm_get_strip_length (* name)))
11266 verbatim |= (*name == '*');
11267 name += skip;
11270 if (verbatim)
11271 fputs (name, stream);
11272 else
11273 asm_fprintf (stream, "%U%s", name);
11276 rtx aof_pic_label;
11278 #ifdef AOF_ASSEMBLER
11279 /* Special functions only needed when producing AOF syntax assembler. */
11281 struct pic_chain
11283 struct pic_chain * next;
11284 const char * symname;
11287 static struct pic_chain * aof_pic_chain = NULL;
11290 aof_pic_entry (x)
11291 rtx x;
11293 struct pic_chain ** chainp;
11294 int offset;
11296 if (aof_pic_label == NULL_RTX)
11298 aof_pic_label = gen_rtx_SYMBOL_REF (Pmode, "x$adcons");
11301 for (offset = 0, chainp = &aof_pic_chain; *chainp;
11302 offset += 4, chainp = &(*chainp)->next)
11303 if ((*chainp)->symname == XSTR (x, 0))
11304 return plus_constant (aof_pic_label, offset);
11306 *chainp = (struct pic_chain *) xmalloc (sizeof (struct pic_chain));
11307 (*chainp)->next = NULL;
11308 (*chainp)->symname = XSTR (x, 0);
11309 return plus_constant (aof_pic_label, offset);
11312 void
11313 aof_dump_pic_table (f)
11314 FILE * f;
11316 struct pic_chain * chain;
11318 if (aof_pic_chain == NULL)
11319 return;
11321 asm_fprintf (f, "\tAREA |%r$$adcons|, BASED %r\n",
11322 PIC_OFFSET_TABLE_REGNUM,
11323 PIC_OFFSET_TABLE_REGNUM);
11324 fputs ("|x$adcons|\n", f);
11326 for (chain = aof_pic_chain; chain; chain = chain->next)
11328 fputs ("\tDCD\t", f);
11329 assemble_name (f, chain->symname);
11330 fputs ("\n", f);
11334 int arm_text_section_count = 1;
11336 char *
11337 aof_text_section ()
11339 static char buf[100];
11340 sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
11341 arm_text_section_count++);
11342 if (flag_pic)
11343 strcat (buf, ", PIC, REENTRANT");
11344 return buf;
11347 static int arm_data_section_count = 1;
11349 char *
11350 aof_data_section ()
11352 static char buf[100];
11353 sprintf (buf, "\tAREA |C$$data%d|, DATA", arm_data_section_count++);
11354 return buf;
11357 /* The AOF assembler is religiously strict about declarations of
11358 imported and exported symbols, so that it is impossible to declare
11359 a function as imported near the beginning of the file, and then to
11360 export it later on. It is, however, possible to delay the decision
11361 until all the functions in the file have been compiled. To get
11362 around this, we maintain a list of the imports and exports, and
11363 delete from it any that are subsequently defined. At the end of
11364 compilation we spit the remainder of the list out before the END
11365 directive. */
11367 struct import
11369 struct import * next;
11370 const char * name;
11373 static struct import * imports_list = NULL;
11375 void
11376 aof_add_import (name)
11377 const char * name;
11379 struct import * new;
11381 for (new = imports_list; new; new = new->next)
11382 if (new->name == name)
11383 return;
11385 new = (struct import *) xmalloc (sizeof (struct import));
11386 new->next = imports_list;
11387 imports_list = new;
11388 new->name = name;
11391 void
11392 aof_delete_import (name)
11393 const char * name;
11395 struct import ** old;
11397 for (old = &imports_list; *old; old = & (*old)->next)
11399 if ((*old)->name == name)
11401 *old = (*old)->next;
11402 return;
11407 int arm_main_function = 0;
11409 void
11410 aof_dump_imports (f)
11411 FILE * f;
11413 /* The AOF assembler needs this to cause the startup code to be extracted
11414 from the library. Bringing in __main causes the whole thing to work
11415 automagically. */
11416 if (arm_main_function)
11418 text_section ();
11419 fputs ("\tIMPORT __main\n", f);
11420 fputs ("\tDCD __main\n", f);
11423 /* Now dump the remaining imports. */
11424 while (imports_list)
11426 fprintf (f, "\tIMPORT\t");
11427 assemble_name (f, imports_list->name);
11428 fputc ('\n', f);
11429 imports_list = imports_list->next;
11433 static void
11434 aof_globalize_label (stream, name)
11435 FILE *stream;
11436 const char *name;
11438 default_globalize_label (stream, name);
11439 if (! strcmp (name, "main"))
11440 arm_main_function = 1;
11442 #endif /* AOF_ASSEMBLER */
11444 #ifdef OBJECT_FORMAT_ELF
11445 /* Switch to an arbitrary section NAME with attributes as specified
11446 by FLAGS. ALIGN specifies any known alignment requirements for
11447 the section; 0 if the default should be used.
11449 Differs from the default ELF version only in the prefix character
11450 used before the section type. */
11452 static void
11453 arm_elf_asm_named_section (name, flags)
11454 const char *name;
11455 unsigned int flags;
11457 char flagchars[10], *f = flagchars;
11459 if (! named_section_first_declaration (name))
11461 fprintf (asm_out_file, "\t.section\t%s\n", name);
11462 return;
11465 if (!(flags & SECTION_DEBUG))
11466 *f++ = 'a';
11467 if (flags & SECTION_WRITE)
11468 *f++ = 'w';
11469 if (flags & SECTION_CODE)
11470 *f++ = 'x';
11471 if (flags & SECTION_SMALL)
11472 *f++ = 's';
11473 if (flags & SECTION_MERGE)
11474 *f++ = 'M';
11475 if (flags & SECTION_STRINGS)
11476 *f++ = 'S';
11477 if (flags & SECTION_TLS)
11478 *f++ = 'T';
11479 *f = '\0';
11481 fprintf (asm_out_file, "\t.section\t%s,\"%s\"", name, flagchars);
11483 if (!(flags & SECTION_NOTYPE))
11485 const char *type;
11487 if (flags & SECTION_BSS)
11488 type = "nobits";
11489 else
11490 type = "progbits";
11492 fprintf (asm_out_file, ",%%%s", type);
11494 if (flags & SECTION_ENTSIZE)
11495 fprintf (asm_out_file, ",%d", flags & SECTION_ENTSIZE);
11498 putc ('\n', asm_out_file);
11500 #endif
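/* Under the ELF hook above, a writable named section might, for
   instance, be emitted as
	.section .data.foo,"aw",%progbits
   where the default hook would have written '@progbits'; '@' starts a
   comment in ARM assembler syntax, hence the '%' prefix.  */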
11502 #ifndef ARM_PE
11503 /* Symbols in the text segment can be accessed without indirecting via the
11504 constant pool; it may take an extra binary operation, but this is still
11505 faster than indirecting via memory. Don't do this when not optimizing,
11506 since we won't be calculating all of the offsets necessary to do this
11507 simplification. */
11509 static void
11510 arm_encode_section_info (decl, first)
11511 tree decl;
11512 int first;
11514 /* This doesn't work with AOF syntax, since the string table may be in
11515 a different AREA. */
11516 #ifndef AOF_ASSEMBLER
11517 if (optimize > 0 && TREE_CONSTANT (decl)
11518 && (!flag_writable_strings || TREE_CODE (decl) != STRING_CST))
11520 rtx rtl = (TREE_CODE_CLASS (TREE_CODE (decl)) != 'd'
11521 ? TREE_CST_RTL (decl) : DECL_RTL (decl));
11522 SYMBOL_REF_FLAG (XEXP (rtl, 0)) = 1;
11524 #endif
11526 /* If we are referencing a function that is weak then encode a long call
11527 flag in the function name, otherwise if the function is static or
11528 known to be defined in this file then encode a short call flag. */
11529 if (first && TREE_CODE_CLASS (TREE_CODE (decl)) == 'd')
11531 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_WEAK (decl))
11532 arm_encode_call_attribute (decl, LONG_CALL_FLAG_CHAR);
11533 else if (! TREE_PUBLIC (decl))
11534 arm_encode_call_attribute (decl, SHORT_CALL_FLAG_CHAR);
11537 #endif /* !ARM_PE */
11539 static void
11540 arm_internal_label (stream, prefix, labelno)
11541 FILE *stream;
11542 const char *prefix;
11543 unsigned long labelno;
11545 if (arm_ccfsm_state == 3 && (unsigned) arm_target_label == labelno
11546 && !strcmp (prefix, "L"))
11548 arm_ccfsm_state = 0;
11549 arm_target_insn = NULL;
11551 default_internal_label (stream, prefix, labelno);
11554 /* Output code to add DELTA to the first argument, and then jump
11555 to FUNCTION. Used for C++ multiple inheritance. */
11557 static void
11558 arm_output_mi_thunk (file, thunk, delta, vcall_offset, function)
11559 FILE *file;
11560 tree thunk ATTRIBUTE_UNUSED;
11561 HOST_WIDE_INT delta;
11562 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED;
11563 tree function;
11565 int mi_delta = delta;
11566 const char *const mi_op = mi_delta < 0 ? "sub" : "add";
11567 int shift = 0;
11568 int this_regno = (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)))
11569 ? 1 : 0);
11570 if (mi_delta < 0)
11571 mi_delta = - mi_delta;
11572 while (mi_delta != 0)
11574 if ((mi_delta & (3 << shift)) == 0)
11575 shift += 2;
11576 else
11578 asm_fprintf (file, "\t%s\t%r, %r, #%d\n",
11579 mi_op, this_regno, this_regno,
11580 mi_delta & (0xff << shift));
11581 mi_delta &= ~(0xff << shift);
11582 shift += 8;
11585 fputs ("\tb\t", file);
11586 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
11587 if (NEED_PLT_RELOC)
11588 fputs ("(PLT)", file);
11589 fputc ('\n', file);
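/* A worked example of the delta decomposition above, assuming the this
   pointer is in r0: for delta == 0x10004 the loop skips the zero
   bit-pairs and emits
	add	r0, r0, #4
	add	r0, r0, #65536
   each immediate being an 8-bit value at an even bit position, which
   fits the ARM data-processing immediate encoding.  */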