Replace current_function_anonymous_args with cfun->machine->uses_anonymous_args
gcc/config/arm/arm.c
1 /* Output routines for GCC for ARM.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002
3 Free Software Foundation, Inc.
4 Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
5 and Martin Simmons (@harleqn.co.uk).
6 More major hacks by Richard Earnshaw (rearnsha@arm.com).
8 This file is part of GNU CC.
10 GNU CC is free software; you can redistribute it and/or modify
11 it under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 2, or (at your option)
13 any later version.
15 GNU CC is distributed in the hope that it will be useful,
16 but WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 GNU General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with GNU CC; see the file COPYING. If not, write to
22 the Free Software Foundation, 59 Temple Place - Suite 330,
23 Boston, MA 02111-1307, USA. */
25 #include "config.h"
26 #include "system.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "obstack.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "real.h"
33 #include "insn-config.h"
34 #include "conditions.h"
35 #include "output.h"
36 #include "insn-attr.h"
37 #include "flags.h"
38 #include "reload.h"
39 #include "function.h"
40 #include "expr.h"
41 #include "optabs.h"
42 #include "toplev.h"
43 #include "recog.h"
44 #include "ggc.h"
45 #include "except.h"
46 #include "c-pragma.h"
47 #include "integrate.h"
48 #include "tm_p.h"
49 #include "target.h"
50 #include "target-def.h"
52 /* Forward definitions of types. */
53 typedef struct minipool_node Mnode;
54 typedef struct minipool_fixup Mfix;
56 /* In order to improve the layout of the prototypes below
57 some short type abbreviations are defined here. */
58 #define Hint HOST_WIDE_INT
59 #define Mmode enum machine_mode
60 #define Ulong unsigned long
61 #define Ccstar const char *
63 const struct attribute_spec arm_attribute_table[];
65 /* Forward function declarations. */
66 static void arm_add_gc_roots PARAMS ((void));
67 static int arm_gen_constant PARAMS ((enum rtx_code, Mmode, Hint, rtx, rtx, int, int));
68 static Ulong bit_count PARAMS ((signed int));
69 static int const_ok_for_op PARAMS ((Hint, enum rtx_code));
70 static int eliminate_lr2ip PARAMS ((rtx *));
71 static rtx emit_multi_reg_push PARAMS ((int));
72 static rtx emit_sfm PARAMS ((int, int));
73 #ifndef AOF_ASSEMBLER
74 static bool arm_assemble_integer PARAMS ((rtx, unsigned int, int));
75 #endif
76 static Ccstar fp_const_from_val PARAMS ((REAL_VALUE_TYPE *));
77 static arm_cc get_arm_condition_code PARAMS ((rtx));
78 static void init_fpa_table PARAMS ((void));
79 static Hint int_log2 PARAMS ((Hint));
80 static rtx is_jump_table PARAMS ((rtx));
81 static Ccstar output_multi_immediate PARAMS ((rtx *, Ccstar, Ccstar, int, Hint));
82 static void print_multi_reg PARAMS ((FILE *, Ccstar, int, int));
83 static Mmode select_dominance_cc_mode PARAMS ((rtx, rtx, Hint));
84 static Ccstar shift_op PARAMS ((rtx, Hint *));
85 static void arm_init_machine_status PARAMS ((struct function *));
86 static void arm_mark_machine_status PARAMS ((struct function *));
87 static void arm_free_machine_status PARAMS ((struct function *));
88 static int number_of_first_bit_set PARAMS ((int));
89 static void replace_symbols_in_block PARAMS ((tree, rtx, rtx));
90 static void thumb_exit PARAMS ((FILE *, int, rtx));
91 static void thumb_pushpop PARAMS ((FILE *, int, int));
92 static Ccstar thumb_condition_code PARAMS ((rtx, int));
94 static Hint get_jump_table_size PARAMS ((rtx));
95 static Mnode * move_minipool_fix_forward_ref PARAMS ((Mnode *, Mnode *, Hint));
96 static Mnode * add_minipool_forward_ref PARAMS ((Mfix *));
97 static Mnode * move_minipool_fix_backward_ref PARAMS ((Mnode *, Mnode *, Hint));
98 static Mnode * add_minipool_backward_ref PARAMS ((Mfix *));
99 static void assign_minipool_offsets PARAMS ((Mfix *));
100 static void arm_print_value PARAMS ((FILE *, rtx));
101 static void dump_minipool PARAMS ((rtx));
102 static int arm_barrier_cost PARAMS ((rtx));
103 static Mfix * create_fix_barrier PARAMS ((Mfix *, Hint));
104 static void push_minipool_barrier PARAMS ((rtx, Hint));
105 static void push_minipool_fix PARAMS ((rtx, Hint, rtx *, Mmode, rtx));
106 static void note_invalid_constants PARAMS ((rtx, Hint));
107 static int current_file_function_operand PARAMS ((rtx));
108 static Ulong arm_compute_save_reg0_reg12_mask PARAMS ((void));
109 static Ulong arm_compute_save_reg_mask PARAMS ((void));
110 static Ulong arm_isr_value PARAMS ((tree));
111 static Ulong arm_compute_func_type PARAMS ((void));
112 static tree arm_handle_fndecl_attribute PARAMS ((tree *, tree, tree, int, bool *));
113 static tree arm_handle_isr_attribute PARAMS ((tree *, tree, tree, int, bool *));
114 static void arm_output_function_epilogue PARAMS ((FILE *, Hint));
115 static void arm_output_function_prologue PARAMS ((FILE *, Hint));
116 static void thumb_output_function_prologue PARAMS ((FILE *, Hint));
117 static int arm_comp_type_attributes PARAMS ((tree, tree));
118 static void arm_set_default_type_attributes PARAMS ((tree));
119 static int arm_adjust_cost PARAMS ((rtx, rtx, rtx, int));
120 #ifdef OBJECT_FORMAT_ELF
121 static void arm_elf_asm_named_section PARAMS ((const char *, unsigned int));
122 #endif
124 #undef Hint
125 #undef Mmode
126 #undef Ulong
127 #undef Ccstar
129 /* Initialize the GCC target structure. */
130 #ifdef TARGET_DLLIMPORT_DECL_ATTRIBUTES
131 #undef TARGET_MERGE_DECL_ATTRIBUTES
132 #define TARGET_MERGE_DECL_ATTRIBUTES merge_dllimport_decl_attributes
133 #endif
135 #undef TARGET_ATTRIBUTE_TABLE
136 #define TARGET_ATTRIBUTE_TABLE arm_attribute_table
138 #ifdef AOF_ASSEMBLER
139 #undef TARGET_ASM_BYTE_OP
140 #define TARGET_ASM_BYTE_OP "\tDCB\t"
141 #undef TARGET_ASM_ALIGNED_HI_OP
142 #define TARGET_ASM_ALIGNED_HI_OP "\tDCW\t"
143 #undef TARGET_ASM_ALIGNED_SI_OP
144 #define TARGET_ASM_ALIGNED_SI_OP "\tDCD\t"
145 #else
146 #undef TARGET_ASM_ALIGNED_SI_OP
147 #define TARGET_ASM_ALIGNED_SI_OP NULL
148 #undef TARGET_ASM_INTEGER
149 #define TARGET_ASM_INTEGER arm_assemble_integer
150 #endif
152 #undef TARGET_ASM_FUNCTION_PROLOGUE
153 #define TARGET_ASM_FUNCTION_PROLOGUE arm_output_function_prologue
155 #undef TARGET_ASM_FUNCTION_EPILOGUE
156 #define TARGET_ASM_FUNCTION_EPILOGUE arm_output_function_epilogue
158 #undef TARGET_COMP_TYPE_ATTRIBUTES
159 #define TARGET_COMP_TYPE_ATTRIBUTES arm_comp_type_attributes
161 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
162 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES arm_set_default_type_attributes
164 #undef TARGET_INIT_BUILTINS
165 #define TARGET_INIT_BUILTINS arm_init_builtins
167 #undef TARGET_EXPAND_BUILTIN
168 #define TARGET_EXPAND_BUILTIN arm_expand_builtin
170 #undef TARGET_SCHED_ADJUST_COST
171 #define TARGET_SCHED_ADJUST_COST arm_adjust_cost
173 struct gcc_target targetm = TARGET_INITIALIZER;
175 /* Obstack for minipool constant handling. */
176 static struct obstack minipool_obstack;
177 static char * minipool_startobj;
179 #define obstack_chunk_alloc xmalloc
180 #define obstack_chunk_free free
182 /* The maximum number of insns skipped which
183 will be conditionalised if possible. */
184 static int max_insns_skipped = 5;
186 extern FILE * asm_out_file;
188 /* True if we are currently building a constant table. */
189 int making_const_table;
191 /* Define the information needed to generate branch insns. This is
192 stored from the compare operation. */
193 rtx arm_compare_op0, arm_compare_op1;
195 /* What type of floating point are we tuning for? */
196 enum floating_point_type arm_fpu;
198 /* What type of floating point instructions are available? */
199 enum floating_point_type arm_fpu_arch;
201 /* What program mode is the cpu running in? 26-bit mode or 32-bit mode. */
202 enum prog_mode_type arm_prgmode;
204 /* Set by the -mfp=... option. */
205 const char * target_fp_name = NULL;
207 /* Used to parse -mstructure_size_boundary command line option. */
208 const char * structure_size_string = NULL;
209 int arm_structure_size_boundary = DEFAULT_STRUCTURE_SIZE_BOUNDARY;
211 /* Bit values used to identify processor capabilities. */
212 #define FL_CO_PROC (1 << 0) /* Has external co-processor bus */
213 #define FL_FAST_MULT (1 << 1) /* Fast multiply */
214 #define FL_MODE26 (1 << 2) /* 26-bit mode support */
215 #define FL_MODE32 (1 << 3) /* 32-bit mode support */
216 #define FL_ARCH4 (1 << 4) /* Architecture rel 4 */
217 #define FL_ARCH5 (1 << 5) /* Architecture rel 5 */
218 #define FL_THUMB (1 << 6) /* Thumb aware */
219 #define FL_LDSCHED (1 << 7) /* Load scheduling necessary */
220 #define FL_STRONG (1 << 8) /* StrongARM */
221 #define FL_ARCH5E (1 << 9) /* DSP extensions to v5 */
222 #define FL_XSCALE (1 << 10) /* XScale */
224 /* The bits in this mask specify which
225 instructions we are allowed to generate. */
226 static int insn_flags = 0;
228 /* The bits in this mask specify which instruction scheduling options should
229 be used. Note - there is an overlap with the FL_FAST_MULT bit. For some
230 hardware we want to be able to generate the multiply instructions, but to
231 tune as if they were not present in the architecture. */
232 static int tune_flags = 0;
234 /* The following are used in the arm.md file as equivalents to bits
235 in the above two flag variables. */
237 /* Nonzero if this is an "M" variant of the processor. */
238 int arm_fast_multiply = 0;
240 /* Nonzero if this chip supports the ARM Architecture 4 extensions. */
241 int arm_arch4 = 0;
243 /* Nonzero if this chip supports the ARM Architecture 5 extensions. */
244 int arm_arch5 = 0;
246 /* Nonzero if this chip supports the ARM Architecture 5E extensions. */
247 int arm_arch5e = 0;
249 /* Nonzero if this chip can benefit from load scheduling. */
250 int arm_ld_sched = 0;
252 /* Nonzero if this chip is a StrongARM. */
253 int arm_is_strong = 0;
255 /* Nonzero if this chip is an XScale. */
256 int arm_is_xscale = 0;
258 /* Nonzero if this chip is an ARM6 or an ARM7. */
259 int arm_is_6_or_7 = 0;
261 /* Nonzero if generating Thumb instructions. */
262 int thumb_code = 0;
264 /* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
265 must report the mode of the memory reference from PRINT_OPERAND to
266 PRINT_OPERAND_ADDRESS. */
267 enum machine_mode output_memory_reference_mode;
269 /* The register number to be used for the PIC offset register. */
270 const char * arm_pic_register_string = NULL;
271 int arm_pic_register = 9;
273 /* Set to 1 when a return insn is output; this means that the epilogue
274 is not needed. */
275 int return_used_this_function;
277 /* Set to 1 after arm_reorg has started. Reset to zero at the start of
278 the next function. */
279 static int after_arm_reorg = 0;
281 /* The maximum number of insns to be used when loading a constant. */
282 static int arm_constant_limit = 3;
284 /* For an explanation of these variables, see final_prescan_insn below. */
285 int arm_ccfsm_state;
286 enum arm_cond_code arm_current_cc;
287 rtx arm_target_insn;
288 int arm_target_label;
290 /* The condition codes of the ARM, and the inverse function. */
291 static const char * const arm_condition_codes[] =
293 "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
294 "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
297 #define streq(string1, string2) (strcmp (string1, string2) == 0)
299 /* Initialization code. */
301 struct processors
303 const char *const name;
304 const unsigned int flags;
307 /* Not all of these give usefully different compilation alternatives,
308 but there is no simple way of generalizing them. */
309 static const struct processors all_cores[] =
311 /* ARM Cores */
313 {"arm2", FL_CO_PROC | FL_MODE26 },
314 {"arm250", FL_CO_PROC | FL_MODE26 },
315 {"arm3", FL_CO_PROC | FL_MODE26 },
316 {"arm6", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
317 {"arm60", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
318 {"arm600", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
319 {"arm610", FL_MODE26 | FL_MODE32 },
320 {"arm620", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
321 {"arm7", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
322 /* arm7m doesn't exist on its own, only in combination with D (and I), but
323 those variants don't alter the generated code, so arm7m is sometimes used. */
324 {"arm7m", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
325 {"arm7d", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
326 {"arm7dm", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
327 {"arm7di", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
328 {"arm7dmi", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
329 {"arm70", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
330 {"arm700", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
331 {"arm700i", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
332 {"arm710", FL_MODE26 | FL_MODE32 },
333 {"arm710t", FL_MODE26 | FL_MODE32 | FL_THUMB },
334 {"arm720", FL_MODE26 | FL_MODE32 },
335 {"arm720t", FL_MODE26 | FL_MODE32 | FL_THUMB },
336 {"arm740t", FL_MODE26 | FL_MODE32 | FL_THUMB },
337 {"arm710c", FL_MODE26 | FL_MODE32 },
338 {"arm7100", FL_MODE26 | FL_MODE32 },
339 {"arm7500", FL_MODE26 | FL_MODE32 },
340 /* Doesn't have an external co-proc, but does have embedded fpu. */
341 {"arm7500fe", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
342 {"arm7tdmi", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
343 {"arm8", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
344 {"arm810", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
345 {"arm9", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
346 {"arm920", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
347 {"arm920t", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
348 {"arm940t", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
349 {"arm9tdmi", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
350 {"arm9e", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
351 {"strongarm", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
352 {"strongarm110", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
353 {"strongarm1100", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
354 {"strongarm1110", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
355 {"arm10tdmi", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_ARCH5 },
356 {"arm1020t", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_ARCH5 },
357 {"xscale", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_STRONG | FL_ARCH5 | FL_ARCH5E | FL_XSCALE },
359 {NULL, 0}
362 static const struct processors all_architectures[] =
364 /* ARM Architectures */
366 { "armv2", FL_CO_PROC | FL_MODE26 },
367 { "armv2a", FL_CO_PROC | FL_MODE26 },
368 { "armv3", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
369 { "armv3m", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
370 { "armv4", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 },
371 /* Strictly, FL_MODE26 is a permitted option for v4t, but there are no
372 implementations that support it, so we will leave it out for now. */
373 { "armv4t", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
374 { "armv5", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
375 { "armv5t", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
376 { "armv5te", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 | FL_ARCH5E },
377 { NULL, 0 }
380 /* This is a magic structure. The 'string' field is magically filled in
381 with a pointer to the value specified by the user on the command line
382 assuming that the user has specified such a value. */
384 struct arm_cpu_select arm_select[] =
386 /* string name processors */
387 { NULL, "-mcpu=", all_cores },
388 { NULL, "-march=", all_architectures },
389 { NULL, "-mtune=", all_cores }
392 /* Return the number of bits set in VALUE. */
393 static unsigned long
394 bit_count (value)
395 signed int value;
397 unsigned long count = 0;
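/* Kernighan's method: VALUE & -VALUE isolates the least significant set
   bit, so each iteration of the loop below clears exactly one bit. */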
399 while (value)
401 value &= ~(value & -value);
402 ++count;
405 return count;
408 /* Fix up any incompatible options that the user has specified.
409 This has now turned into a maze. */
410 void
411 arm_override_options ()
413 unsigned i;
415 /* Set up the flags based on the cpu/architecture selected by the user. */
416 for (i = ARRAY_SIZE (arm_select); i--;)
418 struct arm_cpu_select * ptr = arm_select + i;
420 if (ptr->string != NULL && ptr->string[0] != '\0')
422 const struct processors * sel;
424 for (sel = ptr->processors; sel->name != NULL; sel++)
425 if (streq (ptr->string, sel->name))
427 if (i == 2)
428 tune_flags = sel->flags;
429 else
431 /* If we have been given an architecture and a processor
432 make sure that they are compatible. We only generate
433 a warning though, and we prefer the CPU over the
434 architecture. */
435 if (insn_flags != 0 && (insn_flags ^ sel->flags))
436 warning ("switch -mcpu=%s conflicts with -march= switch",
437 ptr->string);
439 insn_flags = sel->flags;
442 break;
445 if (sel->name == NULL)
446 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
450 /* If the user did not specify a processor, choose one for them. */
451 if (insn_flags == 0)
453 const struct processors * sel;
454 unsigned int sought;
455 static const struct cpu_default
457 const int cpu;
458 const char *const name;
460 cpu_defaults[] =
462 { TARGET_CPU_arm2, "arm2" },
463 { TARGET_CPU_arm6, "arm6" },
464 { TARGET_CPU_arm610, "arm610" },
465 { TARGET_CPU_arm710, "arm710" },
466 { TARGET_CPU_arm7m, "arm7m" },
467 { TARGET_CPU_arm7500fe, "arm7500fe" },
468 { TARGET_CPU_arm7tdmi, "arm7tdmi" },
469 { TARGET_CPU_arm8, "arm8" },
470 { TARGET_CPU_arm810, "arm810" },
471 { TARGET_CPU_arm9, "arm9" },
472 { TARGET_CPU_strongarm, "strongarm" },
473 { TARGET_CPU_xscale, "xscale" },
474 { TARGET_CPU_generic, "arm" },
475 { 0, 0 }
477 const struct cpu_default * def;
479 /* Find the default. */
480 for (def = cpu_defaults; def->name; def++)
481 if (def->cpu == TARGET_CPU_DEFAULT)
482 break;
484 /* Make sure we found the default CPU. */
485 if (def->name == NULL)
486 abort ();
488 /* Find the default CPU's flags. */
489 for (sel = all_cores; sel->name != NULL; sel++)
490 if (streq (def->name, sel->name))
491 break;
493 if (sel->name == NULL)
494 abort ();
496 insn_flags = sel->flags;
498 /* Now check to see if the user has specified some command line
499 switches that require certain abilities from the cpu. */
500 sought = 0;
502 if (TARGET_INTERWORK || TARGET_THUMB)
504 sought |= (FL_THUMB | FL_MODE32);
506 /* Force apcs-32 to be used for interworking. */
507 target_flags |= ARM_FLAG_APCS_32;
509 /* There are no ARM processors that support both APCS-26 and
510 interworking. Therefore we force FL_MODE26 to be removed
511 from insn_flags here (if it was set), so that the search
512 below will always be able to find a compatible processor. */
513 insn_flags &= ~FL_MODE26;
515 else if (!TARGET_APCS_32)
516 sought |= FL_MODE26;
518 if (sought != 0 && ((sought & insn_flags) != sought))
520 /* Try to locate a CPU type that supports all of the abilities
521 of the default CPU, plus the extra abilities requested by
522 the user. */
523 for (sel = all_cores; sel->name != NULL; sel++)
524 if ((sel->flags & (sought | insn_flags)) == (sought | insn_flags))
525 break;
527 if (sel->name == NULL)
529 unsigned int current_bit_count = 0;
530 const struct processors * best_fit = NULL;
532 /* Ideally we would like to issue an error message here
533 saying that it was not possible to find a CPU compatible
534 with the default CPU, but which also supports the command
535 line options specified by the programmer, and so they
536 ought to use the -mcpu=<name> command line option to
537 override the default CPU type.
539 Unfortunately this does not work with multilibbing. We
540 need to be able to support multilibs for -mapcs-26 and for
541 -mthumb-interwork and there is no CPU that can support both
542 options. Instead if we cannot find a cpu that has both the
543 characteristics of the default cpu and the given command line
544 options we scan the array again looking for a best match. */
545 for (sel = all_cores; sel->name != NULL; sel++)
546 if ((sel->flags & sought) == sought)
548 unsigned int count;
550 count = bit_count (sel->flags & insn_flags);
552 if (count >= current_bit_count)
554 best_fit = sel;
555 current_bit_count = count;
559 if (best_fit == NULL)
560 abort ();
561 else
562 sel = best_fit;
565 insn_flags = sel->flags;
569 /* If tuning has not been specified, tune for whichever processor or
570 architecture has been selected. */
571 if (tune_flags == 0)
572 tune_flags = insn_flags;
574 /* Make sure that the processor choice does not conflict with any of the
575 other command line choices. */
576 if (TARGET_APCS_32 && !(insn_flags & FL_MODE32))
578 /* If APCS-32 was not the default then it must have been set by the
579 user, so issue a warning message. If the user has specified
580 "-mapcs-32 -mcpu=arm2" then we lose here. */
581 if ((TARGET_DEFAULT & ARM_FLAG_APCS_32) == 0)
582 warning ("target CPU does not support APCS-32");
583 target_flags &= ~ARM_FLAG_APCS_32;
585 else if (!TARGET_APCS_32 && !(insn_flags & FL_MODE26))
587 warning ("target CPU does not support APCS-26");
588 target_flags |= ARM_FLAG_APCS_32;
591 if (TARGET_INTERWORK && !(insn_flags & FL_THUMB))
593 warning ("target CPU does not support interworking");
594 target_flags &= ~ARM_FLAG_INTERWORK;
597 if (TARGET_THUMB && !(insn_flags & FL_THUMB))
599 warning ("target CPU does not support THUMB instructions");
600 target_flags &= ~ARM_FLAG_THUMB;
603 if (TARGET_APCS_FRAME && TARGET_THUMB)
605 /* warning ("ignoring -mapcs-frame because -mthumb was used"); */
606 target_flags &= ~ARM_FLAG_APCS_FRAME;
609 /* TARGET_BACKTRACE calls leaf_function_p, which causes a crash if done
610 from here where no function is being compiled currently. */
611 if ((target_flags & (THUMB_FLAG_LEAF_BACKTRACE | THUMB_FLAG_BACKTRACE))
612 && TARGET_ARM)
613 warning ("enabling backtrace support is only meaningful when compiling for the Thumb");
615 if (TARGET_ARM && TARGET_CALLEE_INTERWORKING)
616 warning ("enabling callee interworking support is only meaningful when compiling for the Thumb");
618 if (TARGET_ARM && TARGET_CALLER_INTERWORKING)
619 warning ("enabling caller interworking support is only meaningful when compiling for the Thumb");
621 /* If interworking is enabled then APCS-32 must be selected as well. */
622 if (TARGET_INTERWORK)
624 if (!TARGET_APCS_32)
625 warning ("interworking forces APCS-32 to be used");
626 target_flags |= ARM_FLAG_APCS_32;
629 if (TARGET_APCS_STACK && !TARGET_APCS_FRAME)
631 warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
632 target_flags |= ARM_FLAG_APCS_FRAME;
635 if (TARGET_POKE_FUNCTION_NAME)
636 target_flags |= ARM_FLAG_APCS_FRAME;
638 if (TARGET_APCS_REENT && flag_pic)
639 error ("-fpic and -mapcs-reent are incompatible");
641 if (TARGET_APCS_REENT)
642 warning ("APCS reentrant code not supported. Ignored");
644 /* If this target is normally configured to use APCS frames, warn if they
645 are turned off and debugging is turned on. */
646 if (TARGET_ARM
647 && write_symbols != NO_DEBUG
648 && !TARGET_APCS_FRAME
649 && (TARGET_DEFAULT & ARM_FLAG_APCS_FRAME))
650 warning ("-g with -mno-apcs-frame may not give sensible debugging");
652 /* If stack checking is disabled, we can use r10 as the PIC register,
653 which keeps r9 available. */
654 if (flag_pic && !TARGET_APCS_STACK)
655 arm_pic_register = 10;
657 if (TARGET_APCS_FLOAT)
658 warning ("passing floating point arguments in fp regs not yet supported");
660 /* Initialise boolean versions of the flags, for use in the arm.md file. */
661 arm_fast_multiply = (insn_flags & FL_FAST_MULT) != 0;
662 arm_arch4 = (insn_flags & FL_ARCH4) != 0;
663 arm_arch5 = (insn_flags & FL_ARCH5) != 0;
664 arm_arch5e = (insn_flags & FL_ARCH5E) != 0;
665 arm_is_xscale = (insn_flags & FL_XSCALE) != 0;
667 arm_ld_sched = (tune_flags & FL_LDSCHED) != 0;
668 arm_is_strong = (tune_flags & FL_STRONG) != 0;
669 thumb_code = (TARGET_ARM == 0);
670 arm_is_6_or_7 = (((tune_flags & (FL_MODE26 | FL_MODE32))
671 && !(tune_flags & FL_ARCH4))) != 0;
673 /* Default value for floating point code... if no co-processor
674 bus, then schedule for emulated floating point. Otherwise,
675 assume the user has an FPA.
676 Note: this does not prevent use of floating point instructions,
677 -msoft-float does that. */
678 arm_fpu = (tune_flags & FL_CO_PROC) ? FP_HARD : FP_SOFT3;
680 if (target_fp_name)
682 if (streq (target_fp_name, "2"))
683 arm_fpu_arch = FP_SOFT2;
684 else if (streq (target_fp_name, "3"))
685 arm_fpu_arch = FP_SOFT3;
686 else
687 error ("invalid floating point emulation option: -mfpe-%s",
688 target_fp_name);
690 else
691 arm_fpu_arch = FP_DEFAULT;
693 if (TARGET_FPE && arm_fpu != FP_HARD)
694 arm_fpu = FP_SOFT2;
696 /* For arm2/3 there is no need to do any scheduling if there is only
697 a floating point emulator, or we are doing software floating-point. */
698 if ((TARGET_SOFT_FLOAT || arm_fpu != FP_HARD)
699 && (tune_flags & FL_MODE32) == 0)
700 flag_schedule_insns = flag_schedule_insns_after_reload = 0;
702 arm_prgmode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;
704 if (structure_size_string != NULL)
706 int size = strtol (structure_size_string, NULL, 0);
708 if (size == 8 || size == 32)
709 arm_structure_size_boundary = size;
710 else
711 warning ("structure size boundary can only be set to 8 or 32");
714 if (arm_pic_register_string != NULL)
716 int pic_register;
718 if (!flag_pic)
719 warning ("-mpic-register= is useless without -fpic");
721 pic_register = decode_reg_name (arm_pic_register_string);
723 /* Prevent the user from choosing an obviously stupid PIC register. */
724 if (pic_register < 0 || call_used_regs[pic_register]
725 || pic_register == HARD_FRAME_POINTER_REGNUM
726 || pic_register == STACK_POINTER_REGNUM
727 || pic_register >= PC_REGNUM)
728 error ("unable to use '%s' for PIC register", arm_pic_register_string);
729 else
730 arm_pic_register = pic_register;
733 if (TARGET_THUMB && flag_schedule_insns)
735 /* Don't warn since it's on by default in -O2. */
736 flag_schedule_insns = 0;
739 /* If optimizing for space, don't synthesize constants.
740 For processors with load scheduling, it never costs more than 2 cycles
741 to load a constant, and the load scheduler may well reduce that to 1. */
742 if (optimize_size || (tune_flags & FL_LDSCHED))
743 arm_constant_limit = 1;
745 if (arm_is_xscale)
746 arm_constant_limit = 2;
748 /* If optimizing for size, bump the number of instructions that we
749 are prepared to conditionally execute (even on a StrongARM).
750 Otherwise for the StrongARM, which has early execution of branches,
751 a sequence that is worth skipping is shorter. */
752 if (optimize_size)
753 max_insns_skipped = 6;
754 else if (arm_is_strong)
755 max_insns_skipped = 3;
757 /* Register global variables with the garbage collector. */
758 arm_add_gc_roots ();
761 static void
762 arm_add_gc_roots ()
764 ggc_add_rtx_root (&arm_compare_op0, 1);
765 ggc_add_rtx_root (&arm_compare_op1, 1);
766 ggc_add_rtx_root (&arm_target_insn, 1); /* Not sure this is really a root. */
768 gcc_obstack_init(&minipool_obstack);
769 minipool_startobj = (char *) obstack_alloc (&minipool_obstack, 0);
772 /* A table of known ARM exception types.
773 For use with the interrupt function attribute. */
775 typedef struct
777 const char *const arg;
778 const unsigned long return_value;
780 isr_attribute_arg;
782 static const isr_attribute_arg isr_attribute_args [] =
784 { "IRQ", ARM_FT_ISR },
785 { "irq", ARM_FT_ISR },
786 { "FIQ", ARM_FT_FIQ },
787 { "fiq", ARM_FT_FIQ },
788 { "ABORT", ARM_FT_ISR },
789 { "abort", ARM_FT_ISR },
792 { "UNDEF", ARM_FT_EXCEPTION },
793 { "undef", ARM_FT_EXCEPTION },
794 { "SWI", ARM_FT_EXCEPTION },
795 { "swi", ARM_FT_EXCEPTION },
796 { NULL, ARM_FT_NORMAL }
799 /* Returns the (interrupt) function type of the current
800 function, or ARM_FT_UNKNOWN if the type cannot be determined. */
802 static unsigned long
803 arm_isr_value (argument)
804 tree argument;
806 const isr_attribute_arg * ptr;
807 const char * arg;
809 /* No argument - default to IRQ. */
810 if (argument == NULL_TREE)
811 return ARM_FT_ISR;
813 /* Get the value of the argument. */
814 if (TREE_VALUE (argument) == NULL_TREE
815 || TREE_CODE (TREE_VALUE (argument)) != STRING_CST)
816 return ARM_FT_UNKNOWN;
818 arg = TREE_STRING_POINTER (TREE_VALUE (argument));
820 /* Check it against the list of known arguments. */
821 for (ptr = isr_attribute_args; ptr->arg != NULL; ptr ++)
822 if (streq (arg, ptr->arg))
823 return ptr->return_value;
825 /* An unrecognised interrupt type. */
826 return ARM_FT_UNKNOWN;
829 /* Computes the type of the current function. */
831 static unsigned long
832 arm_compute_func_type ()
834 unsigned long type = ARM_FT_UNKNOWN;
835 tree a;
836 tree attr;
838 if (TREE_CODE (current_function_decl) != FUNCTION_DECL)
839 abort ();
841 /* Decide if the current function is volatile. Such functions
842 never return, and many memory cycles can be saved by not storing
843 register values that will never be needed again. This optimization
844 was added to speed up context switching in a kernel application. */
845 if (optimize > 0
846 && current_function_nothrow
847 && TREE_THIS_VOLATILE (current_function_decl))
848 type |= ARM_FT_VOLATILE;
850 if (current_function_needs_context)
851 type |= ARM_FT_NESTED;
853 attr = DECL_ATTRIBUTES (current_function_decl);
855 a = lookup_attribute ("naked", attr);
856 if (a != NULL_TREE)
857 type |= ARM_FT_NAKED;
859 if (cfun->machine->eh_epilogue_sp_ofs != NULL_RTX)
860 type |= ARM_FT_EXCEPTION_HANDLER;
861 else
863 a = lookup_attribute ("isr", attr);
864 if (a == NULL_TREE)
865 a = lookup_attribute ("interrupt", attr);
867 if (a == NULL_TREE)
868 type |= TARGET_INTERWORK ? ARM_FT_INTERWORKED : ARM_FT_NORMAL;
869 else
870 type |= arm_isr_value (TREE_VALUE (a));
873 return type;
876 /* Returns the type of the current function. */
878 unsigned long
879 arm_current_func_type ()
881 if (ARM_FUNC_TYPE (cfun->machine->func_type) == ARM_FT_UNKNOWN)
882 cfun->machine->func_type = arm_compute_func_type ();
884 return cfun->machine->func_type;
887 /* Return 1 if it is possible to return using a single instruction. */
889 int
890 use_return_insn (iscond)
891 int iscond;
893 int regno;
894 unsigned int func_type;
896 /* Never use a return instruction before reload has run. */
897 if (!reload_completed)
898 return 0;
900 func_type = arm_current_func_type ();
902 /* Naked functions, volatile functions and interrupt
903 functions all need special consideration. */
904 if (func_type & (ARM_FT_INTERRUPT | ARM_FT_VOLATILE | ARM_FT_NAKED))
905 return 0;
907 /* As do variadic functions. */
908 if (current_function_pretend_args_size
909 || cfun->machine->uses_anonymous_args
910 /* Or if the function calls __builtin_eh_return (). */
911 || ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER
912 /* Or if there is no frame pointer and there is a stack adjustment. */
913 || ((get_frame_size () + current_function_outgoing_args_size != 0)
914 && !frame_pointer_needed))
915 return 0;
917 /* Can't be done if interworking with Thumb, and any registers have been
918 stacked. Similarly, on StrongARM, conditional returns are expensive
919 if they aren't taken and registers have been stacked. */
920 if (iscond && arm_is_strong && frame_pointer_needed)
921 return 0;
923 if ((iscond && arm_is_strong)
924 || TARGET_INTERWORK)
926 for (regno = 0; regno <= LAST_ARM_REGNUM; regno++)
927 if (regs_ever_live[regno] && !call_used_regs[regno])
928 return 0;
930 if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
931 return 0;
934 /* Can't be done if any of the FPU regs are pushed,
935 since this also requires an insn. */
936 if (TARGET_HARD_FLOAT)
937 for (regno = FIRST_ARM_FP_REGNUM; regno <= LAST_ARM_FP_REGNUM; regno++)
938 if (regs_ever_live[regno] && !call_used_regs[regno])
939 return 0;
941 return 1;
944 /* Return TRUE if int I is a valid immediate ARM constant. */
946 int
947 const_ok_for_arm (i)
948 HOST_WIDE_INT i;
950 unsigned HOST_WIDE_INT mask = ~(unsigned HOST_WIDE_INT)0xFF;
952 /* For machines with >32 bit HOST_WIDE_INT, the bits above bit 31 must
953 be all zero, or all one. */
954 if ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff) != 0
955 && ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff)
956 != ((~(unsigned HOST_WIDE_INT) 0)
957 & ~(unsigned HOST_WIDE_INT) 0xffffffff)))
958 return FALSE;
960 /* Fast return for 0 and powers of 2 */
961 if ((i & (i - 1)) == 0)
962 return TRUE;
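/* An ARM data-processing immediate is an 8-bit value rotated right by an
   even amount.  The loop below slides a 0xFF window through all sixteen
   even rotations, succeeding if the constant fits entirely in one window. */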
964 do
966 if ((i & mask & (unsigned HOST_WIDE_INT) 0xffffffff) == 0)
967 return TRUE;
968 mask =
969 (mask << 2) | ((mask & (unsigned HOST_WIDE_INT) 0xffffffff)
970 >> (32 - 2)) | ~(unsigned HOST_WIDE_INT) 0xffffffff;
972 while (mask != ~(unsigned HOST_WIDE_INT) 0xFF);
974 return FALSE;
977 /* Return true if I is a valid constant for the operation CODE. */
978 static int
979 const_ok_for_op (i, code)
980 HOST_WIDE_INT i;
981 enum rtx_code code;
983 if (const_ok_for_arm (i))
984 return 1;
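/* The operation may still be possible with a modified constant: PLUS can
   become SUB with the negated value, and AND can become BIC with the
   inverted value, so check those forms below. */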
986 switch (code)
988 case PLUS:
989 return const_ok_for_arm (ARM_SIGN_EXTEND (-i));
991 case MINUS: /* Should only occur with (MINUS I reg) => rsb */
992 case XOR:
993 case IOR:
994 return 0;
996 case AND:
997 return const_ok_for_arm (ARM_SIGN_EXTEND (~i));
999 default:
1000 abort ();
1004 /* Emit a sequence of insns to handle a large constant.
1005 CODE is the code of the operation required, it can be any of SET, PLUS,
1006 IOR, AND, XOR, MINUS;
1007 MODE is the mode in which the operation is being performed;
1008 VAL is the integer to operate on;
1009 SOURCE is the other operand (a register, or a null-pointer for SET);
1010 SUBTARGETS means it is safe to create scratch registers if that will
1011 either produce a simpler sequence, or we will want to cse the values.
1012 Return value is the number of insns emitted. */
1015 arm_split_constant (code, mode, val, target, source, subtargets)
1016 enum rtx_code code;
1017 enum machine_mode mode;
1018 HOST_WIDE_INT val;
1019 rtx target;
1020 rtx source;
1021 int subtargets;
1023 if (subtargets || code == SET
1024 || (GET_CODE (target) == REG && GET_CODE (source) == REG
1025 && REGNO (target) != REGNO (source)))
1027 /* After arm_reorg has been called, we can't fix up expensive
1028 constants by pushing them into memory so we must synthesise
1029 them in-line, regardless of the cost. This is only likely to
1030 be more costly on chips that have load delay slots and we are
1031 compiling without running the scheduler (so no splitting
1032 occurred before the final instruction emission).
1034 Ref: gcc -O1 -mcpu=strongarm gcc.c-torture/compile/980506-2.c
1035 */
1036 if (!after_arm_reorg
1037 && (arm_gen_constant (code, mode, val, target, source, 1, 0)
1038 > arm_constant_limit + (code != SET)))
1040 if (code == SET)
1042 /* Currently SET is the only monadic value for CODE, all
1043 the rest are dyadic. */
1044 emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (val)));
1045 return 1;
1047 else
1049 rtx temp = subtargets ? gen_reg_rtx (mode) : target;
1051 emit_insn (gen_rtx_SET (VOIDmode, temp, GEN_INT (val)));
1052 /* For MINUS, the constant is the minuend (we compute VAL - SOURCE),
1053 since we never have subtraction of a constant. */
1054 if (code == MINUS)
1055 emit_insn (gen_rtx_SET (VOIDmode, target,
1056 gen_rtx_MINUS (mode, temp, source)));
1057 else
1058 emit_insn (gen_rtx_SET (VOIDmode, target,
1059 gen_rtx (code, mode, source, temp)));
1060 return 2;
1065 return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
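/* Count how many insns the emission loop at the end of arm_gen_constant
   would need to materialise REMAINDER, taking 8-bit chunks starting at bit
   position I.  Used to decide whether starting from `best_start' really
   yields a shorter sequence than starting from zero (bit 31 first). */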
1068 static int
1069 count_insns_for_constant (HOST_WIDE_INT remainder, int i)
1071 HOST_WIDE_INT temp1;
1072 int num_insns = 0;
1073 do
1075 int end;
1077 if (i <= 0)
1078 i += 32;
1079 if (remainder & (3 << (i - 2)))
1081 end = i - 8;
1082 if (end < 0)
1083 end += 32;
1084 temp1 = remainder & ((0x0ff << end)
1085 | ((i < end) ? (0xff >> (32 - end)) : 0));
1086 remainder &= ~temp1;
1087 num_insns++;
1088 i -= 6;
1090 i -= 2;
1091 } while (remainder);
1092 return num_insns;
1095 /* As above, but extra parameter GENERATE which, if clear, suppresses
1096 RTL generation. */
1098 static int
1099 arm_gen_constant (code, mode, val, target, source, subtargets, generate)
1100 enum rtx_code code;
1101 enum machine_mode mode;
1102 HOST_WIDE_INT val;
1103 rtx target;
1104 rtx source;
1105 int subtargets;
1106 int generate;
1108 int can_invert = 0;
1109 int can_negate = 0;
1110 int can_negate_initial = 0;
1111 int can_shift = 0;
1112 int i;
1113 int num_bits_set = 0;
1114 int set_sign_bit_copies = 0;
1115 int clear_sign_bit_copies = 0;
1116 int clear_zero_bit_copies = 0;
1117 int set_zero_bit_copies = 0;
1118 int insns = 0;
1119 unsigned HOST_WIDE_INT temp1, temp2;
1120 unsigned HOST_WIDE_INT remainder = val & 0xffffffff;
1122 /* Find out which operations are safe for a given CODE. Also do a quick
1123 check for degenerate cases; these can occur when DImode operations
1124 are split. */
1125 switch (code)
1127 case SET:
1128 can_invert = 1;
1129 can_shift = 1;
1130 can_negate = 1;
1131 break;
1133 case PLUS:
1134 can_negate = 1;
1135 can_negate_initial = 1;
1136 break;
1138 case IOR:
1139 if (remainder == 0xffffffff)
1141 if (generate)
1142 emit_insn (gen_rtx_SET (VOIDmode, target,
1143 GEN_INT (ARM_SIGN_EXTEND (val))));
1144 return 1;
1146 if (remainder == 0)
1148 if (reload_completed && rtx_equal_p (target, source))
1149 return 0;
1150 if (generate)
1151 emit_insn (gen_rtx_SET (VOIDmode, target, source));
1152 return 1;
1154 break;
1156 case AND:
1157 if (remainder == 0)
1159 if (generate)
1160 emit_insn (gen_rtx_SET (VOIDmode, target, const0_rtx));
1161 return 1;
1163 if (remainder == 0xffffffff)
1165 if (reload_completed && rtx_equal_p (target, source))
1166 return 0;
1167 if (generate)
1168 emit_insn (gen_rtx_SET (VOIDmode, target, source));
1169 return 1;
1171 can_invert = 1;
1172 break;
1174 case XOR:
1175 if (remainder == 0)
1177 if (reload_completed && rtx_equal_p (target, source))
1178 return 0;
1179 if (generate)
1180 emit_insn (gen_rtx_SET (VOIDmode, target, source));
1181 return 1;
1183 if (remainder == 0xffffffff)
1185 if (generate)
1186 emit_insn (gen_rtx_SET (VOIDmode, target,
1187 gen_rtx_NOT (mode, source)));
1188 return 1;
1191 /* We don't know how to handle this yet below. */
1192 abort ();
1194 case MINUS:
1195 /* We treat MINUS as (val - source), since (source - val) is always
1196 passed as (source + (-val)). */
1197 if (remainder == 0)
1199 if (generate)
1200 emit_insn (gen_rtx_SET (VOIDmode, target,
1201 gen_rtx_NEG (mode, source)));
1202 return 1;
1204 if (const_ok_for_arm (val))
1206 if (generate)
1207 emit_insn (gen_rtx_SET (VOIDmode, target,
1208 gen_rtx_MINUS (mode, GEN_INT (val),
1209 source)));
1210 return 1;
1212 can_negate = 1;
1214 break;
1216 default:
1217 abort ();
1220 /* If we can do it in one insn, get out quickly. */
1221 if (const_ok_for_arm (val)
1222 || (can_negate_initial && const_ok_for_arm (-val))
1223 || (can_invert && const_ok_for_arm (~val)))
1225 if (generate)
1226 emit_insn (gen_rtx_SET (VOIDmode, target,
1227 (source ? gen_rtx (code, mode, source,
1228 GEN_INT (val))
1229 : GEN_INT (val))));
1230 return 1;
1233 /* Calculate a few attributes that may be useful for specific
1234 optimizations. */
1235 for (i = 31; i >= 0; i--)
1237 if ((remainder & (1 << i)) == 0)
1238 clear_sign_bit_copies++;
1239 else
1240 break;
1243 for (i = 31; i >= 0; i--)
1245 if ((remainder & (1 << i)) != 0)
1246 set_sign_bit_copies++;
1247 else
1248 break;
1251 for (i = 0; i <= 31; i++)
1253 if ((remainder & (1 << i)) == 0)
1254 clear_zero_bit_copies++;
1255 else
1256 break;
1259 for (i = 0; i <= 31; i++)
1261 if ((remainder & (1 << i)) != 0)
1262 set_zero_bit_copies++;
1263 else
1264 break;
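/* The counts just computed describe the runs of 0s and 1s at the top
   (sign) and bottom (zero) ends of the constant; the cases below use
   these runs to find shift-based shortcuts. */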
1267 switch (code)
1269 case SET:
1270 /* See if we can do this by sign_extending a constant that is known
1271 to be negative. This is a good way of doing it, since the shift
1272 may well merge into a subsequent insn. */
1273 if (set_sign_bit_copies > 1)
1275 if (const_ok_for_arm
1276 (temp1 = ARM_SIGN_EXTEND (remainder
1277 << (set_sign_bit_copies - 1))))
1279 if (generate)
1281 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
1282 emit_insn (gen_rtx_SET (VOIDmode, new_src,
1283 GEN_INT (temp1)));
1284 emit_insn (gen_ashrsi3 (target, new_src,
1285 GEN_INT (set_sign_bit_copies - 1)));
1287 return 2;
1289 /* For an inverted constant, we will need to set the low bits;
1290 these will be shifted out of harm's way. */
1291 temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
1292 if (const_ok_for_arm (~temp1))
1294 if (generate)
1296 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
1297 emit_insn (gen_rtx_SET (VOIDmode, new_src,
1298 GEN_INT (temp1)));
1299 emit_insn (gen_ashrsi3 (target, new_src,
1300 GEN_INT (set_sign_bit_copies - 1)));
1302 return 2;
1306 /* See if we can generate this by setting the bottom (or the top)
1307 16 bits, and then shifting these into the other half of the
1308 word. We only look for the simplest cases; to do more would cost
1309 too much. Be careful, however, not to generate this when the
1310 alternative would take fewer insns. */
1311 if (val & 0xffff0000)
1313 temp1 = remainder & 0xffff0000;
1314 temp2 = remainder & 0x0000ffff;
1316 /* Overlaps outside this range are best done using other methods. */
1317 for (i = 9; i < 24; i++)
1319 if ((((temp2 | (temp2 << i)) & 0xffffffff) == remainder)
1320 && !const_ok_for_arm (temp2))
1322 rtx new_src = (subtargets
1323 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
1324 : target);
1325 insns = arm_gen_constant (code, mode, temp2, new_src,
1326 source, subtargets, generate);
1327 source = new_src;
1328 if (generate)
1329 emit_insn (gen_rtx_SET
1330 (VOIDmode, target,
1331 gen_rtx_IOR (mode,
1332 gen_rtx_ASHIFT (mode, source,
1333 GEN_INT (i)),
1334 source)));
1335 return insns + 1;
1339 /* Don't duplicate cases already considered. */
1340 for (i = 17; i < 24; i++)
1342 if (((temp1 | (temp1 >> i)) == remainder)
1343 && !const_ok_for_arm (temp1))
1345 rtx new_src = (subtargets
1346 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
1347 : target);
1348 insns = arm_gen_constant (code, mode, temp1, new_src,
1349 source, subtargets, generate);
1350 source = new_src;
1351 if (generate)
1352 emit_insn
1353 (gen_rtx_SET (VOIDmode, target,
1354 gen_rtx_IOR
1355 (mode,
1356 gen_rtx_LSHIFTRT (mode, source,
1357 GEN_INT (i)),
1358 source)));
1359 return insns + 1;
1363 break;
1365 case IOR:
1366 case XOR:
1367 /* If we have IOR or XOR, and the constant can be loaded in a
1368 single instruction, and we can find a temporary to put it in,
1369 then this can be done in two instructions instead of 3-4. */
1370 if (subtargets
1371 /* TARGET can't be NULL if SUBTARGETS is 0 */
1372 || (reload_completed && !reg_mentioned_p (target, source)))
1374 if (const_ok_for_arm (ARM_SIGN_EXTEND (~val)))
1376 if (generate)
1378 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
1380 emit_insn (gen_rtx_SET (VOIDmode, sub, GEN_INT (val)));
1381 emit_insn (gen_rtx_SET (VOIDmode, target,
1382 gen_rtx (code, mode, source, sub)));
1384 return 2;
1388 if (code == XOR)
1389 break;
1391 if (set_sign_bit_copies > 8
1392 && (val & (-1 << (32 - set_sign_bit_copies))) == val)
1394 if (generate)
1396 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
1397 rtx shift = GEN_INT (set_sign_bit_copies);
1399 emit_insn (gen_rtx_SET (VOIDmode, sub,
1400 gen_rtx_NOT (mode,
1401 gen_rtx_ASHIFT (mode,
1402 source,
1403 shift))));
1404 emit_insn (gen_rtx_SET (VOIDmode, target,
1405 gen_rtx_NOT (mode,
1406 gen_rtx_LSHIFTRT (mode, sub,
1407 shift))));
1409 return 2;
1412 if (set_zero_bit_copies > 8
1413 && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
1415 if (generate)
1417 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
1418 rtx shift = GEN_INT (set_zero_bit_copies);
1420 emit_insn (gen_rtx_SET (VOIDmode, sub,
1421 gen_rtx_NOT (mode,
1422 gen_rtx_LSHIFTRT (mode,
1423 source,
1424 shift))));
1425 emit_insn (gen_rtx_SET (VOIDmode, target,
1426 gen_rtx_NOT (mode,
1427 gen_rtx_ASHIFT (mode, sub,
1428 shift))));
1430 return 2;
1433 if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~val)))
1435 if (generate)
1437 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
1438 emit_insn (gen_rtx_SET (VOIDmode, sub,
1439 gen_rtx_NOT (mode, source)));
1440 source = sub;
1441 if (subtargets)
1442 sub = gen_reg_rtx (mode);
1443 emit_insn (gen_rtx_SET (VOIDmode, sub,
1444 gen_rtx_AND (mode, source,
1445 GEN_INT (temp1))));
1446 emit_insn (gen_rtx_SET (VOIDmode, target,
1447 gen_rtx_NOT (mode, sub)));
1449 return 3;
1451 break;
1453 case AND:
1454 /* See if two shifts will do 2 or more insns' worth of work. */
1455 if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
1457 HOST_WIDE_INT shift_mask = ((0xffffffff
1458 << (32 - clear_sign_bit_copies))
1459 & 0xffffffff);
1461 if ((remainder | shift_mask) != 0xffffffff)
1463 if (generate)
1465 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
1466 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
1467 new_src, source, subtargets, 1);
1468 source = new_src;
1470 else
1472 rtx targ = subtargets ? NULL_RTX : target;
1473 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
1474 targ, source, subtargets, 0);
1478 if (generate)
1480 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
1481 rtx shift = GEN_INT (clear_sign_bit_copies);
1483 emit_insn (gen_ashlsi3 (new_src, source, shift));
1484 emit_insn (gen_lshrsi3 (target, new_src, shift));
1487 return insns + 2;
1490 if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
1492 HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;
1494 if ((remainder | shift_mask) != 0xffffffff)
1496 if (generate)
1498 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
1500 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
1501 new_src, source, subtargets, 1);
1502 source = new_src;
1504 else
1506 rtx targ = subtargets ? NULL_RTX : target;
1508 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
1509 targ, source, subtargets, 0);
1513 if (generate)
1515 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
1516 rtx shift = GEN_INT (clear_zero_bit_copies);
1518 emit_insn (gen_lshrsi3 (new_src, source, shift));
1519 emit_insn (gen_ashlsi3 (target, new_src, shift));
1522 return insns + 2;
1525 break;
1527 default:
1528 break;
1531 for (i = 0; i < 32; i++)
1532 if (remainder & (1 << i))
1533 num_bits_set++;
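/* If more than half of the 32 bits are set, fewer 8-bit chunks are needed
   to build the complement (or, for PLUS, the negation) of the constant;
   AND is always built from the complement, i.e. using BIC. */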
1535 if (code == AND || (can_invert && num_bits_set > 16))
1536 remainder = (~remainder) & 0xffffffff;
1537 else if (code == PLUS && num_bits_set > 16)
1538 remainder = (-remainder) & 0xffffffff;
1539 else
1541 can_invert = 0;
1542 can_negate = 0;
1545 /* Now try and find a way of doing the job in either two or three
1546 instructions.
1547 We start by looking for the largest block of zeros that are aligned on
1548 a 2-bit boundary; we then fill up the temps, wrapping around to the
1549 top of the word when we drop off the bottom.
1550 In the worst case this code should produce no more than four insns. */
1552 int best_start = 0;
1553 int best_consecutive_zeros = 0;
1555 for (i = 0; i < 32; i += 2)
1557 int consecutive_zeros = 0;
1559 if (!(remainder & (3 << i)))
1561 while ((i < 32) && !(remainder & (3 << i)))
1563 consecutive_zeros += 2;
1564 i += 2;
1566 if (consecutive_zeros > best_consecutive_zeros)
1568 best_consecutive_zeros = consecutive_zeros;
1569 best_start = i - consecutive_zeros;
1571 i -= 2;
1575 /* So long as it won't require any more insns to do so, it's
1576 desirable to emit a small constant (in bits 0...9) in the last
1577 insn. This way there is more chance that it can be combined with
1578 a later addressing insn to form a pre-indexed load or store
1579 operation. Consider:
1581 *((volatile int *)0xe0000100) = 1;
1582 *((volatile int *)0xe0000110) = 2;
1584 We want this to wind up as:
1586 mov rA, #0xe0000000
1587 mov rB, #1
1588 str rB, [rA, #0x100]
1589 mov rB, #2
1590 str rB, [rA, #0x110]
1592 rather than having to synthesize both large constants from scratch.
1594 Therefore, we calculate how many insns would be required to emit
1595 the constant starting from `best_start', and also starting from
1596 zero (i.e. with bit 31 first to be output). If `best_start' doesn't
1597 yield a shorter sequence, we may as well use zero. */
1598 if (best_start != 0
1599 && ((((unsigned HOST_WIDE_INT) 1) << best_start) < remainder)
1600 && (count_insns_for_constant (remainder, 0) <=
1601 count_insns_for_constant (remainder, best_start)))
1602 best_start = 0;
1604 /* Now start emitting the insns. */
1605 i = best_start;
1606 do
1608 int end;
1610 if (i <= 0)
1611 i += 32;
1612 if (remainder & (3 << (i - 2)))
1614 end = i - 8;
1615 if (end < 0)
1616 end += 32;
1617 temp1 = remainder & ((0x0ff << end)
1618 | ((i < end) ? (0xff >> (32 - end)) : 0));
1619 remainder &= ~temp1;
1621 if (generate)
1623 rtx new_src, temp1_rtx;
1625 if (code == SET || code == MINUS)
1627 new_src = (subtargets ? gen_reg_rtx (mode) : target);
1628 if (can_invert && code != MINUS)
1629 temp1 = ~temp1;
1631 else
1633 if (remainder && subtargets)
1634 new_src = gen_reg_rtx (mode);
1635 else
1636 new_src = target;
1637 if (can_invert)
1638 temp1 = ~temp1;
1639 else if (can_negate)
1640 temp1 = -temp1;
1643 temp1 = trunc_int_for_mode (temp1, mode);
1644 temp1_rtx = GEN_INT (temp1);
1646 if (code == SET)
1648 else if (code == MINUS)
1649 temp1_rtx = gen_rtx_MINUS (mode, temp1_rtx, source);
1650 else
1651 temp1_rtx = gen_rtx_fmt_ee (code, mode, source, temp1_rtx);
1653 emit_insn (gen_rtx_SET (VOIDmode, new_src, temp1_rtx));
1654 source = new_src;
1657 if (code == SET)
1659 can_invert = 0;
1660 code = PLUS;
1662 else if (code == MINUS)
1663 code = PLUS;
1665 insns++;
1666 i -= 6;
1668 i -= 2;
1670 while (remainder);
1673 return insns;
1676 /* Canonicalize a comparison so that we are more likely to recognize it.
1677 This can be done for a few constant compares, where we can make the
1678 immediate value easier to load. */
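/* For example, (GT x #0xffffff) has a constant that no ARM immediate can
   encode, but adding one gives (GE x #0x1000000), whose constant is a
   valid rotated 8-bit immediate. */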
1680 enum rtx_code
1681 arm_canonicalize_comparison (code, op1)
1682 enum rtx_code code;
1683 rtx * op1;
1685 unsigned HOST_WIDE_INT i = INTVAL (*op1);
1687 switch (code)
1689 case EQ:
1690 case NE:
1691 return code;
1693 case GT:
1694 case LE:
1695 if (i != ((((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1)) - 1)
1696 && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
1698 *op1 = GEN_INT (i + 1);
1699 return code == GT ? GE : LT;
1701 break;
1703 case GE:
1704 case LT:
1705 if (i != (((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1))
1706 && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
1708 *op1 = GEN_INT (i - 1);
1709 return code == GE ? GT : LE;
1711 break;
1713 case GTU:
1714 case LEU:
1715 if (i != ~((unsigned HOST_WIDE_INT) 0)
1716 && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
1718 *op1 = GEN_INT (i + 1);
1719 return code == GTU ? GEU : LTU;
1721 break;
1723 case GEU:
1724 case LTU:
1725 if (i != 0
1726 && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
1728 *op1 = GEN_INT (i - 1);
1729 return code == GEU ? GTU : LEU;
1731 break;
1733 default:
1734 abort ();
1737 return code;
1740 /* Decide whether a type should be returned in memory (true)
1741 or in a register (false). This is called by the macro
1742 RETURN_IN_MEMORY. */
1744 int
1745 arm_return_in_memory (type)
1746 tree type;
1748 if (!AGGREGATE_TYPE_P (type))
1749 /* All simple types are returned in registers. */
1750 return 0;
1752 /* For the arm-wince targets we choose to be compatible with Microsoft's
1753 ARM and Thumb compilers, which always return aggregates in memory. */
1754 #ifndef ARM_WINCE
1755 /* All structures/unions bigger than one word are returned in memory.
1756 Also catch the case where int_size_in_bytes returns -1. In this case
1757 the aggregate is either huge or of variable size, and in either case
1758 we will want to return it via memory and not in a register. */
1759 if (((unsigned int) int_size_in_bytes (type)) > UNITS_PER_WORD)
1760 return 1;
1762 if (TREE_CODE (type) == RECORD_TYPE)
1764 tree field;
1766 /* For a struct the APCS says that we only return in a register
1767 if the type is 'integer like' and every addressable element
1768 has an offset of zero. For practical purposes this means
1769 that the structure can have at most one non bit-field element
1770 and that this element must be the first one in the structure. */
1772 /* Find the first field, ignoring non FIELD_DECL things which will
1773 have been created by C++. */
1774 for (field = TYPE_FIELDS (type);
1775 field && TREE_CODE (field) != FIELD_DECL;
1776 field = TREE_CHAIN (field))
1777 continue;
1779 if (field == NULL)
1780 return 0; /* An empty structure. Allowed by an extension to ANSI C. */
1782 /* Check that the first field is valid for returning in a register. */
1784 /* ... Floats are not allowed */
1785 if (FLOAT_TYPE_P (TREE_TYPE (field)))
1786 return 1;
1788 /* ... Aggregates that are not themselves valid for returning in
1789 a register are not allowed. */
1790 if (RETURN_IN_MEMORY (TREE_TYPE (field)))
1791 return 1;
1793 /* Now check the remaining fields, if any. Only bitfields are allowed,
1794 since they are not addressable. */
1795 for (field = TREE_CHAIN (field);
1796 field;
1797 field = TREE_CHAIN (field))
1799 if (TREE_CODE (field) != FIELD_DECL)
1800 continue;
1802 if (!DECL_BIT_FIELD_TYPE (field))
1803 return 1;
1806 return 0;
1809 if (TREE_CODE (type) == UNION_TYPE)
1811 tree field;
1813 /* Unions can be returned in registers if every element is
1814 integral, or can be returned in an integer register. */
1815 for (field = TYPE_FIELDS (type);
1816 field;
1817 field = TREE_CHAIN (field))
1819 if (TREE_CODE (field) != FIELD_DECL)
1820 continue;
1822 if (FLOAT_TYPE_P (TREE_TYPE (field)))
1823 return 1;
1825 if (RETURN_IN_MEMORY (TREE_TYPE (field)))
1826 return 1;
1829 return 0;
1831 #endif /* not ARM_WINCE */
1833 /* Return all other types in memory. */
1834 return 1;
1837 /* Initialize a variable CUM of type CUMULATIVE_ARGS
1838 for a call to a function whose data type is FNTYPE.
1839 For a library call, FNTYPE is NULL. */
1840 void
1841 arm_init_cumulative_args (pcum, fntype, libname, indirect)
1842 CUMULATIVE_ARGS * pcum;
1843 tree fntype;
1844 rtx libname ATTRIBUTE_UNUSED;
1845 int indirect ATTRIBUTE_UNUSED;
1847 /* On the ARM, the offset starts at 0. */
1848 pcum->nregs = ((fntype && aggregate_value_p (TREE_TYPE (fntype))) ? 1 : 0);
1850 pcum->call_cookie = CALL_NORMAL;
1852 if (TARGET_LONG_CALLS)
1853 pcum->call_cookie = CALL_LONG;
1855 /* Check for long call/short call attributes. The attributes
1856 override any command line option. */
1857 if (fntype)
1859 if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (fntype)))
1860 pcum->call_cookie = CALL_SHORT;
1861 else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (fntype)))
1862 pcum->call_cookie = CALL_LONG;
1866 /* Determine where to put an argument to a function.
1867 Value is zero to push the argument on the stack,
1868 or a hard register in which to store the argument.
1870 MODE is the argument's machine mode.
1871 TYPE is the data type of the argument (as a tree).
1872 This is null for libcalls where that information may
1873 not be available.
1874 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1875 the preceding args and about the function being called.
1876 NAMED is nonzero if this argument is a named parameter
1877 (otherwise it is an extra parameter matching an ellipsis). */
1879 rtx
1880 arm_function_arg (pcum, mode, type, named)
1881 CUMULATIVE_ARGS * pcum;
1882 enum machine_mode mode;
1883 tree type ATTRIBUTE_UNUSED;
1884 int named;
1886 if (mode == VOIDmode)
1887 /* Compute operand 2 of the call insn. */
1888 return GEN_INT (pcum->call_cookie);
1890 if (!named || pcum->nregs >= NUM_ARG_REGS)
1891 return NULL_RTX;
1893 return gen_rtx_REG (mode, pcum->nregs);
1896 /* Encode the current state of the #pragma [no_]long_calls. */
1897 typedef enum
1899 OFF, /* No #pragma [no_]long_calls is in effect. */
1900 LONG, /* #pragma long_calls is in effect. */
1901 SHORT /* #pragma no_long_calls is in effect. */
1902 } arm_pragma_enum;
1904 static arm_pragma_enum arm_pragma_long_calls = OFF;
1906 void
1907 arm_pr_long_calls (pfile)
1908 cpp_reader * pfile ATTRIBUTE_UNUSED;
1910 arm_pragma_long_calls = LONG;
1913 void
1914 arm_pr_no_long_calls (pfile)
1915 cpp_reader * pfile ATTRIBUTE_UNUSED;
1917 arm_pragma_long_calls = SHORT;
1920 void
1921 arm_pr_long_calls_off (pfile)
1922 cpp_reader * pfile ATTRIBUTE_UNUSED;
1924 arm_pragma_long_calls = OFF;
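/* A sketch of how these pragmas appear in user code (the
   declarations are illustrative only):

       #pragma long_calls
       extern void far_func (void);   -- gets the long_call attribute
       #pragma no_long_calls
       extern void near_func (void);  -- gets the short_call attribute
       #pragma long_calls_off         -- back to the command-line default

   The attribute itself is attached in arm_set_default_type_attributes
   below.  */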
1927 /* Table of machine attributes. */
1928 const struct attribute_spec arm_attribute_table[] =
1930 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
1931 /* Function calls made to this symbol must be done indirectly, because
1932 it may lie outside of the 26 bit addressing range of a normal function
1933 call. */
1934 { "long_call", 0, 0, false, true, true, NULL },
1935 /* Whereas these functions are always known to reside within the 26 bit
1936 addressing range. */
1937 { "short_call", 0, 0, false, true, true, NULL },
1938 /* Interrupt Service Routines have special prologue and epilogue requirements. */
1939 { "isr", 0, 1, false, false, false, arm_handle_isr_attribute },
1940 { "interrupt", 0, 1, false, false, false, arm_handle_isr_attribute },
1941 { "naked", 0, 0, true, false, false, arm_handle_fndecl_attribute },
1942 #ifdef ARM_PE
1943 /* ARM/PE has three new attributes:
1944 interfacearm - ?
1945 dllexport - for exporting a function/variable that will live in a dll
1946 dllimport - for importing a function/variable from a dll
1948 Microsoft allows multiple declspecs in one __declspec, separating
1949 them with spaces. We do NOT support this. Instead, use __declspec
1950 multiple times. */
1952 { "dllimport", 0, 0, true, false, false, NULL },
1953 { "dllexport", 0, 0, true, false, false, NULL },
1954 { "interfacearm", 0, 0, true, false, false, arm_handle_fndecl_attribute },
1955 #endif
1956 { NULL, 0, 0, false, false, false, NULL }
1959 /* Handle an attribute requiring a FUNCTION_DECL;
1960 arguments as in struct attribute_spec.handler. */
1962 static tree
1963 arm_handle_fndecl_attribute (node, name, args, flags, no_add_attrs)
1964 tree * node;
1965 tree name;
1966 tree args ATTRIBUTE_UNUSED;
1967 int flags ATTRIBUTE_UNUSED;
1968 bool * no_add_attrs;
1970 if (TREE_CODE (*node) != FUNCTION_DECL)
1972 warning ("`%s' attribute only applies to functions",
1973 IDENTIFIER_POINTER (name));
1974 *no_add_attrs = true;
1977 return NULL_TREE;
1980 /* Handle an "interrupt" or "isr" attribute;
1981 arguments as in struct attribute_spec.handler. */
1983 static tree
1984 arm_handle_isr_attribute (node, name, args, flags, no_add_attrs)
1985 tree * node;
1986 tree name;
1987 tree args;
1988 int flags;
1989 bool * no_add_attrs;
1991 if (DECL_P (*node))
1993 if (TREE_CODE (*node) != FUNCTION_DECL)
1995 warning ("`%s' attribute only applies to functions",
1996 IDENTIFIER_POINTER (name));
1997 *no_add_attrs = true;
1999 /* FIXME: the argument, if any, is checked for type attributes;
2000 should it be checked for decl ones? */
2002 else
2004 if (TREE_CODE (*node) == FUNCTION_TYPE
2005 || TREE_CODE (*node) == METHOD_TYPE)
2007 if (arm_isr_value (args) == ARM_FT_UNKNOWN)
2009 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
2010 *no_add_attrs = true;
2013 else if (TREE_CODE (*node) == POINTER_TYPE
2014 && (TREE_CODE (TREE_TYPE (*node)) == FUNCTION_TYPE
2015 || TREE_CODE (TREE_TYPE (*node)) == METHOD_TYPE)
2016 && arm_isr_value (args) != ARM_FT_UNKNOWN)
2018 *node = build_type_copy (*node);
2019 TREE_TYPE (*node) = build_type_attribute_variant
2020 (TREE_TYPE (*node),
2021 tree_cons (name, args, TYPE_ATTRIBUTES (TREE_TYPE (*node))));
2022 *no_add_attrs = true;
2024 else
2026 /* Possibly pass this attribute on from the type to a decl. */
2027 if (flags & ((int) ATTR_FLAG_DECL_NEXT
2028 | (int) ATTR_FLAG_FUNCTION_NEXT
2029 | (int) ATTR_FLAG_ARRAY_NEXT))
2031 *no_add_attrs = true;
2032 return tree_cons (name, args, NULL_TREE);
2034 else
2036 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
2041 return NULL_TREE;
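/* For illustration, a typical use that reaches this handler is

       void handler (void) __attribute__ ((interrupt ("IRQ")));

   where the string argument is validated by arm_isr_value.  */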
2044 /* Return 0 if the attributes for two types are incompatible, 1 if they
2045 are compatible, and 2 if they are nearly compatible (which causes a
2046 warning to be generated). */
2048 static int
2049 arm_comp_type_attributes (type1, type2)
2050 tree type1;
2051 tree type2;
2053 int l1, l2, s1, s2;
2055 /* Check for mismatch of non-default calling convention. */
2056 if (TREE_CODE (type1) != FUNCTION_TYPE)
2057 return 1;
2059 /* Check for mismatched call attributes. */
2060 l1 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type1)) != NULL;
2061 l2 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type2)) != NULL;
2062 s1 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type1)) != NULL;
2063 s2 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type2)) != NULL;
2065 /* Only bother to check if an attribute is defined. */
2066 if (l1 | l2 | s1 | s2)
2068 /* If one type has an attribute, the other must have the same attribute. */
2069 if ((l1 != l2) || (s1 != s2))
2070 return 0;
2072 /* Disallow mixed attributes. */
2073 if ((l1 & s2) || (l2 & s1))
2074 return 0;
2077 /* Check for mismatched ISR attribute. */
2078 l1 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type1)) != NULL;
2079 if (! l1)
2080 l1 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type1)) != NULL;
2081 l2 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type2)) != NULL;
2082 if (! l2)
2083 l2 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type2)) != NULL;
2084 if (l1 != l2)
2085 return 0;
2087 return 1;
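/* For instance, a function type carrying long_call compared against
   one carrying short_call yields 0 (incompatible), as does long_call
   against no attribute at all; two unattributed types, or two types
   with matching attributes, yield 1.  */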
2090 /* Encode long_call or short_call attribute by prefixing
2091 symbol name in DECL with a special character FLAG. */
2093 void
2094 arm_encode_call_attribute (decl, flag)
2095 tree decl;
2096 int flag;
2098 const char * str = XSTR (XEXP (DECL_RTL (decl), 0), 0);
2099 int len = strlen (str);
2100 char * newstr;
2102 if (TREE_CODE (decl) != FUNCTION_DECL)
2103 return;
2105 /* Do not allow weak functions to be treated as short call. */
2106 if (DECL_WEAK (decl) && flag == SHORT_CALL_FLAG_CHAR)
2107 return;
2109 newstr = alloca (len + 2);
2110 newstr[0] = flag;
2111 strcpy (newstr + 1, str);
2113 newstr = (char *) ggc_alloc_string (newstr, len + 1);
2114 XSTR (XEXP (DECL_RTL (decl), 0), 0) = newstr;
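/* Illustration: if FLAG is SHORT_CALL_FLAG_CHAR (a single character
   defined in arm.h), a function named "foo" has its assembler symbol
   rewritten to something like "^foo", which ENCODED_SHORT_CALL_ATTR_P
   later recognizes by its first character.  The '^' is illustrative;
   the actual flag characters are configuration macros.  */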
2117 /* Assigns default attributes to a newly defined type. This is used to
2118 set short_call/long_call attributes for function types of
2119 functions defined inside corresponding #pragma scopes. */
2121 static void
2122 arm_set_default_type_attributes (type)
2123 tree type;
2125 /* Add __attribute__ ((long_call)) to all functions when inside
2126 #pragma long_calls, or __attribute__ ((short_call)) when inside
2127 #pragma no_long_calls. */
2128 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
2130 tree type_attr_list, attr_name;
2131 type_attr_list = TYPE_ATTRIBUTES (type);
2133 if (arm_pragma_long_calls == LONG)
2134 attr_name = get_identifier ("long_call");
2135 else if (arm_pragma_long_calls == SHORT)
2136 attr_name = get_identifier ("short_call");
2137 else
2138 return;
2140 type_attr_list = tree_cons (attr_name, NULL_TREE, type_attr_list);
2141 TYPE_ATTRIBUTES (type) = type_attr_list;
2145 /* Return 1 if the operand is a SYMBOL_REF for a function known to be
2146 defined within the current compilation unit. If this cannot be
2147 determined, then 0 is returned. */
2149 static int
2150 current_file_function_operand (sym_ref)
2151 rtx sym_ref;
2153 /* This is a bit of a fib. A function will have a short call flag
2154 applied to its name if it has the short call attribute, or it has
2155 already been defined within the current compilation unit. */
2156 if (ENCODED_SHORT_CALL_ATTR_P (XSTR (sym_ref, 0)))
2157 return 1;
2159 /* The current function is always defined within the current compilation
2160 unit. If it is a weak definition, however, then this may not be the real
2161 definition of the function, and so we have to say no. */
2162 if (sym_ref == XEXP (DECL_RTL (current_function_decl), 0)
2163 && !DECL_WEAK (current_function_decl))
2164 return 1;
2166 /* We cannot make the determination - default to returning 0. */
2167 return 0;
2170 /* Return non-zero if a 32 bit "long_call" should be generated for
2171 this call. We generate a long_call if the function:
2173 a. has an __attribute__ ((long_call))
2174 or b. is within the scope of a #pragma long_calls
2175 or c. the -mlong-calls command line switch has been specified
2177 However we do not generate a long call if the function:
2179 d. has an __attribute__ ((short_call))
2180 or e. is inside the scope of a #pragma no_long_calls
2181 or f. has an __attribute__ ((section))
2182 or g. is defined within the current compilation unit.
2184 This function will be called by C fragments contained in the machine
2185 description file. CALL_REF and CALL_COOKIE correspond to the matched
2186 rtl operands. CALL_SYMBOL is used to distinguish between
2187 two different callers of the function. It is set to 1 in the
2188 "call_symbol" and "call_symbol_value" patterns and to 0 in the "call"
2189 and "call_value" patterns. This is because of the difference in the
2190 SYM_REFs passed by these patterns. */
2193 arm_is_longcall_p (sym_ref, call_cookie, call_symbol)
2194 rtx sym_ref;
2195 int call_cookie;
2196 int call_symbol;
2198 if (!call_symbol)
2200 if (GET_CODE (sym_ref) != MEM)
2201 return 0;
2203 sym_ref = XEXP (sym_ref, 0);
2206 if (GET_CODE (sym_ref) != SYMBOL_REF)
2207 return 0;
2209 if (call_cookie & CALL_SHORT)
2210 return 0;
2212 if (TARGET_LONG_CALLS && flag_function_sections)
2213 return 1;
2215 if (current_file_function_operand (sym_ref))
2216 return 0;
2218 return (call_cookie & CALL_LONG)
2219 || ENCODED_LONG_CALL_ATTR_P (XSTR (sym_ref, 0))
2220 || TARGET_LONG_CALLS;
2223 /* Return non-zero if it is ok to make a tail-call to DECL. */
2226 arm_function_ok_for_sibcall (decl)
2227 tree decl;
2229 int call_type = TARGET_LONG_CALLS ? CALL_LONG : CALL_NORMAL;
2231 /* Never tailcall something for which we have no decl, or if we
2232 are in Thumb mode. */
2233 if (decl == NULL || TARGET_THUMB)
2234 return 0;
2236 /* Get the calling method. */
2237 if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
2238 call_type = CALL_SHORT;
2239 else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
2240 call_type = CALL_LONG;
2242 /* Cannot tail-call to long calls, since these are out of range of
2243 a branch instruction. However, if not compiling PIC, we know
2244 we can reach the symbol if it is in this compilation unit. */
2245 if (call_type == CALL_LONG && (flag_pic || !TREE_ASM_WRITTEN (decl)))
2246 return 0;
2248 /* If we are interworking and the function is not declared static
2249 then we can't tail-call it unless we know that it exists in this
2250 compilation unit (since it might be a Thumb routine). */
2251 if (TARGET_INTERWORK && TREE_PUBLIC (decl) && !TREE_ASM_WRITTEN (decl))
2252 return 0;
2254 /* Never tailcall from an ISR routine - it needs a special exit sequence. */
2255 if (IS_INTERRUPT (arm_current_func_type ()))
2256 return 0;
2258 /* Everything else is ok. */
2259 return 1;
2264 legitimate_pic_operand_p (x)
2265 rtx x;
2267 if (CONSTANT_P (x)
2268 && flag_pic
2269 && (GET_CODE (x) == SYMBOL_REF
2270 || (GET_CODE (x) == CONST
2271 && GET_CODE (XEXP (x, 0)) == PLUS
2272 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)))
2273 return 0;
2275 return 1;
2279 legitimize_pic_address (orig, mode, reg)
2280 rtx orig;
2281 enum machine_mode mode;
2282 rtx reg;
2284 if (GET_CODE (orig) == SYMBOL_REF
2285 || GET_CODE (orig) == LABEL_REF)
2287 #ifndef AOF_ASSEMBLER
2288 rtx pic_ref, address;
2289 #endif
2290 rtx insn;
2291 int subregs = 0;
2293 if (reg == 0)
2295 if (no_new_pseudos)
2296 abort ();
2297 else
2298 reg = gen_reg_rtx (Pmode);
2300 subregs = 1;
2303 #ifdef AOF_ASSEMBLER
2304 /* The AOF assembler can generate relocations for these directly, and
2305 understands that the PIC register has to be added into the offset. */
2306 insn = emit_insn (gen_pic_load_addr_based (reg, orig));
2307 #else
2308 if (subregs)
2309 address = gen_reg_rtx (Pmode);
2310 else
2311 address = reg;
2313 if (TARGET_ARM)
2314 emit_insn (gen_pic_load_addr_arm (address, orig));
2315 else
2316 emit_insn (gen_pic_load_addr_thumb (address, orig));
2318 if (GET_CODE (orig) == LABEL_REF && NEED_GOT_RELOC)
2319 pic_ref = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, address);
2320 else
2322 pic_ref = gen_rtx_MEM (Pmode,
2323 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
2324 address));
2325 RTX_UNCHANGING_P (pic_ref) = 1;
2328 insn = emit_move_insn (reg, pic_ref);
2329 #endif
2330 current_function_uses_pic_offset_table = 1;
2331 /* Put a REG_EQUAL note on this insn, so that it can be optimized
2332 by the loop optimizer. */
2333 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, orig,
2334 REG_NOTES (insn));
2335 return reg;
2337 else if (GET_CODE (orig) == CONST)
2339 rtx base, offset;
2341 if (GET_CODE (XEXP (orig, 0)) == PLUS
2342 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
2343 return orig;
2345 if (reg == 0)
2347 if (no_new_pseudos)
2348 abort ();
2349 else
2350 reg = gen_reg_rtx (Pmode);
2353 if (GET_CODE (XEXP (orig, 0)) == PLUS)
2355 base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
2356 offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
2357 base == reg ? 0 : reg);
2359 else
2360 abort ();
2362 if (GET_CODE (offset) == CONST_INT)
2364 /* The base register doesn't really matter, we only want to
2365 test the index for the appropriate mode. */
2366 ARM_GO_IF_LEGITIMATE_INDEX (mode, 0, offset, win);
2368 if (!no_new_pseudos)
2369 offset = force_reg (Pmode, offset);
2370 else
2371 abort ();
2373 win:
2374 if (GET_CODE (offset) == CONST_INT)
2375 return plus_constant (base, INTVAL (offset));
2378 if (GET_MODE_SIZE (mode) > 4
2379 && (GET_MODE_CLASS (mode) == MODE_INT
2380 || TARGET_SOFT_FLOAT))
2382 emit_insn (gen_addsi3 (reg, base, offset));
2383 return reg;
2386 return gen_rtx_PLUS (Pmode, base, offset);
2389 return orig;
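/* Schematically, for a SYMBOL_REF in ARM mode the code above emits

       ldr  rN, Lc           @ pic_load_addr_arm: GOT offset of the symbol
       ldr  rN, [rPIC, rN]   @ load the address through the GOT

   where rPIC is the PIC base register, together with a REG_EQUAL
   note giving the original address for the loop optimizer.  */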
2392 /* Generate code to load the PIC register. PROLOGUE is true if
2393 called from arm_expand_prologue (in which case we want the
2394 generated insns at the start of the function); false if called
2395 by an exception receiver that needs the PIC register reloaded
2396 (in which case the insns are just dumped at the current location). */
2398 void
2399 arm_finalize_pic (prologue)
2400 int prologue ATTRIBUTE_UNUSED;
2402 #ifndef AOF_ASSEMBLER
2403 rtx l1, pic_tmp, pic_tmp2, seq, pic_rtx;
2404 rtx global_offset_table;
2406 if (current_function_uses_pic_offset_table == 0 || TARGET_SINGLE_PIC_BASE)
2407 return;
2409 if (!flag_pic)
2410 abort ();
2412 start_sequence ();
2413 l1 = gen_label_rtx ();
2415 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2416 /* On the ARM the PC register contains 'dot + 8' at the time of the
2417 addition, on the Thumb it is 'dot + 4'. */
2418 pic_tmp = plus_constant (gen_rtx_LABEL_REF (Pmode, l1), TARGET_ARM ? 8 : 4);
2419 if (GOT_PCREL)
2420 pic_tmp2 = gen_rtx_CONST (VOIDmode,
2421 gen_rtx_PLUS (Pmode, global_offset_table, pc_rtx));
2422 else
2423 pic_tmp2 = gen_rtx_CONST (VOIDmode, global_offset_table);
2425 pic_rtx = gen_rtx_CONST (Pmode, gen_rtx_MINUS (Pmode, pic_tmp2, pic_tmp));
2427 if (TARGET_ARM)
2429 emit_insn (gen_pic_load_addr_arm (pic_offset_table_rtx, pic_rtx));
2430 emit_insn (gen_pic_add_dot_plus_eight (pic_offset_table_rtx, l1));
2432 else
2434 emit_insn (gen_pic_load_addr_thumb (pic_offset_table_rtx, pic_rtx));
2435 emit_insn (gen_pic_add_dot_plus_four (pic_offset_table_rtx, l1));
2438 seq = gen_sequence ();
2439 end_sequence ();
2440 if (prologue)
2441 emit_insn_after (seq, get_insns ());
2442 else
2443 emit_insn (seq);
2445 /* Need to emit this whether or not we obey regdecls,
2446 since setjmp/longjmp can cause life info to screw up. */
2447 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
2448 #endif /* AOF_ASSEMBLER */
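/* Schematically, the ARM-mode sequence generated above is

          ldr  rPIC, Lgot       @ _GLOBAL_OFFSET_TABLE_ - (L1 + 8)
      L1: add  rPIC, pc, rPIC   @ pic_add_dot_plus_eight

   leaving the address of the GOT in the PIC register; the Thumb
   variant uses a bias of 4 rather than 8.  */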
2451 #define REG_OR_SUBREG_REG(X) \
2452 (GET_CODE (X) == REG \
2453 || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))
2455 #define REG_OR_SUBREG_RTX(X) \
2456 (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))
2458 #ifndef COSTS_N_INSNS
2459 #define COSTS_N_INSNS(N) ((N) * 4 - 2)
2460 #endif
2463 arm_rtx_costs (x, code, outer)
2464 rtx x;
2465 enum rtx_code code;
2466 enum rtx_code outer;
2468 enum machine_mode mode = GET_MODE (x);
2469 enum rtx_code subcode;
2470 int extra_cost;
2472 if (TARGET_THUMB)
2474 switch (code)
2476 case ASHIFT:
2477 case ASHIFTRT:
2478 case LSHIFTRT:
2479 case ROTATERT:
2480 case PLUS:
2481 case MINUS:
2482 case COMPARE:
2483 case NEG:
2484 case NOT:
2485 return COSTS_N_INSNS (1);
2487 case MULT:
2488 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
2490 int cycles = 0;
2491 unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));
2493 while (i)
2495 i >>= 2;
2496 cycles++;
2498 return COSTS_N_INSNS (2) + cycles;
2500 return COSTS_N_INSNS (1) + 16;
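/* A worked example of the constant loop above: 0x155 has nine
   significant bits, so I is shifted right by two five times before
   reaching zero, for a cost of COSTS_N_INSNS (2) + 5.  */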
2502 case SET:
2503 return (COSTS_N_INSNS (1)
2504 + 4 * ((GET_CODE (SET_SRC (x)) == MEM)
2505 + (GET_CODE (SET_DEST (x)) == MEM)));
2507 case CONST_INT:
2508 if (outer == SET)
2510 if ((unsigned HOST_WIDE_INT) INTVAL (x) < 256)
2511 return 0;
2512 if (thumb_shiftable_const (INTVAL (x)))
2513 return COSTS_N_INSNS (2);
2514 return COSTS_N_INSNS (3);
2516 else if (outer == PLUS
2517 && INTVAL (x) < 256 && INTVAL (x) > -256)
2518 return 0;
2519 else if (outer == COMPARE
2520 && (unsigned HOST_WIDE_INT) INTVAL (x) < 256)
2521 return 0;
2522 else if (outer == ASHIFT || outer == ASHIFTRT
2523 || outer == LSHIFTRT)
2524 return 0;
2525 return COSTS_N_INSNS (2);
2527 case CONST:
2528 case CONST_DOUBLE:
2529 case LABEL_REF:
2530 case SYMBOL_REF:
2531 return COSTS_N_INSNS (3);
2533 case UDIV:
2534 case UMOD:
2535 case DIV:
2536 case MOD:
2537 return 100;
2539 case TRUNCATE:
2540 return 99;
2542 case AND:
2543 case XOR:
2544 case IOR:
2545 /* XXX guess. */
2546 return 8;
2548 case ADDRESSOF:
2549 case MEM:
2550 /* XXX another guess. */
2551 /* Memory costs quite a lot for the first word, but subsequent words
2552 load at the equivalent of a single insn each. */
2553 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
2554 + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));
2556 case IF_THEN_ELSE:
2557 /* XXX a guess. */
2558 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
2559 return 14;
2560 return 2;
2562 case ZERO_EXTEND:
2563 /* XXX still guessing. */
2564 switch (GET_MODE (XEXP (x, 0)))
2566 case QImode:
2567 return (1 + (mode == DImode ? 4 : 0)
2568 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2570 case HImode:
2571 return (4 + (mode == DImode ? 4 : 0)
2572 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2574 case SImode:
2575 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2577 default:
2578 return 99;
2581 default:
2582 return 99;
2583 #if 0
2584 case FFS:
2585 case FLOAT:
2586 case FIX:
2587 case UNSIGNED_FIX:
2588 /* XXX guess */
2589 fprintf (stderr, "unexpected code for thumb in rtx_costs: %s\n",
2590 rtx_name[code]);
2591 abort ();
2592 #endif
2596 switch (code)
2598 case MEM:
2599 /* Memory costs quite a lot for the first word, but subsequent words
2600 load at the equivalent of a single insn each. */
2601 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
2602 + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));
2604 case DIV:
2605 case MOD:
2606 return 100;
2608 case ROTATE:
2609 if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
2610 return 4;
2611 /* Fall through */
2612 case ROTATERT:
2613 if (mode != SImode)
2614 return 8;
2615 /* Fall through */
2616 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
2617 if (mode == DImode)
2618 return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
2619 + ((GET_CODE (XEXP (x, 0)) == REG
2620 || (GET_CODE (XEXP (x, 0)) == SUBREG
2621 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
2622 ? 0 : 8));
2623 return (1 + ((GET_CODE (XEXP (x, 0)) == REG
2624 || (GET_CODE (XEXP (x, 0)) == SUBREG
2625 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
2626 ? 0 : 4)
2627 + ((GET_CODE (XEXP (x, 1)) == REG
2628 || (GET_CODE (XEXP (x, 1)) == SUBREG
2629 && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
2630 || (GET_CODE (XEXP (x, 1)) == CONST_INT))
2631 ? 0 : 4));
2633 case MINUS:
2634 if (mode == DImode)
2635 return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
2636 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
2637 || (GET_CODE (XEXP (x, 0)) == CONST_INT
2638 && const_ok_for_arm (INTVAL (XEXP (x, 0)))))
2639 ? 0 : 8));
2641 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2642 return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2643 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
2644 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
2645 ? 0 : 8)
2646 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
2647 || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
2648 && const_double_rtx_ok_for_fpu (XEXP (x, 0))))
2649 ? 0 : 8));
2651 if (((GET_CODE (XEXP (x, 0)) == CONST_INT
2652 && const_ok_for_arm (INTVAL (XEXP (x, 0)))
2653 && REG_OR_SUBREG_REG (XEXP (x, 1))))
2654 || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
2655 || subcode == ASHIFTRT || subcode == LSHIFTRT
2656 || subcode == ROTATE || subcode == ROTATERT
2657 || (subcode == MULT
2658 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
2659 && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
2660 (INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
2661 && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
2662 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
2663 || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
2664 && REG_OR_SUBREG_REG (XEXP (x, 0))))
2665 return 1;
2666 /* Fall through */
2668 case PLUS:
2669 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2670 return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
2671 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2672 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
2673 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
2674 ? 0 : 8));
2676 /* Fall through */
2677 case AND: case XOR: case IOR:
2678 extra_cost = 0;
2680 /* Normally the frame registers will be spilt into reg+const during
2681 reload, so it is a bad idea to combine them with other instructions,
2682 since then they might not be moved outside of loops. As a compromise
2683 we allow integration with ops that have a constant as their second
2684 operand. */
2685 if ((REG_OR_SUBREG_REG (XEXP (x, 0))
2686 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
2687 && GET_CODE (XEXP (x, 1)) != CONST_INT)
2688 || (REG_OR_SUBREG_REG (XEXP (x, 0))
2689 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))))
2690 extra_cost = 4;
2692 if (mode == DImode)
2693 return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
2694 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2695 || (GET_CODE (XEXP (x, 1)) == CONST_INT
2696 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
2697 ? 0 : 8));
2699 if (REG_OR_SUBREG_REG (XEXP (x, 0)))
2700 return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
2701 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2702 || (GET_CODE (XEXP (x, 1)) == CONST_INT
2703 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
2704 ? 0 : 4));
2706 else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
2707 return (1 + extra_cost
2708 + ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
2709 || subcode == LSHIFTRT || subcode == ASHIFTRT
2710 || subcode == ROTATE || subcode == ROTATERT
2711 || (subcode == MULT
2712 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2713 && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
2714 (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0)))
2715 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
2716 && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
2717 || GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
2718 ? 0 : 4));
2720 return 8;
2722 case MULT:
2723 /* There is no point basing this on the tuning, since it is always the
2724 fast variant if it exists at all. */
2725 if (arm_fast_multiply && mode == DImode
2726 && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
2727 && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
2728 || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
2729 return 8;
2731 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2732 || mode == DImode)
2733 return 30;
2735 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
2737 unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
2738 & (unsigned HOST_WIDE_INT) 0xffffffff);
2739 int add_cost = const_ok_for_arm (i) ? 4 : 8;
2740 int j;
2742 /* Tune as appropriate. */
2743 int booth_unit_size = ((tune_flags & FL_FAST_MULT) ? 8 : 2);
2745 for (j = 0; i && j < 32; j += booth_unit_size)
2747 i >>= booth_unit_size;
2748 add_cost += 2;
2751 return add_cost;
2754 return (((tune_flags & FL_FAST_MULT) ? 8 : 30)
2755 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
2756 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));
2758 case TRUNCATE:
2759 if (arm_fast_multiply && mode == SImode
2760 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
2761 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
2762 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
2763 == GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)))
2764 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ZERO_EXTEND
2765 || GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SIGN_EXTEND))
2766 return 8;
2767 return 99;
2769 case NEG:
2770 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2771 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
2772 /* Fall through */
2773 case NOT:
2774 if (mode == DImode)
2775 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
2777 return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
2779 case IF_THEN_ELSE:
2780 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
2781 return 14;
2782 return 2;
2784 case COMPARE:
2785 return 1;
2787 case ABS:
2788 return 4 + (mode == DImode ? 4 : 0);
2790 case SIGN_EXTEND:
2791 if (GET_MODE (XEXP (x, 0)) == QImode)
2792 return (4 + (mode == DImode ? 4 : 0)
2793 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2794 /* Fall through */
2795 case ZERO_EXTEND:
2796 switch (GET_MODE (XEXP (x, 0)))
2798 case QImode:
2799 return (1 + (mode == DImode ? 4 : 0)
2800 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2802 case HImode:
2803 return (4 + (mode == DImode ? 4 : 0)
2804 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2806 case SImode:
2807 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2809 default:
2810 break;
2812 abort ();
2814 case CONST_INT:
2815 if (const_ok_for_arm (INTVAL (x)))
2816 return outer == SET ? 2 : -1;
2817 else if (outer == AND
2818 && const_ok_for_arm (~INTVAL (x)))
2819 return -1;
2820 else if ((outer == COMPARE
2821 || outer == PLUS || outer == MINUS)
2822 && const_ok_for_arm (-INTVAL (x)))
2823 return -1;
2824 else
2825 return 5;
2827 case CONST:
2828 case LABEL_REF:
2829 case SYMBOL_REF:
2830 return 6;
2832 case CONST_DOUBLE:
2833 if (const_double_rtx_ok_for_fpu (x))
2834 return outer == SET ? 2 : -1;
2835 else if ((outer == COMPARE || outer == PLUS)
2836 && neg_const_double_rtx_ok_for_fpu (x))
2837 return -1;
2838 return 7;
2840 default:
2841 return 99;
2845 static int
2846 arm_adjust_cost (insn, link, dep, cost)
2847 rtx insn;
2848 rtx link;
2849 rtx dep;
2850 int cost;
2852 rtx i_pat, d_pat;
2854 /* Some true dependencies can have a higher cost depending
2855 on precisely how certain input operands are used. */
2856 if (arm_is_xscale
2857 && REG_NOTE_KIND (link) == 0
2858 && recog_memoized (insn) >= 0
2859 && recog_memoized (dep) >= 0)
2861 int shift_opnum = get_attr_shift (insn);
2862 enum attr_type attr_type = get_attr_type (dep);
2864 /* If nonzero, SHIFT_OPNUM contains the operand number of a shifted
2865 operand for INSN. If we have a shifted input operand and the
2866 instruction we depend on is another ALU instruction, then we may
2867 have to account for an additional stall. */
2868 if (shift_opnum != 0 && attr_type == TYPE_NORMAL)
2870 rtx shifted_operand;
2871 int opno;
2873 /* Get the shifted operand. */
2874 extract_insn (insn);
2875 shifted_operand = recog_data.operand[shift_opnum];
2877 /* Iterate over all the operands in DEP. If we write an operand
2878 that overlaps with SHIFTED_OPERAND, then we have to increase the
2879 cost of this dependency. */
2880 extract_insn (dep);
2881 preprocess_constraints ();
2882 for (opno = 0; opno < recog_data.n_operands; opno++)
2884 /* We can ignore strict inputs. */
2885 if (recog_data.operand_type[opno] == OP_IN)
2886 continue;
2888 if (reg_overlap_mentioned_p (recog_data.operand[opno],
2889 shifted_operand))
2890 return 2;
2895 /* XXX This is not strictly true for the FPA. */
2896 if (REG_NOTE_KIND (link) == REG_DEP_ANTI
2897 || REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
2898 return 0;
2900 /* Call insns don't incur a stall, even if they follow a load. */
2901 if (REG_NOTE_KIND (link) == 0
2902 && GET_CODE (insn) == CALL_INSN)
2903 return 1;
2905 if ((i_pat = single_set (insn)) != NULL
2906 && GET_CODE (SET_SRC (i_pat)) == MEM
2907 && (d_pat = single_set (dep)) != NULL
2908 && GET_CODE (SET_DEST (d_pat)) == MEM)
2910 /* This is a load after a store; there is no conflict if the load reads
2911 from a cached area. Assume that loads from the stack, and from the
2912 constant pool are cached, and that others will miss. This is a
2913 hack. */
2915 if (CONSTANT_POOL_ADDRESS_P (XEXP (SET_SRC (i_pat), 0))
2916 || reg_mentioned_p (stack_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
2917 || reg_mentioned_p (frame_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
2918 || reg_mentioned_p (hard_frame_pointer_rtx,
2919 XEXP (SET_SRC (i_pat), 0)))
2920 return 1;
2923 return cost;
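/* An illustration of the XScale case above (the instruction choice
   is ours):

       mov  r1, r2, asl #3
       add  r0, r3, r1, asl #2

   The add uses r1 as a shifted input operand, and the dependency
   (the mov) writes r1, so the cost returned is 2 rather than the
   default.  */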
2926 /* This code has been fixed for cross compilation. */
2928 static int fpa_consts_inited = 0;
2930 static const char * const strings_fpa[8] =
2932 "0", "1", "2", "3",
2933 "4", "5", "0.5", "10"
2936 static REAL_VALUE_TYPE values_fpa[8];
2938 static void
2939 init_fpa_table ()
2941 int i;
2942 REAL_VALUE_TYPE r;
2944 for (i = 0; i < 8; i++)
2946 r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
2947 values_fpa[i] = r;
2950 fpa_consts_inited = 1;
2953 /* Return TRUE if rtx X is a valid immediate FPU constant. */
2956 const_double_rtx_ok_for_fpu (x)
2957 rtx x;
2959 REAL_VALUE_TYPE r;
2960 int i;
2962 if (!fpa_consts_inited)
2963 init_fpa_table ();
2965 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2966 if (REAL_VALUE_MINUS_ZERO (r))
2967 return 0;
2969 for (i = 0; i < 8; i++)
2970 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
2971 return 1;
2973 return 0;
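/* So, for example, 2.0 and 0.5 are valid FPA immediates, while 6.0
   is not; -1.0 also fails here but is accepted by the negated test
   below.  */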
2976 /* Return TRUE if rtx X is a valid immediate FPU constant when negated. */
2979 neg_const_double_rtx_ok_for_fpu (x)
2980 rtx x;
2982 REAL_VALUE_TYPE r;
2983 int i;
2985 if (!fpa_consts_inited)
2986 init_fpa_table ();
2988 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2989 r = REAL_VALUE_NEGATE (r);
2990 if (REAL_VALUE_MINUS_ZERO (r))
2991 return 0;
2993 for (i = 0; i < 8; i++)
2994 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
2995 return 1;
2997 return 0;
3000 /* Predicates for `match_operand' and `match_operator'. */
3002 /* s_register_operand is the same as register_operand, but it doesn't accept
3003 (SUBREG (MEM)...).
3005 This function exists because, at the time it was added, it led to better
3006 code. SUBREG(MEM) always needs a reload in the places where
3007 s_register_operand is used, and this seemed to lead to excessive
3008 reloading. */
3011 s_register_operand (op, mode)
3012 rtx op;
3013 enum machine_mode mode;
3015 if (GET_MODE (op) != mode && mode != VOIDmode)
3016 return 0;
3018 if (GET_CODE (op) == SUBREG)
3019 op = SUBREG_REG (op);
3021 /* We don't consider registers whose class is NO_REGS
3022 to be a register operand. */
3023 /* XXX might have to check for lo regs only for thumb ??? */
3024 return (GET_CODE (op) == REG
3025 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3026 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
3029 /* A hard register operand (even before reload). */
3032 arm_hard_register_operand (op, mode)
3033 rtx op;
3034 enum machine_mode mode;
3036 if (GET_MODE (op) != mode && mode != VOIDmode)
3037 return 0;
3039 return (GET_CODE (op) == REG
3040 && REGNO (op) < FIRST_PSEUDO_REGISTER);
3043 /* Only accept reg, subreg(reg), const_int. */
3046 reg_or_int_operand (op, mode)
3047 rtx op;
3048 enum machine_mode mode;
3050 if (GET_CODE (op) == CONST_INT)
3051 return 1;
3053 if (GET_MODE (op) != mode && mode != VOIDmode)
3054 return 0;
3056 if (GET_CODE (op) == SUBREG)
3057 op = SUBREG_REG (op);
3059 /* We don't consider registers whose class is NO_REGS
3060 to be a register operand. */
3061 return (GET_CODE (op) == REG
3062 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3063 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
3066 /* Return 1 if OP is an item in memory, given that we are in reload. */
3069 arm_reload_memory_operand (op, mode)
3070 rtx op;
3071 enum machine_mode mode ATTRIBUTE_UNUSED;
3073 int regno = true_regnum (op);
3075 return (!CONSTANT_P (op)
3076 && (regno == -1
3077 || (GET_CODE (op) == REG
3078 && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
3081 /* Return 1 if OP is a valid memory address, but not valid for a signed byte
3082 memory access (architecture V4).
3083 MODE is QImode if called when computing constraints, or VOIDmode when
3084 emitting patterns. In this latter case we cannot use memory_operand()
3085 because it will fail on badly formed MEMs, which is precisely what we are
3086 trying to catch. */
3089 bad_signed_byte_operand (op, mode)
3090 rtx op;
3091 enum machine_mode mode ATTRIBUTE_UNUSED;
3093 #if 0
3094 if ((mode == QImode && !memory_operand (op, mode)) || GET_CODE (op) != MEM)
3095 return 0;
3096 #endif
3097 if (GET_CODE (op) != MEM)
3098 return 0;
3100 op = XEXP (op, 0);
3102 /* A sum of anything more complex than reg + reg or reg + const is bad. */
3103 if ((GET_CODE (op) == PLUS || GET_CODE (op) == MINUS)
3104 && (!s_register_operand (XEXP (op, 0), VOIDmode)
3105 || (!s_register_operand (XEXP (op, 1), VOIDmode)
3106 && GET_CODE (XEXP (op, 1)) != CONST_INT)))
3107 return 1;
3109 /* Big constants are also bad. */
3110 if (GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT
3111 && (INTVAL (XEXP (op, 1)) > 0xff
3112 || -INTVAL (XEXP (op, 1)) > 0xff))
3113 return 1;
3115 /* Everything else is good, or will automatically be made so. */
3116 return 0;
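/* For example, (mem (plus (reg X) (const_int 300))) is flagged as
   bad: the architecture V4 signed-byte load (LDRSB) only has an
   8-bit immediate offset, so 300 cannot be encoded directly.  */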
3119 /* Return TRUE for valid operands for the rhs of an ARM instruction. */
3122 arm_rhs_operand (op, mode)
3123 rtx op;
3124 enum machine_mode mode;
3126 return (s_register_operand (op, mode)
3127 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
3130 /* Return TRUE for valid operands for the
3131 rhs of an ARM instruction, or a load. */
3134 arm_rhsm_operand (op, mode)
3135 rtx op;
3136 enum machine_mode mode;
3138 return (s_register_operand (op, mode)
3139 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
3140 || memory_operand (op, mode));
3143 /* Return TRUE for valid operands for the rhs of an ARM instruction, or if a
3144 constant that is valid when negated. */
3147 arm_add_operand (op, mode)
3148 rtx op;
3149 enum machine_mode mode;
3151 if (TARGET_THUMB)
3152 return thumb_cmp_operand (op, mode);
3154 return (s_register_operand (op, mode)
3155 || (GET_CODE (op) == CONST_INT
3156 && (const_ok_for_arm (INTVAL (op))
3157 || const_ok_for_arm (-INTVAL (op)))));
3161 arm_not_operand (op, mode)
3162 rtx op;
3163 enum machine_mode mode;
3165 return (s_register_operand (op, mode)
3166 || (GET_CODE (op) == CONST_INT
3167 && (const_ok_for_arm (INTVAL (op))
3168 || const_ok_for_arm (~INTVAL (op)))));
3171 /* Return TRUE if the operand is a memory reference which contains an
3172 offsettable address. */
3175 offsettable_memory_operand (op, mode)
3176 rtx op;
3177 enum machine_mode mode;
3179 if (mode == VOIDmode)
3180 mode = GET_MODE (op);
3182 return (mode == GET_MODE (op)
3183 && GET_CODE (op) == MEM
3184 && offsettable_address_p (reload_completed | reload_in_progress,
3185 mode, XEXP (op, 0)));
3188 /* Return TRUE if the operand is a memory reference which is, or can be
3189 made word aligned by adjusting the offset. */
3192 alignable_memory_operand (op, mode)
3193 rtx op;
3194 enum machine_mode mode;
3196 rtx reg;
3198 if (mode == VOIDmode)
3199 mode = GET_MODE (op);
3201 if (mode != GET_MODE (op) || GET_CODE (op) != MEM)
3202 return 0;
3204 op = XEXP (op, 0);
3206 return ((GET_CODE (reg = op) == REG
3207 || (GET_CODE (op) == SUBREG
3208 && GET_CODE (reg = SUBREG_REG (op)) == REG)
3209 || (GET_CODE (op) == PLUS
3210 && GET_CODE (XEXP (op, 1)) == CONST_INT
3211 && (GET_CODE (reg = XEXP (op, 0)) == REG
3212 || (GET_CODE (XEXP (op, 0)) == SUBREG
3213 && GET_CODE (reg = SUBREG_REG (XEXP (op, 0))) == REG))))
3214 && REGNO_POINTER_ALIGN (REGNO (reg)) >= 32);
3217 /* Similar to s_register_operand, but does not allow hard integer
3218 registers. */
3221 f_register_operand (op, mode)
3222 rtx op;
3223 enum machine_mode mode;
3225 if (GET_MODE (op) != mode && mode != VOIDmode)
3226 return 0;
3228 if (GET_CODE (op) == SUBREG)
3229 op = SUBREG_REG (op);
3231 /* For hard registers, only those in class FPU_REGS count as
3232 a register operand here. */
3233 return (GET_CODE (op) == REG
3234 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3235 || REGNO_REG_CLASS (REGNO (op)) == FPU_REGS));
3238 /* Return TRUE for valid operands for the rhs of an FPU instruction. */
3241 fpu_rhs_operand (op, mode)
3242 rtx op;
3243 enum machine_mode mode;
3245 if (s_register_operand (op, mode))
3246 return TRUE;
3248 if (GET_MODE (op) != mode && mode != VOIDmode)
3249 return FALSE;
3251 if (GET_CODE (op) == CONST_DOUBLE)
3252 return const_double_rtx_ok_for_fpu (op);
3254 return FALSE;
3258 fpu_add_operand (op, mode)
3259 rtx op;
3260 enum machine_mode mode;
3262 if (s_register_operand (op, mode))
3263 return TRUE;
3265 if (GET_MODE (op) != mode && mode != VOIDmode)
3266 return FALSE;
3268 if (GET_CODE (op) == CONST_DOUBLE)
3269 return (const_double_rtx_ok_for_fpu (op)
3270 || neg_const_double_rtx_ok_for_fpu (op));
3272 return FALSE;
3275 /* Return nonzero if OP is a constant power of two. */
3278 power_of_two_operand (op, mode)
3279 rtx op;
3280 enum machine_mode mode ATTRIBUTE_UNUSED;
3282 if (GET_CODE (op) == CONST_INT)
3284 HOST_WIDE_INT value = INTVAL (op);
3286 return value != 0 && (value & (value - 1)) == 0;
3289 return FALSE;
3292 /* Return TRUE for a valid operand of a DImode operation.
3293 Either: REG, SUBREG, CONST_INT, CONST_DOUBLE or MEM(DImode_address).
3294 Note that this disallows MEM(REG+REG), but allows
3295 MEM(PRE/POST_INC/DEC(REG)). */
3298 di_operand (op, mode)
3299 rtx op;
3300 enum machine_mode mode;
3302 if (s_register_operand (op, mode))
3303 return TRUE;
3305 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
3306 return FALSE;
3308 if (GET_CODE (op) == SUBREG)
3309 op = SUBREG_REG (op);
3311 switch (GET_CODE (op))
3313 case CONST_DOUBLE:
3314 case CONST_INT:
3315 return TRUE;
3317 case MEM:
3318 return memory_address_p (DImode, XEXP (op, 0));
3320 default:
3321 return FALSE;
3325 /* Like di_operand, but don't accept constants. */
3328 nonimmediate_di_operand (op, mode)
3329 rtx op;
3330 enum machine_mode mode;
3332 if (s_register_operand (op, mode))
3333 return TRUE;
3335 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
3336 return FALSE;
3338 if (GET_CODE (op) == SUBREG)
3339 op = SUBREG_REG (op);
3341 if (GET_CODE (op) == MEM)
3342 return memory_address_p (DImode, XEXP (op, 0));
3344 return FALSE;
3347 /* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
3348 Either: REG, SUBREG, CONST_DOUBLE or MEM(DFmode_address).
3349 Note that this disallows MEM(REG+REG), but allows
3350 MEM(PRE/POST_INC/DEC(REG)). */
3353 soft_df_operand (op, mode)
3354 rtx op;
3355 enum machine_mode mode;
3357 if (s_register_operand (op, mode))
3358 return TRUE;
3360 if (mode != VOIDmode && GET_MODE (op) != mode)
3361 return FALSE;
3363 if (GET_CODE (op) == SUBREG && CONSTANT_P (SUBREG_REG (op)))
3364 return FALSE;
3366 if (GET_CODE (op) == SUBREG)
3367 op = SUBREG_REG (op);
3369 switch (GET_CODE (op))
3371 case CONST_DOUBLE:
3372 return TRUE;
3374 case MEM:
3375 return memory_address_p (DFmode, XEXP (op, 0));
3377 default:
3378 return FALSE;
3382 /* Like soft_df_operand, but don't accept constants. */
3385 nonimmediate_soft_df_operand (op, mode)
3386 rtx op;
3387 enum machine_mode mode;
3389 if (s_register_operand (op, mode))
3390 return TRUE;
3392 if (mode != VOIDmode && GET_MODE (op) != mode)
3393 return FALSE;
3395 if (GET_CODE (op) == SUBREG)
3396 op = SUBREG_REG (op);
3398 if (GET_CODE (op) == MEM)
3399 return memory_address_p (DFmode, XEXP (op, 0));
3400 return FALSE;
3403 /* Return TRUE for valid index operands. */
3406 index_operand (op, mode)
3407 rtx op;
3408 enum machine_mode mode;
3410 return (s_register_operand (op, mode)
3411 || (immediate_operand (op, mode)
3412 && (GET_CODE (op) != CONST_INT
3413 || (INTVAL (op) < 4096 && INTVAL (op) > -4096))));
3416 /* Return TRUE for valid shifts by a constant. This also accepts any
3417 power of two on the (somewhat overly relaxed) assumption that the
3418 shift operator in this case was a mult. */
3421 const_shift_operand (op, mode)
3422 rtx op;
3423 enum machine_mode mode;
3425 return (power_of_two_operand (op, mode)
3426 || (immediate_operand (op, mode)
3427 && (GET_CODE (op) != CONST_INT
3428 || (INTVAL (op) < 32 && INTVAL (op) > 0))));
3431 /* Return TRUE for arithmetic operators which can be combined with a multiply
3432 (shift). */
3435 shiftable_operator (x, mode)
3436 rtx x;
3437 enum machine_mode mode;
3439 enum rtx_code code;
3441 if (GET_MODE (x) != mode)
3442 return FALSE;
3444 code = GET_CODE (x);
3446 return (code == PLUS || code == MINUS
3447 || code == IOR || code == XOR || code == AND);
3450 /* Return TRUE for binary logical operators. */
3453 logical_binary_operator (x, mode)
3454 rtx x;
3455 enum machine_mode mode;
3457 enum rtx_code code;
3459 if (GET_MODE (x) != mode)
3460 return FALSE;
3462 code = GET_CODE (x);
3464 return (code == IOR || code == XOR || code == AND);
3467 /* Return TRUE for shift operators. */
3470 shift_operator (x, mode)
3471 rtx x;
3472 enum machine_mode mode;
3474 enum rtx_code code;
3476 if (GET_MODE (x) != mode)
3477 return FALSE;
3479 code = GET_CODE (x);
3481 if (code == MULT)
3482 return power_of_two_operand (XEXP (x, 1), mode);
3484 return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
3485 || code == ROTATERT);
3488 /* Return TRUE if x is EQ or NE. */
3491 equality_operator (x, mode)
3492 rtx x;
3493 enum machine_mode mode ATTRIBUTE_UNUSED;
3495 return GET_CODE (x) == EQ || GET_CODE (x) == NE;
3498 /* Return TRUE if x is a comparison operator other than LTGT or UNEQ. */
3501 arm_comparison_operator (x, mode)
3502 rtx x;
3503 enum machine_mode mode;
3505 return (comparison_operator (x, mode)
3506 && GET_CODE (x) != LTGT
3507 && GET_CODE (x) != UNEQ);
3510 /* Return TRUE for SMIN SMAX UMIN UMAX operators. */
3513 minmax_operator (x, mode)
3514 rtx x;
3515 enum machine_mode mode;
3517 enum rtx_code code = GET_CODE (x);
3519 if (GET_MODE (x) != mode)
3520 return FALSE;
3522 return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
3525 /* Return TRUE if this is the condition code register; if we aren't
3526 given a mode, accept any register whose mode is in class MODE_CC. */
3529 cc_register (x, mode)
3530 rtx x;
3531 enum machine_mode mode;
3533 if (mode == VOIDmode)
3535 mode = GET_MODE (x);
3537 if (GET_MODE_CLASS (mode) != MODE_CC)
3538 return FALSE;
3541 if ( GET_MODE (x) == mode
3542 && GET_CODE (x) == REG
3543 && REGNO (x) == CC_REGNUM)
3544 return TRUE;
3546 return FALSE;
3549 /* Return TRUE if this is the condition code register; if we aren't
3550 given a mode, accept any register whose mode is in class MODE_CC and
3551 indicates a dominance expression. */
3554 dominant_cc_register (x, mode)
3555 rtx x;
3556 enum machine_mode mode;
3558 if (mode == VOIDmode)
3560 mode = GET_MODE (x);
3562 if (GET_MODE_CLASS (mode) != MODE_CC)
3563 return FALSE;
3566 if ( mode != CC_DNEmode && mode != CC_DEQmode
3567 && mode != CC_DLEmode && mode != CC_DLTmode
3568 && mode != CC_DGEmode && mode != CC_DGTmode
3569 && mode != CC_DLEUmode && mode != CC_DLTUmode
3570 && mode != CC_DGEUmode && mode != CC_DGTUmode)
3571 return FALSE;
3573 return cc_register (x, mode);
3576 /* Return TRUE if X references a SYMBOL_REF. */
3579 symbol_mentioned_p (x)
3580 rtx x;
3582 const char * fmt;
3583 int i;
3585 if (GET_CODE (x) == SYMBOL_REF)
3586 return 1;
3588 fmt = GET_RTX_FORMAT (GET_CODE (x));
3590 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3592 if (fmt[i] == 'E')
3594 int j;
3596 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3597 if (symbol_mentioned_p (XVECEXP (x, i, j)))
3598 return 1;
3600 else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
3601 return 1;
3604 return 0;
3607 /* Return TRUE if X references a LABEL_REF. */
3610 label_mentioned_p (x)
3611 rtx x;
3613 const char * fmt;
3614 int i;
3616 if (GET_CODE (x) == LABEL_REF)
3617 return 1;
3619 fmt = GET_RTX_FORMAT (GET_CODE (x));
3620 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3622 if (fmt[i] == 'E')
3624 int j;
3626 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3627 if (label_mentioned_p (XVECEXP (x, i, j)))
3628 return 1;
3630 else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
3631 return 1;
3634 return 0;
3637 enum rtx_code
3638 minmax_code (x)
3639 rtx x;
3641 enum rtx_code code = GET_CODE (x);
3643 if (code == SMAX)
3644 return GE;
3645 else if (code == SMIN)
3646 return LE;
3647 else if (code == UMIN)
3648 return LEU;
3649 else if (code == UMAX)
3650 return GEU;
3652 abort ();
3655 /* Return 1 if memory locations are adjacent. */
3658 adjacent_mem_locations (a, b)
3659 rtx a, b;
3661 if ((GET_CODE (XEXP (a, 0)) == REG
3662 || (GET_CODE (XEXP (a, 0)) == PLUS
3663 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
3664 && (GET_CODE (XEXP (b, 0)) == REG
3665 || (GET_CODE (XEXP (b, 0)) == PLUS
3666 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
3668 int val0 = 0, val1 = 0;
3669 int reg0, reg1;
3671 if (GET_CODE (XEXP (a, 0)) == PLUS)
3673 reg0 = REGNO (XEXP (XEXP (a, 0), 0));
3674 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
3676 else
3677 reg0 = REGNO (XEXP (a, 0));
3679 if (GET_CODE (XEXP (b, 0)) == PLUS)
3681 reg1 = REGNO (XEXP (XEXP (b, 0), 0));
3682 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
3684 else
3685 reg1 = REGNO (XEXP (b, 0));
3687 return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
3689 return 0;
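/* e.g. (mem (reg X)) and (mem (plus (reg X) (const_int 4))) are
   adjacent, in either order; different base registers, or offsets
   more than 4 apart, are not.  */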
3692 /* Return 1 if OP is a load multiple operation. It is known to be
3693 parallel and the first section will be tested. */
3696 load_multiple_operation (op, mode)
3697 rtx op;
3698 enum machine_mode mode ATTRIBUTE_UNUSED;
3700 HOST_WIDE_INT count = XVECLEN (op, 0);
3701 int dest_regno;
3702 rtx src_addr;
3703 HOST_WIDE_INT i = 1, base = 0;
3704 rtx elt;
3706 if (count <= 1
3707 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
3708 return 0;
3710 /* Check to see if this might be a write-back. */
3711 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
3713 i++;
3714 base = 1;
3716 /* Now check it more carefully. */
3717 if (GET_CODE (SET_DEST (elt)) != REG
3718 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
3719 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
3720 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
3721 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
3722 return 0;
3725 /* Perform a quick check so we don't blow up below. */
3726 if (count <= i
3727 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
3728 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
3729 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
3730 return 0;
3732 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
3733 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);
3735 for (; i < count; i++)
3737 elt = XVECEXP (op, 0, i);
3739 if (GET_CODE (elt) != SET
3740 || GET_CODE (SET_DEST (elt)) != REG
3741 || GET_MODE (SET_DEST (elt)) != SImode
3742 || REGNO (SET_DEST (elt)) != (unsigned int)(dest_regno + i - base)
3743 || GET_CODE (SET_SRC (elt)) != MEM
3744 || GET_MODE (SET_SRC (elt)) != SImode
3745 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
3746 || !rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
3747 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
3748 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
3749 return 0;
3752 return 1;
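/* A PARALLEL that matches, sketched in RTL:

       (parallel
         [(set (reg:SI 0) (mem:SI (reg:SI 4)))
          (set (reg:SI 1) (mem:SI (plus:SI (reg:SI 4) (const_int 4))))])

   Adding a leading (set (reg:SI 4) (plus:SI (reg:SI 4) (const_int 8)))
   element makes it the write-back form.  */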
3755 /* Return 1 if OP is a store multiple operation. It is known to be
3756 parallel and the first section will be tested. */
3759 store_multiple_operation (op, mode)
3760 rtx op;
3761 enum machine_mode mode ATTRIBUTE_UNUSED;
3763 HOST_WIDE_INT count = XVECLEN (op, 0);
3764 int src_regno;
3765 rtx dest_addr;
3766 HOST_WIDE_INT i = 1, base = 0;
3767 rtx elt;
3769 if (count <= 1
3770 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
3771 return 0;
3773 /* Check to see if this might be a write-back. */
3774 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
3776 i++;
3777 base = 1;
3779 /* Now check it more carefully. */
3780 if (GET_CODE (SET_DEST (elt)) != REG
3781 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
3782 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
3783 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
3784 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
3785 return 0;
3788 /* Perform a quick check so we don't blow up below. */
3789 if (count <= i
3790 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
3791 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
3792 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
3793 return 0;
3795 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
3796 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);
3798 for (; i < count; i++)
3800 elt = XVECEXP (op, 0, i);
3802 if (GET_CODE (elt) != SET
3803 || GET_CODE (SET_SRC (elt)) != REG
3804 || GET_MODE (SET_SRC (elt)) != SImode
3805 || REGNO (SET_SRC (elt)) != (unsigned int)(src_regno + i - base)
3806 || GET_CODE (SET_DEST (elt)) != MEM
3807 || GET_MODE (SET_DEST (elt)) != SImode
3808 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
3809 || !rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
3810 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
3811 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
3812 return 0;
3815 return 1;
3819 load_multiple_sequence (operands, nops, regs, base, load_offset)
3820 rtx * operands;
3821 int nops;
3822 int * regs;
3823 int * base;
3824 HOST_WIDE_INT * load_offset;
3826 int unsorted_regs[4];
3827 HOST_WIDE_INT unsorted_offsets[4];
3828 int order[4];
3829 int base_reg = -1;
3830 int i;
3832 /* Can only handle 2, 3, or 4 insns at present,
3833 though could be easily extended if required. */
3834 if (nops < 2 || nops > 4)
3835 abort ();
3837 /* Loop over the operands and check that the memory references are
3838 suitable (i.e. immediate offsets from the same base register). At
3839 the same time, extract the target register, and the memory
3840 offsets. */
3841 for (i = 0; i < nops; i++)
3843 rtx reg;
3844 rtx offset;
3846 /* Convert a subreg of a mem into the mem itself. */
3847 if (GET_CODE (operands[nops + i]) == SUBREG)
3848 operands[nops + i] = alter_subreg (operands + (nops + i));
3850 if (GET_CODE (operands[nops + i]) != MEM)
3851 abort ();
3853 /* Don't reorder volatile memory references; it doesn't seem worth
3854 looking for the case where the order is ok anyway. */
3855 if (MEM_VOLATILE_P (operands[nops + i]))
3856 return 0;
3858 offset = const0_rtx;
3860 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
3861 || (GET_CODE (reg) == SUBREG
3862 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3863 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
3864 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
3865 == REG)
3866 || (GET_CODE (reg) == SUBREG
3867 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3868 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
3869 == CONST_INT)))
3871 if (i == 0)
3873 base_reg = REGNO (reg);
3874 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
3875 ? REGNO (operands[i])
3876 : REGNO (SUBREG_REG (operands[i])));
3877 order[0] = 0;
3879 else
3881 if (base_reg != (int) REGNO (reg))
3882 /* Not addressed from the same base register. */
3883 return 0;
3885 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
3886 ? REGNO (operands[i])
3887 : REGNO (SUBREG_REG (operands[i])));
3888 if (unsorted_regs[i] < unsorted_regs[order[0]])
3889 order[0] = i;
3892 /* If it isn't an integer register, or if it overwrites the
3893 base register but isn't the last insn in the list, then
3894 we can't do this. */
3895 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14
3896 || (i != nops - 1 && unsorted_regs[i] == base_reg))
3897 return 0;
3899 unsorted_offsets[i] = INTVAL (offset);
3901 else
3902 /* Not a suitable memory address. */
3903 return 0;
3906 /* All the useful information has now been extracted from the
3907 operands into unsorted_regs and unsorted_offsets; additionally,
3908 order[0] has been set to the lowest numbered register in the
3909 list. Sort the registers into order, and check that the memory
3910 offsets are ascending and adjacent. */
3912 for (i = 1; i < nops; i++)
3914 int j;
3916 order[i] = order[i - 1];
3917 for (j = 0; j < nops; j++)
3918 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
3919 && (order[i] == order[i - 1]
3920 || unsorted_regs[j] < unsorted_regs[order[i]]))
3921 order[i] = j;
3923 /* Have we found a suitable register? If not, one must be used more
3924 than once. */
3925 if (order[i] == order[i - 1])
3926 return 0;
3928 /* Are the memory addresses adjacent and ascending? */
3929 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
3930 return 0;
3933 if (base)
3935 *base = base_reg;
3937 for (i = 0; i < nops; i++)
3938 regs[i] = unsorted_regs[order[i]];
3940 *load_offset = unsorted_offsets[order[0]];
3943 if (unsorted_offsets[order[0]] == 0)
3944 return 1; /* ldmia */
3946 if (unsorted_offsets[order[0]] == 4)
3947 return 2; /* ldmib */
3949 if (unsorted_offsets[order[nops - 1]] == 0)
3950 return 3; /* ldmda */
3952 if (unsorted_offsets[order[nops - 1]] == -4)
3953 return 4; /* ldmdb */
3955 /* For ARM8,9 & StrongARM, 2 ldr instructions are faster than an ldm
3956 if the offset isn't small enough. The reason 2 ldrs are faster
3957 is because these ARMs are able to do more than one cache access
3958 in a single cycle. The ARM9 and StrongARM have Harvard caches,
3959 whilst the ARM8 has a double bandwidth cache. This means that
3960 these cores can do both an instruction fetch and a data fetch in
3961 a single cycle, so the trick of calculating the address into a
3962 scratch register (one of the result regs) and then doing a load
3963 multiple actually becomes slower (and no smaller in code size).
3964 That is the transformation
3966 ldr rd1, [rbase + offset]
3967 ldr rd2, [rbase + offset + 4]
to
3971 add rd1, rbase, offset
3972 ldmia rd1, {rd1, rd2}
3974 produces worse code -- '3 cycles + any stalls on rd2' instead of
3975 '2 cycles + any stalls on rd2'. On ARMs with only one cache
3976 access per cycle, the first sequence could never complete in less
3977 than 6 cycles, whereas the ldm sequence would only take 5 and
3978 would make better use of sequential accesses if not hitting the
3979 cache.
3981 We cheat here and test 'arm_ld_sched' which we currently know to
3982 only be true for the ARM8, ARM9 and StrongARM. If this ever
3983 changes, then the test below needs to be reworked. */
3984 if (nops == 2 && arm_ld_sched)
3985 return 0;
3987 /* Can't do it without setting up the offset, so only do this if it
3988 takes no more than one insn. */
3989 return (const_ok_for_arm (unsorted_offsets[order[0]])
3990 || const_ok_for_arm (-unsorted_offsets[order[0]])) ? 5 : 0;
3993 const char *
3994 emit_ldm_seq (operands, nops)
3995 rtx * operands;
3996 int nops;
3998 int regs[4];
3999 int base_reg;
4000 HOST_WIDE_INT offset;
4001 char buf[100];
4002 int i;
4004 switch (load_multiple_sequence (operands, nops, regs, &base_reg, &offset))
4006 case 1:
4007 strcpy (buf, "ldm%?ia\t");
4008 break;
4010 case 2:
4011 strcpy (buf, "ldm%?ib\t");
4012 break;
4014 case 3:
4015 strcpy (buf, "ldm%?da\t");
4016 break;
4018 case 4:
4019 strcpy (buf, "ldm%?db\t");
4020 break;
4022 case 5:
4023 if (offset >= 0)
4024 sprintf (buf, "add%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
4025 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
4026 (long) offset);
4027 else
4028 sprintf (buf, "sub%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
4029 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
4030 (long) -offset);
4031 output_asm_insn (buf, operands);
4032 base_reg = regs[0];
4033 strcpy (buf, "ldm%?ia\t");
4034 break;
4036 default:
4037 abort ();
4040 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
4041 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
4043 for (i = 1; i < nops; i++)
4044 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
4045 reg_names[regs[i]]);
4047 strcat (buf, "}\t%@ phole ldm");
4049 output_asm_insn (buf, operands);
4050 return "";
4054 store_multiple_sequence (operands, nops, regs, base, load_offset)
4055 rtx * operands;
4056 int nops;
4057 int * regs;
4058 int * base;
4059 HOST_WIDE_INT * load_offset;
4061 int unsorted_regs[4];
4062 HOST_WIDE_INT unsorted_offsets[4];
4063 int order[4];
4064 int base_reg = -1;
4065 int i;
4067 /* Can only handle 2, 3, or 4 insns at present, though could be easily
4068 extended if required. */
4069 if (nops < 2 || nops > 4)
4070 abort ();
4072 /* Loop over the operands and check that the memory references are
4073 suitable (i.e. immediate offsets from the same base register). At
4074 the same time, extract the target register, and the memory
4075 offsets. */
4076 for (i = 0; i < nops; i++)
4078 rtx reg;
4079 rtx offset;
4081 /* Convert a subreg of a mem into the mem itself. */
4082 if (GET_CODE (operands[nops + i]) == SUBREG)
4083 operands[nops + i] = alter_subreg (operands + (nops + i));
4085 if (GET_CODE (operands[nops + i]) != MEM)
4086 abort ();
4088 /* Don't reorder volatile memory references; it doesn't seem worth
4089 looking for the case where the order is ok anyway. */
4090 if (MEM_VOLATILE_P (operands[nops + i]))
4091 return 0;
4093 offset = const0_rtx;
4095 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
4096 || (GET_CODE (reg) == SUBREG
4097 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4098 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
4099 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
4100 == REG)
4101 || (GET_CODE (reg) == SUBREG
4102 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4103 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
4104 == CONST_INT)))
4106 if (i == 0)
4108 base_reg = REGNO (reg);
4109 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
4110 ? REGNO (operands[i])
4111 : REGNO (SUBREG_REG (operands[i])));
4112 order[0] = 0;
4114 else
4116 if (base_reg != (int) REGNO (reg))
4117 /* Not addressed from the same base register. */
4118 return 0;
4120 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
4121 ? REGNO (operands[i])
4122 : REGNO (SUBREG_REG (operands[i])));
4123 if (unsorted_regs[i] < unsorted_regs[order[0]])
4124 order[0] = i;
4127 /* If it isn't an integer register, then we can't do this. */
4128 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14)
4129 return 0;
4131 unsorted_offsets[i] = INTVAL (offset);
4133 else
4134 /* Not a suitable memory address. */
4135 return 0;
4138 /* All the useful information has now been extracted from the
4139 operands into unsorted_regs and unsorted_offsets; additionally,
4140 order[0] has been set to the lowest numbered register in the
4141 list. Sort the registers into order, and check that the memory
4142 offsets are ascending and adjacent. */
4144 for (i = 1; i < nops; i++)
4146 int j;
4148 order[i] = order[i - 1];
4149 for (j = 0; j < nops; j++)
4150 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
4151 && (order[i] == order[i - 1]
4152 || unsorted_regs[j] < unsorted_regs[order[i]]))
4153 order[i] = j;
4155 /* Have we found a suitable register? If not, one must be used more
4156 than once. */
4157 if (order[i] == order[i - 1])
4158 return 0;
4160 /* Is the memory address adjacent and ascending? */
4161 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
4162 return 0;
4165 if (base)
4167 *base = base_reg;
4169 for (i = 0; i < nops; i++)
4170 regs[i] = unsorted_regs[order[i]];
4172 *load_offset = unsorted_offsets[order[0]];
4175 if (unsorted_offsets[order[0]] == 0)
4176 return 1; /* stmia */
4178 if (unsorted_offsets[order[0]] == 4)
4179 return 2; /* stmib */
4181 if (unsorted_offsets[order[nops - 1]] == 0)
4182 return 3; /* stmda */
4184 if (unsorted_offsets[order[nops - 1]] == -4)
4185 return 4; /* stmdb */
4187 return 0;
4190 const char *
4191 emit_stm_seq (operands, nops)
4192 rtx * operands;
4193 int nops;
4195 int regs[4];
4196 int base_reg;
4197 HOST_WIDE_INT offset;
4198 char buf[100];
4199 int i;
4201 switch (store_multiple_sequence (operands, nops, regs, &base_reg, &offset))
4203 case 1:
4204 strcpy (buf, "stm%?ia\t");
4205 break;
4207 case 2:
4208 strcpy (buf, "stm%?ib\t");
4209 break;
4211 case 3:
4212 strcpy (buf, "stm%?da\t");
4213 break;
4215 case 4:
4216 strcpy (buf, "stm%?db\t");
4217 break;
4219 default:
4220 abort ();
4223 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
4224 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
4226 for (i = 1; i < nops; i++)
4227 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
4228 reg_names[regs[i]]);
4230 strcat (buf, "}\t%@ phole stm");
4232 output_asm_insn (buf, operands);
4233 return "";
4237 multi_register_push (op, mode)
4238 rtx op;
4239 enum machine_mode mode ATTRIBUTE_UNUSED;
4241 if (GET_CODE (op) != PARALLEL
4242 || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
4243 || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
4244 || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != UNSPEC_PUSH_MULT))
4245 return 0;
4247 return 1;
4250 /* Routines for use in generating RTL. */
4253 arm_gen_load_multiple (base_regno, count, from, up, write_back, unchanging_p,
4254 in_struct_p, scalar_p)
4255 int base_regno;
4256 int count;
4257 rtx from;
4258 int up;
4259 int write_back;
4260 int unchanging_p;
4261 int in_struct_p;
4262 int scalar_p;
4264 int i = 0, j;
4265 rtx result;
4266 int sign = up ? 1 : -1;
4267 rtx mem;
4269 /* XScale has load-store double instructions, but they have stricter
4270 alignment requirements than load-store multiple, so we cannot
4271 use them.
4273 For XScale ldm requires 2 + NREGS cycles to complete and blocks
4274 the pipeline until completion.
4276 NREGS CYCLES
4277 1 3
4278 2 4
4279 3 5
4280 4 6
4282 An ldr instruction takes 1-3 cycles, but does not block the
4283 pipeline.
4285 NREGS CYCLES
4286 1 1-3
4287 2 2-6
4288 3 3-9
4289 4 4-12
4291 Best case ldr will always win. However, the more ldr instructions
4292 we issue, the less likely we are to be able to schedule them well.
4293 Using ldr instructions also increases code size.
4295 As a compromise, we use ldr for counts of 1 or 2 regs, and ldm
4296 for counts of 3 or 4 regs. */
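/* For example, with two registers an ldm costs 4 cycles and blocks the
   pipeline, whereas two ldrs cost 2-6 cycles but can overlap other
   work; with four registers the ldm's 6 cycles beat the four ldrs'
   worst case of 12.  */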
4297 if (arm_is_xscale && count <= 2 && ! optimize_size)
4299 rtx seq;
4301 start_sequence ();
4303 for (i = 0; i < count; i++)
4305 mem = gen_rtx_MEM (SImode, plus_constant (from, i * 4 * sign));
4306 RTX_UNCHANGING_P (mem) = unchanging_p;
4307 MEM_IN_STRUCT_P (mem) = in_struct_p;
4308 MEM_SCALAR_P (mem) = scalar_p;
4309 emit_move_insn (gen_rtx_REG (SImode, base_regno + i), mem);
4312 if (write_back)
4313 emit_move_insn (from, plus_constant (from, count * 4 * sign));
4315 seq = gen_sequence ();
4316 end_sequence ();
4318 return seq;
4321 result = gen_rtx_PARALLEL (VOIDmode,
4322 rtvec_alloc (count + (write_back ? 1 : 0)));
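  /* As an illustration, for base_regno == 0, count == 2 and write_back
     the vector built below is roughly

       (parallel [(set from (plus from (const_int 8)))
                  (set (reg:SI 0) (mem:SI from))
                  (set (reg:SI 1) (mem:SI (plus from (const_int 4))))])

     where FROM stands for the base address rtx.  */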
4323 if (write_back)
4325 XVECEXP (result, 0, 0)
4326 = gen_rtx_SET (GET_MODE (from), from,
4327 plus_constant (from, count * 4 * sign));
4328 i = 1;
4329 count++;
4332 for (j = 0; i < count; i++, j++)
4334 mem = gen_rtx_MEM (SImode, plus_constant (from, j * 4 * sign));
4335 RTX_UNCHANGING_P (mem) = unchanging_p;
4336 MEM_IN_STRUCT_P (mem) = in_struct_p;
4337 MEM_SCALAR_P (mem) = scalar_p;
4338 XVECEXP (result, 0, i)
4339 = gen_rtx_SET (VOIDmode, gen_rtx_REG (SImode, base_regno + j), mem);
4342 return result;
4346 arm_gen_store_multiple (base_regno, count, to, up, write_back, unchanging_p,
4347 in_struct_p, scalar_p)
4348 int base_regno;
4349 int count;
4350 rtx to;
4351 int up;
4352 int write_back;
4353 int unchanging_p;
4354 int in_struct_p;
4355 int scalar_p;
4357 int i = 0, j;
4358 rtx result;
4359 int sign = up ? 1 : -1;
4360 rtx mem;
4362 /* See arm_gen_load_multiple for discussion of
4363 the pros/cons of ldm/stm usage for XScale. */
4364 if (arm_is_xscale && count <= 2 && ! optimize_size)
4366 rtx seq;
4368 start_sequence ();
4370 for (i = 0; i < count; i++)
4372 mem = gen_rtx_MEM (SImode, plus_constant (to, i * 4 * sign));
4373 RTX_UNCHANGING_P (mem) = unchanging_p;
4374 MEM_IN_STRUCT_P (mem) = in_struct_p;
4375 MEM_SCALAR_P (mem) = scalar_p;
4376 emit_move_insn (mem, gen_rtx_REG (SImode, base_regno + i));
4379 if (write_back)
4380 emit_move_insn (to, plus_constant (to, count * 4 * sign));
4382 seq = gen_sequence ();
4383 end_sequence ();
4385 return seq;
4388 result = gen_rtx_PARALLEL (VOIDmode,
4389 rtvec_alloc (count + (write_back ? 1 : 0)));
4390 if (write_back)
4392 XVECEXP (result, 0, 0)
4393 = gen_rtx_SET (GET_MODE (to), to,
4394 plus_constant (to, count * 4 * sign));
4395 i = 1;
4396 count++;
4399 for (j = 0; i < count; i++, j++)
4401 mem = gen_rtx_MEM (SImode, plus_constant (to, j * 4 * sign));
4402 RTX_UNCHANGING_P (mem) = unchanging_p;
4403 MEM_IN_STRUCT_P (mem) = in_struct_p;
4404 MEM_SCALAR_P (mem) = scalar_p;
4406 XVECEXP (result, 0, i)
4407 = gen_rtx_SET (VOIDmode, mem, gen_rtx_REG (SImode, base_regno + j));
4410 return result;
4414 arm_gen_movstrqi (operands)
4415 rtx * operands;
4417 HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
4418 int i;
4419 rtx src, dst;
4420 rtx st_src, st_dst, fin_src, fin_dst;
4421 rtx part_bytes_reg = NULL;
4422 rtx mem;
4423 int dst_unchanging_p, dst_in_struct_p, src_unchanging_p, src_in_struct_p;
4424 int dst_scalar_p, src_scalar_p;
4426 if (GET_CODE (operands[2]) != CONST_INT
4427 || GET_CODE (operands[3]) != CONST_INT
4428 || INTVAL (operands[2]) > 64
4429 || INTVAL (operands[3]) & 3)
4430 return 0;
4432 st_dst = XEXP (operands[0], 0);
4433 st_src = XEXP (operands[1], 0);
4435 dst_unchanging_p = RTX_UNCHANGING_P (operands[0]);
4436 dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
4437 dst_scalar_p = MEM_SCALAR_P (operands[0]);
4438 src_unchanging_p = RTX_UNCHANGING_P (operands[1]);
4439 src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);
4440 src_scalar_p = MEM_SCALAR_P (operands[1]);
4442 fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
4443 fin_src = src = copy_to_mode_reg (SImode, st_src);
4445 in_words_to_go = NUM_INTS (INTVAL (operands[2]));
4446 out_words_to_go = INTVAL (operands[2]) / 4;
4447 last_bytes = INTVAL (operands[2]) & 3;
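  /* For example, a 10-byte copy gives in_words_to_go = 3,
     out_words_to_go = 2 and last_bytes = 2: two whole words are moved
     by the load/store-multiple loop below and the trailing halfword by
     the byte/halfword code at the end.  */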
4449 if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
4450 part_bytes_reg = gen_rtx_REG (SImode, (in_words_to_go - 1) & 3);
4452 for (i = 0; in_words_to_go >= 2; i += 4)
4454 if (in_words_to_go > 4)
4455 emit_insn (arm_gen_load_multiple (0, 4, src, TRUE, TRUE,
4456 src_unchanging_p,
4457 src_in_struct_p,
4458 src_scalar_p));
4459 else
4460 emit_insn (arm_gen_load_multiple (0, in_words_to_go, src, TRUE,
4461 FALSE, src_unchanging_p,
4462 src_in_struct_p, src_scalar_p));
4464 if (out_words_to_go)
4466 if (out_words_to_go > 4)
4467 emit_insn (arm_gen_store_multiple (0, 4, dst, TRUE, TRUE,
4468 dst_unchanging_p,
4469 dst_in_struct_p,
4470 dst_scalar_p));
4471 else if (out_words_to_go != 1)
4472 emit_insn (arm_gen_store_multiple (0, out_words_to_go,
4473 dst, TRUE,
4474 (last_bytes == 0
4475 ? FALSE : TRUE),
4476 dst_unchanging_p,
4477 dst_in_struct_p,
4478 dst_scalar_p));
4479 else
4481 mem = gen_rtx_MEM (SImode, dst);
4482 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4483 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4484 MEM_SCALAR_P (mem) = dst_scalar_p;
4485 emit_move_insn (mem, gen_rtx_REG (SImode, 0));
4486 if (last_bytes != 0)
4487 emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
4491 in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
4492 out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
4495 /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do. */
4496 if (out_words_to_go)
4498 rtx sreg;
4500 mem = gen_rtx_MEM (SImode, src);
4501 RTX_UNCHANGING_P (mem) = src_unchanging_p;
4502 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
4503 MEM_SCALAR_P (mem) = src_scalar_p;
4504 emit_move_insn (sreg = gen_reg_rtx (SImode), mem);
4505 emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));
4507 mem = gen_rtx_MEM (SImode, dst);
4508 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4509 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4510 MEM_SCALAR_P (mem) = dst_scalar_p;
4511 emit_move_insn (mem, sreg);
4512 emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
4513 in_words_to_go--;
4515 if (in_words_to_go) /* Sanity check */
4516 abort ();
4519 if (in_words_to_go)
4521 if (in_words_to_go < 0)
4522 abort ();
4524 mem = gen_rtx_MEM (SImode, src);
4525 RTX_UNCHANGING_P (mem) = src_unchanging_p;
4526 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
4527 MEM_SCALAR_P (mem) = src_scalar_p;
4528 part_bytes_reg = copy_to_mode_reg (SImode, mem);
4531 if (last_bytes && part_bytes_reg == NULL)
4532 abort ();
4534 if (BYTES_BIG_ENDIAN && last_bytes)
4536 rtx tmp = gen_reg_rtx (SImode);
4538 /* The bytes we want are in the top end of the word. */
4539 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
4540 GEN_INT (8 * (4 - last_bytes))));
4541 part_bytes_reg = tmp;
4543 while (last_bytes)
4545 mem = gen_rtx_MEM (QImode, plus_constant (dst, last_bytes - 1));
4546 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4547 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4548 MEM_SCALAR_P (mem) = dst_scalar_p;
4549 emit_move_insn (mem, gen_rtx_SUBREG (QImode, part_bytes_reg, 0));
4551 if (--last_bytes)
4553 tmp = gen_reg_rtx (SImode);
4554 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
4555 part_bytes_reg = tmp;
4560 else
4562 if (last_bytes > 1)
4564 mem = gen_rtx_MEM (HImode, dst);
4565 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4566 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4567 MEM_SCALAR_P (mem) = dst_scalar_p;
4568 emit_move_insn (mem, gen_rtx_SUBREG (HImode, part_bytes_reg, 0));
4569 last_bytes -= 2;
4570 if (last_bytes)
4572 rtx tmp = gen_reg_rtx (SImode);
4574 emit_insn (gen_addsi3 (dst, dst, GEN_INT (2)));
4575 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (16)));
4576 part_bytes_reg = tmp;
4580 if (last_bytes)
4582 mem = gen_rtx_MEM (QImode, dst);
4583 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4584 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4585 MEM_SCALAR_P (mem) = dst_scalar_p;
4586 emit_move_insn (mem, gen_rtx_SUBREG (QImode, part_bytes_reg, 0));
4590 return 1;
4593 /* Generate a memory reference for a half word, such that it will be loaded
4594 into the top 16 bits of the word. We can assume that the address is
4595 known to be alignable and of the form reg, or plus (reg, const). */
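/* For example, on a little-endian target a halfword at (reg + 2) already
   occupies the top 16 bits of the aligned SImode word at reg, so the
   plain word load is returned unrotated; a halfword at offset 0 gets a
   ROTATE by 16 to move it into the top half.  */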
4598 arm_gen_rotated_half_load (memref)
4599 rtx memref;
4601 HOST_WIDE_INT offset = 0;
4602 rtx base = XEXP (memref, 0);
4604 if (GET_CODE (base) == PLUS)
4606 offset = INTVAL (XEXP (base, 1));
4607 base = XEXP (base, 0);
4610 /* If we aren't allowed to generate unaligned addresses, then fail. */
4611 if (TARGET_MMU_TRAPS
4612 && ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 0)))
4613 return NULL;
4615 base = gen_rtx_MEM (SImode, plus_constant (base, offset & ~2));
4617 if ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 2))
4618 return base;
4620 return gen_rtx_ROTATE (SImode, base, GEN_INT (16));
4623 /* Select a dominance comparison mode if possible. We support three forms.
4624 COND_OR == 0 => (X && Y)
4625 COND_OR == 1 => ((! X) || Y)
4626 COND_OR == 2 => (X || Y)
4627 If we are unable to support a dominance comparison we return CC mode.
4628 This will then fail to match for the RTL expressions that generate this
4629 call. */
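/* For example, for (a < b && a != b) the LT test dominates the NE test
   (LT true implies NE true), so the pair can be evaluated with a single
   comparison in CC_DLTmode.  */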
4631 static enum machine_mode
4632 select_dominance_cc_mode (x, y, cond_or)
4633 rtx x;
4634 rtx y;
4635 HOST_WIDE_INT cond_or;
4637 enum rtx_code cond1, cond2;
4638 int swapped = 0;
4640 /* Currently we will probably get the wrong result if the individual
4641 comparisons are not simple. This also ensures that it is safe to
4642 reverse a comparison if necessary. */
4643 if ((arm_select_cc_mode (cond1 = GET_CODE (x), XEXP (x, 0), XEXP (x, 1))
4644 != CCmode)
4645 || (arm_select_cc_mode (cond2 = GET_CODE (y), XEXP (y, 0), XEXP (y, 1))
4646 != CCmode))
4647 return CCmode;
4649 /* The if_then_else variant of this tests the second condition if the
4650 first passes, but is true if the first fails. Reverse the first
4651 condition to get a true "inclusive-or" expression. */
4652 if (cond_or == 1)
4653 cond1 = reverse_condition (cond1);
4655 /* If the comparisons are not equal, and one doesn't dominate the other,
4656 then we can't do this. */
4657 if (cond1 != cond2
4658 && !comparison_dominates_p (cond1, cond2)
4659 && (swapped = 1, !comparison_dominates_p (cond2, cond1)))
4660 return CCmode;
4662 if (swapped)
4664 enum rtx_code temp = cond1;
4665 cond1 = cond2;
4666 cond2 = temp;
4669 switch (cond1)
4671 case EQ:
4672 if (cond2 == EQ || !cond_or)
4673 return CC_DEQmode;
4675 switch (cond2)
4677 case LE: return CC_DLEmode;
4678 case LEU: return CC_DLEUmode;
4679 case GE: return CC_DGEmode;
4680 case GEU: return CC_DGEUmode;
4681 default: break;
4684 break;
4686 case LT:
4687 if (cond2 == LT || !cond_or)
4688 return CC_DLTmode;
4689 if (cond2 == LE)
4690 return CC_DLEmode;
4691 if (cond2 == NE)
4692 return CC_DNEmode;
4693 break;
4695 case GT:
4696 if (cond2 == GT || !cond_or)
4697 return CC_DGTmode;
4698 if (cond2 == GE)
4699 return CC_DGEmode;
4700 if (cond2 == NE)
4701 return CC_DNEmode;
4702 break;
4704 case LTU:
4705 if (cond2 == LTU || !cond_or)
4706 return CC_DLTUmode;
4707 if (cond2 == LEU)
4708 return CC_DLEUmode;
4709 if (cond2 == NE)
4710 return CC_DNEmode;
4711 break;
4713 case GTU:
4714 if (cond2 == GTU || !cond_or)
4715 return CC_DGTUmode;
4716 if (cond2 == GEU)
4717 return CC_DGEUmode;
4718 if (cond2 == NE)
4719 return CC_DNEmode;
4720 break;
4722 /* The remaining cases only occur when both comparisons are the
4723 same. */
4724 case NE:
4725 return CC_DNEmode;
4727 case LE:
4728 return CC_DLEmode;
4730 case GE:
4731 return CC_DGEmode;
4733 case LEU:
4734 return CC_DLEUmode;
4736 case GEU:
4737 return CC_DGEUmode;
4739 default:
4740 break;
4743 abort ();
4746 enum machine_mode
4747 arm_select_cc_mode (op, x, y)
4748 enum rtx_code op;
4749 rtx x;
4750 rtx y;
4752 /* All floating point compares return CCFP if it is an equality
4753 comparison, and CCFPE otherwise. */
4754 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
4756 switch (op)
4758 case EQ:
4759 case NE:
4760 case UNORDERED:
4761 case ORDERED:
4762 case UNLT:
4763 case UNLE:
4764 case UNGT:
4765 case UNGE:
4766 case UNEQ:
4767 case LTGT:
4768 return CCFPmode;
4770 case LT:
4771 case LE:
4772 case GT:
4773 case GE:
4774 return CCFPEmode;
4776 default:
4777 abort ();
4781 /* A compare with a shifted operand. Because of canonicalization, the
4782 comparison will have to be swapped when we emit the assembler. */
4783 if (GET_MODE (y) == SImode && GET_CODE (y) == REG
4784 && (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
4785 || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
4786 || GET_CODE (x) == ROTATERT))
4787 return CC_SWPmode;
4789 /* This is a special case that is used by combine to allow a
4790 comparison of a shifted byte load to be split into a zero-extend
4791 followed by a comparison of the shifted integer (only valid for
4792 equalities and unsigned inequalities). */
4793 if (GET_MODE (x) == SImode
4794 && GET_CODE (x) == ASHIFT
4795 && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 24
4796 && GET_CODE (XEXP (x, 0)) == SUBREG
4797 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == MEM
4798 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == QImode
4799 && (op == EQ || op == NE
4800 || op == GEU || op == GTU || op == LTU || op == LEU)
4801 && GET_CODE (y) == CONST_INT)
4802 return CC_Zmode;
4804 /* A construct for a conditional compare, if the false arm contains
4805 0, then both conditions must be true, otherwise either condition
4806 must be true. Not all conditions are possible, so CCmode is
4807 returned if it can't be done. */
4808 if (GET_CODE (x) == IF_THEN_ELSE
4809 && (XEXP (x, 2) == const0_rtx
4810 || XEXP (x, 2) == const1_rtx)
4811 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4812 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
4813 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
4814 INTVAL (XEXP (x, 2)));
4816 /* Alternate canonicalizations of the above. These are somewhat cleaner. */
4817 if (GET_CODE (x) == AND
4818 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4819 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
4820 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1), 0);
4822 if (GET_CODE (x) == IOR
4823 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4824 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
4825 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1), 2);
4827 /* For an operation that sets the condition codes as a side-effect, the
4828 V flag is not set correctly, so we can only use comparisons where
4829 this doesn't matter. (For LT and GE we can use "mi" and "pl"
4830 instead.) */
4831 if (GET_MODE (x) == SImode
4832 && y == const0_rtx
4833 && (op == EQ || op == NE || op == LT || op == GE)
4834 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
4835 || GET_CODE (x) == AND || GET_CODE (x) == IOR
4836 || GET_CODE (x) == XOR || GET_CODE (x) == MULT
4837 || GET_CODE (x) == NOT || GET_CODE (x) == NEG
4838 || GET_CODE (x) == LSHIFTRT
4839 || GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
4840 || GET_CODE (x) == ROTATERT || GET_CODE (x) == ZERO_EXTRACT))
4841 return CC_NOOVmode;
4843 if (GET_MODE (x) == QImode && (op == EQ || op == NE))
4844 return CC_Zmode;
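  /* e.g. combine produces (ltu (plus x y) x) when testing an addition
     for unsigned overflow; only the carry flag is meaningful there,
     hence CC_Cmode below.  */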
4846 if (GET_MODE (x) == SImode && (op == LTU || op == GEU)
4847 && GET_CODE (x) == PLUS
4848 && (rtx_equal_p (XEXP (x, 0), y) || rtx_equal_p (XEXP (x, 1), y)))
4849 return CC_Cmode;
4851 return CCmode;
4854 /* X and Y are two things to compare using CODE. Emit the compare insn and
4855 return the rtx for register 0 in the proper mode. FP means this is a
4856 floating point compare: I don't think that it is needed on the ARM. */
4859 arm_gen_compare_reg (code, x, y)
4860 enum rtx_code code;
4861 rtx x, y;
4863 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
4864 rtx cc_reg = gen_rtx_REG (mode, CC_REGNUM);
4866 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
4867 gen_rtx_COMPARE (mode, x, y)));
4869 return cc_reg;
4872 void
4873 arm_reload_in_hi (operands)
4874 rtx * operands;
4876 rtx ref = operands[1];
4877 rtx base, scratch;
4878 HOST_WIDE_INT offset = 0;
4880 if (GET_CODE (ref) == SUBREG)
4882 offset = SUBREG_BYTE (ref);
4883 ref = SUBREG_REG (ref);
4886 if (GET_CODE (ref) == REG)
4888 /* We have a pseudo which has been spilt onto the stack; there
4889 are two cases here: the first where there is a simple
4890 stack-slot replacement and a second where the stack-slot is
4891 out of range, or is used as a subreg. */
4892 if (reg_equiv_mem[REGNO (ref)])
4894 ref = reg_equiv_mem[REGNO (ref)];
4895 base = find_replacement (&XEXP (ref, 0));
4897 else
4898 /* The slot is out of range, or was dressed up in a SUBREG. */
4899 base = reg_equiv_address[REGNO (ref)];
4901 else
4902 base = find_replacement (&XEXP (ref, 0));
4904 /* Handle the case where the address is too complex to be offset by 1. */
4905 if (GET_CODE (base) == MINUS
4906 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
4908 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
4910 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
4911 base = base_plus;
4913 else if (GET_CODE (base) == PLUS)
4915 /* The addend must be CONST_INT, or we would have dealt with it above. */
4916 HOST_WIDE_INT hi, lo;
4918 offset += INTVAL (XEXP (base, 1));
4919 base = XEXP (base, 0);
4921 /* Rework the address into a legal sequence of insns. */
4922 /* Valid range for lo is -4095 -> 4095 */
4923 lo = (offset >= 0
4924 ? (offset & 0xfff)
4925 : -((-offset) & 0xfff));
4927 /* Corner case: if lo is the max offset, then we would be out of range
4928 once we have added the additional 1 below, so bump the msb into the
4929 pre-loading insn(s). */
4930 if (lo == 4095)
4931 lo &= 0x7ff;
4933 hi = ((((offset - lo) & (HOST_WIDE_INT) 0xffffffff)
4934 ^ (HOST_WIDE_INT) 0x80000000)
4935 - (HOST_WIDE_INT) 0x80000000);
4937 if (hi + lo != offset)
4938 abort ();
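      /* For example, offset 0x1234 splits into lo = 0x234 and
         hi = 0x1000; offset 4095 is clipped to lo = 0x7ff so that
         offset + 1 below still fits, leaving hi = 0x800.  */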
4940 if (hi != 0)
4942 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
4944 /* Get the base address; addsi3 knows how to handle constants
4945 that require more than one insn. */
4946 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
4947 base = base_plus;
4948 offset = lo;
4952 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
4953 emit_insn (gen_zero_extendqisi2 (scratch,
4954 gen_rtx_MEM (QImode,
4955 plus_constant (base,
4956 offset))));
4957 emit_insn (gen_zero_extendqisi2 (gen_rtx_SUBREG (SImode, operands[0], 0),
4958 gen_rtx_MEM (QImode,
4959 plus_constant (base,
4960 offset + 1))));
4961 if (!BYTES_BIG_ENDIAN)
4962 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
4963 gen_rtx_IOR (SImode,
4964 gen_rtx_ASHIFT
4965 (SImode,
4966 gen_rtx_SUBREG (SImode, operands[0], 0),
4967 GEN_INT (8)),
4968 scratch)));
4969 else
4970 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
4971 gen_rtx_IOR (SImode,
4972 gen_rtx_ASHIFT (SImode, scratch,
4973 GEN_INT (8)),
4974 gen_rtx_SUBREG (SImode, operands[0],
4975 0))));
4978 /* Handle storing a half-word to memory during reload by synthesising as two
4979 byte stores. Take care not to clobber the input values until after we
4980 have moved them somewhere safe. This code assumes that if the DImode
4981 scratch in operands[2] overlaps either the input value or output address
4982 in some way, then that value must die in this insn (we absolutely need
4983 two scratch registers for some corner cases). */
4985 void
4986 arm_reload_out_hi (operands)
4987 rtx * operands;
4989 rtx ref = operands[0];
4990 rtx outval = operands[1];
4991 rtx base, scratch;
4992 HOST_WIDE_INT offset = 0;
4994 if (GET_CODE (ref) == SUBREG)
4996 offset = SUBREG_BYTE (ref);
4997 ref = SUBREG_REG (ref);
5000 if (GET_CODE (ref) == REG)
5002 /* We have a pseudo which has been spilt onto the stack; there
5003 are two cases here: the first where there is a simple
5004 stack-slot replacement and a second where the stack-slot is
5005 out of range, or is used as a subreg. */
5006 if (reg_equiv_mem[REGNO (ref)])
5008 ref = reg_equiv_mem[REGNO (ref)];
5009 base = find_replacement (&XEXP (ref, 0));
5011 else
5012 /* The slot is out of range, or was dressed up in a SUBREG. */
5013 base = reg_equiv_address[REGNO (ref)];
5015 else
5016 base = find_replacement (&XEXP (ref, 0));
5018 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
5020 /* Handle the case where the address is too complex to be offset by 1. */
5021 if (GET_CODE (base) == MINUS
5022 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
5024 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5026 /* Be careful not to destroy OUTVAL. */
5027 if (reg_overlap_mentioned_p (base_plus, outval))
5029 /* Updating base_plus might destroy outval, see if we can
5030 swap the scratch and base_plus. */
5031 if (!reg_overlap_mentioned_p (scratch, outval))
5033 rtx tmp = scratch;
5034 scratch = base_plus;
5035 base_plus = tmp;
5037 else
5039 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
5041 /* Be conservative and copy OUTVAL into the scratch now,
5042 this should only be necessary if outval is a subreg
5043 of something larger than a word. */
5044 /* XXX Might this clobber base? I can't see how it can,
5045 since scratch is known to overlap with OUTVAL, and
5046 must be wider than a word. */
5047 emit_insn (gen_movhi (scratch_hi, outval));
5048 outval = scratch_hi;
5052 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
5053 base = base_plus;
5055 else if (GET_CODE (base) == PLUS)
5057 /* The addend must be CONST_INT, or we would have dealt with it above. */
5058 HOST_WIDE_INT hi, lo;
5060 offset += INTVAL (XEXP (base, 1));
5061 base = XEXP (base, 0);
5063 /* Rework the address into a legal sequence of insns. */
5064 /* Valid range for lo is -4095 -> 4095 */
5065 lo = (offset >= 0
5066 ? (offset & 0xfff)
5067 : -((-offset) & 0xfff));
5069 /* Corner case: if lo is the max offset, then we would be out of range
5070 once we have added the additional 1 below, so bump the msb into the
5071 pre-loading insn(s). */
5072 if (lo == 4095)
5073 lo &= 0x7ff;
5075 hi = ((((offset - lo) & (HOST_WIDE_INT) 0xffffffff)
5076 ^ (HOST_WIDE_INT) 0x80000000)
5077 - (HOST_WIDE_INT) 0x80000000);
5079 if (hi + lo != offset)
5080 abort ();
5082 if (hi != 0)
5084 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5086 /* Be careful not to destroy OUTVAL. */
5087 if (reg_overlap_mentioned_p (base_plus, outval))
5089 /* Updating base_plus might destroy outval, see if we
5090 can swap the scratch and base_plus. */
5091 if (!reg_overlap_mentioned_p (scratch, outval))
5093 rtx tmp = scratch;
5094 scratch = base_plus;
5095 base_plus = tmp;
5097 else
5099 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
5101 /* Be conservative and copy outval into scratch now,
5102 this should only be necessary if outval is a
5103 subreg of something larger than a word. */
5104 /* XXX Might this clobber base? I can't see how it
5105 can, since scratch is known to overlap with
5106 outval. */
5107 emit_insn (gen_movhi (scratch_hi, outval));
5108 outval = scratch_hi;
5112 /* Get the base address; addsi3 knows how to handle constants
5113 that require more than one insn. */
5114 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
5115 base = base_plus;
5116 offset = lo;
5120 if (BYTES_BIG_ENDIAN)
5122 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
5123 plus_constant (base, offset + 1)),
5124 gen_rtx_SUBREG (QImode, outval, 0)));
5125 emit_insn (gen_lshrsi3 (scratch,
5126 gen_rtx_SUBREG (SImode, outval, 0),
5127 GEN_INT (8)));
5128 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
5129 gen_rtx_SUBREG (QImode, scratch, 0)));
5131 else
5133 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
5134 gen_rtx_SUBREG (QImode, outval, 0)));
5135 emit_insn (gen_lshrsi3 (scratch,
5136 gen_rtx_SUBREG (SImode, outval, 0),
5137 GEN_INT (8)));
5138 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
5139 plus_constant (base, offset + 1)),
5140 gen_rtx_SUBREG (QImode, scratch, 0)));
5144 /* Print a symbolic form of X to the debug file, F. */
5146 static void
5147 arm_print_value (f, x)
5148 FILE * f;
5149 rtx x;
5151 switch (GET_CODE (x))
5153 case CONST_INT:
5154 fprintf (f, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
5155 return;
5157 case CONST_DOUBLE:
5158 fprintf (f, "<0x%lx,0x%lx>", (long)XWINT (x, 2), (long)XWINT (x, 3));
5159 return;
5161 case CONST_STRING:
5162 fprintf (f, "\"%s\"", XSTR (x, 0));
5163 return;
5165 case SYMBOL_REF:
5166 fprintf (f, "`%s'", XSTR (x, 0));
5167 return;
5169 case LABEL_REF:
5170 fprintf (f, "L%d", INSN_UID (XEXP (x, 0)));
5171 return;
5173 case CONST:
5174 arm_print_value (f, XEXP (x, 0));
5175 return;
5177 case PLUS:
5178 arm_print_value (f, XEXP (x, 0));
5179 fprintf (f, "+");
5180 arm_print_value (f, XEXP (x, 1));
5181 return;
5183 case PC:
5184 fprintf (f, "pc");
5185 return;
5187 default:
5188 fprintf (f, "????");
5189 return;
5193 /* Routines for manipulation of the constant pool. */
5195 /* Arm instructions cannot load a large constant directly into a
5196 register; they have to come from a pc relative load. The constant
5197 must therefore be placed in the addressable range of the pc
5198 relative load. Depending on the precise pc relative load
5199 instruction the range is somewhere between 256 bytes and 4k. This
5200 means that we often have to dump a constant inside a function, and
5201 generate code to branch around it.
5203 It is important to minimize this, since the branches will slow
5204 things down and make the code larger.
5206 Normally we can hide the table after an existing unconditional
5207 branch so that there is no interruption of the flow, but in the
5208 worst case the code looks like this:
5210 ldr rn, L1
5211 ...
5212 b L2
5213 align
5214 L1: .long value
5215 L2:
5216 ...
5218 ldr rn, L3
5219 ...
5220 b L4
5221 align
5222 L3: .long value
5223 L4:
5224 ...
5226 We fix this by performing a scan after scheduling, which notices
5227 which instructions need to have their operands fetched from the
5228 constant table and builds the table.
5230 The algorithm starts by building a table of all the constants that
5231 need fixing up and all the natural barriers in the function (places
5232 where a constant table can be dropped without breaking the flow).
5233 For each fixup we note how far the pc-relative replacement will be
5234 able to reach and the offset of the instruction into the function.
5236 Having built the table we then group the fixes together to form
5237 tables that are as large as possible (subject to addressing
5238 constraints) and emit each table of constants after the last
5239 barrier that is within range of all the instructions in the group.
5240 If a group does not contain a barrier, then we forcibly create one
5241 by inserting a jump instruction into the flow. Once the table has
5242 been inserted, the insns are then modified to reference the
5243 relevant entry in the pool.
5245 Possible enhancements to the algorithm (not implemented) are:
5247 1) For some processors and object formats, there may be benefit in
5248 aligning the pools to the start of cache lines; this alignment
5249 would need to be taken into account when calculating addressability
5250 of a pool. */
5252 /* These typedefs are located at the start of this file, so that
5253 they can be used in the prototypes there. This comment is to
5254 remind readers of that fact so that the following structures
5255 can be understood more easily.
5257 typedef struct minipool_node Mnode;
5258 typedef struct minipool_fixup Mfix; */
5260 struct minipool_node
5262 /* Doubly linked chain of entries. */
5263 Mnode * next;
5264 Mnode * prev;
5265 /* The maximum offset into the code that this entry can be placed. While
5266 pushing fixes for forward references, all entries are sorted in order
5267 of increasing max_address. */
5268 HOST_WIDE_INT max_address;
5269 /* Similarly for an entry inserted for a backwards ref. */
5270 HOST_WIDE_INT min_address;
5271 /* The number of fixes referencing this entry. This can become zero
5272 if we "unpush" an entry. In this case we ignore the entry when we
5273 come to emit the code. */
5274 int refcount;
5275 /* The offset from the start of the minipool. */
5276 HOST_WIDE_INT offset;
5277 /* The value in the table. */
5278 rtx value;
5279 /* The mode of value. */
5280 enum machine_mode mode;
5281 int fix_size; /* Word-padded size of the value, in bytes. */
5284 struct minipool_fixup
5286 Mfix * next; /* Chain of fixes, in insn order. */
5287 rtx insn; /* The insn requiring the fix. */
5288 HOST_WIDE_INT address; /* Offset of the insn from the function start. */
5289 rtx * loc; /* Pointer to the operand that must be rewritten. */
5290 enum machine_mode mode; /* Mode of the value to be loaded. */
5291 int fix_size; /* Word-padded size of that value. */
5292 rtx value; /* The constant itself. */
5293 Mnode * minipool; /* The pool entry that will hold it. */
5294 HOST_WIDE_INT forwards; /* Maximum forward reach of the pc-relative load. */
5295 HOST_WIDE_INT backwards; /* Maximum backward reach. */
5298 /* Fixes less than a word need padding out to a word boundary. */
5299 #define MINIPOOL_FIX_SIZE(mode) \
5300 (GET_MODE_SIZE ((mode)) >= 4 ? GET_MODE_SIZE ((mode)) : 4)
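/* e.g. an HImode fix occupies 4 bytes in the pool, while a DImode fix
   keeps its natural 8.  */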
5302 static Mnode * minipool_vector_head;
5303 static Mnode * minipool_vector_tail;
5304 static rtx minipool_vector_label;
5306 /* The linked list of all minipool fixes required for this function. */
5307 Mfix * minipool_fix_head;
5308 Mfix * minipool_fix_tail;
5309 /* The fix entry for the current minipool, once it has been placed. */
5310 Mfix * minipool_barrier;
5312 /* Determines if INSN is the start of a jump table. Returns the end
5313 of the TABLE or NULL_RTX. */
5315 static rtx
5316 is_jump_table (insn)
5317 rtx insn;
5319 rtx table;
5321 if (GET_CODE (insn) == JUMP_INSN
5322 && JUMP_LABEL (insn) != NULL
5323 && ((table = next_real_insn (JUMP_LABEL (insn)))
5324 == next_real_insn (insn))
5325 && table != NULL
5326 && GET_CODE (table) == JUMP_INSN
5327 && (GET_CODE (PATTERN (table)) == ADDR_VEC
5328 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
5329 return table;
5331 return NULL_RTX;
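/* Return the size in bytes of the jump table headed by INSN; e.g. a
   100-entry SImode dispatch table occupies 4 * 100 = 400 bytes.  */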
5334 static HOST_WIDE_INT
5335 get_jump_table_size (insn)
5336 rtx insn;
5338 rtx body = PATTERN (insn);
5339 int elt = GET_CODE (body) == ADDR_DIFF_VEC ? 1 : 0;
5341 return GET_MODE_SIZE (GET_MODE (body)) * XVECLEN (body, elt);
5344 /* Move a minipool fix MP from its current location to before MAX_MP.
5345 If MAX_MP is NULL, then MP doesn't need moving, but the addressing
5346 constraints may need updating. */
5348 static Mnode *
5349 move_minipool_fix_forward_ref (mp, max_mp, max_address)
5350 Mnode * mp;
5351 Mnode * max_mp;
5352 HOST_WIDE_INT max_address;
5354 /* This should never be true and the code below assumes these are
5355 different. */
5356 if (mp == max_mp)
5357 abort ();
5359 if (max_mp == NULL)
5361 if (max_address < mp->max_address)
5362 mp->max_address = max_address;
5364 else
5366 if (max_address > max_mp->max_address - mp->fix_size)
5367 mp->max_address = max_mp->max_address - mp->fix_size;
5368 else
5369 mp->max_address = max_address;
5371 /* Unlink MP from its current position. Since max_mp is non-null,
5372 mp->prev must be non-null. */
5373 mp->prev->next = mp->next;
5374 if (mp->next != NULL)
5375 mp->next->prev = mp->prev;
5376 else
5377 minipool_vector_tail = mp->prev;
5379 /* Re-insert it before MAX_MP. */
5380 mp->next = max_mp;
5381 mp->prev = max_mp->prev;
5382 max_mp->prev = mp;
5384 if (mp->prev != NULL)
5385 mp->prev->next = mp;
5386 else
5387 minipool_vector_head = mp;
5390 /* Save the new entry. */
5391 max_mp = mp;
5393 /* Scan over the preceding entries and adjust their addresses as
5394 required. */
5395 while (mp->prev != NULL
5396 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
5398 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
5399 mp = mp->prev;
5402 return max_mp;
5405 /* Add a constant to the minipool for a forward reference. Returns the
5406 node added or NULL if the constant will not fit in this pool. */
5408 static Mnode *
5409 add_minipool_forward_ref (fix)
5410 Mfix * fix;
5412 /* If set, max_mp is the first pool_entry that has a lower
5413 constraint than the one we are trying to add. */
5414 Mnode * max_mp = NULL;
5415 HOST_WIDE_INT max_address = fix->address + fix->forwards;
5416 Mnode * mp;
5418 /* If this fix's address is greater than the address of the first
5419 entry, then we can't put the fix in this pool. We subtract the
5420 size of the current fix to ensure that if the table is fully
5421 packed we still have enough room to insert this value by shuffling
5422 the other fixes forwards. */
5423 if (minipool_vector_head &&
5424 fix->address >= minipool_vector_head->max_address - fix->fix_size)
5425 return NULL;
5427 /* Scan the pool to see if a constant with the same value has
5428 already been added. While we are doing this, also note the
5429 location where we must insert the constant if it doesn't already
5430 exist. */
5431 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
5433 if (GET_CODE (fix->value) == GET_CODE (mp->value)
5434 && fix->mode == mp->mode
5435 && (GET_CODE (fix->value) != CODE_LABEL
5436 || (CODE_LABEL_NUMBER (fix->value)
5437 == CODE_LABEL_NUMBER (mp->value)))
5438 && rtx_equal_p (fix->value, mp->value))
5440 /* More than one fix references this entry. */
5441 mp->refcount++;
5442 return move_minipool_fix_forward_ref (mp, max_mp, max_address);
5445 /* Note the insertion point if necessary. */
5446 if (max_mp == NULL
5447 && mp->max_address > max_address)
5448 max_mp = mp;
5451 /* The value is not currently in the minipool, so we need to create
5452 a new entry for it. If MAX_MP is NULL, the entry will be put on
5453 the end of the list since the placement is less constrained than
5454 any existing entry. Otherwise, we insert the new fix before
5455 MAX_MP and, if necessary, adjust the constraints on the other
5456 entries. */
5457 mp = xmalloc (sizeof (* mp));
5458 mp->fix_size = fix->fix_size;
5459 mp->mode = fix->mode;
5460 mp->value = fix->value;
5461 mp->refcount = 1;
5462 /* Not yet required for a backwards ref. */
5463 mp->min_address = -65536;
5465 if (max_mp == NULL)
5467 mp->max_address = max_address;
5468 mp->next = NULL;
5469 mp->prev = minipool_vector_tail;
5471 if (mp->prev == NULL)
5473 minipool_vector_head = mp;
5474 minipool_vector_label = gen_label_rtx ();
5476 else
5477 mp->prev->next = mp;
5479 minipool_vector_tail = mp;
5481 else
5483 if (max_address > max_mp->max_address - mp->fix_size)
5484 mp->max_address = max_mp->max_address - mp->fix_size;
5485 else
5486 mp->max_address = max_address;
5488 mp->next = max_mp;
5489 mp->prev = max_mp->prev;
5490 max_mp->prev = mp;
5491 if (mp->prev != NULL)
5492 mp->prev->next = mp;
5493 else
5494 minipool_vector_head = mp;
5497 /* Save the new entry. */
5498 max_mp = mp;
5500 /* Scan over the preceding entries and adjust their addresses as
5501 required. */
5502 while (mp->prev != NULL
5503 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
5505 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
5506 mp = mp->prev;
5509 return max_mp;
5512 static Mnode *
5513 move_minipool_fix_backward_ref (mp, min_mp, min_address)
5514 Mnode * mp;
5515 Mnode * min_mp;
5516 HOST_WIDE_INT min_address;
5518 HOST_WIDE_INT offset;
5520 /* This should never be true, and the code below assumes these are
5521 different. */
5522 if (mp == min_mp)
5523 abort ();
5525 if (min_mp == NULL)
5527 if (min_address > mp->min_address)
5528 mp->min_address = min_address;
5530 else
5532 /* We will adjust this below if it is too loose. */
5533 mp->min_address = min_address;
5535 /* Unlink MP from its current position. Since min_mp is non-null,
5536 mp->next must be non-null. */
5537 mp->next->prev = mp->prev;
5538 if (mp->prev != NULL)
5539 mp->prev->next = mp->next;
5540 else
5541 minipool_vector_head = mp->next;
5543 /* Reinsert it after MIN_MP. */
5544 mp->prev = min_mp;
5545 mp->next = min_mp->next;
5546 min_mp->next = mp;
5547 if (mp->next != NULL)
5548 mp->next->prev = mp;
5549 else
5550 minipool_vector_tail = mp;
5553 min_mp = mp;
5555 offset = 0;
5556 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
5558 mp->offset = offset;
5559 if (mp->refcount > 0)
5560 offset += mp->fix_size;
5562 if (mp->next && mp->next->min_address < mp->min_address + mp->fix_size)
5563 mp->next->min_address = mp->min_address + mp->fix_size;
5566 return min_mp;
5569 /* Add a constant to the minipool for a backward reference. Returns the
5570 node added or NULL if the constant will not fit in this pool.
5572 Note that the code for insertion for a backwards reference can be
5573 somewhat confusing because the calculated offsets for each fix do
5574 not take into account the size of the pool (which is still under
5575 construction.) */
5577 static Mnode *
5578 add_minipool_backward_ref (fix)
5579 Mfix * fix;
5581 /* If set, min_mp is the last pool_entry that has a lower constraint
5582 than the one we are trying to add. */
5583 Mnode * min_mp = NULL;
5584 /* This can be negative, since it is only a constraint. */
5585 HOST_WIDE_INT min_address = fix->address - fix->backwards;
5586 Mnode * mp;
5588 /* If we can't reach the current pool from this insn, or if we can't
5589 insert this entry at the end of the pool without pushing other
5590 fixes out of range, then we don't try. This ensures that we
5591 can't fail later on. */
5592 if (min_address >= minipool_barrier->address
5593 || (minipool_vector_tail->min_address + fix->fix_size
5594 >= minipool_barrier->address))
5595 return NULL;
5597 /* Scan the pool to see if a constant with the same value has
5598 already been added. While we are doing this, also note the
5599 location where we must insert the constant if it doesn't already
5600 exist. */
5601 for (mp = minipool_vector_tail; mp != NULL; mp = mp->prev)
5603 if (GET_CODE (fix->value) == GET_CODE (mp->value)
5604 && fix->mode == mp->mode
5605 && (GET_CODE (fix->value) != CODE_LABEL
5606 || (CODE_LABEL_NUMBER (fix->value)
5607 == CODE_LABEL_NUMBER (mp->value)))
5608 && rtx_equal_p (fix->value, mp->value)
5609 /* Check that there is enough slack to move this entry to the
5610 end of the table (this is conservative). */
5611 && (mp->max_address
5612 > (minipool_barrier->address
5613 + minipool_vector_tail->offset
5614 + minipool_vector_tail->fix_size)))
5616 mp->refcount++;
5617 return move_minipool_fix_backward_ref (mp, min_mp, min_address);
5620 if (min_mp != NULL)
5621 mp->min_address += fix->fix_size;
5622 else
5624 /* Note the insertion point if necessary. */
5625 if (mp->min_address < min_address)
5626 min_mp = mp;
5627 else if (mp->max_address
5628 < minipool_barrier->address + mp->offset + fix->fix_size)
5630 /* Inserting before this entry would push the fix beyond
5631 its maximum address (which can happen if we have
5632 re-located a forwards fix); force the new fix to come
5633 after it. */
5634 min_mp = mp;
5635 min_address = mp->min_address + fix->fix_size;
5640 /* We need to create a new entry. */
5641 mp = xmalloc (sizeof (* mp));
5642 mp->fix_size = fix->fix_size;
5643 mp->mode = fix->mode;
5644 mp->value = fix->value;
5645 mp->refcount = 1;
5646 mp->max_address = minipool_barrier->address + 65536;
5648 mp->min_address = min_address;
5650 if (min_mp == NULL)
5652 mp->prev = NULL;
5653 mp->next = minipool_vector_head;
5655 if (mp->next == NULL)
5657 minipool_vector_tail = mp;
5658 minipool_vector_label = gen_label_rtx ();
5660 else
5661 mp->next->prev = mp;
5663 minipool_vector_head = mp;
5665 else
5667 mp->next = min_mp->next;
5668 mp->prev = min_mp;
5669 min_mp->next = mp;
5671 if (mp->next != NULL)
5672 mp->next->prev = mp;
5673 else
5674 minipool_vector_tail = mp;
5677 /* Save the new entry. */
5678 min_mp = mp;
5680 if (mp->prev)
5681 mp = mp->prev;
5682 else
5683 mp->offset = 0;
5685 /* Scan over the following entries and adjust their offsets. */
5686 while (mp->next != NULL)
5688 if (mp->next->min_address < mp->min_address + mp->fix_size)
5689 mp->next->min_address = mp->min_address + mp->fix_size;
5691 if (mp->refcount)
5692 mp->next->offset = mp->offset + mp->fix_size;
5693 else
5694 mp->next->offset = mp->offset;
5696 mp = mp->next;
5699 return min_mp;
5702 static void
5703 assign_minipool_offsets (barrier)
5704 Mfix * barrier;
5706 HOST_WIDE_INT offset = 0;
5707 Mnode * mp;
5709 minipool_barrier = barrier;
5711 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
5713 mp->offset = offset;
5715 if (mp->refcount > 0)
5716 offset += mp->fix_size;
5720 /* Output the literal table */
5721 static void
5722 dump_minipool (scan)
5723 rtx scan;
5725 Mnode * mp;
5726 Mnode * nmp;
5728 if (rtl_dump_file)
5729 fprintf (rtl_dump_file,
5730 ";; Emitting minipool after insn %u; address %ld\n",
5731 INSN_UID (scan), (unsigned long) minipool_barrier->address);
5733 scan = emit_label_after (gen_label_rtx (), scan);
5734 scan = emit_insn_after (gen_align_4 (), scan);
5735 scan = emit_label_after (minipool_vector_label, scan);
5737 for (mp = minipool_vector_head; mp != NULL; mp = nmp)
5739 if (mp->refcount > 0)
5741 if (rtl_dump_file)
5743 fprintf (rtl_dump_file,
5744 ";; Offset %u, min %ld, max %ld ",
5745 (unsigned) mp->offset, (unsigned long) mp->min_address,
5746 (unsigned long) mp->max_address);
5747 arm_print_value (rtl_dump_file, mp->value);
5748 fputc ('\n', rtl_dump_file);
5751 switch (mp->fix_size)
5753 #ifdef HAVE_consttable_1
5754 case 1:
5755 scan = emit_insn_after (gen_consttable_1 (mp->value), scan);
5756 break;
5758 #endif
5759 #ifdef HAVE_consttable_2
5760 case 2:
5761 scan = emit_insn_after (gen_consttable_2 (mp->value), scan);
5762 break;
5764 #endif
5765 #ifdef HAVE_consttable_4
5766 case 4:
5767 scan = emit_insn_after (gen_consttable_4 (mp->value), scan);
5768 break;
5770 #endif
5771 #ifdef HAVE_consttable_8
5772 case 8:
5773 scan = emit_insn_after (gen_consttable_8 (mp->value), scan);
5774 break;
5776 #endif
5777 default:
5778 abort ();
5779 break;
5783 nmp = mp->next;
5784 free (mp);
5787 minipool_vector_head = minipool_vector_tail = NULL;
5788 scan = emit_insn_after (gen_consttable_end (), scan);
5789 scan = emit_barrier_after (scan);
5792 /* Return the cost of forcibly inserting a barrier after INSN. */
5794 static int
5795 arm_barrier_cost (insn)
5796 rtx insn;
5798 /* Basing the location of the pool on the loop depth is preferable,
5799 but at the moment, the basic block information seems to be
5800 corrupt by this stage of the compilation. */
5801 int base_cost = 50;
5802 rtx next = next_nonnote_insn (insn);
5804 if (next != NULL && GET_CODE (next) == CODE_LABEL)
5805 base_cost -= 20;
5807 switch (GET_CODE (insn))
5809 case CODE_LABEL:
5810 /* It will always be better to place the table before the label, rather
5811 than after it. */
5812 return 50;
5814 case INSN:
5815 case CALL_INSN:
5816 return base_cost;
5818 case JUMP_INSN:
5819 return base_cost - 10;
5821 default:
5822 return base_cost + 10;
5826 /* Find the best place in the insn stream in the range
5827 (FIX->address,MAX_ADDRESS) to forcibly insert a minipool barrier.
5828 Create the barrier by inserting a jump and add a new fix entry for
5829 it. */
5831 static Mfix *
5832 create_fix_barrier (fix, max_address)
5833 Mfix * fix;
5834 HOST_WIDE_INT max_address;
5836 HOST_WIDE_INT count = 0;
5837 rtx barrier;
5838 rtx from = fix->insn;
5839 rtx selected = from;
5840 int selected_cost;
5841 HOST_WIDE_INT selected_address;
5842 Mfix * new_fix;
5843 HOST_WIDE_INT max_count = max_address - fix->address;
5844 rtx label = gen_label_rtx ();
5846 selected_cost = arm_barrier_cost (from);
5847 selected_address = fix->address;
5849 while (from && count < max_count)
5851 rtx tmp;
5852 int new_cost;
5854 /* This code shouldn't have been called if there was a natural barrier
5855 within range. */
5856 if (GET_CODE (from) == BARRIER)
5857 abort ();
5859 /* Count the length of this insn. */
5860 count += get_attr_length (from);
5862 /* If there is a jump table, add its length. */
5863 tmp = is_jump_table (from);
5864 if (tmp != NULL)
5866 count += get_jump_table_size (tmp);
5868 /* Jump tables aren't in a basic block, so base the cost on
5869 the dispatch insn. If we select this location, we will
5870 still put the pool after the table. */
5871 new_cost = arm_barrier_cost (from);
5873 if (count < max_count && new_cost <= selected_cost)
5875 selected = tmp;
5876 selected_cost = new_cost;
5877 selected_address = fix->address + count;
5880 /* Continue after the dispatch table. */
5881 from = NEXT_INSN (tmp);
5882 continue;
5885 new_cost = arm_barrier_cost (from);
5887 if (count < max_count && new_cost <= selected_cost)
5889 selected = from;
5890 selected_cost = new_cost;
5891 selected_address = fix->address + count;
5894 from = NEXT_INSN (from);
5897 /* Create a new JUMP_INSN that branches around a barrier. */
5898 from = emit_jump_insn_after (gen_jump (label), selected);
5899 JUMP_LABEL (from) = label;
5900 barrier = emit_barrier_after (from);
5901 emit_label_after (label, barrier);
5903 /* Create a minipool barrier entry for the new barrier. */
5904 new_fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* new_fix));
5905 new_fix->insn = barrier;
5906 new_fix->address = selected_address;
5907 new_fix->next = fix->next;
5908 fix->next = new_fix;
5910 return new_fix;
5913 /* Record that there is a natural barrier in the insn stream at
5914 ADDRESS. */
5915 static void
5916 push_minipool_barrier (insn, address)
5917 rtx insn;
5918 HOST_WIDE_INT address;
5920 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
5922 fix->insn = insn;
5923 fix->address = address;
5925 fix->next = NULL;
5926 if (minipool_fix_head != NULL)
5927 minipool_fix_tail->next = fix;
5928 else
5929 minipool_fix_head = fix;
5931 minipool_fix_tail = fix;
5934 /* Record INSN, which will need fixing up to load a value from the
5935 minipool. ADDRESS is the offset of the insn since the start of the
5936 function; LOC is a pointer to the part of the insn which requires
5937 fixing; VALUE is the constant that must be loaded, which is of type
5938 MODE. */
5939 static void
5940 push_minipool_fix (insn, address, loc, mode, value)
5941 rtx insn;
5942 HOST_WIDE_INT address;
5943 rtx * loc;
5944 enum machine_mode mode;
5945 rtx value;
5947 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
5949 #ifdef AOF_ASSEMBLER
5950 /* PIC symbol references need to be converted into offsets into the
5951 based area. */
5952 /* XXX This shouldn't be done here. */
5953 if (flag_pic && GET_CODE (value) == SYMBOL_REF)
5954 value = aof_pic_entry (value);
5955 #endif /* AOF_ASSEMBLER */
5957 fix->insn = insn;
5958 fix->address = address;
5959 fix->loc = loc;
5960 fix->mode = mode;
5961 fix->fix_size = MINIPOOL_FIX_SIZE (mode);
5962 fix->value = value;
5963 fix->forwards = get_attr_pool_range (insn);
5964 fix->backwards = get_attr_neg_pool_range (insn);
5965 fix->minipool = NULL;
5967 /* If an insn doesn't have a range defined for it, then it isn't
5968 expecting to be reworked by this code. Better to abort now than
5969 to generate duff assembly code. */
5970 if (fix->forwards == 0 && fix->backwards == 0)
5971 abort ();
5973 if (rtl_dump_file)
5975 fprintf (rtl_dump_file,
5976 ";; %smode fixup for i%d; addr %lu, range (%ld,%ld): ",
5977 GET_MODE_NAME (mode),
5978 INSN_UID (insn), (unsigned long) address,
5979 -1 * (long)fix->backwards, (long)fix->forwards);
5980 arm_print_value (rtl_dump_file, fix->value);
5981 fprintf (rtl_dump_file, "\n");
5984 /* Add it to the chain of fixes. */
5985 fix->next = NULL;
5987 if (minipool_fix_head != NULL)
5988 minipool_fix_tail->next = fix;
5989 else
5990 minipool_fix_head = fix;
5992 minipool_fix_tail = fix;
5995 /* Scan INSN and note any of its operands that need fixing. */
5997 static void
5998 note_invalid_constants (insn, address)
5999 rtx insn;
6000 HOST_WIDE_INT address;
6002 int opno;
6004 extract_insn (insn);
6006 if (!constrain_operands (1))
6007 fatal_insn_not_found (insn);
6009 /* Fill in recog_op_alt with information about the constraints of this
6010 insn. */
6011 preprocess_constraints ();
6013 for (opno = 0; opno < recog_data.n_operands; opno++)
6015 /* Things we need to fix can only occur in inputs. */
6016 if (recog_data.operand_type[opno] != OP_IN)
6017 continue;
6019 /* If this alternative is a memory reference, then any mention
6020 of constants in this alternative is really to fool reload
6021 into allowing us to accept one there. We need to fix them up
6022 now so that we output the right code. */
6023 if (recog_op_alt[opno][which_alternative].memory_ok)
6025 rtx op = recog_data.operand[opno];
6027 if (CONSTANT_P (op))
6028 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
6029 recog_data.operand_mode[opno], op);
6030 #if 0
6031 /* RWE: Now we look correctly at the operands for the insn,
6032 this shouldn't be needed any more. */
6033 #ifndef AOF_ASSEMBLER
6034 /* XXX Is this still needed? */
6035 else if (GET_CODE (op) == UNSPEC && XINT (op, 1) == UNSPEC_PIC_SYM)
6036 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
6037 recog_data.operand_mode[opno],
6038 XVECEXP (op, 0, 0));
6039 #endif
6040 #endif
6041 else if (GET_CODE (op) == MEM
6042 && GET_CODE (XEXP (op, 0)) == SYMBOL_REF
6043 && CONSTANT_POOL_ADDRESS_P (XEXP (op, 0)))
6044 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
6045 recog_data.operand_mode[opno],
6046 get_pool_constant (XEXP (op, 0)));
6051 void
6052 arm_reorg (first)
6053 rtx first;
6055 rtx insn;
6056 HOST_WIDE_INT address = 0;
6057 Mfix * fix;
6059 minipool_fix_head = minipool_fix_tail = NULL;
6061 /* The first insn must always be a note, or the code below won't
6062 scan it properly. */
6063 if (GET_CODE (first) != NOTE)
6064 abort ();
6066 /* Scan all the insns and record the operands that will need fixing. */
6067 for (insn = next_nonnote_insn (first); insn; insn = next_nonnote_insn (insn))
6069 if (GET_CODE (insn) == BARRIER)
6070 push_minipool_barrier (insn, address);
6071 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
6072 || GET_CODE (insn) == JUMP_INSN)
6074 rtx table;
6076 note_invalid_constants (insn, address);
6077 address += get_attr_length (insn);
6079 /* If the insn is a vector jump, add the size of the table
6080 and skip the table. */
6081 if ((table = is_jump_table (insn)) != NULL)
6083 address += get_jump_table_size (table);
6084 insn = table;
6089 fix = minipool_fix_head;
6091 /* Now scan the fixups and perform the required changes. */
6092 while (fix)
6094 Mfix * ftmp;
6095 Mfix * fdel;
6096 Mfix * last_added_fix;
6097 Mfix * last_barrier = NULL;
6098 Mfix * this_fix;
6100 /* Skip any further barriers before the next fix. */
6101 while (fix && GET_CODE (fix->insn) == BARRIER)
6102 fix = fix->next;
6104 /* No more fixes. */
6105 if (fix == NULL)
6106 break;
6108 last_added_fix = NULL;
6110 for (ftmp = fix; ftmp; ftmp = ftmp->next)
6112 if (GET_CODE (ftmp->insn) == BARRIER)
6114 if (ftmp->address >= minipool_vector_head->max_address)
6115 break;
6117 last_barrier = ftmp;
6119 else if ((ftmp->minipool = add_minipool_forward_ref (ftmp)) == NULL)
6120 break;
6122 last_added_fix = ftmp; /* Keep track of the last fix added. */
6125 /* If we found a barrier, drop back to that; any fixes that we
6126 could have reached but come after the barrier will now go in
6127 the next mini-pool. */
6128 if (last_barrier != NULL)
6130 /* Reduce the refcount for those fixes that won't go into this
6131 pool after all. */
6132 for (fdel = last_barrier->next;
6133 fdel && fdel != ftmp;
6134 fdel = fdel->next)
6136 fdel->minipool->refcount--;
6137 fdel->minipool = NULL;
6140 ftmp = last_barrier;
6142 else
6144 /* ftmp is the first fix that we can't fit into this pool and
6145 there are no natural barriers that we could use. Insert a
6146 new barrier in the code somewhere between the previous
6147 fix and this one, and arrange to jump around it. */
6148 HOST_WIDE_INT max_address;
6150 /* The last item on the list of fixes must be a barrier, so
6151 we can never run off the end of the list of fixes without
6152 last_barrier being set. */
6153 if (ftmp == NULL)
6154 abort ();
6156 max_address = minipool_vector_head->max_address;
6157 /* Check that there isn't another fix that is in range that
6158 we couldn't fit into this pool because the pool was
6159 already too large: we need to put the pool before such an
6160 instruction. */
6161 if (ftmp->address < max_address)
6162 max_address = ftmp->address;
6164 last_barrier = create_fix_barrier (last_added_fix, max_address);
6167 assign_minipool_offsets (last_barrier);
6169 while (ftmp)
6171 if (GET_CODE (ftmp->insn) != BARRIER
6172 && ((ftmp->minipool = add_minipool_backward_ref (ftmp))
6173 == NULL))
6174 break;
6176 ftmp = ftmp->next;
6179 /* Scan over the fixes we have identified for this pool, fixing them
6180 up and adding the constants to the pool itself. */
6181 for (this_fix = fix; this_fix && ftmp != this_fix;
6182 this_fix = this_fix->next)
6183 if (GET_CODE (this_fix->insn) != BARRIER)
6185 rtx addr
6186 = plus_constant (gen_rtx_LABEL_REF (VOIDmode,
6187 minipool_vector_label),
6188 this_fix->minipool->offset);
6189 *this_fix->loc = gen_rtx_MEM (this_fix->mode, addr);
6192 dump_minipool (last_barrier->insn);
6193 fix = ftmp;
6196 /* From now on we must synthesize any constants that we can't handle
6197 directly. This can happen if the RTL gets split during final
6198 instruction generation. */
6199 after_arm_reorg = 1;
6201 /* Free the minipool memory. */
6202 obstack_free (&minipool_obstack, minipool_startobj);
6205 /* Routines to output assembly language. */
6207 /* If the rtx is one of the valid FPA immediate constants, return the string of the number.
6208 In this way we can ensure that valid double constants are generated even
6209 when cross compiling. */
6211 const char *
6212 fp_immediate_constant (x)
6213 rtx x;
6215 REAL_VALUE_TYPE r;
6216 int i;
6218 if (!fpa_consts_inited)
6219 init_fpa_table ();
6221 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
6222 for (i = 0; i < 8; i++)
6223 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
6224 return strings_fpa[i];
6226 abort ();
6229 /* As for fp_immediate_constant, but value is passed directly, not in rtx. */
6231 static const char *
6232 fp_const_from_val (r)
6233 REAL_VALUE_TYPE * r;
6235 int i;
6237 if (!fpa_consts_inited)
6238 init_fpa_table ();
6240 for (i = 0; i < 8; i++)
6241 if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
6242 return strings_fpa[i];
6244 abort ();
6247 /* Output the operands of a LDM/STM instruction to STREAM.
6248 MASK is the ARM register set mask of which only bits 0-15 are important.
6249 REG is the base register, either the frame pointer or the stack pointer.
6250 INSTR is the possibly suffixed load or store instruction. */
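/* For example (illustrative call, mirroring the epilogue code below):
   print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, (1 << 4) | (1 << PC_REGNUM))
   emits "ldmfd sp!, {r4, pc}" on an APCS-32 target. */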
6252 static void
6253 print_multi_reg (stream, instr, reg, mask)
6254 FILE * stream;
6255 const char * instr;
6256 int reg;
6257 int mask;
6259 int i;
6260 int not_first = FALSE;
6262 fputc ('\t', stream);
6263 asm_fprintf (stream, instr, reg);
6264 fputs (", {", stream);
6266 for (i = 0; i <= LAST_ARM_REGNUM; i++)
6267 if (mask & (1 << i))
6269 if (not_first)
6270 fprintf (stream, ", ");
6272 asm_fprintf (stream, "%r", i);
6273 not_first = TRUE;
6276 fprintf (stream, "}%s\n", TARGET_APCS_32 ? "" : "^");
6279 /* Output a 'call' insn. */
6281 const char *
6282 output_call (operands)
6283 rtx * operands;
6285 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
6287 if (REGNO (operands[0]) == LR_REGNUM)
6289 operands[0] = gen_rtx_REG (SImode, IP_REGNUM);
6290 output_asm_insn ("mov%?\t%0, %|lr", operands);
6293 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
6295 if (TARGET_INTERWORK)
6296 output_asm_insn ("bx%?\t%0", operands);
6297 else
6298 output_asm_insn ("mov%?\t%|pc, %0", operands);
6300 return "";
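/* Replace every reference to the link register in *X with a reference
   to the IP register.  Returns nonzero if any replacement was made. */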
6303 static int
6304 eliminate_lr2ip (x)
6305 rtx * x;
6307 int something_changed = 0;
6308 rtx x0 = * x;
6309 int code = GET_CODE (x0);
6310 int i, j;
6311 const char * fmt;
6313 switch (code)
6315 case REG:
6316 if (REGNO (x0) == LR_REGNUM)
6318 *x = gen_rtx_REG (SImode, IP_REGNUM);
6319 return 1;
6321 return 0;
6322 default:
6323 /* Scan through the sub-elements and change any references there. */
6324 fmt = GET_RTX_FORMAT (code);
6326 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6327 if (fmt[i] == 'e')
6328 something_changed |= eliminate_lr2ip (&XEXP (x0, i));
6329 else if (fmt[i] == 'E')
6330 for (j = 0; j < XVECLEN (x0, i); j++)
6331 something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));
6333 return something_changed;
6337 /* Output a 'call' insn that is a reference in memory. */
6339 const char *
6340 output_call_mem (operands)
6341 rtx * operands;
6343 operands[0] = copy_rtx (operands[0]); /* Be ultra careful. */
6344 /* Handle calls using lr by using ip (which may be clobbered in subr anyway). */
6345 if (eliminate_lr2ip (&operands[0]))
6346 output_asm_insn ("mov%?\t%|ip, %|lr", operands);
6348 if (TARGET_INTERWORK)
6350 output_asm_insn ("ldr%?\t%|ip, %0", operands);
6351 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
6352 output_asm_insn ("bx%?\t%|ip", operands);
6354 else
6356 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
6357 output_asm_insn ("ldr%?\t%|pc, %0", operands);
6360 return "";
6364 /* Output a move from arm registers to an fpu register.
6365 OPERANDS[0] is an fpu register.
6366 OPERANDS[1] is the first register of an arm register pair. */
6368 const char *
6369 output_mov_long_double_fpu_from_arm (operands)
6370 rtx * operands;
6372 int arm_reg0 = REGNO (operands[1]);
6373 rtx ops[3];
6375 if (arm_reg0 == IP_REGNUM)
6376 abort ();
6378 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6379 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6380 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
6382 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
6383 output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);
6385 return "";
6388 /* Output a move from an fpu register to arm registers.
6389 OPERANDS[0] is the first register of an arm register pair.
6390 OPERANDS[1] is an fpu register. */
6392 const char *
6393 output_mov_long_double_arm_from_fpu (operands)
6394 rtx * operands;
6396 int arm_reg0 = REGNO (operands[0]);
6397 rtx ops[3];
6399 if (arm_reg0 == IP_REGNUM)
6400 abort ();
6402 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6403 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6404 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
6406 output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
6407 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
6408 return "";
6411 /* Output a move from arm registers to arm registers of a long double.
6412 OPERANDS[0] is the destination.
6413 OPERANDS[1] is the source. */
6415 const char *
6416 output_mov_long_double_arm_from_arm (operands)
6417 rtx * operands;
6419 /* We have to be careful here because the two might overlap. */
6420 int dest_start = REGNO (operands[0]);
6421 int src_start = REGNO (operands[1]);
6422 rtx ops[2];
6423 int i;
6425 if (dest_start < src_start)
6427 for (i = 0; i < 3; i++)
6429 ops[0] = gen_rtx_REG (SImode, dest_start + i);
6430 ops[1] = gen_rtx_REG (SImode, src_start + i);
6431 output_asm_insn ("mov%?\t%0, %1", ops);
6434 else
6436 for (i = 2; i >= 0; i--)
6438 ops[0] = gen_rtx_REG (SImode, dest_start + i);
6439 ops[1] = gen_rtx_REG (SImode, src_start + i);
6440 output_asm_insn ("mov%?\t%0, %1", ops);
6444 return "";
6448 /* Output a move from arm registers to an fpu register.
6449 OPERANDS[0] is an fpu register.
6450 OPERANDS[1] is the first register of an arm register pair. */
6452 const char *
6453 output_mov_double_fpu_from_arm (operands)
6454 rtx * operands;
6456 int arm_reg0 = REGNO (operands[1]);
6457 rtx ops[2];
6459 if (arm_reg0 == IP_REGNUM)
6460 abort ();
6462 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6463 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6464 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
6465 output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
6466 return "";
6469 /* Output a move from an fpu register to arm registers.
6470 OPERANDS[0] is the first register of an arm register pair.
6471 OPERANDS[1] is an fpu register. */
6473 const char *
6474 output_mov_double_arm_from_fpu (operands)
6475 rtx * operands;
6477 int arm_reg0 = REGNO (operands[0]);
6478 rtx ops[2];
6480 if (arm_reg0 == IP_REGNUM)
6481 abort ();
6483 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6484 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6485 output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
6486 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
6487 return "";
6490 /* Output a move between double words.
6491 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
6492 or MEM<-REG and all MEMs must be offsettable addresses. */
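/* For example (illustrative): a REG<-MEM load whose address is a plain
   base register becomes a single "ldm%?ia" of the destination pair,
   while a base plus a constant offset of 4, -4 or -8 can use the "ib",
   "da" and "db" addressing variants handled below. */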
6494 const char *
6495 output_move_double (operands)
6496 rtx * operands;
6498 enum rtx_code code0 = GET_CODE (operands[0]);
6499 enum rtx_code code1 = GET_CODE (operands[1]);
6500 rtx otherops[3];
6502 if (code0 == REG)
6504 int reg0 = REGNO (operands[0]);
6506 otherops[0] = gen_rtx_REG (SImode, 1 + reg0);
6508 if (code1 == REG)
6510 int reg1 = REGNO (operands[1]);
6511 if (reg1 == IP_REGNUM)
6512 abort ();
6514 /* Ensure the second source is not overwritten. */
6515 if (reg1 == reg0 + (WORDS_BIG_ENDIAN ? -1 : 1))
6516 output_asm_insn ("mov%?\t%Q0, %Q1\n\tmov%?\t%R0, %R1", operands);
6517 else
6518 output_asm_insn ("mov%?\t%R0, %R1\n\tmov%?\t%Q0, %Q1", operands);
6520 else if (code1 == CONST_DOUBLE)
6522 if (GET_MODE (operands[1]) == DFmode)
6524 long l[2];
6525 union real_extract u;
6527 memcpy (&u, &CONST_DOUBLE_LOW (operands[1]), sizeof (u));
6528 REAL_VALUE_TO_TARGET_DOUBLE (u.d, l);
6529 otherops[1] = GEN_INT (l[1]);
6530 operands[1] = GEN_INT (l[0]);
6532 else if (GET_MODE (operands[1]) != VOIDmode)
6533 abort ();
6534 else if (WORDS_BIG_ENDIAN)
6536 otherops[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
6537 operands[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
6539 else
6541 otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
6542 operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
6545 output_mov_immediate (operands);
6546 output_mov_immediate (otherops);
6548 else if (code1 == CONST_INT)
6550 #if HOST_BITS_PER_WIDE_INT > 32
6551 /* If HOST_WIDE_INT is more than 32 bits, the intval tells us
6552 what the upper word is. */
6553 if (WORDS_BIG_ENDIAN)
6555 otherops[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
6556 operands[1] = GEN_INT (INTVAL (operands[1]) >> 32);
6558 else
6560 otherops[1] = GEN_INT (INTVAL (operands[1]) >> 32);
6561 operands[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
6563 #else
6564 /* Sign extend the intval into the high-order word. */
6565 if (WORDS_BIG_ENDIAN)
6567 otherops[1] = operands[1];
6568 operands[1] = (INTVAL (operands[1]) < 0
6569 ? constm1_rtx : const0_rtx);
6571 else
6572 otherops[1] = INTVAL (operands[1]) < 0 ? constm1_rtx : const0_rtx;
6573 #endif
6574 output_mov_immediate (otherops);
6575 output_mov_immediate (operands);
6577 else if (code1 == MEM)
6579 switch (GET_CODE (XEXP (operands[1], 0)))
6581 case REG:
6582 output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
6583 break;
6585 case PRE_INC:
6586 abort (); /* Should never happen now. */
6587 break;
6589 case PRE_DEC:
6590 output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
6591 break;
6593 case POST_INC:
6594 output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
6595 break;
6597 case POST_DEC:
6598 abort (); /* Should never happen now. */
6599 break;
6601 case LABEL_REF:
6602 case CONST:
6603 output_asm_insn ("adr%?\t%0, %1", operands);
6604 output_asm_insn ("ldm%?ia\t%0, %M0", operands);
6605 break;
6607 default:
6608 if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1),
6609 GET_MODE (XEXP (XEXP (operands[1], 0), 1))))
6611 otherops[0] = operands[0];
6612 otherops[1] = XEXP (XEXP (operands[1], 0), 0);
6613 otherops[2] = XEXP (XEXP (operands[1], 0), 1);
6615 if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
6617 if (GET_CODE (otherops[2]) == CONST_INT)
6619 switch (INTVAL (otherops[2]))
6621 case -8:
6622 output_asm_insn ("ldm%?db\t%1, %M0", otherops);
6623 return "";
6624 case -4:
6625 output_asm_insn ("ldm%?da\t%1, %M0", otherops);
6626 return "";
6627 case 4:
6628 output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
6629 return "";
6632 if (!(const_ok_for_arm (INTVAL (otherops[2]))))
6633 output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
6634 else
6635 output_asm_insn ("add%?\t%0, %1, %2", otherops);
6637 else
6638 output_asm_insn ("add%?\t%0, %1, %2", otherops);
6640 else
6641 output_asm_insn ("sub%?\t%0, %1, %2", otherops);
6643 return "ldm%?ia\t%0, %M0";
6645 else
6647 otherops[1] = adjust_address (operands[1], VOIDmode, 4);
6648 /* Take care of overlapping base/data reg. */
6649 if (reg_mentioned_p (operands[0], operands[1]))
6651 output_asm_insn ("ldr%?\t%0, %1", otherops);
6652 output_asm_insn ("ldr%?\t%0, %1", operands);
6654 else
6656 output_asm_insn ("ldr%?\t%0, %1", operands);
6657 output_asm_insn ("ldr%?\t%0, %1", otherops);
6662 else
6663 abort (); /* Constraints should prevent this. */
6665 else if (code0 == MEM && code1 == REG)
6667 if (REGNO (operands[1]) == IP_REGNUM)
6668 abort ();
6670 switch (GET_CODE (XEXP (operands[0], 0)))
6672 case REG:
6673 output_asm_insn ("stm%?ia\t%m0, %M1", operands);
6674 break;
6676 case PRE_INC:
6677 abort (); /* Should never happen now. */
6678 break;
6680 case PRE_DEC:
6681 output_asm_insn ("stm%?db\t%m0!, %M1", operands);
6682 break;
6684 case POST_INC:
6685 output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
6686 break;
6688 case POST_DEC:
6689 abort (); /* Should never happen now. */
6690 break;
6692 case PLUS:
6693 if (GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT)
6695 switch (INTVAL (XEXP (XEXP (operands[0], 0), 1)))
6697 case -8:
6698 output_asm_insn ("stm%?db\t%m0, %M1", operands);
6699 return "";
6701 case -4:
6702 output_asm_insn ("stm%?da\t%m0, %M1", operands);
6703 return "";
6705 case 4:
6706 output_asm_insn ("stm%?ib\t%m0, %M1", operands);
6707 return "";
6710 /* Fall through */
6712 default:
6713 otherops[0] = adjust_address (operands[0], VOIDmode, 4);
6714 otherops[1] = gen_rtx_REG (SImode, 1 + REGNO (operands[1]));
6715 output_asm_insn ("str%?\t%1, %0", operands);
6716 output_asm_insn ("str%?\t%1, %0", otherops);
6719 else
6720 /* Constraints should prevent this. */
6721 abort ();
6723 return "";
6727 /* Output an arbitrary MOV reg, #n.
6728 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
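/* For example (illustrative): 0xFF loads with a single MOV and ~0xFF
   with a single MVN, while a value such as 0xF000F000, which needs two
   rotated 8-bit chunks, falls through to output_multi_immediate and
   becomes a MOV followed by an ORR. */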
6730 const char *
6731 output_mov_immediate (operands)
6732 rtx * operands;
6734 HOST_WIDE_INT n = INTVAL (operands[1]);
6736 /* Try to use one MOV. */
6737 if (const_ok_for_arm (n))
6738 output_asm_insn ("mov%?\t%0, %1", operands);
6740 /* Try to use one MVN. */
6741 else if (const_ok_for_arm (~n))
6743 operands[1] = GEN_INT (~n);
6744 output_asm_insn ("mvn%?\t%0, %1", operands);
6746 else
6748 int n_ones = 0;
6749 int i;
6751 /* If all else fails, make it out of ORRs or BICs as appropriate. */
6752 for (i = 0; i < 32; i ++)
6753 if (n & 1 << i)
6754 n_ones ++;
6756 if (n_ones > 16) /* Shorter to use MVN with BIC in this case. */
6757 output_multi_immediate (operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1, ~ n);
6758 else
6759 output_multi_immediate (operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1, n);
6762 return "";
6765 /* Output an ADD r, s, #n where n may be too big for one instruction.
6766 If adding zero to one register, output nothing. */
6768 const char *
6769 output_add_immediate (operands)
6770 rtx * operands;
6772 HOST_WIDE_INT n = INTVAL (operands[2]);
6774 if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
6776 if (n < 0)
6777 output_multi_immediate (operands,
6778 "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
6779 -n);
6780 else
6781 output_multi_immediate (operands,
6782 "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
6783 n);
6786 return "";
6789 /* Output a multiple immediate operation.
6790 OPERANDS is the vector of operands referred to in the output patterns.
6791 INSTR1 is the output pattern to use for the first constant.
6792 INSTR2 is the output pattern to use for subsequent constants.
6793 IMMED_OP is the index of the constant slot in OPERANDS.
6794 N is the constant value. */
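/* Illustrative trace: for N = 0x00FF00FF the loop below scans two bits
   at a time, emitting the chunk (n & (255 << 0)) == 0xFF with INSTR1
   and the chunk (n & (255 << 16)) == 0xFF0000 with INSTR2. */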
6796 static const char *
6797 output_multi_immediate (operands, instr1, instr2, immed_op, n)
6798 rtx * operands;
6799 const char * instr1;
6800 const char * instr2;
6801 int immed_op;
6802 HOST_WIDE_INT n;
6804 #if HOST_BITS_PER_WIDE_INT > 32
6805 n &= 0xffffffff;
6806 #endif
6808 if (n == 0)
6810 /* Quick and easy output. */
6811 operands[immed_op] = const0_rtx;
6812 output_asm_insn (instr1, operands);
6814 else
6816 int i;
6817 const char * instr = instr1;
6819 /* Note that n is never zero here (which would give no output). */
6820 for (i = 0; i < 32; i += 2)
6822 if (n & (3 << i))
6824 operands[immed_op] = GEN_INT (n & (255 << i));
6825 output_asm_insn (instr, operands);
6826 instr = instr2;
6827 i += 6;
6832 return "";
6835 /* Return the appropriate ARM instruction for the operation code.
6836 The returned result should not be overwritten. OP is the rtx of the
6837 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
6838 was shifted. */
6840 const char *
6841 arithmetic_instr (op, shift_first_arg)
6842 rtx op;
6843 int shift_first_arg;
6845 switch (GET_CODE (op))
6847 case PLUS:
6848 return "add";
6850 case MINUS:
6851 return shift_first_arg ? "rsb" : "sub";
6853 case IOR:
6854 return "orr";
6856 case XOR:
6857 return "eor";
6859 case AND:
6860 return "and";
6862 default:
6863 abort ();
6867 /* Ensure valid constant shifts and return the appropriate shift mnemonic
6868 for the operation code. The returned result should not be overwritten.
6869 OP is the rtx code of the shift.
6870 On exit, *AMOUNTP will be -1 if the shift is by a register, otherwise it
6871 will contain the constant shift amount. */
6873 static const char *
6874 shift_op (op, amountp)
6875 rtx op;
6876 HOST_WIDE_INT *amountp;
6878 const char * mnem;
6879 enum rtx_code code = GET_CODE (op);
6881 if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
6882 *amountp = -1;
6883 else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
6884 *amountp = INTVAL (XEXP (op, 1));
6885 else
6886 abort ();
6888 switch (code)
6890 case ASHIFT:
6891 mnem = "asl";
6892 break;
6894 case ASHIFTRT:
6895 mnem = "asr";
6896 break;
6898 case LSHIFTRT:
6899 mnem = "lsr";
6900 break;
6902 case ROTATERT:
6903 mnem = "ror";
6904 break;
6906 case MULT:
6907 /* We never have to worry about the amount being other than a
6908 power of 2, since this case can never be reloaded from a reg. */
6909 if (*amountp != -1)
6910 *amountp = int_log2 (*amountp);
6911 else
6912 abort ();
6913 return "asl";
6915 default:
6916 abort ();
6919 if (*amountp != -1)
6921 /* This is not 100% correct, but follows from the desire to merge
6922 multiplication by a power of 2 with the recognizer for a
6923 shift. >=32 is not a valid shift for "asl", so we must try and
6924 output a shift that produces the correct arithmetical result.
6925 Using lsr #32 is identical except for the fact that the carry bit
6926 is not set correctly if we set the flags; but we never use the
6927 carry bit from such an operation, so we can ignore that. */
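/* Illustrative example: an "asl" by 33 has no valid encoding, so it
   is rewritten below as "lsr #32", which also yields zero; a rotate
   amount is simply reduced modulo 32. */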
6928 if (code == ROTATERT)
6929 /* Rotate is just modulo 32. */
6930 *amountp &= 31;
6931 else if (*amountp != (*amountp & 31))
6933 if (code == ASHIFT)
6934 mnem = "lsr";
6935 *amountp = 32;
6938 /* Shifts of 0 are no-ops. */
6939 if (*amountp == 0)
6940 return NULL;
6943 return mnem;
6946 /* Obtain the shift count corresponding to the POWER of two. */
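/* E.g. int_log2 (8) == 3.  (The loop below actually returns the index
   of the least significant set bit, and aborts if no bit is set in the
   low 32 positions.) */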
6948 static HOST_WIDE_INT
6949 int_log2 (power)
6950 HOST_WIDE_INT power;
6952 HOST_WIDE_INT shift = 0;
6954 while ((((HOST_WIDE_INT) 1 << shift) & power) == 0)
6956 if (shift > 31)
6957 abort ();
6958 shift ++;
6961 return shift;
6964 /* Output a .ascii pseudo-op, keeping track of lengths. This is because
6965 /bin/as is horribly restrictive. */
6966 #define MAX_ASCII_LEN 51
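/* Illustrative consequence: a 120-character string is emitted as
   several .ascii directives, a new one being started once roughly
   MAX_ASCII_LEN characters (counting escape expansions) have been
   written. */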
6968 void
6969 output_ascii_pseudo_op (stream, p, len)
6970 FILE * stream;
6971 const unsigned char * p;
6972 int len;
6974 int i;
6975 int len_so_far = 0;
6977 fputs ("\t.ascii\t\"", stream);
6979 for (i = 0; i < len; i++)
6981 int c = p[i];
6983 if (len_so_far >= MAX_ASCII_LEN)
6985 fputs ("\"\n\t.ascii\t\"", stream);
6986 len_so_far = 0;
6989 switch (c)
6991 case TARGET_TAB:
6992 fputs ("\\t", stream);
6993 len_so_far += 2;
6994 break;
6996 case TARGET_FF:
6997 fputs ("\\f", stream);
6998 len_so_far += 2;
6999 break;
7001 case TARGET_BS:
7002 fputs ("\\b", stream);
7003 len_so_far += 2;
7004 break;
7006 case TARGET_CR:
7007 fputs ("\\r", stream);
7008 len_so_far += 2;
7009 break;
7011 case TARGET_NEWLINE:
7012 fputs ("\\n", stream);
7013 c = p [i + 1];
7014 if ((c >= ' ' && c <= '~')
7015 || c == TARGET_TAB)
7016 /* This is a good place for a line break. */
7017 len_so_far = MAX_ASCII_LEN;
7018 else
7019 len_so_far += 2;
7020 break;
7022 case '\"':
7023 case '\\':
7024 putc ('\\', stream);
7025 len_so_far++;
7026 /* drop through. */
7028 default:
7029 if (c >= ' ' && c <= '~')
7031 putc (c, stream);
7032 len_so_far++;
7034 else
7036 fprintf (stream, "\\%03o", c);
7037 len_so_far += 4;
7039 break;
7043 fputs ("\"\n", stream);
7046 /* Compute the register save mask for registers 0 through 12
7047 inclusive. This code is used by both arm_compute_save_reg_mask
7048 and arm_compute_initial_elimination_offset. */
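/* Illustrative example: a normal function whose only call-saved core
   registers in use are r4 and r6 yields a mask of
   (1 << 4) | (1 << 6) == 0x50. */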
7050 static unsigned long
7051 arm_compute_save_reg0_reg12_mask ()
7053 unsigned long func_type = arm_current_func_type ();
7054 unsigned int save_reg_mask = 0;
7055 unsigned int reg;
7057 if (IS_INTERRUPT (func_type))
7059 unsigned int max_reg;
7060 /* Interrupt functions must not corrupt any registers,
7061 even call clobbered ones. If this is a leaf function
7062 we can just examine the registers used by the RTL, but
7063 otherwise we have to assume that whatever function is
7064 called might clobber anything, and so we have to save
7065 all the call-clobbered registers as well. */
7066 if (ARM_FUNC_TYPE (func_type) == ARM_FT_FIQ)
7067 /* FIQ handlers have registers r8 - r12 banked, so
7068 we only need to check r0 - r7. Normal ISRs only
7069 bank r14 and r15, so we must check up to r12.
7070 r13 is the stack pointer, which is always preserved,
7071 so we do not need to consider it here. */
7072 max_reg = 7;
7073 else
7074 max_reg = 12;
7076 for (reg = 0; reg <= max_reg; reg++)
7077 if (regs_ever_live[reg]
7078 || (! current_function_is_leaf && call_used_regs [reg]))
7079 save_reg_mask |= (1 << reg);
7081 else
7083 /* In the normal case we only need to save those registers
7084 which are call saved and which are used by this function. */
7085 for (reg = 0; reg <= 10; reg++)
7086 if (regs_ever_live[reg] && ! call_used_regs [reg])
7087 save_reg_mask |= (1 << reg);
7089 /* Handle the frame pointer as a special case. */
7090 if (! TARGET_APCS_FRAME
7091 && ! frame_pointer_needed
7092 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
7093 && ! call_used_regs[HARD_FRAME_POINTER_REGNUM])
7094 save_reg_mask |= 1 << HARD_FRAME_POINTER_REGNUM;
7096 /* If we aren't loading the PIC register,
7097 don't stack it even though it may be live. */
7098 if (flag_pic
7099 && ! TARGET_SINGLE_PIC_BASE
7100 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
7101 save_reg_mask |= 1 << PIC_OFFSET_TABLE_REGNUM;
7104 return save_reg_mask;
7107 /* Compute a bit mask of which registers need to be
7108 saved on the stack for the current function. */
7110 static unsigned long
7111 arm_compute_save_reg_mask ()
7113 unsigned int save_reg_mask = 0;
7114 unsigned long func_type = arm_current_func_type ();
7116 if (IS_NAKED (func_type))
7117 /* This should never really happen. */
7118 return 0;
7120 /* If we are creating a stack frame, then we must save the frame pointer,
7121 IP (which will hold the old stack pointer), LR and the PC. */
7122 if (frame_pointer_needed)
7123 save_reg_mask |=
7124 (1 << ARM_HARD_FRAME_POINTER_REGNUM)
7125 | (1 << IP_REGNUM)
7126 | (1 << LR_REGNUM)
7127 | (1 << PC_REGNUM);
7129 /* Volatile functions do not return, so there
7130 is no need to save any other registers. */
7131 if (IS_VOLATILE (func_type))
7132 return save_reg_mask;
7134 save_reg_mask |= arm_compute_save_reg0_reg12_mask ();
7136 /* Decide if we need to save the link register.
7137 Interrupt routines have their own banked link register,
7138 so they never need to save it.
7139 Otherwise, if we do not use the link register, we do not need to save
7140 it. If we are pushing other registers onto the stack however, we
7141 can save an instruction in the epilogue by pushing the link register
7142 now and then popping it back into the PC. This incurs extra memory
7143 accesses though, so we only do it when optimising for size, and only
7144 if we know that we will not need a fancy return sequence. */
7145 if (! IS_INTERRUPT (func_type)
7146 && (regs_ever_live [LR_REGNUM]
7147 || (save_reg_mask
7148 && optimize_size
7149 && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL)))
7150 save_reg_mask |= 1 << LR_REGNUM;
7152 if (cfun->machine->lr_save_eliminated)
7153 save_reg_mask &= ~ (1 << LR_REGNUM);
7155 return save_reg_mask;
7158 /* Generate a function exit sequence. If REALLY_RETURN is true, then do
7159 everything bar the final return instruction. */
7161 const char *
7162 output_return_instruction (operand, really_return, reverse)
7163 rtx operand;
7164 int really_return;
7165 int reverse;
7167 char conditional[10];
7168 char instr[100];
7169 int reg;
7170 unsigned long live_regs_mask;
7171 unsigned long func_type;
7173 func_type = arm_current_func_type ();
7175 if (IS_NAKED (func_type))
7176 return "";
7178 if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
7180 /* If this function was declared non-returning, and we have found a tail
7181 call, then we have to trust that the called function won't return. */
7182 if (really_return)
7184 rtx ops[2];
7186 /* Otherwise, trap an attempted return by aborting. */
7187 ops[0] = operand;
7188 ops[1] = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)"
7189 : "abort");
7190 assemble_external_libcall (ops[1]);
7191 output_asm_insn (reverse ? "bl%D0\t%a1" : "bl%d0\t%a1", ops);
7194 return "";
7197 if (current_function_calls_alloca && !really_return)
7198 abort ();
7200 /* Construct the conditional part of the instruction(s) to be emitted. */
7201 sprintf (conditional, "%%?%%%c0", reverse ? 'D' : 'd');
7203 return_used_this_function = 1;
7205 live_regs_mask = arm_compute_save_reg_mask ();
7207 /* On some ARM architectures it is faster to use LDR rather than LDM to
7208 load a single register. On other architectures, the cost is the same.
7209 In 26 bit mode we have to use LDM in order to be able to restore the CPSR. */
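/* Illustrative example: for a normal function that built a stack frame
   and saved {fp, ip, lr, pc}, the code below substitutes sp for ip and
   pc for lr, producing something like "ldmea fp, {fp, sp, pc}". */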
7210 if ((live_regs_mask == (1 << LR_REGNUM))
7211 && ! TARGET_INTERWORK
7212 && ! IS_INTERRUPT (func_type)
7213 && (! really_return || TARGET_APCS_32))
7215 if (! really_return)
7216 sprintf (instr, "ldr%s\t%%|lr, [%%|sp], #4", conditional);
7217 else
7218 sprintf (instr, "ldr%s\t%%|pc, [%%|sp], #4", conditional);
7220 else if (live_regs_mask)
7222 if ((live_regs_mask & (1 << IP_REGNUM)) == (1 << IP_REGNUM))
7223 /* There are two possible reasons for the IP register being saved.
7224 Either a stack frame was created, in which case IP contains the
7225 old stack pointer, or an ISR routine corrupted it. If this is an
7226 ISR routine then just restore IP, otherwise restore IP into SP. */
7227 if (! IS_INTERRUPT (func_type))
7229 live_regs_mask &= ~ (1 << IP_REGNUM);
7230 live_regs_mask |= (1 << SP_REGNUM);
7233 /* Generate the load multiple instruction to restore the registers. */
7234 if (frame_pointer_needed)
7235 sprintf (instr, "ldm%sea\t%%|fp, {", conditional);
7236 else
7237 sprintf (instr, "ldm%sfd\t%%|sp!, {", conditional);
7239 for (reg = 0; reg <= SP_REGNUM; reg++)
7240 if (live_regs_mask & (1 << reg))
7242 strcat (instr, "%|");
7243 strcat (instr, reg_names[reg]);
7244 strcat (instr, ", ");
7247 if ((live_regs_mask & (1 << LR_REGNUM)) == 0)
7249 /* If we are not restoring the LR register then we will
7250 have added one too many commas to the list above.
7251 Replace it with a closing brace. */
7252 instr [strlen (instr) - 2] = '}';
7254 else
7256 strcat (instr, "%|");
7258 /* At this point there should only be one or two registers left in
7259 live_regs_mask: always LR, and possibly PC if we created a stack
7260 frame. LR contains the return address. If we do not have any
7261 special requirements for function exit (e.g. interworking, or an ISR)
7262 then we can load this value directly into the PC and save an
7263 instruction. */
7264 if (! TARGET_INTERWORK
7265 && ! IS_INTERRUPT (func_type)
7266 && really_return)
7267 strcat (instr, reg_names [PC_REGNUM]);
7268 else
7269 strcat (instr, reg_names [LR_REGNUM]);
7271 strcat (instr, (TARGET_APCS_32 || !really_return) ? "}" : "}^");
7274 if (really_return)
7276 /* See if we need to generate an extra instruction to
7277 perform the actual function return. */
7278 switch ((int) ARM_FUNC_TYPE (func_type))
7280 case ARM_FT_ISR:
7281 case ARM_FT_FIQ:
7282 output_asm_insn (instr, & operand);
7284 strcpy (instr, "sub");
7285 strcat (instr, conditional);
7286 strcat (instr, "s\t%|pc, %|lr, #4");
7287 break;
7289 case ARM_FT_EXCEPTION:
7290 output_asm_insn (instr, & operand);
7292 strcpy (instr, "mov");
7293 strcat (instr, conditional);
7294 strcat (instr, "s\t%|pc, %|lr");
7295 break;
7297 case ARM_FT_INTERWORKED:
7298 output_asm_insn (instr, & operand);
7300 strcpy (instr, "bx");
7301 strcat (instr, conditional);
7302 strcat (instr, "\t%|lr");
7303 break;
7305 default:
7306 /* The return has already been handled
7307 by loading the LR into the PC. */
7308 if ((live_regs_mask & (1 << LR_REGNUM)) == 0)
7310 output_asm_insn (instr, & operand);
7312 strcpy (instr, "mov");
7313 strcat (instr, conditional);
7314 if (! TARGET_APCS_32)
7315 strcat (instr, "s");
7316 strcat (instr, "\t%|pc, %|lr");
7318 break;
7322 else if (really_return)
7324 switch ((int) ARM_FUNC_TYPE (func_type))
7326 case ARM_FT_ISR:
7327 case ARM_FT_FIQ:
7328 sprintf (instr, "sub%ss\t%%|pc, %%|lr, #4", conditional);
7329 break;
7331 case ARM_FT_INTERWORKED:
7332 sprintf (instr, "bx%s\t%%|lr", conditional);
7333 break;
7335 case ARM_FT_EXCEPTION:
7336 sprintf (instr, "mov%ss\t%%|pc, %%|lr", conditional);
7337 break;
7339 default:
7340 sprintf (instr, "mov%s%s\t%%|pc, %%|lr",
7341 conditional, TARGET_APCS_32 ? "" : "s");
7342 break;
7345 else
7346 /* Nothing to load off the stack, and
7347 no return instruction to generate. */
7348 return "";
7350 output_asm_insn (instr, & operand);
7352 return "";
7355 /* Write the function name into the code section, directly preceding
7356 the function prologue.
7358 Code will be output similar to this:
7360 .ascii "arm_poke_function_name", 0
7361 .align
7363 .word 0xff000000 + (t1 - t0)
7364 arm_poke_function_name
7365 mov ip, sp
7366 stmfd sp!, {fp, ip, lr, pc}
7367 sub fp, ip, #4
7369 When performing a stack backtrace, code can inspect the value
7370 of 'pc' stored at 'fp' + 0. If the trace function then looks
7371 at location pc - 12 and the top 8 bits are set, then we know
7372 that there is a function name embedded immediately preceding this
7373 location, and that it has length ((pc[-3]) & 0xff000000).
7375 We assume that pc is declared as a pointer to an unsigned long.
7377 It is of no benefit to output the function name if we are assembling
7378 a leaf function. These function types will not contain a stack
7379 backtrace structure, therefore it is not possible to determine the
7380 function name. */
7382 void
7383 arm_poke_function_name (stream, name)
7384 FILE * stream;
7385 const char * name;
7387 unsigned long alignlength;
7388 unsigned long length;
7389 rtx x;
7391 length = strlen (name) + 1;
7392 alignlength = ROUND_UP (length);
7394 ASM_OUTPUT_ASCII (stream, name, length);
7395 ASM_OUTPUT_ALIGN (stream, 2);
7396 x = GEN_INT ((unsigned HOST_WIDE_INT) 0xff000000 + alignlength);
7397 assemble_aligned_integer (UNITS_PER_WORD, x);
7400 /* Place some comments into the assembler stream
7401 describing the current function. */
7403 static void
7404 arm_output_function_prologue (f, frame_size)
7405 FILE * f;
7406 HOST_WIDE_INT frame_size;
7408 unsigned long func_type;
7410 if (!TARGET_ARM)
7412 thumb_output_function_prologue (f, frame_size);
7413 return;
7416 /* Sanity check. */
7417 if (arm_ccfsm_state || arm_target_insn)
7418 abort ();
7420 func_type = arm_current_func_type ();
7422 switch ((int) ARM_FUNC_TYPE (func_type))
7424 default:
7425 case ARM_FT_NORMAL:
7426 break;
7427 case ARM_FT_INTERWORKED:
7428 asm_fprintf (f, "\t%@ Function supports interworking.\n");
7429 break;
7430 case ARM_FT_EXCEPTION_HANDLER:
7431 asm_fprintf (f, "\t%@ C++ Exception Handler.\n");
7432 break;
7433 case ARM_FT_ISR:
7434 asm_fprintf (f, "\t%@ Interrupt Service Routine.\n");
7435 break;
7436 case ARM_FT_FIQ:
7437 asm_fprintf (f, "\t%@ Fast Interrupt Service Routine.\n");
7438 break;
7439 case ARM_FT_EXCEPTION:
7440 asm_fprintf (f, "\t%@ ARM Exception Handler.\n");
7441 break;
7444 if (IS_NAKED (func_type))
7445 asm_fprintf (f, "\t%@ Naked Function: prologue and epilogue provided by programmer.\n");
7447 if (IS_VOLATILE (func_type))
7448 asm_fprintf (f, "\t%@ Volatile: function does not return.\n");
7450 if (IS_NESTED (func_type))
7451 asm_fprintf (f, "\t%@ Nested: function declared inside another function.\n");
7453 asm_fprintf (f, "\t%@ args = %d, pretend = %d, frame = %d\n",
7454 current_function_args_size,
7455 current_function_pretend_args_size, frame_size);
7457 asm_fprintf (f, "\t%@ frame_needed = %d, uses_anonymous_args = %d\n",
7458 frame_pointer_needed,
7459 cfun->machine->uses_anonymous_args);
7461 if (cfun->machine->lr_save_eliminated)
7462 asm_fprintf (f, "\t%@ link register save eliminated.\n");
7464 #ifdef AOF_ASSEMBLER
7465 if (flag_pic)
7466 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, PIC_OFFSET_TABLE_REGNUM);
7467 #endif
7469 return_used_this_function = 0;
7472 const char *
7473 arm_output_epilogue (really_return)
7474 int really_return;
7476 int reg;
7477 unsigned long saved_regs_mask;
7478 unsigned long func_type;
7479 /* If we need this, then it will always be at least this much. */
7480 int floats_offset = 12;
7481 rtx operands[3];
7482 int frame_size = get_frame_size ();
7483 FILE * f = asm_out_file;
7484 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
7486 /* If we have already generated the return instruction
7487 then it is futile to generate anything else. */
7488 if (use_return_insn (FALSE) && return_used_this_function)
7489 return "";
7491 func_type = arm_current_func_type ();
7493 if (IS_NAKED (func_type))
7494 /* Naked functions don't have epilogues. */
7495 return "";
7497 if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
7499 rtx op;
7501 /* A volatile function should never return. Call abort. */
7502 op = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)" : "abort");
7503 assemble_external_libcall (op);
7504 output_asm_insn ("bl\t%a0", &op);
7506 return "";
7509 if (ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER
7510 && ! really_return)
7511 /* If we are throwing an exception, then we really must
7512 be doing a return, so we can't tail-call. */
7513 abort ();
7515 saved_regs_mask = arm_compute_save_reg_mask ();
7517 /* Compute how far away the floats will be. */
7518 for (reg = 0; reg <= LAST_ARM_REGNUM; reg ++)
7519 if (saved_regs_mask & (1 << reg))
7520 floats_offset += 4;
7522 if (frame_pointer_needed)
7524 if (arm_fpu_arch == FP_SOFT2)
7526 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
7527 if (regs_ever_live[reg] && !call_used_regs[reg])
7529 floats_offset += 12;
7530 asm_fprintf (f, "\tldfe\t%r, [%r, #-%d]\n",
7531 reg, FP_REGNUM, floats_offset);
7534 else
7536 int start_reg = LAST_ARM_FP_REGNUM;
7538 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
7540 if (regs_ever_live[reg] && !call_used_regs[reg])
7542 floats_offset += 12;
7544 /* We can't unstack more than four registers at once. */
7545 if (start_reg - reg == 3)
7547 asm_fprintf (f, "\tlfm\t%r, 4, [%r, #-%d]\n",
7548 reg, FP_REGNUM, floats_offset);
7549 start_reg = reg - 1;
7552 else
7554 if (reg != start_reg)
7555 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
7556 reg + 1, start_reg - reg,
7557 FP_REGNUM, floats_offset);
7558 start_reg = reg - 1;
7562 /* Just in case the last register checked also needs unstacking. */
7563 if (reg != start_reg)
7564 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
7565 reg + 1, start_reg - reg,
7566 FP_REGNUM, floats_offset);
7569 /* saved_regs_mask should contain the IP, which at the time of stack
7570 frame generation actually contains the old stack pointer. So a
7571 quick way to unwind the stack is just to pop the IP register directly
7572 into the stack pointer. */
7573 if ((saved_regs_mask & (1 << IP_REGNUM)) == 0)
7574 abort ();
7575 saved_regs_mask &= ~ (1 << IP_REGNUM);
7576 saved_regs_mask |= (1 << SP_REGNUM);
7578 /* There are two registers left in saved_regs_mask - LR and PC. We
7579 only need to restore the LR register (the return address), but to
7580 save time we can load it directly into the PC, unless we need a
7581 special function exit sequence, or we are not really returning. */
7582 if (really_return && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL)
7583 /* Delete the LR from the register mask, so that the LR on
7584 the stack is loaded into the PC in the register mask. */
7585 saved_regs_mask &= ~ (1 << LR_REGNUM);
7586 else
7587 saved_regs_mask &= ~ (1 << PC_REGNUM);
7589 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, saved_regs_mask);
7591 if (IS_INTERRUPT (func_type))
7592 /* Interrupt handlers will have pushed the
7593 IP onto the stack, so restore it now. */
7594 print_multi_reg (f, "ldmfd\t%r", SP_REGNUM, 1 << IP_REGNUM);
7596 else
7598 /* Restore stack pointer if necessary. */
7599 if (frame_size + current_function_outgoing_args_size != 0)
7601 operands[0] = operands[1] = stack_pointer_rtx;
7602 operands[2] = GEN_INT (frame_size
7603 + current_function_outgoing_args_size);
7604 output_add_immediate (operands);
7607 if (arm_fpu_arch == FP_SOFT2)
7609 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
7610 if (regs_ever_live[reg] && !call_used_regs[reg])
7611 asm_fprintf (f, "\tldfe\t%r, [%r], #12\n",
7612 reg, SP_REGNUM);
7614 else
7616 int start_reg = FIRST_ARM_FP_REGNUM;
7618 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
7620 if (regs_ever_live[reg] && !call_used_regs[reg])
7622 if (reg - start_reg == 3)
7624 asm_fprintf (f, "\tlfmfd\t%r, 4, [%r]!\n",
7625 start_reg, SP_REGNUM);
7626 start_reg = reg + 1;
7629 else
7631 if (reg != start_reg)
7632 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
7633 start_reg, reg - start_reg,
7634 SP_REGNUM);
7636 start_reg = reg + 1;
7640 /* Just in case the last register checked also needs unstacking. */
7641 if (reg != start_reg)
7642 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
7643 start_reg, reg - start_reg, SP_REGNUM);
7646 /* If we can, restore the LR into the PC. */
7647 if (ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
7648 && really_return
7649 && current_function_pretend_args_size == 0
7650 && saved_regs_mask & (1 << LR_REGNUM))
7652 saved_regs_mask &= ~ (1 << LR_REGNUM);
7653 saved_regs_mask |= (1 << PC_REGNUM);
7656 /* Load the registers off the stack. If we only have one register
7657 to load, use the LDR instruction - it is faster. */
7658 if (saved_regs_mask == (1 << LR_REGNUM))
7660 /* The exception handler ignores the LR, so we do
7661 not really need to load it off the stack. */
7662 if (eh_ofs)
7663 asm_fprintf (f, "\tadd\t%r, %r, #4\n", SP_REGNUM, SP_REGNUM);
7664 else
7665 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM, SP_REGNUM);
7667 else if (saved_regs_mask)
7668 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, saved_regs_mask);
7670 if (current_function_pretend_args_size)
7672 /* Unwind the pre-pushed regs. */
7673 operands[0] = operands[1] = stack_pointer_rtx;
7674 operands[2] = GEN_INT (current_function_pretend_args_size);
7675 output_add_immediate (operands);
7679 #if 0
7680 if (ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER)
7681 /* Adjust the stack to remove the exception handler stuff. */
7682 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
7683 REGNO (eh_ofs));
7684 #endif
7686 if (! really_return)
7687 return "";
7689 /* Generate the return instruction. */
7690 switch ((int) ARM_FUNC_TYPE (func_type))
7692 case ARM_FT_EXCEPTION_HANDLER:
7693 /* Even in 26-bit mode we do a mov (rather than a movs)
7694 because we don't have the PSR bits set in the address. */
7695 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, EXCEPTION_LR_REGNUM);
7696 break;
7698 case ARM_FT_ISR:
7699 case ARM_FT_FIQ:
7700 asm_fprintf (f, "\tsubs\t%r, %r, #4\n", PC_REGNUM, LR_REGNUM);
7701 break;
7703 case ARM_FT_EXCEPTION:
7704 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
7705 break;
7707 case ARM_FT_INTERWORKED:
7708 asm_fprintf (f, "\tbx\t%r\n", LR_REGNUM);
7709 break;
7711 default:
7712 if (frame_pointer_needed)
7713 /* If we used the frame pointer then the return address
7714 will have been loaded off the stack directly into the
7715 PC, so there is no need to issue a MOV instruction
7716 here. */
7718 else if (current_function_pretend_args_size == 0
7719 && (saved_regs_mask & (1 << LR_REGNUM)))
7720 /* Similarly we may have been able to load LR into the PC
7721 even if we did not create a stack frame. */
7723 else if (TARGET_APCS_32)
7724 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, LR_REGNUM);
7725 else
7726 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
7727 break;
7730 return "";
7733 static void
7734 arm_output_function_epilogue (file, frame_size)
7735 FILE *file ATTRIBUTE_UNUSED;
7736 HOST_WIDE_INT frame_size;
7738 if (TARGET_THUMB)
7740 /* ??? Probably not safe to set this here, since it assumes that a
7741 function will be emitted as assembly immediately after we generate
7742 RTL for it. This does not happen for inline functions. */
7743 return_used_this_function = 0;
7745 else
7747 if (use_return_insn (FALSE)
7748 && return_used_this_function
7749 && (frame_size + current_function_outgoing_args_size) != 0
7750 && !frame_pointer_needed)
7751 abort ();
7753 /* Reset the ARM-specific per-function variables. */
7754 after_arm_reorg = 0;
7758 /* Generate and emit an insn that we will recognize as a push_multi.
7759 Unfortunately, since this insn does not reflect very well the actual
7760 semantics of the operation, we need to annotate the insn for the benefit
7761 of DWARF2 frame unwind information. */
7763 static rtx
7764 emit_multi_reg_push (mask)
7765 int mask;
7767 int num_regs = 0;
7768 int num_dwarf_regs;
7769 int i, j;
7770 rtx par;
7771 rtx dwarf;
7772 int dwarf_par_index;
7773 rtx tmp, reg;
7775 for (i = 0; i <= LAST_ARM_REGNUM; i++)
7776 if (mask & (1 << i))
7777 num_regs++;
7779 if (num_regs == 0 || num_regs > 16)
7780 abort ();
7782 /* We don't record the PC in the dwarf frame information. */
7783 num_dwarf_regs = num_regs;
7784 if (mask & (1 << PC_REGNUM))
7785 num_dwarf_regs--;
7787 /* For the body of the insn we are going to generate an UNSPEC in
7788 parallel with several USEs. This allows the insn to be recognised
7789 by the push_multi pattern in the arm.md file. The insn looks
7790 something like this:
7792 (parallel [
7793 (set (mem:BLK (pre_dec:BLK (reg:SI sp)))
7794 (unspec:BLK [(reg:SI r4)] UNSPEC_PUSH_MULT))
7795 (use (reg:SI 11 fp))
7796 (use (reg:SI 12 ip))
7797 (use (reg:SI 14 lr))
7798 (use (reg:SI 15 pc))
7801 For the frame note however, we try to be more explicit and actually
7802 show each register being stored into the stack frame, plus a (single)
7803 decrement of the stack pointer. We do it this way in order to be
7804 friendly to the stack unwinding code, which only wants to see a single
7805 stack decrement per instruction. The RTL we generate for the note looks
7806 something like this:
7808 (sequence [
7809 (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int -20)))
7810 (set (mem:SI (reg:SI sp)) (reg:SI r4))
7811 (set (mem:SI (plus:SI (reg:SI sp) (const_int 4))) (reg:SI fp))
7812 (set (mem:SI (plus:SI (reg:SI sp) (const_int 8))) (reg:SI ip))
7813 (set (mem:SI (plus:SI (reg:SI sp) (const_int 12))) (reg:SI lr))
7816 This sequence is used both by the code to support stack unwinding for
7817 exception handlers and the code to generate dwarf2 frame debugging. */
7819 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_regs));
7820 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (num_dwarf_regs + 1));
7821 RTX_FRAME_RELATED_P (dwarf) = 1;
7822 dwarf_par_index = 1;
7824 for (i = 0; i <= LAST_ARM_REGNUM; i++)
7826 if (mask & (1 << i))
7828 reg = gen_rtx_REG (SImode, i);
7830 XVECEXP (par, 0, 0)
7831 = gen_rtx_SET (VOIDmode,
7832 gen_rtx_MEM (BLKmode,
7833 gen_rtx_PRE_DEC (BLKmode,
7834 stack_pointer_rtx)),
7835 gen_rtx_UNSPEC (BLKmode,
7836 gen_rtvec (1, reg),
7837 UNSPEC_PUSH_MULT));
7839 if (i != PC_REGNUM)
7841 tmp = gen_rtx_SET (VOIDmode,
7842 gen_rtx_MEM (SImode, stack_pointer_rtx),
7843 reg);
7844 RTX_FRAME_RELATED_P (tmp) = 1;
7845 XVECEXP (dwarf, 0, dwarf_par_index) = tmp;
7846 dwarf_par_index++;
7849 break;
7853 for (j = 1, i++; j < num_regs; i++)
7855 if (mask & (1 << i))
7857 reg = gen_rtx_REG (SImode, i);
7859 XVECEXP (par, 0, j) = gen_rtx_USE (VOIDmode, reg);
7861 if (i != PC_REGNUM)
7863 tmp = gen_rtx_SET (VOIDmode,
7864 gen_rtx_MEM (SImode,
7865 plus_constant (stack_pointer_rtx,
7866 4 * j)),
7867 reg);
7868 RTX_FRAME_RELATED_P (tmp) = 1;
7869 XVECEXP (dwarf, 0, dwarf_par_index++) = tmp;
7872 j++;
7876 par = emit_insn (par);
7878 tmp = gen_rtx_SET (SImode,
7879 stack_pointer_rtx,
7880 gen_rtx_PLUS (SImode,
7881 stack_pointer_rtx,
7882 GEN_INT (-4 * num_regs)));
7883 RTX_FRAME_RELATED_P (tmp) = 1;
7884 XVECEXP (dwarf, 0, 0) = tmp;
7886 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
7887 REG_NOTES (par));
7888 return par;
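/* As for emit_multi_reg_push, but for the floating point registers:
   generate and emit an insn pushing COUNT FPA registers, starting at
   BASE_REG, onto the stack, with a DWARF frame note describing each
   store. */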
7891 static rtx
7892 emit_sfm (base_reg, count)
7893 int base_reg;
7894 int count;
7896 rtx par;
7897 rtx dwarf;
7898 rtx tmp, reg;
7899 int i;
7901 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
7902 dwarf = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
7903 RTX_FRAME_RELATED_P (dwarf) = 1;
7905 reg = gen_rtx_REG (XFmode, base_reg++);
7907 XVECEXP (par, 0, 0)
7908 = gen_rtx_SET (VOIDmode,
7909 gen_rtx_MEM (BLKmode,
7910 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
7911 gen_rtx_UNSPEC (BLKmode,
7912 gen_rtvec (1, reg),
7913 UNSPEC_PUSH_MULT));
7914 tmp
7915 = gen_rtx_SET (VOIDmode,
7916 gen_rtx_MEM (XFmode,
7917 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
7918 reg);
7919 RTX_FRAME_RELATED_P (tmp) = 1;
7920 XVECEXP (dwarf, 0, count - 1) = tmp;
7922 for (i = 1; i < count; i++)
7924 reg = gen_rtx_REG (XFmode, base_reg++);
7925 XVECEXP (par, 0, i) = gen_rtx_USE (VOIDmode, reg);
7927 tmp = gen_rtx_SET (VOIDmode,
7928 gen_rtx_MEM (XFmode,
7929 gen_rtx_PRE_DEC (BLKmode,
7930 stack_pointer_rtx)),
7931 reg);
7932 RTX_FRAME_RELATED_P (tmp) = 1;
7933 XVECEXP (dwarf, 0, count - i - 1) = tmp;
7936 par = emit_insn (par);
7937 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
7938 REG_NOTES (par));
7939 return par;
7942 /* Compute the distance from register FROM to register TO.
7943 These can be the arg pointer (26), the soft frame pointer (25),
7944 the stack pointer (13) or the hard frame pointer (11).
7945 Typical stack layout looks like this:
7947 old stack pointer -> | |
7948 ----
7949 | | \
7950 | | saved arguments for
7951 | | vararg functions
7952 | | /
7954 hard FP & arg pointer -> | | \
7955 | | stack
7956 | | frame
7957 | | /
7959 | | \
7960 | | call saved
7961 | | registers
7962 soft frame pointer -> | | /
7964 | | \
7965 | | local
7966 | | variables
7967 | | /
7969 | | \
7970 | | outgoing
7971 | | arguments
7972 current stack pointer -> | | /
7975 For a given function some or all of these stack components
7976 may not be needed, giving rise to the possibility of
7977 eliminating some of the registers.
7979 The values returned by this function must reflect the behaviour
7980 of arm_expand_prologue() and arm_compute_save_reg_mask().
7982 The sign of the number returned reflects the direction of stack
7983 growth, so the values are positive for all eliminations except
7984 from the soft frame pointer to the hard frame pointer. */
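/* Worked example (illustrative numbers): with two call-saved core
   registers (8 bytes), a stack frame (16 bytes), 12 bytes of local
   variables and no outgoing arguments, the ARG_POINTER to
   STACK_POINTER elimination below returns 8 + 16 + 12 + 0 - 4 == 32. */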
7986 unsigned int
7987 arm_compute_initial_elimination_offset (from, to)
7988 unsigned int from;
7989 unsigned int to;
7991 unsigned int local_vars = (get_frame_size () + 3) & ~3;
7992 unsigned int outgoing_args = current_function_outgoing_args_size;
7993 unsigned int stack_frame;
7994 unsigned int call_saved_registers;
7995 unsigned long func_type;
7997 func_type = arm_current_func_type ();
7999 /* Volatile functions never return, so there is
8000 no need to save call saved registers. */
8001 call_saved_registers = 0;
8002 if (! IS_VOLATILE (func_type))
8004 unsigned int reg_mask;
8005 unsigned int reg;
8007 /* Make sure that we compute which registers will be saved
8008 on the stack using the same algorithm that is used by
8009 arm_compute_save_reg_mask(). */
8010 reg_mask = arm_compute_save_reg0_reg12_mask ();
8012 /* Now count the number of bits set in save_reg_mask.
8013 For each set bit we need 4 bytes of stack space. */
8014 while (reg_mask)
8016 call_saved_registers += 4;
8017 reg_mask = reg_mask & ~ (reg_mask & - reg_mask);
8020 if (regs_ever_live[LR_REGNUM]
8021 /* If a stack frame is going to be created, the LR will
8022 be saved as part of that, so we do not need to allow
8023 for it here. */
8024 && ! frame_pointer_needed)
8025 call_saved_registers += 4;
8027 /* If the hard floating point registers are going to be
8028 used then they must be saved on the stack as well.
8029 Each register occupies 12 bytes of stack space. */
8030 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg ++)
8031 if (regs_ever_live[reg] && ! call_used_regs[reg])
8032 call_saved_registers += 12;
8035 /* The stack frame contains 4 registers - the old frame pointer,
8036 the old stack pointer, the return address and PC of the start
8037 of the function. */
8038 stack_frame = frame_pointer_needed ? 16 : 0;
8040 /* OK, now we have enough information to compute the distances.
8041 There must be an entry in these switch tables for each pair
8042 of registers in ELIMINABLE_REGS, even if some of the entries
8043 seem to be redundant or useless. */
8044 switch (from)
8046 case ARG_POINTER_REGNUM:
8047 switch (to)
8049 case THUMB_HARD_FRAME_POINTER_REGNUM:
8050 return 0;
8052 case FRAME_POINTER_REGNUM:
8053 /* This is the reverse of the soft frame pointer
8054 to hard frame pointer elimination below. */
8055 if (call_saved_registers == 0 && stack_frame == 0)
8056 return 0;
8057 return (call_saved_registers + stack_frame - 4);
8059 case ARM_HARD_FRAME_POINTER_REGNUM:
8060 /* If there is no stack frame then the hard
8061 frame pointer and the arg pointer coincide. */
8062 if (stack_frame == 0 && call_saved_registers != 0)
8063 return 0;
8064 /* FIXME: Not sure about this. Maybe we should always return 0 ? */
8065 return (frame_pointer_needed
8066 && current_function_needs_context
8067 && ! cfun->machine->uses_anonymous_args) ? 4 : 0;
8069 case STACK_POINTER_REGNUM:
8070 /* If nothing has been pushed on the stack at all
8071 then this will return -4. This *is* correct! */
8072 return call_saved_registers + stack_frame + local_vars + outgoing_args - 4;
8074 default:
8075 abort ();
8077 break;
8079 case FRAME_POINTER_REGNUM:
8080 switch (to)
8082 case THUMB_HARD_FRAME_POINTER_REGNUM:
8083 return 0;
8085 case ARM_HARD_FRAME_POINTER_REGNUM:
8086 /* The hard frame pointer points to the top entry in the
8087 stack frame. The soft frame pointer points to the bottom entry
8088 in the stack frame. If there is no stack frame at all,
8089 then they are identical. */
8090 if (call_saved_registers == 0 && stack_frame == 0)
8091 return 0;
8092 return - (call_saved_registers + stack_frame - 4);
8094 case STACK_POINTER_REGNUM:
8095 return local_vars + outgoing_args;
8097 default:
8098 abort ();
8100 break;
8102 default:
8103 /* You cannot eliminate from the stack pointer.
8104 In theory you could eliminate from the hard frame
8105 pointer to the stack pointer, but this will never
8106 happen, since if a stack frame is not needed the
8107 hard frame pointer will never be used. */
8108 abort ();
8112 /* Generate the prologue instructions for entry into an ARM function. */
8114 void
8115 arm_expand_prologue ()
8117 int reg;
8118 rtx amount;
8119 rtx insn;
8120 rtx ip_rtx;
8121 unsigned long live_regs_mask;
8122 unsigned long func_type;
8123 int fp_offset = 0;
8124 int saved_pretend_args = 0;
8125 unsigned int args_to_push;
8127 func_type = arm_current_func_type ();
8129 /* Naked functions don't have prologues. */
8130 if (IS_NAKED (func_type))
8131 return;
8133 /* Make a copy of c_f_p_a_s as we may need to modify it locally. */
8134 args_to_push = current_function_pretend_args_size;
8136 /* Compute which registers we will have to save onto the stack. */
8137 live_regs_mask = arm_compute_save_reg_mask ();
8139 ip_rtx = gen_rtx_REG (SImode, IP_REGNUM);
8141 if (frame_pointer_needed)
8143 if (IS_INTERRUPT (func_type))
8145 /* Interrupt functions must not corrupt any registers.
8146 Creating a frame pointer, however, corrupts the IP
8147 register, so we must push it first. */
8148 insn = emit_multi_reg_push (1 << IP_REGNUM);
8150 /* Do not set RTX_FRAME_RELATED_P on this insn.
8151 The dwarf stack unwinding code only wants to see one
8152 stack decrement per function, and this is not it. If
8153 this instruction is labeled as being part of the frame
8154 creation sequence then dwarf2out_frame_debug_expr will
8155 abort when it encounters the assignment of IP to FP
8156 later on, since the use of SP here establishes SP as
8157 the CFA register and not IP.
8159 Anyway this instruction is not really part of the stack
8160 frame creation although it is part of the prologue. */
8162 else if (IS_NESTED (func_type))
8164 /* The static chain register is the same as the IP register,
8165 which is used as a scratch register during stack frame creation.
8166 To get around this we need to find somewhere to store IP
8167 whilst the frame is being created. We try the following
8168 places in order:
8170 1. The last argument register.
8171 2. A slot on the stack above the frame. (This only
8172 works if the function is not a varargs function).
8173 3. Register r3, after pushing the argument registers
8174 onto the stack.
8176 Note - we only need to tell the dwarf2 backend about the SP
8177 adjustment in the second variant; the static chain register
8178 doesn't need to be unwound, as it doesn't contain a value
8179 inherited from the caller. */
8181 if (regs_ever_live[3] == 0)
8183 insn = gen_rtx_REG (SImode, 3);
8184 insn = gen_rtx_SET (SImode, insn, ip_rtx);
8185 insn = emit_insn (insn);
8187 else if (args_to_push == 0)
8189 rtx dwarf;
8190 insn = gen_rtx_PRE_DEC (SImode, stack_pointer_rtx);
8191 insn = gen_rtx_MEM (SImode, insn);
8192 insn = gen_rtx_SET (VOIDmode, insn, ip_rtx);
8193 insn = emit_insn (insn);
8195 fp_offset = 4;
8197 /* Just tell the dwarf backend that we adjusted SP. */
8198 dwarf = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
8199 gen_rtx_PLUS (SImode, stack_pointer_rtx,
8200 GEN_INT (-fp_offset)));
8201 RTX_FRAME_RELATED_P (insn) = 1;
8202 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8203 dwarf, REG_NOTES (insn));
8205 else
8207 /* Store the args on the stack. */
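/* Illustrative: with args_to_push == 8 (two anonymous argument words),
   (0xf0 >> (8 / 4)) & 0xf == 0xc, i.e. a push of {r2, r3}. */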
8208 if (cfun->machine->uses_anonymous_args)
8209 insn = emit_multi_reg_push
8210 ((0xf0 >> (args_to_push / 4)) & 0xf);
8211 else
8212 insn = emit_insn
8213 (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
8214 GEN_INT (- args_to_push)));
8216 RTX_FRAME_RELATED_P (insn) = 1;
8218 saved_pretend_args = 1;
8219 fp_offset = args_to_push;
8220 args_to_push = 0;
8222 /* Now reuse r3 to preserve IP. */
8223 insn = gen_rtx_REG (SImode, 3);
8224 insn = gen_rtx_SET (SImode, insn, ip_rtx);
8225 (void) emit_insn (insn);
8229 if (fp_offset)
8231 insn = gen_rtx_PLUS (SImode, stack_pointer_rtx, GEN_INT (fp_offset));
8232 insn = gen_rtx_SET (SImode, ip_rtx, insn);
8234 else
8235 insn = gen_movsi (ip_rtx, stack_pointer_rtx);
8237 insn = emit_insn (insn);
8238 RTX_FRAME_RELATED_P (insn) = 1;
8241 if (args_to_push)
8243 /* Push the argument registers, or reserve space for them. */
8244 if (cfun->machine->uses_anonymous_args)
8245 insn = emit_multi_reg_push
8246 ((0xf0 >> (args_to_push / 4)) & 0xf);
8247 else
8248 insn = emit_insn
8249 (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
8250 GEN_INT (- args_to_push)));
8251 RTX_FRAME_RELATED_P (insn) = 1;
8254 if (live_regs_mask)
8256 insn = emit_multi_reg_push (live_regs_mask);
8257 RTX_FRAME_RELATED_P (insn) = 1;
8260 if (! IS_VOLATILE (func_type))
8262 /* Save any floating point call-saved registers used by this function. */
8263 if (arm_fpu_arch == FP_SOFT2)
8265 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg --)
8266 if (regs_ever_live[reg] && !call_used_regs[reg])
8268 insn = gen_rtx_PRE_DEC (XFmode, stack_pointer_rtx);
8269 insn = gen_rtx_MEM (XFmode, insn);
8270 insn = emit_insn (gen_rtx_SET (VOIDmode, insn,
8271 gen_rtx_REG (XFmode, reg)));
8272 RTX_FRAME_RELATED_P (insn) = 1;
8275 else
8277 int start_reg = LAST_ARM_FP_REGNUM;
8279 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg --)
8281 if (regs_ever_live[reg] && !call_used_regs[reg])
8283 if (start_reg - reg == 3)
8285 insn = emit_sfm (reg, 4);
8286 RTX_FRAME_RELATED_P (insn) = 1;
8287 start_reg = reg - 1;
8290 else
8292 if (start_reg != reg)
8294 insn = emit_sfm (reg + 1, start_reg - reg);
8295 RTX_FRAME_RELATED_P (insn) = 1;
8297 start_reg = reg - 1;
8301 if (start_reg != reg)
8303 insn = emit_sfm (reg + 1, start_reg - reg);
8304 RTX_FRAME_RELATED_P (insn) = 1;
8309 if (frame_pointer_needed)
8311 /* Create the new frame pointer. */
8312 insn = GEN_INT (-(4 + args_to_push + fp_offset));
8313 insn = emit_insn (gen_addsi3 (hard_frame_pointer_rtx, ip_rtx, insn));
8314 RTX_FRAME_RELATED_P (insn) = 1;
8316 if (IS_NESTED (func_type))
8318 /* Recover the static chain register. */
8319 if (regs_ever_live [3] == 0
8320 || saved_pretend_args)
8321 insn = gen_rtx_REG (SImode, 3);
8322 else /* if (current_function_pretend_args_size == 0) */
8324 insn = gen_rtx_PLUS (SImode, hard_frame_pointer_rtx, GEN_INT (4));
8325 insn = gen_rtx_MEM (SImode, insn);
8328 emit_insn (gen_rtx_SET (SImode, ip_rtx, insn));
8329 /* Add a USE to stop propagate_one_insn() from barfing. */
8330 emit_insn (gen_prologue_use (ip_rtx));
8334 amount = GEN_INT (-(get_frame_size ()
8335 + current_function_outgoing_args_size));
8337 if (amount != const0_rtx)
8339 /* This add can produce multiple insns for a large constant, so we
8340 need to get tricky. */
8341 rtx last = get_last_insn ();
8342 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
8343 amount));
8346 last = last ? NEXT_INSN (last) : get_insns ();
8347 RTX_FRAME_RELATED_P (last) = 1;
8349 while (last != insn);
8351 /* If the frame pointer is needed, emit a special barrier that
8352 will prevent the scheduler from moving stores to the frame
8353 before the stack adjustment. */
8354 if (frame_pointer_needed)
8356 rtx unspec = gen_rtx_UNSPEC (SImode,
8357 gen_rtvec (2, stack_pointer_rtx,
8358 hard_frame_pointer_rtx),
8359 UNSPEC_PRLG_STK);
8361 insn = emit_insn (gen_rtx_CLOBBER (VOIDmode,
8362 gen_rtx_MEM (BLKmode, unspec)));
8366 /* If we are profiling, make sure no instructions are scheduled before
8367 the call to mcount. Similarly if the user has requested no
8368 scheduling in the prolog. */
8369 if (current_function_profile || TARGET_NO_SCHED_PRO)
8370 emit_insn (gen_blockage ());
8372 /* If the link register is being kept alive, with the return address in it,
8373 then make sure that it does not get reused by the ce2 pass. */
8374 if ((live_regs_mask & (1 << LR_REGNUM)) == 0)
8376 emit_insn (gen_prologue_use (gen_rtx_REG (SImode, LR_REGNUM)));
8377 cfun->machine->lr_save_eliminated = 1;
8381 /* If CODE is 'd', then X is a condition operand and the instruction
8382 should only be executed if the condition is true.
8383 If CODE is 'D', then X is a condition operand and the instruction
8384 should only be executed if the condition is false: however, if the mode
8385 of the comparison is CCFPEmode, then always execute the instruction -- we
8386 do this because in these circumstances !GE does not necessarily imply LT;
8387 in these cases the instruction pattern will take care to make sure that
8388 an instruction containing %d will follow, thereby undoing the effects of
8389 doing this instruction unconditionally.
8390 If CODE is 'N' then X is a floating point operand that must be negated
8391 before output.
8392 If CODE is 'B' then output a bitwise inverted value of X (a const int).
8393 If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */
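   For instance (illustrative, following the cases below): '%B' applied to
   (const_int 1) prints -2, and '%M' applied to a DImode value in r4
   prints {r4-r5}.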
8395 void
8396 arm_print_operand (stream, x, code)
8397 FILE * stream;
8398 rtx x;
8399 int code;
8401 switch (code)
8403 case '@':
8404 fputs (ASM_COMMENT_START, stream);
8405 return;
8407 case '_':
8408 fputs (user_label_prefix, stream);
8409 return;
8411 case '|':
8412 fputs (REGISTER_PREFIX, stream);
8413 return;
8415 case '?':
8416 if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
8418 if (TARGET_THUMB || current_insn_predicate != NULL)
8419 abort ();
8421 fputs (arm_condition_codes[arm_current_cc], stream);
8423 else if (current_insn_predicate)
8425 enum arm_cond_code code;
8427 if (TARGET_THUMB)
8428 abort ();
8430 code = get_arm_condition_code (current_insn_predicate);
8431 fputs (arm_condition_codes[code], stream);
8433 return;
8435 case 'N':
8437 REAL_VALUE_TYPE r;
8438 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
8439 r = REAL_VALUE_NEGATE (r);
8440 fprintf (stream, "%s", fp_const_from_val (&r));
8442 return;
8444 case 'B':
8445 if (GET_CODE (x) == CONST_INT)
8447 HOST_WIDE_INT val;
8448 val = ARM_SIGN_EXTEND (~INTVAL (x));
8449 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
8451 else
8453 putc ('~', stream);
8454 output_addr_const (stream, x);
8456 return;
8458 case 'i':
8459 fprintf (stream, "%s", arithmetic_instr (x, 1));
8460 return;
8462 case 'I':
8463 fprintf (stream, "%s", arithmetic_instr (x, 0));
8464 return;
8466 case 'S':
8468 HOST_WIDE_INT val;
8469 const char * shift = shift_op (x, &val);
8471 if (shift)
8473 fprintf (stream, ", %s ", shift);
8474 if (val == -1)
8475 arm_print_operand (stream, XEXP (x, 1), 0);
8476 else
8478 fputc ('#', stream);
8479 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
8483 return;
8485 /* An explanation of the 'Q', 'R' and 'H' register operands:
8487 In a pair of registers containing a DI or DF value the 'Q'
8488 operand returns the register number of the register containing
8489 the least significant part of the value. The 'R' operand returns
8490 the register number of the register containing the most
8491 significant part of the value.
8493 The 'H' operand returns the higher of the two register numbers.
8494 On a run where WORDS_BIG_ENDIAN is true the 'H' operand is the
8495 same as the 'Q' operand, since the most significant part of the
8496 value is held in the lower number register. The reverse is true
8497 on systems where WORDS_BIG_ENDIAN is false.
8499 The purpose of these operands is to distinguish between cases
8500 where the endian-ness of the values is important (for example
8501 when they are added together), and cases where the endian-ness
8502 is irrelevant, but the order of register operations is important.
8503 For example when loading a value from memory into a register
8504 pair, the endian-ness does not matter. Provided that the value
8505 from the lower memory address is put into the lower numbered
8506 register, and the value from the higher address is put into the
8507 higher numbered register, the load will work regardless of whether
8508 the value being loaded is big-wordian or little-wordian. The
8509 order of the two register loads can matter however, if the address
8510 of the memory location is actually held in one of the registers
8511 being overwritten by the load. */
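/* An illustrative example: for a DImode value held in the pair r0/r1,
   'Q' prints r0 and 'R' prints r1 when WORDS_BIG_ENDIAN is false; when
   it is true the two swap.  'H' prints r1 in either case.  */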
8512 case 'Q':
8513 if (REGNO (x) > LAST_ARM_REGNUM)
8514 abort ();
8515 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 1 : 0));
8516 return;
8518 case 'R':
8519 if (REGNO (x) > LAST_ARM_REGNUM)
8520 abort ();
8521 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 0 : 1));
8522 return;
8524 case 'H':
8525 if (REGNO (x) > LAST_ARM_REGNUM)
8526 abort ();
8527 asm_fprintf (stream, "%r", REGNO (x) + 1);
8528 return;
8530 case 'm':
8531 asm_fprintf (stream, "%r",
8532 GET_CODE (XEXP (x, 0)) == REG
8533 ? REGNO (XEXP (x, 0)) : REGNO (XEXP (XEXP (x, 0), 0)));
8534 return;
8536 case 'M':
8537 asm_fprintf (stream, "{%r-%r}",
8538 REGNO (x),
8539 REGNO (x) + NUM_REGS (GET_MODE (x)) - 1);
8540 return;
8542 case 'd':
8543 if (!x)
8544 return;
8546 if (TARGET_ARM)
8547 fputs (arm_condition_codes[get_arm_condition_code (x)],
8548 stream);
8549 else
8550 fputs (thumb_condition_code (x, 0), stream);
8551 return;
8553 case 'D':
8554 if (!x)
8555 return;
8557 if (TARGET_ARM)
8558 fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
8559 (get_arm_condition_code (x))],
8560 stream);
8561 else
8562 fputs (thumb_condition_code (x, 1), stream);
8563 return;
8565 default:
8566 if (x == 0)
8567 abort ();
8569 if (GET_CODE (x) == REG)
8570 asm_fprintf (stream, "%r", REGNO (x));
8571 else if (GET_CODE (x) == MEM)
8573 output_memory_reference_mode = GET_MODE (x);
8574 output_address (XEXP (x, 0));
8576 else if (GET_CODE (x) == CONST_DOUBLE)
8577 fprintf (stream, "#%s", fp_immediate_constant (x));
8578 else if (GET_CODE (x) == NEG)
8579 abort (); /* This should never happen now. */
8580 else
8582 fputc ('#', stream);
8583 output_addr_const (stream, x);
8588 #ifndef AOF_ASSEMBLER
8589 /* Target hook for assembling integer objects. The ARM version needs to
8590 handle word-sized values specially. */
8592 static bool
8593 arm_assemble_integer (x, size, aligned_p)
8594 rtx x;
8595 unsigned int size;
8596 int aligned_p;
8598 if (size == UNITS_PER_WORD && aligned_p)
8600 fputs ("\t.word\t", asm_out_file);
8601 output_addr_const (asm_out_file, x);
8603 /* Mark symbols as position independent. We only do this in the
8604 .text segment, not in the .data segment. */
8605 if (NEED_GOT_RELOC && flag_pic && making_const_table &&
8606 (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF))
8608 if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
8609 fputs ("(GOTOFF)", asm_out_file);
8610 else if (GET_CODE (x) == LABEL_REF)
8611 fputs ("(GOTOFF)", asm_out_file);
8612 else
8613 fputs ("(GOT)", asm_out_file);
8615 fputc ('\n', asm_out_file);
8616 return true;
8619 return default_assemble_integer (x, size, aligned_p);
8621 #endif
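/* For example (illustrative of the code above): a word-sized, aligned
   SYMBOL_REF foo in a PIC constant table is emitted as "\t.word\tfoo(GOT)",
   while a LABEL_REF or a constant-pool symbol gets "(GOTOFF)" instead.  */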
8623 /* A finite state machine takes care of noticing whether or not instructions
8624 can be conditionally executed, and thus decrease execution time and code
8625 size by deleting branch instructions. The fsm is controlled by
8626 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
8628 /* The states of the fsm controlling condition codes are:
8629 0: normal, do nothing special
8630 1: make ASM_OUTPUT_OPCODE not output this instruction
8631 2: make ASM_OUTPUT_OPCODE not output this instruction
8632 3: make instructions conditional
8633 4: make instructions conditional
8635 State transitions (state->state by whom under condition):
8636 0 -> 1 final_prescan_insn if the `target' is a label
8637 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
8638 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
8639 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
8640 3 -> 0 ASM_OUTPUT_INTERNAL_LABEL if the `target' label is reached
8641 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
8642 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
8643 (the target insn is arm_target_insn).
8645 If the jump clobbers the conditions then we use states 2 and 4.
8647 A similar thing can be done with conditional return insns.
8649 XXX In case the `target' is an unconditional branch, this conditionalising
8650 of the instructions always reduces code size, but not always execution
8651 time. But then, I want to reduce the code size to somewhere near what
8652 /bin/cc produces. */
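/* An illustrative example of the transformation (not taken from real
   output):

	cmp	r0, #0			cmp	r0, #0
	beq	.L1		==>	movne	r1, #1
	mov	r1, #1
   .L1:

   The conditional branch is suppressed (state 1) and the instruction it
   would have skipped is output predicated on the inverse condition
   (state 3).  */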
8654 /* Returns the index of the ARM condition code string in
8655 `arm_condition_codes'. COMPARISON should be an rtx like
8656 `(eq (...) (...))'. */
8658 static enum arm_cond_code
8659 get_arm_condition_code (comparison)
8660 rtx comparison;
8662 enum machine_mode mode = GET_MODE (XEXP (comparison, 0));
8663 int code;
8664 enum rtx_code comp_code = GET_CODE (comparison);
8666 if (GET_MODE_CLASS (mode) != MODE_CC)
8667 mode = SELECT_CC_MODE (comp_code, XEXP (comparison, 0),
8668 XEXP (comparison, 1));
8670 switch (mode)
8672 case CC_DNEmode: code = ARM_NE; goto dominance;
8673 case CC_DEQmode: code = ARM_EQ; goto dominance;
8674 case CC_DGEmode: code = ARM_GE; goto dominance;
8675 case CC_DGTmode: code = ARM_GT; goto dominance;
8676 case CC_DLEmode: code = ARM_LE; goto dominance;
8677 case CC_DLTmode: code = ARM_LT; goto dominance;
8678 case CC_DGEUmode: code = ARM_CS; goto dominance;
8679 case CC_DGTUmode: code = ARM_HI; goto dominance;
8680 case CC_DLEUmode: code = ARM_LS; goto dominance;
8681 case CC_DLTUmode: code = ARM_CC;
8683 dominance:
8684 if (comp_code != EQ && comp_code != NE)
8685 abort ();
8687 if (comp_code == EQ)
8688 return ARM_INVERSE_CONDITION_CODE (code);
8689 return code;
8691 case CC_NOOVmode:
8692 switch (comp_code)
8694 case NE: return ARM_NE;
8695 case EQ: return ARM_EQ;
8696 case GE: return ARM_PL;
8697 case LT: return ARM_MI;
8698 default: abort ();
8701 case CC_Zmode:
8702 switch (comp_code)
8704 case NE: return ARM_NE;
8705 case EQ: return ARM_EQ;
8706 default: abort ();
8709 case CCFPEmode:
8710 case CCFPmode:
8711 /* These encodings assume that AC=1 in the FPA system control
8712 byte. This allows us to handle all cases except UNEQ and
8713 LTGT. */
8714 switch (comp_code)
8716 case GE: return ARM_GE;
8717 case GT: return ARM_GT;
8718 case LE: return ARM_LS;
8719 case LT: return ARM_MI;
8720 case NE: return ARM_NE;
8721 case EQ: return ARM_EQ;
8722 case ORDERED: return ARM_VC;
8723 case UNORDERED: return ARM_VS;
8724 case UNLT: return ARM_LT;
8725 case UNLE: return ARM_LE;
8726 case UNGT: return ARM_HI;
8727 case UNGE: return ARM_PL;
8728 /* UNEQ and LTGT do not have a representation. */
8729 case UNEQ: /* Fall through. */
8730 case LTGT: /* Fall through. */
8731 default: abort ();
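/* CC_SWPmode means the comparison operands were swapped when the compare
   was emitted, so e.g. (gt x y) was actually computed as (cmp y x) and
   must be tested with the LT condition, as the mappings below show.  */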
8734 case CC_SWPmode:
8735 switch (comp_code)
8737 case NE: return ARM_NE;
8738 case EQ: return ARM_EQ;
8739 case GE: return ARM_LE;
8740 case GT: return ARM_LT;
8741 case LE: return ARM_GE;
8742 case LT: return ARM_GT;
8743 case GEU: return ARM_LS;
8744 case GTU: return ARM_CC;
8745 case LEU: return ARM_CS;
8746 case LTU: return ARM_HI;
8747 default: abort ();
8750 case CC_Cmode:
8751 switch (comp_code)
8753 case LTU: return ARM_CS;
8754 case GEU: return ARM_CC;
8755 default: abort ();
8758 case CCmode:
8759 switch (comp_code)
8761 case NE: return ARM_NE;
8762 case EQ: return ARM_EQ;
8763 case GE: return ARM_GE;
8764 case GT: return ARM_GT;
8765 case LE: return ARM_LE;
8766 case LT: return ARM_LT;
8767 case GEU: return ARM_CS;
8768 case GTU: return ARM_HI;
8769 case LEU: return ARM_LS;
8770 case LTU: return ARM_CC;
8771 default: abort ();
8774 default: abort ();
8777 abort ();
8781 void
8782 arm_final_prescan_insn (insn)
8783 rtx insn;
8785 /* BODY will hold the body of INSN. */
8786 rtx body = PATTERN (insn);
8788 /* This will be 1 if trying to repeat the trick, and things need to be
8789 reversed if it appears to fail. */
8790 int reverse = 0;
8792 /* A nonzero JUMP_CLOBBERS means that the conditions are clobbered if a
8793 branch is taken, even if the rtl suggests otherwise. It also
8794 means that we have to grub around within the jump expression to find
8795 out what the conditions are when the jump isn't taken. */
8796 int jump_clobbers = 0;
8798 /* If we start with a return insn, we only succeed if we find another one. */
8799 int seeking_return = 0;
8801 /* START_INSN will hold the insn from where we start looking. This is the
8802 first insn after the following code_label if REVERSE is true. */
8803 rtx start_insn = insn;
8805 /* If in state 4, check if the target branch is reached, in order to
8806 change back to state 0. */
8807 if (arm_ccfsm_state == 4)
8809 if (insn == arm_target_insn)
8811 arm_target_insn = NULL;
8812 arm_ccfsm_state = 0;
8814 return;
8817 /* If in state 3, it is possible to repeat the trick, if this insn is an
8818 unconditional branch to a label, and immediately following this branch
8819 is the previous target label which is only used once, and the label this
8820 branch jumps to is not too far off. */
8821 if (arm_ccfsm_state == 3)
8823 if (simplejump_p (insn))
8825 start_insn = next_nonnote_insn (start_insn);
8826 if (GET_CODE (start_insn) == BARRIER)
8828 /* XXX Isn't this always a barrier? */
8829 start_insn = next_nonnote_insn (start_insn);
8831 if (GET_CODE (start_insn) == CODE_LABEL
8832 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
8833 && LABEL_NUSES (start_insn) == 1)
8834 reverse = TRUE;
8835 else
8836 return;
8838 else if (GET_CODE (body) == RETURN)
8840 start_insn = next_nonnote_insn (start_insn);
8841 if (GET_CODE (start_insn) == BARRIER)
8842 start_insn = next_nonnote_insn (start_insn);
8843 if (GET_CODE (start_insn) == CODE_LABEL
8844 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
8845 && LABEL_NUSES (start_insn) == 1)
8847 reverse = TRUE;
8848 seeking_return = 1;
8850 else
8851 return;
8853 else
8854 return;
8857 if (arm_ccfsm_state != 0 && !reverse)
8858 abort ();
8859 if (GET_CODE (insn) != JUMP_INSN)
8860 return;
8862 /* This jump might be paralleled with a clobber of the condition codes;
8863 the jump should always come first. */
8864 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
8865 body = XVECEXP (body, 0, 0);
8867 #if 0
8868 /* If this is a conditional return then we don't want to know */
8869 if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
8870 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
8871 && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
8872 || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
8873 return;
8874 #endif
8876 if (reverse
8877 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
8878 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
8880 int insns_skipped;
8881 int fail = FALSE, succeed = FALSE;
8882 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
8883 int then_not_else = TRUE;
8884 rtx this_insn = start_insn, label = 0;
8886 /* If the jump cannot be done with one instruction, we cannot
8887 conditionally execute the instruction in the inverse case. */
8888 if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
8890 jump_clobbers = 1;
8891 return;
8894 /* Register the insn jumped to. */
8895 if (reverse)
8897 if (!seeking_return)
8898 label = XEXP (SET_SRC (body), 0);
8900 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
8901 label = XEXP (XEXP (SET_SRC (body), 1), 0);
8902 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
8904 label = XEXP (XEXP (SET_SRC (body), 2), 0);
8905 then_not_else = FALSE;
8907 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
8908 seeking_return = 1;
8909 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
8911 seeking_return = 1;
8912 then_not_else = FALSE;
8914 else
8915 abort ();
8917 /* See how many insns this branch skips, and what kind of insns. If all
8918 insns are okay, and the label or unconditional branch to the same
8919 label is not too far away, succeed. */
8920 for (insns_skipped = 0;
8921 !fail && !succeed && insns_skipped++ < max_insns_skipped;)
8923 rtx scanbody;
8925 this_insn = next_nonnote_insn (this_insn);
8926 if (!this_insn)
8927 break;
8929 switch (GET_CODE (this_insn))
8931 case CODE_LABEL:
8932 /* Succeed if it is the target label, otherwise fail since
8933 control falls in from somewhere else. */
8934 if (this_insn == label)
8936 if (jump_clobbers)
8938 arm_ccfsm_state = 2;
8939 this_insn = next_nonnote_insn (this_insn);
8941 else
8942 arm_ccfsm_state = 1;
8943 succeed = TRUE;
8945 else
8946 fail = TRUE;
8947 break;
8949 case BARRIER:
8950 /* Succeed if the following insn is the target label.
8951 Otherwise fail.
8952 If return insns are used then the last insn in a function
8953 will be a barrier. */
8954 this_insn = next_nonnote_insn (this_insn);
8955 if (this_insn && this_insn == label)
8957 if (jump_clobbers)
8959 arm_ccfsm_state = 2;
8960 this_insn = next_nonnote_insn (this_insn);
8962 else
8963 arm_ccfsm_state = 1;
8964 succeed = TRUE;
8966 else
8967 fail = TRUE;
8968 break;
8970 case CALL_INSN:
8971 /* If using 32-bit addresses the cc is not preserved over
8972 calls. */
8973 if (TARGET_APCS_32)
8975 /* Succeed if the following insn is the target label,
8976 or if the following two insns are a barrier and
8977 the target label. */
8978 this_insn = next_nonnote_insn (this_insn);
8979 if (this_insn && GET_CODE (this_insn) == BARRIER)
8980 this_insn = next_nonnote_insn (this_insn);
8982 if (this_insn && this_insn == label
8983 && insns_skipped < max_insns_skipped)
8985 if (jump_clobbers)
8987 arm_ccfsm_state = 2;
8988 this_insn = next_nonnote_insn (this_insn);
8990 else
8991 arm_ccfsm_state = 1;
8992 succeed = TRUE;
8994 else
8995 fail = TRUE;
8997 break;
8999 case JUMP_INSN:
9000 /* If this is an unconditional branch to the same label, succeed.
9001 If it is to another label, do nothing. If it is conditional,
9002 fail. */
9003 /* XXX Probably, the tests for SET and the PC are unnecessary. */
9005 scanbody = PATTERN (this_insn);
9006 if (GET_CODE (scanbody) == SET
9007 && GET_CODE (SET_DEST (scanbody)) == PC)
9009 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
9010 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
9012 arm_ccfsm_state = 2;
9013 succeed = TRUE;
9015 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
9016 fail = TRUE;
9018 /* Fail if a conditional return is undesirable (eg on a
9019 StrongARM), but still allow this if optimizing for size. */
9020 else if (GET_CODE (scanbody) == RETURN
9021 && !use_return_insn (TRUE)
9022 && !optimize_size)
9023 fail = TRUE;
9024 else if (GET_CODE (scanbody) == RETURN
9025 && seeking_return)
9027 arm_ccfsm_state = 2;
9028 succeed = TRUE;
9030 else if (GET_CODE (scanbody) == PARALLEL)
9032 switch (get_attr_conds (this_insn))
9034 case CONDS_NOCOND:
9035 break;
9036 default:
9037 fail = TRUE;
9038 break;
9041 else
9042 fail = TRUE; /* Unrecognized jump (eg epilogue). */
9044 break;
9046 case INSN:
9047 /* Instructions using or affecting the condition codes make it
9048 fail. */
9049 scanbody = PATTERN (this_insn);
9050 if (!(GET_CODE (scanbody) == SET
9051 || GET_CODE (scanbody) == PARALLEL)
9052 || get_attr_conds (this_insn) != CONDS_NOCOND)
9053 fail = TRUE;
9054 break;
9056 default:
9057 break;
9060 if (succeed)
9062 if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
9063 arm_target_label = CODE_LABEL_NUMBER (label);
9064 else if (seeking_return || arm_ccfsm_state == 2)
9066 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
9068 this_insn = next_nonnote_insn (this_insn);
9069 if (this_insn && (GET_CODE (this_insn) == BARRIER
9070 || GET_CODE (this_insn) == CODE_LABEL))
9071 abort ();
9073 if (!this_insn)
9075 /* Oh, dear! We ran off the end; give up. */
9076 recog (PATTERN (insn), insn, NULL);
9077 arm_ccfsm_state = 0;
9078 arm_target_insn = NULL;
9079 return;
9081 arm_target_insn = this_insn;
9083 else
9084 abort ();
9085 if (jump_clobbers)
9087 if (reverse)
9088 abort ();
9089 arm_current_cc =
9090 get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
9091 0), 0), 1));
9092 if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
9093 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
9094 if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
9095 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
9097 else
9099 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
9100 what it was. */
9101 if (!reverse)
9102 arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
9103 0));
9106 if (reverse || then_not_else)
9107 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
9110 /* Restore recog_data (getting the attributes of other insns can
9111 destroy this array, but final.c assumes that it remains intact
9112 across this call; since the insn has been recognized already we
9113 call recog direct). */
9114 recog (PATTERN (insn), insn, NULL);
9118 /* Returns true if REGNO is a valid register
9119 for holding a quantity of type MODE. */
9122 arm_hard_regno_mode_ok (regno, mode)
9123 unsigned int regno;
9124 enum machine_mode mode;
9126 if (GET_MODE_CLASS (mode) == MODE_CC)
9127 return regno == CC_REGNUM;
9129 if (TARGET_THUMB)
9130 /* For the Thumb we only allow values bigger than SImode in
9131 registers 0 - 6, so that there is always a second low
9132 register available to hold the upper part of the value.
9133 We probably ought to ensure that the register is the
9134 start of an even numbered register pair. */
9135 return (NUM_REGS (mode) < 2) || (regno < LAST_LO_REGNUM);
9137 if (regno <= LAST_ARM_REGNUM)
9138 /* We allow any value to be stored in the general registers. */
9139 return 1;
9141 if ( regno == FRAME_POINTER_REGNUM
9142 || regno == ARG_POINTER_REGNUM)
9143 /* We only allow integers in the fake hard registers. */
9144 return GET_MODE_CLASS (mode) == MODE_INT;
9146 /* The only registers left are the FPU registers
9147 which we only allow to hold FP values. */
9148 return GET_MODE_CLASS (mode) == MODE_FLOAT
9149 && regno >= FIRST_ARM_FP_REGNUM
9150 && regno <= LAST_ARM_FP_REGNUM;
9154 arm_regno_class (regno)
9155 int regno;
9157 if (TARGET_THUMB)
9159 if (regno == STACK_POINTER_REGNUM)
9160 return STACK_REG;
9161 if (regno == CC_REGNUM)
9162 return CC_REG;
9163 if (regno < 8)
9164 return LO_REGS;
9165 return HI_REGS;
9168 if ( regno <= LAST_ARM_REGNUM
9169 || regno == FRAME_POINTER_REGNUM
9170 || regno == ARG_POINTER_REGNUM)
9171 return GENERAL_REGS;
9173 if (regno == CC_REGNUM)
9174 return NO_REGS;
9176 return FPU_REGS;
9179 /* Handle a special case when computing the offset
9180 of an argument from the frame pointer. */
9183 arm_debugger_arg_offset (value, addr)
9184 int value;
9185 rtx addr;
9187 rtx insn;
9189 /* We are only interested if dbxout_parms() failed to compute the offset. */
9190 if (value != 0)
9191 return 0;
9193 /* We can only cope with the case where the address is held in a register. */
9194 if (GET_CODE (addr) != REG)
9195 return 0;
9197 /* If we are using the frame pointer to point at the argument, then
9198 an offset of 0 is correct. */
9199 if (REGNO (addr) == (unsigned) HARD_FRAME_POINTER_REGNUM)
9200 return 0;
9202 /* If we are using the stack pointer to point at the
9203 argument, then an offset of 0 is correct. */
9204 if ((TARGET_THUMB || !frame_pointer_needed)
9205 && REGNO (addr) == SP_REGNUM)
9206 return 0;
9208 /* Oh dear. The argument is pointed to by a register rather
9209 than being held in a register, or being stored at a known
9210 offset from the frame pointer. Since GDB only understands
9211 those two kinds of argument we must translate the address
9212 held in the register into an offset from the frame pointer.
9213 We do this by searching through the insns for the function
9214 looking to see where this register gets its value. If the
9215 register is initialised from the frame pointer plus an offset
9216 then we are in luck and we can continue, otherwise we give up.
9218 This code is exercised by producing debugging information
9219 for a function with arguments like this:
9221 double func (double a, double b, int c, double d) {return d;}
9223 Without this code the stab for parameter 'd' will be set to
9224 an offset of 0 from the frame pointer, rather than 8. */
9226 /* The if() statement says:
9228 If the insn is a normal instruction
9229 and if the insn is setting the value in a register
9230 and if the register being set is the register holding the address of the argument
9231 and if the address is computed by an addition
9232 that involves adding to a register
9233 which is the frame pointer
9234 a constant integer
9236 then... */
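/* In RTL terms, the test below matches insns of the form (a sketch):

     (insn ... (set (reg Rn)
                    (plus (reg hard-frame-pointer) (const_int K))))

   and recovers K as the argument's offset from the frame pointer.  */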
9238 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9240 if ( GET_CODE (insn) == INSN
9241 && GET_CODE (PATTERN (insn)) == SET
9242 && REGNO (XEXP (PATTERN (insn), 0)) == REGNO (addr)
9243 && GET_CODE (XEXP (PATTERN (insn), 1)) == PLUS
9244 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 0)) == REG
9245 && REGNO (XEXP (XEXP (PATTERN (insn), 1), 0)) == (unsigned) HARD_FRAME_POINTER_REGNUM
9246 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 1)) == CONST_INT
9249 value = INTVAL (XEXP (XEXP (PATTERN (insn), 1), 1));
9251 break;
9255 if (value == 0)
9257 debug_rtx (addr);
9258 warning ("unable to compute real location of stacked parameter");
9259 value = 8; /* XXX magic hack */
9262 return value;
9265 #define def_builtin(NAME, TYPE, CODE) \
9266 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, NULL)
9268 void
9269 arm_init_builtins ()
9271 tree endlink = void_list_node;
9272 tree int_endlink = tree_cons (NULL_TREE, integer_type_node, endlink);
9273 tree pchar_type_node = build_pointer_type (char_type_node);
9275 tree int_ftype_int, void_ftype_pchar;
9277 /* void func (void *) */
9278 void_ftype_pchar
9279 = build_function_type (void_type_node,
9280 tree_cons (NULL_TREE, pchar_type_node, endlink));
9282 /* int func (int) */
9283 int_ftype_int
9284 = build_function_type (integer_type_node, int_endlink);
9286 /* Initialize arm V5 builtins. */
9287 if (arm_arch5)
9288 def_builtin ("__builtin_clz", int_ftype_int, ARM_BUILTIN_CLZ);
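/* A hypothetical use (sketch; the function name is illustrative):

     int f (int x) { return __builtin_clz (x); }

   expands through CODE_FOR_clz in arm_expand_builtin below into a single
   clz instruction on ARMv5 targets.  */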
9291 /* Expand an expression EXP that calls a built-in function,
9292 with result going to TARGET if that's convenient
9293 (and in mode MODE if that's convenient).
9294 SUBTARGET may be used as the target for computing one of EXP's operands.
9295 IGNORE is nonzero if the value is to be ignored. */
9298 arm_expand_builtin (exp, target, subtarget, mode, ignore)
9299 tree exp;
9300 rtx target;
9301 rtx subtarget ATTRIBUTE_UNUSED;
9302 enum machine_mode mode ATTRIBUTE_UNUSED;
9303 int ignore ATTRIBUTE_UNUSED;
9305 enum insn_code icode;
9306 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9307 tree arglist = TREE_OPERAND (exp, 1);
9308 tree arg0;
9309 rtx op0, pat;
9310 enum machine_mode tmode, mode0;
9311 int fcode = DECL_FUNCTION_CODE (fndecl);
9313 switch (fcode)
9315 default:
9316 break;
9318 case ARM_BUILTIN_CLZ:
9319 icode = CODE_FOR_clz;
9320 arg0 = TREE_VALUE (arglist);
9321 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
9322 tmode = insn_data[icode].operand[0].mode;
9323 mode0 = insn_data[icode].operand[1].mode;
9325 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
9326 op0 = copy_to_mode_reg (mode0, op0);
9327 if (target == 0
9328 || GET_MODE (target) != tmode
9329 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
9330 target = gen_reg_rtx (tmode);
9331 pat = GEN_FCN (icode) (target, op0);
9332 if (! pat)
9333 return 0;
9334 emit_insn (pat);
9335 return target;
9338 /* @@@ Should really do something sensible here. */
9339 return NULL_RTX;
9342 /* Recursively search through all of the blocks in a function
9343 checking to see if any of the variables created in that
9344 function match the RTX called 'orig'. If they do then
9345 replace them with the RTX called 'new'. */
9347 static void
9348 replace_symbols_in_block (block, orig, new)
9349 tree block;
9350 rtx orig;
9351 rtx new;
9353 for (; block; block = BLOCK_CHAIN (block))
9355 tree sym;
9357 if (!TREE_USED (block))
9358 continue;
9360 for (sym = BLOCK_VARS (block); sym; sym = TREE_CHAIN (sym))
9362 if ( (DECL_NAME (sym) == 0 && TREE_CODE (sym) != TYPE_DECL)
9363 || DECL_IGNORED_P (sym)
9364 || TREE_CODE (sym) != VAR_DECL
9365 || DECL_EXTERNAL (sym)
9366 || !rtx_equal_p (DECL_RTL (sym), orig)
9368 continue;
9370 SET_DECL_RTL (sym, new);
9373 replace_symbols_in_block (BLOCK_SUBBLOCKS (block), orig, new);
9377 /* Return the number (counting from 0) of
9378 the least significant set bit in MASK. */
9380 #ifdef __GNUC__
9381 inline
9382 #endif
9383 static int
9384 number_of_first_bit_set (mask)
9385 int mask;
9387 int bit;
9389 for (bit = 0;
9390 (mask & (1 << bit)) == 0;
9391 ++bit)
9392 continue;
9394 return bit;
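/* For example, number_of_first_bit_set (0x14) == 2, since bit 2 is the
   lowest bit set in binary 10100.  Note the loop above never terminates
   for a zero MASK, so callers must pass a non-empty mask.  */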
9397 /* Generate code to return from a thumb function.
9398 If 'reg_containing_return_addr' is -1, then the return address is
9399 actually on the stack, at the stack pointer. */
9400 static void
9401 thumb_exit (f, reg_containing_return_addr, eh_ofs)
9402 FILE * f;
9403 int reg_containing_return_addr;
9404 rtx eh_ofs;
9406 unsigned regs_available_for_popping;
9407 unsigned regs_to_pop;
9408 int pops_needed;
9409 unsigned available;
9410 unsigned required;
9411 int mode;
9412 int size;
9413 int restore_a4 = FALSE;
9415 /* Compute the registers we need to pop. */
9416 regs_to_pop = 0;
9417 pops_needed = 0;
9419 /* There is an assumption here, that if eh_ofs is not NULL, the
9420 normal return address will have been pushed. */
9421 if (reg_containing_return_addr == -1 || eh_ofs)
9423 /* When we are generating a return for __builtin_eh_return,
9424 reg_containing_return_addr must specify the return regno. */
9425 if (eh_ofs && reg_containing_return_addr == -1)
9426 abort ();
9428 regs_to_pop |= 1 << LR_REGNUM;
9429 ++pops_needed;
9432 if (TARGET_BACKTRACE)
9434 /* Restore the (ARM) frame pointer and stack pointer. */
9435 regs_to_pop |= (1 << ARM_HARD_FRAME_POINTER_REGNUM) | (1 << SP_REGNUM);
9436 pops_needed += 2;
9439 /* If there is nothing to pop then just emit the BX instruction and
9440 return. */
9441 if (pops_needed == 0)
9443 if (eh_ofs)
9444 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
9446 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
9447 return;
9449 /* Otherwise if we are not supporting interworking and we have not created
9450 a backtrace structure and the function was not entered in ARM mode then
9451 just pop the return address straight into the PC. */
9452 else if (!TARGET_INTERWORK
9453 && !TARGET_BACKTRACE
9454 && !is_called_in_ARM_mode (current_function_decl))
9456 if (eh_ofs)
9458 asm_fprintf (f, "\tadd\t%r, #4\n", SP_REGNUM);
9459 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
9460 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
9462 else
9463 asm_fprintf (f, "\tpop\t{%r}\n", PC_REGNUM);
9465 return;
9468 /* Find out how many of the (return) argument registers we can corrupt. */
9469 regs_available_for_popping = 0;
9471 /* If returning via __builtin_eh_return, the bottom three registers
9472 all contain information needed for the return. */
9473 if (eh_ofs)
9474 size = 12;
9475 else
9477 #ifdef RTX_CODE
9478 /* We can deduce the registers used from the function's
9479 return value. This is more reliable than examining
9480 regs_ever_live[] because that will be set if the register is
9481 ever used in the function, not just if the register is used
9482 to hold a return value. */
9484 if (current_function_return_rtx != 0)
9485 mode = GET_MODE (current_function_return_rtx);
9486 else
9487 #endif
9488 mode = DECL_MODE (DECL_RESULT (current_function_decl));
9490 size = GET_MODE_SIZE (mode);
9492 if (size == 0)
9494 /* In a void function we can use any argument register.
9495 In a function that returns a structure on the stack
9496 we can use the second and third argument registers. */
9497 if (mode == VOIDmode)
9498 regs_available_for_popping =
9499 (1 << ARG_REGISTER (1))
9500 | (1 << ARG_REGISTER (2))
9501 | (1 << ARG_REGISTER (3));
9502 else
9503 regs_available_for_popping =
9504 (1 << ARG_REGISTER (2))
9505 | (1 << ARG_REGISTER (3));
9507 else if (size <= 4)
9508 regs_available_for_popping =
9509 (1 << ARG_REGISTER (2))
9510 | (1 << ARG_REGISTER (3));
9511 else if (size <= 8)
9512 regs_available_for_popping =
9513 (1 << ARG_REGISTER (3));
9516 /* Match registers to be popped with registers into which we pop them. */
9517 for (available = regs_available_for_popping,
9518 required = regs_to_pop;
9519 required != 0 && available != 0;
9520 available &= ~(available & - available),
9521 required &= ~(required & - required))
9522 -- pops_needed;
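/* In the loop above, (x & -x) isolates the lowest set bit of x, so each
   iteration pairs off one register to pop with one register to pop into.
   E.g. with available == 0x6, available & -available == 0x2, so bit 1 is
   cleared first.  */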
9524 /* If we have any popping registers left over, remove them. */
9525 if (available > 0)
9526 regs_available_for_popping &= ~available;
9528 /* Otherwise if we need another popping register we can use
9529 the fourth argument register. */
9530 else if (pops_needed)
9532 /* If we have not found any free argument registers and
9533 reg a4 contains the return address, we must move it. */
9534 if (regs_available_for_popping == 0
9535 && reg_containing_return_addr == LAST_ARG_REGNUM)
9537 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
9538 reg_containing_return_addr = LR_REGNUM;
9540 else if (size > 12)
9542 /* Register a4 is being used to hold part of the return value,
9543 but we have dire need of a free, low register. */
9544 restore_a4 = TRUE;
9546 asm_fprintf (f, "\tmov\t%r, %r\n",IP_REGNUM, LAST_ARG_REGNUM);
9549 if (reg_containing_return_addr != LAST_ARG_REGNUM)
9551 /* The fourth argument register is available. */
9552 regs_available_for_popping |= 1 << LAST_ARG_REGNUM;
9554 --pops_needed;
9558 /* Pop as many registers as we can. */
9559 thumb_pushpop (f, regs_available_for_popping, FALSE);
9561 /* Process the registers we popped. */
9562 if (reg_containing_return_addr == -1)
9564 /* The return address was popped into the lowest numbered register. */
9565 regs_to_pop &= ~(1 << LR_REGNUM);
9567 reg_containing_return_addr =
9568 number_of_first_bit_set (regs_available_for_popping);
9570 /* Remove this register from the mask of available registers, so that
9571 the return address will not be corrupted by further pops. */
9572 regs_available_for_popping &= ~(1 << reg_containing_return_addr);
9575 /* If we popped other registers then handle them here. */
9576 if (regs_available_for_popping)
9578 int frame_pointer;
9580 /* Work out which register currently contains the frame pointer. */
9581 frame_pointer = number_of_first_bit_set (regs_available_for_popping);
9583 /* Move it into the correct place. */
9584 asm_fprintf (f, "\tmov\t%r, %r\n",
9585 ARM_HARD_FRAME_POINTER_REGNUM, frame_pointer);
9587 /* (Temporarily) remove it from the mask of popped registers. */
9588 regs_available_for_popping &= ~(1 << frame_pointer);
9589 regs_to_pop &= ~(1 << ARM_HARD_FRAME_POINTER_REGNUM);
9591 if (regs_available_for_popping)
9593 int stack_pointer;
9595 /* We popped the stack pointer as well,
9596 find the register that contains it. */
9597 stack_pointer = number_of_first_bit_set (regs_available_for_popping);
9599 /* Move it into the stack register. */
9600 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, stack_pointer);
9602 /* At this point we have popped all necessary registers, so
9603 do not worry about restoring regs_available_for_popping
9604 to its correct value:
9606 assert (pops_needed == 0)
9607 assert (regs_available_for_popping == (1 << frame_pointer))
9608 assert (regs_to_pop == (1 << STACK_POINTER)) */
9610 else
9612 /* Since we have just moved the popped value into the frame
9613 pointer, the popping register is available for reuse, and
9614 we know that we still have the stack pointer left to pop. */
9615 regs_available_for_popping |= (1 << frame_pointer);
9619 /* If we still have registers left on the stack, but we no longer have
9620 any registers into which we can pop them, then we must move the return
9621 address into the link register and make available the register that
9622 contained it. */
9623 if (regs_available_for_popping == 0 && pops_needed > 0)
9625 regs_available_for_popping |= 1 << reg_containing_return_addr;
9627 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM,
9628 reg_containing_return_addr);
9630 reg_containing_return_addr = LR_REGNUM;
9633 /* If we have registers left on the stack then pop some more.
9634 We know that at most we will want to pop FP and SP. */
9635 if (pops_needed > 0)
9637 int popped_into;
9638 int move_to;
9640 thumb_pushpop (f, regs_available_for_popping, FALSE);
9642 /* We have popped either FP or SP.
9643 Move whichever one it is into the correct register. */
9644 popped_into = number_of_first_bit_set (regs_available_for_popping);
9645 move_to = number_of_first_bit_set (regs_to_pop);
9647 asm_fprintf (f, "\tmov\t%r, %r\n", move_to, popped_into);
9649 regs_to_pop &= ~(1 << move_to);
9651 --pops_needed;
9654 /* If we still have not popped everything then we must have only
9655 had one register available to us and we are now popping the SP. */
9656 if (pops_needed > 0)
9658 int popped_into;
9660 thumb_pushpop (f, regs_available_for_popping, FALSE);
9662 popped_into = number_of_first_bit_set (regs_available_for_popping);
9664 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, popped_into);
9666 assert (regs_to_pop == (1 << STACK_POINTER))
9667 assert (pops_needed == 1)
9671 /* If necessary restore the a4 register. */
9672 if (restore_a4)
9674 if (reg_containing_return_addr != LR_REGNUM)
9676 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
9677 reg_containing_return_addr = LR_REGNUM;
9680 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
9683 if (eh_ofs)
9684 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
9686 /* Return to caller. */
9687 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
9690 /* Emit code to push or pop registers to or from the stack. */
9692 static void
9693 thumb_pushpop (f, mask, push)
9694 FILE * f;
9695 int mask;
9696 int push;
9698 int regno;
9699 int lo_mask = mask & 0xFF;
9701 if (lo_mask == 0 && !push && (mask & (1 << 15)))
9703 /* Special case. Do not generate a POP PC statement here, do it in
9704 thumb_exit() */
9705 thumb_exit (f, -1, NULL_RTX);
9706 return;
9709 fprintf (f, "\t%s\t{", push ? "push" : "pop");
9711 /* Look at the low registers first. */
9712 for (regno = 0; regno <= LAST_LO_REGNUM; regno++, lo_mask >>= 1)
9714 if (lo_mask & 1)
9716 asm_fprintf (f, "%r", regno);
9718 if ((lo_mask & ~1) != 0)
9719 fprintf (f, ", ");
9723 if (push && (mask & (1 << LR_REGNUM)))
9725 /* Catch pushing the LR. */
9726 if (mask & 0xFF)
9727 fprintf (f, ", ");
9729 asm_fprintf (f, "%r", LR_REGNUM);
9731 else if (!push && (mask & (1 << PC_REGNUM)))
9733 /* Catch popping the PC. */
9734 if (TARGET_INTERWORK || TARGET_BACKTRACE)
9736 /* The PC is never popped directly; instead
9737 it is popped into r3 and then BX is used. */
9738 fprintf (f, "}\n");
9740 thumb_exit (f, -1, NULL_RTX);
9742 return;
9744 else
9746 if (mask & 0xFF)
9747 fprintf (f, ", ");
9749 asm_fprintf (f, "%r", PC_REGNUM);
9753 fprintf (f, "}\n");
9756 void
9757 thumb_final_prescan_insn (insn)
9758 rtx insn;
9760 if (flag_print_asm_name)
9761 asm_fprintf (asm_out_file, "%@ 0x%04x\n",
9762 INSN_ADDRESSES (INSN_UID (insn)));
9766 thumb_shiftable_const (val)
9767 unsigned HOST_WIDE_INT val;
9769 unsigned HOST_WIDE_INT mask = 0xff;
9770 int i;
9772 if (val == 0) /* XXX */
9773 return 0;
9775 for (i = 0; i < 25; i++)
9776 if ((val & (mask << i)) == val)
9777 return 1;
9779 return 0;
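/* For example, 0x00ff0000 (0xff << 16) is shiftable and returns 1, while
   0x101 spans nine bit positions, fits no 8-bit window, and returns 0.  */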
9782 /* Returns non-zero if the current function contains,
9783 or might contain, a far jump. */
9786 thumb_far_jump_used_p (int in_prologue)
9788 rtx insn;
9790 /* This test is only important for leaf functions. */
9791 /* assert (!leaf_function_p ()); */
9793 /* If we have already decided that far jumps may be used,
9794 do not bother checking again, and always return true even if
9795 it turns out that they are not being used. Once we have made
9796 the decision that far jumps are present (and that hence the link
9797 register will be pushed onto the stack) we cannot go back on it. */
9798 if (cfun->machine->far_jump_used)
9799 return 1;
9801 /* If this function is not being called from the prologue/epilogue
9802 generation code then it must be being called from the
9803 INITIAL_ELIMINATION_OFFSET macro. */
9804 if (!in_prologue)
9806 /* In this case we know that we are being asked about the elimination
9807 of the arg pointer register. If that register is not being used,
9808 then there are no arguments on the stack, and we do not have to
9809 worry that a far jump might force the prologue to push the link
9810 register, changing the stack offsets. In this case we can just
9811 return false, since the presence of far jumps in the function will
9812 not affect stack offsets.
9814 If the arg pointer is live (or if it was live, but has now been
9815 eliminated and so set to dead) then we do have to test to see if
9816 the function might contain a far jump. This test can lead to some
9817 false negatives, since before reload is completed the length of
9818 branch instructions is not known, so gcc defaults to returning their
9819 longest length, which in turn sets the far jump attribute to true.
9821 A false negative will not result in bad code being generated, but it
9822 will result in a needless push and pop of the link register. We
9823 hope that this does not occur too often. */
9824 if (regs_ever_live [ARG_POINTER_REGNUM])
9825 cfun->machine->arg_pointer_live = 1;
9826 else if (!cfun->machine->arg_pointer_live)
9827 return 0;
9830 /* Check to see if the function contains a branch
9831 insn with the far jump attribute set. */
9832 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9834 if (GET_CODE (insn) == JUMP_INSN
9835 /* Ignore tablejump patterns. */
9836 && GET_CODE (PATTERN (insn)) != ADDR_VEC
9837 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
9838 && get_attr_far_jump (insn) == FAR_JUMP_YES
9841 /* Record the fact that we have decided that
9842 the function does use far jumps. */
9843 cfun->machine->far_jump_used = 1;
9844 return 1;
9848 return 0;
9851 /* Return non-zero if FUNC must be entered in ARM mode. */
9854 is_called_in_ARM_mode (func)
9855 tree func;
9857 if (TREE_CODE (func) != FUNCTION_DECL)
9858 abort ();
9860 /* Ignore the problem about functions whose address is taken. */
9861 if (TARGET_CALLEE_INTERWORKING && TREE_PUBLIC (func))
9862 return TRUE;
9864 #ifdef ARM_PE
9865 return lookup_attribute ("interfacearm", DECL_ATTRIBUTES (func)) != NULL_TREE;
9866 #else
9867 return FALSE;
9868 #endif
9871 /* The bits which aren't usefully expanded as rtl. */
9873 const char *
9874 thumb_unexpanded_epilogue ()
9876 int regno;
9877 int live_regs_mask = 0;
9878 int high_regs_pushed = 0;
9879 int leaf_function = leaf_function_p ();
9880 int had_to_push_lr;
9881 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
9883 if (return_used_this_function)
9884 return "";
9886 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
9887 if (regs_ever_live[regno] && !call_used_regs[regno]
9888 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
9889 live_regs_mask |= 1 << regno;
9891 for (regno = 8; regno < 13; regno++)
9893 if (regs_ever_live[regno] && !call_used_regs[regno]
9894 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
9895 high_regs_pushed++;
9898 /* The prologue may have pushed some high registers to use as
9899 work registers, e.g. the testsuite file:
9900 gcc/testsuite/gcc/gcc.c-torture/execute/complex-2.c
9901 compiles to produce:
9902 push {r4, r5, r6, r7, lr}
9903 mov r7, r9
9904 mov r6, r8
9905 push {r6, r7}
9906 as part of the prolog. We have to undo that pushing here. */
9908 if (high_regs_pushed)
9910 int mask = live_regs_mask;
9911 int next_hi_reg;
9912 int size;
9913 int mode;
9915 #ifdef RTX_CODE
9916 /* We can deduce the registers used from the function's return value.
9917 This is more reliable than examining regs_ever_live[] because that
9918 will be set if the register is ever used in the function, not just if
9919 the register is used to hold a return value. */
9921 if (current_function_return_rtx != 0)
9922 mode = GET_MODE (current_function_return_rtx);
9923 else
9924 #endif
9925 mode = DECL_MODE (DECL_RESULT (current_function_decl));
9927 size = GET_MODE_SIZE (mode);
9929 /* Unless we are returning a type of size > 12 register r3 is
9930 available. */
9931 if (size < 13)
9932 mask |= 1 << 3;
9934 if (mask == 0)
9935 /* Oh dear! We have no low registers into which we can pop
9936 high registers! */
9937 internal_error
9938 ("no low registers available for popping high registers");
9940 for (next_hi_reg = 8; next_hi_reg < 13; next_hi_reg++)
9941 if (regs_ever_live[next_hi_reg] && !call_used_regs[next_hi_reg]
9942 && !(TARGET_SINGLE_PIC_BASE && (next_hi_reg == arm_pic_register)))
9943 break;
9945 while (high_regs_pushed)
9947 /* Find lo register(s) into which the high register(s) can
9948 be popped. */
9949 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
9951 if (mask & (1 << regno))
9952 high_regs_pushed--;
9953 if (high_regs_pushed == 0)
9954 break;
9957 mask &= (2 << regno) - 1; /* A noop if regno == 8 */
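/* (2 << regno) - 1 is a mask with bits 0..regno set (e.g. regno == 2
   gives 0x7), so MASK is trimmed to just the low registers scanned
   above.  */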
9959 /* Pop the values into the low register(s). */
9960 thumb_pushpop (asm_out_file, mask, 0);
9962 /* Move the value(s) into the high registers. */
9963 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
9965 if (mask & (1 << regno))
9967 asm_fprintf (asm_out_file, "\tmov\t%r, %r\n", next_hi_reg,
9968 regno);
9970 for (next_hi_reg++; next_hi_reg < 13; next_hi_reg++)
9971 if (regs_ever_live[next_hi_reg]
9972 && !call_used_regs[next_hi_reg]
9973 && !(TARGET_SINGLE_PIC_BASE
9974 && (next_hi_reg == arm_pic_register)))
9975 break;
9981 had_to_push_lr = (live_regs_mask || !leaf_function
9982 || thumb_far_jump_used_p (1));
9984 if (TARGET_BACKTRACE
9985 && ((live_regs_mask & 0xFF) == 0)
9986 && regs_ever_live [LAST_ARG_REGNUM] != 0)
9988 /* The stack backtrace structure creation code had to
9989 push R7 in order to get a work register, so we pop
9990 it now. */
9991 live_regs_mask |= (1 << LAST_LO_REGNUM);
9994 if (current_function_pretend_args_size == 0 || TARGET_BACKTRACE)
9996 if (had_to_push_lr
9997 && !is_called_in_ARM_mode (current_function_decl)
9998 && !eh_ofs)
9999 live_regs_mask |= 1 << PC_REGNUM;
10001 /* Either no argument registers were pushed or a backtrace
10002 structure was created which includes an adjusted stack
10003 pointer, so just pop everything. */
10004 if (live_regs_mask)
10005 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
10007 if (eh_ofs)
10008 thumb_exit (asm_out_file, 2, eh_ofs);
10009 /* We have either just popped the return address into the
10010 PC, or it was kept in LR for the entire function, or
10011 it is still on the stack because we do not want to
10012 return by doing a pop {pc}. */
10013 else if ((live_regs_mask & (1 << PC_REGNUM)) == 0)
10014 thumb_exit (asm_out_file,
10015 (had_to_push_lr
10016 && is_called_in_ARM_mode (current_function_decl)) ?
10017 -1 : LR_REGNUM, NULL_RTX);
10019 else
10021 /* Pop everything but the return address. */
10022 live_regs_mask &= ~(1 << PC_REGNUM);
10024 if (live_regs_mask)
10025 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
10027 if (had_to_push_lr)
10028 /* Get the return address into a temporary register. */
10029 thumb_pushpop (asm_out_file, 1 << LAST_ARG_REGNUM, 0);
10031 /* Remove the argument registers that were pushed onto the stack. */
10032 asm_fprintf (asm_out_file, "\tadd\t%r, %r, #%d\n",
10033 SP_REGNUM, SP_REGNUM,
10034 current_function_pretend_args_size);
10036 if (eh_ofs)
10037 thumb_exit (asm_out_file, 2, eh_ofs);
10038 else
10039 thumb_exit (asm_out_file,
10040 had_to_push_lr ? LAST_ARG_REGNUM : LR_REGNUM, NULL_RTX);
10043 return "";
10046 /* Functions to save and restore machine-specific function data. */
10048 static void
10049 arm_mark_machine_status (p)
10050 struct function * p;
10052 machine_function *machine = p->machine;
10054 if (machine)
10055 ggc_mark_rtx (machine->eh_epilogue_sp_ofs);
10058 static void
10059 arm_init_machine_status (p)
10060 struct function * p;
10062 p->machine =
10063 (machine_function *) xcalloc (1, sizeof (machine_function));
10065 #if ARM_FT_UNKNOWN != 0
10066 ((machine_function *) p->machine)->func_type = ARM_FT_UNKNOWN;
10067 #endif
10070 static void
10071 arm_free_machine_status (p)
10072 struct function * p;
10074 if (p->machine)
10076 free (p->machine);
10077 p->machine = NULL;
10081 /* Return an RTX indicating where the return address to the
10082 calling function can be found. */
10085 arm_return_addr (count, frame)
10086 int count;
10087 rtx frame ATTRIBUTE_UNUSED;
10089 if (count != 0)
10090 return NULL_RTX;
10092 if (TARGET_APCS_32)
10093 return get_hard_reg_initial_val (Pmode, LR_REGNUM);
10094 else
10096 rtx lr = gen_rtx_AND (Pmode, gen_rtx_REG (Pmode, LR_REGNUM),
10097 GEN_INT (RETURN_ADDR_MASK26));
10098 return get_func_hard_reg_initial_val (cfun, lr);
10102 /* Do anything needed before RTL is emitted for each function. */
10104 void
10105 arm_init_expanders ()
10107 /* Arrange to initialize and mark the machine per-function status. */
10108 init_machine_status = arm_init_machine_status;
10109 mark_machine_status = arm_mark_machine_status;
10110 free_machine_status = arm_free_machine_status;
10113 /* Generate the rest of a function's prologue. */
10115 void
10116 thumb_expand_prologue ()
10118 HOST_WIDE_INT amount = (get_frame_size ()
10119 + current_function_outgoing_args_size);
10120 unsigned long func_type;
10122 func_type = arm_current_func_type ();
10124 /* Naked functions don't have prologues. */
10125 if (IS_NAKED (func_type))
10126 return;
10128 if (IS_INTERRUPT (func_type))
10130 error ("interrupt service routines cannot be coded in Thumb mode");
10131 return;
10134 if (frame_pointer_needed)
10135 emit_insn (gen_movsi (hard_frame_pointer_rtx, stack_pointer_rtx));
10137 if (amount)
10139 amount = ROUND_UP (amount);
10141 if (amount < 512)
10142 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
10143 GEN_INT (- amount)));
10144 else
10146 int regno;
10147 rtx reg;
10149 /* The stack decrement is too big for an immediate value in a single
10150 insn. In theory we could issue multiple subtracts, but after
10151 three of them it becomes more space efficient to place the full
10152 value in the constant pool and load into a register. (Also the
10153 ARM debugger really likes to see only one stack decrement per
10154 function). So instead we look for a scratch register into which
10155 we can load the decrement, and then we subtract this from the
10156 stack pointer. Unfortunately on the thumb the only available
10157 scratch registers are the argument registers, and we cannot use
10158 these as they may hold arguments to the function. Instead we
10159 attempt to locate a call preserved register which is used by this
10160 function. If we can find one, then we know that it will have
10161 been pushed at the start of the prologue and so we can corrupt
10162 it now. */
10163 for (regno = LAST_ARG_REGNUM + 1; regno <= LAST_LO_REGNUM; regno++)
10164 if (regs_ever_live[regno]
10165 && !call_used_regs[regno] /* Paranoia */
10166 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register))
10167 && !(frame_pointer_needed
10168 && (regno == THUMB_HARD_FRAME_POINTER_REGNUM)))
10169 break;
10171 if (regno > LAST_LO_REGNUM) /* Very unlikely */
10173 rtx spare = gen_rtx (REG, SImode, IP_REGNUM);
10175 /* Choose an arbitrary, non-argument low register. */
10176 reg = gen_rtx (REG, SImode, LAST_LO_REGNUM);
10178 /* Save it by copying it into a high, scratch register. */
10179 emit_insn (gen_movsi (spare, reg));
10180 /* Add a USE to stop propagate_one_insn() from barfing. */
10181 emit_insn (gen_prologue_use (spare));
10183 /* Decrement the stack. */
10184 emit_insn (gen_movsi (reg, GEN_INT (- amount)));
10185 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
10186 reg));
10188 /* Restore the low register's original value. */
10189 emit_insn (gen_movsi (reg, spare));
10191 /* Emit a USE of the restored scratch register, so that flow
10192 analysis will not consider the restore redundant. The
10193 register won't be used again in this function and isn't
10194 restored by the epilogue. */
10195 emit_insn (gen_prologue_use (reg));
10197 else
10199 reg = gen_rtx (REG, SImode, regno);
10201 emit_insn (gen_movsi (reg, GEN_INT (- amount)));
10202 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
10203 reg));
10208 if (current_function_profile || TARGET_NO_SCHED_PRO)
10209 emit_insn (gen_blockage ());
10212 void
10213 thumb_expand_epilogue ()
10215 HOST_WIDE_INT amount = (get_frame_size ()
10216 + current_function_outgoing_args_size);
10218 /* Naked functions don't have prologues. */
10219 if (IS_NAKED (arm_current_func_type ()))
10220 return;
10222 if (frame_pointer_needed)
10223 emit_insn (gen_movsi (stack_pointer_rtx, hard_frame_pointer_rtx));
10224 else if (amount)
10226 amount = ROUND_UP (amount);
10228 if (amount < 512)
10229 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
10230 GEN_INT (amount)));
10231 else
10233 /* r3 is always free in the epilogue. */
10234 rtx reg = gen_rtx (REG, SImode, LAST_ARG_REGNUM);
10236 emit_insn (gen_movsi (reg, GEN_INT (amount)));
10237 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
10241 /* Emit a USE (stack_pointer_rtx), so that
10242 the stack adjustment will not be deleted. */
10243 emit_insn (gen_prologue_use (stack_pointer_rtx));
10245 if (current_function_profile || TARGET_NO_SCHED_PRO)
10246 emit_insn (gen_blockage ());
static void
thumb_output_function_prologue (f, size)
     FILE * f;
     HOST_WIDE_INT size ATTRIBUTE_UNUSED;
{
  int live_regs_mask = 0;
  int high_regs_pushed = 0;
  int regno;

  if (IS_NAKED (arm_current_func_type ()))
    return;

  if (is_called_in_ARM_mode (current_function_decl))
    {
      const char * name;

      if (GET_CODE (DECL_RTL (current_function_decl)) != MEM)
	abort ();
      if (GET_CODE (XEXP (DECL_RTL (current_function_decl), 0)) != SYMBOL_REF)
	abort ();
      name = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);

      /* Generate code sequence to switch us into Thumb mode.  */
      /* The .code 32 directive has already been emitted by
	 ASM_DECLARE_FUNCTION_NAME.  */
      asm_fprintf (f, "\torr\t%r, %r, #1\n", IP_REGNUM, PC_REGNUM);
      asm_fprintf (f, "\tbx\t%r\n", IP_REGNUM);

      /* Generate a label, so that the debugger will notice the
	 change in instruction sets.  This label is also used by
	 the assembler to bypass the ARM code when this function
	 is called from a Thumb encoded function elsewhere in the
	 same file.  Hence the definition of STUB_NAME here must
	 agree with the definition in gas/config/tc-arm.c.  */
#define STUB_NAME ".real_start_of"

      asm_fprintf (f, "\t.code\t16\n");
#ifdef ARM_PE
      if (arm_dllexport_name_p (name))
	name = arm_strip_name_encoding (name);
#endif
      asm_fprintf (f, "\t.globl %s%U%s\n", STUB_NAME, name);
      asm_fprintf (f, "\t.thumb_func\n");
      asm_fprintf (f, "%s%U%s:\n", STUB_NAME, name);
    }

  if (current_function_pretend_args_size)
    {
      if (cfun->machine->uses_anonymous_args)
	{
	  int num_pushes;

	  asm_fprintf (f, "\tpush\t{");

	  num_pushes = NUM_INTS (current_function_pretend_args_size);

	  for (regno = LAST_ARG_REGNUM + 1 - num_pushes;
	       regno <= LAST_ARG_REGNUM;
	       regno++)
	    asm_fprintf (f, "%r%s", regno,
			 regno == LAST_ARG_REGNUM ? "" : ", ");

	  asm_fprintf (f, "}\n");
	}
      else
	asm_fprintf (f, "\tsub\t%r, %r, #%d\n",
		     SP_REGNUM, SP_REGNUM,
		     current_function_pretend_args_size);
    }

  for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
    if (regs_ever_live[regno] && !call_used_regs[regno]
	&& !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
      live_regs_mask |= 1 << regno;

  if (live_regs_mask || !leaf_function_p () || thumb_far_jump_used_p (1))
    live_regs_mask |= 1 << LR_REGNUM;

  if (TARGET_BACKTRACE)
    {
      int offset;
      int work_register = 0;
      int wr;

      /* We have been asked to create a stack backtrace structure.
	 The code looks like this:

	 0   .align 2
	 0   func:
	 0     sub   SP, #16         Reserve space for 4 registers.
	 2     push  {R7}            Get a work register.
	 4     add   R7, SP, #20     Get the stack pointer before the push.
	 6     str   R7, [SP, #8]    Store the stack pointer (before reserving the space).
	 8     mov   R7, PC          Get hold of the start of this code plus 12.
	 10    str   R7, [SP, #16]   Store it.
	 12    mov   R7, FP          Get hold of the current frame pointer.
	 14    str   R7, [SP, #4]    Store it.
	 16    mov   R7, LR          Get hold of the current return address.
	 18    str   R7, [SP, #12]   Store it.
	 20    add   R7, SP, #16     Point at the start of the backtrace structure.
	 22    mov   FP, R7          Put this value into the frame pointer.  */

      if ((live_regs_mask & 0xFF) == 0)
	{
	  /* See if the a4 register is free.  */
	  if (regs_ever_live[LAST_ARG_REGNUM] == 0)
	    work_register = LAST_ARG_REGNUM;
	  else	/* We must push a register of our own.  */
	    live_regs_mask |= (1 << LAST_LO_REGNUM);
	}

      if (work_register == 0)
	{
	  /* Select a register from the list that will be pushed to
	     use as our work register.  */
	  for (work_register = (LAST_LO_REGNUM + 1); work_register--;)
	    if ((1 << work_register) & live_regs_mask)
	      break;
	}

      asm_fprintf
	(f, "\tsub\t%r, %r, #16\t%@ Create stack backtrace structure\n",
	 SP_REGNUM, SP_REGNUM);

      if (live_regs_mask)
	thumb_pushpop (f, live_regs_mask, 1);

      for (offset = 0, wr = 1 << 15; wr != 0; wr >>= 1)
	if (wr & live_regs_mask)
	  offset += 4;

      asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
		   offset + 16 + current_function_pretend_args_size);

      asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
		   offset + 4);

      /* Make sure that the instruction fetching the PC is in the right place
	 to calculate "start of backtrace creation code + 12".  */
      if (live_regs_mask)
	{
	  asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
	  asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
		       offset + 12);
	  asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
		       ARM_HARD_FRAME_POINTER_REGNUM);
	  asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
		       offset);
	}
      else
	{
	  asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
		       ARM_HARD_FRAME_POINTER_REGNUM);
	  asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
		       offset);
	  asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
	  asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
		       offset + 12);
	}

      asm_fprintf (f, "\tmov\t%r, %r\n", work_register, LR_REGNUM);
      asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
		   offset + 8);
      asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
		   offset + 12);
      asm_fprintf (f, "\tmov\t%r, %r\t\t%@ Backtrace structure created\n",
		   ARM_HARD_FRAME_POINTER_REGNUM, work_register);
    }
  else if (live_regs_mask)
    thumb_pushpop (f, live_regs_mask, 1);

  for (regno = 8; regno < 13; regno++)
    {
      if (regs_ever_live[regno] && !call_used_regs[regno]
	  && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
	high_regs_pushed++;
    }

  if (high_regs_pushed)
    {
      int pushable_regs = 0;
      int mask = live_regs_mask & 0xff;
      int next_hi_reg;

      for (next_hi_reg = 12; next_hi_reg > LAST_LO_REGNUM; next_hi_reg--)
	{
	  if (regs_ever_live[next_hi_reg] && !call_used_regs[next_hi_reg]
	      && !(TARGET_SINGLE_PIC_BASE
		   && (next_hi_reg == arm_pic_register)))
	    break;
	}

      pushable_regs = mask;

      if (pushable_regs == 0)
	{
	  /* Desperation time -- this probably will never happen.  */
	  if (regs_ever_live[LAST_ARG_REGNUM]
	      || !call_used_regs[LAST_ARG_REGNUM])
	    asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, LAST_ARG_REGNUM);
	  mask = 1 << LAST_ARG_REGNUM;
	}

      while (high_regs_pushed > 0)
	{
	  for (regno = LAST_LO_REGNUM; regno >= 0; regno--)
	    {
	      if (mask & (1 << regno))
		{
		  asm_fprintf (f, "\tmov\t%r, %r\n", regno, next_hi_reg);

		  high_regs_pushed--;

		  if (high_regs_pushed)
		    for (next_hi_reg--; next_hi_reg > LAST_LO_REGNUM;
			 next_hi_reg--)
		      {
			if (regs_ever_live[next_hi_reg]
			    && !call_used_regs[next_hi_reg]
			    && !(TARGET_SINGLE_PIC_BASE
				 && (next_hi_reg == arm_pic_register)))
			  break;
		      }
		  else
		    {
		      mask &= ~((1 << regno) - 1);
		      break;
		    }
		}
	    }

	  thumb_pushpop (f, mask, 1);
	}

      if (pushable_regs == 0
	  && (regs_ever_live[LAST_ARG_REGNUM]
	      || !call_used_regs[LAST_ARG_REGNUM]))
	asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
    }
}
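
/* Illustrative sketch (hypothetical helper, not used by the code
   above): the OFFSET computed while building the backtrace structure
   is simply four bytes for every register present in the push mask,
   i.e. four times the population count of the mask.  */

static int
thumb_push_mask_bytes (unsigned int mask)
{
  int bytes = 0;

  while (mask)
    {
      bytes += 4;
      mask &= mask - 1;		/* Clear the lowest set bit.  */
    }

  return bytes;
}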
/* Handle the case of a double word load into a low register from
   a computed memory address.  The computed address may involve a
   register which is overwritten by the load.  */

const char *
thumb_load_double_from_address (operands)
     rtx *operands;
{
  rtx addr;
  rtx base;
  rtx offset;
  rtx arg1;
  rtx arg2;

  if (GET_CODE (operands[0]) != REG)
    abort ();

  if (GET_CODE (operands[1]) != MEM)
    abort ();

  /* Get the memory address.  */
  addr = XEXP (operands[1], 0);

  /* Work out how the memory address is computed.  */
  switch (GET_CODE (addr))
    {
    case REG:
      operands[2] = gen_rtx (MEM, SImode,
			     plus_constant (XEXP (operands[1], 0), 4));

      if (REGNO (operands[0]) == REGNO (addr))
	{
	  output_asm_insn ("ldr\t%H0, %2", operands);
	  output_asm_insn ("ldr\t%0, %1", operands);
	}
      else
	{
	  output_asm_insn ("ldr\t%0, %1", operands);
	  output_asm_insn ("ldr\t%H0, %2", operands);
	}
      break;

    case CONST:
      /* Compute <address> + 4 for the high order load.  */
      operands[2] = gen_rtx (MEM, SImode,
			     plus_constant (XEXP (operands[1], 0), 4));

      output_asm_insn ("ldr\t%0, %1", operands);
      output_asm_insn ("ldr\t%H0, %2", operands);
      break;

    case PLUS:
      arg1 = XEXP (addr, 0);
      arg2 = XEXP (addr, 1);

      if (CONSTANT_P (arg1))
	base = arg2, offset = arg1;
      else
	base = arg1, offset = arg2;

      if (GET_CODE (base) != REG)
	abort ();

      /* Catch the case of <address> = <reg> + <reg>.  */
      if (GET_CODE (offset) == REG)
	{
	  int reg_offset = REGNO (offset);
	  int reg_base   = REGNO (base);
	  int reg_dest   = REGNO (operands[0]);

	  /* Add the base and offset registers together into the
	     higher destination register.  */
	  asm_fprintf (asm_out_file, "\tadd\t%r, %r, %r\n",
		       reg_dest + 1, reg_base, reg_offset);

	  /* Load the lower destination register from the address in
	     the higher destination register.  */
	  asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #0]\n",
		       reg_dest, reg_dest + 1);

	  /* Load the higher destination register from its own address
	     plus 4.  */
	  asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #4]\n",
		       reg_dest + 1, reg_dest + 1);
	}
      else
	{
	  /* Compute <address> + 4 for the high order load.  */
	  operands[2] = gen_rtx (MEM, SImode,
				 plus_constant (XEXP (operands[1], 0), 4));

	  /* If the computed address is held in the low order register
	     then load the high order register first, otherwise always
	     load the low order register first.  */
	  if (REGNO (operands[0]) == REGNO (base))
	    {
	      output_asm_insn ("ldr\t%H0, %2", operands);
	      output_asm_insn ("ldr\t%0, %1", operands);
	    }
	  else
	    {
	      output_asm_insn ("ldr\t%0, %1", operands);
	      output_asm_insn ("ldr\t%H0, %2", operands);
	    }
	}
      break;

    case LABEL_REF:
      /* With no registers to worry about we can just load the value
	 directly.  */
      operands[2] = gen_rtx (MEM, SImode,
			     plus_constant (XEXP (operands[1], 0), 4));

      output_asm_insn ("ldr\t%H0, %2", operands);
      output_asm_insn ("ldr\t%0, %1", operands);
      break;

    default:
      abort ();
      break;
    }

  return "";
}
const char *
thumb_output_move_mem_multiple (n, operands)
     int n;
     rtx * operands;
{
  rtx tmp;

  switch (n)
    {
    case 2:
      if (REGNO (operands[4]) > REGNO (operands[5]))
	{
	  tmp = operands[4];
	  operands[4] = operands[5];
	  operands[5] = tmp;
	}
      output_asm_insn ("ldmia\t%1!, {%4, %5}", operands);
      output_asm_insn ("stmia\t%0!, {%4, %5}", operands);
      break;

    case 3:
      if (REGNO (operands[4]) > REGNO (operands[5]))
	{
	  tmp = operands[4];
	  operands[4] = operands[5];
	  operands[5] = tmp;
	}
      if (REGNO (operands[5]) > REGNO (operands[6]))
	{
	  tmp = operands[5];
	  operands[5] = operands[6];
	  operands[6] = tmp;
	}
      if (REGNO (operands[4]) > REGNO (operands[5]))
	{
	  tmp = operands[4];
	  operands[4] = operands[5];
	  operands[5] = tmp;
	}

      output_asm_insn ("ldmia\t%1!, {%4, %5, %6}", operands);
      output_asm_insn ("stmia\t%0!, {%4, %5, %6}", operands);
      break;

    default:
      abort ();
    }

  return "";
}
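
/* Illustrative sketch (hypothetical helper): the three compare-and-
   swap steps in the n == 3 case above form a minimal sorting network
   for three values.  The sorting is needed because ldmia/stmia
   require their register lists in ascending order.  */

static void
sort3_regnos (int *a, int *b, int *c)
{
  int t;

  if (*a > *b) { t = *a; *a = *b; *b = t; }
  if (*b > *c) { t = *b; *b = *c; *c = t; }
  if (*a > *b) { t = *a; *a = *b; *b = t; }
}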
/* Routines for generating rtl.  */

void
thumb_expand_movstrqi (operands)
     rtx * operands;
{
  rtx out = copy_to_mode_reg (SImode, XEXP (operands[0], 0));
  rtx in  = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
  HOST_WIDE_INT len = INTVAL (operands[2]);
  HOST_WIDE_INT offset = 0;

  while (len >= 12)
    {
      emit_insn (gen_movmem12b (out, in, out, in));
      len -= 12;
    }

  if (len >= 8)
    {
      emit_insn (gen_movmem8b (out, in, out, in));
      len -= 8;
    }

  if (len >= 4)
    {
      rtx reg = gen_reg_rtx (SImode);
      emit_insn (gen_movsi (reg, gen_rtx (MEM, SImode, in)));
      emit_insn (gen_movsi (gen_rtx (MEM, SImode, out), reg));
      len -= 4;
      offset += 4;
    }

  if (len >= 2)
    {
      rtx reg = gen_reg_rtx (HImode);
      emit_insn (gen_movhi (reg, gen_rtx (MEM, HImode,
					  plus_constant (in, offset))));
      emit_insn (gen_movhi (gen_rtx (MEM, HImode, plus_constant (out, offset)),
			    reg));
      len -= 2;
      offset += 2;
    }

  if (len)
    {
      rtx reg = gen_reg_rtx (QImode);
      emit_insn (gen_movqi (reg, gen_rtx (MEM, QImode,
					  plus_constant (in, offset))));
      emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (out, offset)),
			    reg));
    }
}
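
/* Illustrative sketch (not part of the compiler): the greedy
   decomposition performed by thumb_expand_movstrqi above, shown on
   plain integers.  A copy of LEN bytes becomes some number of 12-byte
   ldmia/stmia groups, at most one 8-byte group, then word, halfword
   and byte tails.  The helper and its out-parameters are made up for
   illustration.  */

static void
decompose_block_copy (int len, int *groups12, int *group8,
		      int *word, int *half, int *byte)
{
  *groups12 = len / 12;		/* The while loop above.  */
  len %= 12;

  *group8 = (len >= 8);
  if (*group8)
    len -= 8;

  *word = (len >= 4);
  if (*word)
    len -= 4;

  *half = (len >= 2);
  if (*half)
    len -= 2;

  *byte = (len != 0);
}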
/* Return nonzero if OP is a valid operand for a Thumb comparison:
   either a register, or a constant that fits in the 8-bit unsigned
   immediate field of the Thumb CMP instruction (hence the < 256
   test).  */

int
thumb_cmp_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return ((GET_CODE (op) == CONST_INT
	   && (unsigned HOST_WIDE_INT) (INTVAL (op)) < 256)
	  || register_operand (op, mode));
}
static const char *
thumb_condition_code (x, invert)
     rtx x;
     int invert;
{
  static const char * const conds[] =
  {
    "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
    "hi", "ls", "ge", "lt", "gt", "le"
  };
  int val;

  switch (GET_CODE (x))
    {
    case EQ: val = 0; break;
    case NE: val = 1; break;
    case GEU: val = 2; break;
    case LTU: val = 3; break;
    case GTU: val = 8; break;
    case LEU: val = 9; break;
    case GE: val = 10; break;
    case LT: val = 11; break;
    case GT: val = 12; break;
    case LE: val = 13; break;

    default:
      abort ();
    }

  return conds[val ^ invert];
}
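
/* Illustrative sketch (hypothetical helper): the table above is laid
   out so that each condition and its logical inverse occupy adjacent
   indices (eq/ne, cs/cc, mi/pl, vs/vc, hi/ls, ge/lt, gt/le), which is
   why "val ^ invert" selects the inverted suffix when INVERT is 1.  */

static int
inverse_condition_index (int val)
{
  return val ^ 1;		/* eq <-> ne, cs <-> cc, and so on.  */
}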
/* Handle storing a half-word to memory during reload.  */

void
thumb_reload_out_hi (operands)
     rtx * operands;
{
  emit_insn (gen_thumb_movhi_clobber (operands[0], operands[1], operands[2]));
}

/* Handle reading a half-word from memory during reload.  */

void
thumb_reload_in_hi (operands)
     rtx * operands ATTRIBUTE_UNUSED;
{
  abort ();
}
/* Return the length of a function name prefix
   that starts with the character C.  */

static int
arm_get_strip_length (char c)
{
  switch (c)
    {
    ARM_NAME_ENCODING_LENGTHS
    default: return 0;
    }
}

/* Return a pointer to a function's name with any
   and all prefix encodings stripped from it.  */

const char *
arm_strip_name_encoding (const char * name)
{
  int skip;

  while ((skip = arm_get_strip_length (* name)))
    name += skip;

  return name;
}
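
/* Illustrative sketch (not part of the compiler): how the stripping
   loop above behaves for a hypothetical encoding in which '*' and '@'
   are one-character prefixes.  The lengths used by the real code come
   from ARM_NAME_ENCODING_LENGTHS in the target headers, not from this
   example table.  */

static int
example_strip_length (char c)
{
  switch (c)
    {
    case '*':
    case '@':
      return 1;
    default:
      return 0;
    }
}

/* Driven by this table, the while loop in arm_strip_name_encoding
   would turn "*@foo" into "foo" after two iterations.  */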
#ifdef AOF_ASSEMBLER
/* Special functions only needed when producing AOF syntax assembler.  */

rtx aof_pic_label = NULL_RTX;

struct pic_chain
{
  struct pic_chain * next;
  const char * symname;
};

static struct pic_chain * aof_pic_chain = NULL;

rtx
aof_pic_entry (x)
     rtx x;
{
  struct pic_chain ** chainp;
  int offset;

  if (aof_pic_label == NULL_RTX)
    {
      /* We mark this here and not in arm_add_gc_roots() to avoid
	 polluting even more code with ifdefs, and because it never
	 contains anything useful until we assign to it here.  */
      ggc_add_rtx_root (&aof_pic_label, 1);
      aof_pic_label = gen_rtx_SYMBOL_REF (Pmode, "x$adcons");
    }

  for (offset = 0, chainp = &aof_pic_chain; *chainp;
       offset += 4, chainp = &(*chainp)->next)
    if ((*chainp)->symname == XSTR (x, 0))
      return plus_constant (aof_pic_label, offset);

  *chainp = (struct pic_chain *) xmalloc (sizeof (struct pic_chain));
  (*chainp)->next = NULL;
  (*chainp)->symname = XSTR (x, 0);
  return plus_constant (aof_pic_label, offset);
}

void
aof_dump_pic_table (f)
     FILE * f;
{
  struct pic_chain * chain;

  if (aof_pic_chain == NULL)
    return;

  asm_fprintf (f, "\tAREA |%r$$adcons|, BASED %r\n",
	       PIC_OFFSET_TABLE_REGNUM,
	       PIC_OFFSET_TABLE_REGNUM);
  fputs ("|x$adcons|\n", f);

  for (chain = aof_pic_chain; chain; chain = chain->next)
    {
      fputs ("\tDCD\t", f);
      assemble_name (f, chain->symname);
      fputs ("\n", f);
    }
}
int arm_text_section_count = 1;

char *
aof_text_section ()
{
  static char buf[100];
  sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
	   arm_text_section_count++);
  if (flag_pic)
    strcat (buf, ", PIC, REENTRANT");
  return buf;
}

static int arm_data_section_count = 1;

char *
aof_data_section ()
{
  static char buf[100];
  sprintf (buf, "\tAREA |C$$data%d|, DATA", arm_data_section_count++);
  return buf;
}
/* The AOF assembler is religiously strict about declarations of
   imported and exported symbols, so that it is impossible to declare
   a function as imported near the beginning of the file, and then to
   export it later on.  It is, however, possible to delay the decision
   until all the functions in the file have been compiled.  To get
   around this, we maintain a list of the imports and exports, and
   delete from it any that are subsequently defined.  At the end of
   compilation we spit the remainder of the list out before the END
   directive.  */

struct import
{
  struct import * next;
  const char * name;
};

static struct import * imports_list = NULL;

void
aof_add_import (name)
     const char * name;
{
  struct import * new;

  for (new = imports_list; new; new = new->next)
    if (new->name == name)
      return;

  new = (struct import *) xmalloc (sizeof (struct import));
  new->next = imports_list;
  imports_list = new;
  new->name = name;
}

void
aof_delete_import (name)
     const char * name;
{
  struct import ** old;

  for (old = &imports_list; *old; old = & (*old)->next)
    {
      if ((*old)->name == name)
	{
	  *old = (*old)->next;
	  return;
	}
    }
}
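
/* Illustrative sketch (not part of the compiler): the intended
   lifecycle of the import list.  A symbol is registered when it is
   first referenced, removed again if the file turns out to define it,
   and whatever survives to the end of compilation is emitted as an
   IMPORT directive by aof_dump_imports below.  Note that both lookups
   compare name pointers rather than string contents, so callers are
   expected to pass the same interned string each time; the literals
   below are only to show the shape of the calls.  */

#if 0	/* Usage example only.  */
  aof_add_import ("printf");	/* First reference to printf.  */
  aof_add_import ("helper");	/* First reference to helper.  */
  aof_delete_import ("helper");	/* The file defines helper after all.  */
  /* aof_dump_imports would now emit only "IMPORT printf".  */
#endif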
int arm_main_function = 0;

void
aof_dump_imports (f)
     FILE * f;
{
  /* The AOF assembler needs this to cause the startup code to be extracted
     from the library.  Bringing in __main causes the whole thing to work
     automagically.  */
  if (arm_main_function)
    {
      text_section ();
      fputs ("\tIMPORT __main\n", f);
      fputs ("\tDCD __main\n", f);
    }

  /* Now dump the remaining imports.  */
  while (imports_list)
    {
      fprintf (f, "\tIMPORT\t");
      assemble_name (f, imports_list->name);
      fputc ('\n', f);
      imports_list = imports_list->next;
    }
}
#endif /* AOF_ASSEMBLER */
#ifdef OBJECT_FORMAT_ELF
/* Switch to an arbitrary section NAME with attributes as specified
   by FLAGS.  ALIGN specifies any known alignment requirements for
   the section; 0 if the default should be used.

   Differs from the default elf version only in the prefix character
   used before the section type.  */

static void
arm_elf_asm_named_section (name, flags)
     const char *name;
     unsigned int flags;
{
  char flagchars[8], *f = flagchars;
  const char *type;

  if (!(flags & SECTION_DEBUG))
    *f++ = 'a';
  if (flags & SECTION_WRITE)
    *f++ = 'w';
  if (flags & SECTION_CODE)
    *f++ = 'x';
  if (flags & SECTION_SMALL)
    *f++ = 's';
  if (flags & SECTION_MERGE)
    *f++ = 'M';
  if (flags & SECTION_STRINGS)
    *f++ = 'S';
  *f = '\0';

  if (flags & SECTION_BSS)
    type = "nobits";
  else
    type = "progbits";

  if (flags & SECTION_ENTSIZE)
    fprintf (asm_out_file, "\t.section\t%s,\"%s\",%%%s,%d\n",
	     name, flagchars, type, flags & SECTION_ENTSIZE);
  else
    fprintf (asm_out_file, "\t.section\t%s,\"%s\",%%%s\n",
	     name, flagchars, type);
}
#endif
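
/* Illustrative sketch (not part of the compiler): sample directives
   arm_elf_asm_named_section would emit.  For an allocated, writable
   data section the loop above builds the flag string "aw":

	.section	.mydata,"aw",%progbits

   while a mergeable string section with a SECTION_ENTSIZE of 1 would
   come out as:

	.section	.mystrings,"aMS",%progbits,1

   The section names here are made up; the '%' before the type is the
   ARM-specific prefix character the comment above refers to (the
   generic ELF version uses '@', which starts a comment in ARM
   assembly).  */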