/* Output routines for GCC for ARM.
   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002
   Free Software Foundation, Inc.
   Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
   and Martin Simmons (@harleqn.co.uk).
   More major hacks by Richard Earnshaw (rearnsha@arm.com).

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "reload.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "toplev.h"
#include "recog.h"
#include "ggc.h"
#include "except.h"
#include "c-pragma.h"
#include "integrate.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"
/* Forward definitions of types.  */
typedef struct minipool_node Mnode;
typedef struct minipool_fixup Mfix;

/* In order to improve the layout of the prototypes below
   some short type abbreviations are defined here.  */
#define Hint   HOST_WIDE_INT
#define Mmode  enum machine_mode
#define Ulong  unsigned long
#define Ccstar const char *
const struct attribute_spec arm_attribute_table[];

/* Forward function declarations.  */
static void arm_add_gc_roots PARAMS ((void));
static int arm_gen_constant PARAMS ((enum rtx_code, Mmode, Hint, rtx, rtx, int, int));
static Ulong bit_count PARAMS ((signed int));
static int const_ok_for_op PARAMS ((Hint, enum rtx_code));
static int eliminate_lr2ip PARAMS ((rtx *));
static rtx emit_multi_reg_push PARAMS ((int));
static rtx emit_sfm PARAMS ((int, int));
#ifndef AOF_ASSEMBLER
static bool arm_assemble_integer PARAMS ((rtx, unsigned int, int));
#endif
static Ccstar fp_const_from_val PARAMS ((REAL_VALUE_TYPE *));
static arm_cc get_arm_condition_code PARAMS ((rtx));
static void init_fpa_table PARAMS ((void));
static Hint int_log2 PARAMS ((Hint));
static rtx is_jump_table PARAMS ((rtx));
static Ccstar output_multi_immediate PARAMS ((rtx *, Ccstar, Ccstar, int, Hint));
static void print_multi_reg PARAMS ((FILE *, Ccstar, int, int));
static Mmode select_dominance_cc_mode PARAMS ((rtx, rtx, Hint));
static Ccstar shift_op PARAMS ((rtx, Hint *));
static void arm_init_machine_status PARAMS ((struct function *));
static void arm_mark_machine_status PARAMS ((struct function *));
static void arm_free_machine_status PARAMS ((struct function *));
static int number_of_first_bit_set PARAMS ((int));
static void replace_symbols_in_block PARAMS ((tree, rtx, rtx));
static void thumb_exit PARAMS ((FILE *, int, rtx));
static void thumb_pushpop PARAMS ((FILE *, int, int));
static Ccstar thumb_condition_code PARAMS ((rtx, int));
static rtx is_jump_table PARAMS ((rtx));
static Hint get_jump_table_size PARAMS ((rtx));
static Mnode * move_minipool_fix_forward_ref PARAMS ((Mnode *, Mnode *, Hint));
static Mnode * add_minipool_forward_ref PARAMS ((Mfix *));
static Mnode * move_minipool_fix_backward_ref PARAMS ((Mnode *, Mnode *, Hint));
static Mnode * add_minipool_backward_ref PARAMS ((Mfix *));
static void assign_minipool_offsets PARAMS ((Mfix *));
static void arm_print_value PARAMS ((FILE *, rtx));
static void dump_minipool PARAMS ((rtx));
static int arm_barrier_cost PARAMS ((rtx));
static Mfix * create_fix_barrier PARAMS ((Mfix *, Hint));
static void push_minipool_barrier PARAMS ((rtx, Hint));
static void push_minipool_fix PARAMS ((rtx, Hint, rtx *, Mmode, rtx));
static void note_invalid_constants PARAMS ((rtx, Hint));
static int current_file_function_operand PARAMS ((rtx));
static Ulong arm_compute_save_reg0_reg12_mask PARAMS ((void));
static Ulong arm_compute_save_reg_mask PARAMS ((void));
static Ulong arm_isr_value PARAMS ((tree));
static Ulong arm_compute_func_type PARAMS ((void));
static tree arm_handle_fndecl_attribute PARAMS ((tree *, tree, tree, int, bool *));
static tree arm_handle_isr_attribute PARAMS ((tree *, tree, tree, int, bool *));
static void arm_output_function_epilogue PARAMS ((FILE *, Hint));
static void arm_output_function_prologue PARAMS ((FILE *, Hint));
static void thumb_output_function_prologue PARAMS ((FILE *, Hint));
static int arm_comp_type_attributes PARAMS ((tree, tree));
static void arm_set_default_type_attributes PARAMS ((tree));
static int arm_adjust_cost PARAMS ((rtx, rtx, rtx, int));
#ifdef OBJECT_FORMAT_ELF
static void arm_elf_asm_named_section PARAMS ((const char *, unsigned int));
#endif

#undef Hint
#undef Mmode
#undef Ulong
#undef Ccstar
/* Initialize the GCC target structure.  */
#ifdef TARGET_DLLIMPORT_DECL_ATTRIBUTES
#undef  TARGET_MERGE_DECL_ATTRIBUTES
#define TARGET_MERGE_DECL_ATTRIBUTES merge_dllimport_decl_attributes
#endif

#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE arm_attribute_table

#ifdef AOF_ASSEMBLER
#undef  TARGET_ASM_BYTE_OP
#define TARGET_ASM_BYTE_OP "\tDCB\t"
#undef  TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\tDCW\t"
#undef  TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\tDCD\t"
#else
#undef  TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP NULL
#undef  TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER arm_assemble_integer
#endif

#undef  TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE arm_output_function_prologue

#undef  TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE arm_output_function_epilogue

#undef  TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES arm_comp_type_attributes

#undef  TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES arm_set_default_type_attributes

#undef  TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS arm_init_builtins

#undef  TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN arm_expand_builtin

#undef  TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST arm_adjust_cost

struct gcc_target targetm = TARGET_INITIALIZER;
/* Obstack for minipool constant handling.  */
static struct obstack minipool_obstack;
static char * minipool_startobj;

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

/* The maximum number of insns skipped which
   will be conditionalised if possible.  */
static int max_insns_skipped = 5;

extern FILE * asm_out_file;

/* True if we are currently building a constant table.  */
int making_const_table;

/* Define the information needed to generate branch insns.  This is
   stored from the compare operation.  */
rtx arm_compare_op0, arm_compare_op1;

/* What type of floating point are we tuning for?  */
enum floating_point_type arm_fpu;

/* What type of floating point instructions are available?  */
enum floating_point_type arm_fpu_arch;

/* What program mode is the cpu running in?  26-bit mode or 32-bit mode.  */
enum prog_mode_type arm_prgmode;

/* Set by the -mfp=... option.  */
const char * target_fp_name = NULL;

/* Used to parse -mstructure_size_boundary command line option.  */
const char * structure_size_string = NULL;
int arm_structure_size_boundary = DEFAULT_STRUCTURE_SIZE_BOUNDARY;

/* Bit values used to identify processor capabilities.  */
#define FL_CO_PROC    (1 << 0)        /* Has external co-processor bus */
#define FL_FAST_MULT  (1 << 1)        /* Fast multiply */
#define FL_MODE26     (1 << 2)        /* 26-bit mode support */
#define FL_MODE32     (1 << 3)        /* 32-bit mode support */
#define FL_ARCH4      (1 << 4)        /* Architecture rel 4 */
#define FL_ARCH5      (1 << 5)        /* Architecture rel 5 */
#define FL_THUMB      (1 << 6)        /* Thumb aware */
#define FL_LDSCHED    (1 << 7)        /* Load scheduling necessary */
#define FL_STRONG     (1 << 8)        /* StrongARM */
#define FL_ARCH5E     (1 << 9)        /* DSP extensions to v5 */
#define FL_XSCALE     (1 << 10)       /* XScale */

/* The bits in this mask specify which
   instructions we are allowed to generate.  */
static int insn_flags = 0;

/* The bits in this mask specify which instruction scheduling options should
   be used.  Note - there is an overlap with FL_FAST_MULT.  For some
   hardware we want to be able to generate the multiply instructions, but to
   tune as if they were not present in the architecture.  */
static int tune_flags = 0;

/* The following are used in the arm.md file as equivalents to bits
   in the above two flag variables.  */

/* Nonzero if this is an "M" variant of the processor.  */
int arm_fast_multiply = 0;

/* Nonzero if this chip supports the ARM Architecture 4 extensions.  */
int arm_arch4 = 0;

/* Nonzero if this chip supports the ARM Architecture 5 extensions.  */
int arm_arch5 = 0;

/* Nonzero if this chip supports the ARM Architecture 5E extensions.  */
int arm_arch5e = 0;

/* Nonzero if this chip can benefit from load scheduling.  */
int arm_ld_sched = 0;

/* Nonzero if this chip is a StrongARM.  */
int arm_is_strong = 0;

/* Nonzero if this chip is an XScale.  */
int arm_is_xscale = 0;

/* Nonzero if this chip is an ARM6 or an ARM7.  */
int arm_is_6_or_7 = 0;

/* Nonzero if generating Thumb instructions.  */
int thumb_code = 0;

/* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
   must report the mode of the memory reference from PRINT_OPERAND to
   PRINT_OPERAND_ADDRESS.  */
enum machine_mode output_memory_reference_mode;

/* The register number to be used for the PIC offset register.  */
const char * arm_pic_register_string = NULL;
int arm_pic_register = INVALID_REGNUM;

/* Set to 1 when a return insn is output, this means that the epilogue
   is not needed.  */
int return_used_this_function;

/* Set to 1 after arm_reorg has started.  Reset at the start of
   the next function.  */
static int after_arm_reorg = 0;

/* The maximum number of insns to be used when loading a constant.  */
static int arm_constant_limit = 3;

/* For an explanation of these variables, see final_prescan_insn below.  */
int arm_ccfsm_state;
enum arm_cond_code arm_current_cc;
rtx arm_target_insn;
int arm_target_label;

/* The condition codes of the ARM, and the inverse function.  */
static const char * const arm_condition_codes[] =
{
  "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
  "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
};

#define streq(string1, string2) (strcmp (string1, string2) == 0)
/* Initialization code.  */

struct processors
{
  const char *const name;
  const unsigned int flags;
};

/* Not all of these give usefully different compilation alternatives,
   but there is no simple way of generalizing them.  */
static const struct processors all_cores[] =
{
  /* ARM Cores */

  {"arm2", FL_CO_PROC | FL_MODE26 },
  {"arm250", FL_CO_PROC | FL_MODE26 },
  {"arm3", FL_CO_PROC | FL_MODE26 },
  {"arm6", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm60", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm600", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm610", FL_MODE26 | FL_MODE32 },
  {"arm620", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  /* arm7m doesn't exist on its own, but only with D, (and I), but
     those don't alter the code, so arm7m is sometimes used.  */
  {"arm7m", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7d", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dm", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7di", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dmi", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm70", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700i", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm710", FL_MODE26 | FL_MODE32 },
  {"arm710t", FL_MODE26 | FL_MODE32 | FL_THUMB },
  {"arm720", FL_MODE26 | FL_MODE32 },
  {"arm720t", FL_MODE26 | FL_MODE32 | FL_THUMB },
  {"arm740t", FL_MODE26 | FL_MODE32 | FL_THUMB },
  {"arm710c", FL_MODE26 | FL_MODE32 },
  {"arm7100", FL_MODE26 | FL_MODE32 },
  {"arm7500", FL_MODE26 | FL_MODE32 },
  /* Doesn't have an external co-proc, but does have embedded fpu.  */
  {"arm7500fe", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7tdmi", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  {"arm8", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm810", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm9", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm920", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm920t", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm940t", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm9tdmi", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm9e", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"strongarm", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm110", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm1100", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm1110", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"arm10tdmi", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_ARCH5 },
  {"arm1020t", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_ARCH5 },
  {"xscale", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_STRONG | FL_ARCH5 | FL_ARCH5E | FL_XSCALE },

  {NULL, 0}
};

static const struct processors all_architectures[] =
{
  /* ARM Architectures */

  { "armv2", FL_CO_PROC | FL_MODE26 },
  { "armv2a", FL_CO_PROC | FL_MODE26 },
  { "armv3", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  { "armv3m", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  { "armv4", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 },
  /* Strictly, FL_MODE26 is a permitted option for v4t, but there are no
     implementations that support it, so we will leave it out for now.  */
  { "armv4t", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  { "armv5", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
  { "armv5t", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
  { "armv5te", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 | FL_ARCH5E },
  { NULL, 0 }
};

/* This is a magic structure.  The 'string' field is magically filled in
   with a pointer to the value specified by the user on the command line
   assuming that the user has specified such a value.  */

struct arm_cpu_select arm_select[] =
{
  /* string      name            processors  */
  { NULL,       "-mcpu=",       all_cores  },
  { NULL,       "-march=",      all_architectures },
  { NULL,       "-mtune=",      all_cores }
};
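
/* For example, "-mcpu=arm710 -mtune=strongarm" fills in the string
   fields of arm_select[0] and arm_select[2], so that code is generated
   for the ARM710 but scheduled as for a StrongARM.  (Illustrative; the
   precedence rules are applied in arm_override_options below.)  */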
/* Return the number of bits set in VALUE.  */
static unsigned long
bit_count (value)
     signed int value;
{
  unsigned long count = 0;

  while (value)
    {
      value &= ~(value & -value);
      ++count;
    }

  return count;
}
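
/* Each pass of the loop above clears the lowest set bit of VALUE:
   (value & -value) isolates that bit in two's complement arithmetic.
   For example, bit_count (0x29) iterates three times, clearing bits 0,
   3 and 5, and returns 3.  */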
/* Fix up any incompatible options that the user has specified.
   This has now turned into a maze.  */
void
arm_override_options ()
{
  unsigned i;

  /* Set up the flags based on the cpu/architecture selected by the user.  */
  for (i = ARRAY_SIZE (arm_select); i--;)
    {
      struct arm_cpu_select * ptr = arm_select + i;

      if (ptr->string != NULL && ptr->string[0] != '\0')
	{
	  const struct processors * sel;

	  for (sel = ptr->processors; sel->name != NULL; sel++)
	    if (streq (ptr->string, sel->name))
	      {
		if (i == 2)
		  tune_flags = sel->flags;
		else
		  {
		    /* If we have been given an architecture and a processor
		       make sure that they are compatible.  We only generate
		       a warning though, and we prefer the CPU over the
		       architecture.  */
		    if (insn_flags != 0 && (insn_flags ^ sel->flags))
		      warning ("switch -mcpu=%s conflicts with -march= switch",
			       ptr->string);

		    insn_flags = sel->flags;
		  }

		break;
	      }

	  if (sel->name == NULL)
	    error ("bad value (%s) for %s switch", ptr->string, ptr->name);
	}
    }

  /* If the user did not specify a processor, choose one for them.  */
  if (insn_flags == 0)
    {
      const struct processors * sel;
      unsigned int sought;
      static const struct cpu_default
      {
	const int cpu;
	const char *const name;
      }
      cpu_defaults[] =
      {
	{ TARGET_CPU_arm2, "arm2" },
	{ TARGET_CPU_arm6, "arm6" },
	{ TARGET_CPU_arm610, "arm610" },
	{ TARGET_CPU_arm710, "arm710" },
	{ TARGET_CPU_arm7m, "arm7m" },
	{ TARGET_CPU_arm7500fe, "arm7500fe" },
	{ TARGET_CPU_arm7tdmi, "arm7tdmi" },
	{ TARGET_CPU_arm8, "arm8" },
	{ TARGET_CPU_arm810, "arm810" },
	{ TARGET_CPU_arm9, "arm9" },
	{ TARGET_CPU_strongarm, "strongarm" },
	{ TARGET_CPU_xscale, "xscale" },
	{ TARGET_CPU_generic, "arm" },
	{ 0, 0 }
      };
      const struct cpu_default * def;

      /* Find the default.  */
      for (def = cpu_defaults; def->name; def++)
	if (def->cpu == TARGET_CPU_DEFAULT)
	  break;

      /* Make sure we found the default CPU.  */
      if (def->name == NULL)
	abort ();

      /* Find the default CPU's flags.  */
      for (sel = all_cores; sel->name != NULL; sel++)
	if (streq (def->name, sel->name))
	  break;

      if (sel->name == NULL)
	abort ();

      insn_flags = sel->flags;

      /* Now check to see if the user has specified some command line
	 switches that require certain abilities from the cpu.  */
      sought = 0;

      if (TARGET_INTERWORK || TARGET_THUMB)
	{
	  sought |= (FL_THUMB | FL_MODE32);

	  /* Force apcs-32 to be used for interworking.  */
	  target_flags |= ARM_FLAG_APCS_32;

	  /* There are no ARM processors that support both APCS-26 and
	     interworking.  Therefore we force FL_MODE26 to be removed
	     from insn_flags here (if it was set), so that the search
	     below will always be able to find a compatible processor.  */
	  insn_flags &= ~FL_MODE26;
	}
      else if (!TARGET_APCS_32)
	sought |= FL_MODE26;

      if (sought != 0 && ((sought & insn_flags) != sought))
	{
	  /* Try to locate a CPU type that supports all of the abilities
	     of the default CPU, plus the extra abilities requested by
	     the user.  */
	  for (sel = all_cores; sel->name != NULL; sel++)
	    if ((sel->flags & sought) == (sought | insn_flags))
	      break;

	  if (sel->name == NULL)
	    {
	      unsigned int current_bit_count = 0;
	      const struct processors * best_fit = NULL;

	      /* Ideally we would like to issue an error message here
		 saying that it was not possible to find a CPU compatible
		 with the default CPU, but which also supports the command
		 line options specified by the programmer, and so they
		 ought to use the -mcpu=<name> command line option to
		 override the default CPU type.

		 Unfortunately this does not work with multilibing.  We
		 need to be able to support multilibs for -mapcs-26 and for
		 -mthumb-interwork and there is no CPU that can support both
		 options.  Instead if we cannot find a cpu that has both the
		 characteristics of the default cpu and the given command line
		 options we scan the array again looking for a best match.  */
	      for (sel = all_cores; sel->name != NULL; sel++)
		if ((sel->flags & sought) == sought)
		  {
		    unsigned int count;

		    count = bit_count (sel->flags & insn_flags);

		    if (count >= current_bit_count)
		      {
			best_fit = sel;
			current_bit_count = count;
		      }
		  }

	      if (best_fit == NULL)
		abort ();
	      else
		sel = best_fit;
	    }

	  insn_flags = sel->flags;
	}
    }

  /* If tuning has not been specified, tune for whichever processor or
     architecture has been selected.  */
  if (tune_flags == 0)
    tune_flags = insn_flags;
  /* Make sure that the processor choice does not conflict with any of the
     other command line choices.  */
  if (TARGET_APCS_32 && !(insn_flags & FL_MODE32))
    {
      /* If APCS-32 was not the default then it must have been set by the
	 user, so issue a warning message.  If the user has specified
	 "-mapcs-32 -mcpu=arm2" then we lose here.  */
      if ((TARGET_DEFAULT & ARM_FLAG_APCS_32) == 0)
	warning ("target CPU does not support APCS-32");
      target_flags &= ~ARM_FLAG_APCS_32;
    }
  else if (!TARGET_APCS_32 && !(insn_flags & FL_MODE26))
    {
      warning ("target CPU does not support APCS-26");
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_INTERWORK && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support interworking");
      target_flags &= ~ARM_FLAG_INTERWORK;
    }

  if (TARGET_THUMB && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support THUMB instructions");
      target_flags &= ~ARM_FLAG_THUMB;
    }

  if (TARGET_APCS_FRAME && TARGET_THUMB)
    {
      /* warning ("ignoring -mapcs-frame because -mthumb was used"); */
      target_flags &= ~ARM_FLAG_APCS_FRAME;
    }

  /* TARGET_BACKTRACE calls leaf_function_p, which causes a crash if done
     from here where no function is being compiled currently.  */
  if ((target_flags & (THUMB_FLAG_LEAF_BACKTRACE | THUMB_FLAG_BACKTRACE))
      && TARGET_ARM)
    warning ("enabling backtrace support is only meaningful when compiling for the Thumb");

  if (TARGET_ARM && TARGET_CALLEE_INTERWORKING)
    warning ("enabling callee interworking support is only meaningful when compiling for the Thumb");

  if (TARGET_ARM && TARGET_CALLER_INTERWORKING)
    warning ("enabling caller interworking support is only meaningful when compiling for the Thumb");

  /* If interworking is enabled then APCS-32 must be selected as well.  */
  if (TARGET_INTERWORK)
    {
      if (!TARGET_APCS_32)
	warning ("interworking forces APCS-32 to be used");
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_APCS_STACK && !TARGET_APCS_FRAME)
    {
      warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
      target_flags |= ARM_FLAG_APCS_FRAME;
    }

  if (TARGET_POKE_FUNCTION_NAME)
    target_flags |= ARM_FLAG_APCS_FRAME;

  if (TARGET_APCS_REENT && flag_pic)
    error ("-fpic and -mapcs-reent are incompatible");

  if (TARGET_APCS_REENT)
    warning ("APCS reentrant code not supported.  Ignored");

  /* If this target is normally configured to use APCS frames, warn if they
     are turned off and debugging is turned on.  */
  if (TARGET_ARM
      && write_symbols != NO_DEBUG
      && !TARGET_APCS_FRAME
      && (TARGET_DEFAULT & ARM_FLAG_APCS_FRAME))
    warning ("-g with -mno-apcs-frame may not give sensible debugging");

  /* If stack checking is disabled, we can use r10 as the PIC register,
     which keeps r9 available.  */
  if (flag_pic)
    arm_pic_register = TARGET_APCS_STACK ? 9 : 10;

  if (TARGET_APCS_FLOAT)
    warning ("passing floating point arguments in fp regs not yet supported");

  /* Initialise boolean versions of the flags, for use in the arm.md file.  */
  arm_fast_multiply = (insn_flags & FL_FAST_MULT) != 0;
  arm_arch4 = (insn_flags & FL_ARCH4) != 0;
  arm_arch5 = (insn_flags & FL_ARCH5) != 0;
  arm_arch5e = (insn_flags & FL_ARCH5E) != 0;
  arm_is_xscale = (insn_flags & FL_XSCALE) != 0;

  arm_ld_sched = (tune_flags & FL_LDSCHED) != 0;
  arm_is_strong = (tune_flags & FL_STRONG) != 0;
  thumb_code = (TARGET_ARM == 0);
  arm_is_6_or_7 = (((tune_flags & (FL_MODE26 | FL_MODE32))
		    && !(tune_flags & FL_ARCH4))) != 0;

  /* Default value for floating point code... if no co-processor
     bus, then schedule for emulated floating point.  Otherwise,
     assume the user has an FPA.
     Note: this does not prevent use of floating point instructions,
     -msoft-float does that.  */
  arm_fpu = (tune_flags & FL_CO_PROC) ? FP_HARD : FP_SOFT3;

  if (target_fp_name)
    {
      if (streq (target_fp_name, "2"))
	arm_fpu_arch = FP_SOFT2;
      else if (streq (target_fp_name, "3"))
	arm_fpu_arch = FP_SOFT3;
      else
	error ("invalid floating point emulation option: -mfpe-%s",
	       target_fp_name);
    }
  else
    arm_fpu_arch = FP_DEFAULT;

  if (TARGET_FPE && arm_fpu != FP_HARD)
    arm_fpu = FP_SOFT2;

  /* For arm2/3 there is no need to do any scheduling if there is only
     a floating point emulator, or we are doing software floating-point.  */
  if ((TARGET_SOFT_FLOAT || arm_fpu != FP_HARD)
      && (tune_flags & FL_MODE32) == 0)
    flag_schedule_insns = flag_schedule_insns_after_reload = 0;

  arm_prgmode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;

  if (structure_size_string != NULL)
    {
      int size = strtol (structure_size_string, NULL, 0);

      if (size == 8 || size == 32)
	arm_structure_size_boundary = size;
      else
	warning ("structure size boundary can only be set to 8 or 32");
    }

  if (arm_pic_register_string != NULL)
    {
      int pic_register = decode_reg_name (arm_pic_register_string);

      if (!flag_pic)
	warning ("-mpic-register= is useless without -fpic");

      /* Prevent the user from choosing an obviously stupid PIC register.  */
      else if (pic_register < 0 || call_used_regs[pic_register]
	       || pic_register == HARD_FRAME_POINTER_REGNUM
	       || pic_register == STACK_POINTER_REGNUM
	       || pic_register >= PC_REGNUM)
	error ("unable to use '%s' for PIC register", arm_pic_register_string);
      else
	arm_pic_register = pic_register;
    }

  if (TARGET_THUMB && flag_schedule_insns)
    {
      /* Don't warn since it's on by default in -O2.  */
      flag_schedule_insns = 0;
    }

  /* If optimizing for space, don't synthesize constants.
     For processors with load scheduling, it never costs more than 2 cycles
     to load a constant, and the load scheduler may well reduce that to 1.  */
  if (optimize_size || (tune_flags & FL_LDSCHED))
    arm_constant_limit = 1;

  if (arm_is_xscale)
    arm_constant_limit = 2;

  /* If optimizing for size, bump the number of instructions that we
     are prepared to conditionally execute (even on a StrongARM).
     Otherwise for the StrongARM, which has early execution of branches,
     a sequence that is worth skipping is shorter.  */
  if (optimize_size)
    max_insns_skipped = 6;
  else if (arm_is_strong)
    max_insns_skipped = 3;

  /* Register global variables with the garbage collector.  */
  arm_add_gc_roots ();
}

static void
arm_add_gc_roots ()
{
  ggc_add_rtx_root (&arm_compare_op0, 1);
  ggc_add_rtx_root (&arm_compare_op1, 1);
  ggc_add_rtx_root (&arm_target_insn, 1); /* Not sure this is really a root.  */

  gcc_obstack_init (&minipool_obstack);
  minipool_startobj = (char *) obstack_alloc (&minipool_obstack, 0);
}
/* A table of known ARM exception types.
   For use with the interrupt function attribute.  */

typedef struct
{
  const char *const arg;
  const unsigned long return_value;
}
isr_attribute_arg;

static const isr_attribute_arg isr_attribute_args [] =
{
  { "IRQ",   ARM_FT_ISR },
  { "irq",   ARM_FT_ISR },
  { "FIQ",   ARM_FT_FIQ },
  { "fiq",   ARM_FT_FIQ },
  { "ABORT", ARM_FT_ISR },
  { "abort", ARM_FT_ISR },
  { "ABORT", ARM_FT_ISR },
  { "abort", ARM_FT_ISR },
  { "UNDEF", ARM_FT_EXCEPTION },
  { "undef", ARM_FT_EXCEPTION },
  { "SWI",   ARM_FT_EXCEPTION },
  { "swi",   ARM_FT_EXCEPTION },
  { NULL,    ARM_FT_NORMAL }
};
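
/* For example (any of the argument spellings above may be used):

     void handler (void) __attribute__ ((interrupt ("IRQ")));

   With no argument the attribute defaults to ARM_FT_ISR; see
   arm_isr_value below.  */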
/* Returns the (interrupt) function type of the current
   function, or ARM_FT_UNKNOWN if the type cannot be determined.  */

static unsigned long
arm_isr_value (argument)
     tree argument;
{
  const isr_attribute_arg * ptr;
  const char * arg;

  /* No argument - default to IRQ.  */
  if (argument == NULL_TREE)
    return ARM_FT_ISR;

  /* Get the value of the argument.  */
  if (TREE_VALUE (argument) == NULL_TREE
      || TREE_CODE (TREE_VALUE (argument)) != STRING_CST)
    return ARM_FT_UNKNOWN;

  arg = TREE_STRING_POINTER (TREE_VALUE (argument));

  /* Check it against the list of known arguments.  */
  for (ptr = isr_attribute_args; ptr->arg != NULL; ptr ++)
    if (streq (arg, ptr->arg))
      return ptr->return_value;

  /* An unrecognised interrupt type.  */
  return ARM_FT_UNKNOWN;
}
/* Computes the type of the current function.  */

static unsigned long
arm_compute_func_type ()
{
  unsigned long type = ARM_FT_UNKNOWN;
  tree a;
  tree attr;

  if (TREE_CODE (current_function_decl) != FUNCTION_DECL)
    abort ();

  /* Decide if the current function is volatile.  Such functions
     never return, and many memory cycles can be saved by not storing
     register values that will never be needed again.  This optimization
     was added to speed up context switching in a kernel application.  */
  if (optimize > 0
      && current_function_nothrow
      && TREE_THIS_VOLATILE (current_function_decl))
    type |= ARM_FT_VOLATILE;

  if (current_function_needs_context)
    type |= ARM_FT_NESTED;

  attr = DECL_ATTRIBUTES (current_function_decl);

  a = lookup_attribute ("naked", attr);
  if (a != NULL_TREE)
    type |= ARM_FT_NAKED;

  if (cfun->machine->eh_epilogue_sp_ofs != NULL_RTX)
    type |= ARM_FT_EXCEPTION_HANDLER;
  else
    {
      a = lookup_attribute ("isr", attr);
      if (a == NULL_TREE)
	a = lookup_attribute ("interrupt", attr);

      if (a == NULL_TREE)
	type |= TARGET_INTERWORK ? ARM_FT_INTERWORKED : ARM_FT_NORMAL;
      else
	type |= arm_isr_value (TREE_VALUE (a));
    }

  return type;
}

/* Returns the type of the current function.  */

unsigned long
arm_current_func_type ()
{
  if (ARM_FUNC_TYPE (cfun->machine->func_type) == ARM_FT_UNKNOWN)
    cfun->machine->func_type = arm_compute_func_type ();

  return cfun->machine->func_type;
}
/* Return 1 if it is possible to return using a single instruction.  */

int
use_return_insn (iscond)
     int iscond;
{
  int regno;
  unsigned int func_type;

  /* Never use a return instruction before reload has run.  */
  if (!reload_completed)
    return 0;

  func_type = arm_current_func_type ();

  /* Naked functions and volatile functions need special
     consideration.  */
  if (func_type & (ARM_FT_VOLATILE | ARM_FT_NAKED))
    return 0;

  /* As do variadic functions.  */
  if (current_function_pretend_args_size
      || cfun->machine->uses_anonymous_args
      /* Or if the function calls __builtin_eh_return ()  */
      || ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER
      /* Or if there is no frame pointer and there is a stack adjustment.  */
      || ((get_frame_size () + current_function_outgoing_args_size != 0)
	  && !frame_pointer_needed))
    return 0;

  /* Can't be done if interworking with Thumb, and any registers have been
     stacked.  Similarly, on StrongARM, conditional returns are expensive
     if they aren't taken and registers have been stacked.  */
  if (iscond && arm_is_strong && frame_pointer_needed)
    return 0;

  if ((iscond && arm_is_strong)
      || TARGET_INTERWORK)
    {
      for (regno = 0; regno <= LAST_ARM_REGNUM; regno++)
	if (regs_ever_live[regno] && !call_used_regs[regno])
	  return 0;

      if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
	return 0;
    }

  /* Can't be done if any of the FPU regs are pushed,
     since this also requires an insn.  */
  if (TARGET_HARD_FLOAT)
    for (regno = FIRST_ARM_FP_REGNUM; regno <= LAST_ARM_FP_REGNUM; regno++)
      if (regs_ever_live[regno] && !call_used_regs[regno])
	return 0;

  return 1;
}
/* Return TRUE if int I is a valid immediate ARM constant.  */

int
const_ok_for_arm (i)
     HOST_WIDE_INT i;
{
  unsigned HOST_WIDE_INT mask = ~(unsigned HOST_WIDE_INT)0xFF;

  /* For machines with >32 bit HOST_WIDE_INT, the bits above bit 31 must
     be all zero, or all one.  */
  if ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff) != 0
      && ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff)
	  != ((~(unsigned HOST_WIDE_INT) 0)
	      & ~(unsigned HOST_WIDE_INT) 0xffffffff)))
    return FALSE;

  /* Fast return for 0 and powers of 2.  */
  if ((i & (i - 1)) == 0)
    return TRUE;

  do
    {
      if ((i & mask & (unsigned HOST_WIDE_INT) 0xffffffff) == 0)
	return TRUE;
      mask =
	(mask << 2) | ((mask & (unsigned HOST_WIDE_INT) 0xffffffff)
		       >> (32 - 2)) | ~(unsigned HOST_WIDE_INT) 0xffffffff;
    }
  while (mask != ~(unsigned HOST_WIDE_INT) 0xFF);

  return FALSE;
}
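
/* The loop above implements the ARM rule that a data-processing
   immediate is an 8-bit value rotated right by an even amount.  Thus
   0xff, 0x104 (0x41 << 2) and 0xf000000f (0xff rotated right by 4)
   are all valid, while 0x101 and 0xffff are not and must be
   synthesized from several instructions or loaded from memory.  */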
/* Return true if I is a valid constant for the operation CODE.  */
static int
const_ok_for_op (i, code)
     HOST_WIDE_INT i;
     enum rtx_code code;
{
  if (const_ok_for_arm (i))
    return 1;

  switch (code)
    {
    case PLUS:
      return const_ok_for_arm (ARM_SIGN_EXTEND (-i));

    case MINUS:		/* Should only occur with (MINUS I reg) => rsb */
    case XOR:
    case IOR:
      return 0;

    case AND:
      return const_ok_for_arm (ARM_SIGN_EXTEND (~i));

    default:
      abort ();
    }
}
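
/* PLUS and AND accept extra constants because the instruction can be
   flipped to its complement: for instance (plus reg -1) is not
   directly encodable, but since 1 is, the addition can be emitted as
   "sub rd, rn, #1"; likewise an AND whose inverted constant is valid
   can be emitted as a BIC.  */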
/* Emit a sequence of insns to handle a large constant.
   CODE is the code of the operation required, it can be any of SET, PLUS,
   IOR, AND, XOR, MINUS;
   MODE is the mode in which the operation is being performed;
   VAL is the integer to operate on;
   SOURCE is the other operand (a register, or a null-pointer for SET);
   SUBTARGETS means it is safe to create scratch registers if that will
   either produce a simpler sequence, or we will want to cse the values.
   Return value is the number of insns emitted.  */

int
arm_split_constant (code, mode, val, target, source, subtargets)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
{
  if (subtargets || code == SET
      || (GET_CODE (target) == REG && GET_CODE (source) == REG
	  && REGNO (target) != REGNO (source)))
    {
      /* After arm_reorg has been called, we can't fix up expensive
	 constants by pushing them into memory so we must synthesise
	 them in-line, regardless of the cost.  This is only likely to
	 be more costly on chips that have load delay slots and we are
	 compiling without running the scheduler (so no splitting
	 occurred before the final instruction emission).

	 Ref: gcc -O1 -mcpu=strongarm gcc.c-torture/compile/980506-2.c
      */
      if (!after_arm_reorg
	  && (arm_gen_constant (code, mode, val, target, source, 1, 0)
	      > arm_constant_limit + (code != SET)))
	{
	  if (code == SET)
	    {
	      /* Currently SET is the only monadic value for CODE, all
		 the rest are dyadic.  */
	      emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (val)));
	      return 1;
	    }
	  else
	    {
	      rtx temp = subtargets ? gen_reg_rtx (mode) : target;

	      emit_insn (gen_rtx_SET (VOIDmode, temp, GEN_INT (val)));
	      /* For MINUS, the value is subtracted from, since we never
		 have subtraction of a constant.  */
	      if (code == MINUS)
		emit_insn (gen_rtx_SET (VOIDmode, target,
					gen_rtx_MINUS (mode, temp, source)));
	      else
		emit_insn (gen_rtx_SET (VOIDmode, target,
					gen_rtx (code, mode, source, temp)));
	      return 2;
	    }
	}
    }

  return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
}
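
/* For example, 0xffff is not a valid immediate (see const_ok_for_arm
   above), so a SET of it costs two instructions, roughly:

	mov	rd, #0xff00
	orr	rd, rd, #0xff

   If the estimated sequence exceeded arm_constant_limit, the constant
   would instead be pushed out to a minipool and fetched with a single
   load, except after arm_reorg when it must be synthesized inline.  */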
static int
count_insns_for_constant (HOST_WIDE_INT remainder, int i)
{
  HOST_WIDE_INT temp1;
  int num_insns = 0;
  do
    {
      int end;

      if (i <= 0)
	i += 32;
      if (remainder & (3 << (i - 2)))
	{
	  end = i - 8;
	  if (end < 0)
	    end += 32;
	  temp1 = remainder & ((0x0ff << end)
			       | ((i < end) ? (0xff >> (32 - end)) : 0));
	  remainder &= ~temp1;
	  num_insns++;
	  i -= 6;
	}
      i -= 2;
    } while (remainder);
  return num_insns;
}
/* As above, but extra parameter GENERATE which, if clear, suppresses
   RTL generation.  */

static int
arm_gen_constant (code, mode, val, target, source, subtargets, generate)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
     int generate;
{
  int can_invert = 0;
  int can_negate = 0;
  int can_negate_initial = 0;
  int can_shift = 0;
  int i;
  int num_bits_set = 0;
  int set_sign_bit_copies = 0;
  int clear_sign_bit_copies = 0;
  int clear_zero_bit_copies = 0;
  int set_zero_bit_copies = 0;
  int insns = 0;
  unsigned HOST_WIDE_INT temp1, temp2;
  unsigned HOST_WIDE_INT remainder = val & 0xffffffff;

  /* Find out which operations are safe for a given CODE.  Also do a quick
     check for degenerate cases; these can occur when DImode operations
     are split.  */
  switch (code)
    {
    case SET:
      can_invert = 1;
      can_shift = 1;
      can_negate = 1;
      break;

    case PLUS:
      can_negate = 1;
      can_negate_initial = 1;
      break;

    case IOR:
      if (remainder == 0xffffffff)
	{
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target,
				    GEN_INT (ARM_SIGN_EXTEND (val))));
	  return 1;
	}
      if (remainder == 0)
	{
	  if (reload_completed && rtx_equal_p (target, source))
	    return 0;
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target, source));
	  return 1;
	}
      break;

    case AND:
      if (remainder == 0)
	{
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target, const0_rtx));
	  return 1;
	}
      if (remainder == 0xffffffff)
	{
	  if (reload_completed && rtx_equal_p (target, source))
	    return 0;
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target, source));
	  return 1;
	}
      can_invert = 1;
      break;

    case XOR:
      if (remainder == 0)
	{
	  if (reload_completed && rtx_equal_p (target, source))
	    return 0;
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target, source));
	  return 1;
	}
      if (remainder == 0xffffffff)
	{
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target,
				    gen_rtx_NOT (mode, source)));
	  return 1;
	}

      /* We don't know how to handle this yet below.  */
      abort ();

    case MINUS:
      /* We treat MINUS as (val - source), since (source - val) is always
	 passed as (source + (-val)).  */
      if (remainder == 0)
	{
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target,
				    gen_rtx_NEG (mode, source)));
	  return 1;
	}
      if (const_ok_for_arm (val))
	{
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target,
				    gen_rtx_MINUS (mode, GEN_INT (val),
						   source)));
	  return 1;
	}
      can_negate = 1;

      break;

    default:
      abort ();
    }

  /* If we can do it in one insn get out quickly.  */
  if (const_ok_for_arm (val)
      || (can_negate_initial && const_ok_for_arm (-val))
      || (can_invert && const_ok_for_arm (~val)))
    {
      if (generate)
	emit_insn (gen_rtx_SET (VOIDmode, target,
				(source ? gen_rtx (code, mode, source,
						   GEN_INT (val))
				 : GEN_INT (val))));
      return 1;
    }

  /* Calculate a few attributes that may be useful for specific
     optimizations.  */
  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) == 0)
	clear_sign_bit_copies++;
      else
	break;
    }

  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) != 0)
	set_sign_bit_copies++;
      else
	break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) == 0)
	clear_zero_bit_copies++;
      else
	break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) != 0)
	set_zero_bit_copies++;
      else
	break;
    }
  switch (code)
    {
    case SET:
      /* See if we can do this by sign_extending a constant that is known
	 to be negative.  This is a good way of doing it, since the shift
	 may well merge into a subsequent insn.  */
      if (set_sign_bit_copies > 1)
	{
	  if (const_ok_for_arm
	      (temp1 = ARM_SIGN_EXTEND (remainder
					<< (set_sign_bit_copies - 1))))
	    {
	      if (generate)
		{
		  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
		  emit_insn (gen_rtx_SET (VOIDmode, new_src,
					  GEN_INT (temp1)));
		  emit_insn (gen_ashrsi3 (target, new_src,
					  GEN_INT (set_sign_bit_copies - 1)));
		}
	      return 2;
	    }
	  /* For an inverted constant, we will need to set the low bits,
	     these will be shifted out of harm's way.  */
	  temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
	  if (const_ok_for_arm (~temp1))
	    {
	      if (generate)
		{
		  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
		  emit_insn (gen_rtx_SET (VOIDmode, new_src,
					  GEN_INT (temp1)));
		  emit_insn (gen_ashrsi3 (target, new_src,
					  GEN_INT (set_sign_bit_copies - 1)));
		}
	      return 2;
	    }
	}

      /* See if we can generate this by setting the bottom (or the top)
	 16 bits, and then shifting these into the other half of the
	 word.  We only look for the simplest cases, to do more would cost
	 too much.  Be careful, however, not to generate this when the
	 alternative would take fewer insns.  */
      if (val & 0xffff0000)
	{
	  temp1 = remainder & 0xffff0000;
	  temp2 = remainder & 0x0000ffff;

	  /* Overlaps outside this range are best done using other methods.  */
	  for (i = 9; i < 24; i++)
	    {
	      if ((((temp2 | (temp2 << i)) & 0xffffffff) == remainder)
		  && !const_ok_for_arm (temp2))
		{
		  rtx new_src = (subtargets
				 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
				 : target);
		  insns = arm_gen_constant (code, mode, temp2, new_src,
					    source, subtargets, generate);
		  source = new_src;
		  if (generate)
		    emit_insn (gen_rtx_SET
			       (VOIDmode, target,
				gen_rtx_IOR (mode,
					     gen_rtx_ASHIFT (mode, source,
							     GEN_INT (i)),
					     source)));
		  return insns + 1;
		}
	    }

	  /* Don't duplicate cases already considered.  */
	  for (i = 17; i < 24; i++)
	    {
	      if (((temp1 | (temp1 >> i)) == remainder)
		  && !const_ok_for_arm (temp1))
		{
		  rtx new_src = (subtargets
				 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
				 : target);
		  insns = arm_gen_constant (code, mode, temp1, new_src,
					    source, subtargets, generate);
		  source = new_src;
		  if (generate)
		    emit_insn
		      (gen_rtx_SET (VOIDmode, target,
				    gen_rtx_IOR
				    (mode,
				     gen_rtx_LSHIFTRT (mode, source,
						       GEN_INT (i)),
				     source)));
		  return insns + 1;
		}
	    }
	}
      break;

    case IOR:
    case XOR:
      /* If we have IOR or XOR, and the constant can be loaded in a
	 single instruction, and we can find a temporary to put it in,
	 then this can be done in two instructions instead of 3-4.  */
      if (subtargets
	  /* TARGET can't be NULL if SUBTARGETS is 0.  */
	  || (reload_completed && !reg_mentioned_p (target, source)))
	{
	  if (const_ok_for_arm (ARM_SIGN_EXTEND (~val)))
	    {
	      if (generate)
		{
		  rtx sub = subtargets ? gen_reg_rtx (mode) : target;

		  emit_insn (gen_rtx_SET (VOIDmode, sub, GEN_INT (val)));
		  emit_insn (gen_rtx_SET (VOIDmode, target,
					  gen_rtx (code, mode, source, sub)));
		}
	      return 2;
	    }
	}

      if (code == XOR)
	break;

      if (set_sign_bit_copies > 8
	  && (val & (-1 << (32 - set_sign_bit_copies))) == val)
	{
	  if (generate)
	    {
	      rtx sub = subtargets ? gen_reg_rtx (mode) : target;
	      rtx shift = GEN_INT (set_sign_bit_copies);

	      emit_insn (gen_rtx_SET (VOIDmode, sub,
				      gen_rtx_NOT (mode,
						   gen_rtx_ASHIFT (mode,
								   source,
								   shift))));
	      emit_insn (gen_rtx_SET (VOIDmode, target,
				      gen_rtx_NOT (mode,
						   gen_rtx_LSHIFTRT (mode, sub,
								     shift))));
	    }
	  return 2;
	}

      if (set_zero_bit_copies > 8
	  && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
	{
	  if (generate)
	    {
	      rtx sub = subtargets ? gen_reg_rtx (mode) : target;
	      rtx shift = GEN_INT (set_zero_bit_copies);

	      emit_insn (gen_rtx_SET (VOIDmode, sub,
				      gen_rtx_NOT (mode,
						   gen_rtx_LSHIFTRT (mode,
								     source,
								     shift))));
	      emit_insn (gen_rtx_SET (VOIDmode, target,
				      gen_rtx_NOT (mode,
						   gen_rtx_ASHIFT (mode, sub,
								   shift))));
	    }
	  return 2;
	}

      if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~val)))
	{
	  if (generate)
	    {
	      rtx sub = subtargets ? gen_reg_rtx (mode) : target;
	      emit_insn (gen_rtx_SET (VOIDmode, sub,
				      gen_rtx_NOT (mode, source)));
	      source = sub;
	      if (subtargets)
		sub = gen_reg_rtx (mode);
	      emit_insn (gen_rtx_SET (VOIDmode, sub,
				      gen_rtx_AND (mode, source,
						   GEN_INT (temp1))));
	      emit_insn (gen_rtx_SET (VOIDmode, target,
				      gen_rtx_NOT (mode, sub)));
	    }
	  return 3;
	}
      break;

    case AND:
      /* See if two shifts will do 2 or more insn's worth of work.  */
      if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
	{
	  HOST_WIDE_INT shift_mask = ((0xffffffff
				       << (32 - clear_sign_bit_copies))
				      & 0xffffffff);

	  if ((remainder | shift_mask) != 0xffffffff)
	    {
	      if (generate)
		{
		  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
		  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
					    new_src, source, subtargets, 1);
		  source = new_src;
		}
	      else
		{
		  rtx targ = subtargets ? NULL_RTX : target;

		  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
					    targ, source, subtargets, 0);
		}
	    }

	  if (generate)
	    {
	      rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
	      rtx shift = GEN_INT (clear_sign_bit_copies);

	      emit_insn (gen_ashlsi3 (new_src, source, shift));
	      emit_insn (gen_lshrsi3 (target, new_src, shift));
	    }

	  return insns + 2;
	}

      if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
	{
	  HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;

	  if ((remainder | shift_mask) != 0xffffffff)
	    {
	      if (generate)
		{
		  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;

		  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
					    new_src, source, subtargets, 1);
		  source = new_src;
		}
	      else
		{
		  rtx targ = subtargets ? NULL_RTX : target;

		  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
					    targ, source, subtargets, 0);
		}
	    }

	  if (generate)
	    {
	      rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
	      rtx shift = GEN_INT (clear_zero_bit_copies);

	      emit_insn (gen_lshrsi3 (new_src, source, shift));
	      emit_insn (gen_ashlsi3 (target, new_src, shift));
	    }

	  return insns + 2;
	}

      break;

    default:
      break;
    }
  for (i = 0; i < 32; i++)
    if (remainder & (1 << i))
      num_bits_set++;

  if (code == AND || (can_invert && num_bits_set > 16))
    remainder = (~remainder) & 0xffffffff;
  else if (code == PLUS && num_bits_set > 16)
    remainder = (-remainder) & 0xffffffff;
  else
    {
      can_invert = 0;
      can_negate = 0;
    }

  /* Now try and find a way of doing the job in either two or three
     instructions.
     We start by looking for the largest block of zeros that are aligned on
     a 2-bit boundary, we then fill up the temps, wrapping around to the
     top of the word when we drop off the bottom.
     In the worst case this code should produce no more than four insns.  */
  {
    int best_start = 0;
    int best_consecutive_zeros = 0;

    for (i = 0; i < 32; i += 2)
      {
	int consecutive_zeros = 0;

	if (!(remainder & (3 << i)))
	  {
	    while ((i < 32) && !(remainder & (3 << i)))
	      {
		consecutive_zeros += 2;
		i += 2;
	      }
	    if (consecutive_zeros > best_consecutive_zeros)
	      {
		best_consecutive_zeros = consecutive_zeros;
		best_start = i - consecutive_zeros;
	      }
	    i -= 2;
	  }
      }

    /* So long as it won't require any more insns to do so, it's
       desirable to emit a small constant (in bits 0...9) in the last
       insn.  This way there is more chance that it can be combined with
       a later addressing insn to form a pre-indexed load or store
       operation.  Consider:

	       *((volatile int *)0xe0000100) = 1;
	       *((volatile int *)0xe0000110) = 2;

       We want this to wind up as:

		mov rA, #0xe0000000
		mov rB, #1
		str rB, [rA, #0x100]
		mov rB, #2
		str rB, [rA, #0x110]

       rather than having to synthesize both large constants from scratch.

       Therefore, we calculate how many insns would be required to emit
       the constant starting from `best_start', and also starting from
       zero (i.e. with bit 31 first to be output).  If `best_start' doesn't
       yield a shorter sequence, we may as well use zero.  */
    if (best_start != 0
	&& ((((unsigned HOST_WIDE_INT) 1) << best_start) < remainder)
	&& (count_insns_for_constant (remainder, 0) <=
	    count_insns_for_constant (remainder, best_start)))
      best_start = 0;

    /* Now start emitting the insns.  */
    i = best_start;
    do
      {
	int end;

	if (i <= 0)
	  i += 32;
	if (remainder & (3 << (i - 2)))
	  {
	    end = i - 8;
	    if (end < 0)
	      end += 32;
	    temp1 = remainder & ((0x0ff << end)
				 | ((i < end) ? (0xff >> (32 - end)) : 0));
	    remainder &= ~temp1;

	    if (generate)
	      {
		rtx new_src, temp1_rtx;

		if (code == SET || code == MINUS)
		  {
		    new_src = (subtargets ? gen_reg_rtx (mode) : target);
		    if (can_invert && code != MINUS)
		      temp1 = ~temp1;
		  }
		else
		  {
		    if (remainder && subtargets)
		      new_src = gen_reg_rtx (mode);
		    else
		      new_src = target;
		    if (can_invert)
		      temp1 = ~temp1;
		    else if (can_negate)
		      temp1 = -temp1;
		  }

		temp1 = trunc_int_for_mode (temp1, mode);
		temp1_rtx = GEN_INT (temp1);

		if (code == SET)
		  ;
		else if (code == MINUS)
		  temp1_rtx = gen_rtx_MINUS (mode, temp1_rtx, source);
		else
		  temp1_rtx = gen_rtx_fmt_ee (code, mode, source, temp1_rtx);

		emit_insn (gen_rtx_SET (VOIDmode, new_src, temp1_rtx));
		source = new_src;
	      }

	    if (code == SET)
	      {
		can_invert = 0;
		code = PLUS;
	      }
	    else if (code == MINUS)
	      code = PLUS;

	    insns++;
	    i -= 6;
	  }
	i -= 2;
      }
    while (remainder);
  }

  return insns;
}
/* Canonicalize a comparison so that we are more likely to recognize it.
   This can be done for a few constant compares, where we can make the
   immediate value easier to load.  */

enum rtx_code
arm_canonicalize_comparison (code, op1)
     enum rtx_code code;
     rtx * op1;
{
  unsigned HOST_WIDE_INT i = INTVAL (*op1);

  switch (code)
    {
    case EQ:
    case NE:
      return code;

    case GT:
    case LE:
      if (i != ((((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1)) - 1)
	  && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
	{
	  *op1 = GEN_INT (i + 1);
	  return code == GT ? GE : LT;
	}
      break;

    case GE:
    case LT:
      if (i != (((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1))
	  && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
	{
	  *op1 = GEN_INT (i - 1);
	  return code == GE ? GT : LE;
	}
      break;

    case GTU:
    case LEU:
      if (i != ~((unsigned HOST_WIDE_INT) 0)
	  && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
	{
	  *op1 = GEN_INT (i + 1);
	  return code == GTU ? GEU : LTU;
	}
      break;

    case GEU:
    case LTU:
      if (i != 0
	  && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
	{
	  *op1 = GEN_INT (i - 1);
	  return code == GEU ? GTU : LEU;
	}
      break;

    default:
      abort ();
    }

  return code;
}
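
/* For example, (x > 0xfff) would need 0xfff loaded into a register
   first, since it is not a valid immediate; rewriting it as
   (x >= 0x1000) lets the comparison become a single "cmp x, #4096".  */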
/* Decide whether a type should be returned in memory (true)
   or in a register (false).  This is called by the macro
   RETURN_IN_MEMORY.  */

int
arm_return_in_memory (type)
     tree type;
{
  if (!AGGREGATE_TYPE_P (type))
    /* All simple types are returned in registers.  */
    return 0;

  /* For the arm-wince targets we choose to be compatible with Microsoft's
     ARM and Thumb compilers, which always return aggregates in memory.  */
#ifndef ARM_WINCE
  /* All structures/unions bigger than one word are returned in memory.
     Also catch the case where int_size_in_bytes returns -1.  In this case
     the aggregate is either huge or of variable size, and in either case
     we will want to return it via memory and not in a register.  */
  if (((unsigned int) int_size_in_bytes (type)) > UNITS_PER_WORD)
    return 1;

  if (TREE_CODE (type) == RECORD_TYPE)
    {
      tree field;

      /* For a struct the APCS says that we only return in a register
	 if the type is 'integer like' and every addressable element
	 has an offset of zero.  For practical purposes this means
	 that the structure can have at most one non bit-field element
	 and that this element must be the first one in the structure.  */

      /* Find the first field, ignoring non FIELD_DECL things which will
	 have been created by C++.  */
      for (field = TYPE_FIELDS (type);
	   field && TREE_CODE (field) != FIELD_DECL;
	   field = TREE_CHAIN (field))
	continue;

      if (field == NULL)
	return 0; /* An empty structure.  Allowed by an extension to ANSI C.  */

      /* Check that the first field is valid for returning in a register.  */

      /* ... Floats are not allowed.  */
      if (FLOAT_TYPE_P (TREE_TYPE (field)))
	return 1;

      /* ... Aggregates that are not themselves valid for returning in
	 a register are not allowed.  */
      if (RETURN_IN_MEMORY (TREE_TYPE (field)))
	return 1;

      /* Now check the remaining fields, if any.  Only bitfields are allowed,
	 since they are not addressable.  */
      for (field = TREE_CHAIN (field);
	   field;
	   field = TREE_CHAIN (field))
	{
	  if (TREE_CODE (field) != FIELD_DECL)
	    continue;

	  if (!DECL_BIT_FIELD_TYPE (field))
	    return 1;
	}

      return 0;
    }

  if (TREE_CODE (type) == UNION_TYPE)
    {
      tree field;

      /* Unions can be returned in registers if every element is
	 integral, or can be returned in an integer register.  */
      for (field = TYPE_FIELDS (type);
	   field;
	   field = TREE_CHAIN (field))
	{
	  if (TREE_CODE (field) != FIELD_DECL)
	    continue;

	  if (FLOAT_TYPE_P (TREE_TYPE (field)))
	    return 1;

	  if (RETURN_IN_MEMORY (TREE_TYPE (field)))
	    return 1;
	}

      return 0;
    }
#endif /* not ARM_WINCE */

  /* Return all other types in memory.  */
  return 1;
}
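
/* Illustrative cases for the APCS rules above:

     struct { int i; }                  -- returned in a register
     struct { int i; int j; }           -- memory (wider than one word)
     struct { float f; }                -- memory (first field is a float)
     struct { int i : 8; int j : 24; }  -- register (only bit-fields
					    follow the first field)  */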
/* Initialize a variable CUM of type CUMULATIVE_ARGS
   for a call to a function whose data type is FNTYPE.
   For a library call, FNTYPE is NULL.  */
void
arm_init_cumulative_args (pcum, fntype, libname, indirect)
     CUMULATIVE_ARGS * pcum;
     tree fntype;
     rtx libname ATTRIBUTE_UNUSED;
     int indirect ATTRIBUTE_UNUSED;
{
  /* On the ARM, the offset starts at 0.  */
  pcum->nregs = ((fntype && aggregate_value_p (TREE_TYPE (fntype))) ? 1 : 0);

  pcum->call_cookie = CALL_NORMAL;

  if (TARGET_LONG_CALLS)
    pcum->call_cookie = CALL_LONG;

  /* Check for long call/short call attributes.  The attributes
     override any command line option.  */
  if (fntype)
    {
      if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (fntype)))
	pcum->call_cookie = CALL_SHORT;
      else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (fntype)))
	pcum->call_cookie = CALL_LONG;
    }
}
/* Determine where to put an argument to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).  */

rtx
arm_function_arg (pcum, mode, type, named)
     CUMULATIVE_ARGS * pcum;
     enum machine_mode mode;
     tree type ATTRIBUTE_UNUSED;
     int named;
{
  if (mode == VOIDmode)
    /* Compute operand 2 of the call insn.  */
    return GEN_INT (pcum->call_cookie);

  if (!named || pcum->nregs >= NUM_ARG_REGS)
    return NULL_RTX;

  return gen_rtx_REG (mode, pcum->nregs);
}
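
/* The first NUM_ARG_REGS argument words go in r0-r3 under the APCS, so
   for "int f (int a, int b)" this returns (reg:SI 0) for A and
   (reg:SI 1) for B; later or unnamed arguments get NULL_RTX and are
   pushed on the stack.  Note that when the return value is an
   aggregate returned in memory, arm_init_cumulative_args above starts
   NREGS at 1 because r0 carries the return-value address.  */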
/* Encode the current state of the #pragma [no_]long_calls.  */
typedef enum
{
  OFF,		/* No #pragma [no_]long_calls is in effect.  */
  LONG,		/* #pragma long_calls is in effect.  */
  SHORT		/* #pragma no_long_calls is in effect.  */
} arm_pragma_enum;

static arm_pragma_enum arm_pragma_long_calls = OFF;

void
arm_pr_long_calls (pfile)
     cpp_reader * pfile ATTRIBUTE_UNUSED;
{
  arm_pragma_long_calls = LONG;
}

void
arm_pr_no_long_calls (pfile)
     cpp_reader * pfile ATTRIBUTE_UNUSED;
{
  arm_pragma_long_calls = SHORT;
}

void
arm_pr_long_calls_off (pfile)
     cpp_reader * pfile ATTRIBUTE_UNUSED;
{
  arm_pragma_long_calls = OFF;
}
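
/* Typical use in source code:

     #pragma long_calls
     extern void far_away (void);
     #pragma long_calls_off

   Declarations seen while long_calls is in effect are called
   indirectly; these handlers only record the state, which is consulted
   when attributes and call cookies are computed for later
   declarations.  */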
1925 /* Table of machine attributes. */
1926 const struct attribute_spec arm_attribute_table[] =
1928 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
1929 /* Function calls made to this symbol must be done indirectly, because
1930 it may lie outside of the 26 bit addressing range of a normal function
1931 call. */
1932 { "long_call", 0, 0, false, true, true, NULL },
1933 /* Whereas these functions are always known to reside within the 26 bit
1934 addressing range. */
1935 { "short_call", 0, 0, false, true, true, NULL },
1936 /* Interrupt Service Routines have special prologue and epilogue requirements. */
1937 { "isr", 0, 1, false, false, false, arm_handle_isr_attribute },
1938 { "interrupt", 0, 1, false, false, false, arm_handle_isr_attribute },
1939 { "naked", 0, 0, true, false, false, arm_handle_fndecl_attribute },
1940 #ifdef ARM_PE
1941 /* ARM/PE has three new attributes:
1942 interfacearm - ?
1943 dllexport - for exporting a function/variable that will live in a dll
1944 dllimport - for importing a function/variable from a dll
1946 Microsoft allows multiple declspecs in one __declspec, separating
1947 them with spaces. We do NOT support this. Instead, use __declspec
1948 multiple times.
1950 { "dllimport", 0, 0, true, false, false, NULL },
1951 { "dllexport", 0, 0, true, false, false, NULL },
1952 { "interfacearm", 0, 0, true, false, false, arm_handle_fndecl_attribute },
1953 #endif
1954 { NULL, 0, 0, false, false, false, NULL }
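/* Illustrative use of the call attributes above (a sketch; "IRQ" is
   one of the argument values decoded by arm_isr_value):

     void big_func (void) __attribute__ ((long_call));
     void near_func (void) __attribute__ ((short_call));
     void irq_handler (void) __attribute__ ((isr ("IRQ")));
*/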
1957 /* Handle an attribute requiring a FUNCTION_DECL;
1958 arguments as in struct attribute_spec.handler. */
1960 static tree
1961 arm_handle_fndecl_attribute (node, name, args, flags, no_add_attrs)
1962 tree * node;
1963 tree name;
1964 tree args ATTRIBUTE_UNUSED;
1965 int flags ATTRIBUTE_UNUSED;
1966 bool * no_add_attrs;
1968 if (TREE_CODE (*node) != FUNCTION_DECL)
1970 warning ("`%s' attribute only applies to functions",
1971 IDENTIFIER_POINTER (name));
1972 *no_add_attrs = true;
1975 return NULL_TREE;
1978 /* Handle an "interrupt" or "isr" attribute;
1979 arguments as in struct attribute_spec.handler. */
1981 static tree
1982 arm_handle_isr_attribute (node, name, args, flags, no_add_attrs)
1983 tree * node;
1984 tree name;
1985 tree args;
1986 int flags;
1987 bool * no_add_attrs;
1989 if (DECL_P (*node))
1991 if (TREE_CODE (*node) != FUNCTION_DECL)
1993 warning ("`%s' attribute only applies to functions",
1994 IDENTIFIER_POINTER (name));
1995 *no_add_attrs = true;
1997 /* FIXME: the argument, if any, is checked for type attributes;
1998 should it be checked for decl ones? */
2000 else
2002 if (TREE_CODE (*node) == FUNCTION_TYPE
2003 || TREE_CODE (*node) == METHOD_TYPE)
2005 if (arm_isr_value (args) == ARM_FT_UNKNOWN)
2007 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
2008 *no_add_attrs = true;
2011 else if (TREE_CODE (*node) == POINTER_TYPE
2012 && (TREE_CODE (TREE_TYPE (*node)) == FUNCTION_TYPE
2013 || TREE_CODE (TREE_TYPE (*node)) == METHOD_TYPE)
2014 && arm_isr_value (args) != ARM_FT_UNKNOWN)
2016 *node = build_type_copy (*node);
2017 TREE_TYPE (*node) = build_type_attribute_variant
2018 (TREE_TYPE (*node),
2019 tree_cons (name, args, TYPE_ATTRIBUTES (TREE_TYPE (*node))));
2020 *no_add_attrs = true;
2022 else
2024 /* Possibly pass this attribute on from the type to a decl. */
2025 if (flags & ((int) ATTR_FLAG_DECL_NEXT
2026 | (int) ATTR_FLAG_FUNCTION_NEXT
2027 | (int) ATTR_FLAG_ARRAY_NEXT))
2029 *no_add_attrs = true;
2030 return tree_cons (name, args, NULL_TREE);
2032 else
2034 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
2039 return NULL_TREE;
2042 /* Return 0 if the attributes for two types are incompatible, 1 if they
2043 are compatible, and 2 if they are nearly compatible (which causes a
2044 warning to be generated). */
2046 static int
2047 arm_comp_type_attributes (type1, type2)
2048 tree type1;
2049 tree type2;
2051 int l1, l2, s1, s2;
2053 /* Check for mismatch of non-default calling convention. */
2054 if (TREE_CODE (type1) != FUNCTION_TYPE)
2055 return 1;
2057 /* Check for mismatched call attributes. */
2058 l1 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type1)) != NULL;
2059 l2 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type2)) != NULL;
2060 s1 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type1)) != NULL;
2061 s2 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type2)) != NULL;
2063 /* Only bother to check if an attribute is defined. */
2064 if (l1 | l2 | s1 | s2)
2066 /* If one type has an attribute, the other must have the same attribute. */
2067 if ((l1 != l2) || (s1 != s2))
2068 return 0;
2070 /* Disallow mixed attributes. */
2071 if ((l1 & s2) || (l2 & s1))
2072 return 0;
2075 /* Check for mismatched ISR attribute. */
2076 l1 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type1)) != NULL;
2077 if (! l1)
2078 l1 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type1)) != NULL;
2079 l2 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type2)) != NULL;
2080 if (! l2)
2081 l2 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type2)) != NULL;
2082 if (l1 != l2)
2083 return 0;
2085 return 1;
2088 /* Encode long_call or short_call attribute by prefixing
2089 symbol name in DECL with a special character FLAG. */
2091 void
2092 arm_encode_call_attribute (decl, flag)
2093 tree decl;
2094 int flag;
2096 const char * str = XSTR (XEXP (DECL_RTL (decl), 0), 0);
2097 int len = strlen (str);
2098 char * newstr;
2100 /* Do not allow weak functions to be treated as short call. */
2101 if (DECL_WEAK (decl) && flag == SHORT_CALL_FLAG_CHAR)
2102 return;
2104 newstr = alloca (len + 2);
2105 newstr[0] = flag;
2106 strcpy (newstr + 1, str);
2108 newstr = (char *) ggc_alloc_string (newstr, len + 1);
2109 XSTR (XEXP (DECL_RTL (decl), 0), 0) = newstr;
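/* For example, with a hypothetical FLAG character '*', encoding the
   symbol "foo" rewrites its SYMBOL_REF name to "*foo"; the
   ENCODED_SHORT_CALL_ATTR_P and ENCODED_LONG_CALL_ATTR_P macros then
   only have to inspect the first character of the name.  */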
2112 /* Assigns default attributes to newly defined type. This is used to
2113 set short_call/long_call attributes for function types of
2114 functions defined inside corresponding #pragma scopes. */
2116 static void
2117 arm_set_default_type_attributes (type)
2118 tree type;
2120 /* Add __attribute__ ((long_call)) to all functions when
2121 inside #pragma long_calls, or __attribute__ ((short_call))
2122 when inside #pragma no_long_calls. */
2123 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
2125 tree type_attr_list, attr_name;
2126 type_attr_list = TYPE_ATTRIBUTES (type);
2128 if (arm_pragma_long_calls == LONG)
2129 attr_name = get_identifier ("long_call");
2130 else if (arm_pragma_long_calls == SHORT)
2131 attr_name = get_identifier ("short_call");
2132 else
2133 return;
2135 type_attr_list = tree_cons (attr_name, NULL_TREE, type_attr_list);
2136 TYPE_ATTRIBUTES (type) = type_attr_list;
2140 /* Return 1 if the operand is a SYMBOL_REF for a function known to be
2141 defined within the current compilation unit. If this cannot be
2142 determined, then 0 is returned. */
2144 static int
2145 current_file_function_operand (sym_ref)
2146 rtx sym_ref;
2148 /* This is a bit of a fib. A function will have a short call flag
2149 applied to its name if it has the short call attribute, or it has
2150 already been defined within the current compilation unit. */
2151 if (ENCODED_SHORT_CALL_ATTR_P (XSTR (sym_ref, 0)))
2152 return 1;
2154 /* The current function is always defined within the current compilation
2155 unit. If it is a weak definition, however, then this may not be the real
2156 definition of the function, and so we have to say no. */
2157 if (sym_ref == XEXP (DECL_RTL (current_function_decl), 0)
2158 && !DECL_WEAK (current_function_decl))
2159 return 1;
2161 /* We cannot make the determination - default to returning 0. */
2162 return 0;
2165 /* Return non-zero if a 32 bit "long_call" should be generated for
2166 this call. We generate a long_call if the function:
2168 a. has an __attribute__ ((long_call))
2169 or b. is within the scope of a #pragma long_calls
2170 or c. the -mlong-calls command line switch has been specified
2172 However, we do not generate a long call if the function:
2174 d. has an __attribute__ ((short_call))
2175 or e. is inside the scope of a #pragma no_long_calls
2176 or f. has an __attribute__ ((section))
2177 or g. is defined within the current compilation unit.
2179 This function will be called by C fragments contained in the machine
2180 description file. CALL_REF and CALL_COOKIE correspond to the matched
2181 rtl operands. CALL_SYMBOL is used to distinguish between
2182 two different callers of the function. It is set to 1 in the
2183 "call_symbol" and "call_symbol_value" patterns and to 0 in the "call"
2184 and "call_value" patterns. This is because of the difference in the
2185 SYM_REFs passed by these patterns. */
2188 arm_is_longcall_p (sym_ref, call_cookie, call_symbol)
2189 rtx sym_ref;
2190 int call_cookie;
2191 int call_symbol;
2193 if (!call_symbol)
2195 if (GET_CODE (sym_ref) != MEM)
2196 return 0;
2198 sym_ref = XEXP (sym_ref, 0);
2201 if (GET_CODE (sym_ref) != SYMBOL_REF)
2202 return 0;
2204 if (call_cookie & CALL_SHORT)
2205 return 0;
2207 if (TARGET_LONG_CALLS && flag_function_sections)
2208 return 1;
2210 if (current_file_function_operand (sym_ref))
2211 return 0;
2213 return (call_cookie & CALL_LONG)
2214 || ENCODED_LONG_CALL_ATTR_P (XSTR (sym_ref, 0))
2215 || TARGET_LONG_CALLS;
2218 /* Return non-zero if it is ok to make a tail-call to DECL. */
2221 arm_function_ok_for_sibcall (decl)
2222 tree decl;
2224 int call_type = TARGET_LONG_CALLS ? CALL_LONG : CALL_NORMAL;
2226 /* Never tailcall something for which we have no decl, or if we
2227 are in Thumb mode. */
2228 if (decl == NULL || TARGET_THUMB)
2229 return 0;
2231 /* Get the calling method. */
2232 if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
2233 call_type = CALL_SHORT;
2234 else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
2235 call_type = CALL_LONG;
2237 /* Cannot tail-call to long calls, since these are out of range of
2238 a branch instruction. However, if not compiling PIC, we know
2239 we can reach the symbol if it is in this compilation unit. */
2240 if (call_type == CALL_LONG && (flag_pic || !TREE_ASM_WRITTEN (decl)))
2241 return 0;
2243 /* If we are interworking and the function is not declared static
2244 then we can't tail-call it unless we know that it exists in this
2245 compilation unit (since it might be a Thumb routine). */
2246 if (TARGET_INTERWORK && TREE_PUBLIC (decl) && !TREE_ASM_WRITTEN (decl))
2247 return 0;
2249 /* Never tailcall from an ISR routine - it needs a special exit sequence. */
2250 if (IS_INTERRUPT (arm_current_func_type ()))
2251 return 0;
2253 /* Everything else is ok. */
2254 return 1;
2259 legitimate_pic_operand_p (x)
2260 rtx x;
2262 if (CONSTANT_P (x)
2263 && flag_pic
2264 && (GET_CODE (x) == SYMBOL_REF
2265 || (GET_CODE (x) == CONST
2266 && GET_CODE (XEXP (x, 0)) == PLUS
2267 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)))
2268 return 0;
2270 return 1;
2273 rtx
2274 legitimize_pic_address (orig, mode, reg)
2275 rtx orig;
2276 enum machine_mode mode;
2277 rtx reg;
2279 if (GET_CODE (orig) == SYMBOL_REF
2280 || GET_CODE (orig) == LABEL_REF)
2282 #ifndef AOF_ASSEMBLER
2283 rtx pic_ref, address;
2284 #endif
2285 rtx insn;
2286 int subregs = 0;
2288 if (reg == 0)
2290 if (no_new_pseudos)
2291 abort ();
2292 else
2293 reg = gen_reg_rtx (Pmode);
2295 subregs = 1;
2298 #ifdef AOF_ASSEMBLER
2299 /* The AOF assembler can generate relocations for these directly, and
2300 understands that the PIC register has to be added into the offset. */
2301 insn = emit_insn (gen_pic_load_addr_based (reg, orig));
2302 #else
2303 if (subregs)
2304 address = gen_reg_rtx (Pmode);
2305 else
2306 address = reg;
2308 if (TARGET_ARM)
2309 emit_insn (gen_pic_load_addr_arm (address, orig));
2310 else
2311 emit_insn (gen_pic_load_addr_thumb (address, orig));
2313 if ((GET_CODE (orig) == LABEL_REF
2314 || (GET_CODE (orig) == SYMBOL_REF &&
2315 ENCODED_SHORT_CALL_ATTR_P (XSTR (orig, 0))))
2316 && NEED_GOT_RELOC)
2317 pic_ref = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, address);
2318 else
2320 pic_ref = gen_rtx_MEM (Pmode,
2321 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
2322 address));
2323 RTX_UNCHANGING_P (pic_ref) = 1;
2326 insn = emit_move_insn (reg, pic_ref);
2327 #endif
2328 current_function_uses_pic_offset_table = 1;
2329 /* Put a REG_EQUAL note on this insn, so that it can be optimized
2330 by the loop optimizer. */
2331 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, orig,
2332 REG_NOTES (insn));
2333 return reg;
2335 else if (GET_CODE (orig) == CONST)
2337 rtx base, offset;
2339 if (GET_CODE (XEXP (orig, 0)) == PLUS
2340 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
2341 return orig;
2343 if (reg == 0)
2345 if (no_new_pseudos)
2346 abort ();
2347 else
2348 reg = gen_reg_rtx (Pmode);
2351 if (GET_CODE (XEXP (orig, 0)) == PLUS)
2353 base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
2354 offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
2355 base == reg ? 0 : reg);
2357 else
2358 abort ();
2360 if (GET_CODE (offset) == CONST_INT)
2362 /* The base register doesn't really matter; we only want to
2363 test the index for the appropriate mode. */
2364 ARM_GO_IF_LEGITIMATE_INDEX (mode, 0, offset, win);
2366 if (!no_new_pseudos)
2367 offset = force_reg (Pmode, offset);
2368 else
2369 abort ();
2371 win:
2372 if (GET_CODE (offset) == CONST_INT)
2373 return plus_constant (base, INTVAL (offset));
2376 if (GET_MODE_SIZE (mode) > 4
2377 && (GET_MODE_CLASS (mode) == MODE_INT
2378 || TARGET_SOFT_FLOAT))
2380 emit_insn (gen_addsi3 (reg, base, offset));
2381 return reg;
2384 return gen_rtx_PLUS (Pmode, base, offset);
2387 return orig;
2390 /* Generate code to load the PIC register. PROLOGUE is true if
2391 called from arm_expand_prologue (in which case we want the
2392 generated insns at the start of the function); false if called
2393 by an exception receiver that needs the PIC register reloaded
2394 (in which case the insns are just dumped at the current location). */
2396 void
2397 arm_finalize_pic (prologue)
2398 int prologue ATTRIBUTE_UNUSED;
2400 #ifndef AOF_ASSEMBLER
2401 rtx l1, pic_tmp, pic_tmp2, seq, pic_rtx;
2402 rtx global_offset_table;
2404 if (current_function_uses_pic_offset_table == 0 || TARGET_SINGLE_PIC_BASE)
2405 return;
2407 if (!flag_pic)
2408 abort ();
2410 start_sequence ();
2411 l1 = gen_label_rtx ();
2413 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2414 /* On the ARM the PC register contains 'dot + 8' at the time of the
2415 addition; on the Thumb it is 'dot + 4'. */
2416 pic_tmp = plus_constant (gen_rtx_LABEL_REF (Pmode, l1), TARGET_ARM ? 8 : 4);
2417 if (GOT_PCREL)
2418 pic_tmp2 = gen_rtx_CONST (VOIDmode,
2419 gen_rtx_PLUS (Pmode, global_offset_table, pc_rtx));
2420 else
2421 pic_tmp2 = gen_rtx_CONST (VOIDmode, global_offset_table);
2423 pic_rtx = gen_rtx_CONST (Pmode, gen_rtx_MINUS (Pmode, pic_tmp2, pic_tmp));
2425 if (TARGET_ARM)
2427 emit_insn (gen_pic_load_addr_arm (pic_offset_table_rtx, pic_rtx));
2428 emit_insn (gen_pic_add_dot_plus_eight (pic_offset_table_rtx, l1));
2430 else
2432 emit_insn (gen_pic_load_addr_thumb (pic_offset_table_rtx, pic_rtx));
2433 emit_insn (gen_pic_add_dot_plus_four (pic_offset_table_rtx, l1));
2436 seq = gen_sequence ();
2437 end_sequence ();
2438 if (prologue)
2439 emit_insn_after (seq, get_insns ());
2440 else
2441 emit_insn (seq);
2443 /* Need to emit this whether or not we obey regdecls,
2444 since setjmp/longjmp can cause life info to screw up. */
2445 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
2446 #endif /* AOF_ASSEMBLER */
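/* A sketch of the ARM sequence emitted above (illustrative register
   and label names, ignoring the GOT_PCREL variant):

       ldr   rPIC, =_GLOBAL_OFFSET_TABLE_ - (L1 + 8)
     L1:
       add   rPIC, pc, rPIC    @ pc reads as L1 + 8 here
*/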
2449 #define REG_OR_SUBREG_REG(X) \
2450 (GET_CODE (X) == REG \
2451 || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))
2453 #define REG_OR_SUBREG_RTX(X) \
2454 (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))
2456 #ifndef COSTS_N_INSNS
2457 #define COSTS_N_INSNS(N) ((N) * 4 - 2)
2458 #endif
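/* With this fallback definition COSTS_N_INSNS (1) == 2 and
   COSTS_N_INSNS (2) == 6; the magic numbers below are on the same
   scale.  */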
2461 arm_rtx_costs (x, code, outer)
2462 rtx x;
2463 enum rtx_code code;
2464 enum rtx_code outer;
2466 enum machine_mode mode = GET_MODE (x);
2467 enum rtx_code subcode;
2468 int extra_cost;
2470 if (TARGET_THUMB)
2472 switch (code)
2474 case ASHIFT:
2475 case ASHIFTRT:
2476 case LSHIFTRT:
2477 case ROTATERT:
2478 case PLUS:
2479 case MINUS:
2480 case COMPARE:
2481 case NEG:
2482 case NOT:
2483 return COSTS_N_INSNS (1);
2485 case MULT:
2486 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
2488 int cycles = 0;
2489 unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));
2491 while (i)
2493 i >>= 2;
2494 cycles++;
2496 return COSTS_N_INSNS (2) + cycles;
2498 return COSTS_N_INSNS (1) + 16;
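/* Worked example for the constant case above: multiplying by 20
   (0x14) shifts 20 -> 5 -> 1 -> 0, so cycles == 3 and, with the
   fallback COSTS_N_INSNS above, the cost is 6 + 3 == 9.  */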
2500 case SET:
2501 return (COSTS_N_INSNS (1)
2502 + 4 * ((GET_CODE (SET_SRC (x)) == MEM)
2503 + (GET_CODE (SET_DEST (x)) == MEM)));
2505 case CONST_INT:
2506 if (outer == SET)
2508 if ((unsigned HOST_WIDE_INT) INTVAL (x) < 256)
2509 return 0;
2510 if (thumb_shiftable_const (INTVAL (x)))
2511 return COSTS_N_INSNS (2);
2512 return COSTS_N_INSNS (3);
2514 else if (outer == PLUS
2515 && INTVAL (x) < 256 && INTVAL (x) > -256)
2516 return 0;
2517 else if (outer == COMPARE
2518 && (unsigned HOST_WIDE_INT) INTVAL (x) < 256)
2519 return 0;
2520 else if (outer == ASHIFT || outer == ASHIFTRT
2521 || outer == LSHIFTRT)
2522 return 0;
2523 return COSTS_N_INSNS (2);
2525 case CONST:
2526 case CONST_DOUBLE:
2527 case LABEL_REF:
2528 case SYMBOL_REF:
2529 return COSTS_N_INSNS (3);
2531 case UDIV:
2532 case UMOD:
2533 case DIV:
2534 case MOD:
2535 return 100;
2537 case TRUNCATE:
2538 return 99;
2540 case AND:
2541 case XOR:
2542 case IOR:
2543 /* XXX guess. */
2544 return 8;
2546 case ADDRESSOF:
2547 case MEM:
2548 /* XXX another guess. */
2549 /* Memory costs quite a lot for the first word, but subsequent words
2550 load at the equivalent of a single insn each. */
2551 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
2552 + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));
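/* For example, an SImode load costs 10 (14 if it comes from the
   constant pool), while a DImode load costs an extra 4 for the
   second word.  */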
2554 case IF_THEN_ELSE:
2555 /* XXX a guess. */
2556 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
2557 return 14;
2558 return 2;
2560 case ZERO_EXTEND:
2561 /* XXX still guessing. */
2562 switch (GET_MODE (XEXP (x, 0)))
2564 case QImode:
2565 return (1 + (mode == DImode ? 4 : 0)
2566 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2568 case HImode:
2569 return (4 + (mode == DImode ? 4 : 0)
2570 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2572 case SImode:
2573 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2575 default:
2576 return 99;
2579 default:
2580 return 99;
2581 #if 0
2582 case FFS:
2583 case FLOAT:
2584 case FIX:
2585 case UNSIGNED_FIX:
2586 /* XXX guess */
2587 fprintf (stderr, "unexpected code for thumb in rtx_costs: %s\n",
2588 rtx_name[code]);
2589 abort ();
2590 #endif
2594 switch (code)
2596 case MEM:
2597 /* Memory costs quite a lot for the first word, but subsequent words
2598 load at the equivalent of a single insn each. */
2599 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
2600 + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));
2602 case DIV:
2603 case MOD:
2604 return 100;
2606 case ROTATE:
2607 if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
2608 return 4;
2609 /* Fall through */
2610 case ROTATERT:
2611 if (mode != SImode)
2612 return 8;
2613 /* Fall through */
2614 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
2615 if (mode == DImode)
2616 return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
2617 + ((GET_CODE (XEXP (x, 0)) == REG
2618 || (GET_CODE (XEXP (x, 0)) == SUBREG
2619 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
2620 ? 0 : 8));
2621 return (1 + ((GET_CODE (XEXP (x, 0)) == REG
2622 || (GET_CODE (XEXP (x, 0)) == SUBREG
2623 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
2624 ? 0 : 4)
2625 + ((GET_CODE (XEXP (x, 1)) == REG
2626 || (GET_CODE (XEXP (x, 1)) == SUBREG
2627 && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
2628 || (GET_CODE (XEXP (x, 1)) == CONST_INT))
2629 ? 0 : 4));
2631 case MINUS:
2632 if (mode == DImode)
2633 return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
2634 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
2635 || (GET_CODE (XEXP (x, 0)) == CONST_INT
2636 && const_ok_for_arm (INTVAL (XEXP (x, 0)))))
2637 ? 0 : 8));
2639 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2640 return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2641 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
2642 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
2643 ? 0 : 8)
2644 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
2645 || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
2646 && const_double_rtx_ok_for_fpu (XEXP (x, 0))))
2647 ? 0 : 8));
2649 if (((GET_CODE (XEXP (x, 0)) == CONST_INT
2650 && const_ok_for_arm (INTVAL (XEXP (x, 0)))
2651 && REG_OR_SUBREG_REG (XEXP (x, 1))))
2652 || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
2653 || subcode == ASHIFTRT || subcode == LSHIFTRT
2654 || subcode == ROTATE || subcode == ROTATERT
2655 || (subcode == MULT
2656 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
2657 && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
2658 (INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
2659 && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
2660 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
2661 || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
2662 && REG_OR_SUBREG_REG (XEXP (x, 0))))
2663 return 1;
2664 /* Fall through */
2666 case PLUS:
2667 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2668 return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
2669 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2670 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
2671 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
2672 ? 0 : 8));
2674 /* Fall through */
2675 case AND: case XOR: case IOR:
2676 extra_cost = 0;
2678 /* Normally the frame registers will be spilt into reg+const during
2679 reload, so it is a bad idea to combine them with other instructions,
2680 since then they might not be moved outside of loops. As a compromise
2681 we allow integration with ops that have a constant as their second
2682 operand. */
2683 if ((REG_OR_SUBREG_REG (XEXP (x, 0))
2684 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
2685 && GET_CODE (XEXP (x, 1)) != CONST_INT)
2686 || (REG_OR_SUBREG_REG (XEXP (x, 1))
2687 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 1)))))
2688 extra_cost = 4;
2690 if (mode == DImode)
2691 return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
2692 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2693 || (GET_CODE (XEXP (x, 1)) == CONST_INT
2694 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
2695 ? 0 : 8));
2697 if (REG_OR_SUBREG_REG (XEXP (x, 0)))
2698 return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
2699 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2700 || (GET_CODE (XEXP (x, 1)) == CONST_INT
2701 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
2702 ? 0 : 4));
2704 else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
2705 return (1 + extra_cost
2706 + ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
2707 || subcode == LSHIFTRT || subcode == ASHIFTRT
2708 || subcode == ROTATE || subcode == ROTATERT
2709 || (subcode == MULT
2710 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2711 && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
2712 (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0)))
2713 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
2714 && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
2715 || GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
2716 ? 0 : 4));
2718 return 8;
2720 case MULT:
2721 /* There is no point basing this on the tuning, since it is always the
2722 fast variant if it exists at all. */
2723 if (arm_fast_multiply && mode == DImode
2724 && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
2725 && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
2726 || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
2727 return 8;
2729 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2730 || mode == DImode)
2731 return 30;
2733 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
2735 unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
2736 & (unsigned HOST_WIDE_INT) 0xffffffff);
2737 int add_cost = const_ok_for_arm (i) ? 4 : 8;
2738 int j;
2740 /* Tune as appropriate. */
2741 int booth_unit_size = ((tune_flags & FL_FAST_MULT) ? 8 : 2);
2743 for (j = 0; i && j < 32; j += booth_unit_size)
2745 i >>= booth_unit_size;
2746 add_cost += 2;
2749 return add_cost;
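/* Worked example: for a multiply by 0x101 with an 8-bit Booth unit
   (FL_FAST_MULT) the loop runs twice (0x101 -> 0x1 -> 0), and since
   0x101 is not a valid ARM immediate the total is 8 + 2 + 2 == 12.  */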
2752 return (((tune_flags & FL_FAST_MULT) ? 8 : 30)
2753 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
2754 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));
2756 case TRUNCATE:
2757 if (arm_fast_multiply && mode == SImode
2758 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
2759 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
2760 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
2761 == GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)))
2762 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ZERO_EXTEND
2763 || GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SIGN_EXTEND))
2764 return 8;
2765 return 99;
2767 case NEG:
2768 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2769 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
2770 /* Fall through */
2771 case NOT:
2772 if (mode == DImode)
2773 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
2775 return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
2777 case IF_THEN_ELSE:
2778 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
2779 return 14;
2780 return 2;
2782 case COMPARE:
2783 return 1;
2785 case ABS:
2786 return 4 + (mode == DImode ? 4 : 0);
2788 case SIGN_EXTEND:
2789 if (GET_MODE (XEXP (x, 0)) == QImode)
2790 return (4 + (mode == DImode ? 4 : 0)
2791 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2792 /* Fall through */
2793 case ZERO_EXTEND:
2794 switch (GET_MODE (XEXP (x, 0)))
2796 case QImode:
2797 return (1 + (mode == DImode ? 4 : 0)
2798 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2800 case HImode:
2801 return (4 + (mode == DImode ? 4 : 0)
2802 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2804 case SImode:
2805 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2807 default:
2808 break;
2810 abort ();
2812 case CONST_INT:
2813 if (const_ok_for_arm (INTVAL (x)))
2814 return outer == SET ? 2 : -1;
2815 else if (outer == AND
2816 && const_ok_for_arm (~INTVAL (x)))
2817 return -1;
2818 else if ((outer == COMPARE
2819 || outer == PLUS || outer == MINUS)
2820 && const_ok_for_arm (-INTVAL (x)))
2821 return -1;
2822 else
2823 return 5;
2825 case CONST:
2826 case LABEL_REF:
2827 case SYMBOL_REF:
2828 return 6;
2830 case CONST_DOUBLE:
2831 if (const_double_rtx_ok_for_fpu (x))
2832 return outer == SET ? 2 : -1;
2833 else if ((outer == COMPARE || outer == PLUS)
2834 && neg_const_double_rtx_ok_for_fpu (x))
2835 return -1;
2836 return 7;
2838 default:
2839 return 99;
2843 static int
2844 arm_adjust_cost (insn, link, dep, cost)
2845 rtx insn;
2846 rtx link;
2847 rtx dep;
2848 int cost;
2850 rtx i_pat, d_pat;
2852 /* Some true dependencies can have a higher cost depending
2853 on precisely how certain input operands are used. */
2854 if (arm_is_xscale
2855 && REG_NOTE_KIND (link) == 0
2856 && recog_memoized (insn) >= 0
2857 && recog_memoized (dep) >= 0)
2859 int shift_opnum = get_attr_shift (insn);
2860 enum attr_type attr_type = get_attr_type (dep);
2862 /* If nonzero, SHIFT_OPNUM contains the operand number of a shifted
2863 operand for INSN. If we have a shifted input operand and the
2864 instruction we depend on is another ALU instruction, then we may
2865 have to account for an additional stall. */
2866 if (shift_opnum != 0 && attr_type == TYPE_NORMAL)
2868 rtx shifted_operand;
2869 int opno;
2871 /* Get the shifted operand. */
2872 extract_insn (insn);
2873 shifted_operand = recog_data.operand[shift_opnum];
2875 /* Iterate over all the operands in DEP. If we write an operand
2876 that overlaps with SHIFTED_OPERAND, then we have to increase the
2877 cost of this dependency. */
2878 extract_insn (dep);
2879 preprocess_constraints ();
2880 for (opno = 0; opno < recog_data.n_operands; opno++)
2882 /* We can ignore strict inputs. */
2883 if (recog_data.operand_type[opno] == OP_IN)
2884 continue;
2886 if (reg_overlap_mentioned_p (recog_data.operand[opno],
2887 shifted_operand))
2888 return 2;
2893 /* XXX This is not strictly true for the FPA. */
2894 if (REG_NOTE_KIND (link) == REG_DEP_ANTI
2895 || REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
2896 return 0;
2898 /* Call insns don't incur a stall, even if they follow a load. */
2899 if (REG_NOTE_KIND (link) == 0
2900 && GET_CODE (insn) == CALL_INSN)
2901 return 1;
2903 if ((i_pat = single_set (insn)) != NULL
2904 && GET_CODE (SET_SRC (i_pat)) == MEM
2905 && (d_pat = single_set (dep)) != NULL
2906 && GET_CODE (SET_DEST (d_pat)) == MEM)
2908 /* This is a load after a store; there is no conflict if the load reads
2909 from a cached area. Assume that loads from the stack and from the
2910 constant pool are cached, and that others will miss. This is a
2911 hack. */
2913 if (CONSTANT_POOL_ADDRESS_P (XEXP (SET_SRC (i_pat), 0))
2914 || reg_mentioned_p (stack_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
2915 || reg_mentioned_p (frame_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
2916 || reg_mentioned_p (hard_frame_pointer_rtx,
2917 XEXP (SET_SRC (i_pat), 0)))
2918 return 1;
2921 return cost;
2924 /* This code has been fixed for cross compilation. */
2926 static int fpa_consts_inited = 0;
2928 static const char * const strings_fpa[8] =
2930 "0", "1", "2", "3",
2931 "4", "5", "0.5", "10"
2934 static REAL_VALUE_TYPE values_fpa[8];
2936 static void
2937 init_fpa_table ()
2939 int i;
2940 REAL_VALUE_TYPE r;
2942 for (i = 0; i < 8; i++)
2944 r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
2945 values_fpa[i] = r;
2948 fpa_consts_inited = 1;
2951 /* Return TRUE if rtx X is a valid immediate FPU constant. */
2954 const_double_rtx_ok_for_fpu (x)
2955 rtx x;
2957 REAL_VALUE_TYPE r;
2958 int i;
2960 if (!fpa_consts_inited)
2961 init_fpa_table ();
2963 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2964 if (REAL_VALUE_MINUS_ZERO (r))
2965 return 0;
2967 for (i = 0; i < 8; i++)
2968 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
2969 return 1;
2971 return 0;
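/* For example, (CONST_DOUBLE 2.0) and (CONST_DOUBLE 0.5) are accepted
   (see strings_fpa above), whereas 7.0 is not; minus zero is rejected
   explicitly before the table is consulted.  */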
2974 /* Return TRUE if rtx X is a valid immediate FPU constant when negated. */
2977 neg_const_double_rtx_ok_for_fpu (x)
2978 rtx x;
2980 REAL_VALUE_TYPE r;
2981 int i;
2983 if (!fpa_consts_inited)
2984 init_fpa_table ();
2986 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2987 r = REAL_VALUE_NEGATE (r);
2988 if (REAL_VALUE_MINUS_ZERO (r))
2989 return 0;
2991 for (i = 0; i < 8; i++)
2992 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
2993 return 1;
2995 return 0;
2998 /* Predicates for `match_operand' and `match_operator'. */
3000 /* s_register_operand is the same as register_operand, but it doesn't accept
3001 (SUBREG (MEM)...).
3003 This function exists because at the time it was put in it led to better
3004 code. SUBREG(MEM) always needs a reload in the places where
3005 s_register_operand is used, and this seemed to lead to excessive
3006 reloading. */
3009 s_register_operand (op, mode)
3010 rtx op;
3011 enum machine_mode mode;
3013 if (GET_MODE (op) != mode && mode != VOIDmode)
3014 return 0;
3016 if (GET_CODE (op) == SUBREG)
3017 op = SUBREG_REG (op);
3019 /* We don't consider registers whose class is NO_REGS
3020 to be a register operand. */
3021 /* XXX might have to check for lo regs only for thumb ??? */
3022 return (GET_CODE (op) == REG
3023 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3024 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
3027 /* A hard register operand (even before reload). */
3030 arm_hard_register_operand (op, mode)
3031 rtx op;
3032 enum machine_mode mode;
3034 if (GET_MODE (op) != mode && mode != VOIDmode)
3035 return 0;
3037 return (GET_CODE (op) == REG
3038 && REGNO (op) < FIRST_PSEUDO_REGISTER);
3041 /* Only accept reg, subreg(reg), const_int. */
3044 reg_or_int_operand (op, mode)
3045 rtx op;
3046 enum machine_mode mode;
3048 if (GET_CODE (op) == CONST_INT)
3049 return 1;
3051 if (GET_MODE (op) != mode && mode != VOIDmode)
3052 return 0;
3054 if (GET_CODE (op) == SUBREG)
3055 op = SUBREG_REG (op);
3057 /* We don't consider registers whose class is NO_REGS
3058 to be a register operand. */
3059 return (GET_CODE (op) == REG
3060 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3061 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
3064 /* Return 1 if OP is an item in memory, given that we are in reload. */
3067 arm_reload_memory_operand (op, mode)
3068 rtx op;
3069 enum machine_mode mode ATTRIBUTE_UNUSED;
3071 int regno = true_regnum (op);
3073 return (!CONSTANT_P (op)
3074 && (regno == -1
3075 || (GET_CODE (op) == REG
3076 && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
3079 /* Return 1 if OP is a valid memory address, but not valid for a signed byte
3080 memory access (architecture V4).
3081 MODE is QImode if called when computing constraints, or VOIDmode when
3082 emitting patterns. In this latter case we cannot use memory_operand()
3083 because it will fail on badly formed MEMs, which is precisely what we are
3084 trying to catch. */
3087 bad_signed_byte_operand (op, mode)
3088 rtx op;
3089 enum machine_mode mode ATTRIBUTE_UNUSED;
3091 #if 0
3092 if ((mode == QImode && !memory_operand (op, mode)) || GET_CODE (op) != MEM)
3093 return 0;
3094 #endif
3095 if (GET_CODE (op) != MEM)
3096 return 0;
3098 op = XEXP (op, 0);
3100 /* A sum of anything more complex than reg + reg or reg + const is bad. */
3101 if ((GET_CODE (op) == PLUS || GET_CODE (op) == MINUS)
3102 && (!s_register_operand (XEXP (op, 0), VOIDmode)
3103 || (!s_register_operand (XEXP (op, 1), VOIDmode)
3104 && GET_CODE (XEXP (op, 1)) != CONST_INT)))
3105 return 1;
3107 /* Big constants are also bad. */
3108 if (GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT
3109 && (INTVAL (XEXP (op, 1)) > 0xff
3110 || -INTVAL (XEXP (op, 1)) > 0xff))
3111 return 1;
3113 /* Everything else is good, or will automatically be made so. */
3114 return 0;
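/* For example (a sketch): addresses such as [r0, r1] or [r2, #12]
   are acceptable for a signed byte load, whereas [r0, r1, lsl #2]
   or [r2, #4096] make this predicate return 1.  */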
3117 /* Return TRUE for valid operands for the rhs of an ARM instruction. */
3120 arm_rhs_operand (op, mode)
3121 rtx op;
3122 enum machine_mode mode;
3124 return (s_register_operand (op, mode)
3125 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
3128 /* Return TRUE for valid operands for the
3129 rhs of an ARM instruction, or a load. */
3132 arm_rhsm_operand (op, mode)
3133 rtx op;
3134 enum machine_mode mode;
3136 return (s_register_operand (op, mode)
3137 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
3138 || memory_operand (op, mode));
3141 /* Return TRUE for valid operands for the rhs of an ARM instruction, or a
3142 constant that is valid when negated. */
3145 arm_add_operand (op, mode)
3146 rtx op;
3147 enum machine_mode mode;
3149 if (TARGET_THUMB)
3150 return thumb_cmp_operand (op, mode);
3152 return (s_register_operand (op, mode)
3153 || (GET_CODE (op) == CONST_INT
3154 && (const_ok_for_arm (INTVAL (op))
3155 || const_ok_for_arm (-INTVAL (op)))));
3159 arm_not_operand (op, mode)
3160 rtx op;
3161 enum machine_mode mode;
3163 return (s_register_operand (op, mode)
3164 || (GET_CODE (op) == CONST_INT
3165 && (const_ok_for_arm (INTVAL (op))
3166 || const_ok_for_arm (~INTVAL (op)))));
3169 /* Return TRUE if the operand is a memory reference which contains an
3170 offsettable address. */
3173 offsettable_memory_operand (op, mode)
3174 rtx op;
3175 enum machine_mode mode;
3177 if (mode == VOIDmode)
3178 mode = GET_MODE (op);
3180 return (mode == GET_MODE (op)
3181 && GET_CODE (op) == MEM
3182 && offsettable_address_p (reload_completed | reload_in_progress,
3183 mode, XEXP (op, 0)));
3186 /* Return TRUE if the operand is a memory reference which is, or can be
3187 made, word aligned by adjusting the offset. */
3190 alignable_memory_operand (op, mode)
3191 rtx op;
3192 enum machine_mode mode;
3194 rtx reg;
3196 if (mode == VOIDmode)
3197 mode = GET_MODE (op);
3199 if (mode != GET_MODE (op) || GET_CODE (op) != MEM)
3200 return 0;
3202 op = XEXP (op, 0);
3204 return ((GET_CODE (reg = op) == REG
3205 || (GET_CODE (op) == SUBREG
3206 && GET_CODE (reg = SUBREG_REG (op)) == REG)
3207 || (GET_CODE (op) == PLUS
3208 && GET_CODE (XEXP (op, 1)) == CONST_INT
3209 && (GET_CODE (reg = XEXP (op, 0)) == REG
3210 || (GET_CODE (XEXP (op, 0)) == SUBREG
3211 && GET_CODE (reg = SUBREG_REG (XEXP (op, 0))) == REG))))
3212 && REGNO_POINTER_ALIGN (REGNO (reg)) >= 32);
3215 /* Similar to s_register_operand, but does not allow hard integer
3216 registers. */
3219 f_register_operand (op, mode)
3220 rtx op;
3221 enum machine_mode mode;
3223 if (GET_MODE (op) != mode && mode != VOIDmode)
3224 return 0;
3226 if (GET_CODE (op) == SUBREG)
3227 op = SUBREG_REG (op);
3229 /* We don't consider registers whose class is NO_REGS
3230 to be a register operand. */
3231 return (GET_CODE (op) == REG
3232 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3233 || REGNO_REG_CLASS (REGNO (op)) == FPU_REGS));
3236 /* Return TRUE for valid operands for the rhs of an FPU instruction. */
3239 fpu_rhs_operand (op, mode)
3240 rtx op;
3241 enum machine_mode mode;
3243 if (s_register_operand (op, mode))
3244 return TRUE;
3246 if (GET_MODE (op) != mode && mode != VOIDmode)
3247 return FALSE;
3249 if (GET_CODE (op) == CONST_DOUBLE)
3250 return const_double_rtx_ok_for_fpu (op);
3252 return FALSE;
3256 fpu_add_operand (op, mode)
3257 rtx op;
3258 enum machine_mode mode;
3260 if (s_register_operand (op, mode))
3261 return TRUE;
3263 if (GET_MODE (op) != mode && mode != VOIDmode)
3264 return FALSE;
3266 if (GET_CODE (op) == CONST_DOUBLE)
3267 return (const_double_rtx_ok_for_fpu (op)
3268 || neg_const_double_rtx_ok_for_fpu (op));
3270 return FALSE;
3273 /* Return nonzero if OP is a constant power of two. */
3276 power_of_two_operand (op, mode)
3277 rtx op;
3278 enum machine_mode mode ATTRIBUTE_UNUSED;
3280 if (GET_CODE (op) == CONST_INT)
3282 HOST_WIDE_INT value = INTVAL (op);
3284 return value != 0 && (value & (value - 1)) == 0;
3287 return FALSE;
3290 /* Return TRUE for a valid operand of a DImode operation.
3291 Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
3292 Note that this disallows MEM(REG+REG), but allows
3293 MEM(PRE/POST_INC/DEC(REG)). */
3296 di_operand (op, mode)
3297 rtx op;
3298 enum machine_mode mode;
3300 if (s_register_operand (op, mode))
3301 return TRUE;
3303 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
3304 return FALSE;
3306 if (GET_CODE (op) == SUBREG)
3307 op = SUBREG_REG (op);
3309 switch (GET_CODE (op))
3311 case CONST_DOUBLE:
3312 case CONST_INT:
3313 return TRUE;
3315 case MEM:
3316 return memory_address_p (DImode, XEXP (op, 0));
3318 default:
3319 return FALSE;
3323 /* Like di_operand, but don't accept constants. */
3326 nonimmediate_di_operand (op, mode)
3327 rtx op;
3328 enum machine_mode mode;
3330 if (s_register_operand (op, mode))
3331 return TRUE;
3333 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
3334 return FALSE;
3336 if (GET_CODE (op) == SUBREG)
3337 op = SUBREG_REG (op);
3339 if (GET_CODE (op) == MEM)
3340 return memory_address_p (DImode, XEXP (op, 0));
3342 return FALSE;
3345 /* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
3346 Either: REG, SUBREG, CONST_DOUBLE or MEM(DFmode_address).
3347 Note that this disallows MEM(REG+REG), but allows
3348 MEM(PRE/POST_INC/DEC(REG)). */
3351 soft_df_operand (op, mode)
3352 rtx op;
3353 enum machine_mode mode;
3355 if (s_register_operand (op, mode))
3356 return TRUE;
3358 if (mode != VOIDmode && GET_MODE (op) != mode)
3359 return FALSE;
3361 if (GET_CODE (op) == SUBREG && CONSTANT_P (SUBREG_REG (op)))
3362 return FALSE;
3364 if (GET_CODE (op) == SUBREG)
3365 op = SUBREG_REG (op);
3367 switch (GET_CODE (op))
3369 case CONST_DOUBLE:
3370 return TRUE;
3372 case MEM:
3373 return memory_address_p (DFmode, XEXP (op, 0));
3375 default:
3376 return FALSE;
3380 /* Like soft_df_operand, but don't accept constants. */
3383 nonimmediate_soft_df_operand (op, mode)
3384 rtx op;
3385 enum machine_mode mode;
3387 if (s_register_operand (op, mode))
3388 return TRUE;
3390 if (mode != VOIDmode && GET_MODE (op) != mode)
3391 return FALSE;
3393 if (GET_CODE (op) == SUBREG)
3394 op = SUBREG_REG (op);
3396 if (GET_CODE (op) == MEM)
3397 return memory_address_p (DFmode, XEXP (op, 0));
3398 return FALSE;
3401 /* Return TRUE for valid index operands. */
3404 index_operand (op, mode)
3405 rtx op;
3406 enum machine_mode mode;
3408 return (s_register_operand (op, mode)
3409 || (immediate_operand (op, mode)
3410 && (GET_CODE (op) != CONST_INT
3411 || (INTVAL (op) < 4096 && INTVAL (op) > -4096))));
3414 /* Return TRUE for valid shifts by a constant. This also accepts any
3415 power of two on the (somewhat overly relaxed) assumption that the
3416 shift operator in this case was a mult. */
3419 const_shift_operand (op, mode)
3420 rtx op;
3421 enum machine_mode mode;
3423 return (power_of_two_operand (op, mode)
3424 || (immediate_operand (op, mode)
3425 && (GET_CODE (op) != CONST_INT
3426 || (INTVAL (op) < 32 && INTVAL (op) > 0))));
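/* E.g. (const_int 3) is accepted as a plain shift count, and
   (const_int 8) is accepted as the power-of-two multiplier that a
   shift left by 3 may have been converted into.  */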
3429 /* Return TRUE for arithmetic operators which can be combined with a multiply
3430 (shift). */
3433 shiftable_operator (x, mode)
3434 rtx x;
3435 enum machine_mode mode;
3437 enum rtx_code code;
3439 if (GET_MODE (x) != mode)
3440 return FALSE;
3442 code = GET_CODE (x);
3444 return (code == PLUS || code == MINUS
3445 || code == IOR || code == XOR || code == AND);
3448 /* Return TRUE for binary logical operators. */
3451 logical_binary_operator (x, mode)
3452 rtx x;
3453 enum machine_mode mode;
3455 enum rtx_code code;
3457 if (GET_MODE (x) != mode)
3458 return FALSE;
3460 code = GET_CODE (x);
3462 return (code == IOR || code == XOR || code == AND);
3465 /* Return TRUE for shift operators. */
3468 shift_operator (x, mode)
3469 rtx x;
3470 enum machine_mode mode;
3472 enum rtx_code code;
3474 if (GET_MODE (x) != mode)
3475 return FALSE;
3477 code = GET_CODE (x);
3479 if (code == MULT)
3480 return power_of_two_operand (XEXP (x, 1), mode);
3482 return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
3483 || code == ROTATERT);
3486 /* Return TRUE if x is EQ or NE. */
3489 equality_operator (x, mode)
3490 rtx x;
3491 enum machine_mode mode ATTRIBUTE_UNUSED;
3493 return GET_CODE (x) == EQ || GET_CODE (x) == NE;
3496 /* Return TRUE if x is a comparison operator other than LTGT or UNEQ. */
3499 arm_comparison_operator (x, mode)
3500 rtx x;
3501 enum machine_mode mode;
3503 return (comparison_operator (x, mode)
3504 && GET_CODE (x) != LTGT
3505 && GET_CODE (x) != UNEQ);
3508 /* Return TRUE for SMIN SMAX UMIN UMAX operators. */
3511 minmax_operator (x, mode)
3512 rtx x;
3513 enum machine_mode mode;
3515 enum rtx_code code = GET_CODE (x);
3517 if (GET_MODE (x) != mode)
3518 return FALSE;
3520 return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
3523 /* Return TRUE if this is the condition code register; if we aren't given
3524 a mode, accept any class CCmode register. */
3527 cc_register (x, mode)
3528 rtx x;
3529 enum machine_mode mode;
3531 if (mode == VOIDmode)
3533 mode = GET_MODE (x);
3535 if (GET_MODE_CLASS (mode) != MODE_CC)
3536 return FALSE;
3539 if ( GET_MODE (x) == mode
3540 && GET_CODE (x) == REG
3541 && REGNO (x) == CC_REGNUM)
3542 return TRUE;
3544 return FALSE;
3547 /* Return TRUE if this is the condition code register; if we aren't given
3548 a mode, accept any class CCmode register which indicates a dominance
3549 expression. */
3552 dominant_cc_register (x, mode)
3553 rtx x;
3554 enum machine_mode mode;
3556 if (mode == VOIDmode)
3558 mode = GET_MODE (x);
3560 if (GET_MODE_CLASS (mode) != MODE_CC)
3561 return FALSE;
3564 if ( mode != CC_DNEmode && mode != CC_DEQmode
3565 && mode != CC_DLEmode && mode != CC_DLTmode
3566 && mode != CC_DGEmode && mode != CC_DGTmode
3567 && mode != CC_DLEUmode && mode != CC_DLTUmode
3568 && mode != CC_DGEUmode && mode != CC_DGTUmode)
3569 return FALSE;
3571 return cc_register (x, mode);
3574 /* Return TRUE if X references a SYMBOL_REF. */
3577 symbol_mentioned_p (x)
3578 rtx x;
3580 const char * fmt;
3581 int i;
3583 if (GET_CODE (x) == SYMBOL_REF)
3584 return 1;
3586 fmt = GET_RTX_FORMAT (GET_CODE (x));
3588 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3590 if (fmt[i] == 'E')
3592 int j;
3594 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3595 if (symbol_mentioned_p (XVECEXP (x, i, j)))
3596 return 1;
3598 else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
3599 return 1;
3602 return 0;
3605 /* Return TRUE if X references a LABEL_REF. */
3608 label_mentioned_p (x)
3609 rtx x;
3611 const char * fmt;
3612 int i;
3614 if (GET_CODE (x) == LABEL_REF)
3615 return 1;
3617 fmt = GET_RTX_FORMAT (GET_CODE (x));
3618 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3620 if (fmt[i] == 'E')
3622 int j;
3624 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3625 if (label_mentioned_p (XVECEXP (x, i, j)))
3626 return 1;
3628 else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
3629 return 1;
3632 return 0;
3635 enum rtx_code
3636 minmax_code (x)
3637 rtx x;
3639 enum rtx_code code = GET_CODE (x);
3641 if (code == SMAX)
3642 return GE;
3643 else if (code == SMIN)
3644 return LE;
3645 else if (code == UMIN)
3646 return LEU;
3647 else if (code == UMAX)
3648 return GEU;
3650 abort ();
3653 /* Return 1 if memory locations are adjacent. */
3656 adjacent_mem_locations (a, b)
3657 rtx a, b;
3659 if ((GET_CODE (XEXP (a, 0)) == REG
3660 || (GET_CODE (XEXP (a, 0)) == PLUS
3661 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
3662 && (GET_CODE (XEXP (b, 0)) == REG
3663 || (GET_CODE (XEXP (b, 0)) == PLUS
3664 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
3666 int val0 = 0, val1 = 0;
3667 int reg0, reg1;
3669 if (GET_CODE (XEXP (a, 0)) == PLUS)
3671 reg0 = REGNO (XEXP (XEXP (a, 0), 0));
3672 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
3674 else
3675 reg0 = REGNO (XEXP (a, 0));
3677 if (GET_CODE (XEXP (b, 0)) == PLUS)
3679 reg1 = REGNO (XEXP (XEXP (b, 0), 0));
3680 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
3682 else
3683 reg1 = REGNO (XEXP (b, 0));
3685 return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
3687 return 0;
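/* For example, (mem (reg r4)) and (mem (plus (reg r4) (const_int 4)))
   are adjacent in either order, whereas [r4, #0] and [r4, #8], or
   two locations based on different registers, are not.  */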
3690 /* Return 1 if OP is a load multiple operation. It is known to be
3691 parallel and the first section will be tested. */
3694 load_multiple_operation (op, mode)
3695 rtx op;
3696 enum machine_mode mode ATTRIBUTE_UNUSED;
3698 HOST_WIDE_INT count = XVECLEN (op, 0);
3699 int dest_regno;
3700 rtx src_addr;
3701 HOST_WIDE_INT i = 1, base = 0;
3702 rtx elt;
3704 if (count <= 1
3705 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
3706 return 0;
3708 /* Check to see if this might be a write-back. */
3709 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
3711 i++;
3712 base = 1;
3714 /* Now check it more carefully. */
3715 if (GET_CODE (SET_DEST (elt)) != REG
3716 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
3717 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
3718 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
3719 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
3720 return 0;
3723 /* Perform a quick check so we don't blow up below. */
3724 if (count <= i
3725 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
3726 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
3727 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
3728 return 0;
3730 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
3731 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);
3733 for (; i < count; i++)
3735 elt = XVECEXP (op, 0, i);
3737 if (GET_CODE (elt) != SET
3738 || GET_CODE (SET_DEST (elt)) != REG
3739 || GET_MODE (SET_DEST (elt)) != SImode
3740 || REGNO (SET_DEST (elt)) != (unsigned int)(dest_regno + i - base)
3741 || GET_CODE (SET_SRC (elt)) != MEM
3742 || GET_MODE (SET_SRC (elt)) != SImode
3743 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
3744 || !rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
3745 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
3746 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
3747 return 0;
3750 return 1;
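/* A sketch of a PARALLEL accepted above, for a two register load
   with no write-back (RD and RB are illustrative):

     (parallel [(set (reg:SI rd)   (mem:SI (reg:SI rb)))
                (set (reg:SI rd+1) (mem:SI (plus:SI (reg:SI rb)
                                                    (const_int 4))))])
*/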
3753 /* Return 1 if OP is a store multiple operation. It is known to be
3754 parallel and the first section will be tested. */
3757 store_multiple_operation (op, mode)
3758 rtx op;
3759 enum machine_mode mode ATTRIBUTE_UNUSED;
3761 HOST_WIDE_INT count = XVECLEN (op, 0);
3762 int src_regno;
3763 rtx dest_addr;
3764 HOST_WIDE_INT i = 1, base = 0;
3765 rtx elt;
3767 if (count <= 1
3768 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
3769 return 0;
3771 /* Check to see if this might be a write-back. */
3772 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
3774 i++;
3775 base = 1;
3777 /* Now check it more carefully. */
3778 if (GET_CODE (SET_DEST (elt)) != REG
3779 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
3780 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
3781 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
3782 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
3783 return 0;
3786 /* Perform a quick check so we don't blow up below. */
3787 if (count <= i
3788 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
3789 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
3790 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
3791 return 0;
3793 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
3794 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);
3796 for (; i < count; i++)
3798 elt = XVECEXP (op, 0, i);
3800 if (GET_CODE (elt) != SET
3801 || GET_CODE (SET_SRC (elt)) != REG
3802 || GET_MODE (SET_SRC (elt)) != SImode
3803 || REGNO (SET_SRC (elt)) != (unsigned int)(src_regno + i - base)
3804 || GET_CODE (SET_DEST (elt)) != MEM
3805 || GET_MODE (SET_DEST (elt)) != SImode
3806 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
3807 || !rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
3808 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
3809 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
3810 return 0;
3813 return 1;
3817 load_multiple_sequence (operands, nops, regs, base, load_offset)
3818 rtx * operands;
3819 int nops;
3820 int * regs;
3821 int * base;
3822 HOST_WIDE_INT * load_offset;
3824 int unsorted_regs[4];
3825 HOST_WIDE_INT unsorted_offsets[4];
3826 int order[4];
3827 int base_reg = -1;
3828 int i;
3830 /* Can only handle 2, 3, or 4 insns at present,
3831 though could be easily extended if required. */
3832 if (nops < 2 || nops > 4)
3833 abort ();
3835 /* Loop over the operands and check that the memory references are
3836 suitable (i.e. immediate offsets from the same base register). At
3837 the same time, extract the target register, and the memory
3838 offsets. */
3839 for (i = 0; i < nops; i++)
3841 rtx reg;
3842 rtx offset;
3844 /* Convert a subreg of a mem into the mem itself. */
3845 if (GET_CODE (operands[nops + i]) == SUBREG)
3846 operands[nops + i] = alter_subreg (operands + (nops + i));
3848 if (GET_CODE (operands[nops + i]) != MEM)
3849 abort ();
3851 /* Don't reorder volatile memory references; it doesn't seem worth
3852 looking for the case where the order is ok anyway. */
3853 if (MEM_VOLATILE_P (operands[nops + i]))
3854 return 0;
3856 offset = const0_rtx;
3858 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
3859 || (GET_CODE (reg) == SUBREG
3860 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3861 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
3862 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
3863 == REG)
3864 || (GET_CODE (reg) == SUBREG
3865 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3866 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
3867 == CONST_INT)))
3869 if (i == 0)
3871 base_reg = REGNO (reg);
3872 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
3873 ? REGNO (operands[i])
3874 : REGNO (SUBREG_REG (operands[i])));
3875 order[0] = 0;
3877 else
3879 if (base_reg != (int) REGNO (reg))
3880 /* Not addressed from the same base register. */
3881 return 0;
3883 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
3884 ? REGNO (operands[i])
3885 : REGNO (SUBREG_REG (operands[i])));
3886 if (unsorted_regs[i] < unsorted_regs[order[0]])
3887 order[0] = i;
3890 /* If it isn't an integer register, or if it overwrites the
3891 base register but isn't the last insn in the list, then
3892 we can't do this. */
3893 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14
3894 || (i != nops - 1 && unsorted_regs[i] == base_reg))
3895 return 0;
3897 unsorted_offsets[i] = INTVAL (offset);
3899 else
3900 /* Not a suitable memory address. */
3901 return 0;
3904 /* All the useful information has now been extracted from the
3905 operands into unsorted_regs and unsorted_offsets; additionally,
3906 order[0] has been set to the lowest numbered register in the
3907 list. Sort the registers into order, and check that the memory
3908 offsets are ascending and adjacent. */
3910 for (i = 1; i < nops; i++)
3912 int j;
3914 order[i] = order[i - 1];
3915 for (j = 0; j < nops; j++)
3916 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
3917 && (order[i] == order[i - 1]
3918 || unsorted_regs[j] < unsorted_regs[order[i]]))
3919 order[i] = j;
3921 /* Have we found a suitable register? If not, one must be used more
3922 than once. */
3923 if (order[i] == order[i - 1])
3924 return 0;
3926 /* Are the memory addresses adjacent and ascending? */
3927 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
3928 return 0;
3931 if (base)
3933 *base = base_reg;
3935 for (i = 0; i < nops; i++)
3936 regs[i] = unsorted_regs[order[i]];
3938 *load_offset = unsorted_offsets[order[0]];
3941 if (unsorted_offsets[order[0]] == 0)
3942 return 1; /* ldmia */
3944 if (unsorted_offsets[order[0]] == 4)
3945 return 2; /* ldmib */
3947 if (unsorted_offsets[order[nops - 1]] == 0)
3948 return 3; /* ldmda */
3950 if (unsorted_offsets[order[nops - 1]] == -4)
3951 return 4; /* ldmdb */
3953 /* For ARM8,9 & StrongARM, 2 ldr instructions are faster than an ldm
3954 if the offset isn't small enough. The reason 2 ldrs are faster
3955 is that these ARMs are able to do more than one cache access
3956 in a single cycle. The ARM9 and StrongARM have Harvard caches,
3957 whilst the ARM8 has a double bandwidth cache. This means that
3958 these cores can do both an instruction fetch and a data fetch in
3959 a single cycle, so the trick of calculating the address into a
3960 scratch register (one of the result regs) and then doing a load
3961 multiple actually becomes slower (and no smaller in code size).
3962 That is the transformation
3964 ldr rd1, [rbase + offset]
3965 ldr rd2, [rbase + offset + 4]
3967 to
3969 add rd1, rbase, offset
3970 ldmia rd1, {rd1, rd2}
3972 produces worse code -- '3 cycles + any stalls on rd2' instead of
3973 '2 cycles + any stalls on rd2'. On ARMs with only one cache
3974 access per cycle, the first sequence could never complete in less
3975 than 6 cycles, whereas the ldm sequence would only take 5 and
3976 would make better use of sequential accesses if not hitting the
3977 cache.
3979 We cheat here and test 'arm_ld_sched' which we currently know to
3980 only be true for the ARM8, ARM9 and StrongARM. If this ever
3981 changes, then the test below needs to be reworked. */
3982 if (nops == 2 && arm_ld_sched)
3983 return 0;
3985 /* Can't do it without setting up the offset; only do this if it takes
3986 no more than one insn. */
3987 return (const_ok_for_arm (unsorted_offsets[order[0]])
3988 || const_ok_for_arm (-unsorted_offsets[order[0]])) ? 5 : 0;
3991 const char *
3992 emit_ldm_seq (operands, nops)
3993 rtx * operands;
3994 int nops;
3996 int regs[4];
3997 int base_reg;
3998 HOST_WIDE_INT offset;
3999 char buf[100];
4000 int i;
4002 switch (load_multiple_sequence (operands, nops, regs, &base_reg, &offset))
4004 case 1:
4005 strcpy (buf, "ldm%?ia\t");
4006 break;
4008 case 2:
4009 strcpy (buf, "ldm%?ib\t");
4010 break;
4012 case 3:
4013 strcpy (buf, "ldm%?da\t");
4014 break;
4016 case 4:
4017 strcpy (buf, "ldm%?db\t");
4018 break;
4020 case 5:
4021 if (offset >= 0)
4022 sprintf (buf, "add%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
4023 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
4024 (long) offset);
4025 else
4026 sprintf (buf, "sub%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
4027 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
4028 (long) -offset);
4029 output_asm_insn (buf, operands);
4030 base_reg = regs[0];
4031 strcpy (buf, "ldm%?ia\t");
4032 break;
4034 default:
4035 abort ();
4038 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
4039 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
4041 for (i = 1; i < nops; i++)
4042 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
4043 reg_names[regs[i]]);
4045 strcat (buf, "}\t%@ phole ldm");
4047 output_asm_insn (buf, operands);
4048 return "";
4052 store_multiple_sequence (operands, nops, regs, base, load_offset)
4053 rtx * operands;
4054 int nops;
4055 int * regs;
4056 int * base;
4057 HOST_WIDE_INT * load_offset;
4059 int unsorted_regs[4];
4060 HOST_WIDE_INT unsorted_offsets[4];
4061 int order[4];
4062 int base_reg = -1;
4063 int i;
4065 /* Can only handle 2, 3, or 4 insns at present, though could be easily
4066 extended if required. */
4067 if (nops < 2 || nops > 4)
4068 abort ();
4070 /* Loop over the operands and check that the memory references are
4071 suitable (i.e. immediate offsets from the same base register). At
4072 the same time, extract the target register, and the memory
4073 offsets. */
4074 for (i = 0; i < nops; i++)
4076 rtx reg;
4077 rtx offset;
4079 /* Convert a subreg of a mem into the mem itself. */
4080 if (GET_CODE (operands[nops + i]) == SUBREG)
4081 operands[nops + i] = alter_subreg (operands + (nops + i));
4083 if (GET_CODE (operands[nops + i]) != MEM)
4084 abort ();
4086 /* Don't reorder volatile memory references; it doesn't seem worth
4087 looking for the case where the order is ok anyway. */
4088 if (MEM_VOLATILE_P (operands[nops + i]))
4089 return 0;
4091 offset = const0_rtx;
4093 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
4094 || (GET_CODE (reg) == SUBREG
4095 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4096 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
4097 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
4098 == REG)
4099 || (GET_CODE (reg) == SUBREG
4100 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4101 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
4102 == CONST_INT)))
4104 if (i == 0)
4106 base_reg = REGNO (reg);
4107 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
4108 ? REGNO (operands[i])
4109 : REGNO (SUBREG_REG (operands[i])));
4110 order[0] = 0;
4112 else
4114 if (base_reg != (int) REGNO (reg))
4115 /* Not addressed from the same base register. */
4116 return 0;
4118 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
4119 ? REGNO (operands[i])
4120 : REGNO (SUBREG_REG (operands[i])));
4121 if (unsorted_regs[i] < unsorted_regs[order[0]])
4122 order[0] = i;
4125 /* If it isn't an integer register, then we can't do this. */
4126 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14)
4127 return 0;
4129 unsorted_offsets[i] = INTVAL (offset);
4131 else
4132 /* Not a suitable memory address. */
4133 return 0;
4136 /* All the useful information has now been extracted from the
4137 operands into unsorted_regs and unsorted_offsets; additionally,
4138 order[0] has been set to the lowest numbered register in the
4139 list. Sort the registers into order, and check that the memory
4140 offsets are ascending and adjacent. */
4142 for (i = 1; i < nops; i++)
4144 int j;
4146 order[i] = order[i - 1];
4147 for (j = 0; j < nops; j++)
4148 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
4149 && (order[i] == order[i - 1]
4150 || unsorted_regs[j] < unsorted_regs[order[i]]))
4151 order[i] = j;
4153 /* Have we found a suitable register? If not, one must be used more
4154 than once. */
4155 if (order[i] == order[i - 1])
4156 return 0;
4158 /* Are the memory addresses adjacent and ascending? */
4159 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
4160 return 0;
4163 if (base)
4165 *base = base_reg;
4167 for (i = 0; i < nops; i++)
4168 regs[i] = unsorted_regs[order[i]];
4170 *load_offset = unsorted_offsets[order[0]];
4173 if (unsorted_offsets[order[0]] == 0)
4174 return 1; /* stmia */
4176 if (unsorted_offsets[order[0]] == 4)
4177 return 2; /* stmib */
4179 if (unsorted_offsets[order[nops - 1]] == 0)
4180 return 3; /* stmda */
4182 if (unsorted_offsets[order[nops - 1]] == -4)
4183 return 4; /* stmdb */
4185 return 0;
4188 const char *
4189 emit_stm_seq (operands, nops)
4190 rtx * operands;
4191 int nops;
4193 int regs[4];
4194 int base_reg;
4195 HOST_WIDE_INT offset;
4196 char buf[100];
4197 int i;
4199 switch (store_multiple_sequence (operands, nops, regs, &base_reg, &offset))
4201 case 1:
4202 strcpy (buf, "stm%?ia\t");
4203 break;
4205 case 2:
4206 strcpy (buf, "stm%?ib\t");
4207 break;
4209 case 3:
4210 strcpy (buf, "stm%?da\t");
4211 break;
4213 case 4:
4214 strcpy (buf, "stm%?db\t");
4215 break;
4217 default:
4218 abort ();
4221 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
4222 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
4224 for (i = 1; i < nops; i++)
4225 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
4226 reg_names[regs[i]]);
4228 strcat (buf, "}\t%@ phole stm");
4230 output_asm_insn (buf, operands);
4231 return "";
4234 int
4235 multi_register_push (op, mode)
4236 rtx op;
4237 enum machine_mode mode ATTRIBUTE_UNUSED;
4239 if (GET_CODE (op) != PARALLEL
4240 || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
4241 || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
4242 || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != UNSPEC_PUSH_MULT))
4243 return 0;
4245 return 1;
4248 /* Routines for use in generating RTL. */
4250 rtx
4251 arm_gen_load_multiple (base_regno, count, from, up, write_back, unchanging_p,
4252 in_struct_p, scalar_p)
4253 int base_regno;
4254 int count;
4255 rtx from;
4256 int up;
4257 int write_back;
4258 int unchanging_p;
4259 int in_struct_p;
4260 int scalar_p;
4262 int i = 0, j;
4263 rtx result;
4264 int sign = up ? 1 : -1;
4265 rtx mem;
4267 /* XScale has load-store double instructions, but they have stricter
4268 alignment requirements than load-store multiple, so we cannot
4269 use them.
4271 For XScale ldm requires 2 + NREGS cycles to complete and blocks
4272 the pipeline until completion.
4274 NREGS CYCLES
4275 1 3
4276 2 4
4277 3 5
4278 4 6
4280 An ldr instruction takes 1-3 cycles, but does not block the
4281 pipeline.
4283 NREGS CYCLES
4284 1 1-3
4285 2 2-6
4286 3 3-9
4287 4 4-12
4289 Best case ldr will always win. However, the more ldr instructions
4290 we issue, the less likely we are to be able to schedule them well.
4291 Using ldr instructions also increases code size.
4293 As a compromise, we use ldr for counts of 1 or 2 regs, and ldm
4294 for counts of 3 or 4 regs. */
4295 if (arm_is_xscale && count <= 2 && ! optimize_size)
4297 rtx seq;
4299 start_sequence ();
4301 for (i = 0; i < count; i++)
4303 mem = gen_rtx_MEM (SImode, plus_constant (from, i * 4 * sign));
4304 RTX_UNCHANGING_P (mem) = unchanging_p;
4305 MEM_IN_STRUCT_P (mem) = in_struct_p;
4306 MEM_SCALAR_P (mem) = scalar_p;
4307 emit_move_insn (gen_rtx_REG (SImode, base_regno + i), mem);
4310 if (write_back)
4311 emit_move_insn (from, plus_constant (from, count * 4 * sign));
4313 seq = gen_sequence ();
4314 end_sequence ();
4316 return seq;
4319 result = gen_rtx_PARALLEL (VOIDmode,
4320 rtvec_alloc (count + (write_back ? 1 : 0)));
4321 if (write_back)
4323 XVECEXP (result, 0, 0)
4324 = gen_rtx_SET (GET_MODE (from), from,
4325 plus_constant (from, count * 4 * sign));
4326 i = 1;
4327 count++;
4330 for (j = 0; i < count; i++, j++)
4332 mem = gen_rtx_MEM (SImode, plus_constant (from, j * 4 * sign));
4333 RTX_UNCHANGING_P (mem) = unchanging_p;
4334 MEM_IN_STRUCT_P (mem) = in_struct_p;
4335 MEM_SCALAR_P (mem) = scalar_p;
4336 XVECEXP (result, 0, i)
4337 = gen_rtx_SET (VOIDmode, gen_rtx_REG (SImode, base_regno + j), mem);
4340 return result;
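/* Illustrative sketch, not part of the original source: the cycle
   arithmetic behind the XScale comment above (ldm = 2 + NREGS cycles
   and blocks the pipeline; each ldr is 1-3 cycles and does not block).
   Standalone ISO C, fenced with #if 0.  */
#if 0
#include <stdio.h>

int
main (void)
{
  int nregs;

  for (nregs = 1; nregs <= 4; nregs++)
    printf ("NREGS=%d: ldm=%d cycles (blocking), ldr best case=%d cycles\n",
            nregs, 2 + nregs, nregs);
  return 0;
}
#endif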
4343 rtx
4344 arm_gen_store_multiple (base_regno, count, to, up, write_back, unchanging_p,
4345 in_struct_p, scalar_p)
4346 int base_regno;
4347 int count;
4348 rtx to;
4349 int up;
4350 int write_back;
4351 int unchanging_p;
4352 int in_struct_p;
4353 int scalar_p;
4355 int i = 0, j;
4356 rtx result;
4357 int sign = up ? 1 : -1;
4358 rtx mem;
4360 /* See arm_gen_load_multiple for discussion of
4361 the pros/cons of ldm/stm usage for XScale. */
4362 if (arm_is_xscale && count <= 2 && ! optimize_size)
4364 rtx seq;
4366 start_sequence ();
4368 for (i = 0; i < count; i++)
4370 mem = gen_rtx_MEM (SImode, plus_constant (to, i * 4 * sign));
4371 RTX_UNCHANGING_P (mem) = unchanging_p;
4372 MEM_IN_STRUCT_P (mem) = in_struct_p;
4373 MEM_SCALAR_P (mem) = scalar_p;
4374 emit_move_insn (mem, gen_rtx_REG (SImode, base_regno + i));
4377 if (write_back)
4378 emit_move_insn (to, plus_constant (to, count * 4 * sign));
4380 seq = gen_sequence ();
4381 end_sequence ();
4383 return seq;
4386 result = gen_rtx_PARALLEL (VOIDmode,
4387 rtvec_alloc (count + (write_back ? 1 : 0)));
4388 if (write_back)
4390 XVECEXP (result, 0, 0)
4391 = gen_rtx_SET (GET_MODE (to), to,
4392 plus_constant (to, count * 4 * sign));
4393 i = 1;
4394 count++;
4397 for (j = 0; i < count; i++, j++)
4399 mem = gen_rtx_MEM (SImode, plus_constant (to, j * 4 * sign));
4400 RTX_UNCHANGING_P (mem) = unchanging_p;
4401 MEM_IN_STRUCT_P (mem) = in_struct_p;
4402 MEM_SCALAR_P (mem) = scalar_p;
4404 XVECEXP (result, 0, i)
4405 = gen_rtx_SET (VOIDmode, mem, gen_rtx_REG (SImode, base_regno + j));
4408 return result;
4411 int
4412 arm_gen_movstrqi (operands)
4413 rtx * operands;
4415 HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
4416 int i;
4417 rtx src, dst;
4418 rtx st_src, st_dst, fin_src, fin_dst;
4419 rtx part_bytes_reg = NULL;
4420 rtx mem;
4421 int dst_unchanging_p, dst_in_struct_p, src_unchanging_p, src_in_struct_p;
4422 int dst_scalar_p, src_scalar_p;
4424 if (GET_CODE (operands[2]) != CONST_INT
4425 || GET_CODE (operands[3]) != CONST_INT
4426 || INTVAL (operands[2]) > 64
4427 || INTVAL (operands[3]) & 3)
4428 return 0;
4430 st_dst = XEXP (operands[0], 0);
4431 st_src = XEXP (operands[1], 0);
4433 dst_unchanging_p = RTX_UNCHANGING_P (operands[0]);
4434 dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
4435 dst_scalar_p = MEM_SCALAR_P (operands[0]);
4436 src_unchanging_p = RTX_UNCHANGING_P (operands[1]);
4437 src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);
4438 src_scalar_p = MEM_SCALAR_P (operands[1]);
4440 fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
4441 fin_src = src = copy_to_mode_reg (SImode, st_src);
4443 in_words_to_go = NUM_INTS (INTVAL (operands[2]));
4444 out_words_to_go = INTVAL (operands[2]) / 4;
4445 last_bytes = INTVAL (operands[2]) & 3;
4447 if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
4448 part_bytes_reg = gen_rtx_REG (SImode, (in_words_to_go - 1) & 3);
4450 for (i = 0; in_words_to_go >= 2; i+=4)
4452 if (in_words_to_go > 4)
4453 emit_insn (arm_gen_load_multiple (0, 4, src, TRUE, TRUE,
4454 src_unchanging_p,
4455 src_in_struct_p,
4456 src_scalar_p));
4457 else
4458 emit_insn (arm_gen_load_multiple (0, in_words_to_go, src, TRUE,
4459 FALSE, src_unchanging_p,
4460 src_in_struct_p, src_scalar_p));
4462 if (out_words_to_go)
4464 if (out_words_to_go > 4)
4465 emit_insn (arm_gen_store_multiple (0, 4, dst, TRUE, TRUE,
4466 dst_unchanging_p,
4467 dst_in_struct_p,
4468 dst_scalar_p));
4469 else if (out_words_to_go != 1)
4470 emit_insn (arm_gen_store_multiple (0, out_words_to_go,
4471 dst, TRUE,
4472 (last_bytes == 0
4473 ? FALSE : TRUE),
4474 dst_unchanging_p,
4475 dst_in_struct_p,
4476 dst_scalar_p));
4477 else
4479 mem = gen_rtx_MEM (SImode, dst);
4480 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4481 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4482 MEM_SCALAR_P (mem) = dst_scalar_p;
4483 emit_move_insn (mem, gen_rtx_REG (SImode, 0));
4484 if (last_bytes != 0)
4485 emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
4489 in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
4490 out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
4493 /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do. */
4494 if (out_words_to_go)
4496 rtx sreg;
4498 mem = gen_rtx_MEM (SImode, src);
4499 RTX_UNCHANGING_P (mem) = src_unchanging_p;
4500 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
4501 MEM_SCALAR_P (mem) = src_scalar_p;
4502 emit_move_insn (sreg = gen_reg_rtx (SImode), mem);
4503 emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));
4505 mem = gen_rtx_MEM (SImode, dst);
4506 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4507 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4508 MEM_SCALAR_P (mem) = dst_scalar_p;
4509 emit_move_insn (mem, sreg);
4510 emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
4511 in_words_to_go--;
4513 if (in_words_to_go) /* Sanity check */
4514 abort ();
4517 if (in_words_to_go)
4519 if (in_words_to_go < 0)
4520 abort ();
4522 mem = gen_rtx_MEM (SImode, src);
4523 RTX_UNCHANGING_P (mem) = src_unchanging_p;
4524 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
4525 MEM_SCALAR_P (mem) = src_scalar_p;
4526 part_bytes_reg = copy_to_mode_reg (SImode, mem);
4529 if (last_bytes && part_bytes_reg == NULL)
4530 abort ();
4532 if (BYTES_BIG_ENDIAN && last_bytes)
4534 rtx tmp = gen_reg_rtx (SImode);
4536 /* The bytes we want are in the top end of the word. */
4537 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
4538 GEN_INT (8 * (4 - last_bytes))));
4539 part_bytes_reg = tmp;
4541 while (last_bytes)
4543 mem = gen_rtx_MEM (QImode, plus_constant (dst, last_bytes - 1));
4544 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4545 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4546 MEM_SCALAR_P (mem) = dst_scalar_p;
4547 emit_move_insn (mem, gen_rtx_SUBREG (QImode, part_bytes_reg, 0));
4549 if (--last_bytes)
4551 tmp = gen_reg_rtx (SImode);
4552 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
4553 part_bytes_reg = tmp;
4558 else
4560 if (last_bytes > 1)
4562 mem = gen_rtx_MEM (HImode, dst);
4563 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4564 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4565 MEM_SCALAR_P (mem) = dst_scalar_p;
4566 emit_move_insn (mem, gen_rtx_SUBREG (HImode, part_bytes_reg, 0));
4567 last_bytes -= 2;
4568 if (last_bytes)
4570 rtx tmp = gen_reg_rtx (SImode);
4572 emit_insn (gen_addsi3 (dst, dst, GEN_INT (2)));
4573 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (16)));
4574 part_bytes_reg = tmp;
4578 if (last_bytes)
4580 mem = gen_rtx_MEM (QImode, dst);
4581 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4582 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4583 MEM_SCALAR_P (mem) = dst_scalar_p;
4584 emit_move_insn (mem, gen_rtx_SUBREG (QImode, part_bytes_reg, 0));
4588 return 1;
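/* Illustrative sketch, not part of the original source: how
   arm_gen_movstrqi above splits a byte count into whole-word loads,
   whole-word stores and trailing bytes.  It assumes NUM_INTS rounds a
   byte count up to whole words, i.e. (n + 3) / 4.  Standalone ISO C,
   fenced with #if 0.  */
#if 0
#include <stdio.h>

int
main (void)
{
  int n = 10;                   /* bytes to copy */
  int in_words = (n + 3) / 4;   /* words to load: 3 */
  int out_words = n / 4;        /* whole words to store: 2 */
  int last_bytes = n & 3;       /* trailing bytes: 2 */

  printf ("in=%d out=%d last=%d\n", in_words, out_words, last_bytes);
  return 0;
}
#endif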
4591 /* Generate a memory reference for a half word, such that it will be loaded
4592 into the top 16 bits of the word. We can assume that the address is
4593 known to be alignable and of the form reg, or plus (reg, const). */
4595 rtx
4596 arm_gen_rotated_half_load (memref)
4597 rtx memref;
4599 HOST_WIDE_INT offset = 0;
4600 rtx base = XEXP (memref, 0);
4602 if (GET_CODE (base) == PLUS)
4604 offset = INTVAL (XEXP (base, 1));
4605 base = XEXP (base, 0);
4608 /* If we aren't allowed to generate unaligned addresses, then fail. */
4609 if (TARGET_MMU_TRAPS
4610 && ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 0)))
4611 return NULL;
4613 base = gen_rtx_MEM (SImode, plus_constant (base, offset & ~2));
4615 if ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 2))
4616 return base;
4618 return gen_rtx_ROTATE (SImode, base, GEN_INT (16));
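/* Illustrative sketch, not part of the original source: why rotating the
   loaded word by 16 places the addressed halfword in the top 16 bits.
   For a little-endian word 0xBBBBAAAA the halfword at offset 0 is
   0xAAAA; after the rotate it occupies bits 16-31.  Standalone ISO C,
   fenced with #if 0.  */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned int word = 0xBBBBAAAAu;                 /* aligned word load */
  unsigned int rot = (word << 16) | (word >> 16);  /* ROTATE by 16 */

  printf ("0x%08x -> 0x%08x\n", word, rot);        /* 0xAAAABBBB */
  return 0;
}
#endif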
4621 /* Select a dominance comparison mode if possible. We support three forms.
4622 COND_OR == 0 => (X && Y)
4623 COND_OR == 1 => ((!X) || Y)
4624 COND_OR == 2 => (X || Y)
4625 If we are unable to support a dominance comparison we return CC mode.
4626 This will then fail to match for the RTL expressions that generate this
4627 call. */
4629 static enum machine_mode
4630 select_dominance_cc_mode (x, y, cond_or)
4631 rtx x;
4632 rtx y;
4633 HOST_WIDE_INT cond_or;
4635 enum rtx_code cond1, cond2;
4636 int swapped = 0;
4638 /* Currently we will probably get the wrong result if the individual
4639 comparisons are not simple. This also ensures that it is safe to
4640 reverse a comparison if necessary. */
4641 if ((arm_select_cc_mode (cond1 = GET_CODE (x), XEXP (x, 0), XEXP (x, 1))
4642 != CCmode)
4643 || (arm_select_cc_mode (cond2 = GET_CODE (y), XEXP (y, 0), XEXP (y, 1))
4644 != CCmode))
4645 return CCmode;
4647 /* The if_then_else variant of this tests the second condition if the
4648 first passes, but is true if the first fails. Reverse the first
4649 condition to get a true "inclusive-or" expression. */
4650 if (cond_or == 1)
4651 cond1 = reverse_condition (cond1);
4653 /* If the comparisons are not equal, and one doesn't dominate the other,
4654 then we can't do this. */
4655 if (cond1 != cond2
4656 && !comparison_dominates_p (cond1, cond2)
4657 && (swapped = 1, !comparison_dominates_p (cond2, cond1)))
4658 return CCmode;
4660 if (swapped)
4662 enum rtx_code temp = cond1;
4663 cond1 = cond2;
4664 cond2 = temp;
4667 switch (cond1)
4669 case EQ:
4670 if (cond2 == EQ || !cond_or)
4671 return CC_DEQmode;
4673 switch (cond2)
4675 case LE: return CC_DLEmode;
4676 case LEU: return CC_DLEUmode;
4677 case GE: return CC_DGEmode;
4678 case GEU: return CC_DGEUmode;
4679 default: break;
4682 break;
4684 case LT:
4685 if (cond2 == LT || !cond_or)
4686 return CC_DLTmode;
4687 if (cond2 == LE)
4688 return CC_DLEmode;
4689 if (cond2 == NE)
4690 return CC_DNEmode;
4691 break;
4693 case GT:
4694 if (cond2 == GT || !cond_or)
4695 return CC_DGTmode;
4696 if (cond2 == GE)
4697 return CC_DGEmode;
4698 if (cond2 == NE)
4699 return CC_DNEmode;
4700 break;
4702 case LTU:
4703 if (cond2 == LTU || !cond_or)
4704 return CC_DLTUmode;
4705 if (cond2 == LEU)
4706 return CC_DLEUmode;
4707 if (cond2 == NE)
4708 return CC_DNEmode;
4709 break;
4711 case GTU:
4712 if (cond2 == GTU || !cond_or)
4713 return CC_DGTUmode;
4714 if (cond2 == GEU)
4715 return CC_DGEUmode;
4716 if (cond2 == NE)
4717 return CC_DNEmode;
4718 break;
4720 /* The remaining cases only occur when both comparisons are the
4721 same. */
4722 case NE:
4723 return CC_DNEmode;
4725 case LE:
4726 return CC_DLEmode;
4728 case GE:
4729 return CC_DGEmode;
4731 case LEU:
4732 return CC_DLEUmode;
4734 case GEU:
4735 return CC_DGEUmode;
4737 default:
4738 break;
4741 abort ();
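/* Illustrative sketch, not part of the original source: the dominance
   notion used above.  One comparison dominates another when its truth
   implies the other's truth; the loop brute-forces that LT implies LE,
   which is why, e.g., an EQ/LE pair can collapse to the single
   CC_DLEmode test.  Standalone ISO C, fenced with #if 0.  */
#if 0
#include <stdio.h>

int
main (void)
{
  int a, b, ok = 1;

  for (a = -8; a <= 8; a++)
    for (b = -8; b <= 8; b++)
      if (a < b && !(a <= b))
        ok = 0;

  printf ("LT implies LE: %s\n", ok ? "yes" : "no");
  return 0;
}
#endif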
4744 enum machine_mode
4745 arm_select_cc_mode (op, x, y)
4746 enum rtx_code op;
4747 rtx x;
4748 rtx y;
4750 /* All floating point compares return CCFP if it is an equality
4751 comparison, and CCFPE otherwise. */
4752 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
4754 switch (op)
4756 case EQ:
4757 case NE:
4758 case UNORDERED:
4759 case ORDERED:
4760 case UNLT:
4761 case UNLE:
4762 case UNGT:
4763 case UNGE:
4764 case UNEQ:
4765 case LTGT:
4766 return CCFPmode;
4768 case LT:
4769 case LE:
4770 case GT:
4771 case GE:
4772 return CCFPEmode;
4774 default:
4775 abort ();
4779 /* A compare with a shifted operand. Because of canonicalization, the
4780 comparison will have to be swapped when we emit the assembler. */
4781 if (GET_MODE (y) == SImode && GET_CODE (y) == REG
4782 && (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
4783 || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
4784 || GET_CODE (x) == ROTATERT))
4785 return CC_SWPmode;
4787 /* This is a special case that is used by combine to allow a
4788 comparison of a shifted byte load to be split into a zero-extend
4789 followed by a comparison of the shifted integer (only valid for
4790 equalities and unsigned inequalities). */
4791 if (GET_MODE (x) == SImode
4792 && GET_CODE (x) == ASHIFT
4793 && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 24
4794 && GET_CODE (XEXP (x, 0)) == SUBREG
4795 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == MEM
4796 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == QImode
4797 && (op == EQ || op == NE
4798 || op == GEU || op == GTU || op == LTU || op == LEU)
4799 && GET_CODE (y) == CONST_INT)
4800 return CC_Zmode;
4802 /* A construct for a conditional compare: if the false arm contains
4803 0, then both conditions must be true; otherwise either condition
4804 must be true. Not all conditions are possible, so CCmode is
4805 returned if it can't be done. */
4806 if (GET_CODE (x) == IF_THEN_ELSE
4807 && (XEXP (x, 2) == const0_rtx
4808 || XEXP (x, 2) == const1_rtx)
4809 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4810 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
4811 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
4812 INTVAL (XEXP (x, 2)));
4814 /* Alternate canonicalizations of the above. These are somewhat cleaner. */
4815 if (GET_CODE (x) == AND
4816 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4817 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
4818 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1), 0);
4820 if (GET_CODE (x) == IOR
4821 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4822 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
4823 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1), 2);
4825 /* For an operation that sets the condition codes as a side-effect, the
4826 V flag is not set correctly, so we can only use comparisons where
4827 this doesn't matter. (For LT and GE we can use "mi" and "pl"
4828 instead.) */
4829 if (GET_MODE (x) == SImode
4830 && y == const0_rtx
4831 && (op == EQ || op == NE || op == LT || op == GE)
4832 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
4833 || GET_CODE (x) == AND || GET_CODE (x) == IOR
4834 || GET_CODE (x) == XOR || GET_CODE (x) == MULT
4835 || GET_CODE (x) == NOT || GET_CODE (x) == NEG
4836 || GET_CODE (x) == LSHIFTRT
4837 || GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
4838 || GET_CODE (x) == ROTATERT || GET_CODE (x) == ZERO_EXTRACT))
4839 return CC_NOOVmode;
4841 if (GET_MODE (x) == QImode && (op == EQ || op == NE))
4842 return CC_Zmode;
4844 if (GET_MODE (x) == SImode && (op == LTU || op == GEU)
4845 && GET_CODE (x) == PLUS
4846 && (rtx_equal_p (XEXP (x, 0), y) || rtx_equal_p (XEXP (x, 1), y)))
4847 return CC_Cmode;
4849 return CCmode;
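/* Illustrative sketch, not part of the original source: the CC_Cmode
   case above.  When an unsigned sum is compared against one of its own
   operands, (a + b) < a holds exactly when the addition carried out, so
   only the carry flag needs to be valid.  Standalone ISO C, fenced with
   #if 0.  */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned int a = 0xfffffff0u;
  unsigned int b = 0x20u;
  unsigned int sum = a + b;

  printf ("carry out: %d\n", sum < a ? 1 : 0);   /* prints 1 */
  return 0;
}
#endif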
4852 /* X and Y are two things to compare using CODE. Emit the compare insn and
4853 return the rtx for register 0 in the proper mode. FP means this is a
4854 floating point compare: I don't think that it is needed on the arm. */
4856 rtx
4857 arm_gen_compare_reg (code, x, y)
4858 enum rtx_code code;
4859 rtx x, y;
4861 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
4862 rtx cc_reg = gen_rtx_REG (mode, CC_REGNUM);
4864 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
4865 gen_rtx_COMPARE (mode, x, y)));
4867 return cc_reg;
4870 void
4871 arm_reload_in_hi (operands)
4872 rtx * operands;
4874 rtx ref = operands[1];
4875 rtx base, scratch;
4876 HOST_WIDE_INT offset = 0;
4878 if (GET_CODE (ref) == SUBREG)
4880 offset = SUBREG_BYTE (ref);
4881 ref = SUBREG_REG (ref);
4884 if (GET_CODE (ref) == REG)
4886 /* We have a pseudo which has been spilt onto the stack; there
4887 are two cases here: the first where there is a simple
4888 stack-slot replacement and a second where the stack-slot is
4889 out of range, or is used as a subreg. */
4890 if (reg_equiv_mem[REGNO (ref)])
4892 ref = reg_equiv_mem[REGNO (ref)];
4893 base = find_replacement (&XEXP (ref, 0));
4895 else
4896 /* The slot is out of range, or was dressed up in a SUBREG. */
4897 base = reg_equiv_address[REGNO (ref)];
4899 else
4900 base = find_replacement (&XEXP (ref, 0));
4902 /* Handle the case where the address is too complex to be offset by 1. */
4903 if (GET_CODE (base) == MINUS
4904 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
4906 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
4908 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
4909 base = base_plus;
4911 else if (GET_CODE (base) == PLUS)
4913 /* The addend must be CONST_INT, or we would have dealt with it above. */
4914 HOST_WIDE_INT hi, lo;
4916 offset += INTVAL (XEXP (base, 1));
4917 base = XEXP (base, 0);
4919 /* Rework the address into a legal sequence of insns. */
4920 /* Valid range for lo is -4095 -> 4095 */
4921 lo = (offset >= 0
4922 ? (offset & 0xfff)
4923 : -((-offset) & 0xfff));
4925 /* Corner case: if lo is the maximum offset, then we would be out of range
4926 once we have added the additional 1 below, so bump the msb into the
4927 pre-loading insn(s). */
4928 if (lo == 4095)
4929 lo &= 0x7ff;
4931 hi = ((((offset - lo) & (HOST_WIDE_INT) 0xffffffff)
4932 ^ (HOST_WIDE_INT) 0x80000000)
4933 - (HOST_WIDE_INT) 0x80000000);
4935 if (hi + lo != offset)
4936 abort ();
4938 if (hi != 0)
4940 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
4942 /* Get the base address; addsi3 knows how to handle constants
4943 that require more than one insn. */
4944 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
4945 base = base_plus;
4946 offset = lo;
4950 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
4951 emit_insn (gen_zero_extendqisi2 (scratch,
4952 gen_rtx_MEM (QImode,
4953 plus_constant (base,
4954 offset))));
4955 emit_insn (gen_zero_extendqisi2 (gen_rtx_SUBREG (SImode, operands[0], 0),
4956 gen_rtx_MEM (QImode,
4957 plus_constant (base,
4958 offset + 1))));
4959 if (!BYTES_BIG_ENDIAN)
4960 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
4961 gen_rtx_IOR (SImode,
4962 gen_rtx_ASHIFT
4963 (SImode,
4964 gen_rtx_SUBREG (SImode, operands[0], 0),
4965 GEN_INT (8)),
4966 scratch)));
4967 else
4968 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
4969 gen_rtx_IOR (SImode,
4970 gen_rtx_ASHIFT (SImode, scratch,
4971 GEN_INT (8)),
4972 gen_rtx_SUBREG (SImode, operands[0],
4973 0))));
4976 /* Handle storing a half-word to memory during reload by synthesising it as two
4977 byte stores. Take care not to clobber the input values until after we
4978 have moved them somewhere safe. This code assumes that if the DImode
4979 scratch in operands[2] overlaps either the input value or output address
4980 in some way, then that value must die in this insn (we absolutely need
4981 two scratch registers for some corner cases). */
4983 void
4984 arm_reload_out_hi (operands)
4985 rtx * operands;
4987 rtx ref = operands[0];
4988 rtx outval = operands[1];
4989 rtx base, scratch;
4990 HOST_WIDE_INT offset = 0;
4992 if (GET_CODE (ref) == SUBREG)
4994 offset = SUBREG_BYTE (ref);
4995 ref = SUBREG_REG (ref);
4998 if (GET_CODE (ref) == REG)
5000 /* We have a pseudo which has been spilt onto the stack; there
5001 are two cases here: the first where there is a simple
5002 stack-slot replacement and a second where the stack-slot is
5003 out of range, or is used as a subreg. */
5004 if (reg_equiv_mem[REGNO (ref)])
5006 ref = reg_equiv_mem[REGNO (ref)];
5007 base = find_replacement (&XEXP (ref, 0));
5009 else
5010 /* The slot is out of range, or was dressed up in a SUBREG. */
5011 base = reg_equiv_address[REGNO (ref)];
5013 else
5014 base = find_replacement (&XEXP (ref, 0));
5016 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
5018 /* Handle the case where the address is too complex to be offset by 1. */
5019 if (GET_CODE (base) == MINUS
5020 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
5022 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5024 /* Be careful not to destroy OUTVAL. */
5025 if (reg_overlap_mentioned_p (base_plus, outval))
5027 /* Updating base_plus might destroy outval, see if we can
5028 swap the scratch and base_plus. */
5029 if (!reg_overlap_mentioned_p (scratch, outval))
5031 rtx tmp = scratch;
5032 scratch = base_plus;
5033 base_plus = tmp;
5035 else
5037 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
5039 /* Be conservative and copy OUTVAL into the scratch now;
5040 this should only be necessary if outval is a subreg
5041 of something larger than a word. */
5042 /* XXX Might this clobber base? I can't see how it can,
5043 since scratch is known to overlap with OUTVAL, and
5044 must be wider than a word. */
5045 emit_insn (gen_movhi (scratch_hi, outval));
5046 outval = scratch_hi;
5050 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
5051 base = base_plus;
5053 else if (GET_CODE (base) == PLUS)
5055 /* The addend must be CONST_INT, or we would have dealt with it above. */
5056 HOST_WIDE_INT hi, lo;
5058 offset += INTVAL (XEXP (base, 1));
5059 base = XEXP (base, 0);
5061 /* Rework the address into a legal sequence of insns. */
5062 /* Valid range for lo is -4095 -> 4095 */
5063 lo = (offset >= 0
5064 ? (offset & 0xfff)
5065 : -((-offset) & 0xfff));
5067 /* Corner case: if lo is the maximum offset, then we would be out of range
5068 once we have added the additional 1 below, so bump the msb into the
5069 pre-loading insn(s). */
5070 if (lo == 4095)
5071 lo &= 0x7ff;
5073 hi = ((((offset - lo) & (HOST_WIDE_INT) 0xffffffff)
5074 ^ (HOST_WIDE_INT) 0x80000000)
5075 - (HOST_WIDE_INT) 0x80000000);
5077 if (hi + lo != offset)
5078 abort ();
5080 if (hi != 0)
5082 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5084 /* Be careful not to destroy OUTVAL. */
5085 if (reg_overlap_mentioned_p (base_plus, outval))
5087 /* Updating base_plus might destroy outval, see if we
5088 can swap the scratch and base_plus. */
5089 if (!reg_overlap_mentioned_p (scratch, outval))
5091 rtx tmp = scratch;
5092 scratch = base_plus;
5093 base_plus = tmp;
5095 else
5097 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
5099 /* Be conservative and copy outval into scratch now;
5100 this should only be necessary if outval is a
5101 subreg of something larger than a word. */
5102 /* XXX Might this clobber base? I can't see how it
5103 can, since scratch is known to overlap with
5104 outval. */
5105 emit_insn (gen_movhi (scratch_hi, outval));
5106 outval = scratch_hi;
5110 /* Get the base address; addsi3 knows how to handle constants
5111 that require more than one insn. */
5112 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
5113 base = base_plus;
5114 offset = lo;
5118 if (BYTES_BIG_ENDIAN)
5120 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
5121 plus_constant (base, offset + 1)),
5122 gen_rtx_SUBREG (QImode, outval, 0)));
5123 emit_insn (gen_lshrsi3 (scratch,
5124 gen_rtx_SUBREG (SImode, outval, 0),
5125 GEN_INT (8)));
5126 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
5127 gen_rtx_SUBREG (QImode, scratch, 0)));
5129 else
5131 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
5132 gen_rtx_SUBREG (QImode, outval, 0)));
5133 emit_insn (gen_lshrsi3 (scratch,
5134 gen_rtx_SUBREG (SImode, outval, 0),
5135 GEN_INT (8)));
5136 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
5137 plus_constant (base, offset + 1)),
5138 gen_rtx_SUBREG (QImode, scratch, 0)));
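/* Illustrative sketch, not part of the original source: the offset
   splitting used by arm_reload_in_hi and arm_reload_out_hi above.  LO
   keeps the low 12 bits (halved in the 4095 corner case) and HI is the
   sign-extended remainder, so HI + LO always reconstructs OFFSET while
   LO stays a legal +/-4095 addressing offset.  Standalone ISO C using
   long long in place of HOST_WIDE_INT, fenced with #if 0.  */
#if 0
#include <stdio.h>

int
main (void)
{
  static const long long offsets[] = { 4095, 4096, -4100, 70000 };
  int i;

  for (i = 0; i < 4; i++)
    {
      long long offset = offsets[i];
      long long lo = (offset >= 0
                      ? (offset & 0xfff)
                      : -((-offset) & 0xfff));
      long long hi;

      if (lo == 4095)
        lo &= 0x7ff;

      hi = ((((offset - lo) & 0xffffffffLL) ^ 0x80000000LL)
            - 0x80000000LL);

      printf ("offset=%6lld hi=%6lld lo=%5lld sum=%6lld\n",
              offset, hi, lo, hi + lo);
    }
  return 0;
}
#endif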
5142 /* Print a symbolic form of X to the debug file, F. */
5144 static void
5145 arm_print_value (f, x)
5146 FILE * f;
5147 rtx x;
5149 switch (GET_CODE (x))
5151 case CONST_INT:
5152 fprintf (f, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
5153 return;
5155 case CONST_DOUBLE:
5156 fprintf (f, "<0x%lx,0x%lx>", (long)XWINT (x, 2), (long)XWINT (x, 3));
5157 return;
5159 case CONST_STRING:
5160 fprintf (f, "\"%s\"", XSTR (x, 0));
5161 return;
5163 case SYMBOL_REF:
5164 fprintf (f, "`%s'", XSTR (x, 0));
5165 return;
5167 case LABEL_REF:
5168 fprintf (f, "L%d", INSN_UID (XEXP (x, 0)));
5169 return;
5171 case CONST:
5172 arm_print_value (f, XEXP (x, 0));
5173 return;
5175 case PLUS:
5176 arm_print_value (f, XEXP (x, 0));
5177 fprintf (f, "+");
5178 arm_print_value (f, XEXP (x, 1));
5179 return;
5181 case PC:
5182 fprintf (f, "pc");
5183 return;
5185 default:
5186 fprintf (f, "????");
5187 return;
5191 /* Routines for manipulation of the constant pool. */
5193 /* Arm instructions cannot load a large constant directly into a
5194 register; they have to come from a pc relative load. The constant
5195 must therefore be placed in the addressable range of the pc
5196 relative load. Depending on the precise pc relative load
5197 instruction the range is somewhere between 256 bytes and 4k. This
5198 means that we often have to dump a constant inside a function, and
5199 generate code to branch around it.
5201 It is important to minimize this, since the branches will slow
5202 things down and make the code larger.
5204 Normally we can hide the table after an existing unconditional
5205 branch so that there is no interruption of the flow, but in the
5206 worst case the code looks like this:
5208 ldr rn, L1
5210 b L2
5211 align
5212 L1: .long value
5213 L2:
5216 ldr rn, L3
5218 b L4
5219 align
5220 L3: .long value
5221 L4:
5224 We fix this by performing a scan after scheduling, which notices
5225 which instructions need to have their operands fetched from the
5226 constant table and builds the table.
5228 The algorithm starts by building a table of all the constants that
5229 need fixing up and all the natural barriers in the function (places
5230 where a constant table can be dropped without breaking the flow).
5231 For each fixup we note how far the pc-relative replacement will be
5232 able to reach and the offset of the instruction into the function.
5234 Having built the table we then group the fixes together to form
5235 tables that are as large as possible (subject to addressing
5236 constraints) and emit each table of constants after the last
5237 barrier that is within range of all the instructions in the group.
5238 If a group does not contain a barrier, then we forcibly create one
5239 by inserting a jump instruction into the flow. Once the table has
5240 been inserted, the insns are then modified to reference the
5241 relevant entry in the pool.
5243 Possible enhancements to the algorithm (not implemented) are:
5245 1) For some processors and object formats, there may be benefit in
5246 aligning the pools to the start of cache lines; this alignment
5247 would need to be taken into account when calculating addressability
5248 of a pool. */
5250 /* These typedefs are located at the start of this file, so that
5251 they can be used in the prototypes there. This comment is to
5252 remind readers of that fact so that the following structures
5253 can be understood more easily.
5255 typedef struct minipool_node Mnode;
5256 typedef struct minipool_fixup Mfix; */
5258 struct minipool_node
5260 /* Doubly linked chain of entries. */
5261 Mnode * next;
5262 Mnode * prev;
5263 /* The maximum offset into the code at which this entry can be placed. While
5264 pushing fixes for forward references, all entries are sorted in order
5265 of increasing max_address. */
5266 HOST_WIDE_INT max_address;
5267 /* Similarly for an entry inserted for a backwards ref. */
5268 HOST_WIDE_INT min_address;
5269 /* The number of fixes referencing this entry. This can become zero
5270 if we "unpush" an entry. In this case we ignore the entry when we
5271 come to emit the code. */
5272 int refcount;
5273 /* The offset from the start of the minipool. */
5274 HOST_WIDE_INT offset;
5275 /* The value in the table. */
5276 rtx value;
5277 /* The mode of value. */
5278 enum machine_mode mode;
5279 int fix_size;
5282 struct minipool_fixup
5284 Mfix * next;
5285 rtx insn;
5286 HOST_WIDE_INT address;
5287 rtx * loc;
5288 enum machine_mode mode;
5289 int fix_size;
5290 rtx value;
5291 Mnode * minipool;
5292 HOST_WIDE_INT forwards;
5293 HOST_WIDE_INT backwards;
5296 /* Fixes less than a word need padding out to a word boundary. */
5297 #define MINIPOOL_FIX_SIZE(mode) \
5298 (GET_MODE_SIZE ((mode)) >= 4 ? GET_MODE_SIZE ((mode)) : 4)
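/* Illustrative sketch, not part of the original source: the padding rule
   above.  Sub-word constants still occupy a full word in the pool, so
   1- and 2-byte fixes cost 4 bytes while an 8-byte fix costs 8.
   Standalone ISO C, fenced with #if 0.  */
#if 0
#include <stdio.h>

#define FIX_SIZE(bytes) ((bytes) >= 4 ? (bytes) : 4)

int
main (void)
{
  printf ("%d %d %d %d\n",
          FIX_SIZE (1), FIX_SIZE (2), FIX_SIZE (4), FIX_SIZE (8));
  /* prints: 4 4 4 8 */
  return 0;
}
#endif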
5300 static Mnode * minipool_vector_head;
5301 static Mnode * minipool_vector_tail;
5302 static rtx minipool_vector_label;
5304 /* The linked list of all minipool fixes required for this function. */
5305 Mfix * minipool_fix_head;
5306 Mfix * minipool_fix_tail;
5307 /* The fix entry for the current minipool, once it has been placed. */
5308 Mfix * minipool_barrier;
5310 /* Determines if INSN is the start of a jump table. Returns the end
5311 of the TABLE or NULL_RTX. */
5313 static rtx
5314 is_jump_table (insn)
5315 rtx insn;
5317 rtx table;
5319 if (GET_CODE (insn) == JUMP_INSN
5320 && JUMP_LABEL (insn) != NULL
5321 && ((table = next_real_insn (JUMP_LABEL (insn)))
5322 == next_real_insn (insn))
5323 && table != NULL
5324 && GET_CODE (table) == JUMP_INSN
5325 && (GET_CODE (PATTERN (table)) == ADDR_VEC
5326 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
5327 return table;
5329 return NULL_RTX;
5332 static HOST_WIDE_INT
5333 get_jump_table_size (insn)
5334 rtx insn;
5336 rtx body = PATTERN (insn);
5337 int elt = GET_CODE (body) == ADDR_DIFF_VEC ? 1 : 0;
5339 return GET_MODE_SIZE (GET_MODE (body)) * XVECLEN (body, elt);
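/* Illustrative sketch, not part of the original source: the size
   computation above.  A dispatch table of 10 entries whose vector mode
   is 4 bytes wide adds 40 bytes to the running address when the table
   is skipped.  Standalone ISO C, fenced with #if 0.  */
#if 0
#include <stdio.h>

int
main (void)
{
  int mode_size = 4;  /* GET_MODE_SIZE of the table's mode */
  int entries = 10;   /* XVECLEN of the ADDR_VEC/ADDR_DIFF_VEC */

  printf ("table size = %d bytes\n", mode_size * entries);
  return 0;
}
#endif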
5342 /* Move a minipool fix MP from its current location to before MAX_MP.
5343 If MAX_MP is NULL, then MP doesn't need moving, but the addressing
5344 constraints may need updating. */
5346 static Mnode *
5347 move_minipool_fix_forward_ref (mp, max_mp, max_address)
5348 Mnode * mp;
5349 Mnode * max_mp;
5350 HOST_WIDE_INT max_address;
5352 /* This should never be true and the code below assumes these are
5353 different. */
5354 if (mp == max_mp)
5355 abort ();
5357 if (max_mp == NULL)
5359 if (max_address < mp->max_address)
5360 mp->max_address = max_address;
5362 else
5364 if (max_address > max_mp->max_address - mp->fix_size)
5365 mp->max_address = max_mp->max_address - mp->fix_size;
5366 else
5367 mp->max_address = max_address;
5369 /* Unlink MP from its current position. Since max_mp is non-null,
5370 mp->prev must be non-null. */
5371 mp->prev->next = mp->next;
5372 if (mp->next != NULL)
5373 mp->next->prev = mp->prev;
5374 else
5375 minipool_vector_tail = mp->prev;
5377 /* Re-insert it before MAX_MP. */
5378 mp->next = max_mp;
5379 mp->prev = max_mp->prev;
5380 max_mp->prev = mp;
5382 if (mp->prev != NULL)
5383 mp->prev->next = mp;
5384 else
5385 minipool_vector_head = mp;
5388 /* Save the new entry. */
5389 max_mp = mp;
5391 /* Scan over the preceding entries and adjust their addresses as
5392 required. */
5393 while (mp->prev != NULL
5394 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
5396 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
5397 mp = mp->prev;
5400 return max_mp;
5403 /* Add a constant to the minipool for a forward reference. Returns the
5404 node added or NULL if the constant will not fit in this pool. */
5406 static Mnode *
5407 add_minipool_forward_ref (fix)
5408 Mfix * fix;
5410 /* If set, max_mp is the first pool_entry that has a lower
5411 constraint than the one we are trying to add. */
5412 Mnode * max_mp = NULL;
5413 HOST_WIDE_INT max_address = fix->address + fix->forwards;
5414 Mnode * mp;
5416 /* If this fix's address is greater than the address of the first
5417 entry, then we can't put the fix in this pool. We subtract the
5418 size of the current fix to ensure that if the table is fully
5419 packed we still have enough room to insert this value by shuffling
5420 the other fixes forwards. */
5421 if (minipool_vector_head &&
5422 fix->address >= minipool_vector_head->max_address - fix->fix_size)
5423 return NULL;
5425 /* Scan the pool to see if a constant with the same value has
5426 already been added. While we are doing this, also note the
5427 location where we must insert the constant if it doesn't already
5428 exist. */
5429 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
5431 if (GET_CODE (fix->value) == GET_CODE (mp->value)
5432 && fix->mode == mp->mode
5433 && (GET_CODE (fix->value) != CODE_LABEL
5434 || (CODE_LABEL_NUMBER (fix->value)
5435 == CODE_LABEL_NUMBER (mp->value)))
5436 && rtx_equal_p (fix->value, mp->value))
5438 /* More than one fix references this entry. */
5439 mp->refcount++;
5440 return move_minipool_fix_forward_ref (mp, max_mp, max_address);
5443 /* Note the insertion point if necessary. */
5444 if (max_mp == NULL
5445 && mp->max_address > max_address)
5446 max_mp = mp;
5449 /* The value is not currently in the minipool, so we need to create
5450 a new entry for it. If MAX_MP is NULL, the entry will be put on
5451 the end of the list since the placement is less constrained than
5452 any existing entry. Otherwise, we insert the new fix before
5453 MAX_MP and, if necessary, adjust the constraints on the other
5454 entries. */
5455 mp = xmalloc (sizeof (* mp));
5456 mp->fix_size = fix->fix_size;
5457 mp->mode = fix->mode;
5458 mp->value = fix->value;
5459 mp->refcount = 1;
5460 /* Not yet required for a backwards ref. */
5461 mp->min_address = -65536;
5463 if (max_mp == NULL)
5465 mp->max_address = max_address;
5466 mp->next = NULL;
5467 mp->prev = minipool_vector_tail;
5469 if (mp->prev == NULL)
5471 minipool_vector_head = mp;
5472 minipool_vector_label = gen_label_rtx ();
5474 else
5475 mp->prev->next = mp;
5477 minipool_vector_tail = mp;
5479 else
5481 if (max_address > max_mp->max_address - mp->fix_size)
5482 mp->max_address = max_mp->max_address - mp->fix_size;
5483 else
5484 mp->max_address = max_address;
5486 mp->next = max_mp;
5487 mp->prev = max_mp->prev;
5488 max_mp->prev = mp;
5489 if (mp->prev != NULL)
5490 mp->prev->next = mp;
5491 else
5492 minipool_vector_head = mp;
5495 /* Save the new entry. */
5496 max_mp = mp;
5498 /* Scan over the preceding entries and adjust their addresses as
5499 required. */
5500 while (mp->prev != NULL
5501 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
5503 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
5504 mp = mp->prev;
5507 return max_mp;
5510 static Mnode *
5511 move_minipool_fix_backward_ref (mp, min_mp, min_address)
5512 Mnode * mp;
5513 Mnode * min_mp;
5514 HOST_WIDE_INT min_address;
5516 HOST_WIDE_INT offset;
5518 /* This should never be true, and the code below assumes these are
5519 different. */
5520 if (mp == min_mp)
5521 abort ();
5523 if (min_mp == NULL)
5525 if (min_address > mp->min_address)
5526 mp->min_address = min_address;
5528 else
5530 /* We will adjust this below if it is too loose. */
5531 mp->min_address = min_address;
5533 /* Unlink MP from its current position. Since min_mp is non-null,
5534 mp->next must be non-null. */
5535 mp->next->prev = mp->prev;
5536 if (mp->prev != NULL)
5537 mp->prev->next = mp->next;
5538 else
5539 minipool_vector_head = mp->next;
5541 /* Reinsert it after MIN_MP. */
5542 mp->prev = min_mp;
5543 mp->next = min_mp->next;
5544 min_mp->next = mp;
5545 if (mp->next != NULL)
5546 mp->next->prev = mp;
5547 else
5548 minipool_vector_tail = mp;
5551 min_mp = mp;
5553 offset = 0;
5554 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
5556 mp->offset = offset;
5557 if (mp->refcount > 0)
5558 offset += mp->fix_size;
5560 if (mp->next && mp->next->min_address < mp->min_address + mp->fix_size)
5561 mp->next->min_address = mp->min_address + mp->fix_size;
5564 return min_mp;
5567 /* Add a constant to the minipool for a backward reference. Returns the
5568 node added or NULL if the constant will not fit in this pool.
5570 Note that the code for insertion for a backwards reference can be
5571 somewhat confusing because the calculated offsets for each fix do
5572 not take into account the size of the pool (which is still under
5573 construction). */
5575 static Mnode *
5576 add_minipool_backward_ref (fix)
5577 Mfix * fix;
5579 /* If set, min_mp is the last pool_entry that has a lower constraint
5580 than the one we are trying to add. */
5581 Mnode * min_mp = NULL;
5582 /* This can be negative, since it is only a constraint. */
5583 HOST_WIDE_INT min_address = fix->address - fix->backwards;
5584 Mnode * mp;
5586 /* If we can't reach the current pool from this insn, or if we can't
5587 insert this entry at the end of the pool without pushing other
5588 fixes out of range, then we don't try. This ensures that we
5589 can't fail later on. */
5590 if (min_address >= minipool_barrier->address
5591 || (minipool_vector_tail->min_address + fix->fix_size
5592 >= minipool_barrier->address))
5593 return NULL;
5595 /* Scan the pool to see if a constant with the same value has
5596 already been added. While we are doing this, also note the
5597 location where we must insert the constant if it doesn't already
5598 exist. */
5599 for (mp = minipool_vector_tail; mp != NULL; mp = mp->prev)
5601 if (GET_CODE (fix->value) == GET_CODE (mp->value)
5602 && fix->mode == mp->mode
5603 && (GET_CODE (fix->value) != CODE_LABEL
5604 || (CODE_LABEL_NUMBER (fix->value)
5605 == CODE_LABEL_NUMBER (mp->value)))
5606 && rtx_equal_p (fix->value, mp->value)
5607 /* Check that there is enough slack to move this entry to the
5608 end of the table (this is conservative). */
5609 && (mp->max_address
5610 > (minipool_barrier->address
5611 + minipool_vector_tail->offset
5612 + minipool_vector_tail->fix_size)))
5614 mp->refcount++;
5615 return move_minipool_fix_backward_ref (mp, min_mp, min_address);
5618 if (min_mp != NULL)
5619 mp->min_address += fix->fix_size;
5620 else
5622 /* Note the insertion point if necessary. */
5623 if (mp->min_address < min_address)
5624 min_mp = mp;
5625 else if (mp->max_address
5626 < minipool_barrier->address + mp->offset + fix->fix_size)
5628 /* Inserting before this entry would push the fix beyond
5629 its maximum address (which can happen if we have
5630 re-located a forwards fix); force the new fix to come
5631 after it. */
5632 min_mp = mp;
5633 min_address = mp->min_address + fix->fix_size;
5638 /* We need to create a new entry. */
5639 mp = xmalloc (sizeof (* mp));
5640 mp->fix_size = fix->fix_size;
5641 mp->mode = fix->mode;
5642 mp->value = fix->value;
5643 mp->refcount = 1;
5644 mp->max_address = minipool_barrier->address + 65536;
5646 mp->min_address = min_address;
5648 if (min_mp == NULL)
5650 mp->prev = NULL;
5651 mp->next = minipool_vector_head;
5653 if (mp->next == NULL)
5655 minipool_vector_tail = mp;
5656 minipool_vector_label = gen_label_rtx ();
5658 else
5659 mp->next->prev = mp;
5661 minipool_vector_head = mp;
5663 else
5665 mp->next = min_mp->next;
5666 mp->prev = min_mp;
5667 min_mp->next = mp;
5669 if (mp->next != NULL)
5670 mp->next->prev = mp;
5671 else
5672 minipool_vector_tail = mp;
5675 /* Save the new entry. */
5676 min_mp = mp;
5678 if (mp->prev)
5679 mp = mp->prev;
5680 else
5681 mp->offset = 0;
5683 /* Scan over the following entries and adjust their offsets. */
5684 while (mp->next != NULL)
5686 if (mp->next->min_address < mp->min_address + mp->fix_size)
5687 mp->next->min_address = mp->min_address + mp->fix_size;
5689 if (mp->refcount)
5690 mp->next->offset = mp->offset + mp->fix_size;
5691 else
5692 mp->next->offset = mp->offset;
5694 mp = mp->next;
5697 return min_mp;
5700 static void
5701 assign_minipool_offsets (barrier)
5702 Mfix * barrier;
5704 HOST_WIDE_INT offset = 0;
5705 Mnode * mp;
5707 minipool_barrier = barrier;
5709 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
5711 mp->offset = offset;
5713 if (mp->refcount > 0)
5714 offset += mp->fix_size;
5718 /* Output the literal table */
5719 static void
5720 dump_minipool (scan)
5721 rtx scan;
5723 Mnode * mp;
5724 Mnode * nmp;
5726 if (rtl_dump_file)
5727 fprintf (rtl_dump_file,
5728 ";; Emitting minipool after insn %u; address %ld\n",
5729 INSN_UID (scan), (unsigned long) minipool_barrier->address);
5731 scan = emit_label_after (gen_label_rtx (), scan);
5732 scan = emit_insn_after (gen_align_4 (), scan);
5733 scan = emit_label_after (minipool_vector_label, scan);
5735 for (mp = minipool_vector_head; mp != NULL; mp = nmp)
5737 if (mp->refcount > 0)
5739 if (rtl_dump_file)
5741 fprintf (rtl_dump_file,
5742 ";; Offset %u, min %ld, max %ld ",
5743 (unsigned) mp->offset, (unsigned long) mp->min_address,
5744 (unsigned long) mp->max_address);
5745 arm_print_value (rtl_dump_file, mp->value);
5746 fputc ('\n', rtl_dump_file);
5749 switch (mp->fix_size)
5751 #ifdef HAVE_consttable_1
5752 case 1:
5753 scan = emit_insn_after (gen_consttable_1 (mp->value), scan);
5754 break;
5756 #endif
5757 #ifdef HAVE_consttable_2
5758 case 2:
5759 scan = emit_insn_after (gen_consttable_2 (mp->value), scan);
5760 break;
5762 #endif
5763 #ifdef HAVE_consttable_4
5764 case 4:
5765 scan = emit_insn_after (gen_consttable_4 (mp->value), scan);
5766 break;
5768 #endif
5769 #ifdef HAVE_consttable_8
5770 case 8:
5771 scan = emit_insn_after (gen_consttable_8 (mp->value), scan);
5772 break;
5774 #endif
5775 default:
5776 abort ();
5777 break;
5781 nmp = mp->next;
5782 free (mp);
5785 minipool_vector_head = minipool_vector_tail = NULL;
5786 scan = emit_insn_after (gen_consttable_end (), scan);
5787 scan = emit_barrier_after (scan);
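/* Illustrative note, not part of the original source: the sequence
   emitted above lays a pool out roughly as

       <fresh label>
       .align
   Lpool:                  (minipool_vector_label)
       .word  value0       (offset 0)
       .word  value1       (offset 4, entries with refcount > 0 only)
       ...

   followed by an end-of-table marker and a barrier.  */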
5790 /* Return the cost of forcibly inserting a barrier after INSN. */
5792 static int
5793 arm_barrier_cost (insn)
5794 rtx insn;
5796 /* Basing the location of the pool on the loop depth is preferable,
5797 but at the moment, the basic block information seems to be
5798 corrupt by this stage of the compilation. */
5799 int base_cost = 50;
5800 rtx next = next_nonnote_insn (insn);
5802 if (next != NULL && GET_CODE (next) == CODE_LABEL)
5803 base_cost -= 20;
5805 switch (GET_CODE (insn))
5807 case CODE_LABEL:
5808 /* It will always be better to place the table before the label, rather
5809 than after it. */
5810 return 50;
5812 case INSN:
5813 case CALL_INSN:
5814 return base_cost;
5816 case JUMP_INSN:
5817 return base_cost - 10;
5819 default:
5820 return base_cost + 10;
5824 /* Find the best place in the insn stream in the range
5825 (FIX->address,MAX_ADDRESS) to forcibly insert a minipool barrier.
5826 Create the barrier by inserting a jump and add a new fix entry for
5827 it. */
5829 static Mfix *
5830 create_fix_barrier (fix, max_address)
5831 Mfix * fix;
5832 HOST_WIDE_INT max_address;
5834 HOST_WIDE_INT count = 0;
5835 rtx barrier;
5836 rtx from = fix->insn;
5837 rtx selected = from;
5838 int selected_cost;
5839 HOST_WIDE_INT selected_address;
5840 Mfix * new_fix;
5841 HOST_WIDE_INT max_count = max_address - fix->address;
5842 rtx label = gen_label_rtx ();
5844 selected_cost = arm_barrier_cost (from);
5845 selected_address = fix->address;
5847 while (from && count < max_count)
5849 rtx tmp;
5850 int new_cost;
5852 /* This code shouldn't have been called if there was a natural barrier
5853 within range. */
5854 if (GET_CODE (from) == BARRIER)
5855 abort ();
5857 /* Count the length of this insn. */
5858 count += get_attr_length (from);
5860 /* If there is a jump table, add its length. */
5861 tmp = is_jump_table (from);
5862 if (tmp != NULL)
5864 count += get_jump_table_size (tmp);
5866 /* Jump tables aren't in a basic block, so base the cost on
5867 the dispatch insn. If we select this location, we will
5868 still put the pool after the table. */
5869 new_cost = arm_barrier_cost (from);
5871 if (count < max_count && new_cost <= selected_cost)
5873 selected = tmp;
5874 selected_cost = new_cost;
5875 selected_address = fix->address + count;
5878 /* Continue after the dispatch table. */
5879 from = NEXT_INSN (tmp);
5880 continue;
5883 new_cost = arm_barrier_cost (from);
5885 if (count < max_count && new_cost <= selected_cost)
5887 selected = from;
5888 selected_cost = new_cost;
5889 selected_address = fix->address + count;
5892 from = NEXT_INSN (from);
5895 /* Create a new JUMP_INSN that branches around a barrier. */
5896 from = emit_jump_insn_after (gen_jump (label), selected);
5897 JUMP_LABEL (from) = label;
5898 barrier = emit_barrier_after (from);
5899 emit_label_after (label, barrier);
5901 /* Create a minipool barrier entry for the new barrier. */
5902 new_fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* new_fix));
5903 new_fix->insn = barrier;
5904 new_fix->address = selected_address;
5905 new_fix->next = fix->next;
5906 fix->next = new_fix;
5908 return new_fix;
5911 /* Record that there is a natural barrier in the insn stream at
5912 ADDRESS. */
5913 static void
5914 push_minipool_barrier (insn, address)
5915 rtx insn;
5916 HOST_WIDE_INT address;
5918 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
5920 fix->insn = insn;
5921 fix->address = address;
5923 fix->next = NULL;
5924 if (minipool_fix_head != NULL)
5925 minipool_fix_tail->next = fix;
5926 else
5927 minipool_fix_head = fix;
5929 minipool_fix_tail = fix;
5932 /* Record INSN, which will need fixing up to load a value from the
5933 minipool. ADDRESS is the offset of the insn since the start of the
5934 function; LOC is a pointer to the part of the insn which requires
5935 fixing; VALUE is the constant that must be loaded, which is of type
5936 MODE. */
5937 static void
5938 push_minipool_fix (insn, address, loc, mode, value)
5939 rtx insn;
5940 HOST_WIDE_INT address;
5941 rtx * loc;
5942 enum machine_mode mode;
5943 rtx value;
5945 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
5947 #ifdef AOF_ASSEMBLER
5948 /* PIC symbol references need to be converted into offsets into the
5949 based area. */
5950 /* XXX This shouldn't be done here. */
5951 if (flag_pic && GET_CODE (value) == SYMBOL_REF)
5952 value = aof_pic_entry (value);
5953 #endif /* AOF_ASSEMBLER */
5955 fix->insn = insn;
5956 fix->address = address;
5957 fix->loc = loc;
5958 fix->mode = mode;
5959 fix->fix_size = MINIPOOL_FIX_SIZE (mode);
5960 fix->value = value;
5961 fix->forwards = get_attr_pool_range (insn);
5962 fix->backwards = get_attr_neg_pool_range (insn);
5963 fix->minipool = NULL;
5965 /* If an insn doesn't have a range defined for it, then it isn't
5966 expecting to be reworked by this code. Better to abort now than
5967 to generate duff assembly code. */
5968 if (fix->forwards == 0 && fix->backwards == 0)
5969 abort ();
5971 if (rtl_dump_file)
5973 fprintf (rtl_dump_file,
5974 ";; %smode fixup for i%d; addr %lu, range (%ld,%ld): ",
5975 GET_MODE_NAME (mode),
5976 INSN_UID (insn), (unsigned long) address,
5977 -1 * (long)fix->backwards, (long)fix->forwards);
5978 arm_print_value (rtl_dump_file, fix->value);
5979 fprintf (rtl_dump_file, "\n");
5982 /* Add it to the chain of fixes. */
5983 fix->next = NULL;
5985 if (minipool_fix_head != NULL)
5986 minipool_fix_tail->next = fix;
5987 else
5988 minipool_fix_head = fix;
5990 minipool_fix_tail = fix;
5993 /* Scan INSN and note any of its operands that need fixing. */
5995 static void
5996 note_invalid_constants (insn, address)
5997 rtx insn;
5998 HOST_WIDE_INT address;
6000 int opno;
6002 extract_insn (insn);
6004 if (!constrain_operands (1))
6005 fatal_insn_not_found (insn);
6007 /* Fill in recog_op_alt with information about the constraints of this
6008 insn. */
6009 preprocess_constraints ();
6011 for (opno = 0; opno < recog_data.n_operands; opno++)
6013 /* Things we need to fix can only occur in inputs. */
6014 if (recog_data.operand_type[opno] != OP_IN)
6015 continue;
6017 /* If this alternative is a memory reference, then any mention
6018 of constants in this alternative is really to fool reload
6019 into allowing us to accept one there. We need to fix them up
6020 now so that we output the right code. */
6021 if (recog_op_alt[opno][which_alternative].memory_ok)
6023 rtx op = recog_data.operand[opno];
6025 if (CONSTANT_P (op))
6026 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
6027 recog_data.operand_mode[opno], op);
6028 #if 0
6029 /* RWE: Now we look correctly at the operands for the insn,
6030 this shouldn't be needed any more. */
6031 #ifndef AOF_ASSEMBLER
6032 /* XXX Is this still needed? */
6033 else if (GET_CODE (op) == UNSPEC && XINT (op, 1) == UNSPEC_PIC_SYM)
6034 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
6035 recog_data.operand_mode[opno],
6036 XVECEXP (op, 0, 0));
6037 #endif
6038 #endif
6039 else if (GET_CODE (op) == MEM
6040 && GET_CODE (XEXP (op, 0)) == SYMBOL_REF
6041 && CONSTANT_POOL_ADDRESS_P (XEXP (op, 0)))
6042 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
6043 recog_data.operand_mode[opno],
6044 get_pool_constant (XEXP (op, 0)));
6049 void
6050 arm_reorg (first)
6051 rtx first;
6053 rtx insn;
6054 HOST_WIDE_INT address = 0;
6055 Mfix * fix;
6057 minipool_fix_head = minipool_fix_tail = NULL;
6059 /* The first insn must always be a note, or the code below won't
6060 scan it properly. */
6061 if (GET_CODE (first) != NOTE)
6062 abort ();
6064 /* Scan all the insns and record the operands that will need fixing. */
6065 for (insn = next_nonnote_insn (first); insn; insn = next_nonnote_insn (insn))
6067 if (GET_CODE (insn) == BARRIER)
6068 push_minipool_barrier (insn, address);
6069 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
6070 || GET_CODE (insn) == JUMP_INSN)
6072 rtx table;
6074 note_invalid_constants (insn, address);
6075 address += get_attr_length (insn);
6077 /* If the insn is a vector jump, add the size of the table
6078 and skip the table. */
6079 if ((table = is_jump_table (insn)) != NULL)
6081 address += get_jump_table_size (table);
6082 insn = table;
6087 fix = minipool_fix_head;
6089 /* Now scan the fixups and perform the required changes. */
6090 while (fix)
6092 Mfix * ftmp;
6093 Mfix * fdel;
6094 Mfix * last_added_fix;
6095 Mfix * last_barrier = NULL;
6096 Mfix * this_fix;
6098 /* Skip any further barriers before the next fix. */
6099 while (fix && GET_CODE (fix->insn) == BARRIER)
6100 fix = fix->next;
6102 /* No more fixes. */
6103 if (fix == NULL)
6104 break;
6106 last_added_fix = NULL;
6108 for (ftmp = fix; ftmp; ftmp = ftmp->next)
6110 if (GET_CODE (ftmp->insn) == BARRIER)
6112 if (ftmp->address >= minipool_vector_head->max_address)
6113 break;
6115 last_barrier = ftmp;
6117 else if ((ftmp->minipool = add_minipool_forward_ref (ftmp)) == NULL)
6118 break;
6120 last_added_fix = ftmp; /* Keep track of the last fix added. */
6123 /* If we found a barrier, drop back to that; any fixes that we
6124 could have reached but come after the barrier will now go in
6125 the next mini-pool. */
6126 if (last_barrier != NULL)
6128 /* Reduce the refcount for those fixes that won't go into this
6129 pool after all. */
6130 for (fdel = last_barrier->next;
6131 fdel && fdel != ftmp;
6132 fdel = fdel->next)
6134 fdel->minipool->refcount--;
6135 fdel->minipool = NULL;
6138 ftmp = last_barrier;
6140 else
6142 /* ftmp is the first fix that we can't fit into this pool and
6143 there are no natural barriers that we could use. Insert a
6144 new barrier in the code somewhere between the previous
6145 fix and this one, and arrange to jump around it. */
6146 HOST_WIDE_INT max_address;
6148 /* The last item on the list of fixes must be a barrier, so
6149 we can never run off the end of the list of fixes without
6150 last_barrier being set. */
6151 if (ftmp == NULL)
6152 abort ();
6154 max_address = minipool_vector_head->max_address;
6155 /* Check that there isn't another fix that is in range that
6156 we couldn't fit into this pool because the pool was
6157 already too large: we need to put the pool before such an
6158 instruction. */
6159 if (ftmp->address < max_address)
6160 max_address = ftmp->address;
6162 last_barrier = create_fix_barrier (last_added_fix, max_address);
6165 assign_minipool_offsets (last_barrier);
6167 while (ftmp)
6169 if (GET_CODE (ftmp->insn) != BARRIER
6170 && ((ftmp->minipool = add_minipool_backward_ref (ftmp))
6171 == NULL))
6172 break;
6174 ftmp = ftmp->next;
6177 /* Scan over the fixes we have identified for this pool, fixing them
6178 up and adding the constants to the pool itself. */
6179 for (this_fix = fix; this_fix && ftmp != this_fix;
6180 this_fix = this_fix->next)
6181 if (GET_CODE (this_fix->insn) != BARRIER)
6183 rtx addr
6184 = plus_constant (gen_rtx_LABEL_REF (VOIDmode,
6185 minipool_vector_label),
6186 this_fix->minipool->offset);
6187 *this_fix->loc = gen_rtx_MEM (this_fix->mode, addr);
6190 dump_minipool (last_barrier->insn);
6191 fix = ftmp;
6194 /* From now on we must synthesize any constants that we can't handle
6195 directly. This can happen if the RTL gets split during final
6196 instruction generation. */
6197 after_arm_reorg = 1;
6199 /* Free the minipool memory. */
6200 obstack_free (&minipool_obstack, minipool_startobj);
6203 /* Routines to output assembly language. */
6205 /* If the rtx is one of the valid FPA immediate values, return its string form.
6206 In this way we can ensure that valid double constants are generated even
6207 when cross compiling. */
6209 const char *
6210 fp_immediate_constant (x)
6211 rtx x;
6213 REAL_VALUE_TYPE r;
6214 int i;
6216 if (!fpa_consts_inited)
6217 init_fpa_table ();
6219 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
6220 for (i = 0; i < 8; i++)
6221 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
6222 return strings_fpa[i];
6224 abort ();
6227 /* As for fp_immediate_constant, but value is passed directly, not in rtx. */
6229 static const char *
6230 fp_const_from_val (r)
6231 REAL_VALUE_TYPE * r;
6233 int i;
6235 if (!fpa_consts_inited)
6236 init_fpa_table ();
6238 for (i = 0; i < 8; i++)
6239 if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
6240 return strings_fpa[i];
6242 abort ();
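/* A minimal standalone sketch (guarded out, not part of the compiler)
   of the lookup fp_immediate_constant and fp_const_from_val perform.
   The table contents here are illustrative; the authoritative table is
   the one built by init_fpa_table.  */
#if 0
#include <stdio.h>

static const double sketch_values_fpa[8]
  = { 0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 0.5, 10.0 };
static const char * const sketch_strings_fpa[8]
  = { "0", "1", "2", "3", "4", "5", "0.5", "10" };

/* Return the assembler spelling of R, or NULL if the FPA cannot
   encode it as an immediate.  */
static const char *
sketch_fp_const (double r)
{
  int i;

  for (i = 0; i < 8; i++)
    if (r == sketch_values_fpa[i])
      return sketch_strings_fpa[i];

  return NULL;
}

int
main (void)
{
  printf ("%s\n", sketch_fp_const (0.5));	/* prints "0.5" */
  return 0;
}
#endif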
6245 /* Output the operands of a LDM/STM instruction to STREAM.
6246 MASK is the ARM register set mask of which only bits 0-15 are important.
6247 REG is the base register, either the frame pointer or the stack pointer.
6248 INSTR is the possibly suffixed load or store instruction. */
6250 static void
6251 print_multi_reg (stream, instr, reg, mask)
6252 FILE * stream;
6253 const char * instr;
6254 int reg;
6255 int mask;
6257 int i;
6258 int not_first = FALSE;
6260 fputc ('\t', stream);
6261 asm_fprintf (stream, instr, reg);
6262 fputs (", {", stream);
6264 for (i = 0; i <= LAST_ARM_REGNUM; i++)
6265 if (mask & (1 << i))
6267 if (not_first)
6268 fprintf (stream, ", ");
6270 asm_fprintf (stream, "%r", i);
6271 not_first = TRUE;
6274 fprintf (stream, "}%s\n", TARGET_APCS_32 ? "" : "^");
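/* A standalone sketch of the register-list printing done by
   print_multi_reg above; the %| and %r assembler-dialect escapes are
   omitted.  A mask of 0x4810 prints "{r4, r11, r14}".  */
#if 0
#include <stdio.h>

static void
sketch_print_reg_list (unsigned int mask)
{
  int i;
  int not_first = 0;

  putchar ('{');
  for (i = 0; i <= 15; i++)
    if (mask & (1u << i))
      {
	printf ("%sr%d", not_first ? ", " : "", i);
	not_first = 1;
      }
  puts ("}");
}
#endif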
6277 /* Output a 'call' insn. */
6279 const char *
6280 output_call (operands)
6281 rtx * operands;
6283 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
6285 if (REGNO (operands[0]) == LR_REGNUM)
6287 operands[0] = gen_rtx_REG (SImode, IP_REGNUM);
6288 output_asm_insn ("mov%?\t%0, %|lr", operands);
6291 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
6293 if (TARGET_INTERWORK)
6294 output_asm_insn ("bx%?\t%0", operands);
6295 else
6296 output_asm_insn ("mov%?\t%|pc, %0", operands);
6298 return "";
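/* For illustration, a call through r2 (no interworking) emits the two
   instructions

	mov	lr, pc
	mov	pc, r2

   Reading the PC yields the current instruction's address plus 8, so
   LR receives the address of the instruction following "mov pc, r2",
   which is exactly the desired return address.  */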
6301 static int
6302 eliminate_lr2ip (x)
6303 rtx * x;
6305 int something_changed = 0;
6306 rtx x0 = * x;
6307 int code = GET_CODE (x0);
6308 int i, j;
6309 const char * fmt;
6311 switch (code)
6313 case REG:
6314 if (REGNO (x0) == LR_REGNUM)
6316 *x = gen_rtx_REG (SImode, IP_REGNUM);
6317 return 1;
6319 return 0;
6320 default:
6321 /* Scan through the sub-elements and change any references there. */
6322 fmt = GET_RTX_FORMAT (code);
6324 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6325 if (fmt[i] == 'e')
6326 something_changed |= eliminate_lr2ip (&XEXP (x0, i));
6327 else if (fmt[i] == 'E')
6328 for (j = 0; j < XVECLEN (x0, i); j++)
6329 something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));
6331 return something_changed;
6335 /* Output a 'call' insn that is a reference in memory. */
6337 const char *
6338 output_call_mem (operands)
6339 rtx * operands;
6341 operands[0] = copy_rtx (operands[0]); /* Be ultra careful. */
6342 /* Handle calls using lr by using ip (which may be clobbered in subr anyway). */
6343 if (eliminate_lr2ip (&operands[0]))
6344 output_asm_insn ("mov%?\t%|ip, %|lr", operands);
6346 if (TARGET_INTERWORK)
6348 output_asm_insn ("ldr%?\t%|ip, %0", operands);
6349 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
6350 output_asm_insn ("bx%?\t%|ip", operands);
6352 else
6354 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
6355 output_asm_insn ("ldr%?\t%|pc, %0", operands);
6358 return "";
6362 /* Output a move from arm registers to an fpu register.
6363 OPERANDS[0] is an fpu register.
6364 OPERANDS[1] is the first of the three arm registers holding the value.
6366 const char *
6367 output_mov_long_double_fpu_from_arm (operands)
6368 rtx * operands;
6370 int arm_reg0 = REGNO (operands[1]);
6371 rtx ops[3];
6373 if (arm_reg0 == IP_REGNUM)
6374 abort ();
6376 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6377 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6378 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
6380 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
6381 output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);
6383 return "";
6386 /* Output a move from an fpu register to arm registers.
6387 OPERANDS[0] is the first of the three arm registers holding the value.
6388 OPERANDS[1] is an fpu register. */
6390 const char *
6391 output_mov_long_double_arm_from_fpu (operands)
6392 rtx * operands;
6394 int arm_reg0 = REGNO (operands[0]);
6395 rtx ops[3];
6397 if (arm_reg0 == IP_REGNUM)
6398 abort ();
6400 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6401 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6402 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
6404 output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
6405 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
6406 return "";
6409 /* Output a move from arm registers to arm registers of a long double.
6410 OPERANDS[0] is the destination.
6411 OPERANDS[1] is the source. */
6413 const char *
6414 output_mov_long_double_arm_from_arm (operands)
6415 rtx * operands;
6417 /* We have to be careful here because the two might overlap. */
6418 int dest_start = REGNO (operands[0]);
6419 int src_start = REGNO (operands[1]);
6420 rtx ops[2];
6421 int i;
6423 if (dest_start < src_start)
6425 for (i = 0; i < 3; i++)
6427 ops[0] = gen_rtx_REG (SImode, dest_start + i);
6428 ops[1] = gen_rtx_REG (SImode, src_start + i);
6429 output_asm_insn ("mov%?\t%0, %1", ops);
6432 else
6434 for (i = 2; i >= 0; i--)
6436 ops[0] = gen_rtx_REG (SImode, dest_start + i);
6437 ops[1] = gen_rtx_REG (SImode, src_start + i);
6438 output_asm_insn ("mov%?\t%0, %1", ops);
6442 return "";
6446 /* Output a move from arm registers to an fpu register.
6447 OPERANDS[0] is an fpu register.
6448 OPERANDS[1] is the first register of an arm register pair.
6450 const char *
6451 output_mov_double_fpu_from_arm (operands)
6452 rtx * operands;
6454 int arm_reg0 = REGNO (operands[1]);
6455 rtx ops[2];
6457 if (arm_reg0 == IP_REGNUM)
6458 abort ();
6460 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6461 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6462 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
6463 output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
6464 return "";
6467 /* Output a move from an fpu register to arm registers.
6468 OPERANDS[0] is the first register of an arm register pair.
6469 OPERANDS[1] is an fpu register. */
6471 const char *
6472 output_mov_double_arm_from_fpu (operands)
6473 rtx * operands;
6475 int arm_reg0 = REGNO (operands[0]);
6476 rtx ops[2];
6478 if (arm_reg0 == IP_REGNUM)
6479 abort ();
6481 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6482 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6483 output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
6484 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
6485 return "";
6488 /* Output a move between double words.
6489 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
6490 or MEM<-REG and all MEMs must be offsettable addresses. */
6492 const char *
6493 output_move_double (operands)
6494 rtx * operands;
6496 enum rtx_code code0 = GET_CODE (operands[0]);
6497 enum rtx_code code1 = GET_CODE (operands[1]);
6498 rtx otherops[3];
6500 if (code0 == REG)
6502 int reg0 = REGNO (operands[0]);
6504 otherops[0] = gen_rtx_REG (SImode, 1 + reg0);
6506 if (code1 == REG)
6508 int reg1 = REGNO (operands[1]);
6509 if (reg1 == IP_REGNUM)
6510 abort ();
6512 /* Ensure the second source is not overwritten. */
6513 if (reg1 == reg0 + (WORDS_BIG_ENDIAN ? -1 : 1))
6514 output_asm_insn ("mov%?\t%Q0, %Q1\n\tmov%?\t%R0, %R1", operands);
6515 else
6516 output_asm_insn ("mov%?\t%R0, %R1\n\tmov%?\t%Q0, %Q1", operands);
6518 else if (code1 == CONST_DOUBLE)
6520 if (GET_MODE (operands[1]) == DFmode)
6522 long l[2];
6523 union real_extract u;
6525 memcpy (&u, &CONST_DOUBLE_LOW (operands[1]), sizeof (u));
6526 REAL_VALUE_TO_TARGET_DOUBLE (u.d, l);
6527 otherops[1] = GEN_INT (l[1]);
6528 operands[1] = GEN_INT (l[0]);
6530 else if (GET_MODE (operands[1]) != VOIDmode)
6531 abort ();
6532 else if (WORDS_BIG_ENDIAN)
6534 otherops[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
6535 operands[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
6537 else
6539 otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
6540 operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
6543 output_mov_immediate (operands);
6544 output_mov_immediate (otherops);
6546 else if (code1 == CONST_INT)
6548 #if HOST_BITS_PER_WIDE_INT > 32
6549 /* If HOST_WIDE_INT is more than 32 bits, the intval tells us
6550 what the upper word is. */
6551 if (WORDS_BIG_ENDIAN)
6553 otherops[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
6554 operands[1] = GEN_INT (INTVAL (operands[1]) >> 32);
6556 else
6558 otherops[1] = GEN_INT (INTVAL (operands[1]) >> 32);
6559 operands[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
6561 #else
6562 /* Sign extend the intval into the high-order word. */
6563 if (WORDS_BIG_ENDIAN)
6565 otherops[1] = operands[1];
6566 operands[1] = (INTVAL (operands[1]) < 0
6567 ? constm1_rtx : const0_rtx);
6569 else
6570 otherops[1] = INTVAL (operands[1]) < 0 ? constm1_rtx : const0_rtx;
6571 #endif
6572 output_mov_immediate (otherops);
6573 output_mov_immediate (operands);
6575 else if (code1 == MEM)
6577 switch (GET_CODE (XEXP (operands[1], 0)))
6579 case REG:
6580 output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
6581 break;
6583 case PRE_INC:
6584 abort (); /* Should never happen now. */
6585 break;
6587 case PRE_DEC:
6588 output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
6589 break;
6591 case POST_INC:
6592 output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
6593 break;
6595 case POST_DEC:
6596 abort (); /* Should never happen now. */
6597 break;
6599 case LABEL_REF:
6600 case CONST:
6601 output_asm_insn ("adr%?\t%0, %1", operands);
6602 output_asm_insn ("ldm%?ia\t%0, %M0", operands);
6603 break;
6605 default:
6606 if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1),
6607 GET_MODE (XEXP (XEXP (operands[1], 0), 1))))
6609 otherops[0] = operands[0];
6610 otherops[1] = XEXP (XEXP (operands[1], 0), 0);
6611 otherops[2] = XEXP (XEXP (operands[1], 0), 1);
6613 if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
6615 if (GET_CODE (otherops[2]) == CONST_INT)
6617 switch (INTVAL (otherops[2]))
6619 case -8:
6620 output_asm_insn ("ldm%?db\t%1, %M0", otherops);
6621 return "";
6622 case -4:
6623 output_asm_insn ("ldm%?da\t%1, %M0", otherops);
6624 return "";
6625 case 4:
6626 output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
6627 return "";
6630 if (!(const_ok_for_arm (INTVAL (otherops[2]))))
6631 output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
6632 else
6633 output_asm_insn ("add%?\t%0, %1, %2", otherops);
6635 else
6636 output_asm_insn ("add%?\t%0, %1, %2", otherops);
6638 else
6639 output_asm_insn ("sub%?\t%0, %1, %2", otherops);
6641 return "ldm%?ia\t%0, %M0";
6643 else
6645 otherops[1] = adjust_address (operands[1], VOIDmode, 4);
6646 /* Take care of overlapping base/data reg. */
6647 if (reg_mentioned_p (operands[0], operands[1]))
6649 output_asm_insn ("ldr%?\t%0, %1", otherops);
6650 output_asm_insn ("ldr%?\t%0, %1", operands);
6652 else
6654 output_asm_insn ("ldr%?\t%0, %1", operands);
6655 output_asm_insn ("ldr%?\t%0, %1", otherops);
6660 else
6661 abort (); /* Constraints should prevent this. */
6663 else if (code0 == MEM && code1 == REG)
6665 if (REGNO (operands[1]) == IP_REGNUM)
6666 abort ();
6668 switch (GET_CODE (XEXP (operands[0], 0)))
6670 case REG:
6671 output_asm_insn ("stm%?ia\t%m0, %M1", operands);
6672 break;
6674 case PRE_INC:
6675 abort (); /* Should never happen now. */
6676 break;
6678 case PRE_DEC:
6679 output_asm_insn ("stm%?db\t%m0!, %M1", operands);
6680 break;
6682 case POST_INC:
6683 output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
6684 break;
6686 case POST_DEC:
6687 abort (); /* Should never happen now. */
6688 break;
6690 case PLUS:
6691 if (GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT)
6693 switch (INTVAL (XEXP (XEXP (operands[0], 0), 1)))
6695 case -8:
6696 output_asm_insn ("stm%?db\t%m0, %M1", operands);
6697 return "";
6699 case -4:
6700 output_asm_insn ("stm%?da\t%m0, %M1", operands);
6701 return "";
6703 case 4:
6704 output_asm_insn ("stm%?ib\t%m0, %M1", operands);
6705 return "";
6708 /* Fall through */
6710 default:
6711 otherops[0] = adjust_address (operands[0], VOIDmode, 4);
6712 otherops[1] = gen_rtx_REG (SImode, 1 + REGNO (operands[1]));
6713 output_asm_insn ("str%?\t%1, %0", operands);
6714 output_asm_insn ("str%?\t%1, %0", otherops);
6717 else
6718 /* Constraints should prevent this. */
6719 abort ();
6721 return "";
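/* A standalone sketch of the addressing-mode selection above: a
   double-word load from [base, #offset] collapses to a single LDM for
   these four offsets; any other offset needs the base adjusted with an
   ADD or SUB first.  */
#if 0
static const char *
sketch_ldm_variant (int offset)
{
  switch (offset)
    {
    case 0:   return "ldmia";	/* increment after */
    case 4:   return "ldmib";	/* increment before */
    case -4:  return "ldmda";	/* decrement after */
    case -8:  return "ldmdb";	/* decrement before */
    default:  return 0;		/* no single-insn form */
    }
}
#endif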
6725 /* Output an arbitrary MOV reg, #n.
6726 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
6728 const char *
6729 output_mov_immediate (operands)
6730 rtx * operands;
6732 HOST_WIDE_INT n = INTVAL (operands[1]);
6734 /* Try to use one MOV. */
6735 if (const_ok_for_arm (n))
6736 output_asm_insn ("mov%?\t%0, %1", operands);
6738 /* Try to use one MVN. */
6739 else if (const_ok_for_arm (~n))
6741 operands[1] = GEN_INT (~n);
6742 output_asm_insn ("mvn%?\t%0, %1", operands);
6744 else
6746 int n_ones = 0;
6747 int i;
6749 /* If all else fails, make it out of ORRs or BICs as appropriate. */
6750 for (i = 0; i < 32; i ++)
6751 if (n & 1 << i)
6752 n_ones ++;
6754 if (n_ones > 16) /* Shorter to use MVN with BIC in this case. */
6755 output_multi_immediate (operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1, ~ n);
6756 else
6757 output_multi_immediate (operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1, n);
6760 return "";
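/* A standalone sketch of the decision above: building ~N with MVN+BIC
   is shorter when more than half the bits of N are set.  For example
   0xfffffff0 has 28 set bits; its complement is 15, which fits a
   single "mvn r0, #15".  */
#if 0
static int
sketch_use_mvn_bic (unsigned int n)
{
  int n_ones = 0;
  int i;

  for (i = 0; i < 32; i++)
    if (n & (1u << i))
      n_ones++;

  return n_ones > 16;
}
#endif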
6763 /* Output an ADD r, s, #n where n may be too big for one instruction.
6764 If adding zero to one register, output nothing. */
6766 const char *
6767 output_add_immediate (operands)
6768 rtx * operands;
6770 HOST_WIDE_INT n = INTVAL (operands[2]);
6772 if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
6774 if (n < 0)
6775 output_multi_immediate (operands,
6776 "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
6777 -n);
6778 else
6779 output_multi_immediate (operands,
6780 "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
6784 return "";
6787 /* Output a multiple immediate operation.
6788 OPERANDS is the vector of operands referred to in the output patterns.
6789 INSTR1 is the output pattern to use for the first constant.
6790 INSTR2 is the output pattern to use for subsequent constants.
6791 IMMED_OP is the index of the constant slot in OPERANDS.
6792 N is the constant value. */
6794 static const char *
6795 output_multi_immediate (operands, instr1, instr2, immed_op, n)
6796 rtx * operands;
6797 const char * instr1;
6798 const char * instr2;
6799 int immed_op;
6800 HOST_WIDE_INT n;
6802 #if HOST_BITS_PER_WIDE_INT > 32
6803 n &= 0xffffffff;
6804 #endif
6806 if (n == 0)
6808 /* Quick and easy output. */
6809 operands[immed_op] = const0_rtx;
6810 output_asm_insn (instr1, operands);
6812 else
6814 int i;
6815 const char * instr = instr1;
6817 /* Note that n is never zero here (which would give no output). */
6818 for (i = 0; i < 32; i += 2)
6820 if (n & (3 << i))
6822 operands[immed_op] = GEN_INT (n & (255 << i));
6823 output_asm_insn (instr, operands);
6824 instr = instr2;
6825 i += 6;
6830 return "";
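/* A standalone sketch of the chunking loop above.  ARM data-processing
   immediates are an 8-bit value rotated by an even amount, so the loop
   scans two bits at a time and peels off an aligned 8-bit field when it
   finds set bits.  For example 0x00ff00ff splits into 0x000000ff and
   0x00ff0000, i.e. one MOV plus one ORR.  */
#if 0
#include <stdio.h>

static void
sketch_split_immediate (unsigned int n)
{
  int i;

  for (i = 0; i < 32; i += 2)
    if (n & (3u << i))
      {
	printf ("#0x%08x\n", n & (255u << i));
	i += 6;		/* with the loop's i += 2, skip the whole byte */
      }
}

int
main (void)
{
  sketch_split_immediate (0x00ff00ff);
  return 0;
}
#endif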
6833 /* Return the appropriate ARM instruction for the operation code.
6834 The returned result should not be overwritten. OP is the rtx of the
6835 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
6836 was shifted. */
6838 const char *
6839 arithmetic_instr (op, shift_first_arg)
6840 rtx op;
6841 int shift_first_arg;
6843 switch (GET_CODE (op))
6845 case PLUS:
6846 return "add";
6848 case MINUS:
6849 return shift_first_arg ? "rsb" : "sub";
6851 case IOR:
6852 return "orr";
6854 case XOR:
6855 return "eor";
6857 case AND:
6858 return "and";
6860 default:
6861 abort ();
6865 /* Ensure valid constant shifts and return the appropriate shift mnemonic
6866 for the operation code. The returned result should not be overwritten.
6867 OP is the rtx code of the shift.
6868 On exit, *AMOUNTP will be -1 if the shift is by a register, or the
6869 constant amount if the shift is by a constant. */
6871 static const char *
6872 shift_op (op, amountp)
6873 rtx op;
6874 HOST_WIDE_INT *amountp;
6876 const char * mnem;
6877 enum rtx_code code = GET_CODE (op);
6879 if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
6880 *amountp = -1;
6881 else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
6882 *amountp = INTVAL (XEXP (op, 1));
6883 else
6884 abort ();
6886 switch (code)
6888 case ASHIFT:
6889 mnem = "asl";
6890 break;
6892 case ASHIFTRT:
6893 mnem = "asr";
6894 break;
6896 case LSHIFTRT:
6897 mnem = "lsr";
6898 break;
6900 case ROTATERT:
6901 mnem = "ror";
6902 break;
6904 case MULT:
6905 /* We never have to worry about the amount being other than a
6906 power of 2, since this case can never be reloaded from a reg. */
6907 if (*amountp != -1)
6908 *amountp = int_log2 (*amountp);
6909 else
6910 abort ();
6911 return "asl";
6913 default:
6914 abort ();
6917 if (*amountp != -1)
6919 /* This is not 100% correct, but follows from the desire to merge
6920 multiplication by a power of 2 with the recognizer for a
6921 shift. >=32 is not a valid shift for "asl", so we must try and
6922 output a shift that produces the correct arithmetical result.
6923 Using lsr #32 is identical except for the fact that the carry bit
6924 is not set correctly if we set the flags; but we never use the
6925 carry bit from such an operation, so we can ignore that. */
6926 if (code == ROTATERT)
6927 /* Rotate is just modulo 32. */
6928 *amountp &= 31;
6929 else if (*amountp != (*amountp & 31))
6931 if (code == ASHIFT)
6932 mnem = "lsr";
6933 *amountp = 32;
6936 /* Shifts of 0 are no-ops. */
6937 if (*amountp == 0)
6938 return NULL;
6941 return mnem;
6944 /* Obtain the shift count from POWER, which must be a power of two. */
6946 static HOST_WIDE_INT
6947 int_log2 (power)
6948 HOST_WIDE_INT power;
6950 HOST_WIDE_INT shift = 0;
6952 while ((((HOST_WIDE_INT) 1 << shift) & power) == 0)
6954 if (shift > 31)
6955 abort ();
6956 shift ++;
6959 return shift;
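/* A standalone sketch of int_log2: find the shift count for a power of
   two, e.g. sketch_int_log2 (8) == 3.  Anything that is not a power of
   two runs past bit 31 and aborts in the real routine above.  */
#if 0
static int
sketch_int_log2 (unsigned int power)
{
  int shift = 0;

  while (((1u << shift) & power) == 0)
    shift++;

  return shift;
}
#endif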
6962 /* Output a .ascii pseudo-op, keeping track of lengths. This is because
6963 /bin/as is horribly restrictive. */
6964 #define MAX_ASCII_LEN 51
6966 void
6967 output_ascii_pseudo_op (stream, p, len)
6968 FILE * stream;
6969 const unsigned char * p;
6970 int len;
6972 int i;
6973 int len_so_far = 0;
6975 fputs ("\t.ascii\t\"", stream);
6977 for (i = 0; i < len; i++)
6979 int c = p[i];
6981 if (len_so_far >= MAX_ASCII_LEN)
6983 fputs ("\"\n\t.ascii\t\"", stream);
6984 len_so_far = 0;
6987 switch (c)
6989 case TARGET_TAB:
6990 fputs ("\\t", stream);
6991 len_so_far += 2;
6992 break;
6994 case TARGET_FF:
6995 fputs ("\\f", stream);
6996 len_so_far += 2;
6997 break;
6999 case TARGET_BS:
7000 fputs ("\\b", stream);
7001 len_so_far += 2;
7002 break;
7004 case TARGET_CR:
7005 fputs ("\\r", stream);
7006 len_so_far += 2;
7007 break;
7009 case TARGET_NEWLINE:
7010 fputs ("\\n", stream);
7011 c = (i + 1 < len) ? p[i + 1] : 0; /* Don't read past LEN. */
7012 if ((c >= ' ' && c <= '~')
7013 || c == TARGET_TAB)
7014 /* This is a good place for a line break. */
7015 len_so_far = MAX_ASCII_LEN;
7016 else
7017 len_so_far += 2;
7018 break;
7020 case '\"':
7021 case '\\':
7022 putc ('\\', stream);
7023 len_so_far++;
7024 /* drop through. */
7026 default:
7027 if (c >= ' ' && c <= '~')
7029 putc (c, stream);
7030 len_so_far++;
7032 else
7034 fprintf (stream, "\\%03o", c);
7035 len_so_far += 4;
7037 break;
7041 fputs ("\"\n", stream);
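/* For illustration: given the bytes 'h', 'i', '\n', '\0' this emits

	.ascii "hi\n\000"

   starting a fresh .ascii directive whenever the current one reaches
   MAX_ASCII_LEN characters.  */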
7044 /* Compute the register save mask for registers 0 through 12
7045 inclusive. This code is used by both arm_compute_save_reg_mask
7046 and arm_compute_initial_elimination_offset. */
7048 static unsigned long
7049 arm_compute_save_reg0_reg12_mask ()
7051 unsigned long func_type = arm_current_func_type ();
7052 unsigned int save_reg_mask = 0;
7053 unsigned int reg;
7055 if (IS_INTERRUPT (func_type))
7057 unsigned int max_reg;
7058 /* Interrupt functions must not corrupt any registers,
7059 even call clobbered ones. If this is a leaf function
7060 we can just examine the registers used by the RTL, but
7061 otherwise we have to assume that whatever function is
7062 called might clobber anything, and so we have to save
7063 all the call-clobbered registers as well. */
7064 if (ARM_FUNC_TYPE (func_type) == ARM_FT_FIQ)
7065 /* FIQ handlers have registers r8 - r12 banked, so
7066 we only need to check r0 - r7. Normal ISRs only
7067 bank r14 and r15, so for them we must check up to r12.
7068 r13 is the stack pointer which is always preserved,
7069 so we do not need to consider it here. */
7070 max_reg = 7;
7071 else
7072 max_reg = 12;
7074 for (reg = 0; reg <= max_reg; reg++)
7075 if (regs_ever_live[reg]
7076 || (! current_function_is_leaf && call_used_regs [reg]))
7077 save_reg_mask |= (1 << reg);
7079 else
7081 /* In the normal case we only need to save those registers
7082 which are call saved and which are used by this function. */
7083 for (reg = 0; reg <= 10; reg++)
7084 if (regs_ever_live[reg] && ! call_used_regs [reg])
7085 save_reg_mask |= (1 << reg);
7087 /* Handle the frame pointer as a special case. */
7088 if (! TARGET_APCS_FRAME
7089 && ! frame_pointer_needed
7090 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
7091 && ! call_used_regs[HARD_FRAME_POINTER_REGNUM])
7092 save_reg_mask |= 1 << HARD_FRAME_POINTER_REGNUM;
7094 /* If we aren't loading the PIC register,
7095 don't stack it even though it may be live. */
7096 if (flag_pic
7097 && ! TARGET_SINGLE_PIC_BASE
7098 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
7099 save_reg_mask |= 1 << PIC_OFFSET_TABLE_REGNUM;
7102 return save_reg_mask;
7105 /* Compute a bit mask of which registers need to be
7106 saved on the stack for the current function. */
7108 static unsigned long
7109 arm_compute_save_reg_mask ()
7111 unsigned int save_reg_mask = 0;
7112 unsigned long func_type = arm_current_func_type ();
7114 if (IS_NAKED (func_type))
7115 /* This should never really happen. */
7116 return 0;
7118 /* If we are creating a stack frame, then we must save the frame pointer,
7119 IP (which will hold the old stack pointer), LR and the PC. */
7120 if (frame_pointer_needed)
7121 save_reg_mask |=
7122 (1 << ARM_HARD_FRAME_POINTER_REGNUM)
7123 | (1 << IP_REGNUM)
7124 | (1 << LR_REGNUM)
7125 | (1 << PC_REGNUM);
7127 /* Volatile functions do not return, so there
7128 is no need to save any other registers. */
7129 if (IS_VOLATILE (func_type))
7130 return save_reg_mask;
7132 save_reg_mask |= arm_compute_save_reg0_reg12_mask ();
7134 /* Decide if we need to save the link register.
7135 Interrupt routines have their own banked link register,
7136 so they never need to save it.
7137 Otherwise if we do not use the link register we do not need to save
7138 it. If we are pushing other registers onto the stack however, we
7139 can save an instruction in the epilogue by pushing the link register
7140 now and then popping it back into the PC. This incurs extra memory
7141 accesses though, so we only do it when optimising for size, and only
7142 if we know that we will not need a fancy return sequence. */
7143 if (regs_ever_live [LR_REGNUM]
7144 || (save_reg_mask
7145 && optimize_size
7146 && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL))
7147 save_reg_mask |= 1 << LR_REGNUM;
7149 if (cfun->machine->lr_save_eliminated)
7150 save_reg_mask &= ~ (1 << LR_REGNUM);
7152 return save_reg_mask;
7155 /* Generate a function exit sequence. If REALLY_RETURN is true, then do
7156 everything bar the final return instruction. */
7158 const char *
7159 output_return_instruction (operand, really_return, reverse)
7160 rtx operand;
7161 int really_return;
7162 int reverse;
7164 char conditional[10];
7165 char instr[100];
7166 int reg;
7167 unsigned long live_regs_mask;
7168 unsigned long func_type;
7170 func_type = arm_current_func_type ();
7172 if (IS_NAKED (func_type))
7173 return "";
7175 if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
7177 /* If this function was declared non-returning, and we have found a tail
7178 call, then we have to trust that the called function won't return. */
7179 if (really_return)
7181 rtx ops[2];
7183 /* Otherwise, trap an attempted return by aborting. */
7184 ops[0] = operand;
7185 ops[1] = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)"
7186 : "abort");
7187 assemble_external_libcall (ops[1]);
7188 output_asm_insn (reverse ? "bl%D0\t%a1" : "bl%d0\t%a1", ops);
7191 return "";
7194 if (current_function_calls_alloca && !really_return)
7195 abort ();
7197 /* Construct the conditional part of the instruction(s) to be emitted. */
7198 sprintf (conditional, "%%?%%%c0", reverse ? 'D' : 'd');
7200 return_used_this_function = 1;
7202 live_regs_mask = arm_compute_save_reg_mask ();
7204 if (live_regs_mask)
7206 const char * return_reg;
7208 /* If we do not have any special requirements for function exit
7209 (e.g. interworking or an ISR) then we can load the return address
7210 directly into the PC. Otherwise we must load it into LR. */
7211 if (really_return
7212 && ! TARGET_INTERWORK)
7213 return_reg = reg_names[PC_REGNUM];
7214 else
7215 return_reg = reg_names[LR_REGNUM];
7217 if ((live_regs_mask & (1 << IP_REGNUM)) == (1 << IP_REGNUM))
7218 /* There are two possible reasons for the IP register being saved.
7219 Either a stack frame was created, in which case IP contains the
7220 old stack pointer, or an ISR routine corrupted it. If this is an
7221 ISR routine then just restore IP, otherwise restore IP into SP. */
7222 if (! IS_INTERRUPT (func_type))
7224 live_regs_mask &= ~ (1 << IP_REGNUM);
7225 live_regs_mask |= (1 << SP_REGNUM);
7228 /* On some ARM architectures it is faster to use LDR rather than
7229 LDM to load a single register. On other architectures, the
7230 cost is the same. In 26 bit mode, or for exception handlers,
7231 we have to use LDM to load the PC so that the CPSR is also
7232 restored. */
7233 for (reg = 0; reg <= LAST_ARM_REGNUM; reg++)
7235 if (live_regs_mask == (unsigned int)(1 << reg))
7236 break;
7238 if (reg <= LAST_ARM_REGNUM
7239 && (reg != LR_REGNUM
7240 || ! really_return
7241 || (TARGET_APCS_32 && ! IS_INTERRUPT (func_type))))
7243 sprintf (instr, "ldr%s\t%%|%s, [%%|sp], #4", conditional,
7244 (reg == LR_REGNUM) ? return_reg : reg_names[reg]);
7246 else
7248 char *p;
7249 int first = 1;
7251 /* Generate the load multiple instruction to restore the registers. */
7252 if (frame_pointer_needed)
7253 sprintf (instr, "ldm%sea\t%%|fp, {", conditional);
7254 else
7255 sprintf (instr, "ldm%sfd\t%%|sp!, {", conditional);
7257 p = instr + strlen (instr);
7259 for (reg = 0; reg <= SP_REGNUM; reg++)
7260 if (live_regs_mask & (1 << reg))
7262 int l = strlen (reg_names[reg]);
7264 if (first)
7265 first = 0;
7266 else
7268 memcpy (p, ", ", 2);
7269 p += 2;
7272 memcpy (p, "%|", 2);
7273 memcpy (p + 2, reg_names[reg], l);
7274 p += l + 2;
7277 if (live_regs_mask & (1 << LR_REGNUM))
7279 int l = strlen (return_reg);
7281 if (! first)
7283 memcpy (p, ", ", 2);
7284 p += 2;
7287 memcpy (p, "%|", 2);
7288 memcpy (p + 2, return_reg, l);
7289 strcpy (p + 2 + l, ((TARGET_APCS_32
7290 && !IS_INTERRUPT (func_type))
7291 || !really_return)
7292 ? "}" : "}^");
7294 else
7295 strcpy (p, "}");
7298 output_asm_insn (instr, & operand);
7300 /* See if we need to generate an extra instruction to
7301 perform the actual function return. */
7302 if (really_return
7303 && func_type != ARM_FT_INTERWORKED
7304 && (live_regs_mask & (1 << LR_REGNUM)) != 0)
7306 /* The return has already been handled
7307 by loading the LR into the PC. */
7308 really_return = 0;
7312 if (really_return)
7314 switch ((int) ARM_FUNC_TYPE (func_type))
7316 case ARM_FT_ISR:
7317 case ARM_FT_FIQ:
7318 sprintf (instr, "sub%ss\t%%|pc, %%|lr, #4", conditional);
7319 break;
7321 case ARM_FT_INTERWORKED:
7322 sprintf (instr, "bx%s\t%%|lr", conditional);
7323 break;
7325 case ARM_FT_EXCEPTION:
7326 sprintf (instr, "mov%ss\t%%|pc, %%|lr", conditional);
7327 break;
7329 default:
7330 /* ARMv5 implementations always provide BX, so interworking
7331 is the default unless APCS-26 is in use. */
7332 if ((insn_flags & FL_ARCH5) != 0 && TARGET_APCS_32)
7333 sprintf (instr, "bx%s\t%%|lr", conditional);
7334 else
7335 sprintf (instr, "mov%s%s\t%%|pc, %%|lr",
7336 conditional, TARGET_APCS_32 ? "" : "s");
7337 break;
7340 output_asm_insn (instr, & operand);
7343 return "";
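/* For illustration: a frame-creating function that returns normally
   can restore straight into the PC with a single instruction,

	ldmea	fp, {r4, fp, sp, pc}

   while an interworking return must reload LR and branch-exchange:

	ldmfd	sp!, {r4, lr}
	bx	lr                                                      */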
7346 /* Write the function name into the code section, directly preceding
7347 the function prologue.
7349 Code will be output similar to this:
7351 .ascii "arm_poke_function_name", 0
7352 .align
7354 .word 0xff000000 + (t1 - t0)
7355 arm_poke_function_name
7356 mov ip, sp
7357 stmfd sp!, {fp, ip, lr, pc}
7358 sub fp, ip, #4
7360 When performing a stack backtrace, code can inspect the value
7361 of 'pc' stored at 'fp' + 0. If the trace function then looks
7362 at location pc - 12 and the top 8 bits are set, then we know
7363 that there is a function name embedded immediately preceding this
7364 location, whose length is ((pc[-3]) & ~0xff000000).
7366 We assume that pc is declared as a pointer to an unsigned long.
7368 It is of no benefit to output the function name if we are assembling
7369 a leaf function. These function types will not contain a stack
7370 backtrace structure, therefore it is not possible to determine the
7371 function name. */
7373 void
7374 arm_poke_function_name (stream, name)
7375 FILE * stream;
7376 const char * name;
7378 unsigned long alignlength;
7379 unsigned long length;
7380 rtx x;
7382 length = strlen (name) + 1;
7383 alignlength = ROUND_UP (length);
7385 ASM_OUTPUT_ASCII (stream, name, length);
7386 ASM_OUTPUT_ALIGN (stream, 2);
7387 x = GEN_INT ((unsigned HOST_WIDE_INT) 0xff000000 + alignlength);
7388 assemble_aligned_integer (UNITS_PER_WORD, x);
7391 /* Place some comments into the assembler stream
7392 describing the current function. */
7394 static void
7395 arm_output_function_prologue (f, frame_size)
7396 FILE * f;
7397 HOST_WIDE_INT frame_size;
7399 unsigned long func_type;
7401 if (!TARGET_ARM)
7403 thumb_output_function_prologue (f, frame_size);
7404 return;
7407 /* Sanity check. */
7408 if (arm_ccfsm_state || arm_target_insn)
7409 abort ();
7411 func_type = arm_current_func_type ();
7413 switch ((int) ARM_FUNC_TYPE (func_type))
7415 default:
7416 case ARM_FT_NORMAL:
7417 break;
7418 case ARM_FT_INTERWORKED:
7419 asm_fprintf (f, "\t%@ Function supports interworking.\n");
7420 break;
7421 case ARM_FT_EXCEPTION_HANDLER:
7422 asm_fprintf (f, "\t%@ C++ Exception Handler.\n");
7423 break;
7424 case ARM_FT_ISR:
7425 asm_fprintf (f, "\t%@ Interrupt Service Routine.\n");
7426 break;
7427 case ARM_FT_FIQ:
7428 asm_fprintf (f, "\t%@ Fast Interrupt Service Routine.\n");
7429 break;
7430 case ARM_FT_EXCEPTION:
7431 asm_fprintf (f, "\t%@ ARM Exception Handler.\n");
7432 break;
7435 if (IS_NAKED (func_type))
7436 asm_fprintf (f, "\t%@ Naked Function: prologue and epilogue provided by programmer.\n");
7438 if (IS_VOLATILE (func_type))
7439 asm_fprintf (f, "\t%@ Volatile: function does not return.\n");
7441 if (IS_NESTED (func_type))
7442 asm_fprintf (f, "\t%@ Nested: function declared inside another function.\n");
7444 asm_fprintf (f, "\t%@ args = %d, pretend = %d, frame = %d\n",
7445 current_function_args_size,
7446 current_function_pretend_args_size, frame_size);
7448 asm_fprintf (f, "\t%@ frame_needed = %d, uses_anonymous_args = %d\n",
7449 frame_pointer_needed,
7450 cfun->machine->uses_anonymous_args);
7452 if (cfun->machine->lr_save_eliminated)
7453 asm_fprintf (f, "\t%@ link register save eliminated.\n");
7455 #ifdef AOF_ASSEMBLER
7456 if (flag_pic)
7457 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, PIC_OFFSET_TABLE_REGNUM);
7458 #endif
7460 return_used_this_function = 0;
7463 const char *
7464 arm_output_epilogue (really_return)
7465 int really_return;
7467 int reg;
7468 unsigned long saved_regs_mask;
7469 unsigned long func_type;
7470 /* Floats_offset is the offset from the "virtual" frame. In an APCS
7471 frame that is $fp + 4 for a non-variadic function. */
7472 int floats_offset = 0;
7473 rtx operands[3];
7474 int frame_size = get_frame_size ();
7475 FILE * f = asm_out_file;
7476 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
7478 /* If we have already generated the return instruction
7479 then it is futile to generate anything else. */
7480 if (use_return_insn (FALSE) && return_used_this_function)
7481 return "";
7483 func_type = arm_current_func_type ();
7485 if (IS_NAKED (func_type))
7486 /* Naked functions don't have epilogues. */
7487 return "";
7489 if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
7491 rtx op;
7493 /* A volatile function should never return. Call abort. */
7494 op = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)" : "abort");
7495 assemble_external_libcall (op);
7496 output_asm_insn ("bl\t%a0", &op);
7498 return "";
7501 if (ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER
7502 && ! really_return)
7503 /* If we are throwing an exception, then we really must
7504 be doing a return, so we can't tail-call. */
7505 abort ();
7507 saved_regs_mask = arm_compute_save_reg_mask ();
7509 /* XXX We should adjust floats_offset for any anonymous args, and then
7510 re-adjust vfp_offset below to compensate. */
7512 /* Compute how far away the floats will be. */
7513 for (reg = 0; reg <= LAST_ARM_REGNUM; reg ++)
7514 if (saved_regs_mask & (1 << reg))
7515 floats_offset += 4;
7517 if (frame_pointer_needed)
7519 int vfp_offset = 4;
7521 if (arm_fpu_arch == FP_SOFT2)
7523 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
7524 if (regs_ever_live[reg] && !call_used_regs[reg])
7526 floats_offset += 12;
7527 asm_fprintf (f, "\tldfe\t%r, [%r, #-%d]\n",
7528 reg, FP_REGNUM, floats_offset - vfp_offset);
7531 else
7533 int start_reg = LAST_ARM_FP_REGNUM;
7535 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
7537 if (regs_ever_live[reg] && !call_used_regs[reg])
7539 floats_offset += 12;
7541 /* We can't unstack more than four registers at once. */
7542 if (start_reg - reg == 3)
7544 asm_fprintf (f, "\tlfm\t%r, 4, [%r, #-%d]\n",
7545 reg, FP_REGNUM, floats_offset - vfp_offset);
7546 start_reg = reg - 1;
7549 else
7551 if (reg != start_reg)
7552 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
7553 reg + 1, start_reg - reg,
7554 FP_REGNUM, floats_offset - vfp_offset);
7555 start_reg = reg - 1;
7559 /* Just in case the last register checked also needs unstacking. */
7560 if (reg != start_reg)
7561 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
7562 reg + 1, start_reg - reg,
7563 FP_REGNUM, floats_offset - vfp_offset);
7566 /* saved_regs_mask should contain the IP, which at the time of stack
7567 frame generation actually contains the old stack pointer. So a
7568 quick way to unwind the stack is just to pop the IP register directly
7569 into the stack pointer. */
7570 if ((saved_regs_mask & (1 << IP_REGNUM)) == 0)
7571 abort ();
7572 saved_regs_mask &= ~ (1 << IP_REGNUM);
7573 saved_regs_mask |= (1 << SP_REGNUM);
7575 /* There are two registers left in saved_regs_mask - LR and PC. We
7576 only need to restore the LR register (the return address), but to
7577 save time we can load it directly into the PC, unless we need a
7578 special function exit sequence, or we are not really returning. */
7579 if (really_return && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL)
7580 /* Delete the LR from the register mask, so that the LR on
7581 the stack is loaded into the PC in the register mask. */
7582 saved_regs_mask &= ~ (1 << LR_REGNUM);
7583 else
7584 saved_regs_mask &= ~ (1 << PC_REGNUM);
7586 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, saved_regs_mask);
7588 if (IS_INTERRUPT (func_type))
7589 /* Interrupt handlers will have pushed the
7590 IP onto the stack, so restore it now. */
7591 print_multi_reg (f, "ldmfd\t%r", SP_REGNUM, 1 << IP_REGNUM);
7593 else
7595 /* Restore stack pointer if necessary. */
7596 if (frame_size + current_function_outgoing_args_size != 0)
7598 operands[0] = operands[1] = stack_pointer_rtx;
7599 operands[2] = GEN_INT (frame_size
7600 + current_function_outgoing_args_size);
7601 output_add_immediate (operands);
7604 if (arm_fpu_arch == FP_SOFT2)
7606 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
7607 if (regs_ever_live[reg] && !call_used_regs[reg])
7608 asm_fprintf (f, "\tldfe\t%r, [%r], #12\n",
7609 reg, SP_REGNUM);
7611 else
7613 int start_reg = FIRST_ARM_FP_REGNUM;
7615 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
7617 if (regs_ever_live[reg] && !call_used_regs[reg])
7619 if (reg - start_reg == 3)
7621 asm_fprintf (f, "\tlfmfd\t%r, 4, [%r]!\n",
7622 start_reg, SP_REGNUM);
7623 start_reg = reg + 1;
7626 else
7628 if (reg != start_reg)
7629 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
7630 start_reg, reg - start_reg,
7631 SP_REGNUM);
7633 start_reg = reg + 1;
7637 /* Just in case the last register checked also needs unstacking. */
7638 if (reg != start_reg)
7639 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
7640 start_reg, reg - start_reg, SP_REGNUM);
7643 /* If we can, restore the LR into the PC. */
7644 if (ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
7645 && really_return
7646 && current_function_pretend_args_size == 0
7647 && saved_regs_mask & (1 << LR_REGNUM))
7649 saved_regs_mask &= ~ (1 << LR_REGNUM);
7650 saved_regs_mask |= (1 << PC_REGNUM);
7653 /* Load the registers off the stack. If we only have one register
7654 to load use the LDR instruction - it is faster. */
7655 if (saved_regs_mask == (1 << LR_REGNUM))
7657 /* The exception handler ignores the LR, so we do
7658 not really need to load it off the stack. */
7659 if (eh_ofs)
7660 asm_fprintf (f, "\tadd\t%r, %r, #4\n", SP_REGNUM, SP_REGNUM);
7661 else
7662 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM, SP_REGNUM);
7664 else if (saved_regs_mask)
7665 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, saved_regs_mask);
7667 if (current_function_pretend_args_size)
7669 /* Unwind the pre-pushed regs. */
7670 operands[0] = operands[1] = stack_pointer_rtx;
7671 operands[2] = GEN_INT (current_function_pretend_args_size);
7672 output_add_immediate (operands);
7676 #if 0
7677 if (ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER)
7678 /* Adjust the stack to remove the exception handler stuff. */
7679 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
7680 REGNO (eh_ofs));
7681 #endif
7683 if (! really_return)
7684 return "";
7686 /* Generate the return instruction. */
7687 switch ((int) ARM_FUNC_TYPE (func_type))
7689 case ARM_FT_EXCEPTION_HANDLER:
7690 /* Even in 26-bit mode we do a mov (rather than a movs)
7691 because we don't have the PSR bits set in the address. */
7692 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, EXCEPTION_LR_REGNUM);
7693 break;
7695 case ARM_FT_ISR:
7696 case ARM_FT_FIQ:
7697 asm_fprintf (f, "\tsubs\t%r, %r, #4\n", PC_REGNUM, LR_REGNUM);
7698 break;
7700 case ARM_FT_EXCEPTION:
7701 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
7702 break;
7704 case ARM_FT_INTERWORKED:
7705 asm_fprintf (f, "\tbx\t%r\n", LR_REGNUM);
7706 break;
7708 default:
7709 if (frame_pointer_needed)
7710 /* If we used the frame pointer then the return address
7711 will have been loaded off the stack directly into the
7712 PC, so there is no need to issue a MOV instruction
7713 here. */
7715 else if (current_function_pretend_args_size == 0
7716 && (saved_regs_mask & (1 << LR_REGNUM)))
7717 /* Similarly we may have been able to load LR into the PC
7718 even if we did not create a stack frame. */
7720 else if (TARGET_APCS_32)
7721 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, LR_REGNUM);
7722 else
7723 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
7724 break;
7727 return "";
7730 static void
7731 arm_output_function_epilogue (file, frame_size)
7732 FILE *file ATTRIBUTE_UNUSED;
7733 HOST_WIDE_INT frame_size;
7735 if (TARGET_THUMB)
7737 /* ??? Probably not safe to set this here, since it assumes that a
7738 function will be emitted as assembly immediately after we generate
7739 RTL for it. This does not happen for inline functions. */
7740 return_used_this_function = 0;
7742 else
7744 if (use_return_insn (FALSE)
7745 && return_used_this_function
7746 && (frame_size + current_function_outgoing_args_size) != 0
7747 && !frame_pointer_needed)
7748 abort ();
7750 /* Reset the ARM-specific per-function variables. */
7751 after_arm_reorg = 0;
7755 /* Generate and emit an insn that we will recognize as a push_multi.
7756 Unfortunately, since this insn does not reflect very well the actual
7757 semantics of the operation, we need to annotate the insn for the benefit
7758 of DWARF2 frame unwind information. */
7760 static rtx
7761 emit_multi_reg_push (mask)
7762 int mask;
7764 int num_regs = 0;
7765 int num_dwarf_regs;
7766 int i, j;
7767 rtx par;
7768 rtx dwarf;
7769 int dwarf_par_index;
7770 rtx tmp, reg;
7772 for (i = 0; i <= LAST_ARM_REGNUM; i++)
7773 if (mask & (1 << i))
7774 num_regs++;
7776 if (num_regs == 0 || num_regs > 16)
7777 abort ();
7779 /* We don't record the PC in the dwarf frame information. */
7780 num_dwarf_regs = num_regs;
7781 if (mask & (1 << PC_REGNUM))
7782 num_dwarf_regs--;
7784 /* For the body of the insn we are going to generate an UNSPEC in
7785 parallel with several USEs. This allows the insn to be recognised
7786 by the push_multi pattern in the arm.md file. The insn looks
7787 something like this:
7789 (parallel [
7790 (set (mem:BLK (pre_dec:BLK (reg:SI sp)))
7791 (unspec:BLK [(reg:SI r4)] UNSPEC_PUSH_MULT))
7792 (use (reg:SI 11 fp))
7793 (use (reg:SI 12 ip))
7794 (use (reg:SI 14 lr))
7795 (use (reg:SI 15 pc))
7798 For the frame note however, we try to be more explicit and actually
7799 show each register being stored into the stack frame, plus a (single)
7800 decrement of the stack pointer. We do it this way in order to be
7801 friendly to the stack unwinding code, which only wants to see a single
7802 stack decrement per instruction. The RTL we generate for the note looks
7803 something like this:
7805 (sequence [
7806 (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int -20)))
7807 (set (mem:SI (reg:SI sp)) (reg:SI r4))
7808 (set (mem:SI (plus:SI (reg:SI sp) (const_int 4))) (reg:SI fp))
7809 (set (mem:SI (plus:SI (reg:SI sp) (const_int 8))) (reg:SI ip))
7810 (set (mem:SI (plus:SI (reg:SI sp) (const_int 12))) (reg:SI lr))
7813 This sequence is used both by the code to support stack unwinding for
7814 exceptions handlers and the code to generate dwarf2 frame debugging. */
7816 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_regs));
7817 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (num_dwarf_regs + 1));
7818 RTX_FRAME_RELATED_P (dwarf) = 1;
7819 dwarf_par_index = 1;
7821 for (i = 0; i <= LAST_ARM_REGNUM; i++)
7823 if (mask & (1 << i))
7825 reg = gen_rtx_REG (SImode, i);
7827 XVECEXP (par, 0, 0)
7828 = gen_rtx_SET (VOIDmode,
7829 gen_rtx_MEM (BLKmode,
7830 gen_rtx_PRE_DEC (BLKmode,
7831 stack_pointer_rtx)),
7832 gen_rtx_UNSPEC (BLKmode,
7833 gen_rtvec (1, reg),
7834 UNSPEC_PUSH_MULT));
7836 if (i != PC_REGNUM)
7838 tmp = gen_rtx_SET (VOIDmode,
7839 gen_rtx_MEM (SImode, stack_pointer_rtx),
7840 reg);
7841 RTX_FRAME_RELATED_P (tmp) = 1;
7842 XVECEXP (dwarf, 0, dwarf_par_index) = tmp;
7843 dwarf_par_index++;
7846 break;
7850 for (j = 1, i++; j < num_regs; i++)
7852 if (mask & (1 << i))
7854 reg = gen_rtx_REG (SImode, i);
7856 XVECEXP (par, 0, j) = gen_rtx_USE (VOIDmode, reg);
7858 if (i != PC_REGNUM)
7860 tmp = gen_rtx_SET (VOIDmode,
7861 gen_rtx_MEM (SImode,
7862 plus_constant (stack_pointer_rtx,
7863 4 * j)),
7864 reg);
7865 RTX_FRAME_RELATED_P (tmp) = 1;
7866 XVECEXP (dwarf, 0, dwarf_par_index++) = tmp;
7869 j++;
7873 par = emit_insn (par);
7875 tmp = gen_rtx_SET (SImode,
7876 stack_pointer_rtx,
7877 gen_rtx_PLUS (SImode,
7878 stack_pointer_rtx,
7879 GEN_INT (-4 * num_regs)));
7880 RTX_FRAME_RELATED_P (tmp) = 1;
7881 XVECEXP (dwarf, 0, 0) = tmp;
7883 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
7884 REG_NOTES (par));
7885 return par;
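/* For illustration: emit_multi_reg_push (0xd810) — r4, fp, ip, lr and
   pc — assembles to

	stmfd	sp!, {r4, fp, ip, lr, pc}

   and its REG_FRAME_RELATED_EXPR note records one 20-byte stack
   decrement plus four individual stores; the PC slot is deliberately
   absent from the unwind information.  */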
7888 static rtx
7889 emit_sfm (base_reg, count)
7890 int base_reg;
7891 int count;
7893 rtx par;
7894 rtx dwarf;
7895 rtx tmp, reg;
7896 int i;
7898 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
7899 dwarf = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
7900 RTX_FRAME_RELATED_P (dwarf) = 1;
7902 reg = gen_rtx_REG (XFmode, base_reg++);
7904 XVECEXP (par, 0, 0)
7905 = gen_rtx_SET (VOIDmode,
7906 gen_rtx_MEM (BLKmode,
7907 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
7908 gen_rtx_UNSPEC (BLKmode,
7909 gen_rtvec (1, reg),
7910 UNSPEC_PUSH_MULT));
7912 tmp = gen_rtx_SET (VOIDmode,
7913 gen_rtx_MEM (XFmode,
7914 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
7915 reg);
7916 RTX_FRAME_RELATED_P (tmp) = 1;
7917 XVECEXP (dwarf, 0, count - 1) = tmp;
7919 for (i = 1; i < count; i++)
7921 reg = gen_rtx_REG (XFmode, base_reg++);
7922 XVECEXP (par, 0, i) = gen_rtx_USE (VOIDmode, reg);
7924 tmp = gen_rtx_SET (VOIDmode,
7925 gen_rtx_MEM (XFmode,
7926 gen_rtx_PRE_DEC (BLKmode,
7927 stack_pointer_rtx)),
7928 reg);
7929 RTX_FRAME_RELATED_P (tmp) = 1;
7930 XVECEXP (dwarf, 0, count - i - 1) = tmp;
7933 par = emit_insn (par);
7934 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
7935 REG_NOTES (par));
7936 return par;
7939 /* Compute the distance from register FROM to register TO.
7940 These can be the arg pointer (26), the soft frame pointer (25),
7941 the stack pointer (13) or the hard frame pointer (11).
7942 Typical stack layout looks like this:
7944 old stack pointer -> | |
7945 ----
7946 | | \
7947 | | saved arguments for
7948 | | vararg functions
7949 | | /
7951 hard FP & arg pointer -> | | \
7952 | | stack
7953 | | frame
7954 | | /
7956 | | \
7957 | | call saved
7958 | | registers
7959 soft frame pointer -> | | /
7961 | | \
7962 | | local
7963 | | variables
7964 | | /
7966 | | \
7967 | | outgoing
7968 | | arguments
7969 current stack pointer -> | | /
7972 For a given function some or all of these stack components
7973 may not be needed, giving rise to the possibility of
7974 eliminating some of the registers.
7976 The values returned by this function must reflect the behaviour
7977 of arm_expand_prologue() and arm_compute_save_reg_mask().
7979 The sign of the number returned reflects the direction of stack
7980 growth, so the values are positive for all eliminations except
7981 from the soft frame pointer to the hard frame pointer. */
7983 unsigned int
7984 arm_compute_initial_elimination_offset (from, to)
7985 unsigned int from;
7986 unsigned int to;
7988 unsigned int local_vars = (get_frame_size () + 3) & ~3;
7989 unsigned int outgoing_args = current_function_outgoing_args_size;
7990 unsigned int stack_frame;
7991 unsigned int call_saved_registers;
7992 unsigned long func_type;
7994 func_type = arm_current_func_type ();
7996 /* Volatile functions never return, so there is
7997 no need to save call saved registers. */
7998 call_saved_registers = 0;
7999 if (! IS_VOLATILE (func_type))
8001 unsigned int reg_mask;
8002 unsigned int reg;
8004 /* Make sure that we compute which registers will be saved
8005 on the stack using the same algorithm that is used by
8006 arm_compute_save_reg_mask(). */
8007 reg_mask = arm_compute_save_reg0_reg12_mask ();
8009 /* Now count the number of bits set in save_reg_mask.
8010 For each set bit we need 4 bytes of stack space. */
8011 while (reg_mask)
8013 call_saved_registers += 4;
8014 reg_mask = reg_mask & ~ (reg_mask & - reg_mask);
8017 if (regs_ever_live[LR_REGNUM]
8018 /* If a stack frame is going to be created, the LR will
8019 be saved as part of that, so we do not need to allow
8020 for it here. */
8021 && ! frame_pointer_needed)
8022 call_saved_registers += 4;
8024 /* If the hard floating point registers are going to be
8025 used then they must be saved on the stack as well.
8026 Each register occupies 12 bytes of stack space. */
8027 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg ++)
8028 if (regs_ever_live[reg] && ! call_used_regs[reg])
8029 call_saved_registers += 12;
8032 /* The stack frame contains 4 registers - the old frame pointer,
8033 the old stack pointer, the return address and PC of the start
8034 of the function. */
8035 stack_frame = frame_pointer_needed ? 16 : 0;
8037 /* OK, now we have enough information to compute the distances.
8038 There must be an entry in these switch tables for each pair
8039 of registers in ELIMINABLE_REGS, even if some of the entries
8040 seem to be redundant or useless. */
8041 switch (from)
8043 case ARG_POINTER_REGNUM:
8044 switch (to)
8046 case THUMB_HARD_FRAME_POINTER_REGNUM:
8047 return 0;
8049 case FRAME_POINTER_REGNUM:
8050 /* This is the reverse of the soft frame pointer
8051 to hard frame pointer elimination below. */
8052 if (call_saved_registers == 0 && stack_frame == 0)
8053 return 0;
8054 return (call_saved_registers + stack_frame - 4);
8056 case ARM_HARD_FRAME_POINTER_REGNUM:
8057 /* If there is no stack frame then the hard
8058 frame pointer and the arg pointer coincide. */
8059 if (stack_frame == 0 && call_saved_registers != 0)
8060 return 0;
8061 /* FIXME: Not sure about this. Maybe we should always return 0 ? */
8062 return (frame_pointer_needed
8063 && current_function_needs_context
8064 && ! cfun->machine->uses_anonymous_args) ? 4 : 0;
8066 case STACK_POINTER_REGNUM:
8067 /* If nothing has been pushed on the stack at all
8068 then this will return -4. This *is* correct! */
8069 return call_saved_registers + stack_frame + local_vars + outgoing_args - 4;
8071 default:
8072 abort ();
8074 break;
8076 case FRAME_POINTER_REGNUM:
8077 switch (to)
8079 case THUMB_HARD_FRAME_POINTER_REGNUM:
8080 return 0;
8082 case ARM_HARD_FRAME_POINTER_REGNUM:
8083 /* The hard frame pointer points to the top entry in the
8084 stack frame. The soft frame pointer to the bottom entry
8085 in the stack frame. If there is no stack frame at all,
8086 then they are identical. */
8087 if (call_saved_registers == 0 && stack_frame == 0)
8088 return 0;
8089 return - (call_saved_registers + stack_frame - 4);
8091 case STACK_POINTER_REGNUM:
8092 return local_vars + outgoing_args;
8094 default:
8095 abort ();
8097 break;
8099 default:
8100 /* You cannot eliminate from the stack pointer.
8101 In theory you could eliminate from the hard frame
8102 pointer to the stack pointer, but this will never
8103 happen, since if a stack frame is not needed the
8104 hard frame pointer will never be used. */
8105 abort ();
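/* A standalone sketch of the arithmetic above: the saved-register
   bytes come from a population count of the mask (four bytes per set
   bit), so e.g. eliminating the arg pointer into the stack pointer
   with 8 bytes of saved registers, a 16-byte stack frame, 12 bytes of
   locals and 8 bytes of outgoing arguments yields
   8 + 16 + 12 + 8 - 4 = 40.  */
#if 0
static unsigned int
sketch_saved_register_bytes (unsigned long mask)
{
  unsigned int bytes = 0;

  while (mask)
    {
      bytes += 4;
      mask &= mask - 1;	/* clear the lowest set bit */
    }

  return bytes;
}
#endif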
8109 /* Generate the prologue instructions for entry into an ARM function. */
8111 void
8112 arm_expand_prologue ()
8114 int reg;
8115 rtx amount;
8116 rtx insn;
8117 rtx ip_rtx;
8118 unsigned long live_regs_mask;
8119 unsigned long func_type;
8120 int fp_offset = 0;
8121 int saved_pretend_args = 0;
8122 unsigned int args_to_push;
8124 func_type = arm_current_func_type ();
8126 /* Naked functions don't have prologues. */
8127 if (IS_NAKED (func_type))
8128 return;
8130 /* Make a copy of c_f_p_a_s as we may need to modify it locally. */
8131 args_to_push = current_function_pretend_args_size;
8133 /* Compute which registers we will have to save onto the stack. */
8134 live_regs_mask = arm_compute_save_reg_mask ();
8136 ip_rtx = gen_rtx_REG (SImode, IP_REGNUM);
8138 if (frame_pointer_needed)
8140 if (IS_INTERRUPT (func_type))
8142 /* Interrupt functions must not corrupt any registers.
8143 Creating a frame pointer however, corrupts the IP
8144 register, so we must push it first. */
8145 insn = emit_multi_reg_push (1 << IP_REGNUM);
8147 /* Do not set RTX_FRAME_RELATED_P on this insn.
8148 The dwarf stack unwinding code only wants to see one
8149 stack decrement per function, and this is not it. If
8150 this instruction is labeled as being part of the frame
8151 creation sequence then dwarf2out_frame_debug_expr will
8152 abort when it encounters the assignment of IP to FP
8153 later on, since the use of SP here establishes SP as
8154 the CFA register and not IP.
8156 Anyway this instruction is not really part of the stack
8157 frame creation although it is part of the prologue. */
8159 else if (IS_NESTED (func_type))
8161 /* The static chain register is the same as the IP register
8162 used as a scratch register during stack frame creation.
8163 To get around this we need to find somewhere to store IP
8164 whilst the frame is being created. We try the following
8165 places in order:
8167 1. The last argument register.
8168 2. A slot on the stack above the frame. (This only
8169 works if the function is not a varargs function).
8170 3. Register r3, after pushing the argument registers
8171 onto the stack.
8173 Note - we only need to tell the dwarf2 backend about the SP
8174 adjustment in the second variant; the static chain register
8175 doesn't need to be unwound, as it doesn't contain a value
8176 inherited from the caller. */
8178 if (regs_ever_live[3] == 0)
8180 insn = gen_rtx_REG (SImode, 3);
8181 insn = gen_rtx_SET (SImode, insn, ip_rtx);
8182 insn = emit_insn (insn);
8184 else if (args_to_push == 0)
8186 rtx dwarf;
8187 insn = gen_rtx_PRE_DEC (SImode, stack_pointer_rtx);
8188 insn = gen_rtx_MEM (SImode, insn);
8189 insn = gen_rtx_SET (VOIDmode, insn, ip_rtx);
8190 insn = emit_insn (insn);
8192 fp_offset = 4;
8194 /* Just tell the dwarf backend that we adjusted SP. */
8195 dwarf = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
8196 gen_rtx_PLUS (SImode, stack_pointer_rtx,
8197 GEN_INT (-fp_offset)));
8198 RTX_FRAME_RELATED_P (insn) = 1;
8199 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8200 dwarf, REG_NOTES (insn));
8202 else
8204 /* Store the args on the stack. */
8205 if (cfun->machine->uses_anonymous_args)
8206 insn = emit_multi_reg_push
8207 ((0xf0 >> (args_to_push / 4)) & 0xf);
8208 else
8209 insn = emit_insn
8210 (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
8211 GEN_INT (- args_to_push)));
8213 RTX_FRAME_RELATED_P (insn) = 1;
8215 saved_pretend_args = 1;
8216 fp_offset = args_to_push;
8217 args_to_push = 0;
8219 /* Now reuse r3 to preserve IP. */
8220 insn = gen_rtx_REG (SImode, 3);
8221 insn = gen_rtx_SET (SImode, insn, ip_rtx);
8222 (void) emit_insn (insn);
8226 if (fp_offset)
8228 insn = gen_rtx_PLUS (SImode, stack_pointer_rtx, GEN_INT (fp_offset));
8229 insn = gen_rtx_SET (SImode, ip_rtx, insn);
8231 else
8232 insn = gen_movsi (ip_rtx, stack_pointer_rtx);
8234 insn = emit_insn (insn);
8235 RTX_FRAME_RELATED_P (insn) = 1;
8238 if (args_to_push)
8240 /* Push the argument registers, or reserve space for them. */
8241 if (cfun->machine->uses_anonymous_args)
8242 insn = emit_multi_reg_push
8243 ((0xf0 >> (args_to_push / 4)) & 0xf);
8244 else
8245 insn = emit_insn
8246 (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
8247 GEN_INT (- args_to_push)));
8248 RTX_FRAME_RELATED_P (insn) = 1;
8251 /* If this is an interrupt service routine, and the link register is
8252 going to be pushed, subtracting four now will mean that the
8253 function return can be done with a single instruction. */
8254 if ((func_type == ARM_FT_ISR || func_type == ARM_FT_FIQ)
8255 && (live_regs_mask & (1 << LR_REGNUM)) != 0)
8257 emit_insn (gen_rtx_SET (SImode,
8258 gen_rtx_REG (SImode, LR_REGNUM),
8259 gen_rtx_PLUS (SImode,
8260 gen_rtx_REG (SImode, LR_REGNUM),
8261 GEN_INT (-4))));
8264 if (live_regs_mask)
8266 insn = emit_multi_reg_push (live_regs_mask);
8267 RTX_FRAME_RELATED_P (insn) = 1;
8270 if (! IS_VOLATILE (func_type))
8272 /* Save any floating point call-saved registers used by this function. */
8273 if (arm_fpu_arch == FP_SOFT2)
8275 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg --)
8276 if (regs_ever_live[reg] && !call_used_regs[reg])
8278 insn = gen_rtx_PRE_DEC (XFmode, stack_pointer_rtx);
8279 insn = gen_rtx_MEM (XFmode, insn);
8280 insn = emit_insn (gen_rtx_SET (VOIDmode, insn,
8281 gen_rtx_REG (XFmode, reg)));
8282 RTX_FRAME_RELATED_P (insn) = 1;
8285 else
8287 int start_reg = LAST_ARM_FP_REGNUM;
8289 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg --)
8291 if (regs_ever_live[reg] && !call_used_regs[reg])
8293 if (start_reg - reg == 3)
8295 insn = emit_sfm (reg, 4);
8296 RTX_FRAME_RELATED_P (insn) = 1;
8297 start_reg = reg - 1;
8300 else
8302 if (start_reg != reg)
8304 insn = emit_sfm (reg + 1, start_reg - reg);
8305 RTX_FRAME_RELATED_P (insn) = 1;
8307 start_reg = reg - 1;
8311 if (start_reg != reg)
8313 insn = emit_sfm (reg + 1, start_reg - reg);
8314 RTX_FRAME_RELATED_P (insn) = 1;
8319 if (frame_pointer_needed)
8321 /* Create the new frame pointer. */
8322 insn = GEN_INT (-(4 + args_to_push + fp_offset));
8323 insn = emit_insn (gen_addsi3 (hard_frame_pointer_rtx, ip_rtx, insn));
8324 RTX_FRAME_RELATED_P (insn) = 1;
8326 if (IS_NESTED (func_type))
8328 /* Recover the static chain register. */
8329 if (regs_ever_live [3] == 0
8330 || saved_pretend_args)
8331 insn = gen_rtx_REG (SImode, 3);
8332 else /* if (current_function_pretend_args_size == 0) */
8334 insn = gen_rtx_PLUS (SImode, hard_frame_pointer_rtx, GEN_INT (4));
8335 insn = gen_rtx_MEM (SImode, insn);
8338 emit_insn (gen_rtx_SET (SImode, ip_rtx, insn));
8339 /* Add a USE to stop propagate_one_insn() from barfing. */
8340 emit_insn (gen_prologue_use (ip_rtx));
8344 amount = GEN_INT (-(get_frame_size ()
8345 + current_function_outgoing_args_size));
8347 if (amount != const0_rtx)
8349 /* This add can produce multiple insns for a large constant, so we
8350 need to get tricky. */
8351 rtx last = get_last_insn ();
8352 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
8353 amount));
8356 last = last ? NEXT_INSN (last) : get_insns ();
8357 RTX_FRAME_RELATED_P (last) = 1;
8359 while (last != insn);
8361 /* If the frame pointer is needed, emit a special barrier that
8362 will prevent the scheduler from moving stores to the frame
8363 before the stack adjustment. */
8364 if (frame_pointer_needed)
8366 rtx unspec = gen_rtx_UNSPEC (SImode,
8367 gen_rtvec (2, stack_pointer_rtx,
8368 hard_frame_pointer_rtx),
8369 UNSPEC_PRLG_STK);
8371 insn = emit_insn (gen_rtx_CLOBBER (VOIDmode,
8372 gen_rtx_MEM (BLKmode, unspec)));
8376 /* If we are profiling, make sure no instructions are scheduled before
8377 the call to mcount. Similarly if the user has requested no
8378 scheduling in the prologue. */
8379 if (current_function_profile || TARGET_NO_SCHED_PRO)
8380 emit_insn (gen_blockage ());
8382 /* If the link register is being kept alive, with the return address in it,
8383 then make sure that it does not get reused by the ce2 pass. */
8384 if ((live_regs_mask & (1 << LR_REGNUM)) == 0)
8386 emit_insn (gen_prologue_use (gen_rtx_REG (SImode, LR_REGNUM)));
8387 cfun->machine->lr_save_eliminated = 1;
8391 /* If CODE is 'd', then X is a condition operand and the instruction
8392 should only be executed if the condition is true.
8393 If CODE is 'D', then X is a condition operand and the instruction
8394 should only be executed if the condition is false: however, if the mode
8395 of the comparison is CCFPEmode, then always execute the instruction -- we
8396 do this because in these circumstances !GE does not necessarily imply LT;
8397 in these cases the instruction pattern will take care to make sure that
8398 an instruction containing %d will follow, thereby undoing the effects of
8399 doing this instruction unconditionally.
8400 If CODE is 'N' then X is a floating point operand that must be negated
8401 before output.
8402 If CODE is 'B' then output a bitwise inverted value of X (a const int).
8403 If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */
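/* For instance (a hypothetical template, not one taken from arm.md),
   a conditional pattern whose operand 3 is (lt (...) (...)) could use

       "mov%d3\t%0, #1\;mov%D3\t%0, #0"

   to emit "movlt r0, #1" followed by "movge r0, #0" (modulo the
   registers involved).  */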
8405 void
8406 arm_print_operand (stream, x, code)
8407 FILE * stream;
8408 rtx x;
8409 int code;
8411 switch (code)
8413 case '@':
8414 fputs (ASM_COMMENT_START, stream);
8415 return;
8417 case '_':
8418 fputs (user_label_prefix, stream);
8419 return;
8421 case '|':
8422 fputs (REGISTER_PREFIX, stream);
8423 return;
8425 case '?':
8426 if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
8428 if (TARGET_THUMB || current_insn_predicate != NULL)
8429 abort ();
8431 fputs (arm_condition_codes[arm_current_cc], stream);
8433 else if (current_insn_predicate)
8435 enum arm_cond_code code;
8437 if (TARGET_THUMB)
8438 abort ();
8440 code = get_arm_condition_code (current_insn_predicate);
8441 fputs (arm_condition_codes[code], stream);
8443 return;
8445 case 'N':
8447 REAL_VALUE_TYPE r;
8448 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
8449 r = REAL_VALUE_NEGATE (r);
8450 fprintf (stream, "%s", fp_const_from_val (&r));
8452 return;
8454 case 'B':
8455 if (GET_CODE (x) == CONST_INT)
8457 HOST_WIDE_INT val;
8458 val = ARM_SIGN_EXTEND (~INTVAL (x));
8459 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
8461 else
8463 putc ('~', stream);
8464 output_addr_const (stream, x);
8466 return;
8468 case 'i':
8469 fprintf (stream, "%s", arithmetic_instr (x, 1));
8470 return;
8472 case 'I':
8473 fprintf (stream, "%s", arithmetic_instr (x, 0));
8474 return;
8476 case 'S':
8478 HOST_WIDE_INT val;
8479 const char * shift = shift_op (x, &val);
8481 if (shift)
8483 fprintf (stream, ", %s ", shift);
8484 if (val == -1)
8485 arm_print_operand (stream, XEXP (x, 1), 0);
8486 else
8488 fputc ('#', stream);
8489 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
8493 return;
8495 /* An explanation of the 'Q', 'R' and 'H' register operands:
8497 In a pair of registers containing a DI or DF value the 'Q'
8498 operand returns the register number of the register containing
8499 the least significant part of the value. The 'R' operand returns
8500 the register number of the register containing the most
8501 significant part of the value.
8503 The 'H' operand returns the higher of the two register numbers.
8504 On a run where WORDS_BIG_ENDIAN is true the 'H' operand is the
8505 same as the 'Q' operand, since the most significant part of the
8506 value is held in the lower number register. The reverse is true
8507 on systems where WORDS_BIG_ENDIAN is false.
8509 The purpose of these operands is to distinguish between cases
8510 where the endian-ness of the values is important (for example
8511 when they are added together), and cases where the endian-ness
8512 is irrelevant, but the order of register operations is important.
8513 For example when loading a value from memory into a register
8514 pair, the endian-ness does not matter. Provided that the value
8515 from the lower memory address is put into the lower numbered
8516 register, and the value from the higher address is put into the
8517 higher numbered register, the load will work regardless of whether
8518 the value being loaded is big-wordian or little-wordian. The
8519 order of the two register loads can matter however, if the address
8520 of the memory location is actually held in one of the registers
8521 being overwritten by the load. */
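/* A concrete illustration: take a DImode value held in the pair
   r0/r1. With WORDS_BIG_ENDIAN false, %Q prints "r0" (the least
   significant word), %R prints "r1" (the most significant word)
   and %H prints "r1" (the higher register number). With
   WORDS_BIG_ENDIAN true, %Q and %H both print "r1" while %R
   prints "r0".  */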
8522 case 'Q':
8523 if (REGNO (x) > LAST_ARM_REGNUM)
8524 abort ();
8525 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 1 : 0));
8526 return;
8528 case 'R':
8529 if (REGNO (x) > LAST_ARM_REGNUM)
8530 abort ();
8531 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 0 : 1));
8532 return;
8534 case 'H':
8535 if (REGNO (x) > LAST_ARM_REGNUM)
8536 abort ();
8537 asm_fprintf (stream, "%r", REGNO (x) + 1);
8538 return;
8540 case 'm':
8541 asm_fprintf (stream, "%r",
8542 GET_CODE (XEXP (x, 0)) == REG
8543 ? REGNO (XEXP (x, 0)) : REGNO (XEXP (XEXP (x, 0), 0)));
8544 return;
8546 case 'M':
8547 asm_fprintf (stream, "{%r-%r}",
8548 REGNO (x),
8549 REGNO (x) + NUM_REGS (GET_MODE (x)) - 1);
8550 return;
8552 case 'd':
8553 if (!x)
8554 return;
8556 if (TARGET_ARM)
8557 fputs (arm_condition_codes[get_arm_condition_code (x)],
8558 stream);
8559 else
8560 fputs (thumb_condition_code (x, 0), stream);
8561 return;
8563 case 'D':
8564 if (!x)
8565 return;
8567 if (TARGET_ARM)
8568 fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
8569 (get_arm_condition_code (x))],
8570 stream);
8571 else
8572 fputs (thumb_condition_code (x, 1), stream);
8573 return;
8575 default:
8576 if (x == 0)
8577 abort ();
8579 if (GET_CODE (x) == REG)
8580 asm_fprintf (stream, "%r", REGNO (x));
8581 else if (GET_CODE (x) == MEM)
8583 output_memory_reference_mode = GET_MODE (x);
8584 output_address (XEXP (x, 0));
8586 else if (GET_CODE (x) == CONST_DOUBLE)
8587 fprintf (stream, "#%s", fp_immediate_constant (x));
8588 else if (GET_CODE (x) == NEG)
8589 abort (); /* This should never happen now. */
8590 else
8592 fputc ('#', stream);
8593 output_addr_const (stream, x);
8598 #ifndef AOF_ASSEMBLER
8599 /* Target hook for assembling integer objects. The ARM version needs to
8600 handle word-sized values specially. */
8602 static bool
8603 arm_assemble_integer (x, size, aligned_p)
8604 rtx x;
8605 unsigned int size;
8606 int aligned_p;
8608 if (size == UNITS_PER_WORD && aligned_p)
8610 fputs ("\t.word\t", asm_out_file);
8611 output_addr_const (asm_out_file, x);
8613 /* Mark symbols as position independent. We only do this in the
8614 .text segment, not in the .data segment. */
8615 if (NEED_GOT_RELOC && flag_pic && making_const_table &&
8616 (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF))
8618 if (GET_CODE (x) == SYMBOL_REF
8619 && (CONSTANT_POOL_ADDRESS_P (x)
8620 || ENCODED_SHORT_CALL_ATTR_P (XSTR (x, 0))))
8621 fputs ("(GOTOFF)", asm_out_file);
8622 else if (GET_CODE (x) == LABEL_REF)
8623 fputs ("(GOTOFF)", asm_out_file);
8624 else
8625 fputs ("(GOT)", asm_out_file);
8627 fputc ('\n', asm_out_file);
8628 return true;
8631 return default_assemble_integer (x, size, aligned_p);
8633 #endif
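/* By way of illustration (the symbol and label names are made up),
   with -fpic this emits constant-table entries such as

       .word   some_global(GOT)
       .word   .L5(GOTOFF)

   for an ordinary symbol and for a label (or constant-pool /
   short-call symbol) respectively; anything else falls through to
   default_assemble_integer.  */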
8635 /* A finite state machine takes care of noticing whether or not instructions
8636 can be conditionally executed, and thus decrease execution time and code
8637 size by deleting branch instructions. The fsm is controlled by
8638 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
8640 /* The states of the fsm controlling condition codes are:
8641 0: normal, do nothing special
8642 1: make ASM_OUTPUT_OPCODE not output this instruction
8643 2: make ASM_OUTPUT_OPCODE not output this instruction
8644 3: make instructions conditional
8645 4: make instructions conditional
8647 State transitions (state->state by whom under condition):
8648 0 -> 1 final_prescan_insn if the `target' is a label
8649 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
8650 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
8651 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
8652 3 -> 0 ASM_OUTPUT_INTERNAL_LABEL if the `target' label is reached
8653 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
8654 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
8655 (the target insn is arm_target_insn).
8657 If the jump clobbers the conditions then we use states 2 and 4.
8659 A similar thing can be done with conditional return insns.
8661 XXX In case the `target' is an unconditional branch, this conditionalising
8662 of the instructions always reduces code size, but not always execution
8663 time. But then, I want to reduce the code size to somewhere near what
8664 /bin/cc produces. */
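/* To illustrate the payoff (a sketch - the exact code depends on the
   surrounding rtl), a short forward branch such as

       cmp   r0, #0
       beq   .L1
       add   r1, r1, #1
     .L1:

   is rewritten by this fsm as

       cmp   r0, #0
       addne r1, r1, #1

   eliminating the branch at the cost of one conditionalised
   instruction.  */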
8666 /* Returns the index of the ARM condition code string in
8667 `arm_condition_codes'. COMPARISON should be an rtx like
8668 `(eq (...) (...))'. */
8670 static enum arm_cond_code
8671 get_arm_condition_code (comparison)
8672 rtx comparison;
8674 enum machine_mode mode = GET_MODE (XEXP (comparison, 0));
8675 int code;
8676 enum rtx_code comp_code = GET_CODE (comparison);
8678 if (GET_MODE_CLASS (mode) != MODE_CC)
8679 mode = SELECT_CC_MODE (comp_code, XEXP (comparison, 0),
8680 XEXP (comparison, 1));
8682 switch (mode)
8684 case CC_DNEmode: code = ARM_NE; goto dominance;
8685 case CC_DEQmode: code = ARM_EQ; goto dominance;
8686 case CC_DGEmode: code = ARM_GE; goto dominance;
8687 case CC_DGTmode: code = ARM_GT; goto dominance;
8688 case CC_DLEmode: code = ARM_LE; goto dominance;
8689 case CC_DLTmode: code = ARM_LT; goto dominance;
8690 case CC_DGEUmode: code = ARM_CS; goto dominance;
8691 case CC_DGTUmode: code = ARM_HI; goto dominance;
8692 case CC_DLEUmode: code = ARM_LS; goto dominance;
8693 case CC_DLTUmode: code = ARM_CC;
8695 dominance:
8696 if (comp_code != EQ && comp_code != NE)
8697 abort ();
8699 if (comp_code == EQ)
8700 return ARM_INVERSE_CONDITION_CODE (code);
8701 return code;
8703 case CC_NOOVmode:
8704 switch (comp_code)
8706 case NE: return ARM_NE;
8707 case EQ: return ARM_EQ;
8708 case GE: return ARM_PL;
8709 case LT: return ARM_MI;
8710 default: abort ();
8713 case CC_Zmode:
8714 switch (comp_code)
8716 case NE: return ARM_NE;
8717 case EQ: return ARM_EQ;
8718 default: abort ();
8721 case CCFPEmode:
8722 case CCFPmode:
8723 /* These encodings assume that AC=1 in the FPA system control
8724 byte. This allows us to handle all cases except UNEQ and
8725 LTGT. */
8726 switch (comp_code)
8728 case GE: return ARM_GE;
8729 case GT: return ARM_GT;
8730 case LE: return ARM_LS;
8731 case LT: return ARM_MI;
8732 case NE: return ARM_NE;
8733 case EQ: return ARM_EQ;
8734 case ORDERED: return ARM_VC;
8735 case UNORDERED: return ARM_VS;
8736 case UNLT: return ARM_LT;
8737 case UNLE: return ARM_LE;
8738 case UNGT: return ARM_HI;
8739 case UNGE: return ARM_PL;
8740 /* UNEQ and LTGT do not have a representation. */
8741 case UNEQ: /* Fall through. */
8742 case LTGT: /* Fall through. */
8743 default: abort ();
8746 case CC_SWPmode:
8747 switch (comp_code)
8749 case NE: return ARM_NE;
8750 case EQ: return ARM_EQ;
8751 case GE: return ARM_LE;
8752 case GT: return ARM_LT;
8753 case LE: return ARM_GE;
8754 case LT: return ARM_GT;
8755 case GEU: return ARM_LS;
8756 case GTU: return ARM_CC;
8757 case LEU: return ARM_CS;
8758 case LTU: return ARM_HI;
8759 default: abort ();
8762 case CC_Cmode:
8763 switch (comp_code)
8765 case LTU: return ARM_CS;
8766 case GEU: return ARM_CC;
8767 default: abort ();
8770 case CCmode:
8771 switch (comp_code)
8773 case NE: return ARM_NE;
8774 case EQ: return ARM_EQ;
8775 case GE: return ARM_GE;
8776 case GT: return ARM_GT;
8777 case LE: return ARM_LE;
8778 case LT: return ARM_LT;
8779 case GEU: return ARM_CS;
8780 case GTU: return ARM_HI;
8781 case LEU: return ARM_LS;
8782 case LTU: return ARM_CC;
8783 default: abort ();
8786 default: abort ();
8789 abort ();
8793 void
8794 arm_final_prescan_insn (insn)
8795 rtx insn;
8797 /* BODY will hold the body of INSN. */
8798 rtx body = PATTERN (insn);
8800 /* This will be 1 if trying to repeat the trick, and things need to be
8801 reversed if it appears to fail. */
8802 int reverse = 0;
8804 /* A nonzero JUMP_CLOBBERS means that the condition codes are clobbered
8805 if a branch is taken, even if the rtl suggests otherwise. It also
8806 means that we have to grub around within the jump expression to find
8807 out what the conditions are when the jump isn't taken. */
8808 int jump_clobbers = 0;
8810 /* If we start with a return insn, we only succeed if we find another one. */
8811 int seeking_return = 0;
8813 /* START_INSN will hold the insn from where we start looking. This is the
8814 first insn after the following code_label if REVERSE is true. */
8815 rtx start_insn = insn;
8817 /* If in state 4, check if the target branch is reached, in order to
8818 change back to state 0. */
8819 if (arm_ccfsm_state == 4)
8821 if (insn == arm_target_insn)
8823 arm_target_insn = NULL;
8824 arm_ccfsm_state = 0;
8826 return;
8829 /* If in state 3, it is possible to repeat the trick, if this insn is an
8830 unconditional branch to a label, and immediately following this branch
8831 is the previous target label which is only used once, and the label this
8832 branch jumps to is not too far off. */
8833 if (arm_ccfsm_state == 3)
8835 if (simplejump_p (insn))
8837 start_insn = next_nonnote_insn (start_insn);
8838 if (GET_CODE (start_insn) == BARRIER)
8840 /* XXX Isn't this always a barrier? */
8841 start_insn = next_nonnote_insn (start_insn);
8843 if (GET_CODE (start_insn) == CODE_LABEL
8844 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
8845 && LABEL_NUSES (start_insn) == 1)
8846 reverse = TRUE;
8847 else
8848 return;
8850 else if (GET_CODE (body) == RETURN)
8852 start_insn = next_nonnote_insn (start_insn);
8853 if (GET_CODE (start_insn) == BARRIER)
8854 start_insn = next_nonnote_insn (start_insn);
8855 if (GET_CODE (start_insn) == CODE_LABEL
8856 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
8857 && LABEL_NUSES (start_insn) == 1)
8859 reverse = TRUE;
8860 seeking_return = 1;
8862 else
8863 return;
8865 else
8866 return;
8869 if (arm_ccfsm_state != 0 && !reverse)
8870 abort ();
8871 if (GET_CODE (insn) != JUMP_INSN)
8872 return;
8874 /* This jump might be paralleled with a clobber of the condition codes;
8875 the jump should always come first. */
8876 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
8877 body = XVECEXP (body, 0, 0);
8879 #if 0
8880 /* If this is a conditional return then we don't want to know */
8881 if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
8882 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
8883 && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
8884 || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
8885 return;
8886 #endif
8888 if (reverse
8889 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
8890 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
8892 int insns_skipped;
8893 int fail = FALSE, succeed = FALSE;
8894 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
8895 int then_not_else = TRUE;
8896 rtx this_insn = start_insn, label = 0;
8898 /* If the jump cannot be done with one instruction, we cannot
8899 conditionally execute the instruction in the inverse case. */
8900 if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
8902 jump_clobbers = 1;
8903 return;
8906 /* Register the insn jumped to. */
8907 if (reverse)
8909 if (!seeking_return)
8910 label = XEXP (SET_SRC (body), 0);
8912 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
8913 label = XEXP (XEXP (SET_SRC (body), 1), 0);
8914 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
8916 label = XEXP (XEXP (SET_SRC (body), 2), 0);
8917 then_not_else = FALSE;
8919 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
8920 seeking_return = 1;
8921 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
8923 seeking_return = 1;
8924 then_not_else = FALSE;
8926 else
8927 abort ();
8929 /* See how many insns this branch skips, and what kind of insns. If all
8930 insns are okay, and the label or unconditional branch to the same
8931 label is not too far away, succeed. */
8932 for (insns_skipped = 0;
8933 !fail && !succeed && insns_skipped++ < max_insns_skipped;)
8935 rtx scanbody;
8937 this_insn = next_nonnote_insn (this_insn);
8938 if (!this_insn)
8939 break;
8941 switch (GET_CODE (this_insn))
8943 case CODE_LABEL:
8944 /* Succeed if it is the target label, otherwise fail since
8945 control falls in from somewhere else. */
8946 if (this_insn == label)
8948 if (jump_clobbers)
8950 arm_ccfsm_state = 2;
8951 this_insn = next_nonnote_insn (this_insn);
8953 else
8954 arm_ccfsm_state = 1;
8955 succeed = TRUE;
8957 else
8958 fail = TRUE;
8959 break;
8961 case BARRIER:
8962 /* Succeed if the following insn is the target label.
8963 Otherwise fail.
8964 If return insns are used then the last insn in a function
8965 will be a barrier. */
8966 this_insn = next_nonnote_insn (this_insn);
8967 if (this_insn && this_insn == label)
8969 if (jump_clobbers)
8971 arm_ccfsm_state = 2;
8972 this_insn = next_nonnote_insn (this_insn);
8974 else
8975 arm_ccfsm_state = 1;
8976 succeed = TRUE;
8978 else
8979 fail = TRUE;
8980 break;
8982 case CALL_INSN:
8983 /* If using 32-bit addresses the cc is not preserved over
8984 calls. */
8985 if (TARGET_APCS_32)
8987 /* Succeed if the following insn is the target label,
8988 or if the following two insns are a barrier and
8989 the target label. */
8990 this_insn = next_nonnote_insn (this_insn);
8991 if (this_insn && GET_CODE (this_insn) == BARRIER)
8992 this_insn = next_nonnote_insn (this_insn);
8994 if (this_insn && this_insn == label
8995 && insns_skipped < max_insns_skipped)
8997 if (jump_clobbers)
8999 arm_ccfsm_state = 2;
9000 this_insn = next_nonnote_insn (this_insn);
9002 else
9003 arm_ccfsm_state = 1;
9004 succeed = TRUE;
9006 else
9007 fail = TRUE;
9009 break;
9011 case JUMP_INSN:
9012 /* If this is an unconditional branch to the same label, succeed.
9013 If it is to another label, do nothing. If it is conditional,
9014 fail. */
9015 /* XXX Probably, the tests for SET and the PC are unnecessary. */
9017 scanbody = PATTERN (this_insn);
9018 if (GET_CODE (scanbody) == SET
9019 && GET_CODE (SET_DEST (scanbody)) == PC)
9021 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
9022 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
9024 arm_ccfsm_state = 2;
9025 succeed = TRUE;
9027 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
9028 fail = TRUE;
9030 /* Fail if a conditional return is undesirable (eg on a
9031 StrongARM), but still allow this if optimizing for size. */
9032 else if (GET_CODE (scanbody) == RETURN
9033 && !use_return_insn (TRUE)
9034 && !optimize_size)
9035 fail = TRUE;
9036 else if (GET_CODE (scanbody) == RETURN
9037 && seeking_return)
9039 arm_ccfsm_state = 2;
9040 succeed = TRUE;
9042 else if (GET_CODE (scanbody) == PARALLEL)
9044 switch (get_attr_conds (this_insn))
9046 case CONDS_NOCOND:
9047 break;
9048 default:
9049 fail = TRUE;
9050 break;
9053 else
9054 fail = TRUE; /* Unrecognized jump (eg epilogue). */
9056 break;
9058 case INSN:
9059 /* Instructions using or affecting the condition codes make it
9060 fail. */
9061 scanbody = PATTERN (this_insn);
9062 if (!(GET_CODE (scanbody) == SET
9063 || GET_CODE (scanbody) == PARALLEL)
9064 || get_attr_conds (this_insn) != CONDS_NOCOND)
9065 fail = TRUE;
9066 break;
9068 default:
9069 break;
9072 if (succeed)
9074 if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
9075 arm_target_label = CODE_LABEL_NUMBER (label);
9076 else if (seeking_return || arm_ccfsm_state == 2)
9078 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
9080 this_insn = next_nonnote_insn (this_insn);
9081 if (this_insn && (GET_CODE (this_insn) == BARRIER
9082 || GET_CODE (this_insn) == CODE_LABEL))
9083 abort ();
9085 if (!this_insn)
9087 /* Oh, dear! We ran off the end... give up. */
9088 recog (PATTERN (insn), insn, NULL);
9089 arm_ccfsm_state = 0;
9090 arm_target_insn = NULL;
9091 return;
9093 arm_target_insn = this_insn;
9095 else
9096 abort ();
9097 if (jump_clobbers)
9099 if (reverse)
9100 abort ();
9101 arm_current_cc =
9102 get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
9103 0), 0), 1));
9104 if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
9105 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
9106 if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
9107 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
9109 else
9111 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
9112 what it was. */
9113 if (!reverse)
9114 arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
9115 0));
9118 if (reverse || then_not_else)
9119 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
9122 /* Restore recog_data (getting the attributes of other insns can
9123 destroy this array, but final.c assumes that it remains intact
9124 across this call; since the insn has been recognized already we
9125 call recog directly). */
9126 recog (PATTERN (insn), insn, NULL);
9130 /* Returns true if REGNO is a valid register
9131 for holding a quantity of type MODE. */
9134 arm_hard_regno_mode_ok (regno, mode)
9135 unsigned int regno;
9136 enum machine_mode mode;
9138 if (GET_MODE_CLASS (mode) == MODE_CC)
9139 return regno == CC_REGNUM;
9141 if (TARGET_THUMB)
9142 /* For the Thumb we only allow values bigger than SImode in
9143 registers 0 - 6, so that there is always a second low
9144 register available to hold the upper part of the value.
9145 We probably ought to ensure that the register is the
9146 start of an even numbered register pair. */
9147 return (NUM_REGS (mode) < 2) || (regno < LAST_LO_REGNUM);
9149 if (regno <= LAST_ARM_REGNUM)
9150 /* We allow any value to be stored in the general registers. */
9151 return 1;
9153 if ( regno == FRAME_POINTER_REGNUM
9154 || regno == ARG_POINTER_REGNUM)
9155 /* We only allow integers in the fake hard registers. */
9156 return GET_MODE_CLASS (mode) == MODE_INT;
9158 /* The only registers left are the FPU registers
9159 which we only allow to hold FP values. */
9160 return GET_MODE_CLASS (mode) == MODE_FLOAT
9161 && regno >= FIRST_ARM_FP_REGNUM
9162 && regno <= LAST_ARM_FP_REGNUM;
9166 arm_regno_class (regno)
9167 int regno;
9169 if (TARGET_THUMB)
9171 if (regno == STACK_POINTER_REGNUM)
9172 return STACK_REG;
9173 if (regno == CC_REGNUM)
9174 return CC_REG;
9175 if (regno < 8)
9176 return LO_REGS;
9177 return HI_REGS;
9180 if ( regno <= LAST_ARM_REGNUM
9181 || regno == FRAME_POINTER_REGNUM
9182 || regno == ARG_POINTER_REGNUM)
9183 return GENERAL_REGS;
9185 if (regno == CC_REGNUM)
9186 return NO_REGS;
9188 return FPU_REGS;
9191 /* Handle a special case when computing the offset
9192 of an argument from the frame pointer. */
9195 arm_debugger_arg_offset (value, addr)
9196 int value;
9197 rtx addr;
9199 rtx insn;
9201 /* We are only interested if dbxout_parms() failed to compute the offset. */
9202 if (value != 0)
9203 return 0;
9205 /* We can only cope with the case where the address is held in a register. */
9206 if (GET_CODE (addr) != REG)
9207 return 0;
9209 /* If we are using the frame pointer to point at the argument, then
9210 an offset of 0 is correct. */
9211 if (REGNO (addr) == (unsigned) HARD_FRAME_POINTER_REGNUM)
9212 return 0;
9214 /* If we are using the stack pointer to point at the
9215 argument, then an offset of 0 is correct. */
9216 if ((TARGET_THUMB || !frame_pointer_needed)
9217 && REGNO (addr) == SP_REGNUM)
9218 return 0;
9220 /* Oh dear. The argument is pointed to by a register rather
9221 than being held in a register, or being stored at a known
9222 offset from the frame pointer. Since GDB only understands
9223 those two kinds of argument we must translate the address
9224 held in the register into an offset from the frame pointer.
9225 We do this by searching through the insns for the function
9226 looking to see where this register gets its value. If the
9227 register is initialised from the frame pointer plus an offset
9228 then we are in luck and we can continue, otherwise we give up.
9230 This code is exercised by producing debugging information
9231 for a function with arguments like this:
9233 double func (double a, double b, int c, double d) {return d;}
9235 Without this code the stab for parameter 'd' will be set to
9236 an offset of 0 from the frame pointer, rather than 8. */
9238 /* The if() statement says:
9240 If the insn is a normal instruction
9241 and if the insn is setting the value in a register
9242 and if the register being set is the register holding the address of the argument
9243 and if the address is computed by an addition
9244 that involves adding to a register
9245 which is the frame pointer
9246 a constant integer
9248 then... */
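/* In rtl terms, we are looking for an insn of the shape

       (set (reg Rn) (plus (reg fp) (const_int 8)))

   where Rn is the register in ADDR; the const_int (8 in this
   sketch) is the value we return.  */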
9250 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9252 if ( GET_CODE (insn) == INSN
9253 && GET_CODE (PATTERN (insn)) == SET
9254 && REGNO (XEXP (PATTERN (insn), 0)) == REGNO (addr)
9255 && GET_CODE (XEXP (PATTERN (insn), 1)) == PLUS
9256 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 0)) == REG
9257 && REGNO (XEXP (XEXP (PATTERN (insn), 1), 0)) == (unsigned) HARD_FRAME_POINTER_REGNUM
9258 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 1)) == CONST_INT
9261 value = INTVAL (XEXP (XEXP (PATTERN (insn), 1), 1));
9263 break;
9267 if (value == 0)
9269 debug_rtx (addr);
9270 warning ("unable to compute real location of stacked parameter");
9271 value = 8; /* XXX magic hack */
9274 return value;
9277 #define def_builtin(NAME, TYPE, CODE) \
9278 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, NULL)
9280 void
9281 arm_init_builtins ()
9283 tree endlink = void_list_node;
9284 tree int_endlink = tree_cons (NULL_TREE, integer_type_node, endlink);
9285 tree pchar_type_node = build_pointer_type (char_type_node);
9287 tree int_ftype_int, void_ftype_pchar;
9289 /* void func (char *) */
9290 void_ftype_pchar
9291 = build_function_type (void_type_node,
9292 tree_cons (NULL_TREE, pchar_type_node, endlink));
9294 /* int func (int) */
9295 int_ftype_int
9296 = build_function_type (integer_type_node, int_endlink);
9298 /* Initialize arm V5 builtins. */
9299 if (arm_arch5)
9300 def_builtin ("__builtin_clz", int_ftype_int, ARM_BUILTIN_CLZ);
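/* So on an architecture for which arm_arch5 is set, source such as

       int top (unsigned int x) { return __builtin_clz (x); }

   will expand (via arm_expand_builtin below) to the ARMv5 CLZ
   instruction rather than to out-of-line code.  */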
9303 /* Expand an expression EXP that calls a built-in function,
9304 with result going to TARGET if that's convenient
9305 (and in mode MODE if that's convenient).
9306 SUBTARGET may be used as the target for computing one of EXP's operands.
9307 IGNORE is nonzero if the value is to be ignored. */
9310 arm_expand_builtin (exp, target, subtarget, mode, ignore)
9311 tree exp;
9312 rtx target;
9313 rtx subtarget ATTRIBUTE_UNUSED;
9314 enum machine_mode mode ATTRIBUTE_UNUSED;
9315 int ignore ATTRIBUTE_UNUSED;
9317 enum insn_code icode;
9318 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9319 tree arglist = TREE_OPERAND (exp, 1);
9320 tree arg0;
9321 rtx op0, pat;
9322 enum machine_mode tmode, mode0;
9323 int fcode = DECL_FUNCTION_CODE (fndecl);
9325 switch (fcode)
9327 default:
9328 break;
9330 case ARM_BUILTIN_CLZ:
9331 icode = CODE_FOR_clz;
9332 arg0 = TREE_VALUE (arglist);
9333 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
9334 tmode = insn_data[icode].operand[0].mode;
9335 mode0 = insn_data[icode].operand[1].mode;
9337 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
9338 op0 = copy_to_mode_reg (mode0, op0);
9339 if (target == 0
9340 || GET_MODE (target) != tmode
9341 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
9342 target = gen_reg_rtx (tmode);
9343 pat = GEN_FCN (icode) (target, op0);
9344 if (! pat)
9345 return 0;
9346 emit_insn (pat);
9347 return target;
9350 /* @@@ Should really do something sensible here. */
9351 return NULL_RTX;
9354 /* Recursively search through all of the blocks in a function
9355 checking to see if any of the variables created in that
9356 function match the RTX called 'orig'. If they do then
9357 replace them with the RTX called 'new'. */
9359 static void
9360 replace_symbols_in_block (block, orig, new)
9361 tree block;
9362 rtx orig;
9363 rtx new;
9365 for (; block; block = BLOCK_CHAIN (block))
9367 tree sym;
9369 if (!TREE_USED (block))
9370 continue;
9372 for (sym = BLOCK_VARS (block); sym; sym = TREE_CHAIN (sym))
9374 if ( (DECL_NAME (sym) == 0 && TREE_CODE (sym) != TYPE_DECL)
9375 || DECL_IGNORED_P (sym)
9376 || TREE_CODE (sym) != VAR_DECL
9377 || DECL_EXTERNAL (sym)
9378 || !rtx_equal_p (DECL_RTL (sym), orig)
9380 continue;
9382 SET_DECL_RTL (sym, new);
9385 replace_symbols_in_block (BLOCK_SUBBLOCKS (block), orig, new);
9389 /* Return the number (counting from 0) of
9390 the least significant set bit in MASK. */
9392 #ifdef __GNUC__
9393 inline
9394 #endif
9395 static int
9396 number_of_first_bit_set (mask)
9397 int mask;
9399 int bit;
9401 for (bit = 0;
9402 (mask & (1 << bit)) == 0;
9403 ++bit)
9404 continue;
9406 return bit;
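/* For example, number_of_first_bit_set (0x14) is 2, since bit 2 is
   the lowest bit set in 10100 (binary). Note that the loop does not
   terminate if MASK is zero, so callers must never pass one.  */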
9409 /* Generate code to return from a thumb function.
9410 If 'reg_containing_return_addr' is -1, then the return address is
9411 actually on the stack, at the stack pointer. */
9412 static void
9413 thumb_exit (f, reg_containing_return_addr, eh_ofs)
9414 FILE * f;
9415 int reg_containing_return_addr;
9416 rtx eh_ofs;
9418 unsigned regs_available_for_popping;
9419 unsigned regs_to_pop;
9420 int pops_needed;
9421 unsigned available;
9422 unsigned required;
9423 int mode;
9424 int size;
9425 int restore_a4 = FALSE;
9427 /* Compute the registers we need to pop. */
9428 regs_to_pop = 0;
9429 pops_needed = 0;
9431 /* There is an assumption here, that if eh_ofs is not NULL, the
9432 normal return address will have been pushed. */
9433 if (reg_containing_return_addr == -1 || eh_ofs)
9435 /* When we are generating a return for __builtin_eh_return,
9436 reg_containing_return_addr must specify the return regno. */
9437 if (eh_ofs && reg_containing_return_addr == -1)
9438 abort ();
9440 regs_to_pop |= 1 << LR_REGNUM;
9441 ++pops_needed;
9444 if (TARGET_BACKTRACE)
9446 /* Restore the (ARM) frame pointer and stack pointer. */
9447 regs_to_pop |= (1 << ARM_HARD_FRAME_POINTER_REGNUM) | (1 << SP_REGNUM);
9448 pops_needed += 2;
9451 /* If there is nothing to pop then just emit the BX instruction and
9452 return. */
9453 if (pops_needed == 0)
9455 if (eh_ofs)
9456 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
9458 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
9459 return;
9461 /* Otherwise if we are not supporting interworking and we have not created
9462 a backtrace structure and the function was not entered in ARM mode then
9463 just pop the return address straight into the PC. */
9464 else if (!TARGET_INTERWORK
9465 && !TARGET_BACKTRACE
9466 && !is_called_in_ARM_mode (current_function_decl))
9468 if (eh_ofs)
9470 asm_fprintf (f, "\tadd\t%r, #4\n", SP_REGNUM);
9471 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
9472 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
9474 else
9475 asm_fprintf (f, "\tpop\t{%r}\n", PC_REGNUM);
9477 return;
9480 /* Find out how many of the (return) argument registers we can corrupt. */
9481 regs_available_for_popping = 0;
9483 /* If returning via __builtin_eh_return, the bottom three registers
9484 all contain information needed for the return. */
9485 if (eh_ofs)
9486 size = 12;
9487 else
9489 #ifdef RTX_CODE
9490 /* We can deduce the registers used from the function's
9491 return value. This is more reliable than examining
9492 regs_ever_live[] because that will be set if the register is
9493 ever used in the function, not just if the register is used
9494 to hold a return value. */
9496 if (current_function_return_rtx != 0)
9497 mode = GET_MODE (current_function_return_rtx);
9498 else
9499 #endif
9500 mode = DECL_MODE (DECL_RESULT (current_function_decl));
9502 size = GET_MODE_SIZE (mode);
9504 if (size == 0)
9506 /* In a void function we can use any argument register.
9507 In a function that returns a structure on the stack
9508 we can use the second and third argument registers. */
9509 if (mode == VOIDmode)
9510 regs_available_for_popping =
9511 (1 << ARG_REGISTER (1))
9512 | (1 << ARG_REGISTER (2))
9513 | (1 << ARG_REGISTER (3));
9514 else
9515 regs_available_for_popping =
9516 (1 << ARG_REGISTER (2))
9517 | (1 << ARG_REGISTER (3));
9519 else if (size <= 4)
9520 regs_available_for_popping =
9521 (1 << ARG_REGISTER (2))
9522 | (1 << ARG_REGISTER (3));
9523 else if (size <= 8)
9524 regs_available_for_popping =
9525 (1 << ARG_REGISTER (3));
9528 /* Match registers to be popped with registers into which we pop them. */
9529 for (available = regs_available_for_popping,
9530 required = regs_to_pop;
9531 required != 0 && available != 0;
9532 available &= ~(available & - available),
9533 required &= ~(required & - required))
9534 -- pops_needed;
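/* (X & -X isolates the lowest set bit of X, so each trip round the
   loop above retires one popped register and one register to pop;
   for example 0x0c & -0x0c == 0x04.)  */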
9536 /* If we have any popping registers left over, remove them. */
9537 if (available > 0)
9538 regs_available_for_popping &= ~available;
9540 /* Otherwise if we need another popping register we can use
9541 the fourth argument register. */
9542 else if (pops_needed)
9544 /* If we have not found any free argument registers and
9545 reg a4 contains the return address, we must move it. */
9546 if (regs_available_for_popping == 0
9547 && reg_containing_return_addr == LAST_ARG_REGNUM)
9549 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
9550 reg_containing_return_addr = LR_REGNUM;
9552 else if (size > 12)
9554 /* Register a4 is being used to hold part of the return value,
9555 but we have dire need of a free, low register. */
9556 restore_a4 = TRUE;
9558 asm_fprintf (f, "\tmov\t%r, %r\n",IP_REGNUM, LAST_ARG_REGNUM);
9561 if (reg_containing_return_addr != LAST_ARG_REGNUM)
9563 /* The fourth argument register is available. */
9564 regs_available_for_popping |= 1 << LAST_ARG_REGNUM;
9566 --pops_needed;
9570 /* Pop as many registers as we can. */
9571 thumb_pushpop (f, regs_available_for_popping, FALSE);
9573 /* Process the registers we popped. */
9574 if (reg_containing_return_addr == -1)
9576 /* The return address was popped into the lowest numbered register. */
9577 regs_to_pop &= ~(1 << LR_REGNUM);
9579 reg_containing_return_addr =
9580 number_of_first_bit_set (regs_available_for_popping);
9582 /* Remove this register from the mask of available registers, so that
9583 the return address will not be corrupted by further pops. */
9584 regs_available_for_popping &= ~(1 << reg_containing_return_addr);
9587 /* If we popped other registers then handle them here. */
9588 if (regs_available_for_popping)
9590 int frame_pointer;
9592 /* Work out which register currently contains the frame pointer. */
9593 frame_pointer = number_of_first_bit_set (regs_available_for_popping);
9595 /* Move it into the correct place. */
9596 asm_fprintf (f, "\tmov\t%r, %r\n",
9597 ARM_HARD_FRAME_POINTER_REGNUM, frame_pointer);
9599 /* (Temporarily) remove it from the mask of popped registers. */
9600 regs_available_for_popping &= ~(1 << frame_pointer);
9601 regs_to_pop &= ~(1 << ARM_HARD_FRAME_POINTER_REGNUM);
9603 if (regs_available_for_popping)
9605 int stack_pointer;
9607 /* We popped the stack pointer as well,
9608 find the register that contains it. */
9609 stack_pointer = number_of_first_bit_set (regs_available_for_popping);
9611 /* Move it into the stack register. */
9612 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, stack_pointer);
9614 /* At this point we have popped all necessary registers, so
9615 do not worry about restoring regs_available_for_popping
9616 to its correct value:
9618 assert (pops_needed == 0)
9619 assert (regs_available_for_popping == (1 << frame_pointer))
9620 assert (regs_to_pop == (1 << STACK_POINTER)) */
9622 else
9624 /* Since we have just moved the popped value into the frame
9625 pointer, the popping register is available for reuse, and
9626 we know that we still have the stack pointer left to pop. */
9627 regs_available_for_popping |= (1 << frame_pointer);
9631 /* If we still have registers left on the stack, but we no longer have
9632 any registers into which we can pop them, then we must move the return
9633 address into the link register and make available the register that
9634 contained it. */
9635 if (regs_available_for_popping == 0 && pops_needed > 0)
9637 regs_available_for_popping |= 1 << reg_containing_return_addr;
9639 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM,
9640 reg_containing_return_addr);
9642 reg_containing_return_addr = LR_REGNUM;
9645 /* If we have registers left on the stack then pop some more.
9646 We know that at most we will want to pop FP and SP. */
9647 if (pops_needed > 0)
9649 int popped_into;
9650 int move_to;
9652 thumb_pushpop (f, regs_available_for_popping, FALSE);
9654 /* We have popped either FP or SP.
9655 Move whichever one it is into the correct register. */
9656 popped_into = number_of_first_bit_set (regs_available_for_popping);
9657 move_to = number_of_first_bit_set (regs_to_pop);
9659 asm_fprintf (f, "\tmov\t%r, %r\n", move_to, popped_into);
9661 regs_to_pop &= ~(1 << move_to);
9663 --pops_needed;
9666 /* If we still have not popped everything then we must have only
9667 had one register available to us and we are now popping the SP. */
9668 if (pops_needed > 0)
9670 int popped_into;
9672 thumb_pushpop (f, regs_available_for_popping, FALSE);
9674 popped_into = number_of_first_bit_set (regs_available_for_popping);
9676 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, popped_into);
9678 /* assert (regs_to_pop == (1 << STACK_POINTER))
9679 assert (pops_needed == 1) */
9683 /* If necessary restore the a4 register. */
9684 if (restore_a4)
9686 if (reg_containing_return_addr != LR_REGNUM)
9688 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
9689 reg_containing_return_addr = LR_REGNUM;
9692 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
9695 if (eh_ofs)
9696 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
9698 /* Return to caller. */
9699 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
9702 /* Emit code to push or pop registers to or from the stack. */
9704 static void
9705 thumb_pushpop (f, mask, push)
9706 FILE * f;
9707 int mask;
9708 int push;
9710 int regno;
9711 int lo_mask = mask & 0xFF;
9713 if (lo_mask == 0 && !push && (mask & (1 << 15)))
9715 /* Special case. Do not generate a POP PC statement here; do it in
9716 thumb_exit(). */
9717 thumb_exit (f, -1, NULL_RTX);
9718 return;
9721 fprintf (f, "\t%s\t{", push ? "push" : "pop");
9723 /* Look at the low registers first. */
9724 for (regno = 0; regno <= LAST_LO_REGNUM; regno++, lo_mask >>= 1)
9726 if (lo_mask & 1)
9728 asm_fprintf (f, "%r", regno);
9730 if ((lo_mask & ~1) != 0)
9731 fprintf (f, ", ");
9735 if (push && (mask & (1 << LR_REGNUM)))
9737 /* Catch pushing the LR. */
9738 if (mask & 0xFF)
9739 fprintf (f, ", ");
9741 asm_fprintf (f, "%r", LR_REGNUM);
9743 else if (!push && (mask & (1 << PC_REGNUM)))
9745 /* Catch popping the PC. */
9746 if (TARGET_INTERWORK || TARGET_BACKTRACE)
9748 /* The PC is never popped directly; instead
9749 it is popped into r3 and then BX is used. */
9750 fprintf (f, "}\n");
9752 thumb_exit (f, -1, NULL_RTX);
9754 return;
9756 else
9758 if (mask & 0xFF)
9759 fprintf (f, ", ");
9761 asm_fprintf (f, "%r", PC_REGNUM);
9765 fprintf (f, "}\n");
9768 void
9769 thumb_final_prescan_insn (insn)
9770 rtx insn;
9772 if (flag_print_asm_name)
9773 asm_fprintf (asm_out_file, "%@ 0x%04x\n",
9774 INSN_ADDRESSES (INSN_UID (insn)));
9778 thumb_shiftable_const (val)
9779 unsigned HOST_WIDE_INT val;
9781 unsigned HOST_WIDE_INT mask = 0xff;
9782 int i;
9784 if (val == 0) /* XXX */
9785 return 0;
9787 for (i = 0; i < 25; i++)
9788 if ((val & (mask << i)) == val)
9789 return 1;
9791 return 0;
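/* Thus thumb_shiftable_const (0x00ff0000) is 1 (it is 0xff << 16,
   buildable with a mov and a shift), while
   thumb_shiftable_const (0x00ff00ff) is 0, since its set bits do
   not fit within a single 8 bit window.  */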
9794 /* Returns non-zero if the current function contains,
9795 or might contain, a far jump. */
9798 thumb_far_jump_used_p (int in_prologue)
9800 rtx insn;
9802 /* This test is only important for leaf functions. */
9803 /* assert (!leaf_function_p ()); */
9805 /* If we have already decided that far jumps may be used,
9806 do not bother checking again, and always return true even if
9807 it turns out that they are not being used. Once we have made
9808 the decision that far jumps are present (and that hence the link
9809 register will be pushed onto the stack) we cannot go back on it. */
9810 if (cfun->machine->far_jump_used)
9811 return 1;
9813 /* If this function is not being called from the prologue/epilogue
9814 generation code then it must be being called from the
9815 INITIAL_ELIMINATION_OFFSET macro. */
9816 if (!in_prologue)
9818 /* In this case we know that we are being asked about the elimination
9819 of the arg pointer register. If that register is not being used,
9820 then there are no arguments on the stack, and we do not have to
9821 worry that a far jump might force the prologue to push the link
9822 register, changing the stack offsets. In this case we can just
9823 return false, since the presence of far jumps in the function will
9824 not affect stack offsets.
9826 If the arg pointer is live (or if it was live, but has now been
9827 eliminated and so set to dead) then we do have to test to see if
9828 the function might contain a far jump. This test can lead to some
9829 false positives, since before reload is completed the length of
9830 branch instructions is not known, so gcc defaults to returning their
9831 longest length, which in turn sets the far jump attribute to true.
9833 A false positive will not result in bad code being generated, but it
9834 will result in a needless push and pop of the link register. We
9835 hope that this does not occur too often. */
9836 if (regs_ever_live [ARG_POINTER_REGNUM])
9837 cfun->machine->arg_pointer_live = 1;
9838 else if (!cfun->machine->arg_pointer_live)
9839 return 0;
9842 /* Check to see if the function contains a branch
9843 insn with the far jump attribute set. */
9844 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9846 if (GET_CODE (insn) == JUMP_INSN
9847 /* Ignore tablejump patterns. */
9848 && GET_CODE (PATTERN (insn)) != ADDR_VEC
9849 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
9850 && get_attr_far_jump (insn) == FAR_JUMP_YES
9853 /* Record the fact that we have decided that
9854 the function does use far jumps. */
9855 cfun->machine->far_jump_used = 1;
9856 return 1;
9860 return 0;
9863 /* Return non-zero if FUNC must be entered in ARM mode. */
9866 is_called_in_ARM_mode (func)
9867 tree func;
9869 if (TREE_CODE (func) != FUNCTION_DECL)
9870 abort ();
9872 /* Ignore the problem about functions whose address is taken. */
9873 if (TARGET_CALLEE_INTERWORKING && TREE_PUBLIC (func))
9874 return TRUE;
9876 #ifdef ARM_PE
9877 return lookup_attribute ("interfacearm", DECL_ATTRIBUTES (func)) != NULL_TREE;
9878 #else
9879 return FALSE;
9880 #endif
9883 /* The bits which aren't usefully expanded as rtl. */
9885 const char *
9886 thumb_unexpanded_epilogue ()
9888 int regno;
9889 int live_regs_mask = 0;
9890 int high_regs_pushed = 0;
9891 int leaf_function = leaf_function_p ();
9892 int had_to_push_lr;
9893 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
9895 if (return_used_this_function)
9896 return "";
9898 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
9899 if (regs_ever_live[regno] && !call_used_regs[regno]
9900 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
9901 live_regs_mask |= 1 << regno;
9903 for (regno = 8; regno < 13; regno++)
9905 if (regs_ever_live[regno] && !call_used_regs[regno]
9906 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
9907 high_regs_pushed++;
9910 /* The prologue may have pushed some high registers to use as
9911 work registers, e.g. the testsuite file:
9912 gcc/testsuite/gcc/gcc.c-torture/execute/complex-2.c
9913 compiles to produce:
9914 push {r4, r5, r6, r7, lr}
9915 mov r7, r9
9916 mov r6, r8
9917 push {r6, r7}
9918 as part of the prologue. We have to undo that pushing here. */
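/* The matching undo emitted below looks something like this (a
   sketch - the registers chosen depend on the live mask):
   pop {r3, r4}
   mov r8, r3
   mov r9, r4
   pop {r4, r5, r6, r7, pc}  */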
9920 if (high_regs_pushed)
9922 int mask = live_regs_mask;
9923 int next_hi_reg;
9924 int size;
9925 int mode;
9927 #ifdef RTX_CODE
9928 /* We can deduce the registers used from the function's return value.
9929 This is more reliable than examining regs_ever_live[] because that
9930 will be set if the register is ever used in the function, not just if
9931 the register is used to hold a return value. */
9933 if (current_function_return_rtx != 0)
9934 mode = GET_MODE (current_function_return_rtx);
9935 else
9936 #endif
9937 mode = DECL_MODE (DECL_RESULT (current_function_decl));
9939 size = GET_MODE_SIZE (mode);
9941 /* Unless we are returning a type of size > 12, register r3 is
9942 available. */
9943 if (size < 13)
9944 mask |= 1 << 3;
9946 if (mask == 0)
9947 /* Oh dear! We have no low registers into which we can pop
9948 high registers! */
9949 internal_error
9950 ("no low registers available for popping high registers");
9952 for (next_hi_reg = 8; next_hi_reg < 13; next_hi_reg++)
9953 if (regs_ever_live[next_hi_reg] && !call_used_regs[next_hi_reg]
9954 && !(TARGET_SINGLE_PIC_BASE && (next_hi_reg == arm_pic_register)))
9955 break;
9957 while (high_regs_pushed)
9959 /* Find lo register(s) into which the high register(s) can
9960 be popped. */
9961 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
9963 if (mask & (1 << regno))
9964 high_regs_pushed--;
9965 if (high_regs_pushed == 0)
9966 break;
9969 mask &= (2 << regno) - 1; /* A noop if regno == 8 */
9971 /* Pop the values into the low register(s). */
9972 thumb_pushpop (asm_out_file, mask, 0);
9974 /* Move the value(s) into the high registers. */
9975 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
9977 if (mask & (1 << regno))
9979 asm_fprintf (asm_out_file, "\tmov\t%r, %r\n", next_hi_reg,
9980 regno);
9982 for (next_hi_reg++; next_hi_reg < 13; next_hi_reg++)
9983 if (regs_ever_live[next_hi_reg]
9984 && !call_used_regs[next_hi_reg]
9985 && !(TARGET_SINGLE_PIC_BASE
9986 && (next_hi_reg == arm_pic_register)))
9987 break;
9993 had_to_push_lr = (live_regs_mask || !leaf_function
9994 || thumb_far_jump_used_p (1));
9996 if (TARGET_BACKTRACE
9997 && ((live_regs_mask & 0xFF) == 0)
9998 && regs_ever_live [LAST_ARG_REGNUM] != 0)
10000 /* The stack backtrace structure creation code had to
10001 push R7 in order to get a work register, so we pop
10002 it now. */
10003 live_regs_mask |= (1 << LAST_LO_REGNUM);
10006 if (current_function_pretend_args_size == 0 || TARGET_BACKTRACE)
10008 if (had_to_push_lr
10009 && !is_called_in_ARM_mode (current_function_decl)
10010 && !eh_ofs)
10011 live_regs_mask |= 1 << PC_REGNUM;
10013 /* Either no argument registers were pushed or a backtrace
10014 structure was created which includes an adjusted stack
10015 pointer, so just pop everything. */
10016 if (live_regs_mask)
10017 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
10019 if (eh_ofs)
10020 thumb_exit (asm_out_file, 2, eh_ofs);
10021 /* We have either just popped the return address into the
10022 PC, or it was kept in LR for the entire function, or
10023 it is still on the stack because we do not want to
10024 return by doing a pop {pc}. */
10025 else if ((live_regs_mask & (1 << PC_REGNUM)) == 0)
10026 thumb_exit (asm_out_file,
10027 (had_to_push_lr
10028 && is_called_in_ARM_mode (current_function_decl)) ?
10029 -1 : LR_REGNUM, NULL_RTX);
10031 else
10033 /* Pop everything but the return address. */
10034 live_regs_mask &= ~(1 << PC_REGNUM);
10036 if (live_regs_mask)
10037 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
10039 if (had_to_push_lr)
10040 /* Get the return address into a temporary register. */
10041 thumb_pushpop (asm_out_file, 1 << LAST_ARG_REGNUM, 0);
10043 /* Remove the argument registers that were pushed onto the stack. */
10044 asm_fprintf (asm_out_file, "\tadd\t%r, %r, #%d\n",
10045 SP_REGNUM, SP_REGNUM,
10046 current_function_pretend_args_size);
10048 if (eh_ofs)
10049 thumb_exit (asm_out_file, 2, eh_ofs);
10050 else
10051 thumb_exit (asm_out_file,
10052 had_to_push_lr ? LAST_ARG_REGNUM : LR_REGNUM, NULL_RTX);
10055 return "";
10058 /* Functions to save and restore machine-specific function data. */
10060 static void
10061 arm_mark_machine_status (p)
10062 struct function * p;
10064 machine_function *machine = p->machine;
10066 if (machine)
10067 ggc_mark_rtx (machine->eh_epilogue_sp_ofs);
10070 static void
10071 arm_init_machine_status (p)
10072 struct function * p;
10074 p->machine =
10075 (machine_function *) xcalloc (1, sizeof (machine_function));
10077 #if ARM_FT_UNKNOWN != 0
10078 ((machine_function *) p->machine)->func_type = ARM_FT_UNKNOWN;
10079 #endif
10082 static void
10083 arm_free_machine_status (p)
10084 struct function * p;
10086 if (p->machine)
10088 free (p->machine);
10089 p->machine = NULL;
10093 /* Return an RTX indicating where the return address to the
10094 calling function can be found. */
10097 arm_return_addr (count, frame)
10098 int count;
10099 rtx frame ATTRIBUTE_UNUSED;
10101 if (count != 0)
10102 return NULL_RTX;
10104 if (TARGET_APCS_32)
10105 return get_hard_reg_initial_val (Pmode, LR_REGNUM);
10106 else
10108 rtx lr = gen_rtx_AND (Pmode, gen_rtx_REG (Pmode, LR_REGNUM),
10109 GEN_INT (RETURN_ADDR_MASK26));
10110 return get_func_hard_reg_initial_val (cfun, lr);
10114 /* Do anything needed before RTL is emitted for each function. */
10116 void
10117 arm_init_expanders ()
10119 /* Arrange to initialize and mark the machine per-function status. */
10120 init_machine_status = arm_init_machine_status;
10121 mark_machine_status = arm_mark_machine_status;
10122 free_machine_status = arm_free_machine_status;
10125 /* Generate the rest of a function's prologue. */
10127 void
10128 thumb_expand_prologue ()
10130 HOST_WIDE_INT amount = (get_frame_size ()
10131 + current_function_outgoing_args_size);
10132 unsigned long func_type;
10134 func_type = arm_current_func_type ();
10136 /* Naked functions don't have prologues. */
10137 if (IS_NAKED (func_type))
10138 return;
10140 if (IS_INTERRUPT (func_type))
10142 error ("interrupt Service Routines cannot be coded in Thumb mode");
10143 return;
10146 if (frame_pointer_needed)
10147 emit_insn (gen_movsi (hard_frame_pointer_rtx, stack_pointer_rtx));
10149 if (amount)
10151 amount = ROUND_UP (amount);
10153 if (amount < 512)
10154 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
10155 GEN_INT (- amount)));
10156 else
10158 int regno;
10159 rtx reg;
10161 /* The stack decrement is too big for an immediate value in a single
10162 insn. In theory we could issue multiple subtracts, but after
10163 three of them it becomes more space efficient to place the full
10164 value in the constant pool and load into a register. (Also the
10165 ARM debugger really likes to see only one stack decrement per
10166 function). So instead we look for a scratch register into which
10167 we can load the decrement, and then we subtract this from the
10168 stack pointer. Unfortunately on the thumb the only available
10169 scratch registers are the argument registers, and we cannot use
10170 these as they may hold arguments to the function. Instead we
10171 attempt to locate a call preserved register which is used by this
10172 function. If we can find one, then we know that it will have
10173 been pushed at the start of the prologue and so we can corrupt
10174 it now. */
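/* A sketch of the sequence this produces, assuming r4 was found to
   be free and the frame needs 1024 bytes (.LCx is an invented
   constant-pool label):
   ldr r4, .LCx    @ .LCx: .word -1024
   add sp, sp, r4  */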
10175 for (regno = LAST_ARG_REGNUM + 1; regno <= LAST_LO_REGNUM; regno++)
10176 if (regs_ever_live[regno]
10177 && !call_used_regs[regno] /* Paranoia */
10178 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register))
10179 && !(frame_pointer_needed
10180 && (regno == THUMB_HARD_FRAME_POINTER_REGNUM)))
10181 break;
10183 if (regno > LAST_LO_REGNUM) /* Very unlikely */
10185 rtx spare = gen_rtx (REG, SImode, IP_REGNUM);
10187 /* Choose an arbitrary, non-argument low register. */
10188 reg = gen_rtx (REG, SImode, LAST_LO_REGNUM);
10190 /* Save it by copying it into a high, scratch register. */
10191 emit_insn (gen_movsi (spare, reg));
10192 /* Add a USE to stop propagate_one_insn() from barfing. */
10193 emit_insn (gen_prologue_use (spare));
10195 /* Decrement the stack. */
10196 emit_insn (gen_movsi (reg, GEN_INT (- amount)));
10197 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
10198 reg));
10200 /* Restore the low register's original value. */
10201 emit_insn (gen_movsi (reg, spare));
10203 /* Emit a USE of the restored scratch register, so that flow
10204 analysis will not consider the restore redundant. The
10205 register won't be used again in this function and isn't
10206 restored by the epilogue. */
10207 emit_insn (gen_prologue_use (reg));
10209 else
10211 reg = gen_rtx (REG, SImode, regno);
10213 emit_insn (gen_movsi (reg, GEN_INT (- amount)));
10214 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
10215 reg));
10220 if (current_function_profile || TARGET_NO_SCHED_PRO)
10221 emit_insn (gen_blockage ());
10224 void
10225 thumb_expand_epilogue ()
10227 HOST_WIDE_INT amount = (get_frame_size ()
10228 + current_function_outgoing_args_size);
10230 /* Naked functions don't have epilogues. */
10231 if (IS_NAKED (arm_current_func_type ()))
10232 return;
10234 if (frame_pointer_needed)
10235 emit_insn (gen_movsi (stack_pointer_rtx, hard_frame_pointer_rtx));
10236 else if (amount)
10238 amount = ROUND_UP (amount);
10240 if (amount < 512)
10241 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
10242 GEN_INT (amount)));
10243 else
10245 /* r3 is always free in the epilogue. */
10246 rtx reg = gen_rtx (REG, SImode, LAST_ARG_REGNUM);
10248 emit_insn (gen_movsi (reg, GEN_INT (amount)));
10249 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
10253 /* Emit a USE (stack_pointer_rtx), so that
10254 the stack adjustment will not be deleted. */
10255 emit_insn (gen_prologue_use (stack_pointer_rtx));
10257 if (current_function_profile || TARGET_NO_SCHED_PRO)
10258 emit_insn (gen_blockage ());
10261 static void
10262 thumb_output_function_prologue (f, size)
10263 FILE * f;
10264 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
10266 int live_regs_mask = 0;
10267 int high_regs_pushed = 0;
10268 int regno;
10270 if (IS_NAKED (arm_current_func_type ()))
10271 return;
10273 if (is_called_in_ARM_mode (current_function_decl))
10275 const char * name;
10277 if (GET_CODE (DECL_RTL (current_function_decl)) != MEM)
10278 abort ();
10279 if (GET_CODE (XEXP (DECL_RTL (current_function_decl), 0)) != SYMBOL_REF)
10280 abort ();
10281 name = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
10283 /* Generate code sequence to switch us into Thumb mode. */
10284 /* The .code 32 directive has already been emitted by
10285 ASM_DECLARE_FUNCTION_NAME. */
10286 asm_fprintf (f, "\torr\t%r, %r, #1\n", IP_REGNUM, PC_REGNUM);
10287 asm_fprintf (f, "\tbx\t%r\n", IP_REGNUM);
10289 /* Generate a label, so that the debugger will notice the
10290 change in instruction sets. This label is also used by
10291 the assembler to bypass the ARM code when this function
10292 is called from a Thumb encoded function elsewhere in the
10293 same file. Hence the definition of STUB_NAME here must
10294 agree with the definition in gas/config/tc-arm.c */
10296 #define STUB_NAME ".real_start_of"
10298 asm_fprintf (f, "\t.code\t16\n");
10299 #ifdef ARM_PE
10300 if (arm_dllexport_name_p (name))
10301 name = arm_strip_name_encoding (name);
10302 #endif
10303 asm_fprintf (f, "\t.globl %s%U%s\n", STUB_NAME, name);
10304 asm_fprintf (f, "\t.thumb_func\n");
10305 asm_fprintf (f, "%s%U%s:\n", STUB_NAME, name);
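/* Taken together, the directives above give an entry sequence
   roughly like the following for a function `foo' (illustrative;
   the stub label also carries the user label prefix):

	.code	32
   foo:
	orr	ip, pc, #1	@ PC reads as foo + 8; set the Thumb bit
	bx	ip		@ switch to Thumb state
	.code	16
	.thumb_func
   .real_start_of.foo:	@ normal Thumb prologue follows  */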
10308 if (current_function_pretend_args_size)
10310 if (cfun->machine->uses_anonymous_args)
10312 int num_pushes;
10314 asm_fprintf (f, "\tpush\t{");
10316 num_pushes = NUM_INTS (current_function_pretend_args_size);
10318 for (regno = LAST_ARG_REGNUM + 1 - num_pushes;
10319 regno <= LAST_ARG_REGNUM;
10320 regno++)
10321 asm_fprintf (f, "%r%s", regno,
10322 regno == LAST_ARG_REGNUM ? "" : ", ");
10324 asm_fprintf (f, "}\n");
10326 else
10327 asm_fprintf (f, "\tsub\t%r, %r, #%d\n",
10328 SP_REGNUM, SP_REGNUM,
10329 current_function_pretend_args_size);
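/* E.g. a varargs function with 8 bytes of pretend arguments emits
   "push {r2, r3}" here, placing the anonymous register arguments
   contiguously below any stack arguments; a non-varargs function
   just drops the stack pointer with "sub sp, sp, #8" instead.  */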
10332 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
10333 if (regs_ever_live[regno] && !call_used_regs[regno]
10334 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
10335 live_regs_mask |= 1 << regno;
10337 if (live_regs_mask || !leaf_function_p () || thumb_far_jump_used_p (1))
10338 live_regs_mask |= 1 << LR_REGNUM;
10340 if (TARGET_BACKTRACE)
10342 int offset;
10343 int work_register = 0;
10344 int wr;
10346 /* We have been asked to create a stack backtrace structure.
10347 The code looks like this:
10349 0 .align 2
10350 0 func:
10351 0 sub SP, #16 Reserve space for 4 registers.
10352 2 push {R7} Get a work register.
10353 4 add R7, SP, #20 Get the stack pointer before the push.
10354 6 str R7, [SP, #8] Store the stack pointer (before reserving the space).
10355 8 mov R7, PC Get hold of the start of this code plus 12.
10356 10 str R7, [SP, #16] Store it.
10357 12 mov R7, FP Get hold of the current frame pointer.
10358 14 str R7, [SP, #4] Store it.
10359 16 mov R7, LR Get hold of the current return address.
10360 18 str R7, [SP, #12] Store it.
10361 20 add R7, SP, #16 Point at the start of the backtrace structure.
10362 22 mov FP, R7 Put this value into the frame pointer. */
10364 if ((live_regs_mask & 0xFF) == 0)
10366 /* See if the a4 register is free. */
10368 if (regs_ever_live [LAST_ARG_REGNUM] == 0)
10369 work_register = LAST_ARG_REGNUM;
10370 else /* We must push a register of our own */
10371 live_regs_mask |= (1 << LAST_LO_REGNUM);
10374 if (work_register == 0)
10376 /* Select a register from the list that will be pushed to
10377 use as our work register. */
10378 for (work_register = (LAST_LO_REGNUM + 1); work_register--;)
10379 if ((1 << work_register) & live_regs_mask)
10380 break;
10383 asm_fprintf
10384 (f, "\tsub\t%r, %r, #16\t%@ Create stack backtrace structure\n",
10385 SP_REGNUM, SP_REGNUM);
10387 if (live_regs_mask)
10388 thumb_pushpop (f, live_regs_mask, 1);
10390 for (offset = 0, wr = 1 << 15; wr != 0; wr >>= 1)
10391 if (wr & live_regs_mask)
10392 offset += 4;
10394 asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
10395 offset + 16 + current_function_pretend_args_size);
10397 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
10398 offset + 4);
10400 /* Make sure that the instruction fetching the PC is in the right place
10401 to calculate "start of backtrace creation code + 12". */
10402 if (live_regs_mask)
10404 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
10405 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
10406 offset + 12);
10407 asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
10408 ARM_HARD_FRAME_POINTER_REGNUM);
10409 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
10410 offset);
10412 else
10414 asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
10415 ARM_HARD_FRAME_POINTER_REGNUM);
10416 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
10417 offset);
10418 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
10419 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
10420 offset + 12);
10423 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, LR_REGNUM);
10424 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
10425 offset + 8);
10426 asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
10427 offset + 12);
10428 asm_fprintf (f, "\tmov\t%r, %r\t\t%@ Backtrace structure created\n",
10429 ARM_HARD_FRAME_POINTER_REGNUM, work_register);
10431 else if (live_regs_mask)
10432 thumb_pushpop (f, live_regs_mask, 1);
10434 for (regno = 8; regno < 13; regno++)
10436 if (regs_ever_live[regno] && !call_used_regs[regno]
10437 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
10438 high_regs_pushed++;
10441 if (high_regs_pushed)
10443 int pushable_regs = 0;
10444 int mask = live_regs_mask & 0xff;
10445 int next_hi_reg;
10447 for (next_hi_reg = 12; next_hi_reg > LAST_LO_REGNUM; next_hi_reg--)
10449 if (regs_ever_live[next_hi_reg] && !call_used_regs[next_hi_reg]
10450 && !(TARGET_SINGLE_PIC_BASE
10451 && (next_hi_reg == arm_pic_register)))
10452 break;
10455 pushable_regs = mask;
10457 if (pushable_regs == 0)
10459 /* Desperation time -- this probably will never happen. */
10460 if (regs_ever_live[LAST_ARG_REGNUM]
10461 || !call_used_regs[LAST_ARG_REGNUM])
10462 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, LAST_ARG_REGNUM);
10463 mask = 1 << LAST_ARG_REGNUM;
10466 while (high_regs_pushed > 0)
10468 for (regno = LAST_LO_REGNUM; regno >= 0; regno--)
10470 if (mask & (1 << regno))
10472 asm_fprintf (f, "\tmov\t%r, %r\n", regno, next_hi_reg);
10474 high_regs_pushed--;
10476 if (high_regs_pushed)
10477 for (next_hi_reg--; next_hi_reg > LAST_LO_REGNUM;
10478 next_hi_reg--)
10480 if (regs_ever_live[next_hi_reg]
10481 && !call_used_regs[next_hi_reg]
10482 && !(TARGET_SINGLE_PIC_BASE
10483 && (next_hi_reg == arm_pic_register)))
10484 break;
10486 else
10488 mask &= ~((1 << regno) - 1);
10489 break;
10494 thumb_pushpop (f, mask, 1);
10497 if (pushable_regs == 0
10498 && (regs_ever_live[LAST_ARG_REGNUM]
10499 || !call_used_regs[LAST_ARG_REGNUM]))
10500 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
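/* Illustrative shuffle for the loop above: if r8 is the only live
   high register and r4 the only pushable low register, we emit

	mov	r4, r8
	push	{r4}

   since the Thumb push instruction cannot name r8-r12 directly.  */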
10504 /* Handle the case of a double word load into a low register from
10505 a computed memory address. The computed address may involve a
10506 register which is overwritten by the load. */
10508 const char *
10509 thumb_load_double_from_address (operands)
10510 rtx *operands;
10512 rtx addr;
10513 rtx base;
10514 rtx offset;
10515 rtx arg1;
10516 rtx arg2;
10518 if (GET_CODE (operands[0]) != REG)
10519 abort ();
10521 if (GET_CODE (operands[1]) != MEM)
10522 abort ();
10524 /* Get the memory address. */
10525 addr = XEXP (operands[1], 0);
10527 /* Work out how the memory address is computed. */
10528 switch (GET_CODE (addr))
10530 case REG:
10531 operands[2] = gen_rtx (MEM, SImode,
10532 plus_constant (XEXP (operands[1], 0), 4));
10534 if (REGNO (operands[0]) == REGNO (addr))
10536 output_asm_insn ("ldr\t%H0, %2", operands);
10537 output_asm_insn ("ldr\t%0, %1", operands);
10539 else
10541 output_asm_insn ("ldr\t%0, %1", operands);
10542 output_asm_insn ("ldr\t%H0, %2", operands);
10544 break;
10546 case CONST:
10547 /* Compute <address> + 4 for the high order load. */
10548 operands[2] = gen_rtx (MEM, SImode,
10549 plus_constant (XEXP (operands[1], 0), 4));
10551 output_asm_insn ("ldr\t%0, %1", operands);
10552 output_asm_insn ("ldr\t%H0, %2", operands);
10553 break;
10555 case PLUS:
10556 arg1 = XEXP (addr, 0);
10557 arg2 = XEXP (addr, 1);
10559 if (CONSTANT_P (arg1))
10560 base = arg2, offset = arg1;
10561 else
10562 base = arg1, offset = arg2;
10564 if (GET_CODE (base) != REG)
10565 abort ();
10567 /* Catch the case of <address> = <reg> + <reg> */
10568 if (GET_CODE (offset) == REG)
10570 int reg_offset = REGNO (offset);
10571 int reg_base = REGNO (base);
10572 int reg_dest = REGNO (operands[0]);
10574 /* Add the base and offset registers together into the
10575 higher destination register. */
10576 asm_fprintf (asm_out_file, "\tadd\t%r, %r, %r",
10577 reg_dest + 1, reg_base, reg_offset);
10579 /* Load the lower destination register from the address in
10580 the higher destination register. */
10581 asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #0]",
10582 reg_dest, reg_dest + 1);
10584 /* Load the higher destination register from its own address
10585 plus 4. */
10586 asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #4]",
10587 reg_dest + 1, reg_dest + 1);
10589 else
10591 /* Compute <address> + 4 for the high order load. */
10592 operands[2] = gen_rtx (MEM, SImode,
10593 plus_constant (XEXP (operands[1], 0), 4));
10595 /* If the computed address is held in the low order register
10596 then load the high order register first, otherwise always
10597 load the low order register first. */
10598 if (REGNO (operands[0]) == REGNO (base))
10600 output_asm_insn ("ldr\t%H0, %2", operands);
10601 output_asm_insn ("ldr\t%0, %1", operands);
10603 else
10605 output_asm_insn ("ldr\t%0, %1", operands);
10606 output_asm_insn ("ldr\t%H0, %2", operands);
10609 break;
10611 case LABEL_REF:
10612 /* With no registers to worry about we can just load the value
10613 directly. */
10614 operands[2] = gen_rtx (MEM, SImode,
10615 plus_constant (XEXP (operands[1], 0), 4));
10617 output_asm_insn ("ldr\t%H0, %2", operands);
10618 output_asm_insn ("ldr\t%0, %1", operands);
10619 break;
10621 default:
10622 abort ();
10623 break;
10626 return "";
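/* For instance, in the <reg> + <reg> case a load of r2/r3 from
   [r2 + r4] comes out as (matching the asm_fprintf calls above):

	add	r3, r2, r4	@ address formed in the high destination
	ldr	r2, [r3, #0]	@ low word first; r3 still holds the address
	ldr	r3, [r3, #4]	@ high word last, clobbering the address  */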
10630 const char *
10631 thumb_output_move_mem_multiple (n, operands)
10632 int n;
10633 rtx * operands;
10635 rtx tmp;
10637 switch (n)
10639 case 2:
10640 if (REGNO (operands[4]) > REGNO (operands[5]))
10642 tmp = operands[4];
10643 operands[4] = operands[5];
10644 operands[5] = tmp;
10646 output_asm_insn ("ldmia\t%1!, {%4, %5}", operands);
10647 output_asm_insn ("stmia\t%0!, {%4, %5}", operands);
10648 break;
10650 case 3:
10651 if (REGNO (operands[4]) > REGNO (operands[5]))
10653 tmp = operands[4];
10654 operands[4] = operands[5];
10655 operands[5] = tmp;
10657 if (REGNO (operands[5]) > REGNO (operands[6]))
10659 tmp = operands[5];
10660 operands[5] = operands[6];
10661 operands[6] = tmp;
10663 if (REGNO (operands[4]) > REGNO (operands[5]))
10665 tmp = operands[4];
10666 operands[4] = operands[5];
10667 operands[5] = tmp;
10670 output_asm_insn ("ldmia\t%1!, {%4, %5, %6}", operands);
10671 output_asm_insn ("stmia\t%0!, {%4, %5, %6}", operands);
10672 break;
10674 default:
10675 abort ();
10678 return "";
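/* E.g. for N == 3, and assuming the scratch registers are r4-r6,
   the output is

	ldmia	r1!, {r4, r5, r6}
	stmia	r0!, {r4, r5, r6}

   The compare-and-swap steps above sort the scratch registers
   because ldmia/stmia register lists must be in ascending order.  */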
10681 /* Routines for generating rtl. */
10683 void
10684 thumb_expand_movstrqi (operands)
10685 rtx * operands;
10687 rtx out = copy_to_mode_reg (SImode, XEXP (operands[0], 0));
10688 rtx in = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
10689 HOST_WIDE_INT len = INTVAL (operands[2]);
10690 HOST_WIDE_INT offset = 0;
10692 while (len >= 12)
10694 emit_insn (gen_movmem12b (out, in, out, in));
10695 len -= 12;
10698 if (len >= 8)
10700 emit_insn (gen_movmem8b (out, in, out, in));
10701 len -= 8;
10704 if (len >= 4)
10706 rtx reg = gen_reg_rtx (SImode);
10707 emit_insn (gen_movsi (reg, gen_rtx (MEM, SImode, in)));
10708 emit_insn (gen_movsi (gen_rtx (MEM, SImode, out), reg));
10709 len -= 4;
10710 offset += 4;
10713 if (len >= 2)
10715 rtx reg = gen_reg_rtx (HImode);
10716 emit_insn (gen_movhi (reg, gen_rtx (MEM, HImode,
10717 plus_constant (in, offset))));
10718 emit_insn (gen_movhi (gen_rtx (MEM, HImode, plus_constant (out, offset)),
10719 reg));
10720 len -= 2;
10721 offset += 2;
10724 if (len)
10726 rtx reg = gen_reg_rtx (QImode);
10727 emit_insn (gen_movqi (reg, gen_rtx (MEM, QImode,
10728 plus_constant (in, offset))));
10729 emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (out, offset)),
10730 reg));
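/* E.g. a 27-byte copy expands to two 12-byte ldmia/stmia groups
   followed by a halfword move and a final byte move for the
   3-byte tail.  */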
10734 int
10735 thumb_cmp_operand (op, mode)
10736 rtx op;
10737 enum machine_mode mode;
10739 return ((GET_CODE (op) == CONST_INT
10740 && (unsigned HOST_WIDE_INT) (INTVAL (op)) < 256)
10741 || register_operand (op, mode));
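/* This accepts exactly what the Thumb "cmp Rn, #imm8" encoding
   allows, or any register: e.g. (const_int 255) matches, while
   (const_int 256) does not and must first be loaded into a
   register.  */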
10744 static const char *
10745 thumb_condition_code (x, invert)
10746 rtx x;
10747 int invert;
10749 static const char * const conds[] =
10751 "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
10752 "hi", "ls", "ge", "lt", "gt", "le"
10754 int val;
10756 switch (GET_CODE (x))
10758 case EQ: val = 0; break;
10759 case NE: val = 1; break;
10760 case GEU: val = 2; break;
10761 case LTU: val = 3; break;
10762 case GTU: val = 8; break;
10763 case LEU: val = 9; break;
10764 case GE: val = 10; break;
10765 case LT: val = 11; break;
10766 case GT: val = 12; break;
10767 case LE: val = 13; break;
10768 default:
10769 abort ();
10772 return conds[val ^ invert];
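/* The CONDS table pairs each condition with its inverse at the
   adjacent index, so the XOR with INVERT flips it: e.g. GT yields
   "gt" (index 12) normally and "le" (index 13) when inverted.  */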
10775 /* Handle storing a half-word to memory during reload. */
10777 void
10778 thumb_reload_out_hi (operands)
10779 rtx * operands;
10781 emit_insn (gen_thumb_movhi_clobber (operands[0], operands[1], operands[2]));
10784 /* Handle reading a half-word from memory during reload. */
10786 void
10787 thumb_reload_in_hi (operands)
10788 rtx * operands ATTRIBUTE_UNUSED;
10790 abort ();
10793 /* Return the length of a function name prefix
10794 that starts with the character C; return 0 if C is not a recognized prefix. */
10796 static int
10797 arm_get_strip_length (char c)
10799 switch (c)
10801 ARM_NAME_ENCODING_LENGTHS
10802 default: return 0;
10806 /* Return a pointer to a function's name with any
10807 and all prefix encodings stripped from it. */
10809 const char *
10810 arm_strip_name_encoding (const char * name)
10812 int skip;
10814 while ((skip = arm_get_strip_length (* name)))
10815 name += skip;
10817 return name;
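/* Illustrative use, assuming '*' is among the prefixes covered by
   ARM_NAME_ENCODING_LENGTHS: arm_strip_name_encoding ("*foo")
   returns a pointer to "foo", while a name with no recognized
   prefix is returned unchanged.  */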
10820 #ifdef AOF_ASSEMBLER
10821 /* Special functions only needed when producing AOF syntax assembler. */
10823 rtx aof_pic_label = NULL_RTX;
10824 struct pic_chain
10826 struct pic_chain * next;
10827 const char * symname;
10830 static struct pic_chain * aof_pic_chain = NULL;
10832 rtx
10833 aof_pic_entry (x)
10834 rtx x;
10836 struct pic_chain ** chainp;
10837 int offset;
10839 if (aof_pic_label == NULL_RTX)
10841 /* We mark this here and not in arm_add_gc_roots() to avoid
10842 polluting even more code with ifdefs, and because it never
10843 contains anything useful until we assign to it here. */
10844 ggc_add_rtx_root (&aof_pic_label, 1);
10845 aof_pic_label = gen_rtx_SYMBOL_REF (Pmode, "x$adcons");
10848 for (offset = 0, chainp = &aof_pic_chain; *chainp;
10849 offset += 4, chainp = &(*chainp)->next)
10850 if ((*chainp)->symname == XSTR (x, 0))
10851 return plus_constant (aof_pic_label, offset);
10853 *chainp = (struct pic_chain *) xmalloc (sizeof (struct pic_chain));
10854 (*chainp)->next = NULL;
10855 (*chainp)->symname = XSTR (x, 0);
10856 return plus_constant (aof_pic_label, offset);
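/* Each distinct symbol gets the next 4-byte slot: the first call,
   say for "foo", returns x$adcons + 0, a later call for "bar"
   returns x$adcons + 4, and further calls for "foo" keep returning
   the offset already recorded in the chain.  */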
10859 void
10860 aof_dump_pic_table (f)
10861 FILE * f;
10863 struct pic_chain * chain;
10865 if (aof_pic_chain == NULL)
10866 return;
10868 asm_fprintf (f, "\tAREA |%r$$adcons|, BASED %r\n",
10869 PIC_OFFSET_TABLE_REGNUM,
10870 PIC_OFFSET_TABLE_REGNUM);
10871 fputs ("|x$adcons|\n", f);
10873 for (chain = aof_pic_chain; chain; chain = chain->next)
10875 fputs ("\tDCD\t", f);
10876 assemble_name (f, chain->symname);
10877 fputs ("\n", f);
10881 int arm_text_section_count = 1;
10883 char *
10884 aof_text_section ()
10886 static char buf[100];
10887 sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
10888 arm_text_section_count++);
10889 if (flag_pic)
10890 strcat (buf, ", PIC, REENTRANT");
10891 return buf;
10894 static int arm_data_section_count = 1;
10896 char *
10897 aof_data_section ()
10899 static char buf[100];
10900 sprintf (buf, "\tAREA |C$$data%d|, DATA", arm_data_section_count++);
10901 return buf;
10904 /* The AOF assembler is religiously strict about declarations of
10905 imported and exported symbols, so that it is impossible to declare
10906 a function as imported near the beginning of the file, and then to
10907 export it later on. It is, however, possible to delay the decision
10908 until all the functions in the file have been compiled. To get
10909 around this, we maintain a list of the imports and exports, and
10910 delete from it any that are subsequently defined. At the end of
10911 compilation we spit the remainder of the list out before the END
10912 directive. */
10914 struct import
10916 struct import * next;
10917 const char * name;
10920 static struct import * imports_list = NULL;
10922 void
10923 aof_add_import (name)
10924 const char * name;
10926 struct import * new;
10928 for (new = imports_list; new; new = new->next)
10929 if (new->name == name)
10930 return;
10932 new = (struct import *) xmalloc (sizeof (struct import));
10933 new->next = imports_list;
10934 imports_list = new;
10935 new->name = name;
10938 void
10939 aof_delete_import (name)
10940 const char * name;
10942 struct import ** old;
10944 for (old = &imports_list; *old; old = & (*old)->next)
10946 if ((*old)->name == name)
10948 *old = (*old)->next;
10949 return;
10954 int arm_main_function = 0;
10956 void
10957 aof_dump_imports (f)
10958 FILE * f;
10960 /* The AOF assembler needs this to cause the startup code to be extracted
10961 from the library. Bringing in __main causes the whole thing to work
10962 automagically. */
10963 if (arm_main_function)
10965 text_section ();
10966 fputs ("\tIMPORT __main\n", f);
10967 fputs ("\tDCD __main\n", f);
10970 /* Now dump the remaining imports. */
10971 while (imports_list)
10973 fprintf (f, "\tIMPORT\t");
10974 assemble_name (f, imports_list->name);
10975 fputc ('\n', f);
10976 imports_list = imports_list->next;
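/* The dump produced above looks roughly like this (illustrative
   symbol names):

	IMPORT __main
	DCD __main
	IMPORT	printf
	IMPORT	memcpy  */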
10979 #endif /* AOF_ASSEMBLER */
10981 #ifdef OBJECT_FORMAT_ELF
10982 /* Switch to an arbitrary section NAME with attributes as specified
10983 by FLAGS. ALIGN specifies any known alignment requirements for
10984 the section; 0 if the default should be used.
10986 Differs from the default elf version only in the prefix character
10987 used before the section type. */
10989 static void
10990 arm_elf_asm_named_section (name, flags)
10991 const char *name;
10992 unsigned int flags;
10994 char flagchars[8], *f = flagchars;
10995 const char *type;
10997 if (!(flags & SECTION_DEBUG))
10998 *f++ = 'a';
10999 if (flags & SECTION_WRITE)
11000 *f++ = 'w';
11001 if (flags & SECTION_CODE)
11002 *f++ = 'x';
11003 if (flags & SECTION_SMALL)
11004 *f++ = 's';
11005 if (flags & SECTION_MERGE)
11006 *f++ = 'M';
11007 if (flags & SECTION_STRINGS)
11008 *f++ = 'S';
11009 *f = '\0';
11011 if (flags & SECTION_BSS)
11012 type = "nobits";
11013 else
11014 type = "progbits";
11016 if (flags & SECTION_ENTSIZE)
11017 fprintf (asm_out_file, "\t.section\t%s,\"%s\",%%%s,%d\n",
11018 name, flagchars, type, flags & SECTION_ENTSIZE);
11019 else
11020 fprintf (asm_out_file, "\t.section\t%s,\"%s\",%%%s\n",
11021 name, flagchars, type);
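/* E.g. for a mergeable string section this emits (illustrative):

	.section .rodata.str1.1,"aMS",%progbits,1

   where the default ELF hook would print "@progbits"; '@' starts
   a comment in ARM assembler syntax, hence the '%' used here.  */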
11023 #endif