Add REG_MAYBE_DEAD note to frame pointer initialisation instruction.
[official-gcc.git] / gcc / config / arm / arm.c
blob 8ef7355ec8e2c48d54843e6291ce9fb0f6fd91fb
1 /* Output routines for GCC for ARM.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002
3 Free Software Foundation, Inc.
4 Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
5 and Martin Simmons (@harleqn.co.uk).
6 More major hacks by Richard Earnshaw (rearnsha@arm.com).
8 This file is part of GNU CC.
10 GNU CC is free software; you can redistribute it and/or modify
11 it under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 2, or (at your option)
13 any later version.
15 GNU CC is distributed in the hope that it will be useful,
16 but WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 GNU General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with GNU CC; see the file COPYING. If not, write to
22 the Free Software Foundation, 59 Temple Place - Suite 330,
23 Boston, MA 02111-1307, USA. */
25 #include "config.h"
26 #include "system.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "obstack.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "real.h"
33 #include "insn-config.h"
34 #include "conditions.h"
35 #include "output.h"
36 #include "insn-attr.h"
37 #include "flags.h"
38 #include "reload.h"
39 #include "function.h"
40 #include "expr.h"
41 #include "optabs.h"
42 #include "toplev.h"
43 #include "recog.h"
44 #include "ggc.h"
45 #include "except.h"
46 #include "c-pragma.h"
47 #include "integrate.h"
48 #include "tm_p.h"
49 #include "target.h"
50 #include "target-def.h"
52 /* Forward definitions of types. */
53 typedef struct minipool_node Mnode;
54 typedef struct minipool_fixup Mfix;
56 /* In order to improve the layout of the prototypes below
57 some short type abbreviations are defined here. */
58 #define Hint HOST_WIDE_INT
59 #define Mmode enum machine_mode
60 #define Ulong unsigned long
61 #define Ccstar const char *
63 const struct attribute_spec arm_attribute_table[];
65 /* Forward function declarations. */
66 static void arm_add_gc_roots PARAMS ((void));
67 static int arm_gen_constant PARAMS ((enum rtx_code, Mmode, Hint, rtx, rtx, int, int));
68 static Ulong bit_count PARAMS ((signed int));
69 static int const_ok_for_op PARAMS ((Hint, enum rtx_code));
70 static int eliminate_lr2ip PARAMS ((rtx *));
71 static rtx emit_multi_reg_push PARAMS ((int));
72 static rtx emit_sfm PARAMS ((int, int));
73 #ifndef AOF_ASSEMBLER
74 static bool arm_assemble_integer PARAMS ((rtx, unsigned int, int));
75 #endif
76 static Ccstar fp_const_from_val PARAMS ((REAL_VALUE_TYPE *));
77 static arm_cc get_arm_condition_code PARAMS ((rtx));
78 static void init_fpa_table PARAMS ((void));
79 static Hint int_log2 PARAMS ((Hint));
80 static rtx is_jump_table PARAMS ((rtx));
81 static Ccstar output_multi_immediate PARAMS ((rtx *, Ccstar, Ccstar, int, Hint));
82 static void print_multi_reg PARAMS ((FILE *, Ccstar, int, int));
83 static Mmode select_dominance_cc_mode PARAMS ((rtx, rtx, Hint));
84 static Ccstar shift_op PARAMS ((rtx, Hint *));
85 static void arm_init_machine_status PARAMS ((struct function *));
86 static void arm_mark_machine_status PARAMS ((struct function *));
87 static void arm_free_machine_status PARAMS ((struct function *));
88 static int number_of_first_bit_set PARAMS ((int));
89 static void replace_symbols_in_block PARAMS ((tree, rtx, rtx));
90 static void thumb_exit PARAMS ((FILE *, int, rtx));
91 static void thumb_pushpop PARAMS ((FILE *, int, int));
92 static Ccstar thumb_condition_code PARAMS ((rtx, int));
93 static rtx is_jump_table PARAMS ((rtx));
94 static Hint get_jump_table_size PARAMS ((rtx));
95 static Mnode * move_minipool_fix_forward_ref PARAMS ((Mnode *, Mnode *, Hint));
96 static Mnode * add_minipool_forward_ref PARAMS ((Mfix *));
97 static Mnode * move_minipool_fix_backward_ref PARAMS ((Mnode *, Mnode *, Hint));
98 static Mnode * add_minipool_backward_ref PARAMS ((Mfix *));
99 static void assign_minipool_offsets PARAMS ((Mfix *));
100 static void arm_print_value PARAMS ((FILE *, rtx));
101 static void dump_minipool PARAMS ((rtx));
102 static int arm_barrier_cost PARAMS ((rtx));
103 static Mfix * create_fix_barrier PARAMS ((Mfix *, Hint));
104 static void push_minipool_barrier PARAMS ((rtx, Hint));
105 static void push_minipool_fix PARAMS ((rtx, Hint, rtx *, Mmode, rtx));
106 static void note_invalid_constants PARAMS ((rtx, Hint));
107 static int current_file_function_operand PARAMS ((rtx));
108 static Ulong arm_compute_save_reg0_reg12_mask PARAMS ((void));
109 static Ulong arm_compute_save_reg_mask PARAMS ((void));
110 static Ulong arm_isr_value PARAMS ((tree));
111 static Ulong arm_compute_func_type PARAMS ((void));
112 static tree arm_handle_fndecl_attribute PARAMS ((tree *, tree, tree, int, bool *));
113 static tree arm_handle_isr_attribute PARAMS ((tree *, tree, tree, int, bool *));
114 static void arm_output_function_epilogue PARAMS ((FILE *, Hint));
115 static void arm_output_function_prologue PARAMS ((FILE *, Hint));
116 static void thumb_output_function_prologue PARAMS ((FILE *, Hint));
117 static int arm_comp_type_attributes PARAMS ((tree, tree));
118 static void arm_set_default_type_attributes PARAMS ((tree));
119 static int arm_adjust_cost PARAMS ((rtx, rtx, rtx, int));
120 #ifdef OBJECT_FORMAT_ELF
121 static void arm_elf_asm_named_section PARAMS ((const char *, unsigned int));
122 #endif
124 #undef Hint
125 #undef Mmode
126 #undef Ulong
127 #undef Ccstar
129 /* Initialize the GCC target structure. */
130 #ifdef TARGET_DLLIMPORT_DECL_ATTRIBUTES
131 #undef TARGET_MERGE_DECL_ATTRIBUTES
132 #define TARGET_MERGE_DECL_ATTRIBUTES merge_dllimport_decl_attributes
133 #endif
135 #undef TARGET_ATTRIBUTE_TABLE
136 #define TARGET_ATTRIBUTE_TABLE arm_attribute_table
138 #ifdef AOF_ASSEMBLER
139 #undef TARGET_ASM_BYTE_OP
140 #define TARGET_ASM_BYTE_OP "\tDCB\t"
141 #undef TARGET_ASM_ALIGNED_HI_OP
142 #define TARGET_ASM_ALIGNED_HI_OP "\tDCW\t"
143 #undef TARGET_ASM_ALIGNED_SI_OP
144 #define TARGET_ASM_ALIGNED_SI_OP "\tDCD\t"
145 #else
146 #undef TARGET_ASM_ALIGNED_SI_OP
147 #define TARGET_ASM_ALIGNED_SI_OP NULL
148 #undef TARGET_ASM_INTEGER
149 #define TARGET_ASM_INTEGER arm_assemble_integer
150 #endif
152 #undef TARGET_ASM_FUNCTION_PROLOGUE
153 #define TARGET_ASM_FUNCTION_PROLOGUE arm_output_function_prologue
155 #undef TARGET_ASM_FUNCTION_EPILOGUE
156 #define TARGET_ASM_FUNCTION_EPILOGUE arm_output_function_epilogue
158 #undef TARGET_COMP_TYPE_ATTRIBUTES
159 #define TARGET_COMP_TYPE_ATTRIBUTES arm_comp_type_attributes
161 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
162 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES arm_set_default_type_attributes
164 #undef TARGET_INIT_BUILTINS
165 #define TARGET_INIT_BUILTINS arm_init_builtins
167 #undef TARGET_EXPAND_BUILTIN
168 #define TARGET_EXPAND_BUILTIN arm_expand_builtin
170 #undef TARGET_SCHED_ADJUST_COST
171 #define TARGET_SCHED_ADJUST_COST arm_adjust_cost
173 struct gcc_target targetm = TARGET_INITIALIZER;
175 /* Obstack for minipool constant handling. */
176 static struct obstack minipool_obstack;
177 static char * minipool_startobj;
179 #define obstack_chunk_alloc xmalloc
180 #define obstack_chunk_free free
182 /* The maximum number of insns skipped which
183 will be conditionalised if possible. */
184 static int max_insns_skipped = 5;
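/* For instance, with the default limit a fragment such as

       if (x == 0)
         y = 1;
       else
         y = 2;

   can be emitted as straight-line conditional execution,

       cmp   r0, #0
       moveq r1, #1
       movne r1, #2

   rather than as compares and branches, because neither arm of the
   branch is longer than max_insns_skipped instructions (register
   assignments here are illustrative only). */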
186 extern FILE * asm_out_file;
188 /* True if we are currently building a constant table. */
189 int making_const_table;
191 /* Define the information needed to generate branch insns. This is
192 stored from the compare operation. */
193 rtx arm_compare_op0, arm_compare_op1;
195 /* What type of floating point are we tuning for? */
196 enum floating_point_type arm_fpu;
198 /* What type of floating point instructions are available? */
199 enum floating_point_type arm_fpu_arch;
201 /* What program mode is the cpu running in? 26-bit mode or 32-bit mode. */
202 enum prog_mode_type arm_prgmode;
204 /* Set by the -mfp=... option. */
205 const char * target_fp_name = NULL;
207 /* Used to parse -mstructure_size_boundary command line option. */
208 const char * structure_size_string = NULL;
209 int arm_structure_size_boundary = DEFAULT_STRUCTURE_SIZE_BOUNDARY;
211 /* Bit values used to identify processor capabilities. */
212 #define FL_CO_PROC (1 << 0) /* Has external co-processor bus */
213 #define FL_FAST_MULT (1 << 1) /* Fast multiply */
214 #define FL_MODE26 (1 << 2) /* 26-bit mode support */
215 #define FL_MODE32 (1 << 3) /* 32-bit mode support */
216 #define FL_ARCH4 (1 << 4) /* Architecture rel 4 */
217 #define FL_ARCH5 (1 << 5) /* Architecture rel 5 */
218 #define FL_THUMB (1 << 6) /* Thumb aware */
219 #define FL_LDSCHED (1 << 7) /* Load scheduling necessary */
220 #define FL_STRONG (1 << 8) /* StrongARM */
221 #define FL_ARCH5E (1 << 9) /* DSP extensions to v5 */
222 #define FL_XSCALE (1 << 10) /* XScale */
224 /* The bits in this mask specify which
225 instructions we are allowed to generate. */
226 static int insn_flags = 0;
228 /* The bits in this mask specify which instruction scheduling options should
229 be used. Note - there is an overlap with the FL_FAST_MULT. For some
230 hardware we want to be able to generate the multiply instructions, but to
231 tune as if they were not present in the architecture. */
232 static int tune_flags = 0;
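/* For example, -mcpu=arm7m -mtune=arm6 takes insn_flags from the arm7m
   entry below (FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT), so
   multiply instructions may be generated, while tune_flags comes from
   the arm6 entry, which lacks FL_FAST_MULT, so the code is scheduled
   as if no fast multiplier were present. */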
234 /* The following are used in the arm.md file as equivalents to bits
235 in the above two flag variables. */
237 /* Nonzero if this is an "M" variant of the processor. */
238 int arm_fast_multiply = 0;
240 /* Nonzero if this chip supports the ARM Architecture 4 extensions. */
241 int arm_arch4 = 0;
243 /* Nonzero if this chip supports the ARM Architecture 5 extensions. */
244 int arm_arch5 = 0;
246 /* Nonzero if this chip supports the ARM Architecture 5E extensions. */
247 int arm_arch5e = 0;
249 /* Nonzero if this chip can benefit from load scheduling. */
250 int arm_ld_sched = 0;
252 /* Nonzero if this chip is a StrongARM. */
253 int arm_is_strong = 0;
255 /* Nonzero if this chip is an XScale. */
256 int arm_is_xscale = 0;
258 /* Nonzero if this chip is an ARM6 or an ARM7. */
259 int arm_is_6_or_7 = 0;
261 /* Nonzero if generating Thumb instructions. */
262 int thumb_code = 0;
264 /* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
265 must report the mode of the memory reference from PRINT_OPERAND to
266 PRINT_OPERAND_ADDRESS. */
267 enum machine_mode output_memory_reference_mode;
269 /* Nonzero if the prologue must setup `fp'. */
270 int current_function_anonymous_args;
272 /* The register number to be used for the PIC offset register. */
273 const char * arm_pic_register_string = NULL;
274 int arm_pic_register = 9;
276 /* Set to 1 when a return insn is output, this means that the epilogue
277 is not needed. */
278 int return_used_this_function;
280 /* Set to 1 after arm_reorg has started. Reset to start at the start of
281 the next function. */
282 static int after_arm_reorg = 0;
284 /* The maximum number of insns to be used when loading a constant. */
285 static int arm_constant_limit = 3;
287 /* For an explanation of these variables, see final_prescan_insn below. */
288 int arm_ccfsm_state;
289 enum arm_cond_code arm_current_cc;
290 rtx arm_target_insn;
291 int arm_target_label;
293 /* The condition codes of the ARM, and the inverse function. */
294 static const char * const arm_condition_codes[] =
296 "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
297 "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
300 #define streq(string1, string2) (strcmp (string1, string2) == 0)
302 /* Initialization code. */
304 struct processors
306 const char *const name;
307 const unsigned int flags;
310 /* Not all of these give usefully different compilation alternatives,
311 but there is no simple way of generalizing them. */
312 static const struct processors all_cores[] =
314 /* ARM Cores */
316 {"arm2", FL_CO_PROC | FL_MODE26 },
317 {"arm250", FL_CO_PROC | FL_MODE26 },
318 {"arm3", FL_CO_PROC | FL_MODE26 },
319 {"arm6", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
320 {"arm60", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
321 {"arm600", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
322 {"arm610", FL_MODE26 | FL_MODE32 },
323 {"arm620", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
324 {"arm7", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
325 /* arm7m doesn't exist on its own, but only with D (and I); since
326 those don't alter the code, arm7m is sometimes used. */
327 {"arm7m", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
328 {"arm7d", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
329 {"arm7dm", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
330 {"arm7di", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
331 {"arm7dmi", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
332 {"arm70", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
333 {"arm700", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
334 {"arm700i", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
335 {"arm710", FL_MODE26 | FL_MODE32 },
336 {"arm710t", FL_MODE26 | FL_MODE32 | FL_THUMB },
337 {"arm720", FL_MODE26 | FL_MODE32 },
338 {"arm720t", FL_MODE26 | FL_MODE32 | FL_THUMB },
339 {"arm740t", FL_MODE26 | FL_MODE32 | FL_THUMB },
340 {"arm710c", FL_MODE26 | FL_MODE32 },
341 {"arm7100", FL_MODE26 | FL_MODE32 },
342 {"arm7500", FL_MODE26 | FL_MODE32 },
343 /* Doesn't have an external co-proc, but does have embedded fpu. */
344 {"arm7500fe", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
345 {"arm7tdmi", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
346 {"arm8", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
347 {"arm810", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
348 {"arm9", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
349 {"arm920", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
350 {"arm920t", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
351 {"arm940t", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
352 {"arm9tdmi", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
353 {"arm9e", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
354 {"strongarm", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
355 {"strongarm110", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
356 {"strongarm1100", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
357 {"strongarm1110", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
358 {"arm10tdmi", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_ARCH5 },
359 {"arm1020t", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_ARCH5 },
360 {"xscale", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_STRONG | FL_ARCH5 | FL_ARCH5E | FL_XSCALE },
362 {NULL, 0}
365 static const struct processors all_architectures[] =
367 /* ARM Architectures */
369 { "armv2", FL_CO_PROC | FL_MODE26 },
370 { "armv2a", FL_CO_PROC | FL_MODE26 },
371 { "armv3", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
372 { "armv3m", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
373 { "armv4", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 },
374 /* Strictly, FL_MODE26 is a permitted option for v4t, but there are no
375 implementations that support it, so we will leave it out for now. */
376 { "armv4t", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
377 { "armv5", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
378 { "armv5t", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
379 { "armv5te", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 | FL_ARCH5E },
380 { NULL, 0 }
383 /* This is a magic structure. The 'string' field is magically filled in
384 with a pointer to the value specified by the user on the command line
385 assuming that the user has specified such a value. */
387 struct arm_cpu_select arm_select[] =
389 /* string name processors */
390 { NULL, "-mcpu=", all_cores },
391 { NULL, "-march=", all_architectures },
392 { NULL, "-mtune=", all_cores }
395 /* Return the number of bits set in VALUE. */
396 static unsigned long
397 bit_count (value)
398 signed int value;
400 unsigned long count = 0;
402 while (value)
404 value &= ~(value & -value);
405 ++count;
408 return count;
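/* The loop above relies on the clear-lowest-set-bit idiom:
   (value & -value) isolates the least significant set bit, so
   "value &= ~(value & -value)" removes exactly one bit per iteration.
   For value == 0x14 (binary 10100) the loop runs twice,
   10100 -> 10000 -> 0, giving count == 2. */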
411 /* Fix up any incompatible options that the user has specified.
412 This has now turned into a maze. */
413 void
414 arm_override_options ()
416 unsigned i;
418 /* Set up the flags based on the cpu/architecture selected by the user. */
419 for (i = ARRAY_SIZE (arm_select); i--;)
421 struct arm_cpu_select * ptr = arm_select + i;
423 if (ptr->string != NULL && ptr->string[0] != '\0')
425 const struct processors * sel;
427 for (sel = ptr->processors; sel->name != NULL; sel++)
428 if (streq (ptr->string, sel->name))
430 if (i == 2)
431 tune_flags = sel->flags;
432 else
434 /* If we have been given an architecture and a processor
435 make sure that they are compatible. We only generate
436 a warning though, and we prefer the CPU over the
437 architecture. */
438 if (insn_flags != 0 && (insn_flags ^ sel->flags))
439 warning ("switch -mcpu=%s conflicts with -march= switch",
440 ptr->string);
442 insn_flags = sel->flags;
445 break;
448 if (sel->name == NULL)
449 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
453 /* If the user did not specify a processor, choose one for them. */
454 if (insn_flags == 0)
456 const struct processors * sel;
457 unsigned int sought;
458 static const struct cpu_default
460 const int cpu;
461 const char *const name;
463 cpu_defaults[] =
465 { TARGET_CPU_arm2, "arm2" },
466 { TARGET_CPU_arm6, "arm6" },
467 { TARGET_CPU_arm610, "arm610" },
468 { TARGET_CPU_arm710, "arm710" },
469 { TARGET_CPU_arm7m, "arm7m" },
470 { TARGET_CPU_arm7500fe, "arm7500fe" },
471 { TARGET_CPU_arm7tdmi, "arm7tdmi" },
472 { TARGET_CPU_arm8, "arm8" },
473 { TARGET_CPU_arm810, "arm810" },
474 { TARGET_CPU_arm9, "arm9" },
475 { TARGET_CPU_strongarm, "strongarm" },
476 { TARGET_CPU_xscale, "xscale" },
477 { TARGET_CPU_generic, "arm" },
478 { 0, 0 }
480 const struct cpu_default * def;
482 /* Find the default. */
483 for (def = cpu_defaults; def->name; def++)
484 if (def->cpu == TARGET_CPU_DEFAULT)
485 break;
487 /* Make sure we found the default CPU. */
488 if (def->name == NULL)
489 abort ();
491 /* Find the default CPU's flags. */
492 for (sel = all_cores; sel->name != NULL; sel++)
493 if (streq (def->name, sel->name))
494 break;
496 if (sel->name == NULL)
497 abort ();
499 insn_flags = sel->flags;
501 /* Now check to see if the user has specified some command line
502 switches that require certain abilities from the CPU. */
503 sought = 0;
505 if (TARGET_INTERWORK || TARGET_THUMB)
507 sought |= (FL_THUMB | FL_MODE32);
509 /* Force apcs-32 to be used for interworking. */
510 target_flags |= ARM_FLAG_APCS_32;
512 /* There are no ARM processors that support both APCS-26 and
513 interworking. Therefore we force FL_MODE26 to be removed
514 from insn_flags here (if it was set), so that the search
515 below will always be able to find a compatible processor. */
516 insn_flags &= ~FL_MODE26;
518 else if (!TARGET_APCS_32)
519 sought |= FL_MODE26;
521 if (sought != 0 && ((sought & insn_flags) != sought))
523 /* Try to locate a CPU type that supports all of the abilities
524 of the default CPU, plus the extra abilities requested by
525 the user. */
526 for (sel = all_cores; sel->name != NULL; sel++)
527 if ((sel->flags & sought) == (sought | insn_flags))
528 break;
530 if (sel->name == NULL)
532 unsigned int current_bit_count = 0;
533 const struct processors * best_fit = NULL;
535 /* Ideally we would like to issue an error message here
536 saying that it was not possible to find a CPU compatible
537 with the default CPU, but which also supports the command
538 line options specified by the programmer, and so they
539 ought to use the -mcpu=<name> command line option to
540 override the default CPU type.
542 Unfortunately this does not work with multilibing. We
543 need to be able to support multilibs for -mapcs-26 and for
544 -mthumb-interwork and there is no CPU that can support both
545 options. Instead if we cannot find a cpu that has both the
546 characteristics of the default cpu and the given command line
547 options we scan the array again looking for a best match. */
548 for (sel = all_cores; sel->name != NULL; sel++)
549 if ((sel->flags & sought) == sought)
551 unsigned int count;
553 count = bit_count (sel->flags & insn_flags);
555 if (count >= current_bit_count)
557 best_fit = sel;
558 current_bit_count = count;
562 if (best_fit == NULL)
563 abort ();
564 else
565 sel = best_fit;
568 insn_flags = sel->flags;
572 /* If tuning has not been specified, tune for whichever processor or
573 architecture has been selected. */
574 if (tune_flags == 0)
575 tune_flags = insn_flags;
577 /* Make sure that the processor choice does not conflict with any of the
578 other command line choices. */
579 if (TARGET_APCS_32 && !(insn_flags & FL_MODE32))
581 /* If APCS-32 was not the default then it must have been set by the
582 user, so issue a warning message. If the user has specified
583 "-mapcs-32 -mcpu=arm2" then we loose here. */
584 if ((TARGET_DEFAULT & ARM_FLAG_APCS_32) == 0)
585 warning ("target CPU does not support APCS-32" );
586 target_flags &= ~ARM_FLAG_APCS_32;
588 else if (!TARGET_APCS_32 && !(insn_flags & FL_MODE26))
590 warning ("target CPU does not support APCS-26" );
591 target_flags |= ARM_FLAG_APCS_32;
594 if (TARGET_INTERWORK && !(insn_flags & FL_THUMB))
596 warning ("target CPU does not support interworking" );
597 target_flags &= ~ARM_FLAG_INTERWORK;
600 if (TARGET_THUMB && !(insn_flags & FL_THUMB))
602 warning ("target CPU does not support THUMB instructions");
603 target_flags &= ~ARM_FLAG_THUMB;
606 if (TARGET_APCS_FRAME && TARGET_THUMB)
608 /* warning ("ignoring -mapcs-frame because -mthumb was used"); */
609 target_flags &= ~ARM_FLAG_APCS_FRAME;
612 /* TARGET_BACKTRACE calls leaf_function_p, which causes a crash if done
613 from here where no function is being compiled currently. */
614 if ((target_flags & (THUMB_FLAG_LEAF_BACKTRACE | THUMB_FLAG_BACKTRACE))
615 && TARGET_ARM)
616 warning ("enabling backtrace support is only meaningful when compiling for the Thumb");
618 if (TARGET_ARM && TARGET_CALLEE_INTERWORKING)
619 warning ("enabling callee interworking support is only meaningful when compiling for the Thumb");
621 if (TARGET_ARM && TARGET_CALLER_INTERWORKING)
622 warning ("enabling caller interworking support is only meaningful when compiling for the Thumb");
624 /* If interworking is enabled then APCS-32 must be selected as well. */
625 if (TARGET_INTERWORK)
627 if (!TARGET_APCS_32)
628 warning ("interworking forces APCS-32 to be used" );
629 target_flags |= ARM_FLAG_APCS_32;
632 if (TARGET_APCS_STACK && !TARGET_APCS_FRAME)
634 warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
635 target_flags |= ARM_FLAG_APCS_FRAME;
638 if (TARGET_POKE_FUNCTION_NAME)
639 target_flags |= ARM_FLAG_APCS_FRAME;
641 if (TARGET_APCS_REENT && flag_pic)
642 error ("-fpic and -mapcs-reent are incompatible");
644 if (TARGET_APCS_REENT)
645 warning ("APCS reentrant code not supported. Ignored");
647 /* If this target is normally configured to use APCS frames, warn if they
648 are turned off and debugging is turned on. */
649 if (TARGET_ARM
650 && write_symbols != NO_DEBUG
651 && !TARGET_APCS_FRAME
652 && (TARGET_DEFAULT & ARM_FLAG_APCS_FRAME))
653 warning ("-g with -mno-apcs-frame may not give sensible debugging");
655 /* If stack checking is disabled, we can use r10 as the PIC register,
656 which keeps r9 available. */
657 if (flag_pic && !TARGET_APCS_STACK)
658 arm_pic_register = 10;
660 if (TARGET_APCS_FLOAT)
661 warning ("passing floating point arguments in fp regs not yet supported");
663 /* Initialise boolean versions of the flags, for use in the arm.md file. */
664 arm_fast_multiply = (insn_flags & FL_FAST_MULT) != 0;
665 arm_arch4 = (insn_flags & FL_ARCH4) != 0;
666 arm_arch5 = (insn_flags & FL_ARCH5) != 0;
667 arm_arch5e = (insn_flags & FL_ARCH5E) != 0;
668 arm_is_xscale = (insn_flags & FL_XSCALE) != 0;
670 arm_ld_sched = (tune_flags & FL_LDSCHED) != 0;
671 arm_is_strong = (tune_flags & FL_STRONG) != 0;
672 thumb_code = (TARGET_ARM == 0);
673 arm_is_6_or_7 = (((tune_flags & (FL_MODE26 | FL_MODE32))
674 && !(tune_flags & FL_ARCH4))) != 0;
676 /* Default value for floating point code... if no co-processor
677 bus, then schedule for emulated floating point. Otherwise,
678 assume the user has an FPA.
679 Note: this does not prevent use of floating point instructions,
680 -msoft-float does that. */
681 arm_fpu = (tune_flags & FL_CO_PROC) ? FP_HARD : FP_SOFT3;
683 if (target_fp_name)
685 if (streq (target_fp_name, "2"))
686 arm_fpu_arch = FP_SOFT2;
687 else if (streq (target_fp_name, "3"))
688 arm_fpu_arch = FP_SOFT3;
689 else
690 error ("invalid floating point emulation option: -mfpe-%s",
691 target_fp_name);
693 else
694 arm_fpu_arch = FP_DEFAULT;
696 if (TARGET_FPE && arm_fpu != FP_HARD)
697 arm_fpu = FP_SOFT2;
699 /* For arm2/3 there is no need to do any scheduling if there is only
700 a floating point emulator, or we are doing software floating-point. */
701 if ((TARGET_SOFT_FLOAT || arm_fpu != FP_HARD)
702 && (tune_flags & FL_MODE32) == 0)
703 flag_schedule_insns = flag_schedule_insns_after_reload = 0;
705 arm_prgmode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;
707 if (structure_size_string != NULL)
709 int size = strtol (structure_size_string, NULL, 0);
711 if (size == 8 || size == 32)
712 arm_structure_size_boundary = size;
713 else
714 warning ("structure size boundary can only be set to 8 or 32");
717 if (arm_pic_register_string != NULL)
719 int pic_register;
721 if (!flag_pic)
722 warning ("-mpic-register= is useless without -fpic");
724 pic_register = decode_reg_name (arm_pic_register_string);
726 /* Prevent the user from choosing an obviously stupid PIC register. */
727 if (pic_register < 0 || call_used_regs[pic_register]
728 || pic_register == HARD_FRAME_POINTER_REGNUM
729 || pic_register == STACK_POINTER_REGNUM
730 || pic_register >= PC_REGNUM)
731 error ("unable to use '%s' for PIC register", arm_pic_register_string);
732 else
733 arm_pic_register = pic_register;
736 if (TARGET_THUMB && flag_schedule_insns)
738 /* Don't warn since it's on by default in -O2. */
739 flag_schedule_insns = 0;
742 /* If optimizing for space, don't synthesize constants.
743 For processors with load scheduling, it never costs more than 2 cycles
744 to load a constant, and the load scheduler may well reduce that to 1. */
745 if (optimize_size || (tune_flags & FL_LDSCHED))
746 arm_constant_limit = 1;
748 if (arm_is_xscale)
749 arm_constant_limit = 2;
751 /* If optimizing for size, bump the number of instructions that we
752 are prepared to conditionally execute (even on a StrongARM).
753 Otherwise for the StrongARM, which has early execution of branches,
754 a sequence that is worth skipping is shorter. */
755 if (optimize_size)
756 max_insns_skipped = 6;
757 else if (arm_is_strong)
758 max_insns_skipped = 3;
760 /* Register global variables with the garbage collector. */
761 arm_add_gc_roots ();
764 static void
765 arm_add_gc_roots ()
767 ggc_add_rtx_root (&arm_compare_op0, 1);
768 ggc_add_rtx_root (&arm_compare_op1, 1);
769 ggc_add_rtx_root (&arm_target_insn, 1); /* Not sure this is really a root. */
771 gcc_obstack_init(&minipool_obstack);
772 minipool_startobj = (char *) obstack_alloc (&minipool_obstack, 0);
775 /* A table of known ARM exception types.
776 For use with the interrupt function attribute. */
778 typedef struct
780 const char *const arg;
781 const unsigned long return_value;
783 isr_attribute_arg;
785 static const isr_attribute_arg isr_attribute_args [] =
787 { "IRQ", ARM_FT_ISR },
788 { "irq", ARM_FT_ISR },
789 { "FIQ", ARM_FT_FIQ },
790 { "fiq", ARM_FT_FIQ },
791 { "ABORT", ARM_FT_ISR },
792 { "abort", ARM_FT_ISR },
793 { "ABORT", ARM_FT_ISR },
794 { "abort", ARM_FT_ISR },
795 { "UNDEF", ARM_FT_EXCEPTION },
796 { "undef", ARM_FT_EXCEPTION },
797 { "SWI", ARM_FT_EXCEPTION },
798 { "swi", ARM_FT_EXCEPTION },
799 { NULL, ARM_FT_NORMAL }
802 /* Returns the interrupt function type corresponding to the given
803 attribute argument, or ARM_FT_UNKNOWN if the type cannot be determined. */
805 static unsigned long
806 arm_isr_value (argument)
807 tree argument;
809 const isr_attribute_arg * ptr;
810 const char * arg;
812 /* No argument - default to IRQ. */
813 if (argument == NULL_TREE)
814 return ARM_FT_ISR;
816 /* Get the value of the argument. */
817 if (TREE_VALUE (argument) == NULL_TREE
818 || TREE_CODE (TREE_VALUE (argument)) != STRING_CST)
819 return ARM_FT_UNKNOWN;
821 arg = TREE_STRING_POINTER (TREE_VALUE (argument));
823 /* Check it against the list of known arguments. */
824 for (ptr = isr_attribute_args; ptr->arg != NULL; ptr ++)
825 if (streq (arg, ptr->arg))
826 return ptr->return_value;
828 /* An unrecognised interrupt type. */
829 return ARM_FT_UNKNOWN;
832 /* Computes the type of the current function. */
834 static unsigned long
835 arm_compute_func_type ()
837 unsigned long type = ARM_FT_UNKNOWN;
838 tree a;
839 tree attr;
841 if (TREE_CODE (current_function_decl) != FUNCTION_DECL)
842 abort ();
844 /* Decide if the current function is volatile. Such functions
845 never return, and many memory cycles can be saved by not storing
846 register values that will never be needed again. This optimization
847 was added to speed up context switching in a kernel application. */
848 if (optimize > 0
849 && current_function_nothrow
850 && TREE_THIS_VOLATILE (current_function_decl))
851 type |= ARM_FT_VOLATILE;
853 if (current_function_needs_context)
854 type |= ARM_FT_NESTED;
856 attr = DECL_ATTRIBUTES (current_function_decl);
858 a = lookup_attribute ("naked", attr);
859 if (a != NULL_TREE)
860 type |= ARM_FT_NAKED;
862 if (cfun->machine->eh_epilogue_sp_ofs != NULL_RTX)
863 type |= ARM_FT_EXCEPTION_HANDLER;
864 else
866 a = lookup_attribute ("isr", attr);
867 if (a == NULL_TREE)
868 a = lookup_attribute ("interrupt", attr);
870 if (a == NULL_TREE)
871 type |= TARGET_INTERWORK ? ARM_FT_INTERWORKED : ARM_FT_NORMAL;
872 else
873 type |= arm_isr_value (TREE_VALUE (a));
876 return type;
879 /* Returns the type of the current function. */
881 unsigned long
882 arm_current_func_type ()
884 if (ARM_FUNC_TYPE (cfun->machine->func_type) == ARM_FT_UNKNOWN)
885 cfun->machine->func_type = arm_compute_func_type ();
887 return cfun->machine->func_type;
890 /* Return 1 if it is possible to return using a single instruction. */
892 int
893 use_return_insn (iscond)
894 int iscond;
896 int regno;
897 unsigned int func_type;
899 /* Never use a return instruction before reload has run. */
900 if (!reload_completed)
901 return 0;
903 func_type = arm_current_func_type ();
905 /* Naked functions, volatile functions and interrupt
906 functions all need special consideration. */
907 if (func_type & (ARM_FT_INTERRUPT | ARM_FT_VOLATILE | ARM_FT_NAKED))
908 return 0;
910 /* As do variadic functions. */
911 if (current_function_pretend_args_size
912 || current_function_anonymous_args
913 /* Or if the function calls __builtin_eh_return (). */
914 || ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER
915 /* Or if there is no frame pointer and there is a stack adjustment. */
916 || ((get_frame_size () + current_function_outgoing_args_size != 0)
917 && !frame_pointer_needed))
918 return 0;
920 /* Can't be done if interworking with Thumb, and any registers have been
921 stacked. Similarly, on StrongARM, conditional returns are expensive
922 if they aren't taken and registers have been stacked. */
923 if (iscond && arm_is_strong && frame_pointer_needed)
924 return 0;
926 if ((iscond && arm_is_strong)
927 || TARGET_INTERWORK)
929 for (regno = 0; regno <= LAST_ARM_REGNUM; regno++)
930 if (regs_ever_live[regno] && !call_used_regs[regno])
931 return 0;
933 if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
934 return 0;
937 /* Can't be done if any of the FPU regs are pushed,
938 since this also requires an insn. */
939 if (TARGET_HARD_FLOAT)
940 for (regno = FIRST_ARM_FP_REGNUM; regno <= LAST_ARM_FP_REGNUM; regno++)
941 if (regs_ever_live[regno] && !call_used_regs[regno])
942 return 0;
944 return 1;
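/* For example, a function whose entire epilogue is popping a few saved
   core registers can return with the single instruction
   "ldmfd sp!, {r4, r5, pc}" (or "mov pc, lr" in a simple leaf),
   whereas variadic functions, interrupt handlers and functions that
   must also restore FPA registers need a multi-instruction epilogue
   (instruction spellings above are illustrative). */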
947 /* Return TRUE if int I is a valid immediate ARM constant. */
949 int
950 const_ok_for_arm (i)
951 HOST_WIDE_INT i;
953 unsigned HOST_WIDE_INT mask = ~(unsigned HOST_WIDE_INT)0xFF;
955 /* For machines with >32 bit HOST_WIDE_INT, the bits above bit 31 must
956 be all zero, or all one. */
957 if ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff) != 0
958 && ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff)
959 != ((~(unsigned HOST_WIDE_INT) 0)
960 & ~(unsigned HOST_WIDE_INT) 0xffffffff)))
961 return FALSE;
963 /* Fast return for 0 and powers of 2 */
964 if ((i & (i - 1)) == 0)
965 return TRUE;
969 if ((i & mask & (unsigned HOST_WIDE_INT) 0xffffffff) == 0)
970 return TRUE;
971 mask =
972 (mask << 2) | ((mask & (unsigned HOST_WIDE_INT) 0xffffffff)
973 >> (32 - 2)) | ~(unsigned HOST_WIDE_INT) 0xffffffff;
975 while (mask != ~(unsigned HOST_WIDE_INT) 0xFF);
977 return FALSE;
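/* Equivalently: a valid ARM data-processing immediate is any value
   expressible as an 8-bit constant rotated right by an even amount.
   A self-contained sketch of the same test, assuming a 32-bit
   unsigned int (the helper name is hypothetical, not part of this
   file):

       static int
       is_arm_immediate (unsigned int x)
       {
         int rot;

         for (rot = 0; rot < 32; rot += 2)
           {
             unsigned int v;

             /* Rotating left by ROT undoes a rotate right by ROT.  */
             v = rot ? ((x << rot) | (x >> (32 - rot))) : x;
             if ((v & ~(unsigned int) 0xff) == 0)
               return 1;
           }
         return 0;
       }

   This accepts 0xff, 0xff00 and 0xff000000 but rejects 0x101. */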
980 /* Return true if I is a valid constant for the operation CODE. */
981 static int
982 const_ok_for_op (i, code)
983 HOST_WIDE_INT i;
984 enum rtx_code code;
986 if (const_ok_for_arm (i))
987 return 1;
989 switch (code)
991 case PLUS:
992 return const_ok_for_arm (ARM_SIGN_EXTEND (-i));
994 case MINUS: /* Should only occur with (MINUS I reg) => rsb */
995 case XOR:
996 case IOR:
997 return 0;
999 case AND:
1000 return const_ok_for_arm (ARM_SIGN_EXTEND (~i));
1002 default:
1003 abort ();
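/* For example, adding the constant -256 cannot be encoded directly,
   but const_ok_for_arm (256) holds, so PLUS accepts it and the insn
   is output as a SUB rather than an ADD; similarly an AND whose
   inverted constant is valid can be output as a BIC. */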
1007 /* Emit a sequence of insns to handle a large constant.
1008 CODE is the code of the operation required, it can be any of SET, PLUS,
1009 IOR, AND, XOR, MINUS;
1010 MODE is the mode in which the operation is being performed;
1011 VAL is the integer to operate on;
1012 SOURCE is the other operand (a register, or a null-pointer for SET);
1013 SUBTARGETS means it is safe to create scratch registers if that will
1014 either produce a simpler sequence, or we will want to cse the values.
1015 Return value is the number of insns emitted. */
1017 int
1018 arm_split_constant (code, mode, val, target, source, subtargets)
1019 enum rtx_code code;
1020 enum machine_mode mode;
1021 HOST_WIDE_INT val;
1022 rtx target;
1023 rtx source;
1024 int subtargets;
1026 if (subtargets || code == SET
1027 || (GET_CODE (target) == REG && GET_CODE (source) == REG
1028 && REGNO (target) != REGNO (source)))
1030 /* After arm_reorg has been called, we can't fix up expensive
1031 constants by pushing them into memory so we must synthesise
1032 them in-line, regardless of the cost. This is only likely to
1033 be more costly on chips that have load delay slots and we are
1034 compiling without running the scheduler (so no splitting
1035 occurred before the final instruction emission).
1037 Ref: gcc -O1 -mcpu=strongarm gcc.c-torture/compile/980506-2.c
1039 if (!after_arm_reorg
1040 && (arm_gen_constant (code, mode, val, target, source, 1, 0)
1041 > arm_constant_limit + (code != SET)))
1043 if (code == SET)
1045 /* Currently SET is the only monadic value for CODE, all
1046 the rest are dyadic. */
1047 emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (val)));
1048 return 1;
1050 else
1052 rtx temp = subtargets ? gen_reg_rtx (mode) : target;
1054 emit_insn (gen_rtx_SET (VOIDmode, temp, GEN_INT (val)));
1055 /* For MINUS, the value is subtracted from, since we never
1056 have subtraction of a constant. */
1057 if (code == MINUS)
1058 emit_insn (gen_rtx_SET (VOIDmode, target,
1059 gen_rtx_MINUS (mode, temp, source)));
1060 else
1061 emit_insn (gen_rtx_SET (VOIDmode, target,
1062 gen_rtx (code, mode, source, temp)));
1063 return 2;
1068 return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
1071 static int
1072 count_insns_for_constant (HOST_WIDE_INT remainder, int i)
1074 HOST_WIDE_INT temp1;
1075 int num_insns = 0;
1076 do
1078 int end;
1080 if (i <= 0)
1081 i += 32;
1082 if (remainder & (3 << (i - 2)))
1084 end = i - 8;
1085 if (end < 0)
1086 end += 32;
1087 temp1 = remainder & ((0x0ff << end)
1088 | ((i < end) ? (0xff >> (32 - end)) : 0));
1089 remainder &= ~temp1;
1090 num_insns++;
1091 i -= 6;
1093 i -= 2;
1094 } while (remainder);
1095 return num_insns;
1098 /* As above, but extra parameter GENERATE which, if clear, suppresses
1099 RTL generation. */
1101 static int
1102 arm_gen_constant (code, mode, val, target, source, subtargets, generate)
1103 enum rtx_code code;
1104 enum machine_mode mode;
1105 HOST_WIDE_INT val;
1106 rtx target;
1107 rtx source;
1108 int subtargets;
1109 int generate;
1111 int can_invert = 0;
1112 int can_negate = 0;
1113 int can_negate_initial = 0;
1114 int can_shift = 0;
1115 int i;
1116 int num_bits_set = 0;
1117 int set_sign_bit_copies = 0;
1118 int clear_sign_bit_copies = 0;
1119 int clear_zero_bit_copies = 0;
1120 int set_zero_bit_copies = 0;
1121 int insns = 0;
1122 unsigned HOST_WIDE_INT temp1, temp2;
1123 unsigned HOST_WIDE_INT remainder = val & 0xffffffff;
1125 /* Find out which operations are safe for a given CODE. Also do a quick
1126 check for degenerate cases; these can occur when DImode operations
1127 are split. */
1128 switch (code)
1130 case SET:
1131 can_invert = 1;
1132 can_shift = 1;
1133 can_negate = 1;
1134 break;
1136 case PLUS:
1137 can_negate = 1;
1138 can_negate_initial = 1;
1139 break;
1141 case IOR:
1142 if (remainder == 0xffffffff)
1144 if (generate)
1145 emit_insn (gen_rtx_SET (VOIDmode, target,
1146 GEN_INT (ARM_SIGN_EXTEND (val))));
1147 return 1;
1149 if (remainder == 0)
1151 if (reload_completed && rtx_equal_p (target, source))
1152 return 0;
1153 if (generate)
1154 emit_insn (gen_rtx_SET (VOIDmode, target, source));
1155 return 1;
1157 break;
1159 case AND:
1160 if (remainder == 0)
1162 if (generate)
1163 emit_insn (gen_rtx_SET (VOIDmode, target, const0_rtx));
1164 return 1;
1166 if (remainder == 0xffffffff)
1168 if (reload_completed && rtx_equal_p (target, source))
1169 return 0;
1170 if (generate)
1171 emit_insn (gen_rtx_SET (VOIDmode, target, source));
1172 return 1;
1174 can_invert = 1;
1175 break;
1177 case XOR:
1178 if (remainder == 0)
1180 if (reload_completed && rtx_equal_p (target, source))
1181 return 0;
1182 if (generate)
1183 emit_insn (gen_rtx_SET (VOIDmode, target, source));
1184 return 1;
1186 if (remainder == 0xffffffff)
1188 if (generate)
1189 emit_insn (gen_rtx_SET (VOIDmode, target,
1190 gen_rtx_NOT (mode, source)));
1191 return 1;
1194 /* We don't know how to handle this yet below. */
1195 abort ();
1197 case MINUS:
1198 /* We treat MINUS as (val - source), since (source - val) is always
1199 passed as (source + (-val)). */
1200 if (remainder == 0)
1202 if (generate)
1203 emit_insn (gen_rtx_SET (VOIDmode, target,
1204 gen_rtx_NEG (mode, source)));
1205 return 1;
1207 if (const_ok_for_arm (val))
1209 if (generate)
1210 emit_insn (gen_rtx_SET (VOIDmode, target,
1211 gen_rtx_MINUS (mode, GEN_INT (val),
1212 source)));
1213 return 1;
1215 can_negate = 1;
1217 break;
1219 default:
1220 abort ();
1223 /* If we can do it in one insn get out quickly. */
1224 if (const_ok_for_arm (val)
1225 || (can_negate_initial && const_ok_for_arm (-val))
1226 || (can_invert && const_ok_for_arm (~val)))
1228 if (generate)
1229 emit_insn (gen_rtx_SET (VOIDmode, target,
1230 (source ? gen_rtx (code, mode, source,
1231 GEN_INT (val))
1232 : GEN_INT (val))));
1233 return 1;
1236 /* Calculate a few attributes that may be useful for specific
1237 optimizations. */
1238 for (i = 31; i >= 0; i--)
1240 if ((remainder & (1 << i)) == 0)
1241 clear_sign_bit_copies++;
1242 else
1243 break;
1246 for (i = 31; i >= 0; i--)
1248 if ((remainder & (1 << i)) != 0)
1249 set_sign_bit_copies++;
1250 else
1251 break;
1254 for (i = 0; i <= 31; i++)
1256 if ((remainder & (1 << i)) == 0)
1257 clear_zero_bit_copies++;
1258 else
1259 break;
1262 for (i = 0; i <= 31; i++)
1264 if ((remainder & (1 << i)) != 0)
1265 set_zero_bit_copies++;
1266 else
1267 break;
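/* For example, remainder == 0xf0000001 yields clear_sign_bit_copies
   == 0, set_sign_bit_copies == 4 (four leading ones),
   clear_zero_bit_copies == 0 and set_zero_bit_copies == 1 (one
   trailing one). */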
1270 switch (code)
1272 case SET:
1273 /* See if we can do this by sign_extending a constant that is known
1274 to be negative. This is a good way of doing it, since the shift
1275 may well merge into a subsequent insn. */
1276 if (set_sign_bit_copies > 1)
1278 if (const_ok_for_arm
1279 (temp1 = ARM_SIGN_EXTEND (remainder
1280 << (set_sign_bit_copies - 1))))
1282 if (generate)
1284 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
1285 emit_insn (gen_rtx_SET (VOIDmode, new_src,
1286 GEN_INT (temp1)));
1287 emit_insn (gen_ashrsi3 (target, new_src,
1288 GEN_INT (set_sign_bit_copies - 1)));
1290 return 2;
1292 /* For an inverted constant, we will need to set the low bits,
1293 these will be shifted out of harm's way. */
1294 temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
1295 if (const_ok_for_arm (~temp1))
1297 if (generate)
1299 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
1300 emit_insn (gen_rtx_SET (VOIDmode, new_src,
1301 GEN_INT (temp1)));
1302 emit_insn (gen_ashrsi3 (target, new_src,
1303 GEN_INT (set_sign_bit_copies - 1)));
1305 return 2;
1309 /* See if we can generate this by setting the bottom (or the top)
1310 16 bits, and then shifting these into the other half of the
1311 word. We only look for the simplest cases; to do more would cost
1312 too much. Be careful, however, not to generate this when the
1313 alternative would take fewer insns. */
1314 if (val & 0xffff0000)
1316 temp1 = remainder & 0xffff0000;
1317 temp2 = remainder & 0x0000ffff;
1319 /* Overlaps outside this range are best done using other methods. */
1320 for (i = 9; i < 24; i++)
1322 if ((((temp2 | (temp2 << i)) & 0xffffffff) == remainder)
1323 && !const_ok_for_arm (temp2))
1325 rtx new_src = (subtargets
1326 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
1327 : target);
1328 insns = arm_gen_constant (code, mode, temp2, new_src,
1329 source, subtargets, generate);
1330 source = new_src;
1331 if (generate)
1332 emit_insn (gen_rtx_SET
1333 (VOIDmode, target,
1334 gen_rtx_IOR (mode,
1335 gen_rtx_ASHIFT (mode, source,
1336 GEN_INT (i)),
1337 source)));
1338 return insns + 1;
1342 /* Don't duplicate cases already considered. */
1343 for (i = 17; i < 24; i++)
1345 if (((temp1 | (temp1 >> i)) == remainder)
1346 && !const_ok_for_arm (temp1))
1348 rtx new_src = (subtargets
1349 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
1350 : target);
1351 insns = arm_gen_constant (code, mode, temp1, new_src,
1352 source, subtargets, generate);
1353 source = new_src;
1354 if (generate)
1355 emit_insn
1356 (gen_rtx_SET (VOIDmode, target,
1357 gen_rtx_IOR
1358 (mode,
1359 gen_rtx_LSHIFTRT (mode, source,
1360 GEN_INT (i)),
1361 source)));
1362 return insns + 1;
1366 break;
1368 case IOR:
1369 case XOR:
1370 /* If we have IOR or XOR, and the constant can be loaded in a
1371 single instruction, and we can find a temporary to put it in,
1372 then this can be done in two instructions instead of 3-4. */
1373 if (subtargets
1374 /* TARGET can't be NULL if SUBTARGETS is 0 */
1375 || (reload_completed && !reg_mentioned_p (target, source)))
1377 if (const_ok_for_arm (ARM_SIGN_EXTEND (~val)))
1379 if (generate)
1381 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
1383 emit_insn (gen_rtx_SET (VOIDmode, sub, GEN_INT (val)));
1384 emit_insn (gen_rtx_SET (VOIDmode, target,
1385 gen_rtx (code, mode, source, sub)));
1387 return 2;
1391 if (code == XOR)
1392 break;
1394 if (set_sign_bit_copies > 8
1395 && (val & (-1 << (32 - set_sign_bit_copies))) == val)
1397 if (generate)
1399 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
1400 rtx shift = GEN_INT (set_sign_bit_copies);
1402 emit_insn (gen_rtx_SET (VOIDmode, sub,
1403 gen_rtx_NOT (mode,
1404 gen_rtx_ASHIFT (mode,
1405 source,
1406 shift))));
1407 emit_insn (gen_rtx_SET (VOIDmode, target,
1408 gen_rtx_NOT (mode,
1409 gen_rtx_LSHIFTRT (mode, sub,
1410 shift))));
1412 return 2;
1415 if (set_zero_bit_copies > 8
1416 && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
1418 if (generate)
1420 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
1421 rtx shift = GEN_INT (set_zero_bit_copies);
1423 emit_insn (gen_rtx_SET (VOIDmode, sub,
1424 gen_rtx_NOT (mode,
1425 gen_rtx_LSHIFTRT (mode,
1426 source,
1427 shift))));
1428 emit_insn (gen_rtx_SET (VOIDmode, target,
1429 gen_rtx_NOT (mode,
1430 gen_rtx_ASHIFT (mode, sub,
1431 shift))));
1433 return 2;
1436 if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~val)))
1438 if (generate)
1440 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
1441 emit_insn (gen_rtx_SET (VOIDmode, sub,
1442 gen_rtx_NOT (mode, source)));
1443 source = sub;
1444 if (subtargets)
1445 sub = gen_reg_rtx (mode);
1446 emit_insn (gen_rtx_SET (VOIDmode, sub,
1447 gen_rtx_AND (mode, source,
1448 GEN_INT (temp1))));
1449 emit_insn (gen_rtx_SET (VOIDmode, target,
1450 gen_rtx_NOT (mode, sub)));
1452 return 3;
1454 break;
1456 case AND:
1457 /* See if two shifts will do 2 or more insns' worth of work. */
1458 if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
1460 HOST_WIDE_INT shift_mask = ((0xffffffff
1461 << (32 - clear_sign_bit_copies))
1462 & 0xffffffff);
1464 if ((remainder | shift_mask) != 0xffffffff)
1466 if (generate)
1468 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
1469 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
1470 new_src, source, subtargets, 1);
1471 source = new_src;
1473 else
1475 rtx targ = subtargets ? NULL_RTX : target;
1476 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
1477 targ, source, subtargets, 0);
1481 if (generate)
1483 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
1484 rtx shift = GEN_INT (clear_sign_bit_copies);
1486 emit_insn (gen_ashlsi3 (new_src, source, shift));
1487 emit_insn (gen_lshrsi3 (target, new_src, shift));
1490 return insns + 2;
1493 if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
1495 HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;
1497 if ((remainder | shift_mask) != 0xffffffff)
1499 if (generate)
1501 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
1503 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
1504 new_src, source, subtargets, 1);
1505 source = new_src;
1507 else
1509 rtx targ = subtargets ? NULL_RTX : target;
1511 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
1512 targ, source, subtargets, 0);
1516 if (generate)
1518 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
1519 rtx shift = GEN_INT (clear_zero_bit_copies);
1521 emit_insn (gen_lshrsi3 (new_src, source, shift));
1522 emit_insn (gen_ashlsi3 (target, new_src, shift));
1525 return insns + 2;
1528 break;
1530 default:
1531 break;
1534 for (i = 0; i < 32; i++)
1535 if (remainder & (1 << i))
1536 num_bits_set++;
1538 if (code == AND || (can_invert && num_bits_set > 16))
1539 remainder = (~remainder) & 0xffffffff;
1540 else if (code == PLUS && num_bits_set > 16)
1541 remainder = (-remainder) & 0xffffffff;
1542 else
1544 can_invert = 0;
1545 can_negate = 0;
1548 /* Now try and find a way of doing the job in either two or three
1549 instructions.
1550 We start by looking for the largest block of zeros that are aligned on
1551 a 2-bit boundary, we then fill up the temps, wrapping around to the
1552 top of the word when we drop off the bottom.
1553 In the worst case this code should produce no more than four insns. */
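/* Worked example: val = 0x12345678 splits into four byte-sized
   chunks, each of which is a valid rotated immediate, matching the
   four-insn worst case (register name illustrative):

       mov rd, #0x12000000
       orr rd, rd, #0x00340000
       orr rd, rd, #0x00005600
       orr rd, rd, #0x00000078  */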
1555 int best_start = 0;
1556 int best_consecutive_zeros = 0;
1558 for (i = 0; i < 32; i += 2)
1560 int consecutive_zeros = 0;
1562 if (!(remainder & (3 << i)))
1564 while ((i < 32) && !(remainder & (3 << i)))
1566 consecutive_zeros += 2;
1567 i += 2;
1569 if (consecutive_zeros > best_consecutive_zeros)
1571 best_consecutive_zeros = consecutive_zeros;
1572 best_start = i - consecutive_zeros;
1574 i -= 2;
1578 /* So long as it won't require any more insns to do so, it's
1579 desirable to emit a small constant (in bits 0...9) in the last
1580 insn. This way there is more chance that it can be combined with
1581 a later addressing insn to form a pre-indexed load or store
1582 operation. Consider:
1584 *((volatile int *)0xe0000100) = 1;
1585 *((volatile int *)0xe0000110) = 2;
1587 We want this to wind up as:
1589 mov rA, #0xe0000000
1590 mov rB, #1
1591 str rB, [rA, #0x100]
1592 mov rB, #2
1593 str rB, [rA, #0x110]
1595 rather than having to synthesize both large constants from scratch.
1597 Therefore, we calculate how many insns would be required to emit
1598 the constant starting from `best_start', and also starting from
1599 zero (ie with bit 31 first to be output). If `best_start' doesn't
1600 yield a shorter sequence, we may as well use zero. */
1601 if (best_start != 0
1602 && ((((unsigned HOST_WIDE_INT) 1) << best_start) < remainder)
1603 && (count_insns_for_constant (remainder, 0) <=
1604 count_insns_for_constant (remainder, best_start)))
1605 best_start = 0;
1607 /* Now start emitting the insns. */
1608 i = best_start;
1609 do
1611 int end;
1613 if (i <= 0)
1614 i += 32;
1615 if (remainder & (3 << (i - 2)))
1617 end = i - 8;
1618 if (end < 0)
1619 end += 32;
1620 temp1 = remainder & ((0x0ff << end)
1621 | ((i < end) ? (0xff >> (32 - end)) : 0));
1622 remainder &= ~temp1;
1624 if (generate)
1626 rtx new_src, temp1_rtx;
1628 if (code == SET || code == MINUS)
1630 new_src = (subtargets ? gen_reg_rtx (mode) : target);
1631 if (can_invert && code != MINUS)
1632 temp1 = ~temp1;
1634 else
1636 if (remainder && subtargets)
1637 new_src = gen_reg_rtx (mode);
1638 else
1639 new_src = target;
1640 if (can_invert)
1641 temp1 = ~temp1;
1642 else if (can_negate)
1643 temp1 = -temp1;
1646 temp1 = trunc_int_for_mode (temp1, mode);
1647 temp1_rtx = GEN_INT (temp1);
1649 if (code == SET)
1651 else if (code == MINUS)
1652 temp1_rtx = gen_rtx_MINUS (mode, temp1_rtx, source);
1653 else
1654 temp1_rtx = gen_rtx_fmt_ee (code, mode, source, temp1_rtx);
1656 emit_insn (gen_rtx_SET (VOIDmode, new_src, temp1_rtx));
1657 source = new_src;
1660 if (code == SET)
1662 can_invert = 0;
1663 code = PLUS;
1665 else if (code == MINUS)
1666 code = PLUS;
1668 insns++;
1669 i -= 6;
1671 i -= 2;
1673 while (remainder);
1676 return insns;
1679 /* Canonicalize a comparison so that we are more likely to recognize it.
1680 This can be done for a few constant compares, where we can make the
1681 immediate value easier to load. */
1683 enum rtx_code
1684 arm_canonicalize_comparison (code, op1)
1685 enum rtx_code code;
1686 rtx * op1;
1688 unsigned HOST_WIDE_INT i = INTVAL (*op1);
1690 switch (code)
1692 case EQ:
1693 case NE:
1694 return code;
1696 case GT:
1697 case LE:
1698 if (i != ((((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1)) - 1)
1699 && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
1701 *op1 = GEN_INT (i + 1);
1702 return code == GT ? GE : LT;
1704 break;
1706 case GE:
1707 case LT:
1708 if (i != (((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1))
1709 && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
1711 *op1 = GEN_INT (i - 1);
1712 return code == GE ? GT : LE;
1714 break;
1716 case GTU:
1717 case LEU:
1718 if (i != ~((unsigned HOST_WIDE_INT) 0)
1719 && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
1721 *op1 = GEN_INT (i + 1);
1722 return code == GTU ? GEU : LTU;
1724 break;
1726 case GEU:
1727 case LTU:
1728 if (i != 0
1729 && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
1731 *op1 = GEN_INT (i - 1);
1732 return code == GEU ? GTU : LEU;
1734 break;
1736 default:
1737 abort ();
1740 return code;
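/* For example, 0x00ffffff is not a valid immediate but 0x01000000 is,
   so the comparison (x > 0x00ffffff) is rewritten as
   (x >= 0x01000000), which a single CMP can then encode. */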
1743 /* Decide whether a type should be returned in memory (true)
1744 or in a register (false). This is called by the macro
1745 RETURN_IN_MEMORY. */
1747 int
1748 arm_return_in_memory (type)
1749 tree type;
1751 if (!AGGREGATE_TYPE_P (type))
1752 /* All simple types are returned in registers. */
1753 return 0;
1755 /* For the arm-wince targets we choose to be compatible with Microsoft's
1756 ARM and Thumb compilers, which always return aggregates in memory. */
1757 #ifndef ARM_WINCE
1758 /* All structures/unions bigger than one word are returned in memory.
1759 Also catch the case where int_size_in_bytes returns -1. In this case
1760 the aggregate is either huge or of variable size, and in either case
1761 we will want to return it via memory and not in a register. */
1762 if (((unsigned int) int_size_in_bytes (type)) > UNITS_PER_WORD)
1763 return 1;
1765 if (TREE_CODE (type) == RECORD_TYPE)
1767 tree field;
1769 /* For a struct the APCS says that we only return in a register
1770 if the type is 'integer like' and every addressable element
1771 has an offset of zero. For practical purposes this means
1772 that the structure can have at most one non bit-field element
1773 and that this element must be the first one in the structure. */
1775 /* Find the first field, ignoring non FIELD_DECL things which will
1776 have been created by C++. */
1777 for (field = TYPE_FIELDS (type);
1778 field && TREE_CODE (field) != FIELD_DECL;
1779 field = TREE_CHAIN (field))
1780 continue;
1782 if (field == NULL)
1783 return 0; /* An empty structure. Allowed by an extension to ANSI C. */
1785 /* Check that the first field is valid for returning in a register. */
1787 /* ... Floats are not allowed */
1788 if (FLOAT_TYPE_P (TREE_TYPE (field)))
1789 return 1;
1791 /* ... Aggregates that are not themselves valid for returning in
1792 a register are not allowed. */
1793 if (RETURN_IN_MEMORY (TREE_TYPE (field)))
1794 return 1;
1796 /* Now check the remaining fields, if any. Only bitfields are allowed,
1797 since they are not addressable. */
1798 for (field = TREE_CHAIN (field);
1799 field;
1800 field = TREE_CHAIN (field))
1802 if (TREE_CODE (field) != FIELD_DECL)
1803 continue;
1805 if (!DECL_BIT_FIELD_TYPE (field))
1806 return 1;
1809 return 0;
1812 if (TREE_CODE (type) == UNION_TYPE)
1814 tree field;
1816 /* Unions can be returned in registers if every element is
1817 integral, or can be returned in an integer register. */
1818 for (field = TYPE_FIELDS (type);
1819 field;
1820 field = TREE_CHAIN (field))
1822 if (TREE_CODE (field) != FIELD_DECL)
1823 continue;
1825 if (FLOAT_TYPE_P (TREE_TYPE (field)))
1826 return 1;
1828 if (RETURN_IN_MEMORY (TREE_TYPE (field)))
1829 return 1;
1832 return 0;
1834 #endif /* not ARM_WINCE */
1836 /* Return all other types in memory. */
1837 return 1;
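/* Illustrative cases for the code above: "struct s { int i; };" is
   integer-like and one word, so it is returned in a register;
   "struct p { int x, y; };" is larger than a word and
   "struct f { float v; };" starts with a float, so both are returned
   in memory (on non-WinCE targets). */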
1840 /* Initialize a variable CUM of type CUMULATIVE_ARGS
1841 for a call to a function whose data type is FNTYPE.
1842 For a library call, FNTYPE is NULL. */
1843 void
1844 arm_init_cumulative_args (pcum, fntype, libname, indirect)
1845 CUMULATIVE_ARGS * pcum;
1846 tree fntype;
1847 rtx libname ATTRIBUTE_UNUSED;
1848 int indirect ATTRIBUTE_UNUSED;
1850 /* On the ARM, the offset starts at 0. */
1851 pcum->nregs = ((fntype && aggregate_value_p (TREE_TYPE (fntype))) ? 1 : 0);
1853 pcum->call_cookie = CALL_NORMAL;
1855 if (TARGET_LONG_CALLS)
1856 pcum->call_cookie = CALL_LONG;
1858 /* Check for long call/short call attributes. The attributes
1859 override any command line option. */
1860 if (fntype)
1862 if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (fntype)))
1863 pcum->call_cookie = CALL_SHORT;
1864 else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (fntype)))
1865 pcum->call_cookie = CALL_LONG;
1869 /* Determine where to put an argument to a function.
1870 Value is zero to push the argument on the stack,
1871 or a hard register in which to store the argument.
1873 MODE is the argument's machine mode.
1874 TYPE is the data type of the argument (as a tree).
1875 This is null for libcalls where that information may
1876 not be available.
1877 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1878 the preceding args and about the function being called.
1879 NAMED is nonzero if this argument is a named parameter
1880 (otherwise it is an extra parameter matching an ellipsis). */
1882 rtx
1883 arm_function_arg (pcum, mode, type, named)
1884 CUMULATIVE_ARGS * pcum;
1885 enum machine_mode mode;
1886 tree type ATTRIBUTE_UNUSED;
1887 int named;
1889 if (mode == VOIDmode)
1890 /* Compute operand 2 of the call insn. */
1891 return GEN_INT (pcum->call_cookie);
1893 if (!named || pcum->nregs >= NUM_ARG_REGS)
1894 return NULL_RTX;
1896 return gen_rtx_REG (mode, pcum->nregs);
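/* Under the APCS the first NUM_ARG_REGS words of arguments are passed
   in core registers and the rest on the stack; assuming the usual
   value of four, for

       int f (int a, int b, int c, int d, int e);

   a..d arrive in r0-r3 while e is pushed on the stack, and this
   function returns NULL_RTX for e (and for any unnamed argument). */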
1899 /* Encode the current state of the #pragma [no_]long_calls. */
1900 typedef enum
1902 OFF, /* No #pragma [no_]long_calls is in effect. */
1903 LONG, /* #pragma long_calls is in effect. */
1904 SHORT /* #pragma no_long_calls is in effect. */
1905 } arm_pragma_enum;
1907 static arm_pragma_enum arm_pragma_long_calls = OFF;
1909 void
1910 arm_pr_long_calls (pfile)
1911 cpp_reader * pfile ATTRIBUTE_UNUSED;
1913 arm_pragma_long_calls = LONG;
1916 void
1917 arm_pr_no_long_calls (pfile)
1918 cpp_reader * pfile ATTRIBUTE_UNUSED;
1920 arm_pragma_long_calls = SHORT;
1923 void
1924 arm_pr_long_calls_off (pfile)
1925 cpp_reader * pfile ATTRIBUTE_UNUSED;
1927 arm_pragma_long_calls = OFF;
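/* For reference, a sketch of the user-level syntax these three handlers
   implement:  */
#if 0
#pragma long_calls
extern void far_away (void);      /* will be called via a long call */
#pragma no_long_calls
extern void close_by (void);      /* forced to use a normal call */
#pragma long_calls_off
extern void by_default (void);    /* back to the command line default */
#endif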
1930 /* Table of machine attributes. */
1931 const struct attribute_spec arm_attribute_table[] =
1933 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
1934 /* Function calls made to this symbol must be done indirectly, because
1935 it may lie outside of the 26 bit addressing range of a normal function
1936 call. */
1937 { "long_call", 0, 0, false, true, true, NULL },
1938 /* Whereas these functions are always known to reside within the 26 bit
1939 addressing range. */
1940 { "short_call", 0, 0, false, true, true, NULL },
1941 /* Interrupt Service Routines have special prologue and epilogue requirements. */
1942 { "isr", 0, 1, false, false, false, arm_handle_isr_attribute },
1943 { "interrupt", 0, 1, false, false, false, arm_handle_isr_attribute },
1944 { "naked", 0, 0, true, false, false, arm_handle_fndecl_attribute },
1945 #ifdef ARM_PE
1946 /* ARM/PE has three new attributes:
1947 interfacearm - ?
1948 dllexport - for exporting a function/variable that will live in a dll
1949 dllimport - for importing a function/variable from a dll
1951 Microsoft allows multiple declspecs in one __declspec, separating
1952 them with spaces. We do NOT support this. Instead, use __declspec
1953 multiple times.
1955 { "dllimport", 0, 0, true, false, false, NULL },
1956 { "dllexport", 0, 0, true, false, false, NULL },
1957 { "interfacearm", 0, 0, true, false, false, arm_handle_fndecl_attribute },
1958 #endif
1959 { NULL, 0, 0, false, false, false, NULL }
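/* A short sketch of how the attributes in the table above appear in
   user code:  */
#if 0
void far_func (void) __attribute__ ((long_call));
void near_func (void) __attribute__ ((short_call));
void irq_handler (void) __attribute__ ((interrupt ("IRQ")));
void asm_only (void) __attribute__ ((naked));
#endif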
1962 /* Handle an attribute requiring a FUNCTION_DECL;
1963 arguments as in struct attribute_spec.handler. */
1965 static tree
1966 arm_handle_fndecl_attribute (node, name, args, flags, no_add_attrs)
1967 tree * node;
1968 tree name;
1969 tree args ATTRIBUTE_UNUSED;
1970 int flags ATTRIBUTE_UNUSED;
1971 bool * no_add_attrs;
1973 if (TREE_CODE (*node) != FUNCTION_DECL)
1975 warning ("`%s' attribute only applies to functions",
1976 IDENTIFIER_POINTER (name));
1977 *no_add_attrs = true;
1980 return NULL_TREE;
1983 /* Handle an "interrupt" or "isr" attribute;
1984 arguments as in struct attribute_spec.handler. */
1986 static tree
1987 arm_handle_isr_attribute (node, name, args, flags, no_add_attrs)
1988 tree * node;
1989 tree name;
1990 tree args;
1991 int flags;
1992 bool * no_add_attrs;
1994 if (DECL_P (*node))
1996 if (TREE_CODE (*node) != FUNCTION_DECL)
1998 warning ("`%s' attribute only applies to functions",
1999 IDENTIFIER_POINTER (name));
2000 *no_add_attrs = true;
2002 /* FIXME: the argument, if any, is checked for type attributes;
2003 should it also be checked for decl attributes? */
2005 else
2007 if (TREE_CODE (*node) == FUNCTION_TYPE
2008 || TREE_CODE (*node) == METHOD_TYPE)
2010 if (arm_isr_value (args) == ARM_FT_UNKNOWN)
2012 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
2013 *no_add_attrs = true;
2016 else if (TREE_CODE (*node) == POINTER_TYPE
2017 && (TREE_CODE (TREE_TYPE (*node)) == FUNCTION_TYPE
2018 || TREE_CODE (TREE_TYPE (*node)) == METHOD_TYPE)
2019 && arm_isr_value (args) != ARM_FT_UNKNOWN)
2021 *node = build_type_copy (*node);
2022 TREE_TYPE (*node) = build_type_attribute_variant
2023 (TREE_TYPE (*node),
2024 tree_cons (name, args, TYPE_ATTRIBUTES (TREE_TYPE (*node))));
2025 *no_add_attrs = true;
2027 else
2029 /* Possibly pass this attribute on from the type to a decl. */
2030 if (flags & ((int) ATTR_FLAG_DECL_NEXT
2031 | (int) ATTR_FLAG_FUNCTION_NEXT
2032 | (int) ATTR_FLAG_ARRAY_NEXT))
2034 *no_add_attrs = true;
2035 return tree_cons (name, args, NULL_TREE);
2037 else
2039 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
2044 return NULL_TREE;
2047 /* Return 0 if the attributes for two types are incompatible, 1 if they
2048 are compatible, and 2 if they are nearly compatible (which causes a
2049 warning to be generated). */
2051 static int
2052 arm_comp_type_attributes (type1, type2)
2053 tree type1;
2054 tree type2;
2056 int l1, l2, s1, s2;
2058 /* Check for mismatch of non-default calling convention. */
2059 if (TREE_CODE (type1) != FUNCTION_TYPE)
2060 return 1;
2062 /* Check for mismatched call attributes. */
2063 l1 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type1)) != NULL;
2064 l2 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type2)) != NULL;
2065 s1 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type1)) != NULL;
2066 s2 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type2)) != NULL;
2068 /* Only bother to check if an attribute is defined. */
2069 if (l1 | l2 | s1 | s2)
2071 /* If one type has an attribute, the other must have the same attribute. */
2072 if ((l1 != l2) || (s1 != s2))
2073 return 0;
2075 /* Disallow mixed attributes. */
2076 if ((l1 & s2) || (l2 & s1))
2077 return 0;
2080 /* Check for mismatched ISR attribute. */
2081 l1 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type1)) != NULL;
2082 if (! l1)
2083 l1 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type1)) != NULL;
2084 l2 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type2)) != NULL;
2085 if (! l2)
2086 l2 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type2)) != NULL;
2087 if (l1 != l2)
2088 return 0;
2090 return 1;
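/* Sketch of the kind of mismatch the hook above rejects:  */
#if 0
extern void f (void) __attribute__ ((long_call));
extern void g (void) __attribute__ ((short_call));
/* The types of `f' and `g' carry different call attributes, so
   arm_comp_type_attributes returns 0 for them, and assigning one to a
   pointer of the other's type draws a diagnostic.  */
#endif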
2093 /* Encode long_call or short_call attribute by prefixing
2094 symbol name in DECL with a special character FLAG. */
2096 void
2097 arm_encode_call_attribute (decl, flag)
2098 tree decl;
2099 int flag;
2101 const char * str = XSTR (XEXP (DECL_RTL (decl), 0), 0);
2102 int len = strlen (str);
2103 char * newstr;
2105 if (TREE_CODE (decl) != FUNCTION_DECL)
2106 return;
2108 /* Do not allow weak functions to be treated as short call. */
2109 if (DECL_WEAK (decl) && flag == SHORT_CALL_FLAG_CHAR)
2110 return;
2112 newstr = alloca (len + 2);
2113 newstr[0] = flag;
2114 strcpy (newstr + 1, str);
2116 newstr = (char *) ggc_alloc_string (newstr, len + 1);
2117 XSTR (XEXP (DECL_RTL (decl), 0), 0) = newstr;
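/* Sketch of the effect of the encoding above: if the flag character
   were `*' (hypothetical; the real characters are defined elsewhere),
   a decl whose assembler name is "f" would be renamed "*f", which the
   ENCODED_SHORT_CALL_ATTR_P / ENCODED_LONG_CALL_ATTR_P tests later
   recognise by looking at the first character.  */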
2120 /* Assigns default attributes to newly defined type. This is used to
2121 set short_call/long_call attributes for function types of
2122 functions defined inside corresponding #pragma scopes. */
2124 static void
2125 arm_set_default_type_attributes (type)
2126 tree type;
2128 /* Add __attribute__ ((long_call)) to all functions, when
2129 inside #pragma long_calls or __attribute__ ((short_call)),
2130 when inside #pragma no_long_calls. */
2131 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
2133 tree type_attr_list, attr_name;
2134 type_attr_list = TYPE_ATTRIBUTES (type);
2136 if (arm_pragma_long_calls == LONG)
2137 attr_name = get_identifier ("long_call");
2138 else if (arm_pragma_long_calls == SHORT)
2139 attr_name = get_identifier ("short_call");
2140 else
2141 return;
2143 type_attr_list = tree_cons (attr_name, NULL_TREE, type_attr_list);
2144 TYPE_ATTRIBUTES (type) = type_attr_list;
2148 /* Return 1 if the operand is a SYMBOL_REF for a function known to be
2149 defined within the current compilation unit. If this cannot be
2150 determined, then 0 is returned. */
2152 static int
2153 current_file_function_operand (sym_ref)
2154 rtx sym_ref;
2156 /* This is a bit of a fib. A function will have a short call flag
2157 applied to its name if it has the short call attribute, or it has
2158 already been defined within the current compilation unit. */
2159 if (ENCODED_SHORT_CALL_ATTR_P (XSTR (sym_ref, 0)))
2160 return 1;
2162 /* The current function is always defined within the current compilation
2163 unit. If it is a weak definition, however, then this may not be the real
2164 definition of the function, and so we have to say no. */
2165 if (sym_ref == XEXP (DECL_RTL (current_function_decl), 0)
2166 && !DECL_WEAK (current_function_decl))
2167 return 1;
2169 /* We cannot make the determination - default to returning 0. */
2170 return 0;
2173 /* Return non-zero if a 32 bit "long_call" should be generated for
2174 this call. We generate a long_call if the function:
2176 a. has an __attribute__ ((long_call))
2177 or b. is within the scope of a #pragma long_calls
2178 or c. the -mlong-calls command line switch has been specified
2180 However we do not generate a long call if the function:
2182 d. has an __attribute__ ((short_call))
2183 or e. is inside the scope of a #pragma no_long_calls
2184 or f. has an __attribute__ ((section))
2185 or g. is defined within the current compilation unit.
2187 This function will be called by C fragments contained in the machine
2188 description file. SYM_REF and CALL_COOKIE correspond to the matched
2189 rtl operands. CALL_SYMBOL is used to distinguish between
2190 two different callers of the function. It is set to 1 in the
2191 "call_symbol" and "call_symbol_value" patterns and to 0 in the "call"
2192 and "call_value" patterns. This is because of the difference in the
2193 SYM_REFs passed by these patterns. */
2196 arm_is_longcall_p (sym_ref, call_cookie, call_symbol)
2197 rtx sym_ref;
2198 int call_cookie;
2199 int call_symbol;
2201 if (!call_symbol)
2203 if (GET_CODE (sym_ref) != MEM)
2204 return 0;
2206 sym_ref = XEXP (sym_ref, 0);
2209 if (GET_CODE (sym_ref) != SYMBOL_REF)
2210 return 0;
2212 if (call_cookie & CALL_SHORT)
2213 return 0;
2215 if (TARGET_LONG_CALLS && flag_function_sections)
2216 return 1;
2218 if (current_file_function_operand (sym_ref))
2219 return 0;
2221 return (call_cookie & CALL_LONG)
2222 || ENCODED_LONG_CALL_ATTR_P (XSTR (sym_ref, 0))
2223 || TARGET_LONG_CALLS;
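/* Sketch of a call site, as it might appear in a C fragment in the
   machine description (operand numbers here are hypothetical):  */
#if 0
if (arm_is_longcall_p (operands[0], INTVAL (operands[2]), 1))
  ; /* emit the long call sequence */
else
  ; /* emit a plain branch-and-link */
#endif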
2226 /* Return non-zero if it is ok to make a tail-call to DECL. */
2229 arm_function_ok_for_sibcall (decl)
2230 tree decl;
2232 int call_type = TARGET_LONG_CALLS ? CALL_LONG : CALL_NORMAL;
2234 /* Never tailcall something for which we have no decl, or if we
2235 are in Thumb mode. */
2236 if (decl == NULL || TARGET_THUMB)
2237 return 0;
2239 /* Get the calling method. */
2240 if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
2241 call_type = CALL_SHORT;
2242 else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
2243 call_type = CALL_LONG;
2245 /* Cannot tail-call to long calls, since these are out of range of
2246 a branch instruction. However, if not compiling PIC, we know
2247 we can reach the symbol if it is in this compilation unit. */
2248 if (call_type == CALL_LONG && (flag_pic || !TREE_ASM_WRITTEN (decl)))
2249 return 0;
2251 /* If we are interworking and the function is not declared static
2252 then we can't tail-call it unless we know that it exists in this
2253 compilation unit (since it might be a Thumb routine). */
2254 if (TARGET_INTERWORK && TREE_PUBLIC (decl) && !TREE_ASM_WRITTEN (decl))
2255 return 0;
2257 /* Never tailcall from an ISR routine - it needs a special exit sequence. */
2258 if (IS_INTERRUPT (arm_current_func_type ()))
2259 return 0;
2261 /* Everything else is ok. */
2262 return 1;
2267 legitimate_pic_operand_p (x)
2268 rtx x;
2270 if (CONSTANT_P (x)
2271 && flag_pic
2272 && (GET_CODE (x) == SYMBOL_REF
2273 || (GET_CODE (x) == CONST
2274 && GET_CODE (XEXP (x, 0)) == PLUS
2275 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)))
2276 return 0;
2278 return 1;
2282 legitimize_pic_address (orig, mode, reg)
2283 rtx orig;
2284 enum machine_mode mode;
2285 rtx reg;
2287 if (GET_CODE (orig) == SYMBOL_REF
2288 || GET_CODE (orig) == LABEL_REF)
2290 #ifndef AOF_ASSEMBLER
2291 rtx pic_ref, address;
2292 #endif
2293 rtx insn;
2294 int subregs = 0;
2296 if (reg == 0)
2298 if (no_new_pseudos)
2299 abort ();
2300 else
2301 reg = gen_reg_rtx (Pmode);
2303 subregs = 1;
2306 #ifdef AOF_ASSEMBLER
2307 /* The AOF assembler can generate relocations for these directly, and
2308 understands that the PIC register has to be added into the offset. */
2309 insn = emit_insn (gen_pic_load_addr_based (reg, orig));
2310 #else
2311 if (subregs)
2312 address = gen_reg_rtx (Pmode);
2313 else
2314 address = reg;
2316 if (TARGET_ARM)
2317 emit_insn (gen_pic_load_addr_arm (address, orig));
2318 else
2319 emit_insn (gen_pic_load_addr_thumb (address, orig));
2321 if (GET_CODE (orig) == LABEL_REF && NEED_GOT_RELOC)
2322 pic_ref = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, address);
2323 else
2325 pic_ref = gen_rtx_MEM (Pmode,
2326 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
2327 address));
2328 RTX_UNCHANGING_P (pic_ref) = 1;
2331 insn = emit_move_insn (reg, pic_ref);
2332 #endif
2333 current_function_uses_pic_offset_table = 1;
2334 /* Put a REG_EQUAL note on this insn, so that it can be optimized
2335 by the loop optimizer. */
2336 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, orig,
2337 REG_NOTES (insn));
2338 return reg;
2340 else if (GET_CODE (orig) == CONST)
2342 rtx base, offset;
2344 if (GET_CODE (XEXP (orig, 0)) == PLUS
2345 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
2346 return orig;
2348 if (reg == 0)
2350 if (no_new_pseudos)
2351 abort ();
2352 else
2353 reg = gen_reg_rtx (Pmode);
2356 if (GET_CODE (XEXP (orig, 0)) == PLUS)
2358 base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
2359 offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
2360 base == reg ? 0 : reg);
2362 else
2363 abort ();
2365 if (GET_CODE (offset) == CONST_INT)
2367 /* The base register doesn't really matter, we only want to
2368 test the index for the appropriate mode. */
2369 ARM_GO_IF_LEGITIMATE_INDEX (mode, 0, offset, win);
2371 if (!no_new_pseudos)
2372 offset = force_reg (Pmode, offset);
2373 else
2374 abort ();
2376 win:
2377 if (GET_CODE (offset) == CONST_INT)
2378 return plus_constant (base, INTVAL (offset));
2381 if (GET_MODE_SIZE (mode) > 4
2382 && (GET_MODE_CLASS (mode) == MODE_INT
2383 || TARGET_SOFT_FLOAT))
2385 emit_insn (gen_addsi3 (reg, base, offset));
2386 return reg;
2389 return gen_rtx_PLUS (Pmode, base, offset);
2392 return orig;
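/* For a global symbol under -fpic, the RTL built above corresponds
   roughly to this instruction sequence (a sketch; register names are
   illustrative):

      ldr     rA, .Lgot_offset        @ gen_pic_load_addr_arm
      ldr     rD, [sl, rA]            @ MEM (PIC register + address)

   where sl is the PIC offset table register and .Lgot_offset is a
   literal pool entry holding the symbol's GOT offset.  */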
2395 /* Generate code to load the PIC register. PROLOGUE is true if
2396 called from arm_expand_prologue (in which case we want the
2397 generated insns at the start of the function); false if called
2398 by an exception receiver that needs the PIC register reloaded
2399 (in which case the insns are just dumped at the current location). */
2401 void
2402 arm_finalize_pic (prologue)
2403 int prologue ATTRIBUTE_UNUSED;
2405 #ifndef AOF_ASSEMBLER
2406 rtx l1, pic_tmp, pic_tmp2, seq, pic_rtx;
2407 rtx global_offset_table;
2409 if (current_function_uses_pic_offset_table == 0 || TARGET_SINGLE_PIC_BASE)
2410 return;
2412 if (!flag_pic)
2413 abort ();
2415 start_sequence ();
2416 l1 = gen_label_rtx ();
2418 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2419 /* On the ARM the PC register contains 'dot + 8' at the time of the
2420 addition, on the Thumb it is 'dot + 4'. */
2421 pic_tmp = plus_constant (gen_rtx_LABEL_REF (Pmode, l1), TARGET_ARM ? 8 : 4);
2422 if (GOT_PCREL)
2423 pic_tmp2 = gen_rtx_CONST (VOIDmode,
2424 gen_rtx_PLUS (Pmode, global_offset_table, pc_rtx));
2425 else
2426 pic_tmp2 = gen_rtx_CONST (VOIDmode, global_offset_table);
2428 pic_rtx = gen_rtx_CONST (Pmode, gen_rtx_MINUS (Pmode, pic_tmp2, pic_tmp));
2430 if (TARGET_ARM)
2432 emit_insn (gen_pic_load_addr_arm (pic_offset_table_rtx, pic_rtx));
2433 emit_insn (gen_pic_add_dot_plus_eight (pic_offset_table_rtx, l1));
2435 else
2437 emit_insn (gen_pic_load_addr_thumb (pic_offset_table_rtx, pic_rtx));
2438 emit_insn (gen_pic_add_dot_plus_four (pic_offset_table_rtx, l1));
2441 seq = gen_sequence ();
2442 end_sequence ();
2443 if (prologue)
2444 emit_insn_after (seq, get_insns ());
2445 else
2446 emit_insn (seq);
2448 /* Need to emit this whether or not we obey regdecls,
2449 since setjmp/longjmp can cause life info to screw up. */
2450 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
2451 #endif /* AOF_ASSEMBLER */
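/* The sequence emitted above typically assembles to something like
   this sketch (ARM state, hence the "dot + 8" correction):

      ldr     sl, .Lpic_off
   .LPIC0:
      add     sl, pc, sl

   with .Lpic_off containing _GLOBAL_OFFSET_TABLE_ - (.LPIC0 + 8).  */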
2454 #define REG_OR_SUBREG_REG(X) \
2455 (GET_CODE (X) == REG \
2456 || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))
2458 #define REG_OR_SUBREG_RTX(X) \
2459 (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))
2461 #ifndef COSTS_N_INSNS
2462 #define COSTS_N_INSNS(N) ((N) * 4 - 2)
2463 #endif
2466 arm_rtx_costs (x, code, outer)
2467 rtx x;
2468 enum rtx_code code;
2469 enum rtx_code outer;
2471 enum machine_mode mode = GET_MODE (x);
2472 enum rtx_code subcode;
2473 int extra_cost;
2475 if (TARGET_THUMB)
2477 switch (code)
2479 case ASHIFT:
2480 case ASHIFTRT:
2481 case LSHIFTRT:
2482 case ROTATERT:
2483 case PLUS:
2484 case MINUS:
2485 case COMPARE:
2486 case NEG:
2487 case NOT:
2488 return COSTS_N_INSNS (1);
2490 case MULT:
2491 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
2493 int cycles = 0;
2494 unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));
2496 while (i)
2498 i >>= 2;
2499 cycles++;
2501 return COSTS_N_INSNS (2) + cycles;
2503 return COSTS_N_INSNS (1) + 16;
2505 case SET:
2506 return (COSTS_N_INSNS (1)
2507 + 4 * ((GET_CODE (SET_SRC (x)) == MEM)
2508 + GET_CODE (SET_DEST (x)) == MEM));
2510 case CONST_INT:
2511 if (outer == SET)
2513 if ((unsigned HOST_WIDE_INT) INTVAL (x) < 256)
2514 return 0;
2515 if (thumb_shiftable_const (INTVAL (x)))
2516 return COSTS_N_INSNS (2);
2517 return COSTS_N_INSNS (3);
2519 else if (outer == PLUS
2520 && INTVAL (x) < 256 && INTVAL (x) > -256)
2521 return 0;
2522 else if (outer == COMPARE
2523 && (unsigned HOST_WIDE_INT) INTVAL (x) < 256)
2524 return 0;
2525 else if (outer == ASHIFT || outer == ASHIFTRT
2526 || outer == LSHIFTRT)
2527 return 0;
2528 return COSTS_N_INSNS (2);
2530 case CONST:
2531 case CONST_DOUBLE:
2532 case LABEL_REF:
2533 case SYMBOL_REF:
2534 return COSTS_N_INSNS (3);
2536 case UDIV:
2537 case UMOD:
2538 case DIV:
2539 case MOD:
2540 return 100;
2542 case TRUNCATE:
2543 return 99;
2545 case AND:
2546 case XOR:
2547 case IOR:
2548 /* XXX guess. */
2549 return 8;
2551 case ADDRESSOF:
2552 case MEM:
2553 /* XXX another guess. */
2554 /* Memory costs quite a lot for the first word, but subsequent words
2555 load at the equivalent of a single insn each. */
2556 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
2557 + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));
2559 case IF_THEN_ELSE:
2560 /* XXX a guess. */
2561 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
2562 return 14;
2563 return 2;
2565 case ZERO_EXTEND:
2566 /* XXX still guessing. */
2567 switch (GET_MODE (XEXP (x, 0)))
2569 case QImode:
2570 return (1 + (mode == DImode ? 4 : 0)
2571 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2573 case HImode:
2574 return (4 + (mode == DImode ? 4 : 0)
2575 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2577 case SImode:
2578 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2580 default:
2581 return 99;
2584 default:
2585 return 99;
2586 #if 0
2587 case FFS:
2588 case FLOAT:
2589 case FIX:
2590 case UNSIGNED_FIX:
2591 /* XXX guess */
2592 fprintf (stderr, "unexpected code for thumb in rtx_costs: %s\n",
2593 rtx_name[code]);
2594 abort ();
2595 #endif
2599 switch (code)
2601 case MEM:
2602 /* Memory costs quite a lot for the first word, but subsequent words
2603 load at the equivalent of a single insn each. */
2604 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
2605 + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));
2607 case DIV:
2608 case MOD:
2609 return 100;
2611 case ROTATE:
2612 if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
2613 return 4;
2614 /* Fall through */
2615 case ROTATERT:
2616 if (mode != SImode)
2617 return 8;
2618 /* Fall through */
2619 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
2620 if (mode == DImode)
2621 return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
2622 + ((GET_CODE (XEXP (x, 0)) == REG
2623 || (GET_CODE (XEXP (x, 0)) == SUBREG
2624 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
2625 ? 0 : 8));
2626 return (1 + ((GET_CODE (XEXP (x, 0)) == REG
2627 || (GET_CODE (XEXP (x, 0)) == SUBREG
2628 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
2629 ? 0 : 4)
2630 + ((GET_CODE (XEXP (x, 1)) == REG
2631 || (GET_CODE (XEXP (x, 1)) == SUBREG
2632 && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
2633 || (GET_CODE (XEXP (x, 1)) == CONST_INT))
2634 ? 0 : 4));
2636 case MINUS:
2637 if (mode == DImode)
2638 return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
2639 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
2640 || (GET_CODE (XEXP (x, 0)) == CONST_INT
2641 && const_ok_for_arm (INTVAL (XEXP (x, 0)))))
2642 ? 0 : 8));
2644 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2645 return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2646 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
2647 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
2648 ? 0 : 8)
2649 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
2650 || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
2651 && const_double_rtx_ok_for_fpu (XEXP (x, 0))))
2652 ? 0 : 8));
2654 if (((GET_CODE (XEXP (x, 0)) == CONST_INT
2655 && const_ok_for_arm (INTVAL (XEXP (x, 0)))
2656 && REG_OR_SUBREG_REG (XEXP (x, 1))))
2657 || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
2658 || subcode == ASHIFTRT || subcode == LSHIFTRT
2659 || subcode == ROTATE || subcode == ROTATERT
2660 || (subcode == MULT
2661 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
2662 && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
2663 (INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
2664 && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
2665 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
2666 || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
2667 && REG_OR_SUBREG_REG (XEXP (x, 0))))
2668 return 1;
2669 /* Fall through */
2671 case PLUS:
2672 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2673 return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
2674 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2675 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
2676 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
2677 ? 0 : 8));
2679 /* Fall through */
2680 case AND: case XOR: case IOR:
2681 extra_cost = 0;
2683 /* Normally the frame registers will be spilt into reg+const during
2684 reload, so it is a bad idea to combine them with other instructions,
2685 since then they might not be moved outside of loops. As a compromise
2686 we allow integration with ops that have a constant as their second
2687 operand. */
2688 if ((REG_OR_SUBREG_REG (XEXP (x, 0))
2689 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
2690 && GET_CODE (XEXP (x, 1)) != CONST_INT)
2691 || (REG_OR_SUBREG_REG (XEXP (x, 0))
2692 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))))
2693 extra_cost = 4;
2695 if (mode == DImode)
2696 return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
2697 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2698 || (GET_CODE (XEXP (x, 1)) == CONST_INT
2699 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
2700 ? 0 : 8));
2702 if (REG_OR_SUBREG_REG (XEXP (x, 0)))
2703 return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
2704 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2705 || (GET_CODE (XEXP (x, 1)) == CONST_INT
2706 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
2707 ? 0 : 4));
2709 else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
2710 return (1 + extra_cost
2711 + ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
2712 || subcode == LSHIFTRT || subcode == ASHIFTRT
2713 || subcode == ROTATE || subcode == ROTATERT
2714 || (subcode == MULT
2715 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2716 && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
2717 (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0)))
2718 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
2719 && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
2720 || GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
2721 ? 0 : 4));
2723 return 8;
2725 case MULT:
2726 /* There is no point basing this on the tuning, since it is always the
2727 fast variant if it exists at all. */
2728 if (arm_fast_multiply && mode == DImode
2729 && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
2730 && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
2731 || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
2732 return 8;
2734 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2735 || mode == DImode)
2736 return 30;
2738 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
2740 unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
2741 & (unsigned HOST_WIDE_INT) 0xffffffff);
2742 int add_cost = const_ok_for_arm (i) ? 4 : 8;
2743 int j;
2745 /* Tune as appropriate. */
2746 int booth_unit_size = ((tune_flags & FL_FAST_MULT) ? 8 : 2);
2748 for (j = 0; i && j < 32; j += booth_unit_size)
2750 i >>= booth_unit_size;
2751 add_cost += 2;
2754 return add_cost;
2757 return (((tune_flags & FL_FAST_MULT) ? 8 : 30)
2758 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
2759 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));
2761 case TRUNCATE:
2762 if (arm_fast_multiply && mode == SImode
2763 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
2764 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
2765 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
2766 == GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)))
2767 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ZERO_EXTEND
2768 || GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SIGN_EXTEND))
2769 return 8;
2770 return 99;
2772 case NEG:
2773 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2774 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
2775 /* Fall through */
2776 case NOT:
2777 if (mode == DImode)
2778 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
2780 return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
2782 case IF_THEN_ELSE:
2783 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
2784 return 14;
2785 return 2;
2787 case COMPARE:
2788 return 1;
2790 case ABS:
2791 return 4 + (mode == DImode ? 4 : 0);
2793 case SIGN_EXTEND:
2794 if (GET_MODE (XEXP (x, 0)) == QImode)
2795 return (4 + (mode == DImode ? 4 : 0)
2796 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2797 /* Fall through */
2798 case ZERO_EXTEND:
2799 switch (GET_MODE (XEXP (x, 0)))
2801 case QImode:
2802 return (1 + (mode == DImode ? 4 : 0)
2803 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2805 case HImode:
2806 return (4 + (mode == DImode ? 4 : 0)
2807 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2809 case SImode:
2810 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2812 default:
2813 break;
2815 abort ();
2817 case CONST_INT:
2818 if (const_ok_for_arm (INTVAL (x)))
2819 return outer == SET ? 2 : -1;
2820 else if (outer == AND
2821 && const_ok_for_arm (~INTVAL (x)))
2822 return -1;
2823 else if ((outer == COMPARE
2824 || outer == PLUS || outer == MINUS)
2825 && const_ok_for_arm (-INTVAL (x)))
2826 return -1;
2827 else
2828 return 5;
2830 case CONST:
2831 case LABEL_REF:
2832 case SYMBOL_REF:
2833 return 6;
2835 case CONST_DOUBLE:
2836 if (const_double_rtx_ok_for_fpu (x))
2837 return outer == SET ? 2 : -1;
2838 else if ((outer == COMPARE || outer == PLUS)
2839 && neg_const_double_rtx_ok_for_fpu (x))
2840 return -1;
2841 return 7;
2843 default:
2844 return 99;
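/* Worked example for the MULT case above: for a multiply by the
   constant 0x50, add_cost starts at 4 (const_ok_for_arm succeeds).
   With FL_FAST_MULT the booth unit is 8 bits, the loop body runs once
   (0x50 >> 8 == 0) and the result is 4 + 2 = 6; with a 2 bit unit the
   body runs four times, giving 4 + 8 = 12.  */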
2848 static int
2849 arm_adjust_cost (insn, link, dep, cost)
2850 rtx insn;
2851 rtx link;
2852 rtx dep;
2853 int cost;
2855 rtx i_pat, d_pat;
2857 /* Some true dependencies can have a higher cost depending
2858 on precisely how certain input operands are used. */
2859 if (arm_is_xscale
2860 && REG_NOTE_KIND (link) == 0
2861 && recog_memoized (insn) >= 0
2862 && recog_memoized (dep) >= 0)
2864 int shift_opnum = get_attr_shift (insn);
2865 enum attr_type attr_type = get_attr_type (dep);
2867 /* If nonzero, SHIFT_OPNUM contains the operand number of a shifted
2868 operand for INSN. If we have a shifted input operand and the
2869 instruction we depend on is another ALU instruction, then we may
2870 have to account for an additional stall. */
2871 if (shift_opnum != 0 && attr_type == TYPE_NORMAL)
2873 rtx shifted_operand;
2874 int opno;
2876 /* Get the shifted operand. */
2877 extract_insn (insn);
2878 shifted_operand = recog_data.operand[shift_opnum];
2880 /* Iterate over all the operands in DEP. If we write an operand
2881 that overlaps with SHIFTED_OPERAND, then we have to increase the
2882 cost of this dependency. */
2883 extract_insn (dep);
2884 preprocess_constraints ();
2885 for (opno = 0; opno < recog_data.n_operands; opno++)
2887 /* We can ignore strict inputs. */
2888 if (recog_data.operand_type[opno] == OP_IN)
2889 continue;
2891 if (reg_overlap_mentioned_p (recog_data.operand[opno],
2892 shifted_operand))
2893 return 2;
2898 /* XXX This is not strictly true for the FPA. */
2899 if (REG_NOTE_KIND (link) == REG_DEP_ANTI
2900 || REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
2901 return 0;
2903 /* Call insns don't incur a stall, even if they follow a load. */
2904 if (REG_NOTE_KIND (link) == 0
2905 && GET_CODE (insn) == CALL_INSN)
2906 return 1;
2908 if ((i_pat = single_set (insn)) != NULL
2909 && GET_CODE (SET_SRC (i_pat)) == MEM
2910 && (d_pat = single_set (dep)) != NULL
2911 && GET_CODE (SET_DEST (d_pat)) == MEM)
2913 /* This is a load after a store; there is no conflict if the load reads
2914 from a cached area. Assume that loads from the stack, and from the
2915 constant pool are cached, and that others will miss. This is a
2916 hack. */
2918 if (CONSTANT_POOL_ADDRESS_P (XEXP (SET_SRC (i_pat), 0))
2919 || reg_mentioned_p (stack_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
2920 || reg_mentioned_p (frame_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
2921 || reg_mentioned_p (hard_frame_pointer_rtx,
2922 XEXP (SET_SRC (i_pat), 0)))
2923 return 1;
2926 return cost;
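/* Sketch of the XScale case handled at the top of arm_adjust_cost:

      mov     r2, r3                  @ DEP writes r2
      add     r0, r1, r2, lsl #2      @ INSN uses r2 as the shifted operand

   The shifter needs its operand earlier in the pipeline than a plain
   ALU operand would be needed, so the dependency is charged a cost of
   2 rather than the default.  */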
2929 /* This code has been fixed for cross compilation. */
2931 static int fpa_consts_inited = 0;
2933 static const char * const strings_fpa[8] =
2935 "0", "1", "2", "3",
2936 "4", "5", "0.5", "10"
2939 static REAL_VALUE_TYPE values_fpa[8];
2941 static void
2942 init_fpa_table ()
2944 int i;
2945 REAL_VALUE_TYPE r;
2947 for (i = 0; i < 8; i++)
2949 r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
2950 values_fpa[i] = r;
2953 fpa_consts_inited = 1;
2956 /* Return TRUE if rtx X is a valid immediate FPU constant. */
2959 const_double_rtx_ok_for_fpu (x)
2960 rtx x;
2962 REAL_VALUE_TYPE r;
2963 int i;
2965 if (!fpa_consts_inited)
2966 init_fpa_table ();
2968 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2969 if (REAL_VALUE_MINUS_ZERO (r))
2970 return 0;
2972 for (i = 0; i < 8; i++)
2973 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
2974 return 1;
2976 return 0;
2979 /* Return TRUE if rtx X is a valid immediate FPU constant once negated. */
2982 neg_const_double_rtx_ok_for_fpu (x)
2983 rtx x;
2985 REAL_VALUE_TYPE r;
2986 int i;
2988 if (!fpa_consts_inited)
2989 init_fpa_table ();
2991 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2992 r = REAL_VALUE_NEGATE (r);
2993 if (REAL_VALUE_MINUS_ZERO (r))
2994 return 0;
2996 for (i = 0; i < 8; i++)
2997 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
2998 return 1;
3000 return 0;
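/* So, for instance, 10.0 and 0.5 are valid FPA immediates while 7.0 is
   not, and -0.0 is always rejected; neg_const_double_rtx_ok_for_fpu
   additionally accepts values such as -1.0, whose negation appears in
   values_fpa.  */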
3003 /* Predicates for `match_operand' and `match_operator'. */
3005 /* s_register_operand is the same as register_operand, but it doesn't accept
3006 (SUBREG (MEM)...).
3008 This function exists because, at the time it was added, it led to better
3009 code. SUBREG(MEM) always needs a reload in the places where
3010 s_register_operand is used, and this seemed to lead to excessive
3011 reloading. */
3014 s_register_operand (op, mode)
3015 rtx op;
3016 enum machine_mode mode;
3018 if (GET_MODE (op) != mode && mode != VOIDmode)
3019 return 0;
3021 if (GET_CODE (op) == SUBREG)
3022 op = SUBREG_REG (op);
3024 /* We don't consider registers whose class is NO_REGS
3025 to be a register operand. */
3026 /* XXX might have to check for lo regs only for thumb ??? */
3027 return (GET_CODE (op) == REG
3028 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3029 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
3032 /* A hard register operand (even before reload). */
3035 arm_hard_register_operand (op, mode)
3036 rtx op;
3037 enum machine_mode mode;
3039 if (GET_MODE (op) != mode && mode != VOIDmode)
3040 return 0;
3042 return (GET_CODE (op) == REG
3043 && REGNO (op) < FIRST_PSEUDO_REGISTER);
3046 /* Only accept reg, subreg(reg), const_int. */
3049 reg_or_int_operand (op, mode)
3050 rtx op;
3051 enum machine_mode mode;
3053 if (GET_CODE (op) == CONST_INT)
3054 return 1;
3056 if (GET_MODE (op) != mode && mode != VOIDmode)
3057 return 0;
3059 if (GET_CODE (op) == SUBREG)
3060 op = SUBREG_REG (op);
3062 /* We don't consider registers whose class is NO_REGS
3063 to be a register operand. */
3064 return (GET_CODE (op) == REG
3065 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3066 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
3069 /* Return 1 if OP is an item in memory, given that we are in reload. */
3072 arm_reload_memory_operand (op, mode)
3073 rtx op;
3074 enum machine_mode mode ATTRIBUTE_UNUSED;
3076 int regno = true_regnum (op);
3078 return (!CONSTANT_P (op)
3079 && (regno == -1
3080 || (GET_CODE (op) == REG
3081 && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
3084 /* Return 1 if OP is a valid memory address, but not valid for a signed byte
3085 memory access (architecture V4).
3086 MODE is QImode if called when computing constraints, or VOIDmode when
3087 emitting patterns. In this latter case we cannot use memory_operand()
3088 because it will fail on badly formed MEMs, which is precisely what we are
3089 trying to catch. */
3092 bad_signed_byte_operand (op, mode)
3093 rtx op;
3094 enum machine_mode mode ATTRIBUTE_UNUSED;
3096 #if 0
3097 if ((mode == QImode && !memory_operand (op, mode)) || GET_CODE (op) != MEM)
3098 return 0;
3099 #endif
3100 if (GET_CODE (op) != MEM)
3101 return 0;
3103 op = XEXP (op, 0);
3105 /* A sum of anything more complex than reg + reg or reg + const is bad. */
3106 if ((GET_CODE (op) == PLUS || GET_CODE (op) == MINUS)
3107 && (!s_register_operand (XEXP (op, 0), VOIDmode)
3108 || (!s_register_operand (XEXP (op, 1), VOIDmode)
3109 && GET_CODE (XEXP (op, 1)) != CONST_INT)))
3110 return 1;
3112 /* Big constants are also bad. */
3113 if (GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT
3114 && (INTVAL (XEXP (op, 1)) > 0xff
3115 || -INTVAL (XEXP (op, 1)) > 0xff))
3116 return 1;
3118 /* Everything else is good, or will automatically be made so. */
3119 return 0;
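/* Examples of how the predicate above classifies addresses (a sketch):

      [r0]              OK
      [r0, #12]         OK   (small constant offset)
      [r0, r1]          OK   (reg + reg)
      [r0, #300]        bad  (offset outside the 8 bit ldrsb range)
      [r0, r1, lsl #2]  bad  (more complex than reg + reg)  */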
3122 /* Return TRUE for valid operands for the rhs of an ARM instruction. */
3125 arm_rhs_operand (op, mode)
3126 rtx op;
3127 enum machine_mode mode;
3129 return (s_register_operand (op, mode)
3130 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
3133 /* Return TRUE for valid operands for the
3134 rhs of an ARM instruction, or a load. */
3137 arm_rhsm_operand (op, mode)
3138 rtx op;
3139 enum machine_mode mode;
3141 return (s_register_operand (op, mode)
3142 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
3143 || memory_operand (op, mode));
3146 /* Return TRUE for valid operands for the rhs of an ARM instruction, or if a
3147 constant that is valid when negated. */
3150 arm_add_operand (op, mode)
3151 rtx op;
3152 enum machine_mode mode;
3154 if (TARGET_THUMB)
3155 return thumb_cmp_operand (op, mode);
3157 return (s_register_operand (op, mode)
3158 || (GET_CODE (op) == CONST_INT
3159 && (const_ok_for_arm (INTVAL (op))
3160 || const_ok_for_arm (-INTVAL (op)))));
3164 arm_not_operand (op, mode)
3165 rtx op;
3166 enum machine_mode mode;
3168 return (s_register_operand (op, mode)
3169 || (GET_CODE (op) == CONST_INT
3170 && (const_ok_for_arm (INTVAL (op))
3171 || const_ok_for_arm (~INTVAL (op)))));
3174 /* Return TRUE if the operand is a memory reference which contains an
3175 offsettable address. */
3178 offsettable_memory_operand (op, mode)
3179 rtx op;
3180 enum machine_mode mode;
3182 if (mode == VOIDmode)
3183 mode = GET_MODE (op);
3185 return (mode == GET_MODE (op)
3186 && GET_CODE (op) == MEM
3187 && offsettable_address_p (reload_completed | reload_in_progress,
3188 mode, XEXP (op, 0)));
3191 /* Return TRUE if the operand is a memory reference which is, or can be
3192 made, word aligned by adjusting the offset. */
3195 alignable_memory_operand (op, mode)
3196 rtx op;
3197 enum machine_mode mode;
3199 rtx reg;
3201 if (mode == VOIDmode)
3202 mode = GET_MODE (op);
3204 if (mode != GET_MODE (op) || GET_CODE (op) != MEM)
3205 return 0;
3207 op = XEXP (op, 0);
3209 return ((GET_CODE (reg = op) == REG
3210 || (GET_CODE (op) == SUBREG
3211 && GET_CODE (reg = SUBREG_REG (op)) == REG)
3212 || (GET_CODE (op) == PLUS
3213 && GET_CODE (XEXP (op, 1)) == CONST_INT
3214 && (GET_CODE (reg = XEXP (op, 0)) == REG
3215 || (GET_CODE (XEXP (op, 0)) == SUBREG
3216 && GET_CODE (reg = SUBREG_REG (XEXP (op, 0))) == REG))))
3217 && REGNO_POINTER_ALIGN (REGNO (reg)) >= 32);
3220 /* Similar to s_register_operand, but does not allow hard integer
3221 registers. */
3224 f_register_operand (op, mode)
3225 rtx op;
3226 enum machine_mode mode;
3228 if (GET_MODE (op) != mode && mode != VOIDmode)
3229 return 0;
3231 if (GET_CODE (op) == SUBREG)
3232 op = SUBREG_REG (op);
3234 /* We don't consider registers whose class is NO_REGS
3235 to be a register operand. */
3236 return (GET_CODE (op) == REG
3237 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3238 || REGNO_REG_CLASS (REGNO (op)) == FPU_REGS));
3241 /* Return TRUE for valid operands for the rhs of an FPU instruction. */
3244 fpu_rhs_operand (op, mode)
3245 rtx op;
3246 enum machine_mode mode;
3248 if (s_register_operand (op, mode))
3249 return TRUE;
3251 if (GET_MODE (op) != mode && mode != VOIDmode)
3252 return FALSE;
3254 if (GET_CODE (op) == CONST_DOUBLE)
3255 return const_double_rtx_ok_for_fpu (op);
3257 return FALSE;
3261 fpu_add_operand (op, mode)
3262 rtx op;
3263 enum machine_mode mode;
3265 if (s_register_operand (op, mode))
3266 return TRUE;
3268 if (GET_MODE (op) != mode && mode != VOIDmode)
3269 return FALSE;
3271 if (GET_CODE (op) == CONST_DOUBLE)
3272 return (const_double_rtx_ok_for_fpu (op)
3273 || neg_const_double_rtx_ok_for_fpu (op));
3275 return FALSE;
3278 /* Return nonzero if OP is a constant power of two. */
3281 power_of_two_operand (op, mode)
3282 rtx op;
3283 enum machine_mode mode ATTRIBUTE_UNUSED;
3285 if (GET_CODE (op) == CONST_INT)
3287 HOST_WIDE_INT value = INTVAL (op);
3289 return value != 0 && (value & (value - 1)) == 0;
3292 return FALSE;
3295 /* Return TRUE for a valid operand of a DImode operation.
3296 Either: REG, SUBREG, CONST_INT, CONST_DOUBLE or MEM(DImode_address).
3297 Note that this disallows MEM(REG+REG), but allows
3298 MEM(PRE/POST_INC/DEC(REG)). */
3301 di_operand (op, mode)
3302 rtx op;
3303 enum machine_mode mode;
3305 if (s_register_operand (op, mode))
3306 return TRUE;
3308 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
3309 return FALSE;
3311 if (GET_CODE (op) == SUBREG)
3312 op = SUBREG_REG (op);
3314 switch (GET_CODE (op))
3316 case CONST_DOUBLE:
3317 case CONST_INT:
3318 return TRUE;
3320 case MEM:
3321 return memory_address_p (DImode, XEXP (op, 0));
3323 default:
3324 return FALSE;
3328 /* Like di_operand, but don't accept constants. */
3331 nonimmediate_di_operand (op, mode)
3332 rtx op;
3333 enum machine_mode mode;
3335 if (s_register_operand (op, mode))
3336 return TRUE;
3338 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
3339 return FALSE;
3341 if (GET_CODE (op) == SUBREG)
3342 op = SUBREG_REG (op);
3344 if (GET_CODE (op) == MEM)
3345 return memory_address_p (DImode, XEXP (op, 0));
3347 return FALSE;
3350 /* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
3351 Either: REG, SUBREG, CONST_DOUBLE or MEM(DFmode_address).
3352 Note that this disallows MEM(REG+REG), but allows
3353 MEM(PRE/POST_INC/DEC(REG)). */
3356 soft_df_operand (op, mode)
3357 rtx op;
3358 enum machine_mode mode;
3360 if (s_register_operand (op, mode))
3362 if (GET_CODE (op) == SUBREG)
3363 op = SUBREG_REG (op);
3365 if (REGNO (op) == IP_REGNUM)
3366 return FALSE;
3367 return TRUE;
3370 if (mode != VOIDmode && GET_MODE (op) != mode)
3371 return FALSE;
3373 if (GET_CODE (op) == SUBREG && CONSTANT_P (SUBREG_REG (op)))
3374 return FALSE;
3376 if (GET_CODE (op) == SUBREG)
3377 op = SUBREG_REG (op);
3379 switch (GET_CODE (op))
3381 case CONST_DOUBLE:
3382 return TRUE;
3384 case MEM:
3385 return memory_address_p (DFmode, XEXP (op, 0));
3387 default:
3388 return FALSE;
3392 /* Like soft_df_operand, but don't accept constants. */
3395 nonimmediate_soft_df_operand (op, mode)
3396 rtx op;
3397 enum machine_mode mode;
3399 if (s_register_operand (op, mode))
3401 if (GET_CODE (op) == SUBREG)
3402 op = SUBREG_REG (op);
3404 if (REGNO (op) == IP_REGNUM)
3405 return FALSE;
3406 return TRUE;
3409 if (mode != VOIDmode && GET_MODE (op) != mode)
3410 return FALSE;
3412 if (GET_CODE (op) == SUBREG)
3413 op = SUBREG_REG (op);
3415 if (GET_CODE (op) == MEM)
3416 return memory_address_p (DFmode, XEXP (op, 0));
3417 return FALSE;
3420 /* Return TRUE for valid index operands. */
3423 index_operand (op, mode)
3424 rtx op;
3425 enum machine_mode mode;
3427 return (s_register_operand (op, mode)
3428 || (immediate_operand (op, mode)
3429 && (GET_CODE (op) != CONST_INT
3430 || (INTVAL (op) < 4096 && INTVAL (op) > -4096))));
3433 /* Return TRUE for valid shifts by a constant. This also accepts any
3434 power of two on the (somewhat overly relaxed) assumption that the
3435 shift operator in this case was a mult. */
3438 const_shift_operand (op, mode)
3439 rtx op;
3440 enum machine_mode mode;
3442 return (power_of_two_operand (op, mode)
3443 || (immediate_operand (op, mode)
3444 && (GET_CODE (op) != CONST_INT
3445 || (INTVAL (op) < 32 && INTVAL (op) > 0))));
3448 /* Return TRUE for arithmetic operators which can be combined with a multiply
3449 (shift). */
3452 shiftable_operator (x, mode)
3453 rtx x;
3454 enum machine_mode mode;
3456 enum rtx_code code;
3458 if (GET_MODE (x) != mode)
3459 return FALSE;
3461 code = GET_CODE (x);
3463 return (code == PLUS || code == MINUS
3464 || code == IOR || code == XOR || code == AND);
3467 /* Return TRUE for binary logical operators. */
3470 logical_binary_operator (x, mode)
3471 rtx x;
3472 enum machine_mode mode;
3474 enum rtx_code code;
3476 if (GET_MODE (x) != mode)
3477 return FALSE;
3479 code = GET_CODE (x);
3481 return (code == IOR || code == XOR || code == AND);
3484 /* Return TRUE for shift operators. */
3487 shift_operator (x, mode)
3488 rtx x;
3489 enum machine_mode mode;
3491 enum rtx_code code;
3493 if (GET_MODE (x) != mode)
3494 return FALSE;
3496 code = GET_CODE (x);
3498 if (code == MULT)
3499 return power_of_two_operand (XEXP (x, 1), mode);
3501 return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
3502 || code == ROTATERT);
3505 /* Return TRUE if x is EQ or NE. */
3508 equality_operator (x, mode)
3509 rtx x;
3510 enum machine_mode mode ATTRIBUTE_UNUSED;
3512 return GET_CODE (x) == EQ || GET_CODE (x) == NE;
3515 /* Return TRUE if x is a comparison operator other than LTGT or UNEQ. */
3518 arm_comparison_operator (x, mode)
3519 rtx x;
3520 enum machine_mode mode;
3522 return (comparison_operator (x, mode)
3523 && GET_CODE (x) != LTGT
3524 && GET_CODE (x) != UNEQ);
3527 /* Return TRUE for SMIN SMAX UMIN UMAX operators. */
3530 minmax_operator (x, mode)
3531 rtx x;
3532 enum machine_mode mode;
3534 enum rtx_code code = GET_CODE (x);
3536 if (GET_MODE (x) != mode)
3537 return FALSE;
3539 return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
3542 /* Return TRUE if this is the condition code register; if we aren't given
3543 a mode, accept any register in class CCmode. */
3546 cc_register (x, mode)
3547 rtx x;
3548 enum machine_mode mode;
3550 if (mode == VOIDmode)
3552 mode = GET_MODE (x);
3554 if (GET_MODE_CLASS (mode) != MODE_CC)
3555 return FALSE;
3558 if ( GET_MODE (x) == mode
3559 && GET_CODE (x) == REG
3560 && REGNO (x) == CC_REGNUM)
3561 return TRUE;
3563 return FALSE;
3566 /* Return TRUE if this is the condition code register; if we aren't given
3567 a mode, accept any register in class CCmode which indicates a dominance
3568 expression. */
3571 dominant_cc_register (x, mode)
3572 rtx x;
3573 enum machine_mode mode;
3575 if (mode == VOIDmode)
3577 mode = GET_MODE (x);
3579 if (GET_MODE_CLASS (mode) != MODE_CC)
3580 return FALSE;
3583 if ( mode != CC_DNEmode && mode != CC_DEQmode
3584 && mode != CC_DLEmode && mode != CC_DLTmode
3585 && mode != CC_DGEmode && mode != CC_DGTmode
3586 && mode != CC_DLEUmode && mode != CC_DLTUmode
3587 && mode != CC_DGEUmode && mode != CC_DGTUmode)
3588 return FALSE;
3590 return cc_register (x, mode);
3593 /* Return TRUE if X references a SYMBOL_REF. */
3596 symbol_mentioned_p (x)
3597 rtx x;
3599 const char * fmt;
3600 int i;
3602 if (GET_CODE (x) == SYMBOL_REF)
3603 return 1;
3605 fmt = GET_RTX_FORMAT (GET_CODE (x));
3607 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3609 if (fmt[i] == 'E')
3611 int j;
3613 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3614 if (symbol_mentioned_p (XVECEXP (x, i, j)))
3615 return 1;
3617 else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
3618 return 1;
3621 return 0;
3624 /* Return TRUE if X references a LABEL_REF. */
3627 label_mentioned_p (x)
3628 rtx x;
3630 const char * fmt;
3631 int i;
3633 if (GET_CODE (x) == LABEL_REF)
3634 return 1;
3636 fmt = GET_RTX_FORMAT (GET_CODE (x));
3637 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3639 if (fmt[i] == 'E')
3641 int j;
3643 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3644 if (label_mentioned_p (XVECEXP (x, i, j)))
3645 return 1;
3647 else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
3648 return 1;
3651 return 0;
3654 enum rtx_code
3655 minmax_code (x)
3656 rtx x;
3658 enum rtx_code code = GET_CODE (x);
3660 if (code == SMAX)
3661 return GE;
3662 else if (code == SMIN)
3663 return LE;
3664 else if (code == UMIN)
3665 return LEU;
3666 else if (code == UMAX)
3667 return GEU;
3669 abort ();
3672 /* Return 1 if memory locations are adjacent. */
3675 adjacent_mem_locations (a, b)
3676 rtx a, b;
3678 if ((GET_CODE (XEXP (a, 0)) == REG
3679 || (GET_CODE (XEXP (a, 0)) == PLUS
3680 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
3681 && (GET_CODE (XEXP (b, 0)) == REG
3682 || (GET_CODE (XEXP (b, 0)) == PLUS
3683 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
3685 int val0 = 0, val1 = 0;
3686 int reg0, reg1;
3688 if (GET_CODE (XEXP (a, 0)) == PLUS)
3690 reg0 = REGNO (XEXP (XEXP (a, 0), 0));
3691 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
3693 else
3694 reg0 = REGNO (XEXP (a, 0));
3696 if (GET_CODE (XEXP (b, 0)) == PLUS)
3698 reg1 = REGNO (XEXP (XEXP (b, 0), 0));
3699 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
3701 else
3702 reg1 = REGNO (XEXP (b, 0));
3704 return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
3706 return 0;
3709 /* Return 1 if OP is a load multiple operation. It is known to be
3710 a PARALLEL, and the first section will be tested. */
3713 load_multiple_operation (op, mode)
3714 rtx op;
3715 enum machine_mode mode ATTRIBUTE_UNUSED;
3717 HOST_WIDE_INT count = XVECLEN (op, 0);
3718 int dest_regno;
3719 rtx src_addr;
3720 HOST_WIDE_INT i = 1, base = 0;
3721 rtx elt;
3723 if (count <= 1
3724 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
3725 return 0;
3727 /* Check to see if this might be a write-back. */
3728 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
3730 i++;
3731 base = 1;
3733 /* Now check it more carefully. */
3734 if (GET_CODE (SET_DEST (elt)) != REG
3735 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
3736 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
3737 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
3738 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
3739 return 0;
3742 /* Perform a quick check so we don't blow up below. */
3743 if (count <= i
3744 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
3745 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
3746 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
3747 return 0;
3749 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
3750 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);
3752 for (; i < count; i++)
3754 elt = XVECEXP (op, 0, i);
3756 if (GET_CODE (elt) != SET
3757 || GET_CODE (SET_DEST (elt)) != REG
3758 || GET_MODE (SET_DEST (elt)) != SImode
3759 || REGNO (SET_DEST (elt)) != (unsigned int)(dest_regno + i - base)
3760 || GET_CODE (SET_SRC (elt)) != MEM
3761 || GET_MODE (SET_SRC (elt)) != SImode
3762 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
3763 || !rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
3764 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
3765 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
3766 return 0;
3769 return 1;
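/* Shape of an rtx accepted above (a two register, non write-back
   sketch):

   (parallel [(set (reg:SI 4) (mem:SI (reg:SI 0)))
              (set (reg:SI 5) (mem:SI (plus:SI (reg:SI 0)
                                               (const_int 4))))])  */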
3772 /* Return 1 if OP is a store multiple operation. It is known to be
3773 a PARALLEL, and the first section will be tested. */
3776 store_multiple_operation (op, mode)
3777 rtx op;
3778 enum machine_mode mode ATTRIBUTE_UNUSED;
3780 HOST_WIDE_INT count = XVECLEN (op, 0);
3781 int src_regno;
3782 rtx dest_addr;
3783 HOST_WIDE_INT i = 1, base = 0;
3784 rtx elt;
3786 if (count <= 1
3787 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
3788 return 0;
3790 /* Check to see if this might be a write-back. */
3791 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
3793 i++;
3794 base = 1;
3796 /* Now check it more carefully. */
3797 if (GET_CODE (SET_DEST (elt)) != REG
3798 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
3799 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
3800 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
3801 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
3802 return 0;
3805 /* Perform a quick check so we don't blow up below. */
3806 if (count <= i
3807 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
3808 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
3809 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
3810 return 0;
3812 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
3813 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);
3815 for (; i < count; i++)
3817 elt = XVECEXP (op, 0, i);
3819 if (GET_CODE (elt) != SET
3820 || GET_CODE (SET_SRC (elt)) != REG
3821 || GET_MODE (SET_SRC (elt)) != SImode
3822 || REGNO (SET_SRC (elt)) != (unsigned int)(src_regno + i - base)
3823 || GET_CODE (SET_DEST (elt)) != MEM
3824 || GET_MODE (SET_DEST (elt)) != SImode
3825 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
3826 || !rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
3827 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
3828 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
3829 return 0;
3832 return 1;
3836 load_multiple_sequence (operands, nops, regs, base, load_offset)
3837 rtx * operands;
3838 int nops;
3839 int * regs;
3840 int * base;
3841 HOST_WIDE_INT * load_offset;
3843 int unsorted_regs[4];
3844 HOST_WIDE_INT unsorted_offsets[4];
3845 int order[4];
3846 int base_reg = -1;
3847 int i;
3849 /* Can only handle 2, 3, or 4 insns at present,
3850 though could be easily extended if required. */
3851 if (nops < 2 || nops > 4)
3852 abort ();
3854 /* Loop over the operands and check that the memory references are
3855 suitable (i.e. immediate offsets from the same base register). At
3856 the same time, extract the target register, and the memory
3857 offsets. */
3858 for (i = 0; i < nops; i++)
3860 rtx reg;
3861 rtx offset;
3863 /* Convert a subreg of a mem into the mem itself. */
3864 if (GET_CODE (operands[nops + i]) == SUBREG)
3865 operands[nops + i] = alter_subreg (operands + (nops + i));
3867 if (GET_CODE (operands[nops + i]) != MEM)
3868 abort ();
3870 /* Don't reorder volatile memory references; it doesn't seem worth
3871 looking for the case where the order is ok anyway. */
3872 if (MEM_VOLATILE_P (operands[nops + i]))
3873 return 0;
3875 offset = const0_rtx;
3877 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
3878 || (GET_CODE (reg) == SUBREG
3879 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3880 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
3881 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
3882 == REG)
3883 || (GET_CODE (reg) == SUBREG
3884 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3885 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
3886 == CONST_INT)))
3888 if (i == 0)
3890 base_reg = REGNO (reg);
3891 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
3892 ? REGNO (operands[i])
3893 : REGNO (SUBREG_REG (operands[i])));
3894 order[0] = 0;
3896 else
3898 if (base_reg != (int) REGNO (reg))
3899 /* Not addressed from the same base register. */
3900 return 0;
3902 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
3903 ? REGNO (operands[i])
3904 : REGNO (SUBREG_REG (operands[i])));
3905 if (unsorted_regs[i] < unsorted_regs[order[0]])
3906 order[0] = i;
3909 /* If it isn't an integer register, or if it overwrites the
3910 base register but isn't the last insn in the list, then
3911 we can't do this. */
3912 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14
3913 || (i != nops - 1 && unsorted_regs[i] == base_reg))
3914 return 0;
3916 unsorted_offsets[i] = INTVAL (offset);
3918 else
3919 /* Not a suitable memory address. */
3920 return 0;
3923 /* All the useful information has now been extracted from the
3924 operands into unsorted_regs and unsorted_offsets; additionally,
3925 order[0] has been set to the lowest numbered register in the
3926 list. Sort the registers into order, and check that the memory
3927 offsets are ascending and adjacent. */
3929 for (i = 1; i < nops; i++)
3931 int j;
3933 order[i] = order[i - 1];
3934 for (j = 0; j < nops; j++)
3935 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
3936 && (order[i] == order[i - 1]
3937 || unsorted_regs[j] < unsorted_regs[order[i]]))
3938 order[i] = j;
3940 /* Have we found a suitable register? If not, one must be used more
3941 than once. */
3942 if (order[i] == order[i - 1])
3943 return 0;
3945 /* Is the memory address adjacent and ascending? */
3946 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
3947 return 0;
3950 if (base)
3952 *base = base_reg;
3954 for (i = 0; i < nops; i++)
3955 regs[i] = unsorted_regs[order[i]];
3957 *load_offset = unsorted_offsets[order[0]];
3960 if (unsorted_offsets[order[0]] == 0)
3961 return 1; /* ldmia */
3963 if (unsorted_offsets[order[0]] == 4)
3964 return 2; /* ldmib */
3966 if (unsorted_offsets[order[nops - 1]] == 0)
3967 return 3; /* ldmda */
3969 if (unsorted_offsets[order[nops - 1]] == -4)
3970 return 4; /* ldmdb */
3972 /* For ARM8,9 & StrongARM, 2 ldr instructions are faster than an ldm
3973 if the offset isn't small enough. The reason 2 ldrs are faster
3974 is because these ARMs are able to do more than one cache access
3975 in a single cycle. The ARM9 and StrongARM have Harvard caches,
3976 whilst the ARM8 has a double bandwidth cache. This means that
3977 these cores can do both an instruction fetch and a data fetch in
3978 a single cycle, so the trick of calculating the address into a
3979 scratch register (one of the result regs) and then doing a load
3980 multiple actually becomes slower (and no smaller in code size).
3981 That is the transformation
3983 ldr rd1, [rbase + offset]
3984 ldr rd2, [rbase + offset + 4]
3988 add rd1, rbase, offset
3989 ldmia rd1, {rd1, rd2}
3991 produces worse code -- '3 cycles + any stalls on rd2' instead of
3992 '2 cycles + any stalls on rd2'. On ARMs with only one cache
3993 access per cycle, the first sequence could never complete in less
3994 than 6 cycles, whereas the ldm sequence would only take 5 and
3995 would make better use of sequential accesses if not hitting the
3996 cache.
3998 We cheat here and test 'arm_ld_sched' which we currently know to
3999 only be true for the ARM8, ARM9 and StrongARM. If this ever
4000 changes, then the test below needs to be reworked. */
4001 if (nops == 2 && arm_ld_sched)
4002 return 0;
4004 /* Can't do it without setting up the offset; only do this if it takes
4005 no more than one insn. */
4006 return (const_ok_for_arm (unsorted_offsets[order[0]])
4007 || const_ok_for_arm (-unsorted_offsets[order[0]])) ? 5 : 0;
4010 const char *
4011 emit_ldm_seq (operands, nops)
4012 rtx * operands;
4013 int nops;
4015 int regs[4];
4016 int base_reg;
4017 HOST_WIDE_INT offset;
4018 char buf[100];
4019 int i;
4021 switch (load_multiple_sequence (operands, nops, regs, &base_reg, &offset))
4023 case 1:
4024 strcpy (buf, "ldm%?ia\t");
4025 break;
4027 case 2:
4028 strcpy (buf, "ldm%?ib\t");
4029 break;
4031 case 3:
4032 strcpy (buf, "ldm%?da\t");
4033 break;
4035 case 4:
4036 strcpy (buf, "ldm%?db\t");
4037 break;
4039 case 5:
4040 if (offset >= 0)
4041 sprintf (buf, "add%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
4042 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
4043 (long) offset);
4044 else
4045 sprintf (buf, "sub%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
4046 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
4047 (long) -offset);
4048 output_asm_insn (buf, operands);
4049 base_reg = regs[0];
4050 strcpy (buf, "ldm%?ia\t");
4051 break;
4053 default:
4054 abort ();
4057 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
4058 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
4060 for (i = 1; i < nops; i++)
4061 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
4062 reg_names[regs[i]]);
4064 strcat (buf, "}\t%@ phole ldm");
4066 output_asm_insn (buf, operands);
4067 return "";
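/* A two word example of the text emitted above (a sketch): for case 1
   the output is

      ldmia   r0, {r4, r5}    @ phole ldm

   and for case 5 an add (or sub) of the offset into regs[0] precedes
   the ldmia, with regs[0] then serving as the base register.  */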
4071 store_multiple_sequence (operands, nops, regs, base, load_offset)
4072 rtx * operands;
4073 int nops;
4074 int * regs;
4075 int * base;
4076 HOST_WIDE_INT * load_offset;
4078 int unsorted_regs[4];
4079 HOST_WIDE_INT unsorted_offsets[4];
4080 int order[4];
4081 int base_reg = -1;
4082 int i;
4084 /* Can only handle 2, 3, or 4 insns at present, though could be easily
4085 extended if required. */
4086 if (nops < 2 || nops > 4)
4087 abort ();
4089 /* Loop over the operands and check that the memory references are
4090 suitable (i.e. immediate offsets from the same base register). At
4091 the same time, extract the target register, and the memory
4092 offsets. */
4093 for (i = 0; i < nops; i++)
4095 rtx reg;
4096 rtx offset;
4098 /* Convert a subreg of a mem into the mem itself. */
4099 if (GET_CODE (operands[nops + i]) == SUBREG)
4100 operands[nops + i] = alter_subreg (operands + (nops + i));
4102 if (GET_CODE (operands[nops + i]) != MEM)
4103 abort ();
4105 /* Don't reorder volatile memory references; it doesn't seem worth
4106 looking for the case where the order is ok anyway. */
4107 if (MEM_VOLATILE_P (operands[nops + i]))
4108 return 0;
4110 offset = const0_rtx;
4112 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
4113 || (GET_CODE (reg) == SUBREG
4114 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4115 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
4116 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
4117 == REG)
4118 || (GET_CODE (reg) == SUBREG
4119 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4120 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
4121 == CONST_INT)))
4123 if (i == 0)
4125 base_reg = REGNO (reg);
4126 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
4127 ? REGNO (operands[i])
4128 : REGNO (SUBREG_REG (operands[i])));
4129 order[0] = 0;
4131 else
4133 if (base_reg != (int) REGNO (reg))
4134 /* Not addressed from the same base register. */
4135 return 0;
4137 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
4138 ? REGNO (operands[i])
4139 : REGNO (SUBREG_REG (operands[i])));
4140 if (unsorted_regs[i] < unsorted_regs[order[0]])
4141 order[0] = i;
4144 /* If it isn't an integer register, then we can't do this. */
4145 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14)
4146 return 0;
4148 unsorted_offsets[i] = INTVAL (offset);
4150 else
4151 /* Not a suitable memory address. */
4152 return 0;
4155 /* All the useful information has now been extracted from the
4156 operands into unsorted_regs and unsorted_offsets; additionally,
4157 order[0] has been set to the lowest numbered register in the
4158 list. Sort the registers into order, and check that the memory
4159 offsets are ascending and adjacent. */
4161 for (i = 1; i < nops; i++)
4163 int j;
4165 order[i] = order[i - 1];
4166 for (j = 0; j < nops; j++)
4167 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
4168 && (order[i] == order[i - 1]
4169 || unsorted_regs[j] < unsorted_regs[order[i]]))
4170 order[i] = j;
4172 /* Have we found a suitable register? If not, one must be used more
4173 than once. */
4174 if (order[i] == order[i - 1])
4175 return 0;
4177 /* Is the memory address adjacent and ascending? */
4178 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
4179 return 0;
4182 if (base)
4184 *base = base_reg;
4186 for (i = 0; i < nops; i++)
4187 regs[i] = unsorted_regs[order[i]];
4189 *load_offset = unsorted_offsets[order[0]];
4192 if (unsorted_offsets[order[0]] == 0)
4193 return 1; /* stmia */
4195 if (unsorted_offsets[order[0]] == 4)
4196 return 2; /* stmib */
4198 if (unsorted_offsets[order[nops - 1]] == 0)
4199 return 3; /* stmda */
4201 if (unsorted_offsets[order[nops - 1]] == -4)
4202 return 4; /* stmdb */
4204 return 0;
4207 const char *
4208 emit_stm_seq (operands, nops)
4209 rtx * operands;
4210 int nops;
4212 int regs[4];
4213 int base_reg;
4214 HOST_WIDE_INT offset;
4215 char buf[100];
4216 int i;
4218 switch (store_multiple_sequence (operands, nops, regs, &base_reg, &offset))
4220 case 1:
4221 strcpy (buf, "stm%?ia\t");
4222 break;
4224 case 2:
4225 strcpy (buf, "stm%?ib\t");
4226 break;
4228 case 3:
4229 strcpy (buf, "stm%?da\t");
4230 break;
4232 case 4:
4233 strcpy (buf, "stm%?db\t");
4234 break;
4236 default:
4237 abort ();
4240 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
4241 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
4243 for (i = 1; i < nops; i++)
4244 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
4245 reg_names[regs[i]]);
4247 strcat (buf, "}\t%@ phole stm");
4249 output_asm_insn (buf, operands);
4250 return "";
4253 int
4254 multi_register_push (op, mode)
4255 rtx op;
4256 enum machine_mode mode ATTRIBUTE_UNUSED;
4258 if (GET_CODE (op) != PARALLEL
4259 || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
4260 || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
4261 || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != UNSPEC_PUSH_MULT))
4262 return 0;
4264 return 1;
4267 /* Routines for use in generating RTL. */
4269 rtx
4270 arm_gen_load_multiple (base_regno, count, from, up, write_back, unchanging_p,
4271 in_struct_p, scalar_p)
4272 int base_regno;
4273 int count;
4274 rtx from;
4275 int up;
4276 int write_back;
4277 int unchanging_p;
4278 int in_struct_p;
4279 int scalar_p;
4281 int i = 0, j;
4282 rtx result;
4283 int sign = up ? 1 : -1;
4284 rtx mem;
4286 /* XScale has load-store double instructions, but they have stricter
4287 alignment requirements than load-store multiple, so we cannot
4288 use them.
4290 For XScale, ldm requires 2 + NREGS cycles to complete and blocks
4291 the pipeline until completion.
4293 NREGS CYCLES
4294 1 3
4295 2 4
4296 3 5
4297 4 6
4299 An ldr instruction takes 1-3 cycles, but does not block the
4300 pipeline.
4302 NREGS CYCLES
4303 1 1-3
4304 2 2-6
4305 3 3-9
4306 4 4-12
4308 Best case ldr will always win. However, the more ldr instructions
4309 we issue, the less likely we are to be able to schedule them well.
4310 Using ldr instructions also increases code size.
4312 As a compromise, we use ldr for counts of 1 or 2 regs, and ldm
4313 for counts of 3 or 4 regs. */
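/* Putting illustrative numbers on the tables above: for COUNT == 2 an
   ldm costs 2 + 2 = 4 cycles and stalls the pipeline, while two
   independent ldrs cost 2-6 cycles but can overlap other work, so the
   code below emits discrete loads for 1 or 2 registers. */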
4314 if (arm_is_xscale && count <= 2 && ! optimize_size)
4316 rtx seq;
4318 start_sequence ();
4320 for (i = 0; i < count; i++)
4322 mem = gen_rtx_MEM (SImode, plus_constant (from, i * 4 * sign));
4323 RTX_UNCHANGING_P (mem) = unchanging_p;
4324 MEM_IN_STRUCT_P (mem) = in_struct_p;
4325 MEM_SCALAR_P (mem) = scalar_p;
4326 emit_move_insn (gen_rtx_REG (SImode, base_regno + i), mem);
4329 if (write_back)
4330 emit_move_insn (from, plus_constant (from, count * 4 * sign));
4332 seq = gen_sequence ();
4333 end_sequence ();
4335 return seq;
4338 result = gen_rtx_PARALLEL (VOIDmode,
4339 rtvec_alloc (count + (write_back ? 1 : 0)));
4340 if (write_back)
4342 XVECEXP (result, 0, 0)
4343 = gen_rtx_SET (GET_MODE (from), from,
4344 plus_constant (from, count * 4 * sign));
4345 i = 1;
4346 count++;
4349 for (j = 0; i < count; i++, j++)
4351 mem = gen_rtx_MEM (SImode, plus_constant (from, j * 4 * sign));
4352 RTX_UNCHANGING_P (mem) = unchanging_p;
4353 MEM_IN_STRUCT_P (mem) = in_struct_p;
4354 MEM_SCALAR_P (mem) = scalar_p;
4355 XVECEXP (result, 0, i)
4356 = gen_rtx_SET (VOIDmode, gen_rtx_REG (SImode, base_regno + j), mem);
4359 return result;
4362 rtx
4363 arm_gen_store_multiple (base_regno, count, to, up, write_back, unchanging_p,
4364 in_struct_p, scalar_p)
4365 int base_regno;
4366 int count;
4367 rtx to;
4368 int up;
4369 int write_back;
4370 int unchanging_p;
4371 int in_struct_p;
4372 int scalar_p;
4374 int i = 0, j;
4375 rtx result;
4376 int sign = up ? 1 : -1;
4377 rtx mem;
4379 /* See arm_gen_load_multiple for discussion of
4380 the pros/cons of ldm/stm usage for XScale. */
4381 if (arm_is_xscale && count <= 2 && ! optimize_size)
4383 rtx seq;
4385 start_sequence ();
4387 for (i = 0; i < count; i++)
4389 mem = gen_rtx_MEM (SImode, plus_constant (to, i * 4 * sign));
4390 RTX_UNCHANGING_P (mem) = unchanging_p;
4391 MEM_IN_STRUCT_P (mem) = in_struct_p;
4392 MEM_SCALAR_P (mem) = scalar_p;
4393 emit_move_insn (mem, gen_rtx_REG (SImode, base_regno + i));
4396 if (write_back)
4397 emit_move_insn (to, plus_constant (to, count * 4 * sign));
4399 seq = gen_sequence ();
4400 end_sequence ();
4402 return seq;
4405 result = gen_rtx_PARALLEL (VOIDmode,
4406 rtvec_alloc (count + (write_back ? 1 : 0)));
4407 if (write_back)
4409 XVECEXP (result, 0, 0)
4410 = gen_rtx_SET (GET_MODE (to), to,
4411 plus_constant (to, count * 4 * sign));
4412 i = 1;
4413 count++;
4416 for (j = 0; i < count; i++, j++)
4418 mem = gen_rtx_MEM (SImode, plus_constant (to, j * 4 * sign));
4419 RTX_UNCHANGING_P (mem) = unchanging_p;
4420 MEM_IN_STRUCT_P (mem) = in_struct_p;
4421 MEM_SCALAR_P (mem) = scalar_p;
4423 XVECEXP (result, 0, i)
4424 = gen_rtx_SET (VOIDmode, mem, gen_rtx_REG (SImode, base_regno + j));
4427 return result;
4430 int
4431 arm_gen_movstrqi (operands)
4432 rtx * operands;
4434 HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
4435 int i;
4436 rtx src, dst;
4437 rtx st_src, st_dst, fin_src, fin_dst;
4438 rtx part_bytes_reg = NULL;
4439 rtx mem;
4440 int dst_unchanging_p, dst_in_struct_p, src_unchanging_p, src_in_struct_p;
4441 int dst_scalar_p, src_scalar_p;
4443 if (GET_CODE (operands[2]) != CONST_INT
4444 || GET_CODE (operands[3]) != CONST_INT
4445 || INTVAL (operands[2]) > 64
4446 || INTVAL (operands[3]) & 3)
4447 return 0;
4449 st_dst = XEXP (operands[0], 0);
4450 st_src = XEXP (operands[1], 0);
4452 dst_unchanging_p = RTX_UNCHANGING_P (operands[0]);
4453 dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
4454 dst_scalar_p = MEM_SCALAR_P (operands[0]);
4455 src_unchanging_p = RTX_UNCHANGING_P (operands[1]);
4456 src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);
4457 src_scalar_p = MEM_SCALAR_P (operands[1]);
4459 fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
4460 fin_src = src = copy_to_mode_reg (SImode, st_src);
4462 in_words_to_go = NUM_INTS (INTVAL (operands[2]));
4463 out_words_to_go = INTVAL (operands[2]) / 4;
4464 last_bytes = INTVAL (operands[2]) & 3;
4466 if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
4467 part_bytes_reg = gen_rtx_REG (SImode, (in_words_to_go - 1) & 3);
4469 for (i = 0; in_words_to_go >= 2; i+=4)
4471 if (in_words_to_go > 4)
4472 emit_insn (arm_gen_load_multiple (0, 4, src, TRUE, TRUE,
4473 src_unchanging_p,
4474 src_in_struct_p,
4475 src_scalar_p));
4476 else
4477 emit_insn (arm_gen_load_multiple (0, in_words_to_go, src, TRUE,
4478 FALSE, src_unchanging_p,
4479 src_in_struct_p, src_scalar_p));
4481 if (out_words_to_go)
4483 if (out_words_to_go > 4)
4484 emit_insn (arm_gen_store_multiple (0, 4, dst, TRUE, TRUE,
4485 dst_unchanging_p,
4486 dst_in_struct_p,
4487 dst_scalar_p));
4488 else if (out_words_to_go != 1)
4489 emit_insn (arm_gen_store_multiple (0, out_words_to_go,
4490 dst, TRUE,
4491 (last_bytes == 0
4492 ? FALSE : TRUE),
4493 dst_unchanging_p,
4494 dst_in_struct_p,
4495 dst_scalar_p));
4496 else
4498 mem = gen_rtx_MEM (SImode, dst);
4499 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4500 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4501 MEM_SCALAR_P (mem) = dst_scalar_p;
4502 emit_move_insn (mem, gen_rtx_REG (SImode, 0));
4503 if (last_bytes != 0)
4504 emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
4508 in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
4509 out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
4512 /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do. */
4513 if (out_words_to_go)
4515 rtx sreg;
4517 mem = gen_rtx_MEM (SImode, src);
4518 RTX_UNCHANGING_P (mem) = src_unchanging_p;
4519 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
4520 MEM_SCALAR_P (mem) = src_scalar_p;
4521 emit_move_insn (sreg = gen_reg_rtx (SImode), mem);
4522 emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));
4524 mem = gen_rtx_MEM (SImode, dst);
4525 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4526 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4527 MEM_SCALAR_P (mem) = dst_scalar_p;
4528 emit_move_insn (mem, sreg);
4529 emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
4530 in_words_to_go--;
4532 if (in_words_to_go) /* Sanity check */
4533 abort ();
4536 if (in_words_to_go)
4538 if (in_words_to_go < 0)
4539 abort ();
4541 mem = gen_rtx_MEM (SImode, src);
4542 RTX_UNCHANGING_P (mem) = src_unchanging_p;
4543 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
4544 MEM_SCALAR_P (mem) = src_scalar_p;
4545 part_bytes_reg = copy_to_mode_reg (SImode, mem);
4548 if (last_bytes && part_bytes_reg == NULL)
4549 abort ();
4551 if (BYTES_BIG_ENDIAN && last_bytes)
4553 rtx tmp = gen_reg_rtx (SImode);
4555 /* The bytes we want are in the top end of the word. */
4556 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
4557 GEN_INT (8 * (4 - last_bytes))));
4558 part_bytes_reg = tmp;
4560 while (last_bytes)
4562 mem = gen_rtx_MEM (QImode, plus_constant (dst, last_bytes - 1));
4563 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4564 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4565 MEM_SCALAR_P (mem) = dst_scalar_p;
4566 emit_move_insn (mem, gen_rtx_SUBREG (QImode, part_bytes_reg, 0));
4568 if (--last_bytes)
4570 tmp = gen_reg_rtx (SImode);
4571 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
4572 part_bytes_reg = tmp;
4577 else
4579 if (last_bytes > 1)
4581 mem = gen_rtx_MEM (HImode, dst);
4582 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4583 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4584 MEM_SCALAR_P (mem) = dst_scalar_p;
4585 emit_move_insn (mem, gen_rtx_SUBREG (HImode, part_bytes_reg, 0));
4586 last_bytes -= 2;
4587 if (last_bytes)
4589 rtx tmp = gen_reg_rtx (SImode);
4591 emit_insn (gen_addsi3 (dst, dst, GEN_INT (2)));
4592 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (16)));
4593 part_bytes_reg = tmp;
4597 if (last_bytes)
4599 mem = gen_rtx_MEM (QImode, dst);
4600 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4601 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4602 MEM_SCALAR_P (mem) = dst_scalar_p;
4603 emit_move_insn (mem, gen_rtx_SUBREG (QImode, part_bytes_reg, 0));
4607 return 1;
4610 /* Generate a memory reference for a half word, such that it will be loaded
4611 into the top 16 bits of the word. We can assume that the address is
4612 known to be alignable and of the form reg, or plus (reg, const). */
4614 rtx
4615 arm_gen_rotated_half_load (memref)
4616 rtx memref;
4618 HOST_WIDE_INT offset = 0;
4619 rtx base = XEXP (memref, 0);
4621 if (GET_CODE (base) == PLUS)
4623 offset = INTVAL (XEXP (base, 1));
4624 base = XEXP (base, 0);
4627 /* If we aren't allowed to generate unaligned addresses, then fail. */
4628 if (TARGET_MMU_TRAPS
4629 && ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 0)))
4630 return NULL;
4632 base = gen_rtx_MEM (SImode, plus_constant (base, offset & ~2));
4634 if ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 2))
4635 return base;
4637 return gen_rtx_ROTATE (SImode, base, GEN_INT (16));
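/* A worked little-endian example (illustrative): for the halfword at
   [r0, #0] this returns (rotate (mem:SI r0) 16), moving the low half
   into the top 16 bits; for the halfword at [r0, #2] a plain word
   load of [r0] already has the wanted bits at the top, so the MEM is
   returned unrotated. */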
4640 /* Select a dominance comparison mode if possible. We support three forms.
4641 COND_OR == 0 => (X && Y)
4642 COND_OR == 1 => ((!X) || Y)
4643 COND_OR == 2 => (X || Y)
4644 If we are unable to support a dominance comparison, we return CC mode.
4645 This will then fail to match for the RTL expressions that generate this
4646 call. */
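/* Two small examples (illustrative): (and (lt x 0) (lt y 0)) uses LT
   on both arms, so a single compare in CC_DLTmode serves both tests;
   for (ior (eq x y) (leu a b)), EQ dominates LEU and CC_DLEUmode is
   returned. */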
4648 static enum machine_mode
4649 select_dominance_cc_mode (x, y, cond_or)
4650 rtx x;
4651 rtx y;
4652 HOST_WIDE_INT cond_or;
4654 enum rtx_code cond1, cond2;
4655 int swapped = 0;
4657 /* Currently we will probably get the wrong result if the individual
4658 comparisons are not simple. This also ensures that it is safe to
4659 reverse a comparison if necessary. */
4660 if ((arm_select_cc_mode (cond1 = GET_CODE (x), XEXP (x, 0), XEXP (x, 1))
4661 != CCmode)
4662 || (arm_select_cc_mode (cond2 = GET_CODE (y), XEXP (y, 0), XEXP (y, 1))
4663 != CCmode))
4664 return CCmode;
4666 /* The if_then_else variant of this tests the second condition if the
4667 first passes, but is true if the first fails. Reverse the first
4668 condition to get a true "inclusive-or" expression. */
4669 if (cond_or == 1)
4670 cond1 = reverse_condition (cond1);
4672 /* If the comparisons are not equal, and one doesn't dominate the other,
4673 then we can't do this. */
4674 if (cond1 != cond2
4675 && !comparison_dominates_p (cond1, cond2)
4676 && (swapped = 1, !comparison_dominates_p (cond2, cond1)))
4677 return CCmode;
4679 if (swapped)
4681 enum rtx_code temp = cond1;
4682 cond1 = cond2;
4683 cond2 = temp;
4686 switch (cond1)
4688 case EQ:
4689 if (cond2 == EQ || !cond_or)
4690 return CC_DEQmode;
4692 switch (cond2)
4694 case LE: return CC_DLEmode;
4695 case LEU: return CC_DLEUmode;
4696 case GE: return CC_DGEmode;
4697 case GEU: return CC_DGEUmode;
4698 default: break;
4701 break;
4703 case LT:
4704 if (cond2 == LT || !cond_or)
4705 return CC_DLTmode;
4706 if (cond2 == LE)
4707 return CC_DLEmode;
4708 if (cond2 == NE)
4709 return CC_DNEmode;
4710 break;
4712 case GT:
4713 if (cond2 == GT || !cond_or)
4714 return CC_DGTmode;
4715 if (cond2 == GE)
4716 return CC_DGEmode;
4717 if (cond2 == NE)
4718 return CC_DNEmode;
4719 break;
4721 case LTU:
4722 if (cond2 == LTU || !cond_or)
4723 return CC_DLTUmode;
4724 if (cond2 == LEU)
4725 return CC_DLEUmode;
4726 if (cond2 == NE)
4727 return CC_DNEmode;
4728 break;
4730 case GTU:
4731 if (cond2 == GTU || !cond_or)
4732 return CC_DGTUmode;
4733 if (cond2 == GEU)
4734 return CC_DGEUmode;
4735 if (cond2 == NE)
4736 return CC_DNEmode;
4737 break;
4739 /* The remaining cases only occur when both comparisons are the
4740 same. */
4741 case NE:
4742 return CC_DNEmode;
4744 case LE:
4745 return CC_DLEmode;
4747 case GE:
4748 return CC_DGEmode;
4750 case LEU:
4751 return CC_DLEUmode;
4753 case GEU:
4754 return CC_DGEUmode;
4756 default:
4757 break;
4760 abort ();
4763 enum machine_mode
4764 arm_select_cc_mode (op, x, y)
4765 enum rtx_code op;
4766 rtx x;
4767 rtx y;
4769 /* All floating point compares return CCFP if it is an equality
4770 comparison, and CCFPE otherwise. */
4771 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
4773 switch (op)
4775 case EQ:
4776 case NE:
4777 case UNORDERED:
4778 case ORDERED:
4779 case UNLT:
4780 case UNLE:
4781 case UNGT:
4782 case UNGE:
4783 case UNEQ:
4784 case LTGT:
4785 return CCFPmode;
4787 case LT:
4788 case LE:
4789 case GT:
4790 case GE:
4791 return CCFPEmode;
4793 default:
4794 abort ();
4798 /* A compare with a shifted operand. Because of canonicalization, the
4799 comparison will have to be swapped when we emit the assembler. */
4800 if (GET_MODE (y) == SImode && GET_CODE (y) == REG
4801 && (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
4802 || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
4803 || GET_CODE (x) == ROTATERT))
4804 return CC_SWPmode;
4806 /* This is a special case that is used by combine to allow a
4807 comparison of a shifted byte load to be split into a zero-extend
4808 followed by a comparison of the shifted integer (only valid for
4809 equalities and unsigned inequalities). */
4810 if (GET_MODE (x) == SImode
4811 && GET_CODE (x) == ASHIFT
4812 && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 24
4813 && GET_CODE (XEXP (x, 0)) == SUBREG
4814 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == MEM
4815 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == QImode
4816 && (op == EQ || op == NE
4817 || op == GEU || op == GTU || op == LTU || op == LEU)
4818 && GET_CODE (y) == CONST_INT)
4819 return CC_Zmode;
4821 /* A construct for a conditional compare: if the false arm contains
4822 0, then both conditions must be true; otherwise either condition
4823 must be true. Not all conditions are possible, so CCmode is
4824 returned if it can't be done. */
4825 if (GET_CODE (x) == IF_THEN_ELSE
4826 && (XEXP (x, 2) == const0_rtx
4827 || XEXP (x, 2) == const1_rtx)
4828 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4829 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
4830 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
4831 INTVAL (XEXP (x, 2)));
4833 /* Alternate canonicalizations of the above. These are somewhat cleaner. */
4834 if (GET_CODE (x) == AND
4835 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4836 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
4837 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1), 0);
4839 if (GET_CODE (x) == IOR
4840 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4841 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
4842 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1), 2);
4844 /* For an operation that sets the condition codes as a side-effect, the
4845 V flag is not set correctly, so we can only use comparisons where
4846 this doesn't matter. (For LT and GE we can use "mi" and "pl"
4847 instead.) */
4848 if (GET_MODE (x) == SImode
4849 && y == const0_rtx
4850 && (op == EQ || op == NE || op == LT || op == GE)
4851 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
4852 || GET_CODE (x) == AND || GET_CODE (x) == IOR
4853 || GET_CODE (x) == XOR || GET_CODE (x) == MULT
4854 || GET_CODE (x) == NOT || GET_CODE (x) == NEG
4855 || GET_CODE (x) == LSHIFTRT
4856 || GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
4857 || GET_CODE (x) == ROTATERT || GET_CODE (x) == ZERO_EXTRACT))
4858 return CC_NOOVmode;
4860 if (GET_MODE (x) == QImode && (op == EQ || op == NE))
4861 return CC_Zmode;
4863 if (GET_MODE (x) == SImode && (op == LTU || op == GEU)
4864 && GET_CODE (x) == PLUS
4865 && (rtx_equal_p (XEXP (x, 0), y) || rtx_equal_p (XEXP (x, 1), y)))
4866 return CC_Cmode;
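/* For instance (illustrative): combine expresses a carry-out test of
   an addition as (ltu (plus a b) a), which matches the pattern above,
   so the carry flag set by performing the addition can be tested
   directly. */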
4868 return CCmode;
4871 /* X and Y are two things to compare using CODE. Emit the compare insn and
4872 return the rtx for register 0 in the proper mode. FP means this is a
4873 floating point compare: I don't think that it is needed on the arm. */
4875 rtx
4876 arm_gen_compare_reg (code, x, y)
4877 enum rtx_code code;
4878 rtx x, y;
4880 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
4881 rtx cc_reg = gen_rtx_REG (mode, CC_REGNUM);
4883 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
4884 gen_rtx_COMPARE (mode, x, y)));
4886 return cc_reg;
4889 void
4890 arm_reload_in_hi (operands)
4891 rtx * operands;
4893 rtx ref = operands[1];
4894 rtx base, scratch;
4895 HOST_WIDE_INT offset = 0;
4897 if (GET_CODE (ref) == SUBREG)
4899 offset = SUBREG_BYTE (ref);
4900 ref = SUBREG_REG (ref);
4903 if (GET_CODE (ref) == REG)
4905 /* We have a pseudo which has been spilt onto the stack; there
4906 are two cases here: the first where there is a simple
4907 stack-slot replacement and a second where the stack-slot is
4908 out of range, or is used as a subreg. */
4909 if (reg_equiv_mem[REGNO (ref)])
4911 ref = reg_equiv_mem[REGNO (ref)];
4912 base = find_replacement (&XEXP (ref, 0));
4914 else
4915 /* The slot is out of range, or was dressed up in a SUBREG. */
4916 base = reg_equiv_address[REGNO (ref)];
4918 else
4919 base = find_replacement (&XEXP (ref, 0));
4921 /* Handle the case where the address is too complex to be offset by 1. */
4922 if (GET_CODE (base) == MINUS
4923 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
4925 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
4927 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
4928 base = base_plus;
4930 else if (GET_CODE (base) == PLUS)
4932 /* The addend must be CONST_INT, or we would have dealt with it above. */
4933 HOST_WIDE_INT hi, lo;
4935 offset += INTVAL (XEXP (base, 1));
4936 base = XEXP (base, 0);
4938 /* Rework the address into a legal sequence of insns. */
4939 /* Valid range for lo is -4095 -> 4095 */
4940 lo = (offset >= 0
4941 ? (offset & 0xfff)
4942 : -((-offset) & 0xfff));
4944 /* Corner case: if lo is the max offset then we would be out of range
4945 once we have added the additional 1 below, so bump the msb into the
4946 pre-loading insn(s). */
4947 if (lo == 4095)
4948 lo &= 0x7ff;
4950 hi = ((((offset - lo) & (HOST_WIDE_INT) 0xffffffff)
4951 ^ (HOST_WIDE_INT) 0x80000000)
4952 - (HOST_WIDE_INT) 0x80000000);
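/* Worked example (illustrative): offset = 0x2001 splits into lo = 1
   and hi = 0x2000, while offset = -0x1001 splits into lo = -1 and
   hi = -0x1000; in each case hi + lo == offset, hi can be built by
   addsi3 and lo stays within the +/-4095 load/store offset range. */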
4954 if (hi + lo != offset)
4955 abort ();
4957 if (hi != 0)
4959 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
4961 /* Get the base address; addsi3 knows how to handle constants
4962 that require more than one insn. */
4963 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
4964 base = base_plus;
4965 offset = lo;
4969 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
4970 emit_insn (gen_zero_extendqisi2 (scratch,
4971 gen_rtx_MEM (QImode,
4972 plus_constant (base,
4973 offset))));
4974 emit_insn (gen_zero_extendqisi2 (gen_rtx_SUBREG (SImode, operands[0], 0),
4975 gen_rtx_MEM (QImode,
4976 plus_constant (base,
4977 offset + 1))));
4978 if (!BYTES_BIG_ENDIAN)
4979 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
4980 gen_rtx_IOR (SImode,
4981 gen_rtx_ASHIFT
4982 (SImode,
4983 gen_rtx_SUBREG (SImode, operands[0], 0),
4984 GEN_INT (8)),
4985 scratch)));
4986 else
4987 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
4988 gen_rtx_IOR (SImode,
4989 gen_rtx_ASHIFT (SImode, scratch,
4990 GEN_INT (8)),
4991 gen_rtx_SUBREG (SImode, operands[0],
4992 0))));
4995 /* Handle storing a half-word to memory during reload by synthesising as two
4996 byte stores. Take care not to clobber the input values until after we
4997 have moved them somewhere safe. This code assumes that if the DImode
4998 scratch in operands[2] overlaps either the input value or output address
4999 in some way, then that value must die in this insn (we absolutely need
5000 two scratch registers for some corner cases). */
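/* In outline, the little-endian sequence emitted below is
   (illustrative):
   strb outval, [base, #offset]
   mov scratch, outval, lsr #8
   strb scratch, [base, #offset + 1]
   with the big-endian case storing the two bytes the other way
   round. */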
5002 void
5003 arm_reload_out_hi (operands)
5004 rtx * operands;
5006 rtx ref = operands[0];
5007 rtx outval = operands[1];
5008 rtx base, scratch;
5009 HOST_WIDE_INT offset = 0;
5011 if (GET_CODE (ref) == SUBREG)
5013 offset = SUBREG_BYTE (ref);
5014 ref = SUBREG_REG (ref);
5017 if (GET_CODE (ref) == REG)
5019 /* We have a pseudo which has been spilt onto the stack; there
5020 are two cases here: the first where there is a simple
5021 stack-slot replacement and a second where the stack-slot is
5022 out of range, or is used as a subreg. */
5023 if (reg_equiv_mem[REGNO (ref)])
5025 ref = reg_equiv_mem[REGNO (ref)];
5026 base = find_replacement (&XEXP (ref, 0));
5028 else
5029 /* The slot is out of range, or was dressed up in a SUBREG. */
5030 base = reg_equiv_address[REGNO (ref)];
5032 else
5033 base = find_replacement (&XEXP (ref, 0));
5035 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
5037 /* Handle the case where the address is too complex to be offset by 1. */
5038 if (GET_CODE (base) == MINUS
5039 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
5041 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5043 /* Be careful not to destroy OUTVAL. */
5044 if (reg_overlap_mentioned_p (base_plus, outval))
5046 /* Updating base_plus might destroy outval; see if we can
5047 swap the scratch and base_plus. */
5048 if (!reg_overlap_mentioned_p (scratch, outval))
5050 rtx tmp = scratch;
5051 scratch = base_plus;
5052 base_plus = tmp;
5054 else
5056 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
5058 /* Be conservative and copy OUTVAL into the scratch now;
5059 this should only be necessary if outval is a subreg
5060 of something larger than a word. */
5061 /* XXX Might this clobber base? I can't see how it can,
5062 since scratch is known to overlap with OUTVAL, and
5063 must be wider than a word. */
5064 emit_insn (gen_movhi (scratch_hi, outval));
5065 outval = scratch_hi;
5069 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
5070 base = base_plus;
5072 else if (GET_CODE (base) == PLUS)
5074 /* The addend must be CONST_INT, or we would have dealt with it above. */
5075 HOST_WIDE_INT hi, lo;
5077 offset += INTVAL (XEXP (base, 1));
5078 base = XEXP (base, 0);
5080 /* Rework the address into a legal sequence of insns. */
5081 /* Valid range for lo is -4095 -> 4095 */
5082 lo = (offset >= 0
5083 ? (offset & 0xfff)
5084 : -((-offset) & 0xfff));
5086 /* Corner case: if lo is the max offset then we would be out of range
5087 once we have added the additional 1 below, so bump the msb into the
5088 pre-loading insn(s). */
5089 if (lo == 4095)
5090 lo &= 0x7ff;
5092 hi = ((((offset - lo) & (HOST_WIDE_INT) 0xffffffff)
5093 ^ (HOST_WIDE_INT) 0x80000000)
5094 - (HOST_WIDE_INT) 0x80000000);
5096 if (hi + lo != offset)
5097 abort ();
5099 if (hi != 0)
5101 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5103 /* Be careful not to destroy OUTVAL. */
5104 if (reg_overlap_mentioned_p (base_plus, outval))
5106 /* Updating base_plus might destroy outval; see if we
5107 can swap the scratch and base_plus. */
5108 if (!reg_overlap_mentioned_p (scratch, outval))
5110 rtx tmp = scratch;
5111 scratch = base_plus;
5112 base_plus = tmp;
5114 else
5116 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
5118 /* Be conservative and copy outval into scratch now;
5119 this should only be necessary if outval is a
5120 subreg of something larger than a word. */
5121 /* XXX Might this clobber base? I can't see how it
5122 can, since scratch is known to overlap with
5123 outval. */
5124 emit_insn (gen_movhi (scratch_hi, outval));
5125 outval = scratch_hi;
5129 /* Get the base address; addsi3 knows how to handle constants
5130 that require more than one insn. */
5131 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
5132 base = base_plus;
5133 offset = lo;
5137 if (BYTES_BIG_ENDIAN)
5139 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
5140 plus_constant (base, offset + 1)),
5141 gen_rtx_SUBREG (QImode, outval, 0)));
5142 emit_insn (gen_lshrsi3 (scratch,
5143 gen_rtx_SUBREG (SImode, outval, 0),
5144 GEN_INT (8)));
5145 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
5146 gen_rtx_SUBREG (QImode, scratch, 0)));
5148 else
5150 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
5151 gen_rtx_SUBREG (QImode, outval, 0)));
5152 emit_insn (gen_lshrsi3 (scratch,
5153 gen_rtx_SUBREG (SImode, outval, 0),
5154 GEN_INT (8)));
5155 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
5156 plus_constant (base, offset + 1)),
5157 gen_rtx_SUBREG (QImode, scratch, 0)));
5161 /* Print a symbolic form of X to the debug file, F. */
5163 static void
5164 arm_print_value (f, x)
5165 FILE * f;
5166 rtx x;
5168 switch (GET_CODE (x))
5170 case CONST_INT:
5171 fprintf (f, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
5172 return;
5174 case CONST_DOUBLE:
5175 fprintf (f, "<0x%lx,0x%lx>", (long)XWINT (x, 2), (long)XWINT (x, 3));
5176 return;
5178 case CONST_STRING:
5179 fprintf (f, "\"%s\"", XSTR (x, 0));
5180 return;
5182 case SYMBOL_REF:
5183 fprintf (f, "`%s'", XSTR (x, 0));
5184 return;
5186 case LABEL_REF:
5187 fprintf (f, "L%d", INSN_UID (XEXP (x, 0)));
5188 return;
5190 case CONST:
5191 arm_print_value (f, XEXP (x, 0));
5192 return;
5194 case PLUS:
5195 arm_print_value (f, XEXP (x, 0));
5196 fprintf (f, "+");
5197 arm_print_value (f, XEXP (x, 1));
5198 return;
5200 case PC:
5201 fprintf (f, "pc");
5202 return;
5204 default:
5205 fprintf (f, "????");
5206 return;
5210 /* Routines for manipulation of the constant pool. */
5212 /* Arm instructions cannot load a large constant directly into a
5213 register; they have to come from a pc relative load. The constant
5214 must therefore be placed in the addressable range of the pc
5215 relative load. Depending on the precise pc relative load
5216 instruction the range is somewhere between 256 bytes and 4k. This
5217 means that we often have to dump a constant inside a function, and
5218 generate code to branch around it.
5220 It is important to minimize this, since the branches will slow
5221 things down and make the code larger.
5223 Normally we can hide the table after an existing unconditional
5224 branch so that there is no interruption of the flow, but in the
5225 worst case the code looks like this:
5227 ldr rn, L1
5229 b L2
5230 align
5231 L1: .long value
5235 ldr rn, L3
5237 b L4
5238 align
5239 L3: .long value
5243 We fix this by performing a scan after scheduling, which notices
5244 which instructions need to have their operands fetched from the
5245 constant table and builds the table.
5247 The algorithm starts by building a table of all the constants that
5248 need fixing up and all the natural barriers in the function (places
5249 where a constant table can be dropped without breaking the flow).
5250 For each fixup we note how far the pc-relative replacement will be
5251 able to reach and the offset of the instruction into the function.
5253 Having built the table we then group the fixes together to form
5254 tables that are as large as possible (subject to addressing
5255 constraints) and emit each table of constants after the last
5256 barrier that is within range of all the instructions in the group.
5257 If a group does not contain a barrier, then we forcibly create one
5258 by inserting a jump instruction into the flow. Once the table has
5259 been inserted, the insns are then modified to reference the
5260 relevant entry in the pool.
5262 Possible enhancements to the algorithm (not implemented) are:
5264 1) For some processors and object formats, there may be benefit in
5265 aligning the pools to the start of cache lines; this alignment
5266 would need to be taken into account when calculating addressability
5267 of a pool. */
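/* A concrete illustration (example numbers, not from the source): an
   ldr at address 0x100 with a 4k pool range must have its constant
   emitted before address 0x1100; if the only barrier in range is an
   unconditional branch at 0x800 the pool is dumped there, otherwise a
   branch around a forced barrier is inserted as described above. */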
5269 /* These typedefs are located at the start of this file, so that
5270 they can be used in the prototypes there. This comment is to
5271 remind readers of that fact so that the following structures
5272 can be understood more easily.
5274 typedef struct minipool_node Mnode;
5275 typedef struct minipool_fixup Mfix; */
5277 struct minipool_node
5279 /* Doubly linked chain of entries. */
5280 Mnode * next;
5281 Mnode * prev;
5282 /* The maximum offset into the code at which this entry can be placed. While
5283 pushing fixes for forward references, all entries are sorted in order
5284 of increasing max_address. */
5285 HOST_WIDE_INT max_address;
5286 /* Similarly for an entry inserted for a backwards ref. */
5287 HOST_WIDE_INT min_address;
5288 /* The number of fixes referencing this entry. This can become zero
5289 if we "unpush" an entry. In this case we ignore the entry when we
5290 come to emit the code. */
5291 int refcount;
5292 /* The offset from the start of the minipool. */
5293 HOST_WIDE_INT offset;
5294 /* The value in the table. */
5295 rtx value;
5296 /* The mode of the value. */
5297 enum machine_mode mode;
5298 int fix_size;
5301 struct minipool_fixup
5303 Mfix * next;
5304 rtx insn;
5305 HOST_WIDE_INT address;
5306 rtx * loc;
5307 enum machine_mode mode;
5308 int fix_size;
5309 rtx value;
5310 Mnode * minipool;
5311 HOST_WIDE_INT forwards;
5312 HOST_WIDE_INT backwards;
5315 /* Fixes less than a word need padding out to a word boundary. */
5316 #define MINIPOOL_FIX_SIZE(mode) \
5317 (GET_MODE_SIZE ((mode)) >= 4 ? GET_MODE_SIZE ((mode)) : 4)
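/* So, for example, MINIPOOL_FIX_SIZE (HImode) == 4 (a 2-byte value
   padded to a word) while MINIPOOL_FIX_SIZE (DImode) == 8. */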
5319 static Mnode * minipool_vector_head;
5320 static Mnode * minipool_vector_tail;
5321 static rtx minipool_vector_label;
5323 /* The linked list of all minipool fixes required for this function. */
5324 Mfix * minipool_fix_head;
5325 Mfix * minipool_fix_tail;
5326 /* The fix entry for the current minipool, once it has been placed. */
5327 Mfix * minipool_barrier;
5329 /* Determines if INSN is the start of a jump table. Returns the end
5330 of the TABLE or NULL_RTX. */
5332 static rtx
5333 is_jump_table (insn)
5334 rtx insn;
5336 rtx table;
5338 if (GET_CODE (insn) == JUMP_INSN
5339 && JUMP_LABEL (insn) != NULL
5340 && ((table = next_real_insn (JUMP_LABEL (insn)))
5341 == next_real_insn (insn))
5342 && table != NULL
5343 && GET_CODE (table) == JUMP_INSN
5344 && (GET_CODE (PATTERN (table)) == ADDR_VEC
5345 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
5346 return table;
5348 return NULL_RTX;
5351 static HOST_WIDE_INT
5352 get_jump_table_size (insn)
5353 rtx insn;
5355 rtx body = PATTERN (insn);
5356 int elt = GET_CODE (body) == ADDR_DIFF_VEC ? 1 : 0;
5358 return GET_MODE_SIZE (GET_MODE (body)) * XVECLEN (body, elt);
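/* For example (illustrative): an ADDR_DIFF_VEC in SImode with 10
   entries occupies 4 * 10 == 40 bytes. */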
5361 /* Move a minipool fix MP from its current location to before MAX_MP.
5362 If MAX_MP is NULL, then MP doesn't need moving, but the addressing
5363 constraints may need updating. */
5365 static Mnode *
5366 move_minipool_fix_forward_ref (mp, max_mp, max_address)
5367 Mnode * mp;
5368 Mnode * max_mp;
5369 HOST_WIDE_INT max_address;
5371 /* This should never be true, and the code below assumes these are
5372 different. */
5373 if (mp == max_mp)
5374 abort ();
5376 if (max_mp == NULL)
5378 if (max_address < mp->max_address)
5379 mp->max_address = max_address;
5381 else
5383 if (max_address > max_mp->max_address - mp->fix_size)
5384 mp->max_address = max_mp->max_address - mp->fix_size;
5385 else
5386 mp->max_address = max_address;
5388 /* Unlink MP from its current position. Since max_mp is non-null,
5389 mp->prev must be non-null. */
5390 mp->prev->next = mp->next;
5391 if (mp->next != NULL)
5392 mp->next->prev = mp->prev;
5393 else
5394 minipool_vector_tail = mp->prev;
5396 /* Re-insert it before MAX_MP. */
5397 mp->next = max_mp;
5398 mp->prev = max_mp->prev;
5399 max_mp->prev = mp;
5401 if (mp->prev != NULL)
5402 mp->prev->next = mp;
5403 else
5404 minipool_vector_head = mp;
5407 /* Save the new entry. */
5408 max_mp = mp;
5410 /* Scan over the preceding entries and adjust their addresses as
5411 required. */
5412 while (mp->prev != NULL
5413 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
5415 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
5416 mp = mp->prev;
5419 return max_mp;
5422 /* Add a constant to the minipool for a forward reference. Returns the
5423 node added or NULL if the constant will not fit in this pool. */
5425 static Mnode *
5426 add_minipool_forward_ref (fix)
5427 Mfix * fix;
5429 /* If set, max_mp is the first pool_entry that has a lower
5430 constraint than the one we are trying to add. */
5431 Mnode * max_mp = NULL;
5432 HOST_WIDE_INT max_address = fix->address + fix->forwards;
5433 Mnode * mp;
5435 /* If this fix's address is greater than the address of the first
5436 entry, then we can't put the fix in this pool. We subtract the
5437 size of the current fix to ensure that if the table is fully
5438 packed we still have enough room to insert this value by shuffling
5439 the other fixes forwards. */
5440 if (minipool_vector_head &&
5441 fix->address >= minipool_vector_head->max_address - fix->fix_size)
5442 return NULL;
5444 /* Scan the pool to see if a constant with the same value has
5445 already been added. While we are doing this, also note the
5446 location where we must insert the constant if it doesn't already
5447 exist. */
5448 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
5450 if (GET_CODE (fix->value) == GET_CODE (mp->value)
5451 && fix->mode == mp->mode
5452 && (GET_CODE (fix->value) != CODE_LABEL
5453 || (CODE_LABEL_NUMBER (fix->value)
5454 == CODE_LABEL_NUMBER (mp->value)))
5455 && rtx_equal_p (fix->value, mp->value))
5457 /* More than one fix references this entry. */
5458 mp->refcount++;
5459 return move_minipool_fix_forward_ref (mp, max_mp, max_address);
5462 /* Note the insertion point if necessary. */
5463 if (max_mp == NULL
5464 && mp->max_address > max_address)
5465 max_mp = mp;
5468 /* The value is not currently in the minipool, so we need to create
5469 a new entry for it. If MAX_MP is NULL, the entry will be put on
5470 the end of the list since the placement is less constrained than
5471 any existing entry. Otherwise, we insert the new fix before
5472 MAX_MP and, if necessary, adjust the constraints on the other
5473 entries. */
5474 mp = xmalloc (sizeof (* mp));
5475 mp->fix_size = fix->fix_size;
5476 mp->mode = fix->mode;
5477 mp->value = fix->value;
5478 mp->refcount = 1;
5479 /* Not yet required for a backwards ref. */
5480 mp->min_address = -65536;
5482 if (max_mp == NULL)
5484 mp->max_address = max_address;
5485 mp->next = NULL;
5486 mp->prev = minipool_vector_tail;
5488 if (mp->prev == NULL)
5490 minipool_vector_head = mp;
5491 minipool_vector_label = gen_label_rtx ();
5493 else
5494 mp->prev->next = mp;
5496 minipool_vector_tail = mp;
5498 else
5500 if (max_address > max_mp->max_address - mp->fix_size)
5501 mp->max_address = max_mp->max_address - mp->fix_size;
5502 else
5503 mp->max_address = max_address;
5505 mp->next = max_mp;
5506 mp->prev = max_mp->prev;
5507 max_mp->prev = mp;
5508 if (mp->prev != NULL)
5509 mp->prev->next = mp;
5510 else
5511 minipool_vector_head = mp;
5514 /* Save the new entry. */
5515 max_mp = mp;
5517 /* Scan over the preceding entries and adjust their addresses as
5518 required. */
5519 while (mp->prev != NULL
5520 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
5522 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
5523 mp = mp->prev;
5526 return max_mp;
5529 static Mnode *
5530 move_minipool_fix_backward_ref (mp, min_mp, min_address)
5531 Mnode * mp;
5532 Mnode * min_mp;
5533 HOST_WIDE_INT min_address;
5535 HOST_WIDE_INT offset;
5537 /* This should never be true, and the code below assumes these are
5538 different. */
5539 if (mp == min_mp)
5540 abort ();
5542 if (min_mp == NULL)
5544 if (min_address > mp->min_address)
5545 mp->min_address = min_address;
5547 else
5549 /* We will adjust this below if it is too loose. */
5550 mp->min_address = min_address;
5552 /* Unlink MP from its current position. Since min_mp is non-null,
5553 mp->next must be non-null. */
5554 mp->next->prev = mp->prev;
5555 if (mp->prev != NULL)
5556 mp->prev->next = mp->next;
5557 else
5558 minipool_vector_head = mp->next;
5560 /* Reinsert it after MIN_MP. */
5561 mp->prev = min_mp;
5562 mp->next = min_mp->next;
5563 min_mp->next = mp;
5564 if (mp->next != NULL)
5565 mp->next->prev = mp;
5566 else
5567 minipool_vector_tail = mp;
5570 min_mp = mp;
5572 offset = 0;
5573 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
5575 mp->offset = offset;
5576 if (mp->refcount > 0)
5577 offset += mp->fix_size;
5579 if (mp->next && mp->next->min_address < mp->min_address + mp->fix_size)
5580 mp->next->min_address = mp->min_address + mp->fix_size;
5583 return min_mp;
5586 /* Add a constant to the minipool for a backward reference. Returns the
5587 node added or NULL if the constant will not fit in this pool.
5589 Note that the code for insertion for a backwards reference can be
5590 somewhat confusing because the calculated offsets for each fix do
5591 not take into account the size of the pool (which is still under
5592 construction). */
5594 static Mnode *
5595 add_minipool_backward_ref (fix)
5596 Mfix * fix;
5598 /* If set, min_mp is the last pool_entry that has a lower constraint
5599 than the one we are trying to add. */
5600 Mnode * min_mp = NULL;
5601 /* This can be negative, since it is only a constraint. */
5602 HOST_WIDE_INT min_address = fix->address - fix->backwards;
5603 Mnode * mp;
5605 /* If we can't reach the current pool from this insn, or if we can't
5606 insert this entry at the end of the pool without pushing other
5607 fixes out of range, then we don't try. This ensures that we
5608 can't fail later on. */
5609 if (min_address >= minipool_barrier->address
5610 || (minipool_vector_tail->min_address + fix->fix_size
5611 >= minipool_barrier->address))
5612 return NULL;
5614 /* Scan the pool to see if a constant with the same value has
5615 already been added. While we are doing this, also note the
5616 location where we must insert the constant if it doesn't already
5617 exist. */
5618 for (mp = minipool_vector_tail; mp != NULL; mp = mp->prev)
5620 if (GET_CODE (fix->value) == GET_CODE (mp->value)
5621 && fix->mode == mp->mode
5622 && (GET_CODE (fix->value) != CODE_LABEL
5623 || (CODE_LABEL_NUMBER (fix->value)
5624 == CODE_LABEL_NUMBER (mp->value)))
5625 && rtx_equal_p (fix->value, mp->value)
5626 /* Check that there is enough slack to move this entry to the
5627 end of the table (this is conservative). */
5628 && (mp->max_address
5629 > (minipool_barrier->address
5630 + minipool_vector_tail->offset
5631 + minipool_vector_tail->fix_size)))
5633 mp->refcount++;
5634 return move_minipool_fix_backward_ref (mp, min_mp, min_address);
5637 if (min_mp != NULL)
5638 mp->min_address += fix->fix_size;
5639 else
5641 /* Note the insertion point if necessary. */
5642 if (mp->min_address < min_address)
5643 min_mp = mp;
5644 else if (mp->max_address
5645 < minipool_barrier->address + mp->offset + fix->fix_size)
5647 /* Inserting before this entry would push the fix beyond
5648 its maximum address (which can happen if we have
5649 re-located a forwards fix); force the new fix to come
5650 after it. */
5651 min_mp = mp;
5652 min_address = mp->min_address + fix->fix_size;
5657 /* We need to create a new entry. */
5658 mp = xmalloc (sizeof (* mp));
5659 mp->fix_size = fix->fix_size;
5660 mp->mode = fix->mode;
5661 mp->value = fix->value;
5662 mp->refcount = 1;
5663 mp->max_address = minipool_barrier->address + 65536;
5665 mp->min_address = min_address;
5667 if (min_mp == NULL)
5669 mp->prev = NULL;
5670 mp->next = minipool_vector_head;
5672 if (mp->next == NULL)
5674 minipool_vector_tail = mp;
5675 minipool_vector_label = gen_label_rtx ();
5677 else
5678 mp->next->prev = mp;
5680 minipool_vector_head = mp;
5682 else
5684 mp->next = min_mp->next;
5685 mp->prev = min_mp;
5686 min_mp->next = mp;
5688 if (mp->next != NULL)
5689 mp->next->prev = mp;
5690 else
5691 minipool_vector_tail = mp;
5694 /* Save the new entry. */
5695 min_mp = mp;
5697 if (mp->prev)
5698 mp = mp->prev;
5699 else
5700 mp->offset = 0;
5702 /* Scan over the following entries and adjust their offsets. */
5703 while (mp->next != NULL)
5705 if (mp->next->min_address < mp->min_address + mp->fix_size)
5706 mp->next->min_address = mp->min_address + mp->fix_size;
5708 if (mp->refcount)
5709 mp->next->offset = mp->offset + mp->fix_size;
5710 else
5711 mp->next->offset = mp->offset;
5713 mp = mp->next;
5716 return min_mp;
5719 static void
5720 assign_minipool_offsets (barrier)
5721 Mfix * barrier;
5723 HOST_WIDE_INT offset = 0;
5724 Mnode * mp;
5726 minipool_barrier = barrier;
5728 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
5730 mp->offset = offset;
5732 if (mp->refcount > 0)
5733 offset += mp->fix_size;
5737 /* Output the literal table. */
5738 static void
5739 dump_minipool (scan)
5740 rtx scan;
5742 Mnode * mp;
5743 Mnode * nmp;
5745 if (rtl_dump_file)
5746 fprintf (rtl_dump_file,
5747 ";; Emitting minipool after insn %u; address %ld\n",
5748 INSN_UID (scan), (unsigned long) minipool_barrier->address);
5750 scan = emit_label_after (gen_label_rtx (), scan);
5751 scan = emit_insn_after (gen_align_4 (), scan);
5752 scan = emit_label_after (minipool_vector_label, scan);
5754 for (mp = minipool_vector_head; mp != NULL; mp = nmp)
5756 if (mp->refcount > 0)
5758 if (rtl_dump_file)
5760 fprintf (rtl_dump_file,
5761 ";; Offset %u, min %ld, max %ld ",
5762 (unsigned) mp->offset, (unsigned long) mp->min_address,
5763 (unsigned long) mp->max_address);
5764 arm_print_value (rtl_dump_file, mp->value);
5765 fputc ('\n', rtl_dump_file);
5768 switch (mp->fix_size)
5770 #ifdef HAVE_consttable_1
5771 case 1:
5772 scan = emit_insn_after (gen_consttable_1 (mp->value), scan);
5773 break;
5775 #endif
5776 #ifdef HAVE_consttable_2
5777 case 2:
5778 scan = emit_insn_after (gen_consttable_2 (mp->value), scan);
5779 break;
5781 #endif
5782 #ifdef HAVE_consttable_4
5783 case 4:
5784 scan = emit_insn_after (gen_consttable_4 (mp->value), scan);
5785 break;
5787 #endif
5788 #ifdef HAVE_consttable_8
5789 case 8:
5790 scan = emit_insn_after (gen_consttable_8 (mp->value), scan);
5791 break;
5793 #endif
5794 default:
5795 abort ();
5796 break;
5800 nmp = mp->next;
5801 free (mp);
5804 minipool_vector_head = minipool_vector_tail = NULL;
5805 scan = emit_insn_after (gen_consttable_end (), scan);
5806 scan = emit_barrier_after (scan);
5809 /* Return the cost of forcibly inserting a barrier after INSN. */
5811 static int
5812 arm_barrier_cost (insn)
5813 rtx insn;
5815 /* Basing the location of the pool on the loop depth is preferable,
5816 but at the moment, the basic block information seems to be
5817 corrupted by this stage of the compilation.
5818 int base_cost = 50;
5819 rtx next = next_nonnote_insn (insn);
5821 if (next != NULL && GET_CODE (next) == CODE_LABEL)
5822 base_cost -= 20;
5824 switch (GET_CODE (insn))
5826 case CODE_LABEL:
5827 /* It will always be better to place the table before the label, rather
5828 than after it. */
5829 return 50;
5831 case INSN:
5832 case CALL_INSN:
5833 return base_cost;
5835 case JUMP_INSN:
5836 return base_cost - 10;
5838 default:
5839 return base_cost + 10;
5843 /* Find the best place in the insn stream in the range
5844 (FIX->address,MAX_ADDRESS) to forcibly insert a minipool barrier.
5845 Create the barrier by inserting a jump and add a new fix entry for
5846 it. */
5848 static Mfix *
5849 create_fix_barrier (fix, max_address)
5850 Mfix * fix;
5851 HOST_WIDE_INT max_address;
5853 HOST_WIDE_INT count = 0;
5854 rtx barrier;
5855 rtx from = fix->insn;
5856 rtx selected = from;
5857 int selected_cost;
5858 HOST_WIDE_INT selected_address;
5859 Mfix * new_fix;
5860 HOST_WIDE_INT max_count = max_address - fix->address;
5861 rtx label = gen_label_rtx ();
5863 selected_cost = arm_barrier_cost (from);
5864 selected_address = fix->address;
5866 while (from && count < max_count)
5868 rtx tmp;
5869 int new_cost;
5871 /* This code shouldn't have been called if there was a natural barrier
5872 within range. */
5873 if (GET_CODE (from) == BARRIER)
5874 abort ();
5876 /* Count the length of this insn. */
5877 count += get_attr_length (from);
5879 /* If there is a jump table, add its length. */
5880 tmp = is_jump_table (from);
5881 if (tmp != NULL)
5883 count += get_jump_table_size (tmp);
5885 /* Jump tables aren't in a basic block, so base the cost on
5886 the dispatch insn. If we select this location, we will
5887 still put the pool after the table. */
5888 new_cost = arm_barrier_cost (from);
5890 if (count < max_count && new_cost <= selected_cost)
5892 selected = tmp;
5893 selected_cost = new_cost;
5894 selected_address = fix->address + count;
5897 /* Continue after the dispatch table. */
5898 from = NEXT_INSN (tmp);
5899 continue;
5902 new_cost = arm_barrier_cost (from);
5904 if (count < max_count && new_cost <= selected_cost)
5906 selected = from;
5907 selected_cost = new_cost;
5908 selected_address = fix->address + count;
5911 from = NEXT_INSN (from);
5914 /* Create a new JUMP_INSN that branches around a barrier. */
5915 from = emit_jump_insn_after (gen_jump (label), selected);
5916 JUMP_LABEL (from) = label;
5917 barrier = emit_barrier_after (from);
5918 emit_label_after (label, barrier);
5920 /* Create a minipool barrier entry for the new barrier. */
5921 new_fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* new_fix));
5922 new_fix->insn = barrier;
5923 new_fix->address = selected_address;
5924 new_fix->next = fix->next;
5925 fix->next = new_fix;
5927 return new_fix;
5930 /* Record that there is a natural barrier in the insn stream at
5931 ADDRESS. */
5932 static void
5933 push_minipool_barrier (insn, address)
5934 rtx insn;
5935 HOST_WIDE_INT address;
5937 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
5939 fix->insn = insn;
5940 fix->address = address;
5942 fix->next = NULL;
5943 if (minipool_fix_head != NULL)
5944 minipool_fix_tail->next = fix;
5945 else
5946 minipool_fix_head = fix;
5948 minipool_fix_tail = fix;
5951 /* Record INSN, which will need fixing up to load a value from the
5952 minipool. ADDRESS is the offset of the insn from the start of the
5953 function; LOC is a pointer to the part of the insn which requires
5954 fixing; VALUE is the constant that must be loaded, which is of type
5955 MODE. */
5956 static void
5957 push_minipool_fix (insn, address, loc, mode, value)
5958 rtx insn;
5959 HOST_WIDE_INT address;
5960 rtx * loc;
5961 enum machine_mode mode;
5962 rtx value;
5964 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
5966 #ifdef AOF_ASSEMBLER
5967 /* PIC symbol references need to be converted into offsets into the
5968 based area. */
5969 /* XXX This shouldn't be done here. */
5970 if (flag_pic && GET_CODE (value) == SYMBOL_REF)
5971 value = aof_pic_entry (value);
5972 #endif /* AOF_ASSEMBLER */
5974 fix->insn = insn;
5975 fix->address = address;
5976 fix->loc = loc;
5977 fix->mode = mode;
5978 fix->fix_size = MINIPOOL_FIX_SIZE (mode);
5979 fix->value = value;
5980 fix->forwards = get_attr_pool_range (insn);
5981 fix->backwards = get_attr_neg_pool_range (insn);
5982 fix->minipool = NULL;
5984 /* If an insn doesn't have a range defined for it, then it isn't
5985 expecting to be reworked by this code. Better to abort now than
5986 to generate duff assembly code. */
5987 if (fix->forwards == 0 && fix->backwards == 0)
5988 abort ();
5990 if (rtl_dump_file)
5992 fprintf (rtl_dump_file,
5993 ";; %smode fixup for i%d; addr %lu, range (%ld,%ld): ",
5994 GET_MODE_NAME (mode),
5995 INSN_UID (insn), (unsigned long) address,
5996 -1 * (long)fix->backwards, (long)fix->forwards);
5997 arm_print_value (rtl_dump_file, fix->value);
5998 fprintf (rtl_dump_file, "\n");
6001 /* Add it to the chain of fixes. */
6002 fix->next = NULL;
6004 if (minipool_fix_head != NULL)
6005 minipool_fix_tail->next = fix;
6006 else
6007 minipool_fix_head = fix;
6009 minipool_fix_tail = fix;
6012 /* Scan INSN and note any of its operands that need fixing. */
6014 static void
6015 note_invalid_constants (insn, address)
6016 rtx insn;
6017 HOST_WIDE_INT address;
6019 int opno;
6021 extract_insn (insn);
6023 if (!constrain_operands (1))
6024 fatal_insn_not_found (insn);
6026 /* Fill in recog_op_alt with information about the constraints of this
6027 insn. */
6028 preprocess_constraints ();
6030 for (opno = 0; opno < recog_data.n_operands; opno++)
6032 /* Things we need to fix can only occur in inputs. */
6033 if (recog_data.operand_type[opno] != OP_IN)
6034 continue;
6036 /* If this alternative is a memory reference, then any mention
6037 of constants in this alternative is really to fool reload
6038 into allowing us to accept one there. We need to fix them up
6039 now so that we output the right code. */
6040 if (recog_op_alt[opno][which_alternative].memory_ok)
6042 rtx op = recog_data.operand[opno];
6044 if (CONSTANT_P (op))
6045 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
6046 recog_data.operand_mode[opno], op);
6047 #if 0
6048 /* RWE: Now we look correctly at the operands for the insn,
6049 this shouldn't be needed any more. */
6050 #ifndef AOF_ASSEMBLER
6051 /* XXX Is this still needed? */
6052 else if (GET_CODE (op) == UNSPEC && XINT (op, 1) == UNSPEC_PIC_SYM)
6053 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
6054 recog_data.operand_mode[opno],
6055 XVECEXP (op, 0, 0));
6056 #endif
6057 #endif
6058 else if (GET_CODE (op) == MEM
6059 && GET_CODE (XEXP (op, 0)) == SYMBOL_REF
6060 && CONSTANT_POOL_ADDRESS_P (XEXP (op, 0)))
6061 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
6062 recog_data.operand_mode[opno],
6063 get_pool_constant (XEXP (op, 0)));
6068 void
6069 arm_reorg (first)
6070 rtx first;
6072 rtx insn;
6073 HOST_WIDE_INT address = 0;
6074 Mfix * fix;
6076 minipool_fix_head = minipool_fix_tail = NULL;
6078 /* The first insn must always be a note, or the code below won't
6079 scan it properly. */
6080 if (GET_CODE (first) != NOTE)
6081 abort ();
6083 /* Scan all the insns and record the operands that will need fixing. */
6084 for (insn = next_nonnote_insn (first); insn; insn = next_nonnote_insn (insn))
6086 if (GET_CODE (insn) == BARRIER)
6087 push_minipool_barrier (insn, address);
6088 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
6089 || GET_CODE (insn) == JUMP_INSN)
6091 rtx table;
6093 note_invalid_constants (insn, address);
6094 address += get_attr_length (insn);
6096 /* If the insn is a vector jump, add the size of the table
6097 and skip the table. */
6098 if ((table = is_jump_table (insn)) != NULL)
6100 address += get_jump_table_size (table);
6101 insn = table;
6106 fix = minipool_fix_head;
6108 /* Now scan the fixups and perform the required changes. */
6109 while (fix)
6111 Mfix * ftmp;
6112 Mfix * fdel;
6113 Mfix * last_added_fix;
6114 Mfix * last_barrier = NULL;
6115 Mfix * this_fix;
6117 /* Skip any further barriers before the next fix. */
6118 while (fix && GET_CODE (fix->insn) == BARRIER)
6119 fix = fix->next;
6121 /* No more fixes. */
6122 if (fix == NULL)
6123 break;
6125 last_added_fix = NULL;
6127 for (ftmp = fix; ftmp; ftmp = ftmp->next)
6129 if (GET_CODE (ftmp->insn) == BARRIER)
6131 if (ftmp->address >= minipool_vector_head->max_address)
6132 break;
6134 last_barrier = ftmp;
6136 else if ((ftmp->minipool = add_minipool_forward_ref (ftmp)) == NULL)
6137 break;
6139 last_added_fix = ftmp; /* Keep track of the last fix added. */
6142 /* If we found a barrier, drop back to that; any fixes that we
6143 could have reached but come after the barrier will now go in
6144 the next mini-pool. */
6145 if (last_barrier != NULL)
6147 /* Reduce the refcount for those fixes that won't go into this
6148 pool after all. */
6149 for (fdel = last_barrier->next;
6150 fdel && fdel != ftmp;
6151 fdel = fdel->next)
6153 fdel->minipool->refcount--;
6154 fdel->minipool = NULL;
6157 ftmp = last_barrier;
6159 else
6161 /* ftmp is the first fix that we can't fit into this pool and
6162 there are no natural barriers that we could use. Insert a
6163 new barrier in the code somewhere between the previous
6164 fix and this one, and arrange to jump around it. */
6165 HOST_WIDE_INT max_address;
6167 /* The last item on the list of fixes must be a barrier, so
6168 we can never run off the end of the list of fixes without
6169 last_barrier being set. */
6170 if (ftmp == NULL)
6171 abort ();
6173 max_address = minipool_vector_head->max_address;
6174 /* Check that there isn't another fix that is in range that
6175 we couldn't fit into this pool because the pool was
6176 already too large: we need to put the pool before such an
6177 instruction. */
6178 if (ftmp->address < max_address)
6179 max_address = ftmp->address;
6181 last_barrier = create_fix_barrier (last_added_fix, max_address);
6184 assign_minipool_offsets (last_barrier);
6186 while (ftmp)
6188 if (GET_CODE (ftmp->insn) != BARRIER
6189 && ((ftmp->minipool = add_minipool_backward_ref (ftmp))
6190 == NULL))
6191 break;
6193 ftmp = ftmp->next;
6196 /* Scan over the fixes we have identified for this pool, fixing them
6197 up and adding the constants to the pool itself. */
6198 for (this_fix = fix; this_fix && ftmp != this_fix;
6199 this_fix = this_fix->next)
6200 if (GET_CODE (this_fix->insn) != BARRIER)
6202 rtx addr
6203 = plus_constant (gen_rtx_LABEL_REF (VOIDmode,
6204 minipool_vector_label),
6205 this_fix->minipool->offset);
6206 *this_fix->loc = gen_rtx_MEM (this_fix->mode, addr);
6209 dump_minipool (last_barrier->insn);
6210 fix = ftmp;
6213 /* From now on we must synthesize any constants that we can't handle
6214 directly. This can happen if the RTL gets split during final
6215 instruction generation. */
6216 after_arm_reorg = 1;
6218 /* Free the minipool memory. */
6219 obstack_free (&minipool_obstack, minipool_startobj);
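/* Editorial sketch (not from the original source): the net effect of
   arm_reorg is that a constant load whose value cannot be encoded as
   an immediate ends up as a pc-relative load from a minipool dumped
   after a barrier, giving assembly roughly like:

	ldr	r0, .Lpool	@ hypothetical labels
	b	.Lskip		@ branch around the pool
   .Lpool:
	.word	0x12345678
   .Lskip:
*/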
6222 /* Routines to output assembly language. */
6224 /* If the rtx is the correct value then return the string of the number.
6225 In this way we can ensure that valid double constants are generated even
6226 when cross compiling. */
6228 const char *
6229 fp_immediate_constant (x)
6230 rtx x;
6232 REAL_VALUE_TYPE r;
6233 int i;
6235 if (!fpa_consts_inited)
6236 init_fpa_table ();
6238 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
6239 for (i = 0; i < 8; i++)
6240 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
6241 return strings_fpa[i];
6243 abort ();
6246 /* As for fp_immediate_constant, but value is passed directly, not in rtx. */
6248 static const char *
6249 fp_const_from_val (r)
6250 REAL_VALUE_TYPE * r;
6252 int i;
6254 if (!fpa_consts_inited)
6255 init_fpa_table ();
6257 for (i = 0; i < 8; i++)
6258 if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
6259 return strings_fpa[i];
6261 abort ();
6264 /* Output the operands of a LDM/STM instruction to STREAM.
6265 MASK is the ARM register set mask of which only bits 0-15 are important.
6266 REG is the base register, either the frame pointer or the stack pointer.
6267 INSTR is the possibly suffixed load or store instruction. */
6269 static void
6270 print_multi_reg (stream, instr, reg, mask)
6271 FILE * stream;
6272 const char * instr;
6273 int reg;
6274 int mask;
6276 int i;
6277 int not_first = FALSE;
6279 fputc ('\t', stream);
6280 asm_fprintf (stream, instr, reg);
6281 fputs (", {", stream);
6283 for (i = 0; i <= LAST_ARM_REGNUM; i++)
6284 if (mask & (1 << i))
6286 if (not_first)
6287 fprintf (stream, ", ");
6289 asm_fprintf (stream, "%r", i);
6290 not_first = TRUE;
6293 fprintf (stream, "}%s\n", TARGET_APCS_32 ? "" : "^");
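/* For illustration (editorial): called as in arm_output_epilogue with
   INSTR "ldmfd\t%r!", REG = SP_REGNUM and MASK = (1 << 4) | (1 << LR_REGNUM),
   the loop above emits

	ldmfd	sp!, {r4, lr}

   with a trailing `^' appended in 26-bit (non-APCS-32) mode. */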
6296 /* Output a 'call' insn. */
6298 const char *
6299 output_call (operands)
6300 rtx * operands;
6302 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
6304 if (REGNO (operands[0]) == LR_REGNUM)
6306 operands[0] = gen_rtx_REG (SImode, IP_REGNUM);
6307 output_asm_insn ("mov%?\t%0, %|lr", operands);
6310 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
6312 if (TARGET_INTERWORK)
6313 output_asm_insn ("bx%?\t%0", operands);
6314 else
6315 output_asm_insn ("mov%?\t%|pc, %0", operands);
6317 return "";
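/* Editorial example: a call through r2 without interworking becomes

	mov	lr, pc
	mov	pc, r2

   while a call through lr is first redirected via ip:

	mov	ip, lr
	mov	lr, pc
	mov	pc, ip
*/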
6320 static int
6321 eliminate_lr2ip (x)
6322 rtx * x;
6324 int something_changed = 0;
6325 rtx x0 = * x;
6326 int code = GET_CODE (x0);
6327 int i, j;
6328 const char * fmt;
6330 switch (code)
6332 case REG:
6333 if (REGNO (x0) == LR_REGNUM)
6335 *x = gen_rtx_REG (SImode, IP_REGNUM);
6336 return 1;
6338 return 0;
6339 default:
6340 /* Scan through the sub-elements and change any references there. */
6341 fmt = GET_RTX_FORMAT (code);
6343 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6344 if (fmt[i] == 'e')
6345 something_changed |= eliminate_lr2ip (&XEXP (x0, i));
6346 else if (fmt[i] == 'E')
6347 for (j = 0; j < XVECLEN (x0, i); j++)
6348 something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));
6350 return something_changed;
6354 /* Output a 'call' insn that is a reference in memory. */
6356 const char *
6357 output_call_mem (operands)
6358 rtx * operands;
6360 operands[0] = copy_rtx (operands[0]); /* Be ultra careful. */
6361 /* Handle calls using lr by using ip (which may be clobbered in subr anyway). */
6362 if (eliminate_lr2ip (&operands[0]))
6363 output_asm_insn ("mov%?\t%|ip, %|lr", operands);
6365 if (TARGET_INTERWORK)
6367 output_asm_insn ("ldr%?\t%|ip, %0", operands);
6368 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
6369 output_asm_insn ("bx%?\t%|ip", operands);
6371 else
6373 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
6374 output_asm_insn ("ldr%?\t%|pc, %0", operands);
6377 return "";
6381 /* Output a move from arm registers to an fpu register.
6382 OPERANDS[0] is an fpu register.
6383 OPERANDS[1] is the first register of an arm register pair. */
6385 const char *
6386 output_mov_long_double_fpu_from_arm (operands)
6387 rtx * operands;
6389 int arm_reg0 = REGNO (operands[1]);
6390 rtx ops[3];
6392 if (arm_reg0 == IP_REGNUM)
6393 abort ();
6395 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6396 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6397 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
6399 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
6400 output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);
6402 return "";
6405 /* Output a move from an fpu register to arm registers.
6406 OPERANDS[0] is the first register of an arm register pair.
6407 OPERANDS[1] is an fpu register. */
6409 const char *
6410 output_mov_long_double_arm_from_fpu (operands)
6411 rtx * operands;
6413 int arm_reg0 = REGNO (operands[0]);
6414 rtx ops[3];
6416 if (arm_reg0 == IP_REGNUM)
6417 abort ();
6419 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6420 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6421 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
6423 output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
6424 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
6425 return "";
6428 /* Output a move from arm registers to arm registers of a long double.
6429 OPERANDS[0] is the destination.
6430 OPERANDS[1] is the source. */
6432 const char *
6433 output_mov_long_double_arm_from_arm (operands)
6434 rtx * operands;
6436 /* We have to be careful here because the two might overlap. */
6437 int dest_start = REGNO (operands[0]);
6438 int src_start = REGNO (operands[1]);
6439 rtx ops[2];
6440 int i;
6442 if (dest_start < src_start)
6444 for (i = 0; i < 3; i++)
6446 ops[0] = gen_rtx_REG (SImode, dest_start + i);
6447 ops[1] = gen_rtx_REG (SImode, src_start + i);
6448 output_asm_insn ("mov%?\t%0, %1", ops);
6451 else
6453 for (i = 2; i >= 0; i--)
6455 ops[0] = gen_rtx_REG (SImode, dest_start + i);
6456 ops[1] = gen_rtx_REG (SImode, src_start + i);
6457 output_asm_insn ("mov%?\t%0, %1", ops);
6461 return "";
6465 /* Output a move from arm registers to an fpu register.
6466 OPERANDS[0] is an fpu register.
6467 OPERANDS[1] is the first register of an arm register pair. */
6469 const char *
6470 output_mov_double_fpu_from_arm (operands)
6471 rtx * operands;
6473 int arm_reg0 = REGNO (operands[1]);
6474 rtx ops[2];
6476 if (arm_reg0 == IP_REGNUM)
6477 abort ();
6479 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6480 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6481 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
6482 output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
6483 return "";
6486 /* Output a move from an fpu register to arm registers.
6487 OPERANDS[0] is the first register of an arm register pair.
6488 OPERANDS[1] is an fpu register. */
6490 const char *
6491 output_mov_double_arm_from_fpu (operands)
6492 rtx * operands;
6494 int arm_reg0 = REGNO (operands[0]);
6495 rtx ops[2];
6497 if (arm_reg0 == IP_REGNUM)
6498 abort ();
6500 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6501 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6502 output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
6503 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
6504 return "";
6507 /* Output a move between double words.
6508 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
6509 or MEM<-REG and all MEMs must be offsettable addresses. */
6511 const char *
6512 output_move_double (operands)
6513 rtx * operands;
6515 enum rtx_code code0 = GET_CODE (operands[0]);
6516 enum rtx_code code1 = GET_CODE (operands[1]);
6517 rtx otherops[3];
6519 if (code0 == REG)
6521 int reg0 = REGNO (operands[0]);
6523 otherops[0] = gen_rtx_REG (SImode, 1 + reg0);
6525 if (code1 == REG)
6527 int reg1 = REGNO (operands[1]);
6528 if (reg1 == IP_REGNUM)
6529 abort ();
6531 /* Ensure the second source is not overwritten. */
6532 if (reg1 == reg0 + (WORDS_BIG_ENDIAN ? -1 : 1))
6533 output_asm_insn ("mov%?\t%Q0, %Q1\n\tmov%?\t%R0, %R1", operands);
6534 else
6535 output_asm_insn ("mov%?\t%R0, %R1\n\tmov%?\t%Q0, %Q1", operands);
6537 else if (code1 == CONST_DOUBLE)
6539 if (GET_MODE (operands[1]) == DFmode)
6541 long l[2];
6542 union real_extract u;
6544 memcpy (&u, &CONST_DOUBLE_LOW (operands[1]), sizeof (u));
6545 REAL_VALUE_TO_TARGET_DOUBLE (u.d, l);
6546 otherops[1] = GEN_INT (l[1]);
6547 operands[1] = GEN_INT (l[0]);
6549 else if (GET_MODE (operands[1]) != VOIDmode)
6550 abort ();
6551 else if (WORDS_BIG_ENDIAN)
6553 otherops[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
6554 operands[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
6556 else
6558 otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
6559 operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
6562 output_mov_immediate (operands);
6563 output_mov_immediate (otherops);
6565 else if (code1 == CONST_INT)
6567 #if HOST_BITS_PER_WIDE_INT > 32
6568 /* If HOST_WIDE_INT is more than 32 bits, the intval tells us
6569 what the upper word is. */
6570 if (WORDS_BIG_ENDIAN)
6572 otherops[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
6573 operands[1] = GEN_INT (INTVAL (operands[1]) >> 32);
6575 else
6577 otherops[1] = GEN_INT (INTVAL (operands[1]) >> 32);
6578 operands[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
6580 #else
6581 /* Sign extend the intval into the high-order word. */
6582 if (WORDS_BIG_ENDIAN)
6584 otherops[1] = operands[1];
6585 operands[1] = (INTVAL (operands[1]) < 0
6586 ? constm1_rtx : const0_rtx);
6588 else
6589 otherops[1] = INTVAL (operands[1]) < 0 ? constm1_rtx : const0_rtx;
6590 #endif
6591 output_mov_immediate (otherops);
6592 output_mov_immediate (operands);
6594 else if (code1 == MEM)
6596 switch (GET_CODE (XEXP (operands[1], 0)))
6598 case REG:
6599 output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
6600 break;
6602 case PRE_INC:
6603 abort (); /* Should never happen now. */
6604 break;
6606 case PRE_DEC:
6607 output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
6608 break;
6610 case POST_INC:
6611 output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
6612 break;
6614 case POST_DEC:
6615 abort (); /* Should never happen now. */
6616 break;
6618 case LABEL_REF:
6619 case CONST:
6620 output_asm_insn ("adr%?\t%0, %1", operands);
6621 output_asm_insn ("ldm%?ia\t%0, %M0", operands);
6622 break;
6624 default:
6625 if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1),
6626 GET_MODE (XEXP (XEXP (operands[1], 0), 1))))
6628 otherops[0] = operands[0];
6629 otherops[1] = XEXP (XEXP (operands[1], 0), 0);
6630 otherops[2] = XEXP (XEXP (operands[1], 0), 1);
6632 if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
6634 if (GET_CODE (otherops[2]) == CONST_INT)
6636 switch (INTVAL (otherops[2]))
6638 case -8:
6639 output_asm_insn ("ldm%?db\t%1, %M0", otherops);
6640 return "";
6641 case -4:
6642 output_asm_insn ("ldm%?da\t%1, %M0", otherops);
6643 return "";
6644 case 4:
6645 output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
6646 return "";
6649 if (!(const_ok_for_arm (INTVAL (otherops[2]))))
6650 output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
6651 else
6652 output_asm_insn ("add%?\t%0, %1, %2", otherops);
6654 else
6655 output_asm_insn ("add%?\t%0, %1, %2", otherops);
6657 else
6658 output_asm_insn ("sub%?\t%0, %1, %2", otherops);
6660 return "ldm%?ia\t%0, %M0";
6662 else
6664 otherops[1] = adjust_address (operands[1], VOIDmode, 4);
6665 /* Take care of overlapping base/data reg. */
6666 if (reg_mentioned_p (operands[0], operands[1]))
6668 output_asm_insn ("ldr%?\t%0, %1", otherops);
6669 output_asm_insn ("ldr%?\t%0, %1", operands);
6671 else
6673 output_asm_insn ("ldr%?\t%0, %1", operands);
6674 output_asm_insn ("ldr%?\t%0, %1", otherops);
6679 else
6680 abort (); /* Constraints should prevent this. */
6682 else if (code0 == MEM && code1 == REG)
6684 if (REGNO (operands[1]) == IP_REGNUM)
6685 abort ();
6687 switch (GET_CODE (XEXP (operands[0], 0)))
6689 case REG:
6690 output_asm_insn ("stm%?ia\t%m0, %M1", operands);
6691 break;
6693 case PRE_INC:
6694 abort (); /* Should never happen now. */
6695 break;
6697 case PRE_DEC:
6698 output_asm_insn ("stm%?db\t%m0!, %M1", operands);
6699 break;
6701 case POST_INC:
6702 output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
6703 break;
6705 case POST_DEC:
6706 abort (); /* Should never happen now. */
6707 break;
6709 case PLUS:
6710 if (GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT)
6712 switch (INTVAL (XEXP (XEXP (operands[0], 0), 1)))
6714 case -8:
6715 output_asm_insn ("stm%?db\t%m0, %M1", operands);
6716 return "";
6718 case -4:
6719 output_asm_insn ("stm%?da\t%m0, %M1", operands);
6720 return "";
6722 case 4:
6723 output_asm_insn ("stm%?ib\t%m0, %M1", operands);
6724 return "";
6727 /* Fall through */
6729 default:
6730 otherops[0] = adjust_address (operands[0], VOIDmode, 4);
6731 otherops[1] = gen_rtx_REG (SImode, 1 + REGNO (operands[1]));
6732 output_asm_insn ("str%?\t%1, %0", operands);
6733 output_asm_insn ("str%?\t%1, %0", otherops);
6736 else
6737 /* Constraints should prevent this. */
6738 abort ();
6740 return "";
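/* Editorial example: loading a DImode value from the address
   (plus (reg r2) (const_int 4)) matches the CONST_INT cases above and
   emits the single instruction

	ldmib	r2, {r0, r1}

   whereas an offset like #16, which has no ldm addressing mode, falls
   through to an add of the base register followed by ldmia. */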
6744 /* Output an arbitrary MOV reg, #n.
6745 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
6747 const char *
6748 output_mov_immediate (operands)
6749 rtx * operands;
6751 HOST_WIDE_INT n = INTVAL (operands[1]);
6753 /* Try to use one MOV. */
6754 if (const_ok_for_arm (n))
6755 output_asm_insn ("mov%?\t%0, %1", operands);
6757 /* Try to use one MVN. */
6758 else if (const_ok_for_arm (~n))
6760 operands[1] = GEN_INT (~n);
6761 output_asm_insn ("mvn%?\t%0, %1", operands);
6763 else
6765 int n_ones = 0;
6766 int i;
6768 /* If all else fails, make it out of ORRs or BICs as appropriate. */
6769 for (i = 0; i < 32; i ++)
6770 if (n & 1 << i)
6771 n_ones ++;
6773 if (n_ones > 16) /* Shorter to use MVN with BIC in this case. */
6774 output_multi_immediate (operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1, ~ n);
6775 else
6776 output_multi_immediate (operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1, n);
6779 return "";
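/* Worked example (editorial): n = 0xffffff00 is not a valid ARM
   immediate, but ~n = 0xff is, so the code above emits

	mvn	r0, #255

   whereas 0x0000ffff has exactly 16 set bits and so is built with
   MOV/ORR by output_multi_immediate below. */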
6782 /* Output an ADD r, s, #n where n may be too big for one instruction.
6783 If adding zero to one register, output nothing. */
6785 const char *
6786 output_add_immediate (operands)
6787 rtx * operands;
6789 HOST_WIDE_INT n = INTVAL (operands[2]);
6791 if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
6793 if (n < 0)
6794 output_multi_immediate (operands,
6795 "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
6796 -n);
6797 else
6798 output_multi_immediate (operands,
6799 "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
6803 return "";
6806 /* Output a multiple immediate operation.
6807 OPERANDS is the vector of operands referred to in the output patterns.
6808 INSTR1 is the output pattern to use for the first constant.
6809 INSTR2 is the output pattern to use for subsequent constants.
6810 IMMED_OP is the index of the constant slot in OPERANDS.
6811 N is the constant value. */
6813 static const char *
6814 output_multi_immediate (operands, instr1, instr2, immed_op, n)
6815 rtx * operands;
6816 const char * instr1;
6817 const char * instr2;
6818 int immed_op;
6819 HOST_WIDE_INT n;
6821 #if HOST_BITS_PER_WIDE_INT > 32
6822 n &= 0xffffffff;
6823 #endif
6825 if (n == 0)
6827 /* Quick and easy output. */
6828 operands[immed_op] = const0_rtx;
6829 output_asm_insn (instr1, operands);
6831 else
6833 int i;
6834 const char * instr = instr1;
6836 /* Note that n is never zero here (which would give no output). */
6837 for (i = 0; i < 32; i += 2)
6839 if (n & (3 << i))
6841 operands[immed_op] = GEN_INT (n & (255 << i));
6842 output_asm_insn (instr, operands);
6843 instr = instr2;
6844 i += 6;
6849 return "";
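/* Editorial walk-through: for N = 0x00ff00ff with INSTR1
   "mov%?\t%0, %1" and INSTR2 "orr%?\t%0, %0, %1", the loop finds set
   bits at i = 0 and i = 16 and emits

	mov	r0, #255		@ 0x000000ff
	orr	r0, r0, #16711680	@ 0x00ff0000
*/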
6852 /* Return the appropriate ARM instruction for the operation code.
6853 The returned result should not be overwritten. OP is the rtx of the
6854 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
6855 was shifted. */
6857 const char *
6858 arithmetic_instr (op, shift_first_arg)
6859 rtx op;
6860 int shift_first_arg;
6862 switch (GET_CODE (op))
6864 case PLUS:
6865 return "add";
6867 case MINUS:
6868 return shift_first_arg ? "rsb" : "sub";
6870 case IOR:
6871 return "orr";
6873 case XOR:
6874 return "eor";
6876 case AND:
6877 return "and";
6879 default:
6880 abort ();
6884 /* Ensure valid constant shifts and return the appropriate shift mnemonic
6885 for the operation code. The returned result should not be overwritten.
6886 OP is the rtx code of the shift.
6887 On exit, *AMOUNTP will be -1 if the shift is by a register, or the
6888 constant amount if the shift is by a constant. */
6890 static const char *
6891 shift_op (op, amountp)
6892 rtx op;
6893 HOST_WIDE_INT *amountp;
6895 const char * mnem;
6896 enum rtx_code code = GET_CODE (op);
6898 if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
6899 *amountp = -1;
6900 else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
6901 *amountp = INTVAL (XEXP (op, 1));
6902 else
6903 abort ();
6905 switch (code)
6907 case ASHIFT:
6908 mnem = "asl";
6909 break;
6911 case ASHIFTRT:
6912 mnem = "asr";
6913 break;
6915 case LSHIFTRT:
6916 mnem = "lsr";
6917 break;
6919 case ROTATERT:
6920 mnem = "ror";
6921 break;
6923 case MULT:
6924 /* We never have to worry about the amount being other than a
6925 power of 2, since this case can never be reloaded from a reg. */
6926 if (*amountp != -1)
6927 *amountp = int_log2 (*amountp);
6928 else
6929 abort ();
6930 return "asl";
6932 default:
6933 abort ();
6936 if (*amountp != -1)
6938 /* This is not 100% correct, but follows from the desire to merge
6939 multiplication by a power of 2 with the recognizer for a
6940 shift. >=32 is not a valid shift for "asl", so we must try and
6941 output a shift that produces the correct arithmetical result.
6942 Using lsr #32 is identical except for the fact that the carry bit
6943 is not set correctly if we set the flags; but we never use the
6944 carry bit from such an operation, so we can ignore that. */
6945 if (code == ROTATERT)
6946 /* Rotate is just modulo 32. */
6947 *amountp &= 31;
6948 else if (*amountp != (*amountp & 31))
6950 if (code == ASHIFT)
6951 mnem = "lsr";
6952 *amountp = 32;
6955 /* Shifts of 0 are no-ops. */
6956 if (*amountp == 0)
6957 return NULL;
6960 return mnem;
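/* Editorial examples: (mult (reg) (const_int 8)) comes out as "asl"
   with *AMOUNTP = 3; (ashift (reg) (const_int 40)) is out of range
   and is rewritten as "lsr" with *AMOUNTP = 32, which still yields
   the arithmetically correct result of zero. */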
6963 /* Obtain the shift from the POWER of two. */
6965 static HOST_WIDE_INT
6966 int_log2 (power)
6967 HOST_WIDE_INT power;
6969 HOST_WIDE_INT shift = 0;
6971 while ((((HOST_WIDE_INT) 1 << shift) & power) == 0)
6973 if (shift > 31)
6974 abort ();
6975 shift ++;
6978 return shift;
6981 /* Output a .ascii pseudo-op, keeping track of lengths. This is because
6982 /bin/as is horribly restrictive. */
6983 #define MAX_ASCII_LEN 51
6985 void
6986 output_ascii_pseudo_op (stream, p, len)
6987 FILE * stream;
6988 const unsigned char * p;
6989 int len;
6991 int i;
6992 int len_so_far = 0;
6994 fputs ("\t.ascii\t\"", stream);
6996 for (i = 0; i < len; i++)
6998 int c = p[i];
7000 if (len_so_far >= MAX_ASCII_LEN)
7002 fputs ("\"\n\t.ascii\t\"", stream);
7003 len_so_far = 0;
7006 switch (c)
7008 case TARGET_TAB:
7009 fputs ("\\t", stream);
7010 len_so_far += 2;
7011 break;
7013 case TARGET_FF:
7014 fputs ("\\f", stream);
7015 len_so_far += 2;
7016 break;
7018 case TARGET_BS:
7019 fputs ("\\b", stream);
7020 len_so_far += 2;
7021 break;
7023 case TARGET_CR:
7024 fputs ("\\r", stream);
7025 len_so_far += 2;
7026 break;
7028 case TARGET_NEWLINE:
7029 fputs ("\\n", stream);
7030 c = p [i + 1];
7031 if ((c >= ' ' && c <= '~')
7032 || c == TARGET_TAB)
7033 /* This is a good place for a line break. */
7034 len_so_far = MAX_ASCII_LEN;
7035 else
7036 len_so_far += 2;
7037 break;
7039 case '\"':
7040 case '\\':
7041 putc ('\\', stream);
7042 len_so_far++;
7043 /* drop through. */
7045 default:
7046 if (c >= ' ' && c <= '~')
7048 putc (c, stream);
7049 len_so_far++;
7051 else
7053 fprintf (stream, "\\%03o", c);
7054 len_so_far += 4;
7056 break;
7060 fputs ("\"\n", stream);
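/* Editorial example: output_ascii_pseudo_op (f, "ab\ncd", 5) prints

	.ascii	"ab\n"
	.ascii	"cd"

   since a newline followed by a printable character is treated as a
   good place to split the directive. */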
7063 /* Compute the register save mask for registers 0 through 12
7064 inclusive. This code is used by both arm_compute_save_reg_mask
7065 and arm_compute_initial_elimination_offset. */
7067 static unsigned long
7068 arm_compute_save_reg0_reg12_mask ()
7070 unsigned long func_type = arm_current_func_type ();
7071 unsigned int save_reg_mask = 0;
7072 unsigned int reg;
7074 if (IS_INTERRUPT (func_type))
7076 unsigned int max_reg;
7077 /* Interrupt functions must not corrupt any registers,
7078 even call clobbered ones. If this is a leaf function
7079 we can just examine the registers used by the RTL, but
7080 otherwise we have to assume that whatever function is
7081 called might clobber anything, and so we have to save
7082 all the call-clobbered registers as well. */
7083 if (ARM_FUNC_TYPE (func_type) == ARM_FT_FIQ)
7084 /* FIQ handlers have registers r8 - r12 banked, so
7085 we only need to check r0 - r7. Normal ISRs only
7086 bank r14 and r15, so we must check up to r12.
7087 r13 is the stack pointer which is always preserved,
7088 so we do not need to consider it here. */
7089 max_reg = 7;
7090 else
7091 max_reg = 12;
7093 for (reg = 0; reg <= max_reg; reg++)
7094 if (regs_ever_live[reg]
7095 || (! current_function_is_leaf && call_used_regs [reg]))
7096 save_reg_mask |= (1 << reg);
7098 else
7100 /* In the normal case we only need to save those registers
7101 which are call saved and which are used by this function. */
7102 for (reg = 0; reg <= 10; reg++)
7103 if (regs_ever_live[reg] && ! call_used_regs [reg])
7104 save_reg_mask |= (1 << reg);
7106 /* Handle the frame pointer as a special case. */
7107 if (! TARGET_APCS_FRAME
7108 && ! frame_pointer_needed
7109 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
7110 && ! call_used_regs[HARD_FRAME_POINTER_REGNUM])
7111 save_reg_mask |= 1 << HARD_FRAME_POINTER_REGNUM;
7113 /* If we aren't loading the PIC register,
7114 don't stack it even though it may be live. */
7115 if (flag_pic
7116 && ! TARGET_SINGLE_PIC_BASE
7117 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
7118 save_reg_mask |= 1 << PIC_OFFSET_TABLE_REGNUM;
7121 return save_reg_mask;
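/* Editorial example: a normal function in which only r4 and r7 live
   across calls yields (1 << 4) | (1 << 7) == 0x90 here; a non-leaf
   IRQ handler would additionally save the call-clobbered r0-r3 and
   ip, whether live or not. */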
7124 /* Compute a bit mask of which registers need to be
7125 saved on the stack for the current function. */
7127 static unsigned long
7128 arm_compute_save_reg_mask ()
7130 unsigned int save_reg_mask = 0;
7131 unsigned long func_type = arm_current_func_type ();
7133 if (IS_NAKED (func_type))
7134 /* This should never really happen. */
7135 return 0;
7137 /* If we are creating a stack frame, then we must save the frame pointer,
7138 IP (which will hold the old stack pointer), LR and the PC. */
7139 if (frame_pointer_needed)
7140 save_reg_mask |=
7141 (1 << ARM_HARD_FRAME_POINTER_REGNUM)
7142 | (1 << IP_REGNUM)
7143 | (1 << LR_REGNUM)
7144 | (1 << PC_REGNUM);
7146 /* Volatile functions do not return, so there
7147 is no need to save any other registers. */
7148 if (IS_VOLATILE (func_type))
7149 return save_reg_mask;
7151 save_reg_mask |= arm_compute_save_reg0_reg12_mask ();
7153 /* Decide if we need to save the link register.
7154 Interrupt routines have their own banked link register,
7155 so they never need to save it.
7156 Otherwise, if we do not use the link register, we do not need to save
7157 it. If we are pushing other registers onto the stack however, we
7158 can save an instruction in the epilogue by pushing the link register
7159 now and then popping it back into the PC. This incurs extra memory
7160 accesses though, so we only do it when optimising for size, and only
7161 if we know that we will not need a fancy return sequence. */
7162 if (! IS_INTERRUPT (func_type)
7163 && (regs_ever_live [LR_REGNUM]
7164 || (save_reg_mask
7165 && optimize_size
7166 && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL)))
7167 save_reg_mask |= 1 << LR_REGNUM;
7169 if (cfun->machine->lr_save_eliminated)
7170 save_reg_mask &= ~ (1 << LR_REGNUM);
7172 return save_reg_mask;
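/* The LR heuristic above pairs a prologue push with an epilogue pop
   straight into the PC; schematically (editorial illustration):

	stmfd	sp!, {r4, lr}	@ prologue
	...
	ldmfd	sp!, {r4, pc}	@ epilogue, return included
*/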
7175 /* Generate a function exit sequence. If REALLY_RETURN is true, then do
7176 everything bar the final return instruction. */
7178 const char *
7179 output_return_instruction (operand, really_return, reverse)
7180 rtx operand;
7181 int really_return;
7182 int reverse;
7184 char conditional[10];
7185 char instr[100];
7186 int reg;
7187 unsigned long live_regs_mask;
7188 unsigned long func_type;
7190 func_type = arm_current_func_type ();
7192 if (IS_NAKED (func_type))
7193 return "";
7195 if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
7197 /* If this function was declared non-returning, and we have found a tail
7198 call, then we have to trust that the called function won't return. */
7199 if (really_return)
7201 rtx ops[2];
7203 /* Otherwise, trap an attempted return by aborting. */
7204 ops[0] = operand;
7205 ops[1] = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)"
7206 : "abort");
7207 assemble_external_libcall (ops[1]);
7208 output_asm_insn (reverse ? "bl%D0\t%a1" : "bl%d0\t%a1", ops);
7211 return "";
7214 if (current_function_calls_alloca && !really_return)
7215 abort ();
7217 /* Construct the conditional part of the instruction(s) to be emitted. */
7218 sprintf (conditional, "%%?%%%c0", reverse ? 'D' : 'd');
7220 return_used_this_function = 1;
7222 live_regs_mask = arm_compute_save_reg_mask ();
7224 /* On some ARM architectures it is faster to use LDR rather than LDM to
7225 load a single register. On other architectures, the cost is the same.
7226 In 26-bit mode we have to use LDM in order to be able to restore the CPSR. */
7227 if ((live_regs_mask == (1 << LR_REGNUM))
7228 && ! TARGET_INTERWORK
7229 && ! IS_INTERRUPT (func_type)
7230 && (! really_return || TARGET_APCS_32))
7232 if (! really_return)
7233 sprintf (instr, "ldr%s\t%%|lr, [%%|sp], #4", conditional);
7234 else
7235 sprintf (instr, "ldr%s\t%%|pc, [%%|sp], #4", conditional);
7237 else if (live_regs_mask)
7239 if ((live_regs_mask & (1 << IP_REGNUM)) == (1 << IP_REGNUM))
7240 /* There are two possible reasons for the IP register being saved.
7241 Either a stack frame was created, in which case IP contains the
7242 old stack pointer, or an ISR routine corrupted it. If this is an
7243 ISR routine then just restore IP, otherwise restore IP into SP. */
7244 if (! IS_INTERRUPT (func_type))
7246 live_regs_mask &= ~ (1 << IP_REGNUM);
7247 live_regs_mask |= (1 << SP_REGNUM);
7250 /* Generate the load multiple instruction to restore the registers. */
7251 if (frame_pointer_needed)
7252 sprintf (instr, "ldm%sea\t%%|fp, {", conditional);
7253 else
7254 sprintf (instr, "ldm%sfd\t%%|sp!, {", conditional);
7256 for (reg = 0; reg <= SP_REGNUM; reg++)
7257 if (live_regs_mask & (1 << reg))
7259 strcat (instr, "%|");
7260 strcat (instr, reg_names[reg]);
7261 strcat (instr, ", ");
7264 if ((live_regs_mask & (1 << LR_REGNUM)) == 0)
7266 /* If we are not restoring the LR register then we will
7267 have added one too many commas to the list above.
7268 Replace it with a closing brace. */
7269 instr [strlen (instr) - 2] = '}';
7271 else
7273 strcat (instr, "%|");
7275 /* At this point there should only be one or two registers left in
7276 live_regs_mask: always LR, and possibly PC if we created a stack
7277 frame. LR contains the return address. If we do not have any
7278 special requirements for function exit (e.g. interworking, or ISR)
7279 then we can load this value directly into the PC and save an
7280 instruction. */
7281 if (! TARGET_INTERWORK
7282 && ! IS_INTERRUPT (func_type)
7283 && really_return)
7284 strcat (instr, reg_names [PC_REGNUM]);
7285 else
7286 strcat (instr, reg_names [LR_REGNUM]);
7288 strcat (instr, (TARGET_APCS_32 || !really_return) ? "}" : "}^");
7291 if (really_return)
7293 /* See if we need to generate an extra instruction to
7294 perform the actual function return. */
7295 switch ((int) ARM_FUNC_TYPE (func_type))
7297 case ARM_FT_ISR:
7298 case ARM_FT_FIQ:
7299 output_asm_insn (instr, & operand);
7301 strcpy (instr, "sub");
7302 strcat (instr, conditional);
7303 strcat (instr, "s\t%|pc, %|lr, #4");
7304 break;
7306 case ARM_FT_EXCEPTION:
7307 output_asm_insn (instr, & operand);
7309 strcpy (instr, "mov");
7310 strcat (instr, conditional);
7311 strcat (instr, "s\t%|pc, %|lr");
7312 break;
7314 case ARM_FT_INTERWORKED:
7315 output_asm_insn (instr, & operand);
7317 strcpy (instr, "bx");
7318 strcat (instr, conditional);
7319 strcat (instr, "\t%|lr");
7320 break;
7322 default:
7323 /* The return has already been handled
7324 by loading the LR into the PC. */
7325 if ((live_regs_mask & (1 << LR_REGNUM)) == 0)
7327 output_asm_insn (instr, & operand);
7329 strcpy (instr, "mov");
7330 strcat (instr, conditional);
7331 if (! TARGET_APCS_32)
7332 strcat (instr, "s");
7333 strcat (instr, "\t%|pc, %|lr");
7335 break;
7339 else if (really_return)
7341 switch ((int) ARM_FUNC_TYPE (func_type))
7343 case ARM_FT_ISR:
7344 case ARM_FT_FIQ:
7345 sprintf (instr, "sub%ss\t%%|pc, %%|lr, #4", conditional);
7346 break;
7348 case ARM_FT_INTERWORKED:
7349 sprintf (instr, "bx%s\t%%|lr", conditional);
7350 break;
7352 case ARM_FT_EXCEPTION:
7353 sprintf (instr, "mov%ss\t%%|pc, %%|lr", conditional);
7354 break;
7356 default:
7357 sprintf (instr, "mov%s%s\t%%|pc, %%|lr",
7358 conditional, TARGET_APCS_32 ? "" : "s");
7359 break;
7362 else
7363 /* Nothing to load off the stack, and
7364 no return instruction to generate. */
7365 return "";
7367 output_asm_insn (instr, & operand);
7369 return "";
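/* Editorial example: when only LR was saved and neither interworking
   nor an ISR exit is involved, the fast path above returns with the
   single instruction

	ldr	pc, [sp], #4

   instead of an ldmfd of one register. */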
7372 /* Write the function name into the code section, directly preceding
7373 the function prologue.
7375 Code will be output similar to this:
7377 .ascii "arm_poke_function_name", 0
7378 .align
7380 .word 0xff000000 + (t1 - t0)
7381 arm_poke_function_name
7382 mov ip, sp
7383 stmfd sp!, {fp, ip, lr, pc}
7384 sub fp, ip, #4
7386 When performing a stack backtrace, code can inspect the value
7387 of 'pc' stored at 'fp' + 0. If the trace function then looks
7388 at location pc - 12 and the top 8 bits are set, then we know
7389 that there is a function name embedded immediately preceding this
7390 location, with length ((pc[-3]) & 0x00ffffff).
7392 We assume that pc is declared as a pointer to an unsigned long.
7394 It is of no benefit to output the function name if we are assembling
7395 a leaf function. These function types will not contain a stack
7396 backtrace structure, therefore it is not possible to determine the
7397 function name. */
7399 void
7400 arm_poke_function_name (stream, name)
7401 FILE * stream;
7402 const char * name;
7404 unsigned long alignlength;
7405 unsigned long length;
7406 rtx x;
7408 length = strlen (name) + 1;
7409 alignlength = ROUND_UP (length);
7411 ASM_OUTPUT_ASCII (stream, name, length);
7412 ASM_OUTPUT_ALIGN (stream, 2);
7413 x = GEN_INT ((unsigned HOST_WIDE_INT) 0xff000000 + alignlength);
7414 assemble_aligned_integer (UNITS_PER_WORD, x);
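/* A minimal sketch (editorial, not part of GCC) of how a backtrace
   routine might recover the poked name, under the assumptions stated
   above:

	static const char *
	poked_function_name (const unsigned long *pc)
	{
	  unsigned long marker = pc[-3];
	  if ((marker & 0xff000000) != 0xff000000)
	    return 0;
	  /- The NUL-padded name ends just before the marker word. -/
	  return (const char *) (pc - 3) - (marker & 0x00ffffff);
	}

   (the inner comment delimiters are altered to keep this block a
   single comment). */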
7417 /* Place some comments into the assembler stream
7418 describing the current function. */
7420 static void
7421 arm_output_function_prologue (f, frame_size)
7422 FILE * f;
7423 HOST_WIDE_INT frame_size;
7425 unsigned long func_type;
7427 if (!TARGET_ARM)
7429 thumb_output_function_prologue (f, frame_size);
7430 return;
7433 /* Sanity check. */
7434 if (arm_ccfsm_state || arm_target_insn)
7435 abort ();
7437 func_type = arm_current_func_type ();
7439 switch ((int) ARM_FUNC_TYPE (func_type))
7441 default:
7442 case ARM_FT_NORMAL:
7443 break;
7444 case ARM_FT_INTERWORKED:
7445 asm_fprintf (f, "\t%@ Function supports interworking.\n");
7446 break;
7447 case ARM_FT_EXCEPTION_HANDLER:
7448 asm_fprintf (f, "\t%@ C++ Exception Handler.\n");
7449 break;
7450 case ARM_FT_ISR:
7451 asm_fprintf (f, "\t%@ Interrupt Service Routine.\n");
7452 break;
7453 case ARM_FT_FIQ:
7454 asm_fprintf (f, "\t%@ Fast Interrupt Service Routine.\n");
7455 break;
7456 case ARM_FT_EXCEPTION:
7457 asm_fprintf (f, "\t%@ ARM Exception Handler.\n");
7458 break;
7461 if (IS_NAKED (func_type))
7462 asm_fprintf (f, "\t%@ Naked Function: prologue and epilogue provided by programmer.\n");
7464 if (IS_VOLATILE (func_type))
7465 asm_fprintf (f, "\t%@ Volatile: function does not return.\n");
7467 if (IS_NESTED (func_type))
7468 asm_fprintf (f, "\t%@ Nested: function declared inside another function.\n");
7470 asm_fprintf (f, "\t%@ args = %d, pretend = %d, frame = %d\n",
7471 current_function_args_size,
7472 current_function_pretend_args_size, frame_size);
7474 asm_fprintf (f, "\t%@ frame_needed = %d, current_function_anonymous_args = %d\n",
7475 frame_pointer_needed,
7476 current_function_anonymous_args);
7478 if (cfun->machine->lr_save_eliminated)
7479 asm_fprintf (f, "\t%@ link register save eliminated.\n");
7481 #ifdef AOF_ASSEMBLER
7482 if (flag_pic)
7483 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, PIC_OFFSET_TABLE_REGNUM);
7484 #endif
7486 return_used_this_function = 0;
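/* Editorial sample of the annotations written above:

	@ args = 0, pretend = 0, frame = 8
	@ frame_needed = 1, current_function_anonymous_args = 0
*/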
7489 const char *
7490 arm_output_epilogue (really_return)
7491 int really_return;
7493 int reg;
7494 unsigned long saved_regs_mask;
7495 unsigned long func_type;
7496 /* If we need this, then it will always be at least this much. */
7497 int floats_offset = 12;
7498 rtx operands[3];
7499 int frame_size = get_frame_size ();
7500 FILE * f = asm_out_file;
7501 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
7503 /* If we have already generated the return instruction
7504 then it is futile to generate anything else. */
7505 if (use_return_insn (FALSE) && return_used_this_function)
7506 return "";
7508 func_type = arm_current_func_type ();
7510 if (IS_NAKED (func_type))
7511 /* Naked functions don't have epilogues. */
7512 return "";
7514 if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
7516 rtx op;
7518 /* A volatile function should never return. Call abort. */
7519 op = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)" : "abort");
7520 assemble_external_libcall (op);
7521 output_asm_insn ("bl\t%a0", &op);
7523 return "";
7526 if (ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER
7527 && ! really_return)
7528 /* If we are throwing an exception, then we really must
7529 be doing a return, so we can't tail-call. */
7530 abort ();
7532 saved_regs_mask = arm_compute_save_reg_mask ();
7534 /* Compute how far away the floats will be. */
7535 for (reg = 0; reg <= LAST_ARM_REGNUM; reg ++)
7536 if (saved_regs_mask & (1 << reg))
7537 floats_offset += 4;
7539 if (frame_pointer_needed)
7541 if (arm_fpu_arch == FP_SOFT2)
7543 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
7544 if (regs_ever_live[reg] && !call_used_regs[reg])
7546 floats_offset += 12;
7547 asm_fprintf (f, "\tldfe\t%r, [%r, #-%d]\n",
7548 reg, FP_REGNUM, floats_offset);
7551 else
7553 int start_reg = LAST_ARM_FP_REGNUM;
7555 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
7557 if (regs_ever_live[reg] && !call_used_regs[reg])
7559 floats_offset += 12;
7561 /* We can't unstack more than four registers at once. */
7562 if (start_reg - reg == 3)
7564 asm_fprintf (f, "\tlfm\t%r, 4, [%r, #-%d]\n",
7565 reg, FP_REGNUM, floats_offset);
7566 start_reg = reg - 1;
7569 else
7571 if (reg != start_reg)
7572 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
7573 reg + 1, start_reg - reg,
7574 FP_REGNUM, floats_offset);
7575 start_reg = reg - 1;
7579 /* Just in case the last register checked also needs unstacking. */
7580 if (reg != start_reg)
7581 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
7582 reg + 1, start_reg - reg,
7583 FP_REGNUM, floats_offset);
7586 /* saved_regs_mask should contain the IP, which at the time of stack
7587 frame generation actually contains the old stack pointer. So a
7588 quick way to unwind the stack is just pop the IP register directly
7589 into the stack pointer. */
7590 if ((saved_regs_mask & (1 << IP_REGNUM)) == 0)
7591 abort ();
7592 saved_regs_mask &= ~ (1 << IP_REGNUM);
7593 saved_regs_mask |= (1 << SP_REGNUM);
7595 /* There are two registers left in saved_regs_mask - LR and PC. We
7596 only need to restore the LR register (the return address), but to
7597 save time we can load it directly into the PC, unless we need a
7598 special function exit sequence, or we are not really returning. */
7599 if (really_return && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL)
7600 /* Delete the LR from the register mask, so that the LR on
7601 the stack is loaded into the PC in the register mask. */
7602 saved_regs_mask &= ~ (1 << LR_REGNUM);
7603 else
7604 saved_regs_mask &= ~ (1 << PC_REGNUM);
7606 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, saved_regs_mask);
7608 if (IS_INTERRUPT (func_type))
7609 /* Interrupt handlers will have pushed the
7610 IP onto the stack, so restore it now. */
7611 print_multi_reg (f, "ldmfd\t%r", SP_REGNUM, 1 << IP_REGNUM);
7613 else
7615 /* Restore stack pointer if necessary. */
7616 if (frame_size + current_function_outgoing_args_size != 0)
7618 operands[0] = operands[1] = stack_pointer_rtx;
7619 operands[2] = GEN_INT (frame_size
7620 + current_function_outgoing_args_size);
7621 output_add_immediate (operands);
7624 if (arm_fpu_arch == FP_SOFT2)
7626 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
7627 if (regs_ever_live[reg] && !call_used_regs[reg])
7628 asm_fprintf (f, "\tldfe\t%r, [%r], #12\n",
7629 reg, SP_REGNUM);
7631 else
7633 int start_reg = FIRST_ARM_FP_REGNUM;
7635 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
7637 if (regs_ever_live[reg] && !call_used_regs[reg])
7639 if (reg - start_reg == 3)
7641 asm_fprintf (f, "\tlfmfd\t%r, 4, [%r]!\n",
7642 start_reg, SP_REGNUM);
7643 start_reg = reg + 1;
7646 else
7648 if (reg != start_reg)
7649 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
7650 start_reg, reg - start_reg,
7651 SP_REGNUM);
7653 start_reg = reg + 1;
7657 /* Just in case the last register checked also needs unstacking. */
7658 if (reg != start_reg)
7659 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
7660 start_reg, reg - start_reg, SP_REGNUM);
7663 /* If we can, restore the LR into the PC. */
7664 if (ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
7665 && really_return
7666 && current_function_pretend_args_size == 0
7667 && saved_regs_mask & (1 << LR_REGNUM))
7669 saved_regs_mask &= ~ (1 << LR_REGNUM);
7670 saved_regs_mask |= (1 << PC_REGNUM);
7673 /* Load the registers off the stack. If we only have one register
7674 to load use the LDR instruction - it is faster. */
7675 if (saved_regs_mask == (1 << LR_REGNUM))
7677 /* The exception handler ignores the LR, so we do
7678 not really need to load it off the stack. */
7679 if (eh_ofs)
7680 asm_fprintf (f, "\tadd\t%r, %r, #4\n", SP_REGNUM, SP_REGNUM);
7681 else
7682 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM, SP_REGNUM);
7684 else if (saved_regs_mask)
7685 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, saved_regs_mask);
7687 if (current_function_pretend_args_size)
7689 /* Unwind the pre-pushed regs. */
7690 operands[0] = operands[1] = stack_pointer_rtx;
7691 operands[2] = GEN_INT (current_function_pretend_args_size);
7692 output_add_immediate (operands);
7696 #if 0
7697 if (ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER)
7698 /* Adjust the stack to remove the exception handler stuff. */
7699 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
7700 REGNO (eh_ofs));
7701 #endif
7703 if (! really_return)
7704 return "";
7706 /* Generate the return instruction. */
7707 switch ((int) ARM_FUNC_TYPE (func_type))
7709 case ARM_FT_EXCEPTION_HANDLER:
7710 /* Even in 26-bit mode we do a mov (rather than a movs)
7711 because we don't have the PSR bits set in the address. */
7712 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, EXCEPTION_LR_REGNUM);
7713 break;
7715 case ARM_FT_ISR:
7716 case ARM_FT_FIQ:
7717 asm_fprintf (f, "\tsubs\t%r, %r, #4\n", PC_REGNUM, LR_REGNUM);
7718 break;
7720 case ARM_FT_EXCEPTION:
7721 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
7722 break;
7724 case ARM_FT_INTERWORKED:
7725 asm_fprintf (f, "\tbx\t%r\n", LR_REGNUM);
7726 break;
7728 default:
7729 if (frame_pointer_needed)
7730 /* If we used the frame pointer then the return address
7731 will have been loaded off the stack directly into the
7732 PC, so there is no need to issue a MOV instruction
7733 here. */
7735 else if (current_function_pretend_args_size == 0
7736 && (saved_regs_mask & (1 << LR_REGNUM)))
7737 /* Similarly we may have been able to load LR into the PC
7738 even if we did not create a stack frame. */
7740 else if (TARGET_APCS_32)
7741 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, LR_REGNUM);
7742 else
7743 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
7744 break;
7747 return "";
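/* Editorial example: for a normal function compiled with a frame
   pointer, the frame_pointer_needed branch above typically ends with

	ldmea	fp, {fp, sp, pc}

   restoring the caller's FP, the old stack pointer (from the stacked
   IP slot) and the return address in one instruction. */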
7750 static void
7751 arm_output_function_epilogue (file, frame_size)
7752 FILE *file ATTRIBUTE_UNUSED;
7753 HOST_WIDE_INT frame_size;
7755 if (TARGET_THUMB)
7757 /* ??? Probably not safe to set this here, since it assumes that a
7758 function will be emitted as assembly immediately after we generate
7759 RTL for it. This does not happen for inline functions. */
7760 return_used_this_function = 0;
7762 else
7764 if (use_return_insn (FALSE)
7765 && return_used_this_function
7766 && (frame_size + current_function_outgoing_args_size) != 0
7767 && !frame_pointer_needed)
7768 abort ();
7770 /* Reset the ARM-specific per-function variables. */
7771 current_function_anonymous_args = 0;
7772 after_arm_reorg = 0;
7776 /* Generate and emit an insn that we will recognize as a push_multi.
7777 Unfortunately, since this insn does not reflect very well the actual
7778 semantics of the operation, we need to annotate the insn for the benefit
7779 of DWARF2 frame unwind information. */
7781 static rtx
7782 emit_multi_reg_push (mask)
7783 int mask;
7785 int num_regs = 0;
7786 int num_dwarf_regs;
7787 int i, j;
7788 rtx par;
7789 rtx dwarf;
7790 int dwarf_par_index;
7791 rtx tmp, reg;
7793 for (i = 0; i <= LAST_ARM_REGNUM; i++)
7794 if (mask & (1 << i))
7795 num_regs++;
7797 if (num_regs == 0 || num_regs > 16)
7798 abort ();
7800 /* We don't record the PC in the dwarf frame information. */
7801 num_dwarf_regs = num_regs;
7802 if (mask & (1 << PC_REGNUM))
7803 num_dwarf_regs--;
7805 /* For the body of the insn we are going to generate an UNSPEC in
7806 parallel with several USEs. This allows the insn to be recognised
7807 by the push_multi pattern in the arm.md file. The insn looks
7808 something like this:
7810 (parallel [
7811 (set (mem:BLK (pre_dec:BLK (reg:SI sp)))
7812 (unspec:BLK [(reg:SI r4)] UNSPEC_PUSH_MULT))
7813 (use (reg:SI 11 fp))
7814 (use (reg:SI 12 ip))
7815 (use (reg:SI 14 lr))
7816 (use (reg:SI 15 pc))
7819 For the frame note however, we try to be more explicit and actually
7820 show each register being stored into the stack frame, plus a (single)
7821 decrement of the stack pointer. We do it this way in order to be
7822 friendly to the stack unwinding code, which only wants to see a single
7823 stack decrement per instruction. The RTL we generate for the note looks
7824 something like this:
7826 (sequence [
7827 (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int -20)))
7828 (set (mem:SI (reg:SI sp)) (reg:SI r4))
7829 (set (mem:SI (plus:SI (reg:SI sp) (const_int 4))) (reg:SI fp))
7830 (set (mem:SI (plus:SI (reg:SI sp) (const_int 8))) (reg:SI ip))
7831 (set (mem:SI (plus:SI (reg:SI sp) (const_int 12))) (reg:SI lr))
7834 This sequence is used both by the code to support stack unwinding for
7835 exceptions handlers and the code to generate dwarf2 frame debugging. */
7837 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_regs));
7838 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (num_dwarf_regs + 1));
7839 RTX_FRAME_RELATED_P (dwarf) = 1;
7840 dwarf_par_index = 1;
7842 for (i = 0; i <= LAST_ARM_REGNUM; i++)
7844 if (mask & (1 << i))
7846 reg = gen_rtx_REG (SImode, i);
7848 XVECEXP (par, 0, 0)
7849 = gen_rtx_SET (VOIDmode,
7850 gen_rtx_MEM (BLKmode,
7851 gen_rtx_PRE_DEC (BLKmode,
7852 stack_pointer_rtx)),
7853 gen_rtx_UNSPEC (BLKmode,
7854 gen_rtvec (1, reg),
7855 UNSPEC_PUSH_MULT));
7857 if (i != PC_REGNUM)
7859 tmp = gen_rtx_SET (VOIDmode,
7860 gen_rtx_MEM (SImode, stack_pointer_rtx),
7861 reg);
7862 RTX_FRAME_RELATED_P (tmp) = 1;
7863 XVECEXP (dwarf, 0, dwarf_par_index) = tmp;
7864 dwarf_par_index++;
7867 break;
7871 for (j = 1, i++; j < num_regs; i++)
7873 if (mask & (1 << i))
7875 reg = gen_rtx_REG (SImode, i);
7877 XVECEXP (par, 0, j) = gen_rtx_USE (VOIDmode, reg);
7879 if (i != PC_REGNUM)
7881 tmp = gen_rtx_SET (VOIDmode,
7882 gen_rtx_MEM (SImode,
7883 plus_constant (stack_pointer_rtx,
7884 4 * j)),
7885 reg);
7886 RTX_FRAME_RELATED_P (tmp) = 1;
7887 XVECEXP (dwarf, 0, dwarf_par_index++) = tmp;
7890 j++;
7894 par = emit_insn (par);
7896 tmp = gen_rtx_SET (SImode,
7897 stack_pointer_rtx,
7898 gen_rtx_PLUS (SImode,
7899 stack_pointer_rtx,
7900 GEN_INT (-4 * num_regs)));
7901 RTX_FRAME_RELATED_P (tmp) = 1;
7902 XVECEXP (dwarf, 0, 0) = tmp;
7904 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
7905 REG_NOTES (par));
7906 return par;
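/* Emit an insn pushing COUNT floating point registers, starting at
   BASE_REG, with a single SFM, and annotate it with an equivalent
   parallel of XFmode stores for the DWARF2 unwinder, mirroring
   emit_multi_reg_push above. */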
7909 static rtx
7910 emit_sfm (base_reg, count)
7911 int base_reg;
7912 int count;
7914 rtx par;
7915 rtx dwarf;
7916 rtx tmp, reg;
7917 int i;
7919 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
7920 dwarf = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
7921 RTX_FRAME_RELATED_P (dwarf) = 1;
7923 reg = gen_rtx_REG (XFmode, base_reg++);
7925 XVECEXP (par, 0, 0)
7926 = gen_rtx_SET (VOIDmode,
7927 gen_rtx_MEM (BLKmode,
7928 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
7929 gen_rtx_UNSPEC (BLKmode,
7930 gen_rtvec (1, reg),
7931 UNSPEC_PUSH_MULT));
7933 tmp = gen_rtx_SET (VOIDmode,
7934 gen_rtx_MEM (XFmode,
7935 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
7936 reg);
7937 RTX_FRAME_RELATED_P (tmp) = 1;
7938 XVECEXP (dwarf, 0, count - 1) = tmp;
7940 for (i = 1; i < count; i++)
7942 reg = gen_rtx_REG (XFmode, base_reg++);
7943 XVECEXP (par, 0, i) = gen_rtx_USE (VOIDmode, reg);
7945 tmp = gen_rtx_SET (VOIDmode,
7946 gen_rtx_MEM (XFmode,
7947 gen_rtx_PRE_DEC (BLKmode,
7948 stack_pointer_rtx)),
7949 reg);
7950 RTX_FRAME_RELATED_P (tmp) = 1;
7951 XVECEXP (dwarf, 0, count - i - 1) = tmp;
7954 par = emit_insn (par);
7955 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
7956 REG_NOTES (par));
7957 return par;
7960 /* Compute the distance from register FROM to register TO.
7961 These can be the arg pointer (26), the soft frame pointer (25),
7962 the stack pointer (13) or the hard frame pointer (11).
7963 Typical stack layout looks like this:
7965 old stack pointer -> | |
7966 ----
7967 | | \
7968 | | saved arguments for
7969 | | vararg functions
7970 | | /
7972 hard FP & arg pointer -> | | \
7973 | | stack
7974 | | frame
7975 | | /
7977 | | \
7978 | | call saved
7979 | | registers
7980 soft frame pointer -> | | /
7982 | | \
7983 | | local
7984 | | variables
7985 | | /
7987 | | \
7988 | | outgoing
7989 | | arguments
7990 current stack pointer -> | | /
7993 For a given function some or all of these stack components
7994 may not be needed, giving rise to the possibility of
7995 eliminating some of the registers.
7997 The values returned by this function must reflect the behaviour
7998 of arm_expand_prologue() and arm_compute_save_reg_mask().
8000 The sign of the number returned reflects the direction of stack
8001 growth, so the values are positive for all eliminations except
8002 from the soft frame pointer to the hard frame pointer. */
8004 unsigned int
8005 arm_compute_initial_elimination_offset (from, to)
8006 unsigned int from;
8007 unsigned int to;
8009 unsigned int local_vars = (get_frame_size () + 3) & ~3;
8010 unsigned int outgoing_args = current_function_outgoing_args_size;
8011 unsigned int stack_frame;
8012 unsigned int call_saved_registers;
8013 unsigned long func_type;
8015 func_type = arm_current_func_type ();
8017 /* Volatile functions never return, so there is
8018 no need to save call saved registers. */
8019 call_saved_registers = 0;
8020 if (! IS_VOLATILE (func_type))
8022 unsigned int reg_mask;
8023 unsigned int reg;
8025 /* Make sure that we compute which registers will be saved
8026 on the stack using the same algorithm that is used by
8027 arm_compute_save_reg_mask(). */
8028 reg_mask = arm_compute_save_reg0_reg12_mask ();
8030 /* Now count the number of bits set in save_reg_mask.
8031 For each set bit we need 4 bytes of stack space. */
8032 while (reg_mask)
8034 call_saved_registers += 4;
8035 reg_mask = reg_mask & ~ (reg_mask & - reg_mask);
8038 if (regs_ever_live[LR_REGNUM]
8039 /* If a stack frame is going to be created, the LR will
8040 be saved as part of that, so we do not need to allow
8041 for it here. */
8042 && ! frame_pointer_needed)
8043 call_saved_registers += 4;
8045 /* If the hard floating point registers are going to be
8046 used then they must be saved on the stack as well.
8047 Each register occupies 12 bytes of stack space. */
8048 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg ++)
8049 if (regs_ever_live[reg] && ! call_used_regs[reg])
8050 call_saved_registers += 12;
8053 /* The stack frame contains 4 registers - the old frame pointer,
8054 the old stack pointer, the return address and PC of the start
8055 of the function. */
8056 stack_frame = frame_pointer_needed ? 16 : 0;
8058 /* OK, now we have enough information to compute the distances.
8059 There must be an entry in these switch tables for each pair
8060 of registers in ELIMINABLE_REGS, even if some of the entries
8061 seem to be redundant or useless. */
8062 switch (from)
8064 case ARG_POINTER_REGNUM:
8065 switch (to)
8067 case THUMB_HARD_FRAME_POINTER_REGNUM:
8068 return 0;
8070 case FRAME_POINTER_REGNUM:
8071 /* This is the reverse of the soft frame pointer
8072 to hard frame pointer elimination below. */
8073 if (call_saved_registers == 0 && stack_frame == 0)
8074 return 0;
8075 return (call_saved_registers + stack_frame - 4);
8077 case ARM_HARD_FRAME_POINTER_REGNUM:
8078 /* If there is no stack frame then the hard
8079 frame pointer and the arg pointer coincide. */
8080 if (stack_frame == 0 && call_saved_registers != 0)
8081 return 0;
8082 /* FIXME: Not sure about this. Maybe we should always return 0 ? */
8083 return (frame_pointer_needed
8084 && current_function_needs_context
8085 && ! current_function_anonymous_args) ? 4 : 0;
8087 case STACK_POINTER_REGNUM:
8088 /* If nothing has been pushed on the stack at all
8089 then this will return -4. This *is* correct! */
8090 return call_saved_registers + stack_frame + local_vars + outgoing_args - 4;
8092 default:
8093 abort ();
8095 break;
8097 case FRAME_POINTER_REGNUM:
8098 switch (to)
8100 case THUMB_HARD_FRAME_POINTER_REGNUM:
8101 return 0;
8103 case ARM_HARD_FRAME_POINTER_REGNUM:
8104 /* The hard frame pointer points to the top entry in the
8105 stack frame. The soft frame pointer to the bottom entry
8106 in the stack frame. If there is no stack frame at all,
8107 then they are identical. */
8108 if (call_saved_registers == 0 && stack_frame == 0)
8109 return 0;
8110 return - (call_saved_registers + stack_frame - 4);
8112 case STACK_POINTER_REGNUM:
8113 return local_vars + outgoing_args;
8115 default:
8116 abort ();
8118 break;
8120 default:
8121 /* You cannot eliminate from the stack pointer.
8122 In theory you could eliminate from the hard frame
8123 pointer to the stack pointer, but this will never
8124 happen, since if a stack frame is not needed the
8125 hard frame pointer will never be used. */
8126 abort ();
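/* Editorial worked example: a function that needs a frame pointer,
   saves only r4, and has 8 bytes of locals and no outgoing arguments
   gives call_saved_registers = 4 and stack_frame = 16, so
   ARG_POINTER -> STACK_POINTER is 4 + 16 + 8 + 0 - 4 = 24,
   FRAME_POINTER -> STACK_POINTER is 8, and
   FRAME_POINTER -> ARM_HARD_FRAME_POINTER is -(4 + 16 - 4) = -16. */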
8130 /* Generate the prologue instructions for entry into an ARM function. */
8132 void
8133 arm_expand_prologue ()
8135 int reg;
8136 rtx amount;
8137 rtx insn;
8138 rtx ip_rtx;
8139 unsigned long live_regs_mask;
8140 unsigned long func_type;
8141 int fp_offset = 0;
8142 int saved_pretend_args = 0;
8143 unsigned int args_to_push;
8145 func_type = arm_current_func_type ();
8147 /* Naked functions don't have prologues. */
8148 if (IS_NAKED (func_type))
8149 return;
8151 /* Make a copy of c_f_p_a_s as we may need to modify it locally. */
8152 args_to_push = current_function_pretend_args_size;
8154 /* Compute which registers we will have to save onto the stack. */
8155 live_regs_mask = arm_compute_save_reg_mask ();
8157 ip_rtx = gen_rtx_REG (SImode, IP_REGNUM);
8159 if (frame_pointer_needed)
8161 if (IS_INTERRUPT (func_type))
8163 /* Interrupt functions must not corrupt any registers.
8164 Creating a frame pointer, however, corrupts the IP
8165 register, so we must push it first. */
8166 insn = emit_multi_reg_push (1 << IP_REGNUM);
8168 /* Do not set RTX_FRAME_RELATED_P on this insn.
8169 The dwarf stack unwinding code only wants to see one
8170 stack decrement per function, and this is not it. If
8171 this instruction is labeled as being part of the frame
8172 creation sequence then dwarf2out_frame_debug_expr will
8173 abort when it encounters the assignment of IP to FP
8174 later on, since the use of SP here establishes SP as
8175 the CFA register and not IP.
8177 Anyway this instruction is not really part of the stack
8178 frame creation although it is part of the prologue. */
8180 else if (IS_NESTED (func_type))
8182 /* The static chain register is the same as the IP register
8183 used as a scratch register during stack frame creation.
8184 To get around this we need to find somewhere to store IP
8185 whilst the frame is being created. We try the following
8186 places in order:
8188 1. The last argument register.
8189 2. A slot on the stack above the frame. (This only
8190 works if the function is not a varargs function).
8191 3. Register r3, after pushing the argument registers
8192 onto the stack.
8194 Note - we only need to tell the dwarf2 backend about the SP
8195 adjustment in the second variant; the static chain register
8196 doesn't need to be unwound, as it doesn't contain a value
8197 inherited from the caller. */
8199 if (regs_ever_live[3] == 0)
8201 insn = gen_rtx_REG (SImode, 3);
8202 insn = gen_rtx_SET (SImode, insn, ip_rtx);
8203 insn = emit_insn (insn);
8205 else if (args_to_push == 0)
8207 rtx dwarf;
8208 insn = gen_rtx_PRE_DEC (SImode, stack_pointer_rtx);
8209 insn = gen_rtx_MEM (SImode, insn);
8210 insn = gen_rtx_SET (VOIDmode, insn, ip_rtx);
8211 insn = emit_insn (insn);
8213 fp_offset = 4;
8215 /* Just tell the dwarf backend that we adjusted SP. */
8216 dwarf = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
8217 gen_rtx_PLUS (SImode, stack_pointer_rtx,
8218 GEN_INT (-fp_offset)));
8219 RTX_FRAME_RELATED_P (insn) = 1;
8220 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8221 dwarf, REG_NOTES (insn));
8223 else
8225 /* Store the args on the stack. */
8226 if (current_function_anonymous_args)
8227 insn = emit_multi_reg_push
8228 ((0xf0 >> (args_to_push / 4)) & 0xf);
8229 else
8230 insn = emit_insn
8231 (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
8232 GEN_INT (- args_to_push)));
8234 RTX_FRAME_RELATED_P (insn) = 1;
8236 saved_pretend_args = 1;
8237 fp_offset = args_to_push;
8238 args_to_push = 0;
8240 /* Now reuse r3 to preserve IP. */
8241 insn = gen_rtx_REG (SImode, 3);
8242 insn = gen_rtx_SET (SImode, insn, ip_rtx);
8243 (void) emit_insn (insn);
8247 if (fp_offset)
8249 insn = gen_rtx_PLUS (SImode, stack_pointer_rtx, GEN_INT (fp_offset));
8250 insn = gen_rtx_SET (SImode, ip_rtx, insn);
8252 else
8253 insn = gen_movsi (ip_rtx, stack_pointer_rtx);
8255 insn = emit_insn (insn);
8256 RTX_FRAME_RELATED_P (insn) = 1;
8259 if (args_to_push)
8261 /* Push the argument registers, or reserve space for them. */
8262 if (current_function_anonymous_args)
8263 insn = emit_multi_reg_push
8264 ((0xf0 >> (args_to_push / 4)) & 0xf);
8265 else
8266 insn = emit_insn
8267 (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
8268 GEN_INT (- args_to_push)));
8269 RTX_FRAME_RELATED_P (insn) = 1;
8272 if (live_regs_mask)
8274 insn = emit_multi_reg_push (live_regs_mask);
8275 RTX_FRAME_RELATED_P (insn) = 1;
8278 if (! IS_VOLATILE (func_type))
8280 /* Save any floating point call-saved registers used by this function. */
8281 if (arm_fpu_arch == FP_SOFT2)
8283 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg --)
8284 if (regs_ever_live[reg] && !call_used_regs[reg])
8286 insn = gen_rtx_PRE_DEC (XFmode, stack_pointer_rtx);
8287 insn = gen_rtx_MEM (XFmode, insn);
8288 insn = emit_insn (gen_rtx_SET (VOIDmode, insn,
8289 gen_rtx_REG (XFmode, reg)));
8290 RTX_FRAME_RELATED_P (insn) = 1;
8293 else
8295 int start_reg = LAST_ARM_FP_REGNUM;
8297 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg --)
8299 if (regs_ever_live[reg] && !call_used_regs[reg])
8301 if (start_reg - reg == 3)
8303 insn = emit_sfm (reg, 4);
8304 RTX_FRAME_RELATED_P (insn) = 1;
8305 start_reg = reg - 1;
8308 else
8310 if (start_reg != reg)
8312 insn = emit_sfm (reg + 1, start_reg - reg);
8313 RTX_FRAME_RELATED_P (insn) = 1;
8315 start_reg = reg - 1;
8319 if (start_reg != reg)
8321 insn = emit_sfm (reg + 1, start_reg - reg);
8322 RTX_FRAME_RELATED_P (insn) = 1;
8327 if (frame_pointer_needed)
8329 /* Create the new frame pointer. */
8330 insn = GEN_INT (-(4 + args_to_push + fp_offset));
8331 insn = emit_insn (gen_addsi3 (hard_frame_pointer_rtx, ip_rtx, insn));
8332 RTX_FRAME_RELATED_P (insn) = 1;
8334 if (IS_NESTED (func_type))
8336 /* Recover the static chain register. */
8337 if (regs_ever_live [3] == 0
8338 || saved_pretend_args)
8339 insn = gen_rtx_REG (SImode, 3);
8340 else /* if (current_function_pretend_args_size == 0) */
8342 insn = gen_rtx_PLUS (SImode, hard_frame_pointer_rtx, GEN_INT (4));
8343 insn = gen_rtx_MEM (SImode, insn);
8346 insn = gen_rtx_SET (SImode, ip_rtx, insn);
8347 insn = emit_insn (insn);
8348 /* Add a reg note to stop propagate_one_insn () from barfing. */
8349 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, ip_rtx,
8350 REG_NOTES (insn));
8354 amount = GEN_INT (-(get_frame_size ()
8355 + current_function_outgoing_args_size));
8357 if (amount != const0_rtx)
8359 /* This add can produce multiple insns for a large constant, so we
8360 need to get tricky. */
8361 rtx last = get_last_insn ();
8362 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
8363 amount));
8366 last = last ? NEXT_INSN (last) : get_insns ();
8367 RTX_FRAME_RELATED_P (last) = 1;
8369 while (last != insn);
8371 /* If the frame pointer is needed, emit a special barrier that
8372 will prevent the scheduler from moving stores to the frame
8373 before the stack adjustment. */
8374 if (frame_pointer_needed)
8376 rtx unspec = gen_rtx_UNSPEC (SImode,
8377 gen_rtvec (2, stack_pointer_rtx,
8378 hard_frame_pointer_rtx),
8379 UNSPEC_PRLG_STK);
8381 insn = emit_insn (gen_rtx_CLOBBER (VOIDmode,
8382 gen_rtx_MEM (BLKmode, unspec)));
8386 /* If we are profiling, make sure no instructions are scheduled before
8387 the call to mcount. Similarly if the user has requested no
8388 scheduling in the prolog. */
8389 if (current_function_profile || TARGET_NO_SCHED_PRO)
8390 emit_insn (gen_blockage ());
8392 /* If the link register is being kept alive, with the return address in it,
8393 then make sure that it does not get reused by the ce2 pass. */
8394 if ((live_regs_mask & (1 << LR_REGNUM)) == 0)
8396 emit_insn (gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, LR_REGNUM)));
8397 cfun->machine->lr_save_eliminated = 1;
8401 /* If CODE is 'd', then the X is a condition operand and the instruction
8402 should only be executed if the condition is true.
8403 if CODE is 'D', then the X is a condition operand and the instruction
8404 should only be executed if the condition is false: however, if the mode
8405 of the comparison is CCFPEmode, then always execute the instruction -- we
8406 do this because in these circumstances !GE does not necessarily imply LT;
8407 in these cases the instruction pattern will take care to make sure that
8408 an instruction containing %d will follow, thereby undoing the effects of
8409 doing this instruction unconditionally.
8410 If CODE is 'N' then X is a floating point operand that must be negated
8411 before output.
8412 If CODE is 'B' then output a bitwise inverted value of X (a const int).
8413 If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */
8415 void
8416 arm_print_operand (stream, x, code)
8417 FILE * stream;
8418 rtx x;
8419 int code;
8421 switch (code)
8423 case '@':
8424 fputs (ASM_COMMENT_START, stream);
8425 return;
8427 case '_':
8428 fputs (user_label_prefix, stream);
8429 return;
8431 case '|':
8432 fputs (REGISTER_PREFIX, stream);
8433 return;
8435 case '?':
8436 if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
8438 if (TARGET_THUMB || current_insn_predicate != NULL)
8439 abort ();
8441 fputs (arm_condition_codes[arm_current_cc], stream);
8443 else if (current_insn_predicate)
8445 enum arm_cond_code code;
8447 if (TARGET_THUMB)
8448 abort ();
8450 code = get_arm_condition_code (current_insn_predicate);
8451 fputs (arm_condition_codes[code], stream);
8453 return;
8455 case 'N':
8457 REAL_VALUE_TYPE r;
8458 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
8459 r = REAL_VALUE_NEGATE (r);
8460 fprintf (stream, "%s", fp_const_from_val (&r));
8462 return;
8464 case 'B':
8465 if (GET_CODE (x) == CONST_INT)
8467 HOST_WIDE_INT val;
8468 val = ARM_SIGN_EXTEND (~INTVAL (x));
8469 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
8471 else
8473 putc ('~', stream);
8474 output_addr_const (stream, x);
8476 return;
8478 case 'i':
8479 fprintf (stream, "%s", arithmetic_instr (x, 1));
8480 return;
8482 case 'I':
8483 fprintf (stream, "%s", arithmetic_instr (x, 0));
8484 return;
8486 case 'S':
8488 HOST_WIDE_INT val;
8489 const char * shift = shift_op (x, &val);
8491 if (shift)
8493 fprintf (stream, ", %s ", shift);
8494 if (val == -1)
8495 arm_print_operand (stream, XEXP (x, 1), 0);
8496 else
8498 fputc ('#', stream);
8499 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
8503 return;
8505 /* An explanation of the 'Q', 'R' and 'H' register operands:
8507 In a pair of registers containing a DI or DF value the 'Q'
8508 operand returns the register number of the register containing
8509 the least significant part of the value. The 'R' operand returns
8510 the register number of the register containing the most
8511 significant part of the value.
8513 The 'H' operand returns the higher of the two register numbers.
8514 On a run where WORDS_BIG_ENDIAN is true the 'H' operand is the
8515 same as the 'Q' operand, since the most significant part of the
8516 value is held in the lower-numbered register. The reverse is true
8517 on systems where WORDS_BIG_ENDIAN is false.
8519 The purpose of these operands is to distinguish between cases
8520 where the endian-ness of the values is important (for example
8521 when they are added together), and cases where the endian-ness
8522 is irrelevant, but the order of register operations is important.
8523 For example when loading a value from memory into a register
8524 pair, the endian-ness does not matter. Provided that the value
8525 from the lower memory address is put into the lower numbered
8526 register, and the value from the higher address is put into the
8527 higher numbered register, the load will work regardless of whether
8528 the value being loaded is big-wordian or little-wordian. The
8529 order of the two register loads can matter however, if the address
8530 of the memory location is actually held in one of the registers
8531 being overwritten by the load. */
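/* An illustrative example: for a DImode value held in {r0, r1} on a
   little-endian target, %Q prints r0 (the least significant word),
   %R prints r1 (the most significant word), and %H prints r1, the
   higher-numbered register of the pair.  */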
8532 case 'Q':
8533 if (REGNO (x) > LAST_ARM_REGNUM)
8534 abort ();
8535 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 1 : 0));
8536 return;
8538 case 'R':
8539 if (REGNO (x) > LAST_ARM_REGNUM)
8540 abort ();
8541 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 0 : 1));
8542 return;
8544 case 'H':
8545 if (REGNO (x) > LAST_ARM_REGNUM)
8546 abort ();
8547 asm_fprintf (stream, "%r", REGNO (x) + 1);
8548 return;
8550 case 'm':
8551 asm_fprintf (stream, "%r",
8552 GET_CODE (XEXP (x, 0)) == REG
8553 ? REGNO (XEXP (x, 0)) : REGNO (XEXP (XEXP (x, 0), 0)));
8554 return;
8556 case 'M':
8557 asm_fprintf (stream, "{%r-%r}",
8558 REGNO (x),
8559 REGNO (x) + NUM_REGS (GET_MODE (x)) - 1);
8560 return;
8562 case 'd':
8563 if (!x)
8564 return;
8566 if (TARGET_ARM)
8567 fputs (arm_condition_codes[get_arm_condition_code (x)],
8568 stream);
8569 else
8570 fputs (thumb_condition_code (x, 0), stream);
8571 return;
8573 case 'D':
8574 if (!x)
8575 return;
8577 if (TARGET_ARM)
8578 fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
8579 (get_arm_condition_code (x))],
8580 stream);
8581 else
8582 fputs (thumb_condition_code (x, 1), stream);
8583 return;
8585 default:
8586 if (x == 0)
8587 abort ();
8589 if (GET_CODE (x) == REG)
8590 asm_fprintf (stream, "%r", REGNO (x));
8591 else if (GET_CODE (x) == MEM)
8593 output_memory_reference_mode = GET_MODE (x);
8594 output_address (XEXP (x, 0));
8596 else if (GET_CODE (x) == CONST_DOUBLE)
8597 fprintf (stream, "#%s", fp_immediate_constant (x));
8598 else if (GET_CODE (x) == NEG)
8599 abort (); /* This should never happen now. */
8600 else
8602 fputc ('#', stream);
8603 output_addr_const (stream, x);
8608 #ifndef AOF_ASSEMBLER
8609 /* Target hook for assembling integer objects. The ARM version needs to
8610 handle word-sized values specially. */
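/* For example (symbol names illustrative): with flag_pic set and a
   constant table in progress, a word referencing the global symbol
   `foo' is emitted as `.word foo(GOT)', while a label or a
   constant-pool symbol is emitted as `.word .L3(GOTOFF)'.  */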
8612 static bool
8613 arm_assemble_integer (x, size, aligned_p)
8614 rtx x;
8615 unsigned int size;
8616 int aligned_p;
8618 if (size == UNITS_PER_WORD && aligned_p)
8620 fputs ("\t.word\t", asm_out_file);
8621 output_addr_const (asm_out_file, x);
8623 /* Mark symbols as position independent. We only do this in the
8624 .text segment, not in the .data segment. */
8625 if (NEED_GOT_RELOC && flag_pic && making_const_table &&
8626 (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF))
8628 if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
8629 fputs ("(GOTOFF)", asm_out_file);
8630 else if (GET_CODE (x) == LABEL_REF)
8631 fputs ("(GOTOFF)", asm_out_file);
8632 else
8633 fputs ("(GOT)", asm_out_file);
8635 fputc ('\n', asm_out_file);
8636 return true;
8639 return default_assemble_integer (x, size, aligned_p);
8641 #endif
8643 /* A finite state machine takes care of noticing whether or not instructions
8644 can be conditionally executed, and thus decrease execution time and code
8645 size by deleting branch instructions. The fsm is controlled by
8646 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
8648 /* The states of the fsm controlling condition codes are:
8649 0: normal, do nothing special
8650 1: make ASM_OUTPUT_OPCODE not output this instruction
8651 2: make ASM_OUTPUT_OPCODE not output this instruction
8652 3: make instructions conditional
8653 4: make instructions conditional
8655 State transitions (state->state by whom under condition):
8656 0 -> 1 final_prescan_insn if the `target' is a label
8657 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
8658 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
8659 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
8660 3 -> 0 ASM_OUTPUT_INTERNAL_LABEL if the `target' label is reached
8661 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
8662 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
8663 (the target insn is arm_target_insn).
8665 If the jump clobbers the conditions then we use states 2 and 4.
8667 A similar thing can be done with conditional return insns.
8669 XXX In case the `target' is an unconditional branch, this conditionalising
8670 of the instructions always reduces code size, but not always execution
8671 time. But then, I want to reduce the code size to somewhere near what
8672 /bin/cc produces. */
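/* An illustrative sketch of the transformation: a conditional skip
   such as

       cmp  r0, #0
       beq  .L1
       mov  r1, #1
   .L1:

   can be rewritten by this fsm as

       cmp   r0, #0
       movne r1, #1

   removing the branch altogether.  */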
8674 /* Returns the index of the ARM condition code string in
8675 `arm_condition_codes'. COMPARISON should be an rtx like
8676 `(eq (...) (...))'. */
8678 static enum arm_cond_code
8679 get_arm_condition_code (comparison)
8680 rtx comparison;
8682 enum machine_mode mode = GET_MODE (XEXP (comparison, 0));
8683 int code;
8684 enum rtx_code comp_code = GET_CODE (comparison);
8686 if (GET_MODE_CLASS (mode) != MODE_CC)
8687 mode = SELECT_CC_MODE (comp_code, XEXP (comparison, 0),
8688 XEXP (comparison, 1));
8690 switch (mode)
8692 case CC_DNEmode: code = ARM_NE; goto dominance;
8693 case CC_DEQmode: code = ARM_EQ; goto dominance;
8694 case CC_DGEmode: code = ARM_GE; goto dominance;
8695 case CC_DGTmode: code = ARM_GT; goto dominance;
8696 case CC_DLEmode: code = ARM_LE; goto dominance;
8697 case CC_DLTmode: code = ARM_LT; goto dominance;
8698 case CC_DGEUmode: code = ARM_CS; goto dominance;
8699 case CC_DGTUmode: code = ARM_HI; goto dominance;
8700 case CC_DLEUmode: code = ARM_LS; goto dominance;
8701 case CC_DLTUmode: code = ARM_CC;
8703 dominance:
8704 if (comp_code != EQ && comp_code != NE)
8705 abort ();
8707 if (comp_code == EQ)
8708 return ARM_INVERSE_CONDITION_CODE (code);
8709 return code;
8711 case CC_NOOVmode:
8712 switch (comp_code)
8714 case NE: return ARM_NE;
8715 case EQ: return ARM_EQ;
8716 case GE: return ARM_PL;
8717 case LT: return ARM_MI;
8718 default: abort ();
8721 case CC_Zmode:
8722 switch (comp_code)
8724 case NE: return ARM_NE;
8725 case EQ: return ARM_EQ;
8726 default: abort ();
8729 case CCFPEmode:
8730 case CCFPmode:
8731 /* These encodings assume that AC=1 in the FPA system control
8732 byte. This allows us to handle all cases except UNEQ and
8733 LTGT. */
8734 switch (comp_code)
8736 case GE: return ARM_GE;
8737 case GT: return ARM_GT;
8738 case LE: return ARM_LS;
8739 case LT: return ARM_MI;
8740 case NE: return ARM_NE;
8741 case EQ: return ARM_EQ;
8742 case ORDERED: return ARM_VC;
8743 case UNORDERED: return ARM_VS;
8744 case UNLT: return ARM_LT;
8745 case UNLE: return ARM_LE;
8746 case UNGT: return ARM_HI;
8747 case UNGE: return ARM_PL;
8748 /* UNEQ and LTGT do not have a representation. */
8749 case UNEQ: /* Fall through. */
8750 case LTGT: /* Fall through. */
8751 default: abort ();
8754 case CC_SWPmode:
8755 switch (comp_code)
8757 case NE: return ARM_NE;
8758 case EQ: return ARM_EQ;
8759 case GE: return ARM_LE;
8760 case GT: return ARM_LT;
8761 case LE: return ARM_GE;
8762 case LT: return ARM_GT;
8763 case GEU: return ARM_LS;
8764 case GTU: return ARM_CC;
8765 case LEU: return ARM_CS;
8766 case LTU: return ARM_HI;
8767 default: abort ();
8770 case CC_Cmode:
8771 switch (comp_code)
8773 case LTU: return ARM_CS;
8774 case GEU: return ARM_CC;
8775 default: abort ();
8778 case CCmode:
8779 switch (comp_code)
8781 case NE: return ARM_NE;
8782 case EQ: return ARM_EQ;
8783 case GE: return ARM_GE;
8784 case GT: return ARM_GT;
8785 case LE: return ARM_LE;
8786 case LT: return ARM_LT;
8787 case GEU: return ARM_CS;
8788 case GTU: return ARM_HI;
8789 case LEU: return ARM_LS;
8790 case LTU: return ARM_CC;
8791 default: abort ();
8794 default: abort ();
8797 abort ();
8801 void
8802 arm_final_prescan_insn (insn)
8803 rtx insn;
8805 /* BODY will hold the body of INSN. */
8806 rtx body = PATTERN (insn);
8808 /* This will be 1 if trying to repeat the trick, and things need to be
8809 reversed if it appears to fail. */
8810 int reverse = 0;
8812 /* A nonzero JUMP_CLOBBERS implies that the condition codes are clobbered
8813 if the branch is taken, even if the rtl suggests otherwise. It also
8814 means that we have to grub around within the jump expression to find
8815 out what the conditions are when the jump isn't taken. */
8816 int jump_clobbers = 0;
8818 /* If we start with a return insn, we only succeed if we find another one. */
8819 int seeking_return = 0;
8821 /* START_INSN will hold the insn from where we start looking. This is the
8822 first insn after the following code_label if REVERSE is true. */
8823 rtx start_insn = insn;
8825 /* If in state 4, check if the target branch is reached, in order to
8826 change back to state 0. */
8827 if (arm_ccfsm_state == 4)
8829 if (insn == arm_target_insn)
8831 arm_target_insn = NULL;
8832 arm_ccfsm_state = 0;
8834 return;
8837 /* If in state 3, it is possible to repeat the trick, if this insn is an
8838 unconditional branch to a label, and immediately following this branch
8839 is the previous target label which is only used once, and the label this
8840 branch jumps to is not too far off. */
8841 if (arm_ccfsm_state == 3)
8843 if (simplejump_p (insn))
8845 start_insn = next_nonnote_insn (start_insn);
8846 if (GET_CODE (start_insn) == BARRIER)
8848 /* XXX Isn't this always a barrier? */
8849 start_insn = next_nonnote_insn (start_insn);
8851 if (GET_CODE (start_insn) == CODE_LABEL
8852 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
8853 && LABEL_NUSES (start_insn) == 1)
8854 reverse = TRUE;
8855 else
8856 return;
8858 else if (GET_CODE (body) == RETURN)
8860 start_insn = next_nonnote_insn (start_insn);
8861 if (GET_CODE (start_insn) == BARRIER)
8862 start_insn = next_nonnote_insn (start_insn);
8863 if (GET_CODE (start_insn) == CODE_LABEL
8864 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
8865 && LABEL_NUSES (start_insn) == 1)
8867 reverse = TRUE;
8868 seeking_return = 1;
8870 else
8871 return;
8873 else
8874 return;
8877 if (arm_ccfsm_state != 0 && !reverse)
8878 abort ();
8879 if (GET_CODE (insn) != JUMP_INSN)
8880 return;
8882 /* This jump might be paralleled with a clobber of the condition codes;
8883 the jump should always come first. */
8884 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
8885 body = XVECEXP (body, 0, 0);
8887 #if 0
8888 /* If this is a conditional return then we don't want to know */
8889 if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
8890 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
8891 && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
8892 || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
8893 return;
8894 #endif
8896 if (reverse
8897 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
8898 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
8900 int insns_skipped;
8901 int fail = FALSE, succeed = FALSE;
8902 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
8903 int then_not_else = TRUE;
8904 rtx this_insn = start_insn, label = 0;
8906 /* If the jump cannot be done with one instruction, we cannot
8907 conditionally execute the instruction in the inverse case. */
8908 if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
8910 jump_clobbers = 1;
8911 return;
8914 /* Register the insn jumped to. */
8915 if (reverse)
8917 if (!seeking_return)
8918 label = XEXP (SET_SRC (body), 0);
8920 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
8921 label = XEXP (XEXP (SET_SRC (body), 1), 0);
8922 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
8924 label = XEXP (XEXP (SET_SRC (body), 2), 0);
8925 then_not_else = FALSE;
8927 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
8928 seeking_return = 1;
8929 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
8931 seeking_return = 1;
8932 then_not_else = FALSE;
8934 else
8935 abort ();
8937 /* See how many insns this branch skips, and what kind of insns. If all
8938 insns are okay, and the label or unconditional branch to the same
8939 label is not too far away, succeed. */
8940 for (insns_skipped = 0;
8941 !fail && !succeed && insns_skipped++ < max_insns_skipped;)
8943 rtx scanbody;
8945 this_insn = next_nonnote_insn (this_insn);
8946 if (!this_insn)
8947 break;
8949 switch (GET_CODE (this_insn))
8951 case CODE_LABEL:
8952 /* Succeed if it is the target label, otherwise fail since
8953 control falls in from somewhere else. */
8954 if (this_insn == label)
8956 if (jump_clobbers)
8958 arm_ccfsm_state = 2;
8959 this_insn = next_nonnote_insn (this_insn);
8961 else
8962 arm_ccfsm_state = 1;
8963 succeed = TRUE;
8965 else
8966 fail = TRUE;
8967 break;
8969 case BARRIER:
8970 /* Succeed if the following insn is the target label.
8971 Otherwise fail.
8972 If return insns are used then the last insn in a function
8973 will be a barrier. */
8974 this_insn = next_nonnote_insn (this_insn);
8975 if (this_insn && this_insn == label)
8977 if (jump_clobbers)
8979 arm_ccfsm_state = 2;
8980 this_insn = next_nonnote_insn (this_insn);
8982 else
8983 arm_ccfsm_state = 1;
8984 succeed = TRUE;
8986 else
8987 fail = TRUE;
8988 break;
8990 case CALL_INSN:
8991 /* If using 32-bit addresses the cc is not preserved over
8992 calls. */
8993 if (TARGET_APCS_32)
8995 /* Succeed if the following insn is the target label,
8996 or if the following two insns are a barrier and
8997 the target label. */
8998 this_insn = next_nonnote_insn (this_insn);
8999 if (this_insn && GET_CODE (this_insn) == BARRIER)
9000 this_insn = next_nonnote_insn (this_insn);
9002 if (this_insn && this_insn == label
9003 && insns_skipped < max_insns_skipped)
9005 if (jump_clobbers)
9007 arm_ccfsm_state = 2;
9008 this_insn = next_nonnote_insn (this_insn);
9010 else
9011 arm_ccfsm_state = 1;
9012 succeed = TRUE;
9014 else
9015 fail = TRUE;
9017 break;
9019 case JUMP_INSN:
9020 /* If this is an unconditional branch to the same label, succeed.
9021 If it is to another label, do nothing. If it is conditional,
9022 fail. */
9023 /* XXX Probably, the tests for SET and the PC are unnecessary. */
9025 scanbody = PATTERN (this_insn);
9026 if (GET_CODE (scanbody) == SET
9027 && GET_CODE (SET_DEST (scanbody)) == PC)
9029 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
9030 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
9032 arm_ccfsm_state = 2;
9033 succeed = TRUE;
9035 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
9036 fail = TRUE;
9038 /* Fail if a conditional return is undesirable (eg on a
9039 StrongARM), but still allow this if optimizing for size. */
9040 else if (GET_CODE (scanbody) == RETURN
9041 && !use_return_insn (TRUE)
9042 && !optimize_size)
9043 fail = TRUE;
9044 else if (GET_CODE (scanbody) == RETURN
9045 && seeking_return)
9047 arm_ccfsm_state = 2;
9048 succeed = TRUE;
9050 else if (GET_CODE (scanbody) == PARALLEL)
9052 switch (get_attr_conds (this_insn))
9054 case CONDS_NOCOND:
9055 break;
9056 default:
9057 fail = TRUE;
9058 break;
9061 else
9062 fail = TRUE; /* Unrecognized jump (eg epilogue). */
9064 break;
9066 case INSN:
9067 /* Instructions using or affecting the condition codes make it
9068 fail. */
9069 scanbody = PATTERN (this_insn);
9070 if (!(GET_CODE (scanbody) == SET
9071 || GET_CODE (scanbody) == PARALLEL)
9072 || get_attr_conds (this_insn) != CONDS_NOCOND)
9073 fail = TRUE;
9074 break;
9076 default:
9077 break;
9080 if (succeed)
9082 if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
9083 arm_target_label = CODE_LABEL_NUMBER (label);
9084 else if (seeking_return || arm_ccfsm_state == 2)
9086 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
9088 this_insn = next_nonnote_insn (this_insn);
9089 if (this_insn && (GET_CODE (this_insn) == BARRIER
9090 || GET_CODE (this_insn) == CODE_LABEL))
9091 abort ();
9093 if (!this_insn)
9095 /* Oh dear! We ran off the end... give up. */
9096 recog (PATTERN (insn), insn, NULL);
9097 arm_ccfsm_state = 0;
9098 arm_target_insn = NULL;
9099 return;
9101 arm_target_insn = this_insn;
9103 else
9104 abort ();
9105 if (jump_clobbers)
9107 if (reverse)
9108 abort ();
9109 arm_current_cc =
9110 get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
9111 0), 0), 1));
9112 if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
9113 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
9114 if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
9115 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
9117 else
9119 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
9120 what it was. */
9121 if (!reverse)
9122 arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
9123 0));
9126 if (reverse || then_not_else)
9127 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
9130 /* Restore recog_data (getting the attributes of other insns can
9131 destroy this array, but final.c assumes that it remains intact
9132 across this call; since the insn has been recognized already we
9133 call recog direct). */
9134 recog (PATTERN (insn), insn, NULL);
9139 arm_regno_class (regno)
9140 int regno;
9142 if (TARGET_THUMB)
9144 if (regno == STACK_POINTER_REGNUM)
9145 return STACK_REG;
9146 if (regno == CC_REGNUM)
9147 return CC_REG;
9148 if (regno < 8)
9149 return LO_REGS;
9150 return HI_REGS;
9153 if ( regno <= LAST_ARM_REGNUM
9154 || regno == FRAME_POINTER_REGNUM
9155 || regno == ARG_POINTER_REGNUM)
9156 return GENERAL_REGS;
9158 if (regno == CC_REGNUM)
9159 return NO_REGS;
9161 return FPU_REGS;
9164 /* Handle a special case when computing the offset
9165 of an argument from the frame pointer. */
9167 int
9168 arm_debugger_arg_offset (value, addr)
9169 int value;
9170 rtx addr;
9172 rtx insn;
9174 /* We are only interested if dbxout_parms() failed to compute the offset. */
9175 if (value != 0)
9176 return 0;
9178 /* We can only cope with the case where the address is held in a register. */
9179 if (GET_CODE (addr) != REG)
9180 return 0;
9182 /* If we are using the frame pointer to point at the argument, then
9183 an offset of 0 is correct. */
9184 if (REGNO (addr) == (unsigned) HARD_FRAME_POINTER_REGNUM)
9185 return 0;
9187 /* If we are using the stack pointer to point at the
9188 argument, then an offset of 0 is correct. */
9189 if ((TARGET_THUMB || !frame_pointer_needed)
9190 && REGNO (addr) == SP_REGNUM)
9191 return 0;
9193 /* Oh dear. The argument is pointed to by a register rather
9194 than being held in a register, or being stored at a known
9195 offset from the frame pointer. Since GDB only understands
9196 those two kinds of argument we must translate the address
9197 held in the register into an offset from the frame pointer.
9198 We do this by searching through the insns for the function
9199 looking to see where this register gets its value. If the
9200 register is initialised from the frame pointer plus an offset
9201 then we are in luck and we can continue, otherwise we give up.
9203 This code is exercised by producing debugging information
9204 for a function with arguments like this:
9206 double func (double a, double b, int c, double d) {return d;}
9208 Without this code the stab for parameter 'd' will be set to
9209 an offset of 0 from the frame pointer, rather than 8. */
9211 /* The if() statement says:
9213 If the insn is a normal instruction
9214 and if the insn is setting the value in a register
9215 and if the register being set is the register holding the address of the argument
9216 and if the address is computed by an addition
9217 that involves adding to a register
9218 which is the frame pointer
9219 a constant integer
9221 then... */
9223 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9225 if ( GET_CODE (insn) == INSN
9226 && GET_CODE (PATTERN (insn)) == SET
9227 && REGNO (XEXP (PATTERN (insn), 0)) == REGNO (addr)
9228 && GET_CODE (XEXP (PATTERN (insn), 1)) == PLUS
9229 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 0)) == REG
9230 && REGNO (XEXP (XEXP (PATTERN (insn), 1), 0)) == (unsigned) HARD_FRAME_POINTER_REGNUM
9231 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 1)) == CONST_INT
9234 value = INTVAL (XEXP (XEXP (PATTERN (insn), 1), 1));
9236 break;
9240 if (value == 0)
9242 debug_rtx (addr);
9243 warning ("unable to compute real location of stacked parameter");
9244 value = 8; /* XXX magic hack */
9247 return value;
9250 #define def_builtin(NAME, TYPE, CODE) \
9251 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, NULL)
9253 void
9254 arm_init_builtins ()
9256 tree endlink = void_list_node;
9257 tree int_endlink = tree_cons (NULL_TREE, integer_type_node, endlink);
9258 tree pchar_type_node = build_pointer_type (char_type_node);
9260 tree int_ftype_int, void_ftype_pchar;
9262 /* void func (char *) */
9263 void_ftype_pchar
9264 = build_function_type (void_type_node,
9265 tree_cons (NULL_TREE, pchar_type_node, endlink));
9267 /* int func (int) */
9268 int_ftype_int
9269 = build_function_type (integer_type_node, int_endlink);
9271 /* Initialize arm V5 builtins. */
9272 if (arm_arch5)
9273 def_builtin ("__builtin_clz", int_ftype_int, ARM_BUILTIN_CLZ);
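/* An illustrative use, assuming an ARMv5 target: the C source

       int f (int x) { return __builtin_clz (x); }

   is routed through ARM_BUILTIN_CLZ in arm_expand_builtin () below
   and expands to a single `clz' instruction.  */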
9276 /* Expand an expression EXP that calls a built-in function,
9277 with result going to TARGET if that's convenient
9278 (and in mode MODE if that's convenient).
9279 SUBTARGET may be used as the target for computing one of EXP's operands.
9280 IGNORE is nonzero if the value is to be ignored. */
9282 rtx
9283 arm_expand_builtin (exp, target, subtarget, mode, ignore)
9284 tree exp;
9285 rtx target;
9286 rtx subtarget ATTRIBUTE_UNUSED;
9287 enum machine_mode mode ATTRIBUTE_UNUSED;
9288 int ignore ATTRIBUTE_UNUSED;
9290 enum insn_code icode;
9291 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9292 tree arglist = TREE_OPERAND (exp, 1);
9293 tree arg0;
9294 rtx op0, pat;
9295 enum machine_mode tmode, mode0;
9296 int fcode = DECL_FUNCTION_CODE (fndecl);
9298 switch (fcode)
9300 default:
9301 break;
9303 case ARM_BUILTIN_CLZ:
9304 icode = CODE_FOR_clz;
9305 arg0 = TREE_VALUE (arglist);
9306 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
9307 tmode = insn_data[icode].operand[0].mode;
9308 mode0 = insn_data[icode].operand[1].mode;
9310 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
9311 op0 = copy_to_mode_reg (mode0, op0);
9312 if (target == 0
9313 || GET_MODE (target) != tmode
9314 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
9315 target = gen_reg_rtx (tmode);
9316 pat = GEN_FCN (icode) (target, op0);
9317 if (! pat)
9318 return 0;
9319 emit_insn (pat);
9320 return target;
9323 /* @@@ Should really do something sensible here. */
9324 return NULL_RTX;
9327 /* Recursively search through all of the blocks in a function
9328 checking to see if any of the variables created in that
9329 function match the RTX called 'orig'. If they do then
9330 replace them with the RTX called 'new'. */
9332 static void
9333 replace_symbols_in_block (block, orig, new)
9334 tree block;
9335 rtx orig;
9336 rtx new;
9338 for (; block; block = BLOCK_CHAIN (block))
9340 tree sym;
9342 if (!TREE_USED (block))
9343 continue;
9345 for (sym = BLOCK_VARS (block); sym; sym = TREE_CHAIN (sym))
9347 if ( (DECL_NAME (sym) == 0 && TREE_CODE (sym) != TYPE_DECL)
9348 || DECL_IGNORED_P (sym)
9349 || TREE_CODE (sym) != VAR_DECL
9350 || DECL_EXTERNAL (sym)
9351 || !rtx_equal_p (DECL_RTL (sym), orig)
9353 continue;
9355 SET_DECL_RTL (sym, new);
9358 replace_symbols_in_block (BLOCK_SUBBLOCKS (block), orig, new);
9362 /* Return the number (counting from 0) of
9363 the least significant set bit in MASK. */
9365 #ifdef __GNUC__
9366 inline
9367 #endif
9368 static int
9369 number_of_first_bit_set (mask)
9370 int mask;
9372 int bit;
9374 for (bit = 0;
9375 (mask & (1 << bit)) == 0;
9376 ++bit)
9377 continue;
9379 return bit;
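/* For example, number_of_first_bit_set (0x18) returns 3, bit 3 being
   the lowest bit set.  MASK must be non-zero or the loop above will
   not terminate.  */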
9382 /* Generate code to return from a thumb function.
9383 If 'reg_containing_return_addr' is -1, then the return address is
9384 actually on the stack, at the stack pointer. */
9385 static void
9386 thumb_exit (f, reg_containing_return_addr, eh_ofs)
9387 FILE * f;
9388 int reg_containing_return_addr;
9389 rtx eh_ofs;
9391 unsigned regs_available_for_popping;
9392 unsigned regs_to_pop;
9393 int pops_needed;
9394 unsigned available;
9395 unsigned required;
9396 int mode;
9397 int size;
9398 int restore_a4 = FALSE;
9400 /* Compute the registers we need to pop. */
9401 regs_to_pop = 0;
9402 pops_needed = 0;
9404 /* There is an assumption here, that if eh_ofs is not NULL, the
9405 normal return address will have been pushed. */
9406 if (reg_containing_return_addr == -1 || eh_ofs)
9408 /* When we are generating a return for __builtin_eh_return,
9409 reg_containing_return_addr must specify the return regno. */
9410 if (eh_ofs && reg_containing_return_addr == -1)
9411 abort ();
9413 regs_to_pop |= 1 << LR_REGNUM;
9414 ++pops_needed;
9417 if (TARGET_BACKTRACE)
9419 /* Restore the (ARM) frame pointer and stack pointer. */
9420 regs_to_pop |= (1 << ARM_HARD_FRAME_POINTER_REGNUM) | (1 << SP_REGNUM);
9421 pops_needed += 2;
9424 /* If there is nothing to pop then just emit the BX instruction and
9425 return. */
9426 if (pops_needed == 0)
9428 if (eh_ofs)
9429 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
9431 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
9432 return;
9434 /* Otherwise if we are not supporting interworking and we have not created
9435 a backtrace structure and the function was not entered in ARM mode then
9436 just pop the return address straight into the PC. */
9437 else if (!TARGET_INTERWORK
9438 && !TARGET_BACKTRACE
9439 && !is_called_in_ARM_mode (current_function_decl))
9441 if (eh_ofs)
9443 asm_fprintf (f, "\tadd\t%r, #4\n", SP_REGNUM);
9444 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
9445 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
9447 else
9448 asm_fprintf (f, "\tpop\t{%r}\n", PC_REGNUM);
9450 return;
9453 /* Find out how many of the (return) argument registers we can corrupt. */
9454 regs_available_for_popping = 0;
9456 /* If returning via __builtin_eh_return, the bottom three registers
9457 all contain information needed for the return. */
9458 if (eh_ofs)
9459 size = 12;
9460 else
9462 #ifdef RTX_CODE
9463 /* Try to deduce the registers used from the function's
9464 return value. This is more reliable than examining
9465 regs_ever_live[] because that will be set if the register is
9466 ever used in the function, not just if the register is used
9467 to hold a return value. */
9469 if (current_function_return_rtx != 0)
9470 mode = GET_MODE (current_function_return_rtx);
9471 else
9472 #endif
9473 mode = DECL_MODE (DECL_RESULT (current_function_decl));
9475 size = GET_MODE_SIZE (mode);
9477 if (size == 0)
9479 /* In a void function we can use any argument register.
9480 In a function that returns a structure on the stack
9481 we can use the second and third argument registers. */
9482 if (mode == VOIDmode)
9483 regs_available_for_popping =
9484 (1 << ARG_REGISTER (1))
9485 | (1 << ARG_REGISTER (2))
9486 | (1 << ARG_REGISTER (3));
9487 else
9488 regs_available_for_popping =
9489 (1 << ARG_REGISTER (2))
9490 | (1 << ARG_REGISTER (3));
9492 else if (size <= 4)
9493 regs_available_for_popping =
9494 (1 << ARG_REGISTER (2))
9495 | (1 << ARG_REGISTER (3));
9496 else if (size <= 8)
9497 regs_available_for_popping =
9498 (1 << ARG_REGISTER (3));
9501 /* Match registers to be popped with registers into which we pop them. */
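/* (AVAILABLE & - AVAILABLE isolates the lowest set bit, so each
   iteration of the loop below retires one register that needs
   popping against one register it can be popped into.)  */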
9502 for (available = regs_available_for_popping,
9503 required = regs_to_pop;
9504 required != 0 && available != 0;
9505 available &= ~(available & - available),
9506 required &= ~(required & - required))
9507 -- pops_needed;
9509 /* If we have any popping registers left over, remove them. */
9510 if (available > 0)
9511 regs_available_for_popping &= ~available;
9513 /* Otherwise if we need another popping register we can use
9514 the fourth argument register. */
9515 else if (pops_needed)
9517 /* If we have not found any free argument registers and
9518 reg a4 contains the return address, we must move it. */
9519 if (regs_available_for_popping == 0
9520 && reg_containing_return_addr == LAST_ARG_REGNUM)
9522 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
9523 reg_containing_return_addr = LR_REGNUM;
9525 else if (size > 12)
9527 /* Register a4 is being used to hold part of the return value,
9528 but we have dire need of a free, low register. */
9529 restore_a4 = TRUE;
9531 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, LAST_ARG_REGNUM);
9534 if (reg_containing_return_addr != LAST_ARG_REGNUM)
9536 /* The fourth argument register is available. */
9537 regs_available_for_popping |= 1 << LAST_ARG_REGNUM;
9539 --pops_needed;
9543 /* Pop as many registers as we can. */
9544 thumb_pushpop (f, regs_available_for_popping, FALSE);
9546 /* Process the registers we popped. */
9547 if (reg_containing_return_addr == -1)
9549 /* The return address was popped into the lowest numbered register. */
9550 regs_to_pop &= ~(1 << LR_REGNUM);
9552 reg_containing_return_addr =
9553 number_of_first_bit_set (regs_available_for_popping);
9555 /* Remove this register from the mask of available registers, so that
9556 the return address will not be corrupted by further pops. */
9557 regs_available_for_popping &= ~(1 << reg_containing_return_addr);
9560 /* If we popped other registers then handle them here. */
9561 if (regs_available_for_popping)
9563 int frame_pointer;
9565 /* Work out which register currently contains the frame pointer. */
9566 frame_pointer = number_of_first_bit_set (regs_available_for_popping);
9568 /* Move it into the correct place. */
9569 asm_fprintf (f, "\tmov\t%r, %r\n",
9570 ARM_HARD_FRAME_POINTER_REGNUM, frame_pointer);
9572 /* (Temporarily) remove it from the mask of popped registers. */
9573 regs_available_for_popping &= ~(1 << frame_pointer);
9574 regs_to_pop &= ~(1 << ARM_HARD_FRAME_POINTER_REGNUM);
9576 if (regs_available_for_popping)
9578 int stack_pointer;
9580 /* We popped the stack pointer as well;
9581 find the register that contains it. */
9582 stack_pointer = number_of_first_bit_set (regs_available_for_popping);
9584 /* Move it into the stack register. */
9585 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, stack_pointer);
9587 /* At this point we have popped all necessary registers, so
9588 do not worry about restoring regs_available_for_popping
9589 to its correct value:
9591 assert (pops_needed == 0)
9592 assert (regs_available_for_popping == (1 << frame_pointer))
9593 assert (regs_to_pop == (1 << STACK_POINTER)) */
9595 else
9597 /* Since we have just moved the popped value into the frame
9598 pointer, the popping register is available for reuse, and
9599 we know that we still have the stack pointer left to pop. */
9600 regs_available_for_popping |= (1 << frame_pointer);
9604 /* If we still have registers left on the stack, but we no longer have
9605 any registers into which we can pop them, then we must move the return
9606 address into the link register and make available the register that
9607 contained it. */
9608 if (regs_available_for_popping == 0 && pops_needed > 0)
9610 regs_available_for_popping |= 1 << reg_containing_return_addr;
9612 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM,
9613 reg_containing_return_addr);
9615 reg_containing_return_addr = LR_REGNUM;
9618 /* If we have registers left on the stack then pop some more.
9619 We know that at most we will want to pop FP and SP. */
9620 if (pops_needed > 0)
9622 int popped_into;
9623 int move_to;
9625 thumb_pushpop (f, regs_available_for_popping, FALSE);
9627 /* We have popped either FP or SP.
9628 Move whichever one it is into the correct register. */
9629 popped_into = number_of_first_bit_set (regs_available_for_popping);
9630 move_to = number_of_first_bit_set (regs_to_pop);
9632 asm_fprintf (f, "\tmov\t%r, %r\n", move_to, popped_into);
9634 regs_to_pop &= ~(1 << move_to);
9636 --pops_needed;
9639 /* If we still have not popped everything then we must have only
9640 had one register available to us and we are now popping the SP. */
9641 if (pops_needed > 0)
9643 int popped_into;
9645 thumb_pushpop (f, regs_available_for_popping, FALSE);
9647 popped_into = number_of_first_bit_set (regs_available_for_popping);
9649 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, popped_into);
9651 /* assert (regs_to_pop == (1 << STACK_POINTER))
9652    assert (pops_needed == 1) */
9656 /* If necessary restore the a4 register. */
9657 if (restore_a4)
9659 if (reg_containing_return_addr != LR_REGNUM)
9661 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
9662 reg_containing_return_addr = LR_REGNUM;
9665 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
9668 if (eh_ofs)
9669 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
9671 /* Return to caller. */
9672 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
9675 /* Emit code to push or pop registers to or from the stack. */
9677 static void
9678 thumb_pushpop (f, mask, push)
9679 FILE * f;
9680 int mask;
9681 int push;
9683 int regno;
9684 int lo_mask = mask & 0xFF;
9686 if (lo_mask == 0 && !push && (mask & (1 << 15)))
9688 /* Special case. Do not generate a POP PC statement here, do it in
9689 thumb_exit() */
9690 thumb_exit (f, -1, NULL_RTX);
9691 return;
9694 fprintf (f, "\t%s\t{", push ? "push" : "pop");
9696 /* Look at the low registers first. */
9697 for (regno = 0; regno <= LAST_LO_REGNUM; regno++, lo_mask >>= 1)
9699 if (lo_mask & 1)
9701 asm_fprintf (f, "%r", regno);
9703 if ((lo_mask & ~1) != 0)
9704 fprintf (f, ", ");
9708 if (push && (mask & (1 << LR_REGNUM)))
9710 /* Catch pushing the LR. */
9711 if (mask & 0xFF)
9712 fprintf (f, ", ");
9714 asm_fprintf (f, "%r", LR_REGNUM);
9716 else if (!push && (mask & (1 << PC_REGNUM)))
9718 /* Catch popping the PC. */
9719 if (TARGET_INTERWORK || TARGET_BACKTRACE)
9721 /* The PC is never popped directly; instead
9722 it is popped into r3 and then BX is used. */
9723 fprintf (f, "}\n");
9725 thumb_exit (f, -1, NULL_RTX);
9727 return;
9729 else
9731 if (mask & 0xFF)
9732 fprintf (f, ", ");
9734 asm_fprintf (f, "%r", PC_REGNUM);
9738 fprintf (f, "}\n");
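/* For example (illustrative): a mask of 0x4003 with PUSH nonzero
   prints `push {r0, r1, lr}', and popping a mask of 0x8003 prints
   `pop {r0, r1, pc}', unless interworking or backtracing diverts
   through thumb_exit () as above.  */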
9741 void
9742 thumb_final_prescan_insn (insn)
9743 rtx insn;
9745 if (flag_print_asm_name)
9746 asm_fprintf (asm_out_file, "%@ 0x%04x\n",
9747 INSN_ADDRESSES (INSN_UID (insn)));
9750 int
9751 thumb_shiftable_const (val)
9752 unsigned HOST_WIDE_INT val;
9754 unsigned HOST_WIDE_INT mask = 0xff;
9755 int i;
9757 if (val == 0) /* XXX */
9758 return 0;
9760 for (i = 0; i < 25; i++)
9761 if ((val & (mask << i)) == val)
9762 return 1;
9764 return 0;
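/* For example, 0x00ff0000 (0xff << 16) is accepted, while 0x101 is
   rejected: its set bits do not fit within any contiguous 8-bit
   window.  */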
9767 /* Returns non-zero if the current function contains,
9768 or might contain, a far jump. */
9770 int
9771 thumb_far_jump_used_p (int in_prologue)
9773 rtx insn;
9775 /* This test is only important for leaf functions. */
9776 /* assert (!leaf_function_p ()); */
9778 /* If we have already decided that far jumps may be used,
9779 do not bother checking again, and always return true even if
9780 it turns out that they are not being used. Once we have made
9781 the decision that far jumps are present (and that hence the link
9782 register will be pushed onto the stack) we cannot go back on it. */
9783 if (cfun->machine->far_jump_used)
9784 return 1;
9786 /* If this function is not being called from the prologue/epilogue
9787 generation code then it must be being called from the
9788 INITIAL_ELIMINATION_OFFSET macro. */
9789 if (!in_prologue)
9791 /* In this case we know that we are being asked about the elimination
9792 of the arg pointer register. If that register is not being used,
9793 then there are no arguments on the stack, and we do not have to
9794 worry that a far jump might force the prologue to push the link
9795 register, changing the stack offsets. In this case we can just
9796 return false, since the presence of far jumps in the function will
9797 not affect stack offsets.
9799 If the arg pointer is live (or if it was live, but has now been
9800 eliminated and so set to dead) then we do have to test to see if
9801 the function might contain a far jump. This test can lead to some
9802 false negatives, since before reload is completed the length of
9803 branch instructions is not known, so gcc defaults to returning their
9804 longest length, which in turn sets the far jump attribute to true.
9806 A false negative will not result in bad code being generated, but it
9807 will result in a needless push and pop of the link register. We
9808 hope that this does not occur too often. */
9809 if (regs_ever_live [ARG_POINTER_REGNUM])
9810 cfun->machine->arg_pointer_live = 1;
9811 else if (!cfun->machine->arg_pointer_live)
9812 return 0;
9815 /* Check to see if the function contains a branch
9816 insn with the far jump attribute set. */
9817 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9819 if (GET_CODE (insn) == JUMP_INSN
9820 /* Ignore tablejump patterns. */
9821 && GET_CODE (PATTERN (insn)) != ADDR_VEC
9822 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
9823 && get_attr_far_jump (insn) == FAR_JUMP_YES
9826 /* Record the fact that we have decided that
9827 the function does use far jumps. */
9828 cfun->machine->far_jump_used = 1;
9829 return 1;
9833 return 0;
9836 /* Return non-zero if FUNC must be entered in ARM mode. */
9839 is_called_in_ARM_mode (func)
9840 tree func;
9842 if (TREE_CODE (func) != FUNCTION_DECL)
9843 abort ();
9845 /* Ignore the problem about functions whose address is taken. */
9846 if (TARGET_CALLEE_INTERWORKING && TREE_PUBLIC (func))
9847 return TRUE;
9849 #ifdef ARM_PE
9850 return lookup_attribute ("interfacearm", DECL_ATTRIBUTES (func)) != NULL_TREE;
9851 #else
9852 return FALSE;
9853 #endif
9856 /* The bits which aren't usefully expanded as rtl. */
9858 const char *
9859 thumb_unexpanded_epilogue ()
9861 int regno;
9862 int live_regs_mask = 0;
9863 int high_regs_pushed = 0;
9864 int leaf_function = leaf_function_p ();
9865 int had_to_push_lr;
9866 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
9868 if (return_used_this_function)
9869 return "";
9871 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
9872 if (regs_ever_live[regno] && !call_used_regs[regno]
9873 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
9874 live_regs_mask |= 1 << regno;
9876 for (regno = 8; regno < 13; regno++)
9878 if (regs_ever_live[regno] && !call_used_regs[regno]
9879 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
9880 high_regs_pushed++;
9883 /* The prologue may have pushed some high registers to use as
9884 work registers, e.g. the testsuite file:
9885 gcc/testsuite/gcc/gcc.c-torture/execute/complex-2.c
9886 compiles to produce:
9887 push {r4, r5, r6, r7, lr}
9888 mov r7, r9
9889 mov r6, r8
9890 push {r6, r7}
9891 as part of the prologue. We have to undo that pushing here. */
9893 if (high_regs_pushed)
9895 int mask = live_regs_mask;
9896 int next_hi_reg;
9897 int size;
9898 int mode;
9900 #ifdef RTX_CODE
9901 /* Try to deduce the registers used from the function's return value.
9902 This is more reliable than examining regs_ever_live[] because that
9903 will be set if the register is ever used in the function, not just if
9904 the register is used to hold a return value. */
9906 if (current_function_return_rtx != 0)
9907 mode = GET_MODE (current_function_return_rtx);
9908 else
9909 #endif
9910 mode = DECL_MODE (DECL_RESULT (current_function_decl));
9912 size = GET_MODE_SIZE (mode);
9914 /* Unless we are returning a type of size > 12, register r3 is
9915 available. */
9916 if (size < 13)
9917 mask |= 1 << 3;
9919 if (mask == 0)
9920 /* Oh dear! We have no low registers into which we can pop
9921 high registers! */
9922 internal_error
9923 ("no low registers available for popping high registers");
9925 for (next_hi_reg = 8; next_hi_reg < 13; next_hi_reg++)
9926 if (regs_ever_live[next_hi_reg] && !call_used_regs[next_hi_reg]
9927 && !(TARGET_SINGLE_PIC_BASE && (next_hi_reg == arm_pic_register)))
9928 break;
9930 while (high_regs_pushed)
9932 /* Find lo register(s) into which the high register(s) can
9933 be popped. */
9934 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
9936 if (mask & (1 << regno))
9937 high_regs_pushed--;
9938 if (high_regs_pushed == 0)
9939 break;
9942 mask &= (2 << regno) - 1; /* A noop if regno == 8 */
9944 /* Pop the values into the low register(s). */
9945 thumb_pushpop (asm_out_file, mask, 0);
9947 /* Move the value(s) into the high registers. */
9948 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
9950 if (mask & (1 << regno))
9952 asm_fprintf (asm_out_file, "\tmov\t%r, %r\n", next_hi_reg,
9953 regno);
9955 for (next_hi_reg++; next_hi_reg < 13; next_hi_reg++)
9956 if (regs_ever_live[next_hi_reg]
9957 && !call_used_regs[next_hi_reg]
9958 && !(TARGET_SINGLE_PIC_BASE
9959 && (next_hi_reg == arm_pic_register)))
9960 break;
9966 had_to_push_lr = (live_regs_mask || !leaf_function
9967 || thumb_far_jump_used_p (1));
9969 if (TARGET_BACKTRACE
9970 && ((live_regs_mask & 0xFF) == 0)
9971 && regs_ever_live [LAST_ARG_REGNUM] != 0)
9973 /* The stack backtrace structure creation code had to
9974 push R7 in order to get a work register, so we pop
9975 it now. */
9976 live_regs_mask |= (1 << LAST_LO_REGNUM);
9979 if (current_function_pretend_args_size == 0 || TARGET_BACKTRACE)
9981 if (had_to_push_lr
9982 && !is_called_in_ARM_mode (current_function_decl)
9983 && !eh_ofs)
9984 live_regs_mask |= 1 << PC_REGNUM;
9986 /* Either no argument registers were pushed or a backtrace
9987 structure was created which includes an adjusted stack
9988 pointer, so just pop everything. */
9989 if (live_regs_mask)
9990 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
9992 if (eh_ofs)
9993 thumb_exit (asm_out_file, 2, eh_ofs);
9994 /* We have either just popped the return address into the
9995 PC, or it was kept in LR for the entire function, or
9996 it is still on the stack because we do not want to
9997 return by doing a pop {pc}. */
9998 else if ((live_regs_mask & (1 << PC_REGNUM)) == 0)
9999 thumb_exit (asm_out_file,
10000 (had_to_push_lr
10001 && is_called_in_ARM_mode (current_function_decl)) ?
10002 -1 : LR_REGNUM, NULL_RTX);
10004 else
10006 /* Pop everything but the return address. */
10007 live_regs_mask &= ~(1 << PC_REGNUM);
10009 if (live_regs_mask)
10010 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
10012 if (had_to_push_lr)
10013 /* Get the return address into a temporary register. */
10014 thumb_pushpop (asm_out_file, 1 << LAST_ARG_REGNUM, 0);
10016 /* Remove the argument registers that were pushed onto the stack. */
10017 asm_fprintf (asm_out_file, "\tadd\t%r, %r, #%d\n",
10018 SP_REGNUM, SP_REGNUM,
10019 current_function_pretend_args_size);
10021 if (eh_ofs)
10022 thumb_exit (asm_out_file, 2, eh_ofs);
10023 else
10024 thumb_exit (asm_out_file,
10025 had_to_push_lr ? LAST_ARG_REGNUM : LR_REGNUM, NULL_RTX);
10028 return "";
10031 /* Functions to save and restore machine-specific function data. */
10033 static void
10034 arm_mark_machine_status (p)
10035 struct function * p;
10037 machine_function *machine = p->machine;
10039 if (machine)
10040 ggc_mark_rtx (machine->eh_epilogue_sp_ofs);
10043 static void
10044 arm_init_machine_status (p)
10045 struct function * p;
10047 p->machine =
10048 (machine_function *) xcalloc (1, sizeof (machine_function));
10050 #if ARM_FT_UNKNOWN != 0
10051 ((machine_function *) p->machine)->func_type = ARM_FT_UNKNOWN;
10052 #endif
10055 static void
10056 arm_free_machine_status (p)
10057 struct function * p;
10059 if (p->machine)
10061 free (p->machine);
10062 p->machine = NULL;
10066 /* Return an RTX indicating where the return address to the
10067 calling function can be found. */
10069 rtx
10070 arm_return_addr (count, frame)
10071 int count;
10072 rtx frame ATTRIBUTE_UNUSED;
10074 if (count != 0)
10075 return NULL_RTX;
10077 if (TARGET_APCS_32)
10078 return get_hard_reg_initial_val (Pmode, LR_REGNUM);
10079 else
10081 rtx lr = gen_rtx_AND (Pmode, gen_rtx_REG (Pmode, LR_REGNUM),
10082 GEN_INT (RETURN_ADDR_MASK26));
10083 return get_func_hard_reg_initial_val (cfun, lr);
10087 /* Do anything needed before RTL is emitted for each function. */
10089 void
10090 arm_init_expanders ()
10092 /* Arrange to initialize and mark the machine per-function status. */
10093 init_machine_status = arm_init_machine_status;
10094 mark_machine_status = arm_mark_machine_status;
10095 free_machine_status = arm_free_machine_status;
10098 /* Generate the rest of a function's prologue. */
10100 void
10101 thumb_expand_prologue ()
10103 HOST_WIDE_INT amount = (get_frame_size ()
10104 + current_function_outgoing_args_size);
10105 unsigned long func_type;
10107 func_type = arm_current_func_type ();
10109 /* Naked functions don't have prologues. */
10110 if (IS_NAKED (func_type))
10111 return;
10113 if (IS_INTERRUPT (func_type))
10115 error ("interrupt service routines cannot be coded in Thumb mode");
10116 return;
10119 if (frame_pointer_needed)
10120 emit_insn (gen_movsi (hard_frame_pointer_rtx, stack_pointer_rtx));
10122 if (amount)
10124 amount = ROUND_UP (amount);
10126 if (amount < 512)
10127 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
10128 GEN_INT (- amount)));
10129 else
10131 int regno;
10132 rtx reg;
10134 /* The stack decrement is too big for an immediate value in a single
10135 insn. In theory we could issue multiple subtracts, but after
10136 three of them it becomes more space efficient to place the full
10137 value in the constant pool and load into a register. (Also the
10138 ARM debugger really likes to see only one stack decrement per
10139 function). So instead we look for a scratch register into which
10140 we can load the decrement, and then we subtract this from the
10141 stack pointer. Unfortunately on the thumb the only available
10142 scratch registers are the argument registers, and we cannot use
10143 these as they may hold arguments to the function. Instead we
10144 attempt to locate a call preserved register which is used by this
10145 function. If we can find one, then we know that it will have
10146 been pushed at the start of the prologue and so we can corrupt
10147 it now. */
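/* A sketch of the code this produces, assuming a 1024 byte frame and
   r4 found free by the loop below (register and label illustrative):

       ldr  r4, .Lc             @ .Lc: .word -1024
       add  sp, r4

   the large constant being satisfied from the literal pool by the
   movsi pattern.  */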
10148 for (regno = LAST_ARG_REGNUM + 1; regno <= LAST_LO_REGNUM; regno++)
10149 if (regs_ever_live[regno]
10150 && !call_used_regs[regno] /* Paranoia */
10151 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register))
10152 && !(frame_pointer_needed
10153 && (regno == THUMB_HARD_FRAME_POINTER_REGNUM)))
10154 break;
10156 if (regno > LAST_LO_REGNUM) /* Very unlikely */
10158 rtx spare = gen_rtx (REG, SImode, IP_REGNUM);
10159 rtx insn;
10161 /* Choose an arbitrary, non-argument low register. */
10162 reg = gen_rtx (REG, SImode, LAST_LO_REGNUM);
10164 /* Save it by copying it into a high, scratch register. */
10165 insn = emit_insn (gen_movsi (spare, reg));
10166 /* Add a reg note to stop propagate_one_insn () from barfing. */
10167 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, spare,
10168 REG_NOTES (insn));
10170 /* Decrement the stack. */
10171 emit_insn (gen_movsi (reg, GEN_INT (- amount)));
10172 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
10173 reg));
10175 /* Restore the low register's original value. */
10176 emit_insn (gen_movsi (reg, spare));
10178 /* Emit a USE of the restored scratch register, so that flow
10179 analysis will not consider the restore redundant. The
10180 register won't be used again in this function and isn't
10181 restored by the epilogue. */
10182 emit_insn (gen_rtx_USE (VOIDmode, reg));
10184 else
10186 reg = gen_rtx (REG, SImode, regno);
10188 emit_insn (gen_movsi (reg, GEN_INT (- amount)));
10189 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
10190 reg));
10195 if (current_function_profile || TARGET_NO_SCHED_PRO)
10196 emit_insn (gen_blockage ());
10199 void
10200 thumb_expand_epilogue ()
10202 HOST_WIDE_INT amount = (get_frame_size ()
10203 + current_function_outgoing_args_size);
10205 /* Naked functions don't have epilogues. */
10206 if (IS_NAKED (arm_current_func_type ()))
10207 return;
10209 if (frame_pointer_needed)
10210 emit_insn (gen_movsi (stack_pointer_rtx, hard_frame_pointer_rtx));
10211 else if (amount)
10213 amount = ROUND_UP (amount);
10215 if (amount < 512)
10216 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
10217 GEN_INT (amount)));
10218 else
10220 /* r3 is always free in the epilogue. */
10221 rtx reg = gen_rtx (REG, SImode, LAST_ARG_REGNUM);
10223 emit_insn (gen_movsi (reg, GEN_INT (amount)));
10224 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
10228 /* Emit a USE (stack_pointer_rtx), so that
10229 the stack adjustment will not be deleted. */
10230 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
10232 if (current_function_profile || TARGET_NO_SCHED_PRO)
10233 emit_insn (gen_blockage ());
10236 static void
10237 thumb_output_function_prologue (f, size)
10238 FILE * f;
10239 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
10241 int live_regs_mask = 0;
10242 int high_regs_pushed = 0;
10243 int regno;
10245 if (IS_NAKED (arm_current_func_type ()))
10246 return;
10248 if (is_called_in_ARM_mode (current_function_decl))
10250 const char * name;
10252 if (GET_CODE (DECL_RTL (current_function_decl)) != MEM)
10253 abort ();
10254 if (GET_CODE (XEXP (DECL_RTL (current_function_decl), 0)) != SYMBOL_REF)
10255 abort ();
10256 name = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
10258 /* Generate code sequence to switch us into Thumb mode. */
10259 /* The .code 32 directive has already been emitted by
10260 ASM_DECLARE_FUNCTION_NAME. */
10261 asm_fprintf (f, "\torr\t%r, %r, #1\n", IP_REGNUM, PC_REGNUM);
10262 asm_fprintf (f, "\tbx\t%r\n", IP_REGNUM);
10264 /* Generate a label, so that the debugger will notice the
10265 change in instruction sets. This label is also used by
10266 the assembler to bypass the ARM code when this function
10267 is called from a Thumb encoded function elsewhere in the
10268 same file. Hence the definition of STUB_NAME here must
10269 agree with the definition in gas/config/tc-arm.c */
10271 #define STUB_NAME ".real_start_of"
10273 asm_fprintf (f, "\t.code\t16\n");
10274 #ifdef ARM_PE
10275 if (arm_dllexport_name_p (name))
10276 name = arm_strip_name_encoding (name);
10277 #endif
10278 asm_fprintf (f, "\t.globl %s%U%s\n", STUB_NAME, name);
10279 asm_fprintf (f, "\t.thumb_func\n");
10280 asm_fprintf (f, "%s%U%s:\n", STUB_NAME, name);
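/* Taken together, the directives above emit roughly this shape for
   a function "foo" (illustrative; the label prefix from %U is
   target dependent):

       orr   ip, pc, #1       @ pc reads as this insn + 8, i.e. the
       bx    ip               @   first Thumb insn; bit 0 selects Thumb
       .code 16
       .globl .real_start_of_foo
       .thumb_func
   .real_start_of_foo:                                               */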
10283 if (current_function_pretend_args_size)
10285 if (current_function_anonymous_args)
10287 int num_pushes;
10289 asm_fprintf (f, "\tpush\t{");
10291 num_pushes = NUM_INTS (current_function_pretend_args_size);
10293 for (regno = LAST_ARG_REGNUM + 1 - num_pushes;
10294 regno <= LAST_ARG_REGNUM;
10295 regno++)
10296 asm_fprintf (f, "%r%s", regno,
10297 regno == LAST_ARG_REGNUM ? "" : ", ");
10299 asm_fprintf (f, "}\n");
10301 else
10302 asm_fprintf (f, "\tsub\t%r, %r, #%d\n",
10303 SP_REGNUM, SP_REGNUM,
10304 current_function_pretend_args_size);
10307 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
10308 if (regs_ever_live[regno] && !call_used_regs[regno]
10309 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
10310 live_regs_mask |= 1 << regno;
10312 if (live_regs_mask || !leaf_function_p () || thumb_far_jump_used_p (1))
10313 live_regs_mask |= 1 << LR_REGNUM;
10315 if (TARGET_BACKTRACE)
10317 int offset;
10318 int work_register = 0;
10319 int wr;
10321 /* We have been asked to create a stack backtrace structure.
10322 The code looks like this:
10324 0 .align 2
10325 0 func:
10326 0 sub SP, #16 Reserve space for 4 registers.
10327 2 push {R7} Get a work register.
10328 4 add R7, SP, #20 Get the stack pointer before the push.
10329 6 str R7, [SP, #8] Store the stack pointer (before reserving the space).
10330 8 mov R7, PC Get hold of the start of this code plus 12.
10331 10 str R7, [SP, #16] Store it.
10332 12 mov R7, FP Get hold of the current frame pointer.
10333 14 str R7, [SP, #4] Store it.
10334 16 mov R7, LR Get hold of the current return address.
10335 18 str R7, [SP, #12] Store it.
10336 20 add R7, SP, #16 Point at the start of the backtrace structure.
10337 22 mov FP, R7 Put this value into the frame pointer. */
10339 if ((live_regs_mask & 0xFF) == 0)
10341 /* See if the a4 register is free. */
10343 if (regs_ever_live [LAST_ARG_REGNUM] == 0)
10344 work_register = LAST_ARG_REGNUM;
10345 else /* We must push a register of our own. */
10346 live_regs_mask |= (1 << LAST_LO_REGNUM);
10349 if (work_register == 0)
10351 /* Select a register from the list that will be pushed to
10352 use as our work register. */
10353 for (work_register = (LAST_LO_REGNUM + 1); work_register--;)
10354 if ((1 << work_register) & live_regs_mask)
10355 break;
10358 asm_fprintf
10359 (f, "\tsub\t%r, %r, #16\t%@ Create stack backtrace structure\n",
10360 SP_REGNUM, SP_REGNUM);
10362 if (live_regs_mask)
10363 thumb_pushpop (f, live_regs_mask, 1);
10365 for (offset = 0, wr = 1 << 15; wr != 0; wr >>= 1)
10366 if (wr & live_regs_mask)
10367 offset += 4;
10369 asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
10370 offset + 16 + current_function_pretend_args_size);
10372 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
10373 offset + 4);
10375 /* Make sure that the instruction fetching the PC is in the right place
10376 to calculate "start of backtrace creation code + 12". */
10377 if (live_regs_mask)
10379 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
10380 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
10381 offset + 12);
10382 asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
10383 ARM_HARD_FRAME_POINTER_REGNUM);
10384 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
10385 offset);
10387 else
10389 asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
10390 ARM_HARD_FRAME_POINTER_REGNUM);
10391 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
10392 offset);
10393 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
10394 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
10395 offset + 12);
10398 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, LR_REGNUM);
10399 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
10400 offset + 8);
10401 asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
10402 offset + 12);
10403 asm_fprintf (f, "\tmov\t%r, %r\t\t%@ Backtrace structure created\n",
10404 ARM_HARD_FRAME_POINTER_REGNUM, work_register);
10406 else if (live_regs_mask)
10407 thumb_pushpop (f, live_regs_mask, 1);
10409 for (regno = 8; regno < 13; regno++)
10411 if (regs_ever_live[regno] && !call_used_regs[regno]
10412 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
10413 high_regs_pushed++;
10416 if (high_regs_pushed)
10418 int pushable_regs = 0;
10419 int mask = live_regs_mask & 0xff;
10420 int next_hi_reg;
10422 for (next_hi_reg = 12; next_hi_reg > LAST_LO_REGNUM; next_hi_reg--)
10424 if (regs_ever_live[next_hi_reg] && !call_used_regs[next_hi_reg]
10425 && !(TARGET_SINGLE_PIC_BASE
10426 && (next_hi_reg == arm_pic_register)))
10427 break;
10430 pushable_regs = mask;
10432 if (pushable_regs == 0)
10434 /* Desperation time -- this probably will never happen. */
10435 if (regs_ever_live[LAST_ARG_REGNUM]
10436 || !call_used_regs[LAST_ARG_REGNUM])
10437 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, LAST_ARG_REGNUM);
10438 mask = 1 << LAST_ARG_REGNUM;
10441 while (high_regs_pushed > 0)
10443 for (regno = LAST_LO_REGNUM; regno >= 0; regno--)
10445 if (mask & (1 << regno))
10447 asm_fprintf (f, "\tmov\t%r, %r\n", regno, next_hi_reg);
10449 high_regs_pushed--;
10451 if (high_regs_pushed)
10452 for (next_hi_reg--; next_hi_reg > LAST_LO_REGNUM;
10453 next_hi_reg--)
10455 if (regs_ever_live[next_hi_reg]
10456 && !call_used_regs[next_hi_reg]
10457 && !(TARGET_SINGLE_PIC_BASE
10458 && (next_hi_reg == arm_pic_register)))
10459 break;
10461 else
10463 mask &= ~((1 << regno) - 1);
10464 break;
10469 thumb_pushpop (f, mask, 1);
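/* A worked example (illustrative): with r8 and r9 live and r6 and
   r7 in the low-register mask, the loop above emits

       mov  r7, r9
       mov  r6, r8
       push {r6, r7}

   copying each high register down before the single push.  */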
10472 if (pushable_regs == 0
10473 && (regs_ever_live[LAST_ARG_REGNUM]
10474 || !call_used_regs[LAST_ARG_REGNUM]))
10475 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
10479 /* Handle the case of a double word load into a low register from
10480 a computed memory address. The computed address may involve a
10481 register which is overwritten by the load. */
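/* For instance (illustrative): if the destination's low register is
   also the address register, the REG case below loads the high word
   first so the base survives:

       ldr  rD+1, [rD, #4]
       ldr  rD,   [rD]

   otherwise the low word is always loaded first.  */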
10483 const char *
10484 thumb_load_double_from_address (operands)
10485 rtx *operands;
10487 rtx addr;
10488 rtx base;
10489 rtx offset;
10490 rtx arg1;
10491 rtx arg2;
10493 if (GET_CODE (operands[0]) != REG)
10494 abort ();
10496 if (GET_CODE (operands[1]) != MEM)
10497 abort ();
10499 /* Get the memory address. */
10500 addr = XEXP (operands[1], 0);
10502 /* Work out how the memory address is computed. */
10503 switch (GET_CODE (addr))
10505 case REG:
10506 operands[2] = gen_rtx (MEM, SImode,
10507 plus_constant (XEXP (operands[1], 0), 4));
10509 if (REGNO (operands[0]) == REGNO (addr))
10511 output_asm_insn ("ldr\t%H0, %2", operands);
10512 output_asm_insn ("ldr\t%0, %1", operands);
10514 else
10516 output_asm_insn ("ldr\t%0, %1", operands);
10517 output_asm_insn ("ldr\t%H0, %2", operands);
10519 break;
10521 case CONST:
10522 /* Compute <address> + 4 for the high order load. */
10523 operands[2] = gen_rtx (MEM, SImode,
10524 plus_constant (XEXP (operands[1], 0), 4));
10526 output_asm_insn ("ldr\t%0, %1", operands);
10527 output_asm_insn ("ldr\t%H0, %2", operands);
10528 break;
10530 case PLUS:
10531 arg1 = XEXP (addr, 0);
10532 arg2 = XEXP (addr, 1);
10534 if (CONSTANT_P (arg1))
10535 base = arg2, offset = arg1;
10536 else
10537 base = arg1, offset = arg2;
10539 if (GET_CODE (base) != REG)
10540 abort ();
10542 /* Catch the case of <address> = <reg> + <reg>. */
10543 if (GET_CODE (offset) == REG)
10545 int reg_offset = REGNO (offset);
10546 int reg_base = REGNO (base);
10547 int reg_dest = REGNO (operands[0]);
10549 /* Add the base and offset registers together into the
10550 higher destination register. */
10551 asm_fprintf (asm_out_file, "\tadd\t%r, %r, %r",
10552 reg_dest + 1, reg_base, reg_offset);
10554 /* Load the lower destination register from the address in
10555 the higher destination register. */
10556 asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #0]",
10557 reg_dest, reg_dest + 1);
10559 /* Load the higher destination register from its own address
10560 plus 4. */
10561 asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #4]",
10562 reg_dest + 1, reg_dest + 1);
10564 else
10566 /* Compute <address> + 4 for the high order load. */
10567 operands[2] = gen_rtx (MEM, SImode,
10568 plus_constant (XEXP (operands[1], 0), 4));
10570 /* If the computed address is held in the low order register
10571 then load the high order register first, otherwise always
10572 load the low order register first. */
10573 if (REGNO (operands[0]) == REGNO (base))
10575 output_asm_insn ("ldr\t%H0, %2", operands);
10576 output_asm_insn ("ldr\t%0, %1", operands);
10578 else
10580 output_asm_insn ("ldr\t%0, %1", operands);
10581 output_asm_insn ("ldr\t%H0, %2", operands);
10584 break;
10586 case LABEL_REF:
10587 /* With no registers to worry about we can just load the value
10588 directly. */
10589 operands[2] = gen_rtx (MEM, SImode,
10590 plus_constant (XEXP (operands[1], 0), 4));
10592 output_asm_insn ("ldr\t%H0, %2", operands);
10593 output_asm_insn ("ldr\t%0, %1", operands);
10594 break;
10596 default:
10597 abort ();
10598 break;
10601 return "";
10605 const char *
10606 thumb_output_move_mem_multiple (n, operands)
10607 int n;
10608 rtx * operands;
10610 rtx tmp;
10612 switch (n)
10614 case 2:
10615 if (REGNO (operands[4]) > REGNO (operands[5]))
10617 tmp = operands[4];
10618 operands[4] = operands[5];
10619 operands[5] = tmp;
10621 output_asm_insn ("ldmia\t%1!, {%4, %5}", operands);
10622 output_asm_insn ("stmia\t%0!, {%4, %5}", operands);
10623 break;
10625 case 3:
10626 if (REGNO (operands[4]) > REGNO (operands[5]))
10628 tmp = operands[4];
10629 operands[4] = operands[5];
10630 operands[5] = tmp;
10632 if (REGNO (operands[5]) > REGNO (operands[6]))
10634 tmp = operands[5];
10635 operands[5] = operands[6];
10636 operands[6] = tmp;
10638 if (REGNO (operands[4]) > REGNO (operands[5]))
10640 tmp = operands[4];
10641 operands[4] = operands[5];
10642 operands[5] = tmp;
10645 output_asm_insn ("ldmia\t%1!, {%4, %5, %6}", operands);
10646 output_asm_insn ("stmia\t%0!, {%4, %5, %6}", operands);
10647 break;
10649 default:
10650 abort ();
10653 return "";
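/* For example (illustrative): with n == 3 and operands[4..6]
   arriving as r5, r3, r4, the three compare-and-swap steps above
   reorder them to r3, r4, r5 so that

       ldmia r1!, {r3, r4, r5}
       stmia r0!, {r3, r4, r5}

   list the registers in the ascending order the assembler expects.  */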
10656 /* Routines for generating rtl. */
10658 void
10659 thumb_expand_movstrqi (operands)
10660 rtx * operands;
10662 rtx out = copy_to_mode_reg (SImode, XEXP (operands[0], 0));
10663 rtx in = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
10664 HOST_WIDE_INT len = INTVAL (operands[2]);
10665 HOST_WIDE_INT offset = 0;
10667 while (len >= 12)
10669 emit_insn (gen_movmem12b (out, in, out, in));
10670 len -= 12;
10673 if (len >= 8)
10675 emit_insn (gen_movmem8b (out, in, out, in));
10676 len -= 8;
10679 if (len >= 4)
10681 rtx reg = gen_reg_rtx (SImode);
10682 emit_insn (gen_movsi (reg, gen_rtx (MEM, SImode, in)));
10683 emit_insn (gen_movsi (gen_rtx (MEM, SImode, out), reg));
10684 len -= 4;
10685 offset += 4;
10688 if (len >= 2)
10690 rtx reg = gen_reg_rtx (HImode);
10691 emit_insn (gen_movhi (reg, gen_rtx (MEM, HImode,
10692 plus_constant (in, offset))));
10693 emit_insn (gen_movhi (gen_rtx (MEM, HImode, plus_constant (out, offset)),
10694 reg));
10695 len -= 2;
10696 offset += 2;
10699 if (len)
10701 rtx reg = gen_reg_rtx (QImode);
10702 emit_insn (gen_movqi (reg, gen_rtx (MEM, QImode,
10703 plus_constant (in, offset))));
10704 emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (out, offset)),
10705 reg));
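/* A worked example (illustrative): a 27-byte copy expands to two
   12-byte ldmia/stmia blocks (which advance the pointers), then,
   with 3 bytes left, one halfword move at offset 0 and one byte
   move at offset 2.  */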
10710 thumb_cmp_operand (op, mode)
10711 rtx op;
10712 enum machine_mode mode;
10714 return ((GET_CODE (op) == CONST_INT
10715 && (unsigned HOST_WIDE_INT) (INTVAL (op)) < 256)
10716 || register_operand (op, mode));
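/* Illustrative: (const_int 255) is accepted while (const_int 256)
   and (const_int -1) are not, since the unsigned cast rejects
   negative values; Thumb's "cmp rN, #imm" form only encodes an
   8-bit immediate.  */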
10719 static const char *
10720 thumb_condition_code (x, invert)
10721 rtx x;
10722 int invert;
10724 static const char * const conds[] =
10726 "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
10727 "hi", "ls", "ge", "lt", "gt", "le"
10729 int val;
10731 switch (GET_CODE (x))
10733 case EQ: val = 0; break;
10734 case NE: val = 1; break;
10735 case GEU: val = 2; break;
10736 case LTU: val = 3; break;
10737 case GTU: val = 8; break;
10738 case LEU: val = 9; break;
10739 case GE: val = 10; break;
10740 case LT: val = 11; break;
10741 case GT: val = 12; break;
10742 case LE: val = 13; break;
10743 default:
10744 abort ();
10747 return conds[val ^ invert];
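/* The table pairs each condition with its inverse at the adjacent
   index, so "val ^ invert" flips, for example, GT (index 12, "gt")
   to LE (index 13, "le") when INVERT is nonzero.  */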
10750 /* Handle storing a half-word to memory during reload. */
10752 void
10753 thumb_reload_out_hi (operands)
10754 rtx * operands;
10756 emit_insn (gen_thumb_movhi_clobber (operands[0], operands[1], operands[2]));
10759 /* Handle reading a half-word from memory during reload. */
10761 void
10762 thumb_reload_in_hi (operands)
10763 rtx * operands ATTRIBUTE_UNUSED;
10765 abort ();
10768 /* Return the length of a function name prefix
10769 that starts with the character C. */
10771 static int
10772 arm_get_strip_length (char c)
10774 switch (c)
10776 ARM_NAME_ENCODING_LENGTHS
10777 default: return 0;
10781 /* Return a pointer to a function's name with any
10782 and all prefix encodings stripped from it. */
10784 const char *
10785 arm_strip_name_encoding (const char * name)
10787 int skip;
10789 while ((skip = arm_get_strip_length (* name)))
10790 name += skip;
10792 return name;
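/* Hypothetical example: if ARM_NAME_ENCODING_LENGTHS maps '*' to a
   length of 1, then "*foo" strips to "foo"; stacked prefixes are
   removed one per iteration of the loop above.  */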
10795 #ifdef AOF_ASSEMBLER
10796 /* Special functions only needed when producing AOF syntax assembler. */
10798 rtx aof_pic_label = NULL_RTX;
10799 struct pic_chain
10801 struct pic_chain * next;
10802 const char * symname;
10805 static struct pic_chain * aof_pic_chain = NULL;
10808 aof_pic_entry (x)
10809 rtx x;
10811 struct pic_chain ** chainp;
10812 int offset;
10814 if (aof_pic_label == NULL_RTX)
10816 /* We mark this here and not in arm_add_gc_roots() to avoid
10817 polluting even more code with ifdefs, and because it never
10818 contains anything useful until we assign to it here. */
10819 ggc_add_rtx_root (&aof_pic_label, 1);
10820 aof_pic_label = gen_rtx_SYMBOL_REF (Pmode, "x$adcons");
10823 for (offset = 0, chainp = &aof_pic_chain; *chainp;
10824 offset += 4, chainp = &(*chainp)->next)
10825 if ((*chainp)->symname == XSTR (x, 0))
10826 return plus_constant (aof_pic_label, offset);
10828 *chainp = (struct pic_chain *) xmalloc (sizeof (struct pic_chain));
10829 (*chainp)->next = NULL;
10830 (*chainp)->symname = XSTR (x, 0);
10831 return plus_constant (aof_pic_label, offset);
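/* Illustrative: the first distinct symbol looked up lands at
   x$adcons + 0, the second at + 4, and so on; repeated lookups walk
   the chain and return the offset already assigned.  */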
10834 void
10835 aof_dump_pic_table (f)
10836 FILE * f;
10838 struct pic_chain * chain;
10840 if (aof_pic_chain == NULL)
10841 return;
10843 asm_fprintf (f, "\tAREA |%r$$adcons|, BASED %r\n",
10844 PIC_OFFSET_TABLE_REGNUM,
10845 PIC_OFFSET_TABLE_REGNUM);
10846 fputs ("|x$adcons|\n", f);
10848 for (chain = aof_pic_chain; chain; chain = chain->next)
10850 fputs ("\tDCD\t", f);
10851 assemble_name (f, chain->symname);
10852 fputs ("\n", f);
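/* Assuming the PIC offset table register is sl (r10), the output
   for two recorded symbols looks like this (illustrative):

       AREA |sl$$adcons|, BASED sl
   |x$adcons|
       DCD sym_a
       DCD sym_b                                                     */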
10856 int arm_text_section_count = 1;
10858 char *
10859 aof_text_section ()
10861 static char buf[100];
10862 sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
10863 arm_text_section_count++);
10864 if (flag_pic)
10865 strcat (buf, ", PIC, REENTRANT");
10866 return buf;
10869 static int arm_data_section_count = 1;
10871 char *
10872 aof_data_section ()
10874 static char buf[100];
10875 sprintf (buf, "\tAREA |C$$data%d|, DATA", arm_data_section_count++);
10876 return buf;
10879 /* The AOF assembler is religiously strict about declarations of
10880 imported and exported symbols, so that it is impossible to declare
10881 a function as imported near the beginning of the file, and then to
10882 export it later on. It is, however, possible to delay the decision
10883 until all the functions in the file have been compiled. To get
10884 around this, we maintain a list of the imports and exports, and
10885 delete from it any that are subsequently defined. At the end of
10886 compilation we spit the remainder of the list out before the END
10887 directive. */
10889 struct import
10891 struct import * next;
10892 const char * name;
10895 static struct import * imports_list = NULL;
10897 void
10898 aof_add_import (name)
10899 const char * name;
10901 struct import * new;
10903 for (new = imports_list; new; new = new->next)
10904 if (new->name == name)
10905 return;
10907 new = (struct import *) xmalloc (sizeof (struct import));
10908 new->next = imports_list;
10909 imports_list = new;
10910 new->name = name;
10913 void
10914 aof_delete_import (name)
10915 const char * name;
10917 struct import ** old;
10919 for (old = &imports_list; *old; old = & (*old)->next)
10921 if ((*old)->name == name)
10923 *old = (*old)->next;
10924 return;
10929 int arm_main_function = 0;
10931 void
10932 aof_dump_imports (f)
10933 FILE * f;
10935 /* The AOF assembler needs this to cause the startup code to be extracted
10936 from the library. Bringing in __main causes the whole thing to work
10937 automagically. */
10938 if (arm_main_function)
10940 text_section ();
10941 fputs ("\tIMPORT __main\n", f);
10942 fputs ("\tDCD __main\n", f);
10945 /* Now dump the remaining imports. */
10946 while (imports_list)
10948 fprintf (f, "\tIMPORT\t");
10949 assemble_name (f, imports_list->name);
10950 fputc ('\n', f);
10951 imports_list = imports_list->next;
10954 #endif /* AOF_ASSEMBLER */
10956 #ifdef OBJECT_FORMAT_ELF
10957 /* Switch to an arbitrary section NAME with attributes as specified
10958 by FLAGS. ALIGN specifies any known alignment requirements for
10959 the section; 0 if the default should be used.
10961 Differs from the default elf version only in the prefix character
10962 used before the section type. */
10964 static void
10965 arm_elf_asm_named_section (name, flags)
10966 const char *name;
10967 unsigned int flags;
10969 char flagchars[8], *f = flagchars;
10970 const char *type;
10972 if (!(flags & SECTION_DEBUG))
10973 *f++ = 'a';
10974 if (flags & SECTION_WRITE)
10975 *f++ = 'w';
10976 if (flags & SECTION_CODE)
10977 *f++ = 'x';
10978 if (flags & SECTION_SMALL)
10979 *f++ = 's';
10980 if (flags & SECTION_MERGE)
10981 *f++ = 'M';
10982 if (flags & SECTION_STRINGS)
10983 *f++ = 'S';
10984 *f = '\0';
10986 if (flags & SECTION_BSS)
10987 type = "nobits";
10988 else
10989 type = "progbits";
10991 if (flags & SECTION_ENTSIZE)
10992 fprintf (asm_out_file, "\t.section\t%s,\"%s\",%%%s,%d\n",
10993 name, flagchars, type, flags & SECTION_ENTSIZE);
10994 else
10995 fprintf (asm_out_file, "\t.section\t%s,\"%s\",%%%s\n",
10996 name, flagchars, type);
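/* For example (illustrative): a writable, allocated data section
   comes out as

       .section .foo,"aw",%progbits

   and a mergeable string section with entity size 1 as

       .section .bar,"aMS",%progbits,1

   note the '%' before the type where the default ELF hook uses '@'.  */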
10998 #endif