Move body of HARD_REGNO_MODE_OK into a function: arm_hard_regno_mode_ok
gcc/config/arm/arm.c
/* Output routines for GCC for ARM.
   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002
   Free Software Foundation, Inc.
   Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
   and Martin Simmons (@harleqn.co.uk).
   More major hacks by Richard Earnshaw (rearnsha@arm.com).

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "reload.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "toplev.h"
#include "recog.h"
#include "ggc.h"
#include "except.h"
#include "c-pragma.h"
#include "integrate.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"
/* Forward definitions of types.  */
typedef struct minipool_node Mnode;
typedef struct minipool_fixup Mfix;

/* In order to improve the layout of the prototypes below
   some short type abbreviations are defined here.  */
#define Hint   HOST_WIDE_INT
#define Mmode  enum machine_mode
#define Ulong  unsigned long
#define Ccstar const char *

const struct attribute_spec arm_attribute_table[];

/* Forward function declarations.  */
static void arm_add_gc_roots PARAMS ((void));
static int arm_gen_constant PARAMS ((enum rtx_code, Mmode, Hint, rtx, rtx, int, int));
static Ulong bit_count PARAMS ((signed int));
static int const_ok_for_op PARAMS ((Hint, enum rtx_code));
static int eliminate_lr2ip PARAMS ((rtx *));
static rtx emit_multi_reg_push PARAMS ((int));
static rtx emit_sfm PARAMS ((int, int));
#ifndef AOF_ASSEMBLER
static bool arm_assemble_integer PARAMS ((rtx, unsigned int, int));
#endif
static Ccstar fp_const_from_val PARAMS ((REAL_VALUE_TYPE *));
static arm_cc get_arm_condition_code PARAMS ((rtx));
static void init_fpa_table PARAMS ((void));
static Hint int_log2 PARAMS ((Hint));
static rtx is_jump_table PARAMS ((rtx));
static Ccstar output_multi_immediate PARAMS ((rtx *, Ccstar, Ccstar, int, Hint));
static void print_multi_reg PARAMS ((FILE *, Ccstar, int, int));
static Mmode select_dominance_cc_mode PARAMS ((rtx, rtx, Hint));
static Ccstar shift_op PARAMS ((rtx, Hint *));
static void arm_init_machine_status PARAMS ((struct function *));
static void arm_mark_machine_status PARAMS ((struct function *));
static void arm_free_machine_status PARAMS ((struct function *));
static int number_of_first_bit_set PARAMS ((int));
static void replace_symbols_in_block PARAMS ((tree, rtx, rtx));
static void thumb_exit PARAMS ((FILE *, int, rtx));
static void thumb_pushpop PARAMS ((FILE *, int, int));
static Ccstar thumb_condition_code PARAMS ((rtx, int));
static rtx is_jump_table PARAMS ((rtx));
static Hint get_jump_table_size PARAMS ((rtx));
static Mnode * move_minipool_fix_forward_ref PARAMS ((Mnode *, Mnode *, Hint));
static Mnode * add_minipool_forward_ref PARAMS ((Mfix *));
static Mnode * move_minipool_fix_backward_ref PARAMS ((Mnode *, Mnode *, Hint));
static Mnode * add_minipool_backward_ref PARAMS ((Mfix *));
static void assign_minipool_offsets PARAMS ((Mfix *));
static void arm_print_value PARAMS ((FILE *, rtx));
static void dump_minipool PARAMS ((rtx));
static int arm_barrier_cost PARAMS ((rtx));
static Mfix * create_fix_barrier PARAMS ((Mfix *, Hint));
static void push_minipool_barrier PARAMS ((rtx, Hint));
static void push_minipool_fix PARAMS ((rtx, Hint, rtx *, Mmode, rtx));
static void note_invalid_constants PARAMS ((rtx, Hint));
static int current_file_function_operand PARAMS ((rtx));
static Ulong arm_compute_save_reg0_reg12_mask PARAMS ((void));
static Ulong arm_compute_save_reg_mask PARAMS ((void));
static Ulong arm_isr_value PARAMS ((tree));
static Ulong arm_compute_func_type PARAMS ((void));
static tree arm_handle_fndecl_attribute PARAMS ((tree *, tree, tree, int, bool *));
static tree arm_handle_isr_attribute PARAMS ((tree *, tree, tree, int, bool *));
static void arm_output_function_epilogue PARAMS ((FILE *, Hint));
static void arm_output_function_prologue PARAMS ((FILE *, Hint));
static void thumb_output_function_prologue PARAMS ((FILE *, Hint));
static int arm_comp_type_attributes PARAMS ((tree, tree));
static void arm_set_default_type_attributes PARAMS ((tree));
static int arm_adjust_cost PARAMS ((rtx, rtx, rtx, int));
#ifdef OBJECT_FORMAT_ELF
static void arm_elf_asm_named_section PARAMS ((const char *, unsigned int));
#endif

#undef Hint
#undef Mmode
#undef Ulong
#undef Ccstar
/* Initialize the GCC target structure.  */
#ifdef TARGET_DLLIMPORT_DECL_ATTRIBUTES
#undef  TARGET_MERGE_DECL_ATTRIBUTES
#define TARGET_MERGE_DECL_ATTRIBUTES merge_dllimport_decl_attributes
#endif

#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE arm_attribute_table

#ifdef AOF_ASSEMBLER
#undef  TARGET_ASM_BYTE_OP
#define TARGET_ASM_BYTE_OP "\tDCB\t"
#undef  TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\tDCW\t"
#undef  TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\tDCD\t"
#else
#undef  TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP NULL
#undef  TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER arm_assemble_integer
#endif

#undef  TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE arm_output_function_prologue

#undef  TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE arm_output_function_epilogue

#undef  TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES arm_comp_type_attributes

#undef  TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES arm_set_default_type_attributes

#undef  TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS arm_init_builtins

#undef  TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN arm_expand_builtin

#undef  TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST arm_adjust_cost

struct gcc_target targetm = TARGET_INITIALIZER;

/* Obstack for minipool constant handling.  */
static struct obstack minipool_obstack;
static char *         minipool_startobj;

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free  free

/* The maximum number of insns skipped which
   will be conditionalised if possible.  */
static int max_insns_skipped = 5;

extern FILE * asm_out_file;

/* True if we are currently building a constant table.  */
int making_const_table;
/* Define the information needed to generate branch insns.  This is
   stored from the compare operation.  */
rtx arm_compare_op0, arm_compare_op1;

/* What type of floating point are we tuning for?  */
enum floating_point_type arm_fpu;

/* What type of floating point instructions are available?  */
enum floating_point_type arm_fpu_arch;

/* What program mode is the cpu running in?  26-bit mode or 32-bit mode.  */
enum prog_mode_type arm_prgmode;

/* Set by the -mfp=... option.  */
const char * target_fp_name = NULL;

/* Used to parse -mstructure_size_boundary command line option.  */
const char * structure_size_string = NULL;
int arm_structure_size_boundary = DEFAULT_STRUCTURE_SIZE_BOUNDARY;

/* Bit values used to identify processor capabilities.  */
#define FL_CO_PROC    (1 << 0)        /* Has external co-processor bus.  */
#define FL_FAST_MULT  (1 << 1)        /* Fast multiply.  */
#define FL_MODE26     (1 << 2)        /* 26-bit mode support.  */
#define FL_MODE32     (1 << 3)        /* 32-bit mode support.  */
#define FL_ARCH4      (1 << 4)        /* Architecture rel 4.  */
#define FL_ARCH5      (1 << 5)        /* Architecture rel 5.  */
#define FL_THUMB      (1 << 6)        /* Thumb aware.  */
#define FL_LDSCHED    (1 << 7)        /* Load scheduling necessary.  */
#define FL_STRONG     (1 << 8)        /* StrongARM.  */
#define FL_ARCH5E     (1 << 9)        /* DSP extensions to v5.  */
#define FL_XSCALE     (1 << 10)       /* XScale.  */

/* The bits in this mask specify which
   instructions we are allowed to generate.  */
static int insn_flags = 0;

/* The bits in this mask specify which instruction scheduling options should
   be used.  Note - there is an overlap with FL_FAST_MULT.  For some
   hardware we want to be able to generate the multiply instructions, but to
   tune as if they were not present in the architecture.  */
static int tune_flags = 0;

/* The following are used in the arm.md file as equivalents to bits
   in the above two flag variables.  */

/* Nonzero if this is an "M" variant of the processor.  */
int arm_fast_multiply = 0;

/* Nonzero if this chip supports the ARM Architecture 4 extensions.  */
int arm_arch4 = 0;

/* Nonzero if this chip supports the ARM Architecture 5 extensions.  */
int arm_arch5 = 0;

/* Nonzero if this chip supports the ARM Architecture 5E extensions.  */
int arm_arch5e = 0;

/* Nonzero if this chip can benefit from load scheduling.  */
int arm_ld_sched = 0;

/* Nonzero if this chip is a StrongARM.  */
int arm_is_strong = 0;

/* Nonzero if this chip is an XScale.  */
int arm_is_xscale = 0;

/* Nonzero if this chip is an ARM6 or an ARM7.  */
int arm_is_6_or_7 = 0;

/* Nonzero if generating Thumb instructions.  */
int thumb_code = 0;

/* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
   must report the mode of the memory reference from PRINT_OPERAND to
   PRINT_OPERAND_ADDRESS.  */
enum machine_mode output_memory_reference_mode;

/* Nonzero if the prologue must set up `fp'.  */
int current_function_anonymous_args;

/* The register number to be used for the PIC offset register.  */
const char * arm_pic_register_string = NULL;
int arm_pic_register = 9;

/* Set to 1 when a return insn is output, this means that the epilogue
   is not needed.  */
int return_used_this_function;

/* Set to 1 after arm_reorg has started.  Reset to start at the start of
   the next function.  */
static int after_arm_reorg = 0;

/* The maximum number of insns to be used when loading a constant.  */
static int arm_constant_limit = 3;

/* For an explanation of these variables, see final_prescan_insn below.  */
int arm_ccfsm_state;
enum arm_cond_code arm_current_cc;
rtx arm_target_insn;
int arm_target_label;

/* The condition codes of the ARM, and the inverse function.  */
static const char * const arm_condition_codes[] =
{
  "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
  "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
};

#define streq(string1, string2) (strcmp (string1, string2) == 0)
/* Initialization code.  */

struct processors
{
  const char *const name;
  const unsigned int flags;
};

/* Not all of these give usefully different compilation alternatives,
   but there is no simple way of generalizing them.  */
static const struct processors all_cores[] =
{
  /* ARM Cores */

  {"arm2",          FL_CO_PROC | FL_MODE26 },
  {"arm250",        FL_CO_PROC | FL_MODE26 },
  {"arm3",          FL_CO_PROC | FL_MODE26 },
  {"arm6",          FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm60",         FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm600",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm610",                     FL_MODE26 | FL_MODE32 },
  {"arm620",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7",          FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  /* arm7m doesn't exist on its own, but only with D, (and I), but
     those don't alter the code, so arm7m is sometimes used.  */
  {"arm7m",         FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7d",         FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dm",        FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7di",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dmi",       FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm70",         FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700i",       FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm710",                     FL_MODE26 | FL_MODE32 },
  {"arm710t",                    FL_MODE26 | FL_MODE32 | FL_THUMB },
  {"arm720",                     FL_MODE26 | FL_MODE32 },
  {"arm720t",                    FL_MODE26 | FL_MODE32 | FL_THUMB },
  {"arm740t",                    FL_MODE26 | FL_MODE32 | FL_THUMB },
  {"arm710c",                    FL_MODE26 | FL_MODE32 },
  {"arm7100",                    FL_MODE26 | FL_MODE32 },
  {"arm7500",                    FL_MODE26 | FL_MODE32 },
  /* Doesn't have an external co-proc, but does have embedded fpu.  */
  {"arm7500fe",     FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7tdmi",      FL_CO_PROC |             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  {"arm8",                       FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm810",                     FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm9",                                   FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm920",                                 FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm920t",                                FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm940t",                                FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm9tdmi",                               FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm9e",                                  FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"strongarm",                  FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm110",               FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm1100",              FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm1110",              FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"arm10tdmi",                              FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_ARCH5 },
  {"arm1020t",                               FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_ARCH5 },
  {"xscale",                                 FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_STRONG | FL_ARCH5 | FL_ARCH5E | FL_XSCALE },

  {NULL, 0}
};

static const struct processors all_architectures[] =
{
  /* ARM Architectures */

  { "armv2",     FL_CO_PROC | FL_MODE26 },
  { "armv2a",    FL_CO_PROC | FL_MODE26 },
  { "armv3",     FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  { "armv3m",    FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  { "armv4",     FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 },
  /* Strictly, FL_MODE26 is a permitted option for v4t, but there are no
     implementations that support it, so we will leave it out for now.  */
  { "armv4t",    FL_CO_PROC |             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  { "armv5",     FL_CO_PROC |             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
  { "armv5t",    FL_CO_PROC |             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
  { "armv5te",   FL_CO_PROC |             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 | FL_ARCH5E },
  { NULL, 0 }
};

/* This is a magic structure.  The 'string' field is magically filled in
   with a pointer to the value specified by the user on the command line
   assuming that the user has specified such a value.  */

struct arm_cpu_select arm_select[] =
{
  /* string       name            processors  */
  { NULL,       "-mcpu=",       all_cores  },
  { NULL,       "-march=",      all_architectures },
  { NULL,       "-mtune=",      all_cores }
};
/* Return the number of bits set in VALUE.  */
static unsigned long
bit_count (value)
     signed int value;
{
  unsigned long count = 0;

  while (value)
    {
      value &= ~(value & -value);
      ++count;
    }

  return count;
}
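
/* Worked example: bit_count (0x29) == 3.  In the loop above,
   (value & -value) isolates the lowest set bit and the assignment
   clears it, so the loop iterates once per set bit rather than once
   per bit position.  */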
/* Fix up any incompatible options that the user has specified.
   This has now turned into a maze.  */
void
arm_override_options ()
{
  unsigned i;

  /* Set up the flags based on the cpu/architecture selected by the user.  */
  for (i = ARRAY_SIZE (arm_select); i--;)
    {
      struct arm_cpu_select * ptr = arm_select + i;

      if (ptr->string != NULL && ptr->string[0] != '\0')
        {
          const struct processors * sel;

          for (sel = ptr->processors; sel->name != NULL; sel++)
            if (streq (ptr->string, sel->name))
              {
                if (i == 2)
                  tune_flags = sel->flags;
                else
                  {
                    /* If we have been given an architecture and a processor
                       make sure that they are compatible.  We only generate
                       a warning though, and we prefer the CPU over the
                       architecture.  */
                    if (insn_flags != 0 && (insn_flags ^ sel->flags))
                      warning ("switch -mcpu=%s conflicts with -march= switch",
                               ptr->string);

                    insn_flags = sel->flags;
                  }

                break;
              }

          if (sel->name == NULL)
            error ("bad value (%s) for %s switch", ptr->string, ptr->name);
        }
    }

  /* If the user did not specify a processor, choose one for them.  */
  if (insn_flags == 0)
    {
      const struct processors * sel;
      unsigned int sought;
      static const struct cpu_default
      {
        const int cpu;
        const char *const name;
      }
      cpu_defaults[] =
      {
        { TARGET_CPU_arm2,      "arm2" },
        { TARGET_CPU_arm6,      "arm6" },
        { TARGET_CPU_arm610,    "arm610" },
        { TARGET_CPU_arm710,    "arm710" },
        { TARGET_CPU_arm7m,     "arm7m" },
        { TARGET_CPU_arm7500fe, "arm7500fe" },
        { TARGET_CPU_arm7tdmi,  "arm7tdmi" },
        { TARGET_CPU_arm8,      "arm8" },
        { TARGET_CPU_arm810,    "arm810" },
        { TARGET_CPU_arm9,      "arm9" },
        { TARGET_CPU_strongarm, "strongarm" },
        { TARGET_CPU_xscale,    "xscale" },
        { TARGET_CPU_generic,   "arm" },
        { 0, 0 }
      };
      const struct cpu_default * def;

      /* Find the default.  */
      for (def = cpu_defaults; def->name; def++)
        if (def->cpu == TARGET_CPU_DEFAULT)
          break;

      /* Make sure we found the default CPU.  */
      if (def->name == NULL)
        abort ();

      /* Find the default CPU's flags.  */
      for (sel = all_cores; sel->name != NULL; sel++)
        if (streq (def->name, sel->name))
          break;

      if (sel->name == NULL)
        abort ();

      insn_flags = sel->flags;

      /* Now check to see if the user has specified some command line
         switches that require certain abilities from the cpu.  */
      sought = 0;

      if (TARGET_INTERWORK || TARGET_THUMB)
        {
          sought |= (FL_THUMB | FL_MODE32);

          /* Force apcs-32 to be used for interworking.  */
          target_flags |= ARM_FLAG_APCS_32;

          /* There are no ARM processors that support both APCS-26 and
             interworking.  Therefore we force FL_MODE26 to be removed
             from insn_flags here (if it was set), so that the search
             below will always be able to find a compatible processor.  */
          insn_flags &= ~FL_MODE26;
        }
      else if (!TARGET_APCS_32)
        sought |= FL_MODE26;

      if (sought != 0 && ((sought & insn_flags) != sought))
        {
          /* Try to locate a CPU type that supports all of the abilities
             of the default CPU, plus the extra abilities requested by
             the user.  */
          for (sel = all_cores; sel->name != NULL; sel++)
            if ((sel->flags & sought) == (sought | insn_flags))
              break;

          if (sel->name == NULL)
            {
              unsigned int current_bit_count = 0;
              const struct processors * best_fit = NULL;

              /* Ideally we would like to issue an error message here
                 saying that it was not possible to find a CPU compatible
                 with the default CPU, but which also supports the command
                 line options specified by the programmer, and so they
                 ought to use the -mcpu=<name> command line option to
                 override the default CPU type.

                 Unfortunately this does not work with multilibing.  We
                 need to be able to support multilibs for -mapcs-26 and for
                 -mthumb-interwork and there is no CPU that can support both
                 options.  Instead if we cannot find a cpu that has both the
                 characteristics of the default cpu and the given command line
                 options we scan the array again looking for a best match.  */
              for (sel = all_cores; sel->name != NULL; sel++)
                if ((sel->flags & sought) == sought)
                  {
                    unsigned int count;

                    count = bit_count (sel->flags & insn_flags);

                    if (count >= current_bit_count)
                      {
                        best_fit = sel;
                        current_bit_count = count;
                      }
                  }

              if (best_fit == NULL)
                abort ();
              else
                sel = best_fit;
            }

          insn_flags = sel->flags;
        }
    }

  /* If tuning has not been specified, tune for whichever processor or
     architecture has been selected.  */
  if (tune_flags == 0)
    tune_flags = insn_flags;

  /* Make sure that the processor choice does not conflict with any of the
     other command line choices.  */
  if (TARGET_APCS_32 && !(insn_flags & FL_MODE32))
    {
      /* If APCS-32 was not the default then it must have been set by the
         user, so issue a warning message.  If the user has specified
         "-mapcs-32 -mcpu=arm2" then we lose here.  */
      if ((TARGET_DEFAULT & ARM_FLAG_APCS_32) == 0)
        warning ("target CPU does not support APCS-32" );
      target_flags &= ~ARM_FLAG_APCS_32;
    }
  else if (!TARGET_APCS_32 && !(insn_flags & FL_MODE26))
    {
      warning ("target CPU does not support APCS-26" );
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_INTERWORK && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support interworking" );
      target_flags &= ~ARM_FLAG_INTERWORK;
    }

  if (TARGET_THUMB && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support THUMB instructions");
      target_flags &= ~ARM_FLAG_THUMB;
    }

  if (TARGET_APCS_FRAME && TARGET_THUMB)
    {
      /* warning ("ignoring -mapcs-frame because -mthumb was used"); */
      target_flags &= ~ARM_FLAG_APCS_FRAME;
    }

  /* TARGET_BACKTRACE calls leaf_function_p, which causes a crash if done
     from here where no function is being compiled currently.  */
  if ((target_flags & (THUMB_FLAG_LEAF_BACKTRACE | THUMB_FLAG_BACKTRACE))
      && TARGET_ARM)
    warning ("enabling backtrace support is only meaningful when compiling for the Thumb");

  if (TARGET_ARM && TARGET_CALLEE_INTERWORKING)
    warning ("enabling callee interworking support is only meaningful when compiling for the Thumb");

  if (TARGET_ARM && TARGET_CALLER_INTERWORKING)
    warning ("enabling caller interworking support is only meaningful when compiling for the Thumb");

  /* If interworking is enabled then APCS-32 must be selected as well.  */
  if (TARGET_INTERWORK)
    {
      if (!TARGET_APCS_32)
        warning ("interworking forces APCS-32 to be used" );
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_APCS_STACK && !TARGET_APCS_FRAME)
    {
      warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
      target_flags |= ARM_FLAG_APCS_FRAME;
    }

  if (TARGET_POKE_FUNCTION_NAME)
    target_flags |= ARM_FLAG_APCS_FRAME;

  if (TARGET_APCS_REENT && flag_pic)
    error ("-fpic and -mapcs-reent are incompatible");

  if (TARGET_APCS_REENT)
    warning ("APCS reentrant code not supported.  Ignored");

  /* If this target is normally configured to use APCS frames, warn if they
     are turned off and debugging is turned on.  */
  if (TARGET_ARM
      && write_symbols != NO_DEBUG
      && !TARGET_APCS_FRAME
      && (TARGET_DEFAULT & ARM_FLAG_APCS_FRAME))
    warning ("-g with -mno-apcs-frame may not give sensible debugging");

  /* If stack checking is disabled, we can use r10 as the PIC register,
     which keeps r9 available.  */
  if (flag_pic && !TARGET_APCS_STACK)
    arm_pic_register = 10;

  if (TARGET_APCS_FLOAT)
    warning ("passing floating point arguments in fp regs not yet supported");

  /* Initialise boolean versions of the flags, for use in the arm.md file.  */
  arm_fast_multiply = (insn_flags & FL_FAST_MULT) != 0;
  arm_arch4         = (insn_flags & FL_ARCH4) != 0;
  arm_arch5         = (insn_flags & FL_ARCH5) != 0;
  arm_arch5e        = (insn_flags & FL_ARCH5E) != 0;
  arm_is_xscale     = (insn_flags & FL_XSCALE) != 0;

  arm_ld_sched      = (tune_flags & FL_LDSCHED) != 0;
  arm_is_strong     = (tune_flags & FL_STRONG) != 0;
  thumb_code        = (TARGET_ARM == 0);
  arm_is_6_or_7     = (((tune_flags & (FL_MODE26 | FL_MODE32))
                       && !(tune_flags & FL_ARCH4))) != 0;

  /* Default value for floating point code... if no co-processor
     bus, then schedule for emulated floating point.  Otherwise,
     assume the user has an FPA.
     Note: this does not prevent use of floating point instructions,
     -msoft-float does that.  */
  arm_fpu = (tune_flags & FL_CO_PROC) ? FP_HARD : FP_SOFT3;

  if (target_fp_name)
    {
      if (streq (target_fp_name, "2"))
        arm_fpu_arch = FP_SOFT2;
      else if (streq (target_fp_name, "3"))
        arm_fpu_arch = FP_SOFT3;
      else
        error ("invalid floating point emulation option: -mfpe-%s",
               target_fp_name);
    }
  else
    arm_fpu_arch = FP_DEFAULT;

  if (TARGET_FPE && arm_fpu != FP_HARD)
    arm_fpu = FP_SOFT2;

  /* For arm2/3 there is no need to do any scheduling if there is only
     a floating point emulator, or we are doing software floating-point.  */
  if ((TARGET_SOFT_FLOAT || arm_fpu != FP_HARD)
      && (tune_flags & FL_MODE32) == 0)
    flag_schedule_insns = flag_schedule_insns_after_reload = 0;

  arm_prgmode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;

  if (structure_size_string != NULL)
    {
      int size = strtol (structure_size_string, NULL, 0);

      if (size == 8 || size == 32)
        arm_structure_size_boundary = size;
      else
        warning ("structure size boundary can only be set to 8 or 32");
    }

  if (arm_pic_register_string != NULL)
    {
      int pic_register;

      if (!flag_pic)
        warning ("-mpic-register= is useless without -fpic");

      pic_register = decode_reg_name (arm_pic_register_string);

      /* Prevent the user from choosing an obviously stupid PIC register.  */
      if (pic_register < 0 || call_used_regs[pic_register]
          || pic_register == HARD_FRAME_POINTER_REGNUM
          || pic_register == STACK_POINTER_REGNUM
          || pic_register >= PC_REGNUM)
        error ("unable to use '%s' for PIC register", arm_pic_register_string);
      else
        arm_pic_register = pic_register;
    }

  if (TARGET_THUMB && flag_schedule_insns)
    {
      /* Don't warn since it's on by default in -O2.  */
      flag_schedule_insns = 0;
    }

  /* If optimizing for space, don't synthesize constants.
     For processors with load scheduling, it never costs more than 2 cycles
     to load a constant, and the load scheduler may well reduce that to 1.  */
  if (optimize_size || (tune_flags & FL_LDSCHED))
    arm_constant_limit = 1;

  if (arm_is_xscale)
    arm_constant_limit = 2;

  /* If optimizing for size, bump the number of instructions that we
     are prepared to conditionally execute (even on a StrongARM).
     Otherwise for the StrongARM, which has early execution of branches,
     a sequence that is worth skipping is shorter.  */
  if (optimize_size)
    max_insns_skipped = 6;
  else if (arm_is_strong)
    max_insns_skipped = 3;

  /* Register global variables with the garbage collector.  */
  arm_add_gc_roots ();
}
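
/* For example, "-march=armv4t -mcpu=arm8" triggers the conflict warning
   in the selection loop above: arm_select is walked from -mtune down to
   -mcpu, so the -march= entry sets insn_flags for armv4t first, then
   the -mcpu= entry finds insn_flags ^ sel->flags nonzero (arm8 lacks
   FL_THUMB, and adds FL_MODE26 and FL_LDSCHED), warns, and the CPU's
   flags win.  */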
static void
arm_add_gc_roots ()
{
  ggc_add_rtx_root (&arm_compare_op0, 1);
  ggc_add_rtx_root (&arm_compare_op1, 1);
  ggc_add_rtx_root (&arm_target_insn, 1); /* Not sure this is really a root.  */

  gcc_obstack_init(&minipool_obstack);
  minipool_startobj = (char *) obstack_alloc (&minipool_obstack, 0);
}

/* A table of known ARM exception types.
   For use with the interrupt function attribute.  */

typedef struct
{
  const char *const   arg;
  const unsigned long return_value;
}
isr_attribute_arg;

static const isr_attribute_arg isr_attribute_args [] =
{
  { "IRQ",   ARM_FT_ISR },
  { "irq",   ARM_FT_ISR },
  { "FIQ",   ARM_FT_FIQ },
  { "fiq",   ARM_FT_FIQ },
  { "ABORT", ARM_FT_ISR },
  { "abort", ARM_FT_ISR },
  { "ABORT", ARM_FT_ISR },
  { "abort", ARM_FT_ISR },
  { "UNDEF", ARM_FT_EXCEPTION },
  { "undef", ARM_FT_EXCEPTION },
  { "SWI",   ARM_FT_EXCEPTION },
  { "swi",   ARM_FT_EXCEPTION },
  { NULL,    ARM_FT_NORMAL }
};
/* Returns the (interrupt) function type of the current
   function, or ARM_FT_UNKNOWN if the type cannot be determined.  */

static unsigned long
arm_isr_value (argument)
     tree argument;
{
  const isr_attribute_arg * ptr;
  const char *              arg;

  /* No argument - default to IRQ.  */
  if (argument == NULL_TREE)
    return ARM_FT_ISR;

  /* Get the value of the argument.  */
  if (TREE_VALUE (argument) == NULL_TREE
      || TREE_CODE (TREE_VALUE (argument)) != STRING_CST)
    return ARM_FT_UNKNOWN;

  arg = TREE_STRING_POINTER (TREE_VALUE (argument));

  /* Check it against the list of known arguments.  */
  for (ptr = isr_attribute_args; ptr->arg != NULL; ptr ++)
    if (streq (arg, ptr->arg))
      return ptr->return_value;

  /* An unrecognised interrupt type.  */
  return ARM_FT_UNKNOWN;
}
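
/* Usage example for the attribute handled above (the argument strings
   come from isr_attribute_args):

       void irq_handler (void) __attribute__ ((interrupt ("IRQ")));

   maps to ARM_FT_ISR; with no argument the attribute also defaults
   to ARM_FT_ISR.  */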
/* Computes the type of the current function.  */

static unsigned long
arm_compute_func_type ()
{
  unsigned long type = ARM_FT_UNKNOWN;
  tree a;
  tree attr;

  if (TREE_CODE (current_function_decl) != FUNCTION_DECL)
    abort ();

  /* Decide if the current function is volatile.  Such functions
     never return, and many memory cycles can be saved by not storing
     register values that will never be needed again.  This optimization
     was added to speed up context switching in a kernel application.  */
  if (optimize > 0
      && current_function_nothrow
      && TREE_THIS_VOLATILE (current_function_decl))
    type |= ARM_FT_VOLATILE;

  if (current_function_needs_context)
    type |= ARM_FT_NESTED;

  attr = DECL_ATTRIBUTES (current_function_decl);

  a = lookup_attribute ("naked", attr);
  if (a != NULL_TREE)
    type |= ARM_FT_NAKED;

  if (cfun->machine->eh_epilogue_sp_ofs != NULL_RTX)
    type |= ARM_FT_EXCEPTION_HANDLER;
  else
    {
      a = lookup_attribute ("isr", attr);
      if (a == NULL_TREE)
        a = lookup_attribute ("interrupt", attr);

      if (a == NULL_TREE)
        type |= TARGET_INTERWORK ? ARM_FT_INTERWORKED : ARM_FT_NORMAL;
      else
        type |= arm_isr_value (TREE_VALUE (a));
    }

  return type;
}

/* Returns the type of the current function.  */

unsigned long
arm_current_func_type ()
{
  if (ARM_FUNC_TYPE (cfun->machine->func_type) == ARM_FT_UNKNOWN)
    cfun->machine->func_type = arm_compute_func_type ();

  return cfun->machine->func_type;
}
/* Return 1 if it is possible to return using a single instruction.  */

int
use_return_insn (iscond)
     int iscond;
{
  int regno;
  unsigned int func_type;

  /* Never use a return instruction before reload has run.  */
  if (!reload_completed)
    return 0;

  func_type = arm_current_func_type ();

  /* Naked functions, volatile functions and interrupt
     functions all need special consideration.  */
  if (func_type & (ARM_FT_INTERRUPT | ARM_FT_VOLATILE | ARM_FT_NAKED))
    return 0;

  /* As do variadic functions.  */
  if (current_function_pretend_args_size
      || current_function_anonymous_args
      /* Or if the function calls __builtin_eh_return ().  */
      || ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER
      /* Or if there is no frame pointer and there is a stack adjustment.  */
      || ((get_frame_size () + current_function_outgoing_args_size != 0)
          && !frame_pointer_needed))
    return 0;

  /* Can't be done if interworking with Thumb, and any registers have been
     stacked.  Similarly, on StrongARM, conditional returns are expensive
     if they aren't taken and registers have been stacked.  */
  if (iscond && arm_is_strong && frame_pointer_needed)
    return 0;

  if ((iscond && arm_is_strong)
      || TARGET_INTERWORK)
    {
      for (regno = 0; regno <= LAST_ARM_REGNUM; regno++)
        if (regs_ever_live[regno] && !call_used_regs[regno])
          return 0;

      if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
        return 0;
    }

  /* Can't be done if any of the FPU regs are pushed,
     since this also requires an insn.  */
  if (TARGET_HARD_FLOAT)
    for (regno = FIRST_ARM_FP_REGNUM; regno <= LAST_ARM_FP_REGNUM; regno++)
      if (regs_ever_live[regno] && !call_used_regs[regno])
        return 0;

  return 1;
}
/* Return TRUE if int I is a valid immediate ARM constant.  */

int
const_ok_for_arm (i)
     HOST_WIDE_INT i;
{
  unsigned HOST_WIDE_INT mask = ~(unsigned HOST_WIDE_INT)0xFF;

  /* For machines with >32 bit HOST_WIDE_INT, the bits above bit 31 must
     be all zero, or all one.  */
  if ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff) != 0
      && ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff)
          != ((~(unsigned HOST_WIDE_INT) 0)
              & ~(unsigned HOST_WIDE_INT) 0xffffffff)))
    return FALSE;

  /* Fast return for 0 and powers of 2 */
  if ((i & (i - 1)) == 0)
    return TRUE;

  do
    {
      if ((i & mask & (unsigned HOST_WIDE_INT) 0xffffffff) == 0)
        return TRUE;
      mask =
          (mask << 2) | ((mask & (unsigned HOST_WIDE_INT) 0xffffffff)
                         >> (32 - 2)) | ~(unsigned HOST_WIDE_INT) 0xffffffff;
    }
  while (mask != ~(unsigned HOST_WIDE_INT) 0xFF);

  return FALSE;
}
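
/* An ARM data-processing immediate is an 8-bit value rotated right by
   an even amount, which is what the loop above checks by widening the
   window in MASK two bits at a time.  For example, 0xFF, 0xFF00 and
   0xFF000000 are all valid, as is 0xF000000F (0xFF rotated right by
   4), but 0x101 and 0x1FF span more than eight bits and must be
   synthesized from several instructions.  */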
/* Return true if I is a valid constant for the operation CODE.  */
static int
const_ok_for_op (i, code)
     HOST_WIDE_INT i;
     enum rtx_code code;
{
  if (const_ok_for_arm (i))
    return 1;

  switch (code)
    {
    case PLUS:
      return const_ok_for_arm (ARM_SIGN_EXTEND (-i));

    case MINUS:         /* Should only occur with (MINUS I reg) => rsb */
    case XOR:
    case IOR:
      return 0;

    case AND:
      return const_ok_for_arm (ARM_SIGN_EXTEND (~i));

    default:
      abort ();
    }
}
/* Emit a sequence of insns to handle a large constant.
   CODE is the code of the operation required, it can be any of SET, PLUS,
   IOR, AND, XOR, MINUS;
   MODE is the mode in which the operation is being performed;
   VAL is the integer to operate on;
   SOURCE is the other operand (a register, or a null-pointer for SET);
   SUBTARGETS means it is safe to create scratch registers if that will
   either produce a simpler sequence, or we will want to cse the values.
   Return value is the number of insns emitted.  */

int
arm_split_constant (code, mode, val, target, source, subtargets)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
{
  if (subtargets || code == SET
      || (GET_CODE (target) == REG && GET_CODE (source) == REG
          && REGNO (target) != REGNO (source)))
    {
      /* After arm_reorg has been called, we can't fix up expensive
         constants by pushing them into memory so we must synthesise
         them in-line, regardless of the cost.  This is only likely to
         be more costly on chips that have load delay slots and we are
         compiling without running the scheduler (so no splitting
         occurred before the final instruction emission).

         Ref: gcc -O1 -mcpu=strongarm gcc.c-torture/compile/980506-2.c  */
      if (!after_arm_reorg
          && (arm_gen_constant (code, mode, val, target, source, 1, 0)
              > arm_constant_limit + (code != SET)))
        {
          if (code == SET)
            {
              /* Currently SET is the only monadic value for CODE, all
                 the rest are dyadic.  */
              emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (val)));
              return 1;
            }
          else
            {
              rtx temp = subtargets ? gen_reg_rtx (mode) : target;

              emit_insn (gen_rtx_SET (VOIDmode, temp, GEN_INT (val)));
              /* For MINUS, the value is subtracted from, since we never
                 have subtraction of a constant.  */
              if (code == MINUS)
                emit_insn (gen_rtx_SET (VOIDmode, target,
                                        gen_rtx_MINUS (mode, temp, source)));
              else
                emit_insn (gen_rtx_SET (VOIDmode, target,
                                        gen_rtx (code, mode, source, temp)));
              return 2;
            }
        }
    }

  return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
}
static int
count_insns_for_constant (HOST_WIDE_INT remainder, int i)
{
  HOST_WIDE_INT temp1;
  int num_insns = 0;
  do
    {
      int end;

      if (i <= 0)
        i += 32;
      if (remainder & (3 << (i - 2)))
        {
          end = i - 8;
          if (end < 0)
            end += 32;
          temp1 = remainder & ((0x0ff << end)
                               | ((i < end) ? (0xff >> (32 - end)) : 0));
          remainder &= ~temp1;
          num_insns++;
          i -= 6;
        }
      i -= 2;
    } while (remainder);
  return num_insns;
}
/* As above, but extra parameter GENERATE which, if clear, suppresses
   RTL generation.  */

static int
arm_gen_constant (code, mode, val, target, source, subtargets, generate)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
     int generate;
{
  int can_invert = 0;
  int can_negate = 0;
  int can_negate_initial = 0;
  int can_shift = 0;
  int i;
  int num_bits_set = 0;
  int set_sign_bit_copies = 0;
  int clear_sign_bit_copies = 0;
  int clear_zero_bit_copies = 0;
  int set_zero_bit_copies = 0;
  int insns = 0;
  unsigned HOST_WIDE_INT temp1, temp2;
  unsigned HOST_WIDE_INT remainder = val & 0xffffffff;

  /* Find out which operations are safe for a given CODE.  Also do a quick
     check for degenerate cases; these can occur when DImode operations
     are split.  */
  switch (code)
    {
    case SET:
      can_invert = 1;
      can_shift = 1;
      can_negate = 1;
      break;

    case PLUS:
      can_negate = 1;
      can_negate_initial = 1;
      break;

    case IOR:
      if (remainder == 0xffffffff)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    GEN_INT (ARM_SIGN_EXTEND (val))));
          return 1;
        }
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      break;

    case AND:
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, const0_rtx));
          return 1;
        }
      if (remainder == 0xffffffff)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      can_invert = 1;
      break;

    case XOR:
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      if (remainder == 0xffffffff)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_NOT (mode, source)));
          return 1;
        }

      /* We don't know how to handle this yet below.  */
      abort ();

    case MINUS:
      /* We treat MINUS as (val - source), since (source - val) is always
         passed as (source + (-val)).  */
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_NEG (mode, source)));
          return 1;
        }
      if (const_ok_for_arm (val))
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_MINUS (mode, GEN_INT (val),
                                                   source)));
          return 1;
        }
      can_negate = 1;

      break;

    default:
      abort ();
    }

  /* If we can do it in one insn get out quickly.  */
  if (const_ok_for_arm (val)
      || (can_negate_initial && const_ok_for_arm (-val))
      || (can_invert && const_ok_for_arm (~val)))
    {
      if (generate)
        emit_insn (gen_rtx_SET (VOIDmode, target,
                                (source ? gen_rtx (code, mode, source,
                                                   GEN_INT (val))
                                 : GEN_INT (val))));
      return 1;
    }

  /* Calculate a few attributes that may be useful for specific
     optimizations.  */
  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) == 0)
        clear_sign_bit_copies++;
      else
        break;
    }

  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) != 0)
        set_sign_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) == 0)
        clear_zero_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) != 0)
        set_zero_bit_copies++;
      else
        break;
    }

  switch (code)
    {
    case SET:
      /* See if we can do this by sign_extending a constant that is known
         to be negative.  This is a good way of doing it, since the shift
         may well merge into a subsequent insn.  */
      if (set_sign_bit_copies > 1)
        {
          if (const_ok_for_arm
              (temp1 = ARM_SIGN_EXTEND (remainder
                                        << (set_sign_bit_copies - 1))))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx_SET (VOIDmode, new_src,
                                          GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
          /* For an inverted constant, we will need to set the low bits,
             these will be shifted out of harm's way.  */
          temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
          if (const_ok_for_arm (~temp1))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx_SET (VOIDmode, new_src,
                                          GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
        }

      /* See if we can generate this by setting the bottom (or the top)
         16 bits, and then shifting these into the other half of the
         word.  We only look for the simplest cases, to do more would cost
         too much.  Be careful, however, not to generate this when the
         alternative would take fewer insns.  */
      if (val & 0xffff0000)
        {
          temp1 = remainder & 0xffff0000;
          temp2 = remainder & 0x0000ffff;

          /* Overlaps outside this range are best done using other methods.  */
          for (i = 9; i < 24; i++)
            {
              if ((((temp2 | (temp2 << i)) & 0xffffffff) == remainder)
                  && !const_ok_for_arm (temp2))
                {
                  rtx new_src = (subtargets
                                 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
                                 : target);
                  insns = arm_gen_constant (code, mode, temp2, new_src,
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn (gen_rtx_SET
                               (VOIDmode, target,
                                gen_rtx_IOR (mode,
                                             gen_rtx_ASHIFT (mode, source,
                                                             GEN_INT (i)),
                                             source)));
                  return insns + 1;
                }
            }

          /* Don't duplicate cases already considered.  */
          for (i = 17; i < 24; i++)
            {
              if (((temp1 | (temp1 >> i)) == remainder)
                  && !const_ok_for_arm (temp1))
                {
                  rtx new_src = (subtargets
                                 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
                                 : target);
                  insns = arm_gen_constant (code, mode, temp1, new_src,
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn
                      (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_IOR
                                    (mode,
                                     gen_rtx_LSHIFTRT (mode, source,
                                                       GEN_INT (i)),
                                     source)));
                  return insns + 1;
                }
            }
        }
      break;

    case IOR:
    case XOR:
      /* If we have IOR or XOR, and the constant can be loaded in a
         single instruction, and we can find a temporary to put it in,
         then this can be done in two instructions instead of 3-4.  */
      if (subtargets
          /* TARGET can't be NULL if SUBTARGETS is 0 */
          || (reload_completed && !reg_mentioned_p (target, source)))
        {
          if (const_ok_for_arm (ARM_SIGN_EXTEND (~val)))
            {
              if (generate)
                {
                  rtx sub = subtargets ? gen_reg_rtx (mode) : target;

                  emit_insn (gen_rtx_SET (VOIDmode, sub, GEN_INT (val)));
                  emit_insn (gen_rtx_SET (VOIDmode, target,
                                          gen_rtx (code, mode, source, sub)));
                }
              return 2;
            }
        }

      if (code == XOR)
        break;

      if (set_sign_bit_copies > 8
          && (val & (-1 << (32 - set_sign_bit_copies))) == val)
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (set_sign_bit_copies);

              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_ASHIFT (mode,
                                                                   source,
                                                                   shift))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_LSHIFTRT (mode, sub,
                                                                     shift))));
            }
          return 2;
        }

      if (set_zero_bit_copies > 8
          && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (set_zero_bit_copies);

              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_LSHIFTRT (mode,
                                                                     source,
                                                                     shift))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_ASHIFT (mode, sub,
                                                                   shift))));
            }
          return 2;
        }

      if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~val)))
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode, source)));
              source = sub;
              if (subtargets)
                sub = gen_reg_rtx (mode);
              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_AND (mode, source,
                                                   GEN_INT (temp1))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode, sub)));
            }
          return 3;
        }
      break;

    case AND:
      /* See if two shifts will do 2 or more insn's worth of work.  */
      if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
        {
          HOST_WIDE_INT shift_mask = ((0xffffffff
                                       << (32 - clear_sign_bit_copies))
                                      & 0xffffffff);

          if ((remainder | shift_mask) != 0xffffffff)
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            new_src, source, subtargets, 1);
                  source = new_src;
                }
              else
                {
                  rtx targ = subtargets ? NULL_RTX : target;
                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            targ, source, subtargets, 0);
                }
            }

          if (generate)
            {
              rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (clear_sign_bit_copies);

              emit_insn (gen_ashlsi3 (new_src, source, shift));
              emit_insn (gen_lshrsi3 (target, new_src, shift));
            }

          return insns + 2;
        }

      if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
        {
          HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;

          if ((remainder | shift_mask) != 0xffffffff)
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;

                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            new_src, source, subtargets, 1);
                  source = new_src;
                }
              else
                {
                  rtx targ = subtargets ? NULL_RTX : target;

                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            targ, source, subtargets, 0);
                }
            }

          if (generate)
            {
              rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (clear_zero_bit_copies);

              emit_insn (gen_lshrsi3 (new_src, source, shift));
              emit_insn (gen_ashlsi3 (target, new_src, shift));
            }

          return insns + 2;
        }

      break;

    default:
      break;
    }

  for (i = 0; i < 32; i++)
    if (remainder & (1 << i))
      num_bits_set++;

  if (code == AND || (can_invert && num_bits_set > 16))
    remainder = (~remainder) & 0xffffffff;
  else if (code == PLUS && num_bits_set > 16)
    remainder = (-remainder) & 0xffffffff;
  else
    {
      can_invert = 0;
      can_negate = 0;
    }

  /* Now try and find a way of doing the job in either two or three
     instructions.
     We start by looking for the largest block of zeros that are aligned on
     a 2-bit boundary, we then fill up the temps, wrapping around to the
     top of the word when we drop off the bottom.
     In the worst case this code should produce no more than four insns.  */
  {
    int best_start = 0;
    int best_consecutive_zeros = 0;

    for (i = 0; i < 32; i += 2)
      {
        int consecutive_zeros = 0;

        if (!(remainder & (3 << i)))
          {
            while ((i < 32) && !(remainder & (3 << i)))
              {
                consecutive_zeros += 2;
                i += 2;
              }
            if (consecutive_zeros > best_consecutive_zeros)
              {
                best_consecutive_zeros = consecutive_zeros;
                best_start = i - consecutive_zeros;
              }
            i -= 2;
          }
      }

    /* So long as it won't require any more insns to do so, it's
       desirable to emit a small constant (in bits 0...9) in the last
       insn.  This way there is more chance that it can be combined with
       a later addressing insn to form a pre-indexed load or store
       operation.  Consider:

               *((volatile int *)0xe0000100) = 1;
               *((volatile int *)0xe0000110) = 2;

       We want this to wind up as:

                mov rA, #0xe0000000
                mov rB, #1
                str rB, [rA, #0x100]
                mov rB, #2
                str rB, [rA, #0x110]

       rather than having to synthesize both large constants from scratch.

       Therefore, we calculate how many insns would be required to emit
       the constant starting from `best_start', and also starting from
       zero (i.e. with bit 31 first to be output).  If `best_start' doesn't
       yield a shorter sequence, we may as well use zero.  */
    if (best_start != 0
        && ((((unsigned HOST_WIDE_INT) 1) << best_start) < remainder)
        && (count_insns_for_constant (remainder, 0) <=
            count_insns_for_constant (remainder, best_start)))
      best_start = 0;

    /* Now start emitting the insns.  */
    i = best_start;
    do
      {
        int end;

        if (i <= 0)
          i += 32;
        if (remainder & (3 << (i - 2)))
          {
            end = i - 8;
            if (end < 0)
              end += 32;
            temp1 = remainder & ((0x0ff << end)
                                 | ((i < end) ? (0xff >> (32 - end)) : 0));
            remainder &= ~temp1;

            if (generate)
              {
                rtx new_src, temp1_rtx;

                if (code == SET || code == MINUS)
                  {
                    new_src = (subtargets ? gen_reg_rtx (mode) : target);
                    if (can_invert && code != MINUS)
                      temp1 = ~temp1;
                  }
                else
                  {
                    if (remainder && subtargets)
                      new_src = gen_reg_rtx (mode);
                    else
                      new_src = target;
                    if (can_invert)
                      temp1 = ~temp1;
                    else if (can_negate)
                      temp1 = -temp1;
                  }

                temp1 = trunc_int_for_mode (temp1, mode);
                temp1_rtx = GEN_INT (temp1);

                if (code == SET)
                  ;
                else if (code == MINUS)
                  temp1_rtx = gen_rtx_MINUS (mode, temp1_rtx, source);
                else
                  temp1_rtx = gen_rtx_fmt_ee (code, mode, source, temp1_rtx);

                emit_insn (gen_rtx_SET (VOIDmode, new_src, temp1_rtx));
                source = new_src;
              }

            if (code == SET)
              {
                can_invert = 0;
                code = PLUS;
              }
            else if (code == MINUS)
              code = PLUS;

            insns++;
            i -= 6;
          }
        i -= 2;
      }
    while (remainder);
  }

  return insns;
}
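
/* Worked example: a SET of 0xFF0000FF cannot be done with one MOV (its
   set bits span more than eight bits even allowing rotation), so the
   chunking loop above emits two rotated 8-bit pieces, with the small
   low piece last, and switches CODE to PLUS after the first insn:

        mov     rD, #0xFF000000
        add     rD, rD, #0xFF
*/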
/* Canonicalize a comparison so that we are more likely to recognize it.
   This can be done for a few constant compares, where we can make the
   immediate value easier to load.  */

enum rtx_code
arm_canonicalize_comparison (code, op1)
     enum rtx_code code;
     rtx * op1;
{
  unsigned HOST_WIDE_INT i = INTVAL (*op1);

  switch (code)
    {
    case EQ:
    case NE:
      return code;

    case GT:
    case LE:
      if (i != ((((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1)) - 1)
          && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
        {
          *op1 = GEN_INT (i + 1);
          return code == GT ? GE : LT;
        }
      break;

    case GE:
    case LT:
      if (i != (((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1))
          && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
        {
          *op1 = GEN_INT (i - 1);
          return code == GE ? GT : LE;
        }
      break;

    case GTU:
    case LEU:
      if (i != ~((unsigned HOST_WIDE_INT) 0)
          && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
        {
          *op1 = GEN_INT (i + 1);
          return code == GTU ? GEU : LTU;
        }
      break;

    case GEU:
    case LTU:
      if (i != 0
          && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
        {
          *op1 = GEN_INT (i - 1);
          return code == GEU ? GTU : LEU;
        }
      break;

    default:
      abort ();
    }

  return code;
}
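
/* For example, (x <= 0xFFF) is canonicalized to (x < 0x1000): 0xFFF is
   not a valid rotated 8-bit immediate, but 0x1000 is, so the
   canonicalized comparison loads its constant in one insn.  */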
/* Decide whether a type should be returned in memory (true)
   or in a register (false).  This is called by the macro
   RETURN_IN_MEMORY.  */

int
arm_return_in_memory (type)
     tree type;
{
  if (!AGGREGATE_TYPE_P (type))
    /* All simple types are returned in registers.  */
    return 0;

  /* For the arm-wince targets we choose to be compatible with Microsoft's
     ARM and Thumb compilers, which always return aggregates in memory.  */
#ifndef ARM_WINCE
  /* All structures/unions bigger than one word are returned in memory.
     Also catch the case where int_size_in_bytes returns -1.  In this case
     the aggregate is either huge or of variable size, and in either case
     we will want to return it via memory and not in a register.  */
  if (((unsigned int) int_size_in_bytes (type)) > UNITS_PER_WORD)
    return 1;

  if (TREE_CODE (type) == RECORD_TYPE)
    {
      tree field;

      /* For a struct the APCS says that we only return in a register
         if the type is 'integer like' and every addressable element
         has an offset of zero.  For practical purposes this means
         that the structure can have at most one non bit-field element
         and that this element must be the first one in the structure.  */

      /* Find the first field, ignoring non FIELD_DECL things which will
         have been created by C++.  */
      for (field = TYPE_FIELDS (type);
           field && TREE_CODE (field) != FIELD_DECL;
           field = TREE_CHAIN (field))
        continue;

      if (field == NULL)
        return 0; /* An empty structure.  Allowed by an extension to ANSI C.  */

      /* Check that the first field is valid for returning in a register.  */

      /* ... Floats are not allowed */
      if (FLOAT_TYPE_P (TREE_TYPE (field)))
        return 1;

      /* ... Aggregates that are not themselves valid for returning in
         a register are not allowed.  */
      if (RETURN_IN_MEMORY (TREE_TYPE (field)))
        return 1;

      /* Now check the remaining fields, if any.  Only bitfields are allowed,
         since they are not addressable.  */
      for (field = TREE_CHAIN (field);
           field;
           field = TREE_CHAIN (field))
        {
          if (TREE_CODE (field) != FIELD_DECL)
            continue;

          if (!DECL_BIT_FIELD_TYPE (field))
            return 1;
        }

      return 0;
    }

  if (TREE_CODE (type) == UNION_TYPE)
    {
      tree field;

      /* Unions can be returned in registers if every element is
         integral, or can be returned in an integer register.  */
      for (field = TYPE_FIELDS (type);
           field;
           field = TREE_CHAIN (field))
        {
          if (TREE_CODE (field) != FIELD_DECL)
            continue;

          if (FLOAT_TYPE_P (TREE_TYPE (field)))
            return 1;

          if (RETURN_IN_MEMORY (TREE_TYPE (field)))
            return 1;
        }

      return 0;
    }
#endif /* not ARM_WINCE */

  /* Return all other types in memory.  */
  return 1;
}
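
/* For example, struct { int x; } is returned in a register (one word,
   integer-like), while struct { int x, y; } (two words) and
   struct { float f; } (first field is a float) are both returned in
   memory under the rules above.  */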
/* Initialize a variable CUM of type CUMULATIVE_ARGS
   for a call to a function whose data type is FNTYPE.
   For a library call, FNTYPE is NULL.  */
void
arm_init_cumulative_args (pcum, fntype, libname, indirect)
     CUMULATIVE_ARGS * pcum;
     tree fntype;
     rtx libname  ATTRIBUTE_UNUSED;
     int indirect ATTRIBUTE_UNUSED;
{
  /* On the ARM, the offset starts at 0.  */
  pcum->nregs = ((fntype && aggregate_value_p (TREE_TYPE (fntype))) ? 1 : 0);

  pcum->call_cookie = CALL_NORMAL;

  if (TARGET_LONG_CALLS)
    pcum->call_cookie = CALL_LONG;

  /* Check for long call/short call attributes.  The attributes
     override any command line option.  */
  if (fntype)
    {
      if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (fntype)))
        pcum->call_cookie = CALL_SHORT;
      else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (fntype)))
        pcum->call_cookie = CALL_LONG;
    }
}
/* Determine where to put an argument to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).  */

rtx
arm_function_arg (pcum, mode, type, named)
     CUMULATIVE_ARGS * pcum;
     enum machine_mode mode;
     tree type ATTRIBUTE_UNUSED;
     int named;
{
  if (mode == VOIDmode)
    /* Compute operand 2 of the call insn.  */
    return GEN_INT (pcum->call_cookie);

  if (!named || pcum->nregs >= NUM_ARG_REGS)
    return NULL_RTX;

  return gen_rtx_REG (mode, pcum->nregs);
}
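
/* Under the APCS the first NUM_ARG_REGS word-sized arguments are
   passed in r0-r3 and the rest on the stack, so for example
   f (int a, int b, int c, int d, int e) receives A..D in r0-r3 and E
   on the stack.  Anonymous arguments matching an ellipsis always go
   on the stack, per the !named test above.  */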
1899 /* Encode the current state of the #pragma [no_]long_calls. */
1900 typedef enum
1902 OFF, /* No #pramgma [no_]long_calls is in effect. */
1903 LONG, /* #pragma long_calls is in effect. */
1904 SHORT /* #pragma no_long_calls is in effect. */
1905 } arm_pragma_enum;
1907 static arm_pragma_enum arm_pragma_long_calls = OFF;
1909 void
1910 arm_pr_long_calls (pfile)
1911 cpp_reader * pfile ATTRIBUTE_UNUSED;
1913 arm_pragma_long_calls = LONG;
1916 void
1917 arm_pr_no_long_calls (pfile)
1918 cpp_reader * pfile ATTRIBUTE_UNUSED;
1920 arm_pragma_long_calls = SHORT;
1923 void
1924 arm_pr_long_calls_off (pfile)
1925 cpp_reader * pfile ATTRIBUTE_UNUSED;
1927 arm_pragma_long_calls = OFF;
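/* For example, a header can switch the default call type around a
   group of declarations without touching each prototype (far_func
   and near_func are invented names):

       #pragma long_calls
       void far_func (void);     -- picks up long_call by default
       #pragma long_calls_off
       void near_func (void);    -- back to the command-line setting

   The attributes themselves are attached later, in
   arm_set_default_type_attributes below.  */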
1930 /* Table of machine attributes. */
1931 const struct attribute_spec arm_attribute_table[] =
1933 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
1934 /* Function calls made to this symbol must be done indirectly, because
1935 it may lie outside of the 26 bit addressing range of a normal function
1936 call. */
1937 { "long_call", 0, 0, false, true, true, NULL },
1938 /* Whereas these functions are always known to reside within the 26 bit
1939 addressing range. */
1940 { "short_call", 0, 0, false, true, true, NULL },
1941 /* Interrupt Service Routines have special prologue and epilogue requirements. */
1942 { "isr", 0, 1, false, false, false, arm_handle_isr_attribute },
1943 { "interrupt", 0, 1, false, false, false, arm_handle_isr_attribute },
1944 { "naked", 0, 0, true, false, false, arm_handle_fndecl_attribute },
1945 #ifdef ARM_PE
1946 /* ARM/PE has three new attributes:
1947 interfacearm - ?
1948 dllexport - for exporting a function/variable that will live in a dll
1949 dllimport - for importing a function/variable from a dll
1951 Microsoft allows multiple declspecs in one __declspec, separating
1952 them with spaces. We do NOT support this. Instead, use __declspec
1953 multiple times.
1955 { "dllimport", 0, 0, true, false, false, NULL },
1956 { "dllexport", 0, 0, true, false, false, NULL },
1957 { "interfacearm", 0, 0, true, false, false, arm_handle_fndecl_attribute },
1958 #endif
1959 { NULL, 0, 0, false, false, false, NULL }
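/* Example uses of the attributes declared above (handler_fn and
   stub_fn are invented names):

       void handler_fn (void) __attribute__ ((interrupt ("IRQ")));
       void stub_fn (void) __attribute__ ((naked));

   "interrupt"/"isr" take an optional argument naming the exception
   kind, decoded by arm_isr_value; "naked" requires a FUNCTION_DECL,
   hence its decl_req field is true.  */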
1962 /* Handle an attribute requiring a FUNCTION_DECL;
1963 arguments as in struct attribute_spec.handler. */
1965 static tree
1966 arm_handle_fndecl_attribute (node, name, args, flags, no_add_attrs)
1967 tree * node;
1968 tree name;
1969 tree args ATTRIBUTE_UNUSED;
1970 int flags ATTRIBUTE_UNUSED;
1971 bool * no_add_attrs;
1973 if (TREE_CODE (*node) != FUNCTION_DECL)
1975 warning ("`%s' attribute only applies to functions",
1976 IDENTIFIER_POINTER (name));
1977 *no_add_attrs = true;
1980 return NULL_TREE;
1983 /* Handle an "interrupt" or "isr" attribute;
1984 arguments as in struct attribute_spec.handler. */
1986 static tree
1987 arm_handle_isr_attribute (node, name, args, flags, no_add_attrs)
1988 tree * node;
1989 tree name;
1990 tree args;
1991 int flags;
1992 bool * no_add_attrs;
1994 if (DECL_P (*node))
1996 if (TREE_CODE (*node) != FUNCTION_DECL)
1998 warning ("`%s' attribute only applies to functions",
1999 IDENTIFIER_POINTER (name));
2000 *no_add_attrs = true;
2002 /* FIXME: the argument, if any, is checked for type attributes;
2003 should it be checked for decl ones? */
2005 else
2007 if (TREE_CODE (*node) == FUNCTION_TYPE
2008 || TREE_CODE (*node) == METHOD_TYPE)
2010 if (arm_isr_value (args) == ARM_FT_UNKNOWN)
2012 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
2013 *no_add_attrs = true;
2016 else if (TREE_CODE (*node) == POINTER_TYPE
2017 && (TREE_CODE (TREE_TYPE (*node)) == FUNCTION_TYPE
2018 || TREE_CODE (TREE_TYPE (*node)) == METHOD_TYPE)
2019 && arm_isr_value (args) != ARM_FT_UNKNOWN)
2021 *node = build_type_copy (*node);
2022 TREE_TYPE (*node) = build_type_attribute_variant
2023 (TREE_TYPE (*node),
2024 tree_cons (name, args, TYPE_ATTRIBUTES (TREE_TYPE (*node))));
2025 *no_add_attrs = true;
2027 else
2029 /* Possibly pass this attribute on from the type to a decl. */
2030 if (flags & ((int) ATTR_FLAG_DECL_NEXT
2031 | (int) ATTR_FLAG_FUNCTION_NEXT
2032 | (int) ATTR_FLAG_ARRAY_NEXT))
2034 *no_add_attrs = true;
2035 return tree_cons (name, args, NULL_TREE);
2037 else
2039 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
2044 return NULL_TREE;
2047 /* Return 0 if the attributes for two types are incompatible, 1 if they
2048 are compatible, and 2 if they are nearly compatible (which causes a
2049 warning to be generated). */
2051 static int
2052 arm_comp_type_attributes (type1, type2)
2053 tree type1;
2054 tree type2;
2056 int l1, l2, s1, s2;
2058 /* Check for mismatch of non-default calling convention. */
2059 if (TREE_CODE (type1) != FUNCTION_TYPE)
2060 return 1;
2062 /* Check for mismatched call attributes. */
2063 l1 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type1)) != NULL;
2064 l2 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type2)) != NULL;
2065 s1 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type1)) != NULL;
2066 s2 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type2)) != NULL;
2068 /* Only bother to check if an attribute is defined. */
2069 if (l1 | l2 | s1 | s2)
2071 /* If one type has an attribute, the other must have the same attribute. */
2072 if ((l1 != l2) || (s1 != s2))
2073 return 0;
2075 /* Disallow mixed attributes. */
2076 if ((l1 & s2) || (l2 & s1))
2077 return 0;
2080 /* Check for mismatched ISR attribute. */
2081 l1 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type1)) != NULL;
2082 if (! l1)
2083 l1 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type1)) != NULL;
2084 l2 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type2)) != NULL;
2085 if (! l2)
2086 l2 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type2)) != NULL;
2087 if (l1 != l2)
2088 return 0;
2090 return 1;
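/* A sketch of what this rejects (f, longfn and p are invented
   names): only one of the two types carries a call attribute, so
   l1 != l2 and the function returns 0, making the assignment
   below a diagnosed mismatch:

       void f (void);
       typedef void longfn (void) __attribute__ ((long_call));
       longfn * p = f;  */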
2093 /* Encode long_call or short_call attribute by prefixing
2094 symbol name in DECL with a special character FLAG. */
2096 void
2097 arm_encode_call_attribute (decl, flag)
2098 tree decl;
2099 int flag;
2101 const char * str = XSTR (XEXP (DECL_RTL (decl), 0), 0);
2102 int len = strlen (str);
2103 char * newstr;
2105 if (TREE_CODE (decl) != FUNCTION_DECL)
2106 return;
2108 /* Do not allow weak functions to be treated as short call. */
2109 if (DECL_WEAK (decl) && flag == SHORT_CALL_FLAG_CHAR)
2110 return;
2112 newstr = alloca (len + 2);
2113 newstr[0] = flag;
2114 strcpy (newstr + 1, str);
2116 newstr = (char *) ggc_alloc_string (newstr, len + 1);
2117 XSTR (XEXP (DECL_RTL (decl), 0), 0) = newstr;
2120 /* Assigns default attributes to newly defined type. This is used to
2121 set short_call/long_call attributes for function types of
2122 functions defined inside corresponding #pragma scopes. */
2124 static void
2125 arm_set_default_type_attributes (type)
2126 tree type;
2128 /* Add __attribute__ ((long_call)) to all functions, when
2129 inside #pragma long_calls or __attribute__ ((short_call)),
2130 when inside #pragma no_long_calls. */
2131 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
2133 tree type_attr_list, attr_name;
2134 type_attr_list = TYPE_ATTRIBUTES (type);
2136 if (arm_pragma_long_calls == LONG)
2137 attr_name = get_identifier ("long_call");
2138 else if (arm_pragma_long_calls == SHORT)
2139 attr_name = get_identifier ("short_call");
2140 else
2141 return;
2143 type_attr_list = tree_cons (attr_name, NULL_TREE, type_attr_list);
2144 TYPE_ATTRIBUTES (type) = type_attr_list;
2148 /* Return 1 if the operand is a SYMBOL_REF for a function known to be
2149 defined within the current compilation unit. If this cannot be
2150 determined, then 0 is returned. */
2152 static int
2153 current_file_function_operand (sym_ref)
2154 rtx sym_ref;
2156 /* This is a bit of a fib. A function will have a short call flag
2157 applied to its name if it has the short call attribute, or it has
2158 already been defined within the current compilation unit. */
2159 if (ENCODED_SHORT_CALL_ATTR_P (XSTR (sym_ref, 0)))
2160 return 1;
2162 /* The current function is always defined within the current compilation
2163 unit. If it is a weak definition, however, then this may not be the real
2164 definition of the function, and so we have to say no. */
2165 if (sym_ref == XEXP (DECL_RTL (current_function_decl), 0)
2166 && !DECL_WEAK (current_function_decl))
2167 return 1;
2169 /* We cannot make the determination - default to returning 0. */
2170 return 0;
2173 /* Return non-zero if a 32 bit "long_call" should be generated for
2174 this call. We generate a long_call if the function:
2176 a. has an __attribute__ ((long_call))
2177 or b. is within the scope of a #pragma long_calls
2178 or c. the -mlong-calls command line switch has been specified
2180 However we do not generate a long call if the function:
2182 d. has an __attribute__ ((short_call))
2183 or e. is inside the scope of a #pragma no_long_calls
2184 or f. has an __attribute__ ((section))
2185 or g. is defined within the current compilation unit.
2187 This function will be called by C fragments contained in the machine
2188 description file. CALL_REF and CALL_COOKIE correspond to the matched
2189 rtl operands. CALL_SYMBOL is used to distinguish between
2190 two different callers of the function. It is set to 1 in the
2191 "call_symbol" and "call_symbol_value" patterns and to 0 in the "call"
2192 and "call_value" patterns. This is because of the difference in the
2193 SYM_REFs passed by these patterns. */
2196 arm_is_longcall_p (sym_ref, call_cookie, call_symbol)
2197 rtx sym_ref;
2198 int call_cookie;
2199 int call_symbol;
2201 if (!call_symbol)
2203 if (GET_CODE (sym_ref) != MEM)
2204 return 0;
2206 sym_ref = XEXP (sym_ref, 0);
2209 if (GET_CODE (sym_ref) != SYMBOL_REF)
2210 return 0;
2212 if (call_cookie & CALL_SHORT)
2213 return 0;
2215 if (TARGET_LONG_CALLS && flag_function_sections)
2216 return 1;
2218 if (current_file_function_operand (sym_ref))
2219 return 0;
2221 return (call_cookie & CALL_LONG)
2222 || ENCODED_LONG_CALL_ATTR_P (XSTR (sym_ref, 0))
2223 || TARGET_LONG_CALLS;
2226 /* Return non-zero if it is ok to make a tail-call to DECL. */
2229 arm_function_ok_for_sibcall (decl)
2230 tree decl;
2232 int call_type = TARGET_LONG_CALLS ? CALL_LONG : CALL_NORMAL;
2234 /* Never tailcall something for which we have no decl, or if we
2235 are in Thumb mode. */
2236 if (decl == NULL || TARGET_THUMB)
2237 return 0;
2239 /* Get the calling method. */
2240 if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
2241 call_type = CALL_SHORT;
2242 else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
2243 call_type = CALL_LONG;
2245 /* Cannot tail-call to long calls, since these are out of range of
2246 a branch instruction. However, if not compiling PIC, we know
2247 we can reach the symbol if it is in this compilation unit. */
2248 if (call_type == CALL_LONG && (flag_pic || !TREE_ASM_WRITTEN (decl)))
2249 return 0;
2251 /* If we are interworking and the function is not declared static
2252 then we can't tail-call it unless we know that it exists in this
2253 compilation unit (since it might be a Thumb routine). */
2254 if (TARGET_INTERWORK && TREE_PUBLIC (decl) && !TREE_ASM_WRITTEN (decl))
2255 return 0;
2257 /* Never tailcall from an ISR routine - it needs a special exit sequence. */
2258 if (IS_INTERRUPT (arm_current_func_type ()))
2259 return 0;
2261 /* Everything else is ok. */
2262 return 1;
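/* A sketch of the effect (helper and wrapper are invented names):
   in ARM (not Thumb) code, with no long-call handling in force,

       static int helper (int x);
       int wrapper (int x) { return helper (x); }

   wrapper's call can typically be compiled as a plain branch to
   helper rather than bl plus a return, since helper has a decl, is
   not a long call, and, being static, cannot trip the interworking
   check above.  */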
2267 legitimate_pic_operand_p (x)
2268 rtx x;
2270 if (CONSTANT_P (x)
2271 && flag_pic
2272 && (GET_CODE (x) == SYMBOL_REF
2273 || (GET_CODE (x) == CONST
2274 && GET_CODE (XEXP (x, 0)) == PLUS
2275 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)))
2276 return 0;
2278 return 1;
2282 legitimize_pic_address (orig, mode, reg)
2283 rtx orig;
2284 enum machine_mode mode;
2285 rtx reg;
2287 if (GET_CODE (orig) == SYMBOL_REF
2288 || GET_CODE (orig) == LABEL_REF)
2290 #ifndef AOF_ASSEMBLER
2291 rtx pic_ref, address;
2292 #endif
2293 rtx insn;
2294 int subregs = 0;
2296 if (reg == 0)
2298 if (no_new_pseudos)
2299 abort ();
2300 else
2301 reg = gen_reg_rtx (Pmode);
2303 subregs = 1;
2306 #ifdef AOF_ASSEMBLER
2307 /* The AOF assembler can generate relocations for these directly, and
2308 understands that the PIC register has to be added into the offset. */
2309 insn = emit_insn (gen_pic_load_addr_based (reg, orig));
2310 #else
2311 if (subregs)
2312 address = gen_reg_rtx (Pmode);
2313 else
2314 address = reg;
2316 if (TARGET_ARM)
2317 emit_insn (gen_pic_load_addr_arm (address, orig));
2318 else
2319 emit_insn (gen_pic_load_addr_thumb (address, orig));
2321 if (GET_CODE (orig) == LABEL_REF && NEED_GOT_RELOC)
2322 pic_ref = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, address);
2323 else
2325 pic_ref = gen_rtx_MEM (Pmode,
2326 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
2327 address));
2328 RTX_UNCHANGING_P (pic_ref) = 1;
2331 insn = emit_move_insn (reg, pic_ref);
2332 #endif
2333 current_function_uses_pic_offset_table = 1;
2334 /* Put a REG_EQUAL note on this insn, so that it can be optimized
2335 by the loop optimization pass. */
2336 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, orig,
2337 REG_NOTES (insn));
2338 return reg;
2340 else if (GET_CODE (orig) == CONST)
2342 rtx base, offset;
2344 if (GET_CODE (XEXP (orig, 0)) == PLUS
2345 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
2346 return orig;
2348 if (reg == 0)
2350 if (no_new_pseudos)
2351 abort ();
2352 else
2353 reg = gen_reg_rtx (Pmode);
2356 if (GET_CODE (XEXP (orig, 0)) == PLUS)
2358 base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
2359 offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
2360 base == reg ? 0 : reg);
2362 else
2363 abort ();
2365 if (GET_CODE (offset) == CONST_INT)
2367 /* The base register doesn't really matter; we only want to
2368 test the index for the appropriate mode. */
2369 ARM_GO_IF_LEGITIMATE_INDEX (mode, 0, offset, win);
2371 if (!no_new_pseudos)
2372 offset = force_reg (Pmode, offset);
2373 else
2374 abort ();
2376 win:
2377 if (GET_CODE (offset) == CONST_INT)
2378 return plus_constant (base, INTVAL (offset));
2381 if (GET_MODE_SIZE (mode) > 4
2382 && (GET_MODE_CLASS (mode) == MODE_INT
2383 || TARGET_SOFT_FLOAT))
2385 emit_insn (gen_addsi3 (reg, base, offset));
2386 return reg;
2389 return gen_rtx_PLUS (Pmode, base, offset);
2392 return orig;
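/* Roughly, for a global symbol `sym' the ARM path above emits

       ldr  reg, <offset of sym's GOT slot>
       ldr  reg2, [PIC_reg, reg]    -- load &sym from the GOT

   whereas a local label (the LABEL_REF case) skips the memory load
   and just adds the PIC register to the address.  This is a sketch
   of the eventual assembly, not literal emitted text.  */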
2395 /* Generate code to load the PIC register. PROLOGUE is true if
2396 called from arm_expand_prologue (in which case we want the
2397 generated insns at the start of the function); false if called
2398 by an exception receiver that needs the PIC register reloaded
2399 (in which case the insns are just dumped at the current location). */
2401 void
2402 arm_finalize_pic (prologue)
2403 int prologue ATTRIBUTE_UNUSED;
2405 #ifndef AOF_ASSEMBLER
2406 rtx l1, pic_tmp, pic_tmp2, seq, pic_rtx;
2407 rtx global_offset_table;
2409 if (current_function_uses_pic_offset_table == 0 || TARGET_SINGLE_PIC_BASE)
2410 return;
2412 if (!flag_pic)
2413 abort ();
2415 start_sequence ();
2416 l1 = gen_label_rtx ();
2418 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2419 /* On the ARM the PC register contains 'dot + 8' at the time of the
2420 addition, on the Thumb it is 'dot + 4'. */
2421 pic_tmp = plus_constant (gen_rtx_LABEL_REF (Pmode, l1), TARGET_ARM ? 8 : 4);
2422 if (GOT_PCREL)
2423 pic_tmp2 = gen_rtx_CONST (VOIDmode,
2424 gen_rtx_PLUS (Pmode, global_offset_table, pc_rtx));
2425 else
2426 pic_tmp2 = gen_rtx_CONST (VOIDmode, global_offset_table);
2428 pic_rtx = gen_rtx_CONST (Pmode, gen_rtx_MINUS (Pmode, pic_tmp2, pic_tmp));
2430 if (TARGET_ARM)
2432 emit_insn (gen_pic_load_addr_arm (pic_offset_table_rtx, pic_rtx));
2433 emit_insn (gen_pic_add_dot_plus_eight (pic_offset_table_rtx, l1));
2435 else
2437 emit_insn (gen_pic_load_addr_thumb (pic_offset_table_rtx, pic_rtx));
2438 emit_insn (gen_pic_add_dot_plus_four (pic_offset_table_rtx, l1));
2441 seq = gen_sequence ();
2442 end_sequence ();
2443 if (prologue)
2444 emit_insn_after (seq, get_insns ());
2445 else
2446 emit_insn (seq);
2448 /* Need to emit this whether or not we obey regdecls,
2449 since setjmp/longjmp can cause life info to screw up. */
2450 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
2451 #endif /* AOF_ASSEMBLER */
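/* The ARM sequence built above assembles to something like

       ldr  rPIC, =_GLOBAL_OFFSET_TABLE_ - (L1 + 8)
   L1: add  rPIC, pc, rPIC

   with the +8 (or +4 on Thumb) cancelling the pipeline offset noted
   in the comment above.  Again, a sketch rather than literal
   output.  */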
2454 #define REG_OR_SUBREG_REG(X) \
2455 (GET_CODE (X) == REG \
2456 || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))
2458 #define REG_OR_SUBREG_RTX(X) \
2459 (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))
2461 #ifndef COSTS_N_INSNS
2462 #define COSTS_N_INSNS(N) ((N) * 4 - 2)
2463 #endif
2466 arm_rtx_costs (x, code, outer)
2467 rtx x;
2468 enum rtx_code code;
2469 enum rtx_code outer;
2471 enum machine_mode mode = GET_MODE (x);
2472 enum rtx_code subcode;
2473 int extra_cost;
2475 if (TARGET_THUMB)
2477 switch (code)
2479 case ASHIFT:
2480 case ASHIFTRT:
2481 case LSHIFTRT:
2482 case ROTATERT:
2483 case PLUS:
2484 case MINUS:
2485 case COMPARE:
2486 case NEG:
2487 case NOT:
2488 return COSTS_N_INSNS (1);
2490 case MULT:
2491 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
2493 int cycles = 0;
2494 unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));
2496 while (i)
2498 i >>= 2;
2499 cycles++;
2501 return COSTS_N_INSNS (2) + cycles;
2503 return COSTS_N_INSNS (1) + 16;
2505 case SET:
2506 return (COSTS_N_INSNS (1)
2507 + 4 * ((GET_CODE (SET_SRC (x)) == MEM)
2508 + (GET_CODE (SET_DEST (x)) == MEM)));
2510 case CONST_INT:
2511 if (outer == SET)
2513 if ((unsigned HOST_WIDE_INT) INTVAL (x) < 256)
2514 return 0;
2515 if (thumb_shiftable_const (INTVAL (x)))
2516 return COSTS_N_INSNS (2);
2517 return COSTS_N_INSNS (3);
2519 else if (outer == PLUS
2520 && INTVAL (x) < 256 && INTVAL (x) > -256)
2521 return 0;
2522 else if (outer == COMPARE
2523 && (unsigned HOST_WIDE_INT) INTVAL (x) < 256)
2524 return 0;
2525 else if (outer == ASHIFT || outer == ASHIFTRT
2526 || outer == LSHIFTRT)
2527 return 0;
2528 return COSTS_N_INSNS (2);
2530 case CONST:
2531 case CONST_DOUBLE:
2532 case LABEL_REF:
2533 case SYMBOL_REF:
2534 return COSTS_N_INSNS (3);
2536 case UDIV:
2537 case UMOD:
2538 case DIV:
2539 case MOD:
2540 return 100;
2542 case TRUNCATE:
2543 return 99;
2545 case AND:
2546 case XOR:
2547 case IOR:
2548 /* XXX guess. */
2549 return 8;
2551 case ADDRESSOF:
2552 case MEM:
2553 /* XXX another guess. */
2554 /* Memory costs quite a lot for the first word, but subsequent words
2555 load at the equivalent of a single insn each. */
2556 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
2557 + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));
2559 case IF_THEN_ELSE:
2560 /* XXX a guess. */
2561 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
2562 return 14;
2563 return 2;
2565 case ZERO_EXTEND:
2566 /* XXX still guessing. */
2567 switch (GET_MODE (XEXP (x, 0)))
2569 case QImode:
2570 return (1 + (mode == DImode ? 4 : 0)
2571 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2573 case HImode:
2574 return (4 + (mode == DImode ? 4 : 0)
2575 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2577 case SImode:
2578 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2580 default:
2581 return 99;
2584 default:
2585 return 99;
2586 #if 0
2587 case FFS:
2588 case FLOAT:
2589 case FIX:
2590 case UNSIGNED_FIX:
2591 /* XXX guess */
2592 fprintf (stderr, "unexpected code for thumb in rtx_costs: %s\n",
2593 rtx_name[code]);
2594 abort ();
2595 #endif
2599 switch (code)
2601 case MEM:
2602 /* Memory costs quite a lot for the first word, but subsequent words
2603 load at the equivalent of a single insn each. */
2604 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
2605 + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));
2607 case DIV:
2608 case MOD:
2609 return 100;
2611 case ROTATE:
2612 if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
2613 return 4;
2614 /* Fall through */
2615 case ROTATERT:
2616 if (mode != SImode)
2617 return 8;
2618 /* Fall through */
2619 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
2620 if (mode == DImode)
2621 return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
2622 + ((GET_CODE (XEXP (x, 0)) == REG
2623 || (GET_CODE (XEXP (x, 0)) == SUBREG
2624 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
2625 ? 0 : 8));
2626 return (1 + ((GET_CODE (XEXP (x, 0)) == REG
2627 || (GET_CODE (XEXP (x, 0)) == SUBREG
2628 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
2629 ? 0 : 4)
2630 + ((GET_CODE (XEXP (x, 1)) == REG
2631 || (GET_CODE (XEXP (x, 1)) == SUBREG
2632 && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
2633 || (GET_CODE (XEXP (x, 1)) == CONST_INT))
2634 ? 0 : 4));
2636 case MINUS:
2637 if (mode == DImode)
2638 return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
2639 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
2640 || (GET_CODE (XEXP (x, 0)) == CONST_INT
2641 && const_ok_for_arm (INTVAL (XEXP (x, 0)))))
2642 ? 0 : 8));
2644 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2645 return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2646 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
2647 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
2648 ? 0 : 8)
2649 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
2650 || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
2651 && const_double_rtx_ok_for_fpu (XEXP (x, 0))))
2652 ? 0 : 8));
2654 if (((GET_CODE (XEXP (x, 0)) == CONST_INT
2655 && const_ok_for_arm (INTVAL (XEXP (x, 0)))
2656 && REG_OR_SUBREG_REG (XEXP (x, 1))))
2657 || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
2658 || subcode == ASHIFTRT || subcode == LSHIFTRT
2659 || subcode == ROTATE || subcode == ROTATERT
2660 || (subcode == MULT
2661 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
2662 && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
2663 (INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
2664 && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
2665 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
2666 || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
2667 && REG_OR_SUBREG_REG (XEXP (x, 0))))
2668 return 1;
2669 /* Fall through */
2671 case PLUS:
2672 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2673 return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
2674 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2675 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
2676 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
2677 ? 0 : 8));
2679 /* Fall through */
2680 case AND: case XOR: case IOR:
2681 extra_cost = 0;
2683 /* Normally the frame registers will be spilt into reg+const during
2684 reload, so it is a bad idea to combine them with other instructions,
2685 since then they might not be moved outside of loops. As a compromise
2686 we allow integration with ops that have a constant as their second
2687 operand. */
2688 if ((REG_OR_SUBREG_REG (XEXP (x, 0))
2689 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
2690 && GET_CODE (XEXP (x, 1)) != CONST_INT)
2691 || (REG_OR_SUBREG_REG (XEXP (x, 1))
2692 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 1)))))
2693 extra_cost = 4;
2695 if (mode == DImode)
2696 return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
2697 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2698 || (GET_CODE (XEXP (x, 1)) == CONST_INT
2699 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
2700 ? 0 : 8));
2702 if (REG_OR_SUBREG_REG (XEXP (x, 0)))
2703 return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
2704 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2705 || (GET_CODE (XEXP (x, 1)) == CONST_INT
2706 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
2707 ? 0 : 4));
2709 else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
2710 return (1 + extra_cost
2711 + ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
2712 || subcode == LSHIFTRT || subcode == ASHIFTRT
2713 || subcode == ROTATE || subcode == ROTATERT
2714 || (subcode == MULT
2715 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2716 && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
2717 (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0)))
2718 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
2719 && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
2720 || GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
2721 ? 0 : 4));
2723 return 8;
2725 case MULT:
2726 /* There is no point basing this on the tuning, since it is always the
2727 fast variant if it exists at all. */
2728 if (arm_fast_multiply && mode == DImode
2729 && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
2730 && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
2731 || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
2732 return 8;
2734 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2735 || mode == DImode)
2736 return 30;
2738 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
2740 unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
2741 & (unsigned HOST_WIDE_INT) 0xffffffff);
2742 int add_cost = const_ok_for_arm (i) ? 4 : 8;
2743 int j;
2745 /* Tune as appropriate. */
2746 int booth_unit_size = ((tune_flags & FL_FAST_MULT) ? 8 : 2);
2748 for (j = 0; i && j < 32; j += booth_unit_size)
2750 i >>= booth_unit_size;
2751 add_cost += 2;
2754 return add_cost;
2757 return (((tune_flags & FL_FAST_MULT) ? 8 : 30)
2758 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
2759 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));
2761 case TRUNCATE:
2762 if (arm_fast_multiply && mode == SImode
2763 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
2764 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
2765 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
2766 == GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)))
2767 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ZERO_EXTEND
2768 || GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SIGN_EXTEND))
2769 return 8;
2770 return 99;
2772 case NEG:
2773 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2774 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
2775 /* Fall through */
2776 case NOT:
2777 if (mode == DImode)
2778 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
2780 return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
2782 case IF_THEN_ELSE:
2783 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
2784 return 14;
2785 return 2;
2787 case COMPARE:
2788 return 1;
2790 case ABS:
2791 return 4 + (mode == DImode ? 4 : 0);
2793 case SIGN_EXTEND:
2794 if (GET_MODE (XEXP (x, 0)) == QImode)
2795 return (4 + (mode == DImode ? 4 : 0)
2796 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2797 /* Fall through */
2798 case ZERO_EXTEND:
2799 switch (GET_MODE (XEXP (x, 0)))
2801 case QImode:
2802 return (1 + (mode == DImode ? 4 : 0)
2803 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2805 case HImode:
2806 return (4 + (mode == DImode ? 4 : 0)
2807 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2809 case SImode:
2810 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2812 default:
2813 break;
2815 abort ();
2817 case CONST_INT:
2818 if (const_ok_for_arm (INTVAL (x)))
2819 return outer == SET ? 2 : -1;
2820 else if (outer == AND
2821 && const_ok_for_arm (~INTVAL (x)))
2822 return -1;
2823 else if ((outer == COMPARE
2824 || outer == PLUS || outer == MINUS)
2825 && const_ok_for_arm (-INTVAL (x)))
2826 return -1;
2827 else
2828 return 5;
2830 case CONST:
2831 case LABEL_REF:
2832 case SYMBOL_REF:
2833 return 6;
2835 case CONST_DOUBLE:
2836 if (const_double_rtx_ok_for_fpu (x))
2837 return outer == SET ? 2 : -1;
2838 else if ((outer == COMPARE || outer == PLUS)
2839 && neg_const_double_rtx_ok_for_fpu (x))
2840 return -1;
2841 return 7;
2843 default:
2844 return 99;
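/* A worked instance of the ARM MULT cost loop above, assuming a slow
   multiplier (booth_unit_size == 2): for a multiply by 257, 0x101 is
   not a valid ARM immediate, so add_cost starts at 8; the loop then
   shifts 257 -> 64 -> 16 -> 4 -> 1 -> 0, five iterations adding 2
   each, giving 18.  With FL_FAST_MULT (booth_unit_size == 8) only
   two iterations are needed and the cost drops to 12.  */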
2848 static int
2849 arm_adjust_cost (insn, link, dep, cost)
2850 rtx insn;
2851 rtx link;
2852 rtx dep;
2853 int cost;
2855 rtx i_pat, d_pat;
2857 /* Some true dependencies can have a higher cost depending
2858 on precisely how certain input operands are used. */
2859 if (arm_is_xscale
2860 && REG_NOTE_KIND (link) == 0
2861 && recog_memoized (insn) >= 0
2862 && recog_memoized (dep) >= 0)
2864 int shift_opnum = get_attr_shift (insn);
2865 enum attr_type attr_type = get_attr_type (dep);
2867 /* If nonzero, SHIFT_OPNUM contains the operand number of a shifted
2868 operand for INSN. If we have a shifted input operand and the
2869 instruction we depend on is another ALU instruction, then we may
2870 have to account for an additional stall. */
2871 if (shift_opnum != 0 && attr_type == TYPE_NORMAL)
2873 rtx shifted_operand;
2874 int opno;
2876 /* Get the shifted operand. */
2877 extract_insn (insn);
2878 shifted_operand = recog_data.operand[shift_opnum];
2880 /* Iterate over all the operands in DEP. If we write an operand
2881 that overlaps with SHIFTED_OPERAND, then we have to increase the
2882 cost of this dependency. */
2883 extract_insn (dep);
2884 preprocess_constraints ();
2885 for (opno = 0; opno < recog_data.n_operands; opno++)
2887 /* We can ignore strict inputs. */
2888 if (recog_data.operand_type[opno] == OP_IN)
2889 continue;
2891 if (reg_overlap_mentioned_p (recog_data.operand[opno],
2892 shifted_operand))
2893 return 2;
2898 /* XXX This is not strictly true for the FPA. */
2899 if (REG_NOTE_KIND (link) == REG_DEP_ANTI
2900 || REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
2901 return 0;
2903 /* Call insns don't incur a stall, even if they follow a load. */
2904 if (REG_NOTE_KIND (link) == 0
2905 && GET_CODE (insn) == CALL_INSN)
2906 return 1;
2908 if ((i_pat = single_set (insn)) != NULL
2909 && GET_CODE (SET_SRC (i_pat)) == MEM
2910 && (d_pat = single_set (dep)) != NULL
2911 && GET_CODE (SET_DEST (d_pat)) == MEM)
2913 /* This is a load after a store; there is no conflict if the load reads
2914 from a cached area. Assume that loads from the stack, and from the
2915 constant pool are cached, and that others will miss. This is a
2916 hack. */
2918 if (CONSTANT_POOL_ADDRESS_P (XEXP (SET_SRC (i_pat), 0))
2919 || reg_mentioned_p (stack_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
2920 || reg_mentioned_p (frame_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
2921 || reg_mentioned_p (hard_frame_pointer_rtx,
2922 XEXP (SET_SRC (i_pat), 0)))
2923 return 1;
2926 return cost;
2929 /* This code has been fixed for cross compilation. */
2931 static int fpa_consts_inited = 0;
2933 static const char * const strings_fpa[8] =
2935 "0", "1", "2", "3",
2936 "4", "5", "0.5", "10"
2939 static REAL_VALUE_TYPE values_fpa[8];
2941 static void
2942 init_fpa_table ()
2944 int i;
2945 REAL_VALUE_TYPE r;
2947 for (i = 0; i < 8; i++)
2949 r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
2950 values_fpa[i] = r;
2953 fpa_consts_inited = 1;
2956 /* Return TRUE if rtx X is a valid immediate FPU constant. */
2959 const_double_rtx_ok_for_fpu (x)
2960 rtx x;
2962 REAL_VALUE_TYPE r;
2963 int i;
2965 if (!fpa_consts_inited)
2966 init_fpa_table ();
2968 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2969 if (REAL_VALUE_MINUS_ZERO (r))
2970 return 0;
2972 for (i = 0; i < 8; i++)
2973 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
2974 return 1;
2976 return 0;
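/* So, per strings_fpa above, 0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 0.5 and
   10.0 can appear as immediates in FPA arithmetic, while a constant
   such as 0.25, or negative zero, forces a constant-pool load.  */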
2979 /* Return TRUE if rtx X is a valid immediate FPU constant when negated. */
2982 neg_const_double_rtx_ok_for_fpu (x)
2983 rtx x;
2985 REAL_VALUE_TYPE r;
2986 int i;
2988 if (!fpa_consts_inited)
2989 init_fpa_table ();
2991 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2992 r = REAL_VALUE_NEGATE (r);
2993 if (REAL_VALUE_MINUS_ZERO (r))
2994 return 0;
2996 for (i = 0; i < 8; i++)
2997 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
2998 return 1;
3000 return 0;
3003 /* Predicates for `match_operand' and `match_operator'. */
3005 /* s_register_operand is the same as register_operand, but it doesn't accept
3006 (SUBREG (MEM)...).
3008 This function exists because at the time it was put in it led to better
3009 code. SUBREG(MEM) always needs a reload in the places where
3010 s_register_operand is used, and this seemed to lead to excessive
3011 reloading. */
3014 s_register_operand (op, mode)
3015 rtx op;
3016 enum machine_mode mode;
3018 if (GET_MODE (op) != mode && mode != VOIDmode)
3019 return 0;
3021 if (GET_CODE (op) == SUBREG)
3022 op = SUBREG_REG (op);
3024 /* We don't consider registers whose class is NO_REGS
3025 to be a register operand. */
3026 /* XXX might have to check for lo regs only for thumb ??? */
3027 return (GET_CODE (op) == REG
3028 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3029 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
3032 /* A hard register operand (even before reload). */
3035 arm_hard_register_operand (op, mode)
3036 rtx op;
3037 enum machine_mode mode;
3039 if (GET_MODE (op) != mode && mode != VOIDmode)
3040 return 0;
3042 return (GET_CODE (op) == REG
3043 && REGNO (op) < FIRST_PSEUDO_REGISTER);
3046 /* Only accept reg, subreg(reg), const_int. */
3049 reg_or_int_operand (op, mode)
3050 rtx op;
3051 enum machine_mode mode;
3053 if (GET_CODE (op) == CONST_INT)
3054 return 1;
3056 if (GET_MODE (op) != mode && mode != VOIDmode)
3057 return 0;
3059 if (GET_CODE (op) == SUBREG)
3060 op = SUBREG_REG (op);
3062 /* We don't consider registers whose class is NO_REGS
3063 to be a register operand. */
3064 return (GET_CODE (op) == REG
3065 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3066 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
3069 /* Return 1 if OP is an item in memory, given that we are in reload. */
3072 arm_reload_memory_operand (op, mode)
3073 rtx op;
3074 enum machine_mode mode ATTRIBUTE_UNUSED;
3076 int regno = true_regnum (op);
3078 return (!CONSTANT_P (op)
3079 && (regno == -1
3080 || (GET_CODE (op) == REG
3081 && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
3084 /* Return 1 if OP is a valid memory address, but not valid for a signed byte
3085 memory access (architecture V4).
3086 MODE is QImode if called when computing constraints, or VOIDmode when
3087 emitting patterns. In this latter case we cannot use memory_operand()
3088 because it will fail on badly formed MEMs, which is precisely what we are
3089 trying to catch. */
3092 bad_signed_byte_operand (op, mode)
3093 rtx op;
3094 enum machine_mode mode ATTRIBUTE_UNUSED;
3096 #if 0
3097 if ((mode == QImode && !memory_operand (op, mode)) || GET_CODE (op) != MEM)
3098 return 0;
3099 #endif
3100 if (GET_CODE (op) != MEM)
3101 return 0;
3103 op = XEXP (op, 0);
3105 /* A sum of anything more complex than reg + reg or reg + const is bad. */
3106 if ((GET_CODE (op) == PLUS || GET_CODE (op) == MINUS)
3107 && (!s_register_operand (XEXP (op, 0), VOIDmode)
3108 || (!s_register_operand (XEXP (op, 1), VOIDmode)
3109 && GET_CODE (XEXP (op, 1)) != CONST_INT)))
3110 return 1;
3112 /* Big constants are also bad. */
3113 if (GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT
3114 && (INTVAL (XEXP (op, 1)) > 0xff
3115 || -INTVAL (XEXP (op, 1)) > 0xff))
3116 return 1;
3118 /* Everything else is good, or will automatically be made so. */
3119 return 0;
3122 /* Return TRUE for valid operands for the rhs of an ARM instruction. */
3125 arm_rhs_operand (op, mode)
3126 rtx op;
3127 enum machine_mode mode;
3129 return (s_register_operand (op, mode)
3130 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
3133 /* Return TRUE for valid operands for the
3134 rhs of an ARM instruction, or a load. */
3137 arm_rhsm_operand (op, mode)
3138 rtx op;
3139 enum machine_mode mode;
3141 return (s_register_operand (op, mode)
3142 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
3143 || memory_operand (op, mode));
3146 /* Return TRUE for valid operands for the rhs of an ARM instruction, or a
3147 constant that is valid when negated. */
3150 arm_add_operand (op, mode)
3151 rtx op;
3152 enum machine_mode mode;
3154 if (TARGET_THUMB)
3155 return thumb_cmp_operand (op, mode);
3157 return (s_register_operand (op, mode)
3158 || (GET_CODE (op) == CONST_INT
3159 && (const_ok_for_arm (INTVAL (op))
3160 || const_ok_for_arm (-INTVAL (op)))));
3164 arm_not_operand (op, mode)
3165 rtx op;
3166 enum machine_mode mode;
3168 return (s_register_operand (op, mode)
3169 || (GET_CODE (op) == CONST_INT
3170 && (const_ok_for_arm (INTVAL (op))
3171 || const_ok_for_arm (~INTVAL (op)))));
3174 /* Return TRUE if the operand is a memory reference which contains an
3175 offsettable address. */
3178 offsettable_memory_operand (op, mode)
3179 rtx op;
3180 enum machine_mode mode;
3182 if (mode == VOIDmode)
3183 mode = GET_MODE (op);
3185 return (mode == GET_MODE (op)
3186 && GET_CODE (op) == MEM
3187 && offsettable_address_p (reload_completed | reload_in_progress,
3188 mode, XEXP (op, 0)));
3191 /* Return TRUE if the operand is a memory reference which is, or can be
3192 made word aligned by adjusting the offset. */
3195 alignable_memory_operand (op, mode)
3196 rtx op;
3197 enum machine_mode mode;
3199 rtx reg;
3201 if (mode == VOIDmode)
3202 mode = GET_MODE (op);
3204 if (mode != GET_MODE (op) || GET_CODE (op) != MEM)
3205 return 0;
3207 op = XEXP (op, 0);
3209 return ((GET_CODE (reg = op) == REG
3210 || (GET_CODE (op) == SUBREG
3211 && GET_CODE (reg = SUBREG_REG (op)) == REG)
3212 || (GET_CODE (op) == PLUS
3213 && GET_CODE (XEXP (op, 1)) == CONST_INT
3214 && (GET_CODE (reg = XEXP (op, 0)) == REG
3215 || (GET_CODE (XEXP (op, 0)) == SUBREG
3216 && GET_CODE (reg = SUBREG_REG (XEXP (op, 0))) == REG))))
3217 && REGNO_POINTER_ALIGN (REGNO (reg)) >= 32);
3220 /* Similar to s_register_operand, but does not allow hard integer
3221 registers. */
3224 f_register_operand (op, mode)
3225 rtx op;
3226 enum machine_mode mode;
3228 if (GET_MODE (op) != mode && mode != VOIDmode)
3229 return 0;
3231 if (GET_CODE (op) == SUBREG)
3232 op = SUBREG_REG (op);
3234 /* We don't consider registers whose class is NO_REGS
3235 to be a register operand. */
3236 return (GET_CODE (op) == REG
3237 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3238 || REGNO_REG_CLASS (REGNO (op)) == FPU_REGS));
3241 /* Return TRUE for valid operands for the rhs of an FPU instruction. */
3244 fpu_rhs_operand (op, mode)
3245 rtx op;
3246 enum machine_mode mode;
3248 if (s_register_operand (op, mode))
3249 return TRUE;
3251 if (GET_MODE (op) != mode && mode != VOIDmode)
3252 return FALSE;
3254 if (GET_CODE (op) == CONST_DOUBLE)
3255 return const_double_rtx_ok_for_fpu (op);
3257 return FALSE;
3261 fpu_add_operand (op, mode)
3262 rtx op;
3263 enum machine_mode mode;
3265 if (s_register_operand (op, mode))
3266 return TRUE;
3268 if (GET_MODE (op) != mode && mode != VOIDmode)
3269 return FALSE;
3271 if (GET_CODE (op) == CONST_DOUBLE)
3272 return (const_double_rtx_ok_for_fpu (op)
3273 || neg_const_double_rtx_ok_for_fpu (op));
3275 return FALSE;
3278 /* Return nonzero if OP is a constant power of two. */
3281 power_of_two_operand (op, mode)
3282 rtx op;
3283 enum machine_mode mode ATTRIBUTE_UNUSED;
3285 if (GET_CODE (op) == CONST_INT)
3287 HOST_WIDE_INT value = INTVAL (op);
3289 return value != 0 && (value & (value - 1)) == 0;
3292 return FALSE;
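/* The value & (value - 1) test above clears the lowest set bit, so
   it is zero exactly for powers of two: 8 & 7 == 0 qualifies, while
   6 & 5 == 4 does not, and zero is excluded explicitly.  */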
3295 /* Return TRUE for a valid operand of a DImode operation.
3296 Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
3297 Note that this disallows MEM(REG+REG), but allows
3298 MEM(PRE/POST_INC/DEC(REG)). */
3301 di_operand (op, mode)
3302 rtx op;
3303 enum machine_mode mode;
3305 if (s_register_operand (op, mode))
3306 return TRUE;
3308 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
3309 return FALSE;
3311 if (GET_CODE (op) == SUBREG)
3312 op = SUBREG_REG (op);
3314 switch (GET_CODE (op))
3316 case CONST_DOUBLE:
3317 case CONST_INT:
3318 return TRUE;
3320 case MEM:
3321 return memory_address_p (DImode, XEXP (op, 0));
3323 default:
3324 return FALSE;
3328 /* Like di_operand, but don't accept constants. */
3331 nonimmediate_di_operand (op, mode)
3332 rtx op;
3333 enum machine_mode mode;
3335 if (s_register_operand (op, mode))
3336 return TRUE;
3338 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
3339 return FALSE;
3341 if (GET_CODE (op) == SUBREG)
3342 op = SUBREG_REG (op);
3344 if (GET_CODE (op) == MEM)
3345 return memory_address_p (DImode, XEXP (op, 0));
3347 return FALSE;
3350 /* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
3351 Either: REG, SUBREG, CONST_DOUBLE or MEM(DFmode_address).
3352 Note that this disallows MEM(REG+REG), but allows
3353 MEM(PRE/POST_INC/DEC(REG)). */
3356 soft_df_operand (op, mode)
3357 rtx op;
3358 enum machine_mode mode;
3360 if (s_register_operand (op, mode))
3361 return TRUE;
3363 if (mode != VOIDmode && GET_MODE (op) != mode)
3364 return FALSE;
3366 if (GET_CODE (op) == SUBREG && CONSTANT_P (SUBREG_REG (op)))
3367 return FALSE;
3369 if (GET_CODE (op) == SUBREG)
3370 op = SUBREG_REG (op);
3372 switch (GET_CODE (op))
3374 case CONST_DOUBLE:
3375 return TRUE;
3377 case MEM:
3378 return memory_address_p (DFmode, XEXP (op, 0));
3380 default:
3381 return FALSE;
3385 /* Like soft_df_operand, but don't accept constants. */
3388 nonimmediate_soft_df_operand (op, mode)
3389 rtx op;
3390 enum machine_mode mode;
3392 if (s_register_operand (op, mode))
3393 return TRUE;
3395 if (mode != VOIDmode && GET_MODE (op) != mode)
3396 return FALSE;
3398 if (GET_CODE (op) == SUBREG)
3399 op = SUBREG_REG (op);
3401 if (GET_CODE (op) == MEM)
3402 return memory_address_p (DFmode, XEXP (op, 0));
3403 return FALSE;
3406 /* Return TRUE for valid index operands. */
3409 index_operand (op, mode)
3410 rtx op;
3411 enum machine_mode mode;
3413 return (s_register_operand (op, mode)
3414 || (immediate_operand (op, mode)
3415 && (GET_CODE (op) != CONST_INT
3416 || (INTVAL (op) < 4096 && INTVAL (op) > -4096))));
3419 /* Return TRUE for valid shifts by a constant. This also accepts any
3420 power of two on the (somewhat overly relaxed) assumption that the
3421 shift operator in this case was a mult. */
3424 const_shift_operand (op, mode)
3425 rtx op;
3426 enum machine_mode mode;
3428 return (power_of_two_operand (op, mode)
3429 || (immediate_operand (op, mode)
3430 && (GET_CODE (op) != CONST_INT
3431 || (INTVAL (op) < 32 && INTVAL (op) > 0))));
3434 /* Return TRUE for arithmetic operators which can be combined with a multiply
3435 (shift). */
3438 shiftable_operator (x, mode)
3439 rtx x;
3440 enum machine_mode mode;
3442 enum rtx_code code;
3444 if (GET_MODE (x) != mode)
3445 return FALSE;
3447 code = GET_CODE (x);
3449 return (code == PLUS || code == MINUS
3450 || code == IOR || code == XOR || code == AND);
3453 /* Return TRUE for binary logical operators. */
3456 logical_binary_operator (x, mode)
3457 rtx x;
3458 enum machine_mode mode;
3460 enum rtx_code code;
3462 if (GET_MODE (x) != mode)
3463 return FALSE;
3465 code = GET_CODE (x);
3467 return (code == IOR || code == XOR || code == AND);
3470 /* Return TRUE for shift operators. */
3473 shift_operator (x, mode)
3474 rtx x;
3475 enum machine_mode mode;
3477 enum rtx_code code;
3479 if (GET_MODE (x) != mode)
3480 return FALSE;
3482 code = GET_CODE (x);
3484 if (code == MULT)
3485 return power_of_two_operand (XEXP (x, 1), mode);
3487 return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
3488 || code == ROTATERT);
3491 /* Return TRUE if x is EQ or NE. */
3494 equality_operator (x, mode)
3495 rtx x;
3496 enum machine_mode mode ATTRIBUTE_UNUSED;
3498 return GET_CODE (x) == EQ || GET_CODE (x) == NE;
3501 /* Return TRUE if x is a comparison operator other than LTGT or UNEQ. */
3504 arm_comparison_operator (x, mode)
3505 rtx x;
3506 enum machine_mode mode;
3508 return (comparison_operator (x, mode)
3509 && GET_CODE (x) != LTGT
3510 && GET_CODE (x) != UNEQ);
3513 /* Return TRUE for SMIN SMAX UMIN UMAX operators. */
3516 minmax_operator (x, mode)
3517 rtx x;
3518 enum machine_mode mode;
3520 enum rtx_code code = GET_CODE (x);
3522 if (GET_MODE (x) != mode)
3523 return FALSE;
3525 return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
3528 /* Return TRUE if this is the condition code register; if we aren't given
3529 a mode, accept any class CCmode register. */
3532 cc_register (x, mode)
3533 rtx x;
3534 enum machine_mode mode;
3536 if (mode == VOIDmode)
3538 mode = GET_MODE (x);
3540 if (GET_MODE_CLASS (mode) != MODE_CC)
3541 return FALSE;
3544 if ( GET_MODE (x) == mode
3545 && GET_CODE (x) == REG
3546 && REGNO (x) == CC_REGNUM)
3547 return TRUE;
3549 return FALSE;
3552 /* Return TRUE if this is the condition code register; if we aren't given
3553 a mode, accept any class CCmode register which indicates a dominance
3554 expression. */
3557 dominant_cc_register (x, mode)
3558 rtx x;
3559 enum machine_mode mode;
3561 if (mode == VOIDmode)
3563 mode = GET_MODE (x);
3565 if (GET_MODE_CLASS (mode) != MODE_CC)
3566 return FALSE;
3569 if ( mode != CC_DNEmode && mode != CC_DEQmode
3570 && mode != CC_DLEmode && mode != CC_DLTmode
3571 && mode != CC_DGEmode && mode != CC_DGTmode
3572 && mode != CC_DLEUmode && mode != CC_DLTUmode
3573 && mode != CC_DGEUmode && mode != CC_DGTUmode)
3574 return FALSE;
3576 return cc_register (x, mode);
3579 /* Return TRUE if X references a SYMBOL_REF. */
3582 symbol_mentioned_p (x)
3583 rtx x;
3585 const char * fmt;
3586 int i;
3588 if (GET_CODE (x) == SYMBOL_REF)
3589 return 1;
3591 fmt = GET_RTX_FORMAT (GET_CODE (x));
3593 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3595 if (fmt[i] == 'E')
3597 int j;
3599 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3600 if (symbol_mentioned_p (XVECEXP (x, i, j)))
3601 return 1;
3603 else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
3604 return 1;
3607 return 0;
3610 /* Return TRUE if X references a LABEL_REF. */
3613 label_mentioned_p (x)
3614 rtx x;
3616 const char * fmt;
3617 int i;
3619 if (GET_CODE (x) == LABEL_REF)
3620 return 1;
3622 fmt = GET_RTX_FORMAT (GET_CODE (x));
3623 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3625 if (fmt[i] == 'E')
3627 int j;
3629 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3630 if (label_mentioned_p (XVECEXP (x, i, j)))
3631 return 1;
3633 else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
3634 return 1;
3637 return 0;
3640 enum rtx_code
3641 minmax_code (x)
3642 rtx x;
3644 enum rtx_code code = GET_CODE (x);
3646 if (code == SMAX)
3647 return GE;
3648 else if (code == SMIN)
3649 return LE;
3650 else if (code == UMIN)
3651 return LEU;
3652 else if (code == UMAX)
3653 return GEU;
3655 abort ();
3658 /* Return 1 if memory locations are adjacent. */
3661 adjacent_mem_locations (a, b)
3662 rtx a, b;
3664 if ((GET_CODE (XEXP (a, 0)) == REG
3665 || (GET_CODE (XEXP (a, 0)) == PLUS
3666 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
3667 && (GET_CODE (XEXP (b, 0)) == REG
3668 || (GET_CODE (XEXP (b, 0)) == PLUS
3669 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
3671 int val0 = 0, val1 = 0;
3672 int reg0, reg1;
3674 if (GET_CODE (XEXP (a, 0)) == PLUS)
3676 reg0 = REGNO (XEXP (XEXP (a, 0), 0));
3677 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
3679 else
3680 reg0 = REGNO (XEXP (a, 0));
3682 if (GET_CODE (XEXP (b, 0)) == PLUS)
3684 reg1 = REGNO (XEXP (XEXP (b, 0), 0));
3685 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
3687 else
3688 reg1 = REGNO (XEXP (b, 0));
3690 return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
3692 return 0;
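/* Thus (mem (plus (reg r3) (const_int 8))) and
   (mem (plus (reg r3) (const_int 12))) count as adjacent, in either
   order, while [r3] and [r3, #8] do not: the offsets must differ by
   exactly 4, the word size.  */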
3695 /* Return 1 if OP is a load multiple operation. It is known to be
3696 parallel and the first section will be tested. */
3699 load_multiple_operation (op, mode)
3700 rtx op;
3701 enum machine_mode mode ATTRIBUTE_UNUSED;
3703 HOST_WIDE_INT count = XVECLEN (op, 0);
3704 int dest_regno;
3705 rtx src_addr;
3706 HOST_WIDE_INT i = 1, base = 0;
3707 rtx elt;
3709 if (count <= 1
3710 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
3711 return 0;
3713 /* Check to see if this might be a write-back. */
3714 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
3716 i++;
3717 base = 1;
3719 /* Now check it more carefully. */
3720 if (GET_CODE (SET_DEST (elt)) != REG
3721 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
3722 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
3723 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
3724 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
3725 return 0;
3728 /* Perform a quick check so we don't blow up below. */
3729 if (count <= i
3730 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
3731 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
3732 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
3733 return 0;
3735 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
3736 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);
3738 for (; i < count; i++)
3740 elt = XVECEXP (op, 0, i);
3742 if (GET_CODE (elt) != SET
3743 || GET_CODE (SET_DEST (elt)) != REG
3744 || GET_MODE (SET_DEST (elt)) != SImode
3745 || REGNO (SET_DEST (elt)) != (unsigned int)(dest_regno + i - base)
3746 || GET_CODE (SET_SRC (elt)) != MEM
3747 || GET_MODE (SET_SRC (elt)) != SImode
3748 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
3749 || !rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
3750 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
3751 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
3752 return 0;
3755 return 1;
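/* Shape of a PARALLEL accepted above (register numbers and the base
   are arbitrary; this one would match an ldmia of r4/r5 from r0):

       (parallel [(set (reg:SI 4) (mem:SI (reg:SI 0)))
                  (set (reg:SI 5)
                       (mem:SI (plus:SI (reg:SI 0) (const_int 4))))])

   optionally preceded by a base-register update for write-back.  */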
3758 /* Return 1 if OP is a store multiple operation. It is known to be
3759 parallel and the first section will be tested. */
3762 store_multiple_operation (op, mode)
3763 rtx op;
3764 enum machine_mode mode ATTRIBUTE_UNUSED;
3766 HOST_WIDE_INT count = XVECLEN (op, 0);
3767 int src_regno;
3768 rtx dest_addr;
3769 HOST_WIDE_INT i = 1, base = 0;
3770 rtx elt;
3772 if (count <= 1
3773 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
3774 return 0;
3776 /* Check to see if this might be a write-back. */
3777 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
3779 i++;
3780 base = 1;
3782 /* Now check it more carefully. */
3783 if (GET_CODE (SET_DEST (elt)) != REG
3784 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
3785 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
3786 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
3787 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
3788 return 0;
3791 /* Perform a quick check so we don't blow up below. */
3792 if (count <= i
3793 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
3794 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
3795 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
3796 return 0;
3798 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
3799 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);
3801 for (; i < count; i++)
3803 elt = XVECEXP (op, 0, i);
3805 if (GET_CODE (elt) != SET
3806 || GET_CODE (SET_SRC (elt)) != REG
3807 || GET_MODE (SET_SRC (elt)) != SImode
3808 || REGNO (SET_SRC (elt)) != (unsigned int)(src_regno + i - base)
3809 || GET_CODE (SET_DEST (elt)) != MEM
3810 || GET_MODE (SET_DEST (elt)) != SImode
3811 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
3812 || !rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
3813 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
3814 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
3815 return 0;
3818 return 1;
3822 load_multiple_sequence (operands, nops, regs, base, load_offset)
3823 rtx * operands;
3824 int nops;
3825 int * regs;
3826 int * base;
3827 HOST_WIDE_INT * load_offset;
3829 int unsorted_regs[4];
3830 HOST_WIDE_INT unsorted_offsets[4];
3831 int order[4];
3832 int base_reg = -1;
3833 int i;
3835 /* Can only handle 2, 3, or 4 insns at present,
3836 though could be easily extended if required. */
3837 if (nops < 2 || nops > 4)
3838 abort ();
3840 /* Loop over the operands and check that the memory references are
3841 suitable (i.e. immediate offsets from the same base register). At
3842 the same time, extract the target register, and the memory
3843 offsets. */
3844 for (i = 0; i < nops; i++)
3846 rtx reg;
3847 rtx offset;
3849 /* Convert a subreg of a mem into the mem itself. */
3850 if (GET_CODE (operands[nops + i]) == SUBREG)
3851 operands[nops + i] = alter_subreg (operands + (nops + i));
3853 if (GET_CODE (operands[nops + i]) != MEM)
3854 abort ();
3856 /* Don't reorder volatile memory references; it doesn't seem worth
3857 looking for the case where the order is ok anyway. */
3858 if (MEM_VOLATILE_P (operands[nops + i]))
3859 return 0;
3861 offset = const0_rtx;
3863 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
3864 || (GET_CODE (reg) == SUBREG
3865 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3866 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
3867 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
3868 == REG)
3869 || (GET_CODE (reg) == SUBREG
3870 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3871 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
3872 == CONST_INT)))
3874 if (i == 0)
3876 base_reg = REGNO (reg);
3877 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
3878 ? REGNO (operands[i])
3879 : REGNO (SUBREG_REG (operands[i])));
3880 order[0] = 0;
3882 else
3884 if (base_reg != (int) REGNO (reg))
3885 /* Not addressed from the same base register. */
3886 return 0;
3888 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
3889 ? REGNO (operands[i])
3890 : REGNO (SUBREG_REG (operands[i])));
3891 if (unsorted_regs[i] < unsorted_regs[order[0]])
3892 order[0] = i;
3895 /* If it isn't an integer register, or if it overwrites the
3896 base register but isn't the last insn in the list, then
3897 we can't do this. */
3898 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14
3899 || (i != nops - 1 && unsorted_regs[i] == base_reg))
3900 return 0;
3902 unsorted_offsets[i] = INTVAL (offset);
3904 else
3905 /* Not a suitable memory address. */
3906 return 0;
3909 /* All the useful information has now been extracted from the
3910 operands into unsorted_regs and unsorted_offsets; additionally,
3911 order[0] has been set to the lowest numbered register in the
3912 list. Sort the registers into order, and check that the memory
3913 offsets are ascending and adjacent. */
3915 for (i = 1; i < nops; i++)
3917 int j;
3919 order[i] = order[i - 1];
3920 for (j = 0; j < nops; j++)
3921 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
3922 && (order[i] == order[i - 1]
3923 || unsorted_regs[j] < unsorted_regs[order[i]]))
3924 order[i] = j;
3926 /* Have we found a suitable register? If not, one must be used more
3927 than once. */
3928 if (order[i] == order[i - 1])
3929 return 0;
3931 /* Are the memory offsets adjacent and ascending? */
3932 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
3933 return 0;
3936 if (base)
3938 *base = base_reg;
3940 for (i = 0; i < nops; i++)
3941 regs[i] = unsorted_regs[order[i]];
3943 *load_offset = unsorted_offsets[order[0]];
3946 if (unsorted_offsets[order[0]] == 0)
3947 return 1; /* ldmia */
3949 if (unsorted_offsets[order[0]] == 4)
3950 return 2; /* ldmib */
3952 if (unsorted_offsets[order[nops - 1]] == 0)
3953 return 3; /* ldmda */
3955 if (unsorted_offsets[order[nops - 1]] == -4)
3956 return 4; /* ldmdb */
3958 /* For ARM8,9 & StrongARM, 2 ldr instructions are faster than an ldm
3959 if the offset isn't small enough. The reason 2 ldrs are faster
3960 is because these ARMs are able to do more than one cache access
3961 in a single cycle. The ARM9 and StrongARM have Harvard caches,
3962 whilst the ARM8 has a double bandwidth cache. This means that
3963 these cores can do both an instruction fetch and a data fetch in
3964 a single cycle, so the trick of calculating the address into a
3965 scratch register (one of the result regs) and then doing a load
3966 multiple actually becomes slower (and no smaller in code size).
3967 That is the transformation
3969 ldr rd1, [rbase + offset]
3970 ldr rd2, [rbase + offset + 4]
3974 add rd1, rbase, offset
3975 ldmia rd1, {rd1, rd2}
3977 produces worse code -- '3 cycles + any stalls on rd2' instead of
3978 '2 cycles + any stalls on rd2'. On ARMs with only one cache
3979 access per cycle, the first sequence could never complete in less
3980 than 6 cycles, whereas the ldm sequence would only take 5 and
3981 would make better use of sequential accesses if not hitting the
3982 cache.
3984 We cheat here and test 'arm_ld_sched' which we currently know to
3985 only be true for the ARM8, ARM9 and StrongARM. If this ever
3986 changes, then the test below needs to be reworked. */
3987 if (nops == 2 && arm_ld_sched)
3988 return 0;
3990 /* Can't do it without setting up the offset; only do this if it takes
3991 no more than one insn. */
3992 return (const_ok_for_arm (unsorted_offsets[order[0]])
3993 || const_ok_for_arm (-unsorted_offsets[order[0]])) ? 5 : 0;
3996 const char *
3997 emit_ldm_seq (operands, nops)
3998 rtx * operands;
3999 int nops;
4001 int regs[4];
4002 int base_reg;
4003 HOST_WIDE_INT offset;
4004 char buf[100];
4005 int i;
4007 switch (load_multiple_sequence (operands, nops, regs, &base_reg, &offset))
4009 case 1:
4010 strcpy (buf, "ldm%?ia\t");
4011 break;
4013 case 2:
4014 strcpy (buf, "ldm%?ib\t");
4015 break;
4017 case 3:
4018 strcpy (buf, "ldm%?da\t");
4019 break;
4021 case 4:
4022 strcpy (buf, "ldm%?db\t");
4023 break;
4025 case 5:
4026 if (offset >= 0)
4027 sprintf (buf, "add%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
4028 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
4029 (long) offset);
4030 else
4031 sprintf (buf, "sub%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
4032 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
4033 (long) -offset);
4034 output_asm_insn (buf, operands);
4035 base_reg = regs[0];
4036 strcpy (buf, "ldm%?ia\t");
4037 break;
4039 default:
4040 abort ();
4043 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
4044 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
4046 for (i = 1; i < nops; i++)
4047 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
4048 reg_names[regs[i]]);
4050 strcat (buf, "}\t%@ phole ldm");
4052 output_asm_insn (buf, operands);
4053 return "";
4057 store_multiple_sequence (operands, nops, regs, base, load_offset)
4058 rtx * operands;
4059 int nops;
4060 int * regs;
4061 int * base;
4062 HOST_WIDE_INT * load_offset;
4064 int unsorted_regs[4];
4065 HOST_WIDE_INT unsorted_offsets[4];
4066 int order[4];
4067 int base_reg = -1;
4068 int i;
4070 /* Can only handle 2, 3, or 4 insns at present, though could be easily
4071 extended if required. */
4072 if (nops < 2 || nops > 4)
4073 abort ();
4075 /* Loop over the operands and check that the memory references are
4076 suitable (i.e. immediate offsets from the same base register). At
4077 the same time, extract the target register, and the memory
4078 offsets. */
4079 for (i = 0; i < nops; i++)
4081 rtx reg;
4082 rtx offset;
4084 /* Convert a subreg of a mem into the mem itself. */
4085 if (GET_CODE (operands[nops + i]) == SUBREG)
4086 operands[nops + i] = alter_subreg (operands + (nops + i));
4088 if (GET_CODE (operands[nops + i]) != MEM)
4089 abort ();
4091 /* Don't reorder volatile memory references; it doesn't seem worth
4092 looking for the case where the order is ok anyway. */
4093 if (MEM_VOLATILE_P (operands[nops + i]))
4094 return 0;
4096 offset = const0_rtx;
4098 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
4099 || (GET_CODE (reg) == SUBREG
4100 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4101 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
4102 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
4103 == REG)
4104 || (GET_CODE (reg) == SUBREG
4105 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4106 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
4107 == CONST_INT)))
4109 if (i == 0)
4111 base_reg = REGNO (reg);
4112 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
4113 ? REGNO (operands[i])
4114 : REGNO (SUBREG_REG (operands[i])));
4115 order[0] = 0;
4117 else
4119 if (base_reg != (int) REGNO (reg))
4120 /* Not addressed from the same base register. */
4121 return 0;
4123 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
4124 ? REGNO (operands[i])
4125 : REGNO (SUBREG_REG (operands[i])));
4126 if (unsorted_regs[i] < unsorted_regs[order[0]])
4127 order[0] = i;
4130 /* If it isn't an integer register, then we can't do this. */
4131 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14)
4132 return 0;
4134 unsorted_offsets[i] = INTVAL (offset);
4136 else
4137 /* Not a suitable memory address. */
4138 return 0;
4141 /* All the useful information has now been extracted from the
4142 operands into unsorted_regs and unsorted_offsets; additionally,
4143 order[0] has been set to the lowest numbered register in the
4144 list. Sort the registers into order, and check that the memory
4145 offsets are ascending and adjacent. */
4147 for (i = 1; i < nops; i++)
4149 int j;
4151 order[i] = order[i - 1];
4152 for (j = 0; j < nops; j++)
4153 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
4154 && (order[i] == order[i - 1]
4155 || unsorted_regs[j] < unsorted_regs[order[i]]))
4156 order[i] = j;
4158 /* Have we found a suitable register? If not, one must be used more
4159 than once. */
4160 if (order[i] == order[i - 1])
4161 return 0;
4163 /* Is the memory address adjacent and ascending? */
4164 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
4165 return 0;
4168 if (base)
4170 *base = base_reg;
4172 for (i = 0; i < nops; i++)
4173 regs[i] = unsorted_regs[order[i]];
4175 *load_offset = unsorted_offsets[order[0]];
4178 if (unsorted_offsets[order[0]] == 0)
4179 return 1; /* stmia */
4181 if (unsorted_offsets[order[0]] == 4)
4182 return 2; /* stmib */
4184 if (unsorted_offsets[order[nops - 1]] == 0)
4185 return 3; /* stmda */
4187 if (unsorted_offsets[order[nops - 1]] == -4)
4188 return 4; /* stmdb */
4190 return 0;
4193 const char *
4194 emit_stm_seq (operands, nops)
4195 rtx * operands;
4196 int nops;
4198 int regs[4];
4199 int base_reg;
4200 HOST_WIDE_INT offset;
4201 char buf[100];
4202 int i;
4204 switch (store_multiple_sequence (operands, nops, regs, &base_reg, &offset))
4206 case 1:
4207 strcpy (buf, "stm%?ia\t");
4208 break;
4210 case 2:
4211 strcpy (buf, "stm%?ib\t");
4212 break;
4214 case 3:
4215 strcpy (buf, "stm%?da\t");
4216 break;
4218 case 4:
4219 strcpy (buf, "stm%?db\t");
4220 break;
4222 default:
4223 abort ();
4226 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
4227 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
4229 for (i = 1; i < nops; i++)
4230 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
4231 reg_names[regs[i]]);
4233 strcat (buf, "}\t%@ phole stm");
4235 output_asm_insn (buf, operands);
4236 return "";
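/* Descriptive comment added editorially: predicate that recognizes a
   PARALLEL whose first element is a SET from an UNSPEC_PUSH_MULT,
   i.e. the multi-register push pattern built elsewhere in this file
   (see emit_multi_reg_push).  */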
4239 int
4240 multi_register_push (op, mode)
4241 rtx op;
4242 enum machine_mode mode ATTRIBUTE_UNUSED;
4244 if (GET_CODE (op) != PARALLEL
4245 || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
4246 || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
4247 || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != UNSPEC_PUSH_MULT))
4248 return 0;
4250 return 1;
4253 /* Routines for use in generating RTL. */
4255 rtx
4256 arm_gen_load_multiple (base_regno, count, from, up, write_back, unchanging_p,
4257 in_struct_p, scalar_p)
4258 int base_regno;
4259 int count;
4260 rtx from;
4261 int up;
4262 int write_back;
4263 int unchanging_p;
4264 int in_struct_p;
4265 int scalar_p;
4267 int i = 0, j;
4268 rtx result;
4269 int sign = up ? 1 : -1;
4270 rtx mem;
4272 /* XScale has load-store double instructions, but they have stricter
4273 alignment requirements than load-store multiple, so we cannot
4274 use them.
4276 For XScale ldm requires 2 + NREGS cycles to complete and blocks
4277 the pipeline until completion.
4279 NREGS CYCLES
4280 1 3
4281 2 4
4282 3 5
4283 4 6
4285 An ldr instruction takes 1-3 cycles, but does not block the
4286 pipeline.
4288 NREGS CYCLES
4289 1 1-3
4290 2 2-6
4291 3 3-9
4292 4 4-12
4294 Best case ldr will always win. However, the more ldr instructions
4295 we issue, the less likely we are to be able to schedule them well.
4296 Using ldr instructions also increases code size.
4298 As a compromise, we use ldr for counts of 1 or 2 regs, and ldm
4299 for counts of 3 or 4 regs. */
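/* Editorial arithmetic check on the comment above: for COUNT == 2 an
   ldm occupies 2 + 2 == 4 cycles and stalls the pipeline, whereas two
   ldrs cost 1-3 cycles each but can overlap with surrounding work.
   Hence the expansion below splits counts of 1 or 2 into individual
   loads unless we are optimizing for size.  */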
4300 if (arm_is_xscale && count <= 2 && ! optimize_size)
4302 rtx seq;
4304 start_sequence ();
4306 for (i = 0; i < count; i++)
4308 mem = gen_rtx_MEM (SImode, plus_constant (from, i * 4 * sign));
4309 RTX_UNCHANGING_P (mem) = unchanging_p;
4310 MEM_IN_STRUCT_P (mem) = in_struct_p;
4311 MEM_SCALAR_P (mem) = scalar_p;
4312 emit_move_insn (gen_rtx_REG (SImode, base_regno + i), mem);
4315 if (write_back)
4316 emit_move_insn (from, plus_constant (from, count * 4 * sign));
4318 seq = gen_sequence ();
4319 end_sequence ();
4321 return seq;
4324 result = gen_rtx_PARALLEL (VOIDmode,
4325 rtvec_alloc (count + (write_back ? 1 : 0)));
4326 if (write_back)
4328 XVECEXP (result, 0, 0)
4329 = gen_rtx_SET (GET_MODE (from), from,
4330 plus_constant (from, count * 4 * sign));
4331 i = 1;
4332 count++;
4335 for (j = 0; i < count; i++, j++)
4337 mem = gen_rtx_MEM (SImode, plus_constant (from, j * 4 * sign));
4338 RTX_UNCHANGING_P (mem) = unchanging_p;
4339 MEM_IN_STRUCT_P (mem) = in_struct_p;
4340 MEM_SCALAR_P (mem) = scalar_p;
4341 XVECEXP (result, 0, i)
4342 = gen_rtx_SET (VOIDmode, gen_rtx_REG (SImode, base_regno + j), mem);
4345 return result;
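/* Editorial example of the RTL built above (register numbers are
   illustrative, and this assumes a non-XScale core or -Os so the
   ldr expansion is not taken): arm_gen_load_multiple (0, 2, from,
   TRUE, TRUE, ...) produces
     (parallel [(set from (plus from (const_int 8)))
                (set (reg:SI 0) (mem:SI from))
                (set (reg:SI 1) (mem:SI (plus from (const_int 4))))])
   which is intended to match the ldmia-with-write-back patterns.  */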
4348 rtx
4349 arm_gen_store_multiple (base_regno, count, to, up, write_back, unchanging_p,
4350 in_struct_p, scalar_p)
4351 int base_regno;
4352 int count;
4353 rtx to;
4354 int up;
4355 int write_back;
4356 int unchanging_p;
4357 int in_struct_p;
4358 int scalar_p;
4360 int i = 0, j;
4361 rtx result;
4362 int sign = up ? 1 : -1;
4363 rtx mem;
4365 /* See arm_gen_load_multiple for discussion of
4366 the pros/cons of ldm/stm usage for XScale. */
4367 if (arm_is_xscale && count <= 2 && ! optimize_size)
4369 rtx seq;
4371 start_sequence ();
4373 for (i = 0; i < count; i++)
4375 mem = gen_rtx_MEM (SImode, plus_constant (to, i * 4 * sign));
4376 RTX_UNCHANGING_P (mem) = unchanging_p;
4377 MEM_IN_STRUCT_P (mem) = in_struct_p;
4378 MEM_SCALAR_P (mem) = scalar_p;
4379 emit_move_insn (mem, gen_rtx_REG (SImode, base_regno + i));
4382 if (write_back)
4383 emit_move_insn (to, plus_constant (to, count * 4 * sign));
4385 seq = gen_sequence ();
4386 end_sequence ();
4388 return seq;
4391 result = gen_rtx_PARALLEL (VOIDmode,
4392 rtvec_alloc (count + (write_back ? 1 : 0)));
4393 if (write_back)
4395 XVECEXP (result, 0, 0)
4396 = gen_rtx_SET (GET_MODE (to), to,
4397 plus_constant (to, count * 4 * sign));
4398 i = 1;
4399 count++;
4402 for (j = 0; i < count; i++, j++)
4404 mem = gen_rtx_MEM (SImode, plus_constant (to, j * 4 * sign));
4405 RTX_UNCHANGING_P (mem) = unchanging_p;
4406 MEM_IN_STRUCT_P (mem) = in_struct_p;
4407 MEM_SCALAR_P (mem) = scalar_p;
4409 XVECEXP (result, 0, i)
4410 = gen_rtx_SET (VOIDmode, mem, gen_rtx_REG (SImode, base_regno + j));
4413 return result;
4416 int
4417 arm_gen_movstrqi (operands)
4418 rtx * operands;
4420 HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
4421 int i;
4422 rtx src, dst;
4423 rtx st_src, st_dst, fin_src, fin_dst;
4424 rtx part_bytes_reg = NULL;
4425 rtx mem;
4426 int dst_unchanging_p, dst_in_struct_p, src_unchanging_p, src_in_struct_p;
4427 int dst_scalar_p, src_scalar_p;
4429 if (GET_CODE (operands[2]) != CONST_INT
4430 || GET_CODE (operands[3]) != CONST_INT
4431 || INTVAL (operands[2]) > 64
4432 || INTVAL (operands[3]) & 3)
4433 return 0;
4435 st_dst = XEXP (operands[0], 0);
4436 st_src = XEXP (operands[1], 0);
4438 dst_unchanging_p = RTX_UNCHANGING_P (operands[0]);
4439 dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
4440 dst_scalar_p = MEM_SCALAR_P (operands[0]);
4441 src_unchanging_p = RTX_UNCHANGING_P (operands[1]);
4442 src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);
4443 src_scalar_p = MEM_SCALAR_P (operands[1]);
4445 fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
4446 fin_src = src = copy_to_mode_reg (SImode, st_src);
4448 in_words_to_go = NUM_INTS (INTVAL (operands[2]));
4449 out_words_to_go = INTVAL (operands[2]) / 4;
4450 last_bytes = INTVAL (operands[2]) & 3;
4452 if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
4453 part_bytes_reg = gen_rtx_REG (SImode, (in_words_to_go - 1) & 3);
4455 for (i = 0; in_words_to_go >= 2; i+=4)
4457 if (in_words_to_go > 4)
4458 emit_insn (arm_gen_load_multiple (0, 4, src, TRUE, TRUE,
4459 src_unchanging_p,
4460 src_in_struct_p,
4461 src_scalar_p));
4462 else
4463 emit_insn (arm_gen_load_multiple (0, in_words_to_go, src, TRUE,
4464 FALSE, src_unchanging_p,
4465 src_in_struct_p, src_scalar_p));
4467 if (out_words_to_go)
4469 if (out_words_to_go > 4)
4470 emit_insn (arm_gen_store_multiple (0, 4, dst, TRUE, TRUE,
4471 dst_unchanging_p,
4472 dst_in_struct_p,
4473 dst_scalar_p));
4474 else if (out_words_to_go != 1)
4475 emit_insn (arm_gen_store_multiple (0, out_words_to_go,
4476 dst, TRUE,
4477 (last_bytes == 0
4478 ? FALSE : TRUE),
4479 dst_unchanging_p,
4480 dst_in_struct_p,
4481 dst_scalar_p));
4482 else
4484 mem = gen_rtx_MEM (SImode, dst);
4485 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4486 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4487 MEM_SCALAR_P (mem) = dst_scalar_p;
4488 emit_move_insn (mem, gen_rtx_REG (SImode, 0));
4489 if (last_bytes != 0)
4490 emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
4494 in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
4495 out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
4498 /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do. */
4499 if (out_words_to_go)
4501 rtx sreg;
4503 mem = gen_rtx_MEM (SImode, src);
4504 RTX_UNCHANGING_P (mem) = src_unchanging_p;
4505 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
4506 MEM_SCALAR_P (mem) = src_scalar_p;
4507 emit_move_insn (sreg = gen_reg_rtx (SImode), mem);
4508 emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));
4510 mem = gen_rtx_MEM (SImode, dst);
4511 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4512 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4513 MEM_SCALAR_P (mem) = dst_scalar_p;
4514 emit_move_insn (mem, sreg);
4515 emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
4516 in_words_to_go--;
4518 if (in_words_to_go) /* Sanity check */
4519 abort ();
4522 if (in_words_to_go)
4524 if (in_words_to_go < 0)
4525 abort ();
4527 mem = gen_rtx_MEM (SImode, src);
4528 RTX_UNCHANGING_P (mem) = src_unchanging_p;
4529 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
4530 MEM_SCALAR_P (mem) = src_scalar_p;
4531 part_bytes_reg = copy_to_mode_reg (SImode, mem);
4534 if (last_bytes && part_bytes_reg == NULL)
4535 abort ();
4537 if (BYTES_BIG_ENDIAN && last_bytes)
4539 rtx tmp = gen_reg_rtx (SImode);
4541 /* The bytes we want are in the top end of the word. */
4542 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
4543 GEN_INT (8 * (4 - last_bytes))));
4544 part_bytes_reg = tmp;
4546 while (last_bytes)
4548 mem = gen_rtx_MEM (QImode, plus_constant (dst, last_bytes - 1));
4549 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4550 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4551 MEM_SCALAR_P (mem) = dst_scalar_p;
4552 emit_move_insn (mem, gen_rtx_SUBREG (QImode, part_bytes_reg, 0));
4554 if (--last_bytes)
4556 tmp = gen_reg_rtx (SImode);
4557 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
4558 part_bytes_reg = tmp;
4563 else
4565 if (last_bytes > 1)
4567 mem = gen_rtx_MEM (HImode, dst);
4568 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4569 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4570 MEM_SCALAR_P (mem) = dst_scalar_p;
4571 emit_move_insn (mem, gen_rtx_SUBREG (HImode, part_bytes_reg, 0));
4572 last_bytes -= 2;
4573 if (last_bytes)
4575 rtx tmp = gen_reg_rtx (SImode);
4577 emit_insn (gen_addsi3 (dst, dst, GEN_INT (2)));
4578 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (16)));
4579 part_bytes_reg = tmp;
4583 if (last_bytes)
4585 mem = gen_rtx_MEM (QImode, dst);
4586 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4587 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4588 MEM_SCALAR_P (mem) = dst_scalar_p;
4589 emit_move_insn (mem, gen_rtx_SUBREG (QImode, part_bytes_reg, 0));
4593 return 1;
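/* Editorial worked example (assuming NUM_INTS rounds the byte count
   up to whole words): a 10-byte copy gives in_words_to_go == 3,
   out_words_to_go == 2 and last_bytes == 2, so two full words are
   block copied, the third loaded word is left in a low register named
   by part_bytes_reg, and the halfword/byte tail code above stores the
   remaining 2 bytes.  */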
4596 /* Generate a memory reference for a half word, such that it will be loaded
4597 into the top 16 bits of the word. We can assume that the address is
4598 known to be alignable and of the form reg, or plus (reg, const). */
4600 rtx
4601 arm_gen_rotated_half_load (memref)
4602 rtx memref;
4604 HOST_WIDE_INT offset = 0;
4605 rtx base = XEXP (memref, 0);
4607 if (GET_CODE (base) == PLUS)
4609 offset = INTVAL (XEXP (base, 1));
4610 base = XEXP (base, 0);
4613 /* If we aren't allowed to generate unaligned addresses, then fail. */
4614 if (TARGET_MMU_TRAPS
4615 && ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 0)))
4616 return NULL;
4618 base = gen_rtx_MEM (SImode, plus_constant (base, offset & ~2));
4620 if ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 2))
4621 return base;
4623 return gen_rtx_ROTATE (SImode, base, GEN_INT (16));
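/* Editorial example: on a little-endian target a halfword at
   [rN, #6] sits in the top half of the word at [rN, #4], so the word
   load is returned unrotated; for [rN, #4] the halfword is in the
   bottom half and the (rotate:SI ... 16) form is generated instead.  */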
4626 /* Select a dominance comparison mode if possible. We support three forms.
4627 COND_OR == 0 => (X && Y)
4628 COND_OR == 1 => ((! X) || Y)
4629 COND_OR == 2 => (X || Y)
4630 If we are unable to support a dominance comparison we return CC mode.
4631 This will then fail to match for the RTL expressions that generate this
4632 call. */
4634 static enum machine_mode
4635 select_dominance_cc_mode (x, y, cond_or)
4636 rtx x;
4637 rtx y;
4638 HOST_WIDE_INT cond_or;
4640 enum rtx_code cond1, cond2;
4641 int swapped = 0;
4643 /* Currently we will probably get the wrong result if the individual
4644 comparisons are not simple. This also ensures that it is safe to
4645 reverse a comparison if necessary. */
4646 if ((arm_select_cc_mode (cond1 = GET_CODE (x), XEXP (x, 0), XEXP (x, 1))
4647 != CCmode)
4648 || (arm_select_cc_mode (cond2 = GET_CODE (y), XEXP (y, 0), XEXP (y, 1))
4649 != CCmode))
4650 return CCmode;
4652 /* The if_then_else variant of this tests the second condition if the
4653 first passes, but is true if the first fails. Reverse the first
4654 condition to get a true "inclusive-or" expression. */
4655 if (cond_or == 1)
4656 cond1 = reverse_condition (cond1);
4658 /* If the comparisons are not equal, and one doesn't dominate the other,
4659 then we can't do this. */
4660 if (cond1 != cond2
4661 && !comparison_dominates_p (cond1, cond2)
4662 && (swapped = 1, !comparison_dominates_p (cond2, cond1)))
4663 return CCmode;
4665 if (swapped)
4667 enum rtx_code temp = cond1;
4668 cond1 = cond2;
4669 cond2 = temp;
4672 switch (cond1)
4674 case EQ:
4675 if (cond2 == EQ || !cond_or)
4676 return CC_DEQmode;
4678 switch (cond2)
4680 case LE: return CC_DLEmode;
4681 case LEU: return CC_DLEUmode;
4682 case GE: return CC_DGEmode;
4683 case GEU: return CC_DGEUmode;
4684 default: break;
4687 break;
4689 case LT:
4690 if (cond2 == LT || !cond_or)
4691 return CC_DLTmode;
4692 if (cond2 == LE)
4693 return CC_DLEmode;
4694 if (cond2 == NE)
4695 return CC_DNEmode;
4696 break;
4698 case GT:
4699 if (cond2 == GT || !cond_or)
4700 return CC_DGTmode;
4701 if (cond2 == GE)
4702 return CC_DGEmode;
4703 if (cond2 == NE)
4704 return CC_DNEmode;
4705 break;
4707 case LTU:
4708 if (cond2 == LTU || !cond_or)
4709 return CC_DLTUmode;
4710 if (cond2 == LEU)
4711 return CC_DLEUmode;
4712 if (cond2 == NE)
4713 return CC_DNEmode;
4714 break;
4716 case GTU:
4717 if (cond2 == GTU || !cond_or)
4718 return CC_DGTUmode;
4719 if (cond2 == GEU)
4720 return CC_DGEUmode;
4721 if (cond2 == NE)
4722 return CC_DNEmode;
4723 break;
4725 /* The remaining cases only occur when both comparisons are the
4726 same. */
4727 case NE:
4728 return CC_DNEmode;
4730 case LE:
4731 return CC_DLEmode;
4733 case GE:
4734 return CC_DGEmode;
4736 case LEU:
4737 return CC_DLEUmode;
4739 case GEU:
4740 return CC_DGEUmode;
4742 default:
4743 break;
4746 abort ();
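/* Editorial example for the switch above: combining (x == y) with
   (x >= y) as an inclusive-or (COND_OR == 2) gives cond1 == EQ,
   cond2 == GE; EQ implies GE, so the dominance test passes and
   CC_DGEmode is returned.  */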
4749 enum machine_mode
4750 arm_select_cc_mode (op, x, y)
4751 enum rtx_code op;
4752 rtx x;
4753 rtx y;
4755 /* All floating point compares return CCFP if it is an equality
4756 comparison, and CCFPE otherwise. */
4757 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
4759 switch (op)
4761 case EQ:
4762 case NE:
4763 case UNORDERED:
4764 case ORDERED:
4765 case UNLT:
4766 case UNLE:
4767 case UNGT:
4768 case UNGE:
4769 case UNEQ:
4770 case LTGT:
4771 return CCFPmode;
4773 case LT:
4774 case LE:
4775 case GT:
4776 case GE:
4777 return CCFPEmode;
4779 default:
4780 abort ();
4784 /* A compare with a shifted operand. Because of canonicalization, the
4785 comparison will have to be swapped when we emit the assembler. */
4786 if (GET_MODE (y) == SImode && GET_CODE (y) == REG
4787 && (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
4788 || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
4789 || GET_CODE (x) == ROTATERT))
4790 return CC_SWPmode;
4792 /* This is a special case that is used by combine to allow a
4793 comparison of a shifted byte load to be split into a zero-extend
4794 followed by a comparison of the shifted integer (only valid for
4795 equalities and unsigned inequalities). */
4796 if (GET_MODE (x) == SImode
4797 && GET_CODE (x) == ASHIFT
4798 && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 24
4799 && GET_CODE (XEXP (x, 0)) == SUBREG
4800 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == MEM
4801 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == QImode
4802 && (op == EQ || op == NE
4803 || op == GEU || op == GTU || op == LTU || op == LEU)
4804 && GET_CODE (y) == CONST_INT)
4805 return CC_Zmode;
4807 /* A construct for a conditional compare, if the false arm contains
4808 0, then both conditions must be true, otherwise either condition
4809 must be true. Not all conditions are possible, so CCmode is
4810 returned if it can't be done. */
4811 if (GET_CODE (x) == IF_THEN_ELSE
4812 && (XEXP (x, 2) == const0_rtx
4813 || XEXP (x, 2) == const1_rtx)
4814 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4815 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
4816 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
4817 INTVAL (XEXP (x, 2)));
4819 /* Alternate canonicalizations of the above. These are somewhat cleaner. */
4820 if (GET_CODE (x) == AND
4821 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4822 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
4823 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1), 0);
4825 if (GET_CODE (x) == IOR
4826 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4827 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
4828 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1), 2);
4830 /* For an operation that sets the condition codes as a side-effect, the
4831 V flag is not set correctly, so we can only use comparisons where
4832 this doesn't matter. (For LT and GE we can use "mi" and "pl"
4833 instead.) */
4834 if (GET_MODE (x) == SImode
4835 && y == const0_rtx
4836 && (op == EQ || op == NE || op == LT || op == GE)
4837 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
4838 || GET_CODE (x) == AND || GET_CODE (x) == IOR
4839 || GET_CODE (x) == XOR || GET_CODE (x) == MULT
4840 || GET_CODE (x) == NOT || GET_CODE (x) == NEG
4841 || GET_CODE (x) == LSHIFTRT
4842 || GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
4843 || GET_CODE (x) == ROTATERT || GET_CODE (x) == ZERO_EXTRACT))
4844 return CC_NOOVmode;
4846 if (GET_MODE (x) == QImode && (op == EQ || op == NE))
4847 return CC_Zmode;
4849 if (GET_MODE (x) == SImode && (op == LTU || op == GEU)
4850 && GET_CODE (x) == PLUS
4851 && (rtx_equal_p (XEXP (x, 0), y) || rtx_equal_p (XEXP (x, 1), y)))
4852 return CC_Cmode;
4854 return CCmode;
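/* Editorial examples: (LTU (plus a b) b) selects CC_Cmode above,
   since the carry flag of the addition reflects the result of the
   comparison; comparing a shifted value against a plain register
   selects CC_SWPmode, recording that the operands must be swapped
   when the assembler is finally emitted.  */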
4857 /* X and Y are two things to compare using CODE. Emit the compare insn and
4858 return the rtx for register 0 in the proper mode. FP means this is a
4859 floating point compare: I don't think that it is needed on the arm. */
4861 rtx
4862 arm_gen_compare_reg (code, x, y)
4863 enum rtx_code code;
4864 rtx x, y;
4866 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
4867 rtx cc_reg = gen_rtx_REG (mode, CC_REGNUM);
4869 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
4870 gen_rtx_COMPARE (mode, x, y)));
4872 return cc_reg;
4875 void
4876 arm_reload_in_hi (operands)
4877 rtx * operands;
4879 rtx ref = operands[1];
4880 rtx base, scratch;
4881 HOST_WIDE_INT offset = 0;
4883 if (GET_CODE (ref) == SUBREG)
4885 offset = SUBREG_BYTE (ref);
4886 ref = SUBREG_REG (ref);
4889 if (GET_CODE (ref) == REG)
4891 /* We have a pseudo which has been spilt onto the stack; there
4892 are two cases here: the first where there is a simple
4893 stack-slot replacement and a second where the stack-slot is
4894 out of range, or is used as a subreg. */
4895 if (reg_equiv_mem[REGNO (ref)])
4897 ref = reg_equiv_mem[REGNO (ref)];
4898 base = find_replacement (&XEXP (ref, 0));
4900 else
4901 /* The slot is out of range, or was dressed up in a SUBREG. */
4902 base = reg_equiv_address[REGNO (ref)];
4904 else
4905 base = find_replacement (&XEXP (ref, 0));
4907 /* Handle the case where the address is too complex to be offset by 1. */
4908 if (GET_CODE (base) == MINUS
4909 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
4911 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
4913 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
4914 base = base_plus;
4916 else if (GET_CODE (base) == PLUS)
4918 /* The addend must be CONST_INT, or we would have dealt with it above. */
4919 HOST_WIDE_INT hi, lo;
4921 offset += INTVAL (XEXP (base, 1));
4922 base = XEXP (base, 0);
4924 /* Rework the address into a legal sequence of insns. */
4925 /* Valid range for lo is -4095 -> 4095 */
4926 lo = (offset >= 0
4927 ? (offset & 0xfff)
4928 : -((-offset) & 0xfff));
4930 /* Corner case: if lo is the max offset then we would be out of range
4931 once we have added the additional 1 below, so bump the msb into the
4932 pre-loading insn(s). */
4933 if (lo == 4095)
4934 lo &= 0x7ff;
4936 hi = ((((offset - lo) & (HOST_WIDE_INT) 0xffffffff)
4937 ^ (HOST_WIDE_INT) 0x80000000)
4938 - (HOST_WIDE_INT) 0x80000000);
4940 if (hi + lo != offset)
4941 abort ();
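/* Editorial worked example of the split above: offset == -5000 gives
   lo == -904 and hi == -4096; the addsi3 below folds hi into the base
   register, leaving a residue that fits the +/-4095 ldrb offset range
   even after the "+ 1" for the second byte.  */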
4943 if (hi != 0)
4945 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
4947 /* Get the base address; addsi3 knows how to handle constants
4948 that require more than one insn. */
4949 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
4950 base = base_plus;
4951 offset = lo;
4955 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
4956 emit_insn (gen_zero_extendqisi2 (scratch,
4957 gen_rtx_MEM (QImode,
4958 plus_constant (base,
4959 offset))));
4960 emit_insn (gen_zero_extendqisi2 (gen_rtx_SUBREG (SImode, operands[0], 0),
4961 gen_rtx_MEM (QImode,
4962 plus_constant (base,
4963 offset + 1))));
4964 if (!BYTES_BIG_ENDIAN)
4965 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
4966 gen_rtx_IOR (SImode,
4967 gen_rtx_ASHIFT
4968 (SImode,
4969 gen_rtx_SUBREG (SImode, operands[0], 0),
4970 GEN_INT (8)),
4971 scratch)));
4972 else
4973 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
4974 gen_rtx_IOR (SImode,
4975 gen_rtx_ASHIFT (SImode, scratch,
4976 GEN_INT (8)),
4977 gen_rtx_SUBREG (SImode, operands[0],
4978 0))));
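/* Editorial summary: on a little-endian target the sequence above is
   roughly
       ldrb  scratch, [base, #offset]
       ldrb  dest,    [base, #offset + 1]
       orr   dest, scratch, dest, lsl #8
   i.e. the usual two-byte synthesis of an unaligned halfword load.  */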
4981 /* Handle storing a half-word to memory during reload by synthesising as two
4982 byte stores. Take care not to clobber the input values until after we
4983 have moved them somewhere safe. This code assumes that if the DImode
4984 scratch in operands[2] overlaps either the input value or output address
4985 in some way, then that value must die in this insn (we absolutely need
4986 two scratch registers for some corner cases). */
4988 void
4989 arm_reload_out_hi (operands)
4990 rtx * operands;
4992 rtx ref = operands[0];
4993 rtx outval = operands[1];
4994 rtx base, scratch;
4995 HOST_WIDE_INT offset = 0;
4997 if (GET_CODE (ref) == SUBREG)
4999 offset = SUBREG_BYTE (ref);
5000 ref = SUBREG_REG (ref);
5003 if (GET_CODE (ref) == REG)
5005 /* We have a pseudo which has been spilt onto the stack; there
5006 are two cases here: the first where there is a simple
5007 stack-slot replacement and a second where the stack-slot is
5008 out of range, or is used as a subreg. */
5009 if (reg_equiv_mem[REGNO (ref)])
5011 ref = reg_equiv_mem[REGNO (ref)];
5012 base = find_replacement (&XEXP (ref, 0));
5014 else
5015 /* The slot is out of range, or was dressed up in a SUBREG. */
5016 base = reg_equiv_address[REGNO (ref)];
5018 else
5019 base = find_replacement (&XEXP (ref, 0));
5021 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
5023 /* Handle the case where the address is too complex to be offset by 1. */
5024 if (GET_CODE (base) == MINUS
5025 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
5027 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5029 /* Be careful not to destroy OUTVAL. */
5030 if (reg_overlap_mentioned_p (base_plus, outval))
5032 /* Updating base_plus might destroy outval, see if we can
5033 swap the scratch and base_plus. */
5034 if (!reg_overlap_mentioned_p (scratch, outval))
5036 rtx tmp = scratch;
5037 scratch = base_plus;
5038 base_plus = tmp;
5040 else
5042 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
5044 /* Be conservative and copy OUTVAL into the scratch now,
5045 this should only be necessary if outval is a subreg
5046 of something larger than a word. */
5047 /* XXX Might this clobber base? I can't see how it can,
5048 since scratch is known to overlap with OUTVAL, and
5049 must be wider than a word. */
5050 emit_insn (gen_movhi (scratch_hi, outval));
5051 outval = scratch_hi;
5055 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
5056 base = base_plus;
5058 else if (GET_CODE (base) == PLUS)
5060 /* The addend must be CONST_INT, or we would have dealt with it above. */
5061 HOST_WIDE_INT hi, lo;
5063 offset += INTVAL (XEXP (base, 1));
5064 base = XEXP (base, 0);
5066 /* Rework the address into a legal sequence of insns. */
5067 /* Valid range for lo is -4095 -> 4095 */
5068 lo = (offset >= 0
5069 ? (offset & 0xfff)
5070 : -((-offset) & 0xfff));
5072 /* Corner case: if lo is the max offset then we would be out of range
5073 once we have added the additional 1 below, so bump the msb into the
5074 pre-loading insn(s). */
5075 if (lo == 4095)
5076 lo &= 0x7ff;
5078 hi = ((((offset - lo) & (HOST_WIDE_INT) 0xffffffff)
5079 ^ (HOST_WIDE_INT) 0x80000000)
5080 - (HOST_WIDE_INT) 0x80000000);
5082 if (hi + lo != offset)
5083 abort ();
5085 if (hi != 0)
5087 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5089 /* Be careful not to destroy OUTVAL. */
5090 if (reg_overlap_mentioned_p (base_plus, outval))
5092 /* Updating base_plus might destroy outval, see if we
5093 can swap the scratch and base_plus. */
5094 if (!reg_overlap_mentioned_p (scratch, outval))
5096 rtx tmp = scratch;
5097 scratch = base_plus;
5098 base_plus = tmp;
5100 else
5102 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
5104 /* Be conservative and copy outval into scratch now,
5105 this should only be necessary if outval is a
5106 subreg of something larger than a word. */
5107 /* XXX Might this clobber base? I can't see how it
5108 can, since scratch is known to overlap with
5109 outval. */
5110 emit_insn (gen_movhi (scratch_hi, outval));
5111 outval = scratch_hi;
5115 /* Get the base address; addsi3 knows how to handle constants
5116 that require more than one insn. */
5117 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
5118 base = base_plus;
5119 offset = lo;
5123 if (BYTES_BIG_ENDIAN)
5125 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
5126 plus_constant (base, offset + 1)),
5127 gen_rtx_SUBREG (QImode, outval, 0)));
5128 emit_insn (gen_lshrsi3 (scratch,
5129 gen_rtx_SUBREG (SImode, outval, 0),
5130 GEN_INT (8)));
5131 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
5132 gen_rtx_SUBREG (QImode, scratch, 0)));
5134 else
5136 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
5137 gen_rtx_SUBREG (QImode, outval, 0)));
5138 emit_insn (gen_lshrsi3 (scratch,
5139 gen_rtx_SUBREG (SImode, outval, 0),
5140 GEN_INT (8)));
5141 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
5142 plus_constant (base, offset + 1)),
5143 gen_rtx_SUBREG (QImode, scratch, 0)));
5147 /* Print a symbolic form of X to the debug file, F. */
5149 static void
5150 arm_print_value (f, x)
5151 FILE * f;
5152 rtx x;
5154 switch (GET_CODE (x))
5156 case CONST_INT:
5157 fprintf (f, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
5158 return;
5160 case CONST_DOUBLE:
5161 fprintf (f, "<0x%lx,0x%lx>", (long)XWINT (x, 2), (long)XWINT (x, 3));
5162 return;
5164 case CONST_STRING:
5165 fprintf (f, "\"%s\"", XSTR (x, 0));
5166 return;
5168 case SYMBOL_REF:
5169 fprintf (f, "`%s'", XSTR (x, 0));
5170 return;
5172 case LABEL_REF:
5173 fprintf (f, "L%d", INSN_UID (XEXP (x, 0)));
5174 return;
5176 case CONST:
5177 arm_print_value (f, XEXP (x, 0));
5178 return;
5180 case PLUS:
5181 arm_print_value (f, XEXP (x, 0));
5182 fprintf (f, "+");
5183 arm_print_value (f, XEXP (x, 1));
5184 return;
5186 case PC:
5187 fprintf (f, "pc");
5188 return;
5190 default:
5191 fprintf (f, "????");
5192 return;
5196 /* Routines for manipulation of the constant pool. */
5198 /* Arm instructions cannot load a large constant directly into a
5199 register; they have to come from a pc relative load. The constant
5200 must therefore be placed in the addressable range of the pc
5201 relative load. Depending on the precise pc relative load
5202 instruction the range is somewhere between 256 bytes and 4k. This
5203 means that we often have to dump a constant inside a function, and
5204 generate code to branch around it.
5206 It is important to minimize this, since the branches will slow
5207 things down and make the code larger.
5209 Normally we can hide the table after an existing unconditional
5210 branch so that there is no interruption of the flow, but in the
5211 worst case the code looks like this:
5213 ldr rn, L1
5214 ...
5215 b L2
5216 align
5217 L1: .long value
5218 L2:
5219 ...
5221 ldr rn, L3
5222 ...
5223 b L4
5224 align
5225 L3: .long value
5226 L4:
5227 ...
5229 We fix this by performing a scan after scheduling, which notices
5230 which instructions need to have their operands fetched from the
5231 constant table and builds the table.
5233 The algorithm starts by building a table of all the constants that
5234 need fixing up and all the natural barriers in the function (places
5235 where a constant table can be dropped without breaking the flow).
5236 For each fixup we note how far the pc-relative replacement will be
5237 able to reach and the offset of the instruction into the function.
5239 Having built the table we then group the fixes together to form
5240 tables that are as large as possible (subject to addressing
5241 constraints) and emit each table of constants after the last
5242 barrier that is within range of all the instructions in the group.
5243 If a group does not contain a barrier, then we forcibly create one
5244 by inserting a jump instruction into the flow. Once the table has
5245 been inserted, the insns are then modified to reference the
5246 relevant entry in the pool.
5248 Possible enhancements to the algorithm (not implemented) are:
5250 1) For some processors and object formats, there may be benefit in
5251 aligning the pools to the start of cache lines; this alignment
5252 would need to be taken into account when calculating addressability
5253 of a pool. */
5255 /* These typedefs are located at the start of this file, so that
5256 they can be used in the prototypes there. This comment is to
5257 remind readers of that fact so that the following structures
5258 can be understood more easily.
5260 typedef struct minipool_node Mnode;
5261 typedef struct minipool_fixup Mfix; */
5263 struct minipool_node
5265 /* Doubly linked chain of entries. */
5266 Mnode * next;
5267 Mnode * prev;
5268 /* The maximum offset into the code that this entry can be placed. While
5269 pushing fixes for forward references, all entries are sorted in order
5270 of increasing max_address. */
5271 HOST_WIDE_INT max_address;
5272 /* Similarly for an entry inserted for a backwards ref. */
5273 HOST_WIDE_INT min_address;
5274 /* The number of fixes referencing this entry. This can become zero
5275 if we "unpush" an entry. In this case we ignore the entry when we
5276 come to emit the code. */
5277 int refcount;
5278 /* The offset from the start of the minipool. */
5279 HOST_WIDE_INT offset;
5280 /* The value in table. */
5281 rtx value;
5282 /* The mode of value. */
5283 enum machine_mode mode;
5284 int fix_size;
5287 struct minipool_fixup
5289 Mfix * next;
5290 rtx insn;
5291 HOST_WIDE_INT address;
5292 rtx * loc;
5293 enum machine_mode mode;
5294 int fix_size;
5295 rtx value;
5296 Mnode * minipool;
5297 HOST_WIDE_INT forwards;
5298 HOST_WIDE_INT backwards;
5301 /* Fixes less than a word need padding out to a word boundary. */
5302 #define MINIPOOL_FIX_SIZE(mode) \
5303 (GET_MODE_SIZE ((mode)) >= 4 ? GET_MODE_SIZE ((mode)) : 4)
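/* Editorial note: e.g. a QImode or HImode fix is padded out to 4
   bytes in the pool, while SImode stays at 4 and DImode at 8.  */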
5305 static Mnode * minipool_vector_head;
5306 static Mnode * minipool_vector_tail;
5307 static rtx minipool_vector_label;
5309 /* The linked list of all minipool fixes required for this function. */
5310 Mfix * minipool_fix_head;
5311 Mfix * minipool_fix_tail;
5312 /* The fix entry for the current minipool, once it has been placed. */
5313 Mfix * minipool_barrier;
5315 /* Determines if INSN is the start of a jump table. Returns the end
5316 of the TABLE or NULL_RTX. */
5318 static rtx
5319 is_jump_table (insn)
5320 rtx insn;
5322 rtx table;
5324 if (GET_CODE (insn) == JUMP_INSN
5325 && JUMP_LABEL (insn) != NULL
5326 && ((table = next_real_insn (JUMP_LABEL (insn)))
5327 == next_real_insn (insn))
5328 && table != NULL
5329 && GET_CODE (table) == JUMP_INSN
5330 && (GET_CODE (PATTERN (table)) == ADDR_VEC
5331 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
5332 return table;
5334 return NULL_RTX;
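/* Descriptive comment added editorially: return the size in bytes of
   the jump table INSN.  An ADDR_DIFF_VEC keeps its entries in operand
   1 (operand 0 is the base label); a plain ADDR_VEC uses operand 0.  */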
5337 static HOST_WIDE_INT
5338 get_jump_table_size (insn)
5339 rtx insn;
5341 rtx body = PATTERN (insn);
5342 int elt = GET_CODE (body) == ADDR_DIFF_VEC ? 1 : 0;
5344 return GET_MODE_SIZE (GET_MODE (body)) * XVECLEN (body, elt);
5347 /* Move a minipool fix MP from its current location to before MAX_MP.
5348 If MAX_MP is NULL, then MP doesn't need moving, but the addressing
5349 constraints may need updating. */
5351 static Mnode *
5352 move_minipool_fix_forward_ref (mp, max_mp, max_address)
5353 Mnode * mp;
5354 Mnode * max_mp;
5355 HOST_WIDE_INT max_address;
5357 /* This should never be true and the code below assumes these are
5358 different. */
5359 if (mp == max_mp)
5360 abort ();
5362 if (max_mp == NULL)
5364 if (max_address < mp->max_address)
5365 mp->max_address = max_address;
5367 else
5369 if (max_address > max_mp->max_address - mp->fix_size)
5370 mp->max_address = max_mp->max_address - mp->fix_size;
5371 else
5372 mp->max_address = max_address;
5374 /* Unlink MP from its current position. Since max_mp is non-null,
5375 mp->prev must be non-null. */
5376 mp->prev->next = mp->next;
5377 if (mp->next != NULL)
5378 mp->next->prev = mp->prev;
5379 else
5380 minipool_vector_tail = mp->prev;
5382 /* Re-insert it before MAX_MP. */
5383 mp->next = max_mp;
5384 mp->prev = max_mp->prev;
5385 max_mp->prev = mp;
5387 if (mp->prev != NULL)
5388 mp->prev->next = mp;
5389 else
5390 minipool_vector_head = mp;
5393 /* Save the new entry. */
5394 max_mp = mp;
5396 /* Scan over the preceding entries and adjust their addresses as
5397 required. */
5398 while (mp->prev != NULL
5399 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
5401 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
5402 mp = mp->prev;
5405 return max_mp;
5408 /* Add a constant to the minipool for a forward reference. Returns the
5409 node added or NULL if the constant will not fit in this pool. */
5411 static Mnode *
5412 add_minipool_forward_ref (fix)
5413 Mfix * fix;
5415 /* If set, max_mp is the first pool_entry that has a lower
5416 constraint than the one we are trying to add. */
5417 Mnode * max_mp = NULL;
5418 HOST_WIDE_INT max_address = fix->address + fix->forwards;
5419 Mnode * mp;
5421 /* If this fix's address is greater than the address of the first
5422 entry, then we can't put the fix in this pool. We subtract the
5423 size of the current fix to ensure that if the table is fully
5424 packed we still have enough room to insert this value by shuffling
5425 the other fixes forwards. */
5426 if (minipool_vector_head &&
5427 fix->address >= minipool_vector_head->max_address - fix->fix_size)
5428 return NULL;
5430 /* Scan the pool to see if a constant with the same value has
5431 already been added. While we are doing this, also note the
5432 location where we must insert the constant if it doesn't already
5433 exist. */
5434 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
5436 if (GET_CODE (fix->value) == GET_CODE (mp->value)
5437 && fix->mode == mp->mode
5438 && (GET_CODE (fix->value) != CODE_LABEL
5439 || (CODE_LABEL_NUMBER (fix->value)
5440 == CODE_LABEL_NUMBER (mp->value)))
5441 && rtx_equal_p (fix->value, mp->value))
5443 /* More than one fix references this entry. */
5444 mp->refcount++;
5445 return move_minipool_fix_forward_ref (mp, max_mp, max_address);
5448 /* Note the insertion point if necessary. */
5449 if (max_mp == NULL
5450 && mp->max_address > max_address)
5451 max_mp = mp;
5454 /* The value is not currently in the minipool, so we need to create
5455 a new entry for it. If MAX_MP is NULL, the entry will be put on
5456 the end of the list since the placement is less constrained than
5457 any existing entry. Otherwise, we insert the new fix before
5458 MAX_MP and, if necessary, adjust the constraints on the other
5459 entries. */
5460 mp = xmalloc (sizeof (* mp));
5461 mp->fix_size = fix->fix_size;
5462 mp->mode = fix->mode;
5463 mp->value = fix->value;
5464 mp->refcount = 1;
5465 /* Not yet required for a backwards ref. */
5466 mp->min_address = -65536;
5468 if (max_mp == NULL)
5470 mp->max_address = max_address;
5471 mp->next = NULL;
5472 mp->prev = minipool_vector_tail;
5474 if (mp->prev == NULL)
5476 minipool_vector_head = mp;
5477 minipool_vector_label = gen_label_rtx ();
5479 else
5480 mp->prev->next = mp;
5482 minipool_vector_tail = mp;
5484 else
5486 if (max_address > max_mp->max_address - mp->fix_size)
5487 mp->max_address = max_mp->max_address - mp->fix_size;
5488 else
5489 mp->max_address = max_address;
5491 mp->next = max_mp;
5492 mp->prev = max_mp->prev;
5493 max_mp->prev = mp;
5494 if (mp->prev != NULL)
5495 mp->prev->next = mp;
5496 else
5497 minipool_vector_head = mp;
5500 /* Save the new entry. */
5501 max_mp = mp;
5503 /* Scan over the preceding entries and adjust their addresses as
5504 required. */
5505 while (mp->prev != NULL
5506 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
5508 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
5509 mp = mp->prev;
5512 return max_mp;
5515 static Mnode *
5516 move_minipool_fix_backward_ref (mp, min_mp, min_address)
5517 Mnode * mp;
5518 Mnode * min_mp;
5519 HOST_WIDE_INT min_address;
5521 HOST_WIDE_INT offset;
5523 /* This should never be true, and the code below assumes these are
5524 different. */
5525 if (mp == min_mp)
5526 abort ();
5528 if (min_mp == NULL)
5530 if (min_address > mp->min_address)
5531 mp->min_address = min_address;
5533 else
5535 /* We will adjust this below if it is too loose. */
5536 mp->min_address = min_address;
5538 /* Unlink MP from its current position. Since min_mp is non-null,
5539 mp->next must be non-null. */
5540 mp->next->prev = mp->prev;
5541 if (mp->prev != NULL)
5542 mp->prev->next = mp->next;
5543 else
5544 minipool_vector_head = mp->next;
5546 /* Reinsert it after MIN_MP. */
5547 mp->prev = min_mp;
5548 mp->next = min_mp->next;
5549 min_mp->next = mp;
5550 if (mp->next != NULL)
5551 mp->next->prev = mp;
5552 else
5553 minipool_vector_tail = mp;
5556 min_mp = mp;
5558 offset = 0;
5559 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
5561 mp->offset = offset;
5562 if (mp->refcount > 0)
5563 offset += mp->fix_size;
5565 if (mp->next && mp->next->min_address < mp->min_address + mp->fix_size)
5566 mp->next->min_address = mp->min_address + mp->fix_size;
5569 return min_mp;
5572 /* Add a constant to the minipool for a backward reference. Returns the
5573 node added or NULL if the constant will not fit in this pool.
5575 Note that the code for insertion for a backwards reference can be
5576 somewhat confusing because the calculated offsets for each fix do
5577 not take into account the size of the pool (which is still under
5578 construction). */
5580 static Mnode *
5581 add_minipool_backward_ref (fix)
5582 Mfix * fix;
5584 /* If set, min_mp is the last pool_entry that has a lower constraint
5585 than the one we are trying to add. */
5586 Mnode * min_mp = NULL;
5587 /* This can be negative, since it is only a constraint. */
5588 HOST_WIDE_INT min_address = fix->address - fix->backwards;
5589 Mnode * mp;
5591 /* If we can't reach the current pool from this insn, or if we can't
5592 insert this entry at the end of the pool without pushing other
5593 fixes out of range, then we don't try. This ensures that we
5594 can't fail later on. */
5595 if (min_address >= minipool_barrier->address
5596 || (minipool_vector_tail->min_address + fix->fix_size
5597 >= minipool_barrier->address))
5598 return NULL;
5600 /* Scan the pool to see if a constant with the same value has
5601 already been added. While we are doing this, also note the
5602 location where we must insert the constant if it doesn't already
5603 exist. */
5604 for (mp = minipool_vector_tail; mp != NULL; mp = mp->prev)
5606 if (GET_CODE (fix->value) == GET_CODE (mp->value)
5607 && fix->mode == mp->mode
5608 && (GET_CODE (fix->value) != CODE_LABEL
5609 || (CODE_LABEL_NUMBER (fix->value)
5610 == CODE_LABEL_NUMBER (mp->value)))
5611 && rtx_equal_p (fix->value, mp->value)
5612 /* Check that there is enough slack to move this entry to the
5613 end of the table (this is conservative). */
5614 && (mp->max_address
5615 > (minipool_barrier->address
5616 + minipool_vector_tail->offset
5617 + minipool_vector_tail->fix_size)))
5619 mp->refcount++;
5620 return move_minipool_fix_backward_ref (mp, min_mp, min_address);
5623 if (min_mp != NULL)
5624 mp->min_address += fix->fix_size;
5625 else
5627 /* Note the insertion point if necessary. */
5628 if (mp->min_address < min_address)
5629 min_mp = mp;
5630 else if (mp->max_address
5631 < minipool_barrier->address + mp->offset + fix->fix_size)
5633 /* Inserting before this entry would push the fix beyond
5634 its maximum address (which can happen if we have
5635 re-located a forwards fix); force the new fix to come
5636 after it. */
5637 min_mp = mp;
5638 min_address = mp->min_address + fix->fix_size;
5643 /* We need to create a new entry. */
5644 mp = xmalloc (sizeof (* mp));
5645 mp->fix_size = fix->fix_size;
5646 mp->mode = fix->mode;
5647 mp->value = fix->value;
5648 mp->refcount = 1;
5649 mp->max_address = minipool_barrier->address + 65536;
5651 mp->min_address = min_address;
5653 if (min_mp == NULL)
5655 mp->prev = NULL;
5656 mp->next = minipool_vector_head;
5658 if (mp->next == NULL)
5660 minipool_vector_tail = mp;
5661 minipool_vector_label = gen_label_rtx ();
5663 else
5664 mp->next->prev = mp;
5666 minipool_vector_head = mp;
5668 else
5670 mp->next = min_mp->next;
5671 mp->prev = min_mp;
5672 min_mp->next = mp;
5674 if (mp->next != NULL)
5675 mp->next->prev = mp;
5676 else
5677 minipool_vector_tail = mp;
5680 /* Save the new entry. */
5681 min_mp = mp;
5683 if (mp->prev)
5684 mp = mp->prev;
5685 else
5686 mp->offset = 0;
5688 /* Scan over the following entries and adjust their offsets. */
5689 while (mp->next != NULL)
5691 if (mp->next->min_address < mp->min_address + mp->fix_size)
5692 mp->next->min_address = mp->min_address + mp->fix_size;
5694 if (mp->refcount)
5695 mp->next->offset = mp->offset + mp->fix_size;
5696 else
5697 mp->next->offset = mp->offset;
5699 mp = mp->next;
5702 return min_mp;
5705 static void
5706 assign_minipool_offsets (barrier)
5707 Mfix * barrier;
5709 HOST_WIDE_INT offset = 0;
5710 Mnode * mp;
5712 minipool_barrier = barrier;
5714 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
5716 mp->offset = offset;
5718 if (mp->refcount > 0)
5719 offset += mp->fix_size;
5723 /* Output the literal table. */
5724 static void
5725 dump_minipool (scan)
5726 rtx scan;
5728 Mnode * mp;
5729 Mnode * nmp;
5731 if (rtl_dump_file)
5732 fprintf (rtl_dump_file,
5733 ";; Emitting minipool after insn %u; address %ld\n",
5734 INSN_UID (scan), (unsigned long) minipool_barrier->address);
5736 scan = emit_label_after (gen_label_rtx (), scan);
5737 scan = emit_insn_after (gen_align_4 (), scan);
5738 scan = emit_label_after (minipool_vector_label, scan);
5740 for (mp = minipool_vector_head; mp != NULL; mp = nmp)
5742 if (mp->refcount > 0)
5744 if (rtl_dump_file)
5746 fprintf (rtl_dump_file,
5747 ";; Offset %u, min %ld, max %ld ",
5748 (unsigned) mp->offset, (unsigned long) mp->min_address,
5749 (unsigned long) mp->max_address);
5750 arm_print_value (rtl_dump_file, mp->value);
5751 fputc ('\n', rtl_dump_file);
5754 switch (mp->fix_size)
5756 #ifdef HAVE_consttable_1
5757 case 1:
5758 scan = emit_insn_after (gen_consttable_1 (mp->value), scan);
5759 break;
5761 #endif
5762 #ifdef HAVE_consttable_2
5763 case 2:
5764 scan = emit_insn_after (gen_consttable_2 (mp->value), scan);
5765 break;
5767 #endif
5768 #ifdef HAVE_consttable_4
5769 case 4:
5770 scan = emit_insn_after (gen_consttable_4 (mp->value), scan);
5771 break;
5773 #endif
5774 #ifdef HAVE_consttable_8
5775 case 8:
5776 scan = emit_insn_after (gen_consttable_8 (mp->value), scan);
5777 break;
5779 #endif
5780 default:
5781 abort ();
5782 break;
5786 nmp = mp->next;
5787 free (mp);
5790 minipool_vector_head = minipool_vector_tail = NULL;
5791 scan = emit_insn_after (gen_consttable_end (), scan);
5792 scan = emit_barrier_after (scan);
5795 /* Return the cost of forcibly inserting a barrier after INSN. */
5797 static int
5798 arm_barrier_cost (insn)
5799 rtx insn;
5801 /* Basing the location of the pool on the loop depth is preferable,
5802 but at the moment, the basic block information seems to be
5803 corrupt by this stage of the compilation. */
5804 int base_cost = 50;
5805 rtx next = next_nonnote_insn (insn);
5807 if (next != NULL && GET_CODE (next) == CODE_LABEL)
5808 base_cost -= 20;
5810 switch (GET_CODE (insn))
5812 case CODE_LABEL:
5813 /* It will always be better to place the table before the label, rather
5814 than after it. */
5815 return 50;
5817 case INSN:
5818 case CALL_INSN:
5819 return base_cost;
5821 case JUMP_INSN:
5822 return base_cost - 10;
5824 default:
5825 return base_cost + 10;
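/* Editorial example: a JUMP_INSN whose next non-note insn is a
   CODE_LABEL scores 50 - 20 - 10 == 20, the cheapest location here,
   matching the intuition that pools are best dumped just after an
   existing branch.  */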
5829 /* Find the best place in the insn stream in the range
5830 (FIX->address,MAX_ADDRESS) to forcibly insert a minipool barrier.
5831 Create the barrier by inserting a jump and add a new fix entry for
5832 it. */
5834 static Mfix *
5835 create_fix_barrier (fix, max_address)
5836 Mfix * fix;
5837 HOST_WIDE_INT max_address;
5839 HOST_WIDE_INT count = 0;
5840 rtx barrier;
5841 rtx from = fix->insn;
5842 rtx selected = from;
5843 int selected_cost;
5844 HOST_WIDE_INT selected_address;
5845 Mfix * new_fix;
5846 HOST_WIDE_INT max_count = max_address - fix->address;
5847 rtx label = gen_label_rtx ();
5849 selected_cost = arm_barrier_cost (from);
5850 selected_address = fix->address;
5852 while (from && count < max_count)
5854 rtx tmp;
5855 int new_cost;
5857 /* This code shouldn't have been called if there was a natural barrier
5858 within range. */
5859 if (GET_CODE (from) == BARRIER)
5860 abort ();
5862 /* Count the length of this insn. */
5863 count += get_attr_length (from);
5865 /* If there is a jump table, add its length. */
5866 tmp = is_jump_table (from);
5867 if (tmp != NULL)
5869 count += get_jump_table_size (tmp);
5871 /* Jump tables aren't in a basic block, so base the cost on
5872 the dispatch insn. If we select this location, we will
5873 still put the pool after the table. */
5874 new_cost = arm_barrier_cost (from);
5876 if (count < max_count && new_cost <= selected_cost)
5878 selected = tmp;
5879 selected_cost = new_cost;
5880 selected_address = fix->address + count;
5883 /* Continue after the dispatch table. */
5884 from = NEXT_INSN (tmp);
5885 continue;
5888 new_cost = arm_barrier_cost (from);
5890 if (count < max_count && new_cost <= selected_cost)
5892 selected = from;
5893 selected_cost = new_cost;
5894 selected_address = fix->address + count;
5897 from = NEXT_INSN (from);
5900 /* Create a new JUMP_INSN that branches around a barrier. */
5901 from = emit_jump_insn_after (gen_jump (label), selected);
5902 JUMP_LABEL (from) = label;
5903 barrier = emit_barrier_after (from);
5904 emit_label_after (label, barrier);
5906 /* Create a minipool barrier entry for the new barrier. */
5907 new_fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* new_fix));
5908 new_fix->insn = barrier;
5909 new_fix->address = selected_address;
5910 new_fix->next = fix->next;
5911 fix->next = new_fix;
5913 return new_fix;
5916 /* Record that there is a natural barrier in the insn stream at
5917 ADDRESS. */
5918 static void
5919 push_minipool_barrier (insn, address)
5920 rtx insn;
5921 HOST_WIDE_INT address;
5923 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
5925 fix->insn = insn;
5926 fix->address = address;
5928 fix->next = NULL;
5929 if (minipool_fix_head != NULL)
5930 minipool_fix_tail->next = fix;
5931 else
5932 minipool_fix_head = fix;
5934 minipool_fix_tail = fix;
5937 /* Record INSN, which will need fixing up to load a value from the
5938 minipool. ADDRESS is the offset of the insn since the start of the
5939 function; LOC is a pointer to the part of the insn which requires
5940 fixing; VALUE is the constant that must be loaded, which is of type
5941 MODE. */
5942 static void
5943 push_minipool_fix (insn, address, loc, mode, value)
5944 rtx insn;
5945 HOST_WIDE_INT address;
5946 rtx * loc;
5947 enum machine_mode mode;
5948 rtx value;
5950 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
5952 #ifdef AOF_ASSEMBLER
5953 /* PIC symbol references need to be converted into offsets into the
5954 based area. */
5955 /* XXX This shouldn't be done here. */
5956 if (flag_pic && GET_CODE (value) == SYMBOL_REF)
5957 value = aof_pic_entry (value);
5958 #endif /* AOF_ASSEMBLER */
5960 fix->insn = insn;
5961 fix->address = address;
5962 fix->loc = loc;
5963 fix->mode = mode;
5964 fix->fix_size = MINIPOOL_FIX_SIZE (mode);
5965 fix->value = value;
5966 fix->forwards = get_attr_pool_range (insn);
5967 fix->backwards = get_attr_neg_pool_range (insn);
5968 fix->minipool = NULL;
5970 /* If an insn doesn't have a range defined for it, then it isn't
5971 expecting to be reworked by this code. Better to abort now than
5972 to generate duff assembly code. */
5973 if (fix->forwards == 0 && fix->backwards == 0)
5974 abort ();
5976 if (rtl_dump_file)
5978 fprintf (rtl_dump_file,
5979 ";; %smode fixup for i%d; addr %lu, range (%ld,%ld): ",
5980 GET_MODE_NAME (mode),
5981 INSN_UID (insn), (unsigned long) address,
5982 -1 * (long)fix->backwards, (long)fix->forwards);
5983 arm_print_value (rtl_dump_file, fix->value);
5984 fprintf (rtl_dump_file, "\n");
5987 /* Add it to the chain of fixes. */
5988 fix->next = NULL;
5990 if (minipool_fix_head != NULL)
5991 minipool_fix_tail->next = fix;
5992 else
5993 minipool_fix_head = fix;
5995 minipool_fix_tail = fix;
5998 /* Scan INSN and note any of its operands that need fixing. */
6000 static void
6001 note_invalid_constants (insn, address)
6002 rtx insn;
6003 HOST_WIDE_INT address;
6005 int opno;
6007 extract_insn (insn);
6009 if (!constrain_operands (1))
6010 fatal_insn_not_found (insn);
6012 /* Fill in recog_op_alt with information about the constraints of this
6013 insn. */
6014 preprocess_constraints ();
6016 for (opno = 0; opno < recog_data.n_operands; opno++)
6018 /* Things we need to fix can only occur in inputs. */
6019 if (recog_data.operand_type[opno] != OP_IN)
6020 continue;
6022 /* If this alternative is a memory reference, then any mention
6023 of constants in this alternative is really to fool reload
6024 into allowing us to accept one there. We need to fix them up
6025 now so that we output the right code. */
6026 if (recog_op_alt[opno][which_alternative].memory_ok)
6028 rtx op = recog_data.operand[opno];
6030 if (CONSTANT_P (op))
6031 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
6032 recog_data.operand_mode[opno], op);
6033 #if 0
6034 /* RWE: Now we look correctly at the operands for the insn,
6035 this shouldn't be needed any more. */
6036 #ifndef AOF_ASSEMBLER
6037 /* XXX Is this still needed? */
6038 else if (GET_CODE (op) == UNSPEC && XINT (op, 1) == UNSPEC_PIC_SYM)
6039 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
6040 recog_data.operand_mode[opno],
6041 XVECEXP (op, 0, 0));
6042 #endif
6043 #endif
6044 else if (GET_CODE (op) == MEM
6045 && GET_CODE (XEXP (op, 0)) == SYMBOL_REF
6046 && CONSTANT_POOL_ADDRESS_P (XEXP (op, 0)))
6047 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
6048 recog_data.operand_mode[opno],
6049 get_pool_constant (XEXP (op, 0)));
6054 void
6055 arm_reorg (first)
6056 rtx first;
6058 rtx insn;
6059 HOST_WIDE_INT address = 0;
6060 Mfix * fix;
6062 minipool_fix_head = minipool_fix_tail = NULL;
6064 /* The first insn must always be a note, or the code below won't
6065 scan it properly. */
6066 if (GET_CODE (first) != NOTE)
6067 abort ();
6069 /* Scan all the insns and record the operands that will need fixing. */
6070 for (insn = next_nonnote_insn (first); insn; insn = next_nonnote_insn (insn))
6072 if (GET_CODE (insn) == BARRIER)
6073 push_minipool_barrier (insn, address);
6074 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
6075 || GET_CODE (insn) == JUMP_INSN)
6077 rtx table;
6079 note_invalid_constants (insn, address);
6080 address += get_attr_length (insn);
6082 /* If the insn is a vector jump, add the size of the table
6083 and skip the table. */
6084 if ((table = is_jump_table (insn)) != NULL)
6086 address += get_jump_table_size (table);
6087 insn = table;
6092 fix = minipool_fix_head;
6094 /* Now scan the fixups and perform the required changes. */
6095 while (fix)
6097 Mfix * ftmp;
6098 Mfix * fdel;
6099 Mfix * last_added_fix;
6100 Mfix * last_barrier = NULL;
6101 Mfix * this_fix;
6103 /* Skip any further barriers before the next fix. */
6104 while (fix && GET_CODE (fix->insn) == BARRIER)
6105 fix = fix->next;
6107 /* No more fixes. */
6108 if (fix == NULL)
6109 break;
6111 last_added_fix = NULL;
6113 for (ftmp = fix; ftmp; ftmp = ftmp->next)
6115 if (GET_CODE (ftmp->insn) == BARRIER)
6117 if (ftmp->address >= minipool_vector_head->max_address)
6118 break;
6120 last_barrier = ftmp;
6122 else if ((ftmp->minipool = add_minipool_forward_ref (ftmp)) == NULL)
6123 break;
6125 last_added_fix = ftmp; /* Keep track of the last fix added. */
6128 /* If we found a barrier, drop back to that; any fixes that we
6129 could have reached but come after the barrier will now go in
6130 the next mini-pool. */
6131 if (last_barrier != NULL)
6133 /* Reduce the refcount for those fixes that won't go into this
6134 pool after all. */
6135 for (fdel = last_barrier->next;
6136 fdel && fdel != ftmp;
6137 fdel = fdel->next)
6139 fdel->minipool->refcount--;
6140 fdel->minipool = NULL;
6143 ftmp = last_barrier;
6145 else
6147 /* ftmp is the first fix that we can't fit into this pool and
6148 there are no natural barriers that we could use. Insert a
6149 new barrier in the code somewhere between the previous
6150 fix and this one, and arrange to jump around it. */
6151 HOST_WIDE_INT max_address;
6153 /* The last item on the list of fixes must be a barrier, so
6154 we can never run off the end of the list of fixes without
6155 last_barrier being set. */
6156 if (ftmp == NULL)
6157 abort ();
6159 max_address = minipool_vector_head->max_address;
6160 /* Check that there isn't another fix that is in range that
6161 we couldn't fit into this pool because the pool was
6162 already too large: we need to put the pool before such an
6163 instruction. */
6164 if (ftmp->address < max_address)
6165 max_address = ftmp->address;
6167 last_barrier = create_fix_barrier (last_added_fix, max_address);
6170 assign_minipool_offsets (last_barrier);
6172 while (ftmp)
6174 if (GET_CODE (ftmp->insn) != BARRIER
6175 && ((ftmp->minipool = add_minipool_backward_ref (ftmp))
6176 == NULL))
6177 break;
6179 ftmp = ftmp->next;
6182 /* Scan over the fixes we have identified for this pool, fixing them
6183 up and adding the constants to the pool itself. */
6184 for (this_fix = fix; this_fix && ftmp != this_fix;
6185 this_fix = this_fix->next)
6186 if (GET_CODE (this_fix->insn) != BARRIER)
6188 rtx addr
6189 = plus_constant (gen_rtx_LABEL_REF (VOIDmode,
6190 minipool_vector_label),
6191 this_fix->minipool->offset);
6192 *this_fix->loc = gen_rtx_MEM (this_fix->mode, addr);
6195 dump_minipool (last_barrier->insn);
6196 fix = ftmp;
6199 /* From now on we must synthesize any constants that we can't handle
6200 directly. This can happen if the RTL gets split during final
6201 instruction generation. */
6202 after_arm_reorg = 1;
6204 /* Free the minipool memory. */
6205 obstack_free (&minipool_obstack, minipool_startobj);
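/* Illustrative sketch only (labels invented for the example): after this
   pass, a constant that cannot be built from immediate operands is
   loaded PC-relative from a minipool dumped at a barrier, e.g.

	ldr	r3, .LP0	@ PC-relative load from the minipool
	...
	b	.LSKIP		@ branch around the pool
   .LP0:
	.word	0x12345678
   .LSKIP:
*/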
6208 /* Routines to output assembly language. */
6210 /* If the rtx is one of the valid FPA constants, return its string representation.
6211 In this way we can ensure that valid double constants are generated even
6212 when cross compiling. */
6214 const char *
6215 fp_immediate_constant (x)
6216 rtx x;
6218 REAL_VALUE_TYPE r;
6219 int i;
6221 if (!fpa_consts_inited)
6222 init_fpa_table ();
6224 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
6225 for (i = 0; i < 8; i++)
6226 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
6227 return strings_fpa[i];
6229 abort ();
6232 /* As for fp_immediate_constant, but value is passed directly, not in rtx. */
6234 static const char *
6235 fp_const_from_val (r)
6236 REAL_VALUE_TYPE * r;
6238 int i;
6240 if (!fpa_consts_inited)
6241 init_fpa_table ();
6243 for (i = 0; i < 8; i++)
6244 if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
6245 return strings_fpa[i];
6247 abort ();
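/* For reference, the eight FPA immediates set up by init_fpa_table are
   believed to be 0, 1, 2, 3, 4, 5, 0.5 and 10; the two routines above
   can only ever return one of those strings.  */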
6250 /* Output the operands of a LDM/STM instruction to STREAM.
6251 MASK is the ARM register set mask of which only bits 0-15 are important.
6252 REG is the base register, either the frame pointer or the stack pointer.
6253 INSTR is the possibly suffixed load or store instruction. */
6255 static void
6256 print_multi_reg (stream, instr, reg, mask)
6257 FILE * stream;
6258 const char * instr;
6259 int reg;
6260 int mask;
6262 int i;
6263 int not_first = FALSE;
6265 fputc ('\t', stream);
6266 asm_fprintf (stream, instr, reg);
6267 fputs (", {", stream);
6269 for (i = 0; i <= LAST_ARM_REGNUM; i++)
6270 if (mask & (1 << i))
6272 if (not_first)
6273 fprintf (stream, ", ");
6275 asm_fprintf (stream, "%r", i);
6276 not_first = TRUE;
6279 fprintf (stream, "}%s\n", TARGET_APCS_32 ? "" : "^");
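/* Example (illustrative): the call
	print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, 0x4030);
   prints
	ldmfd	sp!, {r4, r5, lr}
   with a "^" appended in 26-bit (non-APCS-32) mode.  */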
6282 /* Output a 'call' insn. */
6284 const char *
6285 output_call (operands)
6286 rtx * operands;
6288 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
6290 if (REGNO (operands[0]) == LR_REGNUM)
6292 operands[0] = gen_rtx_REG (SImode, IP_REGNUM);
6293 output_asm_insn ("mov%?\t%0, %|lr", operands);
6296 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
6298 if (TARGET_INTERWORK)
6299 output_asm_insn ("bx%?\t%0", operands);
6300 else
6301 output_asm_insn ("mov%?\t%|pc, %0", operands);
6303 return "";
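/* Example (illustrative): for a call through r2 the non-interworking
   case emits
	mov	lr, pc
	mov	pc, r2
   Reading the PC yields the address two instructions ahead, so the
   first MOV leaves the correct return address in LR.  With
   TARGET_INTERWORK the final instruction is "bx r2" instead.  */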
6306 static int
6307 eliminate_lr2ip (x)
6308 rtx * x;
6310 int something_changed = 0;
6311 rtx x0 = * x;
6312 int code = GET_CODE (x0);
6313 int i, j;
6314 const char * fmt;
6316 switch (code)
6318 case REG:
6319 if (REGNO (x0) == LR_REGNUM)
6321 *x = gen_rtx_REG (SImode, IP_REGNUM);
6322 return 1;
6324 return 0;
6325 default:
6326 /* Scan through the sub-elements and change any references there. */
6327 fmt = GET_RTX_FORMAT (code);
6329 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6330 if (fmt[i] == 'e')
6331 something_changed |= eliminate_lr2ip (&XEXP (x0, i));
6332 else if (fmt[i] == 'E')
6333 for (j = 0; j < XVECLEN (x0, i); j++)
6334 something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));
6336 return something_changed;
6340 /* Output a 'call' insn that is a reference in memory. */
6342 const char *
6343 output_call_mem (operands)
6344 rtx * operands;
6346 operands[0] = copy_rtx (operands[0]); /* Be ultra careful. */
6347 /* Handle calls using lr by using ip (which may be clobbered in subr anyway). */
6348 if (eliminate_lr2ip (&operands[0]))
6349 output_asm_insn ("mov%?\t%|ip, %|lr", operands);
6351 if (TARGET_INTERWORK)
6353 output_asm_insn ("ldr%?\t%|ip, %0", operands);
6354 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
6355 output_asm_insn ("bx%?\t%|ip", operands);
6357 else
6359 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
6360 output_asm_insn ("ldr%?\t%|pc, %0", operands);
6363 return "";
6367 /* Output a move from arm registers to an fpu register.
6368 OPERANDS[0] is an fpu register.
6369 OPERANDS[1] is the first of the three arm registers holding the value. */
6371 const char *
6372 output_mov_long_double_fpu_from_arm (operands)
6373 rtx * operands;
6375 int arm_reg0 = REGNO (operands[1]);
6376 rtx ops[3];
6378 if (arm_reg0 == IP_REGNUM)
6379 abort ();
6381 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6382 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6383 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
6385 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
6386 output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);
6388 return "";
6391 /* Output a move from an fpu register to arm registers.
6392 OPERANDS[0] is the first of the three arm registers receiving the value.
6393 OPERANDS[1] is an fpu register. */
6395 const char *
6396 output_mov_long_double_arm_from_fpu (operands)
6397 rtx * operands;
6399 int arm_reg0 = REGNO (operands[0]);
6400 rtx ops[3];
6402 if (arm_reg0 == IP_REGNUM)
6403 abort ();
6405 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6406 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6407 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
6409 output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
6410 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
6411 return "";
6414 /* Output a move of a long double from arm registers to arm registers.
6415 OPERANDS[0] is the destination.
6416 OPERANDS[1] is the source. */
6418 const char *
6419 output_mov_long_double_arm_from_arm (operands)
6420 rtx * operands;
6422 /* We have to be careful here because the two might overlap. */
6423 int dest_start = REGNO (operands[0]);
6424 int src_start = REGNO (operands[1]);
6425 rtx ops[2];
6426 int i;
6428 if (dest_start < src_start)
6430 for (i = 0; i < 3; i++)
6432 ops[0] = gen_rtx_REG (SImode, dest_start + i);
6433 ops[1] = gen_rtx_REG (SImode, src_start + i);
6434 output_asm_insn ("mov%?\t%0, %1", ops);
6437 else
6439 for (i = 2; i >= 0; i--)
6441 ops[0] = gen_rtx_REG (SImode, dest_start + i);
6442 ops[1] = gen_rtx_REG (SImode, src_start + i);
6443 output_asm_insn ("mov%?\t%0, %1", ops);
6447 return "";
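/* Example of the overlap handling above: moving r0-r2 into r1-r3
   copies downwards (mov r3, r2; mov r2, r1; mov r1, r0) so that no
   source register is clobbered before it has been read.  */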
6451 /* Output a move from arm registers to an fpu register.
6452 OPERANDS[0] is an fpu register.
6453 OPERANDS[1] is the first register of an arm register pair. */
6455 const char *
6456 output_mov_double_fpu_from_arm (operands)
6457 rtx * operands;
6459 int arm_reg0 = REGNO (operands[1]);
6460 rtx ops[2];
6462 if (arm_reg0 == IP_REGNUM)
6463 abort ();
6465 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6466 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6467 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
6468 output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
6469 return "";
6472 /* Output a move from an fpu register to arm registers.
6473 OPERANDS[0] is the first register of an arm register pair.
6474 OPERANDS[1] is an fpu register. */
6476 const char *
6477 output_mov_double_arm_from_fpu (operands)
6478 rtx * operands;
6480 int arm_reg0 = REGNO (operands[0]);
6481 rtx ops[2];
6483 if (arm_reg0 == IP_REGNUM)
6484 abort ();
6486 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6487 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6488 output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
6489 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
6490 return "";
6493 /* Output a move between double words.
6494 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
6495 or MEM<-REG and all MEMs must be offsettable addresses. */
6497 const char *
6498 output_move_double (operands)
6499 rtx * operands;
6501 enum rtx_code code0 = GET_CODE (operands[0]);
6502 enum rtx_code code1 = GET_CODE (operands[1]);
6503 rtx otherops[3];
6505 if (code0 == REG)
6507 int reg0 = REGNO (operands[0]);
6509 otherops[0] = gen_rtx_REG (SImode, 1 + reg0);
6511 if (code1 == REG)
6513 int reg1 = REGNO (operands[1]);
6514 if (reg1 == IP_REGNUM)
6515 abort ();
6517 /* Ensure the second source is not overwritten. */
6518 if (reg1 == reg0 + (WORDS_BIG_ENDIAN ? -1 : 1))
6519 output_asm_insn ("mov%?\t%Q0, %Q1\n\tmov%?\t%R0, %R1", operands);
6520 else
6521 output_asm_insn ("mov%?\t%R0, %R1\n\tmov%?\t%Q0, %Q1", operands);
6523 else if (code1 == CONST_DOUBLE)
6525 if (GET_MODE (operands[1]) == DFmode)
6527 long l[2];
6528 union real_extract u;
6530 memcpy (&u, &CONST_DOUBLE_LOW (operands[1]), sizeof (u));
6531 REAL_VALUE_TO_TARGET_DOUBLE (u.d, l);
6532 otherops[1] = GEN_INT (l[1]);
6533 operands[1] = GEN_INT (l[0]);
6535 else if (GET_MODE (operands[1]) != VOIDmode)
6536 abort ();
6537 else if (WORDS_BIG_ENDIAN)
6539 otherops[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
6540 operands[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
6542 else
6544 otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
6545 operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
6548 output_mov_immediate (operands);
6549 output_mov_immediate (otherops);
6551 else if (code1 == CONST_INT)
6553 #if HOST_BITS_PER_WIDE_INT > 32
6554 /* If HOST_WIDE_INT is more than 32 bits, the intval tells us
6555 what the upper word is. */
6556 if (WORDS_BIG_ENDIAN)
6558 otherops[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
6559 operands[1] = GEN_INT (INTVAL (operands[1]) >> 32);
6561 else
6563 otherops[1] = GEN_INT (INTVAL (operands[1]) >> 32);
6564 operands[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
6566 #else
6567 /* Sign extend the intval into the high-order word. */
6568 if (WORDS_BIG_ENDIAN)
6570 otherops[1] = operands[1];
6571 operands[1] = (INTVAL (operands[1]) < 0
6572 ? constm1_rtx : const0_rtx);
6574 else
6575 otherops[1] = INTVAL (operands[1]) < 0 ? constm1_rtx : const0_rtx;
6576 #endif
6577 output_mov_immediate (otherops);
6578 output_mov_immediate (operands);
6580 else if (code1 == MEM)
6582 switch (GET_CODE (XEXP (operands[1], 0)))
6584 case REG:
6585 output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
6586 break;
6588 case PRE_INC:
6589 abort (); /* Should never happen now. */
6590 break;
6592 case PRE_DEC:
6593 output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
6594 break;
6596 case POST_INC:
6597 output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
6598 break;
6600 case POST_DEC:
6601 abort (); /* Should never happen now. */
6602 break;
6604 case LABEL_REF:
6605 case CONST:
6606 output_asm_insn ("adr%?\t%0, %1", operands);
6607 output_asm_insn ("ldm%?ia\t%0, %M0", operands);
6608 break;
6610 default:
6611 if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1),
6612 GET_MODE (XEXP (XEXP (operands[1], 0), 1))))
6614 otherops[0] = operands[0];
6615 otherops[1] = XEXP (XEXP (operands[1], 0), 0);
6616 otherops[2] = XEXP (XEXP (operands[1], 0), 1);
6618 if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
6620 if (GET_CODE (otherops[2]) == CONST_INT)
6622 switch (INTVAL (otherops[2]))
6624 case -8:
6625 output_asm_insn ("ldm%?db\t%1, %M0", otherops);
6626 return "";
6627 case -4:
6628 output_asm_insn ("ldm%?da\t%1, %M0", otherops);
6629 return "";
6630 case 4:
6631 output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
6632 return "";
6635 if (!(const_ok_for_arm (INTVAL (otherops[2]))))
6636 output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
6637 else
6638 output_asm_insn ("add%?\t%0, %1, %2", otherops);
6640 else
6641 output_asm_insn ("add%?\t%0, %1, %2", otherops);
6643 else
6644 output_asm_insn ("sub%?\t%0, %1, %2", otherops);
6646 return "ldm%?ia\t%0, %M0";
6648 else
6650 otherops[1] = adjust_address (operands[1], VOIDmode, 4);
6651 /* Take care of overlapping base/data reg. */
6652 if (reg_mentioned_p (operands[0], operands[1]))
6654 output_asm_insn ("ldr%?\t%0, %1", otherops);
6655 output_asm_insn ("ldr%?\t%0, %1", operands);
6657 else
6659 output_asm_insn ("ldr%?\t%0, %1", operands);
6660 output_asm_insn ("ldr%?\t%0, %1", otherops);
6665 else
6666 abort (); /* Constraints should prevent this. */
6668 else if (code0 == MEM && code1 == REG)
6670 if (REGNO (operands[1]) == IP_REGNUM)
6671 abort ();
6673 switch (GET_CODE (XEXP (operands[0], 0)))
6675 case REG:
6676 output_asm_insn ("stm%?ia\t%m0, %M1", operands);
6677 break;
6679 case PRE_INC:
6680 abort (); /* Should never happen now. */
6681 break;
6683 case PRE_DEC:
6684 output_asm_insn ("stm%?db\t%m0!, %M1", operands);
6685 break;
6687 case POST_INC:
6688 output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
6689 break;
6691 case POST_DEC:
6692 abort (); /* Should never happen now. */
6693 break;
6695 case PLUS:
6696 if (GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT)
6698 switch (INTVAL (XEXP (XEXP (operands[0], 0), 1)))
6700 case -8:
6701 output_asm_insn ("stm%?db\t%m0, %M1", operands);
6702 return "";
6704 case -4:
6705 output_asm_insn ("stm%?da\t%m0, %M1", operands);
6706 return "";
6708 case 4:
6709 output_asm_insn ("stm%?ib\t%m0, %M1", operands);
6710 return "";
6713 /* Fall through */
6715 default:
6716 otherops[0] = adjust_address (operands[0], VOIDmode, 4);
6717 otherops[1] = gen_rtx_REG (SImode, 1 + REGNO (operands[1]));
6718 output_asm_insn ("str%?\t%1, %0", operands);
6719 output_asm_insn ("str%?\t%1, %0", otherops);
6722 else
6723 /* Constraints should prevent this. */
6724 abort ();
6726 return "";
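/* Example (illustrative): loading a DImode value into r0/r1 from
   (mem:DI (plus (reg:SI r2) (const_int 4))) hits the offset special
   case above and emits the single instruction
	ldmib	r2, {r0, r1}
   instead of an explicit address calculation followed by ldmia.  */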
6730 /* Output an arbitrary MOV reg, #n.
6731 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
6733 const char *
6734 output_mov_immediate (operands)
6735 rtx * operands;
6737 HOST_WIDE_INT n = INTVAL (operands[1]);
6739 /* Try to use one MOV. */
6740 if (const_ok_for_arm (n))
6741 output_asm_insn ("mov%?\t%0, %1", operands);
6743 /* Try to use one MVN. */
6744 else if (const_ok_for_arm (~n))
6746 operands[1] = GEN_INT (~n);
6747 output_asm_insn ("mvn%?\t%0, %1", operands);
6749 else
6751 int n_ones = 0;
6752 int i;
6754 /* If all else fails, make it out of ORRs or BICs as appropriate. */
6755 for (i = 0; i < 32; i ++)
6756 if (n & 1 << i)
6757 n_ones ++;
6759 if (n_ones > 16) /* Shorter to use MVN with BIC in this case. */
6760 output_multi_immediate (operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1, ~ n);
6761 else
6762 output_multi_immediate (operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1, n);
6765 return "";
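/* Example (illustrative): 0xffffff0f is not a valid ARM immediate, but
   its complement 0xf0 is, so the MVN branch above emits the single
   instruction
	mvn	r0, #240
   assuming the destination register is r0.  */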
6768 /* Output an ADD r, s, #n where n may be too big for one instruction.
6769 If N is zero and the source and destination registers are the same, output nothing. */
6771 const char *
6772 output_add_immediate (operands)
6773 rtx * operands;
6775 HOST_WIDE_INT n = INTVAL (operands[2]);
6777 if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
6779 if (n < 0)
6780 output_multi_immediate (operands,
6781 "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
6782 -n);
6783 else
6784 output_multi_immediate (operands,
6785 "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
6789 return "";
6792 /* Output a multiple immediate operation.
6793 OPERANDS is the vector of operands referred to in the output patterns.
6794 INSTR1 is the output pattern to use for the first constant.
6795 INSTR2 is the output pattern to use for subsequent constants.
6796 IMMED_OP is the index of the constant slot in OPERANDS.
6797 N is the constant value. */
6799 static const char *
6800 output_multi_immediate (operands, instr1, instr2, immed_op, n)
6801 rtx * operands;
6802 const char * instr1;
6803 const char * instr2;
6804 int immed_op;
6805 HOST_WIDE_INT n;
6807 #if HOST_BITS_PER_WIDE_INT > 32
6808 n &= 0xffffffff;
6809 #endif
6811 if (n == 0)
6813 /* Quick and easy output. */
6814 operands[immed_op] = const0_rtx;
6815 output_asm_insn (instr1, operands);
6817 else
6819 int i;
6820 const char * instr = instr1;
6822 /* Note that n is never zero here (which would give no output). */
6823 for (i = 0; i < 32; i += 2)
6825 if (n & (3 << i))
6827 operands[immed_op] = GEN_INT (n & (255 << i));
6828 output_asm_insn (instr, operands);
6829 instr = instr2;
6830 i += 6;
6835 return "";
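/* Worked example (illustrative): for OPERANDS[0] = r0, N = 0x000f000f,
   INSTR1 = "mov%?\t%0, %1" and INSTR2 = "orr%?\t%0, %0, %1", the loop
   above emits one instruction per 8-bit chunk of the constant:
	mov	r0, #15
	orr	r0, r0, #983040	@ 0x000f0000
*/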
6838 /* Return the appropriate ARM instruction for the operation code.
6839 The returned result should not be overwritten. OP is the rtx of the
6840 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
6841 was shifted. */
6843 const char *
6844 arithmetic_instr (op, shift_first_arg)
6845 rtx op;
6846 int shift_first_arg;
6848 switch (GET_CODE (op))
6850 case PLUS:
6851 return "add";
6853 case MINUS:
6854 return shift_first_arg ? "rsb" : "sub";
6856 case IOR:
6857 return "orr";
6859 case XOR:
6860 return "eor";
6862 case AND:
6863 return "and";
6865 default:
6866 abort ();
6870 /* Ensure valid constant shifts and return the appropriate shift mnemonic
6871 for the operation code. The returned result should not be overwritten.
6872 OP is the rtx code of the shift.
6873 On exit, *AMOUNTP will be -1 if the shift is by a register, or the
6874 constant shift amount otherwise. */
6876 static const char *
6877 shift_op (op, amountp)
6878 rtx op;
6879 HOST_WIDE_INT *amountp;
6881 const char * mnem;
6882 enum rtx_code code = GET_CODE (op);
6884 if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
6885 *amountp = -1;
6886 else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
6887 *amountp = INTVAL (XEXP (op, 1));
6888 else
6889 abort ();
6891 switch (code)
6893 case ASHIFT:
6894 mnem = "asl";
6895 break;
6897 case ASHIFTRT:
6898 mnem = "asr";
6899 break;
6901 case LSHIFTRT:
6902 mnem = "lsr";
6903 break;
6905 case ROTATERT:
6906 mnem = "ror";
6907 break;
6909 case MULT:
6910 /* We never have to worry about the amount being other than a
6911 power of 2, since this case can never be reloaded from a reg. */
6912 if (*amountp != -1)
6913 *amountp = int_log2 (*amountp);
6914 else
6915 abort ();
6916 return "asl";
6918 default:
6919 abort ();
6922 if (*amountp != -1)
6924 /* This is not 100% correct, but follows from the desire to merge
6925 multiplication by a power of 2 with the recognizer for a
6926 shift. >=32 is not a valid shift for "asl", so we must try and
6927 output a shift that produces the correct arithmetical result.
6928 Using lsr #32 is identical except for the fact that the carry bit
6929 is not set correctly if we set the flags; but we never use the
6930 carry bit from such an operation, so we can ignore that. */
6931 if (code == ROTATERT)
6932 /* Rotate is just modulo 32. */
6933 *amountp &= 31;
6934 else if (*amountp != (*amountp & 31))
6936 if (code == ASHIFT)
6937 mnem = "lsr";
6938 *amountp = 32;
6941 /* Shifts of 0 are no-ops. */
6942 if (*amountp == 0)
6943 return NULL;
6946 return mnem;
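/* Examples: (mult x 8) is returned as "asl" with *AMOUNTP set to 3,
   while an out-of-range (ashift x 40) is rewritten as "lsr" with
   *AMOUNTP set to 32, which yields the same all-zero result.  */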
6949 /* Obtain the shift count of POWER, which must be an exact power of two. */
6951 static HOST_WIDE_INT
6952 int_log2 (power)
6953 HOST_WIDE_INT power;
6955 HOST_WIDE_INT shift = 0;
6957 while ((((HOST_WIDE_INT) 1 << shift) & power) == 0)
6959 if (shift > 31)
6960 abort ();
6961 shift ++;
6964 return shift;
6967 /* Output a .ascii pseudo-op, keeping track of lengths. This is because
6968 /bin/as is horribly restrictive. */
6969 #define MAX_ASCII_LEN 51
6971 void
6972 output_ascii_pseudo_op (stream, p, len)
6973 FILE * stream;
6974 const unsigned char * p;
6975 int len;
6977 int i;
6978 int len_so_far = 0;
6980 fputs ("\t.ascii\t\"", stream);
6982 for (i = 0; i < len; i++)
6984 int c = p[i];
6986 if (len_so_far >= MAX_ASCII_LEN)
6988 fputs ("\"\n\t.ascii\t\"", stream);
6989 len_so_far = 0;
6992 switch (c)
6994 case TARGET_TAB:
6995 fputs ("\\t", stream);
6996 len_so_far += 2;
6997 break;
6999 case TARGET_FF:
7000 fputs ("\\f", stream);
7001 len_so_far += 2;
7002 break;
7004 case TARGET_BS:
7005 fputs ("\\b", stream);
7006 len_so_far += 2;
7007 break;
7009 case TARGET_CR:
7010 fputs ("\\r", stream);
7011 len_so_far += 2;
7012 break;
7014 case TARGET_NEWLINE:
7015 fputs ("\\n", stream);
7016 c = p [i + 1];
7017 if ((c >= ' ' && c <= '~')
7018 || c == TARGET_TAB)
7019 /* This is a good place for a line break. */
7020 len_so_far = MAX_ASCII_LEN;
7021 else
7022 len_so_far += 2;
7023 break;
7025 case '\"':
7026 case '\\':
7027 putc ('\\', stream);
7028 len_so_far++;
7029 /* drop through. */
7031 default:
7032 if (c >= ' ' && c <= '~')
7034 putc (c, stream);
7035 len_so_far++;
7037 else
7039 fprintf (stream, "\\%03o", c);
7040 len_so_far += 4;
7042 break;
7046 fputs ("\"\n", stream);
7049 /* Compute the register save mask for registers 0 through 12
7050 inclusive. This code is used by both arm_compute_save_reg_mask
7051 and arm_compute_initial_elimination_offset. */
7053 static unsigned long
7054 arm_compute_save_reg0_reg12_mask ()
7056 unsigned long func_type = arm_current_func_type ();
7057 unsigned int save_reg_mask = 0;
7058 unsigned int reg;
7060 if (IS_INTERRUPT (func_type))
7062 unsigned int max_reg;
7063 /* Interrupt functions must not corrupt any registers,
7064 even call clobbered ones. If this is a leaf function
7065 we can just examine the registers used by the RTL, but
7066 otherwise we have to assume that whatever function is
7067 called might clobber anything, and so we have to save
7068 all the call-clobbered registers as well. */
7069 if (ARM_FUNC_TYPE (func_type) == ARM_FT_FIQ)
7070 /* FIQ handlers have registers r8 - r12 banked, so
7071 we only need to check r0 - r7. Normal ISRs only
7072 bank r14 and r15, so we must check up to r12.
7073 r13 is the stack pointer which is always preserved,
7074 so we do not need to consider it here. */
7075 max_reg = 7;
7076 else
7077 max_reg = 12;
7079 for (reg = 0; reg <= max_reg; reg++)
7080 if (regs_ever_live[reg]
7081 || (! current_function_is_leaf && call_used_regs [reg]))
7082 save_reg_mask |= (1 << reg);
7084 else
7086 /* In the normal case we only need to save those registers
7087 which are call saved and which are used by this function. */
7088 for (reg = 0; reg <= 10; reg++)
7089 if (regs_ever_live[reg] && ! call_used_regs [reg])
7090 save_reg_mask |= (1 << reg);
7092 /* Handle the frame pointer as a special case. */
7093 if (! TARGET_APCS_FRAME
7094 && ! frame_pointer_needed
7095 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
7096 && ! call_used_regs[HARD_FRAME_POINTER_REGNUM])
7097 save_reg_mask |= 1 << HARD_FRAME_POINTER_REGNUM;
7099 /* If we aren't loading the PIC register,
7100 don't stack it even though it may be live. */
7101 if (flag_pic
7102 && ! TARGET_SINGLE_PIC_BASE
7103 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
7104 save_reg_mask |= 1 << PIC_OFFSET_TABLE_REGNUM;
7107 return save_reg_mask;
7110 /* Compute a bit mask of which registers need to be
7111 saved on the stack for the current function. */
7113 static unsigned long
7114 arm_compute_save_reg_mask ()
7116 unsigned int save_reg_mask = 0;
7117 unsigned long func_type = arm_current_func_type ();
7119 if (IS_NAKED (func_type))
7120 /* This should never really happen. */
7121 return 0;
7123 /* If we are creating a stack frame, then we must save the frame pointer,
7124 IP (which will hold the old stack pointer), LR and the PC. */
7125 if (frame_pointer_needed)
7126 save_reg_mask |=
7127 (1 << ARM_HARD_FRAME_POINTER_REGNUM)
7128 | (1 << IP_REGNUM)
7129 | (1 << LR_REGNUM)
7130 | (1 << PC_REGNUM);
7132 /* Volatile functions do not return, so there
7133 is no need to save any other registers. */
7134 if (IS_VOLATILE (func_type))
7135 return save_reg_mask;
7137 save_reg_mask |= arm_compute_save_reg0_reg12_mask ();
7139 /* Decide if we need to save the link register.
7140 Interrupt routines have their own banked link register,
7141 so they never need to save it.
7142 Otherwise, if we do not use the link register, we do not need to save
7143 it. If we are pushing other registers onto the stack however, we
7144 can save an instruction in the epilogue by pushing the link register
7145 now and then popping it back into the PC. This incurs extra memory
7146 accesses though, so we only do it when optimising for size, and only
7147 if we know that we will not need a fancy return sequence. */
7148 if (! IS_INTERRUPT (func_type)
7149 && (regs_ever_live [LR_REGNUM]
7150 || (save_reg_mask
7151 && optimize_size
7152 && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL)))
7153 save_reg_mask |= 1 << LR_REGNUM;
7155 if (cfun->machine->lr_save_eliminated)
7156 save_reg_mask &= ~ (1 << LR_REGNUM);
7158 return save_reg_mask;
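/* Example (illustrative): a normal function that uses r4 and r7 and
   must save its return address, with no frame pointer, gets the mask
   (1 << 4) | (1 << 7) | (1 << LR_REGNUM) == 0x4090.  */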
7161 /* Generate a function exit sequence. If REALLY_RETURN is false, then do
7162 everything bar the final return instruction. */
7164 const char *
7165 output_return_instruction (operand, really_return, reverse)
7166 rtx operand;
7167 int really_return;
7168 int reverse;
7170 char conditional[10];
7171 char instr[100];
7172 int reg;
7173 unsigned long live_regs_mask;
7174 unsigned long func_type;
7176 func_type = arm_current_func_type ();
7178 if (IS_NAKED (func_type))
7179 return "";
7181 if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
7183 /* If this function was declared non-returning, and we have found a tail
7184 call, then we have to trust that the called function won't return. */
7185 if (really_return)
7187 rtx ops[2];
7189 /* Otherwise, trap an attempted return by aborting. */
7190 ops[0] = operand;
7191 ops[1] = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)"
7192 : "abort");
7193 assemble_external_libcall (ops[1]);
7194 output_asm_insn (reverse ? "bl%D0\t%a1" : "bl%d0\t%a1", ops);
7197 return "";
7200 if (current_function_calls_alloca && !really_return)
7201 abort ();
7203 /* Construct the conditional part of the instruction(s) to be emitted. */
7204 sprintf (conditional, "%%?%%%c0", reverse ? 'D' : 'd');
7206 return_used_this_function = 1;
7208 live_regs_mask = arm_compute_save_reg_mask ();
7210 /* On some ARM architectures it is faster to use LDR rather than LDM to
7211 load a single register. On other architectures, the cost is the same.
7212 In 26 bit mode we have to use LDM in order to be able to restore the CPSR. */
7213 if ((live_regs_mask == (1 << LR_REGNUM))
7214 && ! TARGET_INTERWORK
7215 && ! IS_INTERRUPT (func_type)
7216 && (! really_return || TARGET_APCS_32))
7218 if (! really_return)
7219 sprintf (instr, "ldr%s\t%%|lr, [%%|sp], #4", conditional);
7220 else
7221 sprintf (instr, "ldr%s\t%%|pc, [%%|sp], #4", conditional);
7223 else if (live_regs_mask)
7225 if ((live_regs_mask & (1 << IP_REGNUM)) == (1 << IP_REGNUM))
7226 /* There are two possible reasons for the IP register being saved.
7227 Either a stack frame was created, in which case IP contains the
7228 old stack pointer, or an ISR routine corrupted it. If this is an
7229 ISR routine then just restore IP, otherwise restore IP into SP. */
7230 if (! IS_INTERRUPT (func_type))
7232 live_regs_mask &= ~ (1 << IP_REGNUM);
7233 live_regs_mask |= (1 << SP_REGNUM);
7236 /* Generate the load multiple instruction to restore the registers. */
7237 if (frame_pointer_needed)
7238 sprintf (instr, "ldm%sea\t%%|fp, {", conditional);
7239 else
7240 sprintf (instr, "ldm%sfd\t%%|sp!, {", conditional);
7242 for (reg = 0; reg <= SP_REGNUM; reg++)
7243 if (live_regs_mask & (1 << reg))
7245 strcat (instr, "%|");
7246 strcat (instr, reg_names[reg]);
7247 strcat (instr, ", ");
7250 if ((live_regs_mask & (1 << LR_REGNUM)) == 0)
7252 /* If we are not restoring the LR register then we will
7253 have added one too many commas to the list above.
7254 Replace it with a closing brace. */
7255 instr [strlen (instr) - 2] = '}';
7257 else
7259 strcat (instr, "%|");
7261 /* At this point there should only be one or two registers left in
7262 live_regs_mask: always LR, and possibly PC if we created a stack
7263 frame. LR contains the return address. If we do not have any
7264 special requirements for function exit (e.g. interworking, or an ISR)
7265 then we can load this value directly into the PC and save an
7266 instruction. */
7267 if (! TARGET_INTERWORK
7268 && ! IS_INTERRUPT (func_type)
7269 && really_return)
7270 strcat (instr, reg_names [PC_REGNUM]);
7271 else
7272 strcat (instr, reg_names [LR_REGNUM]);
7274 strcat (instr, (TARGET_APCS_32 || !really_return) ? "}" : "}^");
7277 if (really_return)
7279 /* See if we need to generate an extra instruction to
7280 perform the actual function return. */
7281 switch ((int) ARM_FUNC_TYPE (func_type))
7283 case ARM_FT_ISR:
7284 case ARM_FT_FIQ:
7285 output_asm_insn (instr, & operand);
7287 strcpy (instr, "sub");
7288 strcat (instr, conditional);
7289 strcat (instr, "s\t%|pc, %|lr, #4");
7290 break;
7292 case ARM_FT_EXCEPTION:
7293 output_asm_insn (instr, & operand);
7295 strcpy (instr, "mov");
7296 strcat (instr, conditional);
7297 strcat (instr, "s\t%|pc, %|lr");
7298 break;
7300 case ARM_FT_INTERWORKED:
7301 output_asm_insn (instr, & operand);
7303 strcpy (instr, "bx");
7304 strcat (instr, conditional);
7305 strcat (instr, "\t%|lr");
7306 break;
7308 default:
7309 /* The return has already been handled
7310 by loading the LR into the PC. */
7311 if ((live_regs_mask & (1 << LR_REGNUM)) == 0)
7313 output_asm_insn (instr, & operand);
7315 strcpy (instr, "mov");
7316 strcat (instr, conditional);
7317 if (! TARGET_APCS_32)
7318 strcat (instr, "s");
7319 strcat (instr, "\t%|pc, %|lr");
7321 break;
7325 else if (really_return)
7327 switch ((int) ARM_FUNC_TYPE (func_type))
7329 case ARM_FT_ISR:
7330 case ARM_FT_FIQ:
7331 sprintf (instr, "sub%ss\t%%|pc, %%|lr, #4", conditional);
7332 break;
7334 case ARM_FT_INTERWORKED:
7335 sprintf (instr, "bx%s\t%%|lr", conditional);
7336 break;
7338 case ARM_FT_EXCEPTION:
7339 sprintf (instr, "mov%ss\t%%|pc, %%|lr", conditional);
7340 break;
7342 default:
7343 sprintf (instr, "mov%s%s\t%%|pc, %%|lr",
7344 conditional, TARGET_APCS_32 ? "" : "s");
7345 break;
7348 else
7349 /* Nothing to load off the stack, and
7350 no return instruction to generate. */
7351 return "";
7353 output_asm_insn (instr, & operand);
7355 return "";
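/* Typical outputs (illustrative): a normal function that pushed r4 and
   the link register returns with
	ldmfd	sp!, {r4, pc}
   a leaf that stacked only LR uses "ldr pc, [sp], #4", and an ISR with
   nothing stacked returns with "subs pc, lr, #4".  */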
7358 /* Write the function name into the code section, directly preceding
7359 the function prologue.
7361 Code will be output similar to this:
7363 .ascii "arm_poke_function_name", 0
7364 .align
7366 .word 0xff000000 + (t1 - t0)
7367 arm_poke_function_name
7368 mov ip, sp
7369 stmfd sp!, {fp, ip, lr, pc}
7370 sub fp, ip, #4
7372 When performing a stack backtrace, code can inspect the value
7373 of 'pc' stored at 'fp' + 0. If the trace function then looks
7374 at location pc - 12 and the top 8 bits are set, then we know
7375 that there is a function name embedded immediately preceding this
7376 location, whose length is given by (pc[-3] & ~0xff000000).
7378 We assume that pc is declared as a pointer to an unsigned long.
7380 It is of no benefit to output the function name if we are assembling
7381 a leaf function. These function types will not contain a stack
7382 backtrace structure, so it is not possible to determine the
7383 function name. */
7385 void
7386 arm_poke_function_name (stream, name)
7387 FILE * stream;
7388 const char * name;
7390 unsigned long alignlength;
7391 unsigned long length;
7392 rtx x;
7394 length = strlen (name) + 1;
7395 alignlength = ROUND_UP (length);
7397 ASM_OUTPUT_ASCII (stream, name, length);
7398 ASM_OUTPUT_ALIGN (stream, 2);
7399 x = GEN_INT ((unsigned HOST_WIDE_INT) 0xff000000 + alignlength);
7400 assemble_aligned_integer (UNITS_PER_WORD, x);
7403 /* Place some comments into the assembler stream
7404 describing the current function. */
7406 static void
7407 arm_output_function_prologue (f, frame_size)
7408 FILE * f;
7409 HOST_WIDE_INT frame_size;
7411 unsigned long func_type;
7413 if (!TARGET_ARM)
7415 thumb_output_function_prologue (f, frame_size);
7416 return;
7419 /* Sanity check. */
7420 if (arm_ccfsm_state || arm_target_insn)
7421 abort ();
7423 func_type = arm_current_func_type ();
7425 switch ((int) ARM_FUNC_TYPE (func_type))
7427 default:
7428 case ARM_FT_NORMAL:
7429 break;
7430 case ARM_FT_INTERWORKED:
7431 asm_fprintf (f, "\t%@ Function supports interworking.\n");
7432 break;
7433 case ARM_FT_EXCEPTION_HANDLER:
7434 asm_fprintf (f, "\t%@ C++ Exception Handler.\n");
7435 break;
7436 case ARM_FT_ISR:
7437 asm_fprintf (f, "\t%@ Interrupt Service Routine.\n");
7438 break;
7439 case ARM_FT_FIQ:
7440 asm_fprintf (f, "\t%@ Fast Interrupt Service Routine.\n");
7441 break;
7442 case ARM_FT_EXCEPTION:
7443 asm_fprintf (f, "\t%@ ARM Exception Handler.\n");
7444 break;
7447 if (IS_NAKED (func_type))
7448 asm_fprintf (f, "\t%@ Naked Function: prologue and epilogue provided by programmer.\n");
7450 if (IS_VOLATILE (func_type))
7451 asm_fprintf (f, "\t%@ Volatile: function does not return.\n");
7453 if (IS_NESTED (func_type))
7454 asm_fprintf (f, "\t%@ Nested: function declared inside another function.\n");
7456 asm_fprintf (f, "\t%@ args = %d, pretend = %d, frame = %d\n",
7457 current_function_args_size,
7458 current_function_pretend_args_size, frame_size);
7460 asm_fprintf (f, "\t%@ frame_needed = %d, current_function_anonymous_args = %d\n",
7461 frame_pointer_needed,
7462 current_function_anonymous_args);
7464 if (cfun->machine->lr_save_eliminated)
7465 asm_fprintf (f, "\t%@ link register save eliminated.\n");
7467 #ifdef AOF_ASSEMBLER
7468 if (flag_pic)
7469 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, PIC_OFFSET_TABLE_REGNUM);
7470 #endif
7472 return_used_this_function = 0;
7475 const char *
7476 arm_output_epilogue (really_return)
7477 int really_return;
7479 int reg;
7480 unsigned long saved_regs_mask;
7481 unsigned long func_type;
7482 /* If we need this, then it will always be at least this much. */
7483 int floats_offset = 12;
7484 rtx operands[3];
7485 int frame_size = get_frame_size ();
7486 FILE * f = asm_out_file;
7487 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
7489 /* If we have already generated the return instruction
7490 then it is futile to generate anything else. */
7491 if (use_return_insn (FALSE) && return_used_this_function)
7492 return "";
7494 func_type = arm_current_func_type ();
7496 if (IS_NAKED (func_type))
7497 /* Naked functions don't have epilogues. */
7498 return "";
7500 if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
7502 rtx op;
7504 /* A volatile function should never return. Call abort. */
7505 op = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)" : "abort");
7506 assemble_external_libcall (op);
7507 output_asm_insn ("bl\t%a0", &op);
7509 return "";
7512 if (ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER
7513 && ! really_return)
7514 /* If we are throwing an exception, then we really must
7515 be doing a return, so we can't tail-call. */
7516 abort ();
7518 saved_regs_mask = arm_compute_save_reg_mask ();
7520 /* Compute how far away the floats will be. */
7521 for (reg = 0; reg <= LAST_ARM_REGNUM; reg ++)
7522 if (saved_regs_mask & (1 << reg))
7523 floats_offset += 4;
7525 if (frame_pointer_needed)
7527 if (arm_fpu_arch == FP_SOFT2)
7529 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
7530 if (regs_ever_live[reg] && !call_used_regs[reg])
7532 floats_offset += 12;
7533 asm_fprintf (f, "\tldfe\t%r, [%r, #-%d]\n",
7534 reg, FP_REGNUM, floats_offset);
7537 else
7539 int start_reg = LAST_ARM_FP_REGNUM;
7541 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
7543 if (regs_ever_live[reg] && !call_used_regs[reg])
7545 floats_offset += 12;
7547 /* We can't unstack more than four registers at once. */
7548 if (start_reg - reg == 3)
7550 asm_fprintf (f, "\tlfm\t%r, 4, [%r, #-%d]\n",
7551 reg, FP_REGNUM, floats_offset);
7552 start_reg = reg - 1;
7555 else
7557 if (reg != start_reg)
7558 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
7559 reg + 1, start_reg - reg,
7560 FP_REGNUM, floats_offset);
7561 start_reg = reg - 1;
7565 /* Just in case the last register checked also needs unstacking. */
7566 if (reg != start_reg)
7567 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
7568 reg + 1, start_reg - reg,
7569 FP_REGNUM, floats_offset);
7572 /* saved_regs_mask should contain the IP, which at the time of stack
7573 frame generation actually contains the old stack pointer. So a
7574 quick way to unwind the stack is just pop the IP register directly
7575 into the stack pointer. */
7576 if ((saved_regs_mask & (1 << IP_REGNUM)) == 0)
7577 abort ();
7578 saved_regs_mask &= ~ (1 << IP_REGNUM);
7579 saved_regs_mask |= (1 << SP_REGNUM);
7581 /* There are two registers left in saved_regs_mask - LR and PC. We
7582 only need to restore the LR register (the return address), but to
7583 save time we can load it directly into the PC, unless we need a
7584 special function exit sequence, or we are not really returning. */
7585 if (really_return && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL)
7586 /* Delete the LR from the register mask, so that the LR on
7587 the stack is loaded into the PC in the register mask. */
7588 saved_regs_mask &= ~ (1 << LR_REGNUM);
7589 else
7590 saved_regs_mask &= ~ (1 << PC_REGNUM);
7592 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, saved_regs_mask);
7594 if (IS_INTERRUPT (func_type))
7595 /* Interrupt handlers will have pushed the
7596 IP onto the stack, so restore it now. */
7597 print_multi_reg (f, "ldmfd\t%r", SP_REGNUM, 1 << IP_REGNUM);
7599 else
7601 /* Restore stack pointer if necessary. */
7602 if (frame_size + current_function_outgoing_args_size != 0)
7604 operands[0] = operands[1] = stack_pointer_rtx;
7605 operands[2] = GEN_INT (frame_size
7606 + current_function_outgoing_args_size);
7607 output_add_immediate (operands);
7610 if (arm_fpu_arch == FP_SOFT2)
7612 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
7613 if (regs_ever_live[reg] && !call_used_regs[reg])
7614 asm_fprintf (f, "\tldfe\t%r, [%r], #12\n",
7615 reg, SP_REGNUM);
7617 else
7619 int start_reg = FIRST_ARM_FP_REGNUM;
7621 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
7623 if (regs_ever_live[reg] && !call_used_regs[reg])
7625 if (reg - start_reg == 3)
7627 asm_fprintf (f, "\tlfmfd\t%r, 4, [%r]!\n",
7628 start_reg, SP_REGNUM);
7629 start_reg = reg + 1;
7632 else
7634 if (reg != start_reg)
7635 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
7636 start_reg, reg - start_reg,
7637 SP_REGNUM);
7639 start_reg = reg + 1;
7643 /* Just in case the last register checked also needs unstacking. */
7644 if (reg != start_reg)
7645 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
7646 start_reg, reg - start_reg, SP_REGNUM);
7649 /* If we can, restore the LR into the PC. */
7650 if (ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
7651 && really_return
7652 && current_function_pretend_args_size == 0
7653 && saved_regs_mask & (1 << LR_REGNUM))
7655 saved_regs_mask &= ~ (1 << LR_REGNUM);
7656 saved_regs_mask |= (1 << PC_REGNUM);
7659 /* Load the registers off the stack. If we only have one register
7660 to load use the LDR instruction - it is faster. */
7661 if (saved_regs_mask == (1 << LR_REGNUM))
7663 /* The exception handler ignores the LR, so we do
7664 not really need to load it off the stack. */
7665 if (eh_ofs)
7666 asm_fprintf (f, "\tadd\t%r, %r, #4\n", SP_REGNUM, SP_REGNUM);
7667 else
7668 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM, SP_REGNUM);
7670 else if (saved_regs_mask)
7671 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, saved_regs_mask);
7673 if (current_function_pretend_args_size)
7675 /* Unwind the pre-pushed regs. */
7676 operands[0] = operands[1] = stack_pointer_rtx;
7677 operands[2] = GEN_INT (current_function_pretend_args_size);
7678 output_add_immediate (operands);
7682 #if 0
7683 if (ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER)
7684 /* Adjust the stack to remove the exception handler stuff. */
7685 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
7686 REGNO (eh_ofs));
7687 #endif
7689 if (! really_return)
7690 return "";
7692 /* Generate the return instruction. */
7693 switch ((int) ARM_FUNC_TYPE (func_type))
7695 case ARM_FT_EXCEPTION_HANDLER:
7696 /* Even in 26-bit mode we do a mov (rather than a movs)
7697 because we don't have the PSR bits set in the address. */
7698 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, EXCEPTION_LR_REGNUM);
7699 break;
7701 case ARM_FT_ISR:
7702 case ARM_FT_FIQ:
7703 asm_fprintf (f, "\tsubs\t%r, %r, #4\n", PC_REGNUM, LR_REGNUM);
7704 break;
7706 case ARM_FT_EXCEPTION:
7707 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
7708 break;
7710 case ARM_FT_INTERWORKED:
7711 asm_fprintf (f, "\tbx\t%r\n", LR_REGNUM);
7712 break;
7714 default:
7715 if (frame_pointer_needed)
7716 /* If we used the frame pointer then the return address
7717 will have been loaded off the stack directly into the
7718 PC, so there is no need to issue a MOV instruction
7719 here. */
7721 else if (current_function_pretend_args_size == 0
7722 && (saved_regs_mask & (1 << LR_REGNUM)))
7723 /* Similarly we may have been able to load LR into the PC
7724 even if we did not create a stack frame. */
7726 else if (TARGET_APCS_32)
7727 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, LR_REGNUM);
7728 else
7729 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
7730 break;
7733 return "";
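/* Example (illustrative): for a frame-pointer function that pushed
   {r4, fp, ip, lr, pc}, the code above swaps IP for SP and LR for PC
   in the mask and restores everything with the single instruction
	ldmea	fp, {r4, fp, sp, pc}
*/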
7736 static void
7737 arm_output_function_epilogue (file, frame_size)
7738 FILE *file ATTRIBUTE_UNUSED;
7739 HOST_WIDE_INT frame_size;
7741 if (TARGET_THUMB)
7743 /* ??? Probably not safe to set this here, since it assumes that a
7744 function will be emitted as assembly immediately after we generate
7745 RTL for it. This does not happen for inline functions. */
7746 return_used_this_function = 0;
7748 else
7750 if (use_return_insn (FALSE)
7751 && return_used_this_function
7752 && (frame_size + current_function_outgoing_args_size) != 0
7753 && !frame_pointer_needed)
7754 abort ();
7756 /* Reset the ARM-specific per-function variables. */
7757 current_function_anonymous_args = 0;
7758 after_arm_reorg = 0;
7762 /* Generate and emit an insn that we will recognize as a push_multi.
7763 Unfortunately, since this insn does not reflect very well the actual
7764 semantics of the operation, we need to annotate the insn for the benefit
7765 of DWARF2 frame unwind information. */
7767 static rtx
7768 emit_multi_reg_push (mask)
7769 int mask;
7771 int num_regs = 0;
7772 int num_dwarf_regs;
7773 int i, j;
7774 rtx par;
7775 rtx dwarf;
7776 int dwarf_par_index;
7777 rtx tmp, reg;
7779 for (i = 0; i <= LAST_ARM_REGNUM; i++)
7780 if (mask & (1 << i))
7781 num_regs++;
7783 if (num_regs == 0 || num_regs > 16)
7784 abort ();
7786 /* We don't record the PC in the dwarf frame information. */
7787 num_dwarf_regs = num_regs;
7788 if (mask & (1 << PC_REGNUM))
7789 num_dwarf_regs--;
7791 /* For the body of the insn we are going to generate an UNSPEC in
7792 parallel with several USEs. This allows the insn to be recognised
7793 by the push_multi pattern in the arm.md file. The insn looks
7794 something like this:
7796 (parallel [
7797 (set (mem:BLK (pre_dec:BLK (reg:SI sp)))
7798 (unspec:BLK [(reg:SI r4)] UNSPEC_PUSH_MULT))
7799 (use (reg:SI 11 fp))
7800 (use (reg:SI 12 ip))
7801 (use (reg:SI 14 lr))
7802 (use (reg:SI 15 pc))
7805 For the frame note however, we try to be more explicit and actually
7806 show each register being stored into the stack frame, plus a (single)
7807 decrement of the stack pointer. We do it this way in order to be
7808 friendly to the stack unwinding code, which only wants to see a single
7809 stack decrement per instruction. The RTL we generate for the note looks
7810 something like this:
7812 (sequence [
7813 (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int -20)))
7814 (set (mem:SI (reg:SI sp)) (reg:SI r4))
7815 (set (mem:SI (plus:SI (reg:SI sp) (const_int 4))) (reg:SI fp))
7816 (set (mem:SI (plus:SI (reg:SI sp) (const_int 8))) (reg:SI ip))
7817 (set (mem:SI (plus:SI (reg:SI sp) (const_int 12))) (reg:SI lr))
7820 This sequence is used both by the code to support stack unwinding for
7821 exception handlers and the code to generate dwarf2 frame debugging. */
7823 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_regs));
7824 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (num_dwarf_regs + 1));
7825 RTX_FRAME_RELATED_P (dwarf) = 1;
7826 dwarf_par_index = 1;
7828 for (i = 0; i <= LAST_ARM_REGNUM; i++)
7830 if (mask & (1 << i))
7832 reg = gen_rtx_REG (SImode, i);
7834 XVECEXP (par, 0, 0)
7835 = gen_rtx_SET (VOIDmode,
7836 gen_rtx_MEM (BLKmode,
7837 gen_rtx_PRE_DEC (BLKmode,
7838 stack_pointer_rtx)),
7839 gen_rtx_UNSPEC (BLKmode,
7840 gen_rtvec (1, reg),
7841 UNSPEC_PUSH_MULT));
7843 if (i != PC_REGNUM)
7845 tmp = gen_rtx_SET (VOIDmode,
7846 gen_rtx_MEM (SImode, stack_pointer_rtx),
7847 reg);
7848 RTX_FRAME_RELATED_P (tmp) = 1;
7849 XVECEXP (dwarf, 0, dwarf_par_index) = tmp;
7850 dwarf_par_index++;
7853 break;
7857 for (j = 1, i++; j < num_regs; i++)
7859 if (mask & (1 << i))
7861 reg = gen_rtx_REG (SImode, i);
7863 XVECEXP (par, 0, j) = gen_rtx_USE (VOIDmode, reg);
7865 if (i != PC_REGNUM)
7867 tmp = gen_rtx_SET (VOIDmode,
7868 gen_rtx_MEM (SImode,
7869 plus_constant (stack_pointer_rtx,
7870 4 * j)),
7871 reg);
7872 RTX_FRAME_RELATED_P (tmp) = 1;
7873 XVECEXP (dwarf, 0, dwarf_par_index++) = tmp;
7876 j++;
7880 par = emit_insn (par);
7882 tmp = gen_rtx_SET (SImode,
7883 stack_pointer_rtx,
7884 gen_rtx_PLUS (SImode,
7885 stack_pointer_rtx,
7886 GEN_INT (-4 * num_regs)));
7887 RTX_FRAME_RELATED_P (tmp) = 1;
7888 XVECEXP (dwarf, 0, 0) = tmp;
7890 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
7891 REG_NOTES (par));
7892 return par;
7895 static rtx
7896 emit_sfm (base_reg, count)
7897 int base_reg;
7898 int count;
7900 rtx par;
7901 rtx dwarf;
7902 rtx tmp, reg;
7903 int i;
7905 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
7906 dwarf = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
7907 RTX_FRAME_RELATED_P (dwarf) = 1;
7909 reg = gen_rtx_REG (XFmode, base_reg++);
7911 XVECEXP (par, 0, 0)
7912 = gen_rtx_SET (VOIDmode,
7913 gen_rtx_MEM (BLKmode,
7914 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
7915 gen_rtx_UNSPEC (BLKmode,
7916 gen_rtvec (1, reg),
7917 UNSPEC_PUSH_MULT));
7918 tmp
7919 = gen_rtx_SET (VOIDmode,
7920 gen_rtx_MEM (XFmode,
7921 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
7922 reg);
7923 RTX_FRAME_RELATED_P (tmp) = 1;
7924 XVECEXP (dwarf, 0, count - 1) = tmp;
7926 for (i = 1; i < count; i++)
7928 reg = gen_rtx_REG (XFmode, base_reg++);
7929 XVECEXP (par, 0, i) = gen_rtx_USE (VOIDmode, reg);
7931 tmp = gen_rtx_SET (VOIDmode,
7932 gen_rtx_MEM (XFmode,
7933 gen_rtx_PRE_DEC (BLKmode,
7934 stack_pointer_rtx)),
7935 reg);
7936 RTX_FRAME_RELATED_P (tmp) = 1;
7937 XVECEXP (dwarf, 0, count - i - 1) = tmp;
7940 par = emit_insn (par);
7941 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
7942 REG_NOTES (par));
7943 return par;
7946 /* Compute the distance from register FROM to register TO.
7947 These can be the arg pointer (26), the soft frame pointer (25),
7948 the stack pointer (13) or the hard frame pointer (11).
7949 Typical stack layout looks like this:
7951 old stack pointer -> | |
7952 ----
7953 | | \
7954 | | saved arguments for
7955 | | vararg functions
7956 | | /
7958 hard FP & arg pointer -> | | \
7959 | | stack
7960 | | frame
7961 | | /
7963 | | \
7964 | | call saved
7965 | | registers
7966 soft frame pointer -> | | /
7968 | | \
7969 | | local
7970 | | variables
7971 | | /
7973 | | \
7974 | | outgoing
7975 | | arguments
7976 current stack pointer -> | | /
7979 For a given function some or all of these stack components
7980 may not be needed, giving rise to the possibility of
7981 eliminating some of the registers.
7983 The values returned by this function must reflect the behaviour
7984 of arm_expand_prologue() and arm_compute_save_reg_mask().
7986 The sign of the number returned reflects the direction of stack
7987 growth, so the values are positive for all eliminations except
7988 from the soft frame pointer to the hard frame pointer. */
7990 unsigned int
7991 arm_compute_initial_elimination_offset (from, to)
7992 unsigned int from;
7993 unsigned int to;
7995 unsigned int local_vars = (get_frame_size () + 3) & ~3;
7996 unsigned int outgoing_args = current_function_outgoing_args_size;
7997 unsigned int stack_frame;
7998 unsigned int call_saved_registers;
7999 unsigned long func_type;
8001 func_type = arm_current_func_type ();
8003 /* Volatile functions never return, so there is
8004 no need to save call saved registers. */
8005 call_saved_registers = 0;
8006 if (! IS_VOLATILE (func_type))
8008 unsigned int reg_mask;
8009 unsigned int reg;
8011 /* Make sure that we compute which registers will be saved
8012 on the stack using the same algorithm that is used by
8013 arm_compute_save_reg_mask(). */
8014 reg_mask = arm_compute_save_reg0_reg12_mask ();
8016 /* Now count the number of bits set in reg_mask.
8017 For each set bit we need 4 bytes of stack space. */
8018 while (reg_mask)
8020 call_saved_registers += 4;
8021 reg_mask = reg_mask & ~ (reg_mask & - reg_mask);
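/* (reg_mask & -reg_mask) isolates the lowest set bit, so the
   statement above clears exactly one bit per iteration,
   e.g. 0x14 -> 0x10 -> 0.  */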
8024 if (regs_ever_live[LR_REGNUM]
8025 /* If a stack frame is going to be created, the LR will
8026 be saved as part of that, so we do not need to allow
8027 for it here. */
8028 && ! frame_pointer_needed)
8029 call_saved_registers += 4;
8031 /* If the hard floating point registers are going to be
8032 used then they must be saved on the stack as well.
8033 Each register occupies 12 bytes of stack space. */
8034 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg ++)
8035 if (regs_ever_live[reg] && ! call_used_regs[reg])
8036 call_saved_registers += 12;
8039 /* The stack frame contains 4 registers - the old frame pointer,
8040 the old stack pointer, the return address and PC of the start
8041 of the function. */
8042 stack_frame = frame_pointer_needed ? 16 : 0;
8044 /* OK, now we have enough information to compute the distances.
8045 There must be an entry in these switch tables for each pair
8046 of registers in ELIMINABLE_REGS, even if some of the entries
8047 seem to be redundant or useless. */
8048 switch (from)
8050 case ARG_POINTER_REGNUM:
8051 switch (to)
8053 case THUMB_HARD_FRAME_POINTER_REGNUM:
8054 return 0;
8056 case FRAME_POINTER_REGNUM:
8057 /* This is the reverse of the soft frame pointer
8058 to hard frame pointer elimination below. */
8059 if (call_saved_registers == 0 && stack_frame == 0)
8060 return 0;
8061 return (call_saved_registers + stack_frame - 4);
8063 case ARM_HARD_FRAME_POINTER_REGNUM:
8064 /* If there is no stack frame then the hard
8065 frame pointer and the arg pointer coincide. */
8066 if (stack_frame == 0 && call_saved_registers != 0)
8067 return 0;
8068 /* FIXME: Not sure about this. Maybe we should always return 0 ? */
8069 return (frame_pointer_needed
8070 && current_function_needs_context
8071 && ! current_function_anonymous_args) ? 4 : 0;
8073 case STACK_POINTER_REGNUM:
8074 /* If nothing has been pushed on the stack at all
8075 then this will return -4. This *is* correct! */
8076 return call_saved_registers + stack_frame + local_vars + outgoing_args - 4;
8078 default:
8079 abort ();
8081 break;
8083 case FRAME_POINTER_REGNUM:
8084 switch (to)
8086 case THUMB_HARD_FRAME_POINTER_REGNUM:
8087 return 0;
8089 case ARM_HARD_FRAME_POINTER_REGNUM:
8090 /* The hard frame pointer points to the top entry in the
8091 stack frame. The soft frame pointer to the bottom entry
8092 in the stack frame. If there is no stack frame at all,
8093 then they are identical. */
8094 if (call_saved_registers == 0 && stack_frame == 0)
8095 return 0;
8096 return - (call_saved_registers + stack_frame - 4);
8098 case STACK_POINTER_REGNUM:
8099 return local_vars + outgoing_args;
8101 default:
8102 abort ();
8104 break;
8106 default:
8107 /* You cannot eliminate from the stack pointer.
8108 In theory you could eliminate from the hard frame
8109 pointer to the stack pointer, but this will never
8110 happen, since if a stack frame is not needed the
8111 hard frame pointer will never be used. */
8112 abort ();
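/* Worked example (illustrative): with a stack frame (16 bytes), r4 and
   r5 saved (call_saved_registers == 8), 8 bytes of locals and no
   outgoing arguments, the ARG_POINTER to STACK_POINTER offset above is
   8 + 16 + 8 + 0 - 4 == 28, while the FRAME_POINTER to STACK_POINTER
   offset is simply 8 + 0 == 8.  */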
8116 /* Generate the prologue instructions for entry into an ARM function. */
8118 void
8119 arm_expand_prologue ()
8121 int reg;
8122 rtx amount;
8123 rtx insn;
8124 rtx ip_rtx;
8125 unsigned long live_regs_mask;
8126 unsigned long func_type;
8127 int fp_offset = 0;
8128 int saved_pretend_args = 0;
8129 unsigned int args_to_push;
8131 func_type = arm_current_func_type ();
8133 /* Naked functions don't have prologues. */
8134 if (IS_NAKED (func_type))
8135 return;
8137 /* Make a copy of c_f_p_a_s as we may need to modify it locally. */
8138 args_to_push = current_function_pretend_args_size;
8140 /* Compute which registers we will have to save onto the stack. */
8141 live_regs_mask = arm_compute_save_reg_mask ();
8143 ip_rtx = gen_rtx_REG (SImode, IP_REGNUM);
8145 if (frame_pointer_needed)
8147 if (IS_INTERRUPT (func_type))
8149 /* Interrupt functions must not corrupt any registers.
8150 Creating a frame pointer, however, corrupts the IP
8151 register, so we must push it first. */
8152 insn = emit_multi_reg_push (1 << IP_REGNUM);
8154 /* Do not set RTX_FRAME_RELATED_P on this insn.
8155 The dwarf stack unwinding code only wants to see one
8156 stack decrement per function, and this is not it. If
8157 this instruction is labeled as being part of the frame
8158 creation sequence then dwarf2out_frame_debug_expr will
8159 abort when it encounters the assignment of IP to FP
8160 later on, since the use of SP here establishes SP as
8161 the CFA register and not IP.
8163 Anyway this instruction is not really part of the stack
8164 frame creation although it is part of the prologue. */
8166 else if (IS_NESTED (func_type))
8168 /* The static chain register is the same as the IP register
8169 used as a scratch register during stack frame creation.
8170 To get around this we need to find somewhere to store IP
8171 whilst the frame is being created. We try the following
8172 places in order:
8174 1. The last argument register.
8175 2. A slot on the stack above the frame. (This only
8176 works if the function is not a varargs function).
8177 3. Register r3, after pushing the argument registers
8178 onto the stack.
8180 Note - we only need to tell the dwarf2 backend about the SP
8181 adjustment in the second variant; the static chain register
8182 doesn't need to be unwound, as it doesn't contain a value
8183 inherited from the caller. */
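/* A minimal sketch of the first variant (r3 free, no stacked
   arguments); the exact sequence depends on the cases handled below:

	mov	r3, ip		@ preserve the static chain
	mov	ip, sp
	stmfd	sp!, {fp, ip, lr, pc}
	sub	fp, ip, #4
	mov	ip, r3		@ recover the static chain
*/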
8185 if (regs_ever_live[3] == 0)
8187 insn = gen_rtx_REG (SImode, 3);
8188 insn = gen_rtx_SET (SImode, insn, ip_rtx);
8189 insn = emit_insn (insn);
8191 else if (args_to_push == 0)
8193 rtx dwarf;
8194 insn = gen_rtx_PRE_DEC (SImode, stack_pointer_rtx);
8195 insn = gen_rtx_MEM (SImode, insn);
8196 insn = gen_rtx_SET (VOIDmode, insn, ip_rtx);
8197 insn = emit_insn (insn);
8199 fp_offset = 4;
8201 /* Just tell the dwarf backend that we adjusted SP. */
8202 dwarf = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
8203 gen_rtx_PLUS (SImode, stack_pointer_rtx,
8204 GEN_INT (-fp_offset)));
8205 RTX_FRAME_RELATED_P (insn) = 1;
8206 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8207 dwarf, REG_NOTES (insn));
8209 else
8211 /* Store the args on the stack. */
8212 if (current_function_anonymous_args)
8213 insn = emit_multi_reg_push
8214 ((0xf0 >> (args_to_push / 4)) & 0xf);
8215 else
8216 insn = emit_insn
8217 (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
8218 GEN_INT (- args_to_push)));
8220 RTX_FRAME_RELATED_P (insn) = 1;
8222 saved_pretend_args = 1;
8223 fp_offset = args_to_push;
8224 args_to_push = 0;
8226 /* Now reuse r3 to preserve IP. */
8227 insn = gen_rtx_REG (SImode, 3);
8228 insn = gen_rtx_SET (SImode, insn, ip_rtx);
8229 (void) emit_insn (insn);
8233 if (fp_offset)
8235 insn = gen_rtx_PLUS (SImode, stack_pointer_rtx, GEN_INT (fp_offset));
8236 insn = gen_rtx_SET (SImode, ip_rtx, insn);
8238 else
8239 insn = gen_movsi (ip_rtx, stack_pointer_rtx);
8241 insn = emit_insn (insn);
8242 RTX_FRAME_RELATED_P (insn) = 1;
8245 if (args_to_push)
8247 /* Push the argument registers, or reserve space for them. */
8248 if (current_function_anonymous_args)
8249 insn = emit_multi_reg_push
8250 ((0xf0 >> (args_to_push / 4)) & 0xf);
8251 else
8252 insn = emit_insn
8253 (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
8254 GEN_INT (- args_to_push)));
8255 RTX_FRAME_RELATED_P (insn) = 1;
8258 if (live_regs_mask)
8260 insn = emit_multi_reg_push (live_regs_mask);
8261 RTX_FRAME_RELATED_P (insn) = 1;
8264 if (! IS_VOLATILE (func_type))
8266 /* Save any floating point call-saved registers used by this function. */
8267 if (arm_fpu_arch == FP_SOFT2)
8269 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg --)
8270 if (regs_ever_live[reg] && !call_used_regs[reg])
8272 insn = gen_rtx_PRE_DEC (XFmode, stack_pointer_rtx);
8273 insn = gen_rtx_MEM (XFmode, insn);
8274 insn = emit_insn (gen_rtx_SET (VOIDmode, insn,
8275 gen_rtx_REG (XFmode, reg)));
8276 RTX_FRAME_RELATED_P (insn) = 1;
8279 else
8281 int start_reg = LAST_ARM_FP_REGNUM;
8283 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg --)
8285 if (regs_ever_live[reg] && !call_used_regs[reg])
8287 if (start_reg - reg == 3)
8289 insn = emit_sfm (reg, 4);
8290 RTX_FRAME_RELATED_P (insn) = 1;
8291 start_reg = reg - 1;
8294 else
8296 if (start_reg != reg)
8298 insn = emit_sfm (reg + 1, start_reg - reg);
8299 RTX_FRAME_RELATED_P (insn) = 1;
8301 start_reg = reg - 1;
8305 if (start_reg != reg)
8307 insn = emit_sfm (reg + 1, start_reg - reg);
8308 RTX_FRAME_RELATED_P (insn) = 1;
8313 if (frame_pointer_needed)
8315 /* Create the new frame pointer. */
8316 insn = GEN_INT (-(4 + args_to_push + fp_offset));
8317 insn = emit_insn (gen_addsi3 (hard_frame_pointer_rtx, ip_rtx, insn));
8318 RTX_FRAME_RELATED_P (insn) = 1;
8320 if (IS_NESTED (func_type))
8322 /* Recover the static chain register. */
8323 if (regs_ever_live [3] == 0
8324 || saved_pretend_args)
8325 insn = gen_rtx_REG (SImode, 3);
8326 else /* if (current_function_pretend_args_size == 0) */
8328 insn = gen_rtx_PLUS (SImode, hard_frame_pointer_rtx, GEN_INT (4));
8329 insn = gen_rtx_MEM (SImode, insn);
8332 emit_insn (gen_rtx_SET (SImode, ip_rtx, insn));
8333 /* Add a USE to stop propagate_one_insn() from barfing. */
8334 emit_insn (gen_rtx_USE (VOIDmode, ip_rtx));
8338 amount = GEN_INT (-(get_frame_size ()
8339 + current_function_outgoing_args_size));
8341 if (amount != const0_rtx)
8343 /* This add can produce multiple insns for a large constant, so we
8344 need to get tricky. */
8345 rtx last = get_last_insn ();
8346 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
8347 amount));
8350 last = last ? NEXT_INSN (last) : get_insns ();
8351 RTX_FRAME_RELATED_P (last) = 1;
8353 while (last != insn);
8355 /* If the frame pointer is needed, emit a special barrier that
8356 will prevent the scheduler from moving stores to the frame
8357 before the stack adjustment. */
8358 if (frame_pointer_needed)
8360 rtx unspec = gen_rtx_UNSPEC (SImode,
8361 gen_rtvec (2, stack_pointer_rtx,
8362 hard_frame_pointer_rtx),
8363 UNSPEC_PRLG_STK);
8365 insn = emit_insn (gen_rtx_CLOBBER (VOIDmode,
8366 gen_rtx_MEM (BLKmode, unspec)));
8370 /* If we are profiling, make sure no instructions are scheduled before
8371 the call to mcount. Similarly if the user has requested no
8372 scheduling in the prolog. */
8373 if (current_function_profile || TARGET_NO_SCHED_PRO)
8374 emit_insn (gen_blockage ());
8376 /* If the link register is being kept alive, with the return address in it,
8377 then make sure that it does not get reused by the ce2 pass. */
8378 if ((live_regs_mask & (1 << LR_REGNUM)) == 0)
8380 emit_insn (gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, LR_REGNUM)));
8381 cfun->machine->lr_save_eliminated = 1;
8385 /* If CODE is 'd', then X is a condition operand and the instruction
8386 should only be executed if the condition is true.
8387 If CODE is 'D', then X is a condition operand and the instruction
8388 should only be executed if the condition is false: however, if the mode
8389 of the comparison is CCFPEmode, then always execute the instruction -- we
8390 do this because in these circumstances !GE does not necessarily imply LT;
8391 in these cases the instruction pattern will take care to make sure that
8392 an instruction containing %d will follow, thereby undoing the effects of
8393 doing this instruction unconditionally.
8394 If CODE is 'N' then X is a floating point operand that must be negated
8395 before output.
8396 If CODE is 'B' then output a bitwise inverted value of X (a const int).
8397 If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */
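/* For example (illustrative), given (reg:DI r4) the 'M' code prints
   "{r4-r5}", since a DImode value occupies NUM_REGS (DImode) == 2
   consecutive registers.  */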
8399 void
8400 arm_print_operand (stream, x, code)
8401 FILE * stream;
8402 rtx x;
8403 int code;
8405 switch (code)
8407 case '@':
8408 fputs (ASM_COMMENT_START, stream);
8409 return;
8411 case '_':
8412 fputs (user_label_prefix, stream);
8413 return;
8415 case '|':
8416 fputs (REGISTER_PREFIX, stream);
8417 return;
8419 case '?':
8420 if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
8422 if (TARGET_THUMB || current_insn_predicate != NULL)
8423 abort ();
8425 fputs (arm_condition_codes[arm_current_cc], stream);
8427 else if (current_insn_predicate)
8429 enum arm_cond_code code;
8431 if (TARGET_THUMB)
8432 abort ();
8434 code = get_arm_condition_code (current_insn_predicate);
8435 fputs (arm_condition_codes[code], stream);
8437 return;
8439 case 'N':
8441 REAL_VALUE_TYPE r;
8442 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
8443 r = REAL_VALUE_NEGATE (r);
8444 fprintf (stream, "%s", fp_const_from_val (&r));
8446 return;
8448 case 'B':
8449 if (GET_CODE (x) == CONST_INT)
8451 HOST_WIDE_INT val;
8452 val = ARM_SIGN_EXTEND (~INTVAL (x));
8453 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
8455 else
8457 putc ('~', stream);
8458 output_addr_const (stream, x);
8460 return;
8462 case 'i':
8463 fprintf (stream, "%s", arithmetic_instr (x, 1));
8464 return;
8466 case 'I':
8467 fprintf (stream, "%s", arithmetic_instr (x, 0));
8468 return;
8470 case 'S':
8472 HOST_WIDE_INT val;
8473 const char * shift = shift_op (x, &val);
8475 if (shift)
8477 fprintf (stream, ", %s ", shift_op (x, &val));
8478 if (val == -1)
8479 arm_print_operand (stream, XEXP (x, 1), 0);
8480 else
8482 fputc ('#', stream);
8483 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
8487 return;
8489 /* An explanation of the 'Q', 'R' and 'H' register operands:
8491 In a pair of registers containing a DI or DF value the 'Q'
8492 operand returns the register number of the register containing
8493 the least significant part of the value. The 'R' operand returns
8494 the register number of the register containing the most
8495 significant part of the value.
8497 The 'H' operand returns the higher of the two register numbers.
8498 On a run where WORDS_BIG_ENDIAN is true the 'H' operand is the
8499 same as the 'Q' operand, since the most significant part of the
8500 value is held in the lower number register. The reverse is true
8501 on systems where WORDS_BIG_ENDIAN is false.
8503 The purpose of these operands is to distinguish between cases
8504 where the endian-ness of the values is important (for example
8505 when they are added together), and cases where the endian-ness
8506 is irrelevant, but the order of register operations is important.
8507 For example when loading a value from memory into a register
8508 pair, the endian-ness does not matter. Provided that the value
8509 from the lower memory address is put into the lower numbered
8510 register, and the value from the higher address is put into the
8511 higher numbered register, the load will work regardless of whether
8512 the value being loaded is big-wordian or little-wordian. The
8513 order of the two register loads can matter however, if the address
8514 of the memory location is actually held in one of the registers
8515 being overwritten by the load. */
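/* A concrete example (illustrative): for a DImode value held in the
   pair r0/r1 when WORDS_BIG_ENDIAN is false:

     %Q -> r0   (least significant word)
     %R -> r1   (most significant word)
     %H -> r1   (higher register number, regardless of endianness)

   When WORDS_BIG_ENDIAN is true, %Q -> r1 and %R -> r0, while %H is
   still r1.  */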
8516 case 'Q':
8517 if (REGNO (x) > LAST_ARM_REGNUM)
8518 abort ();
8519 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 1 : 0));
8520 return;
8522 case 'R':
8523 if (REGNO (x) > LAST_ARM_REGNUM)
8524 abort ();
8525 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 0 : 1));
8526 return;
8528 case 'H':
8529 if (REGNO (x) > LAST_ARM_REGNUM)
8530 abort ();
8531 asm_fprintf (stream, "%r", REGNO (x) + 1);
8532 return;
8534 case 'm':
8535 asm_fprintf (stream, "%r",
8536 GET_CODE (XEXP (x, 0)) == REG
8537 ? REGNO (XEXP (x, 0)) : REGNO (XEXP (XEXP (x, 0), 0)));
8538 return;
8540 case 'M':
8541 asm_fprintf (stream, "{%r-%r}",
8542 REGNO (x),
8543 REGNO (x) + NUM_REGS (GET_MODE (x)) - 1);
8544 return;
8546 case 'd':
8547 if (!x)
8548 return;
8550 if (TARGET_ARM)
8551 fputs (arm_condition_codes[get_arm_condition_code (x)],
8552 stream);
8553 else
8554 fputs (thumb_condition_code (x, 0), stream);
8555 return;
8557 case 'D':
8558 if (!x)
8559 return;
8561 if (TARGET_ARM)
8562 fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
8563 (get_arm_condition_code (x))],
8564 stream);
8565 else
8566 fputs (thumb_condition_code (x, 1), stream);
8567 return;
8569 default:
8570 if (x == 0)
8571 abort ();
8573 if (GET_CODE (x) == REG)
8574 asm_fprintf (stream, "%r", REGNO (x));
8575 else if (GET_CODE (x) == MEM)
8577 output_memory_reference_mode = GET_MODE (x);
8578 output_address (XEXP (x, 0));
8580 else if (GET_CODE (x) == CONST_DOUBLE)
8581 fprintf (stream, "#%s", fp_immediate_constant (x));
8582 else if (GET_CODE (x) == NEG)
8583 abort (); /* This should never happen now. */
8584 else
8586 fputc ('#', stream);
8587 output_addr_const (stream, x);
8592 #ifndef AOF_ASSEMBLER
8593 /* Target hook for assembling integer objects. The ARM version needs to
8594 handle word-sized values specially. */
8596 static bool
8597 arm_assemble_integer (x, size, aligned_p)
8598 rtx x;
8599 unsigned int size;
8600 int aligned_p;
8602 if (size == UNITS_PER_WORD && aligned_p)
8604 fputs ("\t.word\t", asm_out_file);
8605 output_addr_const (asm_out_file, x);
8607 /* Mark symbols as position independent. We only do this in the
8608 .text segment, not in the .data segment. */
8609 if (NEED_GOT_RELOC && flag_pic && making_const_table &&
8610 (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF))
8612 if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
8613 fputs ("(GOTOFF)", asm_out_file);
8614 else if (GET_CODE (x) == LABEL_REF)
8615 fputs ("(GOTOFF)", asm_out_file);
8616 else
8617 fputs ("(GOT)", asm_out_file);
8619 fputc ('\n', asm_out_file);
8620 return true;
8623 return default_assemble_integer (x, size, aligned_p);
8625 #endif
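/* For illustration (symbol names hypothetical), a word-sized
   SYMBOL_REF `foo' assembled while compiling PIC within the constant
   table comes out as:

     .word foo(GOT)       @ ordinary symbol
     .word .LC0(GOTOFF)   @ constant pool entry or label

   Other word-sized values get a plain .word; non-word sizes fall
   through to default_assemble_integer.  */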
8627 /* A finite state machine takes care of noticing whether or not instructions
8628 can be conditionally executed, and thus decrease execution time and code
8629 size by deleting branch instructions. The fsm is controlled by
8630 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
8632 /* The state of the fsm controlling condition codes are:
8633 0: normal, do nothing special
8634 1: make ASM_OUTPUT_OPCODE not output this instruction
8635 2: make ASM_OUTPUT_OPCODE not output this instruction
8636 3: make instructions conditional
8637 4: make instructions conditional
8639 State transitions (state->state by whom under condition):
8640 0 -> 1 final_prescan_insn if the `target' is a label
8641 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
8642 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
8643 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
8644 3 -> 0 ASM_OUTPUT_INTERNAL_LABEL if the `target' label is reached
8645 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
8646 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
8647 (the target insn is arm_target_insn).
8649 If the jump clobbers the conditions then we use states 2 and 4.
8651 A similar thing can be done with conditional return insns.
8653 XXX In case the `target' is an unconditional branch, this conditionalising
8654 of the instructions always reduces code size, but not always execution
8655 time. But then, I want to reduce the code size to somewhere near what
8656 /bin/cc produces. */
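/* A minimal example of the transformation (registers and condition
   are illustrative).  A branch around a single instruction:

	cmp	r0, #0
	beq	.L1
	mov	r1, #1
     .L1:

   is output instead as:

	cmp	r0, #0
	movne	r1, #1

   i.e. the branch is suppressed and the skipped instruction is given
   the inverse condition.  */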
8658 /* Returns the index of the ARM condition code string in
8659 `arm_condition_codes'. COMPARISON should be an rtx like
8660 `(eq (...) (...))'. */
8662 static enum arm_cond_code
8663 get_arm_condition_code (comparison)
8664 rtx comparison;
8666 enum machine_mode mode = GET_MODE (XEXP (comparison, 0));
8667 int code;
8668 enum rtx_code comp_code = GET_CODE (comparison);
8670 if (GET_MODE_CLASS (mode) != MODE_CC)
8671 mode = SELECT_CC_MODE (comp_code, XEXP (comparison, 0),
8672 XEXP (comparison, 1));
8674 switch (mode)
8676 case CC_DNEmode: code = ARM_NE; goto dominance;
8677 case CC_DEQmode: code = ARM_EQ; goto dominance;
8678 case CC_DGEmode: code = ARM_GE; goto dominance;
8679 case CC_DGTmode: code = ARM_GT; goto dominance;
8680 case CC_DLEmode: code = ARM_LE; goto dominance;
8681 case CC_DLTmode: code = ARM_LT; goto dominance;
8682 case CC_DGEUmode: code = ARM_CS; goto dominance;
8683 case CC_DGTUmode: code = ARM_HI; goto dominance;
8684 case CC_DLEUmode: code = ARM_LS; goto dominance;
8685 case CC_DLTUmode: code = ARM_CC;
8687 dominance:
8688 if (comp_code != EQ && comp_code != NE)
8689 abort ();
8691 if (comp_code == EQ)
8692 return ARM_INVERSE_CONDITION_CODE (code);
8693 return code;
8695 case CC_NOOVmode:
8696 switch (comp_code)
8698 case NE: return ARM_NE;
8699 case EQ: return ARM_EQ;
8700 case GE: return ARM_PL;
8701 case LT: return ARM_MI;
8702 default: abort ();
8705 case CC_Zmode:
8706 switch (comp_code)
8708 case NE: return ARM_NE;
8709 case EQ: return ARM_EQ;
8710 default: abort ();
8713 case CCFPEmode:
8714 case CCFPmode:
8715 /* These encodings assume that AC=1 in the FPA system control
8716 byte. This allows us to handle all cases except UNEQ and
8717 LTGT. */
8718 switch (comp_code)
8720 case GE: return ARM_GE;
8721 case GT: return ARM_GT;
8722 case LE: return ARM_LS;
8723 case LT: return ARM_MI;
8724 case NE: return ARM_NE;
8725 case EQ: return ARM_EQ;
8726 case ORDERED: return ARM_VC;
8727 case UNORDERED: return ARM_VS;
8728 case UNLT: return ARM_LT;
8729 case UNLE: return ARM_LE;
8730 case UNGT: return ARM_HI;
8731 case UNGE: return ARM_PL;
8732 /* UNEQ and LTGT do not have a representation. */
8733 case UNEQ: /* Fall through. */
8734 case LTGT: /* Fall through. */
8735 default: abort ();
8738 case CC_SWPmode:
8739 switch (comp_code)
8741 case NE: return ARM_NE;
8742 case EQ: return ARM_EQ;
8743 case GE: return ARM_LE;
8744 case GT: return ARM_LT;
8745 case LE: return ARM_GE;
8746 case LT: return ARM_GT;
8747 case GEU: return ARM_LS;
8748 case GTU: return ARM_CC;
8749 case LEU: return ARM_CS;
8750 case LTU: return ARM_HI;
8751 default: abort ();
8754 case CC_Cmode:
8755 switch (comp_code)
8757 case LTU: return ARM_CS;
8758 case GEU: return ARM_CC;
8759 default: abort ();
8762 case CCmode:
8763 switch (comp_code)
8765 case NE: return ARM_NE;
8766 case EQ: return ARM_EQ;
8767 case GE: return ARM_GE;
8768 case GT: return ARM_GT;
8769 case LE: return ARM_LE;
8770 case LT: return ARM_LT;
8771 case GEU: return ARM_CS;
8772 case GTU: return ARM_HI;
8773 case LEU: return ARM_LS;
8774 case LTU: return ARM_CC;
8775 default: abort ();
8778 default: abort ();
8781 abort ();
8785 void
8786 arm_final_prescan_insn (insn)
8787 rtx insn;
8789 /* BODY will hold the body of INSN. */
8790 rtx body = PATTERN (insn);
8792 /* This will be 1 if trying to repeat the trick, and things need to be
8793 reversed if it appears to fail. */
8794 int reverse = 0;
8796 /* JUMP_CLOBBERS being one implies that the condition codes are
8797 clobbered if the branch is taken, even if the rtl suggests otherwise. It also
8798 means that we have to grub around within the jump expression to find
8799 out what the conditions are when the jump isn't taken. */
8800 int jump_clobbers = 0;
8802 /* If we start with a return insn, we only succeed if we find another one. */
8803 int seeking_return = 0;
8805 /* START_INSN will hold the insn from where we start looking. This is the
8806 first insn after the following code_label if REVERSE is true. */
8807 rtx start_insn = insn;
8809 /* If in state 4, check if the target branch is reached, in order to
8810 change back to state 0. */
8811 if (arm_ccfsm_state == 4)
8813 if (insn == arm_target_insn)
8815 arm_target_insn = NULL;
8816 arm_ccfsm_state = 0;
8818 return;
8821 /* If in state 3, it is possible to repeat the trick, if this insn is an
8822 unconditional branch to a label, and immediately following this branch
8823 is the previous target label which is only used once, and the label this
8824 branch jumps to is not too far off. */
8825 if (arm_ccfsm_state == 3)
8827 if (simplejump_p (insn))
8829 start_insn = next_nonnote_insn (start_insn);
8830 if (GET_CODE (start_insn) == BARRIER)
8832 /* XXX Isn't this always a barrier? */
8833 start_insn = next_nonnote_insn (start_insn);
8835 if (GET_CODE (start_insn) == CODE_LABEL
8836 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
8837 && LABEL_NUSES (start_insn) == 1)
8838 reverse = TRUE;
8839 else
8840 return;
8842 else if (GET_CODE (body) == RETURN)
8844 start_insn = next_nonnote_insn (start_insn);
8845 if (GET_CODE (start_insn) == BARRIER)
8846 start_insn = next_nonnote_insn (start_insn);
8847 if (GET_CODE (start_insn) == CODE_LABEL
8848 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
8849 && LABEL_NUSES (start_insn) == 1)
8851 reverse = TRUE;
8852 seeking_return = 1;
8854 else
8855 return;
8857 else
8858 return;
8861 if (arm_ccfsm_state != 0 && !reverse)
8862 abort ();
8863 if (GET_CODE (insn) != JUMP_INSN)
8864 return;
8866 /* This jump might be paralleled with a clobber of the condition codes;
8867 the jump should always come first. */
8868 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
8869 body = XVECEXP (body, 0, 0);
8871 #if 0
8872 /* If this is a conditional return then we don't want to know */
8873 if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
8874 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
8875 && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
8876 || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
8877 return;
8878 #endif
8880 if (reverse
8881 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
8882 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
8884 int insns_skipped;
8885 int fail = FALSE, succeed = FALSE;
8886 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
8887 int then_not_else = TRUE;
8888 rtx this_insn = start_insn, label = 0;
8890 /* If the jump cannot be done with one instruction, we cannot
8891 conditionally execute the instruction in the inverse case. */
8892 if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
8894 jump_clobbers = 1;
8895 return;
8898 /* Register the insn jumped to. */
8899 if (reverse)
8901 if (!seeking_return)
8902 label = XEXP (SET_SRC (body), 0);
8904 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
8905 label = XEXP (XEXP (SET_SRC (body), 1), 0);
8906 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
8908 label = XEXP (XEXP (SET_SRC (body), 2), 0);
8909 then_not_else = FALSE;
8911 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
8912 seeking_return = 1;
8913 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
8915 seeking_return = 1;
8916 then_not_else = FALSE;
8918 else
8919 abort ();
8921 /* See how many insns this branch skips, and what kind of insns. If all
8922 insns are okay, and the label or unconditional branch to the same
8923 label is not too far away, succeed. */
8924 for (insns_skipped = 0;
8925 !fail && !succeed && insns_skipped++ < max_insns_skipped;)
8927 rtx scanbody;
8929 this_insn = next_nonnote_insn (this_insn);
8930 if (!this_insn)
8931 break;
8933 switch (GET_CODE (this_insn))
8935 case CODE_LABEL:
8936 /* Succeed if it is the target label, otherwise fail since
8937 control falls in from somewhere else. */
8938 if (this_insn == label)
8940 if (jump_clobbers)
8942 arm_ccfsm_state = 2;
8943 this_insn = next_nonnote_insn (this_insn);
8945 else
8946 arm_ccfsm_state = 1;
8947 succeed = TRUE;
8949 else
8950 fail = TRUE;
8951 break;
8953 case BARRIER:
8954 /* Succeed if the following insn is the target label.
8955 Otherwise fail.
8956 If return insns are used then the last insn in a function
8957 will be a barrier. */
8958 this_insn = next_nonnote_insn (this_insn);
8959 if (this_insn && this_insn == label)
8961 if (jump_clobbers)
8963 arm_ccfsm_state = 2;
8964 this_insn = next_nonnote_insn (this_insn);
8966 else
8967 arm_ccfsm_state = 1;
8968 succeed = TRUE;
8970 else
8971 fail = TRUE;
8972 break;
8974 case CALL_INSN:
8975 /* If using 32-bit addresses the cc is not preserved over
8976 calls. */
8977 if (TARGET_APCS_32)
8979 /* Succeed if the following insn is the target label,
8980 or if the following two insns are a barrier and
8981 the target label. */
8982 this_insn = next_nonnote_insn (this_insn);
8983 if (this_insn && GET_CODE (this_insn) == BARRIER)
8984 this_insn = next_nonnote_insn (this_insn);
8986 if (this_insn && this_insn == label
8987 && insns_skipped < max_insns_skipped)
8989 if (jump_clobbers)
8991 arm_ccfsm_state = 2;
8992 this_insn = next_nonnote_insn (this_insn);
8994 else
8995 arm_ccfsm_state = 1;
8996 succeed = TRUE;
8998 else
8999 fail = TRUE;
9001 break;
9003 case JUMP_INSN:
9004 /* If this is an unconditional branch to the same label, succeed.
9005 If it is to another label, do nothing. If it is conditional,
9006 fail. */
9007 /* XXX Probably, the tests for SET and the PC are unnecessary. */
9009 scanbody = PATTERN (this_insn);
9010 if (GET_CODE (scanbody) == SET
9011 && GET_CODE (SET_DEST (scanbody)) == PC)
9013 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
9014 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
9016 arm_ccfsm_state = 2;
9017 succeed = TRUE;
9019 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
9020 fail = TRUE;
9022 /* Fail if a conditional return is undesirable (e.g. on a
9023 StrongARM), but still allow this if optimizing for size. */
9024 else if (GET_CODE (scanbody) == RETURN
9025 && !use_return_insn (TRUE)
9026 && !optimize_size)
9027 fail = TRUE;
9028 else if (GET_CODE (scanbody) == RETURN
9029 && seeking_return)
9031 arm_ccfsm_state = 2;
9032 succeed = TRUE;
9034 else if (GET_CODE (scanbody) == PARALLEL)
9036 switch (get_attr_conds (this_insn))
9038 case CONDS_NOCOND:
9039 break;
9040 default:
9041 fail = TRUE;
9042 break;
9045 else
9046 fail = TRUE; /* Unrecognized jump (eg epilogue). */
9048 break;
9050 case INSN:
9051 /* Instructions using or affecting the condition codes make it
9052 fail. */
9053 scanbody = PATTERN (this_insn);
9054 if (!(GET_CODE (scanbody) == SET
9055 || GET_CODE (scanbody) == PARALLEL)
9056 || get_attr_conds (this_insn) != CONDS_NOCOND)
9057 fail = TRUE;
9058 break;
9060 default:
9061 break;
9064 if (succeed)
9066 if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
9067 arm_target_label = CODE_LABEL_NUMBER (label);
9068 else if (seeking_return || arm_ccfsm_state == 2)
9070 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
9072 this_insn = next_nonnote_insn (this_insn);
9073 if (this_insn && (GET_CODE (this_insn) == BARRIER
9074 || GET_CODE (this_insn) == CODE_LABEL))
9075 abort ();
9077 if (!this_insn)
9079 /* Oh dear! We ran off the end... give up. */
9080 recog (PATTERN (insn), insn, NULL);
9081 arm_ccfsm_state = 0;
9082 arm_target_insn = NULL;
9083 return;
9085 arm_target_insn = this_insn;
9087 else
9088 abort ();
9089 if (jump_clobbers)
9091 if (reverse)
9092 abort ();
9093 arm_current_cc =
9094 get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
9095 0), 0), 1));
9096 if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
9097 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
9098 if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
9099 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
9101 else
9103 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
9104 what it was. */
9105 if (!reverse)
9106 arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
9107 0));
9110 if (reverse || then_not_else)
9111 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
9114 /* Restore recog_data (getting the attributes of other insns can
9115 destroy this array, but final.c assumes that it remains intact
9116 across this call; since the insn has been recognized already we
9117 call recog directly). */
9118 recog (PATTERN (insn), insn, NULL);
9122 /* Returns true if REGNO is a valid register
9123 for holding a quantity of type MODE. */
9126 arm_hard_regno_mode_ok (regno, mode)
9127 unsigned int regno;
9128 enum machine_mode mode;
9130 if (GET_MODE_CLASS (mode) == MODE_CC)
9131 return regno == CC_REGNUM;
9133 if (TARGET_THUMB)
9134 /* For the Thumb we only allow values bigger than SImode in
9135 registers 0 - 6, so that there is always a second low
9136 register available to hold the upper part of the value.
9137 We probably ought to ensure that the register is the
9138 start of an even numbered register pair. */
9139 return (NUM_REGS (mode) < 2) || (regno < LAST_LO_REGNUM);
9141 if (regno <= LAST_ARM_REGNUM)
9142 /* If the register is a general purpose ARM register we allow
9143 it only if it is not a special register (SP, LR, PC) and only
9144 if there will be enough (non-special) registers to hold the
9145 entire value. */
9146 return regno < (SP_REGNUM - (unsigned) NUM_REGS (mode));
9148 if ( regno == FRAME_POINTER_REGNUM
9149 || regno == ARG_POINTER_REGNUM)
9150 /* We only allow integers in the fake hard registers. */
9151 return GET_MODE_CLASS (mode) == MODE_INT;
9153 /* The only registers left are the FPU registers
9154 which we only allow to hold FP values. */
9155 return GET_MODE_CLASS (mode) == MODE_FLOAT
9156 && regno >= FIRST_ARM_FP_REGNUM
9157 && regno <= LAST_ARM_FP_REGNUM;
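/* A worked consequence of the general-register test above, assuming
   SP_REGNUM == 13: an SImode value (NUM_REGS == 1) is accepted in
   r0..r11, while a DImode value (NUM_REGS == 2) is accepted only in
   r0..r10, so that no part of the value can land in SP, LR or PC.  */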
9161 arm_regno_class (regno)
9162 int regno;
9164 if (TARGET_THUMB)
9166 if (regno == STACK_POINTER_REGNUM)
9167 return STACK_REG;
9168 if (regno == CC_REGNUM)
9169 return CC_REG;
9170 if (regno < 8)
9171 return LO_REGS;
9172 return HI_REGS;
9175 if ( regno <= LAST_ARM_REGNUM
9176 || regno == FRAME_POINTER_REGNUM
9177 || regno == ARG_POINTER_REGNUM)
9178 return GENERAL_REGS;
9180 if (regno == CC_REGNUM)
9181 return NO_REGS;
9183 return FPU_REGS;
9186 /* Handle a special case when computing the offset
9187 of an argument from the frame pointer. */
9190 arm_debugger_arg_offset (value, addr)
9191 int value;
9192 rtx addr;
9194 rtx insn;
9196 /* We are only interested if dbxout_parms() failed to compute the offset. */
9197 if (value != 0)
9198 return 0;
9200 /* We can only cope with the case where the address is held in a register. */
9201 if (GET_CODE (addr) != REG)
9202 return 0;
9204 /* If we are using the frame pointer to point at the argument, then
9205 an offset of 0 is correct. */
9206 if (REGNO (addr) == (unsigned) HARD_FRAME_POINTER_REGNUM)
9207 return 0;
9209 /* If we are using the stack pointer to point at the
9210 argument, then an offset of 0 is correct. */
9211 if ((TARGET_THUMB || !frame_pointer_needed)
9212 && REGNO (addr) == SP_REGNUM)
9213 return 0;
9215 /* Oh dear. The argument is pointed to by a register rather
9216 than being held in a register, or being stored at a known
9217 offset from the frame pointer. Since GDB only understands
9218 those two kinds of argument we must translate the address
9219 held in the register into an offset from the frame pointer.
9220 We do this by searching through the insns for the function
9221 looking to see where this register gets its value. If the
9222 register is initialised from the frame pointer plus an offset
9223 then we are in luck and we can continue, otherwise we give up.
9225 This code is exercised by producing debugging information
9226 for a function with arguments like this:
9228 double func (double a, double b, int c, double d) {return d;}
9230 Without this code the stab for parameter 'd' will be set to
9231 an offset of 0 from the frame pointer, rather than 8. */
9233 /* The if() statement says:
9235 If the insn is a normal instruction
9236 and if the insn is setting the value in a register
9237 and if the register being set is the register holding the address of the argument
9238 and if the address is computed by an addition
9239 that involves adding to a register
9240 which is the frame pointer
9241 a constant integer
9243 then... */
9245 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9247 if ( GET_CODE (insn) == INSN
9248 && GET_CODE (PATTERN (insn)) == SET
9249 && REGNO (XEXP (PATTERN (insn), 0)) == REGNO (addr)
9250 && GET_CODE (XEXP (PATTERN (insn), 1)) == PLUS
9251 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 0)) == REG
9252 && REGNO (XEXP (XEXP (PATTERN (insn), 1), 0)) == (unsigned) HARD_FRAME_POINTER_REGNUM
9253 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 1)) == CONST_INT
9256 value = INTVAL (XEXP (XEXP (PATTERN (insn), 1), 1));
9258 break;
9262 if (value == 0)
9264 debug_rtx (addr);
9265 warning ("unable to compute real location of stacked parameter");
9266 value = 8; /* XXX magic hack */
9269 return value;
9272 #define def_builtin(NAME, TYPE, CODE) \
9273 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, NULL)
9275 void
9276 arm_init_builtins ()
9278 tree endlink = void_list_node;
9279 tree int_endlink = tree_cons (NULL_TREE, integer_type_node, endlink);
9280 tree pchar_type_node = build_pointer_type (char_type_node);
9282 tree int_ftype_int, void_ftype_pchar;
9284 /* void func (void *) */
9285 void_ftype_pchar
9286 = build_function_type (void_type_node,
9287 tree_cons (NULL_TREE, pchar_type_node, endlink));
9289 /* int func (int) */
9290 int_ftype_int
9291 = build_function_type (integer_type_node, int_endlink);
9293 /* Initialize arm V5 builtins. */
9294 if (arm_arch5)
9295 def_builtin ("__builtin_clz", int_ftype_int, ARM_BUILTIN_CLZ);
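/* A hedged usage sketch: on an arm_arch5 target the builtin
   registered above expands (via arm_expand_builtin below) to the clz
   pattern, so

     int f (int x) { return __builtin_clz (x); }

   compiles to a single clz instruction; e.g. __builtin_clz (0x00010000)
   yields 15 for a 32-bit int.  */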
9298 /* Expand an expression EXP that calls a built-in function,
9299 with result going to TARGET if that's convenient
9300 (and in mode MODE if that's convenient).
9301 SUBTARGET may be used as the target for computing one of EXP's operands.
9302 IGNORE is nonzero if the value is to be ignored. */
9305 arm_expand_builtin (exp, target, subtarget, mode, ignore)
9306 tree exp;
9307 rtx target;
9308 rtx subtarget ATTRIBUTE_UNUSED;
9309 enum machine_mode mode ATTRIBUTE_UNUSED;
9310 int ignore ATTRIBUTE_UNUSED;
9312 enum insn_code icode;
9313 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9314 tree arglist = TREE_OPERAND (exp, 1);
9315 tree arg0;
9316 rtx op0, pat;
9317 enum machine_mode tmode, mode0;
9318 int fcode = DECL_FUNCTION_CODE (fndecl);
9320 switch (fcode)
9322 default:
9323 break;
9325 case ARM_BUILTIN_CLZ:
9326 icode = CODE_FOR_clz;
9327 arg0 = TREE_VALUE (arglist);
9328 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
9329 tmode = insn_data[icode].operand[0].mode;
9330 mode0 = insn_data[icode].operand[1].mode;
9332 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
9333 op0 = copy_to_mode_reg (mode0, op0);
9334 if (target == 0
9335 || GET_MODE (target) != tmode
9336 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
9337 target = gen_reg_rtx (tmode);
9338 pat = GEN_FCN (icode) (target, op0);
9339 if (! pat)
9340 return 0;
9341 emit_insn (pat);
9342 return target;
9345 /* @@@ Should really do something sensible here. */
9346 return NULL_RTX;
9349 /* Recursively search through all of the blocks in a function
9350 checking to see if any of the variables created in that
9351 function match the RTX called 'orig'. If they do then
9352 replace them with the RTX called 'new'. */
9354 static void
9355 replace_symbols_in_block (block, orig, new)
9356 tree block;
9357 rtx orig;
9358 rtx new;
9360 for (; block; block = BLOCK_CHAIN (block))
9362 tree sym;
9364 if (!TREE_USED (block))
9365 continue;
9367 for (sym = BLOCK_VARS (block); sym; sym = TREE_CHAIN (sym))
9369 if ( (DECL_NAME (sym) == 0 && TREE_CODE (sym) != TYPE_DECL)
9370 || DECL_IGNORED_P (sym)
9371 || TREE_CODE (sym) != VAR_DECL
9372 || DECL_EXTERNAL (sym)
9373 || !rtx_equal_p (DECL_RTL (sym), orig)
9375 continue;
9377 SET_DECL_RTL (sym, new);
9380 replace_symbols_in_block (BLOCK_SUBBLOCKS (block), orig, new);
9384 /* Return the number (counting from 0) of
9385 the least significant set bit in MASK. */
9387 #ifdef __GNUC__
9388 inline
9389 #endif
9390 static int
9391 number_of_first_bit_set (mask)
9392 int mask;
9394 int bit;
9396 for (bit = 0;
9397 (mask & (1 << bit)) == 0;
9398 ++bit)
9399 continue;
9401 return bit;
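/* For example, number_of_first_bit_set (0x14) is 2, since 0x14 is
   binary 10100.  Note that the loop does not terminate for a zero
   mask; callers are expected to pass a non-empty mask.  */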
9404 /* Generate code to return from a thumb function.
9405 If 'reg_containing_return_addr' is -1, then the return address is
9406 actually on the stack, at the stack pointer. */
9407 static void
9408 thumb_exit (f, reg_containing_return_addr, eh_ofs)
9409 FILE * f;
9410 int reg_containing_return_addr;
9411 rtx eh_ofs;
9413 unsigned regs_available_for_popping;
9414 unsigned regs_to_pop;
9415 int pops_needed;
9416 unsigned available;
9417 unsigned required;
9418 int mode;
9419 int size;
9420 int restore_a4 = FALSE;
9422 /* Compute the registers we need to pop. */
9423 regs_to_pop = 0;
9424 pops_needed = 0;
9426 /* There is an assumption here, that if eh_ofs is not NULL, the
9427 normal return address will have been pushed. */
9428 if (reg_containing_return_addr == -1 || eh_ofs)
9430 /* When we are generating a return for __builtin_eh_return,
9431 reg_containing_return_addr must specify the return regno. */
9432 if (eh_ofs && reg_containing_return_addr == -1)
9433 abort ();
9435 regs_to_pop |= 1 << LR_REGNUM;
9436 ++pops_needed;
9439 if (TARGET_BACKTRACE)
9441 /* Restore the (ARM) frame pointer and stack pointer. */
9442 regs_to_pop |= (1 << ARM_HARD_FRAME_POINTER_REGNUM) | (1 << SP_REGNUM);
9443 pops_needed += 2;
9446 /* If there is nothing to pop then just emit the BX instruction and
9447 return. */
9448 if (pops_needed == 0)
9450 if (eh_ofs)
9451 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
9453 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
9454 return;
9456 /* Otherwise if we are not supporting interworking and we have not created
9457 a backtrace structure and the function was not entered in ARM mode then
9458 just pop the return address straight into the PC. */
9459 else if (!TARGET_INTERWORK
9460 && !TARGET_BACKTRACE
9461 && !is_called_in_ARM_mode (current_function_decl))
9463 if (eh_ofs)
9465 asm_fprintf (f, "\tadd\t%r, #4\n", SP_REGNUM);
9466 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
9467 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
9469 else
9470 asm_fprintf (f, "\tpop\t{%r}\n", PC_REGNUM);
9472 return;
9475 /* Find out how many of the (return) argument registers we can corrupt. */
9476 regs_available_for_popping = 0;
9478 /* If returning via __builtin_eh_return, the bottom three registers
9479 all contain information needed for the return. */
9480 if (eh_ofs)
9481 size = 12;
9482 else
9484 #ifdef RTX_CODE
9485 /* We can deduce the registers used from the function's
9486 return value. This is more reliable than examining
9487 regs_ever_live[] because that will be set if the register is
9488 ever used in the function, not just if the register is used
9489 to hold a return value. */
9491 if (current_function_return_rtx != 0)
9492 mode = GET_MODE (current_function_return_rtx);
9493 else
9494 #endif
9495 mode = DECL_MODE (DECL_RESULT (current_function_decl));
9497 size = GET_MODE_SIZE (mode);
9499 if (size == 0)
9501 /* In a void function we can use any argument register.
9502 In a function that returns a structure on the stack
9503 we can use the second and third argument registers. */
9504 if (mode == VOIDmode)
9505 regs_available_for_popping =
9506 (1 << ARG_REGISTER (1))
9507 | (1 << ARG_REGISTER (2))
9508 | (1 << ARG_REGISTER (3));
9509 else
9510 regs_available_for_popping =
9511 (1 << ARG_REGISTER (2))
9512 | (1 << ARG_REGISTER (3));
9514 else if (size <= 4)
9515 regs_available_for_popping =
9516 (1 << ARG_REGISTER (2))
9517 | (1 << ARG_REGISTER (3));
9518 else if (size <= 8)
9519 regs_available_for_popping =
9520 (1 << ARG_REGISTER (3));
9523 /* Match registers to be popped with registers into which we pop them. */
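/* (The expression `x & - x' isolates the lowest set bit of x, so each
   iteration of the loop below clears one bit from both masks, pairing
   off one register to pop with one register to receive it.  E.g. with
   required == 0x0110 and available == 0x000e the loop runs twice and
   pops_needed drops by two.)  */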
9524 for (available = regs_available_for_popping,
9525 required = regs_to_pop;
9526 required != 0 && available != 0;
9527 available &= ~(available & - available),
9528 required &= ~(required & - required))
9529 -- pops_needed;
9531 /* If we have any popping registers left over, remove them. */
9532 if (available > 0)
9533 regs_available_for_popping &= ~available;
9535 /* Otherwise if we need another popping register we can use
9536 the fourth argument register. */
9537 else if (pops_needed)
9539 /* If we have not found any free argument registers and
9540 reg a4 contains the return address, we must move it. */
9541 if (regs_available_for_popping == 0
9542 && reg_containing_return_addr == LAST_ARG_REGNUM)
9544 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
9545 reg_containing_return_addr = LR_REGNUM;
9547 else if (size > 12)
9549 /* Register a4 is being used to hold part of the return value,
9550 but we have dire need of a free, low register. */
9551 restore_a4 = TRUE;
9553 asm_fprintf (f, "\tmov\t%r, %r\n",IP_REGNUM, LAST_ARG_REGNUM);
9556 if (reg_containing_return_addr != LAST_ARG_REGNUM)
9558 /* The fourth argument register is available. */
9559 regs_available_for_popping |= 1 << LAST_ARG_REGNUM;
9561 --pops_needed;
9565 /* Pop as many registers as we can. */
9566 thumb_pushpop (f, regs_available_for_popping, FALSE);
9568 /* Process the registers we popped. */
9569 if (reg_containing_return_addr == -1)
9571 /* The return address was popped into the lowest numbered register. */
9572 regs_to_pop &= ~(1 << LR_REGNUM);
9574 reg_containing_return_addr =
9575 number_of_first_bit_set (regs_available_for_popping);
9577 /* Remove this register from the mask of available registers, so that
9578 the return address will not be corrupted by further pops. */
9579 regs_available_for_popping &= ~(1 << reg_containing_return_addr);
9582 /* If we popped other registers then handle them here. */
9583 if (regs_available_for_popping)
9585 int frame_pointer;
9587 /* Work out which register currently contains the frame pointer. */
9588 frame_pointer = number_of_first_bit_set (regs_available_for_popping);
9590 /* Move it into the correct place. */
9591 asm_fprintf (f, "\tmov\t%r, %r\n",
9592 ARM_HARD_FRAME_POINTER_REGNUM, frame_pointer);
9594 /* (Temporarily) remove it from the mask of popped registers. */
9595 regs_available_for_popping &= ~(1 << frame_pointer);
9596 regs_to_pop &= ~(1 << ARM_HARD_FRAME_POINTER_REGNUM);
9598 if (regs_available_for_popping)
9600 int stack_pointer;
9602 /* We popped the stack pointer as well,
9603 find the register that contains it. */
9604 stack_pointer = number_of_first_bit_set (regs_available_for_popping);
9606 /* Move it into the stack register. */
9607 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, stack_pointer);
9609 /* At this point we have popped all necessary registers, so
9610 do not worry about restoring regs_available_for_popping
9611 to its correct value:
9613 assert (pops_needed == 0)
9614 assert (regs_available_for_popping == (1 << frame_pointer))
9615 assert (regs_to_pop == (1 << STACK_POINTER)) */
9617 else
9619 /* Since we have just moved the popped value into the frame
9620 pointer, the popping register is available for reuse, and
9621 we know that we still have the stack pointer left to pop. */
9622 regs_available_for_popping |= (1 << frame_pointer);
9626 /* If we still have registers left on the stack, but we no longer have
9627 any registers into which we can pop them, then we must move the return
9628 address into the link register and make available the register that
9629 contained it. */
9630 if (regs_available_for_popping == 0 && pops_needed > 0)
9632 regs_available_for_popping |= 1 << reg_containing_return_addr;
9634 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM,
9635 reg_containing_return_addr);
9637 reg_containing_return_addr = LR_REGNUM;
9640 /* If we have registers left on the stack then pop some more.
9641 We know that at most we will want to pop FP and SP. */
9642 if (pops_needed > 0)
9644 int popped_into;
9645 int move_to;
9647 thumb_pushpop (f, regs_available_for_popping, FALSE);
9649 /* We have popped either FP or SP.
9650 Move whichever one it is into the correct register. */
9651 popped_into = number_of_first_bit_set (regs_available_for_popping);
9652 move_to = number_of_first_bit_set (regs_to_pop);
9654 asm_fprintf (f, "\tmov\t%r, %r\n", move_to, popped_into);
9656 regs_to_pop &= ~(1 << move_to);
9658 --pops_needed;
9661 /* If we still have not popped everything then we must have only
9662 had one register available to us and we are now popping the SP. */
9663 if (pops_needed > 0)
9665 int popped_into;
9667 thumb_pushpop (f, regs_available_for_popping, FALSE);
9669 popped_into = number_of_first_bit_set (regs_available_for_popping);
9671 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, popped_into);
9673 assert (regs_to_pop == (1 << STACK_POINTER))
9674 assert (pops_needed == 1)
9678 /* If necessary restore the a4 register. */
9679 if (restore_a4)
9681 if (reg_containing_return_addr != LR_REGNUM)
9683 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
9684 reg_containing_return_addr = LR_REGNUM;
9687 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
9690 if (eh_ofs)
9691 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
9693 /* Return to caller. */
9694 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
9697 /* Emit code to push or pop registers to or from the stack. */
9699 static void
9700 thumb_pushpop (f, mask, push)
9701 FILE * f;
9702 int mask;
9703 int push;
9705 int regno;
9706 int lo_mask = mask & 0xFF;
9708 if (lo_mask == 0 && !push && (mask & (1 << 15)))
9710 /* Special case. Do not generate a POP PC statement here, do it in
9711 thumb_exit(). */
9712 thumb_exit (f, -1, NULL_RTX);
9713 return;
9716 fprintf (f, "\t%s\t{", push ? "push" : "pop");
9718 /* Look at the low registers first. */
9719 for (regno = 0; regno <= LAST_LO_REGNUM; regno++, lo_mask >>= 1)
9721 if (lo_mask & 1)
9723 asm_fprintf (f, "%r", regno);
9725 if ((lo_mask & ~1) != 0)
9726 fprintf (f, ", ");
9730 if (push && (mask & (1 << LR_REGNUM)))
9732 /* Catch pushing the LR. */
9733 if (mask & 0xFF)
9734 fprintf (f, ", ");
9736 asm_fprintf (f, "%r", LR_REGNUM);
9738 else if (!push && (mask & (1 << PC_REGNUM)))
9740 /* Catch popping the PC. */
9741 if (TARGET_INTERWORK || TARGET_BACKTRACE)
9743 /* The PC is never popped directly; instead
9744 it is popped into r3 and then BX is used. */
9745 fprintf (f, "}\n");
9747 thumb_exit (f, -1, NULL_RTX);
9749 return;
9751 else
9753 if (mask & 0xFF)
9754 fprintf (f, ", ");
9756 asm_fprintf (f, "%r", PC_REGNUM);
9760 fprintf (f, "}\n");
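/* Illustrative output: thumb_pushpop (f, 0x40f0, 1) -- r4-r7 plus LR
   (bit 14), pushing -- prints:

     push {r4, r5, r6, r7, lr}

   A pop whose mask includes the PC is instead routed through
   thumb_exit() when interworking or backtracing is enabled.  */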
9763 void
9764 thumb_final_prescan_insn (insn)
9765 rtx insn;
9767 if (flag_print_asm_name)
9768 asm_fprintf (asm_out_file, "%@ 0x%04x\n",
9769 INSN_ADDRESSES (INSN_UID (insn)));
9773 thumb_shiftable_const (val)
9774 unsigned HOST_WIDE_INT val;
9776 unsigned HOST_WIDE_INT mask = 0xff;
9777 int i;
9779 if (val == 0) /* XXX */
9780 return 0;
9782 for (i = 0; i < 25; i++)
9783 if ((val & (mask << i)) == val)
9784 return 1;
9786 return 0;
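/* Illustrative values: thumb_shiftable_const (0x4400) returns 1,
   since 0x4400 == 0x44 << 8, while thumb_shiftable_const (0x101)
   returns 0, as no single 8-bit field shifted left covers both set
   bits.  */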
9789 /* Returns non-zero if the current function contains,
9790 or might contain a far jump. */
9793 thumb_far_jump_used_p (int in_prologue)
9795 rtx insn;
9797 /* This test is only important for leaf functions. */
9798 /* assert (!leaf_function_p ()); */
9800 /* If we have already decided that far jumps may be used,
9801 do not bother checking again, and always return true even if
9802 it turns out that they are not being used. Once we have made
9803 the decision that far jumps are present (and that hence the link
9804 register will be pushed onto the stack) we cannot go back on it. */
9805 if (cfun->machine->far_jump_used)
9806 return 1;
9808 /* If this function is not being called from the prologue/epilogue
9809 generation code then it must be being called from the
9810 INITIAL_ELIMINATION_OFFSET macro. */
9811 if (!in_prologue)
9813 /* In this case we know that we are being asked about the elimination
9814 of the arg pointer register. If that register is not being used,
9815 then there are no arguments on the stack, and we do not have to
9816 worry that a far jump might force the prologue to push the link
9817 register, changing the stack offsets. In this case we can just
9818 return false, since the presence of far jumps in the function will
9819 not affect stack offsets.
9821 If the arg pointer is live (or if it was live, but has now been
9822 eliminated and so set to dead) then we do have to test to see if
9823 the function might contain a far jump. This test can lead to some
9824 false positives, since before reload is completed the length of
9825 branch instructions is not known, so gcc defaults to returning their
9826 longest length, which in turn sets the far jump attribute to true.
9828 A false positive will not result in bad code being generated, but it
9829 will result in a needless push and pop of the link register. We
9830 hope that this does not occur too often. */
9831 if (regs_ever_live [ARG_POINTER_REGNUM])
9832 cfun->machine->arg_pointer_live = 1;
9833 else if (!cfun->machine->arg_pointer_live)
9834 return 0;
9837 /* Check to see if the function contains a branch
9838 insn with the far jump attribute set. */
9839 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9841 if (GET_CODE (insn) == JUMP_INSN
9842 /* Ignore tablejump patterns. */
9843 && GET_CODE (PATTERN (insn)) != ADDR_VEC
9844 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
9845 && get_attr_far_jump (insn) == FAR_JUMP_YES
9848 /* Record the fact that we have decided that
9849 the function does use far jumps. */
9850 cfun->machine->far_jump_used = 1;
9851 return 1;
9855 return 0;
9858 /* Return non-zero if FUNC must be entered in ARM mode. */
9861 is_called_in_ARM_mode (func)
9862 tree func;
9864 if (TREE_CODE (func) != FUNCTION_DECL)
9865 abort ();
9867 /* Ignore the problem about functions whose address is taken. */
9868 if (TARGET_CALLEE_INTERWORKING && TREE_PUBLIC (func))
9869 return TRUE;
9871 #ifdef ARM_PE
9872 return lookup_attribute ("interfacearm", DECL_ATTRIBUTES (func)) != NULL_TREE;
9873 #else
9874 return FALSE;
9875 #endif
9878 /* The bits which aren't usefully expanded as rtl. */
9880 const char *
9881 thumb_unexpanded_epilogue ()
9883 int regno;
9884 int live_regs_mask = 0;
9885 int high_regs_pushed = 0;
9886 int leaf_function = leaf_function_p ();
9887 int had_to_push_lr;
9888 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
9890 if (return_used_this_function)
9891 return "";
9893 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
9894 if (regs_ever_live[regno] && !call_used_regs[regno]
9895 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
9896 live_regs_mask |= 1 << regno;
9898 for (regno = 8; regno < 13; regno++)
9900 if (regs_ever_live[regno] && !call_used_regs[regno]
9901 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
9902 high_regs_pushed++;
9905 /* The prolog may have pushed some high registers to use as
9906 work registers, e.g. the testsuite file:
9907 gcc/testsuite/gcc/gcc.c-torture/execute/complex-2.c
9908 compiles to produce:
9909 push {r4, r5, r6, r7, lr}
9910 mov r7, r9
9911 mov r6, r8
9912 push {r6, r7}
9913 as part of the prolog. We have to undo that pushing here. */
9915 if (high_regs_pushed)
9917 int mask = live_regs_mask;
9918 int next_hi_reg;
9919 int size;
9920 int mode;
9922 #ifdef RTX_CODE
9923 /* We can deduce the registers used from the function's return value.
9924 This is more reliable than examining regs_ever_live[] because that
9925 will be set if the register is ever used in the function, not just if
9926 the register is used to hold a return value. */
9928 if (current_function_return_rtx != 0)
9929 mode = GET_MODE (current_function_return_rtx);
9930 else
9931 #endif
9932 mode = DECL_MODE (DECL_RESULT (current_function_decl));
9934 size = GET_MODE_SIZE (mode);
9936 /* Unless we are returning a type of size > 12, register r3 is
9937 available. */
9938 if (size < 13)
9939 mask |= 1 << 3;
9941 if (mask == 0)
9942 /* Oh dear! We have no low registers into which we can pop
9943 high registers! */
9944 internal_error
9945 ("no low registers available for popping high registers");
9947 for (next_hi_reg = 8; next_hi_reg < 13; next_hi_reg++)
9948 if (regs_ever_live[next_hi_reg] && !call_used_regs[next_hi_reg]
9949 && !(TARGET_SINGLE_PIC_BASE && (next_hi_reg == arm_pic_register)))
9950 break;
9952 while (high_regs_pushed)
9954 /* Find lo register(s) into which the high register(s) can
9955 be popped. */
9956 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
9958 if (mask & (1 << regno))
9959 high_regs_pushed--;
9960 if (high_regs_pushed == 0)
9961 break;
9964 mask &= (2 << regno) - 1; /* A noop if regno == 8 */
9966 /* Pop the values into the low register(s). */
9967 thumb_pushpop (asm_out_file, mask, 0);
9969 /* Move the value(s) into the high registers. */
9970 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
9972 if (mask & (1 << regno))
9974 asm_fprintf (asm_out_file, "\tmov\t%r, %r\n", next_hi_reg,
9975 regno);
9977 for (next_hi_reg++; next_hi_reg < 13; next_hi_reg++)
9978 if (regs_ever_live[next_hi_reg]
9979 && !call_used_regs[next_hi_reg]
9980 && !(TARGET_SINGLE_PIC_BASE
9981 && (next_hi_reg == arm_pic_register)))
9982 break;
9988 had_to_push_lr = (live_regs_mask || !leaf_function
9989 || thumb_far_jump_used_p (1));
9991 if (TARGET_BACKTRACE
9992 && ((live_regs_mask & 0xFF) == 0)
9993 && regs_ever_live [LAST_ARG_REGNUM] != 0)
9995 /* The stack backtrace structure creation code had to
9996 push R7 in order to get a work register, so we pop
9997 it now. */
9998 live_regs_mask |= (1 << LAST_LO_REGNUM);
10001 if (current_function_pretend_args_size == 0 || TARGET_BACKTRACE)
10003 if (had_to_push_lr
10004 && !is_called_in_ARM_mode (current_function_decl)
10005 && !eh_ofs)
10006 live_regs_mask |= 1 << PC_REGNUM;
10008 /* Either no argument registers were pushed or a backtrace
10009 structure was created which includes an adjusted stack
10010 pointer, so just pop everything. */
10011 if (live_regs_mask)
10012 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
10014 if (eh_ofs)
10015 thumb_exit (asm_out_file, 2, eh_ofs);
10016 /* We have either just popped the return address into the
10017 PC, or it was kept in LR for the entire function, or
10018 it is still on the stack because we do not want to
10019 return by doing a pop {pc}. */
10020 else if ((live_regs_mask & (1 << PC_REGNUM)) == 0)
10021 thumb_exit (asm_out_file,
10022 (had_to_push_lr
10023 && is_called_in_ARM_mode (current_function_decl)) ?
10024 -1 : LR_REGNUM, NULL_RTX);
10026 else
10028 /* Pop everything but the return address. */
10029 live_regs_mask &= ~(1 << PC_REGNUM);
10031 if (live_regs_mask)
10032 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
10034 if (had_to_push_lr)
10035 /* Get the return address into a temporary register. */
10036 thumb_pushpop (asm_out_file, 1 << LAST_ARG_REGNUM, 0);
10038 /* Remove the argument registers that were pushed onto the stack. */
10039 asm_fprintf (asm_out_file, "\tadd\t%r, %r, #%d\n",
10040 SP_REGNUM, SP_REGNUM,
10041 current_function_pretend_args_size);
10043 if (eh_ofs)
10044 thumb_exit (asm_out_file, 2, eh_ofs);
10045 else
10046 thumb_exit (asm_out_file,
10047 had_to_push_lr ? LAST_ARG_REGNUM : LR_REGNUM, NULL_RTX);
10050 return "";
10053 /* Functions to save and restore machine-specific function data. */
10055 static void
10056 arm_mark_machine_status (p)
10057 struct function * p;
10059 machine_function *machine = p->machine;
10061 if (machine)
10062 ggc_mark_rtx (machine->eh_epilogue_sp_ofs);
10065 static void
10066 arm_init_machine_status (p)
10067 struct function * p;
10069 p->machine =
10070 (machine_function *) xcalloc (1, sizeof (machine_function));
10072 #if ARM_FT_UNKNOWN != 0
10073 ((machine_function *) p->machine)->func_type = ARM_FT_UNKNOWN;
10074 #endif
10077 static void
10078 arm_free_machine_status (p)
10079 struct function * p;
10081 if (p->machine)
10083 free (p->machine);
10084 p->machine = NULL;
10088 /* Return an RTX indicating where the return address to the
10089 calling function can be found. */
10092 arm_return_addr (count, frame)
10093 int count;
10094 rtx frame ATTRIBUTE_UNUSED;
10096 if (count != 0)
10097 return NULL_RTX;
10099 if (TARGET_APCS_32)
10100 return get_hard_reg_initial_val (Pmode, LR_REGNUM);
10101 else
10103 rtx lr = gen_rtx_AND (Pmode, gen_rtx_REG (Pmode, LR_REGNUM),
10104 GEN_INT (RETURN_ADDR_MASK26));
10105 return get_func_hard_reg_initial_val (cfun, lr);
10109 /* Do anything needed before RTL is emitted for each function. */
10111 void
10112 arm_init_expanders ()
10114 /* Arrange to initialize and mark the machine per-function status. */
10115 init_machine_status = arm_init_machine_status;
10116 mark_machine_status = arm_mark_machine_status;
10117 free_machine_status = arm_free_machine_status;
10120 /* Generate the rest of a function's prologue. */
10122 void
10123 thumb_expand_prologue ()
10125 HOST_WIDE_INT amount = (get_frame_size ()
10126 + current_function_outgoing_args_size);
10127 unsigned long func_type;
10129 func_type = arm_current_func_type ();
10131 /* Naked functions don't have prologues. */
10132 if (IS_NAKED (func_type))
10133 return;
10135 if (IS_INTERRUPT (func_type))
10137 error ("interrupt service routines cannot be coded in Thumb mode");
10138 return;
10141 if (frame_pointer_needed)
10142 emit_insn (gen_movsi (hard_frame_pointer_rtx, stack_pointer_rtx));
10144 if (amount)
10146 amount = ROUND_UP (amount);
10148 if (amount < 512)
10149 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
10150 GEN_INT (- amount)));
10151 else
10153 int regno;
10154 rtx reg;
10156 /* The stack decrement is too big for an immediate value in a single
10157 insn. In theory we could issue multiple subtracts, but after
10158 three of them it becomes more space efficient to place the full
10159 value in the constant pool and load into a register. (Also the
10160 ARM debugger really likes to see only one stack decrement per
10161 function). So instead we look for a scratch register into which
10162 we can load the decrement, and then we subtract this from the
10163 stack pointer. Unfortunately on the thumb the only available
10164 scratch registers are the argument registers, and we cannot use
10165 these as they may hold arguments to the function. Instead we
10166 attempt to locate a call preserved register which is used by this
10167 function. If we can find one, then we know that it will have
10168 been pushed at the start of the prologue and so we can corrupt
10169 it now. */
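/* A sketch of the sequence emitted on this path (the register and the
   literal-pool label are illustrative):

	ldr	r4, .Ln		@ .Ln: .word -<amount>
	add	sp, sp, r4
*/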
10170 for (regno = LAST_ARG_REGNUM + 1; regno <= LAST_LO_REGNUM; regno++)
10171 if (regs_ever_live[regno]
10172 && !call_used_regs[regno] /* Paranoia */
10173 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register))
10174 && !(frame_pointer_needed
10175 && (regno == THUMB_HARD_FRAME_POINTER_REGNUM)))
10176 break;
10178 if (regno > LAST_LO_REGNUM) /* Very unlikely */
10180 rtx spare = gen_rtx (REG, SImode, IP_REGNUM);
10181 rtx insn;
10183 /* Choose an arbitrary, non-argument low register. */
10184 reg = gen_rtx (REG, SImode, LAST_LO_REGNUM);
10186 /* Save it by copying it into a high, scratch register. */
10187 emit_insn (gen_movsi (spare, reg));
10188 /* Add a USE to stop propagate_one_insn() from barfing. */
10189 emit_insn (gen_rtx_USE (VOIDmode, spare));
10191 /* Decrement the stack. */
10192 emit_insn (gen_movsi (reg, GEN_INT (- amount)));
10193 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
10194 reg));
10196 /* Restore the low register's original value. */
10197 emit_insn (gen_movsi (reg, spare));
10199 /* Emit a USE of the restored scratch register, so that flow
10200 analysis will not consider the restore redundant. The
10201 register won't be used again in this function and isn't
10202 restored by the epilogue. */
10203 emit_insn (gen_rtx_USE (VOIDmode, reg));
10205 else
10207 reg = gen_rtx (REG, SImode, regno);
10209 emit_insn (gen_movsi (reg, GEN_INT (- amount)));
10210 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
10211 reg));
10216 if (current_function_profile || TARGET_NO_SCHED_PRO)
10217 emit_insn (gen_blockage ());
10220 void
10221 thumb_expand_epilogue ()
10223 HOST_WIDE_INT amount = (get_frame_size ()
10224 + current_function_outgoing_args_size);
10226 /* Naked functions don't have epilogues. */
10227 if (IS_NAKED (arm_current_func_type ()))
10228 return;
10230 if (frame_pointer_needed)
10231 emit_insn (gen_movsi (stack_pointer_rtx, hard_frame_pointer_rtx));
10232 else if (amount)
10234 amount = ROUND_UP (amount);
10236 if (amount < 512)
10237 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
10238 GEN_INT (amount)));
10239 else
10241 /* r3 is always free in the epilogue. */
10242 rtx reg = gen_rtx (REG, SImode, LAST_ARG_REGNUM);
10244 emit_insn (gen_movsi (reg, GEN_INT (amount)));
10245 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
10249 /* Emit a USE (stack_pointer_rtx), so that
10250 the stack adjustment will not be deleted. */
10251 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
10253 if (current_function_profile || TARGET_NO_SCHED_PRO)
10254 emit_insn (gen_blockage ());
10257 static void
10258 thumb_output_function_prologue (f, size)
10259 FILE * f;
10260 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
10262 int live_regs_mask = 0;
10263 int high_regs_pushed = 0;
10264 int regno;
10266 if (IS_NAKED (arm_current_func_type ()))
10267 return;
10269 if (is_called_in_ARM_mode (current_function_decl))
10271 const char * name;
10273 if (GET_CODE (DECL_RTL (current_function_decl)) != MEM)
10274 abort ();
10275 if (GET_CODE (XEXP (DECL_RTL (current_function_decl), 0)) != SYMBOL_REF)
10276 abort ();
10277 name = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
10279 /* Generate code sequence to switch us into Thumb mode. */
10280 /* The .code 32 directive has already been emitted by
10281 ASM_DECLARE_FUNCTION_NAME. */
10282 asm_fprintf (f, "\torr\t%r, %r, #1\n", IP_REGNUM, PC_REGNUM);
10283 asm_fprintf (f, "\tbx\t%r\n", IP_REGNUM);
10285 /* Generate a label, so that the debugger will notice the
10286 change in instruction sets. This label is also used by
10287 the assembler to bypass the ARM code when this function
10288 is called from a Thumb encoded function elsewhere in the
10289 same file. Hence the definition of STUB_NAME here must
10290 agree with the definition in gas/config/tc-arm.c */
10292 #define STUB_NAME ".real_start_of"
10294 asm_fprintf (f, "\t.code\t16\n");
10295 #ifdef ARM_PE
10296 if (arm_dllexport_name_p (name))
10297 name = arm_strip_name_encoding (name);
10298 #endif
10299 asm_fprintf (f, "\t.globl %s%U%s\n", STUB_NAME, name);
10300 asm_fprintf (f, "\t.thumb_func\n");
10301 asm_fprintf (f, "%s%U%s:\n", STUB_NAME, name);
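/* Schematically, the stub emitted above for a function <name> is:
	orr	ip, pc, #1	@ pc reads as the address past the bx;
	bx	ip		@ set the Thumb bit and switch state
	.code	16
	.globl	.real_start_of<name>
	.thumb_func
   .real_start_of<name>:
   (<name> here stands for the possibly prefix-encoded name).  */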
10304 if (current_function_pretend_args_size)
10306 if (current_function_anonymous_args)
10308 int num_pushes;
10310 asm_fprintf (f, "\tpush\t{");
10312 num_pushes = NUM_INTS (current_function_pretend_args_size);
10314 for (regno = LAST_ARG_REGNUM + 1 - num_pushes;
10315 regno <= LAST_ARG_REGNUM;
10316 regno++)
10317 asm_fprintf (f, "%r%s", regno,
10318 regno == LAST_ARG_REGNUM ? "" : ", ");
10320 asm_fprintf (f, "}\n");
10322 else
10323 asm_fprintf (f, "\tsub\t%r, %r, #%d\n",
10324 SP_REGNUM, SP_REGNUM,
10325 current_function_pretend_args_size);
10328 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
10329 if (regs_ever_live[regno] && !call_used_regs[regno]
10330 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
10331 live_regs_mask |= 1 << regno;
10333 if (live_regs_mask || !leaf_function_p () || thumb_far_jump_used_p (1))
10334 live_regs_mask |= 1 << LR_REGNUM;
10336 if (TARGET_BACKTRACE)
10338 int offset;
10339 int work_register = 0;
10340 int wr;
10342 /* We have been asked to create a stack backtrace structure.
10343 The code looks like this:
10345 0 .align 2
10346 0 func:
10347 0 sub SP, #16 Reserve space for 4 registers.
10348 2 push {R7} Get a work register.
10349 4 add R7, SP, #20 Get the stack pointer before the push.
10350 6 str R7, [SP, #8] Store the stack pointer (before reserving the space).
10351 8 mov R7, PC Get hold of the start of this code plus 12.
10352 10 str R7, [SP, #16] Store it.
10353 12 mov R7, FP Get hold of the current frame pointer.
10354 14 str R7, [SP, #4] Store it.
10355 16 mov R7, LR Get hold of the current return address.
10356 18 str R7, [SP, #12] Store it.
10357 20 add R7, SP, #16 Point at the start of the backtrace structure.
10358 22 mov FP, R7 Put this value into the frame pointer. */
10360 if ((live_regs_mask & 0xFF) == 0)
10362 /* See if the a4 register is free. */
10364	  if (regs_ever_live[LAST_ARG_REGNUM] == 0)
10365 work_register = LAST_ARG_REGNUM;
10366	  else	/* We must push a register of our own.  */
10367 live_regs_mask |= (1 << LAST_LO_REGNUM);
10370 if (work_register == 0)
10372 /* Select a register from the list that will be pushed to
10373 use as our work register. */
10374 for (work_register = (LAST_LO_REGNUM + 1); work_register--;)
10375 if ((1 << work_register) & live_regs_mask)
10376 break;
10379 asm_fprintf
10380 (f, "\tsub\t%r, %r, #16\t%@ Create stack backtrace structure\n",
10381 SP_REGNUM, SP_REGNUM);
10383 if (live_regs_mask)
10384 thumb_pushpop (f, live_regs_mask, 1);
10386 for (offset = 0, wr = 1 << 15; wr != 0; wr >>= 1)
10387 if (wr & live_regs_mask)
10388 offset += 4;
10390 asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
10391 offset + 16 + current_function_pretend_args_size);
10393 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
10394 offset + 4);
10396 /* Make sure that the instruction fetching the PC is in the right place
10397 to calculate "start of backtrace creation code + 12". */
10398 if (live_regs_mask)
10400 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
10401 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
10402 offset + 12);
10403 asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
10404 ARM_HARD_FRAME_POINTER_REGNUM);
10405 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
10406 offset);
10408 else
10410 asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
10411 ARM_HARD_FRAME_POINTER_REGNUM);
10412 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
10413 offset);
10414 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
10415 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
10416 offset + 12);
10419 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, LR_REGNUM);
10420 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
10421 offset + 8);
10422 asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
10423 offset + 12);
10424 asm_fprintf (f, "\tmov\t%r, %r\t\t%@ Backtrace structure created\n",
10425 ARM_HARD_FRAME_POINTER_REGNUM, work_register);
10427 else if (live_regs_mask)
10428 thumb_pushpop (f, live_regs_mask, 1);
10430 for (regno = 8; regno < 13; regno++)
10432 if (regs_ever_live[regno] && !call_used_regs[regno]
10433 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
10434 high_regs_pushed++;
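/* Thumb push instructions can only name the low registers (and LR),
   so any live high registers counted above must first be copied into
   spare low registers before they can be saved on the stack.  */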
10437 if (high_regs_pushed)
10439 int pushable_regs = 0;
10440 int mask = live_regs_mask & 0xff;
10441 int next_hi_reg;
10443 for (next_hi_reg = 12; next_hi_reg > LAST_LO_REGNUM; next_hi_reg--)
10445 if (regs_ever_live[next_hi_reg] && !call_used_regs[next_hi_reg]
10446 && !(TARGET_SINGLE_PIC_BASE
10447 && (next_hi_reg == arm_pic_register)))
10448 break;
10451 pushable_regs = mask;
10453 if (pushable_regs == 0)
10455	      /* Desperation time -- this will probably never happen.  */
10456 if (regs_ever_live[LAST_ARG_REGNUM]
10457 || !call_used_regs[LAST_ARG_REGNUM])
10458 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, LAST_ARG_REGNUM);
10459 mask = 1 << LAST_ARG_REGNUM;
10462 while (high_regs_pushed > 0)
10464 for (regno = LAST_LO_REGNUM; regno >= 0; regno--)
10466 if (mask & (1 << regno))
10468 asm_fprintf (f, "\tmov\t%r, %r\n", regno, next_hi_reg);
10470 high_regs_pushed--;
10472 if (high_regs_pushed)
10473 for (next_hi_reg--; next_hi_reg > LAST_LO_REGNUM;
10474 next_hi_reg--)
10476 if (regs_ever_live[next_hi_reg]
10477 && !call_used_regs[next_hi_reg]
10478 && !(TARGET_SINGLE_PIC_BASE
10479 && (next_hi_reg == arm_pic_register)))
10480 break;
10482 else
10484 mask &= ~((1 << regno) - 1);
10485 break;
10490 thumb_pushpop (f, mask, 1);
10493 if (pushable_regs == 0
10494 && (regs_ever_live[LAST_ARG_REGNUM]
10495 || !call_used_regs[LAST_ARG_REGNUM]))
10496 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
10500 /* Handle the case of a double word load into a low register from
10501 a computed memory address. The computed address may involve a
10502 register which is overwritten by the load. */
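/* For instance, loading a DImode value into r0/r1 from the address in
   r0 must load the high word first, so that the base register is not
   clobbered before it is used:
	ldr	r1, [r0, #4]
	ldr	r0, [r0]  */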
10504 const char *
10505 thumb_load_double_from_address (operands)
10506 rtx *operands;
10508 rtx addr;
10509 rtx base;
10510 rtx offset;
10511 rtx arg1;
10512 rtx arg2;
10514 if (GET_CODE (operands[0]) != REG)
10515 abort ();
10517 if (GET_CODE (operands[1]) != MEM)
10518 abort ();
10520 /* Get the memory address. */
10521 addr = XEXP (operands[1], 0);
10523 /* Work out how the memory address is computed. */
10524 switch (GET_CODE (addr))
10526 case REG:
10527 operands[2] = gen_rtx (MEM, SImode,
10528 plus_constant (XEXP (operands[1], 0), 4));
10530 if (REGNO (operands[0]) == REGNO (addr))
10532 output_asm_insn ("ldr\t%H0, %2", operands);
10533 output_asm_insn ("ldr\t%0, %1", operands);
10535 else
10537 output_asm_insn ("ldr\t%0, %1", operands);
10538 output_asm_insn ("ldr\t%H0, %2", operands);
10540 break;
10542 case CONST:
10543 /* Compute <address> + 4 for the high order load. */
10544 operands[2] = gen_rtx (MEM, SImode,
10545 plus_constant (XEXP (operands[1], 0), 4));
10547 output_asm_insn ("ldr\t%0, %1", operands);
10548 output_asm_insn ("ldr\t%H0, %2", operands);
10549 break;
10551 case PLUS:
10552 arg1 = XEXP (addr, 0);
10553 arg2 = XEXP (addr, 1);
10555 if (CONSTANT_P (arg1))
10556 base = arg2, offset = arg1;
10557 else
10558 base = arg1, offset = arg2;
10560 if (GET_CODE (base) != REG)
10561 abort ();
10563 /* Catch the case of <address> = <reg> + <reg> */
10564 if (GET_CODE (offset) == REG)
10566 int reg_offset = REGNO (offset);
10567 int reg_base = REGNO (base);
10568 int reg_dest = REGNO (operands[0]);
10570 /* Add the base and offset registers together into the
10571 higher destination register. */
10572 asm_fprintf (asm_out_file, "\tadd\t%r, %r, %r",
10573 reg_dest + 1, reg_base, reg_offset);
10575 /* Load the lower destination register from the address in
10576 the higher destination register. */
10577 asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #0]",
10578 reg_dest, reg_dest + 1);
10580 /* Load the higher destination register from its own address
10581 plus 4. */
10582 asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #4]",
10583 reg_dest + 1, reg_dest + 1);
10585 else
10587 /* Compute <address> + 4 for the high order load. */
10588 operands[2] = gen_rtx (MEM, SImode,
10589 plus_constant (XEXP (operands[1], 0), 4));
10591 /* If the computed address is held in the low order register
10592 then load the high order register first, otherwise always
10593 load the low order register first. */
10594 if (REGNO (operands[0]) == REGNO (base))
10596 output_asm_insn ("ldr\t%H0, %2", operands);
10597 output_asm_insn ("ldr\t%0, %1", operands);
10599 else
10601 output_asm_insn ("ldr\t%0, %1", operands);
10602 output_asm_insn ("ldr\t%H0, %2", operands);
10605 break;
10607 case LABEL_REF:
10608 /* With no registers to worry about we can just load the value
10609 directly. */
10610 operands[2] = gen_rtx (MEM, SImode,
10611 plus_constant (XEXP (operands[1], 0), 4));
10613 output_asm_insn ("ldr\t%H0, %2", operands);
10614 output_asm_insn ("ldr\t%0, %1", operands);
10615 break;
10617 default:
10618 abort ();
10619 break;
10622 return "";
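/* Output a block move of N words from the address in operands[1] to
   the address in operands[0], using operands[4]..operands[6] as
   scratch registers.  The scratch registers are sorted into ascending
   order first, because ldmia/stmia register lists always transfer the
   lowest numbered register from/to the lowest address.  */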
10626 const char *
10627 thumb_output_move_mem_multiple (n, operands)
10628 int n;
10629 rtx * operands;
10631 rtx tmp;
10633 switch (n)
10635 case 2:
10636 if (REGNO (operands[4]) > REGNO (operands[5]))
10638 tmp = operands[4];
10639 operands[4] = operands[5];
10640 operands[5] = tmp;
10642 output_asm_insn ("ldmia\t%1!, {%4, %5}", operands);
10643 output_asm_insn ("stmia\t%0!, {%4, %5}", operands);
10644 break;
10646 case 3:
10647 if (REGNO (operands[4]) > REGNO (operands[5]))
10649 tmp = operands[4];
10650 operands[4] = operands[5];
10651 operands[5] = tmp;
10653 if (REGNO (operands[5]) > REGNO (operands[6]))
10655 tmp = operands[5];
10656 operands[5] = operands[6];
10657 operands[6] = tmp;
10659 if (REGNO (operands[4]) > REGNO (operands[5]))
10661 tmp = operands[4];
10662 operands[4] = operands[5];
10663 operands[5] = tmp;
10666 output_asm_insn ("ldmia\t%1!, {%4, %5, %6}", operands);
10667 output_asm_insn ("stmia\t%0!, {%4, %5, %6}", operands);
10668 break;
10670 default:
10671 abort ();
10674 return "";
10677 /* Routines for generating rtl. */
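/* Expand a constant-length block copy (movstrqi): move 12-byte and
   8-byte chunks with the multi-register move patterns, then mop up
   any remaining word, half-word and byte with individual moves.  */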
10679 void
10680 thumb_expand_movstrqi (operands)
10681 rtx * operands;
10683 rtx out = copy_to_mode_reg (SImode, XEXP (operands[0], 0));
10684 rtx in = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
10685 HOST_WIDE_INT len = INTVAL (operands[2]);
10686 HOST_WIDE_INT offset = 0;
10688 while (len >= 12)
10690 emit_insn (gen_movmem12b (out, in, out, in));
10691 len -= 12;
10694 if (len >= 8)
10696 emit_insn (gen_movmem8b (out, in, out, in));
10697 len -= 8;
10700 if (len >= 4)
10702 rtx reg = gen_reg_rtx (SImode);
10703 emit_insn (gen_movsi (reg, gen_rtx (MEM, SImode, in)));
10704 emit_insn (gen_movsi (gen_rtx (MEM, SImode, out), reg));
10705 len -= 4;
10706 offset += 4;
10709 if (len >= 2)
10711 rtx reg = gen_reg_rtx (HImode);
10712 emit_insn (gen_movhi (reg, gen_rtx (MEM, HImode,
10713 plus_constant (in, offset))));
10714 emit_insn (gen_movhi (gen_rtx (MEM, HImode, plus_constant (out, offset)),
10715 reg));
10716 len -= 2;
10717 offset += 2;
10720 if (len)
10722 rtx reg = gen_reg_rtx (QImode);
10723 emit_insn (gen_movqi (reg, gen_rtx (MEM, QImode,
10724 plus_constant (in, offset))));
10725 emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (out, offset)),
10726 reg));
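/* Return nonzero if OP is a valid Thumb comparison operand: either a
   register, or a constant in the range 0 to 255.  */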
10731 thumb_cmp_operand (op, mode)
10732 rtx op;
10733 enum machine_mode mode;
10735 return ((GET_CODE (op) == CONST_INT
10736 && (unsigned HOST_WIDE_INT) (INTVAL (op)) < 256)
10737 || register_operand (op, mode));
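/* Return the assembler condition-code suffix for comparison X,
   logically inverted if INVERT is nonzero.  Note that the unsigned
   comparisons map onto cs/cc and hi/ls.  */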
10740 static const char *
10741 thumb_condition_code (x, invert)
10742 rtx x;
10743 int invert;
10745 static const char * const conds[] =
10747 "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
10748 "hi", "ls", "ge", "lt", "gt", "le"
10750 int val;
10752 switch (GET_CODE (x))
10754 case EQ: val = 0; break;
10755 case NE: val = 1; break;
10756 case GEU: val = 2; break;
10757 case LTU: val = 3; break;
10758 case GTU: val = 8; break;
10759 case LEU: val = 9; break;
10760 case GE: val = 10; break;
10761 case LT: val = 11; break;
10762 case GT: val = 12; break;
10763 case LE: val = 13; break;
10764 default:
10765 abort ();
10768 return conds[val ^ invert];
10771 /* Handle storing a half-word to memory during reload. */
10773 void
10774 thumb_reload_out_hi (operands)
10775 rtx * operands;
10777 emit_insn (gen_thumb_movhi_clobber (operands[0], operands[1], operands[2]));
10780	/* Handle reading a half-word from memory during reload.  This should
   never be needed on the Thumb, so simply abort.  */
10782 void
10783 thumb_reload_in_hi (operands)
10784 rtx * operands ATTRIBUTE_UNUSED;
10786 abort ();
10789	/* Return the length of a function name prefix
10790	   that starts with the character C.  */
10792 static int
10793 arm_get_strip_length (char c)
10795 switch (c)
10797 ARM_NAME_ENCODING_LENGTHS
10798 default: return 0;
10802 /* Return a pointer to a function's name with any
10803 and all prefix encodings stripped from it. */
10805 const char *
10806 arm_strip_name_encoding (const char * name)
10808 int skip;
10810 while ((skip = arm_get_strip_length (* name)))
10811 name += skip;
10813 return name;
10816 #ifdef AOF_ASSEMBLER
10817 /* Special functions only needed when producing AOF syntax assembler. */
10819 rtx aof_pic_label = NULL_RTX;
10820 struct pic_chain
10822 struct pic_chain * next;
10823 const char * symname;
10826 static struct pic_chain * aof_pic_chain = NULL;
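/* Return an rtx for the address at which SYMBOL_REF X will be stored
   in the PIC address-constant pool, adding a new 4-byte entry to the
   chain if X has not been seen before.  */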
10829 aof_pic_entry (x)
10830 rtx x;
10832 struct pic_chain ** chainp;
10833 int offset;
10835 if (aof_pic_label == NULL_RTX)
10837 /* We mark this here and not in arm_add_gc_roots() to avoid
10838 polluting even more code with ifdefs, and because it never
10839 contains anything useful until we assign to it here. */
10840 ggc_add_rtx_root (&aof_pic_label, 1);
10841 aof_pic_label = gen_rtx_SYMBOL_REF (Pmode, "x$adcons");
10844 for (offset = 0, chainp = &aof_pic_chain; *chainp;
10845 offset += 4, chainp = &(*chainp)->next)
10846 if ((*chainp)->symname == XSTR (x, 0))
10847 return plus_constant (aof_pic_label, offset);
10849 *chainp = (struct pic_chain *) xmalloc (sizeof (struct pic_chain));
10850 (*chainp)->next = NULL;
10851 (*chainp)->symname = XSTR (x, 0);
10852 return plus_constant (aof_pic_label, offset);
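/* Output the accumulated PIC address-constant pool to F: an AREA
   based on the PIC register, followed by one DCD per recorded
   symbol.  */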
10855 void
10856 aof_dump_pic_table (f)
10857 FILE * f;
10859 struct pic_chain * chain;
10861 if (aof_pic_chain == NULL)
10862 return;
10864 asm_fprintf (f, "\tAREA |%r$$adcons|, BASED %r\n",
10865 PIC_OFFSET_TABLE_REGNUM,
10866 PIC_OFFSET_TABLE_REGNUM);
10867 fputs ("|x$adcons|\n", f);
10869 for (chain = aof_pic_chain; chain; chain = chain->next)
10871 fputs ("\tDCD\t", f);
10872 assemble_name (f, chain->symname);
10873 fputs ("\n", f);
10877 int arm_text_section_count = 1;
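/* Return an AREA directive that opens a new, uniquely numbered code
   section; position-independent code additionally gets the PIC and
   REENTRANT attributes.  */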
10879 char *
10880 aof_text_section ()
10882 static char buf[100];
10883 sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
10884 arm_text_section_count++);
10885 if (flag_pic)
10886 strcat (buf, ", PIC, REENTRANT");
10887 return buf;
10890 static int arm_data_section_count = 1;
10892 char *
10893 aof_data_section ()
10895 static char buf[100];
10896 sprintf (buf, "\tAREA |C$$data%d|, DATA", arm_data_section_count++);
10897 return buf;
10900 /* The AOF assembler is religiously strict about declarations of
10901 imported and exported symbols, so that it is impossible to declare
10902 a function as imported near the beginning of the file, and then to
10903 export it later on. It is, however, possible to delay the decision
10904 until all the functions in the file have been compiled. To get
10905 around this, we maintain a list of the imports and exports, and
10906 delete from it any that are subsequently defined. At the end of
10907 compilation we spit the remainder of the list out before the END
10908 directive. */
10910 struct import
10912 struct import * next;
10913 const char * name;
10916 static struct import * imports_list = NULL;
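/* Add NAME to the list of symbols needing an IMPORT directive,
   unless it is already present.  */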
10918 void
10919 aof_add_import (name)
10920 const char * name;
10922 struct import * new;
10924 for (new = imports_list; new; new = new->next)
10925 if (new->name == name)
10926 return;
10928 new = (struct import *) xmalloc (sizeof (struct import));
10929 new->next = imports_list;
10930 imports_list = new;
10931 new->name = name;
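/* Remove NAME from the import list; used when the symbol turns out
   to be defined in this compilation unit after all.  */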
10934 void
10935 aof_delete_import (name)
10936 const char * name;
10938 struct import ** old;
10940 for (old = &imports_list; *old; old = & (*old)->next)
10942 if ((*old)->name == name)
10944 *old = (*old)->next;
10945 return;
10950 int arm_main_function = 0;
10952 void
10953 aof_dump_imports (f)
10954 FILE * f;
10956 /* The AOF assembler needs this to cause the startup code to be extracted
10957	   from the library. Bringing in __main causes the whole thing to work
10958 automagically. */
10959 if (arm_main_function)
10961 text_section ();
10962 fputs ("\tIMPORT __main\n", f);
10963 fputs ("\tDCD __main\n", f);
10966 /* Now dump the remaining imports. */
10967 while (imports_list)
10969 fprintf (f, "\tIMPORT\t");
10970 assemble_name (f, imports_list->name);
10971 fputc ('\n', f);
10972 imports_list = imports_list->next;
10975 #endif /* AOF_ASSEMBLER */
10977 #ifdef OBJECT_FORMAT_ELF
10978 /* Switch to an arbitrary section NAME with attributes as specified
10979 by FLAGS. ALIGN specifies any known alignment requirements for
10980 the section; 0 if the default should be used.
10982 Differs from the default elf version only in the prefix character
10983 used before the section type. */
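/* For example, a writable data section would be emitted as
	.section  .data.foo,"aw",%progbits
   (illustrative section name; note the '%' where the default ELF
   routine would emit '@').  */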
10985 static void
10986 arm_elf_asm_named_section (name, flags)
10987 const char *name;
10988 unsigned int flags;
10990 char flagchars[8], *f = flagchars;
10991 const char *type;
10993 if (!(flags & SECTION_DEBUG))
10994 *f++ = 'a';
10995 if (flags & SECTION_WRITE)
10996 *f++ = 'w';
10997 if (flags & SECTION_CODE)
10998 *f++ = 'x';
10999 if (flags & SECTION_SMALL)
11000 *f++ = 's';
11001 if (flags & SECTION_MERGE)
11002 *f++ = 'M';
11003 if (flags & SECTION_STRINGS)
11004 *f++ = 'S';
11005 *f = '\0';
11007 if (flags & SECTION_BSS)
11008 type = "nobits";
11009 else
11010 type = "progbits";
11012 if (flags & SECTION_ENTSIZE)
11013 fprintf (asm_out_file, "\t.section\t%s,\"%s\",%%%s,%d\n",
11014 name, flagchars, type, flags & SECTION_ENTSIZE);
11015 else
11016 fprintf (asm_out_file, "\t.section\t%s,\"%s\",%%%s\n",
11017 name, flagchars, type);
11019 #endif