/* Output routines for GCC for ARM.
   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002
   Free Software Foundation, Inc.
   Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
   and Martin Simmons (@harleqn.co.uk).
   More major hacks by Richard Earnshaw (rearnsha@arm.com).

   This file is part of GNU CC.

   GNU CC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 2, or (at your option)
   any later version.

   GNU CC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GNU CC; see the file COPYING.  If not, write to
   the Free Software Foundation, 59 Temple Place - Suite 330,
   Boston, MA 02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "reload.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "toplev.h"
#include "recog.h"
#include "ggc.h"
#include "except.h"
#include "c-pragma.h"
#include "integrate.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"
/* Forward definitions of types.  */
typedef struct minipool_node    Mnode;
typedef struct minipool_fixup   Mfix;

/* In order to improve the layout of the prototypes below
   some short type abbreviations are defined here.  */
#define Hint   HOST_WIDE_INT
#define Mmode  enum machine_mode
#define Ulong  unsigned long
#define Ccstar const char *

const struct attribute_spec arm_attribute_table[];
/* Forward function declarations.  */
static void arm_add_gc_roots PARAMS ((void));
static int arm_gen_constant PARAMS ((enum rtx_code, Mmode, Hint, rtx, rtx, int, int));
static unsigned bit_count PARAMS ((Ulong));
static int arm_address_register_rtx_p PARAMS ((rtx, int));
static int arm_legitimate_index_p PARAMS ((enum machine_mode, rtx, int));
static int thumb_base_register_rtx_p PARAMS ((rtx, enum machine_mode, int));
inline static int thumb_index_register_rtx_p PARAMS ((rtx, int));
static int const_ok_for_op PARAMS ((Hint, enum rtx_code));
static int eliminate_lr2ip PARAMS ((rtx *));
static rtx emit_multi_reg_push PARAMS ((int));
static rtx emit_sfm PARAMS ((int, int));
#ifndef AOF_ASSEMBLER
static bool arm_assemble_integer PARAMS ((rtx, unsigned int, int));
#endif
static Ccstar fp_const_from_val PARAMS ((REAL_VALUE_TYPE *));
static arm_cc get_arm_condition_code PARAMS ((rtx));
static void init_fpa_table PARAMS ((void));
static Hint int_log2 PARAMS ((Hint));
static Ccstar output_multi_immediate PARAMS ((rtx *, Ccstar, Ccstar, int, Hint));
static void print_multi_reg PARAMS ((FILE *, Ccstar, int, int));
static Mmode select_dominance_cc_mode PARAMS ((rtx, rtx, Hint));
static Ccstar shift_op PARAMS ((rtx, Hint *));
static struct machine_function * arm_init_machine_status PARAMS ((void));
static int number_of_first_bit_set PARAMS ((int));
static void replace_symbols_in_block PARAMS ((tree, rtx, rtx));
static void thumb_exit PARAMS ((FILE *, int, rtx));
static void thumb_pushpop PARAMS ((FILE *, int, int));
static Ccstar thumb_condition_code PARAMS ((rtx, int));
static rtx is_jump_table PARAMS ((rtx));
static Hint get_jump_table_size PARAMS ((rtx));
static Mnode * move_minipool_fix_forward_ref PARAMS ((Mnode *, Mnode *, Hint));
static Mnode * add_minipool_forward_ref PARAMS ((Mfix *));
static Mnode * move_minipool_fix_backward_ref PARAMS ((Mnode *, Mnode *, Hint));
static Mnode * add_minipool_backward_ref PARAMS ((Mfix *));
static void assign_minipool_offsets PARAMS ((Mfix *));
static void arm_print_value PARAMS ((FILE *, rtx));
static void dump_minipool PARAMS ((rtx));
static int arm_barrier_cost PARAMS ((rtx));
static Mfix * create_fix_barrier PARAMS ((Mfix *, Hint));
static void push_minipool_barrier PARAMS ((rtx, Hint));
static void push_minipool_fix PARAMS ((rtx, Hint, rtx *, Mmode, rtx));
static void note_invalid_constants PARAMS ((rtx, Hint));
static int current_file_function_operand PARAMS ((rtx));
static Ulong arm_compute_save_reg0_reg12_mask PARAMS ((void));
static Ulong arm_compute_save_reg_mask PARAMS ((void));
static Ulong arm_isr_value PARAMS ((tree));
static Ulong arm_compute_func_type PARAMS ((void));
static tree arm_handle_fndecl_attribute PARAMS ((tree *, tree, tree, int, bool *));
static tree arm_handle_isr_attribute PARAMS ((tree *, tree, tree, int, bool *));
static void arm_output_function_epilogue PARAMS ((FILE *, Hint));
static void arm_output_function_prologue PARAMS ((FILE *, Hint));
static void thumb_output_function_prologue PARAMS ((FILE *, Hint));
static int arm_comp_type_attributes PARAMS ((tree, tree));
static void arm_set_default_type_attributes PARAMS ((tree));
static int arm_adjust_cost PARAMS ((rtx, rtx, rtx, int));
static int count_insns_for_constant PARAMS ((HOST_WIDE_INT, int));
static int arm_get_strip_length PARAMS ((int));
static bool arm_function_ok_for_sibcall PARAMS ((tree, tree));
#ifdef OBJECT_FORMAT_ELF
static void arm_elf_asm_named_section PARAMS ((const char *, unsigned int));
#endif
#ifndef ARM_PE
static void arm_encode_section_info PARAMS ((tree, int));
#endif
#ifdef AOF_ASSEMBLER
static void aof_globalize_label PARAMS ((FILE *, const char *));
#endif
static void arm_internal_label PARAMS ((FILE *, const char *, unsigned long));
static void arm_output_mi_thunk PARAMS ((FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT, tree));
static int arm_rtx_costs_1 PARAMS ((rtx, enum rtx_code, enum rtx_code));
static bool arm_rtx_costs PARAMS ((rtx, int, int, int*));

#undef Hint
#undef Mmode
#undef Ulong
#undef Ccstar
/* Initialize the GCC target structure.  */
#ifdef TARGET_DLLIMPORT_DECL_ATTRIBUTES
#undef  TARGET_MERGE_DECL_ATTRIBUTES
#define TARGET_MERGE_DECL_ATTRIBUTES merge_dllimport_decl_attributes
#endif

#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE arm_attribute_table

#ifdef AOF_ASSEMBLER
#undef  TARGET_ASM_BYTE_OP
#define TARGET_ASM_BYTE_OP "\tDCB\t"
#undef  TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\tDCW\t"
#undef  TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\tDCD\t"
#undef  TARGET_ASM_GLOBALIZE_LABEL
#define TARGET_ASM_GLOBALIZE_LABEL aof_globalize_label
#else
#undef  TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP NULL
#undef  TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER arm_assemble_integer
#endif

#undef  TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE arm_output_function_prologue

#undef  TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE arm_output_function_epilogue

#undef  TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES arm_comp_type_attributes

#undef  TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES arm_set_default_type_attributes

#undef  TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS arm_init_builtins

#undef  TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN arm_expand_builtin

#undef  TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST arm_adjust_cost

#undef  TARGET_ENCODE_SECTION_INFO
#ifdef ARM_PE
#define TARGET_ENCODE_SECTION_INFO arm_pe_encode_section_info
#else
#define TARGET_ENCODE_SECTION_INFO arm_encode_section_info
#endif

#undef  TARGET_STRIP_NAME_ENCODING
#define TARGET_STRIP_NAME_ENCODING arm_strip_name_encoding

#undef  TARGET_ASM_INTERNAL_LABEL
#define TARGET_ASM_INTERNAL_LABEL arm_internal_label

#undef  TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL arm_function_ok_for_sibcall

#undef  TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK arm_output_mi_thunk
#undef  TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall

#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS arm_rtx_costs

struct gcc_target targetm = TARGET_INITIALIZER;
/* Obstack for minipool constant handling.  */
static struct obstack minipool_obstack;
static char * minipool_startobj;

/* The maximum number of insns skipped which
   will be conditionalised if possible.  */
static int max_insns_skipped = 5;

extern FILE * asm_out_file;

/* True if we are currently building a constant table.  */
int making_const_table;

/* Define the information needed to generate branch insns.  This is
   stored from the compare operation.  */
rtx arm_compare_op0, arm_compare_op1;

/* What type of floating point are we tuning for?  */
enum floating_point_type arm_fpu;

/* What type of floating point instructions are available?  */
enum floating_point_type arm_fpu_arch;

/* What program mode is the cpu running in?  26-bit mode or 32-bit mode.  */
enum prog_mode_type arm_prgmode;

/* Set by the -mfp=... option.  */
const char * target_fp_name = NULL;

/* Used to parse the -mstructure_size_boundary command line option.  */
const char * structure_size_string = NULL;
int arm_structure_size_boundary = DEFAULT_STRUCTURE_SIZE_BOUNDARY;

/* Bit values used to identify processor capabilities.  */
#define FL_CO_PROC    (1 << 0)   /* Has external co-processor bus.  */
#define FL_FAST_MULT  (1 << 1)   /* Fast multiply.  */
#define FL_MODE26     (1 << 2)   /* 26-bit mode support.  */
#define FL_MODE32     (1 << 3)   /* 32-bit mode support.  */
#define FL_ARCH4      (1 << 4)   /* Architecture rel 4.  */
#define FL_ARCH5      (1 << 5)   /* Architecture rel 5.  */
#define FL_THUMB      (1 << 6)   /* Thumb aware.  */
#define FL_LDSCHED    (1 << 7)   /* Load scheduling necessary.  */
#define FL_STRONG     (1 << 8)   /* StrongARM.  */
#define FL_ARCH5E     (1 << 9)   /* DSP extensions to v5.  */
#define FL_XSCALE     (1 << 10)  /* XScale.  */
/* The bits in this mask specify which
   instructions we are allowed to generate.  */
static unsigned long insn_flags = 0;

/* The bits in this mask specify which instruction scheduling options should
   be used.  Note - there is an overlap with FL_FAST_MULT.  For some
   hardware we want to be able to generate the multiply instructions, but to
   tune as if they were not present in the architecture.  */
static unsigned long tune_flags = 0;

/* The following are used in the arm.md file as equivalents to bits
   in the above two flag variables.  */

/* Nonzero if this is an "M" variant of the processor.  */
int arm_fast_multiply = 0;

/* Nonzero if this chip supports the ARM Architecture 4 extensions.  */
int arm_arch4 = 0;

/* Nonzero if this chip supports the ARM Architecture 5 extensions.  */
int arm_arch5 = 0;

/* Nonzero if this chip supports the ARM Architecture 5E extensions.  */
int arm_arch5e = 0;

/* Nonzero if this chip can benefit from load scheduling.  */
int arm_ld_sched = 0;

/* Nonzero if this chip is a StrongARM.  */
int arm_is_strong = 0;

/* Nonzero if this chip is an XScale.  */
int arm_is_xscale = 0;

/* Nonzero if this chip is an ARM6 or an ARM7.  */
int arm_is_6_or_7 = 0;

/* Nonzero if generating Thumb instructions.  */
int thumb_code = 0;

/* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
   must report the mode of the memory reference from PRINT_OPERAND to
   PRINT_OPERAND_ADDRESS.  */
enum machine_mode output_memory_reference_mode;

/* The register number to be used for the PIC offset register.  */
const char * arm_pic_register_string = NULL;
int arm_pic_register = INVALID_REGNUM;

/* Set to 1 when a return insn is output; this means that the epilogue
   is not needed.  */
int return_used_this_function;

/* Set to 1 after arm_reorg has started.  Reset at the start of
   the next function.  */
static int after_arm_reorg = 0;

/* The maximum number of insns to be used when loading a constant.  */
static int arm_constant_limit = 3;

/* For an explanation of these variables, see final_prescan_insn below.  */
int arm_ccfsm_state;
enum arm_cond_code arm_current_cc;
rtx arm_target_insn;
int arm_target_label;

/* The condition codes of the ARM, and the inverse function.  */
static const char * const arm_condition_codes[] =
{
  "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
  "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
};

#define streq(string1, string2) (strcmp (string1, string2) == 0)
/* Initialization code.  */

struct processors
{
  const char *const name;
  const unsigned long flags;
};

/* Not all of these give usefully different compilation alternatives,
   but there is no simple way of generalizing them.  */
static const struct processors all_cores[] =
{
  /* ARM Cores */

  {"arm2",      FL_CO_PROC | FL_MODE26 },
  {"arm250",    FL_CO_PROC | FL_MODE26 },
  {"arm3",      FL_CO_PROC | FL_MODE26 },
  {"arm6",      FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm60",     FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm600",    FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm610",    FL_MODE26 | FL_MODE32 },
  {"arm620",    FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7",      FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  /* arm7m doesn't exist on its own, but only with D, (and I), but
     those don't alter the code, so arm7m is sometimes used.  */
  {"arm7m",     FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7d",     FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dm",    FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7di",    FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dmi",   FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm70",     FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700",    FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700i",   FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm710",    FL_MODE26 | FL_MODE32 },
  {"arm710t",   FL_MODE26 | FL_MODE32 | FL_THUMB },
  {"arm720",    FL_MODE26 | FL_MODE32 },
  {"arm720t",   FL_MODE26 | FL_MODE32 | FL_THUMB },
  {"arm740t",   FL_MODE26 | FL_MODE32 | FL_THUMB },
  {"arm710c",   FL_MODE26 | FL_MODE32 },
  {"arm7100",   FL_MODE26 | FL_MODE32 },
  {"arm7500",   FL_MODE26 | FL_MODE32 },
  /* Doesn't have an external co-proc, but does have embedded fpu.  */
  {"arm7500fe", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7tdmi",  FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  {"arm8",      FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm810",    FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm9",      FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm920",    FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm920t",   FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm940t",   FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm9tdmi",  FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm9e",     FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"strongarm",     FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm110",  FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm1100", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm1110", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"arm10tdmi", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_ARCH5 },
  {"arm1020t",  FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_ARCH5 },
  {"xscale",    FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_STRONG | FL_ARCH5 | FL_ARCH5E | FL_XSCALE },

  {NULL, 0}
};

static const struct processors all_architectures[] =
{
  /* ARM Architectures */

  { "armv2",   FL_CO_PROC | FL_MODE26 },
  { "armv2a",  FL_CO_PROC | FL_MODE26 },
  { "armv3",   FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  { "armv3m",  FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  { "armv4",   FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 },
  /* Strictly, FL_MODE26 is a permitted option for v4t, but there are no
     implementations that support it, so we will leave it out for now.  */
  { "armv4t",  FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  { "armv5",   FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
  { "armv5t",  FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
  { "armv5te", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 | FL_ARCH5E },
  { NULL, 0 }
};
/* This is a magic structure.  The 'string' field is magically filled in
   with a pointer to the value specified by the user on the command line,
   assuming that the user has specified such a value.  */

struct arm_cpu_select arm_select[] =
{
  /* string     name         processors  */
  { NULL,       "-mcpu=",    all_cores },
  { NULL,       "-march=",   all_architectures },
  { NULL,       "-mtune=",   all_cores }
};
/* Return the number of bits set in VALUE.  */
static unsigned
bit_count (value)
     unsigned long value;
{
  unsigned long count = 0;

  while (value)
    {
      count++;
      value &= value - 1;  /* Clear the least-significant set bit.  */
    }

  return count;
}
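
/* For illustration (not part of the original source): the loop above
   uses the classic value &= value - 1 trick, which clears the lowest
   set bit on each iteration.  For example, with value = 0x2C
   (binary 101100):

     0x2C & 0x2B = 0x28   (bit 2 cleared)
     0x28 & 0x27 = 0x20   (bit 3 cleared)
     0x20 & 0x1F = 0x00   (bit 5 cleared)

   so bit_count returns 3 after three iterations, one per set bit.  */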
/* Fix up any incompatible options that the user has specified.
   This has now turned into a maze.  */
void
arm_override_options ()
{
  unsigned i;

  /* Set up the flags based on the cpu/architecture selected by the user.  */
  for (i = ARRAY_SIZE (arm_select); i--;)
    {
      struct arm_cpu_select * ptr = arm_select + i;

      if (ptr->string != NULL && ptr->string[0] != '\0')
        {
          const struct processors * sel;

          for (sel = ptr->processors; sel->name != NULL; sel++)
            if (streq (ptr->string, sel->name))
              {
                if (i == 2)
                  tune_flags = sel->flags;
                else
                  {
                    /* If we have been given an architecture and a processor
                       make sure that they are compatible.  We only generate
                       a warning though, and we prefer the CPU over the
                       architecture.  */
                    if (insn_flags != 0 && (insn_flags ^ sel->flags))
                      warning ("switch -mcpu=%s conflicts with -march= switch",
                               ptr->string);

                    insn_flags = sel->flags;
                  }

                break;
              }

          if (sel->name == NULL)
            error ("bad value (%s) for %s switch", ptr->string, ptr->name);
        }
    }
  /* If the user did not specify a processor, choose one for them.  */
  if (insn_flags == 0)
    {
      const struct processors * sel;
      unsigned int sought;
      static const struct cpu_default
      {
        const int cpu;
        const char *const name;
      }
      cpu_defaults[] =
      {
        { TARGET_CPU_arm2,      "arm2" },
        { TARGET_CPU_arm6,      "arm6" },
        { TARGET_CPU_arm610,    "arm610" },
        { TARGET_CPU_arm710,    "arm710" },
        { TARGET_CPU_arm7m,     "arm7m" },
        { TARGET_CPU_arm7500fe, "arm7500fe" },
        { TARGET_CPU_arm7tdmi,  "arm7tdmi" },
        { TARGET_CPU_arm8,      "arm8" },
        { TARGET_CPU_arm810,    "arm810" },
        { TARGET_CPU_arm9,      "arm9" },
        { TARGET_CPU_strongarm, "strongarm" },
        { TARGET_CPU_xscale,    "xscale" },
        { TARGET_CPU_generic,   "arm" },
        { 0, 0 }
      };
      const struct cpu_default * def;

      /* Find the default.  */
      for (def = cpu_defaults; def->name; def++)
        if (def->cpu == TARGET_CPU_DEFAULT)
          break;

      /* Make sure we found the default CPU.  */
      if (def->name == NULL)
        abort ();

      /* Find the default CPU's flags.  */
      for (sel = all_cores; sel->name != NULL; sel++)
        if (streq (def->name, sel->name))
          break;

      if (sel->name == NULL)
        abort ();

      insn_flags = sel->flags;
      /* Now check to see if the user has specified any command line
         switches that require certain abilities from the cpu.  */
      sought = 0;

      if (TARGET_INTERWORK || TARGET_THUMB)
        {
          sought |= (FL_THUMB | FL_MODE32);

          /* Force apcs-32 to be used for interworking.  */
          target_flags |= ARM_FLAG_APCS_32;

          /* There are no ARM processors that support both APCS-26 and
             interworking.  Therefore we force FL_MODE26 to be removed
             from insn_flags here (if it was set), so that the search
             below will always be able to find a compatible processor.  */
          insn_flags &= ~FL_MODE26;
        }
      else if (!TARGET_APCS_32)
        sought |= FL_MODE26;

      if (sought != 0 && ((sought & insn_flags) != sought))
        {
          /* Try to locate a CPU type that supports all of the abilities
             of the default CPU, plus the extra abilities requested by
             the user.  */
          for (sel = all_cores; sel->name != NULL; sel++)
            if ((sel->flags & sought) == (sought | insn_flags))
              break;

          if (sel->name == NULL)
            {
              unsigned current_bit_count = 0;
              const struct processors * best_fit = NULL;

              /* Ideally we would like to issue an error message here
                 saying that it was not possible to find a CPU compatible
                 with the default CPU, but which also supports the command
                 line options specified by the programmer, and so they
                 ought to use the -mcpu=<name> command line option to
                 override the default CPU type.

                 Unfortunately this does not work with multilibing.  We
                 need to be able to support multilibs for -mapcs-26 and for
                 -mthumb-interwork and there is no CPU that can support both
                 options.  Instead if we cannot find a cpu that has both the
                 characteristics of the default cpu and the given command line
                 options we scan the array again looking for a best match.  */
              for (sel = all_cores; sel->name != NULL; sel++)
                if ((sel->flags & sought) == sought)
                  {
                    unsigned count;

                    count = bit_count (sel->flags & insn_flags);

                    if (count >= current_bit_count)
                      {
                        best_fit = sel;
                        current_bit_count = count;
                      }
                  }

              if (best_fit == NULL)
                abort ();
              else
                sel = best_fit;
            }

          insn_flags = sel->flags;
        }
    }
  /* If tuning has not been specified, tune for whichever processor or
     architecture has been selected.  */
  if (tune_flags == 0)
    tune_flags = insn_flags;

  /* Make sure that the processor choice does not conflict with any of the
     other command line choices.  */
  if (TARGET_APCS_32 && !(insn_flags & FL_MODE32))
    {
      /* If APCS-32 was not the default then it must have been set by the
         user, so issue a warning message.  If the user has specified
         "-mapcs-32 -mcpu=arm2" then we lose here.  */
      if ((TARGET_DEFAULT & ARM_FLAG_APCS_32) == 0)
        warning ("target CPU does not support APCS-32");
      target_flags &= ~ARM_FLAG_APCS_32;
    }
  else if (!TARGET_APCS_32 && !(insn_flags & FL_MODE26))
    {
      warning ("target CPU does not support APCS-26");
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_INTERWORK && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support interworking");
      target_flags &= ~ARM_FLAG_INTERWORK;
    }

  if (TARGET_THUMB && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support THUMB instructions");
      target_flags &= ~ARM_FLAG_THUMB;
    }

  if (TARGET_APCS_FRAME && TARGET_THUMB)
    {
      /* warning ("ignoring -mapcs-frame because -mthumb was used"); */
      target_flags &= ~ARM_FLAG_APCS_FRAME;
    }

  /* TARGET_BACKTRACE calls leaf_function_p, which causes a crash if done
     from here where no function is being compiled currently.  */
  if ((target_flags & (THUMB_FLAG_LEAF_BACKTRACE | THUMB_FLAG_BACKTRACE))
      && TARGET_ARM)
    warning ("enabling backtrace support is only meaningful when compiling for the Thumb");

  if (TARGET_ARM && TARGET_CALLEE_INTERWORKING)
    warning ("enabling callee interworking support is only meaningful when compiling for the Thumb");

  if (TARGET_ARM && TARGET_CALLER_INTERWORKING)
    warning ("enabling caller interworking support is only meaningful when compiling for the Thumb");

  /* If interworking is enabled then APCS-32 must be selected as well.  */
  if (TARGET_INTERWORK)
    {
      if (!TARGET_APCS_32)
        warning ("interworking forces APCS-32 to be used");
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_APCS_STACK && !TARGET_APCS_FRAME)
    {
      warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
      target_flags |= ARM_FLAG_APCS_FRAME;
    }

  if (TARGET_POKE_FUNCTION_NAME)
    target_flags |= ARM_FLAG_APCS_FRAME;

  if (TARGET_APCS_REENT && flag_pic)
    error ("-fpic and -mapcs-reent are incompatible");

  if (TARGET_APCS_REENT)
    warning ("APCS reentrant code not supported.  Ignored");

  /* If this target is normally configured to use APCS frames, warn if they
     are turned off and debugging is turned on.  */
  if (TARGET_ARM
      && write_symbols != NO_DEBUG
      && !TARGET_APCS_FRAME
      && (TARGET_DEFAULT & ARM_FLAG_APCS_FRAME))
    warning ("-g with -mno-apcs-frame may not give sensible debugging");

  /* If stack checking is disabled, we can use r10 as the PIC register,
     which keeps r9 available.  */
  if (flag_pic)
    arm_pic_register = TARGET_APCS_STACK ? 9 : 10;

  if (TARGET_APCS_FLOAT)
    warning ("passing floating point arguments in fp regs not yet supported");
  /* Initialize boolean versions of the flags, for use in the arm.md file.  */
  arm_fast_multiply = (insn_flags & FL_FAST_MULT) != 0;
  arm_arch4 = (insn_flags & FL_ARCH4) != 0;
  arm_arch5 = (insn_flags & FL_ARCH5) != 0;
  arm_arch5e = (insn_flags & FL_ARCH5E) != 0;
  arm_is_xscale = (insn_flags & FL_XSCALE) != 0;

  arm_ld_sched = (tune_flags & FL_LDSCHED) != 0;
  arm_is_strong = (tune_flags & FL_STRONG) != 0;
  thumb_code = (TARGET_ARM == 0);
  arm_is_6_or_7 = (((tune_flags & (FL_MODE26 | FL_MODE32))
                    && !(tune_flags & FL_ARCH4))) != 0;

  /* Default value for floating point code... if no co-processor
     bus, then schedule for emulated floating point.  Otherwise,
     assume the user has an FPA.
     Note: this does not prevent use of floating point instructions,
     -msoft-float does that.  */
  arm_fpu = (tune_flags & FL_CO_PROC) ? FP_HARD : FP_SOFT3;

  if (target_fp_name)
    {
      if (streq (target_fp_name, "2"))
        arm_fpu_arch = FP_SOFT2;
      else if (streq (target_fp_name, "3"))
        arm_fpu_arch = FP_SOFT3;
      else
        error ("invalid floating point emulation option: -mfpe-%s",
               target_fp_name);
    }
  else
    arm_fpu_arch = FP_DEFAULT;

  if (TARGET_FPE && arm_fpu != FP_HARD)
    arm_fpu = FP_SOFT2;

  /* For arm2/3 there is no need to do any scheduling if there is only
     a floating point emulator, or we are doing software floating-point.  */
  if ((TARGET_SOFT_FLOAT || arm_fpu != FP_HARD)
      && (tune_flags & FL_MODE32) == 0)
    flag_schedule_insns = flag_schedule_insns_after_reload = 0;

  arm_prgmode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;

  if (structure_size_string != NULL)
    {
      int size = strtol (structure_size_string, NULL, 0);

      if (size == 8 || size == 32)
        arm_structure_size_boundary = size;
      else
        warning ("structure size boundary can only be set to 8 or 32");
    }

  if (arm_pic_register_string != NULL)
    {
      int pic_register = decode_reg_name (arm_pic_register_string);

      if (!flag_pic)
        warning ("-mpic-register= is useless without -fpic");

      /* Prevent the user from choosing an obviously stupid PIC register.  */
      else if (pic_register < 0 || call_used_regs[pic_register]
               || pic_register == HARD_FRAME_POINTER_REGNUM
               || pic_register == STACK_POINTER_REGNUM
               || pic_register >= PC_REGNUM)
        error ("unable to use '%s' for PIC register", arm_pic_register_string);
      else
        arm_pic_register = pic_register;
    }

  if (TARGET_THUMB && flag_schedule_insns)
    {
      /* Don't warn since it's on by default in -O2.  */
      flag_schedule_insns = 0;
    }

  /* If optimizing for space, don't synthesize constants.
     For processors with load scheduling, it never costs more than 2 cycles
     to load a constant, and the load scheduler may well reduce that to 1.  */
  if (optimize_size || (tune_flags & FL_LDSCHED))
    arm_constant_limit = 1;

  if (arm_is_xscale)
    arm_constant_limit = 2;

  /* If optimizing for size, bump the number of instructions that we
     are prepared to conditionally execute (even on a StrongARM).
     Otherwise for the StrongARM, which has early execution of branches,
     a sequence that is worth skipping is shorter.  */
  if (optimize_size)
    max_insns_skipped = 6;
  else if (arm_is_strong)
    max_insns_skipped = 3;

  /* Register global variables with the garbage collector.  */
  arm_add_gc_roots ();
}
static void
arm_add_gc_roots ()
{
  gcc_obstack_init (&minipool_obstack);
  minipool_startobj = (char *) obstack_alloc (&minipool_obstack, 0);
}
/* A table of known ARM exception types.
   For use with the interrupt function attribute.  */

typedef struct
{
  const char *const arg;
  const unsigned long return_value;
}
isr_attribute_arg;

static const isr_attribute_arg isr_attribute_args [] =
{
  { "IRQ",   ARM_FT_ISR },
  { "irq",   ARM_FT_ISR },
  { "FIQ",   ARM_FT_FIQ },
  { "fiq",   ARM_FT_FIQ },
  { "ABORT", ARM_FT_ISR },
  { "abort", ARM_FT_ISR },
  { "UNDEF", ARM_FT_EXCEPTION },
  { "undef", ARM_FT_EXCEPTION },
  { "SWI",   ARM_FT_EXCEPTION },
  { "swi",   ARM_FT_EXCEPTION },
  { NULL,    ARM_FT_NORMAL }
};
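
/* For illustration (not part of the original source): these strings are
   the arguments accepted by the "isr"/"interrupt" function attribute,
   so a handler would typically be declared as, e.g.:

     void my_irq_handler (void) __attribute__ ((interrupt ("IRQ")));

   for which arm_isr_value below returns ARM_FT_ISR.  */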
/* Returns the (interrupt) function type of the current
   function, or ARM_FT_UNKNOWN if the type cannot be determined.  */

static unsigned long
arm_isr_value (argument)
     tree argument;
{
  const isr_attribute_arg * ptr;
  const char * arg;

  /* No argument - default to IRQ.  */
  if (argument == NULL_TREE)
    return ARM_FT_ISR;

  /* Get the value of the argument.  */
  if (TREE_VALUE (argument) == NULL_TREE
      || TREE_CODE (TREE_VALUE (argument)) != STRING_CST)
    return ARM_FT_UNKNOWN;

  arg = TREE_STRING_POINTER (TREE_VALUE (argument));

  /* Check it against the list of known arguments.  */
  for (ptr = isr_attribute_args; ptr->arg != NULL; ptr++)
    if (streq (arg, ptr->arg))
      return ptr->return_value;

  /* An unrecognized interrupt type.  */
  return ARM_FT_UNKNOWN;
}
/* Computes the type of the current function.  */

static unsigned long
arm_compute_func_type ()
{
  unsigned long type = ARM_FT_UNKNOWN;
  tree a;
  tree attr;

  if (TREE_CODE (current_function_decl) != FUNCTION_DECL)
    abort ();

  /* Decide if the current function is volatile.  Such functions
     never return, and many memory cycles can be saved by not storing
     register values that will never be needed again.  This optimization
     was added to speed up context switching in a kernel application.  */
  if (optimize > 0
      && current_function_nothrow
      && TREE_THIS_VOLATILE (current_function_decl))
    type |= ARM_FT_VOLATILE;

  if (current_function_needs_context)
    type |= ARM_FT_NESTED;

  attr = DECL_ATTRIBUTES (current_function_decl);

  a = lookup_attribute ("naked", attr);
  if (a != NULL_TREE)
    type |= ARM_FT_NAKED;

  if (cfun->machine->eh_epilogue_sp_ofs != NULL_RTX)
    type |= ARM_FT_EXCEPTION_HANDLER;
  else
    {
      a = lookup_attribute ("isr", attr);
      if (a == NULL_TREE)
        a = lookup_attribute ("interrupt", attr);

      if (a == NULL_TREE)
        type |= TARGET_INTERWORK ? ARM_FT_INTERWORKED : ARM_FT_NORMAL;
      else
        type |= arm_isr_value (TREE_VALUE (a));
    }

  return type;
}
/* Returns the type of the current function.  */

unsigned long
arm_current_func_type ()
{
  if (ARM_FUNC_TYPE (cfun->machine->func_type) == ARM_FT_UNKNOWN)
    cfun->machine->func_type = arm_compute_func_type ();

  return cfun->machine->func_type;
}
/* Return 1 if it is possible to return using a single instruction.  */

int
use_return_insn (iscond)
     int iscond;
{
  int regno;
  unsigned int func_type;
  unsigned long saved_int_regs;

  /* Never use a return instruction before reload has run.  */
  if (!reload_completed)
    return 0;

  func_type = arm_current_func_type ();

  /* Naked functions and volatile functions need special
     consideration.  */
  if (func_type & (ARM_FT_VOLATILE | ARM_FT_NAKED))
    return 0;

  /* So do interrupt functions that use the frame pointer.  */
  if (IS_INTERRUPT (func_type) && frame_pointer_needed)
    return 0;

  /* As do variadic functions.  */
  if (current_function_pretend_args_size
      || cfun->machine->uses_anonymous_args
      /* Or if the function calls __builtin_eh_return ().  */
      || ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER
      /* Or if there is no frame pointer and there is a stack adjustment.  */
      || ((arm_get_frame_size () + current_function_outgoing_args_size != 0)
          && !frame_pointer_needed))
    return 0;

  saved_int_regs = arm_compute_save_reg_mask ();

  /* Can't be done if interworking with Thumb, and any registers have been
     stacked.  */
  if (TARGET_INTERWORK && saved_int_regs != 0)
    return 0;

  /* On StrongARM, conditional returns are expensive if they aren't
     taken and multiple registers have been stacked.  */
  if (iscond && arm_is_strong)
    {
      /* Conditional return when just the LR is stored is a simple
         conditional-load instruction, that's not expensive.  */
      if (saved_int_regs != 0 && saved_int_regs != (1 << LR_REGNUM))
        return 0;

      if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
        return 0;
    }

  /* If there are saved registers but the LR isn't saved, then we need
     two instructions for the return.  */
  if (saved_int_regs && !(saved_int_regs & (1 << LR_REGNUM)))
    return 0;

  /* Can't be done if any of the FPU regs are pushed,
     since this also requires an insn.  */
  if (TARGET_HARD_FLOAT)
    for (regno = FIRST_ARM_FP_REGNUM; regno <= LAST_ARM_FP_REGNUM; regno++)
      if (regs_ever_live[regno] && !call_used_regs[regno])
        return 0;

  return 1;
}
/* Return TRUE if int I is a valid immediate ARM constant.  */

int
const_ok_for_arm (i)
     HOST_WIDE_INT i;
{
  unsigned HOST_WIDE_INT mask = ~(unsigned HOST_WIDE_INT)0xFF;

  /* For machines with >32 bit HOST_WIDE_INT, the bits above bit 31 must
     be all zero, or all one.  */
  if ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff) != 0
      && ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff)
          != ((~(unsigned HOST_WIDE_INT) 0)
              & ~(unsigned HOST_WIDE_INT) 0xffffffff)))
    return FALSE;

  /* Fast return for 0 and powers of 2.  */
  if ((i & (i - 1)) == 0)
    return TRUE;

  do
    {
      if ((i & mask & (unsigned HOST_WIDE_INT) 0xffffffff) == 0)
        return TRUE;
      mask =
          (mask << 2) | ((mask & (unsigned HOST_WIDE_INT) 0xffffffff)
                         >> (32 - 2)) | ~(unsigned HOST_WIDE_INT) 0xffffffff;
    }
  while (mask != ~(unsigned HOST_WIDE_INT) 0xFF);

  return FALSE;
}
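
/* For illustration (not part of the original source): an ARM data
   processing immediate is an 8-bit value rotated right by an even
   amount, which is what the loop above tests by sliding an 8-bit
   window around the word two bits at a time.  For example, 0xFF,
   0xFF000000 and 0x00010000 are all valid immediates, while 0x101
   (needs a 9-bit window) and 0x1FE (an 8-bit value, but only
   reachable with an odd rotation) are not.  */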
/* Return true if I is a valid constant for the operation CODE.  */
static int
const_ok_for_op (i, code)
     HOST_WIDE_INT i;
     enum rtx_code code;
{
  if (const_ok_for_arm (i))
    return 1;

  switch (code)
    {
    case PLUS:
      return const_ok_for_arm (ARM_SIGN_EXTEND (-i));

    case MINUS:  /* Should only occur with (MINUS I reg) => rsb */
    case XOR:
    case IOR:
      return 0;

    case AND:
      return const_ok_for_arm (ARM_SIGN_EXTEND (~i));

    default:
      abort ();
    }
}
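
/* For illustration (not part of the original source): the cases above
   reflect which ARM insns can absorb a transformed immediate.  For
   PLUS, (plus r 0xFFFFFF00) can be emitted as a subtract of the
   negated constant 0x100; for AND, (and r 0xFFFFFF00) can use BIC
   with ~0xFFFFFF00 = 0xFF.  XOR and IOR have no such dual insn, so
   they return 0 for constants that are not directly encodable.  */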
/* Emit a sequence of insns to handle a large constant.
   CODE is the code of the operation required, it can be any of SET, PLUS,
   IOR, AND, XOR, MINUS;
   MODE is the mode in which the operation is being performed;
   VAL is the integer to operate on;
   SOURCE is the other operand (a register, or a null-pointer for SET);
   SUBTARGETS means it is safe to create scratch registers if that will
   either produce a simpler sequence, or we will want to cse the values.
   Return value is the number of insns emitted.  */

int
arm_split_constant (code, mode, val, target, source, subtargets)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
{
  if (subtargets || code == SET
      || (GET_CODE (target) == REG && GET_CODE (source) == REG
          && REGNO (target) != REGNO (source)))
    {
      /* After arm_reorg has been called, we can't fix up expensive
         constants by pushing them into memory so we must synthesize
         them in-line, regardless of the cost.  This is only likely to
         be more costly on chips that have load delay slots and we are
         compiling without running the scheduler (so no splitting
         occurred before the final instruction emission).

         Ref: gcc -O1 -mcpu=strongarm gcc.c-torture/compile/980506-2.c
      */
      if (!after_arm_reorg
          && (arm_gen_constant (code, mode, val, target, source, 1, 0)
              > arm_constant_limit + (code != SET)))
        {
          if (code == SET)
            {
              /* Currently SET is the only monadic value for CODE, all
                 the rest are dyadic.  */
              emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (val)));
              return 1;
            }
          else
            {
              rtx temp = subtargets ? gen_reg_rtx (mode) : target;

              emit_insn (gen_rtx_SET (VOIDmode, temp, GEN_INT (val)));
              /* For MINUS, the value is subtracted from, since we never
                 have subtraction of a constant.  */
              if (code == MINUS)
                emit_insn (gen_rtx_SET (VOIDmode, target,
                                        gen_rtx_MINUS (mode, temp, source)));
              else
                emit_insn (gen_rtx_SET (VOIDmode, target,
                                        gen_rtx (code, mode, source, temp)));
              return 2;
            }
        }
    }

  return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
}
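
/* For illustration (not part of the original source): with the default
   arm_constant_limit of 3, a SET of a constant such as 0x10001 (not a
   valid immediate) is synthesized in-line by arm_gen_constant as two
   insns, roughly:

     mov  rD, #0x10000
     orr  rD, rD, #1

   Only when the estimated sequence would exceed the limit is a plain
   SET emitted instead, to be fixed up into a constant-pool load by
   arm_reorg later.  */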
static int
count_insns_for_constant (remainder, i)
     HOST_WIDE_INT remainder;
     int i;
{
  HOST_WIDE_INT temp1;
  int num_insns = 0;
  do
    {
      int end;

      if (i <= 0)
        i += 32;
      if (remainder & (3 << (i - 2)))
        {
          end = i - 8;
          if (end < 0)
            end += 32;
          temp1 = remainder & ((0x0ff << end)
                               | ((i < end) ? (0xff >> (32 - end)) : 0));
          remainder &= ~temp1;
          num_insns++;
          i -= 6;
        }
      i -= 2;
    } while (remainder);
  return num_insns;
}
/* As above, but extra parameter GENERATE which, if clear, suppresses
   RTL generation.  */

static int
arm_gen_constant (code, mode, val, target, source, subtargets, generate)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
     int generate;
{
  int can_invert = 0;
  int can_negate = 0;
  int can_negate_initial = 0;
  int can_shift = 0;
  int i;
  int num_bits_set = 0;
  int set_sign_bit_copies = 0;
  int clear_sign_bit_copies = 0;
  int clear_zero_bit_copies = 0;
  int set_zero_bit_copies = 0;
  int insns = 0;
  unsigned HOST_WIDE_INT temp1, temp2;
  unsigned HOST_WIDE_INT remainder = val & 0xffffffff;

  /* Find out which operations are safe for a given CODE.  Also do a quick
     check for degenerate cases; these can occur when DImode operations
     are split.  */
  switch (code)
    {
    case SET:
      can_invert = 1;
      can_shift = 1;
      can_negate = 1;
      break;

    case PLUS:
      can_negate = 1;
      can_negate_initial = 1;
      break;

    case IOR:
      if (remainder == 0xffffffff)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    GEN_INT (ARM_SIGN_EXTEND (val))));
          return 1;
        }
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      break;

    case AND:
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, const0_rtx));
          return 1;
        }
      if (remainder == 0xffffffff)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      can_invert = 1;
      break;

    case XOR:
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      if (remainder == 0xffffffff)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_NOT (mode, source)));
          return 1;
        }

      /* We don't know how to handle this yet below.  */
      abort ();
    case MINUS:
      /* We treat MINUS as (val - source), since (source - val) is always
         passed as (source + (-val)).  */
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_NEG (mode, source)));
          return 1;
        }
      if (const_ok_for_arm (val))
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_MINUS (mode, GEN_INT (val),
                                                   source)));
          return 1;
        }
      can_negate = 1;

      break;

    default:
      abort ();
    }

  /* If we can do it in one insn get out quickly.  */
  if (const_ok_for_arm (val)
      || (can_negate_initial && const_ok_for_arm (-val))
      || (can_invert && const_ok_for_arm (~val)))
    {
      if (generate)
        emit_insn (gen_rtx_SET (VOIDmode, target,
                                (source ? gen_rtx (code, mode, source,
                                                   GEN_INT (val))
                                 : GEN_INT (val))));
      return 1;
    }
  /* Calculate a few attributes that may be useful for specific
     optimizations.  */
  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) == 0)
        clear_sign_bit_copies++;
      else
        break;
    }

  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) != 0)
        set_sign_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) == 0)
        clear_zero_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) != 0)
        set_zero_bit_copies++;
      else
        break;
    }
  switch (code)
    {
    case SET:
      /* See if we can do this by sign_extending a constant that is known
         to be negative.  This is a good way of doing it, since the shift
         may well merge into a subsequent insn.  */
      if (set_sign_bit_copies > 1)
        {
          if (const_ok_for_arm
              (temp1 = ARM_SIGN_EXTEND (remainder
                                        << (set_sign_bit_copies - 1))))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx_SET (VOIDmode, new_src,
                                          GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
          /* For an inverted constant, we will need to set the low bits,
             these will be shifted out of harm's way.  */
          temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
          if (const_ok_for_arm (~temp1))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx_SET (VOIDmode, new_src,
                                          GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
        }

      /* See if we can generate this by setting the bottom (or the top)
         16 bits, and then shifting these into the other half of the
         word.  We only look for the simplest cases, to do more would cost
         too much.  Be careful, however, not to generate this when the
         alternative would take fewer insns.  */
      if (val & 0xffff0000)
        {
          temp1 = remainder & 0xffff0000;
          temp2 = remainder & 0x0000ffff;

          /* Overlaps outside this range are best done using other methods.  */
          for (i = 9; i < 24; i++)
            {
              if ((((temp2 | (temp2 << i)) & 0xffffffff) == remainder)
                  && !const_ok_for_arm (temp2))
                {
                  rtx new_src = (subtargets
                                 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
                                 : target);
                  insns = arm_gen_constant (code, mode, temp2, new_src,
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn (gen_rtx_SET
                               (VOIDmode, target,
                                gen_rtx_IOR (mode,
                                             gen_rtx_ASHIFT (mode, source,
                                                             GEN_INT (i)),
                                             source)));
                  return insns + 1;
                }
            }

          /* Don't duplicate cases already considered.  */
          for (i = 17; i < 24; i++)
            {
              if (((temp1 | (temp1 >> i)) == remainder)
                  && !const_ok_for_arm (temp1))
                {
                  rtx new_src = (subtargets
                                 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
                                 : target);
                  insns = arm_gen_constant (code, mode, temp1, new_src,
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn
                      (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_IOR
                                    (mode,
                                     gen_rtx_LSHIFTRT (mode, source,
                                                       GEN_INT (i)),
                                     source)));
                  return insns + 1;
                }
            }
        }
      break;
    case IOR:
    case XOR:
      /* If we have IOR or XOR, and the constant can be loaded in a
         single instruction, and we can find a temporary to put it in,
         then this can be done in two instructions instead of 3-4.  */
      if (subtargets
          /* TARGET can't be NULL if SUBTARGETS is 0.  */
          || (reload_completed && !reg_mentioned_p (target, source)))
        {
          if (const_ok_for_arm (ARM_SIGN_EXTEND (~val)))
            {
              if (generate)
                {
                  rtx sub = subtargets ? gen_reg_rtx (mode) : target;

                  emit_insn (gen_rtx_SET (VOIDmode, sub, GEN_INT (val)));
                  emit_insn (gen_rtx_SET (VOIDmode, target,
                                          gen_rtx (code, mode, source, sub)));
                }
              return 2;
            }
        }

      if (code == XOR)
        break;

      if (set_sign_bit_copies > 8
          && (val & (-1 << (32 - set_sign_bit_copies))) == val)
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (set_sign_bit_copies);

              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_ASHIFT (mode,
                                                                   source,
                                                                   shift))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_LSHIFTRT (mode, sub,
                                                                     shift))));
            }
          return 2;
        }

      if (set_zero_bit_copies > 8
          && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (set_zero_bit_copies);

              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_LSHIFTRT (mode,
                                                                     source,
                                                                     shift))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_ASHIFT (mode, sub,
                                                                   shift))));
            }
          return 2;
        }

      if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~val)))
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode, source)));
              source = sub;
              if (subtargets)
                sub = gen_reg_rtx (mode);
              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_AND (mode, source,
                                                   GEN_INT (temp1))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode, sub)));
            }
          return 3;
        }
      break;
    case AND:
      /* See if two shifts will do 2 or more insns' worth of work.  */
      if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
        {
          HOST_WIDE_INT shift_mask = ((0xffffffff
                                       << (32 - clear_sign_bit_copies))
                                      & 0xffffffff);

          if ((remainder | shift_mask) != 0xffffffff)
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            new_src, source, subtargets, 1);
                  source = new_src;
                }
              else
                {
                  rtx targ = subtargets ? NULL_RTX : target;
                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            targ, source, subtargets, 0);
                }
            }

          if (generate)
            {
              rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (clear_sign_bit_copies);

              emit_insn (gen_ashlsi3 (new_src, source, shift));
              emit_insn (gen_lshrsi3 (target, new_src, shift));
            }

          return insns + 2;
        }

      if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
        {
          HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;

          if ((remainder | shift_mask) != 0xffffffff)
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;

                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            new_src, source, subtargets, 1);
                  source = new_src;
                }
              else
                {
                  rtx targ = subtargets ? NULL_RTX : target;

                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            targ, source, subtargets, 0);
                }
            }

          if (generate)
            {
              rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (clear_zero_bit_copies);

              emit_insn (gen_lshrsi3 (new_src, source, shift));
              emit_insn (gen_ashlsi3 (target, new_src, shift));
            }

          return insns + 2;
        }

      break;

    default:
      break;
    }
  for (i = 0; i < 32; i++)
    if (remainder & (1 << i))
      num_bits_set++;

  if (code == AND || (can_invert && num_bits_set > 16))
    remainder = (~remainder) & 0xffffffff;
  else if (code == PLUS && num_bits_set > 16)
    remainder = (-remainder) & 0xffffffff;
  else
    {
      can_invert = 0;
      can_negate = 0;
    }

  /* Now try and find a way of doing the job in either two or three
     instructions.
     We start by looking for the largest block of zeros that are aligned on
     a 2-bit boundary, we then fill up the temps, wrapping around to the
     top of the word when we drop off the bottom.
     In the worst case this code should produce no more than four insns.  */
  {
    int best_start = 0;
    int best_consecutive_zeros = 0;

    for (i = 0; i < 32; i += 2)
      {
        int consecutive_zeros = 0;

        if (!(remainder & (3 << i)))
          {
            while ((i < 32) && !(remainder & (3 << i)))
              {
                consecutive_zeros += 2;
                i += 2;
              }
            if (consecutive_zeros > best_consecutive_zeros)
              {
                best_consecutive_zeros = consecutive_zeros;
                best_start = i - consecutive_zeros;
              }
            i -= 2;
          }
      }

    /* So long as it won't require any more insns to do so, it's
       desirable to emit a small constant (in bits 0...9) in the last
       insn.  This way there is more chance that it can be combined with
       a later addressing insn to form a pre-indexed load or store
       operation.  Consider:

               *((volatile int *)0xe0000100) = 1;
               *((volatile int *)0xe0000110) = 2;

       We want this to wind up as:

                mov rA, #0xe0000000
                mov rB, #1
                str rB, [rA, #0x100]
                mov rB, #2
                str rB, [rA, #0x110]

       rather than having to synthesize both large constants from scratch.

       Therefore, we calculate how many insns would be required to emit
       the constant starting from `best_start', and also starting from
       zero (ie with bit 31 first to be output).  If `best_start' doesn't
       yield a shorter sequence, we may as well use zero.  */
    if (best_start != 0
        && ((((unsigned HOST_WIDE_INT) 1) << best_start) < remainder)
        && (count_insns_for_constant (remainder, 0) <=
            count_insns_for_constant (remainder, best_start)))
      best_start = 0;

    /* Now start emitting the insns.  */
    i = best_start;
    do
      {
        int end;

        if (i <= 0)
          i += 32;
        if (remainder & (3 << (i - 2)))
          {
            end = i - 8;
            if (end < 0)
              end += 32;
            temp1 = remainder & ((0x0ff << end)
                                 | ((i < end) ? (0xff >> (32 - end)) : 0));
            remainder &= ~temp1;

            if (generate)
              {
                rtx new_src, temp1_rtx;

                if (code == SET || code == MINUS)
                  {
                    new_src = (subtargets ? gen_reg_rtx (mode) : target);
                    if (can_invert && code != MINUS)
                      temp1 = ~temp1;
                  }
                else
                  {
                    if (remainder && subtargets)
                      new_src = gen_reg_rtx (mode);
                    else
                      new_src = target;
                    if (can_invert)
                      temp1 = ~temp1;
                    else if (can_negate)
                      temp1 = -temp1;
                  }

                temp1 = trunc_int_for_mode (temp1, mode);
                temp1_rtx = GEN_INT (temp1);

                if (code == SET)
                  ;
                else if (code == MINUS)
                  temp1_rtx = gen_rtx_MINUS (mode, temp1_rtx, source);
                else
                  temp1_rtx = gen_rtx_fmt_ee (code, mode, source, temp1_rtx);

                emit_insn (gen_rtx_SET (VOIDmode, new_src, temp1_rtx));
                source = new_src;
              }

            if (code == SET)
              {
                can_invert = 0;
                code = PLUS;
              }
            else if (code == MINUS)
              code = PLUS;

            insns++;
            i -= 6;
          }
        i -= 2;
      }
    while (remainder);
  }

  return insns;
}
/* Canonicalize a comparison so that we are more likely to recognize it.
   This can be done for a few constant compares, where we can make the
   immediate value easier to load.  */

enum rtx_code
arm_canonicalize_comparison (code, op1)
     enum rtx_code code;
     rtx * op1;
{
  unsigned HOST_WIDE_INT i = INTVAL (*op1);

  switch (code)
    {
    case EQ:
    case NE:
      return code;

    case GT:
    case LE:
      if (i != ((((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1)) - 1)
          && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
        {
          *op1 = GEN_INT (i + 1);
          return code == GT ? GE : LT;
        }
      break;

    case GE:
    case LT:
      if (i != (((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1))
          && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
        {
          *op1 = GEN_INT (i - 1);
          return code == GE ? GT : LE;
        }
      break;

    case GTU:
    case LEU:
      if (i != ~((unsigned HOST_WIDE_INT) 0)
          && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
        {
          *op1 = GEN_INT (i + 1);
          return code == GTU ? GEU : LTU;
        }
      break;

    case GEU:
    case LTU:
      if (i != 0
          && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
        {
          *op1 = GEN_INT (i - 1);
          return code == GEU ? GTU : LEU;
        }
      break;

    default:
      abort ();
    }

  return code;
}
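
/* For illustration (not part of the original source): (GT x 0xFFFF)
   becomes (GE x 0x10000) here, since 0xFFFF is not a valid ARM
   immediate but 0x10000 is, letting the comparison be emitted as a
   single compare-with-immediate insn.  */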
/* Decide whether a type should be returned in memory (true)
   or in a register (false).  This is called by the macro
   RETURN_IN_MEMORY.  */

int
arm_return_in_memory (type)
     tree type;
{
  HOST_WIDE_INT size;

  if (!AGGREGATE_TYPE_P (type))
    /* All simple types are returned in registers.  */
    return 0;

  size = int_size_in_bytes (type);

  if (TARGET_ATPCS)
    {
      /* ATPCS returns aggregate types in memory only if they are
         larger than a word (or are variable size).  */
      return (size < 0 || size > UNITS_PER_WORD);
    }

  /* For the arm-wince targets we choose to be compatible with Microsoft's
     ARM and Thumb compilers, which always return aggregates in memory.  */
#ifndef ARM_WINCE
  /* All structures/unions bigger than one word are returned in memory.
     Also catch the case where int_size_in_bytes returns -1.  In this case
     the aggregate is either huge or of variable size, and in either case
     we will want to return it via memory and not in a register.  */
  if (size < 0 || size > UNITS_PER_WORD)
    return 1;

  if (TREE_CODE (type) == RECORD_TYPE)
    {
      tree field;

      /* For a struct the APCS says that we only return in a register
         if the type is 'integer like' and every addressable element
         has an offset of zero.  For practical purposes this means
         that the structure can have at most one non bit-field element
         and that this element must be the first one in the structure.  */

      /* Find the first field, ignoring non FIELD_DECL things which will
         have been created by C++.  */
      for (field = TYPE_FIELDS (type);
           field && TREE_CODE (field) != FIELD_DECL;
           field = TREE_CHAIN (field))
        continue;

      if (field == NULL)
        return 0; /* An empty structure.  Allowed by an extension to ANSI C.  */

      /* Check that the first field is valid for returning in a register.  */

      /* ... Floats are not allowed.  */
      if (FLOAT_TYPE_P (TREE_TYPE (field)))
        return 1;

      /* ... Aggregates that are not themselves valid for returning in
         a register are not allowed.  */
      if (RETURN_IN_MEMORY (TREE_TYPE (field)))
        return 1;

      /* Now check the remaining fields, if any.  Only bitfields are allowed,
         since they are not addressable.  */
      for (field = TREE_CHAIN (field);
           field;
           field = TREE_CHAIN (field))
        {
          if (TREE_CODE (field) != FIELD_DECL)
            continue;

          if (!DECL_BIT_FIELD_TYPE (field))
            return 1;
        }

      return 0;
    }

  if (TREE_CODE (type) == UNION_TYPE)
    {
      tree field;

      /* Unions can be returned in registers if every element is
         integral, or can be returned in an integer register.  */
      for (field = TYPE_FIELDS (type);
           field;
           field = TREE_CHAIN (field))
        {
          if (TREE_CODE (field) != FIELD_DECL)
            continue;

          if (FLOAT_TYPE_P (TREE_TYPE (field)))
            return 1;

          if (RETURN_IN_MEMORY (TREE_TYPE (field)))
            return 1;
        }

      return 0;
    }
#endif /* not ARM_WINCE */

  /* Return all other types in memory.  */
  return 1;
}
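
/* For illustration (not part of the original source): under these
   APCS rules, struct { int i; } is "integer like" and comes back in
   r0, while struct { float f; } (float first field) and
   struct { int a, b; } (bigger than a word) are both returned in
   memory.  */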
/* Indicate whether or not words of a double are in big-endian order.  */

int
arm_float_words_big_endian ()
{
  /* For FPA, float words are always big-endian.  For VFP, float words
     follow the memory system mode.  */

  if (TARGET_HARD_FLOAT)
    {
      /* FIXME: TARGET_HARD_FLOAT currently implies FPA.  */
      return 1;
    }

  if (TARGET_VFP)
    return (TARGET_BIG_END ? 1 : 0);

  return 1;
}
1924 /* Initialize a variable CUM of type CUMULATIVE_ARGS
1925 for a call to a function whose data type is FNTYPE.
1926 For a library call, FNTYPE is NULL. */
1927 void
1928 arm_init_cumulative_args (pcum, fntype, libname, indirect)
1929 CUMULATIVE_ARGS * pcum;
1930 tree fntype;
1931 rtx libname ATTRIBUTE_UNUSED;
1932 int indirect ATTRIBUTE_UNUSED;
1934 /* On the ARM, the offset starts at 0. */
1935 pcum->nregs = ((fntype && aggregate_value_p (TREE_TYPE (fntype))) ? 1 : 0);
1937 pcum->call_cookie = CALL_NORMAL;
1939 if (TARGET_LONG_CALLS)
1940 pcum->call_cookie = CALL_LONG;
1942 /* Check for long call/short call attributes. The attributes
1943 override any command line option. */
1944 if (fntype)
1946 if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (fntype)))
1947 pcum->call_cookie = CALL_SHORT;
1948 else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (fntype)))
1949 pcum->call_cookie = CALL_LONG;
1953 /* Determine where to put an argument to a function.
1954 Value is zero to push the argument on the stack,
1955 or a hard register in which to store the argument.
1957 MODE is the argument's machine mode.
1958 TYPE is the data type of the argument (as a tree).
1959 This is null for libcalls where that information may
1960 not be available.
1961 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1962 the preceding args and about the function being called.
1963 NAMED is nonzero if this argument is a named parameter
1964 (otherwise it is an extra parameter matching an ellipsis). */
1967 arm_function_arg (pcum, mode, type, named)
1968 CUMULATIVE_ARGS * pcum;
1969 enum machine_mode mode;
1970 tree type ATTRIBUTE_UNUSED;
1971 int named;
1973 if (mode == VOIDmode)
1974 /* Compute operand 2 of the call insn. */
1975 return GEN_INT (pcum->call_cookie);
1977 if (!named || pcum->nregs >= NUM_ARG_REGS)
1978 return NULL_RTX;
1980 return gen_rtx_REG (mode, pcum->nregs);
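/* Editor's illustration (assumes NUM_ARG_REGS == 4, i.e. r0-r3): for
   `int f (int a, int b, int c, int d, int e)' the code above yields
   (reg 0) .. (reg 3) for A through D, while E gets NULL_RTX and is
   therefore pushed on the stack.  */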
1983 /* Variable sized types are passed by reference. This is a GCC
1984 extension to the ARM ABI. */
1987 arm_function_arg_pass_by_reference (cum, mode, type, named)
1988 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
1989 enum machine_mode mode ATTRIBUTE_UNUSED;
1990 tree type;
1991 int named ATTRIBUTE_UNUSED;
1993 return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
1996 /* Implement va_arg. */
1999 arm_va_arg (valist, type)
2000 tree valist, type;
2002 /* Variable sized types are passed by reference. */
2003 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2005 rtx addr = std_expand_builtin_va_arg (valist, build_pointer_type (type));
2006 return gen_rtx_MEM (ptr_mode, force_reg (Pmode, addr));
2009 return std_expand_builtin_va_arg (valist, type);
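/* Editor's note: for example, `va_arg (ap, vla_type)' where VLA_TYPE
   has a non-constant TYPE_SIZE fetches a pointer from the argument
   area and dereferences it, matching the pass-by-reference convention
   established by arm_function_arg_pass_by_reference above.  */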
2012 /* Encode the current state of the #pragma [no_]long_calls. */
2013 typedef enum
2015 OFF, /* No #pragma [no_]long_calls is in effect. */
2016 LONG, /* #pragma long_calls is in effect. */
2017 SHORT /* #pragma no_long_calls is in effect. */
2018 } arm_pragma_enum;
2020 static arm_pragma_enum arm_pragma_long_calls = OFF;
2022 void
2023 arm_pr_long_calls (pfile)
2024 struct cpp_reader * pfile ATTRIBUTE_UNUSED;
2026 arm_pragma_long_calls = LONG;
2029 void
2030 arm_pr_no_long_calls (pfile)
2031 struct cpp_reader * pfile ATTRIBUTE_UNUSED;
2033 arm_pragma_long_calls = SHORT;
2036 void
2037 arm_pr_long_calls_off (pfile)
2038 struct cpp_reader * pfile ATTRIBUTE_UNUSED;
2040 arm_pragma_long_calls = OFF;
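/* Editor's illustration of the pragma interface handled above:

       #pragma long_calls
       void far_func (void);     -- calls use a long (32-bit) sequence
       #pragma no_long_calls
       void near_func (void);    -- calls use a plain BL
       #pragma long_calls_off
       void usual_func (void);   -- back to the command-line default  */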
2043 /* Table of machine attributes. */
2044 const struct attribute_spec arm_attribute_table[] =
2046 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
2047 /* Function calls made to this symbol must be done indirectly, because
2048 it may lie outside of the 26 bit addressing range of a normal function
2049 call. */
2050 { "long_call", 0, 0, false, true, true, NULL },
2051 /* Whereas these functions are always known to reside within the 26 bit
2052 addressing range. */
2053 { "short_call", 0, 0, false, true, true, NULL },
2054 /* Interrupt Service Routines have special prologue and epilogue requirements. */
2055 { "isr", 0, 1, false, false, false, arm_handle_isr_attribute },
2056 { "interrupt", 0, 1, false, false, false, arm_handle_isr_attribute },
2057 { "naked", 0, 0, true, false, false, arm_handle_fndecl_attribute },
2058 #ifdef ARM_PE
2059 /* ARM/PE has three new attributes:
2060 interfacearm - ?
2061 dllexport - for exporting a function/variable that will live in a dll
2062 dllimport - for importing a function/variable from a dll
2064 Microsoft allows multiple declspecs in one __declspec, separating
2065 them with spaces. We do NOT support this. Instead, use __declspec
2066 multiple times.  */
2068 { "dllimport", 0, 0, true, false, false, NULL },
2069 { "dllexport", 0, 0, true, false, false, NULL },
2070 { "interfacearm", 0, 0, true, false, false, arm_handle_fndecl_attribute },
2071 #endif
2072 { NULL, 0, 0, false, false, false, NULL }
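/* Editor's illustration of the attributes registered above:

       void timer_isr (void) __attribute__ ((interrupt ("IRQ")));
       void far_helper (void) __attribute__ ((long_call));
       void bare (void) __attribute__ ((naked));

   The string argument to "interrupt"/"isr" is optional and is mapped
   by arm_isr_value onto one of the ARM_FT_* function types.  */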
2075 /* Handle an attribute requiring a FUNCTION_DECL;
2076 arguments as in struct attribute_spec.handler. */
2078 static tree
2079 arm_handle_fndecl_attribute (node, name, args, flags, no_add_attrs)
2080 tree * node;
2081 tree name;
2082 tree args ATTRIBUTE_UNUSED;
2083 int flags ATTRIBUTE_UNUSED;
2084 bool * no_add_attrs;
2086 if (TREE_CODE (*node) != FUNCTION_DECL)
2088 warning ("`%s' attribute only applies to functions",
2089 IDENTIFIER_POINTER (name));
2090 *no_add_attrs = true;
2093 return NULL_TREE;
2096 /* Handle an "interrupt" or "isr" attribute;
2097 arguments as in struct attribute_spec.handler. */
2099 static tree
2100 arm_handle_isr_attribute (node, name, args, flags, no_add_attrs)
2101 tree * node;
2102 tree name;
2103 tree args;
2104 int flags;
2105 bool * no_add_attrs;
2107 if (DECL_P (*node))
2109 if (TREE_CODE (*node) != FUNCTION_DECL)
2111 warning ("`%s' attribute only applies to functions",
2112 IDENTIFIER_POINTER (name));
2113 *no_add_attrs = true;
2115 /* FIXME: the argument, if any, is checked for type attributes;
2116 should it be checked for decl ones? */
2118 else
2120 if (TREE_CODE (*node) == FUNCTION_TYPE
2121 || TREE_CODE (*node) == METHOD_TYPE)
2123 if (arm_isr_value (args) == ARM_FT_UNKNOWN)
2125 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
2126 *no_add_attrs = true;
2129 else if (TREE_CODE (*node) == POINTER_TYPE
2130 && (TREE_CODE (TREE_TYPE (*node)) == FUNCTION_TYPE
2131 || TREE_CODE (TREE_TYPE (*node)) == METHOD_TYPE)
2132 && arm_isr_value (args) != ARM_FT_UNKNOWN)
2134 *node = build_type_copy (*node);
2135 TREE_TYPE (*node) = build_type_attribute_variant
2136 (TREE_TYPE (*node),
2137 tree_cons (name, args, TYPE_ATTRIBUTES (TREE_TYPE (*node))));
2138 *no_add_attrs = true;
2140 else
2142 /* Possibly pass this attribute on from the type to a decl. */
2143 if (flags & ((int) ATTR_FLAG_DECL_NEXT
2144 | (int) ATTR_FLAG_FUNCTION_NEXT
2145 | (int) ATTR_FLAG_ARRAY_NEXT))
2147 *no_add_attrs = true;
2148 return tree_cons (name, args, NULL_TREE);
2150 else
2152 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
2157 return NULL_TREE;
2160 /* Return 0 if the attributes for two types are incompatible, 1 if they
2161 are compatible, and 2 if they are nearly compatible (which causes a
2162 warning to be generated). */
2164 static int
2165 arm_comp_type_attributes (type1, type2)
2166 tree type1;
2167 tree type2;
2169 int l1, l2, s1, s2;
2171 /* Check for mismatch of non-default calling convention. */
2172 if (TREE_CODE (type1) != FUNCTION_TYPE)
2173 return 1;
2175 /* Check for mismatched call attributes. */
2176 l1 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type1)) != NULL;
2177 l2 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type2)) != NULL;
2178 s1 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type1)) != NULL;
2179 s2 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type2)) != NULL;
2181 /* Only bother to check if an attribute is defined. */
2182 if (l1 | l2 | s1 | s2)
2184 /* If one type has an attribute, the other must have the same attribute. */
2185 if ((l1 != l2) || (s1 != s2))
2186 return 0;
2188 /* Disallow mixed attributes. */
2189 if ((l1 & s2) || (l2 & s1))
2190 return 0;
2193 /* Check for mismatched ISR attribute. */
2194 l1 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type1)) != NULL;
2195 if (! l1)
2196 l1 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type1)) != NULL;
2197 l2 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type2)) != NULL;
2198 if (! l2)
2199 l2 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type2)) != NULL;
2200 if (l1 != l2)
2201 return 0;
2203 return 1;
2206 /* Encode a long_call or short_call attribute by prefixing the
2207 symbol name in DECL with a special character FLAG. */
2209 void
2210 arm_encode_call_attribute (decl, flag)
2211 tree decl;
2212 int flag;
2214 const char * str = XSTR (XEXP (DECL_RTL (decl), 0), 0);
2215 int len = strlen (str);
2216 char * newstr;
2218 /* Do not allow weak functions to be treated as short call. */
2219 if (DECL_WEAK (decl) && flag == SHORT_CALL_FLAG_CHAR)
2220 return;
2222 newstr = alloca (len + 2);
2223 newstr[0] = flag;
2224 strcpy (newstr + 1, str);
2226 newstr = (char *) ggc_alloc_string (newstr, len + 1);
2227 XSTR (XEXP (DECL_RTL (decl), 0), 0) = newstr;
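/* Editor's note: the effect of the above is that a symbol named "foo"
   is rewritten in place to "<FLAG>foo", where FLAG is the short-call
   or long-call marker character; ENCODED_SHORT_CALL_ATTR_P and
   ENCODED_LONG_CALL_ATTR_P later test for that prefix.  */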
2230 /* Assigns default attributes to a newly defined type. This is used to
2231 set short_call/long_call attributes for function types of
2232 functions defined inside corresponding #pragma scopes. */
2234 static void
2235 arm_set_default_type_attributes (type)
2236 tree type;
2238 /* Add __attribute__ ((long_call)) to all functions when inside
2239 #pragma long_calls, or __attribute__ ((short_call)) when inside
2240 #pragma no_long_calls. */
2241 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
2243 tree type_attr_list, attr_name;
2244 type_attr_list = TYPE_ATTRIBUTES (type);
2246 if (arm_pragma_long_calls == LONG)
2247 attr_name = get_identifier ("long_call");
2248 else if (arm_pragma_long_calls == SHORT)
2249 attr_name = get_identifier ("short_call");
2250 else
2251 return;
2253 type_attr_list = tree_cons (attr_name, NULL_TREE, type_attr_list);
2254 TYPE_ATTRIBUTES (type) = type_attr_list;
2258 /* Return 1 if the operand is a SYMBOL_REF for a function known to be
2259 defined within the current compilation unit. If this cannot be
2260 determined, then 0 is returned. */
2262 static int
2263 current_file_function_operand (sym_ref)
2264 rtx sym_ref;
2266 /* This is a bit of a fib. A function will have a short call flag
2267 applied to its name if it has the short call attribute, or it has
2268 already been defined within the current compilation unit. */
2269 if (ENCODED_SHORT_CALL_ATTR_P (XSTR (sym_ref, 0)))
2270 return 1;
2272 /* The current function is always defined within the current compilation
2273 unit. If it is a weak definition, however, then this may not be the real
2274 definition of the function, and so we have to say no. */
2275 if (sym_ref == XEXP (DECL_RTL (current_function_decl), 0)
2276 && !DECL_WEAK (current_function_decl))
2277 return 1;
2279 /* We cannot make the determination - default to returning 0. */
2280 return 0;
2283 /* Return nonzero if a 32 bit "long_call" should be generated for
2284 this call. We generate a long_call if the function:
2286 a. has an __attribute__ ((long_call))
2287 or b. is within the scope of a #pragma long_calls
2288 or c. the -mlong-calls command line switch has been specified
2290 However we do not generate a long call if the function:
2292 d. has an __attribute__ ((short_call))
2293 or e. is inside the scope of a #pragma no_long_calls
2294 or f. has an __attribute__ ((section))
2295 or g. is defined within the current compilation unit.
2297 This function will be called by C fragments contained in the machine
2298 description file. CALL_REF and CALL_COOKIE correspond to the matched
2299 rtl operands. CALL_SYMBOL is used to distinguish between
2300 two different callers of the function. It is set to 1 in the
2301 "call_symbol" and "call_symbol_value" patterns and to 0 in the "call"
2302 and "call_value" patterns. This is because of the difference in the
2303 SYM_REFs passed by these patterns. */
2306 arm_is_longcall_p (sym_ref, call_cookie, call_symbol)
2307 rtx sym_ref;
2308 int call_cookie;
2309 int call_symbol;
2311 if (!call_symbol)
2313 if (GET_CODE (sym_ref) != MEM)
2314 return 0;
2316 sym_ref = XEXP (sym_ref, 0);
2319 if (GET_CODE (sym_ref) != SYMBOL_REF)
2320 return 0;
2322 if (call_cookie & CALL_SHORT)
2323 return 0;
2325 if (TARGET_LONG_CALLS && flag_function_sections)
2326 return 1;
2328 if (current_file_function_operand (sym_ref))
2329 return 0;
2331 return (call_cookie & CALL_LONG)
2332 || ENCODED_LONG_CALL_ATTR_P (XSTR (sym_ref, 0))
2333 || TARGET_LONG_CALLS;
2336 /* Return nonzero if it is ok to make a tail-call to DECL. */
2338 static bool
2339 arm_function_ok_for_sibcall (decl, exp)
2340 tree decl;
2341 tree exp ATTRIBUTE_UNUSED;
2343 int call_type = TARGET_LONG_CALLS ? CALL_LONG : CALL_NORMAL;
2345 /* Never tailcall something for which we have no decl, or if we
2346 are in Thumb mode. */
2347 if (decl == NULL || TARGET_THUMB)
2348 return false;
2350 /* Get the calling method. */
2351 if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
2352 call_type = CALL_SHORT;
2353 else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
2354 call_type = CALL_LONG;
2356 /* Cannot tail-call to long calls, since these are out of range of
2357 a branch instruction. However, if not compiling PIC, we know
2358 we can reach the symbol if it is in this compilation unit. */
2359 if (call_type == CALL_LONG && (flag_pic || !TREE_ASM_WRITTEN (decl)))
2360 return false;
2362 /* If we are interworking and the function is not declared static
2363 then we can't tail-call it unless we know that it exists in this
2364 compilation unit (since it might be a Thumb routine). */
2365 if (TARGET_INTERWORK && TREE_PUBLIC (decl) && !TREE_ASM_WRITTEN (decl))
2366 return false;
2368 /* Never tailcall from an ISR routine - it needs a special exit sequence. */
2369 if (IS_INTERRUPT (arm_current_func_type ()))
2370 return false;
2372 /* Everything else is ok. */
2373 return true;
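/* Editor's illustration: under the rules above, a call such as

       int caller (int x) { return callee (x); }

   can be emitted as a single `b callee' in ARM state, provided CALLEE
   is not a long call, is not a possibly-Thumb external under
   interworking, and CALLER is not an interrupt service routine.  */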
2377 /* Addressing mode support functions. */
2379 /* Return nonzero if X is a legitimate immediate operand when compiling
2380 for PIC. */
2382 legitimate_pic_operand_p (x)
2383 rtx x;
2385 if (CONSTANT_P (x)
2386 && flag_pic
2387 && (GET_CODE (x) == SYMBOL_REF
2388 || (GET_CODE (x) == CONST
2389 && GET_CODE (XEXP (x, 0)) == PLUS
2390 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)))
2391 return 0;
2393 return 1;
2397 legitimize_pic_address (orig, mode, reg)
2398 rtx orig;
2399 enum machine_mode mode;
2400 rtx reg;
2402 if (GET_CODE (orig) == SYMBOL_REF
2403 || GET_CODE (orig) == LABEL_REF)
2405 #ifndef AOF_ASSEMBLER
2406 rtx pic_ref, address;
2407 #endif
2408 rtx insn;
2409 int subregs = 0;
2411 if (reg == 0)
2413 if (no_new_pseudos)
2414 abort ();
2415 else
2416 reg = gen_reg_rtx (Pmode);
2418 subregs = 1;
2421 #ifdef AOF_ASSEMBLER
2422 /* The AOF assembler can generate relocations for these directly, and
2423 understands that the PIC register has to be added into the offset. */
2424 insn = emit_insn (gen_pic_load_addr_based (reg, orig));
2425 #else
2426 if (subregs)
2427 address = gen_reg_rtx (Pmode);
2428 else
2429 address = reg;
2431 if (TARGET_ARM)
2432 emit_insn (gen_pic_load_addr_arm (address, orig));
2433 else
2434 emit_insn (gen_pic_load_addr_thumb (address, orig));
2436 if ((GET_CODE (orig) == LABEL_REF
2437 || (GET_CODE (orig) == SYMBOL_REF &&
2438 ENCODED_SHORT_CALL_ATTR_P (XSTR (orig, 0))))
2439 && NEED_GOT_RELOC)
2440 pic_ref = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, address);
2441 else
2443 pic_ref = gen_rtx_MEM (Pmode,
2444 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
2445 address));
2446 RTX_UNCHANGING_P (pic_ref) = 1;
2449 insn = emit_move_insn (reg, pic_ref);
2450 #endif
2451 current_function_uses_pic_offset_table = 1;
2452 /* Put a REG_EQUAL note on this insn, so that it can be optimized
2453 by the loop optimizer. */
2454 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, orig,
2455 REG_NOTES (insn));
2456 return reg;
2458 else if (GET_CODE (orig) == CONST)
2460 rtx base, offset;
2462 if (GET_CODE (XEXP (orig, 0)) == PLUS
2463 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
2464 return orig;
2466 if (reg == 0)
2468 if (no_new_pseudos)
2469 abort ();
2470 else
2471 reg = gen_reg_rtx (Pmode);
2474 if (GET_CODE (XEXP (orig, 0)) == PLUS)
2476 base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
2477 offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
2478 base == reg ? 0 : reg);
2480 else
2481 abort ();
2483 if (GET_CODE (offset) == CONST_INT)
2485 /* The base register doesn't really matter, we only want to
2486 test the index for the appropriate mode. */
2487 if (!arm_legitimate_index_p (mode, offset, 0))
2489 if (!no_new_pseudos)
2490 offset = force_reg (Pmode, offset);
2491 else
2492 abort ();
2495 if (GET_CODE (offset) == CONST_INT)
2496 return plus_constant (base, INTVAL (offset));
2499 if (GET_MODE_SIZE (mode) > 4
2500 && (GET_MODE_CLASS (mode) == MODE_INT
2501 || TARGET_SOFT_FLOAT))
2503 emit_insn (gen_addsi3 (reg, base, offset));
2504 return reg;
2507 return gen_rtx_PLUS (Pmode, base, offset);
2510 return orig;
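/* Editor's sketch (illustrative; register numbers arbitrary) of what
   the SYMBOL_REF case above expands to for a global `x':

       ldr   r3, <GOT offset of x>   @ pic_load_addr pattern
       ldr   r3, [rPIC, r3]          @ fetch x's address from the GOT

   where rPIC stands for pic_offset_table_rtx; label references and
   short-call symbols skip the GOT load and simply add rPIC.  */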
2513 /* Generate code to load the PIC register. PROLOGUE is true if
2514 called from arm_expand_prologue (in which case we want the
2515 generated insns at the start of the function); false if called
2516 by an exception receiver that needs the PIC register reloaded
2517 (in which case the insns are just dumped at the current location). */
2519 void
2520 arm_finalize_pic (prologue)
2521 int prologue ATTRIBUTE_UNUSED;
2523 #ifndef AOF_ASSEMBLER
2524 rtx l1, pic_tmp, pic_tmp2, seq, pic_rtx;
2525 rtx global_offset_table;
2527 if (current_function_uses_pic_offset_table == 0 || TARGET_SINGLE_PIC_BASE)
2528 return;
2530 if (!flag_pic)
2531 abort ();
2533 start_sequence ();
2534 l1 = gen_label_rtx ();
2536 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2537 /* On the ARM the PC register contains 'dot + 8' at the time of the
2538 addition, on the Thumb it is 'dot + 4'. */
2539 pic_tmp = plus_constant (gen_rtx_LABEL_REF (Pmode, l1), TARGET_ARM ? 8 : 4);
2540 if (GOT_PCREL)
2541 pic_tmp2 = gen_rtx_CONST (VOIDmode,
2542 gen_rtx_PLUS (Pmode, global_offset_table, pc_rtx));
2543 else
2544 pic_tmp2 = gen_rtx_CONST (VOIDmode, global_offset_table);
2546 pic_rtx = gen_rtx_CONST (Pmode, gen_rtx_MINUS (Pmode, pic_tmp2, pic_tmp));
2548 if (TARGET_ARM)
2550 emit_insn (gen_pic_load_addr_arm (pic_offset_table_rtx, pic_rtx));
2551 emit_insn (gen_pic_add_dot_plus_eight (pic_offset_table_rtx, l1));
2553 else
2555 emit_insn (gen_pic_load_addr_thumb (pic_offset_table_rtx, pic_rtx));
2556 emit_insn (gen_pic_add_dot_plus_four (pic_offset_table_rtx, l1));
2559 seq = get_insns ();
2560 end_sequence ();
2561 if (prologue)
2562 emit_insn_after (seq, get_insns ());
2563 else
2564 emit_insn (seq);
2566 /* Need to emit this whether or not we obey regdecls,
2567 since setjmp/longjmp can cause life info to screw up. */
2568 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
2569 #endif /* AOF_ASSEMBLER */
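/* Editor's sketch of the ARM-state sequence emitted above (the Thumb
   variant is analogous with `dot + 4'):

       ldr   rPIC, L1_const   @ _GLOBAL_OFFSET_TABLE_ - (L1 + 8)
   L1: add   rPIC, pc, rPIC   @ pic_add_dot_plus_eight

   after which rPIC holds the address of the GOT.  */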
2572 /* Return nonzero if X is valid as an ARM state addressing register. */
2573 static int
2574 arm_address_register_rtx_p (x, strict_p)
2575 rtx x;
2576 int strict_p;
2578 int regno;
2580 if (GET_CODE (x) != REG)
2581 return 0;
2583 regno = REGNO (x);
2585 if (strict_p)
2586 return ARM_REGNO_OK_FOR_BASE_P (regno);
2588 return (regno <= LAST_ARM_REGNUM
2589 || regno >= FIRST_PSEUDO_REGISTER
2590 || regno == FRAME_POINTER_REGNUM
2591 || regno == ARG_POINTER_REGNUM);
2594 /* Return nonzero if X is a valid ARM state address operand. */
2596 arm_legitimate_address_p (mode, x, strict_p)
2597 enum machine_mode mode;
2598 rtx x;
2599 int strict_p;
2601 if (arm_address_register_rtx_p (x, strict_p))
2602 return 1;
2604 else if (GET_CODE (x) == POST_INC || GET_CODE (x) == PRE_DEC)
2605 return arm_address_register_rtx_p (XEXP (x, 0), strict_p);
2607 else if ((GET_CODE (x) == POST_MODIFY || GET_CODE (x) == PRE_MODIFY)
2608 && GET_MODE_SIZE (mode) <= 4
2609 && arm_address_register_rtx_p (XEXP (x, 0), strict_p)
2610 && GET_CODE (XEXP (x, 1)) == PLUS
2611 && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
2612 return arm_legitimate_index_p (mode, XEXP (XEXP (x, 1), 1), strict_p);
2614 /* After reload, constants split into minipools will have addresses
2615 from a LABEL_REF. */
2616 else if (GET_MODE_SIZE (mode) >= 4 && reload_completed
2617 && (GET_CODE (x) == LABEL_REF
2618 || (GET_CODE (x) == CONST
2619 && GET_CODE (XEXP (x, 0)) == PLUS
2620 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF
2621 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
2622 return 1;
2624 else if (mode == TImode)
2625 return 0;
2627 else if (mode == DImode || (TARGET_SOFT_FLOAT && mode == DFmode))
2629 if (GET_CODE (x) == PLUS
2630 && arm_address_register_rtx_p (XEXP (x, 0), strict_p)
2631 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2633 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
2635 if (val == 4 || val == -4 || val == -8)
2636 return 1;
2640 else if (GET_CODE (x) == PLUS)
2642 rtx xop0 = XEXP (x, 0);
2643 rtx xop1 = XEXP (x, 1);
2645 return ((arm_address_register_rtx_p (xop0, strict_p)
2646 && arm_legitimate_index_p (mode, xop1, strict_p))
2647 || (arm_address_register_rtx_p (xop1, strict_p)
2648 && arm_legitimate_index_p (mode, xop0, strict_p)));
2651 #if 0
2652 /* Reload currently can't handle MINUS, so disable this for now */
2653 else if (GET_CODE (x) == MINUS)
2655 rtx xop0 = XEXP (x, 0);
2656 rtx xop1 = XEXP (x, 1);
2658 return (arm_address_register_rtx_p (xop0, strict_p)
2659 && arm_legitimate_index_p (mode, xop1, strict_p));
2661 #endif
2663 else if (GET_MODE_CLASS (mode) != MODE_FLOAT
2664 && GET_CODE (x) == SYMBOL_REF
2665 && CONSTANT_POOL_ADDRESS_P (x)
2666 && ! (flag_pic
2667 && symbol_mentioned_p (get_pool_constant (x))))
2668 return 1;
2670 else if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == POST_DEC)
2671 && (GET_MODE_SIZE (mode) <= 4)
2672 && arm_address_register_rtx_p (XEXP (x, 0), strict_p))
2673 return 1;
2675 return 0;
2678 /* Return nonzero if INDEX is valid for an address index operand in
2679 ARM state. */
2680 static int
2681 arm_legitimate_index_p (mode, index, strict_p)
2682 enum machine_mode mode;
2683 rtx index;
2684 int strict_p;
2686 HOST_WIDE_INT range;
2687 enum rtx_code code = GET_CODE (index);
2689 if (TARGET_HARD_FLOAT && GET_MODE_CLASS (mode) == MODE_FLOAT)
2690 return (code == CONST_INT && INTVAL (index) < 1024
2691 && INTVAL (index) > -1024
2692 && (INTVAL (index) & 3) == 0);
2694 if (arm_address_register_rtx_p (index, strict_p)
2695 && GET_MODE_SIZE (mode) <= 4)
2696 return 1;
2698 /* XXX What about ldrsb? */
2699 if (GET_MODE_SIZE (mode) <= 4 && code == MULT
2700 && (!arm_arch4 || (mode) != HImode))
2702 rtx xiop0 = XEXP (index, 0);
2703 rtx xiop1 = XEXP (index, 1);
2705 return ((arm_address_register_rtx_p (xiop0, strict_p)
2706 && power_of_two_operand (xiop1, SImode))
2707 || (arm_address_register_rtx_p (xiop1, strict_p)
2708 && power_of_two_operand (xiop0, SImode)));
2711 if (GET_MODE_SIZE (mode) <= 4
2712 && (code == LSHIFTRT || code == ASHIFTRT
2713 || code == ASHIFT || code == ROTATERT)
2714 && (!arm_arch4 || (mode) != HImode))
2716 rtx op = XEXP (index, 1);
2718 return (arm_address_register_rtx_p (XEXP (index, 0), strict_p)
2719 && GET_CODE (op) == CONST_INT
2720 && INTVAL (op) > 0
2721 && INTVAL (op) <= 31);
2724 /* XXX For ARM v4 we may be doing a sign-extend operation during the
2725 load, but that has a restricted addressing range and we are unable
2726 to tell here whether that is the case. To be safe we restrict all
2727 loads to that range. */
2728 range = ((mode) == HImode || (mode) == QImode)
2729 ? (arm_arch4 ? 256 : 4095) : 4096;
2731 return (code == CONST_INT
2732 && INTVAL (index) < range
2733 && INTVAL (index) > -range);
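/* Editor's illustration of SImode addresses accepted by the two
   routines above (ARM state):

       [r0]               -- base register
       [r0, r1]           -- base plus index register
       [r0, r1, lsl #2]   -- index scaled by a constant shift
       [r0, #4095]        -- immediate offset within the 12-bit range
       [r0], #4           -- post-increment

   whereas, e.g., [r0, #4096] and all TImode addresses are rejected.  */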
2736 /* Return nonzero if X is valid as an ARM state addressing register. */
2737 static int
2738 thumb_base_register_rtx_p (x, mode, strict_p)
2739 rtx x;
2740 enum machine_mode mode;
2741 int strict_p;
2743 int regno;
2745 if (GET_CODE (x) != REG)
2746 return 0;
2748 regno = REGNO (x);
2750 if (strict_p)
2751 return THUMB_REGNO_MODE_OK_FOR_BASE_P (regno, mode);
2753 return (regno <= LAST_LO_REGNUM
2754 || regno >= FIRST_PSEUDO_REGISTER
2755 || regno == FRAME_POINTER_REGNUM
2756 || (GET_MODE_SIZE (mode) >= 4
2757 && (regno == STACK_POINTER_REGNUM
2758 || x == hard_frame_pointer_rtx
2759 || x == arg_pointer_rtx)));
2762 /* Return nonzero if x is a legitimate index register. This is the case
2763 for any base register that can access a QImode object. */
2764 inline static int
2765 thumb_index_register_rtx_p (x, strict_p)
2766 rtx x;
2767 int strict_p;
2769 return thumb_base_register_rtx_p (x, QImode, strict_p);
2772 /* Return nonzero if x is a legitimate Thumb-state address.
2774 The AP may be eliminated to either the SP or the FP, so we use the
2775 least common denominator, e.g. SImode, and offsets from 0 to 64.
2777 ??? Verify whether the above is the right approach.
2779 ??? Also, the FP may be eliminated to the SP, so perhaps that
2780 needs special handling also.
2782 ??? Look at how the mips16 port solves this problem. It probably uses
2783 better ways to solve some of these problems.
2785 Although it is not incorrect, we don't accept QImode and HImode
2786 addresses based on the frame pointer or arg pointer until the
2787 reload pass starts. This is so that eliminating such addresses
2788 into stack based ones won't produce impossible code. */
2790 thumb_legitimate_address_p (mode, x, strict_p)
2791 enum machine_mode mode;
2792 rtx x;
2793 int strict_p;
2795 /* ??? Not clear if this is right. Experiment. */
2796 if (GET_MODE_SIZE (mode) < 4
2797 && !(reload_in_progress || reload_completed)
2798 && (reg_mentioned_p (frame_pointer_rtx, x)
2799 || reg_mentioned_p (arg_pointer_rtx, x)
2800 || reg_mentioned_p (virtual_incoming_args_rtx, x)
2801 || reg_mentioned_p (virtual_outgoing_args_rtx, x)
2802 || reg_mentioned_p (virtual_stack_dynamic_rtx, x)
2803 || reg_mentioned_p (virtual_stack_vars_rtx, x)))
2804 return 0;
2806 /* Accept any base register. SP only in SImode or larger. */
2807 else if (thumb_base_register_rtx_p (x, mode, strict_p))
2808 return 1;
2810 /* This is PC relative data before MACHINE_DEPENDENT_REORG runs. */
2811 else if (GET_MODE_SIZE (mode) >= 4 && CONSTANT_P (x)
2812 && GET_CODE (x) == SYMBOL_REF
2813 && CONSTANT_POOL_ADDRESS_P (x) && ! flag_pic)
2814 return 1;
2816 /* This is PC relative data after MACHINE_DEPENDENT_REORG runs. */
2817 else if (GET_MODE_SIZE (mode) >= 4 && reload_completed
2818 && (GET_CODE (x) == LABEL_REF
2819 || (GET_CODE (x) == CONST
2820 && GET_CODE (XEXP (x, 0)) == PLUS
2821 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF
2822 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
2823 return 1;
2825 /* Post-inc indexing only supported for SImode and larger. */
2826 else if (GET_CODE (x) == POST_INC && GET_MODE_SIZE (mode) >= 4
2827 && thumb_index_register_rtx_p (XEXP (x, 0), strict_p))
2828 return 1;
2830 else if (GET_CODE (x) == PLUS)
2832 /* REG+REG address can be any two index registers. */
2833 /* We disallow FRAME+REG addressing since we know that FRAME
2834 will be replaced with STACK, and SP relative addressing only
2835 permits SP+OFFSET. */
2836 if (GET_MODE_SIZE (mode) <= 4
2837 && XEXP (x, 0) != frame_pointer_rtx
2838 && XEXP (x, 1) != frame_pointer_rtx
2839 && XEXP (x, 0) != virtual_stack_vars_rtx
2840 && XEXP (x, 1) != virtual_stack_vars_rtx
2841 && thumb_index_register_rtx_p (XEXP (x, 0), strict_p)
2842 && thumb_index_register_rtx_p (XEXP (x, 1), strict_p))
2843 return 1;
2845 /* REG+const has 5-7 bit offset for non-SP registers. */
2846 else if ((thumb_index_register_rtx_p (XEXP (x, 0), strict_p)
2847 || XEXP (x, 0) == arg_pointer_rtx)
2848 && GET_CODE (XEXP (x, 1)) == CONST_INT
2849 && thumb_legitimate_offset_p (mode, INTVAL (XEXP (x, 1))))
2850 return 1;
2852 /* REG+const has 10 bit offset for SP, but only SImode and
2853 larger is supported. */
2854 /* ??? Should probably check for DI/DFmode overflow here
2855 just like GO_IF_LEGITIMATE_OFFSET does. */
2856 else if (GET_CODE (XEXP (x, 0)) == REG
2857 && REGNO (XEXP (x, 0)) == STACK_POINTER_REGNUM
2858 && GET_MODE_SIZE (mode) >= 4
2859 && GET_CODE (XEXP (x, 1)) == CONST_INT
2860 && INTVAL (XEXP (x, 1)) >= 0
2861 && INTVAL (XEXP (x, 1)) + GET_MODE_SIZE (mode) <= 1024
2862 && (INTVAL (XEXP (x, 1)) & 3) == 0)
2863 return 1;
2865 else if (GET_CODE (XEXP (x, 0)) == REG
2866 && REGNO (XEXP (x, 0)) == FRAME_POINTER_REGNUM
2867 && GET_MODE_SIZE (mode) >= 4
2868 && GET_CODE (XEXP (x, 1)) == CONST_INT
2869 && (INTVAL (XEXP (x, 1)) & 3) == 0)
2870 return 1;
2873 else if (GET_MODE_CLASS (mode) != MODE_FLOAT
2874 && GET_CODE (x) == SYMBOL_REF
2875 && CONSTANT_POOL_ADDRESS_P (x)
2876 && !(flag_pic
2877 && symbol_mentioned_p (get_pool_constant (x))))
2878 return 1;
2880 return 0;
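/* Editor's illustration of SImode addresses accepted above (Thumb
   state):

       [r0, r1]      -- any two index (lo) registers
       [r0, #124]    -- immediate offset, via thumb_legitimate_offset_p
       [sp, #1020]   -- word-aligned SP-relative offset below 1024

   while, e.g., [sp, #1024] and unaligned SP offsets are rejected.  */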
2883 /* Return nonzero if VAL can be used as an offset in a Thumb-state address
2884 instruction of mode MODE. */
2886 thumb_legitimate_offset_p (mode, val)
2887 enum machine_mode mode;
2888 HOST_WIDE_INT val;
2890 switch (GET_MODE_SIZE (mode))
2892 case 1:
2893 return val >= 0 && val < 32;
2895 case 2:
2896 return val >= 0 && val < 64 && (val & 1) == 0;
2898 default:
2899 return (val >= 0
2900 && (val + GET_MODE_SIZE (mode)) <= 128
2901 && (val & 3) == 0);
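/* Editor's summary of the limits above: QImode allows offsets #0..#31,
   HImode #0..#62 (even only), and word or larger modes need offset +
   size <= 128 with word alignment, so #124 is the largest SImode
   offset.  */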
2907 #define REG_OR_SUBREG_REG(X) \
2908 (GET_CODE (X) == REG \
2909 || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))
2911 #define REG_OR_SUBREG_RTX(X) \
2912 (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))
2914 #ifndef COSTS_N_INSNS
2915 #define COSTS_N_INSNS(N) ((N) * 4 - 2)
2916 #endif
2918 static inline int
2919 arm_rtx_costs_1 (x, code, outer)
2920 rtx x;
2921 enum rtx_code code;
2922 enum rtx_code outer;
2924 enum machine_mode mode = GET_MODE (x);
2925 enum rtx_code subcode;
2926 int extra_cost;
2928 if (TARGET_THUMB)
2930 switch (code)
2932 case ASHIFT:
2933 case ASHIFTRT:
2934 case LSHIFTRT:
2935 case ROTATERT:
2936 case PLUS:
2937 case MINUS:
2938 case COMPARE:
2939 case NEG:
2940 case NOT:
2941 return COSTS_N_INSNS (1);
2943 case MULT:
2944 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
2946 int cycles = 0;
2947 unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));
2949 while (i)
2951 i >>= 2;
2952 cycles++;
2954 return COSTS_N_INSNS (2) + cycles;
2956 return COSTS_N_INSNS (1) + 16;
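/* Editor's example: a Thumb multiply by the constant 0xff iterates
   the loop above four times (0xff -> 0x3f -> 0xf -> 0x3 -> 0), giving
   a cost of COSTS_N_INSNS (2) + 4.  */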
2958 case SET:
2959 return (COSTS_N_INSNS (1)
2960 + 4 * ((GET_CODE (SET_SRC (x)) == MEM)
2961 + (GET_CODE (SET_DEST (x)) == MEM)));
2963 case CONST_INT:
2964 if (outer == SET)
2966 if ((unsigned HOST_WIDE_INT) INTVAL (x) < 256)
2967 return 0;
2968 if (thumb_shiftable_const (INTVAL (x)))
2969 return COSTS_N_INSNS (2);
2970 return COSTS_N_INSNS (3);
2972 else if (outer == PLUS
2973 && INTVAL (x) < 256 && INTVAL (x) > -256)
2974 return 0;
2975 else if (outer == COMPARE
2976 && (unsigned HOST_WIDE_INT) INTVAL (x) < 256)
2977 return 0;
2978 else if (outer == ASHIFT || outer == ASHIFTRT
2979 || outer == LSHIFTRT)
2980 return 0;
2981 return COSTS_N_INSNS (2);
2983 case CONST:
2984 case CONST_DOUBLE:
2985 case LABEL_REF:
2986 case SYMBOL_REF:
2987 return COSTS_N_INSNS (3);
2989 case UDIV:
2990 case UMOD:
2991 case DIV:
2992 case MOD:
2993 return 100;
2995 case TRUNCATE:
2996 return 99;
2998 case AND:
2999 case XOR:
3000 case IOR:
3001 /* XXX guess. */
3002 return 8;
3004 case ADDRESSOF:
3005 case MEM:
3006 /* XXX another guess. */
3007 /* Memory costs quite a lot for the first word, but subsequent words
3008 load at the equivalent of a single insn each. */
3009 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
3010 + ((GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
3011 ? 4 : 0));
3013 case IF_THEN_ELSE:
3014 /* XXX a guess. */
3015 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
3016 return 14;
3017 return 2;
3019 case ZERO_EXTEND:
3020 /* XXX still guessing. */
3021 switch (GET_MODE (XEXP (x, 0)))
3023 case QImode:
3024 return (1 + (mode == DImode ? 4 : 0)
3025 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3027 case HImode:
3028 return (4 + (mode == DImode ? 4 : 0)
3029 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3031 case SImode:
3032 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3034 default:
3035 return 99;
3038 default:
3039 return 99;
3040 #if 0
3041 case FFS:
3042 case FLOAT:
3043 case FIX:
3044 case UNSIGNED_FIX:
3045 /* XXX guess */
3046 fprintf (stderr, "unexpected code for thumb in rtx_costs: %s\n",
3047 rtx_name[code]);
3048 abort ();
3049 #endif
3053 switch (code)
3055 case MEM:
3056 /* Memory costs quite a lot for the first word, but subsequent words
3057 load at the equivalent of a single insn each. */
3058 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
3059 + (GET_CODE (x) == SYMBOL_REF
3060 && CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));
3062 case DIV:
3063 case MOD:
3064 return 100;
3066 case ROTATE:
3067 if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
3068 return 4;
3069 /* Fall through */
3070 case ROTATERT:
3071 if (mode != SImode)
3072 return 8;
3073 /* Fall through */
3074 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
3075 if (mode == DImode)
3076 return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
3077 + ((GET_CODE (XEXP (x, 0)) == REG
3078 || (GET_CODE (XEXP (x, 0)) == SUBREG
3079 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
3080 ? 0 : 8));
3081 return (1 + ((GET_CODE (XEXP (x, 0)) == REG
3082 || (GET_CODE (XEXP (x, 0)) == SUBREG
3083 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
3084 ? 0 : 4)
3085 + ((GET_CODE (XEXP (x, 1)) == REG
3086 || (GET_CODE (XEXP (x, 1)) == SUBREG
3087 && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
3088 || (GET_CODE (XEXP (x, 1)) == CONST_INT))
3089 ? 0 : 4));
3091 case MINUS:
3092 if (mode == DImode)
3093 return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
3094 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
3095 || (GET_CODE (XEXP (x, 0)) == CONST_INT
3096 && const_ok_for_arm (INTVAL (XEXP (x, 0)))))
3097 ? 0 : 8));
3099 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3100 return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
3101 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
3102 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
3103 ? 0 : 8)
3104 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
3105 || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
3106 && const_double_rtx_ok_for_fpu (XEXP (x, 0))))
3107 ? 0 : 8));
3109 if (((GET_CODE (XEXP (x, 0)) == CONST_INT
3110 && const_ok_for_arm (INTVAL (XEXP (x, 0)))
3111 && REG_OR_SUBREG_REG (XEXP (x, 1))))
3112 || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
3113 || subcode == ASHIFTRT || subcode == LSHIFTRT
3114 || subcode == ROTATE || subcode == ROTATERT
3115 || (subcode == MULT
3116 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3117 && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
3118 (INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
3119 && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
3120 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
3121 || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
3122 && REG_OR_SUBREG_REG (XEXP (x, 0))))
3123 return 1;
3124 /* Fall through */
3126 case PLUS:
3127 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3128 return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
3129 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
3130 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
3131 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
3132 ? 0 : 8));
3134 /* Fall through */
3135 case AND: case XOR: case IOR:
3136 extra_cost = 0;
3138 /* Normally the frame registers will be split into reg+const during
3139 reload, so it is a bad idea to combine them with other instructions,
3140 since then they might not be moved outside of loops. As a compromise
3141 we allow integration with ops that have a constant as their second
3142 operand. */
3143 if ((REG_OR_SUBREG_REG (XEXP (x, 0))
3144 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
3145 && GET_CODE (XEXP (x, 1)) != CONST_INT)
3146 || (REG_OR_SUBREG_REG (XEXP (x, 1))
3147 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 1)))))
3148 extra_cost = 4;
3150 if (mode == DImode)
3151 return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
3152 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
3153 || (GET_CODE (XEXP (x, 1)) == CONST_INT
3154 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
3155 ? 0 : 8));
3157 if (REG_OR_SUBREG_REG (XEXP (x, 0)))
3158 return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
3159 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
3160 || (GET_CODE (XEXP (x, 1)) == CONST_INT
3161 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
3162 ? 0 : 4));
3164 else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
3165 return (1 + extra_cost
3166 + ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
3167 || subcode == LSHIFTRT || subcode == ASHIFTRT
3168 || subcode == ROTATE || subcode == ROTATERT
3169 || (subcode == MULT
3170 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3171 && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
3172 (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0)))
3173 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
3174 && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
3175 || GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
3176 ? 0 : 4));
3178 return 8;
3180 case MULT:
3181 /* There is no point basing this on the tuning, since it is always the
3182 fast variant if it exists at all. */
3183 if (arm_fast_multiply && mode == DImode
3184 && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
3185 && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
3186 || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
3187 return 8;
3189 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3190 || mode == DImode)
3191 return 30;
3193 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
3195 unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
3196 & (unsigned HOST_WIDE_INT) 0xffffffff);
3197 int add_cost = const_ok_for_arm (i) ? 4 : 8;
3198 int j;
3200 /* Tune as appropriate. */
3201 int booth_unit_size = ((tune_flags & FL_FAST_MULT) ? 8 : 2);
3203 for (j = 0; i && j < 32; j += booth_unit_size)
3205 i >>= booth_unit_size;
3206 add_cost += 2;
3209 return add_cost;
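/* Editor's example: with FL_FAST_MULT the Booth unit retires 8 bits
   per step, so multiplying by 0x12345678 (not a valid ARM immediate,
   hence a base add_cost of 8) takes four steps: 8 + 4 * 2 = 16.  */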
3212 return (((tune_flags & FL_FAST_MULT) ? 8 : 30)
3213 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
3214 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));
3216 case TRUNCATE:
3217 if (arm_fast_multiply && mode == SImode
3218 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
3219 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
3220 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
3221 == GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)))
3222 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ZERO_EXTEND
3223 || GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SIGN_EXTEND))
3224 return 8;
3225 return 99;
3227 case NEG:
3228 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3229 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
3230 /* Fall through */
3231 case NOT:
3232 if (mode == DImode)
3233 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
3235 return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
3237 case IF_THEN_ELSE:
3238 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
3239 return 14;
3240 return 2;
3242 case COMPARE:
3243 return 1;
3245 case ABS:
3246 return 4 + (mode == DImode ? 4 : 0);
3248 case SIGN_EXTEND:
3249 if (GET_MODE (XEXP (x, 0)) == QImode)
3250 return (4 + (mode == DImode ? 4 : 0)
3251 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3252 /* Fall through */
3253 case ZERO_EXTEND:
3254 switch (GET_MODE (XEXP (x, 0)))
3256 case QImode:
3257 return (1 + (mode == DImode ? 4 : 0)
3258 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3260 case HImode:
3261 return (4 + (mode == DImode ? 4 : 0)
3262 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3264 case SImode:
3265 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3267 default:
3268 break;
3270 abort ();
3272 case CONST_INT:
3273 if (const_ok_for_arm (INTVAL (x)))
3274 return outer == SET ? 2 : -1;
3275 else if (outer == AND
3276 && const_ok_for_arm (~INTVAL (x)))
3277 return -1;
3278 else if ((outer == COMPARE
3279 || outer == PLUS || outer == MINUS)
3280 && const_ok_for_arm (-INTVAL (x)))
3281 return -1;
3282 else
3283 return 5;
3285 case CONST:
3286 case LABEL_REF:
3287 case SYMBOL_REF:
3288 return 6;
3290 case CONST_DOUBLE:
3291 if (const_double_rtx_ok_for_fpu (x))
3292 return outer == SET ? 2 : -1;
3293 else if ((outer == COMPARE || outer == PLUS)
3294 && neg_const_double_rtx_ok_for_fpu (x))
3295 return -1;
3296 return 7;
3298 default:
3299 return 99;
3303 static bool
3304 arm_rtx_costs (x, code, outer_code, total)
3305 rtx x;
3306 int code, outer_code;
3307 int *total;
3309 *total = arm_rtx_costs_1 (x, code, outer_code);
3310 return true;
3313 static int
3314 arm_adjust_cost (insn, link, dep, cost)
3315 rtx insn;
3316 rtx link;
3317 rtx dep;
3318 int cost;
3320 rtx i_pat, d_pat;
3322 /* Some true dependencies can have a higher cost depending
3323 on precisely how certain input operands are used. */
3324 if (arm_is_xscale
3325 && REG_NOTE_KIND (link) == 0
3326 && recog_memoized (insn) >= 0
3327 && recog_memoized (dep) >= 0)
3329 int shift_opnum = get_attr_shift (insn);
3330 enum attr_type attr_type = get_attr_type (dep);
3332 /* If nonzero, SHIFT_OPNUM contains the operand number of a shifted
3333 operand for INSN. If we have a shifted input operand and the
3334 instruction we depend on is another ALU instruction, then we may
3335 have to account for an additional stall. */
3336 if (shift_opnum != 0 && attr_type == TYPE_NORMAL)
3338 rtx shifted_operand;
3339 int opno;
3341 /* Get the shifted operand. */
3342 extract_insn (insn);
3343 shifted_operand = recog_data.operand[shift_opnum];
3345 /* Iterate over all the operands in DEP. If we write an operand
3346 that overlaps with SHIFTED_OPERAND, then we have to increase the
3347 cost of this dependency. */
3348 extract_insn (dep);
3349 preprocess_constraints ();
3350 for (opno = 0; opno < recog_data.n_operands; opno++)
3352 /* We can ignore strict inputs. */
3353 if (recog_data.operand_type[opno] == OP_IN)
3354 continue;
3356 if (reg_overlap_mentioned_p (recog_data.operand[opno],
3357 shifted_operand))
3358 return 2;
3363 /* XXX This is not strictly true for the FPA. */
3364 if (REG_NOTE_KIND (link) == REG_DEP_ANTI
3365 || REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
3366 return 0;
3368 /* Call insns don't incur a stall, even if they follow a load. */
3369 if (REG_NOTE_KIND (link) == 0
3370 && GET_CODE (insn) == CALL_INSN)
3371 return 1;
3373 if ((i_pat = single_set (insn)) != NULL
3374 && GET_CODE (SET_SRC (i_pat)) == MEM
3375 && (d_pat = single_set (dep)) != NULL
3376 && GET_CODE (SET_DEST (d_pat)) == MEM)
3378 rtx src_mem = XEXP (SET_SRC (i_pat), 0);
3379 /* This is a load after a store; there is no conflict if the load reads
3380 from a cached area. Assume that loads from the stack, and from the
3381 constant pool are cached, and that others will miss. This is a
3382 hack. */
3384 if ((GET_CODE (src_mem) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (src_mem))
3385 || reg_mentioned_p (stack_pointer_rtx, src_mem)
3386 || reg_mentioned_p (frame_pointer_rtx, src_mem)
3387 || reg_mentioned_p (hard_frame_pointer_rtx, src_mem))
3388 return 1;
3391 return cost;
3394 /* This code has been fixed for cross compilation. */
3396 static int fpa_consts_inited = 0;
3398 static const char * const strings_fpa[8] =
3400 "0", "1", "2", "3",
3401 "4", "5", "0.5", "10"
3404 static REAL_VALUE_TYPE values_fpa[8];
3406 static void
3407 init_fpa_table ()
3409 int i;
3410 REAL_VALUE_TYPE r;
3412 for (i = 0; i < 8; i++)
3414 r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
3415 values_fpa[i] = r;
3418 fpa_consts_inited = 1;
3421 /* Return TRUE if rtx X is a valid immediate FPU constant. */
3424 const_double_rtx_ok_for_fpu (x)
3425 rtx x;
3427 REAL_VALUE_TYPE r;
3428 int i;
3430 if (!fpa_consts_inited)
3431 init_fpa_table ();
3433 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3434 if (REAL_VALUE_MINUS_ZERO (r))
3435 return 0;
3437 for (i = 0; i < 8; i++)
3438 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
3439 return 1;
3441 return 0;
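/* Editor's note: the FPA can encode exactly the eight immediates in
   strings_fpa, so, e.g., 0.5 and 10.0 are usable as immediate operands
   while 0.25 is not.  Minus zero is rejected explicitly because it
   would otherwise compare equal to the entry for 0.  */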
3444 /* Return TRUE if rtx X is a valid immediate FPU constant when negated. */
3447 neg_const_double_rtx_ok_for_fpu (x)
3448 rtx x;
3450 REAL_VALUE_TYPE r;
3451 int i;
3453 if (!fpa_consts_inited)
3454 init_fpa_table ();
3456 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3457 r = REAL_VALUE_NEGATE (r);
3458 if (REAL_VALUE_MINUS_ZERO (r))
3459 return 0;
3461 for (i = 0; i < 8; i++)
3462 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
3463 return 1;
3465 return 0;
3468 /* Predicates for `match_operand' and `match_operator'. */
3470 /* s_register_operand is the same as register_operand, but it doesn't accept
3471 (SUBREG (MEM)...).
3473 This function exists because at the time it was put in it led to better
3474 code. SUBREG(MEM) always needs a reload in the places where
3475 s_register_operand is used, and this seemed to lead to excessive
3476 reloading. */
3479 s_register_operand (op, mode)
3480 rtx op;
3481 enum machine_mode mode;
3483 if (GET_MODE (op) != mode && mode != VOIDmode)
3484 return 0;
3486 if (GET_CODE (op) == SUBREG)
3487 op = SUBREG_REG (op);
3489 /* We don't consider registers whose class is NO_REGS
3490 to be a register operand. */
3491 /* XXX might have to check for lo regs only for thumb ??? */
3492 return (GET_CODE (op) == REG
3493 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3494 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
3497 /* A hard register operand (even before reload). */
3500 arm_hard_register_operand (op, mode)
3501 rtx op;
3502 enum machine_mode mode;
3504 if (GET_MODE (op) != mode && mode != VOIDmode)
3505 return 0;
3507 return (GET_CODE (op) == REG
3508 && REGNO (op) < FIRST_PSEUDO_REGISTER);
3511 /* Only accept reg, subreg(reg), const_int. */
3514 reg_or_int_operand (op, mode)
3515 rtx op;
3516 enum machine_mode mode;
3518 if (GET_CODE (op) == CONST_INT)
3519 return 1;
3521 if (GET_MODE (op) != mode && mode != VOIDmode)
3522 return 0;
3524 if (GET_CODE (op) == SUBREG)
3525 op = SUBREG_REG (op);
3527 /* We don't consider registers whose class is NO_REGS
3528 to be a register operand. */
3529 return (GET_CODE (op) == REG
3530 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3531 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
3534 /* Return 1 if OP is an item in memory, given that we are in reload. */
3537 arm_reload_memory_operand (op, mode)
3538 rtx op;
3539 enum machine_mode mode ATTRIBUTE_UNUSED;
3541 int regno = true_regnum (op);
3543 return (!CONSTANT_P (op)
3544 && (regno == -1
3545 || (GET_CODE (op) == REG
3546 && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
3549 /* Return 1 if OP is a valid memory address, but not valid for a signed byte
3550 memory access (architecture V4).
3551 MODE is QImode if called when computing constraints, or VOIDmode when
3552 emitting patterns. In this latter case we cannot use memory_operand()
3553 because it will fail on badly formed MEMs, which is precisely what we are
3554 trying to catch. */
3557 bad_signed_byte_operand (op, mode)
3558 rtx op;
3559 enum machine_mode mode ATTRIBUTE_UNUSED;
3561 #if 0
3562 if ((mode == QImode && !memory_operand (op, mode)) || GET_CODE (op) != MEM)
3563 return 0;
3564 #endif
3565 if (GET_CODE (op) != MEM)
3566 return 0;
3568 op = XEXP (op, 0);
3570 /* A sum of anything more complex than reg + reg or reg + const is bad. */
3571 if ((GET_CODE (op) == PLUS || GET_CODE (op) == MINUS)
3572 && (!s_register_operand (XEXP (op, 0), VOIDmode)
3573 || (!s_register_operand (XEXP (op, 1), VOIDmode)
3574 && GET_CODE (XEXP (op, 1)) != CONST_INT)))
3575 return 1;
3577 /* Big constants are also bad. */
3578 if (GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT
3579 && (INTVAL (XEXP (op, 1)) > 0xff
3580 || -INTVAL (XEXP (op, 1)) > 0xff))
3581 return 1;
3583 /* Everything else is good, or will automatically be made so. */
3584 return 0;
3587 /* Return TRUE for valid operands for the rhs of an ARM instruction. */
3590 arm_rhs_operand (op, mode)
3591 rtx op;
3592 enum machine_mode mode;
3594 return (s_register_operand (op, mode)
3595 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
3598 /* Return TRUE for valid operands for the
3599 rhs of an ARM instruction, or a load. */
3602 arm_rhsm_operand (op, mode)
3603 rtx op;
3604 enum machine_mode mode;
3606 return (s_register_operand (op, mode)
3607 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
3608 || memory_operand (op, mode));
3611 /* Return TRUE for valid operands for the rhs of an ARM instruction, or a
3612 constant that is valid when negated. */
3615 arm_add_operand (op, mode)
3616 rtx op;
3617 enum machine_mode mode;
3619 if (TARGET_THUMB)
3620 return thumb_cmp_operand (op, mode);
3622 return (s_register_operand (op, mode)
3623 || (GET_CODE (op) == CONST_INT
3624 && (const_ok_for_arm (INTVAL (op))
3625 || const_ok_for_arm (-INTVAL (op)))));
3629 arm_not_operand (op, mode)
3630 rtx op;
3631 enum machine_mode mode;
3633 return (s_register_operand (op, mode)
3634 || (GET_CODE (op) == CONST_INT
3635 && (const_ok_for_arm (INTVAL (op))
3636 || const_ok_for_arm (~INTVAL (op)))));
3639 /* Return TRUE if the operand is a memory reference which contains an
3640 offsettable address. */
3643 offsettable_memory_operand (op, mode)
3644 rtx op;
3645 enum machine_mode mode;
3647 if (mode == VOIDmode)
3648 mode = GET_MODE (op);
3650 return (mode == GET_MODE (op)
3651 && GET_CODE (op) == MEM
3652 && offsettable_address_p (reload_completed | reload_in_progress,
3653 mode, XEXP (op, 0)));
3656 /* Return TRUE if the operand is a memory reference which is, or can be
3657 made word aligned by adjusting the offset. */
3660 alignable_memory_operand (op, mode)
3661 rtx op;
3662 enum machine_mode mode;
3664 rtx reg;
3666 if (mode == VOIDmode)
3667 mode = GET_MODE (op);
3669 if (mode != GET_MODE (op) || GET_CODE (op) != MEM)
3670 return 0;
3672 op = XEXP (op, 0);
3674 return ((GET_CODE (reg = op) == REG
3675 || (GET_CODE (op) == SUBREG
3676 && GET_CODE (reg = SUBREG_REG (op)) == REG)
3677 || (GET_CODE (op) == PLUS
3678 && GET_CODE (XEXP (op, 1)) == CONST_INT
3679 && (GET_CODE (reg = XEXP (op, 0)) == REG
3680 || (GET_CODE (XEXP (op, 0)) == SUBREG
3681 && GET_CODE (reg = SUBREG_REG (XEXP (op, 0))) == REG))))
3682 && REGNO_POINTER_ALIGN (REGNO (reg)) >= 32);
3685 /* Similar to s_register_operand, but does not allow hard integer
3686 registers. */
3689 f_register_operand (op, mode)
3690 rtx op;
3691 enum machine_mode mode;
3693 if (GET_MODE (op) != mode && mode != VOIDmode)
3694 return 0;
3696 if (GET_CODE (op) == SUBREG)
3697 op = SUBREG_REG (op);
3699 /* We don't consider registers whose class is NO_REGS
3700 to be a register operand. */
3701 return (GET_CODE (op) == REG
3702 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3703 || REGNO_REG_CLASS (REGNO (op)) == FPU_REGS));
3706 /* Return TRUE for valid operands for the rhs of an FPU instruction. */
3709 fpu_rhs_operand (op, mode)
3710 rtx op;
3711 enum machine_mode mode;
3713 if (s_register_operand (op, mode))
3714 return TRUE;
3716 if (GET_MODE (op) != mode && mode != VOIDmode)
3717 return FALSE;
3719 if (GET_CODE (op) == CONST_DOUBLE)
3720 return const_double_rtx_ok_for_fpu (op);
3722 return FALSE;
3726 fpu_add_operand (op, mode)
3727 rtx op;
3728 enum machine_mode mode;
3730 if (s_register_operand (op, mode))
3731 return TRUE;
3733 if (GET_MODE (op) != mode && mode != VOIDmode)
3734 return FALSE;
3736 if (GET_CODE (op) == CONST_DOUBLE)
3737 return (const_double_rtx_ok_for_fpu (op)
3738 || neg_const_double_rtx_ok_for_fpu (op));
3740 return FALSE;
3743 /* Return nonzero if OP is a constant power of two. */
3746 power_of_two_operand (op, mode)
3747 rtx op;
3748 enum machine_mode mode ATTRIBUTE_UNUSED;
3750 if (GET_CODE (op) == CONST_INT)
3752 HOST_WIDE_INT value = INTVAL (op);
3754 return value != 0 && (value & (value - 1)) == 0;
3757 return FALSE;
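/* Editor's example of the test above: 8 & 7 == 0, so (const_int 8) is
   accepted; 12 & 11 == 8, so (const_int 12) is not; the `value != 0'
   clause rejects zero, whose AND would also be zero.  */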
3760 /* Return TRUE for a valid operand of a DImode operation.
3761 Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
3762 Note that this disallows MEM(REG+REG), but allows
3763 MEM(PRE/POST_INC/DEC(REG)). */
3766 di_operand (op, mode)
3767 rtx op;
3768 enum machine_mode mode;
3770 if (s_register_operand (op, mode))
3771 return TRUE;
3773 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
3774 return FALSE;
3776 if (GET_CODE (op) == SUBREG)
3777 op = SUBREG_REG (op);
3779 switch (GET_CODE (op))
3781 case CONST_DOUBLE:
3782 case CONST_INT:
3783 return TRUE;
3785 case MEM:
3786 return memory_address_p (DImode, XEXP (op, 0));
3788 default:
3789 return FALSE;
3793 /* Like di_operand, but don't accept constants. */
3796 nonimmediate_di_operand (op, mode)
3797 rtx op;
3798 enum machine_mode mode;
3800 if (s_register_operand (op, mode))
3801 return TRUE;
3803 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
3804 return FALSE;
3806 if (GET_CODE (op) == SUBREG)
3807 op = SUBREG_REG (op);
3809 if (GET_CODE (op) == MEM)
3810 return memory_address_p (DImode, XEXP (op, 0));
3812 return FALSE;
3815 /* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
3816 Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
3817 Note that this disallows MEM(REG+REG), but allows
3818 MEM(PRE/POST_INC/DEC(REG)). */
3821 soft_df_operand (op, mode)
3822 rtx op;
3823 enum machine_mode mode;
3825 if (s_register_operand (op, mode))
3826 return TRUE;
3828 if (mode != VOIDmode && GET_MODE (op) != mode)
3829 return FALSE;
3831 if (GET_CODE (op) == SUBREG && CONSTANT_P (SUBREG_REG (op)))
3832 return FALSE;
3834 if (GET_CODE (op) == SUBREG)
3835 op = SUBREG_REG (op);
3837 switch (GET_CODE (op))
3839 case CONST_DOUBLE:
3840 return TRUE;
3842 case MEM:
3843 return memory_address_p (DFmode, XEXP (op, 0));
3845 default:
3846 return FALSE;
3850 /* Like soft_df_operand, but don't accept constants. */
3853 nonimmediate_soft_df_operand (op, mode)
3854 rtx op;
3855 enum machine_mode mode;
3857 if (s_register_operand (op, mode))
3858 return TRUE;
3860 if (mode != VOIDmode && GET_MODE (op) != mode)
3861 return FALSE;
3863 if (GET_CODE (op) == SUBREG)
3864 op = SUBREG_REG (op);
3866 if (GET_CODE (op) == MEM)
3867 return memory_address_p (DFmode, XEXP (op, 0));
3868 return FALSE;
3871 /* Return TRUE for valid index operands. */
3874 index_operand (op, mode)
3875 rtx op;
3876 enum machine_mode mode;
3878 return (s_register_operand (op, mode)
3879 || (immediate_operand (op, mode)
3880 && (GET_CODE (op) != CONST_INT
3881 || (INTVAL (op) < 4096 && INTVAL (op) > -4096))));
3884 /* Return TRUE for valid shifts by a constant. This also accepts any
3885 power of two on the (somewhat overly relaxed) assumption that the
3886 shift operator in this case was a mult. */
3889 const_shift_operand (op, mode)
3890 rtx op;
3891 enum machine_mode mode;
3893 return (power_of_two_operand (op, mode)
3894 || (immediate_operand (op, mode)
3895 && (GET_CODE (op) != CONST_INT
3896 || (INTVAL (op) < 32 && INTVAL (op) > 0))));
3899 /* Return TRUE for arithmetic operators which can be combined with a multiply
3900 (shift). */
3903 shiftable_operator (x, mode)
3904 rtx x;
3905 enum machine_mode mode;
3907 enum rtx_code code;
3909 if (GET_MODE (x) != mode)
3910 return FALSE;
3912 code = GET_CODE (x);
3914 return (code == PLUS || code == MINUS
3915 || code == IOR || code == XOR || code == AND);
3918 /* Return TRUE for binary logical operators. */
3921 logical_binary_operator (x, mode)
3922 rtx x;
3923 enum machine_mode mode;
3925 enum rtx_code code;
3927 if (GET_MODE (x) != mode)
3928 return FALSE;
3930 code = GET_CODE (x);
3932 return (code == IOR || code == XOR || code == AND);
3935 /* Return TRUE for shift operators. */
3938 shift_operator (x, mode)
3939 rtx x;
3940 enum machine_mode mode;
3942 enum rtx_code code;
3944 if (GET_MODE (x) != mode)
3945 return FALSE;
3947 code = GET_CODE (x);
3949 if (code == MULT)
3950 return power_of_two_operand (XEXP (x, 1), mode);
3952 return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
3953 || code == ROTATERT);
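/* Editor's note: MULT is accepted above because, inside addresses and
   combined patterns, GCC canonicalizes (ashift X (const_int N)) as
   (mult X (const_int 2**N)); thus (mult X 8) stands for a left shift
   by three.  */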
3956 /* Return TRUE if x is EQ or NE. */
3959 equality_operator (x, mode)
3960 rtx x;
3961 enum machine_mode mode ATTRIBUTE_UNUSED;
3963 return GET_CODE (x) == EQ || GET_CODE (x) == NE;
3966 /* Return TRUE if x is a comparison operator other than LTGT or UNEQ. */
3969 arm_comparison_operator (x, mode)
3970 rtx x;
3971 enum machine_mode mode;
3973 return (comparison_operator (x, mode)
3974 && GET_CODE (x) != LTGT
3975 && GET_CODE (x) != UNEQ);
3978 /* Return TRUE for SMIN SMAX UMIN UMAX operators. */
3981 minmax_operator (x, mode)
3982 rtx x;
3983 enum machine_mode mode;
3985 enum rtx_code code = GET_CODE (x);
3987 if (GET_MODE (x) != mode)
3988 return FALSE;
3990 return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
3993 /* Return TRUE if this is the condition code register; if we aren't given
3994 a mode, accept any class CCmode register. */
3997 cc_register (x, mode)
3998 rtx x;
3999 enum machine_mode mode;
4001 if (mode == VOIDmode)
4003 mode = GET_MODE (x);
4005 if (GET_MODE_CLASS (mode) != MODE_CC)
4006 return FALSE;
4009 if ( GET_MODE (x) == mode
4010 && GET_CODE (x) == REG
4011 && REGNO (x) == CC_REGNUM)
4012 return TRUE;
4014 return FALSE;
/* Return TRUE if this is the condition code register; if we aren't given
   a mode, accept any class CCmode register which indicates a dominance
   expression.  */
4022 dominant_cc_register (x, mode)
4023 rtx x;
4024 enum machine_mode mode;
4026 if (mode == VOIDmode)
4028 mode = GET_MODE (x);
4030 if (GET_MODE_CLASS (mode) != MODE_CC)
4031 return FALSE;
4034 if ( mode != CC_DNEmode && mode != CC_DEQmode
4035 && mode != CC_DLEmode && mode != CC_DLTmode
4036 && mode != CC_DGEmode && mode != CC_DGTmode
4037 && mode != CC_DLEUmode && mode != CC_DLTUmode
4038 && mode != CC_DGEUmode && mode != CC_DGTUmode)
4039 return FALSE;
4041 return cc_register (x, mode);
4044 /* Return TRUE if X references a SYMBOL_REF. */
4047 symbol_mentioned_p (x)
4048 rtx x;
4050 const char * fmt;
4051 int i;
4053 if (GET_CODE (x) == SYMBOL_REF)
4054 return 1;
4056 fmt = GET_RTX_FORMAT (GET_CODE (x));
4058 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4060 if (fmt[i] == 'E')
4062 int j;
4064 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4065 if (symbol_mentioned_p (XVECEXP (x, i, j)))
4066 return 1;
4068 else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
4069 return 1;
4072 return 0;
4075 /* Return TRUE if X references a LABEL_REF. */
4078 label_mentioned_p (x)
4079 rtx x;
4081 const char * fmt;
4082 int i;
4084 if (GET_CODE (x) == LABEL_REF)
4085 return 1;
4087 fmt = GET_RTX_FORMAT (GET_CODE (x));
4088 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4090 if (fmt[i] == 'E')
4092 int j;
4094 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4095 if (label_mentioned_p (XVECEXP (x, i, j)))
4096 return 1;
4098 else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
4099 return 1;
4102 return 0;
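/* Map a min/max rtx code onto the comparison code that implements it:
   SMAX corresponds to GE (keep X when X >= Y), SMIN to LE, and UMIN/UMAX
   to the unsigned forms LEU/GEU.  Aborts on any other code.  */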
4105 enum rtx_code
4106 minmax_code (x)
4107 rtx x;
4109 enum rtx_code code = GET_CODE (x);
4111 if (code == SMAX)
4112 return GE;
4113 else if (code == SMIN)
4114 return LE;
4115 else if (code == UMIN)
4116 return LEU;
4117 else if (code == UMAX)
4118 return GEU;
4120 abort ();
4123 /* Return 1 if memory locations are adjacent. */
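/* For instance, MEMs addressed as (reg) and (plus reg 4) are adjacent,
   as are (plus reg 8) and (plus reg 4): the test below only requires the
   same base register and offsets exactly four bytes apart, in either
   order.  */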
4126 adjacent_mem_locations (a, b)
4127 rtx a, b;
4129 if ((GET_CODE (XEXP (a, 0)) == REG
4130 || (GET_CODE (XEXP (a, 0)) == PLUS
4131 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
4132 && (GET_CODE (XEXP (b, 0)) == REG
4133 || (GET_CODE (XEXP (b, 0)) == PLUS
4134 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
4136 int val0 = 0, val1 = 0;
4137 int reg0, reg1;
4139 if (GET_CODE (XEXP (a, 0)) == PLUS)
4141 reg0 = REGNO (XEXP (XEXP (a, 0), 0));
4142 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
4144 else
4145 reg0 = REGNO (XEXP (a, 0));
4147 if (GET_CODE (XEXP (b, 0)) == PLUS)
4149 reg1 = REGNO (XEXP (XEXP (b, 0), 0));
4150 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
4152 else
4153 reg1 = REGNO (XEXP (b, 0));
4155 return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
4157 return 0;
/* Return 1 if OP is a load multiple operation.  It is known to be a
   PARALLEL and the first section will be tested.  */
4164 load_multiple_operation (op, mode)
4165 rtx op;
4166 enum machine_mode mode ATTRIBUTE_UNUSED;
4168 HOST_WIDE_INT count = XVECLEN (op, 0);
4169 int dest_regno;
4170 rtx src_addr;
4171 HOST_WIDE_INT i = 1, base = 0;
4172 rtx elt;
4174 if (count <= 1
4175 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
4176 return 0;
4178 /* Check to see if this might be a write-back. */
4179 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
4181 i++;
4182 base = 1;
4184 /* Now check it more carefully. */
4185 if (GET_CODE (SET_DEST (elt)) != REG
4186 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
4187 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
4188 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
4189 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
4190 return 0;
4193 /* Perform a quick check so we don't blow up below. */
4194 if (count <= i
4195 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
4196 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
4197 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
4198 return 0;
4200 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
4201 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);
4203 for (; i < count; i++)
4205 elt = XVECEXP (op, 0, i);
4207 if (GET_CODE (elt) != SET
4208 || GET_CODE (SET_DEST (elt)) != REG
4209 || GET_MODE (SET_DEST (elt)) != SImode
4210 || REGNO (SET_DEST (elt)) != (unsigned int)(dest_regno + i - base)
4211 || GET_CODE (SET_SRC (elt)) != MEM
4212 || GET_MODE (SET_SRC (elt)) != SImode
4213 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
4214 || !rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
4215 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
4216 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
4217 return 0;
4220 return 1;
/* Return 1 if OP is a store multiple operation.  It is known to be a
   PARALLEL and the first section will be tested.  */
4227 store_multiple_operation (op, mode)
4228 rtx op;
4229 enum machine_mode mode ATTRIBUTE_UNUSED;
4231 HOST_WIDE_INT count = XVECLEN (op, 0);
4232 int src_regno;
4233 rtx dest_addr;
4234 HOST_WIDE_INT i = 1, base = 0;
4235 rtx elt;
4237 if (count <= 1
4238 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
4239 return 0;
4241 /* Check to see if this might be a write-back. */
4242 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
4244 i++;
4245 base = 1;
4247 /* Now check it more carefully. */
4248 if (GET_CODE (SET_DEST (elt)) != REG
4249 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
4250 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
4251 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
4252 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
4253 return 0;
4256 /* Perform a quick check so we don't blow up below. */
4257 if (count <= i
4258 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
4259 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
4260 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
4261 return 0;
4263 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
4264 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);
4266 for (; i < count; i++)
4268 elt = XVECEXP (op, 0, i);
4270 if (GET_CODE (elt) != SET
4271 || GET_CODE (SET_SRC (elt)) != REG
4272 || GET_MODE (SET_SRC (elt)) != SImode
4273 || REGNO (SET_SRC (elt)) != (unsigned int)(src_regno + i - base)
4274 || GET_CODE (SET_DEST (elt)) != MEM
4275 || GET_MODE (SET_DEST (elt)) != SImode
4276 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
4277 || !rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
4278 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
4279 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
4280 return 0;
4283 return 1;
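/* Examine OPERANDS (NOPS destination registers followed by their NOPS
   memory references) and decide whether they can be combined into a
   load-multiple.  On success the sorted register numbers are returned in
   REGS, the base register in BASE and the lowest offset in LOAD_OFFSET;
   the return value selects the addressing mode (see the numbered returns
   below: 1 = ldmia, 2 = ldmib, 3 = ldmda, 4 = ldmdb, 5 = the base must
   be adjusted first), with 0 meaning the combination is not possible.  */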
4287 load_multiple_sequence (operands, nops, regs, base, load_offset)
4288 rtx * operands;
4289 int nops;
4290 int * regs;
4291 int * base;
4292 HOST_WIDE_INT * load_offset;
4294 int unsorted_regs[4];
4295 HOST_WIDE_INT unsorted_offsets[4];
4296 int order[4];
4297 int base_reg = -1;
4298 int i;
4300 /* Can only handle 2, 3, or 4 insns at present,
4301 though could be easily extended if required. */
4302 if (nops < 2 || nops > 4)
4303 abort ();
4305 /* Loop over the operands and check that the memory references are
suitable (i.e. immediate offsets from the same base register).  At
4307 the same time, extract the target register, and the memory
4308 offsets. */
4309 for (i = 0; i < nops; i++)
4311 rtx reg;
4312 rtx offset;
4314 /* Convert a subreg of a mem into the mem itself. */
4315 if (GET_CODE (operands[nops + i]) == SUBREG)
4316 operands[nops + i] = alter_subreg (operands + (nops + i));
4318 if (GET_CODE (operands[nops + i]) != MEM)
4319 abort ();
4321 /* Don't reorder volatile memory references; it doesn't seem worth
4322 looking for the case where the order is ok anyway. */
4323 if (MEM_VOLATILE_P (operands[nops + i]))
4324 return 0;
4326 offset = const0_rtx;
4328 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
4329 || (GET_CODE (reg) == SUBREG
4330 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4331 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
4332 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
4333 == REG)
4334 || (GET_CODE (reg) == SUBREG
4335 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4336 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
4337 == CONST_INT)))
4339 if (i == 0)
4341 base_reg = REGNO (reg);
4342 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
4343 ? REGNO (operands[i])
4344 : REGNO (SUBREG_REG (operands[i])));
4345 order[0] = 0;
4347 else
4349 if (base_reg != (int) REGNO (reg))
4350 /* Not addressed from the same base register. */
4351 return 0;
4353 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
4354 ? REGNO (operands[i])
4355 : REGNO (SUBREG_REG (operands[i])));
4356 if (unsorted_regs[i] < unsorted_regs[order[0]])
4357 order[0] = i;
4360 /* If it isn't an integer register, or if it overwrites the
4361 base register but isn't the last insn in the list, then
4362 we can't do this. */
4363 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14
4364 || (i != nops - 1 && unsorted_regs[i] == base_reg))
4365 return 0;
4367 unsorted_offsets[i] = INTVAL (offset);
4369 else
4370 /* Not a suitable memory address. */
4371 return 0;
4374 /* All the useful information has now been extracted from the
4375 operands into unsorted_regs and unsorted_offsets; additionally,
4376 order[0] has been set to the lowest numbered register in the
4377 list. Sort the registers into order, and check that the memory
4378 offsets are ascending and adjacent. */
4380 for (i = 1; i < nops; i++)
4382 int j;
4384 order[i] = order[i - 1];
4385 for (j = 0; j < nops; j++)
4386 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
4387 && (order[i] == order[i - 1]
4388 || unsorted_regs[j] < unsorted_regs[order[i]]))
4389 order[i] = j;
/* Have we found a suitable register?  If not, one must be used more
4392 than once. */
4393 if (order[i] == order[i - 1])
4394 return 0;
4396 /* Is the memory address adjacent and ascending? */
4397 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
4398 return 0;
4401 if (base)
4403 *base = base_reg;
4405 for (i = 0; i < nops; i++)
4406 regs[i] = unsorted_regs[order[i]];
4408 *load_offset = unsorted_offsets[order[0]];
4411 if (unsorted_offsets[order[0]] == 0)
4412 return 1; /* ldmia */
4414 if (unsorted_offsets[order[0]] == 4)
4415 return 2; /* ldmib */
4417 if (unsorted_offsets[order[nops - 1]] == 0)
4418 return 3; /* ldmda */
4420 if (unsorted_offsets[order[nops - 1]] == -4)
4421 return 4; /* ldmdb */
4423 /* For ARM8,9 & StrongARM, 2 ldr instructions are faster than an ldm
4424 if the offset isn't small enough. The reason 2 ldrs are faster
4425 is because these ARMs are able to do more than one cache access
4426 in a single cycle. The ARM9 and StrongARM have Harvard caches,
4427 whilst the ARM8 has a double bandwidth cache. This means that
4428 these cores can do both an instruction fetch and a data fetch in
4429 a single cycle, so the trick of calculating the address into a
4430 scratch register (one of the result regs) and then doing a load
4431 multiple actually becomes slower (and no smaller in code size).
4432 That is the transformation
4434 ldr rd1, [rbase + offset]
	ldr	rd2, [rbase + offset + 4]

   to

	add	rd1, rbase, offset
4440 ldmia rd1, {rd1, rd2}
4442 produces worse code -- '3 cycles + any stalls on rd2' instead of
4443 '2 cycles + any stalls on rd2'. On ARMs with only one cache
4444 access per cycle, the first sequence could never complete in less
4445 than 6 cycles, whereas the ldm sequence would only take 5 and
4446 would make better use of sequential accesses if not hitting the
4447 cache.
4449 We cheat here and test 'arm_ld_sched' which we currently know to
4450 only be true for the ARM8, ARM9 and StrongARM. If this ever
4451 changes, then the test below needs to be reworked. */
4452 if (nops == 2 && arm_ld_sched)
4453 return 0;
4455 /* Can't do it without setting up the offset, only do this if it takes
4456 no more than one insn. */
4457 return (const_ok_for_arm (unsorted_offsets[order[0]])
4458 || const_ok_for_arm (-unsorted_offsets[order[0]])) ? 5 : 0;
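/* Output the assembler for the load-multiple peephole described by
   OPERANDS, using load_multiple_sequence above to pick the addressing
   mode; the instruction is emitted directly, so the empty string is
   returned.  */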
4461 const char *
4462 emit_ldm_seq (operands, nops)
4463 rtx * operands;
4464 int nops;
4466 int regs[4];
4467 int base_reg;
4468 HOST_WIDE_INT offset;
4469 char buf[100];
4470 int i;
4472 switch (load_multiple_sequence (operands, nops, regs, &base_reg, &offset))
4474 case 1:
4475 strcpy (buf, "ldm%?ia\t");
4476 break;
4478 case 2:
4479 strcpy (buf, "ldm%?ib\t");
4480 break;
4482 case 3:
4483 strcpy (buf, "ldm%?da\t");
4484 break;
4486 case 4:
4487 strcpy (buf, "ldm%?db\t");
4488 break;
4490 case 5:
4491 if (offset >= 0)
4492 sprintf (buf, "add%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
4493 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
4494 (long) offset);
4495 else
4496 sprintf (buf, "sub%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
4497 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
4498 (long) -offset);
4499 output_asm_insn (buf, operands);
4500 base_reg = regs[0];
4501 strcpy (buf, "ldm%?ia\t");
4502 break;
4504 default:
4505 abort ();
4508 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
4509 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
4511 for (i = 1; i < nops; i++)
4512 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
4513 reg_names[regs[i]]);
4515 strcat (buf, "}\t%@ phole ldm");
4517 output_asm_insn (buf, operands);
4518 return "";
4522 store_multiple_sequence (operands, nops, regs, base, load_offset)
4523 rtx * operands;
4524 int nops;
4525 int * regs;
4526 int * base;
4527 HOST_WIDE_INT * load_offset;
4529 int unsorted_regs[4];
4530 HOST_WIDE_INT unsorted_offsets[4];
4531 int order[4];
4532 int base_reg = -1;
4533 int i;
4535 /* Can only handle 2, 3, or 4 insns at present, though could be easily
4536 extended if required. */
4537 if (nops < 2 || nops > 4)
4538 abort ();
4540 /* Loop over the operands and check that the memory references are
suitable (i.e. immediate offsets from the same base register).  At
4542 the same time, extract the target register, and the memory
4543 offsets. */
4544 for (i = 0; i < nops; i++)
4546 rtx reg;
4547 rtx offset;
4549 /* Convert a subreg of a mem into the mem itself. */
4550 if (GET_CODE (operands[nops + i]) == SUBREG)
4551 operands[nops + i] = alter_subreg (operands + (nops + i));
4553 if (GET_CODE (operands[nops + i]) != MEM)
4554 abort ();
4556 /* Don't reorder volatile memory references; it doesn't seem worth
4557 looking for the case where the order is ok anyway. */
4558 if (MEM_VOLATILE_P (operands[nops + i]))
4559 return 0;
4561 offset = const0_rtx;
4563 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
4564 || (GET_CODE (reg) == SUBREG
4565 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4566 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
4567 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
4568 == REG)
4569 || (GET_CODE (reg) == SUBREG
4570 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4571 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
4572 == CONST_INT)))
4574 if (i == 0)
4576 base_reg = REGNO (reg);
4577 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
4578 ? REGNO (operands[i])
4579 : REGNO (SUBREG_REG (operands[i])));
4580 order[0] = 0;
4582 else
4584 if (base_reg != (int) REGNO (reg))
4585 /* Not addressed from the same base register. */
4586 return 0;
4588 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
4589 ? REGNO (operands[i])
4590 : REGNO (SUBREG_REG (operands[i])));
4591 if (unsorted_regs[i] < unsorted_regs[order[0]])
4592 order[0] = i;
4595 /* If it isn't an integer register, then we can't do this. */
4596 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14)
4597 return 0;
4599 unsorted_offsets[i] = INTVAL (offset);
4601 else
4602 /* Not a suitable memory address. */
4603 return 0;
4606 /* All the useful information has now been extracted from the
4607 operands into unsorted_regs and unsorted_offsets; additionally,
4608 order[0] has been set to the lowest numbered register in the
4609 list. Sort the registers into order, and check that the memory
4610 offsets are ascending and adjacent. */
4612 for (i = 1; i < nops; i++)
4614 int j;
4616 order[i] = order[i - 1];
4617 for (j = 0; j < nops; j++)
4618 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
4619 && (order[i] == order[i - 1]
4620 || unsorted_regs[j] < unsorted_regs[order[i]]))
4621 order[i] = j;
/* Have we found a suitable register?  If not, one must be used more
4624 than once. */
4625 if (order[i] == order[i - 1])
4626 return 0;
4628 /* Is the memory address adjacent and ascending? */
4629 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
4630 return 0;
4633 if (base)
4635 *base = base_reg;
4637 for (i = 0; i < nops; i++)
4638 regs[i] = unsorted_regs[order[i]];
4640 *load_offset = unsorted_offsets[order[0]];
4643 if (unsorted_offsets[order[0]] == 0)
4644 return 1; /* stmia */
4646 if (unsorted_offsets[order[0]] == 4)
4647 return 2; /* stmib */
4649 if (unsorted_offsets[order[nops - 1]] == 0)
4650 return 3; /* stmda */
4652 if (unsorted_offsets[order[nops - 1]] == -4)
4653 return 4; /* stmdb */
4655 return 0;
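/* Output the assembler for the store-multiple peephole described by
   OPERANDS; the counterpart of emit_ldm_seq above.  */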
4658 const char *
4659 emit_stm_seq (operands, nops)
4660 rtx * operands;
4661 int nops;
4663 int regs[4];
4664 int base_reg;
4665 HOST_WIDE_INT offset;
4666 char buf[100];
4667 int i;
4669 switch (store_multiple_sequence (operands, nops, regs, &base_reg, &offset))
4671 case 1:
4672 strcpy (buf, "stm%?ia\t");
4673 break;
4675 case 2:
4676 strcpy (buf, "stm%?ib\t");
4677 break;
4679 case 3:
4680 strcpy (buf, "stm%?da\t");
4681 break;
4683 case 4:
4684 strcpy (buf, "stm%?db\t");
4685 break;
4687 default:
4688 abort ();
4691 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
4692 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
4694 for (i = 1; i < nops; i++)
4695 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
4696 reg_names[regs[i]]);
4698 strcat (buf, "}\t%@ phole stm");
4700 output_asm_insn (buf, operands);
4701 return "";
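/* Return 1 if OP is a PARALLEL whose first element is a SET from an
   UNSPEC_PUSH_MULT, i.e. the pattern generated for a multi-register
   push.  */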
4705 multi_register_push (op, mode)
4706 rtx op;
4707 enum machine_mode mode ATTRIBUTE_UNUSED;
4709 if (GET_CODE (op) != PARALLEL
4710 || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
4711 || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
4712 || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != UNSPEC_PUSH_MULT))
4713 return 0;
4715 return 1;
4718 /* Routines for use in generating RTL. */
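/* Build RTL to load COUNT consecutive SImode registers, starting at
   BASE_REGNO, from successive words at FROM (ascending if UP, else
   descending).  With WRITE_BACK the base is updated past the block.  The
   UNCHANGING_P, IN_STRUCT_P and SCALAR_P flags are propagated to each
   MEM created.  On XScale (see below) small counts are emitted as
   individual loads rather than an ldm.  */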
4721 arm_gen_load_multiple (base_regno, count, from, up, write_back, unchanging_p,
4722 in_struct_p, scalar_p)
4723 int base_regno;
4724 int count;
4725 rtx from;
4726 int up;
4727 int write_back;
4728 int unchanging_p;
4729 int in_struct_p;
4730 int scalar_p;
4732 int i = 0, j;
4733 rtx result;
4734 int sign = up ? 1 : -1;
4735 rtx mem;
4737 /* XScale has load-store double instructions, but they have stricter
alignment requirements than load-store multiple, so we cannot
4739 use them.
4741 For XScale ldm requires 2 + NREGS cycles to complete and blocks
4742 the pipeline until completion.
	NREGS		CYCLES
	  1		  3
	  2		  4
	  3		  5
	  4		  6
4750 An ldr instruction takes 1-3 cycles, but does not block the
4751 pipeline.
4753 NREGS CYCLES
4754 1 1-3
4755 2 2-6
4756 3 3-9
4757 4 4-12
4759 Best case ldr will always win. However, the more ldr instructions
4760 we issue, the less likely we are to be able to schedule them well.
4761 Using ldr instructions also increases code size.
4763 As a compromise, we use ldr for counts of 1 or 2 regs, and ldm
4764 for counts of 3 or 4 regs. */
4765 if (arm_is_xscale && count <= 2 && ! optimize_size)
4767 rtx seq;
4769 start_sequence ();
4771 for (i = 0; i < count; i++)
4773 mem = gen_rtx_MEM (SImode, plus_constant (from, i * 4 * sign));
4774 RTX_UNCHANGING_P (mem) = unchanging_p;
4775 MEM_IN_STRUCT_P (mem) = in_struct_p;
4776 MEM_SCALAR_P (mem) = scalar_p;
4777 emit_move_insn (gen_rtx_REG (SImode, base_regno + i), mem);
4780 if (write_back)
4781 emit_move_insn (from, plus_constant (from, count * 4 * sign));
4783 seq = get_insns ();
4784 end_sequence ();
4786 return seq;
4789 result = gen_rtx_PARALLEL (VOIDmode,
4790 rtvec_alloc (count + (write_back ? 1 : 0)));
4791 if (write_back)
4793 XVECEXP (result, 0, 0)
4794 = gen_rtx_SET (GET_MODE (from), from,
4795 plus_constant (from, count * 4 * sign));
4796 i = 1;
4797 count++;
4800 for (j = 0; i < count; i++, j++)
4802 mem = gen_rtx_MEM (SImode, plus_constant (from, j * 4 * sign));
4803 RTX_UNCHANGING_P (mem) = unchanging_p;
4804 MEM_IN_STRUCT_P (mem) = in_struct_p;
4805 MEM_SCALAR_P (mem) = scalar_p;
4806 XVECEXP (result, 0, i)
4807 = gen_rtx_SET (VOIDmode, gen_rtx_REG (SImode, base_regno + j), mem);
4810 return result;
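/* As arm_gen_load_multiple above, but storing the registers to TO rather
   than loading them.  */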
4814 arm_gen_store_multiple (base_regno, count, to, up, write_back, unchanging_p,
4815 in_struct_p, scalar_p)
4816 int base_regno;
4817 int count;
4818 rtx to;
4819 int up;
4820 int write_back;
4821 int unchanging_p;
4822 int in_struct_p;
4823 int scalar_p;
4825 int i = 0, j;
4826 rtx result;
4827 int sign = up ? 1 : -1;
4828 rtx mem;
4830 /* See arm_gen_load_multiple for discussion of
4831 the pros/cons of ldm/stm usage for XScale. */
4832 if (arm_is_xscale && count <= 2 && ! optimize_size)
4834 rtx seq;
4836 start_sequence ();
4838 for (i = 0; i < count; i++)
4840 mem = gen_rtx_MEM (SImode, plus_constant (to, i * 4 * sign));
4841 RTX_UNCHANGING_P (mem) = unchanging_p;
4842 MEM_IN_STRUCT_P (mem) = in_struct_p;
4843 MEM_SCALAR_P (mem) = scalar_p;
4844 emit_move_insn (mem, gen_rtx_REG (SImode, base_regno + i));
4847 if (write_back)
4848 emit_move_insn (to, plus_constant (to, count * 4 * sign));
4850 seq = get_insns ();
4851 end_sequence ();
4853 return seq;
4856 result = gen_rtx_PARALLEL (VOIDmode,
4857 rtvec_alloc (count + (write_back ? 1 : 0)));
4858 if (write_back)
4860 XVECEXP (result, 0, 0)
4861 = gen_rtx_SET (GET_MODE (to), to,
4862 plus_constant (to, count * 4 * sign));
4863 i = 1;
4864 count++;
4867 for (j = 0; i < count; i++, j++)
4869 mem = gen_rtx_MEM (SImode, plus_constant (to, j * 4 * sign));
4870 RTX_UNCHANGING_P (mem) = unchanging_p;
4871 MEM_IN_STRUCT_P (mem) = in_struct_p;
4872 MEM_SCALAR_P (mem) = scalar_p;
4874 XVECEXP (result, 0, i)
4875 = gen_rtx_SET (VOIDmode, mem, gen_rtx_REG (SImode, base_regno + j));
4878 return result;
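/* Expand a block copy: operands[0] is the destination MEM, operands[1]
   the source MEM, operands[2] the byte count and operands[3] the
   alignment.  Returns 1 on success, or 0 when the count is not a
   suitable constant (non-CONST_INT, larger than 64 bytes) or the
   alignment is not a multiple of four bytes.  */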
4882 arm_gen_movstrqi (operands)
4883 rtx * operands;
4885 HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
4886 int i;
4887 rtx src, dst;
4888 rtx st_src, st_dst, fin_src, fin_dst;
4889 rtx part_bytes_reg = NULL;
4890 rtx mem;
4891 int dst_unchanging_p, dst_in_struct_p, src_unchanging_p, src_in_struct_p;
4892 int dst_scalar_p, src_scalar_p;
4894 if (GET_CODE (operands[2]) != CONST_INT
4895 || GET_CODE (operands[3]) != CONST_INT
4896 || INTVAL (operands[2]) > 64
4897 || INTVAL (operands[3]) & 3)
4898 return 0;
4900 st_dst = XEXP (operands[0], 0);
4901 st_src = XEXP (operands[1], 0);
4903 dst_unchanging_p = RTX_UNCHANGING_P (operands[0]);
4904 dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
4905 dst_scalar_p = MEM_SCALAR_P (operands[0]);
4906 src_unchanging_p = RTX_UNCHANGING_P (operands[1]);
4907 src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);
4908 src_scalar_p = MEM_SCALAR_P (operands[1]);
4910 fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
4911 fin_src = src = copy_to_mode_reg (SImode, st_src);
4913 in_words_to_go = ARM_NUM_INTS (INTVAL (operands[2]));
4914 out_words_to_go = INTVAL (operands[2]) / 4;
4915 last_bytes = INTVAL (operands[2]) & 3;
4917 if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
4918 part_bytes_reg = gen_rtx_REG (SImode, (in_words_to_go - 1) & 3);
4920 for (i = 0; in_words_to_go >= 2; i+=4)
4922 if (in_words_to_go > 4)
4923 emit_insn (arm_gen_load_multiple (0, 4, src, TRUE, TRUE,
4924 src_unchanging_p,
4925 src_in_struct_p,
4926 src_scalar_p));
4927 else
4928 emit_insn (arm_gen_load_multiple (0, in_words_to_go, src, TRUE,
4929 FALSE, src_unchanging_p,
4930 src_in_struct_p, src_scalar_p));
4932 if (out_words_to_go)
4934 if (out_words_to_go > 4)
4935 emit_insn (arm_gen_store_multiple (0, 4, dst, TRUE, TRUE,
4936 dst_unchanging_p,
4937 dst_in_struct_p,
4938 dst_scalar_p));
4939 else if (out_words_to_go != 1)
4940 emit_insn (arm_gen_store_multiple (0, out_words_to_go,
4941 dst, TRUE,
4942 (last_bytes == 0
4943 ? FALSE : TRUE),
4944 dst_unchanging_p,
4945 dst_in_struct_p,
4946 dst_scalar_p));
4947 else
4949 mem = gen_rtx_MEM (SImode, dst);
4950 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4951 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4952 MEM_SCALAR_P (mem) = dst_scalar_p;
4953 emit_move_insn (mem, gen_rtx_REG (SImode, 0));
4954 if (last_bytes != 0)
4955 emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
4959 in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
4960 out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
4963 /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do. */
4964 if (out_words_to_go)
4966 rtx sreg;
4968 mem = gen_rtx_MEM (SImode, src);
4969 RTX_UNCHANGING_P (mem) = src_unchanging_p;
4970 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
4971 MEM_SCALAR_P (mem) = src_scalar_p;
4972 emit_move_insn (sreg = gen_reg_rtx (SImode), mem);
4973 emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));
4975 mem = gen_rtx_MEM (SImode, dst);
4976 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4977 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4978 MEM_SCALAR_P (mem) = dst_scalar_p;
4979 emit_move_insn (mem, sreg);
4980 emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
4981 in_words_to_go--;
4983 if (in_words_to_go) /* Sanity check */
4984 abort ();
4987 if (in_words_to_go)
4989 if (in_words_to_go < 0)
4990 abort ();
4992 mem = gen_rtx_MEM (SImode, src);
4993 RTX_UNCHANGING_P (mem) = src_unchanging_p;
4994 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
4995 MEM_SCALAR_P (mem) = src_scalar_p;
4996 part_bytes_reg = copy_to_mode_reg (SImode, mem);
4999 if (last_bytes && part_bytes_reg == NULL)
5000 abort ();
5002 if (BYTES_BIG_ENDIAN && last_bytes)
5004 rtx tmp = gen_reg_rtx (SImode);
5006 /* The bytes we want are in the top end of the word. */
5007 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
5008 GEN_INT (8 * (4 - last_bytes))));
5009 part_bytes_reg = tmp;
5011 while (last_bytes)
5013 mem = gen_rtx_MEM (QImode, plus_constant (dst, last_bytes - 1));
5014 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
5015 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
5016 MEM_SCALAR_P (mem) = dst_scalar_p;
5017 emit_move_insn (mem, gen_lowpart (QImode, part_bytes_reg));
5019 if (--last_bytes)
5021 tmp = gen_reg_rtx (SImode);
5022 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
5023 part_bytes_reg = tmp;
5028 else
5030 if (last_bytes > 1)
5032 mem = gen_rtx_MEM (HImode, dst);
5033 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
5034 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
5035 MEM_SCALAR_P (mem) = dst_scalar_p;
5036 emit_move_insn (mem, gen_lowpart (HImode, part_bytes_reg));
5037 last_bytes -= 2;
5038 if (last_bytes)
5040 rtx tmp = gen_reg_rtx (SImode);
5042 emit_insn (gen_addsi3 (dst, dst, GEN_INT (2)));
5043 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (16)));
5044 part_bytes_reg = tmp;
5048 if (last_bytes)
5050 mem = gen_rtx_MEM (QImode, dst);
5051 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
5052 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
5053 MEM_SCALAR_P (mem) = dst_scalar_p;
5054 emit_move_insn (mem, gen_lowpart (QImode, part_bytes_reg));
5058 return 1;
5061 /* Generate a memory reference for a half word, such that it will be loaded
5062 into the top 16 bits of the word. We can assume that the address is
5063 known to be alignable and of the form reg, or plus (reg, const). */
5066 arm_gen_rotated_half_load (memref)
5067 rtx memref;
5069 HOST_WIDE_INT offset = 0;
5070 rtx base = XEXP (memref, 0);
5072 if (GET_CODE (base) == PLUS)
5074 offset = INTVAL (XEXP (base, 1));
5075 base = XEXP (base, 0);
5078 /* If we aren't allowed to generate unaligned addresses, then fail. */
5079 if (TARGET_MMU_TRAPS
5080 && ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 0)))
5081 return NULL;
5083 base = gen_rtx_MEM (SImode, plus_constant (base, offset & ~2));
5085 if ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 2))
5086 return base;
5088 return gen_rtx_ROTATE (SImode, base, GEN_INT (16));
5091 /* Select a dominance comparison mode if possible. We support three forms.
5092 COND_OR == 0 => (X && Y)
   COND_OR == 1 => ((!X) || Y)
   COND_OR == 2 => (X || Y)
   If we are unable to support a dominance comparison we return CCmode.
5096 This will then fail to match for the RTL expressions that generate this
5097 call. */
5099 static enum machine_mode
5100 select_dominance_cc_mode (x, y, cond_or)
5101 rtx x;
5102 rtx y;
5103 HOST_WIDE_INT cond_or;
5105 enum rtx_code cond1, cond2;
5106 int swapped = 0;
5108 /* Currently we will probably get the wrong result if the individual
5109 comparisons are not simple. This also ensures that it is safe to
5110 reverse a comparison if necessary. */
5111 if ((arm_select_cc_mode (cond1 = GET_CODE (x), XEXP (x, 0), XEXP (x, 1))
5112 != CCmode)
5113 || (arm_select_cc_mode (cond2 = GET_CODE (y), XEXP (y, 0), XEXP (y, 1))
5114 != CCmode))
5115 return CCmode;
5117 /* The if_then_else variant of this tests the second condition if the
5118 first passes, but is true if the first fails. Reverse the first
5119 condition to get a true "inclusive-or" expression. */
5120 if (cond_or == 1)
5121 cond1 = reverse_condition (cond1);
5123 /* If the comparisons are not equal, and one doesn't dominate the other,
5124 then we can't do this. */
5125 if (cond1 != cond2
5126 && !comparison_dominates_p (cond1, cond2)
5127 && (swapped = 1, !comparison_dominates_p (cond2, cond1)))
5128 return CCmode;
5130 if (swapped)
5132 enum rtx_code temp = cond1;
5133 cond1 = cond2;
5134 cond2 = temp;
5137 switch (cond1)
5139 case EQ:
5140 if (cond2 == EQ || !cond_or)
5141 return CC_DEQmode;
5143 switch (cond2)
5145 case LE: return CC_DLEmode;
5146 case LEU: return CC_DLEUmode;
5147 case GE: return CC_DGEmode;
5148 case GEU: return CC_DGEUmode;
5149 default: break;
5152 break;
5154 case LT:
5155 if (cond2 == LT || !cond_or)
5156 return CC_DLTmode;
5157 if (cond2 == LE)
5158 return CC_DLEmode;
5159 if (cond2 == NE)
5160 return CC_DNEmode;
5161 break;
5163 case GT:
5164 if (cond2 == GT || !cond_or)
5165 return CC_DGTmode;
5166 if (cond2 == GE)
5167 return CC_DGEmode;
5168 if (cond2 == NE)
5169 return CC_DNEmode;
5170 break;
5172 case LTU:
5173 if (cond2 == LTU || !cond_or)
5174 return CC_DLTUmode;
5175 if (cond2 == LEU)
5176 return CC_DLEUmode;
5177 if (cond2 == NE)
5178 return CC_DNEmode;
5179 break;
5181 case GTU:
5182 if (cond2 == GTU || !cond_or)
5183 return CC_DGTUmode;
5184 if (cond2 == GEU)
5185 return CC_DGEUmode;
5186 if (cond2 == NE)
5187 return CC_DNEmode;
5188 break;
5190 /* The remaining cases only occur when both comparisons are the
5191 same. */
5192 case NE:
5193 return CC_DNEmode;
5195 case LE:
5196 return CC_DLEmode;
5198 case GE:
5199 return CC_DGEmode;
5201 case LEU:
5202 return CC_DLEUmode;
5204 case GEU:
5205 return CC_DGEUmode;
5207 default:
5208 break;
5211 abort ();
5214 enum machine_mode
5215 arm_select_cc_mode (op, x, y)
5216 enum rtx_code op;
5217 rtx x;
5218 rtx y;
5220 /* All floating point compares return CCFP if it is an equality
5221 comparison, and CCFPE otherwise. */
5222 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
5224 switch (op)
5226 case EQ:
5227 case NE:
5228 case UNORDERED:
5229 case ORDERED:
5230 case UNLT:
5231 case UNLE:
5232 case UNGT:
5233 case UNGE:
5234 case UNEQ:
5235 case LTGT:
5236 return CCFPmode;
5238 case LT:
5239 case LE:
5240 case GT:
5241 case GE:
5242 return CCFPEmode;
5244 default:
5245 abort ();
5249 /* A compare with a shifted operand. Because of canonicalization, the
5250 comparison will have to be swapped when we emit the assembler. */
5251 if (GET_MODE (y) == SImode && GET_CODE (y) == REG
5252 && (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
5253 || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
5254 || GET_CODE (x) == ROTATERT))
5255 return CC_SWPmode;
5257 /* This is a special case that is used by combine to allow a
5258 comparison of a shifted byte load to be split into a zero-extend
5259 followed by a comparison of the shifted integer (only valid for
5260 equalities and unsigned inequalities). */
5261 if (GET_MODE (x) == SImode
5262 && GET_CODE (x) == ASHIFT
5263 && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 24
5264 && GET_CODE (XEXP (x, 0)) == SUBREG
5265 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == MEM
5266 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == QImode
5267 && (op == EQ || op == NE
5268 || op == GEU || op == GTU || op == LTU || op == LEU)
5269 && GET_CODE (y) == CONST_INT)
5270 return CC_Zmode;
/* A construct for a conditional compare: if the false arm contains
   0, then both conditions must be true; otherwise either condition
   must be true.  Not all conditions are possible, so CCmode is
5275 returned if it can't be done. */
5276 if (GET_CODE (x) == IF_THEN_ELSE
5277 && (XEXP (x, 2) == const0_rtx
5278 || XEXP (x, 2) == const1_rtx)
5279 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
5280 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
5281 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
5282 INTVAL (XEXP (x, 2)));
5284 /* Alternate canonicalizations of the above. These are somewhat cleaner. */
5285 if (GET_CODE (x) == AND
5286 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
5287 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
5288 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1), 0);
5290 if (GET_CODE (x) == IOR
5291 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
5292 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
5293 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1), 2);
/* An operation that sets the condition codes as a side-effect does not
   set the V flag correctly, so we can only use comparisons where this
   doesn't matter.  (For LT and GE we can use "mi" and "pl"
   instead.)  */
5299 if (GET_MODE (x) == SImode
5300 && y == const0_rtx
5301 && (op == EQ || op == NE || op == LT || op == GE)
5302 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
5303 || GET_CODE (x) == AND || GET_CODE (x) == IOR
5304 || GET_CODE (x) == XOR || GET_CODE (x) == MULT
5305 || GET_CODE (x) == NOT || GET_CODE (x) == NEG
5306 || GET_CODE (x) == LSHIFTRT
5307 || GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
5308 || GET_CODE (x) == ROTATERT || GET_CODE (x) == ZERO_EXTRACT))
5309 return CC_NOOVmode;
5311 if (GET_MODE (x) == QImode && (op == EQ || op == NE))
5312 return CC_Zmode;
5314 if (GET_MODE (x) == SImode && (op == LTU || op == GEU)
5315 && GET_CODE (x) == PLUS
5316 && (rtx_equal_p (XEXP (x, 0), y) || rtx_equal_p (XEXP (x, 1), y)))
5317 return CC_Cmode;
5319 return CCmode;
5322 /* X and Y are two things to compare using CODE. Emit the compare insn and
5323 return the rtx for register 0 in the proper mode. FP means this is a
floating point compare: I don't think that it is needed on the ARM.  */
5327 arm_gen_compare_reg (code, x, y)
5328 enum rtx_code code;
5329 rtx x, y;
5331 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
5332 rtx cc_reg = gen_rtx_REG (mode, CC_REGNUM);
5334 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
5335 gen_rtx_COMPARE (mode, x, y)));
5337 return cc_reg;
5340 /* Generate a sequence of insns that will generate the correct return
5341 address mask depending on the physical architecture that the program
5342 is running on. */
5345 arm_gen_return_addr_mask ()
5347 rtx reg = gen_reg_rtx (Pmode);
5349 emit_insn (gen_return_addr_mask (reg));
5350 return reg;
5353 void
5354 arm_reload_in_hi (operands)
5355 rtx * operands;
5357 rtx ref = operands[1];
5358 rtx base, scratch;
5359 HOST_WIDE_INT offset = 0;
5361 if (GET_CODE (ref) == SUBREG)
5363 offset = SUBREG_BYTE (ref);
5364 ref = SUBREG_REG (ref);
5367 if (GET_CODE (ref) == REG)
5369 /* We have a pseudo which has been spilt onto the stack; there
5370 are two cases here: the first where there is a simple
5371 stack-slot replacement and a second where the stack-slot is
5372 out of range, or is used as a subreg. */
5373 if (reg_equiv_mem[REGNO (ref)])
5375 ref = reg_equiv_mem[REGNO (ref)];
5376 base = find_replacement (&XEXP (ref, 0));
5378 else
5379 /* The slot is out of range, or was dressed up in a SUBREG. */
5380 base = reg_equiv_address[REGNO (ref)];
5382 else
5383 base = find_replacement (&XEXP (ref, 0));
5385 /* Handle the case where the address is too complex to be offset by 1. */
5386 if (GET_CODE (base) == MINUS
5387 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
5389 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5391 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
5392 base = base_plus;
5394 else if (GET_CODE (base) == PLUS)
5396 /* The addend must be CONST_INT, or we would have dealt with it above. */
5397 HOST_WIDE_INT hi, lo;
5399 offset += INTVAL (XEXP (base, 1));
5400 base = XEXP (base, 0);
5402 /* Rework the address into a legal sequence of insns. */
5403 /* Valid range for lo is -4095 -> 4095 */
5404 lo = (offset >= 0
5405 ? (offset & 0xfff)
5406 : -((-offset) & 0xfff));
/* Corner case: if lo is the max offset then we would be out of range
5409 once we have added the additional 1 below, so bump the msb into the
5410 pre-loading insn(s). */
5411 if (lo == 4095)
5412 lo &= 0x7ff;
5414 hi = ((((offset - lo) & (HOST_WIDE_INT) 0xffffffff)
5415 ^ (HOST_WIDE_INT) 0x80000000)
5416 - (HOST_WIDE_INT) 0x80000000);
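      /* Illustrative: an offset of 0x4321 splits as lo = 0x321,
	 hi = 0x4000; HI is folded into the base with addsi3 below,
	 leaving a LO offset within the +/-4095 range of a byte load.  */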
5418 if (hi + lo != offset)
5419 abort ();
5421 if (hi != 0)
5423 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5425 /* Get the base address; addsi3 knows how to handle constants
5426 that require more than one insn. */
5427 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
5428 base = base_plus;
5429 offset = lo;
5433 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
5434 emit_insn (gen_zero_extendqisi2 (scratch,
5435 gen_rtx_MEM (QImode,
5436 plus_constant (base,
5437 offset))));
5438 emit_insn (gen_zero_extendqisi2 (gen_rtx_SUBREG (SImode, operands[0], 0),
5439 gen_rtx_MEM (QImode,
5440 plus_constant (base,
5441 offset + 1))));
5442 if (!BYTES_BIG_ENDIAN)
5443 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
5444 gen_rtx_IOR (SImode,
5445 gen_rtx_ASHIFT
5446 (SImode,
5447 gen_rtx_SUBREG (SImode, operands[0], 0),
5448 GEN_INT (8)),
5449 scratch)));
5450 else
5451 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
5452 gen_rtx_IOR (SImode,
5453 gen_rtx_ASHIFT (SImode, scratch,
5454 GEN_INT (8)),
5455 gen_rtx_SUBREG (SImode, operands[0],
5456 0))));
5459 /* Handle storing a half-word to memory during reload by synthesising as two
5460 byte stores. Take care not to clobber the input values until after we
5461 have moved them somewhere safe. This code assumes that if the DImode
5462 scratch in operands[2] overlaps either the input value or output address
5463 in some way, then that value must die in this insn (we absolutely need
5464 two scratch registers for some corner cases). */
5466 void
5467 arm_reload_out_hi (operands)
5468 rtx * operands;
5470 rtx ref = operands[0];
5471 rtx outval = operands[1];
5472 rtx base, scratch;
5473 HOST_WIDE_INT offset = 0;
5475 if (GET_CODE (ref) == SUBREG)
5477 offset = SUBREG_BYTE (ref);
5478 ref = SUBREG_REG (ref);
5481 if (GET_CODE (ref) == REG)
5483 /* We have a pseudo which has been spilt onto the stack; there
5484 are two cases here: the first where there is a simple
5485 stack-slot replacement and a second where the stack-slot is
5486 out of range, or is used as a subreg. */
5487 if (reg_equiv_mem[REGNO (ref)])
5489 ref = reg_equiv_mem[REGNO (ref)];
5490 base = find_replacement (&XEXP (ref, 0));
5492 else
5493 /* The slot is out of range, or was dressed up in a SUBREG. */
5494 base = reg_equiv_address[REGNO (ref)];
5496 else
5497 base = find_replacement (&XEXP (ref, 0));
5499 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
5501 /* Handle the case where the address is too complex to be offset by 1. */
5502 if (GET_CODE (base) == MINUS
5503 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
5505 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5507 /* Be careful not to destroy OUTVAL. */
5508 if (reg_overlap_mentioned_p (base_plus, outval))
5510 /* Updating base_plus might destroy outval, see if we can
5511 swap the scratch and base_plus. */
5512 if (!reg_overlap_mentioned_p (scratch, outval))
5514 rtx tmp = scratch;
5515 scratch = base_plus;
5516 base_plus = tmp;
5518 else
5520 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
5522 /* Be conservative and copy OUTVAL into the scratch now,
5523 this should only be necessary if outval is a subreg
5524 of something larger than a word. */
5525 /* XXX Might this clobber base? I can't see how it can,
5526 since scratch is known to overlap with OUTVAL, and
5527 must be wider than a word. */
5528 emit_insn (gen_movhi (scratch_hi, outval));
5529 outval = scratch_hi;
5533 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
5534 base = base_plus;
5536 else if (GET_CODE (base) == PLUS)
5538 /* The addend must be CONST_INT, or we would have dealt with it above. */
5539 HOST_WIDE_INT hi, lo;
5541 offset += INTVAL (XEXP (base, 1));
5542 base = XEXP (base, 0);
5544 /* Rework the address into a legal sequence of insns. */
5545 /* Valid range for lo is -4095 -> 4095 */
5546 lo = (offset >= 0
5547 ? (offset & 0xfff)
5548 : -((-offset) & 0xfff));
/* Corner case: if lo is the max offset then we would be out of range
5551 once we have added the additional 1 below, so bump the msb into the
5552 pre-loading insn(s). */
5553 if (lo == 4095)
5554 lo &= 0x7ff;
5556 hi = ((((offset - lo) & (HOST_WIDE_INT) 0xffffffff)
5557 ^ (HOST_WIDE_INT) 0x80000000)
5558 - (HOST_WIDE_INT) 0x80000000);
5560 if (hi + lo != offset)
5561 abort ();
5563 if (hi != 0)
5565 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5567 /* Be careful not to destroy OUTVAL. */
5568 if (reg_overlap_mentioned_p (base_plus, outval))
5570 /* Updating base_plus might destroy outval, see if we
5571 can swap the scratch and base_plus. */
5572 if (!reg_overlap_mentioned_p (scratch, outval))
5574 rtx tmp = scratch;
5575 scratch = base_plus;
5576 base_plus = tmp;
5578 else
5580 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
5582 /* Be conservative and copy outval into scratch now,
5583 this should only be necessary if outval is a
5584 subreg of something larger than a word. */
5585 /* XXX Might this clobber base? I can't see how it
5586 can, since scratch is known to overlap with
5587 outval. */
5588 emit_insn (gen_movhi (scratch_hi, outval));
5589 outval = scratch_hi;
5593 /* Get the base address; addsi3 knows how to handle constants
5594 that require more than one insn. */
5595 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
5596 base = base_plus;
5597 offset = lo;
5601 if (BYTES_BIG_ENDIAN)
5603 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
5604 plus_constant (base, offset + 1)),
5605 gen_lowpart (QImode, outval)));
5606 emit_insn (gen_lshrsi3 (scratch,
5607 gen_rtx_SUBREG (SImode, outval, 0),
5608 GEN_INT (8)));
5609 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
5610 gen_lowpart (QImode, scratch)));
5612 else
5614 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
5615 gen_lowpart (QImode, outval)));
5616 emit_insn (gen_lshrsi3 (scratch,
5617 gen_rtx_SUBREG (SImode, outval, 0),
5618 GEN_INT (8)));
5619 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
5620 plus_constant (base, offset + 1)),
5621 gen_lowpart (QImode, scratch)));
5625 /* Print a symbolic form of X to the debug file, F. */
5627 static void
5628 arm_print_value (f, x)
5629 FILE * f;
5630 rtx x;
5632 switch (GET_CODE (x))
5634 case CONST_INT:
5635 fprintf (f, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
5636 return;
5638 case CONST_DOUBLE:
5639 fprintf (f, "<0x%lx,0x%lx>", (long)XWINT (x, 2), (long)XWINT (x, 3));
5640 return;
5642 case CONST_STRING:
5643 fprintf (f, "\"%s\"", XSTR (x, 0));
5644 return;
5646 case SYMBOL_REF:
5647 fprintf (f, "`%s'", XSTR (x, 0));
5648 return;
5650 case LABEL_REF:
5651 fprintf (f, "L%d", INSN_UID (XEXP (x, 0)));
5652 return;
5654 case CONST:
5655 arm_print_value (f, XEXP (x, 0));
5656 return;
5658 case PLUS:
5659 arm_print_value (f, XEXP (x, 0));
5660 fprintf (f, "+");
5661 arm_print_value (f, XEXP (x, 1));
5662 return;
5664 case PC:
5665 fprintf (f, "pc");
5666 return;
5668 default:
5669 fprintf (f, "????");
5670 return;
5674 /* Routines for manipulation of the constant pool. */
5676 /* Arm instructions cannot load a large constant directly into a
5677 register; they have to come from a pc relative load. The constant
5678 must therefore be placed in the addressable range of the pc
5679 relative load. Depending on the precise pc relative load
5680 instruction the range is somewhere between 256 bytes and 4k. This
5681 means that we often have to dump a constant inside a function, and
5682 generate code to branch around it.
5684 It is important to minimize this, since the branches will slow
5685 things down and make the code larger.
5687 Normally we can hide the table after an existing unconditional
5688 branch so that there is no interruption of the flow, but in the
5689 worst case the code looks like this:
	ldr	rn, L1
	...
	b	L2
	align
	L1:	.long value
	L2:
	...

	ldr	rn, L3
	...
	b	L4
	align
	L3:	.long value
	L4:
	...
5707 We fix this by performing a scan after scheduling, which notices
5708 which instructions need to have their operands fetched from the
5709 constant table and builds the table.
5711 The algorithm starts by building a table of all the constants that
5712 need fixing up and all the natural barriers in the function (places
5713 where a constant table can be dropped without breaking the flow).
5714 For each fixup we note how far the pc-relative replacement will be
5715 able to reach and the offset of the instruction into the function.
5717 Having built the table we then group the fixes together to form
5718 tables that are as large as possible (subject to addressing
5719 constraints) and emit each table of constants after the last
5720 barrier that is within range of all the instructions in the group.
5721 If a group does not contain a barrier, then we forcibly create one
5722 by inserting a jump instruction into the flow. Once the table has
5723 been inserted, the insns are then modified to reference the
5724 relevant entry in the pool.
5726 Possible enhancements to the algorithm (not implemented) are:
5728 1) For some processors and object formats, there may be benefit in
5729 aligning the pools to the start of cache lines; this alignment
5730 would need to be taken into account when calculating addressability
5731 of a pool. */
5733 /* These typedefs are located at the start of this file, so that
5734 they can be used in the prototypes there. This comment is to
5735 remind readers of that fact so that the following structures
5736 can be understood more easily.
5738 typedef struct minipool_node Mnode;
5739 typedef struct minipool_fixup Mfix; */
5741 struct minipool_node
5743 /* Doubly linked chain of entries. */
5744 Mnode * next;
5745 Mnode * prev;
5746 /* The maximum offset into the code that this entry can be placed. While
5747 pushing fixes for forward references, all entries are sorted in order
5748 of increasing max_address. */
5749 HOST_WIDE_INT max_address;
5750 /* Similarly for an entry inserted for a backwards ref. */
5751 HOST_WIDE_INT min_address;
5752 /* The number of fixes referencing this entry. This can become zero
5753 if we "unpush" an entry. In this case we ignore the entry when we
5754 come to emit the code. */
5755 int refcount;
5756 /* The offset from the start of the minipool. */
5757 HOST_WIDE_INT offset;
/* The value in the table.  */
5759 rtx value;
5760 /* The mode of value. */
5761 enum machine_mode mode;
5762 int fix_size;
5765 struct minipool_fixup
5767 Mfix * next;
5768 rtx insn;
5769 HOST_WIDE_INT address;
5770 rtx * loc;
5771 enum machine_mode mode;
5772 int fix_size;
5773 rtx value;
5774 Mnode * minipool;
5775 HOST_WIDE_INT forwards;
5776 HOST_WIDE_INT backwards;
5779 /* Fixes less than a word need padding out to a word boundary. */
5780 #define MINIPOOL_FIX_SIZE(mode) \
5781 (GET_MODE_SIZE ((mode)) >= 4 ? GET_MODE_SIZE ((mode)) : 4)
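/* For example, a QImode or HImode fix still occupies four bytes in the
   pool, while a DImode fix keeps its full eight.  */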
5783 static Mnode * minipool_vector_head;
5784 static Mnode * minipool_vector_tail;
5785 static rtx minipool_vector_label;
5787 /* The linked list of all minipool fixes required for this function. */
5788 Mfix * minipool_fix_head;
5789 Mfix * minipool_fix_tail;
5790 /* The fix entry for the current minipool, once it has been placed. */
5791 Mfix * minipool_barrier;
5793 /* Determines if INSN is the start of a jump table. Returns the end
5794 of the TABLE or NULL_RTX. */
5796 static rtx
5797 is_jump_table (insn)
5798 rtx insn;
5800 rtx table;
5802 if (GET_CODE (insn) == JUMP_INSN
5803 && JUMP_LABEL (insn) != NULL
5804 && ((table = next_real_insn (JUMP_LABEL (insn)))
5805 == next_real_insn (insn))
5806 && table != NULL
5807 && GET_CODE (table) == JUMP_INSN
5808 && (GET_CODE (PATTERN (table)) == ADDR_VEC
5809 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
5810 return table;
5812 return NULL_RTX;
5815 #ifndef JUMP_TABLES_IN_TEXT_SECTION
5816 #define JUMP_TABLES_IN_TEXT_SECTION 0
5817 #endif
5819 static HOST_WIDE_INT
5820 get_jump_table_size (insn)
5821 rtx insn;
/* ADDR_VECs only take room if read-only data goes into the text
5824 section. */
5825 if (JUMP_TABLES_IN_TEXT_SECTION
5826 #if !defined(READONLY_DATA_SECTION) && !defined(READONLY_DATA_SECTION_ASM_OP)
5827 || 1
5828 #endif
5831 rtx body = PATTERN (insn);
5832 int elt = GET_CODE (body) == ADDR_DIFF_VEC ? 1 : 0;
5834 return GET_MODE_SIZE (GET_MODE (body)) * XVECLEN (body, elt);
5837 return 0;
5840 /* Move a minipool fix MP from its current location to before MAX_MP.
5841 If MAX_MP is NULL, then MP doesn't need moving, but the addressing
constraints may need updating.  */
5844 static Mnode *
5845 move_minipool_fix_forward_ref (mp, max_mp, max_address)
5846 Mnode * mp;
5847 Mnode * max_mp;
5848 HOST_WIDE_INT max_address;
5850 /* This should never be true and the code below assumes these are
5851 different. */
5852 if (mp == max_mp)
5853 abort ();
5855 if (max_mp == NULL)
5857 if (max_address < mp->max_address)
5858 mp->max_address = max_address;
5860 else
5862 if (max_address > max_mp->max_address - mp->fix_size)
5863 mp->max_address = max_mp->max_address - mp->fix_size;
5864 else
5865 mp->max_address = max_address;
5867 /* Unlink MP from its current position. Since max_mp is non-null,
5868 mp->prev must be non-null. */
5869 mp->prev->next = mp->next;
5870 if (mp->next != NULL)
5871 mp->next->prev = mp->prev;
5872 else
5873 minipool_vector_tail = mp->prev;
5875 /* Re-insert it before MAX_MP. */
5876 mp->next = max_mp;
5877 mp->prev = max_mp->prev;
5878 max_mp->prev = mp;
5880 if (mp->prev != NULL)
5881 mp->prev->next = mp;
5882 else
5883 minipool_vector_head = mp;
5886 /* Save the new entry. */
5887 max_mp = mp;
5889 /* Scan over the preceding entries and adjust their addresses as
5890 required. */
5891 while (mp->prev != NULL
5892 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
5894 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
5895 mp = mp->prev;
5898 return max_mp;
5901 /* Add a constant to the minipool for a forward reference. Returns the
5902 node added or NULL if the constant will not fit in this pool. */
5904 static Mnode *
5905 add_minipool_forward_ref (fix)
5906 Mfix * fix;
5908 /* If set, max_mp is the first pool_entry that has a lower
5909 constraint than the one we are trying to add. */
5910 Mnode * max_mp = NULL;
5911 HOST_WIDE_INT max_address = fix->address + fix->forwards;
5912 Mnode * mp;
5914 /* If this fix's address is greater than the address of the first
5915 entry, then we can't put the fix in this pool. We subtract the
5916 size of the current fix to ensure that if the table is fully
packed we still have enough room to insert this value by shuffling
5918 the other fixes forwards. */
5919 if (minipool_vector_head &&
5920 fix->address >= minipool_vector_head->max_address - fix->fix_size)
5921 return NULL;
5923 /* Scan the pool to see if a constant with the same value has
5924 already been added. While we are doing this, also note the
5925 location where we must insert the constant if it doesn't already
5926 exist. */
5927 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
5929 if (GET_CODE (fix->value) == GET_CODE (mp->value)
5930 && fix->mode == mp->mode
5931 && (GET_CODE (fix->value) != CODE_LABEL
5932 || (CODE_LABEL_NUMBER (fix->value)
5933 == CODE_LABEL_NUMBER (mp->value)))
5934 && rtx_equal_p (fix->value, mp->value))
5936 /* More than one fix references this entry. */
5937 mp->refcount++;
5938 return move_minipool_fix_forward_ref (mp, max_mp, max_address);
5941 /* Note the insertion point if necessary. */
5942 if (max_mp == NULL
5943 && mp->max_address > max_address)
5944 max_mp = mp;
5947 /* The value is not currently in the minipool, so we need to create
5948 a new entry for it. If MAX_MP is NULL, the entry will be put on
5949 the end of the list since the placement is less constrained than
5950 any existing entry. Otherwise, we insert the new fix before
MAX_MP and, if necessary, adjust the constraints on the other
5952 entries. */
5953 mp = xmalloc (sizeof (* mp));
5954 mp->fix_size = fix->fix_size;
5955 mp->mode = fix->mode;
5956 mp->value = fix->value;
5957 mp->refcount = 1;
5958 /* Not yet required for a backwards ref. */
5959 mp->min_address = -65536;
5961 if (max_mp == NULL)
5963 mp->max_address = max_address;
5964 mp->next = NULL;
5965 mp->prev = minipool_vector_tail;
5967 if (mp->prev == NULL)
5969 minipool_vector_head = mp;
5970 minipool_vector_label = gen_label_rtx ();
5972 else
5973 mp->prev->next = mp;
5975 minipool_vector_tail = mp;
5977 else
5979 if (max_address > max_mp->max_address - mp->fix_size)
5980 mp->max_address = max_mp->max_address - mp->fix_size;
5981 else
5982 mp->max_address = max_address;
5984 mp->next = max_mp;
5985 mp->prev = max_mp->prev;
5986 max_mp->prev = mp;
5987 if (mp->prev != NULL)
5988 mp->prev->next = mp;
5989 else
5990 minipool_vector_head = mp;
5993 /* Save the new entry. */
5994 max_mp = mp;
5996 /* Scan over the preceding entries and adjust their addresses as
5997 required. */
5998 while (mp->prev != NULL
5999 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
6001 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
6002 mp = mp->prev;
6005 return max_mp;
6008 static Mnode *
6009 move_minipool_fix_backward_ref (mp, min_mp, min_address)
6010 Mnode * mp;
6011 Mnode * min_mp;
6012 HOST_WIDE_INT min_address;
6014 HOST_WIDE_INT offset;
6016 /* This should never be true, and the code below assumes these are
6017 different. */
6018 if (mp == min_mp)
6019 abort ();
6021 if (min_mp == NULL)
6023 if (min_address > mp->min_address)
6024 mp->min_address = min_address;
6026 else
6028 /* We will adjust this below if it is too loose. */
6029 mp->min_address = min_address;
6031 /* Unlink MP from its current position. Since min_mp is non-null,
6032 mp->next must be non-null. */
6033 mp->next->prev = mp->prev;
6034 if (mp->prev != NULL)
6035 mp->prev->next = mp->next;
6036 else
6037 minipool_vector_head = mp->next;
6039 /* Reinsert it after MIN_MP. */
6040 mp->prev = min_mp;
6041 mp->next = min_mp->next;
6042 min_mp->next = mp;
6043 if (mp->next != NULL)
6044 mp->next->prev = mp;
6045 else
6046 minipool_vector_tail = mp;
6049 min_mp = mp;
6051 offset = 0;
6052 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
6054 mp->offset = offset;
6055 if (mp->refcount > 0)
6056 offset += mp->fix_size;
6058 if (mp->next && mp->next->min_address < mp->min_address + mp->fix_size)
6059 mp->next->min_address = mp->min_address + mp->fix_size;
6062 return min_mp;
6065 /* Add a constant to the minipool for a backward reference. Returns the
6066 node added or NULL if the constant will not fit in this pool.
6068 Note that the code for insertion for a backwards reference can be
6069 somewhat confusing because the calculated offsets for each fix do
6070 not take into account the size of the pool (which is still under
construction).  */
6073 static Mnode *
6074 add_minipool_backward_ref (fix)
6075 Mfix * fix;
6077 /* If set, min_mp is the last pool_entry that has a lower constraint
6078 than the one we are trying to add. */
6079 Mnode * min_mp = NULL;
6080 /* This can be negative, since it is only a constraint. */
6081 HOST_WIDE_INT min_address = fix->address - fix->backwards;
6082 Mnode * mp;
6084 /* If we can't reach the current pool from this insn, or if we can't
6085 insert this entry at the end of the pool without pushing other
6086 fixes out of range, then we don't try. This ensures that we
6087 can't fail later on. */
6088 if (min_address >= minipool_barrier->address
6089 || (minipool_vector_tail->min_address + fix->fix_size
6090 >= minipool_barrier->address))
6091 return NULL;
6093 /* Scan the pool to see if a constant with the same value has
6094 already been added. While we are doing this, also note the
6095 location where we must insert the constant if it doesn't already
6096 exist. */
6097 for (mp = minipool_vector_tail; mp != NULL; mp = mp->prev)
6099 if (GET_CODE (fix->value) == GET_CODE (mp->value)
6100 && fix->mode == mp->mode
6101 && (GET_CODE (fix->value) != CODE_LABEL
6102 || (CODE_LABEL_NUMBER (fix->value)
6103 == CODE_LABEL_NUMBER (mp->value)))
6104 && rtx_equal_p (fix->value, mp->value)
6105 /* Check that there is enough slack to move this entry to the
6106 end of the table (this is conservative). */
6107 && (mp->max_address
6108 > (minipool_barrier->address
6109 + minipool_vector_tail->offset
6110 + minipool_vector_tail->fix_size)))
6112 mp->refcount++;
6113 return move_minipool_fix_backward_ref (mp, min_mp, min_address);
6116 if (min_mp != NULL)
6117 mp->min_address += fix->fix_size;
6118 else
6120 /* Note the insertion point if necessary. */
6121 if (mp->min_address < min_address)
6122 min_mp = mp;
6123 else if (mp->max_address
6124 < minipool_barrier->address + mp->offset + fix->fix_size)
6126 /* Inserting before this entry would push the fix beyond
6127 its maximum address (which can happen if we have
6128 re-located a forwards fix); force the new fix to come
6129 after it. */
6130 min_mp = mp;
6131 min_address = mp->min_address + fix->fix_size;
6136 /* We need to create a new entry. */
6137 mp = xmalloc (sizeof (* mp));
6138 mp->fix_size = fix->fix_size;
6139 mp->mode = fix->mode;
6140 mp->value = fix->value;
6141 mp->refcount = 1;
6142 mp->max_address = minipool_barrier->address + 65536;
6144 mp->min_address = min_address;
6146 if (min_mp == NULL)
6148 mp->prev = NULL;
6149 mp->next = minipool_vector_head;
6151 if (mp->next == NULL)
6153 minipool_vector_tail = mp;
6154 minipool_vector_label = gen_label_rtx ();
6156 else
6157 mp->next->prev = mp;
6159 minipool_vector_head = mp;
6161 else
6163 mp->next = min_mp->next;
6164 mp->prev = min_mp;
6165 min_mp->next = mp;
6167 if (mp->next != NULL)
6168 mp->next->prev = mp;
6169 else
6170 minipool_vector_tail = mp;
6173 /* Save the new entry. */
6174 min_mp = mp;
6176 if (mp->prev)
6177 mp = mp->prev;
6178 else
6179 mp->offset = 0;
6181 /* Scan over the following entries and adjust their offsets. */
6182 while (mp->next != NULL)
6184 if (mp->next->min_address < mp->min_address + mp->fix_size)
6185 mp->next->min_address = mp->min_address + mp->fix_size;
6187 if (mp->refcount)
6188 mp->next->offset = mp->offset + mp->fix_size;
6189 else
6190 mp->next->offset = mp->offset;
6192 mp = mp->next;
6195 return min_mp;
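/* Editorial sketch (not part of the original source): a self-contained
   illustration of how a fix's placement window follows from its address
   and the insn's pool-range attributes.  The variable names and range
   values below are invented for the example; the real values come from
   get_attr_pool_range/get_attr_neg_pool_range as used above.  */
#include <stdio.h>

int
main (void)
{
  long address = 1024;     /* offset of the insn within the function */
  long backwards = 4084;   /* assumed neg_pool_range-style value */
  long forwards = 4096;    /* assumed pool_range-style value */

  /* A pool entry referenced backwards must sit at or after ...  */
  long min_address = address - backwards;
  /* ... while a forward reference must be resolved before here.  */
  long max_address = address + forwards;

  printf ("placement window: [%ld, %ld)\n", min_address, max_address);
  return 0;
}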
6198 static void
6199 assign_minipool_offsets (barrier)
6200 Mfix * barrier;
6202 HOST_WIDE_INT offset = 0;
6203 Mnode * mp;
6205 minipool_barrier = barrier;
6207 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
6209 mp->offset = offset;
6211 if (mp->refcount > 0)
6212 offset += mp->fix_size;
6216 /* Output the literal table. */
6217 static void
6218 dump_minipool (scan)
6219 rtx scan;
6221 Mnode * mp;
6222 Mnode * nmp;
6224 if (rtl_dump_file)
6225 fprintf (rtl_dump_file,
6226 ";; Emitting minipool after insn %u; address %ld\n",
6227 INSN_UID (scan), (unsigned long) minipool_barrier->address);
6229 scan = emit_label_after (gen_label_rtx (), scan);
6230 scan = emit_insn_after (gen_align_4 (), scan);
6231 scan = emit_label_after (minipool_vector_label, scan);
6233 for (mp = minipool_vector_head; mp != NULL; mp = nmp)
6235 if (mp->refcount > 0)
6237 if (rtl_dump_file)
6239 fprintf (rtl_dump_file,
6240 ";; Offset %u, min %ld, max %ld ",
6241 (unsigned) mp->offset, (unsigned long) mp->min_address,
6242 (unsigned long) mp->max_address);
6243 arm_print_value (rtl_dump_file, mp->value);
6244 fputc ('\n', rtl_dump_file);
6247 switch (mp->fix_size)
6249 #ifdef HAVE_consttable_1
6250 case 1:
6251 scan = emit_insn_after (gen_consttable_1 (mp->value), scan);
6252 break;
6254 #endif
6255 #ifdef HAVE_consttable_2
6256 case 2:
6257 scan = emit_insn_after (gen_consttable_2 (mp->value), scan);
6258 break;
6260 #endif
6261 #ifdef HAVE_consttable_4
6262 case 4:
6263 scan = emit_insn_after (gen_consttable_4 (mp->value), scan);
6264 break;
6266 #endif
6267 #ifdef HAVE_consttable_8
6268 case 8:
6269 scan = emit_insn_after (gen_consttable_8 (mp->value), scan);
6270 break;
6272 #endif
6273 default:
6274 abort ();
6275 break;
6279 nmp = mp->next;
6280 free (mp);
6283 minipool_vector_head = minipool_vector_tail = NULL;
6284 scan = emit_insn_after (gen_consttable_end (), scan);
6285 scan = emit_barrier_after (scan);
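/* Editorial illustration (labels and values invented): once the fixes
   have been rewritten, the emitted stream around a pool looks roughly
   like

        b       .Lafter         @ branch created by create_fix_barrier
        .align  2
   .Lpool:                      @ minipool_vector_label
        .word   0x12345678      @ a consttable_4 entry
        .word   0x0000beef

   .Lafter:

   and each fixed-up insn loads its constant via an offset from .Lpool.  */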
6288 /* Return the cost of forcibly inserting a barrier after INSN. */
6290 static int
6291 arm_barrier_cost (insn)
6292 rtx insn;
6294 /* Basing the location of the pool on the loop depth is preferable,
6295 but at the moment, the basic block information seems to be
6296 corrupted by this stage of the compilation. */
6297 int base_cost = 50;
6298 rtx next = next_nonnote_insn (insn);
6300 if (next != NULL && GET_CODE (next) == CODE_LABEL)
6301 base_cost -= 20;
6303 switch (GET_CODE (insn))
6305 case CODE_LABEL:
6306 /* It will always be better to place the table before the label, rather
6307 than after it. */
6308 return 50;
6310 case INSN:
6311 case CALL_INSN:
6312 return base_cost;
6314 case JUMP_INSN:
6315 return base_cost - 10;
6317 default:
6318 return base_cost + 10;
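/* Editorial sketch (not GCC code): the same cost heuristic reduced to
   plain integers, so the preference order is visible.  The enum names
   are invented stand-ins for the rtx codes tested above.  */
#include <stdio.h>

enum kind { LABEL, PLAIN, JUMP, OTHER };

static int
cost (enum kind k, int next_is_label)
{
  int base = 50 - (next_is_label ? 20 : 0);
  switch (k)
    {
    case LABEL: return 50;          /* the table goes before the label */
    case PLAIN: return base;
    case JUMP:  return base - 10;   /* cheapest: flow already breaks here */
    default:    return base + 10;
    }
}

int
main (void)
{
  printf ("jump followed by label: %d\n", cost (JUMP, 1));   /* 20 */
  printf ("plain insn:             %d\n", cost (PLAIN, 0));  /* 50 */
  return 0;
}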
6322 /* Find the best place in the insn stream in the range
6323 (FIX->address,MAX_ADDRESS) to forcibly insert a minipool barrier.
6324 Create the barrier by inserting a jump and add a new fix entry for
6325 it. */
6327 static Mfix *
6328 create_fix_barrier (fix, max_address)
6329 Mfix * fix;
6330 HOST_WIDE_INT max_address;
6332 HOST_WIDE_INT count = 0;
6333 rtx barrier;
6334 rtx from = fix->insn;
6335 rtx selected = from;
6336 int selected_cost;
6337 HOST_WIDE_INT selected_address;
6338 Mfix * new_fix;
6339 HOST_WIDE_INT max_count = max_address - fix->address;
6340 rtx label = gen_label_rtx ();
6342 selected_cost = arm_barrier_cost (from);
6343 selected_address = fix->address;
6345 while (from && count < max_count)
6347 rtx tmp;
6348 int new_cost;
6350 /* This code shouldn't have been called if there was a natural barrier
6351 within range. */
6352 if (GET_CODE (from) == BARRIER)
6353 abort ();
6355 /* Count the length of this insn. */
6356 count += get_attr_length (from);
6358 /* If there is a jump table, add its length. */
6359 tmp = is_jump_table (from);
6360 if (tmp != NULL)
6362 count += get_jump_table_size (tmp);
6364 /* Jump tables aren't in a basic block, so base the cost on
6365 the dispatch insn. If we select this location, we will
6366 still put the pool after the table. */
6367 new_cost = arm_barrier_cost (from);
6369 if (count < max_count && new_cost <= selected_cost)
6371 selected = tmp;
6372 selected_cost = new_cost;
6373 selected_address = fix->address + count;
6376 /* Continue after the dispatch table. */
6377 from = NEXT_INSN (tmp);
6378 continue;
6381 new_cost = arm_barrier_cost (from);
6383 if (count < max_count && new_cost <= selected_cost)
6385 selected = from;
6386 selected_cost = new_cost;
6387 selected_address = fix->address + count;
6390 from = NEXT_INSN (from);
6393 /* Create a new JUMP_INSN that branches around a barrier. */
6394 from = emit_jump_insn_after (gen_jump (label), selected);
6395 JUMP_LABEL (from) = label;
6396 barrier = emit_barrier_after (from);
6397 emit_label_after (label, barrier);
6399 /* Create a minipool barrier entry for the new barrier. */
6400 new_fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* new_fix));
6401 new_fix->insn = barrier;
6402 new_fix->address = selected_address;
6403 new_fix->next = fix->next;
6404 fix->next = new_fix;
6406 return new_fix;
6409 /* Record that there is a natural barrier in the insn stream at
6410 ADDRESS. */
6411 static void
6412 push_minipool_barrier (insn, address)
6413 rtx insn;
6414 HOST_WIDE_INT address;
6416 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
6418 fix->insn = insn;
6419 fix->address = address;
6421 fix->next = NULL;
6422 if (minipool_fix_head != NULL)
6423 minipool_fix_tail->next = fix;
6424 else
6425 minipool_fix_head = fix;
6427 minipool_fix_tail = fix;
6430 /* Record INSN, which will need fixing up to load a value from the
6431 minipool. ADDRESS is the offset of the insn from the start of the
6432 function; LOC is a pointer to the part of the insn which requires
6433 fixing; VALUE is the constant that must be loaded, which is of type
6434 MODE. */
6435 static void
6436 push_minipool_fix (insn, address, loc, mode, value)
6437 rtx insn;
6438 HOST_WIDE_INT address;
6439 rtx * loc;
6440 enum machine_mode mode;
6441 rtx value;
6443 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
6445 #ifdef AOF_ASSEMBLER
6446 /* PIC symbol references need to be converted into offsets into the
6447 based area. */
6448 /* XXX This shouldn't be done here. */
6449 if (flag_pic && GET_CODE (value) == SYMBOL_REF)
6450 value = aof_pic_entry (value);
6451 #endif /* AOF_ASSEMBLER */
6453 fix->insn = insn;
6454 fix->address = address;
6455 fix->loc = loc;
6456 fix->mode = mode;
6457 fix->fix_size = MINIPOOL_FIX_SIZE (mode);
6458 fix->value = value;
6459 fix->forwards = get_attr_pool_range (insn);
6460 fix->backwards = get_attr_neg_pool_range (insn);
6461 fix->minipool = NULL;
6463 /* If an insn doesn't have a range defined for it, then it isn't
6464 expecting to be reworked by this code. Better to abort now than
6465 to generate duff assembly code. */
6466 if (fix->forwards == 0 && fix->backwards == 0)
6467 abort ();
6469 if (rtl_dump_file)
6471 fprintf (rtl_dump_file,
6472 ";; %smode fixup for i%d; addr %lu, range (%ld,%ld): ",
6473 GET_MODE_NAME (mode),
6474 INSN_UID (insn), (unsigned long) address,
6475 -1 * (long)fix->backwards, (long)fix->forwards);
6476 arm_print_value (rtl_dump_file, fix->value);
6477 fprintf (rtl_dump_file, "\n");
6480 /* Add it to the chain of fixes. */
6481 fix->next = NULL;
6483 if (minipool_fix_head != NULL)
6484 minipool_fix_tail->next = fix;
6485 else
6486 minipool_fix_head = fix;
6488 minipool_fix_tail = fix;
6491 /* Scan INSN and note any of its operands that need fixing. */
6493 static void
6494 note_invalid_constants (insn, address)
6495 rtx insn;
6496 HOST_WIDE_INT address;
6498 int opno;
6500 extract_insn (insn);
6502 if (!constrain_operands (1))
6503 fatal_insn_not_found (insn);
6505 /* Fill in recog_op_alt with information about the constraints of this
6506 insn. */
6507 preprocess_constraints ();
6509 for (opno = 0; opno < recog_data.n_operands; opno++)
6511 /* Things we need to fix can only occur in inputs. */
6512 if (recog_data.operand_type[opno] != OP_IN)
6513 continue;
6515 /* If this alternative is a memory reference, then any mention
6516 of constants in this alternative is really to fool reload
6517 into allowing us to accept one there. We need to fix them up
6518 now so that we output the right code. */
6519 if (recog_op_alt[opno][which_alternative].memory_ok)
6521 rtx op = recog_data.operand[opno];
6523 if (CONSTANT_P (op))
6524 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
6525 recog_data.operand_mode[opno], op);
6526 #if 0
6527 /* RWE: Now we look correctly at the operands for the insn,
6528 this shouldn't be needed any more. */
6529 #ifndef AOF_ASSEMBLER
6530 /* XXX Is this still needed? */
6531 else if (GET_CODE (op) == UNSPEC && XINT (op, 1) == UNSPEC_PIC_SYM)
6532 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
6533 recog_data.operand_mode[opno],
6534 XVECEXP (op, 0, 0));
6535 #endif
6536 #endif
6537 else if (GET_CODE (op) == MEM
6538 && GET_CODE (XEXP (op, 0)) == SYMBOL_REF
6539 && CONSTANT_POOL_ADDRESS_P (XEXP (op, 0)))
6540 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
6541 recog_data.operand_mode[opno],
6542 get_pool_constant (XEXP (op, 0)));
6547 void
6548 arm_reorg (first)
6549 rtx first;
6551 rtx insn;
6552 HOST_WIDE_INT address = 0;
6553 Mfix * fix;
6555 minipool_fix_head = minipool_fix_tail = NULL;
6557 /* The first insn must always be a note, or the code below won't
6558 scan it properly. */
6559 if (GET_CODE (first) != NOTE)
6560 abort ();
6562 /* Scan all the insns and record the operands that will need fixing. */
6563 for (insn = next_nonnote_insn (first); insn; insn = next_nonnote_insn (insn))
6565 if (GET_CODE (insn) == BARRIER)
6566 push_minipool_barrier (insn, address);
6567 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
6568 || GET_CODE (insn) == JUMP_INSN)
6570 rtx table;
6572 note_invalid_constants (insn, address);
6573 address += get_attr_length (insn);
6575 /* If the insn is a vector jump, add the size of the table
6576 and skip the table. */
6577 if ((table = is_jump_table (insn)) != NULL)
6579 address += get_jump_table_size (table);
6580 insn = table;
6585 fix = minipool_fix_head;
6587 /* Now scan the fixups and perform the required changes. */
6588 while (fix)
6590 Mfix * ftmp;
6591 Mfix * fdel;
6592 Mfix * last_added_fix;
6593 Mfix * last_barrier = NULL;
6594 Mfix * this_fix;
6596 /* Skip any further barriers before the next fix. */
6597 while (fix && GET_CODE (fix->insn) == BARRIER)
6598 fix = fix->next;
6600 /* No more fixes. */
6601 if (fix == NULL)
6602 break;
6604 last_added_fix = NULL;
6606 for (ftmp = fix; ftmp; ftmp = ftmp->next)
6608 if (GET_CODE (ftmp->insn) == BARRIER)
6610 if (ftmp->address >= minipool_vector_head->max_address)
6611 break;
6613 last_barrier = ftmp;
6615 else if ((ftmp->minipool = add_minipool_forward_ref (ftmp)) == NULL)
6616 break;
6618 last_added_fix = ftmp; /* Keep track of the last fix added. */
6621 /* If we found a barrier, drop back to that; any fixes that we
6622 could have reached but come after the barrier will now go in
6623 the next mini-pool. */
6624 if (last_barrier != NULL)
6626 /* Reduce the refcount for those fixes that won't go into this
6627 pool after all. */
6628 for (fdel = last_barrier->next;
6629 fdel && fdel != ftmp;
6630 fdel = fdel->next)
6632 fdel->minipool->refcount--;
6633 fdel->minipool = NULL;
6636 ftmp = last_barrier;
6638 else
6640 /* ftmp is the first fix that we can't fit into this pool and
6641 there are no natural barriers that we could use. Insert a
6642 new barrier in the code somewhere between the previous
6643 fix and this one, and arrange to jump around it. */
6644 HOST_WIDE_INT max_address;
6646 /* The last item on the list of fixes must be a barrier, so
6647 we can never run off the end of the list of fixes without
6648 last_barrier being set. */
6649 if (ftmp == NULL)
6650 abort ();
6652 max_address = minipool_vector_head->max_address;
6653 /* Check that there isn't another fix that is in range that
6654 we couldn't fit into this pool because the pool was
6655 already too large: we need to put the pool before such an
6656 instruction. */
6657 if (ftmp->address < max_address)
6658 max_address = ftmp->address;
6660 last_barrier = create_fix_barrier (last_added_fix, max_address);
6663 assign_minipool_offsets (last_barrier);
6665 while (ftmp)
6667 if (GET_CODE (ftmp->insn) != BARRIER
6668 && ((ftmp->minipool = add_minipool_backward_ref (ftmp))
6669 == NULL))
6670 break;
6672 ftmp = ftmp->next;
6675 /* Scan over the fixes we have identified for this pool, fixing them
6676 up and adding the constants to the pool itself. */
6677 for (this_fix = fix; this_fix && ftmp != this_fix;
6678 this_fix = this_fix->next)
6679 if (GET_CODE (this_fix->insn) != BARRIER)
6681 rtx addr
6682 = plus_constant (gen_rtx_LABEL_REF (VOIDmode,
6683 minipool_vector_label),
6684 this_fix->minipool->offset);
6685 *this_fix->loc = gen_rtx_MEM (this_fix->mode, addr);
6688 dump_minipool (last_barrier->insn);
6689 fix = ftmp;
6692 /* From now on we must synthesize any constants that we can't handle
6693 directly. This can happen if the RTL gets split during final
6694 instruction generation. */
6695 after_arm_reorg = 1;
6697 /* Free the minipool memory. */
6698 obstack_free (&minipool_obstack, minipool_startobj);
6701 /* Routines to output assembly language. */
6703 /* If the rtx is one of the valid FPA immediate constants, return the
6704 string representation of the number. In this way we can ensure that
6705 valid double constants are generated even when cross-compiling. */
6707 const char *
6708 fp_immediate_constant (x)
6709 rtx x;
6711 REAL_VALUE_TYPE r;
6712 int i;
6714 if (!fpa_consts_inited)
6715 init_fpa_table ();
6717 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
6718 for (i = 0; i < 8; i++)
6719 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
6720 return strings_fpa[i];
6722 abort ();
6725 /* As for fp_immediate_constant, but value is passed directly, not in rtx. */
6727 static const char *
6728 fp_const_from_val (r)
6729 REAL_VALUE_TYPE * r;
6731 int i;
6733 if (!fpa_consts_inited)
6734 init_fpa_table ();
6736 for (i = 0; i < 8; i++)
6737 if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
6738 return strings_fpa[i];
6740 abort ();
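/* Editorial note: the eight entries compared against above are the FPA
   immediate constants 0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 0.5 and 10.0, as
   set up in strings_fpa/values_fpa by init_fpa_table.  */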
6743 /* Output the operands of a LDM/STM instruction to STREAM.
6744 MASK is the ARM register set mask of which only bits 0-15 are important.
6745 REG is the base register, either the frame pointer or the stack pointer.
6746 INSTR is the possibly suffixed load or store instruction. */
6748 static void
6749 print_multi_reg (stream, instr, reg, mask)
6750 FILE * stream;
6751 const char * instr;
6752 int reg;
6753 int mask;
6755 int i;
6756 int not_first = FALSE;
6758 fputc ('\t', stream);
6759 asm_fprintf (stream, instr, reg);
6760 fputs (", {", stream);
6762 for (i = 0; i <= LAST_ARM_REGNUM; i++)
6763 if (mask & (1 << i))
6765 if (not_first)
6766 fprintf (stream, ", ");
6768 asm_fprintf (stream, "%r", i);
6769 not_first = TRUE;
6772 fprintf (stream, "}%s\n", TARGET_APCS_32 ? "" : "^");
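/* Editorial sketch (not GCC code): expanding a 16-bit register mask into
   the "{r4, r5, lr}" style list that print_multi_reg emits.  The base
   instruction and the example mask are invented.  */
#include <stdio.h>

int
main (void)
{
  static const char *names[16] =
    { "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
      "r8", "r9", "sl", "fp", "ip", "sp", "lr", "pc" };
  int mask = (1 << 4) | (1 << 5) | (1 << 14);  /* example: r4, r5, lr */
  int i, first = 1;

  printf ("ldmfd\tsp!, {");
  for (i = 0; i < 16; i++)
    if (mask & (1 << i))
      {
        printf ("%s%s", first ? "" : ", ", names[i]);
        first = 0;
      }
  printf ("}\n");
  return 0;
}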
6775 /* Output a 'call' insn. */
6777 const char *
6778 output_call (operands)
6779 rtx * operands;
6781 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
6783 if (REGNO (operands[0]) == LR_REGNUM)
6785 operands[0] = gen_rtx_REG (SImode, IP_REGNUM);
6786 output_asm_insn ("mov%?\t%0, %|lr", operands);
6789 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
6791 if (TARGET_INTERWORK)
6792 output_asm_insn ("bx%?\t%0", operands);
6793 else
6794 output_asm_insn ("mov%?\t%|pc, %0", operands);
6796 return "";
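/* Editorial illustration (register choice invented): a call through r2
   on a non-interworking target comes out as

        mov     lr, pc
        mov     pc, r2

   so the return address is captured in LR just before the branch; with
   TARGET_INTERWORK the second instruction is "bx r2" instead.  */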
6799 static int
6800 eliminate_lr2ip (x)
6801 rtx * x;
6803 int something_changed = 0;
6804 rtx x0 = * x;
6805 int code = GET_CODE (x0);
6806 int i, j;
6807 const char * fmt;
6809 switch (code)
6811 case REG:
6812 if (REGNO (x0) == LR_REGNUM)
6814 *x = gen_rtx_REG (SImode, IP_REGNUM);
6815 return 1;
6817 return 0;
6818 default:
6819 /* Scan through the sub-elements and change any references there. */
6820 fmt = GET_RTX_FORMAT (code);
6822 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6823 if (fmt[i] == 'e')
6824 something_changed |= eliminate_lr2ip (&XEXP (x0, i));
6825 else if (fmt[i] == 'E')
6826 for (j = 0; j < XVECLEN (x0, i); j++)
6827 something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));
6829 return something_changed;
6833 /* Output a 'call' insn that is a reference in memory. */
6835 const char *
6836 output_call_mem (operands)
6837 rtx * operands;
6839 operands[0] = copy_rtx (operands[0]); /* Be ultra careful. */
6840 /* Handle calls using lr by using ip (which may be clobbered in subr anyway). */
6841 if (eliminate_lr2ip (&operands[0]))
6842 output_asm_insn ("mov%?\t%|ip, %|lr", operands);
6844 if (TARGET_INTERWORK)
6846 output_asm_insn ("ldr%?\t%|ip, %0", operands);
6847 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
6848 output_asm_insn ("bx%?\t%|ip", operands);
6850 else
6852 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
6853 output_asm_insn ("ldr%?\t%|pc, %0", operands);
6856 return "";
6860 /* Output a move from arm registers to an fpu register.
6861 OPERANDS[0] is an fpu register.
6862 OPERANDS[1] is the first register of an arm register pair. */
6864 const char *
6865 output_mov_long_double_fpu_from_arm (operands)
6866 rtx * operands;
6868 int arm_reg0 = REGNO (operands[1]);
6869 rtx ops[3];
6871 if (arm_reg0 == IP_REGNUM)
6872 abort ();
6874 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6875 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6876 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
6878 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
6879 output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);
6881 return "";
6884 /* Output a move from an fpu register to arm registers.
6885 OPERANDS[0] is the first register of an arm register pair.
6886 OPERANDS[1] is an fpu register. */
6888 const char *
6889 output_mov_long_double_arm_from_fpu (operands)
6890 rtx * operands;
6892 int arm_reg0 = REGNO (operands[0]);
6893 rtx ops[3];
6895 if (arm_reg0 == IP_REGNUM)
6896 abort ();
6898 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6899 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6900 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
6902 output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
6903 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
6904 return "";
6907 /* Output a move from arm registers to arm registers of a long double.
6908 OPERANDS[0] is the destination.
6909 OPERANDS[1] is the source. */
6911 const char *
6912 output_mov_long_double_arm_from_arm (operands)
6913 rtx * operands;
6915 /* We have to be careful here because the two might overlap. */
6916 int dest_start = REGNO (operands[0]);
6917 int src_start = REGNO (operands[1]);
6918 rtx ops[2];
6919 int i;
6921 if (dest_start < src_start)
6923 for (i = 0; i < 3; i++)
6925 ops[0] = gen_rtx_REG (SImode, dest_start + i);
6926 ops[1] = gen_rtx_REG (SImode, src_start + i);
6927 output_asm_insn ("mov%?\t%0, %1", ops);
6930 else
6932 for (i = 2; i >= 0; i--)
6934 ops[0] = gen_rtx_REG (SImode, dest_start + i);
6935 ops[1] = gen_rtx_REG (SImode, src_start + i);
6936 output_asm_insn ("mov%?\t%0, %1", ops);
6940 return "";
6944 /* Output a move from arm registers to an fpu register.
6945 OPERANDS[0] is an fpu register.
6946 OPERANDS[1] is the first register of an arm register pair. */
6948 const char *
6949 output_mov_double_fpu_from_arm (operands)
6950 rtx * operands;
6952 int arm_reg0 = REGNO (operands[1]);
6953 rtx ops[2];
6955 if (arm_reg0 == IP_REGNUM)
6956 abort ();
6958 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6959 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6960 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
6961 output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
6962 return "";
6965 /* Output a move from an fpu register to arm registers.
6966 OPERANDS[0] is the first register of an arm register pair.
6967 OPERANDS[1] is an fpu register. */
6969 const char *
6970 output_mov_double_arm_from_fpu (operands)
6971 rtx * operands;
6973 int arm_reg0 = REGNO (operands[0]);
6974 rtx ops[2];
6976 if (arm_reg0 == IP_REGNUM)
6977 abort ();
6979 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6980 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6981 output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
6982 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
6983 return "";
6986 /* Output a move between double words.
6987 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
6988 or MEM<-REG and all MEMs must be offsettable addresses. */
6990 const char *
6991 output_move_double (operands)
6992 rtx * operands;
6994 enum rtx_code code0 = GET_CODE (operands[0]);
6995 enum rtx_code code1 = GET_CODE (operands[1]);
6996 rtx otherops[3];
6998 if (code0 == REG)
7000 int reg0 = REGNO (operands[0]);
7002 otherops[0] = gen_rtx_REG (SImode, 1 + reg0);
7004 if (code1 == REG)
7006 int reg1 = REGNO (operands[1]);
7007 if (reg1 == IP_REGNUM)
7008 abort ();
7010 /* Ensure the second source is not overwritten. */
7011 if (reg1 == reg0 + (WORDS_BIG_ENDIAN ? -1 : 1))
7012 output_asm_insn ("mov%?\t%Q0, %Q1\n\tmov%?\t%R0, %R1", operands);
7013 else
7014 output_asm_insn ("mov%?\t%R0, %R1\n\tmov%?\t%Q0, %Q1", operands);
7016 else if (code1 == CONST_DOUBLE)
7018 if (GET_MODE (operands[1]) == DFmode)
7020 REAL_VALUE_TYPE r;
7021 long l[2];
7023 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
7024 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
7025 otherops[1] = GEN_INT (l[1]);
7026 operands[1] = GEN_INT (l[0]);
7028 else if (GET_MODE (operands[1]) != VOIDmode)
7029 abort ();
7030 else if (WORDS_BIG_ENDIAN)
7032 otherops[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
7033 operands[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
7035 else
7037 otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
7038 operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
7041 output_mov_immediate (operands);
7042 output_mov_immediate (otherops);
7044 else if (code1 == CONST_INT)
7046 #if HOST_BITS_PER_WIDE_INT > 32
7047 /* If HOST_WIDE_INT is more than 32 bits, the intval tells us
7048 what the upper word is. */
7049 if (WORDS_BIG_ENDIAN)
7051 otherops[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
7052 operands[1] = GEN_INT (INTVAL (operands[1]) >> 32);
7054 else
7056 otherops[1] = GEN_INT (INTVAL (operands[1]) >> 32);
7057 operands[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
7059 #else
7060 /* Sign extend the intval into the high-order word. */
7061 if (WORDS_BIG_ENDIAN)
7063 otherops[1] = operands[1];
7064 operands[1] = (INTVAL (operands[1]) < 0
7065 ? constm1_rtx : const0_rtx);
7067 else
7068 otherops[1] = INTVAL (operands[1]) < 0 ? constm1_rtx : const0_rtx;
7069 #endif
7070 output_mov_immediate (otherops);
7071 output_mov_immediate (operands);
7073 else if (code1 == MEM)
7075 switch (GET_CODE (XEXP (operands[1], 0)))
7077 case REG:
7078 output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
7079 break;
7081 case PRE_INC:
7082 abort (); /* Should never happen now. */
7083 break;
7085 case PRE_DEC:
7086 output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
7087 break;
7089 case POST_INC:
7090 output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
7091 break;
7093 case POST_DEC:
7094 abort (); /* Should never happen now. */
7095 break;
7097 case LABEL_REF:
7098 case CONST:
7099 output_asm_insn ("adr%?\t%0, %1", operands);
7100 output_asm_insn ("ldm%?ia\t%0, %M0", operands);
7101 break;
7103 default:
7104 if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1),
7105 GET_MODE (XEXP (XEXP (operands[1], 0), 1))))
7107 otherops[0] = operands[0];
7108 otherops[1] = XEXP (XEXP (operands[1], 0), 0);
7109 otherops[2] = XEXP (XEXP (operands[1], 0), 1);
7111 if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
7113 if (GET_CODE (otherops[2]) == CONST_INT)
7115 switch ((int) INTVAL (otherops[2]))
7117 case -8:
7118 output_asm_insn ("ldm%?db\t%1, %M0", otherops);
7119 return "";
7120 case -4:
7121 output_asm_insn ("ldm%?da\t%1, %M0", otherops);
7122 return "";
7123 case 4:
7124 output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
7125 return "";
7128 if (!(const_ok_for_arm (INTVAL (otherops[2]))))
7129 output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
7130 else
7131 output_asm_insn ("add%?\t%0, %1, %2", otherops);
7133 else
7134 output_asm_insn ("add%?\t%0, %1, %2", otherops);
7136 else
7137 output_asm_insn ("sub%?\t%0, %1, %2", otherops);
7139 return "ldm%?ia\t%0, %M0";
7141 else
7143 otherops[1] = adjust_address (operands[1], VOIDmode, 4);
7144 /* Take care of overlapping base/data reg. */
7145 if (reg_mentioned_p (operands[0], operands[1]))
7147 output_asm_insn ("ldr%?\t%0, %1", otherops);
7148 output_asm_insn ("ldr%?\t%0, %1", operands);
7150 else
7152 output_asm_insn ("ldr%?\t%0, %1", operands);
7153 output_asm_insn ("ldr%?\t%0, %1", otherops);
7158 else
7159 abort (); /* Constraints should prevent this. */
7161 else if (code0 == MEM && code1 == REG)
7163 if (REGNO (operands[1]) == IP_REGNUM)
7164 abort ();
7166 switch (GET_CODE (XEXP (operands[0], 0)))
7168 case REG:
7169 output_asm_insn ("stm%?ia\t%m0, %M1", operands);
7170 break;
7172 case PRE_INC:
7173 abort (); /* Should never happen now. */
7174 break;
7176 case PRE_DEC:
7177 output_asm_insn ("stm%?db\t%m0!, %M1", operands);
7178 break;
7180 case POST_INC:
7181 output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
7182 break;
7184 case POST_DEC:
7185 abort (); /* Should never happen now. */
7186 break;
7188 case PLUS:
7189 if (GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT)
7191 switch ((int) INTVAL (XEXP (XEXP (operands[0], 0), 1)))
7193 case -8:
7194 output_asm_insn ("stm%?db\t%m0, %M1", operands);
7195 return "";
7197 case -4:
7198 output_asm_insn ("stm%?da\t%m0, %M1", operands);
7199 return "";
7201 case 4:
7202 output_asm_insn ("stm%?ib\t%m0, %M1", operands);
7203 return "";
7206 /* Fall through */
7208 default:
7209 otherops[0] = adjust_address (operands[0], VOIDmode, 4);
7210 otherops[1] = gen_rtx_REG (SImode, 1 + REGNO (operands[1]));
7211 output_asm_insn ("str%?\t%1, %0", operands);
7212 output_asm_insn ("str%?\t%1, %0", otherops);
7215 else
7216 /* Constraints should prevent this. */
7217 abort ();
7219 return "";
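/* Editorial illustration: the constant offsets special-cased above map
   onto ARM's four load-multiple addressing modes when fetching the pair
   from BASE+OFFSET (rB/rD are placeholder register names):

        offset  0  ->  ldmia  rB, {rD, rD+1}
        offset  4  ->  ldmib  rB, {rD, rD+1}
        offset -4  ->  ldmda  rB, {rD, rD+1}
        offset -8  ->  ldmdb  rB, {rD, rD+1}  */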
7223 /* Output an arbitrary MOV reg, #n.
7224 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
7226 const char *
7227 output_mov_immediate (operands)
7228 rtx * operands;
7230 HOST_WIDE_INT n = INTVAL (operands[1]);
7232 /* Try to use one MOV. */
7233 if (const_ok_for_arm (n))
7234 output_asm_insn ("mov%?\t%0, %1", operands);
7236 /* Try to use one MVN. */
7237 else if (const_ok_for_arm (~n))
7239 operands[1] = GEN_INT (~n);
7240 output_asm_insn ("mvn%?\t%0, %1", operands);
7242 else
7244 int n_ones = 0;
7245 int i;
7247 /* If all else fails, make it out of ORRs or BICs as appropriate. */
7248 for (i = 0; i < 32; i ++)
7249 if (n & 1 << i)
7250 n_ones ++;
7252 if (n_ones > 16) /* Shorter to use MVN with BIC in this case. */
7253 output_multi_immediate (operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1, ~ n);
7254 else
7255 output_multi_immediate (operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1, n);
7258 return "";
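/* Editorial sketch (not GCC code): the population-count test used above
   to pick between building N with MOV/ORR and building ~N with MVN/BIC.
   The example constant is invented.  */
#include <stdio.h>

int
main (void)
{
  unsigned int n = 0xfffff00f;  /* example constant: 24 bits set */
  int i, ones = 0;

  for (i = 0; i < 32; i++)
    if (n & (1u << i))
      ones++;

  if (ones > 16)
    printf ("use mvn/bic on ~n = 0x%x\n", ~n);  /* fewer bits to patch up */
  else
    printf ("use mov/orr on n = 0x%x\n", n);
  return 0;
}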
7261 /* Output an ADD r, s, #n where n may be too big for one instruction.
7262 If adding zero to one register, output nothing. */
7264 const char *
7265 output_add_immediate (operands)
7266 rtx * operands;
7268 HOST_WIDE_INT n = INTVAL (operands[2]);
7270 if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
7272 if (n < 0)
7273 output_multi_immediate (operands,
7274 "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
7275 -n);
7276 else
7277 output_multi_immediate (operands,
7278 "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
7282 return "";
7285 /* Output a multiple immediate operation.
7286 OPERANDS is the vector of operands referred to in the output patterns.
7287 INSTR1 is the output pattern to use for the first constant.
7288 INSTR2 is the output pattern to use for subsequent constants.
7289 IMMED_OP is the index of the constant slot in OPERANDS.
7290 N is the constant value. */
7292 static const char *
7293 output_multi_immediate (operands, instr1, instr2, immed_op, n)
7294 rtx * operands;
7295 const char * instr1;
7296 const char * instr2;
7297 int immed_op;
7298 HOST_WIDE_INT n;
7300 #if HOST_BITS_PER_WIDE_INT > 32
7301 n &= 0xffffffff;
7302 #endif
7304 if (n == 0)
7306 /* Quick and easy output. */
7307 operands[immed_op] = const0_rtx;
7308 output_asm_insn (instr1, operands);
7310 else
7312 int i;
7313 const char * instr = instr1;
7315 /* Note that n is never zero here (which would give no output). */
7316 for (i = 0; i < 32; i += 2)
7318 if (n & (3 << i))
7320 operands[immed_op] = GEN_INT (n & (255 << i));
7321 output_asm_insn (instr, operands);
7322 instr = instr2;
7323 i += 6;
7328 return "";
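/* Editorial sketch (not GCC code): the chunking loop above, which peels
   off one ARM-encodable immediate (an 8-bit field at an even bit
   position) per instruction.  The example constant is invented; ORing
   the printed chunks back together reproduces it.  */
#include <stdio.h>

int
main (void)
{
  unsigned int n = 0x12345678;  /* example constant */
  int i;

  for (i = 0; i < 32; i += 2)
    if (n & (3u << i))
      {
        printf ("chunk: 0x%08x\n", n & (255u << i));
        i += 6;  /* this chunk consumed 8 bits; resume past it */
      }
  return 0;
}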
7331 /* Return the appropriate ARM instruction for the operation code.
7332 The returned result should not be overwritten. OP is the rtx of the
7333 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
7334 was shifted. */
7336 const char *
7337 arithmetic_instr (op, shift_first_arg)
7338 rtx op;
7339 int shift_first_arg;
7341 switch (GET_CODE (op))
7343 case PLUS:
7344 return "add";
7346 case MINUS:
7347 return shift_first_arg ? "rsb" : "sub";
7349 case IOR:
7350 return "orr";
7352 case XOR:
7353 return "eor";
7355 case AND:
7356 return "and";
7358 default:
7359 abort ();
7363 /* Ensure valid constant shifts and return the appropriate shift mnemonic
7364 for the operation code. The returned result should not be overwritten.
7365 OP is the rtx code of the shift.
7366 On exit, *AMOUNTP will be -1 if the shift is by a register, otherwise
7367 it will be the constant shift amount. */
7369 static const char *
7370 shift_op (op, amountp)
7371 rtx op;
7372 HOST_WIDE_INT *amountp;
7374 const char * mnem;
7375 enum rtx_code code = GET_CODE (op);
7377 if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
7378 *amountp = -1;
7379 else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
7380 *amountp = INTVAL (XEXP (op, 1));
7381 else
7382 abort ();
7384 switch (code)
7386 case ASHIFT:
7387 mnem = "asl";
7388 break;
7390 case ASHIFTRT:
7391 mnem = "asr";
7392 break;
7394 case LSHIFTRT:
7395 mnem = "lsr";
7396 break;
7398 case ROTATERT:
7399 mnem = "ror";
7400 break;
7402 case MULT:
7403 /* We never have to worry about the amount being other than a
7404 power of 2, since this case can never be reloaded from a reg. */
7405 if (*amountp != -1)
7406 *amountp = int_log2 (*amountp);
7407 else
7408 abort ();
7409 return "asl";
7411 default:
7412 abort ();
7415 if (*amountp != -1)
7417 /* This is not 100% correct, but follows from the desire to merge
7418 multiplication by a power of 2 with the recognizer for a
7419 shift. >=32 is not a valid shift for "asl", so we must try and
7420 output a shift that produces the correct arithmetical result.
7421 Using lsr #32 is identical except for the fact that the carry bit
7422 is not set correctly if we set the flags; but we never use the
7423 carry bit from such an operation, so we can ignore that. */
7424 if (code == ROTATERT)
7425 /* Rotate is just modulo 32. */
7426 *amountp &= 31;
7427 else if (*amountp != (*amountp & 31))
7429 if (code == ASHIFT)
7430 mnem = "lsr";
7431 *amountp = 32;
7434 /* Shifts of 0 are no-ops. */
7435 if (*amountp == 0)
7436 return NULL;
7439 return mnem;
7442 /* Obtain the shift count for the power of two POWER. */
7444 static HOST_WIDE_INT
7445 int_log2 (power)
7446 HOST_WIDE_INT power;
7448 HOST_WIDE_INT shift = 0;
7450 while ((((HOST_WIDE_INT) 1 << shift) & power) == 0)
7452 if (shift > 31)
7453 abort ();
7454 shift ++;
7457 return shift;
7460 /* Output a .ascii pseudo-op, keeping track of lengths. This is because
7461 /bin/as is horribly restrictive. */
7462 #define MAX_ASCII_LEN 51
7464 void
7465 output_ascii_pseudo_op (stream, p, len)
7466 FILE * stream;
7467 const unsigned char * p;
7468 int len;
7470 int i;
7471 int len_so_far = 0;
7473 fputs ("\t.ascii\t\"", stream);
7475 for (i = 0; i < len; i++)
7477 int c = p[i];
7479 if (len_so_far >= MAX_ASCII_LEN)
7481 fputs ("\"\n\t.ascii\t\"", stream);
7482 len_so_far = 0;
7485 switch (c)
7487 case TARGET_TAB:
7488 fputs ("\\t", stream);
7489 len_so_far += 2;
7490 break;
7492 case TARGET_FF:
7493 fputs ("\\f", stream);
7494 len_so_far += 2;
7495 break;
7497 case TARGET_BS:
7498 fputs ("\\b", stream);
7499 len_so_far += 2;
7500 break;
7502 case TARGET_CR:
7503 fputs ("\\r", stream);
7504 len_so_far += 2;
7505 break;
7507 case TARGET_NEWLINE:
7508 fputs ("\\n", stream);
7509 c = p [i + 1];
7510 if ((c >= ' ' && c <= '~')
7511 || c == TARGET_TAB)
7512 /* This is a good place for a line break. */
7513 len_so_far = MAX_ASCII_LEN;
7514 else
7515 len_so_far += 2;
7516 break;
7518 case '\"':
7519 case '\\':
7520 putc ('\\', stream);
7521 len_so_far++;
7522 /* drop through. */
7524 default:
7525 if (c >= ' ' && c <= '~')
7527 putc (c, stream);
7528 len_so_far++;
7530 else
7532 fprintf (stream, "\\%03o", c);
7533 len_so_far += 4;
7535 break;
7539 fputs ("\"\n", stream);
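/* Editorial illustration (string invented): a long literal is emitted as
   several directives so that no single .ascii carries more than
   MAX_ASCII_LEN characters of payload, e.g.

        .ascii  "the first chunk of the string, cut at the limit..."
        .ascii  "...and the remainder, ending in a newline.\n"  */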
7542 /* Compute the register save mask for registers 0 through 12
7543 inclusive. This code is used by both arm_compute_save_reg_mask
7544 and arm_compute_initial_elimination_offset. */
7546 static unsigned long
7547 arm_compute_save_reg0_reg12_mask ()
7549 unsigned long func_type = arm_current_func_type ();
7550 unsigned int save_reg_mask = 0;
7551 unsigned int reg;
7553 if (IS_INTERRUPT (func_type))
7555 unsigned int max_reg;
7556 /* Interrupt functions must not corrupt any registers,
7557 even call clobbered ones. If this is a leaf function
7558 we can just examine the registers used by the RTL, but
7559 otherwise we have to assume that whatever function is
7560 called might clobber anything, and so we have to save
7561 all the call-clobbered registers as well. */
7562 if (ARM_FUNC_TYPE (func_type) == ARM_FT_FIQ)
7563 /* FIQ handlers have registers r8 - r12 banked, so
7564 we only need to check r0 - r7. Normal ISRs only
7565 bank r14 and r15, so we must check up to r12.
7566 r13 is the stack pointer which is always preserved,
7567 so we do not need to consider it here. */
7568 max_reg = 7;
7569 else
7570 max_reg = 12;
7572 for (reg = 0; reg <= max_reg; reg++)
7573 if (regs_ever_live[reg]
7574 || (! current_function_is_leaf && call_used_regs [reg]))
7575 save_reg_mask |= (1 << reg);
7577 else
7579 /* In the normal case we only need to save those registers
7580 which are call saved and which are used by this function. */
7581 for (reg = 0; reg <= 10; reg++)
7582 if (regs_ever_live[reg] && ! call_used_regs [reg])
7583 save_reg_mask |= (1 << reg);
7585 /* Handle the frame pointer as a special case. */
7586 if (! TARGET_APCS_FRAME
7587 && ! frame_pointer_needed
7588 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
7589 && ! call_used_regs[HARD_FRAME_POINTER_REGNUM])
7590 save_reg_mask |= 1 << HARD_FRAME_POINTER_REGNUM;
7592 /* If we aren't loading the PIC register,
7593 don't stack it even though it may be live. */
7594 if (flag_pic
7595 && ! TARGET_SINGLE_PIC_BASE
7596 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
7597 save_reg_mask |= 1 << PIC_OFFSET_TABLE_REGNUM;
7600 return save_reg_mask;
7603 /* Compute a bit mask of which registers need to be
7604 saved on the stack for the current function. */
7606 static unsigned long
7607 arm_compute_save_reg_mask ()
7609 unsigned int save_reg_mask = 0;
7610 unsigned long func_type = arm_current_func_type ();
7612 if (IS_NAKED (func_type))
7613 /* This should never really happen. */
7614 return 0;
7616 /* If we are creating a stack frame, then we must save the frame pointer,
7617 IP (which will hold the old stack pointer), LR and the PC. */
7618 if (frame_pointer_needed)
7619 save_reg_mask |=
7620 (1 << ARM_HARD_FRAME_POINTER_REGNUM)
7621 | (1 << IP_REGNUM)
7622 | (1 << LR_REGNUM)
7623 | (1 << PC_REGNUM);
7625 /* Volatile functions do not return, so there
7626 is no need to save any other registers. */
7627 if (IS_VOLATILE (func_type))
7628 return save_reg_mask;
7630 save_reg_mask |= arm_compute_save_reg0_reg12_mask ();
7632 /* Decide if we need to save the link register.
7633 Interrupt routines have their own banked link register,
7634 so they never need to save it.
7635 Otherwise if we do not use the link register we do not need to save
7636 it. If we are pushing other registers onto the stack however, we
7637 can save an instruction in the epilogue by pushing the link register
7638 now and then popping it back into the PC. This incurs extra memory
7639 accesses though, so we only do it when optimising for size, and only
7640 if we know that we will not need a fancy return sequence. */
7641 if (regs_ever_live [LR_REGNUM]
7642 || (save_reg_mask
7643 && optimize_size
7644 && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL))
7645 save_reg_mask |= 1 << LR_REGNUM;
7647 if (cfun->machine->lr_save_eliminated)
7648 save_reg_mask &= ~ (1 << LR_REGNUM);
7650 return save_reg_mask;
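/* Editorial sketch (not GCC code): the mask for a hypothetical normal
   function that needs a frame and uses r4 and r5, with the usual APCS
   register numbering (fp = 11, ip = 12, lr = 14, pc = 15) assumed.  */
#include <stdio.h>

int
main (void)
{
  unsigned long mask = (1UL << 4) | (1UL << 5)    /* call-saved regs used */
                     | (1UL << 11) | (1UL << 12)  /* fp, ip */
                     | (1UL << 14) | (1UL << 15); /* lr, pc */
  printf ("save mask = 0x%lx\n", mask);           /* prints 0xd830 */
  return 0;
}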
7653 /* Generate a function exit sequence. If REALLY_RETURN is true, then do
7654 everything bar the final return instruction. */
7656 const char *
7657 output_return_instruction (operand, really_return, reverse)
7658 rtx operand;
7659 int really_return;
7660 int reverse;
7662 char conditional[10];
7663 char instr[100];
7664 int reg;
7665 unsigned long live_regs_mask;
7666 unsigned long func_type;
7668 func_type = arm_current_func_type ();
7670 if (IS_NAKED (func_type))
7671 return "";
7673 if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
7675 /* If this function was declared non-returning, and we have found a tail
7676 call, then we have to trust that the called function won't return. */
7677 if (really_return)
7679 rtx ops[2];
7681 /* Otherwise, trap an attempted return by aborting. */
7682 ops[0] = operand;
7683 ops[1] = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)"
7684 : "abort");
7685 assemble_external_libcall (ops[1]);
7686 output_asm_insn (reverse ? "bl%D0\t%a1" : "bl%d0\t%a1", ops);
7689 return "";
7692 if (current_function_calls_alloca && !really_return)
7693 abort ();
7695 sprintf (conditional, "%%?%%%c0", reverse ? 'D' : 'd');
7697 return_used_this_function = 1;
7699 live_regs_mask = arm_compute_save_reg_mask ();
7701 if (live_regs_mask)
7703 const char * return_reg;
7705 /* If we do not have any special requirements for function exit
7706 (e.g. interworking, or ISR) then we can load the return address
7707 directly into the PC. Otherwise we must load it into LR. */
7708 if (really_return
7709 && ! TARGET_INTERWORK)
7710 return_reg = reg_names[PC_REGNUM];
7711 else
7712 return_reg = reg_names[LR_REGNUM];
7714 if ((live_regs_mask & (1 << IP_REGNUM)) == (1 << IP_REGNUM))
7715 /* There are two possible reasons for the IP register being saved.
7716 Either a stack frame was created, in which case IP contains the
7717 old stack pointer, or an ISR routine corrupted it. If this in an
7718 ISR routine then just restore IP, otherwise restore IP into SP. */
7719 if (! IS_INTERRUPT (func_type))
7721 live_regs_mask &= ~ (1 << IP_REGNUM);
7722 live_regs_mask |= (1 << SP_REGNUM);
7725 /* On some ARM architectures it is faster to use LDR rather than
7726 LDM to load a single register. On other architectures, the
7727 cost is the same. In 26 bit mode, or for exception handlers,
7728 we have to use LDM to load the PC so that the CPSR is also
7729 restored. */
7730 for (reg = 0; reg <= LAST_ARM_REGNUM; reg++)
7732 if (live_regs_mask == (unsigned int)(1 << reg))
7733 break;
7735 if (reg <= LAST_ARM_REGNUM
7736 && (reg != LR_REGNUM
7737 || ! really_return
7738 || (TARGET_APCS_32 && ! IS_INTERRUPT (func_type))))
7740 sprintf (instr, "ldr%s\t%%|%s, [%%|sp], #4", conditional,
7741 (reg == LR_REGNUM) ? return_reg : reg_names[reg]);
7743 else
7745 char *p;
7746 int first = 1;
7748 /* Generate the load multiple instruction to restore the registers. */
7749 if (frame_pointer_needed)
7750 sprintf (instr, "ldm%sea\t%%|fp, {", conditional);
7751 else if (live_regs_mask & (1 << SP_REGNUM))
7752 sprintf (instr, "ldm%sfd\t%%|sp, {", conditional);
7753 else
7754 sprintf (instr, "ldm%sfd\t%%|sp!, {", conditional);
7756 p = instr + strlen (instr);
7758 for (reg = 0; reg <= SP_REGNUM; reg++)
7759 if (live_regs_mask & (1 << reg))
7761 int l = strlen (reg_names[reg]);
7763 if (first)
7764 first = 0;
7765 else
7767 memcpy (p, ", ", 2);
7768 p += 2;
7771 memcpy (p, "%|", 2);
7772 memcpy (p + 2, reg_names[reg], l);
7773 p += l + 2;
7776 if (live_regs_mask & (1 << LR_REGNUM))
7778 int l = strlen (return_reg);
7780 if (! first)
7782 memcpy (p, ", ", 2);
7783 p += 2;
7786 memcpy (p, "%|", 2);
7787 memcpy (p + 2, return_reg, l);
7788 strcpy (p + 2 + l, ((TARGET_APCS_32
7789 && !IS_INTERRUPT (func_type))
7790 || !really_return)
7791 ? "}" : "}^");
7793 else
7794 strcpy (p, "}");
7797 output_asm_insn (instr, & operand);
7799 /* See if we need to generate an extra instruction to
7800 perform the actual function return. */
7801 if (really_return
7802 && func_type != ARM_FT_INTERWORKED
7803 && (live_regs_mask & (1 << LR_REGNUM)) != 0)
7805 /* The return has already been handled
7806 by loading the LR into the PC. */
7807 really_return = 0;
7811 if (really_return)
7813 switch ((int) ARM_FUNC_TYPE (func_type))
7815 case ARM_FT_ISR:
7816 case ARM_FT_FIQ:
7817 sprintf (instr, "sub%ss\t%%|pc, %%|lr, #4", conditional);
7818 break;
7820 case ARM_FT_INTERWORKED:
7821 sprintf (instr, "bx%s\t%%|lr", conditional);
7822 break;
7824 case ARM_FT_EXCEPTION:
7825 sprintf (instr, "mov%ss\t%%|pc, %%|lr", conditional);
7826 break;
7828 default:
7829 /* ARMv5 implementations always provide BX, so interworking
7830 is the default unless APCS-26 is in use. */
7831 if ((insn_flags & FL_ARCH5) != 0 && TARGET_APCS_32)
7832 sprintf (instr, "bx%s\t%%|lr", conditional);
7833 else
7834 sprintf (instr, "mov%s%s\t%%|pc, %%|lr",
7835 conditional, TARGET_APCS_32 ? "" : "s");
7836 break;
7839 output_asm_insn (instr, & operand);
7842 return "";
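/* Editorial illustration (register choices invented): depending on the
   function type and target flags, the code above emits returns such as

        ldmfd   sp!, {r4, r5, pc}   @ pop saved regs straight into PC
        ldr     pc, [sp], #4        @ single saved register: LDR is cheaper
        bx      lr                  @ interworking / ARMv5 return
        movs    pc, lr              @ non-APCS-32: also restores PSR flags
        subs    pc, lr, #4          @ ISR/FIQ return  */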
7845 /* Write the function name into the code section, directly preceding
7846 the function prologue.
7848 Code will be output similar to this:
7850 .ascii "arm_poke_function_name", 0
7851 .align
7853 .word 0xff000000 + (t1 - t0)
7854 arm_poke_function_name
7855 mov ip, sp
7856 stmfd sp!, {fp, ip, lr, pc}
7857 sub fp, ip, #4
7859 When performing a stack backtrace, code can inspect the value
7860 of 'pc' stored at 'fp' + 0. If the trace function then looks
7861 at location pc - 12 and the top 8 bits are set, then we know
7862 that there is a function name embedded immediately preceding this
7863 location, and that it has length ((pc[-3]) & 0x00ffffff).
7865 We assume that pc is declared as a pointer to an unsigned long.
7867 It is of no benefit to output the function name if we are assembling
7868 a leaf function. These function types will not contain a stack
7869 backtrace structure, therefore it is not possible to determine the
7870 function name. */
7872 void
7873 arm_poke_function_name (stream, name)
7874 FILE * stream;
7875 const char * name;
7877 unsigned long alignlength;
7878 unsigned long length;
7879 rtx x;
7881 length = strlen (name) + 1;
7882 alignlength = ROUND_UP_WORD (length);
7884 ASM_OUTPUT_ASCII (stream, name, length);
7885 ASM_OUTPUT_ALIGN (stream, 2);
7886 x = GEN_INT ((unsigned HOST_WIDE_INT) 0xff000000 + alignlength);
7887 assemble_aligned_integer (UNITS_PER_WORD, x);
7890 /* Place some comments into the assembler stream
7891 describing the current function. */
7893 static void
7894 arm_output_function_prologue (f, frame_size)
7895 FILE * f;
7896 HOST_WIDE_INT frame_size;
7898 unsigned long func_type;
7900 if (!TARGET_ARM)
7902 thumb_output_function_prologue (f, frame_size);
7903 return;
7906 /* Sanity check. */
7907 if (arm_ccfsm_state || arm_target_insn)
7908 abort ();
7910 func_type = arm_current_func_type ();
7912 switch ((int) ARM_FUNC_TYPE (func_type))
7914 default:
7915 case ARM_FT_NORMAL:
7916 break;
7917 case ARM_FT_INTERWORKED:
7918 asm_fprintf (f, "\t%@ Function supports interworking.\n");
7919 break;
7920 case ARM_FT_EXCEPTION_HANDLER:
7921 asm_fprintf (f, "\t%@ C++ Exception Handler.\n");
7922 break;
7923 case ARM_FT_ISR:
7924 asm_fprintf (f, "\t%@ Interrupt Service Routine.\n");
7925 break;
7926 case ARM_FT_FIQ:
7927 asm_fprintf (f, "\t%@ Fast Interrupt Service Routine.\n");
7928 break;
7929 case ARM_FT_EXCEPTION:
7930 asm_fprintf (f, "\t%@ ARM Exception Handler.\n");
7931 break;
7934 if (IS_NAKED (func_type))
7935 asm_fprintf (f, "\t%@ Naked Function: prologue and epilogue provided by programmer.\n");
7937 if (IS_VOLATILE (func_type))
7938 asm_fprintf (f, "\t%@ Volatile: function does not return.\n");
7940 if (IS_NESTED (func_type))
7941 asm_fprintf (f, "\t%@ Nested: function declared inside another function.\n");
7943 asm_fprintf (f, "\t%@ args = %d, pretend = %d, frame = %d\n",
7944 current_function_args_size,
7945 current_function_pretend_args_size, frame_size);
7947 asm_fprintf (f, "\t%@ frame_needed = %d, uses_anonymous_args = %d\n",
7948 frame_pointer_needed,
7949 cfun->machine->uses_anonymous_args);
7951 if (cfun->machine->lr_save_eliminated)
7952 asm_fprintf (f, "\t%@ link register save eliminated.\n");
7954 #ifdef AOF_ASSEMBLER
7955 if (flag_pic)
7956 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, PIC_OFFSET_TABLE_REGNUM);
7957 #endif
7959 return_used_this_function = 0;
7962 const char *
7963 arm_output_epilogue (really_return)
7964 int really_return;
7966 int reg;
7967 unsigned long saved_regs_mask;
7968 unsigned long func_type;
7969 /* Floats_offset is the offset from the "virtual" frame. In an APCS
7970 frame that is $fp + 4 for a non-variadic function. */
7971 int floats_offset = 0;
7972 rtx operands[3];
7973 int frame_size = arm_get_frame_size ();
7974 FILE * f = asm_out_file;
7975 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
7977 /* If we have already generated the return instruction
7978 then it is futile to generate anything else. */
7979 if (use_return_insn (FALSE) && return_used_this_function)
7980 return "";
7982 func_type = arm_current_func_type ();
7984 if (IS_NAKED (func_type))
7985 /* Naked functions don't have epilogues. */
7986 return "";
7988 if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
7990 rtx op;
7992 /* A volatile function should never return. Call abort. */
7993 op = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)" : "abort");
7994 assemble_external_libcall (op);
7995 output_asm_insn ("bl\t%a0", &op);
7997 return "";
8000 if (ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER
8001 && ! really_return)
8002 /* If we are throwing an exception, then we really must
8003 be doing a return, so we can't tail-call. */
8004 abort ();
8006 saved_regs_mask = arm_compute_save_reg_mask ();
8008 /* XXX We should adjust floats_offset for any anonymous args, and then
8009 re-adjust vfp_offset below to compensate. */
8011 /* Compute how far away the floats will be. */
8012 for (reg = 0; reg <= LAST_ARM_REGNUM; reg ++)
8013 if (saved_regs_mask & (1 << reg))
8014 floats_offset += 4;
8016 if (frame_pointer_needed)
8018 int vfp_offset = 4;
8020 if (arm_fpu_arch == FP_SOFT2)
8022 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
8023 if (regs_ever_live[reg] && !call_used_regs[reg])
8025 floats_offset += 12;
8026 asm_fprintf (f, "\tldfe\t%r, [%r, #-%d]\n",
8027 reg, FP_REGNUM, floats_offset - vfp_offset);
8030 else
8032 int start_reg = LAST_ARM_FP_REGNUM;
8034 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
8036 if (regs_ever_live[reg] && !call_used_regs[reg])
8038 floats_offset += 12;
8040 /* We can't unstack more than four registers at once. */
8041 if (start_reg - reg == 3)
8043 asm_fprintf (f, "\tlfm\t%r, 4, [%r, #-%d]\n",
8044 reg, FP_REGNUM, floats_offset - vfp_offset);
8045 start_reg = reg - 1;
8048 else
8050 if (reg != start_reg)
8051 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
8052 reg + 1, start_reg - reg,
8053 FP_REGNUM, floats_offset - vfp_offset);
8054 start_reg = reg - 1;
8058 /* Just in case the last register checked also needs unstacking. */
8059 if (reg != start_reg)
8060 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
8061 reg + 1, start_reg - reg,
8062 FP_REGNUM, floats_offset - vfp_offset);
8065 /* saved_regs_mask should contain the IP, which at the time of stack
8066 frame generation actually contains the old stack pointer. So a
8067 quick way to unwind the stack is just pop the IP register directly
8068 into the stack pointer. */
8069 if ((saved_regs_mask & (1 << IP_REGNUM)) == 0)
8070 abort ();
8071 saved_regs_mask &= ~ (1 << IP_REGNUM);
8072 saved_regs_mask |= (1 << SP_REGNUM);
8074 /* There are two registers left in saved_regs_mask - LR and PC. We
8075 only need to restore the LR register (the return address), but to
8076 save time we can load it directly into the PC, unless we need a
8077 special function exit sequence, or we are not really returning. */
8078 if (really_return && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL)
8079 /* Delete the LR from the register mask, so that the saved LR on
8080 the stack is loaded into the PC instead. */
8081 saved_regs_mask &= ~ (1 << LR_REGNUM);
8082 else
8083 saved_regs_mask &= ~ (1 << PC_REGNUM);
8085 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, saved_regs_mask);
8087 if (IS_INTERRUPT (func_type))
8088 /* Interrupt handlers will have pushed the
8089 IP onto the stack, so restore it now. */
8090 print_multi_reg (f, "ldmfd\t%r", SP_REGNUM, 1 << IP_REGNUM);
8092 else
8094 /* Restore stack pointer if necessary. */
8095 if (frame_size + current_function_outgoing_args_size != 0)
8097 operands[0] = operands[1] = stack_pointer_rtx;
8098 operands[2] = GEN_INT (frame_size
8099 + current_function_outgoing_args_size);
8100 output_add_immediate (operands);
8103 if (arm_fpu_arch == FP_SOFT2)
8105 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
8106 if (regs_ever_live[reg] && !call_used_regs[reg])
8107 asm_fprintf (f, "\tldfe\t%r, [%r], #12\n",
8108 reg, SP_REGNUM);
8110 else
8112 int start_reg = FIRST_ARM_FP_REGNUM;
8114 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
8116 if (regs_ever_live[reg] && !call_used_regs[reg])
8118 if (reg - start_reg == 3)
8120 asm_fprintf (f, "\tlfmfd\t%r, 4, [%r]!\n",
8121 start_reg, SP_REGNUM);
8122 start_reg = reg + 1;
8125 else
8127 if (reg != start_reg)
8128 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
8129 start_reg, reg - start_reg,
8130 SP_REGNUM);
8132 start_reg = reg + 1;
8136 /* Just in case the last register checked also needs unstacking. */
8137 if (reg != start_reg)
8138 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
8139 start_reg, reg - start_reg, SP_REGNUM);
8142 /* If we can, restore the LR into the PC. */
8143 if (ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
8144 && really_return
8145 && current_function_pretend_args_size == 0
8146 && saved_regs_mask & (1 << LR_REGNUM))
8148 saved_regs_mask &= ~ (1 << LR_REGNUM);
8149 saved_regs_mask |= (1 << PC_REGNUM);
8152 /* Load the registers off the stack. If we only have one register
8153 to load use the LDR instruction - it is faster. */
8154 if (saved_regs_mask == (1 << LR_REGNUM))
8156 /* The exception handler ignores the LR, so we do
8157 not really need to load it off the stack. */
8158 if (eh_ofs)
8159 asm_fprintf (f, "\tadd\t%r, %r, #4\n", SP_REGNUM, SP_REGNUM);
8160 else
8161 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM, SP_REGNUM);
8163 else if (saved_regs_mask)
8165 if (saved_regs_mask & (1 << SP_REGNUM))
8166 /* Note - write back to the stack register is not enabled
8167 (i.e. "ldmfd sp!..."). We know that the stack pointer is
8168 in the list of registers, and if we add writeback the
8169 instruction becomes UNPREDICTABLE. */
8170 print_multi_reg (f, "ldmfd\t%r", SP_REGNUM, saved_regs_mask);
8171 else
8172 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, saved_regs_mask);
8175 if (current_function_pretend_args_size)
8177 /* Unwind the pre-pushed regs. */
8178 operands[0] = operands[1] = stack_pointer_rtx;
8179 operands[2] = GEN_INT (current_function_pretend_args_size);
8180 output_add_immediate (operands);
8184 #if 0
8185 if (ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER)
8186 /* Adjust the stack to remove the exception handler stuff. */
8187 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
8188 REGNO (eh_ofs));
8189 #endif
8191 if (! really_return
8192 || (ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
8193 && current_function_pretend_args_size == 0
8194 && saved_regs_mask & (1 << PC_REGNUM)))
8195 return "";
8197 /* Generate the return instruction. */
8198 switch ((int) ARM_FUNC_TYPE (func_type))
8200 case ARM_FT_EXCEPTION_HANDLER:
8201 /* Even in 26-bit mode we do a mov (rather than a movs)
8202 because we don't have the PSR bits set in the address. */
8203 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, EXCEPTION_LR_REGNUM);
8204 break;
8206 case ARM_FT_ISR:
8207 case ARM_FT_FIQ:
8208 asm_fprintf (f, "\tsubs\t%r, %r, #4\n", PC_REGNUM, LR_REGNUM);
8209 break;
8211 case ARM_FT_EXCEPTION:
8212 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
8213 break;
8215 case ARM_FT_INTERWORKED:
8216 asm_fprintf (f, "\tbx\t%r\n", LR_REGNUM);
8217 break;
8219 default:
8220 if (frame_pointer_needed)
8221 /* If we used the frame pointer then the return address
8222 will have been loaded off the stack directly into the
8223 PC, so there is no need to issue a MOV instruction
8224 here. */
8226 else if (current_function_pretend_args_size == 0
8227 && (saved_regs_mask & (1 << LR_REGNUM)))
8228 /* Similarly we may have been able to load LR into the PC
8229 even if we did not create a stack frame. */
8231 else if (TARGET_APCS_32)
8232 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, LR_REGNUM);
8233 else
8234 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
8235 break;
8238 return "";
8241 static void
8242 arm_output_function_epilogue (file, frame_size)
8243 FILE *file ATTRIBUTE_UNUSED;
8244 HOST_WIDE_INT frame_size;
8246 if (TARGET_THUMB)
8248 /* ??? Probably not safe to set this here, since it assumes that a
8249 function will be emitted as assembly immediately after we generate
8250 RTL for it. This does not happen for inline functions. */
8251 return_used_this_function = 0;
8253 else
8255 /* We need to take into account any stack-frame rounding. */
8256 frame_size = arm_get_frame_size ();
8258 if (use_return_insn (FALSE)
8259 && return_used_this_function
8260 && (frame_size + current_function_outgoing_args_size) != 0
8261 && !frame_pointer_needed)
8262 abort ();
8264 /* Reset the ARM-specific per-function variables. */
8265 after_arm_reorg = 0;
8269 /* Generate and emit an insn that we will recognize as a push_multi.
8270 Unfortunately, since this insn does not reflect very well the actual
8271 semantics of the operation, we need to annotate the insn for the benefit
8272 of DWARF2 frame unwind information. */
8274 static rtx
8275 emit_multi_reg_push (mask)
8276 int mask;
8278 int num_regs = 0;
8279 int num_dwarf_regs;
8280 int i, j;
8281 rtx par;
8282 rtx dwarf;
8283 int dwarf_par_index;
8284 rtx tmp, reg;
8286 for (i = 0; i <= LAST_ARM_REGNUM; i++)
8287 if (mask & (1 << i))
8288 num_regs++;
8290 if (num_regs == 0 || num_regs > 16)
8291 abort ();
8293 /* We don't record the PC in the dwarf frame information. */
8294 num_dwarf_regs = num_regs;
8295 if (mask & (1 << PC_REGNUM))
8296 num_dwarf_regs--;
8298 /* For the body of the insn we are going to generate an UNSPEC in
8299 parallel with several USEs. This allows the insn to be recognized
8300 by the push_multi pattern in the arm.md file. The insn looks
8301 something like this:
8303 (parallel [
8304 (set (mem:BLK (pre_dec:BLK (reg:SI sp)))
8305 (unspec:BLK [(reg:SI r4)] UNSPEC_PUSH_MULT))
8306 (use (reg:SI 11 fp))
8307 (use (reg:SI 12 ip))
8308 (use (reg:SI 14 lr))
8309 (use (reg:SI 15 pc))
8312 For the frame note however, we try to be more explicit and actually
8313 show each register being stored into the stack frame, plus a (single)
8314 decrement of the stack pointer. We do it this way in order to be
8315 friendly to the stack unwinding code, which only wants to see a single
8316 stack decrement per instruction. The RTL we generate for the note looks
8317 something like this:
8319 (sequence [
8320 (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int -20)))
8321 (set (mem:SI (reg:SI sp)) (reg:SI r4))
8322 (set (mem:SI (plus:SI (reg:SI sp) (const_int 4))) (reg:SI fp))
8323 (set (mem:SI (plus:SI (reg:SI sp) (const_int 8))) (reg:SI ip))
8324 (set (mem:SI (plus:SI (reg:SI sp) (const_int 12))) (reg:SI lr))
8327 This sequence is used both by the code to support stack unwinding for
8328 exception handlers and the code to generate dwarf2 frame debugging. */
8330 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_regs));
8331 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (num_dwarf_regs + 1));
8332 dwarf_par_index = 1;
8334 for (i = 0; i <= LAST_ARM_REGNUM; i++)
8336 if (mask & (1 << i))
8338 reg = gen_rtx_REG (SImode, i);
8340 XVECEXP (par, 0, 0)
8341 = gen_rtx_SET (VOIDmode,
8342 gen_rtx_MEM (BLKmode,
8343 gen_rtx_PRE_DEC (BLKmode,
8344 stack_pointer_rtx)),
8345 gen_rtx_UNSPEC (BLKmode,
8346 gen_rtvec (1, reg),
8347 UNSPEC_PUSH_MULT));
8349 if (i != PC_REGNUM)
8351 tmp = gen_rtx_SET (VOIDmode,
8352 gen_rtx_MEM (SImode, stack_pointer_rtx),
8353 reg);
8354 RTX_FRAME_RELATED_P (tmp) = 1;
8355 XVECEXP (dwarf, 0, dwarf_par_index) = tmp;
8356 dwarf_par_index++;
8359 break;
8363 for (j = 1, i++; j < num_regs; i++)
8365 if (mask & (1 << i))
8367 reg = gen_rtx_REG (SImode, i);
8369 XVECEXP (par, 0, j) = gen_rtx_USE (VOIDmode, reg);
8371 if (i != PC_REGNUM)
8373 tmp = gen_rtx_SET (VOIDmode,
8374 gen_rtx_MEM (SImode,
8375 plus_constant (stack_pointer_rtx,
8376 4 * j)),
8377 reg);
8378 RTX_FRAME_RELATED_P (tmp) = 1;
8379 XVECEXP (dwarf, 0, dwarf_par_index++) = tmp;
8382 j++;
8386 par = emit_insn (par);
8388 tmp = gen_rtx_SET (SImode,
8389 stack_pointer_rtx,
8390 gen_rtx_PLUS (SImode,
8391 stack_pointer_rtx,
8392 GEN_INT (-4 * num_regs)));
8393 RTX_FRAME_RELATED_P (tmp) = 1;
8394 XVECEXP (dwarf, 0, 0) = tmp;
8396 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
8397 REG_NOTES (par));
8398 return par;
8401 static rtx
8402 emit_sfm (base_reg, count)
8403 int base_reg;
8404 int count;
8406 rtx par;
8407 rtx dwarf;
8408 rtx tmp, reg;
8409 int i;
8411 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
8412 dwarf = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
8414 reg = gen_rtx_REG (XFmode, base_reg++);
8416 XVECEXP (par, 0, 0)
8417 = gen_rtx_SET (VOIDmode,
8418 gen_rtx_MEM (BLKmode,
8419 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
8420 gen_rtx_UNSPEC (BLKmode,
8421 gen_rtvec (1, reg),
8422 UNSPEC_PUSH_MULT));
8423 tmp
8424 = gen_rtx_SET (VOIDmode,
8425 gen_rtx_MEM (XFmode,
8426 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
8427 reg);
8428 RTX_FRAME_RELATED_P (tmp) = 1;
8429 XVECEXP (dwarf, 0, count - 1) = tmp;
8431 for (i = 1; i < count; i++)
8433 reg = gen_rtx_REG (XFmode, base_reg++);
8434 XVECEXP (par, 0, i) = gen_rtx_USE (VOIDmode, reg);
8436 tmp = gen_rtx_SET (VOIDmode,
8437 gen_rtx_MEM (XFmode,
8438 gen_rtx_PRE_DEC (BLKmode,
8439 stack_pointer_rtx)),
8440 reg);
8441 RTX_FRAME_RELATED_P (tmp) = 1;
8442 XVECEXP (dwarf, 0, count - i - 1) = tmp;
8445 par = emit_insn (par);
8446 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
8447 REG_NOTES (par));
8448 return par;
8451 /* Compute the distance from register FROM to register TO.
8452 These can be the arg pointer (26), the soft frame pointer (25),
8453 the stack pointer (13) or the hard frame pointer (11).
8454 Typical stack layout looks like this:
8456 old stack pointer -> | |
8457 ----
8458 | | \
8459 | | saved arguments for
8460 | | vararg functions
8461 | | /
8463 hard FP & arg pointer -> | | \
8464 | | stack
8465 | | frame
8466 | | /
8468 | | \
8469 | | call saved
8470 | | registers
8471 soft frame pointer -> | | /
8473 | | \
8474 | | local
8475 | | variables
8476 | | /
8478 | | \
8479 | | outgoing
8480 | | arguments
8481 current stack pointer -> | | /
8484 For a given function some or all of these stack components
8485 may not be needed, giving rise to the possibility of
8486 eliminating some of the registers.
8488 The values returned by this function must reflect the behavior
8489 of arm_expand_prologue() and arm_compute_save_reg_mask().
8491 The sign of the number returned reflects the direction of stack
8492 growth, so the values are positive for all eliminations except
8493 from the soft frame pointer to the hard frame pointer. */
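/* A worked example with hypothetical numbers: given two call-saved core
   registers (8 bytes), a 16-byte stack frame, 8 bytes of locals and no
   outgoing arguments, the ARG_POINTER_REGNUM to STACK_POINTER_REGNUM
   case below returns 8 + 16 + 8 + 0 - 4 = 28.  */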
8495 unsigned int
8496 arm_compute_initial_elimination_offset (from, to)
8497 unsigned int from;
8498 unsigned int to;
8500 unsigned int local_vars = arm_get_frame_size ();
8501 unsigned int outgoing_args = current_function_outgoing_args_size;
8502 unsigned int stack_frame;
8503 unsigned int call_saved_registers;
8504 unsigned long func_type;
8506 func_type = arm_current_func_type ();
8508 /* Volatile functions never return, so there is
8509 no need to save call saved registers. */
8510 call_saved_registers = 0;
8511 if (! IS_VOLATILE (func_type))
8513 unsigned int reg_mask;
8514 unsigned int reg;
8516 /* Make sure that we compute which registers will be saved
8517 on the stack using the same algorithm that is used by
8518 arm_compute_save_reg_mask(). */
8519 reg_mask = arm_compute_save_reg0_reg12_mask ();
8521 /* Now count the number of bits set in reg_mask.
8522 For each set bit we need 4 bytes of stack space. */
8523 while (reg_mask)
8525 call_saved_registers += 4;
8526 reg_mask = reg_mask & ~ (reg_mask & - reg_mask);
8529 if (regs_ever_live[LR_REGNUM]
8530 /* If a stack frame is going to be created, the LR will
8531 be saved as part of that, so we do not need to allow
8532 for it here. */
8533 && ! frame_pointer_needed)
8534 call_saved_registers += 4;
8536 /* If the hard floating point registers are going to be
8537 used then they must be saved on the stack as well.
8538 Each register occupies 12 bytes of stack space. */
8539 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg ++)
8540 if (regs_ever_live[reg] && ! call_used_regs[reg])
8541 call_saved_registers += 12;
8544 /* The stack frame contains 4 registers - the old frame pointer,
8545 the old stack pointer, the return address and PC of the start
8546 of the function. */
8547 stack_frame = frame_pointer_needed ? 16 : 0;
8549 /* OK, now we have enough information to compute the distances.
8550 There must be an entry in these switch tables for each pair
8551 of registers in ELIMINABLE_REGS, even if some of the entries
8552 seem to be redundant or useless. */
8553 switch (from)
8555 case ARG_POINTER_REGNUM:
8556 switch (to)
8558 case THUMB_HARD_FRAME_POINTER_REGNUM:
8559 return 0;
8561 case FRAME_POINTER_REGNUM:
8562 /* This is the reverse of the soft frame pointer
8563 to hard frame pointer elimination below. */
8564 if (call_saved_registers == 0 && stack_frame == 0)
8565 return 0;
8566 return (call_saved_registers + stack_frame - 4);
8568 case ARM_HARD_FRAME_POINTER_REGNUM:
8569 /* If there is no stack frame then the hard
8570 frame pointer and the arg pointer coincide. */
8571 if (stack_frame == 0 && call_saved_registers != 0)
8572 return 0;
8573 /* FIXME: Not sure about this. Maybe we should always return 0 ? */
8574 return (frame_pointer_needed
8575 && current_function_needs_context
8576 && ! cfun->machine->uses_anonymous_args) ? 4 : 0;
8578 case STACK_POINTER_REGNUM:
8579 /* If nothing has been pushed on the stack at all
8580 then this will return -4. This *is* correct! */
8581 return call_saved_registers + stack_frame + local_vars + outgoing_args - 4;
8583 default:
8584 abort ();
8586 break;
8588 case FRAME_POINTER_REGNUM:
8589 switch (to)
8591 case THUMB_HARD_FRAME_POINTER_REGNUM:
8592 return 0;
8594 case ARM_HARD_FRAME_POINTER_REGNUM:
8595 /* The hard frame pointer points to the top entry in the
8596 stack frame. The soft frame pointer to the bottom entry
8597 in the stack frame. If there is no stack frame at all,
8598 then they are identical. */
8599 if (call_saved_registers == 0 && stack_frame == 0)
8600 return 0;
8601 return - (call_saved_registers + stack_frame - 4);
8603 case STACK_POINTER_REGNUM:
8604 return local_vars + outgoing_args;
8606 default:
8607 abort ();
8609 break;
8611 default:
8612 /* You cannot eliminate from the stack pointer.
8613 In theory you could eliminate from the hard frame
8614 pointer to the stack pointer, but this will never
8615 happen, since if a stack frame is not needed the
8616 hard frame pointer will never be used. */
8617 abort ();
8621 /* Calculate the size of the stack frame, taking into account any
8622 padding that is required to ensure stack-alignment. */
8624 HOST_WIDE_INT
8625 arm_get_frame_size ()
8627 int regno;
8629 int base_size = ROUND_UP_WORD (get_frame_size ());
8630 int entry_size = 0;
8631 unsigned long func_type = arm_current_func_type ();
8632 int leaf;
8634 if (! TARGET_ARM)
8635 abort();
8637 if (! TARGET_ATPCS)
8638 return base_size;
8640 /* We need to know if we are a leaf function. Unfortunately, it
8641 is possible to be called after start_sequence has been called,
8642 which causes get_insns to return the insns for the sequence,
8643 not the function, which will cause leaf_function_p to return
8644 the incorrect result.
8646 To work around this, we cache the computed frame size. This
8647 works because we will only be calling RTL expanders that need
8648 to know about leaf functions once reload has completed, and the
8649 frame size cannot be changed after that time, so we can safely
8650 use the cached value. */
8652 if (reload_completed)
8653 return cfun->machine->frame_size;
8655 leaf = leaf_function_p ();
8657 /* A leaf function does not need any stack alignment if it has nothing
8658 on the stack. */
8659 if (leaf && base_size == 0)
8661 cfun->machine->frame_size = 0;
8662 return 0;
8665 /* We know that SP will be word aligned on entry, and we must
8666 preserve that condition at any subroutine call. But those are
8667 the only constraints. */
8669 /* Space for variadic functions. */
8670 if (current_function_pretend_args_size)
8671 entry_size += current_function_pretend_args_size;
8673 /* Space for saved registers. */
8674 entry_size += bit_count (arm_compute_save_reg_mask ()) * 4;
8676 /* Space for saved FPA registers. */
8677 if (! IS_VOLATILE (func_type))
8679 for (regno = FIRST_ARM_FP_REGNUM; regno <= LAST_ARM_FP_REGNUM; regno++)
8680 if (regs_ever_live[regno] && ! call_used_regs[regno])
8681 entry_size += 12;
8684 if ((entry_size + base_size + current_function_outgoing_args_size) & 7)
8685 base_size += 4;
8686 if ((entry_size + base_size + current_function_outgoing_args_size) & 7)
8687 abort ();
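/* For example (hypothetical sizes): entry_size = 20, base_size = 12 and
   8 bytes of outgoing arguments sum to 40, a multiple of 8, so no padding
   is added; with base_size = 8 the total of 36 would trigger the 4-byte
   bump above, restoring 8-byte alignment.  */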
8689 cfun->machine->frame_size = base_size;
8691 return base_size;
8694 /* Generate the prologue instructions for entry into an ARM function. */
8696 void
8697 arm_expand_prologue ()
8699 int reg;
8700 rtx amount;
8701 rtx insn;
8702 rtx ip_rtx;
8703 unsigned long live_regs_mask;
8704 unsigned long func_type;
8705 int fp_offset = 0;
8706 int saved_pretend_args = 0;
8707 unsigned int args_to_push;
8709 func_type = arm_current_func_type ();
8711 /* Naked functions don't have prologues. */
8712 if (IS_NAKED (func_type))
8713 return;
8715 /* Make a copy of c_f_p_a_s as we may need to modify it locally. */
8716 args_to_push = current_function_pretend_args_size;
8718 /* Compute which registers we will have to save onto the stack. */
8719 live_regs_mask = arm_compute_save_reg_mask ();
8721 ip_rtx = gen_rtx_REG (SImode, IP_REGNUM);
8723 if (frame_pointer_needed)
8725 if (IS_INTERRUPT (func_type))
8727 /* Interrupt functions must not corrupt any registers.
8728 Creating a frame pointer however, corrupts the IP
8729 register, so we must push it first. */
8730 insn = emit_multi_reg_push (1 << IP_REGNUM);
8732 /* Do not set RTX_FRAME_RELATED_P on this insn.
8733 The dwarf stack unwinding code only wants to see one
8734 stack decrement per function, and this is not it. If
8735 this instruction is labeled as being part of the frame
8736 creation sequence then dwarf2out_frame_debug_expr will
8737 abort when it encounters the assignment of IP to FP
8738 later on, since the use of SP here establishes SP as
8739 the CFA register and not IP.
8741 Anyway this instruction is not really part of the stack
8742 frame creation although it is part of the prologue. */
8744 else if (IS_NESTED (func_type))
8746 /* The static chain register is the same as the IP register
8747 used as a scratch register during stack frame creation.
8748 To get around this we need to find somewhere to store IP
8749 whilst the frame is being created. We try the following
8750 places in order:
8752 1. The last argument register.
8753 2. A slot on the stack above the frame. (This only
8754 works if the function is not a varargs function).
8755 3. Register r3, after pushing the argument registers
8756 onto the stack.
8758 Note - we only need to tell the dwarf2 backend about the SP
8759 adjustment in the second variant; the static chain register
8760 doesn't need to be unwound, as it doesn't contain a value
8761 inherited from the caller. */
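/* A sketch of the second variant above (illustrative only): IP is saved
   with a pre-decrement store and later reloaded relative to the new
   frame pointer, roughly

       str ip, [sp, #-4]!     @ save IP above the frame
       ...
       ldr ip, [fp, #4]       @ recover the static chain later

   which matches the PRE_DEC store below and the FP-relative reload in
   the frame-pointer code further down.  */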
8763 if (regs_ever_live[3] == 0)
8765 insn = gen_rtx_REG (SImode, 3);
8766 insn = gen_rtx_SET (SImode, insn, ip_rtx);
8767 insn = emit_insn (insn);
8769 else if (args_to_push == 0)
8771 rtx dwarf;
8772 insn = gen_rtx_PRE_DEC (SImode, stack_pointer_rtx);
8773 insn = gen_rtx_MEM (SImode, insn);
8774 insn = gen_rtx_SET (VOIDmode, insn, ip_rtx);
8775 insn = emit_insn (insn);
8777 fp_offset = 4;
8779 /* Just tell the dwarf backend that we adjusted SP. */
8780 dwarf = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
8781 gen_rtx_PLUS (SImode, stack_pointer_rtx,
8782 GEN_INT (-fp_offset)));
8783 RTX_FRAME_RELATED_P (insn) = 1;
8784 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8785 dwarf, REG_NOTES (insn));
8787 else
8789 /* Store the args on the stack. */
8790 if (cfun->machine->uses_anonymous_args)
8791 insn = emit_multi_reg_push
8792 ((0xf0 >> (args_to_push / 4)) & 0xf);
8793 else
8794 insn = emit_insn
8795 (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
8796 GEN_INT (- args_to_push)));
8798 RTX_FRAME_RELATED_P (insn) = 1;
8800 saved_pretend_args = 1;
8801 fp_offset = args_to_push;
8802 args_to_push = 0;
8804 /* Now reuse r3 to preserve IP. */
8805 insn = gen_rtx_REG (SImode, 3);
8806 insn = gen_rtx_SET (SImode, insn, ip_rtx);
8807 (void) emit_insn (insn);
8811 if (fp_offset)
8813 insn = gen_rtx_PLUS (SImode, stack_pointer_rtx, GEN_INT (fp_offset));
8814 insn = gen_rtx_SET (SImode, ip_rtx, insn);
8816 else
8817 insn = gen_movsi (ip_rtx, stack_pointer_rtx);
8819 insn = emit_insn (insn);
8820 RTX_FRAME_RELATED_P (insn) = 1;
8823 if (args_to_push)
8825 /* Push the argument registers, or reserve space for them. */
8826 if (cfun->machine->uses_anonymous_args)
8827 insn = emit_multi_reg_push
8828 ((0xf0 >> (args_to_push / 4)) & 0xf);
8829 else
8830 insn = emit_insn
8831 (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
8832 GEN_INT (- args_to_push)));
8833 RTX_FRAME_RELATED_P (insn) = 1;
8836 /* If this is an interrupt service routine, and the link register
8837 is going to be pushed, and we are not creating a stack frame,
8838 (which would involve an extra push of IP and a pop in the epilogue)
8839 subtracting four from LR now will mean that the function return
8840 can be done with a single instruction. */
8841 if ((func_type == ARM_FT_ISR || func_type == ARM_FT_FIQ)
8842 && (live_regs_mask & (1 << LR_REGNUM)) != 0
8843 && ! frame_pointer_needed)
8844 emit_insn (gen_rtx_SET (SImode,
8845 gen_rtx_REG (SImode, LR_REGNUM),
8846 gen_rtx_PLUS (SImode,
8847 gen_rtx_REG (SImode, LR_REGNUM),
8848 GEN_INT (-4))));
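/* Illustrative effect: after the "sub lr, lr, #4" emitted here, an ISR
   whose registers are popped with the return address in the list, e.g.
   "ldmfd sp!, {..., pc}^", resumes the interrupted instruction directly,
   with no separate "subs pc, lr, #4" needed in the epilogue.  */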
8850 if (live_regs_mask)
8852 insn = emit_multi_reg_push (live_regs_mask);
8853 RTX_FRAME_RELATED_P (insn) = 1;
8856 if (! IS_VOLATILE (func_type))
8858 /* Save any floating point call-saved registers used by this function. */
8859 if (arm_fpu_arch == FP_SOFT2)
8861 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg --)
8862 if (regs_ever_live[reg] && !call_used_regs[reg])
8864 insn = gen_rtx_PRE_DEC (XFmode, stack_pointer_rtx);
8865 insn = gen_rtx_MEM (XFmode, insn);
8866 insn = emit_insn (gen_rtx_SET (VOIDmode, insn,
8867 gen_rtx_REG (XFmode, reg)));
8868 RTX_FRAME_RELATED_P (insn) = 1;
8871 else
8873 int start_reg = LAST_ARM_FP_REGNUM;
8875 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg --)
8877 if (regs_ever_live[reg] && !call_used_regs[reg])
8879 if (start_reg - reg == 3)
8881 insn = emit_sfm (reg, 4);
8882 RTX_FRAME_RELATED_P (insn) = 1;
8883 start_reg = reg - 1;
8886 else
8888 if (start_reg != reg)
8890 insn = emit_sfm (reg + 1, start_reg - reg);
8891 RTX_FRAME_RELATED_P (insn) = 1;
8893 start_reg = reg - 1;
8897 if (start_reg != reg)
8899 insn = emit_sfm (reg + 1, start_reg - reg);
8900 RTX_FRAME_RELATED_P (insn) = 1;
8905 if (frame_pointer_needed)
8907 /* Create the new frame pointer. */
8908 insn = GEN_INT (-(4 + args_to_push + fp_offset));
8909 insn = emit_insn (gen_addsi3 (hard_frame_pointer_rtx, ip_rtx, insn));
8910 RTX_FRAME_RELATED_P (insn) = 1;
8912 if (IS_NESTED (func_type))
8914 /* Recover the static chain register. */
8915 if (regs_ever_live [3] == 0
8916 || saved_pretend_args)
8917 insn = gen_rtx_REG (SImode, 3);
8918 else /* if (current_function_pretend_args_size == 0) */
8920 insn = gen_rtx_PLUS (SImode, hard_frame_pointer_rtx, GEN_INT (4));
8921 insn = gen_rtx_MEM (SImode, insn);
8924 emit_insn (gen_rtx_SET (SImode, ip_rtx, insn));
8925 /* Add a USE to stop propagate_one_insn() from barfing. */
8926 emit_insn (gen_prologue_use (ip_rtx));
8930 amount = GEN_INT (-(arm_get_frame_size ()
8931 + current_function_outgoing_args_size));
8933 if (amount != const0_rtx)
8935 /* This add can produce multiple insns for a large constant, so we
8936 need to get tricky. */
8937 rtx last = get_last_insn ();
8938 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
8939 amount));
8940 do
8941 {
8942 last = last ? NEXT_INSN (last) : get_insns ();
8943 RTX_FRAME_RELATED_P (last) = 1;
8944 }
8945 while (last != insn);
8947 /* If the frame pointer is needed, emit a special barrier that
8948 will prevent the scheduler from moving stores to the frame
8949 before the stack adjustment. */
8950 if (frame_pointer_needed)
8951 insn = emit_insn (gen_stack_tie (stack_pointer_rtx,
8952 hard_frame_pointer_rtx));
8955 /* If we are profiling, make sure no instructions are scheduled before
8956 the call to mcount. Similarly if the user has requested no
8957 scheduling in the prolog. */
8958 if (current_function_profile || TARGET_NO_SCHED_PRO)
8959 emit_insn (gen_blockage ());
8961 /* If the link register is being kept alive, with the return address in it,
8962 then make sure that it does not get reused by the ce2 pass. */
8963 if ((live_regs_mask & (1 << LR_REGNUM)) == 0)
8965 emit_insn (gen_prologue_use (gen_rtx_REG (SImode, LR_REGNUM)));
8966 cfun->machine->lr_save_eliminated = 1;
8970 /* If CODE is 'd', then the X is a condition operand and the instruction
8971 should only be executed if the condition is true.
8972 if CODE is 'D', then the X is a condition operand and the instruction
8973 should only be executed if the condition is false: however, if the mode
8974 of the comparison is CCFPEmode, then always execute the instruction -- we
8975 do this because in these circumstances !GE does not necessarily imply LT;
8976 in these cases the instruction pattern will take care to make sure that
8977 an instruction containing %d will follow, thereby undoing the effects of
8978 doing this instruction unconditionally.
8979 If CODE is 'N' then X is a floating point operand that must be negated
8980 before output.
8981 If CODE is 'B' then output a bitwise inverted value of X (a const int).
8982 If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */
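/* Two small examples (illustrative operand numbers): with operand 1 a
   CONST_INT of 5, "%B1" prints -6 (the bitwise inverse, sign extended);
   with operand 0 the register r4 holding a DImode value, "%M0" prints
   "{r4-r5}", suitable for an ldm/stm register list.  */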
8984 void
8985 arm_print_operand (stream, x, code)
8986 FILE * stream;
8987 rtx x;
8988 int code;
8990 switch (code)
8992 case '@':
8993 fputs (ASM_COMMENT_START, stream);
8994 return;
8996 case '_':
8997 fputs (user_label_prefix, stream);
8998 return;
9000 case '|':
9001 fputs (REGISTER_PREFIX, stream);
9002 return;
9004 case '?':
9005 if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
9007 if (TARGET_THUMB || current_insn_predicate != NULL)
9008 abort ();
9010 fputs (arm_condition_codes[arm_current_cc], stream);
9012 else if (current_insn_predicate)
9014 enum arm_cond_code code;
9016 if (TARGET_THUMB)
9017 abort ();
9019 code = get_arm_condition_code (current_insn_predicate);
9020 fputs (arm_condition_codes[code], stream);
9022 return;
9024 case 'N':
9026 REAL_VALUE_TYPE r;
9027 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
9028 r = REAL_VALUE_NEGATE (r);
9029 fprintf (stream, "%s", fp_const_from_val (&r));
9031 return;
9033 case 'B':
9034 if (GET_CODE (x) == CONST_INT)
9036 HOST_WIDE_INT val;
9037 val = ARM_SIGN_EXTEND (~INTVAL (x));
9038 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
9040 else
9042 putc ('~', stream);
9043 output_addr_const (stream, x);
9045 return;
9047 case 'i':
9048 fprintf (stream, "%s", arithmetic_instr (x, 1));
9049 return;
9051 case 'I':
9052 fprintf (stream, "%s", arithmetic_instr (x, 0));
9053 return;
9055 case 'S':
9057 HOST_WIDE_INT val;
9058 const char * shift = shift_op (x, &val);
9060 if (shift)
9062 fprintf (stream, ", %s ", shift_op (x, &val));
9063 if (val == -1)
9064 arm_print_operand (stream, XEXP (x, 1), 0);
9065 else
9067 fputc ('#', stream);
9068 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
9072 return;
9074 /* An explanation of the 'Q', 'R' and 'H' register operands:
9076 In a pair of registers containing a DI or DF value the 'Q'
9077 operand returns the register number of the register containing
9078 the least significant part of the value. The 'R' operand returns
9079 the register number of the register containing the most
9080 significant part of the value.
9082 The 'H' operand returns the higher of the two register numbers.
9083 On a run where WORDS_BIG_ENDIAN is true the 'H' operand is the
9084 same as the 'Q' operand, since the most significant part of the
9085 value is held in the lower number register. The reverse is true
9086 on systems where WORDS_BIG_ENDIAN is false.
9088 The purpose of these operands is to distinguish between cases
9089 where the endian-ness of the values is important (for example
9090 when they are added together), and cases where the endian-ness
9091 is irrelevant, but the order of register operations is important.
9092 For example when loading a value from memory into a register
9093 pair, the endian-ness does not matter. Provided that the value
9094 from the lower memory address is put into the lower numbered
9095 register, and the value from the higher address is put into the
9096 higher numbered register, the load will work regardless of whether
9097 the value being loaded is big-wordian or little-wordian. The
9098 order of the two register loads can matter however, if the address
9099 of the memory location is actually held in one of the registers
9100 being overwritten by the load. */
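/* A concrete example, assuming little-endian word order: for a DImode
   value held in r0/r1, 'Q' prints r0 (least significant word), 'R'
   prints r1 (most significant word) and 'H' also prints r1. When
   WORDS_BIG_ENDIAN is true, 'Q' and 'R' swap, while 'H' still prints
   r1.  */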
9101 case 'Q':
9102 if (REGNO (x) > LAST_ARM_REGNUM)
9103 abort ();
9104 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 1 : 0));
9105 return;
9107 case 'R':
9108 if (REGNO (x) > LAST_ARM_REGNUM)
9109 abort ();
9110 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 0 : 1));
9111 return;
9113 case 'H':
9114 if (REGNO (x) > LAST_ARM_REGNUM)
9115 abort ();
9116 asm_fprintf (stream, "%r", REGNO (x) + 1);
9117 return;
9119 case 'm':
9120 asm_fprintf (stream, "%r",
9121 GET_CODE (XEXP (x, 0)) == REG
9122 ? REGNO (XEXP (x, 0)) : REGNO (XEXP (XEXP (x, 0), 0)));
9123 return;
9125 case 'M':
9126 asm_fprintf (stream, "{%r-%r}",
9127 REGNO (x),
9128 REGNO (x) + ARM_NUM_REGS (GET_MODE (x)) - 1);
9129 return;
9131 case 'd':
9132 /* CONST_TRUE_RTX means always -- that's the default. */
9133 if (x == const_true_rtx)
9134 return;
9136 if (TARGET_ARM)
9137 fputs (arm_condition_codes[get_arm_condition_code (x)],
9138 stream);
9139 else
9140 fputs (thumb_condition_code (x, 0), stream);
9141 return;
9143 case 'D':
9144 /* CONST_TRUE_RTX means not always -- ie never. We shouldn't ever
9145 want to do that. */
9146 if (x == const_true_rtx)
9147 abort ();
9149 if (TARGET_ARM)
9150 fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
9151 (get_arm_condition_code (x))],
9152 stream);
9153 else
9154 fputs (thumb_condition_code (x, 1), stream);
9155 return;
9157 default:
9158 if (x == 0)
9159 abort ();
9161 if (GET_CODE (x) == REG)
9162 asm_fprintf (stream, "%r", REGNO (x));
9163 else if (GET_CODE (x) == MEM)
9165 output_memory_reference_mode = GET_MODE (x);
9166 output_address (XEXP (x, 0));
9168 else if (GET_CODE (x) == CONST_DOUBLE)
9169 fprintf (stream, "#%s", fp_immediate_constant (x));
9170 else if (GET_CODE (x) == NEG)
9171 abort (); /* This should never happen now. */
9172 else
9174 fputc ('#', stream);
9175 output_addr_const (stream, x);
9180 #ifndef AOF_ASSEMBLER
9181 /* Target hook for assembling integer objects. The ARM version needs to
9182 handle word-sized values specially. */
9184 static bool
9185 arm_assemble_integer (x, size, aligned_p)
9186 rtx x;
9187 unsigned int size;
9188 int aligned_p;
9190 if (size == UNITS_PER_WORD && aligned_p)
9192 fputs ("\t.word\t", asm_out_file);
9193 output_addr_const (asm_out_file, x);
9195 /* Mark symbols as position independent. We only do this in the
9196 .text segment, not in the .data segment. */
9197 if (NEED_GOT_RELOC && flag_pic && making_const_table &&
9198 (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF))
9200 if (GET_CODE (x) == SYMBOL_REF
9201 && (CONSTANT_POOL_ADDRESS_P (x)
9202 || ENCODED_SHORT_CALL_ATTR_P (XSTR (x, 0))))
9203 fputs ("(GOTOFF)", asm_out_file);
9204 else if (GET_CODE (x) == LABEL_REF)
9205 fputs ("(GOTOFF)", asm_out_file);
9206 else
9207 fputs ("(GOT)", asm_out_file);
9209 fputc ('\n', asm_out_file);
9210 return true;
9213 return default_assemble_integer (x, size, aligned_p);
9215 #endif
9217 /* A finite state machine takes care of noticing whether or not instructions
9218 can be conditionally executed, and thus decrease execution time and code
9219 size by deleting branch instructions. The fsm is controlled by
9220 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
9222 /* The state of the fsm controlling condition codes are:
9223 0: normal, do nothing special
9224 1: make ASM_OUTPUT_OPCODE not output this instruction
9225 2: make ASM_OUTPUT_OPCODE not output this instruction
9226 3: make instructions conditional
9227 4: make instructions conditional
9229 State transitions (state->state by whom under condition):
9230 0 -> 1 final_prescan_insn if the `target' is a label
9231 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
9232 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
9233 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
9234 3 -> 0 (*targetm.asm_out.internal_label) if the `target' label is reached
9235 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
9236 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
9237 (the target insn is arm_target_insn).
9239 If the jump clobbers the conditions then we use states 2 and 4.
9241 A similar thing can be done with conditional return insns.
9243 XXX In case the `target' is an unconditional branch, this conditionalising
9244 of the instructions always reduces code size, but not always execution
9245 time. But then, I want to reduce the code size to somewhere near what
9246 /bin/cc produces. */
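/* As a sketch of the transformation, a conditional branch around a
   single instruction, such as

       cmp   r0, #0
       beq   .L1
       add   r1, r1, #1
     .L1:

   is rewritten by suppressing the branch and predicating the body:

       cmp   r0, #0
       addne r1, r1, #1  */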
9248 /* Returns the index of the ARM condition code string in
9249 `arm_condition_codes'. COMPARISON should be an rtx like
9250 `(eq (...) (...))'. */
9252 static enum arm_cond_code
9253 get_arm_condition_code (comparison)
9254 rtx comparison;
9256 enum machine_mode mode = GET_MODE (XEXP (comparison, 0));
9257 int code;
9258 enum rtx_code comp_code = GET_CODE (comparison);
9260 if (GET_MODE_CLASS (mode) != MODE_CC)
9261 mode = SELECT_CC_MODE (comp_code, XEXP (comparison, 0),
9262 XEXP (comparison, 1));
9264 switch (mode)
9266 case CC_DNEmode: code = ARM_NE; goto dominance;
9267 case CC_DEQmode: code = ARM_EQ; goto dominance;
9268 case CC_DGEmode: code = ARM_GE; goto dominance;
9269 case CC_DGTmode: code = ARM_GT; goto dominance;
9270 case CC_DLEmode: code = ARM_LE; goto dominance;
9271 case CC_DLTmode: code = ARM_LT; goto dominance;
9272 case CC_DGEUmode: code = ARM_CS; goto dominance;
9273 case CC_DGTUmode: code = ARM_HI; goto dominance;
9274 case CC_DLEUmode: code = ARM_LS; goto dominance;
9275 case CC_DLTUmode: code = ARM_CC;
9277 dominance:
9278 if (comp_code != EQ && comp_code != NE)
9279 abort ();
9281 if (comp_code == EQ)
9282 return ARM_INVERSE_CONDITION_CODE (code);
9283 return code;
9285 case CC_NOOVmode:
9286 switch (comp_code)
9288 case NE: return ARM_NE;
9289 case EQ: return ARM_EQ;
9290 case GE: return ARM_PL;
9291 case LT: return ARM_MI;
9292 default: abort ();
9295 case CC_Zmode:
9296 switch (comp_code)
9298 case NE: return ARM_NE;
9299 case EQ: return ARM_EQ;
9300 default: abort ();
9303 case CCFPEmode:
9304 case CCFPmode:
9305 /* These encodings assume that AC=1 in the FPA system control
9306 byte. This allows us to handle all cases except UNEQ and
9307 LTGT. */
9308 switch (comp_code)
9310 case GE: return ARM_GE;
9311 case GT: return ARM_GT;
9312 case LE: return ARM_LS;
9313 case LT: return ARM_MI;
9314 case NE: return ARM_NE;
9315 case EQ: return ARM_EQ;
9316 case ORDERED: return ARM_VC;
9317 case UNORDERED: return ARM_VS;
9318 case UNLT: return ARM_LT;
9319 case UNLE: return ARM_LE;
9320 case UNGT: return ARM_HI;
9321 case UNGE: return ARM_PL;
9322 /* UNEQ and LTGT do not have a representation. */
9323 case UNEQ: /* Fall through. */
9324 case LTGT: /* Fall through. */
9325 default: abort ();
9328 case CC_SWPmode:
9329 switch (comp_code)
9331 case NE: return ARM_NE;
9332 case EQ: return ARM_EQ;
9333 case GE: return ARM_LE;
9334 case GT: return ARM_LT;
9335 case LE: return ARM_GE;
9336 case LT: return ARM_GT;
9337 case GEU: return ARM_LS;
9338 case GTU: return ARM_CC;
9339 case LEU: return ARM_CS;
9340 case LTU: return ARM_HI;
9341 default: abort ();
9344 case CC_Cmode:
9345 switch (comp_code)
9347 case LTU: return ARM_CS;
9348 case GEU: return ARM_CC;
9349 default: abort ();
9352 case CCmode:
9353 switch (comp_code)
9355 case NE: return ARM_NE;
9356 case EQ: return ARM_EQ;
9357 case GE: return ARM_GE;
9358 case GT: return ARM_GT;
9359 case LE: return ARM_LE;
9360 case LT: return ARM_LT;
9361 case GEU: return ARM_CS;
9362 case GTU: return ARM_HI;
9363 case LEU: return ARM_LS;
9364 case LTU: return ARM_CC;
9365 default: abort ();
9368 default: abort ();
9371 abort ();
9375 void
9376 arm_final_prescan_insn (insn)
9377 rtx insn;
9379 /* BODY will hold the body of INSN. */
9380 rtx body = PATTERN (insn);
9382 /* This will be 1 if trying to repeat the trick, and things need to be
9383 reversed if it appears to fail. */
9384 int reverse = 0;
9386 /* JUMP_CLOBBERS being nonzero implies that the condition codes are
9387 clobbered if a branch is taken, even if the rtl suggests otherwise. It also
9388 means that we have to grub around within the jump expression to find
9389 out what the conditions are when the jump isn't taken. */
9390 int jump_clobbers = 0;
9392 /* If we start with a return insn, we only succeed if we find another one. */
9393 int seeking_return = 0;
9395 /* START_INSN will hold the insn from where we start looking. This is the
9396 first insn after the following code_label if REVERSE is true. */
9397 rtx start_insn = insn;
9399 /* If in state 4, check if the target branch is reached, in order to
9400 change back to state 0. */
9401 if (arm_ccfsm_state == 4)
9403 if (insn == arm_target_insn)
9405 arm_target_insn = NULL;
9406 arm_ccfsm_state = 0;
9408 return;
9411 /* If in state 3, it is possible to repeat the trick, if this insn is an
9412 unconditional branch to a label, and immediately following this branch
9413 is the previous target label which is only used once, and the label this
9414 branch jumps to is not too far off. */
9415 if (arm_ccfsm_state == 3)
9417 if (simplejump_p (insn))
9419 start_insn = next_nonnote_insn (start_insn);
9420 if (GET_CODE (start_insn) == BARRIER)
9422 /* XXX Isn't this always a barrier? */
9423 start_insn = next_nonnote_insn (start_insn);
9425 if (GET_CODE (start_insn) == CODE_LABEL
9426 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
9427 && LABEL_NUSES (start_insn) == 1)
9428 reverse = TRUE;
9429 else
9430 return;
9432 else if (GET_CODE (body) == RETURN)
9434 start_insn = next_nonnote_insn (start_insn);
9435 if (GET_CODE (start_insn) == BARRIER)
9436 start_insn = next_nonnote_insn (start_insn);
9437 if (GET_CODE (start_insn) == CODE_LABEL
9438 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
9439 && LABEL_NUSES (start_insn) == 1)
9441 reverse = TRUE;
9442 seeking_return = 1;
9444 else
9445 return;
9447 else
9448 return;
9451 if (arm_ccfsm_state != 0 && !reverse)
9452 abort ();
9453 if (GET_CODE (insn) != JUMP_INSN)
9454 return;
9456 /* This jump might be paralleled with a clobber of the condition codes;
9457 the jump should always come first. */
9458 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
9459 body = XVECEXP (body, 0, 0);
9461 #if 0
9462 /* If this is a conditional return then we don't want to know */
9463 if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
9464 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
9465 && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
9466 || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
9467 return;
9468 #endif
9470 if (reverse
9471 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
9472 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
9474 int insns_skipped;
9475 int fail = FALSE, succeed = FALSE;
9476 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
9477 int then_not_else = TRUE;
9478 rtx this_insn = start_insn, label = 0;
9480 /* If the jump cannot be done with one instruction, we cannot
9481 conditionally execute the instruction in the inverse case. */
9482 if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
9484 jump_clobbers = 1;
9485 return;
9488 /* Register the insn jumped to. */
9489 if (reverse)
9491 if (!seeking_return)
9492 label = XEXP (SET_SRC (body), 0);
9494 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
9495 label = XEXP (XEXP (SET_SRC (body), 1), 0);
9496 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
9498 label = XEXP (XEXP (SET_SRC (body), 2), 0);
9499 then_not_else = FALSE;
9501 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
9502 seeking_return = 1;
9503 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
9505 seeking_return = 1;
9506 then_not_else = FALSE;
9508 else
9509 abort ();
9511 /* See how many insns this branch skips, and what kind of insns. If all
9512 insns are okay, and the label or unconditional branch to the same
9513 label is not too far away, succeed. */
9514 for (insns_skipped = 0;
9515 !fail && !succeed && insns_skipped++ < max_insns_skipped;)
9517 rtx scanbody;
9519 this_insn = next_nonnote_insn (this_insn);
9520 if (!this_insn)
9521 break;
9523 switch (GET_CODE (this_insn))
9525 case CODE_LABEL:
9526 /* Succeed if it is the target label, otherwise fail since
9527 control falls in from somewhere else. */
9528 if (this_insn == label)
9530 if (jump_clobbers)
9532 arm_ccfsm_state = 2;
9533 this_insn = next_nonnote_insn (this_insn);
9535 else
9536 arm_ccfsm_state = 1;
9537 succeed = TRUE;
9539 else
9540 fail = TRUE;
9541 break;
9543 case BARRIER:
9544 /* Succeed if the following insn is the target label.
9545 Otherwise fail.
9546 If return insns are used then the last insn in a function
9547 will be a barrier. */
9548 this_insn = next_nonnote_insn (this_insn);
9549 if (this_insn && this_insn == label)
9551 if (jump_clobbers)
9553 arm_ccfsm_state = 2;
9554 this_insn = next_nonnote_insn (this_insn);
9556 else
9557 arm_ccfsm_state = 1;
9558 succeed = TRUE;
9560 else
9561 fail = TRUE;
9562 break;
9564 case CALL_INSN:
9565 /* If using 32-bit addresses the cc is not preserved over
9566 calls. */
9567 if (TARGET_APCS_32)
9569 /* Succeed if the following insn is the target label,
9570 or if the following two insns are a barrier and
9571 the target label. */
9572 this_insn = next_nonnote_insn (this_insn);
9573 if (this_insn && GET_CODE (this_insn) == BARRIER)
9574 this_insn = next_nonnote_insn (this_insn);
9576 if (this_insn && this_insn == label
9577 && insns_skipped < max_insns_skipped)
9579 if (jump_clobbers)
9581 arm_ccfsm_state = 2;
9582 this_insn = next_nonnote_insn (this_insn);
9584 else
9585 arm_ccfsm_state = 1;
9586 succeed = TRUE;
9588 else
9589 fail = TRUE;
9591 break;
9593 case JUMP_INSN:
9594 /* If this is an unconditional branch to the same label, succeed.
9595 If it is to another label, do nothing. If it is conditional,
9596 fail. */
9597 /* XXX Probably, the tests for SET and the PC are unnecessary. */
9599 scanbody = PATTERN (this_insn);
9600 if (GET_CODE (scanbody) == SET
9601 && GET_CODE (SET_DEST (scanbody)) == PC)
9603 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
9604 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
9606 arm_ccfsm_state = 2;
9607 succeed = TRUE;
9609 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
9610 fail = TRUE;
9612 /* Fail if a conditional return is undesirable (eg on a
9613 StrongARM), but still allow this if optimizing for size. */
9614 else if (GET_CODE (scanbody) == RETURN
9615 && !use_return_insn (TRUE)
9616 && !optimize_size)
9617 fail = TRUE;
9618 else if (GET_CODE (scanbody) == RETURN
9619 && seeking_return)
9621 arm_ccfsm_state = 2;
9622 succeed = TRUE;
9624 else if (GET_CODE (scanbody) == PARALLEL)
9626 switch (get_attr_conds (this_insn))
9628 case CONDS_NOCOND:
9629 break;
9630 default:
9631 fail = TRUE;
9632 break;
9635 else
9636 fail = TRUE; /* Unrecognized jump (eg epilogue). */
9638 break;
9640 case INSN:
9641 /* Instructions using or affecting the condition codes make it
9642 fail. */
9643 scanbody = PATTERN (this_insn);
9644 if (!(GET_CODE (scanbody) == SET
9645 || GET_CODE (scanbody) == PARALLEL)
9646 || get_attr_conds (this_insn) != CONDS_NOCOND)
9647 fail = TRUE;
9648 break;
9650 default:
9651 break;
9654 if (succeed)
9656 if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
9657 arm_target_label = CODE_LABEL_NUMBER (label);
9658 else if (seeking_return || arm_ccfsm_state == 2)
9660 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
9662 this_insn = next_nonnote_insn (this_insn);
9663 if (this_insn && (GET_CODE (this_insn) == BARRIER
9664 || GET_CODE (this_insn) == CODE_LABEL))
9665 abort ();
9667 if (!this_insn)
9669 /* Oh, dear! We ran off the end... give up. */
9670 recog (PATTERN (insn), insn, NULL);
9671 arm_ccfsm_state = 0;
9672 arm_target_insn = NULL;
9673 return;
9675 arm_target_insn = this_insn;
9677 else
9678 abort ();
9679 if (jump_clobbers)
9681 if (reverse)
9682 abort ();
9683 arm_current_cc =
9684 get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
9685 0), 0), 1));
9686 if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
9687 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
9688 if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
9689 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
9691 else
9693 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
9694 what it was. */
9695 if (!reverse)
9696 arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
9697 0));
9700 if (reverse || then_not_else)
9701 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
9704 /* Restore recog_data (getting the attributes of other insns can
9705 destroy this array, but final.c assumes that it remains intact
9706 across this call; since the insn has been recognized already we
9707 call recog direct). */
9708 recog (PATTERN (insn), insn, NULL);
9712 /* Returns true if REGNO is a valid register
9713 for holding a quantity of type MODE. */
9715 int
9716 arm_hard_regno_mode_ok (regno, mode)
9717 unsigned int regno;
9718 enum machine_mode mode;
9720 if (GET_MODE_CLASS (mode) == MODE_CC)
9721 return regno == CC_REGNUM;
9723 if (TARGET_THUMB)
9724 /* For the Thumb we only allow values bigger than SImode in
9725 registers 0 - 6, so that there is always a second low
9726 register available to hold the upper part of the value.
9727 We probably ought to ensure that the register is the
9728 start of an even numbered register pair. */
9729 return (ARM_NUM_REGS (mode) < 2) || (regno < LAST_LO_REGNUM);
9731 if (regno <= LAST_ARM_REGNUM)
9732 /* We allow any value to be stored in the general registers. */
9733 return 1;
9735 if ( regno == FRAME_POINTER_REGNUM
9736 || regno == ARG_POINTER_REGNUM)
9737 /* We only allow integers in the fake hard registers. */
9738 return GET_MODE_CLASS (mode) == MODE_INT;
9740 /* The only registers left are the FPU registers
9741 which we only allow to hold FP values. */
9742 return GET_MODE_CLASS (mode) == MODE_FLOAT
9743 && regno >= FIRST_ARM_FP_REGNUM
9744 && regno <= LAST_ARM_FP_REGNUM;
9747 enum reg_class
9748 arm_regno_class (regno)
9749 int regno;
9751 if (TARGET_THUMB)
9753 if (regno == STACK_POINTER_REGNUM)
9754 return STACK_REG;
9755 if (regno == CC_REGNUM)
9756 return CC_REG;
9757 if (regno < 8)
9758 return LO_REGS;
9759 return HI_REGS;
9762 if ( regno <= LAST_ARM_REGNUM
9763 || regno == FRAME_POINTER_REGNUM
9764 || regno == ARG_POINTER_REGNUM)
9765 return GENERAL_REGS;
9767 if (regno == CC_REGNUM)
9768 return NO_REGS;
9770 return FPU_REGS;
9773 /* Handle a special case when computing the offset
9774 of an argument from the frame pointer. */
9776 int
9777 arm_debugger_arg_offset (value, addr)
9778 int value;
9779 rtx addr;
9781 rtx insn;
9783 /* We are only interested if dbxout_parms() failed to compute the offset. */
9784 if (value != 0)
9785 return 0;
9787 /* We can only cope with the case where the address is held in a register. */
9788 if (GET_CODE (addr) != REG)
9789 return 0;
9791 /* If we are using the frame pointer to point at the argument, then
9792 an offset of 0 is correct. */
9793 if (REGNO (addr) == (unsigned) HARD_FRAME_POINTER_REGNUM)
9794 return 0;
9796 /* If we are using the stack pointer to point at the
9797 argument, then an offset of 0 is correct. */
9798 if ((TARGET_THUMB || !frame_pointer_needed)
9799 && REGNO (addr) == SP_REGNUM)
9800 return 0;
9802 /* Oh dear. The argument is pointed to by a register rather
9803 than being held in a register, or being stored at a known
9804 offset from the frame pointer. Since GDB only understands
9805 those two kinds of argument we must translate the address
9806 held in the register into an offset from the frame pointer.
9807 We do this by searching through the insns for the function
9808 looking to see where this register gets its value. If the
9809 register is initialized from the frame pointer plus an offset
9810 then we are in luck and we can continue, otherwise we give up.
9812 This code is exercised by producing debugging information
9813 for a function with arguments like this:
9815 double func (double a, double b, int c, double d) {return d;}
9817 Without this code the stab for parameter 'd' will be set to
9818 an offset of 0 from the frame pointer, rather than 8. */
9820 /* The if() statement says:
9822 If the insn is a normal instruction
9823 and if the insn is setting the value in a register
9824 and if the register being set is the register holding the address of the argument
9825 and if the address is computed by an addition
9826 that involves adding to a register
9827 which is the frame pointer
9828 a constant integer
9830 then... */
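/* In rtl terms the condition below looks for an insn of the form
   (hypothetical register numbers):

       (set (reg:SI 3)
            (plus:SI (reg:SI 11) (const_int 8)))

   where reg 11 is the ARM hard frame pointer; the INTVAL of the
   constant then becomes the offset reported to the debugger.  */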
9832 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9834 if ( GET_CODE (insn) == INSN
9835 && GET_CODE (PATTERN (insn)) == SET
9836 && REGNO (XEXP (PATTERN (insn), 0)) == REGNO (addr)
9837 && GET_CODE (XEXP (PATTERN (insn), 1)) == PLUS
9838 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 0)) == REG
9839 && REGNO (XEXP (XEXP (PATTERN (insn), 1), 0)) == (unsigned) HARD_FRAME_POINTER_REGNUM
9840 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 1)) == CONST_INT
9843 value = INTVAL (XEXP (XEXP (PATTERN (insn), 1), 1));
9845 break;
9849 if (value == 0)
9851 debug_rtx (addr);
9852 warning ("unable to compute real location of stacked parameter");
9853 value = 8; /* XXX magic hack */
9856 return value;
9859 #define def_builtin(NAME, TYPE, CODE) \
9860 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, NULL, NULL_TREE)
9862 void
9863 arm_init_builtins ()
9865 tree endlink = void_list_node;
9866 tree int_endlink = tree_cons (NULL_TREE, integer_type_node, endlink);
9867 tree pchar_type_node = build_pointer_type (char_type_node);
9869 tree int_ftype_int, void_ftype_pchar;
9871 /* void func (char *) */
9872 void_ftype_pchar
9873 = build_function_type_list (void_type_node, pchar_type_node, NULL_TREE);
9875 /* int func (int) */
9876 int_ftype_int
9877 = build_function_type (integer_type_node, int_endlink);
9879 /* Initialize arm V5 builtins. */
9880 if (arm_arch5)
9881 def_builtin ("__builtin_clz", int_ftype_int, ARM_BUILTIN_CLZ);
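/* Usage sketch: on an ARM v5 target the builtin registered above lets
   user code such as

       int leading_zeros (int x) { return __builtin_clz (x); }

   expand through CODE_FOR_clz into a single clz instruction; see
   arm_expand_builtin() below.  */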
9884 /* Expand an expression EXP that calls a built-in function,
9885 with result going to TARGET if that's convenient
9886 (and in mode MODE if that's convenient).
9887 SUBTARGET may be used as the target for computing one of EXP's operands.
9888 IGNORE is nonzero if the value is to be ignored. */
9890 rtx
9891 arm_expand_builtin (exp, target, subtarget, mode, ignore)
9892 tree exp;
9893 rtx target;
9894 rtx subtarget ATTRIBUTE_UNUSED;
9895 enum machine_mode mode ATTRIBUTE_UNUSED;
9896 int ignore ATTRIBUTE_UNUSED;
9898 enum insn_code icode;
9899 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9900 tree arglist = TREE_OPERAND (exp, 1);
9901 tree arg0;
9902 rtx op0, pat;
9903 enum machine_mode tmode, mode0;
9904 int fcode = DECL_FUNCTION_CODE (fndecl);
9906 switch (fcode)
9908 default:
9909 break;
9911 case ARM_BUILTIN_CLZ:
9912 icode = CODE_FOR_clz;
9913 arg0 = TREE_VALUE (arglist);
9914 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
9915 tmode = insn_data[icode].operand[0].mode;
9916 mode0 = insn_data[icode].operand[1].mode;
9918 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
9919 op0 = copy_to_mode_reg (mode0, op0);
9920 if (target == 0
9921 || GET_MODE (target) != tmode
9922 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
9923 target = gen_reg_rtx (tmode);
9924 pat = GEN_FCN (icode) (target, op0);
9925 if (! pat)
9926 return 0;
9927 emit_insn (pat);
9928 return target;
9931 /* @@@ Should really do something sensible here. */
9932 return NULL_RTX;
9935 /* Recursively search through all of the blocks in a function
9936 checking to see if any of the variables created in that
9937 function match the RTX called 'orig'. If they do then
9938 replace them with the RTX called 'new'. */
9940 static void
9941 replace_symbols_in_block (block, orig, new)
9942 tree block;
9943 rtx orig;
9944 rtx new;
9946 for (; block; block = BLOCK_CHAIN (block))
9948 tree sym;
9950 if (!TREE_USED (block))
9951 continue;
9953 for (sym = BLOCK_VARS (block); sym; sym = TREE_CHAIN (sym))
9955 if ( (DECL_NAME (sym) == 0 && TREE_CODE (sym) != TYPE_DECL)
9956 || DECL_IGNORED_P (sym)
9957 || TREE_CODE (sym) != VAR_DECL
9958 || DECL_EXTERNAL (sym)
9959 || !rtx_equal_p (DECL_RTL (sym), orig)
9960 )
9961 continue;
9963 SET_DECL_RTL (sym, new);
9966 replace_symbols_in_block (BLOCK_SUBBLOCKS (block), orig, new);
9970 /* Return the number (counting from 0) of
9971 the least significant set bit in MASK. */
9973 #ifdef __GNUC__
9974 inline
9975 #endif
9976 static int
9977 number_of_first_bit_set (mask)
9978 int mask;
9980 int bit;
9982 for (bit = 0;
9983 (mask & (1 << bit)) == 0;
9984 ++bit)
9985 continue;
9987 return bit;
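/* For example, number_of_first_bit_set (0x28) returns 3, since 0x28 is
   binary 101000 and bit 3 is the lowest bit set.  */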
9990 /* Generate code to return from a thumb function.
9991 If 'reg_containing_return_addr' is -1, then the return address is
9992 actually on the stack, at the stack pointer. */
9993 static void
9994 thumb_exit (f, reg_containing_return_addr, eh_ofs)
9995 FILE * f;
9996 int reg_containing_return_addr;
9997 rtx eh_ofs;
9999 unsigned regs_available_for_popping;
10000 unsigned regs_to_pop;
10001 int pops_needed;
10002 unsigned available;
10003 unsigned required;
10004 int mode;
10005 int size;
10006 int restore_a4 = FALSE;
10008 /* Compute the registers we need to pop. */
10009 regs_to_pop = 0;
10010 pops_needed = 0;
10012 /* There is an assumption here, that if eh_ofs is not NULL, the
10013 normal return address will have been pushed. */
10014 if (reg_containing_return_addr == -1 || eh_ofs)
10016 /* When we are generating a return for __builtin_eh_return,
10017 reg_containing_return_addr must specify the return regno. */
10018 if (eh_ofs && reg_containing_return_addr == -1)
10019 abort ();
10021 regs_to_pop |= 1 << LR_REGNUM;
10022 ++pops_needed;
10025 if (TARGET_BACKTRACE)
10027 /* Restore the (ARM) frame pointer and stack pointer. */
10028 regs_to_pop |= (1 << ARM_HARD_FRAME_POINTER_REGNUM) | (1 << SP_REGNUM);
10029 pops_needed += 2;
10032 /* If there is nothing to pop then just emit the BX instruction and
10033 return. */
10034 if (pops_needed == 0)
10036 if (eh_ofs)
10037 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
10039 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
10040 return;
10042 /* Otherwise if we are not supporting interworking and we have not created
10043 a backtrace structure and the function was not entered in ARM mode then
10044 just pop the return address straight into the PC. */
10045 else if (!TARGET_INTERWORK
10046 && !TARGET_BACKTRACE
10047 && !is_called_in_ARM_mode (current_function_decl))
10049 if (eh_ofs)
10051 asm_fprintf (f, "\tadd\t%r, #4\n", SP_REGNUM);
10052 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
10053 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
10055 else
10056 asm_fprintf (f, "\tpop\t{%r}\n", PC_REGNUM);
10058 return;
10061 /* Find out how many of the (return) argument registers we can corrupt. */
10062 regs_available_for_popping = 0;
10064 /* If returning via __builtin_eh_return, the bottom three registers
10065 all contain information needed for the return. */
10066 if (eh_ofs)
10067 size = 12;
10068 else
10070 #ifdef RTX_CODE
10071 /* We can deduce the registers used from the function's
10072 return value. This is more reliable than examining
10073 regs_ever_live[] because that will be set if the register is
10074 ever used in the function, not just if the register is used
10075 to hold a return value. */
10077 if (current_function_return_rtx != 0)
10078 mode = GET_MODE (current_function_return_rtx);
10079 else
10080 #endif
10081 mode = DECL_MODE (DECL_RESULT (current_function_decl));
10083 size = GET_MODE_SIZE (mode);
10085 if (size == 0)
10087 /* In a void function we can use any argument register.
10088 In a function that returns a structure on the stack
10089 we can use the second and third argument registers. */
10090 if (mode == VOIDmode)
10091 regs_available_for_popping =
10092 (1 << ARG_REGISTER (1))
10093 | (1 << ARG_REGISTER (2))
10094 | (1 << ARG_REGISTER (3));
10095 else
10096 regs_available_for_popping =
10097 (1 << ARG_REGISTER (2))
10098 | (1 << ARG_REGISTER (3));
10100 else if (size <= 4)
10101 regs_available_for_popping =
10102 (1 << ARG_REGISTER (2))
10103 | (1 << ARG_REGISTER (3));
10104 else if (size <= 8)
10105 regs_available_for_popping =
10106 (1 << ARG_REGISTER (3));
10109 /* Match registers to be popped with registers into which we pop them. */
10110 for (available = regs_available_for_popping,
10111 required = regs_to_pop;
10112 required != 0 && available != 0;
10113 available &= ~(available & - available),
10114 required &= ~(required & - required))
10115 -- pops_needed;
10117 /* If we have any popping registers left over, remove them. */
10118 if (available > 0)
10119 regs_available_for_popping &= ~available;
10121 /* Otherwise if we need another popping register we can use
10122 the fourth argument register. */
10123 else if (pops_needed)
10125 /* If we have not found any free argument registers and
10126 reg a4 contains the return address, we must move it. */
10127 if (regs_available_for_popping == 0
10128 && reg_containing_return_addr == LAST_ARG_REGNUM)
10130 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
10131 reg_containing_return_addr = LR_REGNUM;
10133 else if (size > 12)
10135 /* Register a4 is being used to hold part of the return value,
10136 but we have dire need of a free, low register. */
10137 restore_a4 = TRUE;
10139 asm_fprintf (f, "\tmov\t%r, %r\n",IP_REGNUM, LAST_ARG_REGNUM);
10142 if (reg_containing_return_addr != LAST_ARG_REGNUM)
10144 /* The fourth argument register is available. */
10145 regs_available_for_popping |= 1 << LAST_ARG_REGNUM;
10147 --pops_needed;
10151 /* Pop as many registers as we can. */
10152 thumb_pushpop (f, regs_available_for_popping, FALSE);
10154 /* Process the registers we popped. */
10155 if (reg_containing_return_addr == -1)
10157 /* The return address was popped into the lowest numbered register. */
10158 regs_to_pop &= ~(1 << LR_REGNUM);
10160 reg_containing_return_addr =
10161 number_of_first_bit_set (regs_available_for_popping);
10163 /* Remove this register from the mask of available registers, so that
10164 the return address will not be corrupted by further pops. */
10165 regs_available_for_popping &= ~(1 << reg_containing_return_addr);
10168 /* If we popped other registers then handle them here. */
10169 if (regs_available_for_popping)
10171 int frame_pointer;
10173 /* Work out which register currently contains the frame pointer. */
10174 frame_pointer = number_of_first_bit_set (regs_available_for_popping);
10176 /* Move it into the correct place. */
10177 asm_fprintf (f, "\tmov\t%r, %r\n",
10178 ARM_HARD_FRAME_POINTER_REGNUM, frame_pointer);
10180 /* (Temporarily) remove it from the mask of popped registers. */
10181 regs_available_for_popping &= ~(1 << frame_pointer);
10182 regs_to_pop &= ~(1 << ARM_HARD_FRAME_POINTER_REGNUM);
10184 if (regs_available_for_popping)
10186 int stack_pointer;
10188 /* We popped the stack pointer as well;
10189 find the register that contains it. */
10190 stack_pointer = number_of_first_bit_set (regs_available_for_popping);
10192 /* Move it into the stack register. */
10193 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, stack_pointer);
10195 /* At this point we have popped all necessary registers, so
10196 do not worry about restoring regs_available_for_popping
10197 to its correct value:
10199 assert (pops_needed == 0)
10200 assert (regs_available_for_popping == (1 << frame_pointer))
10201 assert (regs_to_pop == (1 << STACK_POINTER)) */
10203 else
10205 /* Since we have just moved the popped value into the frame
10206 pointer, the popping register is available for reuse, and
10207 we know that we still have the stack pointer left to pop. */
10208 regs_available_for_popping |= (1 << frame_pointer);
10212 /* If we still have registers left on the stack, but we no longer have
10213 any registers into which we can pop them, then we must move the return
10214 address into the link register and make available the register that
10215 contained it. */
10216 if (regs_available_for_popping == 0 && pops_needed > 0)
10218 regs_available_for_popping |= 1 << reg_containing_return_addr;
10220 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM,
10221 reg_containing_return_addr);
10223 reg_containing_return_addr = LR_REGNUM;
10226 /* If we have registers left on the stack then pop some more.
10227 We know that at most we will want to pop FP and SP. */
10228 if (pops_needed > 0)
10230 int popped_into;
10231 int move_to;
10233 thumb_pushpop (f, regs_available_for_popping, FALSE);
10235 /* We have popped either FP or SP.
10236 Move whichever one it is into the correct register. */
10237 popped_into = number_of_first_bit_set (regs_available_for_popping);
10238 move_to = number_of_first_bit_set (regs_to_pop);
10240 asm_fprintf (f, "\tmov\t%r, %r\n", move_to, popped_into);
10242 regs_to_pop &= ~(1 << move_to);
10244 --pops_needed;
10247 /* If we still have not popped everything then we must have only
10248 had one register available to us and we are now popping the SP. */
10249 if (pops_needed > 0)
10251 int popped_into;
10253 thumb_pushpop (f, regs_available_for_popping, FALSE);
10255 popped_into = number_of_first_bit_set (regs_available_for_popping);
10257 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, popped_into);
10259 assert (regs_to_pop == (1 << STACK_POINTER))
10260 assert (pops_needed == 1)
10264 /* If necessary restore the a4 register. */
10265 if (restore_a4)
10267 if (reg_containing_return_addr != LR_REGNUM)
10269 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
10270 reg_containing_return_addr = LR_REGNUM;
10273 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
10276 if (eh_ofs)
10277 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
10279 /* Return to caller. */
10280 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
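/* Illustrative only (not part of the original source): when
   interworking keeps us from using "pop {pc}", the logic above
   typically ends up emitting a sequence such as

       pop  {r3}
       bx   r3

   with the return address routed through a free low register.  */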
10283 /* Emit code to push or pop registers to or from the stack. */
10285 static void
10286 thumb_pushpop (f, mask, push)
10287 FILE * f;
10288 int mask;
10289 int push;
10291 int regno;
10292 int lo_mask = mask & 0xFF;
10294 if (lo_mask == 0 && !push && (mask & (1 << 15)))
10296 /* Special case. Do not generate a POP PC statement here, do it in
10297 thumb_exit(). */
10298 thumb_exit (f, -1, NULL_RTX);
10299 return;
10302 fprintf (f, "\t%s\t{", push ? "push" : "pop");
10304 /* Look at the low registers first. */
10305 for (regno = 0; regno <= LAST_LO_REGNUM; regno++, lo_mask >>= 1)
10307 if (lo_mask & 1)
10309 asm_fprintf (f, "%r", regno);
10311 if ((lo_mask & ~1) != 0)
10312 fprintf (f, ", ");
10316 if (push && (mask & (1 << LR_REGNUM)))
10318 /* Catch pushing the LR. */
10319 if (mask & 0xFF)
10320 fprintf (f, ", ");
10322 asm_fprintf (f, "%r", LR_REGNUM);
10324 else if (!push && (mask & (1 << PC_REGNUM)))
10326 /* Catch popping the PC. */
10327 if (TARGET_INTERWORK || TARGET_BACKTRACE)
10329 /* The PC is never popped directly; instead
10330 it is popped into r3 and then BX is used. */
10331 fprintf (f, "}\n");
10333 thumb_exit (f, -1, NULL_RTX);
10335 return;
10337 else
10339 if (mask & 0xFF)
10340 fprintf (f, ", ");
10342 asm_fprintf (f, "%r", PC_REGNUM);
10346 fprintf (f, "}\n");
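/* Example behaviour (illustrative, assuming the usual ARM numbering
   LR_REGNUM == 14 and PC_REGNUM == 15): the call

       thumb_pushpop (f, (1 << 4) | (1 << 5) | (1 << 14), TRUE);

   writes "push\t{r4, r5, lr}" to F, while a pop whose mask has the
   PC bit set is diverted through thumb_exit () when interworking or
   backtracing is enabled.  */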
10349 void
10350 thumb_final_prescan_insn (insn)
10351 rtx insn;
10353 if (flag_print_asm_name)
10354 asm_fprintf (asm_out_file, "%@ 0x%04x\n",
10355 INSN_ADDRESSES (INSN_UID (insn)));
10359 thumb_shiftable_const (val)
10360 unsigned HOST_WIDE_INT val;
10362 unsigned HOST_WIDE_INT mask = 0xff;
10363 int i;
10365 if (val == 0) /* XXX */
10366 return 0;
10368 for (i = 0; i < 25; i++)
10369 if ((val & (mask << i)) == val)
10370 return 1;
10372 return 0;
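/* Worked examples (illustrative): 0x000ff000 is accepted because all
   of its set bits fit in the window 0xff << 12, whereas 0x00000101 is
   rejected because its set bits span nine bit positions.  In other
   words the constant must be expressible as an 8-bit value shifted
   left, which is what a Thumb "mov" followed by "lsl" can build.  */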
10375 /* Returns nonzero if the current function contains,
10376 or might contain, a far jump. */
10379 thumb_far_jump_used_p (in_prologue)
10380 int in_prologue;
10382 rtx insn;
10384 /* This test is only important for leaf functions. */
10385 /* assert (!leaf_function_p ()); */
10387 /* If we have already decided that far jumps may be used,
10388 do not bother checking again, and always return true even if
10389 it turns out that they are not being used. Once we have made
10390 the decision that far jumps are present (and that hence the link
10391 register will be pushed onto the stack) we cannot go back on it. */
10392 if (cfun->machine->far_jump_used)
10393 return 1;
10395 /* If this function is not being called from the prologue/epilogue
10396 generation code then it must be being called from the
10397 INITIAL_ELIMINATION_OFFSET macro. */
10398 if (!in_prologue)
10400 /* In this case we know that we are being asked about the elimination
10401 of the arg pointer register. If that register is not being used,
10402 then there are no arguments on the stack, and we do not have to
10403 worry that a far jump might force the prologue to push the link
10404 register, changing the stack offsets. In this case we can just
10405 return false, since the presence of far jumps in the function will
10406 not affect stack offsets.
10408 If the arg pointer is live (or if it was live, but has now been
10409 eliminated and so set to dead) then we do have to test to see if
10410 the function might contain a far jump. This test can lead to some
10411 false negatives, since before reload is completed the length of
10412 branch instructions is not known, so gcc defaults to returning their
10413 longest length, which in turn sets the far jump attribute to true.
10415 A false negative will not result in bad code being generated, but it
10416 will result in a needless push and pop of the link register. We
10417 hope that this does not occur too often. */
10418 if (regs_ever_live [ARG_POINTER_REGNUM])
10419 cfun->machine->arg_pointer_live = 1;
10420 else if (!cfun->machine->arg_pointer_live)
10421 return 0;
10424 /* Check to see if the function contains a branch
10425 insn with the far jump attribute set. */
10426 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
10428 if (GET_CODE (insn) == JUMP_INSN
10429 /* Ignore tablejump patterns. */
10430 && GET_CODE (PATTERN (insn)) != ADDR_VEC
10431 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
10432 && get_attr_far_jump (insn) == FAR_JUMP_YES
10435 /* Record the fact that we have decided that
10436 the function does use far jumps. */
10437 cfun->machine->far_jump_used = 1;
10438 return 1;
10442 return 0;
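/* Background note (based on the Thumb ISA, not on anything in this
   file): an unconditional Thumb "b" only reaches about +/- 2KB, so a
   branch beyond that range has to be synthesized with a BL-style
   sequence that clobbers LR -- which is why a potential far jump
   forces the link register to be pushed in the prologue.  */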
10445 /* Return nonzero if FUNC must be entered in ARM mode. */
10448 is_called_in_ARM_mode (func)
10449 tree func;
10451 if (TREE_CODE (func) != FUNCTION_DECL)
10452 abort ();
10454 /* Ignore the problem of functions whose address is taken. */
10455 if (TARGET_CALLEE_INTERWORKING && TREE_PUBLIC (func))
10456 return TRUE;
10458 #ifdef ARM_PE
10459 return lookup_attribute ("interfacearm", DECL_ATTRIBUTES (func)) != NULL_TREE;
10460 #else
10461 return FALSE;
10462 #endif
10465 /* The bits which aren't usefully expanded as rtl. */
10467 const char *
10468 thumb_unexpanded_epilogue ()
10470 int regno;
10471 int live_regs_mask = 0;
10472 int high_regs_pushed = 0;
10473 int leaf_function = leaf_function_p ();
10474 int had_to_push_lr;
10475 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
10477 if (return_used_this_function)
10478 return "";
10480 if (IS_NAKED (arm_current_func_type ()))
10481 return "";
10483 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
10484 if (THUMB_REG_PUSHED_P (regno))
10485 live_regs_mask |= 1 << regno;
10487 for (regno = 8; regno < 13; regno++)
10488 if (THUMB_REG_PUSHED_P (regno))
10489 high_regs_pushed++;
10491 /* The prologue may have pushed some high registers to use as
10492 work registers, e.g. the testsuite file:
10493 gcc/testsuite/gcc/gcc.c-torture/execute/complex-2.c
10494 compiles to produce:
10495 push {r4, r5, r6, r7, lr}
10496 mov r7, r9
10497 mov r6, r8
10498 push {r6, r7}
10499 as part of the prologue. We have to undo that pushing here. */
10501 if (high_regs_pushed)
10503 int mask = live_regs_mask;
10504 int next_hi_reg;
10505 int size;
10506 int mode;
10508 #ifdef RTX_CODE
10509 /* See if we can deduce the registers used from the function's return
10510 value. This is more reliable than examining regs_ever_live[] because that
10511 will be set if the register is ever used in the function, not just if
10512 the register is used to hold a return value. */
10514 if (current_function_return_rtx != 0)
10515 mode = GET_MODE (current_function_return_rtx);
10516 else
10517 #endif
10518 mode = DECL_MODE (DECL_RESULT (current_function_decl));
10520 size = GET_MODE_SIZE (mode);
10522 /* Unless we are returning a type of size > 12, register r3 is
10523 available. */
10524 if (size < 13)
10525 mask |= 1 << 3;
10527 if (mask == 0)
10528 /* Oh dear! We have no low registers into which we can pop
10529 high registers! */
10530 internal_error
10531 ("no low registers available for popping high registers");
10533 for (next_hi_reg = 8; next_hi_reg < 13; next_hi_reg++)
10534 if (THUMB_REG_PUSHED_P (next_hi_reg))
10535 break;
10537 while (high_regs_pushed)
10539 /* Find lo register(s) into which the high register(s) can
10540 be popped. */
10541 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
10543 if (mask & (1 << regno))
10544 high_regs_pushed--;
10545 if (high_regs_pushed == 0)
10546 break;
10549 mask &= (2 << regno) - 1; /* A noop if regno == 8 */
10551 /* Pop the values into the low register(s). */
10552 thumb_pushpop (asm_out_file, mask, 0);
10554 /* Move the value(s) into the high registers. */
10555 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
10557 if (mask & (1 << regno))
10559 asm_fprintf (asm_out_file, "\tmov\t%r, %r\n", next_hi_reg,
10560 regno);
10562 for (next_hi_reg++; next_hi_reg < 13; next_hi_reg++)
10563 if (THUMB_REG_PUSHED_P (next_hi_reg))
10564 break;
10570 had_to_push_lr = (live_regs_mask || !leaf_function
10571 || thumb_far_jump_used_p (1));
10573 if (TARGET_BACKTRACE
10574 && ((live_regs_mask & 0xFF) == 0)
10575 && regs_ever_live [LAST_ARG_REGNUM] != 0)
10577 /* The stack backtrace structure creation code had to
10578 push R7 in order to get a work register, so we pop
10579 it now. */
10580 live_regs_mask |= (1 << LAST_LO_REGNUM);
10583 if (current_function_pretend_args_size == 0 || TARGET_BACKTRACE)
10585 if (had_to_push_lr
10586 && !is_called_in_ARM_mode (current_function_decl)
10587 && !eh_ofs)
10588 live_regs_mask |= 1 << PC_REGNUM;
10590 /* Either no argument registers were pushed or a backtrace
10591 structure was created which includes an adjusted stack
10592 pointer, so just pop everything. */
10593 if (live_regs_mask)
10594 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
10596 if (eh_ofs)
10597 thumb_exit (asm_out_file, 2, eh_ofs);
10598 /* We have either just popped the return address into the
10599 PC, or it was kept in LR for the entire function, or
10600 it is still on the stack because we do not want to
10601 return by doing a pop {pc}. */
10602 else if ((live_regs_mask & (1 << PC_REGNUM)) == 0)
10603 thumb_exit (asm_out_file,
10604 (had_to_push_lr
10605 && is_called_in_ARM_mode (current_function_decl)) ?
10606 -1 : LR_REGNUM, NULL_RTX);
10608 else
10610 /* Pop everything but the return address. */
10611 live_regs_mask &= ~(1 << PC_REGNUM);
10613 if (live_regs_mask)
10614 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
10616 if (had_to_push_lr)
10617 /* Get the return address into a temporary register. */
10618 thumb_pushpop (asm_out_file, 1 << LAST_ARG_REGNUM, 0);
10620 /* Remove the argument registers that were pushed onto the stack. */
10621 asm_fprintf (asm_out_file, "\tadd\t%r, %r, #%d\n",
10622 SP_REGNUM, SP_REGNUM,
10623 current_function_pretend_args_size);
10625 if (eh_ofs)
10626 thumb_exit (asm_out_file, 2, eh_ofs);
10627 else
10628 thumb_exit (asm_out_file,
10629 had_to_push_lr ? LAST_ARG_REGNUM : LR_REGNUM, NULL_RTX);
10632 return "";
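/* For illustration (not from the original source): undoing the
   example prologue quoted above, and assuming r3 and r4 are the free
   low registers, this function would emit something like

       pop {r3, r4}
       mov r8, r3
       mov r9, r4
       pop {r4, r5, r6, r7, pc}

   restoring the high registers via low ones before the final pop.  */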
10635 /* Functions to save and restore machine-specific function data. */
10637 static struct machine_function *
10638 arm_init_machine_status ()
10640 struct machine_function *machine;
10641 machine = (machine_function *) ggc_alloc_cleared (sizeof (machine_function));
10643 #if ARM_FT_UNKNOWN != 0
10644 machine->func_type = ARM_FT_UNKNOWN;
10645 #endif
10646 return machine;
10649 /* Return an RTX indicating where the return address to the
10650 calling function can be found. */
10653 arm_return_addr (count, frame)
10654 int count;
10655 rtx frame ATTRIBUTE_UNUSED;
10657 if (count != 0)
10658 return NULL_RTX;
10660 if (TARGET_APCS_32)
10661 return get_hard_reg_initial_val (Pmode, LR_REGNUM);
10662 else
10664 rtx lr = gen_rtx_AND (Pmode, gen_rtx_REG (Pmode, LR_REGNUM),
10665 GEN_INT (RETURN_ADDR_MASK26));
10666 return get_func_hard_reg_initial_val (cfun, lr);
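/* Explanatory note (not part of the original source): this is what
   backs __builtin_return_address (0); COUNT != 0 is unsupported.  In
   26-bit APCS mode LR also carries PSR flags in its top bits, hence
   the masking with RETURN_ADDR_MASK26 above.  */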
10670 /* Do anything needed before RTL is emitted for each function. */
10672 void
10673 arm_init_expanders ()
10675 /* Arrange to initialize and mark the machine per-function status. */
10676 init_machine_status = arm_init_machine_status;
10679 HOST_WIDE_INT
10680 thumb_get_frame_size ()
10682 int regno;
10684 int base_size = ROUND_UP_WORD (get_frame_size ());
10685 int count_regs = 0;
10686 int entry_size = 0;
10687 int leaf;
10689 if (! TARGET_THUMB)
10690 abort ();
10692 if (! TARGET_ATPCS)
10693 return base_size;
10695 /* We need to know if we are a leaf function. Unfortunately, it
10696 is possible to be called after start_sequence has been called,
10697 which causes get_insns to return the insns for the sequence,
10698 not the function, which will cause leaf_function_p to return
10699 an incorrect result.
10701 To work around this, we cache the computed frame size. This
10702 works because we will only be calling RTL expanders that need
10703 to know about leaf functions once reload has completed, and the
10704 frame size cannot be changed after that time, so we can safely
10705 use the cached value. */
10707 if (reload_completed)
10708 return cfun->machine->frame_size;
10710 leaf = leaf_function_p ();
10712 /* A leaf function does not need any stack alignment if it has nothing
10713 on the stack. */
10714 if (leaf && base_size == 0)
10716 cfun->machine->frame_size = 0;
10717 return 0;
10720 /* We know that SP will be word aligned on entry, and we must
10721 preserve that condition at any subroutine call. But those are
10722 the only constraints. */
10724 /* Space for variadic functions. */
10725 if (current_function_pretend_args_size)
10726 entry_size += current_function_pretend_args_size;
10728 /* Space for pushed lo registers. */
10729 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
10730 if (THUMB_REG_PUSHED_P (regno))
10731 count_regs++;
10733 /* Space for backtrace structure. */
10734 if (TARGET_BACKTRACE)
10736 if (count_regs == 0 && regs_ever_live[LAST_ARG_REGNUM] != 0)
10737 entry_size += 20;
10738 else
10739 entry_size += 16;
10742 if (count_regs || !leaf || thumb_far_jump_used_p (1))
10743 count_regs++; /* LR */
10745 entry_size += count_regs * 4;
10746 count_regs = 0;
10748 /* Space for pushed hi regs. */
10749 for (regno = 8; regno < 13; regno++)
10750 if (THUMB_REG_PUSHED_P (regno))
10751 count_regs++;
10753 entry_size += count_regs * 4;
10755 if ((entry_size + base_size + current_function_outgoing_args_size) & 7)
10756 base_size += 4;
10757 if ((entry_size + base_size + current_function_outgoing_args_size) & 7)
10758 abort ();
10760 cfun->machine->frame_size = base_size;
10762 return base_size;
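/* Worked example (illustrative figures): a non-leaf ATPCS function
   with 8 bytes of locals, no pretend args, no backtrace and r4/r5
   saved gets entry_size = (2 regs + LR) * 4 = 12.  12 + 8 = 20 is not
   a multiple of 8, so base_size is bumped from 8 to 12, giving a
   24-byte, doubleword-aligned frame.  */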
10765 /* Generate the rest of a function's prologue. */
10767 void
10768 thumb_expand_prologue ()
10770 HOST_WIDE_INT amount = (thumb_get_frame_size ()
10771 + current_function_outgoing_args_size);
10772 unsigned long func_type;
10774 func_type = arm_current_func_type ();
10776 /* Naked functions don't have prologues. */
10777 if (IS_NAKED (func_type))
10778 return;
10780 if (IS_INTERRUPT (func_type))
10782 error ("interrupt Service Routines cannot be coded in Thumb mode");
10783 return;
10786 if (frame_pointer_needed)
10787 emit_insn (gen_movsi (hard_frame_pointer_rtx, stack_pointer_rtx));
10789 if (amount)
10791 amount = ROUND_UP_WORD (amount);
10793 if (amount < 512)
10794 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
10795 GEN_INT (- amount)));
10796 else
10798 int regno;
10799 rtx reg;
10801 /* The stack decrement is too big for an immediate value in a single
10802 insn. In theory we could issue multiple subtracts, but after
10803 three of them it becomes more space efficient to place the full
10804 value in the constant pool and load into a register. (Also the
10805 ARM debugger really likes to see only one stack decrement per
10806 function). So instead we look for a scratch register into which
10807 we can load the decrement, and then we subtract this from the
10808 stack pointer. Unfortunately on the thumb the only available
10809 scratch registers are the argument registers, and we cannot use
10810 these as they may hold arguments to the function. Instead we
10811 attempt to locate a call preserved register which is used by this
10812 function. If we can find one, then we know that it will have
10813 been pushed at the start of the prologue and so we can corrupt
10814 it now. */
10815 for (regno = LAST_ARG_REGNUM + 1; regno <= LAST_LO_REGNUM; regno++)
10816 if (THUMB_REG_PUSHED_P (regno)
10817 && !(frame_pointer_needed
10818 && (regno == THUMB_HARD_FRAME_POINTER_REGNUM)))
10819 break;
10821 if (regno > LAST_LO_REGNUM) /* Very unlikely. */
10823 rtx spare = gen_rtx (REG, SImode, IP_REGNUM);
10825 /* Choose an arbitrary, non-argument low register. */
10826 reg = gen_rtx (REG, SImode, LAST_LO_REGNUM);
10828 /* Save it by copying it into a high, scratch register. */
10829 emit_insn (gen_movsi (spare, reg));
10830 /* Add a USE to stop propagate_one_insn() from barfing. */
10831 emit_insn (gen_prologue_use (spare));
10833 /* Decrement the stack. */
10834 emit_insn (gen_movsi (reg, GEN_INT (- amount)));
10835 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
10836 reg));
10838 /* Restore the low register's original value. */
10839 emit_insn (gen_movsi (reg, spare));
10841 /* Emit a USE of the restored scratch register, so that flow
10842 analysis will not consider the restore redundant. The
10843 register won't be used again in this function and isn't
10844 restored by the epilogue. */
10845 emit_insn (gen_prologue_use (reg));
10847 else
10849 reg = gen_rtx (REG, SImode, regno);
10851 emit_insn (gen_movsi (reg, GEN_INT (- amount)));
10852 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
10853 reg));
10858 if (current_function_profile || TARGET_NO_SCHED_PRO)
10859 emit_insn (gen_blockage ());
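/* Illustrative outcome (not from the original source): for a frame
   of, say, 1024 bytes where r7 was pushed and is not the frame
   pointer, the fallback above boils down to something like

       ldr r7, .Lc    @ .Lc holds -1024 in the literal pool
       add sp, r7

   rather than a chain of "sub sp, #n" instructions.  */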
10862 void
10863 thumb_expand_epilogue ()
10865 HOST_WIDE_INT amount = (thumb_get_frame_size ()
10866 + current_function_outgoing_args_size);
10868 /* Naked functions don't have epilogues. */
10869 if (IS_NAKED (arm_current_func_type ()))
10870 return;
10872 if (frame_pointer_needed)
10873 emit_insn (gen_movsi (stack_pointer_rtx, hard_frame_pointer_rtx));
10874 else if (amount)
10876 amount = ROUND_UP_WORD (amount);
10878 if (amount < 512)
10879 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
10880 GEN_INT (amount)));
10881 else
10883 /* r3 is always free in the epilogue. */
10884 rtx reg = gen_rtx (REG, SImode, LAST_ARG_REGNUM);
10886 emit_insn (gen_movsi (reg, GEN_INT (amount)));
10887 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
10891 /* Emit a USE (stack_pointer_rtx), so that
10892 the stack adjustment will not be deleted. */
10893 emit_insn (gen_prologue_use (stack_pointer_rtx));
10895 if (current_function_profile || TARGET_NO_SCHED_PRO)
10896 emit_insn (gen_blockage ());
10899 static void
10900 thumb_output_function_prologue (f, size)
10901 FILE * f;
10902 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
10904 int live_regs_mask = 0;
10905 int high_regs_pushed = 0;
10906 int regno;
10908 if (IS_NAKED (arm_current_func_type ()))
10909 return;
10911 if (is_called_in_ARM_mode (current_function_decl))
10913 const char * name;
10915 if (GET_CODE (DECL_RTL (current_function_decl)) != MEM)
10916 abort ();
10917 if (GET_CODE (XEXP (DECL_RTL (current_function_decl), 0)) != SYMBOL_REF)
10918 abort ();
10919 name = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
10921 /* Generate code sequence to switch us into Thumb mode. */
10922 /* The .code 32 directive has already been emitted by
10923 ASM_DECLARE_FUNCTION_NAME. */
10924 asm_fprintf (f, "\torr\t%r, %r, #1\n", IP_REGNUM, PC_REGNUM);
10925 asm_fprintf (f, "\tbx\t%r\n", IP_REGNUM);
10927 /* Generate a label, so that the debugger will notice the
10928 change in instruction sets. This label is also used by
10929 the assembler to bypass the ARM code when this function
10930 is called from a Thumb encoded function elsewhere in the
10931 same file. Hence the definition of STUB_NAME here must
10932 agree with the definition in gas/config/tc-arm.c */
10934 #define STUB_NAME ".real_start_of"
10936 fprintf (f, "\t.code\t16\n");
10937 #ifdef ARM_PE
10938 if (arm_dllexport_name_p (name))
10939 name = arm_strip_name_encoding (name);
10940 #endif
10941 asm_fprintf (f, "\t.globl %s%U%s\n", STUB_NAME, name);
10942 fprintf (f, "\t.thumb_func\n");
10943 asm_fprintf (f, "%s%U%s:\n", STUB_NAME, name);
10946 if (current_function_pretend_args_size)
10948 if (cfun->machine->uses_anonymous_args)
10950 int num_pushes;
10952 fprintf (f, "\tpush\t{");
10954 num_pushes = ARM_NUM_INTS (current_function_pretend_args_size);
10956 for (regno = LAST_ARG_REGNUM + 1 - num_pushes;
10957 regno <= LAST_ARG_REGNUM;
10958 regno++)
10959 asm_fprintf (f, "%r%s", regno,
10960 regno == LAST_ARG_REGNUM ? "" : ", ");
10962 fprintf (f, "}\n");
10964 else
10965 asm_fprintf (f, "\tsub\t%r, %r, #%d\n",
10966 SP_REGNUM, SP_REGNUM,
10967 current_function_pretend_args_size);
10970 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
10971 if (THUMB_REG_PUSHED_P (regno))
10972 live_regs_mask |= 1 << regno;
10974 if (live_regs_mask || !leaf_function_p () || thumb_far_jump_used_p (1))
10975 live_regs_mask |= 1 << LR_REGNUM;
10977 if (TARGET_BACKTRACE)
10979 int offset;
10980 int work_register = 0;
10981 int wr;
10983 /* We have been asked to create a stack backtrace structure.
10984 The code looks like this:
10986 0 .align 2
10987 0 func:
10988 0 sub SP, #16 Reserve space for 4 registers.
10989 2 push {R7} Get a work register.
10990 4 add R7, SP, #20 Get the stack pointer before the push.
10991 6 str R7, [SP, #8] Store the stack pointer (before reserving the space).
10992 8 mov R7, PC Get hold of the start of this code plus 12.
10993 10 str R7, [SP, #16] Store it.
10994 12 mov R7, FP Get hold of the current frame pointer.
10995 14 str R7, [SP, #4] Store it.
10996 16 mov R7, LR Get hold of the current return address.
10997 18 str R7, [SP, #12] Store it.
10998 20 add R7, SP, #16 Point at the start of the backtrace structure.
10999 22 mov FP, R7 Put this value into the frame pointer. */
11001 if ((live_regs_mask & 0xFF) == 0)
11003 /* See if the a4 register is free. */
11005 if (regs_ever_live [LAST_ARG_REGNUM] == 0)
11006 work_register = LAST_ARG_REGNUM;
11007 else /* We must push a register of our own */
11008 live_regs_mask |= (1 << LAST_LO_REGNUM);
11011 if (work_register == 0)
11013 /* Select a register from the list that will be pushed to
11014 use as our work register. */
11015 for (work_register = (LAST_LO_REGNUM + 1); work_register--;)
11016 if ((1 << work_register) & live_regs_mask)
11017 break;
11020 asm_fprintf
11021 (f, "\tsub\t%r, %r, #16\t%@ Create stack backtrace structure\n",
11022 SP_REGNUM, SP_REGNUM);
11024 if (live_regs_mask)
11025 thumb_pushpop (f, live_regs_mask, 1);
11027 for (offset = 0, wr = 1 << 15; wr != 0; wr >>= 1)
11028 if (wr & live_regs_mask)
11029 offset += 4;
11031 asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
11032 offset + 16 + current_function_pretend_args_size);
11034 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
11035 offset + 4);
11037 /* Make sure that the instruction fetching the PC is in the right place
11038 to calculate "start of backtrace creation code + 12". */
11039 if (live_regs_mask)
11041 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
11042 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
11043 offset + 12);
11044 asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
11045 ARM_HARD_FRAME_POINTER_REGNUM);
11046 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
11047 offset);
11049 else
11051 asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
11052 ARM_HARD_FRAME_POINTER_REGNUM);
11053 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
11054 offset);
11055 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
11056 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
11057 offset + 12);
11060 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, LR_REGNUM);
11061 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
11062 offset + 8);
11063 asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
11064 offset + 12);
11065 asm_fprintf (f, "\tmov\t%r, %r\t\t%@ Backtrace structure created\n",
11066 ARM_HARD_FRAME_POINTER_REGNUM, work_register);
11068 else if (live_regs_mask)
11069 thumb_pushpop (f, live_regs_mask, 1);
11071 for (regno = 8; regno < 13; regno++)
11072 if (THUMB_REG_PUSHED_P (regno))
11073 high_regs_pushed++;
11075 if (high_regs_pushed)
11077 int pushable_regs = 0;
11078 int mask = live_regs_mask & 0xff;
11079 int next_hi_reg;
11081 for (next_hi_reg = 12; next_hi_reg > LAST_LO_REGNUM; next_hi_reg--)
11082 if (THUMB_REG_PUSHED_P (next_hi_reg))
11083 break;
11085 pushable_regs = mask;
11087 if (pushable_regs == 0)
11089 /* Desperation time -- this probably will never happen. */
11090 if (THUMB_REG_PUSHED_P (LAST_ARG_REGNUM))
11091 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, LAST_ARG_REGNUM);
11092 mask = 1 << LAST_ARG_REGNUM;
11095 while (high_regs_pushed > 0)
11097 for (regno = LAST_LO_REGNUM; regno >= 0; regno--)
11099 if (mask & (1 << regno))
11101 asm_fprintf (f, "\tmov\t%r, %r\n", regno, next_hi_reg);
11103 high_regs_pushed--;
11105 if (high_regs_pushed)
11107 for (next_hi_reg--; next_hi_reg > LAST_LO_REGNUM;
11108 next_hi_reg--)
11109 if (THUMB_REG_PUSHED_P (next_hi_reg))
11110 break;
11112 else
11114 mask &= ~((1 << regno) - 1);
11115 break;
11120 thumb_pushpop (f, mask, 1);
11123 if (pushable_regs == 0
11124 && (THUMB_REG_PUSHED_P (LAST_ARG_REGNUM)))
11125 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
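/* Illustrative output (reconstructed from the asm_fprintf calls
   above) for a function "foo" that must be entered in ARM mode:

       orr ip, pc, #1
       bx  ip
       .code 16
       .globl .real_start_of_foo
       .thumb_func
   .real_start_of_foo:

   In ARM state PC reads as the address of the orr plus 8, i.e. the
   first Thumb instruction, and the low bit set by the orr makes the
   bx switch into Thumb state.  */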
11129 /* Handle the case of a double word load into a low register from
11130 a computed memory address. The computed address may involve a
11131 register which is overwritten by the load. */
11133 const char *
11134 thumb_load_double_from_address (operands)
11135 rtx *operands;
11137 rtx addr;
11138 rtx base;
11139 rtx offset;
11140 rtx arg1;
11141 rtx arg2;
11143 if (GET_CODE (operands[0]) != REG)
11144 abort ();
11146 if (GET_CODE (operands[1]) != MEM)
11147 abort ();
11149 /* Get the memory address. */
11150 addr = XEXP (operands[1], 0);
11152 /* Work out how the memory address is computed. */
11153 switch (GET_CODE (addr))
11155 case REG:
11156 operands[2] = gen_rtx (MEM, SImode,
11157 plus_constant (XEXP (operands[1], 0), 4));
11159 if (REGNO (operands[0]) == REGNO (addr))
11161 output_asm_insn ("ldr\t%H0, %2", operands);
11162 output_asm_insn ("ldr\t%0, %1", operands);
11164 else
11166 output_asm_insn ("ldr\t%0, %1", operands);
11167 output_asm_insn ("ldr\t%H0, %2", operands);
11169 break;
11171 case CONST:
11172 /* Compute <address> + 4 for the high order load. */
11173 operands[2] = gen_rtx (MEM, SImode,
11174 plus_constant (XEXP (operands[1], 0), 4));
11176 output_asm_insn ("ldr\t%0, %1", operands);
11177 output_asm_insn ("ldr\t%H0, %2", operands);
11178 break;
11180 case PLUS:
11181 arg1 = XEXP (addr, 0);
11182 arg2 = XEXP (addr, 1);
11184 if (CONSTANT_P (arg1))
11185 base = arg2, offset = arg1;
11186 else
11187 base = arg1, offset = arg2;
11189 if (GET_CODE (base) != REG)
11190 abort ();
11192 /* Catch the case of <address> = <reg> + <reg> */
11193 if (GET_CODE (offset) == REG)
11195 int reg_offset = REGNO (offset);
11196 int reg_base = REGNO (base);
11197 int reg_dest = REGNO (operands[0]);
11199 /* Add the base and offset registers together into the
11200 higher destination register. */
11201 asm_fprintf (asm_out_file, "\tadd\t%r, %r, %r",
11202 reg_dest + 1, reg_base, reg_offset);
11204 /* Load the lower destination register from the address in
11205 the higher destination register. */
11206 asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #0]",
11207 reg_dest, reg_dest + 1);
11209 /* Load the higher destination register from its own address
11210 plus 4. */
11211 asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #4]",
11212 reg_dest + 1, reg_dest + 1);
11214 else
11216 /* Compute <address> + 4 for the high order load. */
11217 operands[2] = gen_rtx (MEM, SImode,
11218 plus_constant (XEXP (operands[1], 0), 4));
11220 /* If the computed address is held in the low order register
11221 then load the high order register first, otherwise always
11222 load the low order register first. */
11223 if (REGNO (operands[0]) == REGNO (base))
11225 output_asm_insn ("ldr\t%H0, %2", operands);
11226 output_asm_insn ("ldr\t%0, %1", operands);
11228 else
11230 output_asm_insn ("ldr\t%0, %1", operands);
11231 output_asm_insn ("ldr\t%H0, %2", operands);
11234 break;
11236 case LABEL_REF:
11237 /* With no registers to worry about we can just load the value
11238 directly. */
11239 operands[2] = gen_rtx (MEM, SImode,
11240 plus_constant (XEXP (operands[1], 0), 4));
11242 output_asm_insn ("ldr\t%H0, %2", operands);
11243 output_asm_insn ("ldr\t%0, %1", operands);
11244 break;
11246 default:
11247 abort ();
11248 break;
11251 return "";
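/* Illustrative output for the <reg> + <reg> case above, loading the
   pair r0/r1 from [r2 + r3]:

       add r1, r2, r3
       ldr r0, [r1, #0]
       ldr r1, [r1, #4]

   The sum lands in the high half of the destination, so the base is
   never clobbered before both words have been fetched.  */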
11255 const char *
11256 thumb_output_move_mem_multiple (n, operands)
11257 int n;
11258 rtx * operands;
11260 rtx tmp;
11262 switch (n)
11264 case 2:
11265 if (REGNO (operands[4]) > REGNO (operands[5]))
11267 tmp = operands[4];
11268 operands[4] = operands[5];
11269 operands[5] = tmp;
11271 output_asm_insn ("ldmia\t%1!, {%4, %5}", operands);
11272 output_asm_insn ("stmia\t%0!, {%4, %5}", operands);
11273 break;
11275 case 3:
11276 if (REGNO (operands[4]) > REGNO (operands[5]))
11278 tmp = operands[4];
11279 operands[4] = operands[5];
11280 operands[5] = tmp;
11282 if (REGNO (operands[5]) > REGNO (operands[6]))
11284 tmp = operands[5];
11285 operands[5] = operands[6];
11286 operands[6] = tmp;
11288 if (REGNO (operands[4]) > REGNO (operands[5]))
11290 tmp = operands[4];
11291 operands[4] = operands[5];
11292 operands[5] = tmp;
11295 output_asm_insn ("ldmia\t%1!, {%4, %5, %6}", operands);
11296 output_asm_insn ("stmia\t%0!, {%4, %5, %6}", operands);
11297 break;
11299 default:
11300 abort ();
11303 return "";
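/* The swaps above sort the scratch registers so that the printed
   ldmia/stmia register lists come out in ascending order, as
   assemblers expect; e.g. with scratch registers r5, r3, r4 the
   n == 3 case emits (illustrative register numbers)

       ldmia r1!, {r3, r4, r5}
       stmia r0!, {r3, r4, r5}  */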
11306 /* Routines for generating rtl. */
11308 void
11309 thumb_expand_movstrqi (operands)
11310 rtx * operands;
11312 rtx out = copy_to_mode_reg (SImode, XEXP (operands[0], 0));
11313 rtx in = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
11314 HOST_WIDE_INT len = INTVAL (operands[2]);
11315 HOST_WIDE_INT offset = 0;
11317 while (len >= 12)
11319 emit_insn (gen_movmem12b (out, in, out, in));
11320 len -= 12;
11323 if (len >= 8)
11325 emit_insn (gen_movmem8b (out, in, out, in));
11326 len -= 8;
11329 if (len >= 4)
11331 rtx reg = gen_reg_rtx (SImode);
11332 emit_insn (gen_movsi (reg, gen_rtx (MEM, SImode, in)));
11333 emit_insn (gen_movsi (gen_rtx (MEM, SImode, out), reg));
11334 len -= 4;
11335 offset += 4;
11338 if (len >= 2)
11340 rtx reg = gen_reg_rtx (HImode);
11341 emit_insn (gen_movhi (reg, gen_rtx (MEM, HImode,
11342 plus_constant (in, offset))));
11343 emit_insn (gen_movhi (gen_rtx (MEM, HImode, plus_constant (out, offset)),
11344 reg));
11345 len -= 2;
11346 offset += 2;
11349 if (len)
11351 rtx reg = gen_reg_rtx (QImode);
11352 emit_insn (gen_movqi (reg, gen_rtx (MEM, QImode,
11353 plus_constant (in, offset))));
11354 emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (out, offset)),
11355 reg));
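/* Worked example (illustrative): a 23-byte copy comes out as one
   12-byte ldmia/stmia block and one 8-byte block (both of which
   advance IN and OUT), followed by a halfword move at offset 0 and a
   final byte move at offset 2.  */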
11360 thumb_cmp_operand (op, mode)
11361 rtx op;
11362 enum machine_mode mode;
11364 return ((GET_CODE (op) == CONST_INT
11365 && (unsigned HOST_WIDE_INT) (INTVAL (op)) < 256)
11366 || register_operand (op, mode));
11369 static const char *
11370 thumb_condition_code (x, invert)
11371 rtx x;
11372 int invert;
11374 static const char * const conds[] =
11376 "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
11377 "hi", "ls", "ge", "lt", "gt", "le"
11379 int val;
11381 switch (GET_CODE (x))
11383 case EQ: val = 0; break;
11384 case NE: val = 1; break;
11385 case GEU: val = 2; break;
11386 case LTU: val = 3; break;
11387 case GTU: val = 8; break;
11388 case LEU: val = 9; break;
11389 case GE: val = 10; break;
11390 case LT: val = 11; break;
11391 case GT: val = 12; break;
11392 case LE: val = 13; break;
11393 default:
11394 abort ();
11397 return conds[val ^ invert];
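/* The table pairs each condition with its logical inverse, so
   "val ^ invert" flips eq<->ne, cs<->cc, ge<->lt and so on; e.g.
   thumb_condition_code (x, 1) on a GE comparison yields "lt"
   (illustrative).  */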
11400 /* Handle storing a half-word to memory during reload. */
11402 void
11403 thumb_reload_out_hi (operands)
11404 rtx * operands;
11406 emit_insn (gen_thumb_movhi_clobber (operands[0], operands[1], operands[2]));
11409 /* Handle storing a half-word to memory during reload. */
11411 void
11412 thumb_reload_in_hi (operands)
11413 rtx * operands ATTRIBUTE_UNUSED;
11415 abort ();
11418 /* Return the length of a function name prefix
11419 that starts with the character C. */
11421 static int
11422 arm_get_strip_length (c)
11423 int c;
11425 switch (c)
11427 ARM_NAME_ENCODING_LENGTHS
11428 default: return 0;
11432 /* Return a pointer to a function's name with any
11433 and all prefix encodings stripped from it. */
11435 const char *
11436 arm_strip_name_encoding (name)
11437 const char * name;
11439 int skip;
11441 while ((skip = arm_get_strip_length (* name)))
11442 name += skip;
11444 return name;
11447 /* If there is a '*' anywhere in the name's prefix, then
11448 emit the stripped name verbatim, otherwise prepend an
11449 underscore if leading underscores are being used. */
11451 void
11452 arm_asm_output_labelref (stream, name)
11453 FILE * stream;
11454 const char * name;
11456 int skip;
11457 int verbatim = 0;
11459 while ((skip = arm_get_strip_length (* name)))
11461 verbatim |= (*name == '*');
11462 name += skip;
11465 if (verbatim)
11466 fputs (name, stream);
11467 else
11468 asm_fprintf (stream, "%U%s", name);
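/* Illustrative behaviour: a name encoded as "*foo" has its prefix
   stripped and is emitted verbatim as "foo", while a plain "bar"
   receives the normal %U user-label prefix.  */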
11471 rtx aof_pic_label;
11473 #ifdef AOF_ASSEMBLER
11474 /* Special functions only needed when producing AOF syntax assembler. */
11476 struct pic_chain
11478 struct pic_chain * next;
11479 const char * symname;
11482 static struct pic_chain * aof_pic_chain = NULL;
11485 aof_pic_entry (x)
11486 rtx x;
11488 struct pic_chain ** chainp;
11489 int offset;
11491 if (aof_pic_label == NULL_RTX)
11493 aof_pic_label = gen_rtx_SYMBOL_REF (Pmode, "x$adcons");
11496 for (offset = 0, chainp = &aof_pic_chain; *chainp;
11497 offset += 4, chainp = &(*chainp)->next)
11498 if ((*chainp)->symname == XSTR (x, 0))
11499 return plus_constant (aof_pic_label, offset);
11501 *chainp = (struct pic_chain *) xmalloc (sizeof (struct pic_chain));
11502 (*chainp)->next = NULL;
11503 (*chainp)->symname = XSTR (x, 0);
11504 return plus_constant (aof_pic_label, offset);
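/* Illustrative: three distinct symbols registered in turn come back
   as x$adcons + 0, + 4 and + 8, and looking up a symbol again returns
   the offset it was first given.  */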
11507 void
11508 aof_dump_pic_table (f)
11509 FILE * f;
11511 struct pic_chain * chain;
11513 if (aof_pic_chain == NULL)
11514 return;
11516 asm_fprintf (f, "\tAREA |%r$$adcons|, BASED %r\n",
11517 PIC_OFFSET_TABLE_REGNUM,
11518 PIC_OFFSET_TABLE_REGNUM);
11519 fputs ("|x$adcons|\n", f);
11521 for (chain = aof_pic_chain; chain; chain = chain->next)
11523 fputs ("\tDCD\t", f);
11524 assemble_name (f, chain->symname);
11525 fputs ("\n", f);
11529 int arm_text_section_count = 1;
11531 char *
11532 aof_text_section ()
11534 static char buf[100];
11535 sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
11536 arm_text_section_count++);
11537 if (flag_pic)
11538 strcat (buf, ", PIC, REENTRANT");
11539 return buf;
11542 static int arm_data_section_count = 1;
11544 char *
11545 aof_data_section ()
11547 static char buf[100];
11548 sprintf (buf, "\tAREA |C$$data%d|, DATA", arm_data_section_count++);
11549 return buf;
11552 /* The AOF assembler is religiously strict about declarations of
11553 imported and exported symbols, so that it is impossible to declare
11554 a function as imported near the beginning of the file, and then to
11555 export it later on. It is, however, possible to delay the decision
11556 until all the functions in the file have been compiled. To get
11557 around this, we maintain a list of the imports and exports, and
11558 delete from it any that are subsequently defined. At the end of
11559 compilation we spit the remainder of the list out before the END
11560 directive. */
11562 struct import
11564 struct import * next;
11565 const char * name;
11568 static struct import * imports_list = NULL;
11570 void
11571 aof_add_import (name)
11572 const char * name;
11574 struct import * new;
11576 for (new = imports_list; new; new = new->next)
11577 if (new->name == name)
11578 return;
11580 new = (struct import *) xmalloc (sizeof (struct import));
11581 new->next = imports_list;
11582 imports_list = new;
11583 new->name = name;
11586 void
11587 aof_delete_import (name)
11588 const char * name;
11590 struct import ** old;
11592 for (old = &imports_list; *old; old = & (*old)->next)
11594 if ((*old)->name == name)
11596 *old = (*old)->next;
11597 return;
11602 int arm_main_function = 0;
11604 void
11605 aof_dump_imports (f)
11606 FILE * f;
11608 /* The AOF assembler needs this to cause the startup code to be extracted
11609 from the library. Bringing in __main causes the whole thing to work
11610 automagically. */
11611 if (arm_main_function)
11613 text_section ();
11614 fputs ("\tIMPORT __main\n", f);
11615 fputs ("\tDCD __main\n", f);
11618 /* Now dump the remaining imports. */
11619 while (imports_list)
11621 fprintf (f, "\tIMPORT\t");
11622 assemble_name (f, imports_list->name);
11623 fputc ('\n', f);
11624 imports_list = imports_list->next;
11628 static void
11629 aof_globalize_label (stream, name)
11630 FILE *stream;
11631 const char *name;
11633 default_globalize_label (stream, name);
11634 if (! strcmp (name, "main"))
11635 arm_main_function = 1;
11637 #endif /* AOF_ASSEMBLER */
11639 #ifdef OBJECT_FORMAT_ELF
11640 /* Switch to an arbitrary section NAME with attributes as specified
11641 by FLAGS. ALIGN specifies any known alignment requirements for
11642 the section; 0 if the default should be used.
11644 Differs from the default elf version only in the prefix character
11645 used before the section type. */
11647 static void
11648 arm_elf_asm_named_section (name, flags)
11649 const char *name;
11650 unsigned int flags;
11652 char flagchars[10], *f = flagchars;
11654 if (! named_section_first_declaration (name))
11656 fprintf (asm_out_file, "\t.section\t%s\n", name);
11657 return;
11660 if (!(flags & SECTION_DEBUG))
11661 *f++ = 'a';
11662 if (flags & SECTION_WRITE)
11663 *f++ = 'w';
11664 if (flags & SECTION_CODE)
11665 *f++ = 'x';
11666 if (flags & SECTION_SMALL)
11667 *f++ = 's';
11668 if (flags & SECTION_MERGE)
11669 *f++ = 'M';
11670 if (flags & SECTION_STRINGS)
11671 *f++ = 'S';
11672 if (flags & SECTION_TLS)
11673 *f++ = 'T';
11674 *f = '\0';
11676 fprintf (asm_out_file, "\t.section\t%s,\"%s\"", name, flagchars);
11678 if (!(flags & SECTION_NOTYPE))
11680 const char *type;
11682 if (flags & SECTION_BSS)
11683 type = "nobits";
11684 else
11685 type = "progbits";
11687 fprintf (asm_out_file, ",%%%s", type);
11689 if (flags & SECTION_ENTSIZE)
11690 fprintf (asm_out_file, ",%d", flags & SECTION_ENTSIZE);
11693 putc ('\n', asm_out_file);
11695 #endif
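/* Example directive (illustrative) for a writable data section:

       .section .data.rel,"aw",%progbits

   The '%' before the type, instead of the usual '@', is the
   ARM-specific twist: '@' introduces a comment in ARM assembler
   syntax.  */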
11697 #ifndef ARM_PE
11698 /* Symbols in the text segment can be accessed without indirecting via the
11699 constant pool; it may take an extra binary operation, but this is still
11700 faster than indirecting via memory. Don't do this when not optimizing,
11701 since we won't be calculating all of the offsets necessary to do this
11702 simplification. */
11704 static void
11705 arm_encode_section_info (decl, first)
11706 tree decl;
11707 int first;
11709 /* This doesn't work with AOF syntax, since the string table may be in
11710 a different AREA. */
11711 #ifndef AOF_ASSEMBLER
11712 if (optimize > 0 && TREE_CONSTANT (decl)
11713 && (!flag_writable_strings || TREE_CODE (decl) != STRING_CST))
11715 rtx rtl = (TREE_CODE_CLASS (TREE_CODE (decl)) != 'd'
11716 ? TREE_CST_RTL (decl) : DECL_RTL (decl));
11717 SYMBOL_REF_FLAG (XEXP (rtl, 0)) = 1;
11719 #endif
11721 /* If we are referencing a function that is weak then encode a long call
11722 flag in the function name, otherwise if the function is static or
11723 known to be defined in this file then encode a short call flag.
11724 if (first && TREE_CODE_CLASS (TREE_CODE (decl)) == 'd')
11726 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_WEAK (decl))
11727 arm_encode_call_attribute (decl, LONG_CALL_FLAG_CHAR);
11728 else if (! TREE_PUBLIC (decl))
11729 arm_encode_call_attribute (decl, SHORT_CALL_FLAG_CHAR);
11732 #endif /* !ARM_PE */
11734 static void
11735 arm_internal_label (stream, prefix, labelno)
11736 FILE *stream;
11737 const char *prefix;
11738 unsigned long labelno;
11740 if (arm_ccfsm_state == 3 && (unsigned) arm_target_label == labelno
11741 && !strcmp (prefix, "L"))
11743 arm_ccfsm_state = 0;
11744 arm_target_insn = NULL;
11746 default_internal_label (stream, prefix, labelno);
11749 /* Output code to add DELTA to the first argument, and then jump
11750 to FUNCTION. Used for C++ multiple inheritance. */
11752 static void
11753 arm_output_mi_thunk (file, thunk, delta, vcall_offset, function)
11754 FILE *file;
11755 tree thunk ATTRIBUTE_UNUSED;
11756 HOST_WIDE_INT delta;
11757 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED;
11758 tree function;
11760 int mi_delta = delta;
11761 const char *const mi_op = mi_delta < 0 ? "sub" : "add";
11762 int shift = 0;
11763 int this_regno = (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)))
11764 ? 1 : 0);
11765 if (mi_delta < 0)
11766 mi_delta = - mi_delta;
11767 while (mi_delta != 0)
11769 if ((mi_delta & (3 << shift)) == 0)
11770 shift += 2;
11771 else
11773 asm_fprintf (file, "\t%s\t%r, %r, #%d\n",
11774 mi_op, this_regno, this_regno,
11775 mi_delta & (0xff << shift));
11776 mi_delta &= ~(0xff << shift);
11777 shift += 8;
11780 fputs ("\tb\t", file);
11781 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
11782 if (NEED_PLT_RELOC)
11783 fputs ("(PLT)", file);
11784 fputc ('\n', file);
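/* Worked example (illustrative, with "this" in r0): for
   delta == 0x1234 the loop emits

       add r0, r0, #564     @ 0x234 == 0x8d << 2
       add r0, r0, #4096    @ 0x1000

   peeling the delta off in chunks that each fit the ARM
   shifted-8-bit immediate format.  */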