/* Output routines for GCC for ARM.
   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
   2001, 2002, 2003 Free Software Foundation, Inc.
   Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
   and Martin Simmons (@harleqn.co.uk).
   More major hacks by Richard Earnshaw (rearnsha@arm.com).

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "tm.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "obstack.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "real.h"
35 #include "insn-config.h"
36 #include "conditions.h"
37 #include "output.h"
38 #include "insn-attr.h"
39 #include "flags.h"
40 #include "reload.h"
41 #include "function.h"
42 #include "expr.h"
43 #include "optabs.h"
44 #include "toplev.h"
45 #include "recog.h"
46 #include "ggc.h"
47 #include "except.h"
48 #include "c-pragma.h"
49 #include "integrate.h"
50 #include "tm_p.h"
51 #include "target.h"
52 #include "target-def.h"
/* Forward definitions of types.  */
typedef struct minipool_node    Mnode;
typedef struct minipool_fixup   Mfix;

/* In order to improve the layout of the prototypes below
   some short type abbreviations are defined here.  */
#define Hint    HOST_WIDE_INT
#define Mmode   enum machine_mode
#define Ulong   unsigned long
#define Ccstar  const char *

const struct attribute_spec arm_attribute_table[];
/* Forward function declarations.  */
static void arm_add_gc_roots PARAMS ((void));
static int arm_gen_constant PARAMS ((enum rtx_code, Mmode, Hint, rtx, rtx, int, int));
static unsigned bit_count PARAMS ((Ulong));
static int arm_address_register_rtx_p PARAMS ((rtx, int));
static int arm_legitimate_index_p PARAMS ((enum machine_mode, rtx, int));
static int thumb_base_register_rtx_p PARAMS ((rtx, enum machine_mode, int));
inline static int thumb_index_register_rtx_p PARAMS ((rtx, int));
static int const_ok_for_op PARAMS ((Hint, enum rtx_code));
static int eliminate_lr2ip PARAMS ((rtx *));
static rtx emit_multi_reg_push PARAMS ((int));
static rtx emit_sfm PARAMS ((int, int));
#ifndef AOF_ASSEMBLER
static bool arm_assemble_integer PARAMS ((rtx, unsigned int, int));
#endif
static Ccstar fp_const_from_val PARAMS ((REAL_VALUE_TYPE *));
static arm_cc get_arm_condition_code PARAMS ((rtx));
static void init_fpa_table PARAMS ((void));
static Hint int_log2 PARAMS ((Hint));
static rtx is_jump_table PARAMS ((rtx));
static Ccstar output_multi_immediate PARAMS ((rtx *, Ccstar, Ccstar, int, Hint));
static void print_multi_reg PARAMS ((FILE *, Ccstar, int, int));
static Mmode select_dominance_cc_mode PARAMS ((rtx, rtx, Hint));
static Ccstar shift_op PARAMS ((rtx, Hint *));
static struct machine_function * arm_init_machine_status PARAMS ((void));
static int number_of_first_bit_set PARAMS ((int));
static void replace_symbols_in_block PARAMS ((tree, rtx, rtx));
static void thumb_exit PARAMS ((FILE *, int, rtx));
static void thumb_pushpop PARAMS ((FILE *, int, int));
static Ccstar thumb_condition_code PARAMS ((rtx, int));
static Hint get_jump_table_size PARAMS ((rtx));
static Mnode * move_minipool_fix_forward_ref PARAMS ((Mnode *, Mnode *, Hint));
static Mnode * add_minipool_forward_ref PARAMS ((Mfix *));
static Mnode * move_minipool_fix_backward_ref PARAMS ((Mnode *, Mnode *, Hint));
static Mnode * add_minipool_backward_ref PARAMS ((Mfix *));
static void assign_minipool_offsets PARAMS ((Mfix *));
static void arm_print_value PARAMS ((FILE *, rtx));
static void dump_minipool PARAMS ((rtx));
static int arm_barrier_cost PARAMS ((rtx));
static Mfix * create_fix_barrier PARAMS ((Mfix *, Hint));
static void push_minipool_barrier PARAMS ((rtx, Hint));
static void push_minipool_fix PARAMS ((rtx, Hint, rtx *, Mmode, rtx));
static void note_invalid_constants PARAMS ((rtx, Hint));
static int current_file_function_operand PARAMS ((rtx));
static Ulong arm_compute_save_reg0_reg12_mask PARAMS ((void));
static Ulong arm_compute_save_reg_mask PARAMS ((void));
static Ulong arm_isr_value PARAMS ((tree));
static Ulong arm_compute_func_type PARAMS ((void));
static tree arm_handle_fndecl_attribute PARAMS ((tree *, tree, tree, int, bool *));
static tree arm_handle_isr_attribute PARAMS ((tree *, tree, tree, int, bool *));
static void arm_output_function_epilogue PARAMS ((FILE *, Hint));
static void arm_output_function_prologue PARAMS ((FILE *, Hint));
static void thumb_output_function_prologue PARAMS ((FILE *, Hint));
static int arm_comp_type_attributes PARAMS ((tree, tree));
static void arm_set_default_type_attributes PARAMS ((tree));
static int arm_adjust_cost PARAMS ((rtx, rtx, rtx, int));
static int count_insns_for_constant PARAMS ((HOST_WIDE_INT, int));
static int arm_get_strip_length PARAMS ((int));
static bool arm_function_ok_for_sibcall PARAMS ((tree, tree));
#ifdef OBJECT_FORMAT_ELF
static void arm_elf_asm_named_section PARAMS ((const char *, unsigned int));
#endif
#ifndef ARM_PE
static void arm_encode_section_info PARAMS ((tree, int));
#endif
#ifdef AOF_ASSEMBLER
static void aof_globalize_label PARAMS ((FILE *, const char *));
#endif
static void arm_internal_label PARAMS ((FILE *, const char *, unsigned long));
static void arm_output_mi_thunk PARAMS ((FILE *, tree, HOST_WIDE_INT,
					 HOST_WIDE_INT, tree));
static int arm_rtx_costs_1 PARAMS ((rtx, enum rtx_code, enum rtx_code));
static bool arm_rtx_costs PARAMS ((rtx, int, int, int *));
static int arm_address_cost PARAMS ((rtx));

#undef Hint
#undef Mmode
#undef Ulong
#undef Ccstar
/* Initialize the GCC target structure.  */
#ifdef TARGET_DLLIMPORT_DECL_ATTRIBUTES
#undef  TARGET_MERGE_DECL_ATTRIBUTES
#define TARGET_MERGE_DECL_ATTRIBUTES merge_dllimport_decl_attributes
#endif

#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE arm_attribute_table

#ifdef AOF_ASSEMBLER
#undef  TARGET_ASM_BYTE_OP
#define TARGET_ASM_BYTE_OP "\tDCB\t"
#undef  TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\tDCW\t"
#undef  TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\tDCD\t"
#undef  TARGET_ASM_GLOBALIZE_LABEL
#define TARGET_ASM_GLOBALIZE_LABEL aof_globalize_label
#else
#undef  TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP NULL
#undef  TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER arm_assemble_integer
#endif

#undef  TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE arm_output_function_prologue

#undef  TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE arm_output_function_epilogue

#undef  TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES arm_comp_type_attributes

#undef  TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES arm_set_default_type_attributes

#undef  TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST arm_adjust_cost

#undef  TARGET_ENCODE_SECTION_INFO
#ifdef ARM_PE
#define TARGET_ENCODE_SECTION_INFO arm_pe_encode_section_info
#else
#define TARGET_ENCODE_SECTION_INFO arm_encode_section_info
#endif

#undef  TARGET_STRIP_NAME_ENCODING
#define TARGET_STRIP_NAME_ENCODING arm_strip_name_encoding

#undef  TARGET_ASM_INTERNAL_LABEL
#define TARGET_ASM_INTERNAL_LABEL arm_internal_label

#undef  TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL arm_function_ok_for_sibcall

#undef  TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK arm_output_mi_thunk
#undef  TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall

#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS arm_rtx_costs
#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST arm_address_cost

struct gcc_target targetm = TARGET_INITIALIZER;
/* Obstack for minipool constant handling.  */
static struct obstack minipool_obstack;
static char *         minipool_startobj;

/* The maximum number of insns skipped which
   will be conditionalised if possible.  */
static int max_insns_skipped = 5;

extern FILE * asm_out_file;

/* True if we are currently building a constant table.  */
int making_const_table;

/* Define the information needed to generate branch insns.  This is
   stored from the compare operation.  */
rtx arm_compare_op0, arm_compare_op1;

/* What type of floating point are we tuning for?  */
enum floating_point_type arm_fpu;

/* What type of floating point instructions are available?  */
enum floating_point_type arm_fpu_arch;

/* What program mode is the cpu running in?  26-bit mode or 32-bit mode.  */
enum prog_mode_type arm_prgmode;

/* Set by the -mfp=... option.  */
const char * target_fp_name = NULL;

/* Used to parse -mstructure_size_boundary command line option.  */
const char * structure_size_string = NULL;
int arm_structure_size_boundary = DEFAULT_STRUCTURE_SIZE_BOUNDARY;

/* Bit values used to identify processor capabilities.  */
#define FL_CO_PROC    (1 << 0)        /* Has external co-processor bus.  */
#define FL_FAST_MULT  (1 << 1)        /* Fast multiply.  */
#define FL_MODE26     (1 << 2)        /* 26-bit mode support.  */
#define FL_MODE32     (1 << 3)        /* 32-bit mode support.  */
#define FL_ARCH4      (1 << 4)        /* Architecture rel 4.  */
#define FL_ARCH5      (1 << 5)        /* Architecture rel 5.  */
#define FL_THUMB      (1 << 6)        /* Thumb aware.  */
#define FL_LDSCHED    (1 << 7)        /* Load scheduling necessary.  */
#define FL_STRONG     (1 << 8)        /* StrongARM.  */
#define FL_ARCH5E     (1 << 9)        /* DSP extensions to v5.  */
#define FL_XSCALE     (1 << 10)       /* XScale.  */

/* The bits in this mask specify which
   instructions we are allowed to generate.  */
static unsigned long insn_flags = 0;

/* The bits in this mask specify which instruction scheduling options should
   be used.  Note - there is an overlap with FL_FAST_MULT.  For some
   hardware we want to be able to generate the multiply instructions, but to
   tune as if they were not present in the architecture.  */
static unsigned long tune_flags = 0;

/* The following are used in the arm.md file as equivalents to bits
   in the above two flag variables.  */

/* Nonzero if this is an "M" variant of the processor.  */
int arm_fast_multiply = 0;

/* Nonzero if this chip supports the ARM Architecture 4 extensions.  */
int arm_arch4 = 0;

/* Nonzero if this chip supports the ARM Architecture 5 extensions.  */
int arm_arch5 = 0;

/* Nonzero if this chip supports the ARM Architecture 5E extensions.  */
int arm_arch5e = 0;

/* Nonzero if this chip can benefit from load scheduling.  */
int arm_ld_sched = 0;

/* Nonzero if this chip is a StrongARM.  */
int arm_is_strong = 0;

/* Nonzero if this chip is an XScale.  */
int arm_is_xscale = 0;

/* Nonzero if this chip is an ARM6 or an ARM7.  */
int arm_is_6_or_7 = 0;

/* Nonzero if generating Thumb instructions.  */
int thumb_code = 0;

/* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
   must report the mode of the memory reference from PRINT_OPERAND to
   PRINT_OPERAND_ADDRESS.  */
enum machine_mode output_memory_reference_mode;

/* The register number to be used for the PIC offset register.  */
const char * arm_pic_register_string = NULL;
int arm_pic_register = INVALID_REGNUM;

/* Set to 1 when a return insn is output, this means that the epilogue
   is not needed.  */
int return_used_this_function;

/* Set to 1 after arm_reorg has started.  Reset at the start of
   the next function.  */
static int after_arm_reorg = 0;

/* The maximum number of insns to be used when loading a constant.  */
static int arm_constant_limit = 3;

/* For an explanation of these variables, see final_prescan_insn below.  */
int arm_ccfsm_state;
enum arm_cond_code arm_current_cc;
rtx arm_target_insn;
int arm_target_label;

/* The condition codes of the ARM, and the inverse function.  */
static const char * const arm_condition_codes[] =
{
  "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
  "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
};

#define streq(string1, string2) (strcmp (string1, string2) == 0)
/* Initialization code.  */

struct processors
{
  const char *const name;
  const unsigned long flags;
};

/* Not all of these give usefully different compilation alternatives,
   but there is no simple way of generalizing them.  */
static const struct processors all_cores[] =
{
  /* ARM Cores */

  {"arm2",	FL_CO_PROC | FL_MODE26 },
  {"arm250",	FL_CO_PROC | FL_MODE26 },
  {"arm3",	FL_CO_PROC | FL_MODE26 },
  {"arm6",	FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm60",	FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm600",	FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm610",	             FL_MODE26 | FL_MODE32 },
  {"arm620",	FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7",	FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  /* arm7m doesn't exist on its own, but only with D, (and I), but
     those don't alter the code, so arm7m is sometimes used.  */
  {"arm7m",	FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7d",	FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dm",	FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7di",	FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dmi",	FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm70",	FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700",	FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700i",	FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm710",	             FL_MODE26 | FL_MODE32 },
  {"arm710t",	             FL_MODE26 | FL_MODE32 | FL_THUMB },
  {"arm720",	             FL_MODE26 | FL_MODE32 },
  {"arm720t",	             FL_MODE26 | FL_MODE32 | FL_THUMB },
  {"arm740t",	             FL_MODE26 | FL_MODE32 | FL_THUMB },
  {"arm710c",	             FL_MODE26 | FL_MODE32 },
  {"arm7100",	             FL_MODE26 | FL_MODE32 },
  {"arm7500",	             FL_MODE26 | FL_MODE32 },
  /* Doesn't have an external co-proc, but does have embedded fpu.  */
  {"arm7500fe",	FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7tdmi",	FL_CO_PROC |             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  {"arm8",	             FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm810",	             FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm9",	                         FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm920",	                         FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm920t",	                         FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm940t",	                         FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm9tdmi",	                         FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm9e",	                         FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"strongarm",	             FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm110",           FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm1100",          FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm1110",          FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"arm10tdmi",	                         FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_ARCH5 },
  {"arm1020t",	                         FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_ARCH5 },
  {"xscale",	                         FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_STRONG | FL_ARCH5 | FL_ARCH5E | FL_XSCALE },

  {NULL, 0}
};
static const struct processors all_architectures[] =
{
  /* ARM Architectures */

  { "armv2",     FL_CO_PROC | FL_MODE26 },
  { "armv2a",    FL_CO_PROC | FL_MODE26 },
  { "armv3",     FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  { "armv3m",    FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  { "armv4",     FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 },
  /* Strictly, FL_MODE26 is a permitted option for v4t, but there are no
     implementations that support it, so we will leave it out for now.  */
  { "armv4t",    FL_CO_PROC |             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  { "armv5",     FL_CO_PROC |             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
  { "armv5t",    FL_CO_PROC |             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
  { "armv5te",   FL_CO_PROC |             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 | FL_ARCH5E },
  { NULL, 0 }
};
/* This is a magic structure.  The 'string' field is magically filled in
   with a pointer to the value specified by the user on the command line
   assuming that the user has specified such a value.  */

struct arm_cpu_select arm_select[] =
{
  /* string	  name			processors  */
  { NULL,	"-mcpu=",		all_cores  },
  { NULL,	"-march=",		all_architectures },
  { NULL,	"-mtune=",		all_cores }
};
/* Return the number of bits set in VALUE.  */
static unsigned
bit_count (value)
     unsigned long value;
{
  unsigned long count = 0;

  while (value)
    {
      count++;
      value &= value - 1;  /* Clear the least-significant set bit.  */
    }

  return count;
}
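
/* bit_count relies on the classic v &= v - 1 trick: each iteration clears
   the lowest set bit, so the loop runs once per set bit rather than once
   per bit position.  A worked trace (illustrative only):

     value = 0x29 (binary 101001)
       0x29 & 0x28 = 0x28
       0x28 & 0x27 = 0x20
       0x20 & 0x1f = 0x00   =>  count = 3.  */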
/* Fix up any incompatible options that the user has specified.
   This has now turned into a maze.  */
void
arm_override_options ()
{
  unsigned i;

  /* Set up the flags based on the cpu/architecture selected by the user.  */
  for (i = ARRAY_SIZE (arm_select); i--;)
    {
      struct arm_cpu_select * ptr = arm_select + i;

      if (ptr->string != NULL && ptr->string[0] != '\0')
	{
	  const struct processors * sel;

	  for (sel = ptr->processors; sel->name != NULL; sel++)
	    if (streq (ptr->string, sel->name))
	      {
		if (i == 2)
		  tune_flags = sel->flags;
		else
		  {
		    /* If we have been given an architecture and a processor
		       make sure that they are compatible.  We only generate
		       a warning though, and we prefer the CPU over the
		       architecture.  */
		    if (insn_flags != 0 && (insn_flags ^ sel->flags))
		      warning ("switch -mcpu=%s conflicts with -march= switch",
			       ptr->string);

		    insn_flags = sel->flags;
		  }

		break;
	      }

	  if (sel->name == NULL)
	    error ("bad value (%s) for %s switch", ptr->string, ptr->name);
	}
    }
  /* If the user did not specify a processor, choose one for them.  */
  if (insn_flags == 0)
    {
      const struct processors * sel;
      unsigned int sought;
      static const struct cpu_default
      {
	const int cpu;
	const char *const name;
      }
      cpu_defaults[] =
      {
	{ TARGET_CPU_arm2,      "arm2" },
	{ TARGET_CPU_arm6,      "arm6" },
	{ TARGET_CPU_arm610,    "arm610" },
	{ TARGET_CPU_arm710,    "arm710" },
	{ TARGET_CPU_arm7m,     "arm7m" },
	{ TARGET_CPU_arm7500fe, "arm7500fe" },
	{ TARGET_CPU_arm7tdmi,  "arm7tdmi" },
	{ TARGET_CPU_arm8,      "arm8" },
	{ TARGET_CPU_arm810,    "arm810" },
	{ TARGET_CPU_arm9,      "arm9" },
	{ TARGET_CPU_strongarm, "strongarm" },
	{ TARGET_CPU_xscale,    "xscale" },
	{ TARGET_CPU_generic,   "arm" },
	{ 0, 0 }
      };
      const struct cpu_default * def;

      /* Find the default.  */
      for (def = cpu_defaults; def->name; def++)
	if (def->cpu == TARGET_CPU_DEFAULT)
	  break;

      /* Make sure we found the default CPU.  */
      if (def->name == NULL)
	abort ();

      /* Find the default CPU's flags.  */
      for (sel = all_cores; sel->name != NULL; sel++)
	if (streq (def->name, sel->name))
	  break;

      if (sel->name == NULL)
	abort ();

      insn_flags = sel->flags;

      /* Now check to see if the user has specified any command line
	 switches that require certain abilities from the cpu.  */
      sought = 0;

      if (TARGET_INTERWORK || TARGET_THUMB)
	{
	  sought |= (FL_THUMB | FL_MODE32);

	  /* Force apcs-32 to be used for interworking.  */
	  target_flags |= ARM_FLAG_APCS_32;

	  /* There are no ARM processors that support both APCS-26 and
	     interworking.  Therefore we force FL_MODE26 to be removed
	     from insn_flags here (if it was set), so that the search
	     below will always be able to find a compatible processor.  */
	  insn_flags &= ~FL_MODE26;
	}
      else if (!TARGET_APCS_32)
	sought |= FL_MODE26;

      if (sought != 0 && ((sought & insn_flags) != sought))
	{
	  /* Try to locate a CPU type that supports all of the abilities
	     of the default CPU, plus the extra abilities requested by
	     the user.  */
	  for (sel = all_cores; sel->name != NULL; sel++)
	    if ((sel->flags & sought) == (sought | insn_flags))
	      break;

	  if (sel->name == NULL)
	    {
	      unsigned current_bit_count = 0;
	      const struct processors * best_fit = NULL;

	      /* Ideally we would like to issue an error message here
		 saying that it was not possible to find a CPU compatible
		 with the default CPU, but which also supports the command
		 line options specified by the programmer, and so they
		 ought to use the -mcpu=<name> command line option to
		 override the default CPU type.

		 Unfortunately this does not work with multilibing.  We
		 need to be able to support multilibs for -mapcs-26 and for
		 -mthumb-interwork and there is no CPU that can support both
		 options.  Instead if we cannot find a cpu that has both the
		 characteristics of the default cpu and the given command line
		 options we scan the array again looking for a best match.  */
	      for (sel = all_cores; sel->name != NULL; sel++)
		if ((sel->flags & sought) == sought)
		  {
		    unsigned count;

		    count = bit_count (sel->flags & insn_flags);

		    if (count >= current_bit_count)
		      {
			best_fit = sel;
			current_bit_count = count;
		      }
		  }

	      if (best_fit == NULL)
		abort ();
	      else
		sel = best_fit;
	    }

	  insn_flags = sel->flags;
	}
    }
  /* If tuning has not been specified, tune for whichever processor or
     architecture has been selected.  */
  if (tune_flags == 0)
    tune_flags = insn_flags;

  /* Make sure that the processor choice does not conflict with any of the
     other command line choices.  */
  if (TARGET_APCS_32 && !(insn_flags & FL_MODE32))
    {
      /* If APCS-32 was not the default then it must have been set by the
	 user, so issue a warning message.  If the user has specified
	 "-mapcs-32 -mcpu=arm2" then we lose here.  */
      if ((TARGET_DEFAULT & ARM_FLAG_APCS_32) == 0)
	warning ("target CPU does not support APCS-32");
      target_flags &= ~ARM_FLAG_APCS_32;
    }
  else if (!TARGET_APCS_32 && !(insn_flags & FL_MODE26))
    {
      warning ("target CPU does not support APCS-26");
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_INTERWORK && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support interworking");
      target_flags &= ~ARM_FLAG_INTERWORK;
    }

  if (TARGET_THUMB && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support THUMB instructions");
      target_flags &= ~ARM_FLAG_THUMB;
    }

  if (TARGET_APCS_FRAME && TARGET_THUMB)
    {
      /* warning ("ignoring -mapcs-frame because -mthumb was used");  */
      target_flags &= ~ARM_FLAG_APCS_FRAME;
    }

  /* TARGET_BACKTRACE calls leaf_function_p, which causes a crash if done
     from here where no function is being compiled currently.  */
  if ((target_flags & (THUMB_FLAG_LEAF_BACKTRACE | THUMB_FLAG_BACKTRACE))
      && TARGET_ARM)
    warning ("enabling backtrace support is only meaningful when compiling for the Thumb");

  if (TARGET_ARM && TARGET_CALLEE_INTERWORKING)
    warning ("enabling callee interworking support is only meaningful when compiling for the Thumb");

  if (TARGET_ARM && TARGET_CALLER_INTERWORKING)
    warning ("enabling caller interworking support is only meaningful when compiling for the Thumb");

  /* If interworking is enabled then APCS-32 must be selected as well.  */
  if (TARGET_INTERWORK)
    {
      if (!TARGET_APCS_32)
	warning ("interworking forces APCS-32 to be used");
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_APCS_STACK && !TARGET_APCS_FRAME)
    {
      warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
      target_flags |= ARM_FLAG_APCS_FRAME;
    }

  if (TARGET_POKE_FUNCTION_NAME)
    target_flags |= ARM_FLAG_APCS_FRAME;

  if (TARGET_APCS_REENT && flag_pic)
    error ("-fpic and -mapcs-reent are incompatible");

  if (TARGET_APCS_REENT)
    warning ("APCS reentrant code not supported.  Ignored");

  /* If this target is normally configured to use APCS frames, warn if they
     are turned off and debugging is turned on.  */
  if (TARGET_ARM
      && write_symbols != NO_DEBUG
      && !TARGET_APCS_FRAME
      && (TARGET_DEFAULT & ARM_FLAG_APCS_FRAME))
    warning ("-g with -mno-apcs-frame may not give sensible debugging");

  /* If stack checking is disabled, we can use r10 as the PIC register,
     which keeps r9 available.  */
  if (flag_pic)
    arm_pic_register = TARGET_APCS_STACK ? 9 : 10;

  if (TARGET_APCS_FLOAT)
    warning ("passing floating point arguments in fp regs not yet supported");
  /* Initialize boolean versions of the flags, for use in the arm.md file.  */
  arm_fast_multiply = (insn_flags & FL_FAST_MULT) != 0;
  arm_arch4         = (insn_flags & FL_ARCH4) != 0;
  arm_arch5         = (insn_flags & FL_ARCH5) != 0;
  arm_arch5e        = (insn_flags & FL_ARCH5E) != 0;
  arm_is_xscale     = (insn_flags & FL_XSCALE) != 0;

  arm_ld_sched      = (tune_flags & FL_LDSCHED) != 0;
  arm_is_strong     = (tune_flags & FL_STRONG) != 0;
  thumb_code        = (TARGET_ARM == 0);
  arm_is_6_or_7     = (((tune_flags & (FL_MODE26 | FL_MODE32))
			&& !(tune_flags & FL_ARCH4))) != 0;

  /* Default value for floating point code... if no co-processor
     bus, then schedule for emulated floating point.  Otherwise,
     assume the user has an FPA.
     Note: this does not prevent use of floating point instructions,
     -msoft-float does that.  */
  arm_fpu = (tune_flags & FL_CO_PROC) ? FP_HARD : FP_SOFT3;

  if (target_fp_name)
    {
      if (streq (target_fp_name, "2"))
	arm_fpu_arch = FP_SOFT2;
      else if (streq (target_fp_name, "3"))
	arm_fpu_arch = FP_SOFT3;
      else
	error ("invalid floating point emulation option: -mfpe-%s",
	       target_fp_name);
    }
  else
    arm_fpu_arch = FP_DEFAULT;

  if (TARGET_FPE && arm_fpu != FP_HARD)
    arm_fpu = FP_SOFT2;

  /* For arm2/3 there is no need to do any scheduling if there is only
     a floating point emulator, or we are doing software floating-point.  */
  if ((TARGET_SOFT_FLOAT || arm_fpu != FP_HARD)
      && (tune_flags & FL_MODE32) == 0)
    flag_schedule_insns = flag_schedule_insns_after_reload = 0;

  arm_prgmode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;

  if (structure_size_string != NULL)
    {
      int size = strtol (structure_size_string, NULL, 0);

      if (size == 8 || size == 32)
	arm_structure_size_boundary = size;
      else
	warning ("structure size boundary can only be set to 8 or 32");
    }

  if (arm_pic_register_string != NULL)
    {
      int pic_register = decode_reg_name (arm_pic_register_string);

      if (!flag_pic)
	warning ("-mpic-register= is useless without -fpic");

      /* Prevent the user from choosing an obviously stupid PIC register.  */
      else if (pic_register < 0 || call_used_regs[pic_register]
	       || pic_register == HARD_FRAME_POINTER_REGNUM
	       || pic_register == STACK_POINTER_REGNUM
	       || pic_register >= PC_REGNUM)
	error ("unable to use '%s' for PIC register", arm_pic_register_string);
      else
	arm_pic_register = pic_register;
    }

  if (TARGET_THUMB && flag_schedule_insns)
    {
      /* Don't warn since it's on by default in -O2.  */
      flag_schedule_insns = 0;
    }

  /* If optimizing for space, don't synthesize constants.
     For processors with load scheduling, it never costs more than 2 cycles
     to load a constant, and the load scheduler may well reduce that to 1.  */
  if (optimize_size || (tune_flags & FL_LDSCHED))
    arm_constant_limit = 1;

  if (arm_is_xscale)
    arm_constant_limit = 2;

  /* If optimizing for size, bump the number of instructions that we
     are prepared to conditionally execute (even on a StrongARM).
     Otherwise for the StrongARM, which has early execution of branches,
     a sequence that is worth skipping is shorter.  */
  if (optimize_size)
    max_insns_skipped = 6;
  else if (arm_is_strong)
    max_insns_skipped = 3;

  /* Register global variables with the garbage collector.  */
  arm_add_gc_roots ();
}
static void
arm_add_gc_roots ()
{
  gcc_obstack_init (&minipool_obstack);
  minipool_startobj = (char *) obstack_alloc (&minipool_obstack, 0);
}
/* A table of known ARM exception types.
   For use with the interrupt function attribute.  */

typedef struct
{
  const char *const arg;
  const unsigned long return_value;
}
isr_attribute_arg;

static const isr_attribute_arg isr_attribute_args [] =
{
  { "IRQ",   ARM_FT_ISR },
  { "irq",   ARM_FT_ISR },
  { "FIQ",   ARM_FT_FIQ },
  { "fiq",   ARM_FT_FIQ },
  { "ABORT", ARM_FT_ISR },
  { "abort", ARM_FT_ISR },
  { "UNDEF", ARM_FT_EXCEPTION },
  { "undef", ARM_FT_EXCEPTION },
  { "SWI",   ARM_FT_EXCEPTION },
  { "swi",   ARM_FT_EXCEPTION },
  { NULL,    ARM_FT_NORMAL }
};
/* Returns the (interrupt) function type of the current
   function, or ARM_FT_UNKNOWN if the type cannot be determined.  */

static unsigned long
arm_isr_value (argument)
     tree argument;
{
  const isr_attribute_arg * ptr;
  const char * arg;

  /* No argument - default to IRQ.  */
  if (argument == NULL_TREE)
    return ARM_FT_ISR;

  /* Get the value of the argument.  */
  if (TREE_VALUE (argument) == NULL_TREE
      || TREE_CODE (TREE_VALUE (argument)) != STRING_CST)
    return ARM_FT_UNKNOWN;

  arg = TREE_STRING_POINTER (TREE_VALUE (argument));

  /* Check it against the list of known arguments.  */
  for (ptr = isr_attribute_args; ptr->arg != NULL; ptr++)
    if (streq (arg, ptr->arg))
      return ptr->return_value;

  /* An unrecognized interrupt type.  */
  return ARM_FT_UNKNOWN;
}
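
/* In user code the table above corresponds to function attributes such as
   (an illustrative fragment, not found in this file):

     void irq_handler (void) __attribute__ ((interrupt ("IRQ")));
     void fiq_handler (void) __attribute__ ((isr ("FIQ")));
     void dfl_handler (void) __attribute__ ((interrupt));

   The last form has no argument, so arm_isr_value receives NULL_TREE and
   defaults it to ARM_FT_ISR.  */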
/* Computes the type of the current function.  */

static unsigned long
arm_compute_func_type ()
{
  unsigned long type = ARM_FT_UNKNOWN;
  tree a;
  tree attr;

  if (TREE_CODE (current_function_decl) != FUNCTION_DECL)
    abort ();

  /* Decide if the current function is volatile.  Such functions
     never return, and many memory cycles can be saved by not storing
     register values that will never be needed again.  This optimization
     was added to speed up context switching in a kernel application.  */
  if (optimize > 0
      && current_function_nothrow
      && TREE_THIS_VOLATILE (current_function_decl))
    type |= ARM_FT_VOLATILE;

  if (current_function_needs_context)
    type |= ARM_FT_NESTED;

  attr = DECL_ATTRIBUTES (current_function_decl);

  a = lookup_attribute ("naked", attr);
  if (a != NULL_TREE)
    type |= ARM_FT_NAKED;

  if (cfun->machine->eh_epilogue_sp_ofs != NULL_RTX)
    type |= ARM_FT_EXCEPTION_HANDLER;
  else
    {
      a = lookup_attribute ("isr", attr);
      if (a == NULL_TREE)
	a = lookup_attribute ("interrupt", attr);

      if (a == NULL_TREE)
	type |= TARGET_INTERWORK ? ARM_FT_INTERWORKED : ARM_FT_NORMAL;
      else
	type |= arm_isr_value (TREE_VALUE (a));
    }

  return type;
}
/* Returns the type of the current function.  */

unsigned long
arm_current_func_type ()
{
  if (ARM_FUNC_TYPE (cfun->machine->func_type) == ARM_FT_UNKNOWN)
    cfun->machine->func_type = arm_compute_func_type ();

  return cfun->machine->func_type;
}
/* Return 1 if it is possible to return using a single instruction.  */

int
use_return_insn (iscond)
     int iscond;
{
  int regno;
  unsigned int func_type;
  unsigned long saved_int_regs;

  /* Never use a return instruction before reload has run.  */
  if (!reload_completed)
    return 0;

  func_type = arm_current_func_type ();

  /* Naked functions and volatile functions need special
     consideration.  */
  if (func_type & (ARM_FT_VOLATILE | ARM_FT_NAKED))
    return 0;

  /* So do interrupt functions that use the frame pointer.  */
  if (IS_INTERRUPT (func_type) && frame_pointer_needed)
    return 0;

  /* As do variadic functions.  */
  if (current_function_pretend_args_size
      || cfun->machine->uses_anonymous_args
      /* Or if the function calls __builtin_eh_return ().  */
      || ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER
      /* Or if there is no frame pointer and there is a stack adjustment.  */
      || ((arm_get_frame_size () + current_function_outgoing_args_size != 0)
	  && !frame_pointer_needed))
    return 0;

  saved_int_regs = arm_compute_save_reg_mask ();

  /* Can't be done if interworking with Thumb, and any registers have been
     stacked.  */
  if (TARGET_INTERWORK && saved_int_regs != 0)
    return 0;

  /* On StrongARM, conditional returns are expensive if they aren't
     taken and multiple registers have been stacked.  */
  if (iscond && arm_is_strong)
    {
      /* Conditional return when just the LR is stored is a simple
	 conditional-load instruction, that's not expensive.  */
      if (saved_int_regs != 0 && saved_int_regs != (1 << LR_REGNUM))
	return 0;

      if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
	return 0;
    }

  /* If there are saved registers but the LR isn't saved, then we need
     two instructions for the return.  */
  if (saved_int_regs && !(saved_int_regs & (1 << LR_REGNUM)))
    return 0;

  /* Can't be done if any of the FPU regs are pushed,
     since this also requires an insn.  */
  if (TARGET_HARD_FLOAT)
    for (regno = FIRST_ARM_FP_REGNUM; regno <= LAST_ARM_FP_REGNUM; regno++)
      if (regs_ever_live[regno] && !call_used_regs[regno])
	return 0;

  return 1;
}
/* Return TRUE if int I is a valid immediate ARM constant.  */

int
const_ok_for_arm (i)
     HOST_WIDE_INT i;
{
  unsigned HOST_WIDE_INT mask = ~(unsigned HOST_WIDE_INT)0xFF;

  /* For machines with >32 bit HOST_WIDE_INT, the bits above bit 31 must
     be all zero, or all one.  */
  if ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff) != 0
      && ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff)
	  != ((~(unsigned HOST_WIDE_INT) 0)
	      & ~(unsigned HOST_WIDE_INT) 0xffffffff)))
    return FALSE;

  /* Fast return for 0 and powers of 2.  */
  if ((i & (i - 1)) == 0)
    return TRUE;

  do
    {
      if ((i & mask & (unsigned HOST_WIDE_INT) 0xffffffff) == 0)
	return TRUE;
      mask =
	(mask << 2) | ((mask & (unsigned HOST_WIDE_INT) 0xffffffff)
		       >> (32 - 2)) | ~(unsigned HOST_WIDE_INT) 0xffffffff;
    }
  while (mask != ~(unsigned HOST_WIDE_INT) 0xFF);

  return FALSE;
}
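
/* The loop above implements the ARM data-processing immediate rule: a
   value is valid if it is an 8-bit constant rotated right by an even
   amount.  Written out directly, a standalone equivalent looks like this
   (a sketch assuming a 32-bit unsigned int, not compiler code):

     static int
     valid_arm_immediate (x)
          unsigned int x;
     {
       int rot;

       for (rot = 0; rot < 32; rot += 2)
         {
           unsigned int v = rot ? ((x << rot) | (x >> (32 - rot))) : x;

           if ((v & ~(unsigned int) 0xff) == 0)
             return 1;
         }
       return 0;
     }

   So 0xff000000 (0xff rotated right by 8) is valid, while 0x101 is not:
   its two set bits are 8 positions apart and never fit in one 8-bit
   window.  */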
/* Return true if I is a valid constant for the operation CODE.  */
static int
const_ok_for_op (i, code)
     HOST_WIDE_INT i;
     enum rtx_code code;
{
  if (const_ok_for_arm (i))
    return 1;

  switch (code)
    {
    case PLUS:
      return const_ok_for_arm (ARM_SIGN_EXTEND (-i));

    case MINUS:		/* Should only occur with (MINUS I reg) => rsb */
    case XOR:
    case IOR:
      return 0;

    case AND:
      return const_ok_for_arm (ARM_SIGN_EXTEND (~i));

    default:
      abort ();
    }
}
/* Emit a sequence of insns to handle a large constant.
   CODE is the code of the operation required, it can be any of SET, PLUS,
   IOR, AND, XOR, MINUS;
   MODE is the mode in which the operation is being performed;
   VAL is the integer to operate on;
   SOURCE is the other operand (a register, or a null-pointer for SET);
   SUBTARGETS means it is safe to create scratch registers if that will
   either produce a simpler sequence, or we will want to cse the values.
   Return value is the number of insns emitted.  */

int
arm_split_constant (code, mode, val, target, source, subtargets)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
{
  if (subtargets || code == SET
      || (GET_CODE (target) == REG && GET_CODE (source) == REG
	  && REGNO (target) != REGNO (source)))
    {
      /* After arm_reorg has been called, we can't fix up expensive
	 constants by pushing them into memory so we must synthesize
	 them in-line, regardless of the cost.  This is only likely to
	 be more costly on chips that have load delay slots and we are
	 compiling without running the scheduler (so no splitting
	 occurred before the final instruction emission).

	 Ref: gcc -O1 -mcpu=strongarm gcc.c-torture/compile/980506-2.c  */
      if (!after_arm_reorg
	  && (arm_gen_constant (code, mode, val, target, source, 1, 0)
	      > arm_constant_limit + (code != SET)))
	{
	  if (code == SET)
	    {
	      /* Currently SET is the only monadic value for CODE, all
		 the rest are dyadic.  */
	      emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (val)));
	      return 1;
	    }
	  else
	    {
	      rtx temp = subtargets ? gen_reg_rtx (mode) : target;

	      emit_insn (gen_rtx_SET (VOIDmode, temp, GEN_INT (val)));
	      /* For MINUS, the value is subtracted from, since we never
		 have subtraction of a constant.  */
	      if (code == MINUS)
		emit_insn (gen_rtx_SET (VOIDmode, target,
					gen_rtx_MINUS (mode, temp, source)));
	      else
		emit_insn (gen_rtx_SET (VOIDmode, target,
					gen_rtx (code, mode, source, temp)));
	      return 2;
	    }
	}
    }

  return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
}
static int
count_insns_for_constant (remainder, i)
     HOST_WIDE_INT remainder;
     int i;
{
  HOST_WIDE_INT temp1;
  int num_insns = 0;
  do
    {
      int end;

      if (i <= 0)
	i += 32;
      if (remainder & (3 << (i - 2)))
	{
	  end = i - 8;
	  if (end < 0)
	    end += 32;
	  temp1 = remainder & ((0x0ff << end)
			       | ((i < end) ? (0xff >> (32 - end)) : 0));
	  remainder &= ~temp1;
	  num_insns++;
	  i -= 6;
	}
      i -= 2;
    } while (remainder);
  return num_insns;
}
/* As above, but extra parameter GENERATE which, if clear, suppresses
   RTL generation.  */

static int
arm_gen_constant (code, mode, val, target, source, subtargets, generate)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
     int generate;
{
  int can_invert = 0;
  int can_negate = 0;
  int can_negate_initial = 0;
  int can_shift = 0;
  int i;
  int num_bits_set = 0;
  int set_sign_bit_copies = 0;
  int clear_sign_bit_copies = 0;
  int clear_zero_bit_copies = 0;
  int set_zero_bit_copies = 0;
  int insns = 0;
  unsigned HOST_WIDE_INT temp1, temp2;
  unsigned HOST_WIDE_INT remainder = val & 0xffffffff;

  /* Find out which operations are safe for a given CODE.  Also do a quick
     check for degenerate cases; these can occur when DImode operations
     are split.  */
  switch (code)
    {
    case SET:
      can_invert = 1;
      can_shift = 1;
      can_negate = 1;
      break;

    case PLUS:
      can_negate = 1;
      can_negate_initial = 1;
      break;

    case IOR:
      if (remainder == 0xffffffff)
	{
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target,
				    GEN_INT (ARM_SIGN_EXTEND (val))));
	  return 1;
	}
      if (remainder == 0)
	{
	  if (reload_completed && rtx_equal_p (target, source))
	    return 0;
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target, source));
	  return 1;
	}
      break;

    case AND:
      if (remainder == 0)
	{
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target, const0_rtx));
	  return 1;
	}
      if (remainder == 0xffffffff)
	{
	  if (reload_completed && rtx_equal_p (target, source))
	    return 0;
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target, source));
	  return 1;
	}
      can_invert = 1;
      break;

    case XOR:
      if (remainder == 0)
	{
	  if (reload_completed && rtx_equal_p (target, source))
	    return 0;
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target, source));
	  return 1;
	}
      if (remainder == 0xffffffff)
	{
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target,
				    gen_rtx_NOT (mode, source)));
	  return 1;
	}

      /* We don't know how to handle this yet below.  */
      abort ();

    case MINUS:
      /* We treat MINUS as (val - source), since (source - val) is always
	 passed as (source + (-val)).  */
      if (remainder == 0)
	{
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target,
				    gen_rtx_NEG (mode, source)));
	  return 1;
	}
      if (const_ok_for_arm (val))
	{
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target,
				    gen_rtx_MINUS (mode, GEN_INT (val),
						   source)));
	  return 1;
	}
      can_negate = 1;

      break;

    default:
      abort ();
    }
  /* If we can do it in one insn get out quickly.  */
  if (const_ok_for_arm (val)
      || (can_negate_initial && const_ok_for_arm (-val))
      || (can_invert && const_ok_for_arm (~val)))
    {
      if (generate)
	emit_insn (gen_rtx_SET (VOIDmode, target,
				(source ? gen_rtx (code, mode, source,
						   GEN_INT (val))
				 : GEN_INT (val))));
      return 1;
    }

  /* Calculate a few attributes that may be useful for specific
     optimizations.  */
  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) == 0)
	clear_sign_bit_copies++;
      else
	break;
    }

  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) != 0)
	set_sign_bit_copies++;
      else
	break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) == 0)
	clear_zero_bit_copies++;
      else
	break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) != 0)
	set_zero_bit_copies++;
      else
	break;
    }
  switch (code)
    {
    case SET:
      /* See if we can do this by sign_extending a constant that is known
	 to be negative.  This is a good way of doing it, since the shift
	 may well merge into a subsequent insn.  */
      if (set_sign_bit_copies > 1)
	{
	  if (const_ok_for_arm
	      (temp1 = ARM_SIGN_EXTEND (remainder
					<< (set_sign_bit_copies - 1))))
	    {
	      if (generate)
		{
		  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
		  emit_insn (gen_rtx_SET (VOIDmode, new_src,
					  GEN_INT (temp1)));
		  emit_insn (gen_ashrsi3 (target, new_src,
					  GEN_INT (set_sign_bit_copies - 1)));
		}
	      return 2;
	    }
	  /* For an inverted constant, we will need to set the low bits,
	     these will be shifted out of harm's way.  */
	  temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
	  if (const_ok_for_arm (~temp1))
	    {
	      if (generate)
		{
		  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
		  emit_insn (gen_rtx_SET (VOIDmode, new_src,
					  GEN_INT (temp1)));
		  emit_insn (gen_ashrsi3 (target, new_src,
					  GEN_INT (set_sign_bit_copies - 1)));
		}
	      return 2;
	    }
	}

      /* See if we can generate this by setting the bottom (or the top)
	 16 bits, and then shifting these into the other half of the
	 word.  We only look for the simplest cases, to do more would cost
	 too much.  Be careful, however, not to generate this when the
	 alternative would take fewer insns.  */
      if (val & 0xffff0000)
	{
	  temp1 = remainder & 0xffff0000;
	  temp2 = remainder & 0x0000ffff;

	  /* Overlaps outside this range are best done using other methods.  */
	  for (i = 9; i < 24; i++)
	    {
	      if ((((temp2 | (temp2 << i)) & 0xffffffff) == remainder)
		  && !const_ok_for_arm (temp2))
		{
		  rtx new_src = (subtargets
				 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
				 : target);
		  insns = arm_gen_constant (code, mode, temp2, new_src,
					    source, subtargets, generate);
		  source = new_src;
		  if (generate)
		    emit_insn (gen_rtx_SET
			       (VOIDmode, target,
				gen_rtx_IOR (mode,
					     gen_rtx_ASHIFT (mode, source,
							     GEN_INT (i)),
					     source)));
		  return insns + 1;
		}
	    }

	  /* Don't duplicate cases already considered.  */
	  for (i = 17; i < 24; i++)
	    {
	      if (((temp1 | (temp1 >> i)) == remainder)
		  && !const_ok_for_arm (temp1))
		{
		  rtx new_src = (subtargets
				 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
				 : target);
		  insns = arm_gen_constant (code, mode, temp1, new_src,
					    source, subtargets, generate);
		  source = new_src;
		  if (generate)
		    emit_insn
		      (gen_rtx_SET (VOIDmode, target,
				    gen_rtx_IOR
				    (mode,
				     gen_rtx_LSHIFTRT (mode, source,
						       GEN_INT (i)),
				     source)));
		  return insns + 1;
		}
	    }
	}
      break;
    case IOR:
    case XOR:
      /* If we have IOR or XOR, and the constant can be loaded in a
	 single instruction, and we can find a temporary to put it in,
	 then this can be done in two instructions instead of 3-4.  */
      if (subtargets
	  /* TARGET can't be NULL if SUBTARGETS is 0.  */
	  || (reload_completed && !reg_mentioned_p (target, source)))
	{
	  if (const_ok_for_arm (ARM_SIGN_EXTEND (~val)))
	    {
	      if (generate)
		{
		  rtx sub = subtargets ? gen_reg_rtx (mode) : target;

		  emit_insn (gen_rtx_SET (VOIDmode, sub, GEN_INT (val)));
		  emit_insn (gen_rtx_SET (VOIDmode, target,
					  gen_rtx (code, mode, source, sub)));
		}
	      return 2;
	    }
	}

      if (code == XOR)
	break;

      if (set_sign_bit_copies > 8
	  && (val & (-1 << (32 - set_sign_bit_copies))) == val)
	{
	  if (generate)
	    {
	      rtx sub = subtargets ? gen_reg_rtx (mode) : target;
	      rtx shift = GEN_INT (set_sign_bit_copies);

	      emit_insn (gen_rtx_SET (VOIDmode, sub,
				      gen_rtx_NOT (mode,
						   gen_rtx_ASHIFT (mode,
								   source,
								   shift))));
	      emit_insn (gen_rtx_SET (VOIDmode, target,
				      gen_rtx_NOT (mode,
						   gen_rtx_LSHIFTRT (mode, sub,
								     shift))));
	    }
	  return 2;
	}

      if (set_zero_bit_copies > 8
	  && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
	{
	  if (generate)
	    {
	      rtx sub = subtargets ? gen_reg_rtx (mode) : target;
	      rtx shift = GEN_INT (set_zero_bit_copies);

	      emit_insn (gen_rtx_SET (VOIDmode, sub,
				      gen_rtx_NOT (mode,
						   gen_rtx_LSHIFTRT (mode,
								     source,
								     shift))));
	      emit_insn (gen_rtx_SET (VOIDmode, target,
				      gen_rtx_NOT (mode,
						   gen_rtx_ASHIFT (mode, sub,
								   shift))));
	    }
	  return 2;
	}

      if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~val)))
	{
	  if (generate)
	    {
	      rtx sub = subtargets ? gen_reg_rtx (mode) : target;
	      emit_insn (gen_rtx_SET (VOIDmode, sub,
				      gen_rtx_NOT (mode, source)));
	      source = sub;
	      if (subtargets)
		sub = gen_reg_rtx (mode);
	      emit_insn (gen_rtx_SET (VOIDmode, sub,
				      gen_rtx_AND (mode, source,
						   GEN_INT (temp1))));
	      emit_insn (gen_rtx_SET (VOIDmode, target,
				      gen_rtx_NOT (mode, sub)));
	    }
	  return 3;
	}
      break;
    case AND:
      /* See if two shifts will do 2 or more insn's worth of work.  */
      if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
	{
	  HOST_WIDE_INT shift_mask = ((0xffffffff
				       << (32 - clear_sign_bit_copies))
				      & 0xffffffff);

	  if ((remainder | shift_mask) != 0xffffffff)
	    {
	      if (generate)
		{
		  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
		  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
					    new_src, source, subtargets, 1);
		  source = new_src;
		}
	      else
		{
		  rtx targ = subtargets ? NULL_RTX : target;
		  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
					    targ, source, subtargets, 0);
		}
	    }

	  if (generate)
	    {
	      rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
	      rtx shift = GEN_INT (clear_sign_bit_copies);

	      emit_insn (gen_ashlsi3 (new_src, source, shift));
	      emit_insn (gen_lshrsi3 (target, new_src, shift));
	    }

	  return insns + 2;
	}

      if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
	{
	  HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;

	  if ((remainder | shift_mask) != 0xffffffff)
	    {
	      if (generate)
		{
		  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;

		  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
					    new_src, source, subtargets, 1);
		  source = new_src;
		}
	      else
		{
		  rtx targ = subtargets ? NULL_RTX : target;

		  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
					    targ, source, subtargets, 0);
		}
	    }

	  if (generate)
	    {
	      rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
	      rtx shift = GEN_INT (clear_zero_bit_copies);

	      emit_insn (gen_lshrsi3 (new_src, source, shift));
	      emit_insn (gen_ashlsi3 (target, new_src, shift));
	    }

	  return insns + 2;
	}

      break;

    default:
      break;
    }
  for (i = 0; i < 32; i++)
    if (remainder & (1 << i))
      num_bits_set++;

  if (code == AND || (can_invert && num_bits_set > 16))
    remainder = (~remainder) & 0xffffffff;
  else if (code == PLUS && num_bits_set > 16)
    remainder = (-remainder) & 0xffffffff;
  else
    {
      can_invert = 0;
      can_negate = 0;
    }

  /* Now try and find a way of doing the job in either two or three
     instructions.
     We start by looking for the largest block of zeros that are aligned on
     a 2-bit boundary, we then fill up the temps, wrapping around to the
     top of the word when we drop off the bottom.
     In the worst case this code should produce no more than four insns.  */
  {
    int best_start = 0;
    int best_consecutive_zeros = 0;

    for (i = 0; i < 32; i += 2)
      {
	int consecutive_zeros = 0;

	if (!(remainder & (3 << i)))
	  {
	    while ((i < 32) && !(remainder & (3 << i)))
	      {
		consecutive_zeros += 2;
		i += 2;
	      }
	    if (consecutive_zeros > best_consecutive_zeros)
	      {
		best_consecutive_zeros = consecutive_zeros;
		best_start = i - consecutive_zeros;
	      }
	    i -= 2;
	  }
      }

    /* So long as it won't require any more insns to do so, it's
       desirable to emit a small constant (in bits 0...9) in the last
       insn.  This way there is more chance that it can be combined with
       a later addressing insn to form a pre-indexed load or store
       operation.  Consider:

	       *((volatile int *)0xe0000100) = 1;
	       *((volatile int *)0xe0000110) = 2;

       We want this to wind up as:

		mov rA, #0xe0000000
		mov rB, #1
		str rB, [rA, #0x100]
		mov rB, #2
		str rB, [rA, #0x110]

       rather than having to synthesize both large constants from scratch.

       Therefore, we calculate how many insns would be required to emit
       the constant starting from `best_start', and also starting from
       zero (i.e. with bit 31 first to be output).  If `best_start' doesn't
       yield a shorter sequence, we may as well use zero.  */
    if (best_start != 0
	&& ((((unsigned HOST_WIDE_INT) 1) << best_start) < remainder)
	&& (count_insns_for_constant (remainder, 0) <=
	    count_insns_for_constant (remainder, best_start)))
      best_start = 0;

    /* Now start emitting the insns.  */
    i = best_start;
    do
      {
	int end;

	if (i <= 0)
	  i += 32;
	if (remainder & (3 << (i - 2)))
	  {
	    end = i - 8;
	    if (end < 0)
	      end += 32;
	    temp1 = remainder & ((0x0ff << end)
				 | ((i < end) ? (0xff >> (32 - end)) : 0));
	    remainder &= ~temp1;

	    if (generate)
	      {
		rtx new_src, temp1_rtx;

		if (code == SET || code == MINUS)
		  {
		    new_src = (subtargets ? gen_reg_rtx (mode) : target);
		    if (can_invert && code != MINUS)
		      temp1 = ~temp1;
		  }
		else
		  {
		    if (remainder && subtargets)
		      new_src = gen_reg_rtx (mode);
		    else
		      new_src = target;
		    if (can_invert)
		      temp1 = ~temp1;
		    else if (can_negate)
		      temp1 = -temp1;
		  }

		temp1 = trunc_int_for_mode (temp1, mode);
		temp1_rtx = GEN_INT (temp1);

		if (code == SET)
		  ;
		else if (code == MINUS)
		  temp1_rtx = gen_rtx_MINUS (mode, temp1_rtx, source);
		else
		  temp1_rtx = gen_rtx_fmt_ee (code, mode, source, temp1_rtx);

		emit_insn (gen_rtx_SET (VOIDmode, new_src, temp1_rtx));
		source = new_src;
	      }

	    if (code == SET)
	      {
		can_invert = 0;
		code = PLUS;
	      }
	    else if (code == MINUS)
	      code = PLUS;

	    insns++;
	    i -= 6;
	  }
	i -= 2;
      }
    while (remainder);
  }

  return insns;
}
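
/* A worked illustration (hand-traced, not captured compiler output):
   synthesizing SET of the constant 0x00ff00ff finds the two aligned 8-bit
   chunks and emits something like

     mov  rD, #0x00ff0000
     add  rD, rD, #0x000000ff

   (CODE is flipped from SET to PLUS after the first insn above), so the
   function returns 2.  The small chunk is emitted last, matching the
   pre-indexed addressing heuristic described in the comment above.  */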
/* Canonicalize a comparison so that we are more likely to recognize it.
   This can be done for a few constant compares, where we can make the
   immediate value easier to load.  */

enum rtx_code
arm_canonicalize_comparison (code, op1)
     enum rtx_code code;
     rtx * op1;
{
  unsigned HOST_WIDE_INT i = INTVAL (*op1);

  switch (code)
    {
    case EQ:
    case NE:
      return code;

    case GT:
    case LE:
      if (i != ((((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1)) - 1)
	  && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
	{
	  *op1 = GEN_INT (i + 1);
	  return code == GT ? GE : LT;
	}
      break;

    case GE:
    case LT:
      if (i != (((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1))
	  && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
	{
	  *op1 = GEN_INT (i - 1);
	  return code == GE ? GT : LE;
	}
      break;

    case GTU:
    case LEU:
      if (i != ~((unsigned HOST_WIDE_INT) 0)
	  && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
	{
	  *op1 = GEN_INT (i + 1);
	  return code == GTU ? GEU : LTU;
	}
      break;

    case GEU:
    case LTU:
      if (i != 0
	  && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
	{
	  *op1 = GEN_INT (i - 1);
	  return code == GEU ? GTU : LEU;
	}
      break;

    default:
      abort ();
    }

  return code;
}
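
/* A worked example: 0xfff is not a valid ARM immediate, but 0x1000 (a
   power of two) is; so a comparison (GT x 0xfff) is rewritten above as
   (GE x 0x1000), which tests the same condition on integers while using
   a constant that can be loaded in a single instruction.  */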
/* Decide whether a type should be returned in memory (true)
   or in a register (false).  This is called by the macro
   RETURN_IN_MEMORY.  */

int
arm_return_in_memory (type)
     tree type;
{
  HOST_WIDE_INT size;

  if (!AGGREGATE_TYPE_P (type))
    /* All simple types are returned in registers.  */
    return 0;

  size = int_size_in_bytes (type);

  if (TARGET_ATPCS)
    {
      /* ATPCS returns aggregate types in memory only if they are
	 larger than a word (or are variable size).  */
      return (size < 0 || size > UNITS_PER_WORD);
    }

  /* For the arm-wince targets we choose to be compatible with Microsoft's
     ARM and Thumb compilers, which always return aggregates in memory.  */
#ifndef ARM_WINCE
  /* All structures/unions bigger than one word are returned in memory.
     Also catch the case where int_size_in_bytes returns -1.  In this case
     the aggregate is either huge or of variable size, and in either case
     we will want to return it via memory and not in a register.  */
  if (size < 0 || size > UNITS_PER_WORD)
    return 1;

  if (TREE_CODE (type) == RECORD_TYPE)
    {
      tree field;

      /* For a struct the APCS says that we only return in a register
	 if the type is 'integer like' and every addressable element
	 has an offset of zero.  For practical purposes this means
	 that the structure can have at most one non bit-field element
	 and that this element must be the first one in the structure.  */

      /* Find the first field, ignoring non FIELD_DECL things which will
	 have been created by C++.  */
      for (field = TYPE_FIELDS (type);
	   field && TREE_CODE (field) != FIELD_DECL;
	   field = TREE_CHAIN (field))
	continue;

      if (field == NULL)
	return 0; /* An empty structure.  Allowed by an extension to ANSI C.  */

      /* Check that the first field is valid for returning in a register.  */

      /* ... Floats are not allowed.  */
      if (FLOAT_TYPE_P (TREE_TYPE (field)))
	return 1;

      /* ... Aggregates that are not themselves valid for returning in
	 a register are not allowed.  */
      if (RETURN_IN_MEMORY (TREE_TYPE (field)))
	return 1;

      /* Now check the remaining fields, if any.  Only bitfields are allowed,
	 since they are not addressable.  */
      for (field = TREE_CHAIN (field);
	   field;
	   field = TREE_CHAIN (field))
	{
	  if (TREE_CODE (field) != FIELD_DECL)
	    continue;

	  if (!DECL_BIT_FIELD_TYPE (field))
	    return 1;
	}

      return 0;
    }

  if (TREE_CODE (type) == UNION_TYPE)
    {
      tree field;

      /* Unions can be returned in registers if every element is
	 integral, or can be returned in an integer register.  */
      for (field = TYPE_FIELDS (type);
	   field;
	   field = TREE_CHAIN (field))
	{
	  if (TREE_CODE (field) != FIELD_DECL)
	    continue;

	  if (FLOAT_TYPE_P (TREE_TYPE (field)))
	    return 1;

	  if (RETURN_IN_MEMORY (TREE_TYPE (field)))
	    return 1;
	}

      return 0;
    }
#endif /* not ARM_WINCE */

  /* Return all other types in memory.  */
  return 1;
}
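
/* A few concrete consequences of the rules above for the non-WinCE,
   non-ATPCS case (an illustration):

     struct s1 { int i; };          returned in a register: one word,
                                    integral first field, no other fields
     struct s2 { float f; };        returned in memory: float first field
     struct s3 { int i, j; };       returned in memory: wider than a word
     union  u1 { int i; char c; };  returned in a register: every member
                                    is integral  */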
/* Indicate whether or not words of a double are in big-endian order.  */

int
arm_float_words_big_endian ()
{
  /* For FPA, float words are always big-endian.  For VFP, floats words
     follow the memory system mode.  */

  if (TARGET_HARD_FLOAT)
    {
      /* FIXME: TARGET_HARD_FLOAT currently implies FPA.  */
      return 1;
    }

  if (TARGET_VFP)
    return (TARGET_BIG_END ? 1 : 0);

  return 1;
}
1921 /* Initialize a variable CUM of type CUMULATIVE_ARGS
1922 for a call to a function whose data type is FNTYPE.
1923 For a library call, FNTYPE is NULL. */
1924 void
1925 arm_init_cumulative_args (pcum, fntype, libname, indirect)
1926 CUMULATIVE_ARGS * pcum;
1927 tree fntype;
1928 rtx libname ATTRIBUTE_UNUSED;
1929 int indirect ATTRIBUTE_UNUSED;
1931 /* On the ARM, the offset starts at 0. */
1932 pcum->nregs = ((fntype && aggregate_value_p (TREE_TYPE (fntype))) ? 1 : 0);
1934 pcum->call_cookie = CALL_NORMAL;
1936 if (TARGET_LONG_CALLS)
1937 pcum->call_cookie = CALL_LONG;
1939 /* Check for long call/short call attributes. The attributes
1940 override any command line option. */
1941 if (fntype)
1943 if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (fntype)))
1944 pcum->call_cookie = CALL_SHORT;
1945 else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (fntype)))
1946 pcum->call_cookie = CALL_LONG;
1950 /* Determine where to put an argument to a function.
1951 Value is zero to push the argument on the stack,
1952 or a hard register in which to store the argument.
1954 MODE is the argument's machine mode.
1955 TYPE is the data type of the argument (as a tree).
1956 This is null for libcalls where that information may
1957 not be available.
1958 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1959 the preceding args and about the function being called.
1960 NAMED is nonzero if this argument is a named parameter
1961 (otherwise it is an extra parameter matching an ellipsis). */
1964 arm_function_arg (pcum, mode, type, named)
1965 CUMULATIVE_ARGS * pcum;
1966 enum machine_mode mode;
1967 tree type ATTRIBUTE_UNUSED;
1968 int named;
1970 if (mode == VOIDmode)
1971 /* Compute operand 2 of the call insn. */
1972 return GEN_INT (pcum->call_cookie);
1974 if (!named || pcum->nregs >= NUM_ARG_REGS)
1975 return NULL_RTX;
1977 return gen_rtx_REG (mode, pcum->nregs);
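/* A sketch of the resulting convention, assuming NUM_ARG_REGS is 4:
   for a call such as

     void f (int a, int b, int c, int d, int e);

   a..d occupy r0..r3 (pcum->nregs 0..3), while for e this function
   returns NULL_RTX and the argument is pushed on the stack.  */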
1980 /* Variable sized types are passed by reference. This is a GCC
1981 extension to the ARM ABI. */
1984 arm_function_arg_pass_by_reference (cum, mode, type, named)
1985 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
1986 enum machine_mode mode ATTRIBUTE_UNUSED;
1987 tree type;
1988 int named ATTRIBUTE_UNUSED;
1990 return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
1993 /* Implement va_arg. */
1996 arm_va_arg (valist, type)
1997 tree valist, type;
1999 /* Variable sized types are passed by reference. */
2000 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2002 rtx addr = std_expand_builtin_va_arg (valist, build_pointer_type (type));
2003 return gen_rtx_MEM (ptr_mode, force_reg (Pmode, addr));
2006 return std_expand_builtin_va_arg (valist, type);
2009 /* Encode the current state of the #pragma [no_]long_calls. */
2010 typedef enum
2012 OFF, /* No #pragma [no_]long_calls is in effect. */
2013 LONG, /* #pragma long_calls is in effect. */
2014 SHORT /* #pragma no_long_calls is in effect. */
2015 } arm_pragma_enum;
2017 static arm_pragma_enum arm_pragma_long_calls = OFF;
2019 void
2020 arm_pr_long_calls (pfile)
2021 struct cpp_reader * pfile ATTRIBUTE_UNUSED;
2023 arm_pragma_long_calls = LONG;
2026 void
2027 arm_pr_no_long_calls (pfile)
2028 struct cpp_reader * pfile ATTRIBUTE_UNUSED;
2030 arm_pragma_long_calls = SHORT;
2033 void
2034 arm_pr_long_calls_off (pfile)
2035 struct cpp_reader * pfile ATTRIBUTE_UNUSED;
2037 arm_pragma_long_calls = OFF;
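/* A hypothetical user-level fragment showing these pragmas in action
   (illustrative only, not part of this file's logic):

     #pragma long_calls
     void far_away (void);      -- called via a full 32-bit sequence
     #pragma no_long_calls
     void nearby (void);        -- called with a plain branch-and-link
     #pragma long_calls_off
     void ordinary (void);      -- back to the command-line default  */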
2040 /* Table of machine attributes. */
2041 const struct attribute_spec arm_attribute_table[] =
2043 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
2044 /* Function calls made to this symbol must be done indirectly, because
2045 it may lie outside of the 26 bit addressing range of a normal function
2046 call. */
2047 { "long_call", 0, 0, false, true, true, NULL },
2048 /* Whereas these functions are always known to reside within the 26 bit
2049 addressing range. */
2050 { "short_call", 0, 0, false, true, true, NULL },
2051 /* Interrupt Service Routines have special prologue and epilogue requirements. */
2052 { "isr", 0, 1, false, false, false, arm_handle_isr_attribute },
2053 { "interrupt", 0, 1, false, false, false, arm_handle_isr_attribute },
2054 { "naked", 0, 0, true, false, false, arm_handle_fndecl_attribute },
2055 #ifdef ARM_PE
2056 /* ARM/PE has three new attributes:
2057 interfacearm - ?
2058 dllexport - for exporting a function/variable that will live in a dll
2059 dllimport - for importing a function/variable from a dll
2061 Microsoft allows multiple declspecs in one __declspec, separating
2062 them with spaces. We do NOT support this. Instead, use __declspec
2063 multiple times.
2065 { "dllimport", 0, 0, true, false, false, NULL },
2066 { "dllexport", 0, 0, true, false, false, NULL },
2067 { "interfacearm", 0, 0, true, false, false, arm_handle_fndecl_attribute },
2068 #endif
2069 { NULL, 0, 0, false, false, false, NULL }
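/* The same effects can be requested per declaration -- a hypothetical
   example of user code accepted by this table:

     void far_away (void) __attribute__ ((long_call));
     void nearby (void) __attribute__ ((short_call));
     void handler (void) __attribute__ ((interrupt ("IRQ")));  */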
2072 /* Handle an attribute requiring a FUNCTION_DECL;
2073 arguments as in struct attribute_spec.handler. */
2075 static tree
2076 arm_handle_fndecl_attribute (node, name, args, flags, no_add_attrs)
2077 tree * node;
2078 tree name;
2079 tree args ATTRIBUTE_UNUSED;
2080 int flags ATTRIBUTE_UNUSED;
2081 bool * no_add_attrs;
2083 if (TREE_CODE (*node) != FUNCTION_DECL)
2085 warning ("`%s' attribute only applies to functions",
2086 IDENTIFIER_POINTER (name));
2087 *no_add_attrs = true;
2090 return NULL_TREE;
2093 /* Handle an "interrupt" or "isr" attribute;
2094 arguments as in struct attribute_spec.handler. */
2096 static tree
2097 arm_handle_isr_attribute (node, name, args, flags, no_add_attrs)
2098 tree * node;
2099 tree name;
2100 tree args;
2101 int flags;
2102 bool * no_add_attrs;
2104 if (DECL_P (*node))
2106 if (TREE_CODE (*node) != FUNCTION_DECL)
2108 warning ("`%s' attribute only applies to functions",
2109 IDENTIFIER_POINTER (name));
2110 *no_add_attrs = true;
2112 /* FIXME: the argument, if any, is checked for type attributes;
2113 should it be checked for decl ones? */
2115 else
2117 if (TREE_CODE (*node) == FUNCTION_TYPE
2118 || TREE_CODE (*node) == METHOD_TYPE)
2120 if (arm_isr_value (args) == ARM_FT_UNKNOWN)
2122 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
2123 *no_add_attrs = true;
2126 else if (TREE_CODE (*node) == POINTER_TYPE
2127 && (TREE_CODE (TREE_TYPE (*node)) == FUNCTION_TYPE
2128 || TREE_CODE (TREE_TYPE (*node)) == METHOD_TYPE)
2129 && arm_isr_value (args) != ARM_FT_UNKNOWN)
2131 *node = build_type_copy (*node);
2132 TREE_TYPE (*node) = build_type_attribute_variant
2133 (TREE_TYPE (*node),
2134 tree_cons (name, args, TYPE_ATTRIBUTES (TREE_TYPE (*node))));
2135 *no_add_attrs = true;
2137 else
2139 /* Possibly pass this attribute on from the type to a decl. */
2140 if (flags & ((int) ATTR_FLAG_DECL_NEXT
2141 | (int) ATTR_FLAG_FUNCTION_NEXT
2142 | (int) ATTR_FLAG_ARRAY_NEXT))
2144 *no_add_attrs = true;
2145 return tree_cons (name, args, NULL_TREE);
2147 else
2149 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
2154 return NULL_TREE;
2157 /* Return 0 if the attributes for two types are incompatible, 1 if they
2158 are compatible, and 2 if they are nearly compatible (which causes a
2159 warning to be generated). */
2161 static int
2162 arm_comp_type_attributes (type1, type2)
2163 tree type1;
2164 tree type2;
2166 int l1, l2, s1, s2;
2168 /* Check for mismatch of non-default calling convention. */
2169 if (TREE_CODE (type1) != FUNCTION_TYPE)
2170 return 1;
2172 /* Check for mismatched call attributes. */
2173 l1 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type1)) != NULL;
2174 l2 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type2)) != NULL;
2175 s1 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type1)) != NULL;
2176 s2 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type2)) != NULL;
2178 /* Only bother to check if an attribute is defined. */
2179 if (l1 | l2 | s1 | s2)
2181 /* If one type has an attribute, the other must have the same attribute. */
2182 if ((l1 != l2) || (s1 != s2))
2183 return 0;
2185 /* Disallow mixed attributes. */
2186 if ((l1 & s2) || (l2 & s1))
2187 return 0;
2190 /* Check for mismatched ISR attribute. */
2191 l1 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type1)) != NULL;
2192 if (! l1)
2193 l1 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type1)) != NULL;
2194 l2 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type2)) != NULL;
2195 if (! l2)
2196 l2 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type2)) != NULL;
2197 if (l1 != l2)
2198 return 0;
2200 return 1;
2203 /* Encode long_call or short_call attribute by prefixing
2204 symbol name in DECL with a special character FLAG. */
2206 void
2207 arm_encode_call_attribute (decl, flag)
2208 tree decl;
2209 int flag;
2211 const char * str = XSTR (XEXP (DECL_RTL (decl), 0), 0);
2212 int len = strlen (str);
2213 char * newstr;
2215 /* Do not allow weak functions to be treated as short call. */
2216 if (DECL_WEAK (decl) && flag == SHORT_CALL_FLAG_CHAR)
2217 return;
2219 newstr = alloca (len + 2);
2220 newstr[0] = flag;
2221 strcpy (newstr + 1, str);
2223 newstr = (char *) ggc_alloc_string (newstr, len + 1);
2224 XSTR (XEXP (DECL_RTL (decl), 0), 0) = newstr;
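/* For example (a sketch): with FLAG being SHORT_CALL_FLAG_CHAR, the
   symbol name "foo" is rewritten in place as FLAG followed by "foo",
   which predicates such as ENCODED_SHORT_CALL_ATTR_P later detect by
   testing the leading character.  */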
2227 /* Assigns default attributes to a newly defined type. This is used to
2228 set short_call/long_call attributes for function types of
2229 functions defined inside corresponding #pragma scopes. */
2231 static void
2232 arm_set_default_type_attributes (type)
2233 tree type;
2235 /* Add __attribute__ ((long_call)) to all functions when
2236 inside #pragma long_calls, or __attribute__ ((short_call))
2237 when inside #pragma no_long_calls. */
2238 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
2240 tree type_attr_list, attr_name;
2241 type_attr_list = TYPE_ATTRIBUTES (type);
2243 if (arm_pragma_long_calls == LONG)
2244 attr_name = get_identifier ("long_call");
2245 else if (arm_pragma_long_calls == SHORT)
2246 attr_name = get_identifier ("short_call");
2247 else
2248 return;
2250 type_attr_list = tree_cons (attr_name, NULL_TREE, type_attr_list);
2251 TYPE_ATTRIBUTES (type) = type_attr_list;
2255 /* Return 1 if the operand is a SYMBOL_REF for a function known to be
2256 defined within the current compilation unit. If this cannot be
2257 determined, then 0 is returned. */
2259 static int
2260 current_file_function_operand (sym_ref)
2261 rtx sym_ref;
2263 /* This is a bit of a fib. A function will have a short call flag
2264 applied to its name if it has the short call attribute, or if it has
2265 already been defined within the current compilation unit. */
2266 if (ENCODED_SHORT_CALL_ATTR_P (XSTR (sym_ref, 0)))
2267 return 1;
2269 /* The current function is always defined within the current compilation
2270 unit. If it is a weak definition, however, then this may not be the real
2271 definition of the function, and so we have to say no. */
2272 if (sym_ref == XEXP (DECL_RTL (current_function_decl), 0)
2273 && !DECL_WEAK (current_function_decl))
2274 return 1;
2276 /* We cannot make the determination - default to returning 0. */
2277 return 0;
2280 /* Return nonzero if a 32 bit "long_call" should be generated for
2281 this call. We generate a long_call if the function:
2283 a. has an __attribute__ ((long_call))
2284 or b. is within the scope of a #pragma long_calls
2285 or c. the -mlong-calls command line switch has been specified
2287 However we do not generate a long call if the function:
2289 d. has an __attribute__ ((short_call))
2290 or e. is inside the scope of a #pragma no_long_calls
2291 or f. has an __attribute__ ((section))
2292 or g. is defined within the current compilation unit.
2294 This function will be called by C fragments contained in the machine
2295 description file. CALL_REF and CALL_COOKIE correspond to the matched
2296 rtl operands. CALL_SYMBOL is used to distinguish between
2297 two different callers of the function. It is set to 1 in the
2298 "call_symbol" and "call_symbol_value" patterns and to 0 in the "call"
2299 and "call_value" patterns. This is because of the difference in the
2300 SYM_REFs passed by these patterns. */
2303 arm_is_longcall_p (sym_ref, call_cookie, call_symbol)
2304 rtx sym_ref;
2305 int call_cookie;
2306 int call_symbol;
2308 if (!call_symbol)
2310 if (GET_CODE (sym_ref) != MEM)
2311 return 0;
2313 sym_ref = XEXP (sym_ref, 0);
2316 if (GET_CODE (sym_ref) != SYMBOL_REF)
2317 return 0;
2319 if (call_cookie & CALL_SHORT)
2320 return 0;
2322 if (TARGET_LONG_CALLS && flag_function_sections)
2323 return 1;
2325 if (current_file_function_operand (sym_ref))
2326 return 0;
2328 return (call_cookie & CALL_LONG)
2329 || ENCODED_LONG_CALL_ATTR_P (XSTR (sym_ref, 0))
2330 || TARGET_LONG_CALLS;
2333 /* Return nonzero if it is ok to make a tail-call to DECL. */
2335 static bool
2336 arm_function_ok_for_sibcall (decl, exp)
2337 tree decl;
2338 tree exp ATTRIBUTE_UNUSED;
2340 int call_type = TARGET_LONG_CALLS ? CALL_LONG : CALL_NORMAL;
2342 /* Never tailcall something for which we have no decl, or if we
2343 are in Thumb mode. */
2344 if (decl == NULL || TARGET_THUMB)
2345 return false;
2347 /* Get the calling method. */
2348 if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
2349 call_type = CALL_SHORT;
2350 else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
2351 call_type = CALL_LONG;
2353 /* Cannot tail-call to long calls, since these are out of range of
2354 a branch instruction. However, if not compiling PIC, we know
2355 we can reach the symbol if it is in this compilation unit. */
2356 if (call_type == CALL_LONG && (flag_pic || !TREE_ASM_WRITTEN (decl)))
2357 return false;
2359 /* If we are interworking and the function is not declared static
2360 then we can't tail-call it unless we know that it exists in this
2361 compilation unit (since it might be a Thumb routine). */
2362 if (TARGET_INTERWORK && TREE_PUBLIC (decl) && !TREE_ASM_WRITTEN (decl))
2363 return false;
2365 /* Never tailcall from an ISR routine - it needs a special exit sequence. */
2366 if (IS_INTERRUPT (arm_current_func_type ()))
2367 return false;
2369 /* Everything else is ok. */
2370 return true;
2374 /* Addressing mode support functions. */
2376 /* Return non-zero if X is a legitimate immediate operand when compiling
2377 for PIC. */
2379 legitimate_pic_operand_p (x)
2380 rtx x;
2382 if (CONSTANT_P (x)
2383 && flag_pic
2384 && (GET_CODE (x) == SYMBOL_REF
2385 || (GET_CODE (x) == CONST
2386 && GET_CODE (XEXP (x, 0)) == PLUS
2387 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)))
2388 return 0;
2390 return 1;
2394 legitimize_pic_address (orig, mode, reg)
2395 rtx orig;
2396 enum machine_mode mode;
2397 rtx reg;
2399 if (GET_CODE (orig) == SYMBOL_REF
2400 || GET_CODE (orig) == LABEL_REF)
2402 #ifndef AOF_ASSEMBLER
2403 rtx pic_ref, address;
2404 #endif
2405 rtx insn;
2406 int subregs = 0;
2408 if (reg == 0)
2410 if (no_new_pseudos)
2411 abort ();
2412 else
2413 reg = gen_reg_rtx (Pmode);
2415 subregs = 1;
2418 #ifdef AOF_ASSEMBLER
2419 /* The AOF assembler can generate relocations for these directly, and
2420 understands that the PIC register has to be added into the offset. */
2421 insn = emit_insn (gen_pic_load_addr_based (reg, orig));
2422 #else
2423 if (subregs)
2424 address = gen_reg_rtx (Pmode);
2425 else
2426 address = reg;
2428 if (TARGET_ARM)
2429 emit_insn (gen_pic_load_addr_arm (address, orig));
2430 else
2431 emit_insn (gen_pic_load_addr_thumb (address, orig));
2433 if ((GET_CODE (orig) == LABEL_REF
2434 || (GET_CODE (orig) == SYMBOL_REF &&
2435 ENCODED_SHORT_CALL_ATTR_P (XSTR (orig, 0))))
2436 && NEED_GOT_RELOC)
2437 pic_ref = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, address);
2438 else
2440 pic_ref = gen_rtx_MEM (Pmode,
2441 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
2442 address));
2443 RTX_UNCHANGING_P (pic_ref) = 1;
2446 insn = emit_move_insn (reg, pic_ref);
2447 #endif
2448 current_function_uses_pic_offset_table = 1;
2449 /* Put a REG_EQUAL note on this insn, so that it can be optimized
2450 by loop. */
2451 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, orig,
2452 REG_NOTES (insn));
2453 return reg;
2455 else if (GET_CODE (orig) == CONST)
2457 rtx base, offset;
2459 if (GET_CODE (XEXP (orig, 0)) == PLUS
2460 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
2461 return orig;
2463 if (reg == 0)
2465 if (no_new_pseudos)
2466 abort ();
2467 else
2468 reg = gen_reg_rtx (Pmode);
2471 if (GET_CODE (XEXP (orig, 0)) == PLUS)
2473 base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
2474 offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
2475 base == reg ? 0 : reg);
2477 else
2478 abort ();
2480 if (GET_CODE (offset) == CONST_INT)
2482 /* The base register doesn't really matter, we only want to
2483 test the index for the appropriate mode. */
2484 if (!arm_legitimate_index_p (mode, offset, 0))
2486 if (!no_new_pseudos)
2487 offset = force_reg (Pmode, offset);
2488 else
2489 abort ();
2492 if (GET_CODE (offset) == CONST_INT)
2493 return plus_constant (base, INTVAL (offset));
2496 if (GET_MODE_SIZE (mode) > 4
2497 && (GET_MODE_CLASS (mode) == MODE_INT
2498 || TARGET_SOFT_FLOAT))
2500 emit_insn (gen_addsi3 (reg, base, offset));
2501 return reg;
2504 return gen_rtx_PLUS (Pmode, base, offset);
2507 return orig;
2510 /* Generate code to load the PIC register. PROLOGUE is true if
2511 called from arm_expand_prologue (in which case we want the
2512 generated insns at the start of the function); false if called
2513 by an exception receiver that needs the PIC register reloaded
2514 (in which case the insns are just dumped at the current location). */
2516 void
2517 arm_finalize_pic (prologue)
2518 int prologue ATTRIBUTE_UNUSED;
2520 #ifndef AOF_ASSEMBLER
2521 rtx l1, pic_tmp, pic_tmp2, seq, pic_rtx;
2522 rtx global_offset_table;
2524 if (current_function_uses_pic_offset_table == 0 || TARGET_SINGLE_PIC_BASE)
2525 return;
2527 if (!flag_pic)
2528 abort ();
2530 start_sequence ();
2531 l1 = gen_label_rtx ();
2533 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2534 /* On the ARM the PC register contains 'dot + 8' at the time of the
2535 addition, on the Thumb it is 'dot + 4'. */
2536 pic_tmp = plus_constant (gen_rtx_LABEL_REF (Pmode, l1), TARGET_ARM ? 8 : 4);
2537 if (GOT_PCREL)
2538 pic_tmp2 = gen_rtx_CONST (VOIDmode,
2539 gen_rtx_PLUS (Pmode, global_offset_table, pc_rtx));
2540 else
2541 pic_tmp2 = gen_rtx_CONST (VOIDmode, global_offset_table);
2543 pic_rtx = gen_rtx_CONST (Pmode, gen_rtx_MINUS (Pmode, pic_tmp2, pic_tmp));
2545 if (TARGET_ARM)
2547 emit_insn (gen_pic_load_addr_arm (pic_offset_table_rtx, pic_rtx));
2548 emit_insn (gen_pic_add_dot_plus_eight (pic_offset_table_rtx, l1));
2550 else
2552 emit_insn (gen_pic_load_addr_thumb (pic_offset_table_rtx, pic_rtx));
2553 emit_insn (gen_pic_add_dot_plus_four (pic_offset_table_rtx, l1));
2556 seq = get_insns ();
2557 end_sequence ();
2558 if (prologue)
2559 emit_insn_after (seq, get_insns ());
2560 else
2561 emit_insn (seq);
2563 /* Need to emit this whether or not we obey regdecls,
2564 since setjmp/longjmp can cause life info to screw up. */
2565 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
2566 #endif /* AOF_ASSEMBLER */
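/* For TARGET_ARM the sequence emitted above amounts to something like
   the following (an illustrative sketch; labels and the PIC register
   name are arbitrary):

       ldr  rPIC, Loffset
     L1:
       add  rPIC, pc, rPIC      @ pc reads as L1 + 8 here
       ...
     Loffset:
       .word _GLOBAL_OFFSET_TABLE_ - (L1 + 8)  */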
2569 /* Return nonzero if X is valid as an ARM state addressing register. */
2570 static int
2571 arm_address_register_rtx_p (x, strict_p)
2572 rtx x;
2573 int strict_p;
2575 int regno;
2577 if (GET_CODE (x) != REG)
2578 return 0;
2580 regno = REGNO (x);
2582 if (strict_p)
2583 return ARM_REGNO_OK_FOR_BASE_P (regno);
2585 return (regno <= LAST_ARM_REGNUM
2586 || regno >= FIRST_PSEUDO_REGISTER
2587 || regno == FRAME_POINTER_REGNUM
2588 || regno == ARG_POINTER_REGNUM);
2591 /* Return nonzero if X is a valid ARM state address operand. */
2593 arm_legitimate_address_p (mode, x, strict_p)
2594 enum machine_mode mode;
2595 rtx x;
2596 int strict_p;
2598 if (arm_address_register_rtx_p (x, strict_p))
2599 return 1;
2601 else if (GET_CODE (x) == POST_INC || GET_CODE (x) == PRE_DEC)
2602 return arm_address_register_rtx_p (XEXP (x, 0), strict_p);
2604 else if ((GET_CODE (x) == POST_MODIFY || GET_CODE (x) == PRE_MODIFY)
2605 && GET_MODE_SIZE (mode) <= 4
2606 && arm_address_register_rtx_p (XEXP (x, 0), strict_p)
2607 && GET_CODE (XEXP (x, 1)) == PLUS
2608 && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
2609 return arm_legitimate_index_p (mode, XEXP (XEXP (x, 1), 1), strict_p);
2611 /* After reload, constants split into minipools will have addresses
2612 derived from a LABEL_REF. */
2613 else if (GET_MODE_SIZE (mode) >= 4 && reload_completed
2614 && (GET_CODE (x) == LABEL_REF
2615 || (GET_CODE (x) == CONST
2616 && GET_CODE (XEXP (x, 0)) == PLUS
2617 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF
2618 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
2619 return 1;
2621 else if (mode == TImode)
2622 return 0;
2624 else if (mode == DImode || (TARGET_SOFT_FLOAT && mode == DFmode))
2626 if (GET_CODE (x) == PLUS
2627 && arm_address_register_rtx_p (XEXP (x, 0), strict_p)
2628 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2630 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
2632 if (val == 4 || val == -4 || val == -8)
2633 return 1;
2637 else if (GET_CODE (x) == PLUS)
2639 rtx xop0 = XEXP (x, 0);
2640 rtx xop1 = XEXP (x, 1);
2642 return ((arm_address_register_rtx_p (xop0, strict_p)
2643 && arm_legitimate_index_p (mode, xop1, strict_p))
2644 || (arm_address_register_rtx_p (xop1, strict_p)
2645 && arm_legitimate_index_p (mode, xop0, strict_p)));
2648 #if 0
2649 /* Reload currently can't handle MINUS, so disable this for now */
2650 else if (GET_CODE (x) == MINUS)
2652 rtx xop0 = XEXP (x, 0);
2653 rtx xop1 = XEXP (x, 1);
2655 return (arm_address_register_rtx_p (xop0, strict_p)
2656 && arm_legitimate_index_p (mode, xop1, strict_p));
2658 #endif
2660 else if (GET_MODE_CLASS (mode) != MODE_FLOAT
2661 && GET_CODE (x) == SYMBOL_REF
2662 && CONSTANT_POOL_ADDRESS_P (x)
2663 && ! (flag_pic
2664 && symbol_mentioned_p (get_pool_constant (x))))
2665 return 1;
2667 else if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == POST_DEC)
2668 && (GET_MODE_SIZE (mode) <= 4)
2669 && arm_address_register_rtx_p (XEXP (x, 0), strict_p))
2670 return 1;
2672 return 0;
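/* In instruction terms the cases above correspond to addresses such as
   (illustrative only):

     ldr r0, [r1]               plain base register
     ldr r0, [r1], #4           post-increment
     ldr r0, [r1, r2]           base plus index register
     ldr r0, [r1, r2, lsl #2]   base plus scaled index
     ldr r0, [r1, #4095]        base plus immediate within range

   while DImode addresses are limited to the small offsets (4, -4, -8)
   checked explicitly above.  */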
2675 /* Return nonzero if INDEX is valid for an address index operand in
2676 ARM state. */
2677 static int
2678 arm_legitimate_index_p (mode, index, strict_p)
2679 enum machine_mode mode;
2680 rtx index;
2681 int strict_p;
2683 HOST_WIDE_INT range;
2684 enum rtx_code code = GET_CODE (index);
2686 if (TARGET_HARD_FLOAT && GET_MODE_CLASS (mode) == MODE_FLOAT)
2687 return (code == CONST_INT && INTVAL (index) < 1024
2688 && INTVAL (index) > -1024
2689 && (INTVAL (index) & 3) == 0);
2691 if (arm_address_register_rtx_p (index, strict_p)
2692 && GET_MODE_SIZE (mode) <= 4)
2693 return 1;
2695 /* XXX What about ldrsb? */
2696 if (GET_MODE_SIZE (mode) <= 4 && code == MULT
2697 && (!arm_arch4 || (mode) != HImode))
2699 rtx xiop0 = XEXP (index, 0);
2700 rtx xiop1 = XEXP (index, 1);
2702 return ((arm_address_register_rtx_p (xiop0, strict_p)
2703 && power_of_two_operand (xiop1, SImode))
2704 || (arm_address_register_rtx_p (xiop1, strict_p)
2705 && power_of_two_operand (xiop0, SImode)));
2708 if (GET_MODE_SIZE (mode) <= 4
2709 && (code == LSHIFTRT || code == ASHIFTRT
2710 || code == ASHIFT || code == ROTATERT)
2711 && (!arm_arch4 || (mode) != HImode))
2713 rtx op = XEXP (index, 1);
2715 return (arm_address_register_rtx_p (XEXP (index, 0), strict_p)
2716 && GET_CODE (op) == CONST_INT
2717 && INTVAL (op) > 0
2718 && INTVAL (op) <= 31);
2721 /* XXX For ARM v4 we may be doing a sign-extend operation during the
2722 load, but that has a restricted addressing range and we are unable
2723 to tell here whether that is the case. To be safe we restrict all
2724 loads to that range. */
2725 range = ((mode) == HImode || (mode) == QImode)
2726 ? (arm_arch4 ? 256 : 4095) : 4096;
2728 return (code == CONST_INT
2729 && INTVAL (index) < range
2730 && INTVAL (index) > -range);
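/* A worked instance of the range computation above (a sketch): on an
   architecture-4 core, halfword and signed-byte loads (LDRH/LDRSH/
   LDRSB) only have an 8-bit immediate offset field, hence the 256
   bound for HImode and QImode there; word accesses, and byte accesses
   on older cores, use the 12-bit offset field, hence 4095/4096.  */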
2733 /* Return nonzero if X is valid as an ARM state addressing register. */
2734 static int
2735 thumb_base_register_rtx_p (x, mode, strict_p)
2736 rtx x;
2737 enum machine_mode mode;
2738 int strict_p;
2740 int regno;
2742 if (GET_CODE (x) != REG)
2743 return 0;
2745 regno = REGNO (x);
2747 if (strict_p)
2748 return THUMB_REGNO_MODE_OK_FOR_BASE_P (regno, mode);
2750 return (regno <= LAST_LO_REGNUM
2751 || regno >= FIRST_PSEUDO_REGISTER
2752 || regno == FRAME_POINTER_REGNUM
2753 || (GET_MODE_SIZE (mode) >= 4
2754 && (regno == STACK_POINTER_REGNUM
2755 || x == hard_frame_pointer_rtx
2756 || x == arg_pointer_rtx)));
2759 /* Return nonzero if x is a legitimate index register. This is the case
2760 for any base register that can access a QImode object. */
2761 inline static int
2762 thumb_index_register_rtx_p (x, strict_p)
2763 rtx x;
2764 int strict_p;
2766 return thumb_base_register_rtx_p (x, QImode, strict_p);
2769 /* Return nonzero if x is a legitimate Thumb-state address.
2771 The AP may be eliminated to either the SP or the FP, so we use the
2772 least common denominator, e.g. SImode, and offsets from 0 to 64.
2774 ??? Verify whether the above is the right approach.
2776 ??? Also, the FP may be eliminated to the SP, so perhaps that
2777 needs special handling also.
2779 ??? Look at how the mips16 port solves this problem. It probably uses
2780 better ways to solve some of these problems.
2782 Although it is not incorrect, we don't accept QImode and HImode
2783 addresses based on the frame pointer or arg pointer until the
2784 reload pass starts. This is so that eliminating such addresses
2785 into stack based ones won't produce impossible code. */
2787 thumb_legitimate_address_p (mode, x, strict_p)
2788 enum machine_mode mode;
2789 rtx x;
2790 int strict_p;
2792 /* ??? Not clear if this is right. Experiment. */
2793 if (GET_MODE_SIZE (mode) < 4
2794 && !(reload_in_progress || reload_completed)
2795 && (reg_mentioned_p (frame_pointer_rtx, x)
2796 || reg_mentioned_p (arg_pointer_rtx, x)
2797 || reg_mentioned_p (virtual_incoming_args_rtx, x)
2798 || reg_mentioned_p (virtual_outgoing_args_rtx, x)
2799 || reg_mentioned_p (virtual_stack_dynamic_rtx, x)
2800 || reg_mentioned_p (virtual_stack_vars_rtx, x)))
2801 return 0;
2803 /* Accept any base register. SP only in SImode or larger. */
2804 else if (thumb_base_register_rtx_p (x, mode, strict_p))
2805 return 1;
2807 /* This is PC relative data before MACHINE_DEPENDENT_REORG runs. */
2808 else if (GET_MODE_SIZE (mode) >= 4 && CONSTANT_P (x)
2809 && GET_CODE (x) == SYMBOL_REF
2810 && CONSTANT_POOL_ADDRESS_P (x) && ! flag_pic)
2811 return 1;
2813 /* This is PC relative data after MACHINE_DEPENDENT_REORG runs. */
2814 else if (GET_MODE_SIZE (mode) >= 4 && reload_completed
2815 && (GET_CODE (x) == LABEL_REF
2816 || (GET_CODE (x) == CONST
2817 && GET_CODE (XEXP (x, 0)) == PLUS
2818 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF
2819 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
2820 return 1;
2822 /* Post-inc indexing only supported for SImode and larger. */
2823 else if (GET_CODE (x) == POST_INC && GET_MODE_SIZE (mode) >= 4
2824 && thumb_index_register_rtx_p (XEXP (x, 0), strict_p))
2825 return 1;
2827 else if (GET_CODE (x) == PLUS)
2829 /* REG+REG address can be any two index registers. */
2830 /* We disallow FRAME+REG addressing since we know that FRAME
2831 will be replaced with STACK, and SP relative addressing only
2832 permits SP+OFFSET. */
2833 if (GET_MODE_SIZE (mode) <= 4
2834 && XEXP (x, 0) != frame_pointer_rtx
2835 && XEXP (x, 1) != frame_pointer_rtx
2836 && XEXP (x, 0) != virtual_stack_vars_rtx
2837 && XEXP (x, 1) != virtual_stack_vars_rtx
2838 && thumb_index_register_rtx_p (XEXP (x, 0), strict_p)
2839 && thumb_index_register_rtx_p (XEXP (x, 1), strict_p))
2840 return 1;
2842 /* REG+const has 5-7 bit offset for non-SP registers. */
2843 else if ((thumb_index_register_rtx_p (XEXP (x, 0), strict_p)
2844 || XEXP (x, 0) == arg_pointer_rtx)
2845 && GET_CODE (XEXP (x, 1)) == CONST_INT
2846 && thumb_legitimate_offset_p (mode, INTVAL (XEXP (x, 1))))
2847 return 1;
2849 /* REG+const has 10 bit offset for SP, but only SImode and
2850 larger is supported. */
2851 /* ??? Should probably check for DI/DFmode overflow here
2852 just like GO_IF_LEGITIMATE_OFFSET does. */
2853 else if (GET_CODE (XEXP (x, 0)) == REG
2854 && REGNO (XEXP (x, 0)) == STACK_POINTER_REGNUM
2855 && GET_MODE_SIZE (mode) >= 4
2856 && GET_CODE (XEXP (x, 1)) == CONST_INT
2857 && INTVAL (XEXP (x, 1)) >= 0
2858 && INTVAL (XEXP (x, 1)) + GET_MODE_SIZE (mode) <= 1024
2859 && (INTVAL (XEXP (x, 1)) & 3) == 0)
2860 return 1;
2862 else if (GET_CODE (XEXP (x, 0)) == REG
2863 && REGNO (XEXP (x, 0)) == FRAME_POINTER_REGNUM
2864 && GET_MODE_SIZE (mode) >= 4
2865 && GET_CODE (XEXP (x, 1)) == CONST_INT
2866 && (INTVAL (XEXP (x, 1)) & 3) == 0)
2867 return 1;
2870 else if (GET_MODE_CLASS (mode) != MODE_FLOAT
2871 && GET_CODE (x) == SYMBOL_REF
2872 && CONSTANT_POOL_ADDRESS_P (x)
2873 && !(flag_pic
2874 && symbol_mentioned_p (get_pool_constant (x))))
2875 return 1;
2877 return 0;
2880 /* Return nonzero if VAL can be used as an offset in a Thumb-state address
2881 instruction of mode MODE. */
2883 thumb_legitimate_offset_p (mode, val)
2884 enum machine_mode mode;
2885 HOST_WIDE_INT val;
2887 switch (GET_MODE_SIZE (mode))
2889 case 1:
2890 return val >= 0 && val < 32;
2892 case 2:
2893 return val >= 0 && val < 64 && (val & 1) == 0;
2895 default:
2896 return (val >= 0
2897 && (val + GET_MODE_SIZE (mode)) <= 128
2898 && (val & 3) == 0);
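/* Spelled out as the Thumb immediate-offset forms this accepts
   (illustrative):

     ldrb r0, [r1, #31]       QImode: 0..31
     ldrh r0, [r1, #62]       HImode: 0..62, even
     ldr  r0, [r1, #124]      SImode and wider: 0..124, word-aligned,
                              counting the access size itself  */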
2902 /* Try machine-dependent ways of modifying an illegitimate address
2903 to be legitimate. If we find one, return the new, valid address. */
2906 arm_legitimize_address (x, orig_x, mode)
2907 rtx x;
2908 rtx orig_x;
2909 enum machine_mode mode;
2911 if (GET_CODE (x) == PLUS)
2913 rtx xop0 = XEXP (x, 0);
2914 rtx xop1 = XEXP (x, 1);
2916 if (CONSTANT_P (xop0) && !symbol_mentioned_p (xop0))
2917 xop0 = force_reg (SImode, xop0);
2919 if (CONSTANT_P (xop1) && !symbol_mentioned_p (xop1))
2920 xop1 = force_reg (SImode, xop1);
2922 if (ARM_BASE_REGISTER_RTX_P (xop0)
2923 && GET_CODE (xop1) == CONST_INT)
2925 HOST_WIDE_INT n, low_n;
2926 rtx base_reg, val;
2927 n = INTVAL (xop1);
2929 if (mode == DImode || (TARGET_SOFT_FLOAT && mode == DFmode))
2931 low_n = n & 0x0f;
2932 n &= ~0x0f;
2933 if (low_n > 4)
2935 n += 16;
2936 low_n -= 16;
2939 else
2941 low_n = ((mode) == TImode ? 0
2942 : n >= 0 ? (n & 0xfff) : -((-n) & 0xfff));
2943 n -= low_n;
2946 base_reg = gen_reg_rtx (SImode);
2947 val = force_operand (gen_rtx_PLUS (SImode, xop0,
2948 GEN_INT (n)), NULL_RTX);
2949 emit_move_insn (base_reg, val);
2950 x = (low_n == 0 ? base_reg
2951 : gen_rtx_PLUS (SImode, base_reg, GEN_INT (low_n)));
2953 else if (xop0 != XEXP (x, 0) || xop1 != XEXP (x, 1))
2954 x = gen_rtx_PLUS (SImode, xop0, xop1);
2957 /* XXX We don't allow MINUS any more -- see comment in
2958 arm_legitimate_address_p (). */
2959 else if (GET_CODE (x) == MINUS)
2961 rtx xop0 = XEXP (x, 0);
2962 rtx xop1 = XEXP (x, 1);
2964 if (CONSTANT_P (xop0))
2965 xop0 = force_reg (SImode, xop0);
2967 if (CONSTANT_P (xop1) && ! symbol_mentioned_p (xop1))
2968 xop1 = force_reg (SImode, xop1);
2970 if (xop0 != XEXP (x, 0) || xop1 != XEXP (x, 1))
2971 x = gen_rtx_MINUS (SImode, xop0, xop1);
2974 if (flag_pic)
2976 /* We need to find and carefully transform any SYMBOL and LABEL
2977 references; so go back to the original address expression. */
2978 rtx new_x = legitimize_pic_address (orig_x, mode, NULL_RTX);
2980 if (new_x != orig_x)
2981 x = new_x;
2984 return x;
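/* Example of the splitting above (a sketch): an SImode reference to
   base + 4100 cannot be encoded directly, so the constant is broken
   around the 12-bit limit:

     base_reg := xop0 + 4096    (computed into a fresh register)
     x        := base_reg + 4   (now a legitimate address)  */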
2989 #define REG_OR_SUBREG_REG(X) \
2990 (GET_CODE (X) == REG \
2991 || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))
2993 #define REG_OR_SUBREG_RTX(X) \
2994 (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))
2996 #ifndef COSTS_N_INSNS
2997 #define COSTS_N_INSNS(N) ((N) * 4 - 2)
2998 #endif
3000 static inline int
3001 arm_rtx_costs_1 (x, code, outer)
3002 rtx x;
3003 enum rtx_code code;
3004 enum rtx_code outer;
3006 enum machine_mode mode = GET_MODE (x);
3007 enum rtx_code subcode;
3008 int extra_cost;
3010 if (TARGET_THUMB)
3012 switch (code)
3014 case ASHIFT:
3015 case ASHIFTRT:
3016 case LSHIFTRT:
3017 case ROTATERT:
3018 case PLUS:
3019 case MINUS:
3020 case COMPARE:
3021 case NEG:
3022 case NOT:
3023 return COSTS_N_INSNS (1);
3025 case MULT:
3026 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
3028 int cycles = 0;
3029 unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));
3031 while (i)
3033 i >>= 2;
3034 cycles++;
3036 return COSTS_N_INSNS (2) + cycles;
3038 return COSTS_N_INSNS (1) + 16;
3040 case SET:
3041 return (COSTS_N_INSNS (1)
3042 + 4 * ((GET_CODE (SET_SRC (x)) == MEM)
3043 + (GET_CODE (SET_DEST (x)) == MEM)));
3045 case CONST_INT:
3046 if (outer == SET)
3048 if ((unsigned HOST_WIDE_INT) INTVAL (x) < 256)
3049 return 0;
3050 if (thumb_shiftable_const (INTVAL (x)))
3051 return COSTS_N_INSNS (2);
3052 return COSTS_N_INSNS (3);
3054 else if (outer == PLUS
3055 && INTVAL (x) < 256 && INTVAL (x) > -256)
3056 return 0;
3057 else if (outer == COMPARE
3058 && (unsigned HOST_WIDE_INT) INTVAL (x) < 256)
3059 return 0;
3060 else if (outer == ASHIFT || outer == ASHIFTRT
3061 || outer == LSHIFTRT)
3062 return 0;
3063 return COSTS_N_INSNS (2);
3065 case CONST:
3066 case CONST_DOUBLE:
3067 case LABEL_REF:
3068 case SYMBOL_REF:
3069 return COSTS_N_INSNS (3);
3071 case UDIV:
3072 case UMOD:
3073 case DIV:
3074 case MOD:
3075 return 100;
3077 case TRUNCATE:
3078 return 99;
3080 case AND:
3081 case XOR:
3082 case IOR:
3083 /* XXX guess. */
3084 return 8;
3086 case ADDRESSOF:
3087 case MEM:
3088 /* XXX another guess. */
3089 /* Memory costs quite a lot for the first word, but subsequent words
3090 load at the equivalent of a single insn each. */
3091 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
3092 + ((GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
3093 ? 4 : 0));
3095 case IF_THEN_ELSE:
3096 /* XXX a guess. */
3097 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
3098 return 14;
3099 return 2;
3101 case ZERO_EXTEND:
3102 /* XXX still guessing. */
3103 switch (GET_MODE (XEXP (x, 0)))
3105 case QImode:
3106 return (1 + (mode == DImode ? 4 : 0)
3107 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3109 case HImode:
3110 return (4 + (mode == DImode ? 4 : 0)
3111 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3113 case SImode:
3114 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3116 default:
3117 return 99;
3120 default:
3121 return 99;
3122 #if 0
3123 case FFS:
3124 case FLOAT:
3125 case FIX:
3126 case UNSIGNED_FIX:
3127 /* XXX guess */
3128 fprintf (stderr, "unexpected code for thumb in rtx_costs: %s\n",
3129 rtx_name[code]);
3130 abort ();
3131 #endif
3135 switch (code)
3137 case MEM:
3138 /* Memory costs quite a lot for the first word, but subsequent words
3139 load at the equivalent of a single insn each. */
3140 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
3141 + (GET_CODE (x) == SYMBOL_REF
3142 && CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));
3144 case DIV:
3145 case MOD:
3146 return 100;
3148 case ROTATE:
3149 if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
3150 return 4;
3151 /* Fall through */
3152 case ROTATERT:
3153 if (mode != SImode)
3154 return 8;
3155 /* Fall through */
3156 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
3157 if (mode == DImode)
3158 return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
3159 + ((GET_CODE (XEXP (x, 0)) == REG
3160 || (GET_CODE (XEXP (x, 0)) == SUBREG
3161 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
3162 ? 0 : 8));
3163 return (1 + ((GET_CODE (XEXP (x, 0)) == REG
3164 || (GET_CODE (XEXP (x, 0)) == SUBREG
3165 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
3166 ? 0 : 4)
3167 + ((GET_CODE (XEXP (x, 1)) == REG
3168 || (GET_CODE (XEXP (x, 1)) == SUBREG
3169 && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
3170 || (GET_CODE (XEXP (x, 1)) == CONST_INT))
3171 ? 0 : 4));
3173 case MINUS:
3174 if (mode == DImode)
3175 return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
3176 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
3177 || (GET_CODE (XEXP (x, 0)) == CONST_INT
3178 && const_ok_for_arm (INTVAL (XEXP (x, 0)))))
3179 ? 0 : 8));
3181 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3182 return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
3183 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
3184 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
3185 ? 0 : 8)
3186 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
3187 || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
3188 && const_double_rtx_ok_for_fpu (XEXP (x, 0))))
3189 ? 0 : 8));
3191 if (((GET_CODE (XEXP (x, 0)) == CONST_INT
3192 && const_ok_for_arm (INTVAL (XEXP (x, 0)))
3193 && REG_OR_SUBREG_REG (XEXP (x, 1))))
3194 || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
3195 || subcode == ASHIFTRT || subcode == LSHIFTRT
3196 || subcode == ROTATE || subcode == ROTATERT
3197 || (subcode == MULT
3198 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3199 && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
3200 (INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
3201 && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
3202 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
3203 || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
3204 && REG_OR_SUBREG_REG (XEXP (x, 0))))
3205 return 1;
3206 /* Fall through */
3208 case PLUS:
3209 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3210 return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
3211 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
3212 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
3213 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
3214 ? 0 : 8));
3216 /* Fall through */
3217 case AND: case XOR: case IOR:
3218 extra_cost = 0;
3220 /* Normally the frame registers will be split into reg+const during
3221 reload, so it is a bad idea to combine them with other instructions,
3222 since then they might not be moved outside of loops. As a compromise
3223 we allow integration with ops that have a constant as their second
3224 operand. */
3225 if ((REG_OR_SUBREG_REG (XEXP (x, 0))
3226 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
3227 && GET_CODE (XEXP (x, 1)) != CONST_INT)
3228 || (REG_OR_SUBREG_REG (XEXP (x, 0))
3229 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))))
3230 extra_cost = 4;
3232 if (mode == DImode)
3233 return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
3234 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
3235 || (GET_CODE (XEXP (x, 1)) == CONST_INT
3236 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
3237 ? 0 : 8));
3239 if (REG_OR_SUBREG_REG (XEXP (x, 0)))
3240 return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
3241 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
3242 || (GET_CODE (XEXP (x, 1)) == CONST_INT
3243 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
3244 ? 0 : 4));
3246 else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
3247 return (1 + extra_cost
3248 + ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
3249 || subcode == LSHIFTRT || subcode == ASHIFTRT
3250 || subcode == ROTATE || subcode == ROTATERT
3251 || (subcode == MULT
3252 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3253 && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
3254 (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0)))
3255 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
3256 && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
3257 || GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
3258 ? 0 : 4));
3260 return 8;
3262 case MULT:
3263 /* There is no point basing this on the tuning, since it is always the
3264 fast variant if it exists at all. */
3265 if (arm_fast_multiply && mode == DImode
3266 && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
3267 && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
3268 || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
3269 return 8;
3271 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3272 || mode == DImode)
3273 return 30;
3275 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
3277 unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
3278 & (unsigned HOST_WIDE_INT) 0xffffffff);
3279 int add_cost = const_ok_for_arm (i) ? 4 : 8;
3280 int j;
3282 /* Tune as appropriate. */
3283 int booth_unit_size = ((tune_flags & FL_FAST_MULT) ? 8 : 2);
3285 for (j = 0; i && j < 32; j += booth_unit_size)
3287 i >>= booth_unit_size;
3288 add_cost += 2;
3291 return add_cost;
3294 return (((tune_flags & FL_FAST_MULT) ? 8 : 30)
3295 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
3296 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));
3298 case TRUNCATE:
3299 if (arm_fast_multiply && mode == SImode
3300 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
3301 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
3302 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
3303 == GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)))
3304 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ZERO_EXTEND
3305 || GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SIGN_EXTEND))
3306 return 8;
3307 return 99;
3309 case NEG:
3310 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3311 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
3312 /* Fall through */
3313 case NOT:
3314 if (mode == DImode)
3315 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
3317 return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
3319 case IF_THEN_ELSE:
3320 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
3321 return 14;
3322 return 2;
3324 case COMPARE:
3325 return 1;
3327 case ABS:
3328 return 4 + (mode == DImode ? 4 : 0);
3330 case SIGN_EXTEND:
3331 if (GET_MODE (XEXP (x, 0)) == QImode)
3332 return (4 + (mode == DImode ? 4 : 0)
3333 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3334 /* Fall through */
3335 case ZERO_EXTEND:
3336 switch (GET_MODE (XEXP (x, 0)))
3338 case QImode:
3339 return (1 + (mode == DImode ? 4 : 0)
3340 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3342 case HImode:
3343 return (4 + (mode == DImode ? 4 : 0)
3344 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3346 case SImode:
3347 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3349 default:
3350 break;
3352 abort ();
3354 case CONST_INT:
3355 if (const_ok_for_arm (INTVAL (x)))
3356 return outer == SET ? 2 : -1;
3357 else if (outer == AND
3358 && const_ok_for_arm (~INTVAL (x)))
3359 return -1;
3360 else if ((outer == COMPARE
3361 || outer == PLUS || outer == MINUS)
3362 && const_ok_for_arm (-INTVAL (x)))
3363 return -1;
3364 else
3365 return 5;
3367 case CONST:
3368 case LABEL_REF:
3369 case SYMBOL_REF:
3370 return 6;
3372 case CONST_DOUBLE:
3373 if (const_double_rtx_ok_for_fpu (x))
3374 return outer == SET ? 2 : -1;
3375 else if ((outer == COMPARE || outer == PLUS)
3376 && neg_const_double_rtx_ok_for_fpu (x))
3377 return -1;
3378 return 7;
3380 default:
3381 return 99;
3385 static bool
3386 arm_rtx_costs (x, code, outer_code, total)
3387 rtx x;
3388 int code, outer_code;
3389 int *total;
3391 *total = arm_rtx_costs_1 (x, code, outer_code);
3392 return true;
3395 /* All address computations that can be done are free, but rtx cost returns
3396 the same for practically all of them. So we weight the different types
3397 of address here in the order (most pref first):
3398 PRE/POST_INC/DEC, SHIFT or NON-INT sum, INT sum, REG, MEM or LABEL. */
3400 static int
3401 arm_address_cost (X)
3402 rtx X;
3404 #define ARM_ADDRESS_COST(X) \
3405 (10 - ((GET_CODE (X) == MEM || GET_CODE (X) == LABEL_REF \
3406 || GET_CODE (X) == SYMBOL_REF) \
3407 ? 0 \
3408 : ((GET_CODE (X) == PRE_INC || GET_CODE (X) == PRE_DEC \
3409 || GET_CODE (X) == POST_INC || GET_CODE (X) == POST_DEC) \
3410 ? 10 \
3411 : (((GET_CODE (X) == PLUS || GET_CODE (X) == MINUS) \
3412 ? 6 + (GET_CODE (XEXP (X, 1)) == CONST_INT ? 2 \
3413 : ((GET_RTX_CLASS (GET_CODE (XEXP (X, 0))) == '2' \
3414 || GET_RTX_CLASS (GET_CODE (XEXP (X, 0))) == 'c' \
3415 || GET_RTX_CLASS (GET_CODE (XEXP (X, 1))) == '2' \
3416 || GET_RTX_CLASS (GET_CODE (XEXP (X, 1))) == 'c') \
3417 ? 1 : 0)) \
3418 : 4)))))
3420 #define THUMB_ADDRESS_COST(X) \
3421 ((GET_CODE (X) == REG \
3422 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 0)) == REG \
3423 && GET_CODE (XEXP (X, 1)) == CONST_INT)) \
3424 ? 1 : 2)
3426 return (TARGET_ARM ? ARM_ADDRESS_COST (X) : THUMB_ADDRESS_COST (X));
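/* Worked scores from ARM_ADDRESS_COST above (lower is preferred):

     PRE/POST_INC/DEC            10 - 10 = 0
     reg + const_int             10 -  8 = 2
     reg + reg or shifted sum    10 -  7 = 3
     plain register              10 -  4 = 6
     MEM, LABEL_REF, SYMBOL_REF  10 -  0 = 10  */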
3429 static int
3430 arm_adjust_cost (insn, link, dep, cost)
3431 rtx insn;
3432 rtx link;
3433 rtx dep;
3434 int cost;
3436 rtx i_pat, d_pat;
3438 /* Some true dependencies can have a higher cost depending
3439 on precisely how certain input operands are used. */
3440 if (arm_is_xscale
3441 && REG_NOTE_KIND (link) == 0
3442 && recog_memoized (insn) >= 0
3443 && recog_memoized (dep) >= 0)
3445 int shift_opnum = get_attr_shift (insn);
3446 enum attr_type attr_type = get_attr_type (dep);
3448 /* If nonzero, SHIFT_OPNUM contains the operand number of a shifted
3449 operand for INSN. If we have a shifted input operand and the
3450 instruction we depend on is another ALU instruction, then we may
3451 have to account for an additional stall. */
3452 if (shift_opnum != 0 && attr_type == TYPE_NORMAL)
3454 rtx shifted_operand;
3455 int opno;
3457 /* Get the shifted operand. */
3458 extract_insn (insn);
3459 shifted_operand = recog_data.operand[shift_opnum];
3461 /* Iterate over all the operands in DEP. If we write an operand
3462 that overlaps with SHIFTED_OPERAND, then we have to increase the
3463 cost of this dependency. */
3464 extract_insn (dep);
3465 preprocess_constraints ();
3466 for (opno = 0; opno < recog_data.n_operands; opno++)
3468 /* We can ignore strict inputs. */
3469 if (recog_data.operand_type[opno] == OP_IN)
3470 continue;
3472 if (reg_overlap_mentioned_p (recog_data.operand[opno],
3473 shifted_operand))
3474 return 2;
3479 /* XXX This is not strictly true for the FPA. */
3480 if (REG_NOTE_KIND (link) == REG_DEP_ANTI
3481 || REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
3482 return 0;
3484 /* Call insns don't incur a stall, even if they follow a load. */
3485 if (REG_NOTE_KIND (link) == 0
3486 && GET_CODE (insn) == CALL_INSN)
3487 return 1;
3489 if ((i_pat = single_set (insn)) != NULL
3490 && GET_CODE (SET_SRC (i_pat)) == MEM
3491 && (d_pat = single_set (dep)) != NULL
3492 && GET_CODE (SET_DEST (d_pat)) == MEM)
3494 rtx src_mem = XEXP (SET_SRC (i_pat), 0);
3495 /* This is a load after a store; there is no conflict if the load reads
3496 from a cached area. Assume that loads from the stack, and from the
3497 constant pool are cached, and that others will miss. This is a
3498 hack. */
3500 if ((GET_CODE (src_mem) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (src_mem))
3501 || reg_mentioned_p (stack_pointer_rtx, src_mem)
3502 || reg_mentioned_p (frame_pointer_rtx, src_mem)
3503 || reg_mentioned_p (hard_frame_pointer_rtx, src_mem))
3504 return 1;
3507 return cost;
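/* An XScale case the shift test above targets (a sketch):

     mov r1, r0               <- DEP writes r1
     add r2, r3, r1, lsl #2   <- INSN uses r1 as its shifted operand

   the shifted operand is required a pipeline stage early, so the
   dependency is charged a cost of 2 instead of the default.  */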
3510 /* This code has been fixed for cross compilation. */
3512 static int fpa_consts_inited = 0;
3514 static const char * const strings_fpa[8] =
3516 "0", "1", "2", "3",
3517 "4", "5", "0.5", "10"
3520 static REAL_VALUE_TYPE values_fpa[8];
3522 static void
3523 init_fpa_table ()
3525 int i;
3526 REAL_VALUE_TYPE r;
3528 for (i = 0; i < 8; i++)
3530 r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
3531 values_fpa[i] = r;
3534 fpa_consts_inited = 1;
3537 /* Return TRUE if rtx X is a valid immediate FPU constant. */
3540 const_double_rtx_ok_for_fpu (x)
3541 rtx x;
3543 REAL_VALUE_TYPE r;
3544 int i;
3546 if (!fpa_consts_inited)
3547 init_fpa_table ();
3549 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3550 if (REAL_VALUE_MINUS_ZERO (r))
3551 return 0;
3553 for (i = 0; i < 8; i++)
3554 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
3555 return 1;
3557 return 0;
3560 /* Return TRUE if rtx X, when negated, is a valid immediate FPU constant. */
3563 neg_const_double_rtx_ok_for_fpu (x)
3564 rtx x;
3566 REAL_VALUE_TYPE r;
3567 int i;
3569 if (!fpa_consts_inited)
3570 init_fpa_table ();
3572 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3573 r = REAL_VALUE_NEGATE (r);
3574 if (REAL_VALUE_MINUS_ZERO (r))
3575 return 0;
3577 for (i = 0; i < 8; i++)
3578 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
3579 return 1;
3581 return 0;
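/* So, for example: 10.0 can be used as an FPA immediate directly,
   -5.0 only via the negated check (e.g. by turning an add into a
   subtract), and 6.0 or -0.0 not at all -- those must come from the
   constant pool.  (A sketch; the table holds 0, 1, 2, 3, 4, 5, 0.5
   and 10.)  */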
3584 /* Predicates for `match_operand' and `match_operator'. */
3586 /* s_register_operand is the same as register_operand, but it doesn't accept
3587 (SUBREG (MEM)...).
3589 This function exists because, at the time it was added, it led to better
3590 code. SUBREG(MEM) always needs a reload in the places where
3591 s_register_operand is used, and this seemed to lead to excessive
3592 reloading. */
3595 s_register_operand (op, mode)
3596 rtx op;
3597 enum machine_mode mode;
3599 if (GET_MODE (op) != mode && mode != VOIDmode)
3600 return 0;
3602 if (GET_CODE (op) == SUBREG)
3603 op = SUBREG_REG (op);
3605 /* We don't consider registers whose class is NO_REGS
3606 to be a register operand. */
3607 /* XXX might have to check for lo regs only for thumb ??? */
3608 return (GET_CODE (op) == REG
3609 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3610 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
3613 /* A hard register operand (even before reload). */
3616 arm_hard_register_operand (op, mode)
3617 rtx op;
3618 enum machine_mode mode;
3620 if (GET_MODE (op) != mode && mode != VOIDmode)
3621 return 0;
3623 return (GET_CODE (op) == REG
3624 && REGNO (op) < FIRST_PSEUDO_REGISTER);
3627 /* Only accept reg, subreg(reg), const_int. */
3630 reg_or_int_operand (op, mode)
3631 rtx op;
3632 enum machine_mode mode;
3634 if (GET_CODE (op) == CONST_INT)
3635 return 1;
3637 if (GET_MODE (op) != mode && mode != VOIDmode)
3638 return 0;
3640 if (GET_CODE (op) == SUBREG)
3641 op = SUBREG_REG (op);
3643 /* We don't consider registers whose class is NO_REGS
3644 to be a register operand. */
3645 return (GET_CODE (op) == REG
3646 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3647 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
3650 /* Return 1 if OP is an item in memory, given that we are in reload. */
3653 arm_reload_memory_operand (op, mode)
3654 rtx op;
3655 enum machine_mode mode ATTRIBUTE_UNUSED;
3657 int regno = true_regnum (op);
3659 return (!CONSTANT_P (op)
3660 && (regno == -1
3661 || (GET_CODE (op) == REG
3662 && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
3665 /* Return 1 if OP is a valid memory address, but not valid for a signed byte
3666 memory access (architecture V4).
3667 MODE is QImode if called when computing constraints, or VOIDmode when
3668 emitting patterns. In this latter case we cannot use memory_operand()
3669 because it will fail on badly formed MEMs, which is precisely what we are
3670 trying to catch. */
3673 bad_signed_byte_operand (op, mode)
3674 rtx op;
3675 enum machine_mode mode ATTRIBUTE_UNUSED;
3677 #if 0
3678 if ((mode == QImode && !memory_operand (op, mode)) || GET_CODE (op) != MEM)
3679 return 0;
3680 #endif
3681 if (GET_CODE (op) != MEM)
3682 return 0;
3684 op = XEXP (op, 0);
3686 /* A sum of anything more complex than reg + reg or reg + const is bad. */
3687 if ((GET_CODE (op) == PLUS || GET_CODE (op) == MINUS)
3688 && (!s_register_operand (XEXP (op, 0), VOIDmode)
3689 || (!s_register_operand (XEXP (op, 1), VOIDmode)
3690 && GET_CODE (XEXP (op, 1)) != CONST_INT)))
3691 return 1;
3693 /* Big constants are also bad. */
3694 if (GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT
3695 && (INTVAL (XEXP (op, 1)) > 0xff
3696 || -INTVAL (XEXP (op, 1)) > 0xff))
3697 return 1;
3699 /* Everything else is good, or can automatically be made so. */
3700 return 0;
3703 /* Return TRUE for valid operands for the rhs of an ARM instruction. */
3706 arm_rhs_operand (op, mode)
3707 rtx op;
3708 enum machine_mode mode;
3710 return (s_register_operand (op, mode)
3711 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
3714 /* Return TRUE for valid operands for the
3715 rhs of an ARM instruction, or a load. */
3718 arm_rhsm_operand (op, mode)
3719 rtx op;
3720 enum machine_mode mode;
3722 return (s_register_operand (op, mode)
3723 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
3724 || memory_operand (op, mode));
3727 /* Return TRUE for valid operands for the rhs of an ARM instruction, or if a
3728 constant that is valid when negated. */
3731 arm_add_operand (op, mode)
3732 rtx op;
3733 enum machine_mode mode;
3735 if (TARGET_THUMB)
3736 return thumb_cmp_operand (op, mode);
3738 return (s_register_operand (op, mode)
3739 || (GET_CODE (op) == CONST_INT
3740 && (const_ok_for_arm (INTVAL (op))
3741 || const_ok_for_arm (-INTVAL (op)))));
3745 arm_not_operand (op, mode)
3746 rtx op;
3747 enum machine_mode mode;
3749 return (s_register_operand (op, mode)
3750 || (GET_CODE (op) == CONST_INT
3751 && (const_ok_for_arm (INTVAL (op))
3752 || const_ok_for_arm (~INTVAL (op)))));
3755 /* Return TRUE if the operand is a memory reference which contains an
3756 offsettable address. */
3759 offsettable_memory_operand (op, mode)
3760 rtx op;
3761 enum machine_mode mode;
3763 if (mode == VOIDmode)
3764 mode = GET_MODE (op);
3766 return (mode == GET_MODE (op)
3767 && GET_CODE (op) == MEM
3768 && offsettable_address_p (reload_completed | reload_in_progress,
3769 mode, XEXP (op, 0)));
3772 /* Return TRUE if the operand is a memory reference which is, or can be
3773 made word aligned by adjusting the offset. */
3776 alignable_memory_operand (op, mode)
3777 rtx op;
3778 enum machine_mode mode;
3780 rtx reg;
3782 if (mode == VOIDmode)
3783 mode = GET_MODE (op);
3785 if (mode != GET_MODE (op) || GET_CODE (op) != MEM)
3786 return 0;
3788 op = XEXP (op, 0);
3790 return ((GET_CODE (reg = op) == REG
3791 || (GET_CODE (op) == SUBREG
3792 && GET_CODE (reg = SUBREG_REG (op)) == REG)
3793 || (GET_CODE (op) == PLUS
3794 && GET_CODE (XEXP (op, 1)) == CONST_INT
3795 && (GET_CODE (reg = XEXP (op, 0)) == REG
3796 || (GET_CODE (XEXP (op, 0)) == SUBREG
3797 && GET_CODE (reg = SUBREG_REG (XEXP (op, 0))) == REG))))
3798 && REGNO_POINTER_ALIGN (REGNO (reg)) >= 32);
3801 /* Similar to s_register_operand, but does not allow hard integer
3802 registers. */
3805 f_register_operand (op, mode)
3806 rtx op;
3807 enum machine_mode mode;
3809 if (GET_MODE (op) != mode && mode != VOIDmode)
3810 return 0;
3812 if (GET_CODE (op) == SUBREG)
3813 op = SUBREG_REG (op);
3815 /* We don't consider registers whose class is NO_REGS
3816 to be a register operand. */
3817 return (GET_CODE (op) == REG
3818 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3819 || REGNO_REG_CLASS (REGNO (op)) == FPU_REGS));
3822 /* Return TRUE for valid operands for the rhs of an FPU instruction. */
3825 fpu_rhs_operand (op, mode)
3826 rtx op;
3827 enum machine_mode mode;
3829 if (s_register_operand (op, mode))
3830 return TRUE;
3832 if (GET_MODE (op) != mode && mode != VOIDmode)
3833 return FALSE;
3835 if (GET_CODE (op) == CONST_DOUBLE)
3836 return const_double_rtx_ok_for_fpu (op);
3838 return FALSE;
3842 fpu_add_operand (op, mode)
3843 rtx op;
3844 enum machine_mode mode;
3846 if (s_register_operand (op, mode))
3847 return TRUE;
3849 if (GET_MODE (op) != mode && mode != VOIDmode)
3850 return FALSE;
3852 if (GET_CODE (op) == CONST_DOUBLE)
3853 return (const_double_rtx_ok_for_fpu (op)
3854 || neg_const_double_rtx_ok_for_fpu (op));
3856 return FALSE;
3859 /* Return nonzero if OP is a constant power of two. */
3862 power_of_two_operand (op, mode)
3863 rtx op;
3864 enum machine_mode mode ATTRIBUTE_UNUSED;
3866 if (GET_CODE (op) == CONST_INT)
3868 HOST_WIDE_INT value = INTVAL (op);
3870 return value != 0 && (value & (value - 1)) == 0;
3873 return FALSE;
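/* The value & (value - 1) test clears the lowest set bit, so it is
   zero exactly for powers of two: 8 & 7 == 0, whereas 12 & 11 == 8.  */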
3876 /* Return TRUE for a valid operand of a DImode operation.
3877 Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
3878 Note that this disallows MEM(REG+REG), but allows
3879 MEM(PRE/POST_INC/DEC(REG)). */
3882 di_operand (op, mode)
3883 rtx op;
3884 enum machine_mode mode;
3886 if (s_register_operand (op, mode))
3887 return TRUE;
3889 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
3890 return FALSE;
3892 if (GET_CODE (op) == SUBREG)
3893 op = SUBREG_REG (op);
3895 switch (GET_CODE (op))
3897 case CONST_DOUBLE:
3898 case CONST_INT:
3899 return TRUE;
3901 case MEM:
3902 return memory_address_p (DImode, XEXP (op, 0));
3904 default:
3905 return FALSE;
3909 /* Like di_operand, but don't accept constants. */
3912 nonimmediate_di_operand (op, mode)
3913 rtx op;
3914 enum machine_mode mode;
3916 if (s_register_operand (op, mode))
3917 return TRUE;
3919 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
3920 return FALSE;
3922 if (GET_CODE (op) == SUBREG)
3923 op = SUBREG_REG (op);
3925 if (GET_CODE (op) == MEM)
3926 return memory_address_p (DImode, XEXP (op, 0));
3928 return FALSE;
3931 /* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
3932 Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
3933 Note that this disallows MEM(REG+REG), but allows
3934 MEM(PRE/POST_INC/DEC(REG)). */
3937 soft_df_operand (op, mode)
3938 rtx op;
3939 enum machine_mode mode;
3941 if (s_register_operand (op, mode))
3942 return TRUE;
3944 if (mode != VOIDmode && GET_MODE (op) != mode)
3945 return FALSE;
3947 if (GET_CODE (op) == SUBREG && CONSTANT_P (SUBREG_REG (op)))
3948 return FALSE;
3950 if (GET_CODE (op) == SUBREG)
3951 op = SUBREG_REG (op);
3953 switch (GET_CODE (op))
3955 case CONST_DOUBLE:
3956 return TRUE;
3958 case MEM:
3959 return memory_address_p (DFmode, XEXP (op, 0));
3961 default:
3962 return FALSE;
3966 /* Like soft_df_operand, but don't accept constants. */
3969 nonimmediate_soft_df_operand (op, mode)
3970 rtx op;
3971 enum machine_mode mode;
3973 if (s_register_operand (op, mode))
3974 return TRUE;
3976 if (mode != VOIDmode && GET_MODE (op) != mode)
3977 return FALSE;
3979 if (GET_CODE (op) == SUBREG)
3980 op = SUBREG_REG (op);
3982 if (GET_CODE (op) == MEM)
3983 return memory_address_p (DFmode, XEXP (op, 0));
3984 return FALSE;
3987 /* Return TRUE for valid index operands. */
3990 index_operand (op, mode)
3991 rtx op;
3992 enum machine_mode mode;
3994 return (s_register_operand (op, mode)
3995 || (immediate_operand (op, mode)
3996 && (GET_CODE (op) != CONST_INT
3997 || (INTVAL (op) < 4096 && INTVAL (op) > -4096))));
4000 /* Return TRUE for valid shifts by a constant. This also accepts any
4001 power of two on the (somewhat overly relaxed) assumption that the
4002 shift operator in this case was a mult. */
4005 const_shift_operand (op, mode)
4006 rtx op;
4007 enum machine_mode mode;
4009 return (power_of_two_operand (op, mode)
4010 || (immediate_operand (op, mode)
4011 && (GET_CODE (op) != CONST_INT
4012 || (INTVAL (op) < 32 && INTVAL (op) > 0))));
4015 /* Return TRUE for arithmetic operators which can be combined with a multiply
4016 (shift). */
4019 shiftable_operator (x, mode)
4020 rtx x;
4021 enum machine_mode mode;
4023 enum rtx_code code;
4025 if (GET_MODE (x) != mode)
4026 return FALSE;
4028 code = GET_CODE (x);
4030 return (code == PLUS || code == MINUS
4031 || code == IOR || code == XOR || code == AND);
4034 /* Return TRUE for binary logical operators. */
4037 logical_binary_operator (x, mode)
4038 rtx x;
4039 enum machine_mode mode;
4041 enum rtx_code code;
4043 if (GET_MODE (x) != mode)
4044 return FALSE;
4046 code = GET_CODE (x);
4048 return (code == IOR || code == XOR || code == AND);
4051 /* Return TRUE for shift operators. */
4054 shift_operator (x, mode)
4055 rtx x;
4056 enum machine_mode mode;
4058 enum rtx_code code;
4060 if (GET_MODE (x) != mode)
4061 return FALSE;
4063 code = GET_CODE (x);
4065 if (code == MULT)
4066 return power_of_two_operand (XEXP (x, 1), mode);
4068 return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
4069 || code == ROTATERT);
4072 /* Return TRUE if x is EQ or NE. */
4075 equality_operator (x, mode)
4076 rtx x;
4077 enum machine_mode mode ATTRIBUTE_UNUSED;
4079 return GET_CODE (x) == EQ || GET_CODE (x) == NE;
4082 /* Return TRUE if x is a comparison operator other than LTGT or UNEQ. */
4085 arm_comparison_operator (x, mode)
4086 rtx x;
4087 enum machine_mode mode;
4089 return (comparison_operator (x, mode)
4090 && GET_CODE (x) != LTGT
4091 && GET_CODE (x) != UNEQ);
4094 /* Return TRUE for SMIN SMAX UMIN UMAX operators. */
4097 minmax_operator (x, mode)
4098 rtx x;
4099 enum machine_mode mode;
4101 enum rtx_code code = GET_CODE (x);
4103 if (GET_MODE (x) != mode)
4104 return FALSE;
4106 return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
4109 /* Return TRUE if this is the condition code register; if we aren't given
4110 a mode, accept any class CCmode register. */
4113 cc_register (x, mode)
4114 rtx x;
4115 enum machine_mode mode;
4117 if (mode == VOIDmode)
4119 mode = GET_MODE (x);
4121 if (GET_MODE_CLASS (mode) != MODE_CC)
4122 return FALSE;
4125 if ( GET_MODE (x) == mode
4126 && GET_CODE (x) == REG
4127 && REGNO (x) == CC_REGNUM)
4128 return TRUE;
4130 return FALSE;
4133 /* Return TRUE if this is the condition code register; if we aren't given
4134 a mode, accept any class CCmode register which indicates a dominance
4135 expression. */
4138 dominant_cc_register (x, mode)
4139 rtx x;
4140 enum machine_mode mode;
4142 if (mode == VOIDmode)
4144 mode = GET_MODE (x);
4146 if (GET_MODE_CLASS (mode) != MODE_CC)
4147 return FALSE;
4150 if ( mode != CC_DNEmode && mode != CC_DEQmode
4151 && mode != CC_DLEmode && mode != CC_DLTmode
4152 && mode != CC_DGEmode && mode != CC_DGTmode
4153 && mode != CC_DLEUmode && mode != CC_DLTUmode
4154 && mode != CC_DGEUmode && mode != CC_DGTUmode)
4155 return FALSE;
4157 return cc_register (x, mode);
4160 /* Return TRUE if X references a SYMBOL_REF. */
4163 symbol_mentioned_p (x)
4164 rtx x;
4166 const char * fmt;
4167 int i;
4169 if (GET_CODE (x) == SYMBOL_REF)
4170 return 1;
4172 fmt = GET_RTX_FORMAT (GET_CODE (x));
4174 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4176 if (fmt[i] == 'E')
4178 int j;
4180 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4181 if (symbol_mentioned_p (XVECEXP (x, i, j)))
4182 return 1;
4184 else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
4185 return 1;
4188 return 0;
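/* Editorial example, not in the original source: for the rtx
   (const:SI (plus:SI (symbol_ref:SI ("x")) (const_int 4)))
   the recursive walk above descends through the 'e' format slots of
   CONST and PLUS and returns 1 on reaching the SYMBOL_REF. */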
4191 /* Return TRUE if X references a LABEL_REF. */
4194 label_mentioned_p (x)
4195 rtx x;
4197 const char * fmt;
4198 int i;
4200 if (GET_CODE (x) == LABEL_REF)
4201 return 1;
4203 fmt = GET_RTX_FORMAT (GET_CODE (x));
4204 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4206 if (fmt[i] == 'E')
4208 int j;
4210 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4211 if (label_mentioned_p (XVECEXP (x, i, j)))
4212 return 1;
4214 else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
4215 return 1;
4218 return 0;
4221 enum rtx_code
4222 minmax_code (x)
4223 rtx x;
4225 enum rtx_code code = GET_CODE (x);
4227 if (code == SMAX)
4228 return GE;
4229 else if (code == SMIN)
4230 return LE;
4231 else if (code == UMIN)
4232 return LEU;
4233 else if (code == UMAX)
4234 return GEU;
4236 abort ();
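/* Editorial note, not in the original source: the mapping above gives
   the condition under which the first operand is selected; e.g. for
   (smax:SI x y) the result is x exactly when x >= y, hence GE.  That
   lets the backend emit a sequence such as
       cmp   x, y
       movlt x, y      @ x := max (x, y)
   (register names illustrative). */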
4239 /* Return 1 if memory locations are adjacent. */
4242 adjacent_mem_locations (a, b)
4243 rtx a, b;
4245 if ((GET_CODE (XEXP (a, 0)) == REG
4246 || (GET_CODE (XEXP (a, 0)) == PLUS
4247 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
4248 && (GET_CODE (XEXP (b, 0)) == REG
4249 || (GET_CODE (XEXP (b, 0)) == PLUS
4250 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
4252 int val0 = 0, val1 = 0;
4253 int reg0, reg1;
4255 if (GET_CODE (XEXP (a, 0)) == PLUS)
4257 reg0 = REGNO (XEXP (XEXP (a, 0), 0));
4258 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
4260 else
4261 reg0 = REGNO (XEXP (a, 0));
4263 if (GET_CODE (XEXP (b, 0)) == PLUS)
4265 reg1 = REGNO (XEXP (XEXP (b, 0), 0));
4266 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
4268 else
4269 reg1 = REGNO (XEXP (b, 0));
4271 return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
4273 return 0;
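/* Editorial example, not in the original source:
     (mem:SI (plus:SI (reg:SI r4) (const_int 8)))  and
     (mem:SI (plus:SI (reg:SI r4) (const_int 12)))
   are adjacent (same base register, offsets differing by 4, in either
   order); addresses based on different registers, or 8 or more bytes
   apart, are rejected. */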
4276 /* Return 1 if OP is a load multiple operation. It is known to be a
4277 PARALLEL and the first section will be tested. */
4280 load_multiple_operation (op, mode)
4281 rtx op;
4282 enum machine_mode mode ATTRIBUTE_UNUSED;
4284 HOST_WIDE_INT count = XVECLEN (op, 0);
4285 int dest_regno;
4286 rtx src_addr;
4287 HOST_WIDE_INT i = 1, base = 0;
4288 rtx elt;
4290 if (count <= 1
4291 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
4292 return 0;
4294 /* Check to see if this might be a write-back. */
4295 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
4297 i++;
4298 base = 1;
4300 /* Now check it more carefully. */
4301 if (GET_CODE (SET_DEST (elt)) != REG
4302 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
4303 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
4304 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
4305 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
4306 return 0;
4309 /* Perform a quick check so we don't blow up below. */
4310 if (count <= i
4311 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
4312 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
4313 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
4314 return 0;
4316 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
4317 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);
4319 for (; i < count; i++)
4321 elt = XVECEXP (op, 0, i);
4323 if (GET_CODE (elt) != SET
4324 || GET_CODE (SET_DEST (elt)) != REG
4325 || GET_MODE (SET_DEST (elt)) != SImode
4326 || REGNO (SET_DEST (elt)) != (unsigned int)(dest_regno + i - base)
4327 || GET_CODE (SET_SRC (elt)) != MEM
4328 || GET_MODE (SET_SRC (elt)) != SImode
4329 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
4330 || !rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
4331 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
4332 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
4333 return 0;
4336 return 1;
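/* Editorial example, not in the original source, of a PARALLEL the
   predicate above accepts (an ldmia of r4-r6 from the address in r0,
   no write-back; register numbers illustrative):
     (parallel [(set (reg:SI 4) (mem:SI (reg:SI 0)))
                (set (reg:SI 5) (mem:SI (plus:SI (reg:SI 0) (const_int 4))))
                (set (reg:SI 6) (mem:SI (plus:SI (reg:SI 0) (const_int 8))))])
   With write-back, element 0 would instead be
     (set (reg:SI 0) (plus:SI (reg:SI 0) (const_int 12))). */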
4339 /* Return 1 if OP is a store multiple operation. It is known to be a
4340 PARALLEL and the first section will be tested. */
4343 store_multiple_operation (op, mode)
4344 rtx op;
4345 enum machine_mode mode ATTRIBUTE_UNUSED;
4347 HOST_WIDE_INT count = XVECLEN (op, 0);
4348 int src_regno;
4349 rtx dest_addr;
4350 HOST_WIDE_INT i = 1, base = 0;
4351 rtx elt;
4353 if (count <= 1
4354 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
4355 return 0;
4357 /* Check to see if this might be a write-back. */
4358 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
4360 i++;
4361 base = 1;
4363 /* Now check it more carefully. */
4364 if (GET_CODE (SET_DEST (elt)) != REG
4365 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
4366 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
4367 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
4368 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
4369 return 0;
4372 /* Perform a quick check so we don't blow up below. */
4373 if (count <= i
4374 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
4375 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
4376 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
4377 return 0;
4379 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
4380 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);
4382 for (; i < count; i++)
4384 elt = XVECEXP (op, 0, i);
4386 if (GET_CODE (elt) != SET
4387 || GET_CODE (SET_SRC (elt)) != REG
4388 || GET_MODE (SET_SRC (elt)) != SImode
4389 || REGNO (SET_SRC (elt)) != (unsigned int)(src_regno + i - base)
4390 || GET_CODE (SET_DEST (elt)) != MEM
4391 || GET_MODE (SET_DEST (elt)) != SImode
4392 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
4393 || !rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
4394 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
4395 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
4396 return 0;
4399 return 1;
4403 load_multiple_sequence (operands, nops, regs, base, load_offset)
4404 rtx * operands;
4405 int nops;
4406 int * regs;
4407 int * base;
4408 HOST_WIDE_INT * load_offset;
4410 int unsorted_regs[4];
4411 HOST_WIDE_INT unsorted_offsets[4];
4412 int order[4];
4413 int base_reg = -1;
4414 int i;
4416 /* Can only handle 2, 3, or 4 insns at present,
4417 though could be easily extended if required. */
4418 if (nops < 2 || nops > 4)
4419 abort ();
4421 /* Loop over the operands and check that the memory references are
4422 suitable (i.e. immediate offsets from the same base register). At
4423 the same time, extract the target register and the memory
4424 offsets. */
4425 for (i = 0; i < nops; i++)
4427 rtx reg;
4428 rtx offset;
4430 /* Convert a subreg of a mem into the mem itself. */
4431 if (GET_CODE (operands[nops + i]) == SUBREG)
4432 operands[nops + i] = alter_subreg (operands + (nops + i));
4434 if (GET_CODE (operands[nops + i]) != MEM)
4435 abort ();
4437 /* Don't reorder volatile memory references; it doesn't seem worth
4438 looking for the case where the order is ok anyway. */
4439 if (MEM_VOLATILE_P (operands[nops + i]))
4440 return 0;
4442 offset = const0_rtx;
4444 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
4445 || (GET_CODE (reg) == SUBREG
4446 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4447 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
4448 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
4449 == REG)
4450 || (GET_CODE (reg) == SUBREG
4451 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4452 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
4453 == CONST_INT)))
4455 if (i == 0)
4457 base_reg = REGNO (reg);
4458 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
4459 ? REGNO (operands[i])
4460 : REGNO (SUBREG_REG (operands[i])));
4461 order[0] = 0;
4463 else
4465 if (base_reg != (int) REGNO (reg))
4466 /* Not addressed from the same base register. */
4467 return 0;
4469 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
4470 ? REGNO (operands[i])
4471 : REGNO (SUBREG_REG (operands[i])));
4472 if (unsorted_regs[i] < unsorted_regs[order[0]])
4473 order[0] = i;
4476 /* If it isn't an integer register, or if it overwrites the
4477 base register but isn't the last insn in the list, then
4478 we can't do this. */
4479 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14
4480 || (i != nops - 1 && unsorted_regs[i] == base_reg))
4481 return 0;
4483 unsorted_offsets[i] = INTVAL (offset);
4485 else
4486 /* Not a suitable memory address. */
4487 return 0;
4490 /* All the useful information has now been extracted from the
4491 operands into unsorted_regs and unsorted_offsets; additionally,
4492 order[0] has been set to the lowest numbered register in the
4493 list. Sort the registers into order, and check that the memory
4494 offsets are ascending and adjacent. */
4496 for (i = 1; i < nops; i++)
4498 int j;
4500 order[i] = order[i - 1];
4501 for (j = 0; j < nops; j++)
4502 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
4503 && (order[i] == order[i - 1]
4504 || unsorted_regs[j] < unsorted_regs[order[i]]))
4505 order[i] = j;
4507 /* Have we found a suitable register? If not, one must be used more
4508 than once. */
4509 if (order[i] == order[i - 1])
4510 return 0;
4512 /* Is the memory address adjacent and ascending? */
4513 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
4514 return 0;
4517 if (base)
4519 *base = base_reg;
4521 for (i = 0; i < nops; i++)
4522 regs[i] = unsorted_regs[order[i]];
4524 *load_offset = unsorted_offsets[order[0]];
4527 if (unsorted_offsets[order[0]] == 0)
4528 return 1; /* ldmia */
4530 if (unsorted_offsets[order[0]] == 4)
4531 return 2; /* ldmib */
4533 if (unsorted_offsets[order[nops - 1]] == 0)
4534 return 3; /* ldmda */
4536 if (unsorted_offsets[order[nops - 1]] == -4)
4537 return 4; /* ldmdb */
4539 /* For ARM8,9 & StrongARM, 2 ldr instructions are faster than an ldm
4540 if the offset isn't small enough. The reason 2 ldrs are faster
4541 is because these ARMs are able to do more than one cache access
4542 in a single cycle. The ARM9 and StrongARM have Harvard caches,
4543 whilst the ARM8 has a double bandwidth cache. This means that
4544 these cores can do both an instruction fetch and a data fetch in
4545 a single cycle, so the trick of calculating the address into a
4546 scratch register (one of the result regs) and then doing a load
4547 multiple actually becomes slower (and no smaller in code size).
4548 That is, the transformation
4550 ldr rd1, [rbase + offset]
4551 ldr rd2, [rbase + offset + 4]
4553 to
4555 add rd1, rbase, offset
4556 ldmia rd1, {rd1, rd2}
4558 produces worse code -- '3 cycles + any stalls on rd2' instead of
4559 '2 cycles + any stalls on rd2'. On ARMs with only one cache
4560 access per cycle, the first sequence could never complete in less
4561 than 6 cycles, whereas the ldm sequence would only take 5 and
4562 would make better use of sequential accesses if not hitting the
4563 cache.
4565 We cheat here and test 'arm_ld_sched' which we currently know to
4566 only be true for the ARM8, ARM9 and StrongARM. If this ever
4567 changes, then the test below needs to be reworked. */
4568 if (nops == 2 && arm_ld_sched)
4569 return 0;
4571 /* Can't do it without setting up the offset; only do this if it takes
4572 no more than one insn. */
4573 return (const_ok_for_arm (unsorted_offsets[order[0]])
4574 || const_ok_for_arm (-unsorted_offsets[order[0]])) ? 5 : 0;
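/* Editorial summary, not in the original source, of the return codes
   above for a four-register sequence based on r0 (offsets sorted):
     {0, 4, 8, 12}      -> 1  ldmia
     {4, 8, 12, 16}     -> 2  ldmib
     {-12, -8, -4, 0}   -> 3  ldmda
     {-16, -12, -8, -4} -> 4  ldmdb
   Other offset runs reachable with one add/sub return 5 (set up a
   scratch base, then ldmia); 0 means "use separate ldr insns". */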
4577 const char *
4578 emit_ldm_seq (operands, nops)
4579 rtx * operands;
4580 int nops;
4582 int regs[4];
4583 int base_reg;
4584 HOST_WIDE_INT offset;
4585 char buf[100];
4586 int i;
4588 switch (load_multiple_sequence (operands, nops, regs, &base_reg, &offset))
4590 case 1:
4591 strcpy (buf, "ldm%?ia\t");
4592 break;
4594 case 2:
4595 strcpy (buf, "ldm%?ib\t");
4596 break;
4598 case 3:
4599 strcpy (buf, "ldm%?da\t");
4600 break;
4602 case 4:
4603 strcpy (buf, "ldm%?db\t");
4604 break;
4606 case 5:
4607 if (offset >= 0)
4608 sprintf (buf, "add%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
4609 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
4610 (long) offset);
4611 else
4612 sprintf (buf, "sub%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
4613 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
4614 (long) -offset);
4615 output_asm_insn (buf, operands);
4616 base_reg = regs[0];
4617 strcpy (buf, "ldm%?ia\t");
4618 break;
4620 default:
4621 abort ();
4624 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
4625 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
4627 for (i = 1; i < nops; i++)
4628 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
4629 reg_names[regs[i]]);
4631 strcat (buf, "}\t%@ phole ldm");
4633 output_asm_insn (buf, operands);
4634 return "";
4638 store_multiple_sequence (operands, nops, regs, base, load_offset)
4639 rtx * operands;
4640 int nops;
4641 int * regs;
4642 int * base;
4643 HOST_WIDE_INT * load_offset;
4645 int unsorted_regs[4];
4646 HOST_WIDE_INT unsorted_offsets[4];
4647 int order[4];
4648 int base_reg = -1;
4649 int i;
4651 /* Can only handle 2, 3, or 4 insns at present, though could be easily
4652 extended if required. */
4653 if (nops < 2 || nops > 4)
4654 abort ();
4656 /* Loop over the operands and check that the memory references are
4657 suitable (i.e. immediate offsets from the same base register). At
4658 the same time, extract the target register and the memory
4659 offsets. */
4660 for (i = 0; i < nops; i++)
4662 rtx reg;
4663 rtx offset;
4665 /* Convert a subreg of a mem into the mem itself. */
4666 if (GET_CODE (operands[nops + i]) == SUBREG)
4667 operands[nops + i] = alter_subreg (operands + (nops + i));
4669 if (GET_CODE (operands[nops + i]) != MEM)
4670 abort ();
4672 /* Don't reorder volatile memory references; it doesn't seem worth
4673 looking for the case where the order is ok anyway. */
4674 if (MEM_VOLATILE_P (operands[nops + i]))
4675 return 0;
4677 offset = const0_rtx;
4679 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
4680 || (GET_CODE (reg) == SUBREG
4681 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4682 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
4683 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
4684 == REG)
4685 || (GET_CODE (reg) == SUBREG
4686 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4687 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
4688 == CONST_INT)))
4690 if (i == 0)
4692 base_reg = REGNO (reg);
4693 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
4694 ? REGNO (operands[i])
4695 : REGNO (SUBREG_REG (operands[i])));
4696 order[0] = 0;
4698 else
4700 if (base_reg != (int) REGNO (reg))
4701 /* Not addressed from the same base register. */
4702 return 0;
4704 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
4705 ? REGNO (operands[i])
4706 : REGNO (SUBREG_REG (operands[i])));
4707 if (unsorted_regs[i] < unsorted_regs[order[0]])
4708 order[0] = i;
4711 /* If it isn't an integer register, then we can't do this. */
4712 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14)
4713 return 0;
4715 unsorted_offsets[i] = INTVAL (offset);
4717 else
4718 /* Not a suitable memory address. */
4719 return 0;
4722 /* All the useful information has now been extracted from the
4723 operands into unsorted_regs and unsorted_offsets; additionally,
4724 order[0] has been set to the lowest numbered register in the
4725 list. Sort the registers into order, and check that the memory
4726 offsets are ascending and adjacent. */
4728 for (i = 1; i < nops; i++)
4730 int j;
4732 order[i] = order[i - 1];
4733 for (j = 0; j < nops; j++)
4734 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
4735 && (order[i] == order[i - 1]
4736 || unsorted_regs[j] < unsorted_regs[order[i]]))
4737 order[i] = j;
4739 /* Have we found a suitable register? If not, one must be used more
4740 than once. */
4741 if (order[i] == order[i - 1])
4742 return 0;
4744 /* Is the memory address adjacent and ascending? */
4745 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
4746 return 0;
4749 if (base)
4751 *base = base_reg;
4753 for (i = 0; i < nops; i++)
4754 regs[i] = unsorted_regs[order[i]];
4756 *load_offset = unsorted_offsets[order[0]];
4759 if (unsorted_offsets[order[0]] == 0)
4760 return 1; /* stmia */
4762 if (unsorted_offsets[order[0]] == 4)
4763 return 2; /* stmib */
4765 if (unsorted_offsets[order[nops - 1]] == 0)
4766 return 3; /* stmda */
4768 if (unsorted_offsets[order[nops - 1]] == -4)
4769 return 4; /* stmdb */
4771 return 0;
4774 const char *
4775 emit_stm_seq (operands, nops)
4776 rtx * operands;
4777 int nops;
4779 int regs[4];
4780 int base_reg;
4781 HOST_WIDE_INT offset;
4782 char buf[100];
4783 int i;
4785 switch (store_multiple_sequence (operands, nops, regs, &base_reg, &offset))
4787 case 1:
4788 strcpy (buf, "stm%?ia\t");
4789 break;
4791 case 2:
4792 strcpy (buf, "stm%?ib\t");
4793 break;
4795 case 3:
4796 strcpy (buf, "stm%?da\t");
4797 break;
4799 case 4:
4800 strcpy (buf, "stm%?db\t");
4801 break;
4803 default:
4804 abort ();
4807 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
4808 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
4810 for (i = 1; i < nops; i++)
4811 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
4812 reg_names[regs[i]]);
4814 strcat (buf, "}\t%@ phole stm");
4816 output_asm_insn (buf, operands);
4817 return "";
4821 multi_register_push (op, mode)
4822 rtx op;
4823 enum machine_mode mode ATTRIBUTE_UNUSED;
4825 if (GET_CODE (op) != PARALLEL
4826 || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
4827 || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
4828 || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != UNSPEC_PUSH_MULT))
4829 return 0;
4831 return 1;
4834 /* Routines for use in generating RTL. */
4837 arm_gen_load_multiple (base_regno, count, from, up, write_back, unchanging_p,
4838 in_struct_p, scalar_p)
4839 int base_regno;
4840 int count;
4841 rtx from;
4842 int up;
4843 int write_back;
4844 int unchanging_p;
4845 int in_struct_p;
4846 int scalar_p;
4848 int i = 0, j;
4849 rtx result;
4850 int sign = up ? 1 : -1;
4851 rtx mem;
4853 /* XScale has load-store double instructions, but they have stricter
4854 alignment requirements than load-store multiple, so we cannot
4855 use them.
4857 For XScale ldm requires 2 + NREGS cycles to complete and blocks
4858 the pipeline until completion.
4860 NREGS CYCLES
4861 1 3
4862 2 4
4863 3 5
4864 4 6
4866 An ldr instruction takes 1-3 cycles, but does not block the
4867 pipeline.
4869 NREGS CYCLES
4870 1 1-3
4871 2 2-6
4872 3 3-9
4873 4 4-12
4875 Best case ldr will always win. However, the more ldr instructions
4876 we issue, the less likely we are to be able to schedule them well.
4877 Using ldr instructions also increases code size.
4879 As a compromise, we use ldr for counts of 1 or 2 regs, and ldm
4880 for counts of 3 or 4 regs. */
4881 if (arm_is_xscale && count <= 2 && ! optimize_size)
4883 rtx seq;
4885 start_sequence ();
4887 for (i = 0; i < count; i++)
4889 mem = gen_rtx_MEM (SImode, plus_constant (from, i * 4 * sign));
4890 RTX_UNCHANGING_P (mem) = unchanging_p;
4891 MEM_IN_STRUCT_P (mem) = in_struct_p;
4892 MEM_SCALAR_P (mem) = scalar_p;
4893 emit_move_insn (gen_rtx_REG (SImode, base_regno + i), mem);
4896 if (write_back)
4897 emit_move_insn (from, plus_constant (from, count * 4 * sign));
4899 seq = get_insns ();
4900 end_sequence ();
4902 return seq;
4905 result = gen_rtx_PARALLEL (VOIDmode,
4906 rtvec_alloc (count + (write_back ? 1 : 0)));
4907 if (write_back)
4909 XVECEXP (result, 0, 0)
4910 = gen_rtx_SET (GET_MODE (from), from,
4911 plus_constant (from, count * 4 * sign));
4912 i = 1;
4913 count++;
4916 for (j = 0; i < count; i++, j++)
4918 mem = gen_rtx_MEM (SImode, plus_constant (from, j * 4 * sign));
4919 RTX_UNCHANGING_P (mem) = unchanging_p;
4920 MEM_IN_STRUCT_P (mem) = in_struct_p;
4921 MEM_SCALAR_P (mem) = scalar_p;
4922 XVECEXP (result, 0, i)
4923 = gen_rtx_SET (VOIDmode, gen_rtx_REG (SImode, base_regno + j), mem);
4926 return result;
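/* Editorial example, not in the original source: a call such as
   arm_gen_load_multiple (4, 2, from, 1, 1, ...) (base_regno 4, two
   registers, ascending, write-back) builds
     (parallel [(set (reg from) (plus (reg from) (const_int 8)))
                (set (reg:SI 4) (mem:SI (reg from)))
                (set (reg:SI 5) (mem:SI (plus:SI (reg from) (const_int 4))))])
   except on XScale, where for counts of 1 or 2 the separate-ldr
   sequence above is returned instead (unless optimizing for size). */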
4930 arm_gen_store_multiple (base_regno, count, to, up, write_back, unchanging_p,
4931 in_struct_p, scalar_p)
4932 int base_regno;
4933 int count;
4934 rtx to;
4935 int up;
4936 int write_back;
4937 int unchanging_p;
4938 int in_struct_p;
4939 int scalar_p;
4941 int i = 0, j;
4942 rtx result;
4943 int sign = up ? 1 : -1;
4944 rtx mem;
4946 /* See arm_gen_load_multiple for discussion of
4947 the pros/cons of ldm/stm usage for XScale. */
4948 if (arm_is_xscale && count <= 2 && ! optimize_size)
4950 rtx seq;
4952 start_sequence ();
4954 for (i = 0; i < count; i++)
4956 mem = gen_rtx_MEM (SImode, plus_constant (to, i * 4 * sign));
4957 RTX_UNCHANGING_P (mem) = unchanging_p;
4958 MEM_IN_STRUCT_P (mem) = in_struct_p;
4959 MEM_SCALAR_P (mem) = scalar_p;
4960 emit_move_insn (mem, gen_rtx_REG (SImode, base_regno + i));
4963 if (write_back)
4964 emit_move_insn (to, plus_constant (to, count * 4 * sign));
4966 seq = get_insns ();
4967 end_sequence ();
4969 return seq;
4972 result = gen_rtx_PARALLEL (VOIDmode,
4973 rtvec_alloc (count + (write_back ? 1 : 0)));
4974 if (write_back)
4976 XVECEXP (result, 0, 0)
4977 = gen_rtx_SET (GET_MODE (to), to,
4978 plus_constant (to, count * 4 * sign));
4979 i = 1;
4980 count++;
4983 for (j = 0; i < count; i++, j++)
4985 mem = gen_rtx_MEM (SImode, plus_constant (to, j * 4 * sign));
4986 RTX_UNCHANGING_P (mem) = unchanging_p;
4987 MEM_IN_STRUCT_P (mem) = in_struct_p;
4988 MEM_SCALAR_P (mem) = scalar_p;
4990 XVECEXP (result, 0, i)
4991 = gen_rtx_SET (VOIDmode, mem, gen_rtx_REG (SImode, base_regno + j));
4994 return result;
4998 arm_gen_movstrqi (operands)
4999 rtx * operands;
5001 HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
5002 int i;
5003 rtx src, dst;
5004 rtx st_src, st_dst, fin_src, fin_dst;
5005 rtx part_bytes_reg = NULL;
5006 rtx mem;
5007 int dst_unchanging_p, dst_in_struct_p, src_unchanging_p, src_in_struct_p;
5008 int dst_scalar_p, src_scalar_p;
5010 if (GET_CODE (operands[2]) != CONST_INT
5011 || GET_CODE (operands[3]) != CONST_INT
5012 || INTVAL (operands[2]) > 64
5013 || INTVAL (operands[3]) & 3)
5014 return 0;
5016 st_dst = XEXP (operands[0], 0);
5017 st_src = XEXP (operands[1], 0);
5019 dst_unchanging_p = RTX_UNCHANGING_P (operands[0]);
5020 dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
5021 dst_scalar_p = MEM_SCALAR_P (operands[0]);
5022 src_unchanging_p = RTX_UNCHANGING_P (operands[1]);
5023 src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);
5024 src_scalar_p = MEM_SCALAR_P (operands[1]);
5026 fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
5027 fin_src = src = copy_to_mode_reg (SImode, st_src);
5029 in_words_to_go = ARM_NUM_INTS (INTVAL (operands[2]));
5030 out_words_to_go = INTVAL (operands[2]) / 4;
5031 last_bytes = INTVAL (operands[2]) & 3;
5033 if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
5034 part_bytes_reg = gen_rtx_REG (SImode, (in_words_to_go - 1) & 3);
5036 for (i = 0; in_words_to_go >= 2; i+=4)
5038 if (in_words_to_go > 4)
5039 emit_insn (arm_gen_load_multiple (0, 4, src, TRUE, TRUE,
5040 src_unchanging_p,
5041 src_in_struct_p,
5042 src_scalar_p));
5043 else
5044 emit_insn (arm_gen_load_multiple (0, in_words_to_go, src, TRUE,
5045 FALSE, src_unchanging_p,
5046 src_in_struct_p, src_scalar_p));
5048 if (out_words_to_go)
5050 if (out_words_to_go > 4)
5051 emit_insn (arm_gen_store_multiple (0, 4, dst, TRUE, TRUE,
5052 dst_unchanging_p,
5053 dst_in_struct_p,
5054 dst_scalar_p));
5055 else if (out_words_to_go != 1)
5056 emit_insn (arm_gen_store_multiple (0, out_words_to_go,
5057 dst, TRUE,
5058 (last_bytes == 0
5059 ? FALSE : TRUE),
5060 dst_unchanging_p,
5061 dst_in_struct_p,
5062 dst_scalar_p));
5063 else
5065 mem = gen_rtx_MEM (SImode, dst);
5066 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
5067 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
5068 MEM_SCALAR_P (mem) = dst_scalar_p;
5069 emit_move_insn (mem, gen_rtx_REG (SImode, 0));
5070 if (last_bytes != 0)
5071 emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
5075 in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
5076 out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
5079 /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do. */
5080 if (out_words_to_go)
5082 rtx sreg;
5084 mem = gen_rtx_MEM (SImode, src);
5085 RTX_UNCHANGING_P (mem) = src_unchanging_p;
5086 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
5087 MEM_SCALAR_P (mem) = src_scalar_p;
5088 emit_move_insn (sreg = gen_reg_rtx (SImode), mem);
5089 emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));
5091 mem = gen_rtx_MEM (SImode, dst);
5092 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
5093 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
5094 MEM_SCALAR_P (mem) = dst_scalar_p;
5095 emit_move_insn (mem, sreg);
5096 emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
5097 in_words_to_go--;
5099 if (in_words_to_go) /* Sanity check */
5100 abort ();
5103 if (in_words_to_go)
5105 if (in_words_to_go < 0)
5106 abort ();
5108 mem = gen_rtx_MEM (SImode, src);
5109 RTX_UNCHANGING_P (mem) = src_unchanging_p;
5110 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
5111 MEM_SCALAR_P (mem) = src_scalar_p;
5112 part_bytes_reg = copy_to_mode_reg (SImode, mem);
5115 if (last_bytes && part_bytes_reg == NULL)
5116 abort ();
5118 if (BYTES_BIG_ENDIAN && last_bytes)
5120 rtx tmp = gen_reg_rtx (SImode);
5122 /* The bytes we want are in the top end of the word. */
5123 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
5124 GEN_INT (8 * (4 - last_bytes))));
5125 part_bytes_reg = tmp;
5127 while (last_bytes)
5129 mem = gen_rtx_MEM (QImode, plus_constant (dst, last_bytes - 1));
5130 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
5131 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
5132 MEM_SCALAR_P (mem) = dst_scalar_p;
5133 emit_move_insn (mem, gen_lowpart (QImode, part_bytes_reg));
5135 if (--last_bytes)
5137 tmp = gen_reg_rtx (SImode);
5138 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
5139 part_bytes_reg = tmp;
5144 else
5146 if (last_bytes > 1)
5148 mem = gen_rtx_MEM (HImode, dst);
5149 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
5150 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
5151 MEM_SCALAR_P (mem) = dst_scalar_p;
5152 emit_move_insn (mem, gen_lowpart (HImode, part_bytes_reg));
5153 last_bytes -= 2;
5154 if (last_bytes)
5156 rtx tmp = gen_reg_rtx (SImode);
5158 emit_insn (gen_addsi3 (dst, dst, GEN_INT (2)));
5159 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (16)));
5160 part_bytes_reg = tmp;
5164 if (last_bytes)
5166 mem = gen_rtx_MEM (QImode, dst);
5167 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
5168 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
5169 MEM_SCALAR_P (mem) = dst_scalar_p;
5170 emit_move_insn (mem, gen_lowpart (QImode, part_bytes_reg));
5174 return 1;
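/* Editorial worked example, not in the original source: for a 14-byte
   copy, in_words_to_go = 4, out_words_to_go = 3 and last_bytes = 2,
   so the code above loads four words into r0-r3, stores three of them
   with write-back, and finally stores one halfword taken from the
   fourth word, which was left in part_bytes_reg (shifted down first
   on big-endian targets). */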
5177 /* Generate a memory reference for a half word, such that it will be loaded
5178 into the top 16 bits of the word. We can assume that the address is
5179 known to be alignable and of the form reg, or plus (reg, const). */
5182 arm_gen_rotated_half_load (memref)
5183 rtx memref;
5185 HOST_WIDE_INT offset = 0;
5186 rtx base = XEXP (memref, 0);
5188 if (GET_CODE (base) == PLUS)
5190 offset = INTVAL (XEXP (base, 1));
5191 base = XEXP (base, 0);
5194 /* If we aren't allowed to generate unaligned addresses, then fail. */
5195 if (TARGET_MMU_TRAPS
5196 && ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 0)))
5197 return NULL;
5199 base = gen_rtx_MEM (SImode, plus_constant (base, offset & ~2));
5201 if ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 2))
5202 return base;
5204 return gen_rtx_ROTATE (SImode, base, GEN_INT (16));
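/* Editorial example, not in the original source: on a little-endian
   target the halfword at [r0, #2] already occupies the top 16 bits of
   the word at [r0], so the SImode load is returned unrotated, while
   the halfword at [r0, #0] sits in the low half and the load is
   wrapped in (rotate:SI ... (const_int 16)).  On big-endian targets
   the two cases are reversed. */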
5207 /* Select a dominance comparison mode if possible. We support three forms.
5208 COND_OR == 0 => (X && Y)
5209 COND_OR == 1 => ((! X) || Y)
5210 COND_OR == 2 => (X || Y)
5211 If we are unable to support a dominance comparison we return CCmode.
5212 This will then fail to match for the RTL expressions that generate this
5213 call. */
5215 static enum machine_mode
5216 select_dominance_cc_mode (x, y, cond_or)
5217 rtx x;
5218 rtx y;
5219 HOST_WIDE_INT cond_or;
5221 enum rtx_code cond1, cond2;
5222 int swapped = 0;
5224 /* Currently we will probably get the wrong result if the individual
5225 comparisons are not simple. This also ensures that it is safe to
5226 reverse a comparison if necessary. */
5227 if ((arm_select_cc_mode (cond1 = GET_CODE (x), XEXP (x, 0), XEXP (x, 1))
5228 != CCmode)
5229 || (arm_select_cc_mode (cond2 = GET_CODE (y), XEXP (y, 0), XEXP (y, 1))
5230 != CCmode))
5231 return CCmode;
5233 /* The if_then_else variant of this tests the second condition if the
5234 first passes, but is true if the first fails. Reverse the first
5235 condition to get a true "inclusive-or" expression. */
5236 if (cond_or == 1)
5237 cond1 = reverse_condition (cond1);
5239 /* If the comparisons are not equal, and one doesn't dominate the other,
5240 then we can't do this. */
5241 if (cond1 != cond2
5242 && !comparison_dominates_p (cond1, cond2)
5243 && (swapped = 1, !comparison_dominates_p (cond2, cond1)))
5244 return CCmode;
5246 if (swapped)
5248 enum rtx_code temp = cond1;
5249 cond1 = cond2;
5250 cond2 = temp;
5253 switch (cond1)
5255 case EQ:
5256 if (cond2 == EQ || !cond_or)
5257 return CC_DEQmode;
5259 switch (cond2)
5261 case LE: return CC_DLEmode;
5262 case LEU: return CC_DLEUmode;
5263 case GE: return CC_DGEmode;
5264 case GEU: return CC_DGEUmode;
5265 default: break;
5268 break;
5270 case LT:
5271 if (cond2 == LT || !cond_or)
5272 return CC_DLTmode;
5273 if (cond2 == LE)
5274 return CC_DLEmode;
5275 if (cond2 == NE)
5276 return CC_DNEmode;
5277 break;
5279 case GT:
5280 if (cond2 == GT || !cond_or)
5281 return CC_DGTmode;
5282 if (cond2 == GE)
5283 return CC_DGEmode;
5284 if (cond2 == NE)
5285 return CC_DNEmode;
5286 break;
5288 case LTU:
5289 if (cond2 == LTU || !cond_or)
5290 return CC_DLTUmode;
5291 if (cond2 == LEU)
5292 return CC_DLEUmode;
5293 if (cond2 == NE)
5294 return CC_DNEmode;
5295 break;
5297 case GTU:
5298 if (cond2 == GTU || !cond_or)
5299 return CC_DGTUmode;
5300 if (cond2 == GEU)
5301 return CC_DGEUmode;
5302 if (cond2 == NE)
5303 return CC_DNEmode;
5304 break;
5306 /* The remaining cases only occur when both comparisons are the
5307 same. */
5308 case NE:
5309 return CC_DNEmode;
5311 case LE:
5312 return CC_DLEmode;
5314 case GE:
5315 return CC_DGEmode;
5317 case LEU:
5318 return CC_DLEUmode;
5320 case GEU:
5321 return CC_DGEUmode;
5323 default:
5324 break;
5327 abort ();
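/* Editorial example, not in the original source: for the expression
   (x == y) || (x <= y) -- cond1 = EQ, cond2 = LE, COND_OR == 2 --
   EQ dominates LE, no swap is needed, and the switch above returns
   CC_DLEmode: a single LE test covers both arms. */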
5330 enum machine_mode
5331 arm_select_cc_mode (op, x, y)
5332 enum rtx_code op;
5333 rtx x;
5334 rtx y;
5336 /* All floating point compares return CCFP if it is an equality
5337 comparison, and CCFPE otherwise. */
5338 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
5340 switch (op)
5342 case EQ:
5343 case NE:
5344 case UNORDERED:
5345 case ORDERED:
5346 case UNLT:
5347 case UNLE:
5348 case UNGT:
5349 case UNGE:
5350 case UNEQ:
5351 case LTGT:
5352 return CCFPmode;
5354 case LT:
5355 case LE:
5356 case GT:
5357 case GE:
5358 return CCFPEmode;
5360 default:
5361 abort ();
5365 /* A compare with a shifted operand. Because of canonicalization, the
5366 comparison will have to be swapped when we emit the assembler. */
5367 if (GET_MODE (y) == SImode && GET_CODE (y) == REG
5368 && (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
5369 || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
5370 || GET_CODE (x) == ROTATERT))
5371 return CC_SWPmode;
5373 /* This is a special case that is used by combine to allow a
5374 comparison of a shifted byte load to be split into a zero-extend
5375 followed by a comparison of the shifted integer (only valid for
5376 equalities and unsigned inequalities). */
5377 if (GET_MODE (x) == SImode
5378 && GET_CODE (x) == ASHIFT
5379 && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 24
5380 && GET_CODE (XEXP (x, 0)) == SUBREG
5381 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == MEM
5382 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == QImode
5383 && (op == EQ || op == NE
5384 || op == GEU || op == GTU || op == LTU || op == LEU)
5385 && GET_CODE (y) == CONST_INT)
5386 return CC_Zmode;
5388 /* A construct for a conditional compare, if the false arm contains
5389 0, then both conditions must be true, otherwise either condition
5390 must be true. Not all conditions are possible, so CCmode is
5391 returned if it can't be done. */
5392 if (GET_CODE (x) == IF_THEN_ELSE
5393 && (XEXP (x, 2) == const0_rtx
5394 || XEXP (x, 2) == const1_rtx)
5395 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
5396 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
5397 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
5398 INTVAL (XEXP (x, 2)));
5400 /* Alternate canonicalizations of the above. These are somewhat cleaner. */
5401 if (GET_CODE (x) == AND
5402 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
5403 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
5404 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1), 0);
5406 if (GET_CODE (x) == IOR
5407 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
5408 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
5409 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1), 2);
5411 /* For an operation that sets the condition codes as a side-effect, the
5412 V flag is not set correctly, so we can only use comparisons where
5413 this doesn't matter. (For LT and GE we can use "mi" and "pl"
5414 instead.) */
5415 if (GET_MODE (x) == SImode
5416 && y == const0_rtx
5417 && (op == EQ || op == NE || op == LT || op == GE)
5418 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
5419 || GET_CODE (x) == AND || GET_CODE (x) == IOR
5420 || GET_CODE (x) == XOR || GET_CODE (x) == MULT
5421 || GET_CODE (x) == NOT || GET_CODE (x) == NEG
5422 || GET_CODE (x) == LSHIFTRT
5423 || GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
5424 || GET_CODE (x) == ROTATERT || GET_CODE (x) == ZERO_EXTRACT))
5425 return CC_NOOVmode;
5427 if (GET_MODE (x) == QImode && (op == EQ || op == NE))
5428 return CC_Zmode;
5430 if (GET_MODE (x) == SImode && (op == LTU || op == GEU)
5431 && GET_CODE (x) == PLUS
5432 && (rtx_equal_p (XEXP (x, 0), y) || rtx_equal_p (XEXP (x, 1), y)))
5433 return CC_Cmode;
5435 return CCmode;
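/* Editorial examples, not in the original source:
     comparing (ashift:SI (reg a) (const_int 2)) against (reg:SI b)
       -> CC_SWPmode (the operands must be swapped when the assembler
          is emitted);
     LTU on (plus:SI (reg a) (reg b)) against (reg b)
       -> CC_Cmode (a carry-out test for the unsigned addition). */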
5438 /* X and Y are two things to compare using CODE. Emit the compare insn and
5439 return the rtx for register 0 in the proper mode. FP means this is a
5440 floating point compare: I don't think that it is needed on the arm. */
5443 arm_gen_compare_reg (code, x, y)
5444 enum rtx_code code;
5445 rtx x, y;
5447 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
5448 rtx cc_reg = gen_rtx_REG (mode, CC_REGNUM);
5450 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
5451 gen_rtx_COMPARE (mode, x, y)));
5453 return cc_reg;
5456 /* Generate a sequence of insns that will generate the correct return
5457 address mask depending on the physical architecture that the program
5458 is running on. */
5461 arm_gen_return_addr_mask ()
5463 rtx reg = gen_reg_rtx (Pmode);
5465 emit_insn (gen_return_addr_mask (reg));
5466 return reg;
5469 void
5470 arm_reload_in_hi (operands)
5471 rtx * operands;
5473 rtx ref = operands[1];
5474 rtx base, scratch;
5475 HOST_WIDE_INT offset = 0;
5477 if (GET_CODE (ref) == SUBREG)
5479 offset = SUBREG_BYTE (ref);
5480 ref = SUBREG_REG (ref);
5483 if (GET_CODE (ref) == REG)
5485 /* We have a pseudo which has been spilt onto the stack; there
5486 are two cases here: the first where there is a simple
5487 stack-slot replacement and a second where the stack-slot is
5488 out of range, or is used as a subreg. */
5489 if (reg_equiv_mem[REGNO (ref)])
5491 ref = reg_equiv_mem[REGNO (ref)];
5492 base = find_replacement (&XEXP (ref, 0));
5494 else
5495 /* The slot is out of range, or was dressed up in a SUBREG. */
5496 base = reg_equiv_address[REGNO (ref)];
5498 else
5499 base = find_replacement (&XEXP (ref, 0));
5501 /* Handle the case where the address is too complex to be offset by 1. */
5502 if (GET_CODE (base) == MINUS
5503 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
5505 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5507 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
5508 base = base_plus;
5510 else if (GET_CODE (base) == PLUS)
5512 /* The addend must be CONST_INT, or we would have dealt with it above. */
5513 HOST_WIDE_INT hi, lo;
5515 offset += INTVAL (XEXP (base, 1));
5516 base = XEXP (base, 0);
5518 /* Rework the address into a legal sequence of insns. */
5519 /* Valid range for lo is -4095 -> 4095 */
5520 lo = (offset >= 0
5521 ? (offset & 0xfff)
5522 : -((-offset) & 0xfff));
5524 /* Corner case: if lo is the max offset, then we would be out of range
5525 once we have added the additional 1 below, so bump the msb into the
5526 pre-loading insn(s). */
5527 if (lo == 4095)
5528 lo &= 0x7ff;
5530 hi = ((((offset - lo) & (HOST_WIDE_INT) 0xffffffff)
5531 ^ (HOST_WIDE_INT) 0x80000000)
5532 - (HOST_WIDE_INT) 0x80000000);
5534 if (hi + lo != offset)
5535 abort ();
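/* Editorial worked example, not in the original source: an offset of
   0x12345 splits as lo = 0x345, hi = 0x12000; an offset of -6 gives
   lo = -6, hi = 0.  When lo would be 4095, the mask above shrinks it
   to 0x7ff so that the offset + 1 used for the second byte load
   still fits the 12-bit addressing range. */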
5537 if (hi != 0)
5539 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5541 /* Get the base address; addsi3 knows how to handle constants
5542 that require more than one insn. */
5543 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
5544 base = base_plus;
5545 offset = lo;
5549 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
5550 emit_insn (gen_zero_extendqisi2 (scratch,
5551 gen_rtx_MEM (QImode,
5552 plus_constant (base,
5553 offset))));
5554 emit_insn (gen_zero_extendqisi2 (gen_rtx_SUBREG (SImode, operands[0], 0),
5555 gen_rtx_MEM (QImode,
5556 plus_constant (base,
5557 offset + 1))));
5558 if (!BYTES_BIG_ENDIAN)
5559 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
5560 gen_rtx_IOR (SImode,
5561 gen_rtx_ASHIFT
5562 (SImode,
5563 gen_rtx_SUBREG (SImode, operands[0], 0),
5564 GEN_INT (8)),
5565 scratch)));
5566 else
5567 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
5568 gen_rtx_IOR (SImode,
5569 gen_rtx_ASHIFT (SImode, scratch,
5570 GEN_INT (8)),
5571 gen_rtx_SUBREG (SImode, operands[0],
5572 0))));
5575 /* Handle storing a half-word to memory during reload by synthesising as two
5576 byte stores. Take care not to clobber the input values until after we
5577 have moved them somewhere safe. This code assumes that if the DImode
5578 scratch in operands[2] overlaps either the input value or output address
5579 in some way, then that value must die in this insn (we absolutely need
5580 two scratch registers for some corner cases). */
5582 void
5583 arm_reload_out_hi (operands)
5584 rtx * operands;
5586 rtx ref = operands[0];
5587 rtx outval = operands[1];
5588 rtx base, scratch;
5589 HOST_WIDE_INT offset = 0;
5591 if (GET_CODE (ref) == SUBREG)
5593 offset = SUBREG_BYTE (ref);
5594 ref = SUBREG_REG (ref);
5597 if (GET_CODE (ref) == REG)
5599 /* We have a pseudo which has been spilt onto the stack; there
5600 are two cases here: the first where there is a simple
5601 stack-slot replacement and a second where the stack-slot is
5602 out of range, or is used as a subreg. */
5603 if (reg_equiv_mem[REGNO (ref)])
5605 ref = reg_equiv_mem[REGNO (ref)];
5606 base = find_replacement (&XEXP (ref, 0));
5608 else
5609 /* The slot is out of range, or was dressed up in a SUBREG. */
5610 base = reg_equiv_address[REGNO (ref)];
5612 else
5613 base = find_replacement (&XEXP (ref, 0));
5615 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
5617 /* Handle the case where the address is too complex to be offset by 1. */
5618 if (GET_CODE (base) == MINUS
5619 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
5621 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5623 /* Be careful not to destroy OUTVAL. */
5624 if (reg_overlap_mentioned_p (base_plus, outval))
5626 /* Updating base_plus might destroy outval, see if we can
5627 swap the scratch and base_plus. */
5628 if (!reg_overlap_mentioned_p (scratch, outval))
5630 rtx tmp = scratch;
5631 scratch = base_plus;
5632 base_plus = tmp;
5634 else
5636 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
5638 /* Be conservative and copy OUTVAL into the scratch now,
5639 this should only be necessary if outval is a subreg
5640 of something larger than a word. */
5641 /* XXX Might this clobber base? I can't see how it can,
5642 since scratch is known to overlap with OUTVAL, and
5643 must be wider than a word. */
5644 emit_insn (gen_movhi (scratch_hi, outval));
5645 outval = scratch_hi;
5649 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
5650 base = base_plus;
5652 else if (GET_CODE (base) == PLUS)
5654 /* The addend must be CONST_INT, or we would have dealt with it above. */
5655 HOST_WIDE_INT hi, lo;
5657 offset += INTVAL (XEXP (base, 1));
5658 base = XEXP (base, 0);
5660 /* Rework the address into a legal sequence of insns. */
5661 /* Valid range for lo is -4095 -> 4095 */
5662 lo = (offset >= 0
5663 ? (offset & 0xfff)
5664 : -((-offset) & 0xfff));
5666 /* Corner case: if lo is the max offset, then we would be out of range
5667 once we have added the additional 1 below, so bump the msb into the
5668 pre-loading insn(s). */
5669 if (lo == 4095)
5670 lo &= 0x7ff;
5672 hi = ((((offset - lo) & (HOST_WIDE_INT) 0xffffffff)
5673 ^ (HOST_WIDE_INT) 0x80000000)
5674 - (HOST_WIDE_INT) 0x80000000);
5676 if (hi + lo != offset)
5677 abort ();
5679 if (hi != 0)
5681 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5683 /* Be careful not to destroy OUTVAL. */
5684 if (reg_overlap_mentioned_p (base_plus, outval))
5686 /* Updating base_plus might destroy outval, see if we
5687 can swap the scratch and base_plus. */
5688 if (!reg_overlap_mentioned_p (scratch, outval))
5690 rtx tmp = scratch;
5691 scratch = base_plus;
5692 base_plus = tmp;
5694 else
5696 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
5698 /* Be conservative and copy outval into scratch now,
5699 this should only be necessary if outval is a
5700 subreg of something larger than a word. */
5701 /* XXX Might this clobber base? I can't see how it
5702 can, since scratch is known to overlap with
5703 outval. */
5704 emit_insn (gen_movhi (scratch_hi, outval));
5705 outval = scratch_hi;
5709 /* Get the base address; addsi3 knows how to handle constants
5710 that require more than one insn. */
5711 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
5712 base = base_plus;
5713 offset = lo;
5717 if (BYTES_BIG_ENDIAN)
5719 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
5720 plus_constant (base, offset + 1)),
5721 gen_lowpart (QImode, outval)));
5722 emit_insn (gen_lshrsi3 (scratch,
5723 gen_rtx_SUBREG (SImode, outval, 0),
5724 GEN_INT (8)));
5725 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
5726 gen_lowpart (QImode, scratch)));
5728 else
5730 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
5731 gen_lowpart (QImode, outval)));
5732 emit_insn (gen_lshrsi3 (scratch,
5733 gen_rtx_SUBREG (SImode, outval, 0),
5734 GEN_INT (8)));
5735 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
5736 plus_constant (base, offset + 1)),
5737 gen_lowpart (QImode, scratch)));
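/* Editorial example, not in the original source: on a little-endian
   target, storing the halfword in r1 at [r0] with scratch r2 comes
   out as (register numbers illustrative)
       strb r1, [r0]
       mov  r2, r1, lsr #8
       strb r2, [r0, #1]
   with the two byte offsets swapped on big-endian targets. */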
5741 /* Print a symbolic form of X to the debug file, F. */
5743 static void
5744 arm_print_value (f, x)
5745 FILE * f;
5746 rtx x;
5748 switch (GET_CODE (x))
5750 case CONST_INT:
5751 fprintf (f, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
5752 return;
5754 case CONST_DOUBLE:
5755 fprintf (f, "<0x%lx,0x%lx>", (long)XWINT (x, 2), (long)XWINT (x, 3));
5756 return;
5758 case CONST_STRING:
5759 fprintf (f, "\"%s\"", XSTR (x, 0));
5760 return;
5762 case SYMBOL_REF:
5763 fprintf (f, "`%s'", XSTR (x, 0));
5764 return;
5766 case LABEL_REF:
5767 fprintf (f, "L%d", INSN_UID (XEXP (x, 0)));
5768 return;
5770 case CONST:
5771 arm_print_value (f, XEXP (x, 0));
5772 return;
5774 case PLUS:
5775 arm_print_value (f, XEXP (x, 0));
5776 fprintf (f, "+");
5777 arm_print_value (f, XEXP (x, 1));
5778 return;
5780 case PC:
5781 fprintf (f, "pc");
5782 return;
5784 default:
5785 fprintf (f, "????");
5786 return;
5790 /* Routines for manipulation of the constant pool. */
5792 /* Arm instructions cannot load a large constant directly into a
5793 register; they have to come from a pc relative load. The constant
5794 must therefore be placed in the addressable range of the pc
5795 relative load. Depending on the precise pc relative load
5796 instruction the range is somewhere between 256 bytes and 4k. This
5797 means that we often have to dump a constant inside a function, and
5798 generate code to branch around it.
5800 It is important to minimize this, since the branches will slow
5801 things down and make the code larger.
5803 Normally we can hide the table after an existing unconditional
5804 branch so that there is no interruption of the flow, but in the
5805 worst case the code looks like this:
5807 ldr rn, L1
5808 ...
5809 b L2
5810 align
5811 L1: .long value
5812 L2:
5813 ...
5815 ldr rn, L3
5816 ...
5817 b L4
5818 align
5819 L3: .long value
5820 L4:
5821 ...
5823 We fix this by performing a scan after scheduling, which notices
5824 which instructions need to have their operands fetched from the
5825 constant table and builds the table.
5827 The algorithm starts by building a table of all the constants that
5828 need fixing up and all the natural barriers in the function (places
5829 where a constant table can be dropped without breaking the flow).
5830 For each fixup we note how far the pc-relative replacement will be
5831 able to reach and the offset of the instruction into the function.
5833 Having built the table we then group the fixes together to form
5834 tables that are as large as possible (subject to addressing
5835 constraints) and emit each table of constants after the last
5836 barrier that is within range of all the instructions in the group.
5837 If a group does not contain a barrier, then we forcibly create one
5838 by inserting a jump instruction into the flow. Once the table has
5839 been inserted, the insns are then modified to reference the
5840 relevant entry in the pool.
5842 Possible enhancements to the algorithm (not implemented) are:
5844 1) For some processors and object formats, there may be benefit in
5845 aligning the pools to the start of cache lines; this alignment
5846 would need to be taken into account when calculating addressability
5847 of a pool. */
5849 /* These typedefs are located at the start of this file, so that
5850 they can be used in the prototypes there. This comment is to
5851 remind readers of that fact so that the following structures
5852 can be understood more easily.
5854 typedef struct minipool_node Mnode;
5855 typedef struct minipool_fixup Mfix; */
5857 struct minipool_node
5859 /* Doubly linked chain of entries. */
5860 Mnode * next;
5861 Mnode * prev;
5862 /* The maximum offset into the code at which this entry can be placed. While
5863 pushing fixes for forward references, all entries are sorted in order
5864 of increasing max_address. */
5865 HOST_WIDE_INT max_address;
5866 /* Similarly for an entry inserted for a backwards ref. */
5867 HOST_WIDE_INT min_address;
5868 /* The number of fixes referencing this entry. This can become zero
5869 if we "unpush" an entry. In this case we ignore the entry when we
5870 come to emit the code. */
5871 int refcount;
5872 /* The offset from the start of the minipool. */
5873 HOST_WIDE_INT offset;
5874 /* The value in the table. */
5875 rtx value;
5876 /* The mode of value. */
5877 enum machine_mode mode;
5878 int fix_size;
5881 struct minipool_fixup
5883 Mfix * next;
5884 rtx insn;
5885 HOST_WIDE_INT address;
5886 rtx * loc;
5887 enum machine_mode mode;
5888 int fix_size;
5889 rtx value;
5890 Mnode * minipool;
5891 HOST_WIDE_INT forwards;
5892 HOST_WIDE_INT backwards;
5895 /* Fixes less than a word need padding out to a word boundary. */
5896 #define MINIPOOL_FIX_SIZE(mode) \
5897 (GET_MODE_SIZE ((mode)) >= 4 ? GET_MODE_SIZE ((mode)) : 4)
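/* Editorial note, not in the original source: e.g.
   MINIPOOL_FIX_SIZE (HImode) == 4 (a 2-byte value padded to a word),
   while MINIPOOL_FIX_SIZE (DImode) == 8. */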
5899 static Mnode * minipool_vector_head;
5900 static Mnode * minipool_vector_tail;
5901 static rtx minipool_vector_label;
5903 /* The linked list of all minipool fixes required for this function. */
5904 Mfix * minipool_fix_head;
5905 Mfix * minipool_fix_tail;
5906 /* The fix entry for the current minipool, once it has been placed. */
5907 Mfix * minipool_barrier;
5909 /* Determines if INSN is the start of a jump table. Returns the end
5910 of the TABLE or NULL_RTX. */
5912 static rtx
5913 is_jump_table (insn)
5914 rtx insn;
5916 rtx table;
5918 if (GET_CODE (insn) == JUMP_INSN
5919 && JUMP_LABEL (insn) != NULL
5920 && ((table = next_real_insn (JUMP_LABEL (insn)))
5921 == next_real_insn (insn))
5922 && table != NULL
5923 && GET_CODE (table) == JUMP_INSN
5924 && (GET_CODE (PATTERN (table)) == ADDR_VEC
5925 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
5926 return table;
5928 return NULL_RTX;
5931 #ifndef JUMP_TABLES_IN_TEXT_SECTION
5932 #define JUMP_TABLES_IN_TEXT_SECTION 0
5933 #endif
5935 static HOST_WIDE_INT
5936 get_jump_table_size (insn)
5937 rtx insn;
5939 /* ADDR_VECs only take room if read-only data goes into the text
5940 section. */
5941 if (JUMP_TABLES_IN_TEXT_SECTION
5942 #if !defined(READONLY_DATA_SECTION) && !defined(READONLY_DATA_SECTION_ASM_OP)
5943 || 1
5944 #endif
5947 rtx body = PATTERN (insn);
5948 int elt = GET_CODE (body) == ADDR_DIFF_VEC ? 1 : 0;
5950 return GET_MODE_SIZE (GET_MODE (body)) * XVECLEN (body, elt);
5953 return 0;
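/* Editorial example, not in the original source: an
   (addr_diff_vec:HI ...) with ten entries occupies 2 * 10 = 20 bytes
   when jump tables are placed in the text section, and contributes
   0 otherwise. */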
5956 /* Move a minipool fix MP from its current location to before MAX_MP.
5957 If MAX_MP is NULL, then MP doesn't need moving, but the addressing
5958 constraints may need updating. */
5960 static Mnode *
5961 move_minipool_fix_forward_ref (mp, max_mp, max_address)
5962 Mnode * mp;
5963 Mnode * max_mp;
5964 HOST_WIDE_INT max_address;
5966 /* This should never be true and the code below assumes these are
5967 different. */
5968 if (mp == max_mp)
5969 abort ();
5971 if (max_mp == NULL)
5973 if (max_address < mp->max_address)
5974 mp->max_address = max_address;
5976 else
5978 if (max_address > max_mp->max_address - mp->fix_size)
5979 mp->max_address = max_mp->max_address - mp->fix_size;
5980 else
5981 mp->max_address = max_address;
5983 /* Unlink MP from its current position. Since max_mp is non-null,
5984 mp->prev must be non-null. */
5985 mp->prev->next = mp->next;
5986 if (mp->next != NULL)
5987 mp->next->prev = mp->prev;
5988 else
5989 minipool_vector_tail = mp->prev;
5991 /* Re-insert it before MAX_MP. */
5992 mp->next = max_mp;
5993 mp->prev = max_mp->prev;
5994 max_mp->prev = mp;
5996 if (mp->prev != NULL)
5997 mp->prev->next = mp;
5998 else
5999 minipool_vector_head = mp;
6002 /* Save the new entry. */
6003 max_mp = mp;
6005 /* Scan over the preceding entries and adjust their addresses as
6006 required. */
6007 while (mp->prev != NULL
6008 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
6010 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
6011 mp = mp->prev;
6014 return max_mp;
6017 /* Add a constant to the minipool for a forward reference. Returns the
6018 node added or NULL if the constant will not fit in this pool. */
6020 static Mnode *
6021 add_minipool_forward_ref (fix)
6022 Mfix * fix;
6024 /* If set, max_mp is the first pool_entry that has a lower
6025 constraint than the one we are trying to add. */
6026 Mnode * max_mp = NULL;
6027 HOST_WIDE_INT max_address = fix->address + fix->forwards;
6028 Mnode * mp;
6030 /* If this fix's address is greater than the address of the first
6031 entry, then we can't put the fix in this pool. We subtract the
6032 size of the current fix to ensure that if the table is fully
6033 packed we still have enough room to insert this value by shuffling
6034 the other fixes forwards. */
6035 if (minipool_vector_head &&
6036 fix->address >= minipool_vector_head->max_address - fix->fix_size)
6037 return NULL;
6039 /* Scan the pool to see if a constant with the same value has
6040 already been added. While we are doing this, also note the
6041 location where we must insert the constant if it doesn't already
6042 exist. */
6043 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
6045 if (GET_CODE (fix->value) == GET_CODE (mp->value)
6046 && fix->mode == mp->mode
6047 && (GET_CODE (fix->value) != CODE_LABEL
6048 || (CODE_LABEL_NUMBER (fix->value)
6049 == CODE_LABEL_NUMBER (mp->value)))
6050 && rtx_equal_p (fix->value, mp->value))
6052 /* More than one fix references this entry. */
6053 mp->refcount++;
6054 return move_minipool_fix_forward_ref (mp, max_mp, max_address);
6057 /* Note the insertion point if necessary. */
6058 if (max_mp == NULL
6059 && mp->max_address > max_address)
6060 max_mp = mp;
6063 /* The value is not currently in the minipool, so we need to create
6064 a new entry for it. If MAX_MP is NULL, the entry will be put on
6065 the end of the list since the placement is less constrained than
6066 any existing entry. Otherwise, we insert the new fix before
6067 MAX_MP and, if necessary, adjust the constraints on the other
6068 entries. */
6069 mp = xmalloc (sizeof (* mp));
6070 mp->fix_size = fix->fix_size;
6071 mp->mode = fix->mode;
6072 mp->value = fix->value;
6073 mp->refcount = 1;
6074 /* Not yet required for a backwards ref. */
6075 mp->min_address = -65536;
6077 if (max_mp == NULL)
6079 mp->max_address = max_address;
6080 mp->next = NULL;
6081 mp->prev = minipool_vector_tail;
6083 if (mp->prev == NULL)
6085 minipool_vector_head = mp;
6086 minipool_vector_label = gen_label_rtx ();
6088 else
6089 mp->prev->next = mp;
6091 minipool_vector_tail = mp;
6093 else
6095 if (max_address > max_mp->max_address - mp->fix_size)
6096 mp->max_address = max_mp->max_address - mp->fix_size;
6097 else
6098 mp->max_address = max_address;
6100 mp->next = max_mp;
6101 mp->prev = max_mp->prev;
6102 max_mp->prev = mp;
6103 if (mp->prev != NULL)
6104 mp->prev->next = mp;
6105 else
6106 minipool_vector_head = mp;
6109 /* Save the new entry. */
6110 max_mp = mp;
6112 /* Scan over the preceding entries and adjust their addresses as
6113 required. */
6114 while (mp->prev != NULL
6115 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
6117 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
6118 mp = mp->prev;
6121 return max_mp;
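/* For illustration (invented figures): if the pool head has
   max_address 100 and the new fix is 4 bytes at address 96, then
   96 >= 100 - 4 holds, so even a fully repacked pool could not be
   reached from this insn; NULL is returned and the caller starts a
   new pool instead.  */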
6124 static Mnode *
6125 move_minipool_fix_backward_ref (mp, min_mp, min_address)
6126 Mnode * mp;
6127 Mnode * min_mp;
6128 HOST_WIDE_INT min_address;
6130 HOST_WIDE_INT offset;
6132 /* This should never be true, and the code below assumes these are
6133 different. */
6134 if (mp == min_mp)
6135 abort ();
6137 if (min_mp == NULL)
6139 if (min_address > mp->min_address)
6140 mp->min_address = min_address;
6142 else
6144 /* We will adjust this below if it is too loose. */
6145 mp->min_address = min_address;
6147 /* Unlink MP from its current position. Since min_mp is non-null,
6148 mp->next must be non-null. */
6149 mp->next->prev = mp->prev;
6150 if (mp->prev != NULL)
6151 mp->prev->next = mp->next;
6152 else
6153 minipool_vector_head = mp->next;
6155 /* Reinsert it after MIN_MP. */
6156 mp->prev = min_mp;
6157 mp->next = min_mp->next;
6158 min_mp->next = mp;
6159 if (mp->next != NULL)
6160 mp->next->prev = mp;
6161 else
6162 minipool_vector_tail = mp;
6165 min_mp = mp;
6167 offset = 0;
6168 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
6170 mp->offset = offset;
6171 if (mp->refcount > 0)
6172 offset += mp->fix_size;
6174 if (mp->next && mp->next->min_address < mp->min_address + mp->fix_size)
6175 mp->next->min_address = mp->min_address + mp->fix_size;
6178 return min_mp;
6181 /* Add a constant to the minipool for a backward reference. Returns the
6182 node added or NULL if the constant will not fit in this pool.
6184 Note that the code for insertion for a backwards reference can be
6185 somewhat confusing because the calculated offsets for each fix do
6186 not take into account the size of the pool (which is still under
6187 construction). */
6189 static Mnode *
6190 add_minipool_backward_ref (fix)
6191 Mfix * fix;
6193 /* If set, min_mp is the last pool_entry that has a lower constraint
6194 than the one we are trying to add. */
6195 Mnode * min_mp = NULL;
6196 /* This can be negative, since it is only a constraint. */
6197 HOST_WIDE_INT min_address = fix->address - fix->backwards;
6198 Mnode * mp;
6200 /* If we can't reach the current pool from this insn, or if we can't
6201 insert this entry at the end of the pool without pushing other
6202 fixes out of range, then we don't try. This ensures that we
6203 can't fail later on. */
6204 if (min_address >= minipool_barrier->address
6205 || (minipool_vector_tail->min_address + fix->fix_size
6206 >= minipool_barrier->address))
6207 return NULL;
6209 /* Scan the pool to see if a constant with the same value has
6210 already been added. While we are doing this, also note the
6211 location where we must insert the constant if it doesn't already
6212 exist. */
6213 for (mp = minipool_vector_tail; mp != NULL; mp = mp->prev)
6215 if (GET_CODE (fix->value) == GET_CODE (mp->value)
6216 && fix->mode == mp->mode
6217 && (GET_CODE (fix->value) != CODE_LABEL
6218 || (CODE_LABEL_NUMBER (fix->value)
6219 == CODE_LABEL_NUMBER (mp->value)))
6220 && rtx_equal_p (fix->value, mp->value)
6221 /* Check that there is enough slack to move this entry to the
6222 end of the table (this is conservative). */
6223 && (mp->max_address
6224 > (minipool_barrier->address
6225 + minipool_vector_tail->offset
6226 + minipool_vector_tail->fix_size)))
6228 mp->refcount++;
6229 return move_minipool_fix_backward_ref (mp, min_mp, min_address);
6232 if (min_mp != NULL)
6233 mp->min_address += fix->fix_size;
6234 else
6236 /* Note the insertion point if necessary. */
6237 if (mp->min_address < min_address)
6238 min_mp = mp;
6239 else if (mp->max_address
6240 < minipool_barrier->address + mp->offset + fix->fix_size)
6242 /* Inserting before this entry would push the fix beyond
6243 its maximum address (which can happen if we have
6244 re-located a forwards fix); force the new fix to come
6245 after it. */
6246 min_mp = mp;
6247 min_address = mp->min_address + fix->fix_size;
6252 /* We need to create a new entry. */
6253 mp = xmalloc (sizeof (* mp));
6254 mp->fix_size = fix->fix_size;
6255 mp->mode = fix->mode;
6256 mp->value = fix->value;
6257 mp->refcount = 1;
6258 mp->max_address = minipool_barrier->address + 65536;
6260 mp->min_address = min_address;
6262 if (min_mp == NULL)
6264 mp->prev = NULL;
6265 mp->next = minipool_vector_head;
6267 if (mp->next == NULL)
6269 minipool_vector_tail = mp;
6270 minipool_vector_label = gen_label_rtx ();
6272 else
6273 mp->next->prev = mp;
6275 minipool_vector_head = mp;
6277 else
6279 mp->next = min_mp->next;
6280 mp->prev = min_mp;
6281 min_mp->next = mp;
6283 if (mp->next != NULL)
6284 mp->next->prev = mp;
6285 else
6286 minipool_vector_tail = mp;
6289 /* Save the new entry. */
6290 min_mp = mp;
6292 if (mp->prev)
6293 mp = mp->prev;
6294 else
6295 mp->offset = 0;
6297 /* Scan over the following entries and adjust their offsets. */
6298 while (mp->next != NULL)
6300 if (mp->next->min_address < mp->min_address + mp->fix_size)
6301 mp->next->min_address = mp->min_address + mp->fix_size;
6303 if (mp->refcount)
6304 mp->next->offset = mp->offset + mp->fix_size;
6305 else
6306 mp->next->offset = mp->offset;
6308 mp = mp->next;
6311 return min_mp;
6314 static void
6315 assign_minipool_offsets (barrier)
6316 Mfix * barrier;
6318 HOST_WIDE_INT offset = 0;
6319 Mnode * mp;
6321 minipool_barrier = barrier;
6323 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
6325 mp->offset = offset;
6327 if (mp->refcount > 0)
6328 offset += mp->fix_size;
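/* For example, a pool holding live entries of 4, 8 and 4 bytes is
   assigned offsets 0, 4 and 12 respectively; an entry whose refcount
   has dropped to zero keeps the running offset unchanged and emits
   nothing when the pool is dumped.  */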
6332 /* Output the literal table. */
6333 static void
6334 dump_minipool (scan)
6335 rtx scan;
6337 Mnode * mp;
6338 Mnode * nmp;
6340 if (rtl_dump_file)
6341 fprintf (rtl_dump_file,
6342 ";; Emitting minipool after insn %u; address %ld\n",
6343 INSN_UID (scan), (unsigned long) minipool_barrier->address);
6345 scan = emit_label_after (gen_label_rtx (), scan);
6346 scan = emit_insn_after (gen_align_4 (), scan);
6347 scan = emit_label_after (minipool_vector_label, scan);
6349 for (mp = minipool_vector_head; mp != NULL; mp = nmp)
6351 if (mp->refcount > 0)
6353 if (rtl_dump_file)
6355 fprintf (rtl_dump_file,
6356 ";; Offset %u, min %ld, max %ld ",
6357 (unsigned) mp->offset, (unsigned long) mp->min_address,
6358 (unsigned long) mp->max_address);
6359 arm_print_value (rtl_dump_file, mp->value);
6360 fputc ('\n', rtl_dump_file);
6363 switch (mp->fix_size)
6365 #ifdef HAVE_consttable_1
6366 case 1:
6367 scan = emit_insn_after (gen_consttable_1 (mp->value), scan);
6368 break;
6370 #endif
6371 #ifdef HAVE_consttable_2
6372 case 2:
6373 scan = emit_insn_after (gen_consttable_2 (mp->value), scan);
6374 break;
6376 #endif
6377 #ifdef HAVE_consttable_4
6378 case 4:
6379 scan = emit_insn_after (gen_consttable_4 (mp->value), scan);
6380 break;
6382 #endif
6383 #ifdef HAVE_consttable_8
6384 case 8:
6385 scan = emit_insn_after (gen_consttable_8 (mp->value), scan);
6386 break;
6388 #endif
6389 default:
6390 abort ();
6391 break;
6395 nmp = mp->next;
6396 free (mp);
6399 minipool_vector_head = minipool_vector_tail = NULL;
6400 scan = emit_insn_after (gen_consttable_end (), scan);
6401 scan = emit_barrier_after (scan);
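/* Schematically, the emitted sequence looks like this (the exact
   directives come from the consttable patterns in arm.md; label
   names are invented):

   .L10:                    @ fresh label from gen_label_rtx
       .align  2            @ the align_4 pattern
   .L11:                    @ minipool_vector_label
       .word   <constant>   @ consttable_4 entry at offset 0
       .word   <constant>   @ consttable_4 entry at offset 4
                            @ consttable_end, then a barrier  */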
6404 /* Return the cost of forcibly inserting a barrier after INSN. */
6406 static int
6407 arm_barrier_cost (insn)
6408 rtx insn;
6410 /* Basing the location of the pool on the loop depth is preferable,
6411 but at the moment, the basic block information seems to be
6412 corrupt by this stage of the compilation. */
6413 int base_cost = 50;
6414 rtx next = next_nonnote_insn (insn);
6416 if (next != NULL && GET_CODE (next) == CODE_LABEL)
6417 base_cost -= 20;
6419 switch (GET_CODE (insn))
6421 case CODE_LABEL:
6422 /* It will always be better to place the table before the label, rather
6423 than after it. */
6424 return 50;
6426 case INSN:
6427 case CALL_INSN:
6428 return base_cost;
6430 case JUMP_INSN:
6431 return base_cost - 10;
6433 default:
6434 return base_cost + 10;
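/* Illustrative costs: an ordinary INSN or CALL_INSN scores 50,
   dropping to 30 when the next real insn is a CODE_LABEL (a natural
   block boundary); a JUMP_INSN scores 10 less than either; a
   CODE_LABEL itself is pinned at 50 so that the pool lands before
   the label rather than after it.  Lower is better:
   create_fix_barrier keeps the position with the smallest cost,
   preferring later ties.  */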
6438 /* Find the best place in the insn stream in the range
6439 (FIX->address,MAX_ADDRESS) to forcibly insert a minipool barrier.
6440 Create the barrier by inserting a jump and add a new fix entry for
6441 it. */
6443 static Mfix *
6444 create_fix_barrier (fix, max_address)
6445 Mfix * fix;
6446 HOST_WIDE_INT max_address;
6448 HOST_WIDE_INT count = 0;
6449 rtx barrier;
6450 rtx from = fix->insn;
6451 rtx selected = from;
6452 int selected_cost;
6453 HOST_WIDE_INT selected_address;
6454 Mfix * new_fix;
6455 HOST_WIDE_INT max_count = max_address - fix->address;
6456 rtx label = gen_label_rtx ();
6458 selected_cost = arm_barrier_cost (from);
6459 selected_address = fix->address;
6461 while (from && count < max_count)
6463 rtx tmp;
6464 int new_cost;
6466 /* This code shouldn't have been called if there was a natural barrier
6467 within range. */
6468 if (GET_CODE (from) == BARRIER)
6469 abort ();
6471 /* Count the length of this insn. */
6472 count += get_attr_length (from);
6474 /* If there is a jump table, add its length. */
6475 tmp = is_jump_table (from);
6476 if (tmp != NULL)
6478 count += get_jump_table_size (tmp);
6480 /* Jump tables aren't in a basic block, so base the cost on
6481 the dispatch insn. If we select this location, we will
6482 still put the pool after the table. */
6483 new_cost = arm_barrier_cost (from);
6485 if (count < max_count && new_cost <= selected_cost)
6487 selected = tmp;
6488 selected_cost = new_cost;
6489 selected_address = fix->address + count;
6492 /* Continue after the dispatch table. */
6493 from = NEXT_INSN (tmp);
6494 continue;
6497 new_cost = arm_barrier_cost (from);
6499 if (count < max_count && new_cost <= selected_cost)
6501 selected = from;
6502 selected_cost = new_cost;
6503 selected_address = fix->address + count;
6506 from = NEXT_INSN (from);
6509 /* Create a new JUMP_INSN that branches around a barrier. */
6510 from = emit_jump_insn_after (gen_jump (label), selected);
6511 JUMP_LABEL (from) = label;
6512 barrier = emit_barrier_after (from);
6513 emit_label_after (label, barrier);
6515 /* Create a minipool barrier entry for the new barrier. */
6516 new_fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* new_fix));
6517 new_fix->insn = barrier;
6518 new_fix->address = selected_address;
6519 new_fix->next = fix->next;
6520 fix->next = new_fix;
6522 return new_fix;
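/* The net effect on the insn stream is, schematically (label name
   invented):

   ...                  @ SELECTED, the cheapest point found above
   b       .Lskip       @ newly emitted jump around the pool
                        @ barrier, recorded as NEW_FIX
                        @ (the minipool itself is dumped here later)
   .Lskip:
   ...                  @ execution continues  */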
6525 /* Record that there is a natural barrier in the insn stream at
6526 ADDRESS. */
6527 static void
6528 push_minipool_barrier (insn, address)
6529 rtx insn;
6530 HOST_WIDE_INT address;
6532 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
6534 fix->insn = insn;
6535 fix->address = address;
6537 fix->next = NULL;
6538 if (minipool_fix_head != NULL)
6539 minipool_fix_tail->next = fix;
6540 else
6541 minipool_fix_head = fix;
6543 minipool_fix_tail = fix;
6546 /* Record INSN, which will need fixing up to load a value from the
6547 minipool. ADDRESS is the offset of the insn since the start of the
6548 function; LOC is a pointer to the part of the insn which requires
6549 fixing; VALUE is the constant that must be loaded, which is of type
6550 MODE. */
6551 static void
6552 push_minipool_fix (insn, address, loc, mode, value)
6553 rtx insn;
6554 HOST_WIDE_INT address;
6555 rtx * loc;
6556 enum machine_mode mode;
6557 rtx value;
6559 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
6561 #ifdef AOF_ASSEMBLER
6562 /* PIC symbol references need to be converted into offsets into the
6563 based area. */
6564 /* XXX This shouldn't be done here. */
6565 if (flag_pic && GET_CODE (value) == SYMBOL_REF)
6566 value = aof_pic_entry (value);
6567 #endif /* AOF_ASSEMBLER */
6569 fix->insn = insn;
6570 fix->address = address;
6571 fix->loc = loc;
6572 fix->mode = mode;
6573 fix->fix_size = MINIPOOL_FIX_SIZE (mode);
6574 fix->value = value;
6575 fix->forwards = get_attr_pool_range (insn);
6576 fix->backwards = get_attr_neg_pool_range (insn);
6577 fix->minipool = NULL;
6579 /* If an insn doesn't have a range defined for it, then it isn't
6580 expecting to be reworked by this code. Better to abort now than
6581 to generate duff assembly code. */
6582 if (fix->forwards == 0 && fix->backwards == 0)
6583 abort ();
6585 if (rtl_dump_file)
6587 fprintf (rtl_dump_file,
6588 ";; %smode fixup for i%d; addr %lu, range (%ld,%ld): ",
6589 GET_MODE_NAME (mode),
6590 INSN_UID (insn), (unsigned long) address,
6591 -1 * (long)fix->backwards, (long)fix->forwards);
6592 arm_print_value (rtl_dump_file, fix->value);
6593 fprintf (rtl_dump_file, "\n");
6596 /* Add it to the chain of fixes. */
6597 fix->next = NULL;
6599 if (minipool_fix_head != NULL)
6600 minipool_fix_tail->next = fix;
6601 else
6602 minipool_fix_head = fix;
6604 minipool_fix_tail = fix;
6607 /* Scan INSN and note any of its operands that need fixing. */
6609 static void
6610 note_invalid_constants (insn, address)
6611 rtx insn;
6612 HOST_WIDE_INT address;
6614 int opno;
6616 extract_insn (insn);
6618 if (!constrain_operands (1))
6619 fatal_insn_not_found (insn);
6621 /* Fill in recog_op_alt with information about the constraints of this
6622 insn. */
6623 preprocess_constraints ();
6625 for (opno = 0; opno < recog_data.n_operands; opno++)
6627 /* Things we need to fix can only occur in inputs. */
6628 if (recog_data.operand_type[opno] != OP_IN)
6629 continue;
6631 /* If this alternative is a memory reference, then any mention
6632 of constants in this alternative is really to fool reload
6633 into allowing us to accept one there. We need to fix them up
6634 now so that we output the right code. */
6635 if (recog_op_alt[opno][which_alternative].memory_ok)
6637 rtx op = recog_data.operand[opno];
6639 if (CONSTANT_P (op))
6640 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
6641 recog_data.operand_mode[opno], op);
6642 #if 0
6643 /* RWE: Now that we look correctly at the operands for the insn,
6644 this shouldn't be needed any more. */
6645 #ifndef AOF_ASSEMBLER
6646 /* XXX Is this still needed? */
6647 else if (GET_CODE (op) == UNSPEC && XINT (op, 1) == UNSPEC_PIC_SYM)
6648 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
6649 recog_data.operand_mode[opno],
6650 XVECEXP (op, 0, 0));
6651 #endif
6652 #endif
6653 else if (GET_CODE (op) == MEM
6654 && GET_CODE (XEXP (op, 0)) == SYMBOL_REF
6655 && CONSTANT_POOL_ADDRESS_P (XEXP (op, 0)))
6656 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
6657 recog_data.operand_mode[opno],
6658 get_pool_constant (XEXP (op, 0)));
6663 void
6664 arm_reorg (first)
6665 rtx first;
6667 rtx insn;
6668 HOST_WIDE_INT address = 0;
6669 Mfix * fix;
6671 minipool_fix_head = minipool_fix_tail = NULL;
6673 /* The first insn must always be a note, or the code below won't
6674 scan it properly. */
6675 if (GET_CODE (first) != NOTE)
6676 abort ();
6678 /* Scan all the insns and record the operands that will need fixing. */
6679 for (insn = next_nonnote_insn (first); insn; insn = next_nonnote_insn (insn))
6681 if (GET_CODE (insn) == BARRIER)
6682 push_minipool_barrier (insn, address);
6683 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
6684 || GET_CODE (insn) == JUMP_INSN)
6686 rtx table;
6688 note_invalid_constants (insn, address);
6689 address += get_attr_length (insn);
6691 /* If the insn is a vector jump, add the size of the table
6692 and skip the table. */
6693 if ((table = is_jump_table (insn)) != NULL)
6695 address += get_jump_table_size (table);
6696 insn = table;
6701 fix = minipool_fix_head;
6703 /* Now scan the fixups and perform the required changes. */
6704 while (fix)
6706 Mfix * ftmp;
6707 Mfix * fdel;
6708 Mfix * last_added_fix;
6709 Mfix * last_barrier = NULL;
6710 Mfix * this_fix;
6712 /* Skip any further barriers before the next fix. */
6713 while (fix && GET_CODE (fix->insn) == BARRIER)
6714 fix = fix->next;
6716 /* No more fixes. */
6717 if (fix == NULL)
6718 break;
6720 last_added_fix = NULL;
6722 for (ftmp = fix; ftmp; ftmp = ftmp->next)
6724 if (GET_CODE (ftmp->insn) == BARRIER)
6726 if (ftmp->address >= minipool_vector_head->max_address)
6727 break;
6729 last_barrier = ftmp;
6731 else if ((ftmp->minipool = add_minipool_forward_ref (ftmp)) == NULL)
6732 break;
6734 last_added_fix = ftmp; /* Keep track of the last fix added. */
6737 /* If we found a barrier, drop back to that; any fixes that we
6738 could have reached but come after the barrier will now go in
6739 the next mini-pool. */
6740 if (last_barrier != NULL)
6742 /* Reduce the refcount for those fixes that won't go into this
6743 pool after all. */
6744 for (fdel = last_barrier->next;
6745 fdel && fdel != ftmp;
6746 fdel = fdel->next)
6748 fdel->minipool->refcount--;
6749 fdel->minipool = NULL;
6752 ftmp = last_barrier;
6754 else
6756 /* ftmp is the first fix that we can't fit into this pool and
6757 there are no natural barriers that we could use. Insert a
6758 new barrier in the code somewhere between the previous
6759 fix and this one, and arrange to jump around it. */
6760 HOST_WIDE_INT max_address;
6762 /* The last item on the list of fixes must be a barrier, so
6763 we can never run off the end of the list of fixes without
6764 last_barrier being set. */
6765 if (ftmp == NULL)
6766 abort ();
6768 max_address = minipool_vector_head->max_address;
6769 /* Check that there isn't another fix that is in range that
6770 we couldn't fit into this pool because the pool was
6771 already too large: we need to put the pool before such an
6772 instruction. */
6773 if (ftmp->address < max_address)
6774 max_address = ftmp->address;
6776 last_barrier = create_fix_barrier (last_added_fix, max_address);
6779 assign_minipool_offsets (last_barrier);
6781 while (ftmp)
6783 if (GET_CODE (ftmp->insn) != BARRIER
6784 && ((ftmp->minipool = add_minipool_backward_ref (ftmp))
6785 == NULL))
6786 break;
6788 ftmp = ftmp->next;
6791 /* Scan over the fixes we have identified for this pool, fixing them
6792 up and adding the constants to the pool itself. */
6793 for (this_fix = fix; this_fix && ftmp != this_fix;
6794 this_fix = this_fix->next)
6795 if (GET_CODE (this_fix->insn) != BARRIER)
6797 rtx addr
6798 = plus_constant (gen_rtx_LABEL_REF (VOIDmode,
6799 minipool_vector_label),
6800 this_fix->minipool->offset);
6801 *this_fix->loc = gen_rtx_MEM (this_fix->mode, addr);
6804 dump_minipool (last_barrier->insn);
6805 fix = ftmp;
6808 /* From now on we must synthesize any constants that we can't handle
6809 directly. This can happen if the RTL gets split during final
6810 instruction generation. */
6811 after_arm_reorg = 1;
6813 /* Free the minipool memory. */
6814 obstack_free (&minipool_obstack, minipool_startobj);
6817 /* Routines to output assembly language. */
6819 /* If the rtx is the correct value, then return the string of the number.
6820 In this way we can ensure that valid double constants are generated even
6821 when cross-compiling. */
6823 const char *
6824 fp_immediate_constant (x)
6825 rtx x;
6827 REAL_VALUE_TYPE r;
6828 int i;
6830 if (!fpa_consts_inited)
6831 init_fpa_table ();
6833 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
6834 for (i = 0; i < 8; i++)
6835 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
6836 return strings_fpa[i];
6838 abort ();
6841 /* As for fp_immediate_constant, but value is passed directly, not in rtx. */
6843 static const char *
6844 fp_const_from_val (r)
6845 REAL_VALUE_TYPE * r;
6847 int i;
6849 if (!fpa_consts_inited)
6850 init_fpa_table ();
6852 for (i = 0; i < 8; i++)
6853 if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
6854 return strings_fpa[i];
6856 abort ();
6859 /* Output the operands of a LDM/STM instruction to STREAM.
6860 MASK is the ARM register set mask of which only bits 0-15 are important.
6861 REG is the base register, either the frame pointer or the stack pointer.
6862 INSTR is the possibly suffixed load or store instruction. */
6864 static void
6865 print_multi_reg (stream, instr, reg, mask)
6866 FILE * stream;
6867 const char * instr;
6868 int reg;
6869 int mask;
6871 int i;
6872 int not_first = FALSE;
6874 fputc ('\t', stream);
6875 asm_fprintf (stream, instr, reg);
6876 fputs (", {", stream);
6878 for (i = 0; i <= LAST_ARM_REGNUM; i++)
6879 if (mask & (1 << i))
6881 if (not_first)
6882 fprintf (stream, ", ");
6884 asm_fprintf (stream, "%r", i);
6885 not_first = TRUE;
6888 fprintf (stream, "}%s\n", TARGET_APCS_32 ? "" : "^");
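/* E.g. print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, 0x4030) prints
   "ldmfd sp!, {r4, r5, lr}" (mask bits 4, 5 and 14); in 26-bit mode
   (!TARGET_APCS_32) a trailing "^" is appended, requesting the PSR
   transfer form of LDM.  */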
6891 /* Output a 'call' insn. */
6893 const char *
6894 output_call (operands)
6895 rtx * operands;
6897 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
6899 if (REGNO (operands[0]) == LR_REGNUM)
6901 operands[0] = gen_rtx_REG (SImode, IP_REGNUM);
6902 output_asm_insn ("mov%?\t%0, %|lr", operands);
6905 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
6907 if (TARGET_INTERWORK)
6908 output_asm_insn ("bx%?\t%0", operands);
6909 else
6910 output_asm_insn ("mov%?\t%|pc, %0", operands);
6912 return "";
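/* For a call through r2 this emits, schematically:

   mov  lr, pc        @ return address = the insn after the branch
   mov  pc, r2        @ or "bx r2" under TARGET_INTERWORK

   A call through lr is first redirected via ip, since lr itself is
   about to be overwritten with the return address.  */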
6915 static int
6916 eliminate_lr2ip (x)
6917 rtx * x;
6919 int something_changed = 0;
6920 rtx x0 = * x;
6921 int code = GET_CODE (x0);
6922 int i, j;
6923 const char * fmt;
6925 switch (code)
6927 case REG:
6928 if (REGNO (x0) == LR_REGNUM)
6930 *x = gen_rtx_REG (SImode, IP_REGNUM);
6931 return 1;
6933 return 0;
6934 default:
6935 /* Scan through the sub-elements and change any references there. */
6936 fmt = GET_RTX_FORMAT (code);
6938 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6939 if (fmt[i] == 'e')
6940 something_changed |= eliminate_lr2ip (&XEXP (x0, i));
6941 else if (fmt[i] == 'E')
6942 for (j = 0; j < XVECLEN (x0, i); j++)
6943 something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));
6945 return something_changed;
6949 /* Output a 'call' insn that is a reference in memory. */
6951 const char *
6952 output_call_mem (operands)
6953 rtx * operands;
6955 operands[0] = copy_rtx (operands[0]); /* Be ultra careful. */
6956 /* Handle calls using lr by using ip (which may be clobbered in subr anyway). */
6957 if (eliminate_lr2ip (&operands[0]))
6958 output_asm_insn ("mov%?\t%|ip, %|lr", operands);
6960 if (TARGET_INTERWORK)
6962 output_asm_insn ("ldr%?\t%|ip, %0", operands);
6963 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
6964 output_asm_insn ("bx%?\t%|ip", operands);
6966 else
6968 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
6969 output_asm_insn ("ldr%?\t%|pc, %0", operands);
6972 return "";
6976 /* Output a move from arm registers to an fpu register.
6977 OPERANDS[0] is an fpu register.
6978 OPERANDS[1] is the first register of an arm register pair. */
6980 const char *
6981 output_mov_long_double_fpu_from_arm (operands)
6982 rtx * operands;
6984 int arm_reg0 = REGNO (operands[1]);
6985 rtx ops[3];
6987 if (arm_reg0 == IP_REGNUM)
6988 abort ();
6990 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6991 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6992 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
6994 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
6995 output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);
6997 return "";
7000 /* Output a move from an fpu register to arm registers.
7001 OPERANDS[0] is the first register of an arm register pair.
7002 OPERANDS[1] is an fpu register. */
7004 const char *
7005 output_mov_long_double_arm_from_fpu (operands)
7006 rtx * operands;
7008 int arm_reg0 = REGNO (operands[0]);
7009 rtx ops[3];
7011 if (arm_reg0 == IP_REGNUM)
7012 abort ();
7014 ops[0] = gen_rtx_REG (SImode, arm_reg0);
7015 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
7016 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
7018 output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
7019 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
7020 return "";
7023 /* Output a move from arm registers to arm registers of a long double.
7024 OPERANDS[0] is the destination.
7025 OPERANDS[1] is the source. */
7027 const char *
7028 output_mov_long_double_arm_from_arm (operands)
7029 rtx * operands;
7031 /* We have to be careful here because the two might overlap. */
7032 int dest_start = REGNO (operands[0]);
7033 int src_start = REGNO (operands[1]);
7034 rtx ops[2];
7035 int i;
7037 if (dest_start < src_start)
7039 for (i = 0; i < 3; i++)
7041 ops[0] = gen_rtx_REG (SImode, dest_start + i);
7042 ops[1] = gen_rtx_REG (SImode, src_start + i);
7043 output_asm_insn ("mov%?\t%0, %1", ops);
7046 else
7048 for (i = 2; i >= 0; i--)
7050 ops[0] = gen_rtx_REG (SImode, dest_start + i);
7051 ops[1] = gen_rtx_REG (SImode, src_start + i);
7052 output_asm_insn ("mov%?\t%0, %1", ops);
7056 return "";
7060 /* Output a move from arm registers to an fpu register.
7061 OPERANDS[0] is an fpu register.
7062 OPERANDS[1] is the first register of an arm register pair. */
7064 const char *
7065 output_mov_double_fpu_from_arm (operands)
7066 rtx * operands;
7068 int arm_reg0 = REGNO (operands[1]);
7069 rtx ops[2];
7071 if (arm_reg0 == IP_REGNUM)
7072 abort ();
7074 ops[0] = gen_rtx_REG (SImode, arm_reg0);
7075 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
7076 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
7077 output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
7078 return "";
7081 /* Output a move from an fpu register to arm registers.
7082 OPERANDS[0] is the first register of an arm register pair.
7083 OPERANDS[1] is an fpu register. */
7085 const char *
7086 output_mov_double_arm_from_fpu (operands)
7087 rtx * operands;
7089 int arm_reg0 = REGNO (operands[0]);
7090 rtx ops[2];
7092 if (arm_reg0 == IP_REGNUM)
7093 abort ();
7095 ops[0] = gen_rtx_REG (SImode, arm_reg0);
7096 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
7097 output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
7098 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
7099 return "";
7102 /* Output a move between double words.
7103 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
7104 or MEM<-REG and all MEMs must be offsettable addresses. */
7106 const char *
7107 output_move_double (operands)
7108 rtx * operands;
7110 enum rtx_code code0 = GET_CODE (operands[0]);
7111 enum rtx_code code1 = GET_CODE (operands[1]);
7112 rtx otherops[3];
7114 if (code0 == REG)
7116 int reg0 = REGNO (operands[0]);
7118 otherops[0] = gen_rtx_REG (SImode, 1 + reg0);
7120 if (code1 == REG)
7122 int reg1 = REGNO (operands[1]);
7123 if (reg1 == IP_REGNUM)
7124 abort ();
7126 /* Ensure the second source is not overwritten. */
7127 if (reg1 == reg0 + (WORDS_BIG_ENDIAN ? -1 : 1))
7128 output_asm_insn ("mov%?\t%Q0, %Q1\n\tmov%?\t%R0, %R1", operands);
7129 else
7130 output_asm_insn ("mov%?\t%R0, %R1\n\tmov%?\t%Q0, %Q1", operands);
7132 else if (code1 == CONST_DOUBLE)
7134 if (GET_MODE (operands[1]) == DFmode)
7136 REAL_VALUE_TYPE r;
7137 long l[2];
7139 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
7140 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
7141 otherops[1] = GEN_INT (l[1]);
7142 operands[1] = GEN_INT (l[0]);
7144 else if (GET_MODE (operands[1]) != VOIDmode)
7145 abort ();
7146 else if (WORDS_BIG_ENDIAN)
7148 otherops[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
7149 operands[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
7151 else
7153 otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
7154 operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
7157 output_mov_immediate (operands);
7158 output_mov_immediate (otherops);
7160 else if (code1 == CONST_INT)
7162 #if HOST_BITS_PER_WIDE_INT > 32
7163 /* If HOST_WIDE_INT is more than 32 bits, the intval tells us
7164 what the upper word is. */
7165 if (WORDS_BIG_ENDIAN)
7167 otherops[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
7168 operands[1] = GEN_INT (INTVAL (operands[1]) >> 32);
7170 else
7172 otherops[1] = GEN_INT (INTVAL (operands[1]) >> 32);
7173 operands[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
7175 #else
7176 /* Sign extend the intval into the high-order word. */
7177 if (WORDS_BIG_ENDIAN)
7179 otherops[1] = operands[1];
7180 operands[1] = (INTVAL (operands[1]) < 0
7181 ? constm1_rtx : const0_rtx);
7183 else
7184 otherops[1] = INTVAL (operands[1]) < 0 ? constm1_rtx : const0_rtx;
7185 #endif
7186 output_mov_immediate (otherops);
7187 output_mov_immediate (operands);
7189 else if (code1 == MEM)
7191 switch (GET_CODE (XEXP (operands[1], 0)))
7193 case REG:
7194 output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
7195 break;
7197 case PRE_INC:
7198 abort (); /* Should never happen now. */
7199 break;
7201 case PRE_DEC:
7202 output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
7203 break;
7205 case POST_INC:
7206 output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
7207 break;
7209 case POST_DEC:
7210 abort (); /* Should never happen now. */
7211 break;
7213 case LABEL_REF:
7214 case CONST:
7215 output_asm_insn ("adr%?\t%0, %1", operands);
7216 output_asm_insn ("ldm%?ia\t%0, %M0", operands);
7217 break;
7219 default:
7220 if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1),
7221 GET_MODE (XEXP (XEXP (operands[1], 0), 1))))
7223 otherops[0] = operands[0];
7224 otherops[1] = XEXP (XEXP (operands[1], 0), 0);
7225 otherops[2] = XEXP (XEXP (operands[1], 0), 1);
7227 if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
7229 if (GET_CODE (otherops[2]) == CONST_INT)
7231 switch ((int) INTVAL (otherops[2]))
7233 case -8:
7234 output_asm_insn ("ldm%?db\t%1, %M0", otherops);
7235 return "";
7236 case -4:
7237 output_asm_insn ("ldm%?da\t%1, %M0", otherops);
7238 return "";
7239 case 4:
7240 output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
7241 return "";
7244 if (!(const_ok_for_arm (INTVAL (otherops[2]))))
7245 output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
7246 else
7247 output_asm_insn ("add%?\t%0, %1, %2", otherops);
7249 else
7250 output_asm_insn ("add%?\t%0, %1, %2", otherops);
7252 else
7253 output_asm_insn ("sub%?\t%0, %1, %2", otherops);
7255 return "ldm%?ia\t%0, %M0";
7257 else
7259 otherops[1] = adjust_address (operands[1], VOIDmode, 4);
7260 /* Take care of overlapping base/data reg. */
7261 if (reg_mentioned_p (operands[0], operands[1]))
7263 output_asm_insn ("ldr%?\t%0, %1", otherops);
7264 output_asm_insn ("ldr%?\t%0, %1", operands);
7266 else
7268 output_asm_insn ("ldr%?\t%0, %1", operands);
7269 output_asm_insn ("ldr%?\t%0, %1", otherops);
7274 else
7275 abort (); /* Constraints should prevent this. */
7277 else if (code0 == MEM && code1 == REG)
7279 if (REGNO (operands[1]) == IP_REGNUM)
7280 abort ();
7282 switch (GET_CODE (XEXP (operands[0], 0)))
7284 case REG:
7285 output_asm_insn ("stm%?ia\t%m0, %M1", operands);
7286 break;
7288 case PRE_INC:
7289 abort (); /* Should never happen now. */
7290 break;
7292 case PRE_DEC:
7293 output_asm_insn ("stm%?db\t%m0!, %M1", operands);
7294 break;
7296 case POST_INC:
7297 output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
7298 break;
7300 case POST_DEC:
7301 abort (); /* Should never happen now. */
7302 break;
7304 case PLUS:
7305 if (GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT)
7307 switch ((int) INTVAL (XEXP (XEXP (operands[0], 0), 1)))
7309 case -8:
7310 output_asm_insn ("stm%?db\t%m0, %M1", operands);
7311 return "";
7313 case -4:
7314 output_asm_insn ("stm%?da\t%m0, %M1", operands);
7315 return "";
7317 case 4:
7318 output_asm_insn ("stm%?ib\t%m0, %M1", operands);
7319 return "";
7322 /* Fall through */
7324 default:
7325 otherops[0] = adjust_address (operands[0], VOIDmode, 4);
7326 otherops[1] = gen_rtx_REG (SImode, 1 + REGNO (operands[1]));
7327 output_asm_insn ("str%?\t%1, %0", operands);
7328 output_asm_insn ("str%?\t%1, %0", otherops);
7331 else
7332 /* Constraints should prevent this. */
7333 abort ();
7335 return "";
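/* Representative expansions (register numbers invented):

   r2,r3 <- [r1]        ldmia  r1, {r2, r3}
   r2,r3 <- [r1, #-8]   ldmdb  r1, {r2, r3}
   [r1]  <- r2,r3       stmia  r1, {r2, r3}
   r2,r3 <- #const      two output_mov_immediate calls, one per word

   Offsets of -8, -4 and 4 fold into the db, da and ib addressing
   modes; other offsets fall back to an add/sub into the destination
   followed by ldmia, or to a pair of single ldr/str insns.  */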
7339 /* Output an arbitrary MOV reg, #n.
7340 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
7342 const char *
7343 output_mov_immediate (operands)
7344 rtx * operands;
7346 HOST_WIDE_INT n = INTVAL (operands[1]);
7348 /* Try to use one MOV. */
7349 if (const_ok_for_arm (n))
7350 output_asm_insn ("mov%?\t%0, %1", operands);
7352 /* Try to use one MVN. */
7353 else if (const_ok_for_arm (~n))
7355 operands[1] = GEN_INT (~n);
7356 output_asm_insn ("mvn%?\t%0, %1", operands);
7358 else
7360 int n_ones = 0;
7361 int i;
7363 /* If all else fails, make it out of ORRs or BICs as appropriate. */
7364 for (i = 0; i < 32; i ++)
7365 if (n & 1 << i)
7366 n_ones ++;
7368 if (n_ones > 16) /* Shorter to use MVN with BIC in this case. */
7369 output_multi_immediate (operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1, ~ n);
7370 else
7371 output_multi_immediate (operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1, n);
7374 return "";
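/* Examples, with r0 as the destination:

   n = 0x000000ff   mov r0, #255             (one MOV suffices)
   n = 0xffffff00   mvn r0, #255             (~n is encodable)
   n = 0x0000f00f   mov r0, #15
                    orr r0, r0, #61440       (built up in chunks)  */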
7377 /* Output an ADD r, s, #n where n may be too big for one instruction.
7378 If adding zero to the same register, output nothing.
7380 const char *
7381 output_add_immediate (operands)
7382 rtx * operands;
7384 HOST_WIDE_INT n = INTVAL (operands[2]);
7386 if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
7388 if (n < 0)
7389 output_multi_immediate (operands,
7390 "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
7391 -n);
7392 else
7393 output_multi_immediate (operands,
7394 "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
7398 return "";
7401 /* Output a multiple immediate operation.
7402 OPERANDS is the vector of operands referred to in the output patterns.
7403 INSTR1 is the output pattern to use for the first constant.
7404 INSTR2 is the output pattern to use for subsequent constants.
7405 IMMED_OP is the index of the constant slot in OPERANDS.
7406 N is the constant value. */
7408 static const char *
7409 output_multi_immediate (operands, instr1, instr2, immed_op, n)
7410 rtx * operands;
7411 const char * instr1;
7412 const char * instr2;
7413 int immed_op;
7414 HOST_WIDE_INT n;
7416 #if HOST_BITS_PER_WIDE_INT > 32
7417 n &= 0xffffffff;
7418 #endif
7420 if (n == 0)
7422 /* Quick and easy output. */
7423 operands[immed_op] = const0_rtx;
7424 output_asm_insn (instr1, operands);
7426 else
7428 int i;
7429 const char * instr = instr1;
7431 /* Note that n is never zero here (which would give no output). */
7432 for (i = 0; i < 32; i += 2)
7434 if (n & (3 << i))
7436 operands[immed_op] = GEN_INT (n & (255 << i));
7437 output_asm_insn (instr, operands);
7438 instr = instr2;
7439 i += 6;
7444 return "";
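/* Trace for n = 0x0000f00f with the MOV/ORR patterns: i = 0 sees
   bits under 3 << 0, emits the chunk n & 255 = 15 ("mov r0, #15")
   and jumps ahead to i = 8; the next hit is under 3 << 12, emitting
   n & (255 << 12) = 0xf000 ("orr r0, r0, #61440").  Advancing i by
   a further 6 after each hit keeps every chunk an 8-bit value at an
   even bit position, which is precisely what the ARM rotated
   immediate field can encode.  */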
7447 /* Return the appropriate ARM instruction for the operation code.
7448 The returned result should not be overwritten. OP is the rtx of the
7449 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
7450 was shifted. */
7452 const char *
7453 arithmetic_instr (op, shift_first_arg)
7454 rtx op;
7455 int shift_first_arg;
7457 switch (GET_CODE (op))
7459 case PLUS:
7460 return "add";
7462 case MINUS:
7463 return shift_first_arg ? "rsb" : "sub";
7465 case IOR:
7466 return "orr";
7468 case XOR:
7469 return "eor";
7471 case AND:
7472 return "and";
7474 default:
7475 abort ();
7479 /* Ensure valid constant shifts and return the appropriate shift mnemonic
7480 for the operation code. The returned result should not be overwritten.
7481 OP is the rtx code of the shift.
7482 On exit, *AMOUNTP will be -1 if the shift is by a register; otherwise
7483 it will contain the constant shift amount. */
7485 static const char *
7486 shift_op (op, amountp)
7487 rtx op;
7488 HOST_WIDE_INT *amountp;
7490 const char * mnem;
7491 enum rtx_code code = GET_CODE (op);
7493 if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
7494 *amountp = -1;
7495 else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
7496 *amountp = INTVAL (XEXP (op, 1));
7497 else
7498 abort ();
7500 switch (code)
7502 case ASHIFT:
7503 mnem = "asl";
7504 break;
7506 case ASHIFTRT:
7507 mnem = "asr";
7508 break;
7510 case LSHIFTRT:
7511 mnem = "lsr";
7512 break;
7514 case ROTATERT:
7515 mnem = "ror";
7516 break;
7518 case MULT:
7519 /* We never have to worry about the amount being other than a
7520 power of 2, since this case can never be reloaded from a reg. */
7521 if (*amountp != -1)
7522 *amountp = int_log2 (*amountp);
7523 else
7524 abort ();
7525 return "asl";
7527 default:
7528 abort ();
7531 if (*amountp != -1)
7533 /* This is not 100% correct, but follows from the desire to merge
7534 multiplication by a power of 2 with the recognizer for a
7535 shift. >=32 is not a valid shift for "asl", so we must try to
7536 output a shift that produces the correct arithmetical result.
7537 Using lsr #32 is identical except for the fact that the carry bit
7538 is not set correctly if we set the flags; but we never use the
7539 carry bit from such an operation, so we can ignore that. */
7540 if (code == ROTATERT)
7541 /* Rotate is just modulo 32. */
7542 *amountp &= 31;
7543 else if (*amountp != (*amountp & 31))
7545 if (code == ASHIFT)
7546 mnem = "lsr";
7547 *amountp = 32;
7550 /* Shifts of 0 are no-ops. */
7551 if (*amountp == 0)
7552 return NULL;
7555 return mnem;
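/* Examples: (ashiftrt x 4) yields "asr" with *AMOUNTP = 4;
   (mult x 8) is printed as "asl" with *AMOUNTP = 3; an out-of-range
   (ashift x 33) degrades to "lsr" with *AMOUNTP = 32, which produces
   the same all-zero result; a shift by a register leaves the mnemonic
   alone and sets *AMOUNTP to -1.  */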
7558 /* Obtain the shift count (log2) of POWER, which must be a power of two. */
7560 static HOST_WIDE_INT
7561 int_log2 (power)
7562 HOST_WIDE_INT power;
7564 HOST_WIDE_INT shift = 0;
7566 while ((((HOST_WIDE_INT) 1 << shift) & power) == 0)
7568 if (shift > 31)
7569 abort ();
7570 shift ++;
7573 return shift;
7576 /* Output a .ascii pseudo-op, keeping track of lengths. This is because
7577 /bin/as is horribly restrictive. */
7578 #define MAX_ASCII_LEN 51
7580 void
7581 output_ascii_pseudo_op (stream, p, len)
7582 FILE * stream;
7583 const unsigned char * p;
7584 int len;
7586 int i;
7587 int len_so_far = 0;
7589 fputs ("\t.ascii\t\"", stream);
7591 for (i = 0; i < len; i++)
7593 int c = p[i];
7595 if (len_so_far >= MAX_ASCII_LEN)
7597 fputs ("\"\n\t.ascii\t\"", stream);
7598 len_so_far = 0;
7601 switch (c)
7603 case TARGET_TAB:
7604 fputs ("\\t", stream);
7605 len_so_far += 2;
7606 break;
7608 case TARGET_FF:
7609 fputs ("\\f", stream);
7610 len_so_far += 2;
7611 break;
7613 case TARGET_BS:
7614 fputs ("\\b", stream);
7615 len_so_far += 2;
7616 break;
7618 case TARGET_CR:
7619 fputs ("\\r", stream);
7620 len_so_far += 2;
7621 break;
7623 case TARGET_NEWLINE:
7624 fputs ("\\n", stream);
7625 c = p [i + 1];
7626 if ((c >= ' ' && c <= '~')
7627 || c == TARGET_TAB)
7628 /* This is a good place for a line break. */
7629 len_so_far = MAX_ASCII_LEN;
7630 else
7631 len_so_far += 2;
7632 break;
7634 case '\"':
7635 case '\\':
7636 putc ('\\', stream);
7637 len_so_far++;
7638 /* drop through. */
7640 default:
7641 if (c >= ' ' && c <= '~')
7643 putc (c, stream);
7644 len_so_far++;
7646 else
7648 fprintf (stream, "\\%03o", c);
7649 len_so_far += 4;
7651 break;
7655 fputs ("\"\n", stream);
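/* Sample output for the string "ab\ncd" (six bytes including the
   trailing NUL):

   .ascii  "ab\n"
   .ascii  "cd\000"

   A fresh directive is started after a newline whose successor is
   printable, and whenever the running length reaches
   MAX_ASCII_LEN.  */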
7658 /* Compute the register save mask for registers 0 through 12
7659 inclusive. This code is used by both arm_compute_save_reg_mask
7660 and arm_compute_initial_elimination_offset. */
7662 static unsigned long
7663 arm_compute_save_reg0_reg12_mask ()
7665 unsigned long func_type = arm_current_func_type ();
7666 unsigned int save_reg_mask = 0;
7667 unsigned int reg;
7669 if (IS_INTERRUPT (func_type))
7671 unsigned int max_reg;
7672 /* Interrupt functions must not corrupt any registers,
7673 even call clobbered ones. If this is a leaf function
7674 we can just examine the registers used by the RTL, but
7675 otherwise we have to assume that whatever function is
7676 called might clobber anything, and so we have to save
7677 all the call-clobbered registers as well. */
7678 if (ARM_FUNC_TYPE (func_type) == ARM_FT_FIQ)
7679 /* FIQ handlers have registers r8 - r12 banked, so
7680 we only need to check r0 - r7. Normal ISRs only
7681 bank r14 and r15, so we must check up to r12.
7682 r13 is the stack pointer which is always preserved,
7683 so we do not need to consider it here. */
7684 max_reg = 7;
7685 else
7686 max_reg = 12;
7688 for (reg = 0; reg <= max_reg; reg++)
7689 if (regs_ever_live[reg]
7690 || (! current_function_is_leaf && call_used_regs [reg]))
7691 save_reg_mask |= (1 << reg);
7693 else
7695 /* In the normal case we only need to save those registers
7696 which are call saved and which are used by this function. */
7697 for (reg = 0; reg <= 10; reg++)
7698 if (regs_ever_live[reg] && ! call_used_regs [reg])
7699 save_reg_mask |= (1 << reg);
7701 /* Handle the frame pointer as a special case. */
7702 if (! TARGET_APCS_FRAME
7703 && ! frame_pointer_needed
7704 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
7705 && ! call_used_regs[HARD_FRAME_POINTER_REGNUM])
7706 save_reg_mask |= 1 << HARD_FRAME_POINTER_REGNUM;
7708 /* If we aren't loading the PIC register,
7709 don't stack it even though it may be live. */
7710 if (flag_pic
7711 && ! TARGET_SINGLE_PIC_BASE
7712 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
7713 save_reg_mask |= 1 << PIC_OFFSET_TABLE_REGNUM;
7716 return save_reg_mask;
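/* Example: a normal (non-interrupt) function whose RTL uses only the
   call-saved registers r4 and r6 yields (1 << 4) | (1 << 6) = 0x50.
   An ISR instead sweeps r0-r12 (r0-r7 for FIQ, whose upper registers
   are banked), adding any call-clobbered register that a callee
   might corrupt.  */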
7719 /* Compute a bit mask of which registers need to be
7720 saved on the stack for the current function. */
7722 static unsigned long
7723 arm_compute_save_reg_mask ()
7725 unsigned int save_reg_mask = 0;
7726 unsigned long func_type = arm_current_func_type ();
7728 if (IS_NAKED (func_type))
7729 /* This should never really happen. */
7730 return 0;
7732 /* If we are creating a stack frame, then we must save the frame pointer,
7733 IP (which will hold the old stack pointer), LR and the PC. */
7734 if (frame_pointer_needed)
7735 save_reg_mask |=
7736 (1 << ARM_HARD_FRAME_POINTER_REGNUM)
7737 | (1 << IP_REGNUM)
7738 | (1 << LR_REGNUM)
7739 | (1 << PC_REGNUM);
7741 /* Volatile functions do not return, so there
7742 is no need to save any other registers. */
7743 if (IS_VOLATILE (func_type))
7744 return save_reg_mask;
7746 save_reg_mask |= arm_compute_save_reg0_reg12_mask ();
7748 /* Decide if we need to save the link register.
7749 Interrupt routines have their own banked link register,
7750 so they never need to save it.
7751 Otherwise if we do not use the link register we do not need to save
7752 it. If we are pushing other registers onto the stack however, we
7753 can save an instruction in the epilogue by pushing the link register
7754 now and then popping it back into the PC. This incurs extra memory
7755 accesses though, so we only do it when optimising for size, and only
7756 if we know that we will not need a fancy return sequence. */
7757 if (regs_ever_live [LR_REGNUM]
7758 || (save_reg_mask
7759 && optimize_size
7760 && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL))
7761 save_reg_mask |= 1 << LR_REGNUM;
7763 if (cfun->machine->lr_save_eliminated)
7764 save_reg_mask &= ~ (1 << LR_REGNUM);
7766 return save_reg_mask;
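/* Example: when a stack frame is created the mask always contains
   fp, ip, lr and pc, i.e. at least (1 << 11) | (1 << 12) | (1 << 14)
   | (1 << 15) = 0xd800 in ARM state; conversely a small leaf
   function that touches no call-saved register and never uses lr
   can legitimately return 0.  */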
7769 /* Generate a function exit sequence. If REALLY_RETURN is false, then do
7770 everything except the final return instruction. */
7772 const char *
7773 output_return_instruction (operand, really_return, reverse)
7774 rtx operand;
7775 int really_return;
7776 int reverse;
7778 char conditional[10];
7779 char instr[100];
7780 int reg;
7781 unsigned long live_regs_mask;
7782 unsigned long func_type;
7784 func_type = arm_current_func_type ();
7786 if (IS_NAKED (func_type))
7787 return "";
7789 if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
7791 /* If this function was declared non-returning, and we have found a tail
7792 call, then we have to trust that the called function won't return. */
7793 if (really_return)
7795 rtx ops[2];
7797 /* Otherwise, trap an attempted return by aborting. */
7798 ops[0] = operand;
7799 ops[1] = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)"
7800 : "abort");
7801 assemble_external_libcall (ops[1]);
7802 output_asm_insn (reverse ? "bl%D0\t%a1" : "bl%d0\t%a1", ops);
7805 return "";
7808 if (current_function_calls_alloca && !really_return)
7809 abort ();
7811 sprintf (conditional, "%%?%%%c0", reverse ? 'D' : 'd');
7813 return_used_this_function = 1;
7815 live_regs_mask = arm_compute_save_reg_mask ();
7817 if (live_regs_mask)
7819 const char * return_reg;
7821 /* If we do not have any special requirements for function exit
7822 (e.g. interworking, or an ISR) then we can load the return address
7823 directly into the PC. Otherwise we must load it into LR. */
7824 if (really_return
7825 && ! TARGET_INTERWORK)
7826 return_reg = reg_names[PC_REGNUM];
7827 else
7828 return_reg = reg_names[LR_REGNUM];
7830 if ((live_regs_mask & (1 << IP_REGNUM)) == (1 << IP_REGNUM))
7831 /* There are two possible reasons for the IP register being saved.
7832 Either a stack frame was created, in which case IP contains the
7833 old stack pointer, or an ISR routine corrupted it. If this is an
7834 ISR routine then just restore IP, otherwise restore IP into SP. */
7835 if (! IS_INTERRUPT (func_type))
7837 live_regs_mask &= ~ (1 << IP_REGNUM);
7838 live_regs_mask |= (1 << SP_REGNUM);
7841 /* On some ARM architectures it is faster to use LDR rather than
7842 LDM to load a single register. On other architectures, the
7843 cost is the same. In 26 bit mode, or for exception handlers,
7844 we have to use LDM to load the PC so that the CPSR is also
7845 restored. */
7846 for (reg = 0; reg <= LAST_ARM_REGNUM; reg++)
7848 if (live_regs_mask == (unsigned int)(1 << reg))
7849 break;
7851 if (reg <= LAST_ARM_REGNUM
7852 && (reg != LR_REGNUM
7853 || ! really_return
7854 || (TARGET_APCS_32 && ! IS_INTERRUPT (func_type))))
7856 sprintf (instr, "ldr%s\t%%|%s, [%%|sp], #4", conditional,
7857 (reg == LR_REGNUM) ? return_reg : reg_names[reg]);
7859 else
7861 char *p;
7862 int first = 1;
7864 /* Generate the load multiple instruction to restore the registers. */
7865 if (frame_pointer_needed)
7866 sprintf (instr, "ldm%sea\t%%|fp, {", conditional);
7867 else if (live_regs_mask & (1 << SP_REGNUM))
7868 sprintf (instr, "ldm%sfd\t%%|sp, {", conditional);
7869 else
7870 sprintf (instr, "ldm%sfd\t%%|sp!, {", conditional);
7872 p = instr + strlen (instr);
7874 for (reg = 0; reg <= SP_REGNUM; reg++)
7875 if (live_regs_mask & (1 << reg))
7877 int l = strlen (reg_names[reg]);
7879 if (first)
7880 first = 0;
7881 else
7883 memcpy (p, ", ", 2);
7884 p += 2;
7887 memcpy (p, "%|", 2);
7888 memcpy (p + 2, reg_names[reg], l);
7889 p += l + 2;
7892 if (live_regs_mask & (1 << LR_REGNUM))
7894 int l = strlen (return_reg);
7896 if (! first)
7898 memcpy (p, ", ", 2);
7899 p += 2;
7902 memcpy (p, "%|", 2);
7903 memcpy (p + 2, return_reg, l);
7904 strcpy (p + 2 + l, ((TARGET_APCS_32
7905 && !IS_INTERRUPT (func_type))
7906 || !really_return)
7907 ? "}" : "}^");
7909 else
7910 strcpy (p, "}");
7913 output_asm_insn (instr, & operand);
7915 /* See if we need to generate an extra instruction to
7916 perform the actual function return. */
7917 if (really_return
7918 && func_type != ARM_FT_INTERWORKED
7919 && (live_regs_mask & (1 << LR_REGNUM)) != 0)
7921 /* The return has already been handled
7922 by loading the LR into the PC. */
7923 really_return = 0;
7927 if (really_return)
7929 switch ((int) ARM_FUNC_TYPE (func_type))
7931 case ARM_FT_ISR:
7932 case ARM_FT_FIQ:
7933 sprintf (instr, "sub%ss\t%%|pc, %%|lr, #4", conditional);
7934 break;
7936 case ARM_FT_INTERWORKED:
7937 sprintf (instr, "bx%s\t%%|lr", conditional);
7938 break;
7940 case ARM_FT_EXCEPTION:
7941 sprintf (instr, "mov%ss\t%%|pc, %%|lr", conditional);
7942 break;
7944 default:
7945 /* ARMv5 implementations always provide BX, so interworking
7946 is the default unless APCS-26 is in use. */
7947 if ((insn_flags & FL_ARCH5) != 0 && TARGET_APCS_32)
7948 sprintf (instr, "bx%s\t%%|lr", conditional);
7949 else
7950 sprintf (instr, "mov%s%s\t%%|pc, %%|lr",
7951 conditional, TARGET_APCS_32 ? "" : "s");
7952 break;
7955 output_asm_insn (instr, & operand);
7958 return "";
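/* Two representative expansions (unconditional forms shown; %?
   inserts the condition when the return is predicated):

   frame, {r4, fp, ip, lr, pc} saved   ldmea  fp, {r4, fp, sp, pc}
   leaf, nothing saved, APCS-32        mov    pc, lr
                                       (or "bx lr" from ARMv5 on)  */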
7961 /* Write the function name into the code section, directly preceding
7962 the function prologue.
7964 Code will be output similar to this:
7965 t0
7966 .ascii "arm_poke_function_name", 0
7967 .align
7968 t1
7969 .word 0xff000000 + (t1 - t0)
7970 arm_poke_function_name
7971 mov ip, sp
7972 stmfd sp!, {fp, ip, lr, pc}
7973 sub fp, ip, #4
7975 When performing a stack backtrace, code can inspect the value
7976 of 'pc' stored at 'fp' + 0. If the trace function then looks
7977 at location pc - 12 and the top 8 bits are set, then we know
7978 that there is a function name embedded immediately preceding this
7979 location, and that its length is given by (pc[-3] & ~0xff000000).
7981 We assume that pc is declared as a pointer to an unsigned long.
7983 It is of no benefit to output the function name if we are assembling
7984 a leaf function. These function types will not contain a stack
7985 backtrace structure, therefore it is not possible to determine the
7986 function name. */
7988 void
7989 arm_poke_function_name (stream, name)
7990 FILE * stream;
7991 const char * name;
7993 unsigned long alignlength;
7994 unsigned long length;
7995 rtx x;
7997 length = strlen (name) + 1;
7998 alignlength = ROUND_UP_WORD (length);
8000 ASM_OUTPUT_ASCII (stream, name, length);
8001 ASM_OUTPUT_ALIGN (stream, 2);
8002 x = GEN_INT ((unsigned HOST_WIDE_INT) 0xff000000 + alignlength);
8003 assemble_aligned_integer (UNITS_PER_WORD, x);
8006 /* Place some comments into the assembler stream
8007 describing the current function. */
8009 static void
8010 arm_output_function_prologue (f, frame_size)
8011 FILE * f;
8012 HOST_WIDE_INT frame_size;
8014 unsigned long func_type;
8016 if (!TARGET_ARM)
8018 thumb_output_function_prologue (f, frame_size);
8019 return;
8022 /* Sanity check. */
8023 if (arm_ccfsm_state || arm_target_insn)
8024 abort ();
8026 func_type = arm_current_func_type ();
8028 switch ((int) ARM_FUNC_TYPE (func_type))
8030 default:
8031 case ARM_FT_NORMAL:
8032 break;
8033 case ARM_FT_INTERWORKED:
8034 asm_fprintf (f, "\t%@ Function supports interworking.\n");
8035 break;
8036 case ARM_FT_EXCEPTION_HANDLER:
8037 asm_fprintf (f, "\t%@ C++ Exception Handler.\n");
8038 break;
8039 case ARM_FT_ISR:
8040 asm_fprintf (f, "\t%@ Interrupt Service Routine.\n");
8041 break;
8042 case ARM_FT_FIQ:
8043 asm_fprintf (f, "\t%@ Fast Interrupt Service Routine.\n");
8044 break;
8045 case ARM_FT_EXCEPTION:
8046 asm_fprintf (f, "\t%@ ARM Exception Handler.\n");
8047 break;
8050 if (IS_NAKED (func_type))
8051 asm_fprintf (f, "\t%@ Naked Function: prologue and epilogue provided by programmer.\n");
8053 if (IS_VOLATILE (func_type))
8054 asm_fprintf (f, "\t%@ Volatile: function does not return.\n");
8056 if (IS_NESTED (func_type))
8057 asm_fprintf (f, "\t%@ Nested: function declared inside another function.\n");
8059 asm_fprintf (f, "\t%@ args = %d, pretend = %d, frame = %d\n",
8060 current_function_args_size,
8061 current_function_pretend_args_size, frame_size);
8063 asm_fprintf (f, "\t%@ frame_needed = %d, uses_anonymous_args = %d\n",
8064 frame_pointer_needed,
8065 cfun->machine->uses_anonymous_args);
8067 if (cfun->machine->lr_save_eliminated)
8068 asm_fprintf (f, "\t%@ link register save eliminated.\n");
8070 #ifdef AOF_ASSEMBLER
8071 if (flag_pic)
8072 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, PIC_OFFSET_TABLE_REGNUM);
8073 #endif
8075 return_used_this_function = 0;
8078 const char *
8079 arm_output_epilogue (really_return)
8080 int really_return;
8082 int reg;
8083 unsigned long saved_regs_mask;
8084 unsigned long func_type;
8085 /* Floats_offset is the offset from the "virtual" frame. In an APCS
8086 frame that is $fp + 4 for a non-variadic function. */
8087 int floats_offset = 0;
8088 rtx operands[3];
8089 int frame_size = arm_get_frame_size ();
8090 FILE * f = asm_out_file;
8091 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
8093 /* If we have already generated the return instruction
8094 then it is futile to generate anything else. */
8095 if (use_return_insn (FALSE) && return_used_this_function)
8096 return "";
8098 func_type = arm_current_func_type ();
8100 if (IS_NAKED (func_type))
8101 /* Naked functions don't have epilogues. */
8102 return "";
8104 if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
8106 rtx op;
8108 /* A volatile function should never return. Call abort. */
8109 op = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)" : "abort");
8110 assemble_external_libcall (op);
8111 output_asm_insn ("bl\t%a0", &op);
8113 return "";
8116 if (ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER
8117 && ! really_return)
8118 /* If we are throwing an exception, then we really must
8119 be doing a return, so we can't tail-call. */
8120 abort ();
8122 saved_regs_mask = arm_compute_save_reg_mask ();
8124 /* XXX We should adjust floats_offset for any anonymous args, and then
8125 re-adjust vfp_offset below to compensate. */
8127 /* Compute how far away the floats will be. */
8128 for (reg = 0; reg <= LAST_ARM_REGNUM; reg ++)
8129 if (saved_regs_mask & (1 << reg))
8130 floats_offset += 4;
8132 if (frame_pointer_needed)
8134 int vfp_offset = 4;
8136 if (arm_fpu_arch == FP_SOFT2)
8138 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
8139 if (regs_ever_live[reg] && !call_used_regs[reg])
8141 floats_offset += 12;
8142 asm_fprintf (f, "\tldfe\t%r, [%r, #-%d]\n",
8143 reg, FP_REGNUM, floats_offset - vfp_offset);
8146 else
8148 int start_reg = LAST_ARM_FP_REGNUM;
8150 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
8152 if (regs_ever_live[reg] && !call_used_regs[reg])
8154 floats_offset += 12;
8156 /* We can't unstack more than four registers at once. */
8157 if (start_reg - reg == 3)
8159 asm_fprintf (f, "\tlfm\t%r, 4, [%r, #-%d]\n",
8160 reg, FP_REGNUM, floats_offset - vfp_offset);
8161 start_reg = reg - 1;
8164 else
8166 if (reg != start_reg)
8167 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
8168 reg + 1, start_reg - reg,
8169 FP_REGNUM, floats_offset - vfp_offset);
8170 start_reg = reg - 1;
8174 /* Just in case the last register checked also needs unstacking. */
8175 if (reg != start_reg)
8176 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
8177 reg + 1, start_reg - reg,
8178 FP_REGNUM, floats_offset - vfp_offset);
8181 /* saved_regs_mask should contain the IP, which at the time of stack
8182 frame generation actually contains the old stack pointer. So a
8183 quick way to unwind the stack is just pop the IP register directly
8184 into the stack pointer. */
8185 if ((saved_regs_mask & (1 << IP_REGNUM)) == 0)
8186 abort ();
8187 saved_regs_mask &= ~ (1 << IP_REGNUM);
8188 saved_regs_mask |= (1 << SP_REGNUM);
8190 /* There are two registers left in saved_regs_mask - LR and PC. We
8191 only need to restore the LR register (the return address), but to
8192 save time we can load it directly into the PC, unless we need a
8193 special function exit sequence, or we are not really returning. */
8194 if (really_return && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL)
8195 /* Delete the LR from the register mask, so that the LR on
8196 the stack is loaded into the PC in the register mask. */
8197 saved_regs_mask &= ~ (1 << LR_REGNUM);
8198 else
8199 saved_regs_mask &= ~ (1 << PC_REGNUM);
8201 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, saved_regs_mask);
8203 if (IS_INTERRUPT (func_type))
8204 /* Interrupt handlers will have pushed the
8205 IP onto the stack, so restore it now. */
8206 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, 1 << IP_REGNUM);
8208 else
8210 /* Restore stack pointer if necessary. */
8211 if (frame_size + current_function_outgoing_args_size != 0)
8213 operands[0] = operands[1] = stack_pointer_rtx;
8214 operands[2] = GEN_INT (frame_size
8215 + current_function_outgoing_args_size);
8216 output_add_immediate (operands);
8219 if (arm_fpu_arch == FP_SOFT2)
8221 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
8222 if (regs_ever_live[reg] && !call_used_regs[reg])
8223 asm_fprintf (f, "\tldfe\t%r, [%r], #12\n",
8224 reg, SP_REGNUM);
8226 else
8228 int start_reg = FIRST_ARM_FP_REGNUM;
8230 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
8232 if (regs_ever_live[reg] && !call_used_regs[reg])
8234 if (reg - start_reg == 3)
8236 asm_fprintf (f, "\tlfmfd\t%r, 4, [%r]!\n",
8237 start_reg, SP_REGNUM);
8238 start_reg = reg + 1;
8241 else
8243 if (reg != start_reg)
8244 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
8245 start_reg, reg - start_reg,
8246 SP_REGNUM);
8248 start_reg = reg + 1;
8252 /* Just in case the last register checked also needs unstacking. */
8253 if (reg != start_reg)
8254 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
8255 start_reg, reg - start_reg, SP_REGNUM);
8258 /* If we can, restore the LR into the PC. */
8259 if (ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
8260 && really_return
8261 && current_function_pretend_args_size == 0
8262 && saved_regs_mask & (1 << LR_REGNUM))
8264 saved_regs_mask &= ~ (1 << LR_REGNUM);
8265 saved_regs_mask |= (1 << PC_REGNUM);
8268 /* Load the registers off the stack. If we only have one register
8269 to load use the LDR instruction - it is faster. */
8270 if (saved_regs_mask == (1 << LR_REGNUM))
8272 /* The exception handler ignores the LR, so we do
8273 not really need to load it off the stack. */
8274 if (eh_ofs)
8275 asm_fprintf (f, "\tadd\t%r, %r, #4\n", SP_REGNUM, SP_REGNUM);
8276 else
8277 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM, SP_REGNUM);
8279 else if (saved_regs_mask)
8281 if (saved_regs_mask & (1 << SP_REGNUM))
8282 /* Note - write back to the stack register is not enabled
8283 (ie "ldmfd sp!..."). We know that the stack pointer is
8284 in the list of registers and if we add writeback the
8285 instruction becomes UNPREDICTABLE. */
8286 print_multi_reg (f, "ldmfd\t%r", SP_REGNUM, saved_regs_mask);
8287 else
8288 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, saved_regs_mask);
8291 if (current_function_pretend_args_size)
8293 /* Unwind the pre-pushed regs. */
8294 operands[0] = operands[1] = stack_pointer_rtx;
8295 operands[2] = GEN_INT (current_function_pretend_args_size);
8296 output_add_immediate (operands);
8300 #if 0
8301 if (ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER)
8302 /* Adjust the stack to remove the exception handler stuff. */
8303 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
8304 REGNO (eh_ofs));
8305 #endif
8307 if (! really_return
8308 || (ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
8309 && current_function_pretend_args_size == 0
8310 && saved_regs_mask & (1 << PC_REGNUM)))
8311 return "";
8313 /* Generate the return instruction. */
8314 switch ((int) ARM_FUNC_TYPE (func_type))
8316 case ARM_FT_EXCEPTION_HANDLER:
8317 /* Even in 26-bit mode we do a mov (rather than a movs)
8318 because we don't have the PSR bits set in the address. */
8319 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, EXCEPTION_LR_REGNUM);
8320 break;
8322 case ARM_FT_ISR:
8323 case ARM_FT_FIQ:
8324 asm_fprintf (f, "\tsubs\t%r, %r, #4\n", PC_REGNUM, LR_REGNUM);
8325 break;
8327 case ARM_FT_EXCEPTION:
8328 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
8329 break;
8331 case ARM_FT_INTERWORKED:
8332 asm_fprintf (f, "\tbx\t%r\n", LR_REGNUM);
8333 break;
8335 default:
8336 if (frame_pointer_needed)
8337 /* If we used the frame pointer then the return address
8338 will have been loaded off the stack directly into the
8339 PC, so there is no need to issue a MOV instruction
8340 here. */
8342 else if (current_function_pretend_args_size == 0
8343 && (saved_regs_mask & (1 << LR_REGNUM)))
8344 /* Similarly we may have been able to load LR into the PC
8345 even if we did not create a stack frame. */
8347 else if (TARGET_APCS_32)
8348 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, LR_REGNUM);
8349 else
8350 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
8351 break;
8354 return "";
8357 static void
8358 arm_output_function_epilogue (file, frame_size)
8359 FILE *file ATTRIBUTE_UNUSED;
8360 HOST_WIDE_INT frame_size;
8362 if (TARGET_THUMB)
8364 /* ??? Probably not safe to set this here, since it assumes that a
8365 function will be emitted as assembly immediately after we generate
8366 RTL for it. This does not happen for inline functions. */
8367 return_used_this_function = 0;
8369 else
8371 /* We need to take into account any stack-frame rounding. */
8372 frame_size = arm_get_frame_size ();
8374 if (use_return_insn (FALSE)
8375 && return_used_this_function
8376 && (frame_size + current_function_outgoing_args_size) != 0
8377 && !frame_pointer_needed)
8378 abort ();
8380 /* Reset the ARM-specific per-function variables. */
8381 after_arm_reorg = 0;
8385 /* Generate and emit an insn that we will recognize as a push_multi.
8386 Unfortunately, since this insn does not reflect very well the actual
8387 semantics of the operation, we need to annotate the insn for the benefit
8388 of DWARF2 frame unwind information. */
8390 static rtx
8391 emit_multi_reg_push (mask)
8392 int mask;
8394 int num_regs = 0;
8395 int num_dwarf_regs;
8396 int i, j;
8397 rtx par;
8398 rtx dwarf;
8399 int dwarf_par_index;
8400 rtx tmp, reg;
8402 for (i = 0; i <= LAST_ARM_REGNUM; i++)
8403 if (mask & (1 << i))
8404 num_regs++;
8406 if (num_regs == 0 || num_regs > 16)
8407 abort ();
8409 /* We don't record the PC in the dwarf frame information. */
8410 num_dwarf_regs = num_regs;
8411 if (mask & (1 << PC_REGNUM))
8412 num_dwarf_regs--;
8414 /* For the body of the insn we are going to generate an UNSPEC in
8415 parallel with several USEs. This allows the insn to be recognized
8416 by the push_multi pattern in the arm.md file. The insn looks
8417 something like this:
8419 (parallel [
8420 (set (mem:BLK (pre_dec:BLK (reg:SI sp)))
8421 (unspec:BLK [(reg:SI r4)] UNSPEC_PUSH_MULT))
8422 (use (reg:SI 11 fp))
8423 (use (reg:SI 12 ip))
8424 (use (reg:SI 14 lr))
8425 (use (reg:SI 15 pc))
8428 For the frame note however, we try to be more explicit and actually
8429 show each register being stored into the stack frame, plus a (single)
8430 decrement of the stack pointer. We do it this way in order to be
8431 friendly to the stack unwinding code, which only wants to see a single
8432 stack decrement per instruction. The RTL we generate for the note looks
8433 something like this:
8435 (sequence [
8436 (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int -20)))
8437 (set (mem:SI (reg:SI sp)) (reg:SI r4))
8438 (set (mem:SI (plus:SI (reg:SI sp) (const_int 4))) (reg:SI fp))
8439 (set (mem:SI (plus:SI (reg:SI sp) (const_int 8))) (reg:SI ip))
8440 (set (mem:SI (plus:SI (reg:SI sp) (const_int 12))) (reg:SI lr))
8443 This sequence is used both by the code to support stack unwinding for
8444 exception handlers and the code to generate dwarf2 frame debugging. */
8446 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_regs));
8447 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (num_dwarf_regs + 1));
8448 dwarf_par_index = 1;
8450 for (i = 0; i <= LAST_ARM_REGNUM; i++)
8452 if (mask & (1 << i))
8454 reg = gen_rtx_REG (SImode, i);
8456 XVECEXP (par, 0, 0)
8457 = gen_rtx_SET (VOIDmode,
8458 gen_rtx_MEM (BLKmode,
8459 gen_rtx_PRE_DEC (BLKmode,
8460 stack_pointer_rtx)),
8461 gen_rtx_UNSPEC (BLKmode,
8462 gen_rtvec (1, reg),
8463 UNSPEC_PUSH_MULT));
8465 if (i != PC_REGNUM)
8467 tmp = gen_rtx_SET (VOIDmode,
8468 gen_rtx_MEM (SImode, stack_pointer_rtx),
8469 reg);
8470 RTX_FRAME_RELATED_P (tmp) = 1;
8471 XVECEXP (dwarf, 0, dwarf_par_index) = tmp;
8472 dwarf_par_index++;
8475 break;
8479 for (j = 1, i++; j < num_regs; i++)
8481 if (mask & (1 << i))
8483 reg = gen_rtx_REG (SImode, i);
8485 XVECEXP (par, 0, j) = gen_rtx_USE (VOIDmode, reg);
8487 if (i != PC_REGNUM)
8489 tmp = gen_rtx_SET (VOIDmode,
8490 gen_rtx_MEM (SImode,
8491 plus_constant (stack_pointer_rtx,
8492 4 * j)),
8493 reg);
8494 RTX_FRAME_RELATED_P (tmp) = 1;
8495 XVECEXP (dwarf, 0, dwarf_par_index++) = tmp;
8498 j++;
8502 par = emit_insn (par);
8504 tmp = gen_rtx_SET (SImode,
8505 stack_pointer_rtx,
8506 gen_rtx_PLUS (SImode,
8507 stack_pointer_rtx,
8508 GEN_INT (-4 * num_regs)));
8509 RTX_FRAME_RELATED_P (tmp) = 1;
8510 XVECEXP (dwarf, 0, 0) = tmp;
8512 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
8513 REG_NOTES (par));
8514 return par;
8517 static rtx
8518 emit_sfm (base_reg, count)
8519 int base_reg;
8520 int count;
8522 rtx par;
8523 rtx dwarf;
8524 rtx tmp, reg;
8525 int i;
8527 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
8528 dwarf = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
8530 reg = gen_rtx_REG (XFmode, base_reg++);
8532 XVECEXP (par, 0, 0)
8533 = gen_rtx_SET (VOIDmode,
8534 gen_rtx_MEM (BLKmode,
8535 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
8536 gen_rtx_UNSPEC (BLKmode,
8537 gen_rtvec (1, reg),
8538 UNSPEC_PUSH_MULT));
8539 tmp
8540 = gen_rtx_SET (VOIDmode,
8541 gen_rtx_MEM (XFmode,
8542 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
8543 reg);
8544 RTX_FRAME_RELATED_P (tmp) = 1;
8545 XVECEXP (dwarf, 0, count - 1) = tmp;
8547 for (i = 1; i < count; i++)
8549 reg = gen_rtx_REG (XFmode, base_reg++);
8550 XVECEXP (par, 0, i) = gen_rtx_USE (VOIDmode, reg);
8552 tmp = gen_rtx_SET (VOIDmode,
8553 gen_rtx_MEM (XFmode,
8554 gen_rtx_PRE_DEC (BLKmode,
8555 stack_pointer_rtx)),
8556 reg);
8557 RTX_FRAME_RELATED_P (tmp) = 1;
8558 XVECEXP (dwarf, 0, count - i - 1) = tmp;
8561 par = emit_insn (par);
8562 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
8563 REG_NOTES (par));
8564 return par;
8567 /* Compute the distance from register FROM to register TO.
8568 These can be the arg pointer (26), the soft frame pointer (25),
8569 the stack pointer (13) or the hard frame pointer (11).
8570 Typical stack layout looks like this:
8572 old stack pointer -> | |
8573 ----
8574 | | \
8575 | | saved arguments for
8576 | | vararg functions
8577 | | /
8579 hard FP & arg pointer -> | | \
8580 | | stack
8581 | | frame
8582 | | /
8584 | | \
8585 | | call saved
8586 | | registers
8587 soft frame pointer -> | | /
8589 | | \
8590 | | local
8591 | | variables
8592 | | /
8594 | | \
8595 | | outgoing
8596 | | arguments
8597 current stack pointer -> | | /
8600 For a given function some or all of these stack components
8601 may not be needed, giving rise to the possibility of
8602 eliminating some of the registers.
8604 The values returned by this function must reflect the behavior
8605 of arm_expand_prologue() and arm_compute_save_reg_mask().
8607 The sign of the number returned reflects the direction of stack
8608 growth, so the values are positive for all eliminations except
8609 from the soft frame pointer to the hard frame pointer. */
8611 unsigned int
8612 arm_compute_initial_elimination_offset (from, to)
8613 unsigned int from;
8614 unsigned int to;
8616 unsigned int local_vars = arm_get_frame_size ();
8617 unsigned int outgoing_args = current_function_outgoing_args_size;
8618 unsigned int stack_frame;
8619 unsigned int call_saved_registers;
8620 unsigned long func_type;
8622 func_type = arm_current_func_type ();
8624 /* Volatile functions never return, so there is
8625 no need to save call saved registers. */
8626 call_saved_registers = 0;
8627 if (! IS_VOLATILE (func_type))
8629 unsigned int reg_mask;
8630 unsigned int reg;
8632 /* Make sure that we compute which registers will be saved
8633 on the stack using the same algorithm that is used by
8634 arm_compute_save_reg_mask(). */
8635 reg_mask = arm_compute_save_reg0_reg12_mask ();
8637 /* Now count the number of bits set in save_reg_mask.
8638 For each set bit we need 4 bytes of stack space. */
8639 while (reg_mask)
8641 call_saved_registers += 4;
8642 reg_mask = reg_mask & ~ (reg_mask & - reg_mask);
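/* Annotation, not part of the original source: a minimal standalone
   sketch of the bit-clearing idiom used in the loop above, assuming a
   32-bit unsigned mask. `x & -x' isolates the least significant set
   bit of x, so masking it out retires one saved register (4 bytes of
   stack) per iteration. Kept disabled, in the style of the other
   #if 0 blocks in this file. */
#if 0
static unsigned int
count_save_bytes (unsigned int reg_mask)
{
  unsigned int bytes = 0;

  while (reg_mask)
    {
      bytes += 4;                             /* One word per register. */
      reg_mask &= ~(reg_mask & -reg_mask);    /* Clear the lowest set bit. */
    }

  return bytes;   /* E.g. reg_mask == 0x4011 (three bits set) yields 12. */
}
#endif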
8645 if ((regs_ever_live[LR_REGNUM]
8646 /* If optimizing for size, then we save the link register if
8647 any other integer register is saved. This gives a smaller
8648 return sequence. */
8649 || (optimize_size && call_saved_registers > 0))
8650 /* But if a stack frame is going to be created, the LR will
8651 be saved as part of that, so we do not need to allow for
8652 it here. */
8653 && ! frame_pointer_needed)
8654 call_saved_registers += 4;
8656 /* If the hard floating point registers are going to be
8657 used then they must be saved on the stack as well.
8658 Each register occupies 12 bytes of stack space. */
8659 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg ++)
8660 if (regs_ever_live[reg] && ! call_used_regs[reg])
8661 call_saved_registers += 12;
8664 /* The stack frame contains 4 registers - the old frame pointer,
8665 the old stack pointer, the return address and PC of the start
8666 of the function. */
8667 stack_frame = frame_pointer_needed ? 16 : 0;
8669 /* OK, now we have enough information to compute the distances.
8670 There must be an entry in these switch tables for each pair
8671 of registers in ELIMINABLE_REGS, even if some of the entries
8672 seem to be redundant or useless. */
8673 switch (from)
8675 case ARG_POINTER_REGNUM:
8676 switch (to)
8678 case THUMB_HARD_FRAME_POINTER_REGNUM:
8679 return 0;
8681 case FRAME_POINTER_REGNUM:
8682 /* This is the reverse of the soft frame pointer
8683 to hard frame pointer elimination below. */
8684 if (call_saved_registers == 0 && stack_frame == 0)
8685 return 0;
8686 return (call_saved_registers + stack_frame - 4);
8688 case ARM_HARD_FRAME_POINTER_REGNUM:
8689 /* If there is no stack frame then the hard
8690 frame pointer and the arg pointer coincide. */
8691 if (stack_frame == 0 && call_saved_registers != 0)
8692 return 0;
8693 /* FIXME: Not sure about this. Maybe we should always return 0 ? */
8694 return (frame_pointer_needed
8695 && current_function_needs_context
8696 && ! cfun->machine->uses_anonymous_args) ? 4 : 0;
8698 case STACK_POINTER_REGNUM:
8699 /* If nothing has been pushed on the stack at all
8700 then this will return -4. This *is* correct! */
8701 return call_saved_registers + stack_frame + local_vars + outgoing_args - 4;
8703 default:
8704 abort ();
8706 break;
8708 case FRAME_POINTER_REGNUM:
8709 switch (to)
8711 case THUMB_HARD_FRAME_POINTER_REGNUM:
8712 return 0;
8714 case ARM_HARD_FRAME_POINTER_REGNUM:
8715 /* The hard frame pointer points to the top entry in the
8716 stack frame. The soft frame pointer to the bottom entry
8717 in the stack frame. If there is no stack frame at all,
8718 then they are identical. */
8719 if (call_saved_registers == 0 && stack_frame == 0)
8720 return 0;
8721 return - (call_saved_registers + stack_frame - 4);
8723 case STACK_POINTER_REGNUM:
8724 return local_vars + outgoing_args;
8726 default:
8727 abort ();
8729 break;
8731 default:
8732 /* You cannot eliminate from the stack pointer.
8733 In theory you could eliminate from the hard frame
8734 pointer to the stack pointer, but this will never
8735 happen, since if a stack frame is not needed the
8736 hard frame pointer will never be used. */
8737 abort ();
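/* Annotation, not part of the original source: a worked example of the
   ARG_POINTER_REGNUM -> STACK_POINTER_REGNUM case above, with assumed
   numbers. For a function with two call-saved core registers
   (call_saved_registers == 8), no frame pointer (stack_frame == 0),
   16 bytes of locals and 8 bytes of outgoing arguments, the offset is
   8 + 0 + 16 + 8 - 4 == 28; the corresponding FRAME_POINTER_REGNUM ->
   STACK_POINTER_REGNUM offset is simply 16 + 8 == 24. */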
8741 /* Calculate the size of the stack frame, taking into account any
8742 padding that is required to ensure stack-alignment. */
8744 HOST_WIDE_INT
8745 arm_get_frame_size ()
8747 int regno;
8749 int base_size = ROUND_UP_WORD (get_frame_size ());
8750 int entry_size = 0;
8751 unsigned long func_type = arm_current_func_type ();
8752 int leaf;
8754 if (! TARGET_ARM)
8755 abort ();
8757 if (! TARGET_ATPCS)
8758 return base_size;
8760 /* We need to know if we are a leaf function. Unfortunately, it
8761 is possible to be called after start_sequence has been called,
8762 which causes get_insns to return the insns for the sequence,
8763 not the function, which will cause leaf_function_p to return
8764 the incorrect result.
8766 To work around this, we cache the computed frame size. This
8767 works because we will only be calling RTL expanders that need
8768 to know about leaf functions once reload has completed, and the
8769 frame size cannot be changed after that time, so we can safely
8770 use the cached value. */
8772 if (reload_completed)
8773 return cfun->machine->frame_size;
8775 leaf = leaf_function_p ();
8777 /* A leaf function does not need any stack alignment if it has nothing
8778 on the stack. */
8779 if (leaf && base_size == 0)
8781 cfun->machine->frame_size = 0;
8782 return 0;
8785 /* We know that SP will be word aligned on entry, and we must
8786 preserve that condition at any subroutine call. But those are
8787 the only constraints. */
8789 /* Space for variadic functions. */
8790 if (current_function_pretend_args_size)
8791 entry_size += current_function_pretend_args_size;
8793 /* Space for saved registers. */
8794 entry_size += bit_count (arm_compute_save_reg_mask ()) * 4;
8796 /* Space for saved FPA registers. */
8797 if (! IS_VOLATILE (func_type))
8799 for (regno = FIRST_ARM_FP_REGNUM; regno <= LAST_ARM_FP_REGNUM; regno++)
8800 if (regs_ever_live[regno] && ! call_used_regs[regno])
8801 entry_size += 12;
8804 if ((entry_size + base_size + current_function_outgoing_args_size) & 7)
8805 base_size += 4;
8806 if ((entry_size + base_size + current_function_outgoing_args_size) & 7)
8807 abort ();
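/* Annotation, not part of the original source: a worked example of the
   ATPCS 8-byte rounding above, with assumed sizes. If entry_size == 20,
   base_size == 8 and there are no outgoing args, the total of 28 is not
   a multiple of 8, so base_size is padded to 12 and the recheck sees a
   total of 32, which passes. */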
8809 cfun->machine->frame_size = base_size;
8811 return base_size;
8814 /* Generate the prologue instructions for entry into an ARM function. */
8816 void
8817 arm_expand_prologue ()
8819 int reg;
8820 rtx amount;
8821 rtx insn;
8822 rtx ip_rtx;
8823 unsigned long live_regs_mask;
8824 unsigned long func_type;
8825 int fp_offset = 0;
8826 int saved_pretend_args = 0;
8827 unsigned int args_to_push;
8829 func_type = arm_current_func_type ();
8831 /* Naked functions don't have prologues. */
8832 if (IS_NAKED (func_type))
8833 return;
8835 /* Make a copy of c_f_p_a_s as we may need to modify it locally. */
8836 args_to_push = current_function_pretend_args_size;
8838 /* Compute which registers we will have to save onto the stack. */
8839 live_regs_mask = arm_compute_save_reg_mask ();
8841 ip_rtx = gen_rtx_REG (SImode, IP_REGNUM);
8843 if (frame_pointer_needed)
8845 if (IS_INTERRUPT (func_type))
8847 /* Interrupt functions must not corrupt any registers.
8848 Creating a frame pointer however, corrupts the IP
8849 register, so we must push it first. */
8850 insn = emit_multi_reg_push (1 << IP_REGNUM);
8852 /* Do not set RTX_FRAME_RELATED_P on this insn.
8853 The dwarf stack unwinding code only wants to see one
8854 stack decrement per function, and this is not it. If
8855 this instruction is labeled as being part of the frame
8856 creation sequence then dwarf2out_frame_debug_expr will
8857 abort when it encounters the assignment of IP to FP
8858 later on, since the use of SP here establishes SP as
8859 the CFA register and not IP.
8861 Anyway this instruction is not really part of the stack
8862 frame creation although it is part of the prologue. */
8864 else if (IS_NESTED (func_type))
8866 /* The static chain register is the same as the IP register,
8867 which is used as a scratch register during stack frame creation.
8868 To get around this we need to find somewhere to store IP
8869 whilst the frame is being created. We try the following
8870 places in order:
8872 1. The last argument register.
8873 2. A slot on the stack above the frame. (This only
8874 works if the function is not a varargs function).
8875 3. Register r3, after pushing the argument registers
8876 onto the stack.
8878 Note - we only need to tell the dwarf2 backend about the SP
8879 adjustment in the second variant; the static chain register
8880 doesn't need to be unwound, as it doesn't contain a value
8881 inherited from the caller. */
8883 if (regs_ever_live[3] == 0)
8885 insn = gen_rtx_REG (SImode, 3);
8886 insn = gen_rtx_SET (SImode, insn, ip_rtx);
8887 insn = emit_insn (insn);
8889 else if (args_to_push == 0)
8891 rtx dwarf;
8892 insn = gen_rtx_PRE_DEC (SImode, stack_pointer_rtx);
8893 insn = gen_rtx_MEM (SImode, insn);
8894 insn = gen_rtx_SET (VOIDmode, insn, ip_rtx);
8895 insn = emit_insn (insn);
8897 fp_offset = 4;
8899 /* Just tell the dwarf backend that we adjusted SP. */
8900 dwarf = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
8901 gen_rtx_PLUS (SImode, stack_pointer_rtx,
8902 GEN_INT (-fp_offset)));
8903 RTX_FRAME_RELATED_P (insn) = 1;
8904 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8905 dwarf, REG_NOTES (insn));
8907 else
8909 /* Store the args on the stack. */
8910 if (cfun->machine->uses_anonymous_args)
8911 insn = emit_multi_reg_push
8912 ((0xf0 >> (args_to_push / 4)) & 0xf);
8913 else
8914 insn = emit_insn
8915 (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
8916 GEN_INT (- args_to_push)));
8918 RTX_FRAME_RELATED_P (insn) = 1;
8920 saved_pretend_args = 1;
8921 fp_offset = args_to_push;
8922 args_to_push = 0;
8924 /* Now reuse r3 to preserve IP. */
8925 insn = gen_rtx_REG (SImode, 3);
8926 insn = gen_rtx_SET (SImode, insn, ip_rtx);
8927 (void) emit_insn (insn);
8931 if (fp_offset)
8933 insn = gen_rtx_PLUS (SImode, stack_pointer_rtx, GEN_INT (fp_offset));
8934 insn = gen_rtx_SET (SImode, ip_rtx, insn);
8936 else
8937 insn = gen_movsi (ip_rtx, stack_pointer_rtx);
8939 insn = emit_insn (insn);
8940 RTX_FRAME_RELATED_P (insn) = 1;
8943 if (args_to_push)
8945 /* Push the argument registers, or reserve space for them. */
8946 if (cfun->machine->uses_anonymous_args)
8947 insn = emit_multi_reg_push
8948 ((0xf0 >> (args_to_push / 4)) & 0xf);
8949 else
8950 insn = emit_insn
8951 (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
8952 GEN_INT (- args_to_push)));
8953 RTX_FRAME_RELATED_P (insn) = 1;
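/* Annotation, not part of the original source: the mask expression used
   above selects which argument registers hold the pretend args. With an
   assumed args_to_push of 8, (0xf0 >> (8 / 4)) & 0xf == 0xc, i.e. the
   set {r2, r3} -- the last two of the four argument registers r0-r3. */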
8956 /* If this is an interrupt service routine, and the link register
8957 is going to be pushed, and we are not creating a stack frame,
8958 (which would involve an extra push of IP and a pop in the epilogue)
8959 subtracting four from LR now will mean that the function return
8960 can be done with a single instruction. */
8961 if ((func_type == ARM_FT_ISR || func_type == ARM_FT_FIQ)
8962 && (live_regs_mask & (1 << LR_REGNUM)) != 0
8963 && ! frame_pointer_needed)
8964 emit_insn (gen_rtx_SET (SImode,
8965 gen_rtx_REG (SImode, LR_REGNUM),
8966 gen_rtx_PLUS (SImode,
8967 gen_rtx_REG (SImode, LR_REGNUM),
8968 GEN_INT (-4))));
8970 if (live_regs_mask)
8972 insn = emit_multi_reg_push (live_regs_mask);
8973 RTX_FRAME_RELATED_P (insn) = 1;
8976 if (! IS_VOLATILE (func_type))
8978 /* Save any floating point call-saved registers used by this function. */
8979 if (arm_fpu_arch == FP_SOFT2)
8981 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg --)
8982 if (regs_ever_live[reg] && !call_used_regs[reg])
8984 insn = gen_rtx_PRE_DEC (XFmode, stack_pointer_rtx);
8985 insn = gen_rtx_MEM (XFmode, insn);
8986 insn = emit_insn (gen_rtx_SET (VOIDmode, insn,
8987 gen_rtx_REG (XFmode, reg)));
8988 RTX_FRAME_RELATED_P (insn) = 1;
8991 else
8993 int start_reg = LAST_ARM_FP_REGNUM;
8995 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg --)
8997 if (regs_ever_live[reg] && !call_used_regs[reg])
8999 if (start_reg - reg == 3)
9001 insn = emit_sfm (reg, 4);
9002 RTX_FRAME_RELATED_P (insn) = 1;
9003 start_reg = reg - 1;
9006 else
9008 if (start_reg != reg)
9010 insn = emit_sfm (reg + 1, start_reg - reg);
9011 RTX_FRAME_RELATED_P (insn) = 1;
9013 start_reg = reg - 1;
9017 if (start_reg != reg)
9019 insn = emit_sfm (reg + 1, start_reg - reg);
9020 RTX_FRAME_RELATED_P (insn) = 1;
9025 if (frame_pointer_needed)
9027 /* Create the new frame pointer. */
9028 insn = GEN_INT (-(4 + args_to_push + fp_offset));
9029 insn = emit_insn (gen_addsi3 (hard_frame_pointer_rtx, ip_rtx, insn));
9030 RTX_FRAME_RELATED_P (insn) = 1;
9032 if (IS_NESTED (func_type))
9034 /* Recover the static chain register. */
9035 if (regs_ever_live [3] == 0
9036 || saved_pretend_args)
9037 insn = gen_rtx_REG (SImode, 3);
9038 else /* if (current_function_pretend_args_size == 0) */
9040 insn = gen_rtx_PLUS (SImode, hard_frame_pointer_rtx, GEN_INT (4));
9041 insn = gen_rtx_MEM (SImode, insn);
9044 emit_insn (gen_rtx_SET (SImode, ip_rtx, insn));
9045 /* Add a USE to stop propagate_one_insn() from barfing. */
9046 emit_insn (gen_prologue_use (ip_rtx));
9050 amount = GEN_INT (-(arm_get_frame_size ()
9051 + current_function_outgoing_args_size));
9053 if (amount != const0_rtx)
9055 /* This add can produce multiple insns for a large constant, so we
9056 need to get tricky. */
9057 rtx last = get_last_insn ();
9058 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
9059 amount));
9060 do
9062 last = last ? NEXT_INSN (last) : get_insns ();
9063 RTX_FRAME_RELATED_P (last) = 1;
9065 while (last != insn);
9067 /* If the frame pointer is needed, emit a special barrier that
9068 will prevent the scheduler from moving stores to the frame
9069 before the stack adjustment. */
9070 if (frame_pointer_needed)
9071 insn = emit_insn (gen_stack_tie (stack_pointer_rtx,
9072 hard_frame_pointer_rtx));
9075 /* If we are profiling, make sure no instructions are scheduled before
9076 the call to mcount. Similarly if the user has requested no
9077 scheduling in the prologue. */
9078 if (current_function_profile || TARGET_NO_SCHED_PRO)
9079 emit_insn (gen_blockage ());
9081 /* If the link register is being kept alive, with the return address in it,
9082 then make sure that it does not get reused by the ce2 pass. */
9083 if ((live_regs_mask & (1 << LR_REGNUM)) == 0)
9085 emit_insn (gen_prologue_use (gen_rtx_REG (SImode, LR_REGNUM)));
9086 cfun->machine->lr_save_eliminated = 1;
9090 /* If CODE is 'd', then the X is a condition operand and the instruction
9091 should only be executed if the condition is true.
9092 If CODE is 'D', then the X is a condition operand and the instruction
9093 should only be executed if the condition is false: however, if the mode
9094 of the comparison is CCFPEmode, then always execute the instruction -- we
9095 do this because in these circumstances !GE does not necessarily imply LT;
9096 in these cases the instruction pattern will take care to make sure that
9097 an instruction containing %d will follow, thereby undoing the effects of
9098 doing this instruction unconditionally.
9099 If CODE is 'N' then X is a floating point operand that must be negated
9100 before output.
9101 If CODE is 'B' then output a bitwise inverted value of X (a const int).
9102 If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */
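/* Annotation, not part of the original comment: two small examples of
   the codes handled below. For an assumed DImode value held in r4,
   '%M' prints "{r4-r5}"; '%B' applied to (const_int 0) prints -1, the
   sign-extended bitwise inverse. */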
9104 void
9105 arm_print_operand (stream, x, code)
9106 FILE * stream;
9107 rtx x;
9108 int code;
9110 switch (code)
9112 case '@':
9113 fputs (ASM_COMMENT_START, stream);
9114 return;
9116 case '_':
9117 fputs (user_label_prefix, stream);
9118 return;
9120 case '|':
9121 fputs (REGISTER_PREFIX, stream);
9122 return;
9124 case '?':
9125 if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
9127 if (TARGET_THUMB || current_insn_predicate != NULL)
9128 abort ();
9130 fputs (arm_condition_codes[arm_current_cc], stream);
9132 else if (current_insn_predicate)
9134 enum arm_cond_code code;
9136 if (TARGET_THUMB)
9137 abort ();
9139 code = get_arm_condition_code (current_insn_predicate);
9140 fputs (arm_condition_codes[code], stream);
9142 return;
9144 case 'N':
9146 REAL_VALUE_TYPE r;
9147 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
9148 r = REAL_VALUE_NEGATE (r);
9149 fprintf (stream, "%s", fp_const_from_val (&r));
9151 return;
9153 case 'B':
9154 if (GET_CODE (x) == CONST_INT)
9156 HOST_WIDE_INT val;
9157 val = ARM_SIGN_EXTEND (~INTVAL (x));
9158 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
9160 else
9162 putc ('~', stream);
9163 output_addr_const (stream, x);
9165 return;
9167 case 'i':
9168 fprintf (stream, "%s", arithmetic_instr (x, 1));
9169 return;
9171 case 'I':
9172 fprintf (stream, "%s", arithmetic_instr (x, 0));
9173 return;
9175 case 'S':
9177 HOST_WIDE_INT val;
9178 const char * shift = shift_op (x, &val);
9180 if (shift)
9182 fprintf (stream, ", %s ", shift_op (x, &val));
9183 if (val == -1)
9184 arm_print_operand (stream, XEXP (x, 1), 0);
9185 else
9187 fputc ('#', stream);
9188 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
9192 return;
9194 /* An explanation of the 'Q', 'R' and 'H' register operands:
9196 In a pair of registers containing a DI or DF value the 'Q'
9197 operand returns the register number of the register containing
9198 the least significant part of the value. The 'R' operand returns
9199 the register number of the register containing the most
9200 significant part of the value.
9202 The 'H' operand returns the higher of the two register numbers.
9203 On a run where WORDS_BIG_ENDIAN is true the 'H' operand is the
9204 same as the 'Q' operand, since the most significant part of the
9205 value is held in the lower number register. The reverse is true
9206 on systems where WORDS_BIG_ENDIAN is false.
9208 The purpose of these operands is to distinguish between cases
9209 where the endian-ness of the values is important (for example
9210 when they are added together), and cases where the endian-ness
9211 is irrelevant, but the order of register operations is important.
9212 For example when loading a value from memory into a register
9213 pair, the endian-ness does not matter. Provided that the value
9214 from the lower memory address is put into the lower numbered
9215 register, and the value from the higher address is put into the
9216 higher numbered register, the load will work regardless of whether
9217 the value being loaded is big-wordian or little-wordian. The
9218 order of the two register loads can matter however, if the address
9219 of the memory location is actually held in one of the registers
9220 being overwritten by the load. */
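/* Annotation, not part of the original comment: for a DImode value in
   {r0, r1} on a little-endian (WORDS_BIG_ENDIAN false) target, 'Q'
   prints r0, 'R' prints r1 and 'H' prints r1. With WORDS_BIG_ENDIAN
   true, 'Q' prints r1 and 'R' prints r0, while 'H' still prints r1. */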
9221 case 'Q':
9222 if (REGNO (x) > LAST_ARM_REGNUM)
9223 abort ();
9224 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 1 : 0));
9225 return;
9227 case 'R':
9228 if (REGNO (x) > LAST_ARM_REGNUM)
9229 abort ();
9230 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 0 : 1));
9231 return;
9233 case 'H':
9234 if (REGNO (x) > LAST_ARM_REGNUM)
9235 abort ();
9236 asm_fprintf (stream, "%r", REGNO (x) + 1);
9237 return;
9239 case 'm':
9240 asm_fprintf (stream, "%r",
9241 GET_CODE (XEXP (x, 0)) == REG
9242 ? REGNO (XEXP (x, 0)) : REGNO (XEXP (XEXP (x, 0), 0)));
9243 return;
9245 case 'M':
9246 asm_fprintf (stream, "{%r-%r}",
9247 REGNO (x),
9248 REGNO (x) + ARM_NUM_REGS (GET_MODE (x)) - 1);
9249 return;
9251 case 'd':
9252 /* CONST_TRUE_RTX means always -- that's the default. */
9253 if (x == const_true_rtx)
9254 return;
9256 if (TARGET_ARM)
9257 fputs (arm_condition_codes[get_arm_condition_code (x)],
9258 stream);
9259 else
9260 fputs (thumb_condition_code (x, 0), stream);
9261 return;
9263 case 'D':
9264 /* CONST_TRUE_RTX means not always -- ie never. We shouldn't ever
9265 want to do that. */
9266 if (x == const_true_rtx)
9267 abort ();
9269 if (TARGET_ARM)
9270 fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
9271 (get_arm_condition_code (x))],
9272 stream);
9273 else
9274 fputs (thumb_condition_code (x, 1), stream);
9275 return;
9277 default:
9278 if (x == 0)
9279 abort ();
9281 if (GET_CODE (x) == REG)
9282 asm_fprintf (stream, "%r", REGNO (x));
9283 else if (GET_CODE (x) == MEM)
9285 output_memory_reference_mode = GET_MODE (x);
9286 output_address (XEXP (x, 0));
9288 else if (GET_CODE (x) == CONST_DOUBLE)
9289 fprintf (stream, "#%s", fp_immediate_constant (x));
9290 else if (GET_CODE (x) == NEG)
9291 abort (); /* This should never happen now. */
9292 else
9294 fputc ('#', stream);
9295 output_addr_const (stream, x);
9300 #ifndef AOF_ASSEMBLER
9301 /* Target hook for assembling integer objects. The ARM version needs to
9302 handle word-sized values specially. */
9304 static bool
9305 arm_assemble_integer (x, size, aligned_p)
9306 rtx x;
9307 unsigned int size;
9308 int aligned_p;
9310 if (size == UNITS_PER_WORD && aligned_p)
9312 fputs ("\t.word\t", asm_out_file);
9313 output_addr_const (asm_out_file, x);
9315 /* Mark symbols as position independent. We only do this in the
9316 .text segment, not in the .data segment. */
9317 if (NEED_GOT_RELOC && flag_pic && making_const_table &&
9318 (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF))
9320 if (GET_CODE (x) == SYMBOL_REF
9321 && (CONSTANT_POOL_ADDRESS_P (x)
9322 || ENCODED_SHORT_CALL_ATTR_P (XSTR (x, 0))))
9323 fputs ("(GOTOFF)", asm_out_file);
9324 else if (GET_CODE (x) == LABEL_REF)
9325 fputs ("(GOTOFF)", asm_out_file);
9326 else
9327 fputs ("(GOT)", asm_out_file);
9329 fputc ('\n', asm_out_file);
9330 return true;
9333 return default_assemble_integer (x, size, aligned_p);
9335 #endif
9337 /* A finite state machine takes care of noticing whether or not instructions
9338 can be conditionally executed, and thus decrease execution time and code
9339 size by deleting branch instructions. The fsm is controlled by
9340 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
9342 /* The state of the fsm controlling condition codes are:
9343 0: normal, do nothing special
9344 1: make ASM_OUTPUT_OPCODE not output this instruction
9345 2: make ASM_OUTPUT_OPCODE not output this instruction
9346 3: make instructions conditional
9347 4: make instructions conditional
9349 State transitions (state->state by whom under condition):
9350 0 -> 1 final_prescan_insn if the `target' is a label
9351 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
9352 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
9353 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
9354 3 -> 0 (*targetm.asm_out.internal_label) if the `target' label is reached
9355 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
9356 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
9357 (the target insn is arm_target_insn).
9359 If the jump clobbers the conditions then we use states 2 and 4.
9361 A similar thing can be done with conditional return insns.
9363 XXX In case the `target' is an unconditional branch, this conditionalising
9364 of the instructions always reduces code size, but not always execution
9365 time. But then, I want to reduce the code size to somewhere near what
9366 /bin/cc produces. */
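/* Annotation, not part of the original comment: a typical transformation
   performed by this fsm, assuming the skipped body is within
   max_insns_skipped. A sequence such as

        cmp     r0, #0
        beq     .L1
        add     r1, r1, #1
   .L1:

   is emitted instead as

        cmp     r0, #0
        addne   r1, r1, #1

   eliminating the branch entirely. */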
9368 /* Returns the index of the ARM condition code string in
9369 `arm_condition_codes'. COMPARISON should be an rtx like
9370 `(eq (...) (...))'. */
9372 static enum arm_cond_code
9373 get_arm_condition_code (comparison)
9374 rtx comparison;
9376 enum machine_mode mode = GET_MODE (XEXP (comparison, 0));
9377 int code;
9378 enum rtx_code comp_code = GET_CODE (comparison);
9380 if (GET_MODE_CLASS (mode) != MODE_CC)
9381 mode = SELECT_CC_MODE (comp_code, XEXP (comparison, 0),
9382 XEXP (comparison, 1));
9384 switch (mode)
9386 case CC_DNEmode: code = ARM_NE; goto dominance;
9387 case CC_DEQmode: code = ARM_EQ; goto dominance;
9388 case CC_DGEmode: code = ARM_GE; goto dominance;
9389 case CC_DGTmode: code = ARM_GT; goto dominance;
9390 case CC_DLEmode: code = ARM_LE; goto dominance;
9391 case CC_DLTmode: code = ARM_LT; goto dominance;
9392 case CC_DGEUmode: code = ARM_CS; goto dominance;
9393 case CC_DGTUmode: code = ARM_HI; goto dominance;
9394 case CC_DLEUmode: code = ARM_LS; goto dominance;
9395 case CC_DLTUmode: code = ARM_CC;
9397 dominance:
9398 if (comp_code != EQ && comp_code != NE)
9399 abort ();
9401 if (comp_code == EQ)
9402 return ARM_INVERSE_CONDITION_CODE (code);
9403 return code;
9405 case CC_NOOVmode:
9406 switch (comp_code)
9408 case NE: return ARM_NE;
9409 case EQ: return ARM_EQ;
9410 case GE: return ARM_PL;
9411 case LT: return ARM_MI;
9412 default: abort ();
9415 case CC_Zmode:
9416 switch (comp_code)
9418 case NE: return ARM_NE;
9419 case EQ: return ARM_EQ;
9420 default: abort ();
9423 case CCFPEmode:
9424 case CCFPmode:
9425 /* These encodings assume that AC=1 in the FPA system control
9426 byte. This allows us to handle all cases except UNEQ and
9427 LTGT. */
9428 switch (comp_code)
9430 case GE: return ARM_GE;
9431 case GT: return ARM_GT;
9432 case LE: return ARM_LS;
9433 case LT: return ARM_MI;
9434 case NE: return ARM_NE;
9435 case EQ: return ARM_EQ;
9436 case ORDERED: return ARM_VC;
9437 case UNORDERED: return ARM_VS;
9438 case UNLT: return ARM_LT;
9439 case UNLE: return ARM_LE;
9440 case UNGT: return ARM_HI;
9441 case UNGE: return ARM_PL;
9442 /* UNEQ and LTGT do not have a representation. */
9443 case UNEQ: /* Fall through. */
9444 case LTGT: /* Fall through. */
9445 default: abort ();
9448 case CC_SWPmode:
9449 switch (comp_code)
9451 case NE: return ARM_NE;
9452 case EQ: return ARM_EQ;
9453 case GE: return ARM_LE;
9454 case GT: return ARM_LT;
9455 case LE: return ARM_GE;
9456 case LT: return ARM_GT;
9457 case GEU: return ARM_LS;
9458 case GTU: return ARM_CC;
9459 case LEU: return ARM_CS;
9460 case LTU: return ARM_HI;
9461 default: abort ();
9464 case CC_Cmode:
9465 switch (comp_code)
9467 case LTU: return ARM_CS;
9468 case GEU: return ARM_CC;
9469 default: abort ();
9472 case CCmode:
9473 switch (comp_code)
9475 case NE: return ARM_NE;
9476 case EQ: return ARM_EQ;
9477 case GE: return ARM_GE;
9478 case GT: return ARM_GT;
9479 case LE: return ARM_LE;
9480 case LT: return ARM_LT;
9481 case GEU: return ARM_CS;
9482 case GTU: return ARM_HI;
9483 case LEU: return ARM_LS;
9484 case LTU: return ARM_CC;
9485 default: abort ();
9488 default: abort ();
9491 abort ();
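/* Annotation, not part of the original source: in the CC_SWPmode case
   above, the comparison operands were swapped to suit an ARM
   instruction, so the returned code is reflected rather than inverted:
   GT becomes LT, GEU becomes LS, and so on, while EQ and NE are
   unaffected. */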
9495 void
9496 arm_final_prescan_insn (insn)
9497 rtx insn;
9499 /* BODY will hold the body of INSN. */
9500 rtx body = PATTERN (insn);
9502 /* This will be 1 if trying to repeat the trick, and things need to be
9503 reversed if it appears to fail. */
9504 int reverse = 0;
9506 /* JUMP_CLOBBERS being non-zero means that the condition codes are
9507 clobbered if the branch is taken, even if the rtl suggests otherwise. It also
9508 means that we have to grub around within the jump expression to find
9509 out what the conditions are when the jump isn't taken. */
9510 int jump_clobbers = 0;
9512 /* If we start with a return insn, we only succeed if we find another one. */
9513 int seeking_return = 0;
9515 /* START_INSN will hold the insn from where we start looking. This is the
9516 first insn after the following code_label if REVERSE is true. */
9517 rtx start_insn = insn;
9519 /* If in state 4, check if the target branch is reached, in order to
9520 change back to state 0. */
9521 if (arm_ccfsm_state == 4)
9523 if (insn == arm_target_insn)
9525 arm_target_insn = NULL;
9526 arm_ccfsm_state = 0;
9528 return;
9531 /* If in state 3, it is possible to repeat the trick, if this insn is an
9532 unconditional branch to a label, and immediately following this branch
9533 is the previous target label which is only used once, and the label this
9534 branch jumps to is not too far off. */
9535 if (arm_ccfsm_state == 3)
9537 if (simplejump_p (insn))
9539 start_insn = next_nonnote_insn (start_insn);
9540 if (GET_CODE (start_insn) == BARRIER)
9542 /* XXX Isn't this always a barrier? */
9543 start_insn = next_nonnote_insn (start_insn);
9545 if (GET_CODE (start_insn) == CODE_LABEL
9546 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
9547 && LABEL_NUSES (start_insn) == 1)
9548 reverse = TRUE;
9549 else
9550 return;
9552 else if (GET_CODE (body) == RETURN)
9554 start_insn = next_nonnote_insn (start_insn);
9555 if (GET_CODE (start_insn) == BARRIER)
9556 start_insn = next_nonnote_insn (start_insn);
9557 if (GET_CODE (start_insn) == CODE_LABEL
9558 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
9559 && LABEL_NUSES (start_insn) == 1)
9561 reverse = TRUE;
9562 seeking_return = 1;
9564 else
9565 return;
9567 else
9568 return;
9571 if (arm_ccfsm_state != 0 && !reverse)
9572 abort ();
9573 if (GET_CODE (insn) != JUMP_INSN)
9574 return;
9576 /* This jump might be paralleled with a clobber of the condition codes;
9577 the jump should always come first. */
9578 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
9579 body = XVECEXP (body, 0, 0);
9581 #if 0
9582 /* If this is a conditional return then we don't want to know */
9583 if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
9584 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
9585 && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
9586 || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
9587 return;
9588 #endif
9590 if (reverse
9591 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
9592 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
9594 int insns_skipped;
9595 int fail = FALSE, succeed = FALSE;
9596 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
9597 int then_not_else = TRUE;
9598 rtx this_insn = start_insn, label = 0;
9600 /* If the jump cannot be done with one instruction, we cannot
9601 conditionally execute the instruction in the inverse case. */
9602 if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
9604 jump_clobbers = 1;
9605 return;
9608 /* Register the insn jumped to. */
9609 if (reverse)
9611 if (!seeking_return)
9612 label = XEXP (SET_SRC (body), 0);
9614 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
9615 label = XEXP (XEXP (SET_SRC (body), 1), 0);
9616 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
9618 label = XEXP (XEXP (SET_SRC (body), 2), 0);
9619 then_not_else = FALSE;
9621 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
9622 seeking_return = 1;
9623 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
9625 seeking_return = 1;
9626 then_not_else = FALSE;
9628 else
9629 abort ();
9631 /* See how many insns this branch skips, and what kind of insns. If all
9632 insns are okay, and the label or unconditional branch to the same
9633 label is not too far away, succeed. */
9634 for (insns_skipped = 0;
9635 !fail && !succeed && insns_skipped++ < max_insns_skipped;)
9637 rtx scanbody;
9639 this_insn = next_nonnote_insn (this_insn);
9640 if (!this_insn)
9641 break;
9643 switch (GET_CODE (this_insn))
9645 case CODE_LABEL:
9646 /* Succeed if it is the target label, otherwise fail since
9647 control falls in from somewhere else. */
9648 if (this_insn == label)
9650 if (jump_clobbers)
9652 arm_ccfsm_state = 2;
9653 this_insn = next_nonnote_insn (this_insn);
9655 else
9656 arm_ccfsm_state = 1;
9657 succeed = TRUE;
9659 else
9660 fail = TRUE;
9661 break;
9663 case BARRIER:
9664 /* Succeed if the following insn is the target label.
9665 Otherwise fail.
9666 If return insns are used then the last insn in a function
9667 will be a barrier. */
9668 this_insn = next_nonnote_insn (this_insn);
9669 if (this_insn && this_insn == label)
9671 if (jump_clobbers)
9673 arm_ccfsm_state = 2;
9674 this_insn = next_nonnote_insn (this_insn);
9676 else
9677 arm_ccfsm_state = 1;
9678 succeed = TRUE;
9680 else
9681 fail = TRUE;
9682 break;
9684 case CALL_INSN:
9685 /* If using 32-bit addresses the cc is not preserved over
9686 calls. */
9687 if (TARGET_APCS_32)
9689 /* Succeed if the following insn is the target label,
9690 or if the following two insns are a barrier and
9691 the target label. */
9692 this_insn = next_nonnote_insn (this_insn);
9693 if (this_insn && GET_CODE (this_insn) == BARRIER)
9694 this_insn = next_nonnote_insn (this_insn);
9696 if (this_insn && this_insn == label
9697 && insns_skipped < max_insns_skipped)
9699 if (jump_clobbers)
9701 arm_ccfsm_state = 2;
9702 this_insn = next_nonnote_insn (this_insn);
9704 else
9705 arm_ccfsm_state = 1;
9706 succeed = TRUE;
9708 else
9709 fail = TRUE;
9711 break;
9713 case JUMP_INSN:
9714 /* If this is an unconditional branch to the same label, succeed.
9715 If it is to another label, do nothing. If it is conditional,
9716 fail. */
9717 /* XXX Probably, the tests for SET and the PC are unnecessary. */
9719 scanbody = PATTERN (this_insn);
9720 if (GET_CODE (scanbody) == SET
9721 && GET_CODE (SET_DEST (scanbody)) == PC)
9723 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
9724 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
9726 arm_ccfsm_state = 2;
9727 succeed = TRUE;
9729 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
9730 fail = TRUE;
9732 /* Fail if a conditional return is undesirable (eg on a
9733 StrongARM), but still allow this if optimizing for size. */
9734 else if (GET_CODE (scanbody) == RETURN
9735 && !use_return_insn (TRUE)
9736 && !optimize_size)
9737 fail = TRUE;
9738 else if (GET_CODE (scanbody) == RETURN
9739 && seeking_return)
9741 arm_ccfsm_state = 2;
9742 succeed = TRUE;
9744 else if (GET_CODE (scanbody) == PARALLEL)
9746 switch (get_attr_conds (this_insn))
9748 case CONDS_NOCOND:
9749 break;
9750 default:
9751 fail = TRUE;
9752 break;
9755 else
9756 fail = TRUE; /* Unrecognized jump (eg epilogue). */
9758 break;
9760 case INSN:
9761 /* Instructions using or affecting the condition codes make it
9762 fail. */
9763 scanbody = PATTERN (this_insn);
9764 if (!(GET_CODE (scanbody) == SET
9765 || GET_CODE (scanbody) == PARALLEL)
9766 || get_attr_conds (this_insn) != CONDS_NOCOND)
9767 fail = TRUE;
9768 break;
9770 default:
9771 break;
9774 if (succeed)
9776 if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
9777 arm_target_label = CODE_LABEL_NUMBER (label);
9778 else if (seeking_return || arm_ccfsm_state == 2)
9780 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
9782 this_insn = next_nonnote_insn (this_insn);
9783 if (this_insn && (GET_CODE (this_insn) == BARRIER
9784 || GET_CODE (this_insn) == CODE_LABEL))
9785 abort ();
9787 if (!this_insn)
9789 /* Oh dear! We ran off the end... give up. */
9790 recog (PATTERN (insn), insn, NULL);
9791 arm_ccfsm_state = 0;
9792 arm_target_insn = NULL;
9793 return;
9795 arm_target_insn = this_insn;
9797 else
9798 abort ();
9799 if (jump_clobbers)
9801 if (reverse)
9802 abort ();
9803 arm_current_cc =
9804 get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
9805 0), 0), 1));
9806 if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
9807 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
9808 if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
9809 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
9811 else
9813 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
9814 what it was. */
9815 if (!reverse)
9816 arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
9817 0));
9820 if (reverse || then_not_else)
9821 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
9824 /* Restore recog_data (getting the attributes of other insns can
9825 destroy this array, but final.c assumes that it remains intact
9826 across this call; since the insn has been recognized already we
9827 call recog direct). */
9828 recog (PATTERN (insn), insn, NULL);
9832 /* Returns true if REGNO is a valid register
9833 for holding a quantity of type MODE. */
9835 int
9836 arm_hard_regno_mode_ok (regno, mode)
9837 unsigned int regno;
9838 enum machine_mode mode;
9840 if (GET_MODE_CLASS (mode) == MODE_CC)
9841 return regno == CC_REGNUM;
9843 if (TARGET_THUMB)
9844 /* For the Thumb we only allow values bigger than SImode in
9845 registers 0 - 6, so that there is always a second low
9846 register available to hold the upper part of the value.
9847 We probably ought to ensure that the register is the
9848 start of an even numbered register pair. */
9849 return (ARM_NUM_REGS (mode) < 2) || (regno < LAST_LO_REGNUM);
9851 if (regno <= LAST_ARM_REGNUM)
9852 /* We allow any value to be stored in the general registers. */
9853 return 1;
9855 if ( regno == FRAME_POINTER_REGNUM
9856 || regno == ARG_POINTER_REGNUM)
9857 /* We only allow integers in the fake hard registers. */
9858 return GET_MODE_CLASS (mode) == MODE_INT;
9860 /* The only registers left are the FPU registers
9861 which we only allow to hold FP values. */
9862 return GET_MODE_CLASS (mode) == MODE_FLOAT
9863 && regno >= FIRST_ARM_FP_REGNUM
9864 && regno <= LAST_ARM_FP_REGNUM;
9867 int
9868 arm_regno_class (regno)
9869 int regno;
9871 if (TARGET_THUMB)
9873 if (regno == STACK_POINTER_REGNUM)
9874 return STACK_REG;
9875 if (regno == CC_REGNUM)
9876 return CC_REG;
9877 if (regno < 8)
9878 return LO_REGS;
9879 return HI_REGS;
9882 if ( regno <= LAST_ARM_REGNUM
9883 || regno == FRAME_POINTER_REGNUM
9884 || regno == ARG_POINTER_REGNUM)
9885 return GENERAL_REGS;
9887 if (regno == CC_REGNUM)
9888 return NO_REGS;
9890 return FPU_REGS;
9893 /* Handle a special case when computing the offset
9894 of an argument from the frame pointer. */
9896 int
9897 arm_debugger_arg_offset (value, addr)
9898 int value;
9899 rtx addr;
9901 rtx insn;
9903 /* We are only interested if dbxout_parms() failed to compute the offset. */
9904 if (value != 0)
9905 return 0;
9907 /* We can only cope with the case where the address is held in a register. */
9908 if (GET_CODE (addr) != REG)
9909 return 0;
9911 /* If we are using the frame pointer to point at the argument, then
9912 an offset of 0 is correct. */
9913 if (REGNO (addr) == (unsigned) HARD_FRAME_POINTER_REGNUM)
9914 return 0;
9916 /* If we are using the stack pointer to point at the
9917 argument, then an offset of 0 is correct. */
9918 if ((TARGET_THUMB || !frame_pointer_needed)
9919 && REGNO (addr) == SP_REGNUM)
9920 return 0;
9922 /* Oh dear. The argument is pointed to by a register rather
9923 than being held in a register, or being stored at a known
9924 offset from the frame pointer. Since GDB only understands
9925 those two kinds of argument we must translate the address
9926 held in the register into an offset from the frame pointer.
9927 We do this by searching through the insns for the function
9928 looking to see where this register gets its value. If the
9929 register is initialized from the frame pointer plus an offset
9930 then we are in luck and we can continue, otherwise we give up.
9932 This code is exercised by producing debugging information
9933 for a function with arguments like this:
9935 double func (double a, double b, int c, double d) {return d;}
9937 Without this code the stab for parameter 'd' will be set to
9938 an offset of 0 from the frame pointer, rather than 8. */
9940 /* The if() statement says:
9942 If the insn is a normal instruction
9943 and if the insn is setting the value in a register
9944 and if the register being set is the register holding the address of the argument
9945 and if the address is computed by an addition
9946 that involves adding to a register
9947 which is the frame pointer
9948 a constant integer
9950 then... */
9952 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9954 if ( GET_CODE (insn) == INSN
9955 && GET_CODE (PATTERN (insn)) == SET
9956 && REGNO (XEXP (PATTERN (insn), 0)) == REGNO (addr)
9957 && GET_CODE (XEXP (PATTERN (insn), 1)) == PLUS
9958 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 0)) == REG
9959 && REGNO (XEXP (XEXP (PATTERN (insn), 1), 0)) == (unsigned) HARD_FRAME_POINTER_REGNUM
9960 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 1)) == CONST_INT)
9963 value = INTVAL (XEXP (XEXP (PATTERN (insn), 1), 1));
9965 break;
9969 if (value == 0)
9971 debug_rtx (addr);
9972 warning ("unable to compute real location of stacked parameter");
9973 value = 8; /* XXX magic hack */
9976 return value;
9979 /* Recursively search through all of the blocks in a function
9980 checking to see if any of the variables created in that
9981 function match the RTX called 'orig'. If they do then
9982 replace them with the RTX called 'new'. */
9984 static void
9985 replace_symbols_in_block (block, orig, new)
9986 tree block;
9987 rtx orig;
9988 rtx new;
9990 for (; block; block = BLOCK_CHAIN (block))
9992 tree sym;
9994 if (!TREE_USED (block))
9995 continue;
9997 for (sym = BLOCK_VARS (block); sym; sym = TREE_CHAIN (sym))
9999 if ( (DECL_NAME (sym) == 0 && TREE_CODE (sym) != TYPE_DECL)
10000 || DECL_IGNORED_P (sym)
10001 || TREE_CODE (sym) != VAR_DECL
10002 || DECL_EXTERNAL (sym)
10003 || !rtx_equal_p (DECL_RTL (sym), orig))
10005 continue;
10007 SET_DECL_RTL (sym, new);
10010 replace_symbols_in_block (BLOCK_SUBBLOCKS (block), orig, new);
10014 /* Return the number (counting from 0) of
10015 the least significant set bit in MASK. */
10017 #ifdef __GNUC__
10018 inline
10019 #endif
10020 static int
10021 number_of_first_bit_set (mask)
10022 int mask;
10024 int bit;
10026 for (bit = 0;
10027 (mask & (1 << bit)) == 0;
10028 ++bit)
10029 continue;
10031 return bit;
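/* Annotation, not part of the original source: for example,
   number_of_first_bit_set (0x18) returns 3. Note that the loop never
   terminates for a zero mask, so callers must guarantee that at least
   one bit is set. */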
10034 /* Generate code to return from a thumb function.
10035 If 'reg_containing_return_addr' is -1, then the return address is
10036 actually on the stack, at the stack pointer. */
10037 static void
10038 thumb_exit (f, reg_containing_return_addr, eh_ofs)
10039 FILE * f;
10040 int reg_containing_return_addr;
10041 rtx eh_ofs;
10043 unsigned regs_available_for_popping;
10044 unsigned regs_to_pop;
10045 int pops_needed;
10046 unsigned available;
10047 unsigned required;
10048 int mode;
10049 int size;
10050 int restore_a4 = FALSE;
10052 /* Compute the registers we need to pop. */
10053 regs_to_pop = 0;
10054 pops_needed = 0;
10056 /* There is an assumption here, that if eh_ofs is not NULL, the
10057 normal return address will have been pushed. */
10058 if (reg_containing_return_addr == -1 || eh_ofs)
10060 /* When we are generating a return for __builtin_eh_return,
10061 reg_containing_return_addr must specify the return regno. */
10062 if (eh_ofs && reg_containing_return_addr == -1)
10063 abort ();
10065 regs_to_pop |= 1 << LR_REGNUM;
10066 ++pops_needed;
10069 if (TARGET_BACKTRACE)
10071 /* Restore the (ARM) frame pointer and stack pointer. */
10072 regs_to_pop |= (1 << ARM_HARD_FRAME_POINTER_REGNUM) | (1 << SP_REGNUM);
10073 pops_needed += 2;
10076 /* If there is nothing to pop then just emit the BX instruction and
10077 return. */
10078 if (pops_needed == 0)
10080 if (eh_ofs)
10081 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
10083 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
10084 return;
10086 /* Otherwise if we are not supporting interworking and we have not created
10087 a backtrace structure and the function was not entered in ARM mode then
10088 just pop the return address straight into the PC. */
10089 else if (!TARGET_INTERWORK
10090 && !TARGET_BACKTRACE
10091 && !is_called_in_ARM_mode (current_function_decl))
10093 if (eh_ofs)
10095 asm_fprintf (f, "\tadd\t%r, #4\n", SP_REGNUM);
10096 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
10097 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
10099 else
10100 asm_fprintf (f, "\tpop\t{%r}\n", PC_REGNUM);
10102 return;
10105 /* Find out how many of the (return) argument registers we can corrupt. */
10106 regs_available_for_popping = 0;
10108 /* If returning via __builtin_eh_return, the bottom three registers
10109 all contain information needed for the return. */
10110 if (eh_ofs)
10111 size = 12;
10112 else
10114 #ifdef RTX_CODE
10115 /* If possible, deduce the registers used from the function's
10116 return value. This is more reliable than examining
10117 regs_ever_live[] because that will be set if the register is
10118 ever used in the function, not just if the register is used
10119 to hold a return value. */
10121 if (current_function_return_rtx != 0)
10122 mode = GET_MODE (current_function_return_rtx);
10123 else
10124 #endif
10125 mode = DECL_MODE (DECL_RESULT (current_function_decl));
10127 size = GET_MODE_SIZE (mode);
10129 if (size == 0)
10131 /* In a void function we can use any argument register.
10132 In a function that returns a structure on the stack
10133 we can use the second and third argument registers. */
10134 if (mode == VOIDmode)
10135 regs_available_for_popping =
10136 (1 << ARG_REGISTER (1))
10137 | (1 << ARG_REGISTER (2))
10138 | (1 << ARG_REGISTER (3));
10139 else
10140 regs_available_for_popping =
10141 (1 << ARG_REGISTER (2))
10142 | (1 << ARG_REGISTER (3));
10144 else if (size <= 4)
10145 regs_available_for_popping =
10146 (1 << ARG_REGISTER (2))
10147 | (1 << ARG_REGISTER (3));
10148 else if (size <= 8)
10149 regs_available_for_popping =
10150 (1 << ARG_REGISTER (3));
10153 /* Match registers to be popped with registers into which we pop them. */
10154 for (available = regs_available_for_popping,
10155 required = regs_to_pop;
10156 required != 0 && available != 0;
10157 available &= ~(available & - available),
10158 required &= ~(required & - required))
10159 -- pops_needed;
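/* Annotation, not part of the original source: the loop above pairs off
   one available register with one required pop per iteration, using the
   same x & -x lowest-bit idiom as elsewhere in this file. With assumed
   sets available == {r2, r3} and required == {fp, sp, lr}, pops_needed
   is reduced by two, leaving one pop still outstanding. */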
10161 /* If we have any popping registers left over, remove them. */
10162 if (available > 0)
10163 regs_available_for_popping &= ~available;
10165 /* Otherwise if we need another popping register we can use
10166 the fourth argument register. */
10167 else if (pops_needed)
10169 /* If we have not found any free argument registers and
10170 reg a4 contains the return address, we must move it. */
10171 if (regs_available_for_popping == 0
10172 && reg_containing_return_addr == LAST_ARG_REGNUM)
10174 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
10175 reg_containing_return_addr = LR_REGNUM;
10177 else if (size > 12)
10179 /* Register a4 is being used to hold part of the return value,
10180 but we have dire need of a free, low register. */
10181 restore_a4 = TRUE;
10183 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, LAST_ARG_REGNUM);
10186 if (reg_containing_return_addr != LAST_ARG_REGNUM)
10188 /* The fourth argument register is available. */
10189 regs_available_for_popping |= 1 << LAST_ARG_REGNUM;
10191 --pops_needed;
10195 /* Pop as many registers as we can. */
10196 thumb_pushpop (f, regs_available_for_popping, FALSE);
10198 /* Process the registers we popped. */
10199 if (reg_containing_return_addr == -1)
10201 /* The return address was popped into the lowest numbered register. */
10202 regs_to_pop &= ~(1 << LR_REGNUM);
10204 reg_containing_return_addr =
10205 number_of_first_bit_set (regs_available_for_popping);
10207 /* Remove this register from the mask of available registers, so that
10208 the return address will not be corrupted by further pops. */
10209 regs_available_for_popping &= ~(1 << reg_containing_return_addr);
10212 /* If we popped other registers then handle them here. */
10213 if (regs_available_for_popping)
10215 int frame_pointer;
10217 /* Work out which register currently contains the frame pointer. */
10218 frame_pointer = number_of_first_bit_set (regs_available_for_popping);
10220 /* Move it into the correct place. */
10221 asm_fprintf (f, "\tmov\t%r, %r\n",
10222 ARM_HARD_FRAME_POINTER_REGNUM, frame_pointer);
10224 /* (Temporarily) remove it from the mask of popped registers. */
10225 regs_available_for_popping &= ~(1 << frame_pointer);
10226 regs_to_pop &= ~(1 << ARM_HARD_FRAME_POINTER_REGNUM);
10228 if (regs_available_for_popping)
10230 int stack_pointer;
10232 /* We popped the stack pointer as well,
10233 find the register that contains it. */
10234 stack_pointer = number_of_first_bit_set (regs_available_for_popping);
10236 /* Move it into the stack register. */
10237 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, stack_pointer);
10239 /* At this point we have popped all necessary registers, so
10240 do not worry about restoring regs_available_for_popping
10241 to its correct value:
10243 assert (pops_needed == 0)
10244 assert (regs_available_for_popping == (1 << frame_pointer))
10245 assert (regs_to_pop == (1 << STACK_POINTER)) */
10247 else
10249 /* Since we have just moved the popped value into the frame
10250 pointer, the popping register is available for reuse, and
10251 we know that we still have the stack pointer left to pop. */
10252 regs_available_for_popping |= (1 << frame_pointer);
10256 /* If we still have registers left on the stack, but we no longer have
10257 any registers into which we can pop them, then we must move the return
10258 address into the link register and make available the register that
10259 contained it. */
10260 if (regs_available_for_popping == 0 && pops_needed > 0)
10262 regs_available_for_popping |= 1 << reg_containing_return_addr;
10264 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM,
10265 reg_containing_return_addr);
10267 reg_containing_return_addr = LR_REGNUM;
10270 /* If we have registers left on the stack then pop some more.
10271 We know that at most we will want to pop FP and SP. */
10272 if (pops_needed > 0)
10274 int popped_into;
10275 int move_to;
10277 thumb_pushpop (f, regs_available_for_popping, FALSE);
10279 /* We have popped either FP or SP.
10280 Move whichever one it is into the correct register. */
10281 popped_into = number_of_first_bit_set (regs_available_for_popping);
10282 move_to = number_of_first_bit_set (regs_to_pop);
10284 asm_fprintf (f, "\tmov\t%r, %r\n", move_to, popped_into);
10286 regs_to_pop &= ~(1 << move_to);
10288 --pops_needed;
10291 /* If we still have not popped everything then we must have only
10292 had one register available to us and we are now popping the SP. */
10293 if (pops_needed > 0)
10295 int popped_into;
10297 thumb_pushpop (f, regs_available_for_popping, FALSE);
10299 popped_into = number_of_first_bit_set (regs_available_for_popping);
10301 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, popped_into);
10303 assert (regs_to_pop == (1 << STACK_POINTER))
10304 assert (pops_needed == 1)
10308 /* If necessary restore the a4 register. */
10309 if (restore_a4)
10311 if (reg_containing_return_addr != LR_REGNUM)
10313 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
10314 reg_containing_return_addr = LR_REGNUM;
10317 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
10320 if (eh_ofs)
10321 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
10323 /* Return to caller. */
10324 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
10327 /* Emit code to push or pop registers to or from the stack. */
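/* For illustration: thumb_pushpop (f, 0x40f0, TRUE) -- bits for r4-r7
plus LR (bit 14) -- emits "push {r4, r5, r6, r7, lr}", while
thumb_pushpop (f, 0x80f0, FALSE) -- r4-r7 plus PC (bit 15) -- emits
"pop {r4, r5, r6, r7, pc}", unless interworking or backtracing forces
the PC to be popped via thumb_exit instead.  */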
10329 static void
10330 thumb_pushpop (f, mask, push)
10331 FILE * f;
10332 int mask;
10333 int push;
10335 int regno;
10336 int lo_mask = mask & 0xFF;
10338 if (lo_mask == 0 && !push && (mask & (1 << 15)))
10340 /* Special case. Do not generate a POP PC statement here, do it in
10341 thumb_exit ().  */
10342 thumb_exit (f, -1, NULL_RTX);
10343 return;
10346 fprintf (f, "\t%s\t{", push ? "push" : "pop");
10348 /* Look at the low registers first. */
10349 for (regno = 0; regno <= LAST_LO_REGNUM; regno++, lo_mask >>= 1)
10351 if (lo_mask & 1)
10353 asm_fprintf (f, "%r", regno);
10355 if ((lo_mask & ~1) != 0)
10356 fprintf (f, ", ");
10360 if (push && (mask & (1 << LR_REGNUM)))
10362 /* Catch pushing the LR. */
10363 if (mask & 0xFF)
10364 fprintf (f, ", ");
10366 asm_fprintf (f, "%r", LR_REGNUM);
10368 else if (!push && (mask & (1 << PC_REGNUM)))
10370 /* Catch popping the PC. */
10371 if (TARGET_INTERWORK || TARGET_BACKTRACE)
10373 /* The PC is never popped directly; instead
10374 it is popped into r3 and then BX is used. */
10375 fprintf (f, "}\n");
10377 thumb_exit (f, -1, NULL_RTX);
10379 return;
10381 else
10383 if (mask & 0xFF)
10384 fprintf (f, ", ");
10386 asm_fprintf (f, "%r", PC_REGNUM);
10390 fprintf (f, "}\n");
10393 void
10394 thumb_final_prescan_insn (insn)
10395 rtx insn;
10397 if (flag_print_asm_name)
10398 asm_fprintf (asm_out_file, "%@ 0x%04x\n",
10399 INSN_ADDRESSES (INSN_UID (insn)));
10403 thumb_shiftable_const (val)
10404 unsigned HOST_WIDE_INT val;
10406 unsigned HOST_WIDE_INT mask = 0xff;
10407 int i;
10409 if (val == 0) /* XXX */
10410 return 0;
10412 for (i = 0; i < 25; i++)
10413 if ((val & (mask << i)) == val)
10414 return 1;
10416 return 0;
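/* For illustration: 0x3fc00 is 0xff << 10, so the window test above
succeeds at i == 10 and the function returns 1; 0x101 needs bits 0
and 8, which no 8-bit window covers, so it returns 0.  The loop bound
of 25 lets the window reach bits 24 to 31.  */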
10419 /* Returns nonzero if the current function contains,
10420 or might contain a far jump. */
10423 thumb_far_jump_used_p (in_prologue)
10424 int in_prologue;
10426 rtx insn;
10428 /* This test is only important for leaf functions. */
10429 /* assert (!leaf_function_p ()); */
10431 /* If we have already decided that far jumps may be used,
10432 do not bother checking again, and always return true even if
10433 it turns out that they are not being used. Once we have made
10434 the decision that far jumps are present (and that hence the link
10435 register will be pushed onto the stack) we cannot go back on it. */
10436 if (cfun->machine->far_jump_used)
10437 return 1;
10439 /* If this function is not being called from the prologue/epilogue
10440 generation code then it must be being called from the
10441 INITIAL_ELIMINATION_OFFSET macro. */
10442 if (!in_prologue)
10444 /* In this case we know that we are being asked about the elimination
10445 of the arg pointer register. If that register is not being used,
10446 then there are no arguments on the stack, and we do not have to
10447 worry that a far jump might force the prologue to push the link
10448 register, changing the stack offsets. In this case we can just
10449 return false, since the presence of far jumps in the function will
10450 not affect stack offsets.
10452 If the arg pointer is live (or if it was live, but has now been
10453 eliminated and so set to dead) then we do have to test to see if
10454 the function might contain a far jump. This test can lead to some
10455 false positives, since before reload is completed, the length of
10456 branch instructions is not known, so gcc defaults to returning their
10457 longest length, which in turn sets the far jump attribute to true.
10459 A false positive will not result in bad code being generated, but it
10460 will result in a needless push and pop of the link register. We
10461 hope that this does not occur too often. */
10462 if (regs_ever_live [ARG_POINTER_REGNUM])
10463 cfun->machine->arg_pointer_live = 1;
10464 else if (!cfun->machine->arg_pointer_live)
10465 return 0;
10468 /* Check to see if the function contains a branch
10469 insn with the far jump attribute set. */
10470 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
10472 if (GET_CODE (insn) == JUMP_INSN
10473 /* Ignore tablejump patterns. */
10474 && GET_CODE (PATTERN (insn)) != ADDR_VEC
10475 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
10476 && get_attr_far_jump (insn) == FAR_JUMP_YES
10479 /* Record the fact that we have decided that
10480 the function does use far jumps. */
10481 cfun->machine->far_jump_used = 1;
10482 return 1;
10486 return 0;
10489 /* Return nonzero if FUNC must be entered in ARM mode. */
10492 is_called_in_ARM_mode (func)
10493 tree func;
10495 if (TREE_CODE (func) != FUNCTION_DECL)
10496 abort ();
10498 /* Ignore the problem about functions whose address is taken. */
10499 if (TARGET_CALLEE_INTERWORKING && TREE_PUBLIC (func))
10500 return TRUE;
10502 #ifdef ARM_PE
10503 return lookup_attribute ("interfacearm", DECL_ATTRIBUTES (func)) != NULL_TREE;
10504 #else
10505 return FALSE;
10506 #endif
10509 /* The bits which aren't usefully expanded as rtl. */
10511 const char *
10512 thumb_unexpanded_epilogue ()
10514 int regno;
10515 int live_regs_mask = 0;
10516 int high_regs_pushed = 0;
10517 int leaf_function = leaf_function_p ();
10518 int had_to_push_lr;
10519 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
10521 if (return_used_this_function)
10522 return "";
10524 if (IS_NAKED (arm_current_func_type ()))
10525 return "";
10527 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
10528 if (THUMB_REG_PUSHED_P (regno))
10529 live_regs_mask |= 1 << regno;
10531 for (regno = 8; regno < 13; regno++)
10532 if (THUMB_REG_PUSHED_P (regno))
10533 high_regs_pushed++;
10535 /* The prologue may have pushed some high registers to use as
10536 work registers. e.g. the testsuite file:
10537 gcc/testsuite/gcc.c-torture/execute/complex-2.c
10538 compiles to produce:
10539 push {r4, r5, r6, r7, lr}
10540 mov r7, r9
10541 mov r6, r8
10542 push {r6, r7}
10543 as part of the prologue. We have to undo that pushing here. */
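/* For the example above, the undoing code emitted below would be
(the low registers actually chosen depend on what is free):
pop {r3, r4}
mov r8, r3
mov r9, r4
popping the saved high-register values into free low registers and
then moving them back up.  */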
10545 if (high_regs_pushed)
10547 int mask = live_regs_mask;
10548 int next_hi_reg;
10549 int size;
10550 int mode;
10552 #ifdef RTX_CODE
10553 /* We can deduce the registers used from the function's return value.
10554 This is more reliable than examining regs_ever_live[] because that
10555 will be set if the register is ever used in the function, not just if
10556 the register is used to hold a return value. */
10558 if (current_function_return_rtx != 0)
10559 mode = GET_MODE (current_function_return_rtx);
10560 else
10561 #endif
10562 mode = DECL_MODE (DECL_RESULT (current_function_decl));
10564 size = GET_MODE_SIZE (mode);
10566 /* Unless we are returning a type of size > 12, register r3 is
10567 available. */
10568 if (size < 13)
10569 mask |= 1 << 3;
10571 if (mask == 0)
10572 /* Oh dear! We have no low registers into which we can pop
10573 high registers! */
10574 internal_error
10575 ("no low registers available for popping high registers");
10577 for (next_hi_reg = 8; next_hi_reg < 13; next_hi_reg++)
10578 if (THUMB_REG_PUSHED_P (next_hi_reg))
10579 break;
10581 while (high_regs_pushed)
10583 /* Find lo register(s) into which the high register(s) can
10584 be popped. */
10585 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
10587 if (mask & (1 << regno))
10588 high_regs_pushed--;
10589 if (high_regs_pushed == 0)
10590 break;
10593 mask &= (2 << regno) - 1; /* A noop if regno == 8 */
10595 /* Pop the values into the low register(s). */
10596 thumb_pushpop (asm_out_file, mask, 0);
10598 /* Move the value(s) into the high registers. */
10599 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
10601 if (mask & (1 << regno))
10603 asm_fprintf (asm_out_file, "\tmov\t%r, %r\n", next_hi_reg,
10604 regno);
10606 for (next_hi_reg++; next_hi_reg < 13; next_hi_reg++)
10607 if (THUMB_REG_PUSHED_P (next_hi_reg))
10608 break;
10614 had_to_push_lr = (live_regs_mask || !leaf_function
10615 || thumb_far_jump_used_p (1));
10617 if (TARGET_BACKTRACE
10618 && ((live_regs_mask & 0xFF) == 0)
10619 && regs_ever_live [LAST_ARG_REGNUM] != 0)
10621 /* The stack backtrace structure creation code had to
10622 push R7 in order to get a work register, so we pop
10623 it now. */
10624 live_regs_mask |= (1 << LAST_LO_REGNUM);
10627 if (current_function_pretend_args_size == 0 || TARGET_BACKTRACE)
10629 if (had_to_push_lr
10630 && !is_called_in_ARM_mode (current_function_decl)
10631 && !eh_ofs)
10632 live_regs_mask |= 1 << PC_REGNUM;
10634 /* Either no argument registers were pushed or a backtrace
10635 structure was created which includes an adjusted stack
10636 pointer, so just pop everything. */
10637 if (live_regs_mask)
10638 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
10640 if (eh_ofs)
10641 thumb_exit (asm_out_file, 2, eh_ofs);
10642 /* We have either just popped the return address into the
10643 PC, or it was kept in LR for the entire function, or
10644 it is still on the stack because we do not want to
10645 return by doing a pop {pc}. */
10646 else if ((live_regs_mask & (1 << PC_REGNUM)) == 0)
10647 thumb_exit (asm_out_file,
10648 (had_to_push_lr
10649 && is_called_in_ARM_mode (current_function_decl)) ?
10650 -1 : LR_REGNUM, NULL_RTX);
10652 else
10654 /* Pop everything but the return address. */
10655 live_regs_mask &= ~(1 << PC_REGNUM);
10657 if (live_regs_mask)
10658 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
10660 if (had_to_push_lr)
10661 /* Get the return address into a temporary register. */
10662 thumb_pushpop (asm_out_file, 1 << LAST_ARG_REGNUM, 0);
10664 /* Remove the argument registers that were pushed onto the stack. */
10665 asm_fprintf (asm_out_file, "\tadd\t%r, %r, #%d\n",
10666 SP_REGNUM, SP_REGNUM,
10667 current_function_pretend_args_size);
10669 if (eh_ofs)
10670 thumb_exit (asm_out_file, 2, eh_ofs);
10671 else
10672 thumb_exit (asm_out_file,
10673 had_to_push_lr ? LAST_ARG_REGNUM : LR_REGNUM, NULL_RTX);
10676 return "";
10679 /* Functions to save and restore machine-specific function data. */
10681 static struct machine_function *
10682 arm_init_machine_status ()
10684 struct machine_function *machine;
10685 machine = (machine_function *) ggc_alloc_cleared (sizeof (machine_function));
10687 #if ARM_FT_UNKNOWN != 0
10688 machine->func_type = ARM_FT_UNKNOWN;
10689 #endif
10690 return machine;
10693 /* Return an RTX indicating where the return address to the
10694 calling function can be found. */
10697 arm_return_addr (count, frame)
10698 int count;
10699 rtx frame ATTRIBUTE_UNUSED;
10701 if (count != 0)
10702 return NULL_RTX;
10704 if (TARGET_APCS_32)
10705 return get_hard_reg_initial_val (Pmode, LR_REGNUM);
10706 else
10708 rtx lr = gen_rtx_AND (Pmode, gen_rtx_REG (Pmode, LR_REGNUM),
10709 GEN_INT (RETURN_ADDR_MASK26));
10710 return get_func_hard_reg_initial_val (cfun, lr);
10714 /* Do anything needed before RTL is emitted for each function. */
10716 void
10717 arm_init_expanders ()
10719 /* Arrange to initialize and mark the machine per-function status. */
10720 init_machine_status = arm_init_machine_status;
10723 HOST_WIDE_INT
10724 thumb_get_frame_size ()
10726 int regno;
10728 int base_size = ROUND_UP_WORD (get_frame_size ());
10729 int count_regs = 0;
10730 int entry_size = 0;
10731 int leaf;
10733 if (! TARGET_THUMB)
10734 abort ();
10736 if (! TARGET_ATPCS)
10737 return base_size;
10739 /* We need to know if we are a leaf function. Unfortunately, it
10740 is possible to be called after start_sequence has been called,
10741 which causes get_insns to return the insns for the sequence,
10742 not those of the function, which will cause leaf_function_p to
10743 return an incorrect result.
10745 To work around this, we cache the computed frame size. This
10746 works because we will only be calling RTL expanders that need
10747 to know about leaf functions once reload has completed, and the
10748 frame size cannot be changed after that time, so we can safely
10749 use the cached value. */
10751 if (reload_completed)
10752 return cfun->machine->frame_size;
10754 leaf = leaf_function_p ();
10756 /* A leaf function does not need any stack alignment if it has nothing
10757 on the stack. */
10758 if (leaf && base_size == 0)
10760 cfun->machine->frame_size = 0;
10761 return 0;
10764 /* We know that SP will be word aligned on entry, and we must
10765 preserve that condition at any subroutine call. But those are
10766 the only constraints. */
10768 /* Space for variadic functions. */
10769 if (current_function_pretend_args_size)
10770 entry_size += current_function_pretend_args_size;
10772 /* Space for pushed lo registers. */
10773 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
10774 if (THUMB_REG_PUSHED_P (regno))
10775 count_regs++;
10777 /* Space for backtrace structure. */
10778 if (TARGET_BACKTRACE)
10780 if (count_regs == 0 && regs_ever_live[LAST_ARG_REGNUM] != 0)
10781 entry_size += 20;
10782 else
10783 entry_size += 16;
10786 if (count_regs || !leaf || thumb_far_jump_used_p (1))
10787 count_regs++; /* LR */
10789 entry_size += count_regs * 4;
10790 count_regs = 0;
10792 /* Space for pushed hi regs. */
10793 for (regno = 8; regno < 13; regno++)
10794 if (THUMB_REG_PUSHED_P (regno))
10795 count_regs++;
10797 entry_size += count_regs * 4;
10799 if ((entry_size + base_size + current_function_outgoing_args_size) & 7)
10800 base_size += 4;
10801 if ((entry_size + base_size + current_function_outgoing_args_size) & 7)
10802 abort ();
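/* For illustration: with entry_size == 20 (say r4-r7 plus LR pushed)
and base_size == 8, the total of 28 is not a multiple of 8, so
base_size is bumped to 12 and the total becomes 32.  Every term is a
multiple of 4, so a single bump always suffices; hence the abort
check above.  */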
10804 cfun->machine->frame_size = base_size;
10806 return base_size;
10809 /* Generate the rest of a function's prologue. */
10811 void
10812 thumb_expand_prologue ()
10814 HOST_WIDE_INT amount = (thumb_get_frame_size ()
10815 + current_function_outgoing_args_size);
10816 unsigned long func_type;
10818 func_type = arm_current_func_type ();
10820 /* Naked functions don't have prologues. */
10821 if (IS_NAKED (func_type))
10822 return;
10824 if (IS_INTERRUPT (func_type))
10826 error ("interrupt Service Routines cannot be coded in Thumb mode");
10827 return;
10830 if (frame_pointer_needed)
10831 emit_insn (gen_movsi (hard_frame_pointer_rtx, stack_pointer_rtx));
10833 if (amount)
10835 amount = ROUND_UP_WORD (amount);
10837 if (amount < 512)
10838 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
10839 GEN_INT (- amount)));
10840 else
10842 int regno;
10843 rtx reg;
10845 /* The stack decrement is too big for an immediate value in a single
10846 insn. In theory we could issue multiple subtracts, but after
10847 three of them it becomes more space efficient to place the full
10848 value in the constant pool and load into a register. (Also the
10849 ARM debugger really likes to see only one stack decrement per
10850 function). So instead we look for a scratch register into which
10851 we can load the decrement, and then we subtract this from the
10852 stack pointer. Unfortunately on the thumb the only available
10853 scratch registers are the argument registers, and we cannot use
10854 these as they may hold arguments to the function. Instead we
10855 attempt to locate a call preserved register which is used by this
10856 function. If we can find one, then we know that it will have
10857 been pushed at the start of the prologue and so we can corrupt
10858 it now. */
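/* For illustration, with amount == 1024 and r4 pushed by this
function, the code below would emit roughly:
ldr r4, .Ln  @ .Ln is a (hypothetical) literal-pool entry holding -1024
add sp, sp, r4
relying on the prologue having already saved r4.  */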
10859 for (regno = LAST_ARG_REGNUM + 1; regno <= LAST_LO_REGNUM; regno++)
10860 if (THUMB_REG_PUSHED_P (regno)
10861 && !(frame_pointer_needed
10862 && (regno == THUMB_HARD_FRAME_POINTER_REGNUM)))
10863 break;
10865 if (regno > LAST_LO_REGNUM) /* Very unlikely. */
10867 rtx spare = gen_rtx (REG, SImode, IP_REGNUM);
10869 /* Choose an arbitrary, non-argument low register. */
10870 reg = gen_rtx (REG, SImode, LAST_LO_REGNUM);
10872 /* Save it by copying it into a high, scratch register. */
10873 emit_insn (gen_movsi (spare, reg));
10874 /* Add a USE to stop propagate_one_insn() from barfing. */
10875 emit_insn (gen_prologue_use (spare));
10877 /* Decrement the stack. */
10878 emit_insn (gen_movsi (reg, GEN_INT (- amount)));
10879 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
10880 reg));
10882 /* Restore the low register's original value. */
10883 emit_insn (gen_movsi (reg, spare));
10885 /* Emit a USE of the restored scratch register, so that flow
10886 analysis will not consider the restore redundant. The
10887 register won't be used again in this function and isn't
10888 restored by the epilogue. */
10889 emit_insn (gen_prologue_use (reg));
10891 else
10893 reg = gen_rtx (REG, SImode, regno);
10895 emit_insn (gen_movsi (reg, GEN_INT (- amount)));
10896 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
10897 reg));
10902 if (current_function_profile || TARGET_NO_SCHED_PRO)
10903 emit_insn (gen_blockage ());
10906 void
10907 thumb_expand_epilogue ()
10909 HOST_WIDE_INT amount = (thumb_get_frame_size ()
10910 + current_function_outgoing_args_size);
10912 /* Naked functions don't have epilogues. */
10913 if (IS_NAKED (arm_current_func_type ()))
10914 return;
10916 if (frame_pointer_needed)
10917 emit_insn (gen_movsi (stack_pointer_rtx, hard_frame_pointer_rtx));
10918 else if (amount)
10920 amount = ROUND_UP_WORD (amount);
10922 if (amount < 512)
10923 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
10924 GEN_INT (amount)));
10925 else
10927 /* r3 is always free in the epilogue. */
10928 rtx reg = gen_rtx (REG, SImode, LAST_ARG_REGNUM);
10930 emit_insn (gen_movsi (reg, GEN_INT (amount)));
10931 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
10935 /* Emit a USE (stack_pointer_rtx), so that
10936 the stack adjustment will not be deleted. */
10937 emit_insn (gen_prologue_use (stack_pointer_rtx));
10939 if (current_function_profile || TARGET_NO_SCHED_PRO)
10940 emit_insn (gen_blockage ());
10943 static void
10944 thumb_output_function_prologue (f, size)
10945 FILE * f;
10946 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
10948 int live_regs_mask = 0;
10949 int high_regs_pushed = 0;
10950 int regno;
10952 if (IS_NAKED (arm_current_func_type ()))
10953 return;
10955 if (is_called_in_ARM_mode (current_function_decl))
10957 const char * name;
10959 if (GET_CODE (DECL_RTL (current_function_decl)) != MEM)
10960 abort ();
10961 if (GET_CODE (XEXP (DECL_RTL (current_function_decl), 0)) != SYMBOL_REF)
10962 abort ();
10963 name = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
10965 /* Generate code sequence to switch us into Thumb mode. */
10966 /* The .code 32 directive has already been emitted by
10967 ASM_DECLARE_FUNCTION_NAME. */
10968 asm_fprintf (f, "\torr\t%r, %r, #1\n", IP_REGNUM, PC_REGNUM);
10969 asm_fprintf (f, "\tbx\t%r\n", IP_REGNUM);
10971 /* Generate a label, so that the debugger will notice the
10972 change in instruction sets. This label is also used by
10973 the assembler to bypass the ARM code when this function
10974 is called from a Thumb encoded function elsewhere in the
10975 same file. Hence the definition of STUB_NAME here must
10976 agree with the definition in gas/config/tc-arm.c */
10978 #define STUB_NAME ".real_start_of"
10980 fprintf (f, "\t.code\t16\n");
10981 #ifdef ARM_PE
10982 if (arm_dllexport_name_p (name))
10983 name = arm_strip_name_encoding (name);
10984 #endif
10985 asm_fprintf (f, "\t.globl %s%U%s\n", STUB_NAME, name);
10986 fprintf (f, "\t.thumb_func\n");
10987 asm_fprintf (f, "%s%U%s:\n", STUB_NAME, name);
10990 if (current_function_pretend_args_size)
10992 if (cfun->machine->uses_anonymous_args)
10994 int num_pushes;
10996 fprintf (f, "\tpush\t{");
10998 num_pushes = ARM_NUM_INTS (current_function_pretend_args_size);
11000 for (regno = LAST_ARG_REGNUM + 1 - num_pushes;
11001 regno <= LAST_ARG_REGNUM;
11002 regno++)
11003 asm_fprintf (f, "%r%s", regno,
11004 regno == LAST_ARG_REGNUM ? "" : ", ");
11006 fprintf (f, "}\n");
11008 else
11009 asm_fprintf (f, "\tsub\t%r, %r, #%d\n",
11010 SP_REGNUM, SP_REGNUM,
11011 current_function_pretend_args_size);
11014 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
11015 if (THUMB_REG_PUSHED_P (regno))
11016 live_regs_mask |= 1 << regno;
11018 if (live_regs_mask || !leaf_function_p () || thumb_far_jump_used_p (1))
11019 live_regs_mask |= 1 << LR_REGNUM;
11021 if (TARGET_BACKTRACE)
11023 int offset;
11024 int work_register = 0;
11025 int wr;
11027 /* We have been asked to create a stack backtrace structure.
11028 The code looks like this:
11030 0 .align 2
11031 0 func:
11032 0 sub SP, #16 Reserve space for 4 registers.
11033 2 push {R7} Get a work register.
11034 4 add R7, SP, #20 Get the stack pointer before the push.
11035 6 str R7, [SP, #8] Store the stack pointer (before reserving the space).
11036 8 mov R7, PC Get hold of the start of this code plus 12.
11037 10 str R7, [SP, #16] Store it.
11038 12 mov R7, FP Get hold of the current frame pointer.
11039 14 str R7, [SP, #4] Store it.
11040 16 mov R7, LR Get hold of the current return address.
11041 18 str R7, [SP, #12] Store it.
11042 20 add R7, SP, #16 Point at the start of the backtrace structure.
11043 22 mov FP, R7 Put this value into the frame pointer. */
11045 if ((live_regs_mask & 0xFF) == 0)
11047 /* See if the a4 register is free. */
11049 if (regs_ever_live [LAST_ARG_REGNUM] == 0)
11050 work_register = LAST_ARG_REGNUM;
11051 else /* We must push a register of our own */
11052 live_regs_mask |= (1 << LAST_LO_REGNUM);
11055 if (work_register == 0)
11057 /* Select a register from the list that will be pushed to
11058 use as our work register. */
11059 for (work_register = (LAST_LO_REGNUM + 1); work_register--;)
11060 if ((1 << work_register) & live_regs_mask)
11061 break;
11064 asm_fprintf
11065 (f, "\tsub\t%r, %r, #16\t%@ Create stack backtrace structure\n",
11066 SP_REGNUM, SP_REGNUM);
11068 if (live_regs_mask)
11069 thumb_pushpop (f, live_regs_mask, 1);
11071 for (offset = 0, wr = 1 << 15; wr != 0; wr >>= 1)
11072 if (wr & live_regs_mask)
11073 offset += 4;
11075 asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
11076 offset + 16 + current_function_pretend_args_size);
11078 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
11079 offset + 4);
11081 /* Make sure that the instruction fetching the PC is in the right place
11082 to calculate "start of backtrace creation code + 12". */
11083 if (live_regs_mask)
11085 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
11086 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
11087 offset + 12);
11088 asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
11089 ARM_HARD_FRAME_POINTER_REGNUM);
11090 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
11091 offset);
11093 else
11095 asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
11096 ARM_HARD_FRAME_POINTER_REGNUM);
11097 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
11098 offset);
11099 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
11100 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
11101 offset + 12);
11104 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, LR_REGNUM);
11105 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
11106 offset + 8);
11107 asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
11108 offset + 12);
11109 asm_fprintf (f, "\tmov\t%r, %r\t\t%@ Backtrace structure created\n",
11110 ARM_HARD_FRAME_POINTER_REGNUM, work_register);
11112 else if (live_regs_mask)
11113 thumb_pushpop (f, live_regs_mask, 1);
11115 for (regno = 8; regno < 13; regno++)
11116 if (THUMB_REG_PUSHED_P (regno))
11117 high_regs_pushed++;
11119 if (high_regs_pushed)
11121 int pushable_regs = 0;
11122 int mask = live_regs_mask & 0xff;
11123 int next_hi_reg;
11125 for (next_hi_reg = 12; next_hi_reg > LAST_LO_REGNUM; next_hi_reg--)
11126 if (THUMB_REG_PUSHED_P (next_hi_reg))
11127 break;
11129 pushable_regs = mask;
11131 if (pushable_regs == 0)
11133 /* Desperation time -- this probably will never happen. */
11134 if (THUMB_REG_PUSHED_P (LAST_ARG_REGNUM))
11135 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, LAST_ARG_REGNUM);
11136 mask = 1 << LAST_ARG_REGNUM;
11139 while (high_regs_pushed > 0)
11141 for (regno = LAST_LO_REGNUM; regno >= 0; regno--)
11143 if (mask & (1 << regno))
11145 asm_fprintf (f, "\tmov\t%r, %r\n", regno, next_hi_reg);
11147 high_regs_pushed--;
11149 if (high_regs_pushed)
11151 for (next_hi_reg--; next_hi_reg > LAST_LO_REGNUM;
11152 next_hi_reg--)
11153 if (THUMB_REG_PUSHED_P (next_hi_reg))
11154 break;
11156 else
11158 mask &= ~((1 << regno) - 1);
11159 break;
11164 thumb_pushpop (f, mask, 1);
11167 if (pushable_regs == 0
11168 && (THUMB_REG_PUSHED_P (LAST_ARG_REGNUM)))
11169 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
11173 /* Handle the case of a double word load into a low register from
11174 a computed memory address. The computed address may involve a
11175 register which is overwritten by the load. */
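/* For illustration: a DImode load of r0/r1 from the address in r0
must not overwrite the base register first, so the REG case below
emits
ldr r1, [r0, #4]
ldr r0, [r0]
loading the high word while the base address is still intact.  */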
11177 const char *
11178 thumb_load_double_from_address (operands)
11179 rtx *operands;
11181 rtx addr;
11182 rtx base;
11183 rtx offset;
11184 rtx arg1;
11185 rtx arg2;
11187 if (GET_CODE (operands[0]) != REG)
11188 abort ();
11190 if (GET_CODE (operands[1]) != MEM)
11191 abort ();
11193 /* Get the memory address. */
11194 addr = XEXP (operands[1], 0);
11196 /* Work out how the memory address is computed. */
11197 switch (GET_CODE (addr))
11199 case REG:
11200 operands[2] = gen_rtx (MEM, SImode,
11201 plus_constant (XEXP (operands[1], 0), 4));
11203 if (REGNO (operands[0]) == REGNO (addr))
11205 output_asm_insn ("ldr\t%H0, %2", operands);
11206 output_asm_insn ("ldr\t%0, %1", operands);
11208 else
11210 output_asm_insn ("ldr\t%0, %1", operands);
11211 output_asm_insn ("ldr\t%H0, %2", operands);
11213 break;
11215 case CONST:
11216 /* Compute <address> + 4 for the high order load. */
11217 operands[2] = gen_rtx (MEM, SImode,
11218 plus_constant (XEXP (operands[1], 0), 4));
11220 output_asm_insn ("ldr\t%0, %1", operands);
11221 output_asm_insn ("ldr\t%H0, %2", operands);
11222 break;
11224 case PLUS:
11225 arg1 = XEXP (addr, 0);
11226 arg2 = XEXP (addr, 1);
11228 if (CONSTANT_P (arg1))
11229 base = arg2, offset = arg1;
11230 else
11231 base = arg1, offset = arg2;
11233 if (GET_CODE (base) != REG)
11234 abort ();
11236 /* Catch the case of <address> = <reg> + <reg> */
11237 if (GET_CODE (offset) == REG)
11239 int reg_offset = REGNO (offset);
11240 int reg_base = REGNO (base);
11241 int reg_dest = REGNO (operands[0]);
11243 /* Add the base and offset registers together into the
11244 higher destination register. */
11245 asm_fprintf (asm_out_file, "\tadd\t%r, %r, %r",
11246 reg_dest + 1, reg_base, reg_offset);
11248 /* Load the lower destination register from the address in
11249 the higher destination register. */
11250 asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #0]",
11251 reg_dest, reg_dest + 1);
11253 /* Load the higher destination register from its own address
11254 plus 4. */
11255 asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #4]",
11256 reg_dest + 1, reg_dest + 1);
11258 else
11260 /* Compute <address> + 4 for the high order load. */
11261 operands[2] = gen_rtx (MEM, SImode,
11262 plus_constant (XEXP (operands[1], 0), 4));
11264 /* If the computed address is held in the low order register
11265 then load the high order register first, otherwise always
11266 load the low order register first. */
11267 if (REGNO (operands[0]) == REGNO (base))
11269 output_asm_insn ("ldr\t%H0, %2", operands);
11270 output_asm_insn ("ldr\t%0, %1", operands);
11272 else
11274 output_asm_insn ("ldr\t%0, %1", operands);
11275 output_asm_insn ("ldr\t%H0, %2", operands);
11278 break;
11280 case LABEL_REF:
11281 /* With no registers to worry about we can just load the value
11282 directly. */
11283 operands[2] = gen_rtx (MEM, SImode,
11284 plus_constant (XEXP (operands[1], 0), 4));
11286 output_asm_insn ("ldr\t%H0, %2", operands);
11287 output_asm_insn ("ldr\t%0, %1", operands);
11288 break;
11290 default:
11291 abort ();
11292 break;
11295 return "";
11299 const char *
11300 thumb_output_move_mem_multiple (n, operands)
11301 int n;
11302 rtx * operands;
11304 rtx tmp;
11306 switch (n)
11308 case 2:
11309 if (REGNO (operands[4]) > REGNO (operands[5]))
11311 tmp = operands[4];
11312 operands[4] = operands[5];
11313 operands[5] = tmp;
11315 output_asm_insn ("ldmia\t%1!, {%4, %5}", operands);
11316 output_asm_insn ("stmia\t%0!, {%4, %5}", operands);
11317 break;
11319 case 3:
11320 if (REGNO (operands[4]) > REGNO (operands[5]))
11322 tmp = operands[4];
11323 operands[4] = operands[5];
11324 operands[5] = tmp;
11326 if (REGNO (operands[5]) > REGNO (operands[6]))
11328 tmp = operands[5];
11329 operands[5] = operands[6];
11330 operands[6] = tmp;
11332 if (REGNO (operands[4]) > REGNO (operands[5]))
11334 tmp = operands[4];
11335 operands[4] = operands[5];
11336 operands[5] = tmp;
11339 output_asm_insn ("ldmia\t%1!, {%4, %5, %6}", operands);
11340 output_asm_insn ("stmia\t%0!, {%4, %5, %6}", operands);
11341 break;
11343 default:
11344 abort ();
11347 return "";
11350 /* Routines for generating rtl. */
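/* Expand a constant-length block move (movstrqi).  The length is
peeled off greedily: 12-byte and 8-byte chunks via the movmem12b and
movmem8b patterns, then word, halfword and byte tails.  For
illustration, a 27-byte copy becomes 12 + 12 + 2 + 1.  */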
11352 void
11353 thumb_expand_movstrqi (operands)
11354 rtx * operands;
11356 rtx out = copy_to_mode_reg (SImode, XEXP (operands[0], 0));
11357 rtx in = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
11358 HOST_WIDE_INT len = INTVAL (operands[2]);
11359 HOST_WIDE_INT offset = 0;
11361 while (len >= 12)
11363 emit_insn (gen_movmem12b (out, in, out, in));
11364 len -= 12;
11367 if (len >= 8)
11369 emit_insn (gen_movmem8b (out, in, out, in));
11370 len -= 8;
11373 if (len >= 4)
11375 rtx reg = gen_reg_rtx (SImode);
11376 emit_insn (gen_movsi (reg, gen_rtx (MEM, SImode, in)));
11377 emit_insn (gen_movsi (gen_rtx (MEM, SImode, out), reg));
11378 len -= 4;
11379 offset += 4;
11382 if (len >= 2)
11384 rtx reg = gen_reg_rtx (HImode);
11385 emit_insn (gen_movhi (reg, gen_rtx (MEM, HImode,
11386 plus_constant (in, offset))));
11387 emit_insn (gen_movhi (gen_rtx (MEM, HImode, plus_constant (out, offset)),
11388 reg));
11389 len -= 2;
11390 offset += 2;
11393 if (len)
11395 rtx reg = gen_reg_rtx (QImode);
11396 emit_insn (gen_movqi (reg, gen_rtx (MEM, QImode,
11397 plus_constant (in, offset))));
11398 emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (out, offset)),
11399 reg));
11404 thumb_cmp_operand (op, mode)
11405 rtx op;
11406 enum machine_mode mode;
11408 return ((GET_CODE (op) == CONST_INT
11409 && (unsigned HOST_WIDE_INT) (INTVAL (op)) < 256)
11410 || register_operand (op, mode));
11413 static const char *
11414 thumb_condition_code (x, invert)
11415 rtx x;
11416 int invert;
11418 static const char * const conds[] =
11420 "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
11421 "hi", "ls", "ge", "lt", "gt", "le"
11423 int val;
11425 switch (GET_CODE (x))
11427 case EQ: val = 0; break;
11428 case NE: val = 1; break;
11429 case GEU: val = 2; break;
11430 case LTU: val = 3; break;
11431 case GTU: val = 8; break;
11432 case LEU: val = 9; break;
11433 case GE: val = 10; break;
11434 case LT: val = 11; break;
11435 case GT: val = 12; break;
11436 case LE: val = 13; break;
11437 default:
11438 abort ();
11441 return conds[val ^ invert];
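/* The table above lists each condition next to its inverse, so
XORing the index with INVERT flips the sense: GE is conds[10] and
its inversion is conds[11], "lt".  */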
11444 /* Handle storing a half-word to memory during reload. */
11446 void
11447 thumb_reload_out_hi (operands)
11448 rtx * operands;
11450 emit_insn (gen_thumb_movhi_clobber (operands[0], operands[1], operands[2]));
11453 /* Handle storing a half-word to memory during reload. */
11455 void
11456 thumb_reload_in_hi (operands)
11457 rtx * operands ATTRIBUTE_UNUSED;
11459 abort ();
11462 /* Return the length of a function name prefix
11463 that starts with the character C. */
11465 static int
11466 arm_get_strip_length (c)
11467 int c;
11469 switch (c)
11471 ARM_NAME_ENCODING_LENGTHS
11472 default: return 0;
11476 /* Return a pointer to a function's name with any
11477 and all prefix encodings stripped from it. */
11479 const char *
11480 arm_strip_name_encoding (name)
11481 const char * name;
11483 int skip;
11485 while ((skip = arm_get_strip_length (* name)))
11486 name += skip;
11488 return name;
11491 /* If there is a '*' anywhere in the name's prefix, then
11492 emit the stripped name verbatim, otherwise prepend an
11493 underscore if leading underscores are being used. */
11495 void
11496 arm_asm_output_labelref (stream, name)
11497 FILE * stream;
11498 const char * name;
11500 int skip;
11501 int verbatim = 0;
11503 while ((skip = arm_get_strip_length (* name)))
11505 verbatim |= (*name == '*');
11506 name += skip;
11509 if (verbatim)
11510 fputs (name, stream);
11511 else
11512 asm_fprintf (stream, "%U%s", name);
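/* For illustration: a name stored as "*foo" is emitted verbatim as
"foo", while a plain "foo" receives the user label prefix, e.g.
"_foo" on targets that prepend an underscore.  */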
11515 rtx aof_pic_label;
11517 #ifdef AOF_ASSEMBLER
11518 /* Special functions only needed when producing AOF syntax assembler. */
11520 struct pic_chain
11522 struct pic_chain * next;
11523 const char * symname;
11526 static struct pic_chain * aof_pic_chain = NULL;
11529 aof_pic_entry (x)
11530 rtx x;
11532 struct pic_chain ** chainp;
11533 int offset;
11535 if (aof_pic_label == NULL_RTX)
11537 aof_pic_label = gen_rtx_SYMBOL_REF (Pmode, "x$adcons");
11540 for (offset = 0, chainp = &aof_pic_chain; *chainp;
11541 offset += 4, chainp = &(*chainp)->next)
11542 if ((*chainp)->symname == XSTR (x, 0))
11543 return plus_constant (aof_pic_label, offset);
11545 *chainp = (struct pic_chain *) xmalloc (sizeof (struct pic_chain));
11546 (*chainp)->next = NULL;
11547 (*chainp)->symname = XSTR (x, 0);
11548 return plus_constant (aof_pic_label, offset);
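/* Each distinct symbol thus occupies one word in the x$adcons pool:
the Nth symbol added (counting from zero) is addressed as
x$adcons + 4 * N, and aof_dump_pic_table below emits one DCD per
entry.  */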
11551 void
11552 aof_dump_pic_table (f)
11553 FILE * f;
11555 struct pic_chain * chain;
11557 if (aof_pic_chain == NULL)
11558 return;
11560 asm_fprintf (f, "\tAREA |%r$$adcons|, BASED %r\n",
11561 PIC_OFFSET_TABLE_REGNUM,
11562 PIC_OFFSET_TABLE_REGNUM);
11563 fputs ("|x$adcons|\n", f);
11565 for (chain = aof_pic_chain; chain; chain = chain->next)
11567 fputs ("\tDCD\t", f);
11568 assemble_name (f, chain->symname);
11569 fputs ("\n", f);
11573 int arm_text_section_count = 1;
11575 char *
11576 aof_text_section ()
11578 static char buf[100];
11579 sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
11580 arm_text_section_count++);
11581 if (flag_pic)
11582 strcat (buf, ", PIC, REENTRANT");
11583 return buf;
11586 static int arm_data_section_count = 1;
11588 char *
11589 aof_data_section ()
11591 static char buf[100];
11592 sprintf (buf, "\tAREA |C$$data%d|, DATA", arm_data_section_count++);
11593 return buf;
11596 /* The AOF assembler is religiously strict about declarations of
11597 imported and exported symbols, so that it is impossible to declare
11598 a function as imported near the beginning of the file, and then to
11599 export it later on. It is, however, possible to delay the decision
11600 until all the functions in the file have been compiled. To get
11601 around this, we maintain a list of the imports and exports, and
11602 delete from it any that are subsequently defined. At the end of
11603 compilation we spit the remainder of the list out before the END
11604 directive. */
11606 struct import
11608 struct import * next;
11609 const char * name;
11612 static struct import * imports_list = NULL;
11614 void
11615 aof_add_import (name)
11616 const char * name;
11618 struct import * new;
11620 for (new = imports_list; new; new = new->next)
11621 if (new->name == name)
11622 return;
11624 new = (struct import *) xmalloc (sizeof (struct import));
11625 new->next = imports_list;
11626 imports_list = new;
11627 new->name = name;
11630 void
11631 aof_delete_import (name)
11632 const char * name;
11634 struct import ** old;
11636 for (old = &imports_list; *old; old = & (*old)->next)
11638 if ((*old)->name == name)
11640 *old = (*old)->next;
11641 return;
11646 int arm_main_function = 0;
11648 void
11649 aof_dump_imports (f)
11650 FILE * f;
11652 /* The AOF assembler needs this to cause the startup code to be extracted
11653 from the library. Bringing in __main causes the whole thing to work
11654 automagically. */
11655 if (arm_main_function)
11657 text_section ();
11658 fputs ("\tIMPORT __main\n", f);
11659 fputs ("\tDCD __main\n", f);
11662 /* Now dump the remaining imports. */
11663 while (imports_list)
11665 fprintf (f, "\tIMPORT\t");
11666 assemble_name (f, imports_list->name);
11667 fputc ('\n', f);
11668 imports_list = imports_list->next;
11672 static void
11673 aof_globalize_label (stream, name)
11674 FILE *stream;
11675 const char *name;
11677 default_globalize_label (stream, name);
11678 if (! strcmp (name, "main"))
11679 arm_main_function = 1;
11681 #endif /* AOF_ASSEMBLER */
11683 #ifdef OBJECT_FORMAT_ELF
11684 /* Switch to an arbitrary section NAME with attributes as specified
11685 by FLAGS. ALIGN specifies any known alignment requirements for
11686 the section; 0 if the default should be used.
11688 Differs from the default elf version only in the prefix character
11689 used before the section type. */
11691 static void
11692 arm_elf_asm_named_section (name, flags)
11693 const char *name;
11694 unsigned int flags;
11696 char flagchars[10], *f = flagchars;
11698 if (! named_section_first_declaration (name))
11700 fprintf (asm_out_file, "\t.section\t%s\n", name);
11701 return;
11704 if (!(flags & SECTION_DEBUG))
11705 *f++ = 'a';
11706 if (flags & SECTION_WRITE)
11707 *f++ = 'w';
11708 if (flags & SECTION_CODE)
11709 *f++ = 'x';
11710 if (flags & SECTION_SMALL)
11711 *f++ = 's';
11712 if (flags & SECTION_MERGE)
11713 *f++ = 'M';
11714 if (flags & SECTION_STRINGS)
11715 *f++ = 'S';
11716 if (flags & SECTION_TLS)
11717 *f++ = 'T';
11718 *f = '\0';
11720 fprintf (asm_out_file, "\t.section\t%s,\"%s\"", name, flagchars);
11722 if (!(flags & SECTION_NOTYPE))
11724 const char *type;
11726 if (flags & SECTION_BSS)
11727 type = "nobits";
11728 else
11729 type = "progbits";
11731 fprintf (asm_out_file, ",%%%s", type);
11733 if (flags & SECTION_ENTSIZE)
11734 fprintf (asm_out_file, ",%d", flags & SECTION_ENTSIZE);
11737 putc ('\n', asm_out_file);
11739 #endif
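/* For illustration of the ELF section hook above: the first
declaration of a writable data section named .foo (SECTION_WRITE set)
is announced as
.section .foo,"aw",%progbits
Note the '%' type prefix where the default ELF version uses '@',
which the ARM assembler would treat as a comment character.  */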
11741 #ifndef ARM_PE
11742 /* Symbols in the text segment can be accessed without indirecting via the
11743 constant pool; it may take an extra binary operation, but this is still
11744 faster than indirecting via memory. Don't do this when not optimizing,
11745 since we won't be calculating all of the offsets necessary to do this
11746 simplification. */
11748 static void
11749 arm_encode_section_info (decl, first)
11750 tree decl;
11751 int first;
11753 /* This doesn't work with AOF syntax, since the string table may be in
11754 a different AREA. */
11755 #ifndef AOF_ASSEMBLER
11756 if (optimize > 0 && TREE_CONSTANT (decl)
11757 && (!flag_writable_strings || TREE_CODE (decl) != STRING_CST))
11759 rtx rtl = (TREE_CODE_CLASS (TREE_CODE (decl)) != 'd'
11760 ? TREE_CST_RTL (decl) : DECL_RTL (decl));
11761 SYMBOL_REF_FLAG (XEXP (rtl, 0)) = 1;
11763 #endif
11765 /* If we are referencing a function that is weak then encode a long call
11766 flag in the function name, otherwise if the function is static or
11767 known to be defined in this file then encode a short call flag. */
11768 if (first && TREE_CODE_CLASS (TREE_CODE (decl)) == 'd')
11770 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_WEAK (decl))
11771 arm_encode_call_attribute (decl, LONG_CALL_FLAG_CHAR);
11772 else if (! TREE_PUBLIC (decl))
11773 arm_encode_call_attribute (decl, SHORT_CALL_FLAG_CHAR);
11776 #endif /* !ARM_PE */
11778 static void
11779 arm_internal_label (stream, prefix, labelno)
11780 FILE *stream;
11781 const char *prefix;
11782 unsigned long labelno;
11784 if (arm_ccfsm_state == 3 && (unsigned) arm_target_label == labelno
11785 && !strcmp (prefix, "L"))
11787 arm_ccfsm_state = 0;
11788 arm_target_insn = NULL;
11790 default_internal_label (stream, prefix, labelno);
11793 /* Output code to add DELTA to the first argument, and then jump
11794 to FUNCTION. Used for C++ multiple inheritance. */
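/* For illustration: with the this pointer in r0 and DELTA == 0x1234,
the loop below splits the constant into shifter-friendly chunks and
emits
add r0, r0, #564
add r0, r0, #4096
b function
(564 == 0x234 and 4096 == 0x1000; each chunk is an 8-bit value at an
even shift, hence a valid ARM immediate).  */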
11796 static void
11797 arm_output_mi_thunk (file, thunk, delta, vcall_offset, function)
11798 FILE *file;
11799 tree thunk ATTRIBUTE_UNUSED;
11800 HOST_WIDE_INT delta;
11801 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED;
11802 tree function;
11804 int mi_delta = delta;
11805 const char *const mi_op = mi_delta < 0 ? "sub" : "add";
11806 int shift = 0;
11807 int this_regno = (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)))
11808 ? 1 : 0);
11809 if (mi_delta < 0)
11810 mi_delta = - mi_delta;
11811 while (mi_delta != 0)
11813 if ((mi_delta & (3 << shift)) == 0)
11814 shift += 2;
11815 else
11817 asm_fprintf (file, "\t%s\t%r, %r, #%d\n",
11818 mi_op, this_regno, this_regno,
11819 mi_delta & (0xff << shift));
11820 mi_delta &= ~(0xff << shift);
11821 shift += 8;
11824 fputs ("\tb\t", file);
11825 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
11826 if (NEED_PLT_RELOC)
11827 fputs ("(PLT)", file);
11828 fputc ('\n', file);