/* Output routines for GCC for ARM.
   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002
   Free Software Foundation, Inc.
   Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
   and Martin Simmons (@harleqn.co.uk).
   More major hacks by Richard Earnshaw (rearnsha@arm.com).

   This file is part of GNU CC.

   GNU CC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 2, or (at your option)
   any later version.

   GNU CC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GNU CC; see the file COPYING.  If not, write to
   the Free Software Foundation, 59 Temple Place - Suite 330,
   Boston, MA 02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "reload.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "toplev.h"
#include "recog.h"
#include "ggc.h"
#include "except.h"
#include "c-pragma.h"
#include "integrate.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"
/* Forward definitions of types.  */
typedef struct minipool_node    Mnode;
typedef struct minipool_fixup   Mfix;

/* In order to improve the layout of the prototypes below
   some short type abbreviations are defined here.  */
#define Hint     HOST_WIDE_INT
#define Mmode    enum machine_mode
#define Ulong    unsigned long
#define Ccstar   const char *

const struct attribute_spec arm_attribute_table[];
/* Forward function declarations.  */
static void arm_add_gc_roots PARAMS ((void));
static int arm_gen_constant PARAMS ((enum rtx_code, Mmode, Hint, rtx, rtx, int, int));
static unsigned bit_count PARAMS ((Ulong));
static int arm_address_register_rtx_p PARAMS ((rtx, int));
static int arm_legitimate_index_p PARAMS ((enum machine_mode, rtx, int));
static int thumb_base_register_rtx_p PARAMS ((rtx, enum machine_mode, int));
inline static int thumb_index_register_rtx_p PARAMS ((rtx, int));
static int const_ok_for_op PARAMS ((Hint, enum rtx_code));
static int eliminate_lr2ip PARAMS ((rtx *));
static rtx emit_multi_reg_push PARAMS ((int));
static rtx emit_sfm PARAMS ((int, int));
#ifndef AOF_ASSEMBLER
static bool arm_assemble_integer PARAMS ((rtx, unsigned int, int));
#endif
static Ccstar fp_const_from_val PARAMS ((REAL_VALUE_TYPE *));
static arm_cc get_arm_condition_code PARAMS ((rtx));
static void init_fpa_table PARAMS ((void));
static Hint int_log2 PARAMS ((Hint));
static rtx is_jump_table PARAMS ((rtx));
static Ccstar output_multi_immediate PARAMS ((rtx *, Ccstar, Ccstar, int, Hint));
static void print_multi_reg PARAMS ((FILE *, Ccstar, int, int));
static Mmode select_dominance_cc_mode PARAMS ((rtx, rtx, Hint));
static Ccstar shift_op PARAMS ((rtx, Hint *));
static struct machine_function * arm_init_machine_status PARAMS ((void));
static int number_of_first_bit_set PARAMS ((int));
static void replace_symbols_in_block PARAMS ((tree, rtx, rtx));
static void thumb_exit PARAMS ((FILE *, int, rtx));
static void thumb_pushpop PARAMS ((FILE *, int, int));
static Ccstar thumb_condition_code PARAMS ((rtx, int));
static rtx is_jump_table PARAMS ((rtx));
static Hint get_jump_table_size PARAMS ((rtx));
static Mnode * move_minipool_fix_forward_ref PARAMS ((Mnode *, Mnode *, Hint));
static Mnode * add_minipool_forward_ref PARAMS ((Mfix *));
static Mnode * move_minipool_fix_backward_ref PARAMS ((Mnode *, Mnode *, Hint));
static Mnode * add_minipool_backward_ref PARAMS ((Mfix *));
static void assign_minipool_offsets PARAMS ((Mfix *));
static void arm_print_value PARAMS ((FILE *, rtx));
static void dump_minipool PARAMS ((rtx));
static int arm_barrier_cost PARAMS ((rtx));
static Mfix * create_fix_barrier PARAMS ((Mfix *, Hint));
static void push_minipool_barrier PARAMS ((rtx, Hint));
static void push_minipool_fix PARAMS ((rtx, Hint, rtx *, Mmode, rtx));
static void note_invalid_constants PARAMS ((rtx, Hint));
static int current_file_function_operand PARAMS ((rtx));
static Ulong arm_compute_save_reg0_reg12_mask PARAMS ((void));
static Ulong arm_compute_save_reg_mask PARAMS ((void));
static Ulong arm_isr_value PARAMS ((tree));
static Ulong arm_compute_func_type PARAMS ((void));
static tree arm_handle_fndecl_attribute PARAMS ((tree *, tree, tree, int, bool *));
static tree arm_handle_isr_attribute PARAMS ((tree *, tree, tree, int, bool *));
static void arm_output_function_epilogue PARAMS ((FILE *, Hint));
static void arm_output_function_prologue PARAMS ((FILE *, Hint));
static void thumb_output_function_prologue PARAMS ((FILE *, Hint));
static int arm_comp_type_attributes PARAMS ((tree, tree));
static void arm_set_default_type_attributes PARAMS ((tree));
static int arm_adjust_cost PARAMS ((rtx, rtx, rtx, int));
static int count_insns_for_constant PARAMS ((HOST_WIDE_INT, int));
static int arm_get_strip_length PARAMS ((int));
static bool arm_function_ok_for_sibcall PARAMS ((tree, tree));
#ifdef OBJECT_FORMAT_ELF
static void arm_elf_asm_named_section PARAMS ((const char *, unsigned int));
#endif
#ifndef ARM_PE
static void arm_encode_section_info PARAMS ((tree, int));
#endif
#ifdef AOF_ASSEMBLER
static void aof_globalize_label PARAMS ((FILE *, const char *));
#endif
static void arm_internal_label PARAMS ((FILE *, const char *, unsigned long));
static void arm_output_mi_thunk PARAMS ((FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT, tree));
static int arm_rtx_costs_1 PARAMS ((rtx, enum rtx_code, enum rtx_code));
static bool arm_rtx_costs PARAMS ((rtx, int, int, int*));
static int arm_address_cost PARAMS ((rtx));

#undef Hint
#undef Mmode
#undef Ulong
#undef Ccstar
/* Initialize the GCC target structure.  */
#ifdef TARGET_DLLIMPORT_DECL_ATTRIBUTES
#undef  TARGET_MERGE_DECL_ATTRIBUTES
#define TARGET_MERGE_DECL_ATTRIBUTES merge_dllimport_decl_attributes
#endif

#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE arm_attribute_table

#ifdef AOF_ASSEMBLER
#undef  TARGET_ASM_BYTE_OP
#define TARGET_ASM_BYTE_OP "\tDCB\t"
#undef  TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\tDCW\t"
#undef  TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\tDCD\t"
#undef  TARGET_ASM_GLOBALIZE_LABEL
#define TARGET_ASM_GLOBALIZE_LABEL aof_globalize_label
#else
#undef  TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP NULL
#undef  TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER arm_assemble_integer
#endif

#undef  TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE arm_output_function_prologue

#undef  TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE arm_output_function_epilogue

#undef  TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES arm_comp_type_attributes

#undef  TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES arm_set_default_type_attributes

#undef  TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS arm_init_builtins

#undef  TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN arm_expand_builtin

#undef  TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST arm_adjust_cost

#undef  TARGET_ENCODE_SECTION_INFO
#ifdef ARM_PE
#define TARGET_ENCODE_SECTION_INFO arm_pe_encode_section_info
#else
#define TARGET_ENCODE_SECTION_INFO arm_encode_section_info
#endif

#undef  TARGET_STRIP_NAME_ENCODING
#define TARGET_STRIP_NAME_ENCODING arm_strip_name_encoding

#undef  TARGET_ASM_INTERNAL_LABEL
#define TARGET_ASM_INTERNAL_LABEL arm_internal_label

#undef  TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL arm_function_ok_for_sibcall

#undef  TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK arm_output_mi_thunk
#undef  TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall

#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS arm_rtx_costs
#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST arm_address_cost

struct gcc_target targetm = TARGET_INITIALIZER;
/* Obstack for minipool constant handling.  */
static struct obstack minipool_obstack;
static char *         minipool_startobj;

/* The maximum number of insns skipped which
   will be conditionalised if possible.  */
static int max_insns_skipped = 5;

extern FILE * asm_out_file;

/* True if we are currently building a constant table.  */
int making_const_table;

/* Define the information needed to generate branch insns.  This is
   stored from the compare operation.  */
rtx arm_compare_op0, arm_compare_op1;

/* What type of floating point are we tuning for?  */
enum floating_point_type arm_fpu;

/* What type of floating point instructions are available?  */
enum floating_point_type arm_fpu_arch;

/* What program mode is the cpu running in?  26-bit mode or 32-bit mode.  */
enum prog_mode_type arm_prgmode;

/* Set by the -mfp=... option.  */
const char * target_fp_name = NULL;

/* Used to parse -mstructure_size_boundary command line option.  */
const char * structure_size_string = NULL;
int arm_structure_size_boundary = DEFAULT_STRUCTURE_SIZE_BOUNDARY;
/* Bit values used to identify processor capabilities.  */
#define FL_CO_PROC    (1 << 0)        /* Has external co-processor bus */
#define FL_FAST_MULT  (1 << 1)        /* Fast multiply */
#define FL_MODE26     (1 << 2)        /* 26-bit mode support */
#define FL_MODE32     (1 << 3)        /* 32-bit mode support */
#define FL_ARCH4      (1 << 4)        /* Architecture rel 4 */
#define FL_ARCH5      (1 << 5)        /* Architecture rel 5 */
#define FL_THUMB      (1 << 6)        /* Thumb aware */
#define FL_LDSCHED    (1 << 7)        /* Load scheduling necessary */
#define FL_STRONG     (1 << 8)        /* StrongARM */
#define FL_ARCH5E     (1 << 9)        /* DSP extensions to v5 */
#define FL_XSCALE     (1 << 10)       /* XScale */
/* The bits in this mask specify which
   instructions we are allowed to generate.  */
static unsigned long insn_flags = 0;

/* The bits in this mask specify which instruction scheduling options should
   be used.  Note - there is an overlap with the FL_FAST_MULT.  For some
   hardware we want to be able to generate the multiply instructions, but to
   tune as if they were not present in the architecture.  */
static unsigned long tune_flags = 0;

/* The following are used in the arm.md file as equivalents to bits
   in the above two flag variables.  */

/* Nonzero if this is an "M" variant of the processor.  */
int arm_fast_multiply = 0;

/* Nonzero if this chip supports the ARM Architecture 4 extensions.  */
int arm_arch4 = 0;

/* Nonzero if this chip supports the ARM Architecture 5 extensions.  */
int arm_arch5 = 0;

/* Nonzero if this chip supports the ARM Architecture 5E extensions.  */
int arm_arch5e = 0;

/* Nonzero if this chip can benefit from load scheduling.  */
int arm_ld_sched = 0;

/* Nonzero if this chip is a StrongARM.  */
int arm_is_strong = 0;

/* Nonzero if this chip is an XScale.  */
int arm_is_xscale = 0;

/* Nonzero if this chip is an ARM6 or an ARM7.  */
int arm_is_6_or_7 = 0;

/* Nonzero if generating Thumb instructions.  */
int thumb_code = 0;

/* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
   must report the mode of the memory reference from PRINT_OPERAND to
   PRINT_OPERAND_ADDRESS.  */
enum machine_mode output_memory_reference_mode;

/* The register number to be used for the PIC offset register.  */
const char * arm_pic_register_string = NULL;
int arm_pic_register = INVALID_REGNUM;

/* Set to 1 when a return insn is output, this means that the epilogue
   is not needed.  */
int return_used_this_function;

/* Set to 1 after arm_reorg has started.  Reset to start at the start of
   the next function.  */
static int after_arm_reorg = 0;

/* The maximum number of insns to be used when loading a constant.  */
static int arm_constant_limit = 3;

/* For an explanation of these variables, see final_prescan_insn below.  */
int arm_ccfsm_state;
enum arm_cond_code arm_current_cc;
rtx arm_target_insn;
int arm_target_label;

/* The condition codes of the ARM, and the inverse function.  */
static const char * const arm_condition_codes[] =
{
  "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
  "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
};

#define streq(string1, string2) (strcmp (string1, string2) == 0)
/* Initialization code.  */

struct processors
{
  const char *const name;
  const unsigned long flags;
};

/* Not all of these give usefully different compilation alternatives,
   but there is no simple way of generalizing them.  */
static const struct processors all_cores[] =
{
  /* ARM Cores */

  {"arm2",      FL_CO_PROC | FL_MODE26 },
  {"arm250",    FL_CO_PROC | FL_MODE26 },
  {"arm3",      FL_CO_PROC | FL_MODE26 },
  {"arm6",      FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm60",     FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm600",    FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm610",                 FL_MODE26 | FL_MODE32 },
  {"arm620",    FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7",      FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  /* arm7m doesn't exist on its own, but only with D (and I); those
     don't alter the code, so arm7m is sometimes used.  */
  {"arm7m",     FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7d",     FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dm",    FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7di",    FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dmi",   FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm70",     FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700",    FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700i",   FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm710",                 FL_MODE26 | FL_MODE32 },
  {"arm710t",                FL_MODE26 | FL_MODE32 | FL_THUMB },
  {"arm720",                 FL_MODE26 | FL_MODE32 },
  {"arm720t",                FL_MODE26 | FL_MODE32 | FL_THUMB },
  {"arm740t",                FL_MODE26 | FL_MODE32 | FL_THUMB },
  {"arm710c",                FL_MODE26 | FL_MODE32 },
  {"arm7100",                FL_MODE26 | FL_MODE32 },
  {"arm7500",                FL_MODE26 | FL_MODE32 },
  /* Doesn't have an external co-proc, but does have embedded fpu.  */
  {"arm7500fe", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7tdmi",  FL_CO_PROC |             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  {"arm8",                   FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm810",                 FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm9",                               FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm920",                             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm920t",                            FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm940t",                            FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm9tdmi",                           FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm9e",                              FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"strongarm",              FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm110",           FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm1100",          FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm1110",          FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"arm10tdmi",                          FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_ARCH5 },
  {"arm1020t",                           FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_ARCH5 },
  {"xscale",                             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_STRONG | FL_ARCH5 | FL_ARCH5E | FL_XSCALE },

  {NULL, 0}
};
static const struct processors all_architectures[] =
{
  /* ARM Architectures */

  { "armv2",    FL_CO_PROC | FL_MODE26 },
  { "armv2a",   FL_CO_PROC | FL_MODE26 },
  { "armv3",    FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  { "armv3m",   FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  { "armv4",    FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 },
  /* Strictly, FL_MODE26 is a permitted option for v4t, but there are no
     implementations that support it, so we will leave it out for now.  */
  { "armv4t",   FL_CO_PROC |             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  { "armv5",    FL_CO_PROC |             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
  { "armv5t",   FL_CO_PROC |             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
  { "armv5te",  FL_CO_PROC |             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 | FL_ARCH5E },
  { NULL, 0 }
};
/* This is a magic structure.  The 'string' field is magically filled in
   with a pointer to the value specified by the user on the command line
   assuming that the user has specified such a value.  */

struct arm_cpu_select arm_select[] =
{
  /* string       name            processors  */
  { NULL,       "-mcpu=",       all_cores  },
  { NULL,       "-march=",      all_architectures },
  { NULL,       "-mtune=",      all_cores }
};
/* Return the number of bits set in VALUE.  */
static unsigned
bit_count (value)
     unsigned long value;
{
  unsigned long count = 0;

  while (value)
    {
      count++;
      value &= value - 1;  /* Clear the least-significant set bit.  */
    }

  return count;
}
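/* Editorial note: the loop above relies on the identity that v & (v - 1)
   clears the lowest set bit of v, so the number of iterations equals the
   number of set bits.  For example, bit_count (0x29) is 3:
   0x29 -> 0x28 -> 0x20 -> 0.  */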
/* Fix up any incompatible options that the user has specified.
   This has now turned into a maze.  */
void
arm_override_options ()
{
  unsigned i;

  /* Set up the flags based on the cpu/architecture selected by the user.  */
  for (i = ARRAY_SIZE (arm_select); i--;)
    {
      struct arm_cpu_select * ptr = arm_select + i;

      if (ptr->string != NULL && ptr->string[0] != '\0')
        {
          const struct processors * sel;

          for (sel = ptr->processors; sel->name != NULL; sel++)
            if (streq (ptr->string, sel->name))
              {
                if (i == 2)
                  tune_flags = sel->flags;
                else
                  {
                    /* If we have been given an architecture and a processor
                       make sure that they are compatible.  We only generate
                       a warning though, and we prefer the CPU over the
                       architecture.  */
                    if (insn_flags != 0 && (insn_flags ^ sel->flags))
                      warning ("switch -mcpu=%s conflicts with -march= switch",
                               ptr->string);

                    insn_flags = sel->flags;
                  }

                break;
              }

          if (sel->name == NULL)
            error ("bad value (%s) for %s switch", ptr->string, ptr->name);
        }
    }
  /* If the user did not specify a processor, choose one for them.  */
  if (insn_flags == 0)
    {
      const struct processors * sel;
      unsigned int sought;
      static const struct cpu_default
      {
        const int cpu;
        const char *const name;
      }
      cpu_defaults[] =
      {
        { TARGET_CPU_arm2,      "arm2" },
        { TARGET_CPU_arm6,      "arm6" },
        { TARGET_CPU_arm610,    "arm610" },
        { TARGET_CPU_arm710,    "arm710" },
        { TARGET_CPU_arm7m,     "arm7m" },
        { TARGET_CPU_arm7500fe, "arm7500fe" },
        { TARGET_CPU_arm7tdmi,  "arm7tdmi" },
        { TARGET_CPU_arm8,      "arm8" },
        { TARGET_CPU_arm810,    "arm810" },
        { TARGET_CPU_arm9,      "arm9" },
        { TARGET_CPU_strongarm, "strongarm" },
        { TARGET_CPU_xscale,    "xscale" },
        { TARGET_CPU_generic,   "arm" },
        { 0, 0 }
      };
      const struct cpu_default * def;

      /* Find the default.  */
      for (def = cpu_defaults; def->name; def++)
        if (def->cpu == TARGET_CPU_DEFAULT)
          break;

      /* Make sure we found the default CPU.  */
      if (def->name == NULL)
        abort ();

      /* Find the default CPU's flags.  */
      for (sel = all_cores; sel->name != NULL; sel++)
        if (streq (def->name, sel->name))
          break;

      if (sel->name == NULL)
        abort ();

      insn_flags = sel->flags;
      /* Now check to see if the user has specified any command line
         switches that require certain abilities from the cpu.  */
      sought = 0;

      if (TARGET_INTERWORK || TARGET_THUMB)
        {
          sought |= (FL_THUMB | FL_MODE32);

          /* Force apcs-32 to be used for interworking.  */
          target_flags |= ARM_FLAG_APCS_32;

          /* There are no ARM processors that support both APCS-26 and
             interworking.  Therefore we force FL_MODE26 to be removed
             from insn_flags here (if it was set), so that the search
             below will always be able to find a compatible processor.  */
          insn_flags &= ~FL_MODE26;
        }
      else if (!TARGET_APCS_32)
        sought |= FL_MODE26;

      if (sought != 0 && ((sought & insn_flags) != sought))
        {
          /* Try to locate a CPU type that supports all of the abilities
             of the default CPU, plus the extra abilities requested by
             the user.  */
          for (sel = all_cores; sel->name != NULL; sel++)
            if ((sel->flags & sought) == (sought | insn_flags))
              break;

          if (sel->name == NULL)
            {
              unsigned current_bit_count = 0;
              const struct processors * best_fit = NULL;

              /* Ideally we would like to issue an error message here
                 saying that it was not possible to find a CPU compatible
                 with the default CPU, but which also supports the command
                 line options specified by the programmer, and so they
                 ought to use the -mcpu=<name> command line option to
                 override the default CPU type.

                 Unfortunately this does not work with multilibing.  We
                 need to be able to support multilibs for -mapcs-26 and for
                 -mthumb-interwork and there is no CPU that can support both
                 options.  Instead, if we cannot find a cpu that has both the
                 characteristics of the default cpu and the given command line
                 options, we scan the array again looking for a best match.  */
              for (sel = all_cores; sel->name != NULL; sel++)
                if ((sel->flags & sought) == sought)
                  {
                    unsigned count;

                    count = bit_count (sel->flags & insn_flags);

                    if (count >= current_bit_count)
                      {
                        best_fit = sel;
                        current_bit_count = count;
                      }
                  }

              if (best_fit == NULL)
                abort ();
              else
                sel = best_fit;
            }

          insn_flags = sel->flags;
        }
    }
  /* If tuning has not been specified, tune for whichever processor or
     architecture has been selected.  */
  if (tune_flags == 0)
    tune_flags = insn_flags;

  /* Make sure that the processor choice does not conflict with any of the
     other command line choices.  */
  if (TARGET_APCS_32 && !(insn_flags & FL_MODE32))
    {
      /* If APCS-32 was not the default then it must have been set by the
         user, so issue a warning message.  If the user has specified
         "-mapcs-32 -mcpu=arm2" then we lose here.  */
      if ((TARGET_DEFAULT & ARM_FLAG_APCS_32) == 0)
        warning ("target CPU does not support APCS-32");
      target_flags &= ~ARM_FLAG_APCS_32;
    }
  else if (!TARGET_APCS_32 && !(insn_flags & FL_MODE26))
    {
      warning ("target CPU does not support APCS-26");
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_INTERWORK && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support interworking");
      target_flags &= ~ARM_FLAG_INTERWORK;
    }

  if (TARGET_THUMB && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support THUMB instructions");
      target_flags &= ~ARM_FLAG_THUMB;
    }

  if (TARGET_APCS_FRAME && TARGET_THUMB)
    {
      /* warning ("ignoring -mapcs-frame because -mthumb was used"); */
      target_flags &= ~ARM_FLAG_APCS_FRAME;
    }

  /* TARGET_BACKTRACE calls leaf_function_p, which causes a crash if done
     from here where no function is being compiled currently.  */
  if ((target_flags & (THUMB_FLAG_LEAF_BACKTRACE | THUMB_FLAG_BACKTRACE))
      && TARGET_ARM)
    warning ("enabling backtrace support is only meaningful when compiling for the Thumb");

  if (TARGET_ARM && TARGET_CALLEE_INTERWORKING)
    warning ("enabling callee interworking support is only meaningful when compiling for the Thumb");

  if (TARGET_ARM && TARGET_CALLER_INTERWORKING)
    warning ("enabling caller interworking support is only meaningful when compiling for the Thumb");

  /* If interworking is enabled then APCS-32 must be selected as well.  */
  if (TARGET_INTERWORK)
    {
      if (!TARGET_APCS_32)
        warning ("interworking forces APCS-32 to be used");
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_APCS_STACK && !TARGET_APCS_FRAME)
    {
      warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
      target_flags |= ARM_FLAG_APCS_FRAME;
    }

  if (TARGET_POKE_FUNCTION_NAME)
    target_flags |= ARM_FLAG_APCS_FRAME;

  if (TARGET_APCS_REENT && flag_pic)
    error ("-fpic and -mapcs-reent are incompatible");

  if (TARGET_APCS_REENT)
    warning ("APCS reentrant code not supported.  Ignored");

  /* If this target is normally configured to use APCS frames, warn if they
     are turned off and debugging is turned on.  */
  if (TARGET_ARM
      && write_symbols != NO_DEBUG
      && !TARGET_APCS_FRAME
      && (TARGET_DEFAULT & ARM_FLAG_APCS_FRAME))
    warning ("-g with -mno-apcs-frame may not give sensible debugging");

  /* If stack checking is disabled, we can use r10 as the PIC register,
     which keeps r9 available.  */
  if (flag_pic)
    arm_pic_register = TARGET_APCS_STACK ? 9 : 10;

  if (TARGET_APCS_FLOAT)
    warning ("passing floating point arguments in fp regs not yet supported");
  /* Initialize boolean versions of the flags, for use in the arm.md file.  */
  arm_fast_multiply = (insn_flags & FL_FAST_MULT) != 0;
  arm_arch4         = (insn_flags & FL_ARCH4) != 0;
  arm_arch5         = (insn_flags & FL_ARCH5) != 0;
  arm_arch5e        = (insn_flags & FL_ARCH5E) != 0;
  arm_is_xscale     = (insn_flags & FL_XSCALE) != 0;

  arm_ld_sched      = (tune_flags & FL_LDSCHED) != 0;
  arm_is_strong     = (tune_flags & FL_STRONG) != 0;
  thumb_code        = (TARGET_ARM == 0);
  arm_is_6_or_7     = (((tune_flags & (FL_MODE26 | FL_MODE32))
                        && !(tune_flags & FL_ARCH4))) != 0;
  /* Default value for floating point code... if no co-processor
     bus, then schedule for emulated floating point.  Otherwise,
     assume the user has an FPA.
     Note: this does not prevent use of floating point instructions,
     -msoft-float does that.  */
  arm_fpu = (tune_flags & FL_CO_PROC) ? FP_HARD : FP_SOFT3;

  if (target_fp_name)
    {
      if (streq (target_fp_name, "2"))
        arm_fpu_arch = FP_SOFT2;
      else if (streq (target_fp_name, "3"))
        arm_fpu_arch = FP_SOFT3;
      else
        error ("invalid floating point emulation option: -mfpe-%s",
               target_fp_name);
    }
  else
    arm_fpu_arch = FP_DEFAULT;

  if (TARGET_FPE && arm_fpu != FP_HARD)
    arm_fpu = FP_SOFT2;

  /* For arm2/3 there is no need to do any scheduling if there is only
     a floating point emulator, or we are doing software floating-point.  */
  if ((TARGET_SOFT_FLOAT || arm_fpu != FP_HARD)
      && (tune_flags & FL_MODE32) == 0)
    flag_schedule_insns = flag_schedule_insns_after_reload = 0;

  arm_prgmode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;
  if (structure_size_string != NULL)
    {
      int size = strtol (structure_size_string, NULL, 0);

      if (size == 8 || size == 32)
        arm_structure_size_boundary = size;
      else
        warning ("structure size boundary can only be set to 8 or 32");
    }

  if (arm_pic_register_string != NULL)
    {
      int pic_register = decode_reg_name (arm_pic_register_string);

      if (!flag_pic)
        warning ("-mpic-register= is useless without -fpic");

      /* Prevent the user from choosing an obviously stupid PIC register.  */
      else if (pic_register < 0 || call_used_regs[pic_register]
               || pic_register == HARD_FRAME_POINTER_REGNUM
               || pic_register == STACK_POINTER_REGNUM
               || pic_register >= PC_REGNUM)
        error ("unable to use '%s' for PIC register", arm_pic_register_string);
      else
        arm_pic_register = pic_register;
    }
  if (TARGET_THUMB && flag_schedule_insns)
    {
      /* Don't warn since it's on by default in -O2.  */
      flag_schedule_insns = 0;
    }

  /* If optimizing for space, don't synthesize constants.
     For processors with load scheduling, it never costs more than 2 cycles
     to load a constant, and the load scheduler may well reduce that to 1.  */
  if (optimize_size || (tune_flags & FL_LDSCHED))
    arm_constant_limit = 1;

  if (arm_is_xscale)
    arm_constant_limit = 2;

  /* If optimizing for size, bump the number of instructions that we
     are prepared to conditionally execute (even on a StrongARM).
     Otherwise for the StrongARM, which has early execution of branches,
     a sequence that is worth skipping is shorter.  */
  if (optimize_size)
    max_insns_skipped = 6;
  else if (arm_is_strong)
    max_insns_skipped = 3;

  /* Register global variables with the garbage collector.  */
  arm_add_gc_roots ();
}
static void
arm_add_gc_roots ()
{
  gcc_obstack_init (&minipool_obstack);
  minipool_startobj = (char *) obstack_alloc (&minipool_obstack, 0);
}
/* A table of known ARM exception types.
   For use with the interrupt function attribute.  */

typedef struct
{
  const char *const arg;
  const unsigned long return_value;
}
isr_attribute_arg;

static const isr_attribute_arg isr_attribute_args [] =
{
  { "IRQ",   ARM_FT_ISR },
  { "irq",   ARM_FT_ISR },
  { "FIQ",   ARM_FT_FIQ },
  { "fiq",   ARM_FT_FIQ },
  { "ABORT", ARM_FT_ISR },
  { "abort", ARM_FT_ISR },
  { "ABORT", ARM_FT_ISR },
  { "abort", ARM_FT_ISR },
  { "UNDEF", ARM_FT_EXCEPTION },
  { "undef", ARM_FT_EXCEPTION },
  { "SWI",   ARM_FT_EXCEPTION },
  { "swi",   ARM_FT_EXCEPTION },
  { NULL,    ARM_FT_NORMAL }
};
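/* Editorial note: these strings are what a user writes as the argument
   of the interrupt (or isr) function attribute, for example:

       void handler (void) __attribute__ ((interrupt ("IRQ")));

   which arm_isr_value (below) maps to ARM_FT_ISR.  */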
/* Returns the (interrupt) function type of the current
   function, or ARM_FT_UNKNOWN if the type cannot be determined.  */

static unsigned long
arm_isr_value (argument)
     tree argument;
{
  const isr_attribute_arg * ptr;
  const char * arg;

  /* No argument - default to IRQ.  */
  if (argument == NULL_TREE)
    return ARM_FT_ISR;

  /* Get the value of the argument.  */
  if (TREE_VALUE (argument) == NULL_TREE
      || TREE_CODE (TREE_VALUE (argument)) != STRING_CST)
    return ARM_FT_UNKNOWN;

  arg = TREE_STRING_POINTER (TREE_VALUE (argument));

  /* Check it against the list of known arguments.  */
  for (ptr = isr_attribute_args; ptr->arg != NULL; ptr++)
    if (streq (arg, ptr->arg))
      return ptr->return_value;

  /* An unrecognized interrupt type.  */
  return ARM_FT_UNKNOWN;
}
/* Computes the type of the current function.  */

static unsigned long
arm_compute_func_type ()
{
  unsigned long type = ARM_FT_UNKNOWN;
  tree a;
  tree attr;

  if (TREE_CODE (current_function_decl) != FUNCTION_DECL)
    abort ();

  /* Decide if the current function is volatile.  Such functions
     never return, and many memory cycles can be saved by not storing
     register values that will never be needed again.  This optimization
     was added to speed up context switching in a kernel application.  */
  if (optimize > 0
      && current_function_nothrow
      && TREE_THIS_VOLATILE (current_function_decl))
    type |= ARM_FT_VOLATILE;

  if (current_function_needs_context)
    type |= ARM_FT_NESTED;

  attr = DECL_ATTRIBUTES (current_function_decl);

  a = lookup_attribute ("naked", attr);
  if (a != NULL_TREE)
    type |= ARM_FT_NAKED;

  if (cfun->machine->eh_epilogue_sp_ofs != NULL_RTX)
    type |= ARM_FT_EXCEPTION_HANDLER;
  else
    {
      a = lookup_attribute ("isr", attr);
      if (a == NULL_TREE)
        a = lookup_attribute ("interrupt", attr);

      if (a == NULL_TREE)
        type |= TARGET_INTERWORK ? ARM_FT_INTERWORKED : ARM_FT_NORMAL;
      else
        type |= arm_isr_value (TREE_VALUE (a));
    }

  return type;
}
/* Returns the type of the current function.  */

unsigned long
arm_current_func_type ()
{
  if (ARM_FUNC_TYPE (cfun->machine->func_type) == ARM_FT_UNKNOWN)
    cfun->machine->func_type = arm_compute_func_type ();

  return cfun->machine->func_type;
}
/* Return 1 if it is possible to return using a single instruction.  */

int
use_return_insn (iscond)
     int iscond;
{
  int regno;
  unsigned int func_type;
  unsigned long saved_int_regs;

  /* Never use a return instruction before reload has run.  */
  if (!reload_completed)
    return 0;

  func_type = arm_current_func_type ();

  /* Naked functions and volatile functions need special
     consideration.  */
  if (func_type & (ARM_FT_VOLATILE | ARM_FT_NAKED))
    return 0;

  /* So do interrupt functions that use the frame pointer.  */
  if (IS_INTERRUPT (func_type) && frame_pointer_needed)
    return 0;

  /* As do variadic functions.  */
  if (current_function_pretend_args_size
      || cfun->machine->uses_anonymous_args
      /* Or if the function calls __builtin_eh_return ().  */
      || ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER
      /* Or if there is no frame pointer and there is a stack adjustment.  */
      || ((arm_get_frame_size () + current_function_outgoing_args_size != 0)
          && !frame_pointer_needed))
    return 0;

  saved_int_regs = arm_compute_save_reg_mask ();

  /* Can't be done if interworking with Thumb, and any registers have been
     stacked.  */
  if (TARGET_INTERWORK && saved_int_regs != 0)
    return 0;

  /* On StrongARM, conditional returns are expensive if they aren't
     taken and multiple registers have been stacked.  */
  if (iscond && arm_is_strong)
    {
      /* Conditional return when just the LR is stored is a simple
         conditional-load instruction, that's not expensive.  */
      if (saved_int_regs != 0 && saved_int_regs != (1 << LR_REGNUM))
        return 0;

      if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
        return 0;
    }

  /* If there are saved registers but the LR isn't saved, then we need
     two instructions for the return.  */
  if (saved_int_regs && !(saved_int_regs & (1 << LR_REGNUM)))
    return 0;

  /* Can't be done if any of the FPU regs are pushed,
     since this also requires an insn.  */
  if (TARGET_HARD_FLOAT)
    for (regno = FIRST_ARM_FP_REGNUM; regno <= LAST_ARM_FP_REGNUM; regno++)
      if (regs_ever_live[regno] && !call_used_regs[regno])
        return 0;

  return 1;
}
/* Return TRUE if int I is a valid immediate ARM constant.  */

int
const_ok_for_arm (i)
     HOST_WIDE_INT i;
{
  unsigned HOST_WIDE_INT mask = ~(unsigned HOST_WIDE_INT)0xFF;

  /* For machines with >32 bit HOST_WIDE_INT, the bits above bit 31 must
     be all zero, or all one.  */
  if ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff) != 0
      && ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff)
          != ((~(unsigned HOST_WIDE_INT) 0)
              & ~(unsigned HOST_WIDE_INT) 0xffffffff)))
    return FALSE;

  /* Fast return for 0 and powers of 2.  */
  if ((i & (i - 1)) == 0)
    return TRUE;

  do
    {
      if ((i & mask & (unsigned HOST_WIDE_INT) 0xffffffff) == 0)
        return TRUE;
      mask =
          (mask << 2) | ((mask & (unsigned HOST_WIDE_INT) 0xffffffff)
                         >> (32 - 2)) | ~(unsigned HOST_WIDE_INT) 0xffffffff;
    }
  while (mask != ~(unsigned HOST_WIDE_INT) 0xFF);

  return FALSE;
}
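/* Editorial note: the loop above tests I against the 24-bit mask ~0xFF
   rotated in steps of two, which mirrors the ARM data-processing
   immediate format: an 8-bit value rotated right by an even amount.
   So, for example, 0x000000ff, 0x00ff0000 and 0xff000000 are all valid
   immediates, while 0x00000101 is not.  */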
/* Return true if I is a valid constant for the operation CODE.  */
static int
const_ok_for_op (i, code)
     HOST_WIDE_INT i;
     enum rtx_code code;
{
  if (const_ok_for_arm (i))
    return 1;

  switch (code)
    {
    case PLUS:
      return const_ok_for_arm (ARM_SIGN_EXTEND (-i));

    case MINUS:         /* Should only occur with (MINUS I reg) => rsb */
    case XOR:
    case IOR:
      return 0;

    case AND:
      return const_ok_for_arm (ARM_SIGN_EXTEND (~i));

    default:
      abort ();
    }
}
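/* Editorial note: for example, -1 is not itself a valid ARM immediate,
   but const_ok_for_op accepts it for PLUS because the negated value 1
   fits, and the addition can then be emitted as a SUB instead.  */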
/* Emit a sequence of insns to handle a large constant.
   CODE is the code of the operation required, it can be any of SET, PLUS,
   IOR, AND, XOR, MINUS;
   MODE is the mode in which the operation is being performed;
   VAL is the integer to operate on;
   SOURCE is the other operand (a register, or a null-pointer for SET);
   SUBTARGETS means it is safe to create scratch registers if that will
   either produce a simpler sequence, or we will want to cse the values.
   Return value is the number of insns emitted.  */

int
arm_split_constant (code, mode, val, target, source, subtargets)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
{
  if (subtargets || code == SET
      || (GET_CODE (target) == REG && GET_CODE (source) == REG
          && REGNO (target) != REGNO (source)))
    {
      /* After arm_reorg has been called, we can't fix up expensive
         constants by pushing them into memory so we must synthesize
         them in-line, regardless of the cost.  This is only likely to
         be more costly on chips that have load delay slots and we are
         compiling without running the scheduler (so no splitting
         occurred before the final instruction emission).

         Ref: gcc -O1 -mcpu=strongarm gcc.c-torture/compile/980506-2.c  */
      if (!after_arm_reorg
          && (arm_gen_constant (code, mode, val, target, source, 1, 0)
              > arm_constant_limit + (code != SET)))
        {
          if (code == SET)
            {
              /* Currently SET is the only monadic value for CODE, all
                 the rest are dyadic.  */
              emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (val)));
              return 1;
            }
          else
            {
              rtx temp = subtargets ? gen_reg_rtx (mode) : target;

              emit_insn (gen_rtx_SET (VOIDmode, temp, GEN_INT (val)));
              /* For MINUS, the value is subtracted from, since we never
                 have subtraction of a constant.  */
              if (code == MINUS)
                emit_insn (gen_rtx_SET (VOIDmode, target,
                                        gen_rtx_MINUS (mode, temp, source)));
              else
                emit_insn (gen_rtx_SET (VOIDmode, target,
                                        gen_rtx (code, mode, source, temp)));
              return 2;
            }
        }
    }

  return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
}
static int
count_insns_for_constant (remainder, i)
     HOST_WIDE_INT remainder;
     int i;
{
  HOST_WIDE_INT temp1;
  int num_insns = 0;
  do
    {
      int end;

      if (i <= 0)
        i += 32;
      if (remainder & (3 << (i - 2)))
        {
          end = i - 8;
          if (end < 0)
            end += 32;
          temp1 = remainder & ((0x0ff << end)
                               | ((i < end) ? (0xff >> (32 - end)) : 0));
          remainder &= ~temp1;
          num_insns++;
          i -= 6;
        }
      i -= 2;
    } while (remainder);
  return num_insns;
}
/* As above, but extra parameter GENERATE which, if clear, suppresses
   RTL generation.  */

static int
arm_gen_constant (code, mode, val, target, source, subtargets, generate)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
     int generate;
{
  int can_invert = 0;
  int can_negate = 0;
  int can_negate_initial = 0;
  int can_shift = 0;
  int i;
  int num_bits_set = 0;
  int set_sign_bit_copies = 0;
  int clear_sign_bit_copies = 0;
  int clear_zero_bit_copies = 0;
  int set_zero_bit_copies = 0;
  int insns = 0;
  unsigned HOST_WIDE_INT temp1, temp2;
  unsigned HOST_WIDE_INT remainder = val & 0xffffffff;

  /* Find out which operations are safe for a given CODE.  Also do a quick
     check for degenerate cases; these can occur when DImode operations
     are split.  */
  switch (code)
    {
    case SET:
      can_invert = 1;
      can_shift = 1;
      can_negate = 1;
      break;

    case PLUS:
      can_negate = 1;
      can_negate_initial = 1;
      break;

    case IOR:
      if (remainder == 0xffffffff)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    GEN_INT (ARM_SIGN_EXTEND (val))));
          return 1;
        }
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      break;

    case AND:
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, const0_rtx));
          return 1;
        }
      if (remainder == 0xffffffff)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      can_invert = 1;
      break;

    case XOR:
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      if (remainder == 0xffffffff)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_NOT (mode, source)));
          return 1;
        }

      /* We don't know how to handle this yet below.  */
      abort ();

    case MINUS:
      /* We treat MINUS as (val - source), since (source - val) is always
         passed as (source + (-val)).  */
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_NEG (mode, source)));
          return 1;
        }
      if (const_ok_for_arm (val))
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_MINUS (mode, GEN_INT (val),
                                                   source)));
          return 1;
        }
      can_negate = 1;

      break;

    default:
      abort ();
    }
  /* If we can do it in one insn get out quickly.  */
  if (const_ok_for_arm (val)
      || (can_negate_initial && const_ok_for_arm (-val))
      || (can_invert && const_ok_for_arm (~val)))
    {
      if (generate)
        emit_insn (gen_rtx_SET (VOIDmode, target,
                                (source ? gen_rtx (code, mode, source,
                                                   GEN_INT (val))
                                 : GEN_INT (val))));
      return 1;
    }
  /* Calculate a few attributes that may be useful for specific
     optimizations.  */
  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) == 0)
        clear_sign_bit_copies++;
      else
        break;
    }

  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) != 0)
        set_sign_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) == 0)
        clear_zero_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) != 0)
        set_zero_bit_copies++;
      else
        break;
    }
  switch (code)
    {
    case SET:
      /* See if we can do this by sign_extending a constant that is known
         to be negative.  This is a good way of doing it, since the shift
         may well merge into a subsequent insn.  */
      if (set_sign_bit_copies > 1)
        {
          if (const_ok_for_arm
              (temp1 = ARM_SIGN_EXTEND (remainder
                                        << (set_sign_bit_copies - 1))))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx_SET (VOIDmode, new_src,
                                          GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
          /* For an inverted constant, we will need to set the low bits,
             these will be shifted out of harm's way.  */
          temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
          if (const_ok_for_arm (~temp1))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx_SET (VOIDmode, new_src,
                                          GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
        }

      /* See if we can generate this by setting the bottom (or the top)
         16 bits, and then shifting these into the other half of the
         word.  We only look for the simplest cases, to do more would cost
         too much.  Be careful, however, not to generate this when the
         alternative would take fewer insns.  */
      if (val & 0xffff0000)
        {
          temp1 = remainder & 0xffff0000;
          temp2 = remainder & 0x0000ffff;

          /* Overlaps outside this range are best done using other methods.  */
          for (i = 9; i < 24; i++)
            {
              if ((((temp2 | (temp2 << i)) & 0xffffffff) == remainder)
                  && !const_ok_for_arm (temp2))
                {
                  rtx new_src = (subtargets
                                 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
                                 : target);
                  insns = arm_gen_constant (code, mode, temp2, new_src,
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn (gen_rtx_SET
                               (VOIDmode, target,
                                gen_rtx_IOR (mode,
                                             gen_rtx_ASHIFT (mode, source,
                                                             GEN_INT (i)),
                                             source)));
                  return insns + 1;
                }
            }

          /* Don't duplicate cases already considered.  */
          for (i = 17; i < 24; i++)
            {
              if (((temp1 | (temp1 >> i)) == remainder)
                  && !const_ok_for_arm (temp1))
                {
                  rtx new_src = (subtargets
                                 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
                                 : target);
                  insns = arm_gen_constant (code, mode, temp1, new_src,
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn
                      (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_IOR
                                    (mode,
                                     gen_rtx_LSHIFTRT (mode, source,
                                                       GEN_INT (i)),
                                     source)));
                  return insns + 1;
                }
            }
        }
      break;
    case IOR:
    case XOR:
      /* If we have IOR or XOR, and the constant can be loaded in a
         single instruction, and we can find a temporary to put it in,
         then this can be done in two instructions instead of 3-4.  */
      if (subtargets
          /* TARGET can't be NULL if SUBTARGETS is 0.  */
          || (reload_completed && !reg_mentioned_p (target, source)))
        {
          if (const_ok_for_arm (ARM_SIGN_EXTEND (~val)))
            {
              if (generate)
                {
                  rtx sub = subtargets ? gen_reg_rtx (mode) : target;

                  emit_insn (gen_rtx_SET (VOIDmode, sub, GEN_INT (val)));
                  emit_insn (gen_rtx_SET (VOIDmode, target,
                                          gen_rtx (code, mode, source, sub)));
                }
              return 2;
            }
        }

      if (code == XOR)
        break;

      if (set_sign_bit_copies > 8
          && (val & (-1 << (32 - set_sign_bit_copies))) == val)
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (set_sign_bit_copies);

              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_ASHIFT (mode,
                                                                   source,
                                                                   shift))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_LSHIFTRT (mode, sub,
                                                                     shift))));
            }
          return 2;
        }

      if (set_zero_bit_copies > 8
          && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (set_zero_bit_copies);

              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_LSHIFTRT (mode,
                                                                     source,
                                                                     shift))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_ASHIFT (mode, sub,
                                                                   shift))));
            }
          return 2;
        }

      if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~val)))
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode, source)));
              source = sub;
              if (subtargets)
                sub = gen_reg_rtx (mode);
              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_AND (mode, source,
                                                   GEN_INT (temp1))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode, sub)));
            }
          return 3;
        }
      break;
    case AND:
      /* See if two shifts will do 2 or more insn's worth of work.  */
      if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
        {
          HOST_WIDE_INT shift_mask = ((0xffffffff
                                       << (32 - clear_sign_bit_copies))
                                      & 0xffffffff);

          if ((remainder | shift_mask) != 0xffffffff)
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            new_src, source, subtargets, 1);
                  source = new_src;
                }
              else
                {
                  rtx targ = subtargets ? NULL_RTX : target;
                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            targ, source, subtargets, 0);
                }
            }

          if (generate)
            {
              rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (clear_sign_bit_copies);

              emit_insn (gen_ashlsi3 (new_src, source, shift));
              emit_insn (gen_lshrsi3 (target, new_src, shift));
            }

          return insns + 2;
        }

      if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
        {
          HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;

          if ((remainder | shift_mask) != 0xffffffff)
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;

                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            new_src, source, subtargets, 1);
                  source = new_src;
                }
              else
                {
                  rtx targ = subtargets ? NULL_RTX : target;

                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            targ, source, subtargets, 0);
                }
            }

          if (generate)
            {
              rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (clear_zero_bit_copies);

              emit_insn (gen_lshrsi3 (new_src, source, shift));
              emit_insn (gen_ashlsi3 (target, new_src, shift));
            }

          return insns + 2;
        }

      break;

    default:
      break;
    }
  for (i = 0; i < 32; i++)
    if (remainder & (1 << i))
      num_bits_set++;

  if (code == AND || (can_invert && num_bits_set > 16))
    remainder = (~remainder) & 0xffffffff;
  else if (code == PLUS && num_bits_set > 16)
    remainder = (-remainder) & 0xffffffff;
  else
    {
      can_invert = 0;
      can_negate = 0;
    }
  /* Now try and find a way of doing the job in either two or three
     instructions.
     We start by looking for the largest block of zeros that are aligned on
     a 2-bit boundary, we then fill up the temps, wrapping around to the
     top of the word when we drop off the bottom.
     In the worst case this code should produce no more than four insns.  */
  {
    int best_start = 0;
    int best_consecutive_zeros = 0;

    for (i = 0; i < 32; i += 2)
      {
        int consecutive_zeros = 0;

        if (!(remainder & (3 << i)))
          {
            while ((i < 32) && !(remainder & (3 << i)))
              {
                consecutive_zeros += 2;
                i += 2;
              }
            if (consecutive_zeros > best_consecutive_zeros)
              {
                best_consecutive_zeros = consecutive_zeros;
                best_start = i - consecutive_zeros;
              }
            i -= 2;
          }
      }

    /* So long as it won't require any more insns to do so, it's
       desirable to emit a small constant (in bits 0...9) in the last
       insn.  This way there is more chance that it can be combined with
       a later addressing insn to form a pre-indexed load or store
       operation.  Consider:

               *((volatile int *)0xe0000100) = 1;
               *((volatile int *)0xe0000110) = 2;

       We want this to wind up as:

                mov rA, #0xe0000000
                mov rB, #1
                str rB, [rA, #0x100]
                mov rB, #2
                str rB, [rA, #0x110]

       rather than having to synthesize both large constants from scratch.

       Therefore, we calculate how many insns would be required to emit
       the constant starting from `best_start', and also starting from
       zero (i.e. with bit 31 first to be output).  If `best_start' doesn't
       yield a shorter sequence, we may as well use zero.  */
    if (best_start != 0
        && ((((unsigned HOST_WIDE_INT) 1) << best_start) < remainder)
        && (count_insns_for_constant (remainder, 0) <=
            count_insns_for_constant (remainder, best_start)))
      best_start = 0;
    /* Now start emitting the insns.  */
    i = best_start;
    do
      {
        int end;

        if (i <= 0)
          i += 32;
        if (remainder & (3 << (i - 2)))
          {
            end = i - 8;
            if (end < 0)
              end += 32;
            temp1 = remainder & ((0x0ff << end)
                                 | ((i < end) ? (0xff >> (32 - end)) : 0));
            remainder &= ~temp1;

            if (generate)
              {
                rtx new_src, temp1_rtx;

                if (code == SET || code == MINUS)
                  {
                    new_src = (subtargets ? gen_reg_rtx (mode) : target);
                    if (can_invert && code != MINUS)
                      temp1 = ~temp1;
                  }
                else
                  {
                    if (remainder && subtargets)
                      new_src = gen_reg_rtx (mode);
                    else
                      new_src = target;
                    if (can_invert)
                      temp1 = ~temp1;
                    else if (can_negate)
                      temp1 = -temp1;
                  }

                temp1 = trunc_int_for_mode (temp1, mode);
                temp1_rtx = GEN_INT (temp1);

                if (code == SET)
                  ;
                else if (code == MINUS)
                  temp1_rtx = gen_rtx_MINUS (mode, temp1_rtx, source);
                else
                  temp1_rtx = gen_rtx_fmt_ee (code, mode, source, temp1_rtx);

                emit_insn (gen_rtx_SET (VOIDmode, new_src, temp1_rtx));
                source = new_src;
              }

            if (code == SET)
              {
                can_invert = 0;
                code = PLUS;
              }
            else if (code == MINUS)
              code = PLUS;

            insns++;
            i -= 6;
          }
        i -= 2;
      }
    while (remainder);
  }

  return insns;
}
/* Canonicalize a comparison so that we are more likely to recognize it.
   This can be done for a few constant compares, where we can make the
   immediate value easier to load.  */

enum rtx_code
arm_canonicalize_comparison (code, op1)
     enum rtx_code code;
     rtx * op1;
{
  unsigned HOST_WIDE_INT i = INTVAL (*op1);

  switch (code)
    {
    case EQ:
    case NE:
      return code;

    case GT:
    case LE:
      if (i != ((((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1)) - 1)
          && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
        {
          *op1 = GEN_INT (i + 1);
          return code == GT ? GE : LT;
        }
      break;

    case GE:
    case LT:
      if (i != (((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1))
          && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
        {
          *op1 = GEN_INT (i - 1);
          return code == GE ? GT : LE;
        }
      break;

    case GTU:
    case LEU:
      if (i != ~((unsigned HOST_WIDE_INT) 0)
          && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
        {
          *op1 = GEN_INT (i + 1);
          return code == GTU ? GEU : LTU;
        }
      break;

    case GEU:
    case LTU:
      if (i != 0
          && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
        {
          *op1 = GEN_INT (i - 1);
          return code == GEU ? GTU : LEU;
        }
      break;

    default:
      abort ();
    }

  return code;
}
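/* Editorial note: for example, (GT x 0xfff) cannot be tested directly,
   since 0xfff is not a valid ARM immediate, but the equivalent
   (GE x 0x1000) can be, because 0x1000 loads in a single cmp.  */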
/* Decide whether a type should be returned in memory (true)
   or in a register (false).  This is called by the macro
   RETURN_IN_MEMORY.  */

int
arm_return_in_memory (type)
     tree type;
{
  HOST_WIDE_INT size;

  if (!AGGREGATE_TYPE_P (type))
    /* All simple types are returned in registers.  */
    return 0;

  size = int_size_in_bytes (type);

  if (TARGET_ATPCS)
    {
      /* ATPCS returns aggregate types in memory only if they are
         larger than a word (or are variable size).  */
      return (size < 0 || size > UNITS_PER_WORD);
    }

  /* For the arm-wince targets we choose to be compatible with Microsoft's
     ARM and Thumb compilers, which always return aggregates in memory.  */
#ifndef ARM_WINCE
  /* All structures/unions bigger than one word are returned in memory.
     Also catch the case where int_size_in_bytes returns -1.  In this case
     the aggregate is either huge or of variable size, and in either case
     we will want to return it via memory and not in a register.  */
  if (size < 0 || size > UNITS_PER_WORD)
    return 1;

  if (TREE_CODE (type) == RECORD_TYPE)
    {
      tree field;

      /* For a struct the APCS says that we only return in a register
         if the type is 'integer like' and every addressable element
         has an offset of zero.  For practical purposes this means
         that the structure can have at most one non bit-field element
         and that this element must be the first one in the structure.  */

      /* Find the first field, ignoring non FIELD_DECL things which will
         have been created by C++.  */
      for (field = TYPE_FIELDS (type);
           field && TREE_CODE (field) != FIELD_DECL;
           field = TREE_CHAIN (field))
        continue;

      if (field == NULL)
        return 0; /* An empty structure.  Allowed by an extension to ANSI C.  */

      /* Check that the first field is valid for returning in a register.  */

      /* ... Floats are not allowed.  */
      if (FLOAT_TYPE_P (TREE_TYPE (field)))
        return 1;

      /* ... Aggregates that are not themselves valid for returning in
         a register are not allowed.  */
      if (RETURN_IN_MEMORY (TREE_TYPE (field)))
        return 1;

      /* Now check the remaining fields, if any.  Only bitfields are allowed,
         since they are not addressable.  */
      for (field = TREE_CHAIN (field);
           field;
           field = TREE_CHAIN (field))
        {
          if (TREE_CODE (field) != FIELD_DECL)
            continue;

          if (!DECL_BIT_FIELD_TYPE (field))
            return 1;
        }

      return 0;
    }

  if (TREE_CODE (type) == UNION_TYPE)
    {
      tree field;

      /* Unions can be returned in registers if every element is
         integral, or can be returned in an integer register.  */
      for (field = TYPE_FIELDS (type);
           field;
           field = TREE_CHAIN (field))
        {
          if (TREE_CODE (field) != FIELD_DECL)
            continue;

          if (FLOAT_TYPE_P (TREE_TYPE (field)))
            return 1;

          if (RETURN_IN_MEMORY (TREE_TYPE (field)))
            return 1;
        }

      return 0;
    }
#endif /* not ARM_WINCE */

  /* Return all other types in memory.  */
  return 1;
}
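/* Editorial note: as a consequence of the rules above, under the APCS a
   struct such as { int i; } comes back in a register (r0), whereas
   { float f; }, or any struct with a second non-bitfield member, is
   returned in memory.  */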
1906 /* Indicate whether or not words of a double are in big-endian order. */
1909 arm_float_words_big_endian ()
1912 /* For FPA, float words are always big-endian. For VFP, float words
1913 follow the memory system mode. */
1915 if (TARGET_HARD_FLOAT)
1917 /* FIXME: TARGET_HARD_FLOAT currently implies FPA. */
1918 return 1;
1921 if (TARGET_VFP)
1922 return (TARGET_BIG_END ? 1 : 0);
1924 return 1;
1927 /* Initialize a variable CUM of type CUMULATIVE_ARGS
1928 for a call to a function whose data type is FNTYPE.
1929 For a library call, FNTYPE is NULL. */
1930 void
1931 arm_init_cumulative_args (pcum, fntype, libname, indirect)
1932 CUMULATIVE_ARGS * pcum;
1933 tree fntype;
1934 rtx libname ATTRIBUTE_UNUSED;
1935 int indirect ATTRIBUTE_UNUSED;
1937 /* On the ARM, the offset starts at 0. */
1938 pcum->nregs = ((fntype && aggregate_value_p (TREE_TYPE (fntype))) ? 1 : 0);
1940 pcum->call_cookie = CALL_NORMAL;
1942 if (TARGET_LONG_CALLS)
1943 pcum->call_cookie = CALL_LONG;
1945 /* Check for long call/short call attributes. The attributes
1946 override any command line option. */
1947 if (fntype)
1949 if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (fntype)))
1950 pcum->call_cookie = CALL_SHORT;
1951 else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (fntype)))
1952 pcum->call_cookie = CALL_LONG;
1956 /* Determine where to put an argument to a function.
1957 Value is zero to push the argument on the stack,
1958 or a hard register in which to store the argument.
1960 MODE is the argument's machine mode.
1961 TYPE is the data type of the argument (as a tree).
1962 This is null for libcalls where that information may
1963 not be available.
1964 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1965 the preceding args and about the function being called.
1966 NAMED is nonzero if this argument is a named parameter
1967 (otherwise it is an extra parameter matching an ellipsis). */
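/* Illustratively: pcum->nregs is the next argument register to use, so
   with the standard APCS allocation the first NUM_ARG_REGS (four) words
   of named arguments land in r0-r3, and NULL_RTX pushes the rest onto
   the stack. */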
1970 arm_function_arg (pcum, mode, type, named)
1971 CUMULATIVE_ARGS * pcum;
1972 enum machine_mode mode;
1973 tree type ATTRIBUTE_UNUSED;
1974 int named;
1976 if (mode == VOIDmode)
1977 /* Compute operand 2 of the call insn. */
1978 return GEN_INT (pcum->call_cookie);
1980 if (!named || pcum->nregs >= NUM_ARG_REGS)
1981 return NULL_RTX;
1983 return gen_rtx_REG (mode, pcum->nregs);
1986 /* Variable sized types are passed by reference. This is a GCC
1987 extension to the ARM ABI. */
1990 arm_function_arg_pass_by_reference (cum, mode, type, named)
1991 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
1992 enum machine_mode mode ATTRIBUTE_UNUSED;
1993 tree type;
1994 int named ATTRIBUTE_UNUSED;
1996 return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
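/* For example (illustrative): a GNU C parameter whose type has a
   non-constant size, such as one involving a variable-length array, has
   a TYPE_SIZE that is not an INTEGER_CST, so it is passed by invisible
   reference (its address) rather than by value. */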
1999 /* Implement va_arg. */
2002 arm_va_arg (valist, type)
2003 tree valist, type;
2005 /* Variable sized types are passed by reference. */
2006 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2008 rtx addr = std_expand_builtin_va_arg (valist, build_pointer_type (type));
2009 return gen_rtx_MEM (ptr_mode, force_reg (Pmode, addr));
2012 return std_expand_builtin_va_arg (valist, type);
2015 /* Encode the current state of the #pragma [no_]long_calls. */
2016 typedef enum
2018 OFF, /* No #pragma [no_]long_calls is in effect. */
2019 LONG, /* #pragma long_calls is in effect. */
2020 SHORT /* #pragma no_long_calls is in effect. */
2021 } arm_pragma_enum;
2023 static arm_pragma_enum arm_pragma_long_calls = OFF;
2025 void
2026 arm_pr_long_calls (pfile)
2027 struct cpp_reader * pfile ATTRIBUTE_UNUSED;
2029 arm_pragma_long_calls = LONG;
2032 void
2033 arm_pr_no_long_calls (pfile)
2034 struct cpp_reader * pfile ATTRIBUTE_UNUSED;
2036 arm_pragma_long_calls = SHORT;
2039 void
2040 arm_pr_long_calls_off (pfile)
2041 struct cpp_reader * pfile ATTRIBUTE_UNUSED;
2043 arm_pragma_long_calls = OFF;
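/* Usage sketch (hypothetical user code, not part of this file):
     #pragma long_calls
     void far_away (void);        -- gets the long_call attribute
     #pragma no_long_calls
     void near_by (void);         -- gets the short_call attribute
     #pragma long_calls_off
   The attributes are applied by arm_set_default_type_attributes below. */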
2046 /* Table of machine attributes. */
2047 const struct attribute_spec arm_attribute_table[] =
2049 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
2050 /* Function calls made to this symbol must be done indirectly, because
2051 it may lie outside of the 26 bit addressing range of a normal function
2052 call. */
2053 { "long_call", 0, 0, false, true, true, NULL },
2054 /* Whereas these functions are always known to reside within the 26 bit
2055 addressing range. */
2056 { "short_call", 0, 0, false, true, true, NULL },
2057 /* Interrupt Service Routines have special prologue and epilogue requirements. */
2058 { "isr", 0, 1, false, false, false, arm_handle_isr_attribute },
2059 { "interrupt", 0, 1, false, false, false, arm_handle_isr_attribute },
2060 { "naked", 0, 0, true, false, false, arm_handle_fndecl_attribute },
2061 #ifdef ARM_PE
2062 /* ARM/PE has three new attributes:
2063 interfacearm - ?
2064 dllexport - for exporting a function/variable that will live in a dll
2065 dllimport - for importing a function/variable from a dll
2067 Microsoft allows multiple declspecs in one __declspec, separating
2068 them with spaces. We do NOT support this. Instead, use __declspec
2069 multiple times.
2071 { "dllimport", 0, 0, true, false, false, NULL },
2072 { "dllexport", 0, 0, true, false, false, NULL },
2073 { "interfacearm", 0, 0, true, false, false, arm_handle_fndecl_attribute },
2074 #endif
2075 { NULL, 0, 0, false, false, false, NULL }
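/* Usage sketch (hypothetical user code, not part of this file):
     extern void far_func (void) __attribute__ ((long_call));
     void handler (void) __attribute__ ((interrupt ("IRQ")));
   The "IRQ" argument is validated through arm_isr_value by
   arm_handle_isr_attribute below. */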
2078 /* Handle an attribute requiring a FUNCTION_DECL;
2079 arguments as in struct attribute_spec.handler. */
2081 static tree
2082 arm_handle_fndecl_attribute (node, name, args, flags, no_add_attrs)
2083 tree * node;
2084 tree name;
2085 tree args ATTRIBUTE_UNUSED;
2086 int flags ATTRIBUTE_UNUSED;
2087 bool * no_add_attrs;
2089 if (TREE_CODE (*node) != FUNCTION_DECL)
2091 warning ("`%s' attribute only applies to functions",
2092 IDENTIFIER_POINTER (name));
2093 *no_add_attrs = true;
2096 return NULL_TREE;
2099 /* Handle an "interrupt" or "isr" attribute;
2100 arguments as in struct attribute_spec.handler. */
2102 static tree
2103 arm_handle_isr_attribute (node, name, args, flags, no_add_attrs)
2104 tree * node;
2105 tree name;
2106 tree args;
2107 int flags;
2108 bool * no_add_attrs;
2110 if (DECL_P (*node))
2112 if (TREE_CODE (*node) != FUNCTION_DECL)
2114 warning ("`%s' attribute only applies to functions",
2115 IDENTIFIER_POINTER (name));
2116 *no_add_attrs = true;
2118 /* FIXME: the argument, if any, is checked for type attributes;
2119 should it be checked for decl ones? */
2121 else
2123 if (TREE_CODE (*node) == FUNCTION_TYPE
2124 || TREE_CODE (*node) == METHOD_TYPE)
2126 if (arm_isr_value (args) == ARM_FT_UNKNOWN)
2128 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
2129 *no_add_attrs = true;
2132 else if (TREE_CODE (*node) == POINTER_TYPE
2133 && (TREE_CODE (TREE_TYPE (*node)) == FUNCTION_TYPE
2134 || TREE_CODE (TREE_TYPE (*node)) == METHOD_TYPE)
2135 && arm_isr_value (args) != ARM_FT_UNKNOWN)
2137 *node = build_type_copy (*node);
2138 TREE_TYPE (*node) = build_type_attribute_variant
2139 (TREE_TYPE (*node),
2140 tree_cons (name, args, TYPE_ATTRIBUTES (TREE_TYPE (*node))));
2141 *no_add_attrs = true;
2143 else
2145 /* Possibly pass this attribute on from the type to a decl. */
2146 if (flags & ((int) ATTR_FLAG_DECL_NEXT
2147 | (int) ATTR_FLAG_FUNCTION_NEXT
2148 | (int) ATTR_FLAG_ARRAY_NEXT))
2150 *no_add_attrs = true;
2151 return tree_cons (name, args, NULL_TREE);
2153 else
2155 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
2160 return NULL_TREE;
2163 /* Return 0 if the attributes for two types are incompatible, 1 if they
2164 are compatible, and 2 if they are nearly compatible (which causes a
2165 warning to be generated). */
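/* Illustratively: a 'void (void)' type carrying long_call and a plain
   'void (void)' type compare as incompatible (0) here, because exactly
   one of them has a call-type attribute. */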
2167 static int
2168 arm_comp_type_attributes (type1, type2)
2169 tree type1;
2170 tree type2;
2172 int l1, l2, s1, s2;
2174 /* Check for mismatch of non-default calling convention. */
2175 if (TREE_CODE (type1) != FUNCTION_TYPE)
2176 return 1;
2178 /* Check for mismatched call attributes. */
2179 l1 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type1)) != NULL;
2180 l2 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type2)) != NULL;
2181 s1 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type1)) != NULL;
2182 s2 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type2)) != NULL;
2184 /* Only bother to check if an attribute is defined. */
2185 if (l1 | l2 | s1 | s2)
2187 /* If one type has an attribute, the other must have the same attribute. */
2188 if ((l1 != l2) || (s1 != s2))
2189 return 0;
2191 /* Disallow mixed attributes. */
2192 if ((l1 & s2) || (l2 & s1))
2193 return 0;
2196 /* Check for mismatched ISR attribute. */
2197 l1 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type1)) != NULL;
2198 if (! l1)
2199 l1 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type1)) != NULL;
2200 l2 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type2)) != NULL;
2201 if (! l2)
2202 l2 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type2)) != NULL;
2203 if (l1 != l2)
2204 return 0;
2206 return 1;
2209 /* Encode long_call or short_call attribute by prefixing
2210 symbol name in DECL with a special character FLAG. */
2212 void
2213 arm_encode_call_attribute (decl, flag)
2214 tree decl;
2215 int flag;
2217 const char * str = XSTR (XEXP (DECL_RTL (decl), 0), 0);
2218 int len = strlen (str);
2219 char * newstr;
2221 /* Do not allow weak functions to be treated as short call. */
2222 if (DECL_WEAK (decl) && flag == SHORT_CALL_FLAG_CHAR)
2223 return;
2225 newstr = alloca (len + 2);
2226 newstr[0] = flag;
2227 strcpy (newstr + 1, str);
2229 newstr = (char *) ggc_alloc_string (newstr, len + 1);
2230 XSTR (XEXP (DECL_RTL (decl), 0), 0) = newstr;
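/* Illustratively, a symbol "foo" with the long_call attribute is
   rewritten here to "<FLAG>foo", where FLAG is LONG_CALL_FLAG_CHAR (or
   SHORT_CALL_FLAG_CHAR for short_call); the ENCODED_*_CALL_ATTR_P
   macros later test for this one-character prefix. */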
2233 /* Assigns default attributes to a newly defined type. This is used to
2234 set short_call/long_call attributes for function types of
2235 functions defined inside corresponding #pragma scopes. */
2237 static void
2238 arm_set_default_type_attributes (type)
2239 tree type;
2241 /* Add __attribute__ ((long_call)) to all functions, when
2242 inside #pragma long_calls or __attribute__ ((short_call)),
2243 when inside #pragma no_long_calls. */
2244 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
2246 tree type_attr_list, attr_name;
2247 type_attr_list = TYPE_ATTRIBUTES (type);
2249 if (arm_pragma_long_calls == LONG)
2250 attr_name = get_identifier ("long_call");
2251 else if (arm_pragma_long_calls == SHORT)
2252 attr_name = get_identifier ("short_call");
2253 else
2254 return;
2256 type_attr_list = tree_cons (attr_name, NULL_TREE, type_attr_list);
2257 TYPE_ATTRIBUTES (type) = type_attr_list;
2261 /* Return 1 if the operand is a SYMBOL_REF for a function known to be
2262 defined within the current compilation unit. If this cannot be
2263 determined, then 0 is returned. */
2265 static int
2266 current_file_function_operand (sym_ref)
2267 rtx sym_ref;
2269 /* This is a bit of a fib. A function will have a short call flag
2270 applied to its name if it has the short call attribute, or it has
2271 already been defined within the current compilation unit. */
2272 if (ENCODED_SHORT_CALL_ATTR_P (XSTR (sym_ref, 0)))
2273 return 1;
2275 /* The current function is always defined within the current compilation
2276 unit. If it is a weak definition, however, then this may not be the real
2277 definition of the function, and so we have to say no. */
2278 if (sym_ref == XEXP (DECL_RTL (current_function_decl), 0)
2279 && !DECL_WEAK (current_function_decl))
2280 return 1;
2282 /* We cannot make the determination - default to returning 0. */
2283 return 0;
2286 /* Return nonzero if a 32 bit "long_call" should be generated for
2287 this call. We generate a long_call if the function:
2289 a. has an __attribute__ ((long_call))
2290 or b. is within the scope of a #pragma long_calls
2291 or c. the -mlong-calls command line switch has been specified
2293 However we do not generate a long call if the function:
2295 d. has an __attribute__ ((short_call))
2296 or e. is inside the scope of a #pragma no_long_calls
2297 or f. has an __attribute__ ((section))
2298 or g. is defined within the current compilation unit.
2300 This function will be called by C fragments contained in the machine
2301 description file. CALL_REF and CALL_COOKIE correspond to the matched
2302 rtl operands. CALL_SYMBOL is used to distinguish between
2303 two different callers of the function. It is set to 1 in the
2304 "call_symbol" and "call_symbol_value" patterns and to 0 in the "call"
2305 and "call_value" patterns. This is because of the difference in the
2306 SYM_REFs passed by these patterns. */
2309 arm_is_longcall_p (sym_ref, call_cookie, call_symbol)
2310 rtx sym_ref;
2311 int call_cookie;
2312 int call_symbol;
2314 if (!call_symbol)
2316 if (GET_CODE (sym_ref) != MEM)
2317 return 0;
2319 sym_ref = XEXP (sym_ref, 0);
2322 if (GET_CODE (sym_ref) != SYMBOL_REF)
2323 return 0;
2325 if (call_cookie & CALL_SHORT)
2326 return 0;
2328 if (TARGET_LONG_CALLS && flag_function_sections)
2329 return 1;
2331 if (current_file_function_operand (sym_ref))
2332 return 0;
2334 return (call_cookie & CALL_LONG)
2335 || ENCODED_LONG_CALL_ATTR_P (XSTR (sym_ref, 0))
2336 || TARGET_LONG_CALLS;
2339 /* Return nonzero if it is ok to make a tail-call to DECL. */
2341 static bool
2342 arm_function_ok_for_sibcall (decl, exp)
2343 tree decl;
2344 tree exp ATTRIBUTE_UNUSED;
2346 int call_type = TARGET_LONG_CALLS ? CALL_LONG : CALL_NORMAL;
2348 /* Never tailcall something for which we have no decl, or if we
2349 are in Thumb mode. */
2350 if (decl == NULL || TARGET_THUMB)
2351 return false;
2353 /* Get the calling method. */
2354 if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
2355 call_type = CALL_SHORT;
2356 else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
2357 call_type = CALL_LONG;
2359 /* Cannot tail-call to long calls, since these are out of range of
2360 a branch instruction. However, if not compiling PIC, we know
2361 we can reach the symbol if it is in this compilation unit. */
2362 if (call_type == CALL_LONG && (flag_pic || !TREE_ASM_WRITTEN (decl)))
2363 return false;
2365 /* If we are interworking and the function is not declared static
2366 then we can't tail-call it unless we know that it exists in this
2367 compilation unit (since it might be a Thumb routine). */
2368 if (TARGET_INTERWORK && TREE_PUBLIC (decl) && !TREE_ASM_WRITTEN (decl))
2369 return false;
2371 /* Never tailcall from an ISR routine - it needs a special exit sequence. */
2372 if (IS_INTERRUPT (arm_current_func_type ()))
2373 return false;
2375 /* Everything else is ok. */
2376 return true;
2380 /* Addressing mode support functions. */
2382 /* Return non-zero if X is a legitimate immediate operand when compiling
2383 for PIC. */
2385 legitimate_pic_operand_p (x)
2386 rtx x;
2388 if (CONSTANT_P (x)
2389 && flag_pic
2390 && (GET_CODE (x) == SYMBOL_REF
2391 || (GET_CODE (x) == CONST
2392 && GET_CODE (XEXP (x, 0)) == PLUS
2393 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)))
2394 return 0;
2396 return 1;
2400 legitimize_pic_address (orig, mode, reg)
2401 rtx orig;
2402 enum machine_mode mode;
2403 rtx reg;
2405 if (GET_CODE (orig) == SYMBOL_REF
2406 || GET_CODE (orig) == LABEL_REF)
2408 #ifndef AOF_ASSEMBLER
2409 rtx pic_ref, address;
2410 #endif
2411 rtx insn;
2412 int subregs = 0;
2414 if (reg == 0)
2416 if (no_new_pseudos)
2417 abort ();
2418 else
2419 reg = gen_reg_rtx (Pmode);
2421 subregs = 1;
2424 #ifdef AOF_ASSEMBLER
2425 /* The AOF assembler can generate relocations for these directly, and
2426 understands that the PIC register has to be added into the offset. */
2427 insn = emit_insn (gen_pic_load_addr_based (reg, orig));
2428 #else
2429 if (subregs)
2430 address = gen_reg_rtx (Pmode);
2431 else
2432 address = reg;
2434 if (TARGET_ARM)
2435 emit_insn (gen_pic_load_addr_arm (address, orig));
2436 else
2437 emit_insn (gen_pic_load_addr_thumb (address, orig));
2439 if ((GET_CODE (orig) == LABEL_REF
2440 || (GET_CODE (orig) == SYMBOL_REF &&
2441 ENCODED_SHORT_CALL_ATTR_P (XSTR (orig, 0))))
2442 && NEED_GOT_RELOC)
2443 pic_ref = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, address);
2444 else
2446 pic_ref = gen_rtx_MEM (Pmode,
2447 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
2448 address));
2449 RTX_UNCHANGING_P (pic_ref) = 1;
2452 insn = emit_move_insn (reg, pic_ref);
2453 #endif
2454 current_function_uses_pic_offset_table = 1;
2455 /* Put a REG_EQUAL note on this insn, so that it can be optimized
2456 by loop. */
2457 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, orig,
2458 REG_NOTES (insn));
2459 return reg;
2461 else if (GET_CODE (orig) == CONST)
2463 rtx base, offset;
2465 if (GET_CODE (XEXP (orig, 0)) == PLUS
2466 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
2467 return orig;
2469 if (reg == 0)
2471 if (no_new_pseudos)
2472 abort ();
2473 else
2474 reg = gen_reg_rtx (Pmode);
2477 if (GET_CODE (XEXP (orig, 0)) == PLUS)
2479 base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
2480 offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
2481 base == reg ? 0 : reg);
2483 else
2484 abort ();
2486 if (GET_CODE (offset) == CONST_INT)
2488 /* The base register doesn't really matter, we only want to
2489 test the index for the appropriate mode. */
2490 if (!arm_legitimate_index_p (mode, offset, 0))
2492 if (!no_new_pseudos)
2493 offset = force_reg (Pmode, offset);
2494 else
2495 abort ();
2498 if (GET_CODE (offset) == CONST_INT)
2499 return plus_constant (base, INTVAL (offset));
2502 if (GET_MODE_SIZE (mode) > 4
2503 && (GET_MODE_CLASS (mode) == MODE_INT
2504 || TARGET_SOFT_FLOAT))
2506 emit_insn (gen_addsi3 (reg, base, offset));
2507 return reg;
2510 return gen_rtx_PLUS (Pmode, base, offset);
2513 return orig;
2516 /* Generate code to load the PIC register. PROLOGUE is true if
2517 called from arm_expand_prologue (in which case we want the
2518 generated insns at the start of the function); false if called
2519 by an exception receiver that needs the PIC register reloaded
2520 (in which case the insns are just dumped at the current location). */
2522 void
2523 arm_finalize_pic (prologue)
2524 int prologue ATTRIBUTE_UNUSED;
2526 #ifndef AOF_ASSEMBLER
2527 rtx l1, pic_tmp, pic_tmp2, seq, pic_rtx;
2528 rtx global_offset_table;
2530 if (current_function_uses_pic_offset_table == 0 || TARGET_SINGLE_PIC_BASE)
2531 return;
2533 if (!flag_pic)
2534 abort ();
2536 start_sequence ();
2537 l1 = gen_label_rtx ();
2539 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2540 /* On the ARM the PC register contains 'dot + 8' at the time of the
2541 addition, on the Thumb it is 'dot + 4'. */
2542 pic_tmp = plus_constant (gen_rtx_LABEL_REF (Pmode, l1), TARGET_ARM ? 8 : 4);
2543 if (GOT_PCREL)
2544 pic_tmp2 = gen_rtx_CONST (VOIDmode,
2545 gen_rtx_PLUS (Pmode, global_offset_table, pc_rtx));
2546 else
2547 pic_tmp2 = gen_rtx_CONST (VOIDmode, global_offset_table);
2549 pic_rtx = gen_rtx_CONST (Pmode, gen_rtx_MINUS (Pmode, pic_tmp2, pic_tmp));
2551 if (TARGET_ARM)
2553 emit_insn (gen_pic_load_addr_arm (pic_offset_table_rtx, pic_rtx));
2554 emit_insn (gen_pic_add_dot_plus_eight (pic_offset_table_rtx, l1));
2556 else
2558 emit_insn (gen_pic_load_addr_thumb (pic_offset_table_rtx, pic_rtx));
2559 emit_insn (gen_pic_add_dot_plus_four (pic_offset_table_rtx, l1));
2562 seq = get_insns ();
2563 end_sequence ();
2564 if (prologue)
2565 emit_insn_after (seq, get_insns ());
2566 else
2567 emit_insn (seq);
2569 /* Need to emit this whether or not we obey regdecls,
2570 since setjmp/longjmp can cause life info to screw up. */
2571 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
2572 #endif /* AOF_ASSEMBLER */
2575 /* Return nonzero if X is valid as an ARM state addressing register. */
2576 static int
2577 arm_address_register_rtx_p (x, strict_p)
2578 rtx x;
2579 int strict_p;
2581 int regno;
2583 if (GET_CODE (x) != REG)
2584 return 0;
2586 regno = REGNO (x);
2588 if (strict_p)
2589 return ARM_REGNO_OK_FOR_BASE_P (regno);
2591 return (regno <= LAST_ARM_REGNUM
2592 || regno >= FIRST_PSEUDO_REGISTER
2593 || regno == FRAME_POINTER_REGNUM
2594 || regno == ARG_POINTER_REGNUM);
2597 /* Return nonzero if X is a valid ARM state address operand. */
2599 arm_legitimate_address_p (mode, x, strict_p)
2600 enum machine_mode mode;
2601 rtx x;
2602 int strict_p;
2604 if (arm_address_register_rtx_p (x, strict_p))
2605 return 1;
2607 else if (GET_CODE (x) == POST_INC || GET_CODE (x) == PRE_DEC)
2608 return arm_address_register_rtx_p (XEXP (x, 0), strict_p);
2610 else if ((GET_CODE (x) == POST_MODIFY || GET_CODE (x) == PRE_MODIFY)
2611 && GET_MODE_SIZE (mode) <= 4
2612 && arm_address_register_rtx_p (XEXP (x, 0), strict_p)
2613 && GET_CODE (XEXP (x, 1)) == PLUS
2614 && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
2615 return arm_legitimate_index_p (mode, XEXP (XEXP (x, 1), 1), strict_p);
2617 /* After reload constants split into minipools will have addresses
2618 from a LABEL_REF. */
2619 else if (GET_MODE_SIZE (mode) >= 4 && reload_completed
2620 && (GET_CODE (x) == LABEL_REF
2621 || (GET_CODE (x) == CONST
2622 && GET_CODE (XEXP (x, 0)) == PLUS
2623 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF
2624 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
2625 return 1;
2627 else if (mode == TImode)
2628 return 0;
2630 else if (mode == DImode || (TARGET_SOFT_FLOAT && mode == DFmode))
2632 if (GET_CODE (x) == PLUS
2633 && arm_address_register_rtx_p (XEXP (x, 0), strict_p)
2634 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2636 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
2638 if (val == 4 || val == -4 || val == -8)
2639 return 1;
2643 else if (GET_CODE (x) == PLUS)
2645 rtx xop0 = XEXP (x, 0);
2646 rtx xop1 = XEXP (x, 1);
2648 return ((arm_address_register_rtx_p (xop0, strict_p)
2649 && arm_legitimate_index_p (mode, xop1, strict_p))
2650 || (arm_address_register_rtx_p (xop1, strict_p)
2651 && arm_legitimate_index_p (mode, xop0, strict_p)));
2654 #if 0
2655 /* Reload currently can't handle MINUS, so disable this for now. */
2656 else if (GET_CODE (x) == MINUS)
2658 rtx xop0 = XEXP (x, 0);
2659 rtx xop1 = XEXP (x, 1);
2661 return (arm_address_register_rtx_p (xop0, strict_p)
2662 && arm_legitimate_index_p (mode, xop1, strict_p));
2664 #endif
2666 else if (GET_MODE_CLASS (mode) != MODE_FLOAT
2667 && GET_CODE (x) == SYMBOL_REF
2668 && CONSTANT_POOL_ADDRESS_P (x)
2669 && ! (flag_pic
2670 && symbol_mentioned_p (get_pool_constant (x))))
2671 return 1;
2673 else if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == POST_DEC)
2674 && (GET_MODE_SIZE (mode) <= 4)
2675 && arm_address_register_rtx_p (XEXP (x, 0), strict_p))
2676 return 1;
2678 return 0;
2681 /* Return nonzero if INDEX is valid for an address index operand in
2682 ARM state. */
2683 static int
2684 arm_legitimate_index_p (mode, index, strict_p)
2685 enum machine_mode mode;
2686 rtx index;
2687 int strict_p;
2689 HOST_WIDE_INT range;
2690 enum rtx_code code = GET_CODE (index);
2692 if (TARGET_HARD_FLOAT && GET_MODE_CLASS (mode) == MODE_FLOAT)
2693 return (code == CONST_INT && INTVAL (index) < 1024
2694 && INTVAL (index) > -1024
2695 && (INTVAL (index) & 3) == 0);
2697 if (arm_address_register_rtx_p (index, strict_p)
2698 && GET_MODE_SIZE (mode) <= 4)
2699 return 1;
2701 /* XXX What about ldrsb? */
2702 if (GET_MODE_SIZE (mode) <= 4 && code == MULT
2703 && (!arm_arch4 || (mode) != HImode))
2705 rtx xiop0 = XEXP (index, 0);
2706 rtx xiop1 = XEXP (index, 1);
2708 return ((arm_address_register_rtx_p (xiop0, strict_p)
2709 && power_of_two_operand (xiop1, SImode))
2710 || (arm_address_register_rtx_p (xiop1, strict_p)
2711 && power_of_two_operand (xiop0, SImode)));
2714 if (GET_MODE_SIZE (mode) <= 4
2715 && (code == LSHIFTRT || code == ASHIFTRT
2716 || code == ASHIFT || code == ROTATERT)
2717 && (!arm_arch4 || (mode) != HImode))
2719 rtx op = XEXP (index, 1);
2721 return (arm_address_register_rtx_p (XEXP (index, 0), strict_p)
2722 && GET_CODE (op) == CONST_INT
2723 && INTVAL (op) > 0
2724 && INTVAL (op) <= 31);
2727 /* XXX For ARM v4 we may be doing a sign-extend operation during the
2728 load, but that has a restricted addressing range and we are unable
2729 to tell here whether that is the case. To be safe we restrict all
2730 loads to that range. */
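/* Illustratively: on ARMv4 the halfword and signed-byte loads (ldrh,
   ldrsb) only have an 8-bit immediate offset, hence the 256 bound,
   while plain ldr and ldrb allow 12-bit offsets. */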
2731 range = ((mode) == HImode || (mode) == QImode)
2732 ? (arm_arch4 ? 256 : 4095) : 4096;
2734 return (code == CONST_INT
2735 && INTVAL (index) < range
2736 && INTVAL (index) > -range);
2739 /* Return nonzero if X is valid as an ARM state addressing register. */
2740 static int
2741 thumb_base_register_rtx_p (x, mode, strict_p)
2742 rtx x;
2743 enum machine_mode mode;
2744 int strict_p;
2746 int regno;
2748 if (GET_CODE (x) != REG)
2749 return 0;
2751 regno = REGNO (x);
2753 if (strict_p)
2754 return THUMB_REGNO_MODE_OK_FOR_BASE_P (regno, mode);
2756 return (regno <= LAST_LO_REGNUM
2757 || regno >= FIRST_PSEUDO_REGISTER
2758 || regno == FRAME_POINTER_REGNUM
2759 || (GET_MODE_SIZE (mode) >= 4
2760 && (regno == STACK_POINTER_REGNUM
2761 || x == hard_frame_pointer_rtx
2762 || x == arg_pointer_rtx)));
2765 /* Return nonzero if x is a legitimate index register. This is the case
2766 for any base register that can access a QImode object. */
2767 inline static int
2768 thumb_index_register_rtx_p (x, strict_p)
2769 rtx x;
2770 int strict_p;
2772 return thumb_base_register_rtx_p (x, QImode, strict_p);
2775 /* Return nonzero if x is a legitimate Thumb-state address.
2777 The AP may be eliminated to either the SP or the FP, so we use the
2778 least common denominator, e.g. SImode, and offsets from 0 to 64.
2780 ??? Verify whether the above is the right approach.
2782 ??? Also, the FP may be eliminated to the SP, so perhaps that
2783 needs special handling also.
2785 ??? Look at how the mips16 port solves this problem. It probably uses
2786 better ways to solve some of these problems.
2788 Although it is not incorrect, we don't accept QImode and HImode
2789 addresses based on the frame pointer or arg pointer until the
2790 reload pass starts. This is so that eliminating such addresses
2791 into stack based ones won't produce impossible code. */
2793 thumb_legitimate_address_p (mode, x, strict_p)
2794 enum machine_mode mode;
2795 rtx x;
2796 int strict_p;
2798 /* ??? Not clear if this is right. Experiment. */
2799 if (GET_MODE_SIZE (mode) < 4
2800 && !(reload_in_progress || reload_completed)
2801 && (reg_mentioned_p (frame_pointer_rtx, x)
2802 || reg_mentioned_p (arg_pointer_rtx, x)
2803 || reg_mentioned_p (virtual_incoming_args_rtx, x)
2804 || reg_mentioned_p (virtual_outgoing_args_rtx, x)
2805 || reg_mentioned_p (virtual_stack_dynamic_rtx, x)
2806 || reg_mentioned_p (virtual_stack_vars_rtx, x)))
2807 return 0;
2809 /* Accept any base register. SP only in SImode or larger. */
2810 else if (thumb_base_register_rtx_p (x, mode, strict_p))
2811 return 1;
2813 /* This is PC relative data before MACHINE_DEPENDENT_REORG runs. */
2814 else if (GET_MODE_SIZE (mode) >= 4 && CONSTANT_P (x)
2815 && GET_CODE (x) == SYMBOL_REF
2816 && CONSTANT_POOL_ADDRESS_P (x) && ! flag_pic)
2817 return 1;
2819 /* This is PC relative data after MACHINE_DEPENDENT_REORG runs. */
2820 else if (GET_MODE_SIZE (mode) >= 4 && reload_completed
2821 && (GET_CODE (x) == LABEL_REF
2822 || (GET_CODE (x) == CONST
2823 && GET_CODE (XEXP (x, 0)) == PLUS
2824 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF
2825 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
2826 return 1;
2828 /* Post-inc indexing only supported for SImode and larger. */
2829 else if (GET_CODE (x) == POST_INC && GET_MODE_SIZE (mode) >= 4
2830 && thumb_index_register_rtx_p (XEXP (x, 0), strict_p))
2831 return 1;
2833 else if (GET_CODE (x) == PLUS)
2835 /* REG+REG address can be any two index registers. */
2836 /* We disallow FRAME+REG addressing since we know that FRAME
2837 will be replaced with STACK, and SP relative addressing only
2838 permits SP+OFFSET. */
2839 if (GET_MODE_SIZE (mode) <= 4
2840 && XEXP (x, 0) != frame_pointer_rtx
2841 && XEXP (x, 1) != frame_pointer_rtx
2842 && XEXP (x, 0) != virtual_stack_vars_rtx
2843 && XEXP (x, 1) != virtual_stack_vars_rtx
2844 && thumb_index_register_rtx_p (XEXP (x, 0), strict_p)
2845 && thumb_index_register_rtx_p (XEXP (x, 1), strict_p))
2846 return 1;
2848 /* REG+const has 5-7 bit offset for non-SP registers. */
2849 else if ((thumb_index_register_rtx_p (XEXP (x, 0), strict_p)
2850 || XEXP (x, 0) == arg_pointer_rtx)
2851 && GET_CODE (XEXP (x, 1)) == CONST_INT
2852 && thumb_legitimate_offset_p (mode, INTVAL (XEXP (x, 1))))
2853 return 1;
2855 /* REG+const has 10 bit offset for SP, but only SImode and
2856 larger is supported. */
2857 /* ??? Should probably check for DI/DFmode overflow here
2858 just like GO_IF_LEGITIMATE_OFFSET does. */
2859 else if (GET_CODE (XEXP (x, 0)) == REG
2860 && REGNO (XEXP (x, 0)) == STACK_POINTER_REGNUM
2861 && GET_MODE_SIZE (mode) >= 4
2862 && GET_CODE (XEXP (x, 1)) == CONST_INT
2863 && INTVAL (XEXP (x, 1)) >= 0
2864 && INTVAL (XEXP (x, 1)) + GET_MODE_SIZE (mode) <= 1024
2865 && (INTVAL (XEXP (x, 1)) & 3) == 0)
2866 return 1;
2868 else if (GET_CODE (XEXP (x, 0)) == REG
2869 && REGNO (XEXP (x, 0)) == FRAME_POINTER_REGNUM
2870 && GET_MODE_SIZE (mode) >= 4
2871 && GET_CODE (XEXP (x, 1)) == CONST_INT
2872 && (INTVAL (XEXP (x, 1)) & 3) == 0)
2873 return 1;
2876 else if (GET_MODE_CLASS (mode) != MODE_FLOAT
2877 && GET_CODE (x) == SYMBOL_REF
2878 && CONSTANT_POOL_ADDRESS_P (x)
2879 && !(flag_pic
2880 && symbol_mentioned_p (get_pool_constant (x))))
2881 return 1;
2883 return 0;
2886 /* Return nonzero if VAL can be used as an offset in a Thumb-state address
2887 instruction of mode MODE. */
2889 thumb_legitimate_offset_p (mode, val)
2890 enum machine_mode mode;
2891 HOST_WIDE_INT val;
2893 switch (GET_MODE_SIZE (mode))
2895 case 1:
2896 return val >= 0 && val < 32;
2898 case 2:
2899 return val >= 0 && val < 64 && (val & 1) == 0;
2901 default:
2902 return (val >= 0
2903 && (val + GET_MODE_SIZE (mode)) <= 128
2904 && (val & 3) == 0);
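/* Illustratively, these ranges match the Thumb immediate-offset forms:
   ldrb/strb take offsets 0-31, ldrh/strh even offsets 0-62, and
   ldr/str word-aligned offsets small enough that the whole access
   stays within 128 bytes of the base. */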
2908 /* Try machine-dependent ways of modifying an illegitimate address
2909 to be legitimate. If we find one, return the new, valid address. */
2912 arm_legitimize_address (x, orig_x, mode)
2913 rtx x;
2914 rtx orig_x;
2915 enum machine_mode mode;
2917 if (GET_CODE (x) == PLUS)
2919 rtx xop0 = XEXP (x, 0);
2920 rtx xop1 = XEXP (x, 1);
2922 if (CONSTANT_P (xop0) && !symbol_mentioned_p (xop0))
2923 xop0 = force_reg (SImode, xop0);
2925 if (CONSTANT_P (xop1) && !symbol_mentioned_p (xop1))
2926 xop1 = force_reg (SImode, xop1);
2928 if (ARM_BASE_REGISTER_RTX_P (xop0)
2929 && GET_CODE (xop1) == CONST_INT)
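/* Split an out-of-range constant offset into a part the addressing
   mode can absorb and a remainder that is added into a scratch
   register; e.g. (illustrative) r0 + 0x1234 in SImode becomes
   base = r0 + 0x1000 and then base + 0x234. */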
2931 HOST_WIDE_INT n, low_n;
2932 rtx base_reg, val;
2933 n = INTVAL (xop1);
2935 if (mode == DImode || (TARGET_SOFT_FLOAT && mode == DFmode))
2937 low_n = n & 0x0f;
2938 n &= ~0x0f;
2939 if (low_n > 4)
2941 n += 16;
2942 low_n -= 16;
2945 else
2947 low_n = ((mode) == TImode ? 0
2948 : n >= 0 ? (n & 0xfff) : -((-n) & 0xfff));
2949 n -= low_n;
2952 base_reg = gen_reg_rtx (SImode);
2953 val = force_operand (gen_rtx_PLUS (SImode, xop0,
2954 GEN_INT (n)), NULL_RTX);
2955 emit_move_insn (base_reg, val);
2956 x = (low_n == 0 ? base_reg
2957 : gen_rtx_PLUS (SImode, base_reg, GEN_INT (low_n)));
2959 else if (xop0 != XEXP (x, 0) || xop1 != XEXP (x, 1))
2960 x = gen_rtx_PLUS (SImode, xop0, xop1);
2963 /* XXX We don't allow MINUS any more -- see comment in
2964 arm_legitimate_address_p (). */
2965 else if (GET_CODE (x) == MINUS)
2967 rtx xop0 = XEXP (x, 0);
2968 rtx xop1 = XEXP (x, 1);
2970 if (CONSTANT_P (xop0))
2971 xop0 = force_reg (SImode, xop0);
2973 if (CONSTANT_P (xop1) && ! symbol_mentioned_p (xop1))
2974 xop1 = force_reg (SImode, xop1);
2976 if (xop0 != XEXP (x, 0) || xop1 != XEXP (x, 1))
2977 x = gen_rtx_MINUS (SImode, xop0, xop1);
2980 if (flag_pic)
2982 /* We need to find and carefully transform any SYMBOL and LABEL
2983 references; so go back to the original address expression. */
2984 rtx new_x = legitimize_pic_address (orig_x, mode, NULL_RTX);
2986 if (new_x != orig_x)
2987 x = new_x;
2990 return x;
2995 #define REG_OR_SUBREG_REG(X) \
2996 (GET_CODE (X) == REG \
2997 || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))
2999 #define REG_OR_SUBREG_RTX(X) \
3000 (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))
3002 #ifndef COSTS_N_INSNS
3003 #define COSTS_N_INSNS(N) ((N) * 4 - 2)
3004 #endif
3006 static inline int
3007 arm_rtx_costs_1 (x, code, outer)
3008 rtx x;
3009 enum rtx_code code;
3010 enum rtx_code outer;
3012 enum machine_mode mode = GET_MODE (x);
3013 enum rtx_code subcode;
3014 int extra_cost;
3016 if (TARGET_THUMB)
3018 switch (code)
3020 case ASHIFT:
3021 case ASHIFTRT:
3022 case LSHIFTRT:
3023 case ROTATERT:
3024 case PLUS:
3025 case MINUS:
3026 case COMPARE:
3027 case NEG:
3028 case NOT:
3029 return COSTS_N_INSNS (1);
3031 case MULT:
3032 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
3034 int cycles = 0;
3035 unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));
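/* The cost model here assumes the multiplier retires two bits of the
   constant per cycle, so count 2-bit shifts until the value is
   exhausted; e.g. (illustrative) a multiplier of 0x55 adds four
   cycles. */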
3037 while (i)
3039 i >>= 2;
3040 cycles++;
3042 return COSTS_N_INSNS (2) + cycles;
3044 return COSTS_N_INSNS (1) + 16;
3046 case SET:
3047 return (COSTS_N_INSNS (1)
3048 + 4 * ((GET_CODE (SET_SRC (x)) == MEM)
3049 + (GET_CODE (SET_DEST (x)) == MEM)));
3051 case CONST_INT:
3052 if (outer == SET)
3054 if ((unsigned HOST_WIDE_INT) INTVAL (x) < 256)
3055 return 0;
3056 if (thumb_shiftable_const (INTVAL (x)))
3057 return COSTS_N_INSNS (2);
3058 return COSTS_N_INSNS (3);
3060 else if (outer == PLUS
3061 && INTVAL (x) < 256 && INTVAL (x) > -256)
3062 return 0;
3063 else if (outer == COMPARE
3064 && (unsigned HOST_WIDE_INT) INTVAL (x) < 256)
3065 return 0;
3066 else if (outer == ASHIFT || outer == ASHIFTRT
3067 || outer == LSHIFTRT)
3068 return 0;
3069 return COSTS_N_INSNS (2);
3071 case CONST:
3072 case CONST_DOUBLE:
3073 case LABEL_REF:
3074 case SYMBOL_REF:
3075 return COSTS_N_INSNS (3);
3077 case UDIV:
3078 case UMOD:
3079 case DIV:
3080 case MOD:
3081 return 100;
3083 case TRUNCATE:
3084 return 99;
3086 case AND:
3087 case XOR:
3088 case IOR:
3089 /* XXX guess. */
3090 return 8;
3092 case ADDRESSOF:
3093 case MEM:
3094 /* XXX another guess. */
3095 /* Memory costs quite a lot for the first word, but subsequent words
3096 load at the equivalent of a single insn each. */
3097 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
3098 + ((GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
3099 ? 4 : 0));
3101 case IF_THEN_ELSE:
3102 /* XXX a guess. */
3103 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
3104 return 14;
3105 return 2;
3107 case ZERO_EXTEND:
3108 /* XXX still guessing. */
3109 switch (GET_MODE (XEXP (x, 0)))
3111 case QImode:
3112 return (1 + (mode == DImode ? 4 : 0)
3113 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3115 case HImode:
3116 return (4 + (mode == DImode ? 4 : 0)
3117 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3119 case SImode:
3120 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3122 default:
3123 return 99;
3126 default:
3127 return 99;
3128 #if 0
3129 case FFS:
3130 case FLOAT:
3131 case FIX:
3132 case UNSIGNED_FIX:
3133 /* XXX guess */
3134 fprintf (stderr, "unexpected code for thumb in rtx_costs: %s\n",
3135 rtx_name[code]);
3136 abort ();
3137 #endif
3141 switch (code)
3143 case MEM:
3144 /* Memory costs quite a lot for the first word, but subsequent words
3145 load at the equivalent of a single insn each. */
3146 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
3147 + (GET_CODE (x) == SYMBOL_REF
3148 && CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));
3150 case DIV:
3151 case MOD:
3152 return 100;
3154 case ROTATE:
3155 if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
3156 return 4;
3157 /* Fall through */
3158 case ROTATERT:
3159 if (mode != SImode)
3160 return 8;
3161 /* Fall through */
3162 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
3163 if (mode == DImode)
3164 return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
3165 + ((GET_CODE (XEXP (x, 0)) == REG
3166 || (GET_CODE (XEXP (x, 0)) == SUBREG
3167 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
3168 ? 0 : 8));
3169 return (1 + ((GET_CODE (XEXP (x, 0)) == REG
3170 || (GET_CODE (XEXP (x, 0)) == SUBREG
3171 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
3172 ? 0 : 4)
3173 + ((GET_CODE (XEXP (x, 1)) == REG
3174 || (GET_CODE (XEXP (x, 1)) == SUBREG
3175 && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
3176 || (GET_CODE (XEXP (x, 1)) == CONST_INT))
3177 ? 0 : 4));
3179 case MINUS:
3180 if (mode == DImode)
3181 return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
3182 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
3183 || (GET_CODE (XEXP (x, 0)) == CONST_INT
3184 && const_ok_for_arm (INTVAL (XEXP (x, 0)))))
3185 ? 0 : 8));
3187 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3188 return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
3189 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
3190 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
3191 ? 0 : 8)
3192 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
3193 || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
3194 && const_double_rtx_ok_for_fpu (XEXP (x, 0))))
3195 ? 0 : 8));
3197 if (((GET_CODE (XEXP (x, 0)) == CONST_INT
3198 && const_ok_for_arm (INTVAL (XEXP (x, 0)))
3199 && REG_OR_SUBREG_REG (XEXP (x, 1))))
3200 || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
3201 || subcode == ASHIFTRT || subcode == LSHIFTRT
3202 || subcode == ROTATE || subcode == ROTATERT
3203 || (subcode == MULT
3204 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3205 && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
3206 (INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
3207 && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
3208 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
3209 || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
3210 && REG_OR_SUBREG_REG (XEXP (x, 0))))
3211 return 1;
3212 /* Fall through */
3214 case PLUS:
3215 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3216 return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
3217 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
3218 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
3219 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
3220 ? 0 : 8));
3222 /* Fall through */
3223 case AND: case XOR: case IOR:
3224 extra_cost = 0;
3226 /* Normally the frame registers will be split into reg+const during
3227 reload, so it is a bad idea to combine them with other instructions,
3228 since then they might not be moved outside of loops. As a compromise
3229 we allow integration with ops that have a constant as their second
3230 operand. */
3231 if ((REG_OR_SUBREG_REG (XEXP (x, 0))
3232 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
3233 && GET_CODE (XEXP (x, 1)) != CONST_INT)
3234 || (REG_OR_SUBREG_REG (XEXP (x, 1))
3235 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 1)))))
3236 extra_cost = 4;
3238 if (mode == DImode)
3239 return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
3240 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
3241 || (GET_CODE (XEXP (x, 1)) == CONST_INT
3242 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
3243 ? 0 : 8));
3245 if (REG_OR_SUBREG_REG (XEXP (x, 0)))
3246 return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
3247 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
3248 || (GET_CODE (XEXP (x, 1)) == CONST_INT
3249 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
3250 ? 0 : 4));
3252 else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
3253 return (1 + extra_cost
3254 + ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
3255 || subcode == LSHIFTRT || subcode == ASHIFTRT
3256 || subcode == ROTATE || subcode == ROTATERT
3257 || (subcode == MULT
3258 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3259 && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
3260 (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0)))
3261 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
3262 && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
3263 || GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
3264 ? 0 : 4));
3266 return 8;
3268 case MULT:
3269 /* There is no point basing this on the tuning, since it is always the
3270 fast variant if it exists at all. */
3271 if (arm_fast_multiply && mode == DImode
3272 && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
3273 && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
3274 || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
3275 return 8;
3277 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3278 || mode == DImode)
3279 return 30;
3281 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
3283 unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
3284 & (unsigned HOST_WIDE_INT) 0xffffffff);
3285 int add_cost = const_ok_for_arm (i) ? 4 : 8;
3286 int j;
3288 /* Tune as appropriate. */
3289 int booth_unit_size = ((tune_flags & FL_FAST_MULT) ? 8 : 2);
3291 for (j = 0; i && j < 32; j += booth_unit_size)
3293 i >>= booth_unit_size;
3294 add_cost += 2;
3297 return add_cost;
3300 return (((tune_flags & FL_FAST_MULT) ? 8 : 30)
3301 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
3302 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));
3304 case TRUNCATE:
3305 if (arm_fast_multiply && mode == SImode
3306 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
3307 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
3308 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
3309 == GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)))
3310 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ZERO_EXTEND
3311 || GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SIGN_EXTEND))
3312 return 8;
3313 return 99;
3315 case NEG:
3316 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3317 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
3318 /* Fall through */
3319 case NOT:
3320 if (mode == DImode)
3321 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
3323 return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
3325 case IF_THEN_ELSE:
3326 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
3327 return 14;
3328 return 2;
3330 case COMPARE:
3331 return 1;
3333 case ABS:
3334 return 4 + (mode == DImode ? 4 : 0);
3336 case SIGN_EXTEND:
3337 if (GET_MODE (XEXP (x, 0)) == QImode)
3338 return (4 + (mode == DImode ? 4 : 0)
3339 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3340 /* Fall through */
3341 case ZERO_EXTEND:
3342 switch (GET_MODE (XEXP (x, 0)))
3344 case QImode:
3345 return (1 + (mode == DImode ? 4 : 0)
3346 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3348 case HImode:
3349 return (4 + (mode == DImode ? 4 : 0)
3350 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3352 case SImode:
3353 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3355 default:
3356 break;
3358 abort ();
3360 case CONST_INT:
3361 if (const_ok_for_arm (INTVAL (x)))
3362 return outer == SET ? 2 : -1;
3363 else if (outer == AND
3364 && const_ok_for_arm (~INTVAL (x)))
3365 return -1;
3366 else if ((outer == COMPARE
3367 || outer == PLUS || outer == MINUS)
3368 && const_ok_for_arm (-INTVAL (x)))
3369 return -1;
3370 else
3371 return 5;
3373 case CONST:
3374 case LABEL_REF:
3375 case SYMBOL_REF:
3376 return 6;
3378 case CONST_DOUBLE:
3379 if (const_double_rtx_ok_for_fpu (x))
3380 return outer == SET ? 2 : -1;
3381 else if ((outer == COMPARE || outer == PLUS)
3382 && neg_const_double_rtx_ok_for_fpu (x))
3383 return -1;
3384 return 7;
3386 default:
3387 return 99;
3391 static bool
3392 arm_rtx_costs (x, code, outer_code, total)
3393 rtx x;
3394 int code, outer_code;
3395 int *total;
3397 *total = arm_rtx_costs_1 (x, code, outer_code);
3398 return true;
3401 /* All address computations that can be done are free, but rtx cost returns
3402 the same for practically all of them. So we weight the different types
3403 of address here in the order (most pref first):
3404 PRE/POST_INC/DEC, SHIFT or NON-INT sum, INT sum, REG, MEM or LABEL. */
3406 static int
3407 arm_address_cost (X)
3408 rtx X;
3410 #define ARM_ADDRESS_COST(X) \
3411 (10 - ((GET_CODE (X) == MEM || GET_CODE (X) == LABEL_REF \
3412 || GET_CODE (X) == SYMBOL_REF) \
3413 ? 0 \
3414 : ((GET_CODE (X) == PRE_INC || GET_CODE (X) == PRE_DEC \
3415 || GET_CODE (X) == POST_INC || GET_CODE (X) == POST_DEC) \
3416 ? 10 \
3417 : (((GET_CODE (X) == PLUS || GET_CODE (X) == MINUS) \
3418 ? 6 + (GET_CODE (XEXP (X, 1)) == CONST_INT ? 2 \
3419 : ((GET_RTX_CLASS (GET_CODE (XEXP (X, 0))) == '2' \
3420 || GET_RTX_CLASS (GET_CODE (XEXP (X, 0))) == 'c' \
3421 || GET_RTX_CLASS (GET_CODE (XEXP (X, 1))) == '2' \
3422 || GET_RTX_CLASS (GET_CODE (XEXP (X, 1))) == 'c') \
3423 ? 1 : 0)) \
3424 : 4)))))
3426 #define THUMB_ADDRESS_COST(X) \
3427 ((GET_CODE (X) == REG \
3428 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 0)) == REG \
3429 && GET_CODE (XEXP (X, 1)) == CONST_INT)) \
3430 ? 1 : 2)
3432 return (TARGET_ARM ? ARM_ADDRESS_COST (X) : THUMB_ADDRESS_COST (X));
3435 static int
3436 arm_adjust_cost (insn, link, dep, cost)
3437 rtx insn;
3438 rtx link;
3439 rtx dep;
3440 int cost;
3442 rtx i_pat, d_pat;
3444 /* Some true dependencies can have a higher cost depending
3445 on precisely how certain input operands are used. */
3446 if (arm_is_xscale
3447 && REG_NOTE_KIND (link) == 0
3448 && recog_memoized (insn) >= 0
3449 && recog_memoized (dep) >= 0)
3451 int shift_opnum = get_attr_shift (insn);
3452 enum attr_type attr_type = get_attr_type (dep);
3454 /* If nonzero, SHIFT_OPNUM contains the operand number of a shifted
3455 operand for INSN. If we have a shifted input operand and the
3456 instruction we depend on is another ALU instruction, then we may
3457 have to account for an additional stall. */
3458 if (shift_opnum != 0 && attr_type == TYPE_NORMAL)
3460 rtx shifted_operand;
3461 int opno;
3463 /* Get the shifted operand. */
3464 extract_insn (insn);
3465 shifted_operand = recog_data.operand[shift_opnum];
3467 /* Iterate over all the operands in DEP. If we write an operand
3468 that overlaps with SHIFTED_OPERAND, then we have to increase the
3469 cost of this dependency. */
3470 extract_insn (dep);
3471 preprocess_constraints ();
3472 for (opno = 0; opno < recog_data.n_operands; opno++)
3474 /* We can ignore strict inputs. */
3475 if (recog_data.operand_type[opno] == OP_IN)
3476 continue;
3478 if (reg_overlap_mentioned_p (recog_data.operand[opno],
3479 shifted_operand))
3480 return 2;
3485 /* XXX This is not strictly true for the FPA. */
3486 if (REG_NOTE_KIND (link) == REG_DEP_ANTI
3487 || REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
3488 return 0;
3490 /* Call insns don't incur a stall, even if they follow a load. */
3491 if (REG_NOTE_KIND (link) == 0
3492 && GET_CODE (insn) == CALL_INSN)
3493 return 1;
3495 if ((i_pat = single_set (insn)) != NULL
3496 && GET_CODE (SET_SRC (i_pat)) == MEM
3497 && (d_pat = single_set (dep)) != NULL
3498 && GET_CODE (SET_DEST (d_pat)) == MEM)
3500 rtx src_mem = XEXP (SET_SRC (i_pat), 0);
3501 /* This is a load after a store; there is no conflict if the load reads
3502 from a cached area. Assume that loads from the stack, and from the
3503 constant pool are cached, and that others will miss. This is a
3504 hack. */
3506 if ((GET_CODE (src_mem) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (src_mem))
3507 || reg_mentioned_p (stack_pointer_rtx, src_mem)
3508 || reg_mentioned_p (frame_pointer_rtx, src_mem)
3509 || reg_mentioned_p (hard_frame_pointer_rtx, src_mem))
3510 return 1;
3513 return cost;
3516 /* This code has been fixed for cross compilation. */
3518 static int fpa_consts_inited = 0;
3520 static const char * const strings_fpa[8] =
3522 "0", "1", "2", "3",
3523 "4", "5", "0.5", "10"
3526 static REAL_VALUE_TYPE values_fpa[8];
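/* These eight values are exactly the constants an FPA instruction can
   encode as an immediate operand, so e.g. (illustrative) 0.5 can appear
   directly in an instruction while 0.25 must come from memory. */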
3528 static void
3529 init_fpa_table ()
3531 int i;
3532 REAL_VALUE_TYPE r;
3534 for (i = 0; i < 8; i++)
3536 r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
3537 values_fpa[i] = r;
3540 fpa_consts_inited = 1;
3543 /* Return TRUE if rtx X is a valid immediate FPU constant. */
3546 const_double_rtx_ok_for_fpu (x)
3547 rtx x;
3549 REAL_VALUE_TYPE r;
3550 int i;
3552 if (!fpa_consts_inited)
3553 init_fpa_table ();
3555 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3556 if (REAL_VALUE_MINUS_ZERO (r))
3557 return 0;
3559 for (i = 0; i < 8; i++)
3560 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
3561 return 1;
3563 return 0;
3566 /* Return TRUE if the negation of rtx X is a valid immediate FPU constant. */
3569 neg_const_double_rtx_ok_for_fpu (x)
3570 rtx x;
3572 REAL_VALUE_TYPE r;
3573 int i;
3575 if (!fpa_consts_inited)
3576 init_fpa_table ();
3578 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3579 r = REAL_VALUE_NEGATE (r);
3580 if (REAL_VALUE_MINUS_ZERO (r))
3581 return 0;
3583 for (i = 0; i < 8; i++)
3584 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
3585 return 1;
3587 return 0;
3590 /* Predicates for `match_operand' and `match_operator'. */
3592 /* s_register_operand is the same as register_operand, but it doesn't accept
3593 (SUBREG (MEM)...).
3595 This function exists because at the time it was put in it led to better
3596 code. SUBREG(MEM) always needs a reload in the places where
3597 s_register_operand is used, and this seemed to lead to excessive
3598 reloading. */
3601 s_register_operand (op, mode)
3602 rtx op;
3603 enum machine_mode mode;
3605 if (GET_MODE (op) != mode && mode != VOIDmode)
3606 return 0;
3608 if (GET_CODE (op) == SUBREG)
3609 op = SUBREG_REG (op);
3611 /* We don't consider registers whose class is NO_REGS
3612 to be a register operand. */
3613 /* XXX might have to check for lo regs only for thumb ??? */
3614 return (GET_CODE (op) == REG
3615 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3616 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
3619 /* A hard register operand (even before reload). */
3622 arm_hard_register_operand (op, mode)
3623 rtx op;
3624 enum machine_mode mode;
3626 if (GET_MODE (op) != mode && mode != VOIDmode)
3627 return 0;
3629 return (GET_CODE (op) == REG
3630 && REGNO (op) < FIRST_PSEUDO_REGISTER);
3633 /* Only accept reg, subreg(reg), const_int. */
3636 reg_or_int_operand (op, mode)
3637 rtx op;
3638 enum machine_mode mode;
3640 if (GET_CODE (op) == CONST_INT)
3641 return 1;
3643 if (GET_MODE (op) != mode && mode != VOIDmode)
3644 return 0;
3646 if (GET_CODE (op) == SUBREG)
3647 op = SUBREG_REG (op);
3649 /* We don't consider registers whose class is NO_REGS
3650 to be a register operand. */
3651 return (GET_CODE (op) == REG
3652 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3653 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
3656 /* Return 1 if OP is an item in memory, given that we are in reload. */
3659 arm_reload_memory_operand (op, mode)
3660 rtx op;
3661 enum machine_mode mode ATTRIBUTE_UNUSED;
3663 int regno = true_regnum (op);
3665 return (!CONSTANT_P (op)
3666 && (regno == -1
3667 || (GET_CODE (op) == REG
3668 && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
3671 /* Return 1 if OP is a valid memory address, but not valid for a signed byte
3672 memory access (architecture V4).
3673 MODE is QImode if called when computing constraints, or VOIDmode when
3674 emitting patterns. In this latter case we cannot use memory_operand()
3675 because it will fail on badly formed MEMs, which is precisely what we are
3676 trying to catch. */
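/* Illustrative bad cases: (mem (plus (reg) (mult (reg) (const_int 4))))
   and (mem (plus (reg) (const_int 512))) both fail here, since the
   ARMv4 ldrsb instruction only accepts reg, reg+reg and
   reg +/- 8-bit-constant addresses. */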
3679 bad_signed_byte_operand (op, mode)
3680 rtx op;
3681 enum machine_mode mode ATTRIBUTE_UNUSED;
3683 #if 0
3684 if ((mode == QImode && !memory_operand (op, mode)) || GET_CODE (op) != MEM)
3685 return 0;
3686 #endif
3687 if (GET_CODE (op) != MEM)
3688 return 0;
3690 op = XEXP (op, 0);
3692 /* A sum of anything more complex than reg + reg or reg + const is bad. */
3693 if ((GET_CODE (op) == PLUS || GET_CODE (op) == MINUS)
3694 && (!s_register_operand (XEXP (op, 0), VOIDmode)
3695 || (!s_register_operand (XEXP (op, 1), VOIDmode)
3696 && GET_CODE (XEXP (op, 1)) != CONST_INT)))
3697 return 1;
3699 /* Big constants are also bad. */
3700 if (GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT
3701 && (INTVAL (XEXP (op, 1)) > 0xff
3702 || -INTVAL (XEXP (op, 1)) > 0xff))
3703 return 1;
3705 /* Everything else is good, or will automatically be made so. */
3706 return 0;
3709 /* Return TRUE for valid operands for the rhs of an ARM instruction. */
3712 arm_rhs_operand (op, mode)
3713 rtx op;
3714 enum machine_mode mode;
3716 return (s_register_operand (op, mode)
3717 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
3720 /* Return TRUE for valid operands for the
3721 rhs of an ARM instruction, or a load. */
3724 arm_rhsm_operand (op, mode)
3725 rtx op;
3726 enum machine_mode mode;
3728 return (s_register_operand (op, mode)
3729 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
3730 || memory_operand (op, mode));
3733 /* Return TRUE for valid operands for the rhs of an ARM instruction, or for a
3734 constant that is valid when negated. */
3737 arm_add_operand (op, mode)
3738 rtx op;
3739 enum machine_mode mode;
3741 if (TARGET_THUMB)
3742 return thumb_cmp_operand (op, mode);
3744 return (s_register_operand (op, mode)
3745 || (GET_CODE (op) == CONST_INT
3746 && (const_ok_for_arm (INTVAL (op))
3747 || const_ok_for_arm (-INTVAL (op)))));
3751 arm_not_operand (op, mode)
3752 rtx op;
3753 enum machine_mode mode;
3755 return (s_register_operand (op, mode)
3756 || (GET_CODE (op) == CONST_INT
3757 && (const_ok_for_arm (INTVAL (op))
3758 || const_ok_for_arm (~INTVAL (op)))));
3761 /* Return TRUE if the operand is a memory reference which contains an
3762 offsettable address. */
3765 offsettable_memory_operand (op, mode)
3766 rtx op;
3767 enum machine_mode mode;
3769 if (mode == VOIDmode)
3770 mode = GET_MODE (op);
3772 return (mode == GET_MODE (op)
3773 && GET_CODE (op) == MEM
3774 && offsettable_address_p (reload_completed | reload_in_progress,
3775 mode, XEXP (op, 0)));
3778 /* Return TRUE if the operand is a memory reference which is, or can be
3779 made word aligned by adjusting the offset. */
3782 alignable_memory_operand (op, mode)
3783 rtx op;
3784 enum machine_mode mode;
3786 rtx reg;
3788 if (mode == VOIDmode)
3789 mode = GET_MODE (op);
3791 if (mode != GET_MODE (op) || GET_CODE (op) != MEM)
3792 return 0;
3794 op = XEXP (op, 0);
3796 return ((GET_CODE (reg = op) == REG
3797 || (GET_CODE (op) == SUBREG
3798 && GET_CODE (reg = SUBREG_REG (op)) == REG)
3799 || (GET_CODE (op) == PLUS
3800 && GET_CODE (XEXP (op, 1)) == CONST_INT
3801 && (GET_CODE (reg = XEXP (op, 0)) == REG
3802 || (GET_CODE (XEXP (op, 0)) == SUBREG
3803 && GET_CODE (reg = SUBREG_REG (XEXP (op, 0))) == REG))))
3804 && REGNO_POINTER_ALIGN (REGNO (reg)) >= 32);
3807 /* Similar to s_register_operand, but does not allow hard integer
3808 registers. */
3811 f_register_operand (op, mode)
3812 rtx op;
3813 enum machine_mode mode;
3815 if (GET_MODE (op) != mode && mode != VOIDmode)
3816 return 0;
3818 if (GET_CODE (op) == SUBREG)
3819 op = SUBREG_REG (op);
3821 /* We don't consider registers whose class is NO_REGS
3822 to be a register operand. */
3823 return (GET_CODE (op) == REG
3824 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3825 || REGNO_REG_CLASS (REGNO (op)) == FPU_REGS));
3828 /* Return TRUE for valid operands for the rhs of an FPU instruction. */
3831 fpu_rhs_operand (op, mode)
3832 rtx op;
3833 enum machine_mode mode;
3835 if (s_register_operand (op, mode))
3836 return TRUE;
3838 if (GET_MODE (op) != mode && mode != VOIDmode)
3839 return FALSE;
3841 if (GET_CODE (op) == CONST_DOUBLE)
3842 return const_double_rtx_ok_for_fpu (op);
3844 return FALSE;
3848 fpu_add_operand (op, mode)
3849 rtx op;
3850 enum machine_mode mode;
3852 if (s_register_operand (op, mode))
3853 return TRUE;
3855 if (GET_MODE (op) != mode && mode != VOIDmode)
3856 return FALSE;
3858 if (GET_CODE (op) == CONST_DOUBLE)
3859 return (const_double_rtx_ok_for_fpu (op)
3860 || neg_const_double_rtx_ok_for_fpu (op));
3862 return FALSE;
3865 /* Return nonzero if OP is a constant power of two. */
3868 power_of_two_operand (op, mode)
3869 rtx op;
3870 enum machine_mode mode ATTRIBUTE_UNUSED;
3872 if (GET_CODE (op) == CONST_INT)
3874 HOST_WIDE_INT value = INTVAL (op);
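/* A power of two has exactly one bit set, so clearing its lowest set
   bit must leave zero: e.g. 8 & 7 == 0, whereas 12 & 11 == 8. */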
3876 return value != 0 && (value & (value - 1)) == 0;
3879 return FALSE;
3882 /* Return TRUE for a valid operand of a DImode operation.
3883 Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
3884 Note that this disallows MEM(REG+REG), but allows
3885 MEM(PRE/POST_INC/DEC(REG)). */
3888 di_operand (op, mode)
3889 rtx op;
3890 enum machine_mode mode;
3892 if (s_register_operand (op, mode))
3893 return TRUE;
3895 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
3896 return FALSE;
3898 if (GET_CODE (op) == SUBREG)
3899 op = SUBREG_REG (op);
3901 switch (GET_CODE (op))
3903 case CONST_DOUBLE:
3904 case CONST_INT:
3905 return TRUE;
3907 case MEM:
3908 return memory_address_p (DImode, XEXP (op, 0));
3910 default:
3911 return FALSE;
3915 /* Like di_operand, but don't accept constants. */
3918 nonimmediate_di_operand (op, mode)
3919 rtx op;
3920 enum machine_mode mode;
3922 if (s_register_operand (op, mode))
3923 return TRUE;
3925 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
3926 return FALSE;
3928 if (GET_CODE (op) == SUBREG)
3929 op = SUBREG_REG (op);
3931 if (GET_CODE (op) == MEM)
3932 return memory_address_p (DImode, XEXP (op, 0));
3934 return FALSE;
3937 /* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
3938 Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
3939 Note that this disallows MEM(REG+REG), but allows
3940 MEM(PRE/POST_INC/DEC(REG)). */
3943 soft_df_operand (op, mode)
3944 rtx op;
3945 enum machine_mode mode;
3947 if (s_register_operand (op, mode))
3948 return TRUE;
3950 if (mode != VOIDmode && GET_MODE (op) != mode)
3951 return FALSE;
3953 if (GET_CODE (op) == SUBREG && CONSTANT_P (SUBREG_REG (op)))
3954 return FALSE;
3956 if (GET_CODE (op) == SUBREG)
3957 op = SUBREG_REG (op);
3959 switch (GET_CODE (op))
3961 case CONST_DOUBLE:
3962 return TRUE;
3964 case MEM:
3965 return memory_address_p (DFmode, XEXP (op, 0));
3967 default:
3968 return FALSE;
3972 /* Like soft_df_operand, but don't accept constants. */
3975 nonimmediate_soft_df_operand (op, mode)
3976 rtx op;
3977 enum machine_mode mode;
3979 if (s_register_operand (op, mode))
3980 return TRUE;
3982 if (mode != VOIDmode && GET_MODE (op) != mode)
3983 return FALSE;
3985 if (GET_CODE (op) == SUBREG)
3986 op = SUBREG_REG (op);
3988 if (GET_CODE (op) == MEM)
3989 return memory_address_p (DFmode, XEXP (op, 0));
3990 return FALSE;
3993 /* Return TRUE for valid index operands. */
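/* The immediate offset in an ARM word load or store is a 12-bit value,
   hence the check below that a CONST_INT lies strictly between -4096
   and 4096. */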
3996 index_operand (op, mode)
3997 rtx op;
3998 enum machine_mode mode;
4000 return (s_register_operand (op, mode)
4001 || (immediate_operand (op, mode)
4002 && (GET_CODE (op) != CONST_INT
4003 || (INTVAL (op) < 4096 && INTVAL (op) > -4096))));
4006 /* Return TRUE for valid shifts by a constant. This also accepts any
4007 power of two on the (somewhat overly relaxed) assumption that the
4008 shift operator in this case was a mult. */
4011 const_shift_operand (op, mode)
4012 rtx op;
4013 enum machine_mode mode;
4015 return (power_of_two_operand (op, mode)
4016 || (immediate_operand (op, mode)
4017 && (GET_CODE (op) != CONST_INT
4018 || (INTVAL (op) < 32 && INTVAL (op) > 0))));
4021 /* Return TRUE for arithmetic operators which can be combined with a multiply
4022 (shift). */
4025 shiftable_operator (x, mode)
4026 rtx x;
4027 enum machine_mode mode;
4029 enum rtx_code code;
4031 if (GET_MODE (x) != mode)
4032 return FALSE;
4034 code = GET_CODE (x);
4036 return (code == PLUS || code == MINUS
4037 || code == IOR || code == XOR || code == AND);
4040 /* Return TRUE for binary logical operators. */
4043 logical_binary_operator (x, mode)
4044 rtx x;
4045 enum machine_mode mode;
4047 enum rtx_code code;
4049 if (GET_MODE (x) != mode)
4050 return FALSE;
4052 code = GET_CODE (x);
4054 return (code == IOR || code == XOR || code == AND);
4057 /* Return TRUE for shift operators. */
4060 shift_operator (x, mode)
4061 rtx x;
4062 enum machine_mode mode;
4064 enum rtx_code code;
4066 if (GET_MODE (x) != mode)
4067 return FALSE;
4069 code = GET_CODE (x);
4071 if (code == MULT)
4072 return power_of_two_operand (XEXP (x, 1), mode);
4074 return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
4075 || code == ROTATERT);
4078 /* Return TRUE if x is EQ or NE. */
4081 equality_operator (x, mode)
4082 rtx x;
4083 enum machine_mode mode ATTRIBUTE_UNUSED;
4085 return GET_CODE (x) == EQ || GET_CODE (x) == NE;
4088 /* Return TRUE if x is a comparison operator other than LTGT or UNEQ. */
4091 arm_comparison_operator (x, mode)
4092 rtx x;
4093 enum machine_mode mode;
4095 return (comparison_operator (x, mode)
4096 && GET_CODE (x) != LTGT
4097 && GET_CODE (x) != UNEQ);
4100 /* Return TRUE for SMIN SMAX UMIN UMAX operators. */
4103 minmax_operator (x, mode)
4104 rtx x;
4105 enum machine_mode mode;
4107 enum rtx_code code = GET_CODE (x);
4109 if (GET_MODE (x) != mode)
4110 return FALSE;
4112 return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
4115 /* Return TRUE if this is the condition code register; if we aren't given
4116 a mode, accept any class CCmode register. */
4119 cc_register (x, mode)
4120 rtx x;
4121 enum machine_mode mode;
4123 if (mode == VOIDmode)
4125 mode = GET_MODE (x);
4127 if (GET_MODE_CLASS (mode) != MODE_CC)
4128 return FALSE;
4131 if ( GET_MODE (x) == mode
4132 && GET_CODE (x) == REG
4133 && REGNO (x) == CC_REGNUM)
4134 return TRUE;
4136 return FALSE;
4139 /* Return TRUE if this is the condition code register; if we aren't given
4140 a mode, accept any class CCmode register which indicates a dominance
4141 expression. */
4144 dominant_cc_register (x, mode)
4145 rtx x;
4146 enum machine_mode mode;
4148 if (mode == VOIDmode)
4150 mode = GET_MODE (x);
4152 if (GET_MODE_CLASS (mode) != MODE_CC)
4153 return FALSE;
4156 if ( mode != CC_DNEmode && mode != CC_DEQmode
4157 && mode != CC_DLEmode && mode != CC_DLTmode
4158 && mode != CC_DGEmode && mode != CC_DGTmode
4159 && mode != CC_DLEUmode && mode != CC_DLTUmode
4160 && mode != CC_DGEUmode && mode != CC_DGTUmode)
4161 return FALSE;
4163 return cc_register (x, mode);
4166 /* Return TRUE if X references a SYMBOL_REF. */
4169 symbol_mentioned_p (x)
4170 rtx x;
4172 const char * fmt;
4173 int i;
4175 if (GET_CODE (x) == SYMBOL_REF)
4176 return 1;
4178 fmt = GET_RTX_FORMAT (GET_CODE (x));
4180 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4182 if (fmt[i] == 'E')
4184 int j;
4186 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4187 if (symbol_mentioned_p (XVECEXP (x, i, j)))
4188 return 1;
4190 else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
4191 return 1;
4194 return 0;
4197 /* Return TRUE if X references a LABEL_REF. */
4200 label_mentioned_p (x)
4201 rtx x;
4203 const char * fmt;
4204 int i;
4206 if (GET_CODE (x) == LABEL_REF)
4207 return 1;
4209 fmt = GET_RTX_FORMAT (GET_CODE (x));
4210 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4212 if (fmt[i] == 'E')
4214 int j;
4216 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4217 if (label_mentioned_p (XVECEXP (x, i, j)))
4218 return 1;
4220 else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
4221 return 1;
4224 return 0;
4227 enum rtx_code
4228 minmax_code (x)
4229 rtx x;
4231 enum rtx_code code = GET_CODE (x);
4233 if (code == SMAX)
4234 return GE;
4235 else if (code == SMIN)
4236 return LE;
4237 else if (code == UMIN)
4238 return LEU;
4239 else if (code == UMAX)
4240 return GEU;
4242 abort ();
4245 /* Return 1 if memory locations are adjacent. */
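/* For example, [r4, #4] and [r4, #8] are adjacent, as are [r4, #8] and
   [r4, #4]; the test below accepts a difference of 4 in either
   direction, provided both references use the same base register. */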
4248 adjacent_mem_locations (a, b)
4249 rtx a, b;
4251 if ((GET_CODE (XEXP (a, 0)) == REG
4252 || (GET_CODE (XEXP (a, 0)) == PLUS
4253 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
4254 && (GET_CODE (XEXP (b, 0)) == REG
4255 || (GET_CODE (XEXP (b, 0)) == PLUS
4256 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
4258 int val0 = 0, val1 = 0;
4259 int reg0, reg1;
4261 if (GET_CODE (XEXP (a, 0)) == PLUS)
4263 reg0 = REGNO (XEXP (XEXP (a, 0), 0));
4264 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
4266 else
4267 reg0 = REGNO (XEXP (a, 0));
4269 if (GET_CODE (XEXP (b, 0)) == PLUS)
4271 reg1 = REGNO (XEXP (XEXP (b, 0), 0));
4272 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
4274 else
4275 reg1 = REGNO (XEXP (b, 0));
4277 return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
4279 return 0;
4282 /* Return 1 if OP is a load multiple operation. It is known to be a
4283 PARALLEL and the first section will be tested. */
4286 load_multiple_operation (op, mode)
4287 rtx op;
4288 enum machine_mode mode ATTRIBUTE_UNUSED;
4290 HOST_WIDE_INT count = XVECLEN (op, 0);
4291 int dest_regno;
4292 rtx src_addr;
4293 HOST_WIDE_INT i = 1, base = 0;
4294 rtx elt;
4296 if (count <= 1
4297 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
4298 return 0;
4300 /* Check to see if this might be a write-back. */
4301 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
4303 i++;
4304 base = 1;
4306 /* Now check it more carefully. */
4307 if (GET_CODE (SET_DEST (elt)) != REG
4308 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
4309 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
4310 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
4311 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
4312 return 0;
4315 /* Perform a quick check so we don't blow up below. */
4316 if (count <= i
4317 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
4318 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
4319 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
4320 return 0;
4322 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
4323 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);
4325 for (; i < count; i++)
4327 elt = XVECEXP (op, 0, i);
4329 if (GET_CODE (elt) != SET
4330 || GET_CODE (SET_DEST (elt)) != REG
4331 || GET_MODE (SET_DEST (elt)) != SImode
4332 || REGNO (SET_DEST (elt)) != (unsigned int)(dest_regno + i - base)
4333 || GET_CODE (SET_SRC (elt)) != MEM
4334 || GET_MODE (SET_SRC (elt)) != SImode
4335 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
4336 || !rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
4337 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
4338 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
4339 return 0;
4342 return 1;
4345 /* Return 1 if OP is a store multiple operation. It is known to be a
4346 PARALLEL and the first section will be tested. */
4349 store_multiple_operation (op, mode)
4350 rtx op;
4351 enum machine_mode mode ATTRIBUTE_UNUSED;
4353 HOST_WIDE_INT count = XVECLEN (op, 0);
4354 int src_regno;
4355 rtx dest_addr;
4356 HOST_WIDE_INT i = 1, base = 0;
4357 rtx elt;
4359 if (count <= 1
4360 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
4361 return 0;
4363 /* Check to see if this might be a write-back. */
4364 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
4366 i++;
4367 base = 1;
4369 /* Now check it more carefully. */
4370 if (GET_CODE (SET_DEST (elt)) != REG
4371 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
4372 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
4373 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
4374 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
4375 return 0;
4378 /* Perform a quick check so we don't blow up below. */
4379 if (count <= i
4380 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
4381 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
4382 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
4383 return 0;
4385 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
4386 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);
4388 for (; i < count; i++)
4390 elt = XVECEXP (op, 0, i);
4392 if (GET_CODE (elt) != SET
4393 || GET_CODE (SET_SRC (elt)) != REG
4394 || GET_MODE (SET_SRC (elt)) != SImode
4395 || REGNO (SET_SRC (elt)) != (unsigned int)(src_regno + i - base)
4396 || GET_CODE (SET_DEST (elt)) != MEM
4397 || GET_MODE (SET_DEST (elt)) != SImode
4398 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
4399 || !rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
4400 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
4401 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
4402 return 0;
4405 return 1;
4409 load_multiple_sequence (operands, nops, regs, base, load_offset)
4410 rtx * operands;
4411 int nops;
4412 int * regs;
4413 int * base;
4414 HOST_WIDE_INT * load_offset;
4416 int unsorted_regs[4];
4417 HOST_WIDE_INT unsorted_offsets[4];
4418 int order[4];
4419 int base_reg = -1;
4420 int i;
4422 /* Can only handle 2, 3, or 4 insns at present,
4423 though could be easily extended if required. */
4424 if (nops < 2 || nops > 4)
4425 abort ();
4427 /* Loop over the operands and check that the memory references are
4428 suitable (i.e. immediate offsets from the same base register). At
4429 the same time, extract the target register, and the memory
4430 offsets. */
4431 for (i = 0; i < nops; i++)
4433 rtx reg;
4434 rtx offset;
4436 /* Convert a subreg of a mem into the mem itself. */
4437 if (GET_CODE (operands[nops + i]) == SUBREG)
4438 operands[nops + i] = alter_subreg (operands + (nops + i));
4440 if (GET_CODE (operands[nops + i]) != MEM)
4441 abort ();
4443 /* Don't reorder volatile memory references; it doesn't seem worth
4444 looking for the case where the order is ok anyway. */
4445 if (MEM_VOLATILE_P (operands[nops + i]))
4446 return 0;
4448 offset = const0_rtx;
4450 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
4451 || (GET_CODE (reg) == SUBREG
4452 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4453 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
4454 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
4455 == REG)
4456 || (GET_CODE (reg) == SUBREG
4457 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4458 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
4459 == CONST_INT)))
4461 if (i == 0)
4463 base_reg = REGNO (reg);
4464 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
4465 ? REGNO (operands[i])
4466 : REGNO (SUBREG_REG (operands[i])));
4467 order[0] = 0;
4469 else
4471 if (base_reg != (int) REGNO (reg))
4472 /* Not addressed from the same base register. */
4473 return 0;
4475 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
4476 ? REGNO (operands[i])
4477 : REGNO (SUBREG_REG (operands[i])));
4478 if (unsorted_regs[i] < unsorted_regs[order[0]])
4479 order[0] = i;
4482 /* If it isn't an integer register, or if it overwrites the
4483 base register but isn't the last insn in the list, then
4484 we can't do this. */
4485 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14
4486 || (i != nops - 1 && unsorted_regs[i] == base_reg))
4487 return 0;
4489 unsorted_offsets[i] = INTVAL (offset);
4491 else
4492 /* Not a suitable memory address. */
4493 return 0;
4496 /* All the useful information has now been extracted from the
4497 operands into unsorted_regs and unsorted_offsets; additionally,
4498 order[0] has been set to the lowest numbered register in the
4499 list. Sort the registers into order, and check that the memory
4500 offsets are ascending and adjacent. */
4502 for (i = 1; i < nops; i++)
4504 int j;
4506 order[i] = order[i - 1];
4507 for (j = 0; j < nops; j++)
4508 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
4509 && (order[i] == order[i - 1]
4510 || unsorted_regs[j] < unsorted_regs[order[i]]))
4511 order[i] = j;
4513 /* Have we found a suitable register? If not, one must be used more
4514 than once. */
4515 if (order[i] == order[i - 1])
4516 return 0;
4518 /* Are the memory addresses adjacent and ascending? */
4519 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
4520 return 0;
4523 if (base)
4525 *base = base_reg;
4527 for (i = 0; i < nops; i++)
4528 regs[i] = unsorted_regs[order[i]];
4530 *load_offset = unsorted_offsets[order[0]];
4533 if (unsorted_offsets[order[0]] == 0)
4534 return 1; /* ldmia */
4536 if (unsorted_offsets[order[0]] == 4)
4537 return 2; /* ldmib */
4539 if (unsorted_offsets[order[nops - 1]] == 0)
4540 return 3; /* ldmda */
4542 if (unsorted_offsets[order[nops - 1]] == -4)
4543 return 4; /* ldmdb */
4545 /* For the ARM8, ARM9 and StrongARM, 2 ldr instructions are faster than an ldm
4546 if the offset isn't small enough. The reason 2 ldrs are faster
4547 is because these ARMs are able to do more than one cache access
4548 in a single cycle. The ARM9 and StrongARM have Harvard caches,
4549 whilst the ARM8 has a double bandwidth cache. This means that
4550 these cores can do both an instruction fetch and a data fetch in
4551 a single cycle, so the trick of calculating the address into a
4552 scratch register (one of the result regs) and then doing a load
4553 multiple actually becomes slower (and no smaller in code size).
4554 That is the transformation
4556 ldr rd1, [rbase + offset]
4557 ldr rd2, [rbase + offset + 4]
4559 to
4561 add rd1, rbase, offset
4562 ldmia rd1, {rd1, rd2}
4564 produces worse code -- '3 cycles + any stalls on rd2' instead of
4565 '2 cycles + any stalls on rd2'. On ARMs with only one cache
4566 access per cycle, the first sequence could never complete in less
4567 than 6 cycles, whereas the ldm sequence would only take 5 and
4568 would make better use of sequential accesses if not hitting the
4569 cache.
4571 We cheat here and test 'arm_ld_sched' which we currently know to
4572 only be true for the ARM8, ARM9 and StrongARM. If this ever
4573 changes, then the test below needs to be reworked. */
4574 if (nops == 2 && arm_ld_sched)
4575 return 0;
4577 /* Can't do it without setting up the offset; only do this if it takes
4578 no more than one insn. */
4579 return (const_ok_for_arm (unsorted_offsets[order[0]])
4580 || const_ok_for_arm (-unsorted_offsets[order[0]])) ? 5 : 0;
4583 const char *
4584 emit_ldm_seq (operands, nops)
4585 rtx * operands;
4586 int nops;
4588 int regs[4];
4589 int base_reg;
4590 HOST_WIDE_INT offset;
4591 char buf[100];
4592 int i;
4594 switch (load_multiple_sequence (operands, nops, regs, &base_reg, &offset))
4596 case 1:
4597 strcpy (buf, "ldm%?ia\t");
4598 break;
4600 case 2:
4601 strcpy (buf, "ldm%?ib\t");
4602 break;
4604 case 3:
4605 strcpy (buf, "ldm%?da\t");
4606 break;
4608 case 4:
4609 strcpy (buf, "ldm%?db\t");
4610 break;
4612 case 5:
4613 if (offset >= 0)
4614 sprintf (buf, "add%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
4615 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
4616 (long) offset);
4617 else
4618 sprintf (buf, "sub%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
4619 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
4620 (long) -offset);
4621 output_asm_insn (buf, operands);
4622 base_reg = regs[0];
4623 strcpy (buf, "ldm%?ia\t");
4624 break;
4626 default:
4627 abort ();
4630 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
4631 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
4633 for (i = 1; i < nops; i++)
4634 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
4635 reg_names[regs[i]]);
4637 strcat (buf, "}\t%@ phole ldm");
4639 output_asm_insn (buf, operands);
4640 return "";
4644 store_multiple_sequence (operands, nops, regs, base, load_offset)
4645 rtx * operands;
4646 int nops;
4647 int * regs;
4648 int * base;
4649 HOST_WIDE_INT * load_offset;
4651 int unsorted_regs[4];
4652 HOST_WIDE_INT unsorted_offsets[4];
4653 int order[4];
4654 int base_reg = -1;
4655 int i;
4657 /* Can only handle 2, 3, or 4 insns at present, though could be easily
4658 extended if required. */
4659 if (nops < 2 || nops > 4)
4660 abort ();
4662 /* Loop over the operands and check that the memory references are
4663 suitable (i.e. immediate offsets from the same base register). At
4664 the same time, extract the target register, and the memory
4665 offsets. */
4666 for (i = 0; i < nops; i++)
4668 rtx reg;
4669 rtx offset;
4671 /* Convert a subreg of a mem into the mem itself. */
4672 if (GET_CODE (operands[nops + i]) == SUBREG)
4673 operands[nops + i] = alter_subreg (operands + (nops + i));
4675 if (GET_CODE (operands[nops + i]) != MEM)
4676 abort ();
4678 /* Don't reorder volatile memory references; it doesn't seem worth
4679 looking for the case where the order is ok anyway. */
4680 if (MEM_VOLATILE_P (operands[nops + i]))
4681 return 0;
4683 offset = const0_rtx;
4685 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
4686 || (GET_CODE (reg) == SUBREG
4687 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4688 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
4689 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
4690 == REG)
4691 || (GET_CODE (reg) == SUBREG
4692 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4693 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
4694 == CONST_INT)))
4696 if (i == 0)
4698 base_reg = REGNO (reg);
4699 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
4700 ? REGNO (operands[i])
4701 : REGNO (SUBREG_REG (operands[i])));
4702 order[0] = 0;
4704 else
4706 if (base_reg != (int) REGNO (reg))
4707 /* Not addressed from the same base register. */
4708 return 0;
4710 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
4711 ? REGNO (operands[i])
4712 : REGNO (SUBREG_REG (operands[i])));
4713 if (unsorted_regs[i] < unsorted_regs[order[0]])
4714 order[0] = i;
4717 /* If it isn't an integer register, then we can't do this. */
4718 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14)
4719 return 0;
4721 unsorted_offsets[i] = INTVAL (offset);
4723 else
4724 /* Not a suitable memory address. */
4725 return 0;
4728 /* All the useful information has now been extracted from the
4729 operands into unsorted_regs and unsorted_offsets; additionally,
4730 order[0] has been set to the lowest numbered register in the
4731 list. Sort the registers into order, and check that the memory
4732 offsets are ascending and adjacent. */
4734 for (i = 1; i < nops; i++)
4736 int j;
4738 order[i] = order[i - 1];
4739 for (j = 0; j < nops; j++)
4740 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
4741 && (order[i] == order[i - 1]
4742 || unsorted_regs[j] < unsorted_regs[order[i]]))
4743 order[i] = j;
4745 /* Have we found a suitable register? If not, one must be used more
4746 than once. */
4747 if (order[i] == order[i - 1])
4748 return 0;
4750 /* Are the memory addresses adjacent and ascending? */
4751 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
4752 return 0;
4755 if (base)
4757 *base = base_reg;
4759 for (i = 0; i < nops; i++)
4760 regs[i] = unsorted_regs[order[i]];
4762 *load_offset = unsorted_offsets[order[0]];
4765 if (unsorted_offsets[order[0]] == 0)
4766 return 1; /* stmia */
4768 if (unsorted_offsets[order[0]] == 4)
4769 return 2; /* stmib */
4771 if (unsorted_offsets[order[nops - 1]] == 0)
4772 return 3; /* stmda */
4774 if (unsorted_offsets[order[nops - 1]] == -4)
4775 return 4; /* stmdb */
4777 return 0;
4780 const char *
4781 emit_stm_seq (operands, nops)
4782 rtx * operands;
4783 int nops;
4785 int regs[4];
4786 int base_reg;
4787 HOST_WIDE_INT offset;
4788 char buf[100];
4789 int i;
4791 switch (store_multiple_sequence (operands, nops, regs, &base_reg, &offset))
4793 case 1:
4794 strcpy (buf, "stm%?ia\t");
4795 break;
4797 case 2:
4798 strcpy (buf, "stm%?ib\t");
4799 break;
4801 case 3:
4802 strcpy (buf, "stm%?da\t");
4803 break;
4805 case 4:
4806 strcpy (buf, "stm%?db\t");
4807 break;
4809 default:
4810 abort ();
4813 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
4814 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
4816 for (i = 1; i < nops; i++)
4817 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
4818 reg_names[regs[i]]);
4820 strcat (buf, "}\t%@ phole stm");
4822 output_asm_insn (buf, operands);
4823 return "";
4827 multi_register_push (op, mode)
4828 rtx op;
4829 enum machine_mode mode ATTRIBUTE_UNUSED;
4831 if (GET_CODE (op) != PARALLEL
4832 || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
4833 || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
4834 || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != UNSPEC_PUSH_MULT))
4835 return 0;
4837 return 1;
4840 /* Routines for use in generating RTL. */
4843 arm_gen_load_multiple (base_regno, count, from, up, write_back, unchanging_p,
4844 in_struct_p, scalar_p)
4845 int base_regno;
4846 int count;
4847 rtx from;
4848 int up;
4849 int write_back;
4850 int unchanging_p;
4851 int in_struct_p;
4852 int scalar_p;
4854 int i = 0, j;
4855 rtx result;
4856 int sign = up ? 1 : -1;
4857 rtx mem;
4859 /* XScale has load-store double instructions, but they have stricter
4860 alignment requirements than load-store multiple, so we cannot
4861 use them.
4863 For XScale ldm requires 2 + NREGS cycles to complete and blocks
4864 the pipeline until completion.
4866 NREGS CYCLES
4867 1 3
4868 2 4
4869 3 5
4870 4 6
4872 An ldr instruction takes 1-3 cycles, but does not block the
4873 pipeline.
4875 NREGS CYCLES
4876 1 1-3
4877 2 2-6
4878 3 3-9
4879 4 4-12
4881 Best case ldr will always win. However, the more ldr instructions
4882 we issue, the less likely we are to be able to schedule them well.
4883 Using ldr instructions also increases code size.
4885 As a compromise, we use ldr for counts of 1 or 2 regs, and ldm
4886 for counts of 3 or 4 regs. */
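/* To make the trade-off concrete: for two registers an ldm costs
   2 + 2 = 4 blocking cycles, while two ldrs cost as little as one
   cycle each and do not block the pipeline; for three or four
   registers the extra ldrs become harder to schedule and cost more
   code space, so ldm wins there. */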
4887 if (arm_is_xscale && count <= 2 && ! optimize_size)
4889 rtx seq;
4891 start_sequence ();
4893 for (i = 0; i < count; i++)
4895 mem = gen_rtx_MEM (SImode, plus_constant (from, i * 4 * sign));
4896 RTX_UNCHANGING_P (mem) = unchanging_p;
4897 MEM_IN_STRUCT_P (mem) = in_struct_p;
4898 MEM_SCALAR_P (mem) = scalar_p;
4899 emit_move_insn (gen_rtx_REG (SImode, base_regno + i), mem);
4902 if (write_back)
4903 emit_move_insn (from, plus_constant (from, count * 4 * sign));
4905 seq = get_insns ();
4906 end_sequence ();
4908 return seq;
4911 result = gen_rtx_PARALLEL (VOIDmode,
4912 rtvec_alloc (count + (write_back ? 1 : 0)));
4913 if (write_back)
4915 XVECEXP (result, 0, 0)
4916 = gen_rtx_SET (GET_MODE (from), from,
4917 plus_constant (from, count * 4 * sign));
4918 i = 1;
4919 count++;
4922 for (j = 0; i < count; i++, j++)
4924 mem = gen_rtx_MEM (SImode, plus_constant (from, j * 4 * sign));
4925 RTX_UNCHANGING_P (mem) = unchanging_p;
4926 MEM_IN_STRUCT_P (mem) = in_struct_p;
4927 MEM_SCALAR_P (mem) = scalar_p;
4928 XVECEXP (result, 0, i)
4929 = gen_rtx_SET (VOIDmode, gen_rtx_REG (SImode, base_regno + j), mem);
4932 return result;
4936 arm_gen_store_multiple (base_regno, count, to, up, write_back, unchanging_p,
4937 in_struct_p, scalar_p)
4938 int base_regno;
4939 int count;
4940 rtx to;
4941 int up;
4942 int write_back;
4943 int unchanging_p;
4944 int in_struct_p;
4945 int scalar_p;
4947 int i = 0, j;
4948 rtx result;
4949 int sign = up ? 1 : -1;
4950 rtx mem;
4952 /* See arm_gen_load_multiple for discussion of
4953 the pros/cons of ldm/stm usage for XScale. */
4954 if (arm_is_xscale && count <= 2 && ! optimize_size)
4956 rtx seq;
4958 start_sequence ();
4960 for (i = 0; i < count; i++)
4962 mem = gen_rtx_MEM (SImode, plus_constant (to, i * 4 * sign));
4963 RTX_UNCHANGING_P (mem) = unchanging_p;
4964 MEM_IN_STRUCT_P (mem) = in_struct_p;
4965 MEM_SCALAR_P (mem) = scalar_p;
4966 emit_move_insn (mem, gen_rtx_REG (SImode, base_regno + i));
4969 if (write_back)
4970 emit_move_insn (to, plus_constant (to, count * 4 * sign));
4972 seq = get_insns ();
4973 end_sequence ();
4975 return seq;
4978 result = gen_rtx_PARALLEL (VOIDmode,
4979 rtvec_alloc (count + (write_back ? 1 : 0)));
4980 if (write_back)
4982 XVECEXP (result, 0, 0)
4983 = gen_rtx_SET (GET_MODE (to), to,
4984 plus_constant (to, count * 4 * sign));
4985 i = 1;
4986 count++;
4989 for (j = 0; i < count; i++, j++)
4991 mem = gen_rtx_MEM (SImode, plus_constant (to, j * 4 * sign));
4992 RTX_UNCHANGING_P (mem) = unchanging_p;
4993 MEM_IN_STRUCT_P (mem) = in_struct_p;
4994 MEM_SCALAR_P (mem) = scalar_p;
4996 XVECEXP (result, 0, i)
4997 = gen_rtx_SET (VOIDmode, mem, gen_rtx_REG (SImode, base_regno + j));
5000 return result;
5004 arm_gen_movstrqi (operands)
5005 rtx * operands;
5007 HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
5008 int i;
5009 rtx src, dst;
5010 rtx st_src, st_dst, fin_src, fin_dst;
5011 rtx part_bytes_reg = NULL;
5012 rtx mem;
5013 int dst_unchanging_p, dst_in_struct_p, src_unchanging_p, src_in_struct_p;
5014 int dst_scalar_p, src_scalar_p;
5016 if (GET_CODE (operands[2]) != CONST_INT
5017 || GET_CODE (operands[3]) != CONST_INT
5018 || INTVAL (operands[2]) > 64
5019 || INTVAL (operands[3]) & 3)
5020 return 0;
5022 st_dst = XEXP (operands[0], 0);
5023 st_src = XEXP (operands[1], 0);
5025 dst_unchanging_p = RTX_UNCHANGING_P (operands[0]);
5026 dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
5027 dst_scalar_p = MEM_SCALAR_P (operands[0]);
5028 src_unchanging_p = RTX_UNCHANGING_P (operands[1]);
5029 src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);
5030 src_scalar_p = MEM_SCALAR_P (operands[1]);
5032 fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
5033 fin_src = src = copy_to_mode_reg (SImode, st_src);
5035 in_words_to_go = ARM_NUM_INTS (INTVAL (operands[2]));
5036 out_words_to_go = INTVAL (operands[2]) / 4;
5037 last_bytes = INTVAL (operands[2]) & 3;
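/* For instance, a 10 byte copy gives in_words_to_go = 3 (assuming
   ARM_NUM_INTS rounds the byte count up to whole words),
   out_words_to_go = 2 and last_bytes = 2: two whole words are stored
   directly and the trailing two bytes are written by the byte/halfword
   code at the end of this function. */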
5039 if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
5040 part_bytes_reg = gen_rtx_REG (SImode, (in_words_to_go - 1) & 3);
5042 for (i = 0; in_words_to_go >= 2; i+=4)
5044 if (in_words_to_go > 4)
5045 emit_insn (arm_gen_load_multiple (0, 4, src, TRUE, TRUE,
5046 src_unchanging_p,
5047 src_in_struct_p,
5048 src_scalar_p));
5049 else
5050 emit_insn (arm_gen_load_multiple (0, in_words_to_go, src, TRUE,
5051 FALSE, src_unchanging_p,
5052 src_in_struct_p, src_scalar_p));
5054 if (out_words_to_go)
5056 if (out_words_to_go > 4)
5057 emit_insn (arm_gen_store_multiple (0, 4, dst, TRUE, TRUE,
5058 dst_unchanging_p,
5059 dst_in_struct_p,
5060 dst_scalar_p));
5061 else if (out_words_to_go != 1)
5062 emit_insn (arm_gen_store_multiple (0, out_words_to_go,
5063 dst, TRUE,
5064 (last_bytes == 0
5065 ? FALSE : TRUE),
5066 dst_unchanging_p,
5067 dst_in_struct_p,
5068 dst_scalar_p));
5069 else
5071 mem = gen_rtx_MEM (SImode, dst);
5072 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
5073 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
5074 MEM_SCALAR_P (mem) = dst_scalar_p;
5075 emit_move_insn (mem, gen_rtx_REG (SImode, 0));
5076 if (last_bytes != 0)
5077 emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
5081 in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
5082 out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
5085 /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do. */
5086 if (out_words_to_go)
5088 rtx sreg;
5090 mem = gen_rtx_MEM (SImode, src);
5091 RTX_UNCHANGING_P (mem) = src_unchanging_p;
5092 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
5093 MEM_SCALAR_P (mem) = src_scalar_p;
5094 emit_move_insn (sreg = gen_reg_rtx (SImode), mem);
5095 emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));
5097 mem = gen_rtx_MEM (SImode, dst);
5098 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
5099 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
5100 MEM_SCALAR_P (mem) = dst_scalar_p;
5101 emit_move_insn (mem, sreg);
5102 emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
5103 in_words_to_go--;
5105 if (in_words_to_go) /* Sanity check */
5106 abort ();
5109 if (in_words_to_go)
5111 if (in_words_to_go < 0)
5112 abort ();
5114 mem = gen_rtx_MEM (SImode, src);
5115 RTX_UNCHANGING_P (mem) = src_unchanging_p;
5116 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
5117 MEM_SCALAR_P (mem) = src_scalar_p;
5118 part_bytes_reg = copy_to_mode_reg (SImode, mem);
5121 if (last_bytes && part_bytes_reg == NULL)
5122 abort ();
5124 if (BYTES_BIG_ENDIAN && last_bytes)
5126 rtx tmp = gen_reg_rtx (SImode);
5128 /* The bytes we want are in the top end of the word. */
5129 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
5130 GEN_INT (8 * (4 - last_bytes))));
5131 part_bytes_reg = tmp;
5133 while (last_bytes)
5135 mem = gen_rtx_MEM (QImode, plus_constant (dst, last_bytes - 1));
5136 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
5137 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
5138 MEM_SCALAR_P (mem) = dst_scalar_p;
5139 emit_move_insn (mem, gen_lowpart (QImode, part_bytes_reg));
5141 if (--last_bytes)
5143 tmp = gen_reg_rtx (SImode);
5144 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
5145 part_bytes_reg = tmp;
5150 else
5152 if (last_bytes > 1)
5154 mem = gen_rtx_MEM (HImode, dst);
5155 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
5156 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
5157 MEM_SCALAR_P (mem) = dst_scalar_p;
5158 emit_move_insn (mem, gen_lowpart (HImode, part_bytes_reg));
5159 last_bytes -= 2;
5160 if (last_bytes)
5162 rtx tmp = gen_reg_rtx (SImode);
5164 emit_insn (gen_addsi3 (dst, dst, GEN_INT (2)));
5165 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (16)));
5166 part_bytes_reg = tmp;
5170 if (last_bytes)
5172 mem = gen_rtx_MEM (QImode, dst);
5173 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
5174 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
5175 MEM_SCALAR_P (mem) = dst_scalar_p;
5176 emit_move_insn (mem, gen_lowpart (QImode, part_bytes_reg));
5180 return 1;
5183 /* Generate a memory reference for a half word, such that it will be loaded
5184 into the top 16 bits of the word. We can assume that the address is
5185 known to be alignable and of the form reg, or plus (reg, const). */
5188 arm_gen_rotated_half_load (memref)
5189 rtx memref;
5191 HOST_WIDE_INT offset = 0;
5192 rtx base = XEXP (memref, 0);
5194 if (GET_CODE (base) == PLUS)
5196 offset = INTVAL (XEXP (base, 1));
5197 base = XEXP (base, 0);
5200 /* If we aren't allowed to generate unaligned addresses, then fail. */
5201 if (TARGET_MMU_TRAPS
5202 && ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 0)))
5203 return NULL;
5205 base = gen_rtx_MEM (SImode, plus_constant (base, offset & ~2));
5207 if ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 2))
5208 return base;
5210 return gen_rtx_ROTATE (SImode, base, GEN_INT (16));
5213 /* Select a dominance comparison mode if possible. We support three forms.
5214 COND_OR == 0 => (X && Y)
5215 COND_OR == 1 => ((!X) || Y)
5216 COND_OR == 2 => (X || Y)
5217 If we are unable to support a dominance comparison we return CC mode.
5218 This will then fail to match for the RTL expressions that generate this
5219 call. */
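/* For example, given (LT a b) || (NE a b), the LT test dominates the
   NE test (LT true implies NE true), so the pair collapses to the
   single dominance mode CC_DNEmode below. */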
5221 static enum machine_mode
5222 select_dominance_cc_mode (x, y, cond_or)
5223 rtx x;
5224 rtx y;
5225 HOST_WIDE_INT cond_or;
5227 enum rtx_code cond1, cond2;
5228 int swapped = 0;
5230 /* Currently we will probably get the wrong result if the individual
5231 comparisons are not simple. This also ensures that it is safe to
5232 reverse a comparison if necessary. */
5233 if ((arm_select_cc_mode (cond1 = GET_CODE (x), XEXP (x, 0), XEXP (x, 1))
5234 != CCmode)
5235 || (arm_select_cc_mode (cond2 = GET_CODE (y), XEXP (y, 0), XEXP (y, 1))
5236 != CCmode))
5237 return CCmode;
5239 /* The if_then_else variant of this tests the second condition if the
5240 first passes, but is true if the first fails. Reverse the first
5241 condition to get a true "inclusive-or" expression. */
5242 if (cond_or == 1)
5243 cond1 = reverse_condition (cond1);
5245 /* If the comparisons are not equal, and one doesn't dominate the other,
5246 then we can't do this. */
5247 if (cond1 != cond2
5248 && !comparison_dominates_p (cond1, cond2)
5249 && (swapped = 1, !comparison_dominates_p (cond2, cond1)))
5250 return CCmode;
5252 if (swapped)
5254 enum rtx_code temp = cond1;
5255 cond1 = cond2;
5256 cond2 = temp;
5259 switch (cond1)
5261 case EQ:
5262 if (cond2 == EQ || !cond_or)
5263 return CC_DEQmode;
5265 switch (cond2)
5267 case LE: return CC_DLEmode;
5268 case LEU: return CC_DLEUmode;
5269 case GE: return CC_DGEmode;
5270 case GEU: return CC_DGEUmode;
5271 default: break;
5274 break;
5276 case LT:
5277 if (cond2 == LT || !cond_or)
5278 return CC_DLTmode;
5279 if (cond2 == LE)
5280 return CC_DLEmode;
5281 if (cond2 == NE)
5282 return CC_DNEmode;
5283 break;
5285 case GT:
5286 if (cond2 == GT || !cond_or)
5287 return CC_DGTmode;
5288 if (cond2 == GE)
5289 return CC_DGEmode;
5290 if (cond2 == NE)
5291 return CC_DNEmode;
5292 break;
5294 case LTU:
5295 if (cond2 == LTU || !cond_or)
5296 return CC_DLTUmode;
5297 if (cond2 == LEU)
5298 return CC_DLEUmode;
5299 if (cond2 == NE)
5300 return CC_DNEmode;
5301 break;
5303 case GTU:
5304 if (cond2 == GTU || !cond_or)
5305 return CC_DGTUmode;
5306 if (cond2 == GEU)
5307 return CC_DGEUmode;
5308 if (cond2 == NE)
5309 return CC_DNEmode;
5310 break;
5312 /* The remaining cases only occur when both comparisons are the
5313 same. */
5314 case NE:
5315 return CC_DNEmode;
5317 case LE:
5318 return CC_DLEmode;
5320 case GE:
5321 return CC_DGEmode;
5323 case LEU:
5324 return CC_DLEUmode;
5326 case GEU:
5327 return CC_DGEUmode;
5329 default:
5330 break;
5333 abort ();
5336 enum machine_mode
5337 arm_select_cc_mode (op, x, y)
5338 enum rtx_code op;
5339 rtx x;
5340 rtx y;
5342 /* All floating point compares return CCFP if it is an equality
5343 comparison, and CCFPE otherwise. */
5344 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
5346 switch (op)
5348 case EQ:
5349 case NE:
5350 case UNORDERED:
5351 case ORDERED:
5352 case UNLT:
5353 case UNLE:
5354 case UNGT:
5355 case UNGE:
5356 case UNEQ:
5357 case LTGT:
5358 return CCFPmode;
5360 case LT:
5361 case LE:
5362 case GT:
5363 case GE:
5364 return CCFPEmode;
5366 default:
5367 abort ();
5371 /* A compare with a shifted operand. Because of canonicalization, the
5372 comparison will have to be swapped when we emit the assembler. */
5373 if (GET_MODE (y) == SImode && GET_CODE (y) == REG
5374 && (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
5375 || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
5376 || GET_CODE (x) == ROTATERT))
5377 return CC_SWPmode;
5379 /* This is a special case that is used by combine to allow a
5380 comparison of a shifted byte load to be split into a zero-extend
5381 followed by a comparison of the shifted integer (only valid for
5382 equalities and unsigned inequalities). */
5383 if (GET_MODE (x) == SImode
5384 && GET_CODE (x) == ASHIFT
5385 && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 24
5386 && GET_CODE (XEXP (x, 0)) == SUBREG
5387 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == MEM
5388 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == QImode
5389 && (op == EQ || op == NE
5390 || op == GEU || op == GTU || op == LTU || op == LEU)
5391 && GET_CODE (y) == CONST_INT)
5392 return CC_Zmode;
5394 /* A construct for a conditional compare, if the false arm contains
5395 0, then both conditions must be true, otherwise either condition
5396 must be true. Not all conditions are possible, so CCmode is
5397 returned if it can't be done. */
5398 if (GET_CODE (x) == IF_THEN_ELSE
5399 && (XEXP (x, 2) == const0_rtx
5400 || XEXP (x, 2) == const1_rtx)
5401 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
5402 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
5403 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
5404 INTVAL (XEXP (x, 2)));
5406 /* Alternate canonicalizations of the above. These are somewhat cleaner. */
5407 if (GET_CODE (x) == AND
5408 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
5409 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
5410 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1), 0);
5412 if (GET_CODE (x) == IOR
5413 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
5414 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
5415 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1), 2);
5417 /* For an operation that sets the condition codes as a side-effect, the
5418 V flag is not set correctly, so we can only use comparisons where
5419 this doesn't matter. (For LT and GE we can use "mi" and "pl"
5420 instead.) */
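/* For instance, comparing (plus a b) against zero lets the adds
   instruction itself set the flags: N and Z describe the result, but V
   reflects signed overflow of the addition, so only EQ, NE, LT ("mi")
   and GE ("pl") remain trustworthy. */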
5421 if (GET_MODE (x) == SImode
5422 && y == const0_rtx
5423 && (op == EQ || op == NE || op == LT || op == GE)
5424 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
5425 || GET_CODE (x) == AND || GET_CODE (x) == IOR
5426 || GET_CODE (x) == XOR || GET_CODE (x) == MULT
5427 || GET_CODE (x) == NOT || GET_CODE (x) == NEG
5428 || GET_CODE (x) == LSHIFTRT
5429 || GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
5430 || GET_CODE (x) == ROTATERT || GET_CODE (x) == ZERO_EXTRACT))
5431 return CC_NOOVmode;
5433 if (GET_MODE (x) == QImode && (op == EQ || op == NE))
5434 return CC_Zmode;
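/* An unsigned comparison of an addition against one of its own
   operands tests the carry flag: (a + b) is unsigned-less-than A
   exactly when the addition wrapped round. */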
5436 if (GET_MODE (x) == SImode && (op == LTU || op == GEU)
5437 && GET_CODE (x) == PLUS
5438 && (rtx_equal_p (XEXP (x, 0), y) || rtx_equal_p (XEXP (x, 1), y)))
5439 return CC_Cmode;
5441 return CCmode;
5444 /* X and Y are two things to compare using CODE. Emit the compare insn and
5445 return the rtx for register 0 in the proper mode. FP means this is a
5446 floating point compare: I don't think that it is needed on the arm. */
5449 arm_gen_compare_reg (code, x, y)
5450 enum rtx_code code;
5451 rtx x, y;
5453 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
5454 rtx cc_reg = gen_rtx_REG (mode, CC_REGNUM);
5456 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
5457 gen_rtx_COMPARE (mode, x, y)));
5459 return cc_reg;
5462 /* Generate a sequence of insns that will generate the correct return
5463 address mask depending on the physical architecture that the program
5464 is running on. */
5467 arm_gen_return_addr_mask ()
5469 rtx reg = gen_reg_rtx (Pmode);
5471 emit_insn (gen_return_addr_mask (reg));
5472 return reg;
5475 void
5476 arm_reload_in_hi (operands)
5477 rtx * operands;
5479 rtx ref = operands[1];
5480 rtx base, scratch;
5481 HOST_WIDE_INT offset = 0;
5483 if (GET_CODE (ref) == SUBREG)
5485 offset = SUBREG_BYTE (ref);
5486 ref = SUBREG_REG (ref);
5489 if (GET_CODE (ref) == REG)
5491 /* We have a pseudo which has been spilt onto the stack; there
5492 are two cases here: the first where there is a simple
5493 stack-slot replacement and a second where the stack-slot is
5494 out of range, or is used as a subreg. */
5495 if (reg_equiv_mem[REGNO (ref)])
5497 ref = reg_equiv_mem[REGNO (ref)];
5498 base = find_replacement (&XEXP (ref, 0));
5500 else
5501 /* The slot is out of range, or was dressed up in a SUBREG. */
5502 base = reg_equiv_address[REGNO (ref)];
5504 else
5505 base = find_replacement (&XEXP (ref, 0));
5507 /* Handle the case where the address is too complex to be offset by 1. */
5508 if (GET_CODE (base) == MINUS
5509 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
5511 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5513 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
5514 base = base_plus;
5516 else if (GET_CODE (base) == PLUS)
5518 /* The addend must be CONST_INT, or we would have dealt with it above. */
5519 HOST_WIDE_INT hi, lo;
5521 offset += INTVAL (XEXP (base, 1));
5522 base = XEXP (base, 0);
5524 /* Rework the address into a legal sequence of insns. */
5525 /* Valid range for lo is -4095 -> 4095 */
5526 lo = (offset >= 0
5527 ? (offset & 0xfff)
5528 : -((-offset) & 0xfff));
5530 /* Corner case: if lo is the max offset then we would be out of range
5531 once we have added the additional 1 below, so bump the msb into the
5532 pre-loading insn(s). */
5533 if (lo == 4095)
5534 lo &= 0x7ff;
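/* The XOR/subtract pair below sign-extends the low 32 bits of
   (offset - lo) to the full width of a HOST_WIDE_INT. */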
5536 hi = ((((offset - lo) & (HOST_WIDE_INT) 0xffffffff)
5537 ^ (HOST_WIDE_INT) 0x80000000)
5538 - (HOST_WIDE_INT) 0x80000000);
5540 if (hi + lo != offset)
5541 abort ();
5543 if (hi != 0)
5545 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5547 /* Get the base address; addsi3 knows how to handle constants
5548 that require more than one insn. */
5549 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
5550 base = base_plus;
5551 offset = lo;
5555 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
5556 emit_insn (gen_zero_extendqisi2 (scratch,
5557 gen_rtx_MEM (QImode,
5558 plus_constant (base,
5559 offset))));
5560 emit_insn (gen_zero_extendqisi2 (gen_rtx_SUBREG (SImode, operands[0], 0),
5561 gen_rtx_MEM (QImode,
5562 plus_constant (base,
5563 offset + 1))));
5564 if (!BYTES_BIG_ENDIAN)
5565 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
5566 gen_rtx_IOR (SImode,
5567 gen_rtx_ASHIFT
5568 (SImode,
5569 gen_rtx_SUBREG (SImode, operands[0], 0),
5570 GEN_INT (8)),
5571 scratch)));
5572 else
5573 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
5574 gen_rtx_IOR (SImode,
5575 gen_rtx_ASHIFT (SImode, scratch,
5576 GEN_INT (8)),
5577 gen_rtx_SUBREG (SImode, operands[0],
5578 0))));
5581 /* Handle storing a half-word to memory during reload by synthesising as two
5582 byte stores. Take care not to clobber the input values until after we
5583 have moved them somewhere safe. This code assumes that if the DImode
5584 scratch in operands[2] overlaps either the input value or output address
5585 in some way, then that value must die in this insn (we absolutely need
5586 two scratch registers for some corner cases). */
5588 void
5589 arm_reload_out_hi (operands)
5590 rtx * operands;
5592 rtx ref = operands[0];
5593 rtx outval = operands[1];
5594 rtx base, scratch;
5595 HOST_WIDE_INT offset = 0;
5597 if (GET_CODE (ref) == SUBREG)
5599 offset = SUBREG_BYTE (ref);
5600 ref = SUBREG_REG (ref);
5603 if (GET_CODE (ref) == REG)
5605 /* We have a pseudo which has been spilt onto the stack; there
5606 are two cases here: the first where there is a simple
5607 stack-slot replacement and a second where the stack-slot is
5608 out of range, or is used as a subreg. */
5609 if (reg_equiv_mem[REGNO (ref)])
5611 ref = reg_equiv_mem[REGNO (ref)];
5612 base = find_replacement (&XEXP (ref, 0));
5614 else
5615 /* The slot is out of range, or was dressed up in a SUBREG. */
5616 base = reg_equiv_address[REGNO (ref)];
5618 else
5619 base = find_replacement (&XEXP (ref, 0));
5621 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
5623 /* Handle the case where the address is too complex to be offset by 1. */
5624 if (GET_CODE (base) == MINUS
5625 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
5627 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5629 /* Be careful not to destroy OUTVAL. */
5630 if (reg_overlap_mentioned_p (base_plus, outval))
5632 /* Updating base_plus might destroy outval; see if we can
5633 swap the scratch and base_plus. */
5634 if (!reg_overlap_mentioned_p (scratch, outval))
5636 rtx tmp = scratch;
5637 scratch = base_plus;
5638 base_plus = tmp;
5640 else
5642 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
5644 /* Be conservative and copy OUTVAL into the scratch now;
5645 this should only be necessary if outval is a subreg
5646 of something larger than a word. */
5647 /* XXX Might this clobber base? I can't see how it can,
5648 since scratch is known to overlap with OUTVAL, and
5649 must be wider than a word. */
5650 emit_insn (gen_movhi (scratch_hi, outval));
5651 outval = scratch_hi;
5655 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
5656 base = base_plus;
5658 else if (GET_CODE (base) == PLUS)
5660 /* The addend must be CONST_INT, or we would have dealt with it above. */
5661 HOST_WIDE_INT hi, lo;
5663 offset += INTVAL (XEXP (base, 1));
5664 base = XEXP (base, 0);
5666 /* Rework the address into a legal sequence of insns. */
5667 /* Valid range for lo is -4095 -> 4095 */
5668 lo = (offset >= 0
5669 ? (offset & 0xfff)
5670 : -((-offset) & 0xfff));
5672 /* Corner case: if lo is the max offset then we would be out of range
5673 once we have added the additional 1 below, so bump the msb into the
5674 pre-loading insn(s). */
5675 if (lo == 4095)
5676 lo &= 0x7ff;
5678 hi = ((((offset - lo) & (HOST_WIDE_INT) 0xffffffff)
5679 ^ (HOST_WIDE_INT) 0x80000000)
5680 - (HOST_WIDE_INT) 0x80000000);
5682 if (hi + lo != offset)
5683 abort ();
5685 if (hi != 0)
5687 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5689 /* Be careful not to destroy OUTVAL. */
5690 if (reg_overlap_mentioned_p (base_plus, outval))
5692 /* Updating base_plus might destroy outval; see if we
5693 can swap the scratch and base_plus. */
5694 if (!reg_overlap_mentioned_p (scratch, outval))
5696 rtx tmp = scratch;
5697 scratch = base_plus;
5698 base_plus = tmp;
5700 else
5702 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
5704 /* Be conservative and copy outval into scratch now;
5705 this should only be necessary if outval is a
5706 subreg of something larger than a word. */
5707 /* XXX Might this clobber base? I can't see how it
5708 can, since scratch is known to overlap with
5709 outval. */
5710 emit_insn (gen_movhi (scratch_hi, outval));
5711 outval = scratch_hi;
5715 /* Get the base address; addsi3 knows how to handle constants
5716 that require more than one insn. */
5717 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
5718 base = base_plus;
5719 offset = lo;
5723 if (BYTES_BIG_ENDIAN)
5725 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
5726 plus_constant (base, offset + 1)),
5727 gen_lowpart (QImode, outval)));
5728 emit_insn (gen_lshrsi3 (scratch,
5729 gen_rtx_SUBREG (SImode, outval, 0),
5730 GEN_INT (8)));
5731 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
5732 gen_lowpart (QImode, scratch)));
5734 else
5736 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
5737 gen_lowpart (QImode, outval)));
5738 emit_insn (gen_lshrsi3 (scratch,
5739 gen_rtx_SUBREG (SImode, outval, 0),
5740 GEN_INT (8)));
5741 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
5742 plus_constant (base, offset + 1)),
5743 gen_lowpart (QImode, scratch)));
5747 /* Print a symbolic form of X to the debug file, F. */
5749 static void
5750 arm_print_value (f, x)
5751 FILE * f;
5752 rtx x;
5754 switch (GET_CODE (x))
5756 case CONST_INT:
5757 fprintf (f, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
5758 return;
5760 case CONST_DOUBLE:
5761 fprintf (f, "<0x%lx,0x%lx>", (long)XWINT (x, 2), (long)XWINT (x, 3));
5762 return;
5764 case CONST_STRING:
5765 fprintf (f, "\"%s\"", XSTR (x, 0));
5766 return;
5768 case SYMBOL_REF:
5769 fprintf (f, "`%s'", XSTR (x, 0));
5770 return;
5772 case LABEL_REF:
5773 fprintf (f, "L%d", INSN_UID (XEXP (x, 0)));
5774 return;
5776 case CONST:
5777 arm_print_value (f, XEXP (x, 0));
5778 return;
5780 case PLUS:
5781 arm_print_value (f, XEXP (x, 0));
5782 fprintf (f, "+");
5783 arm_print_value (f, XEXP (x, 1));
5784 return;
5786 case PC:
5787 fprintf (f, "pc");
5788 return;
5790 default:
5791 fprintf (f, "????");
5792 return;
5796 /* Routines for manipulation of the constant pool. */
5798 /* Arm instructions cannot load a large constant directly into a
5799 register; they have to come from a pc relative load. The constant
5800 must therefore be placed in the addressable range of the pc
5801 relative load. Depending on the precise pc relative load
5802 instruction the range is somewhere between 256 bytes and 4k. This
5803 means that we often have to dump a constant inside a function, and
5804 generate code to branch around it.
5806 It is important to minimize this, since the branches will slow
5807 things down and make the code larger.
5809 Normally we can hide the table after an existing unconditional
5810 branch so that there is no interruption of the flow, but in the
5811 worst case the code looks like this:
5813 ldr rn, L1
5815 b L2
5816 align
5817 L1: .long value
5821 ldr rn, L3
5823 b L4
5824 align
5825 L3: .long value
5829 We fix this by performing a scan after scheduling, which notices
5830 which instructions need to have their operands fetched from the
5831 constant table and builds the table.
5833 The algorithm starts by building a table of all the constants that
5834 need fixing up and all the natural barriers in the function (places
5835 where a constant table can be dropped without breaking the flow).
5836 For each fixup we note how far the pc-relative replacement will be
5837 able to reach and the offset of the instruction into the function.
5839 Having built the table we then group the fixes together to form
5840 tables that are as large as possible (subject to addressing
5841 constraints) and emit each table of constants after the last
5842 barrier that is within range of all the instructions in the group.
5843 If a group does not contain a barrier, then we forcibly create one
5844 by inserting a jump instruction into the flow. Once the table has
5845 been inserted, the insns are then modified to reference the
5846 relevant entry in the pool.
5848 Possible enhancements to the algorithm (not implemented) are:
5850 1) For some processors and object formats, there may be benefit in
5851 aligning the pools to the start of cache lines; this alignment
5852 would need to be taken into account when calculating addressability
5853 of a pool. */
5855 /* These typedefs are located at the start of this file, so that
5856 they can be used in the prototypes there. This comment is to
5857 remind readers of that fact so that the following structures
5858 can be understood more easily.
5860 typedef struct minipool_node Mnode;
5861 typedef struct minipool_fixup Mfix; */
5863 struct minipool_node
5865 /* Doubly linked chain of entries. */
5866 Mnode * next;
5867 Mnode * prev;
5868 /* The maximum offset into the code at which this entry can be placed. While
5869 pushing fixes for forward references, all entries are sorted in order
5870 of increasing max_address. */
5871 HOST_WIDE_INT max_address;
5872 /* Similarly for an entry inserted for a backwards ref. */
5873 HOST_WIDE_INT min_address;
5874 /* The number of fixes referencing this entry. This can become zero
5875 if we "unpush" an entry. In this case we ignore the entry when we
5876 come to emit the code. */
5877 int refcount;
5878 /* The offset from the start of the minipool. */
5879 HOST_WIDE_INT offset;
5880 /* The value in the table. */
5881 rtx value;
5882 /* The mode of value. */
5883 enum machine_mode mode;
5884 int fix_size;
5887 struct minipool_fixup
5889 Mfix * next;
5890 rtx insn;
5891 HOST_WIDE_INT address;
5892 rtx * loc;
5893 enum machine_mode mode;
5894 int fix_size;
5895 rtx value;
5896 Mnode * minipool;
5897 HOST_WIDE_INT forwards;
5898 HOST_WIDE_INT backwards;
5901 /* Fixes less than a word need padding out to a word boundary. */
5902 #define MINIPOOL_FIX_SIZE(mode) \
5903 (GET_MODE_SIZE ((mode)) >= 4 ? GET_MODE_SIZE ((mode)) : 4)
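/* Thus a QImode or HImode constant occupies a full 4 bytes in the
   minipool, while a DImode constant keeps its natural 8 bytes. */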
5905 static Mnode * minipool_vector_head;
5906 static Mnode * minipool_vector_tail;
5907 static rtx minipool_vector_label;
5909 /* The linked list of all minipool fixes required for this function. */
5910 Mfix * minipool_fix_head;
5911 Mfix * minipool_fix_tail;
5912 /* The fix entry for the current minipool, once it has been placed. */
5913 Mfix * minipool_barrier;
5915 /* Determines if INSN is the start of a jump table. Returns the end
5916 of the TABLE or NULL_RTX. */
5918 static rtx
5919 is_jump_table (insn)
5920 rtx insn;
5922 rtx table;
5924 if (GET_CODE (insn) == JUMP_INSN
5925 && JUMP_LABEL (insn) != NULL
5926 && ((table = next_real_insn (JUMP_LABEL (insn)))
5927 == next_real_insn (insn))
5928 && table != NULL
5929 && GET_CODE (table) == JUMP_INSN
5930 && (GET_CODE (PATTERN (table)) == ADDR_VEC
5931 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
5932 return table;
5934 return NULL_RTX;
5937 #ifndef JUMP_TABLES_IN_TEXT_SECTION
5938 #define JUMP_TABLES_IN_TEXT_SECTION 0
5939 #endif
5941 static HOST_WIDE_INT
5942 get_jump_table_size (insn)
5943 rtx insn;
5945 /* ADDR_VECs only take room if read-only data goes into the text
5946 section. */
5947 if (JUMP_TABLES_IN_TEXT_SECTION
5948 #if !defined(READONLY_DATA_SECTION) && !defined(READONLY_DATA_SECTION_ASM_OP)
5949 || 1
5950 #endif
5953 rtx body = PATTERN (insn);
5954 int elt = GET_CODE (body) == ADDR_DIFF_VEC ? 1 : 0;
5956 return GET_MODE_SIZE (GET_MODE (body)) * XVECLEN (body, elt);
5959 return 0;
5962 /* Move a minipool fix MP from its current location to before MAX_MP.
5963 If MAX_MP is NULL, then MP doesn't need moving, but the addressing
5964 constraints may need updating. */
5966 static Mnode *
5967 move_minipool_fix_forward_ref (mp, max_mp, max_address)
5968 Mnode * mp;
5969 Mnode * max_mp;
5970 HOST_WIDE_INT max_address;
5972 /* This should never be true and the code below assumes these are
5973 different. */
5974 if (mp == max_mp)
5975 abort ();
5977 if (max_mp == NULL)
5979 if (max_address < mp->max_address)
5980 mp->max_address = max_address;
5982 else
5984 if (max_address > max_mp->max_address - mp->fix_size)
5985 mp->max_address = max_mp->max_address - mp->fix_size;
5986 else
5987 mp->max_address = max_address;
5989 /* Unlink MP from its current position. Since max_mp is non-null,
5990 mp->prev must be non-null. */
5991 mp->prev->next = mp->next;
5992 if (mp->next != NULL)
5993 mp->next->prev = mp->prev;
5994 else
5995 minipool_vector_tail = mp->prev;
5997 /* Re-insert it before MAX_MP. */
5998 mp->next = max_mp;
5999 mp->prev = max_mp->prev;
6000 max_mp->prev = mp;
6002 if (mp->prev != NULL)
6003 mp->prev->next = mp;
6004 else
6005 minipool_vector_head = mp;
6008 /* Save the new entry. */
6009 max_mp = mp;
6011 /* Scan over the preceding entries and adjust their addresses as
6012 required. */
6013 while (mp->prev != NULL
6014 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
6016 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
6017 mp = mp->prev;
6020 return max_mp;
6023 /* Add a constant to the minipool for a forward reference. Returns the
6024 node added or NULL if the constant will not fit in this pool. */
6026 static Mnode *
6027 add_minipool_forward_ref (fix)
6028 Mfix * fix;
6030 /* If set, max_mp is the first pool_entry that has a lower
6031 constraint than the one we are trying to add. */
6032 Mnode * max_mp = NULL;
6033 HOST_WIDE_INT max_address = fix->address + fix->forwards;
6034 Mnode * mp;
6036 /* If this fix's address is greater than the address of the first
6037 entry, then we can't put the fix in this pool. We subtract the
6038 size of the current fix to ensure that if the table is fully
6039 packed we still have enough room to insert this value by shuffling
6040 the other fixes forwards. */
6041 if (minipool_vector_head &&
6042 fix->address >= minipool_vector_head->max_address - fix->fix_size)
6043 return NULL;
6045 /* Scan the pool to see if a constant with the same value has
6046 already been added. While we are doing this, also note the
6047 location where we must insert the constant if it doesn't already
6048 exist. */
6049 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
6051 if (GET_CODE (fix->value) == GET_CODE (mp->value)
6052 && fix->mode == mp->mode
6053 && (GET_CODE (fix->value) != CODE_LABEL
6054 || (CODE_LABEL_NUMBER (fix->value)
6055 == CODE_LABEL_NUMBER (mp->value)))
6056 && rtx_equal_p (fix->value, mp->value))
6058 /* More than one fix references this entry. */
6059 mp->refcount++;
6060 return move_minipool_fix_forward_ref (mp, max_mp, max_address);
6063 /* Note the insertion point if necessary. */
6064 if (max_mp == NULL
6065 && mp->max_address > max_address)
6066 max_mp = mp;
6069 /* The value is not currently in the minipool, so we need to create
6070 a new entry for it. If MAX_MP is NULL, the entry will be put on
6071 the end of the list since the placement is less constrained than
6072 any existing entry. Otherwise, we insert the new fix before
6073 MAX_MP and, if necessary, adjust the constraints on the other
6074 entries. */
6075 mp = xmalloc (sizeof (* mp));
6076 mp->fix_size = fix->fix_size;
6077 mp->mode = fix->mode;
6078 mp->value = fix->value;
6079 mp->refcount = 1;
6080 /* Not yet required for a backwards ref. */
6081 mp->min_address = -65536;
6083 if (max_mp == NULL)
6085 mp->max_address = max_address;
6086 mp->next = NULL;
6087 mp->prev = minipool_vector_tail;
6089 if (mp->prev == NULL)
6091 minipool_vector_head = mp;
6092 minipool_vector_label = gen_label_rtx ();
6094 else
6095 mp->prev->next = mp;
6097 minipool_vector_tail = mp;
6099 else
6101 if (max_address > max_mp->max_address - mp->fix_size)
6102 mp->max_address = max_mp->max_address - mp->fix_size;
6103 else
6104 mp->max_address = max_address;
6106 mp->next = max_mp;
6107 mp->prev = max_mp->prev;
6108 max_mp->prev = mp;
6109 if (mp->prev != NULL)
6110 mp->prev->next = mp;
6111 else
6112 minipool_vector_head = mp;
6115 /* Save the new entry. */
6116 max_mp = mp;
6118 /* Scan over the preceding entries and adjust their addresses as
6119 required. */
6120 while (mp->prev != NULL
6121 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
6123 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
6124 mp = mp->prev;
6127 return max_mp;
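/* Worked example (a sketch; the figures are assumed): a fix at address
   1000 whose insn has a 4k pool range gets max_address 1000 + 4096.
   If an equal constant is already pooled it is simply reused
   (refcount++), possibly after being moved forward to honour the new,
   tighter constraint.  */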
6130 static Mnode *
6131 move_minipool_fix_backward_ref (mp, min_mp, min_address)
6132 Mnode * mp;
6133 Mnode * min_mp;
6134 HOST_WIDE_INT min_address;
6136 HOST_WIDE_INT offset;
6138 /* This should never be true, and the code below assumes these are
6139 different. */
6140 if (mp == min_mp)
6141 abort ();
6143 if (min_mp == NULL)
6145 if (min_address > mp->min_address)
6146 mp->min_address = min_address;
6148 else
6150 /* We will adjust this below if it is too loose. */
6151 mp->min_address = min_address;
6153 /* Unlink MP from its current position. Since min_mp is non-null,
6154 mp->next must be non-null. */
6155 mp->next->prev = mp->prev;
6156 if (mp->prev != NULL)
6157 mp->prev->next = mp->next;
6158 else
6159 minipool_vector_head = mp->next;
6161 /* Reinsert it after MIN_MP. */
6162 mp->prev = min_mp;
6163 mp->next = min_mp->next;
6164 min_mp->next = mp;
6165 if (mp->next != NULL)
6166 mp->next->prev = mp;
6167 else
6168 minipool_vector_tail = mp;
6171 min_mp = mp;
6173 offset = 0;
6174 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
6176 mp->offset = offset;
6177 if (mp->refcount > 0)
6178 offset += mp->fix_size;
6180 if (mp->next && mp->next->min_address < mp->min_address + mp->fix_size)
6181 mp->next->min_address = mp->min_address + mp->fix_size;
6184 return min_mp;
6187 /* Add a constant to the minipool for a backward reference. Returns the
6188 node added or NULL if the constant will not fit in this pool.
6190 Note that the code for insertion for a backwards reference can be
6191 somewhat confusing because the calculated offsets for each fix do
6192 not take into account the size of the pool (which is still under
6193 construction). */
6195 static Mnode *
6196 add_minipool_backward_ref (fix)
6197 Mfix * fix;
6199 /* If set, min_mp is the last pool_entry that has a lower constraint
6200 than the one we are trying to add. */
6201 Mnode * min_mp = NULL;
6202 /* This can be negative, since it is only a constraint. */
6203 HOST_WIDE_INT min_address = fix->address - fix->backwards;
6204 Mnode * mp;
6206 /* If we can't reach the current pool from this insn, or if we can't
6207 insert this entry at the end of the pool without pushing other
6208 fixes out of range, then we don't try. This ensures that we
6209 can't fail later on. */
6210 if (min_address >= minipool_barrier->address
6211 || (minipool_vector_tail->min_address + fix->fix_size
6212 >= minipool_barrier->address))
6213 return NULL;
6215 /* Scan the pool to see if a constant with the same value has
6216 already been added. While we are doing this, also note the
6217 location where we must insert the constant if it doesn't already
6218 exist. */
6219 for (mp = minipool_vector_tail; mp != NULL; mp = mp->prev)
6221 if (GET_CODE (fix->value) == GET_CODE (mp->value)
6222 && fix->mode == mp->mode
6223 && (GET_CODE (fix->value) != CODE_LABEL
6224 || (CODE_LABEL_NUMBER (fix->value)
6225 == CODE_LABEL_NUMBER (mp->value)))
6226 && rtx_equal_p (fix->value, mp->value)
6227 /* Check that there is enough slack to move this entry to the
6228 end of the table (this is conservative). */
6229 && (mp->max_address
6230 > (minipool_barrier->address
6231 + minipool_vector_tail->offset
6232 + minipool_vector_tail->fix_size)))
6234 mp->refcount++;
6235 return move_minipool_fix_backward_ref (mp, min_mp, min_address);
6238 if (min_mp != NULL)
6239 mp->min_address += fix->fix_size;
6240 else
6242 /* Note the insertion point if necessary. */
6243 if (mp->min_address < min_address)
6244 min_mp = mp;
6245 else if (mp->max_address
6246 < minipool_barrier->address + mp->offset + fix->fix_size)
6248 /* Inserting before this entry would push the fix beyond
6249 its maximum address (which can happen if we have
6250 re-located a forwards fix); force the new fix to come
6251 after it. */
6252 min_mp = mp;
6253 min_address = mp->min_address + fix->fix_size;
6258 /* We need to create a new entry. */
6259 mp = xmalloc (sizeof (* mp));
6260 mp->fix_size = fix->fix_size;
6261 mp->mode = fix->mode;
6262 mp->value = fix->value;
6263 mp->refcount = 1;
6264 mp->max_address = minipool_barrier->address + 65536;
6266 mp->min_address = min_address;
6268 if (min_mp == NULL)
6270 mp->prev = NULL;
6271 mp->next = minipool_vector_head;
6273 if (mp->next == NULL)
6275 minipool_vector_tail = mp;
6276 minipool_vector_label = gen_label_rtx ();
6278 else
6279 mp->next->prev = mp;
6281 minipool_vector_head = mp;
6283 else
6285 mp->next = min_mp->next;
6286 mp->prev = min_mp;
6287 min_mp->next = mp;
6289 if (mp->next != NULL)
6290 mp->next->prev = mp;
6291 else
6292 minipool_vector_tail = mp;
6295 /* Save the new entry. */
6296 min_mp = mp;
6298 if (mp->prev)
6299 mp = mp->prev;
6300 else
6301 mp->offset = 0;
6303 /* Scan over the following entries and adjust their offsets. */
6304 while (mp->next != NULL)
6306 if (mp->next->min_address < mp->min_address + mp->fix_size)
6307 mp->next->min_address = mp->min_address + mp->fix_size;
6309 if (mp->refcount)
6310 mp->next->offset = mp->offset + mp->fix_size;
6311 else
6312 mp->next->offset = mp->offset;
6314 mp = mp->next;
6317 return min_mp;
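/* Sketch of the backwards case (figures assumed): a fix at address
   5000 whose insn has neg_pool_range 4000 yields min_address 1000,
   i.e. the constant must not be placed before address 1000.  Since
   the offsets computed here ignore the size of the still-growing
   pool, each entry is additionally checked against
   minipool_barrier->address so it remains reachable once the pool is
   laid out.  */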
6320 static void
6321 assign_minipool_offsets (barrier)
6322 Mfix * barrier;
6324 HOST_WIDE_INT offset = 0;
6325 Mnode * mp;
6327 minipool_barrier = barrier;
6329 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
6331 mp->offset = offset;
6333 if (mp->refcount > 0)
6334 offset += mp->fix_size;
6338 /* Output the literal table */
6339 static void
6340 dump_minipool (scan)
6341 rtx scan;
6343 Mnode * mp;
6344 Mnode * nmp;
6346 if (rtl_dump_file)
6347 fprintf (rtl_dump_file,
6348 ";; Emitting minipool after insn %u; address %ld\n",
6349 INSN_UID (scan), (long) minipool_barrier->address);
6351 scan = emit_label_after (gen_label_rtx (), scan);
6352 scan = emit_insn_after (gen_align_4 (), scan);
6353 scan = emit_label_after (minipool_vector_label, scan);
6355 for (mp = minipool_vector_head; mp != NULL; mp = nmp)
6357 if (mp->refcount > 0)
6359 if (rtl_dump_file)
6361 fprintf (rtl_dump_file,
6362 ";; Offset %u, min %ld, max %ld ",
6363 (unsigned) mp->offset, (long) mp->min_address,
6364 (long) mp->max_address);
6365 arm_print_value (rtl_dump_file, mp->value);
6366 fputc ('\n', rtl_dump_file);
6369 switch (mp->fix_size)
6371 #ifdef HAVE_consttable_1
6372 case 1:
6373 scan = emit_insn_after (gen_consttable_1 (mp->value), scan);
6374 break;
6376 #endif
6377 #ifdef HAVE_consttable_2
6378 case 2:
6379 scan = emit_insn_after (gen_consttable_2 (mp->value), scan);
6380 break;
6382 #endif
6383 #ifdef HAVE_consttable_4
6384 case 4:
6385 scan = emit_insn_after (gen_consttable_4 (mp->value), scan);
6386 break;
6388 #endif
6389 #ifdef HAVE_consttable_8
6390 case 8:
6391 scan = emit_insn_after (gen_consttable_8 (mp->value), scan);
6392 break;
6394 #endif
6395 default:
6396 abort ();
6397 break;
6401 nmp = mp->next;
6402 free (mp);
6405 minipool_vector_head = minipool_vector_tail = NULL;
6406 scan = emit_insn_after (gen_consttable_end (), scan);
6407 scan = emit_barrier_after (scan);
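/* The emitted literal table looks roughly like this (a sketch):

       .align  2
   .LPOOL:                     @ minipool_vector_label
       .word   <constant 0>    @ offset 0
       .word   <constant 1>    @ offset 4

   followed by a consttable_end marker and a barrier; entries whose
   refcount dropped to zero are freed without being emitted.  */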
6410 /* Return the cost of forcibly inserting a barrier after INSN. */
6412 static int
6413 arm_barrier_cost (insn)
6414 rtx insn;
6416 /* Basing the location of the pool on the loop depth is preferable,
6417 but at the moment, the basic block information seems to be
6418 corrupt by this stage of the compilation. */
6419 int base_cost = 50;
6420 rtx next = next_nonnote_insn (insn);
6422 if (next != NULL && GET_CODE (next) == CODE_LABEL)
6423 base_cost -= 20;
6425 switch (GET_CODE (insn))
6427 case CODE_LABEL:
6428 /* It will always be better to place the table before the label, rather
6429 than after it. */
6430 return 50;
6432 case INSN:
6433 case CALL_INSN:
6434 return base_cost;
6436 case JUMP_INSN:
6437 return base_cost - 10;
6439 default:
6440 return base_cost + 10;
6444 /* Find the best place in the insn stream in the range
6445 (FIX->address,MAX_ADDRESS) to forcibly insert a minipool barrier.
6446 Create the barrier by inserting a jump and add a new fix entry for
6447 it. */
6449 static Mfix *
6450 create_fix_barrier (fix, max_address)
6451 Mfix * fix;
6452 HOST_WIDE_INT max_address;
6454 HOST_WIDE_INT count = 0;
6455 rtx barrier;
6456 rtx from = fix->insn;
6457 rtx selected = from;
6458 int selected_cost;
6459 HOST_WIDE_INT selected_address;
6460 Mfix * new_fix;
6461 HOST_WIDE_INT max_count = max_address - fix->address;
6462 rtx label = gen_label_rtx ();
6464 selected_cost = arm_barrier_cost (from);
6465 selected_address = fix->address;
6467 while (from && count < max_count)
6469 rtx tmp;
6470 int new_cost;
6472 /* This code shouldn't have been called if there was a natural barrier
6473 within range. */
6474 if (GET_CODE (from) == BARRIER)
6475 abort ();
6477 /* Count the length of this insn. */
6478 count += get_attr_length (from);
6480 /* If there is a jump table, add its length. */
6481 tmp = is_jump_table (from);
6482 if (tmp != NULL)
6484 count += get_jump_table_size (tmp);
6486 /* Jump tables aren't in a basic block, so base the cost on
6487 the dispatch insn. If we select this location, we will
6488 still put the pool after the table. */
6489 new_cost = arm_barrier_cost (from);
6491 if (count < max_count && new_cost <= selected_cost)
6493 selected = tmp;
6494 selected_cost = new_cost;
6495 selected_address = fix->address + count;
6498 /* Continue after the dispatch table. */
6499 from = NEXT_INSN (tmp);
6500 continue;
6503 new_cost = arm_barrier_cost (from);
6505 if (count < max_count && new_cost <= selected_cost)
6507 selected = from;
6508 selected_cost = new_cost;
6509 selected_address = fix->address + count;
6512 from = NEXT_INSN (from);
6515 /* Create a new JUMP_INSN that branches around a barrier. */
6516 from = emit_jump_insn_after (gen_jump (label), selected);
6517 JUMP_LABEL (from) = label;
6518 barrier = emit_barrier_after (from);
6519 emit_label_after (label, barrier);
6521 /* Create a minipool barrier entry for the new barrier. */
6522 new_fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* new_fix));
6523 new_fix->insn = barrier;
6524 new_fix->address = selected_address;
6525 new_fix->next = fix->next;
6526 fix->next = new_fix;
6528 return new_fix;
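/* The barrier created above produces an insn sequence like (sketch):

       b       .Lskip          @ new JUMP_INSN
       <barrier>               @ the minipool will be dumped here
   .Lskip:

   so execution branches around the constants that will later be
   placed at the barrier.  */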
6531 /* Record that there is a natural barrier in the insn stream at
6532 ADDRESS. */
6533 static void
6534 push_minipool_barrier (insn, address)
6535 rtx insn;
6536 HOST_WIDE_INT address;
6538 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
6540 fix->insn = insn;
6541 fix->address = address;
6543 fix->next = NULL;
6544 if (minipool_fix_head != NULL)
6545 minipool_fix_tail->next = fix;
6546 else
6547 minipool_fix_head = fix;
6549 minipool_fix_tail = fix;
6552 /* Record INSN, which will need fixing up to load a value from the
6553 minipool. ADDRESS is the offset of the insn since the start of the
6554 function; LOC is a pointer to the part of the insn which requires
6555 fixing; VALUE is the constant that must be loaded, which is of type
6556 MODE. */
6557 static void
6558 push_minipool_fix (insn, address, loc, mode, value)
6559 rtx insn;
6560 HOST_WIDE_INT address;
6561 rtx * loc;
6562 enum machine_mode mode;
6563 rtx value;
6565 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
6567 #ifdef AOF_ASSEMBLER
6568 /* PIC symbol references need to be converted into offsets into the
6569 based area. */
6570 /* XXX This shouldn't be done here. */
6571 if (flag_pic && GET_CODE (value) == SYMBOL_REF)
6572 value = aof_pic_entry (value);
6573 #endif /* AOF_ASSEMBLER */
6575 fix->insn = insn;
6576 fix->address = address;
6577 fix->loc = loc;
6578 fix->mode = mode;
6579 fix->fix_size = MINIPOOL_FIX_SIZE (mode);
6580 fix->value = value;
6581 fix->forwards = get_attr_pool_range (insn);
6582 fix->backwards = get_attr_neg_pool_range (insn);
6583 fix->minipool = NULL;
6585 /* If an insn doesn't have a range defined for it, then it isn't
6586 expecting to be reworked by this code. Better to abort now than
6587 to generate duff assembly code. */
6588 if (fix->forwards == 0 && fix->backwards == 0)
6589 abort ();
6591 if (rtl_dump_file)
6593 fprintf (rtl_dump_file,
6594 ";; %smode fixup for i%d; addr %lu, range (%ld,%ld): ",
6595 GET_MODE_NAME (mode),
6596 INSN_UID (insn), (unsigned long) address,
6597 -1 * (long)fix->backwards, (long)fix->forwards);
6598 arm_print_value (rtl_dump_file, fix->value);
6599 fprintf (rtl_dump_file, "\n");
6602 /* Add it to the chain of fixes. */
6603 fix->next = NULL;
6605 if (minipool_fix_head != NULL)
6606 minipool_fix_tail->next = fix;
6607 else
6608 minipool_fix_head = fix;
6610 minipool_fix_tail = fix;
6613 /* Scan INSN and note any of its operands that need fixing. */
6615 static void
6616 note_invalid_constants (insn, address)
6617 rtx insn;
6618 HOST_WIDE_INT address;
6620 int opno;
6622 extract_insn (insn);
6624 if (!constrain_operands (1))
6625 fatal_insn_not_found (insn);
6627 /* Fill in recog_op_alt with information about the constraints of this
6628 insn. */
6629 preprocess_constraints ();
6631 for (opno = 0; opno < recog_data.n_operands; opno++)
6633 /* Things we need to fix can only occur in inputs. */
6634 if (recog_data.operand_type[opno] != OP_IN)
6635 continue;
6637 /* If this alternative is a memory reference, then any mention
6638 of constants in this alternative is really to fool reload
6639 into allowing us to accept one there. We need to fix them up
6640 now so that we output the right code. */
6641 if (recog_op_alt[opno][which_alternative].memory_ok)
6643 rtx op = recog_data.operand[opno];
6645 if (CONSTANT_P (op))
6646 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
6647 recog_data.operand_mode[opno], op);
6648 #if 0
6649 /* RWE: Now we look correctly at the operands for the insn,
6650 this shouldn't be needed any more. */
6651 #ifndef AOF_ASSEMBLER
6652 /* XXX Is this still needed? */
6653 else if (GET_CODE (op) == UNSPEC && XINT (op, 1) == UNSPEC_PIC_SYM)
6654 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
6655 recog_data.operand_mode[opno],
6656 XVECEXP (op, 0, 0));
6657 #endif
6658 #endif
6659 else if (GET_CODE (op) == MEM
6660 && GET_CODE (XEXP (op, 0)) == SYMBOL_REF
6661 && CONSTANT_POOL_ADDRESS_P (XEXP (op, 0)))
6662 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
6663 recog_data.operand_mode[opno],
6664 get_pool_constant (XEXP (op, 0)));
6669 void
6670 arm_reorg (first)
6671 rtx first;
6673 rtx insn;
6674 HOST_WIDE_INT address = 0;
6675 Mfix * fix;
6677 minipool_fix_head = minipool_fix_tail = NULL;
6679 /* The first insn must always be a note, or the code below won't
6680 scan it properly. */
6681 if (GET_CODE (first) != NOTE)
6682 abort ();
6684 /* Scan all the insns and record the operands that will need fixing. */
6685 for (insn = next_nonnote_insn (first); insn; insn = next_nonnote_insn (insn))
6687 if (GET_CODE (insn) == BARRIER)
6688 push_minipool_barrier (insn, address);
6689 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
6690 || GET_CODE (insn) == JUMP_INSN)
6692 rtx table;
6694 note_invalid_constants (insn, address);
6695 address += get_attr_length (insn);
6697 /* If the insn is a vector jump, add the size of the table
6698 and skip the table. */
6699 if ((table = is_jump_table (insn)) != NULL)
6701 address += get_jump_table_size (table);
6702 insn = table;
6707 fix = minipool_fix_head;
6709 /* Now scan the fixups and perform the required changes. */
6710 while (fix)
6712 Mfix * ftmp;
6713 Mfix * fdel;
6714 Mfix * last_added_fix;
6715 Mfix * last_barrier = NULL;
6716 Mfix * this_fix;
6718 /* Skip any further barriers before the next fix. */
6719 while (fix && GET_CODE (fix->insn) == BARRIER)
6720 fix = fix->next;
6722 /* No more fixes. */
6723 if (fix == NULL)
6724 break;
6726 last_added_fix = NULL;
6728 for (ftmp = fix; ftmp; ftmp = ftmp->next)
6730 if (GET_CODE (ftmp->insn) == BARRIER)
6732 if (ftmp->address >= minipool_vector_head->max_address)
6733 break;
6735 last_barrier = ftmp;
6737 else if ((ftmp->minipool = add_minipool_forward_ref (ftmp)) == NULL)
6738 break;
6740 last_added_fix = ftmp; /* Keep track of the last fix added. */
6743 /* If we found a barrier, drop back to that; any fixes that we
6744 could have reached but come after the barrier will now go in
6745 the next mini-pool. */
6746 if (last_barrier != NULL)
6748 /* Reduce the refcount for those fixes that won't go into this
6749 pool after all. */
6750 for (fdel = last_barrier->next;
6751 fdel && fdel != ftmp;
6752 fdel = fdel->next)
6754 fdel->minipool->refcount--;
6755 fdel->minipool = NULL;
6758 ftmp = last_barrier;
6760 else
6762 /* ftmp is the first fix that we can't fit into this pool, and
6763 there are no natural barriers that we could use. Insert a
6764 new barrier in the code somewhere between the previous
6765 fix and this one, and arrange to jump around it. */
6766 HOST_WIDE_INT max_address;
6768 /* The last item on the list of fixes must be a barrier, so
6769 we can never run off the end of the list of fixes without
6770 last_barrier being set. */
6771 if (ftmp == NULL)
6772 abort ();
6774 max_address = minipool_vector_head->max_address;
6775 /* Check that there isn't another fix that is in range that
6776 we couldn't fit into this pool because the pool was
6777 already too large: we need to put the pool before such an
6778 instruction. */
6779 if (ftmp->address < max_address)
6780 max_address = ftmp->address;
6782 last_barrier = create_fix_barrier (last_added_fix, max_address);
6785 assign_minipool_offsets (last_barrier);
6787 while (ftmp)
6789 if (GET_CODE (ftmp->insn) != BARRIER
6790 && ((ftmp->minipool = add_minipool_backward_ref (ftmp))
6791 == NULL))
6792 break;
6794 ftmp = ftmp->next;
6797 /* Scan over the fixes we have identified for this pool, fixing them
6798 up and adding the constants to the pool itself. */
6799 for (this_fix = fix; this_fix && ftmp != this_fix;
6800 this_fix = this_fix->next)
6801 if (GET_CODE (this_fix->insn) != BARRIER)
6803 rtx addr
6804 = plus_constant (gen_rtx_LABEL_REF (VOIDmode,
6805 minipool_vector_label),
6806 this_fix->minipool->offset);
6807 *this_fix->loc = gen_rtx_MEM (this_fix->mode, addr);
6810 dump_minipool (last_barrier->insn);
6811 fix = ftmp;
6814 /* From now on we must synthesize any constants that we can't handle
6815 directly. This can happen if the RTL gets split during final
6816 instruction generation. */
6817 after_arm_reorg = 1;
6819 /* Free the minipool memory. */
6820 obstack_free (&minipool_obstack, minipool_startobj);
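/* In outline, the pass above behaves as follows (a summary sketch):

     for each run of fixes:
       greedily add forward references until one no longer fits;
       back up to the last natural barrier, or synthesize one with
         create_fix_barrier ();
       assign offsets and retry the leftovers as backward references;
       rewrite each fixed insn to load from the pool label, then dump
         the pool after the barrier.  */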
6823 /* Routines to output assembly language. */
6825 /* If the rtx is one of the valid FPA immediate constants, return the
6826 string of the number. In this way we can ensure that valid double
6827 constants are generated even when cross-compiling. */
6829 const char *
6830 fp_immediate_constant (x)
6831 rtx x;
6833 REAL_VALUE_TYPE r;
6834 int i;
6836 if (!fpa_consts_inited)
6837 init_fpa_table ();
6839 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
6840 for (i = 0; i < 8; i++)
6841 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
6842 return strings_fpa[i];
6844 abort ();
6847 /* As for fp_immediate_constant, but value is passed directly, not in rtx. */
6849 static const char *
6850 fp_const_from_val (r)
6851 REAL_VALUE_TYPE * r;
6853 int i;
6855 if (!fpa_consts_inited)
6856 init_fpa_table ();
6858 for (i = 0; i < 8; i++)
6859 if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
6860 return strings_fpa[i];
6862 abort ();
6865 /* Output the operands of a LDM/STM instruction to STREAM.
6866 MASK is the ARM register set mask of which only bits 0-15 are important.
6867 REG is the base register, either the frame pointer or the stack pointer,
6868 INSTR is the possibly suffixed load or store instruction. */
6870 static void
6871 print_multi_reg (stream, instr, reg, mask)
6872 FILE * stream;
6873 const char * instr;
6874 int reg;
6875 int mask;
6877 int i;
6878 int not_first = FALSE;
6880 fputc ('\t', stream);
6881 asm_fprintf (stream, instr, reg);
6882 fputs (", {", stream);
6884 for (i = 0; i <= LAST_ARM_REGNUM; i++)
6885 if (mask & (1 << i))
6887 if (not_first)
6888 fprintf (stream, ", ");
6890 asm_fprintf (stream, "%r", i);
6891 not_first = TRUE;
6894 fprintf (stream, "}%s\n", TARGET_APCS_32 ? "" : "^");
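/* Example output (a sketch; register numbers assumed): for INSTR
   "ldmfd\t%r!", REG == SP_REGNUM and MASK == 0x4030 this prints

       ldmfd   sp!, {r4, r5, lr}

   with a trailing "^" appended in 26-bit (non-APCS-32) mode.  */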
6897 /* Output a 'call' insn. */
6899 const char *
6900 output_call (operands)
6901 rtx * operands;
6903 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
6905 if (REGNO (operands[0]) == LR_REGNUM)
6907 operands[0] = gen_rtx_REG (SImode, IP_REGNUM);
6908 output_asm_insn ("mov%?\t%0, %|lr", operands);
6911 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
6913 if (TARGET_INTERWORK)
6914 output_asm_insn ("bx%?\t%0", operands);
6915 else
6916 output_asm_insn ("mov%?\t%|pc, %0", operands);
6918 return "";
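/* A typical expansion (sketch): for a call through r2 this emits

       mov     lr, pc
       mov     pc, r2          @ "bx r2" when interworking

   relying on the PC reading 8 bytes ahead, so LR ends up holding the
   address of the insn following the branch.  */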
6921 static int
6922 eliminate_lr2ip (x)
6923 rtx * x;
6925 int something_changed = 0;
6926 rtx x0 = * x;
6927 int code = GET_CODE (x0);
6928 int i, j;
6929 const char * fmt;
6931 switch (code)
6933 case REG:
6934 if (REGNO (x0) == LR_REGNUM)
6936 *x = gen_rtx_REG (SImode, IP_REGNUM);
6937 return 1;
6939 return 0;
6940 default:
6941 /* Scan through the sub-elements and change any references there. */
6942 fmt = GET_RTX_FORMAT (code);
6944 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6945 if (fmt[i] == 'e')
6946 something_changed |= eliminate_lr2ip (&XEXP (x0, i));
6947 else if (fmt[i] == 'E')
6948 for (j = 0; j < XVECLEN (x0, i); j++)
6949 something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));
6951 return something_changed;
6955 /* Output a 'call' insn that is a reference in memory. */
6957 const char *
6958 output_call_mem (operands)
6959 rtx * operands;
6961 operands[0] = copy_rtx (operands[0]); /* Be ultra careful. */
6962 /* Handle calls using lr by using ip (which may be clobbered in subr anyway). */
6963 if (eliminate_lr2ip (&operands[0]))
6964 output_asm_insn ("mov%?\t%|ip, %|lr", operands);
6966 if (TARGET_INTERWORK)
6968 output_asm_insn ("ldr%?\t%|ip, %0", operands);
6969 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
6970 output_asm_insn ("bx%?\t%|ip", operands);
6972 else
6974 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
6975 output_asm_insn ("ldr%?\t%|pc, %0", operands);
6978 return "";
6982 /* Output a move from arm registers to an fpu register.
6983 OPERANDS[0] is an fpu register.
6984 OPERANDS[1] is the first of the three arm registers holding the value. */
6986 const char *
6987 output_mov_long_double_fpu_from_arm (operands)
6988 rtx * operands;
6990 int arm_reg0 = REGNO (operands[1]);
6991 rtx ops[3];
6993 if (arm_reg0 == IP_REGNUM)
6994 abort ();
6996 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6997 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6998 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
7000 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
7001 output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);
7003 return "";
7006 /* Output a move from an fpu register to arm registers.
7007 OPERANDS[0] is the first of the three arm registers holding the value.
7008 OPERANDS[1] is an fpu register. */
7010 const char *
7011 output_mov_long_double_arm_from_fpu (operands)
7012 rtx * operands;
7014 int arm_reg0 = REGNO (operands[0]);
7015 rtx ops[3];
7017 if (arm_reg0 == IP_REGNUM)
7018 abort ();
7020 ops[0] = gen_rtx_REG (SImode, arm_reg0);
7021 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
7022 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
7024 output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
7025 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
7026 return "";
7029 /* Output a move of a long double from arm registers to arm registers.
7030 OPERANDS[0] is the destination.
7031 OPERANDS[1] is the source. */
7033 const char *
7034 output_mov_long_double_arm_from_arm (operands)
7035 rtx * operands;
7037 /* We have to be careful here because the two might overlap. */
7038 int dest_start = REGNO (operands[0]);
7039 int src_start = REGNO (operands[1]);
7040 rtx ops[2];
7041 int i;
7043 if (dest_start < src_start)
7045 for (i = 0; i < 3; i++)
7047 ops[0] = gen_rtx_REG (SImode, dest_start + i);
7048 ops[1] = gen_rtx_REG (SImode, src_start + i);
7049 output_asm_insn ("mov%?\t%0, %1", ops);
7052 else
7054 for (i = 2; i >= 0; i--)
7056 ops[0] = gen_rtx_REG (SImode, dest_start + i);
7057 ops[1] = gen_rtx_REG (SImode, src_start + i);
7058 output_asm_insn ("mov%?\t%0, %1", ops);
7062 return "";
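/* Overlap example (sketch): copying {r1, r2, r3} into {r2, r3, r4}
   uses the descending loop (r4 <- r3, then r3 <- r2, then r2 <- r1)
   so no source register is clobbered before it has been read; the
   ascending loop handles the opposite overlap.  */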
7066 /* Output a move from arm registers to an fpu register.
7067 OPERANDS[0] is an fpu register.
7068 OPERANDS[1] is the first register of an arm register pair. */
7070 const char *
7071 output_mov_double_fpu_from_arm (operands)
7072 rtx * operands;
7074 int arm_reg0 = REGNO (operands[1]);
7075 rtx ops[2];
7077 if (arm_reg0 == IP_REGNUM)
7078 abort ();
7080 ops[0] = gen_rtx_REG (SImode, arm_reg0);
7081 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
7082 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
7083 output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
7084 return "";
7087 /* Output a move from an fpu register to arm registers.
7088 OPERANDS[0] is the first register of an arm register pair.
7089 OPERANDS[1] is an fpu register. */
7091 const char *
7092 output_mov_double_arm_from_fpu (operands)
7093 rtx * operands;
7095 int arm_reg0 = REGNO (operands[0]);
7096 rtx ops[2];
7098 if (arm_reg0 == IP_REGNUM)
7099 abort ();
7101 ops[0] = gen_rtx_REG (SImode, arm_reg0);
7102 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
7103 output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
7104 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
7105 return "";
7108 /* Output a move between double words.
7109 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
7110 or MEM<-REG and all MEMs must be offsettable addresses. */
7112 const char *
7113 output_move_double (operands)
7114 rtx * operands;
7116 enum rtx_code code0 = GET_CODE (operands[0]);
7117 enum rtx_code code1 = GET_CODE (operands[1]);
7118 rtx otherops[3];
7120 if (code0 == REG)
7122 int reg0 = REGNO (operands[0]);
7124 otherops[0] = gen_rtx_REG (SImode, 1 + reg0);
7126 if (code1 == REG)
7128 int reg1 = REGNO (operands[1]);
7129 if (reg1 == IP_REGNUM)
7130 abort ();
7132 /* Ensure the second source is not overwritten. */
7133 if (reg1 == reg0 + (WORDS_BIG_ENDIAN ? -1 : 1))
7134 output_asm_insn ("mov%?\t%Q0, %Q1\n\tmov%?\t%R0, %R1", operands);
7135 else
7136 output_asm_insn ("mov%?\t%R0, %R1\n\tmov%?\t%Q0, %Q1", operands);
7138 else if (code1 == CONST_DOUBLE)
7140 if (GET_MODE (operands[1]) == DFmode)
7142 REAL_VALUE_TYPE r;
7143 long l[2];
7145 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
7146 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
7147 otherops[1] = GEN_INT (l[1]);
7148 operands[1] = GEN_INT (l[0]);
7150 else if (GET_MODE (operands[1]) != VOIDmode)
7151 abort ();
7152 else if (WORDS_BIG_ENDIAN)
7154 otherops[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
7155 operands[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
7157 else
7159 otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
7160 operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
7163 output_mov_immediate (operands);
7164 output_mov_immediate (otherops);
7166 else if (code1 == CONST_INT)
7168 #if HOST_BITS_PER_WIDE_INT > 32
7169 /* If HOST_WIDE_INT is more than 32 bits, the intval tells us
7170 what the upper word is. */
7171 if (WORDS_BIG_ENDIAN)
7173 otherops[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
7174 operands[1] = GEN_INT (INTVAL (operands[1]) >> 32);
7176 else
7178 otherops[1] = GEN_INT (INTVAL (operands[1]) >> 32);
7179 operands[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
7181 #else
7182 /* Sign extend the intval into the high-order word. */
7183 if (WORDS_BIG_ENDIAN)
7185 otherops[1] = operands[1];
7186 operands[1] = (INTVAL (operands[1]) < 0
7187 ? constm1_rtx : const0_rtx);
7189 else
7190 otherops[1] = INTVAL (operands[1]) < 0 ? constm1_rtx : const0_rtx;
7191 #endif
7192 output_mov_immediate (otherops);
7193 output_mov_immediate (operands);
7195 else if (code1 == MEM)
7197 switch (GET_CODE (XEXP (operands[1], 0)))
7199 case REG:
7200 output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
7201 break;
7203 case PRE_INC:
7204 abort (); /* Should never happen now. */
7205 break;
7207 case PRE_DEC:
7208 output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
7209 break;
7211 case POST_INC:
7212 output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
7213 break;
7215 case POST_DEC:
7216 abort (); /* Should never happen now. */
7217 break;
7219 case LABEL_REF:
7220 case CONST:
7221 output_asm_insn ("adr%?\t%0, %1", operands);
7222 output_asm_insn ("ldm%?ia\t%0, %M0", operands);
7223 break;
7225 default:
7226 if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1),
7227 GET_MODE (XEXP (XEXP (operands[1], 0), 1))))
7229 otherops[0] = operands[0];
7230 otherops[1] = XEXP (XEXP (operands[1], 0), 0);
7231 otherops[2] = XEXP (XEXP (operands[1], 0), 1);
7233 if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
7235 if (GET_CODE (otherops[2]) == CONST_INT)
7237 switch ((int) INTVAL (otherops[2]))
7239 case -8:
7240 output_asm_insn ("ldm%?db\t%1, %M0", otherops);
7241 return "";
7242 case -4:
7243 output_asm_insn ("ldm%?da\t%1, %M0", otherops);
7244 return "";
7245 case 4:
7246 output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
7247 return "";
7250 if (!(const_ok_for_arm (INTVAL (otherops[2]))))
7251 output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
7252 else
7253 output_asm_insn ("add%?\t%0, %1, %2", otherops);
7255 else
7256 output_asm_insn ("add%?\t%0, %1, %2", otherops);
7258 else
7259 output_asm_insn ("sub%?\t%0, %1, %2", otherops);
7261 return "ldm%?ia\t%0, %M0";
7263 else
7265 otherops[1] = adjust_address (operands[1], VOIDmode, 4);
7266 /* Take care of overlapping base/data reg. */
7267 if (reg_mentioned_p (operands[0], operands[1]))
7269 output_asm_insn ("ldr%?\t%0, %1", otherops);
7270 output_asm_insn ("ldr%?\t%0, %1", operands);
7272 else
7274 output_asm_insn ("ldr%?\t%0, %1", operands);
7275 output_asm_insn ("ldr%?\t%0, %1", otherops);
7280 else
7281 abort (); /* Constraints should prevent this. */
7283 else if (code0 == MEM && code1 == REG)
7285 if (REGNO (operands[1]) == IP_REGNUM)
7286 abort ();
7288 switch (GET_CODE (XEXP (operands[0], 0)))
7290 case REG:
7291 output_asm_insn ("stm%?ia\t%m0, %M1", operands);
7292 break;
7294 case PRE_INC:
7295 abort (); /* Should never happen now. */
7296 break;
7298 case PRE_DEC:
7299 output_asm_insn ("stm%?db\t%m0!, %M1", operands);
7300 break;
7302 case POST_INC:
7303 output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
7304 break;
7306 case POST_DEC:
7307 abort (); /* Should never happen now. */
7308 break;
7310 case PLUS:
7311 if (GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT)
7313 switch ((int) INTVAL (XEXP (XEXP (operands[0], 0), 1)))
7315 case -8:
7316 output_asm_insn ("stm%?db\t%m0, %M1", operands);
7317 return "";
7319 case -4:
7320 output_asm_insn ("stm%?da\t%m0, %M1", operands);
7321 return "";
7323 case 4:
7324 output_asm_insn ("stm%?ib\t%m0, %M1", operands);
7325 return "";
7328 /* Fall through */
7330 default:
7331 otherops[0] = adjust_address (operands[0], VOIDmode, 4);
7332 otherops[1] = gen_rtx_REG (SImode, 1 + REGNO (operands[1]));
7333 output_asm_insn ("str%?\t%1, %0", operands);
7334 output_asm_insn ("str%?\t%1, %0", otherops);
7337 else
7338 /* Constraints should prevent this. */
7339 abort ();
7341 return "";
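/* Example (a sketch; registers assumed): a REG<-MEM load from
   [rbase, #8], which has no ldm addressing mode of its own, becomes

       add     r0, rbase, #8
       ldmia   r0, {r0, r1}

   whereas offsets of -8, -4 and 4 map directly onto ldmdb, ldmda and
   ldmib respectively.  */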
7345 /* Output an arbitrary MOV reg, #n.
7346 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
7348 const char *
7349 output_mov_immediate (operands)
7350 rtx * operands;
7352 HOST_WIDE_INT n = INTVAL (operands[1]);
7354 /* Try to use one MOV. */
7355 if (const_ok_for_arm (n))
7356 output_asm_insn ("mov%?\t%0, %1", operands);
7358 /* Try to use one MVN. */
7359 else if (const_ok_for_arm (~n))
7361 operands[1] = GEN_INT (~n);
7362 output_asm_insn ("mvn%?\t%0, %1", operands);
7364 else
7366 int n_ones = 0;
7367 int i;
7369 /* If all else fails, make it out of ORRs or BICs as appropriate. */
7370 for (i = 0; i < 32; i ++)
7371 if (n & 1 << i)
7372 n_ones ++;
7374 if (n_ones > 16) /* Shorter to use MVN with BIC in this case. */
7375 output_multi_immediate (operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1, ~ n);
7376 else
7377 output_multi_immediate (operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1, n);
7380 return "";
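/* Worked example (sketch): n == 0x1fe00 has 8 set bits, so the
   MOV/ORR form is chosen and output_multi_immediate splits it into
   rotatable 8-bit chunks:

       mov     r0, #0xfe00
       orr     r0, r0, #0x10000  */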
7383 /* Output an ADD r, s, #n where n may be too big for one instruction.
7384 If adding zero to one register, output nothing. */
7386 const char *
7387 output_add_immediate (operands)
7388 rtx * operands;
7390 HOST_WIDE_INT n = INTVAL (operands[2]);
7392 if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
7394 if (n < 0)
7395 output_multi_immediate (operands,
7396 "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
7397 -n);
7398 else
7399 output_multi_immediate (operands,
7400 "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
7404 return "";
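/* Example (sketch): adding 0x101, which no single ARM immediate can
   encode, is split as

       add     r0, r1, #1
       add     r0, r0, #0x100  */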
7407 /* Output a multiple immediate operation.
7408 OPERANDS is the vector of operands referred to in the output patterns.
7409 INSTR1 is the output pattern to use for the first constant.
7410 INSTR2 is the output pattern to use for subsequent constants.
7411 IMMED_OP is the index of the constant slot in OPERANDS.
7412 N is the constant value. */
7414 static const char *
7415 output_multi_immediate (operands, instr1, instr2, immed_op, n)
7416 rtx * operands;
7417 const char * instr1;
7418 const char * instr2;
7419 int immed_op;
7420 HOST_WIDE_INT n;
7422 #if HOST_BITS_PER_WIDE_INT > 32
7423 n &= 0xffffffff;
7424 #endif
7426 if (n == 0)
7428 /* Quick and easy output. */
7429 operands[immed_op] = const0_rtx;
7430 output_asm_insn (instr1, operands);
7432 else
7434 int i;
7435 const char * instr = instr1;
7437 /* Note that n is never zero here (which would give no output). */
7438 for (i = 0; i < 32; i += 2)
7440 if (n & (3 << i))
7442 operands[immed_op] = GEN_INT (n & (255 << i));
7443 output_asm_insn (instr, operands);
7444 instr = instr2;
7445 i += 6;
7450 return "";
7453 /* Return the appropriate ARM instruction for the operation code.
7454 The returned result should not be overwritten. OP is the rtx of the
7455 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
7456 was shifted. */
7458 const char *
7459 arithmetic_instr (op, shift_first_arg)
7460 rtx op;
7461 int shift_first_arg;
7463 switch (GET_CODE (op))
7465 case PLUS:
7466 return "add";
7468 case MINUS:
7469 return shift_first_arg ? "rsb" : "sub";
7471 case IOR:
7472 return "orr";
7474 case XOR:
7475 return "eor";
7477 case AND:
7478 return "and";
7480 default:
7481 abort ();
7485 /* Ensure valid constant shifts and return the appropriate shift mnemonic
7486 for the operation code. The returned result should not be overwritten.
7487 OP is the rtx code of the shift.
7488 On exit, *AMOUNTP will be -1 if the shift is by a register, or the
7489 constant shift amount otherwise. */
7491 static const char *
7492 shift_op (op, amountp)
7493 rtx op;
7494 HOST_WIDE_INT *amountp;
7496 const char * mnem;
7497 enum rtx_code code = GET_CODE (op);
7499 if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
7500 *amountp = -1;
7501 else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
7502 *amountp = INTVAL (XEXP (op, 1));
7503 else
7504 abort ();
7506 switch (code)
7508 case ASHIFT:
7509 mnem = "asl";
7510 break;
7512 case ASHIFTRT:
7513 mnem = "asr";
7514 break;
7516 case LSHIFTRT:
7517 mnem = "lsr";
7518 break;
7520 case ROTATERT:
7521 mnem = "ror";
7522 break;
7524 case MULT:
7525 /* We never have to worry about the amount being other than a
7526 power of 2, since this case can never be reloaded from a reg. */
7527 if (*amountp != -1)
7528 *amountp = int_log2 (*amountp);
7529 else
7530 abort ();
7531 return "asl";
7533 default:
7534 abort ();
7537 if (*amountp != -1)
7539 /* This is not 100% correct, but follows from the desire to merge
7540 multiplication by a power of 2 with the recognizer for a
7541 shift. >=32 is not a valid shift for "asl", so we must try to
7542 output a shift that produces the correct arithmetical result.
7543 Using lsr #32 is identical except for the fact that the carry bit
7544 is not set correctly if we set the flags; but we never use the
7545 carry bit from such an operation, so we can ignore that. */
7546 if (code == ROTATERT)
7547 /* Rotate is just modulo 32. */
7548 *amountp &= 31;
7549 else if (*amountp != (*amountp & 31))
7551 if (code == ASHIFT)
7552 mnem = "lsr";
7553 *amountp = 32;
7556 /* Shifts of 0 are no-ops. */
7557 if (*amountp == 0)
7558 return NULL;
7561 return mnem;
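/* Examples (sketch): (mult x 8) is emitted as "asl" with *AMOUNTP set
   to 3; a rotate by 33 is reduced modulo 32 to "ror #1"; and an
   over-wide "asl #40" is turned into "lsr #32" so the arithmetical
   result (zero) is still correct.  */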
7564 /* Obtain the shift from the POWER of two. */
7566 static HOST_WIDE_INT
7567 int_log2 (power)
7568 HOST_WIDE_INT power;
7570 HOST_WIDE_INT shift = 0;
7572 while ((((HOST_WIDE_INT) 1 << shift) & power) == 0)
7574 if (shift > 31)
7575 abort ();
7576 shift ++;
7579 return shift;
7582 /* Output a .ascii pseudo-op, keeping track of lengths. This is because
7583 /bin/as is horribly restrictive. */
7584 #define MAX_ASCII_LEN 51
7586 void
7587 output_ascii_pseudo_op (stream, p, len)
7588 FILE * stream;
7589 const unsigned char * p;
7590 int len;
7592 int i;
7593 int len_so_far = 0;
7595 fputs ("\t.ascii\t\"", stream);
7597 for (i = 0; i < len; i++)
7599 int c = p[i];
7601 if (len_so_far >= MAX_ASCII_LEN)
7603 fputs ("\"\n\t.ascii\t\"", stream);
7604 len_so_far = 0;
7607 switch (c)
7609 case TARGET_TAB:
7610 fputs ("\\t", stream);
7611 len_so_far += 2;
7612 break;
7614 case TARGET_FF:
7615 fputs ("\\f", stream);
7616 len_so_far += 2;
7617 break;
7619 case TARGET_BS:
7620 fputs ("\\b", stream);
7621 len_so_far += 2;
7622 break;
7624 case TARGET_CR:
7625 fputs ("\\r", stream);
7626 len_so_far += 2;
7627 break;
7629 case TARGET_NEWLINE:
7630 fputs ("\\n", stream);
7631 c = p [i + 1];
7632 if ((c >= ' ' && c <= '~')
7633 || c == TARGET_TAB)
7634 /* This is a good place for a line break. */
7635 len_so_far = MAX_ASCII_LEN;
7636 else
7637 len_so_far += 2;
7638 break;
7640 case '\"':
7641 case '\\':
7642 putc ('\\', stream);
7643 len_so_far++;
7644 /* drop through. */
7646 default:
7647 if (c >= ' ' && c <= '~')
7649 putc (c, stream);
7650 len_so_far++;
7652 else
7654 fprintf (stream, "\\%03o", c);
7655 len_so_far += 4;
7657 break;
7661 fputs ("\"\n", stream);
7664 /* Compute the register save mask for registers 0 through 12
7665 inclusive. This code is used by both arm_compute_save_reg_mask
7666 and arm_compute_initial_elimination_offset. */
7668 static unsigned long
7669 arm_compute_save_reg0_reg12_mask ()
7671 unsigned long func_type = arm_current_func_type ();
7672 unsigned int save_reg_mask = 0;
7673 unsigned int reg;
7675 if (IS_INTERRUPT (func_type))
7677 unsigned int max_reg;
7678 /* Interrupt functions must not corrupt any registers,
7679 even call clobbered ones. If this is a leaf function
7680 we can just examine the registers used by the RTL, but
7681 otherwise we have to assume that whatever function is
7682 called might clobber anything, and so we have to save
7683 all the call-clobbered registers as well. */
7684 if (ARM_FUNC_TYPE (func_type) == ARM_FT_FIQ)
7685 /* FIQ handlers have registers r8 - r12 banked, so
7686 we only need to check r0 - r7; normal ISRs only
7687 bank r13 and r14, so we must check up to r12.
7688 r13 is the stack pointer which is always preserved,
7689 so we do not need to consider it here. */
7690 max_reg = 7;
7691 else
7692 max_reg = 12;
7694 for (reg = 0; reg <= max_reg; reg++)
7695 if (regs_ever_live[reg]
7696 || (! current_function_is_leaf && call_used_regs [reg]))
7697 save_reg_mask |= (1 << reg);
7699 else
7701 /* In the normal case we only need to save those registers
7702 which are call saved and which are used by this function. */
7703 for (reg = 0; reg <= 10; reg++)
7704 if (regs_ever_live[reg] && ! call_used_regs [reg])
7705 save_reg_mask |= (1 << reg);
7707 /* Handle the frame pointer as a special case. */
7708 if (! TARGET_APCS_FRAME
7709 && ! frame_pointer_needed
7710 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
7711 && ! call_used_regs[HARD_FRAME_POINTER_REGNUM])
7712 save_reg_mask |= 1 << HARD_FRAME_POINTER_REGNUM;
7714 /* If we aren't loading the PIC register,
7715 don't stack it even though it may be live. */
7716 if (flag_pic
7717 && ! TARGET_SINGLE_PIC_BASE
7718 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
7719 save_reg_mask |= 1 << PIC_OFFSET_TABLE_REGNUM;
7722 return save_reg_mask;
7725 /* Compute a bit mask of which registers need to be
7726 saved on the stack for the current function. */
7728 static unsigned long
7729 arm_compute_save_reg_mask ()
7731 unsigned int save_reg_mask = 0;
7732 unsigned long func_type = arm_current_func_type ();
7734 if (IS_NAKED (func_type))
7735 /* This should never really happen. */
7736 return 0;
7738 /* If we are creating a stack frame, then we must save the frame pointer,
7739 IP (which will hold the old stack pointer), LR and the PC. */
7740 if (frame_pointer_needed)
7741 save_reg_mask |=
7742 (1 << ARM_HARD_FRAME_POINTER_REGNUM)
7743 | (1 << IP_REGNUM)
7744 | (1 << LR_REGNUM)
7745 | (1 << PC_REGNUM);
7747 /* Volatile functions do not return, so there
7748 is no need to save any other registers. */
7749 if (IS_VOLATILE (func_type))
7750 return save_reg_mask;
7752 save_reg_mask |= arm_compute_save_reg0_reg12_mask ();
7754 /* Decide if we need to save the link register.
7755 Interrupt routines have their own banked link register,
7756 so they never need to save it.
7757 Otherwise if we do not use the link register we do not need to save
7758 it. If we are pushing other registers onto the stack however, we
7759 can save an instruction in the epilogue by pushing the link register
7760 now and then popping it back into the PC. This incurs extra memory
7761 accesses though, so we only do it when optimising for size, and only
7762 if we know that we will not need a fancy return sequence. */
7763 if (regs_ever_live [LR_REGNUM]
7764 || (save_reg_mask
7765 && optimize_size
7766 && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL))
7767 save_reg_mask |= 1 << LR_REGNUM;
7769 if (cfun->machine->lr_save_eliminated)
7770 save_reg_mask &= ~ (1 << LR_REGNUM);
7772 return save_reg_mask;
7775 /* Generate a function exit sequence. If REALLY_RETURN is true, then do
7776 everything bar the final return instruction. */
7778 const char *
7779 output_return_instruction (operand, really_return, reverse)
7780 rtx operand;
7781 int really_return;
7782 int reverse;
7784 char conditional[10];
7785 char instr[100];
7786 int reg;
7787 unsigned long live_regs_mask;
7788 unsigned long func_type;
7790 func_type = arm_current_func_type ();
7792 if (IS_NAKED (func_type))
7793 return "";
7795 if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
7797 /* If this function was declared non-returning, and we have found a tail
7798 call, then we have to trust that the called function won't return. */
7799 if (really_return)
7801 rtx ops[2];
7803 /* Otherwise, trap an attempted return by aborting. */
7804 ops[0] = operand;
7805 ops[1] = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)"
7806 : "abort");
7807 assemble_external_libcall (ops[1]);
7808 output_asm_insn (reverse ? "bl%D0\t%a1" : "bl%d0\t%a1", ops);
7811 return "";
7814 if (current_function_calls_alloca && !really_return)
7815 abort ();
7817 sprintf (conditional, "%%?%%%c0", reverse ? 'D' : 'd');
7819 return_used_this_function = 1;
7821 live_regs_mask = arm_compute_save_reg_mask ();
7823 if (live_regs_mask)
7825 const char * return_reg;
7827 /* If we do not have any special requirements for function exit
7828 (eg interworking, or ISR) then we can load the return address
7829 directly into the PC. Otherwise we must load it into LR. */
7830 if (really_return
7831 && ! TARGET_INTERWORK)
7832 return_reg = reg_names[PC_REGNUM];
7833 else
7834 return_reg = reg_names[LR_REGNUM];
7836 if ((live_regs_mask & (1 << IP_REGNUM)) == (1 << IP_REGNUM))
7837 /* There are two possible reasons for the IP register being saved.
7838 Either a stack frame was created, in which case IP contains the
7839 old stack pointer, or an ISR routine corrupted it. If this is an
7840 ISR routine then just restore IP, otherwise restore IP into SP. */
7841 if (! IS_INTERRUPT (func_type))
7843 live_regs_mask &= ~ (1 << IP_REGNUM);
7844 live_regs_mask |= (1 << SP_REGNUM);
7847 /* On some ARM architectures it is faster to use LDR rather than
7848 LDM to load a single register. On other architectures, the
7849 cost is the same. In 26-bit mode, or for exception handlers,
7850 we have to use LDM to load the PC so that the CPSR is also
7851 restored. */
7852 for (reg = 0; reg <= LAST_ARM_REGNUM; reg++)
7854 if (live_regs_mask == (unsigned int)(1 << reg))
7855 break;
7857 if (reg <= LAST_ARM_REGNUM
7858 && (reg != LR_REGNUM
7859 || ! really_return
7860 || (TARGET_APCS_32 && ! IS_INTERRUPT (func_type))))
7862 sprintf (instr, "ldr%s\t%%|%s, [%%|sp], #4", conditional,
7863 (reg == LR_REGNUM) ? return_reg : reg_names[reg]);
7865 else
7867 char *p;
7868 int first = 1;
7870 /* Generate the load multiple instruction to restore the registers. */
7871 if (frame_pointer_needed)
7872 sprintf (instr, "ldm%sea\t%%|fp, {", conditional);
7873 else if (live_regs_mask & (1 << SP_REGNUM))
7874 sprintf (instr, "ldm%sfd\t%%|sp, {", conditional);
7875 else
7876 sprintf (instr, "ldm%sfd\t%%|sp!, {", conditional);
7878 p = instr + strlen (instr);
7880 for (reg = 0; reg <= SP_REGNUM; reg++)
7881 if (live_regs_mask & (1 << reg))
7883 int l = strlen (reg_names[reg]);
7885 if (first)
7886 first = 0;
7887 else
7889 memcpy (p, ", ", 2);
7890 p += 2;
7893 memcpy (p, "%|", 2);
7894 memcpy (p + 2, reg_names[reg], l);
7895 p += l + 2;
7898 if (live_regs_mask & (1 << LR_REGNUM))
7900 int l = strlen (return_reg);
7902 if (! first)
7904 memcpy (p, ", ", 2);
7905 p += 2;
7908 memcpy (p, "%|", 2);
7909 memcpy (p + 2, return_reg, l);
7910 strcpy (p + 2 + l, ((TARGET_APCS_32
7911 && !IS_INTERRUPT (func_type))
7912 || !really_return)
7913 ? "}" : "}^");
7915 else
7916 strcpy (p, "}");
7919 output_asm_insn (instr, & operand);
7921 /* See if we need to generate an extra instruction to
7922 perform the actual function return. */
7923 if (really_return
7924 && func_type != ARM_FT_INTERWORKED
7925 && (live_regs_mask & (1 << LR_REGNUM)) != 0)
7927 /* The return has already been handled
7928 by loading the LR into the PC. */
7929 really_return = 0;
7933 if (really_return)
7935 switch ((int) ARM_FUNC_TYPE (func_type))
7937 case ARM_FT_ISR:
7938 case ARM_FT_FIQ:
7939 sprintf (instr, "sub%ss\t%%|pc, %%|lr, #4", conditional);
7940 break;
7942 case ARM_FT_INTERWORKED:
7943 sprintf (instr, "bx%s\t%%|lr", conditional);
7944 break;
7946 case ARM_FT_EXCEPTION:
7947 sprintf (instr, "mov%ss\t%%|pc, %%|lr", conditional);
7948 break;
7950 default:
7951 /* ARMv5 implementations always provide BX, so interworking
7952 is the default unless APCS-26 is in use. */
7953 if ((insn_flags & FL_ARCH5) != 0 && TARGET_APCS_32)
7954 sprintf (instr, "bx%s\t%%|lr", conditional);
7955 else
7956 sprintf (instr, "mov%s%s\t%%|pc, %%|lr",
7957 conditional, TARGET_APCS_32 ? "" : "s");
7958 break;
7961 output_asm_insn (instr, & operand);
7964 return "";
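/* Typical sequences produced above (sketch): a function that saved
   only LR returns with

       ldr     pc, [sp], #4

   while a function with more saved registers uses, e.g.,

       ldmfd   sp!, {r4, r5, pc}

   falling back to "bx lr" or "mov(s) pc, lr" when nothing was
   stacked.  */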
7967 /* Write the function name into the code section, directly preceding
7968 the function prologue.
7970 Code will be output similar to this:
7972 .ascii "arm_poke_function_name", 0
7973 .align
7975 .word 0xff000000 + (t1 - t0)
7976 arm_poke_function_name
7977 mov ip, sp
7978 stmfd sp!, {fp, ip, lr, pc}
7979 sub fp, ip, #4
7981 When performing a stack backtrace, code can inspect the value
7982 of 'pc' stored at 'fp' + 0. If the trace function then looks
7983 at location pc - 12 and the top 8 bits are set, then we know
7984 that there is a function name embedded immediately preceding this
7985 location, whose length is ((pc[-3]) & ~0xff000000).
7987 We assume that pc is declared as a pointer to an unsigned long.
7989 It is of no benefit to output the function name if we are assembling
7990 a leaf function. These function types will not contain a stack
7991 backtrace structure, so it is not possible to determine the
7992 function name. */
7994 void
7995 arm_poke_function_name (stream, name)
7996 FILE * stream;
7997 const char * name;
7999 unsigned long alignlength;
8000 unsigned long length;
8001 rtx x;
8003 length = strlen (name) + 1;
8004 alignlength = ROUND_UP_WORD (length);
8006 ASM_OUTPUT_ASCII (stream, name, length);
8007 ASM_OUTPUT_ALIGN (stream, 2);
8008 x = GEN_INT ((unsigned HOST_WIDE_INT) 0xff000000 + alignlength);
8009 assemble_aligned_integer (UNITS_PER_WORD, x);
8012 /* Place some comments into the assembler stream
8013 describing the current function. */
8015 static void
8016 arm_output_function_prologue (f, frame_size)
8017 FILE * f;
8018 HOST_WIDE_INT frame_size;
8020 unsigned long func_type;
8022 if (!TARGET_ARM)
8024 thumb_output_function_prologue (f, frame_size);
8025 return;
8028 /* Sanity check. */
8029 if (arm_ccfsm_state || arm_target_insn)
8030 abort ();
8032 func_type = arm_current_func_type ();
8034 switch ((int) ARM_FUNC_TYPE (func_type))
8036 default:
8037 case ARM_FT_NORMAL:
8038 break;
8039 case ARM_FT_INTERWORKED:
8040 asm_fprintf (f, "\t%@ Function supports interworking.\n");
8041 break;
8042 case ARM_FT_EXCEPTION_HANDLER:
8043 asm_fprintf (f, "\t%@ C++ Exception Handler.\n");
8044 break;
8045 case ARM_FT_ISR:
8046 asm_fprintf (f, "\t%@ Interrupt Service Routine.\n");
8047 break;
8048 case ARM_FT_FIQ:
8049 asm_fprintf (f, "\t%@ Fast Interrupt Service Routine.\n");
8050 break;
8051 case ARM_FT_EXCEPTION:
8052 asm_fprintf (f, "\t%@ ARM Exception Handler.\n");
8053 break;
8056 if (IS_NAKED (func_type))
8057 asm_fprintf (f, "\t%@ Naked Function: prologue and epilogue provided by programmer.\n");
8059 if (IS_VOLATILE (func_type))
8060 asm_fprintf (f, "\t%@ Volatile: function does not return.\n");
8062 if (IS_NESTED (func_type))
8063 asm_fprintf (f, "\t%@ Nested: function declared inside another function.\n");
8065 asm_fprintf (f, "\t%@ args = %d, pretend = %d, frame = %d\n",
8066 current_function_args_size,
8067 current_function_pretend_args_size, frame_size);
8069 asm_fprintf (f, "\t%@ frame_needed = %d, uses_anonymous_args = %d\n",
8070 frame_pointer_needed,
8071 cfun->machine->uses_anonymous_args);
8073 if (cfun->machine->lr_save_eliminated)
8074 asm_fprintf (f, "\t%@ link register save eliminated.\n");
8076 #ifdef AOF_ASSEMBLER
8077 if (flag_pic)
8078 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, PIC_OFFSET_TABLE_REGNUM);
8079 #endif
8081 return_used_this_function = 0;
8084 const char *
8085 arm_output_epilogue (really_return)
8086 int really_return;
8088 int reg;
8089 unsigned long saved_regs_mask;
8090 unsigned long func_type;
8091 /* Floats_offset is the offset from the "virtual" frame. In an APCS
8092 frame that is $fp + 4 for a non-variadic function. */
8093 int floats_offset = 0;
8094 rtx operands[3];
8095 int frame_size = arm_get_frame_size ();
8096 FILE * f = asm_out_file;
8097 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
8099 /* If we have already generated the return instruction
8100 then it is futile to generate anything else. */
8101 if (use_return_insn (FALSE) && return_used_this_function)
8102 return "";
8104 func_type = arm_current_func_type ();
8106 if (IS_NAKED (func_type))
8107 /* Naked functions don't have epilogues. */
8108 return "";
8110 if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
8112 rtx op;
8114 /* A volatile function should never return. Call abort. */
8115 op = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)" : "abort");
8116 assemble_external_libcall (op);
8117 output_asm_insn ("bl\t%a0", &op);
8119 return "";
8122 if (ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER
8123 && ! really_return)
8124 /* If we are throwing an exception, then we really must
8125 be doing a return, so we can't tail-call. */
8126 abort ();
8128 saved_regs_mask = arm_compute_save_reg_mask ();
8130 /* XXX We should adjust floats_offset for any anonymous args, and then
8131 re-adjust vfp_offset below to compensate. */
8133 /* Compute how far away the floats will be. */
8134 for (reg = 0; reg <= LAST_ARM_REGNUM; reg ++)
8135 if (saved_regs_mask & (1 << reg))
8136 floats_offset += 4;
8138 if (frame_pointer_needed)
8140 int vfp_offset = 4;
8142 if (arm_fpu_arch == FP_SOFT2)
8144 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
8145 if (regs_ever_live[reg] && !call_used_regs[reg])
8147 floats_offset += 12;
8148 asm_fprintf (f, "\tldfe\t%r, [%r, #-%d]\n",
8149 reg, FP_REGNUM, floats_offset - vfp_offset);
8152 else
8154 int start_reg = LAST_ARM_FP_REGNUM;
8156 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
8158 if (regs_ever_live[reg] && !call_used_regs[reg])
8160 floats_offset += 12;
8162 /* We can't unstack more than four registers at once. */
8163 if (start_reg - reg == 3)
8165 asm_fprintf (f, "\tlfm\t%r, 4, [%r, #-%d]\n",
8166 reg, FP_REGNUM, floats_offset - vfp_offset);
8167 start_reg = reg - 1;
8170 else
8172 if (reg != start_reg)
8173 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
8174 reg + 1, start_reg - reg,
8175 FP_REGNUM, floats_offset - vfp_offset);
8176 start_reg = reg - 1;
8180 /* Just in case the last register checked also needs unstacking. */
8181 if (reg != start_reg)
8182 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
8183 reg + 1, start_reg - reg,
8184 FP_REGNUM, floats_offset - vfp_offset);
8187 /* saved_regs_mask should contain the IP, which at the time of stack
8188 frame generation actually contains the old stack pointer. So a
8189 quick way to unwind the stack is just to pop the IP register directly
8190 into the stack pointer. */
8191 if ((saved_regs_mask & (1 << IP_REGNUM)) == 0)
8192 abort ();
8193 saved_regs_mask &= ~ (1 << IP_REGNUM);
8194 saved_regs_mask |= (1 << SP_REGNUM);
8196 /* There are two registers left in saved_regs_mask - LR and PC. We
8197 only need to restore the LR register (the return address), but to
8198 save time we can load it directly into the PC, unless we need a
8199 special function exit sequence, or we are not really returning. */
8200 if (really_return && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL)
8201 /* Delete the LR from the register mask, so that the LR on
8202 the stack is loaded into the PC in the register mask. */
8203 saved_regs_mask &= ~ (1 << LR_REGNUM);
8204 else
8205 saved_regs_mask &= ~ (1 << PC_REGNUM);
8207 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, saved_regs_mask);
8209 if (IS_INTERRUPT (func_type))
8210 /* Interrupt handlers will have pushed the
8211 IP onto the stack, so restore it now. */
8212 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, 1 << IP_REGNUM);
8214 else
8216 /* Restore stack pointer if necessary. */
8217 if (frame_size + current_function_outgoing_args_size != 0)
8219 operands[0] = operands[1] = stack_pointer_rtx;
8220 operands[2] = GEN_INT (frame_size
8221 + current_function_outgoing_args_size);
8222 output_add_immediate (operands);
8225 if (arm_fpu_arch == FP_SOFT2)
8227 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
8228 if (regs_ever_live[reg] && !call_used_regs[reg])
8229 asm_fprintf (f, "\tldfe\t%r, [%r], #12\n",
8230 reg, SP_REGNUM);
8232 else
8234 int start_reg = FIRST_ARM_FP_REGNUM;
8236 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
8238 if (regs_ever_live[reg] && !call_used_regs[reg])
8240 if (reg - start_reg == 3)
8242 asm_fprintf (f, "\tlfmfd\t%r, 4, [%r]!\n",
8243 start_reg, SP_REGNUM);
8244 start_reg = reg + 1;
8247 else
8249 if (reg != start_reg)
8250 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
8251 start_reg, reg - start_reg,
8252 SP_REGNUM);
8254 start_reg = reg + 1;
8258 /* Just in case the last register checked also needs unstacking. */
8259 if (reg != start_reg)
8260 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
8261 start_reg, reg - start_reg, SP_REGNUM);
8264 /* If we can, restore the LR into the PC. */
8265 if (ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
8266 && really_return
8267 && current_function_pretend_args_size == 0
8268 && saved_regs_mask & (1 << LR_REGNUM))
8270 saved_regs_mask &= ~ (1 << LR_REGNUM);
8271 saved_regs_mask |= (1 << PC_REGNUM);
8274 /* Load the registers off the stack. If we only have one register
8275 to load use the LDR instruction - it is faster. */
8276 if (saved_regs_mask == (1 << LR_REGNUM))
8278 /* The exception handler ignores the LR, so we do
8279 not really need to load it off the stack. */
8280 if (eh_ofs)
8281 asm_fprintf (f, "\tadd\t%r, %r, #4\n", SP_REGNUM, SP_REGNUM);
8282 else
8283 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM, SP_REGNUM);
8285 else if (saved_regs_mask)
8287 if (saved_regs_mask & (1 << SP_REGNUM))
8288 /* Note - write back to the stack register is not enabled
8289 (ie "ldmfd sp!..."). We know that the stack pointer is
8290 in the list of registers and if we add writeback the
8291 instruction becomes UNPREDICTABLE. */
8292 print_multi_reg (f, "ldmfd\t%r", SP_REGNUM, saved_regs_mask);
8293 else
8294 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, saved_regs_mask);
8297 if (current_function_pretend_args_size)
8299 /* Unwind the pre-pushed regs. */
8300 operands[0] = operands[1] = stack_pointer_rtx;
8301 operands[2] = GEN_INT (current_function_pretend_args_size);
8302 output_add_immediate (operands);
8306 #if 0
8307 if (ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER)
8308 /* Adjust the stack to remove the exception handler stuff. */
8309 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
8310 REGNO (eh_ofs));
8311 #endif
8313 if (! really_return
8314 || (ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
8315 && current_function_pretend_args_size == 0
8316 && saved_regs_mask & (1 << PC_REGNUM)))
8317 return "";
8319 /* Generate the return instruction. */
8320 switch ((int) ARM_FUNC_TYPE (func_type))
8322 case ARM_FT_EXCEPTION_HANDLER:
8323 /* Even in 26-bit mode we do a mov (rather than a movs)
8324 because we don't have the PSR bits set in the address. */
8325 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, EXCEPTION_LR_REGNUM);
8326 break;
8328 case ARM_FT_ISR:
8329 case ARM_FT_FIQ:
8330 asm_fprintf (f, "\tsubs\t%r, %r, #4\n", PC_REGNUM, LR_REGNUM);
8331 break;
8333 case ARM_FT_EXCEPTION:
8334 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
8335 break;
8337 case ARM_FT_INTERWORKED:
8338 asm_fprintf (f, "\tbx\t%r\n", LR_REGNUM);
8339 break;
8341 default:
8342 if (frame_pointer_needed)
8343 /* If we used the frame pointer then the return address
8344 will have been loaded off the stack directly into the
8345 PC, so there is no need to issue a MOV instruction
8346 here. */
8348 else if (current_function_pretend_args_size == 0
8349 && (saved_regs_mask & (1 << LR_REGNUM)))
8350 /* Similarly we may have been able to load LR into the PC
8351 even if we did not create a stack frame. */
8353 else if (TARGET_APCS_32)
8354 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, LR_REGNUM);
8355 else
8356 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
8357 break;
8360 return "";
8363 static void
8364 arm_output_function_epilogue (file, frame_size)
8365 FILE *file ATTRIBUTE_UNUSED;
8366 HOST_WIDE_INT frame_size;
8368 if (TARGET_THUMB)
8370 /* ??? Probably not safe to set this here, since it assumes that a
8371 function will be emitted as assembly immediately after we generate
8372 RTL for it. This does not happen for inline functions. */
8373 return_used_this_function = 0;
8375 else
8377 /* We need to take into account any stack-frame rounding. */
8378 frame_size = arm_get_frame_size ();
8380 if (use_return_insn (FALSE)
8381 && return_used_this_function
8382 && (frame_size + current_function_outgoing_args_size) != 0
8383 && !frame_pointer_needed)
8384 abort ();
8386 /* Reset the ARM-specific per-function variables. */
8387 after_arm_reorg = 0;
8391 /* Generate and emit an insn that we will recognize as a push_multi.
8392 Unfortunately, since this insn does not reflect very well the actual
8393 semantics of the operation, we need to annotate the insn for the benefit
8394 of DWARF2 frame unwind information. */
8396 static rtx
8397 emit_multi_reg_push (mask)
8398 int mask;
8400 int num_regs = 0;
8401 int num_dwarf_regs;
8402 int i, j;
8403 rtx par;
8404 rtx dwarf;
8405 int dwarf_par_index;
8406 rtx tmp, reg;
8408 for (i = 0; i <= LAST_ARM_REGNUM; i++)
8409 if (mask & (1 << i))
8410 num_regs++;
8412 if (num_regs == 0 || num_regs > 16)
8413 abort ();
8415 /* We don't record the PC in the dwarf frame information. */
8416 num_dwarf_regs = num_regs;
8417 if (mask & (1 << PC_REGNUM))
8418 num_dwarf_regs--;
8420 /* For the body of the insn we are going to generate an UNSPEC in
8421 parallel with several USEs. This allows the insn to be recognized
8422 by the push_multi pattern in the arm.md file. The insn looks
8423 something like this:
8425 (parallel [
8426 (set (mem:BLK (pre_dec:BLK (reg:SI sp)))
8427 (unspec:BLK [(reg:SI r4)] UNSPEC_PUSH_MULT))
8428 (use (reg:SI 11 fp))
8429 (use (reg:SI 12 ip))
8430 (use (reg:SI 14 lr))
8431 (use (reg:SI 15 pc))
8434 For the frame note however, we try to be more explicit and actually
8435 show each register being stored into the stack frame, plus a (single)
8436 decrement of the stack pointer. We do it this way in order to be
8437 friendly to the stack unwinding code, which only wants to see a single
8438 stack decrement per instruction. The RTL we generate for the note looks
8439 something like this:
8441 (sequence [
8442 (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int -20)))
8443 (set (mem:SI (reg:SI sp)) (reg:SI r4))
8444 (set (mem:SI (plus:SI (reg:SI sp) (const_int 4))) (reg:SI fp))
8445 (set (mem:SI (plus:SI (reg:SI sp) (const_int 8))) (reg:SI ip))
8446 (set (mem:SI (plus:SI (reg:SI sp) (const_int 12))) (reg:SI lr))
8449 This sequence is used both by the code to support stack unwinding for
8450 exception handlers and the code to generate dwarf2 frame debugging. */
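/* Editorial illustration (not in the original source): a call such as
   emit_multi_reg_push ((1 << 4) | (1 << 11) | (1 << 12) | (1 << 14) | (1 << 15))
   matches the push_multi pattern and should assemble to the single
   instruction
   stmfd sp!, {r4, fp, ip, lr, pc}
   while the attached note describes the same operation as one 20-byte
   stack decrement plus four word stores (the PC is not recorded).  */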
8452 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_regs));
8453 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (num_dwarf_regs + 1));
8454 dwarf_par_index = 1;
8456 for (i = 0; i <= LAST_ARM_REGNUM; i++)
8458 if (mask & (1 << i))
8460 reg = gen_rtx_REG (SImode, i);
8462 XVECEXP (par, 0, 0)
8463 = gen_rtx_SET (VOIDmode,
8464 gen_rtx_MEM (BLKmode,
8465 gen_rtx_PRE_DEC (BLKmode,
8466 stack_pointer_rtx)),
8467 gen_rtx_UNSPEC (BLKmode,
8468 gen_rtvec (1, reg),
8469 UNSPEC_PUSH_MULT));
8471 if (i != PC_REGNUM)
8473 tmp = gen_rtx_SET (VOIDmode,
8474 gen_rtx_MEM (SImode, stack_pointer_rtx),
8475 reg);
8476 RTX_FRAME_RELATED_P (tmp) = 1;
8477 XVECEXP (dwarf, 0, dwarf_par_index) = tmp;
8478 dwarf_par_index++;
8481 break;
8485 for (j = 1, i++; j < num_regs; i++)
8487 if (mask & (1 << i))
8489 reg = gen_rtx_REG (SImode, i);
8491 XVECEXP (par, 0, j) = gen_rtx_USE (VOIDmode, reg);
8493 if (i != PC_REGNUM)
8495 tmp = gen_rtx_SET (VOIDmode,
8496 gen_rtx_MEM (SImode,
8497 plus_constant (stack_pointer_rtx,
8498 4 * j)),
8499 reg);
8500 RTX_FRAME_RELATED_P (tmp) = 1;
8501 XVECEXP (dwarf, 0, dwarf_par_index++) = tmp;
8504 j++;
8508 par = emit_insn (par);
8510 tmp = gen_rtx_SET (SImode,
8511 stack_pointer_rtx,
8512 gen_rtx_PLUS (SImode,
8513 stack_pointer_rtx,
8514 GEN_INT (-4 * num_regs)));
8515 RTX_FRAME_RELATED_P (tmp) = 1;
8516 XVECEXP (dwarf, 0, 0) = tmp;
8518 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
8519 REG_NOTES (par));
8520 return par;
8523 static rtx
8524 emit_sfm (base_reg, count)
8525 int base_reg;
8526 int count;
8528 rtx par;
8529 rtx dwarf;
8530 rtx tmp, reg;
8531 int i;
8533 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
8534 dwarf = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
8536 reg = gen_rtx_REG (XFmode, base_reg++);
8538 XVECEXP (par, 0, 0)
8539 = gen_rtx_SET (VOIDmode,
8540 gen_rtx_MEM (BLKmode,
8541 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
8542 gen_rtx_UNSPEC (BLKmode,
8543 gen_rtvec (1, reg),
8544 UNSPEC_PUSH_MULT));
8545 tmp
8546 = gen_rtx_SET (VOIDmode,
8547 gen_rtx_MEM (XFmode,
8548 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
8549 reg);
8550 RTX_FRAME_RELATED_P (tmp) = 1;
8551 XVECEXP (dwarf, 0, count - 1) = tmp;
8553 for (i = 1; i < count; i++)
8555 reg = gen_rtx_REG (XFmode, base_reg++);
8556 XVECEXP (par, 0, i) = gen_rtx_USE (VOIDmode, reg);
8558 tmp = gen_rtx_SET (VOIDmode,
8559 gen_rtx_MEM (XFmode,
8560 gen_rtx_PRE_DEC (BLKmode,
8561 stack_pointer_rtx)),
8562 reg);
8563 RTX_FRAME_RELATED_P (tmp) = 1;
8564 XVECEXP (dwarf, 0, count - i - 1) = tmp;
8567 par = emit_insn (par);
8568 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
8569 REG_NOTES (par));
8570 return par;
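/* Editorial sketch, by analogy with the epilogue code above: emit_sfm
   with COUNT == 3 saves three consecutive FPA registers with a single
   store-multiple, presumably assembling to something like
   sfmfd f4, 3, [sp]!
   while the dwarf note records three 12-byte XFmode pre-decrement
   stores so that the unwinder sees each save individually.  */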
8573 /* Compute the distance from register FROM to register TO.
8574 These can be the arg pointer (26), the soft frame pointer (25),
8575 the stack pointer (13) or the hard frame pointer (11).
8576 Typical stack layout looks like this:
8578 old stack pointer -> | |
8579 ----
8580 | | \
8581 | | saved arguments for
8582 | | vararg functions
8583 | | /
8585 hard FP & arg pointer -> | | \
8586 | | stack
8587 | | frame
8588 | | /
8590 | | \
8591 | | call saved
8592 | | registers
8593 soft frame pointer -> | | /
8595 | | \
8596 | | local
8597 | | variables
8598 | | /
8600 | | \
8601 | | outgoing
8602 | | arguments
8603 current stack pointer -> | | /
8606 For a given function some or all of these stack components
8607 may not be needed, giving rise to the possibility of
8608 eliminating some of the registers.
8610 The values returned by this function must reflect the behavior
8611 of arm_expand_prologue() and arm_compute_save_reg_mask().
8613 The sign of the number returned reflects the direction of stack
8614 growth, so the values are positive for all eliminations except
8615 from the soft frame pointer to the hard frame pointer. */
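/* Worked example (editorial, with made-up numbers): a function with 8
   bytes of locals, no outgoing arguments, no frame pointer, and r4-r6
   plus LR saved (16 bytes of call-saved registers) gives an
   ARG_POINTER_REGNUM to STACK_POINTER_REGNUM offset of
   16 + 0 + 8 + 0 - 4 = 20 under the formula below.  */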
8617 unsigned int
8618 arm_compute_initial_elimination_offset (from, to)
8619 unsigned int from;
8620 unsigned int to;
8622 unsigned int local_vars = arm_get_frame_size ();
8623 unsigned int outgoing_args = current_function_outgoing_args_size;
8624 unsigned int stack_frame;
8625 unsigned int call_saved_registers;
8626 unsigned long func_type;
8628 func_type = arm_current_func_type ();
8630 /* Volatile functions never return, so there is
8631 no need to save call saved registers. */
8632 call_saved_registers = 0;
8633 if (! IS_VOLATILE (func_type))
8635 unsigned int reg_mask;
8636 unsigned int reg;
8638 /* Make sure that we compute which registers will be saved
8639 on the stack using the same algorithm that is used by
8640 arm_compute_save_reg_mask(). */
8641 reg_mask = arm_compute_save_reg0_reg12_mask ();
8643 /* Now count the number of bits set in save_reg_mask.
8644 For each set bit we need 4 bytes of stack space. */
8645 while (reg_mask)
8647 call_saved_registers += 4;
8648 reg_mask = reg_mask & ~ (reg_mask & - reg_mask);
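/* Editorial note: reg_mask & - reg_mask isolates the lowest set bit,
   so for example a mask of 0x16 (r1, r2, r4) shrinks 0x16 -> 0x14 ->
   0x10 -> 0, accumulating 4 bytes per iteration, 12 in total.  */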
8651 if ((regs_ever_live[LR_REGNUM]
8652 /* If optimizing for size, then we save the link register if
8653 any other integer register is saved. This gives a smaller
8654 return sequence. */
8655 || (optimize_size && call_saved_registers > 0))
8656 /* But if a stack frame is going to be created, the LR will
8657 be saved as part of that, so we do not need to allow for
8658 it here. */
8659 && ! frame_pointer_needed)
8660 call_saved_registers += 4;
8662 /* If the hard floating point registers are going to be
8663 used then they must be saved on the stack as well.
8664 Each register occupies 12 bytes of stack space. */
8665 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg ++)
8666 if (regs_ever_live[reg] && ! call_used_regs[reg])
8667 call_saved_registers += 12;
8670 /* The stack frame contains 4 registers - the old frame pointer,
8671 the old stack pointer, the return address and PC of the start
8672 of the function. */
8673 stack_frame = frame_pointer_needed ? 16 : 0;
8675 /* OK, now we have enough information to compute the distances.
8676 There must be an entry in these switch tables for each pair
8677 of registers in ELIMINABLE_REGS, even if some of the entries
8678 seem to be redundant or useless. */
8679 switch (from)
8681 case ARG_POINTER_REGNUM:
8682 switch (to)
8684 case THUMB_HARD_FRAME_POINTER_REGNUM:
8685 return 0;
8687 case FRAME_POINTER_REGNUM:
8688 /* This is the reverse of the soft frame pointer
8689 to hard frame pointer elimination below. */
8690 if (call_saved_registers == 0 && stack_frame == 0)
8691 return 0;
8692 return (call_saved_registers + stack_frame - 4);
8694 case ARM_HARD_FRAME_POINTER_REGNUM:
8695 /* If there is no stack frame then the hard
8696 frame pointer and the arg pointer coincide. */
8697 if (stack_frame == 0 && call_saved_registers != 0)
8698 return 0;
8699 /* FIXME: Not sure about this. Maybe we should always return 0 ? */
8700 return (frame_pointer_needed
8701 && current_function_needs_context
8702 && ! cfun->machine->uses_anonymous_args) ? 4 : 0;
8704 case STACK_POINTER_REGNUM:
8705 /* If nothing has been pushed on the stack at all
8706 then this will return -4. This *is* correct! */
8707 return call_saved_registers + stack_frame + local_vars + outgoing_args - 4;
8709 default:
8710 abort ();
8712 break;
8714 case FRAME_POINTER_REGNUM:
8715 switch (to)
8717 case THUMB_HARD_FRAME_POINTER_REGNUM:
8718 return 0;
8720 case ARM_HARD_FRAME_POINTER_REGNUM:
8721 /* The hard frame pointer points to the top entry in the
8722 stack frame. The soft frame pointer points to the bottom entry
8723 in the stack frame. If there is no stack frame at all,
8724 then they are identical. */
8725 if (call_saved_registers == 0 && stack_frame == 0)
8726 return 0;
8727 return - (call_saved_registers + stack_frame - 4);
8729 case STACK_POINTER_REGNUM:
8730 return local_vars + outgoing_args;
8732 default:
8733 abort ();
8735 break;
8737 default:
8738 /* You cannot eliminate from the stack pointer.
8739 In theory you could eliminate from the hard frame
8740 pointer to the stack pointer, but this will never
8741 happen, since if a stack frame is not needed the
8742 hard frame pointer will never be used. */
8743 abort ();
8747 /* Calculate the size of the stack frame, taking into account any
8748 padding that is required to ensure stack-alignment. */
8750 HOST_WIDE_INT
8751 arm_get_frame_size ()
8753 int regno;
8755 int base_size = ROUND_UP_WORD (get_frame_size ());
8756 int entry_size = 0;
8757 unsigned long func_type = arm_current_func_type ();
8758 int leaf;
8760 if (! TARGET_ARM)
8761 abort ();
8763 if (! TARGET_ATPCS)
8764 return base_size;
8766 /* We need to know if we are a leaf function. Unfortunately, it
8767 is possible to be called after start_sequence has been called,
8768 which causes get_insns to return the insns for the sequence,
8769 not the function, which will cause leaf_function_p to return
8770 the incorrect result.
8772 To work around this, we cache the computed frame size. This
8773 works because we will only be calling RTL expanders that need
8774 to know about leaf functions once reload has completed, and the
8775 frame size cannot be changed after that time, so we can safely
8776 use the cached value. */
8778 if (reload_completed)
8779 return cfun->machine->frame_size;
8781 leaf = leaf_function_p ();
8783 /* A leaf function does not need any stack alignment if it has nothing
8784 on the stack. */
8785 if (leaf && base_size == 0)
8787 cfun->machine->frame_size = 0;
8788 return 0;
8791 /* We know that SP will be word aligned on entry, and we must
8792 preserve that condition at any subroutine call. But those are
8793 the only constraints. */
8795 /* Space for variadic functions. */
8796 if (current_function_pretend_args_size)
8797 entry_size += current_function_pretend_args_size;
8799 /* Space for saved registers. */
8800 entry_size += bit_count (arm_compute_save_reg_mask ()) * 4;
8802 /* Space for saved FPA registers. */
8803 if (! IS_VOLATILE (func_type))
8805 for (regno = FIRST_ARM_FP_REGNUM; regno <= LAST_ARM_FP_REGNUM; regno++)
8806 if (regs_ever_live[regno] && ! call_used_regs[regno])
8807 entry_size += 12;
8810 if ((entry_size + base_size + current_function_outgoing_args_size) & 7)
8811 base_size += 4;
8812 if ((entry_size + base_size + current_function_outgoing_args_size) & 7)
8813 abort ();
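/* Worked example (editorial): entry_size == 20 and base_size == 8 sum
   to 28, which is not 8-byte aligned, so base_size grows to 12 and the
   total becomes 32; a second misalignment is then impossible, which is
   why reaching the abort above would indicate a bug.  */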
8815 cfun->machine->frame_size = base_size;
8817 return base_size;
8820 /* Generate the prologue instructions for entry into an ARM function. */
8822 void
8823 arm_expand_prologue ()
8825 int reg;
8826 rtx amount;
8827 rtx insn;
8828 rtx ip_rtx;
8829 unsigned long live_regs_mask;
8830 unsigned long func_type;
8831 int fp_offset = 0;
8832 int saved_pretend_args = 0;
8833 unsigned int args_to_push;
8835 func_type = arm_current_func_type ();
8837 /* Naked functions don't have prologues. */
8838 if (IS_NAKED (func_type))
8839 return;
8841 /* Make a copy of c_f_p_a_s as we may need to modify it locally. */
8842 args_to_push = current_function_pretend_args_size;
8844 /* Compute which register we will have to save onto the stack. */
8845 live_regs_mask = arm_compute_save_reg_mask ();
8847 ip_rtx = gen_rtx_REG (SImode, IP_REGNUM);
8849 if (frame_pointer_needed)
8851 if (IS_INTERRUPT (func_type))
8853 /* Interrupt functions must not corrupt any registers.
8854 Creating a frame pointer, however, corrupts the IP
8855 register, so we must push it first. */
8856 insn = emit_multi_reg_push (1 << IP_REGNUM);
8858 /* Do not set RTX_FRAME_RELATED_P on this insn.
8859 The dwarf stack unwinding code only wants to see one
8860 stack decrement per function, and this is not it. If
8861 this instruction is labeled as being part of the frame
8862 creation sequence then dwarf2out_frame_debug_expr will
8863 abort when it encounters the assignment of IP to FP
8864 later on, since the use of SP here establishes SP as
8865 the CFA register and not IP.
8867 Anyway this instruction is not really part of the stack
8868 frame creation although it is part of the prologue. */
8870 else if (IS_NESTED (func_type))
8872 /* The static chain register is the same as the IP register
8873 used as a scratch register during stack frame creation.
8874 To get around this we need to find somewhere to store IP
8875 whilst the frame is being created. We try the following
8876 places in order:
8878 1. The last argument register.
8879 2. A slot on the stack above the frame. (This only
8880 works if the function is not a varargs function).
8881 3. Register r3, after pushing the argument registers
8882 onto the stack.
8884 Note - we only need to tell the dwarf2 backend about the SP
8885 adjustment in the second variant; the static chain register
8886 doesn't need to be unwound, as it doesn't contain a value
8887 inherited from the caller. */
8889 if (regs_ever_live[3] == 0)
8891 insn = gen_rtx_REG (SImode, 3);
8892 insn = gen_rtx_SET (SImode, insn, ip_rtx);
8893 insn = emit_insn (insn);
8895 else if (args_to_push == 0)
8897 rtx dwarf;
8898 insn = gen_rtx_PRE_DEC (SImode, stack_pointer_rtx);
8899 insn = gen_rtx_MEM (SImode, insn);
8900 insn = gen_rtx_SET (VOIDmode, insn, ip_rtx);
8901 insn = emit_insn (insn);
8903 fp_offset = 4;
8905 /* Just tell the dwarf backend that we adjusted SP. */
8906 dwarf = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
8907 gen_rtx_PLUS (SImode, stack_pointer_rtx,
8908 GEN_INT (-fp_offset)));
8909 RTX_FRAME_RELATED_P (insn) = 1;
8910 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8911 dwarf, REG_NOTES (insn));
8913 else
8915 /* Store the args on the stack. */
8916 if (cfun->machine->uses_anonymous_args)
8917 insn = emit_multi_reg_push
8918 ((0xf0 >> (args_to_push / 4)) & 0xf);
8919 else
8920 insn = emit_insn
8921 (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
8922 GEN_INT (- args_to_push)));
8924 RTX_FRAME_RELATED_P (insn) = 1;
8926 saved_pretend_args = 1;
8927 fp_offset = args_to_push;
8928 args_to_push = 0;
8930 /* Now reuse r3 to preserve IP. */
8931 insn = gen_rtx_REG (SImode, 3);
8932 insn = gen_rtx_SET (SImode, insn, ip_rtx);
8933 (void) emit_insn (insn);
8937 if (fp_offset)
8939 insn = gen_rtx_PLUS (SImode, stack_pointer_rtx, GEN_INT (fp_offset));
8940 insn = gen_rtx_SET (SImode, ip_rtx, insn);
8942 else
8943 insn = gen_movsi (ip_rtx, stack_pointer_rtx);
8945 insn = emit_insn (insn);
8946 RTX_FRAME_RELATED_P (insn) = 1;
8949 if (args_to_push)
8951 /* Push the argument registers, or reserve space for them. */
8952 if (cfun->machine->uses_anonymous_args)
8953 insn = emit_multi_reg_push
8954 ((0xf0 >> (args_to_push / 4)) & 0xf);
8955 else
8956 insn = emit_insn
8957 (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
8958 GEN_INT (- args_to_push)));
8959 RTX_FRAME_RELATED_P (insn) = 1;
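/* Editorial note: the mask expression above selects the argument
   registers that still need pushing, e.g. args_to_push == 8 gives
   (0xf0 >> 2) & 0xf == 0xc, i.e. {r2, r3}.  */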
8962 /* If this is an interrupt service routine, and the link register
8963 is going to be pushed, and we are not creating a stack frame,
8964 (which would involve an extra push of IP and a pop in the epilogue)
8965 subtracting four from LR now will mean that the function return
8966 can be done with a single instruction. */
8967 if ((func_type == ARM_FT_ISR || func_type == ARM_FT_FIQ)
8968 && (live_regs_mask & (1 << LR_REGNUM)) != 0
8969 && ! frame_pointer_needed)
8970 emit_insn (gen_rtx_SET (SImode,
8971 gen_rtx_REG (SImode, LR_REGNUM),
8972 gen_rtx_PLUS (SImode,
8973 gen_rtx_REG (SImode, LR_REGNUM),
8974 GEN_INT (-4))));
8976 if (live_regs_mask)
8978 insn = emit_multi_reg_push (live_regs_mask);
8979 RTX_FRAME_RELATED_P (insn) = 1;
8982 if (! IS_VOLATILE (func_type))
8984 /* Save any floating point call-saved registers used by this function. */
8985 if (arm_fpu_arch == FP_SOFT2)
8987 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg --)
8988 if (regs_ever_live[reg] && !call_used_regs[reg])
8990 insn = gen_rtx_PRE_DEC (XFmode, stack_pointer_rtx);
8991 insn = gen_rtx_MEM (XFmode, insn);
8992 insn = emit_insn (gen_rtx_SET (VOIDmode, insn,
8993 gen_rtx_REG (XFmode, reg)));
8994 RTX_FRAME_RELATED_P (insn) = 1;
8997 else
8999 int start_reg = LAST_ARM_FP_REGNUM;
9001 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg --)
9003 if (regs_ever_live[reg] && !call_used_regs[reg])
9005 if (start_reg - reg == 3)
9007 insn = emit_sfm (reg, 4);
9008 RTX_FRAME_RELATED_P (insn) = 1;
9009 start_reg = reg - 1;
9012 else
9014 if (start_reg != reg)
9016 insn = emit_sfm (reg + 1, start_reg - reg);
9017 RTX_FRAME_RELATED_P (insn) = 1;
9019 start_reg = reg - 1;
9023 if (start_reg != reg)
9025 insn = emit_sfm (reg + 1, start_reg - reg);
9026 RTX_FRAME_RELATED_P (insn) = 1;
9031 if (frame_pointer_needed)
9033 /* Create the new frame pointer. */
9034 insn = GEN_INT (-(4 + args_to_push + fp_offset));
9035 insn = emit_insn (gen_addsi3 (hard_frame_pointer_rtx, ip_rtx, insn));
9036 RTX_FRAME_RELATED_P (insn) = 1;
9038 if (IS_NESTED (func_type))
9040 /* Recover the static chain register. */
9041 if (regs_ever_live [3] == 0
9042 || saved_pretend_args)
9043 insn = gen_rtx_REG (SImode, 3);
9044 else /* if (current_function_pretend_args_size == 0) */
9046 insn = gen_rtx_PLUS (SImode, hard_frame_pointer_rtx, GEN_INT (4));
9047 insn = gen_rtx_MEM (SImode, insn);
9050 emit_insn (gen_rtx_SET (SImode, ip_rtx, insn));
9051 /* Add a USE to stop propagate_one_insn() from barfing. */
9052 emit_insn (gen_prologue_use (ip_rtx));
9056 amount = GEN_INT (-(arm_get_frame_size ()
9057 + current_function_outgoing_args_size));
9059 if (amount != const0_rtx)
9061 /* This add can produce multiple insns for a large constant, so we
9062 need to get tricky. */
9063 rtx last = get_last_insn ();
9064 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
9065 amount));
9066 do
9068 last = last ? NEXT_INSN (last) : get_insns ();
9069 RTX_FRAME_RELATED_P (last) = 1;
9071 while (last != insn);
9073 /* If the frame pointer is needed, emit a special barrier that
9074 will prevent the scheduler from moving stores to the frame
9075 before the stack adjustment. */
9076 if (frame_pointer_needed)
9077 insn = emit_insn (gen_stack_tie (stack_pointer_rtx,
9078 hard_frame_pointer_rtx));
9081 /* If we are profiling, make sure no instructions are scheduled before
9082 the call to mcount. Similarly if the user has requested no
9083 scheduling in the prolog. */
9084 if (current_function_profile || TARGET_NO_SCHED_PRO)
9085 emit_insn (gen_blockage ());
9087 /* If the link register is being kept alive, with the return address in it,
9088 then make sure that it does not get reused by the ce2 pass. */
9089 if ((live_regs_mask & (1 << LR_REGNUM)) == 0)
9091 emit_insn (gen_prologue_use (gen_rtx_REG (SImode, LR_REGNUM)));
9092 cfun->machine->lr_save_eliminated = 1;
9096 /* If CODE is 'd', then the X is a condition operand and the instruction
9097 should only be executed if the condition is true.
9098 if CODE is 'D', then the X is a condition operand and the instruction
9099 should only be executed if the condition is false: however, if the mode
9100 of the comparison is CCFPEmode, then always execute the instruction -- we
9101 do this because in these circumstances !GE does not necessarily imply LT;
9102 in these cases the instruction pattern will take care to make sure that
9103 an instruction containing %d will follow, thereby undoing the effects of
9104 doing this instruction unconditionally.
9105 If CODE is 'N' then X is a floating point operand that must be negated
9106 before output.
9107 If CODE is 'B' then output a bitwise inverted value of X (a const int).
9108 If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */
9110 void
9111 arm_print_operand (stream, x, code)
9112 FILE * stream;
9113 rtx x;
9114 int code;
9116 switch (code)
9118 case '@':
9119 fputs (ASM_COMMENT_START, stream);
9120 return;
9122 case '_':
9123 fputs (user_label_prefix, stream);
9124 return;
9126 case '|':
9127 fputs (REGISTER_PREFIX, stream);
9128 return;
9130 case '?':
9131 if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
9133 if (TARGET_THUMB || current_insn_predicate != NULL)
9134 abort ();
9136 fputs (arm_condition_codes[arm_current_cc], stream);
9138 else if (current_insn_predicate)
9140 enum arm_cond_code code;
9142 if (TARGET_THUMB)
9143 abort ();
9145 code = get_arm_condition_code (current_insn_predicate);
9146 fputs (arm_condition_codes[code], stream);
9148 return;
9150 case 'N':
9152 REAL_VALUE_TYPE r;
9153 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
9154 r = REAL_VALUE_NEGATE (r);
9155 fprintf (stream, "%s", fp_const_from_val (&r));
9157 return;
9159 case 'B':
9160 if (GET_CODE (x) == CONST_INT)
9162 HOST_WIDE_INT val;
9163 val = ARM_SIGN_EXTEND (~INTVAL (x));
9164 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
9166 else
9168 putc ('~', stream);
9169 output_addr_const (stream, x);
9171 return;
9173 case 'i':
9174 fprintf (stream, "%s", arithmetic_instr (x, 1));
9175 return;
9177 case 'I':
9178 fprintf (stream, "%s", arithmetic_instr (x, 0));
9179 return;
9181 case 'S':
9183 HOST_WIDE_INT val;
9184 const char * shift = shift_op (x, &val);
9186 if (shift)
9188 fprintf (stream, ", %s ", shift_op (x, &val));
9189 if (val == -1)
9190 arm_print_operand (stream, XEXP (x, 1), 0);
9191 else
9193 fputc ('#', stream);
9194 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
9198 return;
9200 /* An explanation of the 'Q', 'R' and 'H' register operands:
9202 In a pair of registers containing a DI or DF value the 'Q'
9203 operand returns the register number of the register containing
9204 the least significant part of the value. The 'R' operand returns
9205 the register number of the register containing the most
9206 significant part of the value.
9208 The 'H' operand returns the higher of the two register numbers.
9209 On a run where WORDS_BIG_ENDIAN is true the 'H' operand is the
9210 same as the 'Q' operand, since the most significant part of the
9211 value is held in the lower number register. The reverse is true
9212 on systems where WORDS_BIG_ENDIAN is false.
9214 The purpose of these operands is to distinguish between cases
9215 where the endian-ness of the values is important (for example
9216 when they are added together), and cases where the endian-ness
9217 is irrelevant, but the order of register operations is important.
9218 For example when loading a value from memory into a register
9219 pair, the endian-ness does not matter. Provided that the value
9220 from the lower memory address is put into the lower numbered
9221 register, and the value from the higher address is put into the
9222 higher numbered register, the load will work regardless of whether
9223 the value being loaded is big-wordian or little-wordian. The
9224 order of the two register loads can matter however, if the address
9225 of the memory location is actually held in one of the registers
9226 being overwritten by the load. */
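/* Editorial example: for a DImode value held in r0/r1 on a target
   where WORDS_BIG_ENDIAN is false, 'Q' prints r0 (the least
   significant word), 'R' prints r1, and 'H' prints r1, the
   higher-numbered register, regardless of endianness.  */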
9227 case 'Q':
9228 if (REGNO (x) > LAST_ARM_REGNUM)
9229 abort ();
9230 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 1 : 0));
9231 return;
9233 case 'R':
9234 if (REGNO (x) > LAST_ARM_REGNUM)
9235 abort ();
9236 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 0 : 1));
9237 return;
9239 case 'H':
9240 if (REGNO (x) > LAST_ARM_REGNUM)
9241 abort ();
9242 asm_fprintf (stream, "%r", REGNO (x) + 1);
9243 return;
9245 case 'm':
9246 asm_fprintf (stream, "%r",
9247 GET_CODE (XEXP (x, 0)) == REG
9248 ? REGNO (XEXP (x, 0)) : REGNO (XEXP (XEXP (x, 0), 0)));
9249 return;
9251 case 'M':
9252 asm_fprintf (stream, "{%r-%r}",
9253 REGNO (x),
9254 REGNO (x) + ARM_NUM_REGS (GET_MODE (x)) - 1);
9255 return;
9257 case 'd':
9258 /* CONST_TRUE_RTX means always -- that's the default. */
9259 if (x == const_true_rtx)
9260 return;
9262 if (TARGET_ARM)
9263 fputs (arm_condition_codes[get_arm_condition_code (x)],
9264 stream);
9265 else
9266 fputs (thumb_condition_code (x, 0), stream);
9267 return;
9269 case 'D':
9270 /* CONST_TRUE_RTX means not always -- ie never. We shouldn't ever
9271 want to do that. */
9272 if (x == const_true_rtx)
9273 abort ();
9275 if (TARGET_ARM)
9276 fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
9277 (get_arm_condition_code (x))],
9278 stream);
9279 else
9280 fputs (thumb_condition_code (x, 1), stream);
9281 return;
9283 default:
9284 if (x == 0)
9285 abort ();
9287 if (GET_CODE (x) == REG)
9288 asm_fprintf (stream, "%r", REGNO (x));
9289 else if (GET_CODE (x) == MEM)
9291 output_memory_reference_mode = GET_MODE (x);
9292 output_address (XEXP (x, 0));
9294 else if (GET_CODE (x) == CONST_DOUBLE)
9295 fprintf (stream, "#%s", fp_immediate_constant (x));
9296 else if (GET_CODE (x) == NEG)
9297 abort (); /* This should never happen now. */
9298 else
9300 fputc ('#', stream);
9301 output_addr_const (stream, x);
9306 #ifndef AOF_ASSEMBLER
9307 /* Target hook for assembling integer objects. The ARM version needs to
9308 handle word-sized values specially. */
9310 static bool
9311 arm_assemble_integer (x, size, aligned_p)
9312 rtx x;
9313 unsigned int size;
9314 int aligned_p;
9316 if (size == UNITS_PER_WORD && aligned_p)
9318 fputs ("\t.word\t", asm_out_file);
9319 output_addr_const (asm_out_file, x);
9321 /* Mark symbols as position independent. We only do this in the
9322 .text segment, not in the .data segment. */
9323 if (NEED_GOT_RELOC && flag_pic && making_const_table &&
9324 (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF))
9326 if (GET_CODE (x) == SYMBOL_REF
9327 && (CONSTANT_POOL_ADDRESS_P (x)
9328 || ENCODED_SHORT_CALL_ATTR_P (XSTR (x, 0))))
9329 fputs ("(GOTOFF)", asm_out_file);
9330 else if (GET_CODE (x) == LABEL_REF)
9331 fputs ("(GOTOFF)", asm_out_file);
9332 else
9333 fputs ("(GOT)", asm_out_file);
9335 fputc ('\n', asm_out_file);
9336 return true;
9339 return default_assemble_integer (x, size, aligned_p);
9341 #endif
9343 /* A finite state machine takes care of noticing whether or not instructions
9344 can be conditionally executed, and thus decrease execution time and code
9345 size by deleting branch instructions. The fsm is controlled by
9346 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
9348 /* The state of the fsm controlling condition codes are:
9349 0: normal, do nothing special
9350 1: make ASM_OUTPUT_OPCODE not output this instruction
9351 2: make ASM_OUTPUT_OPCODE not output this instruction
9352 3: make instructions conditional
9353 4: make instructions conditional
9355 State transitions (state->state by whom under condition):
9356 0 -> 1 final_prescan_insn if the `target' is a label
9357 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
9358 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
9359 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
9360 3 -> 0 (*targetm.asm_out.internal_label) if the `target' label is reached
9361 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
9362 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
9363 (the target insn is arm_target_insn).
9365 If the jump clobbers the conditions then we use states 2 and 4.
9367 A similar thing can be done with conditional return insns.
9369 XXX In case the `target' is an unconditional branch, this conditionalising
9370 of the instructions always reduces code size, but not always execution
9371 time. But then, I want to reduce the code size to somewhere near what
9372 /bin/cc produces. */
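/* As an illustration (editorial), the fsm rewrites a short forward
   branch such as
   cmp r0, #0
   beq .L1
   add r1, r1, #1
   .L1:
   into
   cmp r0, #0
   addne r1, r1, #1
   trading the branch for a conditionally executed instruction.  */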
9374 /* Returns the index of the ARM condition code string in
9375 `arm_condition_codes'. COMPARISON should be an rtx like
9376 `(eq (...) (...))'. */
9378 static enum arm_cond_code
9379 get_arm_condition_code (comparison)
9380 rtx comparison;
9382 enum machine_mode mode = GET_MODE (XEXP (comparison, 0));
9383 int code;
9384 enum rtx_code comp_code = GET_CODE (comparison);
9386 if (GET_MODE_CLASS (mode) != MODE_CC)
9387 mode = SELECT_CC_MODE (comp_code, XEXP (comparison, 0),
9388 XEXP (comparison, 1));
9390 switch (mode)
9392 case CC_DNEmode: code = ARM_NE; goto dominance;
9393 case CC_DEQmode: code = ARM_EQ; goto dominance;
9394 case CC_DGEmode: code = ARM_GE; goto dominance;
9395 case CC_DGTmode: code = ARM_GT; goto dominance;
9396 case CC_DLEmode: code = ARM_LE; goto dominance;
9397 case CC_DLTmode: code = ARM_LT; goto dominance;
9398 case CC_DGEUmode: code = ARM_CS; goto dominance;
9399 case CC_DGTUmode: code = ARM_HI; goto dominance;
9400 case CC_DLEUmode: code = ARM_LS; goto dominance;
9401 case CC_DLTUmode: code = ARM_CC;
9403 dominance:
9404 if (comp_code != EQ && comp_code != NE)
9405 abort ();
9407 if (comp_code == EQ)
9408 return ARM_INVERSE_CONDITION_CODE (code);
9409 return code;
9411 case CC_NOOVmode:
9412 switch (comp_code)
9414 case NE: return ARM_NE;
9415 case EQ: return ARM_EQ;
9416 case GE: return ARM_PL;
9417 case LT: return ARM_MI;
9418 default: abort ();
9421 case CC_Zmode:
9422 switch (comp_code)
9424 case NE: return ARM_NE;
9425 case EQ: return ARM_EQ;
9426 default: abort ();
9429 case CCFPEmode:
9430 case CCFPmode:
9431 /* These encodings assume that AC=1 in the FPA system control
9432 byte. This allows us to handle all cases except UNEQ and
9433 LTGT. */
9434 switch (comp_code)
9436 case GE: return ARM_GE;
9437 case GT: return ARM_GT;
9438 case LE: return ARM_LS;
9439 case LT: return ARM_MI;
9440 case NE: return ARM_NE;
9441 case EQ: return ARM_EQ;
9442 case ORDERED: return ARM_VC;
9443 case UNORDERED: return ARM_VS;
9444 case UNLT: return ARM_LT;
9445 case UNLE: return ARM_LE;
9446 case UNGT: return ARM_HI;
9447 case UNGE: return ARM_PL;
9448 /* UNEQ and LTGT do not have a representation. */
9449 case UNEQ: /* Fall through. */
9450 case LTGT: /* Fall through. */
9451 default: abort ();
9454 case CC_SWPmode:
9455 switch (comp_code)
9457 case NE: return ARM_NE;
9458 case EQ: return ARM_EQ;
9459 case GE: return ARM_LE;
9460 case GT: return ARM_LT;
9461 case LE: return ARM_GE;
9462 case LT: return ARM_GT;
9463 case GEU: return ARM_LS;
9464 case GTU: return ARM_CC;
9465 case LEU: return ARM_CS;
9466 case LTU: return ARM_HI;
9467 default: abort ();
9470 case CC_Cmode:
9471 switch (comp_code)
9473 case LTU: return ARM_CS;
9474 case GEU: return ARM_CC;
9475 default: abort ();
9478 case CCmode:
9479 switch (comp_code)
9481 case NE: return ARM_NE;
9482 case EQ: return ARM_EQ;
9483 case GE: return ARM_GE;
9484 case GT: return ARM_GT;
9485 case LE: return ARM_LE;
9486 case LT: return ARM_LT;
9487 case GEU: return ARM_CS;
9488 case GTU: return ARM_HI;
9489 case LEU: return ARM_LS;
9490 case LTU: return ARM_CC;
9491 default: abort ();
9494 default: abort ();
9497 abort ();
9501 void
9502 arm_final_prescan_insn (insn)
9503 rtx insn;
9505 /* BODY will hold the body of INSN. */
9506 rtx body = PATTERN (insn);
9508 /* This will be 1 if trying to repeat the trick, and things need to be
9509 reversed if it appears to fail. */
9510 int reverse = 0;
9512 /* A nonzero JUMP_CLOBBERS implies that the condition codes are
9513 clobbered if a branch is taken, even if the rtl suggests otherwise. It also
9514 means that we have to grub around within the jump expression to find
9515 out what the conditions are when the jump isn't taken. */
9516 int jump_clobbers = 0;
9518 /* If we start with a return insn, we only succeed if we find another one. */
9519 int seeking_return = 0;
9521 /* START_INSN will hold the insn from where we start looking. This is the
9522 first insn after the following code_label if REVERSE is true. */
9523 rtx start_insn = insn;
9525 /* If in state 4, check if the target branch is reached, in order to
9526 change back to state 0. */
9527 if (arm_ccfsm_state == 4)
9529 if (insn == arm_target_insn)
9531 arm_target_insn = NULL;
9532 arm_ccfsm_state = 0;
9534 return;
9537 /* If in state 3, it is possible to repeat the trick, if this insn is an
9538 unconditional branch to a label, and immediately following this branch
9539 is the previous target label which is only used once, and the label this
9540 branch jumps to is not too far off. */
9541 if (arm_ccfsm_state == 3)
9543 if (simplejump_p (insn))
9545 start_insn = next_nonnote_insn (start_insn);
9546 if (GET_CODE (start_insn) == BARRIER)
9548 /* XXX Isn't this always a barrier? */
9549 start_insn = next_nonnote_insn (start_insn);
9551 if (GET_CODE (start_insn) == CODE_LABEL
9552 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
9553 && LABEL_NUSES (start_insn) == 1)
9554 reverse = TRUE;
9555 else
9556 return;
9558 else if (GET_CODE (body) == RETURN)
9560 start_insn = next_nonnote_insn (start_insn);
9561 if (GET_CODE (start_insn) == BARRIER)
9562 start_insn = next_nonnote_insn (start_insn);
9563 if (GET_CODE (start_insn) == CODE_LABEL
9564 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
9565 && LABEL_NUSES (start_insn) == 1)
9567 reverse = TRUE;
9568 seeking_return = 1;
9570 else
9571 return;
9573 else
9574 return;
9577 if (arm_ccfsm_state != 0 && !reverse)
9578 abort ();
9579 if (GET_CODE (insn) != JUMP_INSN)
9580 return;
9582 /* This jump might be paralleled with a clobber of the condition codes;
9583 the jump should always come first. */
9584 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
9585 body = XVECEXP (body, 0, 0);
9587 #if 0
9588 /* If this is a conditional return then we don't want to know */
9589 if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
9590 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
9591 && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
9592 || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
9593 return;
9594 #endif
9596 if (reverse
9597 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
9598 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
9600 int insns_skipped;
9601 int fail = FALSE, succeed = FALSE;
9602 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
9603 int then_not_else = TRUE;
9604 rtx this_insn = start_insn, label = 0;
9606 /* If the jump cannot be done with one instruction, we cannot
9607 conditionally execute the instruction in the inverse case. */
9608 if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
9610 jump_clobbers = 1;
9611 return;
9614 /* Register the insn jumped to. */
9615 if (reverse)
9617 if (!seeking_return)
9618 label = XEXP (SET_SRC (body), 0);
9620 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
9621 label = XEXP (XEXP (SET_SRC (body), 1), 0);
9622 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
9624 label = XEXP (XEXP (SET_SRC (body), 2), 0);
9625 then_not_else = FALSE;
9627 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
9628 seeking_return = 1;
9629 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
9631 seeking_return = 1;
9632 then_not_else = FALSE;
9634 else
9635 abort ();
9637 /* See how many insns this branch skips, and what kind of insns. If all
9638 insns are okay, and the label or unconditional branch to the same
9639 label is not too far away, succeed. */
9640 for (insns_skipped = 0;
9641 !fail && !succeed && insns_skipped++ < max_insns_skipped;)
9643 rtx scanbody;
9645 this_insn = next_nonnote_insn (this_insn);
9646 if (!this_insn)
9647 break;
9649 switch (GET_CODE (this_insn))
9651 case CODE_LABEL:
9652 /* Succeed if it is the target label, otherwise fail since
9653 control falls in from somewhere else. */
9654 if (this_insn == label)
9656 if (jump_clobbers)
9658 arm_ccfsm_state = 2;
9659 this_insn = next_nonnote_insn (this_insn);
9661 else
9662 arm_ccfsm_state = 1;
9663 succeed = TRUE;
9665 else
9666 fail = TRUE;
9667 break;
9669 case BARRIER:
9670 /* Succeed if the following insn is the target label.
9671 Otherwise fail.
9672 If return insns are used then the last insn in a function
9673 will be a barrier. */
9674 this_insn = next_nonnote_insn (this_insn);
9675 if (this_insn && this_insn == label)
9677 if (jump_clobbers)
9679 arm_ccfsm_state = 2;
9680 this_insn = next_nonnote_insn (this_insn);
9682 else
9683 arm_ccfsm_state = 1;
9684 succeed = TRUE;
9686 else
9687 fail = TRUE;
9688 break;
9690 case CALL_INSN:
9691 /* If using 32-bit addresses the cc is not preserved over
9692 calls. */
9693 if (TARGET_APCS_32)
9695 /* Succeed if the following insn is the target label,
9696 or if the following two insns are a barrier and
9697 the target label. */
9698 this_insn = next_nonnote_insn (this_insn);
9699 if (this_insn && GET_CODE (this_insn) == BARRIER)
9700 this_insn = next_nonnote_insn (this_insn);
9702 if (this_insn && this_insn == label
9703 && insns_skipped < max_insns_skipped)
9705 if (jump_clobbers)
9707 arm_ccfsm_state = 2;
9708 this_insn = next_nonnote_insn (this_insn);
9710 else
9711 arm_ccfsm_state = 1;
9712 succeed = TRUE;
9714 else
9715 fail = TRUE;
9717 break;
9719 case JUMP_INSN:
9720 /* If this is an unconditional branch to the same label, succeed.
9721 If it is to another label, do nothing. If it is conditional,
9722 fail. */
9723 /* XXX Probably, the tests for SET and the PC are unnecessary. */
9725 scanbody = PATTERN (this_insn);
9726 if (GET_CODE (scanbody) == SET
9727 && GET_CODE (SET_DEST (scanbody)) == PC)
9729 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
9730 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
9732 arm_ccfsm_state = 2;
9733 succeed = TRUE;
9735 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
9736 fail = TRUE;
9738 /* Fail if a conditional return is undesirable (eg on a
9739 StrongARM), but still allow this if optimizing for size. */
9740 else if (GET_CODE (scanbody) == RETURN
9741 && !use_return_insn (TRUE)
9742 && !optimize_size)
9743 fail = TRUE;
9744 else if (GET_CODE (scanbody) == RETURN
9745 && seeking_return)
9747 arm_ccfsm_state = 2;
9748 succeed = TRUE;
9750 else if (GET_CODE (scanbody) == PARALLEL)
9752 switch (get_attr_conds (this_insn))
9754 case CONDS_NOCOND:
9755 break;
9756 default:
9757 fail = TRUE;
9758 break;
9761 else
9762 fail = TRUE; /* Unrecognized jump (eg epilogue). */
9764 break;
9766 case INSN:
9767 /* Instructions using or affecting the condition codes make it
9768 fail. */
9769 scanbody = PATTERN (this_insn);
9770 if (!(GET_CODE (scanbody) == SET
9771 || GET_CODE (scanbody) == PARALLEL)
9772 || get_attr_conds (this_insn) != CONDS_NOCOND)
9773 fail = TRUE;
9774 break;
9776 default:
9777 break;
9780 if (succeed)
9782 if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
9783 arm_target_label = CODE_LABEL_NUMBER (label);
9784 else if (seeking_return || arm_ccfsm_state == 2)
9786 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
9788 this_insn = next_nonnote_insn (this_insn);
9789 if (this_insn && (GET_CODE (this_insn) == BARRIER
9790 || GET_CODE (this_insn) == CODE_LABEL))
9791 abort ();
9793 if (!this_insn)
9795 /* Oh dear! We ran off the end... give up. */
9796 recog (PATTERN (insn), insn, NULL);
9797 arm_ccfsm_state = 0;
9798 arm_target_insn = NULL;
9799 return;
9801 arm_target_insn = this_insn;
9803 else
9804 abort ();
9805 if (jump_clobbers)
9807 if (reverse)
9808 abort ();
9809 arm_current_cc =
9810 get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
9811 0), 0), 1));
9812 if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
9813 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
9814 if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
9815 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
9817 else
9819 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
9820 what it was. */
9821 if (!reverse)
9822 arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
9823 0));
9826 if (reverse || then_not_else)
9827 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
9830 /* Restore recog_data (getting the attributes of other insns can
9831 destroy this array, but final.c assumes that it remains intact
9832 across this call; since the insn has been recognized already we
9833 call recog directly). */
9834 recog (PATTERN (insn), insn, NULL);
9838 /* Returns true if REGNO is a valid register
9839 for holding a quantity of type MODE. */
9841 int
9842 arm_hard_regno_mode_ok (regno, mode)
9843 unsigned int regno;
9844 enum machine_mode mode;
9846 if (GET_MODE_CLASS (mode) == MODE_CC)
9847 return regno == CC_REGNUM;
9849 if (TARGET_THUMB)
9850 /* For the Thumb we only allow values bigger than SImode in
9851 registers 0 - 6, so that there is always a second low
9852 register available to hold the upper part of the value.
9853 We probably ought to ensure that the register is the
9854 start of an even numbered register pair. */
9855 return (ARM_NUM_REGS (mode) < 2) || (regno < LAST_LO_REGNUM);
9857 if (regno <= LAST_ARM_REGNUM)
9858 /* We allow any value to be stored in the general registers. */
9859 return 1;
9861 if ( regno == FRAME_POINTER_REGNUM
9862 || regno == ARG_POINTER_REGNUM)
9863 /* We only allow integers in the fake hard registers. */
9864 return GET_MODE_CLASS (mode) == MODE_INT;
9866 /* The only registers left are the FPU registers
9867 which we only allow to hold FP values. */
9868 return GET_MODE_CLASS (mode) == MODE_FLOAT
9869 && regno >= FIRST_ARM_FP_REGNUM
9870 && regno <= LAST_ARM_FP_REGNUM;
9873 enum reg_class
9874 arm_regno_class (regno)
9875 int regno;
9877 if (TARGET_THUMB)
9879 if (regno == STACK_POINTER_REGNUM)
9880 return STACK_REG;
9881 if (regno == CC_REGNUM)
9882 return CC_REG;
9883 if (regno < 8)
9884 return LO_REGS;
9885 return HI_REGS;
9888 if ( regno <= LAST_ARM_REGNUM
9889 || regno == FRAME_POINTER_REGNUM
9890 || regno == ARG_POINTER_REGNUM)
9891 return GENERAL_REGS;
9893 if (regno == CC_REGNUM)
9894 return NO_REGS;
9896 return FPU_REGS;
9899 /* Handle a special case when computing the offset
9900 of an argument from the frame pointer. */
9902 int
9903 arm_debugger_arg_offset (value, addr)
9904 int value;
9905 rtx addr;
9907 rtx insn;
9909 /* We are only interested if dbxout_parms() failed to compute the offset. */
9910 if (value != 0)
9911 return 0;
9913 /* We can only cope with the case where the address is held in a register. */
9914 if (GET_CODE (addr) != REG)
9915 return 0;
9917 /* If we are using the frame pointer to point at the argument, then
9918 an offset of 0 is correct. */
9919 if (REGNO (addr) == (unsigned) HARD_FRAME_POINTER_REGNUM)
9920 return 0;
9922 /* If we are using the stack pointer to point at the
9923 argument, then an offset of 0 is correct. */
9924 if ((TARGET_THUMB || !frame_pointer_needed)
9925 && REGNO (addr) == SP_REGNUM)
9926 return 0;
9928 /* Oh dear. The argument is pointed to by a register rather
9929 than being held in a register, or being stored at a known
9930 offset from the frame pointer. Since GDB only understands
9931 those two kinds of argument we must translate the address
9932 held in the register into an offset from the frame pointer.
9933 We do this by searching through the insns for the function
9934 looking to see where this register gets its value. If the
9935 register is initialized from the frame pointer plus an offset
9936 then we are in luck and we can continue, otherwise we give up.
9938 This code is exercised by producing debugging information
9939 for a function with arguments like this:
9941 double func (double a, double b, int c, double d) {return d;}
9943 Without this code the stab for parameter 'd' will be set to
9944 an offset of 0 from the frame pointer, rather than 8. */
9946 /* The if() statement says:
9948 If the insn is a normal instruction
9949 and if the insn is setting the value in a register
9950 and if the register being set is the register holding the address of the argument
9951 and if the address is computed by an addition
9952 that involves adding to a register
9953 which is the frame pointer
9954 a constant integer
9956 then... */
9958 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9960 if ( GET_CODE (insn) == INSN
9961 && GET_CODE (PATTERN (insn)) == SET
9962 && REGNO (XEXP (PATTERN (insn), 0)) == REGNO (addr)
9963 && GET_CODE (XEXP (PATTERN (insn), 1)) == PLUS
9964 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 0)) == REG
9965 && REGNO (XEXP (XEXP (PATTERN (insn), 1), 0)) == (unsigned) HARD_FRAME_POINTER_REGNUM
9966 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 1)) == CONST_INT
9969 value = INTVAL (XEXP (XEXP (PATTERN (insn), 1), 1));
9971 break;
9975 if (value == 0)
9977 debug_rtx (addr);
9978 warning ("unable to compute real location of stacked parameter");
9979 value = 8; /* XXX magic hack */
9982 return value;
9985 #define def_builtin(NAME, TYPE, CODE) \
9986 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, NULL, NULL_TREE)
9988 void
9989 arm_init_builtins ()
9991 tree endlink = void_list_node;
9992 tree int_endlink = tree_cons (NULL_TREE, integer_type_node, endlink);
9993 tree pchar_type_node = build_pointer_type (char_type_node);
9995 tree int_ftype_int, void_ftype_pchar;
9997 /* void func (char *) */
9998 void_ftype_pchar
9999 = build_function_type_list (void_type_node, pchar_type_node, NULL_TREE);
10001 /* int func (int) */
10002 int_ftype_int
10003 = build_function_type (integer_type_node, int_endlink);
10005 /* Initialize arm V5 builtins. */
10006 if (arm_arch5)
10007 def_builtin ("__builtin_clz", int_ftype_int, ARM_BUILTIN_CLZ);
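/* Editorial example: once registered, a call such as
   int f (int x) { return __builtin_clz (x); }
   is expanded by arm_expand_builtin() below through CODE_FOR_clz,
   i.e. a single `clz' instruction, available from ARMv5 onwards.  */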
10010 /* Expand an expression EXP that calls a built-in function,
10011 with result going to TARGET if that's convenient
10012 (and in mode MODE if that's convenient).
10013 SUBTARGET may be used as the target for computing one of EXP's operands.
10014 IGNORE is nonzero if the value is to be ignored. */
10016 rtx
10017 arm_expand_builtin (exp, target, subtarget, mode, ignore)
10018 tree exp;
10019 rtx target;
10020 rtx subtarget ATTRIBUTE_UNUSED;
10021 enum machine_mode mode ATTRIBUTE_UNUSED;
10022 int ignore ATTRIBUTE_UNUSED;
10024 enum insn_code icode;
10025 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
10026 tree arglist = TREE_OPERAND (exp, 1);
10027 tree arg0;
10028 rtx op0, pat;
10029 enum machine_mode tmode, mode0;
10030 int fcode = DECL_FUNCTION_CODE (fndecl);
10032 switch (fcode)
10034 default:
10035 break;
10037 case ARM_BUILTIN_CLZ:
10038 icode = CODE_FOR_clz;
10039 arg0 = TREE_VALUE (arglist);
10040 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
10041 tmode = insn_data[icode].operand[0].mode;
10042 mode0 = insn_data[icode].operand[1].mode;
10044 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
10045 op0 = copy_to_mode_reg (mode0, op0);
10046 if (target == 0
10047 || GET_MODE (target) != tmode
10048 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
10049 target = gen_reg_rtx (tmode);
10050 pat = GEN_FCN (icode) (target, op0);
10051 if (! pat)
10052 return 0;
10053 emit_insn (pat);
10054 return target;
10057 /* @@@ Should really do something sensible here. */
10058 return NULL_RTX;
10061 /* Recursively search through all of the blocks in a function
10062 checking to see if any of the variables created in that
10063 function match the RTX called 'orig'. If they do then
10064 replace them with the RTX called 'new'. */
10066 static void
10067 replace_symbols_in_block (block, orig, new)
10068 tree block;
10069 rtx orig;
10070 rtx new;
10072 for (; block; block = BLOCK_CHAIN (block))
10074 tree sym;
10076 if (!TREE_USED (block))
10077 continue;
10079 for (sym = BLOCK_VARS (block); sym; sym = TREE_CHAIN (sym))
10081 if ( (DECL_NAME (sym) == 0 && TREE_CODE (sym) != TYPE_DECL)
10082 || DECL_IGNORED_P (sym)
10083 || TREE_CODE (sym) != VAR_DECL
10084 || DECL_EXTERNAL (sym)
10085 || !rtx_equal_p (DECL_RTL (sym), orig)
10087 continue;
10089 SET_DECL_RTL (sym, new);
10092 replace_symbols_in_block (BLOCK_SUBBLOCKS (block), orig, new);
10096 /* Return the number (counting from 0) of
10097 the least significant set bit in MASK. */
10099 #ifdef __GNUC__
10100 inline
10101 #endif
10102 static int
10103 number_of_first_bit_set (mask)
10104 int mask;
10106 int bit;
10108 for (bit = 0;
10109 (mask & (1 << bit)) == 0;
10110 ++bit)
10111 continue;
10113 return bit;
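/* Editorial example: number_of_first_bit_set (0x28) == 3, since
   0x28 == 0b101000 and bit 3 is the lowest bit set.  Note that the
   loop above never terminates for a zero MASK, so callers must pass
   a nonzero mask.  */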
10116 /* Generate code to return from a thumb function.
10117 If 'reg_containing_return_addr' is -1, then the return address is
10118 actually on the stack, at the stack pointer. */
10119 static void
10120 thumb_exit (f, reg_containing_return_addr, eh_ofs)
10121 FILE * f;
10122 int reg_containing_return_addr;
10123 rtx eh_ofs;
10125 unsigned regs_available_for_popping;
10126 unsigned regs_to_pop;
10127 int pops_needed;
10128 unsigned available;
10129 unsigned required;
10130 int mode;
10131 int size;
10132 int restore_a4 = FALSE;
10134 /* Compute the registers we need to pop. */
10135 regs_to_pop = 0;
10136 pops_needed = 0;
10138 /* There is an assumption here, that if eh_ofs is not NULL, the
10139 normal return address will have been pushed. */
10140 if (reg_containing_return_addr == -1 || eh_ofs)
10142 /* When we are generating a return for __builtin_eh_return,
10143 reg_containing_return_addr must specify the return regno. */
10144 if (eh_ofs && reg_containing_return_addr == -1)
10145 abort ();
10147 regs_to_pop |= 1 << LR_REGNUM;
10148 ++pops_needed;
10151 if (TARGET_BACKTRACE)
10153 /* Restore the (ARM) frame pointer and stack pointer. */
10154 regs_to_pop |= (1 << ARM_HARD_FRAME_POINTER_REGNUM) | (1 << SP_REGNUM);
10155 pops_needed += 2;
10158 /* If there is nothing to pop then just emit the BX instruction and
10159 return. */
10160 if (pops_needed == 0)
10162 if (eh_ofs)
10163 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
10165 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
10166 return;
10168 /* Otherwise if we are not supporting interworking and we have not created
10169 a backtrace structure and the function was not entered in ARM mode then
10170 just pop the return address straight into the PC. */
10171 else if (!TARGET_INTERWORK
10172 && !TARGET_BACKTRACE
10173 && !is_called_in_ARM_mode (current_function_decl))
10175 if (eh_ofs)
10177 asm_fprintf (f, "\tadd\t%r, #4\n", SP_REGNUM);
10178 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
10179 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
10181 else
10182 asm_fprintf (f, "\tpop\t{%r}\n", PC_REGNUM);
10184 return;
10187 /* Find out how many of the (return) argument registers we can corrupt. */
10188 regs_available_for_popping = 0;
10190 /* If returning via __builtin_eh_return, the bottom three registers
10191 all contain information needed for the return. */
10192 if (eh_ofs)
10193 size = 12;
10194 else
10196 #ifdef RTX_CODE
10197 /* Where possible, deduce the registers used from the function's
10198 return value. This is more reliable than examining
10199 regs_ever_live[] because that will be set if the register is
10200 ever used in the function, not just if the register is used
10201 to hold a return value. */
10203 if (current_function_return_rtx != 0)
10204 mode = GET_MODE (current_function_return_rtx);
10205 else
10206 #endif
10207 mode = DECL_MODE (DECL_RESULT (current_function_decl));
10209 size = GET_MODE_SIZE (mode);
10211 if (size == 0)
10213 /* In a void function we can use any argument register.
10214 In a function that returns a structure on the stack
10215 we can use the second and third argument registers. */
10216 if (mode == VOIDmode)
10217 regs_available_for_popping =
10218 (1 << ARG_REGISTER (1))
10219 | (1 << ARG_REGISTER (2))
10220 | (1 << ARG_REGISTER (3));
10221 else
10222 regs_available_for_popping =
10223 (1 << ARG_REGISTER (2))
10224 | (1 << ARG_REGISTER (3));
10226 else if (size <= 4)
10227 regs_available_for_popping =
10228 (1 << ARG_REGISTER (2))
10229 | (1 << ARG_REGISTER (3));
10230 else if (size <= 8)
10231 regs_available_for_popping =
10232 (1 << ARG_REGISTER (3));
10235 /* Match registers to be popped with registers into which we pop them. */
10236 for (available = regs_available_for_popping,
10237 required = regs_to_pop;
10238 required != 0 && available != 0;
10239 available &= ~(available & - available),
10240 required &= ~(required & - required))
10241 -- pops_needed;
10243 /* If we have any popping registers left over, remove them. */
10244 if (available > 0)
10245 regs_available_for_popping &= ~available;
10247 /* Otherwise if we need another popping register we can use
10248 the fourth argument register. */
10249 else if (pops_needed)
10251 /* If we have not found any free argument registers and
10252 reg a4 contains the return address, we must move it. */
10253 if (regs_available_for_popping == 0
10254 && reg_containing_return_addr == LAST_ARG_REGNUM)
10256 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
10257 reg_containing_return_addr = LR_REGNUM;
10259 else if (size > 12)
10261 /* Register a4 is being used to hold part of the return value,
10262 but we have dire need of a free, low register. */
10263 restore_a4 = TRUE;
10265 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, LAST_ARG_REGNUM);
10268 if (reg_containing_return_addr != LAST_ARG_REGNUM)
10270 /* The fourth argument register is available. */
10271 regs_available_for_popping |= 1 << LAST_ARG_REGNUM;
10273 --pops_needed;
10277 /* Pop as many registers as we can. */
10278 thumb_pushpop (f, regs_available_for_popping, FALSE);
10280 /* Process the registers we popped. */
10281 if (reg_containing_return_addr == -1)
10283 /* The return address was popped into the lowest numbered register. */
10284 regs_to_pop &= ~(1 << LR_REGNUM);
10286 reg_containing_return_addr =
10287 number_of_first_bit_set (regs_available_for_popping);
10289 /* Remove this register from the mask of available registers, so that
10290 the return address will not be corrupted by further pops. */
10291 regs_available_for_popping &= ~(1 << reg_containing_return_addr);
10294 /* If we popped other registers then handle them here. */
10295 if (regs_available_for_popping)
10297 int frame_pointer;
10299 /* Work out which register currently contains the frame pointer. */
10300 frame_pointer = number_of_first_bit_set (regs_available_for_popping);
10302 /* Move it into the correct place. */
10303 asm_fprintf (f, "\tmov\t%r, %r\n",
10304 ARM_HARD_FRAME_POINTER_REGNUM, frame_pointer);
10306 /* (Temporarily) remove it from the mask of popped registers. */
10307 regs_available_for_popping &= ~(1 << frame_pointer);
10308 regs_to_pop &= ~(1 << ARM_HARD_FRAME_POINTER_REGNUM);
10310 if (regs_available_for_popping)
10312 int stack_pointer;
10314 /* We popped the stack pointer as well;
10315 find the register that contains it. */
10316 stack_pointer = number_of_first_bit_set (regs_available_for_popping);
10318 /* Move it into the stack register. */
10319 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, stack_pointer);
10321 /* At this point we have popped all necessary registers, so
10322 do not worry about restoring regs_available_for_popping
10323 to its correct value:
10325 assert (pops_needed == 0)
10326 assert (regs_available_for_popping == (1 << frame_pointer))
10327 assert (regs_to_pop == (1 << STACK_POINTER)) */
10329 else
10331 /* Since we have just moved the popped value into the frame
10332 pointer, the popping register is available for reuse, and
10333 we know that we still have the stack pointer left to pop. */
10334 regs_available_for_popping |= (1 << frame_pointer);
10338 /* If we still have registers left on the stack, but we no longer have
10339 any registers into which we can pop them, then we must move the return
10340 address into the link register and make available the register that
10341 contained it. */
10342 if (regs_available_for_popping == 0 && pops_needed > 0)
10344 regs_available_for_popping |= 1 << reg_containing_return_addr;
10346 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM,
10347 reg_containing_return_addr);
10349 reg_containing_return_addr = LR_REGNUM;
10352 /* If we have registers left on the stack then pop some more.
10353 We know that at most we will want to pop FP and SP. */
10354 if (pops_needed > 0)
10356 int popped_into;
10357 int move_to;
10359 thumb_pushpop (f, regs_available_for_popping, FALSE);
10361 /* We have popped either FP or SP.
10362 Move whichever one it is into the correct register. */
10363 popped_into = number_of_first_bit_set (regs_available_for_popping);
10364 move_to = number_of_first_bit_set (regs_to_pop);
10366 asm_fprintf (f, "\tmov\t%r, %r\n", move_to, popped_into);
10368 regs_to_pop &= ~(1 << move_to);
10370 --pops_needed;
10373 /* If we still have not popped everything then we must have only
10374 had one register available to us and we are now popping the SP. */
10375 if (pops_needed > 0)
10377 int popped_into;
10379 thumb_pushpop (f, regs_available_for_popping, FALSE);
10381 popped_into = number_of_first_bit_set (regs_available_for_popping);
10383 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, popped_into);
10385 /* assert (regs_to_pop == (1 << STACK_POINTER))
10386 assert (pops_needed == 1) */
10390 /* If necessary restore the a4 register. */
10391 if (restore_a4)
10393 if (reg_containing_return_addr != LR_REGNUM)
10395 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
10396 reg_containing_return_addr = LR_REGNUM;
10399 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
10402 if (eh_ofs)
10403 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
10405 /* Return to caller. */
10406 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
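/* A minimal stand-alone sketch of the two bit idioms used in
   thumb_exit () above: the matching loop strips the lowest set bit of
   a mask with `x &= ~(x & -x)', and number_of_first_bit_set () yields
   the index of that bit.  The helper names here are hypothetical and
   the code is illustrative only, assuming 32-bit register masks.  */

#if 0 /* Illustrative sketch, not built.  */
static int
sketch_first_bit_set (mask)
     unsigned long mask;
{
  int bit;

  /* Scan upwards from bit 0; the first set bit found is the lowest.  */
  for (bit = 0; bit < 32; bit++)
    if (mask & (1UL << bit))
      return bit;

  return -1; /* No bit set.  */
}

static int
sketch_count_matched_pops (available, required)
     unsigned long available;
     unsigned long required;
{
  int matched = 0;

  /* Pair one available register with one required register per
     iteration; `m & -m' isolates the lowest set bit of M, so the
     complemented AND clears exactly that bit.  */
  while (available != 0 && required != 0)
    {
      available &= ~(available & -available);
      required &= ~(required & -required);
      matched++;
    }

  return matched;
}
#endif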
10409 /* Emit code to push or pop registers to or from the stack. */
10411 static void
10412 thumb_pushpop (f, mask, push)
10413 FILE * f;
10414 int mask;
10415 int push;
10417 int regno;
10418 int lo_mask = mask & 0xFF;
10420 if (lo_mask == 0 && !push && (mask & (1 << 15)))
10422 /* Special case. Do not generate a POP PC statement here, do it in
10423 thumb_exit(). */
10424 thumb_exit (f, -1, NULL_RTX);
10425 return;
10428 fprintf (f, "\t%s\t{", push ? "push" : "pop");
10430 /* Look at the low registers first. */
10431 for (regno = 0; regno <= LAST_LO_REGNUM; regno++, lo_mask >>= 1)
10433 if (lo_mask & 1)
10435 asm_fprintf (f, "%r", regno);
10437 if ((lo_mask & ~1) != 0)
10438 fprintf (f, ", ");
10442 if (push && (mask & (1 << LR_REGNUM)))
10444 /* Catch pushing the LR. */
10445 if (mask & 0xFF)
10446 fprintf (f, ", ");
10448 asm_fprintf (f, "%r", LR_REGNUM);
10450 else if (!push && (mask & (1 << PC_REGNUM)))
10452 /* Catch popping the PC. */
10453 if (TARGET_INTERWORK || TARGET_BACKTRACE)
10455 /* The PC is never popped directly; instead
10456 it is popped into r3 and then BX is used. */
10457 fprintf (f, "}\n");
10459 thumb_exit (f, -1, NULL_RTX);
10461 return;
10463 else
10465 if (mask & 0xFF)
10466 fprintf (f, ", ");
10468 asm_fprintf (f, "%r", PC_REGNUM);
10472 fprintf (f, "}\n");
10475 void
10476 thumb_final_prescan_insn (insn)
10477 rtx insn;
10479 if (flag_print_asm_name)
10480 asm_fprintf (asm_out_file, "%@ 0x%04x\n",
10481 INSN_ADDRESSES (INSN_UID (insn)));
10485 thumb_shiftable_const (val)
10486 unsigned HOST_WIDE_INT val;
10488 unsigned HOST_WIDE_INT mask = 0xff;
10489 int i;
10491 if (val == 0) /* XXX */
10492 return 0;
10494 for (i = 0; i < 25; i++)
10495 if ((val & (mask << i)) == val)
10496 return 1;
10498 return 0;
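/* A worked example of the window test above: thumb_shiftable_const
   accepts exactly those constants whose set bits fit inside a single
   8-bit window, i.e. an 8-bit value shifted left by 0..24.  The
   helper below is a hypothetical, illustrative harness only.  */

#if 0 /* Illustrative sketch, not built.  */
static int
sketch_shiftable_const_examples ()
{
  return (thumb_shiftable_const (0xff)		 /* Window at bit 0.  */
	  && thumb_shiftable_const (0x1fe)	 /* 0xff << 1.  */
	  && thumb_shiftable_const (0xff000000)	 /* 0xff << 24.  */
	  && ! thumb_shiftable_const (0x101));	 /* Bits 0 and 8 span 9.  */
}
#endif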
10501 /* Returns nonzero if the current function contains,
10502 or might contain, a far jump. */
10505 thumb_far_jump_used_p (in_prologue)
10506 int in_prologue;
10508 rtx insn;
10510 /* This test is only important for leaf functions. */
10511 /* assert (!leaf_function_p ()); */
10513 /* If we have already decided that far jumps may be used,
10514 do not bother checking again, and always return true even if
10515 it turns out that they are not being used. Once we have made
10516 the decision that far jumps are present (and hence that the link
10517 register will be pushed onto the stack) we cannot go back on it. */
10518 if (cfun->machine->far_jump_used)
10519 return 1;
10521 /* If this function is not being called from the prologue/epilogue
10522 generation code then it must be being called from the
10523 INITIAL_ELIMINATION_OFFSET macro. */
10524 if (!in_prologue)
10526 /* In this case we know that we are being asked about the elimination
10527 of the arg pointer register. If that register is not being used,
10528 then there are no arguments on the stack, and we do not have to
10529 worry that a far jump might force the prologue to push the link
10530 register, changing the stack offsets. In this case we can just
10531 return false, since the presence of far jumps in the function will
10532 not affect stack offsets.
10534 If the arg pointer is live (or if it was live, but has now been
10535 eliminated and so set to dead) then we do have to test to see if
10536 the function might contain a far jump. This test can lead to some
10537 false negatives, since before reload is completed, the length of
10538 branch instructions is not known, so gcc defaults to returning their
10539 longest length, which in turn sets the far jump attribute to true.
10541 A false negative will not result in bad code being generated, but it
10542 will result in a needless push and pop of the link register. We
10543 hope that this does not occur too often. */
10544 if (regs_ever_live [ARG_POINTER_REGNUM])
10545 cfun->machine->arg_pointer_live = 1;
10546 else if (!cfun->machine->arg_pointer_live)
10547 return 0;
10550 /* Check to see if the function contains a branch
10551 insn with the far jump attribute set. */
10552 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
10554 if (GET_CODE (insn) == JUMP_INSN
10555 /* Ignore tablejump patterns. */
10556 && GET_CODE (PATTERN (insn)) != ADDR_VEC
10557 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
10558 && get_attr_far_jump (insn) == FAR_JUMP_YES
10561 /* Record the fact that we have decided that
10562 the function does use far jumps. */
10563 cfun->machine->far_jump_used = 1;
10564 return 1;
10568 return 0;
10571 /* Return nonzero if FUNC must be entered in ARM mode. */
10574 is_called_in_ARM_mode (func)
10575 tree func;
10577 if (TREE_CODE (func) != FUNCTION_DECL)
10578 abort ();
10580 /* Ignore the problem of functions whose address is taken. */
10581 if (TARGET_CALLEE_INTERWORKING && TREE_PUBLIC (func))
10582 return TRUE;
10584 #ifdef ARM_PE
10585 return lookup_attribute ("interfacearm", DECL_ATTRIBUTES (func)) != NULL_TREE;
10586 #else
10587 return FALSE;
10588 #endif
10591 /* The bits which aren't usefully expanded as rtl. */
10593 const char *
10594 thumb_unexpanded_epilogue ()
10596 int regno;
10597 int live_regs_mask = 0;
10598 int high_regs_pushed = 0;
10599 int leaf_function = leaf_function_p ();
10600 int had_to_push_lr;
10601 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
10603 if (return_used_this_function)
10604 return "";
10606 if (IS_NAKED (arm_current_func_type ()))
10607 return "";
10609 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
10610 if (THUMB_REG_PUSHED_P (regno))
10611 live_regs_mask |= 1 << regno;
10613 for (regno = 8; regno < 13; regno++)
10614 if (THUMB_REG_PUSHED_P (regno))
10615 high_regs_pushed++;
10617 /* The prologue may have pushed some high registers to use as
10618 work registers. E.g. the testsuite file:
10619 gcc/testsuite/gcc/gcc.c-torture/execute/complex-2.c
10620 compiles to produce:
10621 push {r4, r5, r6, r7, lr}
10622 mov r7, r9
10623 mov r6, r8
10624 push {r6, r7}
10625 as part of the prologue. We have to undo that pushing here. */
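/* For reference, the matching epilogue sequence (emitted by the loop
   below) pops those saved values into free low registers and moves
   them back up, e.g.:
	pop {r2, r3}
	mov r8, r2
	mov r9, r3
   because the Thumb push/pop encodings can only name r0-r7 (plus LR
   on push and PC on pop).  */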
10627 if (high_regs_pushed)
10629 int mask = live_regs_mask;
10630 int next_hi_reg;
10631 int size;
10632 int mode;
10634 #ifdef RTX_CODE
10635 /* We can deduce the registers used from the function's return value.
10636 This is more reliable than examining regs_ever_live[] because that
10637 will be set if the register is ever used in the function, not just if
10638 the register is used to hold a return value. */
10640 if (current_function_return_rtx != 0)
10641 mode = GET_MODE (current_function_return_rtx);
10642 else
10643 #endif
10644 mode = DECL_MODE (DECL_RESULT (current_function_decl));
10646 size = GET_MODE_SIZE (mode);
10648 /* Unless we are returning a type of size > 12, register r3 is
10649 available. */
10650 if (size < 13)
10651 mask |= 1 << 3;
10653 if (mask == 0)
10654 /* Oh dear! We have no low registers into which we can pop
10655 high registers! */
10656 internal_error
10657 ("no low registers available for popping high registers");
10659 for (next_hi_reg = 8; next_hi_reg < 13; next_hi_reg++)
10660 if (THUMB_REG_PUSHED_P (next_hi_reg))
10661 break;
10663 while (high_regs_pushed)
10665 /* Find lo register(s) into which the high register(s) can
10666 be popped. */
10667 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
10669 if (mask & (1 << regno))
10670 high_regs_pushed--;
10671 if (high_regs_pushed == 0)
10672 break;
10675 mask &= (2 << regno) - 1; /* A noop if regno == 8 */
10677 /* Pop the values into the low register(s). */
10678 thumb_pushpop (asm_out_file, mask, 0);
10680 /* Move the value(s) into the high registers. */
10681 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
10683 if (mask & (1 << regno))
10685 asm_fprintf (asm_out_file, "\tmov\t%r, %r\n", next_hi_reg,
10686 regno);
10688 for (next_hi_reg++; next_hi_reg < 13; next_hi_reg++)
10689 if (THUMB_REG_PUSHED_P (next_hi_reg))
10690 break;
10696 had_to_push_lr = (live_regs_mask || !leaf_function
10697 || thumb_far_jump_used_p (1));
10699 if (TARGET_BACKTRACE
10700 && ((live_regs_mask & 0xFF) == 0)
10701 && regs_ever_live [LAST_ARG_REGNUM] != 0)
10703 /* The stack backtrace structure creation code had to
10704 push R7 in order to get a work register, so we pop
10705 it now. */
10706 live_regs_mask |= (1 << LAST_LO_REGNUM);
10709 if (current_function_pretend_args_size == 0 || TARGET_BACKTRACE)
10711 if (had_to_push_lr
10712 && !is_called_in_ARM_mode (current_function_decl)
10713 && !eh_ofs)
10714 live_regs_mask |= 1 << PC_REGNUM;
10716 /* Either no argument registers were pushed or a backtrace
10717 structure was created which includes an adjusted stack
10718 pointer, so just pop everything. */
10719 if (live_regs_mask)
10720 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
10722 if (eh_ofs)
10723 thumb_exit (asm_out_file, 2, eh_ofs);
10724 /* We have either just popped the return address into the
10725 PC, or it was kept in LR for the entire function, or
10726 it is still on the stack because we do not want to
10727 return by doing a pop {pc}. */
10728 else if ((live_regs_mask & (1 << PC_REGNUM)) == 0)
10729 thumb_exit (asm_out_file,
10730 (had_to_push_lr
10731 && is_called_in_ARM_mode (current_function_decl)) ?
10732 -1 : LR_REGNUM, NULL_RTX);
10734 else
10736 /* Pop everything but the return address. */
10737 live_regs_mask &= ~(1 << PC_REGNUM);
10739 if (live_regs_mask)
10740 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
10742 if (had_to_push_lr)
10743 /* Get the return address into a temporary register. */
10744 thumb_pushpop (asm_out_file, 1 << LAST_ARG_REGNUM, 0);
10746 /* Remove the argument registers that were pushed onto the stack. */
10747 asm_fprintf (asm_out_file, "\tadd\t%r, %r, #%d\n",
10748 SP_REGNUM, SP_REGNUM,
10749 current_function_pretend_args_size);
10751 if (eh_ofs)
10752 thumb_exit (asm_out_file, 2, eh_ofs);
10753 else
10754 thumb_exit (asm_out_file,
10755 had_to_push_lr ? LAST_ARG_REGNUM : LR_REGNUM, NULL_RTX);
10758 return "";
10761 /* Functions to save and restore machine-specific function data. */
10763 static struct machine_function *
10764 arm_init_machine_status ()
10766 struct machine_function *machine;
10767 machine = (machine_function *) ggc_alloc_cleared (sizeof (machine_function));
10769 #if ARM_FT_UNKNOWN != 0
10770 machine->func_type = ARM_FT_UNKNOWN;
10771 #endif
10772 return machine;
10775 /* Return an RTX indicating where the return address to the
10776 calling function can be found. */
10779 arm_return_addr (count, frame)
10780 int count;
10781 rtx frame ATTRIBUTE_UNUSED;
10783 if (count != 0)
10784 return NULL_RTX;
10786 if (TARGET_APCS_32)
10787 return get_hard_reg_initial_val (Pmode, LR_REGNUM);
10788 else
10790 rtx lr = gen_rtx_AND (Pmode, gen_rtx_REG (Pmode, LR_REGNUM),
10791 GEN_INT (RETURN_ADDR_MASK26));
10792 return get_func_hard_reg_initial_val (cfun, lr);
10796 /* Do anything needed before RTL is emitted for each function. */
10798 void
10799 arm_init_expanders ()
10801 /* Arrange to initialize and mark the machine per-function status. */
10802 init_machine_status = arm_init_machine_status;
10805 HOST_WIDE_INT
10806 thumb_get_frame_size ()
10808 int regno;
10810 int base_size = ROUND_UP_WORD (get_frame_size ());
10811 int count_regs = 0;
10812 int entry_size = 0;
10813 int leaf;
10815 if (! TARGET_THUMB)
10816 abort ();
10818 if (! TARGET_ATPCS)
10819 return base_size;
10821 /* We need to know if we are a leaf function. Unfortunately, it
10822 is possible to be called after start_sequence has been called,
10823 in which case get_insns returns the insns for the sequence,
10824 not the function, and leaf_function_p would then return
10825 the incorrect result.
10827 To work around this, we cache the computed frame size. This
10828 works because we will only be calling RTL expanders that need
10829 to know about leaf functions once reload has completed, and the
10830 frame size cannot be changed after that time, so we can safely
10831 use the cached value. */
10833 if (reload_completed)
10834 return cfun->machine->frame_size;
10836 leaf = leaf_function_p ();
10838 /* A leaf function does not need any stack alignment if it has nothing
10839 on the stack. */
10840 if (leaf && base_size == 0)
10842 cfun->machine->frame_size = 0;
10843 return 0;
10846 /* We know that SP will be word aligned on entry, and we must
10847 preserve that condition at any subroutine call. But those are
10848 the only constraints. */
10850 /* Space for variadic functions. */
10851 if (current_function_pretend_args_size)
10852 entry_size += current_function_pretend_args_size;
10854 /* Space for pushed lo registers. */
10855 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
10856 if (THUMB_REG_PUSHED_P (regno))
10857 count_regs++;
10859 /* Space for backtrace structure. */
10860 if (TARGET_BACKTRACE)
10862 if (count_regs == 0 && regs_ever_live[LAST_ARG_REGNUM] != 0)
10863 entry_size += 20;
10864 else
10865 entry_size += 16;
10868 if (count_regs || !leaf || thumb_far_jump_used_p (1))
10869 count_regs++; /* LR */
10871 entry_size += count_regs * 4;
10872 count_regs = 0;
10874 /* Space for pushed hi regs. */
10875 for (regno = 8; regno < 13; regno++)
10876 if (THUMB_REG_PUSHED_P (regno))
10877 count_regs++;
10879 entry_size += count_regs * 4;
10881 if ((entry_size + base_size + current_function_outgoing_args_size) & 7)
10882 base_size += 4;
10883 if ((entry_size + base_size + current_function_outgoing_args_size) & 7)
10884 abort ();
10886 cfun->machine->frame_size = base_size;
10888 return base_size;
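/* A minimal sketch of the `& 7' padding step above: the ATPCS wants
   the stack kept double-word (8-byte) aligned, and since every
   component is already word-aligned, a single word of padding always
   restores alignment.  Hypothetical, illustrative helper only.  */

#if 0 /* Illustrative sketch, not built.  */
static int
sketch_atpcs_pad (entry_size, base_size, outgoing_args)
     int entry_size;
     int base_size;
     int outgoing_args;
{
  /* If the running total is not a multiple of 8, grow the local
     frame by one word.  */
  if ((entry_size + base_size + outgoing_args) & 7)
    base_size += 4;

  return base_size;
}
#endif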
10891 /* Generate the rest of a function's prologue. */
10893 void
10894 thumb_expand_prologue ()
10896 HOST_WIDE_INT amount = (thumb_get_frame_size ()
10897 + current_function_outgoing_args_size);
10898 unsigned long func_type;
10900 func_type = arm_current_func_type ();
10902 /* Naked functions don't have prologues. */
10903 if (IS_NAKED (func_type))
10904 return;
10906 if (IS_INTERRUPT (func_type))
10908 error ("interrupt Service Routines cannot be coded in Thumb mode");
10909 return;
10912 if (frame_pointer_needed)
10913 emit_insn (gen_movsi (hard_frame_pointer_rtx, stack_pointer_rtx));
10915 if (amount)
10917 amount = ROUND_UP_WORD (amount);
10919 if (amount < 512)
10920 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
10921 GEN_INT (- amount)));
10922 else
10924 int regno;
10925 rtx reg;
10927 /* The stack decrement is too big for an immediate value in a single
10928 insn. In theory we could issue multiple subtracts, but after
10929 three of them it becomes more space efficient to place the full
10930 value in the constant pool and load into a register. (Also the
10931 ARM debugger really likes to see only one stack decrement per
10932 function). So instead we look for a scratch register into which
10933 we can load the decrement, and then we subtract this from the
10934 stack pointer. Unfortunately, on the Thumb the only available
10935 scratch registers are the argument registers, and we cannot use
10936 these as they may hold arguments to the function. Instead we
10937 attempt to locate a call preserved register which is used by this
10938 function. If we can find one, then we know that it will have
10939 been pushed at the start of the prologue and so we can corrupt
10940 it now. */
10941 for (regno = LAST_ARG_REGNUM + 1; regno <= LAST_LO_REGNUM; regno++)
10942 if (THUMB_REG_PUSHED_P (regno)
10943 && !(frame_pointer_needed
10944 && (regno == THUMB_HARD_FRAME_POINTER_REGNUM)))
10945 break;
10947 if (regno > LAST_LO_REGNUM) /* Very unlikely. */
10949 rtx spare = gen_rtx (REG, SImode, IP_REGNUM);
10951 /* Choose an arbitrary, non-argument low register. */
10952 reg = gen_rtx (REG, SImode, LAST_LO_REGNUM);
10954 /* Save it by copying it into a high, scratch register. */
10955 emit_insn (gen_movsi (spare, reg));
10956 /* Add a USE to stop propagate_one_insn() from barfing. */
10957 emit_insn (gen_prologue_use (spare));
10959 /* Decrement the stack. */
10960 emit_insn (gen_movsi (reg, GEN_INT (- amount)));
10961 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
10962 reg));
10964 /* Restore the low register's original value. */
10965 emit_insn (gen_movsi (reg, spare));
10967 /* Emit a USE of the restored scratch register, so that flow
10968 analysis will not consider the restore redundant. The
10969 register won't be used again in this function and isn't
10970 restored by the epilogue. */
10971 emit_insn (gen_prologue_use (reg));
10973 else
10975 reg = gen_rtx (REG, SImode, regno);
10977 emit_insn (gen_movsi (reg, GEN_INT (- amount)));
10978 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
10979 reg));
10984 if (current_function_profile || TARGET_NO_SCHED_PRO)
10985 emit_insn (gen_blockage ());
10988 void
10989 thumb_expand_epilogue ()
10991 HOST_WIDE_INT amount = (thumb_get_frame_size ()
10992 + current_function_outgoing_args_size);
10994 /* Naked functions don't have epilogues. */
10995 if (IS_NAKED (arm_current_func_type ()))
10996 return;
10998 if (frame_pointer_needed)
10999 emit_insn (gen_movsi (stack_pointer_rtx, hard_frame_pointer_rtx));
11000 else if (amount)
11002 amount = ROUND_UP_WORD (amount);
11004 if (amount < 512)
11005 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
11006 GEN_INT (amount)));
11007 else
11009 /* r3 is always free in the epilogue. */
11010 rtx reg = gen_rtx (REG, SImode, LAST_ARG_REGNUM);
11012 emit_insn (gen_movsi (reg, GEN_INT (amount)));
11013 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
11017 /* Emit a USE (stack_pointer_rtx), so that
11018 the stack adjustment will not be deleted. */
11019 emit_insn (gen_prologue_use (stack_pointer_rtx));
11021 if (current_function_profile || TARGET_NO_SCHED_PRO)
11022 emit_insn (gen_blockage ());
11025 static void
11026 thumb_output_function_prologue (f, size)
11027 FILE * f;
11028 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
11030 int live_regs_mask = 0;
11031 int high_regs_pushed = 0;
11032 int regno;
11034 if (IS_NAKED (arm_current_func_type ()))
11035 return;
11037 if (is_called_in_ARM_mode (current_function_decl))
11039 const char * name;
11041 if (GET_CODE (DECL_RTL (current_function_decl)) != MEM)
11042 abort ();
11043 if (GET_CODE (XEXP (DECL_RTL (current_function_decl), 0)) != SYMBOL_REF)
11044 abort ();
11045 name = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
11047 /* Generate code sequence to switch us into Thumb mode. */
11048 /* The .code 32 directive has already been emitted by
11049 ASM_DECLARE_FUNCTION_NAME. */
11050 asm_fprintf (f, "\torr\t%r, %r, #1\n", IP_REGNUM, PC_REGNUM);
11051 asm_fprintf (f, "\tbx\t%r\n", IP_REGNUM);
11053 /* Generate a label, so that the debugger will notice the
11054 change in instruction sets. This label is also used by
11055 the assembler to bypass the ARM code when this function
11056 is called from a Thumb encoded function elsewhere in the
11057 same file. Hence the definition of STUB_NAME here must
11058 agree with the definition in gas/config/tc-arm.c */
11060 #define STUB_NAME ".real_start_of"
11062 fprintf (f, "\t.code\t16\n");
11063 #ifdef ARM_PE
11064 if (arm_dllexport_name_p (name))
11065 name = arm_strip_name_encoding (name);
11066 #endif
11067 asm_fprintf (f, "\t.globl %s%U%s\n", STUB_NAME, name);
11068 fprintf (f, "\t.thumb_func\n");
11069 asm_fprintf (f, "%s%U%s:\n", STUB_NAME, name);
11072 if (current_function_pretend_args_size)
11074 if (cfun->machine->uses_anonymous_args)
11076 int num_pushes;
11078 fprintf (f, "\tpush\t{");
11080 num_pushes = ARM_NUM_INTS (current_function_pretend_args_size);
11082 for (regno = LAST_ARG_REGNUM + 1 - num_pushes;
11083 regno <= LAST_ARG_REGNUM;
11084 regno++)
11085 asm_fprintf (f, "%r%s", regno,
11086 regno == LAST_ARG_REGNUM ? "" : ", ");
11088 fprintf (f, "}\n");
11090 else
11091 asm_fprintf (f, "\tsub\t%r, %r, #%d\n",
11092 SP_REGNUM, SP_REGNUM,
11093 current_function_pretend_args_size);
11096 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
11097 if (THUMB_REG_PUSHED_P (regno))
11098 live_regs_mask |= 1 << regno;
11100 if (live_regs_mask || !leaf_function_p () || thumb_far_jump_used_p (1))
11101 live_regs_mask |= 1 << LR_REGNUM;
11103 if (TARGET_BACKTRACE)
11105 int offset;
11106 int work_register = 0;
11107 int wr;
11109 /* We have been asked to create a stack backtrace structure.
11110 The code looks like this:
11112 0 .align 2
11113 0 func:
11114 0 sub SP, #16 Reserve space for 4 registers.
11115 2 push {R7} Get a work register.
11116 4 add R7, SP, #20 Get the stack pointer before the push.
11117 6 str R7, [SP, #8] Store the stack pointer (before reserving the space).
11118 8 mov R7, PC Get hold of the start of this code plus 12.
11119 10 str R7, [SP, #16] Store it.
11120 12 mov R7, FP Get hold of the current frame pointer.
11121 14 str R7, [SP, #4] Store it.
11122 16 mov R7, LR Get hold of the current return address.
11123 18 str R7, [SP, #12] Store it.
11124 20 add R7, SP, #16 Point at the start of the backtrace structure.
11125 22 mov FP, R7 Put this value into the frame pointer. */
11127 if ((live_regs_mask & 0xFF) == 0)
11129 /* See if the a4 register is free. */
11131 if (regs_ever_live [LAST_ARG_REGNUM] == 0)
11132 work_register = LAST_ARG_REGNUM;
11133 else /* We must push a register of our own */
11134 live_regs_mask |= (1 << LAST_LO_REGNUM);
11137 if (work_register == 0)
11139 /* Select a register from the list that will be pushed to
11140 use as our work register. */
11141 for (work_register = (LAST_LO_REGNUM + 1); work_register--;)
11142 if ((1 << work_register) & live_regs_mask)
11143 break;
11146 asm_fprintf
11147 (f, "\tsub\t%r, %r, #16\t%@ Create stack backtrace structure\n",
11148 SP_REGNUM, SP_REGNUM);
11150 if (live_regs_mask)
11151 thumb_pushpop (f, live_regs_mask, 1);
11153 for (offset = 0, wr = 1 << 15; wr != 0; wr >>= 1)
11154 if (wr & live_regs_mask)
11155 offset += 4;
11157 asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
11158 offset + 16 + current_function_pretend_args_size);
11160 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
11161 offset + 4);
11163 /* Make sure that the instruction fetching the PC is in the right place
11164 to calculate "start of backtrace creation code + 12". */
11165 if (live_regs_mask)
11167 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
11168 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
11169 offset + 12);
11170 asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
11171 ARM_HARD_FRAME_POINTER_REGNUM);
11172 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
11173 offset);
11175 else
11177 asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
11178 ARM_HARD_FRAME_POINTER_REGNUM);
11179 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
11180 offset);
11181 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
11182 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
11183 offset + 12);
11186 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, LR_REGNUM);
11187 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
11188 offset + 8);
11189 asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
11190 offset + 12);
11191 asm_fprintf (f, "\tmov\t%r, %r\t\t%@ Backtrace structure created\n",
11192 ARM_HARD_FRAME_POINTER_REGNUM, work_register);
11194 else if (live_regs_mask)
11195 thumb_pushpop (f, live_regs_mask, 1);
11197 for (regno = 8; regno < 13; regno++)
11198 if (THUMB_REG_PUSHED_P (regno))
11199 high_regs_pushed++;
11201 if (high_regs_pushed)
11203 int pushable_regs = 0;
11204 int mask = live_regs_mask & 0xff;
11205 int next_hi_reg;
11207 for (next_hi_reg = 12; next_hi_reg > LAST_LO_REGNUM; next_hi_reg--)
11208 if (THUMB_REG_PUSHED_P (next_hi_reg))
11209 break;
11211 pushable_regs = mask;
11213 if (pushable_regs == 0)
11215 /* Desperation time -- this probably will never happen. */
11216 if (THUMB_REG_PUSHED_P (LAST_ARG_REGNUM))
11217 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, LAST_ARG_REGNUM);
11218 mask = 1 << LAST_ARG_REGNUM;
11221 while (high_regs_pushed > 0)
11223 for (regno = LAST_LO_REGNUM; regno >= 0; regno--)
11225 if (mask & (1 << regno))
11227 asm_fprintf (f, "\tmov\t%r, %r\n", regno, next_hi_reg);
11229 high_regs_pushed--;
11231 if (high_regs_pushed)
11233 for (next_hi_reg--; next_hi_reg > LAST_LO_REGNUM;
11234 next_hi_reg--)
11235 if (THUMB_REG_PUSHED_P (next_hi_reg))
11236 break;
11238 else
11240 mask &= ~((1 << regno) - 1);
11241 break;
11246 thumb_pushpop (f, mask, 1);
11249 if (pushable_regs == 0
11250 && (THUMB_REG_PUSHED_P (LAST_ARG_REGNUM)))
11251 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
11255 /* Handle the case of a double word load into a low register from
11256 a computed memory address. The computed address may involve a
11257 register which is overwritten by the load. */
11259 const char *
11260 thumb_load_double_from_address (operands)
11261 rtx *operands;
11263 rtx addr;
11264 rtx base;
11265 rtx offset;
11266 rtx arg1;
11267 rtx arg2;
11269 if (GET_CODE (operands[0]) != REG)
11270 abort ();
11272 if (GET_CODE (operands[1]) != MEM)
11273 abort ();
11275 /* Get the memory address. */
11276 addr = XEXP (operands[1], 0);
11278 /* Work out how the memory address is computed. */
11279 switch (GET_CODE (addr))
11281 case REG:
11282 operands[2] = gen_rtx (MEM, SImode,
11283 plus_constant (XEXP (operands[1], 0), 4));
11285 if (REGNO (operands[0]) == REGNO (addr))
11287 output_asm_insn ("ldr\t%H0, %2", operands);
11288 output_asm_insn ("ldr\t%0, %1", operands);
11290 else
11292 output_asm_insn ("ldr\t%0, %1", operands);
11293 output_asm_insn ("ldr\t%H0, %2", operands);
11295 break;
11297 case CONST:
11298 /* Compute <address> + 4 for the high order load. */
11299 operands[2] = gen_rtx (MEM, SImode,
11300 plus_constant (XEXP (operands[1], 0), 4));
11302 output_asm_insn ("ldr\t%0, %1", operands);
11303 output_asm_insn ("ldr\t%H0, %2", operands);
11304 break;
11306 case PLUS:
11307 arg1 = XEXP (addr, 0);
11308 arg2 = XEXP (addr, 1);
11310 if (CONSTANT_P (arg1))
11311 base = arg2, offset = arg1;
11312 else
11313 base = arg1, offset = arg2;
11315 if (GET_CODE (base) != REG)
11316 abort ();
11318 /* Catch the case of <address> = <reg> + <reg> */
11319 if (GET_CODE (offset) == REG)
11321 int reg_offset = REGNO (offset);
11322 int reg_base = REGNO (base);
11323 int reg_dest = REGNO (operands[0]);
11325 /* Add the base and offset registers together into the
11326 higher destination register. */
11327 asm_fprintf (asm_out_file, "\tadd\t%r, %r, %r",
11328 reg_dest + 1, reg_base, reg_offset);
11330 /* Load the lower destination register from the address in
11331 the higher destination register. */
11332 asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #0]",
11333 reg_dest, reg_dest + 1);
11335 /* Load the higher destination register from its own address
11336 plus 4. */
11337 asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #4]",
11338 reg_dest + 1, reg_dest + 1);
11340 else
11342 /* Compute <address> + 4 for the high order load. */
11343 operands[2] = gen_rtx (MEM, SImode,
11344 plus_constant (XEXP (operands[1], 0), 4));
11346 /* If the computed address is held in the low order register
11347 then load the high order register first, otherwise always
11348 load the low order register first. */
11349 if (REGNO (operands[0]) == REGNO (base))
11351 output_asm_insn ("ldr\t%H0, %2", operands);
11352 output_asm_insn ("ldr\t%0, %1", operands);
11354 else
11356 output_asm_insn ("ldr\t%0, %1", operands);
11357 output_asm_insn ("ldr\t%H0, %2", operands);
11360 break;
11362 case LABEL_REF:
11363 /* With no registers to worry about we can just load the value
11364 directly. */
11365 operands[2] = gen_rtx (MEM, SImode,
11366 plus_constant (XEXP (operands[1], 0), 4));
11368 output_asm_insn ("ldr\t%H0, %2", operands);
11369 output_asm_insn ("ldr\t%0, %1", operands);
11370 break;
11372 default:
11373 abort ();
11374 break;
11377 return "";
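/* The ordering rule above in miniature: when loading the pair
   {Rd, Rd+1} from [Rb], loading Rd first would destroy the address if
   Rd == Rb, so the high word is fetched first in exactly that case.
   A hypothetical sketch on a simulated register file, for
   illustration only.  */

#if 0 /* Illustrative sketch, not built.  */
static void
sketch_double_load (regs, dest, base)
     unsigned long regs[16];	/* Simulated register file.  */
     int dest;			/* Low destination register number.  */
     int base;			/* Register holding the address.  */
{
  unsigned long *mem = (unsigned long *) regs[base];

  if (dest == base)
    {
      /* The address lives in the low destination register: load the
	 high word first so the address survives both loads.  */
      regs[dest + 1] = mem[1];
      regs[dest] = mem[0];
    }
  else
    {
      regs[dest] = mem[0];
      regs[dest + 1] = mem[1];
    }
}
#endif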
11381 const char *
11382 thumb_output_move_mem_multiple (n, operands)
11383 int n;
11384 rtx * operands;
11386 rtx tmp;
11388 switch (n)
11390 case 2:
11391 if (REGNO (operands[4]) > REGNO (operands[5]))
11393 tmp = operands[4];
11394 operands[4] = operands[5];
11395 operands[5] = tmp;
11397 output_asm_insn ("ldmia\t%1!, {%4, %5}", operands);
11398 output_asm_insn ("stmia\t%0!, {%4, %5}", operands);
11399 break;
11401 case 3:
11402 if (REGNO (operands[4]) > REGNO (operands[5]))
11404 tmp = operands[4];
11405 operands[4] = operands[5];
11406 operands[5] = tmp;
11408 if (REGNO (operands[5]) > REGNO (operands[6]))
11410 tmp = operands[5];
11411 operands[5] = operands[6];
11412 operands[6] = tmp;
11414 if (REGNO (operands[4]) > REGNO (operands[5]))
11416 tmp = operands[4];
11417 operands[4] = operands[5];
11418 operands[5] = tmp;
11421 output_asm_insn ("ldmia\t%1!, {%4, %5, %6}", operands);
11422 output_asm_insn ("stmia\t%0!, {%4, %5, %6}", operands);
11423 break;
11425 default:
11426 abort ();
11429 return "";
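/* ldmia/stmia register lists must be in ascending order, so the
   compare-and-swap steps above form a sorting network -- one exchange
   for two registers, three for three.  The same three-element network
   on plain ints, as a hypothetical sketch: */

#if 0 /* Illustrative sketch, not built.  */
static void
sketch_sort3 (a, b, c)
     int *a;
     int *b;
     int *c;
{
  int tmp;

  /* After these three conditional exchanges, *a <= *b <= *c.  */
  if (*a > *b) { tmp = *a; *a = *b; *b = tmp; }
  if (*b > *c) { tmp = *b; *b = *c; *c = tmp; }
  if (*a > *b) { tmp = *a; *a = *b; *b = tmp; }
}
#endif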
11432 /* Routines for generating rtl. */
11434 void
11435 thumb_expand_movstrqi (operands)
11436 rtx * operands;
11438 rtx out = copy_to_mode_reg (SImode, XEXP (operands[0], 0));
11439 rtx in = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
11440 HOST_WIDE_INT len = INTVAL (operands[2]);
11441 HOST_WIDE_INT offset = 0;
11443 while (len >= 12)
11445 emit_insn (gen_movmem12b (out, in, out, in));
11446 len -= 12;
11449 if (len >= 8)
11451 emit_insn (gen_movmem8b (out, in, out, in));
11452 len -= 8;
11455 if (len >= 4)
11457 rtx reg = gen_reg_rtx (SImode);
11458 emit_insn (gen_movsi (reg, gen_rtx (MEM, SImode, in)));
11459 emit_insn (gen_movsi (gen_rtx (MEM, SImode, out), reg));
11460 len -= 4;
11461 offset += 4;
11464 if (len >= 2)
11466 rtx reg = gen_reg_rtx (HImode);
11467 emit_insn (gen_movhi (reg, gen_rtx (MEM, HImode,
11468 plus_constant (in, offset))));
11469 emit_insn (gen_movhi (gen_rtx (MEM, HImode, plus_constant (out, offset)),
11470 reg));
11471 len -= 2;
11472 offset += 2;
11475 if (len)
11477 rtx reg = gen_reg_rtx (QImode);
11478 emit_insn (gen_movqi (reg, gen_rtx (MEM, QImode,
11479 plus_constant (in, offset))));
11480 emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (out, offset)),
11481 reg));
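/* The chunk schedule above, modelled on plain memory (this sketch
   assumes memcpy from <string.h>).  Blocks of 12 and 8 bytes map to
   the multi-word ldmia/stmia patterns; the tail is one word, one
   halfword and one byte move as needed.  Illustrative only.  */

#if 0 /* Illustrative sketch, not built.  */
static void
sketch_movstrqi (dst, src, len)
     unsigned char *dst;
     const unsigned char *src;
     long len;
{
  while (len >= 12)
    {
      memcpy (dst, src, 12);	/* movmem12b: three-word ldmia/stmia.  */
      dst += 12, src += 12, len -= 12;
    }

  if (len >= 8)
    {
      memcpy (dst, src, 8);	/* movmem8b: two-word ldmia/stmia.  */
      dst += 8, src += 8, len -= 8;
    }

  if (len >= 4)
    {
      memcpy (dst, src, 4);	/* One SImode move.  */
      dst += 4, src += 4, len -= 4;
    }

  if (len >= 2)
    {
      memcpy (dst, src, 2);	/* One HImode move.  */
      dst += 2, src += 2, len -= 2;
    }

  if (len)
    *dst = *src;		/* Final QImode move.  */
}
#endif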
11486 thumb_cmp_operand (op, mode)
11487 rtx op;
11488 enum machine_mode mode;
11490 return ((GET_CODE (op) == CONST_INT
11491 && (unsigned HOST_WIDE_INT) (INTVAL (op)) < 256)
11492 || register_operand (op, mode));
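/* The 0..255 window above reflects the Thumb CMP-immediate encoding,
   which carries only an 8-bit unsigned operand; larger values must be
   compared from a register.  */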
11495 static const char *
11496 thumb_condition_code (x, invert)
11497 rtx x;
11498 int invert;
11500 static const char * const conds[] =
11502 "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
11503 "hi", "ls", "ge", "lt", "gt", "le"
11505 int val;
11507 switch (GET_CODE (x))
11509 case EQ: val = 0; break;
11510 case NE: val = 1; break;
11511 case GEU: val = 2; break;
11512 case LTU: val = 3; break;
11513 case GTU: val = 8; break;
11514 case LEU: val = 9; break;
11515 case GE: val = 10; break;
11516 case LT: val = 11; break;
11517 case GT: val = 12; break;
11518 case LE: val = 13; break;
11519 default:
11520 abort ();
11523 return conds[val ^ invert];
11526 /* Handle storing a half-word to memory during reload. */
11528 void
11529 thumb_reload_out_hi (operands)
11530 rtx * operands;
11532 emit_insn (gen_thumb_movhi_clobber (operands[0], operands[1], operands[2]));
11535 /* Handle loading a half-word from memory during reload. */
11537 void
11538 thumb_reload_in_hi (operands)
11539 rtx * operands ATTRIBUTE_UNUSED;
11541 abort ();
11544 /* Return the length of a function name prefix
11545 that starts with the character 'c'. */
11547 static int
11548 arm_get_strip_length (c)
11549 int c;
11551 switch (c)
11553 ARM_NAME_ENCODING_LENGTHS
11554 default: return 0;
11558 /* Return a pointer to a function's name with any
11559 and all prefix encodings stripped from it. */
11561 const char *
11562 arm_strip_name_encoding (name)
11563 const char * name;
11565 int skip;
11567 while ((skip = arm_get_strip_length (* name)))
11568 name += skip;
11570 return name;
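/* Usage example: the loop above peels off every recognized prefix
   character in turn.  Assuming '*' is one of the encoded prefixes (as
   the verbatim test in arm_asm_output_labelref below suggests), a call
   such as

	arm_strip_name_encoding ("*foo")

   returns a pointer to "foo", while a name with no recognized prefix
   comes back unchanged.  */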
11573 /* If there is a '*' anywhere in the name's prefix, then
11574 emit the stripped name verbatim, otherwise prepend an
11575 underscore if leading underscores are being used. */
11577 void
11578 arm_asm_output_labelref (stream, name)
11579 FILE * stream;
11580 const char * name;
11582 int skip;
11583 int verbatim = 0;
11585 while ((skip = arm_get_strip_length (* name)))
11587 verbatim |= (*name == '*');
11588 name += skip;
11591 if (verbatim)
11592 fputs (name, stream);
11593 else
11594 asm_fprintf (stream, "%U%s", name);
11597 rtx aof_pic_label;
11599 #ifdef AOF_ASSEMBLER
11600 /* Special functions only needed when producing AOF syntax assembler. */
11602 struct pic_chain
11604 struct pic_chain * next;
11605 const char * symname;
11608 static struct pic_chain * aof_pic_chain = NULL;
11611 aof_pic_entry (x)
11612 rtx x;
11614 struct pic_chain ** chainp;
11615 int offset;
11617 if (aof_pic_label == NULL_RTX)
11619 aof_pic_label = gen_rtx_SYMBOL_REF (Pmode, "x$adcons");
11622 for (offset = 0, chainp = &aof_pic_chain; *chainp;
11623 offset += 4, chainp = &(*chainp)->next)
11624 if ((*chainp)->symname == XSTR (x, 0))
11625 return plus_constant (aof_pic_label, offset);
11627 *chainp = (struct pic_chain *) xmalloc (sizeof (struct pic_chain));
11628 (*chainp)->next = NULL;
11629 (*chainp)->symname = XSTR (x, 0);
11630 return plus_constant (aof_pic_label, offset);
11633 void
11634 aof_dump_pic_table (f)
11635 FILE * f;
11637 struct pic_chain * chain;
11639 if (aof_pic_chain == NULL)
11640 return;
11642 asm_fprintf (f, "\tAREA |%r$$adcons|, BASED %r\n",
11643 PIC_OFFSET_TABLE_REGNUM,
11644 PIC_OFFSET_TABLE_REGNUM);
11645 fputs ("|x$adcons|\n", f);
11647 for (chain = aof_pic_chain; chain; chain = chain->next)
11649 fputs ("\tDCD\t", f);
11650 assemble_name (f, chain->symname);
11651 fputs ("\n", f);
11655 int arm_text_section_count = 1;
11657 char *
11658 aof_text_section ()
11660 static char buf[100];
11661 sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
11662 arm_text_section_count++);
11663 if (flag_pic)
11664 strcat (buf, ", PIC, REENTRANT");
11665 return buf;
11668 static int arm_data_section_count = 1;
11670 char *
11671 aof_data_section ()
11673 static char buf[100];
11674 sprintf (buf, "\tAREA |C$$data%d|, DATA", arm_data_section_count++);
11675 return buf;
11678 /* The AOF assembler is religiously strict about declarations of
11679 imported and exported symbols, so that it is impossible to declare
11680 a function as imported near the beginning of the file, and then to
11681 export it later on. It is, however, possible to delay the decision
11682 until all the functions in the file have been compiled. To get
11683 around this, we maintain a list of the imports and exports, and
11684 delete from it any that are subsequently defined. At the end of
11685 compilation we spit the remainder of the list out before the END
11686 directive. */
11688 struct import
11690 struct import * next;
11691 const char * name;
11694 static struct import * imports_list = NULL;
11696 void
11697 aof_add_import (name)
11698 const char * name;
11700 struct import * new;
11702 for (new = imports_list; new; new = new->next)
11703 if (new->name == name)
11704 return;
11706 new = (struct import *) xmalloc (sizeof (struct import));
11707 new->next = imports_list;
11708 imports_list = new;
11709 new->name = name;
11712 void
11713 aof_delete_import (name)
11714 const char * name;
11716 struct import ** old;
11718 for (old = &imports_list; *old; old = & (*old)->next)
11720 if ((*old)->name == name)
11722 *old = (*old)->next;
11723 return;
11728 int arm_main_function = 0;
11730 void
11731 aof_dump_imports (f)
11732 FILE * f;
11734 /* The AOF assembler needs this to cause the startup code to be extracted
11735 from the library. Bringing in __main causes the whole thing to work
11736 automagically. */
11737 if (arm_main_function)
11739 text_section ();
11740 fputs ("\tIMPORT __main\n", f);
11741 fputs ("\tDCD __main\n", f);
11744 /* Now dump the remaining imports. */
11745 while (imports_list)
11747 fprintf (f, "\tIMPORT\t");
11748 assemble_name (f, imports_list->name);
11749 fputc ('\n', f);
11750 imports_list = imports_list->next;
11754 static void
11755 aof_globalize_label (stream, name)
11756 FILE *stream;
11757 const char *name;
11759 default_globalize_label (stream, name);
11760 if (! strcmp (name, "main"))
11761 arm_main_function = 1;
11763 #endif /* AOF_ASSEMBLER */
11765 #ifdef OBJECT_FORMAT_ELF
11766 /* Switch to an arbitrary section NAME with attributes as specified
11767 by FLAGS. ALIGN specifies any known alignment requirements for
11768 the section; 0 if the default should be used.
11770 Differs from the default elf version only in the prefix character
11771 used before the section type. */
11773 static void
11774 arm_elf_asm_named_section (name, flags)
11775 const char *name;
11776 unsigned int flags;
11778 char flagchars[10], *f = flagchars;
11780 if (! named_section_first_declaration (name))
11782 fprintf (asm_out_file, "\t.section\t%s\n", name);
11783 return;
11786 if (!(flags & SECTION_DEBUG))
11787 *f++ = 'a';
11788 if (flags & SECTION_WRITE)
11789 *f++ = 'w';
11790 if (flags & SECTION_CODE)
11791 *f++ = 'x';
11792 if (flags & SECTION_SMALL)
11793 *f++ = 's';
11794 if (flags & SECTION_MERGE)
11795 *f++ = 'M';
11796 if (flags & SECTION_STRINGS)
11797 *f++ = 'S';
11798 if (flags & SECTION_TLS)
11799 *f++ = 'T';
11800 *f = '\0';
11802 fprintf (asm_out_file, "\t.section\t%s,\"%s\"", name, flagchars);
11804 if (!(flags & SECTION_NOTYPE))
11806 const char *type;
11808 if (flags & SECTION_BSS)
11809 type = "nobits";
11810 else
11811 type = "progbits";
11813 fprintf (asm_out_file, ",%%%s", type);
11815 if (flags & SECTION_ENTSIZE)
11816 fprintf (asm_out_file, ",%d", flags & SECTION_ENTSIZE);
11819 putc ('\n', asm_out_file);
11821 #endif
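/* Sample output of arm_elf_asm_named_section above: for an allocated,
   writable data section it emits a directive of the form

	.section .mydata,"aw",%progbits

   (".mydata" being a made-up name for illustration); the default ELF
   hook would emit '@' rather than '%' before the section type.  */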
11823 #ifndef ARM_PE
11824 /* Symbols in the text segment can be accessed without indirecting via the
11825 constant pool; it may take an extra binary operation, but this is still
11826 faster than indirecting via memory. Don't do this when not optimizing,
11827 since we won't be calculating all of the offsets necessary to do this
11828 simplification. */
11830 static void
11831 arm_encode_section_info (decl, first)
11832 tree decl;
11833 int first;
11835 /* This doesn't work with AOF syntax, since the string table may be in
11836 a different AREA. */
11837 #ifndef AOF_ASSEMBLER
11838 if (optimize > 0 && TREE_CONSTANT (decl)
11839 && (!flag_writable_strings || TREE_CODE (decl) != STRING_CST))
11841 rtx rtl = (TREE_CODE_CLASS (TREE_CODE (decl)) != 'd'
11842 ? TREE_CST_RTL (decl) : DECL_RTL (decl));
11843 SYMBOL_REF_FLAG (XEXP (rtl, 0)) = 1;
11845 #endif
11847 /* If we are referencing a function that is weak then encode a long call
11848 flag in the function name; otherwise, if the function is static or
11849 known to be defined in this file, encode a short call flag. */
11850 if (first && TREE_CODE_CLASS (TREE_CODE (decl)) == 'd')
11852 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_WEAK (decl))
11853 arm_encode_call_attribute (decl, LONG_CALL_FLAG_CHAR);
11854 else if (! TREE_PUBLIC (decl))
11855 arm_encode_call_attribute (decl, SHORT_CALL_FLAG_CHAR);
11858 #endif /* !ARM_PE */
11860 static void
11861 arm_internal_label (stream, prefix, labelno)
11862 FILE *stream;
11863 const char *prefix;
11864 unsigned long labelno;
11866 if (arm_ccfsm_state == 3 && (unsigned) arm_target_label == labelno
11867 && !strcmp (prefix, "L"))
11869 arm_ccfsm_state = 0;
11870 arm_target_insn = NULL;
11872 default_internal_label (stream, prefix, labelno);
11875 /* Output code to add DELTA to the first argument, and then jump
11876 to FUNCTION. Used for C++ multiple inheritance. */
11878 static void
11879 arm_output_mi_thunk (file, thunk, delta, vcall_offset, function)
11880 FILE *file;
11881 tree thunk ATTRIBUTE_UNUSED;
11882 HOST_WIDE_INT delta;
11883 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED;
11884 tree function;
11886 int mi_delta = delta;
11887 const char *const mi_op = mi_delta < 0 ? "sub" : "add";
11888 int shift = 0;
11889 int this_regno = (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)))
11890 ? 1 : 0);
11891 if (mi_delta < 0)
11892 mi_delta = - mi_delta;
11893 while (mi_delta != 0)
11895 if ((mi_delta & (3 << shift)) == 0)
11896 shift += 2;
11897 else
11899 asm_fprintf (file, "\t%s\t%r, %r, #%d\n",
11900 mi_op, this_regno, this_regno,
11901 mi_delta & (0xff << shift));
11902 mi_delta &= ~(0xff << shift);
11903 shift += 8;
11906 fputs ("\tb\t", file);
11907 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
11908 if (NEED_PLT_RELOC)
11909 fputs ("(PLT)", file);
11910 fputc ('\n', file);
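/* A stand-alone model of the delta loop above, which splits a
   constant into add/sub immediates the ARM can encode -- an 8-bit
   value at an even bit position.  E.g. 0x1234 splits into 0x234
   (0x8d << 2) and 0x1000 (0x1 << 12).  Hypothetical helper name,
   illustrative only.  */

#if 0 /* Illustrative sketch, not built.  */
static int
sketch_split_arm_immediates (delta, chunks)
     unsigned long delta;
     unsigned long chunks[8];	/* Out: one encodable immediate each.  */
{
  int shift = 0;
  int n = 0;

  while (delta != 0)
    {
      if ((delta & (3UL << shift)) == 0)
	shift += 2;		/* Advance to the next even bit pair.  */
      else
	{
	  /* An 8-bit window starting at an even bit is a valid ARM
	     data-processing immediate; grab it and clear those bits.  */
	  chunks[n++] = delta & (0xffUL << shift);
	  delta &= ~(0xffUL << shift);
	  shift += 8;
	}
    }

  return n;
}
#endif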