1 /* Output routines for GCC for ARM.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003
3 Free Software Foundation, Inc.
4 Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
5 and Martin Simmons (@harleqn.co.uk).
6 More major hacks by Richard Earnshaw (rearnsha@arm.com).
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify it
11 under the terms of the GNU General Public License as published
12 by the Free Software Foundation; either version 2, or (at your
13 option) any later version.
15 GCC is distributed in the hope that it will be useful, but WITHOUT
16 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
17 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
18 License for more details.
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING. If not, write to
22 the Free Software Foundation, 59 Temple Place - Suite 330,
23 Boston, MA 02111-1307, USA. */
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "tm.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "obstack.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "real.h"
35 #include "insn-config.h"
36 #include "conditions.h"
37 #include "output.h"
38 #include "insn-attr.h"
39 #include "flags.h"
40 #include "reload.h"
41 #include "function.h"
42 #include "expr.h"
43 #include "optabs.h"
44 #include "toplev.h"
45 #include "recog.h"
46 #include "ggc.h"
47 #include "except.h"
48 #include "c-pragma.h"
49 #include "integrate.h"
50 #include "tm_p.h"
51 #include "target.h"
52 #include "target-def.h"
54 /* Forward definitions of types. */
55 typedef struct minipool_node Mnode;
56 typedef struct minipool_fixup Mfix;
58 const struct attribute_spec arm_attribute_table[];
60 /* Forward function declarations. */
61 static void arm_add_gc_roots (void);
62 static int arm_gen_constant (enum rtx_code, enum machine_mode, HOST_WIDE_INT,
63 rtx, rtx, int, int);
64 static unsigned bit_count (unsigned long);
65 static int arm_address_register_rtx_p (rtx, int);
66 static int arm_legitimate_index_p (enum machine_mode, rtx, int);
67 static int thumb_base_register_rtx_p (rtx, enum machine_mode, int);
68 inline static int thumb_index_register_rtx_p (rtx, int);
69 static int const_ok_for_op (HOST_WIDE_INT, enum rtx_code);
70 static rtx emit_multi_reg_push (int);
71 static rtx emit_sfm (int, int);
72 #ifndef AOF_ASSEMBLER
73 static bool arm_assemble_integer (rtx, unsigned int, int);
74 #endif
75 static const char *fp_const_from_val (REAL_VALUE_TYPE *);
76 static arm_cc get_arm_condition_code (rtx);
77 static void init_fpa_table (void);
78 static HOST_WIDE_INT int_log2 (HOST_WIDE_INT);
79 static rtx is_jump_table (rtx);
80 static const char *output_multi_immediate (rtx *, const char *, const char *,
81 int, HOST_WIDE_INT);
82 static void print_multi_reg (FILE *, const char *, int, int);
83 static const char *shift_op (rtx, HOST_WIDE_INT *);
84 static struct machine_function *arm_init_machine_status (void);
85 static int number_of_first_bit_set (int);
86 static void replace_symbols_in_block (tree, rtx, rtx);
87 static void thumb_exit (FILE *, int, rtx);
88 static void thumb_pushpop (FILE *, int, int);
89 static const char *thumb_condition_code (rtx, int);
91 static HOST_WIDE_INT get_jump_table_size (rtx);
92 static Mnode *move_minipool_fix_forward_ref (Mnode *, Mnode *, HOST_WIDE_INT);
93 static Mnode *add_minipool_forward_ref (Mfix *);
94 static Mnode *move_minipool_fix_backward_ref (Mnode *, Mnode *, HOST_WIDE_INT);
95 static Mnode *add_minipool_backward_ref (Mfix *);
96 static void assign_minipool_offsets (Mfix *);
97 static void arm_print_value (FILE *, rtx);
98 static void dump_minipool (rtx);
99 static int arm_barrier_cost (rtx);
100 static Mfix *create_fix_barrier (Mfix *, HOST_WIDE_INT);
101 static void push_minipool_barrier (rtx, HOST_WIDE_INT);
102 static void push_minipool_fix (rtx, HOST_WIDE_INT, rtx *, enum machine_mode,
103 rtx);
104 static void arm_reorg (void);
105 static bool note_invalid_constants (rtx, HOST_WIDE_INT, int);
106 static int current_file_function_operand (rtx);
107 static unsigned long arm_compute_save_reg0_reg12_mask (void);
108 static unsigned long arm_compute_save_reg_mask (void);
109 static unsigned long arm_isr_value (tree);
110 static unsigned long arm_compute_func_type (void);
111 static tree arm_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
112 static tree arm_handle_isr_attribute (tree *, tree, tree, int, bool *);
113 static void arm_output_function_epilogue (FILE *, HOST_WIDE_INT);
114 static void arm_output_function_prologue (FILE *, HOST_WIDE_INT);
115 static void thumb_output_function_prologue (FILE *, HOST_WIDE_INT);
116 static int arm_comp_type_attributes (tree, tree);
117 static void arm_set_default_type_attributes (tree);
118 static int arm_adjust_cost (rtx, rtx, rtx, int);
119 static int arm_use_dfa_pipeline_interface (void);
120 static int count_insns_for_constant (HOST_WIDE_INT, int);
121 static int arm_get_strip_length (int);
122 static bool arm_function_ok_for_sibcall (tree, tree);
123 static void arm_internal_label (FILE *, const char *, unsigned long);
124 static void arm_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
125 tree);
126 static int arm_rtx_costs_1 (rtx, enum rtx_code, enum rtx_code);
127 static bool arm_rtx_costs (rtx, int, int, int *);
128 static int arm_address_cost (rtx);
129 static bool arm_memory_load_p (rtx);
130 static bool arm_cirrus_insn_p (rtx);
131 static void cirrus_reorg (rtx);
132 static void arm_init_builtins (void);
133 static rtx arm_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
134 static void arm_init_iwmmxt_builtins (void);
135 static rtx safe_vector_operand (rtx, enum machine_mode);
136 static rtx arm_expand_binop_builtin (enum insn_code, tree, rtx);
137 static rtx arm_expand_unop_builtin (enum insn_code, tree, rtx, int);
140 #ifdef OBJECT_FORMAT_ELF
141 static void arm_elf_asm_named_section (const char *, unsigned int);
142 #endif
143 #ifndef ARM_PE
144 static void arm_encode_section_info (tree, rtx, int);
145 #endif
146 #ifdef AOF_ASSEMBLER
147 static void aof_globalize_label (FILE *, const char *);
148 static void aof_dump_imports (FILE *);
149 static void aof_dump_pic_table (FILE *);
150 static void aof_file_start (void);
151 static void aof_file_end (void);
152 #endif
155 /* Initialize the GCC target structure. */
156 #ifdef TARGET_DLLIMPORT_DECL_ATTRIBUTES
157 #undef TARGET_MERGE_DECL_ATTRIBUTES
158 #define TARGET_MERGE_DECL_ATTRIBUTES merge_dllimport_decl_attributes
159 #endif
161 #undef TARGET_ATTRIBUTE_TABLE
162 #define TARGET_ATTRIBUTE_TABLE arm_attribute_table
164 #ifdef AOF_ASSEMBLER
165 #undef TARGET_ASM_BYTE_OP
166 #define TARGET_ASM_BYTE_OP "\tDCB\t"
167 #undef TARGET_ASM_ALIGNED_HI_OP
168 #define TARGET_ASM_ALIGNED_HI_OP "\tDCW\t"
169 #undef TARGET_ASM_ALIGNED_SI_OP
170 #define TARGET_ASM_ALIGNED_SI_OP "\tDCD\t"
171 #undef TARGET_ASM_GLOBALIZE_LABEL
172 #define TARGET_ASM_GLOBALIZE_LABEL aof_globalize_label
173 #undef TARGET_ASM_FILE_START
174 #define TARGET_ASM_FILE_START aof_file_start
175 #undef TARGET_ASM_FILE_END
176 #define TARGET_ASM_FILE_END aof_file_end
177 #else
178 #undef TARGET_ASM_ALIGNED_SI_OP
179 #define TARGET_ASM_ALIGNED_SI_OP NULL
180 #undef TARGET_ASM_INTEGER
181 #define TARGET_ASM_INTEGER arm_assemble_integer
182 #endif
184 #undef TARGET_ASM_FUNCTION_PROLOGUE
185 #define TARGET_ASM_FUNCTION_PROLOGUE arm_output_function_prologue
187 #undef TARGET_ASM_FUNCTION_EPILOGUE
188 #define TARGET_ASM_FUNCTION_EPILOGUE arm_output_function_epilogue
190 #undef TARGET_COMP_TYPE_ATTRIBUTES
191 #define TARGET_COMP_TYPE_ATTRIBUTES arm_comp_type_attributes
193 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
194 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES arm_set_default_type_attributes
196 #undef TARGET_SCHED_ADJUST_COST
197 #define TARGET_SCHED_ADJUST_COST arm_adjust_cost
199 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
200 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE arm_use_dfa_pipeline_interface
202 #undef TARGET_ENCODE_SECTION_INFO
203 #ifdef ARM_PE
204 #define TARGET_ENCODE_SECTION_INFO arm_pe_encode_section_info
205 #else
206 #define TARGET_ENCODE_SECTION_INFO arm_encode_section_info
207 #endif
209 #undef TARGET_STRIP_NAME_ENCODING
210 #define TARGET_STRIP_NAME_ENCODING arm_strip_name_encoding
212 #undef TARGET_ASM_INTERNAL_LABEL
213 #define TARGET_ASM_INTERNAL_LABEL arm_internal_label
215 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
216 #define TARGET_FUNCTION_OK_FOR_SIBCALL arm_function_ok_for_sibcall
218 #undef TARGET_ASM_OUTPUT_MI_THUNK
219 #define TARGET_ASM_OUTPUT_MI_THUNK arm_output_mi_thunk
220 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
221 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
223 #undef TARGET_RTX_COSTS
224 #define TARGET_RTX_COSTS arm_rtx_costs
225 #undef TARGET_ADDRESS_COST
226 #define TARGET_ADDRESS_COST arm_address_cost
228 #undef TARGET_MACHINE_DEPENDENT_REORG
229 #define TARGET_MACHINE_DEPENDENT_REORG arm_reorg
231 #undef TARGET_INIT_BUILTINS
232 #define TARGET_INIT_BUILTINS arm_init_builtins
233 #undef TARGET_EXPAND_BUILTIN
234 #define TARGET_EXPAND_BUILTIN arm_expand_builtin
236 struct gcc_target targetm = TARGET_INITIALIZER;
238 /* Obstack for minipool constant handling. */
239 static struct obstack minipool_obstack;
240 static char * minipool_startobj;
242 /* The maximum number of skipped insns that will be
243 conditionalised if possible. */
244 static int max_insns_skipped = 5;
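/* Illustrative example (not in the original sources): final_prescan_insn
   can turn a short forward branch such as

       cmp r0, #0
       beq .L1
       mov r1, #1
       mov r2, #2
   .L1:

   into straight-line conditionally executed code:

       cmp   r0, #0
       movne r1, #1
       movne r2, #2

   max_insns_skipped bounds how many skipped insns may be converted.  */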
246 extern FILE * asm_out_file;
248 /* True if we are currently building a constant table. */
249 int making_const_table;
251 /* Define the information needed to generate branch insns. This is
252 stored from the compare operation. */
253 rtx arm_compare_op0, arm_compare_op1;
255 /* What type of floating point are we tuning for? */
256 enum fputype arm_fpu_tune;
258 /* What type of floating point instructions are available? */
259 enum fputype arm_fpu_arch;
261 /* What program mode is the cpu running in? 26-bit mode or 32-bit mode. */
262 enum prog_mode_type arm_prgmode;
264 /* Set by the -mfp=... option. */
265 const char * target_fp_name = NULL;
268 /* Used to parse the -mstructure-size-boundary command line option. */
268 const char * structure_size_string = NULL;
269 int arm_structure_size_boundary = DEFAULT_STRUCTURE_SIZE_BOUNDARY;
271 /* Bit values used to identify processor capabilities. */
272 #define FL_CO_PROC (1 << 0) /* Has external co-processor bus */
273 #define FL_FAST_MULT (1 << 1) /* Fast multiply */
274 #define FL_MODE26 (1 << 2) /* 26-bit mode support */
275 #define FL_MODE32 (1 << 3) /* 32-bit mode support */
276 #define FL_ARCH4 (1 << 4) /* Architecture rel 4 */
277 #define FL_ARCH5 (1 << 5) /* Architecture rel 5 */
278 #define FL_THUMB (1 << 6) /* Thumb aware */
279 #define FL_LDSCHED (1 << 7) /* Load scheduling necessary */
280 #define FL_STRONG (1 << 8) /* StrongARM */
281 #define FL_ARCH5E (1 << 9) /* DSP extensions to v5 */
282 #define FL_XSCALE (1 << 10) /* XScale */
283 #define FL_CIRRUS (1 << 11) /* Cirrus/DSP. */
284 #define FL_IWMMXT (1 << 29) /* XScale v2 or "Intel Wireless MMX technology". */
286 /* The bits in this mask specify which
287 instructions we are allowed to generate. */
288 static unsigned long insn_flags = 0;
290 /* The bits in this mask specify which instruction scheduling options should
291 be used. Note: there is an overlap with FL_FAST_MULT. For some
292 hardware we want to be able to generate the multiply instructions, but to
293 tune as if they were not present in the architecture. */
294 static unsigned long tune_flags = 0;
296 /* The following are used in the arm.md file as equivalents to bits
297 in the above two flag variables. */
299 /* Nonzero if this is an "M" variant of the processor. */
300 int arm_fast_multiply = 0;
302 /* Nonzero if this chip supports the ARM Architecture 4 extensions. */
303 int arm_arch4 = 0;
305 /* Nonzero if this chip supports the ARM Architecture 5 extensions. */
306 int arm_arch5 = 0;
308 /* Nonzero if this chip supports the ARM Architecture 5E extensions. */
309 int arm_arch5e = 0;
311 /* Nonzero if this chip can benefit from load scheduling. */
312 int arm_ld_sched = 0;
314 /* Nonzero if this chip is a StrongARM. */
315 int arm_is_strong = 0;
317 /* Nonzero if this chip supports Intel Wireless MMX technology. */
318 int arm_arch_iwmmxt = 0;
320 /* Nonzero if this chip is an XScale. */
321 int arm_arch_xscale = 0;
323 /* Nonzero if tuning for XScale */
324 int arm_tune_xscale = 0;
326 /* Nonzero if this chip is an ARM6 or an ARM7. */
327 int arm_is_6_or_7 = 0;
329 /* Nonzero if this chip is a Cirrus/DSP. */
330 int arm_is_cirrus = 0;
332 /* Nonzero if generating Thumb instructions. */
333 int thumb_code = 0;
335 /* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
336 must report the mode of the memory reference from PRINT_OPERAND to
337 PRINT_OPERAND_ADDRESS. */
338 enum machine_mode output_memory_reference_mode;
340 /* The register number to be used for the PIC offset register. */
341 const char * arm_pic_register_string = NULL;
342 int arm_pic_register = INVALID_REGNUM;
344 /* Set to 1 when a return insn is output; this means that the epilogue
345 is not needed. */
346 int return_used_this_function;
348 /* Set to 1 after arm_reorg has started. Reset to 0 at the start of
349 the next function. */
350 static int after_arm_reorg = 0;
352 /* The maximum number of insns to be used when loading a constant. */
353 static int arm_constant_limit = 3;
355 /* For an explanation of these variables, see final_prescan_insn below. */
356 int arm_ccfsm_state;
357 enum arm_cond_code arm_current_cc;
358 rtx arm_target_insn;
359 int arm_target_label;
361 /* The condition codes of the ARM, and the inverse function. */
362 static const char * const arm_condition_codes[] =
364 "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
365 "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
368 #define streq(string1, string2) (strcmp (string1, string2) == 0)
370 /* Initialization code. */
372 struct processors
374 const char *const name;
375 const unsigned long flags;
378 /* Not all of these give usefully different compilation alternatives,
379 but there is no simple way of generalizing them. */
380 static const struct processors all_cores[] =
382 /* ARM Cores */
384 {"arm2", FL_CO_PROC | FL_MODE26 },
385 {"arm250", FL_CO_PROC | FL_MODE26 },
386 {"arm3", FL_CO_PROC | FL_MODE26 },
387 {"arm6", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
388 {"arm60", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
389 {"arm600", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
390 {"arm610", FL_MODE26 | FL_MODE32 },
391 {"arm620", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
392 {"arm7", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
393 /* arm7m doesn't exist on its own, but only with D (and I), and
394 those don't alter the code, so arm7m is sometimes used. */
395 {"arm7m", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
396 {"arm7d", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
397 {"arm7dm", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
398 {"arm7di", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
399 {"arm7dmi", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
400 {"arm70", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
401 {"arm700", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
402 {"arm700i", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
403 {"arm710", FL_MODE26 | FL_MODE32 },
404 {"arm710t", FL_MODE26 | FL_MODE32 | FL_THUMB },
405 {"arm720", FL_MODE26 | FL_MODE32 },
406 {"arm720t", FL_MODE26 | FL_MODE32 | FL_THUMB },
407 {"arm740t", FL_MODE26 | FL_MODE32 | FL_THUMB },
408 {"arm710c", FL_MODE26 | FL_MODE32 },
409 {"arm7100", FL_MODE26 | FL_MODE32 },
410 {"arm7500", FL_MODE26 | FL_MODE32 },
411 /* Doesn't have an external co-proc, but does have embedded fpa. */
412 {"arm7500fe", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
413 {"arm7tdmi", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
414 {"arm8", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
415 {"arm810", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
416 {"arm9", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
417 {"arm920", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
418 {"arm920t", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
419 {"arm940t", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
420 {"arm9tdmi", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
421 {"arm9e", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
422 {"ep9312", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_CIRRUS },
423 {"strongarm", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
424 {"strongarm110", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
425 {"strongarm1100", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
426 {"strongarm1110", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
427 {"arm10tdmi", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_ARCH5 },
428 {"arm1020t", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_ARCH5 },
429 {"xscale", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_STRONG | FL_ARCH5 | FL_ARCH5E | FL_XSCALE },
430 {"iwmmxt", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_STRONG | FL_ARCH5 | FL_ARCH5E | FL_XSCALE | FL_IWMMXT },
432 {NULL, 0}
435 static const struct processors all_architectures[] =
437 /* ARM Architectures */
439 { "armv2", FL_CO_PROC | FL_MODE26 },
440 { "armv2a", FL_CO_PROC | FL_MODE26 },
441 { "armv3", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
442 { "armv3m", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
443 { "armv4", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 },
444 /* Strictly, FL_MODE26 is a permitted option for v4t, but there are no
445 implementations that support it, so we will leave it out for now. */
446 { "armv4t", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
447 { "armv5", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
448 { "armv5t", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
449 { "armv5te", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 | FL_ARCH5E },
450 { "ep9312", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_CIRRUS },
451 {"iwmmxt", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_STRONG | FL_ARCH5 | FL_ARCH5E | FL_XSCALE | FL_IWMMXT },
452 { NULL, 0 }
455 /* This is a magic structure. The 'string' field is magically filled in
456 with a pointer to the value specified by the user on the command line,
457 assuming that the user has specified such a value. */
459 struct arm_cpu_select arm_select[] =
461 /* string name processors */
462 { NULL, "-mcpu=", all_cores },
463 { NULL, "-march=", all_architectures },
464 { NULL, "-mtune=", all_cores }
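/* For example (illustrative): "gcc -mcpu=arm710 -mtune=strongarm" fills in
   arm_select[0].string and arm_select[2].string, so insn_flags is taken
   from the ARM710 entry while tune_flags comes from the StrongARM entry:
   the generated code stays legal for an ARM710 but is scheduled for a
   StrongARM.  */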
467 /* Return the number of bits set in VALUE. */
468 static unsigned
469 bit_count (unsigned long value)
471 unsigned long count = 0;
473 while (value)
475 count++;
476 value &= value - 1; /* Clear the least-significant set bit. */
479 return count;
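/* Worked example (illustrative): for value = 0x29 (binary 101001) the
   "value &= value - 1" step clears one set bit per iteration:
   0x29 -> 0x28 -> 0x20 -> 0, so bit_count returns 3.  */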
482 /* Fix up any incompatible options that the user has specified.
483 This has now turned into a maze. */
484 void
485 arm_override_options (void)
487 unsigned i;
489 /* Set up the flags based on the cpu/architecture selected by the user. */
490 for (i = ARRAY_SIZE (arm_select); i--;)
492 struct arm_cpu_select * ptr = arm_select + i;
494 if (ptr->string != NULL && ptr->string[0] != '\0')
496 const struct processors * sel;
498 for (sel = ptr->processors; sel->name != NULL; sel++)
499 if (streq (ptr->string, sel->name))
501 if (i == 2)
502 tune_flags = sel->flags;
503 else
505 /* If we have been given an architecture and a processor
506 make sure that they are compatible. We only generate
507 a warning though, and we prefer the CPU over the
508 architecture. */
509 if (insn_flags != 0 && (insn_flags ^ sel->flags))
510 warning ("switch -mcpu=%s conflicts with -march= switch",
511 ptr->string);
513 insn_flags = sel->flags;
516 break;
519 if (sel->name == NULL)
520 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
524 /* If the user did not specify a processor, choose one for them. */
525 if (insn_flags == 0)
527 const struct processors * sel;
528 unsigned int sought;
529 static const struct cpu_default
531 const int cpu;
532 const char *const name;
534 cpu_defaults[] =
536 { TARGET_CPU_arm2, "arm2" },
537 { TARGET_CPU_arm6, "arm6" },
538 { TARGET_CPU_arm610, "arm610" },
539 { TARGET_CPU_arm710, "arm710" },
540 { TARGET_CPU_arm7m, "arm7m" },
541 { TARGET_CPU_arm7500fe, "arm7500fe" },
542 { TARGET_CPU_arm7tdmi, "arm7tdmi" },
543 { TARGET_CPU_arm8, "arm8" },
544 { TARGET_CPU_arm810, "arm810" },
545 { TARGET_CPU_arm9, "arm9" },
546 { TARGET_CPU_strongarm, "strongarm" },
547 { TARGET_CPU_xscale, "xscale" },
548 { TARGET_CPU_ep9312, "ep9312" },
549 { TARGET_CPU_iwmmxt, "iwmmxt" },
550 { TARGET_CPU_generic, "arm" },
551 { 0, 0 }
553 const struct cpu_default * def;
555 /* Find the default. */
556 for (def = cpu_defaults; def->name; def++)
557 if (def->cpu == TARGET_CPU_DEFAULT)
558 break;
560 /* Make sure we found the default CPU. */
561 if (def->name == NULL)
562 abort ();
564 /* Find the default CPU's flags. */
565 for (sel = all_cores; sel->name != NULL; sel++)
566 if (streq (def->name, sel->name))
567 break;
569 if (sel->name == NULL)
570 abort ();
572 insn_flags = sel->flags;
574 /* Now check to see if the user has specified any command line
575 switches that require certain abilities from the cpu. */
576 sought = 0;
578 if (TARGET_INTERWORK || TARGET_THUMB)
580 sought |= (FL_THUMB | FL_MODE32);
582 /* Force apcs-32 to be used for interworking. */
583 target_flags |= ARM_FLAG_APCS_32;
585 /* There are no ARM processors that support both APCS-26 and
586 interworking. Therefore we force FL_MODE26 to be removed
587 from insn_flags here (if it was set), so that the search
588 below will always be able to find a compatible processor. */
589 insn_flags &= ~FL_MODE26;
591 else if (!TARGET_APCS_32)
592 sought |= FL_MODE26;
594 if (sought != 0 && ((sought & insn_flags) != sought))
596 /* Try to locate a CPU type that supports all of the abilities
597 of the default CPU, plus the extra abilities requested by
598 the user. */
599 for (sel = all_cores; sel->name != NULL; sel++)
600 if ((sel->flags & (sought | insn_flags)) == (sought | insn_flags))
601 break;
603 if (sel->name == NULL)
605 unsigned current_bit_count = 0;
606 const struct processors * best_fit = NULL;
608 /* Ideally we would like to issue an error message here
609 saying that it was not possible to find a CPU compatible
610 with the default CPU, but which also supports the command
611 line options specified by the programmer, and so they
612 ought to use the -mcpu=<name> command line option to
613 override the default CPU type.
615 Unfortunately this does not work with multilibbing. We
616 need to be able to support multilibs for -mapcs-26 and for
617 -mthumb-interwork and there is no CPU that can support both
618 options. Instead, if we cannot find a cpu that has both the
619 characteristics of the default cpu and the given command line
620 options we scan the array again looking for a best match. */
621 for (sel = all_cores; sel->name != NULL; sel++)
622 if ((sel->flags & sought) == sought)
624 unsigned count;
626 count = bit_count (sel->flags & insn_flags);
628 if (count >= current_bit_count)
630 best_fit = sel;
631 current_bit_count = count;
635 if (best_fit == NULL)
636 abort ();
637 else
638 sel = best_fit;
641 insn_flags = sel->flags;
645 /* If tuning has not been specified, tune for whichever processor or
646 architecture has been selected. */
647 if (tune_flags == 0)
648 tune_flags = insn_flags;
650 /* Make sure that the processor choice does not conflict with any of the
651 other command line choices. */
652 if (TARGET_APCS_32 && !(insn_flags & FL_MODE32))
654 /* If APCS-32 was not the default then it must have been set by the
655 user, so issue a warning message. If the user has specified
656 "-mapcs-32 -mcpu=arm2" then we loose here. */
657 if ((TARGET_DEFAULT & ARM_FLAG_APCS_32) == 0)
658 warning ("target CPU does not support APCS-32" );
659 target_flags &= ~ARM_FLAG_APCS_32;
661 else if (!TARGET_APCS_32 && !(insn_flags & FL_MODE26))
663 warning ("target CPU does not support APCS-26" );
664 target_flags |= ARM_FLAG_APCS_32;
667 if (TARGET_INTERWORK && !(insn_flags & FL_THUMB))
669 warning ("target CPU does not support interworking" );
670 target_flags &= ~ARM_FLAG_INTERWORK;
673 if (TARGET_THUMB && !(insn_flags & FL_THUMB))
675 warning ("target CPU does not support THUMB instructions");
676 target_flags &= ~ARM_FLAG_THUMB;
679 if (TARGET_APCS_FRAME && TARGET_THUMB)
681 /* warning ("ignoring -mapcs-frame because -mthumb was used"); */
682 target_flags &= ~ARM_FLAG_APCS_FRAME;
685 /* TARGET_BACKTRACE calls leaf_function_p, which causes a crash if done
686 from here where no function is being compiled currently. */
687 if ((target_flags & (THUMB_FLAG_LEAF_BACKTRACE | THUMB_FLAG_BACKTRACE))
688 && TARGET_ARM)
689 warning ("enabling backtrace support is only meaningful when compiling for the Thumb");
691 if (TARGET_ARM && TARGET_CALLEE_INTERWORKING)
692 warning ("enabling callee interworking support is only meaningful when compiling for the Thumb");
694 if (TARGET_ARM && TARGET_CALLER_INTERWORKING)
695 warning ("enabling caller interworking support is only meaningful when compiling for the Thumb");
697 /* If interworking is enabled then APCS-32 must be selected as well. */
698 if (TARGET_INTERWORK)
700 if (!TARGET_APCS_32)
701 warning ("interworking forces APCS-32 to be used" );
702 target_flags |= ARM_FLAG_APCS_32;
705 if (TARGET_APCS_STACK && !TARGET_APCS_FRAME)
707 warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
708 target_flags |= ARM_FLAG_APCS_FRAME;
711 if (TARGET_POKE_FUNCTION_NAME)
712 target_flags |= ARM_FLAG_APCS_FRAME;
714 if (TARGET_APCS_REENT && flag_pic)
715 error ("-fpic and -mapcs-reent are incompatible");
717 if (TARGET_APCS_REENT)
718 warning ("APCS reentrant code not supported. Ignored");
720 /* If this target is normally configured to use APCS frames, warn if they
721 are turned off and debugging is turned on. */
722 if (TARGET_ARM
723 && write_symbols != NO_DEBUG
724 && !TARGET_APCS_FRAME
725 && (TARGET_DEFAULT & ARM_FLAG_APCS_FRAME))
726 warning ("-g with -mno-apcs-frame may not give sensible debugging");
728 /* If stack checking is disabled, we can use r10 as the PIC register,
729 which keeps r9 available. */
730 if (flag_pic)
731 arm_pic_register = TARGET_APCS_STACK ? 9 : 10;
733 if (TARGET_APCS_FLOAT)
734 warning ("passing floating point arguments in fp regs not yet supported");
736 /* Initialize boolean versions of the flags, for use in the arm.md file. */
737 arm_fast_multiply = (insn_flags & FL_FAST_MULT) != 0;
738 arm_arch4 = (insn_flags & FL_ARCH4) != 0;
739 arm_arch5 = (insn_flags & FL_ARCH5) != 0;
740 arm_arch5e = (insn_flags & FL_ARCH5E) != 0;
741 arm_arch_xscale = (insn_flags & FL_XSCALE) != 0;
743 arm_ld_sched = (tune_flags & FL_LDSCHED) != 0;
744 arm_is_strong = (tune_flags & FL_STRONG) != 0;
745 thumb_code = (TARGET_ARM == 0);
746 arm_is_6_or_7 = (((tune_flags & (FL_MODE26 | FL_MODE32))
747 && !(tune_flags & FL_ARCH4))) != 0;
748 arm_tune_xscale = (tune_flags & FL_XSCALE) != 0;
749 arm_is_cirrus = (tune_flags & FL_CIRRUS) != 0;
750 arm_arch_iwmmxt = (insn_flags & FL_IWMMXT) != 0;
752 if (TARGET_IWMMXT && (! TARGET_ATPCS))
753 target_flags |= ARM_FLAG_ATPCS;
755 if (arm_is_cirrus)
757 arm_fpu_tune = FPUTYPE_MAVERICK;
759 /* Ignore -mhard-float if -mcpu=ep9312. */
760 if (TARGET_HARD_FLOAT)
761 target_flags ^= ARM_FLAG_SOFT_FLOAT;
763 else
764 /* Default value for floating point code... if no co-processor
765 bus, then schedule for emulated floating point. Otherwise,
766 assume the user has an FPA.
767 Note: this does not prevent use of floating point instructions,
768 -msoft-float does that. */
769 arm_fpu_tune = (tune_flags & FL_CO_PROC) ? FPUTYPE_FPA : FPUTYPE_FPA_EMU3;
771 if (target_fp_name)
773 if (streq (target_fp_name, "2"))
774 arm_fpu_arch = FPUTYPE_FPA_EMU2;
775 else if (streq (target_fp_name, "3"))
776 arm_fpu_arch = FPUTYPE_FPA_EMU3;
777 else
778 error ("invalid floating point emulation option: -mfpe-%s",
779 target_fp_name);
781 else
782 arm_fpu_arch = FPUTYPE_DEFAULT;
784 if (TARGET_FPE)
786 if (arm_fpu_tune == FPUTYPE_FPA_EMU3)
787 arm_fpu_tune = FPUTYPE_FPA_EMU2;
788 else if (arm_fpu_tune == FPUTYPE_MAVERICK)
789 warning ("-mfpe switch not supported by ep9312 target cpu - ignored.");
790 else if (arm_fpu_tune != FPUTYPE_FPA)
791 arm_fpu_tune = FPUTYPE_FPA_EMU2;
794 /* For arm2/3 there is no need to do any scheduling if there is only
795 a floating point emulator, or we are doing software floating-point. */
796 if ((TARGET_SOFT_FLOAT || arm_fpu_tune != FPUTYPE_FPA)
797 && (tune_flags & FL_MODE32) == 0)
798 flag_schedule_insns = flag_schedule_insns_after_reload = 0;
800 arm_prgmode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;
802 if (structure_size_string != NULL)
804 int size = strtol (structure_size_string, NULL, 0);
806 if (size == 8 || size == 32)
807 arm_structure_size_boundary = size;
808 else
809 warning ("structure size boundary can only be set to 8 or 32");
812 if (arm_pic_register_string != NULL)
814 int pic_register = decode_reg_name (arm_pic_register_string);
816 if (!flag_pic)
817 warning ("-mpic-register= is useless without -fpic");
819 /* Prevent the user from choosing an obviously stupid PIC register. */
820 else if (pic_register < 0 || call_used_regs[pic_register]
821 || pic_register == HARD_FRAME_POINTER_REGNUM
822 || pic_register == STACK_POINTER_REGNUM
823 || pic_register >= PC_REGNUM)
824 error ("unable to use '%s' for PIC register", arm_pic_register_string);
825 else
826 arm_pic_register = pic_register;
829 if (TARGET_THUMB && flag_schedule_insns)
831 /* Don't warn since it's on by default in -O2. */
832 flag_schedule_insns = 0;
835 /* If optimizing for space, don't synthesize constants.
836 For processors with load scheduling, it never costs more than 2 cycles
837 to load a constant, and the load scheduler may well reduce that to 1. */
838 if (optimize_size || (tune_flags & FL_LDSCHED))
839 arm_constant_limit = 1;
841 if (arm_arch_xscale)
842 arm_constant_limit = 2;
844 /* If optimizing for size, bump the number of instructions that we
845 are prepared to conditionally execute (even on a StrongARM).
846 Otherwise for the StrongARM, which has early execution of branches,
847 a sequence that is worth skipping is shorter. */
848 if (optimize_size)
849 max_insns_skipped = 6;
850 else if (arm_is_strong)
851 max_insns_skipped = 3;
853 /* Register global variables with the garbage collector. */
854 arm_add_gc_roots ();
857 static void
858 arm_add_gc_roots (void)
860 gcc_obstack_init(&minipool_obstack);
861 minipool_startobj = (char *) obstack_alloc (&minipool_obstack, 0);
864 /* A table of known ARM exception types.
865 For use with the interrupt function attribute. */
867 typedef struct
869 const char *const arg;
870 const unsigned long return_value;
872 isr_attribute_arg;
874 static const isr_attribute_arg isr_attribute_args [] =
876 { "IRQ", ARM_FT_ISR },
877 { "irq", ARM_FT_ISR },
878 { "FIQ", ARM_FT_FIQ },
879 { "fiq", ARM_FT_FIQ },
880 { "ABORT", ARM_FT_ISR },
881 { "abort", ARM_FT_ISR },
882 { "ABORT", ARM_FT_ISR },
883 { "abort", ARM_FT_ISR },
884 { "UNDEF", ARM_FT_EXCEPTION },
885 { "undef", ARM_FT_EXCEPTION },
886 { "SWI", ARM_FT_EXCEPTION },
887 { "swi", ARM_FT_EXCEPTION },
888 { NULL, ARM_FT_NORMAL }
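/* Example usage in user code (the handler name is hypothetical):

       void my_handler (void) __attribute__ ((interrupt ("IRQ")));

   The string argument is looked up in the table above; with no argument
   the type defaults to ARM_FT_ISR (see arm_isr_value below).  */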
891 /* Returns the (interrupt) function type of the current
892 function, or ARM_FT_UNKNOWN if the type cannot be determined. */
894 static unsigned long
895 arm_isr_value (tree argument)
897 const isr_attribute_arg * ptr;
898 const char * arg;
900 /* No argument - default to IRQ. */
901 if (argument == NULL_TREE)
902 return ARM_FT_ISR;
904 /* Get the value of the argument. */
905 if (TREE_VALUE (argument) == NULL_TREE
906 || TREE_CODE (TREE_VALUE (argument)) != STRING_CST)
907 return ARM_FT_UNKNOWN;
909 arg = TREE_STRING_POINTER (TREE_VALUE (argument));
911 /* Check it against the list of known arguments. */
912 for (ptr = isr_attribute_args; ptr->arg != NULL; ptr++)
913 if (streq (arg, ptr->arg))
914 return ptr->return_value;
916 /* An unrecognized interrupt type. */
917 return ARM_FT_UNKNOWN;
920 /* Computes the type of the current function. */
922 static unsigned long
923 arm_compute_func_type (void)
925 unsigned long type = ARM_FT_UNKNOWN;
926 tree a;
927 tree attr;
929 if (TREE_CODE (current_function_decl) != FUNCTION_DECL)
930 abort ();
932 /* Decide if the current function is volatile. Such functions
933 never return, and many memory cycles can be saved by not storing
934 register values that will never be needed again. This optimization
935 was added to speed up context switching in a kernel application. */
936 if (optimize > 0
937 && current_function_nothrow
938 && TREE_THIS_VOLATILE (current_function_decl))
939 type |= ARM_FT_VOLATILE;
941 if (current_function_needs_context)
942 type |= ARM_FT_NESTED;
944 attr = DECL_ATTRIBUTES (current_function_decl);
946 a = lookup_attribute ("naked", attr);
947 if (a != NULL_TREE)
948 type |= ARM_FT_NAKED;
950 if (cfun->machine->eh_epilogue_sp_ofs != NULL_RTX)
951 type |= ARM_FT_EXCEPTION_HANDLER;
952 else
954 a = lookup_attribute ("isr", attr);
955 if (a == NULL_TREE)
956 a = lookup_attribute ("interrupt", attr);
958 if (a == NULL_TREE)
959 type |= TARGET_INTERWORK ? ARM_FT_INTERWORKED : ARM_FT_NORMAL;
960 else
961 type |= arm_isr_value (TREE_VALUE (a));
964 return type;
967 /* Returns the type of the current function. */
969 unsigned long
970 arm_current_func_type (void)
972 if (ARM_FUNC_TYPE (cfun->machine->func_type) == ARM_FT_UNKNOWN)
973 cfun->machine->func_type = arm_compute_func_type ();
975 return cfun->machine->func_type;
978 /* Return 1 if it is possible to return using a single instruction. */
980 int
981 use_return_insn (int iscond)
983 int regno;
984 unsigned int func_type;
985 unsigned long saved_int_regs;
987 /* Never use a return instruction before reload has run. */
988 if (!reload_completed)
989 return 0;
991 func_type = arm_current_func_type ();
993 /* Naked functions and volatile functions need special
994 consideration. */
995 if (func_type & (ARM_FT_VOLATILE | ARM_FT_NAKED))
996 return 0;
998 /* So do interrupt functions that use the frame pointer. */
999 if (IS_INTERRUPT (func_type) && frame_pointer_needed)
1000 return 0;
1002 /* As do variadic functions. */
1003 if (current_function_pretend_args_size
1004 || cfun->machine->uses_anonymous_args
1005 /* Or if the function calls __builtin_eh_return (). */
1006 || ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER
1007 /* Or if there is no frame pointer and there is a stack adjustment. */
1008 || ((arm_get_frame_size () + current_function_outgoing_args_size != 0)
1009 && !frame_pointer_needed))
1010 return 0;
1012 saved_int_regs = arm_compute_save_reg_mask ();
1014 /* Can't be done if interworking with Thumb, and any registers have been
1015 stacked. */
1016 if (TARGET_INTERWORK && saved_int_regs != 0)
1017 return 0;
1019 /* On StrongARM, conditional returns are expensive if they aren't
1020 taken and multiple registers have been stacked. */
1021 if (iscond && arm_is_strong)
1023 /* Conditional return when just the LR is stored is a simple
1024 conditional-load instruction; that's not expensive. */
1025 if (saved_int_regs != 0 && saved_int_regs != (1 << LR_REGNUM))
1026 return 0;
1028 if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
1029 return 0;
1032 /* If there are saved registers but the LR isn't saved, then we need
1033 two instructions for the return. */
1034 if (saved_int_regs && !(saved_int_regs & (1 << LR_REGNUM)))
1035 return 0;
1037 /* Can't be done if any of the FPA regs are pushed,
1038 since this also requires an insn. */
1039 if (TARGET_HARD_FLOAT)
1040 for (regno = FIRST_ARM_FP_REGNUM; regno <= LAST_ARM_FP_REGNUM; regno++)
1041 if (regs_ever_live[regno] && !call_used_regs[regno])
1042 return 0;
1044 if (TARGET_REALLY_IWMMXT)
1045 for (regno = FIRST_IWMMXT_REGNUM; regno <= LAST_IWMMXT_REGNUM; regno++)
1046 if (regs_ever_live[regno] && ! call_used_regs [regno])
1047 return 0;
1049 return 1;
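/* For example (illustrative): a function that only saves r4, r5 and lr can
   return with the single instruction "ldmfd sp!, {r4, r5, pc}", so this
   function returns 1 for it; a variadic function, or one that calls
   __builtin_eh_return, always gets 0 and receives a full epilogue.  */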
1052 /* Return TRUE if int I is a valid immediate ARM constant. */
1054 int
1055 const_ok_for_arm (HOST_WIDE_INT i)
1057 unsigned HOST_WIDE_INT mask = ~(unsigned HOST_WIDE_INT)0xFF;
1059 /* For machines with >32 bit HOST_WIDE_INT, the bits above bit 31 must
1060 be all zero, or all one. */
1061 if ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff) != 0
1062 && ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff)
1063 != ((~(unsigned HOST_WIDE_INT) 0)
1064 & ~(unsigned HOST_WIDE_INT) 0xffffffff)))
1065 return FALSE;
1067 /* Fast return for 0 and powers of 2 */
1068 if ((i & (i - 1)) == 0)
1069 return TRUE;
1073 if ((i & mask & (unsigned HOST_WIDE_INT) 0xffffffff) == 0)
1074 return TRUE;
1075 mask =
1076 (mask << 2) | ((mask & (unsigned HOST_WIDE_INT) 0xffffffff)
1077 >> (32 - 2)) | ~(unsigned HOST_WIDE_INT) 0xffffffff;
1079 while (mask != ~(unsigned HOST_WIDE_INT) 0xFF);
1081 return FALSE;
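/* Examples (illustrative): a valid ARM immediate is an 8-bit value rotated
   right by an even amount, so

       0x000000ff   valid (no rotation)
       0xff000000   valid (0xff rotated right by 8)
       0x0001fe00   invalid (0xff shifted left by 9, an odd rotation)
       0x00000101   invalid (significant bits do not fit in 8 bits)  */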
1084 /* Return true if I is a valid constant for the operation CODE. */
1085 static int
1086 const_ok_for_op (HOST_WIDE_INT i, enum rtx_code code)
1088 if (const_ok_for_arm (i))
1089 return 1;
1091 switch (code)
1093 case PLUS:
1094 return const_ok_for_arm (ARM_SIGN_EXTEND (-i));
1096 case MINUS: /* Should only occur with (MINUS I reg) => rsb */
1097 case XOR:
1098 case IOR:
1099 return 0;
1101 case AND:
1102 return const_ok_for_arm (ARM_SIGN_EXTEND (~i));
1104 default:
1105 abort ();
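/* Examples (illustrative): -256 is not a valid immediate, but PLUS accepts
   it because "add rD, rN, #-256" can be emitted as "sub rD, rN, #256";
   similarly AND accepts 0xffffff00 because ~i = 0xff fits a BIC
   instruction.  (rD and rN stand for arbitrary registers.)  */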
1109 /* Emit a sequence of insns to handle a large constant.
1110 CODE is the code of the operation required, it can be any of SET, PLUS,
1111 IOR, AND, XOR, MINUS;
1112 MODE is the mode in which the operation is being performed;
1113 VAL is the integer to operate on;
1114 SOURCE is the other operand (a register, or a null-pointer for SET);
1115 SUBTARGETS means it is safe to create scratch registers if that will
1116 either produce a simpler sequence, or we will want to cse the values.
1117 Return value is the number of insns emitted. */
1119 int
1120 arm_split_constant (enum rtx_code code, enum machine_mode mode,
1121 HOST_WIDE_INT val, rtx target, rtx source, int subtargets)
1123 if (subtargets || code == SET
1124 || (GET_CODE (target) == REG && GET_CODE (source) == REG
1125 && REGNO (target) != REGNO (source)))
1127 /* After arm_reorg has been called, we can't fix up expensive
1128 constants by pushing them into memory so we must synthesize
1129 them in-line, regardless of the cost. This is only likely to
1130 be more costly on chips that have load delay slots and we are
1131 compiling without running the scheduler (so no splitting
1132 occurred before the final instruction emission).
1134 Ref: gcc -O1 -mcpu=strongarm gcc.c-torture/compile/980506-2.c
1136 if (!after_arm_reorg
1137 && (arm_gen_constant (code, mode, val, target, source, 1, 0)
1138 > arm_constant_limit + (code != SET)))
1140 if (code == SET)
1142 /* Currently SET is the only monadic value for CODE; all
1143 the rest are dyadic. */
1144 emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (val)));
1145 return 1;
1147 else
1149 rtx temp = subtargets ? gen_reg_rtx (mode) : target;
1151 emit_insn (gen_rtx_SET (VOIDmode, temp, GEN_INT (val)));
1152 /* For MINUS, the value is subtracted from, since we never
1153 have subtraction of a constant. */
1154 if (code == MINUS)
1155 emit_insn (gen_rtx_SET (VOIDmode, target,
1156 gen_rtx_MINUS (mode, temp, source)));
1157 else
1158 emit_insn (gen_rtx_SET (VOIDmode, target,
1159 gen_rtx (code, mode, source, temp)));
1160 return 2;
1165 return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
1168 static int
1169 count_insns_for_constant (HOST_WIDE_INT remainder, int i)
1171 HOST_WIDE_INT temp1;
1172 int num_insns = 0;
1173 do
1175 int end;
1177 if (i <= 0)
1178 i += 32;
1179 if (remainder & (3 << (i - 2)))
1181 end = i - 8;
1182 if (end < 0)
1183 end += 32;
1184 temp1 = remainder & ((0x0ff << end)
1185 | ((i < end) ? (0xff >> (32 - end)) : 0));
1186 remainder &= ~temp1;
1187 num_insns++;
1188 i -= 6;
1190 i -= 2;
1191 } while (remainder);
1192 return num_insns;
1195 /* As above, but extra parameter GENERATE which, if clear, suppresses
1196 RTL generation. */
1198 static int
1199 arm_gen_constant (enum rtx_code code, enum machine_mode mode,
1200 HOST_WIDE_INT val, rtx target, rtx source, int subtargets,
1201 int generate)
1203 int can_invert = 0;
1204 int can_negate = 0;
1205 int can_negate_initial = 0;
1206 int can_shift = 0;
1207 int i;
1208 int num_bits_set = 0;
1209 int set_sign_bit_copies = 0;
1210 int clear_sign_bit_copies = 0;
1211 int clear_zero_bit_copies = 0;
1212 int set_zero_bit_copies = 0;
1213 int insns = 0;
1214 unsigned HOST_WIDE_INT temp1, temp2;
1215 unsigned HOST_WIDE_INT remainder = val & 0xffffffff;
1217 /* Find out which operations are safe for a given CODE. Also do a quick
1218 check for degenerate cases; these can occur when DImode operations
1219 are split. */
1220 switch (code)
1222 case SET:
1223 can_invert = 1;
1224 can_shift = 1;
1225 can_negate = 1;
1226 break;
1228 case PLUS:
1229 can_negate = 1;
1230 can_negate_initial = 1;
1231 break;
1233 case IOR:
1234 if (remainder == 0xffffffff)
1236 if (generate)
1237 emit_insn (gen_rtx_SET (VOIDmode, target,
1238 GEN_INT (ARM_SIGN_EXTEND (val))));
1239 return 1;
1241 if (remainder == 0)
1243 if (reload_completed && rtx_equal_p (target, source))
1244 return 0;
1245 if (generate)
1246 emit_insn (gen_rtx_SET (VOIDmode, target, source));
1247 return 1;
1249 break;
1251 case AND:
1252 if (remainder == 0)
1254 if (generate)
1255 emit_insn (gen_rtx_SET (VOIDmode, target, const0_rtx));
1256 return 1;
1258 if (remainder == 0xffffffff)
1260 if (reload_completed && rtx_equal_p (target, source))
1261 return 0;
1262 if (generate)
1263 emit_insn (gen_rtx_SET (VOIDmode, target, source));
1264 return 1;
1266 can_invert = 1;
1267 break;
1269 case XOR:
1270 if (remainder == 0)
1272 if (reload_completed && rtx_equal_p (target, source))
1273 return 0;
1274 if (generate)
1275 emit_insn (gen_rtx_SET (VOIDmode, target, source));
1276 return 1;
1278 if (remainder == 0xffffffff)
1280 if (generate)
1281 emit_insn (gen_rtx_SET (VOIDmode, target,
1282 gen_rtx_NOT (mode, source)));
1283 return 1;
1286 /* We don't yet know how to handle this case below. */
1287 abort ();
1289 case MINUS:
1290 /* We treat MINUS as (val - source), since (source - val) is always
1291 passed as (source + (-val)). */
1292 if (remainder == 0)
1294 if (generate)
1295 emit_insn (gen_rtx_SET (VOIDmode, target,
1296 gen_rtx_NEG (mode, source)));
1297 return 1;
1299 if (const_ok_for_arm (val))
1301 if (generate)
1302 emit_insn (gen_rtx_SET (VOIDmode, target,
1303 gen_rtx_MINUS (mode, GEN_INT (val),
1304 source)));
1305 return 1;
1307 can_negate = 1;
1309 break;
1311 default:
1312 abort ();
1315 /* If we can do it in one insn, get out quickly. */
1316 if (const_ok_for_arm (val)
1317 || (can_negate_initial && const_ok_for_arm (-val))
1318 || (can_invert && const_ok_for_arm (~val)))
1320 if (generate)
1321 emit_insn (gen_rtx_SET (VOIDmode, target,
1322 (source ? gen_rtx (code, mode, source,
1323 GEN_INT (val))
1324 : GEN_INT (val))));
1325 return 1;
1328 /* Calculate a few attributes that may be useful for specific
1329 optimizations. */
1330 for (i = 31; i >= 0; i--)
1332 if ((remainder & (1 << i)) == 0)
1333 clear_sign_bit_copies++;
1334 else
1335 break;
1338 for (i = 31; i >= 0; i--)
1340 if ((remainder & (1 << i)) != 0)
1341 set_sign_bit_copies++;
1342 else
1343 break;
1346 for (i = 0; i <= 31; i++)
1348 if ((remainder & (1 << i)) == 0)
1349 clear_zero_bit_copies++;
1350 else
1351 break;
1354 for (i = 0; i <= 31; i++)
1356 if ((remainder & (1 << i)) != 0)
1357 set_zero_bit_copies++;
1358 else
1359 break;
1362 switch (code)
1364 case SET:
1365 /* See if we can do this by sign-extending a constant that is known
1366 to be negative. This is a good way of doing it, since the shift
1367 may well merge into a subsequent insn. */
1368 if (set_sign_bit_copies > 1)
1370 if (const_ok_for_arm
1371 (temp1 = ARM_SIGN_EXTEND (remainder
1372 << (set_sign_bit_copies - 1))))
1374 if (generate)
1376 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
1377 emit_insn (gen_rtx_SET (VOIDmode, new_src,
1378 GEN_INT (temp1)));
1379 emit_insn (gen_ashrsi3 (target, new_src,
1380 GEN_INT (set_sign_bit_copies - 1)));
1382 return 2;
1384 /* For an inverted constant, we will need to set the low bits;
1385 these will be shifted out of harm's way. */
1386 temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
1387 if (const_ok_for_arm (~temp1))
1389 if (generate)
1391 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
1392 emit_insn (gen_rtx_SET (VOIDmode, new_src,
1393 GEN_INT (temp1)));
1394 emit_insn (gen_ashrsi3 (target, new_src,
1395 GEN_INT (set_sign_bit_copies - 1)));
1397 return 2;
1401 /* See if we can generate this by setting the bottom (or the top)
1402 16 bits, and then shifting these into the other half of the
1403 word. We only look for the simplest cases, since doing more would
1404 cost too much. Be careful, however, not to generate this when the
1405 alternative would take fewer insns. */
1406 if (val & 0xffff0000)
1408 temp1 = remainder & 0xffff0000;
1409 temp2 = remainder & 0x0000ffff;
1411 /* Overlaps outside this range are best done using other methods. */
1412 for (i = 9; i < 24; i++)
1414 if ((((temp2 | (temp2 << i)) & 0xffffffff) == remainder)
1415 && !const_ok_for_arm (temp2))
1417 rtx new_src = (subtargets
1418 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
1419 : target);
1420 insns = arm_gen_constant (code, mode, temp2, new_src,
1421 source, subtargets, generate);
1422 source = new_src;
1423 if (generate)
1424 emit_insn (gen_rtx_SET
1425 (VOIDmode, target,
1426 gen_rtx_IOR (mode,
1427 gen_rtx_ASHIFT (mode, source,
1428 GEN_INT (i)),
1429 source)));
1430 return insns + 1;
1434 /* Don't duplicate cases already considered. */
1435 for (i = 17; i < 24; i++)
1437 if (((temp1 | (temp1 >> i)) == remainder)
1438 && !const_ok_for_arm (temp1))
1440 rtx new_src = (subtargets
1441 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
1442 : target);
1443 insns = arm_gen_constant (code, mode, temp1, new_src,
1444 source, subtargets, generate);
1445 source = new_src;
1446 if (generate)
1447 emit_insn
1448 (gen_rtx_SET (VOIDmode, target,
1449 gen_rtx_IOR
1450 (mode,
1451 gen_rtx_LSHIFTRT (mode, source,
1452 GEN_INT (i)),
1453 source)));
1454 return insns + 1;
1458 break;
1460 case IOR:
1461 case XOR:
1462 /* If we have IOR or XOR, and the constant can be loaded in a
1463 single instruction, and we can find a temporary to put it in,
1464 then this can be done in two instructions instead of 3-4. */
1465 if (subtargets
1466 /* TARGET can't be NULL if SUBTARGETS is 0 */
1467 || (reload_completed && !reg_mentioned_p (target, source)))
1469 if (const_ok_for_arm (ARM_SIGN_EXTEND (~val)))
1471 if (generate)
1473 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
1475 emit_insn (gen_rtx_SET (VOIDmode, sub, GEN_INT (val)));
1476 emit_insn (gen_rtx_SET (VOIDmode, target,
1477 gen_rtx (code, mode, source, sub)));
1479 return 2;
1483 if (code == XOR)
1484 break;
1486 if (set_sign_bit_copies > 8
1487 && (val & (-1 << (32 - set_sign_bit_copies))) == val)
1489 if (generate)
1491 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
1492 rtx shift = GEN_INT (set_sign_bit_copies);
1494 emit_insn (gen_rtx_SET (VOIDmode, sub,
1495 gen_rtx_NOT (mode,
1496 gen_rtx_ASHIFT (mode,
1497 source,
1498 shift))));
1499 emit_insn (gen_rtx_SET (VOIDmode, target,
1500 gen_rtx_NOT (mode,
1501 gen_rtx_LSHIFTRT (mode, sub,
1502 shift))));
1504 return 2;
1507 if (set_zero_bit_copies > 8
1508 && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
1510 if (generate)
1512 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
1513 rtx shift = GEN_INT (set_zero_bit_copies);
1515 emit_insn (gen_rtx_SET (VOIDmode, sub,
1516 gen_rtx_NOT (mode,
1517 gen_rtx_LSHIFTRT (mode,
1518 source,
1519 shift))));
1520 emit_insn (gen_rtx_SET (VOIDmode, target,
1521 gen_rtx_NOT (mode,
1522 gen_rtx_ASHIFT (mode, sub,
1523 shift))));
1525 return 2;
1528 if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~val)))
1530 if (generate)
1532 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
1533 emit_insn (gen_rtx_SET (VOIDmode, sub,
1534 gen_rtx_NOT (mode, source)));
1535 source = sub;
1536 if (subtargets)
1537 sub = gen_reg_rtx (mode);
1538 emit_insn (gen_rtx_SET (VOIDmode, sub,
1539 gen_rtx_AND (mode, source,
1540 GEN_INT (temp1))));
1541 emit_insn (gen_rtx_SET (VOIDmode, target,
1542 gen_rtx_NOT (mode, sub)));
1544 return 3;
1546 break;
1548 case AND:
1549 /* See if two shifts will do two or more insns' worth of work. */
1550 if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
1552 HOST_WIDE_INT shift_mask = ((0xffffffff
1553 << (32 - clear_sign_bit_copies))
1554 & 0xffffffff);
1556 if ((remainder | shift_mask) != 0xffffffff)
1558 if (generate)
1560 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
1561 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
1562 new_src, source, subtargets, 1);
1563 source = new_src;
1565 else
1567 rtx targ = subtargets ? NULL_RTX : target;
1568 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
1569 targ, source, subtargets, 0);
1573 if (generate)
1575 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
1576 rtx shift = GEN_INT (clear_sign_bit_copies);
1578 emit_insn (gen_ashlsi3 (new_src, source, shift));
1579 emit_insn (gen_lshrsi3 (target, new_src, shift));
1582 return insns + 2;
1585 if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
1587 HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;
1589 if ((remainder | shift_mask) != 0xffffffff)
1591 if (generate)
1593 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
1595 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
1596 new_src, source, subtargets, 1);
1597 source = new_src;
1599 else
1601 rtx targ = subtargets ? NULL_RTX : target;
1603 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
1604 targ, source, subtargets, 0);
1608 if (generate)
1610 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
1611 rtx shift = GEN_INT (clear_zero_bit_copies);
1613 emit_insn (gen_lshrsi3 (new_src, source, shift));
1614 emit_insn (gen_ashlsi3 (target, new_src, shift));
1617 return insns + 2;
1620 break;
1622 default:
1623 break;
1626 for (i = 0; i < 32; i++)
1627 if (remainder & (1 << i))
1628 num_bits_set++;
1630 if (code == AND || (can_invert && num_bits_set > 16))
1631 remainder = (~remainder) & 0xffffffff;
1632 else if (code == PLUS && num_bits_set > 16)
1633 remainder = (-remainder) & 0xffffffff;
1634 else
1636 can_invert = 0;
1637 can_negate = 0;
1640 /* Now try to find a way of doing the job in either two or three
1641 instructions.
1642 We start by looking for the largest block of zeros that is aligned on
1643 a 2-bit boundary; we then fill up the temps, wrapping around to the
1644 top of the word when we drop off the bottom.
1645 In the worst case this code should produce no more than four insns. */
1647 int best_start = 0;
1648 int best_consecutive_zeros = 0;
1650 for (i = 0; i < 32; i += 2)
1652 int consecutive_zeros = 0;
1654 if (!(remainder & (3 << i)))
1656 while ((i < 32) && !(remainder & (3 << i)))
1658 consecutive_zeros += 2;
1659 i += 2;
1661 if (consecutive_zeros > best_consecutive_zeros)
1663 best_consecutive_zeros = consecutive_zeros;
1664 best_start = i - consecutive_zeros;
1666 i -= 2;
1670 /* So long as it won't require any more insns to do so, it's
1671 desirable to emit a small constant (in bits 0...9) in the last
1672 insn. This way there is more chance that it can be combined with
1673 a later addressing insn to form a pre-indexed load or store
1674 operation. Consider:
1676 *((volatile int *)0xe0000100) = 1;
1677 *((volatile int *)0xe0000110) = 2;
1679 We want this to wind up as:
1681 mov rA, #0xe0000000
1682 mov rB, #1
1683 str rB, [rA, #0x100]
1684 mov rB, #2
1685 str rB, [rA, #0x110]
1687 rather than having to synthesize both large constants from scratch.
1689 Therefore, we calculate how many insns would be required to emit
1690 the constant starting from `best_start', and also starting from
1691 zero (i.e. with bit 31 first to be output). If `best_start' doesn't
1692 yield a shorter sequence, we may as well use zero. */
1693 if (best_start != 0
1694 && ((((unsigned HOST_WIDE_INT) 1) << best_start) < remainder)
1695 && (count_insns_for_constant (remainder, 0) <=
1696 count_insns_for_constant (remainder, best_start)))
1697 best_start = 0;
1699 /* Now start emitting the insns. */
1700 i = best_start;
1701 do
1703 int end;
1705 if (i <= 0)
1706 i += 32;
1707 if (remainder & (3 << (i - 2)))
1709 end = i - 8;
1710 if (end < 0)
1711 end += 32;
1712 temp1 = remainder & ((0x0ff << end)
1713 | ((i < end) ? (0xff >> (32 - end)) : 0));
1714 remainder &= ~temp1;
1716 if (generate)
1718 rtx new_src, temp1_rtx;
1720 if (code == SET || code == MINUS)
1722 new_src = (subtargets ? gen_reg_rtx (mode) : target);
1723 if (can_invert && code != MINUS)
1724 temp1 = ~temp1;
1726 else
1728 if (remainder && subtargets)
1729 new_src = gen_reg_rtx (mode);
1730 else
1731 new_src = target;
1732 if (can_invert)
1733 temp1 = ~temp1;
1734 else if (can_negate)
1735 temp1 = -temp1;
1738 temp1 = trunc_int_for_mode (temp1, mode);
1739 temp1_rtx = GEN_INT (temp1);
1741 if (code == SET)
1743 else if (code == MINUS)
1744 temp1_rtx = gen_rtx_MINUS (mode, temp1_rtx, source);
1745 else
1746 temp1_rtx = gen_rtx_fmt_ee (code, mode, source, temp1_rtx);
1748 emit_insn (gen_rtx_SET (VOIDmode, new_src, temp1_rtx));
1749 source = new_src;
1752 if (code == SET)
1754 can_invert = 0;
1755 code = PLUS;
1757 else if (code == MINUS)
1758 code = PLUS;
1760 insns++;
1761 i -= 6;
1763 i -= 2;
1765 while (remainder);
1768 return insns;
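/* Worked example (illustrative): SET of 0x00ffff00 is not a valid
   immediate, so it is synthesized as two insns, each with a valid
   8-bit rotated immediate:

       mov rD, #0x00ff0000
       add rD, rD, #0x0000ff00

   (rD stands for the target register.)  */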
1771 /* Canonicalize a comparison so that we are more likely to recognize it.
1772 This can be done for a few constant compares, where we can make the
1773 immediate value easier to load. */
1775 enum rtx_code
1776 arm_canonicalize_comparison (enum rtx_code code, rtx * op1)
1778 unsigned HOST_WIDE_INT i = INTVAL (*op1);
1780 switch (code)
1782 case EQ:
1783 case NE:
1784 return code;
1786 case GT:
1787 case LE:
1788 if (i != ((((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1)) - 1)
1789 && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
1791 *op1 = GEN_INT (i + 1);
1792 return code == GT ? GE : LT;
1794 break;
1796 case GE:
1797 case LT:
1798 if (i != (((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1))
1799 && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
1801 *op1 = GEN_INT (i - 1);
1802 return code == GE ? GT : LE;
1804 break;
1806 case GTU:
1807 case LEU:
1808 if (i != ~((unsigned HOST_WIDE_INT) 0)
1809 && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
1811 *op1 = GEN_INT (i + 1);
1812 return code == GTU ? GEU : LTU;
1814 break;
1816 case GEU:
1817 case LTU:
1818 if (i != 0
1819 && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
1821 *op1 = GEN_INT (i - 1);
1822 return code == GEU ? GTU : LEU;
1824 break;
1826 default:
1827 abort ();
1830 return code;
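/* For example (illustrative): "x > 0xfff" cannot use 0xfff directly, since
   it is not a valid immediate, but rewriting GT as GE gives "x >= 0x1000",
   whose constant is loadable in a single insn.  */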
1833 /* Decide whether a type should be returned in memory (true)
1834 or in a register (false). This is called by the macro
1835 RETURN_IN_MEMORY. */
1836 int
1837 arm_return_in_memory (tree type)
1839 HOST_WIDE_INT size;
1841 if (!AGGREGATE_TYPE_P (type))
1842 /* All simple types are returned in registers. */
1843 return 0;
1845 size = int_size_in_bytes (type);
1847 if (TARGET_ATPCS)
1849 /* ATPCS returns aggregate types in memory only if they are
1850 larger than a word (or are variable size). */
1851 return (size < 0 || size > UNITS_PER_WORD);
1854 /* For the arm-wince targets we choose to be compatible with Microsoft's
1855 ARM and Thumb compilers, which always return aggregates in memory. */
1856 #ifndef ARM_WINCE
1857 /* All structures/unions bigger than one word are returned in memory.
1858 Also catch the case where int_size_in_bytes returns -1. In this case
1859 the aggregate is either huge or of variable size, and in either case
1860 we will want to return it via memory and not in a register. */
1861 if (size < 0 || size > UNITS_PER_WORD)
1862 return 1;
1864 if (TREE_CODE (type) == RECORD_TYPE)
1866 tree field;
1868 /* For a struct the APCS says that we only return in a register
1869 if the type is 'integer like' and every addressable element
1870 has an offset of zero. For practical purposes this means
1871 that the structure can have at most one non bit-field element
1872 and that this element must be the first one in the structure. */
1874 /* Find the first field, ignoring non FIELD_DECL things which will
1875 have been created by C++. */
1876 for (field = TYPE_FIELDS (type);
1877 field && TREE_CODE (field) != FIELD_DECL;
1878 field = TREE_CHAIN (field))
1879 continue;
1881 if (field == NULL)
1882 return 0; /* An empty structure. Allowed by an extension to ANSI C. */
1884 /* Check that the first field is valid for returning in a register. */
1886 /* ... Floats are not allowed. */
1887 if (FLOAT_TYPE_P (TREE_TYPE (field)))
1888 return 1;
1890 /* ... Aggregates that are not themselves valid for returning in
1891 a register are not allowed. */
1892 if (RETURN_IN_MEMORY (TREE_TYPE (field)))
1893 return 1;
1895 /* Now check the remaining fields, if any. Only bitfields are allowed,
1896 since they are not addressable. */
1897 for (field = TREE_CHAIN (field);
1898 field;
1899 field = TREE_CHAIN (field))
1901 if (TREE_CODE (field) != FIELD_DECL)
1902 continue;
1904 if (!DECL_BIT_FIELD_TYPE (field))
1905 return 1;
1908 return 0;
1911 if (TREE_CODE (type) == UNION_TYPE)
1913 tree field;
1915 /* Unions can be returned in registers if every element is
1916 integral, or can be returned in an integer register. */
1917 for (field = TYPE_FIELDS (type);
1918 field;
1919 field = TREE_CHAIN (field))
1921 if (TREE_CODE (field) != FIELD_DECL)
1922 continue;
1924 if (FLOAT_TYPE_P (TREE_TYPE (field)))
1925 return 1;
1927 if (RETURN_IN_MEMORY (TREE_TYPE (field)))
1928 return 1;
1931 return 0;
1933 #endif /* not ARM_WINCE */
1935 /* Return all other types in memory. */
1936 return 1;
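/* Some illustrative consequences of the APCS rules above, assuming a
   32-bit word (UNITS_PER_WORD == 4) and a non-wince, non-ATPCS target:

     struct s1 { int i; };                      -- register (r0)
     struct s2 { unsigned a : 8; int b : 24; }; -- register (bit-fields)
     struct s3 { float f; };                    -- memory (float member)
     struct s4 { int i; int j; };               -- memory (two words)
     union  u1 { int i; short s; };             -- register (integral)  */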
1939 /* Indicate whether or not words of a double are in big-endian order. */
1942 arm_float_words_big_endian (void)
1944 if (TARGET_CIRRUS)
1945 return 0;
1947 /* For FPA, float words are always big-endian. For VFP, float words
1948 follow the memory system mode. */
1950 if (TARGET_HARD_FLOAT)
1952 /* FIXME: TARGET_HARD_FLOAT currently implies FPA. */
1953 return 1;
1956 if (TARGET_VFP)
1957 return (TARGET_BIG_END ? 1 : 0);
1959 return 1;
1962 /* Initialize a variable CUM of type CUMULATIVE_ARGS
1963 for a call to a function whose data type is FNTYPE.
1964 For a library call, FNTYPE is NULL. */
1965 void
1966 arm_init_cumulative_args (CUMULATIVE_ARGS *pcum, tree fntype,
1967 rtx libname ATTRIBUTE_UNUSED,
1968 tree fndecl ATTRIBUTE_UNUSED)
1970 /* On the ARM, the offset starts at 0. */
1971 pcum->nregs = ((fntype && aggregate_value_p (TREE_TYPE (fntype), fntype)) ? 1 : 0);
1972 pcum->iwmmxt_nregs = 0;
1974 pcum->call_cookie = CALL_NORMAL;
1976 if (TARGET_LONG_CALLS)
1977 pcum->call_cookie = CALL_LONG;
1979 /* Check for long call/short call attributes. The attributes
1980 override any command line option. */
1981 if (fntype)
1983 if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (fntype)))
1984 pcum->call_cookie = CALL_SHORT;
1985 else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (fntype)))
1986 pcum->call_cookie = CALL_LONG;
1989 /* Varargs vectors are treated the same as long long.
1990 named_count avoids having to change the way ARM handles 'named'. */
1991 pcum->named_count = 0;
1992 pcum->nargs = 0;
1994 if (TARGET_REALLY_IWMMXT && fntype)
1996 tree fn_arg;
1998 for (fn_arg = TYPE_ARG_TYPES (fntype);
1999 fn_arg;
2000 fn_arg = TREE_CHAIN (fn_arg))
2001 pcum->named_count += 1;
2003 if (! pcum->named_count)
2004 pcum->named_count = INT_MAX;
2008 /* Determine where to put an argument to a function.
2009 Value is zero to push the argument on the stack,
2010 or a hard register in which to store the argument.
2012 MODE is the argument's machine mode.
2013 TYPE is the data type of the argument (as a tree).
2014 This is null for libcalls where that information may
2015 not be available.
2016 CUM is a variable of type CUMULATIVE_ARGS which gives info about
2017 the preceding args and about the function being called.
2018 NAMED is nonzero if this argument is a named parameter
2019 (otherwise it is an extra parameter matching an ellipsis). */
2022 arm_function_arg (CUMULATIVE_ARGS *pcum, enum machine_mode mode,
2023 tree type ATTRIBUTE_UNUSED, int named)
2025 if (TARGET_REALLY_IWMMXT)
2027 if (VECTOR_MODE_SUPPORTED_P (mode))
2029 /* Varargs vectors are treated the same as long long.
2030 named_count avoids having to change the way ARM handles 'named'. */
2031 if (pcum->named_count <= pcum->nargs + 1)
2033 if (pcum->nregs == 1)
2034 pcum->nregs += 1;
2035 if (pcum->nregs <= 2)
2036 return gen_rtx_REG (mode, pcum->nregs);
2037 else
2038 return NULL_RTX;
2040 else if (pcum->iwmmxt_nregs <= 9)
2041 return gen_rtx_REG (mode, pcum->iwmmxt_nregs + FIRST_IWMMXT_REGNUM);
2042 else
2043 return NULL_RTX;
2045 else if ((mode == DImode || mode == DFmode) && pcum->nregs & 1)
2046 pcum->nregs += 1;
2049 if (mode == VOIDmode)
2050 /* Compute operand 2 of the call insn. */
2051 return GEN_INT (pcum->call_cookie);
2053 if (!named || pcum->nregs >= NUM_ARG_REGS)
2054 return NULL_RTX;
2056 return gen_rtx_REG (mode, pcum->nregs);
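/* For example (illustrative), given a soft-float call such as

     void f (int a, double d, int b);

   A is passed in r0; D is DFmode, so the register count is first
   rounded up to an even register and D occupies r2/r3; by the time B
   is considered all four argument registers (r0-r3) are taken, so
   NULL_RTX is returned and B goes on the stack.  */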
2059 /* Variable sized types are passed by reference. This is a GCC
2060 extension to the ARM ABI. */
2063 arm_function_arg_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
2064 enum machine_mode mode ATTRIBUTE_UNUSED,
2065 tree type, int named ATTRIBUTE_UNUSED)
2067 return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
2070 /* Implement va_arg. */
2073 arm_va_arg (tree valist, tree type)
2075 /* Variable sized types are passed by reference. */
2076 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2078 rtx addr = std_expand_builtin_va_arg (valist, build_pointer_type (type));
2079 return gen_rtx_MEM (ptr_mode, force_reg (Pmode, addr));
2082 if (FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), NULL) == IWMMXT_ALIGNMENT)
2084 tree minus_eight;
2085 tree t;
2087 /* Maintain 64-bit alignment of the valist pointer by
2088 constructing: valist = ((valist + (8 - 1)) & -8). */
2089 minus_eight = build_int_2 (- (IWMMXT_ALIGNMENT / BITS_PER_UNIT), -1);
2090 t = build_int_2 ((IWMMXT_ALIGNMENT / BITS_PER_UNIT) - 1, 0);
2091 t = build (PLUS_EXPR, TREE_TYPE (valist), valist, t);
2092 t = build (BIT_AND_EXPR, TREE_TYPE (t), t, minus_eight);
2093 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
2094 TREE_SIDE_EFFECTS (t) = 1;
2095 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2097 /* This is to stop the combine pass optimizing
2098 away the alignment adjustment. */
2099 mark_reg_pointer (arg_pointer_rtx, PARM_BOUNDARY);
2102 return std_expand_builtin_va_arg (valist, type);
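/* A worked instance of the rounding above (illustrative): if valist
   currently holds 0x1004, then 0x1004 + 7 == 0x100b and
   0x100b & -8 == 0x1008, the next 64-bit aligned address; a pointer
   that is already aligned maps to itself (0x1008 + 7 == 0x100f,
   0x100f & -8 == 0x1008).  */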
2105 /* Encode the current state of the #pragma [no_]long_calls. */
2106 typedef enum
2108 OFF, /* No #pragma [no_]long_calls is in effect. */
2109 LONG, /* #pragma long_calls is in effect. */
2110 SHORT /* #pragma no_long_calls is in effect. */
2111 } arm_pragma_enum;
2113 static arm_pragma_enum arm_pragma_long_calls = OFF;
2115 void
2116 arm_pr_long_calls (struct cpp_reader * pfile ATTRIBUTE_UNUSED)
2118 arm_pragma_long_calls = LONG;
2121 void
2122 arm_pr_no_long_calls (struct cpp_reader * pfile ATTRIBUTE_UNUSED)
2124 arm_pragma_long_calls = SHORT;
2127 void
2128 arm_pr_long_calls_off (struct cpp_reader * pfile ATTRIBUTE_UNUSED)
2130 arm_pragma_long_calls = OFF;
2133 /* Table of machine attributes. */
2134 const struct attribute_spec arm_attribute_table[] =
2136 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
2137 /* Function calls made to this symbol must be done indirectly, because
2138 it may lie outside of the 26 bit addressing range of a normal function
2139 call. */
2140 { "long_call", 0, 0, false, true, true, NULL },
2141 /* Whereas these functions are always known to reside within the 26 bit
2142 addressing range. */
2143 { "short_call", 0, 0, false, true, true, NULL },
2144 /* Interrupt Service Routines have special prologue and epilogue requirements. */
2145 { "isr", 0, 1, false, false, false, arm_handle_isr_attribute },
2146 { "interrupt", 0, 1, false, false, false, arm_handle_isr_attribute },
2147 { "naked", 0, 0, true, false, false, arm_handle_fndecl_attribute },
2148 #ifdef ARM_PE
2149 /* ARM/PE has three new attributes:
2150 interfacearm - ?
2151 dllexport - for exporting a function/variable that will live in a dll
2152 dllimport - for importing a function/variable from a dll
2154 Microsoft allows multiple declspecs in one __declspec, separating
2155 them with spaces. We do NOT support this. Instead, use __declspec
2156 multiple times.
2158 { "dllimport", 0, 0, true, false, false, NULL },
2159 { "dllexport", 0, 0, true, false, false, NULL },
2160 { "interfacearm", 0, 0, true, false, false, arm_handle_fndecl_attribute },
2161 #endif
2162 { NULL, 0, 0, false, false, false, NULL }
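/* Typical source-level uses of the attributes above (illustrative):

     void far_away (void) __attribute__ ((long_call));
     void nearby (void) __attribute__ ((short_call));
     void handler (void) __attribute__ ((interrupt ("IRQ")));

   The optional "isr"/"interrupt" argument names the exception kind
   ("IRQ", "FIQ", ...); a value that arm_isr_value does not recognize
   yields ARM_FT_UNKNOWN and the attribute is ignored with a warning.  */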
2165 /* Handle an attribute requiring a FUNCTION_DECL;
2166 arguments as in struct attribute_spec.handler. */
2167 static tree
2168 arm_handle_fndecl_attribute (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
2169 int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
2171 if (TREE_CODE (*node) != FUNCTION_DECL)
2173 warning ("`%s' attribute only applies to functions",
2174 IDENTIFIER_POINTER (name));
2175 *no_add_attrs = true;
2178 return NULL_TREE;
2181 /* Handle an "interrupt" or "isr" attribute;
2182 arguments as in struct attribute_spec.handler. */
2183 static tree
2184 arm_handle_isr_attribute (tree *node, tree name, tree args, int flags,
2185 bool *no_add_attrs)
2187 if (DECL_P (*node))
2189 if (TREE_CODE (*node) != FUNCTION_DECL)
2191 warning ("`%s' attribute only applies to functions",
2192 IDENTIFIER_POINTER (name));
2193 *no_add_attrs = true;
2195 /* FIXME: the argument if any is checked for type attributes;
2196 should it be checked for decl ones? */
2198 else
2200 if (TREE_CODE (*node) == FUNCTION_TYPE
2201 || TREE_CODE (*node) == METHOD_TYPE)
2203 if (arm_isr_value (args) == ARM_FT_UNKNOWN)
2205 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
2206 *no_add_attrs = true;
2209 else if (TREE_CODE (*node) == POINTER_TYPE
2210 && (TREE_CODE (TREE_TYPE (*node)) == FUNCTION_TYPE
2211 || TREE_CODE (TREE_TYPE (*node)) == METHOD_TYPE)
2212 && arm_isr_value (args) != ARM_FT_UNKNOWN)
2214 *node = build_type_copy (*node);
2215 TREE_TYPE (*node) = build_type_attribute_variant
2216 (TREE_TYPE (*node),
2217 tree_cons (name, args, TYPE_ATTRIBUTES (TREE_TYPE (*node))));
2218 *no_add_attrs = true;
2220 else
2222 /* Possibly pass this attribute on from the type to a decl. */
2223 if (flags & ((int) ATTR_FLAG_DECL_NEXT
2224 | (int) ATTR_FLAG_FUNCTION_NEXT
2225 | (int) ATTR_FLAG_ARRAY_NEXT))
2227 *no_add_attrs = true;
2228 return tree_cons (name, args, NULL_TREE);
2230 else
2232 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
2237 return NULL_TREE;
2240 /* Return 0 if the attributes for two types are incompatible, 1 if they
2241 are compatible, and 2 if they are nearly compatible (which causes a
2242 warning to be generated). */
2243 static int
2244 arm_comp_type_attributes (tree type1, tree type2)
2246 int l1, l2, s1, s2;
2248 /* Check for mismatch of non-default calling convention. */
2249 if (TREE_CODE (type1) != FUNCTION_TYPE)
2250 return 1;
2252 /* Check for mismatched call attributes. */
2253 l1 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type1)) != NULL;
2254 l2 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type2)) != NULL;
2255 s1 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type1)) != NULL;
2256 s2 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type2)) != NULL;
2258 /* Only bother to check if an attribute is defined. */
2259 if (l1 | l2 | s1 | s2)
2261 /* If one type has an attribute, the other must have the same attribute. */
2262 if ((l1 != l2) || (s1 != s2))
2263 return 0;
2265 /* Disallow mixed attributes. */
2266 if ((l1 & s2) || (l2 & s1))
2267 return 0;
2270 /* Check for mismatched ISR attribute. */
2271 l1 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type1)) != NULL;
2272 if (! l1)
2273 l1 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type1)) != NULL;
2274 l2 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type2)) != NULL;
2275 if (! l2)
2276 l2 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type2)) != NULL;
2277 if (l1 != l2)
2278 return 0;
2280 return 1;
2283 /* Encode long_call or short_call attribute by prefixing
2284 symbol name in DECL with a special character FLAG. */
2285 void
2286 arm_encode_call_attribute (tree decl, int flag)
2288 const char * str = XSTR (XEXP (DECL_RTL (decl), 0), 0);
2289 int len = strlen (str);
2290 char * newstr;
2292 /* Do not allow weak functions to be treated as short call. */
2293 if (DECL_WEAK (decl) && flag == SHORT_CALL_FLAG_CHAR)
2294 return;
2296 newstr = alloca (len + 2);
2297 newstr[0] = flag;
2298 strcpy (newstr + 1, str);
2300 newstr = (char *) ggc_alloc_string (newstr, len + 1);
2301 XSTR (XEXP (DECL_RTL (decl), 0), 0) = newstr;
2304 /* Assigns default attributes to newly defined type. This is used to
2305 set short_call/long_call attributes for function types of
2306 functions defined inside corresponding #pragma scopes. */
2307 static void
2308 arm_set_default_type_attributes (tree type)
2310 /* Add __attribute__ ((long_call)) to all functions when inside
2311 #pragma long_calls, or __attribute__ ((short_call)) when inside
2312 #pragma no_long_calls. */
2313 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
2315 tree type_attr_list, attr_name;
2316 type_attr_list = TYPE_ATTRIBUTES (type);
2318 if (arm_pragma_long_calls == LONG)
2319 attr_name = get_identifier ("long_call");
2320 else if (arm_pragma_long_calls == SHORT)
2321 attr_name = get_identifier ("short_call");
2322 else
2323 return;
2325 type_attr_list = tree_cons (attr_name, NULL_TREE, type_attr_list);
2326 TYPE_ATTRIBUTES (type) = type_attr_list;
2330 /* Return 1 if the operand is a SYMBOL_REF for a function known to be
2331 defined within the current compilation unit. If this cannot be
2332 determined, then 0 is returned. */
2333 static int
2334 current_file_function_operand (rtx sym_ref)
2336 /* This is a bit of a fib. A function will have a short call flag
2337 applied to its name if it has the short call attribute, or it has
2338 already been defined within the current compilation unit. */
2339 if (ENCODED_SHORT_CALL_ATTR_P (XSTR (sym_ref, 0)))
2340 return 1;
2342 /* The current function is always defined within the current compilation
2343 unit. If it is a weak definition, however, then this may not be the real
2344 definition of the function, and so we have to say no. */
2345 if (sym_ref == XEXP (DECL_RTL (current_function_decl), 0)
2346 && !DECL_WEAK (current_function_decl))
2347 return 1;
2349 /* We cannot make the determination - default to returning 0. */
2350 return 0;
2353 /* Return nonzero if a 32 bit "long_call" should be generated for
2354 this call. We generate a long_call if the function:
2356 a. has an __attribute__ ((long_call))
2357 or b. is within the scope of a #pragma long_calls
2358 or c. the -mlong-calls command line switch has been specified
2360 However we do not generate a long call if the function:
2362 d. has an __attribute__ ((short_call))
2363 or e. is inside the scope of a #pragma no_long_calls
2364 or f. has an __attribute__ ((section))
2365 or g. is defined within the current compilation unit.
2367 This function will be called by C fragments contained in the machine
2368 description file. CALL_REF and CALL_COOKIE correspond to the matched
2369 rtl operands. CALL_SYMBOL is used to distinguish between
2370 two different callers of the function. It is set to 1 in the
2371 "call_symbol" and "call_symbol_value" patterns and to 0 in the "call"
2372 and "call_value" patterns. This is because of the difference in the
2373 SYM_REFs passed by these patterns. */
2375 arm_is_longcall_p (rtx sym_ref, int call_cookie, int call_symbol)
2377 if (!call_symbol)
2379 if (GET_CODE (sym_ref) != MEM)
2380 return 0;
2382 sym_ref = XEXP (sym_ref, 0);
2385 if (GET_CODE (sym_ref) != SYMBOL_REF)
2386 return 0;
2388 if (call_cookie & CALL_SHORT)
2389 return 0;
2391 if (TARGET_LONG_CALLS && flag_function_sections)
2392 return 1;
2394 if (current_file_function_operand (sym_ref))
2395 return 0;
2397 return (call_cookie & CALL_LONG)
2398 || ENCODED_LONG_CALL_ATTR_P (XSTR (sym_ref, 0))
2399 || TARGET_LONG_CALLS;
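/* An illustrative consequence: when compiled with -mlong-calls,

     static void foo (void) { }
     extern void bar (void);
     void caller (void) { foo (); bar (); }

   the call to foo is still a direct BL, since foo is already defined
   in this unit (rule g above), while the call to bar is made
   indirectly through a register.  */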
2402 /* Return nonzero if it is ok to make a tail-call to DECL. */
2403 static bool
2404 arm_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
2406 int call_type = TARGET_LONG_CALLS ? CALL_LONG : CALL_NORMAL;
2408 if (cfun->machine->sibcall_blocked)
2409 return false;
2411 /* Never tailcall something for which we have no decl, or if we
2412 are in Thumb mode. */
2413 if (decl == NULL || TARGET_THUMB)
2414 return false;
2416 /* Get the calling method. */
2417 if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
2418 call_type = CALL_SHORT;
2419 else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
2420 call_type = CALL_LONG;
2422 /* Cannot tail-call to long calls, since these are out of range of
2423 a branch instruction. However, if not compiling PIC, we know
2424 we can reach the symbol if it is in this compilation unit. */
2425 if (call_type == CALL_LONG && (flag_pic || !TREE_ASM_WRITTEN (decl)))
2426 return false;
2428 /* If we are interworking and the function is not declared static
2429 then we can't tail-call it unless we know that it exists in this
2430 compilation unit (since it might be a Thumb routine). */
2431 if (TARGET_INTERWORK && TREE_PUBLIC (decl) && !TREE_ASM_WRITTEN (decl))
2432 return false;
2434 /* Never tailcall from an ISR routine - it needs a special exit sequence. */
2435 if (IS_INTERRUPT (arm_current_func_type ()))
2436 return false;
2438 /* Everything else is ok. */
2439 return true;
2443 /* Addressing mode support functions. */
2445 /* Return nonzero if X is a legitimate immediate operand when compiling
2446 for PIC. */
2448 legitimate_pic_operand_p (rtx x)
2450 if (CONSTANT_P (x)
2451 && flag_pic
2452 && (GET_CODE (x) == SYMBOL_REF
2453 || (GET_CODE (x) == CONST
2454 && GET_CODE (XEXP (x, 0)) == PLUS
2455 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)))
2456 return 0;
2458 return 1;
2462 legitimize_pic_address (rtx orig, enum machine_mode mode, rtx reg)
2464 if (GET_CODE (orig) == SYMBOL_REF
2465 || GET_CODE (orig) == LABEL_REF)
2467 #ifndef AOF_ASSEMBLER
2468 rtx pic_ref, address;
2469 #endif
2470 rtx insn;
2471 int subregs = 0;
2473 if (reg == 0)
2475 if (no_new_pseudos)
2476 abort ();
2477 else
2478 reg = gen_reg_rtx (Pmode);
2480 subregs = 1;
2483 #ifdef AOF_ASSEMBLER
2484 /* The AOF assembler can generate relocations for these directly, and
2485 understands that the PIC register has to be added into the offset. */
2486 insn = emit_insn (gen_pic_load_addr_based (reg, orig));
2487 #else
2488 if (subregs)
2489 address = gen_reg_rtx (Pmode);
2490 else
2491 address = reg;
2493 if (TARGET_ARM)
2494 emit_insn (gen_pic_load_addr_arm (address, orig));
2495 else
2496 emit_insn (gen_pic_load_addr_thumb (address, orig));
2498 if ((GET_CODE (orig) == LABEL_REF
2499 || (GET_CODE (orig) == SYMBOL_REF &&
2500 ENCODED_SHORT_CALL_ATTR_P (XSTR (orig, 0))))
2501 && NEED_GOT_RELOC)
2502 pic_ref = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, address);
2503 else
2505 pic_ref = gen_rtx_MEM (Pmode,
2506 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
2507 address));
2508 RTX_UNCHANGING_P (pic_ref) = 1;
2511 insn = emit_move_insn (reg, pic_ref);
2512 #endif
2513 current_function_uses_pic_offset_table = 1;
2514 /* Put a REG_EQUAL note on this insn, so that it can be optimized
2515 by the loop pass. */
2516 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, orig,
2517 REG_NOTES (insn));
2518 return reg;
2520 else if (GET_CODE (orig) == CONST)
2522 rtx base, offset;
2524 if (GET_CODE (XEXP (orig, 0)) == PLUS
2525 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
2526 return orig;
2528 if (reg == 0)
2530 if (no_new_pseudos)
2531 abort ();
2532 else
2533 reg = gen_reg_rtx (Pmode);
2536 if (GET_CODE (XEXP (orig, 0)) == PLUS)
2538 base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
2539 offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
2540 base == reg ? 0 : reg);
2542 else
2543 abort ();
2545 if (GET_CODE (offset) == CONST_INT)
2547 /* The base register doesn't really matter, we only want to
2548 test the index for the appropriate mode. */
2549 if (!arm_legitimate_index_p (mode, offset, 0))
2551 if (!no_new_pseudos)
2552 offset = force_reg (Pmode, offset);
2553 else
2554 abort ();
2557 if (GET_CODE (offset) == CONST_INT)
2558 return plus_constant (base, INTVAL (offset));
2561 if (GET_MODE_SIZE (mode) > 4
2562 && (GET_MODE_CLASS (mode) == MODE_INT
2563 || TARGET_SOFT_FLOAT))
2565 emit_insn (gen_addsi3 (reg, base, offset));
2566 return reg;
2569 return gen_rtx_PLUS (Pmode, base, offset);
2572 return orig;
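/* For a global symbol the net effect is roughly (illustrative; PIC_REG
   stands for whatever pic_offset_table_rtx is on the target):

     ldr  rN, .LCx           -- .LCx: .word sym(GOT)
     ldr  rN, [PIC_REG, rN]  -- rN now holds the address of sym

   Labels and short-call symbols skip the GOT load and use a plain
   PIC_REG + offset addition instead (the NEED_GOT_RELOC case).  */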
2575 /* Generate code to load the PIC register. PROLOGUE is true if
2576 called from arm_expand_prologue (in which case we want the
2577 generated insns at the start of the function); false if called
2578 by an exception receiver that needs the PIC register reloaded
2579 (in which case the insns are just dumped at the current location). */
2580 void
2581 arm_finalize_pic (int prologue ATTRIBUTE_UNUSED)
2583 #ifndef AOF_ASSEMBLER
2584 rtx l1, pic_tmp, pic_tmp2, seq, pic_rtx;
2585 rtx global_offset_table;
2587 if (current_function_uses_pic_offset_table == 0 || TARGET_SINGLE_PIC_BASE)
2588 return;
2590 if (!flag_pic)
2591 abort ();
2593 start_sequence ();
2594 l1 = gen_label_rtx ();
2596 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2597 /* On the ARM the PC register contains 'dot + 8' at the time of the
2598 addition, on the Thumb it is 'dot + 4'. */
2599 pic_tmp = plus_constant (gen_rtx_LABEL_REF (Pmode, l1), TARGET_ARM ? 8 : 4);
2600 if (GOT_PCREL)
2601 pic_tmp2 = gen_rtx_CONST (VOIDmode,
2602 gen_rtx_PLUS (Pmode, global_offset_table, pc_rtx));
2603 else
2604 pic_tmp2 = gen_rtx_CONST (VOIDmode, global_offset_table);
2606 pic_rtx = gen_rtx_CONST (Pmode, gen_rtx_MINUS (Pmode, pic_tmp2, pic_tmp));
2608 if (TARGET_ARM)
2610 emit_insn (gen_pic_load_addr_arm (pic_offset_table_rtx, pic_rtx));
2611 emit_insn (gen_pic_add_dot_plus_eight (pic_offset_table_rtx, l1));
2613 else
2615 emit_insn (gen_pic_load_addr_thumb (pic_offset_table_rtx, pic_rtx));
2616 emit_insn (gen_pic_add_dot_plus_four (pic_offset_table_rtx, l1));
2619 seq = get_insns ();
2620 end_sequence ();
2621 if (prologue)
2622 emit_insn_after (seq, get_insns ());
2623 else
2624 emit_insn (seq);
2626 /* Need to emit this whether or not we obey regdecls,
2627 since setjmp/longjmp can cause life info to screw up. */
2628 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
2629 #endif /* AOF_ASSEMBLER */
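/* In ARM state the sequence built above amounts to (illustrative):

     ldr  PIC_REG, .L0
   .L1:
     add  PIC_REG, pc, PIC_REG
     ...
   .L0: .word _GLOBAL_OFFSET_TABLE_ - (.L1 + 8)

   The "+ 8" is the pc-read offset folded in by pic_tmp; Thumb state
   uses "+ 4".  */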
2632 /* Return nonzero if X is valid as an ARM state addressing register. */
2633 static int
2634 arm_address_register_rtx_p (rtx x, int strict_p)
2636 int regno;
2638 if (GET_CODE (x) != REG)
2639 return 0;
2641 regno = REGNO (x);
2643 if (strict_p)
2644 return ARM_REGNO_OK_FOR_BASE_P (regno);
2646 return (regno <= LAST_ARM_REGNUM
2647 || regno >= FIRST_PSEUDO_REGISTER
2648 || regno == FRAME_POINTER_REGNUM
2649 || regno == ARG_POINTER_REGNUM);
2652 /* Return nonzero if X is a valid ARM state address operand. */
2654 arm_legitimate_address_p (enum machine_mode mode, rtx x, int strict_p)
2656 if (arm_address_register_rtx_p (x, strict_p))
2657 return 1;
2659 else if (GET_CODE (x) == POST_INC || GET_CODE (x) == PRE_DEC)
2660 return arm_address_register_rtx_p (XEXP (x, 0), strict_p);
2662 else if ((GET_CODE (x) == POST_MODIFY || GET_CODE (x) == PRE_MODIFY)
2663 && GET_MODE_SIZE (mode) <= 4
2664 && arm_address_register_rtx_p (XEXP (x, 0), strict_p)
2665 && GET_CODE (XEXP (x, 1)) == PLUS
2666 && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
2667 return arm_legitimate_index_p (mode, XEXP (XEXP (x, 1), 1), strict_p);
2669 /* After reload constants split into minipools will have addresses
2670 from a LABEL_REF. */
2671 else if (GET_MODE_SIZE (mode) >= 4 && reload_completed
2672 && (GET_CODE (x) == LABEL_REF
2673 || (GET_CODE (x) == CONST
2674 && GET_CODE (XEXP (x, 0)) == PLUS
2675 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF
2676 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
2677 return 1;
2679 else if (mode == TImode)
2680 return 0;
2682 else if (mode == DImode || (TARGET_SOFT_FLOAT && mode == DFmode))
2684 if (GET_CODE (x) == PLUS
2685 && arm_address_register_rtx_p (XEXP (x, 0), strict_p)
2686 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2688 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
2690 if (val == 4 || val == -4 || val == -8)
2691 return 1;
2695 else if (GET_CODE (x) == PLUS)
2697 rtx xop0 = XEXP (x, 0);
2698 rtx xop1 = XEXP (x, 1);
2700 return ((arm_address_register_rtx_p (xop0, strict_p)
2701 && arm_legitimate_index_p (mode, xop1, strict_p))
2702 || (arm_address_register_rtx_p (xop1, strict_p)
2703 && arm_legitimate_index_p (mode, xop0, strict_p)));
2706 #if 0
2707 /* Reload currently can't handle MINUS, so disable this for now */
2708 else if (GET_CODE (x) == MINUS)
2710 rtx xop0 = XEXP (x, 0);
2711 rtx xop1 = XEXP (x, 1);
2713 return (arm_address_register_rtx_p (xop0, strict_p)
2714 && arm_legitimate_index_p (mode, xop1, strict_p));
2716 #endif
2718 else if (GET_MODE_CLASS (mode) != MODE_FLOAT
2719 && GET_CODE (x) == SYMBOL_REF
2720 && CONSTANT_POOL_ADDRESS_P (x)
2721 && ! (flag_pic
2722 && symbol_mentioned_p (get_pool_constant (x))))
2723 return 1;
2725 else if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == POST_DEC)
2726 && (GET_MODE_SIZE (mode) <= 4)
2727 && arm_address_register_rtx_p (XEXP (x, 0), strict_p))
2728 return 1;
2730 return 0;
2733 /* Return nonzero if INDEX is valid for an address index operand in
2734 ARM state. */
2735 static int
2736 arm_legitimate_index_p (enum machine_mode mode, rtx index, int strict_p)
2738 HOST_WIDE_INT range;
2739 enum rtx_code code = GET_CODE (index);
2741 if (TARGET_HARD_FLOAT && GET_MODE_CLASS (mode) == MODE_FLOAT)
2742 return (code == CONST_INT && INTVAL (index) < 1024
2743 && INTVAL (index) > -1024
2744 && (INTVAL (index) & 3) == 0);
2746 if (TARGET_CIRRUS
2747 && (GET_MODE_CLASS (mode) == MODE_FLOAT || mode == DImode))
2748 return (code == CONST_INT
2749 && INTVAL (index) < 255
2750 && INTVAL (index) > -255);
2752 if (arm_address_register_rtx_p (index, strict_p)
2753 && GET_MODE_SIZE (mode) <= 4)
2754 return 1;
2756 if (TARGET_REALLY_IWMMXT && VALID_IWMMXT_REG_MODE (mode))
2757 return (code == CONST_INT
2758 && INTVAL (index) < 256
2759 && INTVAL (index) > -256);
2761 /* XXX What about ldrsb? */
2762 if (GET_MODE_SIZE (mode) <= 4 && code == MULT
2763 && (!arm_arch4 || (mode) != HImode))
2765 rtx xiop0 = XEXP (index, 0);
2766 rtx xiop1 = XEXP (index, 1);
2768 return ((arm_address_register_rtx_p (xiop0, strict_p)
2769 && power_of_two_operand (xiop1, SImode))
2770 || (arm_address_register_rtx_p (xiop1, strict_p)
2771 && power_of_two_operand (xiop0, SImode)));
2774 if (GET_MODE_SIZE (mode) <= 4
2775 && (code == LSHIFTRT || code == ASHIFTRT
2776 || code == ASHIFT || code == ROTATERT)
2777 && (!arm_arch4 || (mode) != HImode))
2779 rtx op = XEXP (index, 1);
2781 return (arm_address_register_rtx_p (XEXP (index, 0), strict_p)
2782 && GET_CODE (op) == CONST_INT
2783 && INTVAL (op) > 0
2784 && INTVAL (op) <= 31);
2787 /* XXX For ARM v4 we may be doing a sign-extend operation during the
2788 load, but that has a restricted addressing range and we are unable
2789 to tell here whether that is the case. To be safe we restrict all
2790 loads to that range. */
2791 range = ((mode) == HImode || (mode) == QImode)
2792 ? (arm_arch4 ? 256 : 4095) : 4096;
2794 return (code == CONST_INT
2795 && INTVAL (index) < range
2796 && INTVAL (index) > -range);
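/* Concrete addresses the rules above accept in ARM state
   (illustrative; SImode unless noted):

     [r0, r1]                 -- register index
     [r0, r1, lsl #2]         -- MULT/shift by a power of two
     [r0, #4095]              -- CONST_INT in the 12-bit range
     [r0, #-255] (HImode)     -- on arm_arch4, halfwords get +/-255
     [r2, #1020] (hard-float DFmode) -- word-aligned, below 1024  */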
2799 /* Return nonzero if X is valid as a Thumb state base register. */
2800 static int
2801 thumb_base_register_rtx_p (rtx x, enum machine_mode mode, int strict_p)
2803 int regno;
2805 if (GET_CODE (x) != REG)
2806 return 0;
2808 regno = REGNO (x);
2810 if (strict_p)
2811 return THUMB_REGNO_MODE_OK_FOR_BASE_P (regno, mode);
2813 return (regno <= LAST_LO_REGNUM
2814 || regno >= FIRST_PSEUDO_REGISTER
2815 || regno == FRAME_POINTER_REGNUM
2816 || (GET_MODE_SIZE (mode) >= 4
2817 && (regno == STACK_POINTER_REGNUM
2818 || x == hard_frame_pointer_rtx
2819 || x == arg_pointer_rtx)));
2822 /* Return nonzero if x is a legitimate index register. This is the case
2823 for any base register that can access a QImode object. */
2824 inline static int
2825 thumb_index_register_rtx_p (rtx x, int strict_p)
2827 return thumb_base_register_rtx_p (x, QImode, strict_p);
2830 /* Return nonzero if x is a legitimate Thumb-state address.
2832 The AP may be eliminated to either the SP or the FP, so we use the
2833 least common denominator, e.g. SImode, and offsets from 0 to 64.
2835 ??? Verify whether the above is the right approach.
2837 ??? Also, the FP may be eliminated to the SP, so perhaps that
2838 needs special handling also.
2840 ??? Look at how the mips16 port solves this problem. It probably uses
2841 better ways to solve some of these problems.
2843 Although it is not incorrect, we don't accept QImode and HImode
2844 addresses based on the frame pointer or arg pointer until the
2845 reload pass starts. This is so that eliminating such addresses
2846 into stack based ones won't produce impossible code. */
2848 thumb_legitimate_address_p (enum machine_mode mode, rtx x, int strict_p)
2850 /* ??? Not clear if this is right. Experiment. */
2851 if (GET_MODE_SIZE (mode) < 4
2852 && !(reload_in_progress || reload_completed)
2853 && (reg_mentioned_p (frame_pointer_rtx, x)
2854 || reg_mentioned_p (arg_pointer_rtx, x)
2855 || reg_mentioned_p (virtual_incoming_args_rtx, x)
2856 || reg_mentioned_p (virtual_outgoing_args_rtx, x)
2857 || reg_mentioned_p (virtual_stack_dynamic_rtx, x)
2858 || reg_mentioned_p (virtual_stack_vars_rtx, x)))
2859 return 0;
2861 /* Accept any base register. SP only in SImode or larger. */
2862 else if (thumb_base_register_rtx_p (x, mode, strict_p))
2863 return 1;
2865 /* This is PC relative data before arm_reorg runs. */
2866 else if (GET_MODE_SIZE (mode) >= 4 && CONSTANT_P (x)
2867 && GET_CODE (x) == SYMBOL_REF
2868 && CONSTANT_POOL_ADDRESS_P (x) && ! flag_pic)
2869 return 1;
2871 /* This is PC relative data after arm_reorg runs. */
2872 else if (GET_MODE_SIZE (mode) >= 4 && reload_completed
2873 && (GET_CODE (x) == LABEL_REF
2874 || (GET_CODE (x) == CONST
2875 && GET_CODE (XEXP (x, 0)) == PLUS
2876 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF
2877 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
2878 return 1;
2880 /* Post-inc indexing only supported for SImode and larger. */
2881 else if (GET_CODE (x) == POST_INC && GET_MODE_SIZE (mode) >= 4
2882 && thumb_index_register_rtx_p (XEXP (x, 0), strict_p))
2883 return 1;
2885 else if (GET_CODE (x) == PLUS)
2887 /* REG+REG address can be any two index registers. */
2888 /* We disallow FRAME+REG addressing since we know that FRAME
2889 will be replaced with STACK, and SP relative addressing only
2890 permits SP+OFFSET. */
2891 if (GET_MODE_SIZE (mode) <= 4
2892 && XEXP (x, 0) != frame_pointer_rtx
2893 && XEXP (x, 1) != frame_pointer_rtx
2894 && XEXP (x, 0) != virtual_stack_vars_rtx
2895 && XEXP (x, 1) != virtual_stack_vars_rtx
2896 && thumb_index_register_rtx_p (XEXP (x, 0), strict_p)
2897 && thumb_index_register_rtx_p (XEXP (x, 1), strict_p))
2898 return 1;
2900 /* REG+const has 5-7 bit offset for non-SP registers. */
2901 else if ((thumb_index_register_rtx_p (XEXP (x, 0), strict_p)
2902 || XEXP (x, 0) == arg_pointer_rtx)
2903 && GET_CODE (XEXP (x, 1)) == CONST_INT
2904 && thumb_legitimate_offset_p (mode, INTVAL (XEXP (x, 1))))
2905 return 1;
2907 /* REG+const has 10 bit offset for SP, but only SImode and
2908 larger is supported. */
2909 /* ??? Should probably check for DI/DFmode overflow here
2910 just like GO_IF_LEGITIMATE_OFFSET does. */
2911 else if (GET_CODE (XEXP (x, 0)) == REG
2912 && REGNO (XEXP (x, 0)) == STACK_POINTER_REGNUM
2913 && GET_MODE_SIZE (mode) >= 4
2914 && GET_CODE (XEXP (x, 1)) == CONST_INT
2915 && INTVAL (XEXP (x, 1)) >= 0
2916 && INTVAL (XEXP (x, 1)) + GET_MODE_SIZE (mode) <= 1024
2917 && (INTVAL (XEXP (x, 1)) & 3) == 0)
2918 return 1;
2920 else if (GET_CODE (XEXP (x, 0)) == REG
2921 && REGNO (XEXP (x, 0)) == FRAME_POINTER_REGNUM
2922 && GET_MODE_SIZE (mode) >= 4
2923 && GET_CODE (XEXP (x, 1)) == CONST_INT
2924 && (INTVAL (XEXP (x, 1)) & 3) == 0)
2925 return 1;
2928 else if (GET_MODE_CLASS (mode) != MODE_FLOAT
2929 && GET_CODE (x) == SYMBOL_REF
2930 && CONSTANT_POOL_ADDRESS_P (x)
2931 && !(flag_pic
2932 && symbol_mentioned_p (get_pool_constant (x))))
2933 return 1;
2935 return 0;
2938 /* Return nonzero if VAL can be used as an offset in a Thumb-state address
2939 instruction of mode MODE. */
2941 thumb_legitimate_offset_p (enum machine_mode mode, HOST_WIDE_INT val)
2943 switch (GET_MODE_SIZE (mode))
2945 case 1:
2946 return val >= 0 && val < 32;
2948 case 2:
2949 return val >= 0 && val < 64 && (val & 1) == 0;
2951 default:
2952 return (val >= 0
2953 && (val + GET_MODE_SIZE (mode)) <= 128
2954 && (val & 3) == 0);
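/* Concretely (illustrative): byte accesses allow [rN, #0]..[rN, #31],
   halfword accesses [rN, #0]..[rN, #62] with even offsets, and word
   (or larger) accesses word-aligned offsets up to #124, since
   124 + 4 <= 128.  */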
2958 /* Try machine-dependent ways of modifying an illegitimate address
2959 to be legitimate. If we find one, return the new, valid address. */
2961 arm_legitimize_address (rtx x, rtx orig_x, enum machine_mode mode)
2963 if (GET_CODE (x) == PLUS)
2965 rtx xop0 = XEXP (x, 0);
2966 rtx xop1 = XEXP (x, 1);
2968 if (CONSTANT_P (xop0) && !symbol_mentioned_p (xop0))
2969 xop0 = force_reg (SImode, xop0);
2971 if (CONSTANT_P (xop1) && !symbol_mentioned_p (xop1))
2972 xop1 = force_reg (SImode, xop1);
2974 if (ARM_BASE_REGISTER_RTX_P (xop0)
2975 && GET_CODE (xop1) == CONST_INT)
2977 HOST_WIDE_INT n, low_n;
2978 rtx base_reg, val;
2979 n = INTVAL (xop1);
2981 if (mode == DImode || (TARGET_SOFT_FLOAT && mode == DFmode))
2983 low_n = n & 0x0f;
2984 n &= ~0x0f;
2985 if (low_n > 4)
2987 n += 16;
2988 low_n -= 16;
2991 else
2993 low_n = ((mode) == TImode ? 0
2994 : n >= 0 ? (n & 0xfff) : -((-n) & 0xfff));
2995 n -= low_n;
2998 base_reg = gen_reg_rtx (SImode);
2999 val = force_operand (gen_rtx_PLUS (SImode, xop0,
3000 GEN_INT (n)), NULL_RTX);
3001 emit_move_insn (base_reg, val);
3002 x = (low_n == 0 ? base_reg
3003 : gen_rtx_PLUS (SImode, base_reg, GEN_INT (low_n)));
3005 else if (xop0 != XEXP (x, 0) || xop1 != XEXP (x, 1))
3006 x = gen_rtx_PLUS (SImode, xop0, xop1);
3009 /* XXX We don't allow MINUS any more -- see comment in
3010 arm_legitimate_address_p (). */
3011 else if (GET_CODE (x) == MINUS)
3013 rtx xop0 = XEXP (x, 0);
3014 rtx xop1 = XEXP (x, 1);
3016 if (CONSTANT_P (xop0))
3017 xop0 = force_reg (SImode, xop0);
3019 if (CONSTANT_P (xop1) && ! symbol_mentioned_p (xop1))
3020 xop1 = force_reg (SImode, xop1);
3022 if (xop0 != XEXP (x, 0) || xop1 != XEXP (x, 1))
3023 x = gen_rtx_MINUS (SImode, xop0, xop1);
3026 if (flag_pic)
3028 /* We need to find and carefully transform any SYMBOL and LABEL
3029 references; so go back to the original address expression. */
3030 rtx new_x = legitimize_pic_address (orig_x, mode, NULL_RTX);
3032 if (new_x != orig_x)
3033 x = new_x;
3036 return x;
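/* A worked instance of the split above (illustrative): for an SImode
   reference to r4 + 0x12345, the low 12 bits (0x345) fit the
   immediate-offset form, so the code emits roughly

     add  rN, r4, #0x12000    -- base_reg = xop0 + n
     ldr  rM, [rN, #0x345]    -- low_n stays in the address

   0x12000 is itself a valid data-processing immediate (0x12 rotated),
   so the add needs no further splitting.  */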
3041 #define REG_OR_SUBREG_REG(X) \
3042 (GET_CODE (X) == REG \
3043 || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))
3045 #define REG_OR_SUBREG_RTX(X) \
3046 (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))
3048 #ifndef COSTS_N_INSNS
3049 #define COSTS_N_INSNS(N) ((N) * 4 - 2)
3050 #endif
3051 /* Worker routine for arm_rtx_costs. */
3052 static inline int
3053 arm_rtx_costs_1 (rtx x, enum rtx_code code, enum rtx_code outer)
3055 enum machine_mode mode = GET_MODE (x);
3056 enum rtx_code subcode;
3057 int extra_cost;
3059 if (TARGET_THUMB)
3061 switch (code)
3063 case ASHIFT:
3064 case ASHIFTRT:
3065 case LSHIFTRT:
3066 case ROTATERT:
3067 case PLUS:
3068 case MINUS:
3069 case COMPARE:
3070 case NEG:
3071 case NOT:
3072 return COSTS_N_INSNS (1);
3074 case MULT:
3075 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
3077 int cycles = 0;
3078 unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));
3080 while (i)
3082 i >>= 2;
3083 cycles++;
3085 return COSTS_N_INSNS (2) + cycles;
3087 return COSTS_N_INSNS (1) + 16;
3089 case SET:
3090 return (COSTS_N_INSNS (1)
3091 + 4 * ((GET_CODE (SET_SRC (x)) == MEM)
3092 + (GET_CODE (SET_DEST (x)) == MEM)));
3094 case CONST_INT:
3095 if (outer == SET)
3097 if ((unsigned HOST_WIDE_INT) INTVAL (x) < 256)
3098 return 0;
3099 if (thumb_shiftable_const (INTVAL (x)))
3100 return COSTS_N_INSNS (2);
3101 return COSTS_N_INSNS (3);
3103 else if (outer == PLUS
3104 && INTVAL (x) < 256 && INTVAL (x) > -256)
3105 return 0;
3106 else if (outer == COMPARE
3107 && (unsigned HOST_WIDE_INT) INTVAL (x) < 256)
3108 return 0;
3109 else if (outer == ASHIFT || outer == ASHIFTRT
3110 || outer == LSHIFTRT)
3111 return 0;
3112 return COSTS_N_INSNS (2);
3114 case CONST:
3115 case CONST_DOUBLE:
3116 case LABEL_REF:
3117 case SYMBOL_REF:
3118 return COSTS_N_INSNS (3);
3120 case UDIV:
3121 case UMOD:
3122 case DIV:
3123 case MOD:
3124 return 100;
3126 case TRUNCATE:
3127 return 99;
3129 case AND:
3130 case XOR:
3131 case IOR:
3132 /* XXX guess. */
3133 return 8;
3135 case ADDRESSOF:
3136 case MEM:
3137 /* XXX another guess. */
3138 /* Memory costs quite a lot for the first word, but subsequent words
3139 load at the equivalent of a single insn each. */
3140 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
3141 + ((GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
3142 ? 4 : 0));
3144 case IF_THEN_ELSE:
3145 /* XXX a guess. */
3146 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
3147 return 14;
3148 return 2;
3150 case ZERO_EXTEND:
3151 /* XXX still guessing. */
3152 switch (GET_MODE (XEXP (x, 0)))
3154 case QImode:
3155 return (1 + (mode == DImode ? 4 : 0)
3156 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3158 case HImode:
3159 return (4 + (mode == DImode ? 4 : 0)
3160 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3162 case SImode:
3163 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3165 default:
3166 return 99;
3169 default:
3170 return 99;
3174 switch (code)
3176 case MEM:
3177 /* Memory costs quite a lot for the first word, but subsequent words
3178 load at the equivalent of a single insn each. */
3179 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
3180 + (GET_CODE (x) == SYMBOL_REF
3181 && CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));
3183 case DIV:
3184 case MOD:
3185 return 100;
3187 case ROTATE:
3188 if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
3189 return 4;
3190 /* Fall through */
3191 case ROTATERT:
3192 if (mode != SImode)
3193 return 8;
3194 /* Fall through */
3195 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
3196 if (mode == DImode)
3197 return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
3198 + ((GET_CODE (XEXP (x, 0)) == REG
3199 || (GET_CODE (XEXP (x, 0)) == SUBREG
3200 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
3201 ? 0 : 8));
3202 return (1 + ((GET_CODE (XEXP (x, 0)) == REG
3203 || (GET_CODE (XEXP (x, 0)) == SUBREG
3204 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
3205 ? 0 : 4)
3206 + ((GET_CODE (XEXP (x, 1)) == REG
3207 || (GET_CODE (XEXP (x, 1)) == SUBREG
3208 && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
3209 || (GET_CODE (XEXP (x, 1)) == CONST_INT))
3210 ? 0 : 4));
3212 case MINUS:
3213 if (mode == DImode)
3214 return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
3215 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
3216 || (GET_CODE (XEXP (x, 0)) == CONST_INT
3217 && const_ok_for_arm (INTVAL (XEXP (x, 0)))))
3218 ? 0 : 8));
3220 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3221 return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
3222 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
3223 && const_double_rtx_ok_for_fpa (XEXP (x, 1))))
3224 ? 0 : 8)
3225 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
3226 || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
3227 && const_double_rtx_ok_for_fpa (XEXP (x, 0))))
3228 ? 0 : 8));
3230 if (((GET_CODE (XEXP (x, 0)) == CONST_INT
3231 && const_ok_for_arm (INTVAL (XEXP (x, 0)))
3232 && REG_OR_SUBREG_REG (XEXP (x, 1))))
3233 || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
3234 || subcode == ASHIFTRT || subcode == LSHIFTRT
3235 || subcode == ROTATE || subcode == ROTATERT
3236 || (subcode == MULT
3237 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3238 && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
3239 (INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
3240 && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
3241 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
3242 || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
3243 && REG_OR_SUBREG_REG (XEXP (x, 0))))
3244 return 1;
3245 /* Fall through */
3247 case PLUS:
3248 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3249 return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
3250 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
3251 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
3252 && const_double_rtx_ok_for_fpa (XEXP (x, 1))))
3253 ? 0 : 8));
3255 /* Fall through */
3256 case AND: case XOR: case IOR:
3257 extra_cost = 0;
3259 /* Normally the frame registers will be spilt into reg+const during
3260 reload, so it is a bad idea to combine them with other instructions,
3261 since then they might not be moved outside of loops. As a compromise
3262 we allow integration with ops that have a constant as their second
3263 operand. */
3264 if ((REG_OR_SUBREG_REG (XEXP (x, 0))
3265 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
3266 && GET_CODE (XEXP (x, 1)) != CONST_INT)
3267 || (REG_OR_SUBREG_REG (XEXP (x, 1))
3268 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 1)))))
3269 extra_cost = 4;
3271 if (mode == DImode)
3272 return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
3273 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
3274 || (GET_CODE (XEXP (x, 1)) == CONST_INT
3275 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
3276 ? 0 : 8));
3278 if (REG_OR_SUBREG_REG (XEXP (x, 0)))
3279 return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
3280 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
3281 || (GET_CODE (XEXP (x, 1)) == CONST_INT
3282 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
3283 ? 0 : 4));
3285 else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
3286 return (1 + extra_cost
3287 + ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
3288 || subcode == LSHIFTRT || subcode == ASHIFTRT
3289 || subcode == ROTATE || subcode == ROTATERT
3290 || (subcode == MULT
3291 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3292 && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
3293 (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0)))
3294 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
3295 && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
3296 || GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
3297 ? 0 : 4));
3299 return 8;
3301 case MULT:
3302 /* There is no point basing this on the tuning, since it is always the
3303 fast variant if it exists at all. */
3304 if (arm_fast_multiply && mode == DImode
3305 && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
3306 && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
3307 || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
3308 return 8;
3310 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3311 || mode == DImode)
3312 return 30;
3314 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
3316 unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
3317 & (unsigned HOST_WIDE_INT) 0xffffffff);
3318 int add_cost = const_ok_for_arm (i) ? 4 : 8;
3319 int j;
3321 /* Tune as appropriate. */
3322 int booth_unit_size = ((tune_flags & FL_FAST_MULT) ? 8 : 2);
3324 for (j = 0; i && j < 32; j += booth_unit_size)
3326 i >>= booth_unit_size;
3327 add_cost += 2;
3330 return add_cost;
3333 return (((tune_flags & FL_FAST_MULT) ? 8 : 30)
3334 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
3335 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));
3337 case TRUNCATE:
3338 if (arm_fast_multiply && mode == SImode
3339 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
3340 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
3341 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
3342 == GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)))
3343 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ZERO_EXTEND
3344 || GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SIGN_EXTEND))
3345 return 8;
3346 return 99;
3348 case NEG:
3349 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3350 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
3351 /* Fall through */
3352 case NOT:
3353 if (mode == DImode)
3354 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
3356 return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
3358 case IF_THEN_ELSE:
3359 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
3360 return 14;
3361 return 2;
3363 case COMPARE:
3364 return 1;
3366 case ABS:
3367 return 4 + (mode == DImode ? 4 : 0);
3369 case SIGN_EXTEND:
3370 if (GET_MODE (XEXP (x, 0)) == QImode)
3371 return (4 + (mode == DImode ? 4 : 0)
3372 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3373 /* Fall through */
3374 case ZERO_EXTEND:
3375 switch (GET_MODE (XEXP (x, 0)))
3377 case QImode:
3378 return (1 + (mode == DImode ? 4 : 0)
3379 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3381 case HImode:
3382 return (4 + (mode == DImode ? 4 : 0)
3383 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3385 case SImode:
3386 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3388 case V8QImode:
3389 case V4HImode:
3390 case V2SImode:
3391 case V4QImode:
3392 case V2HImode:
3393 return 1;
3395 default:
3396 break;
3398 abort ();
3400 case CONST_INT:
3401 if (const_ok_for_arm (INTVAL (x)))
3402 return outer == SET ? 2 : -1;
3403 else if (outer == AND
3404 && const_ok_for_arm (~INTVAL (x)))
3405 return -1;
3406 else if ((outer == COMPARE
3407 || outer == PLUS || outer == MINUS)
3408 && const_ok_for_arm (-INTVAL (x)))
3409 return -1;
3410 else
3411 return 5;
3413 case CONST:
3414 case LABEL_REF:
3415 case SYMBOL_REF:
3416 return 6;
3418 case CONST_DOUBLE:
3419 if (const_double_rtx_ok_for_fpa (x))
3420 return outer == SET ? 2 : -1;
3421 else if ((outer == COMPARE || outer == PLUS)
3422 && neg_const_double_rtx_ok_for_fpa (x))
3423 return -1;
3424 return 7;
3426 default:
3427 return 99;
3431 static bool
3432 arm_rtx_costs (rtx x, int code, int outer_code, int *total)
3434 *total = arm_rtx_costs_1 (x, code, outer_code);
3435 return true;
3438 /* All address computations that can be done are free, but rtx cost returns
3439 the same for practically all of them. So we weight the different types
3440 of address here in the order (most preferred first):
3441 PRE/POST_INC/DEC, SHIFT or NON-INT sum, INT sum, REG, MEM or LABEL. */
3442 static int
3443 arm_address_cost (rtx x)
3445 #define ARM_ADDRESS_COST(X) \
3446 (10 - ((GET_CODE (X) == MEM || GET_CODE (X) == LABEL_REF \
3447 || GET_CODE (X) == SYMBOL_REF) \
3448 ? 0 \
3449 : ((GET_CODE (X) == PRE_INC || GET_CODE (X) == PRE_DEC \
3450 || GET_CODE (X) == POST_INC || GET_CODE (X) == POST_DEC) \
3451 ? 10 \
3452 : (((GET_CODE (X) == PLUS || GET_CODE (X) == MINUS) \
3453 ? 6 + (GET_CODE (XEXP (X, 1)) == CONST_INT ? 2 \
3454 : ((GET_RTX_CLASS (GET_CODE (XEXP (X, 0))) == '2' \
3455 || GET_RTX_CLASS (GET_CODE (XEXP (X, 0))) == 'c' \
3456 || GET_RTX_CLASS (GET_CODE (XEXP (X, 1))) == '2' \
3457 || GET_RTX_CLASS (GET_CODE (XEXP (X, 1))) == 'c') \
3458 ? 1 : 0)) \
3459 : 4)))))
3461 #define THUMB_ADDRESS_COST(X) \
3462 ((GET_CODE (X) == REG \
3463 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 0)) == REG \
3464 && GET_CODE (XEXP (X, 1)) == CONST_INT)) \
3465 ? 1 : 2)
3467 return (TARGET_ARM ? ARM_ADDRESS_COST (x) : THUMB_ADDRESS_COST (x));
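/* With the macros above, typical ARM addresses rank as follows
   (illustrative; lower is more preferred):

     (post_inc r0)            -> 0
     (plus r0 (const_int 4))  -> 2
     (plus r0 (mult r1 4))    -> 3
     (plus r0 r1)             -> 4
     (reg r0)                 -> 6
     (symbol_ref "x")         -> 10  */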
3470 static int
3471 arm_use_dfa_pipeline_interface (void)
3473 return true;
3476 static int
3477 arm_adjust_cost (rtx insn, rtx link, rtx dep, int cost)
3479 rtx i_pat, d_pat;
3481 /* Some true dependencies can have a higher cost depending
3482 on precisely how certain input operands are used. */
3483 if (arm_tune_xscale
3484 && REG_NOTE_KIND (link) == 0
3485 && recog_memoized (insn) >= 0
3486 && recog_memoized (dep) >= 0)
3488 int shift_opnum = get_attr_shift (insn);
3489 enum attr_type attr_type = get_attr_type (dep);
3491 /* If nonzero, SHIFT_OPNUM contains the operand number of a shifted
3492 operand for INSN. If we have a shifted input operand and the
3493 instruction we depend on is another ALU instruction, then we may
3494 have to account for an additional stall. */
3495 if (shift_opnum != 0 && attr_type == TYPE_NORMAL)
3497 rtx shifted_operand;
3498 int opno;
3500 /* Get the shifted operand. */
3501 extract_insn (insn);
3502 shifted_operand = recog_data.operand[shift_opnum];
3504 /* Iterate over all the operands in DEP. If we write an operand
3505 that overlaps with SHIFTED_OPERAND, then we have to increase the
3506 cost of this dependency. */
3507 extract_insn (dep);
3508 preprocess_constraints ();
3509 for (opno = 0; opno < recog_data.n_operands; opno++)
3511 /* We can ignore strict inputs. */
3512 if (recog_data.operand_type[opno] == OP_IN)
3513 continue;
3515 if (reg_overlap_mentioned_p (recog_data.operand[opno],
3516 shifted_operand))
3517 return 2;
3522 /* XXX This is not strictly true for the FPA. */
3523 if (REG_NOTE_KIND (link) == REG_DEP_ANTI
3524 || REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
3525 return 0;
3527 /* Call insns don't incur a stall, even if they follow a load. */
3528 if (REG_NOTE_KIND (link) == 0
3529 && GET_CODE (insn) == CALL_INSN)
3530 return 1;
3532 if ((i_pat = single_set (insn)) != NULL
3533 && GET_CODE (SET_SRC (i_pat)) == MEM
3534 && (d_pat = single_set (dep)) != NULL
3535 && GET_CODE (SET_DEST (d_pat)) == MEM)
3537 rtx src_mem = XEXP (SET_SRC (i_pat), 0);
3538 /* This is a load after a store; there is no conflict if the load reads
3539 from a cached area. Assume that loads from the stack, and from the
3540 constant pool are cached, and that others will miss. This is a
3541 hack. */
3543 if ((GET_CODE (src_mem) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (src_mem))
3544 || reg_mentioned_p (stack_pointer_rtx, src_mem)
3545 || reg_mentioned_p (frame_pointer_rtx, src_mem)
3546 || reg_mentioned_p (hard_frame_pointer_rtx, src_mem))
3547 return 1;
3550 return cost;
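/* An illustrative XScale case caught above: in

     mov  r2, r3                -- ALU op writing r2
     add  r0, r1, r2, lsl #3    -- r2 is the shifted input operand

   the second insn's shifted operand depends on an ALU result, so the
   dependency cost is raised to 2 to model the extra stall.  */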
3553 static int fpa_consts_inited = 0;
3555 static const char * const strings_fpa[8] =
3557 "0", "1", "2", "3",
3558 "4", "5", "0.5", "10"
3561 static REAL_VALUE_TYPE values_fpa[8];
3563 static void
3564 init_fpa_table (void)
3566 int i;
3567 REAL_VALUE_TYPE r;
3569 for (i = 0; i < 8; i++)
3571 r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
3572 values_fpa[i] = r;
3575 fpa_consts_inited = 1;
3578 /* Return TRUE if rtx X is a valid immediate FPA constant. */
3580 const_double_rtx_ok_for_fpa (rtx x)
3582 REAL_VALUE_TYPE r;
3583 int i;
3585 if (!fpa_consts_inited)
3586 init_fpa_table ();
3588 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3589 if (REAL_VALUE_MINUS_ZERO (r))
3590 return 0;
3592 for (i = 0; i < 8; i++)
3593 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
3594 return 1;
3596 return 0;
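/* So, for example (illustrative): 2.0 and 0.5 are accepted as FPA
   immediates while 1.5 and -1.0 are not; the table above (0, 1, 2, 3,
   4, 5, 0.5, 10) is the complete set, and -0.0 is explicitly rejected
   even though it compares equal to 0.0.  */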
3599 /* Return TRUE if rtx X becomes a valid immediate FPA constant when negated. */
3601 neg_const_double_rtx_ok_for_fpa (rtx x)
3603 REAL_VALUE_TYPE r;
3604 int i;
3606 if (!fpa_consts_inited)
3607 init_fpa_table ();
3609 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3610 r = REAL_VALUE_NEGATE (r);
3611 if (REAL_VALUE_MINUS_ZERO (r))
3612 return 0;
3614 for (i = 0; i < 8; i++)
3615 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
3616 return 1;
3618 return 0;
3621 /* Predicates for `match_operand' and `match_operator'. */
3623 /* s_register_operand is the same as register_operand, but it doesn't accept
3624 (SUBREG (MEM)...).
3626 This function exists because at the time it was put in it led to better
3627 code. SUBREG(MEM) always needs a reload in the places where
3628 s_register_operand is used, and this seemed to lead to excessive
3629 reloading. */
3631 s_register_operand (rtx op, enum machine_mode mode)
3633 if (GET_MODE (op) != mode && mode != VOIDmode)
3634 return 0;
3636 if (GET_CODE (op) == SUBREG)
3637 op = SUBREG_REG (op);
3639 /* We don't consider registers whose class is NO_REGS
3640 to be a register operand. */
3641 /* XXX might have to check for lo regs only for thumb ??? */
3642 return (GET_CODE (op) == REG
3643 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3644 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
3647 /* A hard register operand (even before reload). */
3649 arm_hard_register_operand (rtx op, enum machine_mode mode)
3651 if (GET_MODE (op) != mode && mode != VOIDmode)
3652 return 0;
3654 return (GET_CODE (op) == REG
3655 && REGNO (op) < FIRST_PSEUDO_REGISTER);
3658 /* Only accept reg, subreg(reg), const_int. */
3660 reg_or_int_operand (rtx op, enum machine_mode mode)
3662 if (GET_CODE (op) == CONST_INT)
3663 return 1;
3665 if (GET_MODE (op) != mode && mode != VOIDmode)
3666 return 0;
3668 if (GET_CODE (op) == SUBREG)
3669 op = SUBREG_REG (op);
3671 /* We don't consider registers whose class is NO_REGS
3672 to be a register operand. */
3673 return (GET_CODE (op) == REG
3674 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3675 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
3678 /* Return 1 if OP is an item in memory, given that we are in reload. */
3680 arm_reload_memory_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3682 int regno = true_regnum (op);
3684 return (!CONSTANT_P (op)
3685 && (regno == -1
3686 || (GET_CODE (op) == REG
3687 && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
3690 /* Return 1 if OP is a valid memory address, but not valid for a signed byte
3691 memory access (architecture V4).
3692 MODE is QImode if called when computing constraints, or VOIDmode when
3693 emitting patterns. In this latter case we cannot use memory_operand()
3694 because it will fail on badly formed MEMs, which is precisely what we are
3695 trying to catch. */
3697 bad_signed_byte_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3699 if (GET_CODE (op) != MEM)
3700 return 0;
3702 op = XEXP (op, 0);
3704 /* A sum of anything more complex than reg + reg or reg + const is bad. */
3705 if ((GET_CODE (op) == PLUS || GET_CODE (op) == MINUS)
3706 && (!s_register_operand (XEXP (op, 0), VOIDmode)
3707 || (!s_register_operand (XEXP (op, 1), VOIDmode)
3708 && GET_CODE (XEXP (op, 1)) != CONST_INT)))
3709 return 1;
3711 /* Big constants are also bad. */
3712 if (GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT
3713 && (INTVAL (XEXP (op, 1)) > 0xff
3714 || -INTVAL (XEXP (op, 1)) > 0xff))
3715 return 1;
3717 /* Everything else is good, or will automatically be made so. */
3718 return 0;
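/* Addresses this flags as bad for a v4 ldrsb (illustrative):

     [r0, r1, lsl #2]  -- index more complex than a plain register
     [r0, #300]        -- displacement outside the +/-255 byte range

   whereas [r0], [r0, r1] and [r0, #200] are left alone.  */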
3721 /* Return TRUE for valid operands for the rhs of an ARM instruction. */
3723 arm_rhs_operand (rtx op, enum machine_mode mode)
3725 return (s_register_operand (op, mode)
3726 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
3729 /* Return TRUE for valid operands for the
3730 rhs of an ARM instruction, or a load. */
3732 arm_rhsm_operand (rtx op, enum machine_mode mode)
3734 return (s_register_operand (op, mode)
3735 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
3736 || memory_operand (op, mode));
3739 /* Return TRUE for valid operands for the rhs of an ARM instruction, or if a
3740 constant that is valid when negated. */
3742 arm_add_operand (rtx op, enum machine_mode mode)
3744 if (TARGET_THUMB)
3745 return thumb_cmp_operand (op, mode);
3747 return (s_register_operand (op, mode)
3748 || (GET_CODE (op) == CONST_INT
3749 && (const_ok_for_arm (INTVAL (op))
3750 || const_ok_for_arm (-INTVAL (op)))));
3754 arm_not_operand (rtx op, enum machine_mode mode)
3756 return (s_register_operand (op, mode)
3757 || (GET_CODE (op) == CONST_INT
3758 && (const_ok_for_arm (INTVAL (op))
3759 || const_ok_for_arm (~INTVAL (op)))));
3762 /* Return TRUE if the operand is a memory reference which contains an
3763 offsettable address. */
3765 offsettable_memory_operand (rtx op, enum machine_mode mode)
3767 if (mode == VOIDmode)
3768 mode = GET_MODE (op);
3770 return (mode == GET_MODE (op)
3771 && GET_CODE (op) == MEM
3772 && offsettable_address_p (reload_completed | reload_in_progress,
3773 mode, XEXP (op, 0)));
3776 /* Return TRUE if the operand is a memory reference which is, or can be
3777 made word aligned by adjusting the offset. */
3779 alignable_memory_operand (rtx op, enum machine_mode mode)
3781 rtx reg;
3783 if (mode == VOIDmode)
3784 mode = GET_MODE (op);
3786 if (mode != GET_MODE (op) || GET_CODE (op) != MEM)
3787 return 0;
3789 op = XEXP (op, 0);
3791 return ((GET_CODE (reg = op) == REG
3792 || (GET_CODE (op) == SUBREG
3793 && GET_CODE (reg = SUBREG_REG (op)) == REG)
3794 || (GET_CODE (op) == PLUS
3795 && GET_CODE (XEXP (op, 1)) == CONST_INT
3796 && (GET_CODE (reg = XEXP (op, 0)) == REG
3797 || (GET_CODE (XEXP (op, 0)) == SUBREG
3798 && GET_CODE (reg = SUBREG_REG (XEXP (op, 0))) == REG))))
3799 && REGNO_POINTER_ALIGN (REGNO (reg)) >= 32);
3802 /* Similar to s_register_operand, but does not allow hard integer
3803 registers. */
3805 f_register_operand (rtx op, enum machine_mode mode)
3807 if (GET_MODE (op) != mode && mode != VOIDmode)
3808 return 0;
3810 if (GET_CODE (op) == SUBREG)
3811 op = SUBREG_REG (op);
3813 /* We don't consider registers whose class is NO_REGS
3814 to be a register operand. */
3815 return (GET_CODE (op) == REG
3816 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3817 || REGNO_REG_CLASS (REGNO (op)) == FPA_REGS));
3820 /* Return TRUE for valid operands for the rhs of an FPA instruction. */
3822 fpa_rhs_operand (rtx op, enum machine_mode mode)
3824 if (s_register_operand (op, mode))
3825 return TRUE;
3827 if (GET_MODE (op) != mode && mode != VOIDmode)
3828 return FALSE;
3830 if (GET_CODE (op) == CONST_DOUBLE)
3831 return const_double_rtx_ok_for_fpa (op);
3833 return FALSE;
3837 fpa_add_operand (rtx op, enum machine_mode mode)
3839 if (s_register_operand (op, mode))
3840 return TRUE;
3842 if (GET_MODE (op) != mode && mode != VOIDmode)
3843 return FALSE;
3845 if (GET_CODE (op) == CONST_DOUBLE)
3846 return (const_double_rtx_ok_for_fpa (op)
3847 || neg_const_double_rtx_ok_for_fpa (op));
3849 return FALSE;
3852 /* Return nonzero if OP is a valid Cirrus memory address pattern. */
3854 cirrus_memory_offset (rtx op)
3856 /* Reject eliminable registers. */
3857 if (! (reload_in_progress || reload_completed)
3858 && ( reg_mentioned_p (frame_pointer_rtx, op)
3859 || reg_mentioned_p (arg_pointer_rtx, op)
3860 || reg_mentioned_p (virtual_incoming_args_rtx, op)
3861 || reg_mentioned_p (virtual_outgoing_args_rtx, op)
3862 || reg_mentioned_p (virtual_stack_dynamic_rtx, op)
3863 || reg_mentioned_p (virtual_stack_vars_rtx, op)))
3864 return 0;
3866 if (GET_CODE (op) == MEM)
3868 rtx ind;
3870 ind = XEXP (op, 0);
3872 /* Match: (mem (reg)). */
3873 if (GET_CODE (ind) == REG)
3874 return 1;
3876 /* Match:
3877 (mem (plus (reg)
3878 (const))). */
3879 if (GET_CODE (ind) == PLUS
3880 && GET_CODE (XEXP (ind, 0)) == REG
3881 && REG_MODE_OK_FOR_BASE_P (XEXP (ind, 0), VOIDmode)
3882 && GET_CODE (XEXP (ind, 1)) == CONST_INT)
3883 return 1;
3886 return 0;
3889 /* Return nonzero if OP is a Cirrus or general register. */
3891 cirrus_register_operand (rtx op, enum machine_mode mode)
3893 if (GET_MODE (op) != mode && mode != VOIDmode)
3894 return FALSE;
3896 if (GET_CODE (op) == SUBREG)
3897 op = SUBREG_REG (op);
3899 return (GET_CODE (op) == REG
3900 && (REGNO_REG_CLASS (REGNO (op)) == CIRRUS_REGS
3901 || REGNO_REG_CLASS (REGNO (op)) == GENERAL_REGS));
3904 /* Return nonzero if OP is a Cirrus FP register. */
3906 cirrus_fp_register (rtx op, enum machine_mode mode)
3908 if (GET_MODE (op) != mode && mode != VOIDmode)
3909 return FALSE;
3911 if (GET_CODE (op) == SUBREG)
3912 op = SUBREG_REG (op);
3914 return (GET_CODE (op) == REG
3915 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3916 || REGNO_REG_CLASS (REGNO (op)) == CIRRUS_REGS));
3919 /* Return nonzero if OP is a 6-bit constant (0..63). */
3921 cirrus_shift_const (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3923 return (GET_CODE (op) == CONST_INT
3924 && INTVAL (op) >= 0
3925 && INTVAL (op) < 64);
3928 /* Returns TRUE if INSN is an "LDR REG, ADDR" instruction.
3929 Used by the Cirrus Maverick code, which has to work around
3930 a hardware bug triggered by such instructions. */
3931 static bool
3932 arm_memory_load_p (rtx insn)
3934 rtx body, lhs, rhs;
3936 if (insn == NULL_RTX || GET_CODE (insn) != INSN)
3937 return false;
3939 body = PATTERN (insn);
3941 if (GET_CODE (body) != SET)
3942 return false;
3944 lhs = XEXP (body, 0);
3945 rhs = XEXP (body, 1);
3947 lhs = REG_OR_SUBREG_RTX (lhs);
3949 /* If the destination is not a general purpose
3950 register we do not have to worry. */
3951 if (GET_CODE (lhs) != REG
3952 || REGNO_REG_CLASS (REGNO (lhs)) != GENERAL_REGS)
3953 return false;
3955 /* As well as loads from memory we also have to react
3956 to loads of invalid constants which will be turned
3957 into loads from the minipool. */
3958 return (GET_CODE (rhs) == MEM
3959 || GET_CODE (rhs) == SYMBOL_REF
3960 || note_invalid_constants (insn, -1, false));
3963 /* Return TRUE if INSN is a Cirrus instruction. */
3964 static bool
3965 arm_cirrus_insn_p (rtx insn)
3967 enum attr_cirrus attr;
3969 /* get_attr aborts on USE and CLOBBER. */
3970 if (!insn
3971 || GET_CODE (insn) != INSN
3972 || GET_CODE (PATTERN (insn)) == USE
3973 || GET_CODE (PATTERN (insn)) == CLOBBER)
3974 return 0;
3976 attr = get_attr_cirrus (insn);
3978 return attr != CIRRUS_NOT;
3981 /* Cirrus reorg for invalid instruction combinations. */
3982 static void
3983 cirrus_reorg (rtx first)
3985 enum attr_cirrus attr;
3986 rtx body = PATTERN (first);
3987 rtx t;
3988 int nops;
3990 /* Any branch must be followed by 2 non-Cirrus instructions. */
3991 if (GET_CODE (first) == JUMP_INSN && GET_CODE (body) != RETURN)
3993 nops = 0;
3994 t = next_nonnote_insn (first);
3996 if (arm_cirrus_insn_p (t))
3997 ++ nops;
3999 if (arm_cirrus_insn_p (next_nonnote_insn (t)))
4000 ++ nops;
4002 while (nops --)
4003 emit_insn_after (gen_nop (), first);
4005 return;
4008 /* (float (blah)) is in parallel with a clobber. */
4009 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
4010 body = XVECEXP (body, 0, 0);
4012 if (GET_CODE (body) == SET)
4014 rtx lhs = XEXP (body, 0), rhs = XEXP (body, 1);
4016 /* cfldrd, cfldr64, cfstrd, cfstr64 must
4017 be followed by a non-Cirrus insn. */
4018 if (get_attr_cirrus (first) == CIRRUS_DOUBLE)
4020 if (arm_cirrus_insn_p (next_nonnote_insn (first)))
4021 emit_insn_after (gen_nop (), first);
4023 return;
4025 else if (arm_memory_load_p (first))
4027 unsigned int arm_regno;
4029 /* Any ldr/cfmvdlr, ldr/cfmvdhr, ldr/cfmvsr, ldr/cfmv64lr,
4030 ldr/cfmv64hr combination where the Rd field is the same
4031 in both instructions must be split with a non-Cirrus
4032 insn. Example:
4034 ldr r0, blah
4036 cfmvsr mvf0, r0. */
4038 /* Get Arm register number for ldr insn. */
4039 if (GET_CODE (lhs) == REG)
4040 arm_regno = REGNO (lhs);
4041 else if (GET_CODE (rhs) == REG)
4042 arm_regno = REGNO (rhs);
4043 else
4044 abort ();
4046 /* Next insn. */
4047 first = next_nonnote_insn (first);
4049 if (! arm_cirrus_insn_p (first))
4050 return;
4052 body = PATTERN (first);
4054 /* (float (blah)) is in parallel with a clobber. */
4055 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0))
4056 body = XVECEXP (body, 0, 0);
4058 if (GET_CODE (body) == FLOAT)
4059 body = XEXP (body, 0);
4061 if (get_attr_cirrus (first) == CIRRUS_MOVE
4062 && GET_CODE (XEXP (body, 1)) == REG
4063 && arm_regno == REGNO (XEXP (body, 1)))
4064 emit_insn_after (gen_nop (), first);
4066 return;
4070 /* get_attr aborts on USE and CLOBBER. */
4071 if (!first
4072 || GET_CODE (first) != INSN
4073 || GET_CODE (PATTERN (first)) == USE
4074 || GET_CODE (PATTERN (first)) == CLOBBER)
4075 return;
4077 attr = get_attr_cirrus (first);
4079 /* Any coprocessor compare instruction (cfcmps, cfcmpd, ...)
4080 must be followed by a non-coprocessor instruction. */
4081 if (attr == CIRRUS_COMPARE)
4083 nops = 0;
4085 t = next_nonnote_insn (first);
4087 if (arm_cirrus_insn_p (t))
4088 ++ nops;
4090 if (arm_cirrus_insn_p (next_nonnote_insn (t)))
4091 ++ nops;
4093 while (nops --)
4094 emit_insn_after (gen_nop (), first);
4096 return;
4100 /* Return nonzero if OP is a constant power of two. */
4102 power_of_two_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
4104 if (GET_CODE (op) == CONST_INT)
4106 HOST_WIDE_INT value = INTVAL (op);
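/* VALUE & (VALUE - 1) clears the lowest set bit, so the result is zero
exactly when VALUE has a single bit set; e.g. 8 & 7 == 0 while
12 & 11 == 8. */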
4108 return value != 0 && (value & (value - 1)) == 0;
4111 return FALSE;
4114 /* Return TRUE for a valid operand of a DImode operation.
4115 Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
4116 Note that this disallows MEM(REG+REG), but allows
4117 MEM(PRE/POST_INC/DEC(REG)). */
4119 di_operand (rtx op, enum machine_mode mode)
4121 if (s_register_operand (op, mode))
4122 return TRUE;
4124 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
4125 return FALSE;
4127 if (GET_CODE (op) == SUBREG)
4128 op = SUBREG_REG (op);
4130 switch (GET_CODE (op))
4132 case CONST_DOUBLE:
4133 case CONST_INT:
4134 return TRUE;
4136 case MEM:
4137 return memory_address_p (DImode, XEXP (op, 0));
4139 default:
4140 return FALSE;
4144 /* Like di_operand, but don't accept constants. */
4146 nonimmediate_di_operand (rtx op, enum machine_mode mode)
4148 if (s_register_operand (op, mode))
4149 return TRUE;
4151 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
4152 return FALSE;
4154 if (GET_CODE (op) == SUBREG)
4155 op = SUBREG_REG (op);
4157 if (GET_CODE (op) == MEM)
4158 return memory_address_p (DImode, XEXP (op, 0));
4160 return FALSE;
4163 /* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
4164 Either: REG, SUBREG, CONST_DOUBLE or MEM(DFmode_address).
4165 Note that this disallows MEM(REG+REG), but allows
4166 MEM(PRE/POST_INC/DEC(REG)). */
4168 soft_df_operand (rtx op, enum machine_mode mode)
4170 if (s_register_operand (op, mode))
4171 return TRUE;
4173 if (mode != VOIDmode && GET_MODE (op) != mode)
4174 return FALSE;
4176 if (GET_CODE (op) == SUBREG && CONSTANT_P (SUBREG_REG (op)))
4177 return FALSE;
4179 if (GET_CODE (op) == SUBREG)
4180 op = SUBREG_REG (op);
4182 switch (GET_CODE (op))
4184 case CONST_DOUBLE:
4185 return TRUE;
4187 case MEM:
4188 return memory_address_p (DFmode, XEXP (op, 0));
4190 default:
4191 return FALSE;
4195 /* Like soft_df_operand, but don't accept constants. */
4197 nonimmediate_soft_df_operand (rtx op, enum machine_mode mode)
4199 if (s_register_operand (op, mode))
4200 return TRUE;
4202 if (mode != VOIDmode && GET_MODE (op) != mode)
4203 return FALSE;
4205 if (GET_CODE (op) == SUBREG)
4206 op = SUBREG_REG (op);
4208 if (GET_CODE (op) == MEM)
4209 return memory_address_p (DFmode, XEXP (op, 0));
4210 return FALSE;
4213 /* Return TRUE for valid index operands. */
4215 index_operand (rtx op, enum machine_mode mode)
4217 return (s_register_operand (op, mode)
4218 || (immediate_operand (op, mode)
4219 && (GET_CODE (op) != CONST_INT
4220 || (INTVAL (op) < 4096 && INTVAL (op) > -4096))));
4223 /* Return TRUE for valid shifts by a constant. This also accepts any
4224 power of two on the (somewhat overly relaxed) assumption that the
4225 shift operator in this case was a mult. */
4227 const_shift_operand (rtx op, enum machine_mode mode)
4229 return (power_of_two_operand (op, mode)
4230 || (immediate_operand (op, mode)
4231 && (GET_CODE (op) != CONST_INT
4232 || (INTVAL (op) < 32 && INTVAL (op) > 0))));
4235 /* Return TRUE for arithmetic operators which can be combined with a multiply
4236 (shift). */
4238 shiftable_operator (rtx x, enum machine_mode mode)
4240 enum rtx_code code;
4242 if (GET_MODE (x) != mode)
4243 return FALSE;
4245 code = GET_CODE (x);
4247 return (code == PLUS || code == MINUS
4248 || code == IOR || code == XOR || code == AND);
4251 /* Return TRUE for binary logical operators. */
4253 logical_binary_operator (rtx x, enum machine_mode mode)
4255 enum rtx_code code;
4257 if (GET_MODE (x) != mode)
4258 return FALSE;
4260 code = GET_CODE (x);
4262 return (code == IOR || code == XOR || code == AND);
4265 /* Return TRUE for shift operators. */
4267 shift_operator (rtx x, enum machine_mode mode)
4269 enum rtx_code code;
4271 if (GET_MODE (x) != mode)
4272 return FALSE;
4274 code = GET_CODE (x);
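/* A left shift by a constant may have been canonicalized as a multiply;
e.g. (ashift x 2) appears as (mult x 4), so accept a power-of-two
multiplier here as well. */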
4276 if (code == MULT)
4277 return power_of_two_operand (XEXP (x, 1), mode);
4279 return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
4280 || code == ROTATERT);
4283 /* Return TRUE if x is EQ or NE. */
4285 equality_operator (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
4287 return GET_CODE (x) == EQ || GET_CODE (x) == NE;
4290 /* Return TRUE if x is a comparison operator other than LTGT or UNEQ. */
4292 arm_comparison_operator (rtx x, enum machine_mode mode)
4294 return (comparison_operator (x, mode)
4295 && GET_CODE (x) != LTGT
4296 && GET_CODE (x) != UNEQ);
4299 /* Return TRUE for SMIN SMAX UMIN UMAX operators. */
4301 minmax_operator (rtx x, enum machine_mode mode)
4303 enum rtx_code code = GET_CODE (x);
4305 if (GET_MODE (x) != mode)
4306 return FALSE;
4308 return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
4311 /* Return TRUE if this is the condition code register; if we aren't given
4312 a mode, accept any class CCmode register. */
4314 cc_register (rtx x, enum machine_mode mode)
4316 if (mode == VOIDmode)
4318 mode = GET_MODE (x);
4320 if (GET_MODE_CLASS (mode) != MODE_CC)
4321 return FALSE;
4324 if ( GET_MODE (x) == mode
4325 && GET_CODE (x) == REG
4326 && REGNO (x) == CC_REGNUM)
4327 return TRUE;
4329 return FALSE;
4332 /* Return TRUE if this is the condition code register; if we aren't given
4333 a mode, accept any class CCmode register which indicates a dominance
4334 expression. */
4336 dominant_cc_register (rtx x, enum machine_mode mode)
4338 if (mode == VOIDmode)
4340 mode = GET_MODE (x);
4342 if (GET_MODE_CLASS (mode) != MODE_CC)
4343 return FALSE;
4346 if (mode != CC_DNEmode && mode != CC_DEQmode
4347 && mode != CC_DLEmode && mode != CC_DLTmode
4348 && mode != CC_DGEmode && mode != CC_DGTmode
4349 && mode != CC_DLEUmode && mode != CC_DLTUmode
4350 && mode != CC_DGEUmode && mode != CC_DGTUmode)
4351 return FALSE;
4353 return cc_register (x, mode);
4356 /* Return TRUE if X references a SYMBOL_REF. */
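/* e.g. (const (plus (symbol_ref "foo") (const_int 4))) mentions a symbol,
while (const_int 4) alone does not; we simply walk every sub-rtx. */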
4358 symbol_mentioned_p (rtx x)
4360 const char * fmt;
4361 int i;
4363 if (GET_CODE (x) == SYMBOL_REF)
4364 return 1;
4366 fmt = GET_RTX_FORMAT (GET_CODE (x));
4368 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4370 if (fmt[i] == 'E')
4372 int j;
4374 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4375 if (symbol_mentioned_p (XVECEXP (x, i, j)))
4376 return 1;
4378 else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
4379 return 1;
4382 return 0;
4385 /* Return TRUE if X references a LABEL_REF. */
4387 label_mentioned_p (rtx x)
4389 const char * fmt;
4390 int i;
4392 if (GET_CODE (x) == LABEL_REF)
4393 return 1;
4395 fmt = GET_RTX_FORMAT (GET_CODE (x));
4396 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4398 if (fmt[i] == 'E')
4400 int j;
4402 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4403 if (label_mentioned_p (XVECEXP (x, i, j)))
4404 return 1;
4406 else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
4407 return 1;
4410 return 0;
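/* Map a min/max rtx code to the comparison that selects the first
operand; e.g. (smax a b) keeps A when A >= B, so SMAX maps to GE. */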
4413 enum rtx_code
4414 minmax_code (rtx x)
4416 enum rtx_code code = GET_CODE (x);
4418 if (code == SMAX)
4419 return GE;
4420 else if (code == SMIN)
4421 return LE;
4422 else if (code == UMIN)
4423 return LEU;
4424 else if (code == UMAX)
4425 return GEU;
4427 abort ();
4430 /* Return 1 if memory locations are adjacent. */
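/* e.g. (mem (reg 4)) and (mem (plus (reg 4) (const_int 4))) are
adjacent: the same base register with offsets exactly one word apart. */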
4432 adjacent_mem_locations (rtx a, rtx b)
4434 if ((GET_CODE (XEXP (a, 0)) == REG
4435 || (GET_CODE (XEXP (a, 0)) == PLUS
4436 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
4437 && (GET_CODE (XEXP (b, 0)) == REG
4438 || (GET_CODE (XEXP (b, 0)) == PLUS
4439 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
4441 int val0 = 0, val1 = 0;
4442 int reg0, reg1;
4444 if (GET_CODE (XEXP (a, 0)) == PLUS)
4446 reg0 = REGNO (XEXP (XEXP (a, 0), 0));
4447 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
4449 else
4450 reg0 = REGNO (XEXP (a, 0));
4452 if (GET_CODE (XEXP (b, 0)) == PLUS)
4454 reg1 = REGNO (XEXP (XEXP (b, 0), 0));
4455 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
4457 else
4458 reg1 = REGNO (XEXP (b, 0));
4460 /* Don't accept any offset that will require multiple
4461 instructions to handle, since this would cause the
4462 arith_adjacentmem pattern to output an overlong sequence. */
4463 if (!const_ok_for_op (PLUS, val0) || !const_ok_for_op (PLUS, val1))
4464 return 0;
4466 return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
4468 return 0;
4471 /* Return 1 if OP is a load multiple operation. It is known to be
4472 parallel and the first section will be tested. */
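/* For instance, a two-register ldmia has the rough form:
(parallel [(set (reg:SI 4) (mem:SI (reg:SI 1)))
(set (reg:SI 5) (mem:SI (plus:SI (reg:SI 1) (const_int 4))))]). */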
4474 load_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
4476 HOST_WIDE_INT count = XVECLEN (op, 0);
4477 int dest_regno;
4478 rtx src_addr;
4479 HOST_WIDE_INT i = 1, base = 0;
4480 rtx elt;
4482 if (count <= 1
4483 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
4484 return 0;
4486 /* Check to see if this might be a write-back. */
4487 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
4489 i++;
4490 base = 1;
4492 /* Now check it more carefully. */
4493 if (GET_CODE (SET_DEST (elt)) != REG
4494 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
4495 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
4496 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
4497 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
4498 return 0;
4501 /* Perform a quick check so we don't blow up below. */
4502 if (count <= i
4503 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
4504 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
4505 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
4506 return 0;
4508 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
4509 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);
4511 for (; i < count; i++)
4513 elt = XVECEXP (op, 0, i);
4515 if (GET_CODE (elt) != SET
4516 || GET_CODE (SET_DEST (elt)) != REG
4517 || GET_MODE (SET_DEST (elt)) != SImode
4518 || REGNO (SET_DEST (elt)) != (unsigned int)(dest_regno + i - base)
4519 || GET_CODE (SET_SRC (elt)) != MEM
4520 || GET_MODE (SET_SRC (elt)) != SImode
4521 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
4522 || !rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
4523 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
4524 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
4525 return 0;
4528 return 1;
4531 /* Return 1 if OP is a store multiple operation. It is known to be
4532 parallel and the first section will be tested. */
4534 store_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
4536 HOST_WIDE_INT count = XVECLEN (op, 0);
4537 int src_regno;
4538 rtx dest_addr;
4539 HOST_WIDE_INT i = 1, base = 0;
4540 rtx elt;
4542 if (count <= 1
4543 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
4544 return 0;
4546 /* Check to see if this might be a write-back. */
4547 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
4549 i++;
4550 base = 1;
4552 /* Now check it more carefully. */
4553 if (GET_CODE (SET_DEST (elt)) != REG
4554 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
4555 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
4556 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
4557 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
4558 return 0;
4561 /* Perform a quick check so we don't blow up below. */
4562 if (count <= i
4563 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
4564 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
4565 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
4566 return 0;
4568 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
4569 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);
4571 for (; i < count; i++)
4573 elt = XVECEXP (op, 0, i);
4575 if (GET_CODE (elt) != SET
4576 || GET_CODE (SET_SRC (elt)) != REG
4577 || GET_MODE (SET_SRC (elt)) != SImode
4578 || REGNO (SET_SRC (elt)) != (unsigned int)(src_regno + i - base)
4579 || GET_CODE (SET_DEST (elt)) != MEM
4580 || GET_MODE (SET_DEST (elt)) != SImode
4581 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
4582 || !rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
4583 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
4584 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
4585 return 0;
4588 return 1;
4592 load_multiple_sequence (rtx *operands, int nops, int *regs, int *base,
4593 HOST_WIDE_INT *load_offset)
4595 int unsorted_regs[4];
4596 HOST_WIDE_INT unsorted_offsets[4];
4597 int order[4];
4598 int base_reg = -1;
4599 int i;
4601 /* Can only handle 2, 3, or 4 insns at present,
4602 though could be easily extended if required. */
4603 if (nops < 2 || nops > 4)
4604 abort ();
4606 /* Loop over the operands and check that the memory references are
4607 suitable (i.e. immediate offsets from the same base register). At
4608 the same time, extract the target register, and the memory
4609 offsets. */
4610 for (i = 0; i < nops; i++)
4612 rtx reg;
4613 rtx offset;
4615 /* Convert a subreg of a mem into the mem itself. */
4616 if (GET_CODE (operands[nops + i]) == SUBREG)
4617 operands[nops + i] = alter_subreg (operands + (nops + i));
4619 if (GET_CODE (operands[nops + i]) != MEM)
4620 abort ();
4622 /* Don't reorder volatile memory references; it doesn't seem worth
4623 looking for the case where the order is ok anyway. */
4624 if (MEM_VOLATILE_P (operands[nops + i]))
4625 return 0;
4627 offset = const0_rtx;
4629 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
4630 || (GET_CODE (reg) == SUBREG
4631 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4632 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
4633 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
4634 == REG)
4635 || (GET_CODE (reg) == SUBREG
4636 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4637 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
4638 == CONST_INT)))
4640 if (i == 0)
4642 base_reg = REGNO (reg);
4643 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
4644 ? REGNO (operands[i])
4645 : REGNO (SUBREG_REG (operands[i])));
4646 order[0] = 0;
4648 else
4650 if (base_reg != (int) REGNO (reg))
4651 /* Not addressed from the same base register. */
4652 return 0;
4654 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
4655 ? REGNO (operands[i])
4656 : REGNO (SUBREG_REG (operands[i])));
4657 if (unsorted_regs[i] < unsorted_regs[order[0]])
4658 order[0] = i;
4661 /* If it isn't an integer register, or if it overwrites the
4662 base register but isn't the last insn in the list, then
4663 we can't do this. */
4664 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14
4665 || (i != nops - 1 && unsorted_regs[i] == base_reg))
4666 return 0;
4668 unsorted_offsets[i] = INTVAL (offset);
4670 else
4671 /* Not a suitable memory address. */
4672 return 0;
4675 /* All the useful information has now been extracted from the
4676 operands into unsorted_regs and unsorted_offsets; additionally,
4677 order[0] has been set to the lowest numbered register in the
4678 list. Sort the registers into order, and check that the memory
4679 offsets are ascending and adjacent. */
4681 for (i = 1; i < nops; i++)
4683 int j;
4685 order[i] = order[i - 1];
4686 for (j = 0; j < nops; j++)
4687 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
4688 && (order[i] == order[i - 1]
4689 || unsorted_regs[j] < unsorted_regs[order[i]]))
4690 order[i] = j;
4692 /* Have we found a suitable register? If not, one must be used more
4693 than once. */
4694 if (order[i] == order[i - 1])
4695 return 0;
4697 /* Are the memory offsets adjacent and ascending? */
4698 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
4699 return 0;
4702 if (base)
4704 *base = base_reg;
4706 for (i = 0; i < nops; i++)
4707 regs[i] = unsorted_regs[order[i]];
4709 *load_offset = unsorted_offsets[order[0]];
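/* Choose the addressing variant from the sorted offsets; e.g. offsets
{0, 4, 8} from the base give ldmia, {4, 8, 12} give ldmib,
{-8, -4, 0} give ldmda and {-12, -8, -4} give ldmdb. */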
4712 if (unsorted_offsets[order[0]] == 0)
4713 return 1; /* ldmia */
4715 if (unsorted_offsets[order[0]] == 4)
4716 return 2; /* ldmib */
4718 if (unsorted_offsets[order[nops - 1]] == 0)
4719 return 3; /* ldmda */
4721 if (unsorted_offsets[order[nops - 1]] == -4)
4722 return 4; /* ldmdb */
4724 /* For ARM8,9 & StrongARM, 2 ldr instructions are faster than an ldm
4725 if the offset isn't small enough. The reason 2 ldrs are faster
4726 is because these ARMs are able to do more than one cache access
4727 in a single cycle. The ARM9 and StrongARM have Harvard caches,
4728 whilst the ARM8 has a double bandwidth cache. This means that
4729 these cores can do both an instruction fetch and a data fetch in
4730 a single cycle, so the trick of calculating the address into a
4731 scratch register (one of the result regs) and then doing a load
4732 multiple actually becomes slower (and no smaller in code size).
4733 That is the transformation
4735 ldr rd1, [rbase + offset]
4736 ldr rd2, [rbase + offset + 4]
to
4740 add rd1, rbase, offset
4741 ldmia rd1, {rd1, rd2}
4743 produces worse code -- '3 cycles + any stalls on rd2' instead of
4744 '2 cycles + any stalls on rd2'. On ARMs with only one cache
4745 access per cycle, the first sequence could never complete in less
4746 than 6 cycles, whereas the ldm sequence would only take 5 and
4747 would make better use of sequential accesses if not hitting the
4748 cache.
4750 We cheat here and test 'arm_ld_sched' which we currently know to
4751 only be true for the ARM8, ARM9 and StrongARM. If this ever
4752 changes, then the test below needs to be reworked. */
4753 if (nops == 2 && arm_ld_sched)
4754 return 0;
4756 /* Can't do it without setting up the offset; only do this if it takes
4757 no more than one insn. */
4758 return (const_ok_for_arm (unsorted_offsets[order[0]])
4759 || const_ok_for_arm (-unsorted_offsets[order[0]])) ? 5 : 0;
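/* Emit the assembler for a load-multiple found by load_multiple_sequence;
e.g. registers {r4, r5} at offsets {0, 4} from r0 produce roughly
"ldmia r0, {r4, r5}". */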
4762 const char *
4763 emit_ldm_seq (rtx *operands, int nops)
4765 int regs[4];
4766 int base_reg;
4767 HOST_WIDE_INT offset;
4768 char buf[100];
4769 int i;
4771 switch (load_multiple_sequence (operands, nops, regs, &base_reg, &offset))
4773 case 1:
4774 strcpy (buf, "ldm%?ia\t");
4775 break;
4777 case 2:
4778 strcpy (buf, "ldm%?ib\t");
4779 break;
4781 case 3:
4782 strcpy (buf, "ldm%?da\t");
4783 break;
4785 case 4:
4786 strcpy (buf, "ldm%?db\t");
4787 break;
4789 case 5:
4790 if (offset >= 0)
4791 sprintf (buf, "add%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
4792 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
4793 (long) offset);
4794 else
4795 sprintf (buf, "sub%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
4796 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
4797 (long) -offset);
4798 output_asm_insn (buf, operands);
4799 base_reg = regs[0];
4800 strcpy (buf, "ldm%?ia\t");
4801 break;
4803 default:
4804 abort ();
4807 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
4808 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
4810 for (i = 1; i < nops; i++)
4811 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
4812 reg_names[regs[i]]);
4814 strcat (buf, "}\t%@ phole ldm");
4816 output_asm_insn (buf, operands);
4817 return "";
4821 store_multiple_sequence (rtx *operands, int nops, int *regs, int *base,
4822 HOST_WIDE_INT * load_offset)
4824 int unsorted_regs[4];
4825 HOST_WIDE_INT unsorted_offsets[4];
4826 int order[4];
4827 int base_reg = -1;
4828 int i;
4830 /* Can only handle 2, 3, or 4 insns at present, though could be easily
4831 extended if required. */
4832 if (nops < 2 || nops > 4)
4833 abort ();
4835 /* Loop over the operands and check that the memory references are
4836 suitable (i.e. immediate offsets from the same base register). At
4837 the same time, extract the target register, and the memory
4838 offsets. */
4839 for (i = 0; i < nops; i++)
4841 rtx reg;
4842 rtx offset;
4844 /* Convert a subreg of a mem into the mem itself. */
4845 if (GET_CODE (operands[nops + i]) == SUBREG)
4846 operands[nops + i] = alter_subreg (operands + (nops + i));
4848 if (GET_CODE (operands[nops + i]) != MEM)
4849 abort ();
4851 /* Don't reorder volatile memory references; it doesn't seem worth
4852 looking for the case where the order is ok anyway. */
4853 if (MEM_VOLATILE_P (operands[nops + i]))
4854 return 0;
4856 offset = const0_rtx;
4858 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
4859 || (GET_CODE (reg) == SUBREG
4860 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4861 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
4862 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
4863 == REG)
4864 || (GET_CODE (reg) == SUBREG
4865 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4866 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
4867 == CONST_INT)))
4869 if (i == 0)
4871 base_reg = REGNO (reg);
4872 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
4873 ? REGNO (operands[i])
4874 : REGNO (SUBREG_REG (operands[i])));
4875 order[0] = 0;
4877 else
4879 if (base_reg != (int) REGNO (reg))
4880 /* Not addressed from the same base register. */
4881 return 0;
4883 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
4884 ? REGNO (operands[i])
4885 : REGNO (SUBREG_REG (operands[i])));
4886 if (unsorted_regs[i] < unsorted_regs[order[0]])
4887 order[0] = i;
4890 /* If it isn't an integer register, then we can't do this. */
4891 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14)
4892 return 0;
4894 unsorted_offsets[i] = INTVAL (offset);
4896 else
4897 /* Not a suitable memory address. */
4898 return 0;
4901 /* All the useful information has now been extracted from the
4902 operands into unsorted_regs and unsorted_offsets; additionally,
4903 order[0] has been set to the lowest numbered register in the
4904 list. Sort the registers into order, and check that the memory
4905 offsets are ascending and adjacent. */
4907 for (i = 1; i < nops; i++)
4909 int j;
4911 order[i] = order[i - 1];
4912 for (j = 0; j < nops; j++)
4913 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
4914 && (order[i] == order[i - 1]
4915 || unsorted_regs[j] < unsorted_regs[order[i]]))
4916 order[i] = j;
4918 /* Have we found a suitable register? If not, one must be used more
4919 than once. */
4920 if (order[i] == order[i - 1])
4921 return 0;
4923 /* Are the memory offsets adjacent and ascending? */
4924 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
4925 return 0;
4928 if (base)
4930 *base = base_reg;
4932 for (i = 0; i < nops; i++)
4933 regs[i] = unsorted_regs[order[i]];
4935 *load_offset = unsorted_offsets[order[0]];
4938 if (unsorted_offsets[order[0]] == 0)
4939 return 1; /* stmia */
4941 if (unsorted_offsets[order[0]] == 4)
4942 return 2; /* stmib */
4944 if (unsorted_offsets[order[nops - 1]] == 0)
4945 return 3; /* stmda */
4947 if (unsorted_offsets[order[nops - 1]] == -4)
4948 return 4; /* stmdb */
4950 return 0;
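/* Emit the assembler for a store-multiple, the mirror image of
emit_ldm_seq above; e.g. "stmia r0, {r4, r5}". */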
4953 const char *
4954 emit_stm_seq (rtx *operands, int nops)
4956 int regs[4];
4957 int base_reg;
4958 HOST_WIDE_INT offset;
4959 char buf[100];
4960 int i;
4962 switch (store_multiple_sequence (operands, nops, regs, &base_reg, &offset))
4964 case 1:
4965 strcpy (buf, "stm%?ia\t");
4966 break;
4968 case 2:
4969 strcpy (buf, "stm%?ib\t");
4970 break;
4972 case 3:
4973 strcpy (buf, "stm%?da\t");
4974 break;
4976 case 4:
4977 strcpy (buf, "stm%?db\t");
4978 break;
4980 default:
4981 abort ();
4984 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
4985 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
4987 for (i = 1; i < nops; i++)
4988 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
4989 reg_names[regs[i]]);
4991 strcat (buf, "}\t%@ phole stm");
4993 output_asm_insn (buf, operands);
4994 return "";
4998 multi_register_push (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
5000 if (GET_CODE (op) != PARALLEL
5001 || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
5002 || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
5003 || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != UNSPEC_PUSH_MULT))
5004 return 0;
5006 return 1;
5009 /* Routines for use in generating RTL. */
5012 arm_gen_load_multiple (int base_regno, int count, rtx from, int up,
5013 int write_back, int unchanging_p, int in_struct_p,
5014 int scalar_p)
5016 int i = 0, j;
5017 rtx result;
5018 int sign = up ? 1 : -1;
5019 rtx mem;
5021 /* XScale has load-store double instructions, but they have stricter
5022 alignment requirements than load-store multiple, so we cannot
5023 use them.
5025 For XScale ldm requires 2 + NREGS cycles to complete and blocks
5026 the pipeline until completion.
5028 NREGS CYCLES
1 3
2 4
3 5
4 6
5034 An ldr instruction takes 1-3 cycles, but does not block the
5035 pipeline.
5037 NREGS CYCLES
5038 1 1-3
5039 2 2-6
5040 3 3-9
5041 4 4-12
5043 Best case ldr will always win. However, the more ldr instructions
5044 we issue, the less likely we are to be able to schedule them well.
5045 Using ldr instructions also increases code size.
5047 As a compromise, we use ldr for counts of 1 or 2 regs, and ldm
5048 for counts of 3 or 4 regs. */
5049 if (arm_tune_xscale && count <= 2 && ! optimize_size)
5051 rtx seq;
5053 start_sequence ();
5055 for (i = 0; i < count; i++)
5057 mem = gen_rtx_MEM (SImode, plus_constant (from, i * 4 * sign));
5058 RTX_UNCHANGING_P (mem) = unchanging_p;
5059 MEM_IN_STRUCT_P (mem) = in_struct_p;
5060 MEM_SCALAR_P (mem) = scalar_p;
5061 emit_move_insn (gen_rtx_REG (SImode, base_regno + i), mem);
5064 if (write_back)
5065 emit_move_insn (from, plus_constant (from, count * 4 * sign));
5067 seq = get_insns ();
5068 end_sequence ();
5070 return seq;
5073 result = gen_rtx_PARALLEL (VOIDmode,
5074 rtvec_alloc (count + (write_back ? 1 : 0)));
5075 if (write_back)
5077 XVECEXP (result, 0, 0)
5078 = gen_rtx_SET (GET_MODE (from), from,
5079 plus_constant (from, count * 4 * sign));
5080 i = 1;
5081 count++;
5084 for (j = 0; i < count; i++, j++)
5086 mem = gen_rtx_MEM (SImode, plus_constant (from, j * 4 * sign));
5087 RTX_UNCHANGING_P (mem) = unchanging_p;
5088 MEM_IN_STRUCT_P (mem) = in_struct_p;
5089 MEM_SCALAR_P (mem) = scalar_p;
5090 XVECEXP (result, 0, i)
5091 = gen_rtx_SET (VOIDmode, gen_rtx_REG (SImode, base_regno + j), mem);
5094 return result;
5098 arm_gen_store_multiple (int base_regno, int count, rtx to, int up,
5099 int write_back, int unchanging_p, int in_struct_p,
5100 int scalar_p)
5102 int i = 0, j;
5103 rtx result;
5104 int sign = up ? 1 : -1;
5105 rtx mem;
5107 /* See arm_gen_load_multiple for discussion of
5108 the pros/cons of ldm/stm usage for XScale. */
5109 if (arm_tune_xscale && count <= 2 && ! optimize_size)
5111 rtx seq;
5113 start_sequence ();
5115 for (i = 0; i < count; i++)
5117 mem = gen_rtx_MEM (SImode, plus_constant (to, i * 4 * sign));
5118 RTX_UNCHANGING_P (mem) = unchanging_p;
5119 MEM_IN_STRUCT_P (mem) = in_struct_p;
5120 MEM_SCALAR_P (mem) = scalar_p;
5121 emit_move_insn (mem, gen_rtx_REG (SImode, base_regno + i));
5124 if (write_back)
5125 emit_move_insn (to, plus_constant (to, count * 4 * sign));
5127 seq = get_insns ();
5128 end_sequence ();
5130 return seq;
5133 result = gen_rtx_PARALLEL (VOIDmode,
5134 rtvec_alloc (count + (write_back ? 1 : 0)));
5135 if (write_back)
5137 XVECEXP (result, 0, 0)
5138 = gen_rtx_SET (GET_MODE (to), to,
5139 plus_constant (to, count * 4 * sign));
5140 i = 1;
5141 count++;
5144 for (j = 0; i < count; i++, j++)
5146 mem = gen_rtx_MEM (SImode, plus_constant (to, j * 4 * sign));
5147 RTX_UNCHANGING_P (mem) = unchanging_p;
5148 MEM_IN_STRUCT_P (mem) = in_struct_p;
5149 MEM_SCALAR_P (mem) = scalar_p;
5151 XVECEXP (result, 0, i)
5152 = gen_rtx_SET (VOIDmode, mem, gen_rtx_REG (SImode, base_regno + j));
5155 return result;
5159 arm_gen_movstrqi (rtx *operands)
5161 HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
5162 int i;
5163 rtx src, dst;
5164 rtx st_src, st_dst, fin_src, fin_dst;
5165 rtx part_bytes_reg = NULL;
5166 rtx mem;
5167 int dst_unchanging_p, dst_in_struct_p, src_unchanging_p, src_in_struct_p;
5168 int dst_scalar_p, src_scalar_p;
5170 if (GET_CODE (operands[2]) != CONST_INT
5171 || GET_CODE (operands[3]) != CONST_INT
5172 || INTVAL (operands[2]) > 64
5173 || INTVAL (operands[3]) & 3)
5174 return 0;
5176 st_dst = XEXP (operands[0], 0);
5177 st_src = XEXP (operands[1], 0);
5179 dst_unchanging_p = RTX_UNCHANGING_P (operands[0]);
5180 dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
5181 dst_scalar_p = MEM_SCALAR_P (operands[0]);
5182 src_unchanging_p = RTX_UNCHANGING_P (operands[1]);
5183 src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);
5184 src_scalar_p = MEM_SCALAR_P (operands[1]);
5186 fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
5187 fin_src = src = copy_to_mode_reg (SImode, st_src);
5189 in_words_to_go = ARM_NUM_INTS (INTVAL (operands[2]));
5190 out_words_to_go = INTVAL (operands[2]) / 4;
5191 last_bytes = INTVAL (operands[2]) & 3;
5193 if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
5194 part_bytes_reg = gen_rtx_REG (SImode, (in_words_to_go - 1) & 3);
5196 for (i = 0; in_words_to_go >= 2; i+=4)
5198 if (in_words_to_go > 4)
5199 emit_insn (arm_gen_load_multiple (0, 4, src, TRUE, TRUE,
5200 src_unchanging_p,
5201 src_in_struct_p,
5202 src_scalar_p));
5203 else
5204 emit_insn (arm_gen_load_multiple (0, in_words_to_go, src, TRUE,
5205 FALSE, src_unchanging_p,
5206 src_in_struct_p, src_scalar_p));
5208 if (out_words_to_go)
5210 if (out_words_to_go > 4)
5211 emit_insn (arm_gen_store_multiple (0, 4, dst, TRUE, TRUE,
5212 dst_unchanging_p,
5213 dst_in_struct_p,
5214 dst_scalar_p));
5215 else if (out_words_to_go != 1)
5216 emit_insn (arm_gen_store_multiple (0, out_words_to_go,
5217 dst, TRUE,
5218 (last_bytes == 0
5219 ? FALSE : TRUE),
5220 dst_unchanging_p,
5221 dst_in_struct_p,
5222 dst_scalar_p));
5223 else
5225 mem = gen_rtx_MEM (SImode, dst);
5226 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
5227 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
5228 MEM_SCALAR_P (mem) = dst_scalar_p;
5229 emit_move_insn (mem, gen_rtx_REG (SImode, 0));
5230 if (last_bytes != 0)
5231 emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
5235 in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
5236 out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
5239 /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do. */
5240 if (out_words_to_go)
5242 rtx sreg;
5244 mem = gen_rtx_MEM (SImode, src);
5245 RTX_UNCHANGING_P (mem) = src_unchanging_p;
5246 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
5247 MEM_SCALAR_P (mem) = src_scalar_p;
5248 emit_move_insn (sreg = gen_reg_rtx (SImode), mem);
5249 emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));
5251 mem = gen_rtx_MEM (SImode, dst);
5252 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
5253 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
5254 MEM_SCALAR_P (mem) = dst_scalar_p;
5255 emit_move_insn (mem, sreg);
5256 emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
5257 in_words_to_go--;
5259 if (in_words_to_go) /* Sanity check */
5260 abort ();
5263 if (in_words_to_go)
5265 if (in_words_to_go < 0)
5266 abort ();
5268 mem = gen_rtx_MEM (SImode, src);
5269 RTX_UNCHANGING_P (mem) = src_unchanging_p;
5270 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
5271 MEM_SCALAR_P (mem) = src_scalar_p;
5272 part_bytes_reg = copy_to_mode_reg (SImode, mem);
5275 if (last_bytes && part_bytes_reg == NULL)
5276 abort ();
5278 if (BYTES_BIG_ENDIAN && last_bytes)
5280 rtx tmp = gen_reg_rtx (SImode);
5282 /* The bytes we want are in the top end of the word. */
5283 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
5284 GEN_INT (8 * (4 - last_bytes))));
5285 part_bytes_reg = tmp;
5287 while (last_bytes)
5289 mem = gen_rtx_MEM (QImode, plus_constant (dst, last_bytes - 1));
5290 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
5291 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
5292 MEM_SCALAR_P (mem) = dst_scalar_p;
5293 emit_move_insn (mem, gen_lowpart (QImode, part_bytes_reg));
5295 if (--last_bytes)
5297 tmp = gen_reg_rtx (SImode);
5298 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
5299 part_bytes_reg = tmp;
5304 else
5306 if (last_bytes > 1)
5308 mem = gen_rtx_MEM (HImode, dst);
5309 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
5310 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
5311 MEM_SCALAR_P (mem) = dst_scalar_p;
5312 emit_move_insn (mem, gen_lowpart (HImode, part_bytes_reg));
5313 last_bytes -= 2;
5314 if (last_bytes)
5316 rtx tmp = gen_reg_rtx (SImode);
5318 emit_insn (gen_addsi3 (dst, dst, GEN_INT (2)));
5319 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (16)));
5320 part_bytes_reg = tmp;
5324 if (last_bytes)
5326 mem = gen_rtx_MEM (QImode, dst);
5327 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
5328 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
5329 MEM_SCALAR_P (mem) = dst_scalar_p;
5330 emit_move_insn (mem, gen_lowpart (QImode, part_bytes_reg));
5334 return 1;
5337 /* Generate a memory reference for a half word, such that it will be loaded
5338 into the top 16 bits of the word. We can assume that the address is
5339 known to be alignable and of the form reg, or plus (reg, const). */
5342 arm_gen_rotated_half_load (rtx memref)
5344 HOST_WIDE_INT offset = 0;
5345 rtx base = XEXP (memref, 0);
5347 if (GET_CODE (base) == PLUS)
5349 offset = INTVAL (XEXP (base, 1));
5350 base = XEXP (base, 0);
5353 /* If we aren't allowed to generate unaligned addresses, then fail. */
5354 if (TARGET_MMU_TRAPS
5355 && ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 0)))
5356 return NULL;
5358 base = gen_rtx_MEM (SImode, plus_constant (base, offset & ~2));
5360 if ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 2))
5361 return base;
5363 return gen_rtx_ROTATE (SImode, base, GEN_INT (16));
5366 /* Select a dominance comparison mode if possible for a test of the general
5367 form (OP (COND_OR (X) (Y)) (const_int 0)). We support three forms.
5368 COND_OR == DOM_CC_X_AND_Y => (X && Y)
5369 COND_OR == DOM_CC_NX_OR_Y => ((! X) || Y)
5370 COND_OR == DOM_CC_X_OR_Y => (X || Y)
5371 In all cases OP will be either EQ or NE, but we don't need to know which
5372 here. If we are unable to support a dominance comparison we return
5373 CC mode. This will then fail to match for the RTL expressions that
5374 generate this call. */
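/* For example, for (eq (a) (b)) || (ge (c) (d)) the two conditions are
compatible (EQ implies GE), so after the conditional compare the whole
expression can be tested as a single GE: CC_DGEmode. */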
5375 enum machine_mode
5376 arm_select_dominance_cc_mode (rtx x, rtx y, HOST_WIDE_INT cond_or)
5378 enum rtx_code cond1, cond2;
5379 int swapped = 0;
5381 /* Currently we will probably get the wrong result if the individual
5382 comparisons are not simple. This also ensures that it is safe to
5383 reverse a comparison if necessary. */
5384 if ((arm_select_cc_mode (cond1 = GET_CODE (x), XEXP (x, 0), XEXP (x, 1))
5385 != CCmode)
5386 || (arm_select_cc_mode (cond2 = GET_CODE (y), XEXP (y, 0), XEXP (y, 1))
5387 != CCmode))
5388 return CCmode;
5390 /* The if_then_else variant of this tests the second condition if the
5391 first passes, but is true if the first fails. Reverse the first
5392 condition to get a true "inclusive-or" expression. */
5393 if (cond_or == DOM_CC_NX_OR_Y)
5394 cond1 = reverse_condition (cond1);
5396 /* If the comparisons are not equal, and one doesn't dominate the other,
5397 then we can't do this. */
5398 if (cond1 != cond2
5399 && !comparison_dominates_p (cond1, cond2)
5400 && (swapped = 1, !comparison_dominates_p (cond2, cond1)))
5401 return CCmode;
5403 if (swapped)
5405 enum rtx_code temp = cond1;
5406 cond1 = cond2;
5407 cond2 = temp;
5410 switch (cond1)
5412 case EQ:
5413 if (cond2 == EQ || cond_or == DOM_CC_X_AND_Y)
5414 return CC_DEQmode;
5416 switch (cond2)
5418 case LE: return CC_DLEmode;
5419 case LEU: return CC_DLEUmode;
5420 case GE: return CC_DGEmode;
5421 case GEU: return CC_DGEUmode;
5422 default: break;
5425 break;
5427 case LT:
5428 if (cond2 == LT || cond_or == DOM_CC_X_AND_Y)
5429 return CC_DLTmode;
5430 if (cond2 == LE)
5431 return CC_DLEmode;
5432 if (cond2 == NE)
5433 return CC_DNEmode;
5434 break;
5436 case GT:
5437 if (cond2 == GT || cond_or == DOM_CC_X_AND_Y)
5438 return CC_DGTmode;
5439 if (cond2 == GE)
5440 return CC_DGEmode;
5441 if (cond2 == NE)
5442 return CC_DNEmode;
5443 break;
5445 case LTU:
5446 if (cond2 == LTU || cond_or == DOM_CC_X_AND_Y)
5447 return CC_DLTUmode;
5448 if (cond2 == LEU)
5449 return CC_DLEUmode;
5450 if (cond2 == NE)
5451 return CC_DNEmode;
5452 break;
5454 case GTU:
5455 if (cond2 == GTU || cond_or == DOM_CC_X_AND_Y)
5456 return CC_DGTUmode;
5457 if (cond2 == GEU)
5458 return CC_DGEUmode;
5459 if (cond2 == NE)
5460 return CC_DNEmode;
5461 break;
5463 /* The remaining cases only occur when both comparisons are the
5464 same. */
5465 case NE:
5466 return CC_DNEmode;
5468 case LE:
5469 return CC_DLEmode;
5471 case GE:
5472 return CC_DGEmode;
5474 case LEU:
5475 return CC_DLEUmode;
5477 case GEU:
5478 return CC_DGEUmode;
5480 default:
5481 break;
5484 abort ();
5487 enum machine_mode
5488 arm_select_cc_mode (enum rtx_code op, rtx x, rtx y)
5490 /* All floating point compares return CCFP if it is an equality
5491 comparison, and CCFPE otherwise. */
5492 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
5494 switch (op)
5496 case EQ:
5497 case NE:
5498 case UNORDERED:
5499 case ORDERED:
5500 case UNLT:
5501 case UNLE:
5502 case UNGT:
5503 case UNGE:
5504 case UNEQ:
5505 case LTGT:
5506 return CCFPmode;
5508 case LT:
5509 case LE:
5510 case GT:
5511 case GE:
5512 if (TARGET_CIRRUS)
5513 return CCFPmode;
5514 return CCFPEmode;
5516 default:
5517 abort ();
5521 /* A compare with a shifted operand. Because of canonicalization, the
5522 comparison will have to be swapped when we emit the assembler. */
5523 if (GET_MODE (y) == SImode && GET_CODE (y) == REG
5524 && (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
5525 || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
5526 || GET_CODE (x) == ROTATERT))
5527 return CC_SWPmode;
5529 /* This is a special case that is used by combine to allow a
5530 comparison of a shifted byte load to be split into a zero-extend
5531 followed by a comparison of the shifted integer (only valid for
5532 equalities and unsigned inequalities). */
5533 if (GET_MODE (x) == SImode
5534 && GET_CODE (x) == ASHIFT
5535 && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 24
5536 && GET_CODE (XEXP (x, 0)) == SUBREG
5537 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == MEM
5538 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == QImode
5539 && (op == EQ || op == NE
5540 || op == GEU || op == GTU || op == LTU || op == LEU)
5541 && GET_CODE (y) == CONST_INT)
5542 return CC_Zmode;
5544 /* A construct for a conditional compare: if the false arm contains
5545 0, then both conditions must be true; otherwise either condition
5546 must be true. Not all conditions are possible, so CCmode is
5547 returned if it can't be done. */
5548 if (GET_CODE (x) == IF_THEN_ELSE
5549 && (XEXP (x, 2) == const0_rtx
5550 || XEXP (x, 2) == const1_rtx)
5551 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
5552 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
5553 return arm_select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
5554 INTVAL (XEXP (x, 2)));
5556 /* Alternate canonicalizations of the above. These are somewhat cleaner. */
5557 if (GET_CODE (x) == AND
5558 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
5559 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
5560 return arm_select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
5561 DOM_CC_X_AND_Y);
5563 if (GET_CODE (x) == IOR
5564 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
5565 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
5566 return arm_select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
5567 DOM_CC_X_OR_Y);
5569 /* An operation that sets the condition codes as a side-effect does not
5570 set the V flag correctly, so we can only use comparisons where
5571 this doesn't matter. (For LT and GE we can use "mi" and "pl"
5572 instead.) */
5573 if (GET_MODE (x) == SImode
5574 && y == const0_rtx
5575 && (op == EQ || op == NE || op == LT || op == GE)
5576 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
5577 || GET_CODE (x) == AND || GET_CODE (x) == IOR
5578 || GET_CODE (x) == XOR || GET_CODE (x) == MULT
5579 || GET_CODE (x) == NOT || GET_CODE (x) == NEG
5580 || GET_CODE (x) == LSHIFTRT
5581 || GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
5582 || GET_CODE (x) == ROTATERT || GET_CODE (x) == ZERO_EXTRACT))
5583 return CC_NOOVmode;
5585 if (GET_MODE (x) == QImode && (op == EQ || op == NE))
5586 return CC_Zmode;
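/* (ltu (plus x y) x) tests for unsigned overflow of the addition,
i.e. the carry flag, hence CC_Cmode below. */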
5588 if (GET_MODE (x) == SImode && (op == LTU || op == GEU)
5589 && GET_CODE (x) == PLUS
5590 && (rtx_equal_p (XEXP (x, 0), y) || rtx_equal_p (XEXP (x, 1), y)))
5591 return CC_Cmode;
5593 return CCmode;
5596 /* X and Y are two things to compare using CODE. Emit the compare insn and
5597 return the rtx for register 0 in the proper mode. FP means this is a
5598 floating point compare: I don't think that it is needed on the arm. */
5600 arm_gen_compare_reg (enum rtx_code code, rtx x, rtx y)
5602 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
5603 rtx cc_reg = gen_rtx_REG (mode, CC_REGNUM);
5605 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
5606 gen_rtx_COMPARE (mode, x, y)));
5608 return cc_reg;
5611 /* Generate a sequence of insns that will generate the correct return
5612 address mask depending on the physical architecture that the program
5613 is running on. */
5615 arm_gen_return_addr_mask (void)
5617 rtx reg = gen_reg_rtx (Pmode);
5619 emit_insn (gen_return_addr_mask (reg));
5620 return reg;
5623 void
5624 arm_reload_in_hi (rtx *operands)
5626 rtx ref = operands[1];
5627 rtx base, scratch;
5628 HOST_WIDE_INT offset = 0;
5630 if (GET_CODE (ref) == SUBREG)
5632 offset = SUBREG_BYTE (ref);
5633 ref = SUBREG_REG (ref);
5636 if (GET_CODE (ref) == REG)
5638 /* We have a pseudo which has been spilt onto the stack; there
5639 are two cases here: the first where there is a simple
5640 stack-slot replacement and a second where the stack-slot is
5641 out of range, or is used as a subreg. */
5642 if (reg_equiv_mem[REGNO (ref)])
5644 ref = reg_equiv_mem[REGNO (ref)];
5645 base = find_replacement (&XEXP (ref, 0));
5647 else
5648 /* The slot is out of range, or was dressed up in a SUBREG. */
5649 base = reg_equiv_address[REGNO (ref)];
5651 else
5652 base = find_replacement (&XEXP (ref, 0));
5654 /* Handle the case where the address is too complex to be offset by 1. */
5655 if (GET_CODE (base) == MINUS
5656 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
5658 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5660 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
5661 base = base_plus;
5663 else if (GET_CODE (base) == PLUS)
5665 /* The addend must be CONST_INT, or we would have dealt with it above. */
5666 HOST_WIDE_INT hi, lo;
5668 offset += INTVAL (XEXP (base, 1));
5669 base = XEXP (base, 0);
5671 /* Rework the address into a legal sequence of insns. */
5672 /* Valid range for lo is -4095 -> 4095 */
5673 lo = (offset >= 0
5674 ? (offset & 0xfff)
5675 : -((-offset) & 0xfff));
5677 /* Corner case: if lo is the max offset then we would be out of range
5678 once we have added the additional 1 below, so bump the msb into the
5679 pre-loading insn(s). */
5680 if (lo == 4095)
5681 lo &= 0x7ff;
5683 hi = ((((offset - lo) & (HOST_WIDE_INT) 0xffffffff)
5684 ^ (HOST_WIDE_INT) 0x80000000)
5685 - (HOST_WIDE_INT) 0x80000000);
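/* e.g. an offset of 0x1234 splits into lo = 0x234, hi = 0x1000; hi is
added to the base register first, leaving lo for the load's
immediate field. */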
5687 if (hi + lo != offset)
5688 abort ();
5690 if (hi != 0)
5692 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5694 /* Get the base address; addsi3 knows how to handle constants
5695 that require more than one insn. */
5696 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
5697 base = base_plus;
5698 offset = lo;
5702 /* Operands[2] may overlap operands[0] (though it won't overlap
5703 operands[1]); that's why we asked for a DImode reg -- so we can
5704 use the bit that does not overlap. */
5705 if (REGNO (operands[2]) == REGNO (operands[0]))
5706 scratch = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5707 else
5708 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
5710 emit_insn (gen_zero_extendqisi2 (scratch,
5711 gen_rtx_MEM (QImode,
5712 plus_constant (base,
5713 offset))));
5714 emit_insn (gen_zero_extendqisi2 (gen_rtx_SUBREG (SImode, operands[0], 0),
5715 gen_rtx_MEM (QImode,
5716 plus_constant (base,
5717 offset + 1))));
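/* Combine the two bytes; on little-endian the byte at offset + 1 is
the high byte, so the sequence is roughly ldrb, ldrb, then
orr out, scratch, out, lsl #8. */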
5718 if (!BYTES_BIG_ENDIAN)
5719 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
5720 gen_rtx_IOR (SImode,
5721 gen_rtx_ASHIFT
5722 (SImode,
5723 gen_rtx_SUBREG (SImode, operands[0], 0),
5724 GEN_INT (8)),
5725 scratch)));
5726 else
5727 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
5728 gen_rtx_IOR (SImode,
5729 gen_rtx_ASHIFT (SImode, scratch,
5730 GEN_INT (8)),
5731 gen_rtx_SUBREG (SImode, operands[0],
5732 0))));
5735 /* Handle storing a half-word to memory during reload by synthesizing it as two
5736 byte stores. Take care not to clobber the input values until after we
5737 have moved them somewhere safe. This code assumes that if the DImode
5738 scratch in operands[2] overlaps either the input value or output address
5739 in some way, then that value must die in this insn (we absolutely need
5740 two scratch registers for some corner cases). */
5741 void
5742 arm_reload_out_hi (rtx *operands)
5744 rtx ref = operands[0];
5745 rtx outval = operands[1];
5746 rtx base, scratch;
5747 HOST_WIDE_INT offset = 0;
5749 if (GET_CODE (ref) == SUBREG)
5751 offset = SUBREG_BYTE (ref);
5752 ref = SUBREG_REG (ref);
5755 if (GET_CODE (ref) == REG)
5757 /* We have a pseudo which has been spilt onto the stack; there
5758 are two cases here: the first where there is a simple
5759 stack-slot replacement and a second where the stack-slot is
5760 out of range, or is used as a subreg. */
5761 if (reg_equiv_mem[REGNO (ref)])
5763 ref = reg_equiv_mem[REGNO (ref)];
5764 base = find_replacement (&XEXP (ref, 0));
5766 else
5767 /* The slot is out of range, or was dressed up in a SUBREG. */
5768 base = reg_equiv_address[REGNO (ref)];
5770 else
5771 base = find_replacement (&XEXP (ref, 0));
5773 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
5775 /* Handle the case where the address is too complex to be offset by 1. */
5776 if (GET_CODE (base) == MINUS
5777 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
5779 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5781 /* Be careful not to destroy OUTVAL. */
5782 if (reg_overlap_mentioned_p (base_plus, outval))
5784 /* Updating base_plus might destroy outval; see if we can
5785 swap the scratch and base_plus. */
5786 if (!reg_overlap_mentioned_p (scratch, outval))
5788 rtx tmp = scratch;
5789 scratch = base_plus;
5790 base_plus = tmp;
5792 else
5794 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
5796 /* Be conservative and copy OUTVAL into the scratch now;
5797 this should only be necessary if outval is a subreg
5798 of something larger than a word. */
5799 /* XXX Might this clobber base? I can't see how it can,
5800 since scratch is known to overlap with OUTVAL, and
5801 must be wider than a word. */
5802 emit_insn (gen_movhi (scratch_hi, outval));
5803 outval = scratch_hi;
5807 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
5808 base = base_plus;
5810 else if (GET_CODE (base) == PLUS)
5812 /* The addend must be CONST_INT, or we would have dealt with it above. */
5813 HOST_WIDE_INT hi, lo;
5815 offset += INTVAL (XEXP (base, 1));
5816 base = XEXP (base, 0);
5818 /* Rework the address into a legal sequence of insns. */
5819 /* Valid range for lo is -4095 -> 4095 */
5820 lo = (offset >= 0
5821 ? (offset & 0xfff)
5822 : -((-offset) & 0xfff));
5824 /* Corner case: if lo is the max offset then we would be out of range
5825 once we have added the additional 1 below, so bump the msb into the
5826 pre-loading insn(s). */
5827 if (lo == 4095)
5828 lo &= 0x7ff;
5830 hi = ((((offset - lo) & (HOST_WIDE_INT) 0xffffffff)
5831 ^ (HOST_WIDE_INT) 0x80000000)
5832 - (HOST_WIDE_INT) 0x80000000);
5834 if (hi + lo != offset)
5835 abort ();
5837 if (hi != 0)
5839 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5841 /* Be careful not to destroy OUTVAL. */
5842 if (reg_overlap_mentioned_p (base_plus, outval))
5844 /* Updating base_plus might destroy outval; see if we
5845 can swap the scratch and base_plus. */
5846 if (!reg_overlap_mentioned_p (scratch, outval))
5848 rtx tmp = scratch;
5849 scratch = base_plus;
5850 base_plus = tmp;
5852 else
5854 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
5856 /* Be conservative and copy outval into scratch now;
5857 this should only be necessary if outval is a
5858 subreg of something larger than a word. */
5859 /* XXX Might this clobber base? I can't see how it
5860 can, since scratch is known to overlap with
5861 outval. */
5862 emit_insn (gen_movhi (scratch_hi, outval));
5863 outval = scratch_hi;
5867 /* Get the base address; addsi3 knows how to handle constants
5868 that require more than one insn. */
5869 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
5870 base = base_plus;
5871 offset = lo;
5875 if (BYTES_BIG_ENDIAN)
5877 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
5878 plus_constant (base, offset + 1)),
5879 gen_lowpart (QImode, outval)));
5880 emit_insn (gen_lshrsi3 (scratch,
5881 gen_rtx_SUBREG (SImode, outval, 0),
5882 GEN_INT (8)));
5883 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
5884 gen_lowpart (QImode, scratch)));
5886 else
5888 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
5889 gen_lowpart (QImode, outval)));
5890 emit_insn (gen_lshrsi3 (scratch,
5891 gen_rtx_SUBREG (SImode, outval, 0),
5892 GEN_INT (8)));
5893 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
5894 plus_constant (base, offset + 1)),
5895 gen_lowpart (QImode, scratch)));
5899 /* Print a symbolic form of X to the debug file, F. */
5900 static void
5901 arm_print_value (FILE *f, rtx x)
5903 switch (GET_CODE (x))
5905 case CONST_INT:
5906 fprintf (f, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
5907 return;
5909 case CONST_DOUBLE:
5910 fprintf (f, "<0x%lx,0x%lx>", (long)XWINT (x, 2), (long)XWINT (x, 3));
5911 return;
5913 case CONST_VECTOR:
5915 int i;
5917 fprintf (f, "<");
5918 for (i = 0; i < CONST_VECTOR_NUNITS (x); i++)
5920 fprintf (f, HOST_WIDE_INT_PRINT_HEX, INTVAL (CONST_VECTOR_ELT (x, i)));
5921 if (i < (CONST_VECTOR_NUNITS (x) - 1))
5922 fputc (',', f);
5924 fprintf (f, ">");
5926 return;
5928 case CONST_STRING:
5929 fprintf (f, "\"%s\"", XSTR (x, 0));
5930 return;
5932 case SYMBOL_REF:
5933 fprintf (f, "`%s'", XSTR (x, 0));
5934 return;
5936 case LABEL_REF:
5937 fprintf (f, "L%d", INSN_UID (XEXP (x, 0)));
5938 return;
5940 case CONST:
5941 arm_print_value (f, XEXP (x, 0));
5942 return;
5944 case PLUS:
5945 arm_print_value (f, XEXP (x, 0));
5946 fprintf (f, "+");
5947 arm_print_value (f, XEXP (x, 1));
5948 return;
5950 case PC:
5951 fprintf (f, "pc");
5952 return;
5954 default:
5955 fprintf (f, "????");
5956 return;
5960 /* Routines for manipulation of the constant pool. */
5962 /* ARM instructions cannot load a large constant directly into a
5963 register; such a constant has to come from a pc-relative load. The
5964 constant must therefore be placed in the addressable range of the
5965 pc-relative load. Depending on the precise pc-relative load
5966 instruction the range is somewhere between 256 bytes and 4k. This
5967 means that we often have to dump a constant inside a function, and
5968 generate code to branch around it.
5970 It is important to minimize this, since the branches will slow
5971 things down and make the code larger.
5973 Normally we can hide the table after an existing unconditional
5974 branch so that there is no interruption of the flow, but in the
5975 worst case the code looks like this:
5977 ldr rn, L1
5978 ...
5979 b L2
5980 align
5981 L1: .long value
5982 L2:
5983 ...
5985 ldr rn, L3
5986 ...
5987 b L4
5988 align
5989 L3: .long value
5990 L4:
5991 ...
5993 We fix this by performing a scan after scheduling, which notices
5994 which instructions need to have their operands fetched from the
5995 constant table and builds the table.
5997 The algorithm starts by building a table of all the constants that
5998 need fixing up and all the natural barriers in the function (places
5999 where a constant table can be dropped without breaking the flow).
6000 For each fixup we note how far the pc-relative replacement will be
6001 able to reach and the offset of the instruction into the function.
6003 Having built the table we then group the fixes together to form
6004 tables that are as large as possible (subject to addressing
6005 constraints) and emit each table of constants after the last
6006 barrier that is within range of all the instructions in the group.
6007 If a group does not contain a barrier, then we forcibly create one
6008 by inserting a jump instruction into the flow. Once the table has
6009 been inserted, the insns are then modified to reference the
6010 relevant entry in the pool.
6012 Possible enhancements to the algorithm (not implemented) are:
6014 1) For some processors and object formats, there may be benefit in
6015 aligning the pools to the start of cache lines; this alignment
6016 would need to be taken into account when calculating addressability
6017 of a pool. */
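/* Illustrative sketch of the addressing windows (added commentary): a
   fix for an insn at ADDRESS whose pc-relative load can reach FORWARDS
   bytes ahead and BACKWARDS bytes behind may use any pool placed within
   [ADDRESS - BACKWARDS, ADDRESS + FORWARDS]; the grouping step collects
   fixes whose windows all cover a common barrier and dumps one pool
   there.  */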
6019 /* These typedefs are located at the start of this file, so that
6020 they can be used in the prototypes there. This comment is to
6021 remind readers of that fact so that the following structures
6022 can be understood more easily.
6024 typedef struct minipool_node Mnode;
6025 typedef struct minipool_fixup Mfix; */
6027 struct minipool_node
6029 /* Doubly linked chain of entries. */
6030 Mnode * next;
6031 Mnode * prev;
6032 /* The maximum offset into the code that this entry can be placed. While
6033 pushing fixes for forward references, all entries are sorted in order
6034 of increasing max_address. */
6035 HOST_WIDE_INT max_address;
6036 /* Similarly for an entry inserted for a backwards ref. */
6037 HOST_WIDE_INT min_address;
6038 /* The number of fixes referencing this entry. This can become zero
6039 if we "unpush" an entry. In this case we ignore the entry when we
6040 come to emit the code. */
6041 int refcount;
6042 /* The offset from the start of the minipool. */
6043 HOST_WIDE_INT offset;
6044 /* The value in the table. */
6045 rtx value;
6046 /* The mode of value. */
6047 enum machine_mode mode;
6048 /* The size of the value. With iWMMXt enabled
6049 sizes > 4 also imply an alignment of 8-bytes. */
6050 int fix_size;
6053 struct minipool_fixup
6055 Mfix * next;			/* Chain of fixups, in address order. */
6056 rtx insn;			/* The insn requiring the fixup. */
6057 HOST_WIDE_INT address;		/* Offset of the insn from function start. */
6058 rtx * loc;			/* Pointer to the operand that needs fixing. */
6059 enum machine_mode mode;		/* Mode of the constant. */
6060 int fix_size;			/* Size of the pool entry (see MINIPOOL_FIX_SIZE). */
6061 rtx value;			/* The constant that must be loaded. */
6062 Mnode * minipool;		/* The pool entry assigned to this fix. */
6063 HOST_WIDE_INT forwards;		/* Forward reach of the pc-relative load. */
6064 HOST_WIDE_INT backwards;	/* Backward reach of the pc-relative load. */
6067 /* Fixes less than a word need padding out to a word boundary. */
6068 #define MINIPOOL_FIX_SIZE(mode) \
6069 (GET_MODE_SIZE ((mode)) >= 4 ? GET_MODE_SIZE ((mode)) : 4)
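/* For example, MINIPOOL_FIX_SIZE (HImode) pads a 2-byte value out to 4,
   while MINIPOOL_FIX_SIZE (DImode) is 8.  */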
6071 static Mnode * minipool_vector_head;
6072 static Mnode * minipool_vector_tail;
6073 static rtx minipool_vector_label;
6075 /* The linked list of all minipool fixes required for this function. */
6076 Mfix * minipool_fix_head;
6077 Mfix * minipool_fix_tail;
6078 /* The fix entry for the current minipool, once it has been placed. */
6079 Mfix * minipool_barrier;
6081 /* Determines if INSN is the start of a jump table. Returns the end
6082 of the TABLE or NULL_RTX. */
6083 static rtx
6084 is_jump_table (rtx insn)
6086 rtx table;
6088 if (GET_CODE (insn) == JUMP_INSN
6089 && JUMP_LABEL (insn) != NULL
6090 && ((table = next_real_insn (JUMP_LABEL (insn)))
6091 == next_real_insn (insn))
6092 && table != NULL
6093 && GET_CODE (table) == JUMP_INSN
6094 && (GET_CODE (PATTERN (table)) == ADDR_VEC
6095 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
6096 return table;
6098 return NULL_RTX;
6101 #ifndef JUMP_TABLES_IN_TEXT_SECTION
6102 #define JUMP_TABLES_IN_TEXT_SECTION 0
6103 #endif
6105 static HOST_WIDE_INT
6106 get_jump_table_size (rtx insn)
6108 /* ADDR_VECs only take room if read-only data goes into the text
6109 section. */
6110 if (JUMP_TABLES_IN_TEXT_SECTION
6111 #if !defined(READONLY_DATA_SECTION) && !defined(READONLY_DATA_SECTION_ASM_OP)
6112 || 1
6113 #endif
6116 rtx body = PATTERN (insn);
6117 int elt = GET_CODE (body) == ADDR_DIFF_VEC ? 1 : 0;
6119 return GET_MODE_SIZE (GET_MODE (body)) * XVECLEN (body, elt);
6122 return 0;
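/* For example (illustrative): an ADDR_DIFF_VEC in HImode with ten
   entries occupies 2 * 10 = 20 bytes when jump tables live in the text
   section, and contributes nothing when they are placed in a read-only
   data section.  */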
6125 /* Move a minipool fix MP from its current location to before MAX_MP.
6126 If MAX_MP is NULL, then MP doesn't need moving, but the addressing
6127 constraints may need updating. */
6128 static Mnode *
6129 move_minipool_fix_forward_ref (Mnode *mp, Mnode *max_mp,
6130 HOST_WIDE_INT max_address)
6132 /* This should never be true and the code below assumes these are
6133 different. */
6134 if (mp == max_mp)
6135 abort ();
6137 if (max_mp == NULL)
6139 if (max_address < mp->max_address)
6140 mp->max_address = max_address;
6142 else
6144 if (max_address > max_mp->max_address - mp->fix_size)
6145 mp->max_address = max_mp->max_address - mp->fix_size;
6146 else
6147 mp->max_address = max_address;
6149 /* Unlink MP from its current position. Since max_mp is non-null,
6150 mp->prev must be non-null. */
6151 mp->prev->next = mp->next;
6152 if (mp->next != NULL)
6153 mp->next->prev = mp->prev;
6154 else
6155 minipool_vector_tail = mp->prev;
6157 /* Re-insert it before MAX_MP. */
6158 mp->next = max_mp;
6159 mp->prev = max_mp->prev;
6160 max_mp->prev = mp;
6162 if (mp->prev != NULL)
6163 mp->prev->next = mp;
6164 else
6165 minipool_vector_head = mp;
6168 /* Save the new entry. */
6169 max_mp = mp;
6171 /* Scan over the preceding entries and adjust their addresses as
6172 required. */
6173 while (mp->prev != NULL
6174 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
6176 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
6177 mp = mp->prev;
6180 return max_mp;
6183 /* Add a constant to the minipool for a forward reference. Returns the
6184 node added or NULL if the constant will not fit in this pool. */
6185 static Mnode *
6186 add_minipool_forward_ref (Mfix *fix)
6188 /* If set, max_mp is the first pool_entry that has a lower
6189 constraint than the one we are trying to add. */
6190 Mnode * max_mp = NULL;
6191 HOST_WIDE_INT max_address = fix->address + fix->forwards;
6192 Mnode * mp;
6194 /* If this fix's address is greater than the address of the first
6195 entry, then we can't put the fix in this pool. We subtract the
6196 size of the current fix to ensure that if the table is fully
6197 packed we still have enough room to insert this value by shuffling
6198 the other fixes forwards. */
6199 if (minipool_vector_head &&
6200 fix->address >= minipool_vector_head->max_address - fix->fix_size)
6201 return NULL;
6203 /* Scan the pool to see if a constant with the same value has
6204 already been added. While we are doing this, also note the
6205 location where we must insert the constant if it doesn't already
6206 exist. */
6207 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
6209 if (GET_CODE (fix->value) == GET_CODE (mp->value)
6210 && fix->mode == mp->mode
6211 && (GET_CODE (fix->value) != CODE_LABEL
6212 || (CODE_LABEL_NUMBER (fix->value)
6213 == CODE_LABEL_NUMBER (mp->value)))
6214 && rtx_equal_p (fix->value, mp->value))
6216 /* More than one fix references this entry. */
6217 mp->refcount++;
6218 return move_minipool_fix_forward_ref (mp, max_mp, max_address);
6221 /* Note the insertion point if necessary. */
6222 if (max_mp == NULL
6223 && mp->max_address > max_address)
6224 max_mp = mp;
6226 /* If we are inserting an 8-byte aligned quantity and
6227 we have not already found an insertion point, then
6228 make sure that all such 8-byte aligned quantities are
6229 placed at the start of the pool. */
6230 if (TARGET_REALLY_IWMMXT
6231 && max_mp == NULL
6232 && fix->fix_size == 8
6233 && mp->fix_size != 8)
6235 max_mp = mp;
6236 max_address = mp->max_address;
6240 /* The value is not currently in the minipool, so we need to create
6241 a new entry for it. If MAX_MP is NULL, the entry will be put on
6242 the end of the list since the placement is less constrained than
6243 any existing entry. Otherwise, we insert the new fix before
6244 MAX_MP and, if necessary, adjust the constraints on the other
6245 entries. */
6246 mp = xmalloc (sizeof (* mp));
6247 mp->fix_size = fix->fix_size;
6248 mp->mode = fix->mode;
6249 mp->value = fix->value;
6250 mp->refcount = 1;
6251 /* Not yet required for a backwards ref. */
6252 mp->min_address = -65536;
6254 if (max_mp == NULL)
6256 mp->max_address = max_address;
6257 mp->next = NULL;
6258 mp->prev = minipool_vector_tail;
6260 if (mp->prev == NULL)
6262 minipool_vector_head = mp;
6263 minipool_vector_label = gen_label_rtx ();
6265 else
6266 mp->prev->next = mp;
6268 minipool_vector_tail = mp;
6270 else
6272 if (max_address > max_mp->max_address - mp->fix_size)
6273 mp->max_address = max_mp->max_address - mp->fix_size;
6274 else
6275 mp->max_address = max_address;
6277 mp->next = max_mp;
6278 mp->prev = max_mp->prev;
6279 max_mp->prev = mp;
6280 if (mp->prev != NULL)
6281 mp->prev->next = mp;
6282 else
6283 minipool_vector_head = mp;
6286 /* Save the new entry. */
6287 max_mp = mp;
6289 /* Scan over the preceding entries and adjust their addresses as
6290 required. */
6291 while (mp->prev != NULL
6292 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
6294 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
6295 mp = mp->prev;
6298 return max_mp;
6301 static Mnode *
6302 move_minipool_fix_backward_ref (Mnode *mp, Mnode *min_mp,
6303 HOST_WIDE_INT min_address)
6305 HOST_WIDE_INT offset;
6307 /* This should never be true, and the code below assumes these are
6308 different. */
6309 if (mp == min_mp)
6310 abort ();
6312 if (min_mp == NULL)
6314 if (min_address > mp->min_address)
6315 mp->min_address = min_address;
6317 else
6319 /* We will adjust this below if it is too loose. */
6320 mp->min_address = min_address;
6322 /* Unlink MP from its current position. Since min_mp is non-null,
6323 mp->next must be non-null. */
6324 mp->next->prev = mp->prev;
6325 if (mp->prev != NULL)
6326 mp->prev->next = mp->next;
6327 else
6328 minipool_vector_head = mp->next;
6330 /* Reinsert it after MIN_MP. */
6331 mp->prev = min_mp;
6332 mp->next = min_mp->next;
6333 min_mp->next = mp;
6334 if (mp->next != NULL)
6335 mp->next->prev = mp;
6336 else
6337 minipool_vector_tail = mp;
6340 min_mp = mp;
6342 offset = 0;
6343 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
6345 mp->offset = offset;
6346 if (mp->refcount > 0)
6347 offset += mp->fix_size;
6349 if (mp->next && mp->next->min_address < mp->min_address + mp->fix_size)
6350 mp->next->min_address = mp->min_address + mp->fix_size;
6353 return min_mp;
6356 /* Add a constant to the minipool for a backward reference. Returns the
6357 node added or NULL if the constant will not fit in this pool.
6359 Note that the code for insertion for a backwards reference can be
6360 somewhat confusing because the calculated offsets for each fix do
6361 not take into account the size of the pool (which is still under
6362 construction). */
6363 static Mnode *
6364 add_minipool_backward_ref (Mfix *fix)
6366 /* If set, min_mp is the last pool_entry that has a lower constraint
6367 than the one we are trying to add. */
6368 Mnode *min_mp = NULL;
6369 /* This can be negative, since it is only a constraint. */
6370 HOST_WIDE_INT min_address = fix->address - fix->backwards;
6371 Mnode *mp;
6373 /* If we can't reach the current pool from this insn, or if we can't
6374 insert this entry at the end of the pool without pushing other
6375 fixes out of range, then we don't try. This ensures that we
6376 can't fail later on. */
6377 if (min_address >= minipool_barrier->address
6378 || (minipool_vector_tail->min_address + fix->fix_size
6379 >= minipool_barrier->address))
6380 return NULL;
6382 /* Scan the pool to see if a constant with the same value has
6383 already been added. While we are doing this, also note the
6384 location where we must insert the constant if it doesn't already
6385 exist. */
6386 for (mp = minipool_vector_tail; mp != NULL; mp = mp->prev)
6388 if (GET_CODE (fix->value) == GET_CODE (mp->value)
6389 && fix->mode == mp->mode
6390 && (GET_CODE (fix->value) != CODE_LABEL
6391 || (CODE_LABEL_NUMBER (fix->value)
6392 == CODE_LABEL_NUMBER (mp->value)))
6393 && rtx_equal_p (fix->value, mp->value)
6394 /* Check that there is enough slack to move this entry to the
6395 end of the table (this is conservative). */
6396 && (mp->max_address
6397 > (minipool_barrier->address
6398 + minipool_vector_tail->offset
6399 + minipool_vector_tail->fix_size)))
6401 mp->refcount++;
6402 return move_minipool_fix_backward_ref (mp, min_mp, min_address);
6405 if (min_mp != NULL)
6406 mp->min_address += fix->fix_size;
6407 else
6409 /* Note the insertion point if necessary. */
6410 if (mp->min_address < min_address)
6412 /* For now, we do not allow the insertion of 8-byte alignment
6413 requiring nodes anywhere but at the start of the pool. */
6414 if (TARGET_REALLY_IWMMXT && fix->fix_size == 8 && mp->fix_size != 8)
6415 return NULL;
6416 else
6417 min_mp = mp;
6419 else if (mp->max_address
6420 < minipool_barrier->address + mp->offset + fix->fix_size)
6422 /* Inserting before this entry would push the fix beyond
6423 its maximum address (which can happen if we have
6424 re-located a forwards fix); force the new fix to come
6425 after it. */
6426 min_mp = mp;
6427 min_address = mp->min_address + fix->fix_size;
6429 /* If we are inserting an 8-byte aligned quantity and
6430 we have not already found an insertion point, then
6431 make sure that all such 8-byte aligned quantities are
6432 placed at the start of the pool. */
6433 else if (TARGET_REALLY_IWMMXT
6434 && min_mp == NULL
6435 && fix->fix_size == 8
6436 && mp->fix_size < 8)
6438 min_mp = mp;
6439 min_address = mp->min_address + fix->fix_size;
6444 /* We need to create a new entry. */
6445 mp = xmalloc (sizeof (* mp));
6446 mp->fix_size = fix->fix_size;
6447 mp->mode = fix->mode;
6448 mp->value = fix->value;
6449 mp->refcount = 1;
6450 mp->max_address = minipool_barrier->address + 65536;
6452 mp->min_address = min_address;
6454 if (min_mp == NULL)
6456 mp->prev = NULL;
6457 mp->next = minipool_vector_head;
6459 if (mp->next == NULL)
6461 minipool_vector_tail = mp;
6462 minipool_vector_label = gen_label_rtx ();
6464 else
6465 mp->next->prev = mp;
6467 minipool_vector_head = mp;
6469 else
6471 mp->next = min_mp->next;
6472 mp->prev = min_mp;
6473 min_mp->next = mp;
6475 if (mp->next != NULL)
6476 mp->next->prev = mp;
6477 else
6478 minipool_vector_tail = mp;
6481 /* Save the new entry. */
6482 min_mp = mp;
6484 if (mp->prev)
6485 mp = mp->prev;
6486 else
6487 mp->offset = 0;
6489 /* Scan over the following entries and adjust their offsets. */
6490 while (mp->next != NULL)
6492 if (mp->next->min_address < mp->min_address + mp->fix_size)
6493 mp->next->min_address = mp->min_address + mp->fix_size;
6495 if (mp->refcount)
6496 mp->next->offset = mp->offset + mp->fix_size;
6497 else
6498 mp->next->offset = mp->offset;
6500 mp = mp->next;
6503 return min_mp;
6506 static void
6507 assign_minipool_offsets (Mfix *barrier)
6509 HOST_WIDE_INT offset = 0;
6510 Mnode *mp;
6512 minipool_barrier = barrier;
6514 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
6516 mp->offset = offset;
6518 if (mp->refcount > 0)
6519 offset += mp->fix_size;
6523 /* Output the literal table. */
6524 static void
6525 dump_minipool (rtx scan)
6527 Mnode * mp;
6528 Mnode * nmp;
6529 int align64 = 0;
6531 if (TARGET_REALLY_IWMMXT)
6532 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
6533 if (mp->refcount > 0 && mp->fix_size == 8)
6535 align64 = 1;
6536 break;
6539 if (rtl_dump_file)
6540 fprintf (rtl_dump_file,
6541 ";; Emitting minipool after insn %u; address %ld; align %d (bytes)\n",
6542 INSN_UID (scan), (unsigned long) minipool_barrier->address, align64 ? 8 : 4);
6544 scan = emit_label_after (gen_label_rtx (), scan);
6545 scan = emit_insn_after (align64 ? gen_align_8 () : gen_align_4 (), scan);
6546 scan = emit_label_after (minipool_vector_label, scan);
6548 for (mp = minipool_vector_head; mp != NULL; mp = nmp)
6550 if (mp->refcount > 0)
6552 if (rtl_dump_file)
6554 fprintf (rtl_dump_file,
6555 ";; Offset %u, min %ld, max %ld ",
6556 (unsigned) mp->offset, (unsigned long) mp->min_address,
6557 (unsigned long) mp->max_address);
6558 arm_print_value (rtl_dump_file, mp->value);
6559 fputc ('\n', rtl_dump_file);
6562 switch (mp->fix_size)
6564 #ifdef HAVE_consttable_1
6565 case 1:
6566 scan = emit_insn_after (gen_consttable_1 (mp->value), scan);
6567 break;
6569 #endif
6570 #ifdef HAVE_consttable_2
6571 case 2:
6572 scan = emit_insn_after (gen_consttable_2 (mp->value), scan);
6573 break;
6575 #endif
6576 #ifdef HAVE_consttable_4
6577 case 4:
6578 scan = emit_insn_after (gen_consttable_4 (mp->value), scan);
6579 break;
6581 #endif
6582 #ifdef HAVE_consttable_8
6583 case 8:
6584 scan = emit_insn_after (gen_consttable_8 (mp->value), scan);
6585 break;
6587 #endif
6588 default:
6589 abort ();
6590 break;
6594 nmp = mp->next;
6595 free (mp);
6598 minipool_vector_head = minipool_vector_tail = NULL;
6599 scan = emit_insn_after (gen_consttable_end (), scan);
6600 scan = emit_barrier_after (scan);
6603 /* Return the cost of forcibly inserting a barrier after INSN. */
6604 static int
6605 arm_barrier_cost (rtx insn)
6607 /* Basing the location of the pool on the loop depth is preferable,
6608 but at the moment, the basic block information seems to be
6609 corrupted by this stage of the compilation. */
6610 int base_cost = 50;
6611 rtx next = next_nonnote_insn (insn);
6613 if (next != NULL && GET_CODE (next) == CODE_LABEL)
6614 base_cost -= 20;
6616 switch (GET_CODE (insn))
6618 case CODE_LABEL:
6619 /* It will always be better to place the table before the label, rather
6620 than after it. */
6621 return 50;
6623 case INSN:
6624 case CALL_INSN:
6625 return base_cost;
6627 case JUMP_INSN:
6628 return base_cost - 10;
6630 default:
6631 return base_cost + 10;
6635 /* Find the best place in the insn stream in the range
6636 (FIX->address,MAX_ADDRESS) to forcibly insert a minipool barrier.
6637 Create the barrier by inserting a jump and add a new fix entry for
6638 it. */
6639 static Mfix *
6640 create_fix_barrier (Mfix *fix, HOST_WIDE_INT max_address)
6642 HOST_WIDE_INT count = 0;
6643 rtx barrier;
6644 rtx from = fix->insn;
6645 rtx selected = from;
6646 int selected_cost;
6647 HOST_WIDE_INT selected_address;
6648 Mfix * new_fix;
6649 HOST_WIDE_INT max_count = max_address - fix->address;
6650 rtx label = gen_label_rtx ();
6652 selected_cost = arm_barrier_cost (from);
6653 selected_address = fix->address;
6655 while (from && count < max_count)
6657 rtx tmp;
6658 int new_cost;
6660 /* This code shouldn't have been called if there was a natural barrier
6661 within range. */
6662 if (GET_CODE (from) == BARRIER)
6663 abort ();
6665 /* Count the length of this insn. */
6666 count += get_attr_length (from);
6668 /* If there is a jump table, add its length. */
6669 tmp = is_jump_table (from);
6670 if (tmp != NULL)
6672 count += get_jump_table_size (tmp);
6674 /* Jump tables aren't in a basic block, so base the cost on
6675 the dispatch insn. If we select this location, we will
6676 still put the pool after the table. */
6677 new_cost = arm_barrier_cost (from);
6679 if (count < max_count && new_cost <= selected_cost)
6681 selected = tmp;
6682 selected_cost = new_cost;
6683 selected_address = fix->address + count;
6686 /* Continue after the dispatch table. */
6687 from = NEXT_INSN (tmp);
6688 continue;
6691 new_cost = arm_barrier_cost (from);
6693 if (count < max_count && new_cost <= selected_cost)
6695 selected = from;
6696 selected_cost = new_cost;
6697 selected_address = fix->address + count;
6700 from = NEXT_INSN (from);
6703 /* Create a new JUMP_INSN that branches around a barrier. */
6704 from = emit_jump_insn_after (gen_jump (label), selected);
6705 JUMP_LABEL (from) = label;
6706 barrier = emit_barrier_after (from);
6707 emit_label_after (label, barrier);
6709 /* Create a minipool barrier entry for the new barrier. */
6710 new_fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* new_fix));
6711 new_fix->insn = barrier;
6712 new_fix->address = selected_address;
6713 new_fix->next = fix->next;
6714 fix->next = new_fix;
6716 return new_fix;
6719 /* Record that there is a natural barrier in the insn stream at
6720 ADDRESS. */
6721 static void
6722 push_minipool_barrier (rtx insn, HOST_WIDE_INT address)
6724 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
6726 fix->insn = insn;
6727 fix->address = address;
6729 fix->next = NULL;
6730 if (minipool_fix_head != NULL)
6731 minipool_fix_tail->next = fix;
6732 else
6733 minipool_fix_head = fix;
6735 minipool_fix_tail = fix;
6738 /* Record INSN, which will need fixing up to load a value from the
6739 minipool. ADDRESS is the offset of the insn since the start of the
6740 function; LOC is a pointer to the part of the insn which requires
6741 fixing; VALUE is the constant that must be loaded, which is of type
6742 MODE. */
6743 static void
6744 push_minipool_fix (rtx insn, HOST_WIDE_INT address, rtx *loc,
6745 enum machine_mode mode, rtx value)
6747 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
6749 #ifdef AOF_ASSEMBLER
6750 /* PIC symbol references need to be converted into offsets into the
6751 based area. */
6752 /* XXX This shouldn't be done here. */
6753 if (flag_pic && GET_CODE (value) == SYMBOL_REF)
6754 value = aof_pic_entry (value);
6755 #endif /* AOF_ASSEMBLER */
6757 fix->insn = insn;
6758 fix->address = address;
6759 fix->loc = loc;
6760 fix->mode = mode;
6761 fix->fix_size = MINIPOOL_FIX_SIZE (mode);
6762 fix->value = value;
6763 fix->forwards = get_attr_pool_range (insn);
6764 fix->backwards = get_attr_neg_pool_range (insn);
6765 fix->minipool = NULL;
6767 /* If an insn doesn't have a range defined for it, then it isn't
6768 expecting to be reworked by this code. Better to abort now than
6769 to generate duff assembly code. */
6770 if (fix->forwards == 0 && fix->backwards == 0)
6771 abort ();
6773 /* With iWMMXt enabled, the pool is aligned to an 8-byte boundary.
6774 So there might be an empty word before the start of the pool.
6775 Hence we reduce the forward range by 4 to allow for this
6776 possibility. */
6777 if (TARGET_REALLY_IWMMXT && fix->fix_size == 8)
6778 fix->forwards -= 4;
6780 if (rtl_dump_file)
6782 fprintf (rtl_dump_file,
6783 ";; %smode fixup for i%d; addr %lu, range (%ld,%ld): ",
6784 GET_MODE_NAME (mode),
6785 INSN_UID (insn), (unsigned long) address,
6786 -1 * (long)fix->backwards, (long)fix->forwards);
6787 arm_print_value (rtl_dump_file, fix->value);
6788 fprintf (rtl_dump_file, "\n");
6791 /* Add it to the chain of fixes. */
6792 fix->next = NULL;
6794 if (minipool_fix_head != NULL)
6795 minipool_fix_tail->next = fix;
6796 else
6797 minipool_fix_head = fix;
6799 minipool_fix_tail = fix;
6802 /* Scan INSN and note any of its operands that need fixing.
6803 If DO_PUSHES is false we do not actually push any of the fixups
6804 needed. The function returns TRUE if any fixups were needed/pushed.
6805 This is used by arm_memory_load_p() which needs to know about loads
6806 of constants that will be converted into minipool loads. */
6807 static bool
6808 note_invalid_constants (rtx insn, HOST_WIDE_INT address, int do_pushes)
6810 bool result = false;
6811 int opno;
6813 extract_insn (insn);
6815 if (!constrain_operands (1))
6816 fatal_insn_not_found (insn);
6818 /* Fill in recog_op_alt with information about the constraints of this insn. */
6819 preprocess_constraints ();
6821 for (opno = 0; opno < recog_data.n_operands; opno++)
6823 /* Things we need to fix can only occur in inputs. */
6824 if (recog_data.operand_type[opno] != OP_IN)
6825 continue;
6827 /* If this alternative is a memory reference, then any mention
6828 of constants in this alternative is really to fool reload
6829 into allowing us to accept one there. We need to fix them up
6830 now so that we output the right code. */
6831 if (recog_op_alt[opno][which_alternative].memory_ok)
6833 rtx op = recog_data.operand[opno];
6835 if (CONSTANT_P (op))
6837 if (do_pushes)
6838 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
6839 recog_data.operand_mode[opno], op);
6840 result = true;
6842 else if (GET_CODE (op) == MEM
6843 && GET_CODE (XEXP (op, 0)) == SYMBOL_REF
6844 && CONSTANT_POOL_ADDRESS_P (XEXP (op, 0)))
6846 if (do_pushes)
6847 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
6848 recog_data.operand_mode[opno],
6849 get_pool_constant (XEXP (op, 0)));
6851 result = true;
6856 return result;
6859 /* GCC puts the pool in the wrong place for ARM, since we can only
6860 load addresses a limited distance around the pc. We do some
6861 special munging to move the constant pool values to the correct
6862 point in the code. */
6863 static void
6864 arm_reorg (void)
6866 rtx insn;
6867 HOST_WIDE_INT address = 0;
6868 Mfix * fix;
6870 minipool_fix_head = minipool_fix_tail = NULL;
6872 /* The first insn must always be a note, or the code below won't
6873 scan it properly. */
6874 insn = get_insns ();
6875 if (GET_CODE (insn) != NOTE)
6876 abort ();
6878 /* Scan all the insns and record the operands that will need fixing. */
6879 for (insn = next_nonnote_insn (insn); insn; insn = next_nonnote_insn (insn))
6881 if (TARGET_CIRRUS_FIX_INVALID_INSNS
6882 && (arm_cirrus_insn_p (insn)
6883 || GET_CODE (insn) == JUMP_INSN
6884 || arm_memory_load_p (insn)))
6885 cirrus_reorg (insn);
6887 if (GET_CODE (insn) == BARRIER)
6888 push_minipool_barrier (insn, address);
6889 else if (INSN_P (insn))
6891 rtx table;
6893 note_invalid_constants (insn, address, true);
6894 address += get_attr_length (insn);
6896 /* If the insn is a vector jump, add the size of the table
6897 and skip the table. */
6898 if ((table = is_jump_table (insn)) != NULL)
6900 address += get_jump_table_size (table);
6901 insn = table;
6906 fix = minipool_fix_head;
6908 /* Now scan the fixups and perform the required changes. */
6909 while (fix)
6911 Mfix * ftmp;
6912 Mfix * fdel;
6913 Mfix * last_added_fix;
6914 Mfix * last_barrier = NULL;
6915 Mfix * this_fix;
6917 /* Skip any further barriers before the next fix. */
6918 while (fix && GET_CODE (fix->insn) == BARRIER)
6919 fix = fix->next;
6921 /* No more fixes. */
6922 if (fix == NULL)
6923 break;
6925 last_added_fix = NULL;
6927 for (ftmp = fix; ftmp; ftmp = ftmp->next)
6929 if (GET_CODE (ftmp->insn) == BARRIER)
6931 if (ftmp->address >= minipool_vector_head->max_address)
6932 break;
6934 last_barrier = ftmp;
6936 else if ((ftmp->minipool = add_minipool_forward_ref (ftmp)) == NULL)
6937 break;
6939 last_added_fix = ftmp; /* Keep track of the last fix added. */
6942 /* If we found a barrier, drop back to that; any fixes that we
6943 could have reached but come after the barrier will now go in
6944 the next mini-pool. */
6945 if (last_barrier != NULL)
6947 /* Reduce the refcount for those fixes that won't go into this
6948 pool after all. */
6949 for (fdel = last_barrier->next;
6950 fdel && fdel != ftmp;
6951 fdel = fdel->next)
6953 fdel->minipool->refcount--;
6954 fdel->minipool = NULL;
6957 ftmp = last_barrier;
6959 else
6961 /* ftmp is the first fix that we can't fit into this pool and
6962 there are no natural barriers that we could use. Insert a
6963 new barrier in the code somewhere between the previous
6964 fix and this one, and arrange to jump around it. */
6965 HOST_WIDE_INT max_address;
6967 /* The last item on the list of fixes must be a barrier, so
6968 we can never run off the end of the list of fixes without
6969 last_barrier being set. */
6970 if (ftmp == NULL)
6971 abort ();
6973 max_address = minipool_vector_head->max_address;
6974 /* Check that there isn't another fix that is in range that
6975 we couldn't fit into this pool because the pool was
6976 already too large: we need to put the pool before such an
6977 instruction. */
6978 if (ftmp->address < max_address)
6979 max_address = ftmp->address;
6981 last_barrier = create_fix_barrier (last_added_fix, max_address);
6984 assign_minipool_offsets (last_barrier);
6986 while (ftmp)
6988 if (GET_CODE (ftmp->insn) != BARRIER
6989 && ((ftmp->minipool = add_minipool_backward_ref (ftmp))
6990 == NULL))
6991 break;
6993 ftmp = ftmp->next;
6996 /* Scan over the fixes we have identified for this pool, fixing them
6997 up and adding the constants to the pool itself. */
6998 for (this_fix = fix; this_fix && ftmp != this_fix;
6999 this_fix = this_fix->next)
7000 if (GET_CODE (this_fix->insn) != BARRIER)
7002 rtx addr
7003 = plus_constant (gen_rtx_LABEL_REF (VOIDmode,
7004 minipool_vector_label),
7005 this_fix->minipool->offset);
7006 *this_fix->loc = gen_rtx_MEM (this_fix->mode, addr);
7009 dump_minipool (last_barrier->insn);
7010 fix = ftmp;
7013 /* From now on we must synthesize any constants that we can't handle
7014 directly. This can happen if the RTL gets split during final
7015 instruction generation. */
7016 after_arm_reorg = 1;
7018 /* Free the minipool memory. */
7019 obstack_free (&minipool_obstack, minipool_startobj);
7022 /* Routines to output assembly language. */
7024 /* If the rtx is one of the eight valid FPA immediate constants, return
7025 the string of that number. In this way we can ensure that valid double
7026 constants are generated even when cross compiling. */
7027 const char *
7028 fp_immediate_constant (rtx x)
7030 REAL_VALUE_TYPE r;
7031 int i;
7033 if (!fpa_consts_inited)
7034 init_fpa_table ();
7036 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
7037 for (i = 0; i < 8; i++)
7038 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
7039 return strings_fpa[i];
7041 abort ();
7044 /* As for fp_immediate_constant, but value is passed directly, not in rtx. */
7045 static const char *
7046 fp_const_from_val (REAL_VALUE_TYPE *r)
7048 int i;
7050 if (!fpa_consts_inited)
7051 init_fpa_table ();
7053 for (i = 0; i < 8; i++)
7054 if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
7055 return strings_fpa[i];
7057 abort ();
7060 /* Output the operands of an LDM/STM instruction to STREAM.
7061 MASK is the ARM register set mask of which only bits 0-15 are important.
7062 REG is the base register, either the frame pointer or the stack pointer;
7063 INSTR is the possibly suffixed load or store instruction. */
7064 static void
7065 print_multi_reg (FILE *stream, const char *instr, int reg, int mask)
7067 int i;
7068 int not_first = FALSE;
7070 fputc ('\t', stream);
7071 asm_fprintf (stream, instr, reg);
7072 fputs (", {", stream);
7074 for (i = 0; i <= LAST_ARM_REGNUM; i++)
7075 if (mask & (1 << i))
7077 if (not_first)
7078 fprintf (stream, ", ");
7080 asm_fprintf (stream, "%r", i);
7081 not_first = TRUE;
7084 fprintf (stream, "}");
7086 /* Add a ^ character for the 26-bit ABI, but only if we were loading
7087 the PC. Otherwise we would generate an UNPREDICTABLE instruction.
7088 Strictly speaking the instruction would be unpredictable only if
7089 we were writing back the base register as well, but since we never
7090 want to generate an LDM type 2 instruction (register bank switching)
7091 which is what you get if the PC is not being loaded, we do not need
7092 to check for writeback. */
7093 if (! TARGET_APCS_32
7094 && ((mask & (1 << PC_REGNUM)) != 0))
7095 fprintf (stream, "^");
7097 fprintf (stream, "\n");
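/* For illustration, with a hypothetical INSTR string:
     print_multi_reg (stream, "ldmfd\t%r!", SP_REGNUM, 0x4030);
   prints "ldmfd sp!, {r4, r5, lr}" (mask bits 4, 5 and 14) followed by
   a newline.  */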
7100 /* Output a 'call' insn. */
7101 const char *
7102 output_call (rtx *operands)
7104 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
7106 if (REGNO (operands[0]) == LR_REGNUM)
7108 operands[0] = gen_rtx_REG (SImode, IP_REGNUM);
7109 output_asm_insn ("mov%?\t%0, %|lr", operands);
7112 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
7114 if (TARGET_INTERWORK)
7115 output_asm_insn ("bx%?\t%0", operands);
7116 else
7117 output_asm_insn ("mov%?\t%|pc, %0", operands);
7119 return "";
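/* For illustration: a call through r2 on a non-interworking target emits
	mov	lr, pc
	mov	pc, r2
   while an interworking target replaces the final mov with "bx r2".  */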
7122 /* Output a 'call' insn that is a reference in memory. */
7123 const char *
7124 output_call_mem (rtx *operands)
7126 if (TARGET_INTERWORK)
7128 output_asm_insn ("ldr%?\t%|ip, %0", operands);
7129 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
7130 output_asm_insn ("bx%?\t%|ip", operands);
7132 else if (regno_use_in (LR_REGNUM, operands[0]))
7134 /* LR is used in the memory address. We load the address in the
7135 first instruction. It's safe to use IP as the target of the
7136 load since the call will kill it anyway. */
7137 output_asm_insn ("ldr%?\t%|ip, %0", operands);
7138 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
7139 output_asm_insn ("mov%?\t%|pc, %|ip", operands);
7141 else
7143 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
7144 output_asm_insn ("ldr%?\t%|pc, %0", operands);
7147 return "";
7151 /* Output a move from arm registers to an fpa register.
7152 OPERANDS[0] is an fpa register.
7153 OPERANDS[1] is the first register of an arm register pair. */
7154 const char *
7155 output_mov_long_double_fpa_from_arm (rtx *operands)
7157 int arm_reg0 = REGNO (operands[1]);
7158 rtx ops[3];
7160 if (arm_reg0 == IP_REGNUM)
7161 abort ();
7163 ops[0] = gen_rtx_REG (SImode, arm_reg0);
7164 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
7165 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
7167 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
7168 output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);
7170 return "";
7173 /* Output a move from an fpa register to arm registers.
7174 OPERANDS[0] is the first register of an arm register pair.
7175 OPERANDS[1] is an fpa register. */
7176 const char *
7177 output_mov_long_double_arm_from_fpa (rtx *operands)
7179 int arm_reg0 = REGNO (operands[0]);
7180 rtx ops[3];
7182 if (arm_reg0 == IP_REGNUM)
7183 abort ();
7185 ops[0] = gen_rtx_REG (SImode, arm_reg0);
7186 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
7187 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
7189 output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
7190 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
7191 return "";
7194 /* Output a move from arm registers to arm registers of a long double.
7195 OPERANDS[0] is the destination.
7196 OPERANDS[1] is the source. */
7197 const char *
7198 output_mov_long_double_arm_from_arm (rtx *operands)
7200 /* We have to be careful here because the two might overlap. */
7201 int dest_start = REGNO (operands[0]);
7202 int src_start = REGNO (operands[1]);
7203 rtx ops[2];
7204 int i;
7206 if (dest_start < src_start)
7208 for (i = 0; i < 3; i++)
7210 ops[0] = gen_rtx_REG (SImode, dest_start + i);
7211 ops[1] = gen_rtx_REG (SImode, src_start + i);
7212 output_asm_insn ("mov%?\t%0, %1", ops);
7215 else
7217 for (i = 2; i >= 0; i--)
7219 ops[0] = gen_rtx_REG (SImode, dest_start + i);
7220 ops[1] = gen_rtx_REG (SImode, src_start + i);
7221 output_asm_insn ("mov%?\t%0, %1", ops);
7225 return "";
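/* e.g. (illustrative) copying r1..r3 into r2..r4 overlaps, so the loop
   above moves the highest register first (r4 <- r3, r3 <- r2, r2 <- r1);
   copying r2..r4 down into r1..r3 runs lowest-first instead.  */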
7229 /* Output a move from arm registers to an fpa register.
7230 OPERANDS[0] is an fpa register.
7231 OPERANDS[1] is the first register of an arm register pair. */
7232 const char *
7233 output_mov_double_fpa_from_arm (rtx *operands)
7235 int arm_reg0 = REGNO (operands[1]);
7236 rtx ops[2];
7238 if (arm_reg0 == IP_REGNUM)
7239 abort ();
7241 ops[0] = gen_rtx_REG (SImode, arm_reg0);
7242 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
7243 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
7244 output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
7245 return "";
7248 /* Output a move from an fpa register to arm registers.
7250 OPERANDS[0] is the first register of an arm register pair.
7250 OPERANDS[1] is an fpa register. */
7251 const char *
7252 output_mov_double_arm_from_fpa (rtx *operands)
7254 int arm_reg0 = REGNO (operands[0]);
7255 rtx ops[2];
7257 if (arm_reg0 == IP_REGNUM)
7258 abort ();
7260 ops[0] = gen_rtx_REG (SImode, arm_reg0);
7261 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
7262 output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
7263 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
7264 return "";
7267 /* Output a move between double words.
7268 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
7269 or MEM<-REG and all MEMs must be offsettable addresses. */
7270 const char *
7271 output_move_double (rtx *operands)
7273 enum rtx_code code0 = GET_CODE (operands[0]);
7274 enum rtx_code code1 = GET_CODE (operands[1]);
7275 rtx otherops[3];
7277 if (code0 == REG)
7279 int reg0 = REGNO (operands[0]);
7281 otherops[0] = gen_rtx_REG (SImode, 1 + reg0);
7283 if (code1 == REG)
7285 int reg1 = REGNO (operands[1]);
7286 if (reg1 == IP_REGNUM)
7287 abort ();
7289 /* Ensure the second source is not overwritten. */
7290 if (reg1 == reg0 + (WORDS_BIG_ENDIAN ? -1 : 1))
7291 output_asm_insn ("mov%?\t%Q0, %Q1\n\tmov%?\t%R0, %R1", operands);
7292 else
7293 output_asm_insn ("mov%?\t%R0, %R1\n\tmov%?\t%Q0, %Q1", operands);
7295 else if (code1 == CONST_VECTOR)
7297 HOST_WIDE_INT hint = 0;
7299 switch (GET_MODE (operands[1]))
7301 case V2SImode:
7302 otherops[1] = GEN_INT (INTVAL (CONST_VECTOR_ELT (operands[1], 1)));
7303 operands[1] = GEN_INT (INTVAL (CONST_VECTOR_ELT (operands[1], 0)));
7304 break;
7306 case V4HImode:
7307 if (BYTES_BIG_ENDIAN)
7309 hint = INTVAL (CONST_VECTOR_ELT (operands[1], 2));
7310 hint <<= 16;
7311 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 3));
7313 else
7315 hint = INTVAL (CONST_VECTOR_ELT (operands[1], 3));
7316 hint <<= 16;
7317 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 2));
7320 otherops[1] = GEN_INT (hint);
7321 hint = 0;
7323 if (BYTES_BIG_ENDIAN)
7325 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 0));
7326 hint <<= 16;
7327 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 1));
7329 else
7331 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 1));
7332 hint <<= 16;
7333 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 0));
7336 operands[1] = GEN_INT (hint);
7337 break;
7339 case V8QImode:
7340 if (BYTES_BIG_ENDIAN)
7342 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 4));
7343 hint <<= 8;
7344 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 5));
7345 hint <<= 8;
7346 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 6));
7347 hint <<= 8;
7348 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 7));
7350 else
7352 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 7));
7353 hint <<= 8;
7354 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 6));
7355 hint <<= 8;
7356 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 5));
7357 hint <<= 8;
7358 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 4));
7361 otherops[1] = GEN_INT (hint);
7362 hint = 0;
7364 if (BYTES_BIG_ENDIAN)
7366 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 0));
7367 hint <<= 8;
7368 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 1));
7369 hint <<= 8;
7370 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 2));
7371 hint <<= 8;
7372 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 3));
7374 else
7376 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 3));
7377 hint <<= 8;
7378 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 2));
7379 hint <<= 8;
7380 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 1));
7381 hint <<= 8;
7382 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 0));
7385 operands[1] = GEN_INT (hint);
7386 break;
7388 default:
7389 abort ();
7391 output_mov_immediate (operands);
7392 output_mov_immediate (otherops);
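      /* e.g. (illustrative) a little-endian V4HImode vector {a, b, c, d}
	 is packed above as low word (b << 16) | a and high word
	 (d << 16) | c before being emitted as two immediate moves.  */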
7394 else if (code1 == CONST_DOUBLE)
7396 if (GET_MODE (operands[1]) == DFmode)
7398 REAL_VALUE_TYPE r;
7399 long l[2];
7401 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
7402 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
7403 otherops[1] = GEN_INT (l[1]);
7404 operands[1] = GEN_INT (l[0]);
7406 else if (GET_MODE (operands[1]) != VOIDmode)
7407 abort ();
7408 else if (WORDS_BIG_ENDIAN)
7410 otherops[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
7411 operands[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
7413 else
7415 otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
7416 operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
7419 output_mov_immediate (operands);
7420 output_mov_immediate (otherops);
7422 else if (code1 == CONST_INT)
7424 #if HOST_BITS_PER_WIDE_INT > 32
7425 /* If HOST_WIDE_INT is more than 32 bits, the intval tells us
7426 what the upper word is. */
7427 if (WORDS_BIG_ENDIAN)
7429 otherops[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
7430 operands[1] = GEN_INT (INTVAL (operands[1]) >> 32);
7432 else
7434 otherops[1] = GEN_INT (INTVAL (operands[1]) >> 32);
7435 operands[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
7437 #else
7438 /* Sign extend the intval into the high-order word. */
7439 if (WORDS_BIG_ENDIAN)
7441 otherops[1] = operands[1];
7442 operands[1] = (INTVAL (operands[1]) < 0
7443 ? constm1_rtx : const0_rtx);
7445 else
7446 otherops[1] = INTVAL (operands[1]) < 0 ? constm1_rtx : const0_rtx;
7447 #endif
7448 output_mov_immediate (otherops);
7449 output_mov_immediate (operands);
7451 else if (code1 == MEM)
7453 switch (GET_CODE (XEXP (operands[1], 0)))
7455 case REG:
7456 output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
7457 break;
7459 case PRE_INC:
7460 abort (); /* Should never happen now. */
7461 break;
7463 case PRE_DEC:
7464 output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
7465 break;
7467 case POST_INC:
7468 output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
7469 break;
7471 case POST_DEC:
7472 abort (); /* Should never happen now. */
7473 break;
7475 case LABEL_REF:
7476 case CONST:
7477 output_asm_insn ("adr%?\t%0, %1", operands);
7478 output_asm_insn ("ldm%?ia\t%0, %M0", operands);
7479 break;
7481 default:
7482 if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1),
7483 GET_MODE (XEXP (XEXP (operands[1], 0), 1))))
7485 otherops[0] = operands[0];
7486 otherops[1] = XEXP (XEXP (operands[1], 0), 0);
7487 otherops[2] = XEXP (XEXP (operands[1], 0), 1);
7489 if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
7491 if (GET_CODE (otherops[2]) == CONST_INT)
7493 switch ((int) INTVAL (otherops[2]))
7495 case -8:
7496 output_asm_insn ("ldm%?db\t%1, %M0", otherops);
7497 return "";
7498 case -4:
7499 output_asm_insn ("ldm%?da\t%1, %M0", otherops);
7500 return "";
7501 case 4:
7502 output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
7503 return "";
7506 if (!(const_ok_for_arm (INTVAL (otherops[2]))))
7507 output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
7508 else
7509 output_asm_insn ("add%?\t%0, %1, %2", otherops);
7511 else
7512 output_asm_insn ("add%?\t%0, %1, %2", otherops);
7514 else
7515 output_asm_insn ("sub%?\t%0, %1, %2", otherops);
7517 return "ldm%?ia\t%0, %M0";
7519 else
7521 otherops[1] = adjust_address (operands[1], SImode, 4);
7522 /* Take care of overlapping base/data reg. */
7523 if (reg_mentioned_p (operands[0], operands[1]))
7525 output_asm_insn ("ldr%?\t%0, %1", otherops);
7526 output_asm_insn ("ldr%?\t%0, %1", operands);
7528 else
7530 output_asm_insn ("ldr%?\t%0, %1", operands);
7531 output_asm_insn ("ldr%?\t%0, %1", otherops);
7536 else
7537 abort (); /* Constraints should prevent this. */
7539 else if (code0 == MEM && code1 == REG)
7541 if (REGNO (operands[1]) == IP_REGNUM)
7542 abort ();
7544 switch (GET_CODE (XEXP (operands[0], 0)))
7546 case REG:
7547 output_asm_insn ("stm%?ia\t%m0, %M1", operands);
7548 break;
7550 case PRE_INC:
7551 abort (); /* Should never happen now. */
7552 break;
7554 case PRE_DEC:
7555 output_asm_insn ("stm%?db\t%m0!, %M1", operands);
7556 break;
7558 case POST_INC:
7559 output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
7560 break;
7562 case POST_DEC:
7563 abort (); /* Should never happen now. */
7564 break;
7566 case PLUS:
7567 if (GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT)
7569 switch ((int) INTVAL (XEXP (XEXP (operands[0], 0), 1)))
7571 case -8:
7572 output_asm_insn ("stm%?db\t%m0, %M1", operands);
7573 return "";
7575 case -4:
7576 output_asm_insn ("stm%?da\t%m0, %M1", operands);
7577 return "";
7579 case 4:
7580 output_asm_insn ("stm%?ib\t%m0, %M1", operands);
7581 return "";
7584 /* Fall through */
7586 default:
7587 otherops[0] = adjust_address (operands[0], SImode, 4);
7588 otherops[1] = gen_rtx_REG (SImode, 1 + REGNO (operands[1]));
7589 output_asm_insn ("str%?\t%1, %0", operands);
7590 output_asm_insn ("str%?\t%1, %0", otherops);
7593 else
7594 /* Constraints should prevent this. */
7595 abort ();
7597 return "";
7601 /* Output an arbitrary MOV reg, #n.
7602 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
7603 const char *
7604 output_mov_immediate (rtx *operands)
7606 HOST_WIDE_INT n = INTVAL (operands[1]);
7608 /* Try to use one MOV. */
7609 if (const_ok_for_arm (n))
7610 output_asm_insn ("mov%?\t%0, %1", operands);
7612 /* Try to use one MVN. */
7613 else if (const_ok_for_arm (~n))
7615 operands[1] = GEN_INT (~n);
7616 output_asm_insn ("mvn%?\t%0, %1", operands);
7618 else
7620 int n_ones = 0;
7621 int i;
7623 /* If all else fails, make it out of ORRs or BICs as appropriate. */
7624 for (i = 0; i < 32; i++)
7625 if (n & 1 << i)
7626 n_ones++;
7628 if (n_ones > 16) /* Shorter to use MVN with BIC in this case. */
7629 output_multi_immediate (operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1, ~ n);
7630 else
7631 output_multi_immediate (operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1, n);
7634 return "";
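/* Worked examples (illustrative, assuming destination r0): n = 0xff is
   encodable and gives "mov r0, #255"; n = 0xffffff00 is not, but its
   complement 0xff is, giving "mvn r0, #255"; n = 0xfffff0f0 has more
   than 16 bits set, so it is built as "mvn r0, #15" followed by
   "bic r0, r0, #3840".  */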
7637 /* Output an ADD r, s, #n where n may be too big for one instruction.
7638 If N is zero and the destination register equals the source, output nothing. */
7639 const char *
7640 output_add_immediate (rtx *operands)
7642 HOST_WIDE_INT n = INTVAL (operands[2]);
7644 if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
7646 if (n < 0)
7647 output_multi_immediate (operands,
7648 "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
7649 -n);
7650 else
7651 output_multi_immediate (operands,
7652 "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
7656 return "";
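/* For example (illustrative): n = 0x1004 is not a valid ARM immediate,
   so this emits "add r0, r1, #4" followed by "add r0, r0, #4096".  */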
7659 /* Output a multiple immediate operation.
7660 OPERANDS is the vector of operands referred to in the output patterns.
7661 INSTR1 is the output pattern to use for the first constant.
7662 INSTR2 is the output pattern to use for subsequent constants.
7663 IMMED_OP is the index of the constant slot in OPERANDS.
7664 N is the constant value. */
7665 static const char *
7666 output_multi_immediate (rtx *operands, const char *instr1, const char *instr2,
7667 int immed_op, HOST_WIDE_INT n)
7669 #if HOST_BITS_PER_WIDE_INT > 32
7670 n &= 0xffffffff;
7671 #endif
7673 if (n == 0)
7675 /* Quick and easy output. */
7676 operands[immed_op] = const0_rtx;
7677 output_asm_insn (instr1, operands);
7679 else
7681 int i;
7682 const char * instr = instr1;
7684 /* Note that n is never zero here (which would give no output). */
7685 for (i = 0; i < 32; i += 2)
7687 if (n & (3 << i))
7689 operands[immed_op] = GEN_INT (n & (255 << i));
7690 output_asm_insn (instr, operands);
7691 instr = instr2;
7692 i += 6;
7697 return "";
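/* Illustration: the loop above picks out 8-bit chunks aligned on 2-bit
   boundaries, matching the ARM immediate encoding; e.g. with the
   mov/orr patterns and n = 0x00ff00ff it emits
	mov	rD, #255
	orr	rD, rD, #16711680
   where rD stands for the destination register.  */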
7700 /* Return the appropriate ARM instruction for the operation code.
7701 The returned result should not be overwritten. OP is the rtx of the
7702 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
7703 was shifted. */
7704 const char *
7705 arithmetic_instr (rtx op, int shift_first_arg)
7707 switch (GET_CODE (op))
7709 case PLUS:
7710 return "add";
7712 case MINUS:
7713 return shift_first_arg ? "rsb" : "sub";
7715 case IOR:
7716 return "orr";
7718 case XOR:
7719 return "eor";
7721 case AND:
7722 return "and";
7724 default:
7725 abort ();
7729 /* Ensure valid constant shifts and return the appropriate shift mnemonic
7730 for the operation code. The returned result should not be overwritten.
7731 OP is the rtx code of the shift.
7732 On exit, *AMOUNTP will be -1 if the shift is by a register, or the
7733 constant shift amount otherwise. */
7734 static const char *
7735 shift_op (rtx op, HOST_WIDE_INT *amountp)
7737 const char * mnem;
7738 enum rtx_code code = GET_CODE (op);
7740 if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
7741 *amountp = -1;
7742 else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
7743 *amountp = INTVAL (XEXP (op, 1));
7744 else
7745 abort ();
7747 switch (code)
7749 case ASHIFT:
7750 mnem = "asl";
7751 break;
7753 case ASHIFTRT:
7754 mnem = "asr";
7755 break;
7757 case LSHIFTRT:
7758 mnem = "lsr";
7759 break;
7761 case ROTATERT:
7762 mnem = "ror";
7763 break;
7765 case MULT:
7766 /* We never have to worry about the amount being other than a
7767 power of 2, since this case can never be reloaded from a reg. */
7768 if (*amountp != -1)
7769 *amountp = int_log2 (*amountp);
7770 else
7771 abort ();
7772 return "asl";
7774 default:
7775 abort ();
7778 if (*amountp != -1)
7780 /* This is not 100% correct, but follows from the desire to merge
7781 multiplication by a power of 2 with the recognizer for a
7782 shift. >=32 is not a valid shift for "asl", so we must try to
7783 output a shift that produces the correct arithmetical result.
7784 Using lsr #32 is identical except for the fact that the carry bit
7785 is not set correctly if we set the flags; but we never use the
7786 carry bit from such an operation, so we can ignore that. */
7787 if (code == ROTATERT)
7788 /* Rotate is just modulo 32. */
7789 *amountp &= 31;
7790 else if (*amountp != (*amountp & 31))
7792 if (code == ASHIFT)
7793 mnem = "lsr";
7794 *amountp = 32;
7797 /* Shifts of 0 are no-ops. */
7798 if (*amountp == 0)
7799 return NULL;
7802 return mnem;
7805 /* Obtain the shift count from the POWER of two. */
7807 static HOST_WIDE_INT
7808 int_log2 (HOST_WIDE_INT power)
7810 HOST_WIDE_INT shift = 0;
7812 while ((((HOST_WIDE_INT) 1 << shift) & power) == 0)
7814 if (shift > 31)
7815 abort ();
7816 shift++;
7819 return shift;
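/* e.g. int_log2 (8) == 3.  For an argument that is not a power of two
   the loop returns the index of the lowest set bit, and an argument
   with no bits set in positions 0-31 triggers the abort.  */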
7822 /* Output a .ascii pseudo-op, keeping track of lengths. This is because
7823 /bin/as is horribly restrictive. */
7824 #define MAX_ASCII_LEN 51
7826 void
7827 output_ascii_pseudo_op (FILE *stream, const unsigned char *p, int len)
7829 int i;
7830 int len_so_far = 0;
7832 fputs ("\t.ascii\t\"", stream);
7834 for (i = 0; i < len; i++)
7836 int c = p[i];
7838 if (len_so_far >= MAX_ASCII_LEN)
7840 fputs ("\"\n\t.ascii\t\"", stream);
7841 len_so_far = 0;
7844 switch (c)
7846 case TARGET_TAB:
7847 fputs ("\\t", stream);
7848 len_so_far += 2;
7849 break;
7851 case TARGET_FF:
7852 fputs ("\\f", stream);
7853 len_so_far += 2;
7854 break;
7856 case TARGET_BS:
7857 fputs ("\\b", stream);
7858 len_so_far += 2;
7859 break;
7861 case TARGET_CR:
7862 fputs ("\\r", stream);
7863 len_so_far += 2;
7864 break;
7866 case TARGET_NEWLINE:
7867 fputs ("\\n", stream);
7868 c = p [i + 1];
7869 if ((c >= ' ' && c <= '~')
7870 || c == TARGET_TAB)
7871 /* This is a good place for a line break. */
7872 len_so_far = MAX_ASCII_LEN;
7873 else
7874 len_so_far += 2;
7875 break;
7877 case '\"':
7878 case '\\':
7879 putc ('\\', stream);
7880 len_so_far++;
7881 /* drop through. */
7883 default:
7884 if (c >= ' ' && c <= '~')
7886 putc (c, stream);
7887 len_so_far++;
7889 else
7891 fprintf (stream, "\\%03o", c);
7892 len_so_far += 4;
7894 break;
7898 fputs ("\"\n", stream);
7901 /* Compute the register save mask for registers 0 through 12
7902 inclusive. This code is used by both arm_compute_save_reg_mask
7903 and arm_compute_initial_elimination_offset. */
7904 static unsigned long
7905 arm_compute_save_reg0_reg12_mask (void)
7907 unsigned long func_type = arm_current_func_type ();
7908 unsigned int save_reg_mask = 0;
7909 unsigned int reg;
7911 if (IS_INTERRUPT (func_type))
7913 unsigned int max_reg;
7914 /* Interrupt functions must not corrupt any registers,
7915 even call clobbered ones. If this is a leaf function
7916 we can just examine the registers used by the RTL, but
7917 otherwise we have to assume that whatever function is
7918 called might clobber anything, and so we have to save
7919 all the call-clobbered registers as well. */
7920 if (ARM_FUNC_TYPE (func_type) == ARM_FT_FIQ)
7921 /* FIQ handlers have registers r8 - r12 banked, so
7922 we only need to check r0 - r7. Normal ISRs only
7923 bank r14 and r15, so we must check up to r12.
7924 r13 is the stack pointer which is always preserved,
7925 so we do not need to consider it here. */
7926 max_reg = 7;
7927 else
7928 max_reg = 12;
7930 for (reg = 0; reg <= max_reg; reg++)
7931 if (regs_ever_live[reg]
7932 || (! current_function_is_leaf && call_used_regs [reg]))
7933 save_reg_mask |= (1 << reg);
7935 else
7937 /* In the normal case we only need to save those registers
7938 which are call saved and which are used by this function. */
7939 for (reg = 0; reg <= 10; reg++)
7940 if (regs_ever_live[reg] && ! call_used_regs [reg])
7941 save_reg_mask |= (1 << reg);
7943 /* Handle the frame pointer as a special case. */
7944 if (! TARGET_APCS_FRAME
7945 && ! frame_pointer_needed
7946 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
7947 && ! call_used_regs[HARD_FRAME_POINTER_REGNUM])
7948 save_reg_mask |= 1 << HARD_FRAME_POINTER_REGNUM;
7950 /* If we aren't loading the PIC register,
7951 don't stack it even though it may be live. */
7952 if (flag_pic
7953 && ! TARGET_SINGLE_PIC_BASE
7954 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
7955 save_reg_mask |= 1 << PIC_OFFSET_TABLE_REGNUM;
7958 return save_reg_mask;
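/* Illustration (hypothetical): a leaf IRQ handler that uses only r0 and
   r4 gets a mask of 0x11; a non-leaf handler must additionally assume
   that the call-clobbered registers (r0-r3 and ip) are corrupted by its
   callees, so they are saved as well.  */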
7961 /* Compute a bit mask of which registers need to be
7962 saved on the stack for the current function. */
7964 static unsigned long
7965 arm_compute_save_reg_mask (void)
7967 unsigned int save_reg_mask = 0;
7968 unsigned long func_type = arm_current_func_type ();
7970 if (IS_NAKED (func_type))
7971 /* This should never really happen. */
7972 return 0;
7974 /* If we are creating a stack frame, then we must save the frame pointer,
7975 IP (which will hold the old stack pointer), LR and the PC. */
7976 if (frame_pointer_needed)
7977 save_reg_mask |=
7978 (1 << ARM_HARD_FRAME_POINTER_REGNUM)
7979 | (1 << IP_REGNUM)
7980 | (1 << LR_REGNUM)
7981 | (1 << PC_REGNUM);
7983 /* Volatile functions do not return, so there
7984 is no need to save any other registers. */
7985 if (IS_VOLATILE (func_type))
7986 return save_reg_mask;
7988 save_reg_mask |= arm_compute_save_reg0_reg12_mask ();
7990 /* Decide if we need to save the link register.
7991 Interrupt routines have their own banked link register,
7992 so they never need to save it.
7993 Otherwise if we do not use the link register we do not need to save
7994 it. If we are pushing other registers onto the stack however, we
7995 can save an instruction in the epilogue by pushing the link register
7996 now and then popping it back into the PC. This incurs extra memory
7997 accesses though, so we only do it when optimizing for size, and only
7998 if we know that we will not need a fancy return sequence. */
7999 if (regs_ever_live [LR_REGNUM]
8000 || (save_reg_mask
8001 && optimize_size
8002 && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL))
8003 save_reg_mask |= 1 << LR_REGNUM;
8005 if (cfun->machine->lr_save_eliminated)
8006 save_reg_mask &= ~ (1 << LR_REGNUM);
8008 if (TARGET_REALLY_IWMMXT
8009 && ((bit_count (save_reg_mask)
8010 + ARM_NUM_INTS (current_function_pretend_args_size)) % 2) != 0)
8012 unsigned int reg;
8014 /* The total number of registers that are going to be pushed
8015 onto the stack is odd. We need to ensure that the stack
8016 is 64-bit aligned before we start to save iWMMXt registers,
8017 and also before we start to create locals. (A local variable
8018 might be a double or long long which we will load/store using
8019 an iWMMXt instruction). Therefore we need to push another
8020 ARM register, so that the stack will be 64-bit aligned. We
8021 try to avoid using the arg registers (r0 - r3) as they might be
8022 used to pass values in a tail call. */
8023 for (reg = 4; reg <= 12; reg++)
8024 if ((save_reg_mask & (1 << reg)) == 0)
8025 break;
8027 if (reg <= 12)
8028 save_reg_mask |= (1 << reg);
8029 else
8031 cfun->machine->sibcall_blocked = 1;
8032 save_reg_mask |= (1 << 3);
8036 return save_reg_mask;
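/* For illustration only (a sketch, assuming TARGET_REALLY_IWMMXT): a
   function whose mask so far is {r4, r5, lr} pushes an odd number of
   words, so the loop above picks the first free register at or above
   r4 -- here r6 -- purely as 64-bit alignment padding:

       stmfd   sp!, {r4, r5, r6, lr}   @ r6 pushed only for alignment

   If none of r4-r12 is free, r3 is pushed instead and sibling calls
   are blocked, since r3 may be carrying an outgoing argument.  */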
8039 /* Generate a function exit sequence. If REALLY_RETURN is true, then do
8040 everything bar the final return instruction. */
8041 const char *
8042 output_return_instruction (rtx operand, int really_return, int reverse)
8044 char conditional[10];
8045 char instr[100];
8046 int reg;
8047 unsigned long live_regs_mask;
8048 unsigned long func_type;
8050 func_type = arm_current_func_type ();
8052 if (IS_NAKED (func_type))
8053 return "";
8055 if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
8057 /* If this function was declared non-returning, and we have found a tail
8058 call, then we have to trust that the called function won't return. */
8059 if (really_return)
8061 rtx ops[2];
8063 /* Otherwise, trap an attempted return by aborting. */
8064 ops[0] = operand;
8065 ops[1] = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)"
8066 : "abort");
8067 assemble_external_libcall (ops[1]);
8068 output_asm_insn (reverse ? "bl%D0\t%a1" : "bl%d0\t%a1", ops);
8071 return "";
8074 if (current_function_calls_alloca && !really_return)
8075 abort ();
8077 sprintf (conditional, "%%?%%%c0", reverse ? 'D' : 'd');
8079 return_used_this_function = 1;
8081 live_regs_mask = arm_compute_save_reg_mask ();
8083 if (live_regs_mask)
8085 const char * return_reg;
8087 /* If we do not have any special requirements for function exit
8088 (e.g. interworking, or an ISR) then we can load the return address
8089 directly into the PC. Otherwise we must load it into LR. */
8090 if (really_return
8091 && ! TARGET_INTERWORK)
8092 return_reg = reg_names[PC_REGNUM];
8093 else
8094 return_reg = reg_names[LR_REGNUM];
8096 if ((live_regs_mask & (1 << IP_REGNUM)) == (1 << IP_REGNUM))
8097 /* There are two possible reasons for the IP register being saved.
8098 Either a stack frame was created, in which case IP contains the
8099 old stack pointer, or an ISR routine corrupted it. If this is an
8100 ISR routine then just restore IP, otherwise restore IP into SP. */
8101 if (! IS_INTERRUPT (func_type))
8103 live_regs_mask &= ~ (1 << IP_REGNUM);
8104 live_regs_mask |= (1 << SP_REGNUM);
8107 /* On some ARM architectures it is faster to use LDR rather than
8108 LDM to load a single register. On other architectures, the
8109 cost is the same. In 26 bit mode, or for exception handlers,
8110 we have to use LDM to load the PC so that the CPSR is also
8111 restored. */
8112 for (reg = 0; reg <= LAST_ARM_REGNUM; reg++)
8114 if (live_regs_mask == (unsigned int)(1 << reg))
8115 break;
8117 if (reg <= LAST_ARM_REGNUM
8118 && (reg != LR_REGNUM
8119 || ! really_return
8120 || (TARGET_APCS_32 && ! IS_INTERRUPT (func_type))))
8122 sprintf (instr, "ldr%s\t%%|%s, [%%|sp], #4", conditional,
8123 (reg == LR_REGNUM) ? return_reg : reg_names[reg]);
8125 else
8127 char *p;
8128 int first = 1;
8130 /* Generate the load multiple instruction to restore the registers. */
8131 if (frame_pointer_needed)
8132 sprintf (instr, "ldm%sea\t%%|fp, {", conditional);
8133 else if (live_regs_mask & (1 << SP_REGNUM))
8134 sprintf (instr, "ldm%sfd\t%%|sp, {", conditional);
8135 else
8136 sprintf (instr, "ldm%sfd\t%%|sp!, {", conditional);
8138 p = instr + strlen (instr);
8140 for (reg = 0; reg <= SP_REGNUM; reg++)
8141 if (live_regs_mask & (1 << reg))
8143 int l = strlen (reg_names[reg]);
8145 if (first)
8146 first = 0;
8147 else
8149 memcpy (p, ", ", 2);
8150 p += 2;
8153 memcpy (p, "%|", 2);
8154 memcpy (p + 2, reg_names[reg], l);
8155 p += l + 2;
8158 if (live_regs_mask & (1 << LR_REGNUM))
8160 sprintf (p, "%s%%|%s}", first ? "" : ", ", return_reg);
8161 /* Decide if we need to add the ^ symbol to the end of the
8162 register list. This causes the saved condition codes
8163 register to be copied into the current condition codes
8164 register. We do the copy if we are conforming to the 32-bit
8165 ABI and this is an interrupt function, or if we are
8166 conforming to the 26-bit ABI. There is a special case for
8167 the 26-bit ABI however, which is if we are writing back the
8168 stack pointer but not loading the PC. In this case adding
8169 the ^ symbol would create a type 2 LDM instruction, where
8170 writeback is UNPREDICTABLE. We are safe in leaving the ^
8171 character off in this case however, since the actual return
8172 instruction will be a MOVS which will restore the CPSR. */
8173 if ((TARGET_APCS_32 && IS_INTERRUPT (func_type))
8174 || (! TARGET_APCS_32 && really_return))
8175 strcat (p, "^");
8177 else
8178 strcpy (p, "}");
8181 output_asm_insn (instr, & operand);
8183 /* See if we need to generate an extra instruction to
8184 perform the actual function return. */
8185 if (really_return
8186 && func_type != ARM_FT_INTERWORKED
8187 && (live_regs_mask & (1 << LR_REGNUM)) != 0)
8189 /* The return has already been handled
8190 by loading the LR into the PC. */
8191 really_return = 0;
8195 if (really_return)
8197 switch ((int) ARM_FUNC_TYPE (func_type))
8199 case ARM_FT_ISR:
8200 case ARM_FT_FIQ:
8201 sprintf (instr, "sub%ss\t%%|pc, %%|lr, #4", conditional);
8202 break;
8204 case ARM_FT_INTERWORKED:
8205 sprintf (instr, "bx%s\t%%|lr", conditional);
8206 break;
8208 case ARM_FT_EXCEPTION:
8209 sprintf (instr, "mov%ss\t%%|pc, %%|lr", conditional);
8210 break;
8212 default:
8213 /* ARMv5 implementations always provide BX, so interworking
8214 is the default unless APCS-26 is in use. */
8215 if ((insn_flags & FL_ARCH5) != 0 && TARGET_APCS_32)
8216 sprintf (instr, "bx%s\t%%|lr", conditional);
8217 else
8218 sprintf (instr, "mov%s%s\t%%|pc, %%|lr",
8219 conditional, TARGET_APCS_32 ? "" : "s");
8220 break;
8223 output_asm_insn (instr, & operand);
8226 return "";
8229 /* Write the function name into the code section, directly preceding
8230 the function prologue.
8232 Code similar to the following will be output:
8234 .ascii "arm_poke_function_name", 0
8235 .align
8237 .word 0xff000000 + (t1 - t0)
8238 arm_poke_function_name
8239 mov ip, sp
8240 stmfd sp!, {fp, ip, lr, pc}
8241 sub fp, ip, #4
8243 When performing a stack backtrace, code can inspect the value
8244 of 'pc' stored at 'fp' + 0. If the trace function then looks
8245 at location pc - 12 and the top 8 bits are set, then we know
8246 that there is a function name embedded immediately preceding this
8247 location, whose length is given by ((pc[-3]) & 0x00ffffff).
8249 We assume that pc is declared as a pointer to an unsigned long.
8251 It is of no benefit to output the function name if we are assembling
8252 a leaf function. Such functions do not contain a stack
8253 backtrace structure, so it is not possible to determine the
8254 function name. */
8255 void
8256 arm_poke_function_name (FILE *stream, const char *name)
8258 unsigned long alignlength;
8259 unsigned long length;
8260 rtx x;
8262 length = strlen (name) + 1;
8263 alignlength = ROUND_UP_WORD (length);
8265 ASM_OUTPUT_ASCII (stream, name, length);
8266 ASM_OUTPUT_ALIGN (stream, 2);
8267 x = GEN_INT ((unsigned HOST_WIDE_INT) 0xff000000 + alignlength);
8268 assemble_aligned_integer (UNITS_PER_WORD, x);
8271 /* Place some comments into the assembler stream
8272 describing the current function. */
8273 static void
8274 arm_output_function_prologue (FILE *f, HOST_WIDE_INT frame_size)
8276 unsigned long func_type;
8278 if (!TARGET_ARM)
8280 thumb_output_function_prologue (f, frame_size);
8281 return;
8284 /* Sanity check. */
8285 if (arm_ccfsm_state || arm_target_insn)
8286 abort ();
8288 func_type = arm_current_func_type ();
8290 switch ((int) ARM_FUNC_TYPE (func_type))
8292 default:
8293 case ARM_FT_NORMAL:
8294 break;
8295 case ARM_FT_INTERWORKED:
8296 asm_fprintf (f, "\t%@ Function supports interworking.\n");
8297 break;
8298 case ARM_FT_EXCEPTION_HANDLER:
8299 asm_fprintf (f, "\t%@ C++ Exception Handler.\n");
8300 break;
8301 case ARM_FT_ISR:
8302 asm_fprintf (f, "\t%@ Interrupt Service Routine.\n");
8303 break;
8304 case ARM_FT_FIQ:
8305 asm_fprintf (f, "\t%@ Fast Interrupt Service Routine.\n");
8306 break;
8307 case ARM_FT_EXCEPTION:
8308 asm_fprintf (f, "\t%@ ARM Exception Handler.\n");
8309 break;
8312 if (IS_NAKED (func_type))
8313 asm_fprintf (f, "\t%@ Naked Function: prologue and epilogue provided by programmer.\n");
8315 if (IS_VOLATILE (func_type))
8316 asm_fprintf (f, "\t%@ Volatile: function does not return.\n");
8318 if (IS_NESTED (func_type))
8319 asm_fprintf (f, "\t%@ Nested: function declared inside another function.\n");
8321 asm_fprintf (f, "\t%@ args = %d, pretend = %d, frame = %wd\n",
8322 current_function_args_size,
8323 current_function_pretend_args_size, frame_size);
8325 asm_fprintf (f, "\t%@ frame_needed = %d, uses_anonymous_args = %d\n",
8326 frame_pointer_needed,
8327 cfun->machine->uses_anonymous_args);
8329 if (cfun->machine->lr_save_eliminated)
8330 asm_fprintf (f, "\t%@ link register save eliminated.\n");
8332 #ifdef AOF_ASSEMBLER
8333 if (flag_pic)
8334 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, PIC_OFFSET_TABLE_REGNUM);
8335 #endif
8337 return_used_this_function = 0;
8340 const char *
8341 arm_output_epilogue (int really_return)
8343 int reg;
8344 unsigned long saved_regs_mask;
8345 unsigned long func_type;
8346 /* Floats_offset is the offset from the "virtual" frame. In an APCS
8347 frame that is $fp + 4 for a non-variadic function. */
8348 int floats_offset = 0;
8349 rtx operands[3];
8350 int frame_size = arm_get_frame_size ();
8351 FILE * f = asm_out_file;
8352 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
8353 unsigned int lrm_count = 0;
8355 /* If we have already generated the return instruction
8356 then it is futile to generate anything else. */
8357 if (use_return_insn (FALSE) && return_used_this_function)
8358 return "";
8360 func_type = arm_current_func_type ();
8362 if (IS_NAKED (func_type))
8363 /* Naked functions don't have epilogues. */
8364 return "";
8366 if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
8368 rtx op;
8370 /* A volatile function should never return. Call abort. */
8371 op = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)" : "abort");
8372 assemble_external_libcall (op);
8373 output_asm_insn ("bl\t%a0", &op);
8375 return "";
8378 if (ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER
8379 && ! really_return)
8380 /* If we are throwing an exception, then we really must
8381 be doing a return, so we can't tail-call. */
8382 abort ();
8384 saved_regs_mask = arm_compute_save_reg_mask ();
8386 if (TARGET_IWMMXT)
8387 lrm_count = bit_count (saved_regs_mask);
8389 /* XXX We should adjust floats_offset for any anonymous args, and then
8390 re-adjust vfp_offset below to compensate. */
8392 /* Compute how far away the floats will be. */
8393 for (reg = 0; reg <= LAST_ARM_REGNUM; reg++)
8394 if (saved_regs_mask & (1 << reg))
8395 floats_offset += 4;
8397 if (frame_pointer_needed)
8399 int vfp_offset = 4;
8401 if (arm_fpu_arch == FPUTYPE_FPA_EMU2)
8403 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
8404 if (regs_ever_live[reg] && !call_used_regs[reg])
8406 floats_offset += 12;
8407 asm_fprintf (f, "\tldfe\t%r, [%r, #-%d]\n",
8408 reg, FP_REGNUM, floats_offset - vfp_offset);
8411 else
8413 int start_reg = LAST_ARM_FP_REGNUM;
8415 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
8417 if (regs_ever_live[reg] && !call_used_regs[reg])
8419 floats_offset += 12;
8421 /* We can't unstack more than four registers at once. */
8422 if (start_reg - reg == 3)
8424 asm_fprintf (f, "\tlfm\t%r, 4, [%r, #-%d]\n",
8425 reg, FP_REGNUM, floats_offset - vfp_offset);
8426 start_reg = reg - 1;
8429 else
8431 if (reg != start_reg)
8432 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
8433 reg + 1, start_reg - reg,
8434 FP_REGNUM, floats_offset - vfp_offset);
8435 start_reg = reg - 1;
8439 /* Just in case the last register checked also needs unstacking. */
8440 if (reg != start_reg)
8441 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
8442 reg + 1, start_reg - reg,
8443 FP_REGNUM, floats_offset - vfp_offset);
8446 if (TARGET_IWMMXT)
8448 /* The frame pointer is guaranteed to be non-double-word aligned.
8449 This is because it is set to (old_stack_pointer - 4) and the
8450 old_stack_pointer was double word aligned. Thus the offset to
8451 the iWMMXt registers to be loaded must also be non-double-word
8452 sized, so that the resultant address *is* double-word aligned.
8453 We can ignore floats_offset since that was already included in
8454 the live_regs_mask. */
8455 lrm_count += (lrm_count % 2 ? 2 : 1);
8457 for (reg = FIRST_IWMMXT_REGNUM; reg <= LAST_IWMMXT_REGNUM; reg++)
8458 if (regs_ever_live[reg] && !call_used_regs[reg])
8460 asm_fprintf (f, "\twldrd\t%r, [%r, #-%d]\n",
8461 reg, FP_REGNUM, lrm_count * 4);
8462 lrm_count += 2;
8466 /* saved_regs_mask should contain the IP, which at the time of stack
8467 frame generation actually contains the old stack pointer. So a
8468 quick way to unwind the stack is simply to pop the IP register directly
8469 into the stack pointer. */
8470 if ((saved_regs_mask & (1 << IP_REGNUM)) == 0)
8471 abort ();
8472 saved_regs_mask &= ~ (1 << IP_REGNUM);
8473 saved_regs_mask |= (1 << SP_REGNUM);
8475 /* There are two registers left in saved_regs_mask - LR and PC. We
8476 only need to restore the LR register (the return address), but to
8477 save time we can load it directly into the PC, unless we need a
8478 special function exit sequence, or we are not really returning. */
8479 if (really_return && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL)
8480 /* Delete the LR from the register mask, so that the LR on
8481 the stack is loaded into the PC in the register mask. */
8482 saved_regs_mask &= ~ (1 << LR_REGNUM);
8483 else
8484 saved_regs_mask &= ~ (1 << PC_REGNUM);
8486 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, saved_regs_mask);
8488 if (IS_INTERRUPT (func_type))
8489 /* Interrupt handlers will have pushed the
8490 IP onto the stack, so restore it now. */
8491 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, 1 << IP_REGNUM);
8493 else
8495 /* Restore stack pointer if necessary. */
8496 if (frame_size + current_function_outgoing_args_size != 0)
8498 operands[0] = operands[1] = stack_pointer_rtx;
8499 operands[2] = GEN_INT (frame_size
8500 + current_function_outgoing_args_size);
8501 output_add_immediate (operands);
8504 if (arm_fpu_arch == FPUTYPE_FPA_EMU2)
8506 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
8507 if (regs_ever_live[reg] && !call_used_regs[reg])
8508 asm_fprintf (f, "\tldfe\t%r, [%r], #12\n",
8509 reg, SP_REGNUM);
8511 else
8513 int start_reg = FIRST_ARM_FP_REGNUM;
8515 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
8517 if (regs_ever_live[reg] && !call_used_regs[reg])
8519 if (reg - start_reg == 3)
8521 asm_fprintf (f, "\tlfmfd\t%r, 4, [%r]!\n",
8522 start_reg, SP_REGNUM);
8523 start_reg = reg + 1;
8526 else
8528 if (reg != start_reg)
8529 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
8530 start_reg, reg - start_reg,
8531 SP_REGNUM);
8533 start_reg = reg + 1;
8537 /* Just in case the last register checked also needs unstacking. */
8538 if (reg != start_reg)
8539 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
8540 start_reg, reg - start_reg, SP_REGNUM);
8543 if (TARGET_IWMMXT)
8544 for (reg = FIRST_IWMMXT_REGNUM; reg <= LAST_IWMMXT_REGNUM; reg++)
8545 if (regs_ever_live[reg] && !call_used_regs[reg])
8546 asm_fprintf (f, "\twldrd\t%r, [%r, #+8]!\n", reg, SP_REGNUM);
8548 /* If we can, restore the LR into the PC. */
8549 if (ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
8550 && really_return
8551 && current_function_pretend_args_size == 0
8552 && saved_regs_mask & (1 << LR_REGNUM))
8554 saved_regs_mask &= ~ (1 << LR_REGNUM);
8555 saved_regs_mask |= (1 << PC_REGNUM);
8558 /* Load the registers off the stack. If we only have one register
8559 to load use the LDR instruction - it is faster. */
8560 if (saved_regs_mask == (1 << LR_REGNUM))
8562 /* The exception handler ignores the LR, so we do
8563 not really need to load it off the stack. */
8564 if (eh_ofs)
8565 asm_fprintf (f, "\tadd\t%r, %r, #4\n", SP_REGNUM, SP_REGNUM);
8566 else
8567 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM, SP_REGNUM);
8569 else if (saved_regs_mask)
8571 if (saved_regs_mask & (1 << SP_REGNUM))
8572 /* Note - writeback to the stack register is not enabled
8573 (i.e. "ldmfd sp!..."). We know that the stack pointer is
8574 in the list of registers and if we add writeback the
8575 instruction becomes UNPREDICTABLE. */
8576 print_multi_reg (f, "ldmfd\t%r", SP_REGNUM, saved_regs_mask);
8577 else
8578 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, saved_regs_mask);
8581 if (current_function_pretend_args_size)
8583 /* Unwind the pre-pushed regs. */
8584 operands[0] = operands[1] = stack_pointer_rtx;
8585 operands[2] = GEN_INT (current_function_pretend_args_size);
8586 output_add_immediate (operands);
8590 if (! really_return
8591 || (ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
8592 && current_function_pretend_args_size == 0
8593 && saved_regs_mask & (1 << PC_REGNUM)))
8594 return "";
8596 /* Generate the return instruction. */
8597 switch ((int) ARM_FUNC_TYPE (func_type))
8599 case ARM_FT_EXCEPTION_HANDLER:
8600 /* Even in 26-bit mode we do a mov (rather than a movs)
8601 because we don't have the PSR bits set in the address. */
8602 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, EXCEPTION_LR_REGNUM);
8603 break;
8605 case ARM_FT_ISR:
8606 case ARM_FT_FIQ:
8607 asm_fprintf (f, "\tsubs\t%r, %r, #4\n", PC_REGNUM, LR_REGNUM);
8608 break;
8610 case ARM_FT_EXCEPTION:
8611 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
8612 break;
8614 case ARM_FT_INTERWORKED:
8615 asm_fprintf (f, "\tbx\t%r\n", LR_REGNUM);
8616 break;
8618 default:
8619 if (frame_pointer_needed)
8620 /* If we used the frame pointer then the return address
8621 will have been loaded off the stack directly into the
8622 PC, so there is no need to issue a MOV instruction
8623 here. */
8625 else if (current_function_pretend_args_size == 0
8626 && (saved_regs_mask & (1 << LR_REGNUM)))
8627 /* Similarly we may have been able to load LR into the PC
8628 even if we did not create a stack frame. */
8630 else if (TARGET_APCS_32)
8631 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, LR_REGNUM);
8632 else
8633 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
8634 break;
8637 return "";
8640 static void
8641 arm_output_function_epilogue (FILE *file ATTRIBUTE_UNUSED,
8642 HOST_WIDE_INT frame_size)
8644 if (TARGET_THUMB)
8646 /* ??? Probably not safe to set this here, since it assumes that a
8647 function will be emitted as assembly immediately after we generate
8648 RTL for it. This does not happen for inline functions. */
8649 return_used_this_function = 0;
8651 else
8653 /* We need to take into account any stack-frame rounding. */
8654 frame_size = arm_get_frame_size ();
8656 if (use_return_insn (FALSE)
8657 && return_used_this_function
8658 && (frame_size + current_function_outgoing_args_size) != 0
8659 && !frame_pointer_needed)
8660 abort ();
8662 /* Reset the ARM-specific per-function variables. */
8663 after_arm_reorg = 0;
8667 /* Generate and emit an insn that we will recognize as a push_multi.
8668 Unfortunately, since this insn does not reflect very well the actual
8669 semantics of the operation, we need to annotate the insn for the benefit
8670 of DWARF2 frame unwind information. */
8671 static rtx
8672 emit_multi_reg_push (int mask)
8674 int num_regs = 0;
8675 int num_dwarf_regs;
8676 int i, j;
8677 rtx par;
8678 rtx dwarf;
8679 int dwarf_par_index;
8680 rtx tmp, reg;
8682 for (i = 0; i <= LAST_ARM_REGNUM; i++)
8683 if (mask & (1 << i))
8684 num_regs++;
8686 if (num_regs == 0 || num_regs > 16)
8687 abort ();
8689 /* We don't record the PC in the dwarf frame information. */
8690 num_dwarf_regs = num_regs;
8691 if (mask & (1 << PC_REGNUM))
8692 num_dwarf_regs--;
8694 /* For the body of the insn we are going to generate an UNSPEC in
8695 parallel with several USEs. This allows the insn to be recognized
8696 by the push_multi pattern in the arm.md file. The insn looks
8697 something like this:
8699 (parallel [
8700 (set (mem:BLK (pre_dec:BLK (reg:SI sp)))
8701 (unspec:BLK [(reg:SI r4)] UNSPEC_PUSH_MULT))
8702 (use (reg:SI 11 fp))
8703 (use (reg:SI 12 ip))
8704 (use (reg:SI 14 lr))
8705 (use (reg:SI 15 pc))
8708 For the frame note however, we try to be more explicit and actually
8709 show each register being stored into the stack frame, plus a (single)
8710 decrement of the stack pointer. We do it this way in order to be
8711 friendly to the stack unwinding code, which only wants to see a single
8712 stack decrement per instruction. The RTL we generate for the note looks
8713 something like this:
8715 (sequence [
8716 (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int -20)))
8717 (set (mem:SI (reg:SI sp)) (reg:SI r4))
8718 (set (mem:SI (plus:SI (reg:SI sp) (const_int 4))) (reg:SI fp))
8719 (set (mem:SI (plus:SI (reg:SI sp) (const_int 8))) (reg:SI ip))
8720 (set (mem:SI (plus:SI (reg:SI sp) (const_int 12))) (reg:SI lr))
8723 This sequence is used both by the code that supports stack unwinding for
8724 exception handlers and by the code that generates dwarf2 frame debugging information. */
8726 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_regs));
8727 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (num_dwarf_regs + 1));
8728 dwarf_par_index = 1;
8730 for (i = 0; i <= LAST_ARM_REGNUM; i++)
8732 if (mask & (1 << i))
8734 reg = gen_rtx_REG (SImode, i);
8736 XVECEXP (par, 0, 0)
8737 = gen_rtx_SET (VOIDmode,
8738 gen_rtx_MEM (BLKmode,
8739 gen_rtx_PRE_DEC (BLKmode,
8740 stack_pointer_rtx)),
8741 gen_rtx_UNSPEC (BLKmode,
8742 gen_rtvec (1, reg),
8743 UNSPEC_PUSH_MULT));
8745 if (i != PC_REGNUM)
8747 tmp = gen_rtx_SET (VOIDmode,
8748 gen_rtx_MEM (SImode, stack_pointer_rtx),
8749 reg);
8750 RTX_FRAME_RELATED_P (tmp) = 1;
8751 XVECEXP (dwarf, 0, dwarf_par_index) = tmp;
8752 dwarf_par_index++;
8755 break;
8759 for (j = 1, i++; j < num_regs; i++)
8761 if (mask & (1 << i))
8763 reg = gen_rtx_REG (SImode, i);
8765 XVECEXP (par, 0, j) = gen_rtx_USE (VOIDmode, reg);
8767 if (i != PC_REGNUM)
8769 tmp = gen_rtx_SET (VOIDmode,
8770 gen_rtx_MEM (SImode,
8771 plus_constant (stack_pointer_rtx,
8772 4 * j)),
8773 reg);
8774 RTX_FRAME_RELATED_P (tmp) = 1;
8775 XVECEXP (dwarf, 0, dwarf_par_index++) = tmp;
8778 j++;
8782 par = emit_insn (par);
8784 tmp = gen_rtx_SET (SImode,
8785 stack_pointer_rtx,
8786 gen_rtx_PLUS (SImode,
8787 stack_pointer_rtx,
8788 GEN_INT (-4 * num_regs)));
8789 RTX_FRAME_RELATED_P (tmp) = 1;
8790 XVECEXP (dwarf, 0, 0) = tmp;
8792 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
8793 REG_NOTES (par));
8794 return par;
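/* For illustration only (an assumed use): pushing the APCS frame
   creation set {fp, ip, lr, pc} corresponds to

       emit_multi_reg_push ((1 << 11) | (1 << 12) | (1 << 14) | (1 << 15));

   which the push_multi pattern assembles as "stmfd sp!, {fp, ip, lr, pc}";
   the attached REG_FRAME_RELATED_EXPR note records a single 16 byte SP
   decrement plus the three non-PC stores.  */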
8797 static rtx
8798 emit_sfm (int base_reg, int count)
8800 rtx par;
8801 rtx dwarf;
8802 rtx tmp, reg;
8803 int i;
8805 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
8806 dwarf = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
8808 reg = gen_rtx_REG (XFmode, base_reg++);
8810 XVECEXP (par, 0, 0)
8811 = gen_rtx_SET (VOIDmode,
8812 gen_rtx_MEM (BLKmode,
8813 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
8814 gen_rtx_UNSPEC (BLKmode,
8815 gen_rtvec (1, reg),
8816 UNSPEC_PUSH_MULT));
8817 tmp
8818 = gen_rtx_SET (VOIDmode,
8819 gen_rtx_MEM (XFmode,
8820 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
8821 reg);
8822 RTX_FRAME_RELATED_P (tmp) = 1;
8823 XVECEXP (dwarf, 0, count - 1) = tmp;
8825 for (i = 1; i < count; i++)
8827 reg = gen_rtx_REG (XFmode, base_reg++);
8828 XVECEXP (par, 0, i) = gen_rtx_USE (VOIDmode, reg);
8830 tmp = gen_rtx_SET (VOIDmode,
8831 gen_rtx_MEM (XFmode,
8832 gen_rtx_PRE_DEC (BLKmode,
8833 stack_pointer_rtx)),
8834 reg);
8835 RTX_FRAME_RELATED_P (tmp) = 1;
8836 XVECEXP (dwarf, 0, count - i - 1) = tmp;
8839 par = emit_insn (par);
8840 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
8841 REG_NOTES (par));
8842 return par;
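/* For illustration only (assumed mnemonics): emit_sfm (reg, 4), as used
   by arm_expand_prologue below, pushes the four FPA registers
   reg..reg+3 (12 bytes each in XFmode) with one store-multiple, e.g.

       sfmfd   f4, 4, [sp]!

   mirroring the "lfmfd" loads in the epilogue above.  The dwarf note
   records one pre-decrement store per register, the first register
   pushed appearing last in the note vector.  */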
8845 /* Compute the distance from register FROM to register TO.
8846 These can be the arg pointer (26), the soft frame pointer (25),
8847 the stack pointer (13) or the hard frame pointer (11).
8848 Typical stack layout looks like this:
8850 old stack pointer -> | |
8851 ----
8852 | | \
8853 | | saved arguments for
8854 | | vararg functions
8855 | | /
8857 hard FP & arg pointer -> | | \
8858 | | stack
8859 | | frame
8860 | | /
8862 | | \
8863 | | call saved
8864 | | registers
8865 soft frame pointer -> | | /
8867 | | \
8868 | | local
8869 | | variables
8870 | | /
8872 | | \
8873 | | outgoing
8874 | | arguments
8875 current stack pointer -> | | /
8878 For a given function some or all of these stack components
8879 may not be needed, giving rise to the possibility of
8880 eliminating some of the registers.
8882 The values returned by this function must reflect the behavior
8883 of arm_expand_prologue() and arm_compute_save_reg_mask().
8885 The sign of the number returned reflects the direction of stack
8886 growth, so the values are positive for all eliminations except
8887 from the soft frame pointer to the hard frame pointer. */
8888 unsigned int
8889 arm_compute_initial_elimination_offset (unsigned int from, unsigned int to)
8891 unsigned int local_vars = arm_get_frame_size ();
8892 unsigned int outgoing_args = current_function_outgoing_args_size;
8893 unsigned int stack_frame;
8894 unsigned int call_saved_registers;
8895 unsigned long func_type;
8897 func_type = arm_current_func_type ();
8899 /* Volatile functions never return, so there is
8900 no need to save call-saved registers. */
8901 call_saved_registers = 0;
8902 if (! IS_VOLATILE (func_type))
8904 unsigned int reg_mask;
8905 unsigned int reg;
8907 /* Make sure that we compute which registers will be saved
8908 on the stack using the same algorithm that is used by
8909 the prologue creation code. */
8910 reg_mask = arm_compute_save_reg_mask ();
8912 /* Now count the number of bits set in reg_mask.
8913 If we have already counted the registers in the stack
8914 frame, do not count them again. Non call-saved registers
8915 might be saved in the call-save area of the stack, if
8916 doing so will preserve the stack's alignment. Hence we
8917 must count them here. For each set bit we need 4 bytes
8918 of stack space. */
8919 if (frame_pointer_needed)
8920 reg_mask &= 0x07ff;
8921 call_saved_registers += 4 * bit_count (reg_mask);
8923 /* If the hard floating point registers are going to be
8924 used then they must be saved on the stack as well.
8925 Each register occupies 12 bytes of stack space. */
8926 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
8927 if (regs_ever_live[reg] && ! call_used_regs[reg])
8928 call_saved_registers += 12;
8930 if (TARGET_REALLY_IWMMXT)
8931 /* Check for the call-saved iWMMXt registers. */
8932 for (reg = FIRST_IWMMXT_REGNUM; reg <= LAST_IWMMXT_REGNUM; reg++)
8933 if (regs_ever_live[reg] && ! call_used_regs [reg])
8934 call_saved_registers += 8;
8937 /* The stack frame contains 4 registers - the old frame pointer,
8938 the old stack pointer, the return address and the PC of the start
8939 of the function. */
8940 stack_frame = frame_pointer_needed ? 16 : 0;
8942 /* OK, now we have enough information to compute the distances.
8943 There must be an entry in these switch tables for each pair
8944 of registers in ELIMINABLE_REGS, even if some of the entries
8945 seem to be redundant or useless. */
8946 switch (from)
8948 case ARG_POINTER_REGNUM:
8949 switch (to)
8951 case THUMB_HARD_FRAME_POINTER_REGNUM:
8952 return 0;
8954 case FRAME_POINTER_REGNUM:
8955 /* This is the reverse of the soft frame pointer
8956 to hard frame pointer elimination below. */
8957 if (call_saved_registers == 0 && stack_frame == 0)
8958 return 0;
8959 return (call_saved_registers + stack_frame - 4);
8961 case ARM_HARD_FRAME_POINTER_REGNUM:
8962 /* If there is no stack frame then the hard
8963 frame pointer and the arg pointer coincide. */
8964 if (stack_frame == 0 && call_saved_registers != 0)
8965 return 0;
8966 /* FIXME: Not sure about this. Maybe we should always return 0 ? */
8967 return (frame_pointer_needed
8968 && current_function_needs_context
8969 && ! cfun->machine->uses_anonymous_args) ? 4 : 0;
8971 case STACK_POINTER_REGNUM:
8972 /* If nothing has been pushed on the stack at all
8973 then this will return -4. This *is* correct! */
8974 return call_saved_registers + stack_frame + local_vars + outgoing_args - 4;
8976 default:
8977 abort ();
8979 break;
8981 case FRAME_POINTER_REGNUM:
8982 switch (to)
8984 case THUMB_HARD_FRAME_POINTER_REGNUM:
8985 return 0;
8987 case ARM_HARD_FRAME_POINTER_REGNUM:
8988 /* The hard frame pointer points to the top entry in the
8989 stack frame. The soft frame pointer to the bottom entry
8990 in the stack frame. If there is no stack frame at all,
8991 then they are identical. */
8992 if (call_saved_registers == 0 && stack_frame == 0)
8993 return 0;
8994 return - (call_saved_registers + stack_frame - 4);
8996 case STACK_POINTER_REGNUM:
8997 return local_vars + outgoing_args;
8999 default:
9000 abort ();
9002 break;
9004 default:
9005 /* You cannot eliminate from the stack pointer.
9006 In theory you could eliminate from the hard frame
9007 pointer to the stack pointer, but this will never
9008 happen, since if a stack frame is not needed the
9009 hard frame pointer will never be used. */
9010 abort ();
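/* Worked example (illustrative only): a function saving
   {r4, r5, fp, ip, lr, pc} with 8 bytes of locals and no outgoing args
   has reg_mask & 0x07ff = {r4, r5}, so call_saved_registers = 8 and
   stack_frame = 16.  The switch above then yields:

       arg pointer    -> soft frame pointer:   8 + 16 - 4     =  20
       arg pointer    -> stack pointer:        8 + 16 + 8 - 4 =  28
       soft frame ptr -> hard frame pointer: -(8 + 16 - 4)    = -20
       soft frame ptr -> stack pointer:        8 + 0          =   8  */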
9014 /* Calculate the size of the stack frame, taking into account any
9015 padding that is required to ensure stack alignment. */
9016 HOST_WIDE_INT
9017 arm_get_frame_size (void)
9019 int regno;
9021 int base_size = ROUND_UP_WORD (get_frame_size ());
9022 int entry_size = 0;
9023 unsigned long func_type = arm_current_func_type ();
9024 int leaf;
9026 if (! TARGET_ARM)
9027 abort();
9029 if (! TARGET_ATPCS)
9030 return base_size;
9032 /* We need to know if we are a leaf function. Unfortunately, it
9033 is possible to be called after start_sequence has been called;
9034 in that case get_insns returns the insns for the sequence,
9035 not the function, and leaf_function_p will therefore return
9036 the wrong result.
9038 To work around this, we cache the computed frame size. This
9039 works because we will only be calling RTL expanders that need
9040 to know about leaf functions once reload has completed, and the
9041 frame size cannot be changed after that time, so we can safely
9042 use the cached value. */
9044 if (reload_completed)
9045 return cfun->machine->frame_size;
9047 leaf = leaf_function_p ();
9049 /* A leaf function does not need any stack alignment if it has nothing
9050 on the stack. */
9051 if (leaf && base_size == 0)
9053 cfun->machine->frame_size = 0;
9054 return 0;
9057 /* We know that SP will be word aligned on entry, and we must
9058 preserve that condition at any subroutine call. But those are
9059 the only constraints. */
9061 /* Space for variadic functions. */
9062 if (current_function_pretend_args_size)
9063 entry_size += current_function_pretend_args_size;
9065 /* Space for saved registers. */
9066 entry_size += bit_count (arm_compute_save_reg_mask ()) * 4;
9068 /* Space for saved FPA registers. */
9069 if (! IS_VOLATILE (func_type))
9071 for (regno = FIRST_ARM_FP_REGNUM; regno <= LAST_ARM_FP_REGNUM; regno++)
9072 if (regs_ever_live[regno] && ! call_used_regs[regno])
9073 entry_size += 12;
9076 if (TARGET_REALLY_IWMMXT)
9078 /* Check for the call-saved iWMMXt registers. */
9079 for (regno = FIRST_IWMMXT_REGNUM; regno <= LAST_IWMMXT_REGNUM; regno++)
9080 if (regs_ever_live [regno] && ! call_used_regs [regno])
9081 entry_size += 8;
9084 if ((entry_size + base_size + current_function_outgoing_args_size) & 7)
9085 base_size += 4;
9086 if ((entry_size + base_size + current_function_outgoing_args_size) & 7)
9087 abort ();
9089 cfun->machine->frame_size = base_size;
9091 return base_size;
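/* Worked example of the ATPCS rounding above (illustrative only): with
   4 bytes of pretend args, five saved core registers (20 bytes) and one
   FPA register (12 bytes), entry_size = 36.  If get_frame_size ()
   rounded to 8 and there are no outgoing args, 36 + 8 = 44 is not a
   multiple of 8, so base_size grows to 12 and the frame total becomes
   48, restoring 64-bit stack alignment.  */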
9094 /* Generate the prologue instructions for entry into an ARM function. */
9095 void
9096 arm_expand_prologue (void)
9098 int reg;
9099 rtx amount;
9100 rtx insn;
9101 rtx ip_rtx;
9102 unsigned long live_regs_mask;
9103 unsigned long func_type;
9104 int fp_offset = 0;
9105 int saved_pretend_args = 0;
9106 unsigned int args_to_push;
9108 func_type = arm_current_func_type ();
9110 /* Naked functions don't have prologues. */
9111 if (IS_NAKED (func_type))
9112 return;
9114 /* Make a copy of c_f_p_a_s as we may need to modify it locally. */
9115 args_to_push = current_function_pretend_args_size;
9117 /* Compute which register we will have to save onto the stack. */
9118 live_regs_mask = arm_compute_save_reg_mask ();
9120 ip_rtx = gen_rtx_REG (SImode, IP_REGNUM);
9122 if (frame_pointer_needed)
9124 if (IS_INTERRUPT (func_type))
9126 /* Interrupt functions must not corrupt any registers.
9127 Creating a frame pointer however, corrupts the IP
9128 register, so we must push it first. */
9129 insn = emit_multi_reg_push (1 << IP_REGNUM);
9131 /* Do not set RTX_FRAME_RELATED_P on this insn.
9132 The dwarf stack unwinding code only wants to see one
9133 stack decrement per function, and this is not it. If
9134 this instruction is labeled as being part of the frame
9135 creation sequence then dwarf2out_frame_debug_expr will
9136 abort when it encounters the assignment of IP to FP
9137 later on, since the use of SP here establishes SP as
9138 the CFA register and not IP.
9140 Anyway this instruction is not really part of the stack
9141 frame creation although it is part of the prologue. */
9143 else if (IS_NESTED (func_type))
9145 /* The static chain register is the same as the IP register,
9146 which is used as a scratch register during stack frame creation.
9147 To get around this we need to find somewhere to store IP
9148 whilst the frame is being created. We try the following
9149 places in order:
9151 1. The last argument register.
9152 2. A slot on the stack above the frame. (This only
9153 works if the function is not a varargs function).
9154 3. Register r3, after pushing the argument registers
9155 onto the stack.
9157 Note - we only need to tell the dwarf2 backend about the SP
9158 adjustment in the second variant; the static chain register
9159 doesn't need to be unwound, as it doesn't contain a value
9160 inherited from the caller. */
9162 if (regs_ever_live[3] == 0)
9164 insn = gen_rtx_REG (SImode, 3);
9165 insn = gen_rtx_SET (SImode, insn, ip_rtx);
9166 insn = emit_insn (insn);
9168 else if (args_to_push == 0)
9170 rtx dwarf;
9171 insn = gen_rtx_PRE_DEC (SImode, stack_pointer_rtx);
9172 insn = gen_rtx_MEM (SImode, insn);
9173 insn = gen_rtx_SET (VOIDmode, insn, ip_rtx);
9174 insn = emit_insn (insn);
9176 fp_offset = 4;
9178 /* Just tell the dwarf backend that we adjusted SP. */
9179 dwarf = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
9180 gen_rtx_PLUS (SImode, stack_pointer_rtx,
9181 GEN_INT (-fp_offset)));
9182 RTX_FRAME_RELATED_P (insn) = 1;
9183 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
9184 dwarf, REG_NOTES (insn));
9186 else
9188 /* Store the args on the stack. */
9189 if (cfun->machine->uses_anonymous_args)
9190 insn = emit_multi_reg_push
9191 ((0xf0 >> (args_to_push / 4)) & 0xf);
9192 else
9193 insn = emit_insn
9194 (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
9195 GEN_INT (- args_to_push)));
9197 RTX_FRAME_RELATED_P (insn) = 1;
9199 saved_pretend_args = 1;
9200 fp_offset = args_to_push;
9201 args_to_push = 0;
9203 /* Now reuse r3 to preserve IP. */
9204 insn = gen_rtx_REG (SImode, 3);
9205 insn = gen_rtx_SET (SImode, insn, ip_rtx);
9206 (void) emit_insn (insn);
9210 if (fp_offset)
9212 insn = gen_rtx_PLUS (SImode, stack_pointer_rtx, GEN_INT (fp_offset));
9213 insn = gen_rtx_SET (SImode, ip_rtx, insn);
9215 else
9216 insn = gen_movsi (ip_rtx, stack_pointer_rtx);
9218 insn = emit_insn (insn);
9219 RTX_FRAME_RELATED_P (insn) = 1;
9222 if (args_to_push)
9224 /* Push the argument registers, or reserve space for them. */
9225 if (cfun->machine->uses_anonymous_args)
9226 insn = emit_multi_reg_push
9227 ((0xf0 >> (args_to_push / 4)) & 0xf);
9228 else
9229 insn = emit_insn
9230 (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
9231 GEN_INT (- args_to_push)));
9232 RTX_FRAME_RELATED_P (insn) = 1;
9235 /* If this is an interrupt service routine, and the link register
9236 is going to be pushed, and we are not creating a stack frame,
9237 (which would involve an extra push of IP and a pop in the epilogue)
9238 subtracting four from LR now will mean that the function return
9239 can be done with a single instruction. */
9240 if ((func_type == ARM_FT_ISR || func_type == ARM_FT_FIQ)
9241 && (live_regs_mask & (1 << LR_REGNUM)) != 0
9242 && ! frame_pointer_needed)
9243 emit_insn (gen_rtx_SET (SImode,
9244 gen_rtx_REG (SImode, LR_REGNUM),
9245 gen_rtx_PLUS (SImode,
9246 gen_rtx_REG (SImode, LR_REGNUM),
9247 GEN_INT (-4))));
9249 if (live_regs_mask)
9251 insn = emit_multi_reg_push (live_regs_mask);
9252 RTX_FRAME_RELATED_P (insn) = 1;
9255 if (TARGET_IWMMXT)
9256 for (reg = FIRST_IWMMXT_REGNUM; reg <= LAST_IWMMXT_REGNUM; reg++)
9257 if (regs_ever_live[reg] && ! call_used_regs [reg])
9259 insn = gen_rtx_PRE_DEC (V2SImode, stack_pointer_rtx);
9260 insn = gen_rtx_MEM (V2SImode, insn);
9261 insn = emit_insn (gen_rtx_SET (VOIDmode, insn,
9262 gen_rtx_REG (V2SImode, reg)));
9263 RTX_FRAME_RELATED_P (insn) = 1;
9266 if (! IS_VOLATILE (func_type))
9268 /* Save any floating point call-saved registers used by this
9269 function. */
9270 if (arm_fpu_arch == FPUTYPE_FPA_EMU2)
9272 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
9273 if (regs_ever_live[reg] && !call_used_regs[reg])
9275 insn = gen_rtx_PRE_DEC (XFmode, stack_pointer_rtx);
9276 insn = gen_rtx_MEM (XFmode, insn);
9277 insn = emit_insn (gen_rtx_SET (VOIDmode, insn,
9278 gen_rtx_REG (XFmode, reg)));
9279 RTX_FRAME_RELATED_P (insn) = 1;
9282 else
9284 int start_reg = LAST_ARM_FP_REGNUM;
9286 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
9288 if (regs_ever_live[reg] && !call_used_regs[reg])
9290 if (start_reg - reg == 3)
9292 insn = emit_sfm (reg, 4);
9293 RTX_FRAME_RELATED_P (insn) = 1;
9294 start_reg = reg - 1;
9297 else
9299 if (start_reg != reg)
9301 insn = emit_sfm (reg + 1, start_reg - reg);
9302 RTX_FRAME_RELATED_P (insn) = 1;
9304 start_reg = reg - 1;
9308 if (start_reg != reg)
9310 insn = emit_sfm (reg + 1, start_reg - reg);
9311 RTX_FRAME_RELATED_P (insn) = 1;
9316 if (frame_pointer_needed)
9318 /* Create the new frame pointer. */
9319 insn = GEN_INT (-(4 + args_to_push + fp_offset));
9320 insn = emit_insn (gen_addsi3 (hard_frame_pointer_rtx, ip_rtx, insn));
9321 RTX_FRAME_RELATED_P (insn) = 1;
9323 if (IS_NESTED (func_type))
9325 /* Recover the static chain register. */
9326 if (regs_ever_live [3] == 0
9327 || saved_pretend_args)
9328 insn = gen_rtx_REG (SImode, 3);
9329 else /* if (current_function_pretend_args_size == 0) */
9331 insn = gen_rtx_PLUS (SImode, hard_frame_pointer_rtx,
9332 GEN_INT (4));
9333 insn = gen_rtx_MEM (SImode, insn);
9336 emit_insn (gen_rtx_SET (SImode, ip_rtx, insn));
9337 /* Add a USE to stop propagate_one_insn() from barfing. */
9338 emit_insn (gen_prologue_use (ip_rtx));
9342 amount = GEN_INT (-(arm_get_frame_size ()
9343 + current_function_outgoing_args_size));
9345 if (amount != const0_rtx)
9347 /* This add can produce multiple insns for a large constant, so we
9348 need to get tricky. */
9349 rtx last = get_last_insn ();
9350 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
9351 amount));
9354 last = last ? NEXT_INSN (last) : get_insns ();
9355 RTX_FRAME_RELATED_P (last) = 1;
9357 while (last != insn);
9359 /* If the frame pointer is needed, emit a special barrier that
9360 will prevent the scheduler from moving stores to the frame
9361 before the stack adjustment. */
9362 if (frame_pointer_needed)
9363 insn = emit_insn (gen_stack_tie (stack_pointer_rtx,
9364 hard_frame_pointer_rtx));
9367 /* If we are profiling, make sure no instructions are scheduled before
9368 the call to mcount. Similarly if the user has requested no
9369 scheduling in the prolog. */
9370 if (current_function_profile || TARGET_NO_SCHED_PRO)
9371 emit_insn (gen_blockage ());
9373 /* If the link register is being kept alive, with the return address in it,
9374 then make sure that it does not get reused by the ce2 pass. */
9375 if ((live_regs_mask & (1 << LR_REGNUM)) == 0)
9377 emit_insn (gen_prologue_use (gen_rtx_REG (SImode, LR_REGNUM)));
9378 cfun->machine->lr_save_eliminated = 1;
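/* For illustration only (assumed output): for a simple non-nested
   function with a frame pointer and a 16 byte local frame, the prologue
   laid down above is the classic APCS sequence

       mov     ip, sp
       stmfd   sp!, {fp, ip, lr, pc}
       sub     fp, ip, #4
       sub     sp, sp, #16

   For large frames the final SP adjustment is split over several insns,
   each of which is marked RTX_FRAME_RELATED_P by the loop above.  */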
9382 /* If CODE is 'd', then the X is a condition operand and the instruction
9383 should only be executed if the condition is true.
9384 If CODE is 'D', then the X is a condition operand and the instruction
9385 should only be executed if the condition is false: however, if the mode
9386 of the comparison is CCFPEmode, then always execute the instruction -- we
9387 do this because in these circumstances !GE does not necessarily imply LT;
9388 in these cases the instruction pattern will take care to make sure that
9389 an instruction containing %d will follow, thereby undoing the effects of
9390 doing this instruction unconditionally.
9391 If CODE is 'N' then X is a floating point operand that must be negated
9392 before output.
9393 If CODE is 'B' then output a bitwise inverted value of X (a const int).
9394 If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */
9395 void
9396 arm_print_operand (FILE *stream, rtx x, int code)
9398 switch (code)
9400 case '@':
9401 fputs (ASM_COMMENT_START, stream);
9402 return;
9404 case '_':
9405 fputs (user_label_prefix, stream);
9406 return;
9408 case '|':
9409 fputs (REGISTER_PREFIX, stream);
9410 return;
9412 case '?':
9413 if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
9415 if (TARGET_THUMB || current_insn_predicate != NULL)
9416 abort ();
9418 fputs (arm_condition_codes[arm_current_cc], stream);
9420 else if (current_insn_predicate)
9422 enum arm_cond_code code;
9424 if (TARGET_THUMB)
9425 abort ();
9427 code = get_arm_condition_code (current_insn_predicate);
9428 fputs (arm_condition_codes[code], stream);
9430 return;
9432 case 'N':
9434 REAL_VALUE_TYPE r;
9435 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
9436 r = REAL_VALUE_NEGATE (r);
9437 fprintf (stream, "%s", fp_const_from_val (&r));
9439 return;
9441 case 'B':
9442 if (GET_CODE (x) == CONST_INT)
9444 HOST_WIDE_INT val;
9445 val = ARM_SIGN_EXTEND (~INTVAL (x));
9446 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
9448 else
9450 putc ('~', stream);
9451 output_addr_const (stream, x);
9453 return;
9455 case 'i':
9456 fprintf (stream, "%s", arithmetic_instr (x, 1));
9457 return;
9459 /* Truncate Cirrus shift counts. */
9460 case 's':
9461 if (GET_CODE (x) == CONST_INT)
9463 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0x3f);
9464 return;
9466 arm_print_operand (stream, x, 0);
9467 return;
9469 case 'I':
9470 fprintf (stream, "%s", arithmetic_instr (x, 0));
9471 return;
9473 case 'S':
9475 HOST_WIDE_INT val;
9476 const char * shift = shift_op (x, &val);
9478 if (shift)
9480 fprintf (stream, ", %s ", shift_op (x, &val));
9481 if (val == -1)
9482 arm_print_operand (stream, XEXP (x, 1), 0);
9483 else
9484 fprintf (stream, "#" HOST_WIDE_INT_PRINT_DEC, val);
9487 return;
9489 /* An explanation of the 'Q', 'R' and 'H' register operands:
9491 In a pair of registers containing a DI or DF value the 'Q'
9492 operand returns the register number of the register containing
9493 the least significant part of the value. The 'R' operand returns
9494 the register number of the register containing the most
9495 significant part of the value.
9497 The 'H' operand returns the higher of the two register numbers.
9498 On a run where WORDS_BIG_ENDIAN is true the 'H' operand is the
9499 same as the 'Q' operand, since the most significant part of the
9500 value is held in the lower-numbered register. The reverse is true
9501 on systems where WORDS_BIG_ENDIAN is false.
9503 The purpose of these operands is to distinguish between cases
9504 where the endian-ness of the values is important (for example
9505 when they are added together), and cases where the endian-ness
9506 is irrelevant, but the order of register operations is important.
9507 For example when loading a value from memory into a register
9508 pair, the endian-ness does not matter. Provided that the value
9509 from the lower memory address is put into the lower numbered
9510 register, and the value from the higher address is put into the
9511 higher numbered register, the load will work regardless of whether
9512 the value being loaded is big-wordian or little-wordian. The
9513 order of the two register loads can matter however, if the address
9514 of the memory location is actually held in one of the registers
9515 being overwritten by the load. */
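/* For illustration only: given a DImode value in {r0, r1} on a target
   where WORDS_BIG_ENDIAN is false, %Q prints r0 (least significant
   word), %R prints r1 (most significant word) and %H prints r1 (the
   higher-numbered register); when WORDS_BIG_ENDIAN is true, %Q and %H
   both print r1 while %R prints r0.  */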
9516 case 'Q':
9517 if (REGNO (x) > LAST_ARM_REGNUM)
9518 abort ();
9519 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 1 : 0));
9520 return;
9522 case 'R':
9523 if (REGNO (x) > LAST_ARM_REGNUM)
9524 abort ();
9525 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 0 : 1));
9526 return;
9528 case 'H':
9529 if (REGNO (x) > LAST_ARM_REGNUM)
9530 abort ();
9531 asm_fprintf (stream, "%r", REGNO (x) + 1);
9532 return;
9534 case 'm':
9535 asm_fprintf (stream, "%r",
9536 GET_CODE (XEXP (x, 0)) == REG
9537 ? REGNO (XEXP (x, 0)) : REGNO (XEXP (XEXP (x, 0), 0)));
9538 return;
9540 case 'M':
9541 asm_fprintf (stream, "{%r-%r}",
9542 REGNO (x),
9543 REGNO (x) + ARM_NUM_REGS (GET_MODE (x)) - 1);
9544 return;
9546 case 'd':
9547 /* CONST_TRUE_RTX means always -- that's the default. */
9548 if (x == const_true_rtx)
9549 return;
9551 if (TARGET_ARM)
9552 fputs (arm_condition_codes[get_arm_condition_code (x)],
9553 stream);
9554 else
9555 fputs (thumb_condition_code (x, 0), stream);
9556 return;
9558 case 'D':
9559 /* CONST_TRUE_RTX means not always -- i.e. never. We shouldn't ever
9560 want to do that. */
9561 if (x == const_true_rtx)
9562 abort ();
9564 if (TARGET_ARM)
9565 fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
9566 (get_arm_condition_code (x))],
9567 stream);
9568 else
9569 fputs (thumb_condition_code (x, 1), stream);
9570 return;
9572 /* Cirrus registers can be accessed in a variety of ways:
9573 single floating point (f)
9574 double floating point (d)
9575 32-bit integer (fx)
9576 64-bit integer (dx). */
9577 case 'W': /* Cirrus register in F mode. */
9578 case 'X': /* Cirrus register in D mode. */
9579 case 'Y': /* Cirrus register in FX mode. */
9580 case 'Z': /* Cirrus register in DX mode. */
9581 if (GET_CODE (x) != REG || REGNO_REG_CLASS (REGNO (x)) != CIRRUS_REGS)
9582 abort ();
9584 fprintf (stream, "mv%s%s",
9585 code == 'W' ? "f"
9586 : code == 'X' ? "d"
9587 : code == 'Y' ? "fx" : "dx", reg_names[REGNO (x)] + 2);
9589 return;
9591 /* Print cirrus register in the mode specified by the register's mode. */
9592 case 'V':
9594 int mode = GET_MODE (x);
9596 if (GET_CODE (x) != REG || REGNO_REG_CLASS (REGNO (x)) != CIRRUS_REGS)
9597 abort ();
9599 fprintf (stream, "mv%s%s",
9600 mode == DFmode ? "d"
9601 : mode == SImode ? "fx"
9602 : mode == DImode ? "dx"
9603 : "f", reg_names[REGNO (x)] + 2);
9605 return;
9608 case 'U':
9609 if (GET_CODE (x) != REG
9610 || REGNO (x) < FIRST_IWMMXT_GR_REGNUM
9611 || REGNO (x) > LAST_IWMMXT_GR_REGNUM)
9612 /* Bad value for wCG register number. */
9613 abort ();
9614 else
9615 fprintf (stream, "%d", REGNO (x) - FIRST_IWMMXT_GR_REGNUM);
9616 return;
9618 /* Print an iWMMXt control register name. */
9619 case 'w':
9620 if (GET_CODE (x) != CONST_INT
9621 || INTVAL (x) < 0
9622 || INTVAL (x) >= 16)
9623 /* Bad value for wC register number. */
9624 abort ();
9625 else
9627 static const char * wc_reg_names [16] =
9629 "wCID", "wCon", "wCSSF", "wCASF",
9630 "wC4", "wC5", "wC6", "wC7",
9631 "wCGR0", "wCGR1", "wCGR2", "wCGR3",
9632 "wC12", "wC13", "wC14", "wC15"
9635 fputs (wc_reg_names [INTVAL (x)], stream);
9637 return;
9639 default:
9640 if (x == 0)
9641 abort ();
9643 if (GET_CODE (x) == REG)
9644 asm_fprintf (stream, "%r", REGNO (x));
9645 else if (GET_CODE (x) == MEM)
9647 output_memory_reference_mode = GET_MODE (x);
9648 output_address (XEXP (x, 0));
9650 else if (GET_CODE (x) == CONST_DOUBLE)
9651 fprintf (stream, "#%s", fp_immediate_constant (x));
9652 else if (GET_CODE (x) == NEG)
9653 abort (); /* This should never happen now. */
9654 else
9656 fputc ('#', stream);
9657 output_addr_const (stream, x);
9662 #ifndef AOF_ASSEMBLER
9663 /* Target hook for assembling integer objects. The ARM version needs to
9664 handle word-sized values specially. */
9665 static bool
9666 arm_assemble_integer (rtx x, unsigned int size, int aligned_p)
9668 if (size == UNITS_PER_WORD && aligned_p)
9670 fputs ("\t.word\t", asm_out_file);
9671 output_addr_const (asm_out_file, x);
9673 /* Mark symbols as position independent. We only do this in the
9674 .text segment, not in the .data segment. */
9675 if (NEED_GOT_RELOC && flag_pic && making_const_table &&
9676 (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF))
9678 if (GET_CODE (x) == SYMBOL_REF
9679 && (CONSTANT_POOL_ADDRESS_P (x)
9680 || ENCODED_SHORT_CALL_ATTR_P (XSTR (x, 0))))
9681 fputs ("(GOTOFF)", asm_out_file);
9682 else if (GET_CODE (x) == LABEL_REF)
9683 fputs ("(GOTOFF)", asm_out_file);
9684 else
9685 fputs ("(GOT)", asm_out_file);
9687 fputc ('\n', asm_out_file);
9688 return true;
9691 if (VECTOR_MODE_SUPPORTED_P (GET_MODE (x)))
9693 int i, units;
9695 if (GET_CODE (x) != CONST_VECTOR)
9696 abort ();
9698 units = CONST_VECTOR_NUNITS (x);
9700 switch (GET_MODE (x))
9702 case V2SImode: size = 4; break;
9703 case V4HImode: size = 2; break;
9704 case V8QImode: size = 1; break;
9705 default:
9706 abort ();
9709 for (i = 0; i < units; i++)
9711 rtx elt;
9713 elt = CONST_VECTOR_ELT (x, i);
9714 assemble_integer
9715 (elt, size, i == 0 ? BIGGEST_ALIGNMENT : size * BITS_PER_UNIT, 1);
9718 return true;
9721 return default_assemble_integer (x, size, aligned_p);
9723 #endif
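/* For illustration only (assumed output): inside a PIC constant table a
   reference to a global symbol is emitted as

       .word   foo(GOT)

   whereas a label or a constant pool entry gets

       .word   .L3(GOTOFF)

   so that the linker resolves it relative to the GOT base.  (foo and
   .L3 are placeholder names.)  */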
9725 /* A finite state machine takes care of noticing whether or not instructions
9726 can be conditionally executed, and thus decrease execution time and code
9727 size by deleting branch instructions. The fsm is controlled by
9728 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
9730 /* The states of the fsm controlling condition codes are:
9731 0: normal, do nothing special
9732 1: make ASM_OUTPUT_OPCODE not output this instruction
9733 2: make ASM_OUTPUT_OPCODE not output this instruction
9734 3: make instructions conditional
9735 4: make instructions conditional
9737 State transitions (state->state by whom under condition):
9738 0 -> 1 final_prescan_insn if the `target' is a label
9739 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
9740 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
9741 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
9742 3 -> 0 (*targetm.asm_out.internal_label) if the `target' label is reached
9743 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
9744 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
9745 (the target insn is arm_target_insn).
9747 If the jump clobbers the conditions then we use states 2 and 4.
9749 A similar thing can be done with conditional return insns.
9751 XXX In case the `target' is an unconditional branch, this conditionalising
9752 of the instructions always reduces code size, but not always execution
9753 time. But then, I want to reduce the code size to somewhere near what
9754 /bin/cc produces. */
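/* For illustration only (a sketch of the 0 -> 1 -> 3 path): a
   conditional branch over a short block, such as

       cmp     r0, #0
       beq     .L1
       add     r1, r1, #1
       mov     r2, #0
   .L1:

   is rewritten by suppressing the branch and conditionalising the
   skipped instructions:

       cmp     r0, #0
       addne   r1, r1, #1
       movne   r2, #0  */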
9756 /* Returns the index of the ARM condition code string in
9757 `arm_condition_codes'. COMPARISON should be an rtx like
9758 `(eq (...) (...))'. */
9759 static enum arm_cond_code
9760 get_arm_condition_code (rtx comparison)
9762 enum machine_mode mode = GET_MODE (XEXP (comparison, 0));
9763 int code;
9764 enum rtx_code comp_code = GET_CODE (comparison);
9766 if (GET_MODE_CLASS (mode) != MODE_CC)
9767 mode = SELECT_CC_MODE (comp_code, XEXP (comparison, 0),
9768 XEXP (comparison, 1));
9770 switch (mode)
9772 case CC_DNEmode: code = ARM_NE; goto dominance;
9773 case CC_DEQmode: code = ARM_EQ; goto dominance;
9774 case CC_DGEmode: code = ARM_GE; goto dominance;
9775 case CC_DGTmode: code = ARM_GT; goto dominance;
9776 case CC_DLEmode: code = ARM_LE; goto dominance;
9777 case CC_DLTmode: code = ARM_LT; goto dominance;
9778 case CC_DGEUmode: code = ARM_CS; goto dominance;
9779 case CC_DGTUmode: code = ARM_HI; goto dominance;
9780 case CC_DLEUmode: code = ARM_LS; goto dominance;
9781 case CC_DLTUmode: code = ARM_CC;
9783 dominance:
9784 if (comp_code != EQ && comp_code != NE)
9785 abort ();
9787 if (comp_code == EQ)
9788 return ARM_INVERSE_CONDITION_CODE (code);
9789 return code;
9791 case CC_NOOVmode:
9792 switch (comp_code)
9794 case NE: return ARM_NE;
9795 case EQ: return ARM_EQ;
9796 case GE: return ARM_PL;
9797 case LT: return ARM_MI;
9798 default: abort ();
9801 case CC_Zmode:
9802 switch (comp_code)
9804 case NE: return ARM_NE;
9805 case EQ: return ARM_EQ;
9806 default: abort ();
9809 case CCFPEmode:
9810 case CCFPmode:
9811 /* These encodings assume that AC=1 in the FPA system control
9812 byte. This allows us to handle all cases except UNEQ and
9813 LTGT. */
9814 switch (comp_code)
9816 case GE: return ARM_GE;
9817 case GT: return ARM_GT;
9818 case LE: return ARM_LS;
9819 case LT: return ARM_MI;
9820 case NE: return ARM_NE;
9821 case EQ: return ARM_EQ;
9822 case ORDERED: return ARM_VC;
9823 case UNORDERED: return ARM_VS;
9824 case UNLT: return ARM_LT;
9825 case UNLE: return ARM_LE;
9826 case UNGT: return ARM_HI;
9827 case UNGE: return ARM_PL;
9828 /* UNEQ and LTGT do not have a representation. */
9829 case UNEQ: /* Fall through. */
9830 case LTGT: /* Fall through. */
9831 default: abort ();
9834 case CC_SWPmode:
9835 switch (comp_code)
9837 case NE: return ARM_NE;
9838 case EQ: return ARM_EQ;
9839 case GE: return ARM_LE;
9840 case GT: return ARM_LT;
9841 case LE: return ARM_GE;
9842 case LT: return ARM_GT;
9843 case GEU: return ARM_LS;
9844 case GTU: return ARM_CC;
9845 case LEU: return ARM_CS;
9846 case LTU: return ARM_HI;
9847 default: abort ();
9850 case CC_Cmode:
9851 switch (comp_code)
9853 case LTU: return ARM_CS;
9854 case GEU: return ARM_CC;
9855 default: abort ();
9858 case CCmode:
9859 switch (comp_code)
9861 case NE: return ARM_NE;
9862 case EQ: return ARM_EQ;
9863 case GE: return ARM_GE;
9864 case GT: return ARM_GT;
9865 case LE: return ARM_LE;
9866 case LT: return ARM_LT;
9867 case GEU: return ARM_CS;
9868 case GTU: return ARM_HI;
9869 case LEU: return ARM_LS;
9870 case LTU: return ARM_CC;
9871 default: abort ();
9874 default: abort ();
9877 abort ();
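/* For illustration only: a comparison such as
   (gt (reg:CC_SWP cc) (const_int 0)) arises when the operands of the
   original compare were swapped, so the CC_SWPmode case above maps GT
   to ARM_LT; a plain (eq (reg:CC cc) (const_int 0)) simply returns
   ARM_EQ.  */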
9880 void
9881 arm_final_prescan_insn (rtx insn)
9883 /* BODY will hold the body of INSN. */
9884 rtx body = PATTERN (insn);
9886 /* This will be 1 if we are trying to repeat the trick (see the state 3
9887 handling below), in which case the sense of the test must be reversed. */
9888 int reverse = 0;
9890 /* If JUMP_CLOBBERS is one, the condition codes are clobbered when the
9891 branch is taken, even if the rtl suggests otherwise. It also
9892 means that we have to grub around within the jump expression to find
9893 out what the conditions are when the jump isn't taken. */
9894 int jump_clobbers = 0;
9896 /* If we start with a return insn, we only succeed if we find another one. */
9897 int seeking_return = 0;
9899 /* START_INSN will hold the insn from where we start looking. This is the
9900 first insn after the following code_label if REVERSE is true. */
9901 rtx start_insn = insn;
9903 /* If in state 4, check if the target branch is reached, in order to
9904 change back to state 0. */
9905 if (arm_ccfsm_state == 4)
9907 if (insn == arm_target_insn)
9909 arm_target_insn = NULL;
9910 arm_ccfsm_state = 0;
9912 return;
9915 /* If in state 3, it is possible to repeat the trick, if this insn is an
9916 unconditional branch to a label, and immediately following this branch
9917 is the previous target label which is only used once, and the label this
9918 branch jumps to is not too far off. */
9919 if (arm_ccfsm_state == 3)
9921 if (simplejump_p (insn))
9923 start_insn = next_nonnote_insn (start_insn);
9924 if (GET_CODE (start_insn) == BARRIER)
9926 /* XXX Isn't this always a barrier? */
9927 start_insn = next_nonnote_insn (start_insn);
9929 if (GET_CODE (start_insn) == CODE_LABEL
9930 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
9931 && LABEL_NUSES (start_insn) == 1)
9932 reverse = TRUE;
9933 else
9934 return;
9936 else if (GET_CODE (body) == RETURN)
9938 start_insn = next_nonnote_insn (start_insn);
9939 if (GET_CODE (start_insn) == BARRIER)
9940 start_insn = next_nonnote_insn (start_insn);
9941 if (GET_CODE (start_insn) == CODE_LABEL
9942 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
9943 && LABEL_NUSES (start_insn) == 1)
9945 reverse = TRUE;
9946 seeking_return = 1;
9948 else
9949 return;
9951 else
9952 return;
9955 if (arm_ccfsm_state != 0 && !reverse)
9956 abort ();
9957 if (GET_CODE (insn) != JUMP_INSN)
9958 return;
9960 /* This jump might be paralleled with a clobber of the condition codes;
9961 the jump should always come first. */
9962 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
9963 body = XVECEXP (body, 0, 0);
9965 if (reverse
9966 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
9967 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
9969 int insns_skipped;
9970 int fail = FALSE, succeed = FALSE;
9971 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
9972 int then_not_else = TRUE;
9973 rtx this_insn = start_insn, label = 0;
9975 /* If the jump cannot be done with one instruction, we cannot
9976 conditionally execute the instruction in the inverse case. */
9977 if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
9979 jump_clobbers = 1;
9980 return;
9983 /* Register the insn jumped to. */
9984 if (reverse)
9986 if (!seeking_return)
9987 label = XEXP (SET_SRC (body), 0);
9989 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
9990 label = XEXP (XEXP (SET_SRC (body), 1), 0);
9991 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
9993 label = XEXP (XEXP (SET_SRC (body), 2), 0);
9994 then_not_else = FALSE;
9996 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
9997 seeking_return = 1;
9998 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
10000 seeking_return = 1;
10001 then_not_else = FALSE;
10003 else
10004 abort ();
10006 /* See how many insns this branch skips, and what kind of insns. If all
10007 insns are okay, and the label or unconditional branch to the same
10008 label is not too far away, succeed. */
10009 for (insns_skipped = 0;
10010 !fail && !succeed && insns_skipped++ < max_insns_skipped;)
10012 rtx scanbody;
10014 this_insn = next_nonnote_insn (this_insn);
10015 if (!this_insn)
10016 break;
10018 switch (GET_CODE (this_insn))
10020 case CODE_LABEL:
10021 /* Succeed if it is the target label, otherwise fail since
10022 control falls in from somewhere else. */
10023 if (this_insn == label)
10025 if (jump_clobbers)
10027 arm_ccfsm_state = 2;
10028 this_insn = next_nonnote_insn (this_insn);
10030 else
10031 arm_ccfsm_state = 1;
10032 succeed = TRUE;
10034 else
10035 fail = TRUE;
10036 break;
10038 case BARRIER:
10039 /* Succeed if the following insn is the target label.
10040 Otherwise fail.
10041 If return insns are used then the last insn in a function
10042 will be a barrier. */
10043 this_insn = next_nonnote_insn (this_insn);
10044 if (this_insn && this_insn == label)
10046 if (jump_clobbers)
10048 arm_ccfsm_state = 2;
10049 this_insn = next_nonnote_insn (this_insn);
10051 else
10052 arm_ccfsm_state = 1;
10053 succeed = TRUE;
10055 else
10056 fail = TRUE;
10057 break;
10059 case CALL_INSN:
10060 /* If using 32-bit addresses the cc is not preserved over
10061 calls. */
10062 if (TARGET_APCS_32)
10064 /* Succeed if the following insn is the target label,
10065 or if the following two insns are a barrier and
10066 the target label. */
10067 this_insn = next_nonnote_insn (this_insn);
10068 if (this_insn && GET_CODE (this_insn) == BARRIER)
10069 this_insn = next_nonnote_insn (this_insn);
10071 if (this_insn && this_insn == label
10072 && insns_skipped < max_insns_skipped)
10074 if (jump_clobbers)
10076 arm_ccfsm_state = 2;
10077 this_insn = next_nonnote_insn (this_insn);
10079 else
10080 arm_ccfsm_state = 1;
10081 succeed = TRUE;
10083 else
10084 fail = TRUE;
10086 break;
10088 case JUMP_INSN:
10089 /* If this is an unconditional branch to the same label, succeed.
10090 If it is to another label, do nothing. If it is conditional,
10091 fail. */
10092 /* XXX Probably, the tests for SET and the PC are
10093 unnecessary. */
10095 scanbody = PATTERN (this_insn);
10096 if (GET_CODE (scanbody) == SET
10097 && GET_CODE (SET_DEST (scanbody)) == PC)
10099 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
10100 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
10102 arm_ccfsm_state = 2;
10103 succeed = TRUE;
10105 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
10106 fail = TRUE;
10108 /* Fail if a conditional return is undesirable (e.g. on a
10109 StrongARM), but still allow this if optimizing for size. */
10110 else if (GET_CODE (scanbody) == RETURN
10111 && !use_return_insn (TRUE)
10112 && !optimize_size)
10113 fail = TRUE;
10114 else if (GET_CODE (scanbody) == RETURN
10115 && seeking_return)
10117 arm_ccfsm_state = 2;
10118 succeed = TRUE;
10120 else if (GET_CODE (scanbody) == PARALLEL)
10122 switch (get_attr_conds (this_insn))
10124 case CONDS_NOCOND:
10125 break;
10126 default:
10127 fail = TRUE;
10128 break;
10131 else
10132 fail = TRUE; /* Unrecognized jump (e.g. an epilogue). */
10134 break;
10136 case INSN:
10137 /* Instructions using or affecting the condition codes make it
10138 fail. */
10139 scanbody = PATTERN (this_insn);
10140 if (!(GET_CODE (scanbody) == SET
10141 || GET_CODE (scanbody) == PARALLEL)
10142 || get_attr_conds (this_insn) != CONDS_NOCOND)
10143 fail = TRUE;
10145 /* A conditional Cirrus instruction must be followed by
10146 a non-Cirrus instruction. However, since we
10147 conditionalize instructions in this function, and since
10148 by the time we get here we can no longer insert
10149 instructions (nops) because shorten_branches() has
10150 already been called, we disable conditionalizing
10151 Cirrus instructions altogether, to be safe. */
10152 if (GET_CODE (scanbody) != USE
10153 && GET_CODE (scanbody) != CLOBBER
10154 && get_attr_cirrus (this_insn) != CIRRUS_NOT)
10155 fail = TRUE;
10156 break;
10158 default:
10159 break;
10162 if (succeed)
10164 if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
10165 arm_target_label = CODE_LABEL_NUMBER (label);
10166 else if (seeking_return || arm_ccfsm_state == 2)
10168 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
10170 this_insn = next_nonnote_insn (this_insn);
10171 if (this_insn && (GET_CODE (this_insn) == BARRIER
10172 || GET_CODE (this_insn) == CODE_LABEL))
10173 abort ();
10175 if (!this_insn)
10177 /* Oh dear! We ran off the end. Give up. */
10178 recog (PATTERN (insn), insn, NULL);
10179 arm_ccfsm_state = 0;
10180 arm_target_insn = NULL;
10181 return;
10183 arm_target_insn = this_insn;
10185 else
10186 abort ();
10187 if (jump_clobbers)
10189 if (reverse)
10190 abort ();
10191 arm_current_cc =
10192 get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
10193 0), 0), 1));
10194 if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
10195 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
10196 if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
10197 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
10199 else
10201 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
10202 what it was. */
10203 if (!reverse)
10204 arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
10205 0));
10208 if (reverse || then_not_else)
10209 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
10212 /* Restore recog_data (getting the attributes of other insns can
10213 destroy this array, but final.c assumes that it remains intact
10214 across this call; since the insn has been recognized already we
10215 call recog directly). */
10216 recog (PATTERN (insn), insn, NULL);
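/* For illustration only (an assumed example of the transformation, not
   taken from this file): the state machine above lets final
   conditionalize the instructions that a short forward branch would
   skip, turning, say,

   cmp r0, #0
   beq .L1
   add r1, r1, #1
   .L1:

   into

   cmp r0, #0
   addne r1, r1, #1

   which eliminates the branch entirely. */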
10220 /* Returns true if REGNO is a valid register
10221 for holding a quantity of type MODE. */
10223 arm_hard_regno_mode_ok (unsigned int regno, enum machine_mode mode)
10225 if (GET_MODE_CLASS (mode) == MODE_CC)
10226 return regno == CC_REGNUM;
10228 if (TARGET_THUMB)
10229 /* For the Thumb we only allow values bigger than SImode in
10230 registers 0 - 6, so that there is always a second low
10231 register available to hold the upper part of the value.
10232 We probably ought to ensure that the register is the
10233 start of an even numbered register pair. */
10234 return (ARM_NUM_REGS (mode) < 2) || (regno < LAST_LO_REGNUM);
10236 if (IS_CIRRUS_REGNUM (regno))
10237 /* We have outlawed SI values in Cirrus registers because they
10238 reside in the lower 32 bits, but SF values reside in the
10239 upper 32 bits. This causes gcc all sorts of grief. We can't
10240 even split the registers into pairs because Cirrus SI values
10241 get sign-extended to 64 bits -- aldyh. */
10242 return (GET_MODE_CLASS (mode) == MODE_FLOAT) || (mode == DImode);
10244 if (IS_IWMMXT_GR_REGNUM (regno))
10245 return mode == SImode;
10247 if (IS_IWMMXT_REGNUM (regno))
10248 return VALID_IWMMXT_REG_MODE (mode);
10250 if (regno <= LAST_ARM_REGNUM)
10251 /* We allow any value to be stored in the general registers. */
10252 return 1;
10254 if ( regno == FRAME_POINTER_REGNUM
10255 || regno == ARG_POINTER_REGNUM)
10256 /* We only allow integers in the fake hard registers. */
10257 return GET_MODE_CLASS (mode) == MODE_INT;
10259 /* The only registers left are the FPA registers
10260 which we only allow to hold FP values. */
10261 return GET_MODE_CLASS (mode) == MODE_FLOAT
10262 && regno >= FIRST_ARM_FP_REGNUM
10263 && regno <= LAST_ARM_FP_REGNUM;
10267 arm_regno_class (int regno)
10269 if (TARGET_THUMB)
10271 if (regno == STACK_POINTER_REGNUM)
10272 return STACK_REG;
10273 if (regno == CC_REGNUM)
10274 return CC_REG;
10275 if (regno < 8)
10276 return LO_REGS;
10277 return HI_REGS;
10280 if ( regno <= LAST_ARM_REGNUM
10281 || regno == FRAME_POINTER_REGNUM
10282 || regno == ARG_POINTER_REGNUM)
10283 return GENERAL_REGS;
10285 if (regno == CC_REGNUM)
10286 return NO_REGS;
10288 if (IS_CIRRUS_REGNUM (regno))
10289 return CIRRUS_REGS;
10291 if (IS_IWMMXT_REGNUM (regno))
10292 return IWMMXT_REGS;
10294 return FPA_REGS;
10297 /* Handle a special case when computing the offset
10298 of an argument from the frame pointer. */
10300 arm_debugger_arg_offset (int value, rtx addr)
10302 rtx insn;
10304 /* We are only interested in the case where dbxout_parms() failed to compute the offset. */
10305 if (value != 0)
10306 return 0;
10308 /* We can only cope with the case where the address is held in a register. */
10309 if (GET_CODE (addr) != REG)
10310 return 0;
10312 /* If we are using the frame pointer to point at the argument, then
10313 an offset of 0 is correct. */
10314 if (REGNO (addr) == (unsigned) HARD_FRAME_POINTER_REGNUM)
10315 return 0;
10317 /* If we are using the stack pointer to point at the
10318 argument, then an offset of 0 is correct. */
10319 if ((TARGET_THUMB || !frame_pointer_needed)
10320 && REGNO (addr) == SP_REGNUM)
10321 return 0;
10323 /* Oh dear. The argument is pointed to by a register rather
10324 than being held in a register, or being stored at a known
10325 offset from the frame pointer. Since GDB only understands
10326 those two kinds of argument we must translate the address
10327 held in the register into an offset from the frame pointer.
10328 We do this by searching through the insns for the function
10329 looking to see where this register gets its value. If the
10330 register is initialized from the frame pointer plus an offset
10331 then we are in luck and we can continue, otherwise we give up.
10333 This code is exercised by producing debugging information
10334 for a function with arguments like this:
10336 double func (double a, double b, int c, double d) {return d;}
10338 Without this code the stab for parameter 'd' will be set to
10339 an offset of 0 from the frame pointer, rather than 8. */
10341 /* The if() statement says:
10343 If the insn is a normal instruction
10344 and if the insn is setting the value in a register
10345 and if the register being set is the register holding the address of the argument
10346 and if the address is computed by an addition
10347 that involves adding to a register
10348 which is the frame pointer
10349 a constant integer
10351 then... */
10353 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
10355 if ( GET_CODE (insn) == INSN
10356 && GET_CODE (PATTERN (insn)) == SET
10357 && REGNO (XEXP (PATTERN (insn), 0)) == REGNO (addr)
10358 && GET_CODE (XEXP (PATTERN (insn), 1)) == PLUS
10359 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 0)) == REG
10360 && REGNO (XEXP (XEXP (PATTERN (insn), 1), 0)) == (unsigned) HARD_FRAME_POINTER_REGNUM
10361 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 1)) == CONST_INT
10364 value = INTVAL (XEXP (XEXP (PATTERN (insn), 1), 1));
10366 break;
10370 if (value == 0)
10372 debug_rtx (addr);
10373 warning ("unable to compute real location of stacked parameter");
10374 value = 8; /* XXX magic hack */
10377 return value;
10380 #define def_mbuiltin(MASK, NAME, TYPE, CODE) \
10381 do \
10383 if ((MASK) & insn_flags) \
10384 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, NULL, NULL_TREE); \
10386 while (0)
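/* The do { ... } while (0) wrapper makes def_mbuiltin expand to a single
   statement, so it can safely be used with a trailing semicolon inside
   an unbraced if/else. */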
10388 struct builtin_description
10390 const unsigned int mask;
10391 const enum insn_code icode;
10392 const char * const name;
10393 const enum arm_builtins code;
10394 const enum rtx_code comparison;
10395 const unsigned int flag;
10398 static const struct builtin_description bdesc_2arg[] =
10400 #define IWMMXT_BUILTIN(code, string, builtin) \
10401 { FL_IWMMXT, CODE_FOR_##code, "__builtin_arm_" string, \
10402 ARM_BUILTIN_##builtin, 0, 0 },
10404 IWMMXT_BUILTIN (addv8qi3, "waddb", WADDB)
10405 IWMMXT_BUILTIN (addv4hi3, "waddh", WADDH)
10406 IWMMXT_BUILTIN (addv2si3, "waddw", WADDW)
10407 IWMMXT_BUILTIN (subv8qi3, "wsubb", WSUBB)
10408 IWMMXT_BUILTIN (subv4hi3, "wsubh", WSUBH)
10409 IWMMXT_BUILTIN (subv2si3, "wsubw", WSUBW)
10410 IWMMXT_BUILTIN (ssaddv8qi3, "waddbss", WADDSSB)
10411 IWMMXT_BUILTIN (ssaddv4hi3, "waddhss", WADDSSH)
10412 IWMMXT_BUILTIN (ssaddv2si3, "waddwss", WADDSSW)
10413 IWMMXT_BUILTIN (sssubv8qi3, "wsubbss", WSUBSSB)
10414 IWMMXT_BUILTIN (sssubv4hi3, "wsubhss", WSUBSSH)
10415 IWMMXT_BUILTIN (sssubv2si3, "wsubwss", WSUBSSW)
10416 IWMMXT_BUILTIN (usaddv8qi3, "waddbus", WADDUSB)
10417 IWMMXT_BUILTIN (usaddv4hi3, "waddhus", WADDUSH)
10418 IWMMXT_BUILTIN (usaddv2si3, "waddwus", WADDUSW)
10419 IWMMXT_BUILTIN (ussubv8qi3, "wsubbus", WSUBUSB)
10420 IWMMXT_BUILTIN (ussubv4hi3, "wsubhus", WSUBUSH)
10421 IWMMXT_BUILTIN (ussubv2si3, "wsubwus", WSUBUSW)
10422 IWMMXT_BUILTIN (mulv4hi3, "wmulul", WMULUL)
10423 IWMMXT_BUILTIN (smulv4hi3_highpart, "wmulsh", WMULSH)
10424 IWMMXT_BUILTIN (umulv4hi3_highpart, "wmuluh", WMULUH)
10425 IWMMXT_BUILTIN (eqv8qi3, "wcmpeqb", WCMPEQB)
10426 IWMMXT_BUILTIN (eqv4hi3, "wcmpeqh", WCMPEQH)
10427 IWMMXT_BUILTIN (eqv2si3, "wcmpeqw", WCMPEQW)
10428 IWMMXT_BUILTIN (gtuv8qi3, "wcmpgtub", WCMPGTUB)
10429 IWMMXT_BUILTIN (gtuv4hi3, "wcmpgtuh", WCMPGTUH)
10430 IWMMXT_BUILTIN (gtuv2si3, "wcmpgtuw", WCMPGTUW)
10431 IWMMXT_BUILTIN (gtv8qi3, "wcmpgtsb", WCMPGTSB)
10432 IWMMXT_BUILTIN (gtv4hi3, "wcmpgtsh", WCMPGTSH)
10433 IWMMXT_BUILTIN (gtv2si3, "wcmpgtsw", WCMPGTSW)
10434 IWMMXT_BUILTIN (umaxv8qi3, "wmaxub", WMAXUB)
10435 IWMMXT_BUILTIN (smaxv8qi3, "wmaxsb", WMAXSB)
10436 IWMMXT_BUILTIN (umaxv4hi3, "wmaxuh", WMAXUH)
10437 IWMMXT_BUILTIN (smaxv4hi3, "wmaxsh", WMAXSH)
10438 IWMMXT_BUILTIN (umaxv2si3, "wmaxuw", WMAXUW)
10439 IWMMXT_BUILTIN (smaxv2si3, "wmaxsw", WMAXSW)
10440 IWMMXT_BUILTIN (uminv8qi3, "wminub", WMINUB)
10441 IWMMXT_BUILTIN (sminv8qi3, "wminsb", WMINSB)
10442 IWMMXT_BUILTIN (uminv4hi3, "wminuh", WMINUH)
10443 IWMMXT_BUILTIN (sminv4hi3, "wminsh", WMINSH)
10444 IWMMXT_BUILTIN (uminv2si3, "wminuw", WMINUW)
10445 IWMMXT_BUILTIN (sminv2si3, "wminsw", WMINSW)
10446 IWMMXT_BUILTIN (iwmmxt_anddi3, "wand", WAND)
10447 IWMMXT_BUILTIN (iwmmxt_nanddi3, "wandn", WANDN)
10448 IWMMXT_BUILTIN (iwmmxt_iordi3, "wor", WOR)
10449 IWMMXT_BUILTIN (iwmmxt_xordi3, "wxor", WXOR)
10450 IWMMXT_BUILTIN (iwmmxt_uavgv8qi3, "wavg2b", WAVG2B)
10451 IWMMXT_BUILTIN (iwmmxt_uavgv4hi3, "wavg2h", WAVG2H)
10452 IWMMXT_BUILTIN (iwmmxt_uavgrndv8qi3, "wavg2br", WAVG2BR)
10453 IWMMXT_BUILTIN (iwmmxt_uavgrndv4hi3, "wavg2hr", WAVG2HR)
10454 IWMMXT_BUILTIN (iwmmxt_wunpckilb, "wunpckilb", WUNPCKILB)
10455 IWMMXT_BUILTIN (iwmmxt_wunpckilh, "wunpckilh", WUNPCKILH)
10456 IWMMXT_BUILTIN (iwmmxt_wunpckilw, "wunpckilw", WUNPCKILW)
10457 IWMMXT_BUILTIN (iwmmxt_wunpckihb, "wunpckihb", WUNPCKIHB)
10458 IWMMXT_BUILTIN (iwmmxt_wunpckihh, "wunpckihh", WUNPCKIHH)
10459 IWMMXT_BUILTIN (iwmmxt_wunpckihw, "wunpckihw", WUNPCKIHW)
10460 IWMMXT_BUILTIN (iwmmxt_wmadds, "wmadds", WMADDS)
10461 IWMMXT_BUILTIN (iwmmxt_wmaddu, "wmaddu", WMADDU)
10463 #define IWMMXT_BUILTIN2(code, builtin) \
10464 { FL_IWMMXT, CODE_FOR_##code, NULL, ARM_BUILTIN_##builtin, 0, 0 },
10466 IWMMXT_BUILTIN2 (iwmmxt_wpackhss, WPACKHSS)
10467 IWMMXT_BUILTIN2 (iwmmxt_wpackwss, WPACKWSS)
10468 IWMMXT_BUILTIN2 (iwmmxt_wpackdss, WPACKDSS)
10469 IWMMXT_BUILTIN2 (iwmmxt_wpackhus, WPACKHUS)
10470 IWMMXT_BUILTIN2 (iwmmxt_wpackwus, WPACKWUS)
10471 IWMMXT_BUILTIN2 (iwmmxt_wpackdus, WPACKDUS)
10472 IWMMXT_BUILTIN2 (ashlv4hi3_di, WSLLH)
10473 IWMMXT_BUILTIN2 (ashlv4hi3, WSLLHI)
10474 IWMMXT_BUILTIN2 (ashlv2si3_di, WSLLW)
10475 IWMMXT_BUILTIN2 (ashlv2si3, WSLLWI)
10476 IWMMXT_BUILTIN2 (ashldi3_di, WSLLD)
10477 IWMMXT_BUILTIN2 (ashldi3_iwmmxt, WSLLDI)
10478 IWMMXT_BUILTIN2 (lshrv4hi3_di, WSRLH)
10479 IWMMXT_BUILTIN2 (lshrv4hi3, WSRLHI)
10480 IWMMXT_BUILTIN2 (lshrv2si3_di, WSRLW)
10481 IWMMXT_BUILTIN2 (lshrv2si3, WSRLWI)
10482 IWMMXT_BUILTIN2 (lshrdi3_di, WSRLD)
10483 IWMMXT_BUILTIN2 (lshrdi3, WSRLDI)
10484 IWMMXT_BUILTIN2 (ashrv4hi3_di, WSRAH)
10485 IWMMXT_BUILTIN2 (ashrv4hi3, WSRAHI)
10486 IWMMXT_BUILTIN2 (ashrv2si3_di, WSRAW)
10487 IWMMXT_BUILTIN2 (ashrv2si3, WSRAWI)
10488 IWMMXT_BUILTIN2 (ashrdi3_di, WSRAD)
10489 IWMMXT_BUILTIN2 (ashrdi3, WSRADI)
10490 IWMMXT_BUILTIN2 (rorv4hi3_di, WRORH)
10491 IWMMXT_BUILTIN2 (rorv4hi3, WRORHI)
10492 IWMMXT_BUILTIN2 (rorv2si3_di, WRORW)
10493 IWMMXT_BUILTIN2 (rorv2si3, WRORWI)
10494 IWMMXT_BUILTIN2 (rordi3_di, WRORD)
10495 IWMMXT_BUILTIN2 (rordi3, WRORDI)
10496 IWMMXT_BUILTIN2 (iwmmxt_wmacuz, WMACUZ)
10497 IWMMXT_BUILTIN2 (iwmmxt_wmacsz, WMACSZ)
10500 static const struct builtin_description bdesc_1arg[] =
10502 IWMMXT_BUILTIN (iwmmxt_tmovmskb, "tmovmskb", TMOVMSKB)
10503 IWMMXT_BUILTIN (iwmmxt_tmovmskh, "tmovmskh", TMOVMSKH)
10504 IWMMXT_BUILTIN (iwmmxt_tmovmskw, "tmovmskw", TMOVMSKW)
10505 IWMMXT_BUILTIN (iwmmxt_waccb, "waccb", WACCB)
10506 IWMMXT_BUILTIN (iwmmxt_wacch, "wacch", WACCH)
10507 IWMMXT_BUILTIN (iwmmxt_waccw, "waccw", WACCW)
10508 IWMMXT_BUILTIN (iwmmxt_wunpckehub, "wunpckehub", WUNPCKEHUB)
10509 IWMMXT_BUILTIN (iwmmxt_wunpckehuh, "wunpckehuh", WUNPCKEHUH)
10510 IWMMXT_BUILTIN (iwmmxt_wunpckehuw, "wunpckehuw", WUNPCKEHUW)
10511 IWMMXT_BUILTIN (iwmmxt_wunpckehsb, "wunpckehsb", WUNPCKEHSB)
10512 IWMMXT_BUILTIN (iwmmxt_wunpckehsh, "wunpckehsh", WUNPCKEHSH)
10513 IWMMXT_BUILTIN (iwmmxt_wunpckehsw, "wunpckehsw", WUNPCKEHSW)
10514 IWMMXT_BUILTIN (iwmmxt_wunpckelub, "wunpckelub", WUNPCKELUB)
10515 IWMMXT_BUILTIN (iwmmxt_wunpckeluh, "wunpckeluh", WUNPCKELUH)
10516 IWMMXT_BUILTIN (iwmmxt_wunpckeluw, "wunpckeluw", WUNPCKELUW)
10517 IWMMXT_BUILTIN (iwmmxt_wunpckelsb, "wunpckelsb", WUNPCKELSB)
10518 IWMMXT_BUILTIN (iwmmxt_wunpckelsh, "wunpckelsh", WUNPCKELSH)
10519 IWMMXT_BUILTIN (iwmmxt_wunpckelsw, "wunpckelsw", WUNPCKELSW)
10522 /* Set up all the iWMMXt builtins. This is
10523 not called if TARGET_IWMMXT is zero. */
10525 static void
10526 arm_init_iwmmxt_builtins (void)
10528 const struct builtin_description * d;
10529 size_t i;
10530 tree endlink = void_list_node;
10532 tree int_ftype_int
10533 = build_function_type (integer_type_node,
10534 tree_cons (NULL_TREE, integer_type_node, endlink));
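/* Each of these tree chains encodes a C prototype terminated by
   void_list_node; for example, int_ftype_int above corresponds to
   `int f (int)', and v8qi_ftype_v8qi_v8qi_int below to
   `v8qi f (v8qi, v8qi, int)'. */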
10535 tree v8qi_ftype_v8qi_v8qi_int
10536 = build_function_type (V8QI_type_node,
10537 tree_cons (NULL_TREE, V8QI_type_node,
10538 tree_cons (NULL_TREE, V8QI_type_node,
10539 tree_cons (NULL_TREE,
10540 integer_type_node,
10541 endlink))));
10542 tree v4hi_ftype_v4hi_int
10543 = build_function_type (V4HI_type_node,
10544 tree_cons (NULL_TREE, V4HI_type_node,
10545 tree_cons (NULL_TREE, integer_type_node,
10546 endlink)));
10547 tree v2si_ftype_v2si_int
10548 = build_function_type (V2SI_type_node,
10549 tree_cons (NULL_TREE, V2SI_type_node,
10550 tree_cons (NULL_TREE, integer_type_node,
10551 endlink)));
10552 tree v2si_ftype_di_di
10553 = build_function_type (V2SI_type_node,
10554 tree_cons (NULL_TREE, long_long_integer_type_node,
10555 tree_cons (NULL_TREE, long_long_integer_type_node,
10556 endlink)));
10557 tree di_ftype_di_int
10558 = build_function_type (long_long_integer_type_node,
10559 tree_cons (NULL_TREE, long_long_integer_type_node,
10560 tree_cons (NULL_TREE, integer_type_node,
10561 endlink)));
10562 tree di_ftype_di_int_int
10563 = build_function_type (long_long_integer_type_node,
10564 tree_cons (NULL_TREE, long_long_integer_type_node,
10565 tree_cons (NULL_TREE, integer_type_node,
10566 tree_cons (NULL_TREE,
10567 integer_type_node,
10568 endlink))));
10569 tree int_ftype_v8qi
10570 = build_function_type (integer_type_node,
10571 tree_cons (NULL_TREE, V8QI_type_node,
10572 endlink));
10573 tree int_ftype_v4hi
10574 = build_function_type (integer_type_node,
10575 tree_cons (NULL_TREE, V4HI_type_node,
10576 endlink));
10577 tree int_ftype_v2si
10578 = build_function_type (integer_type_node,
10579 tree_cons (NULL_TREE, V2SI_type_node,
10580 endlink));
10581 tree int_ftype_v8qi_int
10582 = build_function_type (integer_type_node,
10583 tree_cons (NULL_TREE, V8QI_type_node,
10584 tree_cons (NULL_TREE, integer_type_node,
10585 endlink)));
10586 tree int_ftype_v4hi_int
10587 = build_function_type (integer_type_node,
10588 tree_cons (NULL_TREE, V4HI_type_node,
10589 tree_cons (NULL_TREE, integer_type_node,
10590 endlink)));
10591 tree int_ftype_v2si_int
10592 = build_function_type (integer_type_node,
10593 tree_cons (NULL_TREE, V2SI_type_node,
10594 tree_cons (NULL_TREE, integer_type_node,
10595 endlink)));
10596 tree v8qi_ftype_v8qi_int_int
10597 = build_function_type (V8QI_type_node,
10598 tree_cons (NULL_TREE, V8QI_type_node,
10599 tree_cons (NULL_TREE, integer_type_node,
10600 tree_cons (NULL_TREE,
10601 integer_type_node,
10602 endlink))));
10603 tree v4hi_ftype_v4hi_int_int
10604 = build_function_type (V4HI_type_node,
10605 tree_cons (NULL_TREE, V4HI_type_node,
10606 tree_cons (NULL_TREE, integer_type_node,
10607 tree_cons (NULL_TREE,
10608 integer_type_node,
10609 endlink))));
10610 tree v2si_ftype_v2si_int_int
10611 = build_function_type (V2SI_type_node,
10612 tree_cons (NULL_TREE, V2SI_type_node,
10613 tree_cons (NULL_TREE, integer_type_node,
10614 tree_cons (NULL_TREE,
10615 integer_type_node,
10616 endlink))));
10617 /* Miscellaneous. */
10618 tree v8qi_ftype_v4hi_v4hi
10619 = build_function_type (V8QI_type_node,
10620 tree_cons (NULL_TREE, V4HI_type_node,
10621 tree_cons (NULL_TREE, V4HI_type_node,
10622 endlink)));
10623 tree v4hi_ftype_v2si_v2si
10624 = build_function_type (V4HI_type_node,
10625 tree_cons (NULL_TREE, V2SI_type_node,
10626 tree_cons (NULL_TREE, V2SI_type_node,
10627 endlink)));
10628 tree v2si_ftype_v4hi_v4hi
10629 = build_function_type (V2SI_type_node,
10630 tree_cons (NULL_TREE, V4HI_type_node,
10631 tree_cons (NULL_TREE, V4HI_type_node,
10632 endlink)));
10633 tree v2si_ftype_v8qi_v8qi
10634 = build_function_type (V2SI_type_node,
10635 tree_cons (NULL_TREE, V8QI_type_node,
10636 tree_cons (NULL_TREE, V8QI_type_node,
10637 endlink)));
10638 tree v4hi_ftype_v4hi_di
10639 = build_function_type (V4HI_type_node,
10640 tree_cons (NULL_TREE, V4HI_type_node,
10641 tree_cons (NULL_TREE,
10642 long_long_integer_type_node,
10643 endlink)));
10644 tree v2si_ftype_v2si_di
10645 = build_function_type (V2SI_type_node,
10646 tree_cons (NULL_TREE, V2SI_type_node,
10647 tree_cons (NULL_TREE,
10648 long_long_integer_type_node,
10649 endlink)));
10650 tree void_ftype_int_int
10651 = build_function_type (void_type_node,
10652 tree_cons (NULL_TREE, integer_type_node,
10653 tree_cons (NULL_TREE, integer_type_node,
10654 endlink)));
10655 tree di_ftype_void
10656 = build_function_type (long_long_unsigned_type_node, endlink);
10657 tree di_ftype_v8qi
10658 = build_function_type (long_long_integer_type_node,
10659 tree_cons (NULL_TREE, V8QI_type_node,
10660 endlink));
10661 tree di_ftype_v4hi
10662 = build_function_type (long_long_integer_type_node,
10663 tree_cons (NULL_TREE, V4HI_type_node,
10664 endlink));
10665 tree di_ftype_v2si
10666 = build_function_type (long_long_integer_type_node,
10667 tree_cons (NULL_TREE, V2SI_type_node,
10668 endlink));
10669 tree v2si_ftype_v4hi
10670 = build_function_type (V2SI_type_node,
10671 tree_cons (NULL_TREE, V4HI_type_node,
10672 endlink));
10673 tree v4hi_ftype_v8qi
10674 = build_function_type (V4HI_type_node,
10675 tree_cons (NULL_TREE, V8QI_type_node,
10676 endlink));
10678 tree di_ftype_di_v4hi_v4hi
10679 = build_function_type (long_long_unsigned_type_node,
10680 tree_cons (NULL_TREE,
10681 long_long_unsigned_type_node,
10682 tree_cons (NULL_TREE, V4HI_type_node,
10683 tree_cons (NULL_TREE,
10684 V4HI_type_node,
10685 endlink))));
10687 tree di_ftype_v4hi_v4hi
10688 = build_function_type (long_long_unsigned_type_node,
10689 tree_cons (NULL_TREE, V4HI_type_node,
10690 tree_cons (NULL_TREE, V4HI_type_node,
10691 endlink)));
10693 /* Normal vector binops. */
10694 tree v8qi_ftype_v8qi_v8qi
10695 = build_function_type (V8QI_type_node,
10696 tree_cons (NULL_TREE, V8QI_type_node,
10697 tree_cons (NULL_TREE, V8QI_type_node,
10698 endlink)));
10699 tree v4hi_ftype_v4hi_v4hi
10700 = build_function_type (V4HI_type_node,
10701 tree_cons (NULL_TREE, V4HI_type_node,
10702 tree_cons (NULL_TREE, V4HI_type_node,
10703 endlink)));
10704 tree v2si_ftype_v2si_v2si
10705 = build_function_type (V2SI_type_node,
10706 tree_cons (NULL_TREE, V2SI_type_node,
10707 tree_cons (NULL_TREE, V2SI_type_node,
10708 endlink)));
10709 tree di_ftype_di_di
10710 = build_function_type (long_long_unsigned_type_node,
10711 tree_cons (NULL_TREE, long_long_unsigned_type_node,
10712 tree_cons (NULL_TREE,
10713 long_long_unsigned_type_node,
10714 endlink)));
10716 /* Add all builtins that are more or less simple operations on two
10717 operands. */
10718 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
10720 /* Use one of the operands; the target can have a different mode for
10721 mask-generating compares. */
10722 enum machine_mode mode;
10723 tree type;
10725 if (d->name == 0)
10726 continue;
10728 mode = insn_data[d->icode].operand[1].mode;
10730 switch (mode)
10732 case V8QImode:
10733 type = v8qi_ftype_v8qi_v8qi;
10734 break;
10735 case V4HImode:
10736 type = v4hi_ftype_v4hi_v4hi;
10737 break;
10738 case V2SImode:
10739 type = v2si_ftype_v2si_v2si;
10740 break;
10741 case DImode:
10742 type = di_ftype_di_di;
10743 break;
10745 default:
10746 abort ();
10749 def_mbuiltin (d->mask, d->name, type, d->code);
10752 /* Add the remaining MMX insns with somewhat more complicated types. */
10753 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wzero", di_ftype_void, ARM_BUILTIN_WZERO);
10754 def_mbuiltin (FL_IWMMXT, "__builtin_arm_setwcx", void_ftype_int_int, ARM_BUILTIN_SETWCX);
10755 def_mbuiltin (FL_IWMMXT, "__builtin_arm_getwcx", int_ftype_int, ARM_BUILTIN_GETWCX);
10757 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsllh", v4hi_ftype_v4hi_di, ARM_BUILTIN_WSLLH);
10758 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsllw", v2si_ftype_v2si_di, ARM_BUILTIN_WSLLW);
10759 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wslld", di_ftype_di_di, ARM_BUILTIN_WSLLD);
10760 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsllhi", v4hi_ftype_v4hi_int, ARM_BUILTIN_WSLLHI);
10761 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsllwi", v2si_ftype_v2si_int, ARM_BUILTIN_WSLLWI);
10762 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wslldi", di_ftype_di_int, ARM_BUILTIN_WSLLDI);
10764 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrlh", v4hi_ftype_v4hi_di, ARM_BUILTIN_WSRLH);
10765 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrlw", v2si_ftype_v2si_di, ARM_BUILTIN_WSRLW);
10766 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrld", di_ftype_di_di, ARM_BUILTIN_WSRLD);
10767 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrlhi", v4hi_ftype_v4hi_int, ARM_BUILTIN_WSRLHI);
10768 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrlwi", v2si_ftype_v2si_int, ARM_BUILTIN_WSRLWI);
10769 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrldi", di_ftype_di_int, ARM_BUILTIN_WSRLDI);
10771 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrah", v4hi_ftype_v4hi_di, ARM_BUILTIN_WSRAH);
10772 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsraw", v2si_ftype_v2si_di, ARM_BUILTIN_WSRAW);
10773 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrad", di_ftype_di_di, ARM_BUILTIN_WSRAD);
10774 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrahi", v4hi_ftype_v4hi_int, ARM_BUILTIN_WSRAHI);
10775 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrawi", v2si_ftype_v2si_int, ARM_BUILTIN_WSRAWI);
10776 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsradi", di_ftype_di_int, ARM_BUILTIN_WSRADI);
10778 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wrorh", v4hi_ftype_v4hi_di, ARM_BUILTIN_WRORH);
10779 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wrorw", v2si_ftype_v2si_di, ARM_BUILTIN_WRORW);
10780 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wrord", di_ftype_di_di, ARM_BUILTIN_WRORD);
10781 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wrorhi", v4hi_ftype_v4hi_int, ARM_BUILTIN_WRORHI);
10782 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wrorwi", v2si_ftype_v2si_int, ARM_BUILTIN_WRORWI);
10783 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wrordi", di_ftype_di_int, ARM_BUILTIN_WRORDI);
10785 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wshufh", v4hi_ftype_v4hi_int, ARM_BUILTIN_WSHUFH);
10787 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsadb", v2si_ftype_v8qi_v8qi, ARM_BUILTIN_WSADB);
10788 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsadh", v2si_ftype_v4hi_v4hi, ARM_BUILTIN_WSADH);
10789 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsadbz", v2si_ftype_v8qi_v8qi, ARM_BUILTIN_WSADBZ);
10790 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsadhz", v2si_ftype_v4hi_v4hi, ARM_BUILTIN_WSADHZ);
10792 def_mbuiltin (FL_IWMMXT, "__builtin_arm_textrmsb", int_ftype_v8qi_int, ARM_BUILTIN_TEXTRMSB);
10793 def_mbuiltin (FL_IWMMXT, "__builtin_arm_textrmsh", int_ftype_v4hi_int, ARM_BUILTIN_TEXTRMSH);
10794 def_mbuiltin (FL_IWMMXT, "__builtin_arm_textrmsw", int_ftype_v2si_int, ARM_BUILTIN_TEXTRMSW);
10795 def_mbuiltin (FL_IWMMXT, "__builtin_arm_textrmub", int_ftype_v8qi_int, ARM_BUILTIN_TEXTRMUB);
10796 def_mbuiltin (FL_IWMMXT, "__builtin_arm_textrmuh", int_ftype_v4hi_int, ARM_BUILTIN_TEXTRMUH);
10797 def_mbuiltin (FL_IWMMXT, "__builtin_arm_textrmuw", int_ftype_v2si_int, ARM_BUILTIN_TEXTRMUW);
10798 def_mbuiltin (FL_IWMMXT, "__builtin_arm_tinsrb", v8qi_ftype_v8qi_int_int, ARM_BUILTIN_TINSRB);
10799 def_mbuiltin (FL_IWMMXT, "__builtin_arm_tinsrh", v4hi_ftype_v4hi_int_int, ARM_BUILTIN_TINSRH);
10800 def_mbuiltin (FL_IWMMXT, "__builtin_arm_tinsrw", v2si_ftype_v2si_int_int, ARM_BUILTIN_TINSRW);
10802 def_mbuiltin (FL_IWMMXT, "__builtin_arm_waccb", di_ftype_v8qi, ARM_BUILTIN_WACCB);
10803 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wacch", di_ftype_v4hi, ARM_BUILTIN_WACCH);
10804 def_mbuiltin (FL_IWMMXT, "__builtin_arm_waccw", di_ftype_v2si, ARM_BUILTIN_WACCW);
10806 def_mbuiltin (FL_IWMMXT, "__builtin_arm_tmovmskb", int_ftype_v8qi, ARM_BUILTIN_TMOVMSKB);
10807 def_mbuiltin (FL_IWMMXT, "__builtin_arm_tmovmskh", int_ftype_v4hi, ARM_BUILTIN_TMOVMSKH);
10808 def_mbuiltin (FL_IWMMXT, "__builtin_arm_tmovmskw", int_ftype_v2si, ARM_BUILTIN_TMOVMSKW);
10810 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wpackhss", v8qi_ftype_v4hi_v4hi, ARM_BUILTIN_WPACKHSS);
10811 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wpackhus", v8qi_ftype_v4hi_v4hi, ARM_BUILTIN_WPACKHUS);
10812 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wpackwus", v4hi_ftype_v2si_v2si, ARM_BUILTIN_WPACKWUS);
10813 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wpackwss", v4hi_ftype_v2si_v2si, ARM_BUILTIN_WPACKWSS);
10814 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wpackdus", v2si_ftype_di_di, ARM_BUILTIN_WPACKDUS);
10815 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wpackdss", v2si_ftype_di_di, ARM_BUILTIN_WPACKDSS);
10817 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckehub", v4hi_ftype_v8qi, ARM_BUILTIN_WUNPCKEHUB);
10818 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckehuh", v2si_ftype_v4hi, ARM_BUILTIN_WUNPCKEHUH);
10819 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckehuw", di_ftype_v2si, ARM_BUILTIN_WUNPCKEHUW);
10820 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckehsb", v4hi_ftype_v8qi, ARM_BUILTIN_WUNPCKEHSB);
10821 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckehsh", v2si_ftype_v4hi, ARM_BUILTIN_WUNPCKEHSH);
10822 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckehsw", di_ftype_v2si, ARM_BUILTIN_WUNPCKEHSW);
10823 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckelub", v4hi_ftype_v8qi, ARM_BUILTIN_WUNPCKELUB);
10824 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckeluh", v2si_ftype_v4hi, ARM_BUILTIN_WUNPCKELUH);
10825 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckeluw", di_ftype_v2si, ARM_BUILTIN_WUNPCKELUW);
10826 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckelsb", v4hi_ftype_v8qi, ARM_BUILTIN_WUNPCKELSB);
10827 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckelsh", v2si_ftype_v4hi, ARM_BUILTIN_WUNPCKELSH);
10828 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckelsw", di_ftype_v2si, ARM_BUILTIN_WUNPCKELSW);
10830 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wmacs", di_ftype_di_v4hi_v4hi, ARM_BUILTIN_WMACS);
10831 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wmacsz", di_ftype_v4hi_v4hi, ARM_BUILTIN_WMACSZ);
10832 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wmacu", di_ftype_di_v4hi_v4hi, ARM_BUILTIN_WMACU);
10833 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wmacuz", di_ftype_v4hi_v4hi, ARM_BUILTIN_WMACUZ);
10835 def_mbuiltin (FL_IWMMXT, "__builtin_arm_walign", v8qi_ftype_v8qi_v8qi_int, ARM_BUILTIN_WALIGN);
10836 def_mbuiltin (FL_IWMMXT, "__builtin_arm_tmia", di_ftype_di_int_int, ARM_BUILTIN_TMIA);
10837 def_mbuiltin (FL_IWMMXT, "__builtin_arm_tmiaph", di_ftype_di_int_int, ARM_BUILTIN_TMIAPH);
10838 def_mbuiltin (FL_IWMMXT, "__builtin_arm_tmiabb", di_ftype_di_int_int, ARM_BUILTIN_TMIABB);
10839 def_mbuiltin (FL_IWMMXT, "__builtin_arm_tmiabt", di_ftype_di_int_int, ARM_BUILTIN_TMIABT);
10840 def_mbuiltin (FL_IWMMXT, "__builtin_arm_tmiatb", di_ftype_di_int_int, ARM_BUILTIN_TMIATB);
10841 def_mbuiltin (FL_IWMMXT, "__builtin_arm_tmiatt", di_ftype_di_int_int, ARM_BUILTIN_TMIATT);
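/* A minimal user-level sketch of the builtins registered above (added
   for exposition; assumes a compiler configured for iWMMXt, e.g. with
   -mcpu=iwmmxt, and is not part of this file): */
#if 0
typedef int v2si __attribute__ ((vector_size (8)));

unsigned long long
clear_accumulator (void)
{
  return __builtin_arm_wzero ();      /* ARM_BUILTIN_WZERO */
}

v2si
add_words (v2si a, v2si b)
{
  return __builtin_arm_waddw (a, b);  /* ARM_BUILTIN_WADDW (addv2si3) */
}
#endif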
10844 static void
10845 arm_init_builtins (void)
10847 if (TARGET_REALLY_IWMMXT)
10848 arm_init_iwmmxt_builtins ();
10851 /* Errors in the source file can cause expand_expr to return const0_rtx
10852 where we expect a vector. To avoid crashing, use one of the vector
10853 clear instructions. */
10855 static rtx
10856 safe_vector_operand (rtx x, enum machine_mode mode)
10858 if (x != const0_rtx)
10859 return x;
10860 x = gen_reg_rtx (mode);
10862 emit_insn (gen_iwmmxt_clrdi (mode == DImode ? x
10863 : gen_rtx_SUBREG (DImode, x, 0)));
10864 return x;
10867 /* Subroutine of arm_expand_builtin to take care of binop insns. */
10869 static rtx
10870 arm_expand_binop_builtin (enum insn_code icode,
10871 tree arglist, rtx target)
10873 rtx pat;
10874 tree arg0 = TREE_VALUE (arglist);
10875 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
10876 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
10877 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10878 enum machine_mode tmode = insn_data[icode].operand[0].mode;
10879 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
10880 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
10882 if (VECTOR_MODE_P (mode0))
10883 op0 = safe_vector_operand (op0, mode0);
10884 if (VECTOR_MODE_P (mode1))
10885 op1 = safe_vector_operand (op1, mode1);
10887 if (! target
10888 || GET_MODE (target) != tmode
10889 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
10890 target = gen_reg_rtx (tmode);
10892 /* In case the insn wants input operands in modes different from
10893 the result, abort. */
10894 if (GET_MODE (op0) != mode0 || GET_MODE (op1) != mode1)
10895 abort ();
10897 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
10898 op0 = copy_to_mode_reg (mode0, op0);
10899 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
10900 op1 = copy_to_mode_reg (mode1, op1);
10902 pat = GEN_FCN (icode) (target, op0, op1);
10903 if (! pat)
10904 return 0;
10905 emit_insn (pat);
10906 return target;
10909 /* Subroutine of arm_expand_builtin to take care of unop insns. */
10911 static rtx
10912 arm_expand_unop_builtin (enum insn_code icode,
10913 tree arglist, rtx target, int do_load)
10915 rtx pat;
10916 tree arg0 = TREE_VALUE (arglist);
10917 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
10918 enum machine_mode tmode = insn_data[icode].operand[0].mode;
10919 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
10921 if (! target
10922 || GET_MODE (target) != tmode
10923 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
10924 target = gen_reg_rtx (tmode);
10925 if (do_load)
10926 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
10927 else
10929 if (VECTOR_MODE_P (mode0))
10930 op0 = safe_vector_operand (op0, mode0);
10932 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
10933 op0 = copy_to_mode_reg (mode0, op0);
10936 pat = GEN_FCN (icode) (target, op0);
10937 if (! pat)
10938 return 0;
10939 emit_insn (pat);
10940 return target;
10943 /* Expand an expression EXP that calls a built-in function,
10944 with result going to TARGET if that's convenient
10945 (and in mode MODE if that's convenient).
10946 SUBTARGET may be used as the target for computing one of EXP's operands.
10947 IGNORE is nonzero if the value is to be ignored. */
10949 static rtx
10950 arm_expand_builtin (tree exp,
10951 rtx target,
10952 rtx subtarget ATTRIBUTE_UNUSED,
10953 enum machine_mode mode ATTRIBUTE_UNUSED,
10954 int ignore ATTRIBUTE_UNUSED)
10956 const struct builtin_description * d;
10957 enum insn_code icode;
10958 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
10959 tree arglist = TREE_OPERAND (exp, 1);
10960 tree arg0;
10961 tree arg1;
10962 tree arg2;
10963 rtx op0;
10964 rtx op1;
10965 rtx op2;
10966 rtx pat;
10967 int fcode = DECL_FUNCTION_CODE (fndecl);
10968 size_t i;
10969 enum machine_mode tmode;
10970 enum machine_mode mode0;
10971 enum machine_mode mode1;
10972 enum machine_mode mode2;
10974 switch (fcode)
10976 case ARM_BUILTIN_TEXTRMSB:
10977 case ARM_BUILTIN_TEXTRMUB:
10978 case ARM_BUILTIN_TEXTRMSH:
10979 case ARM_BUILTIN_TEXTRMUH:
10980 case ARM_BUILTIN_TEXTRMSW:
10981 case ARM_BUILTIN_TEXTRMUW:
10982 icode = (fcode == ARM_BUILTIN_TEXTRMSB ? CODE_FOR_iwmmxt_textrmsb
10983 : fcode == ARM_BUILTIN_TEXTRMUB ? CODE_FOR_iwmmxt_textrmub
10984 : fcode == ARM_BUILTIN_TEXTRMSH ? CODE_FOR_iwmmxt_textrmsh
10985 : fcode == ARM_BUILTIN_TEXTRMUH ? CODE_FOR_iwmmxt_textrmuh
10986 : CODE_FOR_iwmmxt_textrmw);
10988 arg0 = TREE_VALUE (arglist);
10989 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
10990 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
10991 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10992 tmode = insn_data[icode].operand[0].mode;
10993 mode0 = insn_data[icode].operand[1].mode;
10994 mode1 = insn_data[icode].operand[2].mode;
10996 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
10997 op0 = copy_to_mode_reg (mode0, op0);
10998 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
11000 /* @@@ better error message */
11001 error ("selector must be an immediate");
11002 return gen_reg_rtx (tmode);
11004 if (target == 0
11005 || GET_MODE (target) != tmode
11006 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
11007 target = gen_reg_rtx (tmode);
11008 pat = GEN_FCN (icode) (target, op0, op1);
11009 if (! pat)
11010 return 0;
11011 emit_insn (pat);
11012 return target;
11014 case ARM_BUILTIN_TINSRB:
11015 case ARM_BUILTIN_TINSRH:
11016 case ARM_BUILTIN_TINSRW:
11017 icode = (fcode == ARM_BUILTIN_TINSRB ? CODE_FOR_iwmmxt_tinsrb
11018 : fcode == ARM_BUILTIN_TINSRH ? CODE_FOR_iwmmxt_tinsrh
11019 : CODE_FOR_iwmmxt_tinsrw);
11020 arg0 = TREE_VALUE (arglist);
11021 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
11022 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
11023 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
11024 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
11025 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
11026 tmode = insn_data[icode].operand[0].mode;
11027 mode0 = insn_data[icode].operand[1].mode;
11028 mode1 = insn_data[icode].operand[2].mode;
11029 mode2 = insn_data[icode].operand[3].mode;
11031 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
11032 op0 = copy_to_mode_reg (mode0, op0);
11033 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
11034 op1 = copy_to_mode_reg (mode1, op1);
11035 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
11037 /* @@@ better error message */
11038 error ("selector must be an immediate");
11039 return const0_rtx;
11041 if (target == 0
11042 || GET_MODE (target) != tmode
11043 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
11044 target = gen_reg_rtx (tmode);
11045 pat = GEN_FCN (icode) (target, op0, op1, op2);
11046 if (! pat)
11047 return 0;
11048 emit_insn (pat);
11049 return target;
11051 case ARM_BUILTIN_SETWCX:
11052 arg0 = TREE_VALUE (arglist);
11053 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
11054 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
11055 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
11056 emit_insn (gen_iwmmxt_tmcr (op0, op1));
11057 return 0;
11059 case ARM_BUILTIN_GETWCX:
11060 arg0 = TREE_VALUE (arglist);
11061 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
11062 target = gen_reg_rtx (SImode);
11063 emit_insn (gen_iwmmxt_tmrc (target, op0));
11064 return target;
11066 case ARM_BUILTIN_WSHUFH:
11067 icode = CODE_FOR_iwmmxt_wshufh;
11068 arg0 = TREE_VALUE (arglist);
11069 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
11070 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
11071 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
11072 tmode = insn_data[icode].operand[0].mode;
11073 mode1 = insn_data[icode].operand[1].mode;
11074 mode2 = insn_data[icode].operand[2].mode;
11076 if (! (*insn_data[icode].operand[1].predicate) (op0, mode1))
11077 op0 = copy_to_mode_reg (mode1, op0);
11078 if (! (*insn_data[icode].operand[2].predicate) (op1, mode2))
11080 /* @@@ better error message */
11081 error ("mask must be an immediate");
11082 return const0_rtx;
11084 if (target == 0
11085 || GET_MODE (target) != tmode
11086 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
11087 target = gen_reg_rtx (tmode);
11088 pat = GEN_FCN (icode) (target, op0, op1);
11089 if (! pat)
11090 return 0;
11091 emit_insn (pat);
11092 return target;
11094 case ARM_BUILTIN_WSADB:
11095 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wsadb, arglist, target);
11096 case ARM_BUILTIN_WSADH:
11097 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wsadh, arglist, target);
11098 case ARM_BUILTIN_WSADBZ:
11099 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wsadbz, arglist, target);
11100 case ARM_BUILTIN_WSADHZ:
11101 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wsadhz, arglist, target);
11103 /* Several three-argument builtins. */
11104 case ARM_BUILTIN_WMACS:
11105 case ARM_BUILTIN_WMACU:
11106 case ARM_BUILTIN_WALIGN:
11107 case ARM_BUILTIN_TMIA:
11108 case ARM_BUILTIN_TMIAPH:
11109 case ARM_BUILTIN_TMIATT:
11110 case ARM_BUILTIN_TMIATB:
11111 case ARM_BUILTIN_TMIABT:
11112 case ARM_BUILTIN_TMIABB:
11113 icode = (fcode == ARM_BUILTIN_WMACS ? CODE_FOR_iwmmxt_wmacs
11114 : fcode == ARM_BUILTIN_WMACU ? CODE_FOR_iwmmxt_wmacu
11115 : fcode == ARM_BUILTIN_TMIA ? CODE_FOR_iwmmxt_tmia
11116 : fcode == ARM_BUILTIN_TMIAPH ? CODE_FOR_iwmmxt_tmiaph
11117 : fcode == ARM_BUILTIN_TMIABB ? CODE_FOR_iwmmxt_tmiabb
11118 : fcode == ARM_BUILTIN_TMIABT ? CODE_FOR_iwmmxt_tmiabt
11119 : fcode == ARM_BUILTIN_TMIATB ? CODE_FOR_iwmmxt_tmiatb
11120 : fcode == ARM_BUILTIN_TMIATT ? CODE_FOR_iwmmxt_tmiatt
11121 : CODE_FOR_iwmmxt_walign);
11122 arg0 = TREE_VALUE (arglist);
11123 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
11124 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
11125 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
11126 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
11127 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
11128 tmode = insn_data[icode].operand[0].mode;
11129 mode0 = insn_data[icode].operand[1].mode;
11130 mode1 = insn_data[icode].operand[2].mode;
11131 mode2 = insn_data[icode].operand[3].mode;
11133 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
11134 op0 = copy_to_mode_reg (mode0, op0);
11135 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
11136 op1 = copy_to_mode_reg (mode1, op1);
11137 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
11138 op2 = copy_to_mode_reg (mode2, op2);
11139 if (target == 0
11140 || GET_MODE (target) != tmode
11141 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
11142 target = gen_reg_rtx (tmode);
11143 pat = GEN_FCN (icode) (target, op0, op1, op2);
11144 if (! pat)
11145 return 0;
11146 emit_insn (pat);
11147 return target;
11149 case ARM_BUILTIN_WZERO:
11150 target = gen_reg_rtx (DImode);
11151 emit_insn (gen_iwmmxt_clrdi (target));
11152 return target;
11154 default:
11155 break;
11158 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
11159 if (d->code == (const enum arm_builtins) fcode)
11160 return arm_expand_binop_builtin (d->icode, arglist, target);
11162 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
11163 if (d->code == (const enum arm_builtins) fcode)
11164 return arm_expand_unop_builtin (d->icode, arglist, target, 0);
11166 /* @@@ Should really do something sensible here. */
11167 return NULL_RTX;
11170 /* Recursively search through all of the blocks in a function
11171 checking to see if any of the variables created in that
11172 function match the RTX called 'orig'. If they do then
11173 replace them with the RTX called 'new'. */
11174 static void
11175 replace_symbols_in_block (tree block, rtx orig, rtx new)
11177 for (; block; block = BLOCK_CHAIN (block))
11179 tree sym;
11181 if (!TREE_USED (block))
11182 continue;
11184 for (sym = BLOCK_VARS (block); sym; sym = TREE_CHAIN (sym))
11186 if ( (DECL_NAME (sym) == 0 && TREE_CODE (sym) != TYPE_DECL)
11187 || DECL_IGNORED_P (sym)
11188 || TREE_CODE (sym) != VAR_DECL
11189 || DECL_EXTERNAL (sym)
11190 || !rtx_equal_p (DECL_RTL (sym), orig)
11192 continue;
11194 SET_DECL_RTL (sym, new);
11197 replace_symbols_in_block (BLOCK_SUBBLOCKS (block), orig, new);
11201 /* Return the number (counting from 0) of
11202 the least significant set bit in MASK. */
11204 inline static int
11205 number_of_first_bit_set (int mask)
11207 int bit;
11209 for (bit = 0;
11210 (mask & (1 << bit)) == 0;
11211 ++bit)
11212 continue;
11214 return bit;
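/* For example, number_of_first_bit_set (0x28) returns 3, since bit 3 is
   the lowest bit set in 101000 (binary). The caller must ensure that
   MASK is nonzero, otherwise the loop above never finds a set bit. */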
11217 /* Generate code to return from a thumb function.
11218 If 'reg_containing_return_addr' is -1, then the return address is
11219 actually on the stack, at the stack pointer. */
11220 static void
11221 thumb_exit (FILE *f, int reg_containing_return_addr, rtx eh_ofs)
11223 unsigned regs_available_for_popping;
11224 unsigned regs_to_pop;
11225 int pops_needed;
11226 unsigned available;
11227 unsigned required;
11228 int mode;
11229 int size;
11230 int restore_a4 = FALSE;
11232 /* Compute the registers we need to pop. */
11233 regs_to_pop = 0;
11234 pops_needed = 0;
11236 /* There is an assumption here, that if eh_ofs is not NULL, the
11237 normal return address will have been pushed. */
11238 if (reg_containing_return_addr == -1 || eh_ofs)
11240 /* When we are generating a return for __builtin_eh_return,
11241 reg_containing_return_addr must specify the return regno. */
11242 if (eh_ofs && reg_containing_return_addr == -1)
11243 abort ();
11245 regs_to_pop |= 1 << LR_REGNUM;
11246 ++pops_needed;
11249 if (TARGET_BACKTRACE)
11251 /* Restore the (ARM) frame pointer and stack pointer. */
11252 regs_to_pop |= (1 << ARM_HARD_FRAME_POINTER_REGNUM) | (1 << SP_REGNUM);
11253 pops_needed += 2;
11256 /* If there is nothing to pop then just emit the BX instruction and
11257 return. */
11258 if (pops_needed == 0)
11260 if (eh_ofs)
11261 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
11263 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
11264 return;
11266 /* Otherwise if we are not supporting interworking and we have not created
11267 a backtrace structure and the function was not entered in ARM mode then
11268 just pop the return address straight into the PC. */
11269 else if (!TARGET_INTERWORK
11270 && !TARGET_BACKTRACE
11271 && !is_called_in_ARM_mode (current_function_decl))
11273 if (eh_ofs)
11275 asm_fprintf (f, "\tadd\t%r, #4\n", SP_REGNUM);
11276 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
11277 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
11279 else
11280 asm_fprintf (f, "\tpop\t{%r}\n", PC_REGNUM);
11282 return;
11285 /* Find out how many of the (return) argument registers we can corrupt. */
11286 regs_available_for_popping = 0;
11288 /* If returning via __builtin_eh_return, the bottom three registers
11289 all contain information needed for the return. */
11290 if (eh_ofs)
11291 size = 12;
11292 else
11294 #ifdef RTX_CODE
11295 /* Deduce the registers used from the function's
11296 return value. This is more reliable than examining
11297 regs_ever_live[] because that will be set if the register is
11298 ever used in the function, not just if the register is used
11299 to hold a return value. */
11301 if (current_function_return_rtx != 0)
11302 mode = GET_MODE (current_function_return_rtx);
11303 else
11304 #endif
11305 mode = DECL_MODE (DECL_RESULT (current_function_decl));
11307 size = GET_MODE_SIZE (mode);
11309 if (size == 0)
11311 /* In a void function we can use any argument register.
11312 In a function that returns a structure on the stack
11313 we can use the second and third argument registers. */
11314 if (mode == VOIDmode)
11315 regs_available_for_popping =
11316 (1 << ARG_REGISTER (1))
11317 | (1 << ARG_REGISTER (2))
11318 | (1 << ARG_REGISTER (3));
11319 else
11320 regs_available_for_popping =
11321 (1 << ARG_REGISTER (2))
11322 | (1 << ARG_REGISTER (3));
11324 else if (size <= 4)
11325 regs_available_for_popping =
11326 (1 << ARG_REGISTER (2))
11327 | (1 << ARG_REGISTER (3));
11328 else if (size <= 8)
11329 regs_available_for_popping =
11330 (1 << ARG_REGISTER (3));
11333 /* Match registers to be popped with registers into which we pop them. */
11334 for (available = regs_available_for_popping,
11335 required = regs_to_pop;
11336 required != 0 && available != 0;
11337 available &= ~(available & - available),
11338 required &= ~(required & - required))
11339 -- pops_needed;
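/* Note that `x & -x' isolates the lowest set bit of x, so each
   iteration of the loop above pairs off the lowest remaining register
   to pop with the lowest remaining register it can be popped into. */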
11341 /* If we have any popping registers left over, remove them. */
11342 if (available > 0)
11343 regs_available_for_popping &= ~available;
11345 /* Otherwise if we need another popping register we can use
11346 the fourth argument register. */
11347 else if (pops_needed)
11349 /* If we have not found any free argument registers and
11350 reg a4 contains the return address, we must move it. */
11351 if (regs_available_for_popping == 0
11352 && reg_containing_return_addr == LAST_ARG_REGNUM)
11354 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
11355 reg_containing_return_addr = LR_REGNUM;
11357 else if (size > 12)
11359 /* Register a4 is being used to hold part of the return value,
11360 but we have dire need of a free, low register. */
11361 restore_a4 = TRUE;
11363 asm_fprintf (f, "\tmov\t%r, %r\n",IP_REGNUM, LAST_ARG_REGNUM);
11366 if (reg_containing_return_addr != LAST_ARG_REGNUM)
11368 /* The fourth argument register is available. */
11369 regs_available_for_popping |= 1 << LAST_ARG_REGNUM;
11371 --pops_needed;
11375 /* Pop as many registers as we can. */
11376 thumb_pushpop (f, regs_available_for_popping, FALSE);
11378 /* Process the registers we popped. */
11379 if (reg_containing_return_addr == -1)
11381 /* The return address was popped into the lowest numbered register. */
11382 regs_to_pop &= ~(1 << LR_REGNUM);
11384 reg_containing_return_addr =
11385 number_of_first_bit_set (regs_available_for_popping);
11387 /* Remove this register from the mask of available registers, so that
11388 the return address will not be corrupted by further pops. */
11389 regs_available_for_popping &= ~(1 << reg_containing_return_addr);
11392 /* If we popped other registers then handle them here. */
11393 if (regs_available_for_popping)
11395 int frame_pointer;
11397 /* Work out which register currently contains the frame pointer. */
11398 frame_pointer = number_of_first_bit_set (regs_available_for_popping);
11400 /* Move it into the correct place. */
11401 asm_fprintf (f, "\tmov\t%r, %r\n",
11402 ARM_HARD_FRAME_POINTER_REGNUM, frame_pointer);
11404 /* (Temporarily) remove it from the mask of popped registers. */
11405 regs_available_for_popping &= ~(1 << frame_pointer);
11406 regs_to_pop &= ~(1 << ARM_HARD_FRAME_POINTER_REGNUM);
11408 if (regs_available_for_popping)
11410 int stack_pointer;
11412 /* We popped the stack pointer as well;
11413 find the register that contains it. */
11414 stack_pointer = number_of_first_bit_set (regs_available_for_popping);
11416 /* Move it into the stack register. */
11417 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, stack_pointer);
11419 /* At this point we have popped all necessary registers, so
11420 do not worry about restoring regs_available_for_popping
11421 to its correct value:
11423 assert (pops_needed == 0)
11424 assert (regs_available_for_popping == (1 << frame_pointer))
11425 assert (regs_to_pop == (1 << STACK_POINTER)) */
11427 else
11429 /* Since we have just moved the popped value into the frame
11430 pointer, the popping register is available for reuse, and
11431 we know that we still have the stack pointer left to pop. */
11432 regs_available_for_popping |= (1 << frame_pointer);
11436 /* If we still have registers left on the stack, but we no longer have
11437 any registers into which we can pop them, then we must move the return
11438 address into the link register and make available the register that
11439 contained it. */
11440 if (regs_available_for_popping == 0 && pops_needed > 0)
11442 regs_available_for_popping |= 1 << reg_containing_return_addr;
11444 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM,
11445 reg_containing_return_addr);
11447 reg_containing_return_addr = LR_REGNUM;
11450 /* If we have registers left on the stack then pop some more.
11451 We know that at most we will want to pop FP and SP. */
11452 if (pops_needed > 0)
11454 int popped_into;
11455 int move_to;
11457 thumb_pushpop (f, regs_available_for_popping, FALSE);
11459 /* We have popped either FP or SP.
11460 Move whichever one it is into the correct register. */
11461 popped_into = number_of_first_bit_set (regs_available_for_popping);
11462 move_to = number_of_first_bit_set (regs_to_pop);
11464 asm_fprintf (f, "\tmov\t%r, %r\n", move_to, popped_into);
11466 regs_to_pop &= ~(1 << move_to);
11468 --pops_needed;
11471 /* If we still have not popped everything then we must have only
11472 had one register available to us and we are now popping the SP. */
11473 if (pops_needed > 0)
11475 int popped_into;
11477 thumb_pushpop (f, regs_available_for_popping, FALSE);
11479 popped_into = number_of_first_bit_set (regs_available_for_popping);
11481 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, popped_into);
11483 /* assert (regs_to_pop == (1 << STACK_POINTER))
11484 assert (pops_needed == 1) */
11488 /* If necessary restore the a4 register. */
11489 if (restore_a4)
11491 if (reg_containing_return_addr != LR_REGNUM)
11493 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
11494 reg_containing_return_addr = LR_REGNUM;
11497 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
11500 if (eh_ofs)
11501 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
11503 /* Return to caller. */
11504 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
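/* For illustration only (an assumed example of typical output): for a
   function with its return address on the stack, no backtrace structure
   and interworking enabled, the code above might emit something like

   pop {r1}
   bx r1

   using a corruptible argument register to recover the return address. */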
11507 /* Emit code to push or pop registers to or from the stack. */
11508 static void
11509 thumb_pushpop (FILE *f, int mask, int push)
11511 int regno;
11512 int lo_mask = mask & 0xFF;
11514 if (lo_mask == 0 && !push && (mask & (1 << 15)))
11516 /* Special case. Do not generate a POP PC statement here; do it in
11517 thumb_exit(). */
11518 thumb_exit (f, -1, NULL_RTX);
11519 return;
11522 fprintf (f, "\t%s\t{", push ? "push" : "pop");
11524 /* Look at the low registers first. */
11525 for (regno = 0; regno <= LAST_LO_REGNUM; regno++, lo_mask >>= 1)
11527 if (lo_mask & 1)
11529 asm_fprintf (f, "%r", regno);
11531 if ((lo_mask & ~1) != 0)
11532 fprintf (f, ", ");
11536 if (push && (mask & (1 << LR_REGNUM)))
11538 /* Catch pushing the LR. */
11539 if (mask & 0xFF)
11540 fprintf (f, ", ");
11542 asm_fprintf (f, "%r", LR_REGNUM);
11544 else if (!push && (mask & (1 << PC_REGNUM)))
11546 /* Catch popping the PC. */
11547 if (TARGET_INTERWORK || TARGET_BACKTRACE)
11549 /* The PC is never popped directly; instead
11550 it is popped into r3 and then BX is used. */
11551 fprintf (f, "}\n");
11553 thumb_exit (f, -1, NULL_RTX);
11555 return;
11557 else
11559 if (mask & 0xFF)
11560 fprintf (f, ", ");
11562 asm_fprintf (f, "%r", PC_REGNUM);
11566 fprintf (f, "}\n");
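/* For example, thumb_pushpop (f, 0x400F, 1) emits

   push {r0, r1, r2, r3, lr}

   since mask bits 0-3 select the low registers and bit 14 (LR_REGNUM)
   adds the link register. */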
11569 void
11570 thumb_final_prescan_insn (rtx insn)
11572 if (flag_print_asm_name)
11573 asm_fprintf (asm_out_file, "%@ 0x%04x\n",
11574 INSN_ADDRESSES (INSN_UID (insn)));
11577 int
11578 thumb_shiftable_const (unsigned HOST_WIDE_INT val)
11580 unsigned HOST_WIDE_INT mask = 0xff;
11581 int i;
11583 if (val == 0) /* XXX */
11584 return 0;
11586 for (i = 0; i < 25; i++)
11587 if ((val & (mask << i)) == val)
11588 return 1;
11590 return 0;
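/* Worked examples: 0x00ff0000 is 0xff shifted left by 16, so the
   function returns 1; 0x101 spans nine bits and cannot be written as
   an 8-bit value shifted left, so it returns 0.  */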
11593 /* Returns nonzero if the current function contains,
11594 or might contain a far jump. */
11595 int
11596 thumb_far_jump_used_p (int in_prologue)
11598 rtx insn;
11600 /* This test is only important for leaf functions. */
11601 /* assert (!leaf_function_p ()); */
11603 /* If we have already decided that far jumps may be used,
11604 do not bother checking again, and always return true even if
11605 it turns out that they are not being used. Once we have made
11606 the decision that far jumps are present (and that hence the link
11607 register will be pushed onto the stack) we cannot go back on it. */
11608 if (cfun->machine->far_jump_used)
11609 return 1;
11611 /* If this function is not being called from the prologue/epilogue
11612 generation code then it must be being called from the
11613 INITIAL_ELIMINATION_OFFSET macro. */
11614 if (!in_prologue)
11616 /* In this case we know that we are being asked about the elimination
11617 of the arg pointer register. If that register is not being used,
11618 then there are no arguments on the stack, and we do not have to
11619 worry that a far jump might force the prologue to push the link
11620 register, changing the stack offsets. In this case we can just
11621 return false, since the presence of far jumps in the function will
11622 not affect stack offsets.
11624 If the arg pointer is live (or if it was live, but has now been
11625 eliminated and so set to dead) then we do have to test to see if
11626 the function might contain a far jump. This test can lead to some
11627    false positives, since before reload is completed the length of
11628 branch instructions is not known, so gcc defaults to returning their
11629 longest length, which in turn sets the far jump attribute to true.
11631    A false positive will not result in bad code being generated, but it
11632 will result in a needless push and pop of the link register. We
11633 hope that this does not occur too often. */
11634 if (regs_ever_live [ARG_POINTER_REGNUM])
11635 cfun->machine->arg_pointer_live = 1;
11636 else if (!cfun->machine->arg_pointer_live)
11637 return 0;
11640 /* Check to see if the function contains a branch
11641 insn with the far jump attribute set. */
11642 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
11644 if (GET_CODE (insn) == JUMP_INSN
11645 /* Ignore tablejump patterns. */
11646 && GET_CODE (PATTERN (insn)) != ADDR_VEC
11647 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
11648 && get_attr_far_jump (insn) == FAR_JUMP_YES
11651 /* Record the fact that we have decided that
11652 the function does use far jumps. */
11653 cfun->machine->far_jump_used = 1;
11654 return 1;
11658 return 0;
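/* Background, as a rough sketch: a "far jump" here is a branch whose
   target is beyond the reach of the short Thumb B instruction (about
   +/-2KB unconditional, +/-256 bytes conditional), so it has to be
   synthesized with BL; BL clobbers LR, which is why LR must be saved
   in the prologue whenever a far jump may be present.  */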
11661 /* Return nonzero if FUNC must be entered in ARM mode. */
11662 int
11663 is_called_in_ARM_mode (tree func)
11665 if (TREE_CODE (func) != FUNCTION_DECL)
11666 abort ();
11668   /* Ignore the problem of functions whose address is taken.  */
11669 if (TARGET_CALLEE_INTERWORKING && TREE_PUBLIC (func))
11670 return TRUE;
11672 #ifdef ARM_PE
11673 return lookup_attribute ("interfacearm", DECL_ATTRIBUTES (func)) != NULL_TREE;
11674 #else
11675 return FALSE;
11676 #endif
11679 /* The bits which aren't usefully expanded as rtl. */
11680 const char *
11681 thumb_unexpanded_epilogue (void)
11683 int regno;
11684 int live_regs_mask = 0;
11685 int high_regs_pushed = 0;
11686 int leaf_function = leaf_function_p ();
11687 int had_to_push_lr;
11688 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
11690 if (return_used_this_function)
11691 return "";
11693 if (IS_NAKED (arm_current_func_type ()))
11694 return "";
11696 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
11697 if (THUMB_REG_PUSHED_P (regno))
11698 live_regs_mask |= 1 << regno;
11700 for (regno = 8; regno < 13; regno++)
11701 if (THUMB_REG_PUSHED_P (regno))
11702 high_regs_pushed++;
11704   /* The prologue may have pushed some high registers to use as
11705      work registers, e.g. the testsuite file
11706      gcc/testsuite/gcc.c-torture/execute/complex-2.c
11707 compiles to produce:
11708 push {r4, r5, r6, r7, lr}
11709 mov r7, r9
11710 mov r6, r8
11711 push {r6, r7}
11712      as part of the prologue.  We have to undo that pushing here.  */
11714 if (high_regs_pushed)
11716 int mask = live_regs_mask;
11717 int next_hi_reg;
11718 int size;
11719 int mode;
11721 #ifdef RTX_CODE
11722      /* We can deduce the registers used from the function's return value.
11723	  This is more reliable than examining regs_ever_live[] because that
11724	  will be set if the register is ever used in the function, not just if
11725	  the register is used to hold a return value.  */
11727 if (current_function_return_rtx != 0)
11728 mode = GET_MODE (current_function_return_rtx);
11729 else
11730 #endif
11731 mode = DECL_MODE (DECL_RESULT (current_function_decl));
11733 size = GET_MODE_SIZE (mode);
11735      /* Unless we are returning a type whose size is greater than 12
11736	  bytes, register r3 is available.  */
11737 if (size < 13)
11738 mask |= 1 << 3;
11740 if (mask == 0)
11741 /* Oh dear! We have no low registers into which we can pop
11742 high registers! */
11743 internal_error
11744 ("no low registers available for popping high registers");
11746 for (next_hi_reg = 8; next_hi_reg < 13; next_hi_reg++)
11747 if (THUMB_REG_PUSHED_P (next_hi_reg))
11748 break;
11750 while (high_regs_pushed)
11752 /* Find lo register(s) into which the high register(s) can
11753 be popped. */
11754 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
11756 if (mask & (1 << regno))
11757 high_regs_pushed--;
11758 if (high_regs_pushed == 0)
11759 break;
11762 mask &= (2 << regno) - 1; /* A noop if regno == 8 */
11764 /* Pop the values into the low register(s). */
11765 thumb_pushpop (asm_out_file, mask, 0);
11767 /* Move the value(s) into the high registers. */
11768 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
11770 if (mask & (1 << regno))
11772 asm_fprintf (asm_out_file, "\tmov\t%r, %r\n", next_hi_reg,
11773 regno);
11775 for (next_hi_reg++; next_hi_reg < 13; next_hi_reg++)
11776 if (THUMB_REG_PUSHED_P (next_hi_reg))
11777 break;
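/* For the complex-2.c example quoted above, assuming r3 and r4 are
   free, the undo sequence emitted here would look like:
	pop	{r3, r4}
	mov	r8, r3
	mov	r9, r4
   after which the remaining low registers and the return address are
   popped as usual.  */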
11783 had_to_push_lr = (live_regs_mask || !leaf_function
11784 || thumb_far_jump_used_p (1));
11786 if (TARGET_BACKTRACE
11787 && ((live_regs_mask & 0xFF) == 0)
11788 && regs_ever_live [LAST_ARG_REGNUM] != 0)
11790 /* The stack backtrace structure creation code had to
11791 push R7 in order to get a work register, so we pop
11792 it now. */
11793 live_regs_mask |= (1 << LAST_LO_REGNUM);
11796 if (current_function_pretend_args_size == 0 || TARGET_BACKTRACE)
11798 if (had_to_push_lr
11799 && !is_called_in_ARM_mode (current_function_decl)
11800 && !eh_ofs)
11801 live_regs_mask |= 1 << PC_REGNUM;
11803 /* Either no argument registers were pushed or a backtrace
11804 structure was created which includes an adjusted stack
11805 pointer, so just pop everything. */
11806 if (live_regs_mask)
11807 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
11809 if (eh_ofs)
11810 thumb_exit (asm_out_file, 2, eh_ofs);
11811      /* We have either just popped the return address into the
11812	  PC, or it was kept in LR for the entire function, or
11813	  it is still on the stack because we do not want to
11814	  return by doing a pop {pc}.  */
11815 else if ((live_regs_mask & (1 << PC_REGNUM)) == 0)
11816 thumb_exit (asm_out_file,
11817 (had_to_push_lr
11818 && is_called_in_ARM_mode (current_function_decl)) ?
11819 -1 : LR_REGNUM, NULL_RTX);
11821 else
11823 /* Pop everything but the return address. */
11824 live_regs_mask &= ~(1 << PC_REGNUM);
11826 if (live_regs_mask)
11827 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
11829 if (had_to_push_lr)
11830 /* Get the return address into a temporary register. */
11831 thumb_pushpop (asm_out_file, 1 << LAST_ARG_REGNUM, 0);
11833 /* Remove the argument registers that were pushed onto the stack. */
11834 asm_fprintf (asm_out_file, "\tadd\t%r, %r, #%d\n",
11835 SP_REGNUM, SP_REGNUM,
11836 current_function_pretend_args_size);
11838 if (eh_ofs)
11839 thumb_exit (asm_out_file, 2, eh_ofs);
11840 else
11841 thumb_exit (asm_out_file,
11842 had_to_push_lr ? LAST_ARG_REGNUM : LR_REGNUM, NULL_RTX);
11845 return "";
11848 /* Functions to save and restore machine-specific function data. */
11849 static struct machine_function *
11850 arm_init_machine_status (void)
11852 struct machine_function *machine;
11853 machine = (machine_function *) ggc_alloc_cleared (sizeof (machine_function));
11855 #if ARM_FT_UNKNOWN != 0
11856 machine->func_type = ARM_FT_UNKNOWN;
11857 #endif
11858 return machine;
11861 /* Return an RTX indicating where the return address to the
11862 calling function can be found. */
11863 rtx
11864 arm_return_addr (int count, rtx frame ATTRIBUTE_UNUSED)
11866 if (count != 0)
11867 return NULL_RTX;
11869 if (TARGET_APCS_32)
11870 return get_hard_reg_initial_val (Pmode, LR_REGNUM);
11871 else
11873 rtx lr = gen_rtx_AND (Pmode, gen_rtx_REG (Pmode, LR_REGNUM),
11874 GEN_INT (RETURN_ADDR_MASK26));
11875 return get_func_hard_reg_initial_val (cfun, lr);
11879 /* Do anything needed before RTL is emitted for each function. */
11880 void
11881 arm_init_expanders (void)
11883 /* Arrange to initialize and mark the machine per-function status. */
11884 init_machine_status = arm_init_machine_status;
11887 HOST_WIDE_INT
11888 thumb_get_frame_size (void)
11890 int regno;
11892 int base_size = ROUND_UP_WORD (get_frame_size ());
11893 int count_regs = 0;
11894 int entry_size = 0;
11895 int leaf;
11897 if (! TARGET_THUMB)
11898 abort ();
11900 if (! TARGET_ATPCS)
11901 return base_size;
11903 /* We need to know if we are a leaf function. Unfortunately, it
11904 is possible to be called after start_sequence has been called,
11905 which causes get_insns to return the insns for the sequence,
11906 not the function, which will cause leaf_function_p to return
11907 the incorrect result.
11909 To work around this, we cache the computed frame size. This
11910 works because we will only be calling RTL expanders that need
11911 to know about leaf functions once reload has completed, and the
11912 frame size cannot be changed after that time, so we can safely
11913 use the cached value. */
11915 if (reload_completed)
11916 return cfun->machine->frame_size;
11918 leaf = leaf_function_p ();
11920 /* A leaf function does not need any stack alignment if it has nothing
11921 on the stack. */
11922 if (leaf && base_size == 0)
11924 cfun->machine->frame_size = 0;
11925 return 0;
11928 /* We know that SP will be word aligned on entry, and we must
11929 preserve that condition at any subroutine call. But those are
11930 the only constraints. */
11932 /* Space for variadic functions. */
11933 if (current_function_pretend_args_size)
11934 entry_size += current_function_pretend_args_size;
11936 /* Space for pushed lo registers. */
11937 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
11938 if (THUMB_REG_PUSHED_P (regno))
11939 count_regs++;
11941 /* Space for backtrace structure. */
11942 if (TARGET_BACKTRACE)
11944 if (count_regs == 0 && regs_ever_live[LAST_ARG_REGNUM] != 0)
11945 entry_size += 20;
11946 else
11947 entry_size += 16;
11950 if (count_regs || !leaf || thumb_far_jump_used_p (1))
11951 count_regs++; /* LR */
11953 entry_size += count_regs * 4;
11954 count_regs = 0;
11956 /* Space for pushed hi regs. */
11957 for (regno = 8; regno < 13; regno++)
11958 if (THUMB_REG_PUSHED_P (regno))
11959 count_regs++;
11961 entry_size += count_regs * 4;
11963 if ((entry_size + base_size + current_function_outgoing_args_size) & 7)
11964 base_size += 4;
11965 if ((entry_size + base_size + current_function_outgoing_args_size) & 7)
11966 abort ();
11968 cfun->machine->frame_size = base_size;
11970 return base_size;
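/* Worked example (ATPCS, non-leaf): get_frame_size () == 10 rounds up
   to base_size == 12; three pushed lo registers plus LR make
   entry_size == 16; 12 + 16 == 28 is not 8-byte aligned, so base_size
   is padded to 16, giving a 32 byte frame in total.  */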
11973 /* Generate the rest of a function's prologue. */
11974 void
11975 thumb_expand_prologue (void)
11977 HOST_WIDE_INT amount = (thumb_get_frame_size ()
11978 + current_function_outgoing_args_size);
11979 unsigned long func_type;
11981 func_type = arm_current_func_type ();
11983 /* Naked functions don't have prologues. */
11984 if (IS_NAKED (func_type))
11985 return;
11987 if (IS_INTERRUPT (func_type))
11989       error ("interrupt service routines cannot be coded in Thumb mode");
11990 return;
11993 if (frame_pointer_needed)
11994 emit_insn (gen_movsi (hard_frame_pointer_rtx, stack_pointer_rtx));
11996 if (amount)
11998 amount = ROUND_UP_WORD (amount);
12000 if (amount < 512)
12001 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
12002 GEN_INT (- amount)));
12003 else
12005 int regno;
12006 rtx reg;
12008 /* The stack decrement is too big for an immediate value in a single
12009 insn. In theory we could issue multiple subtracts, but after
12010 three of them it becomes more space efficient to place the full
12011 value in the constant pool and load into a register. (Also the
12012 ARM debugger really likes to see only one stack decrement per
12013 function). So instead we look for a scratch register into which
12014 we can load the decrement, and then we subtract this from the
12015	     stack pointer.  Unfortunately, on Thumb the only available
12016	     scratch registers are the argument registers, and we cannot use
12017	     these as they may hold arguments to the function.  Instead we
12018	     attempt to locate a call-preserved register which is used by this
12019 function. If we can find one, then we know that it will have
12020 been pushed at the start of the prologue and so we can corrupt
12021 it now. */
12022 for (regno = LAST_ARG_REGNUM + 1; regno <= LAST_LO_REGNUM; regno++)
12023 if (THUMB_REG_PUSHED_P (regno)
12024 && !(frame_pointer_needed
12025 && (regno == THUMB_HARD_FRAME_POINTER_REGNUM)))
12026 break;
12028 if (regno > LAST_LO_REGNUM) /* Very unlikely. */
12030 rtx spare = gen_rtx (REG, SImode, IP_REGNUM);
12032 /* Choose an arbitrary, non-argument low register. */
12033 reg = gen_rtx (REG, SImode, LAST_LO_REGNUM);
12035 /* Save it by copying it into a high, scratch register. */
12036 emit_insn (gen_movsi (spare, reg));
12037 /* Add a USE to stop propagate_one_insn() from barfing. */
12038 emit_insn (gen_prologue_use (spare));
12040 /* Decrement the stack. */
12041 emit_insn (gen_movsi (reg, GEN_INT (- amount)));
12042 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
12043 reg));
12045 /* Restore the low register's original value. */
12046 emit_insn (gen_movsi (reg, spare));
12048 /* Emit a USE of the restored scratch register, so that flow
12049 analysis will not consider the restore redundant. The
12050 register won't be used again in this function and isn't
12051 restored by the epilogue. */
12052 emit_insn (gen_prologue_use (reg));
12054 else
12056 reg = gen_rtx (REG, SImode, regno);
12058 emit_insn (gen_movsi (reg, GEN_INT (- amount)));
12059 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
12060 reg));
12065 if (current_function_profile || TARGET_NO_SCHED_PRO)
12066 emit_insn (gen_blockage ());
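/* For illustration, a plausible final sequence for a 1024 byte frame
   when r4 was pushed in the prologue:
	ldr	r4, .Lc		@ .Lc: .word -1024
	add	sp, r4
   In the fallback path the chosen low register is first parked in IP
   and restored around the same sequence.  */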
12069 void
12070 thumb_expand_epilogue (void)
12072 HOST_WIDE_INT amount = (thumb_get_frame_size ()
12073 + current_function_outgoing_args_size);
12075   /* Naked functions don't have epilogues.  */
12076 if (IS_NAKED (arm_current_func_type ()))
12077 return;
12079 if (frame_pointer_needed)
12080 emit_insn (gen_movsi (stack_pointer_rtx, hard_frame_pointer_rtx));
12081 else if (amount)
12083 amount = ROUND_UP_WORD (amount);
12085 if (amount < 512)
12086 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
12087 GEN_INT (amount)));
12088 else
12090 /* r3 is always free in the epilogue. */
12091 rtx reg = gen_rtx (REG, SImode, LAST_ARG_REGNUM);
12093 emit_insn (gen_movsi (reg, GEN_INT (amount)));
12094 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
12098 /* Emit a USE (stack_pointer_rtx), so that
12099 the stack adjustment will not be deleted. */
12100 emit_insn (gen_prologue_use (stack_pointer_rtx));
12102 if (current_function_profile || TARGET_NO_SCHED_PRO)
12103 emit_insn (gen_blockage ());
12106 static void
12107 thumb_output_function_prologue (FILE *f, HOST_WIDE_INT size ATTRIBUTE_UNUSED)
12109 int live_regs_mask = 0;
12110 int high_regs_pushed = 0;
12111 int regno;
12113 if (IS_NAKED (arm_current_func_type ()))
12114 return;
12116 if (is_called_in_ARM_mode (current_function_decl))
12118 const char * name;
12120 if (GET_CODE (DECL_RTL (current_function_decl)) != MEM)
12121 abort ();
12122 if (GET_CODE (XEXP (DECL_RTL (current_function_decl), 0)) != SYMBOL_REF)
12123 abort ();
12124 name = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
12126 /* Generate code sequence to switch us into Thumb mode. */
12127 /* The .code 32 directive has already been emitted by
12128 ASM_DECLARE_FUNCTION_NAME. */
12129 asm_fprintf (f, "\torr\t%r, %r, #1\n", IP_REGNUM, PC_REGNUM);
12130 asm_fprintf (f, "\tbx\t%r\n", IP_REGNUM);
12132 /* Generate a label, so that the debugger will notice the
12133 change in instruction sets. This label is also used by
12134 the assembler to bypass the ARM code when this function
12135 is called from a Thumb encoded function elsewhere in the
12136 same file. Hence the definition of STUB_NAME here must
12137 agree with the definition in gas/config/tc-arm.c */
12139 #define STUB_NAME ".real_start_of"
12141 fprintf (f, "\t.code\t16\n");
12142 #ifdef ARM_PE
12143 if (arm_dllexport_name_p (name))
12144 name = arm_strip_name_encoding (name);
12145 #endif
12146 asm_fprintf (f, "\t.globl %s%U%s\n", STUB_NAME, name);
12147 fprintf (f, "\t.thumb_func\n");
12148 asm_fprintf (f, "%s%U%s:\n", STUB_NAME, name);
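/* Sketch of the output for a function foo (with an empty user label
   prefix):
	orr	ip, pc, #1
	bx	ip
	.code	16
	.globl .real_start_offoo
	.thumb_func
   .real_start_offoo:
   In ARM state the PC reads as the address of the current insn plus 8,
   which is exactly the first Thumb insn, so ORRing in bit zero and
   doing BX switches into Thumb mode at the right place.  */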
12151 if (current_function_pretend_args_size)
12153 if (cfun->machine->uses_anonymous_args)
12155 int num_pushes;
12157 fprintf (f, "\tpush\t{");
12159 num_pushes = ARM_NUM_INTS (current_function_pretend_args_size);
12161 for (regno = LAST_ARG_REGNUM + 1 - num_pushes;
12162 regno <= LAST_ARG_REGNUM;
12163 regno++)
12164 asm_fprintf (f, "%r%s", regno,
12165 regno == LAST_ARG_REGNUM ? "" : ", ");
12167 fprintf (f, "}\n");
12169 else
12170 asm_fprintf (f, "\tsub\t%r, %r, #%d\n",
12171 SP_REGNUM, SP_REGNUM,
12172 current_function_pretend_args_size);
12175 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
12176 if (THUMB_REG_PUSHED_P (regno))
12177 live_regs_mask |= 1 << regno;
12179 if (live_regs_mask || !leaf_function_p () || thumb_far_jump_used_p (1))
12180 live_regs_mask |= 1 << LR_REGNUM;
12182 if (TARGET_BACKTRACE)
12184 int offset;
12185 int work_register = 0;
12186 int wr;
12188 /* We have been asked to create a stack backtrace structure.
12189 The code looks like this:
12191 0 .align 2
12192 0 func:
12193 0 sub SP, #16 Reserve space for 4 registers.
12194 2 push {R7} Get a work register.
12195 4 add R7, SP, #20 Get the stack pointer before the push.
12196 6 str R7, [SP, #8] Store the stack pointer (before reserving the space).
12197 8 mov R7, PC Get hold of the start of this code plus 12.
12198 10 str R7, [SP, #16] Store it.
12199 12 mov R7, FP Get hold of the current frame pointer.
12200 14 str R7, [SP, #4] Store it.
12201 16 mov R7, LR Get hold of the current return address.
12202 18 str R7, [SP, #12] Store it.
12203 20 add R7, SP, #16 Point at the start of the backtrace structure.
12204 22 mov FP, R7 Put this value into the frame pointer. */
12206 if ((live_regs_mask & 0xFF) == 0)
12208 /* See if the a4 register is free. */
12210 if (regs_ever_live [LAST_ARG_REGNUM] == 0)
12211 work_register = LAST_ARG_REGNUM;
12212 else /* We must push a register of our own */
12213 live_regs_mask |= (1 << LAST_LO_REGNUM);
12216 if (work_register == 0)
12218 /* Select a register from the list that will be pushed to
12219 use as our work register. */
12220 for (work_register = (LAST_LO_REGNUM + 1); work_register--;)
12221 if ((1 << work_register) & live_regs_mask)
12222 break;
12225 asm_fprintf
12226 (f, "\tsub\t%r, %r, #16\t%@ Create stack backtrace structure\n",
12227 SP_REGNUM, SP_REGNUM);
12229 if (live_regs_mask)
12230 thumb_pushpop (f, live_regs_mask, 1);
12232 for (offset = 0, wr = 1 << 15; wr != 0; wr >>= 1)
12233 if (wr & live_regs_mask)
12234 offset += 4;
12236 asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
12237 offset + 16 + current_function_pretend_args_size);
12239 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
12240 offset + 4);
12242 /* Make sure that the instruction fetching the PC is in the right place
12243 to calculate "start of backtrace creation code + 12". */
12244 if (live_regs_mask)
12246 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
12247 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
12248 offset + 12);
12249 asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
12250 ARM_HARD_FRAME_POINTER_REGNUM);
12251 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
12252 offset);
12254 else
12256 asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
12257 ARM_HARD_FRAME_POINTER_REGNUM);
12258 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
12259 offset);
12260 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
12261 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
12262 offset + 12);
12265 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, LR_REGNUM);
12266 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
12267 offset + 8);
12268 asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
12269 offset + 12);
12270 asm_fprintf (f, "\tmov\t%r, %r\t\t%@ Backtrace structure created\n",
12271 ARM_HARD_FRAME_POINTER_REGNUM, work_register);
12273 else if (live_regs_mask)
12274 thumb_pushpop (f, live_regs_mask, 1);
12276 for (regno = 8; regno < 13; regno++)
12277 if (THUMB_REG_PUSHED_P (regno))
12278 high_regs_pushed++;
12280 if (high_regs_pushed)
12282 int pushable_regs = 0;
12283 int mask = live_regs_mask & 0xff;
12284 int next_hi_reg;
12286 for (next_hi_reg = 12; next_hi_reg > LAST_LO_REGNUM; next_hi_reg--)
12287 if (THUMB_REG_PUSHED_P (next_hi_reg))
12288 break;
12290 pushable_regs = mask;
12292 if (pushable_regs == 0)
12294 /* Desperation time -- this probably will never happen. */
12295 if (THUMB_REG_PUSHED_P (LAST_ARG_REGNUM))
12296 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, LAST_ARG_REGNUM);
12297 mask = 1 << LAST_ARG_REGNUM;
12300 while (high_regs_pushed > 0)
12302 for (regno = LAST_LO_REGNUM; regno >= 0; regno--)
12304 if (mask & (1 << regno))
12306 asm_fprintf (f, "\tmov\t%r, %r\n", regno, next_hi_reg);
12308 high_regs_pushed--;
12310 if (high_regs_pushed)
12312 for (next_hi_reg--; next_hi_reg > LAST_LO_REGNUM;
12313 next_hi_reg--)
12314 if (THUMB_REG_PUSHED_P (next_hi_reg))
12315 break;
12317 else
12319 mask &= ~((1 << regno) - 1);
12320 break;
12325 thumb_pushpop (f, mask, 1);
12328 if (pushable_regs == 0
12329 && (THUMB_REG_PUSHED_P (LAST_ARG_REGNUM)))
12330 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
12334 /* Handle the case of a double word load into a low register from
12335 a computed memory address. The computed address may involve a
12336 register which is overwritten by the load. */
12337 const char *
12338 thumb_load_double_from_address (rtx *operands)
12340 rtx addr;
12341 rtx base;
12342 rtx offset;
12343 rtx arg1;
12344 rtx arg2;
12346 if (GET_CODE (operands[0]) != REG)
12347 abort ();
12349 if (GET_CODE (operands[1]) != MEM)
12350 abort ();
12352 /* Get the memory address. */
12353 addr = XEXP (operands[1], 0);
12355 /* Work out how the memory address is computed. */
12356 switch (GET_CODE (addr))
12358 case REG:
12359 operands[2] = gen_rtx (MEM, SImode,
12360 plus_constant (XEXP (operands[1], 0), 4));
12362 if (REGNO (operands[0]) == REGNO (addr))
12364 output_asm_insn ("ldr\t%H0, %2", operands);
12365 output_asm_insn ("ldr\t%0, %1", operands);
12367 else
12369 output_asm_insn ("ldr\t%0, %1", operands);
12370 output_asm_insn ("ldr\t%H0, %2", operands);
12372 break;
12374 case CONST:
12375 /* Compute <address> + 4 for the high order load. */
12376 operands[2] = gen_rtx (MEM, SImode,
12377 plus_constant (XEXP (operands[1], 0), 4));
12379 output_asm_insn ("ldr\t%0, %1", operands);
12380 output_asm_insn ("ldr\t%H0, %2", operands);
12381 break;
12383 case PLUS:
12384 arg1 = XEXP (addr, 0);
12385 arg2 = XEXP (addr, 1);
12387 if (CONSTANT_P (arg1))
12388 base = arg2, offset = arg1;
12389 else
12390 base = arg1, offset = arg2;
12392 if (GET_CODE (base) != REG)
12393 abort ();
12395 /* Catch the case of <address> = <reg> + <reg> */
12396 if (GET_CODE (offset) == REG)
12398 int reg_offset = REGNO (offset);
12399 int reg_base = REGNO (base);
12400 int reg_dest = REGNO (operands[0]);
12402 /* Add the base and offset registers together into the
12403 higher destination register. */
12404 	  asm_fprintf (asm_out_file, "\tadd\t%r, %r, %r\n",
12405 reg_dest + 1, reg_base, reg_offset);
12407 /* Load the lower destination register from the address in
12408 the higher destination register. */
12409 	  asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #0]\n",
12410 reg_dest, reg_dest + 1);
12412 /* Load the higher destination register from its own address
12413 plus 4. */
12414 	  asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #4]\n",
12415 reg_dest + 1, reg_dest + 1);
12417 else
12419 /* Compute <address> + 4 for the high order load. */
12420 operands[2] = gen_rtx (MEM, SImode,
12421 plus_constant (XEXP (operands[1], 0), 4));
12423 /* If the computed address is held in the low order register
12424 then load the high order register first, otherwise always
12425 load the low order register first. */
12426 if (REGNO (operands[0]) == REGNO (base))
12428 output_asm_insn ("ldr\t%H0, %2", operands);
12429 output_asm_insn ("ldr\t%0, %1", operands);
12431 else
12433 output_asm_insn ("ldr\t%0, %1", operands);
12434 output_asm_insn ("ldr\t%H0, %2", operands);
12437 break;
12439 case LABEL_REF:
12440 /* With no registers to worry about we can just load the value
12441 directly. */
12442 operands[2] = gen_rtx (MEM, SImode,
12443 plus_constant (XEXP (operands[1], 0), 4));
12445 output_asm_insn ("ldr\t%H0, %2", operands);
12446 output_asm_insn ("ldr\t%0, %1", operands);
12447 break;
12449 default:
12450 abort ();
12451 break;
12454 return "";
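/* Example of the REG case: a doubleword load of r0/r1 from the address
   held in r0 must load the high word first so the base is not
   clobbered:
	ldr	r1, [r0, #4]
	ldr	r0, [r0]
   Any other destination loads the low word first.  */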
12457 const char *
12458 thumb_output_move_mem_multiple (int n, rtx *operands)
12460 rtx tmp;
12462 switch (n)
12464 case 2:
12465 if (REGNO (operands[4]) > REGNO (operands[5]))
12467 tmp = operands[4];
12468 operands[4] = operands[5];
12469 operands[5] = tmp;
12471 output_asm_insn ("ldmia\t%1!, {%4, %5}", operands);
12472 output_asm_insn ("stmia\t%0!, {%4, %5}", operands);
12473 break;
12475 case 3:
12476 if (REGNO (operands[4]) > REGNO (operands[5]))
12478 tmp = operands[4];
12479 operands[4] = operands[5];
12480 operands[5] = tmp;
12482 if (REGNO (operands[5]) > REGNO (operands[6]))
12484 tmp = operands[5];
12485 operands[5] = operands[6];
12486 operands[6] = tmp;
12488 if (REGNO (operands[4]) > REGNO (operands[5]))
12490 tmp = operands[4];
12491 operands[4] = operands[5];
12492 operands[5] = tmp;
12495 output_asm_insn ("ldmia\t%1!, {%4, %5, %6}", operands);
12496 output_asm_insn ("stmia\t%0!, {%4, %5, %6}", operands);
12497 break;
12499 default:
12500 abort ();
12503 return "";
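/* Example: for N == 2 with scratch registers r5 and r4 the swap above
   puts them into ascending order, giving
	ldmia	r1!, {r4, r5}
	stmia	r0!, {r4, r5}
   since LDM/STM register lists must name registers in ascending
   order.  */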
12506 /* Routines for generating rtl. */
12507 void
12508 thumb_expand_movstrqi (rtx *operands)
12510 rtx out = copy_to_mode_reg (SImode, XEXP (operands[0], 0));
12511 rtx in = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
12512 HOST_WIDE_INT len = INTVAL (operands[2]);
12513 HOST_WIDE_INT offset = 0;
12515 while (len >= 12)
12517 emit_insn (gen_movmem12b (out, in, out, in));
12518 len -= 12;
12521 if (len >= 8)
12523 emit_insn (gen_movmem8b (out, in, out, in));
12524 len -= 8;
12527 if (len >= 4)
12529 rtx reg = gen_reg_rtx (SImode);
12530 emit_insn (gen_movsi (reg, gen_rtx (MEM, SImode, in)));
12531 emit_insn (gen_movsi (gen_rtx (MEM, SImode, out), reg));
12532 len -= 4;
12533 offset += 4;
12536 if (len >= 2)
12538 rtx reg = gen_reg_rtx (HImode);
12539 emit_insn (gen_movhi (reg, gen_rtx (MEM, HImode,
12540 plus_constant (in, offset))));
12541 emit_insn (gen_movhi (gen_rtx (MEM, HImode, plus_constant (out, offset)),
12542 reg));
12543 len -= 2;
12544 offset += 2;
12547 if (len)
12549 rtx reg = gen_reg_rtx (QImode);
12550 emit_insn (gen_movqi (reg, gen_rtx (MEM, QImode,
12551 plus_constant (in, offset))));
12552 emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (out, offset)),
12553 reg));
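/* Worked example: a 23 byte copy becomes one 12 byte block move and
   one 8 byte block move (both of which post-increment the pointers),
   followed by a halfword at offset 0 and a final byte at offset 2.  */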
12557 int
12558 thumb_cmp_operand (rtx op, enum machine_mode mode)
12560 return ((GET_CODE (op) == CONST_INT
12561 && (unsigned HOST_WIDE_INT) (INTVAL (op)) < 256)
12562 || register_operand (op, mode));
12565 static const char *
12566 thumb_condition_code (rtx x, int invert)
12568 static const char * const conds[] =
12570 "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
12571 "hi", "ls", "ge", "lt", "gt", "le"
12573 int val;
12575 switch (GET_CODE (x))
12577 case EQ: val = 0; break;
12578 case NE: val = 1; break;
12579 case GEU: val = 2; break;
12580 case LTU: val = 3; break;
12581 case GTU: val = 8; break;
12582 case LEU: val = 9; break;
12583 case GE: val = 10; break;
12584 case LT: val = 11; break;
12585 case GT: val = 12; break;
12586 case LE: val = 13; break;
12587 default:
12588 abort ();
12591 return conds[val ^ invert];
12594 /* Handle storing a half-word to memory during reload. */
12595 void
12596 thumb_reload_out_hi (rtx *operands)
12598 emit_insn (gen_thumb_movhi_clobber (operands[0], operands[1], operands[2]));
12601 /* Handle reading a half-word from memory during reload. */
12602 void
12603 thumb_reload_in_hi (rtx *operands ATTRIBUTE_UNUSED)
12605 abort ();
12608 /* Return the length of a function name prefix
12609 that starts with the character 'c'. */
12610 static int
12611 arm_get_strip_length (int c)
12613 switch (c)
12615 ARM_NAME_ENCODING_LENGTHS
12616 default: return 0;
12620 /* Return a pointer to a function's name with any
12621 and all prefix encodings stripped from it. */
12622 const char *
12623 arm_strip_name_encoding (const char *name)
12625 int skip;
12627 while ((skip = arm_get_strip_length (* name)))
12628 name += skip;
12630 return name;
12633 /* If there is a '*' anywhere in the name's prefix, then
12634 emit the stripped name verbatim, otherwise prepend an
12635 underscore if leading underscores are being used. */
12636 void
12637 arm_asm_output_labelref (FILE *stream, const char *name)
12639 int skip;
12640 int verbatim = 0;
12642 while ((skip = arm_get_strip_length (* name)))
12644 verbatim |= (*name == '*');
12645 name += skip;
12648 if (verbatim)
12649 fputs (name, stream);
12650 else
12651 asm_fprintf (stream, "%U%s", name);
12654 rtx aof_pic_label;
12656 #ifdef AOF_ASSEMBLER
12657 /* Special functions only needed when producing AOF syntax assembler. */
12659 struct pic_chain
12661 struct pic_chain * next;
12662 const char * symname;
12665 static struct pic_chain * aof_pic_chain = NULL;
12667 rtx
12668 aof_pic_entry (rtx x)
12670 struct pic_chain ** chainp;
12671 int offset;
12673 if (aof_pic_label == NULL_RTX)
12675 aof_pic_label = gen_rtx_SYMBOL_REF (Pmode, "x$adcons");
12678 for (offset = 0, chainp = &aof_pic_chain; *chainp;
12679 offset += 4, chainp = &(*chainp)->next)
12680 if ((*chainp)->symname == XSTR (x, 0))
12681 return plus_constant (aof_pic_label, offset);
12683 *chainp = (struct pic_chain *) xmalloc (sizeof (struct pic_chain));
12684 (*chainp)->next = NULL;
12685 (*chainp)->symname = XSTR (x, 0);
12686 return plus_constant (aof_pic_label, offset);
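/* Example: the first symbol entered yields x$adcons + 0, the next
   distinct symbol x$adcons + 4, and so on; looking up a symbol that is
   already in the chain returns its original slot.  */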
12689 void
12690 aof_dump_pic_table (FILE *f)
12692 struct pic_chain * chain;
12694 if (aof_pic_chain == NULL)
12695 return;
12697 asm_fprintf (f, "\tAREA |%r$$adcons|, BASED %r\n",
12698 PIC_OFFSET_TABLE_REGNUM,
12699 PIC_OFFSET_TABLE_REGNUM);
12700 fputs ("|x$adcons|\n", f);
12702 for (chain = aof_pic_chain; chain; chain = chain->next)
12704 fputs ("\tDCD\t", f);
12705 assemble_name (f, chain->symname);
12706 fputs ("\n", f);
12710 int arm_text_section_count = 1;
12712 char *
12713 aof_text_section (void)
12715 static char buf[100];
12716 sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
12717 arm_text_section_count++);
12718 if (flag_pic)
12719 strcat (buf, ", PIC, REENTRANT");
12720 return buf;
12723 static int arm_data_section_count = 1;
12725 char *
12726 aof_data_section (void)
12728 static char buf[100];
12729 sprintf (buf, "\tAREA |C$$data%d|, DATA", arm_data_section_count++);
12730 return buf;
12733 /* The AOF assembler is religiously strict about declarations of
12734 imported and exported symbols, so that it is impossible to declare
12735 a function as imported near the beginning of the file, and then to
12736 export it later on. It is, however, possible to delay the decision
12737 until all the functions in the file have been compiled. To get
12738 around this, we maintain a list of the imports and exports, and
12739 delete from it any that are subsequently defined. At the end of
12740 compilation we spit the remainder of the list out before the END
12741 directive. */
12743 struct import
12745 struct import * next;
12746 const char * name;
12749 static struct import * imports_list = NULL;
12751 void
12752 aof_add_import (const char *name)
12754 struct import * new;
12756 for (new = imports_list; new; new = new->next)
12757 if (new->name == name)
12758 return;
12760 new = (struct import *) xmalloc (sizeof (struct import));
12761 new->next = imports_list;
12762 imports_list = new;
12763 new->name = name;
12766 void
12767 aof_delete_import (const char *name)
12769 struct import ** old;
12771 for (old = &imports_list; *old; old = & (*old)->next)
12773 if ((*old)->name == name)
12775 *old = (*old)->next;
12776 return;
12781 int arm_main_function = 0;
12783 static void
12784 aof_dump_imports (FILE *f)
12786 /* The AOF assembler needs this to cause the startup code to be extracted
12787      from the library.  Bringing in __main causes the whole thing to work
12788 automagically. */
12789 if (arm_main_function)
12791 text_section ();
12792 fputs ("\tIMPORT __main\n", f);
12793 fputs ("\tDCD __main\n", f);
12796 /* Now dump the remaining imports. */
12797 while (imports_list)
12799 fprintf (f, "\tIMPORT\t");
12800 assemble_name (f, imports_list->name);
12801 fputc ('\n', f);
12802 imports_list = imports_list->next;
12806 static void
12807 aof_globalize_label (FILE *stream, const char *name)
12809 default_globalize_label (stream, name);
12810 if (! strcmp (name, "main"))
12811 arm_main_function = 1;
12814 static void
12815 aof_file_start (void)
12817 fputs ("__r0\tRN\t0\n", asm_out_file);
12818 fputs ("__a1\tRN\t0\n", asm_out_file);
12819 fputs ("__a2\tRN\t1\n", asm_out_file);
12820 fputs ("__a3\tRN\t2\n", asm_out_file);
12821 fputs ("__a4\tRN\t3\n", asm_out_file);
12822 fputs ("__v1\tRN\t4\n", asm_out_file);
12823 fputs ("__v2\tRN\t5\n", asm_out_file);
12824 fputs ("__v3\tRN\t6\n", asm_out_file);
12825 fputs ("__v4\tRN\t7\n", asm_out_file);
12826 fputs ("__v5\tRN\t8\n", asm_out_file);
12827 fputs ("__v6\tRN\t9\n", asm_out_file);
12828 fputs ("__sl\tRN\t10\n", asm_out_file);
12829 fputs ("__fp\tRN\t11\n", asm_out_file);
12830 fputs ("__ip\tRN\t12\n", asm_out_file);
12831 fputs ("__sp\tRN\t13\n", asm_out_file);
12832 fputs ("__lr\tRN\t14\n", asm_out_file);
12833 fputs ("__pc\tRN\t15\n", asm_out_file);
12834 fputs ("__f0\tFN\t0\n", asm_out_file);
12835 fputs ("__f1\tFN\t1\n", asm_out_file);
12836 fputs ("__f2\tFN\t2\n", asm_out_file);
12837 fputs ("__f3\tFN\t3\n", asm_out_file);
12838 fputs ("__f4\tFN\t4\n", asm_out_file);
12839 fputs ("__f5\tFN\t5\n", asm_out_file);
12840 fputs ("__f6\tFN\t6\n", asm_out_file);
12841 fputs ("__f7\tFN\t7\n", asm_out_file);
12842 text_section ();
12845 static void
12846 aof_file_end (void)
12848 if (flag_pic)
12849 aof_dump_pic_table (asm_out_file);
12850 aof_dump_imports (asm_out_file);
12851 fputs ("\tEND\n", asm_out_file);
12853 #endif /* AOF_ASSEMBLER */
12855 #ifdef OBJECT_FORMAT_ELF
12856 /* Switch to an arbitrary section NAME with attributes as specified
12857 by FLAGS. ALIGN specifies any known alignment requirements for
12858 the section; 0 if the default should be used.
12860 Differs from the default elf version only in the prefix character
12861 used before the section type. */
12863 static void
12864 arm_elf_asm_named_section (const char *name, unsigned int flags)
12866 char flagchars[10], *f = flagchars;
12868 if (! named_section_first_declaration (name))
12870 fprintf (asm_out_file, "\t.section\t%s\n", name);
12871 return;
12874 if (!(flags & SECTION_DEBUG))
12875 *f++ = 'a';
12876 if (flags & SECTION_WRITE)
12877 *f++ = 'w';
12878 if (flags & SECTION_CODE)
12879 *f++ = 'x';
12880 if (flags & SECTION_SMALL)
12881 *f++ = 's';
12882 if (flags & SECTION_MERGE)
12883 *f++ = 'M';
12884 if (flags & SECTION_STRINGS)
12885 *f++ = 'S';
12886 if (flags & SECTION_TLS)
12887 *f++ = 'T';
12888 *f = '\0';
12890 fprintf (asm_out_file, "\t.section\t%s,\"%s\"", name, flagchars);
12892 if (!(flags & SECTION_NOTYPE))
12894 const char *type;
12896 if (flags & SECTION_BSS)
12897 type = "nobits";
12898 else
12899 type = "progbits";
12901 fprintf (asm_out_file, ",%%%s", type);
12903 if (flags & SECTION_ENTSIZE)
12904 fprintf (asm_out_file, ",%d", flags & SECTION_ENTSIZE);
12907 putc ('\n', asm_out_file);
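/* Example: a plain code section named .text.hot comes out as
	.section	.text.hot,"ax",%progbits
   where '%' replaces the '@' of the default ELF hook ('@' introduces
   comments in ARM assembler syntax).  */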
12909 #endif
12911 #ifndef ARM_PE
12912 /* Symbols in the text segment can be accessed without indirecting via the
12913 constant pool; it may take an extra binary operation, but this is still
12914 faster than indirecting via memory. Don't do this when not optimizing,
12915    since we won't be calculating all of the offsets necessary to do this
12916 simplification. */
12918 static void
12919 arm_encode_section_info (tree decl, rtx rtl, int first)
12921 /* This doesn't work with AOF syntax, since the string table may be in
12922 a different AREA. */
12923 #ifndef AOF_ASSEMBLER
12924 if (optimize > 0 && TREE_CONSTANT (decl)
12925 && (!flag_writable_strings || TREE_CODE (decl) != STRING_CST))
12926 SYMBOL_REF_FLAG (XEXP (rtl, 0)) = 1;
12927 #endif
12929 /* If we are referencing a function that is weak then encode a long call
12930 flag in the function name, otherwise if the function is static or
12931      known to be defined in this file then encode a short call flag.  */
12932 if (first && TREE_CODE_CLASS (TREE_CODE (decl)) == 'd')
12934 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_WEAK (decl))
12935 arm_encode_call_attribute (decl, LONG_CALL_FLAG_CHAR);
12936 else if (! TREE_PUBLIC (decl))
12937 arm_encode_call_attribute (decl, SHORT_CALL_FLAG_CHAR);
12940 #endif /* !ARM_PE */
12942 static void
12943 arm_internal_label (FILE *stream, const char *prefix, unsigned long labelno)
12945 if (arm_ccfsm_state == 3 && (unsigned) arm_target_label == labelno
12946 && !strcmp (prefix, "L"))
12948 arm_ccfsm_state = 0;
12949 arm_target_insn = NULL;
12951 default_internal_label (stream, prefix, labelno);
12954 /* Output code to add DELTA to the first argument, and then jump
12955 to FUNCTION. Used for C++ multiple inheritance. */
12956 static void
12957 arm_output_mi_thunk (FILE *file, tree thunk ATTRIBUTE_UNUSED,
12958 HOST_WIDE_INT delta,
12959 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
12960 tree function)
12962 int mi_delta = delta;
12963 const char *const mi_op = mi_delta < 0 ? "sub" : "add";
12964 int shift = 0;
12965 int this_regno = (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function)
12966 ? 1 : 0);
12967 if (mi_delta < 0)
12968 mi_delta = - mi_delta;
12969 while (mi_delta != 0)
12971 if ((mi_delta & (3 << shift)) == 0)
12972 shift += 2;
12973 else
12975 asm_fprintf (file, "\t%s\t%r, %r, #%d\n",
12976 mi_op, this_regno, this_regno,
12977 mi_delta & (0xff << shift));
12978 mi_delta &= ~(0xff << shift);
12979 shift += 8;
12982 fputs ("\tb\t", file);
12983 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
12984 if (NEED_PLT_RELOC)
12985 fputs ("(PLT)", file);
12986 fputc ('\n', file);
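/* Worked example: DELTA == 0x10004 cannot be encoded as a single ARM
   immediate, so the loop above splits it into two:
	add	r0, r0, #4
	add	r0, r0, #65536
   (r1 instead of r0 when the return value is passed by reference,
   making the this pointer the second argument).  */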
12989 int
12990 arm_emit_vector_const (FILE *file, rtx x)
12994 int i;
12995 const char * pattern;
12997 if (GET_CODE (x) != CONST_VECTOR)
12998 abort ();
13000 switch (GET_MODE (x))
13002 case V2SImode: pattern = "%08x"; break;
13003 case V4HImode: pattern = "%04x"; break;
13004 case V8QImode: pattern = "%02x"; break;
13005 default: abort ();
13008 fprintf (file, "0x");
13009 for (i = CONST_VECTOR_NUNITS (x); i--;)
13011 rtx element;
13013 element = CONST_VECTOR_ELT (x, i);
13014 fprintf (file, pattern, INTVAL (element));
13017 return 1;
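/* Example: a V4HImode constant with elements 1, 2, 3, 4 (element 0
   first) prints as 0x0004000300020001, i.e. the highest-numbered
   element comes first in the hex string.  */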
13020 const char *
13021 arm_output_load_gr (rtx *operands)
13024 rtx reg;
13025 rtx offset;
13026 rtx wcgr;
13027 rtx sum;
13029 if (GET_CODE (operands [1]) != MEM
13030 || GET_CODE (sum = XEXP (operands [1], 0)) != PLUS
13031 || GET_CODE (reg = XEXP (sum, 0)) != REG
13032 || GET_CODE (offset = XEXP (sum, 1)) != CONST_INT
13033 || ((INTVAL (offset) < 1024) && (INTVAL (offset) > -1024)))
13034 return "wldrw%?\t%0, %1";
13036 /* Fix up an out-of-range load of a GR register. */
13037 output_asm_insn ("str%?\t%0, [sp, #-4]!\t@ Start of GR load expansion", & reg);
13038 wcgr = operands[0];
13039 operands[0] = reg;
13040 output_asm_insn ("ldr%?\t%0, %1", operands);
13042 operands[0] = wcgr;
13043 operands[1] = reg;
13044 output_asm_insn ("tmcr%?\t%0, %1", operands);
13045 output_asm_insn ("ldr%?\t%0, [sp], #4\t@ End of GR load expansion", & reg);
13047 return "";