/* Output routines for GCC for ARM.
   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
   2001, 2002, 2003  Free Software Foundation, Inc.
   Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
   and Martin Simmons (@harleqn.co.uk).
   More major hacks by Richard Earnshaw (rearnsha@arm.com).

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 2, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING.  If not, write to
   the Free Software Foundation, 59 Temple Place - Suite 330,
   Boston, MA 02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "reload.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "toplev.h"
#include "recog.h"
#include "ggc.h"
#include "except.h"
#include "c-pragma.h"
#include "integrate.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"
/* Forward definitions of types.  */
typedef struct minipool_node    Mnode;
typedef struct minipool_fixup   Mfix;

const struct attribute_spec arm_attribute_table[];
/* Forward function declarations.  */
static void arm_add_gc_roots (void);
static int arm_gen_constant (enum rtx_code, enum machine_mode, HOST_WIDE_INT,
                             rtx, rtx, int, int);
static unsigned bit_count (unsigned long);
static int arm_address_register_rtx_p (rtx, int);
static int arm_legitimate_index_p (enum machine_mode, rtx, int);
static int thumb_base_register_rtx_p (rtx, enum machine_mode, int);
inline static int thumb_index_register_rtx_p (rtx, int);
static int const_ok_for_op (HOST_WIDE_INT, enum rtx_code);
static rtx emit_multi_reg_push (int);
static rtx emit_sfm (int, int);
#ifndef AOF_ASSEMBLER
static bool arm_assemble_integer (rtx, unsigned int, int);
#endif
static const char *fp_const_from_val (REAL_VALUE_TYPE *);
static arm_cc get_arm_condition_code (rtx);
static void init_fpa_table (void);
static HOST_WIDE_INT int_log2 (HOST_WIDE_INT);
static const char *output_multi_immediate (rtx *, const char *, const char *,
                                           int, HOST_WIDE_INT);
static void print_multi_reg (FILE *, const char *, int, int);
static const char *shift_op (rtx, HOST_WIDE_INT *);
static struct machine_function *arm_init_machine_status (void);
static int number_of_first_bit_set (int);
static void replace_symbols_in_block (tree, rtx, rtx);
static void thumb_exit (FILE *, int, rtx);
static void thumb_pushpop (FILE *, int, int);
static const char *thumb_condition_code (rtx, int);
static rtx is_jump_table (rtx);
static HOST_WIDE_INT get_jump_table_size (rtx);
static Mnode *move_minipool_fix_forward_ref (Mnode *, Mnode *, HOST_WIDE_INT);
static Mnode *add_minipool_forward_ref (Mfix *);
static Mnode *move_minipool_fix_backward_ref (Mnode *, Mnode *, HOST_WIDE_INT);
static Mnode *add_minipool_backward_ref (Mfix *);
static void assign_minipool_offsets (Mfix *);
static void arm_print_value (FILE *, rtx);
static void dump_minipool (rtx);
static int arm_barrier_cost (rtx);
static Mfix *create_fix_barrier (Mfix *, HOST_WIDE_INT);
static void push_minipool_barrier (rtx, HOST_WIDE_INT);
static void push_minipool_fix (rtx, HOST_WIDE_INT, rtx *, enum machine_mode,
                               rtx);
static void arm_reorg (void);
static bool note_invalid_constants (rtx, HOST_WIDE_INT, int);
static int current_file_function_operand (rtx);
static unsigned long arm_compute_save_reg0_reg12_mask (void);
static unsigned long arm_compute_save_reg_mask (void);
static unsigned long arm_isr_value (tree);
static unsigned long arm_compute_func_type (void);
static tree arm_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
static tree arm_handle_isr_attribute (tree *, tree, tree, int, bool *);
static void arm_output_function_epilogue (FILE *, HOST_WIDE_INT);
static void arm_output_function_prologue (FILE *, HOST_WIDE_INT);
static void thumb_output_function_prologue (FILE *, HOST_WIDE_INT);
static int arm_comp_type_attributes (tree, tree);
static void arm_set_default_type_attributes (tree);
static int arm_adjust_cost (rtx, rtx, rtx, int);
static int arm_use_dfa_pipeline_interface (void);
static int count_insns_for_constant (HOST_WIDE_INT, int);
static int arm_get_strip_length (int);
static bool arm_function_ok_for_sibcall (tree, tree);
static void arm_internal_label (FILE *, const char *, unsigned long);
static void arm_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
                                 tree);
static int arm_rtx_costs_1 (rtx, enum rtx_code, enum rtx_code);
static bool arm_rtx_costs (rtx, int, int, int *);
static int arm_address_cost (rtx);
static bool arm_memory_load_p (rtx);
static bool arm_cirrus_insn_p (rtx);
static void cirrus_reorg (rtx);
static void arm_init_builtins (void);
static void arm_init_iwmmxt_builtins (void);
static rtx safe_vector_operand (rtx, enum machine_mode);
static rtx arm_expand_binop_builtin (enum insn_code, tree, rtx);
static rtx arm_expand_unop_builtin (enum insn_code, tree, rtx, int);
static rtx arm_expand_builtin (tree, rtx, rtx, enum machine_mode, int);

#ifdef OBJECT_FORMAT_ELF
static void arm_elf_asm_named_section (const char *, unsigned int);
#endif
#ifndef ARM_PE
static void arm_encode_section_info (tree, rtx, int);
#endif
#ifdef AOF_ASSEMBLER
static void aof_globalize_label (FILE *, const char *);
static void aof_dump_imports (FILE *);
static void aof_dump_pic_table (FILE *);
static void aof_file_end (void);
#endif
/* Initialize the GCC target structure.  */
#ifdef TARGET_DLLIMPORT_DECL_ATTRIBUTES
#undef TARGET_MERGE_DECL_ATTRIBUTES
#define TARGET_MERGE_DECL_ATTRIBUTES merge_dllimport_decl_attributes
#endif

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE arm_attribute_table

#ifdef AOF_ASSEMBLER
#undef TARGET_ASM_BYTE_OP
#define TARGET_ASM_BYTE_OP "\tDCB\t"
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\tDCW\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\tDCD\t"
#undef TARGET_ASM_GLOBALIZE_LABEL
#define TARGET_ASM_GLOBALIZE_LABEL aof_globalize_label
#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END aof_file_end
#else
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP NULL
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER arm_assemble_integer
#endif

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE arm_output_function_prologue

#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE arm_output_function_epilogue

#undef TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES arm_comp_type_attributes

#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES arm_set_default_type_attributes

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST arm_adjust_cost

#undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
#define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE arm_use_dfa_pipeline_interface

#undef TARGET_ENCODE_SECTION_INFO
#ifdef ARM_PE
#define TARGET_ENCODE_SECTION_INFO arm_pe_encode_section_info
#else
#define TARGET_ENCODE_SECTION_INFO arm_encode_section_info
#endif

#undef TARGET_STRIP_NAME_ENCODING
#define TARGET_STRIP_NAME_ENCODING arm_strip_name_encoding

#undef TARGET_ASM_INTERNAL_LABEL
#define TARGET_ASM_INTERNAL_LABEL arm_internal_label

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL arm_function_ok_for_sibcall

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK arm_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS arm_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST arm_address_cost

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG arm_reorg

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS arm_init_builtins
#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN arm_expand_builtin

struct gcc_target targetm = TARGET_INITIALIZER;
/* Obstack for minipool constant handling.  */
static struct obstack minipool_obstack;
static char *         minipool_startobj;

/* The maximum number of insns skipped which
   will be conditionalised if possible.  */
static int max_insns_skipped = 5;
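
/* As an illustration: on ARM a short guarded statement such as
   "if (x != 0) y = z;" can be emitted as "cmp rX, #0; movne rY, rZ"
   rather than as a branch around the assignment; max_insns_skipped
   bounds how many instructions may be predicated in this way.  */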

extern FILE * asm_out_file;

/* True if we are currently building a constant table.  */
int making_const_table;

/* Define the information needed to generate branch insns.  This is
   stored from the compare operation.  */
rtx arm_compare_op0, arm_compare_op1;

/* What type of floating point are we tuning for?  */
enum fputype arm_fpu_tune;

/* What type of floating point instructions are available?  */
enum fputype arm_fpu_arch;

/* What program mode is the CPU running in?  26-bit mode or 32-bit mode.  */
enum prog_mode_type arm_prgmode;

/* Set by the -mfp=... option.  */
const char * target_fp_name = NULL;

/* Used to parse the -mstructure-size-boundary command line option.  */
const char * structure_size_string = NULL;
int arm_structure_size_boundary = DEFAULT_STRUCTURE_SIZE_BOUNDARY;
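
/* Usage sketch: compiling with -mstructure-size-boundary=8 packs
   structure sizes to byte boundaries instead of rounding them up to
   DEFAULT_STRUCTURE_SIZE_BOUNDARY (typically 32 bits), which changes
   structure layout and therefore binary compatibility.  */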

/* Bit values used to identify processor capabilities.  */
#define FL_CO_PROC    (1 << 0)        /* Has external co-processor bus */
#define FL_FAST_MULT  (1 << 1)        /* Fast multiply */
#define FL_MODE26     (1 << 2)        /* 26-bit mode support */
#define FL_MODE32     (1 << 3)        /* 32-bit mode support */
#define FL_ARCH4      (1 << 4)        /* Architecture rel 4 */
#define FL_ARCH5      (1 << 5)        /* Architecture rel 5 */
#define FL_THUMB      (1 << 6)        /* Thumb aware */
#define FL_LDSCHED    (1 << 7)        /* Load scheduling necessary */
#define FL_STRONG     (1 << 8)        /* StrongARM */
#define FL_ARCH5E     (1 << 9)        /* DSP extensions to v5 */
#define FL_XSCALE     (1 << 10)       /* XScale */
#define FL_CIRRUS     (1 << 11)       /* Cirrus/DSP.  */
#define FL_IWMMXT     (1 << 29)       /* XScale v2 or "Intel Wireless MMX technology".  */

/* The bits in this mask specify which
   instructions we are allowed to generate.  */
static unsigned long insn_flags = 0;

/* The bits in this mask specify which instruction scheduling options should
   be used.  Note - there is an overlap with FL_FAST_MULT.  For some
   hardware we want to be able to generate the multiply instructions, but to
   tune as if they were not present in the architecture.  */
static unsigned long tune_flags = 0;

/* The following are used in the arm.md file as equivalents to bits
   in the above two flag variables.  */

/* Nonzero if this is an "M" variant of the processor.  */
int arm_fast_multiply = 0;

/* Nonzero if this chip supports the ARM Architecture 4 extensions.  */
int arm_arch4 = 0;

/* Nonzero if this chip supports the ARM Architecture 5 extensions.  */
int arm_arch5 = 0;

/* Nonzero if this chip supports the ARM Architecture 5E extensions.  */
int arm_arch5e = 0;

/* Nonzero if this chip can benefit from load scheduling.  */
int arm_ld_sched = 0;

/* Nonzero if this chip is a StrongARM.  */
int arm_is_strong = 0;

/* Nonzero if this chip supports Intel Wireless MMX technology.  */
int arm_arch_iwmmxt = 0;

/* Nonzero if this chip is an XScale.  */
int arm_arch_xscale = 0;

/* Nonzero if tuning for XScale.  */
int arm_tune_xscale = 0;

/* Nonzero if this chip is an ARM6 or an ARM7.  */
int arm_is_6_or_7 = 0;

/* Nonzero if this chip is a Cirrus/DSP.  */
int arm_is_cirrus = 0;

/* Nonzero if generating Thumb instructions.  */
int thumb_code = 0;

/* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
   must report the mode of the memory reference from PRINT_OPERAND to
   PRINT_OPERAND_ADDRESS.  */
enum machine_mode output_memory_reference_mode;

/* The register number to be used for the PIC offset register.  */
const char * arm_pic_register_string = NULL;
int arm_pic_register = INVALID_REGNUM;

/* Set to 1 when a return insn is output; this means that the epilogue
   is not needed.  */
int return_used_this_function;

/* Set to 1 after arm_reorg has started.  Reset at the start of
   the next function.  */
static int after_arm_reorg = 0;

/* The maximum number of insns to be used when loading a constant.  */
static int arm_constant_limit = 3;

/* For an explanation of these variables, see final_prescan_insn below.  */
int arm_ccfsm_state;
enum arm_cond_code arm_current_cc;
rtx arm_target_insn;
int arm_target_label;

/* The condition codes of the ARM, and the inverse function.  */
static const char * const arm_condition_codes[] =
{
  "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
  "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
};

#define streq(string1, string2) (strcmp (string1, string2) == 0)

/* Initialization code.  */

struct processors
{
  const char *const name;
  const unsigned long flags;
};

/* Not all of these give usefully different compilation alternatives,
   but there is no simple way of generalizing them.  */
static const struct processors all_cores[] =
{
  /* ARM Cores */

  {"arm2", FL_CO_PROC | FL_MODE26 },
  {"arm250", FL_CO_PROC | FL_MODE26 },
  {"arm3", FL_CO_PROC | FL_MODE26 },
  {"arm6", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm60", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm600", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm610", FL_MODE26 | FL_MODE32 },
  {"arm620", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  /* arm7m doesn't exist on its own, only in combination with D (and I),
     but those features don't alter the generated code, so arm7m is
     sometimes used.  */
  {"arm7m", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7d", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dm", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7di", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dmi", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm70", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700i", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm710", FL_MODE26 | FL_MODE32 },
  {"arm710t", FL_MODE26 | FL_MODE32 | FL_THUMB },
  {"arm720", FL_MODE26 | FL_MODE32 },
  {"arm720t", FL_MODE26 | FL_MODE32 | FL_THUMB },
  {"arm740t", FL_MODE26 | FL_MODE32 | FL_THUMB },
  {"arm710c", FL_MODE26 | FL_MODE32 },
  {"arm7100", FL_MODE26 | FL_MODE32 },
  {"arm7500", FL_MODE26 | FL_MODE32 },
  /* Doesn't have an external co-proc, but does have embedded fpa.  */
  {"arm7500fe", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7tdmi", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  {"arm8", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm810", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm9", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm920", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm920t", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm940t", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm9tdmi", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm9e", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"ep9312", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_CIRRUS },
  {"strongarm", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm110", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm1100", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm1110", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"arm10tdmi", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_ARCH5 },
  {"arm1020t", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_ARCH5 },
  {"xscale", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_STRONG | FL_ARCH5 | FL_ARCH5E | FL_XSCALE },
  {"iwmmxt", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_STRONG | FL_ARCH5 | FL_ARCH5E | FL_XSCALE | FL_IWMMXT },

  {NULL, 0}
};

static const struct processors all_architectures[] =
{
  /* ARM Architectures */

  { "armv2",   FL_CO_PROC | FL_MODE26 },
  { "armv2a",  FL_CO_PROC | FL_MODE26 },
  { "armv3",   FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  { "armv3m",  FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  { "armv4",   FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 },
  /* Strictly, FL_MODE26 is a permitted option for v4t, but there are no
     implementations that support it, so we will leave it out for now.  */
  { "armv4t",  FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  { "armv5",   FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
  { "armv5t",  FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
  { "armv5te", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 | FL_ARCH5E },
  { "ep9312",  FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_CIRRUS },
  { "iwmmxt",  FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_STRONG | FL_ARCH5 | FL_ARCH5E | FL_XSCALE | FL_IWMMXT },
  { NULL, 0 }
};

/* This is a magic structure.  The 'string' field is magically filled in
   with a pointer to the value specified by the user on the command line,
   assuming that the user has specified such a value.  */

struct arm_cpu_select arm_select[] =
{
  /* string       name            processors  */
  { NULL,       "-mcpu=",       all_cores  },
  { NULL,       "-march=",      all_architectures },
  { NULL,       "-mtune=",      all_cores }
};

/* Return the number of bits set in VALUE.  */
static unsigned
bit_count (unsigned long value)
{
  unsigned long count = 0;

  while (value)
    {
      count++;
      value &= value - 1;  /* Clear the least-significant set bit.  */
    }

  return count;
}
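
/* Worked example of the loop above: for value = 0x29 (binary 101001)
   the successive "value &= value - 1" steps yield 101000, 100000 and
   then 0, so the function returns 3: one iteration per set bit.  */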

/* Fix up any incompatible options that the user has specified.
   This has now turned into a maze.  */
void
arm_override_options (void)
{
  unsigned i;

  /* Set up the flags based on the cpu/architecture selected by the user.  */
  for (i = ARRAY_SIZE (arm_select); i--;)
    {
      struct arm_cpu_select * ptr = arm_select + i;

      if (ptr->string != NULL && ptr->string[0] != '\0')
        {
          const struct processors * sel;

          for (sel = ptr->processors; sel->name != NULL; sel++)
            if (streq (ptr->string, sel->name))
              {
                if (i == 2)
                  tune_flags = sel->flags;
                else
                  {
                    /* If we have been given an architecture and a processor
                       make sure that they are compatible.  We only generate
                       a warning though, and we prefer the CPU over the
                       architecture.  */
                    if (insn_flags != 0 && (insn_flags ^ sel->flags))
                      warning ("switch -mcpu=%s conflicts with -march= switch",
                               ptr->string);

                    insn_flags = sel->flags;
                  }

                break;
              }

          if (sel->name == NULL)
            error ("bad value (%s) for %s switch", ptr->string, ptr->name);
        }
    }

  /* If the user did not specify a processor, choose one for them.  */
  if (insn_flags == 0)
    {
      const struct processors * sel;
      unsigned int sought;
      static const struct cpu_default
      {
        const int cpu;
        const char *const name;
      }
      cpu_defaults[] =
      {
        { TARGET_CPU_arm2,      "arm2" },
        { TARGET_CPU_arm6,      "arm6" },
        { TARGET_CPU_arm610,    "arm610" },
        { TARGET_CPU_arm710,    "arm710" },
        { TARGET_CPU_arm7m,     "arm7m" },
        { TARGET_CPU_arm7500fe, "arm7500fe" },
        { TARGET_CPU_arm7tdmi,  "arm7tdmi" },
        { TARGET_CPU_arm8,      "arm8" },
        { TARGET_CPU_arm810,    "arm810" },
        { TARGET_CPU_arm9,      "arm9" },
        { TARGET_CPU_strongarm, "strongarm" },
        { TARGET_CPU_xscale,    "xscale" },
        { TARGET_CPU_ep9312,    "ep9312" },
        { TARGET_CPU_iwmmxt,    "iwmmxt" },
        { TARGET_CPU_generic,   "arm" },
        { 0, 0 }
      };
      const struct cpu_default * def;

      /* Find the default.  */
      for (def = cpu_defaults; def->name; def++)
        if (def->cpu == TARGET_CPU_DEFAULT)
          break;

      /* Make sure we found the default CPU.  */
      if (def->name == NULL)
        abort ();

      /* Find the default CPU's flags.  */
      for (sel = all_cores; sel->name != NULL; sel++)
        if (streq (def->name, sel->name))
          break;

      if (sel->name == NULL)
        abort ();

      insn_flags = sel->flags;

      /* Now check to see if the user has specified any command line
         switches that require certain abilities from the CPU.  */
      sought = 0;

      if (TARGET_INTERWORK || TARGET_THUMB)
        {
          sought |= (FL_THUMB | FL_MODE32);

          /* Force apcs-32 to be used for interworking.  */
          target_flags |= ARM_FLAG_APCS_32;

          /* There are no ARM processors that support both APCS-26 and
             interworking.  Therefore we force FL_MODE26 to be removed
             from insn_flags here (if it was set), so that the search
             below will always be able to find a compatible processor.  */
          insn_flags &= ~FL_MODE26;
        }
      else if (!TARGET_APCS_32)
        sought |= FL_MODE26;

      if (sought != 0 && ((sought & insn_flags) != sought))
        {
          /* Try to locate a CPU type that supports all of the abilities
             of the default CPU, plus the extra abilities requested by
             the user.  */
          for (sel = all_cores; sel->name != NULL; sel++)
            if ((sel->flags & sought) == (sought | insn_flags))
              break;

          if (sel->name == NULL)
            {
              unsigned current_bit_count = 0;
              const struct processors * best_fit = NULL;

              /* Ideally we would like to issue an error message here
                 saying that it was not possible to find a CPU compatible
                 with the default CPU, but which also supports the command
                 line options specified by the programmer, and so they
                 ought to use the -mcpu=<name> command line option to
                 override the default CPU type.

                 Unfortunately this does not work with multilibing.  We
                 need to be able to support multilibs for -mapcs-26 and for
                 -mthumb-interwork and there is no CPU that can support both
                 options.  Instead if we cannot find a CPU that has both the
                 characteristics of the default CPU and the given command line
                 options we scan the array again looking for a best match.  */
              for (sel = all_cores; sel->name != NULL; sel++)
                if ((sel->flags & sought) == sought)
                  {
                    unsigned count;

                    count = bit_count (sel->flags & insn_flags);

                    if (count >= current_bit_count)
                      {
                        best_fit = sel;
                        current_bit_count = count;
                      }
                  }

              if (best_fit == NULL)
                abort ();
              else
                sel = best_fit;
            }

          insn_flags = sel->flags;
        }
    }

  /* If tuning has not been specified, tune for whichever processor or
     architecture has been selected.  */
  if (tune_flags == 0)
    tune_flags = insn_flags;

  /* Make sure that the processor choice does not conflict with any of the
     other command line choices.  */
  if (TARGET_APCS_32 && !(insn_flags & FL_MODE32))
    {
      /* If APCS-32 was not the default then it must have been set by the
         user, so issue a warning message.  If the user has specified
         "-mapcs-32 -mcpu=arm2" then we lose here.  */
      if ((TARGET_DEFAULT & ARM_FLAG_APCS_32) == 0)
        warning ("target CPU does not support APCS-32");
      target_flags &= ~ARM_FLAG_APCS_32;
    }
  else if (!TARGET_APCS_32 && !(insn_flags & FL_MODE26))
    {
      warning ("target CPU does not support APCS-26");
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_INTERWORK && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support interworking");
      target_flags &= ~ARM_FLAG_INTERWORK;
    }

  if (TARGET_THUMB && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support THUMB instructions");
      target_flags &= ~ARM_FLAG_THUMB;
    }

  if (TARGET_APCS_FRAME && TARGET_THUMB)
    {
      /* warning ("ignoring -mapcs-frame because -mthumb was used"); */
      target_flags &= ~ARM_FLAG_APCS_FRAME;
    }

  /* TARGET_BACKTRACE calls leaf_function_p, which causes a crash if done
     from here where no function is being compiled currently.  */
  if ((target_flags & (THUMB_FLAG_LEAF_BACKTRACE | THUMB_FLAG_BACKTRACE))
      && TARGET_ARM)
    warning ("enabling backtrace support is only meaningful when compiling for the Thumb");

  if (TARGET_ARM && TARGET_CALLEE_INTERWORKING)
    warning ("enabling callee interworking support is only meaningful when compiling for the Thumb");

  if (TARGET_ARM && TARGET_CALLER_INTERWORKING)
    warning ("enabling caller interworking support is only meaningful when compiling for the Thumb");

  /* If interworking is enabled then APCS-32 must be selected as well.  */
  if (TARGET_INTERWORK)
    {
      if (!TARGET_APCS_32)
        warning ("interworking forces APCS-32 to be used");
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_APCS_STACK && !TARGET_APCS_FRAME)
    {
      warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
      target_flags |= ARM_FLAG_APCS_FRAME;
    }

  if (TARGET_POKE_FUNCTION_NAME)
    target_flags |= ARM_FLAG_APCS_FRAME;

  if (TARGET_APCS_REENT && flag_pic)
    error ("-fpic and -mapcs-reent are incompatible");

  if (TARGET_APCS_REENT)
    warning ("APCS reentrant code not supported.  Ignored");

  /* If this target is normally configured to use APCS frames, warn if they
     are turned off and debugging is turned on.  */
  if (TARGET_ARM
      && write_symbols != NO_DEBUG
      && !TARGET_APCS_FRAME
      && (TARGET_DEFAULT & ARM_FLAG_APCS_FRAME))
    warning ("-g with -mno-apcs-frame may not give sensible debugging");

  /* If stack checking is disabled, we can use r10 as the PIC register,
     which keeps r9 available.  */
  if (flag_pic)
    arm_pic_register = TARGET_APCS_STACK ? 9 : 10;

  if (TARGET_APCS_FLOAT)
    warning ("passing floating point arguments in fp regs not yet supported");

  /* Initialize boolean versions of the flags, for use in the arm.md file.  */
  arm_fast_multiply = (insn_flags & FL_FAST_MULT) != 0;
  arm_arch4         = (insn_flags & FL_ARCH4) != 0;
  arm_arch5         = (insn_flags & FL_ARCH5) != 0;
  arm_arch5e        = (insn_flags & FL_ARCH5E) != 0;
  arm_arch_xscale   = (insn_flags & FL_XSCALE) != 0;

  arm_ld_sched      = (tune_flags & FL_LDSCHED) != 0;
  arm_is_strong     = (tune_flags & FL_STRONG) != 0;
  thumb_code        = (TARGET_ARM == 0);
  arm_is_6_or_7     = (((tune_flags & (FL_MODE26 | FL_MODE32))
                       && !(tune_flags & FL_ARCH4))) != 0;
  arm_tune_xscale   = (tune_flags & FL_XSCALE) != 0;
  arm_is_cirrus     = (tune_flags & FL_CIRRUS) != 0;
  arm_arch_iwmmxt   = (insn_flags & FL_IWMMXT) != 0;

  if (TARGET_IWMMXT && (! TARGET_ATPCS))
    target_flags |= ARM_FLAG_ATPCS;

  if (arm_is_cirrus)
    {
      arm_fpu_tune = FPUTYPE_MAVERICK;

      /* Ignore -mhard-float if -mcpu=ep9312.  */
      if (TARGET_HARD_FLOAT)
        target_flags ^= ARM_FLAG_SOFT_FLOAT;
    }
  else
    /* Default value for floating point code... if no co-processor
       bus, then schedule for emulated floating point.  Otherwise,
       assume the user has an FPA.
       Note: this does not prevent use of floating point instructions,
       -msoft-float does that.  */
    arm_fpu_tune = (tune_flags & FL_CO_PROC) ? FPUTYPE_FPA : FPUTYPE_FPA_EMU3;

  if (target_fp_name)
    {
      if (streq (target_fp_name, "2"))
        arm_fpu_arch = FPUTYPE_FPA_EMU2;
      else if (streq (target_fp_name, "3"))
        arm_fpu_arch = FPUTYPE_FPA_EMU3;
      else
        error ("invalid floating point emulation option: -mfpe-%s",
               target_fp_name);
    }
  else
    arm_fpu_arch = FPUTYPE_DEFAULT;

  if (TARGET_FPE)
    {
      if (arm_fpu_tune == FPUTYPE_FPA_EMU3)
        arm_fpu_tune = FPUTYPE_FPA_EMU2;
      else if (arm_fpu_tune == FPUTYPE_MAVERICK)
        warning ("-mfpe switch not supported by ep9312 target cpu - ignored.");
      else if (arm_fpu_tune != FPUTYPE_FPA)
        arm_fpu_tune = FPUTYPE_FPA_EMU2;
    }

  /* For arm2/3 there is no need to do any scheduling if there is only
     a floating point emulator, or we are doing software floating-point.  */
  if ((TARGET_SOFT_FLOAT || arm_fpu_tune != FPUTYPE_FPA)
      && (tune_flags & FL_MODE32) == 0)
    flag_schedule_insns = flag_schedule_insns_after_reload = 0;

  arm_prgmode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;

  if (structure_size_string != NULL)
    {
      int size = strtol (structure_size_string, NULL, 0);

      if (size == 8 || size == 32)
        arm_structure_size_boundary = size;
      else
        warning ("structure size boundary can only be set to 8 or 32");
    }

  if (arm_pic_register_string != NULL)
    {
      int pic_register = decode_reg_name (arm_pic_register_string);

      if (!flag_pic)
        warning ("-mpic-register= is useless without -fpic");

      /* Prevent the user from choosing an obviously stupid PIC register.  */
      else if (pic_register < 0 || call_used_regs[pic_register]
               || pic_register == HARD_FRAME_POINTER_REGNUM
               || pic_register == STACK_POINTER_REGNUM
               || pic_register >= PC_REGNUM)
        error ("unable to use '%s' for PIC register", arm_pic_register_string);
      else
        arm_pic_register = pic_register;
    }

  if (TARGET_THUMB && flag_schedule_insns)
    {
      /* Don't warn since it's on by default in -O2.  */
      flag_schedule_insns = 0;
    }

  /* If optimizing for space, don't synthesize constants.
     For processors with load scheduling, it never costs more than 2 cycles
     to load a constant, and the load scheduler may well reduce that to 1.  */
  if (optimize_size || (tune_flags & FL_LDSCHED))
    arm_constant_limit = 1;

  if (arm_arch_xscale)
    arm_constant_limit = 2;

  /* If optimizing for size, bump the number of instructions that we
     are prepared to conditionally execute (even on a StrongARM).
     Otherwise for the StrongARM, which has early execution of branches,
     a sequence that is worth skipping is shorter.  */
  if (optimize_size)
    max_insns_skipped = 6;
  else if (arm_is_strong)
    max_insns_skipped = 3;

  /* Register global variables with the garbage collector.  */
  arm_add_gc_roots ();
}

static void
arm_add_gc_roots (void)
{
  gcc_obstack_init (&minipool_obstack);
  minipool_startobj = (char *) obstack_alloc (&minipool_obstack, 0);
}

/* A table of known ARM exception types.
   For use with the interrupt function attribute.  */

typedef struct
{
  const char *const arg;
  const unsigned long return_value;
}
isr_attribute_arg;

static const isr_attribute_arg isr_attribute_args [] =
{
  { "IRQ",   ARM_FT_ISR },
  { "irq",   ARM_FT_ISR },
  { "FIQ",   ARM_FT_FIQ },
  { "fiq",   ARM_FT_FIQ },
  { "ABORT", ARM_FT_ISR },
  { "abort", ARM_FT_ISR },
  { "UNDEF", ARM_FT_EXCEPTION },
  { "undef", ARM_FT_EXCEPTION },
  { "SWI",   ARM_FT_EXCEPTION },
  { "swi",   ARM_FT_EXCEPTION },
  { NULL,    ARM_FT_NORMAL }
};
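
/* These entries are matched against the argument of the "isr" (or
   "interrupt") function attribute.  For instance, a handler declared as

       void handler (void) __attribute__ ((interrupt ("IRQ")));

   is compiled with the IRQ entry and exit conventions.  */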

/* Returns the (interrupt) function type of the current
   function, or ARM_FT_UNKNOWN if the type cannot be determined.  */

static unsigned long
arm_isr_value (tree argument)
{
  const isr_attribute_arg * ptr;
  const char *              arg;

  /* No argument - default to IRQ.  */
  if (argument == NULL_TREE)
    return ARM_FT_ISR;

  /* Get the value of the argument.  */
  if (TREE_VALUE (argument) == NULL_TREE
      || TREE_CODE (TREE_VALUE (argument)) != STRING_CST)
    return ARM_FT_UNKNOWN;

  arg = TREE_STRING_POINTER (TREE_VALUE (argument));

  /* Check it against the list of known arguments.  */
  for (ptr = isr_attribute_args; ptr->arg != NULL; ptr++)
    if (streq (arg, ptr->arg))
      return ptr->return_value;

  /* An unrecognized interrupt type.  */
  return ARM_FT_UNKNOWN;
}

/* Computes the type of the current function.  */

static unsigned long
arm_compute_func_type (void)
{
  unsigned long type = ARM_FT_UNKNOWN;
  tree a;
  tree attr;

  if (TREE_CODE (current_function_decl) != FUNCTION_DECL)
    abort ();

  /* Decide if the current function is volatile.  Such functions
     never return, and many memory cycles can be saved by not storing
     register values that will never be needed again.  This optimization
     was added to speed up context switching in a kernel application.  */
  if (optimize > 0
      && current_function_nothrow
      && TREE_THIS_VOLATILE (current_function_decl))
    type |= ARM_FT_VOLATILE;

  if (current_function_needs_context)
    type |= ARM_FT_NESTED;

  attr = DECL_ATTRIBUTES (current_function_decl);

  a = lookup_attribute ("naked", attr);
  if (a != NULL_TREE)
    type |= ARM_FT_NAKED;

  if (cfun->machine->eh_epilogue_sp_ofs != NULL_RTX)
    type |= ARM_FT_EXCEPTION_HANDLER;
  else
    {
      a = lookup_attribute ("isr", attr);
      if (a == NULL_TREE)
        a = lookup_attribute ("interrupt", attr);

      if (a == NULL_TREE)
        type |= TARGET_INTERWORK ? ARM_FT_INTERWORKED : ARM_FT_NORMAL;
      else
        type |= arm_isr_value (TREE_VALUE (a));
    }

  return type;
}

/* Returns the type of the current function.  */

unsigned long
arm_current_func_type (void)
{
  if (ARM_FUNC_TYPE (cfun->machine->func_type) == ARM_FT_UNKNOWN)
    cfun->machine->func_type = arm_compute_func_type ();

  return cfun->machine->func_type;
}

/* Return 1 if it is possible to return using a single instruction.  */

int
use_return_insn (int iscond)
{
  int regno;
  unsigned int func_type;
  unsigned long saved_int_regs;

  /* Never use a return instruction before reload has run.  */
  if (!reload_completed)
    return 0;

  func_type = arm_current_func_type ();

  /* Naked functions and volatile functions need special
     consideration.  */
  if (func_type & (ARM_FT_VOLATILE | ARM_FT_NAKED))
    return 0;

  /* So do interrupt functions that use the frame pointer.  */
  if (IS_INTERRUPT (func_type) && frame_pointer_needed)
    return 0;

  /* As do variadic functions.  */
  if (current_function_pretend_args_size
      || cfun->machine->uses_anonymous_args
      /* Or if the function calls __builtin_eh_return ().  */
      || ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER
      /* Or if there is no frame pointer and there is a stack adjustment.  */
      || ((arm_get_frame_size () + current_function_outgoing_args_size != 0)
          && !frame_pointer_needed))
    return 0;

  saved_int_regs = arm_compute_save_reg_mask ();

  /* Can't be done if interworking with Thumb, and any registers have been
     stacked.  */
  if (TARGET_INTERWORK && saved_int_regs != 0)
    return 0;

  /* On StrongARM, conditional returns are expensive if they aren't
     taken and multiple registers have been stacked.  */
  if (iscond && arm_is_strong)
    {
      /* Conditional return when just the LR is stored is a simple
         conditional-load instruction, that's not expensive.  */
      if (saved_int_regs != 0 && saved_int_regs != (1 << LR_REGNUM))
        return 0;

      if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
        return 0;
    }

  /* If there are saved registers but the LR isn't saved, then we need
     two instructions for the return.  */
  if (saved_int_regs && !(saved_int_regs & (1 << LR_REGNUM)))
    return 0;

  /* Can't be done if any of the FPA regs are pushed,
     since this also requires an insn.  */
  if (TARGET_HARD_FLOAT)
    for (regno = FIRST_ARM_FP_REGNUM; regno <= LAST_ARM_FP_REGNUM; regno++)
      if (regs_ever_live[regno] && !call_used_regs[regno])
        return 0;

  if (TARGET_REALLY_IWMMXT)
    for (regno = FIRST_IWMMXT_REGNUM; regno <= LAST_IWMMXT_REGNUM; regno++)
      if (regs_ever_live[regno] && ! call_used_regs [regno])
        return 0;

  return 1;
}
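
/* When use_return_insn holds, the whole epilogue can be folded into
   the return itself: for example "mov pc, lr" in a leaf function that
   saved nothing, or a single "ldmfd sp!, {..., pc}" when the saved
   registers (including LR) can be popped straight back.  The checks
   above reject anything that would need extra adjustment insns.  */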

/* Return TRUE if int I is a valid immediate ARM constant.  */

int
const_ok_for_arm (HOST_WIDE_INT i)
{
  unsigned HOST_WIDE_INT mask = ~(unsigned HOST_WIDE_INT)0xFF;

  /* For machines with >32 bit HOST_WIDE_INT, the bits above bit 31 must
     be all zero, or all one.  */
  if ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff) != 0
      && ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff)
          != ((~(unsigned HOST_WIDE_INT) 0)
              & ~(unsigned HOST_WIDE_INT) 0xffffffff)))
    return FALSE;

  /* Fast return for 0 and powers of 2 */
  if ((i & (i - 1)) == 0)
    return TRUE;

  do
    {
      if ((i & mask & (unsigned HOST_WIDE_INT) 0xffffffff) == 0)
        return TRUE;
      mask =
          (mask << 2) | ((mask & (unsigned HOST_WIDE_INT) 0xffffffff)
                         >> (32 - 2)) | ~(unsigned HOST_WIDE_INT) 0xffffffff;
    }
  while (mask != ~(unsigned HOST_WIDE_INT) 0xFF);

  return FALSE;
}
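
/* The loop above encodes the ARM rule that a data-processing
   immediate is an 8-bit value rotated right by an even amount within
   the 32-bit word.  For example 0xff, 0xff00 and 0xff000000 are all
   valid immediates, as is 0xf000000f (0xff rotated right by 4),
   whereas 0x101 spans more than eight significant bits and must be
   synthesized from several instructions.  */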

/* Return true if I is a valid constant for the operation CODE.  */
static int
const_ok_for_op (HOST_WIDE_INT i, enum rtx_code code)
{
  if (const_ok_for_arm (i))
    return 1;

  switch (code)
    {
    case PLUS:
      return const_ok_for_arm (ARM_SIGN_EXTEND (-i));

    case MINUS:         /* Should only occur with (MINUS I reg) => rsb */
    case XOR:
    case IOR:
      return 0;

    case AND:
      return const_ok_for_arm (ARM_SIGN_EXTEND (~i));

    default:
      abort ();
    }
}
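
/* The transformations above reflect instruction selection: a PLUS of
   -256, say, can be emitted as SUB with #256, and an AND with
   0xffffff00 (not itself a valid immediate) can be emitted as BIC
   with #0xff, so such constants are acceptable after negation or
   inversion respectively.  */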

/* Emit a sequence of insns to handle a large constant.
   CODE is the code of the operation required, it can be any of SET, PLUS,
   IOR, AND, XOR, MINUS;
   MODE is the mode in which the operation is being performed;
   VAL is the integer to operate on;
   SOURCE is the other operand (a register, or a null-pointer for SET);
   SUBTARGETS means it is safe to create scratch registers if that will
   either produce a simpler sequence, or we will want to cse the values.
   Return value is the number of insns emitted.  */

int
arm_split_constant (enum rtx_code code, enum machine_mode mode,
                    HOST_WIDE_INT val, rtx target, rtx source, int subtargets)
{
  if (subtargets || code == SET
      || (GET_CODE (target) == REG && GET_CODE (source) == REG
          && REGNO (target) != REGNO (source)))
    {
      /* After arm_reorg has been called, we can't fix up expensive
         constants by pushing them into memory so we must synthesize
         them in-line, regardless of the cost.  This is only likely to
         be more costly on chips that have load delay slots and we are
         compiling without running the scheduler (so no splitting
         occurred before the final instruction emission).

         Ref: gcc -O1 -mcpu=strongarm gcc.c-torture/compile/980506-2.c  */
      if (!after_arm_reorg
          && (arm_gen_constant (code, mode, val, target, source, 1, 0)
              > arm_constant_limit + (code != SET)))
        {
          if (code == SET)
            {
              /* Currently SET is the only monadic value for CODE, all
                 the rest are dyadic.  */
              emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (val)));
              return 1;
            }
          else
            {
              rtx temp = subtargets ? gen_reg_rtx (mode) : target;

              emit_insn (gen_rtx_SET (VOIDmode, temp, GEN_INT (val)));
              /* For MINUS, the value is subtracted from, since we never
                 have subtraction of a constant.  */
              if (code == MINUS)
                emit_insn (gen_rtx_SET (VOIDmode, target,
                                        gen_rtx_MINUS (mode, temp, source)));
              else
                emit_insn (gen_rtx_SET (VOIDmode, target,
                                        gen_rtx (code, mode, source, temp)));
              return 2;
            }
        }
    }

  return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
}
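
/* For example, (set r0 #0x00ff00ff) cannot be done in one instruction,
   but two suffice: "mov r0, #0xff" followed by "orr r0, r0, #0xff0000",
   which is within the default arm_constant_limit of 3.  Counting and
   emitting such sequences is the job of arm_gen_constant below.  */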

static int
count_insns_for_constant (HOST_WIDE_INT remainder, int i)
{
  HOST_WIDE_INT temp1;
  int num_insns = 0;
  do
    {
      int end;

      if (i <= 0)
        i += 32;
      if (remainder & (3 << (i - 2)))
        {
          end = i - 8;
          if (end < 0)
            end += 32;
          temp1 = remainder & ((0x0ff << end)
                               | ((i < end) ? (0xff >> (32 - end)) : 0));
          remainder &= ~temp1;
          num_insns++;
          i -= 6;
        }
      i -= 2;
    } while (remainder);
  return num_insns;
}

/* As above, but extra parameter GENERATE which, if clear, suppresses
   RTL generation.  */

static int
arm_gen_constant (enum rtx_code code, enum machine_mode mode,
                  HOST_WIDE_INT val, rtx target, rtx source, int subtargets,
                  int generate)
{
  int can_invert = 0;
  int can_negate = 0;
  int can_negate_initial = 0;
  int can_shift = 0;
  int i;
  int num_bits_set = 0;
  int set_sign_bit_copies = 0;
  int clear_sign_bit_copies = 0;
  int clear_zero_bit_copies = 0;
  int set_zero_bit_copies = 0;
  int insns = 0;
  unsigned HOST_WIDE_INT temp1, temp2;
  unsigned HOST_WIDE_INT remainder = val & 0xffffffff;

  /* Find out which operations are safe for a given CODE.  Also do a quick
     check for degenerate cases; these can occur when DImode operations
     are split.  */
  switch (code)
    {
    case SET:
      can_invert = 1;
      can_shift = 1;
      can_negate = 1;
      break;

    case PLUS:
      can_negate = 1;
      can_negate_initial = 1;
      break;

    case IOR:
      if (remainder == 0xffffffff)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    GEN_INT (ARM_SIGN_EXTEND (val))));
          return 1;
        }
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      break;

    case AND:
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, const0_rtx));
          return 1;
        }
      if (remainder == 0xffffffff)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      can_invert = 1;
      break;

    case XOR:
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      if (remainder == 0xffffffff)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_NOT (mode, source)));
          return 1;
        }

      /* We don't know how to handle this yet below.  */
      abort ();

    case MINUS:
      /* We treat MINUS as (val - source), since (source - val) is always
         passed as (source + (-val)).  */
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_NEG (mode, source)));
          return 1;
        }
      if (const_ok_for_arm (val))
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_MINUS (mode, GEN_INT (val),
                                                   source)));
          return 1;
        }
      can_negate = 1;

      break;

    default:
      abort ();
    }

  /* If we can do it in one insn get out quickly.  */
  if (const_ok_for_arm (val)
      || (can_negate_initial && const_ok_for_arm (-val))
      || (can_invert && const_ok_for_arm (~val)))
    {
      if (generate)
        emit_insn (gen_rtx_SET (VOIDmode, target,
                                (source ? gen_rtx (code, mode, source,
                                                   GEN_INT (val))
                                 : GEN_INT (val))));
      return 1;
    }

  /* Calculate a few attributes that may be useful for specific
     optimizations.  */
  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) == 0)
        clear_sign_bit_copies++;
      else
        break;
    }

  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) != 0)
        set_sign_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) == 0)
        clear_zero_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) != 0)
        set_zero_bit_copies++;
      else
        break;
    }

  switch (code)
    {
    case SET:
      /* See if we can do this by sign_extending a constant that is known
         to be negative.  This is a good way of doing it, since the shift
         may well merge into a subsequent insn.  */
      if (set_sign_bit_copies > 1)
        {
          if (const_ok_for_arm
              (temp1 = ARM_SIGN_EXTEND (remainder
                                        << (set_sign_bit_copies - 1))))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx_SET (VOIDmode, new_src,
                                          GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
          /* For an inverted constant, we will need to set the low bits,
             these will be shifted out of harm's way.  */
          temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
          if (const_ok_for_arm (~temp1))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx_SET (VOIDmode, new_src,
                                          GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
        }

      /* See if we can generate this by setting the bottom (or the top)
         16 bits, and then shifting these into the other half of the
         word.  We only look for the simplest cases, to do more would cost
         too much.  Be careful, however, not to generate this when the
         alternative would take fewer insns.  */
      if (val & 0xffff0000)
        {
          temp1 = remainder & 0xffff0000;
          temp2 = remainder & 0x0000ffff;

          /* Overlaps outside this range are best done using other methods.  */
          for (i = 9; i < 24; i++)
            {
              if ((((temp2 | (temp2 << i)) & 0xffffffff) == remainder)
                  && !const_ok_for_arm (temp2))
                {
                  rtx new_src = (subtargets
                                 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
                                 : target);
                  insns = arm_gen_constant (code, mode, temp2, new_src,
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn (gen_rtx_SET
                               (VOIDmode, target,
                                gen_rtx_IOR (mode,
                                             gen_rtx_ASHIFT (mode, source,
                                                             GEN_INT (i)),
                                             source)));
                  return insns + 1;
                }
            }

          /* Don't duplicate cases already considered.  */
          for (i = 17; i < 24; i++)
            {
              if (((temp1 | (temp1 >> i)) == remainder)
                  && !const_ok_for_arm (temp1))
                {
                  rtx new_src = (subtargets
                                 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
                                 : target);
                  insns = arm_gen_constant (code, mode, temp1, new_src,
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn
                      (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_IOR
                                    (mode,
                                     gen_rtx_LSHIFTRT (mode, source,
                                                       GEN_INT (i)),
                                     source)));
                  return insns + 1;
                }
            }
        }
      break;

    case IOR:
    case XOR:
      /* If we have IOR or XOR, and the constant can be loaded in a
         single instruction, and we can find a temporary to put it in,
         then this can be done in two instructions instead of 3-4.  */
      if (subtargets
          /* TARGET can't be NULL if SUBTARGETS is 0 */
          || (reload_completed && !reg_mentioned_p (target, source)))
        {
          if (const_ok_for_arm (ARM_SIGN_EXTEND (~val)))
            {
              if (generate)
                {
                  rtx sub = subtargets ? gen_reg_rtx (mode) : target;

                  emit_insn (gen_rtx_SET (VOIDmode, sub, GEN_INT (val)));
                  emit_insn (gen_rtx_SET (VOIDmode, target,
                                          gen_rtx (code, mode, source, sub)));
                }
              return 2;
            }
        }

      if (code == XOR)
        break;

      if (set_sign_bit_copies > 8
          && (val & (-1 << (32 - set_sign_bit_copies))) == val)
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (set_sign_bit_copies);

              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_ASHIFT (mode,
                                                                   source,
                                                                   shift))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_LSHIFTRT (mode, sub,
                                                                     shift))));
            }
          return 2;
        }

      if (set_zero_bit_copies > 8
          && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (set_zero_bit_copies);

              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_LSHIFTRT (mode,
                                                                     source,
                                                                     shift))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_ASHIFT (mode, sub,
                                                                   shift))));
            }
          return 2;
        }

      if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~val)))
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode, source)));
              source = sub;
              if (subtargets)
                sub = gen_reg_rtx (mode);
              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_AND (mode, source,
                                                   GEN_INT (temp1))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode, sub)));
            }
          return 3;
        }
      break;

    case AND:
      /* See if two shifts will do 2 or more insn's worth of work.  */
      if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
        {
          HOST_WIDE_INT shift_mask = ((0xffffffff
                                       << (32 - clear_sign_bit_copies))
                                      & 0xffffffff);

          if ((remainder | shift_mask) != 0xffffffff)
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            new_src, source, subtargets, 1);
                  source = new_src;
                }
              else
                {
                  rtx targ = subtargets ? NULL_RTX : target;
                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            targ, source, subtargets, 0);
                }
            }

          if (generate)
            {
              rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (clear_sign_bit_copies);

              emit_insn (gen_ashlsi3 (new_src, source, shift));
              emit_insn (gen_lshrsi3 (target, new_src, shift));
            }

          return insns + 2;
        }

      if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
        {
          HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;

          if ((remainder | shift_mask) != 0xffffffff)
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;

                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            new_src, source, subtargets, 1);
                  source = new_src;
                }
              else
                {
                  rtx targ = subtargets ? NULL_RTX : target;

                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            targ, source, subtargets, 0);
                }
            }

          if (generate)
            {
              rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (clear_zero_bit_copies);

              emit_insn (gen_lshrsi3 (new_src, source, shift));
              emit_insn (gen_ashlsi3 (target, new_src, shift));
            }

          return insns + 2;
        }

      break;

    default:
      break;
    }

  for (i = 0; i < 32; i++)
    if (remainder & (1 << i))
      num_bits_set++;

  if (code == AND || (can_invert && num_bits_set > 16))
    remainder = (~remainder) & 0xffffffff;
  else if (code == PLUS && num_bits_set > 16)
    remainder = (-remainder) & 0xffffffff;
  else
    {
      can_invert = 0;
      can_negate = 0;
    }

  /* Now try and find a way of doing the job in either two or three
     instructions.
     We start by looking for the largest block of zeros that are aligned on
     a 2-bit boundary, we then fill up the temps, wrapping around to the
     top of the word when we drop off the bottom.
     In the worst case this code should produce no more than four insns.  */
  {
    int best_start = 0;
    int best_consecutive_zeros = 0;

    for (i = 0; i < 32; i += 2)
      {
        int consecutive_zeros = 0;

        if (!(remainder & (3 << i)))
          {
            while ((i < 32) && !(remainder & (3 << i)))
              {
                consecutive_zeros += 2;
                i += 2;
              }
            if (consecutive_zeros > best_consecutive_zeros)
              {
                best_consecutive_zeros = consecutive_zeros;
                best_start = i - consecutive_zeros;
              }
            i -= 2;
          }
      }

    /* So long as it won't require any more insns to do so, it's
       desirable to emit a small constant (in bits 0...9) in the last
       insn.  This way there is more chance that it can be combined with
       a later addressing insn to form a pre-indexed load or store
       operation.  Consider:

               *((volatile int *)0xe0000100) = 1;
               *((volatile int *)0xe0000110) = 2;

       We want this to wind up as:

                mov rA, #0xe0000000
                mov rB, #1
                str rB, [rA, #0x100]
                mov rB, #2
                str rB, [rA, #0x110]

       rather than having to synthesize both large constants from scratch.

       Therefore, we calculate how many insns would be required to emit
       the constant starting from `best_start', and also starting from
       zero (i.e. with bit 31 first to be output).  If `best_start' doesn't
       yield a shorter sequence, we may as well use zero.  */
    if (best_start != 0
        && ((((unsigned HOST_WIDE_INT) 1) << best_start) < remainder)
        && (count_insns_for_constant (remainder, 0) <=
            count_insns_for_constant (remainder, best_start)))
      best_start = 0;

    /* Now start emitting the insns.  */
    i = best_start;
    do
      {
        int end;

        if (i <= 0)
          i += 32;
        if (remainder & (3 << (i - 2)))
          {
            end = i - 8;
            if (end < 0)
              end += 32;
            temp1 = remainder & ((0x0ff << end)
                                 | ((i < end) ? (0xff >> (32 - end)) : 0));
            remainder &= ~temp1;

            if (generate)
              {
                rtx new_src, temp1_rtx;

                if (code == SET || code == MINUS)
                  {
                    new_src = (subtargets ? gen_reg_rtx (mode) : target);
                    if (can_invert && code != MINUS)
                      temp1 = ~temp1;
                  }
                else
                  {
                    if (remainder && subtargets)
                      new_src = gen_reg_rtx (mode);
                    else
                      new_src = target;
                    if (can_invert)
                      temp1 = ~temp1;
                    else if (can_negate)
                      temp1 = -temp1;
                  }

                temp1 = trunc_int_for_mode (temp1, mode);
                temp1_rtx = GEN_INT (temp1);

                if (code == SET)
                  ;
                else if (code == MINUS)
                  temp1_rtx = gen_rtx_MINUS (mode, temp1_rtx, source);
                else
                  temp1_rtx = gen_rtx_fmt_ee (code, mode, source, temp1_rtx);

                emit_insn (gen_rtx_SET (VOIDmode, new_src, temp1_rtx));
                source = new_src;
              }

            if (code == SET)
              {
                can_invert = 0;
                code = PLUS;
              }
            else if (code == MINUS)
              code = PLUS;

            insns++;
            i -= 6;
          }
        i -= 2;
      }
    while (remainder);
  }

  return insns;
}

/* Canonicalize a comparison so that we are more likely to recognize it.
   This can be done for a few constant compares, where we can make the
   immediate value easier to load.  */

enum rtx_code
arm_canonicalize_comparison (enum rtx_code code, rtx * op1)
{
  unsigned HOST_WIDE_INT i = INTVAL (*op1);

  switch (code)
    {
    case EQ:
    case NE:
      return code;

    case GT:
    case LE:
      if (i != ((((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1)) - 1)
          && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
        {
          *op1 = GEN_INT (i + 1);
          return code == GT ? GE : LT;
        }
      break;

    case GE:
    case LT:
      if (i != (((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1))
          && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
        {
          *op1 = GEN_INT (i - 1);
          return code == GE ? GT : LE;
        }
      break;

    case GTU:
    case LEU:
      if (i != ~((unsigned HOST_WIDE_INT) 0)
          && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
        {
          *op1 = GEN_INT (i + 1);
          return code == GTU ? GEU : LTU;
        }
      break;

    case GEU:
    case LTU:
      if (i != 0
          && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
        {
          *op1 = GEN_INT (i - 1);
          return code == GEU ? GTU : LEU;
        }
      break;

    default:
      abort ();
    }

  return code;
}
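
/* For example, "x > 0xfff" would need the unencodable constant 0xfff,
   but the equivalent "x >= 0x1000" uses a valid immediate, so GT is
   rewritten as GE with the operand bumped by one.  The boundary tests
   above skip the rewrite when i + 1 or i - 1 would wrap around.  */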
1830 /* Decide whether a type should be returned in memory (true)
1831 or in a register (false). This is called by the macro
1832 RETURN_IN_MEMORY. */
1834 arm_return_in_memory (tree type)
1836 HOST_WIDE_INT size;
1838 if (!AGGREGATE_TYPE_P (type))
1839 /* All simple types are returned in registers. */
1840 return 0;
1842 size = int_size_in_bytes (type);
1844 if (TARGET_ATPCS)
1846 /* ATPCS returns aggregate types in memory only if they are
1847 larger than a word (or are variable size). */
1848 return (size < 0 || size > UNITS_PER_WORD);
1851 /* For the arm-wince targets we choose to be compatible with Microsoft's
1852 ARM and Thumb compilers, which always return aggregates in memory. */
1853 #ifndef ARM_WINCE
1854 /* All structures/unions bigger than one word are returned in memory.
1855 Also catch the case where int_size_in_bytes returns -1. In this case
1856 the aggregate is either huge or of variable size, and in either case
1857 we will want to return it via memory and not in a register. */
1858 if (size < 0 || size > UNITS_PER_WORD)
1859 return 1;
1861 if (TREE_CODE (type) == RECORD_TYPE)
1863 tree field;
1865 /* For a struct the APCS says that we only return in a register
1866 if the type is 'integer like' and every addressable element
1867 has an offset of zero. For practical purposes this means
1868 that the structure can have at most one non bit-field element
1869 and that this element must be the first one in the structure. */
1871 /* Find the first field, ignoring non-FIELD_DECL things which will
1872 have been created by C++. */
1873 for (field = TYPE_FIELDS (type);
1874 field && TREE_CODE (field) != FIELD_DECL;
1875 field = TREE_CHAIN (field))
1876 continue;
1878 if (field == NULL)
1879 return 0; /* An empty structure. Allowed by an extension to ANSI C. */
1881 /* Check that the first field is valid for returning in a register. */
1883 /* ... Floats are not allowed. */
1884 if (FLOAT_TYPE_P (TREE_TYPE (field)))
1885 return 1;
1887 /* ... Aggregates that are not themselves valid for returning in
1888 a register are not allowed. */
1889 if (RETURN_IN_MEMORY (TREE_TYPE (field)))
1890 return 1;
1892 /* Now check the remaining fields, if any. Only bitfields are allowed,
1893 since they are not addressable. */
1894 for (field = TREE_CHAIN (field);
1895 field;
1896 field = TREE_CHAIN (field))
1898 if (TREE_CODE (field) != FIELD_DECL)
1899 continue;
1901 if (!DECL_BIT_FIELD_TYPE (field))
1902 return 1;
1905 return 0;
1908 if (TREE_CODE (type) == UNION_TYPE)
1910 tree field;
1912 /* Unions can be returned in registers if every element is
1913 integral, or can be returned in an integer register. */
1914 for (field = TYPE_FIELDS (type);
1915 field;
1916 field = TREE_CHAIN (field))
1918 if (TREE_CODE (field) != FIELD_DECL)
1919 continue;
1921 if (FLOAT_TYPE_P (TREE_TYPE (field)))
1922 return 1;
1924 if (RETURN_IN_MEMORY (TREE_TYPE (field)))
1925 return 1;
1928 return 0;
1930 #endif /* not ARM_WINCE */
1932 /* Return all other types in memory. */
1933 return 1;
1936 /* Indicate whether or not words of a double are in big-endian order. */
1939 arm_float_words_big_endian (void)
1941 if (TARGET_CIRRUS)
1942 return 0;
1944 /* For FPA, float words are always big-endian. For VFP, float words
1945 follow the memory system mode. */
1947 if (TARGET_HARD_FLOAT)
1949 /* FIXME: TARGET_HARD_FLOAT currently implies FPA. */
1950 return 1;
1953 if (TARGET_VFP)
1954 return (TARGET_BIG_END ? 1 : 0);
1956 return 1;
1959 /* Initialize a variable CUM of type CUMULATIVE_ARGS
1960 for a call to a function whose data type is FNTYPE.
1961 For a library call, FNTYPE is NULL. */
1962 void
1963 arm_init_cumulative_args (CUMULATIVE_ARGS *pcum, tree fntype,
1964 rtx libname ATTRIBUTE_UNUSED,
1965 tree fndecl ATTRIBUTE_UNUSED)
1967 /* On the ARM, the offset starts at 0. */
1968 pcum->nregs = ((fntype && aggregate_value_p (TREE_TYPE (fntype))) ? 1 : 0);
1969 pcum->iwmmxt_nregs = 0;
1971 pcum->call_cookie = CALL_NORMAL;
1973 if (TARGET_LONG_CALLS)
1974 pcum->call_cookie = CALL_LONG;
1976 /* Check for long call/short call attributes. The attributes
1977 override any command line option. */
1978 if (fntype)
1980 if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (fntype)))
1981 pcum->call_cookie = CALL_SHORT;
1982 else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (fntype)))
1983 pcum->call_cookie = CALL_LONG;
1986 /* Varargs vectors are treated the same as long long;
1987 named_count avoids having to change the way arm handles 'named'. */
1988 pcum->named_count = 0;
1989 pcum->nargs = 0;
1991 if (TARGET_REALLY_IWMMXT && fntype)
1993 tree fn_arg;
1995 for (fn_arg = TYPE_ARG_TYPES (fntype);
1996 fn_arg;
1997 fn_arg = TREE_CHAIN (fn_arg))
1998 pcum->named_count += 1;
2000 if (! pcum->named_count)
2001 pcum->named_count = INT_MAX;
2005 /* Determine where to put an argument to a function.
2006 Value is zero to push the argument on the stack,
2007 or a hard register in which to store the argument.
2009 MODE is the argument's machine mode.
2010 TYPE is the data type of the argument (as a tree).
2011 This is null for libcalls where that information may
2012 not be available.
2013 CUM is a variable of type CUMULATIVE_ARGS which gives info about
2014 the preceding args and about the function being called.
2015 NAMED is nonzero if this argument is a named parameter
2016 (otherwise it is an extra parameter matching an ellipsis). */
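/* For example, with the first four words of arguments passed in
   r0-r3, a call f (a, ll) with 'a' an int and 'll' a long long puts
   'a' in r0 and 'll' in r2/r3: DImode values are aligned to an even
   register number by the code below.  */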
2019 arm_function_arg (CUMULATIVE_ARGS *pcum, enum machine_mode mode,
2020 tree type ATTRIBUTE_UNUSED, int named)
2022 if (TARGET_REALLY_IWMMXT)
2024 if (VECTOR_MODE_SUPPORTED_P (mode))
2026 /* Varargs vectors are treated the same as long long;
2027 named_count avoids having to change the way arm handles 'named'. */
2028 if (pcum->named_count <= pcum->nargs + 1)
2030 if (pcum->nregs == 1)
2031 pcum->nregs += 1;
2032 if (pcum->nregs <= 2)
2033 return gen_rtx_REG (mode, pcum->nregs);
2034 else
2035 return NULL_RTX;
2037 else if (pcum->iwmmxt_nregs <= 9)
2038 return gen_rtx_REG (mode, pcum->iwmmxt_nregs + FIRST_IWMMXT_REGNUM);
2039 else
2040 return NULL_RTX;
2042 else if ((mode == DImode || mode == DFmode) && pcum->nregs & 1)
2043 pcum->nregs += 1;
2046 if (mode == VOIDmode)
2047 /* Compute operand 2 of the call insn. */
2048 return GEN_INT (pcum->call_cookie);
2050 if (!named || pcum->nregs >= NUM_ARG_REGS)
2051 return NULL_RTX;
2053 return gen_rtx_REG (mode, pcum->nregs);
2056 /* Variable sized types are passed by reference. This is a GCC
2057 extension to the ARM ABI. */
2060 arm_function_arg_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
2061 enum machine_mode mode ATTRIBUTE_UNUSED,
2062 tree type, int named ATTRIBUTE_UNUSED)
2064 return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
2067 /* Implement va_arg. */
2070 arm_va_arg (tree valist, tree type)
2072 /* Variable sized types are passed by reference. */
2073 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2075 rtx addr = std_expand_builtin_va_arg (valist, build_pointer_type (type));
2076 return gen_rtx_MEM (ptr_mode, force_reg (Pmode, addr));
2079 if (FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), NULL) == IWMMXT_ALIGNMENT)
2081 tree minus_eight;
2082 tree t;
2084 /* Maintain 64-bit alignment of the valist pointer by
2085 constructing: valist = ((valist + (8 - 1)) & -8). */
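/* For instance, a valist of 0x1004 becomes (0x1004 + 7) & -8
   == 0x1008, the next 64-bit boundary.  */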
2086 minus_eight = build_int_2 (- (IWMMXT_ALIGNMENT / BITS_PER_UNIT), -1);
2087 t = build_int_2 ((IWMMXT_ALIGNMENT / BITS_PER_UNIT) - 1, 0);
2088 t = build (PLUS_EXPR, TREE_TYPE (valist), valist, t);
2089 t = build (BIT_AND_EXPR, TREE_TYPE (t), t, minus_eight);
2090 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
2091 TREE_SIDE_EFFECTS (t) = 1;
2092 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2094 /* This is to stop the combine pass optimising
2095 away the alignment adjustment. */
2096 mark_reg_pointer (arg_pointer_rtx, PARM_BOUNDARY);
2099 return std_expand_builtin_va_arg (valist, type);
2102 /* Encode the current state of the #pragma [no_]long_calls. */
2103 typedef enum
2105 OFF, /* No #pragma [no_]long_calls is in effect. */
2106 LONG, /* #pragma long_calls is in effect. */
2107 SHORT /* #pragma no_long_calls is in effect. */
2108 } arm_pragma_enum;
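/* Typical usage in a source file (handled by the three callbacks
   below):
     #pragma long_calls          -- following functions get long_call
     #pragma no_long_calls       -- following functions get short_call
     #pragma long_calls_off      -- revert to the command-line default  */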
2110 static arm_pragma_enum arm_pragma_long_calls = OFF;
2112 void
2113 arm_pr_long_calls (struct cpp_reader * pfile ATTRIBUTE_UNUSED)
2115 arm_pragma_long_calls = LONG;
2118 void
2119 arm_pr_no_long_calls (struct cpp_reader * pfile ATTRIBUTE_UNUSED)
2121 arm_pragma_long_calls = SHORT;
2124 void
2125 arm_pr_long_calls_off (struct cpp_reader * pfile ATTRIBUTE_UNUSED)
2127 arm_pragma_long_calls = OFF;
2130 /* Table of machine attributes. */
2131 const struct attribute_spec arm_attribute_table[] =
2133 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
2134 /* Function calls made to this symbol must be done indirectly, because
2135 it may lie outside of the 26 bit addressing range of a normal function
2136 call. */
2137 { "long_call", 0, 0, false, true, true, NULL },
2138 /* Whereas these functions are always known to reside within the 26 bit
2139 addressing range. */
2140 { "short_call", 0, 0, false, true, true, NULL },
2141 /* Interrupt Service Routines have special prologue and epilogue requirements. */
2142 { "isr", 0, 1, false, false, false, arm_handle_isr_attribute },
2143 { "interrupt", 0, 1, false, false, false, arm_handle_isr_attribute },
2144 { "naked", 0, 0, true, false, false, arm_handle_fndecl_attribute },
2145 #ifdef ARM_PE
2146 /* ARM/PE has three new attributes:
2147 interfacearm - ?
2148 dllexport - for exporting a function/variable that will live in a dll
2149 dllimport - for importing a function/variable from a dll
2151 Microsoft allows multiple declspecs in one __declspec, separating
2152 them with spaces. We do NOT support this. Instead, use __declspec
2153 multiple times.
2155 { "dllimport", 0, 0, true, false, false, NULL },
2156 { "dllexport", 0, 0, true, false, false, NULL },
2157 { "interfacearm", 0, 0, true, false, false, arm_handle_fndecl_attribute },
2158 #endif
2159 { NULL, 0, 0, false, false, false, NULL }
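/* For instance, a user might write:
     extern void far_away (void) __attribute__ ((long_call));
     void handler (void) __attribute__ ((interrupt ("IRQ")));  */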
2162 /* Handle an attribute requiring a FUNCTION_DECL;
2163 arguments as in struct attribute_spec.handler. */
2164 static tree
2165 arm_handle_fndecl_attribute (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
2166 int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
2168 if (TREE_CODE (*node) != FUNCTION_DECL)
2170 warning ("`%s' attribute only applies to functions",
2171 IDENTIFIER_POINTER (name));
2172 *no_add_attrs = true;
2175 return NULL_TREE;
2178 /* Handle an "interrupt" or "isr" attribute;
2179 arguments as in struct attribute_spec.handler. */
2180 static tree
2181 arm_handle_isr_attribute (tree *node, tree name, tree args, int flags,
2182 bool *no_add_attrs)
2184 if (DECL_P (*node))
2186 if (TREE_CODE (*node) != FUNCTION_DECL)
2188 warning ("`%s' attribute only applies to functions",
2189 IDENTIFIER_POINTER (name));
2190 *no_add_attrs = true;
2192 /* FIXME: the argument if any is checked for type attributes;
2193 should it be checked for decl ones? */
2195 else
2197 if (TREE_CODE (*node) == FUNCTION_TYPE
2198 || TREE_CODE (*node) == METHOD_TYPE)
2200 if (arm_isr_value (args) == ARM_FT_UNKNOWN)
2202 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
2203 *no_add_attrs = true;
2206 else if (TREE_CODE (*node) == POINTER_TYPE
2207 && (TREE_CODE (TREE_TYPE (*node)) == FUNCTION_TYPE
2208 || TREE_CODE (TREE_TYPE (*node)) == METHOD_TYPE)
2209 && arm_isr_value (args) != ARM_FT_UNKNOWN)
2211 *node = build_type_copy (*node);
2212 TREE_TYPE (*node) = build_type_attribute_variant
2213 (TREE_TYPE (*node),
2214 tree_cons (name, args, TYPE_ATTRIBUTES (TREE_TYPE (*node))));
2215 *no_add_attrs = true;
2217 else
2219 /* Possibly pass this attribute on from the type to a decl. */
2220 if (flags & ((int) ATTR_FLAG_DECL_NEXT
2221 | (int) ATTR_FLAG_FUNCTION_NEXT
2222 | (int) ATTR_FLAG_ARRAY_NEXT))
2224 *no_add_attrs = true;
2225 return tree_cons (name, args, NULL_TREE);
2227 else
2229 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
2234 return NULL_TREE;
2237 /* Return 0 if the attributes for two types are incompatible, 1 if they
2238 are compatible, and 2 if they are nearly compatible (which causes a
2239 warning to be generated). */
2240 static int
2241 arm_comp_type_attributes (tree type1, tree type2)
2243 int l1, l2, s1, s2;
2245 /* Check for mismatch of non-default calling convention. */
2246 if (TREE_CODE (type1) != FUNCTION_TYPE)
2247 return 1;
2249 /* Check for mismatched call attributes. */
2250 l1 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type1)) != NULL;
2251 l2 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type2)) != NULL;
2252 s1 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type1)) != NULL;
2253 s2 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type2)) != NULL;
2255 /* Only bother to check if an attribute is defined. */
2256 if (l1 | l2 | s1 | s2)
2258 /* If one type has an attribute, the other must have the same attribute. */
2259 if ((l1 != l2) || (s1 != s2))
2260 return 0;
2262 /* Disallow mixed attributes. */
2263 if ((l1 & s2) || (l2 & s1))
2264 return 0;
2267 /* Check for mismatched ISR attribute. */
2268 l1 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type1)) != NULL;
2269 if (! l1)
2270 l1 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type1)) != NULL;
2271 l2 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type2)) != NULL;
2272 if (! l2)
2273 l2 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type2)) != NULL;
2274 if (l1 != l2)
2275 return 0;
2277 return 1;
2280 /* Encode long_call or short_call attribute by prefixing
2281 symbol name in DECL with a special character FLAG. */
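/* For example, a short_call function "foo" has its assembler name
   rewritten to "<FLAG>foo"; the flag character is recognised (and
   stripped again) when the encoded name is later examined.  */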
2282 void
2283 arm_encode_call_attribute (tree decl, int flag)
2285 const char * str = XSTR (XEXP (DECL_RTL (decl), 0), 0);
2286 int len = strlen (str);
2287 char * newstr;
2289 /* Do not allow weak functions to be treated as short call. */
2290 if (DECL_WEAK (decl) && flag == SHORT_CALL_FLAG_CHAR)
2291 return;
2293 newstr = alloca (len + 2);
2294 newstr[0] = flag;
2295 strcpy (newstr + 1, str);
2297 newstr = (char *) ggc_alloc_string (newstr, len + 1);
2298 XSTR (XEXP (DECL_RTL (decl), 0), 0) = newstr;
2301 /* Assigns default attributes to newly defined type. This is used to
2302 set short_call/long_call attributes for function types of
2303 functions defined inside corresponding #pragma scopes. */
2304 static void
2305 arm_set_default_type_attributes (tree type)
2307 /* Add __attribute__ ((long_call)) to all functions, when
2308 inside #pragma long_calls or __attribute__ ((short_call)),
2309 when inside #pragma no_long_calls. */
2310 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
2312 tree type_attr_list, attr_name;
2313 type_attr_list = TYPE_ATTRIBUTES (type);
2315 if (arm_pragma_long_calls == LONG)
2316 attr_name = get_identifier ("long_call");
2317 else if (arm_pragma_long_calls == SHORT)
2318 attr_name = get_identifier ("short_call");
2319 else
2320 return;
2322 type_attr_list = tree_cons (attr_name, NULL_TREE, type_attr_list);
2323 TYPE_ATTRIBUTES (type) = type_attr_list;
2327 /* Return 1 if the operand is a SYMBOL_REF for a function known to be
2328 defined within the current compilation unit. If this cannot be
2329 determined, then 0 is returned. */
2330 static int
2331 current_file_function_operand (rtx sym_ref)
2333 /* This is a bit of a fib. A function will have a short call flag
2334 applied to its name if it has the short call attribute, or it has
2335 already been defined within the current compilation unit. */
2336 if (ENCODED_SHORT_CALL_ATTR_P (XSTR (sym_ref, 0)))
2337 return 1;
2339 /* The current function is always defined within the current compilation
2340 unit. If it is a weak definition, however, then this may not be the real
2341 definition of the function, and so we have to say no. */
2342 if (sym_ref == XEXP (DECL_RTL (current_function_decl), 0)
2343 && !DECL_WEAK (current_function_decl))
2344 return 1;
2346 /* We cannot make the determination - default to returning 0. */
2347 return 0;
2350 /* Return nonzero if a 32 bit "long_call" should be generated for
2351 this call. We generate a long_call if the function:
2353 a. has an __attribute__ ((long_call))
2354 or b. is within the scope of a #pragma long_calls
2355 or c. the -mlong-calls command line switch has been specified
2357 However we do not generate a long call if the function:
2359 d. has an __attribute__ ((short_call))
2360 or e. is inside the scope of a #pragma no_long_calls
2361 or f. has an __attribute__ ((section))
2362 or g. is defined within the current compilation unit.
2364 This function will be called by C fragments contained in the machine
2365 description file. CALL_REF and CALL_COOKIE correspond to the matched
2366 rtl operands. CALL_SYMBOL is used to distinguish between
2367 two different callers of the function. It is set to 1 in the
2368 "call_symbol" and "call_symbol_value" patterns and to 0 in the "call"
2369 and "call_value" patterns. This is because of the difference in the
2370 SYM_REFs passed by these patterns. */
2372 arm_is_longcall_p (rtx sym_ref, int call_cookie, int call_symbol)
2374 if (!call_symbol)
2376 if (GET_CODE (sym_ref) != MEM)
2377 return 0;
2379 sym_ref = XEXP (sym_ref, 0);
2382 if (GET_CODE (sym_ref) != SYMBOL_REF)
2383 return 0;
2385 if (call_cookie & CALL_SHORT)
2386 return 0;
2388 if (TARGET_LONG_CALLS && flag_function_sections)
2389 return 1;
2391 if (current_file_function_operand (sym_ref))
2392 return 0;
2394 return (call_cookie & CALL_LONG)
2395 || ENCODED_LONG_CALL_ATTR_P (XSTR (sym_ref, 0))
2396 || TARGET_LONG_CALLS;
2399 /* Return nonzero if it is ok to make a tail-call to DECL. */
2400 static bool
2401 arm_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
2403 int call_type = TARGET_LONG_CALLS ? CALL_LONG : CALL_NORMAL;
2405 if (cfun->machine->sibcall_blocked)
2406 return false;
2408 /* Never tailcall something for which we have no decl, or if we
2409 are in Thumb mode. */
2410 if (decl == NULL || TARGET_THUMB)
2411 return false;
2413 /* Get the calling method. */
2414 if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
2415 call_type = CALL_SHORT;
2416 else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
2417 call_type = CALL_LONG;
2419 /* Cannot tail-call to long calls, since these are out of range of
2420 a branch instruction. However, if not compiling PIC, we know
2421 we can reach the symbol if it is in this compilation unit. */
2422 if (call_type == CALL_LONG && (flag_pic || !TREE_ASM_WRITTEN (decl)))
2423 return false;
2425 /* If we are interworking and the function is not declared static
2426 then we can't tail-call it unless we know that it exists in this
2427 compilation unit (since it might be a Thumb routine). */
2428 if (TARGET_INTERWORK && TREE_PUBLIC (decl) && !TREE_ASM_WRITTEN (decl))
2429 return false;
2431 /* Never tailcall from an ISR routine - it needs a special exit sequence. */
2432 if (IS_INTERRUPT (arm_current_func_type ()))
2433 return false;
2435 /* Everything else is ok. */
2436 return true;
2440 /* Addressing mode support functions. */
2442 /* Return nonzero if X is a legitimate immediate operand when compiling
2443 for PIC. */
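/* E.g. under -fpic a (const_int 42) is a legitimate immediate, but a
   (symbol_ref "foo") is not; such addresses must first be transformed
   by legitimize_pic_address below.  */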
2445 legitimate_pic_operand_p (rtx x)
2447 if (CONSTANT_P (x)
2448 && flag_pic
2449 && (GET_CODE (x) == SYMBOL_REF
2450 || (GET_CODE (x) == CONST
2451 && GET_CODE (XEXP (x, 0)) == PLUS
2452 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)))
2453 return 0;
2455 return 1;
2459 legitimize_pic_address (rtx orig, enum machine_mode mode, rtx reg)
2461 if (GET_CODE (orig) == SYMBOL_REF
2462 || GET_CODE (orig) == LABEL_REF)
2464 #ifndef AOF_ASSEMBLER
2465 rtx pic_ref, address;
2466 #endif
2467 rtx insn;
2468 int subregs = 0;
2470 if (reg == 0)
2472 if (no_new_pseudos)
2473 abort ();
2474 else
2475 reg = gen_reg_rtx (Pmode);
2477 subregs = 1;
2480 #ifdef AOF_ASSEMBLER
2481 /* The AOF assembler can generate relocations for these directly, and
2482 understands that the PIC register has to be added into the offset. */
2483 insn = emit_insn (gen_pic_load_addr_based (reg, orig));
2484 #else
2485 if (subregs)
2486 address = gen_reg_rtx (Pmode);
2487 else
2488 address = reg;
2490 if (TARGET_ARM)
2491 emit_insn (gen_pic_load_addr_arm (address, orig));
2492 else
2493 emit_insn (gen_pic_load_addr_thumb (address, orig));
2495 if ((GET_CODE (orig) == LABEL_REF
2496 || (GET_CODE (orig) == SYMBOL_REF &&
2497 ENCODED_SHORT_CALL_ATTR_P (XSTR (orig, 0))))
2498 && NEED_GOT_RELOC)
2499 pic_ref = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, address);
2500 else
2502 pic_ref = gen_rtx_MEM (Pmode,
2503 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
2504 address));
2505 RTX_UNCHANGING_P (pic_ref) = 1;
2508 insn = emit_move_insn (reg, pic_ref);
2509 #endif
2510 current_function_uses_pic_offset_table = 1;
2511 /* Put a REG_EQUAL note on this insn, so that it can be optimized
2512 by loop. */
2513 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, orig,
2514 REG_NOTES (insn));
2515 return reg;
2517 else if (GET_CODE (orig) == CONST)
2519 rtx base, offset;
2521 if (GET_CODE (XEXP (orig, 0)) == PLUS
2522 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
2523 return orig;
2525 if (reg == 0)
2527 if (no_new_pseudos)
2528 abort ();
2529 else
2530 reg = gen_reg_rtx (Pmode);
2533 if (GET_CODE (XEXP (orig, 0)) == PLUS)
2535 base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
2536 offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
2537 base == reg ? 0 : reg);
2539 else
2540 abort ();
2542 if (GET_CODE (offset) == CONST_INT)
2544 /* The base register doesn't really matter, we only want to
2545 test the index for the appropriate mode. */
2546 if (!arm_legitimate_index_p (mode, offset, 0))
2548 if (!no_new_pseudos)
2549 offset = force_reg (Pmode, offset);
2550 else
2551 abort ();
2554 if (GET_CODE (offset) == CONST_INT)
2555 return plus_constant (base, INTVAL (offset));
2558 if (GET_MODE_SIZE (mode) > 4
2559 && (GET_MODE_CLASS (mode) == MODE_INT
2560 || TARGET_SOFT_FLOAT))
2562 emit_insn (gen_addsi3 (reg, base, offset));
2563 return reg;
2566 return gen_rtx_PLUS (Pmode, base, offset);
2569 return orig;
2572 /* Generate code to load the PIC register. PROLOGUE is true if
2573 called from arm_expand_prologue (in which case we want the
2574 generated insns at the start of the function); false if called
2575 by an exception receiver that needs the PIC register reloaded
2576 (in which case the insns are just dumped at the current location). */
2577 void
2578 arm_finalize_pic (int prologue ATTRIBUTE_UNUSED)
2580 #ifndef AOF_ASSEMBLER
2581 rtx l1, pic_tmp, pic_tmp2, seq, pic_rtx;
2582 rtx global_offset_table;
2584 if (current_function_uses_pic_offset_table == 0 || TARGET_SINGLE_PIC_BASE)
2585 return;
2587 if (!flag_pic)
2588 abort ();
2590 start_sequence ();
2591 l1 = gen_label_rtx ();
2593 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2594 /* On the ARM the PC register contains 'dot + 8' at the time of the
2595 addition, on the Thumb it is 'dot + 4'. */
2596 pic_tmp = plus_constant (gen_rtx_LABEL_REF (Pmode, l1), TARGET_ARM ? 8 : 4);
2597 if (GOT_PCREL)
2598 pic_tmp2 = gen_rtx_CONST (VOIDmode,
2599 gen_rtx_PLUS (Pmode, global_offset_table, pc_rtx));
2600 else
2601 pic_tmp2 = gen_rtx_CONST (VOIDmode, global_offset_table);
2603 pic_rtx = gen_rtx_CONST (Pmode, gen_rtx_MINUS (Pmode, pic_tmp2, pic_tmp));
2605 if (TARGET_ARM)
2607 emit_insn (gen_pic_load_addr_arm (pic_offset_table_rtx, pic_rtx));
2608 emit_insn (gen_pic_add_dot_plus_eight (pic_offset_table_rtx, l1));
2610 else
2612 emit_insn (gen_pic_load_addr_thumb (pic_offset_table_rtx, pic_rtx));
2613 emit_insn (gen_pic_add_dot_plus_four (pic_offset_table_rtx, l1));
2616 seq = get_insns ();
2617 end_sequence ();
2618 if (prologue)
2619 emit_insn_after (seq, get_insns ());
2620 else
2621 emit_insn (seq);
2623 /* Need to emit this whether or not we obey regdecls,
2624 since setjmp/longjmp can cause life info to screw up. */
2625 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
2626 #endif /* AOF_ASSEMBLER */
2629 /* Return nonzero if X is valid as an ARM state addressing register. */
2630 static int
2631 arm_address_register_rtx_p (rtx x, int strict_p)
2633 int regno;
2635 if (GET_CODE (x) != REG)
2636 return 0;
2638 regno = REGNO (x);
2640 if (strict_p)
2641 return ARM_REGNO_OK_FOR_BASE_P (regno);
2643 return (regno <= LAST_ARM_REGNUM
2644 || regno >= FIRST_PSEUDO_REGISTER
2645 || regno == FRAME_POINTER_REGNUM
2646 || regno == ARG_POINTER_REGNUM);
2649 /* Return nonzero if X is a valid ARM state address operand. */
2651 arm_legitimate_address_p (enum machine_mode mode, rtx x, int strict_p)
2653 if (arm_address_register_rtx_p (x, strict_p))
2654 return 1;
2656 else if (GET_CODE (x) == POST_INC || GET_CODE (x) == PRE_DEC)
2657 return arm_address_register_rtx_p (XEXP (x, 0), strict_p);
2659 else if ((GET_CODE (x) == POST_MODIFY || GET_CODE (x) == PRE_MODIFY)
2660 && GET_MODE_SIZE (mode) <= 4
2661 && arm_address_register_rtx_p (XEXP (x, 0), strict_p)
2662 && GET_CODE (XEXP (x, 1)) == PLUS
2663 && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
2664 return arm_legitimate_index_p (mode, XEXP (XEXP (x, 1), 1), strict_p);
2666 /* After reload constants split into minipools will have addresses
2667 from a LABEL_REF. */
2668 else if (GET_MODE_SIZE (mode) >= 4 && reload_completed
2669 && (GET_CODE (x) == LABEL_REF
2670 || (GET_CODE (x) == CONST
2671 && GET_CODE (XEXP (x, 0)) == PLUS
2672 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF
2673 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
2674 return 1;
2676 else if (mode == TImode)
2677 return 0;
2679 else if (mode == DImode || (TARGET_SOFT_FLOAT && mode == DFmode))
2681 if (GET_CODE (x) == PLUS
2682 && arm_address_register_rtx_p (XEXP (x, 0), strict_p)
2683 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2685 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
2687 if (val == 4 || val == -4 || val == -8)
2688 return 1;
2692 else if (GET_CODE (x) == PLUS)
2694 rtx xop0 = XEXP (x, 0);
2695 rtx xop1 = XEXP (x, 1);
2697 return ((arm_address_register_rtx_p (xop0, strict_p)
2698 && arm_legitimate_index_p (mode, xop1, strict_p))
2699 || (arm_address_register_rtx_p (xop1, strict_p)
2700 && arm_legitimate_index_p (mode, xop0, strict_p)));
2703 #if 0
2704 /* Reload currently can't handle MINUS, so disable this for now */
2705 else if (GET_CODE (x) == MINUS)
2707 rtx xop0 = XEXP (x, 0);
2708 rtx xop1 = XEXP (x, 1);
2710 return (arm_address_register_rtx_p (xop0, strict_p)
2711 && arm_legitimate_index_p (mode, xop1, strict_p));
2713 #endif
2715 else if (GET_MODE_CLASS (mode) != MODE_FLOAT
2716 && GET_CODE (x) == SYMBOL_REF
2717 && CONSTANT_POOL_ADDRESS_P (x)
2718 && ! (flag_pic
2719 && symbol_mentioned_p (get_pool_constant (x))))
2720 return 1;
2722 else if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == POST_DEC)
2723 && (GET_MODE_SIZE (mode) <= 4)
2724 && arm_address_register_rtx_p (XEXP (x, 0), strict_p))
2725 return 1;
2727 return 0;
2730 /* Return nonzero if INDEX is valid for an address index operand in
2731 ARM state. */
2732 static int
2733 arm_legitimate_index_p (enum machine_mode mode, rtx index, int strict_p)
2735 HOST_WIDE_INT range;
2736 enum rtx_code code = GET_CODE (index);
2738 if (TARGET_HARD_FLOAT && GET_MODE_CLASS (mode) == MODE_FLOAT)
2739 return (code == CONST_INT && INTVAL (index) < 1024
2740 && INTVAL (index) > -1024
2741 && (INTVAL (index) & 3) == 0);
2743 if (TARGET_CIRRUS
2744 && (GET_MODE_CLASS (mode) == MODE_FLOAT || mode == DImode))
2745 return (code == CONST_INT
2746 && INTVAL (index) < 255
2747 && INTVAL (index) > -255);
2749 if (arm_address_register_rtx_p (index, strict_p)
2750 && GET_MODE_SIZE (mode) <= 4)
2751 return 1;
2753 if (TARGET_REALLY_IWMMXT && VALID_IWMMXT_REG_MODE (mode))
2754 return (code == CONST_INT
2755 && INTVAL (index) < 256
2756 && INTVAL (index) > -256);
2758 /* XXX What about ldrsb? */
2759 if (GET_MODE_SIZE (mode) <= 4 && code == MULT
2760 && (!arm_arch4 || (mode) != HImode))
2762 rtx xiop0 = XEXP (index, 0);
2763 rtx xiop1 = XEXP (index, 1);
2765 return ((arm_address_register_rtx_p (xiop0, strict_p)
2766 && power_of_two_operand (xiop1, SImode))
2767 || (arm_address_register_rtx_p (xiop1, strict_p)
2768 && power_of_two_operand (xiop0, SImode)));
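/* The MULT case above corresponds to scaled index addresses such as
   [r1, r2, lsl #2], where the scale factor must be a power of two.  */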
2771 if (GET_MODE_SIZE (mode) <= 4
2772 && (code == LSHIFTRT || code == ASHIFTRT
2773 || code == ASHIFT || code == ROTATERT)
2774 && (!arm_arch4 || (mode) != HImode))
2776 rtx op = XEXP (index, 1);
2778 return (arm_address_register_rtx_p (XEXP (index, 0), strict_p)
2779 && GET_CODE (op) == CONST_INT
2780 && INTVAL (op) > 0
2781 && INTVAL (op) <= 31);
2784 /* XXX For ARM v4 we may be doing a sign-extend operation during the
2785 load, but that has a restricted addressing range and we are unable
2786 to tell here whether that is the case. To be safe we restrict all
2787 loads to that range. */
2788 range = ((mode) == HImode || (mode) == QImode)
2789 ? (arm_arch4 ? 256 : 4095) : 4096;
2791 return (code == CONST_INT
2792 && INTVAL (index) < range
2793 && INTVAL (index) > -range);
2796 /* Return nonzero if X is valid as an ARM state addressing register. */
2797 static int
2798 thumb_base_register_rtx_p (rtx x, enum machine_mode mode, int strict_p)
2800 int regno;
2802 if (GET_CODE (x) != REG)
2803 return 0;
2805 regno = REGNO (x);
2807 if (strict_p)
2808 return THUMB_REGNO_MODE_OK_FOR_BASE_P (regno, mode);
2810 return (regno <= LAST_LO_REGNUM
2811 || regno >= FIRST_PSEUDO_REGISTER
2812 || regno == FRAME_POINTER_REGNUM
2813 || (GET_MODE_SIZE (mode) >= 4
2814 && (regno == STACK_POINTER_REGNUM
2815 || x == hard_frame_pointer_rtx
2816 || x == arg_pointer_rtx)));
2819 /* Return nonzero if x is a legitimate index register. This is the case
2820 for any base register that can access a QImode object. */
2821 inline static int
2822 thumb_index_register_rtx_p (rtx x, int strict_p)
2824 return thumb_base_register_rtx_p (x, QImode, strict_p);
2827 /* Return nonzero if x is a legitimate Thumb-state address.
2829 The AP may be eliminated to either the SP or the FP, so we use the
2830 least common denominator, e.g. SImode, and offsets from 0 to 64.
2832 ??? Verify whether the above is the right approach.
2834 ??? Also, the FP may be eliminated to the SP, so perhaps that
2835 needs special handling also.
2837 ??? Look at how the mips16 port solves this problem. It probably uses
2838 better ways to solve some of these problems.
2840 Although it is not incorrect, we don't accept QImode and HImode
2841 addresses based on the frame pointer or arg pointer until the
2842 reload pass starts. This is so that eliminating such addresses
2843 into stack based ones won't produce impossible code. */
2845 thumb_legitimate_address_p (enum machine_mode mode, rtx x, int strict_p)
2847 /* ??? Not clear if this is right. Experiment. */
2848 if (GET_MODE_SIZE (mode) < 4
2849 && !(reload_in_progress || reload_completed)
2850 && (reg_mentioned_p (frame_pointer_rtx, x)
2851 || reg_mentioned_p (arg_pointer_rtx, x)
2852 || reg_mentioned_p (virtual_incoming_args_rtx, x)
2853 || reg_mentioned_p (virtual_outgoing_args_rtx, x)
2854 || reg_mentioned_p (virtual_stack_dynamic_rtx, x)
2855 || reg_mentioned_p (virtual_stack_vars_rtx, x)))
2856 return 0;
2858 /* Accept any base register. SP only in SImode or larger. */
2859 else if (thumb_base_register_rtx_p (x, mode, strict_p))
2860 return 1;
2862 /* This is PC relative data before arm_reorg runs. */
2863 else if (GET_MODE_SIZE (mode) >= 4 && CONSTANT_P (x)
2864 && GET_CODE (x) == SYMBOL_REF
2865 && CONSTANT_POOL_ADDRESS_P (x) && ! flag_pic)
2866 return 1;
2868 /* This is PC relative data after arm_reorg runs. */
2869 else if (GET_MODE_SIZE (mode) >= 4 && reload_completed
2870 && (GET_CODE (x) == LABEL_REF
2871 || (GET_CODE (x) == CONST
2872 && GET_CODE (XEXP (x, 0)) == PLUS
2873 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF
2874 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
2875 return 1;
2877 /* Post-inc indexing only supported for SImode and larger. */
2878 else if (GET_CODE (x) == POST_INC && GET_MODE_SIZE (mode) >= 4
2879 && thumb_index_register_rtx_p (XEXP (x, 0), strict_p))
2880 return 1;
2882 else if (GET_CODE (x) == PLUS)
2884 /* REG+REG address can be any two index registers. */
2885 /* We disallow FRAME+REG addressing since we know that FRAME
2886 will be replaced with STACK, and SP relative addressing only
2887 permits SP+OFFSET. */
2888 if (GET_MODE_SIZE (mode) <= 4
2889 && XEXP (x, 0) != frame_pointer_rtx
2890 && XEXP (x, 1) != frame_pointer_rtx
2891 && XEXP (x, 0) != virtual_stack_vars_rtx
2892 && XEXP (x, 1) != virtual_stack_vars_rtx
2893 && thumb_index_register_rtx_p (XEXP (x, 0), strict_p)
2894 && thumb_index_register_rtx_p (XEXP (x, 1), strict_p))
2895 return 1;
2897 /* REG+const has 5-7 bit offset for non-SP registers. */
2898 else if ((thumb_index_register_rtx_p (XEXP (x, 0), strict_p)
2899 || XEXP (x, 0) == arg_pointer_rtx)
2900 && GET_CODE (XEXP (x, 1)) == CONST_INT
2901 && thumb_legitimate_offset_p (mode, INTVAL (XEXP (x, 1))))
2902 return 1;
2904 /* REG+const has 10 bit offset for SP, but only SImode and
2905 larger is supported. */
2906 /* ??? Should probably check for DI/DFmode overflow here
2907 just like GO_IF_LEGITIMATE_OFFSET does. */
2908 else if (GET_CODE (XEXP (x, 0)) == REG
2909 && REGNO (XEXP (x, 0)) == STACK_POINTER_REGNUM
2910 && GET_MODE_SIZE (mode) >= 4
2911 && GET_CODE (XEXP (x, 1)) == CONST_INT
2912 && INTVAL (XEXP (x, 1)) >= 0
2913 && INTVAL (XEXP (x, 1)) + GET_MODE_SIZE (mode) <= 1024
2914 && (INTVAL (XEXP (x, 1)) & 3) == 0)
2915 return 1;
2917 else if (GET_CODE (XEXP (x, 0)) == REG
2918 && REGNO (XEXP (x, 0)) == FRAME_POINTER_REGNUM
2919 && GET_MODE_SIZE (mode) >= 4
2920 && GET_CODE (XEXP (x, 1)) == CONST_INT
2921 && (INTVAL (XEXP (x, 1)) & 3) == 0)
2922 return 1;
2925 else if (GET_MODE_CLASS (mode) != MODE_FLOAT
2926 && GET_CODE (x) == SYMBOL_REF
2927 && CONSTANT_POOL_ADDRESS_P (x)
2928 && !(flag_pic
2929 && symbol_mentioned_p (get_pool_constant (x))))
2930 return 1;
2932 return 0;
2935 /* Return nonzero if VAL can be used as an offset in a Thumb-state address
2936 instruction of mode MODE. */
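/* For example, a word (SImode) access allows word-aligned offsets
   0..124, a halfword access even offsets 0..62, and a byte access
   offsets 0..31, matching the immediate fields of the corresponding
   Thumb load/store instructions.  */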
2938 thumb_legitimate_offset_p (enum machine_mode mode, HOST_WIDE_INT val)
2940 switch (GET_MODE_SIZE (mode))
2942 case 1:
2943 return val >= 0 && val < 32;
2945 case 2:
2946 return val >= 0 && val < 64 && (val & 1) == 0;
2948 default:
2949 return (val >= 0
2950 && (val + GET_MODE_SIZE (mode)) <= 128
2951 && (val & 3) == 0);
2955 /* Try machine-dependent ways of modifying an illegitimate address
2956 to be legitimate. If we find one, return the new, valid address. */
2958 arm_legitimize_address (rtx x, rtx orig_x, enum machine_mode mode)
2960 if (GET_CODE (x) == PLUS)
2962 rtx xop0 = XEXP (x, 0);
2963 rtx xop1 = XEXP (x, 1);
2965 if (CONSTANT_P (xop0) && !symbol_mentioned_p (xop0))
2966 xop0 = force_reg (SImode, xop0);
2968 if (CONSTANT_P (xop1) && !symbol_mentioned_p (xop1))
2969 xop1 = force_reg (SImode, xop1);
2971 if (ARM_BASE_REGISTER_RTX_P (xop0)
2972 && GET_CODE (xop1) == CONST_INT)
2974 HOST_WIDE_INT n, low_n;
2975 rtx base_reg, val;
2976 n = INTVAL (xop1);
2978 if (mode == DImode || (TARGET_SOFT_FLOAT && mode == DFmode))
2980 low_n = n & 0x0f;
2981 n &= ~0x0f;
2982 if (low_n > 4)
2984 n += 16;
2985 low_n -= 16;
2988 else
2990 low_n = ((mode) == TImode ? 0
2991 : n >= 0 ? (n & 0xfff) : -((-n) & 0xfff));
2992 n -= low_n;
2995 base_reg = gen_reg_rtx (SImode);
2996 val = force_operand (gen_rtx_PLUS (SImode, xop0,
2997 GEN_INT (n)), NULL_RTX);
2998 emit_move_insn (base_reg, val);
2999 x = (low_n == 0 ? base_reg
3000 : gen_rtx_PLUS (SImode, base_reg, GEN_INT (low_n)));
3002 else if (xop0 != XEXP (x, 0) || xop1 != XEXP (x, 1))
3003 x = gen_rtx_PLUS (SImode, xop0, xop1);
3006 /* XXX We don't allow MINUS any more -- see comment in
3007 arm_legitimate_address_p (). */
3008 else if (GET_CODE (x) == MINUS)
3010 rtx xop0 = XEXP (x, 0);
3011 rtx xop1 = XEXP (x, 1);
3013 if (CONSTANT_P (xop0))
3014 xop0 = force_reg (SImode, xop0);
3016 if (CONSTANT_P (xop1) && ! symbol_mentioned_p (xop1))
3017 xop1 = force_reg (SImode, xop1);
3019 if (xop0 != XEXP (x, 0) || xop1 != XEXP (x, 1))
3020 x = gen_rtx_MINUS (SImode, xop0, xop1);
3023 if (flag_pic)
3025 /* We need to find and carefully transform any SYMBOL and LABEL
3026 references; so go back to the original address expression. */
3027 rtx new_x = legitimize_pic_address (orig_x, mode, NULL_RTX);
3029 if (new_x != orig_x)
3030 x = new_x;
3033 return x;
3038 #define REG_OR_SUBREG_REG(X) \
3039 (GET_CODE (X) == REG \
3040 || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))
3042 #define REG_OR_SUBREG_RTX(X) \
3043 (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))
3045 #ifndef COSTS_N_INSNS
3046 #define COSTS_N_INSNS(N) ((N) * 4 - 2)
3047 #endif
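/* With the fallback definition above, COSTS_N_INSNS (1) == 2,
   COSTS_N_INSNS (2) == 6, and so on.  */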
3048 /* Worker routine for arm_rtx_costs. */
3049 static inline int
3050 arm_rtx_costs_1 (rtx x, enum rtx_code code, enum rtx_code outer)
3052 enum machine_mode mode = GET_MODE (x);
3053 enum rtx_code subcode;
3054 int extra_cost;
3056 if (TARGET_THUMB)
3058 switch (code)
3060 case ASHIFT:
3061 case ASHIFTRT:
3062 case LSHIFTRT:
3063 case ROTATERT:
3064 case PLUS:
3065 case MINUS:
3066 case COMPARE:
3067 case NEG:
3068 case NOT:
3069 return COSTS_N_INSNS (1);
3071 case MULT:
3072 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
3074 int cycles = 0;
3075 unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));
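/* The loop below charges one extra unit per two bits of the
   constant multiplier; e.g. 0xff needs four iterations
   (0xff -> 0x3f -> 0xf -> 0x3 -> 0).  */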
3077 while (i)
3079 i >>= 2;
3080 cycles++;
3082 return COSTS_N_INSNS (2) + cycles;
3084 return COSTS_N_INSNS (1) + 16;
3086 case SET:
3087 return (COSTS_N_INSNS (1)
3088 + 4 * ((GET_CODE (SET_SRC (x)) == MEM)
3089 + (GET_CODE (SET_DEST (x)) == MEM)));
3091 case CONST_INT:
3092 if (outer == SET)
3094 if ((unsigned HOST_WIDE_INT) INTVAL (x) < 256)
3095 return 0;
3096 if (thumb_shiftable_const (INTVAL (x)))
3097 return COSTS_N_INSNS (2);
3098 return COSTS_N_INSNS (3);
3100 else if (outer == PLUS
3101 && INTVAL (x) < 256 && INTVAL (x) > -256)
3102 return 0;
3103 else if (outer == COMPARE
3104 && (unsigned HOST_WIDE_INT) INTVAL (x) < 256)
3105 return 0;
3106 else if (outer == ASHIFT || outer == ASHIFTRT
3107 || outer == LSHIFTRT)
3108 return 0;
3109 return COSTS_N_INSNS (2);
3111 case CONST:
3112 case CONST_DOUBLE:
3113 case LABEL_REF:
3114 case SYMBOL_REF:
3115 return COSTS_N_INSNS (3);
3117 case UDIV:
3118 case UMOD:
3119 case DIV:
3120 case MOD:
3121 return 100;
3123 case TRUNCATE:
3124 return 99;
3126 case AND:
3127 case XOR:
3128 case IOR:
3129 /* XXX guess. */
3130 return 8;
3132 case ADDRESSOF:
3133 case MEM:
3134 /* XXX another guess. */
3135 /* Memory costs quite a lot for the first word, but subsequent words
3136 load at the equivalent of a single insn each. */
3137 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
3138 + ((GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
3139 ? 4 : 0));
3141 case IF_THEN_ELSE:
3142 /* XXX a guess. */
3143 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
3144 return 14;
3145 return 2;
3147 case ZERO_EXTEND:
3148 /* XXX still guessing. */
3149 switch (GET_MODE (XEXP (x, 0)))
3151 case QImode:
3152 return (1 + (mode == DImode ? 4 : 0)
3153 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3155 case HImode:
3156 return (4 + (mode == DImode ? 4 : 0)
3157 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3159 case SImode:
3160 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3162 default:
3163 return 99;
3166 default:
3167 return 99;
3168 #if 0
3169 case FFS:
3170 case FLOAT:
3171 case FIX:
3172 case UNSIGNED_FIX:
3173 /* XXX guess */
3174 fprintf (stderr, "unexpected code for thumb in rtx_costs: %s\n",
3175 rtx_name[code]);
3176 abort ();
3177 #endif
3181 switch (code)
3183 case MEM:
3184 /* Memory costs quite a lot for the first word, but subsequent words
3185 load at the equivalent of a single insn each. */
3186 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
3187 + (GET_CODE (x) == SYMBOL_REF
3188 && CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));
3190 case DIV:
3191 case MOD:
3192 return 100;
3194 case ROTATE:
3195 if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
3196 return 4;
3197 /* Fall through */
3198 case ROTATERT:
3199 if (mode != SImode)
3200 return 8;
3201 /* Fall through */
3202 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
3203 if (mode == DImode)
3204 return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
3205 + ((GET_CODE (XEXP (x, 0)) == REG
3206 || (GET_CODE (XEXP (x, 0)) == SUBREG
3207 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
3208 ? 0 : 8));
3209 return (1 + ((GET_CODE (XEXP (x, 0)) == REG
3210 || (GET_CODE (XEXP (x, 0)) == SUBREG
3211 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
3212 ? 0 : 4)
3213 + ((GET_CODE (XEXP (x, 1)) == REG
3214 || (GET_CODE (XEXP (x, 1)) == SUBREG
3215 && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
3216 || (GET_CODE (XEXP (x, 1)) == CONST_INT))
3217 ? 0 : 4));
3219 case MINUS:
3220 if (mode == DImode)
3221 return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
3222 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
3223 || (GET_CODE (XEXP (x, 0)) == CONST_INT
3224 && const_ok_for_arm (INTVAL (XEXP (x, 0)))))
3225 ? 0 : 8));
3227 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3228 return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
3229 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
3230 && const_double_rtx_ok_for_fpa (XEXP (x, 1))))
3231 ? 0 : 8)
3232 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
3233 || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
3234 && const_double_rtx_ok_for_fpa (XEXP (x, 0))))
3235 ? 0 : 8));
3237 if (((GET_CODE (XEXP (x, 0)) == CONST_INT
3238 && const_ok_for_arm (INTVAL (XEXP (x, 0)))
3239 && REG_OR_SUBREG_REG (XEXP (x, 1))))
3240 || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
3241 || subcode == ASHIFTRT || subcode == LSHIFTRT
3242 || subcode == ROTATE || subcode == ROTATERT
3243 || (subcode == MULT
3244 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3245 && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
3246 (INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
3247 && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
3248 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
3249 || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
3250 && REG_OR_SUBREG_REG (XEXP (x, 0))))
3251 return 1;
3252 /* Fall through */
3254 case PLUS:
3255 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3256 return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
3257 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
3258 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
3259 && const_double_rtx_ok_for_fpa (XEXP (x, 1))))
3260 ? 0 : 8));
3262 /* Fall through */
3263 case AND: case XOR: case IOR:
3264 extra_cost = 0;
3266 /* Normally the frame registers will be split into reg+const during
3267 reload, so it is a bad idea to combine them with other instructions,
3268 since then they might not be moved outside of loops. As a compromise
3269 we allow integration with ops that have a constant as their second
3270 operand. */
3271 if ((REG_OR_SUBREG_REG (XEXP (x, 0))
3272 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
3273 && GET_CODE (XEXP (x, 1)) != CONST_INT)
3274 || (REG_OR_SUBREG_REG (XEXP (x, 1))
3275 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 1)))))
3276 extra_cost = 4;
3278 if (mode == DImode)
3279 return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
3280 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
3281 || (GET_CODE (XEXP (x, 1)) == CONST_INT
3282 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
3283 ? 0 : 8));
3285 if (REG_OR_SUBREG_REG (XEXP (x, 0)))
3286 return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
3287 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
3288 || (GET_CODE (XEXP (x, 1)) == CONST_INT
3289 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
3290 ? 0 : 4));
3292 else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
3293 return (1 + extra_cost
3294 + ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
3295 || subcode == LSHIFTRT || subcode == ASHIFTRT
3296 || subcode == ROTATE || subcode == ROTATERT
3297 || (subcode == MULT
3298 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3299 && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
3300 (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0)))
3301 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
3302 && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
3303 || GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
3304 ? 0 : 4));
3306 return 8;
3308 case MULT:
3309 /* There is no point basing this on the tuning, since it is always the
3310 fast variant if it exists at all. */
3311 if (arm_fast_multiply && mode == DImode
3312 && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
3313 && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
3314 || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
3315 return 8;
3317 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3318 || mode == DImode)
3319 return 30;
3321 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
3323 unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
3324 & (unsigned HOST_WIDE_INT) 0xffffffff);
3325 int add_cost = const_ok_for_arm (i) ? 4 : 8;
3326 int j;
3328 /* Tune as appropriate. */
3329 int booth_unit_size = ((tune_flags & FL_FAST_MULT) ? 8 : 2);
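/* E.g. with a 2-bit Booth unit a multiplier of 0xff is consumed in
   four steps (0xff -> 0x3f -> 0xf -> 0x3 -> 0), adding 2 to the
   cost for each step.  */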
3331 for (j = 0; i && j < 32; j += booth_unit_size)
3333 i >>= booth_unit_size;
3334 add_cost += 2;
3337 return add_cost;
3340 return (((tune_flags & FL_FAST_MULT) ? 8 : 30)
3341 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
3342 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));
3344 case TRUNCATE:
3345 if (arm_fast_multiply && mode == SImode
3346 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
3347 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
3348 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
3349 == GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)))
3350 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ZERO_EXTEND
3351 || GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SIGN_EXTEND))
3352 return 8;
3353 return 99;
3355 case NEG:
3356 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3357 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
3358 /* Fall through */
3359 case NOT:
3360 if (mode == DImode)
3361 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
3363 return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
3365 case IF_THEN_ELSE:
3366 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
3367 return 14;
3368 return 2;
3370 case COMPARE:
3371 return 1;
3373 case ABS:
3374 return 4 + (mode == DImode ? 4 : 0);
3376 case SIGN_EXTEND:
3377 if (GET_MODE (XEXP (x, 0)) == QImode)
3378 return (4 + (mode == DImode ? 4 : 0)
3379 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3380 /* Fall through */
3381 case ZERO_EXTEND:
3382 switch (GET_MODE (XEXP (x, 0)))
3384 case QImode:
3385 return (1 + (mode == DImode ? 4 : 0)
3386 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3388 case HImode:
3389 return (4 + (mode == DImode ? 4 : 0)
3390 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3392 case SImode:
3393 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3395 case V8QImode:
3396 case V4HImode:
3397 case V2SImode:
3398 case V4QImode:
3399 case V2HImode:
3400 return 1;
3402 default:
3403 break;
3405 abort ();
3407 case CONST_INT:
3408 if (const_ok_for_arm (INTVAL (x)))
3409 return outer == SET ? 2 : -1;
3410 else if (outer == AND
3411 && const_ok_for_arm (~INTVAL (x)))
3412 return -1;
3413 else if ((outer == COMPARE
3414 || outer == PLUS || outer == MINUS)
3415 && const_ok_for_arm (-INTVAL (x)))
3416 return -1;
3417 else
3418 return 5;
3420 case CONST:
3421 case LABEL_REF:
3422 case SYMBOL_REF:
3423 return 6;
3425 case CONST_DOUBLE:
3426 if (const_double_rtx_ok_for_fpa (x))
3427 return outer == SET ? 2 : -1;
3428 else if ((outer == COMPARE || outer == PLUS)
3429 && neg_const_double_rtx_ok_for_fpa (x))
3430 return -1;
3431 return 7;
3433 default:
3434 return 99;
3438 static bool
3439 arm_rtx_costs (rtx x, int code, int outer_code, int *total)
3441 *total = arm_rtx_costs_1 (x, code, outer_code);
3442 return true;
3445 /* All address computations that can be done are free, but rtx cost returns
3446 the same for practically all of them. So we weight the different types
3447 of address here in the order (most pref first):
3448 PRE/POST_INC/DEC, SHIFT or NON-INT sum, INT sum, REG, MEM or LABEL. */
3449 static int
3450 arm_address_cost (rtx x)
3452 #define ARM_ADDRESS_COST(X) \
3453 (10 - ((GET_CODE (X) == MEM || GET_CODE (X) == LABEL_REF \
3454 || GET_CODE (X) == SYMBOL_REF) \
3455 ? 0 \
3456 : ((GET_CODE (X) == PRE_INC || GET_CODE (X) == PRE_DEC \
3457 || GET_CODE (X) == POST_INC || GET_CODE (X) == POST_DEC) \
3458 ? 10 \
3459 : (((GET_CODE (X) == PLUS || GET_CODE (X) == MINUS) \
3460 ? 6 + (GET_CODE (XEXP (X, 1)) == CONST_INT ? 2 \
3461 : ((GET_RTX_CLASS (GET_CODE (XEXP (X, 0))) == '2' \
3462 || GET_RTX_CLASS (GET_CODE (XEXP (X, 0))) == 'c' \
3463 || GET_RTX_CLASS (GET_CODE (XEXP (X, 1))) == '2' \
3464 || GET_RTX_CLASS (GET_CODE (XEXP (X, 1))) == 'c') \
3465 ? 1 : 0)) \
3466 : 4)))))
3468 #define THUMB_ADDRESS_COST(X) \
3469 ((GET_CODE (X) == REG \
3470 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 0)) == REG \
3471 && GET_CODE (XEXP (X, 1)) == CONST_INT)) \
3472 ? 1 : 2)
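/* So, for instance, (post_inc (reg)) costs 0, (plus (reg)
   (const_int)) costs 2, a bare (reg) costs 6 and a (symbol_ref)
   costs 10 on the ARM, matching the preference order above.  */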
3474 return (TARGET_ARM ? ARM_ADDRESS_COST (x) : THUMB_ADDRESS_COST (x));
3477 static int
3478 arm_use_dfa_pipeline_interface (void)
3480 return true;
3483 static int
3484 arm_adjust_cost (rtx insn, rtx link, rtx dep, int cost)
3486 rtx i_pat, d_pat;
3488 /* Some true dependencies can have a higher cost depending
3489 on precisely how certain input operands are used. */
3490 if (arm_tune_xscale
3491 && REG_NOTE_KIND (link) == 0
3492 && recog_memoized (insn) >= 0
3493 && recog_memoized (dep) >= 0)
3495 int shift_opnum = get_attr_shift (insn);
3496 enum attr_type attr_type = get_attr_type (dep);
3498 /* If nonzero, SHIFT_OPNUM contains the operand number of a shifted
3499 operand for INSN. If we have a shifted input operand and the
3500 instruction we depend on is another ALU instruction, then we may
3501 have to account for an additional stall. */
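/* For example, on XScale an instruction such as
   "add r0, r1, r2, lsl #4" needs its shifted operand (r2) earlier
   than its other inputs, so if r2 is written by the immediately
   preceding ALU instruction an extra stall may result.  */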
3502 if (shift_opnum != 0 && attr_type == TYPE_NORMAL)
3504 rtx shifted_operand;
3505 int opno;
3507 /* Get the shifted operand. */
3508 extract_insn (insn);
3509 shifted_operand = recog_data.operand[shift_opnum];
3511 /* Iterate over all the operands in DEP. If we write an operand
3512 that overlaps with SHIFTED_OPERAND, then we have to increase the
3513 cost of this dependency. */
3514 extract_insn (dep);
3515 preprocess_constraints ();
3516 for (opno = 0; opno < recog_data.n_operands; opno++)
3518 /* We can ignore strict inputs. */
3519 if (recog_data.operand_type[opno] == OP_IN)
3520 continue;
3522 if (reg_overlap_mentioned_p (recog_data.operand[opno],
3523 shifted_operand))
3524 return 2;
3529 /* XXX This is not strictly true for the FPA. */
3530 if (REG_NOTE_KIND (link) == REG_DEP_ANTI
3531 || REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
3532 return 0;
3534 /* Call insns don't incur a stall, even if they follow a load. */
3535 if (REG_NOTE_KIND (link) == 0
3536 && GET_CODE (insn) == CALL_INSN)
3537 return 1;
3539 if ((i_pat = single_set (insn)) != NULL
3540 && GET_CODE (SET_SRC (i_pat)) == MEM
3541 && (d_pat = single_set (dep)) != NULL
3542 && GET_CODE (SET_DEST (d_pat)) == MEM)
3544 rtx src_mem = XEXP (SET_SRC (i_pat), 0);
3545 /* This is a load after a store; there is no conflict if the load reads
3546 from a cached area. Assume that loads from the stack, and from the
3547 constant pool are cached, and that others will miss. This is a
3548 hack. */
3550 if ((GET_CODE (src_mem) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (src_mem))
3551 || reg_mentioned_p (stack_pointer_rtx, src_mem)
3552 || reg_mentioned_p (frame_pointer_rtx, src_mem)
3553 || reg_mentioned_p (hard_frame_pointer_rtx, src_mem))
3554 return 1;
3557 return cost;
3560 static int fpa_consts_inited = 0;
3562 static const char * const strings_fpa[8] =
3564 "0", "1", "2", "3",
3565 "4", "5", "0.5", "10"
3568 static REAL_VALUE_TYPE values_fpa[8];
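/* Only these eight values (and, for instructions that accept a
   negated operand, their negations) can be used as FPA immediates;
   e.g. 10.0 is encodable but 100.0 is not.  */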
3570 static void
3571 init_fpa_table (void)
3573 int i;
3574 REAL_VALUE_TYPE r;
3576 for (i = 0; i < 8; i++)
3578 r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
3579 values_fpa[i] = r;
3582 fpa_consts_inited = 1;
3585 /* Return TRUE if rtx X is a valid immediate FPA constant. */
3587 const_double_rtx_ok_for_fpa (rtx x)
3589 REAL_VALUE_TYPE r;
3590 int i;
3592 if (!fpa_consts_inited)
3593 init_fpa_table ();
3595 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3596 if (REAL_VALUE_MINUS_ZERO (r))
3597 return 0;
3599 for (i = 0; i < 8; i++)
3600 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
3601 return 1;
3603 return 0;
3606 /* Return TRUE if rtx X is a valid immediate FPA constant when negated. */
3608 neg_const_double_rtx_ok_for_fpa (rtx x)
3610 REAL_VALUE_TYPE r;
3611 int i;
3613 if (!fpa_consts_inited)
3614 init_fpa_table ();
3616 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3617 r = REAL_VALUE_NEGATE (r);
3618 if (REAL_VALUE_MINUS_ZERO (r))
3619 return 0;
3621 for (i = 0; i < 8; i++)
3622 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
3623 return 1;
3625 return 0;
3628 /* Predicates for `match_operand' and `match_operator'. */
3630 /* s_register_operand is the same as register_operand, but it doesn't accept
3631 (SUBREG (MEM)...).
3633 This function exists because at the time it was put in it led to better
3634 code. SUBREG(MEM) always needs a reload in the places where
3635 s_register_operand is used, and this seemed to lead to excessive
3636 reloading. */
3638 s_register_operand (rtx op, enum machine_mode mode)
3640 if (GET_MODE (op) != mode && mode != VOIDmode)
3641 return 0;
3643 if (GET_CODE (op) == SUBREG)
3644 op = SUBREG_REG (op);
3646 /* We don't consider registers whose class is NO_REGS
3647 to be a register operand. */
3648 /* XXX might have to check for lo regs only for thumb ??? */
3649 return (GET_CODE (op) == REG
3650 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3651 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
3654 /* A hard register operand (even before reload). */
3656 arm_hard_register_operand (rtx op, enum machine_mode mode)
3658 if (GET_MODE (op) != mode && mode != VOIDmode)
3659 return 0;
3661 return (GET_CODE (op) == REG
3662 && REGNO (op) < FIRST_PSEUDO_REGISTER);
3665 /* Only accept reg, subreg(reg), const_int. */
3667 reg_or_int_operand (rtx op, enum machine_mode mode)
3669 if (GET_CODE (op) == CONST_INT)
3670 return 1;
3672 if (GET_MODE (op) != mode && mode != VOIDmode)
3673 return 0;
3675 if (GET_CODE (op) == SUBREG)
3676 op = SUBREG_REG (op);
3678 /* We don't consider registers whose class is NO_REGS
3679 to be a register operand. */
3680 return (GET_CODE (op) == REG
3681 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3682 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
3685 /* Return 1 if OP is an item in memory, given that we are in reload. */
3687 arm_reload_memory_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3689 int regno = true_regnum (op);
3691 return (!CONSTANT_P (op)
3692 && (regno == -1
3693 || (GET_CODE (op) == REG
3694 && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
3697 /* Return 1 if OP is a valid memory address, but not valid for a signed byte
3698 memory access (architecture V4).
3699 MODE is QImode if called when computing constraints, or VOIDmode when
3700 emitting patterns. In this latter case we cannot use memory_operand()
3701 because it will fail on badly formed MEMs, which is precisely what we are
3702 trying to catch. */
3704 bad_signed_byte_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3706 #if 0
3707 if ((mode == QImode && !memory_operand (op, mode)) || GET_CODE (op) != MEM)
3708 return 0;
3709 #endif
3710 if (GET_CODE (op) != MEM)
3711 return 0;
3713 op = XEXP (op, 0);
3715 /* A sum of anything more complex than reg + reg or reg + const is bad. */
3716 if ((GET_CODE (op) == PLUS || GET_CODE (op) == MINUS)
3717 && (!s_register_operand (XEXP (op, 0), VOIDmode)
3718 || (!s_register_operand (XEXP (op, 1), VOIDmode)
3719 && GET_CODE (XEXP (op, 1)) != CONST_INT)))
3720 return 1;
3722 /* Big constants are also bad. */
3723 if (GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT
3724 && (INTVAL (XEXP (op, 1)) > 0xff
3725 || -INTVAL (XEXP (op, 1)) > 0xff))
3726 return 1;
3728 /* Everything else is good, or will automatically be made so. */
3729 return 0;
3732 /* Return TRUE for valid operands for the rhs of an ARM instruction. */
3734 arm_rhs_operand (rtx op, enum machine_mode mode)
3736 return (s_register_operand (op, mode)
3737 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
3740 /* Return TRUE for valid operands for the
3741 rhs of an ARM instruction, or a load. */
3743 arm_rhsm_operand (rtx op, enum machine_mode mode)
3745 return (s_register_operand (op, mode)
3746 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
3747 || memory_operand (op, mode));
3750 /* Return TRUE for valid operands for the rhs of an ARM instruction, or if a
3751 constant that is valid when negated. */
3753 arm_add_operand (rtx op, enum machine_mode mode)
3755 if (TARGET_THUMB)
3756 return thumb_cmp_operand (op, mode);
3758 return (s_register_operand (op, mode)
3759 || (GET_CODE (op) == CONST_INT
3760 && (const_ok_for_arm (INTVAL (op))
3761 || const_ok_for_arm (-INTVAL (op)))));
3765 arm_not_operand (rtx op, enum machine_mode mode)
3767 return (s_register_operand (op, mode)
3768 || (GET_CODE (op) == CONST_INT
3769 && (const_ok_for_arm (INTVAL (op))
3770 || const_ok_for_arm (~INTVAL (op)))));
3773 /* Return TRUE if the operand is a memory reference which contains an
3774 offsettable address. */
3776 offsettable_memory_operand (rtx op, enum machine_mode mode)
3778 if (mode == VOIDmode)
3779 mode = GET_MODE (op);
3781 return (mode == GET_MODE (op)
3782 && GET_CODE (op) == MEM
3783 && offsettable_address_p (reload_completed | reload_in_progress,
3784 mode, XEXP (op, 0)));
3787 /* Return TRUE if the operand is a memory reference which is, or can be
3788 made word aligned by adjusting the offset. */
3790 alignable_memory_operand (rtx op, enum machine_mode mode)
3792 rtx reg;
3794 if (mode == VOIDmode)
3795 mode = GET_MODE (op);
3797 if (mode != GET_MODE (op) || GET_CODE (op) != MEM)
3798 return 0;
3800 op = XEXP (op, 0);
3802 return ((GET_CODE (reg = op) == REG
3803 || (GET_CODE (op) == SUBREG
3804 && GET_CODE (reg = SUBREG_REG (op)) == REG)
3805 || (GET_CODE (op) == PLUS
3806 && GET_CODE (XEXP (op, 1)) == CONST_INT
3807 && (GET_CODE (reg = XEXP (op, 0)) == REG
3808 || (GET_CODE (XEXP (op, 0)) == SUBREG
3809 && GET_CODE (reg = SUBREG_REG (XEXP (op, 0))) == REG))))
3810 && REGNO_POINTER_ALIGN (REGNO (reg)) >= 32);
3813 /* Similar to s_register_operand, but does not allow hard integer
3814 registers. */
3816 f_register_operand (rtx op, enum machine_mode mode)
3818 if (GET_MODE (op) != mode && mode != VOIDmode)
3819 return 0;
3821 if (GET_CODE (op) == SUBREG)
3822 op = SUBREG_REG (op);
3824 /* We don't consider registers whose class is NO_REGS
3825 to be a register operand. */
3826 return (GET_CODE (op) == REG
3827 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3828 || REGNO_REG_CLASS (REGNO (op)) == FPA_REGS));
3831 /* Return TRUE for valid operands for the rhs of an FPA instruction. */
3833 fpa_rhs_operand (rtx op, enum machine_mode mode)
3835 if (s_register_operand (op, mode))
3836 return TRUE;
3838 if (GET_MODE (op) != mode && mode != VOIDmode)
3839 return FALSE;
3841 if (GET_CODE (op) == CONST_DOUBLE)
3842 return const_double_rtx_ok_for_fpa (op);
3844 return FALSE;
3848 fpa_add_operand (rtx op, enum machine_mode mode)
3850 if (s_register_operand (op, mode))
3851 return TRUE;
3853 if (GET_MODE (op) != mode && mode != VOIDmode)
3854 return FALSE;
3856 if (GET_CODE (op) == CONST_DOUBLE)
3857 return (const_double_rtx_ok_for_fpa (op)
3858 || neg_const_double_rtx_ok_for_fpa (op));
3860 return FALSE;
3863 /* Return nonzero if OP is a valid Cirrus memory address pattern. */
3865 cirrus_memory_offset (rtx op)
3867 /* Reject eliminable registers. */
3868 if (! (reload_in_progress || reload_completed)
3869 && ( reg_mentioned_p (frame_pointer_rtx, op)
3870 || reg_mentioned_p (arg_pointer_rtx, op)
3871 || reg_mentioned_p (virtual_incoming_args_rtx, op)
3872 || reg_mentioned_p (virtual_outgoing_args_rtx, op)
3873 || reg_mentioned_p (virtual_stack_dynamic_rtx, op)
3874 || reg_mentioned_p (virtual_stack_vars_rtx, op)))
3875 return 0;
3877 if (GET_CODE (op) == MEM)
3879 rtx ind;
3881 ind = XEXP (op, 0);
3883 /* Match: (mem (reg)). */
3884 if (GET_CODE (ind) == REG)
3885 return 1;
3887 /* Match:
3888 (mem (plus (reg)
3889 (const))). */
3890 if (GET_CODE (ind) == PLUS
3891 && GET_CODE (XEXP (ind, 0)) == REG
3892 && REG_MODE_OK_FOR_BASE_P (XEXP (ind, 0), VOIDmode)
3893 && GET_CODE (XEXP (ind, 1)) == CONST_INT)
3894 return 1;
3897 return 0;
3900 /* Return nonzero if OP is a Cirrus or general register. */
3902 cirrus_register_operand (rtx op, enum machine_mode mode)
3904 if (GET_MODE (op) != mode && mode != VOIDmode)
3905 return FALSE;
3907 if (GET_CODE (op) == SUBREG)
3908 op = SUBREG_REG (op);
3910 return (GET_CODE (op) == REG
3911 && (REGNO_REG_CLASS (REGNO (op)) == CIRRUS_REGS
3912 || REGNO_REG_CLASS (REGNO (op)) == GENERAL_REGS));
3915 /* Return nonzero if OP is a Cirrus FP register. */
3917 cirrus_fp_register (rtx op, enum machine_mode mode)
3919 if (GET_MODE (op) != mode && mode != VOIDmode)
3920 return FALSE;
3922 if (GET_CODE (op) == SUBREG)
3923 op = SUBREG_REG (op);
3925 return (GET_CODE (op) == REG
3926 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3927 || REGNO_REG_CLASS (REGNO (op)) == CIRRUS_REGS));
3930 /* Return nonzero if OP is a 6-bit constant (0..63). */
3932 cirrus_shift_const (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
3934 return (GET_CODE (op) == CONST_INT
3935 && INTVAL (op) >= 0
3936 && INTVAL (op) < 64);
3939 /* Returns TRUE if INSN is an "LDR REG, ADDR" instruction.
3940 Used by the Cirrus Maverick code, which has to work around
3941 a hardware bug triggered by such instructions. */
3942 static bool
3943 arm_memory_load_p (rtx insn)
3945 rtx body, lhs, rhs;
3947 if (insn == NULL_RTX || GET_CODE (insn) != INSN)
3948 return false;
3950 body = PATTERN (insn);
3952 if (GET_CODE (body) != SET)
3953 return false;
3955 lhs = XEXP (body, 0);
3956 rhs = XEXP (body, 1);
3958 lhs = REG_OR_SUBREG_RTX (lhs);
3960 /* If the destination is not a general purpose
3961 register we do not have to worry. */
3962 if (GET_CODE (lhs) != REG
3963 || REGNO_REG_CLASS (REGNO (lhs)) != GENERAL_REGS)
3964 return false;
3966 /* As well as loads from memory we also have to react
3967 to loads of invalid constants which will be turned
3968 into loads from the minipool. */
3969 return (GET_CODE (rhs) == MEM
3970 || GET_CODE (rhs) == SYMBOL_REF
3971 || note_invalid_constants (insn, -1, false));
3974 /* Return TRUE if INSN is a Cirrus instruction. */
3975 static bool
3976 arm_cirrus_insn_p (rtx insn)
3978 enum attr_cirrus attr;
3980 /* get_attr aborts on USE and CLOBBER. */
3981 if (!insn
3982 || GET_CODE (insn) != INSN
3983 || GET_CODE (PATTERN (insn)) == USE
3984 || GET_CODE (PATTERN (insn)) == CLOBBER)
3985 return 0;
3987 attr = get_attr_cirrus (insn);
3989 return attr != CIRRUS_NOT;
3992 /* Cirrus reorg for invalid instruction combinations. */
3993 static void
3994 cirrus_reorg (rtx first)
3996 enum attr_cirrus attr;
3997 rtx body = PATTERN (first);
3998 rtx t;
3999 int nops;
4001 /* Any branch must be followed by 2 non-Cirrus instructions. */
4002 if (GET_CODE (first) == JUMP_INSN && GET_CODE (body) != RETURN)
4004 nops = 0;
4005 t = next_nonnote_insn (first);
4007 if (arm_cirrus_insn_p (t))
4008 ++ nops;
4010 if (arm_cirrus_insn_p (next_nonnote_insn (t)))
4011 ++ nops;
4013 while (nops --)
4014 emit_insn_after (gen_nop (), first);
4016 return;
4019 /* (float (blah)) is in parallel with a clobber. */
4020 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
4021 body = XVECEXP (body, 0, 0);
4023 if (GET_CODE (body) == SET)
4025 rtx lhs = XEXP (body, 0), rhs = XEXP (body, 1);
4027 /* cfldrd, cfldr64, cfstrd, cfstr64 must
4028 be followed by a non-Cirrus insn. */
4029 if (get_attr_cirrus (first) == CIRRUS_DOUBLE)
4031 if (arm_cirrus_insn_p (next_nonnote_insn (first)))
4032 emit_insn_after (gen_nop (), first);
4034 return;
4036 else if (arm_memory_load_p (first))
4038 unsigned int arm_regno;
4040 /* Any ldr/cfmvdlr, ldr/cfmvdhr, ldr/cfmvsr, ldr/cfmv64lr,
4041 ldr/cfmv64hr combination where the Rd field is the same
4042 in both instructions must be split with a non-Cirrus
4043 insn. Example:
4045 ldr r0, blah
4047 cfmvsr mvf0, r0. */
4049 /* Get Arm register number for ldr insn. */
4050 if (GET_CODE (lhs) == REG)
4051 arm_regno = REGNO (lhs);
4052 else if (GET_CODE (rhs) == REG)
4053 arm_regno = REGNO (rhs);
4054 else
4055 abort ();
4057 /* Next insn. */
4058 first = next_nonnote_insn (first);
4060 if (! arm_cirrus_insn_p (first))
4061 return;
4063 body = PATTERN (first);
4065 /* (float (blah)) is in parallel with a clobber. */
4066 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0))
4067 body = XVECEXP (body, 0, 0);
4069 if (GET_CODE (body) == FLOAT)
4070 body = XEXP (body, 0);
4072 if (get_attr_cirrus (first) == CIRRUS_MOVE
4073 && GET_CODE (XEXP (body, 1)) == REG
4074 && arm_regno == REGNO (XEXP (body, 1)))
4075 emit_insn_after (gen_nop (), first);
4077 return;
4081 /* get_attr aborts on USE and CLOBBER. */
4082 if (!first
4083 || GET_CODE (first) != INSN
4084 || GET_CODE (PATTERN (first)) == USE
4085 || GET_CODE (PATTERN (first)) == CLOBBER)
4086 return;
4088 attr = get_attr_cirrus (first);
4090 /* Any coprocessor compare instruction (cfcmps, cfcmpd, ...)
4091 must be followed by a non-coprocessor instruction. */
4092 if (attr == CIRRUS_COMPARE)
4094 nops = 0;
4096 t = next_nonnote_insn (first);
4098 if (arm_cirrus_insn_p (t))
4099 ++ nops;
4101 if (arm_cirrus_insn_p (next_nonnote_insn (t)))
4102 ++ nops;
4104 while (nops --)
4105 emit_insn_after (gen_nop (), first);
4107 return;
4111 /* Return nonzero if OP is a constant power of two. */
4113 power_of_two_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
4115 if (GET_CODE (op) == CONST_INT)
4117 HOST_WIDE_INT value = INTVAL (op);
4119 return value != 0 && (value & (value - 1)) == 0;
4122 return FALSE;
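/* Illustrative aside, not part of GCC and guarded out of the build:
   the test above is the standard bit trick -- subtracting 1 from a
   power of two clears its single set bit, so ANDing the two yields
   zero only for powers of two (and for zero, which the first clause
   excludes).  A standalone check:  */
#if 0
#include <stdio.h>

static int is_power_of_two (long value)
{
  return value != 0 && (value & (value - 1)) == 0;
}

int main (void)
{
  long tests[] = { 0, 1, 2, 3, 4, 12, 64 };
  int i;

  /* Prints "0 1 1 0 1 0 1".  */
  for (i = 0; i < 7; i++)
    printf ("%d ", is_power_of_two (tests[i]));
  printf ("\n");
  return 0;
}
#endif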
4125 /* Return TRUE for a valid operand of a DImode operation.
4126 Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
4127 Note that this disallows MEM(REG+REG), but allows
4128 MEM(PRE/POST_INC/DEC(REG)). */
4130 di_operand (rtx op, enum machine_mode mode)
4132 if (s_register_operand (op, mode))
4133 return TRUE;
4135 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
4136 return FALSE;
4138 if (GET_CODE (op) == SUBREG)
4139 op = SUBREG_REG (op);
4141 switch (GET_CODE (op))
4143 case CONST_DOUBLE:
4144 case CONST_INT:
4145 return TRUE;
4147 case MEM:
4148 return memory_address_p (DImode, XEXP (op, 0));
4150 default:
4151 return FALSE;
4155 /* Like di_operand, but don't accept constants. */
4157 nonimmediate_di_operand (rtx op, enum machine_mode mode)
4159 if (s_register_operand (op, mode))
4160 return TRUE;
4162 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
4163 return FALSE;
4165 if (GET_CODE (op) == SUBREG)
4166 op = SUBREG_REG (op);
4168 if (GET_CODE (op) == MEM)
4169 return memory_address_p (DImode, XEXP (op, 0));
4171 return FALSE;
4174 /* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
4175 Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
4176 Note that this disallows MEM(REG+REG), but allows
4177 MEM(PRE/POST_INC/DEC(REG)). */
4179 soft_df_operand (rtx op, enum machine_mode mode)
4181 if (s_register_operand (op, mode))
4182 return TRUE;
4184 if (mode != VOIDmode && GET_MODE (op) != mode)
4185 return FALSE;
4187 if (GET_CODE (op) == SUBREG && CONSTANT_P (SUBREG_REG (op)))
4188 return FALSE;
4190 if (GET_CODE (op) == SUBREG)
4191 op = SUBREG_REG (op);
4193 switch (GET_CODE (op))
4195 case CONST_DOUBLE:
4196 return TRUE;
4198 case MEM:
4199 return memory_address_p (DFmode, XEXP (op, 0));
4201 default:
4202 return FALSE;
4206 /* Like soft_df_operand, but don't accept constants. */
4208 nonimmediate_soft_df_operand (rtx op, enum machine_mode mode)
4210 if (s_register_operand (op, mode))
4211 return TRUE;
4213 if (mode != VOIDmode && GET_MODE (op) != mode)
4214 return FALSE;
4216 if (GET_CODE (op) == SUBREG)
4217 op = SUBREG_REG (op);
4219 if (GET_CODE (op) == MEM)
4220 return memory_address_p (DFmode, XEXP (op, 0));
4221 return FALSE;
4224 /* Return TRUE for valid index operands. */
4226 index_operand (rtx op, enum machine_mode mode)
4228 return (s_register_operand (op, mode)
4229 || (immediate_operand (op, mode)
4230 && (GET_CODE (op) != CONST_INT
4231 || (INTVAL (op) < 4096 && INTVAL (op) > -4096))));
4234 /* Return TRUE for valid shifts by a constant. This also accepts any
4235 power of two on the (somewhat overly relaxed) assumption that the
4236 shift operator in this case was a mult. */
4238 const_shift_operand (rtx op, enum machine_mode mode)
4240 return (power_of_two_operand (op, mode)
4241 || (immediate_operand (op, mode)
4242 && (GET_CODE (op) != CONST_INT
4243 || (INTVAL (op) < 32 && INTVAL (op) > 0))));
4246 /* Return TRUE for arithmetic operators which can be combined with a multiply
4247 (shift). */
4249 shiftable_operator (rtx x, enum machine_mode mode)
4251 enum rtx_code code;
4253 if (GET_MODE (x) != mode)
4254 return FALSE;
4256 code = GET_CODE (x);
4258 return (code == PLUS || code == MINUS
4259 || code == IOR || code == XOR || code == AND);
4262 /* Return TRUE for binary logical operators. */
4264 logical_binary_operator (rtx x, enum machine_mode mode)
4266 enum rtx_code code;
4268 if (GET_MODE (x) != mode)
4269 return FALSE;
4271 code = GET_CODE (x);
4273 return (code == IOR || code == XOR || code == AND);
4276 /* Return TRUE for shift operators. */
4278 shift_operator (rtx x,enum machine_mode mode)
4280 enum rtx_code code;
4282 if (GET_MODE (x) != mode)
4283 return FALSE;
4285 code = GET_CODE (x);
4287 if (code == MULT)
4288 return power_of_two_operand (XEXP (x, 1), mode);
4290 return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
4291 || code == ROTATERT);
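/* Illustrative aside, not part of GCC and guarded out of the build:
   a MULT by a power of two is accepted above because RTL frequently
   canonicalizes a left shift by a constant as such a multiply
   (notably inside addresses).  A quick standalone check of the
   underlying equivalence x * 2^n == x << n:  */
#if 0
#include <stdio.h>

int main (void)
{
  unsigned int x = 0x1234;
  int n;

  for (n = 0; n < 8; n++)
    printf ("%u * %u = %u  ==  %u << %d = %u\n",
            x, 1u << n, x * (1u << n), x, n, x << n);
  return 0;
}
#endif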
4294 /* Return TRUE if x is EQ or NE. */
4296 equality_operator (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
4298 return GET_CODE (x) == EQ || GET_CODE (x) == NE;
4301 /* Return TRUE if x is a comparison operator other than LTGT or UNEQ. */
4303 arm_comparison_operator (rtx x, enum machine_mode mode)
4305 return (comparison_operator (x, mode)
4306 && GET_CODE (x) != LTGT
4307 && GET_CODE (x) != UNEQ);
4310 /* Return TRUE for SMIN SMAX UMIN UMAX operators. */
4312 minmax_operator (rtx x, enum machine_mode mode)
4314 enum rtx_code code = GET_CODE (x);
4316 if (GET_MODE (x) != mode)
4317 return FALSE;
4319 return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
4322 /* Return TRUE if this is the condition code register; if we aren't given
4323 a mode, accept any class CCmode register. */
4325 cc_register (rtx x, enum machine_mode mode)
4327 if (mode == VOIDmode)
4329 mode = GET_MODE (x);
4331 if (GET_MODE_CLASS (mode) != MODE_CC)
4332 return FALSE;
4335 if ( GET_MODE (x) == mode
4336 && GET_CODE (x) == REG
4337 && REGNO (x) == CC_REGNUM)
4338 return TRUE;
4340 return FALSE;
4343 /* Return TRUE if this is the condition code register; if we aren't given
4344 a mode, accept any class CCmode register which indicates a dominance
4345 expression. */
4347 dominant_cc_register (rtx x, enum machine_mode mode)
4349 if (mode == VOIDmode)
4351 mode = GET_MODE (x);
4353 if (GET_MODE_CLASS (mode) != MODE_CC)
4354 return FALSE;
4357 if (mode != CC_DNEmode && mode != CC_DEQmode
4358 && mode != CC_DLEmode && mode != CC_DLTmode
4359 && mode != CC_DGEmode && mode != CC_DGTmode
4360 && mode != CC_DLEUmode && mode != CC_DLTUmode
4361 && mode != CC_DGEUmode && mode != CC_DGTUmode)
4362 return FALSE;
4364 return cc_register (x, mode);
4367 /* Return TRUE if X references a SYMBOL_REF. */
4369 symbol_mentioned_p (rtx x)
4371 const char * fmt;
4372 int i;
4374 if (GET_CODE (x) == SYMBOL_REF)
4375 return 1;
4377 fmt = GET_RTX_FORMAT (GET_CODE (x));
4379 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4381 if (fmt[i] == 'E')
4383 int j;
4385 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4386 if (symbol_mentioned_p (XVECEXP (x, i, j)))
4387 return 1;
4389 else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
4390 return 1;
4393 return 0;
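/* Illustrative aside, not part of GCC and guarded out of the build:
   the loop above is the standard RTL traversal idiom -- look up the
   format string for the rtx code and recurse into 'e' (expression)
   and 'E' (vector) operands.  A toy model of the same shape over a
   hypothetical node type, not GCC's rtx:  */
#if 0
#include <stdio.h>
#include <string.h>

struct node
{
  const char *kind;             /* "symbol", "const_int", "plus", ... */
  struct node *ops[2];          /* sub-expressions, may be null */
  int nops;
};

static int mentions_symbol (struct node *x)
{
  int i;

  if (x == NULL)
    return 0;
  if (strcmp (x->kind, "symbol") == 0)
    return 1;
  /* Recurse into every sub-expression.  */
  for (i = 0; i < x->nops; i++)
    if (mentions_symbol (x->ops[i]))
      return 1;
  return 0;
}

int main (void)
{
  struct node sym = { "symbol", { NULL, NULL }, 0 };
  struct node cst = { "const_int", { NULL, NULL }, 0 };
  struct node sum = { "plus", { &sym, &cst }, 2 };

  /* Prints "1 0".  */
  printf ("%d %d\n", mentions_symbol (&sum), mentions_symbol (&cst));
  return 0;
}
#endif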
4396 /* Return TRUE if X references a LABEL_REF. */
4398 label_mentioned_p (rtx x)
4400 const char * fmt;
4401 int i;
4403 if (GET_CODE (x) == LABEL_REF)
4404 return 1;
4406 fmt = GET_RTX_FORMAT (GET_CODE (x));
4407 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4409 if (fmt[i] == 'E')
4411 int j;
4413 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4414 if (label_mentioned_p (XVECEXP (x, i, j)))
4415 return 1;
4417 else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
4418 return 1;
4421 return 0;
4424 enum rtx_code
4425 minmax_code (rtx x)
4427 enum rtx_code code = GET_CODE (x);
4429 if (code == SMAX)
4430 return GE;
4431 else if (code == SMIN)
4432 return LE;
4433 else if (code == UMIN)
4434 return LEU;
4435 else if (code == UMAX)
4436 return GEU;
4438 abort ();
4441 /* Return 1 if memory locations are adjacent. */
4443 adjacent_mem_locations (rtx a, rtx b)
4445 if ((GET_CODE (XEXP (a, 0)) == REG
4446 || (GET_CODE (XEXP (a, 0)) == PLUS
4447 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
4448 && (GET_CODE (XEXP (b, 0)) == REG
4449 || (GET_CODE (XEXP (b, 0)) == PLUS
4450 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
4452 int val0 = 0, val1 = 0;
4453 int reg0, reg1;
4455 if (GET_CODE (XEXP (a, 0)) == PLUS)
4457 reg0 = REGNO (XEXP (XEXP (a, 0), 0));
4458 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
4460 else
4461 reg0 = REGNO (XEXP (a, 0));
4463 if (GET_CODE (XEXP (b, 0)) == PLUS)
4465 reg1 = REGNO (XEXP (XEXP (b, 0), 0));
4466 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
4468 else
4469 reg1 = REGNO (XEXP (b, 0));
4471 /* Don't accept any offset that will require multiple
4472 instructions to handle, since this would cause the
4473 arith_adjacentmem pattern to output an overlong sequence. */
4474 if (!const_ok_for_op (PLUS, val0) || !const_ok_for_op (PLUS, val1))
4475 return 0;
4477 return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
4479 return 0;
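/* Illustrative aside, not part of GCC and guarded out of the build:
   a simplified model of the adjacency test above -- two addresses of
   the form base+offset are adjacent when the base registers match
   and the offsets differ by exactly one word, in either order:  */
#if 0
#include <stdio.h>

struct addr { int basereg; int offset; };

static int adjacent (struct addr a, struct addr b)
{
  return a.basereg == b.basereg
         && (b.offset - a.offset == 4 || a.offset - b.offset == 4);
}

int main (void)
{
  struct addr a = { 1, 8 }, b = { 1, 12 }, c = { 2, 12 };

  /* Prints "1 0": A and B are adjacent; A and C use different bases.  */
  printf ("%d %d\n", adjacent (a, b), adjacent (a, c));
  return 0;
}
#endif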
4482 /* Return 1 if OP is a load multiple operation. It is known to be
4483 parallel and the first section will be tested. */
4485 load_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
4487 HOST_WIDE_INT count = XVECLEN (op, 0);
4488 int dest_regno;
4489 rtx src_addr;
4490 HOST_WIDE_INT i = 1, base = 0;
4491 rtx elt;
4493 if (count <= 1
4494 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
4495 return 0;
4497 /* Check to see if this might be a write-back. */
4498 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
4500 i++;
4501 base = 1;
4503 /* Now check it more carefully. */
4504 if (GET_CODE (SET_DEST (elt)) != REG
4505 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
4506 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
4507 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
4508 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
4509 return 0;
4512 /* Perform a quick check so we don't blow up below. */
4513 if (count <= i
4514 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
4515 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
4516 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
4517 return 0;
4519 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
4520 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);
4522 for (; i < count; i++)
4524 elt = XVECEXP (op, 0, i);
4526 if (GET_CODE (elt) != SET
4527 || GET_CODE (SET_DEST (elt)) != REG
4528 || GET_MODE (SET_DEST (elt)) != SImode
4529 || REGNO (SET_DEST (elt)) != (unsigned int)(dest_regno + i - base)
4530 || GET_CODE (SET_SRC (elt)) != MEM
4531 || GET_MODE (SET_SRC (elt)) != SImode
4532 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
4533 || !rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
4534 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
4535 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
4536 return 0;
4539 return 1;
4542 /* Return 1 if OP is a store multiple operation. It is known to be
4543 parallel and the first section will be tested. */
4545 store_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
4547 HOST_WIDE_INT count = XVECLEN (op, 0);
4548 int src_regno;
4549 rtx dest_addr;
4550 HOST_WIDE_INT i = 1, base = 0;
4551 rtx elt;
4553 if (count <= 1
4554 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
4555 return 0;
4557 /* Check to see if this might be a write-back. */
4558 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
4560 i++;
4561 base = 1;
4563 /* Now check it more carefully. */
4564 if (GET_CODE (SET_DEST (elt)) != REG
4565 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
4566 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
4567 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
4568 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
4569 return 0;
4572 /* Perform a quick check so we don't blow up below. */
4573 if (count <= i
4574 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
4575 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
4576 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
4577 return 0;
4579 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
4580 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);
4582 for (; i < count; i++)
4584 elt = XVECEXP (op, 0, i);
4586 if (GET_CODE (elt) != SET
4587 || GET_CODE (SET_SRC (elt)) != REG
4588 || GET_MODE (SET_SRC (elt)) != SImode
4589 || REGNO (SET_SRC (elt)) != (unsigned int)(src_regno + i - base)
4590 || GET_CODE (SET_DEST (elt)) != MEM
4591 || GET_MODE (SET_DEST (elt)) != SImode
4592 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
4593 || !rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
4594 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
4595 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
4596 return 0;
4599 return 1;
4603 load_multiple_sequence (rtx *operands, int nops, int *regs, int *base,
4604 HOST_WIDE_INT *load_offset)
4606 int unsorted_regs[4];
4607 HOST_WIDE_INT unsorted_offsets[4];
4608 int order[4];
4609 int base_reg = -1;
4610 int i;
4612 /* Can only handle 2, 3, or 4 insns at present,
4613 though could be easily extended if required. */
4614 if (nops < 2 || nops > 4)
4615 abort ();
4617 /* Loop over the operands and check that the memory references are
4618 suitable (i.e. immediate offsets from the same base register). At
4619 the same time, extract the target register, and the memory
4620 offsets. */
4621 for (i = 0; i < nops; i++)
4623 rtx reg;
4624 rtx offset;
4626 /* Convert a subreg of a mem into the mem itself. */
4627 if (GET_CODE (operands[nops + i]) == SUBREG)
4628 operands[nops + i] = alter_subreg (operands + (nops + i));
4630 if (GET_CODE (operands[nops + i]) != MEM)
4631 abort ();
4633 /* Don't reorder volatile memory references; it doesn't seem worth
4634 looking for the case where the order is ok anyway. */
4635 if (MEM_VOLATILE_P (operands[nops + i]))
4636 return 0;
4638 offset = const0_rtx;
4640 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
4641 || (GET_CODE (reg) == SUBREG
4642 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4643 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
4644 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
4645 == REG)
4646 || (GET_CODE (reg) == SUBREG
4647 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4648 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
4649 == CONST_INT)))
4651 if (i == 0)
4653 base_reg = REGNO (reg);
4654 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
4655 ? REGNO (operands[i])
4656 : REGNO (SUBREG_REG (operands[i])));
4657 order[0] = 0;
4659 else
4661 if (base_reg != (int) REGNO (reg))
4662 /* Not addressed from the same base register. */
4663 return 0;
4665 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
4666 ? REGNO (operands[i])
4667 : REGNO (SUBREG_REG (operands[i])));
4668 if (unsorted_regs[i] < unsorted_regs[order[0]])
4669 order[0] = i;
4672 /* If it isn't an integer register, or if it overwrites the
4673 base register but isn't the last insn in the list, then
4674 we can't do this. */
4675 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14
4676 || (i != nops - 1 && unsorted_regs[i] == base_reg))
4677 return 0;
4679 unsorted_offsets[i] = INTVAL (offset);
4681 else
4682 /* Not a suitable memory address. */
4683 return 0;
4686 /* All the useful information has now been extracted from the
4687 operands into unsorted_regs and unsorted_offsets; additionally,
4688 order[0] has been set to the lowest numbered register in the
4689 list. Sort the registers into order, and check that the memory
4690 offsets are ascending and adjacent. */
4692 for (i = 1; i < nops; i++)
4694 int j;
4696 order[i] = order[i - 1];
4697 for (j = 0; j < nops; j++)
4698 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
4699 && (order[i] == order[i - 1]
4700 || unsorted_regs[j] < unsorted_regs[order[i]]))
4701 order[i] = j;
4703 /* Have we found a suitable register? If not, one must be used more
4704 than once. */
4705 if (order[i] == order[i - 1])
4706 return 0;
4708 /* Is the memory address adjacent and ascending? */
4709 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
4710 return 0;
4713 if (base)
4715 *base = base_reg;
4717 for (i = 0; i < nops; i++)
4718 regs[i] = unsorted_regs[order[i]];
4720 *load_offset = unsorted_offsets[order[0]];
4723 if (unsorted_offsets[order[0]] == 0)
4724 return 1; /* ldmia */
4726 if (unsorted_offsets[order[0]] == 4)
4727 return 2; /* ldmib */
4729 if (unsorted_offsets[order[nops - 1]] == 0)
4730 return 3; /* ldmda */
4732 if (unsorted_offsets[order[nops - 1]] == -4)
4733 return 4; /* ldmdb */
4735 /* For ARM8,9 & StrongARM, 2 ldr instructions are faster than an ldm
4736 if the offset isn't small enough. The reason 2 ldrs are faster
4737 is because these ARMs are able to do more than one cache access
4738 in a single cycle. The ARM9 and StrongARM have Harvard caches,
4739 whilst the ARM8 has a double bandwidth cache. This means that
4740 these cores can do both an instruction fetch and a data fetch in
4741 a single cycle, so the trick of calculating the address into a
4742 scratch register (one of the result regs) and then doing a load
4743 multiple actually becomes slower (and no smaller in code size).
4744 That is the transformation
4746 ldr rd1, [rbase + offset]
4747 ldr rd2, [rbase + offset + 4]
4749 to
4751 add rd1, rbase, offset
4752 ldmia rd1, {rd1, rd2}
4754 produces worse code -- '3 cycles + any stalls on rd2' instead of
4755 '2 cycles + any stalls on rd2'. On ARMs with only one cache
4756 access per cycle, the first sequence could never complete in less
4757 than 6 cycles, whereas the ldm sequence would only take 5 and
4758 would make better use of sequential accesses if not hitting the
4759 cache.
4761 We cheat here and test 'arm_ld_sched' which we currently know to
4762 only be true for the ARM8, ARM9 and StrongARM. If this ever
4763 changes, then the test below needs to be reworked. */
4764 if (nops == 2 && arm_ld_sched)
4765 return 0;
4767 /* Can't do it without setting up the offset; only do this if it takes
4768 no more than one insn. */
4769 return (const_ok_for_arm (unsorted_offsets[order[0]])
4770 || const_ok_for_arm (-unsorted_offsets[order[0]])) ? 5 : 0;
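/* Illustrative aside, not part of GCC and guarded out of the build:
   once the offsets are known to ascend in steps of 4, the return
   codes above (1..5) follow from where the run sits relative to the
   base register.  A simplified classifier over the first and last
   offsets of the run:  */
#if 0
#include <stdio.h>

static const char *classify (long first, long last)
{
  if (first == 0)
    return "ldmia";             /* code 1: increment after */
  if (first == 4)
    return "ldmib";             /* code 2: increment before */
  if (last == 0)
    return "ldmda";             /* code 3: decrement after */
  if (last == -4)
    return "ldmdb";             /* code 4: decrement before */
  return "add+ldmia";           /* code 5: explicit base adjustment */
}

int main (void)
{
  /* Prints "ldmia ldmib ldmda ldmdb add+ldmia".  */
  printf ("%s %s %s %s %s\n",
          classify (0, 12), classify (4, 16), classify (-12, 0),
          classify (-16, -4), classify (32, 44));
  return 0;
}
#endif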
4773 const char *
4774 emit_ldm_seq (rtx *operands, int nops)
4776 int regs[4];
4777 int base_reg;
4778 HOST_WIDE_INT offset;
4779 char buf[100];
4780 int i;
4782 switch (load_multiple_sequence (operands, nops, regs, &base_reg, &offset))
4784 case 1:
4785 strcpy (buf, "ldm%?ia\t");
4786 break;
4788 case 2:
4789 strcpy (buf, "ldm%?ib\t");
4790 break;
4792 case 3:
4793 strcpy (buf, "ldm%?da\t");
4794 break;
4796 case 4:
4797 strcpy (buf, "ldm%?db\t");
4798 break;
4800 case 5:
4801 if (offset >= 0)
4802 sprintf (buf, "add%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
4803 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
4804 (long) offset);
4805 else
4806 sprintf (buf, "sub%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
4807 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
4808 (long) -offset);
4809 output_asm_insn (buf, operands);
4810 base_reg = regs[0];
4811 strcpy (buf, "ldm%?ia\t");
4812 break;
4814 default:
4815 abort ();
4818 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
4819 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
4821 for (i = 1; i < nops; i++)
4822 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
4823 reg_names[regs[i]]);
4825 strcat (buf, "}\t%@ phole ldm");
4827 output_asm_insn (buf, operands);
4828 return "";
4832 store_multiple_sequence (rtx *operands, int nops, int *regs, int *base,
4833 HOST_WIDE_INT * load_offset)
4835 int unsorted_regs[4];
4836 HOST_WIDE_INT unsorted_offsets[4];
4837 int order[4];
4838 int base_reg = -1;
4839 int i;
4841 /* Can only handle 2, 3, or 4 insns at present, though could be easily
4842 extended if required. */
4843 if (nops < 2 || nops > 4)
4844 abort ();
4846 /* Loop over the operands and check that the memory references are
4847 suitable (i.e. immediate offsets from the same base register). At
4848 the same time, extract the target register, and the memory
4849 offsets. */
4850 for (i = 0; i < nops; i++)
4852 rtx reg;
4853 rtx offset;
4855 /* Convert a subreg of a mem into the mem itself. */
4856 if (GET_CODE (operands[nops + i]) == SUBREG)
4857 operands[nops + i] = alter_subreg (operands + (nops + i));
4859 if (GET_CODE (operands[nops + i]) != MEM)
4860 abort ();
4862 /* Don't reorder volatile memory references; it doesn't seem worth
4863 looking for the case where the order is ok anyway. */
4864 if (MEM_VOLATILE_P (operands[nops + i]))
4865 return 0;
4867 offset = const0_rtx;
4869 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
4870 || (GET_CODE (reg) == SUBREG
4871 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4872 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
4873 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
4874 == REG)
4875 || (GET_CODE (reg) == SUBREG
4876 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4877 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
4878 == CONST_INT)))
4880 if (i == 0)
4882 base_reg = REGNO (reg);
4883 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
4884 ? REGNO (operands[i])
4885 : REGNO (SUBREG_REG (operands[i])));
4886 order[0] = 0;
4888 else
4890 if (base_reg != (int) REGNO (reg))
4891 /* Not addressed from the same base register. */
4892 return 0;
4894 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
4895 ? REGNO (operands[i])
4896 : REGNO (SUBREG_REG (operands[i])));
4897 if (unsorted_regs[i] < unsorted_regs[order[0]])
4898 order[0] = i;
4901 /* If it isn't an integer register, then we can't do this. */
4902 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14)
4903 return 0;
4905 unsorted_offsets[i] = INTVAL (offset);
4907 else
4908 /* Not a suitable memory address. */
4909 return 0;
4912 /* All the useful information has now been extracted from the
4913 operands into unsorted_regs and unsorted_offsets; additionally,
4914 order[0] has been set to the lowest numbered register in the
4915 list. Sort the registers into order, and check that the memory
4916 offsets are ascending and adjacent. */
4918 for (i = 1; i < nops; i++)
4920 int j;
4922 order[i] = order[i - 1];
4923 for (j = 0; j < nops; j++)
4924 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
4925 && (order[i] == order[i - 1]
4926 || unsorted_regs[j] < unsorted_regs[order[i]]))
4927 order[i] = j;
4929 /* Have we found a suitable register? If not, one must be used more
4930 than once. */
4931 if (order[i] == order[i - 1])
4932 return 0;
4934 /* Is the memory address adjacent and ascending? */
4935 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
4936 return 0;
4939 if (base)
4941 *base = base_reg;
4943 for (i = 0; i < nops; i++)
4944 regs[i] = unsorted_regs[order[i]];
4946 *load_offset = unsorted_offsets[order[0]];
4949 if (unsorted_offsets[order[0]] == 0)
4950 return 1; /* stmia */
4952 if (unsorted_offsets[order[0]] == 4)
4953 return 2; /* stmib */
4955 if (unsorted_offsets[order[nops - 1]] == 0)
4956 return 3; /* stmda */
4958 if (unsorted_offsets[order[nops - 1]] == -4)
4959 return 4; /* stmdb */
4961 return 0;
4964 const char *
4965 emit_stm_seq (rtx *operands, int nops)
4967 int regs[4];
4968 int base_reg;
4969 HOST_WIDE_INT offset;
4970 char buf[100];
4971 int i;
4973 switch (store_multiple_sequence (operands, nops, regs, &base_reg, &offset))
4975 case 1:
4976 strcpy (buf, "stm%?ia\t");
4977 break;
4979 case 2:
4980 strcpy (buf, "stm%?ib\t");
4981 break;
4983 case 3:
4984 strcpy (buf, "stm%?da\t");
4985 break;
4987 case 4:
4988 strcpy (buf, "stm%?db\t");
4989 break;
4991 default:
4992 abort ();
4995 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
4996 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
4998 for (i = 1; i < nops; i++)
4999 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
5000 reg_names[regs[i]]);
5002 strcat (buf, "}\t%@ phole stm");
5004 output_asm_insn (buf, operands);
5005 return "";
5009 multi_register_push (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
5011 if (GET_CODE (op) != PARALLEL
5012 || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
5013 || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
5014 || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != UNSPEC_PUSH_MULT))
5015 return 0;
5017 return 1;
5020 /* Routines for use in generating RTL. */
5023 arm_gen_load_multiple (int base_regno, int count, rtx from, int up,
5024 int write_back, int unchanging_p, int in_struct_p,
5025 int scalar_p)
5027 int i = 0, j;
5028 rtx result;
5029 int sign = up ? 1 : -1;
5030 rtx mem;
5032 /* XScale has load-store double instructions, but they have stricter
5033 alignment requirements than load-store multiple, so we cannot
5034 use them.
5036 For XScale ldm requires 2 + NREGS cycles to complete and blocks
5037 the pipeline until completion.
5039 NREGS CYCLES
5040 1 3
5041 2 4
5042 3 5
5043 4 6
5045 An ldr instruction takes 1-3 cycles, but does not block the
5046 pipeline.
5048 NREGS CYCLES
5049 1 1-3
5050 2 2-6
5051 3 3-9
5052 4 4-12
5054 Best case ldr will always win. However, the more ldr instructions
5055 we issue, the less likely we are to be able to schedule them well.
5056 Using ldr instructions also increases code size.
5058 As a compromise, we use ldr for counts of 1 or 2 regs, and ldm
5059 for counts of 3 or 4 regs. */
5060 if (arm_tune_xscale && count <= 2 && ! optimize_size)
5062 rtx seq;
5064 start_sequence ();
5066 for (i = 0; i < count; i++)
5068 mem = gen_rtx_MEM (SImode, plus_constant (from, i * 4 * sign));
5069 RTX_UNCHANGING_P (mem) = unchanging_p;
5070 MEM_IN_STRUCT_P (mem) = in_struct_p;
5071 MEM_SCALAR_P (mem) = scalar_p;
5072 emit_move_insn (gen_rtx_REG (SImode, base_regno + i), mem);
5075 if (write_back)
5076 emit_move_insn (from, plus_constant (from, count * 4 * sign));
5078 seq = get_insns ();
5079 end_sequence ();
5081 return seq;
5084 result = gen_rtx_PARALLEL (VOIDmode,
5085 rtvec_alloc (count + (write_back ? 1 : 0)));
5086 if (write_back)
5088 XVECEXP (result, 0, 0)
5089 = gen_rtx_SET (GET_MODE (from), from,
5090 plus_constant (from, count * 4 * sign));
5091 i = 1;
5092 count++;
5095 for (j = 0; i < count; i++, j++)
5097 mem = gen_rtx_MEM (SImode, plus_constant (from, j * 4 * sign));
5098 RTX_UNCHANGING_P (mem) = unchanging_p;
5099 MEM_IN_STRUCT_P (mem) = in_struct_p;
5100 MEM_SCALAR_P (mem) = scalar_p;
5101 XVECEXP (result, 0, i)
5102 = gen_rtx_SET (VOIDmode, gen_rtx_REG (SImode, base_regno + j), mem);
5105 return result;
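/* Illustrative aside, not part of GCC and guarded out of the build:
   the XScale numbers quoted in the comment above can be checked with
   a little arithmetic -- an ldm of N registers costs 2 + N blocking
   cycles, while N separate ldrs cost between N and 3*N non-blocking
   cycles:  */
#if 0
#include <stdio.h>

int main (void)
{
  int n;

  for (n = 1; n <= 4; n++)
    printf ("%d regs: ldm = %d cycles, ldrs = %d-%d cycles\n",
            n, 2 + n, n, 3 * n);
  return 0;
}
#endif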
5109 arm_gen_store_multiple (int base_regno, int count, rtx to, int up,
5110 int write_back, int unchanging_p, int in_struct_p,
5111 int scalar_p)
5113 int i = 0, j;
5114 rtx result;
5115 int sign = up ? 1 : -1;
5116 rtx mem;
5118 /* See arm_gen_load_multiple for discussion of
5119 the pros/cons of ldm/stm usage for XScale. */
5120 if (arm_tune_xscale && count <= 2 && ! optimize_size)
5122 rtx seq;
5124 start_sequence ();
5126 for (i = 0; i < count; i++)
5128 mem = gen_rtx_MEM (SImode, plus_constant (to, i * 4 * sign));
5129 RTX_UNCHANGING_P (mem) = unchanging_p;
5130 MEM_IN_STRUCT_P (mem) = in_struct_p;
5131 MEM_SCALAR_P (mem) = scalar_p;
5132 emit_move_insn (mem, gen_rtx_REG (SImode, base_regno + i));
5135 if (write_back)
5136 emit_move_insn (to, plus_constant (to, count * 4 * sign));
5138 seq = get_insns ();
5139 end_sequence ();
5141 return seq;
5144 result = gen_rtx_PARALLEL (VOIDmode,
5145 rtvec_alloc (count + (write_back ? 1 : 0)));
5146 if (write_back)
5148 XVECEXP (result, 0, 0)
5149 = gen_rtx_SET (GET_MODE (to), to,
5150 plus_constant (to, count * 4 * sign));
5151 i = 1;
5152 count++;
5155 for (j = 0; i < count; i++, j++)
5157 mem = gen_rtx_MEM (SImode, plus_constant (to, j * 4 * sign));
5158 RTX_UNCHANGING_P (mem) = unchanging_p;
5159 MEM_IN_STRUCT_P (mem) = in_struct_p;
5160 MEM_SCALAR_P (mem) = scalar_p;
5162 XVECEXP (result, 0, i)
5163 = gen_rtx_SET (VOIDmode, mem, gen_rtx_REG (SImode, base_regno + j));
5166 return result;
5170 arm_gen_movstrqi (rtx *operands)
5172 HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
5173 int i;
5174 rtx src, dst;
5175 rtx st_src, st_dst, fin_src, fin_dst;
5176 rtx part_bytes_reg = NULL;
5177 rtx mem;
5178 int dst_unchanging_p, dst_in_struct_p, src_unchanging_p, src_in_struct_p;
5179 int dst_scalar_p, src_scalar_p;
5181 if (GET_CODE (operands[2]) != CONST_INT
5182 || GET_CODE (operands[3]) != CONST_INT
5183 || INTVAL (operands[2]) > 64
5184 || INTVAL (operands[3]) & 3)
5185 return 0;
5187 st_dst = XEXP (operands[0], 0);
5188 st_src = XEXP (operands[1], 0);
5190 dst_unchanging_p = RTX_UNCHANGING_P (operands[0]);
5191 dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
5192 dst_scalar_p = MEM_SCALAR_P (operands[0]);
5193 src_unchanging_p = RTX_UNCHANGING_P (operands[1]);
5194 src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);
5195 src_scalar_p = MEM_SCALAR_P (operands[1]);
5197 fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
5198 fin_src = src = copy_to_mode_reg (SImode, st_src);
5200 in_words_to_go = ARM_NUM_INTS (INTVAL (operands[2]));
5201 out_words_to_go = INTVAL (operands[2]) / 4;
5202 last_bytes = INTVAL (operands[2]) & 3;
5204 if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
5205 part_bytes_reg = gen_rtx_REG (SImode, (in_words_to_go - 1) & 3);
5207 for (i = 0; in_words_to_go >= 2; i+=4)
5209 if (in_words_to_go > 4)
5210 emit_insn (arm_gen_load_multiple (0, 4, src, TRUE, TRUE,
5211 src_unchanging_p,
5212 src_in_struct_p,
5213 src_scalar_p));
5214 else
5215 emit_insn (arm_gen_load_multiple (0, in_words_to_go, src, TRUE,
5216 FALSE, src_unchanging_p,
5217 src_in_struct_p, src_scalar_p));
5219 if (out_words_to_go)
5221 if (out_words_to_go > 4)
5222 emit_insn (arm_gen_store_multiple (0, 4, dst, TRUE, TRUE,
5223 dst_unchanging_p,
5224 dst_in_struct_p,
5225 dst_scalar_p));
5226 else if (out_words_to_go != 1)
5227 emit_insn (arm_gen_store_multiple (0, out_words_to_go,
5228 dst, TRUE,
5229 (last_bytes == 0
5230 ? FALSE : TRUE),
5231 dst_unchanging_p,
5232 dst_in_struct_p,
5233 dst_scalar_p));
5234 else
5236 mem = gen_rtx_MEM (SImode, dst);
5237 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
5238 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
5239 MEM_SCALAR_P (mem) = dst_scalar_p;
5240 emit_move_insn (mem, gen_rtx_REG (SImode, 0));
5241 if (last_bytes != 0)
5242 emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
5246 in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
5247 out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
5250 /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do. */
5251 if (out_words_to_go)
5253 rtx sreg;
5255 mem = gen_rtx_MEM (SImode, src);
5256 RTX_UNCHANGING_P (mem) = src_unchanging_p;
5257 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
5258 MEM_SCALAR_P (mem) = src_scalar_p;
5259 emit_move_insn (sreg = gen_reg_rtx (SImode), mem);
5260 emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));
5262 mem = gen_rtx_MEM (SImode, dst);
5263 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
5264 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
5265 MEM_SCALAR_P (mem) = dst_scalar_p;
5266 emit_move_insn (mem, sreg);
5267 emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
5268 in_words_to_go--;
5270 if (in_words_to_go) /* Sanity check */
5271 abort ();
5274 if (in_words_to_go)
5276 if (in_words_to_go < 0)
5277 abort ();
5279 mem = gen_rtx_MEM (SImode, src);
5280 RTX_UNCHANGING_P (mem) = src_unchanging_p;
5281 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
5282 MEM_SCALAR_P (mem) = src_scalar_p;
5283 part_bytes_reg = copy_to_mode_reg (SImode, mem);
5286 if (last_bytes && part_bytes_reg == NULL)
5287 abort ();
5289 if (BYTES_BIG_ENDIAN && last_bytes)
5291 rtx tmp = gen_reg_rtx (SImode);
5293 /* The bytes we want are in the top end of the word. */
5294 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
5295 GEN_INT (8 * (4 - last_bytes))));
5296 part_bytes_reg = tmp;
5298 while (last_bytes)
5300 mem = gen_rtx_MEM (QImode, plus_constant (dst, last_bytes - 1));
5301 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
5302 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
5303 MEM_SCALAR_P (mem) = dst_scalar_p;
5304 emit_move_insn (mem, gen_lowpart (QImode, part_bytes_reg));
5306 if (--last_bytes)
5308 tmp = gen_reg_rtx (SImode);
5309 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
5310 part_bytes_reg = tmp;
5315 else
5317 if (last_bytes > 1)
5319 mem = gen_rtx_MEM (HImode, dst);
5320 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
5321 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
5322 MEM_SCALAR_P (mem) = dst_scalar_p;
5323 emit_move_insn (mem, gen_lowpart (HImode, part_bytes_reg));
5324 last_bytes -= 2;
5325 if (last_bytes)
5327 rtx tmp = gen_reg_rtx (SImode);
5329 emit_insn (gen_addsi3 (dst, dst, GEN_INT (2)));
5330 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (16)));
5331 part_bytes_reg = tmp;
5335 if (last_bytes)
5337 mem = gen_rtx_MEM (QImode, dst);
5338 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
5339 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
5340 MEM_SCALAR_P (mem) = dst_scalar_p;
5341 emit_move_insn (mem, gen_lowpart (QImode, part_bytes_reg));
5345 return 1;
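/* Illustrative aside, not part of GCC and guarded out of the build:
   the expander above follows the usual block-copy shape -- move as
   much as possible a word at a time, then mop up the trailing 1-3
   bytes.  A plain C model of that structure:  */
#if 0
#include <stdio.h>
#include <string.h>

static void copy_block (unsigned char *dst, const unsigned char *src,
                        int nbytes)
{
  int words = nbytes / 4;
  int last_bytes = nbytes & 3;
  int i;

  /* Word-sized moves (the real code batches these four at a time
     with load/store multiple).  */
  for (i = 0; i < words; i++)
    memcpy (dst + i * 4, src + i * 4, 4);

  /* Trailing byte moves.  */
  for (i = 0; i < last_bytes; i++)
    dst[words * 4 + i] = src[words * 4 + i];
}

int main (void)
{
  unsigned char src[11] = "0123456789", dst[11] = { 0 };

  copy_block (dst, src, 11);
  printf ("%s\n", dst);         /* prints 0123456789 */
  return 0;
}
#endif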
5348 /* Generate a memory reference for a half word, such that it will be loaded
5349 into the top 16 bits of the word. We can assume that the address is
5350 known to be alignable and of the form reg, or plus (reg, const). */
5353 arm_gen_rotated_half_load (rtx memref)
5355 HOST_WIDE_INT offset = 0;
5356 rtx base = XEXP (memref, 0);
5358 if (GET_CODE (base) == PLUS)
5360 offset = INTVAL (XEXP (base, 1));
5361 base = XEXP (base, 0);
5364 /* If we aren't allowed to generate unaligned addresses, then fail. */
5365 if (TARGET_MMU_TRAPS
5366 && ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 0)))
5367 return NULL;
5369 base = gen_rtx_MEM (SImode, plus_constant (base, offset & ~2));
5371 if ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 2))
5372 return base;
5374 return gen_rtx_ROTATE (SImode, base, GEN_INT (16));
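/* Illustrative aside, not part of GCC and guarded out of the build:
   the ROTATE by 16 above moves whichever halfword was actually
   loaded into the top 16 bits of the word.  A standalone model of a
   32-bit left rotate, assuming 32-bit unsigned int:  */
#if 0
#include <stdio.h>

static unsigned int rotate_left (unsigned int x, int n)
{
  return (x << n) | (x >> (32 - n));    /* valid for 0 < n < 32 */
}

int main (void)
{
  unsigned int word = 0x1234abcd;

  /* Prints "0x1234abcd -> 0xabcd1234": the low halfword now sits in
     the top 16 bits.  */
  printf ("0x%08x -> 0x%08x\n", word, rotate_left (word, 16));
  return 0;
}
#endif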
5377 /* Select a dominance comparison mode if possible for a test of the general
5378 form (OP (COND_OR (X) (Y)) (const_int 0)). We support three forms.
5379 COND_OR == DOM_CC_X_AND_Y => (X && Y)
5380 COND_OR == DOM_CC_NX_OR_Y => ((! X) || Y)
5381 COND_OR == DOM_CC_X_OR_Y => (X || Y)
5382 In all cases OP will be either EQ or NE, but we don't need to know which
5383 here. If we are unable to support a dominance comparison we return
5384 CC mode. This will then fail to match for the RTL expressions that
5385 generate this call. */
5386 enum machine_mode
5387 arm_select_dominance_cc_mode (rtx x, rtx y, HOST_WIDE_INT cond_or)
5389 enum rtx_code cond1, cond2;
5390 int swapped = 0;
5392 /* Currently we will probably get the wrong result if the individual
5393 comparisons are not simple. This also ensures that it is safe to
5394 reverse a comparison if necessary. */
5395 if ((arm_select_cc_mode (cond1 = GET_CODE (x), XEXP (x, 0), XEXP (x, 1))
5396 != CCmode)
5397 || (arm_select_cc_mode (cond2 = GET_CODE (y), XEXP (y, 0), XEXP (y, 1))
5398 != CCmode))
5399 return CCmode;
5401 /* The if_then_else variant of this tests the second condition if the
5402 first passes, but is true if the first fails. Reverse the first
5403 condition to get a true "inclusive-or" expression. */
5404 if (cond_or == DOM_CC_NX_OR_Y)
5405 cond1 = reverse_condition (cond1);
5407 /* If the comparisons are not equal, and one doesn't dominate the other,
5408 then we can't do this. */
5409 if (cond1 != cond2
5410 && !comparison_dominates_p (cond1, cond2)
5411 && (swapped = 1, !comparison_dominates_p (cond2, cond1)))
5412 return CCmode;
5414 if (swapped)
5416 enum rtx_code temp = cond1;
5417 cond1 = cond2;
5418 cond2 = temp;
5421 switch (cond1)
5423 case EQ:
5424 if (cond2 == EQ || cond_or == DOM_CC_X_AND_Y)
5425 return CC_DEQmode;
5427 switch (cond2)
5429 case LE: return CC_DLEmode;
5430 case LEU: return CC_DLEUmode;
5431 case GE: return CC_DGEmode;
5432 case GEU: return CC_DGEUmode;
5433 default: break;
5436 break;
5438 case LT:
5439 if (cond2 == LT || cond_or == DOM_CC_X_AND_Y)
5440 return CC_DLTmode;
5441 if (cond2 == LE)
5442 return CC_DLEmode;
5443 if (cond2 == NE)
5444 return CC_DNEmode;
5445 break;
5447 case GT:
5448 if (cond2 == GT || cond_or == DOM_CC_X_AND_Y)
5449 return CC_DGTmode;
5450 if (cond2 == GE)
5451 return CC_DGEmode;
5452 if (cond2 == NE)
5453 return CC_DNEmode;
5454 break;
5456 case LTU:
5457 if (cond2 == LTU || cond_or == DOM_CC_X_AND_Y)
5458 return CC_DLTUmode;
5459 if (cond2 == LEU)
5460 return CC_DLEUmode;
5461 if (cond2 == NE)
5462 return CC_DNEmode;
5463 break;
5465 case GTU:
5466 if (cond2 == GTU || cond_or == DOM_CC_X_AND_Y)
5467 return CC_DGTUmode;
5468 if (cond2 == GEU)
5469 return CC_DGEUmode;
5470 if (cond2 == NE)
5471 return CC_DNEmode;
5472 break;
5474 /* The remaining cases only occur when both comparisons are the
5475 same. */
5476 case NE:
5477 return CC_DNEmode;
5479 case LE:
5480 return CC_DLEmode;
5482 case GE:
5483 return CC_DGEmode;
5485 case LEU:
5486 return CC_DLEUmode;
5488 case GEU:
5489 return CC_DGEUmode;
5491 default:
5492 break;
5495 abort ();
5498 enum machine_mode
5499 arm_select_cc_mode (enum rtx_code op, rtx x, rtx y)
5501 /* All floating point compares return CCFP if it is an equality
5502 comparison, and CCFPE otherwise. */
5503 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
5505 switch (op)
5507 case EQ:
5508 case NE:
5509 case UNORDERED:
5510 case ORDERED:
5511 case UNLT:
5512 case UNLE:
5513 case UNGT:
5514 case UNGE:
5515 case UNEQ:
5516 case LTGT:
5517 return CCFPmode;
5519 case LT:
5520 case LE:
5521 case GT:
5522 case GE:
5523 if (TARGET_CIRRUS)
5524 return CCFPmode;
5525 return CCFPEmode;
5527 default:
5528 abort ();
5532 /* A compare with a shifted operand. Because of canonicalization, the
5533 comparison will have to be swapped when we emit the assembler. */
5534 if (GET_MODE (y) == SImode && GET_CODE (y) == REG
5535 && (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
5536 || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
5537 || GET_CODE (x) == ROTATERT))
5538 return CC_SWPmode;
5540 /* This is a special case that is used by combine to allow a
5541 comparison of a shifted byte load to be split into a zero-extend
5542 followed by a comparison of the shifted integer (only valid for
5543 equalities and unsigned inequalities). */
5544 if (GET_MODE (x) == SImode
5545 && GET_CODE (x) == ASHIFT
5546 && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 24
5547 && GET_CODE (XEXP (x, 0)) == SUBREG
5548 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == MEM
5549 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == QImode
5550 && (op == EQ || op == NE
5551 || op == GEU || op == GTU || op == LTU || op == LEU)
5552 && GET_CODE (y) == CONST_INT)
5553 return CC_Zmode;
5555 /* A construct for a conditional compare: if the false arm contains
5556 0, then both conditions must be true; otherwise either condition
5557 must be true. Not all conditions are possible, so CCmode is
5558 returned if it can't be done. */
5559 if (GET_CODE (x) == IF_THEN_ELSE
5560 && (XEXP (x, 2) == const0_rtx
5561 || XEXP (x, 2) == const1_rtx)
5562 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
5563 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
5564 return arm_select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
5565 INTVAL (XEXP (x, 2)));
5567 /* Alternate canonicalizations of the above. These are somewhat cleaner. */
5568 if (GET_CODE (x) == AND
5569 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
5570 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
5571 return arm_select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
5572 DOM_CC_X_AND_Y);
5574 if (GET_CODE (x) == IOR
5575 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
5576 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
5577 return arm_select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
5578 DOM_CC_X_OR_Y);
5580 /* For an operation that sets the condition codes as a side-effect, the
5581 V flag is not set correctly, so we can only use comparisons where
5582 this doesn't matter. (For LT and GE we can use "mi" and "pl"
5583 instead.) */
5584 if (GET_MODE (x) == SImode
5585 && y == const0_rtx
5586 && (op == EQ || op == NE || op == LT || op == GE)
5587 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
5588 || GET_CODE (x) == AND || GET_CODE (x) == IOR
5589 || GET_CODE (x) == XOR || GET_CODE (x) == MULT
5590 || GET_CODE (x) == NOT || GET_CODE (x) == NEG
5591 || GET_CODE (x) == LSHIFTRT
5592 || GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
5593 || GET_CODE (x) == ROTATERT || GET_CODE (x) == ZERO_EXTRACT))
5594 return CC_NOOVmode;
5596 if (GET_MODE (x) == QImode && (op == EQ || op == NE))
5597 return CC_Zmode;
5599 if (GET_MODE (x) == SImode && (op == LTU || op == GEU)
5600 && GET_CODE (x) == PLUS
5601 && (rtx_equal_p (XEXP (x, 0), y) || rtx_equal_p (XEXP (x, 1), y)))
5602 return CC_Cmode;
5604 return CCmode;
5607 /* X and Y are two things to compare using CODE. Emit the compare insn and
5608 return the rtx for register 0 in the proper mode. FP means this is a
5609 floating point compare: I don't think that it is needed on the arm. */
5611 arm_gen_compare_reg (enum rtx_code code, rtx x, rtx y)
5613 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
5614 rtx cc_reg = gen_rtx_REG (mode, CC_REGNUM);
5616 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
5617 gen_rtx_COMPARE (mode, x, y)));
5619 return cc_reg;
5622 /* Generate a sequence of insns that will generate the correct return
5623 address mask depending on the physical architecture that the program
5624 is running on. */
5626 arm_gen_return_addr_mask (void)
5628 rtx reg = gen_reg_rtx (Pmode);
5630 emit_insn (gen_return_addr_mask (reg));
5631 return reg;
5634 void
5635 arm_reload_in_hi (rtx *operands)
5637 rtx ref = operands[1];
5638 rtx base, scratch;
5639 HOST_WIDE_INT offset = 0;
5641 if (GET_CODE (ref) == SUBREG)
5643 offset = SUBREG_BYTE (ref);
5644 ref = SUBREG_REG (ref);
5647 if (GET_CODE (ref) == REG)
5649 /* We have a pseudo which has been spilt onto the stack; there
5650 are two cases here: the first where there is a simple
5651 stack-slot replacement and a second where the stack-slot is
5652 out of range, or is used as a subreg. */
5653 if (reg_equiv_mem[REGNO (ref)])
5655 ref = reg_equiv_mem[REGNO (ref)];
5656 base = find_replacement (&XEXP (ref, 0));
5658 else
5659 /* The slot is out of range, or was dressed up in a SUBREG. */
5660 base = reg_equiv_address[REGNO (ref)];
5662 else
5663 base = find_replacement (&XEXP (ref, 0));
5665 /* Handle the case where the address is too complex to be offset by 1. */
5666 if (GET_CODE (base) == MINUS
5667 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
5669 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5671 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
5672 base = base_plus;
5674 else if (GET_CODE (base) == PLUS)
5676 /* The addend must be CONST_INT, or we would have dealt with it above. */
5677 HOST_WIDE_INT hi, lo;
5679 offset += INTVAL (XEXP (base, 1));
5680 base = XEXP (base, 0);
5682 /* Rework the address into a legal sequence of insns. */
5683 /* Valid range for lo is -4095 -> 4095 */
5684 lo = (offset >= 0
5685 ? (offset & 0xfff)
5686 : -((-offset) & 0xfff));
5688 /* Corner case: if lo is the max offset then we would be out of range
5689 once we have added the additional 1 below, so bump the msb into the
5690 pre-loading insn(s). */
5691 if (lo == 4095)
5692 lo &= 0x7ff;
5694 hi = ((((offset - lo) & (HOST_WIDE_INT) 0xffffffff)
5695 ^ (HOST_WIDE_INT) 0x80000000)
5696 - (HOST_WIDE_INT) 0x80000000);
5698 if (hi + lo != offset)
5699 abort ();
5701 if (hi != 0)
5703 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5705 /* Get the base address; addsi3 knows how to handle constants
5706 that require more than one insn. */
5707 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
5708 base = base_plus;
5709 offset = lo;
5713 /* Operands[2] may overlap operands[0] (though it won't overlap
5714 operands[1]); that's why we asked for a DImode reg -- so we can
5715 use the half that does not overlap. */
5716 if (REGNO (operands[2]) == REGNO (operands[0]))
5717 scratch = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5718 else
5719 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
5721 emit_insn (gen_zero_extendqisi2 (scratch,
5722 gen_rtx_MEM (QImode,
5723 plus_constant (base,
5724 offset))));
5725 emit_insn (gen_zero_extendqisi2 (gen_rtx_SUBREG (SImode, operands[0], 0),
5726 gen_rtx_MEM (QImode,
5727 plus_constant (base,
5728 offset + 1))));
5729 if (!BYTES_BIG_ENDIAN)
5730 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
5731 gen_rtx_IOR (SImode,
5732 gen_rtx_ASHIFT
5733 (SImode,
5734 gen_rtx_SUBREG (SImode, operands[0], 0),
5735 GEN_INT (8)),
5736 scratch)));
5737 else
5738 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
5739 gen_rtx_IOR (SImode,
5740 gen_rtx_ASHIFT (SImode, scratch,
5741 GEN_INT (8)),
5742 gen_rtx_SUBREG (SImode, operands[0],
5743 0))));
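/* Illustrative aside, not part of GCC and guarded out of the build:
   the hi/lo split above breaks an arbitrary offset into a LO part
   that fits the +/-4095 immediate range of a byte load and a HI
   remainder added separately, with the XOR/subtract trick
   sign-extending the 32-bit remainder so that hi + lo == offset even
   when HOST_WIDE_INT is wider than 32 bits.  A standalone check:  */
#if 0
#include <stdio.h>

int main (void)
{
  long long offsets[] = { 0, 100, 4095, 4096, -100, -4095, 70000, -70000 };
  int i;

  for (i = 0; i < 8; i++)
    {
      long long offset = offsets[i];
      long long lo = (offset >= 0
                      ? (offset & 0xfff)
                      : -((-offset) & 0xfff));
      long long hi;

      /* Keep headroom for the "offset + 1" access of the second byte.  */
      if (lo == 4095)
        lo &= 0x7ff;

      /* Sign-extend the 32-bit remainder.  */
      hi = ((((offset - lo) & 0xffffffffLL) ^ 0x80000000LL)
            - 0x80000000LL);

      printf ("offset %7lld = hi %7lld + lo %5lld  %s\n",
              offset, hi, lo, hi + lo == offset ? "ok" : "BAD");
    }
  return 0;
}
#endif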
5746 /* Handle storing a half-word to memory during reload by synthesizing as two
5747 byte stores. Take care not to clobber the input values until after we
5748 have moved them somewhere safe. This code assumes that if the DImode
5749 scratch in operands[2] overlaps either the input value or output address
5750 in some way, then that value must die in this insn (we absolutely need
5751 two scratch registers for some corner cases). */
5752 void
5753 arm_reload_out_hi (rtx *operands)
5755 rtx ref = operands[0];
5756 rtx outval = operands[1];
5757 rtx base, scratch;
5758 HOST_WIDE_INT offset = 0;
5760 if (GET_CODE (ref) == SUBREG)
5762 offset = SUBREG_BYTE (ref);
5763 ref = SUBREG_REG (ref);
5766 if (GET_CODE (ref) == REG)
5768 /* We have a pseudo which has been spilt onto the stack; there
5769 are two cases here: the first where there is a simple
5770 stack-slot replacement and a second where the stack-slot is
5771 out of range, or is used as a subreg. */
5772 if (reg_equiv_mem[REGNO (ref)])
5774 ref = reg_equiv_mem[REGNO (ref)];
5775 base = find_replacement (&XEXP (ref, 0));
5777 else
5778 /* The slot is out of range, or was dressed up in a SUBREG. */
5779 base = reg_equiv_address[REGNO (ref)];
5781 else
5782 base = find_replacement (&XEXP (ref, 0));
5784 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
5786 /* Handle the case where the address is too complex to be offset by 1. */
5787 if (GET_CODE (base) == MINUS
5788 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
5790 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5792 /* Be careful not to destroy OUTVAL. */
5793 if (reg_overlap_mentioned_p (base_plus, outval))
5795 /* Updating base_plus might destroy outval; see if we can
5796 swap the scratch and base_plus. */
5797 if (!reg_overlap_mentioned_p (scratch, outval))
5799 rtx tmp = scratch;
5800 scratch = base_plus;
5801 base_plus = tmp;
5803 else
5805 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
5807 /* Be conservative and copy OUTVAL into the scratch now;
5808 this should only be necessary if outval is a subreg
5809 of something larger than a word. */
5810 /* XXX Might this clobber base? I can't see how it can,
5811 since scratch is known to overlap with OUTVAL, and
5812 must be wider than a word. */
5813 emit_insn (gen_movhi (scratch_hi, outval));
5814 outval = scratch_hi;
5818 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
5819 base = base_plus;
5821 else if (GET_CODE (base) == PLUS)
5823 /* The addend must be CONST_INT, or we would have dealt with it above. */
5824 HOST_WIDE_INT hi, lo;
5826 offset += INTVAL (XEXP (base, 1));
5827 base = XEXP (base, 0);
5829 /* Rework the address into a legal sequence of insns. */
5830 /* Valid range for lo is -4095 -> 4095 */
5831 lo = (offset >= 0
5832 ? (offset & 0xfff)
5833 : -((-offset) & 0xfff));
5835 /* Corner case: if lo is the max offset then we would be out of range
5836 once we have added the additional 1 below, so bump the msb into the
5837 pre-loading insn(s). */
5838 if (lo == 4095)
5839 lo &= 0x7ff;
5841 hi = ((((offset - lo) & (HOST_WIDE_INT) 0xffffffff)
5842 ^ (HOST_WIDE_INT) 0x80000000)
5843 - (HOST_WIDE_INT) 0x80000000);
5845 if (hi + lo != offset)
5846 abort ();
5848 if (hi != 0)
5850 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5852 /* Be careful not to destroy OUTVAL. */
5853 if (reg_overlap_mentioned_p (base_plus, outval))
5855 /* Updating base_plus might destroy outval; see if we
5856 can swap the scratch and base_plus. */
5857 if (!reg_overlap_mentioned_p (scratch, outval))
5859 rtx tmp = scratch;
5860 scratch = base_plus;
5861 base_plus = tmp;
5863 else
5865 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
5867 /* Be conservative and copy outval into scratch now;
5868 this should only be necessary if outval is a
5869 subreg of something larger than a word. */
5870 /* XXX Might this clobber base? I can't see how it
5871 can, since scratch is known to overlap with
5872 outval. */
5873 emit_insn (gen_movhi (scratch_hi, outval));
5874 outval = scratch_hi;
5878 /* Get the base address; addsi3 knows how to handle constants
5879 that require more than one insn. */
5880 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
5881 base = base_plus;
5882 offset = lo;
5886 if (BYTES_BIG_ENDIAN)
5888 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
5889 plus_constant (base, offset + 1)),
5890 gen_lowpart (QImode, outval)));
5891 emit_insn (gen_lshrsi3 (scratch,
5892 gen_rtx_SUBREG (SImode, outval, 0),
5893 GEN_INT (8)));
5894 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
5895 gen_lowpart (QImode, scratch)));
5897 else
5899 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
5900 gen_lowpart (QImode, outval)));
5901 emit_insn (gen_lshrsi3 (scratch,
5902 gen_rtx_SUBREG (SImode, outval, 0),
5903 GEN_INT (8)));
5904 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
5905 plus_constant (base, offset + 1)),
5906 gen_lowpart (QImode, scratch)));
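/* Illustrative aside, not part of GCC and guarded out of the build:
   the two byte stores emitted above synthesize one halfword store;
   endianness decides which byte of the value lands at the lower
   address.  A standalone model:  */
#if 0
#include <stdio.h>

static void store_hi (unsigned char *mem, unsigned int val, int big_endian)
{
  if (big_endian)
    {
      mem[1] = val & 0xff;              /* low byte at higher address */
      mem[0] = (val >> 8) & 0xff;
    }
  else
    {
      mem[0] = val & 0xff;              /* low byte at lower address */
      mem[1] = (val >> 8) & 0xff;
    }
}

int main (void)
{
  unsigned char buf[2];

  store_hi (buf, 0xbeef, 0);
  printf ("little-endian: %02x %02x\n", buf[0], buf[1]);        /* ef be */
  store_hi (buf, 0xbeef, 1);
  printf ("big-endian:    %02x %02x\n", buf[0], buf[1]);        /* be ef */
  return 0;
}
#endif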
5910 /* Print a symbolic form of X to the debug file, F. */
5911 static void
5912 arm_print_value (FILE *f, rtx x)
5914 switch (GET_CODE (x))
5916 case CONST_INT:
5917 fprintf (f, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
5918 return;
5920 case CONST_DOUBLE:
5921 fprintf (f, "<0x%lx,0x%lx>", (long)XWINT (x, 2), (long)XWINT (x, 3));
5922 return;
5924 case CONST_VECTOR:
5926 int i;
5928 fprintf (f, "<");
5929 for (i = 0; i < CONST_VECTOR_NUNITS (x); i++)
5931 fprintf (f, HOST_WIDE_INT_PRINT_HEX, INTVAL (CONST_VECTOR_ELT (x, i)));
5932 if (i < (CONST_VECTOR_NUNITS (x) - 1))
5933 fputc (',', f);
5935 fprintf (f, ">");
5937 return;
5939 case CONST_STRING:
5940 fprintf (f, "\"%s\"", XSTR (x, 0));
5941 return;
5943 case SYMBOL_REF:
5944 fprintf (f, "`%s'", XSTR (x, 0));
5945 return;
5947 case LABEL_REF:
5948 fprintf (f, "L%d", INSN_UID (XEXP (x, 0)));
5949 return;
5951 case CONST:
5952 arm_print_value (f, XEXP (x, 0));
5953 return;
5955 case PLUS:
5956 arm_print_value (f, XEXP (x, 0));
5957 fprintf (f, "+");
5958 arm_print_value (f, XEXP (x, 1));
5959 return;
5961 case PC:
5962 fprintf (f, "pc");
5963 return;
5965 default:
5966 fprintf (f, "????");
5967 return;
5971 /* Routines for manipulation of the constant pool. */
5973 /* Arm instructions cannot load a large constant directly into a
5974 register; they have to come from a pc relative load. The constant
5975 must therefore be placed in the addressable range of the pc
5976 relative load. Depending on the precise pc relative load
5977 instruction the range is somewhere between 256 bytes and 4k. This
5978 means that we often have to dump a constant inside a function, and
5979 generate code to branch around it.
5981 It is important to minimize this, since the branches will slow
5982 things down and make the code larger.
5984 Normally we can hide the table after an existing unconditional
5985 branch so that there is no interruption of the flow, but in the
5986 worst case the code looks like this:
5988 ldr rn, L1
5990 b L2
5991 align
5992 L1: .long value
5996 ldr rn, L3
5998 b L4
5999 align
6000 L3: .long value
6004 We fix this by performing a scan after scheduling, which notices
6005 which instructions need to have their operands fetched from the
6006 constant table and builds the table.
6008 The algorithm starts by building a table of all the constants that
6009 need fixing up and all the natural barriers in the function (places
6010 where a constant table can be dropped without breaking the flow).
6011 For each fixup we note how far the pc-relative replacement will be
6012 able to reach and the offset of the instruction into the function.
6014 Having built the table we then group the fixes together to form
6015 tables that are as large as possible (subject to addressing
6016 constraints) and emit each table of constants after the last
6017 barrier that is within range of all the instructions in the group.
6018 If a group does not contain a barrier, then we forcibly create one
6019 by inserting a jump instruction into the flow. Once the table has
6020 been inserted, the insns are then modified to reference the
6021 relevant entry in the pool.
6023 Possible enhancements to the algorithm (not implemented) are:
6025 1) For some processors and object formats, there may be benefit in
6026 aligning the pools to the start of cache lines; this alignment
6027 would need to be taken into account when calculating addressability
6028 of a pool. */
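/* A small worked illustration (addresses assumed): a pc-relative load
   at address 0x100 with a 4k forward range can use any pool placed up
   to roughly address 0x1100.  If a natural barrier (say, after an
   unconditional branch) occurs at 0x400, the pool is emitted there and
   the load is rewritten to reference the pool entry; only when no
   barrier falls inside the range do we pay for an extra branch around
   a forced pool.  */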
6030 /* These typedefs are located at the start of this file, so that
6031 they can be used in the prototypes there. This comment is to
6032 remind readers of that fact so that the following structures
6033 can be understood more easily.
6035 typedef struct minipool_node Mnode;
6036 typedef struct minipool_fixup Mfix; */
6038 struct minipool_node
6040 /* Doubly linked chain of entries. */
6041 Mnode * next;
6042 Mnode * prev;
6043 /* The maximum offset into the code that this entry can be placed. While
6044 pushing fixes for forward references, all entries are sorted in order
6045 of increasing max_address. */
6046 HOST_WIDE_INT max_address;
6047 /* Similarly for an entry inserted for a backwards ref. */
6048 HOST_WIDE_INT min_address;
6049 /* The number of fixes referencing this entry. This can become zero
6050 if we "unpush" an entry. In this case we ignore the entry when we
6051 come to emit the code. */
6052 int refcount;
6053 /* The offset from the start of the minipool. */
6054 HOST_WIDE_INT offset;
6055 /* The value in table. */
6056 rtx value;
6057 /* The mode of value. */
6058 enum machine_mode mode;
6059 /* The size of the value. With iWMMXt enabled
6060 sizes > 4 also imply an alignment of 8-bytes. */
6061 int fix_size;
6064 struct minipool_fixup
6066 Mfix * next;
6067 rtx insn;
6068 HOST_WIDE_INT address;
6069 rtx * loc;
6070 enum machine_mode mode;
6071 int fix_size;
6072 rtx value;
6073 Mnode * minipool;
6074 HOST_WIDE_INT forwards;
6075 HOST_WIDE_INT backwards;
6078 /* Fixes less than a word need padding out to a word boundary. */
6079 #define MINIPOOL_FIX_SIZE(mode) \
6080 (GET_MODE_SIZE ((mode)) >= 4 ? GET_MODE_SIZE ((mode)) : 4)
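/* For example, MINIPOOL_FIX_SIZE (HImode) and MINIPOOL_FIX_SIZE (SImode)
   both evaluate to 4, while MINIPOOL_FIX_SIZE (DImode) evaluates to 8.  */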
6082 static Mnode * minipool_vector_head;
6083 static Mnode * minipool_vector_tail;
6084 static rtx minipool_vector_label;
6086 /* The linked list of all minipool fixes required for this function. */
6087 Mfix * minipool_fix_head;
6088 Mfix * minipool_fix_tail;
6089 /* The fix entry for the current minipool, once it has been placed. */
6090 Mfix * minipool_barrier;
6092 /* Determines if INSN is the start of a jump table. Returns the end
6093 of the TABLE or NULL_RTX. */
6094 static rtx
6095 is_jump_table (rtx insn)
6097 rtx table;
6099 if (GET_CODE (insn) == JUMP_INSN
6100 && JUMP_LABEL (insn) != NULL
6101 && ((table = next_real_insn (JUMP_LABEL (insn)))
6102 == next_real_insn (insn))
6103 && table != NULL
6104 && GET_CODE (table) == JUMP_INSN
6105 && (GET_CODE (PATTERN (table)) == ADDR_VEC
6106 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
6107 return table;
6109 return NULL_RTX;
6112 #ifndef JUMP_TABLES_IN_TEXT_SECTION
6113 #define JUMP_TABLES_IN_TEXT_SECTION 0
6114 #endif
6116 static HOST_WIDE_INT
6117 get_jump_table_size (rtx insn)
6119 /* ADDR_VECs only take room if read-only data goes into the text
6120 section. */
6121 if (JUMP_TABLES_IN_TEXT_SECTION
6122 #if !defined(READONLY_DATA_SECTION) && !defined(READONLY_DATA_SECTION_ASM_OP)
6123 || 1
6124 #endif
6127 rtx body = PATTERN (insn);
6128 int elt = GET_CODE (body) == ADDR_DIFF_VEC ? 1 : 0;
6130 return GET_MODE_SIZE (GET_MODE (body)) * XVECLEN (body, elt);
6133 return 0;
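/* As an illustration (modes assumed): an ADDR_DIFF_VEC holding ten
   SImode entries accounts for 10 * 4 = 40 bytes when jump tables are
   placed in the text section, and for 0 bytes otherwise, since the
   table then lives with the read-only data.  */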
6136 /* Move a minipool fix MP from its current location to before MAX_MP.
6137 If MAX_MP is NULL, then MP doesn't need moving, but the addressing
6138 constraints may need updating. */
6139 static Mnode *
6140 move_minipool_fix_forward_ref (Mnode *mp, Mnode *max_mp,
6141 HOST_WIDE_INT max_address)
6143 /* This should never be true and the code below assumes these are
6144 different. */
6145 if (mp == max_mp)
6146 abort ();
6148 if (max_mp == NULL)
6150 if (max_address < mp->max_address)
6151 mp->max_address = max_address;
6153 else
6155 if (max_address > max_mp->max_address - mp->fix_size)
6156 mp->max_address = max_mp->max_address - mp->fix_size;
6157 else
6158 mp->max_address = max_address;
6160 /* Unlink MP from its current position. Since max_mp is non-null,
6161 mp->prev must be non-null. */
6162 mp->prev->next = mp->next;
6163 if (mp->next != NULL)
6164 mp->next->prev = mp->prev;
6165 else
6166 minipool_vector_tail = mp->prev;
6168 /* Re-insert it before MAX_MP. */
6169 mp->next = max_mp;
6170 mp->prev = max_mp->prev;
6171 max_mp->prev = mp;
6173 if (mp->prev != NULL)
6174 mp->prev->next = mp;
6175 else
6176 minipool_vector_head = mp;
6179 /* Save the new entry. */
6180 max_mp = mp;
6182 /* Scan over the preceding entries and adjust their addresses as
6183 required. */
6184 while (mp->prev != NULL
6185 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
6187 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
6188 mp = mp->prev;
6191 return max_mp;
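/* Sketch of the relink performed above (pool contents assumed): with
   entries A <-> B <-> C and MP == C, MAX_MP == A, the entry is
   unlinked and reinserted to give C <-> A <-> B; the final loop then
   tightens the max_address of anything still in front of MP so that
   every entry remains addressable.  */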
6194 /* Add a constant to the minipool for a forward reference. Returns the
6195 node added or NULL if the constant will not fit in this pool. */
6196 static Mnode *
6197 add_minipool_forward_ref (Mfix *fix)
6199 /* If set, max_mp is the first pool_entry that has a lower
6200 constraint than the one we are trying to add. */
6201 Mnode * max_mp = NULL;
6202 HOST_WIDE_INT max_address = fix->address + fix->forwards;
6203 Mnode * mp;
6205 /* If this fix's address is greater than the address of the first
6206 entry, then we can't put the fix in this pool. We subtract the
6207 size of the current fix to ensure that if the table is fully
6208 packed we still have enough room to insert this value by shuffling
6209 the other fixes forwards. */
6210 if (minipool_vector_head &&
6211 fix->address >= minipool_vector_head->max_address - fix->fix_size)
6212 return NULL;
6214 /* Scan the pool to see if a constant with the same value has
6215 already been added. While we are doing this, also note the
6216 location where we must insert the constant if it doesn't already
6217 exist. */
6218 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
6220 if (GET_CODE (fix->value) == GET_CODE (mp->value)
6221 && fix->mode == mp->mode
6222 && (GET_CODE (fix->value) != CODE_LABEL
6223 || (CODE_LABEL_NUMBER (fix->value)
6224 == CODE_LABEL_NUMBER (mp->value)))
6225 && rtx_equal_p (fix->value, mp->value))
6227 /* More than one fix references this entry. */
6228 mp->refcount++;
6229 return move_minipool_fix_forward_ref (mp, max_mp, max_address);
6232 /* Note the insertion point if necessary. */
6233 if (max_mp == NULL
6234 && mp->max_address > max_address)
6235 max_mp = mp;
6237 /* If we are inserting an 8-byte aligned quantity and
6238 we have not already found an insertion point, then
6239 make sure that all such 8-byte aligned quantities are
6240 placed at the start of the pool. */
6241 if (TARGET_REALLY_IWMMXT
6242 && max_mp == NULL
6243 && fix->fix_size == 8
6244 && mp->fix_size != 8)
6246 max_mp = mp;
6247 max_address = mp->max_address;
6251 /* The value is not currently in the minipool, so we need to create
6252 a new entry for it. If MAX_MP is NULL, the entry will be put on
6253 the end of the list since the placement is less constrained than
6254 any existing entry. Otherwise, we insert the new fix before
6255 MAX_MP and, if necessary, adjust the constraints on the other
6256 entries. */
6257 mp = xmalloc (sizeof (* mp));
6258 mp->fix_size = fix->fix_size;
6259 mp->mode = fix->mode;
6260 mp->value = fix->value;
6261 mp->refcount = 1;
6262 /* Not yet required for a backwards ref. */
6263 mp->min_address = -65536;
6265 if (max_mp == NULL)
6267 mp->max_address = max_address;
6268 mp->next = NULL;
6269 mp->prev = minipool_vector_tail;
6271 if (mp->prev == NULL)
6273 minipool_vector_head = mp;
6274 minipool_vector_label = gen_label_rtx ();
6276 else
6277 mp->prev->next = mp;
6279 minipool_vector_tail = mp;
6281 else
6283 if (max_address > max_mp->max_address - mp->fix_size)
6284 mp->max_address = max_mp->max_address - mp->fix_size;
6285 else
6286 mp->max_address = max_address;
6288 mp->next = max_mp;
6289 mp->prev = max_mp->prev;
6290 max_mp->prev = mp;
6291 if (mp->prev != NULL)
6292 mp->prev->next = mp;
6293 else
6294 minipool_vector_head = mp;
6297 /* Save the new entry. */
6298 max_mp = mp;
6300 /* Scan over the preceding entries and adjust their addresses as
6301 required. */
6302 while (mp->prev != NULL
6303 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
6305 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
6306 mp = mp->prev;
6309 return max_mp;
6312 static Mnode *
6313 move_minipool_fix_backward_ref (Mnode *mp, Mnode *min_mp,
6314 HOST_WIDE_INT min_address)
6316 HOST_WIDE_INT offset;
6318 /* This should never be true, and the code below assumes these are
6319 different. */
6320 if (mp == min_mp)
6321 abort ();
6323 if (min_mp == NULL)
6325 if (min_address > mp->min_address)
6326 mp->min_address = min_address;
6328 else
6330 /* We will adjust this below if it is too loose. */
6331 mp->min_address = min_address;
6333 /* Unlink MP from its current position. Since min_mp is non-null,
6334 mp->next must be non-null. */
6335 mp->next->prev = mp->prev;
6336 if (mp->prev != NULL)
6337 mp->prev->next = mp->next;
6338 else
6339 minipool_vector_head = mp->next;
6341 /* Reinsert it after MIN_MP. */
6342 mp->prev = min_mp;
6343 mp->next = min_mp->next;
6344 min_mp->next = mp;
6345 if (mp->next != NULL)
6346 mp->next->prev = mp;
6347 else
6348 minipool_vector_tail = mp;
6351 min_mp = mp;
6353 offset = 0;
6354 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
6356 mp->offset = offset;
6357 if (mp->refcount > 0)
6358 offset += mp->fix_size;
6360 if (mp->next && mp->next->min_address < mp->min_address + mp->fix_size)
6361 mp->next->min_address = mp->min_address + mp->fix_size;
6364 return min_mp;
6367 /* Add a constant to the minipool for a backward reference. Returns the
6368 node added or NULL if the constant will not fit in this pool.
6370 Note that the code for insertion for a backwards reference can be
6371 somewhat confusing because the calculated offsets for each fix do
6372 not take into account the size of the pool (which is still under
6373 construction). */
6374 static Mnode *
6375 add_minipool_backward_ref (Mfix *fix)
6377 /* If set, min_mp is the last pool_entry that has a lower constraint
6378 than the one we are trying to add. */
6379 Mnode *min_mp = NULL;
6380 /* This can be negative, since it is only a constraint. */
6381 HOST_WIDE_INT min_address = fix->address - fix->backwards;
6382 Mnode *mp;
6384 /* If we can't reach the current pool from this insn, or if we can't
6385 insert this entry at the end of the pool without pushing other
6386 fixes out of range, then we don't try. This ensures that we
6387 can't fail later on. */
6388 if (min_address >= minipool_barrier->address
6389 || (minipool_vector_tail->min_address + fix->fix_size
6390 >= minipool_barrier->address))
6391 return NULL;
6393 /* Scan the pool to see if a constant with the same value has
6394 already been added. While we are doing this, also note the
6395 location where we must insert the constant if it doesn't already
6396 exist. */
6397 for (mp = minipool_vector_tail; mp != NULL; mp = mp->prev)
6399 if (GET_CODE (fix->value) == GET_CODE (mp->value)
6400 && fix->mode == mp->mode
6401 && (GET_CODE (fix->value) != CODE_LABEL
6402 || (CODE_LABEL_NUMBER (fix->value)
6403 == CODE_LABEL_NUMBER (mp->value)))
6404 && rtx_equal_p (fix->value, mp->value)
6405 /* Check that there is enough slack to move this entry to the
6406 end of the table (this is conservative). */
6407 && (mp->max_address
6408 > (minipool_barrier->address
6409 + minipool_vector_tail->offset
6410 + minipool_vector_tail->fix_size)))
6412 mp->refcount++;
6413 return move_minipool_fix_backward_ref (mp, min_mp, min_address);
6416 if (min_mp != NULL)
6417 mp->min_address += fix->fix_size;
6418 else
6420 /* Note the insertion point if necessary. */
6421 if (mp->min_address < min_address)
6423 /* For now, we do not allow the insertion of 8-byte alignment
6424 requiring nodes anywhere but at the start of the pool. */
6425 if (TARGET_REALLY_IWMMXT && fix->fix_size == 8 && mp->fix_size != 8)
6426 return NULL;
6427 else
6428 min_mp = mp;
6430 else if (mp->max_address
6431 < minipool_barrier->address + mp->offset + fix->fix_size)
6433 /* Inserting before this entry would push the fix beyond
6434 its maximum address (which can happen if we have
6435 re-located a forwards fix); force the new fix to come
6436 after it. */
6437 min_mp = mp;
6438 min_address = mp->min_address + fix->fix_size;
6440 /* If we are inserting an 8-byte aligned quantity and
6441 we have not already found an insertion point, then
6442 make sure that all such 8-byte aligned quantities are
6443 placed at the start of the pool. */
6444 else if (TARGET_REALLY_IWMMXT
6445 && min_mp == NULL
6446 && fix->fix_size == 8
6447 && mp->fix_size < 8)
6449 min_mp = mp;
6450 min_address = mp->min_address + fix->fix_size;
6455 /* We need to create a new entry. */
6456 mp = xmalloc (sizeof (* mp));
6457 mp->fix_size = fix->fix_size;
6458 mp->mode = fix->mode;
6459 mp->value = fix->value;
6460 mp->refcount = 1;
6461 mp->max_address = minipool_barrier->address + 65536;
6463 mp->min_address = min_address;
6465 if (min_mp == NULL)
6467 mp->prev = NULL;
6468 mp->next = minipool_vector_head;
6470 if (mp->next == NULL)
6472 minipool_vector_tail = mp;
6473 minipool_vector_label = gen_label_rtx ();
6475 else
6476 mp->next->prev = mp;
6478 minipool_vector_head = mp;
6480 else
6482 mp->next = min_mp->next;
6483 mp->prev = min_mp;
6484 min_mp->next = mp;
6486 if (mp->next != NULL)
6487 mp->next->prev = mp;
6488 else
6489 minipool_vector_tail = mp;
6492 /* Save the new entry. */
6493 min_mp = mp;
6495 if (mp->prev)
6496 mp = mp->prev;
6497 else
6498 mp->offset = 0;
6500 /* Scan over the following entries and adjust their offsets. */
6501 while (mp->next != NULL)
6503 if (mp->next->min_address < mp->min_address + mp->fix_size)
6504 mp->next->min_address = mp->min_address + mp->fix_size;
6506 if (mp->refcount)
6507 mp->next->offset = mp->offset + mp->fix_size;
6508 else
6509 mp->next->offset = mp->offset;
6511 mp = mp->next;
6514 return min_mp;
6517 static void
6518 assign_minipool_offsets (Mfix *barrier)
6520 HOST_WIDE_INT offset = 0;
6521 Mnode *mp;
6523 minipool_barrier = barrier;
6525 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
6527 mp->offset = offset;
6529 if (mp->refcount > 0)
6530 offset += mp->fix_size;
6534 /* Output the literal table */
6535 static void
6536 dump_minipool (rtx scan)
6538 Mnode * mp;
6539 Mnode * nmp;
6540 int align64 = 0;
6542 if (TARGET_REALLY_IWMMXT)
6543 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
6544 if (mp->refcount > 0 && mp->fix_size == 8)
6546 align64 = 1;
6547 break;
6550 if (rtl_dump_file)
6551 fprintf (rtl_dump_file,
6552 ";; Emitting minipool after insn %u; address %ld; align %d (bytes)\n",
6553 INSN_UID (scan), (unsigned long) minipool_barrier->address, align64 ? 8 : 4);
6555 scan = emit_label_after (gen_label_rtx (), scan);
6556 scan = emit_insn_after (align64 ? gen_align_8 () : gen_align_4 (), scan);
6557 scan = emit_label_after (minipool_vector_label, scan);
6559 for (mp = minipool_vector_head; mp != NULL; mp = nmp)
6561 if (mp->refcount > 0)
6563 if (rtl_dump_file)
6565 fprintf (rtl_dump_file,
6566 ";; Offset %u, min %ld, max %ld ",
6567 (unsigned) mp->offset, (unsigned long) mp->min_address,
6568 (unsigned long) mp->max_address);
6569 arm_print_value (rtl_dump_file, mp->value);
6570 fputc ('\n', rtl_dump_file);
6573 switch (mp->fix_size)
6575 #ifdef HAVE_consttable_1
6576 case 1:
6577 scan = emit_insn_after (gen_consttable_1 (mp->value), scan);
6578 break;
6580 #endif
6581 #ifdef HAVE_consttable_2
6582 case 2:
6583 scan = emit_insn_after (gen_consttable_2 (mp->value), scan);
6584 break;
6586 #endif
6587 #ifdef HAVE_consttable_4
6588 case 4:
6589 scan = emit_insn_after (gen_consttable_4 (mp->value), scan);
6590 break;
6592 #endif
6593 #ifdef HAVE_consttable_8
6594 case 8:
6595 scan = emit_insn_after (gen_consttable_8 (mp->value), scan);
6596 break;
6598 #endif
6599 default:
6600 abort ();
6601 break;
6605 nmp = mp->next;
6606 free (mp);
6609 minipool_vector_head = minipool_vector_tail = NULL;
6610 scan = emit_insn_after (gen_consttable_end (), scan);
6611 scan = emit_barrier_after (scan);
6614 /* Return the cost of forcibly inserting a barrier after INSN. */
6615 static int
6616 arm_barrier_cost (rtx insn)
6618 /* Basing the location of the pool on the loop depth is preferable,
6619 but at the moment, the basic block information seems to be
6620 corrupt by this stage of the compilation. */
6621 int base_cost = 50;
6622 rtx next = next_nonnote_insn (insn);
6624 if (next != NULL && GET_CODE (next) == CODE_LABEL)
6625 base_cost -= 20;
6627 switch (GET_CODE (insn))
6629 case CODE_LABEL:
6630 /* It will always be better to place the table before the label, rather
6631 than after it. */
6632 return 50;
6634 case INSN:
6635 case CALL_INSN:
6636 return base_cost;
6638 case JUMP_INSN:
6639 return base_cost - 10;
6641 default:
6642 return base_cost + 10;
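/* Worked example (insn stream assumed): a JUMP_INSN whose successor is
   a CODE_LABEL costs (50 - 20) - 10 = 20, whereas an ordinary INSN
   with no label following costs the full base of 50; the lower the
   cost, the better the spot for a forced barrier.  */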
6646 /* Find the best place in the insn stream in the range
6647 (FIX->address,MAX_ADDRESS) to forcibly insert a minipool barrier.
6648 Create the barrier by inserting a jump and add a new fix entry for
6649 it. */
6650 static Mfix *
6651 create_fix_barrier (Mfix *fix, HOST_WIDE_INT max_address)
6653 HOST_WIDE_INT count = 0;
6654 rtx barrier;
6655 rtx from = fix->insn;
6656 rtx selected = from;
6657 int selected_cost;
6658 HOST_WIDE_INT selected_address;
6659 Mfix * new_fix;
6660 HOST_WIDE_INT max_count = max_address - fix->address;
6661 rtx label = gen_label_rtx ();
6663 selected_cost = arm_barrier_cost (from);
6664 selected_address = fix->address;
6666 while (from && count < max_count)
6668 rtx tmp;
6669 int new_cost;
6671 /* This code shouldn't have been called if there was a natural barrier
6672 within range. */
6673 if (GET_CODE (from) == BARRIER)
6674 abort ();
6676 /* Count the length of this insn. */
6677 count += get_attr_length (from);
6679 /* If there is a jump table, add its length. */
6680 tmp = is_jump_table (from);
6681 if (tmp != NULL)
6683 count += get_jump_table_size (tmp);
6685 /* Jump tables aren't in a basic block, so base the cost on
6686 the dispatch insn. If we select this location, we will
6687 still put the pool after the table. */
6688 new_cost = arm_barrier_cost (from);
6690 if (count < max_count && new_cost <= selected_cost)
6692 selected = tmp;
6693 selected_cost = new_cost;
6694 selected_address = fix->address + count;
6697 /* Continue after the dispatch table. */
6698 from = NEXT_INSN (tmp);
6699 continue;
6702 new_cost = arm_barrier_cost (from);
6704 if (count < max_count && new_cost <= selected_cost)
6706 selected = from;
6707 selected_cost = new_cost;
6708 selected_address = fix->address + count;
6711 from = NEXT_INSN (from);
6714 /* Create a new JUMP_INSN that branches around a barrier. */
6715 from = emit_jump_insn_after (gen_jump (label), selected);
6716 JUMP_LABEL (from) = label;
6717 barrier = emit_barrier_after (from);
6718 emit_label_after (label, barrier);
6720 /* Create a minipool barrier entry for the new barrier. */
6721 new_fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* new_fix));
6722 new_fix->insn = barrier;
6723 new_fix->address = selected_address;
6724 new_fix->next = fix->next;
6725 fix->next = new_fix;
6727 return new_fix;
6730 /* Record that there is a natural barrier in the insn stream at
6731 ADDRESS. */
6732 static void
6733 push_minipool_barrier (rtx insn, HOST_WIDE_INT address)
6735 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
6737 fix->insn = insn;
6738 fix->address = address;
6740 fix->next = NULL;
6741 if (minipool_fix_head != NULL)
6742 minipool_fix_tail->next = fix;
6743 else
6744 minipool_fix_head = fix;
6746 minipool_fix_tail = fix;
6749 /* Record INSN, which will need fixing up to load a value from the
6750 minipool. ADDRESS is the offset of the insn since the start of the
6751 function; LOC is a pointer to the part of the insn which requires
6752 fixing; VALUE is the constant that must be loaded, which is of type
6753 MODE. */
6754 static void
6755 push_minipool_fix (rtx insn, HOST_WIDE_INT address, rtx *loc,
6756 enum machine_mode mode, rtx value)
6758 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
6760 #ifdef AOF_ASSEMBLER
6761 /* PIC symbol references need to be converted into offsets into the
6762 based area. */
6763 /* XXX This shouldn't be done here. */
6764 if (flag_pic && GET_CODE (value) == SYMBOL_REF)
6765 value = aof_pic_entry (value);
6766 #endif /* AOF_ASSEMBLER */
6768 fix->insn = insn;
6769 fix->address = address;
6770 fix->loc = loc;
6771 fix->mode = mode;
6772 fix->fix_size = MINIPOOL_FIX_SIZE (mode);
6773 fix->value = value;
6774 fix->forwards = get_attr_pool_range (insn);
6775 fix->backwards = get_attr_neg_pool_range (insn);
6776 fix->minipool = NULL;
6778 /* If an insn doesn't have a range defined for it, then it isn't
6779 expecting to be reworked by this code. Better to abort now than
6780 to generate duff assembly code. */
6781 if (fix->forwards == 0 && fix->backwards == 0)
6782 abort ();
6784 /* With iWMMXt enabled, the pool is aligned to an 8-byte boundary.
6785 So there might be an empty word before the start of the pool.
6786 Hence we reduce the forward range by 4 to allow for this
6787 possibility. */
6788 if (TARGET_REALLY_IWMMXT && fix->fix_size == 8)
6789 fix->forwards -= 4;
6791 if (rtl_dump_file)
6793 fprintf (rtl_dump_file,
6794 ";; %smode fixup for i%d; addr %lu, range (%ld,%ld): ",
6795 GET_MODE_NAME (mode),
6796 INSN_UID (insn), (unsigned long) address,
6797 -1 * (long)fix->backwards, (long)fix->forwards);
6798 arm_print_value (rtl_dump_file, fix->value);
6799 fprintf (rtl_dump_file, "\n");
6802 /* Add it to the chain of fixes. */
6803 fix->next = NULL;
6805 if (minipool_fix_head != NULL)
6806 minipool_fix_tail->next = fix;
6807 else
6808 minipool_fix_head = fix;
6810 minipool_fix_tail = fix;
6813 /* Scan INSN and note any of its operands that need fixing.
6814 If DO_PUSHES is false we do not actually push any of the fixups
6815 needed. The function returns TRUE if any fixups were needed/pushed.
6816 This is used by arm_memory_load_p() which needs to know about loads
6817 of constants that will be converted into minipool loads. */
6818 static bool
6819 note_invalid_constants (rtx insn, HOST_WIDE_INT address, int do_pushes)
6821 bool result = false;
6822 int opno;
6824 extract_insn (insn);
6826 if (!constrain_operands (1))
6827 fatal_insn_not_found (insn);
6829 /* Fill in recog_op_alt with information about the constraints of this insn. */
6830 preprocess_constraints ();
6832 for (opno = 0; opno < recog_data.n_operands; opno++)
6834 /* Things we need to fix can only occur in inputs. */
6835 if (recog_data.operand_type[opno] != OP_IN)
6836 continue;
6838 /* If this alternative is a memory reference, then any mention
6839 of constants in this alternative is really to fool reload
6840 into allowing us to accept one there. We need to fix them up
6841 now so that we output the right code. */
6842 if (recog_op_alt[opno][which_alternative].memory_ok)
6844 rtx op = recog_data.operand[opno];
6846 if (CONSTANT_P (op))
6848 if (do_pushes)
6849 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
6850 recog_data.operand_mode[opno], op);
6851 result = true;
6853 else if (GET_CODE (op) == MEM
6854 && GET_CODE (XEXP (op, 0)) == SYMBOL_REF
6855 && CONSTANT_POOL_ADDRESS_P (XEXP (op, 0)))
6857 if (do_pushes)
6858 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
6859 recog_data.operand_mode[opno],
6860 get_pool_constant (XEXP (op, 0)));
6862 result = true;
6867 return result;
6870 /* GCC puts the pool in the wrong place for ARM, since we can only
6871 load addresses a limited distance around the pc. We do some
6872 special munging to move the constant pool values to the correct
6873 point in the code. */
6874 static void
6875 arm_reorg (void)
6877 rtx insn;
6878 HOST_WIDE_INT address = 0;
6879 Mfix * fix;
6881 minipool_fix_head = minipool_fix_tail = NULL;
6883 /* The first insn must always be a note, or the code below won't
6884 scan it properly. */
6885 insn = get_insns ();
6886 if (GET_CODE (insn) != NOTE)
6887 abort ();
6889 /* Scan all the insns and record the operands that will need fixing. */
6890 for (insn = next_nonnote_insn (insn); insn; insn = next_nonnote_insn (insn))
6892 if (TARGET_CIRRUS_FIX_INVALID_INSNS
6893 && (arm_cirrus_insn_p (insn)
6894 || GET_CODE (insn) == JUMP_INSN
6895 || arm_memory_load_p (insn)))
6896 cirrus_reorg (insn);
6898 if (GET_CODE (insn) == BARRIER)
6899 push_minipool_barrier (insn, address);
6900 else if (INSN_P (insn))
6902 rtx table;
6904 note_invalid_constants (insn, address, true);
6905 address += get_attr_length (insn);
6907 /* If the insn is a vector jump, add the size of the table
6908 and skip the table. */
6909 if ((table = is_jump_table (insn)) != NULL)
6911 address += get_jump_table_size (table);
6912 insn = table;
6917 fix = minipool_fix_head;
6919 /* Now scan the fixups and perform the required changes. */
6920 while (fix)
6922 Mfix * ftmp;
6923 Mfix * fdel;
6924 Mfix * last_added_fix;
6925 Mfix * last_barrier = NULL;
6926 Mfix * this_fix;
6928 /* Skip any further barriers before the next fix. */
6929 while (fix && GET_CODE (fix->insn) == BARRIER)
6930 fix = fix->next;
6932 /* No more fixes. */
6933 if (fix == NULL)
6934 break;
6936 last_added_fix = NULL;
6938 for (ftmp = fix; ftmp; ftmp = ftmp->next)
6940 if (GET_CODE (ftmp->insn) == BARRIER)
6942 if (ftmp->address >= minipool_vector_head->max_address)
6943 break;
6945 last_barrier = ftmp;
6947 else if ((ftmp->minipool = add_minipool_forward_ref (ftmp)) == NULL)
6948 break;
6950 last_added_fix = ftmp; /* Keep track of the last fix added. */
6953 /* If we found a barrier, drop back to that; any fixes that we
6954 could have reached but come after the barrier will now go in
6955 the next mini-pool. */
6956 if (last_barrier != NULL)
6958 /* Reduce the refcount for those fixes that won't go into this
6959 pool after all. */
6960 for (fdel = last_barrier->next;
6961 fdel && fdel != ftmp;
6962 fdel = fdel->next)
6964 fdel->minipool->refcount--;
6965 fdel->minipool = NULL;
6968 ftmp = last_barrier;
6970 else
6972 /* ftmp is the first fix that we can't fit into this pool and
6973 there are no natural barriers that we could use. Insert a
6974 new barrier in the code somewhere between the previous
6975 fix and this one, and arrange to jump around it. */
6976 HOST_WIDE_INT max_address;
6978 /* The last item on the list of fixes must be a barrier, so
6979 we can never run off the end of the list of fixes without
6980 last_barrier being set. */
6981 if (ftmp == NULL)
6982 abort ();
6984 max_address = minipool_vector_head->max_address;
6985 /* Check that there isn't another fix that is in range that
6986 we couldn't fit into this pool because the pool was
6987 already too large: we need to put the pool before such an
6988 instruction. */
6989 if (ftmp->address < max_address)
6990 max_address = ftmp->address;
6992 last_barrier = create_fix_barrier (last_added_fix, max_address);
6995 assign_minipool_offsets (last_barrier);
6997 while (ftmp)
6999 if (GET_CODE (ftmp->insn) != BARRIER
7000 && ((ftmp->minipool = add_minipool_backward_ref (ftmp))
7001 == NULL))
7002 break;
7004 ftmp = ftmp->next;
7007 /* Scan over the fixes we have identified for this pool, fixing them
7008 up and adding the constants to the pool itself. */
7009 for (this_fix = fix; this_fix && ftmp != this_fix;
7010 this_fix = this_fix->next)
7011 if (GET_CODE (this_fix->insn) != BARRIER)
7013 rtx addr
7014 = plus_constant (gen_rtx_LABEL_REF (VOIDmode,
7015 minipool_vector_label),
7016 this_fix->minipool->offset);
7017 *this_fix->loc = gen_rtx_MEM (this_fix->mode, addr);
7020 dump_minipool (last_barrier->insn);
7021 fix = ftmp;
7024 /* From now on we must synthesize any constants that we can't handle
7025 directly. This can happen if the RTL gets split during final
7026 instruction generation. */
7027 after_arm_reorg = 1;
7029 /* Free the minipool memory. */
7030 obstack_free (&minipool_obstack, minipool_startobj);
7033 /* Routines to output assembly language. */
7035 /* If the rtx is the correct value then return the string of the number.
7036 In this way we can ensure that valid double constants are generated even
7037 when cross compiling. */
7038 const char *
7039 fp_immediate_constant (rtx x)
7041 REAL_VALUE_TYPE r;
7042 int i;
7044 if (!fpa_consts_inited)
7045 init_fpa_table ();
7047 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
7048 for (i = 0; i < 8; i++)
7049 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
7050 return strings_fpa[i];
7052 abort ();
7055 /* As for fp_immediate_constant, but value is passed directly, not in rtx. */
7056 static const char *
7057 fp_const_from_val (REAL_VALUE_TYPE *r)
7059 int i;
7061 if (!fpa_consts_inited)
7062 init_fpa_table ();
7064 for (i = 0; i < 8; i++)
7065 if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
7066 return strings_fpa[i];
7068 abort ();
7071 /* Output the operands of a LDM/STM instruction to STREAM.
7072 MASK is the ARM register set mask of which only bits 0-15 are important.
7073 REG is the base register, either the frame pointer or the stack pointer,
7074 INSTR is the possibly suffixed load or store instruction. */
7075 static void
7076 print_multi_reg (FILE *stream, const char *instr, int reg, int mask)
7078 int i;
7079 int not_first = FALSE;
7081 fputc ('\t', stream);
7082 asm_fprintf (stream, instr, reg);
7083 fputs (", {", stream);
7085 for (i = 0; i <= LAST_ARM_REGNUM; i++)
7086 if (mask & (1 << i))
7088 if (not_first)
7089 fprintf (stream, ", ");
7091 asm_fprintf (stream, "%r", i);
7092 not_first = TRUE;
7095 fprintf (stream, "}");
7097 /* Add a ^ character for the 26-bit ABI, but only if we were loading
7098 the PC. Otherwise we would generate an UNPREDICTABLE instruction.
7099 Strictly speaking the instruction would be unpredictable only if
7100 we were writing back the base register as well, but since we never
7101 want to generate an LDM type 2 instruction (register bank switching)
7102 which is what you get if the PC is not being loaded, we do not need
7103 to check for writeback. */
7104 if (! TARGET_APCS_32
7105 && ((mask & (1 << PC_REGNUM)) != 0))
7106 fprintf (stream, "^");
7108 fprintf (stream, "\n");
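/* Illustration (instruction template assumed): with MASK == 0x4030,
   bits 4, 5 and 14 are set, so the register list printed is
   {r4, r5, lr}; under the 26-bit ABI a mask that includes the PC
   would additionally receive the trailing ^.  */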
7111 /* Output a 'call' insn. */
7112 const char *
7113 output_call (rtx *operands)
7115 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
7117 if (REGNO (operands[0]) == LR_REGNUM)
7119 operands[0] = gen_rtx_REG (SImode, IP_REGNUM);
7120 output_asm_insn ("mov%?\t%0, %|lr", operands);
7123 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
7125 if (TARGET_INTERWORK)
7126 output_asm_insn ("bx%?\t%0", operands);
7127 else
7128 output_asm_insn ("mov%?\t%|pc, %0", operands);
7130 return "";
7133 /* Output a 'call' insn that is a reference in memory. */
7134 const char *
7135 output_call_mem (rtx *operands)
7137 if (TARGET_INTERWORK)
7139 output_asm_insn ("ldr%?\t%|ip, %0", operands);
7140 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
7141 output_asm_insn ("bx%?\t%|ip", operands);
7143 else if (regno_use_in (LR_REGNUM, operands[0]))
7145 /* LR is used in the memory address. We load the address in the
7146 first instruction. It's safe to use IP as the target of the
7147 load since the call will kill it anyway. */
7148 output_asm_insn ("ldr%?\t%|ip, %0", operands);
7149 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
7150 output_asm_insn ("mov%?\t%|pc, %|ip", operands);
7152 else
7154 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
7155 output_asm_insn ("ldr%?\t%|pc, %0", operands);
7158 return "";
7162 /* Output a move from arm registers to an fpa register.
7163 OPERANDS[0] is an fpa register.
7164 OPERANDS[1] is the first register of an arm register pair. */
7165 const char *
7166 output_mov_long_double_fpa_from_arm (rtx *operands)
7168 int arm_reg0 = REGNO (operands[1]);
7169 rtx ops[3];
7171 if (arm_reg0 == IP_REGNUM)
7172 abort ();
7174 ops[0] = gen_rtx_REG (SImode, arm_reg0);
7175 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
7176 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
7178 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
7179 output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);
7181 return "";
7184 /* Output a move from an fpa register to arm registers.
7185 OPERANDS[0] is the first register of an arm register pair.
7186 OPERANDS[1] is an fpa register. */
7187 const char *
7188 output_mov_long_double_arm_from_fpa (rtx *operands)
7190 int arm_reg0 = REGNO (operands[0]);
7191 rtx ops[3];
7193 if (arm_reg0 == IP_REGNUM)
7194 abort ();
7196 ops[0] = gen_rtx_REG (SImode, arm_reg0);
7197 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
7198 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
7200 output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
7201 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
7202 return "";
7205 /* Output a move from arm registers to arm registers of a long double.
7206 OPERANDS[0] is the destination.
7207 OPERANDS[1] is the source. */
7208 const char *
7209 output_mov_long_double_arm_from_arm (rtx *operands)
7211 /* We have to be careful here because the two might overlap. */
7212 int dest_start = REGNO (operands[0]);
7213 int src_start = REGNO (operands[1]);
7214 rtx ops[2];
7215 int i;
7217 if (dest_start < src_start)
7219 for (i = 0; i < 3; i++)
7221 ops[0] = gen_rtx_REG (SImode, dest_start + i);
7222 ops[1] = gen_rtx_REG (SImode, src_start + i);
7223 output_asm_insn ("mov%?\t%0, %1", ops);
7226 else
7228 for (i = 2; i >= 0; i--)
7230 ops[0] = gen_rtx_REG (SImode, dest_start + i);
7231 ops[1] = gen_rtx_REG (SImode, src_start + i);
7232 output_asm_insn ("mov%?\t%0, %1", ops);
7236 return "";
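/* Example of the ordering above (registers assumed): copying
   {r1, r2, r3} into {r2, r3, r4} uses the descending loop, emitting
   mov r4, r3; mov r3, r2; mov r2, r1
   so that no source register is overwritten before it has been read;
   a destination below the source uses the ascending loop instead.  */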
7240 /* Output a move from arm registers to an fpa register.
7241 OPERANDS[0] is an fpa register.
7242 OPERANDS[1] is the first register of an arm register pair. */
7243 const char *
7244 output_mov_double_fpa_from_arm (rtx *operands)
7246 int arm_reg0 = REGNO (operands[1]);
7247 rtx ops[2];
7249 if (arm_reg0 == IP_REGNUM)
7250 abort ();
7252 ops[0] = gen_rtx_REG (SImode, arm_reg0);
7253 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
7254 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
7255 output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
7256 return "";
7259 /* Output a move from an fpa register to arm registers.
7260 OPERANDS[0] is the first register of an arm register pair.
7261 OPERANDS[1] is an fpa register. */
7262 const char *
7263 output_mov_double_arm_from_fpa (rtx *operands)
7265 int arm_reg0 = REGNO (operands[0]);
7266 rtx ops[2];
7268 if (arm_reg0 == IP_REGNUM)
7269 abort ();
7271 ops[0] = gen_rtx_REG (SImode, arm_reg0);
7272 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
7273 output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
7274 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
7275 return "";
7278 /* Output a move between double words.
7279 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
7280 or MEM<-REG and all MEMs must be offsettable addresses. */
7281 const char *
7282 output_move_double (rtx *operands)
7284 enum rtx_code code0 = GET_CODE (operands[0]);
7285 enum rtx_code code1 = GET_CODE (operands[1]);
7286 rtx otherops[3];
7288 if (code0 == REG)
7290 int reg0 = REGNO (operands[0]);
7292 otherops[0] = gen_rtx_REG (SImode, 1 + reg0);
7294 if (code1 == REG)
7296 int reg1 = REGNO (operands[1]);
7297 if (reg1 == IP_REGNUM)
7298 abort ();
7300 /* Ensure the second source is not overwritten. */
7301 if (reg1 == reg0 + (WORDS_BIG_ENDIAN ? -1 : 1))
7302 output_asm_insn ("mov%?\t%Q0, %Q1\n\tmov%?\t%R0, %R1", operands);
7303 else
7304 output_asm_insn ("mov%?\t%R0, %R1\n\tmov%?\t%Q0, %Q1", operands);
7306 else if (code1 == CONST_VECTOR)
7308 HOST_WIDE_INT hint = 0;
7310 switch (GET_MODE (operands[1]))
7312 case V2SImode:
7313 otherops[1] = GEN_INT (INTVAL (CONST_VECTOR_ELT (operands[1], 1)));
7314 operands[1] = GEN_INT (INTVAL (CONST_VECTOR_ELT (operands[1], 0)));
7315 break;
7317 case V4HImode:
7318 if (BYTES_BIG_ENDIAN)
7320 hint = INTVAL (CONST_VECTOR_ELT (operands[1], 2));
7321 hint <<= 16;
7322 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 3));
7324 else
7326 hint = INTVAL (CONST_VECTOR_ELT (operands[1], 3));
7327 hint <<= 16;
7328 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 2));
7331 otherops[1] = GEN_INT (hint);
7332 hint = 0;
7334 if (BYTES_BIG_ENDIAN)
7336 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 0));
7337 hint <<= 16;
7338 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 1));
7340 else
7342 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 1));
7343 hint <<= 16;
7344 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 0));
7347 operands[1] = GEN_INT (hint);
7348 break;
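/* Worked example (element values assumed): a little-endian V4HImode
   vector {0x1111, 0x2222, 0x3333, 0x4444} packs into a low word of
   0x22221111 and a high word of 0x44443333; on a big-endian target
   the element order within each word is swapped.  */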
7350 case V8QImode:
7351 if (BYTES_BIG_ENDIAN)
7353 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 4));
7354 hint <<= 8;
7355 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 5));
7356 hint <<= 8;
7357 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 6));
7358 hint <<= 8;
7359 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 7));
7361 else
7363 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 7));
7364 hint <<= 8;
7365 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 6));
7366 hint <<= 8;
7367 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 5));
7368 hint <<= 8;
7369 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 4));
7372 otherops[1] = GEN_INT (hint);
7373 hint = 0;
7375 if (BYTES_BIG_ENDIAN)
7377 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 0));
7378 hint <<= 8;
7379 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 1));
7380 hint <<= 8;
7381 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 2));
7382 hint <<= 8;
7383 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 3));
7385 else
7387 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 3));
7388 hint <<= 8;
7389 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 2));
7390 hint <<= 8;
7391 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 1));
7392 hint <<= 8;
7393 hint |= INTVAL (CONST_VECTOR_ELT (operands[1], 0));
7396 operands[1] = GEN_INT (hint);
7397 break;
7399 default:
7400 abort ();
7402 output_mov_immediate (operands);
7403 output_mov_immediate (otherops);
7405 else if (code1 == CONST_DOUBLE)
7407 if (GET_MODE (operands[1]) == DFmode)
7409 REAL_VALUE_TYPE r;
7410 long l[2];
7412 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
7413 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
7414 otherops[1] = GEN_INT (l[1]);
7415 operands[1] = GEN_INT (l[0]);
7417 else if (GET_MODE (operands[1]) != VOIDmode)
7418 abort ();
7419 else if (WORDS_BIG_ENDIAN)
7421 otherops[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
7422 operands[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
7424 else
7426 otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
7427 operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
7430 output_mov_immediate (operands);
7431 output_mov_immediate (otherops);
7433 else if (code1 == CONST_INT)
7435 #if HOST_BITS_PER_WIDE_INT > 32
7436 /* If HOST_WIDE_INT is more than 32 bits, the intval tells us
7437 what the upper word is. */
7438 if (WORDS_BIG_ENDIAN)
7440 otherops[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
7441 operands[1] = GEN_INT (INTVAL (operands[1]) >> 32);
7443 else
7445 otherops[1] = GEN_INT (INTVAL (operands[1]) >> 32);
7446 operands[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
7448 #else
7449 /* Sign extend the intval into the high-order word. */
7450 if (WORDS_BIG_ENDIAN)
7452 otherops[1] = operands[1];
7453 operands[1] = (INTVAL (operands[1]) < 0
7454 ? constm1_rtx : const0_rtx);
7456 else
7457 otherops[1] = INTVAL (operands[1]) < 0 ? constm1_rtx : const0_rtx;
7458 #endif
7459 output_mov_immediate (otherops);
7460 output_mov_immediate (operands);
7462 else if (code1 == MEM)
7464 switch (GET_CODE (XEXP (operands[1], 0)))
7466 case REG:
7467 output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
7468 break;
7470 case PRE_INC:
7471 abort (); /* Should never happen now. */
7472 break;
7474 case PRE_DEC:
7475 output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
7476 break;
7478 case POST_INC:
7479 output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
7480 break;
7482 case POST_DEC:
7483 abort (); /* Should never happen now. */
7484 break;
7486 case LABEL_REF:
7487 case CONST:
7488 output_asm_insn ("adr%?\t%0, %1", operands);
7489 output_asm_insn ("ldm%?ia\t%0, %M0", operands);
7490 break;
7492 default:
7493 if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1),
7494 GET_MODE (XEXP (XEXP (operands[1], 0), 1))))
7496 otherops[0] = operands[0];
7497 otherops[1] = XEXP (XEXP (operands[1], 0), 0);
7498 otherops[2] = XEXP (XEXP (operands[1], 0), 1);
7500 if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
7502 if (GET_CODE (otherops[2]) == CONST_INT)
7504 switch ((int) INTVAL (otherops[2]))
7506 case -8:
7507 output_asm_insn ("ldm%?db\t%1, %M0", otherops);
7508 return "";
7509 case -4:
7510 output_asm_insn ("ldm%?da\t%1, %M0", otherops);
7511 return "";
7512 case 4:
7513 output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
7514 return "";
7517 if (!(const_ok_for_arm (INTVAL (otherops[2]))))
7518 output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
7519 else
7520 output_asm_insn ("add%?\t%0, %1, %2", otherops);
7522 else
7523 output_asm_insn ("add%?\t%0, %1, %2", otherops);
7525 else
7526 output_asm_insn ("sub%?\t%0, %1, %2", otherops);
7528 return "ldm%?ia\t%0, %M0";
7530 else
7532 otherops[1] = adjust_address (operands[1], SImode, 4);
7533 /* Take care of overlapping base/data reg. */
7534 if (reg_mentioned_p (operands[0], operands[1]))
7536 output_asm_insn ("ldr%?\t%0, %1", otherops);
7537 output_asm_insn ("ldr%?\t%0, %1", operands);
7539 else
7541 output_asm_insn ("ldr%?\t%0, %1", operands);
7542 output_asm_insn ("ldr%?\t%0, %1", otherops);
7547 else
7548 abort (); /* Constraints should prevent this. */
7550 else if (code0 == MEM && code1 == REG)
7552 if (REGNO (operands[1]) == IP_REGNUM)
7553 abort ();
7555 switch (GET_CODE (XEXP (operands[0], 0)))
7557 case REG:
7558 output_asm_insn ("stm%?ia\t%m0, %M1", operands);
7559 break;
7561 case PRE_INC:
7562 abort (); /* Should never happen now. */
7563 break;
7565 case PRE_DEC:
7566 output_asm_insn ("stm%?db\t%m0!, %M1", operands);
7567 break;
7569 case POST_INC:
7570 output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
7571 break;
7573 case POST_DEC:
7574 abort (); /* Should never happen now. */
7575 break;
7577 case PLUS:
7578 if (GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT)
7580 switch ((int) INTVAL (XEXP (XEXP (operands[0], 0), 1)))
7582 case -8:
7583 output_asm_insn ("stm%?db\t%m0, %M1", operands);
7584 return "";
7586 case -4:
7587 output_asm_insn ("stm%?da\t%m0, %M1", operands);
7588 return "";
7590 case 4:
7591 output_asm_insn ("stm%?ib\t%m0, %M1", operands);
7592 return "";
7595 /* Fall through */
7597 default:
7598 otherops[0] = adjust_address (operands[0], SImode, 4);
7599 otherops[1] = gen_rtx_REG (SImode, 1 + REGNO (operands[1]));
7600 output_asm_insn ("str%?\t%1, %0", operands);
7601 output_asm_insn ("str%?\t%1, %0", otherops);
7604 else
7605 /* Constraints should prevent this. */
7606 abort ();
7608 return "";
7612 /* Output an arbitrary MOV reg, #n.
7613 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
7614 const char *
7615 output_mov_immediate (rtx *operands)
7617 HOST_WIDE_INT n = INTVAL (operands[1]);
7619 /* Try to use one MOV. */
7620 if (const_ok_for_arm (n))
7621 output_asm_insn ("mov%?\t%0, %1", operands);
7623 /* Try to use one MVN. */
7624 else if (const_ok_for_arm (~n))
7626 operands[1] = GEN_INT (~n);
7627 output_asm_insn ("mvn%?\t%0, %1", operands);
7629 else
7631 int n_ones = 0;
7632 int i;
7634 /* If all else fails, make it out of ORRs or BICs as appropriate. */
7635 for (i = 0; i < 32; i++)
7636 if (n & 1 << i)
7637 n_ones++;
7639 if (n_ones > 16) /* Shorter to use MVN with BIC in this case. */
7640 output_multi_immediate (operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1, ~ n);
7641 else
7642 output_multi_immediate (operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1, n);
7645 return "";
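/* Illustration (destination register assumed): #255 needs only
   mov r0, #255; #0xffffff00 is emitted as mvn r0, #255; and a value
   such as 0x000f000f, which fits neither form, is built up by
   output_multi_immediate below.  */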
7648 /* Output an ADD r, s, #n where n may be too big for one instruction.
7649 If adding zero to one register, output nothing. */
7650 const char *
7651 output_add_immediate (rtx *operands)
7653 HOST_WIDE_INT n = INTVAL (operands[2]);
7655 if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
7657 if (n < 0)
7658 output_multi_immediate (operands,
7659 "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
7660 -n);
7661 else
7662 output_multi_immediate (operands,
7663 "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
7667 return "";
7670 /* Output a multiple immediate operation.
7671 OPERANDS is the vector of operands referred to in the output patterns.
7672 INSTR1 is the output pattern to use for the first constant.
7673 INSTR2 is the output pattern to use for subsequent constants.
7674 IMMED_OP is the index of the constant slot in OPERANDS.
7675 N is the constant value. */
7676 static const char *
7677 output_multi_immediate (rtx *operands, const char *instr1, const char *instr2,
7678 int immed_op, HOST_WIDE_INT n)
7680 #if HOST_BITS_PER_WIDE_INT > 32
7681 n &= 0xffffffff;
7682 #endif
7684 if (n == 0)
7686 /* Quick and easy output. */
7687 operands[immed_op] = const0_rtx;
7688 output_asm_insn (instr1, operands);
7690 else
7692 int i;
7693 const char * instr = instr1;
7695 /* Note that n is never zero here (which would give no output). */
7696 for (i = 0; i < 32; i += 2)
7698 if (n & (3 << i))
7700 operands[immed_op] = GEN_INT (n & (255 << i));
7701 output_asm_insn (instr, operands);
7702 instr = instr2;
7703 i += 6;
7708 return "";
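/* Worked example (register and patterns assumed): for N == 0x000f000f
   with the mov/orr templates, the scan finds the byte at bit 0 and
   then the byte at bit 16, emitting
   mov r0, #0x0000000f
   orr r0, r0, #0x000f0000
   each operand being an 8-bit value at an even rotation, as the ARM
   immediate encoding requires.  */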
7711 /* Return the appropriate ARM instruction for the operation code.
7712 The returned result should not be overwritten. OP is the rtx of the
7713 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
7714 was shifted. */
7715 const char *
7716 arithmetic_instr (rtx op, int shift_first_arg)
7718 switch (GET_CODE (op))
7720 case PLUS:
7721 return "add";
7723 case MINUS:
7724 return shift_first_arg ? "rsb" : "sub";
7726 case IOR:
7727 return "orr";
7729 case XOR:
7730 return "eor";
7732 case AND:
7733 return "and";
7735 default:
7736 abort ();
7740 /* Ensure valid constant shifts and return the appropriate shift mnemonic
7741 for the operation code. The returned result should not be overwritten.
7742 OP is the rtx code of the shift.
7743 On exit, *AMOUNTP will be -1 if the shift is by a register, or the
7744 constant shift amount otherwise. */
7745 static const char *
7746 shift_op (rtx op, HOST_WIDE_INT *amountp)
7748 const char * mnem;
7749 enum rtx_code code = GET_CODE (op);
7751 if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
7752 *amountp = -1;
7753 else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
7754 *amountp = INTVAL (XEXP (op, 1));
7755 else
7756 abort ();
7758 switch (code)
7760 case ASHIFT:
7761 mnem = "asl";
7762 break;
7764 case ASHIFTRT:
7765 mnem = "asr";
7766 break;
7768 case LSHIFTRT:
7769 mnem = "lsr";
7770 break;
7772 case ROTATERT:
7773 mnem = "ror";
7774 break;
7776 case MULT:
7777 /* We never have to worry about the amount being other than a
7778 power of 2, since this case can never be reloaded from a reg. */
7779 if (*amountp != -1)
7780 *amountp = int_log2 (*amountp);
7781 else
7782 abort ();
7783 return "asl";
7785 default:
7786 abort ();
7789 if (*amountp != -1)
7791 /* This is not 100% correct, but follows from the desire to merge
7792 multiplication by a power of 2 with the recognizer for a
7793 shift. >=32 is not a valid shift for "asl", so we must try and
7794 output a shift that produces the correct arithmetical result.
7795 Using lsr #32 is identical except for the fact that the carry bit
7796 is not set correctly if we set the flags; but we never use the
7797 carry bit from such an operation, so we can ignore that. */
7798 if (code == ROTATERT)
7799 /* Rotate is just modulo 32. */
7800 *amountp &= 31;
7801 else if (*amountp != (*amountp & 31))
7803 if (code == ASHIFT)
7804 mnem = "lsr";
7805 *amountp = 32;
7808 /* Shifts of 0 are no-ops. */
7809 if (*amountp == 0)
7810 return NULL;
7813 return mnem;
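/* Examples of the normalizations above (operands assumed): a MULT by
   8 is emitted as asl #3; a rotate by 33 becomes ror #1; and an asl
   by 40, which is not encodable, is turned into lsr #32 so that the
   arithmetic result (zero) is still correct.  */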
7816 /* Obtain the shift from the POWER of two. */
7818 static HOST_WIDE_INT
7819 int_log2 (HOST_WIDE_INT power)
7821 HOST_WIDE_INT shift = 0;
7823 while ((((HOST_WIDE_INT) 1 << shift) & power) == 0)
7825 if (shift > 31)
7826 abort ();
7827 shift++;
7830 return shift;
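/* For example, int_log2 (4) returns 2 and int_log2 (32) returns 5;
   a power of two in range is assumed, as the caller above guarantees.  */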
7833 /* Output a .ascii pseudo-op, keeping track of lengths. This is because
7834 /bin/as is horribly restrictive. */
7835 #define MAX_ASCII_LEN 51
7837 void
7838 output_ascii_pseudo_op (FILE *stream, const unsigned char *p, int len)
7840 int i;
7841 int len_so_far = 0;
7843 fputs ("\t.ascii\t\"", stream);
7845 for (i = 0; i < len; i++)
7847 int c = p[i];
7849 if (len_so_far >= MAX_ASCII_LEN)
7851 fputs ("\"\n\t.ascii\t\"", stream);
7852 len_so_far = 0;
7855 switch (c)
7857 case TARGET_TAB:
7858 fputs ("\\t", stream);
7859 len_so_far += 2;
7860 break;
7862 case TARGET_FF:
7863 fputs ("\\f", stream);
7864 len_so_far += 2;
7865 break;
7867 case TARGET_BS:
7868 fputs ("\\b", stream);
7869 len_so_far += 2;
7870 break;
7872 case TARGET_CR:
7873 fputs ("\\r", stream);
7874 len_so_far += 2;
7875 break;
7877 case TARGET_NEWLINE:
7878 fputs ("\\n", stream);
7879 c = p [i + 1];
7880 if ((c >= ' ' && c <= '~')
7881 || c == TARGET_TAB)
7882 /* This is a good place for a line break. */
7883 len_so_far = MAX_ASCII_LEN;
7884 else
7885 len_so_far += 2;
7886 break;
7888 case '\"':
7889 case '\\':
7890 putc ('\\', stream);
7891 len_so_far++;
7892 /* drop through. */
7894 default:
7895 if (c >= ' ' && c <= '~')
7897 putc (c, stream);
7898 len_so_far++;
7900 else
7902 fprintf (stream, "\\%03o", c);
7903 len_so_far += 4;
7905 break;
7909 fputs ("\"\n", stream);
7912 /* Compute the register save mask for registers 0 through 12
7913 inclusive. This code is used by both arm_compute_save_reg_mask
7914 and arm_compute_initial_elimination_offset. */
7915 static unsigned long
7916 arm_compute_save_reg0_reg12_mask (void)
7918 unsigned long func_type = arm_current_func_type ();
7919 unsigned int save_reg_mask = 0;
7920 unsigned int reg;
7922 if (IS_INTERRUPT (func_type))
7924 unsigned int max_reg;
7925 /* Interrupt functions must not corrupt any registers,
7926 even call clobbered ones. If this is a leaf function
7927 we can just examine the registers used by the RTL, but
7928 otherwise we have to assume that whatever function is
7929 called might clobber anything, and so we have to save
7930 all the call-clobbered registers as well. */
7931 if (ARM_FUNC_TYPE (func_type) == ARM_FT_FIQ)
7932 /* FIQ handlers have registers r8 - r12 banked, so
7933 we only need to check r0 - r7. Normal ISRs only
7934 bank r14 and r15, so we must check up to r12.
7935 r13 is the stack pointer which is always preserved,
7936 so we do not need to consider it here. */
7937 max_reg = 7;
7938 else
7939 max_reg = 12;
7941 for (reg = 0; reg <= max_reg; reg++)
7942 if (regs_ever_live[reg]
7943 || (! current_function_is_leaf && call_used_regs [reg]))
7944 save_reg_mask |= (1 << reg);
7946 else
7948 /* In the normal case we only need to save those registers
7949 which are call saved and which are used by this function. */
7950 for (reg = 0; reg <= 10; reg++)
7951 if (regs_ever_live[reg] && ! call_used_regs [reg])
7952 save_reg_mask |= (1 << reg);
7954 /* Handle the frame pointer as a special case. */
7955 if (! TARGET_APCS_FRAME
7956 && ! frame_pointer_needed
7957 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
7958 && ! call_used_regs[HARD_FRAME_POINTER_REGNUM])
7959 save_reg_mask |= 1 << HARD_FRAME_POINTER_REGNUM;
7961 /* If we aren't loading the PIC register,
7962 don't stack it even though it may be live. */
7963 if (flag_pic
7964 && ! TARGET_SINGLE_PIC_BASE
7965 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
7966 save_reg_mask |= 1 << PIC_OFFSET_TABLE_REGNUM;
7969 return save_reg_mask;
7972 /* Compute a bit mask of which registers need to be
7973 saved on the stack for the current function. */
7975 static unsigned long
7976 arm_compute_save_reg_mask (void)
7978 unsigned int save_reg_mask = 0;
7979 unsigned long func_type = arm_current_func_type ();
7981 if (IS_NAKED (func_type))
7982 /* This should never really happen. */
7983 return 0;
7985 /* If we are creating a stack frame, then we must save the frame pointer,
7986 IP (which will hold the old stack pointer), LR and the PC. */
7987 if (frame_pointer_needed)
7988 save_reg_mask |=
7989 (1 << ARM_HARD_FRAME_POINTER_REGNUM)
7990 | (1 << IP_REGNUM)
7991 | (1 << LR_REGNUM)
7992 | (1 << PC_REGNUM);
7994 /* Volatile functions do not return, so there
7995 is no need to save any other registers. */
7996 if (IS_VOLATILE (func_type))
7997 return save_reg_mask;
7999 save_reg_mask |= arm_compute_save_reg0_reg12_mask ();
8001 /* Decide if we need to save the link register.
8002 Interrupt routines have their own banked link register,
8003 so they never need to save it.
8004 Otherwise if we do not use the link register we do not need to save
8005 it. If we are pushing other registers onto the stack however, we
8006 can save an instruction in the epilogue by pushing the link register
8007 now and then popping it back into the PC. This incurs extra memory
8008 accesses though, so we only do it when optimizing for size, and only
8009 if we know that we will not need a fancy return sequence. */
8010 if (regs_ever_live [LR_REGNUM]
8011 || (save_reg_mask
8012 && optimize_size
8013 && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL))
8014 save_reg_mask |= 1 << LR_REGNUM;
8016 if (cfun->machine->lr_save_eliminated)
8017 save_reg_mask &= ~ (1 << LR_REGNUM);
8019 if (TARGET_REALLY_IWMMXT
8020 && ((bit_count (save_reg_mask)
8021 + ARM_NUM_INTS (current_function_pretend_args_size)) % 2) != 0)
8023 unsigned int reg;
8025 /* The total number of registers that are going to be pushed
8026 onto the stack is odd. We need to ensure that the stack
8027 is 64-bit aligned before we start to save iWMMXt registers,
8028 and also before we start to create locals. (A local variable
8029 might be a double or long long which we will load/store using
8030 an iWMMXt instruction). Therefore we need to push another
8031 ARM register, so that the stack will be 64-bit aligned. We
8032 try to avoid using the arg registers (r0 -r3) as they might be
8033 used to pass values in a tail call. */
8034 for (reg = 4; reg <= 12; reg++)
8035 if ((save_reg_mask & (1 << reg)) == 0)
8036 break;
8038 if (reg <= 12)
8039 save_reg_mask |= (1 << reg);
8040 else
8042 cfun->machine->sibcall_blocked = 1;
8043 save_reg_mask |= (1 << 3);
8047 return save_reg_mask;
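/* Worked example (illustrative; the exact masks depend on the
   function): if only LR is live and there are no pretend args, the
   count above is odd, so the loop picks r4 - the first spare
   register outside the argument registers - and the final mask
   becomes {r4, lr}, keeping the stack 64-bit aligned. Only if
   r4 - r12 are all taken do we fall back to r3 and block sibling
   calls. */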
8050 /* Generate a function exit sequence. If REALLY_RETURN is false, then do
8051 everything bar the final return instruction. */
8052 const char *
8053 output_return_instruction (rtx operand, int really_return, int reverse)
8055 char conditional[10];
8056 char instr[100];
8057 int reg;
8058 unsigned long live_regs_mask;
8059 unsigned long func_type;
8061 func_type = arm_current_func_type ();
8063 if (IS_NAKED (func_type))
8064 return "";
8066 if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
8068 /* If this function was declared non-returning, and we have found a tail
8069 call, then we have to trust that the called function won't return. */
8070 if (really_return)
8072 rtx ops[2];
8074 /* Otherwise, trap an attempted return by aborting. */
8075 ops[0] = operand;
8076 ops[1] = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)"
8077 : "abort");
8078 assemble_external_libcall (ops[1]);
8079 output_asm_insn (reverse ? "bl%D0\t%a1" : "bl%d0\t%a1", ops);
8082 return "";
8085 if (current_function_calls_alloca && !really_return)
8086 abort ();
8088 sprintf (conditional, "%%?%%%c0", reverse ? 'D' : 'd');
8090 return_used_this_function = 1;
8092 live_regs_mask = arm_compute_save_reg_mask ();
8094 if (live_regs_mask)
8096 const char * return_reg;
8098 /* If we do not have any special requirements for function exit
8099 (eg interworking, or ISR) then we can load the return address
8100 directly into the PC. Otherwise we must load it into LR. */
8101 if (really_return
8102 && ! TARGET_INTERWORK)
8103 return_reg = reg_names[PC_REGNUM];
8104 else
8105 return_reg = reg_names[LR_REGNUM];
8107 if ((live_regs_mask & (1 << IP_REGNUM)) == (1 << IP_REGNUM))
8108 /* There are two possible reasons for the IP register being saved.
8109 Either a stack frame was created, in which case IP contains the
8110 old stack pointer, or an ISR routine corrupted it. If this is an
8111 ISR routine then just restore IP, otherwise restore IP into SP. */
8112 if (! IS_INTERRUPT (func_type))
8114 live_regs_mask &= ~ (1 << IP_REGNUM);
8115 live_regs_mask |= (1 << SP_REGNUM);
8118 /* On some ARM architectures it is faster to use LDR rather than
8119 LDM to load a single register. On other architectures, the
8120 cost is the same. In 26-bit mode, or for exception handlers,
8121 we have to use LDM to load the PC so that the CPSR is also
8122 restored. */
8123 for (reg = 0; reg <= LAST_ARM_REGNUM; reg++)
8125 if (live_regs_mask == (unsigned int)(1 << reg))
8126 break;
8128 if (reg <= LAST_ARM_REGNUM
8129 && (reg != LR_REGNUM
8130 || ! really_return
8131 || (TARGET_APCS_32 && ! IS_INTERRUPT (func_type))))
8133 sprintf (instr, "ldr%s\t%%|%s, [%%|sp], #4", conditional,
8134 (reg == LR_REGNUM) ? return_reg : reg_names[reg]);
8136 else
8138 char *p;
8139 int first = 1;
8141 /* Generate the load multiple instruction to restore the registers. */
8142 if (frame_pointer_needed)
8143 sprintf (instr, "ldm%sea\t%%|fp, {", conditional);
8144 else if (live_regs_mask & (1 << SP_REGNUM))
8145 sprintf (instr, "ldm%sfd\t%%|sp, {", conditional);
8146 else
8147 sprintf (instr, "ldm%sfd\t%%|sp!, {", conditional);
8149 p = instr + strlen (instr);
8151 for (reg = 0; reg <= SP_REGNUM; reg++)
8152 if (live_regs_mask & (1 << reg))
8154 int l = strlen (reg_names[reg]);
8156 if (first)
8157 first = 0;
8158 else
8160 memcpy (p, ", ", 2);
8161 p += 2;
8164 memcpy (p, "%|", 2);
8165 memcpy (p + 2, reg_names[reg], l);
8166 p += l + 2;
8169 if (live_regs_mask & (1 << LR_REGNUM))
8171 sprintf (p, "%s%%|%s}", first ? "" : ", ", return_reg);
8172 /* Decide if we need to add the ^ symbol to the end of the
8173 register list. This causes the saved condition codes
8174 register to be copied into the current condition codes
8175 register. We do the copy if we are conforming to the 32-bit
8176 ABI and this is an interrupt function, or if we are
8177 conforming to the 26-bit ABI. There is a special case for
8178 the 26-bit ABI however, which is if we are writing back the
8179 stack pointer but not loading the PC. In this case adding
8180 the ^ symbol would create a type 2 LDM instruction, where
8181 writeback is UNPREDICTABLE. We are safe in leaving the ^
8182 character off in this case however, since the actual return
8183 instruction will be a MOVS which will restore the CPSR. */
8184 if ((TARGET_APCS_32 && IS_INTERRUPT (func_type))
8185 || (! TARGET_APCS_32 && really_return))
8186 strcat (p, "^");
8188 else
8189 strcpy (p, "}");
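/* For example (an illustrative sketch), an APCS-32 interrupt
   routine restoring {r0 - r4, pc} would emit

	ldmfd	sp!, {r0, r1, r2, r3, r4, pc}^

   with the trailing ^ copying the saved SPSR back into the CPSR
   as the PC is loaded. */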
8192 output_asm_insn (instr, & operand);
8194 /* See if we need to generate an extra instruction to
8195 perform the actual function return. */
8196 if (really_return
8197 && func_type != ARM_FT_INTERWORKED
8198 && (live_regs_mask & (1 << LR_REGNUM)) != 0)
8200 /* The return has already been handled
8201 by loading the LR into the PC. */
8202 really_return = 0;
8206 if (really_return)
8208 switch ((int) ARM_FUNC_TYPE (func_type))
8210 case ARM_FT_ISR:
8211 case ARM_FT_FIQ:
8212 sprintf (instr, "sub%ss\t%%|pc, %%|lr, #4", conditional);
8213 break;
8215 case ARM_FT_INTERWORKED:
8216 sprintf (instr, "bx%s\t%%|lr", conditional);
8217 break;
8219 case ARM_FT_EXCEPTION:
8220 sprintf (instr, "mov%ss\t%%|pc, %%|lr", conditional);
8221 break;
8223 default:
8224 /* ARMv5 implementations always provide BX, so interworking
8225 is the default unless APCS-26 is in use. */
8226 if ((insn_flags & FL_ARCH5) != 0 && TARGET_APCS_32)
8227 sprintf (instr, "bx%s\t%%|lr", conditional);
8228 else
8229 sprintf (instr, "mov%s%s\t%%|pc, %%|lr",
8230 conditional, TARGET_APCS_32 ? "" : "s");
8231 break;
8234 output_asm_insn (instr, & operand);
8237 return "";
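/* To summarize the switch above, the final return instruction is,
   illustratively:

	ISR/FIQ:	subs	pc, lr, #4
	interworked:	bx	lr
	exception:	movs	pc, lr
	normal: 	mov	pc, lr	(or "bx lr" on ARMv5 APCS-32,
					"movs pc, lr" under APCS-26) */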
8240 /* Write the function name into the code section, directly preceding
8241 the function prologue.
8243 Code will be output similar to this:
8245 .ascii "arm_poke_function_name", 0
8246 .align
8248 .word 0xff000000 + (t1 - t0)
8249 arm_poke_function_name
8250 mov ip, sp
8251 stmfd sp!, {fp, ip, lr, pc}
8252 sub fp, ip, #4
8254 When performing a stack backtrace, code can inspect the value
8255 of 'pc' stored at 'fp' + 0. If the trace function then looks
8256 at location pc - 12 and the top 8 bits are set, then we know
8257 that there is a function name embedded immediately preceding this
8258 location and has length ((pc[-3]) & 0x00ffffff).
8260 We assume that pc is declared as a pointer to an unsigned long.
8262 It is of no benefit to output the function name if we are assembling
8263 a leaf function. These function types will not contain a stack
8264 backtrace structure, therefore it is not possible to determine the
8265 function name. */
8266 void
8267 arm_poke_function_name (FILE *stream, const char *name)
8269 unsigned long alignlength;
8270 unsigned long length;
8271 rtx x;
8273 length = strlen (name) + 1;
8274 alignlength = ROUND_UP_WORD (length);
8276 ASM_OUTPUT_ASCII (stream, name, length);
8277 ASM_OUTPUT_ALIGN (stream, 2);
8278 x = GEN_INT ((unsigned HOST_WIDE_INT) 0xff000000 + alignlength);
8279 assemble_aligned_integer (UNITS_PER_WORD, x);
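#if 0
/* Illustrative sketch only - not part of GCC. This is how a
   backtrace routine might recover the poked name, given the layout
   documented above; PC is the value loaded from fp + 0 and the
   function name here is hypothetical. */
static const char *
arm_find_poked_name (const unsigned long *pc)
{
  unsigned long marker = pc[-3];	/* The word at pc - 12. */

  if ((marker & 0xff000000) != 0xff000000)
    return 0;				/* No name was poked. */

  /* The NUL terminated, word aligned name immediately precedes the
     marker word; its padded length is in the low 24 bits. */
  return (const char *) (pc - 3) - (marker & 0x00ffffff);
}
#endif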
8282 /* Place some comments into the assembler stream
8283 describing the current function. */
8284 static void
8285 arm_output_function_prologue (FILE *f, HOST_WIDE_INT frame_size)
8287 unsigned long func_type;
8289 if (!TARGET_ARM)
8291 thumb_output_function_prologue (f, frame_size);
8292 return;
8295 /* Sanity check. */
8296 if (arm_ccfsm_state || arm_target_insn)
8297 abort ();
8299 func_type = arm_current_func_type ();
8301 switch ((int) ARM_FUNC_TYPE (func_type))
8303 default:
8304 case ARM_FT_NORMAL:
8305 break;
8306 case ARM_FT_INTERWORKED:
8307 asm_fprintf (f, "\t%@ Function supports interworking.\n");
8308 break;
8309 case ARM_FT_EXCEPTION_HANDLER:
8310 asm_fprintf (f, "\t%@ C++ Exception Handler.\n");
8311 break;
8312 case ARM_FT_ISR:
8313 asm_fprintf (f, "\t%@ Interrupt Service Routine.\n");
8314 break;
8315 case ARM_FT_FIQ:
8316 asm_fprintf (f, "\t%@ Fast Interrupt Service Routine.\n");
8317 break;
8318 case ARM_FT_EXCEPTION:
8319 asm_fprintf (f, "\t%@ ARM Exception Handler.\n");
8320 break;
8323 if (IS_NAKED (func_type))
8324 asm_fprintf (f, "\t%@ Naked Function: prologue and epilogue provided by programmer.\n");
8326 if (IS_VOLATILE (func_type))
8327 asm_fprintf (f, "\t%@ Volatile: function does not return.\n");
8329 if (IS_NESTED (func_type))
8330 asm_fprintf (f, "\t%@ Nested: function declared inside another function.\n");
8332 asm_fprintf (f, "\t%@ args = %d, pretend = %d, frame = %wd\n",
8333 current_function_args_size,
8334 current_function_pretend_args_size, frame_size);
8336 asm_fprintf (f, "\t%@ frame_needed = %d, uses_anonymous_args = %d\n",
8337 frame_pointer_needed,
8338 cfun->machine->uses_anonymous_args);
8340 if (cfun->machine->lr_save_eliminated)
8341 asm_fprintf (f, "\t%@ link register save eliminated.\n");
8343 #ifdef AOF_ASSEMBLER
8344 if (flag_pic)
8345 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, PIC_OFFSET_TABLE_REGNUM);
8346 #endif
8348 return_used_this_function = 0;
8351 const char *
8352 arm_output_epilogue (int really_return)
8354 int reg;
8355 unsigned long saved_regs_mask;
8356 unsigned long func_type;
8357 /* Floats_offset is the offset from the "virtual" frame. In an APCS
8358 frame that is $fp + 4 for a non-variadic function. */
8359 int floats_offset = 0;
8360 rtx operands[3];
8361 int frame_size = arm_get_frame_size ();
8362 FILE * f = asm_out_file;
8363 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
8364 unsigned int lrm_count = 0;
8366 /* If we have already generated the return instruction
8367 then it is futile to generate anything else. */
8368 if (use_return_insn (FALSE) && return_used_this_function)
8369 return "";
8371 func_type = arm_current_func_type ();
8373 if (IS_NAKED (func_type))
8374 /* Naked functions don't have epilogues. */
8375 return "";
8377 if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
8379 rtx op;
8381 /* A volatile function should never return. Call abort. */
8382 op = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)" : "abort");
8383 assemble_external_libcall (op);
8384 output_asm_insn ("bl\t%a0", &op);
8386 return "";
8389 if (ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER
8390 && ! really_return)
8391 /* If we are throwing an exception, then we really must
8392 be doing a return, so we can't tail-call. */
8393 abort ();
8395 saved_regs_mask = arm_compute_save_reg_mask ();
8397 if (TARGET_IWMMXT)
8398 lrm_count = bit_count (saved_regs_mask);
8400 /* XXX We should adjust floats_offset for any anonymous args, and then
8401 re-adjust vfp_offset below to compensate. */
8403 /* Compute how far away the floats will be. */
8404 for (reg = 0; reg <= LAST_ARM_REGNUM; reg++)
8405 if (saved_regs_mask & (1 << reg))
8406 floats_offset += 4;
8408 if (frame_pointer_needed)
8410 int vfp_offset = 4;
8412 if (arm_fpu_arch == FPUTYPE_FPA_EMU2)
8414 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
8415 if (regs_ever_live[reg] && !call_used_regs[reg])
8417 floats_offset += 12;
8418 asm_fprintf (f, "\tldfe\t%r, [%r, #-%d]\n",
8419 reg, FP_REGNUM, floats_offset - vfp_offset);
8422 else
8424 int start_reg = LAST_ARM_FP_REGNUM;
8426 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
8428 if (regs_ever_live[reg] && !call_used_regs[reg])
8430 floats_offset += 12;
8432 /* We can't unstack more than four registers at once. */
8433 if (start_reg - reg == 3)
8435 asm_fprintf (f, "\tlfm\t%r, 4, [%r, #-%d]\n",
8436 reg, FP_REGNUM, floats_offset - vfp_offset);
8437 start_reg = reg - 1;
8440 else
8442 if (reg != start_reg)
8443 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
8444 reg + 1, start_reg - reg,
8445 FP_REGNUM, floats_offset - vfp_offset);
8446 start_reg = reg - 1;
8450 /* Just in case the last register checked also needs unstacking. */
8451 if (reg != start_reg)
8452 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
8453 reg + 1, start_reg - reg,
8454 FP_REGNUM, floats_offset - vfp_offset);
8457 if (TARGET_IWMMXT)
8459 /* The frame pointer is guaranteed not to be double-word aligned.
8460 This is because it is set to (old_stack_pointer - 4) and the
8461 old_stack_pointer was double word aligned. Thus the offset to
8462 the iWMMXt registers to be loaded must also be an odd multiple
8463 of four, so that the resultant address *is* double-word aligned.
8464 We can ignore floats_offset since that was already included in
8465 the live_regs_mask. */
8466 lrm_count += (lrm_count % 2 ? 2 : 1);
8468 for (reg = FIRST_IWMMXT_REGNUM; reg <= LAST_IWMMXT_REGNUM; reg++)
8469 if (regs_ever_live[reg] && !call_used_regs[reg])
8471 asm_fprintf (f, "\twldrd\t%r, [%r, #-%d]\n",
8472 reg, FP_REGNUM, lrm_count * 4);
8473 lrm_count += 2;
8477 /* saved_regs_mask should contain the IP, which at the time of stack
8478 frame generation actually contains the old stack pointer. So a
8479 quick way to unwind the stack is just pop the IP register directly
8480 into the stack pointer. */
8481 if ((saved_regs_mask & (1 << IP_REGNUM)) == 0)
8482 abort ();
8483 saved_regs_mask &= ~ (1 << IP_REGNUM);
8484 saved_regs_mask |= (1 << SP_REGNUM);
8486 /* There are two registers left in saved_regs_mask - LR and PC. We
8487 only need to restore the LR register (the return address), but to
8488 save time we can load it directly into the PC, unless we need a
8489 special function exit sequence, or we are not really returning. */
8490 if (really_return && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL)
8491 /* Delete the LR from the register mask, so that the LR on
8492 the stack is loaded into the PC in the register mask. */
8493 saved_regs_mask &= ~ (1 << LR_REGNUM);
8494 else
8495 saved_regs_mask &= ~ (1 << PC_REGNUM);
8497 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, saved_regs_mask);
8499 if (IS_INTERRUPT (func_type))
8500 /* Interrupt handlers will have pushed the
8501 IP onto the stack, so restore it now. */
8502 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, 1 << IP_REGNUM);
8504 else
8506 /* Restore stack pointer if necessary. */
8507 if (frame_size + current_function_outgoing_args_size != 0)
8509 operands[0] = operands[1] = stack_pointer_rtx;
8510 operands[2] = GEN_INT (frame_size
8511 + current_function_outgoing_args_size);
8512 output_add_immediate (operands);
8515 if (arm_fpu_arch == FPUTYPE_FPA_EMU2)
8517 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
8518 if (regs_ever_live[reg] && !call_used_regs[reg])
8519 asm_fprintf (f, "\tldfe\t%r, [%r], #12\n",
8520 reg, SP_REGNUM);
8522 else
8524 int start_reg = FIRST_ARM_FP_REGNUM;
8526 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
8528 if (regs_ever_live[reg] && !call_used_regs[reg])
8530 if (reg - start_reg == 3)
8532 asm_fprintf (f, "\tlfmfd\t%r, 4, [%r]!\n",
8533 start_reg, SP_REGNUM);
8534 start_reg = reg + 1;
8537 else
8539 if (reg != start_reg)
8540 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
8541 start_reg, reg - start_reg,
8542 SP_REGNUM);
8544 start_reg = reg + 1;
8548 /* Just in case the last register checked also needs unstacking. */
8549 if (reg != start_reg)
8550 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
8551 start_reg, reg - start_reg, SP_REGNUM);
8554 if (TARGET_IWMMXT)
8555 for (reg = FIRST_IWMMXT_REGNUM; reg <= LAST_IWMMXT_REGNUM; reg++)
8556 if (regs_ever_live[reg] && !call_used_regs[reg])
8557 asm_fprintf (f, "\twldrd\t%r, [%r, #+8]!\n", reg, SP_REGNUM);
8559 /* If we can, restore the LR into the PC. */
8560 if (ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
8561 && really_return
8562 && current_function_pretend_args_size == 0
8563 && saved_regs_mask & (1 << LR_REGNUM))
8565 saved_regs_mask &= ~ (1 << LR_REGNUM);
8566 saved_regs_mask |= (1 << PC_REGNUM);
8569 /* Load the registers off the stack. If we only have one register
8570 to load use the LDR instruction - it is faster. */
8571 if (saved_regs_mask == (1 << LR_REGNUM))
8573 /* The exception handler ignores the LR, so we do
8574 not really need to load it off the stack. */
8575 if (eh_ofs)
8576 asm_fprintf (f, "\tadd\t%r, %r, #4\n", SP_REGNUM, SP_REGNUM);
8577 else
8578 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM, SP_REGNUM);
8580 else if (saved_regs_mask)
8582 if (saved_regs_mask & (1 << SP_REGNUM))
8583 /* Note - write back to the stack register is not enabled
8584 (ie "ldmfd sp!..."). We know that the stack pointer is
8585 in the list of registers and if we add writeback the
8586 instruction becomes UNPREDICTABLE. */
8587 print_multi_reg (f, "ldmfd\t%r", SP_REGNUM, saved_regs_mask);
8588 else
8589 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, saved_regs_mask);
8592 if (current_function_pretend_args_size)
8594 /* Unwind the pre-pushed regs. */
8595 operands[0] = operands[1] = stack_pointer_rtx;
8596 operands[2] = GEN_INT (current_function_pretend_args_size);
8597 output_add_immediate (operands);
8601 #if 0
8602 if (ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER)
8603 /* Adjust the stack to remove the exception handler stuff. */
8604 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
8605 REGNO (eh_ofs));
8606 #endif
8608 if (! really_return
8609 || (ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
8610 && current_function_pretend_args_size == 0
8611 && saved_regs_mask & (1 << PC_REGNUM)))
8612 return "";
8614 /* Generate the return instruction. */
8615 switch ((int) ARM_FUNC_TYPE (func_type))
8617 case ARM_FT_EXCEPTION_HANDLER:
8618 /* Even in 26-bit mode we do a mov (rather than a movs)
8619 because we don't have the PSR bits set in the address. */
8620 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, EXCEPTION_LR_REGNUM);
8621 break;
8623 case ARM_FT_ISR:
8624 case ARM_FT_FIQ:
8625 asm_fprintf (f, "\tsubs\t%r, %r, #4\n", PC_REGNUM, LR_REGNUM);
8626 break;
8628 case ARM_FT_EXCEPTION:
8629 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
8630 break;
8632 case ARM_FT_INTERWORKED:
8633 asm_fprintf (f, "\tbx\t%r\n", LR_REGNUM);
8634 break;
8636 default:
8637 if (frame_pointer_needed)
8638 /* If we used the frame pointer then the return address
8639 will have been loaded off the stack directly into the
8640 PC, so there is no need to issue a MOV instruction
8641 here. */
8643 else if (current_function_pretend_args_size == 0
8644 && (saved_regs_mask & (1 << LR_REGNUM)))
8645 /* Similarly we may have been able to load LR into the PC
8646 even if we did not create a stack frame. */
8648 else if (TARGET_APCS_32)
8649 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, LR_REGNUM);
8650 else
8651 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
8652 break;
8655 return "";
8658 static void
8659 arm_output_function_epilogue (FILE *file ATTRIBUTE_UNUSED,
8660 HOST_WIDE_INT frame_size)
8662 if (TARGET_THUMB)
8664 /* ??? Probably not safe to set this here, since it assumes that a
8665 function will be emitted as assembly immediately after we generate
8666 RTL for it. This does not happen for inline functions. */
8667 return_used_this_function = 0;
8669 else
8671 /* We need to take into account any stack-frame rounding. */
8672 frame_size = arm_get_frame_size ();
8674 if (use_return_insn (FALSE)
8675 && return_used_this_function
8676 && (frame_size + current_function_outgoing_args_size) != 0
8677 && !frame_pointer_needed)
8678 abort ();
8680 /* Reset the ARM-specific per-function variables. */
8681 after_arm_reorg = 0;
8685 /* Generate and emit an insn that we will recognize as a push_multi.
8686 Unfortunately, since this insn does not reflect very well the actual
8687 semantics of the operation, we need to annotate the insn for the benefit
8688 of DWARF2 frame unwind information. */
8689 static rtx
8690 emit_multi_reg_push (int mask)
8692 int num_regs = 0;
8693 int num_dwarf_regs;
8694 int i, j;
8695 rtx par;
8696 rtx dwarf;
8697 int dwarf_par_index;
8698 rtx tmp, reg;
8700 for (i = 0; i <= LAST_ARM_REGNUM; i++)
8701 if (mask & (1 << i))
8702 num_regs++;
8704 if (num_regs == 0 || num_regs > 16)
8705 abort ();
8707 /* We don't record the PC in the dwarf frame information. */
8708 num_dwarf_regs = num_regs;
8709 if (mask & (1 << PC_REGNUM))
8710 num_dwarf_regs--;
8712 /* For the body of the insn we are going to generate an UNSPEC in
8713 parallel with several USEs. This allows the insn to be recognized
8714 by the push_multi pattern in the arm.md file. The insn looks
8715 something like this:
8717 (parallel [
8718 (set (mem:BLK (pre_dec:BLK (reg:SI sp)))
8719 (unspec:BLK [(reg:SI r4)] UNSPEC_PUSH_MULT))
8720 (use (reg:SI 11 fp))
8721 (use (reg:SI 12 ip))
8722 (use (reg:SI 14 lr))
8723 (use (reg:SI 15 pc))
8724 ])
8726 For the frame note however, we try to be more explicit and actually
8727 show each register being stored into the stack frame, plus a (single)
8728 decrement of the stack pointer. We do it this way in order to be
8729 friendly to the stack unwinding code, which only wants to see a single
8730 stack decrement per instruction. The RTL we generate for the note looks
8731 something like this:
8733 (sequence [
8734 (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int -20)))
8735 (set (mem:SI (reg:SI sp)) (reg:SI r4))
8736 (set (mem:SI (plus:SI (reg:SI sp) (const_int 4))) (reg:SI fp))
8737 (set (mem:SI (plus:SI (reg:SI sp) (const_int 8))) (reg:SI ip))
8738 (set (mem:SI (plus:SI (reg:SI sp) (const_int 12))) (reg:SI lr))
8739 ])
8741 This sequence is used both by the code to support stack unwinding for
8742 exception handlers and the code to generate dwarf2 frame debugging. */
8744 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_regs));
8745 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (num_dwarf_regs + 1));
8746 dwarf_par_index = 1;
8748 for (i = 0; i <= LAST_ARM_REGNUM; i++)
8750 if (mask & (1 << i))
8752 reg = gen_rtx_REG (SImode, i);
8754 XVECEXP (par, 0, 0)
8755 = gen_rtx_SET (VOIDmode,
8756 gen_rtx_MEM (BLKmode,
8757 gen_rtx_PRE_DEC (BLKmode,
8758 stack_pointer_rtx)),
8759 gen_rtx_UNSPEC (BLKmode,
8760 gen_rtvec (1, reg),
8761 UNSPEC_PUSH_MULT));
8763 if (i != PC_REGNUM)
8765 tmp = gen_rtx_SET (VOIDmode,
8766 gen_rtx_MEM (SImode, stack_pointer_rtx),
8767 reg);
8768 RTX_FRAME_RELATED_P (tmp) = 1;
8769 XVECEXP (dwarf, 0, dwarf_par_index) = tmp;
8770 dwarf_par_index++;
8773 break;
8777 for (j = 1, i++; j < num_regs; i++)
8779 if (mask & (1 << i))
8781 reg = gen_rtx_REG (SImode, i);
8783 XVECEXP (par, 0, j) = gen_rtx_USE (VOIDmode, reg);
8785 if (i != PC_REGNUM)
8787 tmp = gen_rtx_SET (VOIDmode,
8788 gen_rtx_MEM (SImode,
8789 plus_constant (stack_pointer_rtx,
8790 4 * j)),
8791 reg);
8792 RTX_FRAME_RELATED_P (tmp) = 1;
8793 XVECEXP (dwarf, 0, dwarf_par_index++) = tmp;
8796 j++;
8800 par = emit_insn (par);
8802 tmp = gen_rtx_SET (SImode,
8803 stack_pointer_rtx,
8804 gen_rtx_PLUS (SImode,
8805 stack_pointer_rtx,
8806 GEN_INT (-4 * num_regs)));
8807 RTX_FRAME_RELATED_P (tmp) = 1;
8808 XVECEXP (dwarf, 0, 0) = tmp;
8810 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
8811 REG_NOTES (par));
8812 return par;
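/* Example usage (illustrative): in a prologue saving r4, fp, ip, lr
   and pc, the call

	emit_multi_reg_push ((1 << 4) | (1 << 11) | (1 << 12)
			     | (1 << 14) | (1 << 15));

   emits a single insn assembling to "stmfd sp!, {r4, fp, ip, lr, pc}",
   annotated with the sp = sp - 20 note shown above (the PC slot is
   not described to dwarf). */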
8815 static rtx
8816 emit_sfm (int base_reg, int count)
8818 rtx par;
8819 rtx dwarf;
8820 rtx tmp, reg;
8821 int i;
8823 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
8824 dwarf = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
8826 reg = gen_rtx_REG (XFmode, base_reg++);
8828 XVECEXP (par, 0, 0)
8829 = gen_rtx_SET (VOIDmode,
8830 gen_rtx_MEM (BLKmode,
8831 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
8832 gen_rtx_UNSPEC (BLKmode,
8833 gen_rtvec (1, reg),
8834 UNSPEC_PUSH_MULT));
8835 tmp
8836 = gen_rtx_SET (VOIDmode,
8837 gen_rtx_MEM (XFmode,
8838 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
8839 reg);
8840 RTX_FRAME_RELATED_P (tmp) = 1;
8841 XVECEXP (dwarf, 0, count - 1) = tmp;
8843 for (i = 1; i < count; i++)
8845 reg = gen_rtx_REG (XFmode, base_reg++);
8846 XVECEXP (par, 0, i) = gen_rtx_USE (VOIDmode, reg);
8848 tmp = gen_rtx_SET (VOIDmode,
8849 gen_rtx_MEM (XFmode,
8850 gen_rtx_PRE_DEC (BLKmode,
8851 stack_pointer_rtx)),
8852 reg);
8853 RTX_FRAME_RELATED_P (tmp) = 1;
8854 XVECEXP (dwarf, 0, count - i - 1) = tmp;
8857 par = emit_insn (par);
8858 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
8859 REG_NOTES (par));
8860 return par;
8863 /* Compute the distance from register FROM to register TO.
8864 These can be the arg pointer (26), the soft frame pointer (25),
8865 the stack pointer (13) or the hard frame pointer (11).
8866 Typical stack layout looks like this:
8868 old stack pointer -> | |
8869 ----
8870 | | \
8871 | | saved arguments for
8872 | | vararg functions
8873 | | /
8874 ----
8875 hard FP & arg pointer -> | | \
8876 | | stack
8877 | | frame
8878 | | /
8879 ----
8880 | | \
8881 | | call saved
8882 | | registers
8883 soft frame pointer -> | | /
8884 ----
8885 | | \
8886 | | local
8887 | | variables
8888 | | /
8889 ----
8890 | | \
8891 | | outgoing
8892 | | arguments
8893 current stack pointer -> | | /
8894 ----
8896 For a given function some or all of these stack components
8897 may not be needed, giving rise to the possibility of
8898 eliminating some of the registers.
8900 The values returned by this function must reflect the behavior
8901 of arm_expand_prologue() and arm_compute_save_reg_mask().
8903 The sign of the number returned reflects the direction of stack
8904 growth, so the values are positive for all eliminations except
8905 from the soft frame pointer to the hard frame pointer. */
8906 unsigned int
8907 arm_compute_initial_elimination_offset (unsigned int from, unsigned int to)
8909 unsigned int local_vars = arm_get_frame_size ();
8910 unsigned int outgoing_args = current_function_outgoing_args_size;
8911 unsigned int stack_frame;
8912 unsigned int call_saved_registers;
8913 unsigned long func_type;
8915 func_type = arm_current_func_type ();
8917 /* Volatile functions never return, so there is
8918 no need to save call saved registers. */
8919 call_saved_registers = 0;
8920 if (! IS_VOLATILE (func_type))
8922 unsigned int reg_mask;
8923 unsigned int reg;
8925 /* Make sure that we compute which registers will be saved
8926 on the stack using the same algorithm that is used by
8927 the prologue creation code. */
8928 reg_mask = arm_compute_save_reg_mask ();
8930 /* Now count the number of bits set in save_reg_mask.
8931 If we have already counted the registers in the stack
8932 frame, do not count them again. Non call-saved registers
8933 might be saved in the call-save area of the stack, if
8934 doing so will preserve the stack's alignment. Hence we
8935 must count them here. For each set bit we need 4 bytes
8936 of stack space. */
8937 if (frame_pointer_needed)
8938 reg_mask &= 0x07ff;
8939 call_saved_registers += 4 * bit_count (reg_mask);
8941 /* If the hard floating point registers are going to be
8942 used then they must be saved on the stack as well.
8943 Each register occupies 12 bytes of stack space. */
8944 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
8945 if (regs_ever_live[reg] && ! call_used_regs[reg])
8946 call_saved_registers += 12;
8948 if (TARGET_REALLY_IWMMXT)
8949 /* Check for the call-saved iWMMXt registers. */
8950 for (reg = FIRST_IWMMXT_REGNUM; reg <= LAST_IWMMXT_REGNUM; reg++)
8951 if (regs_ever_live[reg] && ! call_used_regs [reg])
8952 call_saved_registers += 8;
8955 /* The stack frame contains 4 registers - the old frame pointer,
8956 the old stack pointer, the return address and PC of the start
8957 of the function. */
8958 stack_frame = frame_pointer_needed ? 16 : 0;
8960 /* OK, now we have enough information to compute the distances.
8961 There must be an entry in these switch tables for each pair
8962 of registers in ELIMINABLE_REGS, even if some of the entries
8963 seem to be redundant or useless. */
8964 switch (from)
8966 case ARG_POINTER_REGNUM:
8967 switch (to)
8969 case THUMB_HARD_FRAME_POINTER_REGNUM:
8970 return 0;
8972 case FRAME_POINTER_REGNUM:
8973 /* This is the reverse of the soft frame pointer
8974 to hard frame pointer elimination below. */
8975 if (call_saved_registers == 0 && stack_frame == 0)
8976 return 0;
8977 return (call_saved_registers + stack_frame - 4);
8979 case ARM_HARD_FRAME_POINTER_REGNUM:
8980 /* If there is no stack frame then the hard
8981 frame pointer and the arg pointer coincide. */
8982 if (stack_frame == 0 && call_saved_registers != 0)
8983 return 0;
8984 /* FIXME: Not sure about this. Maybe we should always return 0 ? */
8985 return (frame_pointer_needed
8986 && current_function_needs_context
8987 && ! cfun->machine->uses_anonymous_args) ? 4 : 0;
8989 case STACK_POINTER_REGNUM:
8990 /* If nothing has been pushed on the stack at all
8991 then this will return -4. This *is* correct! */
8992 return call_saved_registers + stack_frame + local_vars + outgoing_args - 4;
8994 default:
8995 abort ();
8997 break;
8999 case FRAME_POINTER_REGNUM:
9000 switch (to)
9002 case THUMB_HARD_FRAME_POINTER_REGNUM:
9003 return 0;
9005 case ARM_HARD_FRAME_POINTER_REGNUM:
9006 /* The hard frame pointer points to the top entry in the
9007 stack frame. The soft frame pointer to the bottom entry
9008 in the stack frame. If there is no stack frame at all,
9009 then they are identical. */
9010 if (call_saved_registers == 0 && stack_frame == 0)
9011 return 0;
9012 return - (call_saved_registers + stack_frame - 4);
9014 case STACK_POINTER_REGNUM:
9015 return local_vars + outgoing_args;
9017 default:
9018 abort ();
9020 break;
9022 default:
9023 /* You cannot eliminate from the stack pointer.
9024 In theory you could eliminate from the hard frame
9025 pointer to the stack pointer, but this will never
9026 happen, since if a stack frame is not needed the
9027 hard frame pointer will never be used. */
9028 abort ();
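/* Worked example (hedged; assumes an APCS frame with no static
   chain): with frame_pointer_needed, call-saved r4 and r5, 8 bytes
   of locals and no outgoing args, call_saved_registers is 8 and
   stack_frame is 16, giving:

	ARG_POINTER -> HARD_FRAME_POINTER:  0
	ARG_POINTER -> FRAME_POINTER:	    8 + 16 - 4 = 20
	FRAME_POINTER -> STACK_POINTER:     8 + 0 = 8
	ARG_POINTER -> STACK_POINTER:	    8 + 16 + 8 + 0 - 4 = 28 */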
9032 /* Calculate the size of the stack frame, taking into account any
9033 padding that is required to ensure stack-alignment. */
9034 HOST_WIDE_INT
9035 arm_get_frame_size (void)
9037 int regno;
9039 int base_size = ROUND_UP_WORD (get_frame_size ());
9040 int entry_size = 0;
9041 unsigned long func_type = arm_current_func_type ();
9042 int leaf;
9044 if (! TARGET_ARM)
9045 abort();
9047 if (! TARGET_ATPCS)
9048 return base_size;
9050 /* We need to know if we are a leaf function. Unfortunately, it
9051 is possible to be called after start_sequence has been called,
9052 which causes get_insns to return the insns for the sequence,
9053 not the function, so leaf_function_p would return an
9054 incorrect result.
9056 To work around this, we cache the computed frame size. This
9057 works because we will only be calling RTL expanders that need
9058 to know about leaf functions once reload has completed, and the
9059 frame size cannot be changed after that time, so we can safely
9060 use the cached value. */
9062 if (reload_completed)
9063 return cfun->machine->frame_size;
9065 leaf = leaf_function_p ();
9067 /* A leaf function does not need any stack alignment if it has nothing
9068 on the stack. */
9069 if (leaf && base_size == 0)
9071 cfun->machine->frame_size = 0;
9072 return 0;
9075 /* We know that SP will be word aligned on entry, and we must
9076 preserve that condition at any subroutine call. But those are
9077 the only constraints. */
9079 /* Space for variadic functions. */
9080 if (current_function_pretend_args_size)
9081 entry_size += current_function_pretend_args_size;
9083 /* Space for saved registers. */
9084 entry_size += bit_count (arm_compute_save_reg_mask ()) * 4;
9086 /* Space for saved FPA registers. */
9087 if (! IS_VOLATILE (func_type))
9089 for (regno = FIRST_ARM_FP_REGNUM; regno <= LAST_ARM_FP_REGNUM; regno++)
9090 if (regs_ever_live[regno] && ! call_used_regs[regno])
9091 entry_size += 12;
9094 if (TARGET_REALLY_IWMMXT)
9096 /* Check for the call-saved iWMMXt registers. */
9097 for (regno = FIRST_IWMMXT_REGNUM; regno <= LAST_IWMMXT_REGNUM; regno++)
9098 if (regs_ever_live [regno] && ! call_used_regs [regno])
9099 entry_size += 8;
9102 if ((entry_size + base_size + current_function_outgoing_args_size) & 7)
9103 base_size += 4;
9104 if ((entry_size + base_size + current_function_outgoing_args_size) & 7)
9105 abort ();
9107 cfun->machine->frame_size = base_size;
9109 return base_size;
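/* Example (illustrative): under -matpcs, with 20 bytes of locals,
   two saved registers (entry_size = 8) and no outgoing arguments,
   (8 + 20 + 0) & 7 == 4, so base_size is bumped from 20 to 24 and
   the total stack usage stays a multiple of eight. */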
9112 /* Generate the prologue instructions for entry into an ARM function. */
9113 void
9114 arm_expand_prologue (void)
9116 int reg;
9117 rtx amount;
9118 rtx insn;
9119 rtx ip_rtx;
9120 unsigned long live_regs_mask;
9121 unsigned long func_type;
9122 int fp_offset = 0;
9123 int saved_pretend_args = 0;
9124 unsigned int args_to_push;
9126 func_type = arm_current_func_type ();
9128 /* Naked functions don't have prologues. */
9129 if (IS_NAKED (func_type))
9130 return;
9132 /* Make a copy of c_f_p_a_s as we may need to modify it locally. */
9133 args_to_push = current_function_pretend_args_size;
9135 /* Compute which registers we will have to save onto the stack. */
9136 live_regs_mask = arm_compute_save_reg_mask ();
9138 ip_rtx = gen_rtx_REG (SImode, IP_REGNUM);
9140 if (frame_pointer_needed)
9142 if (IS_INTERRUPT (func_type))
9144 /* Interrupt functions must not corrupt any registers.
9145 Creating a frame pointer, however, corrupts the IP
9146 register, so we must push it first. */
9147 insn = emit_multi_reg_push (1 << IP_REGNUM);
9149 /* Do not set RTX_FRAME_RELATED_P on this insn.
9150 The dwarf stack unwinding code only wants to see one
9151 stack decrement per function, and this is not it. If
9152 this instruction is labeled as being part of the frame
9153 creation sequence then dwarf2out_frame_debug_expr will
9154 abort when it encounters the assignment of IP to FP
9155 later on, since the use of SP here establishes SP as
9156 the CFA register and not IP.
9158 Anyway this instruction is not really part of the stack
9159 frame creation although it is part of the prologue. */
9161 else if (IS_NESTED (func_type))
9163 /* The static chain register is the same as the IP register
9164 used as a scratch register during stack frame creation.
9165 To get around this we need to find somewhere to store IP
9166 whilst the frame is being created. We try the following
9167 places in order:
9169 1. The last argument register.
9170 2. A slot on the stack above the frame. (This only
9171 works if the function is not a varargs function).
9172 3. Register r3, after pushing the argument registers
9173 onto the stack.
9175 Note - we only need to tell the dwarf2 backend about the SP
9176 adjustment in the second variant; the static chain register
9177 doesn't need to be unwound, as it doesn't contain a value
9178 inherited from the caller. */
9180 if (regs_ever_live[3] == 0)
9182 insn = gen_rtx_REG (SImode, 3);
9183 insn = gen_rtx_SET (SImode, insn, ip_rtx);
9184 insn = emit_insn (insn);
9186 else if (args_to_push == 0)
9188 rtx dwarf;
9189 insn = gen_rtx_PRE_DEC (SImode, stack_pointer_rtx);
9190 insn = gen_rtx_MEM (SImode, insn);
9191 insn = gen_rtx_SET (VOIDmode, insn, ip_rtx);
9192 insn = emit_insn (insn);
9194 fp_offset = 4;
9196 /* Just tell the dwarf backend that we adjusted SP. */
9197 dwarf = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
9198 gen_rtx_PLUS (SImode, stack_pointer_rtx,
9199 GEN_INT (-fp_offset)));
9200 RTX_FRAME_RELATED_P (insn) = 1;
9201 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
9202 dwarf, REG_NOTES (insn));
9204 else
9206 /* Store the args on the stack. */
9207 if (cfun->machine->uses_anonymous_args)
9208 insn = emit_multi_reg_push
9209 ((0xf0 >> (args_to_push / 4)) & 0xf);
9210 else
9211 insn = emit_insn
9212 (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
9213 GEN_INT (- args_to_push)));
9215 RTX_FRAME_RELATED_P (insn) = 1;
9217 saved_pretend_args = 1;
9218 fp_offset = args_to_push;
9219 args_to_push = 0;
9221 /* Now reuse r3 to preserve IP. */
9222 insn = gen_rtx_REG (SImode, 3);
9223 insn = gen_rtx_SET (SImode, insn, ip_rtx);
9224 (void) emit_insn (insn);
9228 if (fp_offset)
9230 insn = gen_rtx_PLUS (SImode, stack_pointer_rtx, GEN_INT (fp_offset));
9231 insn = gen_rtx_SET (SImode, ip_rtx, insn);
9233 else
9234 insn = gen_movsi (ip_rtx, stack_pointer_rtx);
9236 insn = emit_insn (insn);
9237 RTX_FRAME_RELATED_P (insn) = 1;
9240 if (args_to_push)
9242 /* Push the argument registers, or reserve space for them. */
9243 if (cfun->machine->uses_anonymous_args)
9244 insn = emit_multi_reg_push
9245 ((0xf0 >> (args_to_push / 4)) & 0xf);
9246 else
9247 insn = emit_insn
9248 (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
9249 GEN_INT (- args_to_push)));
9250 RTX_FRAME_RELATED_P (insn) = 1;
9253 /* If this is an interrupt service routine, and the link register
9254 is going to be pushed, and we are not creating a stack frame
9255 (which would involve an extra push of IP and a pop in the epilogue),
9256 then subtracting four from LR now will mean that the function return
9257 can be done with a single instruction. */
9258 if ((func_type == ARM_FT_ISR || func_type == ARM_FT_FIQ)
9259 && (live_regs_mask & (1 << LR_REGNUM)) != 0
9260 && ! frame_pointer_needed)
9261 emit_insn (gen_rtx_SET (SImode,
9262 gen_rtx_REG (SImode, LR_REGNUM),
9263 gen_rtx_PLUS (SImode,
9264 gen_rtx_REG (SImode, LR_REGNUM),
9265 GEN_INT (-4))));
9267 if (live_regs_mask)
9269 insn = emit_multi_reg_push (live_regs_mask);
9270 RTX_FRAME_RELATED_P (insn) = 1;
9273 if (TARGET_IWMMXT)
9274 for (reg = FIRST_IWMMXT_REGNUM; reg <= LAST_IWMMXT_REGNUM; reg++)
9275 if (regs_ever_live[reg] && ! call_used_regs [reg])
9277 insn = gen_rtx_PRE_DEC (V2SImode, stack_pointer_rtx);
9278 insn = gen_rtx_MEM (V2SImode, insn);
9279 insn = emit_insn (gen_rtx_SET (VOIDmode, insn,
9280 gen_rtx_REG (V2SImode, reg)));
9281 RTX_FRAME_RELATED_P (insn) = 1;
9284 if (! IS_VOLATILE (func_type))
9286 /* Save any floating point call-saved registers used by this
9287 function. */
9288 if (arm_fpu_arch == FPUTYPE_FPA_EMU2)
9290 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
9291 if (regs_ever_live[reg] && !call_used_regs[reg])
9293 insn = gen_rtx_PRE_DEC (XFmode, stack_pointer_rtx);
9294 insn = gen_rtx_MEM (XFmode, insn);
9295 insn = emit_insn (gen_rtx_SET (VOIDmode, insn,
9296 gen_rtx_REG (XFmode, reg)));
9297 RTX_FRAME_RELATED_P (insn) = 1;
9300 else
9302 int start_reg = LAST_ARM_FP_REGNUM;
9304 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
9306 if (regs_ever_live[reg] && !call_used_regs[reg])
9308 if (start_reg - reg == 3)
9310 insn = emit_sfm (reg, 4);
9311 RTX_FRAME_RELATED_P (insn) = 1;
9312 start_reg = reg - 1;
9315 else
9317 if (start_reg != reg)
9319 insn = emit_sfm (reg + 1, start_reg - reg);
9320 RTX_FRAME_RELATED_P (insn) = 1;
9322 start_reg = reg - 1;
9326 if (start_reg != reg)
9328 insn = emit_sfm (reg + 1, start_reg - reg);
9329 RTX_FRAME_RELATED_P (insn) = 1;
9334 if (frame_pointer_needed)
9336 /* Create the new frame pointer. */
9337 insn = GEN_INT (-(4 + args_to_push + fp_offset));
9338 insn = emit_insn (gen_addsi3 (hard_frame_pointer_rtx, ip_rtx, insn));
9339 RTX_FRAME_RELATED_P (insn) = 1;
9341 if (IS_NESTED (func_type))
9343 /* Recover the static chain register. */
9344 if (regs_ever_live [3] == 0
9345 || saved_pretend_args)
9346 insn = gen_rtx_REG (SImode, 3);
9347 else /* if (current_function_pretend_args_size == 0) */
9349 insn = gen_rtx_PLUS (SImode, hard_frame_pointer_rtx,
9350 GEN_INT (4));
9351 insn = gen_rtx_MEM (SImode, insn);
9354 emit_insn (gen_rtx_SET (SImode, ip_rtx, insn));
9355 /* Add a USE to stop propagate_one_insn() from barfing. */
9356 emit_insn (gen_prologue_use (ip_rtx));
9360 amount = GEN_INT (-(arm_get_frame_size ()
9361 + current_function_outgoing_args_size));
9363 if (amount != const0_rtx)
9365 /* This add can produce multiple insns for a large constant, so we
9366 need to get tricky. */
9367 rtx last = get_last_insn ();
9368 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
9369 amount));
9372 last = last ? NEXT_INSN (last) : get_insns ();
9373 RTX_FRAME_RELATED_P (last) = 1;
9375 while (last != insn);
9377 /* If the frame pointer is needed, emit a special barrier that
9378 will prevent the scheduler from moving stores to the frame
9379 before the stack adjustment. */
9380 if (frame_pointer_needed)
9381 insn = emit_insn (gen_stack_tie (stack_pointer_rtx,
9382 hard_frame_pointer_rtx));
9385 /* If we are profiling, make sure no instructions are scheduled before
9386 the call to mcount. Similarly if the user has requested no
9387 scheduling in the prologue.
9388 if (current_function_profile || TARGET_NO_SCHED_PRO)
9389 emit_insn (gen_blockage ());
9391 /* If the link register is being kept alive, with the return address in it,
9392 then make sure that it does not get reused by the ce2 pass. */
9393 if ((live_regs_mask & (1 << LR_REGNUM)) == 0)
9395 emit_insn (gen_prologue_use (gen_rtx_REG (SImode, LR_REGNUM)));
9396 cfun->machine->lr_save_eliminated = 1;
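/* Putting the pieces together, a normal APCS function that needs a
   frame pointer and 16 bytes of locals typically expands to
   something like this (an illustrative sketch, not verbatim
   output):

	mov	ip, sp
	stmfd	sp!, {fp, ip, lr, pc}
	sub	fp, ip, #4
	sub	sp, sp, #16 */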
9400 /* If CODE is 'd', then X is a condition operand and the instruction
9401 should only be executed if the condition is true.
9402 If CODE is 'D', then X is a condition operand and the instruction
9403 should only be executed if the condition is false: however, if the mode
9404 of the comparison is CCFPEmode, then always execute the instruction -- we
9405 do this because in these circumstances !GE does not necessarily imply LT;
9406 in these cases the instruction pattern will take care to make sure that
9407 an instruction containing %d will follow, thereby undoing the effects of
9408 doing this instruction unconditionally.
9409 If CODE is 'N' then X is a floating point operand that must be negated
9410 before output.
9411 If CODE is 'B' then output a bitwise inverted value of X (a const int).
9412 If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */
9413 void
9414 arm_print_operand (FILE *stream, rtx x, int code)
9416 switch (code)
9418 case '@':
9419 fputs (ASM_COMMENT_START, stream);
9420 return;
9422 case '_':
9423 fputs (user_label_prefix, stream);
9424 return;
9426 case '|':
9427 fputs (REGISTER_PREFIX, stream);
9428 return;
9430 case '?':
9431 if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
9433 if (TARGET_THUMB || current_insn_predicate != NULL)
9434 abort ();
9436 fputs (arm_condition_codes[arm_current_cc], stream);
9438 else if (current_insn_predicate)
9440 enum arm_cond_code code;
9442 if (TARGET_THUMB)
9443 abort ();
9445 code = get_arm_condition_code (current_insn_predicate);
9446 fputs (arm_condition_codes[code], stream);
9448 return;
9450 case 'N':
9452 REAL_VALUE_TYPE r;
9453 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
9454 r = REAL_VALUE_NEGATE (r);
9455 fprintf (stream, "%s", fp_const_from_val (&r));
9457 return;
9459 case 'B':
9460 if (GET_CODE (x) == CONST_INT)
9462 HOST_WIDE_INT val;
9463 val = ARM_SIGN_EXTEND (~INTVAL (x));
9464 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
9466 else
9468 putc ('~', stream);
9469 output_addr_const (stream, x);
9471 return;
9473 case 'i':
9474 fprintf (stream, "%s", arithmetic_instr (x, 1));
9475 return;
9477 /* Truncate Cirrus shift counts. */
9478 case 's':
9479 if (GET_CODE (x) == CONST_INT)
9481 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0x3f);
9482 return;
9484 arm_print_operand (stream, x, 0);
9485 return;
9487 case 'I':
9488 fprintf (stream, "%s", arithmetic_instr (x, 0));
9489 return;
9491 case 'S':
9493 HOST_WIDE_INT val;
9494 const char * shift = shift_op (x, &val);
9496 if (shift)
9498 fprintf (stream, ", %s ", shift_op (x, &val));
9499 if (val == -1)
9500 arm_print_operand (stream, XEXP (x, 1), 0);
9501 else
9502 fprintf (stream, "#" HOST_WIDE_INT_PRINT_DEC, val);
9505 return;
9507 /* An explanation of the 'Q', 'R' and 'H' register operands:
9509 In a pair of registers containing a DI or DF value the 'Q'
9510 operand returns the register number of the register containing
9511 the least significant part of the value. The 'R' operand returns
9512 the register number of the register containing the most
9513 significant part of the value.
9515 The 'H' operand returns the higher of the two register numbers.
9516 On a run where WORDS_BIG_ENDIAN is true the 'H' operand is the
9517 same as the 'Q' operand, since the most significant part of the
9518 value is held in the lower number register. The reverse is true
9519 on systems where WORDS_BIG_ENDIAN is false.
9521 The purpose of these operands is to distinguish between cases
9522 where the endian-ness of the values is important (for example
9523 when they are added together), and cases where the endian-ness
9524 is irrelevant, but the order of register operations is important.
9525 For example when loading a value from memory into a register
9526 pair, the endian-ness does not matter. Provided that the value
9527 from the lower memory address is put into the lower numbered
9528 register, and the value from the higher address is put into the
9529 higher numbered register, the load will work regardless of whether
9530 the value being loaded is big-wordian or little-wordian. The
9531 order of the two register loads can matter however, if the address
9532 of the memory location is actually held in one of the registers
9533 being overwritten by the load. */
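/* For example (given the code below), for a DImode value in the
   pair r0/r1, %Q prints r0 and %R prints r1 when WORDS_BIG_ENDIAN
   is false; when it is true, %Q prints r1 and %R prints r0. In
   both cases %H prints r1. */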
9534 case 'Q':
9535 if (REGNO (x) > LAST_ARM_REGNUM)
9536 abort ();
9537 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 1 : 0));
9538 return;
9540 case 'R':
9541 if (REGNO (x) > LAST_ARM_REGNUM)
9542 abort ();
9543 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 0 : 1));
9544 return;
9546 case 'H':
9547 if (REGNO (x) > LAST_ARM_REGNUM)
9548 abort ();
9549 asm_fprintf (stream, "%r", REGNO (x) + 1);
9550 return;
9552 case 'm':
9553 asm_fprintf (stream, "%r",
9554 GET_CODE (XEXP (x, 0)) == REG
9555 ? REGNO (XEXP (x, 0)) : REGNO (XEXP (XEXP (x, 0), 0)));
9556 return;
9558 case 'M':
9559 asm_fprintf (stream, "{%r-%r}",
9560 REGNO (x),
9561 REGNO (x) + ARM_NUM_REGS (GET_MODE (x)) - 1);
9562 return;
9564 case 'd':
9565 /* CONST_TRUE_RTX means always -- that's the default. */
9566 if (x == const_true_rtx)
9567 return;
9569 if (TARGET_ARM)
9570 fputs (arm_condition_codes[get_arm_condition_code (x)],
9571 stream);
9572 else
9573 fputs (thumb_condition_code (x, 0), stream);
9574 return;
9576 case 'D':
9577 /* CONST_TRUE_RTX means not always -- ie never. We shouldn't ever
9578 want to do that. */
9579 if (x == const_true_rtx)
9580 abort ();
9582 if (TARGET_ARM)
9583 fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
9584 (get_arm_condition_code (x))],
9585 stream);
9586 else
9587 fputs (thumb_condition_code (x, 1), stream);
9588 return;
9590 /* Cirrus registers can be accessed in a variety of ways:
9591 single floating point (f)
9592 double floating point (d)
9593 32bit integer (fx)
9594 64bit integer (dx). */
9595 case 'W': /* Cirrus register in F mode. */
9596 case 'X': /* Cirrus register in D mode. */
9597 case 'Y': /* Cirrus register in FX mode. */
9598 case 'Z': /* Cirrus register in DX mode. */
9599 if (GET_CODE (x) != REG || REGNO_REG_CLASS (REGNO (x)) != CIRRUS_REGS)
9600 abort ();
9602 fprintf (stream, "mv%s%s",
9603 code == 'W' ? "f"
9604 : code == 'X' ? "d"
9605 : code == 'Y' ? "fx" : "dx", reg_names[REGNO (x)] + 2);
9607 return;
9609 /* Print a Cirrus register, using the register's own mode to select the format. */
9610 case 'V':
9612 int mode = GET_MODE (x);
9614 if (GET_CODE (x) != REG || REGNO_REG_CLASS (REGNO (x)) != CIRRUS_REGS)
9615 abort ();
9617 fprintf (stream, "mv%s%s",
9618 mode == DFmode ? "d"
9619 : mode == SImode ? "fx"
9620 : mode == DImode ? "dx"
9621 : "f", reg_names[REGNO (x)] + 2);
9623 return;
9626 case 'U':
9627 if (GET_CODE (x) != REG
9628 || REGNO (x) < FIRST_IWMMXT_GR_REGNUM
9629 || REGNO (x) > LAST_IWMMXT_GR_REGNUM)
9630 /* Bad value for wCG register number. */
9631 abort ();
9632 else
9633 fprintf (stream, "%d", REGNO (x) - FIRST_IWMMXT_GR_REGNUM);
9634 return;
9636 /* Print an iWMMXt control register name. */
9637 case 'w':
9638 if (GET_CODE (x) != CONST_INT
9639 || INTVAL (x) < 0
9640 || INTVAL (x) >= 16)
9641 /* Bad value for wC register number. */
9642 abort ();
9643 else
9645 static const char * wc_reg_names [16] =
9647 "wCID", "wCon", "wCSSF", "wCASF",
9648 "wC4", "wC5", "wC6", "wC7",
9649 "wCGR0", "wCGR1", "wCGR2", "wCGR3",
9650 "wC12", "wC13", "wC14", "wC15"
9653 fprintf (stream, wc_reg_names [INTVAL (x)]);
9655 return;
9657 default:
9658 if (x == 0)
9659 abort ();
9661 if (GET_CODE (x) == REG)
9662 asm_fprintf (stream, "%r", REGNO (x));
9663 else if (GET_CODE (x) == MEM)
9665 output_memory_reference_mode = GET_MODE (x);
9666 output_address (XEXP (x, 0));
9668 else if (GET_CODE (x) == CONST_DOUBLE)
9669 fprintf (stream, "#%s", fp_immediate_constant (x));
9670 else if (GET_CODE (x) == NEG)
9671 abort (); /* This should never happen now. */
9672 else
9674 fputc ('#', stream);
9675 output_addr_const (stream, x);
9680 #ifndef AOF_ASSEMBLER
9681 /* Target hook for assembling integer objects. The ARM version needs to
9682 handle word-sized values specially. */
9683 static bool
9684 arm_assemble_integer (rtx x, unsigned int size, int aligned_p)
9686 if (size == UNITS_PER_WORD && aligned_p)
9688 fputs ("\t.word\t", asm_out_file);
9689 output_addr_const (asm_out_file, x);
9691 /* Mark symbols as position independent. We only do this in the
9692 .text segment, not in the .data segment. */
9693 if (NEED_GOT_RELOC && flag_pic && making_const_table &&
9694 (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF))
9696 if (GET_CODE (x) == SYMBOL_REF
9697 && (CONSTANT_POOL_ADDRESS_P (x)
9698 || ENCODED_SHORT_CALL_ATTR_P (XSTR (x, 0))))
9699 fputs ("(GOTOFF)", asm_out_file);
9700 else if (GET_CODE (x) == LABEL_REF)
9701 fputs ("(GOTOFF)", asm_out_file);
9702 else
9703 fputs ("(GOT)", asm_out_file);
9705 fputc ('\n', asm_out_file);
9706 return true;
9709 if (VECTOR_MODE_SUPPORTED_P (GET_MODE (x)))
9711 int i, units;
9713 if (GET_CODE (x) != CONST_VECTOR)
9714 abort ();
9716 units = CONST_VECTOR_NUNITS (x);
9718 switch (GET_MODE (x))
9720 case V2SImode: size = 4; break;
9721 case V4HImode: size = 2; break;
9722 case V8QImode: size = 1; break;
9723 default:
9724 abort ();
9727 for (i = 0; i < units; i++)
9729 rtx elt;
9731 elt = CONST_VECTOR_ELT (x, i);
9732 assemble_integer
9733 (elt, size, i == 0 ? BIGGEST_ALIGNMENT : size * BITS_PER_UNIT, 1);
9736 return true;
9739 return default_assemble_integer (x, size, aligned_p);
9741 #endif
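/* For instance (an illustrative sketch with hypothetical symbol
   names), a word-sized SYMBOL_REF emitted into a PIC constant
   table becomes

	.word	somesym(GOT)

   while a label reference or a short-call constant-pool entry gets
   a (GOTOFF) relocation instead. */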
9743 /* A finite state machine takes care of noticing whether or not instructions
9744 can be conditionally executed, which decreases execution time and code
9745 size by deleting branch instructions. The fsm is controlled by
9746 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
9748 /* The states of the fsm controlling condition codes are:
9749 0: normal, do nothing special
9750 1: make ASM_OUTPUT_OPCODE not output this instruction
9751 2: make ASM_OUTPUT_OPCODE not output this instruction
9752 3: make instructions conditional
9753 4: make instructions conditional
9755 State transitions (state->state by whom under condition):
9756 0 -> 1 final_prescan_insn if the `target' is a label
9757 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
9758 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
9759 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
9760 3 -> 0 (*targetm.asm_out.internal_label) if the `target' label is reached
9761 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
9762 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
9763 (the target insn is arm_target_insn).
9765 If the jump clobbers the conditions then we use states 2 and 4.
9767 A similar thing can be done with conditional return insns.
9769 XXX In case the `target' is an unconditional branch, this conditionalising
9770 of the instructions always reduces code size, but not always execution
9771 time. But then, I want to reduce the code size to somewhere near what
9772 /bin/cc produces. */
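/* For example (illustrative, not verbatim compiler output), the fsm
   lets a short forward branch such as

	cmp	r0, #0
	beq	.L1
	add	r1, r1, #1
   .L1:

   be emitted instead as

	cmp	r0, #0
	addne	r1, r1, #1

   removing the branch entirely. */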
9774 /* Returns the index of the ARM condition code string in
9775 `arm_condition_codes'. COMPARISON should be an rtx like
9776 `(eq (...) (...))'. */
9777 static enum arm_cond_code
9778 get_arm_condition_code (rtx comparison)
9780 enum machine_mode mode = GET_MODE (XEXP (comparison, 0));
9781 int code;
9782 enum rtx_code comp_code = GET_CODE (comparison);
9784 if (GET_MODE_CLASS (mode) != MODE_CC)
9785 mode = SELECT_CC_MODE (comp_code, XEXP (comparison, 0),
9786 XEXP (comparison, 1));
9788 switch (mode)
9790 case CC_DNEmode: code = ARM_NE; goto dominance;
9791 case CC_DEQmode: code = ARM_EQ; goto dominance;
9792 case CC_DGEmode: code = ARM_GE; goto dominance;
9793 case CC_DGTmode: code = ARM_GT; goto dominance;
9794 case CC_DLEmode: code = ARM_LE; goto dominance;
9795 case CC_DLTmode: code = ARM_LT; goto dominance;
9796 case CC_DGEUmode: code = ARM_CS; goto dominance;
9797 case CC_DGTUmode: code = ARM_HI; goto dominance;
9798 case CC_DLEUmode: code = ARM_LS; goto dominance;
9799 case CC_DLTUmode: code = ARM_CC;
9801 dominance:
9802 if (comp_code != EQ && comp_code != NE)
9803 abort ();
9805 if (comp_code == EQ)
9806 return ARM_INVERSE_CONDITION_CODE (code);
9807 return code;
9809 case CC_NOOVmode:
9810 switch (comp_code)
9812 case NE: return ARM_NE;
9813 case EQ: return ARM_EQ;
9814 case GE: return ARM_PL;
9815 case LT: return ARM_MI;
9816 default: abort ();
9819 case CC_Zmode:
9820 switch (comp_code)
9822 case NE: return ARM_NE;
9823 case EQ: return ARM_EQ;
9824 default: abort ();
9827 case CCFPEmode:
9828 case CCFPmode:
9829 /* These encodings assume that AC=1 in the FPA system control
9830 byte. This allows us to handle all cases except UNEQ and
9831 LTGT. */
9832 switch (comp_code)
9834 case GE: return ARM_GE;
9835 case GT: return ARM_GT;
9836 case LE: return ARM_LS;
9837 case LT: return ARM_MI;
9838 case NE: return ARM_NE;
9839 case EQ: return ARM_EQ;
9840 case ORDERED: return ARM_VC;
9841 case UNORDERED: return ARM_VS;
9842 case UNLT: return ARM_LT;
9843 case UNLE: return ARM_LE;
9844 case UNGT: return ARM_HI;
9845 case UNGE: return ARM_PL;
9846 /* UNEQ and LTGT do not have a representation. */
9847 case UNEQ: /* Fall through. */
9848 case LTGT: /* Fall through. */
9849 default: abort ();
9852 case CC_SWPmode:
9853 switch (comp_code)
9855 case NE: return ARM_NE;
9856 case EQ: return ARM_EQ;
9857 case GE: return ARM_LE;
9858 case GT: return ARM_LT;
9859 case LE: return ARM_GE;
9860 case LT: return ARM_GT;
9861 case GEU: return ARM_LS;
9862 case GTU: return ARM_CC;
9863 case LEU: return ARM_CS;
9864 case LTU: return ARM_HI;
9865 default: abort ();
9868 case CC_Cmode:
9869 switch (comp_code)
9871 case LTU: return ARM_CS;
9872 case GEU: return ARM_CC;
9873 default: abort ();
9876 case CCmode:
9877 switch (comp_code)
9879 case NE: return ARM_NE;
9880 case EQ: return ARM_EQ;
9881 case GE: return ARM_GE;
9882 case GT: return ARM_GT;
9883 case LE: return ARM_LE;
9884 case LT: return ARM_LT;
9885 case GEU: return ARM_CS;
9886 case GTU: return ARM_HI;
9887 case LEU: return ARM_LS;
9888 case LTU: return ARM_CC;
9889 default: abort ();
9892 default: abort ();
9895 abort ();
9898 void
9899 arm_final_prescan_insn (rtx insn)
9901 /* BODY will hold the body of INSN. */
9902 rtx body = PATTERN (insn);
9904 /* This will be 1 if trying to repeat the trick, and things need to be
9905 reversed if it appears to fail. */
9906 int reverse = 0;
9908 /* JUMP_CLOBBERS being one implies that the condition codes are
9909 clobbered if the branch is taken, even if the rtl suggests otherwise. It also
9910 means that we have to grub around within the jump expression to find
9911 out what the conditions are when the jump isn't taken. */
9912 int jump_clobbers = 0;
9914 /* If we start with a return insn, we only succeed if we find another one. */
9915 int seeking_return = 0;
9917 /* START_INSN will hold the insn from where we start looking. This is the
9918 first insn after the following code_label if REVERSE is true. */
9919 rtx start_insn = insn;
9921 /* If in state 4, check if the target branch is reached, in order to
9922 change back to state 0. */
9923 if (arm_ccfsm_state == 4)
9925 if (insn == arm_target_insn)
9927 arm_target_insn = NULL;
9928 arm_ccfsm_state = 0;
9930 return;
9933 /* If in state 3, it is possible to repeat the trick, if this insn is an
9934 unconditional branch to a label, and immediately following this branch
9935 is the previous target label which is only used once, and the label this
9936 branch jumps to is not too far off. */
9937 if (arm_ccfsm_state == 3)
9939 if (simplejump_p (insn))
9941 start_insn = next_nonnote_insn (start_insn);
9942 if (GET_CODE (start_insn) == BARRIER)
9944 /* XXX Isn't this always a barrier? */
9945 start_insn = next_nonnote_insn (start_insn);
9947 if (GET_CODE (start_insn) == CODE_LABEL
9948 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
9949 && LABEL_NUSES (start_insn) == 1)
9950 reverse = TRUE;
9951 else
9952 return;
9954 else if (GET_CODE (body) == RETURN)
9956 start_insn = next_nonnote_insn (start_insn);
9957 if (GET_CODE (start_insn) == BARRIER)
9958 start_insn = next_nonnote_insn (start_insn);
9959 if (GET_CODE (start_insn) == CODE_LABEL
9960 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
9961 && LABEL_NUSES (start_insn) == 1)
9963 reverse = TRUE;
9964 seeking_return = 1;
9966 else
9967 return;
9969 else
9970 return;
9973 if (arm_ccfsm_state != 0 && !reverse)
9974 abort ();
9975 if (GET_CODE (insn) != JUMP_INSN)
9976 return;
9978 /* This jump might be paralleled with a clobber of the condition codes;
9979 the jump should always come first. */
9980 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
9981 body = XVECEXP (body, 0, 0);
9983 #if 0
9984 /* If this is a conditional return then we don't want to know */
9985 if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
9986 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
9987 && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
9988 || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
9989 return;
9990 #endif
9992 if (reverse
9993 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
9994 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
9996 int insns_skipped;
9997 int fail = FALSE, succeed = FALSE;
9998 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
9999 int then_not_else = TRUE;
10000 rtx this_insn = start_insn, label = 0;
10002 /* If the jump cannot be done with one instruction, we cannot
10003 conditionally execute the instruction in the inverse case. */
10004 if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
10006 jump_clobbers = 1;
10007 return;
10010 /* Register the insn jumped to. */
10011 if (reverse)
10013 if (!seeking_return)
10014 label = XEXP (SET_SRC (body), 0);
10016 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
10017 label = XEXP (XEXP (SET_SRC (body), 1), 0);
10018 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
10020 label = XEXP (XEXP (SET_SRC (body), 2), 0);
10021 then_not_else = FALSE;
10023 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
10024 seeking_return = 1;
10025 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
10027 seeking_return = 1;
10028 then_not_else = FALSE;
10030 else
10031 abort ();
10033 /* See how many insns this branch skips, and what kind of insns. If all
10034 insns are okay, and the label or unconditional branch to the same
10035 label is not too far away, succeed. */
10036 for (insns_skipped = 0;
10037 !fail && !succeed && insns_skipped++ < max_insns_skipped;)
10039 rtx scanbody;
10041 this_insn = next_nonnote_insn (this_insn);
10042 if (!this_insn)
10043 break;
10045 switch (GET_CODE (this_insn))
10047 case CODE_LABEL:
10048 /* Succeed if it is the target label, otherwise fail since
10049 control falls in from somewhere else. */
10050 if (this_insn == label)
10052 if (jump_clobbers)
10054 arm_ccfsm_state = 2;
10055 this_insn = next_nonnote_insn (this_insn);
10057 else
10058 arm_ccfsm_state = 1;
10059 succeed = TRUE;
10061 else
10062 fail = TRUE;
10063 break;
10065 case BARRIER:
10066 /* Succeed if the following insn is the target label.
10067 Otherwise fail.
10068 If return insns are used then the last insn in a function
10069 will be a barrier. */
10070 this_insn = next_nonnote_insn (this_insn);
10071 if (this_insn && this_insn == label)
10073 if (jump_clobbers)
10075 arm_ccfsm_state = 2;
10076 this_insn = next_nonnote_insn (this_insn);
10078 else
10079 arm_ccfsm_state = 1;
10080 succeed = TRUE;
10082 else
10083 fail = TRUE;
10084 break;
10086 case CALL_INSN:
10087 /* If using 32-bit addresses the cc is not preserved over
10088 calls. */
10089 if (TARGET_APCS_32)
10091 /* Succeed if the following insn is the target label,
10092 or if the following two insns are a barrier and
10093 the target label. */
10094 this_insn = next_nonnote_insn (this_insn);
10095 if (this_insn && GET_CODE (this_insn) == BARRIER)
10096 this_insn = next_nonnote_insn (this_insn);
10098 if (this_insn && this_insn == label
10099 && insns_skipped < max_insns_skipped)
10101 if (jump_clobbers)
10103 arm_ccfsm_state = 2;
10104 this_insn = next_nonnote_insn (this_insn);
10106 else
10107 arm_ccfsm_state = 1;
10108 succeed = TRUE;
10110 else
10111 fail = TRUE;
10113 break;
10115 case JUMP_INSN:
10116 /* If this is an unconditional branch to the same label, succeed.
10117 If it is to another label, do nothing. If it is conditional,
10118 fail. */
10119 /* XXX Probably, the tests for SET and the PC are
10120 unnecessary. */
10122 scanbody = PATTERN (this_insn);
10123 if (GET_CODE (scanbody) == SET
10124 && GET_CODE (SET_DEST (scanbody)) == PC)
10126 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
10127 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
10129 arm_ccfsm_state = 2;
10130 succeed = TRUE;
10132 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
10133 fail = TRUE;
10135 /* Fail if a conditional return is undesirable (eg on a
10136 StrongARM), but still allow this if optimizing for size. */
10137 else if (GET_CODE (scanbody) == RETURN
10138 && !use_return_insn (TRUE)
10139 && !optimize_size)
10140 fail = TRUE;
10141 else if (GET_CODE (scanbody) == RETURN
10142 && seeking_return)
10144 arm_ccfsm_state = 2;
10145 succeed = TRUE;
10147 else if (GET_CODE (scanbody) == PARALLEL)
10149 switch (get_attr_conds (this_insn))
10151 case CONDS_NOCOND:
10152 break;
10153 default:
10154 fail = TRUE;
10155 break;
10158 else
10159 fail = TRUE; /* Unrecognized jump (eg epilogue). */
10161 break;
10163 case INSN:
10164 /* Instructions using or affecting the condition codes make it
10165 fail. */
10166 scanbody = PATTERN (this_insn);
10167 if (!(GET_CODE (scanbody) == SET
10168 || GET_CODE (scanbody) == PARALLEL)
10169 || get_attr_conds (this_insn) != CONDS_NOCOND)
10170 fail = TRUE;
10172 /* A conditional Cirrus instruction must be followed by
10173 a non-Cirrus instruction. However, since we
10174 conditionalize instructions in this function and by
10175 the time we get here we can't add instructions
10176 (nops), because shorten_branches() has already been
10177 called, we will disable conditionalizing Cirrus
10178 instructions to be safe. */
10179 if (GET_CODE (scanbody) != USE
10180 && GET_CODE (scanbody) != CLOBBER
10181 && get_attr_cirrus (this_insn) != CIRRUS_NOT)
10182 fail = TRUE;
10183 break;
10185 default:
10186 break;
10189 if (succeed)
10191 if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
10192 arm_target_label = CODE_LABEL_NUMBER (label);
10193 else if (seeking_return || arm_ccfsm_state == 2)
10195 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
10197 this_insn = next_nonnote_insn (this_insn);
10198 if (this_insn && (GET_CODE (this_insn) == BARRIER
10199 || GET_CODE (this_insn) == CODE_LABEL))
10200 abort ();
10202 if (!this_insn)
10204 /* Oh dear! We ran off the end... give up. */
10205 recog (PATTERN (insn), insn, NULL);
10206 arm_ccfsm_state = 0;
10207 arm_target_insn = NULL;
10208 return;
10210 arm_target_insn = this_insn;
10212 else
10213 abort ();
10214 if (jump_clobbers)
10216 if (reverse)
10217 abort ();
10218 arm_current_cc =
10219 get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
10220 0), 0), 1));
10221 if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
10222 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
10223 if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
10224 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
10226 else
10228 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
10229 what it was. */
10230 if (!reverse)
10231 arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
10232 0));
10235 if (reverse || then_not_else)
10236 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
10239 /* Restore recog_data (getting the attributes of other insns can
10240 destroy this array, but final.c assumes that it remains intact
10241 across this call; since the insn has been recognized already we
10242 call recog direct). */
10243 recog (PATTERN (insn), insn, NULL);
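/* By way of illustration (not compiled code): the state machine above
   is what turns a short forward branch such as

        cmp     r0, #0
        beq     .L1
        add     r1, r1, #1
     .L1:

   into the branchless sequence

        cmp     r0, #0
        addne   r1, r1, #1

   by conditionalizing each skipped instruction on the inverse of the
   branch condition.  */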
10247 /* Returns true if REGNO is a valid register
10248 for holding a quantity of type MODE. */
10250 arm_hard_regno_mode_ok (unsigned int regno, enum machine_mode mode)
10252 if (GET_MODE_CLASS (mode) == MODE_CC)
10253 return regno == CC_REGNUM;
10255 if (TARGET_THUMB)
10256 /* For the Thumb we only allow values bigger than SImode in
10257 registers 0 - 6, so that there is always a second low
10258 register available to hold the upper part of the value.
10259 We probably ought to ensure that the register is the
10260 start of an even numbered register pair. */
10261 return (ARM_NUM_REGS (mode) < 2) || (regno < LAST_LO_REGNUM);
10263 if (IS_CIRRUS_REGNUM (regno))
10264 /* We have outlawed SI values in Cirrus registers because they
10265 reside in the lower 32 bits, but SF values reside in the
10266 upper 32 bits. This causes gcc all sorts of grief. We can't
10267 even split the registers into pairs because Cirrus SI values
10268 get sign extended to 64 bits -- aldyh. */
10269 return (GET_MODE_CLASS (mode) == MODE_FLOAT) || (mode == DImode);
10271 if (IS_IWMMXT_GR_REGNUM (regno))
10272 return mode == SImode;
10274 if (IS_IWMMXT_REGNUM (regno))
10275 return VALID_IWMMXT_REG_MODE (mode);
10277 if (regno <= LAST_ARM_REGNUM)
10278 /* We allow any value to be stored in the general registers. */
10279 return 1;
10281 if ( regno == FRAME_POINTER_REGNUM
10282 || regno == ARG_POINTER_REGNUM)
10283 /* We only allow integers in the fake hard registers. */
10284 return GET_MODE_CLASS (mode) == MODE_INT;
10286 /* The only registers left are the FPA registers
10287 which we only allow to hold FP values. */
10288 return GET_MODE_CLASS (mode) == MODE_FLOAT
10289 && regno >= FIRST_ARM_FP_REGNUM
10290 && regno <= LAST_ARM_FP_REGNUM;
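/* For instance (illustrative): on Thumb a DImode value occupies two
   registers, so the test above only accepts it in r0-r6, keeping a
   second low register free for the upper word, whereas a value that
   fits in a single register is accepted anywhere.  */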
10294 arm_regno_class (int regno)
10296 if (TARGET_THUMB)
10298 if (regno == STACK_POINTER_REGNUM)
10299 return STACK_REG;
10300 if (regno == CC_REGNUM)
10301 return CC_REG;
10302 if (regno < 8)
10303 return LO_REGS;
10304 return HI_REGS;
10307 if ( regno <= LAST_ARM_REGNUM
10308 || regno == FRAME_POINTER_REGNUM
10309 || regno == ARG_POINTER_REGNUM)
10310 return GENERAL_REGS;
10312 if (regno == CC_REGNUM)
10313 return NO_REGS;
10315 if (IS_CIRRUS_REGNUM (regno))
10316 return CIRRUS_REGS;
10318 if (IS_IWMMXT_REGNUM (regno))
10319 return IWMMXT_REGS;
10321 return FPA_REGS;
10324 /* Handle a special case when computing the offset
10325 of an argument from the frame pointer. */
10327 arm_debugger_arg_offset (int value, rtx addr)
10329 rtx insn;
10331 /* We are only interested if dbxout_parms() failed to compute the offset. */
10332 if (value != 0)
10333 return 0;
10335 /* We can only cope with the case where the address is held in a register. */
10336 if (GET_CODE (addr) != REG)
10337 return 0;
10339 /* If we are using the frame pointer to point at the argument, then
10340 an offset of 0 is correct. */
10341 if (REGNO (addr) == (unsigned) HARD_FRAME_POINTER_REGNUM)
10342 return 0;
10344 /* If we are using the stack pointer to point at the
10345 argument, then an offset of 0 is correct. */
10346 if ((TARGET_THUMB || !frame_pointer_needed)
10347 && REGNO (addr) == SP_REGNUM)
10348 return 0;
10350 /* Oh dear. The argument is pointed to by a register rather
10351 than being held in a register, or being stored at a known
10352 offset from the frame pointer. Since GDB only understands
10353 those two kinds of argument we must translate the address
10354 held in the register into an offset from the frame pointer.
10355 We do this by searching through the insns for the function
10356 looking to see where this register gets its value. If the
10357 register is initialized from the frame pointer plus an offset
10358 then we are in luck and we can continue, otherwise we give up.
10360 This code is exercised by producing debugging information
10361 for a function with arguments like this:
10363 double func (double a, double b, int c, double d) {return d;}
10365 Without this code the stab for parameter 'd' will be set to
10366 an offset of 0 from the frame pointer, rather than 8. */
10368 /* The if() statement says:
10370 If the insn is a normal instruction
10371 and if the insn is setting the value in a register
10372 and if the register being set is the register holding the address of the argument
10373 and if the address is computed by an addition
10374 that involves adding to a register
10375 which is the frame pointer
10376 a constant integer
10378 then... */
10380 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
10382 if ( GET_CODE (insn) == INSN
10383 && GET_CODE (PATTERN (insn)) == SET
10384 && REGNO (XEXP (PATTERN (insn), 0)) == REGNO (addr)
10385 && GET_CODE (XEXP (PATTERN (insn), 1)) == PLUS
10386 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 0)) == REG
10387 && REGNO (XEXP (XEXP (PATTERN (insn), 1), 0)) == (unsigned) HARD_FRAME_POINTER_REGNUM
10388 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 1)) == CONST_INT
10391 value = INTVAL (XEXP (XEXP (PATTERN (insn), 1), 1));
10393 break;
10397 if (value == 0)
10399 debug_rtx (addr);
10400 warning ("unable to compute real location of stacked parameter");
10401 value = 8; /* XXX magic hack */
10404 return value;
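/* To make the search above concrete (illustrative only): the loop
   matches an insn of the shape

     (set (reg Rn)
          (plus (reg HARD_FRAME_POINTER_REGNUM) (const_int 8)))

   where Rn is the register given in ADDR, and returns the constant
   (8 in this case) as the argument's offset from the frame pointer.  */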
10407 #define def_mbuiltin(MASK, NAME, TYPE, CODE) \
10408 do \
10410 if ((MASK) & insn_flags) \
10411 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, NULL, NULL_TREE); \
10413 while (0)
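/* For example, the invocation used further down,

     def_mbuiltin (FL_IWMMXT, "__builtin_arm_wzero", di_ftype_void,
                   ARM_BUILTIN_WZERO);

   registers the builtin only when the selected CPU's insn_flags
   include FL_IWMMXT.  */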
10415 struct builtin_description
10417 const unsigned int mask;
10418 const enum insn_code icode;
10419 const char * const name;
10420 const enum arm_builtins code;
10421 const enum rtx_code comparison;
10422 const unsigned int flag;
10425 static const struct builtin_description bdesc_2arg[] =
10427 #define IWMMXT_BUILTIN(code, string, builtin) \
10428 { FL_IWMMXT, CODE_FOR_##code, "__builtin_arm_" string, \
10429 ARM_BUILTIN_##builtin, 0, 0 },
10431 IWMMXT_BUILTIN (addv8qi3, "waddb", WADDB)
10432 IWMMXT_BUILTIN (addv4hi3, "waddh", WADDH)
10433 IWMMXT_BUILTIN (addv2si3, "waddw", WADDW)
10434 IWMMXT_BUILTIN (subv8qi3, "wsubb", WSUBB)
10435 IWMMXT_BUILTIN (subv4hi3, "wsubh", WSUBH)
10436 IWMMXT_BUILTIN (subv2si3, "wsubw", WSUBW)
10437 IWMMXT_BUILTIN (ssaddv8qi3, "waddbss", WADDSSB)
10438 IWMMXT_BUILTIN (ssaddv4hi3, "waddhss", WADDSSH)
10439 IWMMXT_BUILTIN (ssaddv2si3, "waddwss", WADDSSW)
10440 IWMMXT_BUILTIN (sssubv8qi3, "wsubbss", WSUBSSB)
10441 IWMMXT_BUILTIN (sssubv4hi3, "wsubhss", WSUBSSH)
10442 IWMMXT_BUILTIN (sssubv2si3, "wsubwss", WSUBSSW)
10443 IWMMXT_BUILTIN (usaddv8qi3, "waddbus", WADDUSB)
10444 IWMMXT_BUILTIN (usaddv4hi3, "waddhus", WADDUSH)
10445 IWMMXT_BUILTIN (usaddv2si3, "waddwus", WADDUSW)
10446 IWMMXT_BUILTIN (ussubv8qi3, "wsubbus", WSUBUSB)
10447 IWMMXT_BUILTIN (ussubv4hi3, "wsubhus", WSUBUSH)
10448 IWMMXT_BUILTIN (ussubv2si3, "wsubwus", WSUBUSW)
10449 IWMMXT_BUILTIN (mulv4hi3, "wmulul", WMULUL)
10450 IWMMXT_BUILTIN (smulv4hi3_highpart, "wmulsh", WMULSH)
10451 IWMMXT_BUILTIN (umulv4hi3_highpart, "wmuluh", WMULUH)
10452 IWMMXT_BUILTIN (eqv8qi3, "wcmpeqb", WCMPEQB)
10453 IWMMXT_BUILTIN (eqv4hi3, "wcmpeqh", WCMPEQH)
10454 IWMMXT_BUILTIN (eqv2si3, "wcmpeqw", WCMPEQW)
10455 IWMMXT_BUILTIN (gtuv8qi3, "wcmpgtub", WCMPGTUB)
10456 IWMMXT_BUILTIN (gtuv4hi3, "wcmpgtuh", WCMPGTUH)
10457 IWMMXT_BUILTIN (gtuv2si3, "wcmpgtuw", WCMPGTUW)
10458 IWMMXT_BUILTIN (gtv8qi3, "wcmpgtsb", WCMPGTSB)
10459 IWMMXT_BUILTIN (gtv4hi3, "wcmpgtsh", WCMPGTSH)
10460 IWMMXT_BUILTIN (gtv2si3, "wcmpgtsw", WCMPGTSW)
10461 IWMMXT_BUILTIN (umaxv8qi3, "wmaxub", WMAXUB)
10462 IWMMXT_BUILTIN (smaxv8qi3, "wmaxsb", WMAXSB)
10463 IWMMXT_BUILTIN (umaxv4hi3, "wmaxuh", WMAXUH)
10464 IWMMXT_BUILTIN (smaxv4hi3, "wmaxsh", WMAXSH)
10465 IWMMXT_BUILTIN (umaxv2si3, "wmaxuw", WMAXUW)
10466 IWMMXT_BUILTIN (smaxv2si3, "wmaxsw", WMAXSW)
10467 IWMMXT_BUILTIN (uminv8qi3, "wminub", WMINUB)
10468 IWMMXT_BUILTIN (sminv8qi3, "wminsb", WMINSB)
10469 IWMMXT_BUILTIN (uminv4hi3, "wminuh", WMINUH)
10470 IWMMXT_BUILTIN (sminv4hi3, "wminsh", WMINSH)
10471 IWMMXT_BUILTIN (uminv2si3, "wminuw", WMINUW)
10472 IWMMXT_BUILTIN (sminv2si3, "wminsw", WMINSW)
10473 IWMMXT_BUILTIN (iwmmxt_anddi3, "wand", WAND)
10474 IWMMXT_BUILTIN (iwmmxt_nanddi3, "wandn", WANDN)
10475 IWMMXT_BUILTIN (iwmmxt_iordi3, "wor", WOR)
10476 IWMMXT_BUILTIN (iwmmxt_xordi3, "wxor", WXOR)
10477 IWMMXT_BUILTIN (iwmmxt_uavgv8qi3, "wavg2b", WAVG2B)
10478 IWMMXT_BUILTIN (iwmmxt_uavgv4hi3, "wavg2h", WAVG2H)
10479 IWMMXT_BUILTIN (iwmmxt_uavgrndv8qi3, "wavg2br", WAVG2BR)
10480 IWMMXT_BUILTIN (iwmmxt_uavgrndv4hi3, "wavg2hr", WAVG2HR)
10481 IWMMXT_BUILTIN (iwmmxt_wunpckilb, "wunpckilb", WUNPCKILB)
10482 IWMMXT_BUILTIN (iwmmxt_wunpckilh, "wunpckilh", WUNPCKILH)
10483 IWMMXT_BUILTIN (iwmmxt_wunpckilw, "wunpckilw", WUNPCKILW)
10484 IWMMXT_BUILTIN (iwmmxt_wunpckihb, "wunpckihb", WUNPCKIHB)
10485 IWMMXT_BUILTIN (iwmmxt_wunpckihh, "wunpckihh", WUNPCKIHH)
10486 IWMMXT_BUILTIN (iwmmxt_wunpckihw, "wunpckihw", WUNPCKIHW)
10487 IWMMXT_BUILTIN (iwmmxt_wmadds, "wmadds", WMADDS)
10488 IWMMXT_BUILTIN (iwmmxt_wmaddu, "wmaddu", WMADDU)
10490 #define IWMMXT_BUILTIN2(code, builtin) \
10491 { FL_IWMMXT, CODE_FOR_##code, NULL, ARM_BUILTIN_##builtin, 0, 0 },
10493 IWMMXT_BUILTIN2 (iwmmxt_wpackhss, WPACKHSS)
10494 IWMMXT_BUILTIN2 (iwmmxt_wpackwss, WPACKWSS)
10495 IWMMXT_BUILTIN2 (iwmmxt_wpackdss, WPACKDSS)
10496 IWMMXT_BUILTIN2 (iwmmxt_wpackhus, WPACKHUS)
10497 IWMMXT_BUILTIN2 (iwmmxt_wpackwus, WPACKWUS)
10498 IWMMXT_BUILTIN2 (iwmmxt_wpackdus, WPACKDUS)
10499 IWMMXT_BUILTIN2 (ashlv4hi3_di, WSLLH)
10500 IWMMXT_BUILTIN2 (ashlv4hi3, WSLLHI)
10501 IWMMXT_BUILTIN2 (ashlv2si3_di, WSLLW)
10502 IWMMXT_BUILTIN2 (ashlv2si3, WSLLWI)
10503 IWMMXT_BUILTIN2 (ashldi3_di, WSLLD)
10504 IWMMXT_BUILTIN2 (ashldi3_iwmmxt, WSLLDI)
10505 IWMMXT_BUILTIN2 (lshrv4hi3_di, WSRLH)
10506 IWMMXT_BUILTIN2 (lshrv4hi3, WSRLHI)
10507 IWMMXT_BUILTIN2 (lshrv2si3_di, WSRLW)
10508 IWMMXT_BUILTIN2 (lshrv2si3, WSRLWI)
10509 IWMMXT_BUILTIN2 (lshrdi3_di, WSRLD)
10510 IWMMXT_BUILTIN2 (lshrdi3, WSRLDI)
10511 IWMMXT_BUILTIN2 (ashrv4hi3_di, WSRAH)
10512 IWMMXT_BUILTIN2 (ashrv4hi3, WSRAHI)
10513 IWMMXT_BUILTIN2 (ashrv2si3_di, WSRAW)
10514 IWMMXT_BUILTIN2 (ashrv2si3, WSRAWI)
10515 IWMMXT_BUILTIN2 (ashrdi3_di, WSRAD)
10516 IWMMXT_BUILTIN2 (ashrdi3, WSRADI)
10517 IWMMXT_BUILTIN2 (rorv4hi3_di, WRORH)
10518 IWMMXT_BUILTIN2 (rorv4hi3, WRORHI)
10519 IWMMXT_BUILTIN2 (rorv2si3_di, WRORW)
10520 IWMMXT_BUILTIN2 (rorv2si3, WRORWI)
10521 IWMMXT_BUILTIN2 (rordi3_di, WRORD)
10522 IWMMXT_BUILTIN2 (rordi3, WRORDI)
10523 IWMMXT_BUILTIN2 (iwmmxt_wmacuz, WMACUZ)
10524 IWMMXT_BUILTIN2 (iwmmxt_wmacsz, WMACSZ)
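/* For reference, each IWMMXT_BUILTIN line above expands to an
   initializer such as

     { FL_IWMMXT, CODE_FOR_addv8qi3, "__builtin_arm_waddb",
       ARM_BUILTIN_WADDB, 0, 0 },

   tying the user-visible builtin name to the named insn pattern.  */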
10527 static const struct builtin_description bdesc_1arg[] =
10529 IWMMXT_BUILTIN (iwmmxt_tmovmskb, "tmovmskb", TMOVMSKB)
10530 IWMMXT_BUILTIN (iwmmxt_tmovmskh, "tmovmskh", TMOVMSKH)
10531 IWMMXT_BUILTIN (iwmmxt_tmovmskw, "tmovmskw", TMOVMSKW)
10532 IWMMXT_BUILTIN (iwmmxt_waccb, "waccb", WACCB)
10533 IWMMXT_BUILTIN (iwmmxt_wacch, "wacch", WACCH)
10534 IWMMXT_BUILTIN (iwmmxt_waccw, "waccw", WACCW)
10535 IWMMXT_BUILTIN (iwmmxt_wunpckehub, "wunpckehub", WUNPCKEHUB)
10536 IWMMXT_BUILTIN (iwmmxt_wunpckehuh, "wunpckehuh", WUNPCKEHUH)
10537 IWMMXT_BUILTIN (iwmmxt_wunpckehuw, "wunpckehuw", WUNPCKEHUW)
10538 IWMMXT_BUILTIN (iwmmxt_wunpckehsb, "wunpckehsb", WUNPCKEHSB)
10539 IWMMXT_BUILTIN (iwmmxt_wunpckehsh, "wunpckehsh", WUNPCKEHSH)
10540 IWMMXT_BUILTIN (iwmmxt_wunpckehsw, "wunpckehsw", WUNPCKEHSW)
10541 IWMMXT_BUILTIN (iwmmxt_wunpckelub, "wunpckelub", WUNPCKELUB)
10542 IWMMXT_BUILTIN (iwmmxt_wunpckeluh, "wunpckeluh", WUNPCKELUH)
10543 IWMMXT_BUILTIN (iwmmxt_wunpckeluw, "wunpckeluw", WUNPCKELUW)
10544 IWMMXT_BUILTIN (iwmmxt_wunpckelsb, "wunpckelsb", WUNPCKELSB)
10545 IWMMXT_BUILTIN (iwmmxt_wunpckelsh, "wunpckelsh", WUNPCKELSH)
10546 IWMMXT_BUILTIN (iwmmxt_wunpckelsw, "wunpckelsw", WUNPCKELSW)
10549 /* Set up all the iWMMXt builtins. This is
10550 not called if TARGET_IWMMXT is zero. */
10552 static void
10553 arm_init_iwmmxt_builtins (void)
10555 const struct builtin_description * d;
10556 size_t i;
10557 tree endlink = void_list_node;
10559 tree int_ftype_int
10560 = build_function_type (integer_type_node,
10561 tree_cons (NULL_TREE, integer_type_node, endlink));
10562 tree v8qi_ftype_v8qi_v8qi_int
10563 = build_function_type (V8QI_type_node,
10564 tree_cons (NULL_TREE, V8QI_type_node,
10565 tree_cons (NULL_TREE, V8QI_type_node,
10566 tree_cons (NULL_TREE,
10567 integer_type_node,
10568 endlink))));
10569 tree v4hi_ftype_v4hi_int
10570 = build_function_type (V4HI_type_node,
10571 tree_cons (NULL_TREE, V4HI_type_node,
10572 tree_cons (NULL_TREE, integer_type_node,
10573 endlink)));
10574 tree v2si_ftype_v2si_int
10575 = build_function_type (V2SI_type_node,
10576 tree_cons (NULL_TREE, V2SI_type_node,
10577 tree_cons (NULL_TREE, integer_type_node,
10578 endlink)));
10579 tree v2si_ftype_di_di
10580 = build_function_type (V2SI_type_node,
10581 tree_cons (NULL_TREE, long_long_integer_type_node,
10582 tree_cons (NULL_TREE, long_long_integer_type_node,
10583 endlink)));
10584 tree di_ftype_di_int
10585 = build_function_type (long_long_integer_type_node,
10586 tree_cons (NULL_TREE, long_long_integer_type_node,
10587 tree_cons (NULL_TREE, integer_type_node,
10588 endlink)));
10589 tree di_ftype_di_int_int
10590 = build_function_type (long_long_integer_type_node,
10591 tree_cons (NULL_TREE, long_long_integer_type_node,
10592 tree_cons (NULL_TREE, integer_type_node,
10593 tree_cons (NULL_TREE,
10594 integer_type_node,
10595 endlink))));
10596 tree int_ftype_v8qi
10597 = build_function_type (integer_type_node,
10598 tree_cons (NULL_TREE, V8QI_type_node,
10599 endlink));
10600 tree int_ftype_v4hi
10601 = build_function_type (integer_type_node,
10602 tree_cons (NULL_TREE, V4HI_type_node,
10603 endlink));
10604 tree int_ftype_v2si
10605 = build_function_type (integer_type_node,
10606 tree_cons (NULL_TREE, V2SI_type_node,
10607 endlink));
10608 tree int_ftype_v8qi_int
10609 = build_function_type (integer_type_node,
10610 tree_cons (NULL_TREE, V8QI_type_node,
10611 tree_cons (NULL_TREE, integer_type_node,
10612 endlink)));
10613 tree int_ftype_v4hi_int
10614 = build_function_type (integer_type_node,
10615 tree_cons (NULL_TREE, V4HI_type_node,
10616 tree_cons (NULL_TREE, integer_type_node,
10617 endlink)));
10618 tree int_ftype_v2si_int
10619 = build_function_type (integer_type_node,
10620 tree_cons (NULL_TREE, V2SI_type_node,
10621 tree_cons (NULL_TREE, integer_type_node,
10622 endlink)));
10623 tree v8qi_ftype_v8qi_int_int
10624 = build_function_type (V8QI_type_node,
10625 tree_cons (NULL_TREE, V8QI_type_node,
10626 tree_cons (NULL_TREE, integer_type_node,
10627 tree_cons (NULL_TREE,
10628 integer_type_node,
10629 endlink))));
10630 tree v4hi_ftype_v4hi_int_int
10631 = build_function_type (V4HI_type_node,
10632 tree_cons (NULL_TREE, V4HI_type_node,
10633 tree_cons (NULL_TREE, integer_type_node,
10634 tree_cons (NULL_TREE,
10635 integer_type_node,
10636 endlink))));
10637 tree v2si_ftype_v2si_int_int
10638 = build_function_type (V2SI_type_node,
10639 tree_cons (NULL_TREE, V2SI_type_node,
10640 tree_cons (NULL_TREE, integer_type_node,
10641 tree_cons (NULL_TREE,
10642 integer_type_node,
10643 endlink))));
10644 /* Miscellaneous. */
10645 tree v8qi_ftype_v4hi_v4hi
10646 = build_function_type (V8QI_type_node,
10647 tree_cons (NULL_TREE, V4HI_type_node,
10648 tree_cons (NULL_TREE, V4HI_type_node,
10649 endlink)));
10650 tree v4hi_ftype_v2si_v2si
10651 = build_function_type (V4HI_type_node,
10652 tree_cons (NULL_TREE, V2SI_type_node,
10653 tree_cons (NULL_TREE, V2SI_type_node,
10654 endlink)));
10655 tree v2si_ftype_v4hi_v4hi
10656 = build_function_type (V2SI_type_node,
10657 tree_cons (NULL_TREE, V4HI_type_node,
10658 tree_cons (NULL_TREE, V4HI_type_node,
10659 endlink)));
10660 tree v2si_ftype_v8qi_v8qi
10661 = build_function_type (V2SI_type_node,
10662 tree_cons (NULL_TREE, V8QI_type_node,
10663 tree_cons (NULL_TREE, V8QI_type_node,
10664 endlink)));
10665 tree v4hi_ftype_v4hi_di
10666 = build_function_type (V4HI_type_node,
10667 tree_cons (NULL_TREE, V4HI_type_node,
10668 tree_cons (NULL_TREE,
10669 long_long_integer_type_node,
10670 endlink)));
10671 tree v2si_ftype_v2si_di
10672 = build_function_type (V2SI_type_node,
10673 tree_cons (NULL_TREE, V2SI_type_node,
10674 tree_cons (NULL_TREE,
10675 long_long_integer_type_node,
10676 endlink)));
10677 tree void_ftype_int_int
10678 = build_function_type (void_type_node,
10679 tree_cons (NULL_TREE, integer_type_node,
10680 tree_cons (NULL_TREE, integer_type_node,
10681 endlink)));
10682 tree di_ftype_void
10683 = build_function_type (long_long_unsigned_type_node, endlink);
10684 tree di_ftype_v8qi
10685 = build_function_type (long_long_integer_type_node,
10686 tree_cons (NULL_TREE, V8QI_type_node,
10687 endlink));
10688 tree di_ftype_v4hi
10689 = build_function_type (long_long_integer_type_node,
10690 tree_cons (NULL_TREE, V4HI_type_node,
10691 endlink));
10692 tree di_ftype_v2si
10693 = build_function_type (long_long_integer_type_node,
10694 tree_cons (NULL_TREE, V2SI_type_node,
10695 endlink));
10696 tree v2si_ftype_v4hi
10697 = build_function_type (V2SI_type_node,
10698 tree_cons (NULL_TREE, V4HI_type_node,
10699 endlink));
10700 tree v4hi_ftype_v8qi
10701 = build_function_type (V4HI_type_node,
10702 tree_cons (NULL_TREE, V8QI_type_node,
10703 endlink));
10705 tree di_ftype_di_v4hi_v4hi
10706 = build_function_type (long_long_unsigned_type_node,
10707 tree_cons (NULL_TREE,
10708 long_long_unsigned_type_node,
10709 tree_cons (NULL_TREE, V4HI_type_node,
10710 tree_cons (NULL_TREE,
10711 V4HI_type_node,
10712 endlink))));
10714 tree di_ftype_v4hi_v4hi
10715 = build_function_type (long_long_unsigned_type_node,
10716 tree_cons (NULL_TREE, V4HI_type_node,
10717 tree_cons (NULL_TREE, V4HI_type_node,
10718 endlink)));
10720 /* Normal vector binops. */
10721 tree v8qi_ftype_v8qi_v8qi
10722 = build_function_type (V8QI_type_node,
10723 tree_cons (NULL_TREE, V8QI_type_node,
10724 tree_cons (NULL_TREE, V8QI_type_node,
10725 endlink)));
10726 tree v4hi_ftype_v4hi_v4hi
10727 = build_function_type (V4HI_type_node,
10728 tree_cons (NULL_TREE, V4HI_type_node,
10729 tree_cons (NULL_TREE, V4HI_type_node,
10730 endlink)));
10731 tree v2si_ftype_v2si_v2si
10732 = build_function_type (V2SI_type_node,
10733 tree_cons (NULL_TREE, V2SI_type_node,
10734 tree_cons (NULL_TREE, V2SI_type_node,
10735 endlink)));
10736 tree di_ftype_di_di
10737 = build_function_type (long_long_unsigned_type_node,
10738 tree_cons (NULL_TREE, long_long_unsigned_type_node,
10739 tree_cons (NULL_TREE,
10740 long_long_unsigned_type_node,
10741 endlink)));
10743 /* Add all builtins that are more or less simple operations on two
10744 operands. */
10745 for (i = 0, d = bdesc_2arg; i < sizeof (bdesc_2arg) / sizeof *d; i++, d++)
10747 /* Use one of the operands; the target can have a different mode for
10748 mask-generating compares. */
10749 enum machine_mode mode;
10750 tree type;
10752 if (d->name == 0)
10753 continue;
10755 mode = insn_data[d->icode].operand[1].mode;
10757 switch (mode)
10759 case V8QImode:
10760 type = v8qi_ftype_v8qi_v8qi;
10761 break;
10762 case V4HImode:
10763 type = v4hi_ftype_v4hi_v4hi;
10764 break;
10765 case V2SImode:
10766 type = v2si_ftype_v2si_v2si;
10767 break;
10768 case DImode:
10769 type = di_ftype_di_di;
10770 break;
10772 default:
10773 abort ();
10776 def_mbuiltin (d->mask, d->name, type, d->code);
10779 /* Add the remaining MMX insns with somewhat more complicated types. */
10780 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wzero", di_ftype_void, ARM_BUILTIN_WZERO);
10781 def_mbuiltin (FL_IWMMXT, "__builtin_arm_setwcx", void_ftype_int_int, ARM_BUILTIN_SETWCX);
10782 def_mbuiltin (FL_IWMMXT, "__builtin_arm_getwcx", int_ftype_int, ARM_BUILTIN_GETWCX);
10784 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsllh", v4hi_ftype_v4hi_di, ARM_BUILTIN_WSLLH);
10785 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsllw", v2si_ftype_v2si_di, ARM_BUILTIN_WSLLW);
10786 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wslld", di_ftype_di_di, ARM_BUILTIN_WSLLD);
10787 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsllhi", v4hi_ftype_v4hi_int, ARM_BUILTIN_WSLLHI);
10788 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsllwi", v2si_ftype_v2si_int, ARM_BUILTIN_WSLLWI);
10789 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wslldi", di_ftype_di_int, ARM_BUILTIN_WSLLDI);
10791 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrlh", v4hi_ftype_v4hi_di, ARM_BUILTIN_WSRLH);
10792 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrlw", v2si_ftype_v2si_di, ARM_BUILTIN_WSRLW);
10793 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrld", di_ftype_di_di, ARM_BUILTIN_WSRLD);
10794 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrlhi", v4hi_ftype_v4hi_int, ARM_BUILTIN_WSRLHI);
10795 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrlwi", v2si_ftype_v2si_int, ARM_BUILTIN_WSRLWI);
10796 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrldi", di_ftype_di_int, ARM_BUILTIN_WSRLDI);
10798 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrah", v4hi_ftype_v4hi_di, ARM_BUILTIN_WSRAH);
10799 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsraw", v2si_ftype_v2si_di, ARM_BUILTIN_WSRAW);
10800 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrad", di_ftype_di_di, ARM_BUILTIN_WSRAD);
10801 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrahi", v4hi_ftype_v4hi_int, ARM_BUILTIN_WSRAHI);
10802 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsrawi", v2si_ftype_v2si_int, ARM_BUILTIN_WSRAWI);
10803 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsradi", di_ftype_di_int, ARM_BUILTIN_WSRADI);
10805 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wrorh", v4hi_ftype_v4hi_di, ARM_BUILTIN_WRORH);
10806 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wrorw", v2si_ftype_v2si_di, ARM_BUILTIN_WRORW);
10807 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wrord", di_ftype_di_di, ARM_BUILTIN_WRORD);
10808 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wrorhi", v4hi_ftype_v4hi_int, ARM_BUILTIN_WRORHI);
10809 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wrorwi", v2si_ftype_v2si_int, ARM_BUILTIN_WRORWI);
10810 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wrordi", di_ftype_di_int, ARM_BUILTIN_WRORDI);
10812 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wshufh", v4hi_ftype_v4hi_int, ARM_BUILTIN_WSHUFH);
10814 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsadb", v2si_ftype_v8qi_v8qi, ARM_BUILTIN_WSADB);
10815 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsadh", v2si_ftype_v4hi_v4hi, ARM_BUILTIN_WSADH);
10816 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsadbz", v2si_ftype_v8qi_v8qi, ARM_BUILTIN_WSADBZ);
10817 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wsadhz", v2si_ftype_v4hi_v4hi, ARM_BUILTIN_WSADHZ);
10819 def_mbuiltin (FL_IWMMXT, "__builtin_arm_textrmsb", int_ftype_v8qi_int, ARM_BUILTIN_TEXTRMSB);
10820 def_mbuiltin (FL_IWMMXT, "__builtin_arm_textrmsh", int_ftype_v4hi_int, ARM_BUILTIN_TEXTRMSH);
10821 def_mbuiltin (FL_IWMMXT, "__builtin_arm_textrmsw", int_ftype_v2si_int, ARM_BUILTIN_TEXTRMSW);
10822 def_mbuiltin (FL_IWMMXT, "__builtin_arm_textrmub", int_ftype_v8qi_int, ARM_BUILTIN_TEXTRMUB);
10823 def_mbuiltin (FL_IWMMXT, "__builtin_arm_textrmuh", int_ftype_v4hi_int, ARM_BUILTIN_TEXTRMUH);
10824 def_mbuiltin (FL_IWMMXT, "__builtin_arm_textrmuw", int_ftype_v2si_int, ARM_BUILTIN_TEXTRMUW);
10825 def_mbuiltin (FL_IWMMXT, "__builtin_arm_tinsrb", v8qi_ftype_v8qi_int_int, ARM_BUILTIN_TINSRB);
10826 def_mbuiltin (FL_IWMMXT, "__builtin_arm_tinsrh", v4hi_ftype_v4hi_int_int, ARM_BUILTIN_TINSRH);
10827 def_mbuiltin (FL_IWMMXT, "__builtin_arm_tinsrw", v2si_ftype_v2si_int_int, ARM_BUILTIN_TINSRW);
10829 def_mbuiltin (FL_IWMMXT, "__builtin_arm_waccb", di_ftype_v8qi, ARM_BUILTIN_WACCB);
10830 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wacch", di_ftype_v4hi, ARM_BUILTIN_WACCH);
10831 def_mbuiltin (FL_IWMMXT, "__builtin_arm_waccw", di_ftype_v2si, ARM_BUILTIN_WACCW);
10833 def_mbuiltin (FL_IWMMXT, "__builtin_arm_tmovmskb", int_ftype_v8qi, ARM_BUILTIN_TMOVMSKB);
10834 def_mbuiltin (FL_IWMMXT, "__builtin_arm_tmovmskh", int_ftype_v4hi, ARM_BUILTIN_TMOVMSKH);
10835 def_mbuiltin (FL_IWMMXT, "__builtin_arm_tmovmskw", int_ftype_v2si, ARM_BUILTIN_TMOVMSKW);
10837 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wpackhss", v8qi_ftype_v4hi_v4hi, ARM_BUILTIN_WPACKHSS);
10838 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wpackhus", v8qi_ftype_v4hi_v4hi, ARM_BUILTIN_WPACKHUS);
10839 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wpackwus", v4hi_ftype_v2si_v2si, ARM_BUILTIN_WPACKWUS);
10840 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wpackwss", v4hi_ftype_v2si_v2si, ARM_BUILTIN_WPACKWSS);
10841 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wpackdus", v2si_ftype_di_di, ARM_BUILTIN_WPACKDUS);
10842 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wpackdss", v2si_ftype_di_di, ARM_BUILTIN_WPACKDSS);
10844 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckehub", v4hi_ftype_v8qi, ARM_BUILTIN_WUNPCKEHUB);
10845 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckehuh", v2si_ftype_v4hi, ARM_BUILTIN_WUNPCKEHUH);
10846 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckehuw", di_ftype_v2si, ARM_BUILTIN_WUNPCKEHUW);
10847 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckehsb", v4hi_ftype_v8qi, ARM_BUILTIN_WUNPCKEHSB);
10848 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckehsh", v2si_ftype_v4hi, ARM_BUILTIN_WUNPCKEHSH);
10849 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckehsw", di_ftype_v2si, ARM_BUILTIN_WUNPCKEHSW);
10850 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckelub", v4hi_ftype_v8qi, ARM_BUILTIN_WUNPCKELUB);
10851 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckeluh", v2si_ftype_v4hi, ARM_BUILTIN_WUNPCKELUH);
10852 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckeluw", di_ftype_v2si, ARM_BUILTIN_WUNPCKELUW);
10853 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckelsb", v4hi_ftype_v8qi, ARM_BUILTIN_WUNPCKELSB);
10854 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckelsh", v2si_ftype_v4hi, ARM_BUILTIN_WUNPCKELSH);
10855 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wunpckelsw", di_ftype_v2si, ARM_BUILTIN_WUNPCKELSW);
10857 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wmacs", di_ftype_di_v4hi_v4hi, ARM_BUILTIN_WMACS);
10858 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wmacsz", di_ftype_v4hi_v4hi, ARM_BUILTIN_WMACSZ);
10859 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wmacu", di_ftype_di_v4hi_v4hi, ARM_BUILTIN_WMACU);
10860 def_mbuiltin (FL_IWMMXT, "__builtin_arm_wmacuz", di_ftype_v4hi_v4hi, ARM_BUILTIN_WMACUZ);
10862 def_mbuiltin (FL_IWMMXT, "__builtin_arm_walign", v8qi_ftype_v8qi_v8qi_int, ARM_BUILTIN_WALIGN);
10863 def_mbuiltin (FL_IWMMXT, "__builtin_arm_tmia", di_ftype_di_int_int, ARM_BUILTIN_TMIA);
10864 def_mbuiltin (FL_IWMMXT, "__builtin_arm_tmiaph", di_ftype_di_int_int, ARM_BUILTIN_TMIAPH);
10865 def_mbuiltin (FL_IWMMXT, "__builtin_arm_tmiabb", di_ftype_di_int_int, ARM_BUILTIN_TMIABB);
10866 def_mbuiltin (FL_IWMMXT, "__builtin_arm_tmiabt", di_ftype_di_int_int, ARM_BUILTIN_TMIABT);
10867 def_mbuiltin (FL_IWMMXT, "__builtin_arm_tmiatb", di_ftype_di_int_int, ARM_BUILTIN_TMIATB);
10868 def_mbuiltin (FL_IWMMXT, "__builtin_arm_tmiatt", di_ftype_di_int_int, ARM_BUILTIN_TMIATT);
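/* From the user's point of view these builtins look like ordinary
   function calls; for example (illustrative):

     long long x = __builtin_arm_wzero ();

   which, having been registered with type di_ftype_void, expands
   through the ARM_BUILTIN_WZERO case in arm_expand_builtin below,
   emitting gen_iwmmxt_clrdi to clear a 64-bit register.  */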
10871 static void
10872 arm_init_builtins (void)
10874 if (TARGET_REALLY_IWMMXT)
10875 arm_init_iwmmxt_builtins ();
10878 /* Errors in the source file can cause expand_expr to return const0_rtx
10879 where we expect a vector. To avoid crashing, use one of the vector
10880 clear instructions. */
10882 static rtx
10883 safe_vector_operand (rtx x, enum machine_mode mode)
10885 if (x != const0_rtx)
10886 return x;
10887 x = gen_reg_rtx (mode);
10889 emit_insn (gen_iwmmxt_clrdi (mode == DImode ? x
10890 : gen_rtx_SUBREG (DImode, x, 0)));
10891 return x;
10894 /* Subroutine of arm_expand_builtin to take care of binop insns. */
10896 static rtx
10897 arm_expand_binop_builtin (enum insn_code icode,
10898 tree arglist, rtx target)
10900 rtx pat;
10901 tree arg0 = TREE_VALUE (arglist);
10902 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
10903 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
10904 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10905 enum machine_mode tmode = insn_data[icode].operand[0].mode;
10906 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
10907 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
10909 if (VECTOR_MODE_P (mode0))
10910 op0 = safe_vector_operand (op0, mode0);
10911 if (VECTOR_MODE_P (mode1))
10912 op1 = safe_vector_operand (op1, mode1);
10914 if (! target
10915 || GET_MODE (target) != tmode
10916 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
10917 target = gen_reg_rtx (tmode);
10919 /* In case the insn wants input operands in modes different from
10920 the result, abort. */
10921 if (GET_MODE (op0) != mode0 || GET_MODE (op1) != mode1)
10922 abort ();
10924 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
10925 op0 = copy_to_mode_reg (mode0, op0);
10926 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
10927 op1 = copy_to_mode_reg (mode1, op1);
10929 pat = GEN_FCN (icode) (target, op0, op1);
10930 if (! pat)
10931 return 0;
10932 emit_insn (pat);
10933 return target;
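/* As an illustration, a call to __builtin_arm_waddb reaches this
   routine via the bdesc_2arg table with ICODE = CODE_FOR_addv8qi3;
   both operands are forced into V8QImode registers if the predicates
   reject them, and a single vector-add insn is emitted.  */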
10936 /* Subroutine of arm_expand_builtin to take care of unop insns. */
10938 static rtx
10939 arm_expand_unop_builtin (enum insn_code icode,
10940 tree arglist, rtx target, int do_load)
10942 rtx pat;
10943 tree arg0 = TREE_VALUE (arglist);
10944 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
10945 enum machine_mode tmode = insn_data[icode].operand[0].mode;
10946 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
10948 if (! target
10949 || GET_MODE (target) != tmode
10950 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
10951 target = gen_reg_rtx (tmode);
10952 if (do_load)
10953 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
10954 else
10956 if (VECTOR_MODE_P (mode0))
10957 op0 = safe_vector_operand (op0, mode0);
10959 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
10960 op0 = copy_to_mode_reg (mode0, op0);
10963 pat = GEN_FCN (icode) (target, op0);
10964 if (! pat)
10965 return 0;
10966 emit_insn (pat);
10967 return target;
10970 /* Expand an expression EXP that calls a built-in function,
10971 with result going to TARGET if that's convenient
10972 (and in mode MODE if that's convenient).
10973 SUBTARGET may be used as the target for computing one of EXP's operands.
10974 IGNORE is nonzero if the value is to be ignored. */
10976 static rtx
10977 arm_expand_builtin (tree exp,
10978 rtx target,
10979 rtx subtarget ATTRIBUTE_UNUSED,
10980 enum machine_mode mode ATTRIBUTE_UNUSED,
10981 int ignore ATTRIBUTE_UNUSED)
10983 const struct builtin_description * d;
10984 enum insn_code icode;
10985 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
10986 tree arglist = TREE_OPERAND (exp, 1);
10987 tree arg0;
10988 tree arg1;
10989 tree arg2;
10990 rtx op0;
10991 rtx op1;
10992 rtx op2;
10993 rtx pat;
10994 int fcode = DECL_FUNCTION_CODE (fndecl);
10995 size_t i;
10996 enum machine_mode tmode;
10997 enum machine_mode mode0;
10998 enum machine_mode mode1;
10999 enum machine_mode mode2;
11001 switch (fcode)
11003 case ARM_BUILTIN_TEXTRMSB:
11004 case ARM_BUILTIN_TEXTRMUB:
11005 case ARM_BUILTIN_TEXTRMSH:
11006 case ARM_BUILTIN_TEXTRMUH:
11007 case ARM_BUILTIN_TEXTRMSW:
11008 case ARM_BUILTIN_TEXTRMUW:
11009 icode = (fcode == ARM_BUILTIN_TEXTRMSB ? CODE_FOR_iwmmxt_textrmsb
11010 : fcode == ARM_BUILTIN_TEXTRMUB ? CODE_FOR_iwmmxt_textrmub
11011 : fcode == ARM_BUILTIN_TEXTRMSH ? CODE_FOR_iwmmxt_textrmsh
11012 : fcode == ARM_BUILTIN_TEXTRMUH ? CODE_FOR_iwmmxt_textrmuh
11013 : CODE_FOR_iwmmxt_textrmw);
11015 arg0 = TREE_VALUE (arglist);
11016 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
11017 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
11018 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
11019 tmode = insn_data[icode].operand[0].mode;
11020 mode0 = insn_data[icode].operand[1].mode;
11021 mode1 = insn_data[icode].operand[2].mode;
11023 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
11024 op0 = copy_to_mode_reg (mode0, op0);
11025 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
11027 /* @@@ better error message */
11028 error ("selector must be an immediate");
11029 return gen_reg_rtx (tmode);
11031 if (target == 0
11032 || GET_MODE (target) != tmode
11033 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
11034 target = gen_reg_rtx (tmode);
11035 pat = GEN_FCN (icode) (target, op0, op1);
11036 if (! pat)
11037 return 0;
11038 emit_insn (pat);
11039 return target;
11041 case ARM_BUILTIN_TINSRB:
11042 case ARM_BUILTIN_TINSRH:
11043 case ARM_BUILTIN_TINSRW:
11044 icode = (fcode == ARM_BUILTIN_TINSRB ? CODE_FOR_iwmmxt_tinsrb
11045 : fcode == ARM_BUILTIN_TINSRH ? CODE_FOR_iwmmxt_tinsrh
11046 : CODE_FOR_iwmmxt_tinsrw);
11047 arg0 = TREE_VALUE (arglist);
11048 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
11049 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
11050 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
11051 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
11052 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
11053 tmode = insn_data[icode].operand[0].mode;
11054 mode0 = insn_data[icode].operand[1].mode;
11055 mode1 = insn_data[icode].operand[2].mode;
11056 mode2 = insn_data[icode].operand[3].mode;
11058 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
11059 op0 = copy_to_mode_reg (mode0, op0);
11060 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
11061 op1 = copy_to_mode_reg (mode1, op1);
11062 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
11064 /* @@@ better error message */
11065 error ("selector must be an immediate");
11066 return const0_rtx;
11068 if (target == 0
11069 || GET_MODE (target) != tmode
11070 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
11071 target = gen_reg_rtx (tmode);
11072 pat = GEN_FCN (icode) (target, op0, op1, op2);
11073 if (! pat)
11074 return 0;
11075 emit_insn (pat);
11076 return target;
11078 case ARM_BUILTIN_SETWCX:
11079 arg0 = TREE_VALUE (arglist);
11080 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
11081 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
11082 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
11083 emit_insn (gen_iwmmxt_tmcr (op0, op1));
11084 return 0;
11086 case ARM_BUILTIN_GETWCX:
11087 arg0 = TREE_VALUE (arglist);
11088 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
11089 target = gen_reg_rtx (SImode);
11090 emit_insn (gen_iwmmxt_tmrc (target, op0));
11091 return target;
11093 case ARM_BUILTIN_WSHUFH:
11094 icode = CODE_FOR_iwmmxt_wshufh;
11095 arg0 = TREE_VALUE (arglist);
11096 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
11097 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
11098 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
11099 tmode = insn_data[icode].operand[0].mode;
11100 mode1 = insn_data[icode].operand[1].mode;
11101 mode2 = insn_data[icode].operand[2].mode;
11103 if (! (*insn_data[icode].operand[1].predicate) (op0, mode1))
11104 op0 = copy_to_mode_reg (mode1, op0);
11105 if (! (*insn_data[icode].operand[2].predicate) (op1, mode2))
11107 /* @@@ better error message */
11108 error ("mask must be an immediate");
11109 return const0_rtx;
11111 if (target == 0
11112 || GET_MODE (target) != tmode
11113 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
11114 target = gen_reg_rtx (tmode);
11115 pat = GEN_FCN (icode) (target, op0, op1);
11116 if (! pat)
11117 return 0;
11118 emit_insn (pat);
11119 return target;
11121 case ARM_BUILTIN_WSADB:
11122 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wsadb, arglist, target);
11123 case ARM_BUILTIN_WSADH:
11124 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wsadh, arglist, target);
11125 case ARM_BUILTIN_WSADBZ:
11126 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wsadbz, arglist, target);
11127 case ARM_BUILTIN_WSADHZ:
11128 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wsadhz, arglist, target);
11130 /* Several three-argument builtins. */
11131 case ARM_BUILTIN_WMACS:
11132 case ARM_BUILTIN_WMACU:
11133 case ARM_BUILTIN_WALIGN:
11134 case ARM_BUILTIN_TMIA:
11135 case ARM_BUILTIN_TMIAPH:
11136 case ARM_BUILTIN_TMIATT:
11137 case ARM_BUILTIN_TMIATB:
11138 case ARM_BUILTIN_TMIABT:
11139 case ARM_BUILTIN_TMIABB:
11140 icode = (fcode == ARM_BUILTIN_WMACS ? CODE_FOR_iwmmxt_wmacs
11141 : fcode == ARM_BUILTIN_WMACU ? CODE_FOR_iwmmxt_wmacu
11142 : fcode == ARM_BUILTIN_TMIA ? CODE_FOR_iwmmxt_tmia
11143 : fcode == ARM_BUILTIN_TMIAPH ? CODE_FOR_iwmmxt_tmiaph
11144 : fcode == ARM_BUILTIN_TMIABB ? CODE_FOR_iwmmxt_tmiabb
11145 : fcode == ARM_BUILTIN_TMIABT ? CODE_FOR_iwmmxt_tmiabt
11146 : fcode == ARM_BUILTIN_TMIATB ? CODE_FOR_iwmmxt_tmiatb
11147 : fcode == ARM_BUILTIN_TMIATT ? CODE_FOR_iwmmxt_tmiatt
11148 : CODE_FOR_iwmmxt_walign);
11149 arg0 = TREE_VALUE (arglist);
11150 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
11151 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
11152 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
11153 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
11154 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
11155 tmode = insn_data[icode].operand[0].mode;
11156 mode0 = insn_data[icode].operand[1].mode;
11157 mode1 = insn_data[icode].operand[2].mode;
11158 mode2 = insn_data[icode].operand[3].mode;
11160 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
11161 op0 = copy_to_mode_reg (mode0, op0);
11162 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
11163 op1 = copy_to_mode_reg (mode1, op1);
11164 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
11165 op2 = copy_to_mode_reg (mode2, op2);
11166 if (target == 0
11167 || GET_MODE (target) != tmode
11168 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
11169 target = gen_reg_rtx (tmode);
11170 pat = GEN_FCN (icode) (target, op0, op1, op2);
11171 if (! pat)
11172 return 0;
11173 emit_insn (pat);
11174 return target;
11176 case ARM_BUILTIN_WZERO:
11177 target = gen_reg_rtx (DImode);
11178 emit_insn (gen_iwmmxt_clrdi (target));
11179 return target;
11181 default:
11182 break;
11185 for (i = 0, d = bdesc_2arg; i < sizeof (bdesc_2arg) / sizeof *d; i++, d++)
11186 if (d->code == (const enum arm_builtins) fcode)
11187 return arm_expand_binop_builtin (d->icode, arglist, target);
11189 for (i = 0, d = bdesc_1arg; i < sizeof (bdesc_1arg) / sizeof *d; i++, d++)
11190 if (d->code == (const enum arm_builtins) fcode)
11191 return arm_expand_unop_builtin (d->icode, arglist, target, 0);
11193 /* @@@ Should really do something sensible here. */
11194 return NULL_RTX;
11197 /* Recursively search through all of the blocks in a function
11198 checking to see if any of the variables created in that
11199 function match the RTX called 'orig'. If they do then
11200 replace them with the RTX called 'new'. */
11201 static void
11202 replace_symbols_in_block (tree block, rtx orig, rtx new)
11204 for (; block; block = BLOCK_CHAIN (block))
11206 tree sym;
11208 if (!TREE_USED (block))
11209 continue;
11211 for (sym = BLOCK_VARS (block); sym; sym = TREE_CHAIN (sym))
11213 if ( (DECL_NAME (sym) == 0 && TREE_CODE (sym) != TYPE_DECL)
11214 || DECL_IGNORED_P (sym)
11215 || TREE_CODE (sym) != VAR_DECL
11216 || DECL_EXTERNAL (sym)
11217 || !rtx_equal_p (DECL_RTL (sym), orig)
11219 continue;
11221 SET_DECL_RTL (sym, new);
11224 replace_symbols_in_block (BLOCK_SUBBLOCKS (block), orig, new);
11228 /* Return the number (counting from 0) of
11229 the least significant set bit in MASK. */
11231 inline static int
11232 number_of_first_bit_set (int mask)
11234 int bit;
11236 for (bit = 0;
11237 (mask & (1 << bit)) == 0;
11238 ++bit)
11239 continue;
11241 return bit;
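/* For example, number_of_first_bit_set (0x18) returns 3, since bit 3
   is the lowest bit set in 0x18 (binary 11000).  Note that the loop
   never terminates for a zero mask, so callers must pass a nonzero
   MASK.  */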
11244 /* Generate code to return from a thumb function.
11245 If 'reg_containing_return_addr' is -1, then the return address is
11246 actually on the stack, at the stack pointer. */
11247 static void
11248 thumb_exit (FILE *f, int reg_containing_return_addr, rtx eh_ofs)
11250 unsigned regs_available_for_popping;
11251 unsigned regs_to_pop;
11252 int pops_needed;
11253 unsigned available;
11254 unsigned required;
11255 int mode;
11256 int size;
11257 int restore_a4 = FALSE;
11259 /* Compute the registers we need to pop. */
11260 regs_to_pop = 0;
11261 pops_needed = 0;
11263 /* There is an assumption here: if eh_ofs is not NULL, the
11264 normal return address will have been pushed. */
11265 if (reg_containing_return_addr == -1 || eh_ofs)
11267 /* When we are generating a return for __builtin_eh_return,
11268 reg_containing_return_addr must specify the return regno. */
11269 if (eh_ofs && reg_containing_return_addr == -1)
11270 abort ();
11272 regs_to_pop |= 1 << LR_REGNUM;
11273 ++pops_needed;
11276 if (TARGET_BACKTRACE)
11278 /* Restore the (ARM) frame pointer and stack pointer. */
11279 regs_to_pop |= (1 << ARM_HARD_FRAME_POINTER_REGNUM) | (1 << SP_REGNUM);
11280 pops_needed += 2;
11283 /* If there is nothing to pop then just emit the BX instruction and
11284 return. */
11285 if (pops_needed == 0)
11287 if (eh_ofs)
11288 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
11290 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
11291 return;
11293 /* Otherwise if we are not supporting interworking and we have not created
11294 a backtrace structure and the function was not entered in ARM mode then
11295 just pop the return address straight into the PC. */
11296 else if (!TARGET_INTERWORK
11297 && !TARGET_BACKTRACE
11298 && !is_called_in_ARM_mode (current_function_decl))
11300 if (eh_ofs)
11302 asm_fprintf (f, "\tadd\t%r, #4\n", SP_REGNUM);
11303 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
11304 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
11306 else
11307 asm_fprintf (f, "\tpop\t{%r}\n", PC_REGNUM);
11309 return;
11312 /* Find out how many of the (return) argument registers we can corrupt. */
11313 regs_available_for_popping = 0;
11315 /* If returning via __builtin_eh_return, the bottom three registers
11316 all contain information needed for the return. */
11317 if (eh_ofs)
11318 size = 12;
11319 else
11321 #ifdef RTX_CODE
11322 /* See if we can deduce the registers used from the function's
11323 return value. This is more reliable than examining
11324 regs_ever_live[] because that will be set if the register is
11325 ever used in the function, not just if the register is used
11326 to hold a return value. */
11328 if (current_function_return_rtx != 0)
11329 mode = GET_MODE (current_function_return_rtx);
11330 else
11331 #endif
11332 mode = DECL_MODE (DECL_RESULT (current_function_decl));
11334 size = GET_MODE_SIZE (mode);
11336 if (size == 0)
11338 /* In a void function we can use any argument register.
11339 In a function that returns a structure on the stack
11340 we can use the second and third argument registers. */
11341 if (mode == VOIDmode)
11342 regs_available_for_popping =
11343 (1 << ARG_REGISTER (1))
11344 | (1 << ARG_REGISTER (2))
11345 | (1 << ARG_REGISTER (3));
11346 else
11347 regs_available_for_popping =
11348 (1 << ARG_REGISTER (2))
11349 | (1 << ARG_REGISTER (3));
11351 else if (size <= 4)
11352 regs_available_for_popping =
11353 (1 << ARG_REGISTER (2))
11354 | (1 << ARG_REGISTER (3));
11355 else if (size <= 8)
11356 regs_available_for_popping =
11357 (1 << ARG_REGISTER (3));
11360 /* Match registers to be popped with registers into which we pop them. */
11361 for (available = regs_available_for_popping,
11362 required = regs_to_pop;
11363 required != 0 && available != 0;
11364 available &= ~(available & - available),
11365 required &= ~(required & - required))
11366 -- pops_needed;
11368 /* If we have any popping registers left over, remove them. */
11369 if (available > 0)
11370 regs_available_for_popping &= ~available;
11372 /* Otherwise if we need another popping register we can use
11373 the fourth argument register. */
11374 else if (pops_needed)
11376 /* If we have not found any free argument registers and
11377 reg a4 contains the return address, we must move it. */
11378 if (regs_available_for_popping == 0
11379 && reg_containing_return_addr == LAST_ARG_REGNUM)
11381 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
11382 reg_containing_return_addr = LR_REGNUM;
11384 else if (size > 12)
11386 /* Register a4 is being used to hold part of the return value,
11387 but we have dire need of a free, low register. */
11388 restore_a4 = TRUE;
11390 asm_fprintf (f, "\tmov\t%r, %r\n",IP_REGNUM, LAST_ARG_REGNUM);
11393 if (reg_containing_return_addr != LAST_ARG_REGNUM)
11395 /* The fourth argument register is available. */
11396 regs_available_for_popping |= 1 << LAST_ARG_REGNUM;
11398 --pops_needed;
11402 /* Pop as many registers as we can. */
11403 thumb_pushpop (f, regs_available_for_popping, FALSE);
11405 /* Process the registers we popped. */
11406 if (reg_containing_return_addr == -1)
11408 /* The return address was popped into the lowest numbered register. */
11409 regs_to_pop &= ~(1 << LR_REGNUM);
11411 reg_containing_return_addr =
11412 number_of_first_bit_set (regs_available_for_popping);
11414 /* Remove this register from the mask of available registers, so that
11415 the return address will not be corrupted by further pops. */
11416 regs_available_for_popping &= ~(1 << reg_containing_return_addr);
11419 /* If we popped other registers then handle them here. */
11420 if (regs_available_for_popping)
11422 int frame_pointer;
11424 /* Work out which register currently contains the frame pointer. */
11425 frame_pointer = number_of_first_bit_set (regs_available_for_popping);
11427 /* Move it into the correct place. */
11428 asm_fprintf (f, "\tmov\t%r, %r\n",
11429 ARM_HARD_FRAME_POINTER_REGNUM, frame_pointer);
11431 /* (Temporarily) remove it from the mask of popped registers. */
11432 regs_available_for_popping &= ~(1 << frame_pointer);
11433 regs_to_pop &= ~(1 << ARM_HARD_FRAME_POINTER_REGNUM);
11435 if (regs_available_for_popping)
11437 int stack_pointer;
11439 /* We popped the stack pointer as well;
11440 find the register that contains it. */
11441 stack_pointer = number_of_first_bit_set (regs_available_for_popping);
11443 /* Move it into the stack register. */
11444 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, stack_pointer);
11446 /* At this point we have popped all necessary registers, so
11447 do not worry about restoring regs_available_for_popping
11448 to its correct value:
11450 assert (pops_needed == 0)
11451 assert (regs_available_for_popping == (1 << frame_pointer))
11452 assert (regs_to_pop == (1 << STACK_POINTER)) */
11454 else
11456 /* Since we have just moved the popped value into the frame
11457 pointer, the popping register is available for reuse, and
11458 we know that we still have the stack pointer left to pop. */
11459 regs_available_for_popping |= (1 << frame_pointer);
11463 /* If we still have registers left on the stack, but we no longer have
11464 any registers into which we can pop them, then we must move the return
11465 address into the link register and make available the register that
11466 contained it. */
11467 if (regs_available_for_popping == 0 && pops_needed > 0)
11469 regs_available_for_popping |= 1 << reg_containing_return_addr;
11471 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM,
11472 reg_containing_return_addr);
11474 reg_containing_return_addr = LR_REGNUM;
11477 /* If we have registers left on the stack then pop some more.
11478 We know that at most we will want to pop FP and SP. */
11479 if (pops_needed > 0)
11481 int popped_into;
11482 int move_to;
11484 thumb_pushpop (f, regs_available_for_popping, FALSE);
11486 /* We have popped either FP or SP.
11487 Move whichever one it is into the correct register. */
11488 popped_into = number_of_first_bit_set (regs_available_for_popping);
11489 move_to = number_of_first_bit_set (regs_to_pop);
11491 asm_fprintf (f, "\tmov\t%r, %r\n", move_to, popped_into);
11493 regs_to_pop &= ~(1 << move_to);
11495 --pops_needed;
11498 /* If we still have not popped everything then we must have only
11499 had one register available to us and we are now popping the SP. */
11500 if (pops_needed > 0)
11502 int popped_into;
11504 thumb_pushpop (f, regs_available_for_popping, FALSE);
11506 popped_into = number_of_first_bit_set (regs_available_for_popping);
11508 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, popped_into);
11510 /* assert (regs_to_pop == (1 << STACK_POINTER))
11511 assert (pops_needed == 1) */
11515 /* If necessary restore the a4 register. */
11516 if (restore_a4)
11518 if (reg_containing_return_addr != LR_REGNUM)
11520 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
11521 reg_containing_return_addr = LR_REGNUM;
11524 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
11527 if (eh_ofs)
11528 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
11530 /* Return to caller. */
11531 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
11534 /* Emit code to push or pop registers to or from the stack. */
11535 static void
11536 thumb_pushpop (FILE *f, int mask, int push)
11538 int regno;
11539 int lo_mask = mask & 0xFF;
11541 if (lo_mask == 0 && !push && (mask & (1 << 15)))
11543 /* Special case. Do not generate a POP PC statement here; do it in
11544 thumb_exit(). */
11545 thumb_exit (f, -1, NULL_RTX);
11546 return;
11549 fprintf (f, "\t%s\t{", push ? "push" : "pop");
11551 /* Look at the low registers first. */
11552 for (regno = 0; regno <= LAST_LO_REGNUM; regno++, lo_mask >>= 1)
11554 if (lo_mask & 1)
11556 asm_fprintf (f, "%r", regno);
11558 if ((lo_mask & ~1) != 0)
11559 fprintf (f, ", ");
11563 if (push && (mask & (1 << LR_REGNUM)))
11565 /* Catch pushing the LR. */
11566 if (mask & 0xFF)
11567 fprintf (f, ", ");
11569 asm_fprintf (f, "%r", LR_REGNUM);
11571 else if (!push && (mask & (1 << PC_REGNUM)))
11573 /* Catch popping the PC. */
11574 if (TARGET_INTERWORK || TARGET_BACKTRACE)
11576 /* The PC is never popped directly; instead
11577 it is popped into r3 and then BX is used.  */
11578 fprintf (f, "}\n");
11580 thumb_exit (f, -1, NULL_RTX);
11582 return;
11584 else
11586 if (mask & 0xFF)
11587 fprintf (f, ", ");
11589 asm_fprintf (f, "%r", PC_REGNUM);
11593 fprintf (f, "}\n");
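/* Illustrative examples (editorial sketch, not from the original
   source).  MASK is a register bitmap: bits 4-7 are r4-r7, bit 14 is
   LR and bit 15 is PC, so, for instance:

     thumb_pushpop (f, 0x40f0, 1);   emits   push {r4, r5, r6, r7, lr}
     thumb_pushpop (f, 0x80f0, 0);   emits   pop  {r4, r5, r6, r7, pc}

   the latter only when neither TARGET_INTERWORK nor TARGET_BACKTRACE
   is set; otherwise the PC value is popped into a work register and
   control returns via BX in thumb_exit(), as noted above.  */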
11596 void
11597 thumb_final_prescan_insn (rtx insn)
11599 if (flag_print_asm_name)
11600 asm_fprintf (asm_out_file, "%@ 0x%04x\n",
11601 INSN_ADDRESSES (INSN_UID (insn)));
11604 int
11605 thumb_shiftable_const (unsigned HOST_WIDE_INT val)
11607 unsigned HOST_WIDE_INT mask = 0xff;
11608 int i;
11610 if (val == 0) /* XXX */
11611 return 0;
11613 for (i = 0; i < 25; i++)
11614 if ((val & (mask << i)) == val)
11615 return 1;
11617 return 0;
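/* Illustrative values (editorial note): the loop accepts any constant
   whose set bits fit within one 8-bit field shifted by 0..24, e.g.

     thumb_shiftable_const (0x000000ff) == 1   (i == 0)
     thumb_shiftable_const (0x00ff0000) == 1   (i == 16)
     thumb_shiftable_const (0x00000101) == 0   (set bits span 9 positions)
     thumb_shiftable_const (0)          == 0   (rejected explicitly)  */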
11620 /* Returns nonzero if the current function contains,
11621 or might contain, a far jump.  */
11622 int
11623 thumb_far_jump_used_p (int in_prologue)
11625 rtx insn;
11627 /* This test is only important for leaf functions. */
11628 /* assert (!leaf_function_p ()); */
11630 /* If we have already decided that far jumps may be used,
11631 do not bother checking again, and always return true even if
11632 it turns out that they are not being used. Once we have made
11633 the decision that far jumps are present (and that hence the link
11634 register will be pushed onto the stack) we cannot go back on it. */
11635 if (cfun->machine->far_jump_used)
11636 return 1;
11638 /* If this function is not being called from the prologue/epilogue
11639 generation code then it must be being called from the
11640 INITIAL_ELIMINATION_OFFSET macro. */
11641 if (!in_prologue)
11643 /* In this case we know that we are being asked about the elimination
11644 of the arg pointer register. If that register is not being used,
11645 then there are no arguments on the stack, and we do not have to
11646 worry that a far jump might force the prologue to push the link
11647 register, changing the stack offsets. In this case we can just
11648 return false, since the presence of far jumps in the function will
11649 not affect stack offsets.
11651 If the arg pointer is live (or if it was live, but has now been
11652 eliminated and so set to dead) then we do have to test to see if
11653 the function might contain a far jump. This test can lead to some
11654 false positives, since before reload is completed, the length of
11655 branch instructions is not known, so gcc defaults to returning their
11656 longest length, which in turn sets the far jump attribute to true.
11658 A false positive will not result in bad code being generated, but it
11659 will result in a needless push and pop of the link register. We
11660 hope that this does not occur too often. */
11661 if (regs_ever_live [ARG_POINTER_REGNUM])
11662 cfun->machine->arg_pointer_live = 1;
11663 else if (!cfun->machine->arg_pointer_live)
11664 return 0;
11667 /* Check to see if the function contains a branch
11668 insn with the far jump attribute set. */
11669 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
11671 if (GET_CODE (insn) == JUMP_INSN
11672 /* Ignore tablejump patterns. */
11673 && GET_CODE (PATTERN (insn)) != ADDR_VEC
11674 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
11675 && get_attr_far_jump (insn) == FAR_JUMP_YES
11678 /* Record the fact that we have decided that
11679 the function does use far jumps. */
11680 cfun->machine->far_jump_used = 1;
11681 return 1;
11685 return 0;
11688 /* Return nonzero if FUNC must be entered in ARM mode. */
11689 int
11690 is_called_in_ARM_mode (tree func)
11692 if (TREE_CODE (func) != FUNCTION_DECL)
11693 abort ();
11695 /* Ignore the problem about functions whose addresses are taken.  */
11696 if (TARGET_CALLEE_INTERWORKING && TREE_PUBLIC (func))
11697 return TRUE;
11699 #ifdef ARM_PE
11700 return lookup_attribute ("interfacearm", DECL_ATTRIBUTES (func)) != NULL_TREE;
11701 #else
11702 return FALSE;
11703 #endif
11706 /* The bits which aren't usefully expanded as rtl. */
11707 const char *
11708 thumb_unexpanded_epilogue (void)
11710 int regno;
11711 int live_regs_mask = 0;
11712 int high_regs_pushed = 0;
11713 int leaf_function = leaf_function_p ();
11714 int had_to_push_lr;
11715 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
11717 if (return_used_this_function)
11718 return "";
11720 if (IS_NAKED (arm_current_func_type ()))
11721 return "";
11723 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
11724 if (THUMB_REG_PUSHED_P (regno))
11725 live_regs_mask |= 1 << regno;
11727 for (regno = 8; regno < 13; regno++)
11728 if (THUMB_REG_PUSHED_P (regno))
11729 high_regs_pushed++;
11731 /* The prologue may have pushed some high registers to use as
11732 work registers.  For example, the testsuite file
11733 gcc/testsuite/gcc.c-torture/execute/complex-2.c
11734 compiles to produce:
11735 push {r4, r5, r6, r7, lr}
11736 mov r7, r9
11737 mov r6, r8
11738 push {r6, r7}
11739 as part of the prologue.  We have to undo that pushing here.  */
11741 if (high_regs_pushed)
11743 int mask = live_regs_mask;
11744 int next_hi_reg;
11745 int size;
11746 int mode;
11748 #ifdef RTX_CODE
11749 /* Deduce the registers used from the function's return value, if we
11750 can.  This is more reliable than examining regs_ever_live[] because that
11751 will be set if the register is ever used in the function, not just if
11752 the register is used to hold a return value. */
11754 if (current_function_return_rtx != 0)
11755 mode = GET_MODE (current_function_return_rtx);
11756 else
11757 #endif
11758 mode = DECL_MODE (DECL_RESULT (current_function_decl));
11760 size = GET_MODE_SIZE (mode);
11762 /* Unless we are returning a type of size > 12, register r3 is
11763 available.  */
11764 if (size < 13)
11765 mask |= 1 << 3;
11767 if (mask == 0)
11768 /* Oh dear! We have no low registers into which we can pop
11769 high registers! */
11770 internal_error
11771 ("no low registers available for popping high registers");
11773 for (next_hi_reg = 8; next_hi_reg < 13; next_hi_reg++)
11774 if (THUMB_REG_PUSHED_P (next_hi_reg))
11775 break;
11777 while (high_regs_pushed)
11779 /* Find lo register(s) into which the high register(s) can
11780 be popped. */
11781 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
11783 if (mask & (1 << regno))
11784 high_regs_pushed--;
11785 if (high_regs_pushed == 0)
11786 break;
11789 mask &= (2 << regno) - 1; /* A noop if regno == 8 */
11791 /* Pop the values into the low register(s). */
11792 thumb_pushpop (asm_out_file, mask, 0);
11794 /* Move the value(s) into the high registers. */
11795 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
11797 if (mask & (1 << regno))
11799 asm_fprintf (asm_out_file, "\tmov\t%r, %r\n", next_hi_reg,
11800 regno);
11802 for (next_hi_reg++; next_hi_reg < 13; next_hi_reg++)
11803 if (THUMB_REG_PUSHED_P (next_hi_reg))
11804 break;
11810 had_to_push_lr = (live_regs_mask || !leaf_function
11811 || thumb_far_jump_used_p (1));
11813 if (TARGET_BACKTRACE
11814 && ((live_regs_mask & 0xFF) == 0)
11815 && regs_ever_live [LAST_ARG_REGNUM] != 0)
11817 /* The stack backtrace structure creation code had to
11818 push R7 in order to get a work register, so we pop
11819 it now. */
11820 live_regs_mask |= (1 << LAST_LO_REGNUM);
11823 if (current_function_pretend_args_size == 0 || TARGET_BACKTRACE)
11825 if (had_to_push_lr
11826 && !is_called_in_ARM_mode (current_function_decl)
11827 && !eh_ofs)
11828 live_regs_mask |= 1 << PC_REGNUM;
11830 /* Either no argument registers were pushed or a backtrace
11831 structure was created which includes an adjusted stack
11832 pointer, so just pop everything. */
11833 if (live_regs_mask)
11834 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
11836 if (eh_ofs)
11837 thumb_exit (asm_out_file, 2, eh_ofs);
11838 /* We have either just popped the return address into the
11839 PC, or it was kept in LR for the entire function, or
11840 it is still on the stack because we do not want to
11841 return by doing a pop {pc}. */
11842 else if ((live_regs_mask & (1 << PC_REGNUM)) == 0)
11843 thumb_exit (asm_out_file,
11844 (had_to_push_lr
11845 && is_called_in_ARM_mode (current_function_decl)) ?
11846 -1 : LR_REGNUM, NULL_RTX);
11848 else
11850 /* Pop everything but the return address. */
11851 live_regs_mask &= ~(1 << PC_REGNUM);
11853 if (live_regs_mask)
11854 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
11856 if (had_to_push_lr)
11857 /* Get the return address into a temporary register. */
11858 thumb_pushpop (asm_out_file, 1 << LAST_ARG_REGNUM, 0);
11860 /* Remove the argument registers that were pushed onto the stack. */
11861 asm_fprintf (asm_out_file, "\tadd\t%r, %r, #%d\n",
11862 SP_REGNUM, SP_REGNUM,
11863 current_function_pretend_args_size);
11865 if (eh_ofs)
11866 thumb_exit (asm_out_file, 2, eh_ofs);
11867 else
11868 thumb_exit (asm_out_file,
11869 had_to_push_lr ? LAST_ARG_REGNUM : LR_REGNUM, NULL_RTX);
11872 return "";
11875 /* Functions to save and restore machine-specific function data. */
11876 static struct machine_function *
11877 arm_init_machine_status (void)
11879 struct machine_function *machine;
11880 machine = (machine_function *) ggc_alloc_cleared (sizeof (machine_function));
11882 #if ARM_FT_UNKNOWN != 0
11883 machine->func_type = ARM_FT_UNKNOWN;
11884 #endif
11885 return machine;
11888 /* Return an RTX indicating where the return address to the
11889 calling function can be found. */
11890 rtx
11891 arm_return_addr (int count, rtx frame ATTRIBUTE_UNUSED)
11893 if (count != 0)
11894 return NULL_RTX;
11896 if (TARGET_APCS_32)
11897 return get_hard_reg_initial_val (Pmode, LR_REGNUM);
11898 else
11900 rtx lr = gen_rtx_AND (Pmode, gen_rtx_REG (Pmode, LR_REGNUM),
11901 GEN_INT (RETURN_ADDR_MASK26));
11902 return get_func_hard_reg_initial_val (cfun, lr);
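/* Editorial note: under the 26-bit APCS the processor flags share the
   top bits of LR with the return address, so the AND with
   RETURN_ADDR_MASK26 above strips the PSR bits and leaves only the
   26-bit address portion.  */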
11906 /* Do anything needed before RTL is emitted for each function. */
11907 void
11908 arm_init_expanders (void)
11910 /* Arrange to initialize and mark the machine per-function status. */
11911 init_machine_status = arm_init_machine_status;
11914 HOST_WIDE_INT
11915 thumb_get_frame_size (void)
11917 int regno;
11919 int base_size = ROUND_UP_WORD (get_frame_size ());
11920 int count_regs = 0;
11921 int entry_size = 0;
11922 int leaf;
11924 if (! TARGET_THUMB)
11925 abort ();
11927 if (! TARGET_ATPCS)
11928 return base_size;
11930 /* We need to know if we are a leaf function. Unfortunately, it
11931 is possible to be called after start_sequence has been called,
11932 which causes get_insns to return the insns for the sequence,
11933 not the function, which will cause leaf_function_p to return
11934 the incorrect result.
11936 To work around this, we cache the computed frame size. This
11937 works because we will only be calling RTL expanders that need
11938 to know about leaf functions once reload has completed, and the
11939 frame size cannot be changed after that time, so we can safely
11940 use the cached value. */
11942 if (reload_completed)
11943 return cfun->machine->frame_size;
11945 leaf = leaf_function_p ();
11947 /* A leaf function does not need any stack alignment if it has nothing
11948 on the stack. */
11949 if (leaf && base_size == 0)
11951 cfun->machine->frame_size = 0;
11952 return 0;
11955 /* We know that SP will be word aligned on entry, and we must
11956 preserve that condition at any subroutine call. But those are
11957 the only constraints. */
11959 /* Space for variadic functions. */
11960 if (current_function_pretend_args_size)
11961 entry_size += current_function_pretend_args_size;
11963 /* Space for pushed lo registers. */
11964 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
11965 if (THUMB_REG_PUSHED_P (regno))
11966 count_regs++;
11968 /* Space for backtrace structure. */
11969 if (TARGET_BACKTRACE)
11971 if (count_regs == 0 && regs_ever_live[LAST_ARG_REGNUM] != 0)
11972 entry_size += 20;
11973 else
11974 entry_size += 16;
11977 if (count_regs || !leaf || thumb_far_jump_used_p (1))
11978 count_regs++; /* LR */
11980 entry_size += count_regs * 4;
11981 count_regs = 0;
11983 /* Space for pushed hi regs. */
11984 for (regno = 8; regno < 13; regno++)
11985 if (THUMB_REG_PUSHED_P (regno))
11986 count_regs++;
11988 entry_size += count_regs * 4;
11990 if ((entry_size + base_size + current_function_outgoing_args_size) & 7)
11991 base_size += 4;
11992 if ((entry_size + base_size + current_function_outgoing_args_size) & 7)
11993 abort ();
11995 cfun->machine->frame_size = base_size;
11997 return base_size;
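/* Worked example (editorial, figures illustrative): a non-leaf ATPCS
   function with an 8-byte frame, no pretend, outgoing or backtrace
   requirements, and r4 and r5 pushed gives

     base_size  = 8
     entry_size = (2 low regs + LR) * 4 = 12
     (12 + 8 + 0) & 7 == 4,  so base_size becomes 12

   for a doubleword-aligned 24-byte activation record.  */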
12000 /* Generate the rest of a function's prologue. */
12001 void
12002 thumb_expand_prologue (void)
12004 HOST_WIDE_INT amount = (thumb_get_frame_size ()
12005 + current_function_outgoing_args_size);
12006 unsigned long func_type;
12008 func_type = arm_current_func_type ();
12010 /* Naked functions don't have prologues. */
12011 if (IS_NAKED (func_type))
12012 return;
12014 if (IS_INTERRUPT (func_type))
12016 error ("interrupt Service Routines cannot be coded in Thumb mode");
12017 return;
12020 if (frame_pointer_needed)
12021 emit_insn (gen_movsi (hard_frame_pointer_rtx, stack_pointer_rtx));
12023 if (amount)
12025 amount = ROUND_UP_WORD (amount);
12027 if (amount < 512)
12028 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
12029 GEN_INT (- amount)));
12030 else
12032 int regno;
12033 rtx reg;
12035 /* The stack decrement is too big for an immediate value in a single
12036 insn. In theory we could issue multiple subtracts, but after
12037 three of them it becomes more space efficient to place the full
12038 value in the constant pool and load into a register. (Also the
12039 ARM debugger really likes to see only one stack decrement per
12040 function). So instead we look for a scratch register into which
12041 we can load the decrement, and then we subtract this from the
12042 stack pointer. Unfortunately on the thumb the only available
12043 scratch registers are the argument registers, and we cannot use
12044 these as they may hold arguments to the function. Instead we
12045 attempt to locate a call preserved register which is used by this
12046 function. If we can find one, then we know that it will have
12047 been pushed at the start of the prologue and so we can corrupt
12048 it now. */
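/* For instance (editorial sketch; the register choice is
   illustrative): with a 4096-byte frame and r4 already pushed by the
   prologue, the code below produces RTL equivalent to

     ldr  r4, .Lc    @ .Lc: .word -4096, from the constant pool
     add  sp, sp, r4

   and r4's saved value is recovered by the normal epilogue pops.  */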
12049 for (regno = LAST_ARG_REGNUM + 1; regno <= LAST_LO_REGNUM; regno++)
12050 if (THUMB_REG_PUSHED_P (regno)
12051 && !(frame_pointer_needed
12052 && (regno == THUMB_HARD_FRAME_POINTER_REGNUM)))
12053 break;
12055 if (regno > LAST_LO_REGNUM) /* Very unlikely. */
12057 rtx spare = gen_rtx (REG, SImode, IP_REGNUM);
12059 /* Choose an arbitrary, non-argument low register. */
12060 reg = gen_rtx (REG, SImode, LAST_LO_REGNUM);
12062 /* Save it by copying it into a high, scratch register. */
12063 emit_insn (gen_movsi (spare, reg));
12064 /* Add a USE to stop propagate_one_insn() from barfing. */
12065 emit_insn (gen_prologue_use (spare));
12067 /* Decrement the stack. */
12068 emit_insn (gen_movsi (reg, GEN_INT (- amount)));
12069 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
12070 reg));
12072 /* Restore the low register's original value. */
12073 emit_insn (gen_movsi (reg, spare));
12075 /* Emit a USE of the restored scratch register, so that flow
12076 analysis will not consider the restore redundant. The
12077 register won't be used again in this function and isn't
12078 restored by the epilogue. */
12079 emit_insn (gen_prologue_use (reg));
12081 else
12083 reg = gen_rtx (REG, SImode, regno);
12085 emit_insn (gen_movsi (reg, GEN_INT (- amount)));
12086 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
12087 reg));
12092 if (current_function_profile || TARGET_NO_SCHED_PRO)
12093 emit_insn (gen_blockage ());
12096 void
12097 thumb_expand_epilogue (void)
12099 HOST_WIDE_INT amount = (thumb_get_frame_size ()
12100 + current_function_outgoing_args_size);
12103 /* Naked functions don't have epilogues.  */
12103 if (IS_NAKED (arm_current_func_type ()))
12104 return;
12106 if (frame_pointer_needed)
12107 emit_insn (gen_movsi (stack_pointer_rtx, hard_frame_pointer_rtx));
12108 else if (amount)
12110 amount = ROUND_UP_WORD (amount);
12112 if (amount < 512)
12113 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
12114 GEN_INT (amount)));
12115 else
12117 /* r3 is always free in the epilogue. */
12118 rtx reg = gen_rtx (REG, SImode, LAST_ARG_REGNUM);
12120 emit_insn (gen_movsi (reg, GEN_INT (amount)));
12121 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
12125 /* Emit a USE (stack_pointer_rtx), so that
12126 the stack adjustment will not be deleted. */
12127 emit_insn (gen_prologue_use (stack_pointer_rtx));
12129 if (current_function_profile || TARGET_NO_SCHED_PRO)
12130 emit_insn (gen_blockage ());
12133 static void
12134 thumb_output_function_prologue (FILE *f, HOST_WIDE_INT size ATTRIBUTE_UNUSED)
12136 int live_regs_mask = 0;
12137 int high_regs_pushed = 0;
12138 int regno;
12140 if (IS_NAKED (arm_current_func_type ()))
12141 return;
12143 if (is_called_in_ARM_mode (current_function_decl))
12145 const char * name;
12147 if (GET_CODE (DECL_RTL (current_function_decl)) != MEM)
12148 abort ();
12149 if (GET_CODE (XEXP (DECL_RTL (current_function_decl), 0)) != SYMBOL_REF)
12150 abort ();
12151 name = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
12153 /* Generate code sequence to switch us into Thumb mode. */
12154 /* The .code 32 directive has already been emitted by
12155 ASM_DECLARE_FUNCTION_NAME. */
12156 asm_fprintf (f, "\torr\t%r, %r, #1\n", IP_REGNUM, PC_REGNUM);
12157 asm_fprintf (f, "\tbx\t%r\n", IP_REGNUM);
12159 /* Generate a label, so that the debugger will notice the
12160 change in instruction sets. This label is also used by
12161 the assembler to bypass the ARM code when this function
12162 is called from a Thumb encoded function elsewhere in the
12163 same file. Hence the definition of STUB_NAME here must
12164 agree with the definition in gas/config/tc-arm.c */
12166 #define STUB_NAME ".real_start_of"
12168 fprintf (f, "\t.code\t16\n");
12169 #ifdef ARM_PE
12170 if (arm_dllexport_name_p (name))
12171 name = arm_strip_name_encoding (name);
12172 #endif
12173 asm_fprintf (f, "\t.globl %s%U%s\n", STUB_NAME, name);
12174 fprintf (f, "\t.thumb_func\n");
12175 asm_fprintf (f, "%s%U%s:\n", STUB_NAME, name);
12178 if (current_function_pretend_args_size)
12180 if (cfun->machine->uses_anonymous_args)
12182 int num_pushes;
12184 fprintf (f, "\tpush\t{");
12186 num_pushes = ARM_NUM_INTS (current_function_pretend_args_size);
12188 for (regno = LAST_ARG_REGNUM + 1 - num_pushes;
12189 regno <= LAST_ARG_REGNUM;
12190 regno++)
12191 asm_fprintf (f, "%r%s", regno,
12192 regno == LAST_ARG_REGNUM ? "" : ", ");
12194 fprintf (f, "}\n");
12196 else
12197 asm_fprintf (f, "\tsub\t%r, %r, #%d\n",
12198 SP_REGNUM, SP_REGNUM,
12199 current_function_pretend_args_size);
12202 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
12203 if (THUMB_REG_PUSHED_P (regno))
12204 live_regs_mask |= 1 << regno;
12206 if (live_regs_mask || !leaf_function_p () || thumb_far_jump_used_p (1))
12207 live_regs_mask |= 1 << LR_REGNUM;
12209 if (TARGET_BACKTRACE)
12211 int offset;
12212 int work_register = 0;
12213 int wr;
12215 /* We have been asked to create a stack backtrace structure.
12216 The code looks like this:
12218 0 .align 2
12219 0 func:
12220 0 sub SP, #16 Reserve space for 4 registers.
12221 2 push {R7} Get a work register.
12222 4 add R7, SP, #20 Get the stack pointer before the push.
12223 6 str R7, [SP, #8] Store the stack pointer (before reserving the space).
12224 8 mov R7, PC Get hold of the start of this code plus 12.
12225 10 str R7, [SP, #16] Store it.
12226 12 mov R7, FP Get hold of the current frame pointer.
12227 14 str R7, [SP, #4] Store it.
12228 16 mov R7, LR Get hold of the current return address.
12229 18 str R7, [SP, #12] Store it.
12230 20 add R7, SP, #16 Point at the start of the backtrace structure.
12231 22 mov FP, R7 Put this value into the frame pointer. */
12233 if ((live_regs_mask & 0xFF) == 0)
12235 /* See if the a4 register is free. */
12237 if (regs_ever_live [LAST_ARG_REGNUM] == 0)
12238 work_register = LAST_ARG_REGNUM;
12239 else /* We must push a register of our own */
12240 live_regs_mask |= (1 << LAST_LO_REGNUM);
12243 if (work_register == 0)
12245 /* Select a register from the list that will be pushed to
12246 use as our work register. */
12247 for (work_register = (LAST_LO_REGNUM + 1); work_register--;)
12248 if ((1 << work_register) & live_regs_mask)
12249 break;
12252 asm_fprintf
12253 (f, "\tsub\t%r, %r, #16\t%@ Create stack backtrace structure\n",
12254 SP_REGNUM, SP_REGNUM);
12256 if (live_regs_mask)
12257 thumb_pushpop (f, live_regs_mask, 1);
12259 for (offset = 0, wr = 1 << 15; wr != 0; wr >>= 1)
12260 if (wr & live_regs_mask)
12261 offset += 4;
12263 asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
12264 offset + 16 + current_function_pretend_args_size);
12266 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
12267 offset + 4);
12269 /* Make sure that the instruction fetching the PC is in the right place
12270 to calculate "start of backtrace creation code + 12". */
12271 if (live_regs_mask)
12273 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
12274 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
12275 offset + 12);
12276 asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
12277 ARM_HARD_FRAME_POINTER_REGNUM);
12278 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
12279 offset);
12281 else
12283 asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
12284 ARM_HARD_FRAME_POINTER_REGNUM);
12285 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
12286 offset);
12287 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
12288 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
12289 offset + 12);
12292 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, LR_REGNUM);
12293 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
12294 offset + 8);
12295 asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
12296 offset + 12);
12297 asm_fprintf (f, "\tmov\t%r, %r\t\t%@ Backtrace structure created\n",
12298 ARM_HARD_FRAME_POINTER_REGNUM, work_register);
12300 else if (live_regs_mask)
12301 thumb_pushpop (f, live_regs_mask, 1);
12303 for (regno = 8; regno < 13; regno++)
12304 if (THUMB_REG_PUSHED_P (regno))
12305 high_regs_pushed++;
12307 if (high_regs_pushed)
12309 int pushable_regs = 0;
12310 int mask = live_regs_mask & 0xff;
12311 int next_hi_reg;
12313 for (next_hi_reg = 12; next_hi_reg > LAST_LO_REGNUM; next_hi_reg--)
12314 if (THUMB_REG_PUSHED_P (next_hi_reg))
12315 break;
12317 pushable_regs = mask;
12319 if (pushable_regs == 0)
12321 /* Desperation time -- this probably will never happen. */
12322 if (THUMB_REG_PUSHED_P (LAST_ARG_REGNUM))
12323 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, LAST_ARG_REGNUM);
12324 mask = 1 << LAST_ARG_REGNUM;
12327 while (high_regs_pushed > 0)
12329 for (regno = LAST_LO_REGNUM; regno >= 0; regno--)
12331 if (mask & (1 << regno))
12333 asm_fprintf (f, "\tmov\t%r, %r\n", regno, next_hi_reg);
12335 high_regs_pushed--;
12337 if (high_regs_pushed)
12339 for (next_hi_reg--; next_hi_reg > LAST_LO_REGNUM;
12340 next_hi_reg--)
12341 if (THUMB_REG_PUSHED_P (next_hi_reg))
12342 break;
12344 else
12346 mask &= ~((1 << regno) - 1);
12347 break;
12352 thumb_pushpop (f, mask, 1);
12355 if (pushable_regs == 0
12356 && (THUMB_REG_PUSHED_P (LAST_ARG_REGNUM)))
12357 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
12361 /* Handle the case of a double word load into a low register from
12362 a computed memory address. The computed address may involve a
12363 register which is overwritten by the load. */
12364 const char *
12365 thumb_load_double_from_address (rtx *operands)
12367 rtx addr;
12368 rtx base;
12369 rtx offset;
12370 rtx arg1;
12371 rtx arg2;
12373 if (GET_CODE (operands[0]) != REG)
12374 abort ();
12376 if (GET_CODE (operands[1]) != MEM)
12377 abort ();
12379 /* Get the memory address. */
12380 addr = XEXP (operands[1], 0);
12382 /* Work out how the memory address is computed. */
12383 switch (GET_CODE (addr))
12385 case REG:
12386 operands[2] = gen_rtx (MEM, SImode,
12387 plus_constant (XEXP (operands[1], 0), 4));
12389 if (REGNO (operands[0]) == REGNO (addr))
12391 output_asm_insn ("ldr\t%H0, %2", operands);
12392 output_asm_insn ("ldr\t%0, %1", operands);
12394 else
12396 output_asm_insn ("ldr\t%0, %1", operands);
12397 output_asm_insn ("ldr\t%H0, %2", operands);
12399 break;
12401 case CONST:
12402 /* Compute <address> + 4 for the high order load. */
12403 operands[2] = gen_rtx (MEM, SImode,
12404 plus_constant (XEXP (operands[1], 0), 4));
12406 output_asm_insn ("ldr\t%0, %1", operands);
12407 output_asm_insn ("ldr\t%H0, %2", operands);
12408 break;
12410 case PLUS:
12411 arg1 = XEXP (addr, 0);
12412 arg2 = XEXP (addr, 1);
12414 if (CONSTANT_P (arg1))
12415 base = arg2, offset = arg1;
12416 else
12417 base = arg1, offset = arg2;
12419 if (GET_CODE (base) != REG)
12420 abort ();
12422 /* Catch the case of <address> = <reg> + <reg> */
12423 if (GET_CODE (offset) == REG)
12425 int reg_offset = REGNO (offset);
12426 int reg_base = REGNO (base);
12427 int reg_dest = REGNO (operands[0]);
12429 /* Add the base and offset registers together into the
12430 higher destination register. */
12431 asm_fprintf (asm_out_file, "\tadd\t%r, %r, %r",
12432 reg_dest + 1, reg_base, reg_offset);
12434 /* Load the lower destination register from the address in
12435 the higher destination register. */
12436 asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #0]",
12437 reg_dest, reg_dest + 1);
12439 /* Load the higher destination register from its own address
12440 plus 4. */
12441 asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #4]",
12442 reg_dest + 1, reg_dest + 1);
12444 else
12446 /* Compute <address> + 4 for the high order load. */
12447 operands[2] = gen_rtx (MEM, SImode,
12448 plus_constant (XEXP (operands[1], 0), 4));
12450 /* If the computed address is held in the low order register
12451 then load the high order register first, otherwise always
12452 load the low order register first. */
12453 if (REGNO (operands[0]) == REGNO (base))
12455 output_asm_insn ("ldr\t%H0, %2", operands);
12456 output_asm_insn ("ldr\t%0, %1", operands);
12458 else
12460 output_asm_insn ("ldr\t%0, %1", operands);
12461 output_asm_insn ("ldr\t%H0, %2", operands);
12464 break;
12466 case LABEL_REF:
12467 /* With no registers to worry about we can just load the value
12468 directly. */
12469 operands[2] = gen_rtx (MEM, SImode,
12470 plus_constant (XEXP (operands[1], 0), 4));
12472 output_asm_insn ("ldr\t%H0, %2", operands);
12473 output_asm_insn ("ldr\t%0, %1", operands);
12474 break;
12476 default:
12477 abort ();
12478 break;
12481 return "";
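/* Editorial example of the overwrite problem handled above: loading
   the pair r0:r1 from [r0] must fetch the high word first,

     ldr  r1, [r0, #4]   @ base register still intact
     ldr  r0, [r0]       @ base may now be clobbered

   whereas loading r2:r3 from [r0] can use the natural order.  */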
12484 const char *
12485 thumb_output_move_mem_multiple (int n, rtx *operands)
12487 rtx tmp;
12489 switch (n)
12491 case 2:
12492 if (REGNO (operands[4]) > REGNO (operands[5]))
12494 tmp = operands[4];
12495 operands[4] = operands[5];
12496 operands[5] = tmp;
12498 output_asm_insn ("ldmia\t%1!, {%4, %5}", operands);
12499 output_asm_insn ("stmia\t%0!, {%4, %5}", operands);
12500 break;
12502 case 3:
12503 if (REGNO (operands[4]) > REGNO (operands[5]))
12505 tmp = operands[4];
12506 operands[4] = operands[5];
12507 operands[5] = tmp;
12509 if (REGNO (operands[5]) > REGNO (operands[6]))
12511 tmp = operands[5];
12512 operands[5] = operands[6];
12513 operands[6] = tmp;
12515 if (REGNO (operands[4]) > REGNO (operands[5]))
12517 tmp = operands[4];
12518 operands[4] = operands[5];
12519 operands[5] = tmp;
12522 output_asm_insn ("ldmia\t%1!, {%4, %5, %6}", operands);
12523 output_asm_insn ("stmia\t%0!, {%4, %5, %6}", operands);
12524 break;
12526 default:
12527 abort ();
12530 return "";
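/* Editorial note: the conditional swaps above are a small bubble
   sort, ensuring the ldmia/stmia register lists are in ascending
   order as the instructions require; e.g. scratch registers r5, r3,
   r4 in operands[4..6] are reordered to r3, r4, r5 before output.  */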
12533 /* Routines for generating rtl. */
12534 void
12535 thumb_expand_movstrqi (rtx *operands)
12537 rtx out = copy_to_mode_reg (SImode, XEXP (operands[0], 0));
12538 rtx in = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
12539 HOST_WIDE_INT len = INTVAL (operands[2]);
12540 HOST_WIDE_INT offset = 0;
12542 while (len >= 12)
12544 emit_insn (gen_movmem12b (out, in, out, in));
12545 len -= 12;
12548 if (len >= 8)
12550 emit_insn (gen_movmem8b (out, in, out, in));
12551 len -= 8;
12554 if (len >= 4)
12556 rtx reg = gen_reg_rtx (SImode);
12557 emit_insn (gen_movsi (reg, gen_rtx (MEM, SImode, in)));
12558 emit_insn (gen_movsi (gen_rtx (MEM, SImode, out), reg));
12559 len -= 4;
12560 offset += 4;
12563 if (len >= 2)
12565 rtx reg = gen_reg_rtx (HImode);
12566 emit_insn (gen_movhi (reg, gen_rtx (MEM, HImode,
12567 plus_constant (in, offset))));
12568 emit_insn (gen_movhi (gen_rtx (MEM, HImode, plus_constant (out, offset)),
12569 reg));
12570 len -= 2;
12571 offset += 2;
12574 if (len)
12576 rtx reg = gen_reg_rtx (QImode);
12577 emit_insn (gen_movqi (reg, gen_rtx (MEM, QImode,
12578 plus_constant (in, offset))));
12579 emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (out, offset)),
12580 reg));
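/* Editorial example: a 23-byte copy is decomposed by the code above
   into one 12-byte ldmia/stmia block, one 8-byte block, one halfword
   move and one byte move (12 + 8 + 2 + 1 = 23); OUT and IN are
   auto-incremented by the block moves, while the scalar tail uses
   explicit offsets.  */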
12584 int
12585 thumb_cmp_operand (rtx op, enum machine_mode mode)
12587 return ((GET_CODE (op) == CONST_INT
12588 && (unsigned HOST_WIDE_INT) (INTVAL (op)) < 256)
12589 || register_operand (op, mode));
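/* Editorial examples: (const_int 255) and anything accepted by
   register_operand satisfy this predicate, while (const_int 256) and
   (const_int -1) do not, matching the unsigned 8-bit immediate range
   of the Thumb CMP instruction.  */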
12592 static const char *
12593 thumb_condition_code (rtx x, int invert)
12595 static const char * const conds[] =
12597 "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
12598 "hi", "ls", "ge", "lt", "gt", "le"
12600 int val;
12602 switch (GET_CODE (x))
12604 case EQ: val = 0; break;
12605 case NE: val = 1; break;
12606 case GEU: val = 2; break;
12607 case LTU: val = 3; break;
12608 case GTU: val = 8; break;
12609 case LEU: val = 9; break;
12610 case GE: val = 10; break;
12611 case LT: val = 11; break;
12612 case GT: val = 12; break;
12613 case LE: val = 13; break;
12614 default:
12615 abort ();
12618 return conds[val ^ invert];
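/* Editorial example: for (geu x y) this returns "cs" (val == 2), and
   with INVERT == 1 the XOR selects the complementary condition "cc".
   The table is laid out so each condition's inverse is its odd/even
   partner.  */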
12621 /* Handle storing a half-word to memory during reload. */
12622 void
12623 thumb_reload_out_hi (rtx *operands)
12625 emit_insn (gen_thumb_movhi_clobber (operands[0], operands[1], operands[2]));
12628 /* Handle reading a half-word from memory during reload. */
12629 void
12630 thumb_reload_in_hi (rtx *operands ATTRIBUTE_UNUSED)
12632 abort ();
12635 /* Return the length of a function name prefix
12636 that starts with the character 'c'. */
12637 static int
12638 arm_get_strip_length (int c)
12640 switch (c)
12642 ARM_NAME_ENCODING_LENGTHS
12643 default: return 0;
12647 /* Return a pointer to a function's name with any
12648 and all prefix encodings stripped from it. */
12649 const char *
12650 arm_strip_name_encoding (const char *name)
12652 int skip;
12654 while ((skip = arm_get_strip_length (* name)))
12655 name += skip;
12657 return name;
12660 /* If there is a '*' anywhere in the name's prefix, then
12661 emit the stripped name verbatim, otherwise prepend an
12662 underscore if leading underscores are being used. */
12663 void
12664 arm_asm_output_labelref (FILE *stream, const char *name)
12666 int skip;
12667 int verbatim = 0;
12669 while ((skip = arm_get_strip_length (* name)))
12671 verbatim |= (*name == '*');
12672 name += skip;
12675 if (verbatim)
12676 fputs (name, stream);
12677 else
12678 asm_fprintf (stream, "%U%s", name);
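/* Editorial examples: an encoded name such as "*foo" is emitted
   verbatim as "foo", while a plain "bar" receives the user label
   prefix via %U (e.g. "_bar" on targets that use a leading
   underscore).  */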
12681 rtx aof_pic_label;
12683 #ifdef AOF_ASSEMBLER
12684 /* Special functions only needed when producing AOF syntax assembler. */
12686 struct pic_chain
12688 struct pic_chain * next;
12689 const char * symname;
12692 static struct pic_chain * aof_pic_chain = NULL;
12694 rtx
12695 aof_pic_entry (rtx x)
12697 struct pic_chain ** chainp;
12698 int offset;
12700 if (aof_pic_label == NULL_RTX)
12702 aof_pic_label = gen_rtx_SYMBOL_REF (Pmode, "x$adcons");
12705 for (offset = 0, chainp = &aof_pic_chain; *chainp;
12706 offset += 4, chainp = &(*chainp)->next)
12707 if ((*chainp)->symname == XSTR (x, 0))
12708 return plus_constant (aof_pic_label, offset);
12710 *chainp = (struct pic_chain *) xmalloc (sizeof (struct pic_chain));
12711 (*chainp)->next = NULL;
12712 (*chainp)->symname = XSTR (x, 0);
12713 return plus_constant (aof_pic_label, offset);
12716 void
12717 aof_dump_pic_table (FILE *f)
12719 struct pic_chain * chain;
12721 if (aof_pic_chain == NULL)
12722 return;
12724 asm_fprintf (f, "\tAREA |%r$$adcons|, BASED %r\n",
12725 PIC_OFFSET_TABLE_REGNUM,
12726 PIC_OFFSET_TABLE_REGNUM);
12727 fputs ("|x$adcons|\n", f);
12729 for (chain = aof_pic_chain; chain; chain = chain->next)
12731 fputs ("\tDCD\t", f);
12732 assemble_name (f, chain->symname);
12733 fputs ("\n", f);
12737 int arm_text_section_count = 1;
12739 char *
12740 aof_text_section (void)
12742 static char buf[100];
12743 sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
12744 arm_text_section_count++);
12745 if (flag_pic)
12746 strcat (buf, ", PIC, REENTRANT");
12747 return buf;
12750 static int arm_data_section_count = 1;
12752 char *
12753 aof_data_section (void)
12755 static char buf[100];
12756 sprintf (buf, "\tAREA |C$$data%d|, DATA", arm_data_section_count++);
12757 return buf;
12760 /* The AOF assembler is religiously strict about declarations of
12761 imported and exported symbols, so that it is impossible to declare
12762 a function as imported near the beginning of the file, and then to
12763 export it later on. It is, however, possible to delay the decision
12764 until all the functions in the file have been compiled. To get
12765 around this, we maintain a list of the imports and exports, and
12766 delete from it any that are subsequently defined. At the end of
12767 compilation we spit the remainder of the list out before the END
12768 directive. */
12770 struct import
12772 struct import * next;
12773 const char * name;
12776 static struct import * imports_list = NULL;
12778 void
12779 aof_add_import (const char *name)
12781 struct import * new;
12783 for (new = imports_list; new; new = new->next)
12784 if (new->name == name)
12785 return;
12787 new = (struct import *) xmalloc (sizeof (struct import));
12788 new->next = imports_list;
12789 imports_list = new;
12790 new->name = name;
12793 void
12794 aof_delete_import (const char *name)
12796 struct import ** old;
12798 for (old = &imports_list; *old; old = & (*old)->next)
12800 if ((*old)->name == name)
12802 *old = (*old)->next;
12803 return;
12808 int arm_main_function = 0;
12810 static void
12811 aof_dump_imports (FILE *f)
12813 /* The AOF assembler needs this to cause the startup code to be extracted
12814 from the library.  Bringing in __main causes the whole thing to work
12815 automagically. */
12816 if (arm_main_function)
12818 text_section ();
12819 fputs ("\tIMPORT __main\n", f);
12820 fputs ("\tDCD __main\n", f);
12823 /* Now dump the remaining imports. */
12824 while (imports_list)
12826 fprintf (f, "\tIMPORT\t");
12827 assemble_name (f, imports_list->name);
12828 fputc ('\n', f);
12829 imports_list = imports_list->next;
12833 static void
12834 aof_globalize_label (FILE *stream, const char *name)
12836 default_globalize_label (stream, name);
12837 if (! strcmp (name, "main"))
12838 arm_main_function = 1;
12841 static void
12842 aof_file_end (void)
12844 if (flag_pic)
12845 aof_dump_pic_table (asm_out_file);
12846 aof_dump_imports (asm_out_file);
12847 fputs ("\tEND\n", asm_out_file);
12849 #endif /* AOF_ASSEMBLER */
12851 #ifdef OBJECT_FORMAT_ELF
12852 /* Switch to an arbitrary section NAME with attributes as specified
12853 by FLAGS. ALIGN specifies any known alignment requirements for
12854 the section; 0 if the default should be used.
12856 Differs from the default elf version only in the prefix character
12857 used before the section type. */
12859 static void
12860 arm_elf_asm_named_section (const char *name, unsigned int flags)
12862 char flagchars[10], *f = flagchars;
12864 if (! named_section_first_declaration (name))
12866 fprintf (asm_out_file, "\t.section\t%s\n", name);
12867 return;
12870 if (!(flags & SECTION_DEBUG))
12871 *f++ = 'a';
12872 if (flags & SECTION_WRITE)
12873 *f++ = 'w';
12874 if (flags & SECTION_CODE)
12875 *f++ = 'x';
12876 if (flags & SECTION_SMALL)
12877 *f++ = 's';
12878 if (flags & SECTION_MERGE)
12879 *f++ = 'M';
12880 if (flags & SECTION_STRINGS)
12881 *f++ = 'S';
12882 if (flags & SECTION_TLS)
12883 *f++ = 'T';
12884 *f = '\0';
12886 fprintf (asm_out_file, "\t.section\t%s,\"%s\"", name, flagchars);
12888 if (!(flags & SECTION_NOTYPE))
12890 const char *type;
12892 if (flags & SECTION_BSS)
12893 type = "nobits";
12894 else
12895 type = "progbits";
12897 fprintf (asm_out_file, ",%%%s", type);
12899 if (flags & SECTION_ENTSIZE)
12900 fprintf (asm_out_file, ",%d", flags & SECTION_ENTSIZE);
12903 putc ('\n', asm_out_file);
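/* Editorial example: a writable, allocated data section ".foo" is
   emitted on its first declaration as

     .section .foo,"aw",%progbits

   the '%' before the type, rather than the default '@', being the
   ARM-specific difference noted above.  */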
12905 #endif
12907 #ifndef ARM_PE
12908 /* Symbols in the text segment can be accessed without indirecting via the
12909 constant pool; it may take an extra binary operation, but this is still
12910 faster than indirecting via memory. Don't do this when not optimizing,
12911 since we won't be calculating all of the offsets necessary to do this
12912 simplification. */
12914 static void
12915 arm_encode_section_info (tree decl, rtx rtl, int first)
12917 /* This doesn't work with AOF syntax, since the string table may be in
12918 a different AREA. */
12919 #ifndef AOF_ASSEMBLER
12920 if (optimize > 0 && TREE_CONSTANT (decl)
12921 && (!flag_writable_strings || TREE_CODE (decl) != STRING_CST))
12922 SYMBOL_REF_FLAG (XEXP (rtl, 0)) = 1;
12923 #endif
12925 /* If we are referencing a function that is weak then encode a long call
12926 flag in the function name, otherwise if the function is static or
12927 known to be defined in this file then encode a short call flag.  */
12928 if (first && TREE_CODE_CLASS (TREE_CODE (decl)) == 'd')
12930 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_WEAK (decl))
12931 arm_encode_call_attribute (decl, LONG_CALL_FLAG_CHAR);
12932 else if (! TREE_PUBLIC (decl))
12933 arm_encode_call_attribute (decl, SHORT_CALL_FLAG_CHAR);
12936 #endif /* !ARM_PE */
12938 static void
12939 arm_internal_label (FILE *stream, const char *prefix, unsigned long labelno)
12941 if (arm_ccfsm_state == 3 && (unsigned) arm_target_label == labelno
12942 && !strcmp (prefix, "L"))
12944 arm_ccfsm_state = 0;
12945 arm_target_insn = NULL;
12947 default_internal_label (stream, prefix, labelno);
12950 /* Output code to add DELTA to the first argument, and then jump
12951 to FUNCTION. Used for C++ multiple inheritance. */
12952 static void
12953 arm_output_mi_thunk (FILE *file, tree thunk ATTRIBUTE_UNUSED,
12954 HOST_WIDE_INT delta,
12955 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
12956 tree function)
12958 int mi_delta = delta;
12959 const char *const mi_op = mi_delta < 0 ? "sub" : "add";
12960 int shift = 0;
12961 int this_regno = (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)))
12962 ? 1 : 0);
12963 if (mi_delta < 0)
12964 mi_delta = - mi_delta;
12965 while (mi_delta != 0)
12967 if ((mi_delta & (3 << shift)) == 0)
12968 shift += 2;
12969 else
12971 asm_fprintf (file, "\t%s\t%r, %r, #%d\n",
12972 mi_op, this_regno, this_regno,
12973 mi_delta & (0xff << shift));
12974 mi_delta &= ~(0xff << shift);
12975 shift += 8;
12978 fputs ("\tb\t", file);
12979 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
12980 if (NEED_PLT_RELOC)
12981 fputs ("(PLT)", file);
12982 fputc ('\n', file);
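/* Editorial example: for DELTA == 0x10004 and a non-aggregate return
   (this_regno == 0) the loop above emits

     add  r0, r0, #4
     add  r0, r0, #65536

   splitting the delta into 8-bit chunks at even bit positions, each
   of which is a valid ARM data-processing immediate.  */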
12985 int
12986 arm_emit_vector_const (FILE *file, rtx x)
12990 int i;
12991 const char * pattern;
12993 if (GET_CODE (x) != CONST_VECTOR)
12994 abort ();
12996 switch (GET_MODE (x))
12998 case V2SImode: pattern = "%08x"; break;
12999 case V4HImode: pattern = "%04x"; break;
13000 case V8QImode: pattern = "%02x"; break;
13001 default: abort ();
13004 fprintf (file, "0x");
13005 for (i = CONST_VECTOR_NUNITS (x); i--;)
13007 rtx element;
13009 element = CONST_VECTOR_ELT (x, i);
13010 fprintf (file, pattern, INTVAL (element));
13013 return 1;
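/* Editorial example: a V4HImode CONST_VECTOR holding the elements 1,
   2, 3, 4 prints as "0x0004000300020001": the loop walks the element
   indices downwards, so the most significant element comes first.  */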
13016 const char *
13017 arm_output_load_gr (rtx *operands)
13020 rtx reg;
13021 rtx offset;
13022 rtx wcgr;
13023 rtx sum;
13025 if (GET_CODE (operands [1]) != MEM
13026 || GET_CODE (sum = XEXP (operands [1], 0)) != PLUS
13027 || GET_CODE (reg = XEXP (sum, 0)) != REG
13028 || GET_CODE (offset = XEXP (sum, 1)) != CONST_INT
13029 || ((INTVAL (offset) < 1024) && (INTVAL (offset) > -1024)))
13030 return "wldrw%?\t%0, %1";
13032 /* Fix up an out-of-range load of a GR register. */
13033 output_asm_insn ("str%?\t%0, [sp, #-4]!\t@ Start of GR load expansion", & reg);
13034 wcgr = operands[0];
13035 operands[0] = reg;
13036 output_asm_insn ("ldr%?\t%0, %1", operands);
13038 operands[0] = wcgr;
13039 operands[1] = reg;
13040 output_asm_insn ("tmcr%?\t%0, %1", operands);
13041 output_asm_insn ("ldr%?\t%0, [sp], #4\t@ End of GR load expansion", & reg);
13043 return "";
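/* Editorial note: the in-range test above means wldrw handles offsets
   of magnitude less than 1024 directly; for, say, [r1, #2048] the
   expansion spills the base register, loads the value with a plain
   LDR, transfers it to the wCGR register with TMCR, and then restores
   the spilled base from the stack.  */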