/* Output routines for GCC for ARM.
   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002
   Free Software Foundation, Inc.
   Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
   and Martin Simmons (@harleqn.co.uk).
   More major hacks by Richard Earnshaw (rearnsha@arm.com).

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "reload.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "toplev.h"
#include "recog.h"
#include "ggc.h"
#include "except.h"
#include "c-pragma.h"
#include "integrate.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"
/* Forward definitions of types.  */
typedef struct minipool_node    Mnode;
typedef struct minipool_fixup   Mfix;

/* In order to improve the layout of the prototypes below
   some short type abbreviations are defined here.  */
#define Hint    HOST_WIDE_INT
#define Mmode   enum machine_mode
#define Ulong   unsigned long
#define Ccstar  const char *
const struct attribute_spec arm_attribute_table[];

/* Forward function declarations.  */
static void arm_add_gc_roots PARAMS ((void));
static int arm_gen_constant PARAMS ((enum rtx_code, Mmode, Hint, rtx, rtx, int, int));
static unsigned bit_count PARAMS ((Ulong));
static int arm_address_register_rtx_p PARAMS ((rtx, int));
static int arm_legitimate_index_p PARAMS ((enum machine_mode, rtx, int));
static int thumb_base_register_rtx_p PARAMS ((rtx, enum machine_mode, int));
inline static int thumb_index_register_rtx_p PARAMS ((rtx, int));
static int const_ok_for_op PARAMS ((Hint, enum rtx_code));
static int eliminate_lr2ip PARAMS ((rtx *));
static rtx emit_multi_reg_push PARAMS ((int));
static rtx emit_sfm PARAMS ((int, int));
#ifndef AOF_ASSEMBLER
static bool arm_assemble_integer PARAMS ((rtx, unsigned int, int));
#endif
static Ccstar fp_const_from_val PARAMS ((REAL_VALUE_TYPE *));
static arm_cc get_arm_condition_code PARAMS ((rtx));
static void init_fpa_table PARAMS ((void));
static Hint int_log2 PARAMS ((Hint));
static rtx is_jump_table PARAMS ((rtx));
static Ccstar output_multi_immediate PARAMS ((rtx *, Ccstar, Ccstar, int, Hint));
static void print_multi_reg PARAMS ((FILE *, Ccstar, int, int));
static Mmode select_dominance_cc_mode PARAMS ((rtx, rtx, Hint));
static Ccstar shift_op PARAMS ((rtx, Hint *));
static struct machine_function * arm_init_machine_status PARAMS ((void));
static int number_of_first_bit_set PARAMS ((int));
static void replace_symbols_in_block PARAMS ((tree, rtx, rtx));
static void thumb_exit PARAMS ((FILE *, int, rtx));
static void thumb_pushpop PARAMS ((FILE *, int, int));
static Ccstar thumb_condition_code PARAMS ((rtx, int));
static rtx is_jump_table PARAMS ((rtx));
static Hint get_jump_table_size PARAMS ((rtx));
static Mnode * move_minipool_fix_forward_ref PARAMS ((Mnode *, Mnode *, Hint));
static Mnode * add_minipool_forward_ref PARAMS ((Mfix *));
static Mnode * move_minipool_fix_backward_ref PARAMS ((Mnode *, Mnode *, Hint));
static Mnode * add_minipool_backward_ref PARAMS ((Mfix *));
static void assign_minipool_offsets PARAMS ((Mfix *));
static void arm_print_value PARAMS ((FILE *, rtx));
static void dump_minipool PARAMS ((rtx));
static int arm_barrier_cost PARAMS ((rtx));
static Mfix * create_fix_barrier PARAMS ((Mfix *, Hint));
static void push_minipool_barrier PARAMS ((rtx, Hint));
static void push_minipool_fix PARAMS ((rtx, Hint, rtx *, Mmode, rtx));
static void note_invalid_constants PARAMS ((rtx, Hint));
static int current_file_function_operand PARAMS ((rtx));
static Ulong arm_compute_save_reg0_reg12_mask PARAMS ((void));
static Ulong arm_compute_save_reg_mask PARAMS ((void));
static Ulong arm_isr_value PARAMS ((tree));
static Ulong arm_compute_func_type PARAMS ((void));
static tree arm_handle_fndecl_attribute PARAMS ((tree *, tree, tree, int, bool *));
static tree arm_handle_isr_attribute PARAMS ((tree *, tree, tree, int, bool *));
static void arm_output_function_epilogue PARAMS ((FILE *, Hint));
static void arm_output_function_prologue PARAMS ((FILE *, Hint));
static void thumb_output_function_prologue PARAMS ((FILE *, Hint));
static int arm_comp_type_attributes PARAMS ((tree, tree));
static void arm_set_default_type_attributes PARAMS ((tree));
static int arm_adjust_cost PARAMS ((rtx, rtx, rtx, int));
static int count_insns_for_constant PARAMS ((HOST_WIDE_INT, int));
static int arm_get_strip_length PARAMS ((int));
static bool arm_function_ok_for_sibcall PARAMS ((tree, tree));
#ifdef OBJECT_FORMAT_ELF
static void arm_elf_asm_named_section PARAMS ((const char *, unsigned int));
#endif
#ifndef ARM_PE
static void arm_encode_section_info PARAMS ((tree, int));
#endif
#ifdef AOF_ASSEMBLER
static void aof_globalize_label PARAMS ((FILE *, const char *));
#endif
static void arm_internal_label PARAMS ((FILE *, const char *, unsigned long));
static void arm_output_mi_thunk PARAMS ((FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT, tree));

#undef Hint
#undef Mmode
#undef Ulong
#undef Ccstar
/* Initialize the GCC target structure.  */
#ifdef TARGET_DLLIMPORT_DECL_ATTRIBUTES
#undef  TARGET_MERGE_DECL_ATTRIBUTES
#define TARGET_MERGE_DECL_ATTRIBUTES merge_dllimport_decl_attributes
#endif

#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE arm_attribute_table

#ifdef AOF_ASSEMBLER
#undef  TARGET_ASM_BYTE_OP
#define TARGET_ASM_BYTE_OP "\tDCB\t"
#undef  TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\tDCW\t"
#undef  TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\tDCD\t"
#undef  TARGET_ASM_GLOBALIZE_LABEL
#define TARGET_ASM_GLOBALIZE_LABEL aof_globalize_label
#else
#undef  TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP NULL
#undef  TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER arm_assemble_integer
#endif

#undef  TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE arm_output_function_prologue

#undef  TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE arm_output_function_epilogue

#undef  TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES arm_comp_type_attributes

#undef  TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES arm_set_default_type_attributes

#undef  TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS arm_init_builtins

#undef  TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN arm_expand_builtin

#undef  TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST arm_adjust_cost

#undef  TARGET_ENCODE_SECTION_INFO
#ifdef ARM_PE
#define TARGET_ENCODE_SECTION_INFO arm_pe_encode_section_info
#else
#define TARGET_ENCODE_SECTION_INFO arm_encode_section_info
#endif

#undef  TARGET_STRIP_NAME_ENCODING
#define TARGET_STRIP_NAME_ENCODING arm_strip_name_encoding

#undef  TARGET_ASM_INTERNAL_LABEL
#define TARGET_ASM_INTERNAL_LABEL arm_internal_label

#undef  TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL arm_function_ok_for_sibcall

#undef  TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK arm_output_mi_thunk
#undef  TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall

struct gcc_target targetm = TARGET_INITIALIZER;
/* Obstack for minipool constant handling.  */
static struct obstack minipool_obstack;
static char *         minipool_startobj;

/* The maximum number of insns skipped which
   will be conditionalised if possible.  */
static int max_insns_skipped = 5;

extern FILE * asm_out_file;

/* True if we are currently building a constant table.  */
int making_const_table;

/* Define the information needed to generate branch insns.  This is
   stored from the compare operation.  */
rtx arm_compare_op0, arm_compare_op1;

/* What type of floating point are we tuning for?  */
enum floating_point_type arm_fpu;

/* What type of floating point instructions are available?  */
enum floating_point_type arm_fpu_arch;

/* What program mode is the cpu running in?  26-bit mode or 32-bit mode.  */
enum prog_mode_type arm_prgmode;

/* Set by the -mfp=... option.  */
const char * target_fp_name = NULL;

/* Used to parse -mstructure_size_boundary command line option.  */
const char * structure_size_string = NULL;
int arm_structure_size_boundary = DEFAULT_STRUCTURE_SIZE_BOUNDARY;

/* Bit values used to identify processor capabilities.  */
#define FL_CO_PROC    (1 << 0)        /* Has external co-processor bus */
#define FL_FAST_MULT  (1 << 1)        /* Fast multiply */
#define FL_MODE26     (1 << 2)        /* 26-bit mode support */
#define FL_MODE32     (1 << 3)        /* 32-bit mode support */
#define FL_ARCH4      (1 << 4)        /* Architecture rel 4 */
#define FL_ARCH5      (1 << 5)        /* Architecture rel 5 */
#define FL_THUMB      (1 << 6)        /* Thumb aware */
#define FL_LDSCHED    (1 << 7)        /* Load scheduling necessary */
#define FL_STRONG     (1 << 8)        /* StrongARM */
#define FL_ARCH5E     (1 << 9)        /* DSP extensions to v5 */
#define FL_XSCALE     (1 << 10)       /* XScale */

/* The bits in this mask specify which
   instructions we are allowed to generate.  */
static unsigned long insn_flags = 0;

/* The bits in this mask specify which instruction scheduling options should
   be used.  Note - there is an overlap with the FL_FAST_MULT.  For some
   hardware we want to be able to generate the multiply instructions, but to
   tune as if they were not present in the architecture.  */
static unsigned long tune_flags = 0;

/* The following are used in the arm.md file as equivalents to bits
   in the above two flag variables.  */

/* Nonzero if this is an "M" variant of the processor.  */
int arm_fast_multiply = 0;

/* Nonzero if this chip supports the ARM Architecture 4 extensions.  */
int arm_arch4 = 0;

/* Nonzero if this chip supports the ARM Architecture 5 extensions.  */
int arm_arch5 = 0;

/* Nonzero if this chip supports the ARM Architecture 5E extensions.  */
int arm_arch5e = 0;

/* Nonzero if this chip can benefit from load scheduling.  */
int arm_ld_sched = 0;

/* Nonzero if this chip is a StrongARM.  */
int arm_is_strong = 0;

/* Nonzero if this chip is an XScale.  */
int arm_is_xscale = 0;

/* Nonzero if this chip is an ARM6 or an ARM7.  */
int arm_is_6_or_7 = 0;

/* Nonzero if generating Thumb instructions.  */
int thumb_code = 0;

/* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
   must report the mode of the memory reference from PRINT_OPERAND to
   PRINT_OPERAND_ADDRESS.  */
enum machine_mode output_memory_reference_mode;

/* The register number to be used for the PIC offset register.  */
const char * arm_pic_register_string = NULL;
int arm_pic_register = INVALID_REGNUM;

/* Set to 1 when a return insn is output, this means that the epilogue
   is not needed.  */
int return_used_this_function;

/* Set to 1 after arm_reorg has started.  Reset to start at the start of
   the next function.  */
static int after_arm_reorg = 0;

/* The maximum number of insns to be used when loading a constant.  */
static int arm_constant_limit = 3;

/* For an explanation of these variables, see final_prescan_insn below.  */
int arm_ccfsm_state;
enum arm_cond_code arm_current_cc;
rtx arm_target_insn;
int arm_target_label;

/* The condition codes of the ARM, and the inverse function.  */
static const char * const arm_condition_codes[] =
{
  "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
  "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
};

#define streq(string1, string2) (strcmp (string1, string2) == 0)
/* Initialization code.  */

struct processors
{
  const char *const name;
  const unsigned long flags;
};

/* Not all of these give usefully different compilation alternatives,
   but there is no simple way of generalizing them.  */
static const struct processors all_cores[] =
{
  /* ARM Cores */

  {"arm2",        FL_CO_PROC | FL_MODE26 },
  {"arm250",      FL_CO_PROC | FL_MODE26 },
  {"arm3",        FL_CO_PROC | FL_MODE26 },
  {"arm6",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm60",       FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm600",      FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm610",      FL_MODE26 | FL_MODE32 },
  {"arm620",      FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  /* arm7m doesn't exist on its own, but only with D, (and I), but
     those don't alter the code, so arm7m is sometimes used.  */
  {"arm7m",       FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7d",       FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dm",      FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7di",      FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dmi",     FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm70",       FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700",      FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700i",     FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm710",      FL_MODE26 | FL_MODE32 },
  {"arm710t",     FL_MODE26 | FL_MODE32 | FL_THUMB },
  {"arm720",      FL_MODE26 | FL_MODE32 },
  {"arm720t",     FL_MODE26 | FL_MODE32 | FL_THUMB },
  {"arm740t",     FL_MODE26 | FL_MODE32 | FL_THUMB },
  {"arm710c",     FL_MODE26 | FL_MODE32 },
  {"arm7100",     FL_MODE26 | FL_MODE32 },
  {"arm7500",     FL_MODE26 | FL_MODE32 },
  /* Doesn't have an external co-proc, but does have embedded fpu.  */
  {"arm7500fe",   FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7tdmi",    FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  {"arm8",        FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm810",      FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm9",        FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm920",      FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm920t",     FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm940t",     FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm9tdmi",    FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm9e",       FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"strongarm",     FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm110",  FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm1100", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm1110", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"arm10tdmi",   FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_ARCH5 },
  {"arm1020t",    FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_ARCH5 },
  {"xscale",      FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_STRONG | FL_ARCH5 | FL_ARCH5E | FL_XSCALE },

  {NULL, 0}
};

static const struct processors all_architectures[] =
{
  /* ARM Architectures */

  { "armv2",     FL_CO_PROC | FL_MODE26 },
  { "armv2a",    FL_CO_PROC | FL_MODE26 },
  { "armv3",     FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  { "armv3m",    FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  { "armv4",     FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 },
  /* Strictly, FL_MODE26 is a permitted option for v4t, but there are no
     implementations that support it, so we will leave it out for now.  */
  { "armv4t",    FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  { "armv5",     FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
  { "armv5t",    FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
  { "armv5te",   FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 | FL_ARCH5E },
  { NULL, 0 }
};

/* This is a magic structure.  The 'string' field is magically filled in
   with a pointer to the value specified by the user on the command line
   assuming that the user has specified such a value.  */

struct arm_cpu_select arm_select[] =
{
  /* string       name            processors  */
  { NULL,       "-mcpu=",       all_cores  },
  { NULL,       "-march=",      all_architectures },
  { NULL,       "-mtune=",      all_cores }
};
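
/* So, for example, "-mcpu=arm710 -mtune=strongarm110" takes insn_flags
   from the arm710 entry of all_cores but tune_flags from the
   strongarm110 entry: code is generated for the ARM710 but scheduled
   as for the StrongARM110.  */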
/* Return the number of bits set in VALUE.  */
static unsigned
bit_count (value)
     unsigned long value;
{
  unsigned long count = 0;

  while (value)
    {
      count++;
      value &= value - 1;  /* Clear the least-significant set bit.  */
    }

  return count;
}
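
/* The loop uses the classic VALUE & (VALUE - 1) trick, which clears one
   set bit per iteration; for example, with VALUE == 0x29 (binary
   101001) the successive values are 0x28, 0x20 and 0, so three
   iterations give bit_count == 3.  */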
/* Fix up any incompatible options that the user has specified.
   This has now turned into a maze.  */
void
arm_override_options ()
{
  unsigned i;

  /* Set up the flags based on the cpu/architecture selected by the user.  */
  for (i = ARRAY_SIZE (arm_select); i--;)
    {
      struct arm_cpu_select * ptr = arm_select + i;

      if (ptr->string != NULL && ptr->string[0] != '\0')
        {
          const struct processors * sel;

          for (sel = ptr->processors; sel->name != NULL; sel++)
            if (streq (ptr->string, sel->name))
              {
                if (i == 2)
                  tune_flags = sel->flags;
                else
                  {
                    /* If we have been given an architecture and a processor
                       make sure that they are compatible.  We only generate
                       a warning though, and we prefer the CPU over the
                       architecture.  */
                    if (insn_flags != 0 && (insn_flags ^ sel->flags))
                      warning ("switch -mcpu=%s conflicts with -march= switch",
                               ptr->string);

                    insn_flags = sel->flags;
                  }

                break;
              }

          if (sel->name == NULL)
            error ("bad value (%s) for %s switch", ptr->string, ptr->name);
        }
    }

  /* If the user did not specify a processor, choose one for them.  */
  if (insn_flags == 0)
    {
      const struct processors * sel;
      unsigned int sought;
      static const struct cpu_default
      {
        const int cpu;
        const char *const name;
      }
      cpu_defaults[] =
      {
        { TARGET_CPU_arm2,      "arm2" },
        { TARGET_CPU_arm6,      "arm6" },
        { TARGET_CPU_arm610,    "arm610" },
        { TARGET_CPU_arm710,    "arm710" },
        { TARGET_CPU_arm7m,     "arm7m" },
        { TARGET_CPU_arm7500fe, "arm7500fe" },
        { TARGET_CPU_arm7tdmi,  "arm7tdmi" },
        { TARGET_CPU_arm8,      "arm8" },
        { TARGET_CPU_arm810,    "arm810" },
        { TARGET_CPU_arm9,      "arm9" },
        { TARGET_CPU_strongarm, "strongarm" },
        { TARGET_CPU_xscale,    "xscale" },
        { TARGET_CPU_generic,   "arm" },
        { 0, 0 }
      };
      const struct cpu_default * def;

      /* Find the default.  */
      for (def = cpu_defaults; def->name; def++)
        if (def->cpu == TARGET_CPU_DEFAULT)
          break;

      /* Make sure we found the default CPU.  */
      if (def->name == NULL)
        abort ();

      /* Find the default CPU's flags.  */
      for (sel = all_cores; sel->name != NULL; sel++)
        if (streq (def->name, sel->name))
          break;

      if (sel->name == NULL)
        abort ();

      insn_flags = sel->flags;

      /* Now check to see if the user has specified some command line
         switches that require certain abilities from the cpu.  */
      sought = 0;

      if (TARGET_INTERWORK || TARGET_THUMB)
        {
          sought |= (FL_THUMB | FL_MODE32);

          /* Force apcs-32 to be used for interworking.  */
          target_flags |= ARM_FLAG_APCS_32;

          /* There are no ARM processors that support both APCS-26 and
             interworking.  Therefore we force FL_MODE26 to be removed
             from insn_flags here (if it was set), so that the search
             below will always be able to find a compatible processor.  */
          insn_flags &= ~FL_MODE26;
        }
      else if (!TARGET_APCS_32)
        sought |= FL_MODE26;

      if (sought != 0 && ((sought & insn_flags) != sought))
        {
          /* Try to locate a CPU type that supports all of the abilities
             of the default CPU, plus the extra abilities requested by
             the user.  */
          for (sel = all_cores; sel->name != NULL; sel++)
            if ((sel->flags & sought) == (sought | insn_flags))
              break;

          if (sel->name == NULL)
            {
              unsigned current_bit_count = 0;
              const struct processors * best_fit = NULL;

              /* Ideally we would like to issue an error message here
                 saying that it was not possible to find a CPU compatible
                 with the default CPU, but which also supports the command
                 line options specified by the programmer, and so they
                 ought to use the -mcpu=<name> command line option to
                 override the default CPU type.

                 Unfortunately this does not work with multilibbing.  We
                 need to be able to support multilibs for -mapcs-26 and for
                 -mthumb-interwork and there is no CPU that can support both
                 options.  Instead if we cannot find a cpu that has both the
                 characteristics of the default cpu and the given command line
                 options we scan the array again looking for a best match.  */
              for (sel = all_cores; sel->name != NULL; sel++)
                if ((sel->flags & sought) == sought)
                  {
                    unsigned count;

                    count = bit_count (sel->flags & insn_flags);

                    if (count >= current_bit_count)
                      {
                        best_fit = sel;
                        current_bit_count = count;
                      }
                  }

              if (best_fit == NULL)
                abort ();
              else
                sel = best_fit;
            }

          insn_flags = sel->flags;
        }
    }

  /* If tuning has not been specified, tune for whichever processor or
     architecture has been selected.  */
  if (tune_flags == 0)
    tune_flags = insn_flags;

  /* Make sure that the processor choice does not conflict with any of the
     other command line choices.  */
  if (TARGET_APCS_32 && !(insn_flags & FL_MODE32))
    {
      /* If APCS-32 was not the default then it must have been set by the
         user, so issue a warning message.  If the user has specified
         "-mapcs-32 -mcpu=arm2" then we lose here.  */
      if ((TARGET_DEFAULT & ARM_FLAG_APCS_32) == 0)
        warning ("target CPU does not support APCS-32");
      target_flags &= ~ARM_FLAG_APCS_32;
    }
  else if (!TARGET_APCS_32 && !(insn_flags & FL_MODE26))
    {
      warning ("target CPU does not support APCS-26");
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_INTERWORK && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support interworking");
      target_flags &= ~ARM_FLAG_INTERWORK;
    }

  if (TARGET_THUMB && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support THUMB instructions");
      target_flags &= ~ARM_FLAG_THUMB;
    }

  if (TARGET_APCS_FRAME && TARGET_THUMB)
    {
      /* warning ("ignoring -mapcs-frame because -mthumb was used"); */
      target_flags &= ~ARM_FLAG_APCS_FRAME;
    }

  /* TARGET_BACKTRACE calls leaf_function_p, which causes a crash if done
     from here where no function is being compiled currently.  */
  if ((target_flags & (THUMB_FLAG_LEAF_BACKTRACE | THUMB_FLAG_BACKTRACE))
      && TARGET_ARM)
    warning ("enabling backtrace support is only meaningful when compiling for the Thumb");

  if (TARGET_ARM && TARGET_CALLEE_INTERWORKING)
    warning ("enabling callee interworking support is only meaningful when compiling for the Thumb");

  if (TARGET_ARM && TARGET_CALLER_INTERWORKING)
    warning ("enabling caller interworking support is only meaningful when compiling for the Thumb");

  /* If interworking is enabled then APCS-32 must be selected as well.  */
  if (TARGET_INTERWORK)
    {
      if (!TARGET_APCS_32)
        warning ("interworking forces APCS-32 to be used");
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_APCS_STACK && !TARGET_APCS_FRAME)
    {
      warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
      target_flags |= ARM_FLAG_APCS_FRAME;
    }

  if (TARGET_POKE_FUNCTION_NAME)
    target_flags |= ARM_FLAG_APCS_FRAME;

  if (TARGET_APCS_REENT && flag_pic)
    error ("-fpic and -mapcs-reent are incompatible");

  if (TARGET_APCS_REENT)
    warning ("APCS reentrant code not supported.  Ignored");

  /* If this target is normally configured to use APCS frames, warn if they
     are turned off and debugging is turned on.  */
  if (TARGET_ARM
      && write_symbols != NO_DEBUG
      && !TARGET_APCS_FRAME
      && (TARGET_DEFAULT & ARM_FLAG_APCS_FRAME))
    warning ("-g with -mno-apcs-frame may not give sensible debugging");

  /* If stack checking is disabled, we can use r10 as the PIC register,
     which keeps r9 available.  */
  if (flag_pic)
    arm_pic_register = TARGET_APCS_STACK ? 9 : 10;

  if (TARGET_APCS_FLOAT)
    warning ("passing floating point arguments in fp regs not yet supported");

  /* Initialize boolean versions of the flags, for use in the arm.md file.  */
  arm_fast_multiply = (insn_flags & FL_FAST_MULT) != 0;
  arm_arch4         = (insn_flags & FL_ARCH4) != 0;
  arm_arch5         = (insn_flags & FL_ARCH5) != 0;
  arm_arch5e        = (insn_flags & FL_ARCH5E) != 0;
  arm_is_xscale     = (insn_flags & FL_XSCALE) != 0;

  arm_ld_sched      = (tune_flags & FL_LDSCHED) != 0;
  arm_is_strong     = (tune_flags & FL_STRONG) != 0;
  thumb_code        = (TARGET_ARM == 0);
  arm_is_6_or_7     = (((tune_flags & (FL_MODE26 | FL_MODE32))
                        && !(tune_flags & FL_ARCH4))) != 0;

  /* Default value for floating point code... if no co-processor
     bus, then schedule for emulated floating point.  Otherwise,
     assume the user has an FPA.
     Note: this does not prevent use of floating point instructions,
     -msoft-float does that.  */
  arm_fpu = (tune_flags & FL_CO_PROC) ? FP_HARD : FP_SOFT3;

  if (target_fp_name)
    {
      if (streq (target_fp_name, "2"))
        arm_fpu_arch = FP_SOFT2;
      else if (streq (target_fp_name, "3"))
        arm_fpu_arch = FP_SOFT3;
      else
        error ("invalid floating point emulation option: -mfpe-%s",
               target_fp_name);
    }
  else
    arm_fpu_arch = FP_DEFAULT;

  if (TARGET_FPE && arm_fpu != FP_HARD)
    arm_fpu = FP_SOFT2;

  /* For arm2/3 there is no need to do any scheduling if there is only
     a floating point emulator, or we are doing software floating-point.  */
  if ((TARGET_SOFT_FLOAT || arm_fpu != FP_HARD)
      && (tune_flags & FL_MODE32) == 0)
    flag_schedule_insns = flag_schedule_insns_after_reload = 0;

  arm_prgmode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;

  if (structure_size_string != NULL)
    {
      int size = strtol (structure_size_string, NULL, 0);

      if (size == 8 || size == 32)
        arm_structure_size_boundary = size;
      else
        warning ("structure size boundary can only be set to 8 or 32");
    }

  if (arm_pic_register_string != NULL)
    {
      int pic_register = decode_reg_name (arm_pic_register_string);

      if (!flag_pic)
        warning ("-mpic-register= is useless without -fpic");

      /* Prevent the user from choosing an obviously stupid PIC register.  */
      else if (pic_register < 0 || call_used_regs[pic_register]
               || pic_register == HARD_FRAME_POINTER_REGNUM
               || pic_register == STACK_POINTER_REGNUM
               || pic_register >= PC_REGNUM)
        error ("unable to use '%s' for PIC register", arm_pic_register_string);
      else
        arm_pic_register = pic_register;
    }

  if (TARGET_THUMB && flag_schedule_insns)
    {
      /* Don't warn since it's on by default in -O2.  */
      flag_schedule_insns = 0;
    }

  /* If optimizing for space, don't synthesize constants.
     For processors with load scheduling, it never costs more than 2 cycles
     to load a constant, and the load scheduler may well reduce that to 1.  */
  if (optimize_size || (tune_flags & FL_LDSCHED))
    arm_constant_limit = 1;

  if (arm_is_xscale)
    arm_constant_limit = 2;

  /* If optimizing for size, bump the number of instructions that we
     are prepared to conditionally execute (even on a StrongARM).
     Otherwise for the StrongARM, which has early execution of branches,
     a sequence that is worth skipping is shorter.  */
  if (optimize_size)
    max_insns_skipped = 6;
  else if (arm_is_strong)
    max_insns_skipped = 3;

  /* Register global variables with the garbage collector.  */
  arm_add_gc_roots ();
}
static void
arm_add_gc_roots ()
{
  gcc_obstack_init (&minipool_obstack);
  minipool_startobj = (char *) obstack_alloc (&minipool_obstack, 0);
}
/* A table of known ARM exception types.
   For use with the interrupt function attribute.  */

typedef struct
{
  const char *const arg;
  const unsigned long return_value;
}
isr_attribute_arg;

static const isr_attribute_arg isr_attribute_args [] =
{
  { "IRQ",   ARM_FT_ISR },
  { "irq",   ARM_FT_ISR },
  { "FIQ",   ARM_FT_FIQ },
  { "fiq",   ARM_FT_FIQ },
  { "ABORT", ARM_FT_ISR },
  { "abort", ARM_FT_ISR },
  { "ABORT", ARM_FT_ISR },
  { "abort", ARM_FT_ISR },
  { "UNDEF", ARM_FT_EXCEPTION },
  { "undef", ARM_FT_EXCEPTION },
  { "SWI",   ARM_FT_EXCEPTION },
  { "swi",   ARM_FT_EXCEPTION },
  { NULL,    ARM_FT_NORMAL }
};
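
/* These names are matched against the attribute's argument, so a
   handler declared as, for example,

        void ext_irq (void) __attribute__ ((interrupt ("IRQ")));

   is given the ARM_FT_ISR function type.  */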
/* Returns the (interrupt) function type of the current
   function, or ARM_FT_UNKNOWN if the type cannot be determined.  */

static unsigned long
arm_isr_value (argument)
     tree argument;
{
  const isr_attribute_arg * ptr;
  const char * arg;

  /* No argument - default to IRQ.  */
  if (argument == NULL_TREE)
    return ARM_FT_ISR;

  /* Get the value of the argument.  */
  if (TREE_VALUE (argument) == NULL_TREE
      || TREE_CODE (TREE_VALUE (argument)) != STRING_CST)
    return ARM_FT_UNKNOWN;

  arg = TREE_STRING_POINTER (TREE_VALUE (argument));

  /* Check it against the list of known arguments.  */
  for (ptr = isr_attribute_args; ptr->arg != NULL; ptr++)
    if (streq (arg, ptr->arg))
      return ptr->return_value;

  /* An unrecognized interrupt type.  */
  return ARM_FT_UNKNOWN;
}
/* Computes the type of the current function.  */

static unsigned long
arm_compute_func_type ()
{
  unsigned long type = ARM_FT_UNKNOWN;
  tree a;
  tree attr;

  if (TREE_CODE (current_function_decl) != FUNCTION_DECL)
    abort ();

  /* Decide if the current function is volatile.  Such functions
     never return, and many memory cycles can be saved by not storing
     register values that will never be needed again.  This optimization
     was added to speed up context switching in a kernel application.  */
  if (optimize > 0
      && current_function_nothrow
      && TREE_THIS_VOLATILE (current_function_decl))
    type |= ARM_FT_VOLATILE;

  if (current_function_needs_context)
    type |= ARM_FT_NESTED;

  attr = DECL_ATTRIBUTES (current_function_decl);

  a = lookup_attribute ("naked", attr);
  if (a != NULL_TREE)
    type |= ARM_FT_NAKED;

  if (cfun->machine->eh_epilogue_sp_ofs != NULL_RTX)
    type |= ARM_FT_EXCEPTION_HANDLER;
  else
    {
      a = lookup_attribute ("isr", attr);
      if (a == NULL_TREE)
        a = lookup_attribute ("interrupt", attr);

      if (a == NULL_TREE)
        type |= TARGET_INTERWORK ? ARM_FT_INTERWORKED : ARM_FT_NORMAL;
      else
        type |= arm_isr_value (TREE_VALUE (a));
    }

  return type;
}
/* Returns the type of the current function.  */

unsigned long
arm_current_func_type ()
{
  if (ARM_FUNC_TYPE (cfun->machine->func_type) == ARM_FT_UNKNOWN)
    cfun->machine->func_type = arm_compute_func_type ();

  return cfun->machine->func_type;
}
/* Return 1 if it is possible to return using a single instruction.  */

int
use_return_insn (iscond)
     int iscond;
{
  int regno;
  unsigned int func_type;
  unsigned long saved_int_regs;

  /* Never use a return instruction before reload has run.  */
  if (!reload_completed)
    return 0;

  func_type = arm_current_func_type ();

  /* Naked functions and volatile functions need special
     consideration.  */
  if (func_type & (ARM_FT_VOLATILE | ARM_FT_NAKED))
    return 0;

  /* As do variadic functions.  */
  if (current_function_pretend_args_size
      || cfun->machine->uses_anonymous_args
      /* Or if the function calls __builtin_eh_return ().  */
      || ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER
      /* Or if there is no frame pointer and there is a stack adjustment.  */
      || ((arm_get_frame_size () + current_function_outgoing_args_size != 0)
          && !frame_pointer_needed))
    return 0;

  saved_int_regs = arm_compute_save_reg_mask ();

  /* Can't be done if interworking with Thumb, and any registers have been
     stacked.  */
  if (TARGET_INTERWORK && saved_int_regs != 0)
    return 0;

  /* On StrongARM, conditional returns are expensive if they aren't
     taken and multiple registers have been stacked.  */
  if (iscond && arm_is_strong)
    {
      /* Conditional return when just the LR is stored is a simple
         conditional-load instruction, that's not expensive.  */
      if (saved_int_regs != 0 && saved_int_regs != (1 << LR_REGNUM))
        return 0;

      if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
        return 0;
    }

  /* If there are saved registers but the LR isn't saved, then we need
     two instructions for the return.  */
  if (saved_int_regs && !(saved_int_regs & (1 << LR_REGNUM)))
    return 0;

  /* Can't be done if any of the FPU regs are pushed,
     since this also requires an insn.  */
  if (TARGET_HARD_FLOAT)
    for (regno = FIRST_ARM_FP_REGNUM; regno <= LAST_ARM_FP_REGNUM; regno++)
      if (regs_ever_live[regno] && !call_used_regs[regno])
        return 0;

  return 1;
}
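
/* For example, a leaf function with no frame and no saved registers
   can return with the single instruction "mov pc, lr" (or "bx lr"
   when interworking), which is what this predicate licenses.  */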
/* Return TRUE if int I is a valid immediate ARM constant.  */

int
const_ok_for_arm (i)
     HOST_WIDE_INT i;
{
  unsigned HOST_WIDE_INT mask = ~(unsigned HOST_WIDE_INT) 0xFF;

  /* For machines with >32 bit HOST_WIDE_INT, the bits above bit 31 must
     be all zero, or all one.  */
  if ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff) != 0
      && ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff)
          != ((~(unsigned HOST_WIDE_INT) 0)
              & ~(unsigned HOST_WIDE_INT) 0xffffffff)))
    return FALSE;

  /* Fast return for 0 and powers of 2.  */
  if ((i & (i - 1)) == 0)
    return TRUE;

  do
    {
      if ((i & mask & (unsigned HOST_WIDE_INT) 0xffffffff) == 0)
        return TRUE;
      mask =
        (mask << 2) | ((mask & (unsigned HOST_WIDE_INT) 0xffffffff)
                       >> (32 - 2)) | ~(unsigned HOST_WIDE_INT) 0xffffffff;
    }
  while (mask != ~(unsigned HOST_WIDE_INT) 0xFF);

  return FALSE;
}
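
/* The rotation loop above implements the ARM rule that a
   data-processing immediate is an 8-bit value rotated right by an even
   amount: 0x000000ff, 0x00ff0000 and 0xf000000f (0xff rotated right by
   4) are all valid, whereas 0x00000101 is not, because its set bits
   cannot fit in one rotated 8-bit window.  */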
/* Return true if I is a valid constant for the operation CODE.  */
static int
const_ok_for_op (i, code)
     HOST_WIDE_INT i;
     enum rtx_code code;
{
  if (const_ok_for_arm (i))
    return 1;

  switch (code)
    {
    case PLUS:
      return const_ok_for_arm (ARM_SIGN_EXTEND (-i));

    case MINUS:         /* Should only occur with (MINUS I reg) => rsb */
    case XOR:
    case IOR:
      return 0;

    case AND:
      return const_ok_for_arm (ARM_SIGN_EXTEND (~i));

    default:
      abort ();
    }
}
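
/* For example, an AND with 0xffffff00 is acceptable even though
   0xffffff00 itself is not a valid immediate, because the insn can be
   output as a BIC with ~0xffffff00 == 0xff; likewise a PLUS of -0x80
   can be output as a SUB of 0x80.  */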
/* Emit a sequence of insns to handle a large constant.
   CODE is the code of the operation required, it can be any of SET, PLUS,
   IOR, AND, XOR, MINUS;
   MODE is the mode in which the operation is being performed;
   VAL is the integer to operate on;
   SOURCE is the other operand (a register, or a null-pointer for SET);
   SUBTARGETS means it is safe to create scratch registers if that will
   either produce a simpler sequence, or we will want to cse the values.
   Return value is the number of insns emitted.  */

int
arm_split_constant (code, mode, val, target, source, subtargets)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
{
  if (subtargets || code == SET
      || (GET_CODE (target) == REG && GET_CODE (source) == REG
          && REGNO (target) != REGNO (source)))
    {
      /* After arm_reorg has been called, we can't fix up expensive
         constants by pushing them into memory so we must synthesize
         them in-line, regardless of the cost.  This is only likely to
         be more costly on chips that have load delay slots and we are
         compiling without running the scheduler (so no splitting
         occurred before the final instruction emission).

         Ref: gcc -O1 -mcpu=strongarm gcc.c-torture/compile/980506-2.c  */
      if (!after_arm_reorg
          && (arm_gen_constant (code, mode, val, target, source, 1, 0)
              > arm_constant_limit + (code != SET)))
        {
          if (code == SET)
            {
              /* Currently SET is the only monadic value for CODE, all
                 the rest are dyadic.  */
              emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (val)));
              return 1;
            }
          else
            {
              rtx temp = subtargets ? gen_reg_rtx (mode) : target;

              emit_insn (gen_rtx_SET (VOIDmode, temp, GEN_INT (val)));
              /* For MINUS, the value is subtracted from, since we never
                 have subtraction of a constant.  */
              if (code == MINUS)
                emit_insn (gen_rtx_SET (VOIDmode, target,
                                        gen_rtx_MINUS (mode, temp, source)));
              else
                emit_insn (gen_rtx_SET (VOIDmode, target,
                                        gen_rtx (code, mode, source, temp)));
              return 2;
            }
        }
    }

  return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
}
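
/* For instance, loading 0xffff (not a valid immediate) with SET is
   typically synthesized as two data-processing insns along the lines
   of

        mov     rD, #0xff00
        add     rD, rD, #0xff

   arm_gen_constant below computes how many insns such a sequence
   needs, and arm_constant_limit bounds how many we are prepared to
   emit before spilling the constant to the literal pool.  */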
static int
count_insns_for_constant (remainder, i)
     HOST_WIDE_INT remainder;
     int i;
{
  HOST_WIDE_INT temp1;
  int num_insns = 0;
  do
    {
      int end;

      if (i <= 0)
        i += 32;
      if (remainder & (3 << (i - 2)))
        {
          end = i - 8;
          if (end < 0)
            end += 32;
          temp1 = remainder & ((0x0ff << end)
                               | ((i < end) ? (0xff >> (32 - end)) : 0));
          remainder &= ~temp1;
          num_insns++;
          i -= 6;
        }
      i -= 2;
    }
  while (remainder);
  return num_insns;
}
/* As above, but extra parameter GENERATE which, if clear, suppresses
   RTL generation.  */

static int
arm_gen_constant (code, mode, val, target, source, subtargets, generate)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
     int generate;
{
  int can_invert = 0;
  int can_negate = 0;
  int can_negate_initial = 0;
  int can_shift = 0;
  int i;
  int num_bits_set = 0;
  int set_sign_bit_copies = 0;
  int clear_sign_bit_copies = 0;
  int clear_zero_bit_copies = 0;
  int set_zero_bit_copies = 0;
  int insns = 0;
  unsigned HOST_WIDE_INT temp1, temp2;
  unsigned HOST_WIDE_INT remainder = val & 0xffffffff;

  /* Find out which operations are safe for a given CODE.  Also do a quick
     check for degenerate cases; these can occur when DImode operations
     are split.  */
  switch (code)
    {
    case SET:
      can_invert = 1;
      can_shift = 1;
      can_negate = 1;
      break;

    case PLUS:
      can_negate = 1;
      can_negate_initial = 1;
      break;

    case IOR:
      if (remainder == 0xffffffff)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    GEN_INT (ARM_SIGN_EXTEND (val))));
          return 1;
        }
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      break;

    case AND:
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, const0_rtx));
          return 1;
        }
      if (remainder == 0xffffffff)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      can_invert = 1;
      break;

    case XOR:
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      if (remainder == 0xffffffff)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_NOT (mode, source)));
          return 1;
        }

      /* We don't know how to handle this yet below.  */
      abort ();

    case MINUS:
      /* We treat MINUS as (val - source), since (source - val) is always
         passed as (source + (-val)).  */
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_NEG (mode, source)));
          return 1;
        }
      if (const_ok_for_arm (val))
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_MINUS (mode, GEN_INT (val),
                                                   source)));
          return 1;
        }
      can_negate = 1;

      break;

    default:
      abort ();
    }

  /* If we can do it in one insn get out quickly.  */
  if (const_ok_for_arm (val)
      || (can_negate_initial && const_ok_for_arm (-val))
      || (can_invert && const_ok_for_arm (~val)))
    {
      if (generate)
        emit_insn (gen_rtx_SET (VOIDmode, target,
                                (source ? gen_rtx (code, mode, source,
                                                   GEN_INT (val))
                                 : GEN_INT (val))));
      return 1;
    }

  /* Calculate a few attributes that may be useful for specific
     optimizations.  */
  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) == 0)
        clear_sign_bit_copies++;
      else
        break;
    }

  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) != 0)
        set_sign_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) == 0)
        clear_zero_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) != 0)
        set_zero_bit_copies++;
      else
        break;
    }

  switch (code)
    {
    case SET:
      /* See if we can do this by sign_extending a constant that is known
         to be negative.  This is a good way of doing it, since the shift
         may well merge into a subsequent insn.  */
      if (set_sign_bit_copies > 1)
        {
          if (const_ok_for_arm
              (temp1 = ARM_SIGN_EXTEND (remainder
                                        << (set_sign_bit_copies - 1))))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx_SET (VOIDmode, new_src,
                                          GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
          /* For an inverted constant, we will need to set the low bits,
             these will be shifted out of harm's way.  */
          temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
          if (const_ok_for_arm (~temp1))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx_SET (VOIDmode, new_src,
                                          GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
        }

      /* See if we can generate this by setting the bottom (or the top)
         16 bits, and then shifting these into the other half of the
         word.  We only look for the simplest cases, to do more would cost
         too much.  Be careful, however, not to generate this when the
         alternative would take fewer insns.  */
      if (val & 0xffff0000)
        {
          temp1 = remainder & 0xffff0000;
          temp2 = remainder & 0x0000ffff;

          /* Overlaps outside this range are best done using other methods.  */
          for (i = 9; i < 24; i++)
            {
              if ((((temp2 | (temp2 << i)) & 0xffffffff) == remainder)
                  && !const_ok_for_arm (temp2))
                {
                  rtx new_src = (subtargets
                                 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
                                 : target);
                  insns = arm_gen_constant (code, mode, temp2, new_src,
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn (gen_rtx_SET
                               (VOIDmode, target,
                                gen_rtx_IOR (mode,
                                             gen_rtx_ASHIFT (mode, source,
                                                             GEN_INT (i)),
                                             source)));
                  return insns + 1;
                }
            }

          /* Don't duplicate cases already considered.  */
          for (i = 17; i < 24; i++)
            {
              if (((temp1 | (temp1 >> i)) == remainder)
                  && !const_ok_for_arm (temp1))
                {
                  rtx new_src = (subtargets
                                 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
                                 : target);
                  insns = arm_gen_constant (code, mode, temp1, new_src,
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn
                      (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_IOR
                                    (mode,
                                     gen_rtx_LSHIFTRT (mode, source,
                                                       GEN_INT (i)),
                                     source)));
                  return insns + 1;
                }
            }
        }
      break;

    case IOR:
    case XOR:
      /* If we have IOR or XOR, and the constant can be loaded in a
         single instruction, and we can find a temporary to put it in,
         then this can be done in two instructions instead of 3-4.  */
      if (subtargets
          /* TARGET can't be NULL if SUBTARGETS is 0.  */
          || (reload_completed && !reg_mentioned_p (target, source)))
        {
          if (const_ok_for_arm (ARM_SIGN_EXTEND (~val)))
            {
              if (generate)
                {
                  rtx sub = subtargets ? gen_reg_rtx (mode) : target;

                  emit_insn (gen_rtx_SET (VOIDmode, sub, GEN_INT (val)));
                  emit_insn (gen_rtx_SET (VOIDmode, target,
                                          gen_rtx (code, mode, source, sub)));
                }
              return 2;
            }
        }

      if (code == XOR)
        break;

      if (set_sign_bit_copies > 8
          && (val & (-1 << (32 - set_sign_bit_copies))) == val)
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (set_sign_bit_copies);

              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_ASHIFT (mode,
                                                                   source,
                                                                   shift))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_LSHIFTRT (mode, sub,
                                                                     shift))));
            }
          return 2;
        }

      if (set_zero_bit_copies > 8
          && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (set_zero_bit_copies);

              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_LSHIFTRT (mode,
                                                                     source,
                                                                     shift))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_ASHIFT (mode, sub,
                                                                   shift))));
            }
          return 2;
        }

      if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~val)))
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode, source)));
              source = sub;
              if (subtargets)
                sub = gen_reg_rtx (mode);
              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_AND (mode, source,
                                                   GEN_INT (temp1))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode, sub)));
            }
          return 3;
        }
      break;

    case AND:
      /* See if two shifts will do 2 or more insn's worth of work.  */
      if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
        {
          HOST_WIDE_INT shift_mask = ((0xffffffff
                                       << (32 - clear_sign_bit_copies))
                                      & 0xffffffff);

          if ((remainder | shift_mask) != 0xffffffff)
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            new_src, source, subtargets, 1);
                  source = new_src;
                }
              else
                {
                  rtx targ = subtargets ? NULL_RTX : target;
                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            targ, source, subtargets, 0);
                }
            }

          if (generate)
            {
              rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (clear_sign_bit_copies);

              emit_insn (gen_ashlsi3 (new_src, source, shift));
              emit_insn (gen_lshrsi3 (target, new_src, shift));
            }

          return insns + 2;
        }

      if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
        {
          HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;

          if ((remainder | shift_mask) != 0xffffffff)
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;

                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            new_src, source, subtargets, 1);
                  source = new_src;
                }
              else
                {
                  rtx targ = subtargets ? NULL_RTX : target;

                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            targ, source, subtargets, 0);
                }
            }

          if (generate)
            {
              rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (clear_zero_bit_copies);

              emit_insn (gen_lshrsi3 (new_src, source, shift));
              emit_insn (gen_ashlsi3 (target, new_src, shift));
            }

          return insns + 2;
        }

      break;

    default:
      break;
    }

  for (i = 0; i < 32; i++)
    if (remainder & (1 << i))
      num_bits_set++;

  if (code == AND || (can_invert && num_bits_set > 16))
    remainder = (~remainder) & 0xffffffff;
  else if (code == PLUS && num_bits_set > 16)
    remainder = (-remainder) & 0xffffffff;
  else
    {
      can_invert = 0;
      can_negate = 0;
    }

  /* Now try and find a way of doing the job in either two or three
     instructions.
     We start by looking for the largest block of zeros that are aligned on
     a 2-bit boundary, we then fill up the temps, wrapping around to the
     top of the word when we drop off the bottom.
     In the worst case this code should produce no more than four insns.  */
  {
    int best_start = 0;
    int best_consecutive_zeros = 0;

    for (i = 0; i < 32; i += 2)
      {
        int consecutive_zeros = 0;

        if (!(remainder & (3 << i)))
          {
            while ((i < 32) && !(remainder & (3 << i)))
              {
                consecutive_zeros += 2;
                i += 2;
              }
            if (consecutive_zeros > best_consecutive_zeros)
              {
                best_consecutive_zeros = consecutive_zeros;
                best_start = i - consecutive_zeros;
              }
            i -= 2;
          }
      }

    /* So long as it won't require any more insns to do so, it's
       desirable to emit a small constant (in bits 0...9) in the last
       insn.  This way there is more chance that it can be combined with
       a later addressing insn to form a pre-indexed load or store
       operation.  Consider:

               *((volatile int *)0xe0000100) = 1;
               *((volatile int *)0xe0000110) = 2;

       We want this to wind up as:

                mov rA, #0xe0000000
                mov rB, #1
                str rB, [rA, #0x100]
                mov rB, #2
                str rB, [rA, #0x110]

       rather than having to synthesize both large constants from scratch.

       Therefore, we calculate how many insns would be required to emit
       the constant starting from `best_start', and also starting from
       zero (i.e. with bit 31 first to be output).  If `best_start' doesn't
       yield a shorter sequence, we may as well use zero.  */
    if (best_start != 0
        && ((((unsigned HOST_WIDE_INT) 1) << best_start) < remainder)
        && (count_insns_for_constant (remainder, 0) <=
            count_insns_for_constant (remainder, best_start)))
      best_start = 0;

    /* Now start emitting the insns.  */
    i = best_start;
    do
      {
        int end;

        if (i <= 0)
          i += 32;
        if (remainder & (3 << (i - 2)))
          {
            end = i - 8;
            if (end < 0)
              end += 32;
            temp1 = remainder & ((0x0ff << end)
                                 | ((i < end) ? (0xff >> (32 - end)) : 0));
            remainder &= ~temp1;

            if (generate)
              {
                rtx new_src, temp1_rtx;

                if (code == SET || code == MINUS)
                  {
                    new_src = (subtargets ? gen_reg_rtx (mode) : target);
                    if (can_invert && code != MINUS)
                      temp1 = ~temp1;
                  }
                else
                  {
                    if (remainder && subtargets)
                      new_src = gen_reg_rtx (mode);
                    else
                      new_src = target;
                    if (can_invert)
                      temp1 = ~temp1;
                    else if (can_negate)
                      temp1 = -temp1;
                  }

                temp1 = trunc_int_for_mode (temp1, mode);
                temp1_rtx = GEN_INT (temp1);

                if (code == SET)
                  ;
                else if (code == MINUS)
                  temp1_rtx = gen_rtx_MINUS (mode, temp1_rtx, source);
                else
                  temp1_rtx = gen_rtx_fmt_ee (code, mode, source, temp1_rtx);

                emit_insn (gen_rtx_SET (VOIDmode, new_src, temp1_rtx));
                source = new_src;
              }

            if (code == SET)
              {
                can_invert = 0;
                code = PLUS;
              }
            else if (code == MINUS)
              code = PLUS;

            insns++;
            i -= 6;
          }
        i -= 2;
      }
    while (remainder);
  }

  return insns;
}
/* Canonicalize a comparison so that we are more likely to recognize it.
   This can be done for a few constant compares, where we can make the
   immediate value easier to load.  */

enum rtx_code
arm_canonicalize_comparison (code, op1)
     enum rtx_code code;
     rtx * op1;
{
  unsigned HOST_WIDE_INT i = INTVAL (*op1);

  switch (code)
    {
    case EQ:
    case NE:
      return code;

    case GT:
    case LE:
      if (i != ((((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1)) - 1)
          && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
        {
          *op1 = GEN_INT (i + 1);
          return code == GT ? GE : LT;
        }
      break;

    case GE:
    case LT:
      if (i != (((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1))
          && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
        {
          *op1 = GEN_INT (i - 1);
          return code == GE ? GT : LE;
        }
      break;

    case GTU:
    case LEU:
      if (i != ~((unsigned HOST_WIDE_INT) 0)
          && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
        {
          *op1 = GEN_INT (i + 1);
          return code == GTU ? GEU : LTU;
        }
      break;

    case GEU:
    case LTU:
      if (i != 0
          && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
        {
          *op1 = GEN_INT (i - 1);
          return code == GEU ? GTU : LEU;
        }
      break;

    default:
      abort ();
    }

  return code;
}
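
/* For example, (GT x 0xfff) cannot be tested directly, because 0xfff
   is not a valid immediate, but it is equivalent to (GE x 0x1000),
   and 0x1000 is valid; the rewrites above catch exactly these cases.  */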
/* Decide whether a type should be returned in memory (true)
   or in a register (false).  This is called by the macro
   RETURN_IN_MEMORY.  */

int
arm_return_in_memory (type)
     tree type;
{
  HOST_WIDE_INT size;

  if (!AGGREGATE_TYPE_P (type))
    /* All simple types are returned in registers.  */
    return 0;

  size = int_size_in_bytes (type);

  if (TARGET_ATPCS)
    {
      /* ATPCS returns aggregate types in memory only if they are
         larger than a word (or are variable size).  */
      return (size < 0 || size > UNITS_PER_WORD);
    }

  /* For the arm-wince targets we choose to be compatible with Microsoft's
     ARM and Thumb compilers, which always return aggregates in memory.  */
#ifndef ARM_WINCE
  /* All structures/unions bigger than one word are returned in memory.
     Also catch the case where int_size_in_bytes returns -1.  In this case
     the aggregate is either huge or of variable size, and in either case
     we will want to return it via memory and not in a register.  */
  if (size < 0 || size > UNITS_PER_WORD)
    return 1;

  if (TREE_CODE (type) == RECORD_TYPE)
    {
      tree field;

      /* For a struct the APCS says that we only return in a register
         if the type is 'integer like' and every addressable element
         has an offset of zero.  For practical purposes this means
         that the structure can have at most one non bit-field element
         and that this element must be the first one in the structure.  */

      /* Find the first field, ignoring non FIELD_DECL things which will
         have been created by C++.  */
      for (field = TYPE_FIELDS (type);
           field && TREE_CODE (field) != FIELD_DECL;
           field = TREE_CHAIN (field))
        continue;

      if (field == NULL)
        return 0; /* An empty structure.  Allowed by an extension to ANSI C.  */

      /* Check that the first field is valid for returning in a register.  */

      /* ... Floats are not allowed.  */
      if (FLOAT_TYPE_P (TREE_TYPE (field)))
        return 1;

      /* ... Aggregates that are not themselves valid for returning in
         a register are not allowed.  */
      if (RETURN_IN_MEMORY (TREE_TYPE (field)))
        return 1;

      /* Now check the remaining fields, if any.  Only bitfields are allowed,
         since they are not addressable.  */
      for (field = TREE_CHAIN (field);
           field;
           field = TREE_CHAIN (field))
        {
          if (TREE_CODE (field) != FIELD_DECL)
            continue;

          if (!DECL_BIT_FIELD_TYPE (field))
            return 1;
        }

      return 0;
    }

  if (TREE_CODE (type) == UNION_TYPE)
    {
      tree field;

      /* Unions can be returned in registers if every element is
         integral, or can be returned in an integer register.  */
      for (field = TYPE_FIELDS (type);
           field;
           field = TREE_CHAIN (field))
        {
          if (TREE_CODE (field) != FIELD_DECL)
            continue;

          if (FLOAT_TYPE_P (TREE_TYPE (field)))
            return 1;

          if (RETURN_IN_MEMORY (TREE_TYPE (field)))
            return 1;
        }

      return 0;
    }
#endif /* not ARM_WINCE */

  /* Return all other types in memory.  */
  return 1;
}
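
/* Under these APCS rules a type such as "struct { int i; }" comes back
   in r0, while "struct { short a; short b; }", although it also fits
   in one word, has two addressable fields and is therefore returned in
   memory (TARGET_ATPCS, by contrast, only checks the size).  */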
/* Indicate whether or not words of a double are in big-endian order.  */

int
arm_float_words_big_endian ()
{
  /* For FPA, float words are always big-endian.  For VFP, float words
     follow the memory system mode.  */

  if (TARGET_HARD_FLOAT)
    {
      /* FIXME: TARGET_HARD_FLOAT currently implies FPA.  */
      return 1;
    }

  if (TARGET_VFP)
    return (TARGET_BIG_END ? 1 : 0);

  return 1;
}
/* Initialize a variable CUM of type CUMULATIVE_ARGS
   for a call to a function whose data type is FNTYPE.
   For a library call, FNTYPE is NULL.  */
void
arm_init_cumulative_args (pcum, fntype, libname, indirect)
     CUMULATIVE_ARGS * pcum;
     tree fntype;
     rtx libname ATTRIBUTE_UNUSED;
     int indirect ATTRIBUTE_UNUSED;
{
  /* On the ARM, the offset starts at 0.  */
  pcum->nregs = ((fntype && aggregate_value_p (TREE_TYPE (fntype))) ? 1 : 0);

  pcum->call_cookie = CALL_NORMAL;

  if (TARGET_LONG_CALLS)
    pcum->call_cookie = CALL_LONG;

  /* Check for long call/short call attributes.  The attributes
     override any command line option.  */
  if (fntype)
    {
      if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (fntype)))
        pcum->call_cookie = CALL_SHORT;
      else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (fntype)))
        pcum->call_cookie = CALL_LONG;
    }
}
1943 /* Determine where to put an argument to a function.
1944 Value is zero to push the argument on the stack,
1945 or a hard register in which to store the argument.
1947 MODE is the argument's machine mode.
1948 TYPE is the data type of the argument (as a tree).
1949 This is null for libcalls where that information may
1950 not be available.
1951 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1952 the preceding args and about the function being called.
1953 NAMED is nonzero if this argument is a named parameter
1954 (otherwise it is an extra parameter matching an ellipsis). */
1957 arm_function_arg (pcum, mode, type, named)
1958 CUMULATIVE_ARGS * pcum;
1959 enum machine_mode mode;
1960 tree type ATTRIBUTE_UNUSED;
1961 int named;
1963 if (mode == VOIDmode)
1964 /* Compute operand 2 of the call insn. */
1965 return GEN_INT (pcum->call_cookie);
1967 if (!named || pcum->nregs >= NUM_ARG_REGS)
1968 return NULL_RTX;
1970 return gen_rtx_REG (mode, pcum->nregs);
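/* A sketch of the effect of the code above (illustrative only), given
   that NUM_ARG_REGS is 4 on the ARM:  */
#if 0
extern int f (int a, int b, int c, int d, int e);
/* For a..d, arm_function_arg returns (reg:SI 0)..(reg:SI 3); for e it
   returns NULL_RTX, so the caller pushes e on the stack.  If the
   function returns an aggregate in memory, pcum->nregs starts at 1 and
   r0 carries the hidden return-value pointer instead.  */
#endif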
1973 /* Variable sized types are passed by reference. This is a GCC
1974 extension to the ARM ABI. */
1977 arm_function_arg_pass_by_reference (cum, mode, type, named)
1978 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
1979 enum machine_mode mode ATTRIBUTE_UNUSED;
1980 tree type;
1981 int named ATTRIBUTE_UNUSED;
1983 return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
1986 /* Implement va_arg. */
1989 arm_va_arg (valist, type)
1990 tree valist, type;
1992 /* Variable sized types are passed by reference. */
1993 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
1995 rtx addr = std_expand_builtin_va_arg (valist, build_pointer_type (type));
1996 return gen_rtx_MEM (ptr_mode, force_reg (Pmode, addr));
1999 return std_expand_builtin_va_arg (valist, type);
2002 /* Encode the current state of the #pragma [no_]long_calls. */
2003 typedef enum
2005 OFF, /* No #pragma [no_]long_calls is in effect. */
2006 LONG, /* #pragma long_calls is in effect. */
2007 SHORT /* #pragma no_long_calls is in effect. */
2008 } arm_pragma_enum;
2010 static arm_pragma_enum arm_pragma_long_calls = OFF;
2012 void
2013 arm_pr_long_calls (pfile)
2014 struct cpp_reader * pfile ATTRIBUTE_UNUSED;
2016 arm_pragma_long_calls = LONG;
2019 void
2020 arm_pr_no_long_calls (pfile)
2021 struct cpp_reader * pfile ATTRIBUTE_UNUSED;
2023 arm_pragma_long_calls = SHORT;
2026 void
2027 arm_pr_long_calls_off (pfile)
2028 struct cpp_reader * pfile ATTRIBUTE_UNUSED;
2030 arm_pragma_long_calls = OFF;
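/* Example of user source driving the state above (a sketch; these are
   the pragma names registered by this port):  */
#if 0
#pragma long_calls
extern void far_fn (void);   /* Given an implicit long_call attribute.  */
#pragma no_long_calls
extern void near_fn (void);  /* Given an implicit short_call attribute.  */
#pragma long_calls_off
extern void plain_fn (void); /* Back to the command line default.  */
#endif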
2033 /* Table of machine attributes. */
2034 const struct attribute_spec arm_attribute_table[] =
2036 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
2037 /* Function calls made to this symbol must be done indirectly, because
2038 it may lie outside of the 26 bit addressing range of a normal function
2039 call. */
2040 { "long_call", 0, 0, false, true, true, NULL },
2041 /* These functions, in contrast, are always known to reside within the 26 bit
2042 addressing range. */
2043 { "short_call", 0, 0, false, true, true, NULL },
2044 /* Interrupt Service Routines have special prologue and epilogue requirements. */
2045 { "isr", 0, 1, false, false, false, arm_handle_isr_attribute },
2046 { "interrupt", 0, 1, false, false, false, arm_handle_isr_attribute },
2047 { "naked", 0, 0, true, false, false, arm_handle_fndecl_attribute },
2048 #ifdef ARM_PE
2049 /* ARM/PE has three new attributes:
2050 interfacearm - ?
2051 dllexport - for exporting a function/variable that will live in a dll
2052 dllimport - for importing a function/variable from a dll
2054 Microsoft allows multiple declspecs in one __declspec, separating
2055 them with spaces. We do NOT support this. Instead, use __declspec
2056 multiple times.
2058 { "dllimport", 0, 0, true, false, false, NULL },
2059 { "dllexport", 0, 0, true, false, false, NULL },
2060 { "interfacearm", 0, 0, true, false, false, arm_handle_fndecl_attribute },
2061 #endif
2062 { NULL, 0, 0, false, false, false, NULL }
2065 /* Handle an attribute requiring a FUNCTION_DECL;
2066 arguments as in struct attribute_spec.handler. */
2068 static tree
2069 arm_handle_fndecl_attribute (node, name, args, flags, no_add_attrs)
2070 tree * node;
2071 tree name;
2072 tree args ATTRIBUTE_UNUSED;
2073 int flags ATTRIBUTE_UNUSED;
2074 bool * no_add_attrs;
2076 if (TREE_CODE (*node) != FUNCTION_DECL)
2078 warning ("`%s' attribute only applies to functions",
2079 IDENTIFIER_POINTER (name));
2080 *no_add_attrs = true;
2083 return NULL_TREE;
2086 /* Handle an "interrupt" or "isr" attribute;
2087 arguments as in struct attribute_spec.handler. */
2089 static tree
2090 arm_handle_isr_attribute (node, name, args, flags, no_add_attrs)
2091 tree * node;
2092 tree name;
2093 tree args;
2094 int flags;
2095 bool * no_add_attrs;
2097 if (DECL_P (*node))
2099 if (TREE_CODE (*node) != FUNCTION_DECL)
2101 warning ("`%s' attribute only applies to functions",
2102 IDENTIFIER_POINTER (name));
2103 *no_add_attrs = true;
2105 /* FIXME: the argument, if any, is checked for type attributes;
2106 should it be checked for decl ones? */
2108 else
2110 if (TREE_CODE (*node) == FUNCTION_TYPE
2111 || TREE_CODE (*node) == METHOD_TYPE)
2113 if (arm_isr_value (args) == ARM_FT_UNKNOWN)
2115 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
2116 *no_add_attrs = true;
2119 else if (TREE_CODE (*node) == POINTER_TYPE
2120 && (TREE_CODE (TREE_TYPE (*node)) == FUNCTION_TYPE
2121 || TREE_CODE (TREE_TYPE (*node)) == METHOD_TYPE)
2122 && arm_isr_value (args) != ARM_FT_UNKNOWN)
2124 *node = build_type_copy (*node);
2125 TREE_TYPE (*node) = build_type_attribute_variant
2126 (TREE_TYPE (*node),
2127 tree_cons (name, args, TYPE_ATTRIBUTES (TREE_TYPE (*node))));
2128 *no_add_attrs = true;
2130 else
2132 /* Possibly pass this attribute on from the type to a decl. */
2133 if (flags & ((int) ATTR_FLAG_DECL_NEXT
2134 | (int) ATTR_FLAG_FUNCTION_NEXT
2135 | (int) ATTR_FLAG_ARRAY_NEXT))
2137 *no_add_attrs = true;
2138 return tree_cons (name, args, NULL_TREE);
2140 else
2142 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
2147 return NULL_TREE;
2150 /* Return 0 if the attributes for two types are incompatible, 1 if they
2151 are compatible, and 2 if they are nearly compatible (which causes a
2152 warning to be generated). */
2154 static int
2155 arm_comp_type_attributes (type1, type2)
2156 tree type1;
2157 tree type2;
2159 int l1, l2, s1, s2;
2161 /* Check for mismatch of non-default calling convention. */
2162 if (TREE_CODE (type1) != FUNCTION_TYPE)
2163 return 1;
2165 /* Check for mismatched call attributes. */
2166 l1 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type1)) != NULL;
2167 l2 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type2)) != NULL;
2168 s1 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type1)) != NULL;
2169 s2 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type2)) != NULL;
2171 /* Only bother to check if an attribute is defined. */
2172 if (l1 | l2 | s1 | s2)
2174 /* If one type has an attribute, the other must have the same attribute. */
2175 if ((l1 != l2) || (s1 != s2))
2176 return 0;
2178 /* Disallow mixed attributes. */
2179 if ((l1 & s2) || (l2 & s1))
2180 return 0;
2183 /* Check for mismatched ISR attribute. */
2184 l1 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type1)) != NULL;
2185 if (! l1)
2186 l1 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type1)) != NULL;
2187 l2 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type2)) != NULL;
2188 if (! l2)
2189 l2 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type2)) != NULL;
2190 if (l1 != l2)
2191 return 0;
2193 return 1;
2196 /* Encode long_call or short_call attribute by prefixing
2197 symbol name in DECL with a special character FLAG. */
2199 void
2200 arm_encode_call_attribute (decl, flag)
2201 tree decl;
2202 int flag;
2204 const char * str = XSTR (XEXP (DECL_RTL (decl), 0), 0);
2205 int len = strlen (str);
2206 char * newstr;
2208 /* Do not allow weak functions to be treated as short call. */
2209 if (DECL_WEAK (decl) && flag == SHORT_CALL_FLAG_CHAR)
2210 return;
2212 newstr = alloca (len + 2);
2213 newstr[0] = flag;
2214 strcpy (newstr + 1, str);
2216 newstr = (char *) ggc_alloc_string (newstr, len + 1);
2217 XSTR (XEXP (DECL_RTL (decl), 0), 0) = newstr;
2220 /* Assigns default attributes to newly defined type. This is used to
2221 set short_call/long_call attributes for function types of
2222 functions defined inside corresponding #pragma scopes. */
2224 static void
2225 arm_set_default_type_attributes (type)
2226 tree type;
2228 /* Add __attribute__ ((long_call)) to all functions, when
2229 inside #pragma long_calls or __attribute__ ((short_call)),
2230 when inside #pragma no_long_calls. */
2231 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
2233 tree type_attr_list, attr_name;
2234 type_attr_list = TYPE_ATTRIBUTES (type);
2236 if (arm_pragma_long_calls == LONG)
2237 attr_name = get_identifier ("long_call");
2238 else if (arm_pragma_long_calls == SHORT)
2239 attr_name = get_identifier ("short_call");
2240 else
2241 return;
2243 type_attr_list = tree_cons (attr_name, NULL_TREE, type_attr_list);
2244 TYPE_ATTRIBUTES (type) = type_attr_list;
2248 /* Return 1 if the operand is a SYMBOL_REF for a function known to be
2249 defined within the current compilation unit. If this cannot be
2250 determined, then 0 is returned. */
2252 static int
2253 current_file_function_operand (sym_ref)
2254 rtx sym_ref;
2256 /* This is a bit of a fib. A function will have a short call flag
2257 applied to its name if it has the short call attribute, or it has
2258 already been defined within the current compilation unit. */
2259 if (ENCODED_SHORT_CALL_ATTR_P (XSTR (sym_ref, 0)))
2260 return 1;
2262 /* The current function is always defined within the current compilation
2263 unit. If it is a weak definition, however, then this may not be the real
2264 definition of the function, and so we have to say no. */
2265 if (sym_ref == XEXP (DECL_RTL (current_function_decl), 0)
2266 && !DECL_WEAK (current_function_decl))
2267 return 1;
2269 /* We cannot make the determination - default to returning 0. */
2270 return 0;
2273 /* Return nonzero if a 32 bit "long_call" should be generated for
2274 this call. We generate a long_call if the function:
2276 a. has an __attribute__ ((long_call))
2277 or b. is within the scope of a #pragma long_calls
2278 or c. the -mlong-calls command line switch has been specified
2280 However we do not generate a long call if the function:
2282 d. has an __attribute__ ((short_call))
2283 or e. is inside the scope of a #pragma no_long_calls
2284 or f. has an __attribute__ ((section))
2285 or g. is defined within the current compilation unit.
2287 This function will be called by C fragments contained in the machine
2288 description file. CALL_REF and CALL_COOKIE correspond to the matched
2289 rtl operands. CALL_SYMBOL is used to distinguish between
2290 two different callers of the function. It is set to 1 in the
2291 "call_symbol" and "call_symbol_value" patterns and to 0 in the "call"
2292 and "call_value" patterns. This is because of the difference in the
2293 SYM_REFs passed by these patterns. */
2296 arm_is_longcall_p (sym_ref, call_cookie, call_symbol)
2297 rtx sym_ref;
2298 int call_cookie;
2299 int call_symbol;
2301 if (!call_symbol)
2303 if (GET_CODE (sym_ref) != MEM)
2304 return 0;
2306 sym_ref = XEXP (sym_ref, 0);
2309 if (GET_CODE (sym_ref) != SYMBOL_REF)
2310 return 0;
2312 if (call_cookie & CALL_SHORT)
2313 return 0;
2315 if (TARGET_LONG_CALLS && flag_function_sections)
2316 return 1;
2318 if (current_file_function_operand (sym_ref))
2319 return 0;
2321 return (call_cookie & CALL_LONG)
2322 || ENCODED_LONG_CALL_ATTR_P (XSTR (sym_ref, 0))
2323 || TARGET_LONG_CALLS;
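/* For reference, cases (a) and (d) above correspond to user source like
   the following (standard attribute syntax; the names are only
   examples):  */
#if 0
extern void far_func (void) __attribute__ ((long_call));
extern void near_func (void) __attribute__ ((short_call));
#endif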
2326 /* Return nonzero if it is ok to make a tail-call to DECL. */
2328 static bool
2329 arm_function_ok_for_sibcall (decl, exp)
2330 tree decl;
2331 tree exp ATTRIBUTE_UNUSED;
2333 int call_type = TARGET_LONG_CALLS ? CALL_LONG : CALL_NORMAL;
2335 /* Never tailcall something for which we have no decl, or if we
2336 are in Thumb mode. */
2337 if (decl == NULL || TARGET_THUMB)
2338 return false;
2340 /* Get the calling method. */
2341 if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
2342 call_type = CALL_SHORT;
2343 else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
2344 call_type = CALL_LONG;
2346 /* Cannot tail-call to long calls, since these are out of range of
2347 a branch instruction. However, if not compiling PIC, we know
2348 we can reach the symbol if it is in this compilation unit. */
2349 if (call_type == CALL_LONG && (flag_pic || !TREE_ASM_WRITTEN (decl)))
2350 return false;
2352 /* If we are interworking and the function is not declared static
2353 then we can't tail-call it unless we know that it exists in this
2354 compilation unit (since it might be a Thumb routine). */
2355 if (TARGET_INTERWORK && TREE_PUBLIC (decl) && !TREE_ASM_WRITTEN (decl))
2356 return false;
2358 /* Never tailcall from an ISR routine - it needs a special exit sequence. */
2359 if (IS_INTERRUPT (arm_current_func_type ()))
2360 return false;
2362 /* Everything else is ok. */
2363 return true;
2367 /* Addressing mode support functions. */
2369 /* Return non-zero if X is a legitimate immediate operand when compiling
2370 for PIC. */
2372 legitimate_pic_operand_p (x)
2373 rtx x;
2375 if (CONSTANT_P (x)
2376 && flag_pic
2377 && (GET_CODE (x) == SYMBOL_REF
2378 || (GET_CODE (x) == CONST
2379 && GET_CODE (XEXP (x, 0)) == PLUS
2380 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)))
2381 return 0;
2383 return 1;
2387 legitimize_pic_address (orig, mode, reg)
2388 rtx orig;
2389 enum machine_mode mode;
2390 rtx reg;
2392 if (GET_CODE (orig) == SYMBOL_REF
2393 || GET_CODE (orig) == LABEL_REF)
2395 #ifndef AOF_ASSEMBLER
2396 rtx pic_ref, address;
2397 #endif
2398 rtx insn;
2399 int subregs = 0;
2401 if (reg == 0)
2403 if (no_new_pseudos)
2404 abort ();
2405 else
2406 reg = gen_reg_rtx (Pmode);
2408 subregs = 1;
2411 #ifdef AOF_ASSEMBLER
2412 /* The AOF assembler can generate relocations for these directly, and
2413 understands that the PIC register has to be added into the offset. */
2414 insn = emit_insn (gen_pic_load_addr_based (reg, orig));
2415 #else
2416 if (subregs)
2417 address = gen_reg_rtx (Pmode);
2418 else
2419 address = reg;
2421 if (TARGET_ARM)
2422 emit_insn (gen_pic_load_addr_arm (address, orig));
2423 else
2424 emit_insn (gen_pic_load_addr_thumb (address, orig));
2426 if ((GET_CODE (orig) == LABEL_REF
2427 || (GET_CODE (orig) == SYMBOL_REF &&
2428 ENCODED_SHORT_CALL_ATTR_P (XSTR (orig, 0))))
2429 && NEED_GOT_RELOC)
2430 pic_ref = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, address);
2431 else
2433 pic_ref = gen_rtx_MEM (Pmode,
2434 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
2435 address));
2436 RTX_UNCHANGING_P (pic_ref) = 1;
2439 insn = emit_move_insn (reg, pic_ref);
2440 #endif
2441 current_function_uses_pic_offset_table = 1;
2442 /* Put a REG_EQUAL note on this insn, so that it can be optimized
2443 by loop. */
2444 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, orig,
2445 REG_NOTES (insn));
2446 return reg;
2448 else if (GET_CODE (orig) == CONST)
2450 rtx base, offset;
2452 if (GET_CODE (XEXP (orig, 0)) == PLUS
2453 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
2454 return orig;
2456 if (reg == 0)
2458 if (no_new_pseudos)
2459 abort ();
2460 else
2461 reg = gen_reg_rtx (Pmode);
2464 if (GET_CODE (XEXP (orig, 0)) == PLUS)
2466 base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
2467 offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
2468 base == reg ? 0 : reg);
2470 else
2471 abort ();
2473 if (GET_CODE (offset) == CONST_INT)
2475 /* The base register doesn't really matter, we only want to
2476 test the index for the appropriate mode. */
2477 if (!arm_legitimate_index_p (mode, offset, 0))
2479 if (!no_new_pseudos)
2480 offset = force_reg (Pmode, offset);
2481 else
2482 abort ();
2485 if (GET_CODE (offset) == CONST_INT)
2486 return plus_constant (base, INTVAL (offset));
2489 if (GET_MODE_SIZE (mode) > 4
2490 && (GET_MODE_CLASS (mode) == MODE_INT
2491 || TARGET_SOFT_FLOAT))
2493 emit_insn (gen_addsi3 (reg, base, offset));
2494 return reg;
2497 return gen_rtx_PLUS (Pmode, base, offset);
2500 return orig;
2503 /* Generate code to load the PIC register. PROLOGUE is true if
2504 called from arm_expand_prologue (in which case we want the
2505 generated insns at the start of the function); false if called
2506 by an exception receiver that needs the PIC register reloaded
2507 (in which case the insns are just dumped at the current location). */
2509 void
2510 arm_finalize_pic (prologue)
2511 int prologue ATTRIBUTE_UNUSED;
2513 #ifndef AOF_ASSEMBLER
2514 rtx l1, pic_tmp, pic_tmp2, seq, pic_rtx;
2515 rtx global_offset_table;
2517 if (current_function_uses_pic_offset_table == 0 || TARGET_SINGLE_PIC_BASE)
2518 return;
2520 if (!flag_pic)
2521 abort ();
2523 start_sequence ();
2524 l1 = gen_label_rtx ();
2526 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2527 /* On the ARM the PC register contains 'dot + 8' at the time of the
2528 addition, on the Thumb it is 'dot + 4'. */
2529 pic_tmp = plus_constant (gen_rtx_LABEL_REF (Pmode, l1), TARGET_ARM ? 8 : 4);
2530 if (GOT_PCREL)
2531 pic_tmp2 = gen_rtx_CONST (VOIDmode,
2532 gen_rtx_PLUS (Pmode, global_offset_table, pc_rtx));
2533 else
2534 pic_tmp2 = gen_rtx_CONST (VOIDmode, global_offset_table);
2536 pic_rtx = gen_rtx_CONST (Pmode, gen_rtx_MINUS (Pmode, pic_tmp2, pic_tmp));
2538 if (TARGET_ARM)
2540 emit_insn (gen_pic_load_addr_arm (pic_offset_table_rtx, pic_rtx));
2541 emit_insn (gen_pic_add_dot_plus_eight (pic_offset_table_rtx, l1));
2543 else
2545 emit_insn (gen_pic_load_addr_thumb (pic_offset_table_rtx, pic_rtx));
2546 emit_insn (gen_pic_add_dot_plus_four (pic_offset_table_rtx, l1));
2549 seq = get_insns ();
2550 end_sequence ();
2551 if (prologue)
2552 emit_insn_after (seq, get_insns ());
2553 else
2554 emit_insn (seq);
2556 /* Need to emit this whether or not we obey regdecls,
2557 since setjmp/longjmp can cause life info to screw up. */
2558 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
2559 #endif /* AOF_ASSEMBLER */
2562 /* Return nonzero if X is valid as an ARM state addressing register. */
2563 static int
2564 arm_address_register_rtx_p (x, strict_p)
2565 rtx x;
2566 int strict_p;
2568 int regno;
2570 if (GET_CODE (x) != REG)
2571 return 0;
2573 regno = REGNO (x);
2575 if (strict_p)
2576 return ARM_REGNO_OK_FOR_BASE_P (regno);
2578 return (regno <= LAST_ARM_REGNUM
2579 || regno >= FIRST_PSEUDO_REGISTER
2580 || regno == FRAME_POINTER_REGNUM
2581 || regno == ARG_POINTER_REGNUM);
2584 /* Return nonzero if X is a valid ARM state address operand. */
2586 arm_legitimate_address_p (mode, x, strict_p)
2587 enum machine_mode mode;
2588 rtx x;
2589 int strict_p;
2591 if (arm_address_register_rtx_p (x, strict_p))
2592 return 1;
2594 else if (GET_CODE (x) == POST_INC || GET_CODE (x) == PRE_DEC)
2595 return arm_address_register_rtx_p (XEXP (x, 0), strict_p);
2597 else if ((GET_CODE (x) == POST_MODIFY || GET_CODE (x) == PRE_MODIFY)
2598 && GET_MODE_SIZE (mode) <= 4
2599 && arm_address_register_rtx_p (XEXP (x, 0), strict_p)
2600 && GET_CODE (XEXP (x, 1)) == PLUS
2601 && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
2602 return arm_legitimate_index_p (mode, XEXP (XEXP (x, 1), 1), strict_p);
2604 /* After reload constants split into minipools will have addresses
2605 from a LABEL_REF. */
2606 else if (GET_MODE_SIZE (mode) >= 4 && reload_completed
2607 && (GET_CODE (x) == LABEL_REF
2608 || (GET_CODE (x) == CONST
2609 && GET_CODE (XEXP (x, 0)) == PLUS
2610 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF
2611 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
2612 return 1;
2614 else if (mode == TImode)
2615 return 0;
2617 else if (mode == DImode || (TARGET_SOFT_FLOAT && mode == DFmode))
2619 if (GET_CODE (x) == PLUS
2620 && arm_address_register_rtx_p (XEXP (x, 0), strict_p)
2621 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2623 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
2625 if (val == 4 || val == -4 || val == -8)
2626 return 1;
2630 else if (GET_CODE (x) == PLUS)
2632 rtx xop0 = XEXP (x, 0);
2633 rtx xop1 = XEXP (x, 1);
2635 return ((arm_address_register_rtx_p (xop0, strict_p)
2636 && arm_legitimate_index_p (mode, xop1, strict_p))
2637 || (arm_address_register_rtx_p (xop1, strict_p)
2638 && arm_legitimate_index_p (mode, xop0, strict_p)));
2641 #if 0
2642 /* Reload currently can't handle MINUS, so disable this for now */
2643 else if (GET_CODE (x) == MINUS)
2645 rtx xop0 = XEXP (x, 0);
2646 rtx xop1 = XEXP (x, 1);
2648 return (arm_address_register_rtx_p (xop0, strict_p)
2649 && arm_legitimate_index_p (mode, xop1, strict_p));
2651 #endif
2653 else if (GET_MODE_CLASS (mode) != MODE_FLOAT
2654 && GET_CODE (x) == SYMBOL_REF
2655 && CONSTANT_POOL_ADDRESS_P (x)
2656 && ! (flag_pic
2657 && symbol_mentioned_p (get_pool_constant (x))))
2658 return 1;
2660 else if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == POST_DEC)
2661 && (GET_MODE_SIZE (mode) <= 4)
2662 && arm_address_register_rtx_p (XEXP (x, 0), strict_p))
2663 return 1;
2665 return 0;
2668 /* Return nonzero if INDEX is valid for an address index operand in
2669 ARM state. */
2670 static int
2671 arm_legitimate_index_p (mode, index, strict_p)
2672 enum machine_mode mode;
2673 rtx index;
2674 int strict_p;
2676 HOST_WIDE_INT range;
2677 enum rtx_code code = GET_CODE (index);
2679 if (TARGET_HARD_FLOAT && GET_MODE_CLASS (mode) == MODE_FLOAT)
2680 return (code == CONST_INT && INTVAL (index) < 1024
2681 && INTVAL (index) > -1024
2682 && (INTVAL (index) & 3) == 0);
2684 if (arm_address_register_rtx_p (index, strict_p)
2685 && GET_MODE_SIZE (mode) <= 4)
2686 return 1;
2688 /* XXX What about ldrsb? */
2689 if (GET_MODE_SIZE (mode) <= 4 && code == MULT
2690 && (!arm_arch4 || (mode) != HImode))
2692 rtx xiop0 = XEXP (index, 0);
2693 rtx xiop1 = XEXP (index, 1);
2695 return ((arm_address_register_rtx_p (xiop0, strict_p)
2696 && power_of_two_operand (xiop1, SImode))
2697 || (arm_address_register_rtx_p (xiop1, strict_p)
2698 && power_of_two_operand (xiop0, SImode)));
2701 if (GET_MODE_SIZE (mode) <= 4
2702 && (code == LSHIFTRT || code == ASHIFTRT
2703 || code == ASHIFT || code == ROTATERT)
2704 && (!arm_arch4 || (mode) != HImode))
2706 rtx op = XEXP (index, 1);
2708 return (arm_address_register_rtx_p (XEXP (index, 0), strict_p)
2709 && GET_CODE (op) == CONST_INT
2710 && INTVAL (op) > 0
2711 && INTVAL (op) <= 31);
2714 /* XXX For ARM v4 we may be doing a sign-extend operation during the
2715 load, but that has a restricted addressing range and we are unable
2716 to tell here whether that is the case. To be safe we restrict all
2717 loads to that range. */
2718 range = ((mode) == HImode || (mode) == QImode)
2719 ? (arm_arch4 ? 256 : 4095) : 4096;
2721 return (code == CONST_INT
2722 && INTVAL (index) < range
2723 && INTVAL (index) > -range);
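/* As a worked example of the ranges above: a word access can reach
   ldr r0, [r1, #4095] (range 4096, so |offset| <= 4095), whereas on
   arch4 a halfword access is limited to ldrh r0, [r1, #255] (range 256,
   matching the smaller immediate field of the v4 ldrh/ldrsb forms).  */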
2726 /* Return nonzero if X is valid as a Thumb state base register. */
2727 static int
2728 thumb_base_register_rtx_p (x, mode, strict_p)
2729 rtx x;
2730 enum machine_mode mode;
2731 int strict_p;
2733 int regno;
2735 if (GET_CODE (x) != REG)
2736 return 0;
2738 regno = REGNO (x);
2740 if (strict_p)
2741 return THUMB_REGNO_MODE_OK_FOR_BASE_P (regno, mode);
2743 return (regno <= LAST_LO_REGNUM
2744 || regno >= FIRST_PSEUDO_REGISTER
2745 || regno == FRAME_POINTER_REGNUM
2746 || (GET_MODE_SIZE (mode) >= 4
2747 && (regno == STACK_POINTER_REGNUM
2748 || x == hard_frame_pointer_rtx
2749 || x == arg_pointer_rtx)));
2752 /* Return nonzero if x is a legitimate index register. This is the case
2753 for any base register that can access a QImode object. */
2754 inline static int
2755 thumb_index_register_rtx_p (x, strict_p)
2756 rtx x;
2757 int strict_p;
2759 return thumb_base_register_rtx_p (x, QImode, strict_p);
2762 /* Return nonzero if x is a legitimate Thumb-state address.
2764 The AP may be eliminated to either the SP or the FP, so we use the
2765 least common denominator, e.g. SImode, and offsets from 0 to 64.
2767 ??? Verify whether the above is the right approach.
2769 ??? Also, the FP may be eliminated to the SP, so perhaps that
2770 needs special handling also.
2772 ??? Look at how the mips16 port solves this problem. It probably uses
2773 better ways to solve some of these problems.
2775 Although it is not incorrect, we don't accept QImode and HImode
2776 addresses based on the frame pointer or arg pointer until the
2777 reload pass starts. This is so that eliminating such addresses
2778 into stack based ones won't produce impossible code. */
2780 thumb_legitimate_address_p (mode, x, strict_p)
2781 enum machine_mode mode;
2782 rtx x;
2783 int strict_p;
2785 /* ??? Not clear if this is right. Experiment. */
2786 if (GET_MODE_SIZE (mode) < 4
2787 && !(reload_in_progress || reload_completed)
2788 && (reg_mentioned_p (frame_pointer_rtx, x)
2789 || reg_mentioned_p (arg_pointer_rtx, x)
2790 || reg_mentioned_p (virtual_incoming_args_rtx, x)
2791 || reg_mentioned_p (virtual_outgoing_args_rtx, x)
2792 || reg_mentioned_p (virtual_stack_dynamic_rtx, x)
2793 || reg_mentioned_p (virtual_stack_vars_rtx, x)))
2794 return 0;
2796 /* Accept any base register. SP only in SImode or larger. */
2797 else if (thumb_base_register_rtx_p (x, mode, strict_p))
2798 return 1;
2800 /* This is PC relative data before MACHINE_DEPENDENT_REORG runs. */
2801 else if (GET_MODE_SIZE (mode) >= 4 && CONSTANT_P (x)
2802 && GET_CODE (x) == SYMBOL_REF
2803 && CONSTANT_POOL_ADDRESS_P (x) && ! flag_pic)
2804 return 1;
2806 /* This is PC relative data after MACHINE_DEPENDENT_REORG runs. */
2807 else if (GET_MODE_SIZE (mode) >= 4 && reload_completed
2808 && (GET_CODE (x) == LABEL_REF
2809 || (GET_CODE (x) == CONST
2810 && GET_CODE (XEXP (x, 0)) == PLUS
2811 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF
2812 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
2813 return 1;
2815 /* Post-inc indexing only supported for SImode and larger. */
2816 else if (GET_CODE (x) == POST_INC && GET_MODE_SIZE (mode) >= 4
2817 && thumb_index_register_rtx_p (XEXP (x, 0), strict_p))
2818 return 1;
2820 else if (GET_CODE (x) == PLUS)
2822 /* REG+REG address can be any two index registers. */
2823 /* We disallow FRAME+REG addressing since we know that FRAME
2824 will be replaced with STACK, and SP relative addressing only
2825 permits SP+OFFSET. */
2826 if (GET_MODE_SIZE (mode) <= 4
2827 && XEXP (x, 0) != frame_pointer_rtx
2828 && XEXP (x, 1) != frame_pointer_rtx
2829 && XEXP (x, 0) != virtual_stack_vars_rtx
2830 && XEXP (x, 1) != virtual_stack_vars_rtx
2831 && thumb_index_register_rtx_p (XEXP (x, 0), strict_p)
2832 && thumb_index_register_rtx_p (XEXP (x, 1), strict_p))
2833 return 1;
2835 /* REG+const has 5-7 bit offset for non-SP registers. */
2836 else if ((thumb_index_register_rtx_p (XEXP (x, 0), strict_p)
2837 || XEXP (x, 0) == arg_pointer_rtx)
2838 && GET_CODE (XEXP (x, 1)) == CONST_INT
2839 && thumb_legitimate_offset_p (mode, INTVAL (XEXP (x, 1))))
2840 return 1;
2842 /* REG+const has 10 bit offset for SP, but only SImode and
2843 larger is supported. */
2844 /* ??? Should probably check for DI/DFmode overflow here
2845 just like GO_IF_LEGITIMATE_OFFSET does. */
2846 else if (GET_CODE (XEXP (x, 0)) == REG
2847 && REGNO (XEXP (x, 0)) == STACK_POINTER_REGNUM
2848 && GET_MODE_SIZE (mode) >= 4
2849 && GET_CODE (XEXP (x, 1)) == CONST_INT
2850 && INTVAL (XEXP (x, 1)) >= 0
2851 && INTVAL (XEXP (x, 1)) + GET_MODE_SIZE (mode) <= 1024
2852 && (INTVAL (XEXP (x, 1)) & 3) == 0)
2853 return 1;
2855 else if (GET_CODE (XEXP (x, 0)) == REG
2856 && REGNO (XEXP (x, 0)) == FRAME_POINTER_REGNUM
2857 && GET_MODE_SIZE (mode) >= 4
2858 && GET_CODE (XEXP (x, 1)) == CONST_INT
2859 && (INTVAL (XEXP (x, 1)) & 3) == 0)
2860 return 1;
2863 else if (GET_MODE_CLASS (mode) != MODE_FLOAT
2864 && GET_CODE (x) == SYMBOL_REF
2865 && CONSTANT_POOL_ADDRESS_P (x)
2866 && !(flag_pic
2867 && symbol_mentioned_p (get_pool_constant (x))))
2868 return 1;
2870 return 0;
2873 /* Return nonzero if VAL can be used as an offset in a Thumb-state address
2874 instruction of mode MODE. */
2876 thumb_legitimate_offset_p (mode, val)
2877 enum machine_mode mode;
2878 HOST_WIDE_INT val;
2880 switch (GET_MODE_SIZE (mode))
2882 case 1:
2883 return val >= 0 && val < 32;
2885 case 2:
2886 return val >= 0 && val < 64 && (val & 1) == 0;
2888 default:
2889 return (val >= 0
2890 && (val + GET_MODE_SIZE (mode)) <= 128
2891 && (val & 3) == 0);
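/* Hypothetical checks (not compiler code), matching the Thumb
   immediate-offset load/store encodings:  */
#if 0
thumb_legitimate_offset_p (QImode, 31);  /* 1: ldrb r0, [r1, #31]  */
thumb_legitimate_offset_p (HImode, 62);  /* 1: ldrh r0, [r1, #62]  */
thumb_legitimate_offset_p (SImode, 124); /* 1: ldr  r0, [r1, #124]  */
thumb_legitimate_offset_p (SImode, 126); /* 0: misaligned and out of range.  */
#endif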
2897 #define REG_OR_SUBREG_REG(X) \
2898 (GET_CODE (X) == REG \
2899 || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))
2901 #define REG_OR_SUBREG_RTX(X) \
2902 (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))
2904 #ifndef COSTS_N_INSNS
2905 #define COSTS_N_INSNS(N) ((N) * 4 - 2)
2906 #endif
2909 arm_rtx_costs (x, code, outer)
2910 rtx x;
2911 enum rtx_code code;
2912 enum rtx_code outer;
2914 enum machine_mode mode = GET_MODE (x);
2915 enum rtx_code subcode;
2916 int extra_cost;
2918 if (TARGET_THUMB)
2920 switch (code)
2922 case ASHIFT:
2923 case ASHIFTRT:
2924 case LSHIFTRT:
2925 case ROTATERT:
2926 case PLUS:
2927 case MINUS:
2928 case COMPARE:
2929 case NEG:
2930 case NOT:
2931 return COSTS_N_INSNS (1);
2933 case MULT:
2934 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
2936 int cycles = 0;
2937 unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));
2939 while (i)
2941 i >>= 2;
2942 cycles++;
2944 return COSTS_N_INSNS (2) + cycles;
2946 return COSTS_N_INSNS (1) + 16;
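/* Worked example of the loop above: multiplying by 20 (10100 in binary)
   shifts i right by two bits three times before it reaches zero, so the
   cost returned is COSTS_N_INSNS (2) + 3.  */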
2948 case SET:
2949 return (COSTS_N_INSNS (1)
2950 + 4 * ((GET_CODE (SET_SRC (x)) == MEM)
2951 + (GET_CODE (SET_DEST (x)) == MEM)));
2953 case CONST_INT:
2954 if (outer == SET)
2956 if ((unsigned HOST_WIDE_INT) INTVAL (x) < 256)
2957 return 0;
2958 if (thumb_shiftable_const (INTVAL (x)))
2959 return COSTS_N_INSNS (2);
2960 return COSTS_N_INSNS (3);
2962 else if (outer == PLUS
2963 && INTVAL (x) < 256 && INTVAL (x) > -256)
2964 return 0;
2965 else if (outer == COMPARE
2966 && (unsigned HOST_WIDE_INT) INTVAL (x) < 256)
2967 return 0;
2968 else if (outer == ASHIFT || outer == ASHIFTRT
2969 || outer == LSHIFTRT)
2970 return 0;
2971 return COSTS_N_INSNS (2);
2973 case CONST:
2974 case CONST_DOUBLE:
2975 case LABEL_REF:
2976 case SYMBOL_REF:
2977 return COSTS_N_INSNS (3);
2979 case UDIV:
2980 case UMOD:
2981 case DIV:
2982 case MOD:
2983 return 100;
2985 case TRUNCATE:
2986 return 99;
2988 case AND:
2989 case XOR:
2990 case IOR:
2991 /* XXX guess. */
2992 return 8;
2994 case ADDRESSOF:
2995 case MEM:
2996 /* XXX another guess. */
2997 /* Memory costs quite a lot for the first word, but subsequent words
2998 load at the equivalent of a single insn each. */
2999 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
3000 + ((GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
3001 ? 4 : 0));
3003 case IF_THEN_ELSE:
3004 /* XXX a guess. */
3005 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
3006 return 14;
3007 return 2;
3009 case ZERO_EXTEND:
3010 /* XXX still guessing. */
3011 switch (GET_MODE (XEXP (x, 0)))
3013 case QImode:
3014 return (1 + (mode == DImode ? 4 : 0)
3015 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3017 case HImode:
3018 return (4 + (mode == DImode ? 4 : 0)
3019 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3021 case SImode:
3022 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3024 default:
3025 return 99;
3028 default:
3029 return 99;
3030 #if 0
3031 case FFS:
3032 case FLOAT:
3033 case FIX:
3034 case UNSIGNED_FIX:
3035 /* XXX guess */
3036 fprintf (stderr, "unexpected code for thumb in rtx_costs: %s\n",
3037 rtx_name[code]);
3038 abort ();
3039 #endif
3043 switch (code)
3045 case MEM:
3046 /* Memory costs quite a lot for the first word, but subsequent words
3047 load at the equivalent of a single insn each. */
3048 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
3049 + (GET_CODE (x) == SYMBOL_REF
3050 && CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));
3052 case DIV:
3053 case MOD:
3054 return 100;
3056 case ROTATE:
3057 if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
3058 return 4;
3059 /* Fall through */
3060 case ROTATERT:
3061 if (mode != SImode)
3062 return 8;
3063 /* Fall through */
3064 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
3065 if (mode == DImode)
3066 return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
3067 + ((GET_CODE (XEXP (x, 0)) == REG
3068 || (GET_CODE (XEXP (x, 0)) == SUBREG
3069 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
3070 ? 0 : 8));
3071 return (1 + ((GET_CODE (XEXP (x, 0)) == REG
3072 || (GET_CODE (XEXP (x, 0)) == SUBREG
3073 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
3074 ? 0 : 4)
3075 + ((GET_CODE (XEXP (x, 1)) == REG
3076 || (GET_CODE (XEXP (x, 1)) == SUBREG
3077 && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
3078 || (GET_CODE (XEXP (x, 1)) == CONST_INT))
3079 ? 0 : 4));
3081 case MINUS:
3082 if (mode == DImode)
3083 return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
3084 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
3085 || (GET_CODE (XEXP (x, 0)) == CONST_INT
3086 && const_ok_for_arm (INTVAL (XEXP (x, 0)))))
3087 ? 0 : 8));
3089 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3090 return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
3091 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
3092 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
3093 ? 0 : 8)
3094 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
3095 || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
3096 && const_double_rtx_ok_for_fpu (XEXP (x, 0))))
3097 ? 0 : 8));
3099 if (((GET_CODE (XEXP (x, 0)) == CONST_INT
3100 && const_ok_for_arm (INTVAL (XEXP (x, 0)))
3101 && REG_OR_SUBREG_REG (XEXP (x, 1))))
3102 || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
3103 || subcode == ASHIFTRT || subcode == LSHIFTRT
3104 || subcode == ROTATE || subcode == ROTATERT
3105 || (subcode == MULT
3106 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3107 && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
3108 (INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
3109 && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
3110 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
3111 || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
3112 && REG_OR_SUBREG_REG (XEXP (x, 0))))
3113 return 1;
3114 /* Fall through */
3116 case PLUS:
3117 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3118 return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
3119 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
3120 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
3121 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
3122 ? 0 : 8));
3124 /* Fall through */
3125 case AND: case XOR: case IOR:
3126 extra_cost = 0;
3128 /* Normally the frame registers will be split into reg+const during
3129 reload, so it is a bad idea to combine them with other instructions,
3130 since then they might not be moved outside of loops. As a compromise
3131 we allow integration with ops that have a constant as their second
3132 operand. */
3133 if ((REG_OR_SUBREG_REG (XEXP (x, 0))
3134 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
3135 && GET_CODE (XEXP (x, 1)) != CONST_INT)
3136 || (REG_OR_SUBREG_REG (XEXP (x, 1))
3137 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 1)))))
3138 extra_cost = 4;
3140 if (mode == DImode)
3141 return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
3142 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
3143 || (GET_CODE (XEXP (x, 1)) == CONST_INT
3144 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
3145 ? 0 : 8));
3147 if (REG_OR_SUBREG_REG (XEXP (x, 0)))
3148 return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
3149 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
3150 || (GET_CODE (XEXP (x, 1)) == CONST_INT
3151 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
3152 ? 0 : 4));
3154 else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
3155 return (1 + extra_cost
3156 + ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
3157 || subcode == LSHIFTRT || subcode == ASHIFTRT
3158 || subcode == ROTATE || subcode == ROTATERT
3159 || (subcode == MULT
3160 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3161 && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
3162 (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0)))
3163 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
3164 && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
3165 || GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
3166 ? 0 : 4));
3168 return 8;
3170 case MULT:
3171 /* There is no point basing this on the tuning, since it is always the
3172 fast variant if it exists at all. */
3173 if (arm_fast_multiply && mode == DImode
3174 && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
3175 && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
3176 || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
3177 return 8;
3179 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3180 || mode == DImode)
3181 return 30;
3183 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
3185 unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
3186 & (unsigned HOST_WIDE_INT) 0xffffffff);
3187 int add_cost = const_ok_for_arm (i) ? 4 : 8;
3188 int j;
3190 /* Tune as appropriate. */
3191 int booth_unit_size = ((tune_flags & FL_FAST_MULT) ? 8 : 2);
3193 for (j = 0; i && j < 32; j += booth_unit_size)
3195 i >>= booth_unit_size;
3196 add_cost += 2;
3199 return add_cost;
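/* Worked example, assuming the tuning has FL_FAST_MULT (8 bit booth
   unit): a multiply by 0x12345678 starts from add_cost 8 (the constant
   is not const_ok_for_arm) and takes four iterations (i becomes
   0x123456, 0x1234, 0x12, then 0), giving 8 + 4 * 2 == 16.  */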
3202 return (((tune_flags & FL_FAST_MULT) ? 8 : 30)
3203 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
3204 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));
3206 case TRUNCATE:
3207 if (arm_fast_multiply && mode == SImode
3208 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
3209 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
3210 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
3211 == GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)))
3212 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ZERO_EXTEND
3213 || GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SIGN_EXTEND))
3214 return 8;
3215 return 99;
3217 case NEG:
3218 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3219 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
3220 /* Fall through */
3221 case NOT:
3222 if (mode == DImode)
3223 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
3225 return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
3227 case IF_THEN_ELSE:
3228 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
3229 return 14;
3230 return 2;
3232 case COMPARE:
3233 return 1;
3235 case ABS:
3236 return 4 + (mode == DImode ? 4 : 0);
3238 case SIGN_EXTEND:
3239 if (GET_MODE (XEXP (x, 0)) == QImode)
3240 return (4 + (mode == DImode ? 4 : 0)
3241 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3242 /* Fall through */
3243 case ZERO_EXTEND:
3244 switch (GET_MODE (XEXP (x, 0)))
3246 case QImode:
3247 return (1 + (mode == DImode ? 4 : 0)
3248 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3250 case HImode:
3251 return (4 + (mode == DImode ? 4 : 0)
3252 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3254 case SImode:
3255 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
3257 default:
3258 break;
3260 abort ();
3262 case CONST_INT:
3263 if (const_ok_for_arm (INTVAL (x)))
3264 return outer == SET ? 2 : -1;
3265 else if (outer == AND
3266 && const_ok_for_arm (~INTVAL (x)))
3267 return -1;
3268 else if ((outer == COMPARE
3269 || outer == PLUS || outer == MINUS)
3270 && const_ok_for_arm (-INTVAL (x)))
3271 return -1;
3272 else
3273 return 5;
3275 case CONST:
3276 case LABEL_REF:
3277 case SYMBOL_REF:
3278 return 6;
3280 case CONST_DOUBLE:
3281 if (const_double_rtx_ok_for_fpu (x))
3282 return outer == SET ? 2 : -1;
3283 else if ((outer == COMPARE || outer == PLUS)
3284 && neg_const_double_rtx_ok_for_fpu (x))
3285 return -1;
3286 return 7;
3288 default:
3289 return 99;
3293 static int
3294 arm_adjust_cost (insn, link, dep, cost)
3295 rtx insn;
3296 rtx link;
3297 rtx dep;
3298 int cost;
3300 rtx i_pat, d_pat;
3302 /* Some true dependencies can have a higher cost depending
3303 on precisely how certain input operands are used. */
3304 if (arm_is_xscale
3305 && REG_NOTE_KIND (link) == 0
3306 && recog_memoized (insn) < 0
3307 && recog_memoized (dep) < 0)
3309 int shift_opnum = get_attr_shift (insn);
3310 enum attr_type attr_type = get_attr_type (dep);
3312 /* If nonzero, SHIFT_OPNUM contains the operand number of a shifted
3313 operand for INSN. If we have a shifted input operand and the
3314 instruction we depend on is another ALU instruction, then we may
3315 have to account for an additional stall. */
3316 if (shift_opnum != 0 && attr_type == TYPE_NORMAL)
3318 rtx shifted_operand;
3319 int opno;
3321 /* Get the shifted operand. */
3322 extract_insn (insn);
3323 shifted_operand = recog_data.operand[shift_opnum];
3325 /* Iterate over all the operands in DEP. If we write an operand
3326 that overlaps with SHIFTED_OPERAND, then we have to increase the
3327 cost of this dependency. */
3328 extract_insn (dep);
3329 preprocess_constraints ();
3330 for (opno = 0; opno < recog_data.n_operands; opno++)
3332 /* We can ignore strict inputs. */
3333 if (recog_data.operand_type[opno] == OP_IN)
3334 continue;
3336 if (reg_overlap_mentioned_p (recog_data.operand[opno],
3337 shifted_operand))
3338 return 2;
3343 /* XXX This is not strictly true for the FPA. */
3344 if (REG_NOTE_KIND (link) == REG_DEP_ANTI
3345 || REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
3346 return 0;
3348 /* Call insns don't incur a stall, even if they follow a load. */
3349 if (REG_NOTE_KIND (link) == 0
3350 && GET_CODE (insn) == CALL_INSN)
3351 return 1;
3353 if ((i_pat = single_set (insn)) != NULL
3354 && GET_CODE (SET_SRC (i_pat)) == MEM
3355 && (d_pat = single_set (dep)) != NULL
3356 && GET_CODE (SET_DEST (d_pat)) == MEM)
3358 rtx src_mem = XEXP (SET_SRC (i_pat), 0);
3359 /* This is a load after a store; there is no conflict if the load reads
3360 from a cached area. Assume that loads from the stack, and from the
3361 constant pool are cached, and that others will miss. This is a
3362 hack. */
3364 if ((GET_CODE (src_mem) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (src_mem))
3365 || reg_mentioned_p (stack_pointer_rtx, src_mem)
3366 || reg_mentioned_p (frame_pointer_rtx, src_mem)
3367 || reg_mentioned_p (hard_frame_pointer_rtx, src_mem))
3368 return 1;
3371 return cost;
3374 /* This code has been fixed for cross compilation. */
3376 static int fpa_consts_inited = 0;
3378 static const char * const strings_fpa[8] =
3380 "0", "1", "2", "3",
3381 "4", "5", "0.5", "10"
3384 static REAL_VALUE_TYPE values_fpa[8];
3386 static void
3387 init_fpa_table ()
3389 int i;
3390 REAL_VALUE_TYPE r;
3392 for (i = 0; i < 8; i++)
3394 r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
3395 values_fpa[i] = r;
3398 fpa_consts_inited = 1;
3401 /* Return TRUE if rtx X is a valid immediate FPU constant. */
3404 const_double_rtx_ok_for_fpu (x)
3405 rtx x;
3407 REAL_VALUE_TYPE r;
3408 int i;
3410 if (!fpa_consts_inited)
3411 init_fpa_table ();
3413 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3414 if (REAL_VALUE_MINUS_ZERO (r))
3415 return 0;
3417 for (i = 0; i < 8; i++)
3418 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
3419 return 1;
3421 return 0;
3424 /* Return TRUE if rtx X is a valid immediate FPU constant when negated. */
3427 neg_const_double_rtx_ok_for_fpu (x)
3428 rtx x;
3430 REAL_VALUE_TYPE r;
3431 int i;
3433 if (!fpa_consts_inited)
3434 init_fpa_table ();
3436 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3437 r = REAL_VALUE_NEGATE (r);
3438 if (REAL_VALUE_MINUS_ZERO (r))
3439 return 0;
3441 for (i = 0; i < 8; i++)
3442 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
3443 return 1;
3445 return 0;
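/* For example: the FPA immediates are exactly 0, 1, 2, 3, 4, 5, 0.5 and
   10, so a CONST_DOUBLE of 10.0 passes const_double_rtx_ok_for_fpu,
   -10.0 passes the negated variant above, and 6.0 passes neither and
   must be loaded from the constant pool.  */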
3448 /* Predicates for `match_operand' and `match_operator'. */
3450 /* s_register_operand is the same as register_operand, but it doesn't accept
3451 (SUBREG (MEM)...).
3453 This function exists because, at the time it was added, it led to better
3454 code. SUBREG(MEM) always needs a reload in the places where
3455 s_register_operand is used, and this seemed to lead to excessive
3456 reloading. */
3459 s_register_operand (op, mode)
3460 rtx op;
3461 enum machine_mode mode;
3463 if (GET_MODE (op) != mode && mode != VOIDmode)
3464 return 0;
3466 if (GET_CODE (op) == SUBREG)
3467 op = SUBREG_REG (op);
3469 /* We don't consider registers whose class is NO_REGS
3470 to be a register operand. */
3471 /* XXX might have to check for lo regs only for thumb ??? */
3472 return (GET_CODE (op) == REG
3473 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3474 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
3477 /* A hard register operand (even before reload). */
3480 arm_hard_register_operand (op, mode)
3481 rtx op;
3482 enum machine_mode mode;
3484 if (GET_MODE (op) != mode && mode != VOIDmode)
3485 return 0;
3487 return (GET_CODE (op) == REG
3488 && REGNO (op) < FIRST_PSEUDO_REGISTER);
3491 /* Only accept reg, subreg(reg), const_int. */
3494 reg_or_int_operand (op, mode)
3495 rtx op;
3496 enum machine_mode mode;
3498 if (GET_CODE (op) == CONST_INT)
3499 return 1;
3501 if (GET_MODE (op) != mode && mode != VOIDmode)
3502 return 0;
3504 if (GET_CODE (op) == SUBREG)
3505 op = SUBREG_REG (op);
3507 /* We don't consider registers whose class is NO_REGS
3508 to be a register operand. */
3509 return (GET_CODE (op) == REG
3510 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3511 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
3514 /* Return 1 if OP is an item in memory, given that we are in reload. */
3517 arm_reload_memory_operand (op, mode)
3518 rtx op;
3519 enum machine_mode mode ATTRIBUTE_UNUSED;
3521 int regno = true_regnum (op);
3523 return (!CONSTANT_P (op)
3524 && (regno == -1
3525 || (GET_CODE (op) == REG
3526 && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
3529 /* Return 1 if OP is a valid memory address, but not valid for a signed byte
3530 memory access (architecture V4).
3531 MODE is QImode if called when computing constraints, or VOIDmode when
3532 emitting patterns. In this latter case we cannot use memory_operand()
3533 because it will fail on badly formed MEMs, which is precisely what we are
3534 trying to catch. */
3537 bad_signed_byte_operand (op, mode)
3538 rtx op;
3539 enum machine_mode mode ATTRIBUTE_UNUSED;
3541 #if 0
3542 if ((mode == QImode && !memory_operand (op, mode)) || GET_CODE (op) != MEM)
3543 return 0;
3544 #endif
3545 if (GET_CODE (op) != MEM)
3546 return 0;
3548 op = XEXP (op, 0);
3550 /* A sum of anything more complex than reg + reg or reg + const is bad. */
3551 if ((GET_CODE (op) == PLUS || GET_CODE (op) == MINUS)
3552 && (!s_register_operand (XEXP (op, 0), VOIDmode)
3553 || (!s_register_operand (XEXP (op, 1), VOIDmode)
3554 && GET_CODE (XEXP (op, 1)) != CONST_INT)))
3555 return 1;
3557 /* Big constants are also bad. */
3558 if (GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT
3559 && (INTVAL (XEXP (op, 1)) > 0xff
3560 || -INTVAL (XEXP (op, 1)) > 0xff))
3561 return 1;
3563 /* Everything else is good, or will automatically be made so. */
3564 return 0;
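/* For instance: [r0, r1] and [r0, #200] are acceptable for ldrsb,
   whereas [r0, r1, lsl #2] (a non-register index) and [r0, #256]
   (offset beyond +/-255) are flagged as bad and must be legitimized
   first.  */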
3567 /* Return TRUE for valid operands for the rhs of an ARM instruction. */
3570 arm_rhs_operand (op, mode)
3571 rtx op;
3572 enum machine_mode mode;
3574 return (s_register_operand (op, mode)
3575 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
3578 /* Return TRUE for valid operands for the
3579 rhs of an ARM instruction, or a load. */
3582 arm_rhsm_operand (op, mode)
3583 rtx op;
3584 enum machine_mode mode;
3586 return (s_register_operand (op, mode)
3587 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
3588 || memory_operand (op, mode));
3591 /* Return TRUE for valid operands for the rhs of an ARM instruction, or a
3592 constant that is valid when negated. */
3595 arm_add_operand (op, mode)
3596 rtx op;
3597 enum machine_mode mode;
3599 if (TARGET_THUMB)
3600 return thumb_cmp_operand (op, mode);
3602 return (s_register_operand (op, mode)
3603 || (GET_CODE (op) == CONST_INT
3604 && (const_ok_for_arm (INTVAL (op))
3605 || const_ok_for_arm (-INTVAL (op)))));
3609 arm_not_operand (op, mode)
3610 rtx op;
3611 enum machine_mode mode;
3613 return (s_register_operand (op, mode)
3614 || (GET_CODE (op) == CONST_INT
3615 && (const_ok_for_arm (INTVAL (op))
3616 || const_ok_for_arm (~INTVAL (op)))));
3619 /* Return TRUE if the operand is a memory reference which contains an
3620 offsettable address. */
3623 offsettable_memory_operand (op, mode)
3624 rtx op;
3625 enum machine_mode mode;
3627 if (mode == VOIDmode)
3628 mode = GET_MODE (op);
3630 return (mode == GET_MODE (op)
3631 && GET_CODE (op) == MEM
3632 && offsettable_address_p (reload_completed | reload_in_progress,
3633 mode, XEXP (op, 0)));
3636 /* Return TRUE if the operand is a memory reference which is, or can be
3637 made word aligned by adjusting the offset. */
3640 alignable_memory_operand (op, mode)
3641 rtx op;
3642 enum machine_mode mode;
3644 rtx reg;
3646 if (mode == VOIDmode)
3647 mode = GET_MODE (op);
3649 if (mode != GET_MODE (op) || GET_CODE (op) != MEM)
3650 return 0;
3652 op = XEXP (op, 0);
3654 return ((GET_CODE (reg = op) == REG
3655 || (GET_CODE (op) == SUBREG
3656 && GET_CODE (reg = SUBREG_REG (op)) == REG)
3657 || (GET_CODE (op) == PLUS
3658 && GET_CODE (XEXP (op, 1)) == CONST_INT
3659 && (GET_CODE (reg = XEXP (op, 0)) == REG
3660 || (GET_CODE (XEXP (op, 0)) == SUBREG
3661 && GET_CODE (reg = SUBREG_REG (XEXP (op, 0))) == REG))))
3662 && REGNO_POINTER_ALIGN (REGNO (reg)) >= 32);
3665 /* Similar to s_register_operand, but does not allow hard integer
3666 registers. */
3669 f_register_operand (op, mode)
3670 rtx op;
3671 enum machine_mode mode;
3673 if (GET_MODE (op) != mode && mode != VOIDmode)
3674 return 0;
3676 if (GET_CODE (op) == SUBREG)
3677 op = SUBREG_REG (op);
3679 /* We don't consider registers whose class is NO_REGS
3680 to be a register operand. */
3681 return (GET_CODE (op) == REG
3682 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3683 || REGNO_REG_CLASS (REGNO (op)) == FPU_REGS));
3686 /* Return TRUE for valid operands for the rhs of an FPU instruction. */
3689 fpu_rhs_operand (op, mode)
3690 rtx op;
3691 enum machine_mode mode;
3693 if (s_register_operand (op, mode))
3694 return TRUE;
3696 if (GET_MODE (op) != mode && mode != VOIDmode)
3697 return FALSE;
3699 if (GET_CODE (op) == CONST_DOUBLE)
3700 return const_double_rtx_ok_for_fpu (op);
3702 return FALSE;
3706 fpu_add_operand (op, mode)
3707 rtx op;
3708 enum machine_mode mode;
3710 if (s_register_operand (op, mode))
3711 return TRUE;
3713 if (GET_MODE (op) != mode && mode != VOIDmode)
3714 return FALSE;
3716 if (GET_CODE (op) == CONST_DOUBLE)
3717 return (const_double_rtx_ok_for_fpu (op)
3718 || neg_const_double_rtx_ok_for_fpu (op));
3720 return FALSE;
3723 /* Return nonzero if OP is a constant power of two. */
3726 power_of_two_operand (op, mode)
3727 rtx op;
3728 enum machine_mode mode ATTRIBUTE_UNUSED;
3730 if (GET_CODE (op) == CONST_INT)
3732 HOST_WIDE_INT value = INTVAL (op);
3734 return value != 0 && (value & (value - 1)) == 0;
3737 return FALSE;
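/* The value & (value - 1) test above clears the lowest set bit, so it
   yields zero exactly when at most one bit is set; e.g. 8 & 7 is
   01000b & 00111b == 0, while 12 & 11 is 01100b & 01011b == 01000b.  */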
3740 /* Return TRUE for a valid operand of a DImode operation.
3741 Either: REG, SUBREG, CONST_INT, CONST_DOUBLE or MEM(DImode_address).
3742 Note that this disallows MEM(REG+REG), but allows
3743 MEM(PRE/POST_INC/DEC(REG)). */
3746 di_operand (op, mode)
3747 rtx op;
3748 enum machine_mode mode;
3750 if (s_register_operand (op, mode))
3751 return TRUE;
3753 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
3754 return FALSE;
3756 if (GET_CODE (op) == SUBREG)
3757 op = SUBREG_REG (op);
3759 switch (GET_CODE (op))
3761 case CONST_DOUBLE:
3762 case CONST_INT:
3763 return TRUE;
3765 case MEM:
3766 return memory_address_p (DImode, XEXP (op, 0));
3768 default:
3769 return FALSE;
3773 /* Like di_operand, but don't accept constants. */
3776 nonimmediate_di_operand (op, mode)
3777 rtx op;
3778 enum machine_mode mode;
3780 if (s_register_operand (op, mode))
3781 return TRUE;
3783 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
3784 return FALSE;
3786 if (GET_CODE (op) == SUBREG)
3787 op = SUBREG_REG (op);
3789 if (GET_CODE (op) == MEM)
3790 return memory_address_p (DImode, XEXP (op, 0));
3792 return FALSE;
3795 /* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
3796 Either: REG, SUBREG, CONST_DOUBLE or MEM(DFmode_address).
3797 Note that this disallows MEM(REG+REG), but allows
3798 MEM(PRE/POST_INC/DEC(REG)). */
3801 soft_df_operand (op, mode)
3802 rtx op;
3803 enum machine_mode mode;
3805 if (s_register_operand (op, mode))
3806 return TRUE;
3808 if (mode != VOIDmode && GET_MODE (op) != mode)
3809 return FALSE;
3811 if (GET_CODE (op) == SUBREG && CONSTANT_P (SUBREG_REG (op)))
3812 return FALSE;
3814 if (GET_CODE (op) == SUBREG)
3815 op = SUBREG_REG (op);
3817 switch (GET_CODE (op))
3819 case CONST_DOUBLE:
3820 return TRUE;
3822 case MEM:
3823 return memory_address_p (DFmode, XEXP (op, 0));
3825 default:
3826 return FALSE;
3830 /* Like soft_df_operand, but don't accept constants. */
3833 nonimmediate_soft_df_operand (op, mode)
3834 rtx op;
3835 enum machine_mode mode;
3837 if (s_register_operand (op, mode))
3838 return TRUE;
3840 if (mode != VOIDmode && GET_MODE (op) != mode)
3841 return FALSE;
3843 if (GET_CODE (op) == SUBREG)
3844 op = SUBREG_REG (op);
3846 if (GET_CODE (op) == MEM)
3847 return memory_address_p (DFmode, XEXP (op, 0));
3848 return FALSE;
3851 /* Return TRUE for valid index operands. */
3854 index_operand (op, mode)
3855 rtx op;
3856 enum machine_mode mode;
3858 return (s_register_operand (op, mode)
3859 || (immediate_operand (op, mode)
3860 && (GET_CODE (op) != CONST_INT
3861 || (INTVAL (op) < 4096 && INTVAL (op) > -4096))));
3864 /* Return TRUE for valid shifts by a constant. This also accepts any
3865 power of two on the (somewhat overly relaxed) assumption that the
3866 shift operator in this case was a mult. */
3869 const_shift_operand (op, mode)
3870 rtx op;
3871 enum machine_mode mode;
3873 return (power_of_two_operand (op, mode)
3874 || (immediate_operand (op, mode)
3875 && (GET_CODE (op) != CONST_INT
3876 || (INTVAL (op) < 32 && INTVAL (op) > 0))));
3879 /* Return TRUE for arithmetic operators which can be combined with a multiply
3880 (shift). */
3883 shiftable_operator (x, mode)
3884 rtx x;
3885 enum machine_mode mode;
3887 enum rtx_code code;
3889 if (GET_MODE (x) != mode)
3890 return FALSE;
3892 code = GET_CODE (x);
3894 return (code == PLUS || code == MINUS
3895 || code == IOR || code == XOR || code == AND);
3898 /* Return TRUE for binary logical operators. */
3901 logical_binary_operator (x, mode)
3902 rtx x;
3903 enum machine_mode mode;
3905 enum rtx_code code;
3907 if (GET_MODE (x) != mode)
3908 return FALSE;
3910 code = GET_CODE (x);
3912 return (code == IOR || code == XOR || code == AND);
3915 /* Return TRUE for shift operators. */
3918 shift_operator (x, mode)
3919 rtx x;
3920 enum machine_mode mode;
3922 enum rtx_code code;
3924 if (GET_MODE (x) != mode)
3925 return FALSE;
3927 code = GET_CODE (x);
3929 if (code == MULT)
3930 return power_of_two_operand (XEXP (x, 1), mode);
3932 return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
3933 || code == ROTATERT);
3936 /* Return TRUE if x is EQ or NE. */
3939 equality_operator (x, mode)
3940 rtx x;
3941 enum machine_mode mode ATTRIBUTE_UNUSED;
3943 return GET_CODE (x) == EQ || GET_CODE (x) == NE;
3946 /* Return TRUE if x is a comparison operator other than LTGT or UNEQ. */
3949 arm_comparison_operator (x, mode)
3950 rtx x;
3951 enum machine_mode mode;
3953 return (comparison_operator (x, mode)
3954 && GET_CODE (x) != LTGT
3955 && GET_CODE (x) != UNEQ);
3958 /* Return TRUE for SMIN SMAX UMIN UMAX operators. */
3961 minmax_operator (x, mode)
3962 rtx x;
3963 enum machine_mode mode;
3965 enum rtx_code code = GET_CODE (x);
3967 if (GET_MODE (x) != mode)
3968 return FALSE;
3970 return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
3973 /* Return TRUE if this is the condition code register; if we aren't given
3974 a mode, accept any class CCmode register. */
3977 cc_register (x, mode)
3978 rtx x;
3979 enum machine_mode mode;
3981 if (mode == VOIDmode)
3983 mode = GET_MODE (x);
3985 if (GET_MODE_CLASS (mode) != MODE_CC)
3986 return FALSE;
3989 if ( GET_MODE (x) == mode
3990 && GET_CODE (x) == REG
3991 && REGNO (x) == CC_REGNUM)
3992 return TRUE;
3994 return FALSE;
3997 /* Return TRUE if this is the condition code register; if we aren't given
3998 a mode, accept any class CCmode register which indicates a dominance
3999 expression. */
4002 dominant_cc_register (x, mode)
4003 rtx x;
4004 enum machine_mode mode;
4006 if (mode == VOIDmode)
4008 mode = GET_MODE (x);
4010 if (GET_MODE_CLASS (mode) != MODE_CC)
4011 return FALSE;
4014 if ( mode != CC_DNEmode && mode != CC_DEQmode
4015 && mode != CC_DLEmode && mode != CC_DLTmode
4016 && mode != CC_DGEmode && mode != CC_DGTmode
4017 && mode != CC_DLEUmode && mode != CC_DLTUmode
4018 && mode != CC_DGEUmode && mode != CC_DGTUmode)
4019 return FALSE;
4021 return cc_register (x, mode);
4024 /* Return TRUE if X references a SYMBOL_REF. */
4027 symbol_mentioned_p (x)
4028 rtx x;
4030 const char * fmt;
4031 int i;
4033 if (GET_CODE (x) == SYMBOL_REF)
4034 return 1;
4036 fmt = GET_RTX_FORMAT (GET_CODE (x));
4038 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4040 if (fmt[i] == 'E')
4042 int j;
4044 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4045 if (symbol_mentioned_p (XVECEXP (x, i, j)))
4046 return 1;
4048 else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
4049 return 1;
4052 return 0;
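/* For example (an invented rtx), symbol_mentioned_p walks into
   (const:SI (plus:SI (symbol_ref:SI ("x")) (const_int 4))) and
   returns 1 for the buried SYMBOL_REF, while a bare (reg:SI 0)
   yields 0.  */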
4055 /* Return TRUE if X references a LABEL_REF. */
4058 label_mentioned_p (x)
4059 rtx x;
4061 const char * fmt;
4062 int i;
4064 if (GET_CODE (x) == LABEL_REF)
4065 return 1;
4067 fmt = GET_RTX_FORMAT (GET_CODE (x));
4068 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4070 if (fmt[i] == 'E')
4072 int j;
4074 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4075 if (label_mentioned_p (XVECEXP (x, i, j)))
4076 return 1;
4078 else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
4079 return 1;
4082 return 0;
4085 enum rtx_code
4086 minmax_code (x)
4087 rtx x;
4089 enum rtx_code code = GET_CODE (x);
4091 if (code == SMAX)
4092 return GE;
4093 else if (code == SMIN)
4094 return LE;
4095 else if (code == UMIN)
4096 return LEU;
4097 else if (code == UMAX)
4098 return GEU;
4100 abort ();
4103 /* Return 1 if memory locations are adjacent. */
4106 adjacent_mem_locations (a, b)
4107 rtx a, b;
4109 if ((GET_CODE (XEXP (a, 0)) == REG
4110 || (GET_CODE (XEXP (a, 0)) == PLUS
4111 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
4112 && (GET_CODE (XEXP (b, 0)) == REG
4113 || (GET_CODE (XEXP (b, 0)) == PLUS
4114 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
4116 int val0 = 0, val1 = 0;
4117 int reg0, reg1;
4119 if (GET_CODE (XEXP (a, 0)) == PLUS)
4121 reg0 = REGNO (XEXP (XEXP (a, 0), 0));
4122 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
4124 else
4125 reg0 = REGNO (XEXP (a, 0));
4127 if (GET_CODE (XEXP (b, 0)) == PLUS)
4129 reg1 = REGNO (XEXP (XEXP (b, 0), 0));
4130 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
4132 else
4133 reg1 = REGNO (XEXP (b, 0));
4135 return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
4137 return 0;
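/* A sketch of what qualifies (addresses invented for exposition):

	(mem:SI (reg:SI 4))
	(mem:SI (plus:SI (reg:SI 4) (const_int 4)))

   are adjacent, in either order, since their offsets differ by
   exactly 4; offsets differing by 8, or the same offsets from
   different base registers, are not.  */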
4140 /* Return 1 if OP is a load multiple operation. It is known to be a
4141 PARALLEL and the first element will be tested. */
4144 load_multiple_operation (op, mode)
4145 rtx op;
4146 enum machine_mode mode ATTRIBUTE_UNUSED;
4148 HOST_WIDE_INT count = XVECLEN (op, 0);
4149 int dest_regno;
4150 rtx src_addr;
4151 HOST_WIDE_INT i = 1, base = 0;
4152 rtx elt;
4154 if (count <= 1
4155 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
4156 return 0;
4158 /* Check to see if this might be a write-back. */
4159 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
4161 i++;
4162 base = 1;
4164 /* Now check it more carefully. */
4165 if (GET_CODE (SET_DEST (elt)) != REG
4166 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
4167 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
4168 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
4169 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
4170 return 0;
4173 /* Perform a quick check so we don't blow up below. */
4174 if (count <= i
4175 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
4176 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
4177 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
4178 return 0;
4180 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
4181 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);
4183 for (; i < count; i++)
4185 elt = XVECEXP (op, 0, i);
4187 if (GET_CODE (elt) != SET
4188 || GET_CODE (SET_DEST (elt)) != REG
4189 || GET_MODE (SET_DEST (elt)) != SImode
4190 || REGNO (SET_DEST (elt)) != (unsigned int)(dest_regno + i - base)
4191 || GET_CODE (SET_SRC (elt)) != MEM
4192 || GET_MODE (SET_SRC (elt)) != SImode
4193 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
4194 || !rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
4195 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
4196 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
4197 return 0;
4200 return 1;
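/* A minimal PARALLEL accepted here, with register numbers invented
   for exposition and no write-back:

	(parallel [(set (reg:SI 4) (mem:SI (reg:SI 0)))
		   (set (reg:SI 5) (mem:SI (plus:SI (reg:SI 0)
						    (const_int 4))))])

   The destinations must be consecutive SImode registers and each
   address must step the base by 4 bytes.  */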
4203 /* Return 1 if OP is a store multiple operation. It is known to be a
4204 PARALLEL and the first element will be tested. */
4207 store_multiple_operation (op, mode)
4208 rtx op;
4209 enum machine_mode mode ATTRIBUTE_UNUSED;
4211 HOST_WIDE_INT count = XVECLEN (op, 0);
4212 int src_regno;
4213 rtx dest_addr;
4214 HOST_WIDE_INT i = 1, base = 0;
4215 rtx elt;
4217 if (count <= 1
4218 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
4219 return 0;
4221 /* Check to see if this might be a write-back. */
4222 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
4224 i++;
4225 base = 1;
4227 /* Now check it more carefully. */
4228 if (GET_CODE (SET_DEST (elt)) != REG
4229 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
4230 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
4231 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
4232 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
4233 return 0;
4236 /* Perform a quick check so we don't blow up below. */
4237 if (count <= i
4238 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
4239 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
4240 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
4241 return 0;
4243 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
4244 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);
4246 for (; i < count; i++)
4248 elt = XVECEXP (op, 0, i);
4250 if (GET_CODE (elt) != SET
4251 || GET_CODE (SET_SRC (elt)) != REG
4252 || GET_MODE (SET_SRC (elt)) != SImode
4253 || REGNO (SET_SRC (elt)) != (unsigned int)(src_regno + i - base)
4254 || GET_CODE (SET_DEST (elt)) != MEM
4255 || GET_MODE (SET_DEST (elt)) != SImode
4256 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
4257 || !rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
4258 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
4259 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
4260 return 0;
4263 return 1;
4267 load_multiple_sequence (operands, nops, regs, base, load_offset)
4268 rtx * operands;
4269 int nops;
4270 int * regs;
4271 int * base;
4272 HOST_WIDE_INT * load_offset;
4274 int unsorted_regs[4];
4275 HOST_WIDE_INT unsorted_offsets[4];
4276 int order[4];
4277 int base_reg = -1;
4278 int i;
4280 /* Can only handle 2, 3, or 4 insns at present,
4281 though could be easily extended if required. */
4282 if (nops < 2 || nops > 4)
4283 abort ();
4285 /* Loop over the operands and check that the memory references are
4286 suitable (i.e. immediate offsets from the same base register). At
4287 the same time, extract the target register and the memory
4288 offsets. */
4289 for (i = 0; i < nops; i++)
4291 rtx reg;
4292 rtx offset;
4294 /* Convert a subreg of a mem into the mem itself. */
4295 if (GET_CODE (operands[nops + i]) == SUBREG)
4296 operands[nops + i] = alter_subreg (operands + (nops + i));
4298 if (GET_CODE (operands[nops + i]) != MEM)
4299 abort ();
4301 /* Don't reorder volatile memory references; it doesn't seem worth
4302 looking for the case where the order is ok anyway. */
4303 if (MEM_VOLATILE_P (operands[nops + i]))
4304 return 0;
4306 offset = const0_rtx;
4308 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
4309 || (GET_CODE (reg) == SUBREG
4310 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4311 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
4312 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
4313 == REG)
4314 || (GET_CODE (reg) == SUBREG
4315 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4316 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
4317 == CONST_INT)))
4319 if (i == 0)
4321 base_reg = REGNO (reg);
4322 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
4323 ? REGNO (operands[i])
4324 : REGNO (SUBREG_REG (operands[i])));
4325 order[0] = 0;
4327 else
4329 if (base_reg != (int) REGNO (reg))
4330 /* Not addressed from the same base register. */
4331 return 0;
4333 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
4334 ? REGNO (operands[i])
4335 : REGNO (SUBREG_REG (operands[i])));
4336 if (unsorted_regs[i] < unsorted_regs[order[0]])
4337 order[0] = i;
4340 /* If it isn't an integer register, or if it overwrites the
4341 base register but isn't the last insn in the list, then
4342 we can't do this. */
4343 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14
4344 || (i != nops - 1 && unsorted_regs[i] == base_reg))
4345 return 0;
4347 unsorted_offsets[i] = INTVAL (offset);
4349 else
4350 /* Not a suitable memory address. */
4351 return 0;
4354 /* All the useful information has now been extracted from the
4355 operands into unsorted_regs and unsorted_offsets; additionally,
4356 order[0] has been set to the lowest numbered register in the
4357 list. Sort the registers into order, and check that the memory
4358 offsets are ascending and adjacent. */
4360 for (i = 1; i < nops; i++)
4362 int j;
4364 order[i] = order[i - 1];
4365 for (j = 0; j < nops; j++)
4366 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
4367 && (order[i] == order[i - 1]
4368 || unsorted_regs[j] < unsorted_regs[order[i]]))
4369 order[i] = j;
4371 /* Have we found a suitable register? If not, one must be used more
4372 than once. */
4373 if (order[i] == order[i - 1])
4374 return 0;
4376 /* Is the memory address adjacent and ascending? */
4377 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
4378 return 0;
4381 if (base)
4383 *base = base_reg;
4385 for (i = 0; i < nops; i++)
4386 regs[i] = unsorted_regs[order[i]];
4388 *load_offset = unsorted_offsets[order[0]];
4391 if (unsorted_offsets[order[0]] == 0)
4392 return 1; /* ldmia */
4394 if (unsorted_offsets[order[0]] == 4)
4395 return 2; /* ldmib */
4397 if (unsorted_offsets[order[nops - 1]] == 0)
4398 return 3; /* ldmda */
4400 if (unsorted_offsets[order[nops - 1]] == -4)
4401 return 4; /* ldmdb */
4403 /* For ARM8,9 & StrongARM, 2 ldr instructions are faster than an ldm
4404 if the offset isn't small enough. The reason 2 ldrs are faster
4405 is because these ARMs are able to do more than one cache access
4406 in a single cycle. The ARM9 and StrongARM have Harvard caches,
4407 whilst the ARM8 has a double bandwidth cache. This means that
4408 these cores can do both an instruction fetch and a data fetch in
4409 a single cycle, so the trick of calculating the address into a
4410 scratch register (one of the result regs) and then doing a load
4411 multiple actually becomes slower (and no smaller in code size).
4412 That is the transformation
4414 ldr rd1, [rbase + offset]
4415 ldr rd2, [rbase + offset + 4]
4417 to
4419 add rd1, rbase, offset
4420 ldmia rd1, {rd1, rd2}
4422 produces worse code -- '3 cycles + any stalls on rd2' instead of
4423 '2 cycles + any stalls on rd2'. On ARMs with only one cache
4424 access per cycle, the first sequence could never complete in less
4425 than 6 cycles, whereas the ldm sequence would only take 5 and
4426 would make better use of sequential accesses if not hitting the
4427 cache.
4429 We cheat here and test 'arm_ld_sched' which we currently know to
4430 only be true for the ARM8, ARM9 and StrongARM. If this ever
4431 changes, then the test below needs to be reworked. */
4432 if (nops == 2 && arm_ld_sched)
4433 return 0;
4435 /* Can't do it without setting up the offset; only do this if it takes
4436 no more than one insn. */
4437 return (const_ok_for_arm (unsorted_offsets[order[0]])
4438 || const_ok_for_arm (-unsorted_offsets[order[0]])) ? 5 : 0;
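/* Worked examples of the return values (operands invented for
   exposition): two loads from [r5] and [r5, #4] sort to offsets 0/4
   and return 1 (ldmia); offsets 4/8 return 2 (ldmib); -4/0 return 3
   (ldmda); -8/-4 return 4 (ldmdb).  Other offsets that a single
   add/sub can materialize return 5, except in the two-register case
   on an ldr-scheduled core, which prefers two ldrs as described
   above.  */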
4441 const char *
4442 emit_ldm_seq (operands, nops)
4443 rtx * operands;
4444 int nops;
4446 int regs[4];
4447 int base_reg;
4448 HOST_WIDE_INT offset;
4449 char buf[100];
4450 int i;
4452 switch (load_multiple_sequence (operands, nops, regs, &base_reg, &offset))
4454 case 1:
4455 strcpy (buf, "ldm%?ia\t");
4456 break;
4458 case 2:
4459 strcpy (buf, "ldm%?ib\t");
4460 break;
4462 case 3:
4463 strcpy (buf, "ldm%?da\t");
4464 break;
4466 case 4:
4467 strcpy (buf, "ldm%?db\t");
4468 break;
4470 case 5:
4471 if (offset >= 0)
4472 sprintf (buf, "add%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
4473 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
4474 (long) offset);
4475 else
4476 sprintf (buf, "sub%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
4477 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
4478 (long) -offset);
4479 output_asm_insn (buf, operands);
4480 base_reg = regs[0];
4481 strcpy (buf, "ldm%?ia\t");
4482 break;
4484 default:
4485 abort ();
4488 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
4489 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
4491 for (i = 1; i < nops; i++)
4492 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
4493 reg_names[regs[i]]);
4495 strcat (buf, "}\t%@ phole ldm");
4497 output_asm_insn (buf, operands);
4498 return "";
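/* For instance (register choice invented for exposition), a two-word
   sequence based at r0 with a lowest offset of zero assembles as

	ldmia	r0, {r1, r2}	@ phole ldm

   while a sequence whose offset needs materializing first emits

	add	r1, r0, #256
	ldmia	r1, {r1, r2}	@ phole ldm  */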
4502 store_multiple_sequence (operands, nops, regs, base, load_offset)
4503 rtx * operands;
4504 int nops;
4505 int * regs;
4506 int * base;
4507 HOST_WIDE_INT * load_offset;
4509 int unsorted_regs[4];
4510 HOST_WIDE_INT unsorted_offsets[4];
4511 int order[4];
4512 int base_reg = -1;
4513 int i;
4515 /* Can only handle 2, 3, or 4 insns at present, though could be easily
4516 extended if required. */
4517 if (nops < 2 || nops > 4)
4518 abort ();
4520 /* Loop over the operands and check that the memory references are
4521 suitable (i.e. immediate offsets from the same base register). At
4522 the same time, extract the target register and the memory
4523 offsets. */
4524 for (i = 0; i < nops; i++)
4526 rtx reg;
4527 rtx offset;
4529 /* Convert a subreg of a mem into the mem itself. */
4530 if (GET_CODE (operands[nops + i]) == SUBREG)
4531 operands[nops + i] = alter_subreg (operands + (nops + i));
4533 if (GET_CODE (operands[nops + i]) != MEM)
4534 abort ();
4536 /* Don't reorder volatile memory references; it doesn't seem worth
4537 looking for the case where the order is ok anyway. */
4538 if (MEM_VOLATILE_P (operands[nops + i]))
4539 return 0;
4541 offset = const0_rtx;
4543 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
4544 || (GET_CODE (reg) == SUBREG
4545 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4546 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
4547 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
4548 == REG)
4549 || (GET_CODE (reg) == SUBREG
4550 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4551 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
4552 == CONST_INT)))
4554 if (i == 0)
4556 base_reg = REGNO (reg);
4557 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
4558 ? REGNO (operands[i])
4559 : REGNO (SUBREG_REG (operands[i])));
4560 order[0] = 0;
4562 else
4564 if (base_reg != (int) REGNO (reg))
4565 /* Not addressed from the same base register. */
4566 return 0;
4568 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
4569 ? REGNO (operands[i])
4570 : REGNO (SUBREG_REG (operands[i])));
4571 if (unsorted_regs[i] < unsorted_regs[order[0]])
4572 order[0] = i;
4575 /* If it isn't an integer register, then we can't do this. */
4576 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14)
4577 return 0;
4579 unsorted_offsets[i] = INTVAL (offset);
4581 else
4582 /* Not a suitable memory address. */
4583 return 0;
4586 /* All the useful information has now been extracted from the
4587 operands into unsorted_regs and unsorted_offsets; additionally,
4588 order[0] has been set to the lowest numbered register in the
4589 list. Sort the registers into order, and check that the memory
4590 offsets are ascending and adjacent. */
4592 for (i = 1; i < nops; i++)
4594 int j;
4596 order[i] = order[i - 1];
4597 for (j = 0; j < nops; j++)
4598 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
4599 && (order[i] == order[i - 1]
4600 || unsorted_regs[j] < unsorted_regs[order[i]]))
4601 order[i] = j;
4603 /* Have we found a suitable register? If not, one must be used more
4604 than once. */
4605 if (order[i] == order[i - 1])
4606 return 0;
4608 /* Is the memory address adjacent and ascending? */
4609 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
4610 return 0;
4613 if (base)
4615 *base = base_reg;
4617 for (i = 0; i < nops; i++)
4618 regs[i] = unsorted_regs[order[i]];
4620 *load_offset = unsorted_offsets[order[0]];
4623 if (unsorted_offsets[order[0]] == 0)
4624 return 1; /* stmia */
4626 if (unsorted_offsets[order[0]] == 4)
4627 return 2; /* stmib */
4629 if (unsorted_offsets[order[nops - 1]] == 0)
4630 return 3; /* stmda */
4632 if (unsorted_offsets[order[nops - 1]] == -4)
4633 return 4; /* stmdb */
4635 return 0;
4638 const char *
4639 emit_stm_seq (operands, nops)
4640 rtx * operands;
4641 int nops;
4643 int regs[4];
4644 int base_reg;
4645 HOST_WIDE_INT offset;
4646 char buf[100];
4647 int i;
4649 switch (store_multiple_sequence (operands, nops, regs, &base_reg, &offset))
4651 case 1:
4652 strcpy (buf, "stm%?ia\t");
4653 break;
4655 case 2:
4656 strcpy (buf, "stm%?ib\t");
4657 break;
4659 case 3:
4660 strcpy (buf, "stm%?da\t");
4661 break;
4663 case 4:
4664 strcpy (buf, "stm%?db\t");
4665 break;
4667 default:
4668 abort ();
4671 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
4672 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
4674 for (i = 1; i < nops; i++)
4675 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
4676 reg_names[regs[i]]);
4678 strcat (buf, "}\t%@ phole stm");
4680 output_asm_insn (buf, operands);
4681 return "";
4685 multi_register_push (op, mode)
4686 rtx op;
4687 enum machine_mode mode ATTRIBUTE_UNUSED;
4689 if (GET_CODE (op) != PARALLEL
4690 || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
4691 || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
4692 || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != UNSPEC_PUSH_MULT))
4693 return 0;
4695 return 1;
4698 /* Routines for use in generating RTL. */
4701 arm_gen_load_multiple (base_regno, count, from, up, write_back, unchanging_p,
4702 in_struct_p, scalar_p)
4703 int base_regno;
4704 int count;
4705 rtx from;
4706 int up;
4707 int write_back;
4708 int unchanging_p;
4709 int in_struct_p;
4710 int scalar_p;
4712 int i = 0, j;
4713 rtx result;
4714 int sign = up ? 1 : -1;
4715 rtx mem;
4717 /* XScale has load-store double instructions, but they have stricter
4718 alignment requirements than load-store multiple, so we can not
4719 use them.
4721 For XScale ldm requires 2 + NREGS cycles to complete and blocks
4722 the pipeline until completion.
4724 NREGS CYCLES
4725 1 3
4726 2 4
4727 3 5
4728 4 6
4730 An ldr instruction takes 1-3 cycles, but does not block the
4731 pipeline.
4733 NREGS CYCLES
4734 1 1-3
4735 2 2-6
4736 3 3-9
4737 4 4-12
4739 Best case ldr will always win. However, the more ldr instructions
4740 we issue, the less likely we are to be able to schedule them well.
4741 Using ldr instructions also increases code size.
4743 As a compromise, we use ldr for counts of 1 or 2 regs, and ldm
4744 for counts of 3 or 4 regs. */
4745 if (arm_is_xscale && count <= 2 && ! optimize_size)
4747 rtx seq;
4749 start_sequence ();
4751 for (i = 0; i < count; i++)
4753 mem = gen_rtx_MEM (SImode, plus_constant (from, i * 4 * sign));
4754 RTX_UNCHANGING_P (mem) = unchanging_p;
4755 MEM_IN_STRUCT_P (mem) = in_struct_p;
4756 MEM_SCALAR_P (mem) = scalar_p;
4757 emit_move_insn (gen_rtx_REG (SImode, base_regno + i), mem);
4760 if (write_back)
4761 emit_move_insn (from, plus_constant (from, count * 4 * sign));
4763 seq = get_insns ();
4764 end_sequence ();
4766 return seq;
4769 result = gen_rtx_PARALLEL (VOIDmode,
4770 rtvec_alloc (count + (write_back ? 1 : 0)));
4771 if (write_back)
4773 XVECEXP (result, 0, 0)
4774 = gen_rtx_SET (GET_MODE (from), from,
4775 plus_constant (from, count * 4 * sign));
4776 i = 1;
4777 count++;
4780 for (j = 0; i < count; i++, j++)
4782 mem = gen_rtx_MEM (SImode, plus_constant (from, j * 4 * sign));
4783 RTX_UNCHANGING_P (mem) = unchanging_p;
4784 MEM_IN_STRUCT_P (mem) = in_struct_p;
4785 MEM_SCALAR_P (mem) = scalar_p;
4786 XVECEXP (result, 0, i)
4787 = gen_rtx_SET (VOIDmode, gen_rtx_REG (SImode, base_regno + j), mem);
4790 return result;
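/* A sketch of the RTL produced (arguments invented for exposition):
   base_regno == 4, count == 2, FROM == (reg:SI 0), ascending and no
   write-back gives

	(parallel [(set (reg:SI 4) (mem:SI (reg:SI 0)))
		   (set (reg:SI 5) (mem:SI (plus:SI (reg:SI 0)
						    (const_int 4))))])

   With write-back, an extra SET advancing FROM by count * 4 is
   placed first in the vector.  */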
4794 arm_gen_store_multiple (base_regno, count, to, up, write_back, unchanging_p,
4795 in_struct_p, scalar_p)
4796 int base_regno;
4797 int count;
4798 rtx to;
4799 int up;
4800 int write_back;
4801 int unchanging_p;
4802 int in_struct_p;
4803 int scalar_p;
4805 int i = 0, j;
4806 rtx result;
4807 int sign = up ? 1 : -1;
4808 rtx mem;
4810 /* See arm_gen_load_multiple for discussion of
4811 the pros/cons of ldm/stm usage for XScale. */
4812 if (arm_is_xscale && count <= 2 && ! optimize_size)
4814 rtx seq;
4816 start_sequence ();
4818 for (i = 0; i < count; i++)
4820 mem = gen_rtx_MEM (SImode, plus_constant (to, i * 4 * sign));
4821 RTX_UNCHANGING_P (mem) = unchanging_p;
4822 MEM_IN_STRUCT_P (mem) = in_struct_p;
4823 MEM_SCALAR_P (mem) = scalar_p;
4824 emit_move_insn (mem, gen_rtx_REG (SImode, base_regno + i));
4827 if (write_back)
4828 emit_move_insn (to, plus_constant (to, count * 4 * sign));
4830 seq = get_insns ();
4831 end_sequence ();
4833 return seq;
4836 result = gen_rtx_PARALLEL (VOIDmode,
4837 rtvec_alloc (count + (write_back ? 1 : 0)));
4838 if (write_back)
4840 XVECEXP (result, 0, 0)
4841 = gen_rtx_SET (GET_MODE (to), to,
4842 plus_constant (to, count * 4 * sign));
4843 i = 1;
4844 count++;
4847 for (j = 0; i < count; i++, j++)
4849 mem = gen_rtx_MEM (SImode, plus_constant (to, j * 4 * sign));
4850 RTX_UNCHANGING_P (mem) = unchanging_p;
4851 MEM_IN_STRUCT_P (mem) = in_struct_p;
4852 MEM_SCALAR_P (mem) = scalar_p;
4854 XVECEXP (result, 0, i)
4855 = gen_rtx_SET (VOIDmode, mem, gen_rtx_REG (SImode, base_regno + j));
4858 return result;
4862 arm_gen_movstrqi (operands)
4863 rtx * operands;
4865 HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
4866 int i;
4867 rtx src, dst;
4868 rtx st_src, st_dst, fin_src, fin_dst;
4869 rtx part_bytes_reg = NULL;
4870 rtx mem;
4871 int dst_unchanging_p, dst_in_struct_p, src_unchanging_p, src_in_struct_p;
4872 int dst_scalar_p, src_scalar_p;
4874 if (GET_CODE (operands[2]) != CONST_INT
4875 || GET_CODE (operands[3]) != CONST_INT
4876 || INTVAL (operands[2]) > 64
4877 || INTVAL (operands[3]) & 3)
4878 return 0;
4880 st_dst = XEXP (operands[0], 0);
4881 st_src = XEXP (operands[1], 0);
4883 dst_unchanging_p = RTX_UNCHANGING_P (operands[0]);
4884 dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
4885 dst_scalar_p = MEM_SCALAR_P (operands[0]);
4886 src_unchanging_p = RTX_UNCHANGING_P (operands[1]);
4887 src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);
4888 src_scalar_p = MEM_SCALAR_P (operands[1]);
4890 fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
4891 fin_src = src = copy_to_mode_reg (SImode, st_src);
4893 in_words_to_go = ARM_NUM_INTS (INTVAL (operands[2]));
4894 out_words_to_go = INTVAL (operands[2]) / 4;
4895 last_bytes = INTVAL (operands[2]) & 3;
4897 if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
4898 part_bytes_reg = gen_rtx_REG (SImode, (in_words_to_go - 1) & 3);
4900 for (i = 0; in_words_to_go >= 2; i+=4)
4902 if (in_words_to_go > 4)
4903 emit_insn (arm_gen_load_multiple (0, 4, src, TRUE, TRUE,
4904 src_unchanging_p,
4905 src_in_struct_p,
4906 src_scalar_p));
4907 else
4908 emit_insn (arm_gen_load_multiple (0, in_words_to_go, src, TRUE,
4909 FALSE, src_unchanging_p,
4910 src_in_struct_p, src_scalar_p));
4912 if (out_words_to_go)
4914 if (out_words_to_go > 4)
4915 emit_insn (arm_gen_store_multiple (0, 4, dst, TRUE, TRUE,
4916 dst_unchanging_p,
4917 dst_in_struct_p,
4918 dst_scalar_p));
4919 else if (out_words_to_go != 1)
4920 emit_insn (arm_gen_store_multiple (0, out_words_to_go,
4921 dst, TRUE,
4922 (last_bytes == 0
4923 ? FALSE : TRUE),
4924 dst_unchanging_p,
4925 dst_in_struct_p,
4926 dst_scalar_p));
4927 else
4929 mem = gen_rtx_MEM (SImode, dst);
4930 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4931 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4932 MEM_SCALAR_P (mem) = dst_scalar_p;
4933 emit_move_insn (mem, gen_rtx_REG (SImode, 0));
4934 if (last_bytes != 0)
4935 emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
4939 in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
4940 out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
4943 /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do. */
4944 if (out_words_to_go)
4946 rtx sreg;
4948 mem = gen_rtx_MEM (SImode, src);
4949 RTX_UNCHANGING_P (mem) = src_unchanging_p;
4950 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
4951 MEM_SCALAR_P (mem) = src_scalar_p;
4952 emit_move_insn (sreg = gen_reg_rtx (SImode), mem);
4953 emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));
4955 mem = gen_rtx_MEM (SImode, dst);
4956 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4957 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4958 MEM_SCALAR_P (mem) = dst_scalar_p;
4959 emit_move_insn (mem, sreg);
4960 emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
4961 in_words_to_go--;
4963 if (in_words_to_go) /* Sanity check */
4964 abort ();
4967 if (in_words_to_go)
4969 if (in_words_to_go < 0)
4970 abort ();
4972 mem = gen_rtx_MEM (SImode, src);
4973 RTX_UNCHANGING_P (mem) = src_unchanging_p;
4974 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
4975 MEM_SCALAR_P (mem) = src_scalar_p;
4976 part_bytes_reg = copy_to_mode_reg (SImode, mem);
4979 if (last_bytes && part_bytes_reg == NULL)
4980 abort ();
4982 if (BYTES_BIG_ENDIAN && last_bytes)
4984 rtx tmp = gen_reg_rtx (SImode);
4986 /* The bytes we want are in the top end of the word. */
4987 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
4988 GEN_INT (8 * (4 - last_bytes))));
4989 part_bytes_reg = tmp;
4991 while (last_bytes)
4993 mem = gen_rtx_MEM (QImode, plus_constant (dst, last_bytes - 1));
4994 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4995 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4996 MEM_SCALAR_P (mem) = dst_scalar_p;
4997 emit_move_insn (mem, gen_lowpart (QImode, part_bytes_reg));
4999 if (--last_bytes)
5001 tmp = gen_reg_rtx (SImode);
5002 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
5003 part_bytes_reg = tmp;
5008 else
5010 if (last_bytes > 1)
5012 mem = gen_rtx_MEM (HImode, dst);
5013 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
5014 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
5015 MEM_SCALAR_P (mem) = dst_scalar_p;
5016 emit_move_insn (mem, gen_lowpart (HImode, part_bytes_reg));
5017 last_bytes -= 2;
5018 if (last_bytes)
5020 rtx tmp = gen_reg_rtx (SImode);
5022 emit_insn (gen_addsi3 (dst, dst, GEN_INT (2)));
5023 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (16)));
5024 part_bytes_reg = tmp;
5028 if (last_bytes)
5030 mem = gen_rtx_MEM (QImode, dst);
5031 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
5032 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
5033 MEM_SCALAR_P (mem) = dst_scalar_p;
5034 emit_move_insn (mem, gen_lowpart (QImode, part_bytes_reg));
5038 return 1;
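/* A worked example (size invented for exposition): a 14-byte copy
   gives in_words_to_go == 4, out_words_to_go == 3 and
   last_bytes == 2, so four words are loaded into r0-r3, three of
   them are stored back, and the trailing two bytes are covered by
   storing the low halfword of r3.  */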
5041 /* Generate a memory reference for a half word, such that it will be loaded
5042 into the top 16 bits of the word. We can assume that the address is
5043 known to be alignable and of the form reg, or plus (reg, const). */
5046 arm_gen_rotated_half_load (memref)
5047 rtx memref;
5049 HOST_WIDE_INT offset = 0;
5050 rtx base = XEXP (memref, 0);
5052 if (GET_CODE (base) == PLUS)
5054 offset = INTVAL (XEXP (base, 1));
5055 base = XEXP (base, 0);
5058 /* If we aren't allowed to generate unaligned addresses, then fail. */
5059 if (TARGET_MMU_TRAPS
5060 && ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 0)))
5061 return NULL;
5063 base = gen_rtx_MEM (SImode, plus_constant (base, offset & ~2));
5065 if ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 2))
5066 return base;
5068 return gen_rtx_ROTATE (SImode, base, GEN_INT (16));
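/* For example, on a little-endian target a halfword at
   (plus (reg) (const_int 2)) already sits in the top 16 bits of the
   word at offset 0, so the SImode MEM is returned unrotated, whereas
   a halfword at offset 0 comes back wrapped as

	(rotate:SI (mem:SI (reg)) (const_int 16))

   so that the load rotates it into place.  */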
5071 /* Select a dominance comparison mode if possible. We support three forms.
5072 COND_OR == 0 => (X && Y)
5073 COND_OR == 1 => ((!X) || Y)
5074 COND_OR == 2 => (X || Y)
5075 If we are unable to support a dominance comparison we return CCmode.
5076 This will then fail to match for the RTL expressions that generate this
5077 call. */
5079 static enum machine_mode
5080 select_dominance_cc_mode (x, y, cond_or)
5081 rtx x;
5082 rtx y;
5083 HOST_WIDE_INT cond_or;
5085 enum rtx_code cond1, cond2;
5086 int swapped = 0;
5088 /* Currently we will probably get the wrong result if the individual
5089 comparisons are not simple. This also ensures that it is safe to
5090 reverse a comparison if necessary. */
5091 if ((arm_select_cc_mode (cond1 = GET_CODE (x), XEXP (x, 0), XEXP (x, 1))
5092 != CCmode)
5093 || (arm_select_cc_mode (cond2 = GET_CODE (y), XEXP (y, 0), XEXP (y, 1))
5094 != CCmode))
5095 return CCmode;
5097 /* The if_then_else variant of this tests the second condition if the
5098 first passes, but is true if the first fails. Reverse the first
5099 condition to get a true "inclusive-or" expression. */
5100 if (cond_or == 1)
5101 cond1 = reverse_condition (cond1);
5103 /* If the comparisons are not equal, and one doesn't dominate the other,
5104 then we can't do this. */
5105 if (cond1 != cond2
5106 && !comparison_dominates_p (cond1, cond2)
5107 && (swapped = 1, !comparison_dominates_p (cond2, cond1)))
5108 return CCmode;
5110 if (swapped)
5112 enum rtx_code temp = cond1;
5113 cond1 = cond2;
5114 cond2 = temp;
5117 switch (cond1)
5119 case EQ:
5120 if (cond2 == EQ || !cond_or)
5121 return CC_DEQmode;
5123 switch (cond2)
5125 case LE: return CC_DLEmode;
5126 case LEU: return CC_DLEUmode;
5127 case GE: return CC_DGEmode;
5128 case GEU: return CC_DGEUmode;
5129 default: break;
5132 break;
5134 case LT:
5135 if (cond2 == LT || !cond_or)
5136 return CC_DLTmode;
5137 if (cond2 == LE)
5138 return CC_DLEmode;
5139 if (cond2 == NE)
5140 return CC_DNEmode;
5141 break;
5143 case GT:
5144 if (cond2 == GT || !cond_or)
5145 return CC_DGTmode;
5146 if (cond2 == GE)
5147 return CC_DGEmode;
5148 if (cond2 == NE)
5149 return CC_DNEmode;
5150 break;
5152 case LTU:
5153 if (cond2 == LTU || !cond_or)
5154 return CC_DLTUmode;
5155 if (cond2 == LEU)
5156 return CC_DLEUmode;
5157 if (cond2 == NE)
5158 return CC_DNEmode;
5159 break;
5161 case GTU:
5162 if (cond2 == GTU || !cond_or)
5163 return CC_DGTUmode;
5164 if (cond2 == GEU)
5165 return CC_DGEUmode;
5166 if (cond2 == NE)
5167 return CC_DNEmode;
5168 break;
5170 /* The remaining cases only occur when both comparisons are the
5171 same. */
5172 case NE:
5173 return CC_DNEmode;
5175 case LE:
5176 return CC_DLEmode;
5178 case GE:
5179 return CC_DGEmode;
5181 case LEU:
5182 return CC_DLEUmode;
5184 case GEU:
5185 return CC_DGEUmode;
5187 default:
5188 break;
5191 abort ();
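/* A worked example (comparisons invented for exposition): for
   (x >= y) || (x > y) we get cond1 == GE and cond2 == GT.  GE does
   not imply GT, but GT implies GE, so the pair is swapped and the GT
   case returns CC_DGEmode.  An incompatible pair such as
   (x < y) || (x > y) has no dominance relation either way, so CCmode
   comes back and the enclosing pattern simply fails to match.  */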
5194 enum machine_mode
5195 arm_select_cc_mode (op, x, y)
5196 enum rtx_code op;
5197 rtx x;
5198 rtx y;
5200 /* All floating point compares return CCFP if it is an equality
5201 comparison, and CCFPE otherwise. */
5202 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
5204 switch (op)
5206 case EQ:
5207 case NE:
5208 case UNORDERED:
5209 case ORDERED:
5210 case UNLT:
5211 case UNLE:
5212 case UNGT:
5213 case UNGE:
5214 case UNEQ:
5215 case LTGT:
5216 return CCFPmode;
5218 case LT:
5219 case LE:
5220 case GT:
5221 case GE:
5222 return CCFPEmode;
5224 default:
5225 abort ();
5229 /* A compare with a shifted operand. Because of canonicalization, the
5230 comparison will have to be swapped when we emit the assembler. */
5231 if (GET_MODE (y) == SImode && GET_CODE (y) == REG
5232 && (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
5233 || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
5234 || GET_CODE (x) == ROTATERT))
5235 return CC_SWPmode;
5237 /* This is a special case that is used by combine to allow a
5238 comparison of a shifted byte load to be split into a zero-extend
5239 followed by a comparison of the shifted integer (only valid for
5240 equalities and unsigned inequalities). */
5241 if (GET_MODE (x) == SImode
5242 && GET_CODE (x) == ASHIFT
5243 && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 24
5244 && GET_CODE (XEXP (x, 0)) == SUBREG
5245 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == MEM
5246 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == QImode
5247 && (op == EQ || op == NE
5248 || op == GEU || op == GTU || op == LTU || op == LEU)
5249 && GET_CODE (y) == CONST_INT)
5250 return CC_Zmode;
5252 /* A construct for a conditional compare, if the false arm contains
5253 0, then both conditions must be true, otherwise either condition
5254 must be true. Not all conditions are possible, so CCmode is
5255 returned if it can't be done. */
5256 if (GET_CODE (x) == IF_THEN_ELSE
5257 && (XEXP (x, 2) == const0_rtx
5258 || XEXP (x, 2) == const1_rtx)
5259 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
5260 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
5261 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
5262 INTVAL (XEXP (x, 2)));
5264 /* Alternate canonicalizations of the above. These are somewhat cleaner. */
5265 if (GET_CODE (x) == AND
5266 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
5267 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
5268 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1), 0);
5270 if (GET_CODE (x) == IOR
5271 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
5272 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
5273 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1), 2);
5275 /* For an operation that sets the condition codes as a side-effect, the
5276 V flag is not set correctly, so we can only use comparisons where
5277 this doesn't matter. (For LT and GE we can use "mi" and "pl"
5278 instead.) */
5279 if (GET_MODE (x) == SImode
5280 && y == const0_rtx
5281 && (op == EQ || op == NE || op == LT || op == GE)
5282 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
5283 || GET_CODE (x) == AND || GET_CODE (x) == IOR
5284 || GET_CODE (x) == XOR || GET_CODE (x) == MULT
5285 || GET_CODE (x) == NOT || GET_CODE (x) == NEG
5286 || GET_CODE (x) == LSHIFTRT
5287 || GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
5288 || GET_CODE (x) == ROTATERT || GET_CODE (x) == ZERO_EXTRACT))
5289 return CC_NOOVmode;
5291 if (GET_MODE (x) == QImode && (op == EQ || op == NE))
5292 return CC_Zmode;
5294 if (GET_MODE (x) == SImode && (op == LTU || op == GEU)
5295 && GET_CODE (x) == PLUS
5296 && (rtx_equal_p (XEXP (x, 0), y) || rtx_equal_p (XEXP (x, 1), y)))
5297 return CC_Cmode;
5299 return CCmode;
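/* Some illustrative selections (operands invented for exposition):
   an EQ test of (plus:SI (reg) (reg)) against zero returns
   CC_NOOVmode, so the flag-setting form of the add can do the
   comparison; (ltu (plus a b) a) returns CC_Cmode, the carry-out
   test used for unsigned overflow checks; and a compare whose first
   operand is a shift of a register, with a plain SImode register
   second, returns CC_SWPmode to note that the operands must be
   swapped when the assembler comparison is emitted.  */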
5302 /* X and Y are two things to compare using CODE. Emit the compare insn and
5303 return the rtx for register 0 in the proper mode. FP means this is a
5304 floating point compare: I don't think that it is needed on the arm. */
5307 arm_gen_compare_reg (code, x, y)
5308 enum rtx_code code;
5309 rtx x, y;
5311 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
5312 rtx cc_reg = gen_rtx_REG (mode, CC_REGNUM);
5314 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
5315 gen_rtx_COMPARE (mode, x, y)));
5317 return cc_reg;
5320 /* Generate a sequence of insns that will produce the correct return
5321 address mask depending on the physical architecture that the program
5322 is running on. */
5325 arm_gen_return_addr_mask ()
5327 rtx reg = gen_reg_rtx (Pmode);
5329 emit_insn (gen_return_addr_mask (reg));
5330 return reg;
5333 void
5334 arm_reload_in_hi (operands)
5335 rtx * operands;
5337 rtx ref = operands[1];
5338 rtx base, scratch;
5339 HOST_WIDE_INT offset = 0;
5341 if (GET_CODE (ref) == SUBREG)
5343 offset = SUBREG_BYTE (ref);
5344 ref = SUBREG_REG (ref);
5347 if (GET_CODE (ref) == REG)
5349 /* We have a pseudo which has been spilt onto the stack; there
5350 are two cases here: the first where there is a simple
5351 stack-slot replacement and a second where the stack-slot is
5352 out of range, or is used as a subreg. */
5353 if (reg_equiv_mem[REGNO (ref)])
5355 ref = reg_equiv_mem[REGNO (ref)];
5356 base = find_replacement (&XEXP (ref, 0));
5358 else
5359 /* The slot is out of range, or was dressed up in a SUBREG. */
5360 base = reg_equiv_address[REGNO (ref)];
5362 else
5363 base = find_replacement (&XEXP (ref, 0));
5365 /* Handle the case where the address is too complex to be offset by 1. */
5366 if (GET_CODE (base) == MINUS
5367 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
5369 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5371 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
5372 base = base_plus;
5374 else if (GET_CODE (base) == PLUS)
5376 /* The addend must be CONST_INT, or we would have dealt with it above. */
5377 HOST_WIDE_INT hi, lo;
5379 offset += INTVAL (XEXP (base, 1));
5380 base = XEXP (base, 0);
5382 /* Rework the address into a legal sequence of insns. */
5383 /* Valid range for lo is -4095 -> 4095 */
5384 lo = (offset >= 0
5385 ? (offset & 0xfff)
5386 : -((-offset) & 0xfff));
5388 /* Corner case: if lo is the max offset then we would be out of range
5389 once we have added the additional 1 below, so bump the msb into the
5390 pre-loading insn(s). */
5391 if (lo == 4095)
5392 lo &= 0x7ff;
5394 hi = ((((offset - lo) & (HOST_WIDE_INT) 0xffffffff)
5395 ^ (HOST_WIDE_INT) 0x80000000)
5396 - (HOST_WIDE_INT) 0x80000000);
5398 if (hi + lo != offset)
5399 abort ();
5401 if (hi != 0)
5403 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5405 /* Get the base address; addsi3 knows how to handle constants
5406 that require more than one insn. */
5407 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
5408 base = base_plus;
5409 offset = lo;
5413 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
5414 emit_insn (gen_zero_extendqisi2 (scratch,
5415 gen_rtx_MEM (QImode,
5416 plus_constant (base,
5417 offset))));
5418 emit_insn (gen_zero_extendqisi2 (gen_rtx_SUBREG (SImode, operands[0], 0),
5419 gen_rtx_MEM (QImode,
5420 plus_constant (base,
5421 offset + 1))));
5422 if (!BYTES_BIG_ENDIAN)
5423 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
5424 gen_rtx_IOR (SImode,
5425 gen_rtx_ASHIFT
5426 (SImode,
5427 gen_rtx_SUBREG (SImode, operands[0], 0),
5428 GEN_INT (8)),
5429 scratch)));
5430 else
5431 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
5432 gen_rtx_IOR (SImode,
5433 gen_rtx_ASHIFT (SImode, scratch,
5434 GEN_INT (8)),
5435 gen_rtx_SUBREG (SImode, operands[0],
5436 0))));
5439 /* Handle storing a half-word to memory during reload by synthesising it as
5440 two byte stores. Take care not to clobber the input values until after we
5441 have moved them somewhere safe. This code assumes that if the DImode
5442 scratch in operands[2] overlaps either the input value or output address
5443 in some way, then that value must die in this insn (we absolutely need
5444 two scratch registers for some corner cases). */
5446 void
5447 arm_reload_out_hi (operands)
5448 rtx * operands;
5450 rtx ref = operands[0];
5451 rtx outval = operands[1];
5452 rtx base, scratch;
5453 HOST_WIDE_INT offset = 0;
5455 if (GET_CODE (ref) == SUBREG)
5457 offset = SUBREG_BYTE (ref);
5458 ref = SUBREG_REG (ref);
5461 if (GET_CODE (ref) == REG)
5463 /* We have a pseudo which has been spilt onto the stack; there
5464 are two cases here: the first where there is a simple
5465 stack-slot replacement and a second where the stack-slot is
5466 out of range, or is used as a subreg. */
5467 if (reg_equiv_mem[REGNO (ref)])
5469 ref = reg_equiv_mem[REGNO (ref)];
5470 base = find_replacement (&XEXP (ref, 0));
5472 else
5473 /* The slot is out of range, or was dressed up in a SUBREG. */
5474 base = reg_equiv_address[REGNO (ref)];
5476 else
5477 base = find_replacement (&XEXP (ref, 0));
5479 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
5481 /* Handle the case where the address is too complex to be offset by 1. */
5482 if (GET_CODE (base) == MINUS
5483 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
5485 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5487 /* Be careful not to destroy OUTVAL. */
5488 if (reg_overlap_mentioned_p (base_plus, outval))
5490 /* Updating base_plus might destroy outval, see if we can
5491 swap the scratch and base_plus. */
5492 if (!reg_overlap_mentioned_p (scratch, outval))
5494 rtx tmp = scratch;
5495 scratch = base_plus;
5496 base_plus = tmp;
5498 else
5500 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
5502 /* Be conservative and copy OUTVAL into the scratch now,
5503 this should only be necessary if outval is a subreg
5504 of something larger than a word. */
5505 /* XXX Might this clobber base? I can't see how it can,
5506 since scratch is known to overlap with OUTVAL, and
5507 must be wider than a word. */
5508 emit_insn (gen_movhi (scratch_hi, outval));
5509 outval = scratch_hi;
5513 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
5514 base = base_plus;
5516 else if (GET_CODE (base) == PLUS)
5518 /* The addend must be CONST_INT, or we would have dealt with it above. */
5519 HOST_WIDE_INT hi, lo;
5521 offset += INTVAL (XEXP (base, 1));
5522 base = XEXP (base, 0);
5524 /* Rework the address into a legal sequence of insns. */
5525 /* Valid range for lo is -4095 -> 4095 */
5526 lo = (offset >= 0
5527 ? (offset & 0xfff)
5528 : -((-offset) & 0xfff));
5530 /* Corner case: if lo is the max offset then we would be out of range
5531 once we have added the additional 1 below, so bump the msb into the
5532 pre-loading insn(s). */
5533 if (lo == 4095)
5534 lo &= 0x7ff;
5536 hi = ((((offset - lo) & (HOST_WIDE_INT) 0xffffffff)
5537 ^ (HOST_WIDE_INT) 0x80000000)
5538 - (HOST_WIDE_INT) 0x80000000);
5540 if (hi + lo != offset)
5541 abort ();
5543 if (hi != 0)
5545 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5547 /* Be careful not to destroy OUTVAL. */
5548 if (reg_overlap_mentioned_p (base_plus, outval))
5550 /* Updating base_plus might destroy outval, see if we
5551 can swap the scratch and base_plus. */
5552 if (!reg_overlap_mentioned_p (scratch, outval))
5554 rtx tmp = scratch;
5555 scratch = base_plus;
5556 base_plus = tmp;
5558 else
5560 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
5562 /* Be conservative and copy outval into scratch now,
5563 this should only be necessary if outval is a
5564 subreg of something larger than a word. */
5565 /* XXX Might this clobber base? I can't see how it
5566 can, since scratch is known to overlap with
5567 outval. */
5568 emit_insn (gen_movhi (scratch_hi, outval));
5569 outval = scratch_hi;
5573 /* Get the base address; addsi3 knows how to handle constants
5574 that require more than one insn. */
5575 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
5576 base = base_plus;
5577 offset = lo;
5581 if (BYTES_BIG_ENDIAN)
5583 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
5584 plus_constant (base, offset + 1)),
5585 gen_lowpart (QImode, outval)));
5586 emit_insn (gen_lshrsi3 (scratch,
5587 gen_rtx_SUBREG (SImode, outval, 0),
5588 GEN_INT (8)));
5589 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
5590 gen_lowpart (QImode, scratch)));
5592 else
5594 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
5595 gen_lowpart (QImode, outval)));
5596 emit_insn (gen_lshrsi3 (scratch,
5597 gen_rtx_SUBREG (SImode, outval, 0),
5598 GEN_INT (8)));
5599 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
5600 plus_constant (base, offset + 1)),
5601 gen_lowpart (QImode, scratch)));
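/* On a little-endian target the synthesized store is, in outline
   (register names invented for exposition):

	strb	rOUT, [rBASE, #offset]		@ low byte of outval
	mov	rSCRATCH, rOUT, lsr #8
	strb	rSCRATCH, [rBASE, #offset + 1]	@ high byte

   The big-endian variant stores the two bytes in the opposite
   order.  */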
5605 /* Print a symbolic form of X to the debug file, F. */
5607 static void
5608 arm_print_value (f, x)
5609 FILE * f;
5610 rtx x;
5612 switch (GET_CODE (x))
5614 case CONST_INT:
5615 fprintf (f, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
5616 return;
5618 case CONST_DOUBLE:
5619 fprintf (f, "<0x%lx,0x%lx>", (long)XWINT (x, 2), (long)XWINT (x, 3));
5620 return;
5622 case CONST_STRING:
5623 fprintf (f, "\"%s\"", XSTR (x, 0));
5624 return;
5626 case SYMBOL_REF:
5627 fprintf (f, "`%s'", XSTR (x, 0));
5628 return;
5630 case LABEL_REF:
5631 fprintf (f, "L%d", INSN_UID (XEXP (x, 0)));
5632 return;
5634 case CONST:
5635 arm_print_value (f, XEXP (x, 0));
5636 return;
5638 case PLUS:
5639 arm_print_value (f, XEXP (x, 0));
5640 fprintf (f, "+");
5641 arm_print_value (f, XEXP (x, 1));
5642 return;
5644 case PC:
5645 fprintf (f, "pc");
5646 return;
5648 default:
5649 fprintf (f, "????");
5650 return;
5654 /* Routines for manipulation of the constant pool. */
5656 /* Arm instructions cannot load a large constant directly into a
5657 register; they have to come from a pc relative load. The constant
5658 must therefore be placed in the addressable range of the pc
5659 relative load. Depending on the precise pc relative load
5660 instruction the range is somewhere between 256 bytes and 4k. This
5661 means that we often have to dump a constant inside a function, and
5662 generate code to branch around it.
5664 It is important to minimize this, since the branches will slow
5665 things down and make the code larger.
5667 Normally we can hide the table after an existing unconditional
5668 branch so that there is no interruption of the flow, but in the
5669 worst case the code looks like this:
5671 ldr rn, L1
5672 ...
5673 b L2
5674 align
5675 L1: .long value
5676 L2:
5677 ...
5679 ldr rn, L3
5680 ...
5681 b L4
5682 align
5683 L3: .long value
5684 L4:
5685 ...
5687 We fix this by performing a scan after scheduling, which notices
5688 which instructions need to have their operands fetched from the
5689 constant table and builds the table.
5691 The algorithm starts by building a table of all the constants that
5692 need fixing up and all the natural barriers in the function (places
5693 where a constant table can be dropped without breaking the flow).
5694 For each fixup we note how far the pc-relative replacement will be
5695 able to reach and the offset of the instruction into the function.
5697 Having built the table we then group the fixes together to form
5698 tables that are as large as possible (subject to addressing
5699 constraints) and emit each table of constants after the last
5700 barrier that is within range of all the instructions in the group.
5701 If a group does not contain a barrier, then we forcibly create one
5702 by inserting a jump instruction into the flow. Once the table has
5703 been inserted, the insns are then modified to reference the
5704 relevant entry in the pool.
5706 Possible enhancements to the algorithm (not implemented) are:
5708 1) For some processors and object formats, there may be benefit in
5709 aligning the pools to the start of cache lines; this alignment
5710 would need to be taken into account when calculating addressability
5711 of a pool. */
5713 /* These typedefs are located at the start of this file, so that
5714 they can be used in the prototypes there. This comment is to
5715 remind readers of that fact so that the following structures
5716 can be understood more easily.
5718 typedef struct minipool_node Mnode;
5719 typedef struct minipool_fixup Mfix; */
5721 struct minipool_node
5723 /* Doubly linked chain of entries. */
5724 Mnode * next;
5725 Mnode * prev;
5726 /* The maximum offset into the code at which this entry can be placed. While
5727 pushing fixes for forward references, all entries are sorted in order
5728 of increasing max_address. */
5729 HOST_WIDE_INT max_address;
5730 /* Similarly for an entry inserted for a backwards ref. */
5731 HOST_WIDE_INT min_address;
5732 /* The number of fixes referencing this entry. This can become zero
5733 if we "unpush" an entry. In this case we ignore the entry when we
5734 come to emit the code. */
5735 int refcount;
5736 /* The offset from the start of the minipool. */
5737 HOST_WIDE_INT offset;
5738 /* The value in the table. */
5739 rtx value;
5740 /* The mode of value. */
5741 enum machine_mode mode;
5742 int fix_size;
5745 struct minipool_fixup
5747 Mfix * next;
5748 rtx insn;
5749 HOST_WIDE_INT address;
5750 rtx * loc;
5751 enum machine_mode mode;
5752 int fix_size;
5753 rtx value;
5754 Mnode * minipool;
5755 HOST_WIDE_INT forwards;
5756 HOST_WIDE_INT backwards;
5759 /* Fixes less than a word need padding out to a word boundary. */
5760 #define MINIPOOL_FIX_SIZE(mode) \
5761 (GET_MODE_SIZE ((mode)) >= 4 ? GET_MODE_SIZE ((mode)) : 4)
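/* For example, MINIPOOL_FIX_SIZE (HImode) == 4 -- a halfword constant
   still occupies a full word slot -- while MINIPOOL_FIX_SIZE (DImode)
   == 8 keeps its natural size.  */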
5763 static Mnode * minipool_vector_head;
5764 static Mnode * minipool_vector_tail;
5765 static rtx minipool_vector_label;
5767 /* The linked list of all minipool fixes required for this function. */
5768 Mfix * minipool_fix_head;
5769 Mfix * minipool_fix_tail;
5770 /* The fix entry for the current minipool, once it has been placed. */
5771 Mfix * minipool_barrier;
5773 /* Determines if INSN is the start of a jump table. Returns the end
5774 of the TABLE or NULL_RTX. */
5776 static rtx
5777 is_jump_table (insn)
5778 rtx insn;
5780 rtx table;
5782 if (GET_CODE (insn) == JUMP_INSN
5783 && JUMP_LABEL (insn) != NULL
5784 && ((table = next_real_insn (JUMP_LABEL (insn)))
5785 == next_real_insn (insn))
5786 && table != NULL
5787 && GET_CODE (table) == JUMP_INSN
5788 && (GET_CODE (PATTERN (table)) == ADDR_VEC
5789 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
5790 return table;
5792 return NULL_RTX;
5795 #ifndef JUMP_TABLES_IN_TEXT_SECTION
5796 #define JUMP_TABLES_IN_TEXT_SECTION 0
5797 #endif
5799 static HOST_WIDE_INT
5800 get_jump_table_size (insn)
5801 rtx insn;
5803 /* ADDR_VECs only take room if read-only data goes into the text
5804 section. */
5805 if (JUMP_TABLES_IN_TEXT_SECTION
5806 #if !defined(READONLY_DATA_SECTION) && !defined(READONLY_DATA_SECTION_ASM_OP)
5807 || 1
5808 #endif
5811 rtx body = PATTERN (insn);
5812 int elt = GET_CODE (body) == ADDR_DIFF_VEC ? 1 : 0;
5814 return GET_MODE_SIZE (GET_MODE (body)) * XVECLEN (body, elt);
5817 return 0;
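/* For instance, an ADDR_DIFF_VEC in HImode with ten entries accounts
   for 2 * 10 == 20 bytes when jump tables are emitted into the text
   section, and for nothing at all otherwise.  */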
5820 /* Move a minipool fix MP from its current location to before MAX_MP.
5821 If MAX_MP is NULL, then MP doesn't need moving, but the addressing
5822 constraints may need updating. */
5824 static Mnode *
5825 move_minipool_fix_forward_ref (mp, max_mp, max_address)
5826 Mnode * mp;
5827 Mnode * max_mp;
5828 HOST_WIDE_INT max_address;
5830 /* This should never be true and the code below assumes these are
5831 different. */
5832 if (mp == max_mp)
5833 abort ();
5835 if (max_mp == NULL)
5837 if (max_address < mp->max_address)
5838 mp->max_address = max_address;
5840 else
5842 if (max_address > max_mp->max_address - mp->fix_size)
5843 mp->max_address = max_mp->max_address - mp->fix_size;
5844 else
5845 mp->max_address = max_address;
5847 /* Unlink MP from its current position. Since max_mp is non-null,
5848 mp->prev must be non-null. */
5849 mp->prev->next = mp->next;
5850 if (mp->next != NULL)
5851 mp->next->prev = mp->prev;
5852 else
5853 minipool_vector_tail = mp->prev;
5855 /* Re-insert it before MAX_MP. */
5856 mp->next = max_mp;
5857 mp->prev = max_mp->prev;
5858 max_mp->prev = mp;
5860 if (mp->prev != NULL)
5861 mp->prev->next = mp;
5862 else
5863 minipool_vector_head = mp;
5866 /* Save the new entry. */
5867 max_mp = mp;
5869 /* Scan over the preceding entries and adjust their addresses as
5870 required. */
5871 while (mp->prev != NULL
5872 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
5874 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
5875 mp = mp->prev;
5878 return max_mp;
5881 /* Add a constant to the minipool for a forward reference. Returns the
5882 node added or NULL if the constant will not fit in this pool. */
5884 static Mnode *
5885 add_minipool_forward_ref (fix)
5886 Mfix * fix;
5888 /* If set, max_mp is the first pool_entry that has a lower
5889 constraint than the one we are trying to add. */
5890 Mnode * max_mp = NULL;
5891 HOST_WIDE_INT max_address = fix->address + fix->forwards;
5892 Mnode * mp;
5894 /* If this fix's address is greater than the address of the first
5895 entry, then we can't put the fix in this pool. We subtract the
5896 size of the current fix to ensure that if the table is fully
5897 packed we still have enough room to insert this value by shuffling
5898 the other fixes forwards. */
5899 if (minipool_vector_head &&
5900 fix->address >= minipool_vector_head->max_address - fix->fix_size)
5901 return NULL;
5903 /* Scan the pool to see if a constant with the same value has
5904 already been added. While we are doing this, also note the
5905 location where we must insert the constant if it doesn't already
5906 exist. */
5907 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
5909 if (GET_CODE (fix->value) == GET_CODE (mp->value)
5910 && fix->mode == mp->mode
5911 && (GET_CODE (fix->value) != CODE_LABEL
5912 || (CODE_LABEL_NUMBER (fix->value)
5913 == CODE_LABEL_NUMBER (mp->value)))
5914 && rtx_equal_p (fix->value, mp->value))
5916 /* More than one fix references this entry. */
5917 mp->refcount++;
5918 return move_minipool_fix_forward_ref (mp, max_mp, max_address);
5921 /* Note the insertion point if necessary. */
5922 if (max_mp == NULL
5923 && mp->max_address > max_address)
5924 max_mp = mp;
5927 /* The value is not currently in the minipool, so we need to create
5928 a new entry for it. If MAX_MP is NULL, the entry will be put on
5929 the end of the list since the placement is less constrained than
5930 any existing entry. Otherwise, we insert the new fix before
5931 MAX_MP and, if necessary, adjust the constraints on the other
5932 entries. */
5933 mp = xmalloc (sizeof (* mp));
5934 mp->fix_size = fix->fix_size;
5935 mp->mode = fix->mode;
5936 mp->value = fix->value;
5937 mp->refcount = 1;
5938 /* Not yet required for a backwards ref. */
5939 mp->min_address = -65536;
5941 if (max_mp == NULL)
5943 mp->max_address = max_address;
5944 mp->next = NULL;
5945 mp->prev = minipool_vector_tail;
5947 if (mp->prev == NULL)
5949 minipool_vector_head = mp;
5950 minipool_vector_label = gen_label_rtx ();
5952 else
5953 mp->prev->next = mp;
5955 minipool_vector_tail = mp;
5957 else
5959 if (max_address > max_mp->max_address - mp->fix_size)
5960 mp->max_address = max_mp->max_address - mp->fix_size;
5961 else
5962 mp->max_address = max_address;
5964 mp->next = max_mp;
5965 mp->prev = max_mp->prev;
5966 max_mp->prev = mp;
5967 if (mp->prev != NULL)
5968 mp->prev->next = mp;
5969 else
5970 minipool_vector_head = mp;
5973 /* Save the new entry. */
5974 max_mp = mp;
5976 /* Scan over the preceding entries and adjust their addresses as
5977 required. */
5978 while (mp->prev != NULL
5979 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
5981 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
5982 mp = mp->prev;
5985 return max_mp;
5988 static Mnode *
5989 move_minipool_fix_backward_ref (mp, min_mp, min_address)
5990 Mnode * mp;
5991 Mnode * min_mp;
5992 HOST_WIDE_INT min_address;
5994 HOST_WIDE_INT offset;
5996 /* This should never be true, and the code below assumes these are
5997 different. */
5998 if (mp == min_mp)
5999 abort ();
6001 if (min_mp == NULL)
6003 if (min_address > mp->min_address)
6004 mp->min_address = min_address;
6006 else
6008 /* We will adjust this below if it is too loose. */
6009 mp->min_address = min_address;
6011 /* Unlink MP from its current position. Since min_mp is non-null,
6012 mp->next must be non-null. */
6013 mp->next->prev = mp->prev;
6014 if (mp->prev != NULL)
6015 mp->prev->next = mp->next;
6016 else
6017 minipool_vector_head = mp->next;
6019 /* Reinsert it after MIN_MP. */
6020 mp->prev = min_mp;
6021 mp->next = min_mp->next;
6022 min_mp->next = mp;
6023 if (mp->next != NULL)
6024 mp->next->prev = mp;
6025 else
6026 minipool_vector_tail = mp;
6029 min_mp = mp;
6031 offset = 0;
6032 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
6034 mp->offset = offset;
6035 if (mp->refcount > 0)
6036 offset += mp->fix_size;
6038 if (mp->next && mp->next->min_address < mp->min_address + mp->fix_size)
6039 mp->next->min_address = mp->min_address + mp->fix_size;
6042 return min_mp;
6045 /* Add a constant to the minipool for a backward reference. Returns the
6046 node added or NULL if the constant will not fit in this pool.
6048 Note that the code for insertion for a backwards reference can be
6049 somewhat confusing because the calculated offsets for each fix do
6050 not take into account the size of the pool (which is still under
6051 construction). */
6053 static Mnode *
6054 add_minipool_backward_ref (fix)
6055 Mfix * fix;
6057 /* If set, min_mp is the last pool_entry that has a lower constraint
6058 than the one we are trying to add. */
6059 Mnode * min_mp = NULL;
6060 /* This can be negative, since it is only a constraint. */
6061 HOST_WIDE_INT min_address = fix->address - fix->backwards;
6062 Mnode * mp;
6064 /* If we can't reach the current pool from this insn, or if we can't
6065 insert this entry at the end of the pool without pushing other
6066 fixes out of range, then we don't try. This ensures that we
6067 can't fail later on. */
6068 if (min_address >= minipool_barrier->address
6069 || (minipool_vector_tail->min_address + fix->fix_size
6070 >= minipool_barrier->address))
6071 return NULL;
6073 /* Scan the pool to see if a constant with the same value has
6074 already been added. While we are doing this, also note the
6075 location where we must insert the constant if it doesn't already
6076 exist. */
6077 for (mp = minipool_vector_tail; mp != NULL; mp = mp->prev)
6079 if (GET_CODE (fix->value) == GET_CODE (mp->value)
6080 && fix->mode == mp->mode
6081 && (GET_CODE (fix->value) != CODE_LABEL
6082 || (CODE_LABEL_NUMBER (fix->value)
6083 == CODE_LABEL_NUMBER (mp->value)))
6084 && rtx_equal_p (fix->value, mp->value)
6085 /* Check that there is enough slack to move this entry to the
6086 end of the table (this is conservative). */
6087 && (mp->max_address
6088 > (minipool_barrier->address
6089 + minipool_vector_tail->offset
6090 + minipool_vector_tail->fix_size)))
6092 mp->refcount++;
6093 return move_minipool_fix_backward_ref (mp, min_mp, min_address);
6096 if (min_mp != NULL)
6097 mp->min_address += fix->fix_size;
6098 else
6100 /* Note the insertion point if necessary. */
6101 if (mp->min_address < min_address)
6102 min_mp = mp;
6103 else if (mp->max_address
6104 < minipool_barrier->address + mp->offset + fix->fix_size)
6106 /* Inserting before this entry would push the fix beyond
6107 its maximum address (which can happen if we have
6108 re-located a forwards fix); force the new fix to come
6109 after it. */
6110 min_mp = mp;
6111 min_address = mp->min_address + fix->fix_size;
6116 /* We need to create a new entry. */
6117 mp = xmalloc (sizeof (* mp));
6118 mp->fix_size = fix->fix_size;
6119 mp->mode = fix->mode;
6120 mp->value = fix->value;
6121 mp->refcount = 1;
6122 mp->max_address = minipool_barrier->address + 65536;
6124 mp->min_address = min_address;
6126 if (min_mp == NULL)
6128 mp->prev = NULL;
6129 mp->next = minipool_vector_head;
6131 if (mp->next == NULL)
6133 minipool_vector_tail = mp;
6134 minipool_vector_label = gen_label_rtx ();
6136 else
6137 mp->next->prev = mp;
6139 minipool_vector_head = mp;
6141 else
6143 mp->next = min_mp->next;
6144 mp->prev = min_mp;
6145 min_mp->next = mp;
6147 if (mp->next != NULL)
6148 mp->next->prev = mp;
6149 else
6150 minipool_vector_tail = mp;
6153 /* Save the new entry. */
6154 min_mp = mp;
6156 if (mp->prev)
6157 mp = mp->prev;
6158 else
6159 mp->offset = 0;
6161 /* Scan over the following entries and adjust their offsets. */
6162 while (mp->next != NULL)
6164 if (mp->next->min_address < mp->min_address + mp->fix_size)
6165 mp->next->min_address = mp->min_address + mp->fix_size;
6167 if (mp->refcount)
6168 mp->next->offset = mp->offset + mp->fix_size;
6169 else
6170 mp->next->offset = mp->offset;
6172 mp = mp->next;
6175 return min_mp;
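/* Illustrative sketch (added for exposition; not part of the original
   source): a minimal standalone model of the lower-bound propagation
   performed in the loop above, assuming toy nodes that carry only a
   MIN_ADDRESS constraint and a FIX_SIZE.  Guarded by "#if 0" so that
   it is never compiled.  */
#if 0
struct toy_node
{
  struct toy_node * next;
  long min_address;	/* Earliest address this entry may occupy.  */
  long fix_size;	/* Bytes the entry occupies in the pool.  */
};

/* Push the constraint forwards: each entry must start no earlier
   than the end of its predecessor.  */
static void
toy_propagate_min_addresses (struct toy_node * mp)
{
  for (; mp->next != NULL; mp = mp->next)
    if (mp->next->min_address < mp->min_address + mp->fix_size)
      mp->next->min_address = mp->min_address + mp->fix_size;
}
#endif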
6178 static void
6179 assign_minipool_offsets (barrier)
6180 Mfix * barrier;
6182 HOST_WIDE_INT offset = 0;
6183 Mnode * mp;
6185 minipool_barrier = barrier;
6187 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
6189 mp->offset = offset;
6191 if (mp->refcount > 0)
6192 offset += mp->fix_size;
6196 /* Output the literal table. */
6197 static void
6198 dump_minipool (scan)
6199 rtx scan;
6201 Mnode * mp;
6202 Mnode * nmp;
6204 if (rtl_dump_file)
6205 fprintf (rtl_dump_file,
6206 ";; Emitting minipool after insn %u; address %ld\n",
6207 INSN_UID (scan), (unsigned long) minipool_barrier->address);
6209 scan = emit_label_after (gen_label_rtx (), scan);
6210 scan = emit_insn_after (gen_align_4 (), scan);
6211 scan = emit_label_after (minipool_vector_label, scan);
6213 for (mp = minipool_vector_head; mp != NULL; mp = nmp)
6215 if (mp->refcount > 0)
6217 if (rtl_dump_file)
6219 fprintf (rtl_dump_file,
6220 ";; Offset %u, min %ld, max %ld ",
6221 (unsigned) mp->offset, (unsigned long) mp->min_address,
6222 (unsigned long) mp->max_address);
6223 arm_print_value (rtl_dump_file, mp->value);
6224 fputc ('\n', rtl_dump_file);
6227 switch (mp->fix_size)
6229 #ifdef HAVE_consttable_1
6230 case 1:
6231 scan = emit_insn_after (gen_consttable_1 (mp->value), scan);
6232 break;
6234 #endif
6235 #ifdef HAVE_consttable_2
6236 case 2:
6237 scan = emit_insn_after (gen_consttable_2 (mp->value), scan);
6238 break;
6240 #endif
6241 #ifdef HAVE_consttable_4
6242 case 4:
6243 scan = emit_insn_after (gen_consttable_4 (mp->value), scan);
6244 break;
6246 #endif
6247 #ifdef HAVE_consttable_8
6248 case 8:
6249 scan = emit_insn_after (gen_consttable_8 (mp->value), scan);
6250 break;
6252 #endif
6253 default:
6254 abort ();
6255 break;
6259 nmp = mp->next;
6260 free (mp);
6263 minipool_vector_head = minipool_vector_tail = NULL;
6264 scan = emit_insn_after (gen_consttable_end (), scan);
6265 scan = emit_barrier_after (scan);
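/* For illustration only (the exact directives depend on the target
   patterns behind gen_consttable_*): the pool emitted above typically
   has this shape in the assembly output:

	b	.Lskip		@ jump created by create_fix_barrier
	.align	2
   .Lpool:
	.word	0x12345678	@ one entry per Mnode with refcount > 0
   .Lskip:

   Entries whose refcount has dropped to zero emit nothing, but their
   nodes are still freed.  */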
6268 /* Return the cost of forcibly inserting a barrier after INSN. */
6270 static int
6271 arm_barrier_cost (insn)
6272 rtx insn;
6274 /* Basing the location of the pool on the loop depth is preferable,
6275 but at the moment, the basic block information seems to be
6276 corrupt by this stage of the compilation. */
6277 int base_cost = 50;
6278 rtx next = next_nonnote_insn (insn);
6280 if (next != NULL && GET_CODE (next) == CODE_LABEL)
6281 base_cost -= 20;
6283 switch (GET_CODE (insn))
6285 case CODE_LABEL:
6286 /* It will always be better to place the table before the label, rather
6287 than after it. */
6288 return 50;
6290 case INSN:
6291 case CALL_INSN:
6292 return base_cost;
6294 case JUMP_INSN:
6295 return base_cost - 10;
6297 default:
6298 return base_cost + 10;
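/* Worked example of the heuristic above, using the constants coded in
   arm_barrier_cost: a JUMP_INSN costs 40 (50 - 10), or only 20 when a
   CODE_LABEL follows it, so the scan in create_fix_barrier prefers
   spots just after a jump, and especially ones followed by a label.  */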
6302 /* Find the best place in the insn stream in the range
6303 (FIX->address,MAX_ADDRESS) to forcibly insert a minipool barrier.
6304 Create the barrier by inserting a jump and add a new fix entry for
6305 it. */
6307 static Mfix *
6308 create_fix_barrier (fix, max_address)
6309 Mfix * fix;
6310 HOST_WIDE_INT max_address;
6312 HOST_WIDE_INT count = 0;
6313 rtx barrier;
6314 rtx from = fix->insn;
6315 rtx selected = from;
6316 int selected_cost;
6317 HOST_WIDE_INT selected_address;
6318 Mfix * new_fix;
6319 HOST_WIDE_INT max_count = max_address - fix->address;
6320 rtx label = gen_label_rtx ();
6322 selected_cost = arm_barrier_cost (from);
6323 selected_address = fix->address;
6325 while (from && count < max_count)
6327 rtx tmp;
6328 int new_cost;
6330 /* This code shouldn't have been called if there was a natural barrier
6331 within range. */
6332 if (GET_CODE (from) == BARRIER)
6333 abort ();
6335 /* Count the length of this insn. */
6336 count += get_attr_length (from);
6338 /* If there is a jump table, add its length. */
6339 tmp = is_jump_table (from);
6340 if (tmp != NULL)
6342 count += get_jump_table_size (tmp);
6344 /* Jump tables aren't in a basic block, so base the cost on
6345 the dispatch insn. If we select this location, we will
6346 still put the pool after the table. */
6347 new_cost = arm_barrier_cost (from);
6349 if (count < max_count && new_cost <= selected_cost)
6351 selected = tmp;
6352 selected_cost = new_cost;
6353 selected_address = fix->address + count;
6356 /* Continue after the dispatch table. */
6357 from = NEXT_INSN (tmp);
6358 continue;
6361 new_cost = arm_barrier_cost (from);
6363 if (count < max_count && new_cost <= selected_cost)
6365 selected = from;
6366 selected_cost = new_cost;
6367 selected_address = fix->address + count;
6370 from = NEXT_INSN (from);
6373 /* Create a new JUMP_INSN that branches around a barrier. */
6374 from = emit_jump_insn_after (gen_jump (label), selected);
6375 JUMP_LABEL (from) = label;
6376 barrier = emit_barrier_after (from);
6377 emit_label_after (label, barrier);
6379 /* Create a minipool barrier entry for the new barrier. */
6380 new_fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* new_fix));
6381 new_fix->insn = barrier;
6382 new_fix->address = selected_address;
6383 new_fix->next = fix->next;
6384 fix->next = new_fix;
6386 return new_fix;
6389 /* Record that there is a natural barrier in the insn stream at
6390 ADDRESS. */
6391 static void
6392 push_minipool_barrier (insn, address)
6393 rtx insn;
6394 HOST_WIDE_INT address;
6396 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
6398 fix->insn = insn;
6399 fix->address = address;
6401 fix->next = NULL;
6402 if (minipool_fix_head != NULL)
6403 minipool_fix_tail->next = fix;
6404 else
6405 minipool_fix_head = fix;
6407 minipool_fix_tail = fix;
6410 /* Record INSN, which will need fixing up to load a value from the
6411 minipool. ADDRESS is the offset of the insn since the start of the
6412 function; LOC is a pointer to the part of the insn which requires
6413 fixing; VALUE is the constant that must be loaded, which is of type
6414 MODE. */
6415 static void
6416 push_minipool_fix (insn, address, loc, mode, value)
6417 rtx insn;
6418 HOST_WIDE_INT address;
6419 rtx * loc;
6420 enum machine_mode mode;
6421 rtx value;
6423 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
6425 #ifdef AOF_ASSEMBLER
6426 /* PIC symbol references need to be converted into offsets into the
6427 based area. */
6428 /* XXX This shouldn't be done here. */
6429 if (flag_pic && GET_CODE (value) == SYMBOL_REF)
6430 value = aof_pic_entry (value);
6431 #endif /* AOF_ASSEMBLER */
6433 fix->insn = insn;
6434 fix->address = address;
6435 fix->loc = loc;
6436 fix->mode = mode;
6437 fix->fix_size = MINIPOOL_FIX_SIZE (mode);
6438 fix->value = value;
6439 fix->forwards = get_attr_pool_range (insn);
6440 fix->backwards = get_attr_neg_pool_range (insn);
6441 fix->minipool = NULL;
6443 /* If an insn doesn't have a range defined for it, then it isn't
6444 expecting to be reworked by this code. Better to abort now than
6445 to generate duff assembly code. */
6446 if (fix->forwards == 0 && fix->backwards == 0)
6447 abort ();
6449 if (rtl_dump_file)
6451 fprintf (rtl_dump_file,
6452 ";; %smode fixup for i%d; addr %lu, range (%ld,%ld): ",
6453 GET_MODE_NAME (mode),
6454 INSN_UID (insn), (unsigned long) address,
6455 -1 * (long)fix->backwards, (long)fix->forwards);
6456 arm_print_value (rtl_dump_file, fix->value);
6457 fprintf (rtl_dump_file, "\n");
6460 /* Add it to the chain of fixes. */
6461 fix->next = NULL;
6463 if (minipool_fix_head != NULL)
6464 minipool_fix_tail->next = fix;
6465 else
6466 minipool_fix_head = fix;
6468 minipool_fix_tail = fix;
6471 /* Scan INSN and note any of its operands that need fixing. */
6473 static void
6474 note_invalid_constants (insn, address)
6475 rtx insn;
6476 HOST_WIDE_INT address;
6478 int opno;
6480 extract_insn (insn);
6482 if (!constrain_operands (1))
6483 fatal_insn_not_found (insn);
6485 /* Fill in recog_op_alt with information about the constraints of this
6486 insn. */
6487 preprocess_constraints ();
6489 for (opno = 0; opno < recog_data.n_operands; opno++)
6491 /* Things we need to fix can only occur in inputs. */
6492 if (recog_data.operand_type[opno] != OP_IN)
6493 continue;
6495 /* If this alternative is a memory reference, then any mention
6496 of constants in this alternative is really to fool reload
6497 into allowing us to accept one there. We need to fix them up
6498 now so that we output the right code. */
6499 if (recog_op_alt[opno][which_alternative].memory_ok)
6501 rtx op = recog_data.operand[opno];
6503 if (CONSTANT_P (op))
6504 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
6505 recog_data.operand_mode[opno], op);
6506 #if 0
6507 /* RWE: Now we look correctly at the operands for the insn,
6508 this shouldn't be needed any more. */
6509 #ifndef AOF_ASSEMBLER
6510 /* XXX Is this still needed? */
6511 else if (GET_CODE (op) == UNSPEC && XINT (op, 1) == UNSPEC_PIC_SYM)
6512 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
6513 recog_data.operand_mode[opno],
6514 XVECEXP (op, 0, 0));
6515 #endif
6516 #endif
6517 else if (GET_CODE (op) == MEM
6518 && GET_CODE (XEXP (op, 0)) == SYMBOL_REF
6519 && CONSTANT_POOL_ADDRESS_P (XEXP (op, 0)))
6520 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
6521 recog_data.operand_mode[opno],
6522 get_pool_constant (XEXP (op, 0)));
6527 void
6528 arm_reorg (first)
6529 rtx first;
6531 rtx insn;
6532 HOST_WIDE_INT address = 0;
6533 Mfix * fix;
6535 minipool_fix_head = minipool_fix_tail = NULL;
6537 /* The first insn must always be a note, or the code below won't
6538 scan it properly. */
6539 if (GET_CODE (first) != NOTE)
6540 abort ();
6542 /* Scan all the insns and record the operands that will need fixing. */
6543 for (insn = next_nonnote_insn (first); insn; insn = next_nonnote_insn (insn))
6545 if (GET_CODE (insn) == BARRIER)
6546 push_minipool_barrier (insn, address);
6547 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
6548 || GET_CODE (insn) == JUMP_INSN)
6550 rtx table;
6552 note_invalid_constants (insn, address);
6553 address += get_attr_length (insn);
6555 /* If the insn is a vector jump, add the size of the table
6556 and skip the table. */
6557 if ((table = is_jump_table (insn)) != NULL)
6559 address += get_jump_table_size (table);
6560 insn = table;
6565 fix = minipool_fix_head;
6567 /* Now scan the fixups and perform the required changes. */
6568 while (fix)
6570 Mfix * ftmp;
6571 Mfix * fdel;
6572 Mfix * last_added_fix;
6573 Mfix * last_barrier = NULL;
6574 Mfix * this_fix;
6576 /* Skip any further barriers before the next fix. */
6577 while (fix && GET_CODE (fix->insn) == BARRIER)
6578 fix = fix->next;
6580 /* No more fixes. */
6581 if (fix == NULL)
6582 break;
6584 last_added_fix = NULL;
6586 for (ftmp = fix; ftmp; ftmp = ftmp->next)
6588 if (GET_CODE (ftmp->insn) == BARRIER)
6590 if (ftmp->address >= minipool_vector_head->max_address)
6591 break;
6593 last_barrier = ftmp;
6595 else if ((ftmp->minipool = add_minipool_forward_ref (ftmp)) == NULL)
6596 break;
6598 last_added_fix = ftmp; /* Keep track of the last fix added. */
6601 /* If we found a barrier, drop back to that; any fixes that we
6602 could have reached but come after the barrier will now go in
6603 the next mini-pool. */
6604 if (last_barrier != NULL)
6606 /* Reduce the refcount for those fixes that won't go into this
6607 pool after all. */
6608 for (fdel = last_barrier->next;
6609 fdel && fdel != ftmp;
6610 fdel = fdel->next)
6612 fdel->minipool->refcount--;
6613 fdel->minipool = NULL;
6616 ftmp = last_barrier;
6618 else
6620 /* ftmp is the first fix that we can't fit into this pool and
6621 there are no natural barriers that we could use. Insert a
6622 new barrier in the code somewhere between the previous
6623 fix and this one, and arrange to jump around it. */
6624 HOST_WIDE_INT max_address;
6626 /* The last item on the list of fixes must be a barrier, so
6627 we can never run off the end of the list of fixes without
6628 last_barrier being set. */
6629 if (ftmp == NULL)
6630 abort ();
6632 max_address = minipool_vector_head->max_address;
6633 /* Check that there isn't another fix that is in range that
6634 we couldn't fit into this pool because the pool was
6635 already too large: we need to put the pool before such an
6636 instruction. */
6637 if (ftmp->address < max_address)
6638 max_address = ftmp->address;
6640 last_barrier = create_fix_barrier (last_added_fix, max_address);
6643 assign_minipool_offsets (last_barrier);
6645 while (ftmp)
6647 if (GET_CODE (ftmp->insn) != BARRIER
6648 && ((ftmp->minipool = add_minipool_backward_ref (ftmp))
6649 == NULL))
6650 break;
6652 ftmp = ftmp->next;
6655 /* Scan over the fixes we have identified for this pool, fixing them
6656 up and adding the constants to the pool itself. */
6657 for (this_fix = fix; this_fix && ftmp != this_fix;
6658 this_fix = this_fix->next)
6659 if (GET_CODE (this_fix->insn) != BARRIER)
6661 rtx addr
6662 = plus_constant (gen_rtx_LABEL_REF (VOIDmode,
6663 minipool_vector_label),
6664 this_fix->minipool->offset);
6665 *this_fix->loc = gen_rtx_MEM (this_fix->mode, addr);
6668 dump_minipool (last_barrier->insn);
6669 fix = ftmp;
6672 /* From now on we must synthesize any constants that we can't handle
6673 directly. This can happen if the RTL gets split during final
6674 instruction generation. */
6675 after_arm_reorg = 1;
6677 /* Free the minipool memory. */
6678 obstack_free (&minipool_obstack, minipool_startobj);
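/* Overall shape of the pass above (illustrative, and hedged: the
   exact reach of each insn comes from its pool_range/neg_pool_range
   attributes): a constant load whose value cannot be encoded as an
   immediate is rewritten as a pc-relative load from the nearest
   minipool, e.g.

	ldr	r0, .Lpool+0	@ was: a load of an arbitrary constant

   with dump_minipool materialising .Lpool after a barrier that lies
   within the insn's forward or backward range.  */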
6681 /* Routines to output assembly language. */
6683 /* If the rtx is the correct value then return the string of the number.
6684 In this way we can ensure that valid double constants are generated even
6685 when cross compiling. */
6687 const char *
6688 fp_immediate_constant (x)
6689 rtx x;
6691 REAL_VALUE_TYPE r;
6692 int i;
6694 if (!fpa_consts_inited)
6695 init_fpa_table ();
6697 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
6698 for (i = 0; i < 8; i++)
6699 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
6700 return strings_fpa[i];
6702 abort ();
6705 /* As for fp_immediate_constant, but value is passed directly, not in rtx. */
6707 static const char *
6708 fp_const_from_val (r)
6709 REAL_VALUE_TYPE * r;
6711 int i;
6713 if (!fpa_consts_inited)
6714 init_fpa_table ();
6716 for (i = 0; i < 8; i++)
6717 if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
6718 return strings_fpa[i];
6720 abort ();
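/* For reference (assuming the standard FPA immediate set that
   init_fpa_table establishes): the eight values accepted above are
   0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 0.5 and 10.0; any other floating
   point constant has to be loaded from memory instead.  */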
6723 /* Output the operands of a LDM/STM instruction to STREAM.
6724 MASK is the ARM register set mask of which only bits 0-15 are important.
6725 REG is the base register, either the frame pointer or the stack pointer,
6726 INSTR is the possibly suffixed load or store instruction. */
6728 static void
6729 print_multi_reg (stream, instr, reg, mask)
6730 FILE * stream;
6731 const char * instr;
6732 int reg;
6733 int mask;
6735 int i;
6736 int not_first = FALSE;
6738 fputc ('\t', stream);
6739 asm_fprintf (stream, instr, reg);
6740 fputs (", {", stream);
6742 for (i = 0; i <= LAST_ARM_REGNUM; i++)
6743 if (mask & (1 << i))
6745 if (not_first)
6746 fprintf (stream, ", ");
6748 asm_fprintf (stream, "%r", i);
6749 not_first = TRUE;
6752 fprintf (stream, "}%s\n", TARGET_APCS_32 ? "" : "^");
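/* Example of the output produced above (illustrative): with INSTR
   "ldmfd\t%r!", REG == SP_REGNUM and MASK == 0x4030 this prints

	ldmfd	sp!, {r4, r5, lr}

   with a "^" appended in 26-bit (non-APCS-32) mode.  */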
6755 /* Output a 'call' insn. */
6757 const char *
6758 output_call (operands)
6759 rtx * operands;
6761 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
6763 if (REGNO (operands[0]) == LR_REGNUM)
6765 operands[0] = gen_rtx_REG (SImode, IP_REGNUM);
6766 output_asm_insn ("mov%?\t%0, %|lr", operands);
6769 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
6771 if (TARGET_INTERWORK)
6772 output_asm_insn ("bx%?\t%0", operands);
6773 else
6774 output_asm_insn ("mov%?\t%|pc, %0", operands);
6776 return "";
6779 static int
6780 eliminate_lr2ip (x)
6781 rtx * x;
6783 int something_changed = 0;
6784 rtx x0 = * x;
6785 int code = GET_CODE (x0);
6786 int i, j;
6787 const char * fmt;
6789 switch (code)
6791 case REG:
6792 if (REGNO (x0) == LR_REGNUM)
6794 *x = gen_rtx_REG (SImode, IP_REGNUM);
6795 return 1;
6797 return 0;
6798 default:
6799 /* Scan through the sub-elements and change any references there. */
6800 fmt = GET_RTX_FORMAT (code);
6802 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6803 if (fmt[i] == 'e')
6804 something_changed |= eliminate_lr2ip (&XEXP (x0, i));
6805 else if (fmt[i] == 'E')
6806 for (j = 0; j < XVECLEN (x0, i); j++)
6807 something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));
6809 return something_changed;
6813 /* Output a 'call' insn that is a reference in memory. */
6815 const char *
6816 output_call_mem (operands)
6817 rtx * operands;
6819 operands[0] = copy_rtx (operands[0]); /* Be ultra careful. */
6820 /* Handle calls using lr by using ip (which may be clobbered in subr anyway). */
6821 if (eliminate_lr2ip (&operands[0]))
6822 output_asm_insn ("mov%?\t%|ip, %|lr", operands);
6824 if (TARGET_INTERWORK)
6826 output_asm_insn ("ldr%?\t%|ip, %0", operands);
6827 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
6828 output_asm_insn ("bx%?\t%|ip", operands);
6830 else
6832 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
6833 output_asm_insn ("ldr%?\t%|pc, %0", operands);
6836 return "";
6840 /* Output a move from arm registers to an fpu register.
6841 OPERANDS[0] is an fpu register.
6842 OPERANDS[1] is the first register of an arm register pair. */
6844 const char *
6845 output_mov_long_double_fpu_from_arm (operands)
6846 rtx * operands;
6848 int arm_reg0 = REGNO (operands[1]);
6849 rtx ops[3];
6851 if (arm_reg0 == IP_REGNUM)
6852 abort ();
6854 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6855 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6856 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
6858 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
6859 output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);
6861 return "";
6864 /* Output a move from an fpu register to arm registers.
6865 OPERANDS[0] is the first register of an arm register pair.
6866 OPERANDS[1] is an fpu register. */
6868 const char *
6869 output_mov_long_double_arm_from_fpu (operands)
6870 rtx * operands;
6872 int arm_reg0 = REGNO (operands[0]);
6873 rtx ops[3];
6875 if (arm_reg0 == IP_REGNUM)
6876 abort ();
6878 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6879 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6880 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
6882 output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
6883 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
6884 return "";
6887 /* Output a move from arm registers to arm registers of a long double.
6888 OPERANDS[0] is the destination.
6889 OPERANDS[1] is the source. */
6891 const char *
6892 output_mov_long_double_arm_from_arm (operands)
6893 rtx * operands;
6895 /* We have to be careful here because the two might overlap. */
6896 int dest_start = REGNO (operands[0]);
6897 int src_start = REGNO (operands[1]);
6898 rtx ops[2];
6899 int i;
6901 if (dest_start < src_start)
6903 for (i = 0; i < 3; i++)
6905 ops[0] = gen_rtx_REG (SImode, dest_start + i);
6906 ops[1] = gen_rtx_REG (SImode, src_start + i);
6907 output_asm_insn ("mov%?\t%0, %1", ops);
6910 else
6912 for (i = 2; i >= 0; i--)
6914 ops[0] = gen_rtx_REG (SImode, dest_start + i);
6915 ops[1] = gen_rtx_REG (SImode, src_start + i);
6916 output_asm_insn ("mov%?\t%0, %1", ops);
6920 return "";
6924 /* Output a move from arm registers to an fpu register.
6925 OPERANDS[0] is an fpu register.
6926 OPERANDS[1] is the first register of an arm register pair. */
6928 const char *
6929 output_mov_double_fpu_from_arm (operands)
6930 rtx * operands;
6932 int arm_reg0 = REGNO (operands[1]);
6933 rtx ops[2];
6935 if (arm_reg0 == IP_REGNUM)
6936 abort ();
6938 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6939 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6940 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
6941 output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
6942 return "";
6945 /* Output a move from an fpu register to arm registers.
6946 OPERANDS[0] is the first register of an arm register pair.
6947 OPERANDS[1] is an fpu register. */
6949 const char *
6950 output_mov_double_arm_from_fpu (operands)
6951 rtx * operands;
6953 int arm_reg0 = REGNO (operands[0]);
6954 rtx ops[2];
6956 if (arm_reg0 == IP_REGNUM)
6957 abort ();
6959 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6960 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6961 output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
6962 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
6963 return "";
6966 /* Output a move between double words.
6967 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
6968 or MEM<-REG and all MEMs must be offsettable addresses. */
6970 const char *
6971 output_move_double (operands)
6972 rtx * operands;
6974 enum rtx_code code0 = GET_CODE (operands[0]);
6975 enum rtx_code code1 = GET_CODE (operands[1]);
6976 rtx otherops[3];
6978 if (code0 == REG)
6980 int reg0 = REGNO (operands[0]);
6982 otherops[0] = gen_rtx_REG (SImode, 1 + reg0);
6984 if (code1 == REG)
6986 int reg1 = REGNO (operands[1]);
6987 if (reg1 == IP_REGNUM)
6988 abort ();
6990 /* Ensure the second source is not overwritten. */
6991 if (reg1 == reg0 + (WORDS_BIG_ENDIAN ? -1 : 1))
6992 output_asm_insn ("mov%?\t%Q0, %Q1\n\tmov%?\t%R0, %R1", operands);
6993 else
6994 output_asm_insn ("mov%?\t%R0, %R1\n\tmov%?\t%Q0, %Q1", operands);
6996 else if (code1 == CONST_DOUBLE)
6998 if (GET_MODE (operands[1]) == DFmode)
7000 REAL_VALUE_TYPE r;
7001 long l[2];
7003 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
7004 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
7005 otherops[1] = GEN_INT (l[1]);
7006 operands[1] = GEN_INT (l[0]);
7008 else if (GET_MODE (operands[1]) != VOIDmode)
7009 abort ();
7010 else if (WORDS_BIG_ENDIAN)
7012 otherops[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
7013 operands[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
7015 else
7017 otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
7018 operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
7021 output_mov_immediate (operands);
7022 output_mov_immediate (otherops);
7024 else if (code1 == CONST_INT)
7026 #if HOST_BITS_PER_WIDE_INT > 32
7027 /* If HOST_WIDE_INT is more than 32 bits, the intval tells us
7028 what the upper word is. */
7029 if (WORDS_BIG_ENDIAN)
7031 otherops[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
7032 operands[1] = GEN_INT (INTVAL (operands[1]) >> 32);
7034 else
7036 otherops[1] = GEN_INT (INTVAL (operands[1]) >> 32);
7037 operands[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
7039 #else
7040 /* Sign extend the intval into the high-order word. */
7041 if (WORDS_BIG_ENDIAN)
7043 otherops[1] = operands[1];
7044 operands[1] = (INTVAL (operands[1]) < 0
7045 ? constm1_rtx : const0_rtx);
7047 else
7048 otherops[1] = INTVAL (operands[1]) < 0 ? constm1_rtx : const0_rtx;
7049 #endif
7050 output_mov_immediate (otherops);
7051 output_mov_immediate (operands);
7053 else if (code1 == MEM)
7055 switch (GET_CODE (XEXP (operands[1], 0)))
7057 case REG:
7058 output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
7059 break;
7061 case PRE_INC:
7062 abort (); /* Should never happen now. */
7063 break;
7065 case PRE_DEC:
7066 output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
7067 break;
7069 case POST_INC:
7070 output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
7071 break;
7073 case POST_DEC:
7074 abort (); /* Should never happen now. */
7075 break;
7077 case LABEL_REF:
7078 case CONST:
7079 output_asm_insn ("adr%?\t%0, %1", operands);
7080 output_asm_insn ("ldm%?ia\t%0, %M0", operands);
7081 break;
7083 default:
7084 if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1),
7085 GET_MODE (XEXP (XEXP (operands[1], 0), 1))))
7087 otherops[0] = operands[0];
7088 otherops[1] = XEXP (XEXP (operands[1], 0), 0);
7089 otherops[2] = XEXP (XEXP (operands[1], 0), 1);
7091 if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
7093 if (GET_CODE (otherops[2]) == CONST_INT)
7095 switch (INTVAL (otherops[2]))
7097 case -8:
7098 output_asm_insn ("ldm%?db\t%1, %M0", otherops);
7099 return "";
7100 case -4:
7101 output_asm_insn ("ldm%?da\t%1, %M0", otherops);
7102 return "";
7103 case 4:
7104 output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
7105 return "";
7108 if (!(const_ok_for_arm (INTVAL (otherops[2]))))
7109 output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
7110 else
7111 output_asm_insn ("add%?\t%0, %1, %2", otherops);
7113 else
7114 output_asm_insn ("add%?\t%0, %1, %2", otherops);
7116 else
7117 output_asm_insn ("sub%?\t%0, %1, %2", otherops);
7119 return "ldm%?ia\t%0, %M0";
7121 else
7123 otherops[1] = adjust_address (operands[1], VOIDmode, 4);
7124 /* Take care of overlapping base/data reg. */
7125 if (reg_mentioned_p (operands[0], operands[1]))
7127 output_asm_insn ("ldr%?\t%0, %1", otherops);
7128 output_asm_insn ("ldr%?\t%0, %1", operands);
7130 else
7132 output_asm_insn ("ldr%?\t%0, %1", operands);
7133 output_asm_insn ("ldr%?\t%0, %1", otherops);
7138 else
7139 abort (); /* Constraints should prevent this. */
7141 else if (code0 == MEM && code1 == REG)
7143 if (REGNO (operands[1]) == IP_REGNUM)
7144 abort ();
7146 switch (GET_CODE (XEXP (operands[0], 0)))
7148 case REG:
7149 output_asm_insn ("stm%?ia\t%m0, %M1", operands);
7150 break;
7152 case PRE_INC:
7153 abort (); /* Should never happen now. */
7154 break;
7156 case PRE_DEC:
7157 output_asm_insn ("stm%?db\t%m0!, %M1", operands);
7158 break;
7160 case POST_INC:
7161 output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
7162 break;
7164 case POST_DEC:
7165 abort (); /* Should never happen now. */
7166 break;
7168 case PLUS:
7169 if (GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT)
7171 switch (INTVAL (XEXP (XEXP (operands[0], 0), 1)))
7173 case -8:
7174 output_asm_insn ("stm%?db\t%m0, %M1", operands);
7175 return "";
7177 case -4:
7178 output_asm_insn ("stm%?da\t%m0, %M1", operands);
7179 return "";
7181 case 4:
7182 output_asm_insn ("stm%?ib\t%m0, %M1", operands);
7183 return "";
7186 /* Fall through */
7188 default:
7189 otherops[0] = adjust_address (operands[0], VOIDmode, 4);
7190 otherops[1] = gen_rtx_REG (SImode, 1 + REGNO (operands[1]));
7191 output_asm_insn ("str%?\t%1, %0", operands);
7192 output_asm_insn ("str%?\t%1, %0", otherops);
7195 else
7196 /* Constraints should prevent this. */
7197 abort ();
7199 return "";
7203 /* Output an arbitrary MOV reg, #n.
7204 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
7206 const char *
7207 output_mov_immediate (operands)
7208 rtx * operands;
7210 HOST_WIDE_INT n = INTVAL (operands[1]);
7212 /* Try to use one MOV. */
7213 if (const_ok_for_arm (n))
7214 output_asm_insn ("mov%?\t%0, %1", operands);
7216 /* Try to use one MVN. */
7217 else if (const_ok_for_arm (~n))
7219 operands[1] = GEN_INT (~n);
7220 output_asm_insn ("mvn%?\t%0, %1", operands);
7222 else
7224 int n_ones = 0;
7225 int i;
7227 /* If all else fails, make it out of ORRs or BICs as appropriate. */
7228 for (i = 0; i < 32; i ++)
7229 if (n & 1 << i)
7230 n_ones ++;
7232 if (n_ones > 16) /* Shorter to use MVN with BIC in this case. */
7233 output_multi_immediate (operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1, ~ n);
7234 else
7235 output_multi_immediate (operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1, n);
7238 return "";
7241 /* Output an ADD r, s, #n where n may be too big for one instruction.
7242 If adding zero to one register, output nothing. */
7244 const char *
7245 output_add_immediate (operands)
7246 rtx * operands;
7248 HOST_WIDE_INT n = INTVAL (operands[2]);
7250 if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
7252 if (n < 0)
7253 output_multi_immediate (operands,
7254 "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
7255 -n);
7256 else
7257 output_multi_immediate (operands,
7258 "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
7262 return "";
7265 /* Output a multiple immediate operation.
7266 OPERANDS is the vector of operands referred to in the output patterns.
7267 INSTR1 is the output pattern to use for the first constant.
7268 INSTR2 is the output pattern to use for subsequent constants.
7269 IMMED_OP is the index of the constant slot in OPERANDS.
7270 N is the constant value. */
7272 static const char *
7273 output_multi_immediate (operands, instr1, instr2, immed_op, n)
7274 rtx * operands;
7275 const char * instr1;
7276 const char * instr2;
7277 int immed_op;
7278 HOST_WIDE_INT n;
7280 #if HOST_BITS_PER_WIDE_INT > 32
7281 n &= 0xffffffff;
7282 #endif
7284 if (n == 0)
7286 /* Quick and easy output. */
7287 operands[immed_op] = const0_rtx;
7288 output_asm_insn (instr1, operands);
7290 else
7292 int i;
7293 const char * instr = instr1;
7295 /* Note that n is never zero here (which would give no output). */
7296 for (i = 0; i < 32; i += 2)
7298 if (n & (3 << i))
7300 operands[immed_op] = GEN_INT (n & (255 << i));
7301 output_asm_insn (instr, operands);
7302 instr = instr2;
7303 i += 6;
7308 return "";
7311 /* Return the appropriate ARM instruction for the operation code.
7312 The returned result should not be overwritten. OP is the rtx of the
7313 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
7314 was shifted. */
7316 const char *
7317 arithmetic_instr (op, shift_first_arg)
7318 rtx op;
7319 int shift_first_arg;
7321 switch (GET_CODE (op))
7323 case PLUS:
7324 return "add";
7326 case MINUS:
7327 return shift_first_arg ? "rsb" : "sub";
7329 case IOR:
7330 return "orr";
7332 case XOR:
7333 return "eor";
7335 case AND:
7336 return "and";
7338 default:
7339 abort ();
7343 /* Ensure valid constant shifts and return the appropriate shift mnemonic
7344 for the operation code. The returned result should not be overwritten.
7345 OP is the rtx code of the shift.
7346 On exit, *AMOUNTP will be -1 if the shift is by a register, otherwise it
7347 will be the constant shift amount. */
7349 static const char *
7350 shift_op (op, amountp)
7351 rtx op;
7352 HOST_WIDE_INT *amountp;
7354 const char * mnem;
7355 enum rtx_code code = GET_CODE (op);
7357 if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
7358 *amountp = -1;
7359 else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
7360 *amountp = INTVAL (XEXP (op, 1));
7361 else
7362 abort ();
7364 switch (code)
7366 case ASHIFT:
7367 mnem = "asl";
7368 break;
7370 case ASHIFTRT:
7371 mnem = "asr";
7372 break;
7374 case LSHIFTRT:
7375 mnem = "lsr";
7376 break;
7378 case ROTATERT:
7379 mnem = "ror";
7380 break;
7382 case MULT:
7383 /* We never have to worry about the amount being other than a
7384 power of 2, since this case can never be reloaded from a reg. */
7385 if (*amountp != -1)
7386 *amountp = int_log2 (*amountp);
7387 else
7388 abort ();
7389 return "asl";
7391 default:
7392 abort ();
7395 if (*amountp != -1)
7397 /* This is not 100% correct, but follows from the desire to merge
7398 multiplication by a power of 2 with the recognizer for a
7399 shift. >=32 is not a valid shift for "asl", so we must try and
7400 output a shift that produces the correct arithmetical result.
7401 Using lsr #32 is identical except for the fact that the carry bit
7402 is not set correctly if we set the flags; but we never use the
7403 carry bit from such an operation, so we can ignore that. */
7404 if (code == ROTATERT)
7405 /* Rotate is just modulo 32. */
7406 *amountp &= 31;
7407 else if (*amountp != (*amountp & 31))
7409 if (code == ASHIFT)
7410 mnem = "lsr";
7411 *amountp = 32;
7414 /* Shifts of 0 are no-ops. */
7415 if (*amountp == 0)
7416 return NULL;
7419 return mnem;
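/* Examples of the mapping above (illustrative): (mult x 8) is printed
   as "asl" with *AMOUNTP set to 3; an "asl" by 32 or more is out of
   range, so it is rewritten as "lsr #32", which produces the same
   all-zero result and differs only in the (unused) carry flag.  */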
7422 /* Obtain the shift count from the POWER of two. */
7424 static HOST_WIDE_INT
7425 int_log2 (power)
7426 HOST_WIDE_INT power;
7428 HOST_WIDE_INT shift = 0;
7430 while ((((HOST_WIDE_INT) 1 << shift) & power) == 0)
7432 if (shift > 31)
7433 abort ();
7434 shift ++;
7437 return shift;
7440 /* Output a .ascii pseudo-op, keeping track of lengths. This is because
7441 /bin/as is horribly restrictive. */
7442 #define MAX_ASCII_LEN 51
7444 void
7445 output_ascii_pseudo_op (stream, p, len)
7446 FILE * stream;
7447 const unsigned char * p;
7448 int len;
7450 int i;
7451 int len_so_far = 0;
7453 fputs ("\t.ascii\t\"", stream);
7455 for (i = 0; i < len; i++)
7457 int c = p[i];
7459 if (len_so_far >= MAX_ASCII_LEN)
7461 fputs ("\"\n\t.ascii\t\"", stream);
7462 len_so_far = 0;
7465 switch (c)
7467 case TARGET_TAB:
7468 fputs ("\\t", stream);
7469 len_so_far += 2;
7470 break;
7472 case TARGET_FF:
7473 fputs ("\\f", stream);
7474 len_so_far += 2;
7475 break;
7477 case TARGET_BS:
7478 fputs ("\\b", stream);
7479 len_so_far += 2;
7480 break;
7482 case TARGET_CR:
7483 fputs ("\\r", stream);
7484 len_so_far += 2;
7485 break;
7487 case TARGET_NEWLINE:
7488 fputs ("\\n", stream);
7489 c = p [i + 1];
7490 if ((c >= ' ' && c <= '~')
7491 || c == TARGET_TAB)
7492 /* This is a good place for a line break. */
7493 len_so_far = MAX_ASCII_LEN;
7494 else
7495 len_so_far += 2;
7496 break;
7498 case '\"':
7499 case '\\':
7500 putc ('\\', stream);
7501 len_so_far++;
7502 /* drop through. */
7504 default:
7505 if (c >= ' ' && c <= '~')
7507 putc (c, stream);
7508 len_so_far++;
7510 else
7512 fprintf (stream, "\\%03o", c);
7513 len_so_far += 4;
7515 break;
7519 fputs ("\"\n", stream);
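/* Example output (illustrative): called with the string "a\tb" and a
   LEN of 4 (including the trailing NUL), the routine above emits

	.ascii	"a\tb\000"

   starting a fresh .ascii directive whenever MAX_ASCII_LEN characters
   have been written on the current line.  */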
7522 /* Compute the register save mask for registers 0 through 12
7523 inclusive. This code is used by both arm_compute_save_reg_mask
7524 and arm_compute_initial_elimination_offset. */
7526 static unsigned long
7527 arm_compute_save_reg0_reg12_mask ()
7529 unsigned long func_type = arm_current_func_type ();
7530 unsigned int save_reg_mask = 0;
7531 unsigned int reg;
7533 if (IS_INTERRUPT (func_type))
7535 unsigned int max_reg;
7536 /* Interrupt functions must not corrupt any registers,
7537 even call clobbered ones. If this is a leaf function
7538 we can just examine the registers used by the RTL, but
7539 otherwise we have to assume that whatever function is
7540 called might clobber anything, and so we have to save
7541 all the call-clobbered registers as well. */
7542 if (ARM_FUNC_TYPE (func_type) == ARM_FT_FIQ)
7543 /* FIQ handlers have registers r8 - r12 banked, so
7544 we only need to check r0 - r7. Normal ISRs only
7545 bank r13 and r14, so we must check up to r12.
7546 r13 is the stack pointer which is always preserved,
7547 so we do not need to consider it here. */
7548 max_reg = 7;
7549 else
7550 max_reg = 12;
7552 for (reg = 0; reg <= max_reg; reg++)
7553 if (regs_ever_live[reg]
7554 || (! current_function_is_leaf && call_used_regs [reg]))
7555 save_reg_mask |= (1 << reg);
7557 else
7559 /* In the normal case we only need to save those registers
7560 which are call saved and which are used by this function. */
7561 for (reg = 0; reg <= 10; reg++)
7562 if (regs_ever_live[reg] && ! call_used_regs [reg])
7563 save_reg_mask |= (1 << reg);
7565 /* Handle the frame pointer as a special case. */
7566 if (! TARGET_APCS_FRAME
7567 && ! frame_pointer_needed
7568 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
7569 && ! call_used_regs[HARD_FRAME_POINTER_REGNUM])
7570 save_reg_mask |= 1 << HARD_FRAME_POINTER_REGNUM;
7572 /* If we aren't loading the PIC register,
7573 don't stack it even though it may be live. */
7574 if (flag_pic
7575 && ! TARGET_SINGLE_PIC_BASE
7576 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
7577 save_reg_mask |= 1 << PIC_OFFSET_TABLE_REGNUM;
7580 return save_reg_mask;
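/* Example (illustrative): a normal function whose RTL uses only the
   call-saved r4 and r7 gets (1 << 4) | (1 << 7) == 0x90 from the loop
   above; a non-leaf IRQ handler must additionally save the
   call-clobbered registers (r0-r3 and ip), since whatever it calls
   may clobber any of them.  */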
7583 /* Compute a bit mask of which registers need to be
7584 saved on the stack for the current function. */
7586 static unsigned long
7587 arm_compute_save_reg_mask ()
7589 unsigned int save_reg_mask = 0;
7590 unsigned long func_type = arm_current_func_type ();
7592 if (IS_NAKED (func_type))
7593 /* This should never really happen. */
7594 return 0;
7596 /* If we are creating a stack frame, then we must save the frame pointer,
7597 IP (which will hold the old stack pointer), LR and the PC. */
7598 if (frame_pointer_needed)
7599 save_reg_mask |=
7600 (1 << ARM_HARD_FRAME_POINTER_REGNUM)
7601 | (1 << IP_REGNUM)
7602 | (1 << LR_REGNUM)
7603 | (1 << PC_REGNUM);
7605 /* Volatile functions do not return, so there
7606 is no need to save any other registers. */
7607 if (IS_VOLATILE (func_type))
7608 return save_reg_mask;
7610 save_reg_mask |= arm_compute_save_reg0_reg12_mask ();
7612 /* Decide if we need to save the link register.
7613 Interrupt routines have their own banked link register,
7614 so they never need to save it.
7615 Otherwise if we do not use the link register we do not need to save
7616 it. If we are pushing other registers onto the stack however, we
7617 can save an instruction in the epilogue by pushing the link register
7618 now and then popping it back into the PC. This incurs extra memory
7619 accesses though, so we only do it when optimising for size, and only
7620 if we know that we will not need a fancy return sequence. */
7621 if (regs_ever_live [LR_REGNUM]
7622 || (save_reg_mask
7623 && optimize_size
7624 && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL))
7625 save_reg_mask |= 1 << LR_REGNUM;
7627 if (cfun->machine->lr_save_eliminated)
7628 save_reg_mask &= ~ (1 << LR_REGNUM);
7630 return save_reg_mask;
7633 /* Generate a function exit sequence. If REALLY_RETURN is true, then do
7634 everything bar the final return instruction. */
7636 const char *
7637 output_return_instruction (operand, really_return, reverse)
7638 rtx operand;
7639 int really_return;
7640 int reverse;
7642 char conditional[10];
7643 char instr[100];
7644 int reg;
7645 unsigned long live_regs_mask;
7646 unsigned long func_type;
7648 func_type = arm_current_func_type ();
7650 if (IS_NAKED (func_type))
7651 return "";
7653 if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
7655 /* If this function was declared non-returning, and we have found a tail
7656 call, then we have to trust that the called function won't return. */
7657 if (really_return)
7659 rtx ops[2];
7661 /* Otherwise, trap an attempted return by aborting. */
7662 ops[0] = operand;
7663 ops[1] = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)"
7664 : "abort");
7665 assemble_external_libcall (ops[1]);
7666 output_asm_insn (reverse ? "bl%D0\t%a1" : "bl%d0\t%a1", ops);
7669 return "";
7672 if (current_function_calls_alloca && !really_return)
7673 abort ();
7675 sprintf (conditional, "%%?%%%c0", reverse ? 'D' : 'd');
7677 return_used_this_function = 1;
7679 live_regs_mask = arm_compute_save_reg_mask ();
7681 if (live_regs_mask)
7683 const char * return_reg;
7685 /* If we do not have any special requirements for function exit
7686 (e.g. interworking, or ISR) then we can load the return address
7687 directly into the PC. Otherwise we must load it into LR. */
7688 if (really_return
7689 && ! TARGET_INTERWORK)
7690 return_reg = reg_names[PC_REGNUM];
7691 else
7692 return_reg = reg_names[LR_REGNUM];
7694 if ((live_regs_mask & (1 << IP_REGNUM)) == (1 << IP_REGNUM))
7695 /* There are two possible reasons for the IP register being saved.
7696 Either a stack frame was created, in which case IP contains the
7697 old stack pointer, or an ISR routine corrupted it. If this is an
7698 ISR routine then just restore IP, otherwise restore IP into SP. */
7699 if (! IS_INTERRUPT (func_type))
7701 live_regs_mask &= ~ (1 << IP_REGNUM);
7702 live_regs_mask |= (1 << SP_REGNUM);
7705 /* On some ARM architectures it is faster to use LDR rather than
7706 LDM to load a single register. On other architectures, the
7707 cost is the same. In 26 bit mode, or for exception handlers,
7708 we have to use LDM to load the PC so that the CPSR is also
7709 restored. */
7710 for (reg = 0; reg <= LAST_ARM_REGNUM; reg++)
7712 if (live_regs_mask == (unsigned int)(1 << reg))
7713 break;
7715 if (reg <= LAST_ARM_REGNUM
7716 && (reg != LR_REGNUM
7717 || ! really_return
7718 || (TARGET_APCS_32 && ! IS_INTERRUPT (func_type))))
7720 sprintf (instr, "ldr%s\t%%|%s, [%%|sp], #4", conditional,
7721 (reg == LR_REGNUM) ? return_reg : reg_names[reg]);
7723 else
7725 char *p;
7726 int first = 1;
7728 /* Generate the load multiple instruction to restore the registers. */
7729 if (frame_pointer_needed)
7730 sprintf (instr, "ldm%sea\t%%|fp, {", conditional);
7731 else if (live_regs_mask & (1 << SP_REGNUM))
7732 sprintf (instr, "ldm%sfd\t%%|sp, {", conditional);
7733 else
7734 sprintf (instr, "ldm%sfd\t%%|sp!, {", conditional);
7736 p = instr + strlen (instr);
7738 for (reg = 0; reg <= SP_REGNUM; reg++)
7739 if (live_regs_mask & (1 << reg))
7741 int l = strlen (reg_names[reg]);
7743 if (first)
7744 first = 0;
7745 else
7747 memcpy (p, ", ", 2);
7748 p += 2;
7751 memcpy (p, "%|", 2);
7752 memcpy (p + 2, reg_names[reg], l);
7753 p += l + 2;
7756 if (live_regs_mask & (1 << LR_REGNUM))
7758 int l = strlen (return_reg);
7760 if (! first)
7762 memcpy (p, ", ", 2);
7763 p += 2;
7766 memcpy (p, "%|", 2);
7767 memcpy (p + 2, return_reg, l);
7768 strcpy (p + 2 + l, ((TARGET_APCS_32
7769 && !IS_INTERRUPT (func_type))
7770 || !really_return)
7771 ? "}" : "}^");
7773 else
7774 strcpy (p, "}");
7777 output_asm_insn (instr, & operand);
7779 /* See if we need to generate an extra instruction to
7780 perform the actual function return. */
7781 if (really_return
7782 && func_type != ARM_FT_INTERWORKED
7783 && (live_regs_mask & (1 << LR_REGNUM)) != 0)
7785 /* The return has already been handled
7786 by loading the LR into the PC. */
7787 really_return = 0;
7791 if (really_return)
7793 switch ((int) ARM_FUNC_TYPE (func_type))
7795 case ARM_FT_ISR:
7796 case ARM_FT_FIQ:
7797 sprintf (instr, "sub%ss\t%%|pc, %%|lr, #4", conditional);
7798 break;
7800 case ARM_FT_INTERWORKED:
7801 sprintf (instr, "bx%s\t%%|lr", conditional);
7802 break;
7804 case ARM_FT_EXCEPTION:
7805 sprintf (instr, "mov%ss\t%%|pc, %%|lr", conditional);
7806 break;
7808 default:
7809 /* ARMv5 implementations always provide BX, so interworking
7810 is the default unless APCS-26 is in use. */
7811 if ((insn_flags & FL_ARCH5) != 0 && TARGET_APCS_32)
7812 sprintf (instr, "bx%s\t%%|lr", conditional);
7813 else
7814 sprintf (instr, "mov%s%s\t%%|pc, %%|lr",
7815 conditional, TARGET_APCS_32 ? "" : "s");
7816 break;
7819 output_asm_insn (instr, & operand);
7822 return "";
7825 /* Write the function name into the code section, directly preceding
7826 the function prologue.
7828 Code will be output similar to this:
7830 .ascii "arm_poke_function_name", 0
7831 .align
7833 .word 0xff000000 + (t1 - t0)
7834 arm_poke_function_name
7835 mov ip, sp
7836 stmfd sp!, {fp, ip, lr, pc}
7837 sub fp, ip, #4
7839 When performing a stack backtrace, code can inspect the value
7840 of 'pc' stored at 'fp' + 0. If the trace function then looks
7841 at location pc - 12 and the top 8 bits are set, then we know
7842 that there is a function name embedded immediately preceding this
7843 location, whose length is given by ((pc[-3]) & 0x00ffffff).
7845 We assume that pc is declared as a pointer to an unsigned long.
7847 It is of no benefit to output the function name if we are assembling
7848 a leaf function. These function types will not contain a stack
7849 backtrace structure, therefore it is not possible to determine the
7850 function name. */
7852 void
7853 arm_poke_function_name (stream, name)
7854 FILE * stream;
7855 const char * name;
7857 unsigned long alignlength;
7858 unsigned long length;
7859 rtx x;
7861 length = strlen (name) + 1;
7862 alignlength = ROUND_UP_WORD (length);
7864 ASM_OUTPUT_ASCII (stream, name, length);
7865 ASM_OUTPUT_ALIGN (stream, 2);
7866 x = GEN_INT ((unsigned HOST_WIDE_INT) 0xff000000 + alignlength);
7867 assemble_aligned_integer (UNITS_PER_WORD, x);
7870 /* Place some comments into the assembler stream
7871 describing the current function. */
7873 static void
7874 arm_output_function_prologue (f, frame_size)
7875 FILE * f;
7876 HOST_WIDE_INT frame_size;
7878 unsigned long func_type;
7880 if (!TARGET_ARM)
7882 thumb_output_function_prologue (f, frame_size);
7883 return;
7886 /* Sanity check. */
7887 if (arm_ccfsm_state || arm_target_insn)
7888 abort ();
7890 func_type = arm_current_func_type ();
7892 switch ((int) ARM_FUNC_TYPE (func_type))
7894 default:
7895 case ARM_FT_NORMAL:
7896 break;
7897 case ARM_FT_INTERWORKED:
7898 asm_fprintf (f, "\t%@ Function supports interworking.\n");
7899 break;
7900 case ARM_FT_EXCEPTION_HANDLER:
7901 asm_fprintf (f, "\t%@ C++ Exception Handler.\n");
7902 break;
7903 case ARM_FT_ISR:
7904 asm_fprintf (f, "\t%@ Interrupt Service Routine.\n");
7905 break;
7906 case ARM_FT_FIQ:
7907 asm_fprintf (f, "\t%@ Fast Interrupt Service Routine.\n");
7908 break;
7909 case ARM_FT_EXCEPTION:
7910 asm_fprintf (f, "\t%@ ARM Exception Handler.\n");
7911 break;
7914 if (IS_NAKED (func_type))
7915 asm_fprintf (f, "\t%@ Naked Function: prologue and epilogue provided by programmer.\n");
7917 if (IS_VOLATILE (func_type))
7918 asm_fprintf (f, "\t%@ Volatile: function does not return.\n");
7920 if (IS_NESTED (func_type))
7921 asm_fprintf (f, "\t%@ Nested: function declared inside another function.\n");
7923 asm_fprintf (f, "\t%@ args = %d, pretend = %d, frame = %d\n",
7924 current_function_args_size,
7925 current_function_pretend_args_size, frame_size);
7927 asm_fprintf (f, "\t%@ frame_needed = %d, uses_anonymous_args = %d\n",
7928 frame_pointer_needed,
7929 cfun->machine->uses_anonymous_args);
7931 if (cfun->machine->lr_save_eliminated)
7932 asm_fprintf (f, "\t%@ link register save eliminated.\n");
7934 #ifdef AOF_ASSEMBLER
7935 if (flag_pic)
7936 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, PIC_OFFSET_TABLE_REGNUM);
7937 #endif
7939 return_used_this_function = 0;
7942 const char *
7943 arm_output_epilogue (really_return)
7944 int really_return;
7946 int reg;
7947 unsigned long saved_regs_mask;
7948 unsigned long func_type;
7949 /* Floats_offset is the offset from the "virtual" frame. In an APCS
7950 frame that is $fp + 4 for a non-variadic function. */
7951 int floats_offset = 0;
7952 rtx operands[3];
7953 int frame_size = arm_get_frame_size ();
7954 FILE * f = asm_out_file;
7955 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
7957 /* If we have already generated the return instruction
7958 then it is futile to generate anything else. */
7959 if (use_return_insn (FALSE) && return_used_this_function)
7960 return "";
7962 func_type = arm_current_func_type ();
7964 if (IS_NAKED (func_type))
7965 /* Naked functions don't have epilogues. */
7966 return "";
7968 if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
7970 rtx op;
7972 /* A volatile function should never return. Call abort. */
7973 op = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)" : "abort");
7974 assemble_external_libcall (op);
7975 output_asm_insn ("bl\t%a0", &op);
7977 return "";
7980 if (ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER
7981 && ! really_return)
7982 /* If we are throwing an exception, then we really must
7983 be doing a return, so we can't tail-call. */
7984 abort ();
7986 saved_regs_mask = arm_compute_save_reg_mask ();
7988 /* XXX We should adjust floats_offset for any anonymous args, and then
7989 re-adjust vfp_offset below to compensate. */
7991 /* Compute how far away the floats will be. */
7992 for (reg = 0; reg <= LAST_ARM_REGNUM; reg ++)
7993 if (saved_regs_mask & (1 << reg))
7994 floats_offset += 4;
7996 if (frame_pointer_needed)
7998 int vfp_offset = 4;
8000 if (arm_fpu_arch == FP_SOFT2)
8002 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
8003 if (regs_ever_live[reg] && !call_used_regs[reg])
8005 floats_offset += 12;
8006 asm_fprintf (f, "\tldfe\t%r, [%r, #-%d]\n",
8007 reg, FP_REGNUM, floats_offset - vfp_offset);
8010 else
8012 int start_reg = LAST_ARM_FP_REGNUM;
8014 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
8016 if (regs_ever_live[reg] && !call_used_regs[reg])
8018 floats_offset += 12;
8020 /* We can't unstack more than four registers at once. */
8021 if (start_reg - reg == 3)
8023 asm_fprintf (f, "\tlfm\t%r, 4, [%r, #-%d]\n",
8024 reg, FP_REGNUM, floats_offset - vfp_offset);
8025 start_reg = reg - 1;
8028 else
8030 if (reg != start_reg)
8031 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
8032 reg + 1, start_reg - reg,
8033 FP_REGNUM, floats_offset - vfp_offset);
8034 start_reg = reg - 1;
8038 /* Just in case the last register checked also needs unstacking. */
8039 if (reg != start_reg)
8040 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
8041 reg + 1, start_reg - reg,
8042 FP_REGNUM, floats_offset - vfp_offset);
8045 /* saved_regs_mask should contain the IP, which at the time of stack
8046 frame generation actually contains the old stack pointer. So a
8047 quick way to unwind the stack is just to pop the IP register directly
8048 into the stack pointer. */
8049 if ((saved_regs_mask & (1 << IP_REGNUM)) == 0)
8050 abort ();
8051 saved_regs_mask &= ~ (1 << IP_REGNUM);
8052 saved_regs_mask |= (1 << SP_REGNUM);
8054 /* There are two registers left in saved_regs_mask - LR and PC. We
8055 only need to restore the LR register (the return address), but to
8056 save time we can load it directly into the PC, unless we need a
8057 special function exit sequence, or we are not really returning. */
8058 if (really_return && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL)
8059 /* Delete the LR from the register mask, so that the LR on
8060 the stack is loaded into the PC in the register mask. */
8061 saved_regs_mask &= ~ (1 << LR_REGNUM);
8062 else
8063 saved_regs_mask &= ~ (1 << PC_REGNUM);
8065 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, saved_regs_mask);
8067 if (IS_INTERRUPT (func_type))
8068 /* Interrupt handlers will have pushed the
8069 IP onto the stack, so restore it now. */
8070 print_multi_reg (f, "ldmfd\t%r", SP_REGNUM, 1 << IP_REGNUM);
8072 else
8074 /* Restore stack pointer if necessary. */
8075 if (frame_size + current_function_outgoing_args_size != 0)
8077 operands[0] = operands[1] = stack_pointer_rtx;
8078 operands[2] = GEN_INT (frame_size
8079 + current_function_outgoing_args_size);
8080 output_add_immediate (operands);
8083 if (arm_fpu_arch == FP_SOFT2)
8085 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
8086 if (regs_ever_live[reg] && !call_used_regs[reg])
8087 asm_fprintf (f, "\tldfe\t%r, [%r], #12\n",
8088 reg, SP_REGNUM);
8090 else
8092 int start_reg = FIRST_ARM_FP_REGNUM;
8094 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
8096 if (regs_ever_live[reg] && !call_used_regs[reg])
8098 if (reg - start_reg == 3)
8100 asm_fprintf (f, "\tlfmfd\t%r, 4, [%r]!\n",
8101 start_reg, SP_REGNUM);
8102 start_reg = reg + 1;
8105 else
8107 if (reg != start_reg)
8108 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
8109 start_reg, reg - start_reg,
8110 SP_REGNUM);
8112 start_reg = reg + 1;
8116 /* Just in case the last register checked also needs unstacking. */
8117 if (reg != start_reg)
8118 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
8119 start_reg, reg - start_reg, SP_REGNUM);
8122 /* If we can, restore the LR into the PC. */
8123 if (ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
8124 && really_return
8125 && current_function_pretend_args_size == 0
8126 && saved_regs_mask & (1 << LR_REGNUM))
8128 saved_regs_mask &= ~ (1 << LR_REGNUM);
8129 saved_regs_mask |= (1 << PC_REGNUM);
8132 /* Load the registers off the stack. If we only have one register
8133 to load, use the LDR instruction - it is faster. */
8134 if (saved_regs_mask == (1 << LR_REGNUM))
8136 /* The exception handler ignores the LR, so we do
8137 not really need to load it off the stack. */
8138 if (eh_ofs)
8139 asm_fprintf (f, "\tadd\t%r, %r, #4\n", SP_REGNUM, SP_REGNUM);
8140 else
8141 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM, SP_REGNUM);
8143 else if (saved_regs_mask)
8145 if (saved_regs_mask & (1 << SP_REGNUM))
8146 /* Note - write back to the stack register is not enabled
8147 (ie "ldmfd sp!..."). We know that the stack pointer is
8148 in the list of registers and if we add writeback the
8149 instruction becomes UNPREDICTABLE. */
8150 print_multi_reg (f, "ldmfd\t%r", SP_REGNUM, saved_regs_mask);
8151 else
8152 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, saved_regs_mask);
8155 if (current_function_pretend_args_size)
8157 /* Unwind the pre-pushed regs. */
8158 operands[0] = operands[1] = stack_pointer_rtx;
8159 operands[2] = GEN_INT (current_function_pretend_args_size);
8160 output_add_immediate (operands);
8164 #if 0
8165 if (ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER)
8166 /* Adjust the stack to remove the exception handler stuff. */
8167 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
8168 REGNO (eh_ofs));
8169 #endif
8171 if (! really_return
8172 || (ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
8173 && current_function_pretend_args_size == 0
8174 && saved_regs_mask & (1 << PC_REGNUM)))
8175 return "";
8177 /* Generate the return instruction. */
8178 switch ((int) ARM_FUNC_TYPE (func_type))
8180 case ARM_FT_EXCEPTION_HANDLER:
8181 /* Even in 26-bit mode we do a mov (rather than a movs)
8182 because we don't have the PSR bits set in the address. */
8183 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, EXCEPTION_LR_REGNUM);
8184 break;
8186 case ARM_FT_ISR:
8187 case ARM_FT_FIQ:
8188 asm_fprintf (f, "\tsubs\t%r, %r, #4\n", PC_REGNUM, LR_REGNUM);
8189 break;
8191 case ARM_FT_EXCEPTION:
8192 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
8193 break;
8195 case ARM_FT_INTERWORKED:
8196 asm_fprintf (f, "\tbx\t%r\n", LR_REGNUM);
8197 break;
8199 default:
8200 if (frame_pointer_needed)
8201 /* If we used the frame pointer then the return address
8202 will have been loaded off the stack directly into the
8203 PC, so there is no need to issue a MOV instruction
8204 here. */
8206 else if (current_function_pretend_args_size == 0
8207 && (saved_regs_mask & (1 << LR_REGNUM)))
8208 /* Similarly we may have been able to load LR into the PC
8209 even if we did not create a stack frame. */
8211 else if (TARGET_APCS_32)
8212 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, LR_REGNUM);
8213 else
8214 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
8215 break;
8218 return "";
8221 static void
8222 arm_output_function_epilogue (file, frame_size)
8223 FILE *file ATTRIBUTE_UNUSED;
8224 HOST_WIDE_INT frame_size;
8226 if (TARGET_THUMB)
8228 /* ??? Probably not safe to set this here, since it assumes that a
8229 function will be emitted as assembly immediately after we generate
8230 RTL for it. This does not happen for inline functions. */
8231 return_used_this_function = 0;
8233 else
8235 /* We need to take into account any stack-frame rounding. */
8236 frame_size = arm_get_frame_size ();
8238 if (use_return_insn (FALSE)
8239 && return_used_this_function
8240 && (frame_size + current_function_outgoing_args_size) != 0
8241 && !frame_pointer_needed)
8242 abort ();
8244 /* Reset the ARM-specific per-function variables. */
8245 after_arm_reorg = 0;
8249 /* Generate and emit an insn that we will recognize as a push_multi.
8250 Unfortunately, since this insn does not reflect very well the actual
8251 semantics of the operation, we need to annotate the insn for the benefit
8252 of DWARF2 frame unwind information. */
8254 static rtx
8255 emit_multi_reg_push (mask)
8256 int mask;
8258 int num_regs = 0;
8259 int num_dwarf_regs;
8260 int i, j;
8261 rtx par;
8262 rtx dwarf;
8263 int dwarf_par_index;
8264 rtx tmp, reg;
8266 for (i = 0; i <= LAST_ARM_REGNUM; i++)
8267 if (mask & (1 << i))
8268 num_regs++;
8270 if (num_regs == 0 || num_regs > 16)
8271 abort ();
8273 /* We don't record the PC in the dwarf frame information. */
8274 num_dwarf_regs = num_regs;
8275 if (mask & (1 << PC_REGNUM))
8276 num_dwarf_regs--;
8278 /* For the body of the insn we are going to generate an UNSPEC in
8279 parallel with several USEs. This allows the insn to be recognized
8280 by the push_multi pattern in the arm.md file. The insn looks
8281 something like this:
8283 (parallel [
8284 (set (mem:BLK (pre_dec:BLK (reg:SI sp)))
8285 (unspec:BLK [(reg:SI r4)] UNSPEC_PUSH_MULT))
8286 (use (reg:SI 11 fp))
8287 (use (reg:SI 12 ip))
8288 (use (reg:SI 14 lr))
8289 (use (reg:SI 15 pc))
8292 For the frame note however, we try to be more explicit and actually
8293 show each register being stored into the stack frame, plus a (single)
8294 decrement of the stack pointer. We do it this way in order to be
8295 friendly to the stack unwinding code, which only wants to see a single
8296 stack decrement per instruction. The RTL we generate for the note looks
8297 something like this:
8299 (sequence [
8300 (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int -20)))
8301 (set (mem:SI (reg:SI sp)) (reg:SI r4))
8302 (set (mem:SI (plus:SI (reg:SI sp) (const_int 4))) (reg:SI fp))
8303 (set (mem:SI (plus:SI (reg:SI sp) (const_int 8))) (reg:SI ip))
8304 (set (mem:SI (plus:SI (reg:SI sp) (const_int 12))) (reg:SI lr))
8307 This sequence is used both by the code to support stack unwinding for
8308 exception handlers and the code to generate dwarf2 frame debugging. */
8310 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_regs));
8311 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (num_dwarf_regs + 1));
8312 dwarf_par_index = 1;
8314 for (i = 0; i <= LAST_ARM_REGNUM; i++)
8316 if (mask & (1 << i))
8318 reg = gen_rtx_REG (SImode, i);
8320 XVECEXP (par, 0, 0)
8321 = gen_rtx_SET (VOIDmode,
8322 gen_rtx_MEM (BLKmode,
8323 gen_rtx_PRE_DEC (BLKmode,
8324 stack_pointer_rtx)),
8325 gen_rtx_UNSPEC (BLKmode,
8326 gen_rtvec (1, reg),
8327 UNSPEC_PUSH_MULT));
8329 if (i != PC_REGNUM)
8331 tmp = gen_rtx_SET (VOIDmode,
8332 gen_rtx_MEM (SImode, stack_pointer_rtx),
8333 reg);
8334 RTX_FRAME_RELATED_P (tmp) = 1;
8335 XVECEXP (dwarf, 0, dwarf_par_index) = tmp;
8336 dwarf_par_index++;
8339 break;
8343 for (j = 1, i++; j < num_regs; i++)
8345 if (mask & (1 << i))
8347 reg = gen_rtx_REG (SImode, i);
8349 XVECEXP (par, 0, j) = gen_rtx_USE (VOIDmode, reg);
8351 if (i != PC_REGNUM)
8353 tmp = gen_rtx_SET (VOIDmode,
8354 gen_rtx_MEM (SImode,
8355 plus_constant (stack_pointer_rtx,
8356 4 * j)),
8357 reg);
8358 RTX_FRAME_RELATED_P (tmp) = 1;
8359 XVECEXP (dwarf, 0, dwarf_par_index++) = tmp;
8362 j++;
8366 par = emit_insn (par);
8368 tmp = gen_rtx_SET (SImode,
8369 stack_pointer_rtx,
8370 gen_rtx_PLUS (SImode,
8371 stack_pointer_rtx,
8372 GEN_INT (-4 * num_regs)));
8373 RTX_FRAME_RELATED_P (tmp) = 1;
8374 XVECEXP (dwarf, 0, 0) = tmp;
8376 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
8377 REG_NOTES (par));
8378 return par;
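/* Editorial note: a minimal sketch of a call to the function above, in
   the style of arm_expand_prologue below; the register choice (r4, r5,
   lr) is illustrative only, not taken from this file.  */
#if 0
{
  /* Build a mask for "push {r4, r5, lr}" and emit the insn.  */
  int mask = (1 << 4) | (1 << 5) | (1 << LR_REGNUM);
  rtx insn = emit_multi_reg_push (mask);

  /* Mark it so the REG_FRAME_RELATED_EXPR note attached above is used
     to generate dwarf2 unwind information.  */
  RTX_FRAME_RELATED_P (insn) = 1;
}
#endif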
8381 static rtx
8382 emit_sfm (base_reg, count)
8383 int base_reg;
8384 int count;
8386 rtx par;
8387 rtx dwarf;
8388 rtx tmp, reg;
8389 int i;
8391 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
8392 dwarf = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
8394 reg = gen_rtx_REG (XFmode, base_reg++);
8396 XVECEXP (par, 0, 0)
8397 = gen_rtx_SET (VOIDmode,
8398 gen_rtx_MEM (BLKmode,
8399 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
8400 gen_rtx_UNSPEC (BLKmode,
8401 gen_rtvec (1, reg),
8402 UNSPEC_PUSH_MULT));
8403 tmp
8404 = gen_rtx_SET (VOIDmode,
8405 gen_rtx_MEM (XFmode,
8406 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
8407 reg);
8408 RTX_FRAME_RELATED_P (tmp) = 1;
8409 XVECEXP (dwarf, 0, count - 1) = tmp;
8411 for (i = 1; i < count; i++)
8413 reg = gen_rtx_REG (XFmode, base_reg++);
8414 XVECEXP (par, 0, i) = gen_rtx_USE (VOIDmode, reg);
8416 tmp = gen_rtx_SET (VOIDmode,
8417 gen_rtx_MEM (XFmode,
8418 gen_rtx_PRE_DEC (BLKmode,
8419 stack_pointer_rtx)),
8420 reg);
8421 RTX_FRAME_RELATED_P (tmp) = 1;
8422 XVECEXP (dwarf, 0, count - i - 1) = tmp;
8425 par = emit_insn (par);
8426 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
8427 REG_NOTES (par));
8428 return par;
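/* Editorial note: a hedged sketch of a call to the function above, in
   the style of the FPA saves in arm_expand_prologue below; it assumes
   the FPA registers start at FIRST_ARM_FP_REGNUM.  An sfm can store at
   most four registers, hence the count of 4.  */
#if 0
{
  /* Store four consecutive FPA registers with one sfm.  */
  rtx insn = emit_sfm (FIRST_ARM_FP_REGNUM, 4);
  RTX_FRAME_RELATED_P (insn) = 1;
}
#endif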
8431 /* Compute the distance from register FROM to register TO.
8432 These can be the arg pointer (26), the soft frame pointer (25),
8433 the stack pointer (13) or the hard frame pointer (11).
8434 Typical stack layout looks like this:
8436 old stack pointer -> | |
8437 ----
8438 | | \
8439 | | saved arguments for
8440 | | vararg functions
8441 | | /
8443 hard FP & arg pointer -> | | \
8444 | | stack
8445 | | frame
8446 | | /
8448 | | \
8449 | | call saved
8450 | | registers
8451 soft frame pointer -> | | /
8453 | | \
8454 | | local
8455 | | variables
8456 | | /
8458 | | \
8459 | | outgoing
8460 | | arguments
8461 current stack pointer -> | | /
8464 For a given function some or all of these stack components
8465 may not be needed, giving rise to the possibility of
8466 eliminating some of the registers.
8468 The values returned by this function must reflect the behavior
8469 of arm_expand_prologue() and arm_compute_save_reg_mask().
8471 The sign of the number returned reflects the direction of stack
8472 growth, so the values are positive for all eliminations except
8473 from the soft frame pointer to the hard frame pointer. */
8475 unsigned int
8476 arm_compute_initial_elimination_offset (from, to)
8477 unsigned int from;
8478 unsigned int to;
8480 unsigned int local_vars = arm_get_frame_size ();
8481 unsigned int outgoing_args = current_function_outgoing_args_size;
8482 unsigned int stack_frame;
8483 unsigned int call_saved_registers;
8484 unsigned long func_type;
8486 func_type = arm_current_func_type ();
8488 /* Volatile functions never return, so there is
8489 no need to save call saved registers. */
8490 call_saved_registers = 0;
8491 if (! IS_VOLATILE (func_type))
8493 unsigned int reg_mask;
8494 unsigned int reg;
8496 /* Make sure that we compute which registers will be saved
8497 on the stack using the same algorithm that is used by
8498 arm_compute_save_reg_mask(). */
8499 reg_mask = arm_compute_save_reg0_reg12_mask ();
8501 /* Now count the number of bits set in save_reg_mask.
8502 For each set bit we need 4 bytes of stack space. */
8503 while (reg_mask)
8505 call_saved_registers += 4;
8506 reg_mask = reg_mask & ~ (reg_mask & - reg_mask);
8509 if (regs_ever_live[LR_REGNUM]
8510 /* If a stack frame is going to be created, the LR will
8511 be saved as part of that, so we do not need to allow
8512 for it here. */
8513 && ! frame_pointer_needed)
8514 call_saved_registers += 4;
8516 /* If the hard floating point registers are going to be
8517 used then they must be saved on the stack as well.
8518 Each register occupies 12 bytes of stack space. */
8519 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg ++)
8520 if (regs_ever_live[reg] && ! call_used_regs[reg])
8521 call_saved_registers += 12;
8524 /* The stack frame contains 4 registers - the old frame pointer,
8525 the old stack pointer, the return address and PC of the start
8526 of the function. */
8527 stack_frame = frame_pointer_needed ? 16 : 0;
8529 /* OK, now we have enough information to compute the distances.
8530 There must be an entry in these switch tables for each pair
8531 of registers in ELIMINABLE_REGS, even if some of the entries
8532 seem to be redundant or useless. */
8533 switch (from)
8535 case ARG_POINTER_REGNUM:
8536 switch (to)
8538 case THUMB_HARD_FRAME_POINTER_REGNUM:
8539 return 0;
8541 case FRAME_POINTER_REGNUM:
8542 /* This is the reverse of the soft frame pointer
8543 to hard frame pointer elimination below. */
8544 if (call_saved_registers == 0 && stack_frame == 0)
8545 return 0;
8546 return (call_saved_registers + stack_frame - 4);
8548 case ARM_HARD_FRAME_POINTER_REGNUM:
8549 /* If there is no stack frame then the hard
8550 frame pointer and the arg pointer coincide. */
8551 if (stack_frame == 0 && call_saved_registers != 0)
8552 return 0;
8553 /* FIXME: Not sure about this. Maybe we should always return 0 ? */
8554 return (frame_pointer_needed
8555 && current_function_needs_context
8556 && ! cfun->machine->uses_anonymous_args) ? 4 : 0;
8558 case STACK_POINTER_REGNUM:
8559 /* If nothing has been pushed on the stack at all
8560 then this will return -4. This *is* correct! */
8561 return call_saved_registers + stack_frame + local_vars + outgoing_args - 4;
8563 default:
8564 abort ();
8566 break;
8568 case FRAME_POINTER_REGNUM:
8569 switch (to)
8571 case THUMB_HARD_FRAME_POINTER_REGNUM:
8572 return 0;
8574 case ARM_HARD_FRAME_POINTER_REGNUM:
8575 /* The hard frame pointer points to the top entry in the
8576 stack frame. The soft frame pointer to the bottom entry
8577 in the stack frame. If there is no stack frame at all,
8578 then they are identical. */
8579 if (call_saved_registers == 0 && stack_frame == 0)
8580 return 0;
8581 return - (call_saved_registers + stack_frame - 4);
8583 case STACK_POINTER_REGNUM:
8584 return local_vars + outgoing_args;
8586 default:
8587 abort ();
8589 break;
8591 default:
8592 /* You cannot eliminate from the stack pointer.
8593 In theory you could eliminate from the hard frame
8594 pointer to the stack pointer, but this will never
8595 happen, since if a stack frame is not needed the
8596 hard frame pointer will never be used. */
8597 abort ();
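/* Editorial note: a worked example of the switch above, with assumed
   values rather than anything taken from this file.  Suppose
   frame_pointer_needed is true, r4-r6 are saved (call_saved_registers
   = 12), local_vars = 8 and outgoing_args = 0, so stack_frame = 16:
     ARG_POINTER   -> FRAME_POINTER: 12 + 16 - 4         = 24
     ARG_POINTER   -> STACK_POINTER: 12 + 16 + 8 + 0 - 4 = 32
     FRAME_POINTER -> STACK_POINTER: 8 + 0               = 8   */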
8601 /* Calculate the size of the stack frame, taking into account any
8602 padding that is required to ensure stack-alignment. */
8604 HOST_WIDE_INT
8605 arm_get_frame_size ()
8607 int regno;
8609 int base_size = ROUND_UP_WORD (get_frame_size ());
8610 int entry_size = 0;
8611 unsigned long func_type = arm_current_func_type ();
8612 int leaf;
8614 if (! TARGET_ARM)
8615 abort ();
8617 if (! TARGET_ATPCS)
8618 return base_size;
8620 /* We need to know if we are a leaf function. Unfortunately, it
8621 is possible to be called after start_sequence has been called,
8622 which causes get_insns to return the insns for the sequence,
8623 not the function, which will cause leaf_function_p to return
8624 the incorrect result.
8626 To work around this, we cache the computed frame size. This
8627 works because we will only be calling RTL expanders that need
8628 to know about leaf functions once reload has completed, and the
8629 frame size cannot be changed after that time, so we can safely
8630 use the cached value. */
8632 if (reload_completed)
8633 return cfun->machine->frame_size;
8635 leaf = leaf_function_p ();
8637 /* A leaf function does not need any stack alignment if it has nothing
8638 on the stack. */
8639 if (leaf && base_size == 0)
8641 cfun->machine->frame_size = 0;
8642 return 0;
8645 /* We know that SP will be word aligned on entry, and we must
8646 preserve that condition at any subroutine call. But those are
8647 the only constraints. */
8649 /* Space for variadic functions. */
8650 if (current_function_pretend_args_size)
8651 entry_size += current_function_pretend_args_size;
8653 /* Space for saved registers. */
8654 entry_size += bit_count (arm_compute_save_reg_mask ()) * 4;
8656 /* Space for saved FPA registers. */
8657 if (! IS_VOLATILE (func_type))
8659 for (regno = FIRST_ARM_FP_REGNUM; regno <= LAST_ARM_FP_REGNUM; regno++)
8660 if (regs_ever_live[regno] && ! call_used_regs[regno])
8661 entry_size += 12;
8664 if ((entry_size + base_size + current_function_outgoing_args_size) & 7)
8665 base_size += 4;
8666 if ((entry_size + base_size + current_function_outgoing_args_size) & 7)
8667 abort ();
8669 cfun->machine->frame_size = base_size;
8671 return base_size;
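/* Editorial note: a worked example of the ATPCS rounding above, with
   assumed values.  If entry_size is 20 (say r4-r7 and lr saved) and
   base_size is 8 with no outgoing args, then 20 + 8 + 0 = 28 is not
   8-byte aligned, so base_size is bumped to 12 and the total becomes
   32, which satisfies the second check.  */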
8674 /* Generate the prologue instructions for entry into an ARM function. */
8676 void
8677 arm_expand_prologue ()
8679 int reg;
8680 rtx amount;
8681 rtx insn;
8682 rtx ip_rtx;
8683 unsigned long live_regs_mask;
8684 unsigned long func_type;
8685 int fp_offset = 0;
8686 int saved_pretend_args = 0;
8687 unsigned int args_to_push;
8689 func_type = arm_current_func_type ();
8691 /* Naked functions don't have prologues. */
8692 if (IS_NAKED (func_type))
8693 return;
8695 /* Make a copy of c_f_p_a_s as we may need to modify it locally. */
8696 args_to_push = current_function_pretend_args_size;
8698 /* Compute which register we will have to save onto the stack. */
8699 live_regs_mask = arm_compute_save_reg_mask ();
8701 ip_rtx = gen_rtx_REG (SImode, IP_REGNUM);
8703 if (frame_pointer_needed)
8705 if (IS_INTERRUPT (func_type))
8707 /* Interrupt functions must not corrupt any registers.
8708 Creating a frame pointer however, corrupts the IP
8709 register, so we must push it first. */
8710 insn = emit_multi_reg_push (1 << IP_REGNUM);
8712 /* Do not set RTX_FRAME_RELATED_P on this insn.
8713 The dwarf stack unwinding code only wants to see one
8714 stack decrement per function, and this is not it. If
8715 this instruction is labeled as being part of the frame
8716 creation sequence then dwarf2out_frame_debug_expr will
8717 abort when it encounters the assignment of IP to FP
8718 later on, since the use of SP here establishes SP as
8719 the CFA register and not IP.
8721 Anyway this instruction is not really part of the stack
8722 frame creation although it is part of the prologue. */
8724 else if (IS_NESTED (func_type))
8726 /* The Static chain register is the same as the IP register
8727 used as a scratch register during stack frame creation.
8728 To get around this, we need to find somewhere to store IP
8729 whilst the frame is being created. We try the following
8730 places in order:
8732 1. The last argument register.
8733 2. A slot on the stack above the frame. (This only
8734 works if the function is not a varargs function).
8735 3. Register r3, after pushing the argument registers
8736 onto the stack.
8738 Note - we only need to tell the dwarf2 backend about the SP
8739 adjustment in the second variant; the static chain register
8740 doesn't need to be unwound, as it doesn't contain a value
8741 inherited from the caller. */
8743 if (regs_ever_live[3] == 0)
8745 insn = gen_rtx_REG (SImode, 3);
8746 insn = gen_rtx_SET (SImode, insn, ip_rtx);
8747 insn = emit_insn (insn);
8749 else if (args_to_push == 0)
8751 rtx dwarf;
8752 insn = gen_rtx_PRE_DEC (SImode, stack_pointer_rtx);
8753 insn = gen_rtx_MEM (SImode, insn);
8754 insn = gen_rtx_SET (VOIDmode, insn, ip_rtx);
8755 insn = emit_insn (insn);
8757 fp_offset = 4;
8759 /* Just tell the dwarf backend that we adjusted SP. */
8760 dwarf = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
8761 gen_rtx_PLUS (SImode, stack_pointer_rtx,
8762 GEN_INT (-fp_offset)));
8763 RTX_FRAME_RELATED_P (insn) = 1;
8764 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8765 dwarf, REG_NOTES (insn));
8767 else
8769 /* Store the args on the stack. */
8770 if (cfun->machine->uses_anonymous_args)
8771 insn = emit_multi_reg_push
8772 ((0xf0 >> (args_to_push / 4)) & 0xf);
8773 else
8774 insn = emit_insn
8775 (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
8776 GEN_INT (- args_to_push)));
8778 RTX_FRAME_RELATED_P (insn) = 1;
8780 saved_pretend_args = 1;
8781 fp_offset = args_to_push;
8782 args_to_push = 0;
8784 /* Now reuse r3 to preserve IP. */
8785 insn = gen_rtx_REG (SImode, 3);
8786 insn = gen_rtx_SET (SImode, insn, ip_rtx);
8787 (void) emit_insn (insn);
8791 if (fp_offset)
8793 insn = gen_rtx_PLUS (SImode, stack_pointer_rtx, GEN_INT (fp_offset));
8794 insn = gen_rtx_SET (SImode, ip_rtx, insn);
8796 else
8797 insn = gen_movsi (ip_rtx, stack_pointer_rtx);
8799 insn = emit_insn (insn);
8800 RTX_FRAME_RELATED_P (insn) = 1;
8803 if (args_to_push)
8805 /* Push the argument registers, or reserve space for them. */
8806 if (cfun->machine->uses_anonymous_args)
8807 insn = emit_multi_reg_push
8808 ((0xf0 >> (args_to_push / 4)) & 0xf);
8809 else
8810 insn = emit_insn
8811 (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
8812 GEN_INT (- args_to_push)));
8813 RTX_FRAME_RELATED_P (insn) = 1;
8816 /* If this is an interrupt service routine, and the link register is
8817 going to be pushed, subtracting four now will mean that the
8818 function return can be done with a single instruction. */
8819 if ((func_type == ARM_FT_ISR || func_type == ARM_FT_FIQ)
8820 && (live_regs_mask & (1 << LR_REGNUM)) != 0)
8822 emit_insn (gen_rtx_SET (SImode,
8823 gen_rtx_REG (SImode, LR_REGNUM),
8824 gen_rtx_PLUS (SImode,
8825 gen_rtx_REG (SImode, LR_REGNUM),
8826 GEN_INT (-4))));
8829 if (live_regs_mask)
8831 insn = emit_multi_reg_push (live_regs_mask);
8832 RTX_FRAME_RELATED_P (insn) = 1;
8835 if (! IS_VOLATILE (func_type))
8837 /* Save any floating point call-saved registers used by this function. */
8838 if (arm_fpu_arch == FP_SOFT2)
8840 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg --)
8841 if (regs_ever_live[reg] && !call_used_regs[reg])
8843 insn = gen_rtx_PRE_DEC (XFmode, stack_pointer_rtx);
8844 insn = gen_rtx_MEM (XFmode, insn);
8845 insn = emit_insn (gen_rtx_SET (VOIDmode, insn,
8846 gen_rtx_REG (XFmode, reg)));
8847 RTX_FRAME_RELATED_P (insn) = 1;
8850 else
8852 int start_reg = LAST_ARM_FP_REGNUM;
8854 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg --)
8856 if (regs_ever_live[reg] && !call_used_regs[reg])
8858 if (start_reg - reg == 3)
8860 insn = emit_sfm (reg, 4);
8861 RTX_FRAME_RELATED_P (insn) = 1;
8862 start_reg = reg - 1;
8865 else
8867 if (start_reg != reg)
8869 insn = emit_sfm (reg + 1, start_reg - reg);
8870 RTX_FRAME_RELATED_P (insn) = 1;
8872 start_reg = reg - 1;
8876 if (start_reg != reg)
8878 insn = emit_sfm (reg + 1, start_reg - reg);
8879 RTX_FRAME_RELATED_P (insn) = 1;
8884 if (frame_pointer_needed)
8886 /* Create the new frame pointer. */
8887 insn = GEN_INT (-(4 + args_to_push + fp_offset));
8888 insn = emit_insn (gen_addsi3 (hard_frame_pointer_rtx, ip_rtx, insn));
8889 RTX_FRAME_RELATED_P (insn) = 1;
8891 if (IS_NESTED (func_type))
8893 /* Recover the static chain register. */
8894 if (regs_ever_live [3] == 0
8895 || saved_pretend_args)
8896 insn = gen_rtx_REG (SImode, 3);
8897 else /* if (current_function_pretend_args_size == 0) */
8899 insn = gen_rtx_PLUS (SImode, hard_frame_pointer_rtx, GEN_INT (4));
8900 insn = gen_rtx_MEM (SImode, insn);
8903 emit_insn (gen_rtx_SET (SImode, ip_rtx, insn));
8904 /* Add a USE to stop propagate_one_insn() from barfing. */
8905 emit_insn (gen_prologue_use (ip_rtx));
8909 amount = GEN_INT (-(arm_get_frame_size ()
8910 + current_function_outgoing_args_size));
8912 if (amount != const0_rtx)
8914 /* This add can produce multiple insns for a large constant, so we
8915 need to get tricky. */
8916 rtx last = get_last_insn ();
8917 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
8918 amount));
8919 do
8921 last = last ? NEXT_INSN (last) : get_insns ();
8922 RTX_FRAME_RELATED_P (last) = 1;
8924 while (last != insn);
8926 /* If the frame pointer is needed, emit a special barrier that
8927 will prevent the scheduler from moving stores to the frame
8928 before the stack adjustment. */
8929 if (frame_pointer_needed)
8930 insn = emit_insn (gen_stack_tie (stack_pointer_rtx,
8931 hard_frame_pointer_rtx));
8934 /* If we are profiling, make sure no instructions are scheduled before
8935 the call to mcount. Similarly if the user has requested no
8936 scheduling in the prolog. */
8937 if (current_function_profile || TARGET_NO_SCHED_PRO)
8938 emit_insn (gen_blockage ());
8940 /* If the link register is being kept alive, with the return address in it,
8941 then make sure that it does not get reused by the ce2 pass. */
8942 if ((live_regs_mask & (1 << LR_REGNUM)) == 0)
8944 emit_insn (gen_prologue_use (gen_rtx_REG (SImode, LR_REGNUM)));
8945 cfun->machine->lr_save_eliminated = 1;
8949 /* If CODE is 'd', then the X is a condition operand and the instruction
8950 should only be executed if the condition is true.
8951 If CODE is 'D', then the X is a condition operand and the instruction
8952 should only be executed if the condition is false: however, if the mode
8953 of the comparison is CCFPEmode, then always execute the instruction -- we
8954 do this because in these circumstances !GE does not necessarily imply LT;
8955 in these cases the instruction pattern will take care to make sure that
8956 an instruction containing %d will follow, thereby undoing the effects of
8957 doing this instruction unconditionally.
8958 If CODE is 'N' then X is a floating point operand that must be negated
8959 before output.
8960 If CODE is 'B' then output a bitwise inverted value of X (a const int).
8961 If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */
8963 void
8964 arm_print_operand (stream, x, code)
8965 FILE * stream;
8966 rtx x;
8967 int code;
8969 switch (code)
8971 case '@':
8972 fputs (ASM_COMMENT_START, stream);
8973 return;
8975 case '_':
8976 fputs (user_label_prefix, stream);
8977 return;
8979 case '|':
8980 fputs (REGISTER_PREFIX, stream);
8981 return;
8983 case '?':
8984 if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
8986 if (TARGET_THUMB || current_insn_predicate != NULL)
8987 abort ();
8989 fputs (arm_condition_codes[arm_current_cc], stream);
8991 else if (current_insn_predicate)
8993 enum arm_cond_code code;
8995 if (TARGET_THUMB)
8996 abort ();
8998 code = get_arm_condition_code (current_insn_predicate);
8999 fputs (arm_condition_codes[code], stream);
9001 return;
9003 case 'N':
9005 REAL_VALUE_TYPE r;
9006 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
9007 r = REAL_VALUE_NEGATE (r);
9008 fprintf (stream, "%s", fp_const_from_val (&r));
9010 return;
9012 case 'B':
9013 if (GET_CODE (x) == CONST_INT)
9015 HOST_WIDE_INT val;
9016 val = ARM_SIGN_EXTEND (~INTVAL (x));
9017 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
9019 else
9021 putc ('~', stream);
9022 output_addr_const (stream, x);
9024 return;
9026 case 'i':
9027 fprintf (stream, "%s", arithmetic_instr (x, 1));
9028 return;
9030 case 'I':
9031 fprintf (stream, "%s", arithmetic_instr (x, 0));
9032 return;
9034 case 'S':
9036 HOST_WIDE_INT val;
9037 const char * shift = shift_op (x, &val);
9039 if (shift)
9041 fprintf (stream, ", %s ", shift_op (x, &val));
9042 if (val == -1)
9043 arm_print_operand (stream, XEXP (x, 1), 0);
9044 else
9046 fputc ('#', stream);
9047 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
9051 return;
9053 /* An explanation of the 'Q', 'R' and 'H' register operands:
9055 In a pair of registers containing a DI or DF value the 'Q'
9056 operand returns the register number of the register containing
9057 the least significant part of the value. The 'R' operand returns
9058 the register number of the register containing the most
9059 significant part of the value.
9061 The 'H' operand returns the higher of the two register numbers.
9062 On a run where WORDS_BIG_ENDIAN is true the 'H' operand is the
9063 same as the 'Q' operand, since the most significant part of the
9064 value is held in the lower number register. The reverse is true
9065 on systems where WORDS_BIG_ENDIAN is false.
9067 The purpose of these operands is to distinguish between cases
9068 where the endian-ness of the values is important (for example
9069 when they are added together), and cases where the endian-ness
9070 is irrelevant, but the order of register operations is important.
9071 For example when loading a value from memory into a register
9072 pair, the endian-ness does not matter. Provided that the value
9073 from the lower memory address is put into the lower numbered
9074 register, and the value from the higher address is put into the
9075 higher numbered register, the load will work regardless of whether
9076 the value being loaded is big-wordian or little-wordian. The
9077 order of the two register loads can matter however, if the address
9078 of the memory location is actually held in one of the registers
9079 being overwritten by the load. */
9080 case 'Q':
9081 if (REGNO (x) > LAST_ARM_REGNUM)
9082 abort ();
9083 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 1 : 0));
9084 return;
9086 case 'R':
9087 if (REGNO (x) > LAST_ARM_REGNUM)
9088 abort ();
9089 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 0 : 1));
9090 return;
9092 case 'H':
9093 if (REGNO (x) > LAST_ARM_REGNUM)
9094 abort ();
9095 asm_fprintf (stream, "%r", REGNO (x) + 1);
9096 return;
9098 case 'm':
9099 asm_fprintf (stream, "%r",
9100 GET_CODE (XEXP (x, 0)) == REG
9101 ? REGNO (XEXP (x, 0)) : REGNO (XEXP (XEXP (x, 0), 0)));
9102 return;
9104 case 'M':
9105 asm_fprintf (stream, "{%r-%r}",
9106 REGNO (x),
9107 REGNO (x) + ARM_NUM_REGS (GET_MODE (x)) - 1);
9108 return;
9110 case 'd':
9111 /* CONST_TRUE_RTX means always -- that's the default. */
9112 if (x == const_true_rtx)
9113 return;
9115 if (TARGET_ARM)
9116 fputs (arm_condition_codes[get_arm_condition_code (x)],
9117 stream);
9118 else
9119 fputs (thumb_condition_code (x, 0), stream);
9120 return;
9122 case 'D':
9123 /* CONST_TRUE_RTX means not always -- ie never. We shouldn't ever
9124 want to do that. */
9125 if (x == const_true_rtx)
9126 abort ();
9128 if (TARGET_ARM)
9129 fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
9130 (get_arm_condition_code (x))],
9131 stream);
9132 else
9133 fputs (thumb_condition_code (x, 1), stream);
9134 return;
9136 default:
9137 if (x == 0)
9138 abort ();
9140 if (GET_CODE (x) == REG)
9141 asm_fprintf (stream, "%r", REGNO (x));
9142 else if (GET_CODE (x) == MEM)
9144 output_memory_reference_mode = GET_MODE (x);
9145 output_address (XEXP (x, 0));
9147 else if (GET_CODE (x) == CONST_DOUBLE)
9148 fprintf (stream, "#%s", fp_immediate_constant (x));
9149 else if (GET_CODE (x) == NEG)
9150 abort (); /* This should never happen now. */
9151 else
9153 fputc ('#', stream);
9154 output_addr_const (stream, x);
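/* Editorial note: a sketch of how the 'Q' and 'R' codes handled above
   are typically used; the template below is illustrative, in the style
   of a DImode add pattern, and is not quoted from arm.md.  */
#if 0
  /* Add the least significant words first, setting the carry, then
     fold the carry into the most significant words.  */
  "adds\t%Q0, %Q1, %Q2\;adc\t%R0, %R1, %R2"
#endif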
9159 #ifndef AOF_ASSEMBLER
9160 /* Target hook for assembling integer objects. The ARM version needs to
9161 handle word-sized values specially. */
9163 static bool
9164 arm_assemble_integer (x, size, aligned_p)
9165 rtx x;
9166 unsigned int size;
9167 int aligned_p;
9169 if (size == UNITS_PER_WORD && aligned_p)
9171 fputs ("\t.word\t", asm_out_file);
9172 output_addr_const (asm_out_file, x);
9174 /* Mark symbols as position independent. We only do this in the
9175 .text segment, not in the .data segment. */
9176 if (NEED_GOT_RELOC && flag_pic && making_const_table &&
9177 (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF))
9179 if (GET_CODE (x) == SYMBOL_REF
9180 && (CONSTANT_POOL_ADDRESS_P (x)
9181 || ENCODED_SHORT_CALL_ATTR_P (XSTR (x, 0))))
9182 fputs ("(GOTOFF)", asm_out_file);
9183 else if (GET_CODE (x) == LABEL_REF)
9184 fputs ("(GOTOFF)", asm_out_file);
9185 else
9186 fputs ("(GOT)", asm_out_file);
9188 fputc ('\n', asm_out_file);
9189 return true;
9192 return default_assemble_integer (x, size, aligned_p);
9194 #endif
9196 /* A finite state machine takes care of noticing whether or not instructions
9197 can be conditionally executed, and thus decrease execution time and code
9198 size by deleting branch instructions. The fsm is controlled by
9199 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
9201 /* The states of the fsm controlling condition codes are:
9202 0: normal, do nothing special
9203 1: make ASM_OUTPUT_OPCODE not output this instruction
9204 2: make ASM_OUTPUT_OPCODE not output this instruction
9205 3: make instructions conditional
9206 4: make instructions conditional
9208 State transitions (state->state by whom under condition):
9209 0 -> 1 final_prescan_insn if the `target' is a label
9210 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
9211 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
9212 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
9213 3 -> 0 (*targetm.asm_out.internal_label) if the `target' label is reached
9214 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
9215 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
9216 (the target insn is arm_target_insn).
9218 If the jump clobbers the conditions then we use states 2 and 4.
9220 A similar thing can be done with conditional return insns.
9222 XXX In case the `target' is an unconditional branch, this conditionalising
9223 of the instructions always reduces code size, but not always execution
9224 time. But then, I want to reduce the code size to somewhere near what
9225 /bin/cc produces. */
9227 /* Returns the index of the ARM condition code string in
9228 `arm_condition_codes'. COMPARISON should be an rtx like
9229 `(eq (...) (...))'. */
9231 static enum arm_cond_code
9232 get_arm_condition_code (comparison)
9233 rtx comparison;
9235 enum machine_mode mode = GET_MODE (XEXP (comparison, 0));
9236 int code;
9237 enum rtx_code comp_code = GET_CODE (comparison);
9239 if (GET_MODE_CLASS (mode) != MODE_CC)
9240 mode = SELECT_CC_MODE (comp_code, XEXP (comparison, 0),
9241 XEXP (comparison, 1));
9243 switch (mode)
9245 case CC_DNEmode: code = ARM_NE; goto dominance;
9246 case CC_DEQmode: code = ARM_EQ; goto dominance;
9247 case CC_DGEmode: code = ARM_GE; goto dominance;
9248 case CC_DGTmode: code = ARM_GT; goto dominance;
9249 case CC_DLEmode: code = ARM_LE; goto dominance;
9250 case CC_DLTmode: code = ARM_LT; goto dominance;
9251 case CC_DGEUmode: code = ARM_CS; goto dominance;
9252 case CC_DGTUmode: code = ARM_HI; goto dominance;
9253 case CC_DLEUmode: code = ARM_LS; goto dominance;
9254 case CC_DLTUmode: code = ARM_CC;
9256 dominance:
9257 if (comp_code != EQ && comp_code != NE)
9258 abort ();
9260 if (comp_code == EQ)
9261 return ARM_INVERSE_CONDITION_CODE (code);
9262 return code;
9264 case CC_NOOVmode:
9265 switch (comp_code)
9267 case NE: return ARM_NE;
9268 case EQ: return ARM_EQ;
9269 case GE: return ARM_PL;
9270 case LT: return ARM_MI;
9271 default: abort ();
9274 case CC_Zmode:
9275 switch (comp_code)
9277 case NE: return ARM_NE;
9278 case EQ: return ARM_EQ;
9279 default: abort ();
9282 case CCFPEmode:
9283 case CCFPmode:
9284 /* These encodings assume that AC=1 in the FPA system control
9285 byte. This allows us to handle all cases except UNEQ and
9286 LTGT. */
9287 switch (comp_code)
9289 case GE: return ARM_GE;
9290 case GT: return ARM_GT;
9291 case LE: return ARM_LS;
9292 case LT: return ARM_MI;
9293 case NE: return ARM_NE;
9294 case EQ: return ARM_EQ;
9295 case ORDERED: return ARM_VC;
9296 case UNORDERED: return ARM_VS;
9297 case UNLT: return ARM_LT;
9298 case UNLE: return ARM_LE;
9299 case UNGT: return ARM_HI;
9300 case UNGE: return ARM_PL;
9301 /* UNEQ and LTGT do not have a representation. */
9302 case UNEQ: /* Fall through. */
9303 case LTGT: /* Fall through. */
9304 default: abort ();
9307 case CC_SWPmode:
9308 switch (comp_code)
9310 case NE: return ARM_NE;
9311 case EQ: return ARM_EQ;
9312 case GE: return ARM_LE;
9313 case GT: return ARM_LT;
9314 case LE: return ARM_GE;
9315 case LT: return ARM_GT;
9316 case GEU: return ARM_LS;
9317 case GTU: return ARM_CC;
9318 case LEU: return ARM_CS;
9319 case LTU: return ARM_HI;
9320 default: abort ();
9323 case CC_Cmode:
9324 switch (comp_code)
9326 case LTU: return ARM_CS;
9327 case GEU: return ARM_CC;
9328 default: abort ();
9331 case CCmode:
9332 switch (comp_code)
9334 case NE: return ARM_NE;
9335 case EQ: return ARM_EQ;
9336 case GE: return ARM_GE;
9337 case GT: return ARM_GT;
9338 case LE: return ARM_LE;
9339 case LT: return ARM_LT;
9340 case GEU: return ARM_CS;
9341 case GTU: return ARM_HI;
9342 case LEU: return ARM_LS;
9343 case LTU: return ARM_CC;
9344 default: abort ();
9347 default: abort ();
9350 abort ();
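/* Editorial note: an illustrative use of the table above.  For a
   comparison whose operands were swapped by the canonicalizer the mode
   is CC_SWPmode, so an rtx such as (gt (reg:CC_SWP CC_REGNUM)
   (const_int 0)) yields ARM_LT -- the condition is reversed to undo
   the operand swap.  */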
9354 void
9355 arm_final_prescan_insn (insn)
9356 rtx insn;
9358 /* BODY will hold the body of INSN. */
9359 rtx body = PATTERN (insn);
9361 /* This will be 1 if trying to repeat the trick, and things need to be
9362 reversed if it appears to fail. */
9363 int reverse = 0;
9365 /* JUMP_CLOBBERS being nonzero implies that the condition codes are
9366 clobbered when the branch is taken, even if the rtl suggests otherwise. It also
9367 means that we have to grub around within the jump expression to find
9368 out what the conditions are when the jump isn't taken. */
9369 int jump_clobbers = 0;
9371 /* If we start with a return insn, we only succeed if we find another one. */
9372 int seeking_return = 0;
9374 /* START_INSN will hold the insn from where we start looking. This is the
9375 first insn after the following code_label if REVERSE is true. */
9376 rtx start_insn = insn;
9378 /* If in state 4, check if the target branch is reached, in order to
9379 change back to state 0. */
9380 if (arm_ccfsm_state == 4)
9382 if (insn == arm_target_insn)
9384 arm_target_insn = NULL;
9385 arm_ccfsm_state = 0;
9387 return;
9390 /* If in state 3, it is possible to repeat the trick, if this insn is an
9391 unconditional branch to a label, and immediately following this branch
9392 is the previous target label which is only used once, and the label this
9393 branch jumps to is not too far off. */
9394 if (arm_ccfsm_state == 3)
9396 if (simplejump_p (insn))
9398 start_insn = next_nonnote_insn (start_insn);
9399 if (GET_CODE (start_insn) == BARRIER)
9401 /* XXX Isn't this always a barrier? */
9402 start_insn = next_nonnote_insn (start_insn);
9404 if (GET_CODE (start_insn) == CODE_LABEL
9405 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
9406 && LABEL_NUSES (start_insn) == 1)
9407 reverse = TRUE;
9408 else
9409 return;
9411 else if (GET_CODE (body) == RETURN)
9413 start_insn = next_nonnote_insn (start_insn);
9414 if (GET_CODE (start_insn) == BARRIER)
9415 start_insn = next_nonnote_insn (start_insn);
9416 if (GET_CODE (start_insn) == CODE_LABEL
9417 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
9418 && LABEL_NUSES (start_insn) == 1)
9420 reverse = TRUE;
9421 seeking_return = 1;
9423 else
9424 return;
9426 else
9427 return;
9430 if (arm_ccfsm_state != 0 && !reverse)
9431 abort ();
9432 if (GET_CODE (insn) != JUMP_INSN)
9433 return;
9435 /* This jump might be paralleled with a clobber of the condition codes;
9436 the jump should always come first. */
9437 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
9438 body = XVECEXP (body, 0, 0);
9440 #if 0
9441 /* If this is a conditional return then we don't want to know */
9442 if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
9443 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
9444 && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
9445 || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
9446 return;
9447 #endif
9449 if (reverse
9450 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
9451 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
9453 int insns_skipped;
9454 int fail = FALSE, succeed = FALSE;
9455 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
9456 int then_not_else = TRUE;
9457 rtx this_insn = start_insn, label = 0;
9459 /* If the jump cannot be done with one instruction, we cannot
9460 conditionally execute the instruction in the inverse case. */
9461 if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
9463 jump_clobbers = 1;
9464 return;
9467 /* Register the insn jumped to. */
9468 if (reverse)
9470 if (!seeking_return)
9471 label = XEXP (SET_SRC (body), 0);
9473 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
9474 label = XEXP (XEXP (SET_SRC (body), 1), 0);
9475 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
9477 label = XEXP (XEXP (SET_SRC (body), 2), 0);
9478 then_not_else = FALSE;
9480 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
9481 seeking_return = 1;
9482 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
9484 seeking_return = 1;
9485 then_not_else = FALSE;
9487 else
9488 abort ();
9490 /* See how many insns this branch skips, and what kind of insns. If all
9491 insns are okay, and the label or unconditional branch to the same
9492 label is not too far away, succeed. */
9493 for (insns_skipped = 0;
9494 !fail && !succeed && insns_skipped++ < max_insns_skipped;)
9496 rtx scanbody;
9498 this_insn = next_nonnote_insn (this_insn);
9499 if (!this_insn)
9500 break;
9502 switch (GET_CODE (this_insn))
9504 case CODE_LABEL:
9505 /* Succeed if it is the target label, otherwise fail since
9506 control falls in from somewhere else. */
9507 if (this_insn == label)
9509 if (jump_clobbers)
9511 arm_ccfsm_state = 2;
9512 this_insn = next_nonnote_insn (this_insn);
9514 else
9515 arm_ccfsm_state = 1;
9516 succeed = TRUE;
9518 else
9519 fail = TRUE;
9520 break;
9522 case BARRIER:
9523 /* Succeed if the following insn is the target label.
9524 Otherwise fail.
9525 If return insns are used then the last insn in a function
9526 will be a barrier. */
9527 this_insn = next_nonnote_insn (this_insn);
9528 if (this_insn && this_insn == label)
9530 if (jump_clobbers)
9532 arm_ccfsm_state = 2;
9533 this_insn = next_nonnote_insn (this_insn);
9535 else
9536 arm_ccfsm_state = 1;
9537 succeed = TRUE;
9539 else
9540 fail = TRUE;
9541 break;
9543 case CALL_INSN:
9544 /* If using 32-bit addresses the cc is not preserved over
9545 calls. */
9546 if (TARGET_APCS_32)
9548 /* Succeed if the following insn is the target label,
9549 or if the following two insns are a barrier and
9550 the target label. */
9551 this_insn = next_nonnote_insn (this_insn);
9552 if (this_insn && GET_CODE (this_insn) == BARRIER)
9553 this_insn = next_nonnote_insn (this_insn);
9555 if (this_insn && this_insn == label
9556 && insns_skipped < max_insns_skipped)
9558 if (jump_clobbers)
9560 arm_ccfsm_state = 2;
9561 this_insn = next_nonnote_insn (this_insn);
9563 else
9564 arm_ccfsm_state = 1;
9565 succeed = TRUE;
9567 else
9568 fail = TRUE;
9570 break;
9572 case JUMP_INSN:
9573 /* If this is an unconditional branch to the same label, succeed.
9574 If it is to another label, do nothing. If it is conditional,
9575 fail. */
9576 /* XXX Probably, the tests for SET and the PC are unnecessary. */
9578 scanbody = PATTERN (this_insn);
9579 if (GET_CODE (scanbody) == SET
9580 && GET_CODE (SET_DEST (scanbody)) == PC)
9582 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
9583 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
9585 arm_ccfsm_state = 2;
9586 succeed = TRUE;
9588 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
9589 fail = TRUE;
9591 /* Fail if a conditional return is undesirable (eg on a
9592 StrongARM), but still allow this if optimizing for size. */
9593 else if (GET_CODE (scanbody) == RETURN
9594 && !use_return_insn (TRUE)
9595 && !optimize_size)
9596 fail = TRUE;
9597 else if (GET_CODE (scanbody) == RETURN
9598 && seeking_return)
9600 arm_ccfsm_state = 2;
9601 succeed = TRUE;
9603 else if (GET_CODE (scanbody) == PARALLEL)
9605 switch (get_attr_conds (this_insn))
9607 case CONDS_NOCOND:
9608 break;
9609 default:
9610 fail = TRUE;
9611 break;
9614 else
9615 fail = TRUE; /* Unrecognized jump (eg epilogue). */
9617 break;
9619 case INSN:
9620 /* Instructions using or affecting the condition codes make it
9621 fail. */
9622 scanbody = PATTERN (this_insn);
9623 if (!(GET_CODE (scanbody) == SET
9624 || GET_CODE (scanbody) == PARALLEL)
9625 || get_attr_conds (this_insn) != CONDS_NOCOND)
9626 fail = TRUE;
9627 break;
9629 default:
9630 break;
9633 if (succeed)
9635 if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
9636 arm_target_label = CODE_LABEL_NUMBER (label);
9637 else if (seeking_return || arm_ccfsm_state == 2)
9639 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
9641 this_insn = next_nonnote_insn (this_insn);
9642 if (this_insn && (GET_CODE (this_insn) == BARRIER
9643 || GET_CODE (this_insn) == CODE_LABEL))
9644 abort ();
9646 if (!this_insn)
9648 /* Oh, dear!  We ran off the end... give up. */
9649 recog (PATTERN (insn), insn, NULL);
9650 arm_ccfsm_state = 0;
9651 arm_target_insn = NULL;
9652 return;
9654 arm_target_insn = this_insn;
9656 else
9657 abort ();
9658 if (jump_clobbers)
9660 if (reverse)
9661 abort ();
9662 arm_current_cc =
9663 get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
9664 0), 0), 1));
9665 if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
9666 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
9667 if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
9668 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
9670 else
9672 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
9673 what it was. */
9674 if (!reverse)
9675 arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
9676 0));
9679 if (reverse || then_not_else)
9680 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
9683 /* Restore recog_data (getting the attributes of other insns can
9684 destroy this array, but final.c assumes that it remains intact
9685 across this call; since the insn has been recognized already we
9686 call recog directly). */
9687 recog (PATTERN (insn), insn, NULL);
9691 /* Returns true if REGNO is a valid register
9692 for holding a quantity of type MODE. */
9694 int
9695 arm_hard_regno_mode_ok (regno, mode)
9696 unsigned int regno;
9697 enum machine_mode mode;
9699 if (GET_MODE_CLASS (mode) == MODE_CC)
9700 return regno == CC_REGNUM;
9702 if (TARGET_THUMB)
9703 /* For the Thumb we only allow values bigger than SImode in
9704 registers 0 - 6, so that there is always a second low
9705 register available to hold the upper part of the value.
9706 We probably ought to ensure that the register is the
9707 start of an even numbered register pair. */
9708 return (ARM_NUM_REGS (mode) < 2) || (regno < LAST_LO_REGNUM);
9710 if (regno <= LAST_ARM_REGNUM)
9711 /* We allow any value to be stored in the general registers. */
9712 return 1;
9714 if ( regno == FRAME_POINTER_REGNUM
9715 || regno == ARG_POINTER_REGNUM)
9716 /* We only allow integers in the fake hard registers. */
9717 return GET_MODE_CLASS (mode) == MODE_INT;
9719 /* The only registers left are the FPU registers
9720 which we only allow to hold FP values. */
9721 return GET_MODE_CLASS (mode) == MODE_FLOAT
9722 && regno >= FIRST_ARM_FP_REGNUM
9723 && regno <= LAST_ARM_FP_REGNUM;
9727 arm_regno_class (regno)
9728 int regno;
9730 if (TARGET_THUMB)
9732 if (regno == STACK_POINTER_REGNUM)
9733 return STACK_REG;
9734 if (regno == CC_REGNUM)
9735 return CC_REG;
9736 if (regno < 8)
9737 return LO_REGS;
9738 return HI_REGS;
9741 if ( regno <= LAST_ARM_REGNUM
9742 || regno == FRAME_POINTER_REGNUM
9743 || regno == ARG_POINTER_REGNUM)
9744 return GENERAL_REGS;
9746 if (regno == CC_REGNUM)
9747 return NO_REGS;
9749 return FPU_REGS;
9752 /* Handle a special case when computing the offset
9753 of an argument from the frame pointer. */
9755 int
9756 arm_debugger_arg_offset (value, addr)
9757 int value;
9758 rtx addr;
9760 rtx insn;
9762 /* We are only interested if dbxout_parms() failed to compute the offset. */
9763 if (value != 0)
9764 return 0;
9766 /* We can only cope with the case where the address is held in a register. */
9767 if (GET_CODE (addr) != REG)
9768 return 0;
9770 /* If we are using the frame pointer to point at the argument, then
9771 an offset of 0 is correct. */
9772 if (REGNO (addr) == (unsigned) HARD_FRAME_POINTER_REGNUM)
9773 return 0;
9775 /* If we are using the stack pointer to point at the
9776 argument, then an offset of 0 is correct. */
9777 if ((TARGET_THUMB || !frame_pointer_needed)
9778 && REGNO (addr) == SP_REGNUM)
9779 return 0;
9781 /* Oh dear. The argument is pointed to by a register rather
9782 than being held in a register, or being stored at a known
9783 offset from the frame pointer. Since GDB only understands
9784 those two kinds of argument we must translate the address
9785 held in the register into an offset from the frame pointer.
9786 We do this by searching through the insns for the function
9787 looking to see where this register gets its value. If the
9788 register is initialized from the frame pointer plus an offset
9789 then we are in luck and we can continue, otherwise we give up.
9791 This code is exercised by producing debugging information
9792 for a function with arguments like this:
9794 double func (double a, double b, int c, double d) {return d;}
9796 Without this code the stab for parameter 'd' will be set to
9797 an offset of 0 from the frame pointer, rather than 8. */
9799 /* The if() statement says:
9801 If the insn is a normal instruction
9802 and if the insn is setting the value in a register
9803 and if the register being set is the register holding the address of the argument
9804 and if the address is computed by an addition
9805 that involves adding to a register
9806 which is the frame pointer
9807 a constant integer
9809 then... */
9811 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9813 if ( GET_CODE (insn) == INSN
9814 && GET_CODE (PATTERN (insn)) == SET
9815 && REGNO (XEXP (PATTERN (insn), 0)) == REGNO (addr)
9816 && GET_CODE (XEXP (PATTERN (insn), 1)) == PLUS
9817 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 0)) == REG
9818 && REGNO (XEXP (XEXP (PATTERN (insn), 1), 0)) == (unsigned) HARD_FRAME_POINTER_REGNUM
9819 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 1)) == CONST_INT
9822 value = INTVAL (XEXP (XEXP (PATTERN (insn), 1), 1));
9824 break;
9828 if (value == 0)
9830 debug_rtx (addr);
9831 warning ("unable to compute real location of stacked parameter");
9832 value = 8; /* XXX magic hack */
9835 return value;
9838 #define def_builtin(NAME, TYPE, CODE) \
9839 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, NULL, NULL_TREE)
9841 void
9842 arm_init_builtins ()
9844 tree endlink = void_list_node;
9845 tree int_endlink = tree_cons (NULL_TREE, integer_type_node, endlink);
9846 tree pchar_type_node = build_pointer_type (char_type_node);
9848 tree int_ftype_int, void_ftype_pchar;
9850 /* void func (char *) */
9851 void_ftype_pchar
9852 = build_function_type_list (void_type_node, pchar_type_node, NULL_TREE);
9854 /* int func (int) */
9855 int_ftype_int
9856 = build_function_type (integer_type_node, int_endlink);
9858 /* Initialize arm V5 builtins. */
9859 if (arm_arch5)
9860 def_builtin ("__builtin_clz", int_ftype_int, ARM_BUILTIN_CLZ);
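/* Editorial note: a user-level example of the builtin registered above;
   on an arm_arch5 target the call should expand via ARM_BUILTIN_CLZ
   (see arm_expand_builtin below) to a single clz instruction.  */
#if 0
int leading_zero_count (int x)
{
  return __builtin_clz (x);
}
#endif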
9863 /* Expand an expression EXP that calls a built-in function,
9864 with result going to TARGET if that's convenient
9865 (and in mode MODE if that's convenient).
9866 SUBTARGET may be used as the target for computing one of EXP's operands.
9867 IGNORE is nonzero if the value is to be ignored. */
9869 rtx
9870 arm_expand_builtin (exp, target, subtarget, mode, ignore)
9871 tree exp;
9872 rtx target;
9873 rtx subtarget ATTRIBUTE_UNUSED;
9874 enum machine_mode mode ATTRIBUTE_UNUSED;
9875 int ignore ATTRIBUTE_UNUSED;
9877 enum insn_code icode;
9878 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9879 tree arglist = TREE_OPERAND (exp, 1);
9880 tree arg0;
9881 rtx op0, pat;
9882 enum machine_mode tmode, mode0;
9883 int fcode = DECL_FUNCTION_CODE (fndecl);
9885 switch (fcode)
9887 default:
9888 break;
9890 case ARM_BUILTIN_CLZ:
9891 icode = CODE_FOR_clz;
9892 arg0 = TREE_VALUE (arglist);
9893 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
9894 tmode = insn_data[icode].operand[0].mode;
9895 mode0 = insn_data[icode].operand[1].mode;
9897 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
9898 op0 = copy_to_mode_reg (mode0, op0);
9899 if (target == 0
9900 || GET_MODE (target) != tmode
9901 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
9902 target = gen_reg_rtx (tmode);
9903 pat = GEN_FCN (icode) (target, op0);
9904 if (! pat)
9905 return 0;
9906 emit_insn (pat);
9907 return target;
9910 /* @@@ Should really do something sensible here. */
9911 return NULL_RTX;
9914 /* Recursively search through all of the blocks in a function
9915 checking to see if any of the variables created in that
9916 function match the RTX called 'orig'. If they do then
9917 replace them with the RTX called 'new'. */
9919 static void
9920 replace_symbols_in_block (block, orig, new)
9921 tree block;
9922 rtx orig;
9923 rtx new;
9925 for (; block; block = BLOCK_CHAIN (block))
9927 tree sym;
9929 if (!TREE_USED (block))
9930 continue;
9932 for (sym = BLOCK_VARS (block); sym; sym = TREE_CHAIN (sym))
9934 if ( (DECL_NAME (sym) == 0 && TREE_CODE (sym) != TYPE_DECL)
9935 || DECL_IGNORED_P (sym)
9936 || TREE_CODE (sym) != VAR_DECL
9937 || DECL_EXTERNAL (sym)
9938 || !rtx_equal_p (DECL_RTL (sym), orig)
9940 continue;
9942 SET_DECL_RTL (sym, new);
9945 replace_symbols_in_block (BLOCK_SUBBLOCKS (block), orig, new);
9949 /* Return the number (counting from 0) of
9950 the least significant set bit in MASK. */
9952 #ifdef __GNUC__
9953 inline
9954 #endif
9955 static int
9956 number_of_first_bit_set (mask)
9957 int mask;
9959 int bit;
9961 for (bit = 0;
9962 (mask & (1 << bit)) == 0;
9963 ++bit)
9964 continue;
9966 return bit;
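/* Editorial note: illustrative values for the helper above:
   number_of_first_bit_set (0x01) == 0 and
   number_of_first_bit_set (0x18) == 3.  Note that a mask of zero would
   never terminate, so callers must pass a non-empty mask.  */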
9969 /* Generate code to return from a thumb function.
9970 If 'reg_containing_return_addr' is -1, then the return address is
9971 actually on the stack, at the stack pointer. */
9972 static void
9973 thumb_exit (f, reg_containing_return_addr, eh_ofs)
9974 FILE * f;
9975 int reg_containing_return_addr;
9976 rtx eh_ofs;
9978 unsigned regs_available_for_popping;
9979 unsigned regs_to_pop;
9980 int pops_needed;
9981 unsigned available;
9982 unsigned required;
9983 int mode;
9984 int size;
9985 int restore_a4 = FALSE;
9987 /* Compute the registers we need to pop. */
9988 regs_to_pop = 0;
9989 pops_needed = 0;
9991 /* There is an assumption here, that if eh_ofs is not NULL, the
9992 normal return address will have been pushed. */
9993 if (reg_containing_return_addr == -1 || eh_ofs)
9995 /* When we are generating a return for __builtin_eh_return,
9996 reg_containing_return_addr must specify the return regno. */
9997 if (eh_ofs && reg_containing_return_addr == -1)
9998 abort ();
10000 regs_to_pop |= 1 << LR_REGNUM;
10001 ++pops_needed;
10004 if (TARGET_BACKTRACE)
10006 /* Restore the (ARM) frame pointer and stack pointer. */
10007 regs_to_pop |= (1 << ARM_HARD_FRAME_POINTER_REGNUM) | (1 << SP_REGNUM);
10008 pops_needed += 2;
10011 /* If there is nothing to pop then just emit the BX instruction and
10012 return. */
10013 if (pops_needed == 0)
10015 if (eh_ofs)
10016 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
10018 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
10019 return;
10021 /* Otherwise if we are not supporting interworking and we have not created
10022 a backtrace structure and the function was not entered in ARM mode then
10023 just pop the return address straight into the PC. */
10024 else if (!TARGET_INTERWORK
10025 && !TARGET_BACKTRACE
10026 && !is_called_in_ARM_mode (current_function_decl))
10028 if (eh_ofs)
10030 asm_fprintf (f, "\tadd\t%r, #4\n", SP_REGNUM);
10031 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
10032 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
10034 else
10035 asm_fprintf (f, "\tpop\t{%r}\n", PC_REGNUM);
10037 return;
10040 /* Find out how many of the (return) argument registers we can corrupt. */
10041 regs_available_for_popping = 0;
10043 /* If returning via __builtin_eh_return, the bottom three registers
10044 all contain information needed for the return. */
10045 if (eh_ofs)
10046 size = 12;
10047 else
10049 #ifdef RTX_CODE
10050 /* If we can, deduce the registers used from the function's
10051 return value. This is more reliable than examining
10052 regs_ever_live[] because that will be set if the register is
10053 ever used in the function, not just if the register is used
10054 to hold a return value. */
10056 if (current_function_return_rtx != 0)
10057 mode = GET_MODE (current_function_return_rtx);
10058 else
10059 #endif
10060 mode = DECL_MODE (DECL_RESULT (current_function_decl));
10062 size = GET_MODE_SIZE (mode);
10064 if (size == 0)
10066 /* In a void function we can use any argument register.
10067 In a function that returns a structure on the stack
10068 we can use the second and third argument registers. */
10069 if (mode == VOIDmode)
10070 regs_available_for_popping =
10071 (1 << ARG_REGISTER (1))
10072 | (1 << ARG_REGISTER (2))
10073 | (1 << ARG_REGISTER (3));
10074 else
10075 regs_available_for_popping =
10076 (1 << ARG_REGISTER (2))
10077 | (1 << ARG_REGISTER (3));
10079 else if (size <= 4)
10080 regs_available_for_popping =
10081 (1 << ARG_REGISTER (2))
10082 | (1 << ARG_REGISTER (3));
10083 else if (size <= 8)
10084 regs_available_for_popping =
10085 (1 << ARG_REGISTER (3));
10088 /* Match registers to be popped with registers into which we pop them. */
10089 for (available = regs_available_for_popping,
10090 required = regs_to_pop;
10091 required != 0 && available != 0;
10092 available &= ~(available & - available),
10093 required &= ~(required & - required))
10094 -- pops_needed;
10096 /* If we have any popping registers left over, remove them. */
10097 if (available > 0)
10098 regs_available_for_popping &= ~available;
10100 /* Otherwise if we need another popping register we can use
10101 the fourth argument register. */
10102 else if (pops_needed)
10104 /* If we have not found any free argument registers and
10105 reg a4 contains the return address, we must move it. */
10106 if (regs_available_for_popping == 0
10107 && reg_containing_return_addr == LAST_ARG_REGNUM)
10109 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
10110 reg_containing_return_addr = LR_REGNUM;
10112 else if (size > 12)
10114 /* Register a4 is being used to hold part of the return value,
10115 but we have dire need of a free, low register. */
10116 restore_a4 = TRUE;
10118 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, LAST_ARG_REGNUM);
10121 if (reg_containing_return_addr != LAST_ARG_REGNUM)
10123 /* The fourth argument register is available. */
10124 regs_available_for_popping |= 1 << LAST_ARG_REGNUM;
10126 --pops_needed;
10130 /* Pop as many registers as we can. */
10131 thumb_pushpop (f, regs_available_for_popping, FALSE);
10133 /* Process the registers we popped. */
10134 if (reg_containing_return_addr == -1)
10136 /* The return address was popped into the lowest numbered register. */
10137 regs_to_pop &= ~(1 << LR_REGNUM);
10139 reg_containing_return_addr =
10140 number_of_first_bit_set (regs_available_for_popping);
10142 /* Remove this register from the mask of available registers, so that
10143 the return address will not be corrupted by further pops. */
10144 regs_available_for_popping &= ~(1 << reg_containing_return_addr);
10147 /* If we popped other registers then handle them here. */
10148 if (regs_available_for_popping)
10150 int frame_pointer;
10152 /* Work out which register currently contains the frame pointer. */
10153 frame_pointer = number_of_first_bit_set (regs_available_for_popping);
10155 /* Move it into the correct place. */
10156 asm_fprintf (f, "\tmov\t%r, %r\n",
10157 ARM_HARD_FRAME_POINTER_REGNUM, frame_pointer);
10159 /* (Temporarily) remove it from the mask of popped registers. */
10160 regs_available_for_popping &= ~(1 << frame_pointer);
10161 regs_to_pop &= ~(1 << ARM_HARD_FRAME_POINTER_REGNUM);
10163 if (regs_available_for_popping)
10165 int stack_pointer;
10167 /* We popped the stack pointer as well,
10168 find the register that contains it. */
10169 stack_pointer = number_of_first_bit_set (regs_available_for_popping);
10171 /* Move it into the stack register. */
10172 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, stack_pointer);
10174 /* At this point we have popped all necessary registers, so
10175 do not worry about restoring regs_available_for_popping
10176 to its correct value:
10178 assert (pops_needed == 0)
10179 assert (regs_available_for_popping == (1 << frame_pointer))
10180 assert (regs_to_pop == (1 << STACK_POINTER)) */
10182 else
10184 /* Since we have just moved the popped value into the frame
10185 pointer, the popping register is available for reuse, and
10186 we know that we still have the stack pointer left to pop. */
10187 regs_available_for_popping |= (1 << frame_pointer);
10191 /* If we still have registers left on the stack, but we no longer have
10192 any registers into which we can pop them, then we must move the return
10193 address into the link register and make available the register that
10194 contained it. */
10195 if (regs_available_for_popping == 0 && pops_needed > 0)
10197 regs_available_for_popping |= 1 << reg_containing_return_addr;
10199 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM,
10200 reg_containing_return_addr);
10202 reg_containing_return_addr = LR_REGNUM;
10205 /* If we have registers left on the stack then pop some more.
10206 We know that at most we will want to pop FP and SP. */
10207 if (pops_needed > 0)
10209 int popped_into;
10210 int move_to;
10212 thumb_pushpop (f, regs_available_for_popping, FALSE);
10214 /* We have popped either FP or SP.
10215 Move whichever one it is into the correct register. */
10216 popped_into = number_of_first_bit_set (regs_available_for_popping);
10217 move_to = number_of_first_bit_set (regs_to_pop);
10219 asm_fprintf (f, "\tmov\t%r, %r\n", move_to, popped_into);
10221 regs_to_pop &= ~(1 << move_to);
10223 --pops_needed;
10226 /* If we still have not popped everything then we must have only
10227 had one register available to us and we are now popping the SP. */
10228 if (pops_needed > 0)
10230 int popped_into;
10232 thumb_pushpop (f, regs_available_for_popping, FALSE);
10234 popped_into = number_of_first_bit_set (regs_available_for_popping);
10236 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, popped_into);
10238 /* assert (regs_to_pop == (1 << STACK_POINTER))
10239 assert (pops_needed == 1) */
10243 /* If necessary restore the a4 register. */
10244 if (restore_a4)
10246 if (reg_containing_return_addr != LR_REGNUM)
10248 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
10249 reg_containing_return_addr = LR_REGNUM;
10252 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
10255 if (eh_ofs)
10256 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
10258 /* Return to caller. */
10259 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
10262 /* Emit code to push or pop registers to or from the stack. */
10264 static void
10265 thumb_pushpop (f, mask, push)
10266 FILE * f;
10267 int mask;
10268 int push;
10270 int regno;
10271 int lo_mask = mask & 0xFF;
10273 if (lo_mask == 0 && !push && (mask & (1 << 15)))
10275 /* Special case. Do not generate a POP PC statement here, do it in
10276 thumb_exit (). */
10277 thumb_exit (f, -1, NULL_RTX);
10278 return;
10281 fprintf (f, "\t%s\t{", push ? "push" : "pop");
10283 /* Look at the low registers first. */
10284 for (regno = 0; regno <= LAST_LO_REGNUM; regno++, lo_mask >>= 1)
10286 if (lo_mask & 1)
10288 asm_fprintf (f, "%r", regno);
10290 if ((lo_mask & ~1) != 0)
10291 fprintf (f, ", ");
10295 if (push && (mask & (1 << LR_REGNUM)))
10297 /* Catch pushing the LR. */
10298 if (mask & 0xFF)
10299 fprintf (f, ", ");
10301 asm_fprintf (f, "%r", LR_REGNUM);
10303 else if (!push && (mask & (1 << PC_REGNUM)))
10305 /* Catch popping the PC. */
10306 if (TARGET_INTERWORK || TARGET_BACKTRACE)
10308 /* The PC is never popped directly; instead
10309 it is popped into r3 and then BX is used. */
10310 fprintf (f, "}\n");
10312 thumb_exit (f, -1, NULL_RTX);
10314 return;
10316 else
10318 if (mask & 0xFF)
10319 fprintf (f, ", ");
10321 asm_fprintf (f, "%r", PC_REGNUM);
10325 fprintf (f, "}\n");
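/* Illustrative sketch (not generated verbatim): with mask 0x0091 plus
   the LR bit, a push call emits
	push	{r0, r4, r7, lr}
   while a pop call with mask 0x0010 plus the PC bit emits
	pop	{r4, pc}
   unless interworking or backtracing is enabled, in which case the
   PC pop is rerouted through thumb_exit as above.  */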
10328 void
10329 thumb_final_prescan_insn (insn)
10330 rtx insn;
10332 if (flag_print_asm_name)
10333 asm_fprintf (asm_out_file, "%@ 0x%04x\n",
10334 INSN_ADDRESSES (INSN_UID (insn)));
10338 int thumb_shiftable_const (val)
10339 unsigned HOST_WIDE_INT val;
10341 unsigned HOST_WIDE_INT mask = 0xff;
10342 int i;
10344 if (val == 0) /* XXX */
10345 return 0;
10347 for (i = 0; i < 25; i++)
10348 if ((val & (mask << i)) == val)
10349 return 1;
10351 return 0;
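/* For example, 0x00ff0000 (0xff << 16) and 0x00003fc0 (0xff << 6) are
   accepted, but 0x00100100 is rejected because its set bits do not fit
   inside a single 8-bit window.  */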
10354 /* Returns nonzero if the current function contains,
10355 or might contain, a far jump. */
10358 int thumb_far_jump_used_p (in_prologue)
10359 int in_prologue;
10361 rtx insn;
10363 /* This test is only important for leaf functions. */
10364 /* assert (!leaf_function_p ()); */
10366 /* If we have already decided that far jumps may be used,
10367 do not bother checking again, and always return true even if
10368 it turns out that they are not being used. Once we have made
10369 the decision that far jumps are present (and that hence the link
10370 register will be pushed onto the stack) we cannot go back on it. */
10371 if (cfun->machine->far_jump_used)
10372 return 1;
10374 /* If this function is not being called from the prologue/epilogue
10375 generation code then it must be being called from the
10376 INITIAL_ELIMINATION_OFFSET macro. */
10377 if (!in_prologue)
10379 /* In this case we know that we are being asked about the elimination
10380 of the arg pointer register. If that register is not being used,
10381 then there are no arguments on the stack, and we do not have to
10382 worry that a far jump might force the prologue to push the link
10383 register, changing the stack offsets. In this case we can just
10384 return false, since the presence of far jumps in the function will
10385 not affect stack offsets.
10387 If the arg pointer is live (or if it was live, but has now been
10388 eliminated and so set to dead) then we do have to test to see if
10389 the function might contain a far jump. This test can lead to some
10390 false negatives, since before reload is completed, the length of
10391 branch instructions is not known, so gcc defaults to returning their
10392 longest length, which in turn sets the far jump attribute to true.
10394 A false negative will not result in bad code being generated, but it
10395 will result in a needless push and pop of the link register. We
10396 hope that this does not occur too often. */
10397 if (regs_ever_live [ARG_POINTER_REGNUM])
10398 cfun->machine->arg_pointer_live = 1;
10399 else if (!cfun->machine->arg_pointer_live)
10400 return 0;
10403 /* Check to see if the function contains a branch
10404 insn with the far jump attribute set. */
10405 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
10407 if (GET_CODE (insn) == JUMP_INSN
10408 /* Ignore tablejump patterns. */
10409 && GET_CODE (PATTERN (insn)) != ADDR_VEC
10410 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
10411 && get_attr_far_jump (insn) == FAR_JUMP_YES
10414 /* Record the fact that we have decided that
10415 the function does use far jumps. */
10416 cfun->machine->far_jump_used = 1;
10417 return 1;
10421 return 0;
10424 /* Return nonzero if FUNC must be entered in ARM mode. */
10427 int is_called_in_ARM_mode (func)
10428 tree func;
10430 if (TREE_CODE (func) != FUNCTION_DECL)
10431 abort ();
10433 /* Ignore the problem of functions whose address is taken. */
10434 if (TARGET_CALLEE_INTERWORKING && TREE_PUBLIC (func))
10435 return TRUE;
10437 #ifdef ARM_PE
10438 return lookup_attribute ("interfacearm", DECL_ATTRIBUTES (func)) != NULL_TREE;
10439 #else
10440 return FALSE;
10441 #endif
10444 /* The bits which aren't usefully expanded as rtl. */
10446 const char *
10447 thumb_unexpanded_epilogue ()
10449 int regno;
10450 int live_regs_mask = 0;
10451 int high_regs_pushed = 0;
10452 int leaf_function = leaf_function_p ();
10453 int had_to_push_lr;
10454 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
10456 if (return_used_this_function)
10457 return "";
10459 if (IS_NAKED (arm_current_func_type ()))
10460 return "";
10462 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
10463 if (THUMB_REG_PUSHED_P (regno))
10464 live_regs_mask |= 1 << regno;
10466 for (regno = 8; regno < 13; regno++)
10467 if (THUMB_REG_PUSHED_P (regno))
10468 high_regs_pushed++;
10470 /* The prologue may have pushed some high registers to use as
10471 work registers, e.g. the testsuite file:
10472 gcc/testsuite/gcc/gcc.c-torture/execute/complex-2.c
10473 compiles to produce:
10474 push {r4, r5, r6, r7, lr}
10475 mov r7, r9
10476 mov r6, r8
10477 push {r6, r7}
10478 as part of the prologue. We have to undo that pushing here. */
10480 if (high_regs_pushed)
10482 int mask = live_regs_mask;
10483 int next_hi_reg;
10484 int size;
10485 int mode;
10487 #ifdef RTX_CODE
10488 /* If possible, deduce the registers used from the function's return value.
10489 This is more reliable than examining regs_ever_live[] because that
10490 will be set if the register is ever used in the function, not just if
10491 the register is used to hold a return value. */
10493 if (current_function_return_rtx != 0)
10494 mode = GET_MODE (current_function_return_rtx);
10495 else
10496 #endif
10497 mode = DECL_MODE (DECL_RESULT (current_function_decl));
10499 size = GET_MODE_SIZE (mode);
10501 /* Unless we are returning a type of size > 12, register r3 is
10502 available. */
10503 if (size < 13)
10504 mask |= 1 << 3;
10506 if (mask == 0)
10507 /* Oh dear! We have no low registers into which we can pop
10508 high registers! */
10509 internal_error
10510 ("no low registers available for popping high registers");
10512 for (next_hi_reg = 8; next_hi_reg < 13; next_hi_reg++)
10513 if (THUMB_REG_PUSHED_P (next_hi_reg))
10514 break;
10516 while (high_regs_pushed)
10518 /* Find lo register(s) into which the high register(s) can
10519 be popped. */
10520 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
10522 if (mask & (1 << regno))
10523 high_regs_pushed--;
10524 if (high_regs_pushed == 0)
10525 break;
10528 mask &= (2 << regno) - 1; /* A no-op if regno == 8. */
10530 /* Pop the values into the low register(s). */
10531 thumb_pushpop (asm_out_file, mask, 0);
10533 /* Move the value(s) into the high registers. */
10534 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
10536 if (mask & (1 << regno))
10538 asm_fprintf (asm_out_file, "\tmov\t%r, %r\n", next_hi_reg,
10539 regno);
10541 for (next_hi_reg++; next_hi_reg < 13; next_hi_reg++)
10542 if (THUMB_REG_PUSHED_P (next_hi_reg))
10543 break;
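/* Worked example (a sketch): if r8 and r9 were pushed and only r3 is
   free (mask == 0x08), the loop above emits
	pop	{r3}
	mov	r8, r3
	pop	{r3}
	mov	r9, r3
   restoring one high register per available low register on each pass.  */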
10549 had_to_push_lr = (live_regs_mask || !leaf_function
10550 || thumb_far_jump_used_p (1));
10552 if (TARGET_BACKTRACE
10553 && ((live_regs_mask & 0xFF) == 0)
10554 && regs_ever_live [LAST_ARG_REGNUM] != 0)
10556 /* The stack backtrace structure creation code had to
10557 push R7 in order to get a work register, so we pop
10558 it now. */
10559 live_regs_mask |= (1 << LAST_LO_REGNUM);
10562 if (current_function_pretend_args_size == 0 || TARGET_BACKTRACE)
10564 if (had_to_push_lr
10565 && !is_called_in_ARM_mode (current_function_decl)
10566 && !eh_ofs)
10567 live_regs_mask |= 1 << PC_REGNUM;
10569 /* Either no argument registers were pushed or a backtrace
10570 structure was created which includes an adjusted stack
10571 pointer, so just pop everything. */
10572 if (live_regs_mask)
10573 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
10575 if (eh_ofs)
10576 thumb_exit (asm_out_file, 2, eh_ofs);
10577 /* We have either just popped the return address into the
10578 PC, or it was kept in LR for the entire function, or
10579 it is still on the stack because we do not want to
10580 return by doing a pop {pc}. */
10581 else if ((live_regs_mask & (1 << PC_REGNUM)) == 0)
10582 thumb_exit (asm_out_file,
10583 (had_to_push_lr
10584 && is_called_in_ARM_mode (current_function_decl)) ?
10585 -1 : LR_REGNUM, NULL_RTX);
10587 else
10589 /* Pop everything but the return address. */
10590 live_regs_mask &= ~(1 << PC_REGNUM);
10592 if (live_regs_mask)
10593 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
10595 if (had_to_push_lr)
10596 /* Get the return address into a temporary register. */
10597 thumb_pushpop (asm_out_file, 1 << LAST_ARG_REGNUM, 0);
10599 /* Remove the argument registers that were pushed onto the stack. */
10600 asm_fprintf (asm_out_file, "\tadd\t%r, %r, #%d\n",
10601 SP_REGNUM, SP_REGNUM,
10602 current_function_pretend_args_size);
10604 if (eh_ofs)
10605 thumb_exit (asm_out_file, 2, eh_ofs);
10606 else
10607 thumb_exit (asm_out_file,
10608 had_to_push_lr ? LAST_ARG_REGNUM : LR_REGNUM, NULL_RTX);
10611 return "";
10614 /* Functions to save and restore machine-specific function data. */
10616 static struct machine_function *
10617 arm_init_machine_status ()
10619 struct machine_function *machine;
10620 machine = (machine_function *) ggc_alloc_cleared (sizeof (machine_function));
10622 #if ARM_FT_UNKNOWN != 0
10623 machine->func_type = ARM_FT_UNKNOWN;
10624 #endif
10625 return machine;
10628 /* Return an RTX indicating where the return address to the
10629 calling function can be found. */
10632 rtx arm_return_addr (count, frame)
10633 int count;
10634 rtx frame ATTRIBUTE_UNUSED;
10636 if (count != 0)
10637 return NULL_RTX;
10639 if (TARGET_APCS_32)
10640 return get_hard_reg_initial_val (Pmode, LR_REGNUM);
10641 else
10643 rtx lr = gen_rtx_AND (Pmode, gen_rtx_REG (Pmode, LR_REGNUM),
10644 GEN_INT (RETURN_ADDR_MASK26));
10645 return get_func_hard_reg_initial_val (cfun, lr);
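/* In 26-bit APCS mode the saved LR carries the condition codes and mode
   bits alongside the return address, so it is ANDed with
   RETURN_ADDR_MASK26 above to leave just the address.  */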
10649 /* Do anything needed before RTL is emitted for each function. */
10651 void
10652 arm_init_expanders ()
10654 /* Arrange to initialize and mark the machine per-function status. */
10655 init_machine_status = arm_init_machine_status;
10658 HOST_WIDE_INT
10659 thumb_get_frame_size ()
10661 int regno;
10663 int base_size = ROUND_UP_WORD (get_frame_size ());
10664 int count_regs = 0;
10665 int entry_size = 0;
10666 int leaf;
10668 if (! TARGET_THUMB)
10669 abort ();
10671 if (! TARGET_ATPCS)
10672 return base_size;
10674 /* We need to know if we are a leaf function. Unfortunately, it
10675 is possible to be called after start_sequence has been called,
10676 which causes get_insns to return the insns for the sequence,
10677 not the function, which will cause leaf_function_p to return
10678 the incorrect result.
10680 To work around this, we cache the computed frame size. This
10681 works because we will only be calling RTL expanders that need
10682 to know about leaf functions once reload has completed, and the
10683 frame size cannot be changed after that time, so we can safely
10684 use the cached value. */
10686 if (reload_completed)
10687 return cfun->machine->frame_size;
10689 leaf = leaf_function_p ();
10691 /* A leaf function does not need any stack alignment if it has nothing
10692 on the stack. */
10693 if (leaf && base_size == 0)
10695 cfun->machine->frame_size = 0;
10696 return 0;
10699 /* We know that SP will be word aligned on entry, and we must
10700 preserve that condition at any subroutine call. But those are
10701 the only constraints. */
10703 /* Space for variadic functions. */
10704 if (current_function_pretend_args_size)
10705 entry_size += current_function_pretend_args_size;
10707 /* Space for pushed lo registers. */
10708 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
10709 if (THUMB_REG_PUSHED_P (regno))
10710 count_regs++;
10712 /* Space for backtrace structure. */
10713 if (TARGET_BACKTRACE)
10715 if (count_regs == 0 && regs_ever_live[LAST_ARG_REGNUM] != 0)
10716 entry_size += 20;
10717 else
10718 entry_size += 16;
10721 if (count_regs || !leaf || thumb_far_jump_used_p (1))
10722 count_regs++; /* LR */
10724 entry_size += count_regs * 4;
10725 count_regs = 0;
10727 /* Space for pushed hi regs. */
10728 for (regno = 8; regno < 13; regno++)
10729 if (THUMB_REG_PUSHED_P (regno))
10730 count_regs++;
10732 entry_size += count_regs * 4;
10734 if ((entry_size + base_size + current_function_outgoing_args_size) & 7)
10735 base_size += 4;
10736 if ((entry_size + base_size + current_function_outgoing_args_size) & 7)
10737 abort ();
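/* Example (a sketch): with 20 bytes of locals after word rounding, r4
   and LR pushed (entry_size == 8) and no outgoing arguments, 20 + 8 is
   not a multiple of 8, so base_size grows to 24 and the frame stays
   8-byte aligned as the ATPCS requires.  */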
10739 cfun->machine->frame_size = base_size;
10741 return base_size;
10744 /* Generate the rest of a function's prologue. */
10746 void
10747 thumb_expand_prologue ()
10749 HOST_WIDE_INT amount = (thumb_get_frame_size ()
10750 + current_function_outgoing_args_size);
10751 unsigned long func_type;
10753 func_type = arm_current_func_type ();
10755 /* Naked functions don't have prologues. */
10756 if (IS_NAKED (func_type))
10757 return;
10759 if (IS_INTERRUPT (func_type))
10761 error ("interrupt service routines cannot be coded in Thumb mode");
10762 return;
10765 if (frame_pointer_needed)
10766 emit_insn (gen_movsi (hard_frame_pointer_rtx, stack_pointer_rtx));
10768 if (amount)
10770 amount = ROUND_UP_WORD (amount);
10772 if (amount < 512)
10773 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
10774 GEN_INT (- amount)));
10775 else
10777 int regno;
10778 rtx reg;
10780 /* The stack decrement is too big for an immediate value in a single
10781 insn. In theory we could issue multiple subtracts, but after
10782 three of them it becomes more space efficient to place the full
10783 value in the constant pool and load into a register. (Also the
10784 ARM debugger really likes to see only one stack decrement per
10785 function). So instead we look for a scratch register into which
10786 we can load the decrement, and then we subtract this from the
10787 stack pointer. Unfortunately, on the Thumb the only available
10788 scratch registers are the argument registers, and we cannot use
10789 these as they may hold arguments to the function. Instead we
10790 attempt to locate a call preserved register which is used by this
10791 function. If we can find one, then we know that it will have
10792 been pushed at the start of the prologue and so we can corrupt
10793 it now. */
10794 for (regno = LAST_ARG_REGNUM + 1; regno <= LAST_LO_REGNUM; regno++)
10795 if (THUMB_REG_PUSHED_P (regno)
10796 && !(frame_pointer_needed
10797 && (regno == THUMB_HARD_FRAME_POINTER_REGNUM)))
10798 break;
10800 if (regno > LAST_LO_REGNUM) /* Very unlikely. */
10802 rtx spare = gen_rtx (REG, SImode, IP_REGNUM);
10804 /* Choose an arbitrary, non-argument low register. */
10805 reg = gen_rtx (REG, SImode, LAST_LO_REGNUM);
10807 /* Save it by copying it into a high, scratch register. */
10808 emit_insn (gen_movsi (spare, reg));
10809 /* Add a USE to stop propagate_one_insn() from barfing. */
10810 emit_insn (gen_prologue_use (spare));
10812 /* Decrement the stack. */
10813 emit_insn (gen_movsi (reg, GEN_INT (- amount)));
10814 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
10815 reg));
10817 /* Restore the low register's original value. */
10818 emit_insn (gen_movsi (reg, spare));
10820 /* Emit a USE of the restored scratch register, so that flow
10821 analysis will not consider the restore redundant. The
10822 register won't be used again in this function and isn't
10823 restored by the epilogue. */
10824 emit_insn (gen_prologue_use (reg));
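/* In effect the fallback above emits (a sketch, with r7 as the chosen
   low register and ip as the high scratch):
	mov	ip, r7		@ save r7
	ldr	r7, =-amount	@ literal-pool load of the decrement
	add	sp, r7
	mov	r7, ip		@ restore r7
   The common case below instead reuses an already-pushed call-preserved
   low register for the same load/add pair.  */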
10826 else
10828 reg = gen_rtx (REG, SImode, regno);
10830 emit_insn (gen_movsi (reg, GEN_INT (- amount)));
10831 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
10832 reg));
10837 if (current_function_profile || TARGET_NO_SCHED_PRO)
10838 emit_insn (gen_blockage ());
10841 void
10842 thumb_expand_epilogue ()
10844 HOST_WIDE_INT amount = (thumb_get_frame_size ()
10845 + current_function_outgoing_args_size);
10847 /* Naked functions don't have epilogues. */
10848 if (IS_NAKED (arm_current_func_type ()))
10849 return;
10851 if (frame_pointer_needed)
10852 emit_insn (gen_movsi (stack_pointer_rtx, hard_frame_pointer_rtx));
10853 else if (amount)
10855 amount = ROUND_UP_WORD (amount);
10857 if (amount < 512)
10858 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
10859 GEN_INT (amount)));
10860 else
10862 /* r3 is always free in the epilogue. */
10863 rtx reg = gen_rtx (REG, SImode, LAST_ARG_REGNUM);
10865 emit_insn (gen_movsi (reg, GEN_INT (amount)));
10866 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
10870 /* Emit a USE (stack_pointer_rtx), so that
10871 the stack adjustment will not be deleted. */
10872 emit_insn (gen_prologue_use (stack_pointer_rtx));
10874 if (current_function_profile || TARGET_NO_SCHED_PRO)
10875 emit_insn (gen_blockage ());
10878 static void
10879 thumb_output_function_prologue (f, size)
10880 FILE * f;
10881 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
10883 int live_regs_mask = 0;
10884 int high_regs_pushed = 0;
10885 int regno;
10887 if (IS_NAKED (arm_current_func_type ()))
10888 return;
10890 if (is_called_in_ARM_mode (current_function_decl))
10892 const char * name;
10894 if (GET_CODE (DECL_RTL (current_function_decl)) != MEM)
10895 abort ();
10896 if (GET_CODE (XEXP (DECL_RTL (current_function_decl), 0)) != SYMBOL_REF)
10897 abort ();
10898 name = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
10900 /* Generate code sequence to switch us into Thumb mode. */
10901 /* The .code 32 directive has already been emitted by
10902 ASM_DECLARE_FUNCTION_NAME. */
10903 asm_fprintf (f, "\torr\t%r, %r, #1\n", IP_REGNUM, PC_REGNUM);
10904 asm_fprintf (f, "\tbx\t%r\n", IP_REGNUM);
10906 /* Generate a label, so that the debugger will notice the
10907 change in instruction sets. This label is also used by
10908 the assembler to bypass the ARM code when this function
10909 is called from a Thumb encoded function elsewhere in the
10910 same file. Hence the definition of STUB_NAME here must
10911 agree with the definition in gas/config/tc-arm.c. */
10913 #define STUB_NAME ".real_start_of"
10915 fprintf (f, "\t.code\t16\n");
10916 #ifdef ARM_PE
10917 if (arm_dllexport_name_p (name))
10918 name = arm_strip_name_encoding (name);
10919 #endif
10920 asm_fprintf (f, "\t.globl %s%U%s\n", STUB_NAME, name);
10921 fprintf (f, "\t.thumb_func\n");
10922 asm_fprintf (f, "%s%U%s:\n", STUB_NAME, name);
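/* Put together, for a function `foo' the entry sequence above reads
   roughly as (a sketch; the stub label is STUB_NAME glued directly to
   the %U-prefixed function name):
   foo:				@ assembled as ARM code
	orr	ip, pc, #1	@ pc reads as foo + 8, the Thumb entry
	bx	ip
	.code	16
	.globl	.real_start_offoo
	.thumb_func
   .real_start_offoo:
   ARM-mode callers thus enter at foo and switch into Thumb state.  */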
10925 if (current_function_pretend_args_size)
10927 if (cfun->machine->uses_anonymous_args)
10929 int num_pushes;
10931 fprintf (f, "\tpush\t{");
10933 num_pushes = ARM_NUM_INTS (current_function_pretend_args_size);
10935 for (regno = LAST_ARG_REGNUM + 1 - num_pushes;
10936 regno <= LAST_ARG_REGNUM;
10937 regno++)
10938 asm_fprintf (f, "%r%s", regno,
10939 regno == LAST_ARG_REGNUM ? "" : ", ");
10941 fprintf (f, "}\n");
10943 else
10944 asm_fprintf (f, "\tsub\t%r, %r, #%d\n",
10945 SP_REGNUM, SP_REGNUM,
10946 current_function_pretend_args_size);
10949 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
10950 if (THUMB_REG_PUSHED_P (regno))
10951 live_regs_mask |= 1 << regno;
10953 if (live_regs_mask || !leaf_function_p () || thumb_far_jump_used_p (1))
10954 live_regs_mask |= 1 << LR_REGNUM;
10956 if (TARGET_BACKTRACE)
10958 int offset;
10959 int work_register = 0;
10960 int wr;
10962 /* We have been asked to create a stack backtrace structure.
10963 The code looks like this:
10965 0 .align 2
10966 0 func:
10967 0 sub SP, #16 Reserve space for 4 registers.
10968 2 push {R7} Get a work register.
10969 4 add R7, SP, #20 Get the stack pointer before the push.
10970 6 str R7, [SP, #8] Store the stack pointer (before reserving the space).
10971 8 mov R7, PC Get hold of the start of this code plus 12.
10972 10 str R7, [SP, #16] Store it.
10973 12 mov R7, FP Get hold of the current frame pointer.
10974 14 str R7, [SP, #4] Store it.
10975 16 mov R7, LR Get hold of the current return address.
10976 18 str R7, [SP, #12] Store it.
10977 20 add R7, SP, #16 Point at the start of the backtrace structure.
10978 22 mov FP, R7 Put this value into the frame pointer. */
10980 if ((live_regs_mask & 0xFF) == 0)
10982 /* See if the a4 register is free. */
10984 if (regs_ever_live [LAST_ARG_REGNUM] == 0)
10985 work_register = LAST_ARG_REGNUM;
10986 else /* We must push a register of our own. */
10987 live_regs_mask |= (1 << LAST_LO_REGNUM);
10990 if (work_register == 0)
10992 /* Select a register from the list that will be pushed to
10993 use as our work register. */
10994 for (work_register = (LAST_LO_REGNUM + 1); work_register--;)
10995 if ((1 << work_register) & live_regs_mask)
10996 break;
10999 asm_fprintf
11000 (f, "\tsub\t%r, %r, #16\t%@ Create stack backtrace structure\n",
11001 SP_REGNUM, SP_REGNUM);
11003 if (live_regs_mask)
11004 thumb_pushpop (f, live_regs_mask, 1);
11006 for (offset = 0, wr = 1 << 15; wr != 0; wr >>= 1)
11007 if (wr & live_regs_mask)
11008 offset += 4;
11010 asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
11011 offset + 16 + current_function_pretend_args_size);
11013 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
11014 offset + 4);
11016 /* Make sure that the instruction fetching the PC is in the right place
11017 to calculate "start of backtrace creation code + 12". */
11018 if (live_regs_mask)
11020 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
11021 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
11022 offset + 12);
11023 asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
11024 ARM_HARD_FRAME_POINTER_REGNUM);
11025 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
11026 offset);
11028 else
11030 asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
11031 ARM_HARD_FRAME_POINTER_REGNUM);
11032 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
11033 offset);
11034 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
11035 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
11036 offset + 12);
11039 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, LR_REGNUM);
11040 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
11041 offset + 8);
11042 asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
11043 offset + 12);
11044 asm_fprintf (f, "\tmov\t%r, %r\t\t%@ Backtrace structure created\n",
11045 ARM_HARD_FRAME_POINTER_REGNUM, work_register);
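/* Relative to the new frame pointer the 16-byte structure just built
   reads (a sketch):
	[FP, #0]   saved PC (start of this code plus 12)
	[FP, #-4]  saved LR
	[FP, #-8]  saved SP (its value on entry, reconstructed above)
	[FP, #-12] caller's FP, continuing the backtrace chain.  */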
11047 else if (live_regs_mask)
11048 thumb_pushpop (f, live_regs_mask, 1);
11050 for (regno = 8; regno < 13; regno++)
11051 if (THUMB_REG_PUSHED_P (regno))
11052 high_regs_pushed++;
11054 if (high_regs_pushed)
11056 int pushable_regs = 0;
11057 int mask = live_regs_mask & 0xff;
11058 int next_hi_reg;
11060 for (next_hi_reg = 12; next_hi_reg > LAST_LO_REGNUM; next_hi_reg--)
11061 if (THUMB_REG_PUSHED_P (next_hi_reg))
11062 break;
11064 pushable_regs = mask;
11066 if (pushable_regs == 0)
11068 /* Desperation time -- this probably will never happen. */
11069 if (THUMB_REG_PUSHED_P (LAST_ARG_REGNUM))
11070 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, LAST_ARG_REGNUM);
11071 mask = 1 << LAST_ARG_REGNUM;
11074 while (high_regs_pushed > 0)
11076 for (regno = LAST_LO_REGNUM; regno >= 0; regno--)
11078 if (mask & (1 << regno))
11080 asm_fprintf (f, "\tmov\t%r, %r\n", regno, next_hi_reg);
11082 high_regs_pushed--;
11084 if (high_regs_pushed)
11086 for (next_hi_reg--; next_hi_reg > LAST_LO_REGNUM;
11087 next_hi_reg--)
11088 if (THUMB_REG_PUSHED_P (next_hi_reg))
11089 break;
11091 else
11093 mask &= ~((1 << regno) - 1);
11094 break;
11099 thumb_pushpop (f, mask, 1);
11102 if (pushable_regs == 0
11103 && (THUMB_REG_PUSHED_P (LAST_ARG_REGNUM)))
11104 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
11108 /* Handle the case of a double word load into a low register from
11109 a computed memory address. The computed address may involve a
11110 register which is overwritten by the load. */
11112 const char *
11113 thumb_load_double_from_address (operands)
11114 rtx *operands;
11116 rtx addr;
11117 rtx base;
11118 rtx offset;
11119 rtx arg1;
11120 rtx arg2;
11122 if (GET_CODE (operands[0]) != REG)
11123 abort ();
11125 if (GET_CODE (operands[1]) != MEM)
11126 abort ();
11128 /* Get the memory address. */
11129 addr = XEXP (operands[1], 0);
11131 /* Work out how the memory address is computed. */
11132 switch (GET_CODE (addr))
11134 case REG:
11135 operands[2] = gen_rtx (MEM, SImode,
11136 plus_constant (XEXP (operands[1], 0), 4));
11138 if (REGNO (operands[0]) == REGNO (addr))
11140 output_asm_insn ("ldr\t%H0, %2", operands);
11141 output_asm_insn ("ldr\t%0, %1", operands);
11143 else
11145 output_asm_insn ("ldr\t%0, %1", operands);
11146 output_asm_insn ("ldr\t%H0, %2", operands);
11148 break;
11150 case CONST:
11151 /* Compute <address> + 4 for the high order load. */
11152 operands[2] = gen_rtx (MEM, SImode,
11153 plus_constant (XEXP (operands[1], 0), 4));
11155 output_asm_insn ("ldr\t%0, %1", operands);
11156 output_asm_insn ("ldr\t%H0, %2", operands);
11157 break;
11159 case PLUS:
11160 arg1 = XEXP (addr, 0);
11161 arg2 = XEXP (addr, 1);
11163 if (CONSTANT_P (arg1))
11164 base = arg2, offset = arg1;
11165 else
11166 base = arg1, offset = arg2;
11168 if (GET_CODE (base) != REG)
11169 abort ();
11171 /* Catch the case of <address> = <reg> + <reg> */
11172 if (GET_CODE (offset) == REG)
11174 int reg_offset = REGNO (offset);
11175 int reg_base = REGNO (base);
11176 int reg_dest = REGNO (operands[0]);
11178 /* Add the base and offset registers together into the
11179 higher destination register. */
11180 asm_fprintf (asm_out_file, "\tadd\t%r, %r, %r",
11181 reg_dest + 1, reg_base, reg_offset);
11183 /* Load the lower destination register from the address in
11184 the higher destination register. */
11185 asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #0]",
11186 reg_dest, reg_dest + 1);
11188 /* Load the higher destination register from its own address
11189 plus 4. */
11190 asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #4]",
11191 reg_dest + 1, reg_dest + 1);
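/* For instance, loading a double word at [r1 + r2] into r4/r5 comes out
   as (a sketch):
	add	r5, r1, r2
	ldr	r4, [r5, #0]
	ldr	r5, [r5, #4]
   The address lives in the high destination register, which is loaded
   last, so it is never clobbered before its final use.  */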
11193 else
11195 /* Compute <address> + 4 for the high order load. */
11196 operands[2] = gen_rtx (MEM, SImode,
11197 plus_constant (XEXP (operands[1], 0), 4));
11199 /* If the computed address is held in the low order register
11200 then load the high order register first, otherwise always
11201 load the low order register first. */
11202 if (REGNO (operands[0]) == REGNO (base))
11204 output_asm_insn ("ldr\t%H0, %2", operands);
11205 output_asm_insn ("ldr\t%0, %1", operands);
11207 else
11209 output_asm_insn ("ldr\t%0, %1", operands);
11210 output_asm_insn ("ldr\t%H0, %2", operands);
11213 break;
11215 case LABEL_REF:
11216 /* With no registers to worry about we can just load the value
11217 directly. */
11218 operands[2] = gen_rtx (MEM, SImode,
11219 plus_constant (XEXP (operands[1], 0), 4));
11221 output_asm_insn ("ldr\t%H0, %2", operands);
11222 output_asm_insn ("ldr\t%0, %1", operands);
11223 break;
11225 default:
11226 abort ();
11227 break;
11230 return "";
11234 const char *
11235 thumb_output_move_mem_multiple (n, operands)
11236 int n;
11237 rtx * operands;
11239 rtx tmp;
11241 switch (n)
11243 case 2:
11244 if (REGNO (operands[4]) > REGNO (operands[5]))
11246 tmp = operands[4];
11247 operands[4] = operands[5];
11248 operands[5] = tmp;
11250 output_asm_insn ("ldmia\t%1!, {%4, %5}", operands);
11251 output_asm_insn ("stmia\t%0!, {%4, %5}", operands);
11252 break;
11254 case 3:
11255 if (REGNO (operands[4]) > REGNO (operands[5]))
11257 tmp = operands[4];
11258 operands[4] = operands[5];
11259 operands[5] = tmp;
11261 if (REGNO (operands[5]) > REGNO (operands[6]))
11263 tmp = operands[5];
11264 operands[5] = operands[6];
11265 operands[6] = tmp;
11267 if (REGNO (operands[4]) > REGNO (operands[5]))
11269 tmp = operands[4];
11270 operands[4] = operands[5];
11271 operands[5] = tmp;
11274 output_asm_insn ("ldmia\t%1!, {%4, %5, %6}", operands);
11275 output_asm_insn ("stmia\t%0!, {%4, %5, %6}", operands);
11276 break;
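/* The compare-and-swap steps above form a tiny sorting network: after
   them %4 < %5 (< %6 for the three-register case), so the ldmia/stmia
   register lists are in the ascending order the Thumb multiple-transfer
   instructions require.  */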
11278 default:
11279 abort ();
11282 return "";
11285 /* Routines for generating rtl. */
11287 void
11288 thumb_expand_movstrqi (operands)
11289 rtx * operands;
11291 rtx out = copy_to_mode_reg (SImode, XEXP (operands[0], 0));
11292 rtx in = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
11293 HOST_WIDE_INT len = INTVAL (operands[2]);
11294 HOST_WIDE_INT offset = 0;
11296 while (len >= 12)
11298 emit_insn (gen_movmem12b (out, in, out, in));
11299 len -= 12;
11302 if (len >= 8)
11304 emit_insn (gen_movmem8b (out, in, out, in));
11305 len -= 8;
11308 if (len >= 4)
11310 rtx reg = gen_reg_rtx (SImode);
11311 emit_insn (gen_movsi (reg, gen_rtx (MEM, SImode, in)));
11312 emit_insn (gen_movsi (gen_rtx (MEM, SImode, out), reg));
11313 len -= 4;
11314 offset += 4;
11317 if (len >= 2)
11319 rtx reg = gen_reg_rtx (HImode);
11320 emit_insn (gen_movhi (reg, gen_rtx (MEM, HImode,
11321 plus_constant (in, offset))));
11322 emit_insn (gen_movhi (gen_rtx (MEM, HImode, plus_constant (out, offset)),
11323 reg));
11324 len -= 2;
11325 offset += 2;
11328 if (len)
11330 rtx reg = gen_reg_rtx (QImode);
11331 emit_insn (gen_movqi (reg, gen_rtx (MEM, QImode,
11332 plus_constant (in, offset))));
11333 emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (out, offset)),
11334 reg));
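/* Example (a sketch): a 23-byte copy expands to one 12-byte and one
   8-byte ldmia/stmia block move, then a halfword and a byte tail:
   12 + 8 + 2 + 1 == 23.  */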
11339 int thumb_cmp_operand (op, mode)
11340 rtx op;
11341 enum machine_mode mode;
11343 return ((GET_CODE (op) == CONST_INT
11344 && (unsigned HOST_WIDE_INT) (INTVAL (op)) < 256)
11345 || register_operand (op, mode));
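/* Thus a compare against a small constant stays a single Thumb insn:
   #255 is accepted directly, while #256 must first be loaded into a
   register.  */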
11348 static const char *
11349 thumb_condition_code (x, invert)
11350 rtx x;
11351 int invert;
11353 static const char * const conds[] =
11355 "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
11356 "hi", "ls", "ge", "lt", "gt", "le"
11358 int val;
11360 switch (GET_CODE (x))
11362 case EQ: val = 0; break;
11363 case NE: val = 1; break;
11364 case GEU: val = 2; break;
11365 case LTU: val = 3; break;
11366 case GTU: val = 8; break;
11367 case LEU: val = 9; break;
11368 case GE: val = 10; break;
11369 case LT: val = 11; break;
11370 case GT: val = 12; break;
11371 case LE: val = 13; break;
11372 default:
11373 abort ();
11376 return conds[val ^ invert];
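/* Example: GE maps to index 10 ("ge"); inverting flips the low bit of
   the index, yielding index 11 ("lt"), its logical negation.  */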
11379 /* Handle storing a half-word to memory during reload. */
11381 void
11382 thumb_reload_out_hi (operands)
11383 rtx * operands;
11385 emit_insn (gen_thumb_movhi_clobber (operands[0], operands[1], operands[2]));
11388 /* Handle storing a half-word to memory during reload. */
11390 void
11391 thumb_reload_in_hi (operands)
11392 rtx * operands ATTRIBUTE_UNUSED;
11394 abort ();
11397 /* Return the length of a function name prefix
11398 that starts with the character 'c'. */
11400 static int
11401 arm_get_strip_length (c)
11402 int c;
11404 switch (c)
11406 ARM_NAME_ENCODING_LENGTHS
11407 default: return 0;
11411 /* Return a pointer to a function's name with any
11412 and all prefix encodings stripped from it. */
11414 const char *
11415 arm_strip_name_encoding (name)
11416 const char * name;
11418 int skip;
11420 while ((skip = arm_get_strip_length (* name)))
11421 name += skip;
11423 return name;
11426 /* If there is a '*' anywhere in the name's prefix, then
11427 emit the stripped name verbatim, otherwise prepend an
11428 underscore if leading underscores are being used. */
11430 void
11431 arm_asm_output_labelref (stream, name)
11432 FILE * stream;
11433 const char * name;
11435 int skip;
11436 int verbatim = 0;
11438 while ((skip = arm_get_strip_length (* name)))
11440 verbatim |= (*name == '*');
11441 name += skip;
11444 if (verbatim)
11445 fputs (name, stream);
11446 else
11447 asm_fprintf (stream, "%U%s", name);
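/* A minimal sketch of the behaviour, assuming '*' is one of the
   encoding characters recognized by ARM_NAME_ENCODING_LENGTHS: an
   encoded "*foo" is written verbatim as "foo", whereas a name with no
   '*' in its prefix is printed via %U and so may gain the target's
   leading underscore.  */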
11450 rtx aof_pic_label;
11452 #ifdef AOF_ASSEMBLER
11453 /* Special functions only needed when producing AOF syntax assembler. */
11455 struct pic_chain
11457 struct pic_chain * next;
11458 const char * symname;
11461 static struct pic_chain * aof_pic_chain = NULL;
11464 rtx aof_pic_entry (x)
11465 rtx x;
11467 struct pic_chain ** chainp;
11468 int offset;
11470 if (aof_pic_label == NULL_RTX)
11472 aof_pic_label = gen_rtx_SYMBOL_REF (Pmode, "x$adcons");
11475 for (offset = 0, chainp = &aof_pic_chain; *chainp;
11476 offset += 4, chainp = &(*chainp)->next)
11477 if ((*chainp)->symname == XSTR (x, 0))
11478 return plus_constant (aof_pic_label, offset);
11480 *chainp = (struct pic_chain *) xmalloc (sizeof (struct pic_chain));
11481 (*chainp)->next = NULL;
11482 (*chainp)->symname = XSTR (x, 0);
11483 return plus_constant (aof_pic_label, offset);
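/* Each distinct symbol therefore occupies one 4-byte slot in the
   x$adcons table: the first symbol is at offset 0, the next at offset
   4, and a repeated symbol returns its existing offset.  */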
11486 void
11487 aof_dump_pic_table (f)
11488 FILE * f;
11490 struct pic_chain * chain;
11492 if (aof_pic_chain == NULL)
11493 return;
11495 asm_fprintf (f, "\tAREA |%r$$adcons|, BASED %r\n",
11496 PIC_OFFSET_TABLE_REGNUM,
11497 PIC_OFFSET_TABLE_REGNUM);
11498 fputs ("|x$adcons|\n", f);
11500 for (chain = aof_pic_chain; chain; chain = chain->next)
11502 fputs ("\tDCD\t", f);
11503 assemble_name (f, chain->symname);
11504 fputs ("\n", f);
11508 int arm_text_section_count = 1;
11510 char *
11511 aof_text_section ()
11513 static char buf[100];
11514 sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
11515 arm_text_section_count++);
11516 if (flag_pic)
11517 strcat (buf, ", PIC, REENTRANT");
11518 return buf;
11521 static int arm_data_section_count = 1;
11523 char *
11524 aof_data_section ()
11526 static char buf[100];
11527 sprintf (buf, "\tAREA |C$$data%d|, DATA", arm_data_section_count++);
11528 return buf;
11531 /* The AOF assembler is religiously strict about declarations of
11532 imported and exported symbols, so that it is impossible to declare
11533 a function as imported near the beginning of the file, and then to
11534 export it later on. It is, however, possible to delay the decision
11535 until all the functions in the file have been compiled. To get
11536 around this, we maintain a list of the imports and exports, and
11537 delete from it any that are subsequently defined. At the end of
11538 compilation we spit the remainder of the list out before the END
11539 directive. */
11541 struct import
11543 struct import * next;
11544 const char * name;
11547 static struct import * imports_list = NULL;
11549 void
11550 aof_add_import (name)
11551 const char * name;
11553 struct import * new;
11555 for (new = imports_list; new; new = new->next)
11556 if (new->name == name)
11557 return;
11559 new = (struct import *) xmalloc (sizeof (struct import));
11560 new->next = imports_list;
11561 imports_list = new;
11562 new->name = name;
11565 void
11566 aof_delete_import (name)
11567 const char * name;
11569 struct import ** old;
11571 for (old = &imports_list; *old; old = & (*old)->next)
11573 if ((*old)->name == name)
11575 *old = (*old)->next;
11576 return;
11581 int arm_main_function = 0;
11583 void
11584 aof_dump_imports (f)
11585 FILE * f;
11587 /* The AOF assembler needs this to cause the startup code to be extracted
11588 from the library. Bringing in __main causes the whole thing to work
11589 automagically. */
11590 if (arm_main_function)
11592 text_section ();
11593 fputs ("\tIMPORT __main\n", f);
11594 fputs ("\tDCD __main\n", f);
11597 /* Now dump the remaining imports. */
11598 while (imports_list)
11600 fprintf (f, "\tIMPORT\t");
11601 assemble_name (f, imports_list->name);
11602 fputc ('\n', f);
11603 imports_list = imports_list->next;
11607 static void
11608 aof_globalize_label (stream, name)
11609 FILE *stream;
11610 const char *name;
11612 default_globalize_label (stream, name);
11613 if (! strcmp (name, "main"))
11614 arm_main_function = 1;
11616 #endif /* AOF_ASSEMBLER */
11618 #ifdef OBJECT_FORMAT_ELF
11619 /* Switch to an arbitrary section NAME with attributes as specified
11620 by FLAGS. ALIGN specifies any known alignment requirements for
11621 the section; 0 if the default should be used.
11623 Differs from the default elf version only in the prefix character
11624 used before the section type. */
11626 static void
11627 arm_elf_asm_named_section (name, flags)
11628 const char *name;
11629 unsigned int flags;
11631 char flagchars[10], *f = flagchars;
11633 if (! named_section_first_declaration (name))
11635 fprintf (asm_out_file, "\t.section\t%s\n", name);
11636 return;
11639 if (!(flags & SECTION_DEBUG))
11640 *f++ = 'a';
11641 if (flags & SECTION_WRITE)
11642 *f++ = 'w';
11643 if (flags & SECTION_CODE)
11644 *f++ = 'x';
11645 if (flags & SECTION_SMALL)
11646 *f++ = 's';
11647 if (flags & SECTION_MERGE)
11648 *f++ = 'M';
11649 if (flags & SECTION_STRINGS)
11650 *f++ = 'S';
11651 if (flags & SECTION_TLS)
11652 *f++ = 'T';
11653 *f = '\0';
11655 fprintf (asm_out_file, "\t.section\t%s,\"%s\"", name, flagchars);
11657 if (!(flags & SECTION_NOTYPE))
11659 const char *type;
11661 if (flags & SECTION_BSS)
11662 type = "nobits";
11663 else
11664 type = "progbits";
11666 fprintf (asm_out_file, ",%%%s", type);
11668 if (flags & SECTION_ENTSIZE)
11669 fprintf (asm_out_file, ",%d", flags & SECTION_ENTSIZE);
11672 putc ('\n', asm_out_file);
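/* For example, a mergeable string section would be announced roughly as
	.section .rodata.str1.1,"aMS",%progbits,1
   (a sketch); note the '%' before the section type where the default
   ELF hook would emit '@'.  */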
11674 #endif
11676 #ifndef ARM_PE
11677 /* Symbols in the text segment can be accessed without indirecting via the
11678 constant pool; it may take an extra binary operation, but this is still
11679 faster than indirecting via memory. Don't do this when not optimizing,
11680 since we won't be calculating all of the offsets necessary to do this
11681 simplification. */
11683 static void
11684 arm_encode_section_info (decl, first)
11685 tree decl;
11686 int first;
11688 /* This doesn't work with AOF syntax, since the string table may be in
11689 a different AREA. */
11690 #ifndef AOF_ASSEMBLER
11691 if (optimize > 0 && TREE_CONSTANT (decl)
11692 && (!flag_writable_strings || TREE_CODE (decl) != STRING_CST))
11694 rtx rtl = (TREE_CODE_CLASS (TREE_CODE (decl)) != 'd'
11695 ? TREE_CST_RTL (decl) : DECL_RTL (decl));
11696 SYMBOL_REF_FLAG (XEXP (rtl, 0)) = 1;
11698 #endif
11700 /* If we are referencing a function that is weak then encode a long call
11701 flag in the function name, otherwise if the function is static or
11702 known to be defined in this file then encode a short call flag. */
11703 if (first && TREE_CODE_CLASS (TREE_CODE (decl)) == 'd')
11705 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_WEAK (decl))
11706 arm_encode_call_attribute (decl, LONG_CALL_FLAG_CHAR);
11707 else if (! TREE_PUBLIC (decl))
11708 arm_encode_call_attribute (decl, SHORT_CALL_FLAG_CHAR);
11711 #endif /* !ARM_PE */
11713 static void
11714 arm_internal_label (stream, prefix, labelno)
11715 FILE *stream;
11716 const char *prefix;
11717 unsigned long labelno;
11719 if (arm_ccfsm_state == 3 && (unsigned) arm_target_label == labelno
11720 && !strcmp (prefix, "L"))
11722 arm_ccfsm_state = 0;
11723 arm_target_insn = NULL;
11725 default_internal_label (stream, prefix, labelno);
11728 /* Output code to add DELTA to the first argument, and then jump
11729 to FUNCTION. Used for C++ multiple inheritance. */
11731 static void
11732 arm_output_mi_thunk (file, thunk, delta, vcall_offset, function)
11733 FILE *file;
11734 tree thunk ATTRIBUTE_UNUSED;
11735 HOST_WIDE_INT delta;
11736 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED;
11737 tree function;
11739 int mi_delta = delta;
11740 const char *const mi_op = mi_delta < 0 ? "sub" : "add";
11741 int shift = 0;
11742 int this_regno = (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)))
11743 ? 1 : 0);
11744 if (mi_delta < 0)
11745 mi_delta = - mi_delta;
11746 while (mi_delta != 0)
11748 if ((mi_delta & (3 << shift)) == 0)
11749 shift += 2;
11750 else
11752 asm_fprintf (file, "\t%s\t%r, %r, #%d\n",
11753 mi_op, this_regno, this_regno,
11754 mi_delta & (0xff << shift));
11755 mi_delta &= ~(0xff << shift);
11756 shift += 8;
11759 fputs ("\tb\t", file);
11760 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
11761 if (NEED_PLT_RELOC)
11762 fputs ("(PLT)", file);
11763 fputc ('\n', file);
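/* Example (a sketch): for delta == 0x10004 the loop peels one shifted
   8-bit chunk per iteration, emitting
	add	r0, r0, #4
	add	r0, r0, #65536
   before the final branch (via the PLT when NEED_PLT_RELOC) to the
   target function.  */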