/* Output routines for GCC for ARM.
   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002
   Free Software Foundation, Inc.
   Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
   and Martin Simmons (@harleqn.co.uk).
   More major hacks by Richard Earnshaw (rearnsha@arm.com).

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "reload.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "toplev.h"
#include "recog.h"
#include "ggc.h"
#include "except.h"
#include "c-pragma.h"
#include "integrate.h"
#include "tm_p.h"
#include "target.h"
#include "target-def.h"
/* Forward definitions of types.  */
typedef struct minipool_node    Mnode;
typedef struct minipool_fixup   Mfix;

/* In order to improve the layout of the prototypes below
   some short type abbreviations are defined here.  */
#define Hint     HOST_WIDE_INT
#define Mmode    enum machine_mode
#define Ulong    unsigned long
#define Ccstar   const char *

const struct attribute_spec arm_attribute_table[];
/* Forward function declarations.  */
static void arm_add_gc_roots PARAMS ((void));
static int arm_gen_constant PARAMS ((enum rtx_code, Mmode, Hint, rtx, rtx, int, int));
static Ulong bit_count PARAMS ((signed int));
static int const_ok_for_op PARAMS ((Hint, enum rtx_code));
static int eliminate_lr2ip PARAMS ((rtx *));
static rtx emit_multi_reg_push PARAMS ((int));
static rtx emit_sfm PARAMS ((int, int));
#ifndef AOF_ASSEMBLER
static bool arm_assemble_integer PARAMS ((rtx, unsigned int, int));
#endif
static Ccstar fp_const_from_val PARAMS ((REAL_VALUE_TYPE *));
static arm_cc get_arm_condition_code PARAMS ((rtx));
static void init_fpa_table PARAMS ((void));
static Hint int_log2 PARAMS ((Hint));
static rtx is_jump_table PARAMS ((rtx));
static Ccstar output_multi_immediate PARAMS ((rtx *, Ccstar, Ccstar, int, Hint));
static void print_multi_reg PARAMS ((FILE *, Ccstar, int, int));
static Mmode select_dominance_cc_mode PARAMS ((rtx, rtx, Hint));
static Ccstar shift_op PARAMS ((rtx, Hint *));
static void arm_init_machine_status PARAMS ((struct function *));
static void arm_mark_machine_status PARAMS ((struct function *));
static void arm_free_machine_status PARAMS ((struct function *));
static int number_of_first_bit_set PARAMS ((int));
static void replace_symbols_in_block PARAMS ((tree, rtx, rtx));
static void thumb_exit PARAMS ((FILE *, int, rtx));
static void thumb_pushpop PARAMS ((FILE *, int, int));
static Ccstar thumb_condition_code PARAMS ((rtx, int));
static rtx is_jump_table PARAMS ((rtx));
static Hint get_jump_table_size PARAMS ((rtx));
static Mnode * move_minipool_fix_forward_ref PARAMS ((Mnode *, Mnode *, Hint));
static Mnode * add_minipool_forward_ref PARAMS ((Mfix *));
static Mnode * move_minipool_fix_backward_ref PARAMS ((Mnode *, Mnode *, Hint));
static Mnode * add_minipool_backward_ref PARAMS ((Mfix *));
static void assign_minipool_offsets PARAMS ((Mfix *));
static void arm_print_value PARAMS ((FILE *, rtx));
static void dump_minipool PARAMS ((rtx));
static int arm_barrier_cost PARAMS ((rtx));
static Mfix * create_fix_barrier PARAMS ((Mfix *, Hint));
static void push_minipool_barrier PARAMS ((rtx, Hint));
static void push_minipool_fix PARAMS ((rtx, Hint, rtx *, Mmode, rtx));
static void note_invalid_constants PARAMS ((rtx, Hint));
static int current_file_function_operand PARAMS ((rtx));
static Ulong arm_compute_save_reg0_reg12_mask PARAMS ((void));
static Ulong arm_compute_save_reg_mask PARAMS ((void));
static Ulong arm_isr_value PARAMS ((tree));
static Ulong arm_compute_func_type PARAMS ((void));
static tree arm_handle_fndecl_attribute PARAMS ((tree *, tree, tree, int, bool *));
static tree arm_handle_isr_attribute PARAMS ((tree *, tree, tree, int, bool *));
static void arm_output_function_epilogue PARAMS ((FILE *, Hint));
static void arm_output_function_prologue PARAMS ((FILE *, Hint));
static void thumb_output_function_prologue PARAMS ((FILE *, Hint));
static int arm_comp_type_attributes PARAMS ((tree, tree));
static void arm_set_default_type_attributes PARAMS ((tree));
static int arm_adjust_cost PARAMS ((rtx, rtx, rtx, int));
#ifdef OBJECT_FORMAT_ELF
static void arm_elf_asm_named_section PARAMS ((const char *, unsigned int));
#endif
#ifndef ARM_PE
static void arm_encode_section_info PARAMS ((tree, int));
#endif

#undef Hint
#undef Mmode
#undef Ulong
#undef Ccstar
/* Initialize the GCC target structure.  */
#ifdef TARGET_DLLIMPORT_DECL_ATTRIBUTES
#undef TARGET_MERGE_DECL_ATTRIBUTES
#define TARGET_MERGE_DECL_ATTRIBUTES merge_dllimport_decl_attributes
#endif

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE arm_attribute_table

#ifdef AOF_ASSEMBLER
#undef TARGET_ASM_BYTE_OP
#define TARGET_ASM_BYTE_OP "\tDCB\t"
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\tDCW\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\tDCD\t"
#else
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP NULL
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER arm_assemble_integer
#endif

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE arm_output_function_prologue

#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE arm_output_function_epilogue

#undef TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES arm_comp_type_attributes

#undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES arm_set_default_type_attributes

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS arm_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN arm_expand_builtin

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST arm_adjust_cost

#undef TARGET_ENCODE_SECTION_INFO
#ifdef ARM_PE
#define TARGET_ENCODE_SECTION_INFO arm_pe_encode_section_info
#else
#define TARGET_ENCODE_SECTION_INFO arm_encode_section_info
#endif

struct gcc_target targetm = TARGET_INITIALIZER;
/* Obstack for minipool constant handling.  */
static struct obstack minipool_obstack;
static char *         minipool_startobj;

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

/* The maximum number of insns skipped which
   will be conditionalised if possible.  */
static int max_insns_skipped = 5;

extern FILE * asm_out_file;

/* True if we are currently building a constant table.  */
int making_const_table;

/* Define the information needed to generate branch insns.  This is
   stored from the compare operation.  */
rtx arm_compare_op0, arm_compare_op1;

/* What type of floating point are we tuning for?  */
enum floating_point_type arm_fpu;

/* What type of floating point instructions are available?  */
enum floating_point_type arm_fpu_arch;

/* What program mode is the cpu running in?  26-bit mode or 32-bit mode.  */
enum prog_mode_type arm_prgmode;

/* Set by the -mfp=... option.  */
const char * target_fp_name = NULL;

/* Used to parse -mstructure_size_boundary command line option.  */
const char * structure_size_string = NULL;
int arm_structure_size_boundary = DEFAULT_STRUCTURE_SIZE_BOUNDARY;
/* Bit values used to identify processor capabilities.  */
#define FL_CO_PROC    (1 << 0)        /* Has external co-processor bus */
#define FL_FAST_MULT  (1 << 1)        /* Fast multiply */
#define FL_MODE26     (1 << 2)        /* 26-bit mode support */
#define FL_MODE32     (1 << 3)        /* 32-bit mode support */
#define FL_ARCH4      (1 << 4)        /* Architecture rel 4 */
#define FL_ARCH5      (1 << 5)        /* Architecture rel 5 */
#define FL_THUMB      (1 << 6)        /* Thumb aware */
#define FL_LDSCHED    (1 << 7)        /* Load scheduling necessary */
#define FL_STRONG     (1 << 8)        /* StrongARM */
#define FL_ARCH5E     (1 << 9)        /* DSP extensions to v5 */
#define FL_XSCALE     (1 << 10)       /* XScale */
/* The bits in this mask specify which
   instructions we are allowed to generate.  */
static int insn_flags = 0;

/* The bits in this mask specify which instruction scheduling options should
   be used.  Note - there is an overlap with the FL_FAST_MULT.  For some
   hardware we want to be able to generate the multiply instructions, but to
   tune as if they were not present in the architecture.  */
static int tune_flags = 0;

/* The following are used in the arm.md file as equivalents to bits
   in the above two flag variables.  */

/* Nonzero if this is an "M" variant of the processor.  */
int arm_fast_multiply = 0;

/* Nonzero if this chip supports the ARM Architecture 4 extensions.  */
int arm_arch4 = 0;

/* Nonzero if this chip supports the ARM Architecture 5 extensions.  */
int arm_arch5 = 0;

/* Nonzero if this chip supports the ARM Architecture 5E extensions.  */
int arm_arch5e = 0;

/* Nonzero if this chip can benefit from load scheduling.  */
int arm_ld_sched = 0;

/* Nonzero if this chip is a StrongARM.  */
int arm_is_strong = 0;

/* Nonzero if this chip is an XScale.  */
int arm_is_xscale = 0;

/* Nonzero if this chip is an ARM6 or an ARM7.  */
int arm_is_6_or_7 = 0;

/* Nonzero if generating Thumb instructions.  */
int thumb_code = 0;

/* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
   must report the mode of the memory reference from PRINT_OPERAND to
   PRINT_OPERAND_ADDRESS.  */
enum machine_mode output_memory_reference_mode;

/* The register number to be used for the PIC offset register.  */
const char * arm_pic_register_string = NULL;
int arm_pic_register = INVALID_REGNUM;

/* Set to 1 when a return insn is output, this means that the epilogue
   is not needed.  */
int return_used_this_function;

/* Set to 1 after arm_reorg has started.  Reset to start at the start of
   the next function.  */
static int after_arm_reorg = 0;

/* The maximum number of insns to be used when loading a constant.  */
static int arm_constant_limit = 3;

/* For an explanation of these variables, see final_prescan_insn below.  */
int arm_ccfsm_state;
enum arm_cond_code arm_current_cc;
rtx arm_target_insn;
int arm_target_label;

/* The condition codes of the ARM, and the inverse function.  */
static const char * const arm_condition_codes[] =
{
  "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
  "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
};
#define streq(string1, string2) (strcmp (string1, string2) == 0)

/* Initialization code.  */

struct processors
{
  const char *const name;
  const unsigned int flags;
};
/* Not all of these give usefully different compilation alternatives,
   but there is no simple way of generalizing them.  */
static const struct processors all_cores[] =
{
  /* ARM Cores */

  {"arm2",        FL_CO_PROC | FL_MODE26 },
  {"arm250",      FL_CO_PROC | FL_MODE26 },
  {"arm3",        FL_CO_PROC | FL_MODE26 },
  {"arm6",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm60",       FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm600",      FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm610",                   FL_MODE26 | FL_MODE32 },
  {"arm620",      FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  /* arm7m doesn't exist on its own, but only with D, (and I), but
     those don't alter the code, so arm7m is sometimes used.  */
  {"arm7m",       FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7d",       FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dm",      FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7di",      FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dmi",     FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm70",       FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700",      FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700i",     FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm710",                   FL_MODE26 | FL_MODE32 },
  {"arm710t",                  FL_MODE26 | FL_MODE32 | FL_THUMB },
  {"arm720",                   FL_MODE26 | FL_MODE32 },
  {"arm720t",                  FL_MODE26 | FL_MODE32 | FL_THUMB },
  {"arm740t",                  FL_MODE26 | FL_MODE32 | FL_THUMB },
  {"arm710c",                  FL_MODE26 | FL_MODE32 },
  {"arm7100",                  FL_MODE26 | FL_MODE32 },
  {"arm7500",                  FL_MODE26 | FL_MODE32 },
  /* Doesn't have an external co-proc, but does have embedded fpu.  */
  {"arm7500fe",   FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7tdmi",    FL_CO_PROC |             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  {"arm8",                     FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm810",                   FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm9",                                 FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm920",                               FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm920t",                              FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm940t",                              FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm9tdmi",                             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm9e",                                FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"strongarm",                FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm110",             FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm1100",            FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm1110",            FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"arm10tdmi",                            FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_ARCH5 },
  {"arm1020t",                             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_ARCH5 },
  {"xscale",                               FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_STRONG | FL_ARCH5 | FL_ARCH5E | FL_XSCALE },

  {NULL, 0}
};

static const struct processors all_architectures[] =
{
  /* ARM Architectures */

  { "armv2",     FL_CO_PROC | FL_MODE26 },
  { "armv2a",    FL_CO_PROC | FL_MODE26 },
  { "armv3",     FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  { "armv3m",    FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  { "armv4",     FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 },
  /* Strictly, FL_MODE26 is a permitted option for v4t, but there are no
     implementations that support it, so we will leave it out for now.  */
  { "armv4t",    FL_CO_PROC |             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  { "armv5",     FL_CO_PROC |             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
  { "armv5t",    FL_CO_PROC |             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
  { "armv5te",   FL_CO_PROC |             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 | FL_ARCH5E },
  { NULL, 0 }
};
/* This is a magic structure.  The 'string' field is magically filled in
   with a pointer to the value specified by the user on the command line
   assuming that the user has specified such a value.  */

struct arm_cpu_select arm_select[] =
{
  /* string       name            processors  */
  { NULL,       "-mcpu=",       all_cores  },
  { NULL,       "-march=",      all_architectures },
  { NULL,       "-mtune=",      all_cores }
};
/* Return the number of bits set in VALUE.  */
static unsigned long
bit_count (value)
     signed int value;
{
  unsigned long count = 0;

  while (value)
    {
      /* (value & -value) isolates the least significant set bit;
	 clearing it means the loop runs once per set bit.  */
      value &= ~(value & -value);
      ++count;
    }

  return count;
}
/* Fix up any incompatible options that the user has specified.
   This has now turned into a maze.  */
void
arm_override_options ()
{
  unsigned i;

  /* Set up the flags based on the cpu/architecture selected by the user.  */
  for (i = ARRAY_SIZE (arm_select); i--;)
    {
      struct arm_cpu_select * ptr = arm_select + i;

      if (ptr->string != NULL && ptr->string[0] != '\0')
        {
	  const struct processors * sel;

          for (sel = ptr->processors; sel->name != NULL; sel++)
            if (streq (ptr->string, sel->name))
              {
		if (i == 2)
		  tune_flags = sel->flags;
		else
		  {
		    /* If we have been given an architecture and a processor
		       make sure that they are compatible.  We only generate
		       a warning though, and we prefer the CPU over the
		       architecture.  */
		    if (insn_flags != 0 && (insn_flags ^ sel->flags))
		      warning ("switch -mcpu=%s conflicts with -march= switch",
			       ptr->string);

		    insn_flags = sel->flags;
		  }

                break;
              }

          if (sel->name == NULL)
            error ("bad value (%s) for %s switch", ptr->string, ptr->name);
        }
    }

  /* If the user did not specify a processor, choose one for them.  */
  if (insn_flags == 0)
    {
      const struct processors * sel;
      unsigned int        sought;
      static const struct cpu_default
      {
	const int cpu;
	const char *const name;
      }
      cpu_defaults[] =
      {
	{ TARGET_CPU_arm2,      "arm2" },
	{ TARGET_CPU_arm6,      "arm6" },
	{ TARGET_CPU_arm610,    "arm610" },
	{ TARGET_CPU_arm710,	"arm710" },
	{ TARGET_CPU_arm7m,     "arm7m" },
	{ TARGET_CPU_arm7500fe, "arm7500fe" },
	{ TARGET_CPU_arm7tdmi,  "arm7tdmi" },
	{ TARGET_CPU_arm8,      "arm8" },
	{ TARGET_CPU_arm810,    "arm810" },
	{ TARGET_CPU_arm9,      "arm9" },
	{ TARGET_CPU_strongarm, "strongarm" },
	{ TARGET_CPU_xscale,    "xscale" },
	{ TARGET_CPU_generic,   "arm" },
	{ 0, 0 }
      };
      const struct cpu_default * def;

      /* Find the default.  */
      for (def = cpu_defaults; def->name; def++)
	if (def->cpu == TARGET_CPU_DEFAULT)
	  break;

      /* Make sure we found the default CPU.  */
      if (def->name == NULL)
	abort ();

      /* Find the default CPU's flags.  */
      for (sel = all_cores; sel->name != NULL; sel++)
	if (streq (def->name, sel->name))
	  break;

      if (sel->name == NULL)
	abort ();

      insn_flags = sel->flags;

      /* Now check to see if the user has specified some command line
	 switches that require certain abilities from the cpu.  */
      sought = 0;

      if (TARGET_INTERWORK || TARGET_THUMB)
	{
	  sought |= (FL_THUMB | FL_MODE32);

	  /* Force apcs-32 to be used for interworking.  */
	  target_flags |= ARM_FLAG_APCS_32;

	  /* There are no ARM processors that support both APCS-26 and
	     interworking.  Therefore we force FL_MODE26 to be removed
	     from insn_flags here (if it was set), so that the search
	     below will always be able to find a compatible processor.  */
	  insn_flags &= ~FL_MODE26;
	}
      else if (!TARGET_APCS_32)
	sought |= FL_MODE26;

      if (sought != 0 && ((sought & insn_flags) != sought))
	{
	  /* Try to locate a CPU type that supports all of the abilities
	     of the default CPU, plus the extra abilities requested by
	     the user.  */
	  for (sel = all_cores; sel->name != NULL; sel++)
	    if ((sel->flags & sought) == (sought | insn_flags))
	      break;

	  if (sel->name == NULL)
	    {
	      unsigned int        current_bit_count = 0;
	      const struct processors * best_fit = NULL;

	      /* Ideally we would like to issue an error message here
		 saying that it was not possible to find a CPU compatible
		 with the default CPU, but which also supports the command
		 line options specified by the programmer, and so they
		 ought to use the -mcpu=<name> command line option to
		 override the default CPU type.

		 Unfortunately this does not work with multilibing.  We
		 need to be able to support multilibs for -mapcs-26 and for
		 -mthumb-interwork and there is no CPU that can support both
		 options.  Instead if we cannot find a cpu that has both the
		 characteristics of the default cpu and the given command line
		 options we scan the array again looking for a best match.  */
	      for (sel = all_cores; sel->name != NULL; sel++)
		if ((sel->flags & sought) == sought)
		  {
		    unsigned int count;

		    count = bit_count (sel->flags & insn_flags);

		    if (count >= current_bit_count)
		      {
			best_fit = sel;
			current_bit_count = count;
		      }
		  }

	      if (best_fit == NULL)
		abort ();
	      else
		sel = best_fit;
	    }

	  insn_flags = sel->flags;
	}
    }

  /* If tuning has not been specified, tune for whichever processor or
     architecture has been selected.  */
  if (tune_flags == 0)
    tune_flags = insn_flags;

  /* Make sure that the processor choice does not conflict with any of the
     other command line choices.  */
  if (TARGET_APCS_32 && !(insn_flags & FL_MODE32))
    {
      /* If APCS-32 was not the default then it must have been set by the
	 user, so issue a warning message.  If the user has specified
	 "-mapcs-32 -mcpu=arm2" then we lose here.  */
      if ((TARGET_DEFAULT & ARM_FLAG_APCS_32) == 0)
	warning ("target CPU does not support APCS-32" );
      target_flags &= ~ARM_FLAG_APCS_32;
    }
  else if (!TARGET_APCS_32 && !(insn_flags & FL_MODE26))
    {
      warning ("target CPU does not support APCS-26" );
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_INTERWORK && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support interworking" );
      target_flags &= ~ARM_FLAG_INTERWORK;
    }

  if (TARGET_THUMB && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support THUMB instructions");
      target_flags &= ~ARM_FLAG_THUMB;
    }

  if (TARGET_APCS_FRAME && TARGET_THUMB)
    {
      /* warning ("ignoring -mapcs-frame because -mthumb was used"); */
      target_flags &= ~ARM_FLAG_APCS_FRAME;
    }

  /* TARGET_BACKTRACE calls leaf_function_p, which causes a crash if done
     from here where no function is being compiled currently.  */
  if ((target_flags & (THUMB_FLAG_LEAF_BACKTRACE | THUMB_FLAG_BACKTRACE))
      && TARGET_ARM)
    warning ("enabling backtrace support is only meaningful when compiling for the Thumb");

  if (TARGET_ARM && TARGET_CALLEE_INTERWORKING)
    warning ("enabling callee interworking support is only meaningful when compiling for the Thumb");

  if (TARGET_ARM && TARGET_CALLER_INTERWORKING)
    warning ("enabling caller interworking support is only meaningful when compiling for the Thumb");

  /* If interworking is enabled then APCS-32 must be selected as well.  */
  if (TARGET_INTERWORK)
    {
      if (!TARGET_APCS_32)
	warning ("interworking forces APCS-32 to be used" );
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_APCS_STACK && !TARGET_APCS_FRAME)
    {
      warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
      target_flags |= ARM_FLAG_APCS_FRAME;
    }

  if (TARGET_POKE_FUNCTION_NAME)
    target_flags |= ARM_FLAG_APCS_FRAME;

  if (TARGET_APCS_REENT && flag_pic)
    error ("-fpic and -mapcs-reent are incompatible");

  if (TARGET_APCS_REENT)
    warning ("APCS reentrant code not supported.  Ignored");

  /* If this target is normally configured to use APCS frames, warn if they
     are turned off and debugging is turned on.  */
  if (TARGET_ARM
      && write_symbols != NO_DEBUG
      && !TARGET_APCS_FRAME
      && (TARGET_DEFAULT & ARM_FLAG_APCS_FRAME))
    warning ("-g with -mno-apcs-frame may not give sensible debugging");

  /* If stack checking is disabled, we can use r10 as the PIC register,
     which keeps r9 available.  */
  if (flag_pic)
    arm_pic_register = TARGET_APCS_STACK ? 9 : 10;

  if (TARGET_APCS_FLOAT)
    warning ("passing floating point arguments in fp regs not yet supported");

  /* Initialise boolean versions of the flags, for use in the arm.md file.  */
  arm_fast_multiply = (insn_flags & FL_FAST_MULT) != 0;
  arm_arch4         = (insn_flags & FL_ARCH4) != 0;
  arm_arch5         = (insn_flags & FL_ARCH5) != 0;
  arm_arch5e        = (insn_flags & FL_ARCH5E) != 0;
  arm_is_xscale     = (insn_flags & FL_XSCALE) != 0;

  arm_ld_sched      = (tune_flags & FL_LDSCHED) != 0;
  arm_is_strong     = (tune_flags & FL_STRONG) != 0;
  thumb_code        = (TARGET_ARM == 0);
  arm_is_6_or_7     = (((tune_flags & (FL_MODE26 | FL_MODE32))
		       && !(tune_flags & FL_ARCH4))) != 0;

  /* Default value for floating point code... if no co-processor
     bus, then schedule for emulated floating point.  Otherwise,
     assume the user has an FPA.
     Note: this does not prevent use of floating point instructions,
     -msoft-float does that.  */
  arm_fpu = (tune_flags & FL_CO_PROC) ? FP_HARD : FP_SOFT3;

  if (target_fp_name)
    {
      if (streq (target_fp_name, "2"))
	arm_fpu_arch = FP_SOFT2;
      else if (streq (target_fp_name, "3"))
	arm_fpu_arch = FP_SOFT3;
      else
	error ("invalid floating point emulation option: -mfpe-%s",
	       target_fp_name);
    }
  else
    arm_fpu_arch = FP_DEFAULT;

  if (TARGET_FPE && arm_fpu != FP_HARD)
    arm_fpu = FP_SOFT2;

  /* For arm2/3 there is no need to do any scheduling if there is only
     a floating point emulator, or we are doing software floating-point.  */
  if ((TARGET_SOFT_FLOAT || arm_fpu != FP_HARD)
      && (tune_flags & FL_MODE32) == 0)
    flag_schedule_insns = flag_schedule_insns_after_reload = 0;

  arm_prgmode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;

  if (structure_size_string != NULL)
    {
      int size = strtol (structure_size_string, NULL, 0);

      if (size == 8 || size == 32)
	arm_structure_size_boundary = size;
      else
	warning ("structure size boundary can only be set to 8 or 32");
    }

  if (arm_pic_register_string != NULL)
    {
      int pic_register = decode_reg_name (arm_pic_register_string);

      if (!flag_pic)
	warning ("-mpic-register= is useless without -fpic");

      /* Prevent the user from choosing an obviously stupid PIC register.  */
      else if (pic_register < 0 || call_used_regs[pic_register]
	       || pic_register == HARD_FRAME_POINTER_REGNUM
	       || pic_register == STACK_POINTER_REGNUM
	       || pic_register >= PC_REGNUM)
	error ("unable to use '%s' for PIC register", arm_pic_register_string);
      else
	arm_pic_register = pic_register;
    }

  if (TARGET_THUMB && flag_schedule_insns)
    {
      /* Don't warn since it's on by default in -O2.  */
      flag_schedule_insns = 0;
    }

  /* If optimizing for space, don't synthesize constants.
     For processors with load scheduling, it never costs more than 2 cycles
     to load a constant, and the load scheduler may well reduce that to 1.  */
  if (optimize_size || (tune_flags & FL_LDSCHED))
    arm_constant_limit = 1;

  if (arm_is_xscale)
    arm_constant_limit = 2;

  /* If optimizing for size, bump the number of instructions that we
     are prepared to conditionally execute (even on a StrongARM).
     Otherwise for the StrongARM, which has early execution of branches,
     a sequence that is worth skipping is shorter.  */
  if (optimize_size)
    max_insns_skipped = 6;
  else if (arm_is_strong)
    max_insns_skipped = 3;

  /* Register global variables with the garbage collector.  */
  arm_add_gc_roots ();
}
static void
arm_add_gc_roots ()
{
  ggc_add_rtx_root (&arm_compare_op0, 1);
  ggc_add_rtx_root (&arm_compare_op1, 1);
  ggc_add_rtx_root (&arm_target_insn, 1); /* Not sure this is really a root.  */

  gcc_obstack_init (&minipool_obstack);
  minipool_startobj = (char *) obstack_alloc (&minipool_obstack, 0);
}
/* A table of known ARM exception types.
   For use with the interrupt function attribute.  */

typedef struct
{
  const char *const arg;
  const unsigned long return_value;
}
isr_attribute_arg;

static const isr_attribute_arg isr_attribute_args [] =
{
  { "IRQ",   ARM_FT_ISR },
  { "irq",   ARM_FT_ISR },
  { "FIQ",   ARM_FT_FIQ },
  { "fiq",   ARM_FT_FIQ },
  { "ABORT", ARM_FT_ISR },
  { "abort", ARM_FT_ISR },
  { "ABORT", ARM_FT_ISR },
  { "abort", ARM_FT_ISR },
  { "UNDEF", ARM_FT_EXCEPTION },
  { "undef", ARM_FT_EXCEPTION },
  { "SWI",   ARM_FT_EXCEPTION },
  { "swi",   ARM_FT_EXCEPTION },
  { NULL,    ARM_FT_NORMAL }
};
/* Returns the (interrupt) function type of the current
   function, or ARM_FT_UNKNOWN if the type cannot be determined.  */

static unsigned long
arm_isr_value (argument)
     tree argument;
{
  const isr_attribute_arg * ptr;
  const char *              arg;

  /* No argument - default to IRQ.  */
  if (argument == NULL_TREE)
    return ARM_FT_ISR;

  /* Get the value of the argument.  */
  if (TREE_VALUE (argument) == NULL_TREE
      || TREE_CODE (TREE_VALUE (argument)) != STRING_CST)
    return ARM_FT_UNKNOWN;

  arg = TREE_STRING_POINTER (TREE_VALUE (argument));

  /* Check it against the list of known arguments.  */
  for (ptr = isr_attribute_args; ptr->arg != NULL; ptr ++)
    if (streq (arg, ptr->arg))
      return ptr->return_value;

  /* An unrecognised interrupt type.  */
  return ARM_FT_UNKNOWN;
}
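/* For example (illustrative, not part of the original source), a
   declaration such as

       void handler (void) __attribute__ ((interrupt ("IRQ")));

   reaches arm_isr_value with the STRING_CST "IRQ" and yields ARM_FT_ISR,
   while a bare __attribute__ ((interrupt)) defaults to ARM_FT_ISR via
   the NULL_TREE early return above.  */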
/* Computes the type of the current function.  */

static unsigned long
arm_compute_func_type ()
{
  unsigned long type = ARM_FT_UNKNOWN;
  tree a;
  tree attr;

  if (TREE_CODE (current_function_decl) != FUNCTION_DECL)
    abort ();

  /* Decide if the current function is volatile.  Such functions
     never return, and many memory cycles can be saved by not storing
     register values that will never be needed again.  This optimization
     was added to speed up context switching in a kernel application.  */
  if (optimize > 0
      && current_function_nothrow
      && TREE_THIS_VOLATILE (current_function_decl))
    type |= ARM_FT_VOLATILE;

  if (current_function_needs_context)
    type |= ARM_FT_NESTED;

  attr = DECL_ATTRIBUTES (current_function_decl);

  a = lookup_attribute ("naked", attr);
  if (a != NULL_TREE)
    type |= ARM_FT_NAKED;

  if (cfun->machine->eh_epilogue_sp_ofs != NULL_RTX)
    type |= ARM_FT_EXCEPTION_HANDLER;
  else
    {
      a = lookup_attribute ("isr", attr);
      if (a == NULL_TREE)
	a = lookup_attribute ("interrupt", attr);

      if (a == NULL_TREE)
	type |= TARGET_INTERWORK ? ARM_FT_INTERWORKED : ARM_FT_NORMAL;
      else
	type |= arm_isr_value (TREE_VALUE (a));
    }

  return type;
}
/* Returns the type of the current function.  */

unsigned long
arm_current_func_type ()
{
  if (ARM_FUNC_TYPE (cfun->machine->func_type) == ARM_FT_UNKNOWN)
    cfun->machine->func_type = arm_compute_func_type ();

  return cfun->machine->func_type;
}
/* Return 1 if it is possible to return using a single instruction.  */

int
use_return_insn (iscond)
     int iscond;
{
  int regno;
  unsigned int func_type;

  /* Never use a return instruction before reload has run.  */
  if (!reload_completed)
    return 0;

  func_type = arm_current_func_type ();

  /* Naked functions and volatile functions need special
     consideration.  */
  if (func_type & (ARM_FT_VOLATILE | ARM_FT_NAKED))
    return 0;

  /* As do variadic functions.  */
  if (current_function_pretend_args_size
      || cfun->machine->uses_anonymous_args
      /* Or if the function calls __builtin_eh_return () */
      || ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER
      /* Or if there is no frame pointer and there is a stack adjustment.  */
      || ((get_frame_size () + current_function_outgoing_args_size != 0)
	  && !frame_pointer_needed))
    return 0;

  /* Can't be done if interworking with Thumb, and any registers have been
     stacked.  Similarly, on StrongARM, conditional returns are expensive
     if they aren't taken and registers have been stacked.  */
  if (iscond && arm_is_strong && frame_pointer_needed)
    return 0;

  if ((iscond && arm_is_strong)
      || TARGET_INTERWORK)
    {
      for (regno = 0; regno <= LAST_ARM_REGNUM; regno++)
	if (regs_ever_live[regno] && !call_used_regs[regno])
	  return 0;

      if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
	return 0;
    }

  /* Can't be done if any of the FPU regs are pushed,
     since this also requires an insn.  */
  if (TARGET_HARD_FLOAT)
    for (regno = FIRST_ARM_FP_REGNUM; regno <= LAST_ARM_FP_REGNUM; regno++)
      if (regs_ever_live[regno] && !call_used_regs[regno])
	return 0;

  return 1;
}
/* Return TRUE if int I is a valid immediate ARM constant.  */

int
const_ok_for_arm (i)
     HOST_WIDE_INT i;
{
  unsigned HOST_WIDE_INT mask = ~(unsigned HOST_WIDE_INT)0xFF;

  /* For machines with >32 bit HOST_WIDE_INT, the bits above bit 31 must
     be all zero, or all one.  */
  if ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff) != 0
      && ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff)
	  != ((~(unsigned HOST_WIDE_INT) 0)
	      & ~(unsigned HOST_WIDE_INT) 0xffffffff)))
    return FALSE;

  /* Fast return for 0 and powers of 2 */
  if ((i & (i - 1)) == 0)
    return TRUE;

  do
    {
      if ((i & mask & (unsigned HOST_WIDE_INT) 0xffffffff) == 0)
        return TRUE;
      mask =
	  (mask << 2) | ((mask & (unsigned HOST_WIDE_INT) 0xffffffff)
			 >> (32 - 2)) | ~(unsigned HOST_WIDE_INT) 0xffffffff;
    }
  while (mask != ~(unsigned HOST_WIDE_INT) 0xFF);

  return FALSE;
}
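/* Illustrative examples (not from the original source): an ARM
   data-processing immediate is an 8-bit value rotated right by an even
   amount, which is what the mask loop above tests.  So 0xff, 0xff0 and
   0xff000000 are all valid immediates here, while 0x101 (nine bits
   wide) and 0x00ff00ff (two separate bytes) are not, and must be
   synthesized from several instructions; see arm_gen_constant below.  */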
/* Return true if I is a valid constant for the operation CODE.  */
static int
const_ok_for_op (i, code)
     HOST_WIDE_INT i;
     enum rtx_code code;
{
  if (const_ok_for_arm (i))
    return 1;

  switch (code)
    {
    case PLUS:
      return const_ok_for_arm (ARM_SIGN_EXTEND (-i));

    case MINUS:		/* Should only occur with (MINUS I reg) => rsb */
    case XOR:
    case IOR:
      return 0;

    case AND:
      return const_ok_for_arm (ARM_SIGN_EXTEND (~i));

    default:
      abort ();
    }
}
/* Emit a sequence of insns to handle a large constant.
   CODE is the code of the operation required, it can be any of SET, PLUS,
   IOR, AND, XOR, MINUS;
   MODE is the mode in which the operation is being performed;
   VAL is the integer to operate on;
   SOURCE is the other operand (a register, or a null-pointer for SET);
   SUBTARGETS means it is safe to create scratch registers if that will
   either produce a simpler sequence, or we will want to cse the values.
   Return value is the number of insns emitted.  */

int
arm_split_constant (code, mode, val, target, source, subtargets)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
{
  if (subtargets || code == SET
      || (GET_CODE (target) == REG && GET_CODE (source) == REG
	  && REGNO (target) != REGNO (source)))
    {
      /* After arm_reorg has been called, we can't fix up expensive
	 constants by pushing them into memory so we must synthesise
	 them in-line, regardless of the cost.  This is only likely to
	 be more costly on chips that have load delay slots and we are
	 compiling without running the scheduler (so no splitting
	 occurred before the final instruction emission).

	 Ref: gcc -O1 -mcpu=strongarm gcc.c-torture/compile/980506-2.c
      */
      if (!after_arm_reorg
	  && (arm_gen_constant (code, mode, val, target, source, 1, 0)
	      > arm_constant_limit + (code != SET)))
	{
	  if (code == SET)
	    {
	      /* Currently SET is the only monadic value for CODE, all
		 the rest are dyadic.  */
	      emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (val)));
	      return 1;
	    }
	  else
	    {
	      rtx temp = subtargets ? gen_reg_rtx (mode) : target;

	      emit_insn (gen_rtx_SET (VOIDmode, temp, GEN_INT (val)));
	      /* For MINUS, the value is subtracted from, since we never
		 have subtraction of a constant.  */
	      if (code == MINUS)
		emit_insn (gen_rtx_SET (VOIDmode, target,
					gen_rtx_MINUS (mode, temp, source)));
	      else
		emit_insn (gen_rtx_SET (VOIDmode, target,
					gen_rtx (code, mode, source, temp)));
	      return 2;
	    }
	}
    }

  return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
}
static int
count_insns_for_constant (HOST_WIDE_INT remainder, int i)
{
  HOST_WIDE_INT temp1;
  int num_insns = 0;
  do
    {
      int end;

      if (i <= 0)
	i += 32;
      if (remainder & (3 << (i - 2)))
	{
	  end = i - 8;
	  if (end < 0)
	    end += 32;
	  temp1 = remainder & ((0x0ff << end)
			       | ((i < end) ? (0xff >> (32 - end)) : 0));
	  remainder &= ~temp1;
	  num_insns++;
	  i -= 6;
	}
      i -= 2;
    } while (remainder);
  return num_insns;
}
/* As above, but extra parameter GENERATE which, if clear, suppresses
   RTL generation.  */

static int
arm_gen_constant (code, mode, val, target, source, subtargets, generate)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
     int generate;
{
  int can_invert = 0;
  int can_negate = 0;
  int can_negate_initial = 0;
  int can_shift = 0;
  int i;
  int num_bits_set = 0;
  int set_sign_bit_copies = 0;
  int clear_sign_bit_copies = 0;
  int clear_zero_bit_copies = 0;
  int set_zero_bit_copies = 0;
  int insns = 0;
  unsigned HOST_WIDE_INT temp1, temp2;
  unsigned HOST_WIDE_INT remainder = val & 0xffffffff;

  /* Find out which operations are safe for a given CODE.  Also do a quick
     check for degenerate cases; these can occur when DImode operations
     are split.  */
  switch (code)
    {
    case SET:
      can_invert = 1;
      can_shift = 1;
      can_negate = 1;
      break;

    case PLUS:
      can_negate = 1;
      can_negate_initial = 1;
      break;

    case IOR:
      if (remainder == 0xffffffff)
	{
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target,
				    GEN_INT (ARM_SIGN_EXTEND (val))));
	  return 1;
	}
      if (remainder == 0)
	{
	  if (reload_completed && rtx_equal_p (target, source))
	    return 0;
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target, source));
	  return 1;
	}
      break;

    case AND:
      if (remainder == 0)
	{
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target, const0_rtx));
	  return 1;
	}
      if (remainder == 0xffffffff)
	{
	  if (reload_completed && rtx_equal_p (target, source))
	    return 0;
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target, source));
	  return 1;
	}
      can_invert = 1;
      break;

    case XOR:
      if (remainder == 0)
	{
	  if (reload_completed && rtx_equal_p (target, source))
	    return 0;
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target, source));
	  return 1;
	}
      if (remainder == 0xffffffff)
	{
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target,
				    gen_rtx_NOT (mode, source)));
	  return 1;
	}

      /* We don't know how to handle this yet below.  */
      abort ();

    case MINUS:
      /* We treat MINUS as (val - source), since (source - val) is always
	 passed as (source + (-val)).  */
      if (remainder == 0)
	{
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target,
				    gen_rtx_NEG (mode, source)));
	  return 1;
	}
      if (const_ok_for_arm (val))
	{
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target,
				    gen_rtx_MINUS (mode, GEN_INT (val),
						   source)));
	  return 1;
	}
      can_negate = 1;

      break;

    default:
      abort ();
    }

  /* If we can do it in one insn get out quickly.  */
  if (const_ok_for_arm (val)
      || (can_negate_initial && const_ok_for_arm (-val))
      || (can_invert && const_ok_for_arm (~val)))
    {
      if (generate)
	emit_insn (gen_rtx_SET (VOIDmode, target,
				(source ? gen_rtx (code, mode, source,
						   GEN_INT (val))
				 : GEN_INT (val))));
      return 1;
    }

  /* Calculate a few attributes that may be useful for specific
     optimizations.  */
  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) == 0)
	clear_sign_bit_copies++;
      else
	break;
    }

  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) != 0)
	set_sign_bit_copies++;
      else
	break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) == 0)
	clear_zero_bit_copies++;
      else
	break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) != 0)
	set_zero_bit_copies++;
      else
	break;
    }

  switch (code)
    {
    case SET:
      /* See if we can do this by sign_extending a constant that is known
	 to be negative.  This is a good way of doing it, since the shift
	 may well merge into a subsequent insn.  */
      if (set_sign_bit_copies > 1)
	{
	  if (const_ok_for_arm
	      (temp1 = ARM_SIGN_EXTEND (remainder
					<< (set_sign_bit_copies - 1))))
	    {
	      if (generate)
		{
		  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
		  emit_insn (gen_rtx_SET (VOIDmode, new_src,
					  GEN_INT (temp1)));
		  emit_insn (gen_ashrsi3 (target, new_src,
					  GEN_INT (set_sign_bit_copies - 1)));
		}
	      return 2;
	    }
	  /* For an inverted constant, we will need to set the low bits,
	     these will be shifted out of harm's way.  */
	  temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
	  if (const_ok_for_arm (~temp1))
	    {
	      if (generate)
		{
		  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
		  emit_insn (gen_rtx_SET (VOIDmode, new_src,
					  GEN_INT (temp1)));
		  emit_insn (gen_ashrsi3 (target, new_src,
					  GEN_INT (set_sign_bit_copies - 1)));
		}
	      return 2;
	    }
	}

      /* See if we can generate this by setting the bottom (or the top)
	 16 bits, and then shifting these into the other half of the
	 word.  We only look for the simplest cases, to do more would cost
	 too much.  Be careful, however, not to generate this when the
	 alternative would take fewer insns.  */
      if (val & 0xffff0000)
	{
	  temp1 = remainder & 0xffff0000;
	  temp2 = remainder & 0x0000ffff;

	  /* Overlaps outside this range are best done using other methods.  */
	  for (i = 9; i < 24; i++)
	    {
	      if ((((temp2 | (temp2 << i)) & 0xffffffff) == remainder)
		  && !const_ok_for_arm (temp2))
		{
		  rtx new_src = (subtargets
				 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
				 : target);
		  insns = arm_gen_constant (code, mode, temp2, new_src,
					    source, subtargets, generate);
		  source = new_src;
		  if (generate)
		    emit_insn (gen_rtx_SET
			       (VOIDmode, target,
				gen_rtx_IOR (mode,
					     gen_rtx_ASHIFT (mode, source,
							     GEN_INT (i)),
					     source)));
		  return insns + 1;
		}
	    }

	  /* Don't duplicate cases already considered.  */
	  for (i = 17; i < 24; i++)
	    {
	      if (((temp1 | (temp1 >> i)) == remainder)
		  && !const_ok_for_arm (temp1))
		{
		  rtx new_src = (subtargets
				 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
				 : target);
		  insns = arm_gen_constant (code, mode, temp1, new_src,
					    source, subtargets, generate);
		  source = new_src;
		  if (generate)
		    emit_insn
		      (gen_rtx_SET (VOIDmode, target,
				    gen_rtx_IOR
				    (mode,
				     gen_rtx_LSHIFTRT (mode, source,
						       GEN_INT (i)),
				     source)));
		  return insns + 1;
		}
	    }
	}
      break;

    case IOR:
    case XOR:
      /* If we have IOR or XOR, and the constant can be loaded in a
	 single instruction, and we can find a temporary to put it in,
	 then this can be done in two instructions instead of 3-4.  */
      if (subtargets
	  /* TARGET can't be NULL if SUBTARGETS is 0 */
	  || (reload_completed && !reg_mentioned_p (target, source)))
	{
	  if (const_ok_for_arm (ARM_SIGN_EXTEND (~val)))
	    {
	      if (generate)
		{
		  rtx sub = subtargets ? gen_reg_rtx (mode) : target;

		  emit_insn (gen_rtx_SET (VOIDmode, sub, GEN_INT (val)));
		  emit_insn (gen_rtx_SET (VOIDmode, target,
					  gen_rtx (code, mode, source, sub)));
		}
	      return 2;
	    }
	}

      if (code == XOR)
	break;

      if (set_sign_bit_copies > 8
	  && (val & (-1 << (32 - set_sign_bit_copies))) == val)
	{
	  if (generate)
	    {
	      rtx sub = subtargets ? gen_reg_rtx (mode) : target;
	      rtx shift = GEN_INT (set_sign_bit_copies);

	      emit_insn (gen_rtx_SET (VOIDmode, sub,
				      gen_rtx_NOT (mode,
						   gen_rtx_ASHIFT (mode,
								   source,
								   shift))));
	      emit_insn (gen_rtx_SET (VOIDmode, target,
				      gen_rtx_NOT (mode,
						   gen_rtx_LSHIFTRT (mode, sub,
								     shift))));
	    }
	  return 2;
	}

      if (set_zero_bit_copies > 8
	  && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
	{
	  if (generate)
	    {
	      rtx sub = subtargets ? gen_reg_rtx (mode) : target;
	      rtx shift = GEN_INT (set_zero_bit_copies);

	      emit_insn (gen_rtx_SET (VOIDmode, sub,
				      gen_rtx_NOT (mode,
						   gen_rtx_LSHIFTRT (mode,
								     source,
								     shift))));
	      emit_insn (gen_rtx_SET (VOIDmode, target,
				      gen_rtx_NOT (mode,
						   gen_rtx_ASHIFT (mode, sub,
								   shift))));
	    }
	  return 2;
	}

      if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~val)))
	{
	  if (generate)
	    {
	      rtx sub = subtargets ? gen_reg_rtx (mode) : target;
	      emit_insn (gen_rtx_SET (VOIDmode, sub,
				      gen_rtx_NOT (mode, source)));
	      source = sub;
	      if (subtargets)
		sub = gen_reg_rtx (mode);
	      emit_insn (gen_rtx_SET (VOIDmode, sub,
				      gen_rtx_AND (mode, source,
						   GEN_INT (temp1))));
	      emit_insn (gen_rtx_SET (VOIDmode, target,
				      gen_rtx_NOT (mode, sub)));
	    }
	  return 3;
	}
      break;

    case AND:
      /* See if two shifts will do two or more insns' worth of work.  */
      if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
	{
	  HOST_WIDE_INT shift_mask = ((0xffffffff
				       << (32 - clear_sign_bit_copies))
				      & 0xffffffff);

	  if ((remainder | shift_mask) != 0xffffffff)
	    {
	      if (generate)
		{
		  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
		  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
					    new_src, source, subtargets, 1);
		  source = new_src;
		}
	      else
		{
		  rtx targ = subtargets ? NULL_RTX : target;
		  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
					    targ, source, subtargets, 0);
		}
	    }

	  if (generate)
	    {
	      rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
	      rtx shift = GEN_INT (clear_sign_bit_copies);

	      emit_insn (gen_ashlsi3 (new_src, source, shift));
	      emit_insn (gen_lshrsi3 (target, new_src, shift));
	    }

	  return insns + 2;
	}

      if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
	{
	  HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;

	  if ((remainder | shift_mask) != 0xffffffff)
	    {
	      if (generate)
		{
		  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;

		  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
					    new_src, source, subtargets, 1);
		  source = new_src;
		}
	      else
		{
		  rtx targ = subtargets ? NULL_RTX : target;

		  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
					    targ, source, subtargets, 0);
		}
	    }

	  if (generate)
	    {
	      rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
	      rtx shift = GEN_INT (clear_zero_bit_copies);

	      emit_insn (gen_lshrsi3 (new_src, source, shift));
	      emit_insn (gen_ashlsi3 (target, new_src, shift));
	    }

	  return insns + 2;
	}

      break;

    default:
      break;
    }

  for (i = 0; i < 32; i++)
    if (remainder & (1 << i))
      num_bits_set++;

  if (code == AND || (can_invert && num_bits_set > 16))
    remainder = (~remainder) & 0xffffffff;
  else if (code == PLUS && num_bits_set > 16)
    remainder = (-remainder) & 0xffffffff;
  else
    {
      can_invert = 0;
      can_negate = 0;
    }

  /* Now try and find a way of doing the job in either two or three
     instructions.
     We start by looking for the largest block of zeros that are aligned on
     a 2-bit boundary, we then fill up the temps, wrapping around to the
     top of the word when we drop off the bottom.
     In the worst case this code should produce no more than four insns.  */
  {
    int best_start = 0;
    int best_consecutive_zeros = 0;

    for (i = 0; i < 32; i += 2)
      {
	int consecutive_zeros = 0;

	if (!(remainder & (3 << i)))
	  {
	    while ((i < 32) && !(remainder & (3 << i)))
	      {
		consecutive_zeros += 2;
		i += 2;
	      }
	    if (consecutive_zeros > best_consecutive_zeros)
	      {
		best_consecutive_zeros = consecutive_zeros;
		best_start = i - consecutive_zeros;
	      }
	    i -= 2;
	  }
      }

    /* So long as it won't require any more insns to do so, it's
       desirable to emit a small constant (in bits 0...9) in the last
       insn.  This way there is more chance that it can be combined with
       a later addressing insn to form a pre-indexed load or store
       operation.  Consider:

	       *((volatile int *)0xe0000100) = 1;
	       *((volatile int *)0xe0000110) = 2;

       We want this to wind up as:

		mov rA, #0xe0000000
		mov rB, #1
		str rB, [rA, #0x100]
		mov rB, #2
		str rB, [rA, #0x110]

       rather than having to synthesize both large constants from scratch.

       Therefore, we calculate how many insns would be required to emit
       the constant starting from `best_start', and also starting from
       zero (i.e. with bit 31 first to be output).  If `best_start' doesn't
       yield a shorter sequence, we may as well use zero.  */
    if (best_start != 0
	&& ((((unsigned HOST_WIDE_INT) 1) << best_start) < remainder)
	&& (count_insns_for_constant (remainder, 0) <=
	    count_insns_for_constant (remainder, best_start)))
      best_start = 0;

    /* Now start emitting the insns.  */
    i = best_start;
    do
      {
	int end;

	if (i <= 0)
	  i += 32;
	if (remainder & (3 << (i - 2)))
	  {
	    end = i - 8;
	    if (end < 0)
	      end += 32;
	    temp1 = remainder & ((0x0ff << end)
				 | ((i < end) ? (0xff >> (32 - end)) : 0));
	    remainder &= ~temp1;

	    if (generate)
	      {
		rtx new_src, temp1_rtx;

		if (code == SET || code == MINUS)
		  {
		    new_src = (subtargets ? gen_reg_rtx (mode) : target);
		    if (can_invert && code != MINUS)
		      temp1 = ~temp1;
		  }
		else
		  {
		    if (remainder && subtargets)
		      new_src = gen_reg_rtx (mode);
		    else
		      new_src = target;
		    if (can_invert)
		      temp1 = ~temp1;
		    else if (can_negate)
		      temp1 = -temp1;
		  }

		temp1 = trunc_int_for_mode (temp1, mode);
		temp1_rtx = GEN_INT (temp1);

		if (code == SET)
		  ;
		else if (code == MINUS)
		  temp1_rtx = gen_rtx_MINUS (mode, temp1_rtx, source);
		else
		  temp1_rtx = gen_rtx_fmt_ee (code, mode, source, temp1_rtx);

		emit_insn (gen_rtx_SET (VOIDmode, new_src, temp1_rtx));
		source = new_src;
	      }

	    if (code == SET)
	      {
		can_invert = 0;
		code = PLUS;
	      }
	    else if (code == MINUS)
	      code = PLUS;

	    insns++;
	    i -= 6;
	  }
	i -= 2;
      }
    while (remainder);
  }

  return insns;
}
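/* A worked example (illustrative, not from the original source): a SET
   of 0x12345678 cannot be done with a single mov or mvn, so the do-while
   loop above peels off 8-bit chunks aligned on even-bit boundaries,
   emitting something like

	mov	rD, #0x12000000
	orr	rD, rD, #0x00340000
	orr	rD, rD, #0x00005600
	orr	rD, rD, #0x00000078

   where each operand is a valid rotated 8-bit immediate; this is the
   four-insn worst case mentioned in the comment above.  */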
/* Canonicalize a comparison so that we are more likely to recognize it.
   This can be done for a few constant compares, where we can make the
   immediate value easier to load.  */

enum rtx_code
arm_canonicalize_comparison (code, op1)
     enum rtx_code code;
     rtx * op1;
{
  unsigned HOST_WIDE_INT i = INTVAL (*op1);

  switch (code)
    {
    case EQ:
    case NE:
      return code;

    case GT:
    case LE:
      if (i != ((((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1)) - 1)
	  && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
	{
	  *op1 = GEN_INT (i + 1);
	  return code == GT ? GE : LT;
	}
      break;

    case GE:
    case LT:
      if (i != (((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1))
	  && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
	{
	  *op1 = GEN_INT (i - 1);
	  return code == GE ? GT : LE;
	}
      break;

    case GTU:
    case LEU:
      if (i != ~((unsigned HOST_WIDE_INT) 0)
	  && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
	{
	  *op1 = GEN_INT (i + 1);
	  return code == GTU ? GEU : LTU;
	}
      break;

    case GEU:
    case LTU:
      if (i != 0
	  && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
	{
	  *op1 = GEN_INT (i - 1);
	  return code == GEU ? GTU : LEU;
	}
      break;

    default:
      abort ();
    }

  return code;
}
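/* For instance (illustrative, not from the original source): (x > 0xfff)
   would need the unencodable immediate 0xfff, but rewriting it as
   (x >= 0x1000) uses 0x1000, which is a valid rotated 8-bit immediate,
   so the comparison can be done with a single cmp instruction.  */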
/* Decide whether a type should be returned in memory (true)
   or in a register (false).  This is called by the macro
   RETURN_IN_MEMORY.  */

int
arm_return_in_memory (type)
     tree type;
{
  if (!AGGREGATE_TYPE_P (type))
    /* All simple types are returned in registers.  */
    return 0;

  /* For the arm-wince targets we choose to be compatible with Microsoft's
     ARM and Thumb compilers, which always return aggregates in memory.  */
#ifndef ARM_WINCE
  /* All structures/unions bigger than one word are returned in memory.
     Also catch the case where int_size_in_bytes returns -1.  In this case
     the aggregate is either huge or of variable size, and in either case
     we will want to return it via memory and not in a register.  */
  if (((unsigned int) int_size_in_bytes (type)) > UNITS_PER_WORD)
    return 1;

  if (TREE_CODE (type) == RECORD_TYPE)
    {
      tree field;

      /* For a struct the APCS says that we only return in a register
	 if the type is 'integer like' and every addressable element
	 has an offset of zero.  For practical purposes this means
	 that the structure can have at most one non bit-field element
	 and that this element must be the first one in the structure.  */

      /* Find the first field, ignoring non FIELD_DECL things which will
	 have been created by C++.  */
      for (field = TYPE_FIELDS (type);
	   field && TREE_CODE (field) != FIELD_DECL;
	   field = TREE_CHAIN (field))
	continue;

      if (field == NULL)
	return 0; /* An empty structure.  Allowed by an extension to ANSI C.  */

      /* Check that the first field is valid for returning in a register.  */

      /* ... Floats are not allowed */
      if (FLOAT_TYPE_P (TREE_TYPE (field)))
	return 1;

      /* ... Aggregates that are not themselves valid for returning in
	 a register are not allowed.  */
      if (RETURN_IN_MEMORY (TREE_TYPE (field)))
	return 1;

      /* Now check the remaining fields, if any.  Only bitfields are allowed,
	 since they are not addressable.  */
      for (field = TREE_CHAIN (field);
	   field;
	   field = TREE_CHAIN (field))
	{
	  if (TREE_CODE (field) != FIELD_DECL)
	    continue;

	  if (!DECL_BIT_FIELD_TYPE (field))
	    return 1;
	}

      return 0;
    }

  if (TREE_CODE (type) == UNION_TYPE)
    {
      tree field;

      /* Unions can be returned in registers if every element is
	 integral, or can be returned in an integer register.  */
      for (field = TYPE_FIELDS (type);
	   field;
	   field = TREE_CHAIN (field))
	{
	  if (TREE_CODE (field) != FIELD_DECL)
	    continue;

	  if (FLOAT_TYPE_P (TREE_TYPE (field)))
	    return 1;

	  if (RETURN_IN_MEMORY (TREE_TYPE (field)))
	    return 1;
	}

      return 0;
    }
#endif /* not ARM_WINCE */

  /* Return all other types in memory.  */
  return 1;
}
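/* By way of example (illustrative, not from the original source), with
   4-byte words: struct { int i; } fits in a single register and returns
   0 above; struct { int i, j; } exceeds UNITS_PER_WORD and returns 1;
   and struct { float f; } returns 1 because its first field is a
   float.  */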
/* Initialize a variable CUM of type CUMULATIVE_ARGS
   for a call to a function whose data type is FNTYPE.
   For a library call, FNTYPE is NULL.  */
void
arm_init_cumulative_args (pcum, fntype, libname, indirect)
     CUMULATIVE_ARGS * pcum;
     tree fntype;
     rtx libname  ATTRIBUTE_UNUSED;
     int indirect ATTRIBUTE_UNUSED;
{
  /* On the ARM, the offset starts at 0.  */
  pcum->nregs = ((fntype && aggregate_value_p (TREE_TYPE (fntype))) ? 1 : 0);

  pcum->call_cookie = CALL_NORMAL;

  if (TARGET_LONG_CALLS)
    pcum->call_cookie = CALL_LONG;

  /* Check for long call/short call attributes.  The attributes
     override any command line option.  */
  if (fntype)
    {
      if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (fntype)))
	pcum->call_cookie = CALL_SHORT;
      else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (fntype)))
	pcum->call_cookie = CALL_LONG;
    }
}
/* Determine where to put an argument to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).  */

rtx
arm_function_arg (pcum, mode, type, named)
     CUMULATIVE_ARGS * pcum;
     enum machine_mode mode;
     tree type ATTRIBUTE_UNUSED;
     int named;
{
  if (mode == VOIDmode)
    /* Compute operand 2 of the call insn.  */
    return GEN_INT (pcum->call_cookie);

  if (!named || pcum->nregs >= NUM_ARG_REGS)
    return NULL_RTX;

  return gen_rtx_REG (mode, pcum->nregs);
}
1904 /* Encode the current state of the #pragma [no_]long_calls. */
1905 typedef enum
1907 OFF, /* No #pramgma [no_]long_calls is in effect. */
1908 LONG, /* #pragma long_calls is in effect. */
1909 SHORT /* #pragma no_long_calls is in effect. */
1910 } arm_pragma_enum;
1912 static arm_pragma_enum arm_pragma_long_calls = OFF;
1914 void
1915 arm_pr_long_calls (pfile)
1916 cpp_reader * pfile ATTRIBUTE_UNUSED;
1918 arm_pragma_long_calls = LONG;
1921 void
1922 arm_pr_no_long_calls (pfile)
1923 cpp_reader * pfile ATTRIBUTE_UNUSED;
1925 arm_pragma_long_calls = SHORT;
1928 void
1929 arm_pr_long_calls_off (pfile)
1930 cpp_reader * pfile ATTRIBUTE_UNUSED;
1932 arm_pragma_long_calls = OFF;
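/* Typical user-level usage of the three pragmas handled above
   (a sketch):

     #pragma long_calls
     extern void far_func (void);     defaults to long_call
     #pragma no_long_calls
     extern void near_func (void);    defaults to short_call
     #pragma long_calls_off
     extern void plain_func (void);   back to the command-line default */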
1935 /* Table of machine attributes. */
1936 const struct attribute_spec arm_attribute_table[] =
1938 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
1939 /* Function calls made to this symbol must be done indirectly, because
1940 it may lie outside of the 26 bit addressing range of a normal function
1941 call. */
1942 { "long_call", 0, 0, false, true, true, NULL },
1943 /* Whereas these functions are always known to reside within the 26 bit
1944 addressing range. */
1945 { "short_call", 0, 0, false, true, true, NULL },
1946 /* Interrupt Service Routines have special prologue and epilogue requirements. */
1947 { "isr", 0, 1, false, false, false, arm_handle_isr_attribute },
1948 { "interrupt", 0, 1, false, false, false, arm_handle_isr_attribute },
1949 { "naked", 0, 0, true, false, false, arm_handle_fndecl_attribute },
1950 #ifdef ARM_PE
1951 /* ARM/PE has three new attributes:
1952 interfacearm - ?
1953 dllexport - for exporting a function/variable that will live in a dll
1954 dllimport - for importing a function/variable from a dll
1956 Microsoft allows multiple declspecs in one __declspec, separating
1957 them with spaces. We do NOT support this. Instead, use __declspec
1958 multiple times. */
1960 { "dllimport", 0, 0, true, false, false, NULL },
1961 { "dllexport", 0, 0, true, false, false, NULL },
1962 { "interfacearm", 0, 0, true, false, false, arm_handle_fndecl_attribute },
1963 #endif
1964 { NULL, 0, 0, false, false, false, NULL }
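/* Example declarations accepted by the table above (a sketch; the
   argument string of "isr"/"interrupt" is validated by arm_isr_value):

     void handler (void) __attribute__ ((interrupt ("IRQ")));
     void thunk (void) __attribute__ ((naked));
     void far_away (void) __attribute__ ((long_call)); */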
1967 /* Handle an attribute requiring a FUNCTION_DECL;
1968 arguments as in struct attribute_spec.handler. */
1970 static tree
1971 arm_handle_fndecl_attribute (node, name, args, flags, no_add_attrs)
1972 tree * node;
1973 tree name;
1974 tree args ATTRIBUTE_UNUSED;
1975 int flags ATTRIBUTE_UNUSED;
1976 bool * no_add_attrs;
1978 if (TREE_CODE (*node) != FUNCTION_DECL)
1980 warning ("`%s' attribute only applies to functions",
1981 IDENTIFIER_POINTER (name));
1982 *no_add_attrs = true;
1985 return NULL_TREE;
1988 /* Handle an "interrupt" or "isr" attribute;
1989 arguments as in struct attribute_spec.handler. */
1991 static tree
1992 arm_handle_isr_attribute (node, name, args, flags, no_add_attrs)
1993 tree * node;
1994 tree name;
1995 tree args;
1996 int flags;
1997 bool * no_add_attrs;
1999 if (DECL_P (*node))
2001 if (TREE_CODE (*node) != FUNCTION_DECL)
2003 warning ("`%s' attribute only applies to functions",
2004 IDENTIFIER_POINTER (name));
2005 *no_add_attrs = true;
2007 /* FIXME: the argument, if any, is checked for type attributes;
2008 should it be checked for decl ones? */
2010 else
2012 if (TREE_CODE (*node) == FUNCTION_TYPE
2013 || TREE_CODE (*node) == METHOD_TYPE)
2015 if (arm_isr_value (args) == ARM_FT_UNKNOWN)
2017 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
2018 *no_add_attrs = true;
2021 else if (TREE_CODE (*node) == POINTER_TYPE
2022 && (TREE_CODE (TREE_TYPE (*node)) == FUNCTION_TYPE
2023 || TREE_CODE (TREE_TYPE (*node)) == METHOD_TYPE)
2024 && arm_isr_value (args) != ARM_FT_UNKNOWN)
2026 *node = build_type_copy (*node);
2027 TREE_TYPE (*node) = build_type_attribute_variant
2028 (TREE_TYPE (*node),
2029 tree_cons (name, args, TYPE_ATTRIBUTES (TREE_TYPE (*node))));
2030 *no_add_attrs = true;
2032 else
2034 /* Possibly pass this attribute on from the type to a decl. */
2035 if (flags & ((int) ATTR_FLAG_DECL_NEXT
2036 | (int) ATTR_FLAG_FUNCTION_NEXT
2037 | (int) ATTR_FLAG_ARRAY_NEXT))
2039 *no_add_attrs = true;
2040 return tree_cons (name, args, NULL_TREE);
2042 else
2044 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
2049 return NULL_TREE;
2052 /* Return 0 if the attributes for two types are incompatible, 1 if they
2053 are compatible, and 2 if they are nearly compatible (which causes a
2054 warning to be generated). */
2056 static int
2057 arm_comp_type_attributes (type1, type2)
2058 tree type1;
2059 tree type2;
2061 int l1, l2, s1, s2;
2063 /* Check for mismatch of non-default calling convention. */
2064 if (TREE_CODE (type1) != FUNCTION_TYPE)
2065 return 1;
2067 /* Check for mismatched call attributes. */
2068 l1 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type1)) != NULL;
2069 l2 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type2)) != NULL;
2070 s1 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type1)) != NULL;
2071 s2 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type2)) != NULL;
2073 /* Only bother to check if an attribute is defined. */
2074 if (l1 | l2 | s1 | s2)
2076 /* If one type has an attribute, the other must have the same attribute. */
2077 if ((l1 != l2) || (s1 != s2))
2078 return 0;
2080 /* Disallow mixed attributes. */
2081 if ((l1 & s2) || (l2 & s1))
2082 return 0;
2085 /* Check for mismatched ISR attribute. */
2086 l1 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type1)) != NULL;
2087 if (! l1)
2088 l1 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type1)) != NULL;
2089 l2 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type2)) != NULL;
2090 if (! l2)
2091 l2 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type2)) != NULL;
2092 if (l1 != l2)
2093 return 0;
2095 return 1;
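/* For example (a sketch), these two types get 0 from the
   long_call/short_call check above, so the assignment is diagnosed
   as using incompatible pointer types:

     void f (void) __attribute__ ((long_call));
     void (*p) (void) = f; */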
2098 /* Encode long_call or short_call attribute by prefixing
2099 symbol name in DECL with a special character FLAG. */
2101 void
2102 arm_encode_call_attribute (decl, flag)
2103 tree decl;
2104 int flag;
2106 const char * str = XSTR (XEXP (DECL_RTL (decl), 0), 0);
2107 int len = strlen (str);
2108 char * newstr;
2110 /* Do not allow weak functions to be treated as short call. */
2111 if (DECL_WEAK (decl) && flag == SHORT_CALL_FLAG_CHAR)
2112 return;
2114 newstr = alloca (len + 2);
2115 newstr[0] = flag;
2116 strcpy (newstr + 1, str);
2118 newstr = (char *) ggc_alloc_string (newstr, len + 1);
2119 XSTR (XEXP (DECL_RTL (decl), 0), 0) = newstr;
2122 /* Assigns default attributes to newly defined type. This is used to
2123 set short_call/long_call attributes for function types of
2124 functions defined inside corresponding #pragma scopes. */
2126 static void
2127 arm_set_default_type_attributes (type)
2128 tree type;
2130 /* Add __attribute__ ((long_call)) to all functions when inside
2131 #pragma long_calls, or __attribute__ ((short_call)) when inside
2132 #pragma no_long_calls. */
2133 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
2135 tree type_attr_list, attr_name;
2136 type_attr_list = TYPE_ATTRIBUTES (type);
2138 if (arm_pragma_long_calls == LONG)
2139 attr_name = get_identifier ("long_call");
2140 else if (arm_pragma_long_calls == SHORT)
2141 attr_name = get_identifier ("short_call");
2142 else
2143 return;
2145 type_attr_list = tree_cons (attr_name, NULL_TREE, type_attr_list);
2146 TYPE_ATTRIBUTES (type) = type_attr_list;
2150 /* Return 1 if the operand is a SYMBOL_REF for a function known to be
2151 defined within the current compilation unit. If this cannot be
2152 determined, then 0 is returned. */
2154 static int
2155 current_file_function_operand (sym_ref)
2156 rtx sym_ref;
2158 /* This is a bit of a fib. A function will have a short call flag
2159 applied to its name if it has the short call attribute, or it has
2160 already been defined within the current compilation unit. */
2161 if (ENCODED_SHORT_CALL_ATTR_P (XSTR (sym_ref, 0)))
2162 return 1;
2164 /* The current function is always defined within the current compilation
2165 unit. If it is a weak definition, however, then this may not be the real
2166 definition of the function, and so we have to say no. */
2167 if (sym_ref == XEXP (DECL_RTL (current_function_decl), 0)
2168 && !DECL_WEAK (current_function_decl))
2169 return 1;
2171 /* We cannot make the determination - default to returning 0. */
2172 return 0;
2175 /* Return non-zero if a 32 bit "long_call" should be generated for
2176 this call. We generate a long_call if the function:
2178 a. has an __attribute__ ((long_call))
2179 or b. is within the scope of a #pragma long_calls
2180 or c. the -mlong-calls command line switch has been specified
2182 However we do not generate a long call if the function:
2184 d. has an __attribute__ ((short_call))
2185 or e. is inside the scope of a #pragma no_long_calls
2186 or f. has an __attribute__ ((section))
2187 or g. is defined within the current compilation unit.
2189 This function will be called by C fragments contained in the machine
2190 description file. CALL_REF and CALL_COOKIE correspond to the matched
2191 rtl operands. CALL_SYMBOL is used to distinguish between
2192 two different callers of the function. It is set to 1 in the
2193 "call_symbol" and "call_symbol_value" patterns and to 0 in the "call"
2194 and "call_value" patterns. This is because of the difference in the
2195 SYM_REFs passed by these patterns. */
2198 arm_is_longcall_p (sym_ref, call_cookie, call_symbol)
2199 rtx sym_ref;
2200 int call_cookie;
2201 int call_symbol;
2203 if (!call_symbol)
2205 if (GET_CODE (sym_ref) != MEM)
2206 return 0;
2208 sym_ref = XEXP (sym_ref, 0);
2211 if (GET_CODE (sym_ref) != SYMBOL_REF)
2212 return 0;
2214 if (call_cookie & CALL_SHORT)
2215 return 0;
2217 if (TARGET_LONG_CALLS && flag_function_sections)
2218 return 1;
2220 if (current_file_function_operand (sym_ref))
2221 return 0;
2223 return (call_cookie & CALL_LONG)
2224 || ENCODED_LONG_CALL_ATTR_P (XSTR (sym_ref, 0))
2225 || TARGET_LONG_CALLS;
2228 /* Return non-zero if it is ok to make a tail-call to DECL. */
2231 arm_function_ok_for_sibcall (decl)
2232 tree decl;
2234 int call_type = TARGET_LONG_CALLS ? CALL_LONG : CALL_NORMAL;
2236 /* Never tailcall something for which we have no decl, or if we
2237 are in Thumb mode. */
2238 if (decl == NULL || TARGET_THUMB)
2239 return 0;
2241 /* Get the calling method. */
2242 if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
2243 call_type = CALL_SHORT;
2244 else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
2245 call_type = CALL_LONG;
2247 /* Cannot tail-call to long calls, since these are out of range of
2248 a branch instruction. However, if not compiling PIC, we know
2249 we can reach the symbol if it is in this compilation unit. */
2250 if (call_type == CALL_LONG && (flag_pic || !TREE_ASM_WRITTEN (decl)))
2251 return 0;
2253 /* If we are interworking and the function is not declared static
2254 then we can't tail-call it unless we know that it exists in this
2255 compilation unit (since it might be a Thumb routine). */
2256 if (TARGET_INTERWORK && TREE_PUBLIC (decl) && !TREE_ASM_WRITTEN (decl))
2257 return 0;
2259 /* Never tailcall from an ISR routine - it needs a special exit sequence. */
2260 if (IS_INTERRUPT (arm_current_func_type ()))
2261 return 0;
2263 /* Everything else is ok. */
2264 return 1;
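/* Example (a sketch): the tail call below is refused, since g's
   attribute forces CALL_LONG and g is not yet defined (and hence not
   TREE_ASM_WRITTEN) in this compilation unit:

     extern void g (void) __attribute__ ((long_call));
     void f (void) { g (); } */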
2269 legitimate_pic_operand_p (x)
2270 rtx x;
2272 if (CONSTANT_P (x)
2273 && flag_pic
2274 && (GET_CODE (x) == SYMBOL_REF
2275 || (GET_CODE (x) == CONST
2276 && GET_CODE (XEXP (x, 0)) == PLUS
2277 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)))
2278 return 0;
2280 return 1;
2284 legitimize_pic_address (orig, mode, reg)
2285 rtx orig;
2286 enum machine_mode mode;
2287 rtx reg;
2289 if (GET_CODE (orig) == SYMBOL_REF
2290 || GET_CODE (orig) == LABEL_REF)
2292 #ifndef AOF_ASSEMBLER
2293 rtx pic_ref, address;
2294 #endif
2295 rtx insn;
2296 int subregs = 0;
2298 if (reg == 0)
2300 if (no_new_pseudos)
2301 abort ();
2302 else
2303 reg = gen_reg_rtx (Pmode);
2305 subregs = 1;
2308 #ifdef AOF_ASSEMBLER
2309 /* The AOF assembler can generate relocations for these directly, and
2310 understands that the PIC register has to be added into the offset. */
2311 insn = emit_insn (gen_pic_load_addr_based (reg, orig));
2312 #else
2313 if (subregs)
2314 address = gen_reg_rtx (Pmode);
2315 else
2316 address = reg;
2318 if (TARGET_ARM)
2319 emit_insn (gen_pic_load_addr_arm (address, orig));
2320 else
2321 emit_insn (gen_pic_load_addr_thumb (address, orig));
2323 if ((GET_CODE (orig) == LABEL_REF
2324 || (GET_CODE (orig) == SYMBOL_REF &&
2325 ENCODED_SHORT_CALL_ATTR_P (XSTR (orig, 0))))
2326 && NEED_GOT_RELOC)
2327 pic_ref = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, address);
2328 else
2330 pic_ref = gen_rtx_MEM (Pmode,
2331 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
2332 address));
2333 RTX_UNCHANGING_P (pic_ref) = 1;
2336 insn = emit_move_insn (reg, pic_ref);
2337 #endif
2338 current_function_uses_pic_offset_table = 1;
2339 /* Put a REG_EQUAL note on this insn, so that it can be optimized
2340 by loop. */
2341 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, orig,
2342 REG_NOTES (insn));
2343 return reg;
2345 else if (GET_CODE (orig) == CONST)
2347 rtx base, offset;
2349 if (GET_CODE (XEXP (orig, 0)) == PLUS
2350 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
2351 return orig;
2353 if (reg == 0)
2355 if (no_new_pseudos)
2356 abort ();
2357 else
2358 reg = gen_reg_rtx (Pmode);
2361 if (GET_CODE (XEXP (orig, 0)) == PLUS)
2363 base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
2364 offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
2365 base == reg ? 0 : reg);
2367 else
2368 abort ();
2370 if (GET_CODE (offset) == CONST_INT)
2372 /* The base register doesn't really matter, we only want to
2373 test the index for the appropriate mode. */
2374 ARM_GO_IF_LEGITIMATE_INDEX (mode, 0, offset, win);
2376 if (!no_new_pseudos)
2377 offset = force_reg (Pmode, offset);
2378 else
2379 abort ();
2381 win:
2382 if (GET_CODE (offset) == CONST_INT)
2383 return plus_constant (base, INTVAL (offset));
2386 if (GET_MODE_SIZE (mode) > 4
2387 && (GET_MODE_CLASS (mode) == MODE_INT
2388 || TARGET_SOFT_FLOAT))
2390 emit_insn (gen_addsi3 (reg, base, offset));
2391 return reg;
2394 return gen_rtx_PLUS (Pmode, base, offset);
2397 return orig;
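/* A sketch of what the SYMBOL_REF path above produces for a global
   `extern int v;' compiled with -fPIC in ARM state:

     ldr rA, <pool entry for v>   @ pic_load_addr_arm
     ldr rD, [sl, rA]             @ the MEM: GOT base + offset -> &v

   where sl stands for pic_offset_table_rtx.  LABEL_REFs (and
   short-call-encoded symbols) take the plain PLUS form instead when
   NEED_GOT_RELOC holds. */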
2400 /* Generate code to load the PIC register. PROLOGUE is true if
2401 called from arm_expand_prologue (in which case we want the
2402 generated insns at the start of the function); false if called
2403 by an exception receiver that needs the PIC register reloaded
2404 (in which case the insns are just dumped at the current location). */
2406 void
2407 arm_finalize_pic (prologue)
2408 int prologue ATTRIBUTE_UNUSED;
2410 #ifndef AOF_ASSEMBLER
2411 rtx l1, pic_tmp, pic_tmp2, seq, pic_rtx;
2412 rtx global_offset_table;
2414 if (current_function_uses_pic_offset_table == 0 || TARGET_SINGLE_PIC_BASE)
2415 return;
2417 if (!flag_pic)
2418 abort ();
2420 start_sequence ();
2421 l1 = gen_label_rtx ();
2423 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2424 /* On the ARM the PC register contains 'dot + 8' at the time of the
2425 addition, on the Thumb it is 'dot + 4'. */
2426 pic_tmp = plus_constant (gen_rtx_LABEL_REF (Pmode, l1), TARGET_ARM ? 8 : 4);
2427 if (GOT_PCREL)
2428 pic_tmp2 = gen_rtx_CONST (VOIDmode,
2429 gen_rtx_PLUS (Pmode, global_offset_table, pc_rtx));
2430 else
2431 pic_tmp2 = gen_rtx_CONST (VOIDmode, global_offset_table);
2433 pic_rtx = gen_rtx_CONST (Pmode, gen_rtx_MINUS (Pmode, pic_tmp2, pic_tmp));
2435 if (TARGET_ARM)
2437 emit_insn (gen_pic_load_addr_arm (pic_offset_table_rtx, pic_rtx));
2438 emit_insn (gen_pic_add_dot_plus_eight (pic_offset_table_rtx, l1));
2440 else
2442 emit_insn (gen_pic_load_addr_thumb (pic_offset_table_rtx, pic_rtx));
2443 emit_insn (gen_pic_add_dot_plus_four (pic_offset_table_rtx, l1));
2446 seq = gen_sequence ();
2447 end_sequence ();
2448 if (prologue)
2449 emit_insn_after (seq, get_insns ());
2450 else
2451 emit_insn (seq);
2453 /* Need to emit this whether or not we obey regdecls,
2454 since setjmp/longjmp can cause life info to screw up. */
2455 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
2456 #endif /* AOF_ASSEMBLER */
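/* In ARM state the sequence built above comes out roughly as
   (a sketch):

       ldr  sl, <pool: _GLOBAL_OFFSET_TABLE_ - (L1 + 8)>
     L1:
       add  sl, pc, sl   @ pc reads as L1 + 8, hence the dot + 8 above

   The Thumb variant uses dot + 4 via pic_add_dot_plus_four. */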
2459 #define REG_OR_SUBREG_REG(X) \
2460 (GET_CODE (X) == REG \
2461 || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))
2463 #define REG_OR_SUBREG_RTX(X) \
2464 (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))
2466 #ifndef COSTS_N_INSNS
2467 #define COSTS_N_INSNS(N) ((N) * 4 - 2)
2468 #endif
2471 arm_rtx_costs (x, code, outer)
2472 rtx x;
2473 enum rtx_code code;
2474 enum rtx_code outer;
2476 enum machine_mode mode = GET_MODE (x);
2477 enum rtx_code subcode;
2478 int extra_cost;
2480 if (TARGET_THUMB)
2482 switch (code)
2484 case ASHIFT:
2485 case ASHIFTRT:
2486 case LSHIFTRT:
2487 case ROTATERT:
2488 case PLUS:
2489 case MINUS:
2490 case COMPARE:
2491 case NEG:
2492 case NOT:
2493 return COSTS_N_INSNS (1);
2495 case MULT:
2496 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
2498 int cycles = 0;
2499 unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));
2501 while (i)
2503 i >>= 2;
2504 cycles++;
2506 return COSTS_N_INSNS (2) + cycles;
2508 return COSTS_N_INSNS (1) + 16;
2510 case SET:
2511 return (COSTS_N_INSNS (1)
2512 + 4 * ((GET_CODE (SET_SRC (x)) == MEM)
2513 + (GET_CODE (SET_DEST (x)) == MEM)));
2515 case CONST_INT:
2516 if (outer == SET)
2518 if ((unsigned HOST_WIDE_INT) INTVAL (x) < 256)
2519 return 0;
2520 if (thumb_shiftable_const (INTVAL (x)))
2521 return COSTS_N_INSNS (2);
2522 return COSTS_N_INSNS (3);
2524 else if (outer == PLUS
2525 && INTVAL (x) < 256 && INTVAL (x) > -256)
2526 return 0;
2527 else if (outer == COMPARE
2528 && (unsigned HOST_WIDE_INT) INTVAL (x) < 256)
2529 return 0;
2530 else if (outer == ASHIFT || outer == ASHIFTRT
2531 || outer == LSHIFTRT)
2532 return 0;
2533 return COSTS_N_INSNS (2);
2535 case CONST:
2536 case CONST_DOUBLE:
2537 case LABEL_REF:
2538 case SYMBOL_REF:
2539 return COSTS_N_INSNS (3);
2541 case UDIV:
2542 case UMOD:
2543 case DIV:
2544 case MOD:
2545 return 100;
2547 case TRUNCATE:
2548 return 99;
2550 case AND:
2551 case XOR:
2552 case IOR:
2553 /* XXX guess. */
2554 return 8;
2556 case ADDRESSOF:
2557 case MEM:
2558 /* XXX another guess. */
2559 /* Memory costs quite a lot for the first word, but subsequent words
2560 load at the equivalent of a single insn each. */
2561 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
2562 + ((GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
2563 ? 4 : 0));
2565 case IF_THEN_ELSE:
2566 /* XXX a guess. */
2567 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
2568 return 14;
2569 return 2;
2571 case ZERO_EXTEND:
2572 /* XXX still guessing. */
2573 switch (GET_MODE (XEXP (x, 0)))
2575 case QImode:
2576 return (1 + (mode == DImode ? 4 : 0)
2577 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2579 case HImode:
2580 return (4 + (mode == DImode ? 4 : 0)
2581 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2583 case SImode:
2584 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2586 default:
2587 return 99;
2590 default:
2591 return 99;
2592 #if 0
2593 case FFS:
2594 case FLOAT:
2595 case FIX:
2596 case UNSIGNED_FIX:
2597 /* XXX guess */
2598 fprintf (stderr, "unexpected code for thumb in rtx_costs: %s\n",
2599 rtx_name[code]);
2600 abort ();
2601 #endif
2605 switch (code)
2607 case MEM:
2608 /* Memory costs quite a lot for the first word, but subsequent words
2609 load at the equivalent of a single insn each. */
2610 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
2611 + (GET_CODE (x) == SYMBOL_REF
2612 && CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));
2614 case DIV:
2615 case MOD:
2616 return 100;
2618 case ROTATE:
2619 if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
2620 return 4;
2621 /* Fall through */
2622 case ROTATERT:
2623 if (mode != SImode)
2624 return 8;
2625 /* Fall through */
2626 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
2627 if (mode == DImode)
2628 return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
2629 + ((GET_CODE (XEXP (x, 0)) == REG
2630 || (GET_CODE (XEXP (x, 0)) == SUBREG
2631 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
2632 ? 0 : 8));
2633 return (1 + ((GET_CODE (XEXP (x, 0)) == REG
2634 || (GET_CODE (XEXP (x, 0)) == SUBREG
2635 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
2636 ? 0 : 4)
2637 + ((GET_CODE (XEXP (x, 1)) == REG
2638 || (GET_CODE (XEXP (x, 1)) == SUBREG
2639 && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
2640 || (GET_CODE (XEXP (x, 1)) == CONST_INT))
2641 ? 0 : 4));
2643 case MINUS:
2644 if (mode == DImode)
2645 return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
2646 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
2647 || (GET_CODE (XEXP (x, 0)) == CONST_INT
2648 && const_ok_for_arm (INTVAL (XEXP (x, 0)))))
2649 ? 0 : 8));
2651 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2652 return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2653 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
2654 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
2655 ? 0 : 8)
2656 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
2657 || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
2658 && const_double_rtx_ok_for_fpu (XEXP (x, 0))))
2659 ? 0 : 8));
2661 if (((GET_CODE (XEXP (x, 0)) == CONST_INT
2662 && const_ok_for_arm (INTVAL (XEXP (x, 0)))
2663 && REG_OR_SUBREG_REG (XEXP (x, 1))))
2664 || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
2665 || subcode == ASHIFTRT || subcode == LSHIFTRT
2666 || subcode == ROTATE || subcode == ROTATERT
2667 || (subcode == MULT
2668 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
2669 && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
2670 (INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
2671 && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
2672 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
2673 || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
2674 && REG_OR_SUBREG_REG (XEXP (x, 0))))
2675 return 1;
2676 /* Fall through */
2678 case PLUS:
2679 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2680 return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
2681 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2682 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
2683 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
2684 ? 0 : 8));
2686 /* Fall through */
2687 case AND: case XOR: case IOR:
2688 extra_cost = 0;
2690 /* Normally the frame registers will be spilt into reg+const during
2691 reload, so it is a bad idea to combine them with other instructions,
2692 since then they might not be moved outside of loops. As a compromise
2693 we allow integration with ops that have a constant as their second
2694 operand. */
2695 if ((REG_OR_SUBREG_REG (XEXP (x, 0))
2696 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
2697 && GET_CODE (XEXP (x, 1)) != CONST_INT)
2698 || (REG_OR_SUBREG_REG (XEXP (x, 0))
2699 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))))
2700 extra_cost = 4;
2702 if (mode == DImode)
2703 return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
2704 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2705 || (GET_CODE (XEXP (x, 1)) == CONST_INT
2706 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
2707 ? 0 : 8));
2709 if (REG_OR_SUBREG_REG (XEXP (x, 0)))
2710 return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
2711 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2712 || (GET_CODE (XEXP (x, 1)) == CONST_INT
2713 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
2714 ? 0 : 4));
2716 else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
2717 return (1 + extra_cost
2718 + ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
2719 || subcode == LSHIFTRT || subcode == ASHIFTRT
2720 || subcode == ROTATE || subcode == ROTATERT
2721 || (subcode == MULT
2722 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2723 && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
2724 (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0)))
2725 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
2726 && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
2727 || GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
2728 ? 0 : 4));
2730 return 8;
2732 case MULT:
2733 /* There is no point basing this on the tuning, since it is always the
2734 fast variant if it exists at all. */
2735 if (arm_fast_multiply && mode == DImode
2736 && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
2737 && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
2738 || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
2739 return 8;
2741 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2742 || mode == DImode)
2743 return 30;
2745 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
2747 unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
2748 & (unsigned HOST_WIDE_INT) 0xffffffff);
2749 int add_cost = const_ok_for_arm (i) ? 4 : 8;
2750 int j;
2752 /* Tune as appropriate. */
2753 int booth_unit_size = ((tune_flags & FL_FAST_MULT) ? 8 : 2);
2755 for (j = 0; i && j < 32; j += booth_unit_size)
2757 i >>= booth_unit_size;
2758 add_cost += 2;
2761 return add_cost;
2764 return (((tune_flags & FL_FAST_MULT) ? 8 : 30)
2765 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
2766 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));
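/* Worked example for the CONST_INT branch above (a sketch): for a
   multiply by 0x500 on a core with FL_FAST_MULT, const_ok_for_arm
   (0x500) holds so add_cost starts at 4; booth_unit_size is 8 and
   the loop runs twice (0x500 -> 0x5 -> 0), giving 4 + 2*2 = 8.
   Without FL_FAST_MULT the unit is 2, the loop runs six times, and
   the same multiply costs 4 + 6*2 = 16. */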
2768 case TRUNCATE:
2769 if (arm_fast_multiply && mode == SImode
2770 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
2771 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
2772 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
2773 == GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)))
2774 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ZERO_EXTEND
2775 || GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SIGN_EXTEND))
2776 return 8;
2777 return 99;
2779 case NEG:
2780 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2781 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
2782 /* Fall through */
2783 case NOT:
2784 if (mode == DImode)
2785 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
2787 return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
2789 case IF_THEN_ELSE:
2790 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
2791 return 14;
2792 return 2;
2794 case COMPARE:
2795 return 1;
2797 case ABS:
2798 return 4 + (mode == DImode ? 4 : 0);
2800 case SIGN_EXTEND:
2801 if (GET_MODE (XEXP (x, 0)) == QImode)
2802 return (4 + (mode == DImode ? 4 : 0)
2803 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2804 /* Fall through */
2805 case ZERO_EXTEND:
2806 switch (GET_MODE (XEXP (x, 0)))
2808 case QImode:
2809 return (1 + (mode == DImode ? 4 : 0)
2810 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2812 case HImode:
2813 return (4 + (mode == DImode ? 4 : 0)
2814 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2816 case SImode:
2817 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2819 default:
2820 break;
2822 abort ();
2824 case CONST_INT:
2825 if (const_ok_for_arm (INTVAL (x)))
2826 return outer == SET ? 2 : -1;
2827 else if (outer == AND
2828 && const_ok_for_arm (~INTVAL (x)))
2829 return -1;
2830 else if ((outer == COMPARE
2831 || outer == PLUS || outer == MINUS)
2832 && const_ok_for_arm (-INTVAL (x)))
2833 return -1;
2834 else
2835 return 5;
2837 case CONST:
2838 case LABEL_REF:
2839 case SYMBOL_REF:
2840 return 6;
2842 case CONST_DOUBLE:
2843 if (const_double_rtx_ok_for_fpu (x))
2844 return outer == SET ? 2 : -1;
2845 else if ((outer == COMPARE || outer == PLUS)
2846 && neg_const_double_rtx_ok_for_fpu (x))
2847 return -1;
2848 return 7;
2850 default:
2851 return 99;
2855 static int
2856 arm_adjust_cost (insn, link, dep, cost)
2857 rtx insn;
2858 rtx link;
2859 rtx dep;
2860 int cost;
2862 rtx i_pat, d_pat;
2864 /* Some true dependencies can have a higher cost depending
2865 on precisely how certain input operands are used. */
2866 if (arm_is_xscale
2867 && REG_NOTE_KIND (link) == 0
2868 && recog_memoized (insn) >= 0
2869 && recog_memoized (dep) >= 0)
2871 int shift_opnum = get_attr_shift (insn);
2872 enum attr_type attr_type = get_attr_type (dep);
2874 /* If nonzero, SHIFT_OPNUM contains the operand number of a shifted
2875 operand for INSN. If we have a shifted input operand and the
2876 instruction we depend on is another ALU instruction, then we may
2877 have to account for an additional stall. */
2878 if (shift_opnum != 0 && attr_type == TYPE_NORMAL)
2880 rtx shifted_operand;
2881 int opno;
2883 /* Get the shifted operand. */
2884 extract_insn (insn);
2885 shifted_operand = recog_data.operand[shift_opnum];
2887 /* Iterate over all the operands in DEP. If we write an operand
2888 that overlaps with SHIFTED_OPERAND, then we have to increase the
2889 cost of this dependency. */
2890 extract_insn (dep);
2891 preprocess_constraints ();
2892 for (opno = 0; opno < recog_data.n_operands; opno++)
2894 /* We can ignore strict inputs. */
2895 if (recog_data.operand_type[opno] == OP_IN)
2896 continue;
2898 if (reg_overlap_mentioned_p (recog_data.operand[opno],
2899 shifted_operand))
2900 return 2;
2905 /* XXX This is not strictly true for the FPA. */
2906 if (REG_NOTE_KIND (link) == REG_DEP_ANTI
2907 || REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
2908 return 0;
2910 /* Call insns don't incur a stall, even if they follow a load. */
2911 if (REG_NOTE_KIND (link) == 0
2912 && GET_CODE (insn) == CALL_INSN)
2913 return 1;
2915 if ((i_pat = single_set (insn)) != NULL
2916 && GET_CODE (SET_SRC (i_pat)) == MEM
2917 && (d_pat = single_set (dep)) != NULL
2918 && GET_CODE (SET_DEST (d_pat)) == MEM)
2920 rtx src_mem = XEXP (SET_SRC (i_pat), 0);
2921 /* This is a load after a store; there is no conflict if the load reads
2922 from a cached area. Assume that loads from the stack, and from the
2923 constant pool are cached, and that others will miss. This is a
2924 hack. */
2926 if ((GET_CODE (src_mem) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (src_mem))
2927 || reg_mentioned_p (stack_pointer_rtx, src_mem)
2928 || reg_mentioned_p (frame_pointer_rtx, src_mem)
2929 || reg_mentioned_p (hard_frame_pointer_rtx, src_mem))
2930 return 1;
2933 return cost;
2936 /* This code has been fixed for cross compilation. */
2938 static int fpa_consts_inited = 0;
2940 static const char * const strings_fpa[8] =
2942 "0", "1", "2", "3",
2943 "4", "5", "0.5", "10"
2946 static REAL_VALUE_TYPE values_fpa[8];
2948 static void
2949 init_fpa_table ()
2951 int i;
2952 REAL_VALUE_TYPE r;
2954 for (i = 0; i < 8; i++)
2956 r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
2957 values_fpa[i] = r;
2960 fpa_consts_inited = 1;
2963 /* Return TRUE if rtx X is a valid immediate FPU constant. */
2966 const_double_rtx_ok_for_fpu (x)
2967 rtx x;
2969 REAL_VALUE_TYPE r;
2970 int i;
2972 if (!fpa_consts_inited)
2973 init_fpa_table ();
2975 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2976 if (REAL_VALUE_MINUS_ZERO (r))
2977 return 0;
2979 for (i = 0; i < 8; i++)
2980 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
2981 return 1;
2983 return 0;
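/* E.g. (const_double 2.0) and 0.5 match the table above, while 7.0
   does not; -0.0 is rejected explicitly by the MINUS_ZERO test even
   though it compares equal to the table's 0. */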
2986 /* Return TRUE if rtx X is a valid immediate FPU constant when negated. */
2989 neg_const_double_rtx_ok_for_fpu (x)
2990 rtx x;
2992 REAL_VALUE_TYPE r;
2993 int i;
2995 if (!fpa_consts_inited)
2996 init_fpa_table ();
2998 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2999 r = REAL_VALUE_NEGATE (r);
3000 if (REAL_VALUE_MINUS_ZERO (r))
3001 return 0;
3003 for (i = 0; i < 8; i++)
3004 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
3005 return 1;
3007 return 0;
3010 /* Predicates for `match_operand' and `match_operator'. */
3012 /* s_register_operand is the same as register_operand, but it doesn't accept
3013 (SUBREG (MEM)...).
3015 This function exists because, at the time it was added, it led to better
3016 code. SUBREG(MEM) always needs a reload in the places where
3017 s_register_operand is used, and this seemed to lead to excessive
3018 reloading. */
3021 s_register_operand (op, mode)
3022 rtx op;
3023 enum machine_mode mode;
3025 if (GET_MODE (op) != mode && mode != VOIDmode)
3026 return 0;
3028 if (GET_CODE (op) == SUBREG)
3029 op = SUBREG_REG (op);
3031 /* We don't consider registers whose class is NO_REGS
3032 to be a register operand. */
3033 /* XXX might have to check for lo regs only for thumb ??? */
3034 return (GET_CODE (op) == REG
3035 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3036 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
3039 /* A hard register operand (even before reload). */
3042 arm_hard_register_operand (op, mode)
3043 rtx op;
3044 enum machine_mode mode;
3046 if (GET_MODE (op) != mode && mode != VOIDmode)
3047 return 0;
3049 return (GET_CODE (op) == REG
3050 && REGNO (op) < FIRST_PSEUDO_REGISTER);
3053 /* Only accept reg, subreg(reg), const_int. */
3056 reg_or_int_operand (op, mode)
3057 rtx op;
3058 enum machine_mode mode;
3060 if (GET_CODE (op) == CONST_INT)
3061 return 1;
3063 if (GET_MODE (op) != mode && mode != VOIDmode)
3064 return 0;
3066 if (GET_CODE (op) == SUBREG)
3067 op = SUBREG_REG (op);
3069 /* We don't consider registers whose class is NO_REGS
3070 to be a register operand. */
3071 return (GET_CODE (op) == REG
3072 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3073 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
3076 /* Return 1 if OP is an item in memory, given that we are in reload. */
3079 arm_reload_memory_operand (op, mode)
3080 rtx op;
3081 enum machine_mode mode ATTRIBUTE_UNUSED;
3083 int regno = true_regnum (op);
3085 return (!CONSTANT_P (op)
3086 && (regno == -1
3087 || (GET_CODE (op) == REG
3088 && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
3091 /* Return 1 if OP is a valid memory address, but not valid for a signed byte
3092 memory access (architecture V4).
3093 MODE is QImode if called when computing constraints, or VOIDmode when
3094 emitting patterns. In this latter case we cannot use memory_operand()
3095 because it will fail on badly formed MEMs, which is precisely what we are
3096 trying to catch. */
3099 bad_signed_byte_operand (op, mode)
3100 rtx op;
3101 enum machine_mode mode ATTRIBUTE_UNUSED;
3103 #if 0
3104 if ((mode == QImode && !memory_operand (op, mode)) || GET_CODE (op) != MEM)
3105 return 0;
3106 #endif
3107 if (GET_CODE (op) != MEM)
3108 return 0;
3110 op = XEXP (op, 0);
3112 /* A sum of anything more complex than reg + reg or reg + const is bad. */
3113 if ((GET_CODE (op) == PLUS || GET_CODE (op) == MINUS)
3114 && (!s_register_operand (XEXP (op, 0), VOIDmode)
3115 || (!s_register_operand (XEXP (op, 1), VOIDmode)
3116 && GET_CODE (XEXP (op, 1)) != CONST_INT)))
3117 return 1;
3119 /* Big constants are also bad. */
3120 if (GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT
3121 && (INTVAL (XEXP (op, 1)) > 0xff
3122 || -INTVAL (XEXP (op, 1)) > 0xff))
3123 return 1;
3125 /* Everything else is good, or will automatically be made so. */
3126 return 0;
3129 /* Return TRUE for valid operands for the rhs of an ARM instruction. */
3132 arm_rhs_operand (op, mode)
3133 rtx op;
3134 enum machine_mode mode;
3136 return (s_register_operand (op, mode)
3137 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
3140 /* Return TRUE for valid operands for the
3141 rhs of an ARM instruction, or a load. */
3144 arm_rhsm_operand (op, mode)
3145 rtx op;
3146 enum machine_mode mode;
3148 return (s_register_operand (op, mode)
3149 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
3150 || memory_operand (op, mode));
3153 /* Return TRUE for valid operands for the rhs of an ARM instruction, or a
3154 constant that is valid when negated. */
3157 arm_add_operand (op, mode)
3158 rtx op;
3159 enum machine_mode mode;
3161 if (TARGET_THUMB)
3162 return thumb_cmp_operand (op, mode);
3164 return (s_register_operand (op, mode)
3165 || (GET_CODE (op) == CONST_INT
3166 && (const_ok_for_arm (INTVAL (op))
3167 || const_ok_for_arm (-INTVAL (op)))));
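/* E.g. (const_int -1) is accepted here even though 0xffffffff is not
   a valid rotated 8-bit immediate: const_ok_for_arm (1) holds, and
   an add of -1 can be output as `sub rd, rn, #1' (a sketch; the
   rewriting itself happens at output time). */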
3171 arm_not_operand (op, mode)
3172 rtx op;
3173 enum machine_mode mode;
3175 return (s_register_operand (op, mode)
3176 || (GET_CODE (op) == CONST_INT
3177 && (const_ok_for_arm (INTVAL (op))
3178 || const_ok_for_arm (~INTVAL (op)))));
3181 /* Return TRUE if the operand is a memory reference which contains an
3182 offsettable address. */
3185 offsettable_memory_operand (op, mode)
3186 rtx op;
3187 enum machine_mode mode;
3189 if (mode == VOIDmode)
3190 mode = GET_MODE (op);
3192 return (mode == GET_MODE (op)
3193 && GET_CODE (op) == MEM
3194 && offsettable_address_p (reload_completed | reload_in_progress,
3195 mode, XEXP (op, 0)));
3198 /* Return TRUE if the operand is a memory reference which is, or can be
3199 made word aligned by adjusting the offset. */
3202 alignable_memory_operand (op, mode)
3203 rtx op;
3204 enum machine_mode mode;
3206 rtx reg;
3208 if (mode == VOIDmode)
3209 mode = GET_MODE (op);
3211 if (mode != GET_MODE (op) || GET_CODE (op) != MEM)
3212 return 0;
3214 op = XEXP (op, 0);
3216 return ((GET_CODE (reg = op) == REG
3217 || (GET_CODE (op) == SUBREG
3218 && GET_CODE (reg = SUBREG_REG (op)) == REG)
3219 || (GET_CODE (op) == PLUS
3220 && GET_CODE (XEXP (op, 1)) == CONST_INT
3221 && (GET_CODE (reg = XEXP (op, 0)) == REG
3222 || (GET_CODE (XEXP (op, 0)) == SUBREG
3223 && GET_CODE (reg = SUBREG_REG (XEXP (op, 0))) == REG))))
3224 && REGNO_POINTER_ALIGN (REGNO (reg)) >= 32);
3227 /* Similar to s_register_operand, but does not allow hard integer
3228 registers. */
3231 f_register_operand (op, mode)
3232 rtx op;
3233 enum machine_mode mode;
3235 if (GET_MODE (op) != mode && mode != VOIDmode)
3236 return 0;
3238 if (GET_CODE (op) == SUBREG)
3239 op = SUBREG_REG (op);
3241 /* We don't consider registers whose class is NO_REGS
3242 to be a register operand. */
3243 return (GET_CODE (op) == REG
3244 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
3245 || REGNO_REG_CLASS (REGNO (op)) == FPU_REGS));
3248 /* Return TRUE for valid operands for the rhs of an FPU instruction. */
3251 fpu_rhs_operand (op, mode)
3252 rtx op;
3253 enum machine_mode mode;
3255 if (s_register_operand (op, mode))
3256 return TRUE;
3258 if (GET_MODE (op) != mode && mode != VOIDmode)
3259 return FALSE;
3261 if (GET_CODE (op) == CONST_DOUBLE)
3262 return const_double_rtx_ok_for_fpu (op);
3264 return FALSE;
3268 fpu_add_operand (op, mode)
3269 rtx op;
3270 enum machine_mode mode;
3272 if (s_register_operand (op, mode))
3273 return TRUE;
3275 if (GET_MODE (op) != mode && mode != VOIDmode)
3276 return FALSE;
3278 if (GET_CODE (op) == CONST_DOUBLE)
3279 return (const_double_rtx_ok_for_fpu (op)
3280 || neg_const_double_rtx_ok_for_fpu (op));
3282 return FALSE;
3285 /* Return nonzero if OP is a constant power of two. */
3288 power_of_two_operand (op, mode)
3289 rtx op;
3290 enum machine_mode mode ATTRIBUTE_UNUSED;
3292 if (GET_CODE (op) == CONST_INT)
3294 HOST_WIDE_INT value = INTVAL (op);
3296 return value != 0 && (value & (value - 1)) == 0;
3299 return FALSE;
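/* value & (value - 1) clears the lowest set bit, so it is zero
   exactly for powers of two: e.g. 8 & 7 == 0, while 12 & 11 == 8.
   The value != 0 guard excludes zero itself. */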
3302 /* Return TRUE for a valid operand of a DImode operation.
3303 Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
3304 Note that this disallows MEM(REG+REG), but allows
3305 MEM(PRE/POST_INC/DEC(REG)). */
3308 di_operand (op, mode)
3309 rtx op;
3310 enum machine_mode mode;
3312 if (s_register_operand (op, mode))
3313 return TRUE;
3315 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
3316 return FALSE;
3318 if (GET_CODE (op) == SUBREG)
3319 op = SUBREG_REG (op);
3321 switch (GET_CODE (op))
3323 case CONST_DOUBLE:
3324 case CONST_INT:
3325 return TRUE;
3327 case MEM:
3328 return memory_address_p (DImode, XEXP (op, 0));
3330 default:
3331 return FALSE;
3335 /* Like di_operand, but don't accept constants. */
3338 nonimmediate_di_operand (op, mode)
3339 rtx op;
3340 enum machine_mode mode;
3342 if (s_register_operand (op, mode))
3343 return TRUE;
3345 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
3346 return FALSE;
3348 if (GET_CODE (op) == SUBREG)
3349 op = SUBREG_REG (op);
3351 if (GET_CODE (op) == MEM)
3352 return memory_address_p (DImode, XEXP (op, 0));
3354 return FALSE;
3357 /* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
3358 Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
3359 Note that this disallows MEM(REG+REG), but allows
3360 MEM(PRE/POST_INC/DEC(REG)). */
3363 soft_df_operand (op, mode)
3364 rtx op;
3365 enum machine_mode mode;
3367 if (s_register_operand (op, mode))
3368 return TRUE;
3370 if (mode != VOIDmode && GET_MODE (op) != mode)
3371 return FALSE;
3373 if (GET_CODE (op) == SUBREG && CONSTANT_P (SUBREG_REG (op)))
3374 return FALSE;
3376 if (GET_CODE (op) == SUBREG)
3377 op = SUBREG_REG (op);
3379 switch (GET_CODE (op))
3381 case CONST_DOUBLE:
3382 return TRUE;
3384 case MEM:
3385 return memory_address_p (DFmode, XEXP (op, 0));
3387 default:
3388 return FALSE;
3392 /* Like soft_df_operand, but don't accept constants. */
3395 nonimmediate_soft_df_operand (op, mode)
3396 rtx op;
3397 enum machine_mode mode;
3399 if (s_register_operand (op, mode))
3400 return TRUE;
3402 if (mode != VOIDmode && GET_MODE (op) != mode)
3403 return FALSE;
3405 if (GET_CODE (op) == SUBREG)
3406 op = SUBREG_REG (op);
3408 if (GET_CODE (op) == MEM)
3409 return memory_address_p (DFmode, XEXP (op, 0));
3410 return FALSE;
3413 /* Return TRUE for valid index operands. */
3416 index_operand (op, mode)
3417 rtx op;
3418 enum machine_mode mode;
3420 return (s_register_operand (op, mode)
3421 || (immediate_operand (op, mode)
3422 && (GET_CODE (op) != CONST_INT
3423 || (INTVAL (op) < 4096 && INTVAL (op) > -4096))));
3426 /* Return TRUE for valid shifts by a constant. This also accepts any
3427 power of two on the (somewhat overly relaxed) assumption that the
3428 shift operator in this case was a mult. */
3431 const_shift_operand (op, mode)
3432 rtx op;
3433 enum machine_mode mode;
3435 return (power_of_two_operand (op, mode)
3436 || (immediate_operand (op, mode)
3437 && (GET_CODE (op) != CONST_INT
3438 || (INTVAL (op) < 32 && INTVAL (op) > 0))));
3441 /* Return TRUE for arithmetic operators which can be combined with a multiply
3442 (shift). */
3445 shiftable_operator (x, mode)
3446 rtx x;
3447 enum machine_mode mode;
3449 enum rtx_code code;
3451 if (GET_MODE (x) != mode)
3452 return FALSE;
3454 code = GET_CODE (x);
3456 return (code == PLUS || code == MINUS
3457 || code == IOR || code == XOR || code == AND);
3460 /* Return TRUE for binary logical operators. */
3463 logical_binary_operator (x, mode)
3464 rtx x;
3465 enum machine_mode mode;
3467 enum rtx_code code;
3469 if (GET_MODE (x) != mode)
3470 return FALSE;
3472 code = GET_CODE (x);
3474 return (code == IOR || code == XOR || code == AND);
3477 /* Return TRUE for shift operators. */
3480 shift_operator (x, mode)
3481 rtx x;
3482 enum machine_mode mode;
3484 enum rtx_code code;
3486 if (GET_MODE (x) != mode)
3487 return FALSE;
3489 code = GET_CODE (x);
3491 if (code == MULT)
3492 return power_of_two_operand (XEXP (x, 1), mode);
3494 return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
3495 || code == ROTATERT);
3498 /* Return TRUE if x is EQ or NE. */
3501 equality_operator (x, mode)
3502 rtx x;
3503 enum machine_mode mode ATTRIBUTE_UNUSED;
3505 return GET_CODE (x) == EQ || GET_CODE (x) == NE;
3508 /* Return TRUE if x is a comparison operator other than LTGT or UNEQ. */
3511 arm_comparison_operator (x, mode)
3512 rtx x;
3513 enum machine_mode mode;
3515 return (comparison_operator (x, mode)
3516 && GET_CODE (x) != LTGT
3517 && GET_CODE (x) != UNEQ);
3520 /* Return TRUE for SMIN SMAX UMIN UMAX operators. */
3523 minmax_operator (x, mode)
3524 rtx x;
3525 enum machine_mode mode;
3527 enum rtx_code code = GET_CODE (x);
3529 if (GET_MODE (x) != mode)
3530 return FALSE;
3532 return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
3535 /* Return TRUE if this is the condition code register; if we aren't given
3536 a mode, accept any class CCmode register. */
3539 cc_register (x, mode)
3540 rtx x;
3541 enum machine_mode mode;
3543 if (mode == VOIDmode)
3545 mode = GET_MODE (x);
3547 if (GET_MODE_CLASS (mode) != MODE_CC)
3548 return FALSE;
3551 if ( GET_MODE (x) == mode
3552 && GET_CODE (x) == REG
3553 && REGNO (x) == CC_REGNUM)
3554 return TRUE;
3556 return FALSE;
3559 /* Return TRUE if this is the condition code register; if we aren't given
3560 a mode, accept any class CCmode register which indicates a dominance
3561 expression. */
3564 dominant_cc_register (x, mode)
3565 rtx x;
3566 enum machine_mode mode;
3568 if (mode == VOIDmode)
3570 mode = GET_MODE (x);
3572 if (GET_MODE_CLASS (mode) != MODE_CC)
3573 return FALSE;
3576 if ( mode != CC_DNEmode && mode != CC_DEQmode
3577 && mode != CC_DLEmode && mode != CC_DLTmode
3578 && mode != CC_DGEmode && mode != CC_DGTmode
3579 && mode != CC_DLEUmode && mode != CC_DLTUmode
3580 && mode != CC_DGEUmode && mode != CC_DGTUmode)
3581 return FALSE;
3583 return cc_register (x, mode);
3586 /* Return TRUE if X references a SYMBOL_REF. */
3589 symbol_mentioned_p (x)
3590 rtx x;
3592 const char * fmt;
3593 int i;
3595 if (GET_CODE (x) == SYMBOL_REF)
3596 return 1;
3598 fmt = GET_RTX_FORMAT (GET_CODE (x));
3600 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3602 if (fmt[i] == 'E')
3604 int j;
3606 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3607 if (symbol_mentioned_p (XVECEXP (x, i, j)))
3608 return 1;
3610 else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
3611 return 1;
3614 return 0;
3617 /* Return TRUE if X references a LABEL_REF. */
3620 label_mentioned_p (x)
3621 rtx x;
3623 const char * fmt;
3624 int i;
3626 if (GET_CODE (x) == LABEL_REF)
3627 return 1;
3629 fmt = GET_RTX_FORMAT (GET_CODE (x));
3630 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3632 if (fmt[i] == 'E')
3634 int j;
3636 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3637 if (label_mentioned_p (XVECEXP (x, i, j)))
3638 return 1;
3640 else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
3641 return 1;
3644 return 0;
3647 enum rtx_code
3648 minmax_code (x)
3649 rtx x;
3651 enum rtx_code code = GET_CODE (x);
3653 if (code == SMAX)
3654 return GE;
3655 else if (code == SMIN)
3656 return LE;
3657 else if (code == UMIN)
3658 return LEU;
3659 else if (code == UMAX)
3660 return GEU;
3662 abort ();
3665 /* Return 1 if memory locations are adjacent. */
3668 adjacent_mem_locations (a, b)
3669 rtx a, b;
3671 if ((GET_CODE (XEXP (a, 0)) == REG
3672 || (GET_CODE (XEXP (a, 0)) == PLUS
3673 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
3674 && (GET_CODE (XEXP (b, 0)) == REG
3675 || (GET_CODE (XEXP (b, 0)) == PLUS
3676 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
3678 int val0 = 0, val1 = 0;
3679 int reg0, reg1;
3681 if (GET_CODE (XEXP (a, 0)) == PLUS)
3683 reg0 = REGNO (XEXP (XEXP (a, 0), 0));
3684 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
3686 else
3687 reg0 = REGNO (XEXP (a, 0));
3689 if (GET_CODE (XEXP (b, 0)) == PLUS)
3691 reg1 = REGNO (XEXP (XEXP (b, 0), 0));
3692 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
3694 else
3695 reg1 = REGNO (XEXP (b, 0));
3697 return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
3699 return 0;
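/* E.g. (mem (reg r4)) and (mem (plus (reg r4) (const_int 4))) are
   adjacent (val0 = 0, val1 = 4), in either order; [r4, #0] and
   [r4, #8] are not, and different base registers always fail. */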
3702 /* Return 1 if OP is a load multiple operation. It is known to be
3703 parallel and the first section will be tested. */
3706 load_multiple_operation (op, mode)
3707 rtx op;
3708 enum machine_mode mode ATTRIBUTE_UNUSED;
3710 HOST_WIDE_INT count = XVECLEN (op, 0);
3711 int dest_regno;
3712 rtx src_addr;
3713 HOST_WIDE_INT i = 1, base = 0;
3714 rtx elt;
3716 if (count <= 1
3717 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
3718 return 0;
3720 /* Check to see if this might be a write-back. */
3721 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
3723 i++;
3724 base = 1;
3726 /* Now check it more carefully. */
3727 if (GET_CODE (SET_DEST (elt)) != REG
3728 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
3729 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
3730 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
3731 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
3732 return 0;
3735 /* Perform a quick check so we don't blow up below. */
3736 if (count <= i
3737 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
3738 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
3739 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
3740 return 0;
3742 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
3743 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);
3745 for (; i < count; i++)
3747 elt = XVECEXP (op, 0, i);
3749 if (GET_CODE (elt) != SET
3750 || GET_CODE (SET_DEST (elt)) != REG
3751 || GET_MODE (SET_DEST (elt)) != SImode
3752 || REGNO (SET_DEST (elt)) != (unsigned int)(dest_regno + i - base)
3753 || GET_CODE (SET_SRC (elt)) != MEM
3754 || GET_MODE (SET_SRC (elt)) != SImode
3755 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
3756 || !rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
3757 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
3758 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
3759 return 0;
3762 return 1;
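/* A minimal PARALLEL accepted by this predicate (a sketch with no
   write-back, so i starts at 1 and base stays 0):

     (parallel [(set (reg:SI 4) (mem:SI (reg:SI 1)))
                (set (reg:SI 5) (mem:SI (plus:SI (reg:SI 1)
                                                 (const_int 4))))]) */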
3765 /* Return 1 if OP is a store multiple operation. It is known to be
3766 parallel and the first section will be tested. */
3769 store_multiple_operation (op, mode)
3770 rtx op;
3771 enum machine_mode mode ATTRIBUTE_UNUSED;
3773 HOST_WIDE_INT count = XVECLEN (op, 0);
3774 int src_regno;
3775 rtx dest_addr;
3776 HOST_WIDE_INT i = 1, base = 0;
3777 rtx elt;
3779 if (count <= 1
3780 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
3781 return 0;
3783 /* Check to see if this might be a write-back. */
3784 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
3786 i++;
3787 base = 1;
3789 /* Now check it more carefully. */
3790 if (GET_CODE (SET_DEST (elt)) != REG
3791 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
3792 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
3793 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
3794 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
3795 return 0;
3798 /* Perform a quick check so we don't blow up below. */
3799 if (count <= i
3800 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
3801 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
3802 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
3803 return 0;
3805 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
3806 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);
3808 for (; i < count; i++)
3810 elt = XVECEXP (op, 0, i);
3812 if (GET_CODE (elt) != SET
3813 || GET_CODE (SET_SRC (elt)) != REG
3814 || GET_MODE (SET_SRC (elt)) != SImode
3815 || REGNO (SET_SRC (elt)) != (unsigned int)(src_regno + i - base)
3816 || GET_CODE (SET_DEST (elt)) != MEM
3817 || GET_MODE (SET_DEST (elt)) != SImode
3818 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
3819 || !rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
3820 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
3821 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
3822 return 0;
3825 return 1;
3829 load_multiple_sequence (operands, nops, regs, base, load_offset)
3830 rtx * operands;
3831 int nops;
3832 int * regs;
3833 int * base;
3834 HOST_WIDE_INT * load_offset;
3836 int unsorted_regs[4];
3837 HOST_WIDE_INT unsorted_offsets[4];
3838 int order[4];
3839 int base_reg = -1;
3840 int i;
3842 /* Can only handle 2, 3, or 4 insns at present,
3843 though could be easily extended if required. */
3844 if (nops < 2 || nops > 4)
3845 abort ();
3847 /* Loop over the operands and check that the memory references are
3848 suitable (ie immediate offsets from the same base register). At
3849 the same time, extract the target register, and the memory
3850 offsets. */
3851 for (i = 0; i < nops; i++)
3853 rtx reg;
3854 rtx offset;
3856 /* Convert a subreg of a mem into the mem itself. */
3857 if (GET_CODE (operands[nops + i]) == SUBREG)
3858 operands[nops + i] = alter_subreg (operands + (nops + i));
3860 if (GET_CODE (operands[nops + i]) != MEM)
3861 abort ();
3863 /* Don't reorder volatile memory references; it doesn't seem worth
3864 looking for the case where the order is ok anyway. */
3865 if (MEM_VOLATILE_P (operands[nops + i]))
3866 return 0;
3868 offset = const0_rtx;
3870 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
3871 || (GET_CODE (reg) == SUBREG
3872 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3873 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
3874 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
3875 == REG)
3876 || (GET_CODE (reg) == SUBREG
3877 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3878 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
3879 == CONST_INT)))
3881 if (i == 0)
3883 base_reg = REGNO (reg);
3884 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
3885 ? REGNO (operands[i])
3886 : REGNO (SUBREG_REG (operands[i])));
3887 order[0] = 0;
3889 else
3891 if (base_reg != (int) REGNO (reg))
3892 /* Not addressed from the same base register. */
3893 return 0;
3895 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
3896 ? REGNO (operands[i])
3897 : REGNO (SUBREG_REG (operands[i])));
3898 if (unsorted_regs[i] < unsorted_regs[order[0]])
3899 order[0] = i;
3902 /* If it isn't an integer register, or if it overwrites the
3903 base register but isn't the last insn in the list, then
3904 we can't do this. */
3905 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14
3906 || (i != nops - 1 && unsorted_regs[i] == base_reg))
3907 return 0;
3909 unsorted_offsets[i] = INTVAL (offset);
3911 else
3912 /* Not a suitable memory address. */
3913 return 0;
3916 /* All the useful information has now been extracted from the
3917 operands into unsorted_regs and unsorted_offsets; additionally,
3918 order[0] has been set to the lowest numbered register in the
3919 list. Sort the registers into order, and check that the memory
3920 offsets are ascending and adjacent. */
3922 for (i = 1; i < nops; i++)
3924 int j;
3926 order[i] = order[i - 1];
3927 for (j = 0; j < nops; j++)
3928 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
3929 && (order[i] == order[i - 1]
3930 || unsorted_regs[j] < unsorted_regs[order[i]]))
3931 order[i] = j;
3933 /* Have we found a suitable register? If not, one must be used more
3934 than once. */
3935 if (order[i] == order[i - 1])
3936 return 0;
3938 /* Is the memory address adjacent and ascending? */
3939 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
3940 return 0;
3943 if (base)
3945 *base = base_reg;
3947 for (i = 0; i < nops; i++)
3948 regs[i] = unsorted_regs[order[i]];
3950 *load_offset = unsorted_offsets[order[0]];
3953 if (unsorted_offsets[order[0]] == 0)
3954 return 1; /* ldmia */
3956 if (unsorted_offsets[order[0]] == 4)
3957 return 2; /* ldmib */
3959 if (unsorted_offsets[order[nops - 1]] == 0)
3960 return 3; /* ldmda */
3962 if (unsorted_offsets[order[nops - 1]] == -4)
3963 return 4; /* ldmdb */
3965 /* For ARM8,9 & StrongARM, 2 ldr instructions are faster than an ldm
3966 if the offset isn't small enough. The reason 2 ldrs are faster
3967 is because these ARMs are able to do more than one cache access
3968 in a single cycle. The ARM9 and StrongARM have Harvard caches,
3969 whilst the ARM8 has a double bandwidth cache. This means that
3970 these cores can do both an instruction fetch and a data fetch in
3971 a single cycle, so the trick of calculating the address into a
3972 scratch register (one of the result regs) and then doing a load
3973 multiple actually becomes slower (and no smaller in code size).
3974 That is the transformation
3976 ldr rd1, [rbase + offset]
3977 ldr rd2, [rbase + offset + 4]
3981 add rd1, rbase, offset
3982 ldmia rd1, {rd1, rd2}
3984 produces worse code -- '3 cycles + any stalls on rd2' instead of
3985 '2 cycles + any stalls on rd2'. On ARMs with only one cache
3986 access per cycle, the first sequence could never complete in less
3987 than 6 cycles, whereas the ldm sequence would only take 5 and
3988 would make better use of sequential accesses if not hitting the
3989 cache.
3991 We cheat here and test 'arm_ld_sched' which we currently know to
3992 only be true for the ARM8, ARM9 and StrongARM. If this ever
3993 changes, then the test below needs to be reworked. */
3994 if (nops == 2 && arm_ld_sched)
3995 return 0;
3997 /* Can't do it without setting up the offset; only do this if it takes
3998 no more than one insn. */
3999 return (const_ok_for_arm (unsorted_offsets[order[0]])
4000 || const_ok_for_arm (-unsorted_offsets[order[0]])) ? 5 : 0;
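/* Example return values (a sketch): loading {r4, r5} from [r2] and
   [r2, #4] gives 1 (ldmia); from [r2, #4] and [r2, #8] gives 2
   (ldmib); from [r2, #-4] and [r2] gives 3 (ldmda); from [r2, #-8]
   and [r2, #-4] gives 4 (ldmdb).  Other offsets reachable with one
   add/sub give 5, subject to the arm_ld_sched test above. */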
4003 const char *
4004 emit_ldm_seq (operands, nops)
4005 rtx * operands;
4006 int nops;
4008 int regs[4];
4009 int base_reg;
4010 HOST_WIDE_INT offset;
4011 char buf[100];
4012 int i;
4014 switch (load_multiple_sequence (operands, nops, regs, &base_reg, &offset))
4016 case 1:
4017 strcpy (buf, "ldm%?ia\t");
4018 break;
4020 case 2:
4021 strcpy (buf, "ldm%?ib\t");
4022 break;
4024 case 3:
4025 strcpy (buf, "ldm%?da\t");
4026 break;
4028 case 4:
4029 strcpy (buf, "ldm%?db\t");
4030 break;
4032 case 5:
4033 if (offset >= 0)
4034 sprintf (buf, "add%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
4035 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
4036 (long) offset);
4037 else
4038 sprintf (buf, "sub%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
4039 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
4040 (long) -offset);
4041 output_asm_insn (buf, operands);
4042 base_reg = regs[0];
4043 strcpy (buf, "ldm%?ia\t");
4044 break;
4046 default:
4047 abort ();
4050 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
4051 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
4053 for (i = 1; i < nops; i++)
4054 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
4055 reg_names[regs[i]]);
4057 strcat (buf, "}\t%@ phole ldm");
4059 output_asm_insn (buf, operands);
4060 return "";
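/* E.g. a case-1 sequence with base r2 and regs {r4, r5} is emitted
   as "ldm%?ia\tr2, {r4, r5}\t%@ phole ldm" (a sketch assuming an
   empty REGISTER_PREFIX); %? and %@ are expanded later by the
   output machinery. */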
4064 store_multiple_sequence (operands, nops, regs, base, load_offset)
4065 rtx * operands;
4066 int nops;
4067 int * regs;
4068 int * base;
4069 HOST_WIDE_INT * load_offset;
4071 int unsorted_regs[4];
4072 HOST_WIDE_INT unsorted_offsets[4];
4073 int order[4];
4074 int base_reg = -1;
4075 int i;
4077 /* Can only handle 2, 3, or 4 insns at present, though could be easily
4078 extended if required. */
4079 if (nops < 2 || nops > 4)
4080 abort ();
4082 /* Loop over the operands and check that the memory references are
4083 suitable (i.e. immediate offsets from the same base register). At
4084 the same time, extract the target register, and the memory
4085 offsets. */
4086 for (i = 0; i < nops; i++)
4088 rtx reg;
4089 rtx offset;
4091 /* Convert a subreg of a mem into the mem itself. */
4092 if (GET_CODE (operands[nops + i]) == SUBREG)
4093 operands[nops + i] = alter_subreg (operands + (nops + i));
4095 if (GET_CODE (operands[nops + i]) != MEM)
4096 abort ();
4098 /* Don't reorder volatile memory references; it doesn't seem worth
4099 looking for the case where the order is ok anyway. */
4100 if (MEM_VOLATILE_P (operands[nops + i]))
4101 return 0;
4103 offset = const0_rtx;
4105 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
4106 || (GET_CODE (reg) == SUBREG
4107 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4108 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
4109 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
4110 == REG)
4111 || (GET_CODE (reg) == SUBREG
4112 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
4113 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
4114 == CONST_INT)))
4116 if (i == 0)
4118 base_reg = REGNO (reg);
4119 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
4120 ? REGNO (operands[i])
4121 : REGNO (SUBREG_REG (operands[i])));
4122 order[0] = 0;
4124 else
4126 if (base_reg != (int) REGNO (reg))
4127 /* Not addressed from the same base register. */
4128 return 0;
4130 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
4131 ? REGNO (operands[i])
4132 : REGNO (SUBREG_REG (operands[i])));
4133 if (unsorted_regs[i] < unsorted_regs[order[0]])
4134 order[0] = i;
4137 /* If it isn't an integer register, then we can't do this. */
4138 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14)
4139 return 0;
4141 unsorted_offsets[i] = INTVAL (offset);
4143 else
4144 /* Not a suitable memory address. */
4145 return 0;
4148 /* All the useful information has now been extracted from the
4149 operands into unsorted_regs and unsorted_offsets; additionally,
4150 order[0] has been set to the lowest numbered register in the
4151 list. Sort the registers into order, and check that the memory
4152 offsets are ascending and adjacent. */
4154 for (i = 1; i < nops; i++)
4156 int j;
4158 order[i] = order[i - 1];
4159 for (j = 0; j < nops; j++)
4160 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
4161 && (order[i] == order[i - 1]
4162 || unsorted_regs[j] < unsorted_regs[order[i]]))
4163 order[i] = j;
4165 /* Have we found a suitable register? If not, one must be used more
4166 than once. */
4167 if (order[i] == order[i - 1])
4168 return 0;
4170 /* Are the memory addresses adjacent and ascending? */
4171 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
4172 return 0;
4175 if (base)
4177 *base = base_reg;
4179 for (i = 0; i < nops; i++)
4180 regs[i] = unsorted_regs[order[i]];
4182 *load_offset = unsorted_offsets[order[0]];
4185 if (unsorted_offsets[order[0]] == 0)
4186 return 1; /* stmia */
4188 if (unsorted_offsets[order[0]] == 4)
4189 return 2; /* stmib */
4191 if (unsorted_offsets[order[nops - 1]] == 0)
4192 return 3; /* stmda */
4194 if (unsorted_offsets[order[nops - 1]] == -4)
4195 return 4; /* stmdb */
4197 return 0;
4200 const char *
4201 emit_stm_seq (operands, nops)
4202 rtx * operands;
4203 int nops;
4205 int regs[4];
4206 int base_reg;
4207 HOST_WIDE_INT offset;
4208 char buf[100];
4209 int i;
4211 switch (store_multiple_sequence (operands, nops, regs, &base_reg, &offset))
4213 case 1:
4214 strcpy (buf, "stm%?ia\t");
4215 break;
4217 case 2:
4218 strcpy (buf, "stm%?ib\t");
4219 break;
4221 case 3:
4222 strcpy (buf, "stm%?da\t");
4223 break;
4225 case 4:
4226 strcpy (buf, "stm%?db\t");
4227 break;
4229 default:
4230 abort ();
4233 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
4234 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
4236 for (i = 1; i < nops; i++)
4237 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
4238 reg_names[regs[i]]);
4240 strcat (buf, "}\t%@ phole stm");
4242 output_asm_insn (buf, operands);
4243 return "";
4246 int
4247 multi_register_push (op, mode)
4248 rtx op;
4249 enum machine_mode mode ATTRIBUTE_UNUSED;
4251 if (GET_CODE (op) != PARALLEL
4252 || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
4253 || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
4254 || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != UNSPEC_PUSH_MULT))
4255 return 0;
4257 return 1;
4260 /* Routines for use in generating RTL. */
4262 rtx
4263 arm_gen_load_multiple (base_regno, count, from, up, write_back, unchanging_p,
4264 in_struct_p, scalar_p)
4265 int base_regno;
4266 int count;
4267 rtx from;
4268 int up;
4269 int write_back;
4270 int unchanging_p;
4271 int in_struct_p;
4272 int scalar_p;
4274 int i = 0, j;
4275 rtx result;
4276 int sign = up ? 1 : -1;
4277 rtx mem;
4279 /* XScale has load-store double instructions, but they have stricter
4280 alignment requirements than load-store multiple, so we cannot
4281 use them.
4283 For XScale ldm requires 2 + NREGS cycles to complete and blocks
4284 the pipeline until completion.
4286 NREGS CYCLES
4287 1 3
4288 2 4
4289 3 5
4290 4 6
4292 An ldr instruction takes 1-3 cycles, but does not block the
4293 pipeline.
4295 NREGS CYCLES
4296 1 1-3
4297 2 2-6
4298 3 3-9
4299 4 4-12
4301 Best case ldr will always win. However, the more ldr instructions
4302 we issue, the less likely we are to be able to schedule them well.
4303 Using ldr instructions also increases code size.
4305 As a compromise, we use ldr for counts of 1 or 2 regs, and ldm
4306 for counts of 3 or 4 regs. */
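/* Worked example of the trade-off: for COUNT == 2 an ldm costs
   2 + 2 = 4 cycles and blocks the pipeline, whereas two ldrs cost
   2-6 cycles but do not block it, hence the ldr expansion below for
   small counts.  */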
4307 if (arm_is_xscale && count <= 2 && ! optimize_size)
4309 rtx seq;
4311 start_sequence ();
4313 for (i = 0; i < count; i++)
4315 mem = gen_rtx_MEM (SImode, plus_constant (from, i * 4 * sign));
4316 RTX_UNCHANGING_P (mem) = unchanging_p;
4317 MEM_IN_STRUCT_P (mem) = in_struct_p;
4318 MEM_SCALAR_P (mem) = scalar_p;
4319 emit_move_insn (gen_rtx_REG (SImode, base_regno + i), mem);
4322 if (write_back)
4323 emit_move_insn (from, plus_constant (from, count * 4 * sign));
4325 seq = gen_sequence ();
4326 end_sequence ();
4328 return seq;
4331 result = gen_rtx_PARALLEL (VOIDmode,
4332 rtvec_alloc (count + (write_back ? 1 : 0)));
4333 if (write_back)
4335 XVECEXP (result, 0, 0)
4336 = gen_rtx_SET (GET_MODE (from), from,
4337 plus_constant (from, count * 4 * sign));
4338 i = 1;
4339 count++;
4342 for (j = 0; i < count; i++, j++)
4344 mem = gen_rtx_MEM (SImode, plus_constant (from, j * 4 * sign));
4345 RTX_UNCHANGING_P (mem) = unchanging_p;
4346 MEM_IN_STRUCT_P (mem) = in_struct_p;
4347 MEM_SCALAR_P (mem) = scalar_p;
4348 XVECEXP (result, 0, i)
4349 = gen_rtx_SET (VOIDmode, gen_rtx_REG (SImode, base_regno + j), mem);
4352 return result;
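/* The PARALLEL built above is the RTL shape of a load-multiple: an
   optional base-register update (when WRITE_BACK) followed by one
   SImode register load per word; e.g. COUNT == 4 with WRITE_BACK, UP
   and BASE_REGNO == 0 corresponds to "ldmia from!, {r0-r3}".  */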
4355 rtx
4356 arm_gen_store_multiple (base_regno, count, to, up, write_back, unchanging_p,
4357 in_struct_p, scalar_p)
4358 int base_regno;
4359 int count;
4360 rtx to;
4361 int up;
4362 int write_back;
4363 int unchanging_p;
4364 int in_struct_p;
4365 int scalar_p;
4367 int i = 0, j;
4368 rtx result;
4369 int sign = up ? 1 : -1;
4370 rtx mem;
4372 /* See arm_gen_load_multiple for discussion of
4373 the pros/cons of ldm/stm usage for XScale. */
4374 if (arm_is_xscale && count <= 2 && ! optimize_size)
4376 rtx seq;
4378 start_sequence ();
4380 for (i = 0; i < count; i++)
4382 mem = gen_rtx_MEM (SImode, plus_constant (to, i * 4 * sign));
4383 RTX_UNCHANGING_P (mem) = unchanging_p;
4384 MEM_IN_STRUCT_P (mem) = in_struct_p;
4385 MEM_SCALAR_P (mem) = scalar_p;
4386 emit_move_insn (mem, gen_rtx_REG (SImode, base_regno + i));
4389 if (write_back)
4390 emit_move_insn (to, plus_constant (to, count * 4 * sign));
4392 seq = gen_sequence ();
4393 end_sequence ();
4395 return seq;
4398 result = gen_rtx_PARALLEL (VOIDmode,
4399 rtvec_alloc (count + (write_back ? 1 : 0)));
4400 if (write_back)
4402 XVECEXP (result, 0, 0)
4403 = gen_rtx_SET (GET_MODE (to), to,
4404 plus_constant (to, count * 4 * sign));
4405 i = 1;
4406 count++;
4409 for (j = 0; i < count; i++, j++)
4411 mem = gen_rtx_MEM (SImode, plus_constant (to, j * 4 * sign));
4412 RTX_UNCHANGING_P (mem) = unchanging_p;
4413 MEM_IN_STRUCT_P (mem) = in_struct_p;
4414 MEM_SCALAR_P (mem) = scalar_p;
4416 XVECEXP (result, 0, i)
4417 = gen_rtx_SET (VOIDmode, mem, gen_rtx_REG (SImode, base_regno + j));
4420 return result;
4423 int
4424 arm_gen_movstrqi (operands)
4425 rtx * operands;
4427 HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
4428 int i;
4429 rtx src, dst;
4430 rtx st_src, st_dst, fin_src, fin_dst;
4431 rtx part_bytes_reg = NULL;
4432 rtx mem;
4433 int dst_unchanging_p, dst_in_struct_p, src_unchanging_p, src_in_struct_p;
4434 int dst_scalar_p, src_scalar_p;
4436 if (GET_CODE (operands[2]) != CONST_INT
4437 || GET_CODE (operands[3]) != CONST_INT
4438 || INTVAL (operands[2]) > 64
4439 || INTVAL (operands[3]) & 3)
4440 return 0;
4442 st_dst = XEXP (operands[0], 0);
4443 st_src = XEXP (operands[1], 0);
4445 dst_unchanging_p = RTX_UNCHANGING_P (operands[0]);
4446 dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
4447 dst_scalar_p = MEM_SCALAR_P (operands[0]);
4448 src_unchanging_p = RTX_UNCHANGING_P (operands[1]);
4449 src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);
4450 src_scalar_p = MEM_SCALAR_P (operands[1]);
4452 fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
4453 fin_src = src = copy_to_mode_reg (SImode, st_src);
4455 in_words_to_go = NUM_INTS (INTVAL (operands[2]));
4456 out_words_to_go = INTVAL (operands[2]) / 4;
4457 last_bytes = INTVAL (operands[2]) & 3;
4459 if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
4460 part_bytes_reg = gen_rtx_REG (SImode, (in_words_to_go - 1) & 3);
4462 for (i = 0; in_words_to_go >= 2; i+=4)
4464 if (in_words_to_go > 4)
4465 emit_insn (arm_gen_load_multiple (0, 4, src, TRUE, TRUE,
4466 src_unchanging_p,
4467 src_in_struct_p,
4468 src_scalar_p));
4469 else
4470 emit_insn (arm_gen_load_multiple (0, in_words_to_go, src, TRUE,
4471 FALSE, src_unchanging_p,
4472 src_in_struct_p, src_scalar_p));
4474 if (out_words_to_go)
4476 if (out_words_to_go > 4)
4477 emit_insn (arm_gen_store_multiple (0, 4, dst, TRUE, TRUE,
4478 dst_unchanging_p,
4479 dst_in_struct_p,
4480 dst_scalar_p));
4481 else if (out_words_to_go != 1)
4482 emit_insn (arm_gen_store_multiple (0, out_words_to_go,
4483 dst, TRUE,
4484 (last_bytes == 0
4485 ? FALSE : TRUE),
4486 dst_unchanging_p,
4487 dst_in_struct_p,
4488 dst_scalar_p));
4489 else
4491 mem = gen_rtx_MEM (SImode, dst);
4492 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4493 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4494 MEM_SCALAR_P (mem) = dst_scalar_p;
4495 emit_move_insn (mem, gen_rtx_REG (SImode, 0));
4496 if (last_bytes != 0)
4497 emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
4501 in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
4502 out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
4505 /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do. */
4506 if (out_words_to_go)
4508 rtx sreg;
4510 mem = gen_rtx_MEM (SImode, src);
4511 RTX_UNCHANGING_P (mem) = src_unchanging_p;
4512 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
4513 MEM_SCALAR_P (mem) = src_scalar_p;
4514 emit_move_insn (sreg = gen_reg_rtx (SImode), mem);
4515 emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));
4517 mem = gen_rtx_MEM (SImode, dst);
4518 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4519 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4520 MEM_SCALAR_P (mem) = dst_scalar_p;
4521 emit_move_insn (mem, sreg);
4522 emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
4523 in_words_to_go--;
4525 if (in_words_to_go) /* Sanity check */
4526 abort ();
4529 if (in_words_to_go)
4531 if (in_words_to_go < 0)
4532 abort ();
4534 mem = gen_rtx_MEM (SImode, src);
4535 RTX_UNCHANGING_P (mem) = src_unchanging_p;
4536 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
4537 MEM_SCALAR_P (mem) = src_scalar_p;
4538 part_bytes_reg = copy_to_mode_reg (SImode, mem);
4541 if (last_bytes && part_bytes_reg == NULL)
4542 abort ();
4544 if (BYTES_BIG_ENDIAN && last_bytes)
4546 rtx tmp = gen_reg_rtx (SImode);
4548 /* The bytes we want are in the top end of the word. */
4549 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
4550 GEN_INT (8 * (4 - last_bytes))));
4551 part_bytes_reg = tmp;
4553 while (last_bytes)
4555 mem = gen_rtx_MEM (QImode, plus_constant (dst, last_bytes - 1));
4556 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4557 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4558 MEM_SCALAR_P (mem) = dst_scalar_p;
4559 emit_move_insn (mem, gen_lowpart (QImode, part_bytes_reg));
4561 if (--last_bytes)
4563 tmp = gen_reg_rtx (SImode);
4564 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
4565 part_bytes_reg = tmp;
4570 else
4572 if (last_bytes > 1)
4574 mem = gen_rtx_MEM (HImode, dst);
4575 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4576 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4577 MEM_SCALAR_P (mem) = dst_scalar_p;
4578 emit_move_insn (mem, gen_lowpart (HImode, part_bytes_reg));
4579 last_bytes -= 2;
4580 if (last_bytes)
4582 rtx tmp = gen_reg_rtx (SImode);
4584 emit_insn (gen_addsi3 (dst, dst, GEN_INT (2)));
4585 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (16)));
4586 part_bytes_reg = tmp;
4590 if (last_bytes)
4592 mem = gen_rtx_MEM (QImode, dst);
4593 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4594 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4595 MEM_SCALAR_P (mem) = dst_scalar_p;
4596 emit_move_insn (mem, gen_lowpart (QImode, part_bytes_reg));
4600 return 1;
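/* Worked example (hypothetical sizes): a 14-byte copy gives
   in_words_to_go = 4 (rounded up), out_words_to_go = 3 and
   last_bytes = 2, so the loop above moves three whole words and the
   tail code finishes with a halfword store (or byte stores when
   BYTES_BIG_ENDIAN).  */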
4603 /* Generate a memory reference for a half word, such that it will be loaded
4604 into the top 16 bits of the word. We can assume that the address is
4605 known to be alignable and of the form reg, or plus (reg, const). */
4607 rtx
4608 arm_gen_rotated_half_load (memref)
4609 rtx memref;
4611 HOST_WIDE_INT offset = 0;
4612 rtx base = XEXP (memref, 0);
4614 if (GET_CODE (base) == PLUS)
4616 offset = INTVAL (XEXP (base, 1));
4617 base = XEXP (base, 0);
4620 /* If we aren't allowed to generate unaligned addresses, then fail. */
4621 if (TARGET_MMU_TRAPS
4622 && ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 0)))
4623 return NULL;
4625 base = gen_rtx_MEM (SImode, plus_constant (base, offset & ~2));
4627 if ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 2))
4628 return base;
4630 return gen_rtx_ROTATE (SImode, base, GEN_INT (16));
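/* For example, on a little-endian target a halfword at offset 2
   already occupies the top 16 bits of its containing word, so the
   SImode load is returned unrotated; a halfword at offset 0 sits in
   the bottom 16 bits and the ROTATE by 16 swaps it into place.  */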
4633 /* Select a dominance comparison mode if possible. We support three forms.
4634 COND_OR == 0 => (X && Y)
4635 COND_OR == 1 => ((! X) || Y)
4636 COND_OR == 2 => (X || Y)
4637 If we are unable to support a dominance comparison we return CCmode.
4638 This will then fail to match for the RTL expressions that generate this
4639 call. */
4641 static enum machine_mode
4642 select_dominance_cc_mode (x, y, cond_or)
4643 rtx x;
4644 rtx y;
4645 HOST_WIDE_INT cond_or;
4647 enum rtx_code cond1, cond2;
4648 int swapped = 0;
4650 /* Currently we will probably get the wrong result if the individual
4651 comparisons are not simple. This also ensures that it is safe to
4652 reverse a comparison if necessary. */
4653 if ((arm_select_cc_mode (cond1 = GET_CODE (x), XEXP (x, 0), XEXP (x, 1))
4654 != CCmode)
4655 || (arm_select_cc_mode (cond2 = GET_CODE (y), XEXP (y, 0), XEXP (y, 1))
4656 != CCmode))
4657 return CCmode;
4659 /* The if_then_else variant of this tests the second condition if the
4660 first passes, but is true if the first fails. Reverse the first
4661 condition to get a true "inclusive-or" expression. */
4662 if (cond_or == 1)
4663 cond1 = reverse_condition (cond1);
4665 /* If the comparisons are not equal, and one doesn't dominate the other,
4666 then we can't do this. */
4667 if (cond1 != cond2
4668 && !comparison_dominates_p (cond1, cond2)
4669 && (swapped = 1, !comparison_dominates_p (cond2, cond1)))
4670 return CCmode;
4672 if (swapped)
4674 enum rtx_code temp = cond1;
4675 cond1 = cond2;
4676 cond2 = temp;
4679 switch (cond1)
4681 case EQ:
4682 if (cond2 == EQ || !cond_or)
4683 return CC_DEQmode;
4685 switch (cond2)
4687 case LE: return CC_DLEmode;
4688 case LEU: return CC_DLEUmode;
4689 case GE: return CC_DGEmode;
4690 case GEU: return CC_DGEUmode;
4691 default: break;
4694 break;
4696 case LT:
4697 if (cond2 == LT || !cond_or)
4698 return CC_DLTmode;
4699 if (cond2 == LE)
4700 return CC_DLEmode;
4701 if (cond2 == NE)
4702 return CC_DNEmode;
4703 break;
4705 case GT:
4706 if (cond2 == GT || !cond_or)
4707 return CC_DGTmode;
4708 if (cond2 == GE)
4709 return CC_DGEmode;
4710 if (cond2 == NE)
4711 return CC_DNEmode;
4712 break;
4714 case LTU:
4715 if (cond2 == LTU || !cond_or)
4716 return CC_DLTUmode;
4717 if (cond2 == LEU)
4718 return CC_DLEUmode;
4719 if (cond2 == NE)
4720 return CC_DNEmode;
4721 break;
4723 case GTU:
4724 if (cond2 == GTU || !cond_or)
4725 return CC_DGTUmode;
4726 if (cond2 == GEU)
4727 return CC_DGEUmode;
4728 if (cond2 == NE)
4729 return CC_DNEmode;
4730 break;
4732 /* The remaining cases only occur when both comparisons are the
4733 same. */
4734 case NE:
4735 return CC_DNEmode;
4737 case LE:
4738 return CC_DLEmode;
4740 case GE:
4741 return CC_DGEmode;
4743 case LEU:
4744 return CC_DLEUmode;
4746 case GEU:
4747 return CC_DGEUmode;
4749 default:
4750 break;
4753 abort ();
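/* Example of the selection above: (a == 0) && (b == 0) gives
   cond1 = cond2 = EQ with COND_OR == 0, so CC_DEQmode is returned and
   both tests can be resolved by one conditional-compare sequence.  */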
4756 enum machine_mode
4757 arm_select_cc_mode (op, x, y)
4758 enum rtx_code op;
4759 rtx x;
4760 rtx y;
4762 /* All floating point compares return CCFP if it is an equality
4763 comparison, and CCFPE otherwise. */
4764 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
4766 switch (op)
4768 case EQ:
4769 case NE:
4770 case UNORDERED:
4771 case ORDERED:
4772 case UNLT:
4773 case UNLE:
4774 case UNGT:
4775 case UNGE:
4776 case UNEQ:
4777 case LTGT:
4778 return CCFPmode;
4780 case LT:
4781 case LE:
4782 case GT:
4783 case GE:
4784 return CCFPEmode;
4786 default:
4787 abort ();
4791 /* A compare with a shifted operand. Because of canonicalization, the
4792 comparison will have to be swapped when we emit the assembler. */
4793 if (GET_MODE (y) == SImode && GET_CODE (y) == REG
4794 && (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
4795 || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
4796 || GET_CODE (x) == ROTATERT))
4797 return CC_SWPmode;
4799 /* This is a special case that is used by combine to allow a
4800 comparison of a shifted byte load to be split into a zero-extend
4801 followed by a comparison of the shifted integer (only valid for
4802 equalities and unsigned inequalities). */
4803 if (GET_MODE (x) == SImode
4804 && GET_CODE (x) == ASHIFT
4805 && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 24
4806 && GET_CODE (XEXP (x, 0)) == SUBREG
4807 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == MEM
4808 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == QImode
4809 && (op == EQ || op == NE
4810 || op == GEU || op == GTU || op == LTU || op == LEU)
4811 && GET_CODE (y) == CONST_INT)
4812 return CC_Zmode;
4814 /* A construct for a conditional compare: if the false arm contains
4815 0, then both conditions must be true; otherwise either condition
4816 must be true. Not all conditions are possible, so CCmode is
4817 returned if it can't be done. */
4818 if (GET_CODE (x) == IF_THEN_ELSE
4819 && (XEXP (x, 2) == const0_rtx
4820 || XEXP (x, 2) == const1_rtx)
4821 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4822 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
4823 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
4824 INTVAL (XEXP (x, 2)));
4826 /* Alternate canonicalizations of the above. These are somewhat cleaner. */
4827 if (GET_CODE (x) == AND
4828 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4829 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
4830 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1), 0);
4832 if (GET_CODE (x) == IOR
4833 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4834 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
4835 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1), 2);
4837 /* For an operation that sets the condition codes as a side-effect, the
4838 V flag is not set correctly, so we can only use comparisons where
4839 this doesn't matter. (For LT and GE we can use "mi" and "pl"
4840 instead.) */
4841 if (GET_MODE (x) == SImode
4842 && y == const0_rtx
4843 && (op == EQ || op == NE || op == LT || op == GE)
4844 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
4845 || GET_CODE (x) == AND || GET_CODE (x) == IOR
4846 || GET_CODE (x) == XOR || GET_CODE (x) == MULT
4847 || GET_CODE (x) == NOT || GET_CODE (x) == NEG
4848 || GET_CODE (x) == LSHIFTRT
4849 || GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
4850 || GET_CODE (x) == ROTATERT || GET_CODE (x) == ZERO_EXTRACT))
4851 return CC_NOOVmode;
4853 if (GET_MODE (x) == QImode && (op == EQ || op == NE))
4854 return CC_Zmode;
4856 if (GET_MODE (x) == SImode && (op == LTU || op == GEU)
4857 && GET_CODE (x) == PLUS
4858 && (rtx_equal_p (XEXP (x, 0), y) || rtx_equal_p (XEXP (x, 1), y)))
4859 return CC_Cmode;
4861 return CCmode;
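/* Note on the CC_Cmode case above: a comparison such as
   (unsigned) (a + b) < a is the idiomatic carry-out test for an
   addition, so only the C flag is needed to decide it.  */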
4864 /* X and Y are two things to compare using CODE. Emit the compare insn and
4865 return the rtx for register 0 in the proper mode. FP means this is a
4866 floating point compare: I don't think that it is needed on the arm. */
4868 rtx
4869 arm_gen_compare_reg (code, x, y)
4870 enum rtx_code code;
4871 rtx x, y;
4873 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
4874 rtx cc_reg = gen_rtx_REG (mode, CC_REGNUM);
4876 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
4877 gen_rtx_COMPARE (mode, x, y)));
4879 return cc_reg;
4882 void
4883 arm_reload_in_hi (operands)
4884 rtx * operands;
4886 rtx ref = operands[1];
4887 rtx base, scratch;
4888 HOST_WIDE_INT offset = 0;
4890 if (GET_CODE (ref) == SUBREG)
4892 offset = SUBREG_BYTE (ref);
4893 ref = SUBREG_REG (ref);
4896 if (GET_CODE (ref) == REG)
4898 /* We have a pseudo which has been spilt onto the stack; there
4899 are two cases here: the first where there is a simple
4900 stack-slot replacement and a second where the stack-slot is
4901 out of range, or is used as a subreg. */
4902 if (reg_equiv_mem[REGNO (ref)])
4904 ref = reg_equiv_mem[REGNO (ref)];
4905 base = find_replacement (&XEXP (ref, 0));
4907 else
4908 /* The slot is out of range, or was dressed up in a SUBREG. */
4909 base = reg_equiv_address[REGNO (ref)];
4911 else
4912 base = find_replacement (&XEXP (ref, 0));
4914 /* Handle the case where the address is too complex to be offset by 1. */
4915 if (GET_CODE (base) == MINUS
4916 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
4918 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
4920 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
4921 base = base_plus;
4923 else if (GET_CODE (base) == PLUS)
4925 /* The addend must be CONST_INT, or we would have dealt with it above. */
4926 HOST_WIDE_INT hi, lo;
4928 offset += INTVAL (XEXP (base, 1));
4929 base = XEXP (base, 0);
4931 /* Rework the address into a legal sequence of insns. */
4932 /* Valid range for lo is -4095 -> 4095 */
4933 lo = (offset >= 0
4934 ? (offset & 0xfff)
4935 : -((-offset) & 0xfff));
4937 /* Corner case: if lo is the max offset, then we would be out of range
4938 once we have added the additional 1 below, so bump the msb into the
4939 pre-loading insn(s). */
4940 if (lo == 4095)
4941 lo &= 0x7ff;
4943 hi = ((((offset - lo) & (HOST_WIDE_INT) 0xffffffff)
4944 ^ (HOST_WIDE_INT) 0x80000000)
4945 - (HOST_WIDE_INT) 0x80000000);
4947 if (hi + lo != offset)
4948 abort ();
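/* Worked example: offset = 0x1004 splits into hi = 0x1000 and lo = 4;
   HI is added into the scratch base below and the two byte loads then
   use offsets 4 and 5.  For the corner case offset = 4095, LO is
   clipped to 2047 so that LO + 1 still fits in the offset range.  */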
4950 if (hi != 0)
4952 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
4954 /* Get the base address; addsi3 knows how to handle constants
4955 that require more than one insn. */
4956 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
4957 base = base_plus;
4958 offset = lo;
4962 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
4963 emit_insn (gen_zero_extendqisi2 (scratch,
4964 gen_rtx_MEM (QImode,
4965 plus_constant (base,
4966 offset))));
4967 emit_insn (gen_zero_extendqisi2 (gen_rtx_SUBREG (SImode, operands[0], 0),
4968 gen_rtx_MEM (QImode,
4969 plus_constant (base,
4970 offset + 1))));
4971 if (!BYTES_BIG_ENDIAN)
4972 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
4973 gen_rtx_IOR (SImode,
4974 gen_rtx_ASHIFT
4975 (SImode,
4976 gen_rtx_SUBREG (SImode, operands[0], 0),
4977 GEN_INT (8)),
4978 scratch)));
4979 else
4980 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
4981 gen_rtx_IOR (SImode,
4982 gen_rtx_ASHIFT (SImode, scratch,
4983 GEN_INT (8)),
4984 gen_rtx_SUBREG (SImode, operands[0],
4985 0))));
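/* Net effect of the sequence above: two zero-extended byte loads are
   merged with an IOR, shifting whichever byte is the more significant
   for the target's endianness left by 8, reassembling the halfword in
   operands[0].  */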
4988 /* Handle storing a half-word to memory during reload by synthesising as two
4989 byte stores. Take care not to clobber the input values until after we
4990 have moved them somewhere safe. This code assumes that if the DImode
4991 scratch in operands[2] overlaps either the input value or output address
4992 in some way, then that value must die in this insn (we absolutely need
4993 two scratch registers for some corner cases). */
4995 void
4996 arm_reload_out_hi (operands)
4997 rtx * operands;
4999 rtx ref = operands[0];
5000 rtx outval = operands[1];
5001 rtx base, scratch;
5002 HOST_WIDE_INT offset = 0;
5004 if (GET_CODE (ref) == SUBREG)
5006 offset = SUBREG_BYTE (ref);
5007 ref = SUBREG_REG (ref);
5010 if (GET_CODE (ref) == REG)
5012 /* We have a pseudo which has been spilt onto the stack; there
5013 are two cases here: the first where there is a simple
5014 stack-slot replacement and a second where the stack-slot is
5015 out of range, or is used as a subreg. */
5016 if (reg_equiv_mem[REGNO (ref)])
5018 ref = reg_equiv_mem[REGNO (ref)];
5019 base = find_replacement (&XEXP (ref, 0));
5021 else
5022 /* The slot is out of range, or was dressed up in a SUBREG. */
5023 base = reg_equiv_address[REGNO (ref)];
5025 else
5026 base = find_replacement (&XEXP (ref, 0));
5028 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
5030 /* Handle the case where the address is too complex to be offset by 1. */
5031 if (GET_CODE (base) == MINUS
5032 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
5034 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5036 /* Be careful not to destroy OUTVAL. */
5037 if (reg_overlap_mentioned_p (base_plus, outval))
5039 /* Updating base_plus might destroy outval, see if we can
5040 swap the scratch and base_plus. */
5041 if (!reg_overlap_mentioned_p (scratch, outval))
5043 rtx tmp = scratch;
5044 scratch = base_plus;
5045 base_plus = tmp;
5047 else
5049 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
5051 /* Be conservative and copy OUTVAL into the scratch now,
5052 this should only be necessary if outval is a subreg
5053 of something larger than a word. */
5054 /* XXX Might this clobber base? I can't see how it can,
5055 since scratch is known to overlap with OUTVAL, and
5056 must be wider than a word. */
5057 emit_insn (gen_movhi (scratch_hi, outval));
5058 outval = scratch_hi;
5062 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
5063 base = base_plus;
5065 else if (GET_CODE (base) == PLUS)
5067 /* The addend must be CONST_INT, or we would have dealt with it above. */
5068 HOST_WIDE_INT hi, lo;
5070 offset += INTVAL (XEXP (base, 1));
5071 base = XEXP (base, 0);
5073 /* Rework the address into a legal sequence of insns. */
5074 /* Valid range for lo is -4095 -> 4095 */
5075 lo = (offset >= 0
5076 ? (offset & 0xfff)
5077 : -((-offset) & 0xfff));
5079 /* Corner case: if lo is the max offset, then we would be out of range
5080 once we have added the additional 1 below, so bump the msb into the
5081 pre-loading insn(s). */
5082 if (lo == 4095)
5083 lo &= 0x7ff;
5085 hi = ((((offset - lo) & (HOST_WIDE_INT) 0xffffffff)
5086 ^ (HOST_WIDE_INT) 0x80000000)
5087 - (HOST_WIDE_INT) 0x80000000);
5089 if (hi + lo != offset)
5090 abort ();
5092 if (hi != 0)
5094 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
5096 /* Be careful not to destroy OUTVAL. */
5097 if (reg_overlap_mentioned_p (base_plus, outval))
5099 /* Updating base_plus might destroy outval, see if we
5100 can swap the scratch and base_plus. */
5101 if (!reg_overlap_mentioned_p (scratch, outval))
5103 rtx tmp = scratch;
5104 scratch = base_plus;
5105 base_plus = tmp;
5107 else
5109 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
5111 /* Be conservative and copy outval into scratch now,
5112 this should only be necessary if outval is a
5113 subreg of something larger than a word. */
5114 /* XXX Might this clobber base? I can't see how it
5115 can, since scratch is known to overlap with
5116 outval. */
5117 emit_insn (gen_movhi (scratch_hi, outval));
5118 outval = scratch_hi;
5122 /* Get the base address; addsi3 knows how to handle constants
5123 that require more than one insn. */
5124 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
5125 base = base_plus;
5126 offset = lo;
5130 if (BYTES_BIG_ENDIAN)
5132 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
5133 plus_constant (base, offset + 1)),
5134 gen_lowpart (QImode, outval)));
5135 emit_insn (gen_lshrsi3 (scratch,
5136 gen_rtx_SUBREG (SImode, outval, 0),
5137 GEN_INT (8)));
5138 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
5139 gen_lowpart (QImode, scratch)));
5141 else
5143 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
5144 gen_lowpart (QImode, outval)));
5145 emit_insn (gen_lshrsi3 (scratch,
5146 gen_rtx_SUBREG (SImode, outval, 0),
5147 GEN_INT (8)));
5148 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
5149 plus_constant (base, offset + 1)),
5150 gen_lowpart (QImode, scratch)));
5154 /* Print a symbolic form of X to the debug file, F. */
5156 static void
5157 arm_print_value (f, x)
5158 FILE * f;
5159 rtx x;
5161 switch (GET_CODE (x))
5163 case CONST_INT:
5164 fprintf (f, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
5165 return;
5167 case CONST_DOUBLE:
5168 fprintf (f, "<0x%lx,0x%lx>", (long)XWINT (x, 2), (long)XWINT (x, 3));
5169 return;
5171 case CONST_STRING:
5172 fprintf (f, "\"%s\"", XSTR (x, 0));
5173 return;
5175 case SYMBOL_REF:
5176 fprintf (f, "`%s'", XSTR (x, 0));
5177 return;
5179 case LABEL_REF:
5180 fprintf (f, "L%d", INSN_UID (XEXP (x, 0)));
5181 return;
5183 case CONST:
5184 arm_print_value (f, XEXP (x, 0));
5185 return;
5187 case PLUS:
5188 arm_print_value (f, XEXP (x, 0));
5189 fprintf (f, "+");
5190 arm_print_value (f, XEXP (x, 1));
5191 return;
5193 case PC:
5194 fprintf (f, "pc");
5195 return;
5197 default:
5198 fprintf (f, "????");
5199 return;
5203 /* Routines for manipulation of the constant pool. */
5205 /* Arm instructions cannot load a large constant directly into a
5206 register; they have to come from a pc relative load. The constant
5207 must therefore be placed in the addressable range of the pc
5208 relative load. Depending on the precise pc relative load
5209 instruction the range is somewhere between 256 bytes and 4k. This
5210 means that we often have to dump a constant inside a function, and
5211 generate code to branch around it.
5213 It is important to minimize this, since the branches will slow
5214 things down and make the code larger.
5216 Normally we can hide the table after an existing unconditional
5217 branch so that there is no interruption of the flow, but in the
5218 worst case the code looks like this:
5220 ldr rn, L1
5222 b L2
5223 align
5224 L1: .long value
5228 ldr rn, L3
5230 b L4
5231 align
5232 L3: .long value
5236 We fix this by performing a scan after scheduling, which notices
5237 which instructions need to have their operands fetched from the
5238 constant table and builds the table.
5240 The algorithm starts by building a table of all the constants that
5241 need fixing up and all the natural barriers in the function (places
5242 where a constant table can be dropped without breaking the flow).
5243 For each fixup we note how far the pc-relative replacement will be
5244 able to reach and the offset of the instruction into the function.
5246 Having built the table we then group the fixes together to form
5247 tables that are as large as possible (subject to addressing
5248 constraints) and emit each table of constants after the last
5249 barrier that is within range of all the instructions in the group.
5250 If a group does not contain a barrier, then we forcibly create one
5251 by inserting a jump instruction into the flow. Once the table has
5252 been inserted, the insns are then modified to reference the
5253 relevant entry in the pool.
5255 Possible enhancements to the algorithm (not implemented) are:
5257 1) For some processors and object formats, there may be benefit in
5258 aligning the pools to the start of cache lines; this alignment
5259 would need to be taken into account when calculating addressability
5260 of a pool. */
5262 /* These typedefs are located at the start of this file, so that
5263 they can be used in the prototypes there. This comment is to
5264 remind readers of that fact so that the following structures
5265 can be understood more easily.
5267 typedef struct minipool_node Mnode;
5268 typedef struct minipool_fixup Mfix; */
5270 struct minipool_node
5272 /* Doubly linked chain of entries. */
5273 Mnode * next;
5274 Mnode * prev;
5275 /* The maximum offset into the code that this entry can be placed. While
5276 pushing fixes for forward references, all entries are sorted in order
5277 of increasing max_address. */
5278 HOST_WIDE_INT max_address;
5279 /* Similarly for an entry inserted for a backwards ref. */
5280 HOST_WIDE_INT min_address;
5281 /* The number of fixes referencing this entry. This can become zero
5282 if we "unpush" an entry. In this case we ignore the entry when we
5283 come to emit the code. */
5284 int refcount;
5285 /* The offset from the start of the minipool. */
5286 HOST_WIDE_INT offset;
5287 /* The value in the table. */
5288 rtx value;
5289 /* The mode of value. */
5290 enum machine_mode mode;
5291 int fix_size;
5294 struct minipool_fixup
5296 Mfix * next;
5297 rtx insn;
5298 HOST_WIDE_INT address;
5299 rtx * loc;
5300 enum machine_mode mode;
5301 int fix_size;
5302 rtx value;
5303 Mnode * minipool;
5304 HOST_WIDE_INT forwards;
5305 HOST_WIDE_INT backwards;
5308 /* Fixes less than a word need padding out to a word boundary. */
5309 #define MINIPOOL_FIX_SIZE(mode) \
5310 (GET_MODE_SIZE ((mode)) >= 4 ? GET_MODE_SIZE ((mode)) : 4)
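/* So an HImode fix (2 bytes) is padded out to 4 bytes, while SImode
   and DImode fixes keep their natural sizes of 4 and 8 bytes.  */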
5312 static Mnode * minipool_vector_head;
5313 static Mnode * minipool_vector_tail;
5314 static rtx minipool_vector_label;
5316 /* The linked list of all minipool fixes required for this function. */
5317 Mfix * minipool_fix_head;
5318 Mfix * minipool_fix_tail;
5319 /* The fix entry for the current minipool, once it has been placed. */
5320 Mfix * minipool_barrier;
5322 /* Determines if INSN is the start of a jump table. Returns the end
5323 of the TABLE or NULL_RTX. */
5325 static rtx
5326 is_jump_table (insn)
5327 rtx insn;
5329 rtx table;
5331 if (GET_CODE (insn) == JUMP_INSN
5332 && JUMP_LABEL (insn) != NULL
5333 && ((table = next_real_insn (JUMP_LABEL (insn)))
5334 == next_real_insn (insn))
5335 && table != NULL
5336 && GET_CODE (table) == JUMP_INSN
5337 && (GET_CODE (PATTERN (table)) == ADDR_VEC
5338 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
5339 return table;
5341 return NULL_RTX;
5344 #ifndef JUMP_TABLES_IN_TEXT_SECTION
5345 #define JUMP_TABLES_IN_TEXT_SECTION 0
5346 #endif
5348 static HOST_WIDE_INT
5349 get_jump_table_size (insn)
5350 rtx insn;
5352 /* ADDR_VECs only take room if read-only data goes into the text
5353 section. */
5354 if (JUMP_TABLES_IN_TEXT_SECTION
5355 #if !defined(READONLY_DATA_SECTION) && !defined(READONLY_DATA_SECTION_ASM_OP)
5356 || 1
5357 #endif
5360 rtx body = PATTERN (insn);
5361 int elt = GET_CODE (body) == ADDR_DIFF_VEC ? 1 : 0;
5363 return GET_MODE_SIZE (GET_MODE (body)) * XVECLEN (body, elt);
5366 return 0;
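/* For example, an ADDR_DIFF_VEC of 10 HImode entries accounts for
   2 * 10 = 20 bytes, but only when jump tables live in the text
   section; otherwise the table takes no room there and 0 is
   returned.  */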
5369 /* Move a minipool fix MP from its current location to before MAX_MP.
5370 If MAX_MP is NULL, then MP doesn't need moving, but the addressing
5371 constraints may need updating. */
5373 static Mnode *
5374 move_minipool_fix_forward_ref (mp, max_mp, max_address)
5375 Mnode * mp;
5376 Mnode * max_mp;
5377 HOST_WIDE_INT max_address;
5379 /* This should never be true and the code below assumes these are
5380 different. */
5381 if (mp == max_mp)
5382 abort ();
5384 if (max_mp == NULL)
5386 if (max_address < mp->max_address)
5387 mp->max_address = max_address;
5389 else
5391 if (max_address > max_mp->max_address - mp->fix_size)
5392 mp->max_address = max_mp->max_address - mp->fix_size;
5393 else
5394 mp->max_address = max_address;
5396 /* Unlink MP from its current position. Since max_mp is non-null,
5397 mp->prev must be non-null. */
5398 mp->prev->next = mp->next;
5399 if (mp->next != NULL)
5400 mp->next->prev = mp->prev;
5401 else
5402 minipool_vector_tail = mp->prev;
5404 /* Re-insert it before MAX_MP. */
5405 mp->next = max_mp;
5406 mp->prev = max_mp->prev;
5407 max_mp->prev = mp;
5409 if (mp->prev != NULL)
5410 mp->prev->next = mp;
5411 else
5412 minipool_vector_head = mp;
5415 /* Save the new entry. */
5416 max_mp = mp;
5418 /* Scan over the preceding entries and adjust their addresses as
5419 required. */
5420 while (mp->prev != NULL
5421 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
5423 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
5424 mp = mp->prev;
5427 return max_mp;
5430 /* Add a constant to the minipool for a forward reference. Returns the
5431 node added or NULL if the constant will not fit in this pool. */
5433 static Mnode *
5434 add_minipool_forward_ref (fix)
5435 Mfix * fix;
5437 /* If set, max_mp is the first pool_entry that has a lower
5438 constraint than the one we are trying to add. */
5439 Mnode * max_mp = NULL;
5440 HOST_WIDE_INT max_address = fix->address + fix->forwards;
5441 Mnode * mp;
5443 /* If this fix's address is greater than the address of the first
5444 entry, then we can't put the fix in this pool. We subtract the
5445 size of the current fix to ensure that if the table is fully
5446 packed we still have enough room to insert this value by shuffling
5447 the other fixes forwards. */
5448 if (minipool_vector_head &&
5449 fix->address >= minipool_vector_head->max_address - fix->fix_size)
5450 return NULL;
5452 /* Scan the pool to see if a constant with the same value has
5453 already been added. While we are doing this, also note the
5454 location where we must insert the constant if it doesn't already
5455 exist. */
5456 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
5458 if (GET_CODE (fix->value) == GET_CODE (mp->value)
5459 && fix->mode == mp->mode
5460 && (GET_CODE (fix->value) != CODE_LABEL
5461 || (CODE_LABEL_NUMBER (fix->value)
5462 == CODE_LABEL_NUMBER (mp->value)))
5463 && rtx_equal_p (fix->value, mp->value))
5465 /* More than one fix references this entry. */
5466 mp->refcount++;
5467 return move_minipool_fix_forward_ref (mp, max_mp, max_address);
5470 /* Note the insertion point if necessary. */
5471 if (max_mp == NULL
5472 && mp->max_address > max_address)
5473 max_mp = mp;
5476 /* The value is not currently in the minipool, so we need to create
5477 a new entry for it. If MAX_MP is NULL, the entry will be put on
5478 the end of the list since the placement is less constrained than
5479 any existing entry. Otherwise, we insert the new fix before
5480 MAX_MP and, if necessary, adjust the constraints on the other
5481 entries. */
5482 mp = xmalloc (sizeof (* mp));
5483 mp->fix_size = fix->fix_size;
5484 mp->mode = fix->mode;
5485 mp->value = fix->value;
5486 mp->refcount = 1;
5487 /* Not yet required for a backwards ref. */
5488 mp->min_address = -65536;
5490 if (max_mp == NULL)
5492 mp->max_address = max_address;
5493 mp->next = NULL;
5494 mp->prev = minipool_vector_tail;
5496 if (mp->prev == NULL)
5498 minipool_vector_head = mp;
5499 minipool_vector_label = gen_label_rtx ();
5501 else
5502 mp->prev->next = mp;
5504 minipool_vector_tail = mp;
5506 else
5508 if (max_address > max_mp->max_address - mp->fix_size)
5509 mp->max_address = max_mp->max_address - mp->fix_size;
5510 else
5511 mp->max_address = max_address;
5513 mp->next = max_mp;
5514 mp->prev = max_mp->prev;
5515 max_mp->prev = mp;
5516 if (mp->prev != NULL)
5517 mp->prev->next = mp;
5518 else
5519 minipool_vector_head = mp;
5522 /* Save the new entry. */
5523 max_mp = mp;
5525 /* Scan over the preceding entries and adjust their addresses as
5526 required. */
5527 while (mp->prev != NULL
5528 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
5530 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
5531 mp = mp->prev;
5534 return max_mp;
5537 static Mnode *
5538 move_minipool_fix_backward_ref (mp, min_mp, min_address)
5539 Mnode * mp;
5540 Mnode * min_mp;
5541 HOST_WIDE_INT min_address;
5543 HOST_WIDE_INT offset;
5545 /* This should never be true, and the code below assumes these are
5546 different. */
5547 if (mp == min_mp)
5548 abort ();
5550 if (min_mp == NULL)
5552 if (min_address > mp->min_address)
5553 mp->min_address = min_address;
5555 else
5557 /* We will adjust this below if it is too loose. */
5558 mp->min_address = min_address;
5560 /* Unlink MP from its current position. Since min_mp is non-null,
5561 mp->next must be non-null. */
5562 mp->next->prev = mp->prev;
5563 if (mp->prev != NULL)
5564 mp->prev->next = mp->next;
5565 else
5566 minipool_vector_head = mp->next;
5568 /* Reinsert it after MIN_MP. */
5569 mp->prev = min_mp;
5570 mp->next = min_mp->next;
5571 min_mp->next = mp;
5572 if (mp->next != NULL)
5573 mp->next->prev = mp;
5574 else
5575 minipool_vector_tail = mp;
5578 min_mp = mp;
5580 offset = 0;
5581 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
5583 mp->offset = offset;
5584 if (mp->refcount > 0)
5585 offset += mp->fix_size;
5587 if (mp->next && mp->next->min_address < mp->min_address + mp->fix_size)
5588 mp->next->min_address = mp->min_address + mp->fix_size;
5591 return min_mp;
5594 /* Add a constant to the minipool for a backward reference. Returns the
5595 node added or NULL if the constant will not fit in this pool.
5597 Note that the code for insertion for a backwards reference can be
5598 somewhat confusing because the calculated offsets for each fix do
5599 not take into account the size of the pool (which is still under
5600 construction). */
5602 static Mnode *
5603 add_minipool_backward_ref (fix)
5604 Mfix * fix;
5606 /* If set, min_mp is the last pool_entry that has a lower constraint
5607 than the one we are trying to add. */
5608 Mnode * min_mp = NULL;
5609 /* This can be negative, since it is only a constraint. */
5610 HOST_WIDE_INT min_address = fix->address - fix->backwards;
5611 Mnode * mp;
5613 /* If we can't reach the current pool from this insn, or if we can't
5614 insert this entry at the end of the pool without pushing other
5615 fixes out of range, then we don't try. This ensures that we
5616 can't fail later on. */
5617 if (min_address >= minipool_barrier->address
5618 || (minipool_vector_tail->min_address + fix->fix_size
5619 >= minipool_barrier->address))
5620 return NULL;
5622 /* Scan the pool to see if a constant with the same value has
5623 already been added. While we are doing this, also note the
5624 location where we must insert the constant if it doesn't already
5625 exist. */
5626 for (mp = minipool_vector_tail; mp != NULL; mp = mp->prev)
5628 if (GET_CODE (fix->value) == GET_CODE (mp->value)
5629 && fix->mode == mp->mode
5630 && (GET_CODE (fix->value) != CODE_LABEL
5631 || (CODE_LABEL_NUMBER (fix->value)
5632 == CODE_LABEL_NUMBER (mp->value)))
5633 && rtx_equal_p (fix->value, mp->value)
5634 /* Check that there is enough slack to move this entry to the
5635 end of the table (this is conservative). */
5636 && (mp->max_address
5637 > (minipool_barrier->address
5638 + minipool_vector_tail->offset
5639 + minipool_vector_tail->fix_size)))
5641 mp->refcount++;
5642 return move_minipool_fix_backward_ref (mp, min_mp, min_address);
5645 if (min_mp != NULL)
5646 mp->min_address += fix->fix_size;
5647 else
5649 /* Note the insertion point if necessary. */
5650 if (mp->min_address < min_address)
5651 min_mp = mp;
5652 else if (mp->max_address
5653 < minipool_barrier->address + mp->offset + fix->fix_size)
5655 /* Inserting before this entry would push the fix beyond
5656 its maximum address (which can happen if we have
5657 re-located a forwards fix); force the new fix to come
5658 after it. */
5659 min_mp = mp;
5660 min_address = mp->min_address + fix->fix_size;
5665 /* We need to create a new entry. */
5666 mp = xmalloc (sizeof (* mp));
5667 mp->fix_size = fix->fix_size;
5668 mp->mode = fix->mode;
5669 mp->value = fix->value;
5670 mp->refcount = 1;
5671 mp->max_address = minipool_barrier->address + 65536;
5673 mp->min_address = min_address;
5675 if (min_mp == NULL)
5677 mp->prev = NULL;
5678 mp->next = minipool_vector_head;
5680 if (mp->next == NULL)
5682 minipool_vector_tail = mp;
5683 minipool_vector_label = gen_label_rtx ();
5685 else
5686 mp->next->prev = mp;
5688 minipool_vector_head = mp;
5690 else
5692 mp->next = min_mp->next;
5693 mp->prev = min_mp;
5694 min_mp->next = mp;
5696 if (mp->next != NULL)
5697 mp->next->prev = mp;
5698 else
5699 minipool_vector_tail = mp;
5702 /* Save the new entry. */
5703 min_mp = mp;
5705 if (mp->prev)
5706 mp = mp->prev;
5707 else
5708 mp->offset = 0;
5710 /* Scan over the following entries and adjust their offsets. */
5711 while (mp->next != NULL)
5713 if (mp->next->min_address < mp->min_address + mp->fix_size)
5714 mp->next->min_address = mp->min_address + mp->fix_size;
5716 if (mp->refcount)
5717 mp->next->offset = mp->offset + mp->fix_size;
5718 else
5719 mp->next->offset = mp->offset;
5721 mp = mp->next;
5724 return min_mp;
5727 static void
5728 assign_minipool_offsets (barrier)
5729 Mfix * barrier;
5731 HOST_WIDE_INT offset = 0;
5732 Mnode * mp;
5734 minipool_barrier = barrier;
5736 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
5738 mp->offset = offset;
5740 if (mp->refcount > 0)
5741 offset += mp->fix_size;
5745 /* Output the literal table. */
5746 static void
5747 dump_minipool (scan)
5748 rtx scan;
5750 Mnode * mp;
5751 Mnode * nmp;
5753 if (rtl_dump_file)
5754 fprintf (rtl_dump_file,
5755 ";; Emitting minipool after insn %u; address %ld\n",
5756 INSN_UID (scan), (unsigned long) minipool_barrier->address);
5758 scan = emit_label_after (gen_label_rtx (), scan);
5759 scan = emit_insn_after (gen_align_4 (), scan);
5760 scan = emit_label_after (minipool_vector_label, scan);
5762 for (mp = minipool_vector_head; mp != NULL; mp = nmp)
5764 if (mp->refcount > 0)
5766 if (rtl_dump_file)
5768 fprintf (rtl_dump_file,
5769 ";; Offset %u, min %ld, max %ld ",
5770 (unsigned) mp->offset, (unsigned long) mp->min_address,
5771 (unsigned long) mp->max_address);
5772 arm_print_value (rtl_dump_file, mp->value);
5773 fputc ('\n', rtl_dump_file);
5776 switch (mp->fix_size)
5778 #ifdef HAVE_consttable_1
5779 case 1:
5780 scan = emit_insn_after (gen_consttable_1 (mp->value), scan);
5781 break;
5783 #endif
5784 #ifdef HAVE_consttable_2
5785 case 2:
5786 scan = emit_insn_after (gen_consttable_2 (mp->value), scan);
5787 break;
5789 #endif
5790 #ifdef HAVE_consttable_4
5791 case 4:
5792 scan = emit_insn_after (gen_consttable_4 (mp->value), scan);
5793 break;
5795 #endif
5796 #ifdef HAVE_consttable_8
5797 case 8:
5798 scan = emit_insn_after (gen_consttable_8 (mp->value), scan);
5799 break;
5801 #endif
5802 default:
5803 abort ();
5804 break;
5808 nmp = mp->next;
5809 free (mp);
5812 minipool_vector_head = minipool_vector_tail = NULL;
5813 scan = emit_insn_after (gen_consttable_end (), scan);
5814 scan = emit_barrier_after (scan);
5817 /* Return the cost of forcibly inserting a barrier after INSN. */
5819 static int
5820 arm_barrier_cost (insn)
5821 rtx insn;
5823 /* Basing the location of the pool on the loop depth is preferable,
5824 but at the moment, the basic block information seems to be
5825 corrupt by this stage of the compilation. */
5826 int base_cost = 50;
5827 rtx next = next_nonnote_insn (insn);
5829 if (next != NULL && GET_CODE (next) == CODE_LABEL)
5830 base_cost -= 20;
5832 switch (GET_CODE (insn))
5834 case CODE_LABEL:
5835 /* It will always be better to place the table before the label, rather
5836 than after it. */
5837 return 50;
5839 case INSN:
5840 case CALL_INSN:
5841 return base_cost;
5843 case JUMP_INSN:
5844 return base_cost - 10;
5846 default:
5847 return base_cost + 10;
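/* So, for example, a JUMP_INSN whose next non-note insn is a
   CODE_LABEL costs (50 - 20) - 10 = 20, the cheapest case here, while
   a CODE_LABEL itself always costs 50 because the pool should go
   before the label, not after it.  */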
5851 /* Find the best place in the insn stream in the range
5852 (FIX->address,MAX_ADDRESS) to forcibly insert a minipool barrier.
5853 Create the barrier by inserting a jump and add a new fix entry for
5854 it. */
5856 static Mfix *
5857 create_fix_barrier (fix, max_address)
5858 Mfix * fix;
5859 HOST_WIDE_INT max_address;
5861 HOST_WIDE_INT count = 0;
5862 rtx barrier;
5863 rtx from = fix->insn;
5864 rtx selected = from;
5865 int selected_cost;
5866 HOST_WIDE_INT selected_address;
5867 Mfix * new_fix;
5868 HOST_WIDE_INT max_count = max_address - fix->address;
5869 rtx label = gen_label_rtx ();
5871 selected_cost = arm_barrier_cost (from);
5872 selected_address = fix->address;
5874 while (from && count < max_count)
5876 rtx tmp;
5877 int new_cost;
5879 /* This code shouldn't have been called if there was a natural barrier
5880 within range. */
5881 if (GET_CODE (from) == BARRIER)
5882 abort ();
5884 /* Count the length of this insn. */
5885 count += get_attr_length (from);
5887 /* If there is a jump table, add its length. */
5888 tmp = is_jump_table (from);
5889 if (tmp != NULL)
5891 count += get_jump_table_size (tmp);
5893 /* Jump tables aren't in a basic block, so base the cost on
5894 the dispatch insn. If we select this location, we will
5895 still put the pool after the table. */
5896 new_cost = arm_barrier_cost (from);
5898 if (count < max_count && new_cost <= selected_cost)
5900 selected = tmp;
5901 selected_cost = new_cost;
5902 selected_address = fix->address + count;
5905 /* Continue after the dispatch table. */
5906 from = NEXT_INSN (tmp);
5907 continue;
5910 new_cost = arm_barrier_cost (from);
5912 if (count < max_count && new_cost <= selected_cost)
5914 selected = from;
5915 selected_cost = new_cost;
5916 selected_address = fix->address + count;
5919 from = NEXT_INSN (from);
5922 /* Create a new JUMP_INSN that branches around a barrier. */
5923 from = emit_jump_insn_after (gen_jump (label), selected);
5924 JUMP_LABEL (from) = label;
5925 barrier = emit_barrier_after (from);
5926 emit_label_after (label, barrier);
5928 /* Create a minipool barrier entry for the new barrier. */
5929 new_fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* new_fix));
5930 new_fix->insn = barrier;
5931 new_fix->address = selected_address;
5932 new_fix->next = fix->next;
5933 fix->next = new_fix;
5935 return new_fix;
5938 /* Record that there is a natural barrier in the insn stream at
5939 ADDRESS. */
5940 static void
5941 push_minipool_barrier (insn, address)
5942 rtx insn;
5943 HOST_WIDE_INT address;
5945 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
5947 fix->insn = insn;
5948 fix->address = address;
5950 fix->next = NULL;
5951 if (minipool_fix_head != NULL)
5952 minipool_fix_tail->next = fix;
5953 else
5954 minipool_fix_head = fix;
5956 minipool_fix_tail = fix;
5959 /* Record INSN, which will need fixing up to load a value from the
5960 minipool. ADDRESS is the offset of the insn since the start of the
5961 function; LOC is a pointer to the part of the insn which requires
5962 fixing; VALUE is the constant that must be loaded, which is of type
5963 MODE. */
5964 static void
5965 push_minipool_fix (insn, address, loc, mode, value)
5966 rtx insn;
5967 HOST_WIDE_INT address;
5968 rtx * loc;
5969 enum machine_mode mode;
5970 rtx value;
5972 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
5974 #ifdef AOF_ASSEMBLER
5975 /* PIC symbol references need to be converted into offsets into the
5976 based area. */
5977 /* XXX This shouldn't be done here. */
5978 if (flag_pic && GET_CODE (value) == SYMBOL_REF)
5979 value = aof_pic_entry (value);
5980 #endif /* AOF_ASSEMBLER */
5982 fix->insn = insn;
5983 fix->address = address;
5984 fix->loc = loc;
5985 fix->mode = mode;
5986 fix->fix_size = MINIPOOL_FIX_SIZE (mode);
5987 fix->value = value;
5988 fix->forwards = get_attr_pool_range (insn);
5989 fix->backwards = get_attr_neg_pool_range (insn);
5990 fix->minipool = NULL;
5992 /* If an insn doesn't have a range defined for it, then it isn't
5993 expecting to be reworked by this code. Better to abort now than
5994 to generate duff assembly code. */
5995 if (fix->forwards == 0 && fix->backwards == 0)
5996 abort ();
5998 if (rtl_dump_file)
6000 fprintf (rtl_dump_file,
6001 ";; %smode fixup for i%d; addr %lu, range (%ld,%ld): ",
6002 GET_MODE_NAME (mode),
6003 INSN_UID (insn), (unsigned long) address,
6004 -1 * (long)fix->backwards, (long)fix->forwards);
6005 arm_print_value (rtl_dump_file, fix->value);
6006 fprintf (rtl_dump_file, "\n");
6009 /* Add it to the chain of fixes. */
6010 fix->next = NULL;
6012 if (minipool_fix_head != NULL)
6013 minipool_fix_tail->next = fix;
6014 else
6015 minipool_fix_head = fix;
6017 minipool_fix_tail = fix;
6020 /* Scan INSN and note any of its operands that need fixing. */
6022 static void
6023 note_invalid_constants (insn, address)
6024 rtx insn;
6025 HOST_WIDE_INT address;
6027 int opno;
6029 extract_insn (insn);
6031 if (!constrain_operands (1))
6032 fatal_insn_not_found (insn);
6034 /* Fill in recog_op_alt with information about the constraints of this
6035 insn. */
6036 preprocess_constraints ();
6038 for (opno = 0; opno < recog_data.n_operands; opno++)
6040 /* Things we need to fix can only occur in inputs. */
6041 if (recog_data.operand_type[opno] != OP_IN)
6042 continue;
6044 /* If this alternative is a memory reference, then any mention
6045 of constants in this alternative is really to fool reload
6046 into allowing us to accept one there. We need to fix them up
6047 now so that we output the right code. */
6048 if (recog_op_alt[opno][which_alternative].memory_ok)
6050 rtx op = recog_data.operand[opno];
6052 if (CONSTANT_P (op))
6053 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
6054 recog_data.operand_mode[opno], op);
6055 #if 0
6056 /* RWE: Now we look correctly at the operands for the insn,
6057 this shouldn't be needed any more. */
6058 #ifndef AOF_ASSEMBLER
6059 /* XXX Is this still needed? */
6060 else if (GET_CODE (op) == UNSPEC && XINT (op, 1) == UNSPEC_PIC_SYM)
6061 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
6062 recog_data.operand_mode[opno],
6063 XVECEXP (op, 0, 0));
6064 #endif
6065 #endif
6066 else if (GET_CODE (op) == MEM
6067 && GET_CODE (XEXP (op, 0)) == SYMBOL_REF
6068 && CONSTANT_POOL_ADDRESS_P (XEXP (op, 0)))
6069 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
6070 recog_data.operand_mode[opno],
6071 get_pool_constant (XEXP (op, 0)));
6076 void
6077 arm_reorg (first)
6078 rtx first;
6080 rtx insn;
6081 HOST_WIDE_INT address = 0;
6082 Mfix * fix;
6084 minipool_fix_head = minipool_fix_tail = NULL;
6086 /* The first insn must always be a note, or the code below won't
6087 scan it properly. */
6088 if (GET_CODE (first) != NOTE)
6089 abort ();
6091 /* Scan all the insns and record the operands that will need fixing. */
6092 for (insn = next_nonnote_insn (first); insn; insn = next_nonnote_insn (insn))
6094 if (GET_CODE (insn) == BARRIER)
6095 push_minipool_barrier (insn, address);
6096 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
6097 || GET_CODE (insn) == JUMP_INSN)
6099 rtx table;
6101 note_invalid_constants (insn, address);
6102 address += get_attr_length (insn);
6104 /* If the insn is a vector jump, add the size of the table
6105 and skip the table. */
6106 if ((table = is_jump_table (insn)) != NULL)
6108 address += get_jump_table_size (table);
6109 insn = table;
6114 fix = minipool_fix_head;
6116 /* Now scan the fixups and perform the required changes. */
6117 while (fix)
6119 Mfix * ftmp;
6120 Mfix * fdel;
6121 Mfix * last_added_fix;
6122 Mfix * last_barrier = NULL;
6123 Mfix * this_fix;
6125 /* Skip any further barriers before the next fix. */
6126 while (fix && GET_CODE (fix->insn) == BARRIER)
6127 fix = fix->next;
6129 /* No more fixes. */
6130 if (fix == NULL)
6131 break;
6133 last_added_fix = NULL;
6135 for (ftmp = fix; ftmp; ftmp = ftmp->next)
6137 if (GET_CODE (ftmp->insn) == BARRIER)
6139 if (ftmp->address >= minipool_vector_head->max_address)
6140 break;
6142 last_barrier = ftmp;
6144 else if ((ftmp->minipool = add_minipool_forward_ref (ftmp)) == NULL)
6145 break;
6147 last_added_fix = ftmp; /* Keep track of the last fix added. */
6150 /* If we found a barrier, drop back to that; any fixes that we
6151 could have reached but come after the barrier will now go in
6152 the next mini-pool. */
6153 if (last_barrier != NULL)
6155 /* Reduce the refcount for those fixes that won't go into this
6156 pool after all. */
6157 for (fdel = last_barrier->next;
6158 fdel && fdel != ftmp;
6159 fdel = fdel->next)
6161 fdel->minipool->refcount--;
6162 fdel->minipool = NULL;
6165 ftmp = last_barrier;
6167 else
6169 /* ftmp is the first fix that we can't fit into this pool and
6170 there are no natural barriers that we could use. Insert a
6171 new barrier in the code somewhere between the previous
6172 fix and this one, and arrange to jump around it. */
6173 HOST_WIDE_INT max_address;
6175 /* The last item on the list of fixes must be a barrier, so
6176 we can never run off the end of the list of fixes without
6177 last_barrier being set. */
6178 if (ftmp == NULL)
6179 abort ();
6181 max_address = minipool_vector_head->max_address;
6182 /* Check that there isn't another fix that is in range that
6183 we couldn't fit into this pool because the pool was
6184 already too large: we need to put the pool before such an
6185 instruction. */
6186 if (ftmp->address < max_address)
6187 max_address = ftmp->address;
6189 last_barrier = create_fix_barrier (last_added_fix, max_address);
6192 assign_minipool_offsets (last_barrier);
6194 while (ftmp)
6196 if (GET_CODE (ftmp->insn) != BARRIER
6197 && ((ftmp->minipool = add_minipool_backward_ref (ftmp))
6198 == NULL))
6199 break;
6201 ftmp = ftmp->next;
6204 /* Scan over the fixes we have identified for this pool, fixing them
6205 up and adding the constants to the pool itself. */
6206 for (this_fix = fix; this_fix && ftmp != this_fix;
6207 this_fix = this_fix->next)
6208 if (GET_CODE (this_fix->insn) != BARRIER)
6210 rtx addr
6211 = plus_constant (gen_rtx_LABEL_REF (VOIDmode,
6212 minipool_vector_label),
6213 this_fix->minipool->offset);
6214 *this_fix->loc = gen_rtx_MEM (this_fix->mode, addr);
6217 dump_minipool (last_barrier->insn);
6218 fix = ftmp;
6221 /* From now on we must synthesize any constants that we can't handle
6222 directly. This can happen if the RTL gets split during final
6223 instruction generation. */
6224 after_arm_reorg = 1;
6226 /* Free the minipool memory. */
6227 obstack_free (&minipool_obstack, minipool_startobj);
6230 /* Routines to output assembly language. */
6232 /* If the rtx is one of the valid FPA immediate constants then return the
6233 string representation of that number. In this way we can ensure that valid
6234 double constants are generated even when cross compiling. */
6236 const char *
6237 fp_immediate_constant (x)
6238 rtx x;
6240 REAL_VALUE_TYPE r;
6241 int i;
6243 if (!fpa_consts_inited)
6244 init_fpa_table ();
6246 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
6247 for (i = 0; i < 8; i++)
6248 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
6249 return strings_fpa[i];
6251 abort ();
6254 /* As for fp_immediate_constant, but value is passed directly, not in rtx. */
6256 static const char *
6257 fp_const_from_val (r)
6258 REAL_VALUE_TYPE * r;
6260 int i;
6262 if (!fpa_consts_inited)
6263 init_fpa_table ();
6265 for (i = 0; i < 8; i++)
6266 if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
6267 return strings_fpa[i];
6269 abort ();
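/* For reference: the FPA instruction set accepts only eight floating
   point immediates. The values_fpa/strings_fpa tables filled in by
   init_fpa_table hold 0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 0.5 and 10.0, so
   the two lookups above are simple linear scans over those entries.  */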
6272 /* Output the operands of a LDM/STM instruction to STREAM.
6273 MASK is the ARM register set mask of which only bits 0-15 are important.
6274 REG is the base register, either the frame pointer or the stack pointer.
6275 INSTR is the possibly suffixed load or store instruction. */
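/* For example, assuming TARGET_APCS_32, a call such as
     print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, 0x8010);
   where mask 0x8010 selects r4 and pc, would emit something like:
     ldmfd sp!, {r4, pc}  */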
6277 static void
6278 print_multi_reg (stream, instr, reg, mask)
6279 FILE * stream;
6280 const char * instr;
6281 int reg;
6282 int mask;
6284 int i;
6285 int not_first = FALSE;
6287 fputc ('\t', stream);
6288 asm_fprintf (stream, instr, reg);
6289 fputs (", {", stream);
6291 for (i = 0; i <= LAST_ARM_REGNUM; i++)
6292 if (mask & (1 << i))
6294 if (not_first)
6295 fprintf (stream, ", ");
6297 asm_fprintf (stream, "%r", i);
6298 not_first = TRUE;
6301 fprintf (stream, "}%s\n", TARGET_APCS_32 ? "" : "^");
6304 /* Output a 'call' insn. */
6306 const char *
6307 output_call (operands)
6308 rtx * operands;
6310 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
6312 if (REGNO (operands[0]) == LR_REGNUM)
6314 operands[0] = gen_rtx_REG (SImode, IP_REGNUM);
6315 output_asm_insn ("mov%?\t%0, %|lr", operands);
6318 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
6320 if (TARGET_INTERWORK)
6321 output_asm_insn ("bx%?\t%0", operands);
6322 else
6323 output_asm_insn ("mov%?\t%|pc, %0", operands);
6325 return "";
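/* For example, on a non-interworking target an indirect call through
   r2 comes out of output_call as something like:
     mov lr, pc
     mov pc, r2
   leaving the return address in lr before control is transferred.  */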
6328 static int
6329 eliminate_lr2ip (x)
6330 rtx * x;
6332 int something_changed = 0;
6333 rtx x0 = * x;
6334 int code = GET_CODE (x0);
6335 int i, j;
6336 const char * fmt;
6338 switch (code)
6340 case REG:
6341 if (REGNO (x0) == LR_REGNUM)
6343 *x = gen_rtx_REG (SImode, IP_REGNUM);
6344 return 1;
6346 return 0;
6347 default:
6348 /* Scan through the sub-elements and change any references there. */
6349 fmt = GET_RTX_FORMAT (code);
6351 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6352 if (fmt[i] == 'e')
6353 something_changed |= eliminate_lr2ip (&XEXP (x0, i));
6354 else if (fmt[i] == 'E')
6355 for (j = 0; j < XVECLEN (x0, i); j++)
6356 something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));
6358 return something_changed;
6362 /* Output a 'call' insn that is a reference in memory. */
6364 const char *
6365 output_call_mem (operands)
6366 rtx * operands;
6368 operands[0] = copy_rtx (operands[0]); /* Be ultra careful. */
6369 /* Handle calls using lr by using ip (which may be clobbered in subr anyway). */
6370 if (eliminate_lr2ip (&operands[0]))
6371 output_asm_insn ("mov%?\t%|ip, %|lr", operands);
6373 if (TARGET_INTERWORK)
6375 output_asm_insn ("ldr%?\t%|ip, %0", operands);
6376 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
6377 output_asm_insn ("bx%?\t%|ip", operands);
6379 else
6381 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
6382 output_asm_insn ("ldr%?\t%|pc, %0", operands);
6385 return "";
6389 /* Output a move from arm registers to an fpu register.
6390 OPERANDS[0] is an fpu register.
6391 OPERANDS[1] is the first register of an arm register pair. */
6393 const char *
6394 output_mov_long_double_fpu_from_arm (operands)
6395 rtx * operands;
6397 int arm_reg0 = REGNO (operands[1]);
6398 rtx ops[3];
6400 if (arm_reg0 == IP_REGNUM)
6401 abort ();
6403 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6404 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6405 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
6407 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
6408 output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);
6410 return "";
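/* For example, with OPERANDS[0] == f0 and OPERANDS[1] == r0 the
   function above bounces the value through the stack, emitting
   something like:
     stmfd sp!, {r0, r1, r2}
     ldfe f0, [sp], #12  */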
6413 /* Output a move from an fpu register to arm registers.
6414 OPERANDS[0] is the first register of an arm register pair.
6415 OPERANDS[1] is an fpu register. */
6417 const char *
6418 output_mov_long_double_arm_from_fpu (operands)
6419 rtx * operands;
6421 int arm_reg0 = REGNO (operands[0]);
6422 rtx ops[3];
6424 if (arm_reg0 == IP_REGNUM)
6425 abort ();
6427 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6428 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6429 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
6431 output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
6432 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
6433 return "";
6436 /* Output a move from arm registers to arm registers of a long double.
6437 OPERANDS[0] is the destination.
6438 OPERANDS[1] is the source. */
6440 const char *
6441 output_mov_long_double_arm_from_arm (operands)
6442 rtx * operands;
6444 /* We have to be careful here because the two might overlap. */
6445 int dest_start = REGNO (operands[0]);
6446 int src_start = REGNO (operands[1]);
6447 rtx ops[2];
6448 int i;
6450 if (dest_start < src_start)
6452 for (i = 0; i < 3; i++)
6454 ops[0] = gen_rtx_REG (SImode, dest_start + i);
6455 ops[1] = gen_rtx_REG (SImode, src_start + i);
6456 output_asm_insn ("mov%?\t%0, %1", ops);
6459 else
6461 for (i = 2; i >= 0; i--)
6463 ops[0] = gen_rtx_REG (SImode, dest_start + i);
6464 ops[1] = gen_rtx_REG (SImode, src_start + i);
6465 output_asm_insn ("mov%?\t%0, %1", ops);
6469 return "";
6473 /* Output a move from arm registers to an fpu register.
6474 OPERANDS[0] is an fpu register.
6475 OPERANDS[1] is the first register of an arm register pair. */
6477 const char *
6478 output_mov_double_fpu_from_arm (operands)
6479 rtx * operands;
6481 int arm_reg0 = REGNO (operands[1]);
6482 rtx ops[2];
6484 if (arm_reg0 == IP_REGNUM)
6485 abort ();
6487 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6488 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6489 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
6490 output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
6491 return "";
6494 /* Output a move from an fpu register to arm registers.
6495 OPERANDS[0] is the first register of an arm register pair.
6496 OPERANDS[1] is an fpu register. */
6498 const char *
6499 output_mov_double_arm_from_fpu (operands)
6500 rtx * operands;
6502 int arm_reg0 = REGNO (operands[0]);
6503 rtx ops[2];
6505 if (arm_reg0 == IP_REGNUM)
6506 abort ();
6508 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6509 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6510 output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
6511 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
6512 return "";
6515 /* Output a move between double words.
6516 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
6517 or MEM<-REG and all MEMs must be offsettable addresses. */
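/* For example, a DImode load into the pair {r0, r1} from the address
   [r2, #4] falls into the PLUS/CONST_INT 4 case below and emits
   something like:
     ldmib r2, {r0, r1}  */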
6519 const char *
6520 output_move_double (operands)
6521 rtx * operands;
6523 enum rtx_code code0 = GET_CODE (operands[0]);
6524 enum rtx_code code1 = GET_CODE (operands[1]);
6525 rtx otherops[3];
6527 if (code0 == REG)
6529 int reg0 = REGNO (operands[0]);
6531 otherops[0] = gen_rtx_REG (SImode, 1 + reg0);
6533 if (code1 == REG)
6535 int reg1 = REGNO (operands[1]);
6536 if (reg1 == IP_REGNUM)
6537 abort ();
6539 /* Ensure the second source is not overwritten. */
6540 if (reg1 == reg0 + (WORDS_BIG_ENDIAN ? -1 : 1))
6541 output_asm_insn ("mov%?\t%Q0, %Q1\n\tmov%?\t%R0, %R1", operands);
6542 else
6543 output_asm_insn ("mov%?\t%R0, %R1\n\tmov%?\t%Q0, %Q1", operands);
6545 else if (code1 == CONST_DOUBLE)
6547 if (GET_MODE (operands[1]) == DFmode)
6549 REAL_VALUE_TYPE r;
6550 long l[2];
6552 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
6553 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
6554 otherops[1] = GEN_INT (l[1]);
6555 operands[1] = GEN_INT (l[0]);
6557 else if (GET_MODE (operands[1]) != VOIDmode)
6558 abort ();
6559 else if (WORDS_BIG_ENDIAN)
6561 otherops[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
6562 operands[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
6564 else
6566 otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
6567 operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
6570 output_mov_immediate (operands);
6571 output_mov_immediate (otherops);
6573 else if (code1 == CONST_INT)
6575 #if HOST_BITS_PER_WIDE_INT > 32
6576 /* If HOST_WIDE_INT is more than 32 bits, the intval tells us
6577 what the upper word is. */
6578 if (WORDS_BIG_ENDIAN)
6580 otherops[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
6581 operands[1] = GEN_INT (INTVAL (operands[1]) >> 32);
6583 else
6585 otherops[1] = GEN_INT (INTVAL (operands[1]) >> 32);
6586 operands[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
6588 #else
6589 /* Sign extend the intval into the high-order word. */
6590 if (WORDS_BIG_ENDIAN)
6592 otherops[1] = operands[1];
6593 operands[1] = (INTVAL (operands[1]) < 0
6594 ? constm1_rtx : const0_rtx);
6596 else
6597 otherops[1] = INTVAL (operands[1]) < 0 ? constm1_rtx : const0_rtx;
6598 #endif
6599 output_mov_immediate (otherops);
6600 output_mov_immediate (operands);
6602 else if (code1 == MEM)
6604 switch (GET_CODE (XEXP (operands[1], 0)))
6606 case REG:
6607 output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
6608 break;
6610 case PRE_INC:
6611 abort (); /* Should never happen now. */
6612 break;
6614 case PRE_DEC:
6615 output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
6616 break;
6618 case POST_INC:
6619 output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
6620 break;
6622 case POST_DEC:
6623 abort (); /* Should never happen now. */
6624 break;
6626 case LABEL_REF:
6627 case CONST:
6628 output_asm_insn ("adr%?\t%0, %1", operands);
6629 output_asm_insn ("ldm%?ia\t%0, %M0", operands);
6630 break;
6632 default:
6633 if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1),
6634 GET_MODE (XEXP (XEXP (operands[1], 0), 1))))
6636 otherops[0] = operands[0];
6637 otherops[1] = XEXP (XEXP (operands[1], 0), 0);
6638 otherops[2] = XEXP (XEXP (operands[1], 0), 1);
6640 if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
6642 if (GET_CODE (otherops[2]) == CONST_INT)
6644 switch (INTVAL (otherops[2]))
6646 case -8:
6647 output_asm_insn ("ldm%?db\t%1, %M0", otherops);
6648 return "";
6649 case -4:
6650 output_asm_insn ("ldm%?da\t%1, %M0", otherops);
6651 return "";
6652 case 4:
6653 output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
6654 return "";
6657 if (!(const_ok_for_arm (INTVAL (otherops[2]))))
6658 output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
6659 else
6660 output_asm_insn ("add%?\t%0, %1, %2", otherops);
6662 else
6663 output_asm_insn ("add%?\t%0, %1, %2", otherops);
6665 else
6666 output_asm_insn ("sub%?\t%0, %1, %2", otherops);
6668 return "ldm%?ia\t%0, %M0";
6670 else
6672 otherops[1] = adjust_address (operands[1], VOIDmode, 4);
6673 /* Take care of overlapping base/data reg. */
6674 if (reg_mentioned_p (operands[0], operands[1]))
6676 output_asm_insn ("ldr%?\t%0, %1", otherops);
6677 output_asm_insn ("ldr%?\t%0, %1", operands);
6679 else
6681 output_asm_insn ("ldr%?\t%0, %1", operands);
6682 output_asm_insn ("ldr%?\t%0, %1", otherops);
6687 else
6688 abort (); /* Constraints should prevent this. */
6690 else if (code0 == MEM && code1 == REG)
6692 if (REGNO (operands[1]) == IP_REGNUM)
6693 abort ();
6695 switch (GET_CODE (XEXP (operands[0], 0)))
6697 case REG:
6698 output_asm_insn ("stm%?ia\t%m0, %M1", operands);
6699 break;
6701 case PRE_INC:
6702 abort (); /* Should never happen now. */
6703 break;
6705 case PRE_DEC:
6706 output_asm_insn ("stm%?db\t%m0!, %M1", operands);
6707 break;
6709 case POST_INC:
6710 output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
6711 break;
6713 case POST_DEC:
6714 abort (); /* Should never happen now. */
6715 break;
6717 case PLUS:
6718 if (GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT)
6720 switch (INTVAL (XEXP (XEXP (operands[0], 0), 1)))
6722 case -8:
6723 output_asm_insn ("stm%?db\t%m0, %M1", operands);
6724 return "";
6726 case -4:
6727 output_asm_insn ("stm%?da\t%m0, %M1", operands);
6728 return "";
6730 case 4:
6731 output_asm_insn ("stm%?ib\t%m0, %M1", operands);
6732 return "";
6735 /* Fall through */
6737 default:
6738 otherops[0] = adjust_address (operands[0], VOIDmode, 4);
6739 otherops[1] = gen_rtx_REG (SImode, 1 + REGNO (operands[1]));
6740 output_asm_insn ("str%?\t%1, %0", operands);
6741 output_asm_insn ("str%?\t%1, %0", otherops);
6744 else
6745 /* Constraints should prevent this. */
6746 abort ();
6748 return "";
6752 /* Output an arbitrary MOV reg, #n.
6753 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
6755 const char *
6756 output_mov_immediate (operands)
6757 rtx * operands;
6759 HOST_WIDE_INT n = INTVAL (operands[1]);
6761 /* Try to use one MOV. */
6762 if (const_ok_for_arm (n))
6763 output_asm_insn ("mov%?\t%0, %1", operands);
6765 /* Try to use one MVN. */
6766 else if (const_ok_for_arm (~n))
6768 operands[1] = GEN_INT (~n);
6769 output_asm_insn ("mvn%?\t%0, %1", operands);
6771 else
6773 int n_ones = 0;
6774 int i;
6776 /* If all else fails, make it out of ORRs or BICs as appropriate. */
6777 for (i = 0; i < 32; i ++)
6778 if (n & 1 << i)
6779 n_ones ++;
6781 if (n_ones > 16) /* Shorter to use MVN with BIC in this case. */
6782 output_multi_immediate (operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1, ~ n);
6783 else
6784 output_multi_immediate (operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1, n);
6787 return "";
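/* As a worked example: 0xF00F is not a valid ARM immediate and neither
   is its complement, so output_mov_immediate falls back on
   output_multi_immediate, which peels off up to eight bits at a time
   (starting at an even bit position) and would emit something like:
     mov r0, #15
     orr r0, r0, #61440
   that is, #0xF followed by #0xF000.  */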
6790 /* Output an ADD r, s, #n where n may be too big for one instruction.
6791 If adding zero to one register, output nothing. */
6793 const char *
6794 output_add_immediate (operands)
6795 rtx * operands;
6797 HOST_WIDE_INT n = INTVAL (operands[2]);
6799 if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
6801 if (n < 0)
6802 output_multi_immediate (operands,
6803 "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
6804 -n);
6805 else
6806 output_multi_immediate (operands,
6807 "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
6811 return "";
6814 /* Output a multiple immediate operation.
6815 OPERANDS is the vector of operands referred to in the output patterns.
6816 INSTR1 is the output pattern to use for the first constant.
6817 INSTR2 is the output pattern to use for subsequent constants.
6818 IMMED_OP is the index of the constant slot in OPERANDS.
6819 N is the constant value. */
6821 static const char *
6822 output_multi_immediate (operands, instr1, instr2, immed_op, n)
6823 rtx * operands;
6824 const char * instr1;
6825 const char * instr2;
6826 int immed_op;
6827 HOST_WIDE_INT n;
6829 #if HOST_BITS_PER_WIDE_INT > 32
6830 n &= 0xffffffff;
6831 #endif
6833 if (n == 0)
6835 /* Quick and easy output. */
6836 operands[immed_op] = const0_rtx;
6837 output_asm_insn (instr1, operands);
6839 else
6841 int i;
6842 const char * instr = instr1;
6844 /* Note that n is never zero here (which would give no output). */
6845 for (i = 0; i < 32; i += 2)
6847 if (n & (3 << i))
6849 operands[immed_op] = GEN_INT (n & (255 << i));
6850 output_asm_insn (instr, operands);
6851 instr = instr2;
6852 i += 6;
6857 return "";
6860 /* Return the appropriate ARM instruction for the operation code.
6861 The returned result should not be overwritten. OP is the rtx of the
6862 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
6863 was shifted. */
6865 const char *
6866 arithmetic_instr (op, shift_first_arg)
6867 rtx op;
6868 int shift_first_arg;
6870 switch (GET_CODE (op))
6872 case PLUS:
6873 return "add";
6875 case MINUS:
6876 return shift_first_arg ? "rsb" : "sub";
6878 case IOR:
6879 return "orr";
6881 case XOR:
6882 return "eor";
6884 case AND:
6885 return "and";
6887 default:
6888 abort ();
6892 /* Ensure valid constant shifts and return the appropriate shift mnemonic
6893 for the operation code. The returned result should not be overwritten.
6894 OP is the rtx code of the shift.
6895 On exit, *AMOUNTP will be -1 if the shift is by a register, or the
6896 constant amount of the shift otherwise. */
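/* For instance, (ashiftrt (reg) (const_int 2)) yields "asr" with
   *AMOUNTP set to 2, while (mult (reg) (const_int 8)) is treated as a
   shift and yields "asl" with *AMOUNTP set to int_log2 (8), i.e. 3.  */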
6898 static const char *
6899 shift_op (op, amountp)
6900 rtx op;
6901 HOST_WIDE_INT *amountp;
6903 const char * mnem;
6904 enum rtx_code code = GET_CODE (op);
6906 if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
6907 *amountp = -1;
6908 else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
6909 *amountp = INTVAL (XEXP (op, 1));
6910 else
6911 abort ();
6913 switch (code)
6915 case ASHIFT:
6916 mnem = "asl";
6917 break;
6919 case ASHIFTRT:
6920 mnem = "asr";
6921 break;
6923 case LSHIFTRT:
6924 mnem = "lsr";
6925 break;
6927 case ROTATERT:
6928 mnem = "ror";
6929 break;
6931 case MULT:
6932 /* We never have to worry about the amount being other than a
6933 power of 2, since this case can never be reloaded from a reg. */
6934 if (*amountp != -1)
6935 *amountp = int_log2 (*amountp);
6936 else
6937 abort ();
6938 return "asl";
6940 default:
6941 abort ();
6944 if (*amountp != -1)
6946 /* This is not 100% correct, but follows from the desire to merge
6947 multiplication by a power of 2 with the recognizer for a
6948 shift. >=32 is not a valid shift for "asl", so we must try to
6949 output a shift that produces the correct arithmetical result.
6950 Using lsr #32 is identical except for the fact that the carry bit
6951 is not set correctly if we set the flags; but we never use the
6952 carry bit from such an operation, so we can ignore that. */
6953 if (code == ROTATERT)
6954 /* Rotate is just modulo 32. */
6955 *amountp &= 31;
6956 else if (*amountp != (*amountp & 31))
6958 if (code == ASHIFT)
6959 mnem = "lsr";
6960 *amountp = 32;
6963 /* Shifts of 0 are no-ops. */
6964 if (*amountp == 0)
6965 return NULL;
6968 return mnem;
6971 /* Obtain the shift count from POWER, which must be a power of two. */
6973 static HOST_WIDE_INT
6974 int_log2 (power)
6975 HOST_WIDE_INT power;
6977 HOST_WIDE_INT shift = 0;
6979 while ((((HOST_WIDE_INT) 1 << shift) & power) == 0)
6981 if (shift > 31)
6982 abort ();
6983 shift ++;
6986 return shift;
6989 /* Output a .ascii pseudo-op, keeping track of lengths. This is because
6990 /bin/as is horribly restrictive. */
6991 #define MAX_ASCII_LEN 51
6993 void
6994 output_ascii_pseudo_op (stream, p, len)
6995 FILE * stream;
6996 const unsigned char * p;
6997 int len;
6999 int i;
7000 int len_so_far = 0;
7002 fputs ("\t.ascii\t\"", stream);
7004 for (i = 0; i < len; i++)
7006 int c = p[i];
7008 if (len_so_far >= MAX_ASCII_LEN)
7010 fputs ("\"\n\t.ascii\t\"", stream);
7011 len_so_far = 0;
7014 switch (c)
7016 case TARGET_TAB:
7017 fputs ("\\t", stream);
7018 len_so_far += 2;
7019 break;
7021 case TARGET_FF:
7022 fputs ("\\f", stream);
7023 len_so_far += 2;
7024 break;
7026 case TARGET_BS:
7027 fputs ("\\b", stream);
7028 len_so_far += 2;
7029 break;
7031 case TARGET_CR:
7032 fputs ("\\r", stream);
7033 len_so_far += 2;
7034 break;
7036 case TARGET_NEWLINE:
7037 fputs ("\\n", stream);
7038 c = p [i + 1];
7039 if ((c >= ' ' && c <= '~')
7040 || c == TARGET_TAB)
7041 /* This is a good place for a line break. */
7042 len_so_far = MAX_ASCII_LEN;
7043 else
7044 len_so_far += 2;
7045 break;
7047 case '\"':
7048 case '\\':
7049 putc ('\\', stream);
7050 len_so_far++;
7051 /* drop through. */
7053 default:
7054 if (c >= ' ' && c <= '~')
7056 putc (c, stream);
7057 len_so_far++;
7059 else
7061 fprintf (stream, "\\%03o", c);
7062 len_so_far += 4;
7064 break;
7068 fputs ("\"\n", stream);
7071 /* Compute the register save mask for registers 0 through 12
7072 inclusive. This code is used by both arm_compute_save_reg_mask
7073 and arm_compute_initial_elimination_offset. */
7075 static unsigned long
7076 arm_compute_save_reg0_reg12_mask ()
7078 unsigned long func_type = arm_current_func_type ();
7079 unsigned int save_reg_mask = 0;
7080 unsigned int reg;
7082 if (IS_INTERRUPT (func_type))
7084 unsigned int max_reg;
7085 /* Interrupt functions must not corrupt any registers,
7086 even call clobbered ones. If this is a leaf function
7087 we can just examine the registers used by the RTL, but
7088 otherwise we have to assume that whatever function is
7089 called might clobber anything, and so we have to save
7090 all the call-clobbered registers as well. */
7091 if (ARM_FUNC_TYPE (func_type) == ARM_FT_FIQ)
7092 /* FIQ handlers have registers r8 - r12 banked, so
7093 we only need to check r0 - r7. Normal ISRs only
7094 bank r14 and r15, so we must check up to r12.
7095 r13 is the stack pointer which is always preserved,
7096 so we do not need to consider it here. */
7097 max_reg = 7;
7098 else
7099 max_reg = 12;
7101 for (reg = 0; reg <= max_reg; reg++)
7102 if (regs_ever_live[reg]
7103 || (! current_function_is_leaf && call_used_regs [reg]))
7104 save_reg_mask |= (1 << reg);
7106 else
7108 /* In the normal case we only need to save those registers
7109 which are call saved and which are used by this function. */
7110 for (reg = 0; reg <= 10; reg++)
7111 if (regs_ever_live[reg] && ! call_used_regs [reg])
7112 save_reg_mask |= (1 << reg);
7114 /* Handle the frame pointer as a special case. */
7115 if (! TARGET_APCS_FRAME
7116 && ! frame_pointer_needed
7117 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
7118 && ! call_used_regs[HARD_FRAME_POINTER_REGNUM])
7119 save_reg_mask |= 1 << HARD_FRAME_POINTER_REGNUM;
7121 /* If we aren't loading the PIC register,
7122 don't stack it even though it may be live. */
7123 if (flag_pic
7124 && ! TARGET_SINGLE_PIC_BASE
7125 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
7126 save_reg_mask |= 1 << PIC_OFFSET_TABLE_REGNUM;
7129 return save_reg_mask;
7132 /* Compute a bit mask of which registers need to be
7133 saved on the stack for the current function. */
7135 static unsigned long
7136 arm_compute_save_reg_mask ()
7138 unsigned int save_reg_mask = 0;
7139 unsigned long func_type = arm_current_func_type ();
7141 if (IS_NAKED (func_type))
7142 /* This should never really happen. */
7143 return 0;
7145 /* If we are creating a stack frame, then we must save the frame pointer,
7146 IP (which will hold the old stack pointer), LR and the PC. */
7147 if (frame_pointer_needed)
7148 save_reg_mask |=
7149 (1 << ARM_HARD_FRAME_POINTER_REGNUM)
7150 | (1 << IP_REGNUM)
7151 | (1 << LR_REGNUM)
7152 | (1 << PC_REGNUM);
7154 /* Volatile functions do not return, so there
7155 is no need to save any other registers. */
7156 if (IS_VOLATILE (func_type))
7157 return save_reg_mask;
7159 save_reg_mask |= arm_compute_save_reg0_reg12_mask ();
7161 /* Decide if we need to save the link register.
7162 Interrupt routines have their own banked link register,
7163 so they never need to save it.
7164 Otherwise if we do not use the link register we do not need to save
7165 it. If we are pushing other registers onto the stack however, we
7166 can save an instruction in the epilogue by pushing the link register
7167 now and then popping it back into the PC. This incurs extra memory
7168 accesses though, so we only do it when optimising for size, and only
7169 if we know that we will not need a fancy return sequence. */
7170 if (regs_ever_live [LR_REGNUM]
7171 || (save_reg_mask
7172 && optimize_size
7173 && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL))
7174 save_reg_mask |= 1 << LR_REGNUM;
7176 if (cfun->machine->lr_save_eliminated)
7177 save_reg_mask &= ~ (1 << LR_REGNUM);
7179 return save_reg_mask;
7182 /* Generate a function exit sequence. If REALLY_RETURN is true, then do
7183 everything bar the final return instruction. */
7185 const char *
7186 output_return_instruction (operand, really_return, reverse)
7187 rtx operand;
7188 int really_return;
7189 int reverse;
7191 char conditional[10];
7192 char instr[100];
7193 int reg;
7194 unsigned long live_regs_mask;
7195 unsigned long func_type;
7197 func_type = arm_current_func_type ();
7199 if (IS_NAKED (func_type))
7200 return "";
7202 if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
7204 /* If this function was declared non-returning, and we have found a tail
7205 call, then we have to trust that the called function won't return. */
7206 if (really_return)
7208 rtx ops[2];
7210 /* Otherwise, trap an attempted return by aborting. */
7211 ops[0] = operand;
7212 ops[1] = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)"
7213 : "abort");
7214 assemble_external_libcall (ops[1]);
7215 output_asm_insn (reverse ? "bl%D0\t%a1" : "bl%d0\t%a1", ops);
7218 return "";
7221 if (current_function_calls_alloca && !really_return)
7222 abort ();
7224 sprintf (conditional, "%%?%%%c0", reverse ? 'D' : 'd');
7226 return_used_this_function = 1;
7228 live_regs_mask = arm_compute_save_reg_mask ();
7230 if (live_regs_mask)
7232 const char * return_reg;
7234 /* If we do not have any special requirements for function exit
7235 (eg interworking, or ISR) then we can load the return address
7236 directly into the PC. Otherwise we must load it into LR. */
7237 if (really_return
7238 && ! TARGET_INTERWORK)
7239 return_reg = reg_names[PC_REGNUM];
7240 else
7241 return_reg = reg_names[LR_REGNUM];
7243 if ((live_regs_mask & (1 << IP_REGNUM)) == (1 << IP_REGNUM))
7244 /* There are two possible reasons for the IP register being saved.
7245 Either a stack frame was created, in which case IP contains the
7246 old stack pointer, or an ISR routine corrupted it. If this is an
7247 ISR routine then just restore IP, otherwise restore IP into SP. */
7248 if (! IS_INTERRUPT (func_type))
7250 live_regs_mask &= ~ (1 << IP_REGNUM);
7251 live_regs_mask |= (1 << SP_REGNUM);
7254 /* On some ARM architectures it is faster to use LDR rather than
7255 LDM to load a single register. On other architectures, the
7256 cost is the same. In 26 bit mode, or for exception handlers,
7257 we have to use LDM to load the PC so that the CPSR is also
7258 restored. */
7259 for (reg = 0; reg <= LAST_ARM_REGNUM; reg++)
7261 if (live_regs_mask == (unsigned int)(1 << reg))
7262 break;
7264 if (reg <= LAST_ARM_REGNUM
7265 && (reg != LR_REGNUM
7266 || ! really_return
7267 || (TARGET_APCS_32 && ! IS_INTERRUPT (func_type))))
7269 sprintf (instr, "ldr%s\t%%|%s, [%%|sp], #4", conditional,
7270 (reg == LR_REGNUM) ? return_reg : reg_names[reg]);
7272 else
7274 char *p;
7275 int first = 1;
7277 /* Generate the load multiple instruction to restore the registers. */
7278 if (frame_pointer_needed)
7279 sprintf (instr, "ldm%sea\t%%|fp, {", conditional);
7280 else
7281 sprintf (instr, "ldm%sfd\t%%|sp!, {", conditional);
7283 p = instr + strlen (instr);
7285 for (reg = 0; reg <= SP_REGNUM; reg++)
7286 if (live_regs_mask & (1 << reg))
7288 int l = strlen (reg_names[reg]);
7290 if (first)
7291 first = 0;
7292 else
7294 memcpy (p, ", ", 2);
7295 p += 2;
7298 memcpy (p, "%|", 2);
7299 memcpy (p + 2, reg_names[reg], l);
7300 p += l + 2;
7303 if (live_regs_mask & (1 << LR_REGNUM))
7305 int l = strlen (return_reg);
7307 if (! first)
7309 memcpy (p, ", ", 2);
7310 p += 2;
7313 memcpy (p, "%|", 2);
7314 memcpy (p + 2, return_reg, l);
7315 strcpy (p + 2 + l, ((TARGET_APCS_32
7316 && !IS_INTERRUPT (func_type))
7317 || !really_return)
7318 ? "}" : "}^");
7320 else
7321 strcpy (p, "}");
7324 output_asm_insn (instr, & operand);
7326 /* See if we need to generate an extra instruction to
7327 perform the actual function return. */
7328 if (really_return
7329 && func_type != ARM_FT_INTERWORKED
7330 && (live_regs_mask & (1 << LR_REGNUM)) != 0)
7332 /* The return has already been handled
7333 by loading the LR into the PC. */
7334 really_return = 0;
7338 if (really_return)
7340 switch ((int) ARM_FUNC_TYPE (func_type))
7342 case ARM_FT_ISR:
7343 case ARM_FT_FIQ:
7344 sprintf (instr, "sub%ss\t%%|pc, %%|lr, #4", conditional);
7345 break;
7347 case ARM_FT_INTERWORKED:
7348 sprintf (instr, "bx%s\t%%|lr", conditional);
7349 break;
7351 case ARM_FT_EXCEPTION:
7352 sprintf (instr, "mov%ss\t%%|pc, %%|lr", conditional);
7353 break;
7355 default:
7356 /* ARMv5 implementations always provide BX, so interworking
7357 is the default unless APCS-26 is in use. */
7358 if ((insn_flags & FL_ARCH5) != 0 && TARGET_APCS_32)
7359 sprintf (instr, "bx%s\t%%|lr", conditional);
7360 else
7361 sprintf (instr, "mov%s%s\t%%|pc, %%|lr",
7362 conditional, TARGET_APCS_32 ? "" : "s");
7363 break;
7366 output_asm_insn (instr, & operand);
7369 return "";
7372 /* Write the function name into the code section, directly preceding
7373 the function prologue.
7375 Code will be output similar to this:
7377 .ascii "arm_poke_function_name", 0
7378 .align
7380 .word 0xff000000 + (t1 - t0)
7381 arm_poke_function_name
7382 mov ip, sp
7383 stmfd sp!, {fp, ip, lr, pc}
7384 sub fp, ip, #4
7386 When performing a stack backtrace, code can inspect the value
7387 of 'pc' stored at 'fp' + 0. If the trace function then looks
7388 at location pc - 12 and the top 8 bits are set, then we know
7389 that there is a function name embedded immediately preceding this
7390 location, whose length is ((pc[-3]) & ~0xff000000).
7392 We assume that pc is declared as a pointer to an unsigned long.
7394 It is of no benefit to output the function name if we are assembling
7395 a leaf function. These function types will not contain a stack
7396 backtrace structure, so it is not possible to determine the
7397 function name. */
7399 void
7400 arm_poke_function_name (stream, name)
7401 FILE * stream;
7402 const char * name;
7404 unsigned long alignlength;
7405 unsigned long length;
7406 rtx x;
7408 length = strlen (name) + 1;
7409 alignlength = ROUND_UP (length);
7411 ASM_OUTPUT_ASCII (stream, name, length);
7412 ASM_OUTPUT_ALIGN (stream, 2);
7413 x = GEN_INT ((unsigned HOST_WIDE_INT) 0xff000000 + alignlength);
7414 assemble_aligned_integer (UNITS_PER_WORD, x);
7417 /* Place some comments into the assembler stream
7418 describing the current function. */
7420 static void
7421 arm_output_function_prologue (f, frame_size)
7422 FILE * f;
7423 HOST_WIDE_INT frame_size;
7425 unsigned long func_type;
7427 if (!TARGET_ARM)
7429 thumb_output_function_prologue (f, frame_size);
7430 return;
7433 /* Sanity check. */
7434 if (arm_ccfsm_state || arm_target_insn)
7435 abort ();
7437 func_type = arm_current_func_type ();
7439 switch ((int) ARM_FUNC_TYPE (func_type))
7441 default:
7442 case ARM_FT_NORMAL:
7443 break;
7444 case ARM_FT_INTERWORKED:
7445 asm_fprintf (f, "\t%@ Function supports interworking.\n");
7446 break;
7447 case ARM_FT_EXCEPTION_HANDLER:
7448 asm_fprintf (f, "\t%@ C++ Exception Handler.\n");
7449 break;
7450 case ARM_FT_ISR:
7451 asm_fprintf (f, "\t%@ Interrupt Service Routine.\n");
7452 break;
7453 case ARM_FT_FIQ:
7454 asm_fprintf (f, "\t%@ Fast Interrupt Service Routine.\n");
7455 break;
7456 case ARM_FT_EXCEPTION:
7457 asm_fprintf (f, "\t%@ ARM Exception Handler.\n");
7458 break;
7461 if (IS_NAKED (func_type))
7462 asm_fprintf (f, "\t%@ Naked Function: prologue and epilogue provided by programmer.\n");
7464 if (IS_VOLATILE (func_type))
7465 asm_fprintf (f, "\t%@ Volatile: function does not return.\n");
7467 if (IS_NESTED (func_type))
7468 asm_fprintf (f, "\t%@ Nested: function declared inside another function.\n");
7470 asm_fprintf (f, "\t%@ args = %d, pretend = %d, frame = %d\n",
7471 current_function_args_size,
7472 current_function_pretend_args_size, frame_size);
7474 asm_fprintf (f, "\t%@ frame_needed = %d, uses_anonymous_args = %d\n",
7475 frame_pointer_needed,
7476 cfun->machine->uses_anonymous_args);
7478 if (cfun->machine->lr_save_eliminated)
7479 asm_fprintf (f, "\t%@ link register save eliminated.\n");
7481 #ifdef AOF_ASSEMBLER
7482 if (flag_pic)
7483 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, PIC_OFFSET_TABLE_REGNUM);
7484 #endif
7486 return_used_this_function = 0;
7489 const char *
7490 arm_output_epilogue (really_return)
7491 int really_return;
7493 int reg;
7494 unsigned long saved_regs_mask;
7495 unsigned long func_type;
7496 /* Floats_offset is the offset from the "virtual" frame. In an APCS
7497 frame that is $fp + 4 for a non-variadic function. */
7498 int floats_offset = 0;
7499 rtx operands[3];
7500 int frame_size = get_frame_size ();
7501 FILE * f = asm_out_file;
7502 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
7504 /* If we have already generated the return instruction
7505 then it is futile to generate anything else. */
7506 if (use_return_insn (FALSE) && return_used_this_function)
7507 return "";
7509 func_type = arm_current_func_type ();
7511 if (IS_NAKED (func_type))
7512 /* Naked functions don't have epilogues. */
7513 return "";
7515 if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
7517 rtx op;
7519 /* A volatile function should never return. Call abort. */
7520 op = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)" : "abort");
7521 assemble_external_libcall (op);
7522 output_asm_insn ("bl\t%a0", &op);
7524 return "";
7527 if (ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER
7528 && ! really_return)
7529 /* If we are throwing an exception, then we really must
7530 be doing a return, so we can't tail-call. */
7531 abort ();
7533 saved_regs_mask = arm_compute_save_reg_mask ();
7535 /* XXX We should adjust floats_offset for any anonymous args, and then
7536 re-adjust vfp_offset below to compensate. */
7538 /* Compute how far away the floats will be. */
7539 for (reg = 0; reg <= LAST_ARM_REGNUM; reg ++)
7540 if (saved_regs_mask & (1 << reg))
7541 floats_offset += 4;
7543 if (frame_pointer_needed)
7545 int vfp_offset = 4;
7547 if (arm_fpu_arch == FP_SOFT2)
7549 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
7550 if (regs_ever_live[reg] && !call_used_regs[reg])
7552 floats_offset += 12;
7553 asm_fprintf (f, "\tldfe\t%r, [%r, #-%d]\n",
7554 reg, FP_REGNUM, floats_offset - vfp_offset);
7557 else
7559 int start_reg = LAST_ARM_FP_REGNUM;
7561 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
7563 if (regs_ever_live[reg] && !call_used_regs[reg])
7565 floats_offset += 12;
7567 /* We can't unstack more than four registers at once. */
7568 if (start_reg - reg == 3)
7570 asm_fprintf (f, "\tlfm\t%r, 4, [%r, #-%d]\n",
7571 reg, FP_REGNUM, floats_offset - vfp_offset);
7572 start_reg = reg - 1;
7575 else
7577 if (reg != start_reg)
7578 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
7579 reg + 1, start_reg - reg,
7580 FP_REGNUM, floats_offset - vfp_offset);
7581 start_reg = reg - 1;
7585 /* Just in case the last register checked also needs unstacking. */
7586 if (reg != start_reg)
7587 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
7588 reg + 1, start_reg - reg,
7589 FP_REGNUM, floats_offset - vfp_offset);
7592 /* saved_regs_mask should contain the IP, which at the time of stack
7593 frame generation actually contains the old stack pointer. So a
7594 quick way to unwind the stack is just pop the IP register directly
7595 into the stack pointer. */
7596 if ((saved_regs_mask & (1 << IP_REGNUM)) == 0)
7597 abort ();
7598 saved_regs_mask &= ~ (1 << IP_REGNUM);
7599 saved_regs_mask |= (1 << SP_REGNUM);
7601 /* There are two registers left in saved_regs_mask - LR and PC. We
7602 only need to restore the LR register (the return address), but to
7603 save time we can load it directly into the PC, unless we need a
7604 special function exit sequence, or we are not really returning. */
7605 if (really_return && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL)
7606 /* Delete the LR from the register mask, so that the LR on
7607 the stack is loaded into the PC in the register mask. */
7608 saved_regs_mask &= ~ (1 << LR_REGNUM);
7609 else
7610 saved_regs_mask &= ~ (1 << PC_REGNUM);
7612 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, saved_regs_mask);
7614 if (IS_INTERRUPT (func_type))
7615 /* Interrupt handlers will have pushed the
7616 IP onto the stack, so restore it now. */
7617 print_multi_reg (f, "ldmfd\t%r", SP_REGNUM, 1 << IP_REGNUM);
7619 else
7621 /* Restore stack pointer if necessary. */
7622 if (frame_size + current_function_outgoing_args_size != 0)
7624 operands[0] = operands[1] = stack_pointer_rtx;
7625 operands[2] = GEN_INT (frame_size
7626 + current_function_outgoing_args_size);
7627 output_add_immediate (operands);
7630 if (arm_fpu_arch == FP_SOFT2)
7632 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
7633 if (regs_ever_live[reg] && !call_used_regs[reg])
7634 asm_fprintf (f, "\tldfe\t%r, [%r], #12\n",
7635 reg, SP_REGNUM);
7637 else
7639 int start_reg = FIRST_ARM_FP_REGNUM;
7641 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
7643 if (regs_ever_live[reg] && !call_used_regs[reg])
7645 if (reg - start_reg == 3)
7647 asm_fprintf (f, "\tlfmfd\t%r, 4, [%r]!\n",
7648 start_reg, SP_REGNUM);
7649 start_reg = reg + 1;
7652 else
7654 if (reg != start_reg)
7655 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
7656 start_reg, reg - start_reg,
7657 SP_REGNUM);
7659 start_reg = reg + 1;
7663 /* Just in case the last register checked also needs unstacking. */
7664 if (reg != start_reg)
7665 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
7666 start_reg, reg - start_reg, SP_REGNUM);
7669 /* If we can, restore the LR into the PC. */
7670 if (ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
7671 && really_return
7672 && current_function_pretend_args_size == 0
7673 && saved_regs_mask & (1 << LR_REGNUM))
7675 saved_regs_mask &= ~ (1 << LR_REGNUM);
7676 saved_regs_mask |= (1 << PC_REGNUM);
7679 /* Load the registers off the stack. If we only have one register
7680 to load use the LDR instruction - it is faster. */
7681 if (saved_regs_mask == (1 << LR_REGNUM))
7683 /* The exception handler ignores the LR, so we do
7684 not really need to load it off the stack. */
7685 if (eh_ofs)
7686 asm_fprintf (f, "\tadd\t%r, %r, #4\n", SP_REGNUM, SP_REGNUM);
7687 else
7688 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM, SP_REGNUM);
7690 else if (saved_regs_mask)
7691 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, saved_regs_mask);
7693 if (current_function_pretend_args_size)
7695 /* Unwind the pre-pushed regs. */
7696 operands[0] = operands[1] = stack_pointer_rtx;
7697 operands[2] = GEN_INT (current_function_pretend_args_size);
7698 output_add_immediate (operands);
7702 #if 0
7703 if (ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER)
7704 /* Adjust the stack to remove the exception handler stuff. */
7705 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
7706 REGNO (eh_ofs));
7707 #endif
7709 if (! really_return
7710 || (ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
7711 && current_function_pretend_args_size == 0
7712 && saved_regs_mask & (1 << PC_REGNUM)))
7713 return "";
7715 /* Generate the return instruction. */
7716 switch ((int) ARM_FUNC_TYPE (func_type))
7718 case ARM_FT_EXCEPTION_HANDLER:
7719 /* Even in 26-bit mode we do a mov (rather than a movs)
7720 because we don't have the PSR bits set in the address. */
7721 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, EXCEPTION_LR_REGNUM);
7722 break;
7724 case ARM_FT_ISR:
7725 case ARM_FT_FIQ:
7726 asm_fprintf (f, "\tsubs\t%r, %r, #4\n", PC_REGNUM, LR_REGNUM);
7727 break;
7729 case ARM_FT_EXCEPTION:
7730 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
7731 break;
7733 case ARM_FT_INTERWORKED:
7734 asm_fprintf (f, "\tbx\t%r\n", LR_REGNUM);
7735 break;
7737 default:
7738 if (frame_pointer_needed)
7739 /* If we used the frame pointer then the return address
7740 will have been loaded off the stack directly into the
7741 PC, so there is no need to issue a MOV instruction
7742 here. */
7744 else if (current_function_pretend_args_size == 0
7745 && (saved_regs_mask & (1 << LR_REGNUM)))
7746 /* Similarly we may have been able to load LR into the PC
7747 even if we did not create a stack frame. */
7749 else if (TARGET_APCS_32)
7750 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, LR_REGNUM);
7751 else
7752 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
7753 break;
7756 return "";
7759 static void
7760 arm_output_function_epilogue (file, frame_size)
7761 FILE *file ATTRIBUTE_UNUSED;
7762 HOST_WIDE_INT frame_size;
7764 if (TARGET_THUMB)
7766 /* ??? Probably not safe to set this here, since it assumes that a
7767 function will be emitted as assembly immediately after we generate
7768 RTL for it. This does not happen for inline functions. */
7769 return_used_this_function = 0;
7771 else
7773 if (use_return_insn (FALSE)
7774 && return_used_this_function
7775 && (frame_size + current_function_outgoing_args_size) != 0
7776 && !frame_pointer_needed)
7777 abort ();
7779 /* Reset the ARM-specific per-function variables. */
7780 after_arm_reorg = 0;
7784 /* Generate and emit an insn that we will recognize as a push_multi.
7785 Unfortunately, since this insn does not reflect very well the actual
7786 semantics of the operation, we need to annotate the insn for the benefit
7787 of DWARF2 frame unwind information. */
7789 static rtx
7790 emit_multi_reg_push (mask)
7791 int mask;
7793 int num_regs = 0;
7794 int num_dwarf_regs;
7795 int i, j;
7796 rtx par;
7797 rtx dwarf;
7798 int dwarf_par_index;
7799 rtx tmp, reg;
7801 for (i = 0; i <= LAST_ARM_REGNUM; i++)
7802 if (mask & (1 << i))
7803 num_regs++;
7805 if (num_regs == 0 || num_regs > 16)
7806 abort ();
7808 /* We don't record the PC in the dwarf frame information. */
7809 num_dwarf_regs = num_regs;
7810 if (mask & (1 << PC_REGNUM))
7811 num_dwarf_regs--;
7813 /* For the body of the insn we are going to generate an UNSPEC in
7814 parallel with several USEs. This allows the insn to be recognised
7815 by the push_multi pattern in the arm.md file. The insn looks
7816 something like this:
7818 (parallel [
7819 (set (mem:BLK (pre_dec:BLK (reg:SI sp)))
7820 (unspec:BLK [(reg:SI r4)] UNSPEC_PUSH_MULT))
7821 (use (reg:SI 11 fp))
7822 (use (reg:SI 12 ip))
7823 (use (reg:SI 14 lr))
7824 (use (reg:SI 15 pc))
7827 For the frame note however, we try to be more explicit and actually
7828 show each register being stored into the stack frame, plus a (single)
7829 decrement of the stack pointer. We do it this way in order to be
7830 friendly to the stack unwinding code, which only wants to see a single
7831 stack decrement per instruction. The RTL we generate for the note looks
7832 something like this:
7834 (sequence [
7835 (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int -20)))
7836 (set (mem:SI (reg:SI sp)) (reg:SI r4))
7837 (set (mem:SI (plus:SI (reg:SI sp) (const_int 4))) (reg:SI fp))
7838 (set (mem:SI (plus:SI (reg:SI sp) (const_int 8))) (reg:SI ip))
7839 (set (mem:SI (plus:SI (reg:SI sp) (const_int 12))) (reg:SI lr))
7842 This sequence is used both by the code to support stack unwinding for
7843 exception handlers and the code to generate dwarf2 frame debugging. */
7845 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_regs));
7846 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (num_dwarf_regs + 1));
7847 dwarf_par_index = 1;
7849 for (i = 0; i <= LAST_ARM_REGNUM; i++)
7851 if (mask & (1 << i))
7853 reg = gen_rtx_REG (SImode, i);
7855 XVECEXP (par, 0, 0)
7856 = gen_rtx_SET (VOIDmode,
7857 gen_rtx_MEM (BLKmode,
7858 gen_rtx_PRE_DEC (BLKmode,
7859 stack_pointer_rtx)),
7860 gen_rtx_UNSPEC (BLKmode,
7861 gen_rtvec (1, reg),
7862 UNSPEC_PUSH_MULT));
7864 if (i != PC_REGNUM)
7866 tmp = gen_rtx_SET (VOIDmode,
7867 gen_rtx_MEM (SImode, stack_pointer_rtx),
7868 reg);
7869 RTX_FRAME_RELATED_P (tmp) = 1;
7870 XVECEXP (dwarf, 0, dwarf_par_index) = tmp;
7871 dwarf_par_index++;
7874 break;
7878 for (j = 1, i++; j < num_regs; i++)
7880 if (mask & (1 << i))
7882 reg = gen_rtx_REG (SImode, i);
7884 XVECEXP (par, 0, j) = gen_rtx_USE (VOIDmode, reg);
7886 if (i != PC_REGNUM)
7888 tmp = gen_rtx_SET (VOIDmode,
7889 gen_rtx_MEM (SImode,
7890 plus_constant (stack_pointer_rtx,
7891 4 * j)),
7892 reg);
7893 RTX_FRAME_RELATED_P (tmp) = 1;
7894 XVECEXP (dwarf, 0, dwarf_par_index++) = tmp;
7897 j++;
7901 par = emit_insn (par);
7903 tmp = gen_rtx_SET (SImode,
7904 stack_pointer_rtx,
7905 gen_rtx_PLUS (SImode,
7906 stack_pointer_rtx,
7907 GEN_INT (-4 * num_regs)));
7908 RTX_FRAME_RELATED_P (tmp) = 1;
7909 XVECEXP (dwarf, 0, 0) = tmp;
7911 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
7912 REG_NOTES (par));
7913 return par;
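/* Generate and emit an insn that, like emit_multi_reg_push above, will
   be recognised as a store-multiple of COUNT floating point registers
   starting at BASE_REG (an SFM instruction), annotated with the DWARF2
   frame information that the unwinder expects.  */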
7916 static rtx
7917 emit_sfm (base_reg, count)
7918 int base_reg;
7919 int count;
7921 rtx par;
7922 rtx dwarf;
7923 rtx tmp, reg;
7924 int i;
7926 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
7927 dwarf = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
7928 RTX_FRAME_RELATED_P (dwarf) = 1;
7930 reg = gen_rtx_REG (XFmode, base_reg++);
7932 XVECEXP (par, 0, 0)
7933 = gen_rtx_SET (VOIDmode,
7934 gen_rtx_MEM (BLKmode,
7935 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
7936 gen_rtx_UNSPEC (BLKmode,
7937 gen_rtvec (1, reg),
7938 UNSPEC_PUSH_MULT));
7939 tmp
7940 = gen_rtx_SET (VOIDmode,
7941 gen_rtx_MEM (XFmode,
7942 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
7943 reg);
7944 RTX_FRAME_RELATED_P (tmp) = 1;
7945 XVECEXP (dwarf, 0, count - 1) = tmp;
7947 for (i = 1; i < count; i++)
7949 reg = gen_rtx_REG (XFmode, base_reg++);
7950 XVECEXP (par, 0, i) = gen_rtx_USE (VOIDmode, reg);
7952 tmp = gen_rtx_SET (VOIDmode,
7953 gen_rtx_MEM (XFmode,
7954 gen_rtx_PRE_DEC (BLKmode,
7955 stack_pointer_rtx)),
7956 reg);
7957 RTX_FRAME_RELATED_P (tmp) = 1;
7958 XVECEXP (dwarf, 0, count - i - 1) = tmp;
7961 par = emit_insn (par);
7962 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
7963 REG_NOTES (par));
7964 return par;
7967 /* Compute the distance from register FROM to register TO.
7968 These can be the arg pointer (26), the soft frame pointer (25),
7969 the stack pointer (13) or the hard frame pointer (11).
7970 Typical stack layout looks like this:
7972 old stack pointer -> | |
7973 ----
7974 | | \
7975 | | saved arguments for
7976 | | vararg functions
7977 | | /
7979 hard FP & arg pointer -> | | \
7980 | | stack
7981 | | frame
7982 | | /
7984 | | \
7985 | | call saved
7986 | | registers
7987 soft frame pointer -> | | /
7989 | | \
7990 | | local
7991 | | variables
7992 | | /
7994 | | \
7995 | | outgoing
7996 | | arguments
7997 current stack pointer -> | | /
8000 For a given function some or all of these stack components
8001 may not be needed, giving rise to the possibility of
8002 eliminating some of the registers.
8004 The values returned by this function must reflect the behaviour
8005 of arm_expand_prologue() and arm_compute_save_reg_mask().
8007 The sign of the number returned reflects the direction of stack
8008 growth, so the values are positive for all eliminations except
8009 from the soft frame pointer to the hard frame pointer. */
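/* As a rough worked example: a normal function that needs a frame
   pointer (stack_frame == 16) and additionally saves r4
   (call_saved_registers == 4), with 8 bytes of local variables and no
   outgoing arguments, gives an ARG_POINTER_REGNUM to
   FRAME_POINTER_REGNUM offset of 4 + 16 - 4 == 16 and an
   ARG_POINTER_REGNUM to STACK_POINTER_REGNUM offset of
   4 + 16 + 8 + 0 - 4 == 24.  */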
8011 unsigned int
8012 arm_compute_initial_elimination_offset (from, to)
8013 unsigned int from;
8014 unsigned int to;
8016 unsigned int local_vars = (get_frame_size () + 3) & ~3;
8017 unsigned int outgoing_args = current_function_outgoing_args_size;
8018 unsigned int stack_frame;
8019 unsigned int call_saved_registers;
8020 unsigned long func_type;
8022 func_type = arm_current_func_type ();
8024 /* Volatile functions never return, so there is
8025 no need to save call saved registers. */
8026 call_saved_registers = 0;
8027 if (! IS_VOLATILE (func_type))
8029 unsigned int reg_mask;
8030 unsigned int reg;
8032 /* Make sure that we compute which registers will be saved
8033 on the stack using the same algorithm that is used by
8034 arm_compute_save_reg_mask(). */
8035 reg_mask = arm_compute_save_reg0_reg12_mask ();
8037 /* Now count the number of bits set in save_reg_mask.
8038 For each set bit we need 4 bytes of stack space. */
8039 while (reg_mask)
8041 call_saved_registers += 4;
8042 reg_mask = reg_mask & ~ (reg_mask & - reg_mask);
8045 if (regs_ever_live[LR_REGNUM]
8046 /* If a stack frame is going to be created, the LR will
8047 be saved as part of that, so we do not need to allow
8048 for it here. */
8049 && ! frame_pointer_needed)
8050 call_saved_registers += 4;
8052 /* If the hard floating point registers are going to be
8053 used then they must be saved on the stack as well.
8054 Each register occupies 12 bytes of stack space. */
8055 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg ++)
8056 if (regs_ever_live[reg] && ! call_used_regs[reg])
8057 call_saved_registers += 12;
8060 /* The stack frame contains 4 registers - the old frame pointer,
8061 the old stack pointer, the return address and PC of the start
8062 of the function. */
8063 stack_frame = frame_pointer_needed ? 16 : 0;
8065 /* OK, now we have enough information to compute the distances.
8066 There must be an entry in these switch tables for each pair
8067 of registers in ELIMINABLE_REGS, even if some of the entries
8068 seem to be redundant or useless. */
8069 switch (from)
8071 case ARG_POINTER_REGNUM:
8072 switch (to)
8074 case THUMB_HARD_FRAME_POINTER_REGNUM:
8075 return 0;
8077 case FRAME_POINTER_REGNUM:
8078 /* This is the reverse of the soft frame pointer
8079 to hard frame pointer elimination below. */
8080 if (call_saved_registers == 0 && stack_frame == 0)
8081 return 0;
8082 return (call_saved_registers + stack_frame - 4);
8084 case ARM_HARD_FRAME_POINTER_REGNUM:
8085 /* If there is no stack frame then the hard
8086 frame pointer and the arg pointer coincide. */
8087 if (stack_frame == 0 && call_saved_registers != 0)
8088 return 0;
8089 /* FIXME: Not sure about this. Maybe we should always return 0 ? */
8090 return (frame_pointer_needed
8091 && current_function_needs_context
8092 && ! cfun->machine->uses_anonymous_args) ? 4 : 0;
8094 case STACK_POINTER_REGNUM:
8095 /* If nothing has been pushed on the stack at all
8096 then this will return -4. This *is* correct! */
8097 return call_saved_registers + stack_frame + local_vars + outgoing_args - 4;
8099 default:
8100 abort ();
8102 break;
8104 case FRAME_POINTER_REGNUM:
8105 switch (to)
8107 case THUMB_HARD_FRAME_POINTER_REGNUM:
8108 return 0;
8110 case ARM_HARD_FRAME_POINTER_REGNUM:
8111 /* The hard frame pointer points to the top entry in the
8112 stack frame. The soft frame pointer to the bottom entry
8113 in the stack frame. If there is no stack frame at all,
8114 then they are identical. */
8115 if (call_saved_registers == 0 && stack_frame == 0)
8116 return 0;
8117 return - (call_saved_registers + stack_frame - 4);
8119 case STACK_POINTER_REGNUM:
8120 return local_vars + outgoing_args;
8122 default:
8123 abort ();
8125 break;
8127 default:
8128 /* You cannot eliminate from the stack pointer.
8129 In theory you could eliminate from the hard frame
8130 pointer to the stack pointer, but this will never
8131 happen, since if a stack frame is not needed the
8132 hard frame pointer will never be used. */
8133 abort ();
8137 /* Generate the prologue instructions for entry into an ARM function. */
8139 void
8140 arm_expand_prologue ()
8142 int reg;
8143 rtx amount;
8144 rtx insn;
8145 rtx ip_rtx;
8146 unsigned long live_regs_mask;
8147 unsigned long func_type;
8148 int fp_offset = 0;
8149 int saved_pretend_args = 0;
8150 unsigned int args_to_push;
8152 func_type = arm_current_func_type ();
8154 /* Naked functions don't have prologues. */
8155 if (IS_NAKED (func_type))
8156 return;
8158 /* Make a copy of c_f_p_a_s as we may need to modify it locally. */
8159 args_to_push = current_function_pretend_args_size;
8161 /* Compute which registers we will have to save onto the stack. */
8162 live_regs_mask = arm_compute_save_reg_mask ();
8164 ip_rtx = gen_rtx_REG (SImode, IP_REGNUM);
8166 if (frame_pointer_needed)
8168 if (IS_INTERRUPT (func_type))
8170 /* Interrupt functions must not corrupt any registers.
8171 Creating a frame pointer, however, corrupts the IP
8172 register, so we must push it first. */
8173 insn = emit_multi_reg_push (1 << IP_REGNUM);
8175 /* Do not set RTX_FRAME_RELATED_P on this insn.
8176 The dwarf stack unwinding code only wants to see one
8177 stack decrement per function, and this is not it. If
8178 this instruction is labeled as being part of the frame
8179 creation sequence then dwarf2out_frame_debug_expr will
8180 abort when it encounters the assignment of IP to FP
8181 later on, since the use of SP here establishes SP as
8182 the CFA register and not IP.
8184 Anyway this instruction is not really part of the stack
8185 frame creation although it is part of the prologue. */
8187 else if (IS_NESTED (func_type))
8189 /* The static chain register is the same as the IP register
8190 used as a scratch register during stack frame creation.
8191 To get around this we need to find somewhere to store IP
8192 whilst the frame is being created. We try the following
8193 places in order:
8195 1. The last argument register.
8196 2. A slot on the stack above the frame. (This only
8197 works if the function is not a varargs function).
8198 3. Register r3, after pushing the argument registers
8199 onto the stack.
8201 Note - we only need to tell the dwarf2 backend about the SP
8202 adjustment in the second variant; the static chain register
8203 doesn't need to be unwound, as it doesn't contain a value
8204 inherited from the caller. */
8206 if (regs_ever_live[3] == 0)
8208 insn = gen_rtx_REG (SImode, 3);
8209 insn = gen_rtx_SET (SImode, insn, ip_rtx);
8210 insn = emit_insn (insn);
8212 else if (args_to_push == 0)
8214 rtx dwarf;
8215 insn = gen_rtx_PRE_DEC (SImode, stack_pointer_rtx);
8216 insn = gen_rtx_MEM (SImode, insn);
8217 insn = gen_rtx_SET (VOIDmode, insn, ip_rtx);
8218 insn = emit_insn (insn);
8220 fp_offset = 4;
8222 /* Just tell the dwarf backend that we adjusted SP. */
8223 dwarf = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
8224 gen_rtx_PLUS (SImode, stack_pointer_rtx,
8225 GEN_INT (-fp_offset)));
8226 RTX_FRAME_RELATED_P (insn) = 1;
8227 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8228 dwarf, REG_NOTES (insn));
8230 else
8232 /* Store the args on the stack. */
8233 if (cfun->machine->uses_anonymous_args)
8234 insn = emit_multi_reg_push
8235 ((0xf0 >> (args_to_push / 4)) & 0xf);
8236 else
8237 insn = emit_insn
8238 (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
8239 GEN_INT (- args_to_push)));
8241 RTX_FRAME_RELATED_P (insn) = 1;
8243 saved_pretend_args = 1;
8244 fp_offset = args_to_push;
8245 args_to_push = 0;
8247 /* Now reuse r3 to preserve IP. */
8248 insn = gen_rtx_REG (SImode, 3);
8249 insn = gen_rtx_SET (SImode, insn, ip_rtx);
8250 (void) emit_insn (insn);
8254 if (fp_offset)
8256 insn = gen_rtx_PLUS (SImode, stack_pointer_rtx, GEN_INT (fp_offset));
8257 insn = gen_rtx_SET (SImode, ip_rtx, insn);
8259 else
8260 insn = gen_movsi (ip_rtx, stack_pointer_rtx);
8262 insn = emit_insn (insn);
8263 RTX_FRAME_RELATED_P (insn) = 1;
8266 if (args_to_push)
8268 /* Push the argument registers, or reserve space for them. */
8269 if (cfun->machine->uses_anonymous_args)
8270 insn = emit_multi_reg_push
8271 ((0xf0 >> (args_to_push / 4)) & 0xf);
8272 else
8273 insn = emit_insn
8274 (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
8275 GEN_INT (- args_to_push)));
8276 RTX_FRAME_RELATED_P (insn) = 1;
8279 /* If this is an interrupt service routine, and the link register is
8280 going to be pushed, subtracting four now will mean that the
8281 function return can be done with a single instruction. */
8282 if ((func_type == ARM_FT_ISR || func_type == ARM_FT_FIQ)
8283 && (live_regs_mask & (1 << LR_REGNUM)) != 0)
8285 emit_insn (gen_rtx_SET (SImode,
8286 gen_rtx_REG (SImode, LR_REGNUM),
8287 gen_rtx_PLUS (SImode,
8288 gen_rtx_REG (SImode, LR_REGNUM),
8289 GEN_INT (-4))));
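/* Illustrative note: on entry to an IRQ handler LR holds the address of
   the interrupted instruction plus 4, so a correct return would
   otherwise need a separate "subs pc, lr, #4".  Pre-biasing LR here
   lets the epilogue restore it straight into the PC, e.g. with a single
   "ldmfd sp!, {..., pc}^".  */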
8292 if (live_regs_mask)
8294 insn = emit_multi_reg_push (live_regs_mask);
8295 RTX_FRAME_RELATED_P (insn) = 1;
8298 if (! IS_VOLATILE (func_type))
8300 /* Save any floating point call-saved registers used by this function. */
8301 if (arm_fpu_arch == FP_SOFT2)
8303 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg --)
8304 if (regs_ever_live[reg] && !call_used_regs[reg])
8306 insn = gen_rtx_PRE_DEC (XFmode, stack_pointer_rtx);
8307 insn = gen_rtx_MEM (XFmode, insn);
8308 insn = emit_insn (gen_rtx_SET (VOIDmode, insn,
8309 gen_rtx_REG (XFmode, reg)));
8310 RTX_FRAME_RELATED_P (insn) = 1;
8313 else
8315 int start_reg = LAST_ARM_FP_REGNUM;
8317 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg --)
8319 if (regs_ever_live[reg] && !call_used_regs[reg])
8321 if (start_reg - reg == 3)
8323 insn = emit_sfm (reg, 4);
8324 RTX_FRAME_RELATED_P (insn) = 1;
8325 start_reg = reg - 1;
8328 else
8330 if (start_reg != reg)
8332 insn = emit_sfm (reg + 1, start_reg - reg);
8333 RTX_FRAME_RELATED_P (insn) = 1;
8335 start_reg = reg - 1;
8339 if (start_reg != reg)
8341 insn = emit_sfm (reg + 1, start_reg - reg);
8342 RTX_FRAME_RELATED_P (insn) = 1;
8347 if (frame_pointer_needed)
8349 /* Create the new frame pointer. */
8350 insn = GEN_INT (-(4 + args_to_push + fp_offset));
8351 insn = emit_insn (gen_addsi3 (hard_frame_pointer_rtx, ip_rtx, insn));
8352 RTX_FRAME_RELATED_P (insn) = 1;
8354 if (IS_NESTED (func_type))
8356 /* Recover the static chain register. */
8357 if (regs_ever_live [3] == 0
8358 || saved_pretend_args)
8359 insn = gen_rtx_REG (SImode, 3);
8360 else /* if (current_function_pretend_args_size == 0) */
8362 insn = gen_rtx_PLUS (SImode, hard_frame_pointer_rtx, GEN_INT (4));
8363 insn = gen_rtx_MEM (SImode, insn);
8366 emit_insn (gen_rtx_SET (SImode, ip_rtx, insn));
8367 /* Add a USE to stop propagate_one_insn() from barfing. */
8368 emit_insn (gen_prologue_use (ip_rtx));
8372 amount = GEN_INT (-(get_frame_size ()
8373 + current_function_outgoing_args_size));
8375 if (amount != const0_rtx)
8377 /* This add can produce multiple insns for a large constant, so we
8378 need to get tricky. */
8379 rtx last = get_last_insn ();
8380 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
8381 amount));
8384 last = last ? NEXT_INSN (last) : get_insns ();
8385 RTX_FRAME_RELATED_P (last) = 1;
8387 while (last != insn);
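/* Illustrative note: an adjustment of, say, 4100 bytes is not a valid
   ARM rotated 8-bit immediate, so the add above may expand to two
   subtracts (4096 then 4); the loop marks every insn of such a sequence
   as frame-related so the unwinder sees the whole decrement.  */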
8389 /* If the frame pointer is needed, emit a special barrier that
8390 will prevent the scheduler from moving stores to the frame
8391 before the stack adjustment. */
8392 if (frame_pointer_needed)
8394 rtx unspec = gen_rtx_UNSPEC (SImode,
8395 gen_rtvec (2, stack_pointer_rtx,
8396 hard_frame_pointer_rtx),
8397 UNSPEC_PRLG_STK);
8399 insn = emit_insn (gen_rtx_CLOBBER (VOIDmode,
8400 gen_rtx_MEM (BLKmode, unspec)));
8404 /* If we are profiling, make sure no instructions are scheduled before
8405 the call to mcount. Similarly if the user has requested no
8406 scheduling in the prolog. */
8407 if (current_function_profile || TARGET_NO_SCHED_PRO)
8408 emit_insn (gen_blockage ());
8410 /* If the link register is being kept alive, with the return address in it,
8411 then make sure that it does not get reused by the ce2 pass. */
8412 if ((live_regs_mask & (1 << LR_REGNUM)) == 0)
8414 emit_insn (gen_prologue_use (gen_rtx_REG (SImode, LR_REGNUM)));
8415 cfun->machine->lr_save_eliminated = 1;
8419 /* If CODE is 'd', then X is a condition operand and the instruction
8420 should only be executed if the condition is true.
8421 If CODE is 'D', then X is a condition operand and the instruction
8422 should only be executed if the condition is false: however, if the mode
8423 of the comparison is CCFPEmode, then always execute the instruction -- we
8424 do this because in these circumstances !GE does not necessarily imply LT;
8425 in these cases the instruction pattern will take care to make sure that
8426 an instruction containing %d will follow, thereby undoing the effects of
8427 doing this instruction unconditionally.
8428 If CODE is 'N' then X is a floating point operand that must be negated
8429 before output.
8430 If CODE is 'B' then output a bitwise inverted value of X (a const int).
8431 If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */
8433 void
8434 arm_print_operand (stream, x, code)
8435 FILE * stream;
8436 rtx x;
8437 int code;
8439 switch (code)
8441 case '@':
8442 fputs (ASM_COMMENT_START, stream);
8443 return;
8445 case '_':
8446 fputs (user_label_prefix, stream);
8447 return;
8449 case '|':
8450 fputs (REGISTER_PREFIX, stream);
8451 return;
8453 case '?':
8454 if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
8456 if (TARGET_THUMB || current_insn_predicate != NULL)
8457 abort ();
8459 fputs (arm_condition_codes[arm_current_cc], stream);
8461 else if (current_insn_predicate)
8463 enum arm_cond_code code;
8465 if (TARGET_THUMB)
8466 abort ();
8468 code = get_arm_condition_code (current_insn_predicate);
8469 fputs (arm_condition_codes[code], stream);
8471 return;
8473 case 'N':
8475 REAL_VALUE_TYPE r;
8476 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
8477 r = REAL_VALUE_NEGATE (r);
8478 fprintf (stream, "%s", fp_const_from_val (&r));
8480 return;
8482 case 'B':
8483 if (GET_CODE (x) == CONST_INT)
8485 HOST_WIDE_INT val;
8486 val = ARM_SIGN_EXTEND (~INTVAL (x));
8487 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
8489 else
8491 putc ('~', stream);
8492 output_addr_const (stream, x);
8494 return;
8496 case 'i':
8497 fprintf (stream, "%s", arithmetic_instr (x, 1));
8498 return;
8500 case 'I':
8501 fprintf (stream, "%s", arithmetic_instr (x, 0));
8502 return;
8504 case 'S':
8506 HOST_WIDE_INT val;
8507 const char * shift = shift_op (x, &val);
8509 if (shift)
8511 fprintf (stream, ", %s ", shift_op (x, &val));
8512 if (val == -1)
8513 arm_print_operand (stream, XEXP (x, 1), 0);
8514 else
8516 fputc ('#', stream);
8517 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
8521 return;
8523 /* An explanation of the 'Q', 'R' and 'H' register operands:
8525 In a pair of registers containing a DI or DF value the 'Q'
8526 operand returns the register number of the register containing
8527 the least significant part of the value. The 'R' operand returns
8528 the register number of the register containing the most
8529 significant part of the value.
8531 The 'H' operand returns the higher of the two register numbers.
8532 On a system where WORDS_BIG_ENDIAN is true the 'H' operand is the
8533 same as the 'Q' operand, since the most significant part of the
8534 value is held in the lower number register. The reverse is true
8535 on systems where WORDS_BIG_ENDIAN is false.
8537 The purpose of these operands is to distinguish between cases
8538 where the endian-ness of the values is important (for example
8539 when they are added together), and cases where the endian-ness
8540 is irrelevant, but the order of register operations is important.
8541 For example when loading a value from memory into a register
8542 pair, the endian-ness does not matter. Provided that the value
8543 from the lower memory address is put into the lower numbered
8544 register, and the value from the higher address is put into the
8545 higher numbered register, the load will work regardless of whether
8546 the value being loaded is big-wordian or little-wordian. The
8547 order of the two register loads can matter however, if the address
8548 of the memory location is actually held in one of the registers
8549 being overwritten by the load. */
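/* For example (values illustrative): with the DImode constant
   0x0000000100000002 held in {r0, r1} and WORDS_BIG_ENDIAN false, r0
   holds the least significant word 0x00000002, so 'Q' prints r0, 'R'
   prints r1, and 'H' also prints r1, the higher-numbered register of
   the pair.  */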
8550 case 'Q':
8551 if (REGNO (x) > LAST_ARM_REGNUM)
8552 abort ();
8553 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 1 : 0));
8554 return;
8556 case 'R':
8557 if (REGNO (x) > LAST_ARM_REGNUM)
8558 abort ();
8559 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 0 : 1));
8560 return;
8562 case 'H':
8563 if (REGNO (x) > LAST_ARM_REGNUM)
8564 abort ();
8565 asm_fprintf (stream, "%r", REGNO (x) + 1);
8566 return;
8568 case 'm':
8569 asm_fprintf (stream, "%r",
8570 GET_CODE (XEXP (x, 0)) == REG
8571 ? REGNO (XEXP (x, 0)) : REGNO (XEXP (XEXP (x, 0), 0)));
8572 return;
8574 case 'M':
8575 asm_fprintf (stream, "{%r-%r}",
8576 REGNO (x),
8577 REGNO (x) + NUM_REGS (GET_MODE (x)) - 1);
8578 return;
8580 case 'd':
8581 /* CONST_TRUE_RTX means always -- that's the default. */
8582 if (x == const_true_rtx)
8583 return;
8585 if (TARGET_ARM)
8586 fputs (arm_condition_codes[get_arm_condition_code (x)],
8587 stream);
8588 else
8589 fputs (thumb_condition_code (x, 0), stream);
8590 return;
8592 case 'D':
8593 /* CONST_TRUE_RTX means not always -- i.e. never. We shouldn't ever
8594 want to do that. */
8595 if (x == const_true_rtx)
8596 abort ();
8598 if (TARGET_ARM)
8599 fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
8600 (get_arm_condition_code (x))],
8601 stream);
8602 else
8603 fputs (thumb_condition_code (x, 1), stream);
8604 return;
8606 default:
8607 if (x == 0)
8608 abort ();
8610 if (GET_CODE (x) == REG)
8611 asm_fprintf (stream, "%r", REGNO (x));
8612 else if (GET_CODE (x) == MEM)
8614 output_memory_reference_mode = GET_MODE (x);
8615 output_address (XEXP (x, 0));
8617 else if (GET_CODE (x) == CONST_DOUBLE)
8618 fprintf (stream, "#%s", fp_immediate_constant (x));
8619 else if (GET_CODE (x) == NEG)
8620 abort (); /* This should never happen now. */
8621 else
8623 fputc ('#', stream);
8624 output_addr_const (stream, x);
8629 #ifndef AOF_ASSEMBLER
8630 /* Target hook for assembling integer objects. The ARM version needs to
8631 handle word-sized values specially. */
8633 static bool
8634 arm_assemble_integer (x, size, aligned_p)
8635 rtx x;
8636 unsigned int size;
8637 int aligned_p;
8639 if (size == UNITS_PER_WORD && aligned_p)
8641 fputs ("\t.word\t", asm_out_file);
8642 output_addr_const (asm_out_file, x);
8644 /* Mark symbols as position independent. We only do this in the
8645 .text segment, not in the .data segment. */
8646 if (NEED_GOT_RELOC && flag_pic && making_const_table &&
8647 (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF))
8649 if (GET_CODE (x) == SYMBOL_REF
8650 && (CONSTANT_POOL_ADDRESS_P (x)
8651 || ENCODED_SHORT_CALL_ATTR_P (XSTR (x, 0))))
8652 fputs ("(GOTOFF)", asm_out_file);
8653 else if (GET_CODE (x) == LABEL_REF)
8654 fputs ("(GOTOFF)", asm_out_file);
8655 else
8656 fputs ("(GOT)", asm_out_file);
8658 fputc ('\n', asm_out_file);
8659 return true;
8662 return default_assemble_integer (x, size, aligned_p);
8664 #endif
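/* An illustrative sketch of this hook's output: for a word-sized,
   aligned SYMBOL_REF emitted into the constant table under -fpic it
   prints something like

	.word	some_symbol(GOT)

   where "some_symbol" is a hypothetical name; constant-pool symbols,
   short-call symbols and labels get the (GOTOFF) form instead.  */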
8666 /* A finite state machine takes care of noticing whether or not instructions
8667 can be conditionally executed, and thus decrease execution time and code
8668 size by deleting branch instructions. The fsm is controlled by
8669 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
8671 /* The states of the fsm controlling condition codes are:
8672 0: normal, do nothing special
8673 1: make ASM_OUTPUT_OPCODE not output this instruction
8674 2: make ASM_OUTPUT_OPCODE not output this instruction
8675 3: make instructions conditional
8676 4: make instructions conditional
8678 State transitions (state->state by whom under condition):
8679 0 -> 1 final_prescan_insn if the `target' is a label
8680 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
8681 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
8682 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
8683 3 -> 0 ASM_OUTPUT_INTERNAL_LABEL if the `target' label is reached
8684 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
8685 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
8686 (the target insn is arm_target_insn).
8688 If the jump clobbers the conditions then we use states 2 and 4.
8690 A similar thing can be done with conditional return insns.
8692 XXX In case the `target' is an unconditional branch, this conditionalising
8693 of the instructions always reduces code size, but not always execution
8694 time. But then, I want to reduce the code size to somewhere near what
8695 /bin/cc produces. */
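/* As an illustrative sketch (register names hypothetical), the fsm
   turns a short forward branch such as

	cmp	r0, #0
	beq	.L1
	mov	r1, #1
	add	r2, r2, r1
   .L1:

   into the branchless form

	cmp	r0, #0
	movne	r1, #1
	addne	r2, r2, r1

   provided no more than max_insns_skipped insns would be skipped.  */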
8697 /* Returns the index of the ARM condition code string in
8698 `arm_condition_codes'. COMPARISON should be an rtx like
8699 `(eq (...) (...))'. */
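/* For example, passing the rtx `(eq (reg:CC 24) (const_int 0))' --
   register number illustrative -- yields ARM_EQ, the index of the "eq"
   suffix in arm_condition_codes.  */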
8701 static enum arm_cond_code
8702 get_arm_condition_code (comparison)
8703 rtx comparison;
8705 enum machine_mode mode = GET_MODE (XEXP (comparison, 0));
8706 int code;
8707 enum rtx_code comp_code = GET_CODE (comparison);
8709 if (GET_MODE_CLASS (mode) != MODE_CC)
8710 mode = SELECT_CC_MODE (comp_code, XEXP (comparison, 0),
8711 XEXP (comparison, 1));
8713 switch (mode)
8715 case CC_DNEmode: code = ARM_NE; goto dominance;
8716 case CC_DEQmode: code = ARM_EQ; goto dominance;
8717 case CC_DGEmode: code = ARM_GE; goto dominance;
8718 case CC_DGTmode: code = ARM_GT; goto dominance;
8719 case CC_DLEmode: code = ARM_LE; goto dominance;
8720 case CC_DLTmode: code = ARM_LT; goto dominance;
8721 case CC_DGEUmode: code = ARM_CS; goto dominance;
8722 case CC_DGTUmode: code = ARM_HI; goto dominance;
8723 case CC_DLEUmode: code = ARM_LS; goto dominance;
8724 case CC_DLTUmode: code = ARM_CC;
8726 dominance:
8727 if (comp_code != EQ && comp_code != NE)
8728 abort ();
8730 if (comp_code == EQ)
8731 return ARM_INVERSE_CONDITION_CODE (code);
8732 return code;
8734 case CC_NOOVmode:
8735 switch (comp_code)
8737 case NE: return ARM_NE;
8738 case EQ: return ARM_EQ;
8739 case GE: return ARM_PL;
8740 case LT: return ARM_MI;
8741 default: abort ();
8744 case CC_Zmode:
8745 switch (comp_code)
8747 case NE: return ARM_NE;
8748 case EQ: return ARM_EQ;
8749 default: abort ();
8752 case CCFPEmode:
8753 case CCFPmode:
8754 /* These encodings assume that AC=1 in the FPA system control
8755 byte. This allows us to handle all cases except UNEQ and
8756 LTGT. */
8757 switch (comp_code)
8759 case GE: return ARM_GE;
8760 case GT: return ARM_GT;
8761 case LE: return ARM_LS;
8762 case LT: return ARM_MI;
8763 case NE: return ARM_NE;
8764 case EQ: return ARM_EQ;
8765 case ORDERED: return ARM_VC;
8766 case UNORDERED: return ARM_VS;
8767 case UNLT: return ARM_LT;
8768 case UNLE: return ARM_LE;
8769 case UNGT: return ARM_HI;
8770 case UNGE: return ARM_PL;
8771 /* UNEQ and LTGT do not have a representation. */
8772 case UNEQ: /* Fall through. */
8773 case LTGT: /* Fall through. */
8774 default: abort ();
8777 case CC_SWPmode:
8778 switch (comp_code)
8780 case NE: return ARM_NE;
8781 case EQ: return ARM_EQ;
8782 case GE: return ARM_LE;
8783 case GT: return ARM_LT;
8784 case LE: return ARM_GE;
8785 case LT: return ARM_GT;
8786 case GEU: return ARM_LS;
8787 case GTU: return ARM_CC;
8788 case LEU: return ARM_CS;
8789 case LTU: return ARM_HI;
8790 default: abort ();
8793 case CC_Cmode:
8794 switch (comp_code)
8796 case LTU: return ARM_CS;
8797 case GEU: return ARM_CC;
8798 default: abort ();
8801 case CCmode:
8802 switch (comp_code)
8804 case NE: return ARM_NE;
8805 case EQ: return ARM_EQ;
8806 case GE: return ARM_GE;
8807 case GT: return ARM_GT;
8808 case LE: return ARM_LE;
8809 case LT: return ARM_LT;
8810 case GEU: return ARM_CS;
8811 case GTU: return ARM_HI;
8812 case LEU: return ARM_LS;
8813 case LTU: return ARM_CC;
8814 default: abort ();
8817 default: abort ();
8820 abort ();
8824 void
8825 arm_final_prescan_insn (insn)
8826 rtx insn;
8828 /* BODY will hold the body of INSN. */
8829 rtx body = PATTERN (insn);
8831 /* This will be 1 if trying to repeat the trick, and things need to be
8832 reversed if it appears to fail. */
8833 int reverse = 0;
8835 /* JUMP_CLOBBERS will be one if the condition codes are clobbered when
8836 a branch is taken, even if the rtl suggests otherwise. It also
8837 means that we have to grub around within the jump expression to find
8838 out what the conditions are when the jump isn't taken. */
8839 int jump_clobbers = 0;
8841 /* If we start with a return insn, we only succeed if we find another one. */
8842 int seeking_return = 0;
8844 /* START_INSN will hold the insn from where we start looking. This is the
8845 first insn after the following code_label if REVERSE is true. */
8846 rtx start_insn = insn;
8848 /* If in state 4, check if the target branch is reached, in order to
8849 change back to state 0. */
8850 if (arm_ccfsm_state == 4)
8852 if (insn == arm_target_insn)
8854 arm_target_insn = NULL;
8855 arm_ccfsm_state = 0;
8857 return;
8860 /* If in state 3, it is possible to repeat the trick, if this insn is an
8861 unconditional branch to a label, and immediately following this branch
8862 is the previous target label which is only used once, and the label this
8863 branch jumps to is not too far off. */
8864 if (arm_ccfsm_state == 3)
8866 if (simplejump_p (insn))
8868 start_insn = next_nonnote_insn (start_insn);
8869 if (GET_CODE (start_insn) == BARRIER)
8871 /* XXX Isn't this always a barrier? */
8872 start_insn = next_nonnote_insn (start_insn);
8874 if (GET_CODE (start_insn) == CODE_LABEL
8875 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
8876 && LABEL_NUSES (start_insn) == 1)
8877 reverse = TRUE;
8878 else
8879 return;
8881 else if (GET_CODE (body) == RETURN)
8883 start_insn = next_nonnote_insn (start_insn);
8884 if (GET_CODE (start_insn) == BARRIER)
8885 start_insn = next_nonnote_insn (start_insn);
8886 if (GET_CODE (start_insn) == CODE_LABEL
8887 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
8888 && LABEL_NUSES (start_insn) == 1)
8890 reverse = TRUE;
8891 seeking_return = 1;
8893 else
8894 return;
8896 else
8897 return;
8900 if (arm_ccfsm_state != 0 && !reverse)
8901 abort ();
8902 if (GET_CODE (insn) != JUMP_INSN)
8903 return;
8905 /* This jump might be paralleled with a clobber of the condition codes;
8906 the jump should always come first. */
8907 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
8908 body = XVECEXP (body, 0, 0);
8910 #if 0
8911 /* If this is a conditional return then we don't want to know */
8912 if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
8913 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
8914 && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
8915 || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
8916 return;
8917 #endif
8919 if (reverse
8920 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
8921 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
8923 int insns_skipped;
8924 int fail = FALSE, succeed = FALSE;
8925 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
8926 int then_not_else = TRUE;
8927 rtx this_insn = start_insn, label = 0;
8929 /* If the jump cannot be done with one instruction, we cannot
8930 conditionally execute the instruction in the inverse case. */
8931 if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
8933 jump_clobbers = 1;
8934 return;
8937 /* Register the insn jumped to. */
8938 if (reverse)
8940 if (!seeking_return)
8941 label = XEXP (SET_SRC (body), 0);
8943 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
8944 label = XEXP (XEXP (SET_SRC (body), 1), 0);
8945 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
8947 label = XEXP (XEXP (SET_SRC (body), 2), 0);
8948 then_not_else = FALSE;
8950 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
8951 seeking_return = 1;
8952 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
8954 seeking_return = 1;
8955 then_not_else = FALSE;
8957 else
8958 abort ();
8960 /* See how many insns this branch skips, and what kind of insns. If all
8961 insns are okay, and the label or unconditional branch to the same
8962 label is not too far away, succeed. */
8963 for (insns_skipped = 0;
8964 !fail && !succeed && insns_skipped++ < max_insns_skipped;)
8966 rtx scanbody;
8968 this_insn = next_nonnote_insn (this_insn);
8969 if (!this_insn)
8970 break;
8972 switch (GET_CODE (this_insn))
8974 case CODE_LABEL:
8975 /* Succeed if it is the target label, otherwise fail since
8976 control falls in from somewhere else. */
8977 if (this_insn == label)
8979 if (jump_clobbers)
8981 arm_ccfsm_state = 2;
8982 this_insn = next_nonnote_insn (this_insn);
8984 else
8985 arm_ccfsm_state = 1;
8986 succeed = TRUE;
8988 else
8989 fail = TRUE;
8990 break;
8992 case BARRIER:
8993 /* Succeed if the following insn is the target label.
8994 Otherwise fail.
8995 If return insns are used then the last insn in a function
8996 will be a barrier. */
8997 this_insn = next_nonnote_insn (this_insn);
8998 if (this_insn && this_insn == label)
9000 if (jump_clobbers)
9002 arm_ccfsm_state = 2;
9003 this_insn = next_nonnote_insn (this_insn);
9005 else
9006 arm_ccfsm_state = 1;
9007 succeed = TRUE;
9009 else
9010 fail = TRUE;
9011 break;
9013 case CALL_INSN:
9014 /* If using 32-bit addresses the cc is not preserved over
9015 calls. */
9016 if (TARGET_APCS_32)
9018 /* Succeed if the following insn is the target label,
9019 or if the following two insns are a barrier and
9020 the target label. */
9021 this_insn = next_nonnote_insn (this_insn);
9022 if (this_insn && GET_CODE (this_insn) == BARRIER)
9023 this_insn = next_nonnote_insn (this_insn);
9025 if (this_insn && this_insn == label
9026 && insns_skipped < max_insns_skipped)
9028 if (jump_clobbers)
9030 arm_ccfsm_state = 2;
9031 this_insn = next_nonnote_insn (this_insn);
9033 else
9034 arm_ccfsm_state = 1;
9035 succeed = TRUE;
9037 else
9038 fail = TRUE;
9040 break;
9042 case JUMP_INSN:
9043 /* If this is an unconditional branch to the same label, succeed.
9044 If it is to another label, do nothing. If it is conditional,
9045 fail. */
9046 /* XXX Probably, the tests for SET and the PC are unnecessary. */
9048 scanbody = PATTERN (this_insn);
9049 if (GET_CODE (scanbody) == SET
9050 && GET_CODE (SET_DEST (scanbody)) == PC)
9052 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
9053 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
9055 arm_ccfsm_state = 2;
9056 succeed = TRUE;
9058 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
9059 fail = TRUE;
9061 /* Fail if a conditional return is undesirable (e.g. on a
9062 StrongARM), but still allow this if optimizing for size. */
9063 else if (GET_CODE (scanbody) == RETURN
9064 && !use_return_insn (TRUE)
9065 && !optimize_size)
9066 fail = TRUE;
9067 else if (GET_CODE (scanbody) == RETURN
9068 && seeking_return)
9070 arm_ccfsm_state = 2;
9071 succeed = TRUE;
9073 else if (GET_CODE (scanbody) == PARALLEL)
9075 switch (get_attr_conds (this_insn))
9077 case CONDS_NOCOND:
9078 break;
9079 default:
9080 fail = TRUE;
9081 break;
9084 else
9085 fail = TRUE; /* Unrecognized jump (eg epilogue). */
9087 break;
9089 case INSN:
9090 /* Instructions using or affecting the condition codes make it
9091 fail. */
9092 scanbody = PATTERN (this_insn);
9093 if (!(GET_CODE (scanbody) == SET
9094 || GET_CODE (scanbody) == PARALLEL)
9095 || get_attr_conds (this_insn) != CONDS_NOCOND)
9096 fail = TRUE;
9097 break;
9099 default:
9100 break;
9103 if (succeed)
9105 if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
9106 arm_target_label = CODE_LABEL_NUMBER (label);
9107 else if (seeking_return || arm_ccfsm_state == 2)
9109 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
9111 this_insn = next_nonnote_insn (this_insn);
9112 if (this_insn && (GET_CODE (this_insn) == BARRIER
9113 || GET_CODE (this_insn) == CODE_LABEL))
9114 abort ();
9116 if (!this_insn)
9118 /* Oh dear! We ran off the end... give up. */
9119 recog (PATTERN (insn), insn, NULL);
9120 arm_ccfsm_state = 0;
9121 arm_target_insn = NULL;
9122 return;
9124 arm_target_insn = this_insn;
9126 else
9127 abort ();
9128 if (jump_clobbers)
9130 if (reverse)
9131 abort ();
9132 arm_current_cc =
9133 get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
9134 0), 0), 1));
9135 if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
9136 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
9137 if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
9138 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
9140 else
9142 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
9143 what it was. */
9144 if (!reverse)
9145 arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
9146 0));
9149 if (reverse || then_not_else)
9150 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
9153 /* Restore recog_data (getting the attributes of other insns can
9154 destroy this array, but final.c assumes that it remains intact
9155 across this call; since the insn has been recognized already we
9156 call recog directly). */
9157 recog (PATTERN (insn), insn, NULL);
9161 /* Returns true if REGNO is a valid register
9162 for holding a quantity of type MODE. */
9165 arm_hard_regno_mode_ok (regno, mode)
9166 unsigned int regno;
9167 enum machine_mode mode;
9169 if (GET_MODE_CLASS (mode) == MODE_CC)
9170 return regno == CC_REGNUM;
9172 if (TARGET_THUMB)
9173 /* For the Thumb we only allow values bigger than SImode in
9174 registers 0 - 6, so that there is always a second low
9175 register available to hold the upper part of the value.
9176 We probably ought to ensure that the register is the
9177 start of an even numbered register pair. */
9178 return (NUM_REGS (mode) < 2) || (regno < LAST_LO_REGNUM);
9180 if (regno <= LAST_ARM_REGNUM)
9181 /* We allow any value to be stored in the general registers. */
9182 return 1;
9184 if ( regno == FRAME_POINTER_REGNUM
9185 || regno == ARG_POINTER_REGNUM)
9186 /* We only allow integers in the fake hard registers. */
9187 return GET_MODE_CLASS (mode) == MODE_INT;
9189 /* The only registers left are the FPU registers
9190 which we only allow to hold FP values. */
9191 return GET_MODE_CLASS (mode) == MODE_FLOAT
9192 && regno >= FIRST_ARM_FP_REGNUM
9193 && regno <= LAST_ARM_FP_REGNUM;
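/* For example: DImode in r0 is fine (general registers accept any
   mode), CCmode is only allowed in CC_REGNUM, and on the Thumb a
   DImode value may not start in a high register such as r8, since no
   second low register would be available for its upper half.  */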
9197 arm_regno_class (regno)
9198 int regno;
9200 if (TARGET_THUMB)
9202 if (regno == STACK_POINTER_REGNUM)
9203 return STACK_REG;
9204 if (regno == CC_REGNUM)
9205 return CC_REG;
9206 if (regno < 8)
9207 return LO_REGS;
9208 return HI_REGS;
9211 if ( regno <= LAST_ARM_REGNUM
9212 || regno == FRAME_POINTER_REGNUM
9213 || regno == ARG_POINTER_REGNUM)
9214 return GENERAL_REGS;
9216 if (regno == CC_REGNUM)
9217 return NO_REGS;
9219 return FPU_REGS;
9222 /* Handle a special case when computing the offset
9223 of an argument from the frame pointer. */
9226 arm_debugger_arg_offset (value, addr)
9227 int value;
9228 rtx addr;
9230 rtx insn;
9232 /* We are only interested if dbxout_parms() failed to compute the offset. */
9233 if (value != 0)
9234 return 0;
9236 /* We can only cope with the case where the address is held in a register. */
9237 if (GET_CODE (addr) != REG)
9238 return 0;
9240 /* If we are using the frame pointer to point at the argument, then
9241 an offset of 0 is correct. */
9242 if (REGNO (addr) == (unsigned) HARD_FRAME_POINTER_REGNUM)
9243 return 0;
9245 /* If we are using the stack pointer to point at the
9246 argument, then an offset of 0 is correct. */
9247 if ((TARGET_THUMB || !frame_pointer_needed)
9248 && REGNO (addr) == SP_REGNUM)
9249 return 0;
9251 /* Oh dear. The argument is pointed to by a register rather
9252 than being held in a register, or being stored at a known
9253 offset from the frame pointer. Since GDB only understands
9254 those two kinds of argument we must translate the address
9255 held in the register into an offset from the frame pointer.
9256 We do this by searching through the insns for the function
9257 looking to see where this register gets its value. If the
9258 register is initialised from the frame pointer plus an offset
9259 then we are in luck and we can continue, otherwise we give up.
9261 This code is exercised by producing debugging information
9262 for a function with arguments like this:
9264 double func (double a, double b, int c, double d) {return d;}
9266 Without this code the stab for parameter 'd' will be set to
9267 an offset of 0 from the frame pointer, rather than 8. */
9269 /* The if() statement says:
9271 If the insn is a normal instruction
9272 and if the insn is setting the value in a register
9273 and if the register being set is the register holding the address of the argument
9274 and if the address is computed by an addition
9275 that involves adding to a register
9276 which is the frame pointer
9277 a constant integer
9279 then... */
9281 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9283 if ( GET_CODE (insn) == INSN
9284 && GET_CODE (PATTERN (insn)) == SET
9285 && REGNO (XEXP (PATTERN (insn), 0)) == REGNO (addr)
9286 && GET_CODE (XEXP (PATTERN (insn), 1)) == PLUS
9287 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 0)) == REG
9288 && REGNO (XEXP (XEXP (PATTERN (insn), 1), 0)) == (unsigned) HARD_FRAME_POINTER_REGNUM
9289 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 1)) == CONST_INT
9292 value = INTVAL (XEXP (XEXP (PATTERN (insn), 1), 1));
9294 break;
9298 if (value == 0)
9300 debug_rtx (addr);
9301 warning ("unable to compute real location of stacked parameter");
9302 value = 8; /* XXX magic hack */
9305 return value;
9308 #define def_builtin(NAME, TYPE, CODE) \
9309 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, NULL)
9311 void
9312 arm_init_builtins ()
9314 tree endlink = void_list_node;
9315 tree int_endlink = tree_cons (NULL_TREE, integer_type_node, endlink);
9316 tree pchar_type_node = build_pointer_type (char_type_node);
9318 tree int_ftype_int, void_ftype_pchar;
9320 /* void func (void *) */
9321 void_ftype_pchar
9322 = build_function_type (void_type_node,
9323 tree_cons (NULL_TREE, pchar_type_node, endlink));
9325 /* int func (int) */
9326 int_ftype_int
9327 = build_function_type (integer_type_node, int_endlink);
9329 /* Initialize arm V5 builtins. */
9330 if (arm_arch5)
9331 def_builtin ("__builtin_clz", int_ftype_int, ARM_BUILTIN_CLZ);
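/* A minimal usage sketch (hypothetical user code): on an arm_arch5
   target the call below is expanded by arm_expand_builtin into a
   single clz instruction instead of a library routine:

	int leading_zeros (int x) { return __builtin_clz (x); }  */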
9334 /* Expand an expression EXP that calls a built-in function,
9335 with result going to TARGET if that's convenient
9336 (and in mode MODE if that's convenient).
9337 SUBTARGET may be used as the target for computing one of EXP's operands.
9338 IGNORE is nonzero if the value is to be ignored. */
9341 arm_expand_builtin (exp, target, subtarget, mode, ignore)
9342 tree exp;
9343 rtx target;
9344 rtx subtarget ATTRIBUTE_UNUSED;
9345 enum machine_mode mode ATTRIBUTE_UNUSED;
9346 int ignore ATTRIBUTE_UNUSED;
9348 enum insn_code icode;
9349 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9350 tree arglist = TREE_OPERAND (exp, 1);
9351 tree arg0;
9352 rtx op0, pat;
9353 enum machine_mode tmode, mode0;
9354 int fcode = DECL_FUNCTION_CODE (fndecl);
9356 switch (fcode)
9358 default:
9359 break;
9361 case ARM_BUILTIN_CLZ:
9362 icode = CODE_FOR_clz;
9363 arg0 = TREE_VALUE (arglist);
9364 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
9365 tmode = insn_data[icode].operand[0].mode;
9366 mode0 = insn_data[icode].operand[1].mode;
9368 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
9369 op0 = copy_to_mode_reg (mode0, op0);
9370 if (target == 0
9371 || GET_MODE (target) != tmode
9372 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
9373 target = gen_reg_rtx (tmode);
9374 pat = GEN_FCN (icode) (target, op0);
9375 if (! pat)
9376 return 0;
9377 emit_insn (pat);
9378 return target;
9381 /* @@@ Should really do something sensible here. */
9382 return NULL_RTX;
9385 /* Recursively search through all of the blocks in a function
9386 checking to see if any of the variables created in that
9387 function match the RTX called 'orig'. If they do then
9388 replace them with the RTX called 'new'. */
9390 static void
9391 replace_symbols_in_block (block, orig, new)
9392 tree block;
9393 rtx orig;
9394 rtx new;
9396 for (; block; block = BLOCK_CHAIN (block))
9398 tree sym;
9400 if (!TREE_USED (block))
9401 continue;
9403 for (sym = BLOCK_VARS (block); sym; sym = TREE_CHAIN (sym))
9405 if ( (DECL_NAME (sym) == 0 && TREE_CODE (sym) != TYPE_DECL)
9406 || DECL_IGNORED_P (sym)
9407 || TREE_CODE (sym) != VAR_DECL
9408 || DECL_EXTERNAL (sym)
9409 || !rtx_equal_p (DECL_RTL (sym), orig)
9411 continue;
9413 SET_DECL_RTL (sym, new);
9416 replace_symbols_in_block (BLOCK_SUBBLOCKS (block), orig, new);
9420 /* Return the number (counting from 0) of
9421 the least significant set bit in MASK. */
9423 #ifdef __GNUC__
9424 inline
9425 #endif
9426 static int
9427 number_of_first_bit_set (mask)
9428 int mask;
9430 int bit;
9432 for (bit = 0;
9433 (mask & (1 << bit)) == 0;
9434 ++bit)
9435 continue;
9437 return bit;
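/* For example, number_of_first_bit_set (0x18) returns 3, since the
   lowest set bit of binary 11000 is bit 3.  */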
9440 /* Generate code to return from a thumb function.
9441 If 'reg_containing_return_addr' is -1, then the return address is
9442 actually on the stack, at the stack pointer. */
9443 static void
9444 thumb_exit (f, reg_containing_return_addr, eh_ofs)
9445 FILE * f;
9446 int reg_containing_return_addr;
9447 rtx eh_ofs;
9449 unsigned regs_available_for_popping;
9450 unsigned regs_to_pop;
9451 int pops_needed;
9452 unsigned available;
9453 unsigned required;
9454 int mode;
9455 int size;
9456 int restore_a4 = FALSE;
9458 /* Compute the registers we need to pop. */
9459 regs_to_pop = 0;
9460 pops_needed = 0;
9462 /* There is an assumption here, that if eh_ofs is not NULL, the
9463 normal return address will have been pushed. */
9464 if (reg_containing_return_addr == -1 || eh_ofs)
9466 /* When we are generating a return for __builtin_eh_return,
9467 reg_containing_return_addr must specify the return regno. */
9468 if (eh_ofs && reg_containing_return_addr == -1)
9469 abort ();
9471 regs_to_pop |= 1 << LR_REGNUM;
9472 ++pops_needed;
9475 if (TARGET_BACKTRACE)
9477 /* Restore the (ARM) frame pointer and stack pointer. */
9478 regs_to_pop |= (1 << ARM_HARD_FRAME_POINTER_REGNUM) | (1 << SP_REGNUM);
9479 pops_needed += 2;
9482 /* If there is nothing to pop then just emit the BX instruction and
9483 return. */
9484 if (pops_needed == 0)
9486 if (eh_ofs)
9487 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
9489 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
9490 return;
9492 /* Otherwise if we are not supporting interworking and we have not created
9493 a backtrace structure and the function was not entered in ARM mode then
9494 just pop the return address straight into the PC. */
9495 else if (!TARGET_INTERWORK
9496 && !TARGET_BACKTRACE
9497 && !is_called_in_ARM_mode (current_function_decl))
9499 if (eh_ofs)
9501 asm_fprintf (f, "\tadd\t%r, #4\n", SP_REGNUM);
9502 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
9503 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
9505 else
9506 asm_fprintf (f, "\tpop\t{%r}\n", PC_REGNUM);
9508 return;
9511 /* Find out how many of the (return) argument registers we can corrupt. */
9512 regs_available_for_popping = 0;
9514 /* If returning via __builtin_eh_return, the bottom three registers
9515 all contain information needed for the return. */
9516 if (eh_ofs)
9517 size = 12;
9518 else
9520 #ifdef RTX_CODE
9521 /* Try to deduce the registers used from the function's
9522 return value. This is more reliable than examining
9523 regs_ever_live[] because that will be set if the register is
9524 ever used in the function, not just if the register is used
9525 to hold a return value. */
9527 if (current_function_return_rtx != 0)
9528 mode = GET_MODE (current_function_return_rtx);
9529 else
9530 #endif
9531 mode = DECL_MODE (DECL_RESULT (current_function_decl));
9533 size = GET_MODE_SIZE (mode);
9535 if (size == 0)
9537 /* In a void function we can use any argument register.
9538 In a function that returns a structure on the stack
9539 we can use the second and third argument registers. */
9540 if (mode == VOIDmode)
9541 regs_available_for_popping =
9542 (1 << ARG_REGISTER (1))
9543 | (1 << ARG_REGISTER (2))
9544 | (1 << ARG_REGISTER (3));
9545 else
9546 regs_available_for_popping =
9547 (1 << ARG_REGISTER (2))
9548 | (1 << ARG_REGISTER (3));
9550 else if (size <= 4)
9551 regs_available_for_popping =
9552 (1 << ARG_REGISTER (2))
9553 | (1 << ARG_REGISTER (3));
9554 else if (size <= 8)
9555 regs_available_for_popping =
9556 (1 << ARG_REGISTER (3));
9559 /* Match registers to be popped with registers into which we pop them. */
9560 for (available = regs_available_for_popping,
9561 required = regs_to_pop;
9562 required != 0 && available != 0;
9563 available &= ~(available & - available),
9564 required &= ~(required & - required))
9565 -- pops_needed;
9567 /* If we have any popping registers left over, remove them. */
9568 if (available > 0)
9569 regs_available_for_popping &= ~available;
9571 /* Otherwise if we need another popping register we can use
9572 the fourth argument register. */
9573 else if (pops_needed)
9575 /* If we have not found any free argument registers and
9576 reg a4 contains the return address, we must move it. */
9577 if (regs_available_for_popping == 0
9578 && reg_containing_return_addr == LAST_ARG_REGNUM)
9580 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
9581 reg_containing_return_addr = LR_REGNUM;
9583 else if (size > 12)
9585 /* Register a4 is being used to hold part of the return value,
9586 but we have dire need of a free, low register. */
9587 restore_a4 = TRUE;
9589 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, LAST_ARG_REGNUM);
9592 if (reg_containing_return_addr != LAST_ARG_REGNUM)
9594 /* The fourth argument register is available. */
9595 regs_available_for_popping |= 1 << LAST_ARG_REGNUM;
9597 --pops_needed;
9601 /* Pop as many registers as we can. */
9602 thumb_pushpop (f, regs_available_for_popping, FALSE);
9604 /* Process the registers we popped. */
9605 if (reg_containing_return_addr == -1)
9607 /* The return address was popped into the lowest numbered register. */
9608 regs_to_pop &= ~(1 << LR_REGNUM);
9610 reg_containing_return_addr =
9611 number_of_first_bit_set (regs_available_for_popping);
9613 /* Remove this register from the mask of available registers, so that
9614 the return address will not be corrupted by further pops. */
9615 regs_available_for_popping &= ~(1 << reg_containing_return_addr);
9618 /* If we popped other registers then handle them here. */
9619 if (regs_available_for_popping)
9621 int frame_pointer;
9623 /* Work out which register currently contains the frame pointer. */
9624 frame_pointer = number_of_first_bit_set (regs_available_for_popping);
9626 /* Move it into the correct place. */
9627 asm_fprintf (f, "\tmov\t%r, %r\n",
9628 ARM_HARD_FRAME_POINTER_REGNUM, frame_pointer);
9630 /* (Temporarily) remove it from the mask of popped registers. */
9631 regs_available_for_popping &= ~(1 << frame_pointer);
9632 regs_to_pop &= ~(1 << ARM_HARD_FRAME_POINTER_REGNUM);
9634 if (regs_available_for_popping)
9636 int stack_pointer;
9638 /* We popped the stack pointer as well,
9639 so find the register that contains it. */
9640 stack_pointer = number_of_first_bit_set (regs_available_for_popping);
9642 /* Move it into the stack register. */
9643 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, stack_pointer);
9645 /* At this point we have popped all necessary registers, so
9646 do not worry about restoring regs_available_for_popping
9647 to its correct value:
9649 assert (pops_needed == 0)
9650 assert (regs_available_for_popping == (1 << frame_pointer))
9651 assert (regs_to_pop == (1 << STACK_POINTER)) */
9653 else
9655 /* Since we have just moved the popped value into the frame
9656 pointer, the popping register is available for reuse, and
9657 we know that we still have the stack pointer left to pop. */
9658 regs_available_for_popping |= (1 << frame_pointer);
9662 /* If we still have registers left on the stack, but we no longer have
9663 any registers into which we can pop them, then we must move the return
9664 address into the link register and make available the register that
9665 contained it. */
9666 if (regs_available_for_popping == 0 && pops_needed > 0)
9668 regs_available_for_popping |= 1 << reg_containing_return_addr;
9670 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM,
9671 reg_containing_return_addr);
9673 reg_containing_return_addr = LR_REGNUM;
9676 /* If we have registers left on the stack then pop some more.
9677 We know that at most we will want to pop FP and SP. */
9678 if (pops_needed > 0)
9680 int popped_into;
9681 int move_to;
9683 thumb_pushpop (f, regs_available_for_popping, FALSE);
9685 /* We have popped either FP or SP.
9686 Move whichever one it is into the correct register. */
9687 popped_into = number_of_first_bit_set (regs_available_for_popping);
9688 move_to = number_of_first_bit_set (regs_to_pop);
9690 asm_fprintf (f, "\tmov\t%r, %r\n", move_to, popped_into);
9692 regs_to_pop &= ~(1 << move_to);
9694 --pops_needed;
9697 /* If we still have not popped everything then we must have only
9698 had one register available to us and we are now popping the SP. */
9699 if (pops_needed > 0)
9701 int popped_into;
9703 thumb_pushpop (f, regs_available_for_popping, FALSE);
9705 popped_into = number_of_first_bit_set (regs_available_for_popping);
9707 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, popped_into);
9709 assert (regs_to_pop == (1 << STACK_POINTER))
9710 assert (pops_needed == 1)
9714 /* If necessary restore the a4 register. */
9715 if (restore_a4)
9717 if (reg_containing_return_addr != LR_REGNUM)
9719 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
9720 reg_containing_return_addr = LR_REGNUM;
9723 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
9726 if (eh_ofs)
9727 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
9729 /* Return to caller. */
9730 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
9733 /* Emit code to push or pop registers to or from the stack. */
9735 static void
9736 thumb_pushpop (f, mask, push)
9737 FILE * f;
9738 int mask;
9739 int push;
9741 int regno;
9742 int lo_mask = mask & 0xFF;
9744 if (lo_mask == 0 && !push && (mask & (1 << 15)))
9746 /* Special case. Do not generate a POP PC statement here, do it in
9747 thumb_exit(). */
9748 thumb_exit (f, -1, NULL_RTX);
9749 return;
9752 fprintf (f, "\t%s\t{", push ? "push" : "pop");
9754 /* Look at the low registers first. */
9755 for (regno = 0; regno <= LAST_LO_REGNUM; regno++, lo_mask >>= 1)
9757 if (lo_mask & 1)
9759 asm_fprintf (f, "%r", regno);
9761 if ((lo_mask & ~1) != 0)
9762 fprintf (f, ", ");
9766 if (push && (mask & (1 << LR_REGNUM)))
9768 /* Catch pushing the LR. */
9769 if (mask & 0xFF)
9770 fprintf (f, ", ");
9772 asm_fprintf (f, "%r", LR_REGNUM);
9774 else if (!push && (mask & (1 << PC_REGNUM)))
9776 /* Catch popping the PC. */
9777 if (TARGET_INTERWORK || TARGET_BACKTRACE)
9779 /* The PC is never popped directly; instead
9780 it is popped into r3 and then BX is used. */
9781 fprintf (f, "}\n");
9783 thumb_exit (f, -1, NULL_RTX);
9785 return;
9787 else
9789 if (mask & 0xFF)
9790 fprintf (f, ", ");
9792 asm_fprintf (f, "%r", PC_REGNUM);
9796 fprintf (f, "}\n");
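/* An illustrative call: thumb_pushpop (f, 0x90, 1) emits
   "push {r4, r7}" (bits 4 and 7 of the mask), while a pop whose mask
   includes the PC is diverted through thumb_exit when interworking or
   backtracing is enabled, since the PC is never popped directly in
   that case.  */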
9799 void
9800 thumb_final_prescan_insn (insn)
9801 rtx insn;
9803 if (flag_print_asm_name)
9804 asm_fprintf (asm_out_file, "%@ 0x%04x\n",
9805 INSN_ADDRESSES (INSN_UID (insn)));
9809 thumb_shiftable_const (val)
9810 unsigned HOST_WIDE_INT val;
9812 unsigned HOST_WIDE_INT mask = 0xff;
9813 int i;
9815 if (val == 0) /* XXX */
9816 return 0;
9818 for (i = 0; i < 25; i++)
9819 if ((val & (mask << i)) == val)
9820 return 1;
9822 return 0;
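/* For example, 0xff000000 (0xff << 24) and 0x3fc (0xff << 2) are
   shiftable constants, but 0x101 is not: its set bits span nine bits,
   which cannot fit inside any single shifted 8-bit window.  */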
9825 /* Returns non-zero if the current function contains,
9826 or might contain, a far jump. */
9829 thumb_far_jump_used_p (int in_prologue)
9831 rtx insn;
9833 /* This test is only important for leaf functions. */
9834 /* assert (!leaf_function_p ()); */
9836 /* If we have already decided that far jumps may be used,
9837 do not bother checking again, and always return true even if
9838 it turns out that they are not being used. Once we have made
9839 the decision that far jumps are present (and that hence the link
9840 register will be pushed onto the stack) we cannot go back on it. */
9841 if (cfun->machine->far_jump_used)
9842 return 1;
9844 /* If this function is not being called from the prologue/epilogue
9845 generation code then it must be being called from the
9846 INITIAL_ELIMINATION_OFFSET macro. */
9847 if (!in_prologue)
9849 /* In this case we know that we are being asked about the elimination
9850 of the arg pointer register. If that register is not being used,
9851 then there are no arguments on the stack, and we do not have to
9852 worry that a far jump might force the prologue to push the link
9853 register, changing the stack offsets. In this case we can just
9854 return false, since the presence of far jumps in the function will
9855 not affect stack offsets.
9857 If the arg pointer is live (or if it was live, but has now been
9858 eliminated and so set to dead) then we do have to test to see if
9859 the function might contain a far jump. This test can lead to some
9860 false positives, since before reload is completed the length of
9861 branch instructions is not known, so gcc defaults to returning their
9862 longest length, which in turn sets the far jump attribute to true.
9864 A false positive will not result in bad code being generated, but it
9865 will result in a needless push and pop of the link register. We
9866 hope that this does not occur too often. */
9867 if (regs_ever_live [ARG_POINTER_REGNUM])
9868 cfun->machine->arg_pointer_live = 1;
9869 else if (!cfun->machine->arg_pointer_live)
9870 return 0;
9873 /* Check to see if the function contains a branch
9874 insn with the far jump attribute set. */
9875 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9877 if (GET_CODE (insn) == JUMP_INSN
9878 /* Ignore tablejump patterns. */
9879 && GET_CODE (PATTERN (insn)) != ADDR_VEC
9880 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
9881 && get_attr_far_jump (insn) == FAR_JUMP_YES
9884 /* Record the fact that we have decided that
9885 the function does use far jumps. */
9886 cfun->machine->far_jump_used = 1;
9887 return 1;
9891 return 0;
9894 /* Return non-zero if FUNC must be entered in ARM mode. */
9897 is_called_in_ARM_mode (func)
9898 tree func;
9900 if (TREE_CODE (func) != FUNCTION_DECL)
9901 abort ();
9903 /* Ignore the problem of functions whose address is taken. */
9904 if (TARGET_CALLEE_INTERWORKING && TREE_PUBLIC (func))
9905 return TRUE;
9907 #ifdef ARM_PE
9908 return lookup_attribute ("interfacearm", DECL_ATTRIBUTES (func)) != NULL_TREE;
9909 #else
9910 return FALSE;
9911 #endif
9914 /* The bits which aren't usefully expanded as rtl. */
9916 const char *
9917 thumb_unexpanded_epilogue ()
9919 int regno;
9920 int live_regs_mask = 0;
9921 int high_regs_pushed = 0;
9922 int leaf_function = leaf_function_p ();
9923 int had_to_push_lr;
9924 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
9926 if (return_used_this_function)
9927 return "";
9929 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
9930 if (regs_ever_live[regno] && !call_used_regs[regno]
9931 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
9932 live_regs_mask |= 1 << regno;
9934 for (regno = 8; regno < 13; regno++)
9936 if (regs_ever_live[regno] && !call_used_regs[regno]
9937 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
9938 high_regs_pushed++;
9941 /* The prolog may have pushed some high registers to use as
9942 work registers; e.g. the testsuite file:
9943 gcc/testsuite/gcc/gcc.c-torture/execute/complex-2.c
9944 compiles to produce:
9945 push {r4, r5, r6, r7, lr}
9946 mov r7, r9
9947 mov r6, r8
9948 push {r6, r7}
9949 as part of the prolog. We have to undo that pushing here. */
9951 if (high_regs_pushed)
9953 int mask = live_regs_mask;
9954 int next_hi_reg;
9955 int size;
9956 int mode;
9958 #ifdef RTX_CODE
9959 /* Try to deduce the registers used from the function's return value.
9960 This is more reliable than examining regs_ever_live[] because that
9961 will be set if the register is ever used in the function, not just if
9962 the register is used to hold a return value. */
9964 if (current_function_return_rtx != 0)
9965 mode = GET_MODE (current_function_return_rtx);
9966 else
9967 #endif
9968 mode = DECL_MODE (DECL_RESULT (current_function_decl));
9970 size = GET_MODE_SIZE (mode);
9972 /* Unless we are returning a type of size > 12, register r3 is
9973 available. */
9974 if (size < 13)
9975 mask |= 1 << 3;
9977 if (mask == 0)
9978 /* Oh dear! We have no low registers into which we can pop
9979 high registers! */
9980 internal_error
9981 ("no low registers available for popping high registers");
9983 for (next_hi_reg = 8; next_hi_reg < 13; next_hi_reg++)
9984 if (regs_ever_live[next_hi_reg] && !call_used_regs[next_hi_reg]
9985 && !(TARGET_SINGLE_PIC_BASE && (next_hi_reg == arm_pic_register)))
9986 break;
9988 while (high_regs_pushed)
9990 /* Find lo register(s) into which the high register(s) can
9991 be popped. */
9992 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
9994 if (mask & (1 << regno))
9995 high_regs_pushed--;
9996 if (high_regs_pushed == 0)
9997 break;
10000 mask &= (2 << regno) - 1; /* A noop if regno == 8 */
10002 /* Pop the values into the low register(s). */
10003 thumb_pushpop (asm_out_file, mask, 0);
10005 /* Move the value(s) into the high registers. */
10006 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
10008 if (mask & (1 << regno))
10010 asm_fprintf (asm_out_file, "\tmov\t%r, %r\n", next_hi_reg,
10011 regno);
10013 for (next_hi_reg++; next_hi_reg < 13; next_hi_reg++)
10014 if (regs_ever_live[next_hi_reg]
10015 && !call_used_regs[next_hi_reg]
10016 && !(TARGET_SINGLE_PIC_BASE
10017 && (next_hi_reg == arm_pic_register)))
10018 break;
10024 had_to_push_lr = (live_regs_mask || !leaf_function
10025 || thumb_far_jump_used_p (1));
10027 if (TARGET_BACKTRACE
10028 && ((live_regs_mask & 0xFF) == 0)
10029 && regs_ever_live [LAST_ARG_REGNUM] != 0)
10031 /* The stack backtrace structure creation code had to
10032 push R7 in order to get a work register, so we pop
10033 it now. */
10034 live_regs_mask |= (1 << LAST_LO_REGNUM);
10037 if (current_function_pretend_args_size == 0 || TARGET_BACKTRACE)
10039 if (had_to_push_lr
10040 && !is_called_in_ARM_mode (current_function_decl)
10041 && !eh_ofs)
10042 live_regs_mask |= 1 << PC_REGNUM;
10044 /* Either no argument registers were pushed or a backtrace
10045 structure was created which includes an adjusted stack
10046 pointer, so just pop everything. */
10047 if (live_regs_mask)
10048 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
10050 if (eh_ofs)
10051 thumb_exit (asm_out_file, 2, eh_ofs);
10052 /* We have either just popped the return address into the
10053 PC, or it was kept in LR for the entire function, or
10054 it is still on the stack because we do not want to
10055 return by doing a pop {pc}. */
10056 else if ((live_regs_mask & (1 << PC_REGNUM)) == 0)
10057 thumb_exit (asm_out_file,
10058 (had_to_push_lr
10059 && is_called_in_ARM_mode (current_function_decl)) ?
10060 -1 : LR_REGNUM, NULL_RTX);
10062 else
10064 /* Pop everything but the return address. */
10065 live_regs_mask &= ~(1 << PC_REGNUM);
10067 if (live_regs_mask)
10068 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
10070 if (had_to_push_lr)
10071 /* Get the return address into a temporary register. */
10072 thumb_pushpop (asm_out_file, 1 << LAST_ARG_REGNUM, 0);
10074 /* Remove the argument registers that were pushed onto the stack. */
10075 asm_fprintf (asm_out_file, "\tadd\t%r, %r, #%d\n",
10076 SP_REGNUM, SP_REGNUM,
10077 current_function_pretend_args_size);
10079 if (eh_ofs)
10080 thumb_exit (asm_out_file, 2, eh_ofs);
10081 else
10082 thumb_exit (asm_out_file,
10083 had_to_push_lr ? LAST_ARG_REGNUM : LR_REGNUM, NULL_RTX);
10086 return "";
10089 /* Functions to save and restore machine-specific function data. */
10091 static void
10092 arm_mark_machine_status (p)
10093 struct function * p;
10095 machine_function *machine = p->machine;
10097 if (machine)
10098 ggc_mark_rtx (machine->eh_epilogue_sp_ofs);
10101 static void
10102 arm_init_machine_status (p)
10103 struct function * p;
10105 p->machine =
10106 (machine_function *) xcalloc (1, sizeof (machine_function));
10108 #if ARM_FT_UNKNOWN != 0
10109 ((machine_function *) p->machine)->func_type = ARM_FT_UNKNOWN;
10110 #endif
10113 static void
10114 arm_free_machine_status (p)
10115 struct function * p;
10117 if (p->machine)
10119 free (p->machine);
10120 p->machine = NULL;
10124 /* Return an RTX indicating where the return address to the
10125 calling function can be found. */
10128 arm_return_addr (count, frame)
10129 int count;
10130 rtx frame ATTRIBUTE_UNUSED;
10132 if (count != 0)
10133 return NULL_RTX;
10135 if (TARGET_APCS_32)
10136 return get_hard_reg_initial_val (Pmode, LR_REGNUM);
10137 else
10139 rtx lr = gen_rtx_AND (Pmode, gen_rtx_REG (Pmode, LR_REGNUM),
10140 GEN_INT (RETURN_ADDR_MASK26));
10141 return get_func_hard_reg_initial_val (cfun, lr);
10145 /* Do anything needed before RTL is emitted for each function. */
10147 void
10148 arm_init_expanders ()
10150 /* Arrange to initialize and mark the machine per-function status. */
10151 init_machine_status = arm_init_machine_status;
10152 mark_machine_status = arm_mark_machine_status;
10153 free_machine_status = arm_free_machine_status;
10156 /* Generate the rest of a function's prologue. */
10158 void
10159 thumb_expand_prologue ()
10161 HOST_WIDE_INT amount = (get_frame_size ()
10162 + current_function_outgoing_args_size);
10163 unsigned long func_type;
10165 func_type = arm_current_func_type ();
10167 /* Naked functions don't have prologues. */
10168 if (IS_NAKED (func_type))
10169 return;
10171 if (IS_INTERRUPT (func_type))
10173 error ("interrupt service routines cannot be coded in Thumb mode");
10174 return;
10177 if (frame_pointer_needed)
10178 emit_insn (gen_movsi (hard_frame_pointer_rtx, stack_pointer_rtx));
10180 if (amount)
10182 amount = ROUND_UP (amount);
10184 if (amount < 512)
10185 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
10186 GEN_INT (- amount)));
10187 else
10189 int regno;
10190 rtx reg;
10192 /* The stack decrement is too big for an immediate value in a single
10193 insn. In theory we could issue multiple subtracts, but after
10194 three of them it becomes more space efficient to place the full
10195 value in the constant pool and load into a register. (Also the
10196 ARM debugger really likes to see only one stack decrement per
10197 function). So instead we look for a scratch register into which
10198 we can load the decrement, and then we subtract this from the
10199 stack pointer. Unfortunately on the thumb the only available
10200 scratch registers are the argument registers, and we cannot use
10201 these as they may hold arguments to the function. Instead we
10202 attempt to locate a call preserved register which is used by this
10203 function. If we can find one, then we know that it will have
10204 been pushed at the start of the prologue and so we can corrupt
10205 it now. */
10206 for (regno = LAST_ARG_REGNUM + 1; regno <= LAST_LO_REGNUM; regno++)
10207 if (regs_ever_live[regno]
10208 && !call_used_regs[regno] /* Paranoia */
10209 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register))
10210 && !(frame_pointer_needed
10211 && (regno == THUMB_HARD_FRAME_POINTER_REGNUM)))
10212 break;
10214 if (regno > LAST_LO_REGNUM) /* Very unlikely */
10216 rtx spare = gen_rtx (REG, SImode, IP_REGNUM);
10218 /* Choose an arbitrary, non-argument low register. */
10219 reg = gen_rtx (REG, SImode, LAST_LO_REGNUM);
10221 /* Save it by copying it into a high, scratch register. */
10222 emit_insn (gen_movsi (spare, reg));
10223 /* Add a USE to stop propagate_one_insn() from barfing. */
10224 emit_insn (gen_prologue_use (spare));
10226 /* Decrement the stack. */
10227 emit_insn (gen_movsi (reg, GEN_INT (- amount)));
10228 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
10229 reg));
10231 /* Restore the low register's original value. */
10232 emit_insn (gen_movsi (reg, spare));
10234 /* Emit a USE of the restored scratch register, so that flow
10235 analysis will not consider the restore redundant. The
10236 register won't be used again in this function and isn't
10237 restored by the epilogue. */
10238 emit_insn (gen_prologue_use (reg));
10240 else
10242 reg = gen_rtx (REG, SImode, regno);
10244 emit_insn (gen_movsi (reg, GEN_INT (- amount)));
10245 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
10246 reg));
10251 if (current_function_profile || TARGET_NO_SCHED_PRO)
10252 emit_insn (gen_blockage ());
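/* An illustrative sketch of the large-frame path above: for a frame of
   1024 bytes reusing the call-saved register r4 (register choice
   hypothetical), the emitted prologue ends with something like

	ldr	r4, .LCn	@ .LCn: .word -1024
	add	sp, sp, r4

   where r4 was already pushed at the start of the prologue and so may
   be corrupted here.  */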
10255 void
10256 thumb_expand_epilogue ()
10258 HOST_WIDE_INT amount = (get_frame_size ()
10259 + current_function_outgoing_args_size);
10261 /* Naked functions don't have epilogues. */
10262 if (IS_NAKED (arm_current_func_type ()))
10263 return;
10265 if (frame_pointer_needed)
10266 emit_insn (gen_movsi (stack_pointer_rtx, hard_frame_pointer_rtx));
10267 else if (amount)
10269 amount = ROUND_UP (amount);
10271 if (amount < 512)
10272 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
10273 GEN_INT (amount)));
10274 else
10276 /* r3 is always free in the epilogue. */
10277 rtx reg = gen_rtx (REG, SImode, LAST_ARG_REGNUM);
10279 emit_insn (gen_movsi (reg, GEN_INT (amount)));
10280 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
10284 /* Emit a USE (stack_pointer_rtx), so that
10285 the stack adjustment will not be deleted. */
10286 emit_insn (gen_prologue_use (stack_pointer_rtx));
10288 if (current_function_profile || TARGET_NO_SCHED_PRO)
10289 emit_insn (gen_blockage ());
10292 static void
10293 thumb_output_function_prologue (f, size)
10294 FILE * f;
10295 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
10297 int live_regs_mask = 0;
10298 int high_regs_pushed = 0;
10299 int regno;
10301 if (IS_NAKED (arm_current_func_type ()))
10302 return;
10304 if (is_called_in_ARM_mode (current_function_decl))
10306 const char * name;
10308 if (GET_CODE (DECL_RTL (current_function_decl)) != MEM)
10309 abort ();
10310 if (GET_CODE (XEXP (DECL_RTL (current_function_decl), 0)) != SYMBOL_REF)
10311 abort ();
10312 name = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
10314 /* Generate code sequence to switch us into Thumb mode. */
10315 /* The .code 32 directive has already been emitted by
10316 ASM_DECLARE_FUNCTION_NAME. */
10317 asm_fprintf (f, "\torr\t%r, %r, #1\n", IP_REGNUM, PC_REGNUM);
10318 asm_fprintf (f, "\tbx\t%r\n", IP_REGNUM);
10320 /* Generate a label, so that the debugger will notice the
10321 change in instruction sets. This label is also used by
10322 the assembler to bypass the ARM code when this function
10323 is called from a Thumb encoded function elsewhere in the
10324 same file. Hence the definition of STUB_NAME here must
10325 agree with the definition in gas/config/tc-arm.c */
10327 #define STUB_NAME ".real_start_of"
10329 asm_fprintf (f, "\t.code\t16\n");
10330 #ifdef ARM_PE
10331 if (arm_dllexport_name_p (name))
10332 name = arm_strip_name_encoding (name);
10333 #endif
10334 asm_fprintf (f, "\t.globl %s%U%s\n", STUB_NAME, name);
10335 asm_fprintf (f, "\t.thumb_func\n");
10336 asm_fprintf (f, "%s%U%s:\n", STUB_NAME, name);
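/* The sequence emitted above, for a function `foo' (name used purely
   for illustration), is roughly:

	.code	32
   foo:	orr	ip, pc, #1	@ address of Thumb code, bit 0 set
	bx	ip		@ switch to Thumb state
	.code	16
	.globl	.real_start_of_foo
	.thumb_func
   .real_start_of_foo:		@ Thumb callers branch here directly  */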
10339 if (current_function_pretend_args_size)
10341 if (cfun->machine->uses_anonymous_args)
10343 int num_pushes;
10345 asm_fprintf (f, "\tpush\t{");
10347 num_pushes = NUM_INTS (current_function_pretend_args_size);
10349 for (regno = LAST_ARG_REGNUM + 1 - num_pushes;
10350 regno <= LAST_ARG_REGNUM;
10351 regno++)
10352 asm_fprintf (f, "%r%s", regno,
10353 regno == LAST_ARG_REGNUM ? "" : ", ");
10355 asm_fprintf (f, "}\n");
10357 else
10358 asm_fprintf (f, "\tsub\t%r, %r, #%d\n",
10359 SP_REGNUM, SP_REGNUM,
10360 current_function_pretend_args_size);
10363 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
10364 if (regs_ever_live[regno] && !call_used_regs[regno]
10365 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
10366 live_regs_mask |= 1 << regno;
10368 if (live_regs_mask || !leaf_function_p () || thumb_far_jump_used_p (1))
10369 live_regs_mask |= 1 << LR_REGNUM;
10371 if (TARGET_BACKTRACE)
10373 int offset;
10374 int work_register = 0;
10375 int wr;
10377 /* We have been asked to create a stack backtrace structure.
10378 The code looks like this:
10380 0 .align 2
10381 0 func:
10382 0 sub SP, #16 Reserve space for 4 registers.
10383 2 push {R7} Get a work register.
10384 4 add R7, SP, #20 Get the stack pointer before the push.
10385 6 str R7, [SP, #8] Store the stack pointer (before reserving the space).
10386 8 mov R7, PC Get hold of the start of this code plus 12.
10387 10 str R7, [SP, #16] Store it.
10388 12 mov R7, FP Get hold of the current frame pointer.
10389 14 str R7, [SP, #4] Store it.
10390 16 mov R7, LR Get hold of the current return address.
10391 18 str R7, [SP, #12] Store it.
10392 20 add R7, SP, #16 Point at the start of the backtrace structure.
10393 22 mov FP, R7 Put this value into the frame pointer. */
10395 if ((live_regs_mask & 0xFF) == 0)
10397 /* See if the a4 register is free. */
10399 if (regs_ever_live [LAST_ARG_REGNUM] == 0)
10400 work_register = LAST_ARG_REGNUM;
10401 else /* We must push a register of our own.  */
10402 live_regs_mask |= (1 << LAST_LO_REGNUM);
10405 if (work_register == 0)
10407 /* Select one of the registers about to be pushed and
10408 use it as our work register.  */
10409 for (work_register = (LAST_LO_REGNUM + 1); work_register--;)
10410 if ((1 << work_register) & live_regs_mask)
10411 break;
10414 asm_fprintf
10415 (f, "\tsub\t%r, %r, #16\t%@ Create stack backtrace structure\n",
10416 SP_REGNUM, SP_REGNUM);
10418 if (live_regs_mask)
10419 thumb_pushpop (f, live_regs_mask, 1);
10421 for (offset = 0, wr = 1 << 15; wr != 0; wr >>= 1)
10422 if (wr & live_regs_mask)
10423 offset += 4;
10425 asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
10426 offset + 16 + current_function_pretend_args_size);
10428 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
10429 offset + 4);
10431 /* Make sure that the instruction fetching the PC is in the right place
10432 to calculate "start of backtrace creation code + 12". */
10433 if (live_regs_mask)
10435 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
10436 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
10437 offset + 12);
10438 asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
10439 ARM_HARD_FRAME_POINTER_REGNUM);
10440 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
10441 offset);
10443 else
10445 asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
10446 ARM_HARD_FRAME_POINTER_REGNUM);
10447 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
10448 offset);
10449 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
10450 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
10451 offset + 12);
10454 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, LR_REGNUM);
10455 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
10456 offset + 8);
10457 asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
10458 offset + 12);
10459 asm_fprintf (f, "\tmov\t%r, %r\t\t%@ Backtrace structure created\n",
10460 ARM_HARD_FRAME_POINTER_REGNUM, work_register);
10462 else if (live_regs_mask)
10463 thumb_pushpop (f, live_regs_mask, 1);
10465 for (regno = 8; regno < 13; regno++)
10467 if (regs_ever_live[regno] && !call_used_regs[regno]
10468 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
10469 high_regs_pushed++;
10472 if (high_regs_pushed)
10474 int pushable_regs = 0;
10475 int mask = live_regs_mask & 0xff;
10476 int next_hi_reg;
10478 for (next_hi_reg = 12; next_hi_reg > LAST_LO_REGNUM; next_hi_reg--)
10480 if (regs_ever_live[next_hi_reg] && !call_used_regs[next_hi_reg]
10481 && !(TARGET_SINGLE_PIC_BASE
10482 && (next_hi_reg == arm_pic_register)))
10483 break;
10486 pushable_regs = mask;
10488 if (pushable_regs == 0)
10490 /* Desperation time -- this will probably never happen.  */
10491 if (regs_ever_live[LAST_ARG_REGNUM]
10492 || !call_used_regs[LAST_ARG_REGNUM])
10493 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, LAST_ARG_REGNUM);
10494 mask = 1 << LAST_ARG_REGNUM;
10497 while (high_regs_pushed > 0)
10499 for (regno = LAST_LO_REGNUM; regno >= 0; regno--)
10501 if (mask & (1 << regno))
10503 asm_fprintf (f, "\tmov\t%r, %r\n", regno, next_hi_reg);
10505 high_regs_pushed--;
10507 if (high_regs_pushed)
10508 for (next_hi_reg--; next_hi_reg > LAST_LO_REGNUM;
10509 next_hi_reg--)
10511 if (regs_ever_live[next_hi_reg]
10512 && !call_used_regs[next_hi_reg]
10513 && !(TARGET_SINGLE_PIC_BASE
10514 && (next_hi_reg == arm_pic_register)))
10515 break;
10517 else
10519 mask &= ~((1 << regno) - 1);
10520 break;
10525 thumb_pushpop (f, mask, 1);
10528 if (pushable_regs == 0
10529 && (regs_ever_live[LAST_ARG_REGNUM]
10530 || !call_used_regs[LAST_ARG_REGNUM]))
10531 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
10535 /* Handle the case of a double word load into a low register from
10536 a computed memory address. The computed address may involve a
10537 register which is overwritten by the load. */
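/* For instance (a sketch), loading the pair r0:r1 from the address held
   in r0 must fetch the high word first so that the base survives:

	ldr	r1, [r0, #4]	@ high word first; base still intact
	ldr	r0, [r0, #0]	@ low word last, clobbering the base

   When the base register is not the low destination register, the
   natural low-then-high order is used instead.  */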
10539 const char *
10540 thumb_load_double_from_address (operands)
10541 rtx *operands;
10543 rtx addr;
10544 rtx base;
10545 rtx offset;
10546 rtx arg1;
10547 rtx arg2;
10549 if (GET_CODE (operands[0]) != REG)
10550 abort ();
10552 if (GET_CODE (operands[1]) != MEM)
10553 abort ();
10555 /* Get the memory address. */
10556 addr = XEXP (operands[1], 0);
10558 /* Work out how the memory address is computed. */
10559 switch (GET_CODE (addr))
10561 case REG:
10562 operands[2] = gen_rtx (MEM, SImode,
10563 plus_constant (XEXP (operands[1], 0), 4));
10565 if (REGNO (operands[0]) == REGNO (addr))
10567 output_asm_insn ("ldr\t%H0, %2", operands);
10568 output_asm_insn ("ldr\t%0, %1", operands);
10570 else
10572 output_asm_insn ("ldr\t%0, %1", operands);
10573 output_asm_insn ("ldr\t%H0, %2", operands);
10575 break;
10577 case CONST:
10578 /* Compute <address> + 4 for the high order load. */
10579 operands[2] = gen_rtx (MEM, SImode,
10580 plus_constant (XEXP (operands[1], 0), 4));
10582 output_asm_insn ("ldr\t%0, %1", operands);
10583 output_asm_insn ("ldr\t%H0, %2", operands);
10584 break;
10586 case PLUS:
10587 arg1 = XEXP (addr, 0);
10588 arg2 = XEXP (addr, 1);
10590 if (CONSTANT_P (arg1))
10591 base = arg2, offset = arg1;
10592 else
10593 base = arg1, offset = arg2;
10595 if (GET_CODE (base) != REG)
10596 abort ();
10598 /* Catch the case of <address> = <reg> + <reg>.  */
10599 if (GET_CODE (offset) == REG)
10601 int reg_offset = REGNO (offset);
10602 int reg_base = REGNO (base);
10603 int reg_dest = REGNO (operands[0]);
10605 /* Add the base and offset registers together into the
10606 higher destination register. */
10607 asm_fprintf (asm_out_file, "\tadd\t%r, %r, %r",
10608 reg_dest + 1, reg_base, reg_offset);
10610 /* Load the lower destination register from the address in
10611 the higher destination register. */
10612 asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #0]",
10613 reg_dest, reg_dest + 1);
10615 /* Load the higher destination register from its own address
10616 plus 4. */
10617 asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #4]",
10618 reg_dest + 1, reg_dest + 1);
10620 else
10622 /* Compute <address> + 4 for the high order load. */
10623 operands[2] = gen_rtx (MEM, SImode,
10624 plus_constant (XEXP (operands[1], 0), 4));
10626 /* If the computed address is held in the low order register
10627 then load the high order register first, otherwise always
10628 load the low order register first. */
10629 if (REGNO (operands[0]) == REGNO (base))
10631 output_asm_insn ("ldr\t%H0, %2", operands);
10632 output_asm_insn ("ldr\t%0, %1", operands);
10634 else
10636 output_asm_insn ("ldr\t%0, %1", operands);
10637 output_asm_insn ("ldr\t%H0, %2", operands);
10640 break;
10642 case LABEL_REF:
10643 /* With no registers to worry about we can just load the value
10644 directly. */
10645 operands[2] = gen_rtx (MEM, SImode,
10646 plus_constant (XEXP (operands[1], 0), 4));
10648 output_asm_insn ("ldr\t%H0, %2", operands);
10649 output_asm_insn ("ldr\t%0, %1", operands);
10650 break;
10652 default:
10653 abort ();
10654 break;
10657 return "";
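/* Output assembler to copy a block of N words (N being 2 or 3) using
   ldmia/stmia with write-back.  Those instructions require their
   register lists in ascending order, hence the swaps below; e.g.
   (register numbers illustrative) a three-word copy is emitted as:

	ldmia	r1!, {r2, r3, r4}
	stmia	r0!, {r2, r3, r4}  */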
10661 const char *
10662 thumb_output_move_mem_multiple (n, operands)
10663 int n;
10664 rtx * operands;
10666 rtx tmp;
10668 switch (n)
10670 case 2:
10671 if (REGNO (operands[4]) > REGNO (operands[5]))
10673 tmp = operands[4];
10674 operands[4] = operands[5];
10675 operands[5] = tmp;
10677 output_asm_insn ("ldmia\t%1!, {%4, %5}", operands);
10678 output_asm_insn ("stmia\t%0!, {%4, %5}", operands);
10679 break;
10681 case 3:
10682 if (REGNO (operands[4]) > REGNO (operands[5]))
10684 tmp = operands[4];
10685 operands[4] = operands[5];
10686 operands[5] = tmp;
10688 if (REGNO (operands[5]) > REGNO (operands[6]))
10690 tmp = operands[5];
10691 operands[5] = operands[6];
10692 operands[6] = tmp;
10694 if (REGNO (operands[4]) > REGNO (operands[5]))
10696 tmp = operands[4];
10697 operands[4] = operands[5];
10698 operands[5] = tmp;
10701 output_asm_insn ("ldmia\t%1!, {%4, %5, %6}", operands);
10702 output_asm_insn ("stmia\t%0!, {%4, %5, %6}", operands);
10703 break;
10705 default:
10706 abort ();
10709 return "";
10712 /* Routines for generating rtl. */
10714 void
10715 thumb_expand_movstrqi (operands)
10716 rtx * operands;
10718 rtx out = copy_to_mode_reg (SImode, XEXP (operands[0], 0));
10719 rtx in = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
10720 HOST_WIDE_INT len = INTVAL (operands[2]);
10721 HOST_WIDE_INT offset = 0;
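/* Peel the copy off in ever smaller chunks; a 23-byte copy, for
   example, becomes a 12-byte block move, an 8-byte block move,
   then a halfword copy and a final byte copy.  */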
10723 while (len >= 12)
10725 emit_insn (gen_movmem12b (out, in, out, in));
10726 len -= 12;
10729 if (len >= 8)
10731 emit_insn (gen_movmem8b (out, in, out, in));
10732 len -= 8;
10735 if (len >= 4)
10737 rtx reg = gen_reg_rtx (SImode);
10738 emit_insn (gen_movsi (reg, gen_rtx (MEM, SImode, in)));
10739 emit_insn (gen_movsi (gen_rtx (MEM, SImode, out), reg));
10740 len -= 4;
10741 offset += 4;
10744 if (len >= 2)
10746 rtx reg = gen_reg_rtx (HImode);
10747 emit_insn (gen_movhi (reg, gen_rtx (MEM, HImode,
10748 plus_constant (in, offset))));
10749 emit_insn (gen_movhi (gen_rtx (MEM, HImode, plus_constant (out, offset)),
10750 reg));
10751 len -= 2;
10752 offset += 2;
10755 if (len)
10757 rtx reg = gen_reg_rtx (QImode);
10758 emit_insn (gen_movqi (reg, gen_rtx (MEM, QImode,
10759 plus_constant (in, offset))));
10760 emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (out, offset)),
10761 reg));
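/* Return nonzero if OP is suitable as the second operand of a Thumb
   compare: a register, or a constant that fits the 8-bit immediate
   field of the Thumb CMP instruction (0-255).  */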
10765 int
10766 thumb_cmp_operand (op, mode)
10767 rtx op;
10768 enum machine_mode mode;
10770 return ((GET_CODE (op) == CONST_INT
10771 && (unsigned HOST_WIDE_INT) (INTVAL (op)) < 256)
10772 || register_operand (op, mode));
10775 static const char *
10776 thumb_condition_code (x, invert)
10777 rtx x;
10778 int invert;
10780 static const char * const conds[] =
10782 "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
10783 "hi", "ls", "ge", "lt", "gt", "le"
10785 int val;
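/* Inverse conditions occupy adjacent slots in the table above, so
   XORing VAL with INVERT (0 or 1) yields either the suffix or its
   opposite, e.g. "eq"/"ne", "cs"/"cc".  */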
10787 switch (GET_CODE (x))
10789 case EQ: val = 0; break;
10790 case NE: val = 1; break;
10791 case GEU: val = 2; break;
10792 case LTU: val = 3; break;
10793 case GTU: val = 8; break;
10794 case LEU: val = 9; break;
10795 case GE: val = 10; break;
10796 case LT: val = 11; break;
10797 case GT: val = 12; break;
10798 case LE: val = 13; break;
10799 default:
10800 abort ();
10803 return conds[val ^ invert];
10806 /* Handle storing a half-word to memory during reload. */
10808 void
10809 thumb_reload_out_hi (operands)
10810 rtx * operands;
10812 emit_insn (gen_thumb_movhi_clobber (operands[0], operands[1], operands[2]));
10815 /* Handle loading a half-word from memory during reload.  */
10817 void
10818 thumb_reload_in_hi (operands)
10819 rtx * operands ATTRIBUTE_UNUSED;
10821 abort ();
10824 /* Return the length of a function name prefix
10825 that starts with the encoding character C.  */
10827 static int
10828 arm_get_strip_length (char c)
10830 switch (c)
10832 ARM_NAME_ENCODING_LENGTHS
10833 default: return 0;
10837 /* Return a pointer to a function's name with any
10838 and all prefix encodings stripped from it. */
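/* For example, with hypothetical encodings '*' and '@' both registered
   in ARM_NAME_ENCODING_LENGTHS with length 1, the name "*@foo" would
   strip to "foo": the loop below keeps skipping prefix characters until
   none matches.  */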
10840 const char *
10841 arm_strip_name_encoding (const char * name)
10843 int skip;
10845 while ((skip = arm_get_strip_length (* name)))
10846 name += skip;
10848 return name;
10851 #ifdef AOF_ASSEMBLER
10852 /* Special functions only needed when producing AOF syntax assembler. */
10854 rtx aof_pic_label = NULL_RTX;
10855 struct pic_chain
10857 struct pic_chain * next;
10858 const char * symname;
10861 static struct pic_chain * aof_pic_chain = NULL;
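/* The table dumped by aof_dump_pic_table below looks roughly like this
   (a sketch, assuming sl is the PIC offset table register):

	AREA |sl$$adcons|, BASED sl
   |x$adcons|
	DCD	first_symbol	; offset 0
	DCD	second_symbol	; offset 4  */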
10863 rtx
10864 aof_pic_entry (x)
10865 rtx x;
10867 struct pic_chain ** chainp;
10868 int offset;
10870 if (aof_pic_label == NULL_RTX)
10872 /* We mark this here and not in arm_add_gc_roots() to avoid
10873 polluting even more code with ifdefs, and because it never
10874 contains anything useful until we assign to it here. */
10875 ggc_add_rtx_root (&aof_pic_label, 1);
10876 aof_pic_label = gen_rtx_SYMBOL_REF (Pmode, "x$adcons");
10879 for (offset = 0, chainp = &aof_pic_chain; *chainp;
10880 offset += 4, chainp = &(*chainp)->next)
10881 if ((*chainp)->symname == XSTR (x, 0))
10882 return plus_constant (aof_pic_label, offset);
10884 *chainp = (struct pic_chain *) xmalloc (sizeof (struct pic_chain));
10885 (*chainp)->next = NULL;
10886 (*chainp)->symname = XSTR (x, 0);
10887 return plus_constant (aof_pic_label, offset);
10890 void
10891 aof_dump_pic_table (f)
10892 FILE * f;
10894 struct pic_chain * chain;
10896 if (aof_pic_chain == NULL)
10897 return;
10899 asm_fprintf (f, "\tAREA |%r$$adcons|, BASED %r\n",
10900 PIC_OFFSET_TABLE_REGNUM,
10901 PIC_OFFSET_TABLE_REGNUM);
10902 fputs ("|x$adcons|\n", f);
10904 for (chain = aof_pic_chain; chain; chain = chain->next)
10906 fputs ("\tDCD\t", f);
10907 assemble_name (f, chain->symname);
10908 fputs ("\n", f);
10912 int arm_text_section_count = 1;
10914 char *
10915 aof_text_section ()
10917 static char buf[100];
10918 sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
10919 arm_text_section_count++);
10920 if (flag_pic)
10921 strcat (buf, ", PIC, REENTRANT");
10922 return buf;
10925 static int arm_data_section_count = 1;
10927 char *
10928 aof_data_section ()
10930 static char buf[100];
10931 sprintf (buf, "\tAREA |C$$data%d|, DATA", arm_data_section_count++);
10932 return buf;
10935 /* The AOF assembler is religiously strict about declarations of
10936 imported and exported symbols, so that it is impossible to declare
10937 a function as imported near the beginning of the file, and then to
10938 export it later on. It is, however, possible to delay the decision
10939 until all the functions in the file have been compiled. To get
10940 around this, we maintain a list of the imports and exports, and
10941 delete from it any that are subsequently defined. At the end of
10942 compilation we spit the remainder of the list out before the END
10943 directive. */
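/* For example (a sketch; the END directive itself is emitted
   elsewhere), a file that calls printf but never defines it ends with:

	IMPORT	printf
	END  */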
10945 struct import
10947 struct import * next;
10948 const char * name;
10951 static struct import * imports_list = NULL;
10953 void
10954 aof_add_import (name)
10955 const char * name;
10957 struct import * new;
10959 for (new = imports_list; new; new = new->next)
10960 if (new->name == name)
10961 return;
10963 new = (struct import *) xmalloc (sizeof (struct import));
10964 new->next = imports_list;
10965 imports_list = new;
10966 new->name = name;
10969 void
10970 aof_delete_import (name)
10971 const char * name;
10973 struct import ** old;
10975 for (old = &imports_list; *old; old = & (*old)->next)
10977 if ((*old)->name == name)
10979 *old = (*old)->next;
10980 return;
10985 int arm_main_function = 0;
10987 void
10988 aof_dump_imports (f)
10989 FILE * f;
10991 /* The AOF assembler needs this to cause the startup code to be extracted
10992 from the library. Bringing in __main causes the whole thing to work
10993 automagically. */
10994 if (arm_main_function)
10996 text_section ();
10997 fputs ("\tIMPORT __main\n", f);
10998 fputs ("\tDCD __main\n", f);
11001 /* Now dump the remaining imports. */
11002 while (imports_list)
11004 fprintf (f, "\tIMPORT\t");
11005 assemble_name (f, imports_list->name);
11006 fputc ('\n', f);
11007 imports_list = imports_list->next;
11010 #endif /* AOF_ASSEMBLER */
11012 #ifdef OBJECT_FORMAT_ELF
11013 /* Switch to an arbitrary section NAME with attributes as specified
11014 by FLAGS. ALIGN specifies any known alignment requirements for
11015 the section; 0 if the default should be used.
11017 Differs from the default elf version only in the prefix character
11018 used before the section type. */
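/* For example (illustrative), a mergeable string section with a 1-byte
   entity size is emitted as:

	.section .rodata.str1.1,"aMS",%progbits,1

   where the default ELF implementation would print '@' in place of
   '%'.  */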
11020 static void
11021 arm_elf_asm_named_section (name, flags)
11022 const char *name;
11023 unsigned int flags;
11025 char flagchars[8], *f = flagchars;
11026 const char *type;
11028 if (!(flags & SECTION_DEBUG))
11029 *f++ = 'a';
11030 if (flags & SECTION_WRITE)
11031 *f++ = 'w';
11032 if (flags & SECTION_CODE)
11033 *f++ = 'x';
11034 if (flags & SECTION_SMALL)
11035 *f++ = 's';
11036 if (flags & SECTION_MERGE)
11037 *f++ = 'M';
11038 if (flags & SECTION_STRINGS)
11039 *f++ = 'S';
11040 *f = '\0';
11042 if (flags & SECTION_BSS)
11043 type = "nobits";
11044 else
11045 type = "progbits";
11047 if (flags & SECTION_ENTSIZE)
11048 fprintf (asm_out_file, "\t.section\t%s,\"%s\",%%%s,%d\n",
11049 name, flagchars, type, flags & SECTION_ENTSIZE);
11050 else
11051 fprintf (asm_out_file, "\t.section\t%s,\"%s\",%%%s\n",
11052 name, flagchars, type);
11054 #endif
11056 #ifndef ARM_PE
11057 /* Symbols in the text segment can be accessed without indirecting via the
11058 constant pool; it may take an extra binary operation, but this is still
11059 faster than indirecting via memory. Don't do this when not optimizing,
11060 since we won't be calculating all of the offsets necessary to do this
11061 simplification. */
11063 static void
11064 arm_encode_section_info (decl, first)
11065 tree decl;
11066 int first;
11068 /* This doesn't work with AOF syntax, since the string table may be in
11069 a different AREA. */
11070 #ifndef AOF_ASSEMBLER
11071 if (optimize > 0 && TREE_CONSTANT (decl)
11072 && (!flag_writable_strings || TREE_CODE (decl) != STRING_CST))
11074 rtx rtl = (TREE_CODE_CLASS (TREE_CODE (decl)) != 'd'
11075 ? TREE_CST_RTL (decl) : DECL_RTL (decl));
11076 SYMBOL_REF_FLAG (XEXP (rtl, 0)) = 1;
11078 #endif
11080 /* If we are referencing a function that is weak then encode a long call
11081 flag in the function name; otherwise, if the function is static or
11082 known to be defined in this file, then encode a short call flag. */
11083 if (first && TREE_CODE_CLASS (TREE_CODE (decl)) == 'd')
11085 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_WEAK (decl))
11086 arm_encode_call_attribute (decl, LONG_CALL_FLAG_CHAR);
11087 else if (! TREE_PUBLIC (decl))
11088 arm_encode_call_attribute (decl, SHORT_CALL_FLAG_CHAR);
11091 #endif /* !ARM_PE */