/* Output routines for GCC for ARM.
   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002
   Free Software Foundation, Inc.
   Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
   and Martin Simmons (@harleqn.co.uk).
   More major hacks by Richard Earnshaw (rearnsha@arm.com).

   This file is part of GNU CC.

   GNU CC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 2, or (at your option)
   any later version.

   GNU CC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GNU CC; see the file COPYING.  If not, write to
   the Free Software Foundation, 59 Temple Place - Suite 330,
   Boston, MA 02111-1307, USA.  */
#include "hard-reg-set.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-attr.h"
#include "integrate.h"
#include "target-def.h"
/* Forward definitions of types.  */
typedef struct minipool_node    Mnode;
typedef struct minipool_fixup   Mfix;

/* In order to improve the layout of the prototypes below
   some short type abbreviations are defined here.  */
#define Hint    HOST_WIDE_INT
#define Mmode   enum machine_mode
#define Ulong   unsigned long
#define Ccstar  const char *

const struct attribute_spec arm_attribute_table[];
/* Forward function declarations.  */
static void      arm_add_gc_roots              PARAMS ((void));
static int       arm_gen_constant              PARAMS ((enum rtx_code, Mmode, Hint, rtx, rtx, int, int));
static Ulong     bit_count                     PARAMS ((signed int));
static int       const_ok_for_op               PARAMS ((Hint, enum rtx_code));
static int       eliminate_lr2ip               PARAMS ((rtx *));
static rtx       emit_multi_reg_push           PARAMS ((int));
static rtx       emit_sfm                      PARAMS ((int, int));
static bool      arm_assemble_integer          PARAMS ((rtx, unsigned int, int));
static Ccstar    fp_const_from_val             PARAMS ((REAL_VALUE_TYPE *));
static arm_cc    get_arm_condition_code        PARAMS ((rtx));
static void      init_fpa_table                PARAMS ((void));
static Hint      int_log2                      PARAMS ((Hint));
static rtx       is_jump_table                 PARAMS ((rtx));
static Ccstar    output_multi_immediate        PARAMS ((rtx *, Ccstar, Ccstar, int, Hint));
static void      print_multi_reg               PARAMS ((FILE *, Ccstar, int, int));
static Mmode     select_dominance_cc_mode      PARAMS ((rtx, rtx, Hint));
static Ccstar    shift_op                      PARAMS ((rtx, Hint *));
static struct machine_function * arm_init_machine_status PARAMS ((void));
static int       number_of_first_bit_set       PARAMS ((int));
static void      replace_symbols_in_block      PARAMS ((tree, rtx, rtx));
static void      thumb_exit                    PARAMS ((FILE *, int, rtx));
static void      thumb_pushpop                 PARAMS ((FILE *, int, int));
static Ccstar    thumb_condition_code          PARAMS ((rtx, int));
static rtx       is_jump_table                 PARAMS ((rtx));
static Hint      get_jump_table_size           PARAMS ((rtx));
static Mnode *   move_minipool_fix_forward_ref PARAMS ((Mnode *, Mnode *, Hint));
static Mnode *   add_minipool_forward_ref      PARAMS ((Mfix *));
static Mnode *   move_minipool_fix_backward_ref PARAMS ((Mnode *, Mnode *, Hint));
static Mnode *   add_minipool_backward_ref     PARAMS ((Mfix *));
static void      assign_minipool_offsets       PARAMS ((Mfix *));
static void      arm_print_value               PARAMS ((FILE *, rtx));
static void      dump_minipool                 PARAMS ((rtx));
static int       arm_barrier_cost              PARAMS ((rtx));
static Mfix *    create_fix_barrier            PARAMS ((Mfix *, Hint));
static void      push_minipool_barrier         PARAMS ((rtx, Hint));
static void      push_minipool_fix             PARAMS ((rtx, Hint, rtx *, Mmode, rtx));
static void      note_invalid_constants        PARAMS ((rtx, Hint));
static int       current_file_function_operand PARAMS ((rtx));
static Ulong     arm_compute_save_reg0_reg12_mask PARAMS ((void));
static Ulong     arm_compute_save_reg_mask     PARAMS ((void));
static Ulong     arm_isr_value                 PARAMS ((tree));
static Ulong     arm_compute_func_type         PARAMS ((void));
static tree      arm_handle_fndecl_attribute   PARAMS ((tree *, tree, tree, int, bool *));
static tree      arm_handle_isr_attribute      PARAMS ((tree *, tree, tree, int, bool *));
static void      arm_output_function_epilogue  PARAMS ((FILE *, Hint));
static void      arm_output_function_prologue  PARAMS ((FILE *, Hint));
static void      thumb_output_function_prologue PARAMS ((FILE *, Hint));
static int       arm_comp_type_attributes      PARAMS ((tree, tree));
static void      arm_set_default_type_attributes PARAMS ((tree));
static int       arm_adjust_cost               PARAMS ((rtx, rtx, rtx, int));
#ifdef OBJECT_FORMAT_ELF
static void      arm_elf_asm_named_section     PARAMS ((const char *, unsigned int));
#endif
static void      arm_encode_section_info       PARAMS ((tree, int));
/* Initialize the GCC target structure.  */
#ifdef TARGET_DLLIMPORT_DECL_ATTRIBUTES
#undef  TARGET_MERGE_DECL_ATTRIBUTES
#define TARGET_MERGE_DECL_ATTRIBUTES merge_dllimport_decl_attributes
#endif

#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE arm_attribute_table

#ifdef AOF_ASSEMBLER
#undef  TARGET_ASM_BYTE_OP
#define TARGET_ASM_BYTE_OP "\tDCB\t"
#undef  TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\tDCW\t"
#undef  TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\tDCD\t"
#else
#undef  TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP NULL
#undef  TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER arm_assemble_integer
#endif

#undef  TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE arm_output_function_prologue

#undef  TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE arm_output_function_epilogue

#undef  TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES arm_comp_type_attributes

#undef  TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES arm_set_default_type_attributes

#undef  TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS arm_init_builtins

#undef  TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN arm_expand_builtin

#undef  TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST arm_adjust_cost

#undef  TARGET_ENCODE_SECTION_INFO
#ifdef ARM_PE
#define TARGET_ENCODE_SECTION_INFO  arm_pe_encode_section_info
#else
#define TARGET_ENCODE_SECTION_INFO  arm_encode_section_info
#endif

#undef  TARGET_STRIP_NAME_ENCODING
#define TARGET_STRIP_NAME_ENCODING arm_strip_name_encoding

struct gcc_target targetm = TARGET_INITIALIZER;
/* Obstack for minipool constant handling.  */
static struct obstack minipool_obstack;
static char *         minipool_startobj;

#define obstack_chunk_alloc   xmalloc
#define obstack_chunk_free    free

/* The maximum number of insns skipped which
   will be conditionalised if possible.  */
static int max_insns_skipped = 5;

extern FILE * asm_out_file;

/* True if we are currently building a constant table.  */
int making_const_table;

/* Define the information needed to generate branch insns.  This is
   stored from the compare operation.  */
rtx arm_compare_op0, arm_compare_op1;

/* What type of floating point are we tuning for?  */
enum floating_point_type arm_fpu;

/* What type of floating point instructions are available?  */
enum floating_point_type arm_fpu_arch;

/* What program mode is the cpu running in?  26-bit mode or 32-bit mode.  */
enum prog_mode_type arm_prgmode;

/* Set by the -mfp=... option.  */
const char * target_fp_name = NULL;

/* Used to parse -mstructure_size_boundary command line option.  */
const char * structure_size_string = NULL;
int arm_structure_size_boundary = DEFAULT_STRUCTURE_SIZE_BOUNDARY;
/* Bit values used to identify processor capabilities.  */
#define FL_CO_PROC    (1 << 0)        /* Has external co-processor bus */
#define FL_FAST_MULT  (1 << 1)        /* Fast multiply */
#define FL_MODE26     (1 << 2)        /* 26-bit mode support */
#define FL_MODE32     (1 << 3)        /* 32-bit mode support */
#define FL_ARCH4      (1 << 4)        /* Architecture rel 4 */
#define FL_ARCH5      (1 << 5)        /* Architecture rel 5 */
#define FL_THUMB      (1 << 6)        /* Thumb aware */
#define FL_LDSCHED    (1 << 7)        /* Load scheduling necessary */
#define FL_STRONG     (1 << 8)        /* StrongARM */
#define FL_ARCH5E     (1 << 9)        /* DSP extensions to v5 */
#define FL_XSCALE     (1 << 10)       /* XScale */
/* The bits in this mask specify which
   instructions we are allowed to generate.  */
static int insn_flags = 0;

/* The bits in this mask specify which instruction scheduling options should
   be used.  Note - there is an overlap with the FL_FAST_MULT.  For some
   hardware we want to be able to generate the multiply instructions, but to
   tune as if they were not present in the architecture.  */
static int tune_flags = 0;

/* The following are used in the arm.md file as equivalents to bits
   in the above two flag variables.  */

/* Nonzero if this is an "M" variant of the processor.  */
int arm_fast_multiply = 0;

/* Nonzero if this chip supports the ARM Architecture 4 extensions.  */
int arm_arch4 = 0;

/* Nonzero if this chip supports the ARM Architecture 5 extensions.  */
int arm_arch5 = 0;

/* Nonzero if this chip supports the ARM Architecture 5E extensions.  */
int arm_arch5e = 0;

/* Nonzero if this chip can benefit from load scheduling.  */
int arm_ld_sched = 0;

/* Nonzero if this chip is a StrongARM.  */
int arm_is_strong = 0;

/* Nonzero if this chip is an XScale.  */
int arm_is_xscale = 0;

/* Nonzero if this chip is an ARM6 or an ARM7.  */
int arm_is_6_or_7 = 0;

/* Nonzero if generating Thumb instructions.  */
int thumb_code = 0;
/* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
   must report the mode of the memory reference from PRINT_OPERAND to
   PRINT_OPERAND_ADDRESS.  */
enum machine_mode output_memory_reference_mode;

/* The register number to be used for the PIC offset register.  */
const char * arm_pic_register_string = NULL;
int arm_pic_register = INVALID_REGNUM;

/* Set to 1 when a return insn is output, this means that the epilogue
   is not needed.  */
int return_used_this_function;

/* Set to 1 after arm_reorg has started.  Reset to start at the start of
   the next function.  */
static int after_arm_reorg = 0;

/* The maximum number of insns to be used when loading a constant.  */
static int arm_constant_limit = 3;

/* For an explanation of these variables, see final_prescan_insn below.  */
enum arm_cond_code arm_current_cc;
int arm_target_label;

/* The condition codes of the ARM, and the inverse function.  */
static const char * const arm_condition_codes[] =
{
  "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
  "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
};

#define streq(string1, string2) (strcmp (string1, string2) == 0)
/* Initialization code.  */

struct processors
{
  const char *const name;
  const unsigned int flags;
};

/* Not all of these give usefully different compilation alternatives,
   but there is no simple way of generalizing them.  */
static const struct processors all_cores[] =
{
  {"arm2",           FL_CO_PROC | FL_MODE26 },
  {"arm250",         FL_CO_PROC | FL_MODE26 },
  {"arm3",           FL_CO_PROC | FL_MODE26 },
  {"arm6",           FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm60",          FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm600",         FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm610",                      FL_MODE26 | FL_MODE32 },
  {"arm620",         FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7",           FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  /* arm7m doesn't exist on its own, but only with D, (and I), but
     those don't alter the code, so arm7m is sometimes used.  */
  {"arm7m",          FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7d",          FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dm",         FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7di",         FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dmi",        FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm70",          FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700",         FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700i",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm710",                      FL_MODE26 | FL_MODE32 },
  {"arm710t",                     FL_MODE26 | FL_MODE32 | FL_THUMB },
  {"arm720",                      FL_MODE26 | FL_MODE32 },
  {"arm720t",                     FL_MODE26 | FL_MODE32 | FL_THUMB },
  {"arm740t",                     FL_MODE26 | FL_MODE32 | FL_THUMB },
  {"arm710c",                     FL_MODE26 | FL_MODE32 },
  {"arm7100",                     FL_MODE26 | FL_MODE32 },
  {"arm7500",                     FL_MODE26 | FL_MODE32 },
  /* Doesn't have an external co-proc, but does have embedded fpu.  */
  {"arm7500fe",      FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7tdmi",       FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  {"arm8",                        FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm810",                      FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm9",                                    FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm920",                                  FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm920t",                                 FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm940t",                                 FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm9tdmi",                                FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm9e",                                   FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"strongarm",                   FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm110",                FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm1100",               FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm1110",               FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"arm10tdmi",                               FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_ARCH5 },
  {"arm1020t",                                FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_ARCH5 },
  {"xscale",                                  FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_STRONG | FL_ARCH5 | FL_ARCH5E | FL_XSCALE },

  {NULL, 0}
};
static const struct processors all_architectures[] =
{
  /* ARM Architectures */

  { "armv2",     FL_CO_PROC | FL_MODE26 },
  { "armv2a",    FL_CO_PROC | FL_MODE26 },
  { "armv3",     FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  { "armv3m",    FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  { "armv4",     FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 },
  /* Strictly, FL_MODE26 is a permitted option for v4t, but there are no
     implementations that support it, so we will leave it out for now.  */
  { "armv4t",    FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  { "armv5",     FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
  { "armv5t",    FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
  { "armv5te",   FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 | FL_ARCH5E },
  { NULL, 0 }
};
/* This is a magic structure.  The 'string' field is magically filled in
   with a pointer to the value specified by the user on the command line
   assuming that the user has specified such a value.  */

struct arm_cpu_select arm_select[] =
{
  /* string       name            processors  */
  { NULL,       "-mcpu=",       all_cores  },
  { NULL,       "-march=",      all_architectures },
  { NULL,       "-mtune=",      all_cores }
};
/* Return the number of bits set in VALUE.  */
static unsigned long
bit_count (value)
     signed int value;
{
  unsigned long count = 0;

  while (value)
    {
      count++;
      value &= ~(value & -value);
    }

  return count;
}
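/* An illustrative walk-through (example values, not from the original
   source): (value & -value) isolates the lowest set bit, so each trip
   of the loop above clears exactly one bit.  For value == 0x2c
   (binary 101100) the body runs three times and bit_count returns 3.  */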
/* Fix up any incompatible options that the user has specified.
   This has now turned into a maze.  */
void
arm_override_options ()
{
  unsigned i;

  /* Set up the flags based on the cpu/architecture selected by the user.  */
  for (i = ARRAY_SIZE (arm_select); i--;)
    {
      struct arm_cpu_select * ptr = arm_select + i;

      if (ptr->string != NULL && ptr->string[0] != '\0')
        {
          const struct processors * sel;

          for (sel = ptr->processors; sel->name != NULL; sel++)
            if (streq (ptr->string, sel->name))
              {
                if (i == 2)
                  tune_flags = sel->flags;
                else
                  {
                    /* If we have been given an architecture and a processor
                       make sure that they are compatible.  We only generate
                       a warning though, and we prefer the CPU over the
                       architecture.  */
                    if (insn_flags != 0 && (insn_flags ^ sel->flags))
                      warning ("switch -mcpu=%s conflicts with -march= switch",
                               ptr->string);

                    insn_flags = sel->flags;
                  }

                break;
              }

          if (sel->name == NULL)
            error ("bad value (%s) for %s switch", ptr->string, ptr->name);
        }
    }
  /* If the user did not specify a processor, choose one for them.  */
  if (insn_flags == 0)
    {
      const struct processors * sel;
      unsigned int        sought = 0;
      static const struct cpu_default
      {
        const int cpu;
        const char *const name;
      }
      cpu_defaults[] =
      {
        { TARGET_CPU_arm2,      "arm2" },
        { TARGET_CPU_arm6,      "arm6" },
        { TARGET_CPU_arm610,    "arm610" },
        { TARGET_CPU_arm710,    "arm710" },
        { TARGET_CPU_arm7m,     "arm7m" },
        { TARGET_CPU_arm7500fe, "arm7500fe" },
        { TARGET_CPU_arm7tdmi,  "arm7tdmi" },
        { TARGET_CPU_arm8,      "arm8" },
        { TARGET_CPU_arm810,    "arm810" },
        { TARGET_CPU_arm9,      "arm9" },
        { TARGET_CPU_strongarm, "strongarm" },
        { TARGET_CPU_xscale,    "xscale" },
        { TARGET_CPU_generic,   "arm" },
        { 0, 0 }
      };
      const struct cpu_default * def;

      /* Find the default.  */
      for (def = cpu_defaults; def->name; def++)
        if (def->cpu == TARGET_CPU_DEFAULT)
          break;

      /* Make sure we found the default CPU.  */
      if (def->name == NULL)
        abort ();

      /* Find the default CPU's flags.  */
      for (sel = all_cores; sel->name != NULL; sel++)
        if (streq (def->name, sel->name))
          break;

      if (sel->name == NULL)
        abort ();

      insn_flags = sel->flags;
      /* Now check to see if the user has specified some command line
         switch that require certain abilities from the cpu.  */

      if (TARGET_INTERWORK || TARGET_THUMB)
        {
          sought |= (FL_THUMB | FL_MODE32);

          /* Force apcs-32 to be used for interworking.  */
          target_flags |= ARM_FLAG_APCS_32;

          /* There are no ARM processors that support both APCS-26 and
             interworking.  Therefore we force FL_MODE26 to be removed
             from insn_flags here (if it was set), so that the search
             below will always be able to find a compatible processor.  */
          insn_flags &= ~FL_MODE26;
        }
      else if (!TARGET_APCS_32)
        sought |= FL_MODE26;

      if (sought != 0 && ((sought & insn_flags) != sought))
        {
          /* Try to locate a CPU type that supports all of the abilities
             of the default CPU, plus the extra abilities requested by
             the user.  */
          for (sel = all_cores; sel->name != NULL; sel++)
            if ((sel->flags & sought) == (sought | insn_flags))
              break;

          if (sel->name == NULL)
            {
              unsigned int        current_bit_count = 0;
              const struct processors * best_fit = NULL;

              /* Ideally we would like to issue an error message here
                 saying that it was not possible to find a CPU compatible
                 with the default CPU, but which also supports the command
                 line options specified by the programmer, and so they
                 ought to use the -mcpu=<name> command line option to
                 override the default CPU type.

                 Unfortunately this does not work with multilibing.  We
                 need to be able to support multilibs for -mapcs-26 and for
                 -mthumb-interwork and there is no CPU that can support both
                 options.  Instead if we cannot find a cpu that has both the
                 characteristics of the default cpu and the given command line
                 options we scan the array again looking for a best match.  */
              for (sel = all_cores; sel->name != NULL; sel++)
                if ((sel->flags & sought) == sought)
                  {
                    unsigned int count;

                    count = bit_count (sel->flags & insn_flags);

                    if (count >= current_bit_count)
                      {
                        best_fit = sel;
                        current_bit_count = count;
                      }
                  }

              if (best_fit == NULL)
                abort ();
              else
                sel = best_fit;
            }

          insn_flags = sel->flags;
        }
    }
  /* If tuning has not been specified, tune for whichever processor or
     architecture has been selected.  */
  if (tune_flags == 0)
    tune_flags = insn_flags;
  /* Make sure that the processor choice does not conflict with any of the
     other command line choices.  */
  if (TARGET_APCS_32 && !(insn_flags & FL_MODE32))
    {
      /* If APCS-32 was not the default then it must have been set by the
         user, so issue a warning message.  If the user has specified
         "-mapcs-32 -mcpu=arm2" then we lose here.  */
      if ((TARGET_DEFAULT & ARM_FLAG_APCS_32) == 0)
        warning ("target CPU does not support APCS-32");
      target_flags &= ~ARM_FLAG_APCS_32;
    }
  else if (!TARGET_APCS_32 && !(insn_flags & FL_MODE26))
    {
      warning ("target CPU does not support APCS-26");
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_INTERWORK && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support interworking");
      target_flags &= ~ARM_FLAG_INTERWORK;
    }

  if (TARGET_THUMB && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support THUMB instructions");
      target_flags &= ~ARM_FLAG_THUMB;
    }

  if (TARGET_APCS_FRAME && TARGET_THUMB)
    {
      /* warning ("ignoring -mapcs-frame because -mthumb was used"); */
      target_flags &= ~ARM_FLAG_APCS_FRAME;
    }
  /* TARGET_BACKTRACE calls leaf_function_p, which causes a crash if done
     from here where no function is being compiled currently.  */
  if ((target_flags & (THUMB_FLAG_LEAF_BACKTRACE | THUMB_FLAG_BACKTRACE))
      && TARGET_ARM)
    warning ("enabling backtrace support is only meaningful when compiling for the Thumb");

  if (TARGET_ARM && TARGET_CALLEE_INTERWORKING)
    warning ("enabling callee interworking support is only meaningful when compiling for the Thumb");

  if (TARGET_ARM && TARGET_CALLER_INTERWORKING)
    warning ("enabling caller interworking support is only meaningful when compiling for the Thumb");
  /* If interworking is enabled then APCS-32 must be selected as well.  */
  if (TARGET_INTERWORK)
    {
      if (!TARGET_APCS_32)
        warning ("interworking forces APCS-32 to be used");
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_APCS_STACK && !TARGET_APCS_FRAME)
    {
      warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
      target_flags |= ARM_FLAG_APCS_FRAME;
    }

  if (TARGET_POKE_FUNCTION_NAME)
    target_flags |= ARM_FLAG_APCS_FRAME;
  if (TARGET_APCS_REENT && flag_pic)
    error ("-fpic and -mapcs-reent are incompatible");

  if (TARGET_APCS_REENT)
    warning ("APCS reentrant code not supported.  Ignored");

  /* If this target is normally configured to use APCS frames, warn if they
     are turned off and debugging is turned on.  */
  if (TARGET_ARM
      && write_symbols != NO_DEBUG
      && !TARGET_APCS_FRAME
      && (TARGET_DEFAULT & ARM_FLAG_APCS_FRAME))
    warning ("-g with -mno-apcs-frame may not give sensible debugging");

  /* If stack checking is disabled, we can use r10 as the PIC register,
     which keeps r9 available.  */
  if (flag_pic)
    arm_pic_register = TARGET_APCS_STACK ? 9 : 10;

  if (TARGET_APCS_FLOAT)
    warning ("passing floating point arguments in fp regs not yet supported");
  /* Initialise boolean versions of the flags, for use in the arm.md file.  */
  arm_fast_multiply = (insn_flags & FL_FAST_MULT) != 0;
  arm_arch4         = (insn_flags & FL_ARCH4) != 0;
  arm_arch5         = (insn_flags & FL_ARCH5) != 0;
  arm_arch5e        = (insn_flags & FL_ARCH5E) != 0;
  arm_is_xscale     = (insn_flags & FL_XSCALE) != 0;

  arm_ld_sched      = (tune_flags & FL_LDSCHED) != 0;
  arm_is_strong     = (tune_flags & FL_STRONG) != 0;
  thumb_code        = (TARGET_ARM == 0);
  arm_is_6_or_7     = (((tune_flags & (FL_MODE26 | FL_MODE32))
                        && !(tune_flags & FL_ARCH4))) != 0;
  /* Default value for floating point code... if no co-processor
     bus, then schedule for emulated floating point.  Otherwise,
     assume the user has an FPA.
     Note: this does not prevent use of floating point instructions,
     -msoft-float does that.  */
  arm_fpu = (tune_flags & FL_CO_PROC) ? FP_HARD : FP_SOFT3;

  if (target_fp_name)
    {
      if (streq (target_fp_name, "2"))
        arm_fpu_arch = FP_SOFT2;
      else if (streq (target_fp_name, "3"))
        arm_fpu_arch = FP_SOFT3;
      else
        error ("invalid floating point emulation option: -mfpe-%s",
               target_fp_name);
    }
  else
    arm_fpu_arch = FP_DEFAULT;

  if (TARGET_FPE && arm_fpu != FP_HARD)
    arm_fpu = FP_SOFT2;
  /* For arm2/3 there is no need to do any scheduling if there is only
     a floating point emulator, or we are doing software floating-point.  */
  if ((TARGET_SOFT_FLOAT || arm_fpu != FP_HARD)
      && (tune_flags & FL_MODE32) == 0)
    flag_schedule_insns = flag_schedule_insns_after_reload = 0;

  arm_prgmode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;
  if (structure_size_string != NULL)
    {
      int size = strtol (structure_size_string, NULL, 0);

      if (size == 8 || size == 32)
        arm_structure_size_boundary = size;
      else
        warning ("structure size boundary can only be set to 8 or 32");
    }
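/* For example (illustrative, not from the original source):
   "-mstructure-size-boundary=8" lets structure sizes be padded only to
   byte boundaries (smaller data, possibly slower access), while the
   default of 32 rounds them up to a full word as the APCS expects; any
   other value is diagnosed by the warning above.  */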
  if (arm_pic_register_string != NULL)
    {
      int pic_register = decode_reg_name (arm_pic_register_string);

      if (!flag_pic)
        warning ("-mpic-register= is useless without -fpic");

      /* Prevent the user from choosing an obviously stupid PIC register.  */
      else if (pic_register < 0 || call_used_regs[pic_register]
               || pic_register == HARD_FRAME_POINTER_REGNUM
               || pic_register == STACK_POINTER_REGNUM
               || pic_register >= PC_REGNUM)
        error ("unable to use '%s' for PIC register", arm_pic_register_string);
      else
        arm_pic_register = pic_register;
    }
  if (TARGET_THUMB && flag_schedule_insns)
    {
      /* Don't warn since it's on by default in -O2.  */
      flag_schedule_insns = 0;
    }
  /* If optimizing for space, don't synthesize constants.
     For processors with load scheduling, it never costs more than 2 cycles
     to load a constant, and the load scheduler may well reduce that to 1.  */
  if (optimize_size || (tune_flags & FL_LDSCHED))
    arm_constant_limit = 1;

  if (arm_is_xscale)
    arm_constant_limit = 2;

  /* If optimizing for size, bump the number of instructions that we
     are prepared to conditionally execute (even on a StrongARM).
     Otherwise for the StrongARM, which has early execution of branches,
     a sequence that is worth skipping is shorter.  */
  if (optimize_size)
    max_insns_skipped = 6;
  else if (arm_is_strong)
    max_insns_skipped = 3;
  /* Register global variables with the garbage collector.  */
  arm_add_gc_roots ();
}

static void
arm_add_gc_roots ()
{
  gcc_obstack_init (&minipool_obstack);
  minipool_startobj = (char *) obstack_alloc (&minipool_obstack, 0);
}
/* A table of known ARM exception types.
   For use with the interrupt function attribute.  */

typedef struct
{
  const char *const   arg;
  const unsigned long return_value;
}
isr_attribute_arg;

static const isr_attribute_arg isr_attribute_args [] =
{
  { "IRQ",   ARM_FT_ISR },
  { "irq",   ARM_FT_ISR },
  { "FIQ",   ARM_FT_FIQ },
  { "fiq",   ARM_FT_FIQ },
  { "ABORT", ARM_FT_ISR },
  { "abort", ARM_FT_ISR },
  { "ABORT", ARM_FT_ISR },
  { "abort", ARM_FT_ISR },
  { "UNDEF", ARM_FT_EXCEPTION },
  { "undef", ARM_FT_EXCEPTION },
  { "SWI",   ARM_FT_EXCEPTION },
  { "swi",   ARM_FT_EXCEPTION },
  { NULL,    ARM_FT_NORMAL }
};
/* Returns the (interrupt) function type of the current
   function, or ARM_FT_UNKNOWN if the type cannot be determined.  */

static unsigned long
arm_isr_value (argument)
     tree argument;
{
  const isr_attribute_arg * ptr;
  const char *              arg;

  /* No argument - default to IRQ.  */
  if (argument == NULL_TREE)
    return ARM_FT_ISR;

  /* Get the value of the argument.  */
  if (TREE_VALUE (argument) == NULL_TREE
      || TREE_CODE (TREE_VALUE (argument)) != STRING_CST)
    return ARM_FT_UNKNOWN;

  arg = TREE_STRING_POINTER (TREE_VALUE (argument));

  /* Check it against the list of known arguments.  */
  for (ptr = isr_attribute_args; ptr->arg != NULL; ptr++)
    if (streq (arg, ptr->arg))
      return ptr->return_value;

  /* An unrecognised interrupt type.  */
  return ARM_FT_UNKNOWN;
}
/* Computes the type of the current function.  */

static unsigned long
arm_compute_func_type ()
{
  unsigned long type = ARM_FT_UNKNOWN;
  tree a;
  tree attr;

  if (TREE_CODE (current_function_decl) != FUNCTION_DECL)
    abort ();

  /* Decide if the current function is volatile.  Such functions
     never return, and many memory cycles can be saved by not storing
     register values that will never be needed again.  This optimization
     was added to speed up context switching in a kernel application.  */
  if (optimize > 0
      && current_function_nothrow
      && TREE_THIS_VOLATILE (current_function_decl))
    type |= ARM_FT_VOLATILE;

  if (current_function_needs_context)
    type |= ARM_FT_NESTED;

  attr = DECL_ATTRIBUTES (current_function_decl);

  a = lookup_attribute ("naked", attr);
  if (a != NULL_TREE)
    type |= ARM_FT_NAKED;

  if (cfun->machine->eh_epilogue_sp_ofs != NULL_RTX)
    type |= ARM_FT_EXCEPTION_HANDLER;
  else
    {
      a = lookup_attribute ("isr", attr);
      if (a == NULL_TREE)
        a = lookup_attribute ("interrupt", attr);

      if (a == NULL_TREE)
        type |= TARGET_INTERWORK ? ARM_FT_INTERWORKED : ARM_FT_NORMAL;
      else
        type |= arm_isr_value (TREE_VALUE (a));
    }

  return type;
}
/* Returns the type of the current function.  */

unsigned long
arm_current_func_type ()
{
  if (ARM_FUNC_TYPE (cfun->machine->func_type) == ARM_FT_UNKNOWN)
    cfun->machine->func_type = arm_compute_func_type ();

  return cfun->machine->func_type;
}
/* Return 1 if it is possible to return using a single instruction.  */

int
use_return_insn (iscond)
     int iscond;
{
  int regno;
  unsigned int func_type;

  /* Never use a return instruction before reload has run.  */
  if (!reload_completed)
    return 0;

  func_type = arm_current_func_type ();

  /* Naked functions and volatile functions need special
     consideration.  */
  if (func_type & (ARM_FT_VOLATILE | ARM_FT_NAKED))
    return 0;

  /* As do variadic functions.  */
  if (current_function_pretend_args_size
      || cfun->machine->uses_anonymous_args
      /* Or if the function calls __builtin_eh_return () */
      || ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER
      /* Or if there is no frame pointer and there is a stack adjustment.  */
      || ((get_frame_size () + current_function_outgoing_args_size != 0)
          && !frame_pointer_needed))
    return 0;

  /* Can't be done if interworking with Thumb, and any registers have been
     stacked.  Similarly, on StrongARM, conditional returns are expensive
     if they aren't taken and registers have been stacked.  */
  if (iscond && arm_is_strong && frame_pointer_needed)
    return 0;

  if ((iscond && arm_is_strong)
      || TARGET_INTERWORK)
    {
      for (regno = 0; regno <= LAST_ARM_REGNUM; regno++)
        if (regs_ever_live[regno] && !call_used_regs[regno])
          return 0;

      if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
        return 0;
    }

  /* Can't be done if any of the FPU regs are pushed,
     since this also requires an insn.  */
  if (TARGET_HARD_FLOAT)
    for (regno = FIRST_ARM_FP_REGNUM; regno <= LAST_ARM_FP_REGNUM; regno++)
      if (regs_ever_live[regno] && !call_used_regs[regno])
        return 0;

  return 1;
}
/* Return TRUE if int I is a valid immediate ARM constant.  */

int
const_ok_for_arm (i)
     HOST_WIDE_INT i;
{
  unsigned HOST_WIDE_INT mask = ~(unsigned HOST_WIDE_INT)0xFF;

  /* For machines with >32 bit HOST_WIDE_INT, the bits above bit 31 must
     be all zero, or all one.  */
  if ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff) != 0
      && ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff)
          != ((~(unsigned HOST_WIDE_INT) 0)
              & ~(unsigned HOST_WIDE_INT) 0xffffffff)))
    return FALSE;

  /* Fast return for 0 and powers of 2.  */
  if ((i & (i - 1)) == 0)
    return TRUE;

  do
    {
      if ((i & mask & (unsigned HOST_WIDE_INT) 0xffffffff) == 0)
        return TRUE;
      mask =
        (mask << 2) | ((mask & (unsigned HOST_WIDE_INT) 0xffffffff)
                       >> (32 - 2)) | ~(unsigned HOST_WIDE_INT) 0xffffffff;
    }
  while (mask != ~(unsigned HOST_WIDE_INT) 0xFF);

  return FALSE;
}
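/* Worked examples for the rotating-mask test above (illustrative, not
   from the original source): an ARM data-processing immediate is an
   8-bit value rotated right by an even amount, so

       0x000000ff   valid   (no rotation)
       0x0000ff00   valid   (0xff rotated right by 24)
       0xff000000   valid   (0xff rotated right by 8)
       0x00000fff   invalid (needs 12 contiguous bits)
       0x00102000   invalid (fits in 8 bits only at an odd rotation)  */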
/* Return true if I is a valid constant for the operation CODE.  */
static int
const_ok_for_op (i, code)
     HOST_WIDE_INT i;
     enum rtx_code code;
{
  if (const_ok_for_arm (i))
    return 1;

  switch (code)
    {
    case PLUS:
      return const_ok_for_arm (ARM_SIGN_EXTEND (-i));

    case MINUS:         /* Should only occur with (MINUS I reg) => rsb */
    case XOR:
    case IOR:
      return 0;

    case AND:
      return const_ok_for_arm (ARM_SIGN_EXTEND (~i));

    default:
      abort ();
    }
}
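/* For example (illustrative, not from the original source): -1
   (0xffffffff) is not a valid immediate, but its negation 1 is, so
   (plus reg -1) can be emitted as "sub reg, reg, #1"; likewise an AND
   with 0xfffffffe can become "bic reg, reg, #1".  That is why PLUS
   tries -I and AND tries ~I above.  */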
/* Emit a sequence of insns to handle a large constant.
   CODE is the code of the operation required, it can be any of SET, PLUS,
   IOR, AND, XOR, MINUS;
   MODE is the mode in which the operation is being performed;
   VAL is the integer to operate on;
   SOURCE is the other operand (a register, or a null-pointer for SET);
   SUBTARGETS means it is safe to create scratch registers if that will
   either produce a simpler sequence, or we will want to cse the values.
   Return value is the number of insns emitted.  */

int
arm_split_constant (code, mode, val, target, source, subtargets)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
{
  if (subtargets || code == SET
      || (GET_CODE (target) == REG && GET_CODE (source) == REG
          && REGNO (target) != REGNO (source)))
    {
      /* After arm_reorg has been called, we can't fix up expensive
         constants by pushing them into memory so we must synthesise
         them in-line, regardless of the cost.  This is only likely to
         be more costly on chips that have load delay slots and we are
         compiling without running the scheduler (so no splitting
         occurred before the final instruction emission).

         Ref: gcc -O1 -mcpu=strongarm gcc.c-torture/compile/980506-2.c  */
      if (!after_arm_reorg
          && (arm_gen_constant (code, mode, val, target, source, 1, 0)
              > arm_constant_limit + (code != SET)))
        {
          if (code == SET)
            {
              /* Currently SET is the only monadic value for CODE, all
                 the rest are dyadic.  */
              emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (val)));
              return 1;
            }
          else
            {
              rtx temp = subtargets ? gen_reg_rtx (mode) : target;

              emit_insn (gen_rtx_SET (VOIDmode, temp, GEN_INT (val)));
              /* For MINUS, the value is subtracted from, since we never
                 have subtraction of a constant.  */
              if (code == MINUS)
                emit_insn (gen_rtx_SET (VOIDmode, target,
                                        gen_rtx_MINUS (mode, temp, source)));
              else
                emit_insn (gen_rtx_SET (VOIDmode, target,
                                        gen_rtx (code, mode, source, temp)));
              return 2;
            }
        }
    }

  return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
}
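/* A worked example (illustrative, not from the original source): for
   code == SET and val == 0x00ffff00 no single immediate matches, but
   both halves are valid immediates, so arm_gen_constant can synthesize
   the value as

       mov     rN, #0x00ff0000
       orr     rN, rN, #0x0000ff00

   i.e. two insns, which is within the arm_constant_limit tested above.  */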
static int
count_insns_for_constant (HOST_WIDE_INT remainder, int i)
{
  HOST_WIDE_INT temp1;
  int num_insns = 0;

  do
    {
      int end;

      if (i <= 0)
        i += 32;
      if (remainder & (3 << (i - 2)))
        {
          end = i - 8;
          if (end < 0)
            end += 32;
          temp1 = remainder & ((0x0ff << end)
                               | ((i < end) ? (0xff >> (32 - end)) : 0));
          remainder &= ~temp1;
          num_insns++;
          i -= 6;
        }
      i -= 2;
    } while (remainder);

  return num_insns;
}
/* As above, but extra parameter GENERATE which, if clear, suppresses
   RTL generation.  */

static int
arm_gen_constant (code, mode, val, target, source, subtargets, generate)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
     int generate;
{
  int can_invert = 0;
  int can_negate = 0;
  int can_negate_initial = 0;
  int can_shift = 0;
  int i;
  int num_bits_set = 0;
  int set_sign_bit_copies = 0;
  int clear_sign_bit_copies = 0;
  int clear_zero_bit_copies = 0;
  int set_zero_bit_copies = 0;
  int insns = 0;
  unsigned HOST_WIDE_INT temp1, temp2;
  unsigned HOST_WIDE_INT remainder = val & 0xffffffff;
  /* Find out which operations are safe for a given CODE.  Also do a quick
     check for degenerate cases; these can occur when DImode operations
     are split.  */
  switch (code)
    {
    case SET:
      can_invert = 1;
      can_shift = 1;
      can_negate = 1;
      break;

    case PLUS:
      can_negate = 1;
      can_negate_initial = 1;
      break;

    case IOR:
      if (remainder == 0xffffffff)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    GEN_INT (ARM_SIGN_EXTEND (val))));
          return 1;
        }
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      break;

    case AND:
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, const0_rtx));
          return 1;
        }
      if (remainder == 0xffffffff)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      can_invert = 1;
      break;

    case XOR:
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      if (remainder == 0xffffffff)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_NOT (mode, source)));
          return 1;
        }

      /* We don't know how to handle this yet below.  */
      abort ();

    case MINUS:
      /* We treat MINUS as (val - source), since (source - val) is always
         passed as (source + (-val)).  */
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_NEG (mode, source)));
          return 1;
        }
      if (const_ok_for_arm (val))
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_MINUS (mode, GEN_INT (val),
                                                   source)));
          return 1;
        }
      can_negate = 1;
      break;

    default:
      abort ();
    }
  /* If we can do it in one insn get out quickly.  */
  if (const_ok_for_arm (val)
      || (can_negate_initial && const_ok_for_arm (-val))
      || (can_invert && const_ok_for_arm (~val)))
    {
      if (generate)
        emit_insn (gen_rtx_SET (VOIDmode, target,
                                (source ? gen_rtx (code, mode, source,
                                                   GEN_INT (val))
                                 : GEN_INT (val))));
      return 1;
    }
  /* Calculate a few attributes that may be useful for specific
     optimizations.  */
  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) == 0)
        clear_sign_bit_copies++;
      else
        break;
    }

  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) != 0)
        set_sign_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) == 0)
        clear_zero_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) != 0)
        set_zero_bit_copies++;
      else
        break;
    }
  switch (code)
    {
    case SET:
      /* See if we can do this by sign_extending a constant that is known
         to be negative.  This is a good way of doing it, since the shift
         may well merge into a subsequent insn.  */
      if (set_sign_bit_copies > 1)
        {
          if (const_ok_for_arm
              (temp1 = ARM_SIGN_EXTEND (remainder
                                        << (set_sign_bit_copies - 1))))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx_SET (VOIDmode, new_src,
                                          GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
          /* For an inverted constant, we will need to set the low bits,
             these will be shifted out of harm's way.  */
          temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
          if (const_ok_for_arm (~temp1))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx_SET (VOIDmode, new_src,
                                          GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
        }
      /* See if we can generate this by setting the bottom (or the top)
         16 bits, and then shifting these into the other half of the
         word.  We only look for the simplest cases, to do more would cost
         too much.  Be careful, however, not to generate this when the
         alternative would take fewer insns.  */
      if (val & 0xffff0000)
        {
          temp1 = remainder & 0xffff0000;
          temp2 = remainder & 0x0000ffff;

          /* Overlaps outside this range are best done using other methods.  */
          for (i = 9; i < 24; i++)
            {
              if ((((temp2 | (temp2 << i)) & 0xffffffff) == remainder)
                  && !const_ok_for_arm (temp2))
                {
                  rtx new_src = (subtargets
                                 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
                                 : target);
                  insns = arm_gen_constant (code, mode, temp2, new_src,
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn (gen_rtx_SET
                               (VOIDmode, target,
                                gen_rtx_IOR (mode,
                                             gen_rtx_ASHIFT (mode, source,
                                                             GEN_INT (i)),
                                             source)));
                  return insns + 1;
                }
            }

          /* Don't duplicate cases already considered.  */
          for (i = 17; i < 24; i++)
            {
              if (((temp1 | (temp1 >> i)) == remainder)
                  && !const_ok_for_arm (temp1))
                {
                  rtx new_src = (subtargets
                                 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
                                 : target);
                  insns = arm_gen_constant (code, mode, temp1, new_src,
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn
                      (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_IOR
                                    (mode,
                                     gen_rtx_LSHIFTRT (mode, source,
                                                       GEN_INT (i)),
                                     source)));
                  return insns + 1;
                }
            }
        }
      break;

    case IOR:
    case XOR:
      /* If we have IOR or XOR, and the constant can be loaded in a
         single instruction, and we can find a temporary to put it in,
         then this can be done in two instructions instead of 3-4.  */
      if (subtargets
          /* TARGET can't be NULL if SUBTARGETS is 0 */
          || (reload_completed && !reg_mentioned_p (target, source)))
        {
          if (const_ok_for_arm (ARM_SIGN_EXTEND (~val)))
            {
              if (generate)
                {
                  rtx sub = subtargets ? gen_reg_rtx (mode) : target;

                  emit_insn (gen_rtx_SET (VOIDmode, sub, GEN_INT (val)));
                  emit_insn (gen_rtx_SET (VOIDmode, target,
                                          gen_rtx (code, mode, source, sub)));
                }
              return 2;
            }
        }

      if (code == XOR)
        break;
      if (set_sign_bit_copies > 8
          && (val & (-1 << (32 - set_sign_bit_copies))) == val)
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (set_sign_bit_copies);

              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_ASHIFT (mode,
                                                                   source,
                                                                   shift))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_LSHIFTRT (mode, sub,
                                                                     shift))));
            }
          return 2;
        }

      if (set_zero_bit_copies > 8
          && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (set_zero_bit_copies);

              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_LSHIFTRT (mode,
                                                                     source,
                                                                     shift))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_ASHIFT (mode, sub,
                                                                   shift))));
            }
          return 2;
        }

      if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~val)))
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode, source)));
              source = sub;
              if (subtargets)
                sub = gen_reg_rtx (mode);
              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_AND (mode, source,
                                                   GEN_INT (temp1))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode, sub)));
            }
          return 3;
        }
      break;

    case AND:
      /* See if two shifts will do 2 or more insn's worth of work.  */
      if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
        {
          HOST_WIDE_INT shift_mask = ((0xffffffff
                                       << (32 - clear_sign_bit_copies))
                                      & 0xffffffff);

          if ((remainder | shift_mask) != 0xffffffff)
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            new_src, source, subtargets, 1);
                  source = new_src;
                }
              else
                {
                  rtx targ = subtargets ? NULL_RTX : target;
                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            targ, source, subtargets, 0);
                }
            }

          if (generate)
            {
              rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (clear_sign_bit_copies);

              emit_insn (gen_ashlsi3 (new_src, source, shift));
              emit_insn (gen_lshrsi3 (target, new_src, shift));
            }

          return insns + 2;
        }
      if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
        {
          HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;

          if ((remainder | shift_mask) != 0xffffffff)
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;

                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            new_src, source, subtargets, 1);
                  source = new_src;
                }
              else
                {
                  rtx targ = subtargets ? NULL_RTX : target;

                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            targ, source, subtargets, 0);
                }
            }

          if (generate)
            {
              rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (clear_zero_bit_copies);

              emit_insn (gen_lshrsi3 (new_src, source, shift));
              emit_insn (gen_ashlsi3 (target, new_src, shift));
            }

          return insns + 2;
        }
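      /* Example (illustrative, not from the original source): an AND
         with 0xffff0000 has clear_zero_bit_copies == 16, so the pair of
         shifts just emitted is

             mov     rT, rS, lsr #16
             mov     rD, rT, asl #16

         which clears the bottom 16 bits in two insns instead of
         synthesizing the mask and AND-ing with it.  */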
      break;

    default:
      break;
    }

  /* Count how many bits we are going to have to set.  */
  num_bits_set = 0;
  for (i = 0; i < 32; i++)
    if (remainder & (1 << i))
      num_bits_set++;

  /* If inverting or negating would make the constant cheaper, do so.  */
  if (code == AND || (can_invert && num_bits_set > 16))
    remainder = (~remainder) & 0xffffffff;
  else if (code == PLUS && num_bits_set > 16)
    remainder = (-remainder) & 0xffffffff;
  else
    {
      can_invert = 0;
      can_negate = 0;
    }
  /* Now try and find a way of doing the job in either two or three
     instructions.
     We start by looking for the largest block of zeros that are aligned on
     a 2-bit boundary, we then fill up the temps, wrapping around to the
     top of the word when we drop off the bottom.
     In the worst case this code should produce no more than four insns.  */
  {
    int best_start = 0;
    int best_consecutive_zeros = 0;

    for (i = 0; i < 32; i += 2)
      {
        int consecutive_zeros = 0;

        if (!(remainder & (3 << i)))
          {
            while ((i < 32) && !(remainder & (3 << i)))
              {
                consecutive_zeros += 2;
                i += 2;
              }
            if (consecutive_zeros > best_consecutive_zeros)
              {
                best_consecutive_zeros = consecutive_zeros;
                best_start = i - consecutive_zeros;
              }
            i -= 2;
          }
      }

    /* So long as it won't require any more insns to do so, it's
       desirable to emit a small constant (in bits 0...9) in the last
       insn.  This way there is more chance that it can be combined with
       a later addressing insn to form a pre-indexed load or store
       operation.  Consider:

               *((volatile int *)0xe0000100) = 1;
               *((volatile int *)0xe0000110) = 2;

       We want this to wind up as:

                mov rA, #0xe0000000
                mov rB, #1
                str rB, [rA, #0x100]
                mov rB, #2
                str rB, [rA, #0x110]

       rather than having to synthesize both large constants from scratch.

       Therefore, we calculate how many insns would be required to emit
       the constant starting from `best_start', and also starting from
       zero (ie with bit 31 first to be output).  If `best_start' doesn't
       yield a shorter sequence, we may as well use zero.  */
    if (best_start != 0
        && ((((unsigned HOST_WIDE_INT) 1) << best_start) < remainder)
        && (count_insns_for_constant (remainder, 0) <=
            count_insns_for_constant (remainder, best_start)))
      best_start = 0;
    /* Now start emitting the insns.  */
    i = best_start;
    do
      {
        int end;

        if (i <= 0)
          i += 32;
        if (remainder & (3 << (i - 2)))
          {
            end = i - 8;
            if (end < 0)
              end += 32;
            temp1 = remainder & ((0x0ff << end)
                                 | ((i < end) ? (0xff >> (32 - end)) : 0));
            remainder &= ~temp1;

            if (generate)
              {
                rtx new_src, temp1_rtx;

                if (code == SET || code == MINUS)
                  {
                    new_src = (subtargets ? gen_reg_rtx (mode) : target);
                    if (can_invert && code != MINUS)
                      temp1 = ~temp1;
                  }
                else
                  {
                    if (remainder && subtargets)
                      new_src = gen_reg_rtx (mode);
                    else
                      new_src = target;
                    if (can_invert)
                      temp1 = ~temp1;
                    else if (can_negate)
                      temp1 = -temp1;
                  }

                temp1 = trunc_int_for_mode (temp1, mode);
                temp1_rtx = GEN_INT (temp1);

                if (code == SET)
                  ;
                else if (code == MINUS)
                  temp1_rtx = gen_rtx_MINUS (mode, temp1_rtx, source);
                else
                  temp1_rtx = gen_rtx_fmt_ee (code, mode, source, temp1_rtx);

                emit_insn (gen_rtx_SET (VOIDmode, new_src, temp1_rtx));
                source = new_src;
              }

            if (code == SET)
              {
                can_invert = 0;
                code = PLUS;
              }
            else if (code == MINUS)
              code = PLUS;

            insns++;
            i -= 6;
          }
        i -= 2;
      }
    while (remainder);
  }

  return insns;
}
/* Canonicalize a comparison so that we are more likely to recognize it.
   This can be done for a few constant compares, where we can make the
   immediate value easier to load.  */

enum rtx_code
arm_canonicalize_comparison (code, op1)
     enum rtx_code code;
     rtx * op1;
{
  unsigned HOST_WIDE_INT i = INTVAL (*op1);

  switch (code)
    {
    case EQ:
    case NE:
      return code;

    case GT:
    case LE:
      if (i != ((((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1)) - 1)
          && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
        {
          *op1 = GEN_INT (i + 1);
          return code == GT ? GE : LT;
        }
      break;

    case GE:
    case LT:
      if (i != (((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1))
          && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
        {
          *op1 = GEN_INT (i - 1);
          return code == GE ? GT : LE;
        }
      break;

    case GTU:
    case LEU:
      if (i != ~((unsigned HOST_WIDE_INT) 0)
          && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
        {
          *op1 = GEN_INT (i + 1);
          return code == GTU ? GEU : LTU;
        }
      break;

    case GEU:
    case LTU:
      if (i != 0
          && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
        {
          *op1 = GEN_INT (i - 1);
          return code == GEU ? GTU : LEU;
        }
      break;

    default:
      abort ();
    }

  return code;
}
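/* Example (illustrative, not from the original source): "cmp rN, #0xfff"
   is impossible since 0xfff is not a valid immediate, but (GT x 0xfff)
   is the same test as (GE x 0x1000), and 0x1000 is encodable, so the
   GT/LE case above rewrites the comparison to use the cheaper constant.  */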
/* Decide whether a type should be returned in memory (true)
   or in a register (false).  This is called by the macro
   RETURN_IN_MEMORY.  */

int
arm_return_in_memory (type)
     tree type;
{
  if (!AGGREGATE_TYPE_P (type))
    /* All simple types are returned in registers.  */
    return 0;

#ifdef ARM_WINCE
  /* For the arm-wince targets we choose to be compatible with Microsoft's
     ARM and Thumb compilers, which always return aggregates in memory.  */
  return 1;
#else
  /* All structures/unions bigger than one word are returned in memory.
     Also catch the case where int_size_in_bytes returns -1.  In this case
     the aggregate is either huge or of variable size, and in either case
     we will want to return it via memory and not in a register.  */
  if (((unsigned int) int_size_in_bytes (type)) > UNITS_PER_WORD)
    return 1;

  if (TREE_CODE (type) == RECORD_TYPE)
    {
      tree field;

      /* For a struct the APCS says that we only return in a register
         if the type is 'integer like' and every addressable element
         has an offset of zero.  For practical purposes this means
         that the structure can have at most one non bit-field element
         and that this element must be the first one in the structure.  */

      /* Find the first field, ignoring non FIELD_DECL things which will
         have been created by C++.  */
      for (field = TYPE_FIELDS (type);
           field && TREE_CODE (field) != FIELD_DECL;
           field = TREE_CHAIN (field))
        continue;

      if (field == NULL)
        return 0; /* An empty structure.  Allowed by an extension to ANSI C.  */

      /* Check that the first field is valid for returning in a register.  */

      /* ... Floats are not allowed */
      if (FLOAT_TYPE_P (TREE_TYPE (field)))
        return 1;

      /* ... Aggregates that are not themselves valid for returning in
         a register are not allowed.  */
      if (RETURN_IN_MEMORY (TREE_TYPE (field)))
        return 1;

      /* Now check the remaining fields, if any.  Only bitfields are allowed,
         since they are not addressable.  */
      for (field = TREE_CHAIN (field);
           field;
           field = TREE_CHAIN (field))
        {
          if (TREE_CODE (field) != FIELD_DECL)
            continue;

          if (!DECL_BIT_FIELD_TYPE (field))
            return 1;
        }

      return 0;
    }

  if (TREE_CODE (type) == UNION_TYPE)
    {
      tree field;

      /* Unions can be returned in registers if every element is
         integral, or can be returned in an integer register.  */
      for (field = TYPE_FIELDS (type);
           field;
           field = TREE_CHAIN (field))
        {
          if (TREE_CODE (field) != FIELD_DECL)
            continue;

          if (FLOAT_TYPE_P (TREE_TYPE (field)))
            return 1;

          if (RETURN_IN_MEMORY (TREE_TYPE (field)))
            return 1;
        }

      return 0;
    }
#endif /* not ARM_WINCE */

  /* Return all other types in memory.  */
  return 1;
}
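/* Examples of the APCS rules implemented above (illustrative, not from
   the original source): "struct { int i; }" is integer-like and comes
   back in a register, while "struct { float f; }" fails the float check
   and "struct { int a, b; }" is bigger than one word, so both of those
   are returned in memory.  */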
/* Initialize a variable CUM of type CUMULATIVE_ARGS
   for a call to a function whose data type is FNTYPE.
   For a library call, FNTYPE is NULL.  */

void
arm_init_cumulative_args (pcum, fntype, libname, indirect)
     CUMULATIVE_ARGS * pcum;
     tree fntype;
     rtx libname  ATTRIBUTE_UNUSED;
     int indirect ATTRIBUTE_UNUSED;
{
  /* On the ARM, the offset starts at 0.  */
  pcum->nregs = ((fntype && aggregate_value_p (TREE_TYPE (fntype))) ? 1 : 0);

  pcum->call_cookie = CALL_NORMAL;

  if (TARGET_LONG_CALLS)
    pcum->call_cookie = CALL_LONG;

  /* Check for long call/short call attributes.  The attributes
     override any command line option.  */
  if (fntype)
    {
      if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (fntype)))
        pcum->call_cookie = CALL_SHORT;
      else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (fntype)))
        pcum->call_cookie = CALL_LONG;
    }
}
/* Determine where to put an argument to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).  */

rtx
arm_function_arg (pcum, mode, type, named)
     CUMULATIVE_ARGS * pcum;
     enum machine_mode mode;
     tree type ATTRIBUTE_UNUSED;
     int named;
{
  if (mode == VOIDmode)
    /* Compute operand 2 of the call insn.  */
    return GEN_INT (pcum->call_cookie);

  if (!named || pcum->nregs >= NUM_ARG_REGS)
    return NULL_RTX;

  return gen_rtx_REG (mode, pcum->nregs);
}
/* Encode the current state of the #pragma [no_]long_calls.  */
typedef enum
{
  OFF,          /* No #pragma [no_]long_calls is in effect.  */
  LONG,         /* #pragma long_calls is in effect.  */
  SHORT         /* #pragma no_long_calls is in effect.  */
} arm_pragma_enum;

static arm_pragma_enum arm_pragma_long_calls = OFF;

void
arm_pr_long_calls (pfile)
     cpp_reader * pfile ATTRIBUTE_UNUSED;
{
  arm_pragma_long_calls = LONG;
}

void
arm_pr_no_long_calls (pfile)
     cpp_reader * pfile ATTRIBUTE_UNUSED;
{
  arm_pragma_long_calls = SHORT;
}

void
arm_pr_long_calls_off (pfile)
     cpp_reader * pfile ATTRIBUTE_UNUSED;
{
  arm_pragma_long_calls = OFF;
}
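/* Illustrative use from C source (example only, not from the original
   file):

       #pragma long_calls
       extern void far_away (void);    (gets the long_call attribute)
       #pragma long_calls_off

   arm_set_default_type_attributes (below) attaches the attribute implied
   by the state recorded here to each function type declared in scope.  */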
/* Table of machine attributes.  */
const struct attribute_spec arm_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  /* Function calls made to this symbol must be done indirectly, because
     it may lie outside of the 26 bit addressing range of a normal function
     call.  */
  { "long_call",    0, 0, false, true,  true,  NULL },
  /* Whereas these functions are always known to reside within the 26 bit
     addressing range.  */
  { "short_call",   0, 0, false, true,  true,  NULL },
  /* Interrupt Service Routines have special prologue and epilogue requirements.  */
  { "isr",          0, 1, false, false, false, arm_handle_isr_attribute },
  { "interrupt",    0, 1, false, false, false, arm_handle_isr_attribute },
  { "naked",        0, 0, true,  false, false, arm_handle_fndecl_attribute },
#ifdef ARM_PE
  /* ARM/PE has three new attributes:
     dllexport - for exporting a function/variable that will live in a dll
     dllimport - for importing a function/variable from a dll

     Microsoft allows multiple declspecs in one __declspec, separating
     them with spaces.  We do NOT support this.  Instead, use __declspec
     multiple times.  */
  { "dllimport",    0, 0, true,  false, false, NULL },
  { "dllexport",    0, 0, true,  false, false, NULL },
  { "interfacearm", 0, 0, true,  false, false, arm_handle_fndecl_attribute },
#endif
  { NULL,           0, 0, false, false, false, NULL }
};
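/* Illustrative use from C source (example declarations, not from the
   original file):

       void spin (void) __attribute__ ((naked));
       extern int far_func (int) __attribute__ ((long_call));

   "naked" suppresses the usual prologue/epilogue, while "long_call" and
   "short_call" force or forbid the out-of-range call sequence decided
   in arm_is_longcall_p (below).  */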
/* Handle an attribute requiring a FUNCTION_DECL;
   arguments as in struct attribute_spec.handler.  */

static tree
arm_handle_fndecl_attribute (node, name, args, flags, no_add_attrs)
     tree * node;
     tree   name;
     tree   args  ATTRIBUTE_UNUSED;
     int    flags ATTRIBUTE_UNUSED;
     bool * no_add_attrs;
{
  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning ("`%s' attribute only applies to functions",
               IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
/* Handle an "interrupt" or "isr" attribute;
   arguments as in struct attribute_spec.handler.  */

static tree
arm_handle_isr_attribute (node, name, args, flags, no_add_attrs)
     tree * node;
     tree   name;
     tree   args;
     int    flags;
     bool * no_add_attrs;
{
  if (DECL_P (*node))
    {
      if (TREE_CODE (*node) != FUNCTION_DECL)
        {
          warning ("`%s' attribute only applies to functions",
                   IDENTIFIER_POINTER (name));
          *no_add_attrs = true;
        }
      /* FIXME: the argument if any is checked for type attributes;
         should it be checked for decl ones?  */
    }
  else
    {
      if (TREE_CODE (*node) == FUNCTION_TYPE
          || TREE_CODE (*node) == METHOD_TYPE)
        {
          if (arm_isr_value (args) == ARM_FT_UNKNOWN)
            {
              warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
              *no_add_attrs = true;
            }
        }
      else if (TREE_CODE (*node) == POINTER_TYPE
               && (TREE_CODE (TREE_TYPE (*node)) == FUNCTION_TYPE
                   || TREE_CODE (TREE_TYPE (*node)) == METHOD_TYPE)
               && arm_isr_value (args) != ARM_FT_UNKNOWN)
        {
          *node = build_type_copy (*node);
          TREE_TYPE (*node) = build_type_attribute_variant
            (TREE_TYPE (*node),
             tree_cons (name, args, TYPE_ATTRIBUTES (TREE_TYPE (*node))));
          *no_add_attrs = true;
        }
      else
        {
          /* Possibly pass this attribute on from the type to a decl.  */
          if (flags & ((int) ATTR_FLAG_DECL_NEXT
                       | (int) ATTR_FLAG_FUNCTION_NEXT
                       | (int) ATTR_FLAG_ARRAY_NEXT))
            {
              *no_add_attrs = true;
              return tree_cons (name, args, NULL_TREE);
            }
          else
            warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
        }
    }

  return NULL_TREE;
}
/* Return 0 if the attributes for two types are incompatible, 1 if they
   are compatible, and 2 if they are nearly compatible (which causes a
   warning to be generated).  */

static int
arm_comp_type_attributes (type1, type2)
     tree type1;
     tree type2;
{
  int l1, l2, s1, s2;

  /* Check for mismatch of non-default calling convention.  */
  if (TREE_CODE (type1) != FUNCTION_TYPE)
    return 1;

  /* Check for mismatched call attributes.  */
  l1 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type1)) != NULL;
  l2 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type2)) != NULL;
  s1 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type1)) != NULL;
  s2 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type2)) != NULL;

  /* Only bother to check if an attribute is defined.  */
  if (l1 | l2 | s1 | s2)
    {
      /* If one type has an attribute, the other must have the same attribute.  */
      if ((l1 != l2) || (s1 != s2))
        return 0;

      /* Disallow mixed attributes.  */
      if ((l1 & s2) || (l2 & s1))
        return 0;
    }

  /* Check for mismatched ISR attribute.  */
  l1 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type1)) != NULL;
  if (! l1)
    l1 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type1)) != NULL;
  l2 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type2)) != NULL;
  if (! l2)
    l2 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type2)) != NULL;
  if (l1 != l2)
    return 0;

  return 1;
}
/* Encode long_call or short_call attribute by prefixing
   symbol name in DECL with a special character FLAG.  */

void
arm_encode_call_attribute (decl, flag)
     tree decl;
     int flag;
{
  const char * str = XSTR (XEXP (DECL_RTL (decl), 0), 0);
  int          len = strlen (str);
  char *       newstr;

  /* Do not allow weak functions to be treated as short call.  */
  if (DECL_WEAK (decl) && flag == SHORT_CALL_FLAG_CHAR)
    return;

  newstr = alloca (len + 2);
  newstr[0] = flag;
  strcpy (newstr + 1, str);

  newstr = (char *) ggc_alloc_string (newstr, len + 1);
  XSTR (XEXP (DECL_RTL (decl), 0), 0) = newstr;
}
/* Assigns default attributes to newly defined type.  This is used to
   set short_call/long_call attributes for function types of
   functions defined inside corresponding #pragma scopes.  */

static void
arm_set_default_type_attributes (type)
     tree type;
{
  /* Add __attribute__ ((long_call)) to all functions, when
     inside #pragma long_calls or __attribute__ ((short_call)),
     when inside #pragma no_long_calls.  */
  if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
    {
      tree type_attr_list, attr_name;
      type_attr_list = TYPE_ATTRIBUTES (type);

      if (arm_pragma_long_calls == LONG)
        attr_name = get_identifier ("long_call");
      else if (arm_pragma_long_calls == SHORT)
        attr_name = get_identifier ("short_call");
      else
        return;

      type_attr_list = tree_cons (attr_name, NULL_TREE, type_attr_list);
      TYPE_ATTRIBUTES (type) = type_attr_list;
    }
}
/* Return 1 if the operand is a SYMBOL_REF for a function known to be
   defined within the current compilation unit.  If this cannot be
   determined, then 0 is returned.  */

static int
current_file_function_operand (sym_ref)
     rtx sym_ref;
{
  /* This is a bit of a fib.  A function will have a short call flag
     applied to its name if it has the short call attribute, or it has
     already been defined within the current compilation unit.  */
  if (ENCODED_SHORT_CALL_ATTR_P (XSTR (sym_ref, 0)))
    return 1;

  /* The current function is always defined within the current compilation
     unit.  If it is a weak definition however, then this may not be the real
     definition of the function, and so we have to say no.  */
  if (sym_ref == XEXP (DECL_RTL (current_function_decl), 0)
      && !DECL_WEAK (current_function_decl))
    return 1;

  /* We cannot make the determination - default to returning 0.  */
  return 0;
}
/* Return non-zero if a 32 bit "long_call" should be generated for
   this call.  We generate a long_call if the function:

        a.  has an __attribute__ ((long_call))
     or b.  is within the scope of a #pragma long_calls
     or c.  the -mlong-calls command line switch has been specified

   However we do not generate a long call if the function:

        d.  has an __attribute__ ((short_call))
     or e.  is inside the scope of a #pragma no_long_calls
     or f.  has an __attribute__ ((section))
     or g.  is defined within the current compilation unit.

   This function will be called by C fragments contained in the machine
   description file.  CALL_REF and CALL_COOKIE correspond to the matched
   rtl operands.  CALL_SYMBOL is used to distinguish between
   two different callers of the function.  It is set to 1 in the
   "call_symbol" and "call_symbol_value" patterns and to 0 in the "call"
   and "call_value" patterns.  This is because of the difference in the
   SYM_REFs passed by these patterns.  */

int
arm_is_longcall_p (sym_ref, call_cookie, call_symbol)
     rtx sym_ref;
     int call_cookie;
     int call_symbol;
{
  if (!call_symbol)
    {
      if (GET_CODE (sym_ref) != MEM)
	return 0;

      sym_ref = XEXP (sym_ref, 0);
    }

  if (GET_CODE (sym_ref) != SYMBOL_REF)
    return 0;

  if (call_cookie & CALL_SHORT)
    return 0;

  if (TARGET_LONG_CALLS && flag_function_sections)
    return 1;

  if (current_file_function_operand (sym_ref))
    return 0;

  return (call_cookie & CALL_LONG)
    || ENCODED_LONG_CALL_ATTR_P (XSTR (sym_ref, 0))
    || TARGET_LONG_CALLS;
}
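/* Worked example (hypothetical): with -mlong-calls in effect, a call
   to an external function that is neither marked short_call nor
   already defined in this unit reaches the final return above and
   yields non-zero through TARGET_LONG_CALLS, so the md patterns emit
   the full 32 bit call sequence.  */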
/* Return non-zero if it is ok to make a tail-call to DECL.  */

int
arm_function_ok_for_sibcall (decl)
     tree decl;
{
  int call_type = TARGET_LONG_CALLS ? CALL_LONG : CALL_NORMAL;

  /* Never tailcall something for which we have no decl, or if we
     are in Thumb mode.  */
  if (decl == NULL || TARGET_THUMB)
    return 0;

  /* Get the calling method.  */
  if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
    call_type = CALL_SHORT;
  else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
    call_type = CALL_LONG;

  /* Cannot tail-call to long calls, since these are out of range of
     a branch instruction.  However, if not compiling PIC, we know
     we can reach the symbol if it is in this compilation unit.  */
  if (call_type == CALL_LONG && (flag_pic || !TREE_ASM_WRITTEN (decl)))
    return 0;

  /* If we are interworking and the function is not declared static
     then we can't tail-call it unless we know that it exists in this
     compilation unit (since it might be a Thumb routine).  */
  if (TARGET_INTERWORK && TREE_PUBLIC (decl) && !TREE_ASM_WRITTEN (decl))
    return 0;

  /* Never tailcall from an ISR routine - it needs a special exit sequence.  */
  if (IS_INTERRUPT (arm_current_func_type ()))
    return 0;

  /* Everything else is ok.  */
  return 1;
}
int
legitimate_pic_operand_p (x)
     rtx x;
{
  if (CONSTANT_P (x)
      && flag_pic
      && (GET_CODE (x) == SYMBOL_REF
	  || (GET_CODE (x) == CONST
	      && GET_CODE (XEXP (x, 0)) == PLUS
	      && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)))
    return 0;

  return 1;
}
rtx
legitimize_pic_address (orig, mode, reg)
     rtx orig;
     enum machine_mode mode;
     rtx reg;
{
  if (GET_CODE (orig) == SYMBOL_REF
      || GET_CODE (orig) == LABEL_REF)
    {
#ifndef AOF_ASSEMBLER
      rtx pic_ref, address;
#endif
      rtx insn;
      int subregs = 0;

      if (reg == 0)
	{
	  if (no_new_pseudos)
	    abort ();
	  else
	    reg = gen_reg_rtx (Pmode);

	  subregs = 1;
	}

#ifdef AOF_ASSEMBLER
      /* The AOF assembler can generate relocations for these directly, and
	 understands that the PIC register has to be added into the offset.  */
      insn = emit_insn (gen_pic_load_addr_based (reg, orig));
#else
      if (subregs)
	address = gen_reg_rtx (Pmode);
      else
	address = reg;

      if (TARGET_ARM)
	emit_insn (gen_pic_load_addr_arm (address, orig));
      else
	emit_insn (gen_pic_load_addr_thumb (address, orig));

      if ((GET_CODE (orig) == LABEL_REF
	   || (GET_CODE (orig) == SYMBOL_REF &&
	       ENCODED_SHORT_CALL_ATTR_P (XSTR (orig, 0))))
	  && NEED_GOT_RELOC)
	pic_ref = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, address);
      else
	{
	  pic_ref = gen_rtx_MEM (Pmode,
				 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
					       address));
	  RTX_UNCHANGING_P (pic_ref) = 1;
	}

      insn = emit_move_insn (reg, pic_ref);
#endif
      current_function_uses_pic_offset_table = 1;
      /* Put a REG_EQUAL note on this insn, so that it can be optimized
	 by loop.  */
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, orig,
					    REG_NOTES (insn));
      return reg;
    }
  else if (GET_CODE (orig) == CONST)
    {
      rtx base, offset;

      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
	return orig;

      if (reg == 0)
	{
	  if (no_new_pseudos)
	    abort ();
	  else
	    reg = gen_reg_rtx (Pmode);
	}

      if (GET_CODE (XEXP (orig, 0)) == PLUS)
	{
	  base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
	  offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
					   base == reg ? 0 : reg);
	}
      else
	abort ();

      if (GET_CODE (offset) == CONST_INT)
	{
	  /* The base register doesn't really matter, we only want to
	     test the index for the appropriate mode.  */
	  ARM_GO_IF_LEGITIMATE_INDEX (mode, 0, offset, win);

	  if (!no_new_pseudos)
	    offset = force_reg (Pmode, offset);
	  else
	    abort ();

	win:
	  if (GET_CODE (offset) == CONST_INT)
	    return plus_constant (base, INTVAL (offset));
	}

      if (GET_MODE_SIZE (mode) > 4
	  && (GET_MODE_CLASS (mode) == MODE_INT
	      || TARGET_SOFT_FLOAT))
	{
	  emit_insn (gen_addsi3 (reg, base, offset));
	  return reg;
	}

      return gen_rtx_PLUS (Pmode, base, offset);
    }

  return orig;
}
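/* For illustration: for a global symbol "x" under -fpic, the non-AOF
   path above loads the GOT offset of "x" into ADDRESS and then emits
   a load from (mem (plus pic_offset_table_rtx address)), i.e. the GOT
   slot holding x's run-time address; the REG_EQUAL note lets the loop
   optimizer treat the result as equal to the original symbol.  */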
/* Generate code to load the PIC register.  PROLOGUE is true if
   called from arm_expand_prologue (in which case we want the
   generated insns at the start of the function); false if called
   by an exception receiver that needs the PIC register reloaded
   (in which case the insns are just dumped at the current location).  */

void
arm_finalize_pic (prologue)
     int prologue ATTRIBUTE_UNUSED;
{
#ifndef AOF_ASSEMBLER
  rtx l1, pic_tmp, pic_tmp2, seq, pic_rtx;
  rtx global_offset_table;

  if (current_function_uses_pic_offset_table == 0 || TARGET_SINGLE_PIC_BASE)
    return;

  if (!flag_pic)
    abort ();

  start_sequence ();
  l1 = gen_label_rtx ();

  global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
  /* On the ARM the PC register contains 'dot + 8' at the time of the
     addition, on the Thumb it is 'dot + 4'.  */
  pic_tmp = plus_constant (gen_rtx_LABEL_REF (Pmode, l1), TARGET_ARM ? 8 : 4);
  if (GOT_PCREL)
    pic_tmp2 = gen_rtx_CONST (VOIDmode,
			      gen_rtx_PLUS (Pmode, global_offset_table, pc_rtx));
  else
    pic_tmp2 = gen_rtx_CONST (VOIDmode, global_offset_table);

  pic_rtx = gen_rtx_CONST (Pmode, gen_rtx_MINUS (Pmode, pic_tmp2, pic_tmp));

  if (TARGET_ARM)
    {
      emit_insn (gen_pic_load_addr_arm (pic_offset_table_rtx, pic_rtx));
      emit_insn (gen_pic_add_dot_plus_eight (pic_offset_table_rtx, l1));
    }
  else
    {
      emit_insn (gen_pic_load_addr_thumb (pic_offset_table_rtx, pic_rtx));
      emit_insn (gen_pic_add_dot_plus_four (pic_offset_table_rtx, l1));
    }

  seq = gen_sequence ();
  end_sequence ();
  if (prologue)
    emit_insn_after (seq, get_insns ());
  else
    emit_insn (seq);

  /* Need to emit this whether or not we obey regdecls,
     since setjmp/longjmp can cause life info to screw up.  */
  emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
#endif /* AOF_ASSEMBLER */
}
#define REG_OR_SUBREG_REG(X)						\
  (GET_CODE (X) == REG							\
   || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))

#define REG_OR_SUBREG_RTX(X)			\
   (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))

#ifndef COSTS_N_INSNS
#define COSTS_N_INSNS(N)  ((N) * 4 - 2)
#endif
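/* Example of the fallback cost scale defined above: COSTS_N_INSNS (1)
   == 2, COSTS_N_INSNS (2) == 6 and COSTS_N_INSNS (3) == 10, so each
   additional insn adds a cost of 4.  */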
int
arm_rtx_costs (x, code, outer)
     rtx x;
     enum rtx_code code;
     enum rtx_code outer;
{
  enum machine_mode mode = GET_MODE (x);
  enum rtx_code subcode;
  int extra_cost;

  if (TARGET_THUMB)
    {
      switch (code)
	{
	case ASHIFT:
	case ASHIFTRT:
	case LSHIFTRT:
	case ROTATERT:
	case PLUS:
	case MINUS:
	case COMPARE:
	case NEG:
	case NOT:
	  return COSTS_N_INSNS (1);

	case MULT:
	  if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	    {
	      int cycles = 0;
	      unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));

	      while (i)
		{
		  i >>= 2;
		  cycles++;
		}
	      return COSTS_N_INSNS (2) + cycles;
	    }
	  return COSTS_N_INSNS (1) + 16;

	case SET:
	  return (COSTS_N_INSNS (1)
		  + 4 * ((GET_CODE (SET_SRC (x)) == MEM)
			 + GET_CODE (SET_DEST (x)) == MEM));

	case CONST_INT:
	  if (outer == SET)
	    {
	      if ((unsigned HOST_WIDE_INT) INTVAL (x) < 256)
		return 0;
	      if (thumb_shiftable_const (INTVAL (x)))
		return COSTS_N_INSNS (2);
	      return COSTS_N_INSNS (3);
	    }
	  else if (outer == PLUS
		   && INTVAL (x) < 256 && INTVAL (x) > -256)
	    return 0;
	  else if (outer == COMPARE
		   && (unsigned HOST_WIDE_INT) INTVAL (x) < 256)
	    return 0;
	  else if (outer == ASHIFT || outer == ASHIFTRT
		   || outer == LSHIFTRT)
	    return 0;
	  return COSTS_N_INSNS (2);

	case CONST:
	case CONST_DOUBLE:
	case LABEL_REF:
	case SYMBOL_REF:
	  return COSTS_N_INSNS (3);

	case UDIV:
	case UMOD:
	case DIV:
	case MOD:
	  return 100;

	case TRUNCATE:
	  return 99;

	case AND:
	case XOR:
	case IOR:
	  /* XXX guess.  */
	  return 8;

	case ADDRESSOF:
	case MEM:
	  /* XXX another guess.  */
	  /* Memory costs quite a lot for the first word, but subsequent words
	     load at the equivalent of a single insn each.  */
	  return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
		  + ((GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
		     ? 4 : 0));

	case IF_THEN_ELSE:
	  /* XXX a guess.  */
	  if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
	    return 14;
	  return 2;

	case ZERO_EXTEND:
	  /* XXX still guessing.  */
	  switch (GET_MODE (XEXP (x, 0)))
	    {
	    case QImode:
	      return (1 + (mode == DImode ? 4 : 0)
		      + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

	    case HImode:
	      return (4 + (mode == DImode ? 4 : 0)
		      + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

	    case SImode:
	      return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

	    default:
	      return 99;
	    }

	default:
	  return 99;
#if 0
	case FFS:
	case FLOAT:
	case FIX:
	case UNSIGNED_FIX:
	  /* XXX guess.  */
	  fprintf (stderr, "unexpected code for thumb in rtx_costs: %s\n",
		   rtx_name[code]);
	  abort ();
#endif
	}
    }

  switch (code)
    {
    case MEM:
      /* Memory costs quite a lot for the first word, but subsequent words
	 load at the equivalent of a single insn each.  */
      return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
	      + (GET_CODE (x) == SYMBOL_REF
		 && CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));

    case DIV:
    case MOD:
      return 100;

    case ROTATE:
      if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
	return 4;
      /* Fall through */
    case ROTATERT:
      if (mode != SImode)
	return 8;
      /* Fall through */
    case ASHIFT: case LSHIFTRT: case ASHIFTRT:
      if (mode == DImode)
	return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
		+ ((GET_CODE (XEXP (x, 0)) == REG
		    || (GET_CODE (XEXP (x, 0)) == SUBREG
			&& GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
		   ? 0 : 8));
      return (1 + ((GET_CODE (XEXP (x, 0)) == REG
		    || (GET_CODE (XEXP (x, 0)) == SUBREG
			&& GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
		   ? 0 : 4)
	      + ((GET_CODE (XEXP (x, 1)) == REG
		  || (GET_CODE (XEXP (x, 1)) == SUBREG
		      && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
		  || (GET_CODE (XEXP (x, 1)) == CONST_INT))
		 ? 0 : 4));

    case MINUS:
      if (mode == DImode)
	return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 0))
		    || (GET_CODE (XEXP (x, 0)) == CONST_INT
		       && const_ok_for_arm (INTVAL (XEXP (x, 0)))))
		   ? 0 : 8));

      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
	return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
		      || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
			  && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
		     ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 0))
		    || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
			&& const_double_rtx_ok_for_fpu (XEXP (x, 0))))
		   ? 0 : 8));

      if (((GET_CODE (XEXP (x, 0)) == CONST_INT
	    && const_ok_for_arm (INTVAL (XEXP (x, 0)))
	    && REG_OR_SUBREG_REG (XEXP (x, 1))))
	  || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
	       || subcode == ASHIFTRT || subcode == LSHIFTRT
	       || subcode == ROTATE || subcode == ROTATERT
	       || (subcode == MULT
		   && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
		   && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
			(INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
	      && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
	      && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
		  || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
	      && REG_OR_SUBREG_REG (XEXP (x, 0))))
	return 1;
      /* Fall through */

    case PLUS:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
	return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 1))
		    || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
			&& const_double_rtx_ok_for_fpu (XEXP (x, 1))))
		   ? 0 : 8));

      /* Fall through */
    case AND: case XOR: case IOR:
      extra_cost = 0;

      /* Normally the frame registers will be spilt into reg+const during
	 reload, so it is a bad idea to combine them with other instructions,
	 since then they might not be moved outside of loops.  As a compromise
	 we allow integration with ops that have a constant as their second
	 operand.  */
      if ((REG_OR_SUBREG_REG (XEXP (x, 0))
	   && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
	   && GET_CODE (XEXP (x, 1)) != CONST_INT)
	  || (REG_OR_SUBREG_REG (XEXP (x, 0))
	      && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))))
	extra_cost = 4;

      if (mode == DImode)
	return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 1))
		    || (GET_CODE (XEXP (x, 1)) == CONST_INT
			&& const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
		   ? 0 : 8));

      if (REG_OR_SUBREG_REG (XEXP (x, 0)))
	return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 1))
		    || (GET_CODE (XEXP (x, 1)) == CONST_INT
			&& const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
		   ? 0 : 4));

      else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
	return (1 + extra_cost
		+ ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
		     || subcode == LSHIFTRT || subcode == ASHIFTRT
		     || subcode == ROTATE || subcode == ROTATERT
		     || (subcode == MULT
			 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
			 && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
			      (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0)))
		    && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
		    && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
			|| GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
		   ? 0 : 4));

      return 8;

    case MULT:
      /* There is no point basing this on the tuning, since it is always the
	 fast variant if it exists at all.  */
      if (arm_fast_multiply && mode == DImode
	  && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
	  && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
	      || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
	return 8;

      if (GET_MODE_CLASS (mode) == MODE_FLOAT
	  || mode == DImode)
	return 30;

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	{
	  unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
				      & (unsigned HOST_WIDE_INT) 0xffffffff);
	  int add_cost = const_ok_for_arm (i) ? 4 : 8;
	  int j;

	  /* Tune as appropriate.  */
	  int booth_unit_size = ((tune_flags & FL_FAST_MULT) ? 8 : 2);

	  for (j = 0; i && j < 32; j += booth_unit_size)
	    {
	      i >>= booth_unit_size;
	      add_cost += 2;
	    }

	  return add_cost;
	}

      return (((tune_flags & FL_FAST_MULT) ? 8 : 30)
	      + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
	      + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));

    case TRUNCATE:
      if (arm_fast_multiply && mode == SImode
	  && GET_CODE (XEXP (x, 0)) == LSHIFTRT
	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
	  && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
	      == GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)))
	  && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ZERO_EXTEND
	      || GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SIGN_EXTEND))
	return 8;
      return 99;

    case NEG:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
	return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
      /* Fall through */
    case NOT:
      if (mode == DImode)
	return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);

      return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);

    case IF_THEN_ELSE:
      if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
	return 14;
      return 2;

    case COMPARE:
      return 1;

    case ABS:
      return 4 + (mode == DImode ? 4 : 0);

    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == QImode)
	return (4 + (mode == DImode ? 4 : 0)
		+ (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
      /* Fall through */
    case ZERO_EXTEND:
      switch (GET_MODE (XEXP (x, 0)))
	{
	case QImode:
	  return (1 + (mode == DImode ? 4 : 0)
		  + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

	case HImode:
	  return (4 + (mode == DImode ? 4 : 0)
		  + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

	case SImode:
	  return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

	default:
	  break;
	}
      abort ();

    case CONST_INT:
      if (const_ok_for_arm (INTVAL (x)))
	return outer == SET ? 2 : -1;
      else if (outer == AND
	       && const_ok_for_arm (~INTVAL (x)))
	return -1;
      else if ((outer == COMPARE
		|| outer == PLUS || outer == MINUS)
	       && const_ok_for_arm (-INTVAL (x)))
	return -1;
      else
	return 5;

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      return 6;

    case CONST_DOUBLE:
      if (const_double_rtx_ok_for_fpu (x))
	return outer == SET ? 2 : -1;
      else if ((outer == COMPARE || outer == PLUS)
	       && neg_const_double_rtx_ok_for_fpu (x))
	return -1;
      return 7;

    default:
      return 99;
    }
}
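/* Worked example for the MULT case above (illustrative): without
   FL_FAST_MULT the Booth unit retires 2 bits of the constant per
   step, so a multiplier with significant bits spread across the full
   word can add up to 16 steps of 2 to ADD_COST; with FL_FAST_MULT
   (8 bits per step) the same constant costs at most 4 such steps.  */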
static int
arm_adjust_cost (insn, link, dep, cost)
     rtx insn;
     rtx link;
     rtx dep;
     int cost;
{
  rtx i_pat, d_pat;

  /* Some true dependencies can have a higher cost depending
     on precisely how certain input operands are used.  */
  if (arm_is_xscale
      && REG_NOTE_KIND (link) == 0
      && recog_memoized (insn) < 0
      && recog_memoized (dep) < 0)
    {
      int shift_opnum = get_attr_shift (insn);
      enum attr_type attr_type = get_attr_type (dep);

      /* If nonzero, SHIFT_OPNUM contains the operand number of a shifted
	 operand for INSN.  If we have a shifted input operand and the
	 instruction we depend on is another ALU instruction, then we may
	 have to account for an additional stall.  */
      if (shift_opnum != 0 && attr_type == TYPE_NORMAL)
	{
	  rtx shifted_operand;
	  int opno;

	  /* Get the shifted operand.  */
	  extract_insn (insn);
	  shifted_operand = recog_data.operand[shift_opnum];

	  /* Iterate over all the operands in DEP.  If we write an operand
	     that overlaps with SHIFTED_OPERAND, then we have to increase the
	     cost of this dependency.  */
	  extract_insn (dep);
	  preprocess_constraints ();
	  for (opno = 0; opno < recog_data.n_operands; opno++)
	    {
	      /* We can ignore strict inputs.  */
	      if (recog_data.operand_type[opno] == OP_IN)
		continue;

	      if (reg_overlap_mentioned_p (recog_data.operand[opno],
					   shifted_operand))
		return 2;
	    }
	}
    }

  /* XXX This is not strictly true for the FPA.  */
  if (REG_NOTE_KIND (link) == REG_DEP_ANTI
      || REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
    return 0;

  /* Call insns don't incur a stall, even if they follow a load.  */
  if (REG_NOTE_KIND (link) == 0
      && GET_CODE (insn) == CALL_INSN)
    return 1;

  if ((i_pat = single_set (insn)) != NULL
      && GET_CODE (SET_SRC (i_pat)) == MEM
      && (d_pat = single_set (dep)) != NULL
      && GET_CODE (SET_DEST (d_pat)) == MEM)
    {
      rtx src_mem = XEXP (SET_SRC (i_pat), 0);
      /* This is a load after a store, there is no conflict if the load reads
	 from a cached area.  Assume that loads from the stack, and from the
	 constant pool are cached, and that others will miss.  This is a
	 hack.  */

      if ((GET_CODE (src_mem) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (src_mem))
	  || reg_mentioned_p (stack_pointer_rtx, src_mem)
	  || reg_mentioned_p (frame_pointer_rtx, src_mem)
	  || reg_mentioned_p (hard_frame_pointer_rtx, src_mem))
	return 1;
    }

  return cost;
}
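/* Example of the store/load bypass above (illustrative): a load whose
   address mentions the stack or frame pointer, or a constant-pool
   reference, is assumed to hit the cache, so it is given the minimal
   cost of 1 even when it immediately follows a store.  */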
/* This code has been fixed for cross compilation.  */

static int fpa_consts_inited = 0;

static const char * const strings_fpa[8] =
{
  "0",   "1",   "2",   "3",
  "4",   "5",   "0.5", "10"
};

static REAL_VALUE_TYPE values_fpa[8];

static void
init_fpa_table ()
{
  int i;
  REAL_VALUE_TYPE r;

  for (i = 0; i < 8; i++)
    {
      r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
      values_fpa[i] = r;
    }

  fpa_consts_inited = 1;
}

/* Return TRUE if rtx X is a valid immediate FPU constant.  */

int
const_double_rtx_ok_for_fpu (x)
     rtx x;
{
  REAL_VALUE_TYPE r;
  int i;

  if (!fpa_consts_inited)
    init_fpa_table ();

  REAL_VALUE_FROM_CONST_DOUBLE (r, x);
  if (REAL_VALUE_MINUS_ZERO (r))
    return 0;

  for (i = 0; i < 8; i++)
    if (REAL_VALUES_EQUAL (r, values_fpa[i]))
      return 1;

  return 0;
}

/* Return TRUE if rtx X is a valid immediate FPU constant when negated.  */

int
neg_const_double_rtx_ok_for_fpu (x)
     rtx x;
{
  REAL_VALUE_TYPE r;
  int i;

  if (!fpa_consts_inited)
    init_fpa_table ();

  REAL_VALUE_FROM_CONST_DOUBLE (r, x);
  r = REAL_VALUE_NEGATE (r);
  if (REAL_VALUE_MINUS_ZERO (r))
    return 0;

  for (i = 0; i < 8; i++)
    if (REAL_VALUES_EQUAL (r, values_fpa[i]))
      return 1;

  return 0;
}
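/* For illustration: the FPA immediate set is exactly the eight values
   in strings_fpa, so a CONST_DOUBLE of 2.0 satisfies
   const_double_rtx_ok_for_fpu, while -2.0 is only accepted by
   neg_const_double_rtx_ok_for_fpu (as the negation of 2.0).  */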
/* Predicates for `match_operand' and `match_operator'.  */

/* s_register_operand is the same as register_operand, but it doesn't accept
   (SUBREG (MEM)...).

   This function exists because at the time it was put in it led to better
   code.  SUBREG(MEM) always needs a reload in the places where
   s_register_operand is used, and this seemed to lead to excessive
   reloads.  */

int
s_register_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  /* XXX might have to check for lo regs only for thumb ??? */
  return (GET_CODE (op) == REG
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
/* A hard register operand (even before reload).  */

int
arm_hard_register_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == REG
	  && REGNO (op) < FIRST_PSEUDO_REGISTER);
}
/* Only accept reg, subreg(reg), const_int.  */

int
reg_or_int_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    return 1;

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
/* Return 1 if OP is an item in memory, given that we are in reload.  */

int
arm_reload_memory_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int regno = true_regnum (op);

  return (!CONSTANT_P (op)
	  && (regno == -1
	      || (GET_CODE (op) == REG
		  && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
}
/* Return 1 if OP is a valid memory address, but not valid for a signed byte
   memory access (architecture V4).
   MODE is QImode if called when computing constraints, or VOIDmode when
   emitting patterns.  In this latter case we cannot use memory_operand()
   because it will fail on badly formed MEMs, which is precisely what we are
   trying to catch.  */

int
bad_signed_byte_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
#if 0
  if ((mode == QImode && !memory_operand (op, mode)) || GET_CODE (op) != MEM)
    return 0;
#endif
  if (GET_CODE (op) != MEM)
    return 0;

  op = XEXP (op, 0);

  /* A sum of anything more complex than reg + reg or reg + const is bad.  */
  if ((GET_CODE (op) == PLUS || GET_CODE (op) == MINUS)
      && (!s_register_operand (XEXP (op, 0), VOIDmode)
	  || (!s_register_operand (XEXP (op, 1), VOIDmode)
	      && GET_CODE (XEXP (op, 1)) != CONST_INT)))
    return 1;

  /* Big constants are also bad.  */
  if (GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT
      && (INTVAL (XEXP (op, 1)) > 0xff
	  || -INTVAL (XEXP (op, 1)) > 0xff))
    return 1;

  /* Everything else is good, or will automatically be made so.  */
  return 0;
}
/* Return TRUE for valid operands for the rhs of an ARM instruction.  */

int
arm_rhs_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
}
/* Return TRUE for valid operands for the
   rhs of an ARM instruction, or a load.  */

int
arm_rhsm_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
	  || memory_operand (op, mode));
}
/* Return TRUE for valid operands for the rhs of an ARM instruction, or if a
   constant that is valid when negated.  */

int
arm_add_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (TARGET_THUMB)
    return thumb_cmp_operand (op, mode);

  return (s_register_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
	      && (const_ok_for_arm (INTVAL (op))
		  || const_ok_for_arm (-INTVAL (op)))));
}
/* Return TRUE for valid operands for the rhs of an ARM instruction, or a
   constant that is valid when inverted.  */

int
arm_not_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
	      && (const_ok_for_arm (INTVAL (op))
		  || const_ok_for_arm (~INTVAL (op)))));
}
/* Return TRUE if the operand is a memory reference which contains an
   offsettable address.  */

int
offsettable_memory_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  return (mode == GET_MODE (op)
	  && GET_CODE (op) == MEM
	  && offsettable_address_p (reload_completed | reload_in_progress,
				    mode, XEXP (op, 0)));
}
/* Return TRUE if the operand is a memory reference which is, or can be
   made word aligned by adjusting the offset.  */

int
alignable_memory_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  rtx reg;

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  if (mode != GET_MODE (op) || GET_CODE (op) != MEM)
    return 0;

  op = XEXP (op, 0);

  return ((GET_CODE (reg = op) == REG
	   || (GET_CODE (op) == SUBREG
	       && GET_CODE (reg = SUBREG_REG (op)) == REG)
	   || (GET_CODE (op) == PLUS
	       && GET_CODE (XEXP (op, 1)) == CONST_INT
	       && (GET_CODE (reg = XEXP (op, 0)) == REG
		   || (GET_CODE (XEXP (op, 0)) == SUBREG
		       && GET_CODE (reg = SUBREG_REG (XEXP (op, 0))) == REG))))
	  && REGNO_POINTER_ALIGN (REGNO (reg)) >= 32);
}
/* Similar to s_register_operand, but does not allow hard integer
   registers.  */

int
f_register_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) == FPU_REGS));
}
/* Return TRUE for valid operands for the rhs of an FPU instruction.  */

int
fpu_rhs_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return FALSE;

  if (GET_CODE (op) == CONST_DOUBLE)
    return const_double_rtx_ok_for_fpu (op);

  return FALSE;
}
int
fpu_add_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return FALSE;

  if (GET_CODE (op) == CONST_DOUBLE)
    return (const_double_rtx_ok_for_fpu (op)
	    || neg_const_double_rtx_ok_for_fpu (op));

  return FALSE;
}
/* Return nonzero if OP is a constant power of two.  */

int
power_of_two_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (GET_CODE (op) == CONST_INT)
    {
      HOST_WIDE_INT value = INTVAL (op);

      return value != 0 && (value & (value - 1)) == 0;
    }

  return FALSE;
}
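/* Example: value == 8 (binary 1000) gives 8 & 7 == 0 and is accepted;
   value == 12 (binary 1100) gives 12 & 11 == 8 and is rejected.  The
   value != 0 test excludes zero, which would otherwise pass the mask
   test.  */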
/* Return TRUE for a valid operand of a DImode operation.
   Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
   Note that this disallows MEM(REG+REG), but allows
   MEM(PRE/POST_INC/DEC(REG)).  */

int
di_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;

  if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
    return FALSE;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  switch (GET_CODE (op))
    {
    case CONST_DOUBLE:
    case CONST_INT:
      return TRUE;

    case MEM:
      return memory_address_p (DImode, XEXP (op, 0));

    default:
      return FALSE;
    }
}
/* Like di_operand, but don't accept constants.  */

int
nonimmediate_di_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;

  if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
    return FALSE;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  if (GET_CODE (op) == MEM)
    return memory_address_p (DImode, XEXP (op, 0));

  return FALSE;
}
/* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
   Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
   Note that this disallows MEM(REG+REG), but allows
   MEM(PRE/POST_INC/DEC(REG)).  */

int
soft_df_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return FALSE;

  if (GET_CODE (op) == SUBREG && CONSTANT_P (SUBREG_REG (op)))
    return FALSE;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  switch (GET_CODE (op))
    {
    case CONST_DOUBLE:
      return TRUE;

    case MEM:
      return memory_address_p (DFmode, XEXP (op, 0));

    default:
      return FALSE;
    }
}
/* Like soft_df_operand, but don't accept constants.  */

int
nonimmediate_soft_df_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return FALSE;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  if (GET_CODE (op) == MEM)
    return memory_address_p (DFmode, XEXP (op, 0));

  return FALSE;
}
/* Return TRUE for valid index operands.  */

int
index_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
	  || (immediate_operand (op, mode)
	      && (GET_CODE (op) != CONST_INT
		  || (INTVAL (op) < 4096 && INTVAL (op) > -4096))));
}
/* Return TRUE for valid shifts by a constant.  This also accepts any
   power of two on the (somewhat overly relaxed) assumption that the
   shift operator in this case was a mult.  */

int
const_shift_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (power_of_two_operand (op, mode)
	  || (immediate_operand (op, mode)
	      && (GET_CODE (op) != CONST_INT
		  || (INTVAL (op) < 32 && INTVAL (op) > 0))));
}
/* Return TRUE for arithmetic operators which can be combined with a multiply
   (shift).  */

int
shiftable_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  enum rtx_code code;

  if (GET_MODE (x) != mode)
    return FALSE;

  code = GET_CODE (x);

  return (code == PLUS || code == MINUS
	  || code == IOR || code == XOR || code == AND);
}
/* Return TRUE for binary logical operators.  */

int
logical_binary_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  enum rtx_code code;

  if (GET_MODE (x) != mode)
    return FALSE;

  code = GET_CODE (x);

  return (code == IOR || code == XOR || code == AND);
}
/* Return TRUE for shift operators.  */

int
shift_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  enum rtx_code code;

  if (GET_MODE (x) != mode)
    return FALSE;

  code = GET_CODE (x);

  if (code == MULT)
    return power_of_two_operand (XEXP (x, 1), mode);

  return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
	  || code == ROTATERT);
}
/* Return TRUE if x is EQ or NE.  */

int
equality_operator (x, mode)
     rtx x;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return GET_CODE (x) == EQ || GET_CODE (x) == NE;
}
/* Return TRUE if x is a comparison operator other than LTGT or UNEQ.  */

int
arm_comparison_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  return (comparison_operator (x, mode)
	  && GET_CODE (x) != LTGT
	  && GET_CODE (x) != UNEQ);
}
/* Return TRUE for SMIN SMAX UMIN UMAX operators.  */

int
minmax_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  enum rtx_code code = GET_CODE (x);

  if (GET_MODE (x) != mode)
    return FALSE;

  return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
}
/* Return TRUE if this is the condition code register, if we aren't given
   a mode, accept any class CCmode register.  */

int
cc_register (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    {
      mode = GET_MODE (x);

      if (GET_MODE_CLASS (mode) != MODE_CC)
	return FALSE;
    }

  if (   GET_MODE (x) == mode
      && GET_CODE (x) == REG
      && REGNO (x) == CC_REGNUM)
    return TRUE;

  return FALSE;
}
/* Return TRUE if this is the condition code register, if we aren't given
   a mode, accept any class CCmode register which indicates a dominance
   expression.  */

int
dominant_cc_register (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    {
      mode = GET_MODE (x);

      if (GET_MODE_CLASS (mode) != MODE_CC)
	return FALSE;
    }

  if (   mode != CC_DNEmode && mode != CC_DEQmode
      && mode != CC_DLEmode && mode != CC_DLTmode
      && mode != CC_DGEmode && mode != CC_DGTmode
      && mode != CC_DLEUmode && mode != CC_DLTUmode
      && mode != CC_DGEUmode && mode != CC_DGTUmode)
    return FALSE;

  return cc_register (x, mode);
}
/* Return TRUE if X references a SYMBOL_REF.  */

int
symbol_mentioned_p (x)
     rtx x;
{
  const char * fmt;
  int i;

  if (GET_CODE (x) == SYMBOL_REF)
    return 1;

  fmt = GET_RTX_FORMAT (GET_CODE (x));

  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  int j;

	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (symbol_mentioned_p (XVECEXP (x, i, j)))
	      return 1;
	}
      else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
	return 1;
    }

  return 0;
}
/* Return TRUE if X references a LABEL_REF.  */

int
label_mentioned_p (x)
     rtx x;
{
  const char * fmt;
  int i;

  if (GET_CODE (x) == LABEL_REF)
    return 1;

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  int j;

	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (label_mentioned_p (XVECEXP (x, i, j)))
	      return 1;
	}
      else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
	return 1;
    }

  return 0;
}
enum rtx_code
minmax_code (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);

  if (code == SMAX)
    return GE;
  else if (code == SMIN)
    return LE;
  else if (code == UMIN)
    return LTU;
  else if (code == UMAX)
    return GEU;

  abort ();
}
/* Return 1 if memory locations are adjacent.  */

int
adjacent_mem_locations (a, b)
     rtx a, b;
{
  if ((GET_CODE (XEXP (a, 0)) == REG
       || (GET_CODE (XEXP (a, 0)) == PLUS
	   && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
      && (GET_CODE (XEXP (b, 0)) == REG
	  || (GET_CODE (XEXP (b, 0)) == PLUS
	      && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
    {
      int val0 = 0, val1 = 0;
      int reg0, reg1;

      if (GET_CODE (XEXP (a, 0)) == PLUS)
	{
	  reg0 = REGNO (XEXP (XEXP (a, 0), 0));
	  val0 = INTVAL (XEXP (XEXP (a, 0), 1));
	}
      else
	reg0 = REGNO (XEXP (a, 0));

      if (GET_CODE (XEXP (b, 0)) == PLUS)
	{
	  reg1 = REGNO (XEXP (XEXP (b, 0), 0));
	  val1 = INTVAL (XEXP (XEXP (b, 0), 1));
	}
      else
	reg1 = REGNO (XEXP (b, 0));

      return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
    }
  return 0;
}
/* Return 1 if OP is a load multiple operation.  It is known to be
   parallel and the first section will be tested.  */

int
load_multiple_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT count = XVECLEN (op, 0);
  int dest_regno;
  rtx src_addr;
  HOST_WIDE_INT i = 1, base = 0;
  rtx elt;

  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET)
    return 0;

  /* Check to see if this might be a write-back.  */
  if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
    {
      i++;
      base = 1;

      /* Now check it more carefully.  */
      if (GET_CODE (SET_DEST (elt)) != REG
	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
	  || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
	  || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
	  || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
	return 0;
    }

  /* Perform a quick check so we don't blow up below.  */
  if (count <= i
      || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
    return 0;

  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);

  for (; i < count; i++)
    {
      elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != SImode
	  || REGNO (SET_DEST (elt)) != (unsigned int)(dest_regno + i - base)
	  || GET_CODE (SET_SRC (elt)) != MEM
	  || GET_MODE (SET_SRC (elt)) != SImode
	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
	  || !rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
	  || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
	  || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
	return 0;
    }

  return 1;
}
/* Return 1 if OP is a store multiple operation.  It is known to be
   parallel and the first section will be tested.  */

int
store_multiple_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT count = XVECLEN (op, 0);
  int src_regno;
  rtx dest_addr;
  HOST_WIDE_INT i = 1, base = 0;
  rtx elt;

  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET)
    return 0;

  /* Check to see if this might be a write-back.  */
  if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
    {
      i++;
      base = 1;

      /* Now check it more carefully.  */
      if (GET_CODE (SET_DEST (elt)) != REG
	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
	  || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
	  || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
	  || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
	return 0;
    }

  /* Perform a quick check so we don't blow up below.  */
  if (count <= i
      || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
      || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
    return 0;

  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);

  for (; i < count; i++)
    {
      elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_SRC (elt)) != REG
	  || GET_MODE (SET_SRC (elt)) != SImode
	  || REGNO (SET_SRC (elt)) != (unsigned int)(src_regno + i - base)
	  || GET_CODE (SET_DEST (elt)) != MEM
	  || GET_MODE (SET_DEST (elt)) != SImode
	  || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
	  || !rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
	  || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
	  || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
	return 0;
    }

  return 1;
}
int
load_multiple_sequence (operands, nops, regs, base, load_offset)
     rtx * operands;
     int nops;
     int * regs;
     int * base;
     HOST_WIDE_INT * load_offset;
{
  int unsorted_regs[4];
  HOST_WIDE_INT unsorted_offsets[4];
  int order[4];
  int base_reg = -1;
  int i;

  /* Can only handle 2, 3, or 4 insns at present,
     though could be easily extended if required.  */
  if (nops < 2 || nops > 4)
    abort ();

  /* Loop over the operands and check that the memory references are
     suitable (ie immediate offsets from the same base register).  At
     the same time, extract the target register, and the memory
     offsets.  */
  for (i = 0; i < nops; i++)
    {
      rtx reg;
      rtx offset;

      /* Convert a subreg of a mem into the mem itself.  */
      if (GET_CODE (operands[nops + i]) == SUBREG)
	operands[nops + i] = alter_subreg (operands + (nops + i));

      if (GET_CODE (operands[nops + i]) != MEM)
	abort ();

      /* Don't reorder volatile memory references; it doesn't seem worth
	 looking for the case where the order is ok anyway.  */
      if (MEM_VOLATILE_P (operands[nops + i]))
	return 0;

      offset = const0_rtx;

      if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
	   || (GET_CODE (reg) == SUBREG
	       && GET_CODE (reg = SUBREG_REG (reg)) == REG))
	  || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
	      && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
		   == REG)
		  || (GET_CODE (reg) == SUBREG
		      && GET_CODE (reg = SUBREG_REG (reg)) == REG))
	      && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
		  == CONST_INT)))
	{
	  if (i == 0)
	    {
	      base_reg = REGNO (reg);
	      unsorted_regs[0] = (GET_CODE (operands[i]) == REG
				  ? REGNO (operands[i])
				  : REGNO (SUBREG_REG (operands[i])));
	      order[0] = 0;
	    }
	  else
	    {
	      if (base_reg != (int) REGNO (reg))
		/* Not addressed from the same base register.  */
		return 0;

	      unsorted_regs[i] = (GET_CODE (operands[i]) == REG
				  ? REGNO (operands[i])
				  : REGNO (SUBREG_REG (operands[i])));
	      if (unsorted_regs[i] < unsorted_regs[order[0]])
		order[0] = i;
	    }

	  /* If it isn't an integer register, or if it overwrites the
	     base register but isn't the last insn in the list, then
	     we can't do this.  */
	  if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14
	      || (i != nops - 1 && unsorted_regs[i] == base_reg))
	    return 0;

	  unsorted_offsets[i] = INTVAL (offset);
	}
      else
	/* Not a suitable memory address.  */
	return 0;
    }

  /* All the useful information has now been extracted from the
     operands into unsorted_regs and unsorted_offsets; additionally,
     order[0] has been set to the lowest numbered register in the
     list.  Sort the registers into order, and check that the memory
     offsets are ascending and adjacent.  */

  for (i = 1; i < nops; i++)
    {
      int j;

      order[i] = order[i - 1];
      for (j = 0; j < nops; j++)
	if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
	    && (order[i] == order[i - 1]
		|| unsorted_regs[j] < unsorted_regs[order[i]]))
	  order[i] = j;

      /* Have we found a suitable register? if not, one must be used more
	 than once.  */
      if (order[i] == order[i - 1])
	return 0;

      /* Is the memory address adjacent and ascending? */
      if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
	return 0;
    }

  if (base)
    {
      *base = base_reg;

      for (i = 0; i < nops; i++)
	regs[i] = unsorted_regs[order[i]];

      *load_offset = unsorted_offsets[order[0]];
    }

  if (unsorted_offsets[order[0]] == 0)
    return 1; /* ldmia */

  if (unsorted_offsets[order[0]] == 4)
    return 2; /* ldmib */

  if (unsorted_offsets[order[nops - 1]] == 0)
    return 3; /* ldmda */

  if (unsorted_offsets[order[nops - 1]] == -4)
    return 4; /* ldmdb */

  /* For ARM8,9 & StrongARM, 2 ldr instructions are faster than an ldm
     if the offset isn't small enough.  The reason 2 ldrs are faster
     is because these ARMs are able to do more than one cache access
     in a single cycle.  The ARM9 and StrongARM have Harvard caches,
     whilst the ARM8 has a double bandwidth cache.  This means that
     these cores can do both an instruction fetch and a data fetch in
     a single cycle, so the trick of calculating the address into a
     scratch register (one of the result regs) and then doing a load
     multiple actually becomes slower (and no smaller in code size).
     That is the transformation

	ldr	rd1, [rbase + offset]
	ldr	rd2, [rbase + offset + 4]

     to

	add	rd1, rbase, offset
	ldmia	rd1, {rd1, rd2}

     produces worse code -- '3 cycles + any stalls on rd2' instead of
     '2 cycles + any stalls on rd2'.  On ARMs with only one cache
     access per cycle, the first sequence could never complete in less
     than 6 cycles, whereas the ldm sequence would only take 5 and
     would make better use of sequential accesses if not hitting the
     cache.

     We cheat here and test 'arm_ld_sched' which we currently know to
     only be true for the ARM8, ARM9 and StrongARM.  If this ever
     changes, then the test below needs to be reworked.  */
  if (nops == 2 && arm_ld_sched)
    return 0;

  /* Can't do it without setting up the offset, only do this if it takes
     no more than one insn.  */
  return (const_ok_for_arm (unsorted_offsets[order[0]])
	  || const_ok_for_arm (-unsorted_offsets[order[0]])) ? 5 : 0;
}
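/* Example (illustrative): for a pair of loads r2 <- [r0, #4] and
   r3 <- [r0, #8], the lowest sorted offset is 4, so the function
   returns 2 and emit_ldm_seq below selects the "ldm%?ib" variant.  */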
const char *
emit_ldm_seq (operands, nops)
     rtx * operands;
     int nops;
{
  int regs[4];
  int base_reg;
  HOST_WIDE_INT offset;
  char buf[100];
  int i;

  switch (load_multiple_sequence (operands, nops, regs, &base_reg, &offset))
    {
    case 1:
      strcpy (buf, "ldm%?ia\t");
      break;

    case 2:
      strcpy (buf, "ldm%?ib\t");
      break;

    case 3:
      strcpy (buf, "ldm%?da\t");
      break;

    case 4:
      strcpy (buf, "ldm%?db\t");
      break;

    case 5:
      if (offset >= 0)
	sprintf (buf, "add%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
		 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
		 (long) offset);
      else
	sprintf (buf, "sub%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
		 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
		 (long) -offset);
      output_asm_insn (buf, operands);
      base_reg = regs[0];
      strcpy (buf, "ldm%?ia\t");
      break;

    default:
      abort ();
    }

  sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
	   reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);

  for (i = 1; i < nops; i++)
    sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
	     reg_names[regs[i]]);

  strcat (buf, "}\t%@ phole ldm");

  output_asm_insn (buf, operands);
  return "";
}
int
store_multiple_sequence (operands, nops, regs, base, load_offset)
     rtx * operands;
     int nops;
     int * regs;
     int * base;
     HOST_WIDE_INT * load_offset;
{
  int unsorted_regs[4];
  HOST_WIDE_INT unsorted_offsets[4];
  int order[4];
  int base_reg = -1;
  int i;

  /* Can only handle 2, 3, or 4 insns at present, though could be easily
     extended if required.  */
  if (nops < 2 || nops > 4)
    abort ();

  /* Loop over the operands and check that the memory references are
     suitable (ie immediate offsets from the same base register).  At
     the same time, extract the target register, and the memory
     offsets.  */
  for (i = 0; i < nops; i++)
    {
      rtx reg;
      rtx offset;

      /* Convert a subreg of a mem into the mem itself.  */
      if (GET_CODE (operands[nops + i]) == SUBREG)
	operands[nops + i] = alter_subreg (operands + (nops + i));

      if (GET_CODE (operands[nops + i]) != MEM)
	abort ();

      /* Don't reorder volatile memory references; it doesn't seem worth
	 looking for the case where the order is ok anyway.  */
      if (MEM_VOLATILE_P (operands[nops + i]))
	return 0;

      offset = const0_rtx;

      if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
	   || (GET_CODE (reg) == SUBREG
	       && GET_CODE (reg = SUBREG_REG (reg)) == REG))
	  || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
	      && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
		   == REG)
		  || (GET_CODE (reg) == SUBREG
		      && GET_CODE (reg = SUBREG_REG (reg)) == REG))
	      && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
		  == CONST_INT)))
	{
	  if (i == 0)
	    {
	      base_reg = REGNO (reg);
	      unsorted_regs[0] = (GET_CODE (operands[i]) == REG
				  ? REGNO (operands[i])
				  : REGNO (SUBREG_REG (operands[i])));
	      order[0] = 0;
	    }
	  else
	    {
	      if (base_reg != (int) REGNO (reg))
		/* Not addressed from the same base register.  */
		return 0;

	      unsorted_regs[i] = (GET_CODE (operands[i]) == REG
				  ? REGNO (operands[i])
				  : REGNO (SUBREG_REG (operands[i])));
	      if (unsorted_regs[i] < unsorted_regs[order[0]])
		order[0] = i;
	    }

	  /* If it isn't an integer register, then we can't do this.  */
	  if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14)
	    return 0;

	  unsorted_offsets[i] = INTVAL (offset);
	}
      else
	/* Not a suitable memory address.  */
	return 0;
    }

  /* All the useful information has now been extracted from the
     operands into unsorted_regs and unsorted_offsets; additionally,
     order[0] has been set to the lowest numbered register in the
     list.  Sort the registers into order, and check that the memory
     offsets are ascending and adjacent.  */

  for (i = 1; i < nops; i++)
    {
      int j;

      order[i] = order[i - 1];
      for (j = 0; j < nops; j++)
	if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
	    && (order[i] == order[i - 1]
		|| unsorted_regs[j] < unsorted_regs[order[i]]))
	  order[i] = j;

      /* Have we found a suitable register? if not, one must be used more
	 than once.  */
      if (order[i] == order[i - 1])
	return 0;

      /* Is the memory address adjacent and ascending? */
      if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
	return 0;
    }

  if (base)
    {
      *base = base_reg;

      for (i = 0; i < nops; i++)
	regs[i] = unsorted_regs[order[i]];

      *load_offset = unsorted_offsets[order[0]];
    }

  if (unsorted_offsets[order[0]] == 0)
    return 1; /* stmia */

  if (unsorted_offsets[order[0]] == 4)
    return 2; /* stmib */

  if (unsorted_offsets[order[nops - 1]] == 0)
    return 3; /* stmda */

  if (unsorted_offsets[order[nops - 1]] == -4)
    return 4; /* stmdb */

  return 0;
}
const char *
emit_stm_seq (operands, nops)
     rtx * operands;
     int nops;
{
  int regs[4];
  int base_reg;
  HOST_WIDE_INT offset;
  char buf[100];
  int i;

  switch (store_multiple_sequence (operands, nops, regs, &base_reg, &offset))
    {
    case 1:
      strcpy (buf, "stm%?ia\t");
      break;

    case 2:
      strcpy (buf, "stm%?ib\t");
      break;

    case 3:
      strcpy (buf, "stm%?da\t");
      break;

    case 4:
      strcpy (buf, "stm%?db\t");
      break;

    default:
      abort ();
    }

  sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
	   reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);

  for (i = 1; i < nops; i++)
    sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
	     reg_names[regs[i]]);

  strcat (buf, "}\t%@ phole stm");

  output_asm_insn (buf, operands);
  return "";
}
int
multi_register_push (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (GET_CODE (op) != PARALLEL
      || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
      || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
      || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != UNSPEC_PUSH_MULT))
    return 0;

  return 1;
}
/* Routines for use in generating RTL.  */

rtx
arm_gen_load_multiple (base_regno, count, from, up, write_back, unchanging_p,
		       in_struct_p, scalar_p)
     int base_regno;
     int count;
     rtx from;
     int up;
     int write_back;
     int unchanging_p;
     int in_struct_p;
     int scalar_p;
{
  int i = 0, j;
  rtx result;
  int sign = up ? 1 : -1;
  rtx mem;

  /* XScale has load-store double instructions, but they have stricter
     alignment requirements than load-store multiple, so we can not
     use them.

     For XScale ldm requires 2 + NREGS cycles to complete and blocks
     the pipeline until completion.

	NREGS		CYCLES
	  1		  3
	  2		  4
	  3		  5
	  4		  6

     An ldr instruction takes 1-3 cycles, but does not block the
     pipeline.

	NREGS		CYCLES
	  1		 1-3
	  2		 2-6
	  3		 3-9
	  4		 4-12

     Best case ldr will always win.  However, the more ldr instructions
     we issue, the less likely we are to be able to schedule them well.
     Using ldr instructions also increases code size.

     As a compromise, we use ldr for counts of 1 or 2 regs, and ldm
     for counts of 3 or 4 regs.  */
  if (arm_is_xscale && count <= 2 && ! optimize_size)
    {
      rtx seq;

      start_sequence ();

      for (i = 0; i < count; i++)
	{
	  mem = gen_rtx_MEM (SImode, plus_constant (from, i * 4 * sign));
	  RTX_UNCHANGING_P (mem) = unchanging_p;
	  MEM_IN_STRUCT_P (mem) = in_struct_p;
	  MEM_SCALAR_P (mem) = scalar_p;
	  emit_move_insn (gen_rtx_REG (SImode, base_regno + i), mem);
	}

      if (write_back)
	emit_move_insn (from, plus_constant (from, count * 4 * sign));

      seq = gen_sequence ();
      end_sequence ();

      return seq;
    }

  result = gen_rtx_PARALLEL (VOIDmode,
			     rtvec_alloc (count + (write_back ? 1 : 0)));
  if (write_back)
    {
      XVECEXP (result, 0, 0)
	= gen_rtx_SET (GET_MODE (from), from,
		       plus_constant (from, count * 4 * sign));
      i = 1;
      count++;
    }

  for (j = 0; i < count; i++, j++)
    {
      mem = gen_rtx_MEM (SImode, plus_constant (from, j * 4 * sign));
      RTX_UNCHANGING_P (mem) = unchanging_p;
      MEM_IN_STRUCT_P (mem) = in_struct_p;
      MEM_SCALAR_P (mem) = scalar_p;
      XVECEXP (result, 0, i)
	= gen_rtx_SET (VOIDmode, gen_rtx_REG (SImode, base_regno + j), mem);
    }

  return result;
}
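/* Usage sketch (hypothetical arguments): arm_gen_load_multiple (4, 3,
   from, TRUE, FALSE, ...) builds a PARALLEL of three SImode SETs that
   load r4, r5 and r6 from FROM, FROM+4 and FROM+8; the result later
   matches load_multiple_operation above.  */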
rtx
arm_gen_store_multiple (base_regno, count, to, up, write_back, unchanging_p,
			in_struct_p, scalar_p)
     int base_regno;
     int count;
     rtx to;
     int up;
     int write_back;
     int unchanging_p;
     int in_struct_p;
     int scalar_p;
{
  int i = 0, j;
  rtx result;
  int sign = up ? 1 : -1;
  rtx mem;

  /* See arm_gen_load_multiple for discussion of
     the pros/cons of ldm/stm usage for XScale.  */
  if (arm_is_xscale && count <= 2 && ! optimize_size)
    {
      rtx seq;

      start_sequence ();

      for (i = 0; i < count; i++)
	{
	  mem = gen_rtx_MEM (SImode, plus_constant (to, i * 4 * sign));
	  RTX_UNCHANGING_P (mem) = unchanging_p;
	  MEM_IN_STRUCT_P (mem) = in_struct_p;
	  MEM_SCALAR_P (mem) = scalar_p;
	  emit_move_insn (mem, gen_rtx_REG (SImode, base_regno + i));
	}

      if (write_back)
	emit_move_insn (to, plus_constant (to, count * 4 * sign));

      seq = gen_sequence ();
      end_sequence ();

      return seq;
    }

  result = gen_rtx_PARALLEL (VOIDmode,
			     rtvec_alloc (count + (write_back ? 1 : 0)));
  if (write_back)
    {
      XVECEXP (result, 0, 0)
	= gen_rtx_SET (GET_MODE (to), to,
		       plus_constant (to, count * 4 * sign));
      i = 1;
      count++;
    }

  for (j = 0; i < count; i++, j++)
    {
      mem = gen_rtx_MEM (SImode, plus_constant (to, j * 4 * sign));
      RTX_UNCHANGING_P (mem) = unchanging_p;
      MEM_IN_STRUCT_P (mem) = in_struct_p;
      MEM_SCALAR_P (mem) = scalar_p;
      XVECEXP (result, 0, i)
	= gen_rtx_SET (VOIDmode, mem, gen_rtx_REG (SImode, base_regno + j));
    }

  return result;
}
int
arm_gen_movstrqi (operands)
     rtx * operands;
{
  HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
  int i;
  rtx src, dst;
  rtx st_src, st_dst, fin_src, fin_dst;
  rtx part_bytes_reg = NULL;
  rtx mem;
  int dst_unchanging_p, dst_in_struct_p, src_unchanging_p, src_in_struct_p;
  int dst_scalar_p, src_scalar_p;

  if (GET_CODE (operands[2]) != CONST_INT
      || GET_CODE (operands[3]) != CONST_INT
      || INTVAL (operands[2]) > 64
      || INTVAL (operands[3]) & 3)
    return 0;

  st_dst = XEXP (operands[0], 0);
  st_src = XEXP (operands[1], 0);

  dst_unchanging_p = RTX_UNCHANGING_P (operands[0]);
  dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
  dst_scalar_p = MEM_SCALAR_P (operands[0]);
  src_unchanging_p = RTX_UNCHANGING_P (operands[1]);
  src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);
  src_scalar_p = MEM_SCALAR_P (operands[1]);

  fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
  fin_src = src = copy_to_mode_reg (SImode, st_src);

  in_words_to_go = ARM_NUM_INTS (INTVAL (operands[2]));
  out_words_to_go = INTVAL (operands[2]) / 4;
  last_bytes = INTVAL (operands[2]) & 3;

  if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
    part_bytes_reg = gen_rtx_REG (SImode, (in_words_to_go - 1) & 3);

  for (i = 0; in_words_to_go >= 2; i+=4)
    {
      if (in_words_to_go > 4)
	emit_insn (arm_gen_load_multiple (0, 4, src, TRUE, TRUE,
					  src_unchanging_p,
					  src_in_struct_p,
					  src_scalar_p));
      else
	emit_insn (arm_gen_load_multiple (0, in_words_to_go, src, TRUE,
					  FALSE, src_unchanging_p,
					  src_in_struct_p, src_scalar_p));

      if (out_words_to_go)
	{
	  if (out_words_to_go > 4)
	    emit_insn (arm_gen_store_multiple (0, 4, dst, TRUE, TRUE,
					       dst_unchanging_p,
					       dst_in_struct_p,
					       dst_scalar_p));
	  else if (out_words_to_go != 1)
	    emit_insn (arm_gen_store_multiple (0, out_words_to_go,
					       dst, TRUE,
					       (last_bytes == 0
						? FALSE : TRUE),
					       dst_unchanging_p,
					       dst_in_struct_p,
					       dst_scalar_p));
	  else
	    {
	      mem = gen_rtx_MEM (SImode, dst);
	      RTX_UNCHANGING_P (mem) = dst_unchanging_p;
	      MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
	      MEM_SCALAR_P (mem) = dst_scalar_p;
	      emit_move_insn (mem, gen_rtx_REG (SImode, 0));
	      if (last_bytes != 0)
		emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
	    }
	}

      in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
      out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
    }

  /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do.  */
  if (out_words_to_go)
    {
      rtx sreg;

      mem = gen_rtx_MEM (SImode, src);
      RTX_UNCHANGING_P (mem) = src_unchanging_p;
      MEM_IN_STRUCT_P (mem) = src_in_struct_p;
      MEM_SCALAR_P (mem) = src_scalar_p;
      emit_move_insn (sreg = gen_reg_rtx (SImode), mem);
      emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));

      mem = gen_rtx_MEM (SImode, dst);
      RTX_UNCHANGING_P (mem) = dst_unchanging_p;
      MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
      MEM_SCALAR_P (mem) = dst_scalar_p;
      emit_move_insn (mem, sreg);
      emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
      in_words_to_go--;

      if (in_words_to_go)	/* Sanity check */
	abort ();
    }

  if (in_words_to_go)
    {
      if (in_words_to_go < 0)
	abort ();

      mem = gen_rtx_MEM (SImode, src);
      RTX_UNCHANGING_P (mem) = src_unchanging_p;
      MEM_IN_STRUCT_P (mem) = src_in_struct_p;
      MEM_SCALAR_P (mem) = src_scalar_p;
      part_bytes_reg = copy_to_mode_reg (SImode, mem);
    }

  if (last_bytes && part_bytes_reg == NULL)
    abort ();

  if (BYTES_BIG_ENDIAN && last_bytes)
    {
      rtx tmp = gen_reg_rtx (SImode);

      /* The bytes we want are in the top end of the word.  */
      emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
			      GEN_INT (8 * (4 - last_bytes))));
      part_bytes_reg = tmp;

      while (last_bytes)
	{
	  mem = gen_rtx_MEM (QImode, plus_constant (dst, last_bytes - 1));
	  RTX_UNCHANGING_P (mem) = dst_unchanging_p;
	  MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
	  MEM_SCALAR_P (mem) = dst_scalar_p;
	  emit_move_insn (mem, gen_lowpart (QImode, part_bytes_reg));

	  if (--last_bytes)
	    {
	      tmp = gen_reg_rtx (SImode);
	      emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
	      part_bytes_reg = tmp;
	    }
	}
    }
  else
    {
      if (last_bytes > 1)
	{
	  mem = gen_rtx_MEM (HImode, dst);
	  RTX_UNCHANGING_P (mem) = dst_unchanging_p;
	  MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
	  MEM_SCALAR_P (mem) = dst_scalar_p;
	  emit_move_insn (mem, gen_lowpart (HImode, part_bytes_reg));
	  last_bytes -= 2;
	  if (last_bytes)
	    {
	      rtx tmp = gen_reg_rtx (SImode);

	      emit_insn (gen_addsi3 (dst, dst, GEN_INT (2)));
	      emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (16)));
	      part_bytes_reg = tmp;
	    }
	}

      if (last_bytes)
	{
	  mem = gen_rtx_MEM (QImode, dst);
	  RTX_UNCHANGING_P (mem) = dst_unchanging_p;
	  MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
	  MEM_SCALAR_P (mem) = dst_scalar_p;
	  emit_move_insn (mem, gen_lowpart (QImode, part_bytes_reg));
	}
    }

  return 1;
}
/* Generate a memory reference for a half word, such that it will be loaded
   into the top 16 bits of the word.  We can assume that the address is
   known to be alignable and of the form reg, or plus (reg, const).  */

rtx
arm_gen_rotated_half_load (memref)
     rtx memref;
{
  HOST_WIDE_INT offset = 0;
  rtx base = XEXP (memref, 0);

  if (GET_CODE (base) == PLUS)
    {
      offset = INTVAL (XEXP (base, 1));
      base = XEXP (base, 0);
    }

  /* If we aren't allowed to generate unaligned addresses, then fail.  */
  if (TARGET_MMU_TRAPS
      && ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 0)))
    return NULL;

  base = gen_rtx_MEM (SImode, plus_constant (base, offset & ~2));

  if ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 2))
    return base;

  return gen_rtx_ROTATE (SImode, base, GEN_INT (16));
}
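/* Example: on a little-endian target a half-word at offset 2 of an
   aligned word already occupies the top 16 bits after the aligned
   SImode load, so the MEM is returned directly; at offset 0 the
   ROTATE by 16 moves the low half-word into the top half.  */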
/* Select a dominance comparison mode if possible.  We support three forms.
   COND_OR == 0 => (X && Y)
   COND_OR == 1 => ((!X) || Y)
   COND_OR == 2 => (X || Y)
   If we are unable to support a dominance comparison we return CC mode.
   This will then fail to match for the RTL expressions that generate this
   call.  */

static enum machine_mode
select_dominance_cc_mode (x, y, cond_or)
     rtx x;
     rtx y;
     HOST_WIDE_INT cond_or;
{
  enum rtx_code cond1, cond2;
  int swapped = 0;

  /* Currently we will probably get the wrong result if the individual
     comparisons are not simple.  This also ensures that it is safe to
     reverse a comparison if necessary.  */
  if ((arm_select_cc_mode (cond1 = GET_CODE (x), XEXP (x, 0), XEXP (x, 1))
       != CCmode)
      || (arm_select_cc_mode (cond2 = GET_CODE (y), XEXP (y, 0), XEXP (y, 1))
	  != CCmode))
    return CCmode;

  /* The if_then_else variant of this tests the second condition if the
     first passes, but is true if the first fails.  Reverse the first
     condition to get a true "inclusive-or" expression.  */
  if (cond_or == 1)
    cond1 = reverse_condition (cond1);

  /* If the comparisons are not equal, and one doesn't dominate the other,
     then we can't do this.  */
  if (cond1 != cond2
      && !comparison_dominates_p (cond1, cond2)
      && (swapped = 1, !comparison_dominates_p (cond2, cond1)))
    return CCmode;

  if (swapped)
    {
      enum rtx_code temp = cond1;
      cond1 = cond2;
      cond2 = temp;
    }

  switch (cond1)
    {
    case EQ:
      if (cond2 == EQ || !cond_or)
	return CC_DEQmode;

      switch (cond2)
	{
	case LE: return CC_DLEmode;
	case LEU: return CC_DLEUmode;
	case GE: return CC_DGEmode;
	case GEU: return CC_DGEUmode;
	default: break;
	}

      break;

    case LT:
      if (cond2 == LT || !cond_or)
	return CC_DLTmode;
      if (cond2 == LE)
	return CC_DLEmode;
      if (cond2 == NE)
	return CC_DNEmode;
      break;

    case GT:
      if (cond2 == GT || !cond_or)
	return CC_DGTmode;
      if (cond2 == GE)
	return CC_DGEmode;
      if (cond2 == NE)
	return CC_DNEmode;
      break;

    case LTU:
      if (cond2 == LTU || !cond_or)
	return CC_DLTUmode;
      if (cond2 == LEU)
	return CC_DLEUmode;
      if (cond2 == NE)
	return CC_DNEmode;
      break;

    case GTU:
      if (cond2 == GTU || !cond_or)
	return CC_DGTUmode;
      if (cond2 == GEU)
	return CC_DGEUmode;
      if (cond2 == NE)
	return CC_DNEmode;
      break;

    /* The remaining cases only occur when both comparisons are the
       same.  */
    case NE:
      return CC_DNEmode;

    case LE:
      return CC_DLEmode;

    case GE:
      return CC_DGEmode;

    case LEU:
      return CC_DLEUmode;

    case GEU:
      return CC_DGEUmode;

    default:
      break;
    }

  abort ();
}
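/* Example: for cond1 == EQ and cond2 == LE with COND_OR == 2 (an
   inclusive or), LE dominates EQ, so CC_DLEmode is returned and a
   single LE test implements both branches.  */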
enum machine_mode
arm_select_cc_mode (op, x, y)
     enum rtx_code op;
     rtx x;
     rtx y;
{
  /* All floating point compares return CCFP if it is an equality
     comparison, and CCFPE otherwise.  */
  if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
    return (op == EQ || op == NE) ? CCFPmode : CCFPEmode;

  /* A compare with a shifted operand.  Because of canonicalization, the
     comparison will have to be swapped when we emit the assembler.  */
  if (GET_MODE (y) == SImode && GET_CODE (y) == REG
      && (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
	  || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
	  || GET_CODE (x) == ROTATERT))
    return CC_SWPmode;

  /* This is a special case that is used by combine to allow a
     comparison of a shifted byte load to be split into a zero-extend
     followed by a comparison of the shifted integer (only valid for
     equalities and unsigned inequalities).  */
  if (GET_MODE (x) == SImode
      && GET_CODE (x) == ASHIFT
      && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 24
      && GET_CODE (XEXP (x, 0)) == SUBREG
      && GET_CODE (SUBREG_REG (XEXP (x, 0))) == MEM
      && GET_MODE (SUBREG_REG (XEXP (x, 0))) == QImode
      && (op == EQ || op == NE
	  || op == GEU || op == GTU || op == LTU || op == LEU)
      && GET_CODE (y) == CONST_INT)
    return CC_Zmode;

  /* A construct for a conditional compare, if the false arm contains
     0, then both conditions must be true, otherwise either condition
     must be true.  Not all conditions are possible, so CCmode is
     returned if it can't be done.  */
  if (GET_CODE (x) == IF_THEN_ELSE
      && (XEXP (x, 2) == const0_rtx
	  || XEXP (x, 2) == const1_rtx)
      && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
      && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
    return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
				     INTVAL (XEXP (x, 2)));

  /* Alternate canonicalizations of the above.  These are somewhat cleaner.  */
  if (GET_CODE (x) == AND
      && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
      && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
    return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1), 0);

  if (GET_CODE (x) == IOR
      && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
      && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
    return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1), 2);

  /* An operation that sets the condition codes as a side-effect, the
     V flag is not set correctly, so we can only use comparisons where
     this doesn't matter.  (For LT and GE we can use "mi" and "pl"
     instead.)  */
  if (GET_MODE (x) == SImode
      && y == const0_rtx
      && (op == EQ || op == NE || op == LT || op == GE)
      && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
	  || GET_CODE (x) == AND || GET_CODE (x) == IOR
	  || GET_CODE (x) == XOR || GET_CODE (x) == MULT
	  || GET_CODE (x) == NOT || GET_CODE (x) == NEG
	  || GET_CODE (x) == LSHIFTRT
	  || GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
	  || GET_CODE (x) == ROTATERT || GET_CODE (x) == ZERO_EXTRACT))
    return CC_NOOVmode;

  if (GET_MODE (x) == QImode && (op == EQ || op == NE))
    return CC_Zmode;

  if (GET_MODE (x) == SImode && (op == LTU || op == GEU)
      && GET_CODE (x) == PLUS
      && (rtx_equal_p (XEXP (x, 0), y) || rtx_equal_p (XEXP (x, 1), y)))
    return CC_Cmode;

  return CCmode;
}
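
/* Hypothetical illustration (not in the original file) of the dominance
   pairings encoded by select_dominance_cc_mode above: when one comparison
   implies the other, a single dominance CC mode covers the pair.  */
#if 0
static const struct { enum rtx_code c1, c2; } dominance_pairs_example[] =
{
  { EQ, LE  },	/* x == y implies x <= y   => CC_DLEmode   */
  { EQ, GE  },	/* x == y implies x >= y   => CC_DGEmode   */
  { EQ, LEU },	/* unsigned variant	   => CC_DLEUmode  */
  { EQ, GEU },	/* unsigned variant	   => CC_DGEUmode  */
};
#endif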
/* X and Y are two things to compare using CODE.  Emit the compare insn and
   return the rtx for register 0 in the proper mode.  FP means this is a
   floating point compare: I don't think that it is needed on the arm.  */

rtx
arm_gen_compare_reg (code, x, y)
     enum rtx_code code;
     rtx x, y;
{
  enum machine_mode mode = SELECT_CC_MODE (code, x, y);
  rtx cc_reg = gen_rtx_REG (mode, CC_REGNUM);

  emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
			  gen_rtx_COMPARE (mode, x, y)));

  return cc_reg;
}
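
/* Hypothetical usage sketch (not part of the original file): comparing a
   register against zero yields the CC register in whatever mode
   SELECT_CC_MODE chose for the operation.  */
#if 0
static void
gen_compare_example (void)
{
  rtx op0 = gen_rtx_REG (SImode, 0);
  rtx cc = arm_gen_compare_reg (EQ, op0, const0_rtx);

  /* CC is (reg CC_REGNUM) in the selected mode; a conditional branch
     can now test it.  */
  (void) cc;
}
#endif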
void
arm_reload_in_hi (operands)
     rtx * operands;
{
  rtx ref = operands[1];
  rtx base, scratch;
  HOST_WIDE_INT offset = 0;

  if (GET_CODE (ref) == SUBREG)
    {
      offset = SUBREG_BYTE (ref);
      ref = SUBREG_REG (ref);
    }

  if (GET_CODE (ref) == REG)
    {
      /* We have a pseudo which has been spilt onto the stack; there
	 are two cases here: the first where there is a simple
	 stack-slot replacement and a second where the stack-slot is
	 out of range, or is used as a subreg.  */
      if (reg_equiv_mem[REGNO (ref)])
	{
	  ref = reg_equiv_mem[REGNO (ref)];
	  base = find_replacement (&XEXP (ref, 0));
	}
      else
	/* The slot is out of range, or was dressed up in a SUBREG.  */
	base = reg_equiv_address[REGNO (ref)];
    }
  else
    base = find_replacement (&XEXP (ref, 0));

  /* Handle the case where the address is too complex to be offset by 1.  */
  if (GET_CODE (base) == MINUS
      || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
    {
      rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);

      emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
      base = base_plus;
    }
  else if (GET_CODE (base) == PLUS)
    {
      /* The addend must be CONST_INT, or we would have dealt with it above.  */
      HOST_WIDE_INT hi, lo;

      offset += INTVAL (XEXP (base, 1));
      base = XEXP (base, 0);

      /* Rework the address into a legal sequence of insns.  */
      /* Valid range for lo is -4095 -> 4095 */
      lo = (offset >= 0
	    ? (offset & 0xfff)
	    : -((-offset) & 0xfff));

      /* Corner case, if lo is the max offset then we would be out of range
	 once we have added the additional 1 below, so bump the msb into the
	 pre-loading insn(s).  */
      if (lo == 4095)
	lo &= 0x7ff;

      hi = ((((offset - lo) & (HOST_WIDE_INT) 0xffffffff)
	     ^ (HOST_WIDE_INT) 0x80000000)
	    - (HOST_WIDE_INT) 0x80000000);

      if (hi + lo != offset)
	abort ();

      if (hi != 0)
	{
	  rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);

	  /* Get the base address; addsi3 knows how to handle constants
	     that require more than one insn.  */
	  emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
	  base = base_plus;
	  offset = lo;
	}
    }

  scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
  emit_insn (gen_zero_extendqisi2 (scratch,
				   gen_rtx_MEM (QImode,
						plus_constant (base,
							       offset))));
  emit_insn (gen_zero_extendqisi2 (gen_rtx_SUBREG (SImode, operands[0], 0),
				   gen_rtx_MEM (QImode,
						plus_constant (base,
							       offset + 1))));
  if (!BYTES_BIG_ENDIAN)
    emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
			    gen_rtx_IOR (SImode,
					 gen_rtx_ASHIFT
					 (SImode,
					  gen_rtx_SUBREG (SImode, operands[0], 0),
					  GEN_INT (8)),
					 scratch)));
  else
    emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
			    gen_rtx_IOR (SImode,
					 gen_rtx_ASHIFT (SImode, scratch,
							 GEN_INT (8)),
					 gen_rtx_SUBREG (SImode, operands[0],
							 0))));
}
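
/* Hypothetical sketch (not in the original file) of the offset splitting
   performed above: LO keeps the sign-preserved low 12 bits (the reach of
   an ARM load/store immediate), HI is the 32-bit sign-extended remainder
   that gets added to the base register first.  */
#if 0
static void
split_offset_example (HOST_WIDE_INT offset, HOST_WIDE_INT *hi,
		      HOST_WIDE_INT *lo)
{
  /* Valid range for *lo is -4095 -> 4095.  */
  *lo = (offset >= 0 ? (offset & 0xfff) : -((-offset) & 0xfff));
  *hi = ((((offset - *lo) & (HOST_WIDE_INT) 0xffffffff)
	  ^ (HOST_WIDE_INT) 0x80000000)
	 - (HOST_WIDE_INT) 0x80000000);
  /* By construction *hi + *lo == offset (mod 2^32).  */
}
#endif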
/* Handle storing a half-word to memory during reload by synthesising as two
   byte stores.  Take care not to clobber the input values until after we
   have moved them somewhere safe.  This code assumes that if the DImode
   scratch in operands[2] overlaps either the input value or output address
   in some way, then that value must die in this insn (we absolutely need
   two scratch registers for some corner cases).  */

void
arm_reload_out_hi (operands)
     rtx * operands;
{
  rtx ref = operands[0];
  rtx outval = operands[1];
  rtx base, scratch;
  HOST_WIDE_INT offset = 0;

  if (GET_CODE (ref) == SUBREG)
    {
      offset = SUBREG_BYTE (ref);
      ref = SUBREG_REG (ref);
    }

  if (GET_CODE (ref) == REG)
    {
      /* We have a pseudo which has been spilt onto the stack; there
	 are two cases here: the first where there is a simple
	 stack-slot replacement and a second where the stack-slot is
	 out of range, or is used as a subreg.  */
      if (reg_equiv_mem[REGNO (ref)])
	{
	  ref = reg_equiv_mem[REGNO (ref)];
	  base = find_replacement (&XEXP (ref, 0));
	}
      else
	/* The slot is out of range, or was dressed up in a SUBREG.  */
	base = reg_equiv_address[REGNO (ref)];
    }
  else
    base = find_replacement (&XEXP (ref, 0));

  scratch = gen_rtx_REG (SImode, REGNO (operands[2]));

  /* Handle the case where the address is too complex to be offset by 1.  */
  if (GET_CODE (base) == MINUS
      || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
    {
      rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);

      /* Be careful not to destroy OUTVAL.  */
      if (reg_overlap_mentioned_p (base_plus, outval))
	{
	  /* Updating base_plus might destroy outval, see if we can
	     swap the scratch and base_plus.  */
	  if (!reg_overlap_mentioned_p (scratch, outval))
	    {
	      rtx tmp = scratch;
	      scratch = base_plus;
	      base_plus = tmp;
	    }
	  else
	    {
	      rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));

	      /* Be conservative and copy OUTVAL into the scratch now,
		 this should only be necessary if outval is a subreg
		 of something larger than a word.  */
	      /* XXX Might this clobber base?  I can't see how it can,
		 since scratch is known to overlap with OUTVAL, and
		 must be wider than a word.  */
	      emit_insn (gen_movhi (scratch_hi, outval));
	      outval = scratch_hi;
	    }
	}

      emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
      base = base_plus;
    }
  else if (GET_CODE (base) == PLUS)
    {
      /* The addend must be CONST_INT, or we would have dealt with it above.  */
      HOST_WIDE_INT hi, lo;

      offset += INTVAL (XEXP (base, 1));
      base = XEXP (base, 0);

      /* Rework the address into a legal sequence of insns.  */
      /* Valid range for lo is -4095 -> 4095 */
      lo = (offset >= 0
	    ? (offset & 0xfff)
	    : -((-offset) & 0xfff));

      /* Corner case, if lo is the max offset then we would be out of range
	 once we have added the additional 1 below, so bump the msb into the
	 pre-loading insn(s).  */
      if (lo == 4095)
	lo &= 0x7ff;

      hi = ((((offset - lo) & (HOST_WIDE_INT) 0xffffffff)
	     ^ (HOST_WIDE_INT) 0x80000000)
	    - (HOST_WIDE_INT) 0x80000000);

      if (hi + lo != offset)
	abort ();

      if (hi != 0)
	{
	  rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);

	  /* Be careful not to destroy OUTVAL.  */
	  if (reg_overlap_mentioned_p (base_plus, outval))
	    {
	      /* Updating base_plus might destroy outval, see if we
		 can swap the scratch and base_plus.  */
	      if (!reg_overlap_mentioned_p (scratch, outval))
		{
		  rtx tmp = scratch;
		  scratch = base_plus;
		  base_plus = tmp;
		}
	      else
		{
		  rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));

		  /* Be conservative and copy outval into scratch now,
		     this should only be necessary if outval is a
		     subreg of something larger than a word.  */
		  /* XXX Might this clobber base?  I can't see how it
		     can, since scratch is known to overlap with
		     outval.  */
		  emit_insn (gen_movhi (scratch_hi, outval));
		  outval = scratch_hi;
		}
	    }

	  /* Get the base address; addsi3 knows how to handle constants
	     that require more than one insn.  */
	  emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
	  base = base_plus;
	  offset = lo;
	}
    }

  if (BYTES_BIG_ENDIAN)
    {
      emit_insn (gen_movqi (gen_rtx_MEM (QImode,
					 plus_constant (base, offset + 1)),
			    gen_lowpart (QImode, outval)));
      emit_insn (gen_lshrsi3 (scratch,
			      gen_rtx_SUBREG (SImode, outval, 0),
			      GEN_INT (8)));
      emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
			    gen_lowpart (QImode, scratch)));
    }
  else
    {
      emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
			    gen_lowpart (QImode, outval)));
      emit_insn (gen_lshrsi3 (scratch,
			      gen_rtx_SUBREG (SImode, outval, 0),
			      GEN_INT (8)));
      emit_insn (gen_movqi (gen_rtx_MEM (QImode,
					 plus_constant (base, offset + 1)),
			    gen_lowpart (QImode, scratch)));
    }
}
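
/* Illustrative only (hypothetical helper, not in the original file): the
   two byte stores above implement this split, with the byte order picked
   by BYTES_BIG_ENDIAN.  */
#if 0
static void
store_hi_as_bytes_example (unsigned char *p, unsigned int outval,
			   int big_endian)
{
  p[big_endian ? 1 : 0] = outval & 0xff;	  /* low byte of the half-word  */
  p[big_endian ? 0 : 1] = (outval >> 8) & 0xff;	  /* high byte  */
}
#endif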
/* Print a symbolic form of X to the debug file, F.  */

static void
arm_print_value (f, x)
     FILE * f;
     rtx x;
{
  switch (GET_CODE (x))
    {
    case CONST_INT:
      fprintf (f, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
      return;

    case CONST_DOUBLE:
      fprintf (f, "<0x%lx,0x%lx>", (long)XWINT (x, 2), (long)XWINT (x, 3));
      return;

    case CONST_STRING:
      fprintf (f, "\"%s\"", XSTR (x, 0));
      return;

    case SYMBOL_REF:
      fprintf (f, "`%s'", XSTR (x, 0));
      return;

    case LABEL_REF:
      fprintf (f, "L%d", INSN_UID (XEXP (x, 0)));
      return;

    case CONST:
      arm_print_value (f, XEXP (x, 0));
      return;

    case PLUS:
      arm_print_value (f, XEXP (x, 0));
      fprintf (f, "+");
      arm_print_value (f, XEXP (x, 1));
      return;

    default:
      fprintf (f, "????");
      return;
    }
}
/* Routines for manipulation of the constant pool.  */

/* Arm instructions cannot load a large constant directly into a
   register; they have to come from a pc relative load.  The constant
   must therefore be placed in the addressable range of the pc
   relative load.  Depending on the precise pc relative load
   instruction the range is somewhere between 256 bytes and 4k.  This
   means that we often have to dump a constant inside a function, and
   generate code to branch around it.

   It is important to minimize this, since the branches will slow
   things down and make the code larger.

   Normally we can hide the table after an existing unconditional
   branch so that there is no interruption of the flow, but in the
   worst case the code looks like this:

	ldr	rn, L1
	...
	b	L2
	align
	L1:	.long	value
	L2:
	...

   We fix this by performing a scan after scheduling, which notices
   which instructions need to have their operands fetched from the
   constant table and builds the table.

   The algorithm starts by building a table of all the constants that
   need fixing up and all the natural barriers in the function (places
   where a constant table can be dropped without breaking the flow).
   For each fixup we note how far the pc-relative replacement will be
   able to reach and the offset of the instruction into the function.

   Having built the table we then group the fixes together to form
   tables that are as large as possible (subject to addressing
   constraints) and emit each table of constants after the last
   barrier that is within range of all the instructions in the group.
   If a group does not contain a barrier, then we forcibly create one
   by inserting a jump instruction into the flow.  Once the table has
   been inserted, the insns are then modified to reference the
   relevant entry in the pool.

   Possible enhancements to the algorithm (not implemented) are:

   1) For some processors and object formats, there may be benefit in
   aligning the pools to the start of cache lines; this alignment
   would need to be taken into account when calculating addressability
   of a pool.  */

/* These typedefs are located at the start of this file, so that
   they can be used in the prototypes there.  This comment is to
   remind readers of that fact so that the following structures
   can be understood more easily.

     typedef struct minipool_node    Mnode;
     typedef struct minipool_fixup   Mfix;  */
struct minipool_node
{
  /* Doubly linked chain of entries.  */
  Mnode * next;
  Mnode * prev;
  /* The maximum offset into the code that this entry can be placed.  While
     pushing fixes for forward references, all entries are sorted in order
     of increasing max_address.  */
  HOST_WIDE_INT max_address;
  /* Similarly for an entry inserted for a backwards ref.  */
  HOST_WIDE_INT min_address;
  /* The number of fixes referencing this entry.  This can become zero
     if we "unpush" an entry.  In this case we ignore the entry when we
     come to emit the code.  */
  int refcount;
  /* The offset from the start of the minipool.  */
  HOST_WIDE_INT offset;
  /* The value in table.  */
  rtx value;
  /* The mode of value.  */
  enum machine_mode mode;
};

struct minipool_fixup
{
  Mfix *            next;
  rtx               insn;
  HOST_WIDE_INT     address;
  rtx *             loc;
  enum machine_mode mode;
  int               fix_size;
  rtx               value;
  Mnode *           minipool;
  HOST_WIDE_INT     forwards;
  HOST_WIDE_INT     backwards;
};

/* Fixes less than a word need padding out to a word boundary.  */
#define MINIPOOL_FIX_SIZE(mode) \
  (GET_MODE_SIZE ((mode)) >= 4 ? GET_MODE_SIZE ((mode)) : 4)

static Mnode * minipool_vector_head;
static Mnode * minipool_vector_tail;
static rtx     minipool_vector_label;

/* The linked list of all minipool fixes required for this function.  */
Mfix * minipool_fix_head;
Mfix * minipool_fix_tail;
/* The fix entry for the current minipool, once it has been placed.  */
Mfix * minipool_barrier;
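
/* Hypothetical sanity checks (not in the original file): per the
   MINIPOOL_FIX_SIZE macro above, sub-word fixes are padded to a full
   word, while larger ones keep their natural size.  */
#if 0
static void
minipool_fix_size_example (void)
{
  if (MINIPOOL_FIX_SIZE (QImode) != 4	  /* byte pads to a word  */
      || MINIPOOL_FIX_SIZE (HImode) != 4  /* half-word pads to a word  */
      || MINIPOOL_FIX_SIZE (SImode) != 4
      || MINIPOOL_FIX_SIZE (DImode) != 8) /* doubleword keeps 8 bytes  */
    abort ();
}
#endif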
/* Determines if INSN is the start of a jump table.  Returns the end
   of the TABLE or NULL_RTX.  */

static rtx
is_jump_table (insn)
     rtx insn;
{
  rtx table;

  if (GET_CODE (insn) == JUMP_INSN
      && JUMP_LABEL (insn) != NULL
      && ((table = next_real_insn (JUMP_LABEL (insn)))
	  == next_real_insn (insn))
      && table != NULL
      && GET_CODE (table) == JUMP_INSN
      && (GET_CODE (PATTERN (table)) == ADDR_VEC
	  || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
    return table;

  return NULL_RTX;
}

#ifndef JUMP_TABLES_IN_TEXT_SECTION
#define JUMP_TABLES_IN_TEXT_SECTION 0
#endif

static HOST_WIDE_INT
get_jump_table_size (insn)
     rtx insn;
{
  /* ADDR_VECs only take room if read-only data goes into the text
     section.  */
  if (JUMP_TABLES_IN_TEXT_SECTION
#if !defined(READONLY_DATA_SECTION) && !defined(READONLY_DATA_SECTION_ASM_OP)
      || 1
#endif
      )
    {
      rtx body = PATTERN (insn);
      int elt = GET_CODE (body) == ADDR_DIFF_VEC ? 1 : 0;

      return GET_MODE_SIZE (GET_MODE (body)) * XVECLEN (body, elt);
    }

  return 0;
}
/* Move a minipool fix MP from its current location to before MAX_MP.
   If MAX_MP is NULL, then MP doesn't need moving, but the addressing
   constraints may need updating.  */

static Mnode *
move_minipool_fix_forward_ref (mp, max_mp, max_address)
     Mnode * mp;
     Mnode * max_mp;
     HOST_WIDE_INT max_address;
{
  /* This should never be true and the code below assumes these are
     different.  */
  if (mp == max_mp)
    abort ();

  if (max_mp == NULL)
    {
      if (max_address < mp->max_address)
	mp->max_address = max_address;
    }
  else
    {
      if (max_address > max_mp->max_address - mp->fix_size)
	mp->max_address = max_mp->max_address - mp->fix_size;
      else
	mp->max_address = max_address;

      /* Unlink MP from its current position.  Since max_mp is non-null,
	 mp->prev must be non-null.  */
      mp->prev->next = mp->next;
      if (mp->next != NULL)
	mp->next->prev = mp->prev;
      else
	minipool_vector_tail = mp->prev;

      /* Re-insert it before MAX_MP.  */
      mp->next = max_mp;
      mp->prev = max_mp->prev;
      max_mp->prev = mp;

      if (mp->prev != NULL)
	mp->prev->next = mp;
      else
	minipool_vector_head = mp;
    }

  /* Save the new entry.  */
  max_mp = mp;

  /* Scan over the preceding entries and adjust their addresses as
     required.  */
  while (mp->prev != NULL
	 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
    {
      mp->prev->max_address = mp->max_address - mp->prev->fix_size;
      mp = mp->prev;
    }

  return max_mp;
}
/* Add a constant to the minipool for a forward reference.  Returns the
   node added or NULL if the constant will not fit in this pool.  */

static Mnode *
add_minipool_forward_ref (fix)
     Mfix * fix;
{
  /* If set, max_mp is the first pool_entry that has a lower
     constraint than the one we are trying to add.  */
  Mnode *       max_mp = NULL;
  HOST_WIDE_INT max_address = fix->address + fix->forwards;
  Mnode *       mp;

  /* If this fix's address is greater than the address of the first
     entry, then we can't put the fix in this pool.  We subtract the
     size of the current fix to ensure that if the table is fully
     packed we still have enough room to insert this value by shuffling
     the other fixes forwards.  */
  if (minipool_vector_head &&
      fix->address >= minipool_vector_head->max_address - fix->fix_size)
    return NULL;

  /* Scan the pool to see if a constant with the same value has
     already been added.  While we are doing this, also note the
     location where we must insert the constant if it doesn't already
     exist.  */
  for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
    {
      if (GET_CODE (fix->value) == GET_CODE (mp->value)
	  && fix->mode == mp->mode
	  && (GET_CODE (fix->value) != CODE_LABEL
	      || (CODE_LABEL_NUMBER (fix->value)
		  == CODE_LABEL_NUMBER (mp->value)))
	  && rtx_equal_p (fix->value, mp->value))
	{
	  /* More than one fix references this entry.  */
	  mp->refcount++;
	  return move_minipool_fix_forward_ref (mp, max_mp, max_address);
	}

      /* Note the insertion point if necessary.  */
      if (max_mp == NULL
	  && mp->max_address > max_address)
	max_mp = mp;
    }

  /* The value is not currently in the minipool, so we need to create
     a new entry for it.  If MAX_MP is NULL, the entry will be put on
     the end of the list since the placement is less constrained than
     any existing entry.  Otherwise, we insert the new fix before
     MAX_MP and, if necessary, adjust the constraints on the other
     entries.  */
  mp = xmalloc (sizeof (* mp));
  mp->fix_size = fix->fix_size;
  mp->mode = fix->mode;
  mp->value = fix->value;
  mp->refcount = 1;
  /* Not yet required for a backwards ref.  */
  mp->min_address = -65536;

  if (max_mp == NULL)
    {
      mp->max_address = max_address;
      mp->next = NULL;
      mp->prev = minipool_vector_tail;

      if (mp->prev == NULL)
	{
	  minipool_vector_head = mp;
	  minipool_vector_label = gen_label_rtx ();
	}
      else
	mp->prev->next = mp;

      minipool_vector_tail = mp;
    }
  else
    {
      if (max_address > max_mp->max_address - mp->fix_size)
	mp->max_address = max_mp->max_address - mp->fix_size;
      else
	mp->max_address = max_address;

      mp->next = max_mp;
      mp->prev = max_mp->prev;
      max_mp->prev = mp;
      if (mp->prev != NULL)
	mp->prev->next = mp;
      else
	minipool_vector_head = mp;
    }

  /* Save the new entry.  */
  max_mp = mp;

  /* Scan over the preceding entries and adjust their addresses as
     required.  */
  while (mp->prev != NULL
	 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
    {
      mp->prev->max_address = mp->max_address - mp->prev->fix_size;
      mp = mp->prev;
    }

  return max_mp;
}
static Mnode *
move_minipool_fix_backward_ref (mp, min_mp, min_address)
     Mnode * mp;
     Mnode * min_mp;
     HOST_WIDE_INT min_address;
{
  HOST_WIDE_INT offset;

  /* This should never be true, and the code below assumes these are
     different.  */
  if (mp == min_mp)
    abort ();

  if (min_mp == NULL)
    {
      if (min_address > mp->min_address)
	mp->min_address = min_address;
    }
  else
    {
      /* We will adjust this below if it is too loose.  */
      mp->min_address = min_address;

      /* Unlink MP from its current position.  Since min_mp is non-null,
	 mp->next must be non-null.  */
      mp->next->prev = mp->prev;
      if (mp->prev != NULL)
	mp->prev->next = mp->next;
      else
	minipool_vector_head = mp->next;

      /* Reinsert it after MIN_MP.  */
      mp->prev = min_mp;
      mp->next = min_mp->next;
      min_mp->next = mp;
      if (mp->next != NULL)
	mp->next->prev = mp;
      else
	minipool_vector_tail = mp;
    }

  min_mp = mp;

  offset = 0;
  for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
    {
      mp->offset = offset;
      if (mp->refcount > 0)
	offset += mp->fix_size;

      if (mp->next && mp->next->min_address < mp->min_address + mp->fix_size)
	mp->next->min_address = mp->min_address + mp->fix_size;
    }

  return min_mp;
}
/* Add a constant to the minipool for a backward reference.  Returns the
   node added or NULL if the constant will not fit in this pool.

   Note that the code for insertion for a backwards reference can be
   somewhat confusing because the calculated offsets for each fix do
   not take into account the size of the pool (which is still under
   construction).  */

static Mnode *
add_minipool_backward_ref (fix)
     Mfix * fix;
{
  /* If set, min_mp is the last pool_entry that has a lower constraint
     than the one we are trying to add.  */
  Mnode *       min_mp = NULL;
  /* This can be negative, since it is only a constraint.  */
  HOST_WIDE_INT min_address = fix->address - fix->backwards;
  Mnode *       mp;

  /* If we can't reach the current pool from this insn, or if we can't
     insert this entry at the end of the pool without pushing other
     fixes out of range, then we don't try.  This ensures that we
     can't fail later on.  */
  if (min_address >= minipool_barrier->address
      || (minipool_vector_tail->min_address + fix->fix_size
	  >= minipool_barrier->address))
    return NULL;

  /* Scan the pool to see if a constant with the same value has
     already been added.  While we are doing this, also note the
     location where we must insert the constant if it doesn't already
     exist.  */
  for (mp = minipool_vector_tail; mp != NULL; mp = mp->prev)
    {
      if (GET_CODE (fix->value) == GET_CODE (mp->value)
	  && fix->mode == mp->mode
	  && (GET_CODE (fix->value) != CODE_LABEL
	      || (CODE_LABEL_NUMBER (fix->value)
		  == CODE_LABEL_NUMBER (mp->value)))
	  && rtx_equal_p (fix->value, mp->value)
	  /* Check that there is enough slack to move this entry to the
	     end of the table (this is conservative).  */
	  && (mp->max_address
	      > (minipool_barrier->address
		 + minipool_vector_tail->offset
		 + minipool_vector_tail->fix_size)))
	{
	  mp->refcount++;
	  return move_minipool_fix_backward_ref (mp, min_mp, min_address);
	}

      if (min_mp != NULL)
	mp->min_address += fix->fix_size;
      else
	{
	  /* Note the insertion point if necessary.  */
	  if (mp->min_address < min_address)
	    min_mp = mp;
	  else if (mp->max_address
		   < minipool_barrier->address + mp->offset + fix->fix_size)
	    {
	      /* Inserting before this entry would push the fix beyond
		 its maximum address (which can happen if we have
		 re-located a forwards fix); force the new fix to come
		 after it.  */
	      min_mp = mp;
	      min_address = mp->min_address + fix->fix_size;
	    }
	}
    }

  /* We need to create a new entry.  */
  mp = xmalloc (sizeof (* mp));
  mp->fix_size = fix->fix_size;
  mp->mode = fix->mode;
  mp->value = fix->value;
  mp->refcount = 1;
  mp->max_address = minipool_barrier->address + 65536;

  mp->min_address = min_address;

  if (min_mp == NULL)
    {
      mp->prev = NULL;
      mp->next = minipool_vector_head;

      if (mp->next == NULL)
	{
	  minipool_vector_tail = mp;
	  minipool_vector_label = gen_label_rtx ();
	}
      else
	mp->next->prev = mp;

      minipool_vector_head = mp;
    }
  else
    {
      mp->next = min_mp->next;
      mp->prev = min_mp;
      min_mp->next = mp;

      if (mp->next != NULL)
	mp->next->prev = mp;
      else
	minipool_vector_tail = mp;
    }

  /* Save the new entry.  */
  min_mp = mp;

  if (mp->prev)
    mp = mp->prev;
  else
    mp->offset = 0;

  /* Scan over the following entries and adjust their offsets.  */
  while (mp->next != NULL)
    {
      if (mp->next->min_address < mp->min_address + mp->fix_size)
	mp->next->min_address = mp->min_address + mp->fix_size;

      if (mp->refcount)
	mp->next->offset = mp->offset + mp->fix_size;
      else
	mp->next->offset = mp->offset;

      mp = mp->next;
    }

  return min_mp;
}
static void
assign_minipool_offsets (barrier)
     Mfix * barrier;
{
  HOST_WIDE_INT offset = 0;
  Mnode * mp;

  minipool_barrier = barrier;

  for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
    {
      mp->offset = offset;

      if (mp->refcount > 0)
	offset += mp->fix_size;
    }
}
/* Output the literal table.  */

static void
dump_minipool (scan)
     rtx scan;
{
  Mnode * mp;
  Mnode * nmp;

  if (rtl_dump_file)
    fprintf (rtl_dump_file,
	     ";; Emitting minipool after insn %u; address %ld\n",
	     INSN_UID (scan), (unsigned long) minipool_barrier->address);

  scan = emit_label_after (gen_label_rtx (), scan);
  scan = emit_insn_after (gen_align_4 (), scan);
  scan = emit_label_after (minipool_vector_label, scan);

  for (mp = minipool_vector_head; mp != NULL; mp = nmp)
    {
      if (mp->refcount > 0)
	{
	  if (rtl_dump_file)
	    {
	      fprintf (rtl_dump_file,
		       ";;  Offset %u, min %ld, max %ld ",
		       (unsigned) mp->offset, (unsigned long) mp->min_address,
		       (unsigned long) mp->max_address);
	      arm_print_value (rtl_dump_file, mp->value);
	      fputc ('\n', rtl_dump_file);
	    }

	  switch (mp->fix_size)
	    {
#ifdef HAVE_consttable_1
	    case 1:
	      scan = emit_insn_after (gen_consttable_1 (mp->value), scan);
	      break;
#endif
#ifdef HAVE_consttable_2
	    case 2:
	      scan = emit_insn_after (gen_consttable_2 (mp->value), scan);
	      break;
#endif
#ifdef HAVE_consttable_4
	    case 4:
	      scan = emit_insn_after (gen_consttable_4 (mp->value), scan);
	      break;
#endif
#ifdef HAVE_consttable_8
	    case 8:
	      scan = emit_insn_after (gen_consttable_8 (mp->value), scan);
	      break;
#endif
	    default:
	      abort ();
	      break;
	    }
	}

      nmp = mp->next;
      free (mp);
    }

  minipool_vector_head = minipool_vector_tail = NULL;
  scan = emit_insn_after (gen_consttable_end (), scan);
  scan = emit_barrier_after (scan);
}
/* Return the cost of forcibly inserting a barrier after INSN.  */

static int
arm_barrier_cost (insn)
     rtx insn;
{
  /* Basing the location of the pool on the loop depth is preferable,
     but at the moment, the basic block information seems to be
     corrupt by this stage of the compilation.  */
  int base_cost = 50;
  rtx next = next_nonnote_insn (insn);

  if (next != NULL && GET_CODE (next) == CODE_LABEL)
    base_cost -= 20;

  switch (GET_CODE (insn))
    {
    case CODE_LABEL:
      /* It will always be better to place the table before the label, rather
	 than after it.  */
      return 50;

    case INSN:
    case CALL_INSN:
      return base_cost;

    case JUMP_INSN:
      return base_cost - 10;

    default:
      return base_cost + 10;
    }
}
/* Find the best place in the insn stream in the range
   (FIX->address,MAX_ADDRESS) to forcibly insert a minipool barrier.
   Create the barrier by inserting a jump and add a new fix entry for
   it.  */

static Mfix *
create_fix_barrier (fix, max_address)
     Mfix * fix;
     HOST_WIDE_INT max_address;
{
  HOST_WIDE_INT count = 0;
  rtx barrier;
  rtx from = fix->insn;
  rtx selected = from;
  int selected_cost;
  HOST_WIDE_INT selected_address;
  Mfix * new_fix;
  HOST_WIDE_INT max_count = max_address - fix->address;
  rtx label = gen_label_rtx ();

  selected_cost = arm_barrier_cost (from);
  selected_address = fix->address;

  while (from && count < max_count)
    {
      rtx tmp;
      int new_cost;

      /* This code shouldn't have been called if there was a natural barrier
	 within range.  */
      if (GET_CODE (from) == BARRIER)
	abort ();

      /* Count the length of this insn.  */
      count += get_attr_length (from);

      /* If there is a jump table, add its length.  */
      tmp = is_jump_table (from);
      if (tmp != NULL)
	{
	  count += get_jump_table_size (tmp);

	  /* Jump tables aren't in a basic block, so base the cost on
	     the dispatch insn.  If we select this location, we will
	     still put the pool after the table.  */
	  new_cost = arm_barrier_cost (from);

	  if (count < max_count && new_cost <= selected_cost)
	    {
	      selected = tmp;
	      selected_cost = new_cost;
	      selected_address = fix->address + count;
	    }

	  /* Continue after the dispatch table.  */
	  from = NEXT_INSN (tmp);
	  continue;
	}

      new_cost = arm_barrier_cost (from);

      if (count < max_count && new_cost <= selected_cost)
	{
	  selected = from;
	  selected_cost = new_cost;
	  selected_address = fix->address + count;
	}

      from = NEXT_INSN (from);
    }

  /* Create a new JUMP_INSN that branches around a barrier.  */
  from = emit_jump_insn_after (gen_jump (label), selected);
  JUMP_LABEL (from) = label;
  barrier = emit_barrier_after (from);
  emit_label_after (label, barrier);

  /* Create a minipool barrier entry for the new barrier.  */
  new_fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* new_fix));
  new_fix->insn = barrier;
  new_fix->address = selected_address;
  new_fix->next = fix->next;
  fix->next = new_fix;

  return new_fix;
}
/* Record that there is a natural barrier in the insn stream at
   ADDRESS.  */

static void
push_minipool_barrier (insn, address)
     rtx insn;
     HOST_WIDE_INT address;
{
  Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));

  fix->insn = insn;
  fix->address = address;

  fix->next = NULL;
  if (minipool_fix_head != NULL)
    minipool_fix_tail->next = fix;
  else
    minipool_fix_head = fix;

  minipool_fix_tail = fix;
}
/* Record INSN, which will need fixing up to load a value from the
   minipool.  ADDRESS is the offset of the insn since the start of the
   function; LOC is a pointer to the part of the insn which requires
   fixing; VALUE is the constant that must be loaded, which is of type
   MODE.  */

static void
push_minipool_fix (insn, address, loc, mode, value)
     rtx insn;
     HOST_WIDE_INT address;
     rtx * loc;
     enum machine_mode mode;
     rtx value;
{
  Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));

#ifdef AOF_ASSEMBLER
  /* PIC symbol references need to be converted into offsets into the
     based area.  */
  /* XXX This shouldn't be done here.  */
  if (flag_pic && GET_CODE (value) == SYMBOL_REF)
    value = aof_pic_entry (value);
#endif /* AOF_ASSEMBLER */

  fix->insn = insn;
  fix->address = address;
  fix->loc = loc;
  fix->mode = mode;
  fix->fix_size = MINIPOOL_FIX_SIZE (mode);
  fix->value = value;
  fix->forwards = get_attr_pool_range (insn);
  fix->backwards = get_attr_neg_pool_range (insn);
  fix->minipool = NULL;

  /* If an insn doesn't have a range defined for it, then it isn't
     expecting to be reworked by this code.  Better to abort now than
     to generate duff assembly code.  */
  if (fix->forwards == 0 && fix->backwards == 0)
    abort ();

  if (rtl_dump_file)
    {
      fprintf (rtl_dump_file,
	       ";; %smode fixup for i%d; addr %lu, range (%ld,%ld): ",
	       GET_MODE_NAME (mode),
	       INSN_UID (insn), (unsigned long) address,
	       -1 * (long)fix->backwards, (long)fix->forwards);
      arm_print_value (rtl_dump_file, fix->value);
      fprintf (rtl_dump_file, "\n");
    }

  /* Add it to the chain of fixes.  */
  fix->next = NULL;

  if (minipool_fix_head != NULL)
    minipool_fix_tail->next = fix;
  else
    minipool_fix_head = fix;

  minipool_fix_tail = fix;
}
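
/* Hypothetical illustration (not in the original file): each fix defines
   the window of addresses in which its pool entry must be placed.  */
#if 0
static void
fix_window_example (Mfix * fix)
{
  HOST_WIDE_INT lowest  = fix->address - fix->backwards;
  HOST_WIDE_INT highest = fix->address + fix->forwards;

  /* The referenced minipool entry must land in [lowest, highest];
     add_minipool_forward_ref and add_minipool_backward_ref enforce
     the two ends of this window.  */
  if (lowest > highest)
    abort ();
}
#endif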
/* Scan INSN and note any of its operands that need fixing.  */

static void
note_invalid_constants (insn, address)
     rtx insn;
     HOST_WIDE_INT address;
{
  int opno;

  extract_insn (insn);

  if (!constrain_operands (1))
    fatal_insn_not_found (insn);

  /* Fill in recog_op_alt with information about the constraints of this
     insn.  */
  preprocess_constraints ();

  for (opno = 0; opno < recog_data.n_operands; opno++)
    {
      /* Things we need to fix can only occur in inputs.  */
      if (recog_data.operand_type[opno] != OP_IN)
	continue;

      /* If this alternative is a memory reference, then any mention
	 of constants in this alternative is really to fool reload
	 into allowing us to accept one there.  We need to fix them up
	 now so that we output the right code.  */
      if (recog_op_alt[opno][which_alternative].memory_ok)
	{
	  rtx op = recog_data.operand[opno];

	  if (CONSTANT_P (op))
	    push_minipool_fix (insn, address, recog_data.operand_loc[opno],
			       recog_data.operand_mode[opno], op);
	  /* RWE: Now we look correctly at the operands for the insn,
	     this shouldn't be needed any more.  */
#ifndef AOF_ASSEMBLER
	  /* XXX Is this still needed?  */
	  else if (GET_CODE (op) == UNSPEC && XINT (op, 1) == UNSPEC_PIC_SYM)
	    push_minipool_fix (insn, address, recog_data.operand_loc[opno],
			       recog_data.operand_mode[opno],
			       XVECEXP (op, 0, 0));
#endif
	  else if (GET_CODE (op) == MEM
		   && GET_CODE (XEXP (op, 0)) == SYMBOL_REF
		   && CONSTANT_POOL_ADDRESS_P (XEXP (op, 0)))
	    push_minipool_fix (insn, address, recog_data.operand_loc[opno],
			       recog_data.operand_mode[opno],
			       get_pool_constant (XEXP (op, 0)));
	}
    }
}
void
arm_reorg (first)
     rtx first;
{
  rtx insn;
  HOST_WIDE_INT address = 0;
  Mfix * fix;

  minipool_fix_head = minipool_fix_tail = NULL;

  /* The first insn must always be a note, or the code below won't
     scan it properly.  */
  if (GET_CODE (first) != NOTE)
    abort ();

  /* Scan all the insns and record the operands that will need fixing.  */
  for (insn = next_nonnote_insn (first); insn; insn = next_nonnote_insn (insn))
    {
      if (GET_CODE (insn) == BARRIER)
	push_minipool_barrier (insn, address);
      else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
	       || GET_CODE (insn) == JUMP_INSN)
	{
	  rtx table;

	  note_invalid_constants (insn, address);
	  address += get_attr_length (insn);

	  /* If the insn is a vector jump, add the size of the table
	     and skip the table.  */
	  if ((table = is_jump_table (insn)) != NULL)
	    {
	      address += get_jump_table_size (table);
	      insn = table;
	    }
	}
    }

  fix = minipool_fix_head;

  /* Now scan the fixups and perform the required changes.  */
  while (fix)
    {
      Mfix * ftmp;
      Mfix * fdel;
      Mfix * last_added_fix;
      Mfix * last_barrier = NULL;
      Mfix * this_fix;

      /* Skip any further barriers before the next fix.  */
      while (fix && GET_CODE (fix->insn) == BARRIER)
	fix = fix->next;

      /* No more fixes.  */
      if (fix == NULL)
	break;

      last_added_fix = NULL;

      for (ftmp = fix; ftmp; ftmp = ftmp->next)
	{
	  if (GET_CODE (ftmp->insn) == BARRIER)
	    {
	      if (ftmp->address >= minipool_vector_head->max_address)
		break;

	      last_barrier = ftmp;
	    }
	  else if ((ftmp->minipool = add_minipool_forward_ref (ftmp)) == NULL)
	    break;

	  last_added_fix = ftmp;  /* Keep track of the last fix added.  */
	}

      /* If we found a barrier, drop back to that; any fixes that we
	 could have reached but come after the barrier will now go in
	 the next mini-pool.  */
      if (last_barrier != NULL)
	{
	  /* Reduce the refcount for those fixes that won't go into this
	     pool after all.  */
	  for (fdel = last_barrier->next;
	       fdel && fdel != ftmp;
	       fdel = fdel->next)
	    {
	      fdel->minipool->refcount--;
	      fdel->minipool = NULL;
	    }

	  ftmp = last_barrier;
	}
      else
	{
	  /* ftmp is first fix that we can't fit into this pool and
	     there no natural barriers that we could use.  Insert a
	     new barrier in the code somewhere between the previous
	     fix and this one, and arrange to jump around it.  */
	  HOST_WIDE_INT max_address;

	  /* The last item on the list of fixes must be a barrier, so
	     we can never run off the end of the list of fixes without
	     last_barrier being set.  */
	  if (ftmp == NULL)
	    abort ();

	  max_address = minipool_vector_head->max_address;
	  /* Check that there isn't another fix that is in range that
	     we couldn't fit into this pool because the pool was
	     already too large: we need to put the pool before such an
	     instruction.  */
	  if (ftmp->address < max_address)
	    max_address = ftmp->address;

	  last_barrier = create_fix_barrier (last_added_fix, max_address);
	}

      assign_minipool_offsets (last_barrier);

      while (ftmp)
	{
	  if (GET_CODE (ftmp->insn) != BARRIER
	      && ((ftmp->minipool = add_minipool_backward_ref (ftmp))
		  == NULL))
	    break;

	  ftmp = ftmp->next;
	}

      /* Scan over the fixes we have identified for this pool, fixing them
	 up and adding the constants to the pool itself.  */
      for (this_fix = fix; this_fix && ftmp != this_fix;
	   this_fix = this_fix->next)
	if (GET_CODE (this_fix->insn) != BARRIER)
	  {
	    rtx addr
	      = plus_constant (gen_rtx_LABEL_REF (VOIDmode,
						  minipool_vector_label),
			       this_fix->minipool->offset);
	    *this_fix->loc = gen_rtx_MEM (this_fix->mode, addr);
	  }

      dump_minipool (last_barrier->insn);
      fix = ftmp;
    }

  /* From now on we must synthesize any constants that we can't handle
     directly.  This can happen if the RTL gets split during final
     instruction generation.  */
  after_arm_reorg = 1;

  /* Free the minipool memory.  */
  obstack_free (&minipool_obstack, minipool_startobj);
}
/* Routines to output assembly language.  */

/* If the rtx is the correct value then return the string of the number.
   In this way we can ensure that valid double constants are generated even
   when cross compiling.  */

const char *
fp_immediate_constant (x)
     rtx x;
{
  REAL_VALUE_TYPE r;
  int i;

  if (!fpa_consts_inited)
    init_fpa_table ();

  REAL_VALUE_FROM_CONST_DOUBLE (r, x);
  for (i = 0; i < 8; i++)
    if (REAL_VALUES_EQUAL (r, values_fpa[i]))
      return strings_fpa[i];

  abort ();
}

/* As for fp_immediate_constant, but value is passed directly, not in rtx.  */

static const char *
fp_const_from_val (r)
     REAL_VALUE_TYPE * r;
{
  int i;

  if (!fpa_consts_inited)
    init_fpa_table ();

  for (i = 0; i < 8; i++)
    if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
      return strings_fpa[i];

  abort ();
}
/* Output the operands of a LDM/STM instruction to STREAM.
   MASK is the ARM register set mask of which only bits 0-15 are important.
   REG is the base register, either the frame pointer or the stack pointer,
   INSTR is the possibly suffixed load or store instruction.  */

static void
print_multi_reg (stream, instr, reg, mask)
     FILE * stream;
     const char * instr;
     int reg;
     int mask;
{
  int i;
  int not_first = FALSE;

  fputc ('\t', stream);
  asm_fprintf (stream, instr, reg);
  fputs (", {", stream);

  for (i = 0; i <= LAST_ARM_REGNUM; i++)
    if (mask & (1 << i))
      {
	if (not_first)
	  fprintf (stream, ", ");

	asm_fprintf (stream, "%r", i);
	not_first = TRUE;
      }

  fprintf (stream, "}%s\n", TARGET_APCS_32 ? "" : "^");
}
/* Output a 'call' insn.  */

const char *
output_call (operands)
     rtx * operands;
{
  /* Handle calls to lr using ip (which may be clobbered in subr anyway).  */
  if (REGNO (operands[0]) == LR_REGNUM)
    {
      operands[0] = gen_rtx_REG (SImode, IP_REGNUM);
      output_asm_insn ("mov%?\t%0, %|lr", operands);
    }

  output_asm_insn ("mov%?\t%|lr, %|pc", operands);

  if (TARGET_INTERWORK)
    output_asm_insn ("bx%?\t%0", operands);
  else
    output_asm_insn ("mov%?\t%|pc, %0", operands);

  return "";
}
= 0;
6331 int code
= GET_CODE (x0
);
6338 if (REGNO (x0
) == LR_REGNUM
)
6340 *x
= gen_rtx_REG (SImode
, IP_REGNUM
);
6345 /* Scan through the sub-elements and change any references there. */
6346 fmt
= GET_RTX_FORMAT (code
);
6348 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
6350 something_changed
|= eliminate_lr2ip (&XEXP (x0
, i
));
6351 else if (fmt
[i
] == 'E')
6352 for (j
= 0; j
< XVECLEN (x0
, i
); j
++)
6353 something_changed
|= eliminate_lr2ip (&XVECEXP (x0
, i
, j
));
6355 return something_changed
;
/* Output a 'call' insn that is a reference in memory.  */

const char *
output_call_mem (operands)
     rtx * operands;
{
  operands[0] = copy_rtx (operands[0]); /* Be ultra careful.  */
  /* Handle calls using lr by using ip (which may be clobbered in subr anyway).  */
  if (eliminate_lr2ip (&operands[0]))
    output_asm_insn ("mov%?\t%|ip, %|lr", operands);

  if (TARGET_INTERWORK)
    {
      output_asm_insn ("ldr%?\t%|ip, %0", operands);
      output_asm_insn ("mov%?\t%|lr, %|pc", operands);
      output_asm_insn ("bx%?\t%|ip", operands);
    }
  else
    {
      output_asm_insn ("mov%?\t%|lr, %|pc", operands);
      output_asm_insn ("ldr%?\t%|pc, %0", operands);
    }

  return "";
}
/* Output a move from arm registers to an fpu register.
   OPERANDS[0] is an fpu register.
   OPERANDS[1] is the first register of an arm register pair.  */

const char *
output_mov_long_double_fpu_from_arm (operands)
     rtx * operands;
{
  int arm_reg0 = REGNO (operands[1]);
  rtx ops[3];

  if (arm_reg0 == IP_REGNUM)
    abort ();

  ops[0] = gen_rtx_REG (SImode, arm_reg0);
  ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
  ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);

  output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
  output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);

  return "";
}
/* Output a move from an fpu register to arm registers.
   OPERANDS[0] is the first register of an arm register pair.
   OPERANDS[1] is an fpu register.  */

const char *
output_mov_long_double_arm_from_fpu (operands)
     rtx * operands;
{
  int arm_reg0 = REGNO (operands[0]);
  rtx ops[3];

  if (arm_reg0 == IP_REGNUM)
    abort ();

  ops[0] = gen_rtx_REG (SImode, arm_reg0);
  ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
  ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);

  output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
  output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);

  return "";
}
/* Output a move from arm registers to arm registers of a long double
   OPERANDS[0] is the destination.
   OPERANDS[1] is the source.  */

const char *
output_mov_long_double_arm_from_arm (operands)
     rtx * operands;
{
  /* We have to be careful here because the two might overlap.  */
  int dest_start = REGNO (operands[0]);
  int src_start = REGNO (operands[1]);
  rtx ops[2];
  int i;

  if (dest_start < src_start)
    {
      for (i = 0; i < 3; i++)
	{
	  ops[0] = gen_rtx_REG (SImode, dest_start + i);
	  ops[1] = gen_rtx_REG (SImode, src_start + i);
	  output_asm_insn ("mov%?\t%0, %1", ops);
	}
    }
  else
    {
      for (i = 2; i >= 0; i--)
	{
	  ops[0] = gen_rtx_REG (SImode, dest_start + i);
	  ops[1] = gen_rtx_REG (SImode, src_start + i);
	  output_asm_insn ("mov%?\t%0, %1", ops);
	}
    }

  return "";
}
/* Output a move from arm registers to an fpu register.
   OPERANDS[0] is an fpu register.
   OPERANDS[1] is the first register of an arm register pair.  */

const char *
output_mov_double_fpu_from_arm (operands)
     rtx * operands;
{
  int arm_reg0 = REGNO (operands[1]);
  rtx ops[2];

  if (arm_reg0 == IP_REGNUM)
    abort ();

  ops[0] = gen_rtx_REG (SImode, arm_reg0);
  ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
  output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
  output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);

  return "";
}
/* Output a move from an fpu register to arm registers.
   OPERANDS[0] is the first register of an arm register pair.
   OPERANDS[1] is an fpu register.  */

const char *
output_mov_double_arm_from_fpu (operands)
     rtx * operands;
{
  int arm_reg0 = REGNO (operands[0]);
  rtx ops[2];

  if (arm_reg0 == IP_REGNUM)
    abort ();

  ops[0] = gen_rtx_REG (SImode, arm_reg0);
  ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
  output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
  output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);

  return "";
}
/* Output a move between double words.
   It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
   or MEM<-REG and all MEMs must be offsettable addresses.  */

const char *
output_move_double (operands)
     rtx * operands;
{
  enum rtx_code code0 = GET_CODE (operands[0]);
  enum rtx_code code1 = GET_CODE (operands[1]);
  rtx otherops[3];

  if (code0 == REG)
    {
      int reg0 = REGNO (operands[0]);

      otherops[0] = gen_rtx_REG (SImode, 1 + reg0);

      if (code1 == REG)
	{
	  int reg1 = REGNO (operands[1]);
	  if (reg1 == IP_REGNUM)
	    abort ();

	  /* Ensure the second source is not overwritten.  */
	  if (reg1 == reg0 + (WORDS_BIG_ENDIAN ? -1 : 1))
	    output_asm_insn ("mov%?\t%Q0, %Q1\n\tmov%?\t%R0, %R1", operands);
	  else
	    output_asm_insn ("mov%?\t%R0, %R1\n\tmov%?\t%Q0, %Q1", operands);
	}
      else if (code1 == CONST_DOUBLE)
	{
	  if (GET_MODE (operands[1]) == DFmode)
	    {
	      REAL_VALUE_TYPE r;
	      long l[2];

	      REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
	      REAL_VALUE_TO_TARGET_DOUBLE (r, l);
	      otherops[1] = GEN_INT (l[1]);
	      operands[1] = GEN_INT (l[0]);
	    }
	  else if (GET_MODE (operands[1]) != VOIDmode)
	    abort ();
	  else if (WORDS_BIG_ENDIAN)
	    {
	      otherops[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
	      operands[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
	    }
	  else
	    {
	      otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
	      operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
	    }

	  output_mov_immediate (operands);
	  output_mov_immediate (otherops);
	}
      else if (code1 == CONST_INT)
	{
#if HOST_BITS_PER_WIDE_INT > 32
	  /* If HOST_WIDE_INT is more than 32 bits, the intval tells us
	     what the upper word is.  */
	  if (WORDS_BIG_ENDIAN)
	    {
	      otherops[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
	      operands[1] = GEN_INT (INTVAL (operands[1]) >> 32);
	    }
	  else
	    {
	      otherops[1] = GEN_INT (INTVAL (operands[1]) >> 32);
	      operands[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
	    }
#else
	  /* Sign extend the intval into the high-order word.  */
	  if (WORDS_BIG_ENDIAN)
	    {
	      otherops[1] = operands[1];
	      operands[1] = (INTVAL (operands[1]) < 0
			     ? constm1_rtx : const0_rtx);
	    }
	  else
	    otherops[1] = INTVAL (operands[1]) < 0 ? constm1_rtx : const0_rtx;
#endif
	  output_mov_immediate (otherops);
	  output_mov_immediate (operands);
	}
      else if (code1 == MEM)
	{
	  switch (GET_CODE (XEXP (operands[1], 0)))
	    {
	    case REG:
	      output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
	      break;

	    case PRE_INC:
	      abort (); /* Should never happen now.  */
	      break;

	    case PRE_DEC:
	      output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
	      break;

	    case POST_INC:
	      output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
	      break;

	    case POST_DEC:
	      abort (); /* Should never happen now.  */
	      break;

	    case LABEL_REF:
	    case CONST:
	      output_asm_insn ("adr%?\t%0, %1", operands);
	      output_asm_insn ("ldm%?ia\t%0, %M0", operands);
	      break;

	    default:
	      if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1),
				   GET_MODE (XEXP (XEXP (operands[1], 0), 1))))
		{
		  otherops[0] = operands[0];
		  otherops[1] = XEXP (XEXP (operands[1], 0), 0);
		  otherops[2] = XEXP (XEXP (operands[1], 0), 1);

		  if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
		    {
		      if (GET_CODE (otherops[2]) == CONST_INT)
			{
			  switch (INTVAL (otherops[2]))
			    {
			    case -8:
			      output_asm_insn ("ldm%?db\t%1, %M0", otherops);
			      return "";
			    case -4:
			      output_asm_insn ("ldm%?da\t%1, %M0", otherops);
			      return "";
			    case 4:
			      output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
			      return "";
			    }

			  if (!(const_ok_for_arm (INTVAL (otherops[2]))))
			    output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
			  else
			    output_asm_insn ("add%?\t%0, %1, %2", otherops);
			}
		      else
			output_asm_insn ("add%?\t%0, %1, %2", otherops);
		    }
		  else
		    output_asm_insn ("sub%?\t%0, %1, %2", otherops);

		  return "ldm%?ia\t%0, %M0";
		}
	      else
		{
		  otherops[1] = adjust_address (operands[1], VOIDmode, 4);
		  /* Take care of overlapping base/data reg.  */
		  if (reg_mentioned_p (operands[0], operands[1]))
		    {
		      output_asm_insn ("ldr%?\t%0, %1", otherops);
		      output_asm_insn ("ldr%?\t%0, %1", operands);
		    }
		  else
		    {
		      output_asm_insn ("ldr%?\t%0, %1", operands);
		      output_asm_insn ("ldr%?\t%0, %1", otherops);
		    }
		}
	    }
	}
      else
	abort (); /* Constraints should prevent this.  */
    }
  else if (code0 == MEM && code1 == REG)
    {
      if (REGNO (operands[1]) == IP_REGNUM)
	abort ();

      switch (GET_CODE (XEXP (operands[0], 0)))
	{
	case REG:
	  output_asm_insn ("stm%?ia\t%m0, %M1", operands);
	  break;

	case PRE_INC:
	  abort (); /* Should never happen now.  */
	  break;

	case PRE_DEC:
	  output_asm_insn ("stm%?db\t%m0!, %M1", operands);
	  break;

	case POST_INC:
	  output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
	  break;

	case POST_DEC:
	  abort (); /* Should never happen now.  */
	  break;

	case PLUS:
	  if (GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT)
	    {
	      switch (INTVAL (XEXP (XEXP (operands[0], 0), 1)))
		{
		case -8:
		  output_asm_insn ("stm%?db\t%m0, %M1", operands);
		  return "";

		case -4:
		  output_asm_insn ("stm%?da\t%m0, %M1", operands);
		  return "";

		case 4:
		  output_asm_insn ("stm%?ib\t%m0, %M1", operands);
		  return "";
		}
	    }
	  /* Fall through */

	default:
	  otherops[0] = adjust_address (operands[0], VOIDmode, 4);
	  otherops[1] = gen_rtx_REG (SImode, 1 + REGNO (operands[1]));
	  output_asm_insn ("str%?\t%1, %0", operands);
	  output_asm_insn ("str%?\t%1, %0", otherops);
	}
    }
  else
    /* Constraints should prevent this.  */
    abort ();

  return "";
}
/* Output an arbitrary MOV reg, #n.
   OPERANDS[0] is a register.  OPERANDS[1] is a const_int.  */

const char *
output_mov_immediate (operands)
     rtx * operands;
{
  HOST_WIDE_INT n = INTVAL (operands[1]);

  /* Try to use one MOV.  */
  if (const_ok_for_arm (n))
    output_asm_insn ("mov%?\t%0, %1", operands);

  /* Try to use one MVN.  */
  else if (const_ok_for_arm (~n))
    {
      operands[1] = GEN_INT (~n);
      output_asm_insn ("mvn%?\t%0, %1", operands);
    }
  else
    {
      int n_ones = 0;
      int i;

      /* If all else fails, make it out of ORRs or BICs as appropriate.  */
      for (i = 0; i < 32; i++)
	if (n & (1 << i))
	  n_ones++;

      if (n_ones > 16)  /* Shorter to use MVN with BIC in this case.  */
	output_multi_immediate (operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1, ~ n);
      else
	output_multi_immediate (operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1, n);
    }

  return "";
}
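
/* Illustrative only (hypothetical helper, not in the original file): the
   MVN/BIC route wins when more than half the bits are set, because
   clearing the few zero bits of ~n takes fewer instructions than
   building the many one bits of n.  */
#if 0
static int
prefer_mvn_bic_example (unsigned long n)
{
  int n_ones = 0;
  int i;

  for (i = 0; i < 32; i++)
    if (n & (1UL << i))
      n_ones++;

  return n_ones > 16;
}
#endif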
/* Output an ADD r, s, #n where n may be too big for one instruction.
   If adding zero to one register, output nothing.  */

const char *
output_add_immediate (operands)
     rtx * operands;
{
  HOST_WIDE_INT n = INTVAL (operands[2]);

  if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
    {
      if (n < 0)
	output_multi_immediate (operands,
				"sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
				-n);
      else
	output_multi_immediate (operands,
				"add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
				n);
    }

  return "";
}
/* Output a multiple immediate operation.
   OPERANDS is the vector of operands referred to in the output patterns.
   INSTR1 is the output pattern to use for the first constant.
   INSTR2 is the output pattern to use for subsequent constants.
   IMMED_OP is the index of the constant slot in OPERANDS.
   N is the constant value.  */

static const char *
output_multi_immediate (operands, instr1, instr2, immed_op, n)
     rtx * operands;
     const char * instr1;
     const char * instr2;
     int immed_op;
     HOST_WIDE_INT n;
{
#if HOST_BITS_PER_WIDE_INT > 32
  n &= 0xffffffff;
#endif

  if (n == 0)
    {
      /* Quick and easy output.  */
      operands[immed_op] = const0_rtx;
      output_asm_insn (instr1, operands);
    }
  else
    {
      int i;
      const char * instr = instr1;

      /* Note that n is never zero here (which would give no output).  */
      for (i = 0; i < 32; i += 2)
	{
	  if (n & (3 << i))
	    {
	      operands[immed_op] = GEN_INT (n & (255 << i));
	      output_asm_insn (instr, operands);
	      instr = instr2;
	      i += 6;
	    }
	}
    }

  return "";
}
/* Return the appropriate ARM instruction for the operation code.
   The returned result should not be overwritten.  OP is the rtx of the
   operation.  SHIFT_FIRST_ARG is TRUE if the first argument of the operator
   was shifted.  */

const char *
arithmetic_instr (op, shift_first_arg)
     rtx op;
     int shift_first_arg;
{
  switch (GET_CODE (op))
    {
    case PLUS:
      return "add";

    case MINUS:
      return shift_first_arg ? "rsb" : "sub";

    case IOR:
      return "orr";

    case XOR:
      return "eor";

    case AND:
      return "and";

    default:
      abort ();
    }
}
/* Ensure valid constant shifts and return the appropriate shift mnemonic
   for the operation code.  The returned result should not be overwritten.
   OP is the rtx code of the shift.
   On exit, *AMOUNTP will be -1 if the shift is by a register, or a constant
   shift.  */

static const char *
shift_op (op, amountp)
     rtx op;
     HOST_WIDE_INT *amountp;
{
  const char * mnem;
  enum rtx_code code = GET_CODE (op);

  if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
    *amountp = -1;
  else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
    *amountp = INTVAL (XEXP (op, 1));
  else
    abort ();

  switch (code)
    {
    case ASHIFT:
      mnem = "asl";
      break;

    case ASHIFTRT:
      mnem = "asr";
      break;

    case LSHIFTRT:
      mnem = "lsr";
      break;

    case ROTATERT:
      mnem = "ror";
      break;

    case MULT:
      /* We never have to worry about the amount being other than a
	 power of 2, since this case can never be reloaded from a reg.  */
      if (*amountp != -1)
	*amountp = int_log2 (*amountp);
      else
	abort ();
      return "asl";

    default:
      abort ();
    }

  if (*amountp != -1)
    {
      /* This is not 100% correct, but follows from the desire to merge
	 multiplication by a power of 2 with the recognizer for a
	 shift.  >=32 is not a valid shift for "asl", so we must try and
	 output a shift that produces the correct arithmetical result.
	 Using lsr #32 is identical except for the fact that the carry bit
	 is not set correctly if we set the flags; but we never use the
	 carry bit from such an operation, so we can ignore that.  */
      if (code == ROTATERT)
	/* Rotate is just modulo 32.  */
	*amountp &= 31;
      else if (*amountp != (*amountp & 31))
	{
	  if (code == ASHIFT)
	    mnem = "lsr";
	  *amountp = 32;
	}

      /* Shifts of 0 are no-ops.  */
      if (*amountp == 0)
	return NULL;
    }

  return mnem;
}
/* Obtain the shift from the POWER of two.  */

static HOST_WIDE_INT
int_log2 (power)
     HOST_WIDE_INT power;
{
  HOST_WIDE_INT shift = 0;

  while ((((HOST_WIDE_INT) 1 << shift) & power) == 0)
    {
      if (shift > 31)
	abort ();
      shift++;
    }

  return shift;
}
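
/* Hypothetical checks (not in the original file): int_log2 recovers the
   shift count from a power of two, which is how a multiplication by a
   power of 2 is rewritten as an "asl" in shift_op above.  */
#if 0
static void
int_log2_example (void)
{
  if (int_log2 (1) != 0 || int_log2 (8) != 3 || int_log2 (1 << 20) != 20)
    abort ();
}
#endif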
/* Output a .ascii pseudo-op, keeping track of lengths.  This is because
   /bin/as is horribly restrictive.  */
#define MAX_ASCII_LEN 51

void
output_ascii_pseudo_op (stream, p, len)
     FILE * stream;
     const unsigned char * p;
     int len;
{
  int i;
  int len_so_far = 0;

  fputs ("\t.ascii\t\"", stream);

  for (i = 0; i < len; i++)
    {
      int c = p[i];

      if (len_so_far >= MAX_ASCII_LEN)
	{
	  fputs ("\"\n\t.ascii\t\"", stream);
	  len_so_far = 0;
	}

      switch (c)
	{
	case TARGET_TAB:
	  fputs ("\\t", stream);
	  len_so_far += 2;
	  break;

	case TARGET_FF:
	  fputs ("\\f", stream);
	  len_so_far += 2;
	  break;

	case TARGET_BS:
	  fputs ("\\b", stream);
	  len_so_far += 2;
	  break;

	case TARGET_CR:
	  fputs ("\\r", stream);
	  len_so_far += 2;
	  break;

	case TARGET_NEWLINE:
	  fputs ("\\n", stream);
	  c = p[i + 1];
	  if ((c >= ' ' && c <= '~')
	      || c == TARGET_TAB)
	    /* This is a good place for a line break.  */
	    len_so_far = MAX_ASCII_LEN;
	  else
	    len_so_far += 2;
	  break;

	case '\"':
	case '\\':
	  putc ('\\', stream);
	  len_so_far++;
	  /* Drop through.  */

	default:
	  if (c >= ' ' && c <= '~')
	    {
	      putc (c, stream);
	      len_so_far++;
	    }
	  else
	    {
	      fprintf (stream, "\\%03o", c);
	      len_so_far += 4;
	    }
	  break;
	}
    }

  fputs ("\"\n", stream);
}
/* Compute the register save mask for registers 0 through 12
   inclusive.  This code is used by both arm_compute_save_reg_mask
   and arm_compute_initial_elimination_offset.  */

static unsigned long
arm_compute_save_reg0_reg12_mask ()
{
  unsigned long func_type = arm_current_func_type ();
  unsigned int save_reg_mask = 0;
  unsigned int reg;

  if (IS_INTERRUPT (func_type))
    {
      unsigned int max_reg;
      /* Interrupt functions must not corrupt any registers,
	 even call clobbered ones.  If this is a leaf function
	 we can just examine the registers used by the RTL, but
	 otherwise we have to assume that whatever function is
	 called might clobber anything, and so we have to save
	 all the call-clobbered registers as well.  */
      if (ARM_FUNC_TYPE (func_type) == ARM_FT_FIQ)
	/* FIQ handlers have registers r8 - r12 banked, so
	   we only need to check r0 - r7, Normal ISRs only
	   bank r14 and r15, so we must check up to r12.
	   r13 is the stack pointer which is always preserved,
	   so we do not need to consider it here.  */
	max_reg = 7;
      else
	max_reg = 12;

      for (reg = 0; reg <= max_reg; reg++)
	if (regs_ever_live[reg]
	    || (! current_function_is_leaf && call_used_regs [reg]))
	  save_reg_mask |= (1 << reg);
    }
  else
    {
      /* In the normal case we only need to save those registers
	 which are call saved and which are used by this function.  */
      for (reg = 0; reg <= 10; reg++)
	if (regs_ever_live[reg] && ! call_used_regs [reg])
	  save_reg_mask |= (1 << reg);

      /* Handle the frame pointer as a special case.  */
      if (! TARGET_APCS_FRAME
	  && ! frame_pointer_needed
	  && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
	  && ! call_used_regs[HARD_FRAME_POINTER_REGNUM])
	save_reg_mask |= 1 << HARD_FRAME_POINTER_REGNUM;

      /* If we aren't loading the PIC register,
	 don't stack it even though it may be live.  */
      if (flag_pic
	  && ! TARGET_SINGLE_PIC_BASE
	  && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
	save_reg_mask |= 1 << PIC_OFFSET_TABLE_REGNUM;
    }

  return save_reg_mask;
}
/* Compute a bit mask of which registers need to be
   saved on the stack for the current function.  */

static unsigned long
arm_compute_save_reg_mask ()
{
  unsigned int save_reg_mask = 0;
  unsigned long func_type = arm_current_func_type ();

  if (IS_NAKED (func_type))
    /* This should never really happen.  */
    return 0;

  /* If we are creating a stack frame, then we must save the frame pointer,
     IP (which will hold the old stack pointer), LR and the PC.  */
  if (frame_pointer_needed)
    save_reg_mask |=
      (1 << ARM_HARD_FRAME_POINTER_REGNUM)
      | (1 << IP_REGNUM)
      | (1 << LR_REGNUM)
      | (1 << PC_REGNUM);

  /* Volatile functions do not return, so there
     is no need to save any other registers.  */
  if (IS_VOLATILE (func_type))
    return save_reg_mask;

  save_reg_mask |= arm_compute_save_reg0_reg12_mask ();

  /* Decide if we need to save the link register.
     Interrupt routines have their own banked link register,
     so they never need to save it.
     Otherwise if we do not use the link register we do not need to save
     it.  If we are pushing other registers onto the stack however, we
     can save an instruction in the epilogue by pushing the link register
     now and then popping it back into the PC.  This incurs extra memory
     accesses though, so we only do it when optimising for size, and only
     if we know that we will not need a fancy return sequence.  */
  if (regs_ever_live [LR_REGNUM]
      || (save_reg_mask
	  && optimize_size
	  && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL))
    save_reg_mask |= 1 << LR_REGNUM;

  if (cfun->machine->lr_save_eliminated)
    save_reg_mask &= ~ (1 << LR_REGNUM);

  return save_reg_mask;
}
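
/* Hypothetical illustration (not in the original file): with a frame
   pointer the mask always contains fp, ip, lr and pc, matching the APCS
   frame layout that the prologue pushes.  */
#if 0
static void
save_reg_mask_example (void)
{
  unsigned long must_save = (1UL << ARM_HARD_FRAME_POINTER_REGNUM)
			    | (1UL << IP_REGNUM)
			    | (1UL << LR_REGNUM)
			    | (1UL << PC_REGNUM);

  if (frame_pointer_needed
      && (arm_compute_save_reg_mask () & must_save) != must_save)
    abort ();
}
#endif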
/* Generate a function exit sequence.  If REALLY_RETURN is true, then do
   everything bar the final return instruction.  */

const char *
output_return_instruction (operand, really_return, reverse)
     rtx operand;
     int really_return;
     int reverse;
{
  char conditional[10];
  char instr[100];
  int reg;
  unsigned long live_regs_mask;
  unsigned long func_type;

  func_type = arm_current_func_type ();

  if (IS_NAKED (func_type))
    return "";

  if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
    {
      /* If this function was declared non-returning, and we have found a tail
	 call, then we have to trust that the called function won't return.  */
      if (really_return)
	{
	  rtx ops[2];

	  /* Otherwise, trap an attempted return by aborting.  */
	  ops[0] = operand;
	  ops[1] = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)"
				       : "abort");
	  assemble_external_libcall (ops[1]);
	  output_asm_insn (reverse ? "bl%D0\t%a1" : "bl%d0\t%a1", ops);
	}

      return "";
    }

  if (current_function_calls_alloca && !really_return)
    abort ();

  sprintf (conditional, "%%?%%%c0", reverse ? 'D' : 'd');

  return_used_this_function = 1;

  live_regs_mask = arm_compute_save_reg_mask ();

  if (live_regs_mask)
    {
      const char * return_reg;

      /* If we do not have any special requirements for function exit
	 (eg interworking, or ISR) then we can load the return address
	 directly into the PC.  Otherwise we must load it into LR.  */
      if (really_return
	  && ! TARGET_INTERWORK)
	return_reg = reg_names[PC_REGNUM];
      else
	return_reg = reg_names[LR_REGNUM];

      if ((live_regs_mask & (1 << IP_REGNUM)) == (1 << IP_REGNUM))
	/* There are two possible reasons for the IP register being saved.
	   Either a stack frame was created, in which case IP contains the
	   old stack pointer, or an ISR routine corrupted it.  If this is an
	   ISR routine then just restore IP, otherwise restore IP into SP.  */
	if (! IS_INTERRUPT (func_type))
	  {
	    live_regs_mask &= ~ (1 << IP_REGNUM);
	    live_regs_mask |=   (1 << SP_REGNUM);
	  }

      /* On some ARM architectures it is faster to use LDR rather than
	 LDM to load a single register.  On other architectures, the
	 cost is the same.  In 26 bit mode, or for exception handlers,
	 we have to use LDM to load the PC so that the CPSR is also
	 restored.  */
      for (reg = 0; reg <= LAST_ARM_REGNUM; reg++)
	if (live_regs_mask == (unsigned int)(1 << reg))
	  break;

      if (reg <= LAST_ARM_REGNUM
	  && (reg != LR_REGNUM
	      || ! really_return
	      || (TARGET_APCS_32 && ! IS_INTERRUPT (func_type))))
	{
	  sprintf (instr, "ldr%s\t%%|%s, [%%|sp], #4", conditional,
		   (reg == LR_REGNUM) ? return_reg : reg_names[reg]);
	}
      else
	{
	  char *p;
	  int first = 1;

	  /* Generate the load multiple instruction to restore the registers.  */
	  if (frame_pointer_needed)
	    sprintf (instr, "ldm%sea\t%%|fp, {", conditional);
	  else
	    sprintf (instr, "ldm%sfd\t%%|sp!, {", conditional);

	  p = instr + strlen (instr);

	  for (reg = 0; reg <= SP_REGNUM; reg++)
	    if (live_regs_mask & (1 << reg))
	      {
		int l = strlen (reg_names[reg]);

		if (first)
		  first = 0;
		else
		  {
		    memcpy (p, ", ", 2);
		    p += 2;
		  }

		memcpy (p, "%|", 2);
		memcpy (p + 2, reg_names[reg], l);
		p += l + 2;
	      }

	  if (live_regs_mask & (1 << LR_REGNUM))
	    {
	      int l = strlen (return_reg);

	      if (! first)
		{
		  memcpy (p, ", ", 2);
		  p += 2;
		}

	      memcpy (p, "%|", 2);
	      memcpy (p + 2, return_reg, l);
	      strcpy (p + 2 + l, ((TARGET_APCS_32
				   && !IS_INTERRUPT (func_type))
				  ? "}" : "}^"));
	    }
	  else
	    strcpy (p, "}");
	}

      output_asm_insn (instr, & operand);

      /* See if we need to generate an extra instruction to
	 perform the actual function return.  */
      if (really_return
	  && func_type != ARM_FT_INTERWORKED
	  && (live_regs_mask & (1 << LR_REGNUM)) != 0)
	{
	  /* The return has already been handled
	     by loading the LR into the PC.  */
	  really_return = 0;
	}
    }

  if (really_return)
    {
      switch ((int) ARM_FUNC_TYPE (func_type))
	{
	case ARM_FT_ISR:
	case ARM_FT_FIQ:
	  sprintf (instr, "sub%ss\t%%|pc, %%|lr, #4", conditional);
	  break;

	case ARM_FT_INTERWORKED:
	  sprintf (instr, "bx%s\t%%|lr", conditional);
	  break;

	case ARM_FT_EXCEPTION:
	  sprintf (instr, "mov%ss\t%%|pc, %%|lr", conditional);
	  break;

	default:
	  /* ARMv5 implementations always provide BX, so interworking
	     is the default unless APCS-26 is in use.  */
	  if ((insn_flags & FL_ARCH5) != 0 && TARGET_APCS_32)
	    sprintf (instr, "bx%s\t%%|lr", conditional);
	  else
	    sprintf (instr, "mov%s%s\t%%|pc, %%|lr",
		     conditional, TARGET_APCS_32 ? "" : "s");
	  break;
	}

      output_asm_insn (instr, & operand);
    }

  return "";
}
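
/* For illustration (not part of the original sources): when the only
   saved register is LR, the code above prefers the single-register form

       ldr     pc, [sp], #4

   over an equivalent "ldmfd sp!, {pc}", while a frame-pointer function
   typically unwinds with the classic multi-register form

       ldmea   fp, {fp, sp, pc}  */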
/* Write the function name into the code section, directly preceding
   the function prologue.

   Code will be output similar to this:

     t0
	 .ascii "arm_poke_function_name", 0
	 .align
     t1
	 .word 0xff000000 + (t1 - t0)
     arm_poke_function_name
	 mov     ip, sp
	 stmfd   sp!, {fp, ip, lr, pc}
	 sub     fp, ip, #4

   When performing a stack backtrace, code can inspect the value
   of 'pc' stored at 'fp' + 0.  If the trace function then looks
   at location pc - 12 and the top 8 bits are set, then we know
   that there is a function name embedded immediately preceding this
   location and has length ((pc[-3]) & 0xff000000).

   We assume that pc is declared as a pointer to an unsigned long.

   It is of no benefit to output the function name if we are assembling
   a leaf function.  These function types will not contain a stack
   backtrace structure, therefore it is not possible to determine the
   function name.  */

void
arm_poke_function_name (stream, name)
     FILE * stream;
     const char * name;
{
  unsigned long alignlength;
  unsigned long length;
  rtx           x;

  length      = strlen (name) + 1;
  alignlength = ROUND_UP (length);

  ASM_OUTPUT_ASCII (stream, name, length);
  ASM_OUTPUT_ALIGN (stream, 2);
  x = GEN_INT ((unsigned HOST_WIDE_INT) 0xff000000 + alignlength);
  assemble_aligned_integer (UNITS_PER_WORD, x);
}
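
/* For illustration (not part of the original sources): poking the name
   "foo" emits the bytes 'f' 'o' 'o' '\0', so length = 4 and, rounded up
   to a word boundary, alignlength = 4.  The marker word is therefore
   0xff000000 + 4 = 0xff000004; a backtracer that finds the top 8 bits
   set at pc - 12 recovers the name's length from the low bits.  */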
/* Place some comments into the assembler stream
   describing the current function.  */

static void
arm_output_function_prologue (f, frame_size)
     FILE * f;
     HOST_WIDE_INT frame_size;
{
  unsigned long func_type;

  if (!TARGET_ARM)
    {
      thumb_output_function_prologue (f, frame_size);
      return;
    }

  /* Sanity check.  */
  if (arm_ccfsm_state || arm_target_insn)
    abort ();

  func_type = arm_current_func_type ();

  switch ((int) ARM_FUNC_TYPE (func_type))
    {
    default:
    case ARM_FT_NORMAL:
      break;
    case ARM_FT_INTERWORKED:
      asm_fprintf (f, "\t%@ Function supports interworking.\n");
      break;
    case ARM_FT_EXCEPTION_HANDLER:
      asm_fprintf (f, "\t%@ C++ Exception Handler.\n");
      break;
    case ARM_FT_ISR:
      asm_fprintf (f, "\t%@ Interrupt Service Routine.\n");
      break;
    case ARM_FT_FIQ:
      asm_fprintf (f, "\t%@ Fast Interrupt Service Routine.\n");
      break;
    case ARM_FT_EXCEPTION:
      asm_fprintf (f, "\t%@ ARM Exception Handler.\n");
      break;
    }

  if (IS_NAKED (func_type))
    asm_fprintf (f, "\t%@ Naked Function: prologue and epilogue provided by programmer.\n");

  if (IS_VOLATILE (func_type))
    asm_fprintf (f, "\t%@ Volatile: function does not return.\n");

  if (IS_NESTED (func_type))
    asm_fprintf (f, "\t%@ Nested: function declared inside another function.\n");

  asm_fprintf (f, "\t%@ args = %d, pretend = %d, frame = %d\n",
	       current_function_args_size,
	       current_function_pretend_args_size, frame_size);
  asm_fprintf (f, "\t%@ frame_needed = %d, uses_anonymous_args = %d\n",
	       frame_pointer_needed,
	       cfun->machine->uses_anonymous_args);

  if (cfun->machine->lr_save_eliminated)
    asm_fprintf (f, "\t%@ link register save eliminated.\n");

#ifdef AOF_ASSEMBLER
  if (flag_pic)
    asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, PIC_OFFSET_TABLE_REGNUM);
#endif

  return_used_this_function = 0;
}
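
/* For illustration (not part of the original sources): for a typical
   nested, non-leaf function the routine above emits something like

       @ Nested: function declared inside another function.
       @ args = 0, pretend = 0, frame = 8
       @ frame_needed = 1, uses_anonymous_args = 0

   directly before the prologue instructions.  */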
const char *
arm_output_epilogue (really_return)
     int really_return;
{
  int reg;
  unsigned long saved_regs_mask;
  unsigned long func_type;
  /* Floats_offset is the offset from the "virtual" frame.  In an APCS
     frame that is $fp + 4 for a non-variadic function.  */
  int floats_offset = 0;
  rtx operands[3];
  int frame_size = get_frame_size ();
  FILE * f = asm_out_file;
  rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;

  /* If we have already generated the return instruction
     then it is futile to generate anything else.  */
  if (use_return_insn (FALSE) && return_used_this_function)
    return "";

  func_type = arm_current_func_type ();

  if (IS_NAKED (func_type))
    /* Naked functions don't have epilogues.  */
    return "";

  if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
    {
      rtx op;

      /* A volatile function should never return.  Call abort.  */
      op = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)" : "abort");
      assemble_external_libcall (op);
      output_asm_insn ("bl\t%a0", &op);

      return "";
    }

  if (ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER
      && ! really_return)
    /* If we are throwing an exception, then we really must
       be doing a return, so we can't tail-call.  */
    abort ();

  saved_regs_mask = arm_compute_save_reg_mask ();

  /* XXX We should adjust floats_offset for any anonymous args, and then
     re-adjust vfp_offset below to compensate.  */

  /* Compute how far away the floats will be.  */
  for (reg = 0; reg <= LAST_ARM_REGNUM; reg++)
    if (saved_regs_mask & (1 << reg))
      floats_offset += 4;

  if (frame_pointer_needed)
    {
      int vfp_offset = 4;

      if (arm_fpu_arch == FP_SOFT2)
	{
	  for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
	    if (regs_ever_live[reg] && !call_used_regs[reg])
	      {
		floats_offset += 12;
		asm_fprintf (f, "\tldfe\t%r, [%r, #-%d]\n",
			     reg, FP_REGNUM, floats_offset - vfp_offset);
	      }
	}
      else
	{
	  int start_reg = LAST_ARM_FP_REGNUM;

	  for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
	    {
	      if (regs_ever_live[reg] && !call_used_regs[reg])
		{
		  floats_offset += 12;

		  /* We can't unstack more than four registers at once.  */
		  if (start_reg - reg == 3)
		    {
		      asm_fprintf (f, "\tlfm\t%r, 4, [%r, #-%d]\n",
				   reg, FP_REGNUM, floats_offset - vfp_offset);
		      start_reg = reg - 1;
		    }
		}
	      else
		{
		  if (reg != start_reg)
		    asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
				 reg + 1, start_reg - reg,
				 FP_REGNUM, floats_offset - vfp_offset);
		  start_reg = reg - 1;
		}
	    }

	  /* Just in case the last register checked also needs unstacking.  */
	  if (reg != start_reg)
	    asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
			 reg + 1, start_reg - reg,
			 FP_REGNUM, floats_offset - vfp_offset);
	}

      /* saved_regs_mask should contain the IP, which at the time of stack
	 frame generation actually contains the old stack pointer.  So a
	 quick way to unwind the stack is just pop the IP register directly
	 into the stack pointer.  */
      if ((saved_regs_mask & (1 << IP_REGNUM)) == 0)
	abort ();
      saved_regs_mask &= ~ (1 << IP_REGNUM);
      saved_regs_mask |=   (1 << SP_REGNUM);

      /* There are two registers left in saved_regs_mask - LR and PC.  We
	 only need to restore the LR register (the return address), but to
	 save time we can load it directly into the PC, unless we need a
	 special function exit sequence, or we are not really returning.  */
      if (really_return && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL)
	/* Delete the LR from the register mask, so that the LR on
	   the stack is loaded into the PC in the register mask.  */
	saved_regs_mask &= ~ (1 << LR_REGNUM);
      else
	saved_regs_mask &= ~ (1 << PC_REGNUM);

      print_multi_reg (f, "ldmea\t%r", FP_REGNUM, saved_regs_mask);

      if (IS_INTERRUPT (func_type))
	/* Interrupt handlers will have pushed the
	   IP onto the stack, so restore it now.  */
	print_multi_reg (f, "ldmfd\t%r", SP_REGNUM, 1 << IP_REGNUM);
    }
  else
    {
      /* Restore stack pointer if necessary.  */
      if (frame_size + current_function_outgoing_args_size != 0)
	{
	  operands[0] = operands[1] = stack_pointer_rtx;
	  operands[2] = GEN_INT (frame_size
				 + current_function_outgoing_args_size);
	  output_add_immediate (operands);
	}

      if (arm_fpu_arch == FP_SOFT2)
	{
	  for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
	    if (regs_ever_live[reg] && !call_used_regs[reg])
	      asm_fprintf (f, "\tldfe\t%r, [%r], #12\n",
			   reg, SP_REGNUM);
	}
      else
	{
	  int start_reg = FIRST_ARM_FP_REGNUM;

	  for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
	    {
	      if (regs_ever_live[reg] && !call_used_regs[reg])
		{
		  if (reg - start_reg == 3)
		    {
		      asm_fprintf (f, "\tlfmfd\t%r, 4, [%r]!\n",
				   start_reg, SP_REGNUM);
		      start_reg = reg + 1;
		    }
		}
	      else
		{
		  if (reg != start_reg)
		    asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
				 start_reg, reg - start_reg,
				 SP_REGNUM);

		  start_reg = reg + 1;
		}
	    }

	  /* Just in case the last register checked also needs unstacking.  */
	  if (reg != start_reg)
	    asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
			 start_reg, reg - start_reg, SP_REGNUM);
	}

      /* If we can, restore the LR into the PC.  */
      if (ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
	  && really_return
	  && current_function_pretend_args_size == 0
	  && saved_regs_mask & (1 << LR_REGNUM))
	{
	  saved_regs_mask &= ~ (1 << LR_REGNUM);
	  saved_regs_mask |=   (1 << PC_REGNUM);
	}

      /* Load the registers off the stack.  If we only have one register
	 to load use the LDR instruction - it is faster.  */
      if (saved_regs_mask == (1 << LR_REGNUM))
	{
	  /* The exception handler ignores the LR, so we do
	     not really need to load it off the stack.  */
	  if (eh_ofs)
	    asm_fprintf (f, "\tadd\t%r, %r, #4\n", SP_REGNUM, SP_REGNUM);
	  else
	    asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM, SP_REGNUM);
	}
      else if (saved_regs_mask)
	print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, saved_regs_mask);

      if (current_function_pretend_args_size)
	{
	  /* Unwind the pre-pushed regs.  */
	  operands[0] = operands[1] = stack_pointer_rtx;
	  operands[2] = GEN_INT (current_function_pretend_args_size);
	  output_add_immediate (operands);
	}
    }

  if (ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER)
    /* Adjust the stack to remove the exception handler stuff.  */
    asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
		 REGNO (eh_ofs));

  if (! really_return
      || (ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
	  && current_function_pretend_args_size == 0
	  && saved_regs_mask & (1 << PC_REGNUM)))
    return "";

  /* Generate the return instruction.  */
  switch ((int) ARM_FUNC_TYPE (func_type))
    {
    case ARM_FT_EXCEPTION_HANDLER:
      /* Even in 26-bit mode we do a mov (rather than a movs)
	 because we don't have the PSR bits set in the address.  */
      asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, EXCEPTION_LR_REGNUM);
      break;

    case ARM_FT_ISR:
    case ARM_FT_FIQ:
      asm_fprintf (f, "\tsubs\t%r, %r, #4\n", PC_REGNUM, LR_REGNUM);
      break;

    case ARM_FT_EXCEPTION:
      asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
      break;

    case ARM_FT_INTERWORKED:
      asm_fprintf (f, "\tbx\t%r\n", LR_REGNUM);
      break;

    default:
      if (frame_pointer_needed)
	/* If we used the frame pointer then the return address
	   will have been loaded off the stack directly into the
	   PC, so there is no need to issue a MOV instruction
	   here.  */
	;
      else if (current_function_pretend_args_size == 0
	       && (saved_regs_mask & (1 << LR_REGNUM)))
	/* Similarly we may have been able to load LR into the PC
	   even if we did not create a stack frame.  */
	;
      else if (TARGET_APCS_32)
	asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, LR_REGNUM);
      else
	asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
      break;
    }

  return "";
}
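
/* For illustration (not part of the original sources): after the
   IP-for-SP swap and the LR-to-PC substitution above, a frame-pointer
   epilogue collapses to the single instruction

       ldmea   fp, {fp, sp, pc}

   which restores the caller's frame pointer and stack pointer and
   returns, all in one load-multiple.  */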
static void
arm_output_function_epilogue (file, frame_size)
     FILE *file ATTRIBUTE_UNUSED;
     HOST_WIDE_INT frame_size;
{
  if (TARGET_THUMB)
    {
      /* ??? Probably not safe to set this here, since it assumes that a
	 function will be emitted as assembly immediately after we generate
	 RTL for it.  This does not happen for inline functions.  */
      return_used_this_function = 0;
    }
  else
    {
      if (use_return_insn (FALSE)
	  && return_used_this_function
	  && (frame_size + current_function_outgoing_args_size) != 0
	  && !frame_pointer_needed)
	abort ();

      /* Reset the ARM-specific per-function variables.  */
      after_arm_reorg = 0;
    }
}
/* Generate and emit an insn that we will recognize as a push_multi.
   Unfortunately, since this insn does not reflect very well the actual
   semantics of the operation, we need to annotate the insn for the benefit
   of DWARF2 frame unwind information.  */

static rtx
emit_multi_reg_push (mask)
     int mask;
{
  int num_regs = 0;
  int num_dwarf_regs;
  int i, j;
  rtx par;
  rtx dwarf;
  int dwarf_par_index;
  rtx tmp, reg;

  for (i = 0; i <= LAST_ARM_REGNUM; i++)
    if (mask & (1 << i))
      num_regs++;

  if (num_regs == 0 || num_regs > 16)
    abort ();

  /* We don't record the PC in the dwarf frame information.  */
  num_dwarf_regs = num_regs;
  if (mask & (1 << PC_REGNUM))
    num_dwarf_regs--;

  /* For the body of the insn we are going to generate an UNSPEC in
     parallel with several USEs.  This allows the insn to be recognised
     by the push_multi pattern in the arm.md file.  The insn looks
     something like this:

       (parallel [
           (set (mem:BLK (pre_dec:BLK (reg:SI sp)))
	        (unspec:BLK [(reg:SI r4)] UNSPEC_PUSH_MULT))
           (use (reg:SI 11 fp))
           (use (reg:SI 12 ip))
           (use (reg:SI 14 lr))
           (use (reg:SI 15 pc))
        ])

     For the frame note however, we try to be more explicit and actually
     show each register being stored into the stack frame, plus a (single)
     decrement of the stack pointer.  We do it this way in order to be
     friendly to the stack unwinding code, which only wants to see a single
     stack decrement per instruction.  The RTL we generate for the note looks
     something like this:

      (sequence [
           (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int -20)))
           (set (mem:SI (reg:SI sp)) (reg:SI r4))
           (set (mem:SI (plus:SI (reg:SI sp) (const_int 4))) (reg:SI fp))
           (set (mem:SI (plus:SI (reg:SI sp) (const_int 8))) (reg:SI ip))
           (set (mem:SI (plus:SI (reg:SI sp) (const_int 12))) (reg:SI lr))
        ])

      This sequence is used both by the code to support stack unwinding for
      exceptions handlers and the code to generate dwarf2 frame debugging.  */

  par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_regs));
  dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (num_dwarf_regs + 1));
  dwarf_par_index = 1;

  for (i = 0; i <= LAST_ARM_REGNUM; i++)
    {
      if (mask & (1 << i))
	{
	  reg = gen_rtx_REG (SImode, i);

	  XVECEXP (par, 0, 0)
	    = gen_rtx_SET (VOIDmode,
			   gen_rtx_MEM (BLKmode,
					gen_rtx_PRE_DEC (BLKmode,
							 stack_pointer_rtx)),
			   gen_rtx_UNSPEC (BLKmode,
					   gen_rtvec (1, reg),
					   UNSPEC_PUSH_MULT));

	  if (i != PC_REGNUM)
	    {
	      tmp = gen_rtx_SET (VOIDmode,
				 gen_rtx_MEM (SImode, stack_pointer_rtx),
				 reg);
	      RTX_FRAME_RELATED_P (tmp) = 1;
	      XVECEXP (dwarf, 0, dwarf_par_index) = tmp;
	      dwarf_par_index++;
	    }

	  break;
	}
    }

  for (j = 1, i++; j < num_regs; i++)
    {
      if (mask & (1 << i))
	{
	  reg = gen_rtx_REG (SImode, i);

	  XVECEXP (par, 0, j) = gen_rtx_USE (VOIDmode, reg);

	  if (i != PC_REGNUM)
	    {
	      tmp = gen_rtx_SET (VOIDmode,
				 gen_rtx_MEM (SImode,
					      plus_constant (stack_pointer_rtx,
							     4 * j)),
				 reg);
	      RTX_FRAME_RELATED_P (tmp) = 1;
	      XVECEXP (dwarf, 0, dwarf_par_index++) = tmp;
	    }

	  j++;
	}
    }

  par = emit_insn (par);

  tmp = gen_rtx_SET (SImode,
		     stack_pointer_rtx,
		     gen_rtx_PLUS (SImode,
				   stack_pointer_rtx,
				   GEN_INT (-4 * num_regs)));
  RTX_FRAME_RELATED_P (tmp) = 1;
  XVECEXP (dwarf, 0, 0) = tmp;

  REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
				       REG_NOTES (par));
  return par;
}
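
/* For illustration (not part of the original sources): a call such as
   emit_multi_reg_push ((1 << 4) | (1 << 11) | (1 << 14)) pushes r4, fp
   and lr.  The insn body is the UNSPEC/USE parallel shown above, while
   the attached REG_FRAME_RELATED_EXPR note records a single SP
   decrement of -12 followed by the three individual stores.  */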
static rtx
emit_sfm (base_reg, count)
     int base_reg;
     int count;
{
  rtx par;
  rtx dwarf;
  rtx tmp, reg;
  int i;

  par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
  dwarf = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));

  reg = gen_rtx_REG (XFmode, base_reg++);

  XVECEXP (par, 0, 0)
    = gen_rtx_SET (VOIDmode,
		   gen_rtx_MEM (BLKmode,
				gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
		   gen_rtx_UNSPEC (BLKmode,
				   gen_rtvec (1, reg),
				   UNSPEC_PUSH_MULT));
  tmp
    = gen_rtx_SET (VOIDmode,
		   gen_rtx_MEM (XFmode,
				gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
		   reg);
  RTX_FRAME_RELATED_P (tmp) = 1;
  XVECEXP (dwarf, 0, count - 1) = tmp;

  for (i = 1; i < count; i++)
    {
      reg = gen_rtx_REG (XFmode, base_reg++);
      XVECEXP (par, 0, i) = gen_rtx_USE (VOIDmode, reg);

      tmp = gen_rtx_SET (VOIDmode,
			 gen_rtx_MEM (XFmode,
				      gen_rtx_PRE_DEC (BLKmode,
						       stack_pointer_rtx)),
			 reg);
      RTX_FRAME_RELATED_P (tmp) = 1;
      XVECEXP (dwarf, 0, count - i - 1) = tmp;
    }

  par = emit_insn (par);
  REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
				       REG_NOTES (par));
  return par;
}
/* Compute the distance from register FROM to register TO.
   These can be the arg pointer (26), the soft frame pointer (25),
   the stack pointer (13) or the hard frame pointer (11).
   Typical stack layout looks like this:

       old stack pointer -> |    |
                             ----
                            |    | \
                            |    |   saved arguments for
                            |    |   vararg functions
                            |    | /
                              --
   hard FP & arg pointer -> |    | \
                            |    |   stack
                            |    |   frame
                            |    | /
                              --
                            |    | \
                            |    |   call saved
                            |    |   registers
      soft frame pointer -> |    | /
                              --
                            |    | \
                            |    |   local
                            |    |   variables
                            |    | /
                              --
                            |    | \
                            |    |   outgoing
                            |    |   arguments
   current stack pointer -> |    | /
                              --

  For a given function some or all of these stack components
  may not be needed, giving rise to the possibility of
  eliminating some of the registers.

  The values returned by this function must reflect the behaviour
  of arm_expand_prologue() and arm_compute_save_reg_mask().

  The sign of the number returned reflects the direction of stack
  growth, so the values are positive for all eliminations except
  from the soft frame pointer to the hard frame pointer.  */

unsigned int
arm_compute_initial_elimination_offset (from, to)
     unsigned int from;
     unsigned int to;
{
  unsigned int local_vars = (get_frame_size () + 3) & ~3;
  unsigned int outgoing_args = current_function_outgoing_args_size;
  unsigned int stack_frame;
  unsigned int call_saved_registers;
  unsigned long func_type;

  func_type = arm_current_func_type ();

  /* Volatile functions never return, so there is
     no need to save call saved registers.  */
  call_saved_registers = 0;
  if (! IS_VOLATILE (func_type))
    {
      unsigned int reg_mask;
      unsigned int reg;

      /* Make sure that we compute which registers will be saved
	 on the stack using the same algorithm that is used by
	 arm_compute_save_reg_mask().  */
      reg_mask = arm_compute_save_reg0_reg12_mask ();

      /* Now count the number of bits set in save_reg_mask.
	 For each set bit we need 4 bytes of stack space.  */
      while (reg_mask)
	{
	  call_saved_registers += 4;
	  reg_mask = reg_mask & ~ (reg_mask & - reg_mask);
	}

      if (regs_ever_live[LR_REGNUM]
	  /* If a stack frame is going to be created, the LR will
	     be saved as part of that, so we do not need to allow
	     for it here.  */
	  && ! frame_pointer_needed)
	call_saved_registers += 4;

      /* If the hard floating point registers are going to be
	 used then they must be saved on the stack as well.
         Each register occupies 12 bytes of stack space.  */
      for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
	if (regs_ever_live[reg] && ! call_used_regs[reg])
	  call_saved_registers += 12;
    }

  /* The stack frame contains 4 registers - the old frame pointer,
     the old stack pointer, the return address and PC of the start
     of the function.  */
  stack_frame = frame_pointer_needed ? 16 : 0;

  /* OK, now we have enough information to compute the distances.
     There must be an entry in these switch tables for each pair
     of registers in ELIMINABLE_REGS, even if some of the entries
     seem to be redundant or useless.  */
  switch (from)
    {
    case ARG_POINTER_REGNUM:
      switch (to)
	{
	case THUMB_HARD_FRAME_POINTER_REGNUM:
	  return 0;

	case FRAME_POINTER_REGNUM:
	  /* This is the reverse of the soft frame pointer
	     to hard frame pointer elimination below.  */
	  if (call_saved_registers == 0 && stack_frame == 0)
	    return 0;
	  return (call_saved_registers + stack_frame - 4);

	case ARM_HARD_FRAME_POINTER_REGNUM:
	  /* If there is no stack frame then the hard
	     frame pointer and the arg pointer coincide.  */
	  if (stack_frame == 0 && call_saved_registers != 0)
	    return 0;
	  /* FIXME:  Not sure about this.  Maybe we should always return 0 ?  */
	  return (frame_pointer_needed
		  && current_function_needs_context
		  && ! cfun->machine->uses_anonymous_args) ? 4 : 0;

	case STACK_POINTER_REGNUM:
	  /* If nothing has been pushed on the stack at all
	     then this will return -4.  This *is* correct!  */
	  return call_saved_registers + stack_frame + local_vars + outgoing_args - 4;

	default:
	  abort ();
	}
      break;

    case FRAME_POINTER_REGNUM:
      switch (to)
	{
	case THUMB_HARD_FRAME_POINTER_REGNUM:
	  return 0;

	case ARM_HARD_FRAME_POINTER_REGNUM:
	  /* The hard frame pointer points to the top entry in the
	     stack frame.  The soft frame pointer to the bottom entry
	     in the stack frame.  If there is no stack frame at all,
	     then they are identical.  */
	  if (call_saved_registers == 0 && stack_frame == 0)
	    return 0;
	  return - (call_saved_registers + stack_frame - 4);

	case STACK_POINTER_REGNUM:
	  return local_vars + outgoing_args;

	default:
	  abort ();
	}
      break;

    default:
      /* You cannot eliminate from the stack pointer.
	 In theory you could eliminate from the hard frame
	 pointer to the stack pointer, but this will never
	 happen, since if a stack frame is not needed the
	 hard frame pointer will never be used.  */
      abort ();
    }
}
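
/* For illustration (not part of the original sources): the while loop
   above counts set bits by repeatedly clearing the lowest one.  With
   reg_mask = 0x90 (r4 and r7 saved):

       0x90 & -0x90 = 0x10,  mask becomes 0x80,  count = 4
       0x80 & -0x80 = 0x80,  mask becomes 0x00,  count = 8

   so call_saved_registers = 8 bytes, i.e. two 4-byte save slots.  */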
/* Generate the prologue instructions for entry into an ARM function.  */

void
arm_expand_prologue ()
{
  int reg;
  rtx amount;
  rtx insn;
  rtx ip_rtx;
  unsigned long live_regs_mask;
  unsigned long func_type;
  int fp_offset = 0;
  int saved_pretend_args = 0;
  unsigned int args_to_push;

  func_type = arm_current_func_type ();

  /* Naked functions don't have prologues.  */
  if (IS_NAKED (func_type))
    return;

  /* Make a copy of c_f_p_a_s as we may need to modify it locally.  */
  args_to_push = current_function_pretend_args_size;

  /* Compute which register we will have to save onto the stack.  */
  live_regs_mask = arm_compute_save_reg_mask ();

  ip_rtx = gen_rtx_REG (SImode, IP_REGNUM);

  if (frame_pointer_needed)
    {
      if (IS_INTERRUPT (func_type))
	{
	  /* Interrupt functions must not corrupt any registers.
	     Creating a frame pointer however, corrupts the IP
	     register, so we must push it first.  */
	  insn = emit_multi_reg_push (1 << IP_REGNUM);

	  /* Do not set RTX_FRAME_RELATED_P on this insn.
	     The dwarf stack unwinding code only wants to see one
	     stack decrement per function, and this is not it.  If
	     this instruction is labeled as being part of the frame
	     creation sequence then dwarf2out_frame_debug_expr will
	     abort when it encounters the assignment of IP to FP
	     later on, since the use of SP here establishes SP as
	     the CFA register and not IP.

	     Anyway this instruction is not really part of the stack
	     frame creation although it is part of the prologue.  */
	}
      else if (IS_NESTED (func_type))
	{
	  /* The Static chain register is the same as the IP register
	     used as a scratch register during stack frame creation.
	     To get around this need to find somewhere to store IP
	     whilst the frame is being created.  We try the following
	     places in order:

	       1. The last argument register.
	       2. A slot on the stack above the frame.  (This only
	          works if the function is not a varargs function).
	       3. Register r3, after pushing the argument registers
	          onto the stack.

	     Note - we only need to tell the dwarf2 backend about the SP
	     adjustment in the second variant; the static chain register
	     doesn't need to be unwound, as it doesn't contain a value
	     inherited from the caller.  */

	  if (regs_ever_live[3] == 0)
	    {
	      insn = gen_rtx_REG (SImode, 3);
	      insn = gen_rtx_SET (SImode, insn, ip_rtx);
	      insn = emit_insn (insn);
	    }
	  else if (args_to_push == 0)
	    {
	      rtx dwarf;
	      insn = gen_rtx_PRE_DEC (SImode, stack_pointer_rtx);
	      insn = gen_rtx_MEM (SImode, insn);
	      insn = gen_rtx_SET (VOIDmode, insn, ip_rtx);
	      insn = emit_insn (insn);

	      fp_offset = 4;

	      /* Just tell the dwarf backend that we adjusted SP.  */
	      dwarf = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
				   gen_rtx_PLUS (SImode, stack_pointer_rtx,
						 GEN_INT (-fp_offset)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
						    dwarf, REG_NOTES (insn));
	    }
	  else
	    {
	      /* Store the args on the stack.  */
	      if (cfun->machine->uses_anonymous_args)
		insn = emit_multi_reg_push
		  ((0xf0 >> (args_to_push / 4)) & 0xf);
	      else
		insn = emit_insn
		  (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
			       GEN_INT (- args_to_push)));

	      RTX_FRAME_RELATED_P (insn) = 1;

	      saved_pretend_args = 1;
	      fp_offset = args_to_push;
	      args_to_push = 0;

	      /* Now reuse r3 to preserve IP.  */
	      insn = gen_rtx_REG (SImode, 3);
	      insn = gen_rtx_SET (SImode, insn, ip_rtx);
	      (void) emit_insn (insn);
	    }
	}

      if (fp_offset)
	{
	  insn = gen_rtx_PLUS (SImode, stack_pointer_rtx, GEN_INT (fp_offset));
	  insn = gen_rtx_SET  (SImode, ip_rtx, insn);
	}
      else
	insn = gen_movsi (ip_rtx, stack_pointer_rtx);

      insn = emit_insn (insn);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  if (args_to_push)
    {
      /* Push the argument registers, or reserve space for them.  */
      if (cfun->machine->uses_anonymous_args)
	insn = emit_multi_reg_push
	  ((0xf0 >> (args_to_push / 4)) & 0xf);
      else
	insn = emit_insn
	  (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
		       GEN_INT (- args_to_push)));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* If this is an interrupt service routine, and the link register is
     going to be pushed, subtracting four now will mean that the
     function return can be done with a single instruction.  */
  if ((func_type == ARM_FT_ISR || func_type == ARM_FT_FIQ)
      && (live_regs_mask & (1 << LR_REGNUM)) != 0)
    emit_insn (gen_rtx_SET (SImode,
			    gen_rtx_REG (SImode, LR_REGNUM),
			    gen_rtx_PLUS (SImode,
					  gen_rtx_REG (SImode, LR_REGNUM),
					  GEN_INT (-4))));

  if (live_regs_mask)
    {
      insn = emit_multi_reg_push (live_regs_mask);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  if (! IS_VOLATILE (func_type))
    {
      /* Save any floating point call-saved registers used by this function.  */
      if (arm_fpu_arch == FP_SOFT2)
	{
	  for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
	    if (regs_ever_live[reg] && !call_used_regs[reg])
	      {
		insn = gen_rtx_PRE_DEC (XFmode, stack_pointer_rtx);
		insn = gen_rtx_MEM (XFmode, insn);
		insn = emit_insn (gen_rtx_SET (VOIDmode, insn,
					       gen_rtx_REG (XFmode, reg)));
		RTX_FRAME_RELATED_P (insn) = 1;
	      }
	}
      else
	{
	  int start_reg = LAST_ARM_FP_REGNUM;

	  for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
	    {
	      if (regs_ever_live[reg] && !call_used_regs[reg])
		{
		  if (start_reg - reg == 3)
		    {
		      insn = emit_sfm (reg, 4);
		      RTX_FRAME_RELATED_P (insn) = 1;
		      start_reg = reg - 1;
		    }
		}
	      else
		{
		  if (start_reg != reg)
		    {
		      insn = emit_sfm (reg + 1, start_reg - reg);
		      RTX_FRAME_RELATED_P (insn) = 1;
		    }
		  start_reg = reg - 1;
		}
	    }

	  if (start_reg != reg)
	    {
	      insn = emit_sfm (reg + 1, start_reg - reg);
	      RTX_FRAME_RELATED_P (insn) = 1;
	    }
	}
    }

  if (frame_pointer_needed)
    {
      /* Create the new frame pointer.  */
      insn = GEN_INT (-(4 + args_to_push + fp_offset));
      insn = emit_insn (gen_addsi3 (hard_frame_pointer_rtx, ip_rtx, insn));
      RTX_FRAME_RELATED_P (insn) = 1;

      if (IS_NESTED (func_type))
	{
	  /* Recover the static chain register.  */
	  if (regs_ever_live[3] == 0
	      || saved_pretend_args)
	    insn = gen_rtx_REG (SImode, 3);
	  else /* if (current_function_pretend_args_size == 0) */
	    {
	      insn = gen_rtx_PLUS (SImode, hard_frame_pointer_rtx, GEN_INT (4));
	      insn = gen_rtx_MEM (SImode, insn);
	    }

	  emit_insn (gen_rtx_SET (SImode, ip_rtx, insn));
	  /* Add a USE to stop propagate_one_insn() from barfing.  */
	  emit_insn (gen_prologue_use (ip_rtx));
	}
    }

  amount = GEN_INT (-(get_frame_size ()
		      + current_function_outgoing_args_size));

  if (amount != const0_rtx)
    {
      /* This add can produce multiple insns for a large constant, so we
	 need to get tricky.  */
      rtx last = get_last_insn ();
      insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
				    amount));
      do
	{
	  last = last ? NEXT_INSN (last) : get_insns ();
	  RTX_FRAME_RELATED_P (last) = 1;
	}
      while (last != insn);

      /* If the frame pointer is needed, emit a special barrier that
	 will prevent the scheduler from moving stores to the frame
	 before the stack adjustment.  */
      if (frame_pointer_needed)
	insn = emit_insn (gen_stack_tie (stack_pointer_rtx,
					 hard_frame_pointer_rtx));
    }

  /* If we are profiling, make sure no instructions are scheduled before
     the call to mcount.  Similarly if the user has requested no
     scheduling in the prolog.  */
  if (current_function_profile || TARGET_NO_SCHED_PRO)
    emit_insn (gen_blockage ());

  /* If the link register is being kept alive, with the return address in it,
     then make sure that it does not get reused by the ce2 pass.  */
  if ((live_regs_mask & (1 << LR_REGNUM)) == 0)
    {
      emit_insn (gen_prologue_use (gen_rtx_REG (SImode, LR_REGNUM)));
      cfun->machine->lr_save_eliminated = 1;
    }
}
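
/* For illustration (not part of the original sources): for a typical
   frame-pointer function the RTL emitted above corresponds to the
   classic APCS entry sequence

       mov     ip, sp
       stmfd   sp!, {fp, ip, lr, pc}
       sub     fp, ip, #4
       sub     sp, sp, #<locals + outgoing args>

   In the nested case, IP (which also carries the static chain) is
   first parked in r3 or in a stack slot so it survives frame
   creation, then recovered after the frame pointer is set up.  */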
/* If CODE is 'd', then the X is a condition operand and the instruction
   should only be executed if the condition is true.
   if CODE is 'D', then the X is a condition operand and the instruction
   should only be executed if the condition is false: however, if the mode
   of the comparison is CCFPEmode, then always execute the instruction -- we
   do this because in these circumstances !GE does not necessarily imply LT;
   in these cases the instruction pattern will take care to make sure that
   an instruction containing %d will follow, thereby undoing the effects of
   doing this instruction unconditionally.
   If CODE is 'N' then X is a floating point operand that must be negated
   before output.
   If CODE is 'B' then output a bitwise inverted value of X (a const int).
   If X is a REG and CODE is `M', output a ldm/stm style multi-reg.  */

void
arm_print_operand (stream, x, code)
     FILE * stream;
     rtx x;
     int code;
{
  switch (code)
    {
    case '@':
      fputs (ASM_COMMENT_START, stream);
      return;

    case '_':
      fputs (user_label_prefix, stream);
      return;

    case '|':
      fputs (REGISTER_PREFIX, stream);
      return;

    case '?':
      if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
	{
	  if (TARGET_THUMB || current_insn_predicate != NULL)
	    abort ();

	  fputs (arm_condition_codes[arm_current_cc], stream);
	}
      else if (current_insn_predicate)
	{
	  enum arm_cond_code code;

	  if (TARGET_THUMB)
	    abort ();

	  code = get_arm_condition_code (current_insn_predicate);
	  fputs (arm_condition_codes[code], stream);
	}
      return;

    case 'N':
      {
	REAL_VALUE_TYPE r;
	REAL_VALUE_FROM_CONST_DOUBLE (r, x);
	r = REAL_VALUE_NEGATE (r);
	fprintf (stream, "%s", fp_const_from_val (&r));
      }
      return;

    case 'B':
      if (GET_CODE (x) == CONST_INT)
	{
	  HOST_WIDE_INT val;
	  val = ARM_SIGN_EXTEND (~INTVAL (x));
	  fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
	}
      else
	{
	  putc ('~', stream);
	  output_addr_const (stream, x);
	}
      return;

    case 'i':
      fprintf (stream, "%s", arithmetic_instr (x, 1));
      return;

    case 'I':
      fprintf (stream, "%s", arithmetic_instr (x, 0));
      return;

    case 'S':
      {
	HOST_WIDE_INT val;
	const char * shift = shift_op (x, &val);

	if (shift)
	  {
	    fprintf (stream, ", %s ", shift_op (x, &val));
	    if (val == -1)
	      arm_print_operand (stream, XEXP (x, 1), 0);
	    else
	      {
		fputc ('#', stream);
		fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
	      }
	  }
      }
      return;

      /* An explanation of the 'Q', 'R' and 'H' register operands:

	 In a pair of registers containing a DI or DF value the 'Q'
	 operand returns the register number of the register containing
	 the least significant part of the value.  The 'R' operand returns
	 the register number of the register containing the most
	 significant part of the value.

	 The 'H' operand returns the higher of the two register numbers.
	 On a run where WORDS_BIG_ENDIAN is true the 'H' operand is the
	 same as the 'Q' operand, since the most significant part of the
	 value is held in the lower number register.  The reverse is true
	 on systems where WORDS_BIG_ENDIAN is false.

	 The purpose of these operands is to distinguish between cases
	 where the endian-ness of the values is important (for example
	 when they are added together), and cases where the endian-ness
	 is irrelevant, but the order of register operations is important.
	 For example when loading a value from memory into a register
	 pair, the endian-ness does not matter.  Provided that the value
	 from the lower memory address is put into the lower numbered
	 register, and the value from the higher address is put into the
	 higher numbered register, the load will work regardless of whether
	 the value being loaded is big-wordian or little-wordian.  The
	 order of the two register loads can matter however, if the address
	 of the memory location is actually held in one of the registers
	 being overwritten by the load.  */
    case 'Q':
      if (REGNO (x) > LAST_ARM_REGNUM)
	abort ();
      asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 1 : 0));
      return;

    case 'R':
      if (REGNO (x) > LAST_ARM_REGNUM)
	abort ();
      asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 0 : 1));
      return;

    case 'H':
      if (REGNO (x) > LAST_ARM_REGNUM)
	abort ();
      asm_fprintf (stream, "%r", REGNO (x) + 1);
      return;

    case 'm':
      asm_fprintf (stream, "%r",
		   GET_CODE (XEXP (x, 0)) == REG
		   ? REGNO (XEXP (x, 0)) : REGNO (XEXP (XEXP (x, 0), 0)));
      return;

    case 'M':
      asm_fprintf (stream, "{%r-%r}",
		   REGNO (x),
		   REGNO (x) + ARM_NUM_REGS (GET_MODE (x)) - 1);
      return;

    case 'd':
      /* CONST_TRUE_RTX means always -- that's the default.  */
      if (x == const_true_rtx)
	return;

      if (TARGET_ARM)
	fputs (arm_condition_codes[get_arm_condition_code (x)],
	       stream);
      else
	fputs (thumb_condition_code (x, 0), stream);
      return;

    case 'D':
      /* CONST_TRUE_RTX means not always -- ie never.  We shouldn't ever
	 want to do that.  */
      if (x == const_true_rtx)
	abort ();

      if (TARGET_ARM)
	fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
				   (get_arm_condition_code (x))],
	       stream);
      else
	fputs (thumb_condition_code (x, 1), stream);
      return;

    default:
      if (x == 0)
	abort ();

      if (GET_CODE (x) == REG)
	asm_fprintf (stream, "%r", REGNO (x));
      else if (GET_CODE (x) == MEM)
	{
	  output_memory_reference_mode = GET_MODE (x);
	  output_address (XEXP (x, 0));
	}
      else if (GET_CODE (x) == CONST_DOUBLE)
	fprintf (stream, "#%s", fp_immediate_constant (x));
      else if (GET_CODE (x) == NEG)
	abort (); /* This should never happen now.  */
      else
	{
	  fputc ('#', stream);
	  output_addr_const (stream, x);
	}
    }
}
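
/* For illustration (not part of the original sources): in a template
   such as "add%?\t%0, %1, %2" the '%?' handled above expands to the
   current condition code once the ccfsm has conditionalised the
   instruction, so a skipped-branch body may be emitted as

       addeq   r0, r1, r2

   instead of a conditional branch around an unconditional add.  */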
#ifndef AOF_ASSEMBLER
/* Target hook for assembling integer objects.  The ARM version needs to
   handle word-sized values specially.  */

static bool
arm_assemble_integer (x, size, aligned_p)
     rtx x;
     unsigned int size;
     int aligned_p;
{
  if (size == UNITS_PER_WORD && aligned_p)
    {
      fputs ("\t.word\t", asm_out_file);
      output_addr_const (asm_out_file, x);

      /* Mark symbols as position independent.  We only do this in the
	 .text segment, not in the .data segment.  */
      if (NEED_GOT_RELOC && flag_pic && making_const_table &&
	  (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF))
	{
	  if (GET_CODE (x) == SYMBOL_REF
	      && (CONSTANT_POOL_ADDRESS_P (x)
		  || ENCODED_SHORT_CALL_ATTR_P (XSTR (x, 0))))
	    fputs ("(GOTOFF)", asm_out_file);
	  else if (GET_CODE (x) == LABEL_REF)
	    fputs ("(GOTOFF)", asm_out_file);
	  else
	    fputs ("(GOT)", asm_out_file);
	}
      fputc ('\n', asm_out_file);
      return true;
    }

  return default_assemble_integer (x, size, aligned_p);
}
#endif
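
/* For illustration (not part of the original sources): with -fPIC a
   constant-table entry naming a label or constant-pool symbol is
   emitted as

       .word   .LC0(GOTOFF)

   while a reference to an ordinary global symbol becomes

       .word   foo(GOT)

   matching the GOTOFF/GOT suffixes chosen above.  */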
/* A finite state machine takes care of noticing whether or not instructions
   can be conditionally executed, and thus decrease execution time and code
   size by deleting branch instructions.  The fsm is controlled by
   final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE.  */

/* The states of the fsm controlling condition codes are:
   0: normal, do nothing special
   1: make ASM_OUTPUT_OPCODE not output this instruction
   2: make ASM_OUTPUT_OPCODE not output this instruction
   3: make instructions conditional
   4: make instructions conditional

   State transitions (state->state by whom under condition):
   0 -> 1 final_prescan_insn if the `target' is a label
   0 -> 2 final_prescan_insn if the `target' is an unconditional branch
   1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
   2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
   3 -> 0 ASM_OUTPUT_INTERNAL_LABEL if the `target' label is reached
          (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
   4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
          (the target insn is arm_target_insn).

   If the jump clobbers the conditions then we use states 2 and 4.

   A similar thing can be done with conditional return insns.

   XXX In case the `target' is an unconditional branch, this conditionalising
   of the instructions always reduces code size, but not always execution
   time.  But then, I want to reduce the code size to somewhere near what
   /bin/cc produces.  */
/* Returns the index of the ARM condition code string in
   `arm_condition_codes'.  COMPARISON should be an rtx like
   `(eq (...) (...))'.  */

static enum arm_cond_code
get_arm_condition_code (comparison)
     rtx comparison;
{
  enum machine_mode mode = GET_MODE (XEXP (comparison, 0));
  int code;
  enum rtx_code comp_code = GET_CODE (comparison);

  if (GET_MODE_CLASS (mode) != MODE_CC)
    mode = SELECT_CC_MODE (comp_code, XEXP (comparison, 0),
			   XEXP (comparison, 1));

  switch (mode)
    {
    case CC_DNEmode: code = ARM_NE; goto dominance;
    case CC_DEQmode: code = ARM_EQ; goto dominance;
    case CC_DGEmode: code = ARM_GE; goto dominance;
    case CC_DGTmode: code = ARM_GT; goto dominance;
    case CC_DLEmode: code = ARM_LE; goto dominance;
    case CC_DLTmode: code = ARM_LT; goto dominance;
    case CC_DGEUmode: code = ARM_CS; goto dominance;
    case CC_DGTUmode: code = ARM_HI; goto dominance;
    case CC_DLEUmode: code = ARM_LS; goto dominance;
    case CC_DLTUmode: code = ARM_CC;

    dominance:
      if (comp_code != EQ && comp_code != NE)
	abort ();

      if (comp_code == EQ)
	return ARM_INVERSE_CONDITION_CODE (code);
      return code;

    case CC_NOOVmode:
      switch (comp_code)
	{
	case NE: return ARM_NE;
	case EQ: return ARM_EQ;
	case GE: return ARM_PL;
	case LT: return ARM_MI;
	default: abort ();
	}

    case CC_Zmode:
      switch (comp_code)
	{
	case NE: return ARM_NE;
	case EQ: return ARM_EQ;
	default: abort ();
	}

    case CCFPEmode:
    case CCFPmode:
      /* These encodings assume that AC=1 in the FPA system control
	 byte.  This allows us to handle all cases except UNEQ and
	 LTGT.  */
      switch (comp_code)
	{
	case GE: return ARM_GE;
	case GT: return ARM_GT;
	case LE: return ARM_LS;
	case LT: return ARM_MI;
	case NE: return ARM_NE;
	case EQ: return ARM_EQ;
	case ORDERED: return ARM_VC;
	case UNORDERED: return ARM_VS;
	case UNLT: return ARM_LT;
	case UNLE: return ARM_LE;
	case UNGT: return ARM_HI;
	case UNGE: return ARM_PL;
	/* UNEQ and LTGT do not have a representation.  */
	case UNEQ: /* Fall through.  */
	case LTGT: /* Fall through.  */
	default: abort ();
	}

    case CC_SWPmode:
      switch (comp_code)
	{
	case NE: return ARM_NE;
	case EQ: return ARM_EQ;
	case GE: return ARM_LE;
	case GT: return ARM_LT;
	case LE: return ARM_GE;
	case LT: return ARM_GT;
	case GEU: return ARM_LS;
	case GTU: return ARM_CC;
	case LEU: return ARM_CS;
	case LTU: return ARM_HI;
	default: abort ();
	}

    case CC_Cmode:
      switch (comp_code)
	{
	case LTU: return ARM_CS;
	case GEU: return ARM_CC;
	default: abort ();
	}

    case CCmode:
      switch (comp_code)
	{
	case NE: return ARM_NE;
	case EQ: return ARM_EQ;
	case GE: return ARM_GE;
	case GT: return ARM_GT;
	case LE: return ARM_LE;
	case LT: return ARM_LT;
	case GEU: return ARM_CS;
	case GTU: return ARM_HI;
	case LEU: return ARM_LS;
	case LTU: return ARM_CC;
	default: abort ();
	}

    default: abort ();
    }

  abort ();
}
void
arm_final_prescan_insn (insn)
     rtx insn;
{
  /* BODY will hold the body of INSN.  */
  rtx body = PATTERN (insn);

  /* This will be 1 if trying to repeat the trick, and things need to be
     reversed if it appears to fail.  */
  int reverse = 0;

  /* JUMP_CLOBBERS will be one implies that the conditions if a branch is
     taken are clobbered, even if the rtl suggests otherwise.  It also
     means that we have to grub around within the jump expression to find
     out what the conditions are when the jump isn't taken.  */
  int jump_clobbers = 0;

  /* If we start with a return insn, we only succeed if we find another one.  */
  int seeking_return = 0;

  /* START_INSN will hold the insn from where we start looking.  This is the
     first insn after the following code_label if REVERSE is true.  */
  rtx start_insn = insn;

  /* If in state 4, check if the target branch is reached, in order to
     change back to state 0.  */
  if (arm_ccfsm_state == 4)
    {
      if (insn == arm_target_insn)
	{
	  arm_target_insn = NULL;
	  arm_ccfsm_state = 0;
	}
      return;
    }

  /* If in state 3, it is possible to repeat the trick, if this insn is an
     unconditional branch to a label, and immediately following this branch
     is the previous target label which is only used once, and the label this
     branch jumps to is not too far off.  */
  if (arm_ccfsm_state == 3)
    {
      if (simplejump_p (insn))
	{
	  start_insn = next_nonnote_insn (start_insn);
	  if (GET_CODE (start_insn) == BARRIER)
	    {
	      /* XXX Isn't this always a barrier?  */
	      start_insn = next_nonnote_insn (start_insn);
	    }
	  if (GET_CODE (start_insn) == CODE_LABEL
	      && CODE_LABEL_NUMBER (start_insn) == arm_target_label
	      && LABEL_NUSES (start_insn) == 1)
	    reverse = TRUE;
	  else
	    return;
	}
      else if (GET_CODE (body) == RETURN)
	{
	  start_insn = next_nonnote_insn (start_insn);
	  if (GET_CODE (start_insn) == BARRIER)
	    start_insn = next_nonnote_insn (start_insn);
	  if (GET_CODE (start_insn) == CODE_LABEL
	      && CODE_LABEL_NUMBER (start_insn) == arm_target_label
	      && LABEL_NUSES (start_insn) == 1)
	    {
	      reverse = TRUE;
	      seeking_return = 1;
	    }
	  else
	    return;
	}
      else
	return;
    }

  if (arm_ccfsm_state != 0 && !reverse)
    abort ();
  if (GET_CODE (insn) != JUMP_INSN)
    return;

  /* This jump might be paralleled with a clobber of the condition codes
     the jump should always come first */
  if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
    body = XVECEXP (body, 0, 0);

#if 0
  /* If this is a conditional return then we don't want to know */
  if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
      && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
      && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
	  || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
    return;
#endif

  if (reverse
      || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
	  && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
    {
      int insns_skipped;
      int fail = FALSE, succeed = FALSE;
      /* Flag which part of the IF_THEN_ELSE is the LABEL_REF.  */
      int then_not_else = TRUE;
      rtx this_insn = start_insn, label = 0;

      /* If the jump cannot be done with one instruction, we cannot
	 conditionally execute the instruction in the inverse case.  */
      if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
	{
	  jump_clobbers = 1;
	  return;
	}

      /* Register the insn jumped to.  */
      if (reverse)
	{
	  if (!seeking_return)
	    label = XEXP (SET_SRC (body), 0);
	}
      else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
	label = XEXP (XEXP (SET_SRC (body), 1), 0);
      else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
	{
	  label = XEXP (XEXP (SET_SRC (body), 2), 0);
	  then_not_else = FALSE;
	}
      else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
	seeking_return = 1;
      else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
	{
	  seeking_return = 1;
	  then_not_else = FALSE;
	}
      else
	abort ();

      /* See how many insns this branch skips, and what kind of insns.  If all
	 insns are okay, and the label or unconditional branch to the same
	 label is not too far away, succeed.  */
      for (insns_skipped = 0;
	   !fail && !succeed && insns_skipped++ < max_insns_skipped;)
	{
	  rtx scanbody;

	  this_insn = next_nonnote_insn (this_insn);
	  if (!this_insn)
	    break;

	  switch (GET_CODE (this_insn))
	    {
	    case CODE_LABEL:
	      /* Succeed if it is the target label, otherwise fail since
		 control falls in from somewhere else.  */
	      if (this_insn == label)
		{
		  if (jump_clobbers)
		    {
		      arm_ccfsm_state = 2;
		      this_insn = next_nonnote_insn (this_insn);
		    }
		  else
		    arm_ccfsm_state = 1;
		  succeed = TRUE;
		}
	      else
		fail = TRUE;
	      break;

	    case BARRIER:
	      /* Succeed if the following insn is the target label.
		 Otherwise fail.
		 If return insns are used then the last insn in a function
		 will be a barrier.  */
	      this_insn = next_nonnote_insn (this_insn);
	      if (this_insn && this_insn == label)
		{
		  if (jump_clobbers)
		    {
		      arm_ccfsm_state = 2;
		      this_insn = next_nonnote_insn (this_insn);
		    }
		  else
		    arm_ccfsm_state = 1;
		  succeed = TRUE;
		}
	      else
		fail = TRUE;
	      break;

	    case CALL_INSN:
	      /* If using 32-bit addresses the cc is not preserved over
		 calls.  */
	      if (TARGET_APCS_32)
		{
		  /* Succeed if the following insn is the target label,
		     or if the following two insns are a barrier and
		     the target label.  */
		  this_insn = next_nonnote_insn (this_insn);
		  if (this_insn && GET_CODE (this_insn) == BARRIER)
		    this_insn = next_nonnote_insn (this_insn);

		  if (this_insn && this_insn == label
		      && insns_skipped < max_insns_skipped)
		    {
		      if (jump_clobbers)
			{
			  arm_ccfsm_state = 2;
			  this_insn = next_nonnote_insn (this_insn);
			}
		      else
			arm_ccfsm_state = 1;
		      succeed = TRUE;
		    }
		  else
		    fail = TRUE;
		}
	      break;

	    case JUMP_INSN:
	      /* If this is an unconditional branch to the same label, succeed.
		 If it is to another label, do nothing.  If it is conditional,
		 fail.  */
	      /* XXX Probably, the tests for SET and the PC are unnecessary.  */

	      scanbody = PATTERN (this_insn);
	      if (GET_CODE (scanbody) == SET
		  && GET_CODE (SET_DEST (scanbody)) == PC)
		{
		  if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
		      && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
		    {
		      arm_ccfsm_state = 2;
		      succeed = TRUE;
		    }
		  else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
		    fail = TRUE;
		}
	      /* Fail if a conditional return is undesirable (eg on a
		 StrongARM), but still allow this if optimizing for size.  */
	      else if (GET_CODE (scanbody) == RETURN
		       && !use_return_insn (TRUE)
		       && !optimize_size)
		fail = TRUE;
	      else if (GET_CODE (scanbody) == RETURN
		       && seeking_return)
		{
		  arm_ccfsm_state = 2;
		  succeed = TRUE;
		}
	      else if (GET_CODE (scanbody) == PARALLEL)
		{
		  switch (get_attr_conds (this_insn))
		    {
		    case CONDS_NOCOND:
		      break;
		    default:
		      fail = TRUE;
		      break;
		    }
		}
	      else
		fail = TRUE;	/* Unrecognized jump (eg epilogue).  */

	      break;

	    case INSN:
	      /* Instructions using or affecting the condition codes make it
		 fail.  */
	      scanbody = PATTERN (this_insn);
	      if (!(GET_CODE (scanbody) == SET
		    || GET_CODE (scanbody) == PARALLEL)
		  || get_attr_conds (this_insn) != CONDS_NOCOND)
		fail = TRUE;
	      break;

	    default:
	      break;
	    }
	}

      if (succeed)
	{
	  if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
	    arm_target_label = CODE_LABEL_NUMBER (label);
	  else if (seeking_return || arm_ccfsm_state == 2)
	    {
	      while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
		{
		  this_insn = next_nonnote_insn (this_insn);
		  if (this_insn && (GET_CODE (this_insn) == BARRIER
				    || GET_CODE (this_insn) == CODE_LABEL))
		    abort ();
		}
	      if (!this_insn)
		{
		  /* Oh, dear! we ran off the end.. give up.  */
		  recog (PATTERN (insn), insn, NULL);
		  arm_ccfsm_state = 0;
		  arm_target_insn = NULL;
		  return;
		}
	      arm_target_insn = this_insn;
	    }
	  else
	    abort ();

	  if (jump_clobbers)
	    {
	      if (reverse)
		abort ();
	      arm_current_cc =
		get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
							  0), 0), 1));
	      if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
		arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
	      if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
		arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
	    }
	  else
	    {
	      /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
		 what it was.  */
	      if (!reverse)
		arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
							       0));
	    }

	  if (reverse || then_not_else)
	    arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
	}

      /* Restore recog_data (getting the attributes of other insns can
	 destroy this array, but final.c assumes that it remains intact
	 across this call; since the insn has been recognized already we
	 call recog direct).  */
      recog (PATTERN (insn), insn, NULL);
    }
}
/* Returns true if REGNO is a valid register
   for holding a quantity of type MODE.  */

int
arm_hard_regno_mode_ok (regno, mode)
     unsigned int regno;
     enum machine_mode mode;
{
  if (GET_MODE_CLASS (mode) == MODE_CC)
    return regno == CC_REGNUM;

  if (TARGET_THUMB)
    /* For the Thumb we only allow values bigger than SImode in
       registers 0 - 6, so that there is always a second low
       register available to hold the upper part of the value.
       We probably ought to ensure that the register is the
       start of an even numbered register pair.  */
    return (ARM_NUM_REGS (mode) < 2) || (regno < LAST_LO_REGNUM);

  if (regno <= LAST_ARM_REGNUM)
    /* We allow any value to be stored in the general registers.  */
    return 1;

  if (   regno == FRAME_POINTER_REGNUM
      || regno == ARG_POINTER_REGNUM)
    /* We only allow integers in the fake hard registers.  */
    return GET_MODE_CLASS (mode) == MODE_INT;

  /* The only registers left are the FPU registers
     which we only allow to hold FP values.  */
  return GET_MODE_CLASS (mode) == MODE_FLOAT
    && regno >= FIRST_ARM_FP_REGNUM
    && regno <= LAST_ARM_FP_REGNUM;
}
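
/* For illustration (not part of the original sources): under the rule
   above a DImode value (ARM_NUM_REGS == 2) may live in any ARM core
   register, but on Thumb it is restricted to r0-r6, so the second word
   can always land in another low register.  */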
enum reg_class
arm_regno_class (regno)
     int regno;
{
  if (TARGET_THUMB)
    {
      if (regno == STACK_POINTER_REGNUM)
	return STACK_REG;
      if (regno == CC_REGNUM)
	return CC_REG;
      if (regno < 8)
	return LO_REGS;
      return HI_REGS;
    }

  if (   regno <= LAST_ARM_REGNUM
      || regno == FRAME_POINTER_REGNUM
      || regno == ARG_POINTER_REGNUM)
    return GENERAL_REGS;

  if (regno == CC_REGNUM)
    return NO_REGS;

  return FPU_REGS;
}
/* Handle a special case when computing the offset
   of an argument from the frame pointer.  */

int
arm_debugger_arg_offset (value, addr)
     int value;
     rtx addr;
{
  rtx insn;

  /* We are only interested if dbxout_parms() failed to compute the offset.  */
  if (value != 0)
    return 0;

  /* We can only cope with the case where the address is held in a register.  */
  if (GET_CODE (addr) != REG)
    return 0;

  /* If we are using the frame pointer to point at the argument, then
     an offset of 0 is correct.  */
  if (REGNO (addr) == (unsigned) HARD_FRAME_POINTER_REGNUM)
    return 0;

  /* If we are using the stack pointer to point at the
     argument, then an offset of 0 is correct.  */
  if ((TARGET_THUMB || !frame_pointer_needed)
      && REGNO (addr) == SP_REGNUM)
    return 0;

  /* Oh dear.  The argument is pointed to by a register rather
     than being held in a register, or being stored at a known
     offset from the frame pointer.  Since GDB only understands
     those two kinds of argument we must translate the address
     held in the register into an offset from the frame pointer.
     We do this by searching through the insns for the function
     looking to see where this register gets its value.  If the
     register is initialised from the frame pointer plus an offset
     then we are in luck and we can continue, otherwise we give up.

     This code is exercised by producing debugging information
     for a function with arguments like this:

           double func (double a, double b, int c, double d) {return d;}

     Without this code the stab for parameter 'd' will be set to
     an offset of 0 from the frame pointer, rather than 8.  */

  /* The if() statement says:

     If the insn is a normal instruction
     and if the insn is setting the value in a register
     and if the register being set is the register holding the address of the argument
     and if the address is computed by an addition
     that involves adding to a register
     which is the frame pointer
     a constant integer

     then...  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (   GET_CODE (insn) == INSN
	  && GET_CODE (PATTERN (insn)) == SET
	  && REGNO    (XEXP (PATTERN (insn), 0)) == REGNO (addr)
	  && GET_CODE (XEXP (PATTERN (insn), 1)) == PLUS
	  && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 0)) == REG
	  && REGNO    (XEXP (XEXP (PATTERN (insn), 1), 0)) == (unsigned) HARD_FRAME_POINTER_REGNUM
	  && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 1)) == CONST_INT
	  )
	{
	  value = INTVAL (XEXP (XEXP (PATTERN (insn), 1), 1));

	  break;
	}
    }

  if (value == 0)
    {
      warning ("unable to compute real location of stacked parameter");
      value = 8; /* XXX magic hack */
    }

  return value;
}
#define def_builtin(NAME, TYPE, CODE) \
  builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, NULL, NULL_TREE)

void
arm_init_builtins ()
{
  tree endlink = void_list_node;
  tree int_endlink = tree_cons (NULL_TREE, integer_type_node, endlink);
  tree pchar_type_node = build_pointer_type (char_type_node);

  tree int_ftype_int, void_ftype_pchar;

  /* void func (char *) */
  void_ftype_pchar
    = build_function_type_list (void_type_node, pchar_type_node, NULL_TREE);

  /* int func (int) */
  int_ftype_int
    = build_function_type (integer_type_node, int_endlink);

  /* Initialize arm V5 builtins.  */
  if (arm_arch5)
    def_builtin ("__builtin_clz", int_ftype_int, ARM_BUILTIN_CLZ);
}
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

rtx
arm_expand_builtin (exp, target, subtarget, mode, ignore)
     tree exp;
     rtx target;
     rtx subtarget ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
     int ignore ATTRIBUTE_UNUSED;
{
  enum insn_code icode;
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg0;
  rtx op0, pat;
  enum machine_mode tmode, mode0;
  int fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    default:
      break;

    case ARM_BUILTIN_CLZ:
      icode = CODE_FOR_clz;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      tmode = insn_data[icode].operand[0].mode;
      mode0 = insn_data[icode].operand[1].mode;

      if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);
      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);
      pat = GEN_FCN (icode) (target, op0);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;
    }

  /* @@@ Should really do something sensible here.  */
  return NULL_RTX;
}
/* Recursively search through all of the blocks in a function
   checking to see if any of the variables created in that
   function match the RTX called 'orig'.  If they do then
   replace them with the RTX called 'new'.  */

static void
replace_symbols_in_block (block, orig, new)
     tree block;
     rtx orig;
     rtx new;
{
  for (; block; block = BLOCK_CHAIN (block))
    {
      tree sym;

      if (!TREE_USED (block))
	continue;

      for (sym = BLOCK_VARS (block); sym; sym = TREE_CHAIN (sym))
	{
	  if (  (DECL_NAME (sym) == 0 && TREE_CODE (sym) != TYPE_DECL)
	      || DECL_IGNORED_P (sym)
	      || TREE_CODE (sym) != VAR_DECL
	      || DECL_EXTERNAL (sym)
	      || !rtx_equal_p (DECL_RTL (sym), orig)
	      )
	    continue;

	  SET_DECL_RTL (sym, new);
	}

      replace_symbols_in_block (BLOCK_SUBBLOCKS (block), orig, new);
    }
}
/* Return the number (counting from 0) of
   the least significant set bit in MASK.  */

static int
number_of_first_bit_set (mask)
     int mask;
{
  int bit;

  for (bit = 0;
       (mask & (1 << bit)) == 0;
       ++bit)
    continue;

  return bit;
}
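
/* For illustration (not part of the original sources):
   number_of_first_bit_set (0x18) tests bits 0..2 (all clear) and stops
   at bit 3, returning 3 - the index of the lowest set bit in binary
   11000.  */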
/* Generate code to return from a thumb function.
   If 'reg_containing_return_addr' is -1, then the return address is
   actually on the stack, at the stack pointer.  */

static void
thumb_exit (f, reg_containing_return_addr, eh_ofs)
     FILE * f;
     int reg_containing_return_addr;
     rtx eh_ofs;
{
  unsigned regs_available_for_popping;
  unsigned regs_to_pop;
  int pops_needed;
  unsigned available;
  unsigned required;
  int mode;
  int size;
  int restore_a4 = FALSE;

  /* Compute the registers we need to pop.  */
  regs_to_pop = 0;
  pops_needed = 0;

  /* There is an assumption here, that if eh_ofs is not NULL, the
     normal return address will have been pushed.  */
  if (reg_containing_return_addr == -1 || eh_ofs)
    {
      /* When we are generating a return for __builtin_eh_return,
         reg_containing_return_addr must specify the return regno.  */
      if (eh_ofs && reg_containing_return_addr == -1)
        abort ();

      regs_to_pop |= 1 << LR_REGNUM;
      ++pops_needed;
    }

  if (TARGET_BACKTRACE)
    {
      /* Restore the (ARM) frame pointer and stack pointer.  */
      regs_to_pop |= (1 << ARM_HARD_FRAME_POINTER_REGNUM) | (1 << SP_REGNUM);
      pops_needed += 2;
    }

  /* If there is nothing to pop then just emit the BX instruction and
     return.  */
  if (pops_needed == 0)
    {
      if (eh_ofs)
        asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));

      asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
      return;
    }
  /* Otherwise if we are not supporting interworking and we have not created
     a backtrace structure and the function was not entered in ARM mode then
     just pop the return address straight into the PC.  */
  else if (!TARGET_INTERWORK
           && !TARGET_BACKTRACE
           && !is_called_in_ARM_mode (current_function_decl))
    {
      if (eh_ofs)
        {
          asm_fprintf (f, "\tadd\t%r, #4\n", SP_REGNUM);
          asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
          asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
        }
      else
        asm_fprintf (f, "\tpop\t{%r}\n", PC_REGNUM);

      return;
    }

  /* Find out how many of the (return) argument registers we can corrupt.  */
  regs_available_for_popping = 0;

  /* If returning via __builtin_eh_return, the bottom three registers
     all contain information needed for the return.  */
  if (eh_ofs)
    size = 12;
  else
    {
      /* Deduce the registers used from the function's
         return value.  This is more reliable than examining
         regs_ever_live[] because that will be set if the register is
         ever used in the function, not just if the register is used
         to hold a return value.  */
      if (current_function_return_rtx != 0)
        mode = GET_MODE (current_function_return_rtx);
      else
        mode = DECL_MODE (DECL_RESULT (current_function_decl));

      size = GET_MODE_SIZE (mode);

      if (size == 0)
        {
          /* In a void function we can use any argument register.
             In a function that returns a structure on the stack
             we can use the second and third argument registers.  */
          if (mode == VOIDmode)
            regs_available_for_popping =
              (1 << ARG_REGISTER (1))
              | (1 << ARG_REGISTER (2))
              | (1 << ARG_REGISTER (3));
          else
            regs_available_for_popping =
              (1 << ARG_REGISTER (2))
              | (1 << ARG_REGISTER (3));
        }
      else if (size <= 4)
        regs_available_for_popping =
          (1 << ARG_REGISTER (2))
          | (1 << ARG_REGISTER (3));
      else if (size <= 8)
        regs_available_for_popping =
          (1 << ARG_REGISTER (3));
    }

  /* Match registers to be popped with registers into which we pop them.  */
  for (available = regs_available_for_popping,
       required  = regs_to_pop;
       required != 0 && available != 0;
       available &= ~(available & - available),
       required  &= ~(required  & - required))
    --pops_needed;

  /* If we have any popping registers left over, remove them.  */
  if (available > 0)
    regs_available_for_popping &= ~available;

  /* Otherwise if we need another popping register we can use
     the fourth argument register.  */
  else if (pops_needed)
    {
      /* If we have not found any free argument registers and
         reg a4 contains the return address, we must move it.  */
      if (regs_available_for_popping == 0
          && reg_containing_return_addr == LAST_ARG_REGNUM)
        {
          asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
          reg_containing_return_addr = LR_REGNUM;
        }
      else if (size > 12)
        {
          /* Register a4 is being used to hold part of the return value,
             but we have dire need of a free, low register.  */
          restore_a4 = TRUE;

          asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, LAST_ARG_REGNUM);
        }

      if (reg_containing_return_addr != LAST_ARG_REGNUM)
        {
          /* The fourth argument register is available.  */
          regs_available_for_popping |= 1 << LAST_ARG_REGNUM;

          --pops_needed;
        }
    }

  /* Pop as many registers as we can.  */
  thumb_pushpop (f, regs_available_for_popping, FALSE);

  /* Process the registers we popped.  */
  if (reg_containing_return_addr == -1)
    {
      /* The return address was popped into the lowest numbered register.  */
      regs_to_pop &= ~(1 << LR_REGNUM);

      reg_containing_return_addr =
        number_of_first_bit_set (regs_available_for_popping);

      /* Remove this register from the mask of available registers, so that
         the return address will not be corrupted by further pops.  */
      regs_available_for_popping &= ~(1 << reg_containing_return_addr);
    }

  /* If we popped other registers then handle them here.  */
  if (regs_available_for_popping)
    {
      int frame_pointer;

      /* Work out which register currently contains the frame pointer.  */
      frame_pointer = number_of_first_bit_set (regs_available_for_popping);

      /* Move it into the correct place.  */
      asm_fprintf (f, "\tmov\t%r, %r\n",
                   ARM_HARD_FRAME_POINTER_REGNUM, frame_pointer);

      /* (Temporarily) remove it from the mask of popped registers.  */
      regs_available_for_popping &= ~(1 << frame_pointer);
      regs_to_pop &= ~(1 << ARM_HARD_FRAME_POINTER_REGNUM);

      if (regs_available_for_popping)
        {
          int stack_pointer;

          /* We popped the stack pointer as well,
             find the register that contains it.  */
          stack_pointer = number_of_first_bit_set (regs_available_for_popping);

          /* Move it into the stack register.  */
          asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, stack_pointer);

          /* At this point we have popped all necessary registers, so
             do not worry about restoring regs_available_for_popping
             to its correct value:

             assert (pops_needed == 0)
             assert (regs_available_for_popping == (1 << frame_pointer))
             assert (regs_to_pop == (1 << STACK_POINTER))  */
        }
      else
        {
          /* Since we have just moved the popped value into the frame
             pointer, the popping register is available for reuse, and
             we know that we still have the stack pointer left to pop.  */
          regs_available_for_popping |= (1 << frame_pointer);
        }
    }

  /* If we still have registers left on the stack, but we no longer have
     any registers into which we can pop them, then we must move the return
     address into the link register and make available the register that
     contained it.  */
  if (regs_available_for_popping == 0 && pops_needed > 0)
    {
      regs_available_for_popping |= 1 << reg_containing_return_addr;

      asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM,
                   reg_containing_return_addr);

      reg_containing_return_addr = LR_REGNUM;
    }

  /* If we have registers left on the stack then pop some more.
     We know that at most we will want to pop FP and SP.  */
  if (pops_needed > 0)
    {
      int popped_into;
      int move_to;

      thumb_pushpop (f, regs_available_for_popping, FALSE);

      /* We have popped either FP or SP.
         Move whichever one it is into the correct register.  */
      popped_into = number_of_first_bit_set (regs_available_for_popping);
      move_to     = number_of_first_bit_set (regs_to_pop);

      asm_fprintf (f, "\tmov\t%r, %r\n", move_to, popped_into);

      regs_to_pop &= ~(1 << move_to);

      --pops_needed;
    }

  /* If we still have not popped everything then we must have only
     had one register available to us and we are now popping the SP.  */
  if (pops_needed > 0)
    {
      int popped_into;

      thumb_pushpop (f, regs_available_for_popping, FALSE);

      popped_into = number_of_first_bit_set (regs_available_for_popping);

      asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, popped_into);
      /*
        assert (regs_to_pop == (1 << STACK_POINTER))
        assert (pops_needed == 1)
      */
    }

  /* If necessary restore the a4 register.  */
  if (restore_a4)
    {
      if (reg_containing_return_addr != LR_REGNUM)
        {
          asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
          reg_containing_return_addr = LR_REGNUM;
        }

      asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
    }

  if (eh_ofs)
    asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));

  /* Return to caller.  */
  asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
}
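
/* Illustrative sketch (not part of the compiler): the matching loop in
   thumb_exit above pairs off bits by repeatedly clearing the lowest set
   bit of each mask with x &= ~(x & -x).  This sketch counts how many
   pops remain unmatched for arbitrary masks; the function name is made
   up for the example.  Kept under #if 0 so it is never compiled.  */
#if 0
static int
unmatched_pops (unsigned required, unsigned available)
{
  int pops_needed = 0;
  unsigned r;

  for (r = required; r != 0; r &= ~(r & -r))
    pops_needed++;		/* one pop per bit set in REQUIRED */

  for (; required != 0 && available != 0;
       available &= ~(available & -available),
       required  &= ~(required  & -required))
    pops_needed--;		/* each pairing satisfies one pop */

  return pops_needed;
}
#endif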
/* Emit code to push or pop registers to or from the stack.  */

static void
thumb_pushpop (f, mask, push)
     FILE * f;
     int mask;
     int push;
{
  int regno;
  int lo_mask = mask & 0xFF;

  if (lo_mask == 0 && !push && (mask & (1 << 15)))
    {
      /* Special case.  Do not generate a POP PC statement here, do it in
         thumb_exit.  */
      thumb_exit (f, -1, NULL_RTX);
      return;
    }

  fprintf (f, "\t%s\t{", push ? "push" : "pop");

  /* Look at the low registers first.  */
  for (regno = 0; regno <= LAST_LO_REGNUM; regno++, lo_mask >>= 1)
    {
      if (lo_mask & 1)
        {
          asm_fprintf (f, "%r", regno);

          if ((lo_mask & ~1) != 0)
            fprintf (f, ", ");
        }
    }

  if (push && (mask & (1 << LR_REGNUM)))
    {
      /* Catch pushing the LR.  */
      if (mask & 0xFF)
        fprintf (f, ", ");

      asm_fprintf (f, "%r", LR_REGNUM);
    }
  else if (!push && (mask & (1 << PC_REGNUM)))
    {
      /* Catch popping the PC.  */
      if (TARGET_INTERWORK || TARGET_BACKTRACE)
        {
          /* The PC is never popped directly, instead
             it is popped into r3 and then BX is used.  */
          fprintf (f, "}\n");

          thumb_exit (f, -1, NULL_RTX);

          return;
        }
      else
        {
          if (mask & 0xFF)
            fprintf (f, ", ");

          asm_fprintf (f, "%r", PC_REGNUM);
        }
    }

  fprintf (f, "}\n");
}
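
/* Illustrative sketch (not part of the compiler): how a low-register
   mask such as the one handled above turns into assembler text.  The
   register naming is made up for the example.  Kept under #if 0 so it
   is never compiled.  */
#if 0
#include <stdio.h>

static void
print_push_mask (FILE *f, int mask)
{
  int regno;
  int first = 1;

  fprintf (f, "\tpush\t{");
  for (regno = 0; regno < 8; regno++, mask >>= 1)
    if (mask & 1)
      {
        fprintf (f, "%sr%d", first ? "" : ", ", regno);
        first = 0;
      }
  fprintf (f, "}\n");	/* e.g. mask 0x8d prints "push {r0, r2, r3, r7}" */
}
#endif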
void
thumb_final_prescan_insn (insn)
     rtx insn;
{
  if (flag_print_asm_name)
    asm_fprintf (asm_out_file, "%@ 0x%04x\n",
                 INSN_ADDRESSES (INSN_UID (insn)));
}
int
thumb_shiftable_const (val)
     unsigned HOST_WIDE_INT val;
{
  unsigned HOST_WIDE_INT mask = 0xff;
  int i;

  if (val == 0) /* XXX */
    return 0;

  for (i = 0; i < 25; i++)
    if ((val & (mask << i)) == val)
      return 1;

  return 0;
}
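
/* Illustrative sketch (not part of the compiler): the test above asks
   whether VAL is an 8-bit constant shifted left by 0..24 places, which
   Thumb can build with a MOV immediate followed by an LSL.  For example
   0x4400 (0x44 << 8) and 0xff000000 (0xff << 24) qualify, while 0x10001
   spans more than 8 bits and does not.  Kept under #if 0 so it is never
   compiled.  */
#if 0
static int
shiftable_example (void)
{
  unsigned long vals[] = { 0x4400, 0xff000000, 0x10001 };
  int i, j, count = 0;

  for (i = 0; i < 3; i++)
    for (j = 0; j < 25; j++)
      if (vals[i] != 0 && (vals[i] & (0xfful << j)) == vals[i])
        {
          count++;	/* vals[0] and vals[1] qualify; vals[2] never does */
          break;
        }

  return count;		/* 2 */
}
#endif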
/* Returns non-zero if the current function contains,
   or might contain a far jump.  */

int
thumb_far_jump_used_p (in_prologue)
     int in_prologue;
{
  rtx insn;

  /* This test is only important for leaf functions.  */
  /* assert (!leaf_function_p ()); */

  /* If we have already decided that far jumps may be used,
     do not bother checking again, and always return true even if
     it turns out that they are not being used.  Once we have made
     the decision that far jumps are present (and that hence the link
     register will be pushed onto the stack) we cannot go back on it.  */
  if (cfun->machine->far_jump_used)
    return 1;

  /* If this function is not being called from the prologue/epilogue
     generation code then it must be being called from the
     INITIAL_ELIMINATION_OFFSET macro.  */
  if (!in_prologue)
    {
      /* In this case we know that we are being asked about the elimination
         of the arg pointer register.  If that register is not being used,
         then there are no arguments on the stack, and we do not have to
         worry that a far jump might force the prologue to push the link
         register, changing the stack offsets.  In this case we can just
         return false, since the presence of far jumps in the function will
         not affect stack offsets.

         If the arg pointer is live (or if it was live, but has now been
         eliminated and so set to dead) then we do have to test to see if
         the function might contain a far jump.  This test can lead to some
         false negatives, since before reload is completed, the length of
         branch instructions is not known, so gcc defaults to returning their
         longest length, which in turn sets the far jump attribute to true.

         A false negative will not result in bad code being generated, but it
         will result in a needless push and pop of the link register.  We
         hope that this does not occur too often.  */
      if (regs_ever_live[ARG_POINTER_REGNUM])
        cfun->machine->arg_pointer_live = 1;
      else if (!cfun->machine->arg_pointer_live)
        return 0;
    }

  /* Check to see if the function contains a branch
     insn with the far jump attribute set.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == JUMP_INSN
          /* Ignore tablejump patterns.  */
          && GET_CODE (PATTERN (insn)) != ADDR_VEC
          && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
          && get_attr_far_jump (insn) == FAR_JUMP_YES)
        {
          /* Record the fact that we have decided that
             the function does use far jumps.  */
          cfun->machine->far_jump_used = 1;
          return 1;
        }
    }

  return 0;
}
/* Return non-zero if FUNC must be entered in ARM mode.  */

int
is_called_in_ARM_mode (func)
     tree func;
{
  if (TREE_CODE (func) != FUNCTION_DECL)
    abort ();

  /* Ignore the problem about functions whose address is taken.  */
  if (TARGET_CALLEE_INTERWORKING && TREE_PUBLIC (func))
    return TRUE;

#ifdef ARM_PE
  return lookup_attribute ("interfacearm", DECL_ATTRIBUTES (func)) != NULL_TREE;
#else
  return FALSE;
#endif
}
/* The bits which aren't usefully expanded as rtl.  */

const char *
thumb_unexpanded_epilogue ()
{
  int regno;
  int live_regs_mask = 0;
  int high_regs_pushed = 0;
  int leaf_function = leaf_function_p ();
  int had_to_push_lr;
  rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;

  if (return_used_this_function)
    return "";

  for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
    if (regs_ever_live[regno] && !call_used_regs[regno]
        && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
      live_regs_mask |= 1 << regno;

  for (regno = 8; regno < 13; regno++)
    if (regs_ever_live[regno] && !call_used_regs[regno]
        && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
      high_regs_pushed++;

  /* The prolog may have pushed some high registers to use as
     work registers.  e.g. the testsuite file:
     gcc/testsuite/gcc/gcc.c-torture/execute/complex-2.c
     compiles to produce:
	push	{r4, r5, r6, r7, lr}
	mov	r7, r9
	mov	r6, r8
	push	{r6, r7}
     as part of the prolog.  We have to undo that pushing here.  */

  if (high_regs_pushed)
    {
      int mask = live_regs_mask;
      int next_hi_reg;
      int size;
      int mode;

      /* Deduce the registers used from the function's return value.
         This is more reliable than examining regs_ever_live[] because that
         will be set if the register is ever used in the function, not just
         if the register is used to hold a return value.  */
      if (current_function_return_rtx != 0)
        mode = GET_MODE (current_function_return_rtx);
      else
        mode = DECL_MODE (DECL_RESULT (current_function_decl));

      size = GET_MODE_SIZE (mode);

      /* Unless we are returning a type of size > 12 register r3 is
         available.  */
      if (size < 13)
        mask |= 1 << 3;

      if (mask == 0)
        /* Oh dear!  We have no low registers into which we can pop
           high registers!  */
        internal_error
          ("no low registers available for popping high registers");

      for (next_hi_reg = 8; next_hi_reg < 13; next_hi_reg++)
        if (regs_ever_live[next_hi_reg] && !call_used_regs[next_hi_reg]
            && !(TARGET_SINGLE_PIC_BASE && (next_hi_reg == arm_pic_register)))
          break;

      while (high_regs_pushed)
        {
          /* Find lo register(s) into which the high register(s) can
             be popped.  */
          for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
            {
              if (mask & (1 << regno))
                high_regs_pushed--;
              if (high_regs_pushed == 0)
                break;
            }

          mask &= (2 << regno) - 1;	/* A noop if regno == 8 */

          /* Pop the values into the low register(s).  */
          thumb_pushpop (asm_out_file, mask, 0);

          /* Move the value(s) into the high registers.  */
          for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
            {
              if (mask & (1 << regno))
                {
                  asm_fprintf (asm_out_file, "\tmov\t%r, %r\n", next_hi_reg,
                               regno);

                  for (next_hi_reg++; next_hi_reg < 13; next_hi_reg++)
                    if (regs_ever_live[next_hi_reg]
                        && !call_used_regs[next_hi_reg]
                        && !(TARGET_SINGLE_PIC_BASE
                             && (next_hi_reg == arm_pic_register)))
                      break;
                }
            }
        }
    }

  had_to_push_lr = (live_regs_mask || !leaf_function
                    || thumb_far_jump_used_p (1));

  if (TARGET_BACKTRACE
      && ((live_regs_mask & 0xFF) == 0)
      && regs_ever_live[LAST_ARG_REGNUM] != 0)
    {
      /* The stack backtrace structure creation code had to
         push R7 in order to get a work register, so we pop
         it now.  */
      live_regs_mask |= (1 << LAST_LO_REGNUM);
    }

  if (current_function_pretend_args_size == 0 || TARGET_BACKTRACE)
    {
      if (had_to_push_lr
          && !is_called_in_ARM_mode (current_function_decl)
          && !eh_ofs)
        live_regs_mask |= 1 << PC_REGNUM;

      /* Either no argument registers were pushed or a backtrace
         structure was created which includes an adjusted stack
         pointer, so just pop everything.  */
      if (live_regs_mask)
        thumb_pushpop (asm_out_file, live_regs_mask, FALSE);

      if (eh_ofs)
        thumb_exit (asm_out_file, 2, eh_ofs);
      /* We have either just popped the return address into the
         PC or it was kept in LR for the entire function or
         it is still on the stack because we do not want to
         return by doing a pop {pc}.  */
      else if ((live_regs_mask & (1 << PC_REGNUM)) == 0)
        thumb_exit (asm_out_file,
                    (had_to_push_lr
                     && is_called_in_ARM_mode (current_function_decl)) ?
                    -1 : LR_REGNUM, NULL_RTX);
    }
  else
    {
      /* Pop everything but the return address.  */
      live_regs_mask &= ~(1 << PC_REGNUM);

      if (live_regs_mask)
        thumb_pushpop (asm_out_file, live_regs_mask, FALSE);

      if (had_to_push_lr)
        /* Get the return address into a temporary register.  */
        thumb_pushpop (asm_out_file, 1 << LAST_ARG_REGNUM, 0);

      /* Remove the argument registers that were pushed onto the stack.  */
      asm_fprintf (asm_out_file, "\tadd\t%r, %r, #%d\n",
                   SP_REGNUM, SP_REGNUM,
                   current_function_pretend_args_size);

      if (eh_ofs)
        thumb_exit (asm_out_file, 2, eh_ofs);
      else
        thumb_exit (asm_out_file,
                    had_to_push_lr ? LAST_ARG_REGNUM : LR_REGNUM, NULL_RTX);
    }

  return "";
}
/* Functions to save and restore machine-specific function data.  */

static struct machine_function *
arm_init_machine_status ()
{
  struct machine_function *machine;
  machine = (machine_function *) ggc_alloc_cleared (sizeof (machine_function));

#if ARM_FT_UNKNOWN != 0
  machine->func_type = ARM_FT_UNKNOWN;
#endif
  return machine;
}
/* Return an RTX indicating where the return address to the
   calling function can be found.  */

rtx
arm_return_addr (count, frame)
     int count;
     rtx frame ATTRIBUTE_UNUSED;
{
  if (count != 0)
    return NULL_RTX;

  if (TARGET_APCS_32)
    return get_hard_reg_initial_val (Pmode, LR_REGNUM);
  else
    {
      rtx lr = gen_rtx_AND (Pmode, gen_rtx_REG (Pmode, LR_REGNUM),
                            GEN_INT (RETURN_ADDR_MASK26));
      return get_func_hard_reg_initial_val (cfun, lr);
    }
}
/* Do anything needed before RTL is emitted for each function.  */

void
arm_init_expanders ()
{
  /* Arrange to initialize and mark the machine per-function status.  */
  init_machine_status = arm_init_machine_status;
}
/* Generate the rest of a function's prologue.  */

void
thumb_expand_prologue ()
{
  HOST_WIDE_INT amount = (get_frame_size ()
                          + current_function_outgoing_args_size);
  unsigned long func_type;

  func_type = arm_current_func_type ();

  /* Naked functions don't have prologues.  */
  if (IS_NAKED (func_type))
    return;

  if (IS_INTERRUPT (func_type))
    {
      error ("interrupt Service Routines cannot be coded in Thumb mode");
      return;
    }

  if (frame_pointer_needed)
    emit_insn (gen_movsi (hard_frame_pointer_rtx, stack_pointer_rtx));

  if (amount)
    {
      amount = ROUND_UP (amount);

      if (amount < 512)
        emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
                               GEN_INT (- amount)));
      else
        {
          int regno;
          rtx reg;

          /* The stack decrement is too big for an immediate value in a single
             insn.  In theory we could issue multiple subtracts, but after
             three of them it becomes more space efficient to place the full
             value in the constant pool and load into a register.  (Also the
             ARM debugger really likes to see only one stack decrement per
             function).  So instead we look for a scratch register into which
             we can load the decrement, and then we subtract this from the
             stack pointer.  Unfortunately on the thumb the only available
             scratch registers are the argument registers, and we cannot use
             these as they may hold arguments to the function.  Instead we
             attempt to locate a call preserved register which is used by this
             function.  If we can find one, then we know that it will have
             been pushed at the start of the prologue and so we can corrupt
             it now.  */
          for (regno = LAST_ARG_REGNUM + 1; regno <= LAST_LO_REGNUM; regno++)
            if (regs_ever_live[regno]
                && !call_used_regs[regno] /* Paranoia */
                && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register))
                && !(frame_pointer_needed
                     && (regno == THUMB_HARD_FRAME_POINTER_REGNUM)))
              break;

          if (regno > LAST_LO_REGNUM) /* Very unlikely */
            {
              rtx spare = gen_rtx (REG, SImode, IP_REGNUM);

              /* Choose an arbitrary, non-argument low register.  */
              reg = gen_rtx (REG, SImode, LAST_LO_REGNUM);

              /* Save it by copying it into a high, scratch register.  */
              emit_insn (gen_movsi (spare, reg));
              /* Add a USE to stop propagate_one_insn() from barfing.  */
              emit_insn (gen_prologue_use (spare));

              /* Decrement the stack.  */
              emit_insn (gen_movsi (reg, GEN_INT (- amount)));
              emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
                                     reg));

              /* Restore the low register's original value.  */
              emit_insn (gen_movsi (reg, spare));

              /* Emit a USE of the restored scratch register, so that flow
                 analysis will not consider the restore redundant.  The
                 register won't be used again in this function and isn't
                 restored by the epilogue.  */
              emit_insn (gen_prologue_use (reg));
            }
          else
            {
              reg = gen_rtx (REG, SImode, regno);

              emit_insn (gen_movsi (reg, GEN_INT (- amount)));
              emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
                                     reg));
            }
        }
    }

  if (current_function_profile || TARGET_NO_SCHED_PRO)
    emit_insn (gen_blockage ());
}
void
thumb_expand_epilogue ()
{
  HOST_WIDE_INT amount = (get_frame_size ()
                          + current_function_outgoing_args_size);

  /* Naked functions don't have epilogues.  */
  if (IS_NAKED (arm_current_func_type ()))
    return;

  if (frame_pointer_needed)
    emit_insn (gen_movsi (stack_pointer_rtx, hard_frame_pointer_rtx));
  else if (amount)
    {
      amount = ROUND_UP (amount);

      if (amount < 512)
        emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
                               GEN_INT (amount)));
      else
        {
          /* r3 is always free in the epilogue.  */
          rtx reg = gen_rtx (REG, SImode, LAST_ARG_REGNUM);

          emit_insn (gen_movsi (reg, GEN_INT (amount)));
          emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
        }
    }

  /* Emit a USE (stack_pointer_rtx), so that
     the stack adjustment will not be deleted.  */
  emit_insn (gen_prologue_use (stack_pointer_rtx));

  if (current_function_profile || TARGET_NO_SCHED_PRO)
    emit_insn (gen_blockage ());
}
static void
thumb_output_function_prologue (f, size)
     FILE * f;
     HOST_WIDE_INT size ATTRIBUTE_UNUSED;
{
  int live_regs_mask = 0;
  int high_regs_pushed = 0;
  int regno;

  if (IS_NAKED (arm_current_func_type ()))
    return;

  if (is_called_in_ARM_mode (current_function_decl))
    {
      const char * name;

      if (GET_CODE (DECL_RTL (current_function_decl)) != MEM)
        abort ();
      if (GET_CODE (XEXP (DECL_RTL (current_function_decl), 0)) != SYMBOL_REF)
        abort ();
      name = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);

      /* Generate code sequence to switch us into Thumb mode.  */
      /* The .code 32 directive has already been emitted by
         ASM_DECLARE_FUNCTION_NAME.  */
      asm_fprintf (f, "\torr\t%r, %r, #1\n", IP_REGNUM, PC_REGNUM);
      asm_fprintf (f, "\tbx\t%r\n", IP_REGNUM);

      /* Generate a label, so that the debugger will notice the
         change in instruction sets.  This label is also used by
         the assembler to bypass the ARM code when this function
         is called from a Thumb encoded function elsewhere in the
         same file.  Hence the definition of STUB_NAME here must
         agree with the definition in gas/config/tc-arm.c  */

#define STUB_NAME ".real_start_of"

      asm_fprintf (f, "\t.code\t16\n");
#ifdef ARM_PE
      if (arm_dllexport_name_p (name))
        name = arm_strip_name_encoding (name);
#endif
      asm_fprintf (f, "\t.globl %s%U%s\n", STUB_NAME, name);
      asm_fprintf (f, "\t.thumb_func\n");
      asm_fprintf (f, "%s%U%s:\n", STUB_NAME, name);
    }

  if (current_function_pretend_args_size)
    {
      if (cfun->machine->uses_anonymous_args)
        {
          int num_pushes;

          asm_fprintf (f, "\tpush\t{");

          num_pushes = ARM_NUM_INTS (current_function_pretend_args_size);

          for (regno = LAST_ARG_REGNUM + 1 - num_pushes;
               regno <= LAST_ARG_REGNUM;
               regno++)
            asm_fprintf (f, "%r%s", regno,
                         regno == LAST_ARG_REGNUM ? "" : ", ");

          asm_fprintf (f, "}\n");
        }
      else
        asm_fprintf (f, "\tsub\t%r, %r, #%d\n",
                     SP_REGNUM, SP_REGNUM,
                     current_function_pretend_args_size);
    }

  for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
    if (regs_ever_live[regno] && !call_used_regs[regno]
        && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
      live_regs_mask |= 1 << regno;

  if (live_regs_mask || !leaf_function_p () || thumb_far_jump_used_p (1))
    live_regs_mask |= 1 << LR_REGNUM;

  if (TARGET_BACKTRACE)
    {
      int offset;
      int work_register = 0;
      int wr;

      /* We have been asked to create a stack backtrace structure.
         The code looks like this:

	  0   sub   SP, #16         Reserve space for 4 registers.
	  2   push  {R7}            Get a work register.
	  4   add   R7, SP, #20     Get the stack pointer before the push.
	  6   str   R7, [SP, #8]    Store the stack pointer (before reserving the space).
	  8   mov   R7, PC          Get hold of the start of this code plus 12.
	 10   str   R7, [SP, #16]   Store it.
	 12   mov   R7, FP          Get hold of the current frame pointer.
	 14   str   R7, [SP, #4]    Store it.
	 16   mov   R7, LR          Get hold of the current return address.
	 18   str   R7, [SP, #12]   Store it.
	 20   add   R7, SP, #16     Point at the start of the backtrace structure.
	 22   mov   FP, R7          Put this value into the frame pointer.  */

      if ((live_regs_mask & 0xFF) == 0)
        {
          /* See if the a4 register is free.  */
          if (regs_ever_live[LAST_ARG_REGNUM] == 0)
            work_register = LAST_ARG_REGNUM;
          else	/* We must push a register of our own */
            live_regs_mask |= (1 << LAST_LO_REGNUM);
        }

      if (work_register == 0)
        {
          /* Select a register from the list that will be pushed to
             use as our work register.  */
          for (work_register = (LAST_LO_REGNUM + 1); work_register--;)
            if ((1 << work_register) & live_regs_mask)
              break;
        }

      asm_fprintf
        (f, "\tsub\t%r, %r, #16\t%@ Create stack backtrace structure\n",
         SP_REGNUM, SP_REGNUM);

      if (live_regs_mask)
        thumb_pushpop (f, live_regs_mask, 1);

      for (offset = 0, wr = 1 << 15; wr != 0; wr >>= 1)
        if (wr & live_regs_mask)
          offset += 4;

      asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
                   offset + 16 + current_function_pretend_args_size);

      asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
                   offset + 4);

      /* Make sure that the instruction fetching the PC is in the right place
         to calculate "start of backtrace creation code + 12".  */
      if (live_regs_mask)
        {
          asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
          asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
                       offset + 12);
          asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
                       ARM_HARD_FRAME_POINTER_REGNUM);
          asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
                       offset);
        }
      else
        {
          asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
                       ARM_HARD_FRAME_POINTER_REGNUM);
          asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
                       offset);
          asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
          asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
                       offset + 12);
        }

      asm_fprintf (f, "\tmov\t%r, %r\n", work_register, LR_REGNUM);
      asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
                   offset + 8);
      asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
                   offset + 12);
      asm_fprintf (f, "\tmov\t%r, %r\t\t%@ Backtrace structure created\n",
                   ARM_HARD_FRAME_POINTER_REGNUM, work_register);
    }
  else if (live_regs_mask)
    thumb_pushpop (f, live_regs_mask, 1);

  for (regno = 8; regno < 13; regno++)
    if (regs_ever_live[regno] && !call_used_regs[regno]
        && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
      high_regs_pushed++;

  if (high_regs_pushed)
    {
      int pushable_regs = 0;
      int mask = live_regs_mask & 0xff;
      int next_hi_reg;

      for (next_hi_reg = 12; next_hi_reg > LAST_LO_REGNUM; next_hi_reg--)
        if (regs_ever_live[next_hi_reg] && !call_used_regs[next_hi_reg]
            && !(TARGET_SINGLE_PIC_BASE
                 && (next_hi_reg == arm_pic_register)))
          break;

      pushable_regs = mask;

      if (pushable_regs == 0)
        {
          /* Desperation time -- this probably will never happen.  */
          if (regs_ever_live[LAST_ARG_REGNUM]
              || !call_used_regs[LAST_ARG_REGNUM])
            asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, LAST_ARG_REGNUM);
          mask = 1 << LAST_ARG_REGNUM;
        }

      while (high_regs_pushed > 0)
        {
          for (regno = LAST_LO_REGNUM; regno >= 0; regno--)
            {
              if (mask & (1 << regno))
                {
                  asm_fprintf (f, "\tmov\t%r, %r\n", regno, next_hi_reg);

                  high_regs_pushed--;

                  if (high_regs_pushed)
                    {
                      for (next_hi_reg--; next_hi_reg > LAST_LO_REGNUM;
                           next_hi_reg--)
                        if (regs_ever_live[next_hi_reg]
                            && !call_used_regs[next_hi_reg]
                            && !(TARGET_SINGLE_PIC_BASE
                                 && (next_hi_reg == arm_pic_register)))
                          break;
                    }
                  else
                    {
                      mask &= ~((1 << regno) - 1);
                      break;
                    }
                }
            }

          thumb_pushpop (f, mask, 1);
        }

      if (pushable_regs == 0
          && (regs_ever_live[LAST_ARG_REGNUM]
              || !call_used_regs[LAST_ARG_REGNUM]))
        asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
    }
}
/* Handle the case of a double word load into a low register from
   a computed memory address.  The computed address may involve a
   register which is overwritten by the load.  */

const char *
thumb_load_double_from_address (operands)
     rtx *operands;
{
  rtx addr;
  rtx base;
  rtx offset;
  rtx arg1;
  rtx arg2;

  if (GET_CODE (operands[0]) != REG)
    abort ();

  if (GET_CODE (operands[1]) != MEM)
    abort ();

  /* Get the memory address.  */
  addr = XEXP (operands[1], 0);

  /* Work out how the memory address is computed.  */
  switch (GET_CODE (addr))
    {
    case REG:
      operands[2] = gen_rtx (MEM, SImode,
                             plus_constant (XEXP (operands[1], 0), 4));

      if (REGNO (operands[0]) == REGNO (addr))
        {
          output_asm_insn ("ldr\t%H0, %2", operands);
          output_asm_insn ("ldr\t%0, %1", operands);
        }
      else
        {
          output_asm_insn ("ldr\t%0, %1", operands);
          output_asm_insn ("ldr\t%H0, %2", operands);
        }
      break;

    case CONST:
      /* Compute <address> + 4 for the high order load.  */
      operands[2] = gen_rtx (MEM, SImode,
                             plus_constant (XEXP (operands[1], 0), 4));

      output_asm_insn ("ldr\t%0, %1", operands);
      output_asm_insn ("ldr\t%H0, %2", operands);
      break;

    case PLUS:
      arg1 = XEXP (addr, 0);
      arg2 = XEXP (addr, 1);

      if (CONSTANT_P (arg1))
        base = arg2, offset = arg1;
      else
        base = arg1, offset = arg2;

      if (GET_CODE (base) != REG)
        abort ();

      /* Catch the case of <address> = <reg> + <reg> */
      if (GET_CODE (offset) == REG)
        {
          int reg_offset = REGNO (offset);
          int reg_base   = REGNO (base);
          int reg_dest   = REGNO (operands[0]);

          /* Add the base and offset registers together into the
             higher destination register.  */
          asm_fprintf (asm_out_file, "\tadd\t%r, %r, %r",
                       reg_dest + 1, reg_base, reg_offset);

          /* Load the lower destination register from the address in
             the higher destination register.  */
          asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #0]",
                       reg_dest, reg_dest + 1);

          /* Load the higher destination register from its own address
             plus 4.  */
          asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #4]",
                       reg_dest + 1, reg_dest + 1);
        }
      else
        {
          /* Compute <address> + 4 for the high order load.  */
          operands[2] = gen_rtx (MEM, SImode,
                                 plus_constant (XEXP (operands[1], 0), 4));

          /* If the computed address is held in the low order register
             then load the high order register first, otherwise always
             load the low order register first.  */
          if (REGNO (operands[0]) == REGNO (base))
            {
              output_asm_insn ("ldr\t%H0, %2", operands);
              output_asm_insn ("ldr\t%0, %1", operands);
            }
          else
            {
              output_asm_insn ("ldr\t%0, %1", operands);
              output_asm_insn ("ldr\t%H0, %2", operands);
            }
        }
      break;

    case LABEL_REF:
      /* With no registers to worry about we can just load the value
         directly.  */
      operands[2] = gen_rtx (MEM, SImode,
                             plus_constant (XEXP (operands[1], 0), 4));

      output_asm_insn ("ldr\t%H0, %2", operands);
      output_asm_insn ("ldr\t%0, %1", operands);
      break;

    default:
      abort ();
      break;
    }

  return "";
}
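
/* Illustrative sketch (not part of the compiler): the ordering rule
   used above.  When a double-word load's destination overlaps the
   address register, the half that does NOT overwrite the address must
   be loaded first.  The function name is made up for the example; kept
   under #if 0 so it is never compiled.  */
#if 0
static int
load_high_half_first (int dest_regno, int base_regno)
{
  /* The low half goes into DEST_REGNO itself; if that is also the
     base register, loading it first would destroy the address, so
     the high half (DEST_REGNO + 1) must be loaded first instead.  */
  return dest_regno == base_regno;
}
#endif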
const char *
thumb_output_move_mem_multiple (n, operands)
     int n;
     rtx * operands;
{
  rtx tmp;

  switch (n)
    {
    case 2:
      if (REGNO (operands[4]) > REGNO (operands[5]))
        {
          tmp = operands[4];
          operands[4] = operands[5];
          operands[5] = tmp;
        }
      output_asm_insn ("ldmia\t%1!, {%4, %5}", operands);
      output_asm_insn ("stmia\t%0!, {%4, %5}", operands);
      break;

    case 3:
      if (REGNO (operands[4]) > REGNO (operands[5]))
        {
          tmp = operands[4];
          operands[4] = operands[5];
          operands[5] = tmp;
        }
      if (REGNO (operands[5]) > REGNO (operands[6]))
        {
          tmp = operands[5];
          operands[5] = operands[6];
          operands[6] = tmp;
        }
      if (REGNO (operands[4]) > REGNO (operands[5]))
        {
          tmp = operands[4];
          operands[4] = operands[5];
          operands[5] = tmp;
        }

      output_asm_insn ("ldmia\t%1!, {%4, %5, %6}", operands);
      output_asm_insn ("stmia\t%0!, {%4, %5, %6}", operands);
      break;

    default:
      abort ();
    }

  return "";
}
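
/* Illustrative sketch (not part of the compiler): the three
   compare-and-swaps above form a sorting network that orders three
   register numbers ascending, as LDMIA/STMIA register lists require.
   Kept under #if 0 so it is never compiled.  */
#if 0
static void
sort3 (int *a, int *b, int *c)
{
  int tmp;

  if (*a > *b) { tmp = *a; *a = *b; *b = tmp; }
  if (*b > *c) { tmp = *b; *b = *c; *c = tmp; }
  if (*a > *b) { tmp = *a; *a = *b; *b = tmp; }
  /* Now *a <= *b <= *c for any input permutation.  */
}
#endif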
/* Routines for generating rtl.  */

void
thumb_expand_movstrqi (operands)
     rtx * operands;
{
  rtx out = copy_to_mode_reg (SImode, XEXP (operands[0], 0));
  rtx in  = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
  HOST_WIDE_INT len = INTVAL (operands[2]);
  HOST_WIDE_INT offset = 0;

  while (len >= 12)
    {
      emit_insn (gen_movmem12b (out, in, out, in));
      len -= 12;
    }

  if (len >= 8)
    {
      emit_insn (gen_movmem8b (out, in, out, in));
      len -= 8;
    }

  if (len >= 4)
    {
      rtx reg = gen_reg_rtx (SImode);
      emit_insn (gen_movsi (reg, gen_rtx (MEM, SImode, in)));
      emit_insn (gen_movsi (gen_rtx (MEM, SImode, out), reg));
      len -= 4;
      offset += 4;
    }

  if (len >= 2)
    {
      rtx reg = gen_reg_rtx (HImode);
      emit_insn (gen_movhi (reg, gen_rtx (MEM, HImode,
                                          plus_constant (in, offset))));
      emit_insn (gen_movhi (gen_rtx (MEM, HImode, plus_constant (out, offset)),
                            reg));
      len -= 2;
      offset += 2;
    }

  if (len)
    {
      rtx reg = gen_reg_rtx (QImode);
      emit_insn (gen_movqi (reg, gen_rtx (MEM, QImode,
                                          plus_constant (in, offset))));
      emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (out, offset)),
                            reg));
    }
}
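
/* Illustrative sketch (not part of the compiler): the same decomposition
   in plain C -- copy the bulk in the largest units the target handles
   cheaply, then finish with word, half-word and byte tails.  The 12- and
   8-byte steps stand in for the movmem12b/movmem8b patterns used above.
   Kept under #if 0 so it is never compiled.  */
#if 0
#include <string.h>

static void
copy_qi (char *out, const char *in, unsigned long len)
{
  unsigned long offset = 0;

  while (len >= 12)
    { memcpy (out + offset, in + offset, 12); offset += 12; len -= 12; }
  if (len >= 8)
    { memcpy (out + offset, in + offset, 8); offset += 8; len -= 8; }
  if (len >= 4)
    { memcpy (out + offset, in + offset, 4); offset += 4; len -= 4; }
  if (len >= 2)
    { memcpy (out + offset, in + offset, 2); offset += 2; len -= 2; }
  if (len)
    out[offset] = in[offset];	/* final odd byte */
}
#endif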
int
thumb_cmp_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return ((GET_CODE (op) == CONST_INT
           && (unsigned HOST_WIDE_INT) (INTVAL (op)) < 256)
          || register_operand (op, mode));
}
static const char *
thumb_condition_code (x, invert)
     rtx x;
     int invert;
{
  static const char * const conds[] =
  {
    "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
    "hi", "ls", "ge", "lt", "gt", "le"
  };
  int val;

  switch (GET_CODE (x))
    {
    case EQ: val = 0; break;
    case NE: val = 1; break;
    case GEU: val = 2; break;
    case LTU: val = 3; break;
    case GTU: val = 8; break;
    case LEU: val = 9; break;
    case GE: val = 10; break;
    case LT: val = 11; break;
    case GT: val = 12; break;
    case LE: val = 13; break;

    default:
      abort ();
    }

  return conds[val ^ invert];
}
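
/* Illustrative sketch (not part of the compiler): the table above is
   laid out so that each condition and its inverse differ only in the
   low bit of the index, which is why `val ^ invert' suffices.  Kept
   under #if 0 so it is never compiled.  */
#if 0
static const char *
cond_example (void)
{
  static const char *const conds[] =
  {
    "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
    "hi", "ls", "ge", "lt", "gt", "le"
  };

  /* EQ is entry 0; inverting it (invert == 1) selects "ne".  */
  return conds[0 ^ 1];
}
#endif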
/* Handle storing a half-word to memory during reload.  */

void
thumb_reload_out_hi (operands)
     rtx * operands;
{
  emit_insn (gen_thumb_movhi_clobber (operands[0], operands[1], operands[2]));
}

/* Handle reading a half-word from memory during reload.  */

void
thumb_reload_in_hi (operands)
     rtx * operands ATTRIBUTE_UNUSED;
{
  abort ();
}
/* Return the length of a function name prefix
   that starts with the character 'c'.  */

static int
arm_get_strip_length (char c)
{
  switch (c)
    {
    ARM_NAME_ENCODING_LENGTHS
      default: return 0;
    }
}

/* Return a pointer to a function's name with any
   and all prefix encodings stripped from it.  */

const char *
arm_strip_name_encoding (const char * name)
{
  int skip;

  while ((skip = arm_get_strip_length (* name)))
    name += skip;

  return name;
}
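
/* Illustrative sketch (not part of the compiler): the stripping loop
   above, with a toy prefix table standing in for
   ARM_NAME_ENCODING_LENGTHS.  With it, "*%foo" strips to "foo".  The
   prefix characters are made up for the example; kept under #if 0 so
   it is never compiled.  */
#if 0
static int
toy_strip_length (char c)
{
  switch (c)
    {
    case '*': return 1;		/* hypothetical one-character prefixes */
    case '%': return 1;
    default:  return 0;
    }
}

static const char *
toy_strip (const char *name)
{
  int skip;

  while ((skip = toy_strip_length (*name)))
    name += skip;

  return name;
}
#endif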
#ifdef AOF_ASSEMBLER
/* Special functions only needed when producing AOF syntax assembler.  */

rtx aof_pic_label = NULL_RTX;

struct pic_chain
{
  struct pic_chain * next;
  const char * symname;
};

static struct pic_chain * aof_pic_chain = NULL;

rtx
aof_pic_entry (x)
     rtx x;
{
  struct pic_chain ** chainp;
  int offset;

  if (aof_pic_label == NULL_RTX)
    aof_pic_label = gen_rtx_SYMBOL_REF (Pmode, "x$adcons");

  for (offset = 0, chainp = &aof_pic_chain; *chainp;
       offset += 4, chainp = &(*chainp)->next)
    if ((*chainp)->symname == XSTR (x, 0))
      return plus_constant (aof_pic_label, offset);

  *chainp = (struct pic_chain *) xmalloc (sizeof (struct pic_chain));
  (*chainp)->next = NULL;
  (*chainp)->symname = XSTR (x, 0);
  return plus_constant (aof_pic_label, offset);
}

void
aof_dump_pic_table (f)
     FILE * f;
{
  struct pic_chain * chain;

  if (aof_pic_chain == NULL)
    return;

  asm_fprintf (f, "\tAREA |%r$$adcons|, BASED %r\n",
               PIC_OFFSET_TABLE_REGNUM,
               PIC_OFFSET_TABLE_REGNUM);
  fputs ("|x$adcons|\n", f);

  for (chain = aof_pic_chain; chain; chain = chain->next)
    {
      fputs ("\tDCD\t", f);
      assemble_name (f, chain->symname);
      fputs ("\n", f);
    }
}

int arm_text_section_count = 1;

char *
aof_text_section ()
{
  static char buf[100];
  sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
           arm_text_section_count++);
  if (flag_pic)
    strcat (buf, ", PIC, REENTRANT");
  return buf;
}

static int arm_data_section_count = 1;

char *
aof_data_section ()
{
  static char buf[100];
  sprintf (buf, "\tAREA |C$$data%d|, DATA", arm_data_section_count++);
  return buf;
}

/* The AOF assembler is religiously strict about declarations of
   imported and exported symbols, so that it is impossible to declare
   a function as imported near the beginning of the file, and then to
   export it later on.  It is, however, possible to delay the decision
   until all the functions in the file have been compiled.  To get
   around this, we maintain a list of the imports and exports, and
   delete from it any that are subsequently defined.  At the end of
   compilation we spit the remainder of the list out before the END
   directive.  */

struct import
{
  struct import * next;
  const char * name;
};

static struct import * imports_list = NULL;

void
aof_add_import (name)
     const char * name;
{
  struct import * new;

  for (new = imports_list; new; new = new->next)
    if (new->name == name)
      return;

  new = (struct import *) xmalloc (sizeof (struct import));
  new->next = imports_list;
  imports_list = new;
  new->name = name;
}

void
aof_delete_import (name)
     const char * name;
{
  struct import ** old;

  for (old = &imports_list; *old; old = & (*old)->next)
    {
      if ((*old)->name == name)
        {
          *old = (*old)->next;
          return;
        }
    }
}

int arm_main_function = 0;

void
aof_dump_imports (f)
     FILE * f;
{
  /* The AOF assembler needs this to cause the startup code to be extracted
     from the library.  Bringing in __main causes the whole thing to work
     automagically.  */
  if (arm_main_function)
    {
      text_section ();
      fputs ("\tIMPORT __main\n", f);
      fputs ("\tDCD __main\n", f);
    }

  /* Now dump the remaining imports.  */
  while (imports_list)
    {
      fprintf (f, "\tIMPORT\t");
      assemble_name (f, imports_list->name);
      fputc ('\n', f);
      imports_list = imports_list->next;
    }
}
#endif /* AOF_ASSEMBLER */
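
/* Illustrative sketch (not part of the compiler): aof_delete_import
   above uses the classic pointer-to-pointer list walk, which unlinks a
   node without special-casing the list head.  Kept under #if 0 so it
   is never compiled.  */
#if 0
#include <stdlib.h>

struct node { struct node *next; int key; };

static void
delete_key (struct node **list, int key)
{
  struct node **p;

  for (p = list; *p; p = &(*p)->next)
    if ((*p)->key == key)
      {
        struct node *dead = *p;
        *p = dead->next;	/* unlink, whether head or interior */
        free (dead);
        return;
      }
}
#endif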
#ifdef OBJECT_FORMAT_ELF
/* Switch to an arbitrary section NAME with attributes as specified
   by FLAGS.  ALIGN specifies any known alignment requirements for
   the section; 0 if the default should be used.

   Differs from the default elf version only in the prefix character
   used before the section type.  */

static void
arm_elf_asm_named_section (name, flags)
     const char * name;
     unsigned int flags;
{
  char flagchars[8], *f = flagchars;
  const char *type;

  if (!(flags & SECTION_DEBUG))
    *f++ = 'a';
  if (flags & SECTION_WRITE)
    *f++ = 'w';
  if (flags & SECTION_CODE)
    *f++ = 'x';
  if (flags & SECTION_SMALL)
    *f++ = 's';
  if (flags & SECTION_MERGE)
    *f++ = 'M';
  if (flags & SECTION_STRINGS)
    *f++ = 'S';
  *f = '\0';

  if (flags & SECTION_BSS)
    type = "nobits";
  else
    type = "progbits";

  if (flags & SECTION_ENTSIZE)
    fprintf (asm_out_file, "\t.section\t%s,\"%s\",%%%s,%d\n",
             name, flagchars, type, flags & SECTION_ENTSIZE);
  else
    fprintf (asm_out_file, "\t.section\t%s,\"%s\",%%%s\n",
             name, flagchars, type);
}
#endif /* OBJECT_FORMAT_ELF */
/* Symbols in the text segment can be accessed without indirecting via the
   constant pool; it may take an extra binary operation, but this is still
   faster than indirecting via memory.  Don't do this when not optimizing,
   since we won't be calculating all of the offsets necessary to do this
   simplification.  */

static void
arm_encode_section_info (decl, first)
     tree decl;
     int first;
{
  /* This doesn't work with AOF syntax, since the string table may be in
     a different AREA.  */
#ifndef AOF_ASSEMBLER
  if (optimize > 0 && TREE_CONSTANT (decl)
      && (!flag_writable_strings || TREE_CODE (decl) != STRING_CST))
    {
      rtx rtl = (TREE_CODE_CLASS (TREE_CODE (decl)) != 'd'
                 ? TREE_CST_RTL (decl) : DECL_RTL (decl));
      SYMBOL_REF_FLAG (XEXP (rtl, 0)) = 1;
    }
#endif

  /* If we are referencing a function that is weak then encode a long call
     flag in the function name, otherwise if the function is static or
     known to be defined in this file then encode a short call flag.  */
  if (first && TREE_CODE_CLASS (TREE_CODE (decl)) == 'd')
    {
      if (TREE_CODE (decl) == FUNCTION_DECL && DECL_WEAK (decl))
        arm_encode_call_attribute (decl, LONG_CALL_FLAG_CHAR);
      else if (! TREE_PUBLIC (decl))
        arm_encode_call_attribute (decl, SHORT_CALL_FLAG_CHAR);
    }
}
#endif /* !ARM_PE */