/* Output routines for GCC for ARM.
   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002
   Free Software Foundation, Inc.
   Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
   and Martin Simmons (@harleqn.co.uk).
   More major hacks by Richard Earnshaw (rearnsha@arm.com).

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "hard-reg-set.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-attr.h"
#include "integrate.h"
#include "target-def.h"
/* Forward definitions of types.  */
typedef struct minipool_node    Mnode;
typedef struct minipool_fixup   Mfix;
/* In order to improve the layout of the prototypes below
   some short type abbreviations are defined here.  */
#define Hint   HOST_WIDE_INT
#define Mmode  enum machine_mode
#define Ulong  unsigned long
#define Ccstar const char *
const struct attribute_spec arm_attribute_table[];
/* Forward function declarations.  */
static void      arm_add_gc_roots               PARAMS ((void));
static int       arm_gen_constant               PARAMS ((enum rtx_code, Mmode, Hint, rtx, rtx, int, int));
static Ulong     bit_count                      PARAMS ((signed int));
static int       const_ok_for_op                PARAMS ((Hint, enum rtx_code));
static int       eliminate_lr2ip                PARAMS ((rtx *));
static rtx       emit_multi_reg_push            PARAMS ((int));
static rtx       emit_sfm                       PARAMS ((int, int));
static bool      arm_assemble_integer           PARAMS ((rtx, unsigned int, int));
static Ccstar    fp_const_from_val              PARAMS ((REAL_VALUE_TYPE *));
static arm_cc    get_arm_condition_code         PARAMS ((rtx));
static void      init_fpa_table                 PARAMS ((void));
static Hint      int_log2                       PARAMS ((Hint));
static rtx       is_jump_table                  PARAMS ((rtx));
static Ccstar    output_multi_immediate         PARAMS ((rtx *, Ccstar, Ccstar, int, Hint));
static void      print_multi_reg                PARAMS ((FILE *, Ccstar, int, int));
static Mmode     select_dominance_cc_mode       PARAMS ((rtx, rtx, Hint));
static Ccstar    shift_op                       PARAMS ((rtx, Hint *));
static void      arm_init_machine_status        PARAMS ((struct function *));
static void      arm_mark_machine_status        PARAMS ((struct function *));
static void      arm_free_machine_status        PARAMS ((struct function *));
static int       number_of_first_bit_set        PARAMS ((int));
static void      replace_symbols_in_block       PARAMS ((tree, rtx, rtx));
static void      thumb_exit                     PARAMS ((FILE *, int, rtx));
static void      thumb_pushpop                  PARAMS ((FILE *, int, int));
static Ccstar    thumb_condition_code           PARAMS ((rtx, int));
static Hint      get_jump_table_size            PARAMS ((rtx));
static Mnode *   move_minipool_fix_forward_ref  PARAMS ((Mnode *, Mnode *, Hint));
static Mnode *   add_minipool_forward_ref       PARAMS ((Mfix *));
static Mnode *   move_minipool_fix_backward_ref PARAMS ((Mnode *, Mnode *, Hint));
static Mnode *   add_minipool_backward_ref      PARAMS ((Mfix *));
static void      assign_minipool_offsets        PARAMS ((Mfix *));
static void      arm_print_value                PARAMS ((FILE *, rtx));
static void      dump_minipool                  PARAMS ((rtx));
static int       arm_barrier_cost               PARAMS ((rtx));
static Mfix *    create_fix_barrier             PARAMS ((Mfix *, Hint));
static void      push_minipool_barrier          PARAMS ((rtx, Hint));
static void      push_minipool_fix              PARAMS ((rtx, Hint, rtx *, Mmode, rtx));
static void      note_invalid_constants         PARAMS ((rtx, Hint));
static int       current_file_function_operand  PARAMS ((rtx));
static Ulong     arm_compute_save_reg0_reg12_mask  PARAMS ((void));
static Ulong     arm_compute_save_reg_mask      PARAMS ((void));
static Ulong     arm_isr_value                  PARAMS ((tree));
static Ulong     arm_compute_func_type          PARAMS ((void));
static tree      arm_handle_fndecl_attribute    PARAMS ((tree *, tree, tree, int, bool *));
static tree      arm_handle_isr_attribute       PARAMS ((tree *, tree, tree, int, bool *));
static void      arm_output_function_epilogue   PARAMS ((FILE *, Hint));
static void      arm_output_function_prologue   PARAMS ((FILE *, Hint));
static void      thumb_output_function_prologue PARAMS ((FILE *, Hint));
static int       arm_comp_type_attributes       PARAMS ((tree, tree));
static void      arm_set_default_type_attributes  PARAMS ((tree));
static int       arm_adjust_cost                PARAMS ((rtx, rtx, rtx, int));
#ifdef OBJECT_FORMAT_ELF
static void      arm_elf_asm_named_section      PARAMS ((const char *, unsigned int));
#endif
/* Initialize the GCC target structure.  */
#ifdef TARGET_DLLIMPORT_DECL_ATTRIBUTES
#undef  TARGET_MERGE_DECL_ATTRIBUTES
#define TARGET_MERGE_DECL_ATTRIBUTES merge_dllimport_decl_attributes
#endif

#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE arm_attribute_table

#ifdef AOF_ASSEMBLER
#undef  TARGET_ASM_BYTE_OP
#define TARGET_ASM_BYTE_OP "\tDCB\t"
#undef  TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\tDCW\t"
#undef  TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\tDCD\t"
#else
#undef  TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP NULL
#undef  TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER arm_assemble_integer
#endif

#undef  TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE arm_output_function_prologue

#undef  TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE arm_output_function_epilogue

#undef  TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES arm_comp_type_attributes

#undef  TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES arm_set_default_type_attributes

#undef  TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS arm_init_builtins

#undef  TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN arm_expand_builtin

#undef  TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST arm_adjust_cost
struct gcc_target targetm = TARGET_INITIALIZER;
/* Obstack for minipool constant handling.  */
static struct obstack minipool_obstack;
static char *         minipool_startobj;

#define obstack_chunk_alloc   xmalloc
#define obstack_chunk_free    free
/* The maximum number of insns skipped which
   will be conditionalised if possible.  */
static int max_insns_skipped = 5;

extern FILE * asm_out_file;
/* True if we are currently building a constant table.  */
int making_const_table;

/* Define the information needed to generate branch insns.  This is
   stored from the compare operation.  */
rtx arm_compare_op0, arm_compare_op1;

/* What type of floating point are we tuning for?  */
enum floating_point_type arm_fpu;

/* What type of floating point instructions are available?  */
enum floating_point_type arm_fpu_arch;

/* What program mode is the cpu running in?  26-bit mode or 32-bit mode.  */
enum prog_mode_type arm_prgmode;

/* Set by the -mfp=... option.  */
const char * target_fp_name = NULL;

/* Used to parse -mstructure_size_boundary command line option.  */
const char * structure_size_string = NULL;
int          arm_structure_size_boundary = DEFAULT_STRUCTURE_SIZE_BOUNDARY;
/* Bit values used to identify processor capabilities.  */
#define FL_CO_PROC    (1 << 0)        /* Has external co-processor bus */
#define FL_FAST_MULT  (1 << 1)        /* Fast multiply */
#define FL_MODE26     (1 << 2)        /* 26-bit mode support */
#define FL_MODE32     (1 << 3)        /* 32-bit mode support */
#define FL_ARCH4      (1 << 4)        /* Architecture rel 4 */
#define FL_ARCH5      (1 << 5)        /* Architecture rel 5 */
#define FL_THUMB      (1 << 6)        /* Thumb aware */
#define FL_LDSCHED    (1 << 7)        /* Load scheduling necessary */
#define FL_STRONG     (1 << 8)        /* StrongARM */
#define FL_ARCH5E     (1 << 9)        /* DSP extensions to v5 */
#define FL_XSCALE     (1 << 10)       /* XScale */
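/* Editor's sketch (not part of the original source): these bits are
   combined and tested with plain bitwise arithmetic throughout this
   file.  CAN_INTERWORK below is a hypothetical example, not a macro
   used elsewhere here.  */
#if 0
#define CAN_INTERWORK(flags) \
  (((flags) & (FL_THUMB | FL_MODE32)) == (FL_THUMB | FL_MODE32))
#endif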
/* The bits in this mask specify which
   instructions we are allowed to generate.  */
static int insn_flags = 0;

/* The bits in this mask specify which instruction scheduling options should
   be used.  Note - there is an overlap with the FL_FAST_MULT.  For some
   hardware we want to be able to generate the multiply instructions, but to
   tune as if they were not present in the architecture.  */
static int tune_flags = 0;

/* The following are used in the arm.md file as equivalents to bits
   in the above two flag variables.  */

/* Nonzero if this is an "M" variant of the processor.  */
int arm_fast_multiply = 0;
/* Nonzero if this chip supports the ARM Architecture 4 extensions.  */
int arm_arch4 = 0;

/* Nonzero if this chip supports the ARM Architecture 5 extensions.  */
int arm_arch5 = 0;

/* Nonzero if this chip supports the ARM Architecture 5E extensions.  */
int arm_arch5e = 0;

/* Nonzero if this chip can benefit from load scheduling.  */
int arm_ld_sched = 0;

/* Nonzero if this chip is a StrongARM.  */
int arm_is_strong = 0;

/* Nonzero if this chip is an XScale.  */
int arm_is_xscale = 0;

/* Nonzero if this chip is an ARM6 or an ARM7.  */
int arm_is_6_or_7 = 0;

/* Nonzero if generating Thumb instructions.  */
int thumb_code = 0;
/* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
   must report the mode of the memory reference from PRINT_OPERAND to
   PRINT_OPERAND_ADDRESS.  */
enum machine_mode output_memory_reference_mode;

/* The register number to be used for the PIC offset register.  */
const char * arm_pic_register_string = NULL;
int arm_pic_register = INVALID_REGNUM;
/* Set to 1 when a return insn is output, this means that the epilogue
   is not needed.  */
int return_used_this_function;

/* Set to 1 after arm_reorg has started.  Reset to start at the start of
   the next function.  */
static int after_arm_reorg = 0;

/* The maximum number of insns to be used when loading a constant.  */
static int arm_constant_limit = 3;
/* For an explanation of these variables, see final_prescan_insn below.  */
int arm_ccfsm_state;
enum arm_cond_code arm_current_cc;
rtx arm_target_insn;
int arm_target_label;

/* The condition codes of the ARM, and the inverse function.  */
static const char * const arm_condition_codes[] =
{
  "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
  "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
};

#define streq(string1, string2) (strcmp (string1, string2) == 0)
/* Initialization code.  */

struct processors
{
  const char *const   name;
  const unsigned int  flags;
};
/* Not all of these give usefully different compilation alternatives,
   but there is no simple way of generalizing them.  */
static const struct processors all_cores[] =
{
  /* ARM Cores */
  {"arm2",       FL_CO_PROC | FL_MODE26 },
  {"arm250",     FL_CO_PROC | FL_MODE26 },
  {"arm3",       FL_CO_PROC | FL_MODE26 },
  {"arm6",       FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm60",      FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm600",     FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm610",                  FL_MODE26 | FL_MODE32 },
  {"arm620",     FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7",       FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  /* arm7m doesn't exist on its own, but only with D, (and I), but
     those don't alter the code, so arm7m is sometimes used.  */
  {"arm7m",      FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7d",      FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dm",     FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7di",     FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dmi",    FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm70",      FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700",     FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700i",    FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm710",                  FL_MODE26 | FL_MODE32 },
  {"arm710t",                 FL_MODE26 | FL_MODE32 | FL_THUMB },
  {"arm720",                  FL_MODE26 | FL_MODE32 },
  {"arm720t",                 FL_MODE26 | FL_MODE32 | FL_THUMB },
  {"arm740t",                 FL_MODE26 | FL_MODE32 | FL_THUMB },
  {"arm710c",                 FL_MODE26 | FL_MODE32 },
  {"arm7100",                 FL_MODE26 | FL_MODE32 },
  {"arm7500",                 FL_MODE26 | FL_MODE32 },
  /* Doesn't have an external co-proc, but does have embedded fpu.  */
  {"arm7500fe",  FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7tdmi",   FL_CO_PROC |             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  {"arm8",                    FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm810",                  FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm9",                                FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm920",                              FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm920t",                             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm940t",                             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm9tdmi",                            FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm9e",                               FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"strongarm",               FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm110",            FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm1100",           FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm1110",           FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"arm10tdmi",                           FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_ARCH5 },
  {"arm1020t",                            FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_ARCH5 },
  {"xscale",                              FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_STRONG | FL_ARCH5 | FL_ARCH5E | FL_XSCALE },

  { NULL, 0 }
};
static const struct processors all_architectures[] =
{
  /* ARM Architectures */

  { "armv2",   FL_CO_PROC | FL_MODE26 },
  { "armv2a",  FL_CO_PROC | FL_MODE26 },
  { "armv3",   FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  { "armv3m",  FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  { "armv4",   FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 },
  /* Strictly, FL_MODE26 is a permitted option for v4t, but there are no
     implementations that support it, so we will leave it out for now.  */
  { "armv4t",  FL_CO_PROC |             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  { "armv5",   FL_CO_PROC |             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
  { "armv5t",  FL_CO_PROC |             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
  { "armv5te", FL_CO_PROC |             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 | FL_ARCH5E },
  { NULL, 0 }
};
/* This is a magic structure.  The 'string' field is magically filled in
   with a pointer to the value specified by the user on the command line
   assuming that the user has specified such a value.  */

struct arm_cpu_select arm_select[] =
{
  /* string       name            processors  */
  { NULL,       "-mcpu=",       all_cores  },
  { NULL,       "-march=",      all_architectures },
  { NULL,       "-mtune=",      all_cores }
};
/* Return the number of bits set in VALUE.  */
static unsigned long
bit_count (value)
     signed int value;
{
  unsigned long count = 0;

  while (value)
    {
      value &= ~(value & -value);
      count++;
    }

  return count;
}
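/* Editor's illustration (not part of the original source): the loop
   above relies on the two's-complement identity that VALUE & -VALUE
   isolates the lowest set bit, so each iteration clears exactly one
   bit.  bit_count_selftest is a hypothetical demonstration only.  */
#if 0
#include <assert.h>

static void
bit_count_selftest (void)
{
  /* 0xF0 has bits 4-7 set, so the loop runs four times.  */
  assert (bit_count (0xF0) == 4);
  /* Clearing the lowest set bit of 6 (binary 110) leaves 4 (binary 100).  */
  assert ((6 & ~(6 & -6)) == 4);
}
#endif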
/* Fix up any incompatible options that the user has specified.
   This has now turned into a maze.  */
void
arm_override_options ()
{
  unsigned i;

  /* Set up the flags based on the cpu/architecture selected by the user.  */
  for (i = ARRAY_SIZE (arm_select); i--;)
    {
      struct arm_cpu_select * ptr = arm_select + i;

      if (ptr->string != NULL && ptr->string[0] != '\0')
        {
          const struct processors * sel;

          for (sel = ptr->processors; sel->name != NULL; sel++)
            if (streq (ptr->string, sel->name))
              {
                if (i == 2)
                  tune_flags = sel->flags;
                else
                  {
                    /* If we have been given an architecture and a processor
                       make sure that they are compatible.  We only generate
                       a warning though, and we prefer the CPU over the
                       architecture.  */
                    if (insn_flags != 0 && (insn_flags ^ sel->flags))
                      warning ("switch -mcpu=%s conflicts with -march= switch",
                               ptr->string);

                    insn_flags = sel->flags;
                  }
                break;
              }

          if (sel->name == NULL)
            error ("bad value (%s) for %s switch", ptr->string, ptr->name);
        }
    }
  /* If the user did not specify a processor, choose one for them.  */
  if (insn_flags == 0)
    {
      const struct processors * sel;
      unsigned int              sought = 0;
      static const struct cpu_default
      {
        const int         cpu;
        const char *const name;
      }
      cpu_defaults[] =
      {
        { TARGET_CPU_arm2,      "arm2" },
        { TARGET_CPU_arm6,      "arm6" },
        { TARGET_CPU_arm610,    "arm610" },
        { TARGET_CPU_arm710,    "arm710" },
        { TARGET_CPU_arm7m,     "arm7m" },
        { TARGET_CPU_arm7500fe, "arm7500fe" },
        { TARGET_CPU_arm7tdmi,  "arm7tdmi" },
        { TARGET_CPU_arm8,      "arm8" },
        { TARGET_CPU_arm810,    "arm810" },
        { TARGET_CPU_arm9,      "arm9" },
        { TARGET_CPU_strongarm, "strongarm" },
        { TARGET_CPU_xscale,    "xscale" },
        { TARGET_CPU_generic,   "arm" },
        { 0, 0 }
      };
      const struct cpu_default * def;

      /* Find the default.  */
      for (def = cpu_defaults; def->name; def++)
        if (def->cpu == TARGET_CPU_DEFAULT)
          break;

      /* Make sure we found the default CPU.  */
      if (def->name == NULL)
        abort ();

      /* Find the default CPU's flags.  */
      for (sel = all_cores; sel->name != NULL; sel++)
        if (streq (def->name, sel->name))
          break;

      if (sel->name == NULL)
        abort ();

      insn_flags = sel->flags;
      /* Now check to see if the user has specified some command line
         switch that require certain abilities from the cpu.  */

      if (TARGET_INTERWORK || TARGET_THUMB)
        {
          sought |= (FL_THUMB | FL_MODE32);

          /* Force apcs-32 to be used for interworking.  */
          target_flags |= ARM_FLAG_APCS_32;

          /* There are no ARM processors that support both APCS-26 and
             interworking.  Therefore we force FL_MODE26 to be removed
             from insn_flags here (if it was set), so that the search
             below will always be able to find a compatible processor.  */
          insn_flags &= ~FL_MODE26;
        }
      else if (!TARGET_APCS_32)
        sought |= FL_MODE26;

      if (sought != 0 && ((sought & insn_flags) != sought))
        {
          /* Try to locate a CPU type that supports all of the abilities
             of the default CPU, plus the extra abilities requested by
             the user.  */
          for (sel = all_cores; sel->name != NULL; sel++)
            if ((sel->flags & sought) == (sought | insn_flags))
              break;

          if (sel->name == NULL)
            {
              unsigned int              current_bit_count = 0;
              const struct processors * best_fit = NULL;

              /* Ideally we would like to issue an error message here
                 saying that it was not possible to find a CPU compatible
                 with the default CPU, but which also supports the command
                 line options specified by the programmer, and so they
                 ought to use the -mcpu=<name> command line option to
                 override the default CPU type.

                 Unfortunately this does not work with multilibing.  We
                 need to be able to support multilibs for -mapcs-26 and for
                 -mthumb-interwork and there is no CPU that can support both
                 options.  Instead if we cannot find a cpu that has both the
                 characteristics of the default cpu and the given command line
                 options we scan the array again looking for a best match.  */
              for (sel = all_cores; sel->name != NULL; sel++)
                if ((sel->flags & sought) == sought)
                  {
                    unsigned int count;

                    count = bit_count (sel->flags & insn_flags);

                    if (count >= current_bit_count)
                      {
                        best_fit = sel;
                        current_bit_count = count;
                      }
                  }

              if (best_fit == NULL)
                abort ();
              else
                sel = best_fit;
            }

          insn_flags = sel->flags;
        }
    }
  /* If tuning has not been specified, tune for whichever processor or
     architecture has been selected.  */
  if (tune_flags == 0)
    tune_flags = insn_flags;

  /* Make sure that the processor choice does not conflict with any of the
     other command line choices.  */
  if (TARGET_APCS_32 && !(insn_flags & FL_MODE32))
    {
      /* If APCS-32 was not the default then it must have been set by the
         user, so issue a warning message.  If the user has specified
         "-mapcs-32 -mcpu=arm2" then we lose here.  */
      if ((TARGET_DEFAULT & ARM_FLAG_APCS_32) == 0)
        warning ("target CPU does not support APCS-32" );
      target_flags &= ~ARM_FLAG_APCS_32;
    }
  else if (!TARGET_APCS_32 && !(insn_flags & FL_MODE26))
    {
      warning ("target CPU does not support APCS-26" );
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_INTERWORK && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support interworking" );
      target_flags &= ~ARM_FLAG_INTERWORK;
    }

  if (TARGET_THUMB && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support THUMB instructions");
      target_flags &= ~ARM_FLAG_THUMB;
    }

  if (TARGET_APCS_FRAME && TARGET_THUMB)
    {
      /* warning ("ignoring -mapcs-frame because -mthumb was used"); */
      target_flags &= ~ARM_FLAG_APCS_FRAME;
    }

  /* TARGET_BACKTRACE calls leaf_function_p, which causes a crash if done
     from here where no function is being compiled currently.  */
  if ((target_flags & (THUMB_FLAG_LEAF_BACKTRACE | THUMB_FLAG_BACKTRACE))
      && TARGET_ARM)
    warning ("enabling backtrace support is only meaningful when compiling for the Thumb");

  if (TARGET_ARM && TARGET_CALLEE_INTERWORKING)
    warning ("enabling callee interworking support is only meaningful when compiling for the Thumb");

  if (TARGET_ARM && TARGET_CALLER_INTERWORKING)
    warning ("enabling caller interworking support is only meaningful when compiling for the Thumb");

  /* If interworking is enabled then APCS-32 must be selected as well.  */
  if (TARGET_INTERWORK)
    {
      if (!TARGET_APCS_32)
        warning ("interworking forces APCS-32 to be used" );
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_APCS_STACK && !TARGET_APCS_FRAME)
    {
      warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
      target_flags |= ARM_FLAG_APCS_FRAME;
    }

  if (TARGET_POKE_FUNCTION_NAME)
    target_flags |= ARM_FLAG_APCS_FRAME;

  if (TARGET_APCS_REENT && flag_pic)
    error ("-fpic and -mapcs-reent are incompatible");

  if (TARGET_APCS_REENT)
    warning ("APCS reentrant code not supported.  Ignored");

  /* If this target is normally configured to use APCS frames, warn if they
     are turned off and debugging is turned on.  */
  if (TARGET_ARM
      && write_symbols != NO_DEBUG
      && !TARGET_APCS_FRAME
      && (TARGET_DEFAULT & ARM_FLAG_APCS_FRAME))
    warning ("-g with -mno-apcs-frame may not give sensible debugging");

  /* If stack checking is disabled, we can use r10 as the PIC register,
     which keeps r9 available.  */
  if (flag_pic)
    arm_pic_register = TARGET_APCS_STACK ? 9 : 10;

  if (TARGET_APCS_FLOAT)
    warning ("passing floating point arguments in fp regs not yet supported");
  /* Initialise boolean versions of the flags, for use in the arm.md file.  */
  arm_fast_multiply = (insn_flags & FL_FAST_MULT) != 0;
  arm_arch4         = (insn_flags & FL_ARCH4) != 0;
  arm_arch5         = (insn_flags & FL_ARCH5) != 0;
  arm_arch5e        = (insn_flags & FL_ARCH5E) != 0;
  arm_is_xscale     = (insn_flags & FL_XSCALE) != 0;

  arm_ld_sched      = (tune_flags & FL_LDSCHED) != 0;
  arm_is_strong     = (tune_flags & FL_STRONG) != 0;
  thumb_code        = (TARGET_ARM == 0);
  arm_is_6_or_7     = (((tune_flags & (FL_MODE26 | FL_MODE32))
                        && !(tune_flags & FL_ARCH4))) != 0;
  /* Default value for floating point code... if no co-processor
     bus, then schedule for emulated floating point.  Otherwise,
     assume the user has an FPA.
     Note: this does not prevent use of floating point instructions,
     -msoft-float does that.  */
  arm_fpu = (tune_flags & FL_CO_PROC) ? FP_HARD : FP_SOFT3;

  if (target_fp_name)
    {
      if (streq (target_fp_name, "2"))
        arm_fpu_arch = FP_SOFT2;
      else if (streq (target_fp_name, "3"))
        arm_fpu_arch = FP_SOFT3;
      else
        error ("invalid floating point emulation option: -mfpe-%s",
               target_fp_name);
    }
  else
    arm_fpu_arch = FP_DEFAULT;

  if (TARGET_FPE && arm_fpu != FP_HARD)
    arm_fpu = FP_SOFT2;

  /* For arm2/3 there is no need to do any scheduling if there is only
     a floating point emulator, or we are doing software floating-point.  */
  if ((TARGET_SOFT_FLOAT || arm_fpu != FP_HARD)
      && (tune_flags & FL_MODE32) == 0)
    flag_schedule_insns = flag_schedule_insns_after_reload = 0;

  arm_prgmode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;
  if (structure_size_string != NULL)
    {
      int size = strtol (structure_size_string, NULL, 0);

      if (size == 8 || size == 32)
        arm_structure_size_boundary = size;
      else
        warning ("structure size boundary can only be set to 8 or 32");
    }

  if (arm_pic_register_string != NULL)
    {
      int pic_register = decode_reg_name (arm_pic_register_string);

      if (!flag_pic)
        warning ("-mpic-register= is useless without -fpic");

      /* Prevent the user from choosing an obviously stupid PIC register.  */
      else if (pic_register < 0 || call_used_regs[pic_register]
               || pic_register == HARD_FRAME_POINTER_REGNUM
               || pic_register == STACK_POINTER_REGNUM
               || pic_register >= PC_REGNUM)
        error ("unable to use '%s' for PIC register", arm_pic_register_string);
      else
        arm_pic_register = pic_register;
    }
  if (TARGET_THUMB && flag_schedule_insns)
    {
      /* Don't warn since it's on by default in -O2.  */
      flag_schedule_insns = 0;
    }

  /* If optimizing for space, don't synthesize constants.
     For processors with load scheduling, it never costs more than 2 cycles
     to load a constant, and the load scheduler may well reduce that to 1.  */
  if (optimize_size || (tune_flags & FL_LDSCHED))
    arm_constant_limit = 1;

  if (arm_is_xscale)
    arm_constant_limit = 2;

  /* If optimizing for size, bump the number of instructions that we
     are prepared to conditionally execute (even on a StrongARM).
     Otherwise for the StrongARM, which has early execution of branches,
     a sequence that is worth skipping is shorter.  */
  if (optimize_size)
    max_insns_skipped = 6;
  else if (arm_is_strong)
    max_insns_skipped = 3;
  /* Register global variables with the garbage collector.  */
  arm_add_gc_roots ();
}

static void
arm_add_gc_roots ()
{
  ggc_add_rtx_root (&arm_compare_op0, 1);
  ggc_add_rtx_root (&arm_compare_op1, 1);
  ggc_add_rtx_root (&arm_target_insn, 1); /* Not sure this is really a root.  */

  gcc_obstack_init (&minipool_obstack);
  minipool_startobj = (char *) obstack_alloc (&minipool_obstack, 0);
}
/* A table of known ARM exception types.
   For use with the interrupt function attribute.  */

typedef struct
{
  const char *const   arg;
  const unsigned long return_value;
}
isr_attribute_arg;

static const isr_attribute_arg isr_attribute_args [] =
{
  { "IRQ",   ARM_FT_ISR },
  { "irq",   ARM_FT_ISR },
  { "FIQ",   ARM_FT_FIQ },
  { "fiq",   ARM_FT_FIQ },
  { "ABORT", ARM_FT_ISR },
  { "abort", ARM_FT_ISR },
  { "UNDEF", ARM_FT_EXCEPTION },
  { "undef", ARM_FT_EXCEPTION },
  { "SWI",   ARM_FT_EXCEPTION },
  { "swi",   ARM_FT_EXCEPTION },
  { NULL,    ARM_FT_NORMAL }
};
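/* Usage sketch (editor's note, illustrative only): the strings above are
   what user code passes to the "isr"/"interrupt" function attribute.  */
#if 0
void fiq_handler (void) __attribute__ ((interrupt ("FIQ")));
void irq_handler (void) __attribute__ ((isr ("IRQ")));
void swi_handler (void) __attribute__ ((interrupt ("SWI")));
#endif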
/* Returns the (interrupt) function type of the current
   function, or ARM_FT_UNKNOWN if the type cannot be determined.  */

static unsigned long
arm_isr_value (argument)
     tree argument;
{
  const isr_attribute_arg * ptr;
  const char *              arg;

  /* No argument - default to IRQ.  */
  if (argument == NULL_TREE)
    return ARM_FT_ISR;

  /* Get the value of the argument.  */
  if (TREE_VALUE (argument) == NULL_TREE
      || TREE_CODE (TREE_VALUE (argument)) != STRING_CST)
    return ARM_FT_UNKNOWN;

  arg = TREE_STRING_POINTER (TREE_VALUE (argument));

  /* Check it against the list of known arguments.  */
  for (ptr = isr_attribute_args; ptr->arg != NULL; ptr++)
    if (streq (arg, ptr->arg))
      return ptr->return_value;

  /* An unrecognised interrupt type.  */
  return ARM_FT_UNKNOWN;
}
/* Computes the type of the current function.  */

static unsigned long
arm_compute_func_type ()
{
  unsigned long type = ARM_FT_UNKNOWN;
  tree a;
  tree attr;

  if (TREE_CODE (current_function_decl) != FUNCTION_DECL)
    abort ();

  /* Decide if the current function is volatile.  Such functions
     never return, and many memory cycles can be saved by not storing
     register values that will never be needed again.  This optimization
     was added to speed up context switching in a kernel application.  */
  if (optimize > 0
      && current_function_nothrow
      && TREE_THIS_VOLATILE (current_function_decl))
    type |= ARM_FT_VOLATILE;

  if (current_function_needs_context)
    type |= ARM_FT_NESTED;

  attr = DECL_ATTRIBUTES (current_function_decl);

  a = lookup_attribute ("naked", attr);
  if (a != NULL_TREE)
    type |= ARM_FT_NAKED;

  if (cfun->machine->eh_epilogue_sp_ofs != NULL_RTX)
    type |= ARM_FT_EXCEPTION_HANDLER;
  else
    {
      a = lookup_attribute ("isr", attr);
      if (a == NULL_TREE)
        a = lookup_attribute ("interrupt", attr);

      if (a == NULL_TREE)
        type |= TARGET_INTERWORK ? ARM_FT_INTERWORKED : ARM_FT_NORMAL;
      else
        type |= arm_isr_value (TREE_VALUE (a));
    }

  return type;
}
/* Returns the type of the current function.  */

unsigned long
arm_current_func_type ()
{
  if (ARM_FUNC_TYPE (cfun->machine->func_type) == ARM_FT_UNKNOWN)
    cfun->machine->func_type = arm_compute_func_type ();

  return cfun->machine->func_type;
}
/* Return 1 if it is possible to return using a single instruction.  */

int
use_return_insn (iscond)
     int iscond;
{
  int regno;
  unsigned int func_type;

  /* Never use a return instruction before reload has run.  */
  if (!reload_completed)
    return 0;

  func_type = arm_current_func_type ();

  /* Naked functions and volatile functions need special
     consideration.  */
  if (func_type & (ARM_FT_VOLATILE | ARM_FT_NAKED))
    return 0;

  /* As do variadic functions.  */
  if (current_function_pretend_args_size
      || cfun->machine->uses_anonymous_args
      /* Or if the function calls __builtin_eh_return () */
      || ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER
      /* Or if there is no frame pointer and there is a stack adjustment.  */
      || ((get_frame_size () + current_function_outgoing_args_size != 0)
          && !frame_pointer_needed))
    return 0;

  /* Can't be done if interworking with Thumb, and any registers have been
     stacked.  Similarly, on StrongARM, conditional returns are expensive
     if they aren't taken and registers have been stacked.  */
  if (iscond && arm_is_strong && frame_pointer_needed)
    return 0;

  if ((iscond && arm_is_strong)
      || TARGET_INTERWORK)
    {
      for (regno = 0; regno <= LAST_ARM_REGNUM; regno++)
        if (regs_ever_live[regno] && !call_used_regs[regno])
          return 0;

      if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
        return 0;
    }

  /* Can't be done if any of the FPU regs are pushed,
     since this also requires an insn.  */
  if (TARGET_HARD_FLOAT)
    for (regno = FIRST_ARM_FP_REGNUM; regno <= LAST_ARM_FP_REGNUM; regno++)
      if (regs_ever_live[regno] && !call_used_regs[regno])
        return 0;

  return 1;
}
/* Return TRUE if int I is a valid immediate ARM constant.  */

int
const_ok_for_arm (i)
     HOST_WIDE_INT i;
{
  unsigned HOST_WIDE_INT mask = ~(unsigned HOST_WIDE_INT)0xFF;

  /* For machines with >32 bit HOST_WIDE_INT, the bits above bit 31 must
     be all zero, or all one.  */
  if ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff) != 0
      && ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff)
          != ((~(unsigned HOST_WIDE_INT) 0)
              & ~(unsigned HOST_WIDE_INT) 0xffffffff)))
    return FALSE;

  /* Fast return for 0 and powers of 2 */
  if ((i & (i - 1)) == 0)
    return TRUE;

  do
    {
      if ((i & mask & (unsigned HOST_WIDE_INT) 0xffffffff) == 0)
        return TRUE;
      mask =
          (mask << 2) | ((mask & (unsigned HOST_WIDE_INT) 0xffffffff)
                         >> (32 - 2)) | ~(unsigned HOST_WIDE_INT) 0xffffffff;
    }
  while (mask != ~(unsigned HOST_WIDE_INT) 0xFF);

  return FALSE;
}
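/* Editor's sketch (not used by the compiler): a stand-alone restatement
   of the rule checked above - a valid ARM immediate is an 8-bit value
   rotated right by an even amount.  For example 0x0000FF00 is valid
   (0xFF rotated right by 24) but 0x00000101 is not.  */
#if 0
static int
is_arm_immediate (unsigned int v)
{
  int rot;

  for (rot = 0; rot < 32; rot += 2)
    {
      /* Rotating the 8-bit field right by ROT is the same as rotating
         V left by ROT; ROT == 0 is special-cased to avoid a shift by 32.  */
      unsigned int rotated = rot ? ((v << rot) | (v >> (32 - rot))) : v;

      if (rotated <= 0xFF)
        return 1;
    }

  return 0;
}
#endif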
/* Return true if I is a valid constant for the operation CODE.  */
static int
const_ok_for_op (i, code)
     HOST_WIDE_INT i;
     enum rtx_code code;
{
  if (const_ok_for_arm (i))
    return 1;

  switch (code)
    {
    case PLUS:
      return const_ok_for_arm (ARM_SIGN_EXTEND (-i));

    case MINUS:         /* Should only occur with (MINUS I reg) => rsb */
    case XOR:
    case IOR:
      return 0;

    case AND:
      return const_ok_for_arm (ARM_SIGN_EXTEND (~i));

    default:
      abort ();
    }
}
/* Emit a sequence of insns to handle a large constant.
   CODE is the code of the operation required, it can be any of SET, PLUS,
   IOR, AND, XOR, MINUS;
   MODE is the mode in which the operation is being performed;
   VAL is the integer to operate on;
   SOURCE is the other operand (a register, or a null-pointer for SET);
   SUBTARGETS means it is safe to create scratch registers if that will
   either produce a simpler sequence, or we will want to cse the values.
   Return value is the number of insns emitted.  */

int
arm_split_constant (code, mode, val, target, source, subtargets)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
{
  if (subtargets || code == SET
      || (GET_CODE (target) == REG && GET_CODE (source) == REG
          && REGNO (target) != REGNO (source)))
    {
      /* After arm_reorg has been called, we can't fix up expensive
         constants by pushing them into memory so we must synthesise
         them in-line, regardless of the cost.  This is only likely to
         be more costly on chips that have load delay slots and we are
         compiling without running the scheduler (so no splitting
         occurred before the final instruction emission).

         Ref: gcc -O1 -mcpu=strongarm gcc.c-torture/compile/980506-2.c
      */
      if (!after_arm_reorg
          && (arm_gen_constant (code, mode, val, target, source, 1, 0)
              > arm_constant_limit + (code != SET)))
        {
          if (code == SET)
            {
              /* Currently SET is the only monadic value for CODE, all
                 the rest are diadic.  */
              emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (val)));
              return 1;
            }
          else
            {
              rtx temp = subtargets ? gen_reg_rtx (mode) : target;

              emit_insn (gen_rtx_SET (VOIDmode, temp, GEN_INT (val)));
              /* For MINUS, the value is subtracted from, since we never
                 have subtraction of a constant.  */
              if (code == MINUS)
                emit_insn (gen_rtx_SET (VOIDmode, target,
                                        gen_rtx_MINUS (mode, temp, source)));
              else
                emit_insn (gen_rtx_SET (VOIDmode, target,
                                        gen_rtx (code, mode, source, temp)));
              return 2;
            }
        }
    }

  return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
}
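/* Worked example (editor's note, not in the original source): with
   CODE == SET and VAL == 0x0000ff00 a single "mov target, #0xff00" can
   be emitted, since the constant is an 8-bit value rotated by an even
   amount.  With VAL == 0x12345678 the call falls through to
   arm_gen_constant, which builds the value one rotated 8-bit chunk at
   a time, for instance:

        mov     target, #0x12000000
        orr     target, target, #0x00340000
        orr     target, target, #0x00005600
        orr     target, target, #0x00000078
*/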
static int
count_insns_for_constant (HOST_WIDE_INT remainder, int i)
{
  HOST_WIDE_INT temp1;
  int num_insns = 0;
  do
    {
      int end;

      if (i <= 0)
        i += 32;
      if (remainder & (3 << (i - 2)))
        {
          end = i - 8;
          if (end < 0)
            end += 32;
          temp1 = remainder & ((0x0ff << end)
                               | ((i < end) ? (0xff >> (32 - end)) : 0));
          remainder &= ~temp1;
          num_insns++;
          i -= 6;
        }
      i -= 2;
    } while (remainder);
  return num_insns;
}
/* As above, but extra parameter GENERATE which, if clear, suppresses
   RTL generation.  */

static int
arm_gen_constant (code, mode, val, target, source, subtargets, generate)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
     int generate;
{
  int can_invert = 0;
  int can_negate = 0;
  int can_negate_initial = 0;
  int can_shift = 0;
  int i;
  int num_bits_set = 0;
  int set_sign_bit_copies = 0;
  int clear_sign_bit_copies = 0;
  int clear_zero_bit_copies = 0;
  int set_zero_bit_copies = 0;
  int insns = 0;
  unsigned HOST_WIDE_INT temp1, temp2;
  unsigned HOST_WIDE_INT remainder = val & 0xffffffff;
  /* Find out which operations are safe for a given CODE.  Also do a quick
     check for degenerate cases; these can occur when DImode operations
     are split.  */
  switch (code)
    {
    case SET:
      can_invert = 1;
      can_shift = 1;
      can_negate = 1;
      break;

    case PLUS:
      can_negate = 1;
      can_negate_initial = 1;
      break;

    case IOR:
      if (remainder == 0xffffffff)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    GEN_INT (ARM_SIGN_EXTEND (val))));
          return 1;
        }
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      break;

    case AND:
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, const0_rtx));
          return 1;
        }
      if (remainder == 0xffffffff)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      can_invert = 1;
      break;

    case XOR:
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      if (remainder == 0xffffffff)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_NOT (mode, source)));
          return 1;
        }

      /* We don't know how to handle this yet below.  */
      abort ();
    case MINUS:
      /* We treat MINUS as (val - source), since (source - val) is always
         passed as (source + (-val)).  */
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_NEG (mode, source)));
          return 1;
        }
      if (const_ok_for_arm (val))
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_MINUS (mode, GEN_INT (val),
                                                   source)));
          return 1;
        }
      can_negate = 1;

      break;

    default:
      abort ();
    }
  /* If we can do it in one insn get out quickly.  */
  if (const_ok_for_arm (val)
      || (can_negate_initial && const_ok_for_arm (-val))
      || (can_invert && const_ok_for_arm (~val)))
    {
      if (generate)
        emit_insn (gen_rtx_SET (VOIDmode, target,
                                (source ? gen_rtx (code, mode, source,
                                                   GEN_INT (val))
                                 : GEN_INT (val))));
      return 1;
    }
  /* Calculate a few attributes that may be useful for specific
     optimizations.  */
  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) == 0)
        clear_sign_bit_copies++;
      else
        break;
    }

  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) != 0)
        set_sign_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) == 0)
        clear_zero_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) != 0)
        set_zero_bit_copies++;
      else
        break;
    }
  switch (code)
    {
    case SET:
      /* See if we can do this by sign_extending a constant that is known
         to be negative.  This is a good way of doing it, since the shift
         may well merge into a subsequent insn.  */
      if (set_sign_bit_copies > 1)
        {
          if (const_ok_for_arm
              (temp1 = ARM_SIGN_EXTEND (remainder
                                        << (set_sign_bit_copies - 1))))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx_SET (VOIDmode, new_src,
                                          GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
          /* For an inverted constant, we will need to set the low bits,
             these will be shifted out of harm's way.  */
          temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
          if (const_ok_for_arm (~temp1))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx_SET (VOIDmode, new_src,
                                          GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
        }
      /* See if we can generate this by setting the bottom (or the top)
         16 bits, and then shifting these into the other half of the
         word.  We only look for the simplest cases, to do more would cost
         too much.  Be careful, however, not to generate this when the
         alternative would take fewer insns.  */
      if (val & 0xffff0000)
        {
          temp1 = remainder & 0xffff0000;
          temp2 = remainder & 0x0000ffff;

          /* Overlaps outside this range are best done using other methods.  */
          for (i = 9; i < 24; i++)
            {
              if ((((temp2 | (temp2 << i)) & 0xffffffff) == remainder)
                  && !const_ok_for_arm (temp2))
                {
                  rtx new_src = (subtargets
                                 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
                                 : target);
                  insns = arm_gen_constant (code, mode, temp2, new_src,
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn (gen_rtx_SET
                               (VOIDmode, target,
                                gen_rtx_IOR (mode,
                                             gen_rtx_ASHIFT (mode, source,
                                                             GEN_INT (i)),
                                             source)));
                  return insns + 1;
                }
            }
          /* Don't duplicate cases already considered.  */
          for (i = 17; i < 24; i++)
            {
              if (((temp1 | (temp1 >> i)) == remainder)
                  && !const_ok_for_arm (temp1))
                {
                  rtx new_src = (subtargets
                                 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
                                 : target);
                  insns = arm_gen_constant (code, mode, temp1, new_src,
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn
                      (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_IOR
                                    (mode,
                                     gen_rtx_LSHIFTRT (mode, source,
                                                       GEN_INT (i)),
                                     source)));
                  return insns + 1;
                }
            }
        }
      break;
    case IOR:
    case XOR:
      /* If we have IOR or XOR, and the constant can be loaded in a
         single instruction, and we can find a temporary to put it in,
         then this can be done in two instructions instead of 3-4.  */
      if (subtargets
          /* TARGET can't be NULL if SUBTARGETS is 0 */
          || (reload_completed && !reg_mentioned_p (target, source)))
        {
          if (const_ok_for_arm (ARM_SIGN_EXTEND (~val)))
            {
              if (generate)
                {
                  rtx sub = subtargets ? gen_reg_rtx (mode) : target;

                  emit_insn (gen_rtx_SET (VOIDmode, sub, GEN_INT (val)));
                  emit_insn (gen_rtx_SET (VOIDmode, target,
                                          gen_rtx (code, mode, source, sub)));
                }
              return 2;
            }
        }

      if (code == XOR)
        break;
      if (set_sign_bit_copies > 8
          && (val & (-1 << (32 - set_sign_bit_copies))) == val)
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (set_sign_bit_copies);

              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_ASHIFT (mode,
                                                                   source,
                                                                   shift))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_LSHIFTRT (mode, sub,
                                                                     shift))));
            }
          return 2;
        }

      if (set_zero_bit_copies > 8
          && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (set_zero_bit_copies);

              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_LSHIFTRT (mode,
                                                                     source,
                                                                     shift))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_ASHIFT (mode, sub,
                                                                   shift))));
            }
          return 2;
        }
      if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~val)))
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode, source)));
              source = sub;
              if (subtargets)
                sub = gen_reg_rtx (mode);
              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_AND (mode, source,
                                                   GEN_INT (temp1))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode, sub)));
            }
          return 3;
        }
      break;
    case AND:
      /* See if two shifts will do 2 or more insn's worth of work.  */
      if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
        {
          HOST_WIDE_INT shift_mask = ((0xffffffff
                                       << (32 - clear_sign_bit_copies))
                                      & 0xffffffff);

          if ((remainder | shift_mask) != 0xffffffff)
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            new_src, source, subtargets, 1);
                  source = new_src;
                }
              else
                {
                  rtx targ = subtargets ? NULL_RTX : target;
                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            targ, source, subtargets, 0);
                }
            }

          if (generate)
            {
              rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (clear_sign_bit_copies);

              emit_insn (gen_ashlsi3 (new_src, source, shift));
              emit_insn (gen_lshrsi3 (target, new_src, shift));
            }

          return insns + 2;
        }
      if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
        {
          HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;

          if ((remainder | shift_mask) != 0xffffffff)
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;

                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            new_src, source, subtargets, 1);
                  source = new_src;
                }
              else
                {
                  rtx targ = subtargets ? NULL_RTX : target;

                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            targ, source, subtargets, 0);
                }
            }

          if (generate)
            {
              rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (clear_zero_bit_copies);

              emit_insn (gen_lshrsi3 (new_src, source, shift));
              emit_insn (gen_ashlsi3 (target, new_src, shift));
            }

          return insns + 2;
        }
      break;

    default:
      break;
    }
  for (i = 0; i < 32; i++)
    if (remainder & (1 << i))
      num_bits_set++;

  if (code == AND || (can_invert && num_bits_set > 16))
    remainder = (~remainder) & 0xffffffff;
  else if (code == PLUS && num_bits_set > 16)
    remainder = (-remainder) & 0xffffffff;
  else
    {
      can_invert = 0;
      can_negate = 0;
    }
  /* Now try and find a way of doing the job in either two or three
     instructions.
     We start by looking for the largest block of zeros that are aligned on
     a 2-bit boundary, we then fill up the temps, wrapping around to the
     top of the word when we drop off the bottom.
     In the worst case this code should produce no more than four insns.  */
  {
    int best_start = 0;
    int best_consecutive_zeros = 0;

    for (i = 0; i < 32; i += 2)
      {
        int consecutive_zeros = 0;

        if (!(remainder & (3 << i)))
          {
            while ((i < 32) && !(remainder & (3 << i)))
              {
                consecutive_zeros += 2;
                i += 2;
              }
            if (consecutive_zeros > best_consecutive_zeros)
              {
                best_consecutive_zeros = consecutive_zeros;
                best_start = i - consecutive_zeros;
              }
            i -= 2;
          }
      }

    /* So long as it won't require any more insns to do so, it's
       desirable to emit a small constant (in bits 0...9) in the last
       insn.  This way there is more chance that it can be combined with
       a later addressing insn to form a pre-indexed load or store
       operation.  Consider:

               *((volatile int *)0xe0000100) = 1;
               *((volatile int *)0xe0000110) = 2;

       We want this to wind up as:

                mov rA, #0xe0000000
                mov rB, #1
                str rB, [rA, #0x100]
                mov rB, #2
                str rB, [rA, #0x110]

       rather than having to synthesize both large constants from scratch.

       Therefore, we calculate how many insns would be required to emit
       the constant starting from `best_start', and also starting from
       zero (ie with bit 31 first to be output).  If `best_start' doesn't
       yield a shorter sequence, we may as well use zero.  */
    if (best_start != 0
        && ((((unsigned HOST_WIDE_INT) 1) << best_start) < remainder)
        && (count_insns_for_constant (remainder, 0) <=
            count_insns_for_constant (remainder, best_start)))
      best_start = 0;
    /* Now start emitting the insns.  */
    i = best_start;
    do
      {
        int end;

        if (i <= 0)
          i += 32;
        if (remainder & (3 << (i - 2)))
          {
            end = i - 8;
            if (end < 0)
              end += 32;
            temp1 = remainder & ((0x0ff << end)
                                 | ((i < end) ? (0xff >> (32 - end)) : 0));
            remainder &= ~temp1;

            if (generate)
              {
                rtx new_src, temp1_rtx;

                if (code == SET || code == MINUS)
                  {
                    new_src = (subtargets ? gen_reg_rtx (mode) : target);
                    if (can_invert && code != MINUS)
                      temp1 = ~temp1;
                  }
                else
                  {
                    if (remainder && subtargets)
                      new_src = gen_reg_rtx (mode);
                    else
                      new_src = target;
                    if (can_invert)
                      temp1 = ~temp1;
                    else if (can_negate)
                      temp1 = -temp1;
                  }

                temp1 = trunc_int_for_mode (temp1, mode);
                temp1_rtx = GEN_INT (temp1);

                if (code == SET)
                  ;
                else if (code == MINUS)
                  temp1_rtx = gen_rtx_MINUS (mode, temp1_rtx, source);
                else
                  temp1_rtx = gen_rtx_fmt_ee (code, mode, source, temp1_rtx);

                emit_insn (gen_rtx_SET (VOIDmode, new_src, temp1_rtx));
                source = new_src;
              }

            if (code == SET)
              {
                can_invert = 0;
                code = PLUS;
              }
            else if (code == MINUS)
              code = PLUS;

            insns++;
            i -= 6;
          }
        i -= 2;
      }
    while (remainder);
  }

  return insns;
}
/* Canonicalize a comparison so that we are more likely to recognize it.
   This can be done for a few constant compares, where we can make the
   immediate value easier to load.  */

enum rtx_code
arm_canonicalize_comparison (code, op1)
     enum rtx_code code;
     rtx * op1;
{
  unsigned HOST_WIDE_INT i = INTVAL (*op1);

  switch (code)
    {
    case EQ:
    case NE:
      return code;

    case GT:
    case LE:
      if (i != ((((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1)) - 1)
          && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
        {
          *op1 = GEN_INT (i + 1);
          return code == GT ? GE : LT;
        }
      break;

    case GE:
    case LT:
      if (i != (((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1))
          && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
        {
          *op1 = GEN_INT (i - 1);
          return code == GE ? GT : LE;
        }
      break;

    case GTU:
    case LEU:
      if (i != ~((unsigned HOST_WIDE_INT) 0)
          && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
        {
          *op1 = GEN_INT (i + 1);
          return code == GTU ? GEU : LTU;
        }
      break;

    case GEU:
    case LTU:
      if (i != 0
          && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
        {
          *op1 = GEN_INT (i - 1);
          return code == GEU ? GTU : LEU;
        }
      break;

    default:
      abort ();
    }

  return code;
}
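/* Worked example (editor's note, not in the original source): 0xffff is
   not a valid ARM immediate (sixteen set bits cannot fit in a rotated
   8-bit field), but 0x10000 is.  So (GT x 0xffff) is canonicalized to
   (GE x 0x10000): the comparison constant becomes loadable in one insn
   and the condition code is adjusted to preserve the meaning.  */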
/* Decide whether a type should be returned in memory (true)
   or in a register (false).  This is called by the macro
   RETURN_IN_MEMORY.  */

int
arm_return_in_memory (type)
     tree type;
{
  if (!AGGREGATE_TYPE_P (type))
    /* All simple types are returned in registers.  */
    return 0;

#ifdef ARM_WINCE
  /* For the arm-wince targets we choose to be compatible with Microsoft's
     ARM and Thumb compilers, which always return aggregates in memory.  */
  return 1;
#else
  /* All structures/unions bigger than one word are returned in memory.
     Also catch the case where int_size_in_bytes returns -1.  In this case
     the aggregate is either huge or of variable size, and in either case
     we will want to return it via memory and not in a register.  */
  if (((unsigned int) int_size_in_bytes (type)) > UNITS_PER_WORD)
    return 1;

  if (TREE_CODE (type) == RECORD_TYPE)
    {
      tree field;

      /* For a struct the APCS says that we only return in a register
         if the type is 'integer like' and every addressable element
         has an offset of zero.  For practical purposes this means
         that the structure can have at most one non bit-field element
         and that this element must be the first one in the structure.  */

      /* Find the first field, ignoring non FIELD_DECL things which will
         have been created by C++.  */
      for (field = TYPE_FIELDS (type);
           field && TREE_CODE (field) != FIELD_DECL;
           field = TREE_CHAIN (field))
        continue;

      if (field == NULL)
        return 0; /* An empty structure.  Allowed by an extension to ANSI C.  */

      /* Check that the first field is valid for returning in a register.  */

      /* ... Floats are not allowed */
      if (FLOAT_TYPE_P (TREE_TYPE (field)))
        return 1;

      /* ... Aggregates that are not themselves valid for returning in
         a register are not allowed.  */
      if (RETURN_IN_MEMORY (TREE_TYPE (field)))
        return 1;

      /* Now check the remaining fields, if any.  Only bitfields are allowed,
         since they are not addressable.  */
      for (field = TREE_CHAIN (field);
           field;
           field = TREE_CHAIN (field))
        {
          if (TREE_CODE (field) != FIELD_DECL)
            continue;

          if (!DECL_BIT_FIELD_TYPE (field))
            return 1;
        }

      return 0;
    }

  if (TREE_CODE (type) == UNION_TYPE)
    {
      tree field;

      /* Unions can be returned in registers if every element is
         integral, or can be returned in an integer register.  */
      for (field = TYPE_FIELDS (type);
           field;
           field = TREE_CHAIN (field))
        {
          if (TREE_CODE (field) != FIELD_DECL)
            continue;

          if (FLOAT_TYPE_P (TREE_TYPE (field)))
            return 1;

          if (RETURN_IN_MEMORY (TREE_TYPE (field)))
            return 1;
        }

      return 0;
    }
#endif /* not ARM_WINCE */

  /* Return all other types in memory.  */
  return 1;
}
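/* Illustrative cases (editor's sketch, not part of the original source):  */
#if 0
struct in_reg  { unsigned hi : 16; unsigned lo : 16; }; /* One word, and only
                                                           bit-fields after the
                                                           first: register.  */
struct in_mem1 { float f; };            /* First field is a float: memory.  */
struct in_mem2 { int a; int b; };       /* Bigger than one word: memory.  */
#endif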
/* Initialize a variable CUM of type CUMULATIVE_ARGS
   for a call to a function whose data type is FNTYPE.
   For a library call, FNTYPE is NULL.  */

void
arm_init_cumulative_args (pcum, fntype, libname, indirect)
     CUMULATIVE_ARGS * pcum;
     tree fntype;
     rtx libname ATTRIBUTE_UNUSED;
     int indirect ATTRIBUTE_UNUSED;
{
  /* On the ARM, the offset starts at 0.  */
  pcum->nregs = ((fntype && aggregate_value_p (TREE_TYPE (fntype))) ? 1 : 0);

  pcum->call_cookie = CALL_NORMAL;

  if (TARGET_LONG_CALLS)
    pcum->call_cookie = CALL_LONG;

  /* Check for long call/short call attributes.  The attributes
     override any command line option.  */
  if (fntype)
    {
      if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (fntype)))
        pcum->call_cookie = CALL_SHORT;
      else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (fntype)))
        pcum->call_cookie = CALL_LONG;
    }
}
/* Determine where to put an argument to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).  */

rtx
arm_function_arg (pcum, mode, type, named)
     CUMULATIVE_ARGS * pcum;
     enum machine_mode mode;
     tree type ATTRIBUTE_UNUSED;
     int named;
{
  if (mode == VOIDmode)
    /* Compute operand 2 of the call insn.  */
    return GEN_INT (pcum->call_cookie);

  if (!named || pcum->nregs >= NUM_ARG_REGS)
    return NULL_RTX;

  return gen_rtx_REG (mode, pcum->nregs);
}
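/* Worked example (editor's note, not in the original source): under the
   APCS the first four argument words are passed in r0-r3.  For
   "int f (int a, int b)" this function returns the rtx for r0 when A is
   laid out and r1 for B; once pcum->nregs reaches NUM_ARG_REGS the
   NULL_RTX return makes further arguments go on the stack.  */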
/* Encode the current state of the #pragma [no_]long_calls.  */
typedef enum
{
  OFF,          /* No #pragma [no_]long_calls is in effect.  */
  LONG,         /* #pragma long_calls is in effect.  */
  SHORT         /* #pragma no_long_calls is in effect.  */
} arm_pragma_enum;

static arm_pragma_enum arm_pragma_long_calls = OFF;

void
arm_pr_long_calls (pfile)
     cpp_reader * pfile ATTRIBUTE_UNUSED;
{
  arm_pragma_long_calls = LONG;
}

void
arm_pr_no_long_calls (pfile)
     cpp_reader * pfile ATTRIBUTE_UNUSED;
{
  arm_pragma_long_calls = SHORT;
}

void
arm_pr_long_calls_off (pfile)
     cpp_reader * pfile ATTRIBUTE_UNUSED;
{
  arm_pragma_long_calls = OFF;
}
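/* Usage sketch (editor's note, illustrative only): declarations between
   the pragmas pick up the corresponding default attribute.  */
#if 0
#pragma long_calls
extern void far_away (void);    /* Gets the long_call attribute.  */
#pragma no_long_calls
extern void near_by (void);     /* Gets the short_call attribute.  */
#pragma long_calls_off
extern void whatever (void);    /* Back to the command line default.  */
#endif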
/* Table of machine attributes.  */
const struct attribute_spec arm_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  /* Function calls made to this symbol must be done indirectly, because
     it may lie outside of the 26 bit addressing range of a normal function
     call.  */
  { "long_call",    0, 0, false, true,  true,  NULL },
  /* Whereas these functions are always known to reside within the 26 bit
     addressing range.  */
  { "short_call",   0, 0, false, true,  true,  NULL },
  /* Interrupt Service Routines have special prologue and epilogue requirements.  */
  { "isr",          0, 1, false, false, false, arm_handle_isr_attribute },
  { "interrupt",    0, 1, false, false, false, arm_handle_isr_attribute },
  { "naked",        0, 0, true,  false, false, arm_handle_fndecl_attribute },
#ifdef ARM_PE
  /* ARM/PE has three new attributes:
     interfacearm - ?
     dllexport - for exporting a function/variable that will live in a dll
     dllimport - for importing a function/variable from a dll

     Microsoft allows multiple declspecs in one __declspec, separating
     them with spaces.  We do NOT support this.  Instead, use __declspec
     multiple times.
  */
  { "dllimport",    0, 0, true,  false, false, NULL },
  { "dllexport",    0, 0, true,  false, false, NULL },
  { "interfacearm", 0, 0, true,  false, false, arm_handle_fndecl_attribute },
#endif
  { NULL,           0, 0, false, false, false, NULL }
};
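/* Usage sketch (editor's note, illustrative only):  */
#if 0
void far_func (void) __attribute__ ((long_call));   /* Always called indirectly.  */
void near_func (void) __attribute__ ((short_call)); /* Always a direct BL.  */
void irq_func (void) __attribute__ ((isr ("IRQ"))); /* ISR prologue/epilogue.  */
void asm_func (void) __attribute__ ((naked));       /* No prologue/epilogue.  */
#endif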
/* Handle an attribute requiring a FUNCTION_DECL;
   arguments as in struct attribute_spec.handler.  */

static tree
arm_handle_fndecl_attribute (node, name, args, flags, no_add_attrs)
     tree * node;
     tree   name;
     tree   args ATTRIBUTE_UNUSED;
     int    flags ATTRIBUTE_UNUSED;
     bool * no_add_attrs;
{
  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning ("`%s' attribute only applies to functions",
               IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
/* Handle an "interrupt" or "isr" attribute;
   arguments as in struct attribute_spec.handler.  */

static tree
arm_handle_isr_attribute (node, name, args, flags, no_add_attrs)
     tree * node;
     tree   name;
     tree   args;
     int    flags;
     bool * no_add_attrs;
{
  if (DECL_P (*node))
    {
      if (TREE_CODE (*node) != FUNCTION_DECL)
        {
          warning ("`%s' attribute only applies to functions",
                   IDENTIFIER_POINTER (name));
          *no_add_attrs = true;
        }
      /* FIXME: the argument if any is checked for type attributes;
         should it be checked for decl ones?  */
    }
  else
    {
      if (TREE_CODE (*node) == FUNCTION_TYPE
          || TREE_CODE (*node) == METHOD_TYPE)
        {
          if (arm_isr_value (args) == ARM_FT_UNKNOWN)
            {
              warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
              *no_add_attrs = true;
            }
        }
      else if (TREE_CODE (*node) == POINTER_TYPE
               && (TREE_CODE (TREE_TYPE (*node)) == FUNCTION_TYPE
                   || TREE_CODE (TREE_TYPE (*node)) == METHOD_TYPE)
               && arm_isr_value (args) != ARM_FT_UNKNOWN)
        {
          *node = build_type_copy (*node);
          TREE_TYPE (*node) = build_type_attribute_variant
            (TREE_TYPE (*node),
             tree_cons (name, args, TYPE_ATTRIBUTES (TREE_TYPE (*node))));
          *no_add_attrs = true;
        }
      else
        {
          /* Possibly pass this attribute on from the type to a decl.  */
          if (flags & ((int) ATTR_FLAG_DECL_NEXT
                       | (int) ATTR_FLAG_FUNCTION_NEXT
                       | (int) ATTR_FLAG_ARRAY_NEXT))
            {
              *no_add_attrs = true;
              return tree_cons (name, args, NULL_TREE);
            }
          else
            warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
        }
    }

  return NULL_TREE;
}
/* Return 0 if the attributes for two types are incompatible, 1 if they
   are compatible, and 2 if they are nearly compatible (which causes a
   warning to be generated).  */

static int
arm_comp_type_attributes (type1, type2)
     tree type1;
     tree type2;
{
  int l1, l2, s1, s2;

  /* Check for mismatch of non-default calling convention.  */
  if (TREE_CODE (type1) != FUNCTION_TYPE)
    return 1;

  /* Check for mismatched call attributes.  */
  l1 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type1)) != NULL;
  l2 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type2)) != NULL;
  s1 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type1)) != NULL;
  s2 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type2)) != NULL;

  /* Only bother to check if an attribute is defined.  */
  if (l1 | l2 | s1 | s2)
    {
      /* If one type has an attribute, the other must have the same attribute.  */
      if ((l1 != l2) || (s1 != s2))
        return 0;

      /* Disallow mixed attributes.  */
      if ((l1 & s2) || (l2 & s1))
        return 0;
    }

  /* Check for mismatched ISR attribute.  */
  l1 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type1)) != NULL;
  if (! l1)
    l1 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type1)) != NULL;
  l2 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type2)) != NULL;
  if (! l2)
    l2 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type2)) != NULL;
  if (l1 != l2)
    return 0;

  return 1;
}
/* Encode long_call or short_call attribute by prefixing
   symbol name in DECL with a special character FLAG.  */

void
arm_encode_call_attribute (decl, flag)
     tree decl;
     int flag;
{
  const char * str = XSTR (XEXP (DECL_RTL (decl), 0), 0);
  int          len = strlen (str);
  char *       newstr;

  /* Do not allow weak functions to be treated as short call.  */
  if (DECL_WEAK (decl) && flag == SHORT_CALL_FLAG_CHAR)
    return;

  newstr = alloca (len + 2);
  newstr[0] = flag;
  strcpy (newstr + 1, str);

  newstr = (char *) ggc_alloc_string (newstr, len + 1);
  XSTR (XEXP (DECL_RTL (decl), 0), 0) = newstr;
}
/* Assigns default attributes to newly defined type.  This is used to
   set short_call/long_call attributes for function types of
   functions defined inside corresponding #pragma scopes.  */

static void
arm_set_default_type_attributes (type)
     tree type;
{
  /* Add __attribute__ ((long_call)) to all functions, when
     inside #pragma long_calls or __attribute__ ((short_call)),
     when inside #pragma no_long_calls.  */
  if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
    {
      tree type_attr_list, attr_name;
      type_attr_list = TYPE_ATTRIBUTES (type);

      if (arm_pragma_long_calls == LONG)
        attr_name = get_identifier ("long_call");
      else if (arm_pragma_long_calls == SHORT)
        attr_name = get_identifier ("short_call");
      else
        return;

      type_attr_list = tree_cons (attr_name, NULL_TREE, type_attr_list);
      TYPE_ATTRIBUTES (type) = type_attr_list;
    }
}
/* Return 1 if the operand is a SYMBOL_REF for a function known to be
   defined within the current compilation unit.  If this cannot be
   determined, then 0 is returned.  */

static int
current_file_function_operand (sym_ref)
     rtx sym_ref;
{
  /* This is a bit of a fib.  A function will have a short call flag
     applied to its name if it has the short call attribute, or it has
     already been defined within the current compilation unit.  */
  if (ENCODED_SHORT_CALL_ATTR_P (XSTR (sym_ref, 0)))
    return 1;

  /* The current function is always defined within the current compilation
     unit.  If it is a weak definition however, then this may not be the real
     definition of the function, and so we have to say no.  */
  if (sym_ref == XEXP (DECL_RTL (current_function_decl), 0)
      && !DECL_WEAK (current_function_decl))
    return 1;

  /* We cannot make the determination - default to returning 0.  */
  return 0;
}
/* Return non-zero if a 32 bit "long_call" should be generated for
   this call.  We generate a long_call if the function:

        a.  has an __attribute__ ((long_call))
     or b.  is within the scope of a #pragma long_calls
     or c.  the -mlong-calls command line switch has been specified

   However we do not generate a long call if the function:

        d.  has an __attribute__ ((short_call))
     or e.  is inside the scope of a #pragma no_long_calls
     or f.  has an __attribute__ ((section))
     or g.  is defined within the current compilation unit.

   This function will be called by C fragments contained in the machine
   description file.  CALL_REF and CALL_COOKIE correspond to the matched
   rtl operands.  CALL_SYMBOL is used to distinguish between
   two different callers of the function.  It is set to 1 in the
   "call_symbol" and "call_symbol_value" patterns and to 0 in the "call"
   and "call_value" patterns.  This is because of the difference in the
   SYM_REFs passed by these patterns.  */

int
arm_is_longcall_p (sym_ref, call_cookie, call_symbol)
     rtx sym_ref;
     int call_cookie;
     int call_symbol;
{
  if (!call_symbol)
    {
      if (GET_CODE (sym_ref) != MEM)
        return 0;

      sym_ref = XEXP (sym_ref, 0);
    }

  if (GET_CODE (sym_ref) != SYMBOL_REF)
    return 0;

  if (call_cookie & CALL_SHORT)
    return 0;

  if (TARGET_LONG_CALLS && flag_function_sections)
    return 1;

  if (current_file_function_operand (sym_ref))
    return 0;

  return (call_cookie & CALL_LONG)
    || ENCODED_LONG_CALL_ATTR_P (XSTR (sym_ref, 0))
    || TARGET_LONG_CALLS;
}
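/* Illustrative example (not part of this file): a call through a
   declaration such as

     extern void remote (void) __attribute__ ((long_call));

   satisfies criterion (a) above, so the call is expanded as a full
   32-bit address load plus an indirect branch rather than a plain
   "bl", which only has a limited branch range.  */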
/* Return non-zero if it is ok to make a tail-call to DECL.  */

int
arm_function_ok_for_sibcall (decl)
     tree decl;
{
  int call_type = TARGET_LONG_CALLS ? CALL_LONG : CALL_NORMAL;

  /* Never tailcall something for which we have no decl, or if we
     are in Thumb mode.  */
  if (decl == NULL || TARGET_THUMB)
    return 0;

  /* Get the calling method.  */
  if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
    call_type = CALL_SHORT;
  else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
    call_type = CALL_LONG;

  /* Cannot tail-call to long calls, since these are out of range of
     a branch instruction.  However, if not compiling PIC, we know
     we can reach the symbol if it is in this compilation unit.  */
  if (call_type == CALL_LONG && (flag_pic || !TREE_ASM_WRITTEN (decl)))
    return 0;

  /* If we are interworking and the function is not declared static
     then we can't tail-call it unless we know that it exists in this
     compilation unit (since it might be a Thumb routine).  */
  if (TARGET_INTERWORK && TREE_PUBLIC (decl) && !TREE_ASM_WRITTEN (decl))
    return 0;

  /* Never tailcall from an ISR routine - it needs a special exit sequence.  */
  if (IS_INTERRUPT (arm_current_func_type ()))
    return 0;

  /* Everything else is ok.  */
  return 1;
}
int
legitimate_pic_operand_p (x)
     rtx x;
{
  if (CONSTANT_P (x)
      && flag_pic
      && (GET_CODE (x) == SYMBOL_REF
          || (GET_CODE (x) == CONST
              && GET_CODE (XEXP (x, 0)) == PLUS
              && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)))
    return 0;

  return 1;
}
rtx
legitimize_pic_address (orig, mode, reg)
     rtx orig;
     enum machine_mode mode;
     rtx reg;
{
  if (GET_CODE (orig) == SYMBOL_REF
      || GET_CODE (orig) == LABEL_REF)
    {
#ifndef AOF_ASSEMBLER
      rtx pic_ref, address;
#endif
      rtx insn;

      if (reg == 0)
        reg = gen_reg_rtx (Pmode);

#ifdef AOF_ASSEMBLER
      /* The AOF assembler can generate relocations for these directly, and
         understands that the PIC register has to be added into the offset.  */
      insn = emit_insn (gen_pic_load_addr_based (reg, orig));
#else
      address = gen_reg_rtx (Pmode);

      if (TARGET_ARM)
        emit_insn (gen_pic_load_addr_arm (address, orig));
      else
        emit_insn (gen_pic_load_addr_thumb (address, orig));

      if (GET_CODE (orig) == LABEL_REF
          || (GET_CODE (orig) == SYMBOL_REF
              && ENCODED_SHORT_CALL_ATTR_P (XSTR (orig, 0))))
        pic_ref = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, address);
      else
        {
          pic_ref = gen_rtx_MEM (Pmode,
                                 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
                                               address));
          RTX_UNCHANGING_P (pic_ref) = 1;
        }

      insn = emit_move_insn (reg, pic_ref);
#endif
      current_function_uses_pic_offset_table = 1;
      /* Put a REG_EQUAL note on this insn, so that it can be optimized
         by loop.  */
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, orig,
                                            REG_NOTES (insn));
      return reg;
    }
  else if (GET_CODE (orig) == CONST)
    {
      rtx base, offset;

      if (GET_CODE (XEXP (orig, 0)) == PLUS
          && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
        return orig;

      if (reg == 0)
        reg = gen_reg_rtx (Pmode);

      if (GET_CODE (XEXP (orig, 0)) == PLUS)
        {
          base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
          offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
                                           base == reg ? 0 : reg);
        }

      if (GET_CODE (offset) == CONST_INT)
        {
          /* The base register doesn't really matter, we only want to
             test the index for the appropriate mode.  */
          ARM_GO_IF_LEGITIMATE_INDEX (mode, 0, offset, win);

          if (!no_new_pseudos)
            offset = force_reg (Pmode, offset);

        win:
          if (GET_CODE (offset) == CONST_INT)
            return plus_constant (base, INTVAL (offset));
        }

      if (GET_MODE_SIZE (mode) > 4
          && (GET_MODE_CLASS (mode) == MODE_INT
              || TARGET_SOFT_FLOAT))
        {
          emit_insn (gen_addsi3 (reg, base, offset));
          return reg;
        }

      return gen_rtx_PLUS (Pmode, base, offset);
    }

  return orig;
}
/* Generate code to load the PIC register.  PROLOGUE is true if
   called from arm_expand_prologue (in which case we want the
   generated insns at the start of the function); false if called
   by an exception receiver that needs the PIC register reloaded
   (in which case the insns are just dumped at the current location).  */

void
arm_finalize_pic (prologue)
     int prologue ATTRIBUTE_UNUSED;
{
#ifndef AOF_ASSEMBLER
  rtx l1, pic_tmp, pic_tmp2, seq, pic_rtx;
  rtx global_offset_table;

  if (current_function_uses_pic_offset_table == 0 || TARGET_SINGLE_PIC_BASE)
    return;

  if (!flag_pic)
    abort ();

  start_sequence ();
  l1 = gen_label_rtx ();

  global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
  /* On the ARM the PC register contains 'dot + 8' at the time of the
     addition, on the Thumb it is 'dot + 4'.  */
  pic_tmp = plus_constant (gen_rtx_LABEL_REF (Pmode, l1), TARGET_ARM ? 8 : 4);
  if (GOT_PCREL)
    pic_tmp2 = gen_rtx_CONST (VOIDmode,
                              gen_rtx_PLUS (Pmode, global_offset_table,
                                            pc_rtx));
  else
    pic_tmp2 = gen_rtx_CONST (VOIDmode, global_offset_table);

  pic_rtx = gen_rtx_CONST (Pmode, gen_rtx_MINUS (Pmode, pic_tmp2, pic_tmp));

  if (TARGET_ARM)
    {
      emit_insn (gen_pic_load_addr_arm (pic_offset_table_rtx, pic_rtx));
      emit_insn (gen_pic_add_dot_plus_eight (pic_offset_table_rtx, l1));
    }
  else
    {
      emit_insn (gen_pic_load_addr_thumb (pic_offset_table_rtx, pic_rtx));
      emit_insn (gen_pic_add_dot_plus_four (pic_offset_table_rtx, l1));
    }

  seq = gen_sequence ();
  end_sequence ();
  if (prologue)
    emit_insn_after (seq, get_insns ());
  else
    emit_insn (seq);

  /* Need to emit this whether or not we obey regdecls,
     since setjmp/longjmp can cause life info to screw up.  */
  emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
#endif /* AOF_ASSEMBLER */
}
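/* A sketch (assuming the usual ELF/GNU assembler configuration, not
   AOF) of the assembly this emits for TARGET_ARM:

       ldr  sl, .Lgot       @ _GLOBAL_OFFSET_TABLE_ - (.L1 + 8)
     .L1:
       add  sl, pc, sl      @ pic_add_dot_plus_eight

   The 8 (or 4 on Thumb) compensates for the pipeline offset visible
   in the PC register at the time of the addition.  */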
#define REG_OR_SUBREG_REG(X)                                            \
  (GET_CODE (X) == REG                                                  \
   || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))

#define REG_OR_SUBREG_RTX(X)                    \
  (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))

#ifndef COSTS_N_INSNS
#define COSTS_N_INSNS(N) ((N) * 4 - 2)
#endif
static int
arm_rtx_costs (x, code, outer)
     rtx x;
     enum rtx_code code;
     enum rtx_code outer;
{
  enum machine_mode mode = GET_MODE (x);
  enum rtx_code subcode;
  int extra_cost;

  if (TARGET_THUMB)
    {
      switch (code)
        {
        case ASHIFT:
        case ASHIFTRT:
        case LSHIFTRT:
        case ROTATERT:
        case PLUS:
        case MINUS:
        case COMPARE:
        case NEG:
        case NOT:
          return COSTS_N_INSNS (1);

        case MULT:
          if (GET_CODE (XEXP (x, 1)) == CONST_INT)
            {
              int cycles = 0;
              unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));

              while (i)
                {
                  i >>= 2;
                  cycles++;
                }
              return COSTS_N_INSNS (2) + cycles;
            }
          return COSTS_N_INSNS (1) + 16;

        case SET:
          return (COSTS_N_INSNS (1)
                  + 4 * ((GET_CODE (SET_SRC (x)) == MEM)
                         + GET_CODE (SET_DEST (x)) == MEM));

        case CONST_INT:
          if (outer == SET)
            {
              if ((unsigned HOST_WIDE_INT) INTVAL (x) < 256)
                return 0;
              if (thumb_shiftable_const (INTVAL (x)))
                return COSTS_N_INSNS (2);
              return COSTS_N_INSNS (3);
            }
          else if (outer == PLUS
                   && INTVAL (x) < 256 && INTVAL (x) > -256)
            return 0;
          else if (outer == COMPARE
                   && (unsigned HOST_WIDE_INT) INTVAL (x) < 256)
            return 0;
          else if (outer == ASHIFT || outer == ASHIFTRT
                   || outer == LSHIFTRT)
            return 0;
          return COSTS_N_INSNS (2);

        case CONST:
        case CONST_DOUBLE:
        case LABEL_REF:
        case SYMBOL_REF:
          return COSTS_N_INSNS (3);

        case MEM:
          /* XXX another guess.  */
          /* Memory costs quite a lot for the first word, but subsequent words
             load at the equivalent of a single insn each.  */
          return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
                  + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));

        case IF_THEN_ELSE:
          /* XXX a guess.  */
          if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
            return 14;
          return 2;

        case ZERO_EXTEND:
          /* XXX still guessing.  */
          switch (GET_MODE (XEXP (x, 0)))
            {
            case QImode:
              return (1 + (mode == DImode ? 4 : 0)
                      + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

            case HImode:
              return (4 + (mode == DImode ? 4 : 0)
                      + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

            case SImode:
              return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

            default:
              return 99;
            }

        default:
          fprintf (stderr, "unexpected code for thumb in rtx_costs: %s\n",
                   GET_RTX_NAME (code));
          abort ();
        }
    }

  switch (code)
    {
    case MEM:
      /* Memory costs quite a lot for the first word, but subsequent words
         load at the equivalent of a single insn each.  */
      return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
              + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));

    case ROTATE:
      if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
        return 4;
      /* Fall through */
    case ROTATERT:
      if (mode != SImode)
        return 8;
      /* Fall through */
    case ASHIFT: case LSHIFTRT: case ASHIFTRT:
      if (mode == DImode)
        return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
                + ((GET_CODE (XEXP (x, 0)) == REG
                    || (GET_CODE (XEXP (x, 0)) == SUBREG
                        && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
                   ? 0 : 8));
      return (1 + ((GET_CODE (XEXP (x, 0)) == REG
                    || (GET_CODE (XEXP (x, 0)) == SUBREG
                        && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
                   ? 0 : 4)
              + ((GET_CODE (XEXP (x, 1)) == REG
                  || (GET_CODE (XEXP (x, 1)) == SUBREG
                      && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
                  || (GET_CODE (XEXP (x, 1)) == CONST_INT))
                 ? 0 : 4));

    case MINUS:
      if (mode == DImode)
        return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
                + ((REG_OR_SUBREG_REG (XEXP (x, 0))
                    || (GET_CODE (XEXP (x, 0)) == CONST_INT
                        && const_ok_for_arm (INTVAL (XEXP (x, 0)))))
                   ? 0 : 8));

      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
        return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
                      || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
                          && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
                     ? 0 : 8)
                + ((REG_OR_SUBREG_REG (XEXP (x, 0))
                    || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
                        && const_double_rtx_ok_for_fpu (XEXP (x, 0))))
                   ? 0 : 8));

      if (((GET_CODE (XEXP (x, 0)) == CONST_INT
            && const_ok_for_arm (INTVAL (XEXP (x, 0)))
            && REG_OR_SUBREG_REG (XEXP (x, 1))))
          || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
               || subcode == ASHIFTRT || subcode == LSHIFTRT
               || subcode == ROTATE || subcode == ROTATERT
               || (subcode == MULT
                   && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
                   && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
                        (INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
              && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
              && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
                  || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
              && REG_OR_SUBREG_REG (XEXP (x, 0))))
        return 1;
      /* Fall through */

    case PLUS:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
        return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
                + ((REG_OR_SUBREG_REG (XEXP (x, 1))
                    || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
                        && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
                   ? 0 : 8));

      /* Fall through */
    case AND: case XOR: case IOR:
      extra_cost = 0;

      /* Normally the frame registers will be split into reg+const during
         reload, so it is a bad idea to combine them with other instructions,
         since then they might not be moved outside of loops.  As a compromise
         we allow integration with ops that have a constant as their second
         operand.  */
      if ((REG_OR_SUBREG_REG (XEXP (x, 0))
           && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
           && GET_CODE (XEXP (x, 1)) != CONST_INT)
          || (REG_OR_SUBREG_REG (XEXP (x, 0))
              && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))))
        extra_cost = 4;

      if (mode == DImode)
        return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
                + ((REG_OR_SUBREG_REG (XEXP (x, 1))
                    || (GET_CODE (XEXP (x, 1)) == CONST_INT
                        && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
                   ? 0 : 8));

      if (REG_OR_SUBREG_REG (XEXP (x, 0)))
        return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
                + ((REG_OR_SUBREG_REG (XEXP (x, 1))
                    || (GET_CODE (XEXP (x, 1)) == CONST_INT
                        && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
                   ? 0 : 4));
      else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
        return (1 + extra_cost
                + ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
                     || subcode == LSHIFTRT || subcode == ASHIFTRT
                     || subcode == ROTATE || subcode == ROTATERT
                     || (subcode == MULT
                         && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
                         && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
                              (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0)))
                    && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
                    && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
                        || GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
                   ? 0 : 4));

      return 8;

    case MULT:
      /* There is no point basing this on the tuning, since it is always the
         fast variant if it exists at all.  */
      if (arm_fast_multiply && mode == DImode
          && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
          && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
              || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
        return 8;

      if (GET_MODE_CLASS (mode) == MODE_FLOAT
          || mode == DImode)
        return 30;

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        {
          unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
                                      & (unsigned HOST_WIDE_INT) 0xffffffff);
          int add_cost = const_ok_for_arm (i) ? 4 : 8;
          int j;

          /* Tune as appropriate.  */
          int booth_unit_size = ((tune_flags & FL_FAST_MULT) ? 8 : 2);

          for (j = 0; i && j < 32; j += booth_unit_size)
            {
              i >>= booth_unit_size;
              add_cost += 2;
            }

          return add_cost;
        }

      return (((tune_flags & FL_FAST_MULT) ? 8 : 30)
              + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
              + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));

    case TRUNCATE:
      if (arm_fast_multiply && mode == SImode
          && GET_CODE (XEXP (x, 0)) == LSHIFTRT
          && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
          && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
              == GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)))
          && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ZERO_EXTEND
              || GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SIGN_EXTEND))
        return 8;
      return 99;

    case NEG:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
        return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
      /* Fall through */
    case NOT:
      if (mode == DImode)
        return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);

      return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);

    case IF_THEN_ELSE:
      if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
        return 14;
      return 2;

    case ABS:
      return 4 + (mode == DImode ? 4 : 0);

    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == QImode)
        return (4 + (mode == DImode ? 4 : 0)
                + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
      /* Fall through */
    case ZERO_EXTEND:
      switch (GET_MODE (XEXP (x, 0)))
        {
        case QImode:
          return (1 + (mode == DImode ? 4 : 0)
                  + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

        case HImode:
          return (4 + (mode == DImode ? 4 : 0)
                  + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

        case SImode:
          return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

        default:
          break;
        }
      abort ();

    case CONST_INT:
      if (const_ok_for_arm (INTVAL (x)))
        return outer == SET ? 2 : -1;
      else if (outer == AND
               && const_ok_for_arm (~INTVAL (x)))
        return -1;
      else if ((outer == COMPARE
                || outer == PLUS || outer == MINUS)
               && const_ok_for_arm (-INTVAL (x)))
        return -1;
      else
        return 5;

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      return 6;

    case CONST_DOUBLE:
      if (const_double_rtx_ok_for_fpu (x))
        return outer == SET ? 2 : -1;
      else if ((outer == COMPARE || outer == PLUS)
               && neg_const_double_rtx_ok_for_fpu (x))
        return -1;
      return 7;

    default:
      return 99;
    }
}
int
arm_adjust_cost (insn, link, dep, cost)
     rtx insn;
     rtx link;
     rtx dep;
     int cost;
{
  rtx i_pat, d_pat;

  /* Some true dependencies can have a higher cost depending
     on precisely how certain input operands are used.  */
  if (arm_is_xscale
      && REG_NOTE_KIND (link) == 0
      && recog_memoized (insn) >= 0
      && recog_memoized (dep) >= 0)
    {
      int shift_opnum = get_attr_shift (insn);
      enum attr_type attr_type = get_attr_type (dep);

      /* If nonzero, SHIFT_OPNUM contains the operand number of a shifted
         operand for INSN.  If we have a shifted input operand and the
         instruction we depend on is another ALU instruction, then we may
         have to account for an additional stall.  */
      if (shift_opnum != 0 && attr_type == TYPE_NORMAL)
        {
          rtx shifted_operand;
          int opno;

          /* Get the shifted operand.  */
          extract_insn (insn);
          shifted_operand = recog_data.operand[shift_opnum];

          /* Iterate over all the operands in DEP.  If we write an operand
             that overlaps with SHIFTED_OPERAND, then we have to increase
             the cost of this dependency.  */
          extract_insn (dep);
          preprocess_constraints ();
          for (opno = 0; opno < recog_data.n_operands; opno++)
            {
              /* We can ignore strict inputs.  */
              if (recog_data.operand_type[opno] == OP_IN)
                continue;

              if (reg_overlap_mentioned_p (recog_data.operand[opno],
                                           shifted_operand))
                return 2;
            }
        }
    }

  /* XXX This is not strictly true for the FPA.  */
  if (REG_NOTE_KIND (link) == REG_DEP_ANTI
      || REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
    return 0;

  /* Call insns don't incur a stall, even if they follow a load.  */
  if (REG_NOTE_KIND (link) == 0
      && GET_CODE (insn) == CALL_INSN)
    return 1;

  if ((i_pat = single_set (insn)) != NULL
      && GET_CODE (SET_SRC (i_pat)) == MEM
      && (d_pat = single_set (dep)) != NULL
      && GET_CODE (SET_DEST (d_pat)) == MEM)
    {
      /* This is a load after a store, there is no conflict if the load reads
         from a cached area.  Assume that loads from the stack, and from the
         constant pool are cached, and that others will miss.  This is a
         hack.  */
      if (CONSTANT_POOL_ADDRESS_P (XEXP (SET_SRC (i_pat), 0))
          || reg_mentioned_p (stack_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
          || reg_mentioned_p (frame_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
          || reg_mentioned_p (hard_frame_pointer_rtx,
                              XEXP (SET_SRC (i_pat), 0)))
        return 1;
    }

  return cost;
}
/* This code has been fixed for cross compilation.  */

static int fpa_consts_inited = 0;

static const char * const strings_fpa[8] =
{
  "0",   "1",   "2",   "3",
  "4",   "5",   "0.5", "10"
};

static REAL_VALUE_TYPE values_fpa[8];

static void
init_fpa_table ()
{
  int i;
  REAL_VALUE_TYPE r;

  for (i = 0; i < 8; i++)
    {
      r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
      values_fpa[i] = r;
    }

  fpa_consts_inited = 1;
}

/* Return TRUE if rtx X is a valid immediate FPU constant.  */

int
const_double_rtx_ok_for_fpu (x)
     rtx x;
{
  REAL_VALUE_TYPE r;
  int i;

  if (!fpa_consts_inited)
    init_fpa_table ();

  REAL_VALUE_FROM_CONST_DOUBLE (r, x);
  if (REAL_VALUE_MINUS_ZERO (r))
    return 0;

  for (i = 0; i < 8; i++)
    if (REAL_VALUES_EQUAL (r, values_fpa[i]))
      return 1;

  return 0;
}

/* Return TRUE if rtx X is a valid immediate FPU constant when negated.  */

int
neg_const_double_rtx_ok_for_fpu (x)
     rtx x;
{
  REAL_VALUE_TYPE r;
  int i;

  if (!fpa_consts_inited)
    init_fpa_table ();

  REAL_VALUE_FROM_CONST_DOUBLE (r, x);
  r = REAL_VALUE_NEGATE (r);
  if (REAL_VALUE_MINUS_ZERO (r))
    return 0;

  for (i = 0; i < 8; i++)
    if (REAL_VALUES_EQUAL (r, values_fpa[i]))
      return 1;

  return 0;
}
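/* As an illustrative note (summarizing FPA behaviour, not code from
   this file): the eight values accepted above are the FPA's encodable
   immediate constants 0, 1, 2, 3, 4, 5, 0.5 and 10.  So, for example,
   (const_double 10.0) can be used directly as an instruction operand,
   while 6.0 must be loaded from the constant pool instead.  */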
/* Predicates for `match_operand' and `match_operator'.  */

/* s_register_operand is the same as register_operand, but it doesn't accept
   (SUBREG (MEM)...).

   This function exists because at the time it was put in it led to better
   code.  SUBREG(MEM) always needs a reload in the places where
   s_register_operand is used, and this seemed to lead to excessive
   reloading.  */

int
s_register_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  /* XXX might have to check for lo regs only for thumb ???  */
  return (GET_CODE (op) == REG
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
/* A hard register operand (even before reload).  */

int
arm_hard_register_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == REG
          && REGNO (op) < FIRST_PSEUDO_REGISTER);
}
/* Only accept reg, subreg(reg), const_int.  */

int
reg_or_int_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    return 1;

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
/* Return 1 if OP is an item in memory, given that we are in reload.  */

int
arm_reload_memory_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  int regno = true_regnum (op);

  return (!CONSTANT_P (op)
          && (regno == -1
              || (GET_CODE (op) == REG
                  && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
}
/* Return 1 if OP is a valid memory address, but not valid for a signed byte
   memory access (architecture V4).
   MODE is QImode if called when computing constraints, or VOIDmode when
   emitting patterns.  In this latter case we cannot use memory_operand()
   because it will fail on badly formed MEMs, which is precisely what we are
   trying to catch.  */

int
bad_signed_byte_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if ((mode == QImode && !memory_operand (op, mode)) || GET_CODE (op) != MEM)
    return 0;

  if (GET_CODE (op) != MEM)
    return 0;

  op = XEXP (op, 0);

  /* A sum of anything more complex than reg + reg or reg + const is bad.  */
  if ((GET_CODE (op) == PLUS || GET_CODE (op) == MINUS)
      && (!s_register_operand (XEXP (op, 0), VOIDmode)
          || (!s_register_operand (XEXP (op, 1), VOIDmode)
              && GET_CODE (XEXP (op, 1)) != CONST_INT)))
    return 1;

  /* Big constants are also bad.  */
  if (GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT
      && (INTVAL (XEXP (op, 1)) > 0xff
          || -INTVAL (XEXP (op, 1)) > 0xff))
    return 1;

  /* Everything else is good, or will automatically be made so.  */
  return 0;
}
/* Return TRUE for valid operands for the rhs of an ARM instruction.  */

int
arm_rhs_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
          || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
}

/* Return TRUE for valid operands for the
   rhs of an ARM instruction, or a load.  */

int
arm_rhsm_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
          || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
          || memory_operand (op, mode));
}
/* Return TRUE for valid operands for the rhs of an ARM instruction, or
   a constant that is valid when negated.  */

int
arm_add_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (TARGET_THUMB)
    return thumb_cmp_operand (op, mode);

  return (s_register_operand (op, mode)
          || (GET_CODE (op) == CONST_INT
              && (const_ok_for_arm (INTVAL (op))
                  || const_ok_for_arm (-INTVAL (op)))));
}

int
arm_not_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
          || (GET_CODE (op) == CONST_INT
              && (const_ok_for_arm (INTVAL (op))
                  || const_ok_for_arm (~INTVAL (op)))));
}
/* Return TRUE if the operand is a memory reference which contains an
   offsettable address.  */

int
offsettable_memory_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  return (mode == GET_MODE (op)
          && GET_CODE (op) == MEM
          && offsettable_address_p (reload_completed | reload_in_progress,
                                    mode, XEXP (op, 0)));
}
/* Return TRUE if the operand is a memory reference which is, or can be
   made word aligned by adjusting the offset.  */

int
alignable_memory_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  rtx reg;

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  if (mode != GET_MODE (op) || GET_CODE (op) != MEM)
    return 0;

  op = XEXP (op, 0);

  return ((GET_CODE (reg = op) == REG
           || (GET_CODE (op) == SUBREG
               && GET_CODE (reg = SUBREG_REG (op)) == REG)
           || (GET_CODE (op) == PLUS
               && GET_CODE (XEXP (op, 1)) == CONST_INT
               && (GET_CODE (reg = XEXP (op, 0)) == REG
                   || (GET_CODE (XEXP (op, 0)) == SUBREG
                       && GET_CODE (reg = SUBREG_REG (XEXP (op, 0))) == REG))))
          && REGNO_POINTER_ALIGN (REGNO (reg)) >= 32);
}
/* Similar to s_register_operand, but does not allow hard integer
   registers.  */

int
f_register_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) == FPU_REGS));
}
/* Return TRUE for valid operands for the rhs of an FPU instruction.  */

int
fpu_rhs_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return FALSE;

  if (GET_CODE (op) == CONST_DOUBLE)
    return const_double_rtx_ok_for_fpu (op);

  return FALSE;
}

int
fpu_add_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return FALSE;

  if (GET_CODE (op) == CONST_DOUBLE)
    return (const_double_rtx_ok_for_fpu (op)
            || neg_const_double_rtx_ok_for_fpu (op));

  return FALSE;
}
/* Return nonzero if OP is a constant power of two.  */

int
power_of_two_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (GET_CODE (op) == CONST_INT)
    {
      HOST_WIDE_INT value = INTVAL (op);

      return value != 0 && (value & (value - 1)) == 0;
    }

  return FALSE;
}
/* Return TRUE for a valid operand of a DImode operation.
   Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
   Note that this disallows MEM(REG+REG), but allows
   MEM(PRE/POST_INC/DEC(REG)).  */

int
di_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;

  if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
    return FALSE;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  switch (GET_CODE (op))
    {
    case CONST_DOUBLE:
    case CONST_INT:
      return TRUE;

    case MEM:
      return memory_address_p (DImode, XEXP (op, 0));

    default:
      return FALSE;
    }
}
/* Like di_operand, but don't accept constants.  */

int
nonimmediate_di_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;

  if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
    return FALSE;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  if (GET_CODE (op) == MEM)
    return memory_address_p (DImode, XEXP (op, 0));

  return FALSE;
}
/* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
   Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
   Note that this disallows MEM(REG+REG), but allows
   MEM(PRE/POST_INC/DEC(REG)).  */

int
soft_df_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return FALSE;

  if (GET_CODE (op) == SUBREG && CONSTANT_P (SUBREG_REG (op)))
    return FALSE;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  switch (GET_CODE (op))
    {
    case CONST_DOUBLE:
      return TRUE;

    case MEM:
      return memory_address_p (DFmode, XEXP (op, 0));

    default:
      return FALSE;
    }
}
/* Like soft_df_operand, but don't accept constants.  */

int
nonimmediate_soft_df_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return FALSE;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  if (GET_CODE (op) == MEM)
    return memory_address_p (DFmode, XEXP (op, 0));

  return FALSE;
}
/* Return TRUE for valid index operands.  */

int
index_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
          || (immediate_operand (op, mode)
              && (GET_CODE (op) != CONST_INT
                  || (INTVAL (op) < 4096 && INTVAL (op) > -4096))));
}
/* Return TRUE for valid shifts by a constant.  This also accepts any
   power of two on the (somewhat overly relaxed) assumption that the
   shift operator in this case was a mult.  */

int
const_shift_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (power_of_two_operand (op, mode)
          || (immediate_operand (op, mode)
              && (GET_CODE (op) != CONST_INT
                  || (INTVAL (op) < 32 && INTVAL (op) > 0))));
}
/* Return TRUE for arithmetic operators which can be combined with a multiply
   (shift).  */

int
shiftable_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  enum rtx_code code;

  if (GET_MODE (x) != mode)
    return FALSE;

  code = GET_CODE (x);

  return (code == PLUS || code == MINUS
          || code == IOR || code == XOR || code == AND);
}
/* Return TRUE for binary logical operators.  */

int
logical_binary_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  enum rtx_code code;

  if (GET_MODE (x) != mode)
    return FALSE;

  code = GET_CODE (x);

  return (code == IOR || code == XOR || code == AND);
}
/* Return TRUE for shift operators.  */

int
shift_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  enum rtx_code code;

  if (GET_MODE (x) != mode)
    return FALSE;

  code = GET_CODE (x);

  if (code == MULT)
    return power_of_two_operand (XEXP (x, 1), mode);

  return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
          || code == ROTATERT);
}
/* Return TRUE if x is EQ or NE.  */

int
equality_operator (x, mode)
     rtx x;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return GET_CODE (x) == EQ || GET_CODE (x) == NE;
}
/* Return TRUE if x is a comparison operator other than LTGT or UNEQ.  */

int
arm_comparison_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  return (comparison_operator (x, mode)
          && GET_CODE (x) != LTGT
          && GET_CODE (x) != UNEQ);
}
/* Return TRUE for SMIN SMAX UMIN UMAX operators.  */

int
minmax_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  enum rtx_code code = GET_CODE (x);

  if (GET_MODE (x) != mode)
    return FALSE;

  return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
}
/* Return TRUE if this is the condition code register, if we aren't given
   a mode, accept any class CCmode register.  */

int
cc_register (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    {
      mode = GET_MODE (x);

      if (GET_MODE_CLASS (mode) != MODE_CC)
        return FALSE;
    }

  if (   GET_MODE (x) == mode
      && GET_CODE (x) == REG
      && REGNO (x) == CC_REGNUM)
    return TRUE;

  return FALSE;
}
/* Return TRUE if this is the condition code register, if we aren't given
   a mode, accept any class CCmode register which indicates a dominance
   expression.  */

int
dominant_cc_register (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    {
      mode = GET_MODE (x);

      if (GET_MODE_CLASS (mode) != MODE_CC)
        return FALSE;
    }

  if (   mode != CC_DNEmode && mode != CC_DEQmode
      && mode != CC_DLEmode && mode != CC_DLTmode
      && mode != CC_DGEmode && mode != CC_DGTmode
      && mode != CC_DLEUmode && mode != CC_DLTUmode
      && mode != CC_DGEUmode && mode != CC_DGTUmode)
    return FALSE;

  return cc_register (x, mode);
}
/* Return TRUE if X references a SYMBOL_REF.  */

int
symbol_mentioned_p (x)
     rtx x;
{
  const char * fmt;
  int i;

  if (GET_CODE (x) == SYMBOL_REF)
    return 1;

  fmt = GET_RTX_FORMAT (GET_CODE (x));

  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (symbol_mentioned_p (XVECEXP (x, i, j)))
              return 1;
        }
      else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
        return 1;
    }

  return 0;
}
/* Return TRUE if X references a LABEL_REF.  */

int
label_mentioned_p (x)
     rtx x;
{
  const char * fmt;
  int i;

  if (GET_CODE (x) == LABEL_REF)
    return 1;

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (label_mentioned_p (XVECEXP (x, i, j)))
              return 1;
        }
      else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
        return 1;
    }

  return 0;
}
enum rtx_code
minmax_code (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);

  if (code == SMAX)
    return GE;
  else if (code == SMIN)
    return LE;
  else if (code == UMIN)
    return LEU;
  else if (code == UMAX)
    return GEU;

  abort ();
}
/* Return 1 if memory locations are adjacent.  */

int
adjacent_mem_locations (a, b)
     rtx a, b;
{
  if ((GET_CODE (XEXP (a, 0)) == REG
       || (GET_CODE (XEXP (a, 0)) == PLUS
           && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
      && (GET_CODE (XEXP (b, 0)) == REG
          || (GET_CODE (XEXP (b, 0)) == PLUS
              && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
    {
      int val0 = 0, val1 = 0;
      int reg0, reg1;

      if (GET_CODE (XEXP (a, 0)) == PLUS)
        {
          reg0 = REGNO (XEXP (XEXP (a, 0), 0));
          val0 = INTVAL (XEXP (XEXP (a, 0), 1));
        }
      else
        reg0 = REGNO (XEXP (a, 0));

      if (GET_CODE (XEXP (b, 0)) == PLUS)
        {
          reg1 = REGNO (XEXP (XEXP (b, 0), 0));
          val1 = INTVAL (XEXP (XEXP (b, 0), 1));
        }
      else
        reg1 = REGNO (XEXP (b, 0));

      return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
    }

  return 0;
}
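/* For instance (an illustrative restatement of the test above): MEMs
   addressed as [r3, #4] and [r3, #8] are adjacent, in either order,
   while [r3] and [r3, #8], or [r3] and [r4, #4], are not.  */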
/* Return 1 if OP is a load multiple operation.  It is known to be
   parallel and the first section will be tested.  */

int
load_multiple_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT count = XVECLEN (op, 0);
  int dest_regno;
  rtx src_addr;
  HOST_WIDE_INT i = 1, base = 0;
  rtx elt;

  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET)
    return 0;

  /* Check to see if this might be a write-back.  */
  if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
    {
      i++;
      base = 1;

      /* Now check it more carefully.  */
      if (GET_CODE (SET_DEST (elt)) != REG
          || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
          || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
          || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
          || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
        return 0;
    }

  /* Perform a quick check so we don't blow up below.  */
  if (count <= i
      || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
    return 0;

  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);

  for (; i < count; i++)
    {
      elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != SET
          || GET_CODE (SET_DEST (elt)) != REG
          || GET_MODE (SET_DEST (elt)) != SImode
          || REGNO (SET_DEST (elt)) != (unsigned int)(dest_regno + i - base)
          || GET_CODE (SET_SRC (elt)) != MEM
          || GET_MODE (SET_SRC (elt)) != SImode
          || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
          || !rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
          || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
          || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
        return 0;
    }

  return 1;
}
/* Return 1 if OP is a store multiple operation.  It is known to be
   parallel and the first section will be tested.  */

int
store_multiple_operation (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT count = XVECLEN (op, 0);
  int src_regno;
  rtx dest_addr;
  HOST_WIDE_INT i = 1, base = 0;
  rtx elt;

  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET)
    return 0;

  /* Check to see if this might be a write-back.  */
  if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
    {
      i++;
      base = 1;

      /* Now check it more carefully.  */
      if (GET_CODE (SET_DEST (elt)) != REG
          || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
          || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
          || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
          || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
        return 0;
    }

  /* Perform a quick check so we don't blow up below.  */
  if (count <= i
      || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
      || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
    return 0;

  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);

  for (; i < count; i++)
    {
      elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != SET
          || GET_CODE (SET_SRC (elt)) != REG
          || GET_MODE (SET_SRC (elt)) != SImode
          || REGNO (SET_SRC (elt)) != (unsigned int)(src_regno + i - base)
          || GET_CODE (SET_DEST (elt)) != MEM
          || GET_MODE (SET_DEST (elt)) != SImode
          || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
          || !rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
          || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
          || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
        return 0;
    }

  return 1;
}
int
load_multiple_sequence (operands, nops, regs, base, load_offset)
     rtx * operands;
     int nops;
     int * regs;
     int * base;
     HOST_WIDE_INT * load_offset;
{
  int unsorted_regs[4];
  HOST_WIDE_INT unsorted_offsets[4];
  int order[4];
  int base_reg = -1;
  int i;

  /* Can only handle 2, 3, or 4 insns at present,
     though could be easily extended if required.  */
  if (nops < 2 || nops > 4)
    abort ();

  /* Loop over the operands and check that the memory references are
     suitable (ie immediate offsets from the same base register).  At
     the same time, extract the target register, and the memory
     offsets.  */
  for (i = 0; i < nops; i++)
    {
      rtx reg;
      rtx offset;

      /* Convert a subreg of a mem into the mem itself.  */
      if (GET_CODE (operands[nops + i]) == SUBREG)
        operands[nops + i] = alter_subreg (operands + (nops + i));

      if (GET_CODE (operands[nops + i]) != MEM)
        abort ();

      /* Don't reorder volatile memory references; it doesn't seem worth
         looking for the case where the order is ok anyway.  */
      if (MEM_VOLATILE_P (operands[nops + i]))
        return 0;

      offset = const0_rtx;

      if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
           || (GET_CODE (reg) == SUBREG
               && GET_CODE (reg = SUBREG_REG (reg)) == REG))
          || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
              && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
                   == REG)
                  || (GET_CODE (reg) == SUBREG
                      && GET_CODE (reg = SUBREG_REG (reg)) == REG))
              && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
                  == CONST_INT)))
        {
          if (i == 0)
            {
              base_reg = REGNO (reg);
              unsorted_regs[0] = (GET_CODE (operands[i]) == REG
                                  ? REGNO (operands[i])
                                  : REGNO (SUBREG_REG (operands[i])));
              order[0] = 0;
            }
          else
            {
              if (base_reg != (int) REGNO (reg))
                /* Not addressed from the same base register.  */
                return 0;

              unsorted_regs[i] = (GET_CODE (operands[i]) == REG
                                  ? REGNO (operands[i])
                                  : REGNO (SUBREG_REG (operands[i])));
              if (unsorted_regs[i] < unsorted_regs[order[0]])
                order[0] = i;
            }

          /* If it isn't an integer register, or if it overwrites the
             base register but isn't the last insn in the list, then
             we can't do this.  */
          if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14
              || (i != nops - 1 && unsorted_regs[i] == base_reg))
            return 0;

          unsorted_offsets[i] = INTVAL (offset);
        }
      else
        /* Not a suitable memory address.  */
        return 0;
    }

  /* All the useful information has now been extracted from the
     operands into unsorted_regs and unsorted_offsets; additionally,
     order[0] has been set to the lowest numbered register in the
     list.  Sort the registers into order, and check that the memory
     offsets are ascending and adjacent.  */
  for (i = 1; i < nops; i++)
    {
      int j;

      order[i] = order[i - 1];
      for (j = 0; j < nops; j++)
        if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
            && (order[i] == order[i - 1]
                || unsorted_regs[j] < unsorted_regs[order[i]]))
          order[i] = j;

      /* Have we found a suitable register? if not, one must be used more
         than once.  */
      if (order[i] == order[i - 1])
        return 0;

      /* Is the memory address adjacent and ascending? */
      if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
        return 0;
    }

  if (base)
    {
      *base = base_reg;

      for (i = 0; i < nops; i++)
        regs[i] = unsorted_regs[order[i]];

      *load_offset = unsorted_offsets[order[0]];
    }

  if (unsorted_offsets[order[0]] == 0)
    return 1; /* ldmia */

  if (unsorted_offsets[order[0]] == 4)
    return 2; /* ldmib */

  if (unsorted_offsets[order[nops - 1]] == 0)
    return 3; /* ldmda */

  if (unsorted_offsets[order[nops - 1]] == -4)
    return 4; /* ldmdb */

  /* For ARM8,9 & StrongARM, 2 ldr instructions are faster than an ldm
     if the offset isn't small enough.  The reason 2 ldrs are faster
     is because these ARMs are able to do more than one cache access
     in a single cycle.  The ARM9 and StrongARM have Harvard caches,
     whilst the ARM8 has a double bandwidth cache.  This means that
     these cores can do both an instruction fetch and a data fetch in
     a single cycle, so the trick of calculating the address into a
     scratch register (one of the result regs) and then doing a load
     multiple actually becomes slower (and no smaller in code size).
     That is the transformation

        ldr     rd1, [rbase + offset]
        ldr     rd2, [rbase + offset + 4]

     to

        add     rd1, rbase, offset
        ldmia   rd1, {rd1, rd2}

     produces worse code -- '3 cycles + any stalls on rd2' instead of
     '2 cycles + any stalls on rd2'.  On ARMs with only one cache
     access per cycle, the first sequence could never complete in less
     than 6 cycles, whereas the ldm sequence would only take 5 and
     would make better use of sequential accesses if not hitting the
     cache.

     We cheat here and test 'arm_ld_sched' which we currently know to
     only be true for the ARM8, ARM9 and StrongARM.  If this ever
     changes, then the test below needs to be reworked.  */
  if (nops == 2 && arm_ld_sched)
    return 0;

  /* Can't do it without setting up the offset, only do this if it takes
     no more than one insn.  */
  return (const_ok_for_arm (unsorted_offsets[order[0]])
          || const_ok_for_arm (-unsorted_offsets[order[0]])) ? 5 : 0;
}
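/* Illustrative summary (restating the return codes above, not new
   behaviour): 1..4 select ldmia/ldmib/ldmda/ldmdb respectively, and 5
   means "add or subtract the offset into the first destination
   register, then ldmia from it".  For example, loads of r4 from [rb]
   and r5 from [rb, #4] sort into an ascending run starting at offset
   0 and yield 1 (ldmia).  */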
const char *
emit_ldm_seq (operands, nops)
     rtx * operands;
     int nops;
{
  int regs[4];
  int base_reg;
  HOST_WIDE_INT offset;
  char buf[100];
  int i;

  switch (load_multiple_sequence (operands, nops, regs, &base_reg, &offset))
    {
    case 1:
      strcpy (buf, "ldm%?ia\t");
      break;

    case 2:
      strcpy (buf, "ldm%?ib\t");
      break;

    case 3:
      strcpy (buf, "ldm%?da\t");
      break;

    case 4:
      strcpy (buf, "ldm%?db\t");
      break;

    case 5:
      if (offset >= 0)
        sprintf (buf, "add%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
                 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
                 (long) offset);
      else
        sprintf (buf, "sub%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
                 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
                 (long) -offset);
      output_asm_insn (buf, operands);
      base_reg = regs[0];
      strcpy (buf, "ldm%?ia\t");
      break;

    default:
      abort ();
    }

  sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
           reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);

  for (i = 1; i < nops; i++)
    sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
             reg_names[regs[i]]);

  strcat (buf, "}\t%@ phole ldm");

  output_asm_insn (buf, operands);
  return "";
}
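/* For instance (assuming REGISTER_PREFIX is empty, as on most ELF
   configurations), a two-register ascending run starting at offset 0
   is emitted as:

     ldmia  r3, {r4, r5}   @ phole ldm
*/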
int
store_multiple_sequence (operands, nops, regs, base, load_offset)
     rtx * operands;
     int nops;
     int * regs;
     int * base;
     HOST_WIDE_INT * load_offset;
{
  int unsorted_regs[4];
  HOST_WIDE_INT unsorted_offsets[4];
  int order[4];
  int base_reg = -1;
  int i;

  /* Can only handle 2, 3, or 4 insns at present, though could be easily
     extended if required.  */
  if (nops < 2 || nops > 4)
    abort ();

  /* Loop over the operands and check that the memory references are
     suitable (ie immediate offsets from the same base register).  At
     the same time, extract the target register, and the memory
     offsets.  */
  for (i = 0; i < nops; i++)
    {
      rtx reg;
      rtx offset;

      /* Convert a subreg of a mem into the mem itself.  */
      if (GET_CODE (operands[nops + i]) == SUBREG)
        operands[nops + i] = alter_subreg (operands + (nops + i));

      if (GET_CODE (operands[nops + i]) != MEM)
        abort ();

      /* Don't reorder volatile memory references; it doesn't seem worth
         looking for the case where the order is ok anyway.  */
      if (MEM_VOLATILE_P (operands[nops + i]))
        return 0;

      offset = const0_rtx;

      if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
           || (GET_CODE (reg) == SUBREG
               && GET_CODE (reg = SUBREG_REG (reg)) == REG))
          || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
              && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
                   == REG)
                  || (GET_CODE (reg) == SUBREG
                      && GET_CODE (reg = SUBREG_REG (reg)) == REG))
              && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
                  == CONST_INT)))
        {
          if (i == 0)
            {
              base_reg = REGNO (reg);
              unsorted_regs[0] = (GET_CODE (operands[i]) == REG
                                  ? REGNO (operands[i])
                                  : REGNO (SUBREG_REG (operands[i])));
              order[0] = 0;
            }
          else
            {
              if (base_reg != (int) REGNO (reg))
                /* Not addressed from the same base register.  */
                return 0;

              unsorted_regs[i] = (GET_CODE (operands[i]) == REG
                                  ? REGNO (operands[i])
                                  : REGNO (SUBREG_REG (operands[i])));
              if (unsorted_regs[i] < unsorted_regs[order[0]])
                order[0] = i;
            }

          /* If it isn't an integer register, then we can't do this.  */
          if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14)
            return 0;

          unsorted_offsets[i] = INTVAL (offset);
        }
      else
        /* Not a suitable memory address.  */
        return 0;
    }

  /* All the useful information has now been extracted from the
     operands into unsorted_regs and unsorted_offsets; additionally,
     order[0] has been set to the lowest numbered register in the
     list.  Sort the registers into order, and check that the memory
     offsets are ascending and adjacent.  */
  for (i = 1; i < nops; i++)
    {
      int j;

      order[i] = order[i - 1];
      for (j = 0; j < nops; j++)
        if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
            && (order[i] == order[i - 1]
                || unsorted_regs[j] < unsorted_regs[order[i]]))
          order[i] = j;

      /* Have we found a suitable register? if not, one must be used more
         than once.  */
      if (order[i] == order[i - 1])
        return 0;

      /* Is the memory address adjacent and ascending? */
      if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
        return 0;
    }

  if (base)
    {
      *base = base_reg;

      for (i = 0; i < nops; i++)
        regs[i] = unsorted_regs[order[i]];

      *load_offset = unsorted_offsets[order[0]];
    }

  if (unsorted_offsets[order[0]] == 0)
    return 1; /* stmia */

  if (unsorted_offsets[order[0]] == 4)
    return 2; /* stmib */

  if (unsorted_offsets[order[nops - 1]] == 0)
    return 3; /* stmda */

  if (unsorted_offsets[order[nops - 1]] == -4)
    return 4; /* stmdb */

  return 0;
}
const char *
emit_stm_seq (operands, nops)
     rtx * operands;
     int nops;
{
  int regs[4];
  int base_reg;
  HOST_WIDE_INT offset;
  char buf[100];
  int i;

  switch (store_multiple_sequence (operands, nops, regs, &base_reg, &offset))
    {
    case 1:
      strcpy (buf, "stm%?ia\t");
      break;

    case 2:
      strcpy (buf, "stm%?ib\t");
      break;

    case 3:
      strcpy (buf, "stm%?da\t");
      break;

    case 4:
      strcpy (buf, "stm%?db\t");
      break;

    default:
      abort ();
    }

  sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
           reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);

  for (i = 1; i < nops; i++)
    sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
             reg_names[regs[i]]);

  strcat (buf, "}\t%@ phole stm");

  output_asm_insn (buf, operands);
  return "";
}
int
multi_register_push (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (GET_CODE (op) != PARALLEL
      || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
      || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
      || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != UNSPEC_PUSH_MULT))
    return 0;

  return 1;
}
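/* Sketch (an assumed shape, mirroring only the checks made above) of
   the RTL this predicate accepts -- the first element of the PARALLEL
   must be a SET whose source is an UNSPEC_PUSH_MULT:

     (parallel [(set (mem ...) (unspec [...] UNSPEC_PUSH_MULT))
                ...])
*/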
/* Routines for use in generating RTL.  */

rtx
arm_gen_load_multiple (base_regno, count, from, up, write_back, unchanging_p,
                       in_struct_p, scalar_p)
     int base_regno;
     int count;
     rtx from;
     int up;
     int write_back;
     int unchanging_p;
     int in_struct_p;
     int scalar_p;
{
  int i = 0, j;
  rtx result;
  int sign = up ? 1 : -1;
  rtx mem;

  /* XScale has load-store double instructions, but they have stricter
     alignment requirements than load-store multiple, so we can not
     use them.

     For XScale ldm requires 2 + NREGS cycles to complete and blocks
     the pipeline until completion.

        NREGS           CYCLES
          1               3
          2               4
          3               5
          4               6

     An ldr instruction takes 1-3 cycles, but does not block the
     pipeline.

        NREGS           CYCLES
          1              1-3
          2              2-6
          3              3-9
          4              4-12

     Best case ldr will always win.  However, the more ldr instructions
     we issue, the less likely we are to be able to schedule them well.
     Using ldr instructions also increases code size.

     As a compromise, we use ldr for counts of 1 or 2 regs, and ldm
     for counts of 3 or 4 regs.  */
  if (arm_is_xscale && count <= 2 && ! optimize_size)
    {
      rtx seq;

      start_sequence ();

      for (i = 0; i < count; i++)
        {
          mem = gen_rtx_MEM (SImode, plus_constant (from, i * 4 * sign));
          RTX_UNCHANGING_P (mem) = unchanging_p;
          MEM_IN_STRUCT_P (mem) = in_struct_p;
          MEM_SCALAR_P (mem) = scalar_p;
          emit_move_insn (gen_rtx_REG (SImode, base_regno + i), mem);
        }

      if (write_back)
        emit_move_insn (from, plus_constant (from, count * 4 * sign));

      seq = gen_sequence ();
      end_sequence ();

      return seq;
    }

  result = gen_rtx_PARALLEL (VOIDmode,
                             rtvec_alloc (count + (write_back ? 1 : 0)));
  if (write_back)
    {
      XVECEXP (result, 0, 0)
        = gen_rtx_SET (GET_MODE (from), from,
                       plus_constant (from, count * 4 * sign));
      i = 1;
      count++;
    }

  for (j = 0; i < count; i++, j++)
    {
      mem = gen_rtx_MEM (SImode, plus_constant (from, j * 4 * sign));
      RTX_UNCHANGING_P (mem) = unchanging_p;
      MEM_IN_STRUCT_P (mem) = in_struct_p;
      MEM_SCALAR_P (mem) = scalar_p;
      XVECEXP (result, 0, i)
        = gen_rtx_SET (VOIDmode, gen_rtx_REG (SImode, base_regno + j), mem);
    }

  return result;
}
rtx
arm_gen_store_multiple (base_regno, count, to, up, write_back, unchanging_p,
                        in_struct_p, scalar_p)
     int base_regno;
     int count;
     rtx to;
     int up;
     int write_back;
     int unchanging_p;
     int in_struct_p;
     int scalar_p;
{
  int i = 0, j;
  rtx result;
  int sign = up ? 1 : -1;
  rtx mem;

  /* See arm_gen_load_multiple for discussion of
     the pros/cons of ldm/stm usage for XScale.  */
  if (arm_is_xscale && count <= 2 && ! optimize_size)
    {
      rtx seq;

      start_sequence ();

      for (i = 0; i < count; i++)
        {
          mem = gen_rtx_MEM (SImode, plus_constant (to, i * 4 * sign));
          RTX_UNCHANGING_P (mem) = unchanging_p;
          MEM_IN_STRUCT_P (mem) = in_struct_p;
          MEM_SCALAR_P (mem) = scalar_p;
          emit_move_insn (mem, gen_rtx_REG (SImode, base_regno + i));
        }

      if (write_back)
        emit_move_insn (to, plus_constant (to, count * 4 * sign));

      seq = gen_sequence ();
      end_sequence ();

      return seq;
    }

  result = gen_rtx_PARALLEL (VOIDmode,
                             rtvec_alloc (count + (write_back ? 1 : 0)));
  if (write_back)
    {
      XVECEXP (result, 0, 0)
        = gen_rtx_SET (GET_MODE (to), to,
                       plus_constant (to, count * 4 * sign));
      i = 1;
      count++;
    }

  for (j = 0; i < count; i++, j++)
    {
      mem = gen_rtx_MEM (SImode, plus_constant (to, j * 4 * sign));
      RTX_UNCHANGING_P (mem) = unchanging_p;
      MEM_IN_STRUCT_P (mem) = in_struct_p;
      MEM_SCALAR_P (mem) = scalar_p;
      XVECEXP (result, 0, i)
        = gen_rtx_SET (VOIDmode, mem, gen_rtx_REG (SImode, base_regno + j));
    }

  return result;
}
int
arm_gen_movstrqi (operands)
     rtx * operands;
{
  HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
  int i;
  rtx src, dst;
  rtx st_src, st_dst, fin_src, fin_dst;
  rtx part_bytes_reg = NULL;
  rtx mem;
  int dst_unchanging_p, dst_in_struct_p, src_unchanging_p, src_in_struct_p;
  int dst_scalar_p, src_scalar_p;

  if (GET_CODE (operands[2]) != CONST_INT
      || GET_CODE (operands[3]) != CONST_INT
      || INTVAL (operands[2]) > 64
      || INTVAL (operands[3]) & 3)
    return 0;

  st_dst = XEXP (operands[0], 0);
  st_src = XEXP (operands[1], 0);

  dst_unchanging_p = RTX_UNCHANGING_P (operands[0]);
  dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
  dst_scalar_p = MEM_SCALAR_P (operands[0]);
  src_unchanging_p = RTX_UNCHANGING_P (operands[1]);
  src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);
  src_scalar_p = MEM_SCALAR_P (operands[1]);

  fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
  fin_src = src = copy_to_mode_reg (SImode, st_src);

  in_words_to_go = NUM_INTS (INTVAL (operands[2]));
  out_words_to_go = INTVAL (operands[2]) / 4;
  last_bytes = INTVAL (operands[2]) & 3;

  if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
    part_bytes_reg = gen_rtx_REG (SImode, (in_words_to_go - 1) & 3);

  for (i = 0; in_words_to_go >= 2; i+=4)
    {
      if (in_words_to_go > 4)
        emit_insn (arm_gen_load_multiple (0, 4, src, TRUE, TRUE,
                                          src_unchanging_p,
                                          src_in_struct_p,
                                          src_scalar_p));
      else
        emit_insn (arm_gen_load_multiple (0, in_words_to_go, src, TRUE,
                                          FALSE, src_unchanging_p,
                                          src_in_struct_p, src_scalar_p));

      if (out_words_to_go)
        {
          if (out_words_to_go > 4)
            emit_insn (arm_gen_store_multiple (0, 4, dst, TRUE, TRUE,
                                               dst_unchanging_p,
                                               dst_in_struct_p,
                                               dst_scalar_p));
          else if (out_words_to_go != 1)
            emit_insn (arm_gen_store_multiple (0, out_words_to_go,
                                               dst, TRUE,
                                               (last_bytes == 0
                                                ? FALSE : TRUE),
                                               dst_unchanging_p,
                                               dst_in_struct_p,
                                               dst_scalar_p));
          else
            {
              mem = gen_rtx_MEM (SImode, dst);
              RTX_UNCHANGING_P (mem) = dst_unchanging_p;
              MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
              MEM_SCALAR_P (mem) = dst_scalar_p;
              emit_move_insn (mem, gen_rtx_REG (SImode, 0));
              if (last_bytes != 0)
                emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
            }
        }

      in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
      out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
    }

  /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do.  */
  if (out_words_to_go)
    {
      rtx sreg;

      mem = gen_rtx_MEM (SImode, src);
      RTX_UNCHANGING_P (mem) = src_unchanging_p;
      MEM_IN_STRUCT_P (mem) = src_in_struct_p;
      MEM_SCALAR_P (mem) = src_scalar_p;
      emit_move_insn (sreg = gen_reg_rtx (SImode), mem);
      emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));

      mem = gen_rtx_MEM (SImode, dst);
      RTX_UNCHANGING_P (mem) = dst_unchanging_p;
      MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
      MEM_SCALAR_P (mem) = dst_scalar_p;
      emit_move_insn (mem, sreg);
      emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
      in_words_to_go--;

      if (in_words_to_go)       /* Sanity check */
        abort ();
    }

  if (in_words_to_go)
    {
      if (in_words_to_go < 0)
        abort ();

      mem = gen_rtx_MEM (SImode, src);
      RTX_UNCHANGING_P (mem) = src_unchanging_p;
      MEM_IN_STRUCT_P (mem) = src_in_struct_p;
      MEM_SCALAR_P (mem) = src_scalar_p;
      part_bytes_reg = copy_to_mode_reg (SImode, mem);
    }

  if (last_bytes && part_bytes_reg == NULL)
    abort ();

  if (BYTES_BIG_ENDIAN && last_bytes)
    {
      rtx tmp = gen_reg_rtx (SImode);

      /* The bytes we want are in the top end of the word.  */
      emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
                              GEN_INT (8 * (4 - last_bytes))));
      part_bytes_reg = tmp;

      while (last_bytes)
        {
          mem = gen_rtx_MEM (QImode, plus_constant (dst, last_bytes - 1));
          RTX_UNCHANGING_P (mem) = dst_unchanging_p;
          MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
          MEM_SCALAR_P (mem) = dst_scalar_p;
          emit_move_insn (mem, gen_lowpart (QImode, part_bytes_reg));

          if (--last_bytes)
            {
              tmp = gen_reg_rtx (SImode);
              emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
              part_bytes_reg = tmp;
            }
        }
    }
  else
    {
      if (last_bytes > 1)
        {
          mem = gen_rtx_MEM (HImode, dst);
          RTX_UNCHANGING_P (mem) = dst_unchanging_p;
          MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
          MEM_SCALAR_P (mem) = dst_scalar_p;
          emit_move_insn (mem, gen_lowpart (HImode, part_bytes_reg));
          last_bytes -= 2;
          if (last_bytes)
            {
              rtx tmp = gen_reg_rtx (SImode);

              emit_insn (gen_addsi3 (dst, dst, GEN_INT (2)));
              emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (16)));
              part_bytes_reg = tmp;
            }
        }

      if (last_bytes)
        {
          mem = gen_rtx_MEM (QImode, dst);
          RTX_UNCHANGING_P (mem) = dst_unchanging_p;
          MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
          MEM_SCALAR_P (mem) = dst_scalar_p;
          emit_move_insn (mem, gen_lowpart (QImode, part_bytes_reg));
        }
    }

  return 1;
}
/* Generate a memory reference for a half word, such that it will be loaded
   into the top 16 bits of the word.  We can assume that the address is
   known to be alignable and of the form reg, or plus (reg, const).  */

rtx
arm_gen_rotated_half_load (memref)
     rtx memref;
{
  HOST_WIDE_INT offset = 0;
  rtx base = XEXP (memref, 0);

  if (GET_CODE (base) == PLUS)
    {
      offset = INTVAL (XEXP (base, 1));
      base = XEXP (base, 0);
    }

  /* If we aren't allowed to generate unaligned addresses, then fail.  */
  if (TARGET_MMU_TRAPS
      && ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 0)))
    return NULL;

  base = gen_rtx_MEM (SImode, plus_constant (base, offset & ~2));

  if ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 2))
    return base;

  return gen_rtx_ROTATE (SImode, base, GEN_INT (16));
}
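/* Worked example (little-endian, illustrative): for the half word at
   [r0, #2] the aligned word at [r0] is loaded and the wanted 16 bits
   already sit in the top half of the register, so the MEM is returned
   unrotated; for the half word at [r0, #0] this instead returns
   (rotate (mem:SI r0) 16), moving the low half to the top.  */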
/* Select a dominance comparison mode if possible.  We support three forms.
   COND_OR == 0 => (X && Y)
   COND_OR == 1 => ((! X) || Y)
   COND_OR == 2 => (X || Y)
   If we are unable to support a dominance comparison we return CC mode.
   This will then fail to match for the RTL expressions that generate this
   call.  */

static enum machine_mode
select_dominance_cc_mode (x, y, cond_or)
     rtx x;
     rtx y;
     HOST_WIDE_INT cond_or;
{
  enum rtx_code cond1, cond2;
  int swapped = 0;

  /* Currently we will probably get the wrong result if the individual
     comparisons are not simple.  This also ensures that it is safe to
     reverse a comparison if necessary.  */
  if ((arm_select_cc_mode (cond1 = GET_CODE (x), XEXP (x, 0), XEXP (x, 1))
       != CCmode)
      || (arm_select_cc_mode (cond2 = GET_CODE (y), XEXP (y, 0), XEXP (y, 1))
          != CCmode))
    return CCmode;

  /* The if_then_else variant of this tests the second condition if the
     first passes, but is true if the first fails.  Reverse the first
     condition to get a true "inclusive-or" expression.  */
  if (cond_or == 1)
    cond1 = reverse_condition (cond1);

  /* If the comparisons are not equal, and one doesn't dominate the other,
     then we can't do this.  */
  if (cond1 != cond2
      && !comparison_dominates_p (cond1, cond2)
      && (swapped = 1, !comparison_dominates_p (cond2, cond1)))
    return CCmode;

  if (swapped)
    {
      enum rtx_code temp = cond1;
      cond1 = cond2;
      cond2 = temp;
    }

  switch (cond1)
    {
    case EQ:
      if (cond2 == EQ || !cond_or)
        return CC_DEQmode;

      switch (cond2)
        {
        case LE: return CC_DLEmode;
        case LEU: return CC_DLEUmode;
        case GE: return CC_DGEmode;
        case GEU: return CC_DGEUmode;
        default: break;
        }

      break;

    case LT:
      if (cond2 == LT || !cond_or)
        return CC_DLTmode;
      if (cond2 == LE)
        return CC_DLEmode;
      if (cond2 == NE)
        return CC_DNEmode;
      break;

    case GT:
      if (cond2 == GT || !cond_or)
        return CC_DGTmode;
      if (cond2 == GE)
        return CC_DGEmode;
      if (cond2 == NE)
        return CC_DNEmode;
      break;

    case LTU:
      if (cond2 == LTU || !cond_or)
        return CC_DLTUmode;
      if (cond2 == LEU)
        return CC_DLEUmode;
      if (cond2 == NE)
        return CC_DNEmode;
      break;

    case GTU:
      if (cond2 == GTU || !cond_or)
        return CC_DGTUmode;
      if (cond2 == GEU)
        return CC_DGEUmode;
      if (cond2 == NE)
        return CC_DNEmode;
      break;

    /* The remaining cases only occur when both comparisons are the
       same.  */
    case NE:
      return CC_DNEmode;

    case LE:
      return CC_DLEmode;

    case GE:
      return CC_DGEmode;

    case LEU:
      return CC_DLEUmode;

    case GEU:
      return CC_DGEUmode;

    default:
      break;
    }

  abort ();
}
enum machine_mode
arm_select_cc_mode (op, x, y)
     enum rtx_code op;
     rtx x;
     rtx y;
{
  /* All floating point compares return CCFP if it is an equality
     comparison, and CCFPE otherwise.  */
  if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
    {
      switch (op)
        {
        case EQ:
        case NE:
        case UNORDERED:
        case ORDERED:
        case UNLT:
        case UNLE:
        case UNGT:
        case UNGE:
        case UNEQ:
        case LTGT:
          return CCFPmode;

        case LT:
        case LE:
        case GT:
        case GE:
          return CCFPEmode;

        default:
          abort ();
        }
    }

  /* A compare with a shifted operand.  Because of canonicalization, the
     comparison will have to be swapped when we emit the assembler.  */
  if (GET_MODE (y) == SImode && GET_CODE (y) == REG
      && (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
          || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
          || GET_CODE (x) == ROTATERT))
    return CC_SWPmode;

  /* This is a special case that is used by combine to allow a
     comparison of a shifted byte load to be split into a zero-extend
     followed by a comparison of the shifted integer (only valid for
     equalities and unsigned inequalities).  */
  if (GET_MODE (x) == SImode
      && GET_CODE (x) == ASHIFT
      && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 24
      && GET_CODE (XEXP (x, 0)) == SUBREG
      && GET_CODE (SUBREG_REG (XEXP (x, 0))) == MEM
      && GET_MODE (SUBREG_REG (XEXP (x, 0))) == QImode
      && (op == EQ || op == NE
          || op == GEU || op == GTU || op == LTU || op == LEU)
      && GET_CODE (y) == CONST_INT)
    return CC_Zmode;

  /* A construct for a conditional compare, if the false arm contains
     0, then both conditions must be true, otherwise either condition
     must be true.  Not all conditions are possible, so CCmode is
     returned if it can't be done.  */
  if (GET_CODE (x) == IF_THEN_ELSE
      && (XEXP (x, 2) == const0_rtx
          || XEXP (x, 2) == const1_rtx)
      && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
      && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
    return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
                                     INTVAL (XEXP (x, 2)));

  /* Alternate canonicalizations of the above.  These are somewhat cleaner.  */
  if (GET_CODE (x) == AND
      && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
      && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
    return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1), 0);

  if (GET_CODE (x) == IOR
      && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
      && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
    return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1), 2);

  /* An operation that sets the condition codes as a side-effect, the
     V flag is not set correctly, so we can only use comparisons where
     this doesn't matter.  (For LT and GE we can use "mi" and "pl"
     instead.)  */
  if (GET_MODE (x) == SImode
      && y == const0_rtx
      && (op == EQ || op == NE || op == LT || op == GE)
      && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
          || GET_CODE (x) == AND || GET_CODE (x) == IOR
          || GET_CODE (x) == XOR || GET_CODE (x) == MULT
          || GET_CODE (x) == NOT || GET_CODE (x) == NEG
          || GET_CODE (x) == LSHIFTRT
          || GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
          || GET_CODE (x) == ROTATERT || GET_CODE (x) == ZERO_EXTRACT))
    return CC_NOOVmode;

  if (GET_MODE (x) == QImode && (op == EQ || op == NE))
    return CC_Zmode;

  if (GET_MODE (x) == SImode && (op == LTU || op == GEU)
      && GET_CODE (x) == PLUS
      && (rtx_equal_p (XEXP (x, 0), y) || rtx_equal_p (XEXP (x, 1), y)))
    return CC_Cmode;

  return CCmode;
}
/* X and Y are two things to compare using CODE.  Emit the compare insn and
   return the rtx for register 0 in the proper mode.  FP means this is a
   floating point compare: I don't think that it is needed on the arm.  */

rtx
arm_gen_compare_reg (code, x, y)
     enum rtx_code code;
     rtx x, y;
{
  enum machine_mode mode = SELECT_CC_MODE (code, x, y);
  rtx cc_reg = gen_rtx_REG (mode, CC_REGNUM);

  emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
			  gen_rtx_COMPARE (mode, x, y)));

  return cc_reg;
}
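/* A minimal usage sketch (illustrative only, not part of the original
   file): a caller that wants a conditional branch on X < Y can build it
   from the CC register returned above.  The function name is made up and
   LABEL is assumed to be a valid CODE_LABEL.  */
#if 0
static void
example_branch_on_lt (x, y, label)
     rtx x, y, label;
{
  /* Emit the compare; the result is the CC register in the right mode.  */
  rtx cc_reg = arm_gen_compare_reg (LT, x, y);

  /* Branch to LABEL when the comparison is true, fall through otherwise.  */
  emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
			       gen_rtx_IF_THEN_ELSE
			       (VOIDmode,
				gen_rtx_LT (VOIDmode, cc_reg, const0_rtx),
				gen_rtx_LABEL_REF (VOIDmode, label),
				pc_rtx)));
}
#endif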
void
arm_reload_in_hi (operands)
     rtx * operands;
{
  rtx ref = operands[1];
  rtx base, scratch;
  HOST_WIDE_INT offset = 0;

  if (GET_CODE (ref) == SUBREG)
    {
      offset = SUBREG_BYTE (ref);
      ref = SUBREG_REG (ref);
    }

  if (GET_CODE (ref) == REG)
    {
      /* We have a pseudo which has been spilt onto the stack; there
	 are two cases here: the first where there is a simple
	 stack-slot replacement and a second where the stack-slot is
	 out of range, or is used as a subreg.  */
      if (reg_equiv_mem[REGNO (ref)])
	{
	  ref = reg_equiv_mem[REGNO (ref)];
	  base = find_replacement (&XEXP (ref, 0));
	}
      else
	/* The slot is out of range, or was dressed up in a SUBREG.  */
	base = reg_equiv_address[REGNO (ref)];
    }
  else
    base = find_replacement (&XEXP (ref, 0));

  /* Handle the case where the address is too complex to be offset by 1.  */
  if (GET_CODE (base) == MINUS
      || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
    {
      rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);

      emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
      base = base_plus;
    }
  else if (GET_CODE (base) == PLUS)
    {
      /* The addend must be CONST_INT, or we would have dealt with it above.  */
      HOST_WIDE_INT hi, lo;

      offset += INTVAL (XEXP (base, 1));
      base = XEXP (base, 0);

      /* Rework the address into a legal sequence of insns.  */
      /* Valid range for lo is -4095 -> 4095 */
      lo = (offset >= 0
	    ? (offset & 0xfff)
	    : -((-offset) & 0xfff));

      /* Corner case, if lo is the max offset then we would be out of range
	 once we have added the additional 1 below, so bump the msb into the
	 pre-loading insn(s).  */
      if (lo == 4095)
	lo &= 0x7ff;

      hi = ((((offset - lo) & (HOST_WIDE_INT) 0xffffffff)
	     ^ (HOST_WIDE_INT) 0x80000000)
	    - (HOST_WIDE_INT) 0x80000000);

      if (hi + lo != offset)
	abort ();

      if (hi != 0)
	{
	  rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);

	  /* Get the base address; addsi3 knows how to handle constants
	     that require more than one insn.  */
	  emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
	  base = base_plus;
	  offset = lo;
	}
    }

  scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
  emit_insn (gen_zero_extendqisi2 (scratch,
				   gen_rtx_MEM (QImode,
						plus_constant (base,
							       offset))));
  emit_insn (gen_zero_extendqisi2 (gen_rtx_SUBREG (SImode, operands[0], 0),
				   gen_rtx_MEM (QImode,
						plus_constant (base,
							       offset + 1))));
  if (!BYTES_BIG_ENDIAN)
    emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
			    gen_rtx_IOR (SImode,
					 gen_rtx_ASHIFT
					 (SImode,
					  gen_rtx_SUBREG (SImode,
							  operands[0], 0),
					  GEN_INT (8)),
					 scratch)));
  else
    emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
			    gen_rtx_IOR (SImode,
					 gen_rtx_ASHIFT (SImode, scratch,
							 GEN_INT (8)),
					 gen_rtx_SUBREG (SImode, operands[0],
							 0))));
}
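/* Worked example (added for illustration): for offset = 0x1234 the code
   above computes lo = 0x234 and hi = 0x1000, so hi + lo == offset; for
   offset = -0x1234 it computes lo = -0x234 and hi = -0x1000.  The hi part
   is folded into a separate add through BASE_PLUS, leaving only the small
   lo offset in the byte loads themselves.  */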
/* Handle storing a half-word to memory during reload by synthesising as two
   byte stores.  Take care not to clobber the input values until after we
   have moved them somewhere safe.  This code assumes that if the DImode
   scratch in operands[2] overlaps either the input value or output address
   in some way, then that value must die in this insn (we absolutely need
   two scratch registers for some corner cases).  */

void
arm_reload_out_hi (operands)
     rtx * operands;
{
  rtx ref = operands[0];
  rtx outval = operands[1];
  rtx base, scratch;
  HOST_WIDE_INT offset = 0;

  if (GET_CODE (ref) == SUBREG)
    {
      offset = SUBREG_BYTE (ref);
      ref = SUBREG_REG (ref);
    }

  if (GET_CODE (ref) == REG)
    {
      /* We have a pseudo which has been spilt onto the stack; there
	 are two cases here: the first where there is a simple
	 stack-slot replacement and a second where the stack-slot is
	 out of range, or is used as a subreg.  */
      if (reg_equiv_mem[REGNO (ref)])
	{
	  ref = reg_equiv_mem[REGNO (ref)];
	  base = find_replacement (&XEXP (ref, 0));
	}
      else
	/* The slot is out of range, or was dressed up in a SUBREG.  */
	base = reg_equiv_address[REGNO (ref)];
    }
  else
    base = find_replacement (&XEXP (ref, 0));

  scratch = gen_rtx_REG (SImode, REGNO (operands[2]));

  /* Handle the case where the address is too complex to be offset by 1.  */
  if (GET_CODE (base) == MINUS
      || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
    {
      rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);

      /* Be careful not to destroy OUTVAL.  */
      if (reg_overlap_mentioned_p (base_plus, outval))
	{
	  /* Updating base_plus might destroy outval, see if we can
	     swap the scratch and base_plus.  */
	  if (!reg_overlap_mentioned_p (scratch, outval))
	    {
	      rtx tmp = scratch;
	      scratch = base_plus;
	      base_plus = tmp;
	    }
	  else
	    {
	      rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));

	      /* Be conservative and copy OUTVAL into the scratch now,
		 this should only be necessary if outval is a subreg
		 of something larger than a word.  */
	      /* XXX Might this clobber base?  I can't see how it can,
		 since scratch is known to overlap with OUTVAL, and
		 must be wider than a word.  */
	      emit_insn (gen_movhi (scratch_hi, outval));
	      outval = scratch_hi;
	    }
	}

      emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
      base = base_plus;
    }
  else if (GET_CODE (base) == PLUS)
    {
      /* The addend must be CONST_INT, or we would have dealt with it above.  */
      HOST_WIDE_INT hi, lo;

      offset += INTVAL (XEXP (base, 1));
      base = XEXP (base, 0);

      /* Rework the address into a legal sequence of insns.  */
      /* Valid range for lo is -4095 -> 4095 */
      lo = (offset >= 0
	    ? (offset & 0xfff)
	    : -((-offset) & 0xfff));

      /* Corner case, if lo is the max offset then we would be out of range
	 once we have added the additional 1 below, so bump the msb into the
	 pre-loading insn(s).  */
      if (lo == 4095)
	lo &= 0x7ff;

      hi = ((((offset - lo) & (HOST_WIDE_INT) 0xffffffff)
	     ^ (HOST_WIDE_INT) 0x80000000)
	    - (HOST_WIDE_INT) 0x80000000);

      if (hi + lo != offset)
	abort ();

      if (hi != 0)
	{
	  rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);

	  /* Be careful not to destroy OUTVAL.  */
	  if (reg_overlap_mentioned_p (base_plus, outval))
	    {
	      /* Updating base_plus might destroy outval, see if we
		 can swap the scratch and base_plus.  */
	      if (!reg_overlap_mentioned_p (scratch, outval))
		{
		  rtx tmp = scratch;
		  scratch = base_plus;
		  base_plus = tmp;
		}
	      else
		{
		  rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));

		  /* Be conservative and copy outval into scratch now,
		     this should only be necessary if outval is a
		     subreg of something larger than a word.  */
		  /* XXX Might this clobber base?  I can't see how it
		     can, since scratch is known to overlap with
		     OUTVAL.  */
		  emit_insn (gen_movhi (scratch_hi, outval));
		  outval = scratch_hi;
		}
	    }

	  /* Get the base address; addsi3 knows how to handle constants
	     that require more than one insn.  */
	  emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
	  base = base_plus;
	  offset = lo;
	}
    }

  if (BYTES_BIG_ENDIAN)
    {
      emit_insn (gen_movqi (gen_rtx_MEM (QImode,
					 plus_constant (base, offset + 1)),
			    gen_lowpart (QImode, outval)));
      emit_insn (gen_lshrsi3 (scratch,
			      gen_rtx_SUBREG (SImode, outval, 0),
			      GEN_INT (8)));
      emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
			    gen_lowpart (QImode, scratch)));
    }
  else
    {
      emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
			    gen_lowpart (QImode, outval)));
      emit_insn (gen_lshrsi3 (scratch,
			      gen_rtx_SUBREG (SImode, outval, 0),
			      GEN_INT (8)));
      emit_insn (gen_movqi (gen_rtx_MEM (QImode,
					 plus_constant (base, offset + 1)),
			    gen_lowpart (QImode, scratch)));
    }
}
/* Print a symbolic form of X to the debug file, F.  */

static void
arm_print_value (f, x)
     FILE * f;
     rtx x;
{
  switch (GET_CODE (x))
    {
    case CONST_INT:
      fprintf (f, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
      return;

    case CONST_DOUBLE:
      fprintf (f, "<0x%lx,0x%lx>", (long)XWINT (x, 2), (long)XWINT (x, 3));
      return;

    case CONST_STRING:
      fprintf (f, "\"%s\"", XSTR (x, 0));
      return;

    case SYMBOL_REF:
      fprintf (f, "`%s'", XSTR (x, 0));
      return;

    case LABEL_REF:
      fprintf (f, "L%d", INSN_UID (XEXP (x, 0)));
      return;

    case CONST:
      arm_print_value (f, XEXP (x, 0));
      return;

    case PLUS:
      arm_print_value (f, XEXP (x, 0));
      fprintf (f, "+");
      arm_print_value (f, XEXP (x, 1));
      return;

    case PC:
      fprintf (f, "pc");
      return;

    default:
      fprintf (f, "????");
      return;
    }
}
/* Routines for manipulation of the constant pool.  */

/* Arm instructions cannot load a large constant directly into a
   register; they have to come from a pc relative load.  The constant
   must therefore be placed in the addressable range of the pc
   relative load.  Depending on the precise pc relative load
   instruction the range is somewhere between 256 bytes and 4k.  This
   means that we often have to dump a constant inside a function, and
   generate code to branch around it.

   It is important to minimize this, since the branches will slow
   things down and make the code larger.

   Normally we can hide the table after an existing unconditional
   branch so that there is no interruption of the flow, but in the
   worst case the code looks like this:

	ldr	rn, L1
	...
	b	L2
	align
	L1:	.long value
	L2:
	...

	ldr	rn, L3
	...
	b	L4
	align
	L3:	.long value
	L4:
	...

   We fix this by performing a scan after scheduling, which notices
   which instructions need to have their operands fetched from the
   constant table and builds the table.

   The algorithm starts by building a table of all the constants that
   need fixing up and all the natural barriers in the function (places
   where a constant table can be dropped without breaking the flow).
   For each fixup we note how far the pc-relative replacement will be
   able to reach and the offset of the instruction into the function.

   Having built the table we then group the fixes together to form
   tables that are as large as possible (subject to addressing
   constraints) and emit each table of constants after the last
   barrier that is within range of all the instructions in the group.
   If a group does not contain a barrier, then we forcibly create one
   by inserting a jump instruction into the flow.  Once the table has
   been inserted, the insns are then modified to reference the
   relevant entry in the pool.

   Possible enhancements to the algorithm (not implemented) are:

   1) For some processors and object formats, there may be benefit in
   aligning the pools to the start of cache lines; this alignment
   would need to be taken into account when calculating addressability
   of a pool.  */

/* These typedefs are located at the start of this file, so that
   they can be used in the prototypes there.  This comment is to
   remind readers of that fact so that the following structures
   can be understood more easily.

     typedef struct minipool_node    Mnode;
     typedef struct minipool_fixup   Mfix;  */
struct minipool_node
{
  /* Doubly linked chain of entries.  */
  Mnode * next;
  Mnode * prev;
  /* The maximum offset into the code that this entry can be placed.  While
     pushing fixes for forward references, all entries are sorted in order
     of increasing max_address.  */
  HOST_WIDE_INT max_address;
  /* Similarly for an entry inserted for a backwards ref.  */
  HOST_WIDE_INT min_address;
  /* The number of fixes referencing this entry.  This can become zero
     if we "unpush" an entry.  In this case we ignore the entry when we
     come to emit the code.  */
  int refcount;
  /* The offset from the start of the minipool.  */
  HOST_WIDE_INT offset;
  /* The value in table.  */
  rtx value;
  /* The mode of value.  */
  enum machine_mode mode;
  int fix_size;
};

struct minipool_fixup
{
  Mfix * next;
  rtx insn;
  HOST_WIDE_INT address;
  rtx * loc;
  enum machine_mode mode;
  int fix_size;
  rtx value;
  Mnode * minipool;
  HOST_WIDE_INT forwards;
  HOST_WIDE_INT backwards;
};

/* Fixes less than a word need padding out to a word boundary.  */
#define MINIPOOL_FIX_SIZE(mode) \
  (GET_MODE_SIZE ((mode)) >= 4 ? GET_MODE_SIZE ((mode)) : 4)

static Mnode * minipool_vector_head;
static Mnode * minipool_vector_tail;
static rtx     minipool_vector_label;

/* The linked list of all minipool fixes required for this function.  */
Mfix * minipool_fix_head;
Mfix * minipool_fix_tail;
/* The fix entry for the current minipool, once it has been placed.  */
Mfix * minipool_barrier;
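/* For illustration (added note): MINIPOOL_FIX_SIZE above pads sub-word
   fixes out to a word, so MINIPOOL_FIX_SIZE (QImode) and
   MINIPOOL_FIX_SIZE (HImode) both yield 4, while
   MINIPOOL_FIX_SIZE (DImode) yields 8.  */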
/* Determines if INSN is the start of a jump table.  Returns the end
   of the TABLE or NULL_RTX.  */

static rtx
is_jump_table (insn)
     rtx insn;
{
  rtx table;

  if (GET_CODE (insn) == JUMP_INSN
      && JUMP_LABEL (insn) != NULL
      && ((table = next_real_insn (JUMP_LABEL (insn)))
	  == next_real_insn (insn))
      && table != NULL
      && GET_CODE (table) == JUMP_INSN
      && (GET_CODE (PATTERN (table)) == ADDR_VEC
	  || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
    return table;

  return NULL_RTX;
}

#ifndef JUMP_TABLES_IN_TEXT_SECTION
#define JUMP_TABLES_IN_TEXT_SECTION 0
#endif

static HOST_WIDE_INT
get_jump_table_size (insn)
     rtx insn;
{
  /* ADDR_VECs only take room if read-only data goes into the text
     section.  */
  if (JUMP_TABLES_IN_TEXT_SECTION
#if !defined(READONLY_DATA_SECTION)
      || 1
#endif
      )
    {
      rtx body = PATTERN (insn);
      int elt = GET_CODE (body) == ADDR_DIFF_VEC ? 1 : 0;

      return GET_MODE_SIZE (GET_MODE (body)) * XVECLEN (body, elt);
    }

  return 0;
}
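/* For illustration (added note): an ADDR_DIFF_VEC in HImode with ten
   entries occupies GET_MODE_SIZE (HImode) * 10 == 20 bytes when jump
   tables live in the text section; when they go elsewhere the function
   reports 0, since they then take no room in the code.  */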
/* Move a minipool fix MP from its current location to before MAX_MP.
   If MAX_MP is NULL, then MP doesn't need moving, but the addressing
   constraints may need updating.  */

static Mnode *
move_minipool_fix_forward_ref (mp, max_mp, max_address)
     Mnode *       mp;
     Mnode *       max_mp;
     HOST_WIDE_INT max_address;
{
  /* This should never be true and the code below assumes these are
     different.  */
  if (mp == max_mp)
    abort ();

  if (max_mp == NULL)
    {
      if (max_address < mp->max_address)
	mp->max_address = max_address;
    }
  else
    {
      if (max_address > max_mp->max_address - mp->fix_size)
	mp->max_address = max_mp->max_address - mp->fix_size;
      else
	mp->max_address = max_address;

      /* Unlink MP from its current position.  Since max_mp is non-null,
	 mp->prev must be non-null.  */
      mp->prev->next = mp->next;
      if (mp->next != NULL)
	mp->next->prev = mp->prev;
      else
	minipool_vector_tail = mp->prev;

      /* Re-insert it before MAX_MP.  */
      mp->next = max_mp;
      mp->prev = max_mp->prev;
      max_mp->prev = mp;

      if (mp->prev != NULL)
	mp->prev->next = mp;
      else
	minipool_vector_head = mp;
    }

  /* Save the new entry.  */
  max_mp = mp;

  /* Scan over the preceding entries and adjust their addresses as
     required.  */
  while (mp->prev != NULL
	 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
    {
      mp->prev->max_address = mp->max_address - mp->prev->fix_size;
      mp = mp->prev;
    }

  return max_mp;
}
/* Add a constant to the minipool for a forward reference.  Returns the
   node added or NULL if the constant will not fit in this pool.  */

static Mnode *
add_minipool_forward_ref (fix)
     Mfix * fix;
{
  /* If set, max_mp is the first pool_entry that has a lower
     constraint than the one we are trying to add.  */
  Mnode *       max_mp = NULL;
  HOST_WIDE_INT max_address = fix->address + fix->forwards;
  Mnode *       mp;

  /* If this fix's address is greater than the address of the first
     entry, then we can't put the fix in this pool.  We subtract the
     size of the current fix to ensure that if the table is fully
     packed we still have enough room to insert this value by shuffling
     the other fixes forwards.  */
  if (minipool_vector_head &&
      fix->address >= minipool_vector_head->max_address - fix->fix_size)
    return NULL;

  /* Scan the pool to see if a constant with the same value has
     already been added.  While we are doing this, also note the
     location where we must insert the constant if it doesn't already
     exist.  */
  for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
    {
      if (GET_CODE (fix->value) == GET_CODE (mp->value)
	  && fix->mode == mp->mode
	  && (GET_CODE (fix->value) != CODE_LABEL
	      || (CODE_LABEL_NUMBER (fix->value)
		  == CODE_LABEL_NUMBER (mp->value)))
	  && rtx_equal_p (fix->value, mp->value))
	{
	  /* More than one fix references this entry.  */
	  mp->refcount++;
	  return move_minipool_fix_forward_ref (mp, max_mp, max_address);
	}

      /* Note the insertion point if necessary.  */
      if (max_mp == NULL
	  && mp->max_address > max_address)
	max_mp = mp;
    }

  /* The value is not currently in the minipool, so we need to create
     a new entry for it.  If MAX_MP is NULL, the entry will be put on
     the end of the list since the placement is less constrained than
     any existing entry.  Otherwise, we insert the new fix before
     MAX_MP and, if necessary, adjust the constraints on the other
     entries.  */
  mp = xmalloc (sizeof (* mp));
  mp->fix_size = fix->fix_size;
  mp->mode = fix->mode;
  mp->value = fix->value;
  mp->refcount = 1;
  /* Not yet required for a backwards ref.  */
  mp->min_address = -65536;

  if (max_mp == NULL)
    {
      mp->max_address = max_address;
      mp->next = NULL;
      mp->prev = minipool_vector_tail;

      if (mp->prev == NULL)
	{
	  minipool_vector_head = mp;
	  minipool_vector_label = gen_label_rtx ();
	}
      else
	mp->prev->next = mp;

      minipool_vector_tail = mp;
    }
  else
    {
      if (max_address > max_mp->max_address - mp->fix_size)
	mp->max_address = max_mp->max_address - mp->fix_size;
      else
	mp->max_address = max_address;

      mp->next = max_mp;
      mp->prev = max_mp->prev;
      max_mp->prev = mp;
      if (mp->prev != NULL)
	mp->prev->next = mp;
      else
	minipool_vector_head = mp;
    }

  /* Save the new entry.  */
  max_mp = mp;

  /* Scan over the preceding entries and adjust their addresses as
     required.  */
  while (mp->prev != NULL
	 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
    {
      mp->prev->max_address = mp->max_address - mp->prev->fix_size;
      mp = mp->prev;
    }

  return max_mp;
}
/* Like move_minipool_fix_forward_ref, but for a backwards reference:
   move MP to after MIN_MP and update the addressing constraints.  */

static Mnode *
move_minipool_fix_backward_ref (mp, min_mp, min_address)
     Mnode *       mp;
     Mnode *       min_mp;
     HOST_WIDE_INT min_address;
{
  HOST_WIDE_INT offset;

  /* This should never be true, and the code below assumes these are
     different.  */
  if (mp == min_mp)
    abort ();

  if (min_mp == NULL)
    {
      if (min_address > mp->min_address)
	mp->min_address = min_address;
    }
  else
    {
      /* We will adjust this below if it is too loose.  */
      mp->min_address = min_address;

      /* Unlink MP from its current position.  Since min_mp is non-null,
	 mp->next must be non-null.  */
      mp->next->prev = mp->prev;
      if (mp->prev != NULL)
	mp->prev->next = mp->next;
      else
	minipool_vector_head = mp->next;

      /* Reinsert it after MIN_MP.  */
      mp->prev = min_mp;
      mp->next = min_mp->next;
      min_mp->next = mp;
      if (mp->next != NULL)
	mp->next->prev = mp;
      else
	minipool_vector_tail = mp;
    }

  min_mp = mp;

  offset = 0;
  for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
    {
      mp->offset = offset;
      if (mp->refcount > 0)
	offset += mp->fix_size;

      if (mp->next && mp->next->min_address < mp->min_address + mp->fix_size)
	mp->next->min_address = mp->min_address + mp->fix_size;
    }

  return min_mp;
}
/* Add a constant to the minipool for a backward reference.  Returns the
   node added or NULL if the constant will not fit in this pool.

   Note that the code for insertion for a backwards reference can be
   somewhat confusing because the calculated offsets for each fix do
   not take into account the size of the pool (which is still under
   construction).  */

static Mnode *
add_minipool_backward_ref (fix)
     Mfix * fix;
{
  /* If set, min_mp is the last pool_entry that has a lower constraint
     than the one we are trying to add.  */
  Mnode *       min_mp = NULL;
  /* This can be negative, since it is only a constraint.  */
  HOST_WIDE_INT min_address = fix->address - fix->backwards;
  Mnode *       mp;

  /* If we can't reach the current pool from this insn, or if we can't
     insert this entry at the end of the pool without pushing other
     fixes out of range, then we don't try.  This ensures that we
     can't fail later on.  */
  if (min_address >= minipool_barrier->address
      || (minipool_vector_tail->min_address + fix->fix_size
	  >= minipool_barrier->address))
    return NULL;

  /* Scan the pool to see if a constant with the same value has
     already been added.  While we are doing this, also note the
     location where we must insert the constant if it doesn't already
     exist.  */
  for (mp = minipool_vector_tail; mp != NULL; mp = mp->prev)
    {
      if (GET_CODE (fix->value) == GET_CODE (mp->value)
	  && fix->mode == mp->mode
	  && (GET_CODE (fix->value) != CODE_LABEL
	      || (CODE_LABEL_NUMBER (fix->value)
		  == CODE_LABEL_NUMBER (mp->value)))
	  && rtx_equal_p (fix->value, mp->value)
	  /* Check that there is enough slack to move this entry to the
	     end of the table (this is conservative).  */
	  && (mp->max_address
	      > (minipool_barrier->address
		 + minipool_vector_tail->offset
		 + minipool_vector_tail->fix_size)))
	{
	  mp->refcount++;
	  return move_minipool_fix_backward_ref (mp, min_mp, min_address);
	}

      if (min_mp != NULL)
	mp->min_address += fix->fix_size;
      else
	{
	  /* Note the insertion point if necessary.  */
	  if (mp->min_address < min_address)
	    min_mp = mp;
	  else if (mp->max_address
		   < minipool_barrier->address + mp->offset + fix->fix_size)
	    {
	      /* Inserting before this entry would push the fix beyond
		 its maximum address (which can happen if we have
		 re-located a forwards fix); force the new fix to come
		 after it.  */
	      min_mp = mp;
	      min_address = mp->min_address + fix->fix_size;
	    }
	}
    }

  /* We need to create a new entry.  */
  mp = xmalloc (sizeof (* mp));
  mp->fix_size = fix->fix_size;
  mp->mode = fix->mode;
  mp->value = fix->value;
  mp->refcount = 1;
  /* Not yet required for a forwards ref.  */
  mp->max_address = minipool_barrier->address + 65536;

  mp->min_address = min_address;

  if (min_mp == NULL)
    {
      mp->prev = NULL;
      mp->next = minipool_vector_head;

      if (mp->next == NULL)
	{
	  minipool_vector_tail = mp;
	  minipool_vector_label = gen_label_rtx ();
	}
      else
	mp->next->prev = mp;

      minipool_vector_head = mp;
    }
  else
    {
      mp->next = min_mp->next;
      mp->prev = min_mp;
      min_mp->next = mp;

      if (mp->next != NULL)
	mp->next->prev = mp;
      else
	minipool_vector_tail = mp;
    }

  /* Save the new entry.  */
  min_mp = mp;

  if (mp->prev)
    mp = mp->prev;
  else
    mp->offset = 0;

  /* Scan over the following entries and adjust their offsets.  */
  while (mp->next != NULL)
    {
      if (mp->next->min_address < mp->min_address + mp->fix_size)
	mp->next->min_address = mp->min_address + mp->fix_size;

      if (mp->refcount)
	mp->next->offset = mp->offset + mp->fix_size;
      else
	mp->next->offset = mp->offset;

      mp = mp->next;
    }

  return min_mp;
}
/* Assign the offset of each entry in the minipool, relative to the
   start of the pool, and record the barrier after which the pool will
   be emitted.  */

static void
assign_minipool_offsets (barrier)
     Mfix * barrier;
{
  HOST_WIDE_INT offset = 0;
  Mnode * mp;

  minipool_barrier = barrier;

  for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
    {
      mp->offset = offset;

      if (mp->refcount > 0)
	offset += mp->fix_size;
    }
}
/* Output the literal table.  */

static void
dump_minipool (scan)
     rtx scan;
{
  Mnode * mp;
  Mnode * nmp;

  if (rtl_dump_file)
    fprintf (rtl_dump_file,
	     ";; Emitting minipool after insn %u; address %ld\n",
	     INSN_UID (scan), (unsigned long) minipool_barrier->address);

  scan = emit_label_after (gen_label_rtx (), scan);
  scan = emit_insn_after (gen_align_4 (), scan);
  scan = emit_label_after (minipool_vector_label, scan);

  for (mp = minipool_vector_head; mp != NULL; mp = nmp)
    {
      if (mp->refcount > 0)
	{
	  if (rtl_dump_file)
	    {
	      fprintf (rtl_dump_file,
		       ";;  Offset %u, min %ld, max %ld ",
		       (unsigned) mp->offset, (unsigned long) mp->min_address,
		       (unsigned long) mp->max_address);
	      arm_print_value (rtl_dump_file, mp->value);
	      fputc ('\n', rtl_dump_file);
	    }

	  switch (mp->fix_size)
	    {
#ifdef HAVE_consttable_1
	    case 1:
	      scan = emit_insn_after (gen_consttable_1 (mp->value), scan);
	      break;
#endif
#ifdef HAVE_consttable_2
	    case 2:
	      scan = emit_insn_after (gen_consttable_2 (mp->value), scan);
	      break;
#endif
#ifdef HAVE_consttable_4
	    case 4:
	      scan = emit_insn_after (gen_consttable_4 (mp->value), scan);
	      break;
#endif
#ifdef HAVE_consttable_8
	    case 8:
	      scan = emit_insn_after (gen_consttable_8 (mp->value), scan);
	      break;
#endif
	    default:
	      abort ();
	      break;
	    }
	}

      nmp = mp->next;
      free (mp);
    }

  minipool_vector_head = minipool_vector_tail = NULL;
  scan = emit_insn_after (gen_consttable_end (), scan);
  scan = emit_barrier_after (scan);
}
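/* Roughly what the emitted pool looks like in the assembly output
   (illustrative sketch, labels invented):

	b	.L2		@ jump inserted around the pool
	.align	2
   .L1:
	.word	0x12345678	@ a 4-byte consttable entry
	.word	some_symbol	@ another 4-byte entry
   .L2:

   where .L1 is minipool_vector_label; the fixed-up insns load their
   operands pc-relative from .L1 plus each entry's offset.  */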
/* Return the cost of forcibly inserting a barrier after INSN.  */

static int
arm_barrier_cost (insn)
     rtx insn;
{
  /* Basing the location of the pool on the loop depth is preferable,
     but at the moment, the basic block information seems to be
     corrupt by this stage of the compilation.  */
  int base_cost = 50;
  rtx next = next_nonnote_insn (insn);

  if (next != NULL && GET_CODE (next) == CODE_LABEL)
    base_cost -= 20;

  switch (GET_CODE (insn))
    {
    case CODE_LABEL:
      /* It will always be better to place the table before the label, rather
	 than after it.  */
      return 50;

    case INSN:
    case CALL_INSN:
      return base_cost;

    case JUMP_INSN:
      return base_cost - 10;

    default:
      return base_cost + 10;
    }
}
/* Find the best place in the insn stream in the range
   (FIX->address,MAX_ADDRESS) to forcibly insert a minipool barrier.
   Create the barrier by inserting a jump and add a new fix entry for
   it.  */

static Mfix *
create_fix_barrier (fix, max_address)
     Mfix * fix;
     HOST_WIDE_INT max_address;
{
  HOST_WIDE_INT count = 0;
  rtx barrier;
  rtx from = fix->insn;
  rtx selected = from;
  int selected_cost;
  HOST_WIDE_INT selected_address;
  Mfix * new_fix;
  HOST_WIDE_INT max_count = max_address - fix->address;
  rtx label = gen_label_rtx ();

  selected_cost = arm_barrier_cost (from);
  selected_address = fix->address;

  while (from && count < max_count)
    {
      rtx tmp;
      int new_cost;

      /* This code shouldn't have been called if there was a natural barrier
	 within range.  */
      if (GET_CODE (from) == BARRIER)
	abort ();

      /* Count the length of this insn.  */
      count += get_attr_length (from);

      /* If there is a jump table, add its length.  */
      tmp = is_jump_table (from);
      if (tmp != NULL)
	{
	  count += get_jump_table_size (tmp);

	  /* Jump tables aren't in a basic block, so base the cost on
	     the dispatch insn.  If we select this location, we will
	     still put the pool after the table.  */
	  new_cost = arm_barrier_cost (from);

	  if (count < max_count && new_cost <= selected_cost)
	    {
	      selected = tmp;
	      selected_cost = new_cost;
	      selected_address = fix->address + count;
	    }

	  /* Continue after the dispatch table.  */
	  from = NEXT_INSN (tmp);
	  continue;
	}

      new_cost = arm_barrier_cost (from);

      if (count < max_count && new_cost <= selected_cost)
	{
	  selected = from;
	  selected_cost = new_cost;
	  selected_address = fix->address + count;
	}

      from = NEXT_INSN (from);
    }

  /* Create a new JUMP_INSN that branches around a barrier.  */
  from = emit_jump_insn_after (gen_jump (label), selected);
  JUMP_LABEL (from) = label;
  barrier = emit_barrier_after (from);
  emit_label_after (label, barrier);

  /* Create a minipool barrier entry for the new barrier.  */
  new_fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* new_fix));
  new_fix->insn = barrier;
  new_fix->address = selected_address;
  new_fix->next = fix->next;
  fix->next = new_fix;

  return new_fix;
}
/* Record that there is a natural barrier in the insn stream at
   ADDRESS.  */

static void
push_minipool_barrier (insn, address)
     rtx insn;
     HOST_WIDE_INT address;
{
  Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));

  fix->insn = insn;
  fix->address = address;

  fix->next = NULL;
  if (minipool_fix_head != NULL)
    minipool_fix_tail->next = fix;
  else
    minipool_fix_head = fix;

  minipool_fix_tail = fix;
}
/* Record INSN, which will need fixing up to load a value from the
   minipool.  ADDRESS is the offset of the insn since the start of the
   function; LOC is a pointer to the part of the insn which requires
   fixing; VALUE is the constant that must be loaded, which is of type
   MODE.  */

static void
push_minipool_fix (insn, address, loc, mode, value)
     rtx insn;
     HOST_WIDE_INT address;
     rtx * loc;
     enum machine_mode mode;
     rtx value;
{
  Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));

#ifdef AOF_ASSEMBLER
  /* PIC symbol references need to be converted into offsets into the
     based area.  */
  /* XXX This shouldn't be done here.  */
  if (flag_pic && GET_CODE (value) == SYMBOL_REF)
    value = aof_pic_entry (value);
#endif /* AOF_ASSEMBLER */

  fix->insn = insn;
  fix->address = address;
  fix->loc = loc;
  fix->mode = mode;
  fix->fix_size = MINIPOOL_FIX_SIZE (mode);
  fix->value = value;
  fix->forwards = get_attr_pool_range (insn);
  fix->backwards = get_attr_neg_pool_range (insn);
  fix->minipool = NULL;

  /* If an insn doesn't have a range defined for it, then it isn't
     expecting to be reworked by this code.  Better to abort now than
     to generate duff assembly code.  */
  if (fix->forwards == 0 && fix->backwards == 0)
    abort ();

  if (rtl_dump_file)
    {
      fprintf (rtl_dump_file,
	       ";; %smode fixup for i%d; addr %lu, range (%ld,%ld): ",
	       GET_MODE_NAME (mode),
	       INSN_UID (insn), (unsigned long) address,
	       -1 * (long)fix->backwards, (long)fix->forwards);
      arm_print_value (rtl_dump_file, fix->value);
      fprintf (rtl_dump_file, "\n");
    }

  /* Add it to the chain of fixes.  */
  fix->next = NULL;

  if (minipool_fix_head != NULL)
    minipool_fix_tail->next = fix;
  else
    minipool_fix_head = fix;

  minipool_fix_tail = fix;
}
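/* For orientation (approximate figures, not taken verbatim from this
   file): the "pool_range" and "neg_pool_range" insn attributes queried
   above come from the machine description; for an ARM-mode word load
   they are on the order of 4K bytes forwards and backwards, while
   Thumb-mode loads can only reach forwards, roughly 1K bytes.  A fix
   with both ranges zero is one this code was never meant to handle,
   hence the abort above.  */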
/* Scan INSN and note any of its operands that need fixing.  */

static void
note_invalid_constants (insn, address)
     rtx insn;
     HOST_WIDE_INT address;
{
  int opno;

  extract_insn (insn);

  if (!constrain_operands (1))
    fatal_insn_not_found (insn);

  /* Fill in recog_op_alt with information about the constraints of this
     insn.  */
  preprocess_constraints ();

  for (opno = 0; opno < recog_data.n_operands; opno++)
    {
      /* Things we need to fix can only occur in inputs.  */
      if (recog_data.operand_type[opno] != OP_IN)
	continue;

      /* If this alternative is a memory reference, then any mention
	 of constants in this alternative is really to fool reload
	 into allowing us to accept one there.  We need to fix them up
	 now so that we output the right code.  */
      if (recog_op_alt[opno][which_alternative].memory_ok)
	{
	  rtx op = recog_data.operand[opno];

	  if (CONSTANT_P (op))
	    push_minipool_fix (insn, address, recog_data.operand_loc[opno],
			       recog_data.operand_mode[opno], op);
	  /* RWE: Now we look correctly at the operands for the insn,
	     this shouldn't be needed any more.  */
#ifndef AOF_ASSEMBLER
	  /* XXX Is this still needed?  */
	  else if (GET_CODE (op) == UNSPEC && XINT (op, 1) == UNSPEC_PIC_SYM)
	    push_minipool_fix (insn, address, recog_data.operand_loc[opno],
			       recog_data.operand_mode[opno],
			       XVECEXP (op, 0, 0));
#endif
	  else if (GET_CODE (op) == MEM
		   && GET_CODE (XEXP (op, 0)) == SYMBOL_REF
		   && CONSTANT_POOL_ADDRESS_P (XEXP (op, 0)))
	    push_minipool_fix (insn, address, recog_data.operand_loc[opno],
			       recog_data.operand_mode[opno],
			       get_pool_constant (XEXP (op, 0)));
	}
    }
}
void
arm_reorg (first)
     rtx first;
{
  rtx insn;
  HOST_WIDE_INT address = 0;
  Mfix * fix;

  minipool_fix_head = minipool_fix_tail = NULL;

  /* The first insn must always be a note, or the code below won't
     scan it properly.  */
  if (GET_CODE (first) != NOTE)
    abort ();

  /* Scan all the insns and record the operands that will need fixing.  */
  for (insn = next_nonnote_insn (first); insn; insn = next_nonnote_insn (insn))
    {
      if (GET_CODE (insn) == BARRIER)
	push_minipool_barrier (insn, address);
      else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
	       || GET_CODE (insn) == JUMP_INSN)
	{
	  rtx table;

	  note_invalid_constants (insn, address);
	  address += get_attr_length (insn);

	  /* If the insn is a vector jump, add the size of the table
	     and skip the table.  */
	  if ((table = is_jump_table (insn)) != NULL)
	    {
	      address += get_jump_table_size (table);
	      insn = table;
	    }
	}
    }

  fix = minipool_fix_head;

  /* Now scan the fixups and perform the required changes.  */
  while (fix)
    {
      Mfix * ftmp;
      Mfix * fdel;
      Mfix * last_added_fix;
      Mfix * last_barrier = NULL;
      Mfix * this_fix;

      /* Skip any further barriers before the next fix.  */
      while (fix && GET_CODE (fix->insn) == BARRIER)
	fix = fix->next;

      /* No more fixes.  */
      if (fix == NULL)
	break;

      last_added_fix = NULL;

      for (ftmp = fix; ftmp; ftmp = ftmp->next)
	{
	  if (GET_CODE (ftmp->insn) == BARRIER)
	    {
	      if (ftmp->address >= minipool_vector_head->max_address)
		break;

	      last_barrier = ftmp;
	    }
	  else if ((ftmp->minipool = add_minipool_forward_ref (ftmp)) == NULL)
	    break;

	  last_added_fix = ftmp;  /* Keep track of the last fix added.  */
	}

      /* If we found a barrier, drop back to that; any fixes that we
	 could have reached but come after the barrier will now go in
	 the next mini-pool.  */
      if (last_barrier != NULL)
	{
	  /* Reduce the refcount for those fixes that won't go into this
	     pool after all.  */
	  for (fdel = last_barrier->next;
	       fdel && fdel != ftmp;
	       fdel = fdel->next)
	    {
	      fdel->minipool->refcount--;
	      fdel->minipool = NULL;
	    }

	  ftmp = last_barrier;
	}
      else
	{
	  /* ftmp is the first fix that we can't fit into this pool and
	     there are no natural barriers that we could use.  Insert a
	     new barrier in the code somewhere between the previous
	     fix and this one, and arrange to jump around it.  */
	  HOST_WIDE_INT max_address;

	  /* The last item on the list of fixes must be a barrier, so
	     we can never run off the end of the list of fixes without
	     last_barrier being set.  */
	  if (ftmp == NULL)
	    abort ();

	  max_address = minipool_vector_head->max_address;
	  /* Check that there isn't another fix that is in range that
	     we couldn't fit into this pool because the pool was
	     already too large: we need to put the pool before such an
	     instruction.  */
	  if (ftmp->address < max_address)
	    max_address = ftmp->address;

	  last_barrier = create_fix_barrier (last_added_fix, max_address);
	}

      assign_minipool_offsets (last_barrier);

      while (ftmp)
	{
	  if (GET_CODE (ftmp->insn) != BARRIER
	      && ((ftmp->minipool = add_minipool_backward_ref (ftmp))
		  == NULL))
	    break;

	  ftmp = ftmp->next;
	}

      /* Scan over the fixes we have identified for this pool, fixing them
	 up and adding the constants to the pool itself.  */
      for (this_fix = fix; this_fix && ftmp != this_fix;
	   this_fix = this_fix->next)
	if (GET_CODE (this_fix->insn) != BARRIER)
	  {
	    rtx addr
	      = plus_constant (gen_rtx_LABEL_REF (VOIDmode,
						  minipool_vector_label),
			       this_fix->minipool->offset);
	    *this_fix->loc = gen_rtx_MEM (this_fix->mode, addr);
	  }

      dump_minipool (last_barrier->insn);
      fix = ftmp;
    }

  /* From now on we must synthesize any constants that we can't handle
     directly.  This can happen if the RTL gets split during final
     instruction generation.  */
  after_arm_reorg = 1;

  /* Free the minipool memory.  */
  obstack_free (&minipool_obstack, minipool_startobj);
}
/* Routines to output assembly language.  */

/* If the rtx is the correct value then return the string of the number.
   In this way we can ensure that valid double constants are generated even
   when cross compiling.  */

const char *
fp_immediate_constant (x)
     rtx x;
{
  REAL_VALUE_TYPE r;
  int i;

  if (!fpa_consts_inited)
    init_fpa_table ();

  REAL_VALUE_FROM_CONST_DOUBLE (r, x);
  for (i = 0; i < 8; i++)
    if (REAL_VALUES_EQUAL (r, values_fpa[i]))
      return strings_fpa[i];

  abort ();
}

/* As for fp_immediate_constant, but value is passed directly, not in rtx.  */

static const char *
fp_const_from_val (r)
     REAL_VALUE_TYPE * r;
{
  int i;

  if (!fpa_consts_inited)
    init_fpa_table ();

  for (i = 0; i < 8; i++)
    if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
      return strings_fpa[i];

  abort ();
}
/* Output the operands of a LDM/STM instruction to STREAM.
   MASK is the ARM register set mask of which only bits 0-15 are important.
   REG is the base register, either the frame pointer or the stack pointer,
   INSTR is the possibly suffixed load or store instruction.  */

static void
print_multi_reg (stream, instr, reg, mask)
     FILE * stream;
     const char * instr;
     int reg;
     int mask;
{
  int i;
  int not_first = FALSE;

  fputc ('\t', stream);
  asm_fprintf (stream, instr, reg);
  fputs (", {", stream);

  for (i = 0; i <= LAST_ARM_REGNUM; i++)
    if (mask & (1 << i))
      {
	if (not_first)
	  fprintf (stream, ", ");

	asm_fprintf (stream, "%r", i);
	not_first = TRUE;
      }

  fprintf (stream, "}%s\n", TARGET_APCS_32 ? "" : "^");
}
/* Output a 'call' insn.  */

const char *
output_call (operands)
     rtx * operands;
{
  /* Handle calls to lr using ip (which may be clobbered in subr anyway).  */
  if (REGNO (operands[0]) == LR_REGNUM)
    {
      operands[0] = gen_rtx_REG (SImode, IP_REGNUM);
      output_asm_insn ("mov%?\t%0, %|lr", operands);
    }

  output_asm_insn ("mov%?\t%|lr, %|pc", operands);

  if (TARGET_INTERWORK)
    output_asm_insn ("bx%?\t%0", operands);
  else
    output_asm_insn ("mov%?\t%|pc, %0", operands);

  return "";
}
/* Scan X, part of a call pattern, and rewrite any reference to LR into
   a reference to IP.  Returns nonzero if anything was changed.  */

static int
eliminate_lr2ip (x)
     rtx * x;
{
  int something_changed = 0;
  rtx x0 = * x;
  int code = GET_CODE (x0);
  int i, j;
  const char * fmt;

  switch (code)
    {
    case REG:
      if (REGNO (x0) == LR_REGNUM)
	{
	  *x = gen_rtx_REG (SImode, IP_REGNUM);
	  return 1;
	}
      return 0;

    default:
      /* Scan through the sub-elements and change any references there.  */
      fmt = GET_RTX_FORMAT (code);

      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	if (fmt[i] == 'e')
	  something_changed |= eliminate_lr2ip (&XEXP (x0, i));
	else if (fmt[i] == 'E')
	  for (j = 0; j < XVECLEN (x0, i); j++)
	    something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));

      return something_changed;
    }
}
/* Output a 'call' insn that is a reference in memory.  */

const char *
output_call_mem (operands)
     rtx * operands;
{
  operands[0] = copy_rtx (operands[0]); /* Be ultra careful.  */
  /* Handle calls using lr by using ip (which may be clobbered in subr anyway).  */
  if (eliminate_lr2ip (&operands[0]))
    output_asm_insn ("mov%?\t%|ip, %|lr", operands);

  if (TARGET_INTERWORK)
    {
      output_asm_insn ("ldr%?\t%|ip, %0", operands);
      output_asm_insn ("mov%?\t%|lr, %|pc", operands);
      output_asm_insn ("bx%?\t%|ip", operands);
    }
  else
    {
      output_asm_insn ("mov%?\t%|lr, %|pc", operands);
      output_asm_insn ("ldr%?\t%|pc, %0", operands);
    }

  return "";
}
/* Output a move from ARM registers to an FPU register.
   OPERANDS[0] is an FPU register.
   OPERANDS[1] is the first register of an ARM register pair.  */

const char *
output_mov_long_double_fpu_from_arm (operands)
     rtx * operands;
{
  int arm_reg0 = REGNO (operands[1]);
  rtx ops[3];

  if (arm_reg0 == IP_REGNUM)
    abort ();

  ops[0] = gen_rtx_REG (SImode, arm_reg0);
  ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
  ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);

  output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
  output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);

  return "";
}

/* Output a move from an FPU register to ARM registers.
   OPERANDS[0] is the first register of an ARM register pair.
   OPERANDS[1] is an FPU register.  */

const char *
output_mov_long_double_arm_from_fpu (operands)
     rtx * operands;
{
  int arm_reg0 = REGNO (operands[0]);
  rtx ops[3];

  if (arm_reg0 == IP_REGNUM)
    abort ();

  ops[0] = gen_rtx_REG (SImode, arm_reg0);
  ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
  ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);

  output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
  output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);

  return "";
}
/* Output a move from ARM registers to ARM registers of a long double.
   OPERANDS[0] is the destination.
   OPERANDS[1] is the source.  */

const char *
output_mov_long_double_arm_from_arm (operands)
     rtx * operands;
{
  /* We have to be careful here because the two might overlap.  */
  int dest_start = REGNO (operands[0]);
  int src_start = REGNO (operands[1]);
  rtx ops[2];
  int i;

  if (dest_start < src_start)
    {
      for (i = 0; i < 3; i++)
	{
	  ops[0] = gen_rtx_REG (SImode, dest_start + i);
	  ops[1] = gen_rtx_REG (SImode, src_start + i);
	  output_asm_insn ("mov%?\t%0, %1", ops);
	}
    }
  else
    {
      for (i = 2; i >= 0; i--)
	{
	  ops[0] = gen_rtx_REG (SImode, dest_start + i);
	  ops[1] = gen_rtx_REG (SImode, src_start + i);
	  output_asm_insn ("mov%?\t%0, %1", ops);
	}
    }

  return "";
}
/* Output a move from ARM registers to an FPU register.
   OPERANDS[0] is an FPU register.
   OPERANDS[1] is the first register of an ARM register pair.  */

const char *
output_mov_double_fpu_from_arm (operands)
     rtx * operands;
{
  int arm_reg0 = REGNO (operands[1]);
  rtx ops[2];

  if (arm_reg0 == IP_REGNUM)
    abort ();

  ops[0] = gen_rtx_REG (SImode, arm_reg0);
  ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
  output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
  output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);

  return "";
}

/* Output a move from an FPU register to ARM registers.
   OPERANDS[0] is the first register of an ARM register pair.
   OPERANDS[1] is an FPU register.  */

const char *
output_mov_double_arm_from_fpu (operands)
     rtx * operands;
{
  int arm_reg0 = REGNO (operands[0]);
  rtx ops[2];

  if (arm_reg0 == IP_REGNUM)
    abort ();

  ops[0] = gen_rtx_REG (SImode, arm_reg0);
  ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
  output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
  output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);

  return "";
}
/* Output a move between double words.
   It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
   or MEM<-REG and all MEMs must be offsettable addresses.  */

const char *
output_move_double (operands)
     rtx * operands;
{
  enum rtx_code code0 = GET_CODE (operands[0]);
  enum rtx_code code1 = GET_CODE (operands[1]);
  rtx otherops[3];

  if (code0 == REG)
    {
      int reg0 = REGNO (operands[0]);

      otherops[0] = gen_rtx_REG (SImode, 1 + reg0);

      if (code1 == REG)
	{
	  int reg1 = REGNO (operands[1]);
	  if (reg1 == IP_REGNUM)
	    abort ();

	  /* Ensure the second source is not overwritten.  */
	  if (reg1 == reg0 + (WORDS_BIG_ENDIAN ? -1 : 1))
	    output_asm_insn ("mov%?\t%Q0, %Q1\n\tmov%?\t%R0, %R1", operands);
	  else
	    output_asm_insn ("mov%?\t%R0, %R1\n\tmov%?\t%Q0, %Q1", operands);
	}
      else if (code1 == CONST_DOUBLE)
	{
	  if (GET_MODE (operands[1]) == DFmode)
	    {
	      REAL_VALUE_TYPE r;
	      long l[2];

	      REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
	      REAL_VALUE_TO_TARGET_DOUBLE (r, l);
	      otherops[1] = GEN_INT (l[1]);
	      operands[1] = GEN_INT (l[0]);
	    }
	  else if (GET_MODE (operands[1]) != VOIDmode)
	    abort ();
	  else if (WORDS_BIG_ENDIAN)
	    {
	      otherops[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
	      operands[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
	    }
	  else
	    {
	      otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
	      operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
	    }

	  output_mov_immediate (operands);
	  output_mov_immediate (otherops);
	}
      else if (code1 == CONST_INT)
	{
#if HOST_BITS_PER_WIDE_INT > 32
	  /* If HOST_WIDE_INT is more than 32 bits, the intval tells us
	     what the upper word is.  */
	  if (WORDS_BIG_ENDIAN)
	    {
	      otherops[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
	      operands[1] = GEN_INT (INTVAL (operands[1]) >> 32);
	    }
	  else
	    {
	      otherops[1] = GEN_INT (INTVAL (operands[1]) >> 32);
	      operands[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
	    }
#else
	  /* Sign extend the intval into the high-order word.  */
	  if (WORDS_BIG_ENDIAN)
	    {
	      otherops[1] = operands[1];
	      operands[1] = (INTVAL (operands[1]) < 0
			     ? constm1_rtx : const0_rtx);
	    }
	  else
	    otherops[1] = INTVAL (operands[1]) < 0 ? constm1_rtx : const0_rtx;
#endif
	  output_mov_immediate (otherops);
	  output_mov_immediate (operands);
	}
      else if (code1 == MEM)
	{
	  switch (GET_CODE (XEXP (operands[1], 0)))
	    {
	    case REG:
	      output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
	      break;

	    case PRE_INC:
	      abort (); /* Should never happen now.  */
	      break;

	    case PRE_DEC:
	      output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
	      break;

	    case POST_INC:
	      output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
	      break;

	    case POST_DEC:
	      abort (); /* Should never happen now.  */
	      break;

	    case LABEL_REF:
	    case CONST:
	      output_asm_insn ("adr%?\t%0, %1", operands);
	      output_asm_insn ("ldm%?ia\t%0, %M0", operands);
	      break;

	    default:
	      if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1),
				   GET_MODE (XEXP (XEXP (operands[1], 0), 1))))
		{
		  otherops[0] = operands[0];
		  otherops[1] = XEXP (XEXP (operands[1], 0), 0);
		  otherops[2] = XEXP (XEXP (operands[1], 0), 1);

		  if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
		    {
		      if (GET_CODE (otherops[2]) == CONST_INT)
			{
			  switch (INTVAL (otherops[2]))
			    {
			    case -8:
			      output_asm_insn ("ldm%?db\t%1, %M0", otherops);
			      return "";
			    case -4:
			      output_asm_insn ("ldm%?da\t%1, %M0", otherops);
			      return "";
			    case 4:
			      output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
			      return "";
			    }

			  if (!(const_ok_for_arm (INTVAL (otherops[2]))))
			    output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
			  else
			    output_asm_insn ("add%?\t%0, %1, %2", otherops);
			}
		      else
			output_asm_insn ("add%?\t%0, %1, %2", otherops);
		    }
		  else
		    output_asm_insn ("sub%?\t%0, %1, %2", otherops);

		  return "ldm%?ia\t%0, %M0";
		}
	      else
		{
		  otherops[1] = adjust_address (operands[1], VOIDmode, 4);
		  /* Take care of overlapping base/data reg.  */
		  if (reg_mentioned_p (operands[0], operands[1]))
		    {
		      output_asm_insn ("ldr%?\t%0, %1", otherops);
		      output_asm_insn ("ldr%?\t%0, %1", operands);
		    }
		  else
		    {
		      output_asm_insn ("ldr%?\t%0, %1", operands);
		      output_asm_insn ("ldr%?\t%0, %1", otherops);
		    }
		}
	    }
	}
      else
	abort (); /* Constraints should prevent this.  */
    }
  else if (code0 == MEM && code1 == REG)
    {
      if (REGNO (operands[1]) == IP_REGNUM)
	abort ();

      switch (GET_CODE (XEXP (operands[0], 0)))
	{
	case REG:
	  output_asm_insn ("stm%?ia\t%m0, %M1", operands);
	  break;

	case PRE_INC:
	  abort (); /* Should never happen now.  */
	  break;

	case PRE_DEC:
	  output_asm_insn ("stm%?db\t%m0!, %M1", operands);
	  break;

	case POST_INC:
	  output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
	  break;

	case POST_DEC:
	  abort (); /* Should never happen now.  */
	  break;

	case PLUS:
	  if (GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT)
	    {
	      switch (INTVAL (XEXP (XEXP (operands[0], 0), 1)))
		{
		case -8:
		  output_asm_insn ("stm%?db\t%m0, %M1", operands);
		  return "";

		case -4:
		  output_asm_insn ("stm%?da\t%m0, %M1", operands);
		  return "";

		case 4:
		  output_asm_insn ("stm%?ib\t%m0, %M1", operands);
		  return "";
		}
	    }
	  /* Fall through */

	default:
	  otherops[0] = adjust_address (operands[0], VOIDmode, 4);
	  otherops[1] = gen_rtx_REG (SImode, 1 + REGNO (operands[1]));
	  output_asm_insn ("str%?\t%1, %0", operands);
	  output_asm_insn ("str%?\t%1, %0", otherops);
	}
    }
  else
    /* Constraints should prevent this.  */
    abort ();

  return "";
}
/* Output an arbitrary MOV reg, #n.
   OPERANDS[0] is a register.  OPERANDS[1] is a const_int.  */

const char *
output_mov_immediate (operands)
     rtx * operands;
{
  HOST_WIDE_INT n = INTVAL (operands[1]);

  /* Try to use one MOV.  */
  if (const_ok_for_arm (n))
    output_asm_insn ("mov%?\t%0, %1", operands);

  /* Try to use one MVN.  */
  else if (const_ok_for_arm (~n))
    {
      operands[1] = GEN_INT (~n);
      output_asm_insn ("mvn%?\t%0, %1", operands);
    }
  else
    {
      int n_ones = 0;
      int i;

      /* If all else fails, make it out of ORRs or BICs as appropriate.  */
      for (i = 0; i < 32; i++)
	if (n & 1 << i)
	  n_ones++;

      if (n_ones > 16)  /* Shorter to use MVN with BIC in this case.  */
	output_multi_immediate (operands, "mvn%?\t%0, %1",
				"bic%?\t%0, %0, %1", 1, ~ n);
      else
	output_multi_immediate (operands, "mov%?\t%0, %1",
				"orr%?\t%0, %0, %1", 1, n);
    }

  return "";
}
/* Output an ADD r, s, #n where n may be too big for one instruction.
   If adding zero to one register, output nothing.  */

const char *
output_add_immediate (operands)
     rtx * operands;
{
  HOST_WIDE_INT n = INTVAL (operands[2]);

  if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
    {
      if (n < 0)
	output_multi_immediate (operands,
				"sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
				-n);
      else
	output_multi_immediate (operands,
				"add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
				n);
    }

  return "";
}
/* Output a multiple immediate operation.
   OPERANDS is the vector of operands referred to in the output patterns.
   INSTR1 is the output pattern to use for the first constant.
   INSTR2 is the output pattern to use for subsequent constants.
   IMMED_OP is the index of the constant slot in OPERANDS.
   N is the constant value.  */

static const char *
output_multi_immediate (operands, instr1, instr2, immed_op, n)
     rtx * operands;
     const char * instr1;
     const char * instr2;
     int immed_op;
     HOST_WIDE_INT n;
{
#if HOST_BITS_PER_WIDE_INT > 32
  n &= 0xffffffff;
#endif

  if (n == 0)
    {
      /* Quick and easy output.  */
      operands[immed_op] = const0_rtx;
      output_asm_insn (instr1, operands);
    }
  else
    {
      int i;
      const char * instr = instr1;

      /* Note that n is never zero here (which would give no output).  */
      for (i = 0; i < 32; i += 2)
	{
	  if (n & (3 << i))
	    {
	      operands[immed_op] = GEN_INT (n & (255 << i));
	      output_asm_insn (instr, operands);
	      instr = instr2;
	      i += 6;
	    }
	}
    }

  return "";
}
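/* Worked example (added for illustration): loading 0x12345678 splits
   into four 8-bit fields, each starting at an even bit position and
   hence a valid rotated ARM immediate:

	mov	r0, #0x278
	orr	r0, r0, #0x5400
	orr	r0, r0, #0x2340000
	orr	r0, r0, #0x10000000

   The four fields sum back to 0x12345678.  */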
/* Return the appropriate ARM instruction for the operation code.
   The returned result should not be overwritten.  OP is the rtx of the
   operation.  SHIFT_FIRST_ARG is TRUE if the first argument of the operator
   was shifted.  */

const char *
arithmetic_instr (op, shift_first_arg)
     rtx op;
     int shift_first_arg;
{
  switch (GET_CODE (op))
    {
    case PLUS:
      return "add";

    case MINUS:
      return shift_first_arg ? "rsb" : "sub";

    case IOR:
      return "orr";

    case XOR:
      return "eor";

    case AND:
      return "and";

    default:
      abort ();
    }
}
/* Ensure valid constant shifts and return the appropriate shift mnemonic
   for the operation code.  The returned result should not be overwritten.
   OP is the rtx code of the shift.
   On exit, *AMOUNTP will be -1 if the shift is by a register, or a constant
   shift.  */

static const char *
shift_op (op, amountp)
     rtx op;
     HOST_WIDE_INT *amountp;
{
  const char * mnem;
  enum rtx_code code = GET_CODE (op);

  if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
    *amountp = -1;
  else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
    *amountp = INTVAL (XEXP (op, 1));
  else
    abort ();

  switch (code)
    {
    case ASHIFT:
      mnem = "asl";
      break;

    case ASHIFTRT:
      mnem = "asr";
      break;

    case LSHIFTRT:
      mnem = "lsr";
      break;

    case ROTATERT:
      mnem = "ror";
      break;

    case MULT:
      /* We never have to worry about the amount being other than a
	 power of 2, since this case can never be reloaded from a reg.  */
      if (*amountp != -1)
	*amountp = int_log2 (*amountp);
      else
	abort ();
      return "asl";

    default:
      abort ();
    }

  if (*amountp != -1)
    {
      /* This is not 100% correct, but follows from the desire to merge
	 multiplication by a power of 2 with the recognizer for a
	 shift.  >=32 is not a valid shift for "asl", so we must try and
	 output a shift that produces the correct arithmetical result.
	 Using lsr #32 is identical except for the fact that the carry bit
	 is not set correctly if we set the flags; but we never use the
	 carry bit from such an operation, so we can ignore that.  */
      if (code == ROTATERT)
	/* Rotate is just modulo 32.  */
	*amountp &= 31;
      else if (*amountp != (*amountp & 31))
	{
	  if (code == ASHIFT)
	    mnem = "lsr";
	  *amountp = 32;
	}

      /* Shifts of 0 are no-ops.  */
      if (*amountp == 0)
	return NULL;
    }

  return mnem;
}
/* Obtain the shift from the POWER of two.  */

static HOST_WIDE_INT
int_log2 (power)
     HOST_WIDE_INT power;
{
  HOST_WIDE_INT shift = 0;

  while ((((HOST_WIDE_INT) 1 << shift) & power) == 0)
    {
      if (shift > 31)
	abort ();
      shift++;
    }

  return shift;
}
/* Output a .ascii pseudo-op, keeping track of lengths.  This is because
   /bin/as is horribly restrictive.  */
#define MAX_ASCII_LEN 51

void
output_ascii_pseudo_op (stream, p, len)
     FILE * stream;
     const unsigned char * p;
     int len;
{
  int i;
  int len_so_far = 0;

  fputs ("\t.ascii\t\"", stream);

  for (i = 0; i < len; i++)
    {
      int c = p[i];

      if (len_so_far >= MAX_ASCII_LEN)
	{
	  fputs ("\"\n\t.ascii\t\"", stream);
	  len_so_far = 0;
	}

      switch (c)
	{
	case TARGET_TAB:
	  fputs ("\\t", stream);
	  len_so_far += 2;
	  break;

	case TARGET_FF:
	  fputs ("\\f", stream);
	  len_so_far += 2;
	  break;

	case TARGET_BS:
	  fputs ("\\b", stream);
	  len_so_far += 2;
	  break;

	case TARGET_CR:
	  fputs ("\\r", stream);
	  len_so_far += 2;
	  break;

	case TARGET_NEWLINE:
	  fputs ("\\n", stream);
	  c = p[i + 1];
	  if ((c >= ' ' && c <= '~')
	      || c == TARGET_TAB)
	    /* This is a good place for a line break.  */
	    len_so_far = MAX_ASCII_LEN;
	  else
	    len_so_far += 2;
	  break;

	case '\"':
	case '\\':
	  putc ('\\', stream);
	  len_so_far++;
	  /* Drop through.  */

	default:
	  if (c >= ' ' && c <= '~')
	    {
	      putc (c, stream);
	      len_so_far++;
	    }
	  else
	    {
	      fprintf (stream, "\\%03o", c);
	      len_so_far += 4;
	    }
	  break;
	}
    }

  fputs ("\"\n", stream);
}
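/* For illustration (added note): a call such as
   output_ascii_pseudo_op (stream, "hi\n", 3) emits

	.ascii	"hi\n"

   with the newline written as the two characters '\' and 'n', and
   strings longer than MAX_ASCII_LEN split into several .ascii
   directives.  */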
/* Compute the register save mask for registers 0 through 12
   inclusive.  This code is used by both arm_compute_save_reg_mask
   and arm_compute_initial_elimination_offset.  */

static unsigned long
arm_compute_save_reg0_reg12_mask ()
{
  unsigned long func_type = arm_current_func_type ();
  unsigned int save_reg_mask = 0;
  unsigned int reg;

  if (IS_INTERRUPT (func_type))
    {
      unsigned int max_reg;
      /* Interrupt functions must not corrupt any registers,
	 even call clobbered ones.  If this is a leaf function
	 we can just examine the registers used by the RTL, but
	 otherwise we have to assume that whatever function is
	 called might clobber anything, and so we have to save
	 all the call-clobbered registers as well.  */
      if (ARM_FUNC_TYPE (func_type) == ARM_FT_FIQ)
	/* FIQ handlers have registers r8 - r12 banked, so
	   we only need to check r0 - r7.  Normal ISRs only
	   bank r14 and r15, so we must check up to r12.
	   r13 is the stack pointer which is always preserved,
	   so we do not need to consider it here.  */
	max_reg = 7;
      else
	max_reg = 12;

      for (reg = 0; reg <= max_reg; reg++)
	if (regs_ever_live[reg]
	    || (! current_function_is_leaf && call_used_regs[reg]))
	  save_reg_mask |= (1 << reg);
    }
  else
    {
      /* In the normal case we only need to save those registers
	 which are call saved and which are used by this function.  */
      for (reg = 0; reg <= 10; reg++)
	if (regs_ever_live[reg] && ! call_used_regs[reg])
	  save_reg_mask |= (1 << reg);

      /* Handle the frame pointer as a special case.  */
      if (! TARGET_APCS_FRAME
	  && ! frame_pointer_needed
	  && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
	  && ! call_used_regs[HARD_FRAME_POINTER_REGNUM])
	save_reg_mask |= 1 << HARD_FRAME_POINTER_REGNUM;

      /* If we aren't loading the PIC register,
	 don't stack it even though it may be live.  */
      if (flag_pic
	  && ! TARGET_SINGLE_PIC_BASE
	  && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
	save_reg_mask |= 1 << PIC_OFFSET_TABLE_REGNUM;
    }

  return save_reg_mask;
}
/* Compute a bit mask of which registers need to be
   saved on the stack for the current function.  */

static unsigned long
arm_compute_save_reg_mask ()
{
  unsigned int save_reg_mask = 0;
  unsigned long func_type = arm_current_func_type ();

  if (IS_NAKED (func_type))
    /* This should never really happen.  */
    return 0;

  /* If we are creating a stack frame, then we must save the frame pointer,
     IP (which will hold the old stack pointer), LR and the PC.  */
  if (frame_pointer_needed)
    save_reg_mask |=
      (1 << ARM_HARD_FRAME_POINTER_REGNUM)
      | (1 << IP_REGNUM)
      | (1 << LR_REGNUM)
      | (1 << PC_REGNUM);

  /* Volatile functions do not return, so there
     is no need to save any other registers.  */
  if (IS_VOLATILE (func_type))
    return save_reg_mask;

  save_reg_mask |= arm_compute_save_reg0_reg12_mask ();

  /* Decide if we need to save the link register.
     Interrupt routines have their own banked link register,
     so they never need to save it.
     Otherwise if we do not use the link register we do not need to save
     it.  If we are pushing other registers onto the stack however, we
     can save an instruction in the epilogue by pushing the link register
     now and then popping it back into the PC.  This incurs extra memory
     accesses though, so we only do it when optimising for size, and only
     if we know that we will not need a fancy return sequence.  */
  if (regs_ever_live[LR_REGNUM]
      || (save_reg_mask
	  && optimize_size
	  && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL))
    save_reg_mask |= 1 << LR_REGNUM;

  if (cfun->machine->lr_save_eliminated)
    save_reg_mask &= ~ (1 << LR_REGNUM);

  return save_reg_mask;
}
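/* Worked example (added for illustration): a normal function that needs
   a stack frame and also uses r4 and r7 would get back
   (1 << 4) | (1 << 7) | (1 << ARM_HARD_FRAME_POINTER_REGNUM)
   | (1 << IP_REGNUM) | (1 << LR_REGNUM) | (1 << PC_REGNUM).  */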
/* Generate a function exit sequence.  If REALLY_RETURN is true, then do
   everything bar the final return instruction.  */

const char *
output_return_instruction (operand, really_return, reverse)
     rtx operand;
     int really_return;
     int reverse;
{
  char conditional[10];
  char instr[100];
  int reg;
  unsigned long live_regs_mask;
  unsigned long func_type;

  func_type = arm_current_func_type ();

  if (IS_NAKED (func_type))
    return "";

  if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
    {
      /* If this function was declared non-returning, and we have found a tail
	 call, then we have to trust that the called function won't return.  */
      if (really_return)
	{
	  rtx ops[2];

	  /* Otherwise, trap an attempted return by aborting.  */
	  ops[0] = operand;
	  ops[1] = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)"
				       : "abort");
	  assemble_external_libcall (ops[1]);
	  output_asm_insn (reverse ? "bl%D0\t%a1" : "bl%d0\t%a1", ops);
	}

      return "";
    }

  if (current_function_calls_alloca && !really_return)
    abort ();

  /* Construct the conditional part of the instruction(s) to be emitted.  */
  sprintf (conditional, "%%?%%%c0", reverse ? 'D' : 'd');

  return_used_this_function = 1;

  live_regs_mask = arm_compute_save_reg_mask ();

  if (live_regs_mask)
    {
      const char * return_reg;

      /* If we do not have any special requirements for function exit
	 (eg interworking, or ISR) then we can load the return address
	 directly into the PC.  Otherwise we must load it into LR.  */
      if (really_return
	  && ! TARGET_INTERWORK)
	return_reg = reg_names[PC_REGNUM];
      else
	return_reg = reg_names[LR_REGNUM];

      if ((live_regs_mask & (1 << IP_REGNUM)) == (1 << IP_REGNUM))
	/* There are two possible reasons for the IP register being saved.
	   Either a stack frame was created, in which case IP contains the
	   old stack pointer, or an ISR routine corrupted it.  If this is an
	   ISR routine then just restore IP, otherwise restore IP into SP.  */
	if (! IS_INTERRUPT (func_type))
	  {
	    live_regs_mask &= ~ (1 << IP_REGNUM);
	    live_regs_mask |=   (1 << SP_REGNUM);
	  }

      /* On some ARM architectures it is faster to use LDR rather than
	 LDM to load a single register.  On other architectures, the
	 cost is the same.  In 26 bit mode, or for exception handlers,
	 we have to use LDM to load the PC so that the CPSR is also
	 restored.  */
      for (reg = 0; reg <= LAST_ARM_REGNUM; reg++)
	if (live_regs_mask == (unsigned int)(1 << reg))
	  break;

      if (reg <= LAST_ARM_REGNUM
	  && (reg != LR_REGNUM
	      || ! really_return
	      || (TARGET_APCS_32 && ! IS_INTERRUPT (func_type))))
	{
	  sprintf (instr, "ldr%s\t%%|%s, [%%|sp], #4", conditional,
		   (reg == LR_REGNUM) ? return_reg : reg_names[reg]);
	}
      else
	{
	  char *p;
	  int first = 1;

	  /* Generate the load multiple instruction to restore the registers.  */
	  if (frame_pointer_needed)
	    sprintf (instr, "ldm%sea\t%%|fp, {", conditional);
	  else
	    sprintf (instr, "ldm%sfd\t%%|sp!, {", conditional);

	  p = instr + strlen (instr);

	  for (reg = 0; reg <= SP_REGNUM; reg++)
	    if (live_regs_mask & (1 << reg))
	      {
		int l = strlen (reg_names[reg]);

		if (first)
		  first = 0;
		else
		  {
		    memcpy (p, ", ", 2);
		    p += 2;
		  }

		memcpy (p, "%|", 2);
		memcpy (p + 2, reg_names[reg], l);
		p += l + 2;
	      }

	  if (live_regs_mask & (1 << LR_REGNUM))
	    {
	      int l = strlen (return_reg);

	      if (first == 0)
		{
		  memcpy (p, ", ", 2);
		  p += 2;
		}

	      memcpy (p, "%|", 2);
	      memcpy (p + 2, return_reg, l);
	      strcpy (p + 2 + l, ((TARGET_APCS_32
				   && !IS_INTERRUPT (func_type))
				  || !really_return)
		      ? "}" : "}^");
	    }
	  else
	    strcpy (p, "}");
	}

      output_asm_insn (instr, & operand);

      /* See if we need to generate an extra instruction to
	 perform the actual function return.  */
      if (really_return
	  && func_type != ARM_FT_INTERWORKED
	  && (live_regs_mask & (1 << LR_REGNUM)) != 0)
	{
	  /* The return has already been handled
	     by loading the LR into the PC.  */
	  really_return = 0;
	}
    }

  if (really_return)
    {
      switch ((int) ARM_FUNC_TYPE (func_type))
	{
	case ARM_FT_ISR:
	case ARM_FT_FIQ:
	  sprintf (instr, "sub%ss\t%%|pc, %%|lr, #4", conditional);
	  break;

	case ARM_FT_INTERWORKED:
	  sprintf (instr, "bx%s\t%%|lr", conditional);
	  break;

	case ARM_FT_EXCEPTION:
	  sprintf (instr, "mov%ss\t%%|pc, %%|lr", conditional);
	  break;

	default:
	  /* ARMv5 implementations always provide BX, so interworking
	     is the default unless APCS-26 is in use.  */
	  if ((insn_flags & FL_ARCH5) != 0 && TARGET_APCS_32)
	    sprintf (instr, "bx%s\t%%|lr", conditional);
	  else
	    sprintf (instr, "mov%s%s\t%%|pc, %%|lr",
		     conditional, TARGET_APCS_32 ? "" : "s");
	  break;
	}

      output_asm_insn (instr, & operand);
    }

  return "";
}
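/* Illustrative only: for a function with a stack frame this typically
   produces something like "ldmea fp, {r4, r5, fp, sp, pc}", while a
   function whose only saved register is LR gets the cheaper
   "ldr pc, [sp], #4".  The exact text depends on the mask computed by
   arm_compute_save_reg_mask and on the function type.  */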
/* Write the function name into the code section, directly preceding
   the function prologue.

   Code will be output similar to this:
     t0
	 .ascii "arm_poke_function_name", 0
	 .align
     t1
	 .word 0xff000000 + (t1 - t0)
     arm_poke_function_name
	 mov     ip, sp
	 stmfd   sp!, {fp, ip, lr, pc}
	 sub     fp, ip, #4

   When performing a stack backtrace, code can inspect the value
   of 'pc' stored at 'fp' + 0.  If the trace function then looks
   at location pc - 12 and the top 8 bits are set, then we know
   that there is a function name embedded immediately preceding this
   location, and its length is held in the low 24 bits of pc[-3].

   We assume that pc is declared as a pointer to an unsigned long.

   It is of no benefit to output the function name if we are assembling
   a leaf function.  These function types will not contain a stack
   backtrace structure, therefore it is not possible to determine the
   function name.  */

void
arm_poke_function_name (stream, name)
     FILE * stream;
     const char * name;
{
  unsigned long alignlength;
  unsigned long length;
  rtx           x;

  length      = strlen (name) + 1;
  alignlength = ROUND_UP (length);

  ASM_OUTPUT_ASCII (stream, name, length);
  ASM_OUTPUT_ALIGN (stream, 2);
  x = GEN_INT ((unsigned HOST_WIDE_INT) 0xff000000 + alignlength);
  assemble_aligned_integer (UNITS_PER_WORD, x);
}
/* Place some comments into the assembler stream
   describing the current function.  */

static void
arm_output_function_prologue (f, frame_size)
     FILE * f;
     HOST_WIDE_INT frame_size;
{
  unsigned long func_type;

  if (!TARGET_ARM)
    {
      thumb_output_function_prologue (f, frame_size);
      return;
    }

  /* Sanity check.  */
  if (arm_ccfsm_state || arm_target_insn)
    abort ();

  func_type = arm_current_func_type ();

  switch ((int) ARM_FUNC_TYPE (func_type))
    {
    default:
    case ARM_FT_NORMAL:
      break;
    case ARM_FT_INTERWORKED:
      asm_fprintf (f, "\t%@ Function supports interworking.\n");
      break;
    case ARM_FT_EXCEPTION_HANDLER:
      asm_fprintf (f, "\t%@ C++ Exception Handler.\n");
      break;
    case ARM_FT_ISR:
      asm_fprintf (f, "\t%@ Interrupt Service Routine.\n");
      break;
    case ARM_FT_FIQ:
      asm_fprintf (f, "\t%@ Fast Interrupt Service Routine.\n");
      break;
    case ARM_FT_EXCEPTION:
      asm_fprintf (f, "\t%@ ARM Exception Handler.\n");
      break;
    }

  if (IS_NAKED (func_type))
    asm_fprintf (f, "\t%@ Naked Function: prologue and epilogue provided by programmer.\n");

  if (IS_VOLATILE (func_type))
    asm_fprintf (f, "\t%@ Volatile: function does not return.\n");

  if (IS_NESTED (func_type))
    asm_fprintf (f, "\t%@ Nested: function declared inside another function.\n");

  asm_fprintf (f, "\t%@ args = %d, pretend = %d, frame = %d\n",
	       current_function_args_size,
	       current_function_pretend_args_size, frame_size);

  asm_fprintf (f, "\t%@ frame_needed = %d, uses_anonymous_args = %d\n",
	       frame_pointer_needed,
	       cfun->machine->uses_anonymous_args);

  if (cfun->machine->lr_save_eliminated)
    asm_fprintf (f, "\t%@ link register save eliminated.\n");

#ifdef AOF_ASSEMBLER
  if (flag_pic)
    asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, PIC_OFFSET_TABLE_REGNUM);
#endif

  return_used_this_function = 0;
}
const char *
arm_output_epilogue (really_return)
     int really_return;
{
  int reg;
  unsigned long saved_regs_mask;
  unsigned long func_type;
  /* Floats_offset is the offset from the "virtual" frame.  In an APCS
     frame that is $fp + 4 for a non-variadic function.  */
  int floats_offset = 0;
  rtx operands[3];
  int frame_size = get_frame_size ();
  FILE * f = asm_out_file;
  rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;

  /* If we have already generated the return instruction
     then it is futile to generate anything else.  */
  if (use_return_insn (FALSE) && return_used_this_function)
    return "";

  func_type = arm_current_func_type ();

  if (IS_NAKED (func_type))
    /* Naked functions don't have epilogues.  */
    return "";

  if (IS_VOLATILE (func_type) && TARGET_ABORT_NORETURN)
    {
      rtx op;

      /* A volatile function should never return.  Call abort.  */
      op = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)" : "abort");
      assemble_external_libcall (op);
      output_asm_insn ("bl\t%a0", &op);

      return "";
    }

  if (ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER
      && ! really_return)
    /* If we are throwing an exception, then we really must
       be doing a return, so we can't tail-call.  */
    abort ();

  saved_regs_mask = arm_compute_save_reg_mask ();

  /* XXX We should adjust floats_offset for any anonymous args, and then
     re-adjust vfp_offset below to compensate.  */

  /* Compute how far away the floats will be.  */
  for (reg = 0; reg <= LAST_ARM_REGNUM; reg++)
    if (saved_regs_mask & (1 << reg))
      floats_offset += 4;

  if (frame_pointer_needed)
    {
      int vfp_offset = 4;

      if (arm_fpu_arch == FP_SOFT2)
	{
	  for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
	    if (regs_ever_live[reg] && !call_used_regs[reg])
	      {
		floats_offset += 12;
		asm_fprintf (f, "\tldfe\t%r, [%r, #-%d]\n",
			     reg, FP_REGNUM, floats_offset - vfp_offset);
	      }
	}
      else
	{
	  int start_reg = LAST_ARM_FP_REGNUM;

	  for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
	    {
	      if (regs_ever_live[reg] && !call_used_regs[reg])
		{
		  floats_offset += 12;

		  /* We can't unstack more than four registers at once.  */
		  if (start_reg - reg == 3)
		    {
		      asm_fprintf (f, "\tlfm\t%r, 4, [%r, #-%d]\n",
				   reg, FP_REGNUM, floats_offset - vfp_offset);
		      start_reg = reg - 1;
		    }
		}
	      else
		{
		  if (reg != start_reg)
		    asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
				 reg + 1, start_reg - reg,
				 FP_REGNUM, floats_offset - vfp_offset);
		  start_reg = reg - 1;
		}
	    }

	  /* Just in case the last register checked also needs unstacking.  */
	  if (reg != start_reg)
	    asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
			 reg + 1, start_reg - reg,
			 FP_REGNUM, floats_offset - vfp_offset);
	}

      /* saved_regs_mask should contain the IP, which at the time of stack
	 frame generation actually contains the old stack pointer.  So a
	 quick way to unwind the stack is just pop the IP register directly
	 into the stack pointer.  */
      if ((saved_regs_mask & (1 << IP_REGNUM)) == 0)
	abort ();
      saved_regs_mask &= ~ (1 << IP_REGNUM);
      saved_regs_mask |=   (1 << SP_REGNUM);

      /* There are two registers left in saved_regs_mask - LR and PC.  We
	 only need to restore the LR register (the return address), but to
	 save time we can load it directly into the PC, unless we need a
	 special function exit sequence, or we are not really returning.  */
      if (really_return && ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL)
	/* Delete the LR from the register mask, so that the LR on
	   the stack is loaded into the PC in the register mask.  */
	saved_regs_mask &= ~ (1 << LR_REGNUM);
      else
	saved_regs_mask &= ~ (1 << PC_REGNUM);

      print_multi_reg (f, "ldmea\t%r", FP_REGNUM, saved_regs_mask);

      if (IS_INTERRUPT (func_type))
	/* Interrupt handlers will have pushed the
	   IP onto the stack, so restore it now.  */
	print_multi_reg (f, "ldmfd\t%r", SP_REGNUM, 1 << IP_REGNUM);
    }
  else
    {
      /* Restore stack pointer if necessary.  */
      if (frame_size + current_function_outgoing_args_size != 0)
	{
	  operands[0] = operands[1] = stack_pointer_rtx;
	  operands[2] = GEN_INT (frame_size
				 + current_function_outgoing_args_size);
	  output_add_immediate (operands);
	}

      if (arm_fpu_arch == FP_SOFT2)
	{
	  for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
	    if (regs_ever_live[reg] && !call_used_regs[reg])
	      asm_fprintf (f, "\tldfe\t%r, [%r], #12\n",
			   reg, SP_REGNUM);
	}
      else
	{
	  int start_reg = FIRST_ARM_FP_REGNUM;

	  for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
	    {
	      if (regs_ever_live[reg] && !call_used_regs[reg])
		{
		  if (reg - start_reg == 3)
		    {
		      asm_fprintf (f, "\tlfmfd\t%r, 4, [%r]!\n",
				   start_reg, SP_REGNUM);
		      start_reg = reg + 1;
		    }
		}
	      else
		{
		  if (reg != start_reg)
		    asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
				 start_reg, reg - start_reg,
				 SP_REGNUM);

		  start_reg = reg + 1;
		}
	    }

	  /* Just in case the last register checked also needs unstacking.  */
	  if (reg != start_reg)
	    asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
			 start_reg, reg - start_reg, SP_REGNUM);
	}

      /* If we can, restore the LR into the PC.  */
      if (ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
	  && really_return
	  && current_function_pretend_args_size == 0
	  && saved_regs_mask & (1 << LR_REGNUM))
	{
	  saved_regs_mask &= ~ (1 << LR_REGNUM);
	  saved_regs_mask |=   (1 << PC_REGNUM);
	}

      /* Load the registers off the stack.  If we only have one register
	 to load use the LDR instruction - it is faster.  */
      if (saved_regs_mask == (1 << LR_REGNUM))
	{
	  /* The exception handler ignores the LR, so we do
	     not really need to load it off the stack.  */
	  if (eh_ofs)
	    asm_fprintf (f, "\tadd\t%r, %r, #4\n", SP_REGNUM, SP_REGNUM);
	  else
	    asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM, SP_REGNUM);
	}
      else if (saved_regs_mask)
	print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, saved_regs_mask);

      if (current_function_pretend_args_size)
	{
	  /* Unwind the pre-pushed regs.  */
	  operands[0] = operands[1] = stack_pointer_rtx;
	  operands[2] = GEN_INT (current_function_pretend_args_size);
	  output_add_immediate (operands);
	}
    }

  if (ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER)
    /* Adjust the stack to remove the exception handler stuff.  */
    asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
		 REGNO (eh_ofs));

  if (! really_return
      || (ARM_FUNC_TYPE (func_type) == ARM_FT_NORMAL
	  && current_function_pretend_args_size == 0
	  && saved_regs_mask & (1 << PC_REGNUM)))
    return "";

  /* Generate the return instruction.  */
  switch ((int) ARM_FUNC_TYPE (func_type))
    {
    case ARM_FT_EXCEPTION_HANDLER:
      /* Even in 26-bit mode we do a mov (rather than a movs)
	 because we don't have the PSR bits set in the address.  */
      asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, EXCEPTION_LR_REGNUM);
      break;

    case ARM_FT_ISR:
    case ARM_FT_FIQ:
      asm_fprintf (f, "\tsubs\t%r, %r, #4\n", PC_REGNUM, LR_REGNUM);
      break;

    case ARM_FT_EXCEPTION:
      asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
      break;

    case ARM_FT_INTERWORKED:
      asm_fprintf (f, "\tbx\t%r\n", LR_REGNUM);
      break;

    default:
      if (frame_pointer_needed)
	/* If we used the frame pointer then the return address
	   will have been loaded off the stack directly into the
	   PC, so there is no need to issue a MOV instruction
	   here.  */
	;
      else if (current_function_pretend_args_size == 0
	       && (saved_regs_mask & (1 << LR_REGNUM)))
	/* Similarly we may have been able to load LR into the PC
	   even if we did not create a stack frame.  */
	;
      else if (TARGET_APCS_32)
	asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, LR_REGNUM);
      else
	asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, LR_REGNUM);
      break;
    }

  return "";
}
static void
arm_output_function_epilogue (file, frame_size)
     FILE *file ATTRIBUTE_UNUSED;
     HOST_WIDE_INT frame_size;
{
  if (TARGET_THUMB)
    {
      /* ??? Probably not safe to set this here, since it assumes that a
	 function will be emitted as assembly immediately after we generate
	 RTL for it.  This does not happen for inline functions.  */
      return_used_this_function = 0;
    }
  else
    {
      if (use_return_insn (FALSE)
	  && return_used_this_function
	  && (frame_size + current_function_outgoing_args_size) != 0
	  && !frame_pointer_needed)
	abort ();

      /* Reset the ARM-specific per-function variables.  */
      after_arm_reorg = 0;
    }
}
/* Generate and emit an insn that we will recognize as a push_multi.
   Unfortunately, since this insn does not reflect very well the actual
   semantics of the operation, we need to annotate the insn for the benefit
   of DWARF2 frame unwind information.  */

static rtx
emit_multi_reg_push (mask)
     int mask;
{
  int num_regs = 0;
  int num_dwarf_regs;
  int i, j;
  rtx par;
  rtx dwarf;
  int dwarf_par_index;
  rtx tmp, reg;

  for (i = 0; i <= LAST_ARM_REGNUM; i++)
    if (mask & (1 << i))
      num_regs++;

  if (num_regs == 0 || num_regs > 16)
    abort ();

  /* We don't record the PC in the dwarf frame information.  */
  num_dwarf_regs = num_regs;
  if (mask & (1 << PC_REGNUM))
    num_dwarf_regs--;

  /* For the body of the insn we are going to generate an UNSPEC in
     parallel with several USEs.  This allows the insn to be recognised
     by the push_multi pattern in the arm.md file.  The insn looks
     something like this:

       (parallel [
	   (set (mem:BLK (pre_dec:BLK (reg:SI sp)))
		(unspec:BLK [(reg:SI r4)] UNSPEC_PUSH_MULT))
	   (use (reg:SI 11 fp))
	   (use (reg:SI 12 ip))
	   (use (reg:SI 14 lr))
	   (use (reg:SI 15 pc))
	])

     For the frame note however, we try to be more explicit and actually
     show each register being stored into the stack frame, plus a (single)
     decrement of the stack pointer.  We do it this way in order to be
     friendly to the stack unwinding code, which only wants to see a single
     stack decrement per instruction.  The RTL we generate for the note looks
     something like this:

      (sequence [
	   (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int -20)))
	   (set (mem:SI (reg:SI sp)) (reg:SI r4))
	   (set (mem:SI (plus:SI (reg:SI sp) (const_int 4))) (reg:SI fp))
	   (set (mem:SI (plus:SI (reg:SI sp) (const_int 8))) (reg:SI ip))
	   (set (mem:SI (plus:SI (reg:SI sp) (const_int 12))) (reg:SI lr))
	])

     This sequence is used both by the code to support stack unwinding for
     exception handlers and the code to generate dwarf2 frame debugging.  */

  par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_regs));
  dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (num_dwarf_regs + 1));
  RTX_FRAME_RELATED_P (dwarf) = 1;
  dwarf_par_index = 1;

  for (i = 0; i <= LAST_ARM_REGNUM; i++)
    {
      if (mask & (1 << i))
	{
	  reg = gen_rtx_REG (SImode, i);

	  XVECEXP (par, 0, 0)
	    = gen_rtx_SET (VOIDmode,
			   gen_rtx_MEM (BLKmode,
					gen_rtx_PRE_DEC (BLKmode,
							 stack_pointer_rtx)),
			   gen_rtx_UNSPEC (BLKmode,
					   gen_rtvec (1, reg),
					   UNSPEC_PUSH_MULT));

	  if (i != PC_REGNUM)
	    {
	      tmp = gen_rtx_SET (VOIDmode,
				 gen_rtx_MEM (SImode, stack_pointer_rtx),
				 reg);
	      RTX_FRAME_RELATED_P (tmp) = 1;
	      XVECEXP (dwarf, 0, dwarf_par_index) = tmp;
	      dwarf_par_index++;
	    }

	  break;
	}
    }

  for (j = 1, i++; j < num_regs; i++)
    {
      if (mask & (1 << i))
	{
	  reg = gen_rtx_REG (SImode, i);

	  XVECEXP (par, 0, j) = gen_rtx_USE (VOIDmode, reg);

	  if (i != PC_REGNUM)
	    {
	      tmp = gen_rtx_SET (VOIDmode,
				 gen_rtx_MEM (SImode,
					      plus_constant (stack_pointer_rtx,
							     4 * j)),
				 reg);
	      RTX_FRAME_RELATED_P (tmp) = 1;
	      XVECEXP (dwarf, 0, dwarf_par_index++) = tmp;
	    }

	  j++;
	}
    }

  par = emit_insn (par);

  tmp = gen_rtx_SET (SImode,
		     stack_pointer_rtx,
		     gen_rtx_PLUS (SImode,
				   stack_pointer_rtx,
				   GEN_INT (-4 * num_regs)));
  RTX_FRAME_RELATED_P (tmp) = 1;
  XVECEXP (dwarf, 0, 0) = tmp;

  REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
				       REG_NOTES (par));
  return par;
}
static rtx
emit_sfm (base_reg, count)
     int base_reg;
     int count;
{
  rtx par;
  rtx dwarf;
  rtx tmp, reg;
  int i;

  par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
  dwarf = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
  RTX_FRAME_RELATED_P (dwarf) = 1;

  reg = gen_rtx_REG (XFmode, base_reg++);

  XVECEXP (par, 0, 0)
    = gen_rtx_SET (VOIDmode,
		   gen_rtx_MEM (BLKmode,
				gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
		   gen_rtx_UNSPEC (BLKmode,
				   gen_rtvec (1, reg),
				   UNSPEC_PUSH_MULT));
  tmp
    = gen_rtx_SET (VOIDmode,
		   gen_rtx_MEM (XFmode,
				gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
		   reg);
  RTX_FRAME_RELATED_P (tmp) = 1;
  XVECEXP (dwarf, 0, count - 1) = tmp;

  for (i = 1; i < count; i++)
    {
      reg = gen_rtx_REG (XFmode, base_reg++);
      XVECEXP (par, 0, i) = gen_rtx_USE (VOIDmode, reg);

      tmp = gen_rtx_SET (VOIDmode,
			 gen_rtx_MEM (XFmode,
				      gen_rtx_PRE_DEC (BLKmode,
						       stack_pointer_rtx)),
			 reg);
      RTX_FRAME_RELATED_P (tmp) = 1;
      XVECEXP (dwarf, 0, count - i - 1) = tmp;
    }

  par = emit_insn (par);
  REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
				       REG_NOTES (par));
  return par;
}
/* Compute the distance from register FROM to register TO.
   These can be the arg pointer (26), the soft frame pointer (25),
   the stack pointer (13) or the hard frame pointer (11).
   Typical stack layout looks like this:

       old stack pointer -> |    |
			     ----
			    |    | \
			    |    |   saved arguments for
			    |    |   vararg functions
			    |    | /
			      --
   hard FP & arg pointer -> |    | \
			    |    |   stack
			    |    |   frame
			    |    | /
			      --
			    |    | \
			    |    |   call saved
			    |    |   registers
      soft frame pointer -> |    | /
			      --
			    |    | \
			    |    |   local
			    |    |   variables
			    |    | /
			      --
			    |    | \
			    |    |   outgoing
			    |    |   arguments
   current stack pointer -> |    | /
			      --

  For a given function some or all of these stack components
  may not be needed, giving rise to the possibility of
  eliminating some of the registers.

  The values returned by this function must reflect the behaviour
  of arm_expand_prologue() and arm_compute_save_reg_mask().

  The sign of the number returned reflects the direction of stack
  growth, so the values are positive for all eliminations except
  from the soft frame pointer to the hard frame pointer.  */

unsigned int
arm_compute_initial_elimination_offset (from, to)
     unsigned int from;
     unsigned int to;
{
  unsigned int local_vars = (get_frame_size () + 3) & ~3;
  unsigned int outgoing_args = current_function_outgoing_args_size;
  unsigned int stack_frame;
  unsigned int call_saved_registers;
  unsigned long func_type;

  func_type = arm_current_func_type ();

  /* Volatile functions never return, so there is
     no need to save call saved registers.  */
  call_saved_registers = 0;
  if (! IS_VOLATILE (func_type))
    {
      unsigned int reg_mask;
      unsigned int reg;

      /* Make sure that we compute which registers will be saved
	 on the stack using the same algorithm that is used by
	 arm_compute_save_reg_mask().  */
      reg_mask = arm_compute_save_reg0_reg12_mask ();

      /* Now count the number of bits set in save_reg_mask.
	 For each set bit we need 4 bytes of stack space.  */
      while (reg_mask)
	{
	  call_saved_registers += 4;
	  reg_mask = reg_mask & ~ (reg_mask & - reg_mask);
	}

      if (regs_ever_live[LR_REGNUM]
	  /* If a stack frame is going to be created, the LR will
	     be saved as part of that, so we do not need to allow
	     for it here.  */
	  && ! frame_pointer_needed)
	call_saved_registers += 4;

      /* If the hard floating point registers are going to be
	 used then they must be saved on the stack as well.
	 Each register occupies 12 bytes of stack space.  */
      for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
	if (regs_ever_live[reg] && ! call_used_regs[reg])
	  call_saved_registers += 12;
    }

  /* The stack frame contains 4 registers - the old frame pointer,
     the old stack pointer, the return address and PC of the start
     of the function.  */
  stack_frame = frame_pointer_needed ? 16 : 0;

  /* OK, now we have enough information to compute the distances.
     There must be an entry in these switch tables for each pair
     of registers in ELIMINABLE_REGS, even if some of the entries
     seem to be redundant or useless.  */
  switch (from)
    {
    case ARG_POINTER_REGNUM:
      switch (to)
	{
	case THUMB_HARD_FRAME_POINTER_REGNUM:
	  return 0;

	case FRAME_POINTER_REGNUM:
	  /* This is the reverse of the soft frame pointer
	     to hard frame pointer elimination below.  */
	  if (call_saved_registers == 0 && stack_frame == 0)
	    return 0;
	  return (call_saved_registers + stack_frame - 4);

	case ARM_HARD_FRAME_POINTER_REGNUM:
	  /* If there is no stack frame then the hard
	     frame pointer and the arg pointer coincide.  */
	  if (stack_frame == 0 && call_saved_registers != 0)
	    return 0;
	  /* FIXME:  Not sure about this.  Maybe we should always return 0 ?  */
	  return (frame_pointer_needed
		  && current_function_needs_context
		  && ! cfun->machine->uses_anonymous_args) ? 4 : 0;

	case STACK_POINTER_REGNUM:
	  /* If nothing has been pushed on the stack at all
	     then this will return -4.  This *is* correct!  */
	  return call_saved_registers + stack_frame + local_vars + outgoing_args - 4;

	default:
	  abort ();
	}
      break;

    case FRAME_POINTER_REGNUM:
      switch (to)
	{
	case THUMB_HARD_FRAME_POINTER_REGNUM:
	  return 0;

	case ARM_HARD_FRAME_POINTER_REGNUM:
	  /* The hard frame pointer points to the top entry in the
	     stack frame.  The soft frame pointer to the bottom entry
	     in the stack frame.  If there is no stack frame at all,
	     then they are identical.  */
	  if (call_saved_registers == 0 && stack_frame == 0)
	    return 0;
	  return - (call_saved_registers + stack_frame - 4);

	case STACK_POINTER_REGNUM:
	  return local_vars + outgoing_args;

	default:
	  abort ();
	}
      break;

    default:
      /* You cannot eliminate from the stack pointer.
	 In theory you could eliminate from the hard frame
	 pointer to the stack pointer, but this will never
	 happen, since if a stack frame is not needed the
	 hard frame pointer will never be used.  */
      abort ();
    }
}
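/* A worked example: with frame_pointer_needed set, r4-r6 saved (12
   bytes of call saved registers) and no locals or outgoing args,
   stack_frame is 16, so the elimination from ARG_POINTER_REGNUM to
   FRAME_POINTER_REGNUM yields 12 + 16 - 4 = 24 bytes.  */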
/* Generate the prologue instructions for entry into an ARM function.  */

void
arm_expand_prologue ()
{
  int reg;
  rtx amount;
  rtx insn;
  rtx ip_rtx;
  unsigned long live_regs_mask;
  unsigned long func_type;
  int fp_offset = 0;
  int saved_pretend_args = 0;
  unsigned int args_to_push;

  func_type = arm_current_func_type ();

  /* Naked functions don't have prologues.  */
  if (IS_NAKED (func_type))
    return;

  /* Make a copy of c_f_p_a_s as we may need to modify it locally.  */
  args_to_push = current_function_pretend_args_size;

  /* Compute which register we will have to save onto the stack.  */
  live_regs_mask = arm_compute_save_reg_mask ();

  ip_rtx = gen_rtx_REG (SImode, IP_REGNUM);

  if (frame_pointer_needed)
    {
      if (IS_INTERRUPT (func_type))
	{
	  /* Interrupt functions must not corrupt any registers.
	     Creating a frame pointer however, corrupts the IP
	     register, so we must push it first.  */
	  insn = emit_multi_reg_push (1 << IP_REGNUM);

	  /* Do not set RTX_FRAME_RELATED_P on this insn.
	     The dwarf stack unwinding code only wants to see one
	     stack decrement per function, and this is not it.  If
	     this instruction is labeled as being part of the frame
	     creation sequence then dwarf2out_frame_debug_expr will
	     abort when it encounters the assignment of IP to FP
	     later on, since the use of SP here establishes SP as
	     the CFA register and not IP.

	     Anyway this instruction is not really part of the stack
	     frame creation although it is part of the prologue.  */
	}
      else if (IS_NESTED (func_type))
	{
	  /* The Static chain register is the same as the IP register
	     used as a scratch register during stack frame creation.
	     To get around this need to find somewhere to store IP
	     whilst the frame is being created.  We try the following
	     places in order:

	       1. The last argument register.
	       2. A slot on the stack above the frame.  (This only
		  works if the function is not a varargs function).
	       3. Register r3, after pushing the argument registers
		  onto the stack.

	     Note - we only need to tell the dwarf2 backend about the SP
	     adjustment in the second variant; the static chain register
	     doesn't need to be unwound, as it doesn't contain a value
	     inherited from the caller.  */

	  if (regs_ever_live[3] == 0)
	    {
	      insn = gen_rtx_REG (SImode, 3);
	      insn = gen_rtx_SET (SImode, insn, ip_rtx);
	      insn = emit_insn (insn);
	    }
	  else if (args_to_push == 0)
	    {
	      rtx dwarf;
	      insn = gen_rtx_PRE_DEC (SImode, stack_pointer_rtx);
	      insn = gen_rtx_MEM (SImode, insn);
	      insn = gen_rtx_SET (VOIDmode, insn, ip_rtx);
	      insn = emit_insn (insn);

	      fp_offset = 4;

	      /* Just tell the dwarf backend that we adjusted SP.  */
	      dwarf = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
				   gen_rtx_PLUS (SImode, stack_pointer_rtx,
						 GEN_INT (-fp_offset)));
	      RTX_FRAME_RELATED_P (insn) = 1;
	      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
						    dwarf, REG_NOTES (insn));
	    }
	  else
	    {
	      /* Store the args on the stack.  */
	      if (cfun->machine->uses_anonymous_args)
		insn = emit_multi_reg_push
		  ((0xf0 >> (args_to_push / 4)) & 0xf);
	      else
		insn = emit_insn
		  (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
			       GEN_INT (- args_to_push)));

	      RTX_FRAME_RELATED_P (insn) = 1;

	      saved_pretend_args = 1;
	      fp_offset = args_to_push;
	      args_to_push = 0;

	      /* Now reuse r3 to preserve IP.  */
	      insn = gen_rtx_REG (SImode, 3);
	      insn = gen_rtx_SET (SImode, insn, ip_rtx);
	      (void) emit_insn (insn);
	    }
	}

      if (fp_offset)
	{
	  insn = gen_rtx_PLUS (SImode, stack_pointer_rtx, GEN_INT (fp_offset));
	  insn = gen_rtx_SET  (SImode, ip_rtx, insn);
	}
      else
	insn = gen_movsi (ip_rtx, stack_pointer_rtx);

      insn = emit_insn (insn);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  if (args_to_push)
    {
      /* Push the argument registers, or reserve space for them.  */
      if (cfun->machine->uses_anonymous_args)
	insn = emit_multi_reg_push
	  ((0xf0 >> (args_to_push / 4)) & 0xf);
      else
	insn = emit_insn
	  (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
		       GEN_INT (- args_to_push)));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* If this is an interrupt service routine, and the link register is
     going to be pushed, subtracting four now will mean that the
     function return can be done with a single instruction.  */
  if ((func_type == ARM_FT_ISR || func_type == ARM_FT_FIQ)
      && (live_regs_mask & (1 << LR_REGNUM)) != 0)
    emit_insn (gen_rtx_SET (SImode,
			    gen_rtx_REG (SImode, LR_REGNUM),
			    gen_rtx_PLUS (SImode,
					  gen_rtx_REG (SImode, LR_REGNUM),
					  GEN_INT (-4))));

  if (live_regs_mask)
    {
      insn = emit_multi_reg_push (live_regs_mask);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  if (! IS_VOLATILE (func_type))
    {
      /* Save any floating point call-saved registers used by this function.  */
      if (arm_fpu_arch == FP_SOFT2)
	{
	  for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
	    if (regs_ever_live[reg] && !call_used_regs[reg])
	      {
		insn = gen_rtx_PRE_DEC (XFmode, stack_pointer_rtx);
		insn = gen_rtx_MEM (XFmode, insn);
		insn = emit_insn (gen_rtx_SET (VOIDmode, insn,
					       gen_rtx_REG (XFmode, reg)));
		RTX_FRAME_RELATED_P (insn) = 1;
	      }
	}
      else
	{
	  int start_reg = LAST_ARM_FP_REGNUM;

	  for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
	    {
	      if (regs_ever_live[reg] && !call_used_regs[reg])
		{
		  if (start_reg - reg == 3)
		    {
		      insn = emit_sfm (reg, 4);
		      RTX_FRAME_RELATED_P (insn) = 1;
		      start_reg = reg - 1;
		    }
		}
	      else
		{
		  if (start_reg != reg)
		    {
		      insn = emit_sfm (reg + 1, start_reg - reg);
		      RTX_FRAME_RELATED_P (insn) = 1;
		    }
		  start_reg = reg - 1;
		}
	    }

	  if (start_reg != reg)
	    {
	      insn = emit_sfm (reg + 1, start_reg - reg);
	      RTX_FRAME_RELATED_P (insn) = 1;
	    }
	}
    }

  if (frame_pointer_needed)
    {
      /* Create the new frame pointer.  */
      insn = GEN_INT (-(4 + args_to_push + fp_offset));
      insn = emit_insn (gen_addsi3 (hard_frame_pointer_rtx, ip_rtx, insn));
      RTX_FRAME_RELATED_P (insn) = 1;

      if (IS_NESTED (func_type))
	{
	  /* Recover the static chain register.  */
	  if (regs_ever_live[3] == 0
	      || saved_pretend_args)
	    insn = gen_rtx_REG (SImode, 3);
	  else /* if (current_function_pretend_args_size == 0) */
	    {
	      insn = gen_rtx_PLUS (SImode, hard_frame_pointer_rtx, GEN_INT (4));
	      insn = gen_rtx_MEM (SImode, insn);
	    }

	  emit_insn (gen_rtx_SET (SImode, ip_rtx, insn));
	  /* Add a USE to stop propagate_one_insn() from barfing.  */
	  emit_insn (gen_prologue_use (ip_rtx));
	}
    }

  amount = GEN_INT (-(get_frame_size ()
		      + current_function_outgoing_args_size));

  if (amount != const0_rtx)
    {
      /* This add can produce multiple insns for a large constant, so we
	 need to get tricky.  */
      rtx last = get_last_insn ();
      insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
				    amount));
      do
	{
	  last = last ? NEXT_INSN (last) : get_insns ();
	  RTX_FRAME_RELATED_P (last) = 1;
	}
      while (last != insn);

      /* If the frame pointer is needed, emit a special barrier that
	 will prevent the scheduler from moving stores to the frame
	 before the stack adjustment.  */
      if (frame_pointer_needed)
	{
	  rtx unspec = gen_rtx_UNSPEC (SImode,
				       gen_rtvec (2, stack_pointer_rtx,
						  hard_frame_pointer_rtx),
				       UNSPEC_PRLG_STK);

	  insn = emit_insn (gen_rtx_CLOBBER (VOIDmode,
					     gen_rtx_MEM (BLKmode, unspec)));
	}
    }

  /* If we are profiling, make sure no instructions are scheduled before
     the call to mcount.  Similarly if the user has requested no
     scheduling in the prolog.  */
  if (current_function_profile || TARGET_NO_SCHED_PRO)
    emit_insn (gen_blockage ());

  /* If the link register is being kept alive, with the return address in it,
     then make sure that it does not get reused by the ce2 pass.  */
  if ((live_regs_mask & (1 << LR_REGNUM)) == 0)
    {
      emit_insn (gen_prologue_use (gen_rtx_REG (SImode, LR_REGNUM)));
      cfun->machine->lr_save_eliminated = 1;
    }
}
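/* Illustrative only: for a typical APCS frame function the RTL
   expanded here ends up as assembly along the lines of

	mov	ip, sp
	stmfd	sp!, {fp, ip, lr, pc}
	sub	fp, ip, #4
	sub	sp, sp, #<locals plus outgoing args>

   with the details varying according to the live register mask, the
   pretend args and the function type computed above.  */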
/* If CODE is 'd', then the X is a condition operand and the instruction
   should only be executed if the condition is true.
   If CODE is 'D', then the X is a condition operand and the instruction
   should only be executed if the condition is false: however, if the mode
   of the comparison is CCFPEmode, then always execute the instruction -- we
   do this because in these circumstances !GE does not necessarily imply LT;
   in these cases the instruction pattern will take care to make sure that
   an instruction containing %d will follow, thereby undoing the effects of
   doing this instruction unconditionally.
   If CODE is 'N' then X is a floating point operand that must be negated
   before output.
   If CODE is 'B' then output a bitwise inverted value of X (a const int).
   If X is a REG and CODE is `M', output a ldm/stm style multi-reg.  */

void
arm_print_operand (stream, x, code)
     FILE * stream;
     rtx x;
     int code;
{
  switch (code)
    {
    case '@':
      fputs (ASM_COMMENT_START, stream);
      return;

    case '_':
      fputs (user_label_prefix, stream);
      return;

    case '|':
      fputs (REGISTER_PREFIX, stream);
      return;

    case '?':
      if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
	{
	  if (TARGET_THUMB || current_insn_predicate != NULL)
	    abort ();

	  fputs (arm_condition_codes[arm_current_cc], stream);
	}
      else if (current_insn_predicate)
	{
	  enum arm_cond_code code;

	  if (TARGET_THUMB)
	    abort ();
	  code = get_arm_condition_code (current_insn_predicate);
	  fputs (arm_condition_codes[code], stream);
	}
      return;

    case 'N':
      {
	REAL_VALUE_TYPE r;
	REAL_VALUE_FROM_CONST_DOUBLE (r, x);
	r = REAL_VALUE_NEGATE (r);
	fprintf (stream, "%s", fp_const_from_val (&r));
      }
      return;

    case 'B':
      if (GET_CODE (x) == CONST_INT)
	{
	  HOST_WIDE_INT val;
	  val = ARM_SIGN_EXTEND (~INTVAL (x));
	  fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
	}
      else
	{
	  putc ('~', stream);
	  output_addr_const (stream, x);
	}
      return;

    case 'i':
      fprintf (stream, "%s", arithmetic_instr (x, 1));
      return;

    case 'I':
      fprintf (stream, "%s", arithmetic_instr (x, 0));
      return;

    case 'S':
      {
	HOST_WIDE_INT val;
	const char * shift = shift_op (x, &val);

	if (shift)
	  {
	    fprintf (stream, ", %s ", shift_op (x, &val));
	    if (val == -1)
	      arm_print_operand (stream, XEXP (x, 1), 0);
	    else
	      {
		fputc ('#', stream);
		fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
	      }
	  }
      }
      return;

      /* An explanation of the 'Q', 'R' and 'H' register operands:

	 In a pair of registers containing a DI or DF value the 'Q'
	 operand returns the register number of the register containing
	 the least significant part of the value.  The 'R' operand returns
	 the register number of the register containing the most
	 significant part of the value.

	 The 'H' operand returns the higher of the two register numbers.
	 On a run where WORDS_BIG_ENDIAN is true the 'H' operand is the
	 same as the 'Q' operand, since the most significant part of the
	 value is held in the lower number register.  The reverse is true
	 on systems where WORDS_BIG_ENDIAN is false.

	 The purpose of these operands is to distinguish between cases
	 where the endian-ness of the values is important (for example
	 when they are added together), and cases where the endian-ness
	 is irrelevant, but the order of register operations is important.
	 For example when loading a value from memory into a register
	 pair, the endian-ness does not matter.  Provided that the value
	 from the lower memory address is put into the lower numbered
	 register, and the value from the higher address is put into the
	 higher numbered register, the load will work regardless of whether
	 the value being loaded is big-wordian or little-wordian.  The
	 order of the two register loads can matter however, if the address
	 of the memory location is actually held in one of the registers
	 being overwritten by the load.  */
    case 'Q':
      if (REGNO (x) > LAST_ARM_REGNUM)
	abort ();
      asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 1 : 0));
      return;

    case 'R':
      if (REGNO (x) > LAST_ARM_REGNUM)
	abort ();
      asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 0 : 1));
      return;

    case 'H':
      if (REGNO (x) > LAST_ARM_REGNUM)
	abort ();
      asm_fprintf (stream, "%r", REGNO (x) + 1);
      return;

    case 'm':
      asm_fprintf (stream, "%r",
		   GET_CODE (XEXP (x, 0)) == REG
		   ? REGNO (XEXP (x, 0)) : REGNO (XEXP (XEXP (x, 0), 0)));
      return;

    case 'M':
      asm_fprintf (stream, "{%r-%r}",
		   REGNO (x),
		   REGNO (x) + NUM_REGS (GET_MODE (x)) - 1);
      return;

    case 'd':
      /* CONST_TRUE_RTX means always -- that's the default.  */
      if (x == const_true_rtx)
	return;

      if (TARGET_ARM)
	fputs (arm_condition_codes[get_arm_condition_code (x)],
	       stream);
      else
	fputs (thumb_condition_code (x, 0), stream);
      return;

    case 'D':
      /* CONST_TRUE_RTX means not always -- ie never.  We shouldn't ever
	 want to do that.  */
      if (x == const_true_rtx)
	abort ();

      if (TARGET_ARM)
	fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
				   (get_arm_condition_code (x))],
	       stream);
      else
	fputs (thumb_condition_code (x, 1), stream);
      return;

    default:
      if (x == 0)
	abort ();

      if (GET_CODE (x) == REG)
	asm_fprintf (stream, "%r", REGNO (x));
      else if (GET_CODE (x) == MEM)
	{
	  output_memory_reference_mode = GET_MODE (x);
	  output_address (XEXP (x, 0));
	}
      else if (GET_CODE (x) == CONST_DOUBLE)
	fprintf (stream, "#%s", fp_immediate_constant (x));
      else if (GET_CODE (x) == NEG)
	abort (); /* This should never happen now.  */
      else
	{
	  fputc ('#', stream);
	  output_addr_const (stream, x);
	}
    }
}
#ifndef AOF_ASSEMBLER
/* Target hook for assembling integer objects.  The ARM version needs to
   handle word-sized values specially.  */

static bool
arm_assemble_integer (x, size, aligned_p)
     rtx x;
     unsigned int size;
     int aligned_p;
{
  if (size == UNITS_PER_WORD && aligned_p)
    {
      fputs ("\t.word\t", asm_out_file);
      output_addr_const (asm_out_file, x);

      /* Mark symbols as position independent.  We only do this in the
	 .text segment, not in the .data segment.  */
      if (NEED_GOT_RELOC && flag_pic && making_const_table &&
	  (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF))
	{
	  if (GET_CODE (x) == SYMBOL_REF
	      && (CONSTANT_POOL_ADDRESS_P (x)
		  || ENCODED_SHORT_CALL_ATTR_P (XSTR (x, 0))))
	    fputs ("(GOTOFF)", asm_out_file);
	  else if (GET_CODE (x) == LABEL_REF)
	    fputs ("(GOTOFF)", asm_out_file);
	  else
	    fputs ("(GOT)", asm_out_file);
	}
      fputc ('\n', asm_out_file);
      return true;
    }

  return default_assemble_integer (x, size, aligned_p);
}
#endif
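/* For example, under -fpic a constant-table entry referring to an
   external symbol is emitted as ".word foo(GOT)", while a reference
   to a label or a constant-pool entry gets ".word .L3(GOTOFF)"
   (symbol and label names illustrative only).  */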
/* A finite state machine takes care of noticing whether or not instructions
   can be conditionally executed, and thus decrease execution time and code
   size by deleting branch instructions.  The fsm is controlled by
   final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE.  */

/* The states of the fsm controlling condition codes are:
   0: normal, do nothing special
   1: make ASM_OUTPUT_OPCODE not output this instruction
   2: make ASM_OUTPUT_OPCODE not output this instruction
   3: make instructions conditional
   4: make instructions conditional

   State transitions (state->state by whom under condition):
   0 -> 1 final_prescan_insn if the `target' is a label
   0 -> 2 final_prescan_insn if the `target' is an unconditional branch
   1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
   2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
   3 -> 0 ASM_OUTPUT_INTERNAL_LABEL if the `target' label is reached
          (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
   4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
          (the target insn is arm_target_insn).

   If the jump clobbers the conditions then we use states 2 and 4.

   A similar thing can be done with conditional return insns.

   XXX In case the `target' is an unconditional branch, this conditionalising
   of the instructions always reduces code size, but not always execution
   time.  But then, I want to reduce the code size to somewhere near what
   /bin/cc produces.  */
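/* Illustrative only: the transformation this fsm enables is, e.g.,

	cmp	r0, #0
	beq	.L1
	add	r1, r1, #1
     .L1:

   becoming

	cmp	r0, #0
	addne	r1, r1, #1

   with the branch and (single-use) label deleted.  */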
/* Returns the index of the ARM condition code string in
   `arm_condition_codes'.  COMPARISON should be an rtx like
   `(eq (...) (...))'.  */

static enum arm_cond_code
get_arm_condition_code (comparison)
     rtx comparison;
{
  enum machine_mode mode = GET_MODE (XEXP (comparison, 0));
  int code;
  enum rtx_code comp_code = GET_CODE (comparison);

  if (GET_MODE_CLASS (mode) != MODE_CC)
    mode = SELECT_CC_MODE (comp_code, XEXP (comparison, 0),
			   XEXP (comparison, 1));

  switch (mode)
    {
    case CC_DNEmode: code = ARM_NE; goto dominance;
    case CC_DEQmode: code = ARM_EQ; goto dominance;
    case CC_DGEmode: code = ARM_GE; goto dominance;
    case CC_DGTmode: code = ARM_GT; goto dominance;
    case CC_DLEmode: code = ARM_LE; goto dominance;
    case CC_DLTmode: code = ARM_LT; goto dominance;
    case CC_DGEUmode: code = ARM_CS; goto dominance;
    case CC_DGTUmode: code = ARM_HI; goto dominance;
    case CC_DLEUmode: code = ARM_LS; goto dominance;
    case CC_DLTUmode: code = ARM_CC;

    dominance:
      if (comp_code != EQ && comp_code != NE)
	abort ();

      if (comp_code == EQ)
	return ARM_INVERSE_CONDITION_CODE (code);
      return code;

    case CC_NOOVmode:
      switch (comp_code)
	{
	case NE: return ARM_NE;
	case EQ: return ARM_EQ;
	case GE: return ARM_PL;
	case LT: return ARM_MI;
	default: abort ();
	}

    case CC_Zmode:
      switch (comp_code)
	{
	case NE: return ARM_NE;
	case EQ: return ARM_EQ;
	default: abort ();
	}

    case CCFPEmode:
    case CCFPmode:
      /* These encodings assume that AC=1 in the FPA system control
	 byte.  This allows us to handle all cases except UNEQ and
	 LTGT.  */
      switch (comp_code)
	{
	case GE: return ARM_GE;
	case GT: return ARM_GT;
	case LE: return ARM_LS;
	case LT: return ARM_MI;
	case NE: return ARM_NE;
	case EQ: return ARM_EQ;
	case ORDERED: return ARM_VC;
	case UNORDERED: return ARM_VS;
	case UNLT: return ARM_LT;
	case UNLE: return ARM_LE;
	case UNGT: return ARM_HI;
	case UNGE: return ARM_PL;
	/* UNEQ and LTGT do not have a representation.  */
	case UNEQ: /* Fall through.  */
	case LTGT: /* Fall through.  */
	default: abort ();
	}

    case CC_SWPmode:
      switch (comp_code)
	{
	case NE: return ARM_NE;
	case EQ: return ARM_EQ;
	case GE: return ARM_LE;
	case GT: return ARM_LT;
	case LE: return ARM_GE;
	case LT: return ARM_GT;
	case GEU: return ARM_LS;
	case GTU: return ARM_CC;
	case LEU: return ARM_CS;
	case LTU: return ARM_HI;
	default: abort ();
	}

    case CC_Cmode:
      switch (comp_code)
	{
	case LTU: return ARM_CS;
	case GEU: return ARM_CC;
	default: abort ();
	}

    case CCmode:
      switch (comp_code)
	{
	case NE: return ARM_NE;
	case EQ: return ARM_EQ;
	case GE: return ARM_GE;
	case GT: return ARM_GT;
	case LE: return ARM_LE;
	case LT: return ARM_LT;
	case GEU: return ARM_CS;
	case GTU: return ARM_HI;
	case LEU: return ARM_LS;
	case LTU: return ARM_CC;
	default: abort ();
	}

    default: abort ();
    }

  abort ();
}
void
arm_final_prescan_insn (insn)
     rtx insn;
{
  /* BODY will hold the body of INSN.  */
  rtx body = PATTERN (insn);

  /* This will be 1 if trying to repeat the trick, and things need to be
     reversed if it appears to fail.  */
  int reverse = 0;

  /* JUMP_CLOBBERS will be one implies that the conditions if a branch is
     taken are clobbered, even if the rtl suggests otherwise.  It also
     means that we have to grub around within the jump expression to find
     out what the conditions are when the jump isn't taken.  */
  int jump_clobbers = 0;

  /* If we start with a return insn, we only succeed if we find another one.  */
  int seeking_return = 0;

  /* START_INSN will hold the insn from where we start looking.  This is the
     first insn after the following code_label if REVERSE is true.  */
  rtx start_insn = insn;

  /* If in state 4, check if the target branch is reached, in order to
     change back to state 0.  */
  if (arm_ccfsm_state == 4)
    {
      if (insn == arm_target_insn)
	{
	  arm_target_insn = NULL;
	  arm_ccfsm_state = 0;
	}
      return;
    }

  /* If in state 3, it is possible to repeat the trick, if this insn is an
     unconditional branch to a label, and immediately following this branch
     is the previous target label which is only used once, and the label this
     branch jumps to is not too far off.  */
  if (arm_ccfsm_state == 3)
    {
      if (simplejump_p (insn))
	{
	  start_insn = next_nonnote_insn (start_insn);
	  if (GET_CODE (start_insn) == BARRIER)
	    {
	      /* XXX Isn't this always a barrier?  */
	      start_insn = next_nonnote_insn (start_insn);
	    }
	  if (GET_CODE (start_insn) == CODE_LABEL
	      && CODE_LABEL_NUMBER (start_insn) == arm_target_label
	      && LABEL_NUSES (start_insn) == 1)
	    reverse = TRUE;
	  else
	    return;
	}
      else if (GET_CODE (body) == RETURN)
	{
	  start_insn = next_nonnote_insn (start_insn);
	  if (GET_CODE (start_insn) == BARRIER)
	    start_insn = next_nonnote_insn (start_insn);
	  if (GET_CODE (start_insn) == CODE_LABEL
	      && CODE_LABEL_NUMBER (start_insn) == arm_target_label
	      && LABEL_NUSES (start_insn) == 1)
	    {
	      reverse = TRUE;
	      seeking_return = 1;
	    }
	  else
	    return;
	}
      else
	return;
    }

  if (arm_ccfsm_state != 0 && !reverse)
    abort ();
  if (GET_CODE (insn) != JUMP_INSN)
    return;

  /* This jump might be paralleled with a clobber of the condition codes;
     the jump should always come first.  */
  if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
    body = XVECEXP (body, 0, 0);

  /* If this is a conditional return then we don't want to know.  */
  if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
      && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
      && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
	  || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
    return;

  if (reverse
      || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
	  && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
    {
      int insns_skipped;
      int fail = FALSE, succeed = FALSE;
      /* Flag which part of the IF_THEN_ELSE is the LABEL_REF.  */
      int then_not_else = TRUE;
      rtx this_insn = start_insn, label = 0;

      /* If the jump cannot be done with one instruction, we cannot
	 conditionally execute the instruction in the inverse case.  */
      if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
	{
	  jump_clobbers = 1;
	  return;
	}

      /* Register the insn jumped to.  */
      if (reverse)
	{
	  if (!seeking_return)
	    label = XEXP (SET_SRC (body), 0);
	}
      else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
	label = XEXP (XEXP (SET_SRC (body), 1), 0);
      else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
	{
	  label = XEXP (XEXP (SET_SRC (body), 2), 0);
	  then_not_else = FALSE;
	}
      else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
	seeking_return = 1;
      else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
	{
	  seeking_return = 1;
	  then_not_else = FALSE;
	}
      else
	abort ();

      /* See how many insns this branch skips, and what kind of insns.  If all
	 insns are okay, and the label or unconditional branch to the same
	 label is not too far away, succeed.  */
      for (insns_skipped = 0;
	   !fail && !succeed && insns_skipped++ < max_insns_skipped;)
	{
	  rtx scanbody;

	  this_insn = next_nonnote_insn (this_insn);
	  if (!this_insn)
	    break;

	  switch (GET_CODE (this_insn))
	    {
	    case CODE_LABEL:
	      /* Succeed if it is the target label, otherwise fail since
		 control falls in from somewhere else.  */
	      if (this_insn == label)
		{
		  if (jump_clobbers)
		    {
		      arm_ccfsm_state = 2;
		      this_insn = next_nonnote_insn (this_insn);
		    }
		  else
		    arm_ccfsm_state = 1;
		  succeed = TRUE;
		}
	      else
		fail = TRUE;
	      break;

	    case BARRIER:
	      /* Succeed if the following insn is the target label.
		 Otherwise fail.
		 If return insns are used then the last insn in a function
		 will be a barrier.  */
	      this_insn = next_nonnote_insn (this_insn);
	      if (this_insn && this_insn == label)
		{
		  if (jump_clobbers)
		    {
		      arm_ccfsm_state = 2;
		      this_insn = next_nonnote_insn (this_insn);
		    }
		  else
		    arm_ccfsm_state = 1;
		  succeed = TRUE;
		}
	      else
		fail = TRUE;
	      break;

	    case CALL_INSN:
	      /* If using 32-bit addresses the cc is not preserved over
		 calls.  */
	      if (TARGET_APCS_32)
		{
		  /* Succeed if the following insn is the target label,
		     or if the following two insns are a barrier and
		     the target label.  */
		  this_insn = next_nonnote_insn (this_insn);
		  if (this_insn && GET_CODE (this_insn) == BARRIER)
		    this_insn = next_nonnote_insn (this_insn);

		  if (this_insn && this_insn == label
		      && insns_skipped < max_insns_skipped)
		    {
		      if (jump_clobbers)
			{
			  arm_ccfsm_state = 2;
			  this_insn = next_nonnote_insn (this_insn);
			}
		      else
			arm_ccfsm_state = 1;
		      succeed = TRUE;
		    }
		  else
		    fail = TRUE;
		}
	      break;

	    case JUMP_INSN:
	      /* If this is an unconditional branch to the same label, succeed.
		 If it is to another label, do nothing.  If it is conditional,
		 fail.  */
	      /* XXX Probably, the tests for SET and the PC are unnecessary.  */

	      scanbody = PATTERN (this_insn);
	      if (GET_CODE (scanbody) == SET
		  && GET_CODE (SET_DEST (scanbody)) == PC)
		{
		  if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
		      && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
		    {
		      arm_ccfsm_state = 2;
		      succeed = TRUE;
		    }
		  else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
		    fail = TRUE;
		}
	      /* Fail if a conditional return is undesirable (eg on a
		 StrongARM), but still allow this if optimizing for size.  */
	      else if (GET_CODE (scanbody) == RETURN
		       && !use_return_insn (TRUE)
		       && !optimize_size)
		fail = TRUE;
	      else if (GET_CODE (scanbody) == RETURN
		       && seeking_return)
		{
		  arm_ccfsm_state = 2;
		  succeed = TRUE;
		}
	      else if (GET_CODE (scanbody) == PARALLEL)
		{
		  switch (get_attr_conds (this_insn))
		    {
		    case CONDS_NOCOND:
		      break;
		    default:
		      fail = TRUE;
		      break;
		    }
		}
	      else
		fail = TRUE;	/* Unrecognized jump (eg epilogue).  */

	      break;

	    case INSN:
	      /* Instructions using or affecting the condition codes make it
		 fail.  */
	      scanbody = PATTERN (this_insn);
	      if (!(GET_CODE (scanbody) == SET
		    || GET_CODE (scanbody) == PARALLEL)
		  || get_attr_conds (this_insn) != CONDS_NOCOND)
		fail = TRUE;
	      break;

	    default:
	      break;
	    }
	}

      if (succeed)
	{
	  if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
	    arm_target_label = CODE_LABEL_NUMBER (label);
	  else if (seeking_return || arm_ccfsm_state == 2)
	    {
	      while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
		{
		  this_insn = next_nonnote_insn (this_insn);
		  if (this_insn && (GET_CODE (this_insn) == BARRIER
				    || GET_CODE (this_insn) == CODE_LABEL))
		    abort ();
		}
	      if (!this_insn)
		{
		  /* Oh, dear! we ran off the end.. give up.  */
		  recog (PATTERN (insn), insn, NULL);
		  arm_ccfsm_state = 0;
		  arm_target_insn = NULL;
		  return;
		}
	      arm_target_insn = this_insn;
	    }
	  else
	    abort ();

	  if (jump_clobbers)
	    {
	      if (reverse)
		abort ();
	      arm_current_cc =
		get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
							  0), 0), 1));
	      if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
		arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
	      if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
		arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
	    }
	  else
	    {
	      /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
		 what it was.  */
	      if (!reverse)
		arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
							       0));

	      if (reverse || then_not_else)
		arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
	    }

	  /* Restore recog_data (getting the attributes of other insns can
	     destroy this array, but final.c assumes that it remains intact
	     across this call; since the insn has been recognized already we
	     call recog direct).  */
	  recog (PATTERN (insn), insn, NULL);
	}
    }
}
/* Returns true if REGNO is a valid register
   for holding a quantity of type MODE.  */

int
arm_hard_regno_mode_ok (regno, mode)
     unsigned int regno;
     enum machine_mode mode;
{
  if (GET_MODE_CLASS (mode) == MODE_CC)
    return regno == CC_REGNUM;

  if (TARGET_THUMB)
    /* For the Thumb we only allow values bigger than SImode in
       registers 0 - 6, so that there is always a second low
       register available to hold the upper part of the value.
       We probably ought to ensure that the register is the
       start of an even numbered register pair.  */
    return (NUM_REGS (mode) < 2) || (regno < LAST_LO_REGNUM);

  if (regno <= LAST_ARM_REGNUM)
    /* We allow any value to be stored in the general registers.  */
    return 1;

  if (   regno == FRAME_POINTER_REGNUM
      || regno == ARG_POINTER_REGNUM)
    /* We only allow integers in the fake hard registers.  */
    return GET_MODE_CLASS (mode) == MODE_INT;

  /* The only registers left are the FPU registers
     which we only allow to hold FP values.  */
  return GET_MODE_CLASS (mode) == MODE_FLOAT
    && regno >= FIRST_ARM_FP_REGNUM
    && regno <= LAST_ARM_FP_REGNUM;
}
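/* For example, SImode fits in any core register, but a DImode value
   (NUM_REGS == 2) is only accepted in Thumb mode when it starts below
   LAST_LO_REGNUM, so that a second low register is free for its upper
   half.  */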
enum reg_class
arm_regno_class (regno)
     int regno;
{
  if (TARGET_THUMB)
    {
      if (regno == STACK_POINTER_REGNUM)
	return STACK_REG;
      if (regno == CC_REGNUM)
	return CC_REG;
      if (regno < 8)
	return LO_REGS;
      return HI_REGS;
    }

  if (   regno <= LAST_ARM_REGNUM
      || regno == FRAME_POINTER_REGNUM
      || regno == ARG_POINTER_REGNUM)
    return GENERAL_REGS;

  if (regno == CC_REGNUM)
    return NO_REGS;

  return FPU_REGS;
}
/* Handle a special case when computing the offset
   of an argument from the frame pointer.  */

int
arm_debugger_arg_offset (value, addr)
     int value;
     rtx addr;
{
  rtx insn;

  /* We are only interested if dbxout_parms() failed to compute the offset.  */
  if (value != 0)
    return 0;

  /* We can only cope with the case where the address is held in a register.  */
  if (GET_CODE (addr) != REG)
    return 0;

  /* If we are using the frame pointer to point at the argument, then
     an offset of 0 is correct.  */
  if (REGNO (addr) == (unsigned) HARD_FRAME_POINTER_REGNUM)
    return 0;

  /* If we are using the stack pointer to point at the
     argument, then an offset of 0 is correct.  */
  if ((TARGET_THUMB || !frame_pointer_needed)
      && REGNO (addr) == SP_REGNUM)
    return 0;

  /* Oh dear.  The argument is pointed to by a register rather
     than being held in a register, or being stored at a known
     offset from the frame pointer.  Since GDB only understands
     those two kinds of argument we must translate the address
     held in the register into an offset from the frame pointer.
     We do this by searching through the insns for the function
     looking to see where this register gets its value.  If the
     register is initialised from the frame pointer plus an offset
     then we are in luck and we can continue, otherwise we give up.

     This code is exercised by producing debugging information
     for a function with arguments like this:

	   double func (double a, double b, int c, double d) {return d;}

     Without this code the stab for parameter 'd' will be set to
     an offset of 0 from the frame pointer, rather than 8.  */

  /* The if() statement says:

     If the insn is a normal instruction
     and if the insn is setting the value in a register
     and if the register being set is the register holding the address of the argument
     and if the address is computed by an addition
     that involves adding to a register
     which is the frame pointer
     a constant integer

     then...  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (   GET_CODE (insn) == INSN
	  && GET_CODE (PATTERN (insn)) == SET
	  && REGNO    (XEXP (PATTERN (insn), 0)) == REGNO (addr)
	  && GET_CODE (XEXP (PATTERN (insn), 1)) == PLUS
	  && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 0)) == REG
	  && REGNO    (XEXP (XEXP (PATTERN (insn), 1), 0)) == (unsigned) HARD_FRAME_POINTER_REGNUM
	  && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 1)) == CONST_INT
	  )
	{
	  value = INTVAL (XEXP (XEXP (PATTERN (insn), 1), 1));

	  break;
	}
    }

  if (value == 0)
    {
      warning ("unable to compute real location of stacked parameter");
      value = 8; /* XXX magic hack */
    }

  return value;
}
#define def_builtin(NAME, TYPE, CODE) \
  builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, NULL)

void
arm_init_builtins ()
{
  tree endlink = void_list_node;
  tree int_endlink = tree_cons (NULL_TREE, integer_type_node, endlink);
  tree pchar_type_node = build_pointer_type (char_type_node);

  tree int_ftype_int, void_ftype_pchar;

  /* void func (void *) */
  void_ftype_pchar
    = build_function_type (void_type_node,
			   tree_cons (NULL_TREE, pchar_type_node, endlink));

  /* int func (int) */
  int_ftype_int
    = build_function_type (integer_type_node, int_endlink);

  /* Initialize arm V5 builtins.  */
  if (arm_arch5)
    def_builtin ("__builtin_clz", int_ftype_int, ARM_BUILTIN_CLZ);
}
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

rtx
arm_expand_builtin (exp, target, subtarget, mode, ignore)
     tree exp;
     rtx target;
     rtx subtarget ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
     int ignore ATTRIBUTE_UNUSED;
{
  enum insn_code icode;
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg0;
  rtx op0, pat;
  enum machine_mode tmode, mode0;
  int fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    default:
      break;

    case ARM_BUILTIN_CLZ:
      icode = CODE_FOR_clz;
      arg0 = TREE_VALUE (arglist);
      op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
      tmode = insn_data[icode].operand[0].mode;
      mode0 = insn_data[icode].operand[1].mode;

      if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);
      if (target == 0
	  || GET_MODE (target) != tmode
	  || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);
      pat = GEN_FCN (icode) (target, op0);
      if (! pat)
	return 0;
      emit_insn (pat);
      return target;
    }

  /* @@@ Should really do something sensible here.  */
  return NULL_RTX;
}
/* Recursively search through all of the blocks in a function
   checking to see if any of the variables created in that
   function match the RTX called 'orig'.  If they do then
   replace them with the RTX called 'new'.  */

static void
replace_symbols_in_block (block, orig, new)
     tree block;
     rtx orig;
     rtx new;
{
  for (; block; block = BLOCK_CHAIN (block))
    {
      tree sym;

      if (!TREE_USED (block))
	continue;

      for (sym = BLOCK_VARS (block); sym; sym = TREE_CHAIN (sym))
	{
	  if (  (DECL_NAME (sym) == 0 && TREE_CODE (sym) != TYPE_DECL)
	      || DECL_IGNORED_P (sym)
	      || TREE_CODE (sym) != VAR_DECL
	      || DECL_EXTERNAL (sym)
	      || !rtx_equal_p (DECL_RTL (sym), orig)
	      )
	    continue;

	  SET_DECL_RTL (sym, new);
	}

      replace_symbols_in_block (BLOCK_SUBBLOCKS (block), orig, new);
    }
}
/* Return the number (counting from 0) of
   the least significant set bit in MASK.  */

static int
number_of_first_bit_set (mask)
     int mask;
{
  int bit;

  for (bit = 0;
       (mask & (1 << bit)) == 0;
       ++bit)
    continue;

  return bit;
}
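/* For example, number_of_first_bit_set (0x4070) returns 4, since
   bit 4 is the lowest bit set in that mask.  */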
/* Generate code to return from a thumb function.
   If 'reg_containing_return_addr' is -1, then the return address is
   actually on the stack, at the stack pointer.  */
static void
thumb_exit (f, reg_containing_return_addr, eh_ofs)
     FILE * f;
     int    reg_containing_return_addr;
     rtx    eh_ofs;
{
  unsigned regs_available_for_popping;
  unsigned regs_to_pop;
  int pops_needed;
  unsigned available;
  unsigned required;
  int mode;
  int size;
  int restore_a4 = FALSE;

  /* Compute the registers we need to pop.  */
  regs_to_pop = 0;
  pops_needed = 0;

  /* There is an assumption here, that if eh_ofs is not NULL, the
     normal return address will have been pushed.  */
  if (reg_containing_return_addr == -1 || eh_ofs)
    {
      /* When we are generating a return for __builtin_eh_return,
	 reg_containing_return_addr must specify the return regno.  */
      if (eh_ofs && reg_containing_return_addr == -1)
	abort ();

      regs_to_pop |= 1 << LR_REGNUM;
      ++pops_needed;
    }

  if (TARGET_BACKTRACE)
    {
      /* Restore the (ARM) frame pointer and stack pointer.  */
      regs_to_pop |= (1 << ARM_HARD_FRAME_POINTER_REGNUM) | (1 << SP_REGNUM);
      pops_needed += 2;
    }

  /* If there is nothing to pop then just emit the BX instruction and
     return.  */
  if (pops_needed == 0)
    {
      if (eh_ofs)
	asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));

      asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
      return;
    }
  /* Otherwise if we are not supporting interworking and we have not created
     a backtrace structure and the function was not entered in ARM mode then
     just pop the return address straight into the PC.  */
  else if (!TARGET_INTERWORK
	   && !TARGET_BACKTRACE
	   && !is_called_in_ARM_mode (current_function_decl))
    {
      if (eh_ofs)
	{
	  asm_fprintf (f, "\tadd\t%r, #4\n", SP_REGNUM);
	  asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
	  asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
	}
      else
	asm_fprintf (f, "\tpop\t{%r}\n", PC_REGNUM);

      return;
    }

  /* Find out how many of the (return) argument registers we can corrupt.  */
  regs_available_for_popping = 0;

  /* If returning via __builtin_eh_return, the bottom three registers
     all contain information needed for the return.  */
  if (eh_ofs)
    size = 12;
  else
    {
      /* If we can deduce the registers used from the function's
	 return value.  This is more reliable than examining
	 regs_ever_live[] because that will be set if the register is
	 ever used in the function, not just if the register is used
	 to hold a return value.  */

      if (current_function_return_rtx != 0)
	mode = GET_MODE (current_function_return_rtx);
      else
	mode = DECL_MODE (DECL_RESULT (current_function_decl));

      size = GET_MODE_SIZE (mode);

      if (size == 0)
	{
	  /* In a void function we can use any argument register.
	     In a function that returns a structure on the stack
	     we can use the second and third argument registers.  */
	  if (mode == VOIDmode)
	    regs_available_for_popping =
	      (1 << ARG_REGISTER (1))
	      | (1 << ARG_REGISTER (2))
	      | (1 << ARG_REGISTER (3));
	  else
	    regs_available_for_popping =
	      (1 << ARG_REGISTER (2))
	      | (1 << ARG_REGISTER (3));
	}
      else if (size <= 4)
	regs_available_for_popping =
	  (1 << ARG_REGISTER (2))
	  | (1 << ARG_REGISTER (3));
      else if (size <= 8)
	regs_available_for_popping =
	  (1 << ARG_REGISTER (3));
    }

  /* Match registers to be popped with registers into which we pop them.  */
  for (available = regs_available_for_popping,
       required  = regs_to_pop;
       required != 0 && available != 0;
       available &= ~(available & - available),
       required  &= ~(required  & - required))
    -- pops_needed;

  /* If we have any popping registers left over, remove them.  */
  if (available > 0)
    regs_available_for_popping &= ~available;

  /* Otherwise if we need another popping register we can use
     the fourth argument register.  */
  else if (pops_needed)
    {
      /* If we have not found any free argument registers and
	 reg a4 contains the return address, we must move it.  */
      if (regs_available_for_popping == 0
	  && reg_containing_return_addr == LAST_ARG_REGNUM)
	{
	  asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
	  reg_containing_return_addr = LR_REGNUM;
	}
      else if (size > 12)
	{
	  /* Register a4 is being used to hold part of the return value,
	     but we have dire need of a free, low register.  */
	  restore_a4 = TRUE;

	  asm_fprintf (f, "\tmov\t%r, %r\n",IP_REGNUM, LAST_ARG_REGNUM);
	}

      if (reg_containing_return_addr != LAST_ARG_REGNUM)
	{
	  /* The fourth argument register is available.  */
	  regs_available_for_popping
9550 for (available
= regs_available_for_popping
,
9551 required
= regs_to_pop
;
9552 required
!= 0 && available
!= 0;
9553 available
&= ~(available
& - available
),
9554 required
&= ~(required
& - required
))
9557 /* If we have any popping registers left over, remove them. */
9559 regs_available_for_popping
&= ~available
;
9561 /* Otherwise if we need another popping register we can use
9562 the fourth argument register. */
9563 else if (pops_needed
)
9565 /* If we have not found any free argument registers and
9566 reg a4 contains the return address, we must move it. */
9567 if (regs_available_for_popping
== 0
9568 && reg_containing_return_addr
== LAST_ARG_REGNUM
)
9570 asm_fprintf (f
, "\tmov\t%r, %r\n", LR_REGNUM
, LAST_ARG_REGNUM
);
9571 reg_containing_return_addr
= LR_REGNUM
;
9575 /* Register a4 is being used to hold part of the return value,
9576 but we have dire need of a free, low register. */
9579 asm_fprintf (f
, "\tmov\t%r, %r\n",IP_REGNUM
, LAST_ARG_REGNUM
);
9582 if (reg_containing_return_addr
!= LAST_ARG_REGNUM
)
9584 /* The fourth argument register is available. */
9585 regs_available_for_popping
|= 1 << LAST_ARG_REGNUM
;
9591 /* Pop as many registers as we can. */
9592 thumb_pushpop (f
, regs_available_for_popping
, FALSE
);
9594 /* Process the registers we popped. */
9595 if (reg_containing_return_addr
== -1)
9597 /* The return address was popped into the lowest numbered register. */
9598 regs_to_pop
&= ~(1 << LR_REGNUM
);
9600 reg_containing_return_addr
=
9601 number_of_first_bit_set (regs_available_for_popping
);
9603 /* Remove this register for the mask of available registers, so that
9604 the return address will not be corrupted by futher pops. */
9605 regs_available_for_popping
&= ~(1 << reg_containing_return_addr
);
9608 /* If we popped other registers then handle them here. */
9609 if (regs_available_for_popping
)
9613 /* Work out which register currently contains the frame pointer. */
9614 frame_pointer
= number_of_first_bit_set (regs_available_for_popping
);
9616 /* Move it into the correct place. */
9617 asm_fprintf (f
, "\tmov\t%r, %r\n",
9618 ARM_HARD_FRAME_POINTER_REGNUM
, frame_pointer
);
9620 /* (Temporarily) remove it from the mask of popped registers. */
9621 regs_available_for_popping
&= ~(1 << frame_pointer
);
9622 regs_to_pop
&= ~(1 << ARM_HARD_FRAME_POINTER_REGNUM
);
9624 if (regs_available_for_popping
)
9628 /* We popped the stack pointer as well,
9629 find the register that contains it. */
9630 stack_pointer
= number_of_first_bit_set (regs_available_for_popping
);
9632 /* Move it into the stack register. */
9633 asm_fprintf (f
, "\tmov\t%r, %r\n", SP_REGNUM
, stack_pointer
);
9635 /* At this point we have popped all necessary registers, so
9636 do not worry about restoring regs_available_for_popping
9637 to its correct value:
9639 assert (pops_needed == 0)
9640 assert (regs_available_for_popping == (1 << frame_pointer))
9641 assert (regs_to_pop == (1 << STACK_POINTER)) */
9645 /* Since we have just move the popped value into the frame
9646 pointer, the popping register is available for reuse, and
9647 we know that we still have the stack pointer left to pop. */
9648 regs_available_for_popping
|= (1 << frame_pointer
);
9652 /* If we still have registers left on the stack, but we no longer have
9653 any registers into which we can pop them, then we must move the return
9654 address into the link register and make available the register that
9656 if (regs_available_for_popping
== 0 && pops_needed
> 0)
9658 regs_available_for_popping
|= 1 << reg_containing_return_addr
;
9660 asm_fprintf (f
, "\tmov\t%r, %r\n", LR_REGNUM
,
9661 reg_containing_return_addr
);
9663 reg_containing_return_addr
= LR_REGNUM
;
9666 /* If we have registers left on the stack then pop some more.
9667 We know that at most we will want to pop FP and SP. */
9668 if (pops_needed
> 0)
9673 thumb_pushpop (f
, regs_available_for_popping
, FALSE
);
9675 /* We have popped either FP or SP.
9676 Move whichever one it is into the correct register. */
9677 popped_into
= number_of_first_bit_set (regs_available_for_popping
);
9678 move_to
= number_of_first_bit_set (regs_to_pop
);
9680 asm_fprintf (f
, "\tmov\t%r, %r\n", move_to
, popped_into
);
9682 regs_to_pop
&= ~(1 << move_to
);
9687 /* If we still have not popped everything then we must have only
9688 had one register available to us and we are now popping the SP. */
9689 if (pops_needed
> 0)
9693 thumb_pushpop (f
, regs_available_for_popping
, FALSE
);
9695 popped_into
= number_of_first_bit_set (regs_available_for_popping
);
9697 asm_fprintf (f
, "\tmov\t%r, %r\n", SP_REGNUM
, popped_into
);
9699 assert (regs_to_pop == (1 << STACK_POINTER))
9700 assert (pops_needed == 1)
9704 /* If necessary restore the a4 register. */
9707 if (reg_containing_return_addr
!= LR_REGNUM
)
9709 asm_fprintf (f
, "\tmov\t%r, %r\n", LR_REGNUM
, LAST_ARG_REGNUM
);
9710 reg_containing_return_addr
= LR_REGNUM
;
9713 asm_fprintf (f
, "\tmov\t%r, %r\n", LAST_ARG_REGNUM
, IP_REGNUM
);
9717 asm_fprintf (f
, "\tadd\t%r, %r\n", SP_REGNUM
, REGNO (eh_ofs
));
9719 /* Return to caller. */
9720 asm_fprintf (f
, "\tbx\t%r\n", reg_containing_return_addr
);
/* Emit code to push or pop registers to or from the stack.  */
static void
thumb_pushpop (f, mask, push)
     FILE * f;
     int mask;
     int push;
{
  int regno;
  int lo_mask = mask & 0xFF;

  if (lo_mask == 0 && !push && (mask & (1 << 15)))
    {
      /* Special case.  Do not generate a POP PC statement here, do it in
	 thumb_exit */
      thumb_exit (f, -1, NULL_RTX);
      return;
    }

  fprintf (f, "\t%s\t{", push ? "push" : "pop");

  /* Look at the low registers first.  */
  for (regno = 0; regno <= LAST_LO_REGNUM; regno++, lo_mask >>= 1)
    {
      if (lo_mask & 1)
	{
	  asm_fprintf (f, "%r", regno);

	  if ((lo_mask & ~1) != 0)
	    fprintf (f, ", ");
	}
    }

  if (push && (mask & (1 << LR_REGNUM)))
    {
      /* Catch pushing the LR.  */
      if (mask & 0xFF)
	fprintf (f, ", ");

      asm_fprintf (f, "%r", LR_REGNUM);
    }
  else if (!push && (mask & (1 << PC_REGNUM)))
    {
      /* Catch popping the PC.  */
      if (TARGET_INTERWORK || TARGET_BACKTRACE)
	{
	  /* The PC is never popped directly, instead
	     it is popped into r3 and then BX is used.  */
	  fprintf (f, "}\n");

	  thumb_exit (f, -1, NULL_RTX);

	  return;
	}
      else
	{
	  if (mask & 0xFF)
	    fprintf (f, ", ");

	  asm_fprintf (f, "%r", PC_REGNUM);
	}
    }

  fprintf (f, "}\n");
}
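/* Illustrative sketch (unused): the loop above decides whether to emit
   a ", " separator by checking whether more mask bits remain.  The same
   register-list formatting with a first-element flag instead; assumes
   stdio is already available here via system.h:  */
static void
print_reg_list (FILE *stream, int mask)
{
  int regno;
  int first = 1;

  fputc ('{', stream);
  /* Emit "rN" for each of r0..r7 whose bit is set in MASK.  */
  for (regno = 0; regno <= 7; regno++)
    if (mask & (1 << regno))
      {
	fprintf (stream, "%sr%d", first ? "" : ", ", regno);
	first = 0;
      }
  fputc ('}', stream);
}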
void
thumb_final_prescan_insn (insn)
     rtx insn;
{
  if (flag_print_asm_name)
    asm_fprintf (asm_out_file, "%@ 0x%04x\n",
		 INSN_ADDRESSES (INSN_UID (insn)));
}
int
thumb_shiftable_const (val)
     unsigned HOST_WIDE_INT val;
{
  unsigned HOST_WIDE_INT mask = 0xff;
  int i;

  if (val == 0) /* XXX */
    return 0;

  for (i = 0; i < 25; i++)
    if ((val & (mask << i)) == val)
      return 1;

  return 0;
}
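/* Illustrative sketch (unused): a constant accepted by
   thumb_shiftable_const can be synthesized as an 8-bit immediate moved
   into a register and then shifted left.  A standalone decomposition
   under the same 32-bit assumptions:  */
static void
split_shiftable_const (unsigned long val, int *byte, int *shift)
{
  int i;

  for (i = 0; i < 25; i++)
    if ((val & (0xffUL << i)) == val)
      {
	*byte = (int) (val >> i);	/* Fits in 8 bits: mov rN, #byte  */
	*shift = i;			/* then: lsl rN, rN, #shift  */
	return;
      }

  *byte = -1;				/* Not representable this way.  */
  *shift = -1;
}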
/* Returns non-zero if the current function contains,
   or might contain a far jump.  */
int
thumb_far_jump_used_p (int in_prologue)
{
  rtx insn;

  /* This test is only important for leaf functions.  */
  /* assert (!leaf_function_p ()); */

  /* If we have already decided that far jumps may be used,
     do not bother checking again, and always return true even if
     it turns out that they are not being used.  Once we have made
     the decision that far jumps are present (and that hence the link
     register will be pushed onto the stack) we cannot go back on it.  */
  if (cfun->machine->far_jump_used)
    return 1;

  /* If this function is not being called from the prologue/epilogue
     generation code then it must be being called from the
     INITIAL_ELIMINATION_OFFSET macro.  */
  if (!in_prologue)
    {
      /* In this case we know that we are being asked about the elimination
	 of the arg pointer register.  If that register is not being used,
	 then there are no arguments on the stack, and we do not have to
	 worry that a far jump might force the prologue to push the link
	 register, changing the stack offsets.  In this case we can just
	 return false, since the presence of far jumps in the function will
	 not affect stack offsets.

	 If the arg pointer is live (or if it was live, but has now been
	 eliminated and so set to dead) then we do have to test to see if
	 the function might contain a far jump.  This test can lead to some
	 false negatives, since before reload is completed, the length of
	 branch instructions is not known, so gcc defaults to returning their
	 longest length, which in turn sets the far jump attribute to true.

	 A false negative will not result in bad code being generated, but it
	 will result in a needless push and pop of the link register.  We
	 hope that this does not occur too often.  */
      if (regs_ever_live [ARG_POINTER_REGNUM])
	cfun->machine->arg_pointer_live = 1;
      else if (!cfun->machine->arg_pointer_live)
	return 0;
    }

  /* Check to see if the function contains a branch
     insn with the far jump attribute set.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == JUMP_INSN
	  /* Ignore tablejump patterns.  */
	  && GET_CODE (PATTERN (insn)) != ADDR_VEC
	  && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
	  && get_attr_far_jump (insn) == FAR_JUMP_YES
	  )
	{
	  /* Record the fact that we have decided that
	     the function does use far jumps.  */
	  cfun->machine->far_jump_used = 1;
	  return 1;
	}
    }

  return 0;
}
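/* Illustrative sketch (unused) of the sticky-decision pattern above:
   once the expensive scan has answered "yes", the answer is cached and
   never revisited, because code has already been generated under that
   assumption.  CACHE and SCAN are hypothetical names for this sketch:  */
static int
sticky_predicate (int *cache, int (*scan) (void))
{
  if (*cache)
    return 1;			/* Previous "yes" answers are final.  */

  if (scan ())
    {
      *cache = 1;		/* Record the irrevocable decision.  */
      return 1;
    }

  return 0;			/* "No" may still flip to "yes" later.  */
}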
/* Return non-zero if FUNC must be entered in ARM mode.  */
int
is_called_in_ARM_mode (func)
     tree func;
{
  if (TREE_CODE (func) != FUNCTION_DECL)
    abort ();

  /* Ignore the problem about functions whose address is taken.  */
  if (TARGET_CALLEE_INTERWORKING && TREE_PUBLIC (func))
    return TRUE;

#ifdef ARM_PE
  return lookup_attribute ("interfacearm", DECL_ATTRIBUTES (func)) != NULL_TREE;
#else
  return FALSE;
#endif
}
/* The bits which aren't usefully expanded as rtl.  */

const char *
thumb_unexpanded_epilogue ()
{
  int regno;
  int live_regs_mask = 0;
  int high_regs_pushed = 0;
  int leaf_function = leaf_function_p ();
  int had_to_push_lr;
  rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;

  if (return_used_this_function)
    return "";

  for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
    if (regs_ever_live[regno] && !call_used_regs[regno]
	&& !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
      live_regs_mask |= 1 << regno;

  for (regno = 8; regno < 13; regno++)
    {
      if (regs_ever_live[regno] && !call_used_regs[regno]
	  && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
	high_regs_pushed++;
    }

  /* The prologue may have pushed some high registers to use as
     work registers.  e.g. the testsuite file:
     gcc/testsuite/gcc/gcc.c-torture/execute/complex-2.c
     compiles to produce:
	push	{r4, r5, r6, r7, lr}
	mov	r7, r9
	mov	r6, r8
	push	{r6, r7}
     as part of the prologue.  We have to undo that pushing here.  */

  if (high_regs_pushed)
    {
      int mask = live_regs_mask;
      int next_hi_reg;
      int size;
      int mode;

      /* If we can deduce the registers used from the function's return value.
	 This is more reliable than examining regs_ever_live[] because that
	 will be set if the register is ever used in the function, not just if
	 the register is used to hold a return value.  */
      if (current_function_return_rtx != 0)
	mode = GET_MODE (current_function_return_rtx);
      else
	mode = DECL_MODE (DECL_RESULT (current_function_decl));

      size = GET_MODE_SIZE (mode);

      /* Unless we are returning a type of size > 12 register r3 is
	 available.  */
      if (size < 13)
	mask |= 1 << 3;

      if (mask == 0)
	/* Oh dear!  We have no low registers into which we can pop
	   high registers!  */
	internal_error
	  ("no low registers available for popping high registers");

      for (next_hi_reg = 8; next_hi_reg < 13; next_hi_reg++)
	if (regs_ever_live[next_hi_reg] && !call_used_regs[next_hi_reg]
	    && !(TARGET_SINGLE_PIC_BASE && (next_hi_reg == arm_pic_register)))
	  break;

      while (high_regs_pushed)
	{
	  /* Find lo register(s) into which the high register(s) can
	     be popped.  */
	  for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
	    {
	      if (mask & (1 << regno))
		high_regs_pushed--;
	      if (high_regs_pushed == 0)
		break;
	    }

	  mask &= (2 << regno) - 1;	/* A noop if regno == 8 */

	  /* Pop the values into the low register(s).  */
	  thumb_pushpop (asm_out_file, mask, 0);

	  /* Move the value(s) into the high registers.  */
	  for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
	    {
	      if (mask & (1 << regno))
		{
		  asm_fprintf (asm_out_file, "\tmov\t%r, %r\n", next_hi_reg,
			       regno);

		  for (next_hi_reg++; next_hi_reg < 13; next_hi_reg++)
		    if (regs_ever_live[next_hi_reg]
			&& !call_used_regs[next_hi_reg]
			&& !(TARGET_SINGLE_PIC_BASE
			     && (next_hi_reg == arm_pic_register)))
		      break;
		}
	    }
	}
    }

  had_to_push_lr = (live_regs_mask || !leaf_function
		    || thumb_far_jump_used_p (1));

  if (TARGET_BACKTRACE
      && ((live_regs_mask & 0xFF) == 0)
      && regs_ever_live [LAST_ARG_REGNUM] != 0)
    {
      /* The stack backtrace structure creation code had to
	 push R7 in order to get a work register, so we pop
	 it now.  */
      live_regs_mask |= (1 << LAST_LO_REGNUM);
    }

  if (current_function_pretend_args_size == 0 || TARGET_BACKTRACE)
    {
      if (had_to_push_lr
	  && !is_called_in_ARM_mode (current_function_decl)
	  && !eh_ofs)
	live_regs_mask |= 1 << PC_REGNUM;

      /* Either no argument registers were pushed or a backtrace
	 structure was created which includes an adjusted stack
	 pointer, so just pop everything.  */
      if (live_regs_mask)
	thumb_pushpop (asm_out_file, live_regs_mask, FALSE);

      if (eh_ofs)
	thumb_exit (asm_out_file, 2, eh_ofs);
      /* We have either just popped the return address into the
	 PC or it was kept in LR for the entire function or
	 it is still on the stack because we do not want to
	 return by doing a pop {pc}.  */
      else if ((live_regs_mask & (1 << PC_REGNUM)) == 0)
	thumb_exit (asm_out_file,
		    (had_to_push_lr
		     && is_called_in_ARM_mode (current_function_decl)) ?
		    -1 : LR_REGNUM, NULL_RTX);
    }
  else
    {
      /* Pop everything but the return address.  */
      live_regs_mask &= ~(1 << PC_REGNUM);

      if (live_regs_mask)
	thumb_pushpop (asm_out_file, live_regs_mask, FALSE);

      if (had_to_push_lr)
	/* Get the return address into a temporary register.  */
	thumb_pushpop (asm_out_file, 1 << LAST_ARG_REGNUM, 0);

      /* Remove the argument registers that were pushed onto the stack.  */
      asm_fprintf (asm_out_file, "\tadd\t%r, %r, #%d\n",
		   SP_REGNUM, SP_REGNUM,
		   current_function_pretend_args_size);

      if (eh_ofs)
	thumb_exit (asm_out_file, 2, eh_ofs);
      else
	thumb_exit (asm_out_file,
		    had_to_push_lr ? LAST_ARG_REGNUM : LR_REGNUM, NULL_RTX);
    }

  return "";
}
/* Functions to save and restore machine-specific function data.  */

static void
arm_mark_machine_status (p)
     struct function * p;
{
  machine_function *machine = p->machine;

  if (machine)
    ggc_mark_rtx (machine->eh_epilogue_sp_ofs);
}

static void
arm_init_machine_status (p)
     struct function * p;
{
  p->machine =
    (machine_function *) xcalloc (1, sizeof (machine_function));

#if ARM_FT_UNKNOWN != 0
  ((machine_function *) p->machine)->func_type = ARM_FT_UNKNOWN;
#endif
}

static void
arm_free_machine_status (p)
     struct function * p;
{
  if (p->machine)
    {
      free (p->machine);
      p->machine = NULL;
    }
}
/* Return an RTX indicating where the return address to the
   calling function can be found.  */
rtx
arm_return_addr (count, frame)
     int count;
     rtx frame ATTRIBUTE_UNUSED;
{
  if (count != 0)
    return NULL_RTX;

  if (TARGET_APCS_32)
    return get_hard_reg_initial_val (Pmode, LR_REGNUM);
  else
    {
      rtx lr = gen_rtx_AND (Pmode, gen_rtx_REG (Pmode, LR_REGNUM),
			    GEN_INT (RETURN_ADDR_MASK26));
      return get_func_hard_reg_initial_val (cfun, lr);
    }
}
/* Do anything needed before RTL is emitted for each function.  */
void
arm_init_expanders ()
{
  /* Arrange to initialize and mark the machine per-function status.  */
  init_machine_status = arm_init_machine_status;
  mark_machine_status = arm_mark_machine_status;
  free_machine_status = arm_free_machine_status;
}
/* Generate the rest of a function's prologue.  */
void
thumb_expand_prologue ()
{
  HOST_WIDE_INT amount = (get_frame_size ()
			  + current_function_outgoing_args_size);
  unsigned long func_type;

  func_type = arm_current_func_type ();

  /* Naked functions don't have prologues.  */
  if (IS_NAKED (func_type))
    return;

  if (IS_INTERRUPT (func_type))
    {
      error ("interrupt service routines cannot be coded in Thumb mode");
      return;
    }

  if (frame_pointer_needed)
    emit_insn (gen_movsi (hard_frame_pointer_rtx, stack_pointer_rtx));

  if (amount)
    {
      amount = ROUND_UP (amount);

      if (amount < 512)
	emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
			       GEN_INT (- amount)));
      else
	{
	  int regno;
	  rtx reg;

	  /* The stack decrement is too big for an immediate value in a single
	     insn.  In theory we could issue multiple subtracts, but after
	     three of them it becomes more space efficient to place the full
	     value in the constant pool and load into a register.  (Also the
	     ARM debugger really likes to see only one stack decrement per
	     function).  So instead we look for a scratch register into which
	     we can load the decrement, and then we subtract this from the
	     stack pointer.  Unfortunately on the thumb the only available
	     scratch registers are the argument registers, and we cannot use
	     these as they may hold arguments to the function.  Instead we
	     attempt to locate a call preserved register which is used by this
	     function.  If we can find one, then we know that it will have
	     been pushed at the start of the prologue and so we can corrupt
	     it now.  */
	  for (regno = LAST_ARG_REGNUM + 1; regno <= LAST_LO_REGNUM; regno++)
	    if (regs_ever_live[regno]
		&& !call_used_regs[regno] /* Paranoia */
		&& !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register))
		&& !(frame_pointer_needed
		     && (regno == THUMB_HARD_FRAME_POINTER_REGNUM)))
	      break;

	  if (regno > LAST_LO_REGNUM) /* Very unlikely */
	    {
	      rtx spare = gen_rtx (REG, SImode, IP_REGNUM);

	      /* Choose an arbitrary, non-argument low register.  */
	      reg = gen_rtx (REG, SImode, LAST_LO_REGNUM);

	      /* Save it by copying it into a high, scratch register.  */
	      emit_insn (gen_movsi (spare, reg));
	      /* Add a USE to stop propagate_one_insn() from barfing.  */
	      emit_insn (gen_prologue_use (spare));

	      /* Decrement the stack.  */
	      emit_insn (gen_movsi (reg, GEN_INT (- amount)));
	      emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
				     reg));

	      /* Restore the low register's original value.  */
	      emit_insn (gen_movsi (reg, spare));

	      /* Emit a USE of the restored scratch register, so that flow
		 analysis will not consider the restore redundant.  The
		 register won't be used again in this function and isn't
		 restored by the epilogue.  */
	      emit_insn (gen_prologue_use (reg));
	    }
	  else
	    {
	      reg = gen_rtx (REG, SImode, regno);

	      emit_insn (gen_movsi (reg, GEN_INT (- amount)));
	      emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
				     reg));
	    }
	}
    }

  if (current_function_profile || TARGET_NO_SCHED_PRO)
    emit_insn (gen_blockage ());
}
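/* Illustrative sketch (unused): why the prologue above branches on
   amount < 512.  Thumb's "sub sp, #imm" encodes a 7-bit immediate
   scaled by 4, so only word-aligned decrements up to 508 bytes fit in
   a single instruction; larger frames need the constant loaded into a
   register first.  A standalone restatement of that test:  */
static int
frame_decrement_fits_insn (long amount)
{
  /* AMOUNT is assumed already rounded up to a multiple of 4, as
     ROUND_UP guarantees in the prologue above.  */
  return amount > 0 && amount < 512 && (amount & 3) == 0;
}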
void
thumb_expand_epilogue ()
{
  HOST_WIDE_INT amount = (get_frame_size ()
			  + current_function_outgoing_args_size);

  /* Naked functions don't have epilogues.  */
  if (IS_NAKED (arm_current_func_type ()))
    return;

  if (frame_pointer_needed)
    emit_insn (gen_movsi (stack_pointer_rtx, hard_frame_pointer_rtx));
  else if (amount)
    {
      amount = ROUND_UP (amount);

      if (amount < 512)
	emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
			       GEN_INT (amount)));
      else
	{
	  /* r3 is always free in the epilogue.  */
	  rtx reg = gen_rtx (REG, SImode, LAST_ARG_REGNUM);

	  emit_insn (gen_movsi (reg, GEN_INT (amount)));
	  emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
	}
    }

  /* Emit a USE (stack_pointer_rtx), so that
     the stack adjustment will not be deleted.  */
  emit_insn (gen_prologue_use (stack_pointer_rtx));

  if (current_function_profile || TARGET_NO_SCHED_PRO)
    emit_insn (gen_blockage ());
}
static void
thumb_output_function_prologue (f, size)
     FILE * f;
     HOST_WIDE_INT size ATTRIBUTE_UNUSED;
{
  int live_regs_mask = 0;
  int high_regs_pushed = 0;
  int regno;

  if (IS_NAKED (arm_current_func_type ()))
    return;

  if (is_called_in_ARM_mode (current_function_decl))
    {
      const char * name;

      if (GET_CODE (DECL_RTL (current_function_decl)) != MEM)
	abort ();
      if (GET_CODE (XEXP (DECL_RTL (current_function_decl), 0)) != SYMBOL_REF)
	abort ();
      name = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);

      /* Generate code sequence to switch us into Thumb mode.  */
      /* The .code 32 directive has already been emitted by
	 ASM_DECLARE_FUNCTION_NAME.  */
      asm_fprintf (f, "\torr\t%r, %r, #1\n", IP_REGNUM, PC_REGNUM);
      asm_fprintf (f, "\tbx\t%r\n", IP_REGNUM);

      /* Generate a label, so that the debugger will notice the
	 change in instruction sets.  This label is also used by
	 the assembler to bypass the ARM code when this function
	 is called from a Thumb encoded function elsewhere in the
	 same file.  Hence the definition of STUB_NAME here must
	 agree with the definition in gas/config/tc-arm.c  */

#define STUB_NAME ".real_start_of"

      asm_fprintf (f, "\t.code\t16\n");
#ifdef ARM_PE
      if (arm_dllexport_name_p (name))
	name = arm_strip_name_encoding (name);
#endif
      asm_fprintf (f, "\t.globl %s%U%s\n", STUB_NAME, name);
      asm_fprintf (f, "\t.thumb_func\n");
      asm_fprintf (f, "%s%U%s:\n", STUB_NAME, name);
    }

  if (current_function_pretend_args_size)
    {
      if (cfun->machine->uses_anonymous_args)
	{
	  int num_pushes;

	  asm_fprintf (f, "\tpush\t{");

	  num_pushes = NUM_INTS (current_function_pretend_args_size);

	  for (regno = LAST_ARG_REGNUM + 1 - num_pushes;
	       regno <= LAST_ARG_REGNUM;
	       regno++)
	    asm_fprintf (f, "%r%s", regno,
			 regno == LAST_ARG_REGNUM ? "" : ", ");

	  asm_fprintf (f, "}\n");
	}
      else
	asm_fprintf (f, "\tsub\t%r, %r, #%d\n",
		     SP_REGNUM, SP_REGNUM,
		     current_function_pretend_args_size);
    }

  for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
    if (regs_ever_live[regno] && !call_used_regs[regno]
	&& !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
      live_regs_mask |= 1 << regno;

  if (live_regs_mask || !leaf_function_p () || thumb_far_jump_used_p (1))
    live_regs_mask |= 1 << LR_REGNUM;

  if (TARGET_BACKTRACE)
    {
      int offset;
      int work_register = 0;
      int wr;

      /* We have been asked to create a stack backtrace structure.
	 The code looks like this:

	  0   .align 2
	  0   func:
	  0     sub   SP, #16         Reserve space for 4 registers.
	  2     push  {R7}            Get a work register.
	  4     add   R7, SP, #20     Get the stack pointer before the push.
	  6     str   R7, [SP, #8]    Store the stack pointer (before reserving the space).
	  8     mov   R7, PC          Get hold of the start of this code plus 12.
	 10     str   R7, [SP, #16]   Store it.
	 12     mov   R7, FP          Get hold of the current frame pointer.
	 14     str   R7, [SP, #4]    Store it.
	 16     mov   R7, LR          Get hold of the current return address.
	 18     str   R7, [SP, #12]   Store it.
	 20     add   R7, SP, #16     Point at the start of the backtrace structure.
	 22     mov   FP, R7          Put this value into the frame pointer.  */

      if ((live_regs_mask & 0xFF) == 0)
	{
	  /* See if the a4 register is free.  */

	  if (regs_ever_live [LAST_ARG_REGNUM] == 0)
	    work_register = LAST_ARG_REGNUM;
	  else	/* We must push a register of our own.  */
	    live_regs_mask |= (1 << LAST_LO_REGNUM);
	}

      if (work_register == 0)
	{
	  /* Select a register from the list that will be pushed to
	     use as our work register.  */
	  for (work_register = (LAST_LO_REGNUM + 1); work_register--;)
	    if ((1 << work_register) & live_regs_mask)
	      break;
	}

      asm_fprintf
	(f, "\tsub\t%r, %r, #16\t%@ Create stack backtrace structure\n",
	 SP_REGNUM, SP_REGNUM);

      if (live_regs_mask)
	thumb_pushpop (f, live_regs_mask, 1);

      for (offset = 0, wr = 1 << 15; wr != 0; wr >>= 1)
	if (wr & live_regs_mask)
	  offset += 4;

      asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
		   offset + 16 + current_function_pretend_args_size);

      asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
		   offset + 4);

      /* Make sure that the instruction fetching the PC is in the right place
	 to calculate "start of backtrace creation code + 12".  */
      if (live_regs_mask)
	{
	  asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
	  asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
		       offset + 12);
	  asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
		       ARM_HARD_FRAME_POINTER_REGNUM);
	  asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
		       offset);
	}
      else
	{
	  asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
		       ARM_HARD_FRAME_POINTER_REGNUM);
	  asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
		       offset);
	  asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
	  asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
		       offset + 12);
	}

      asm_fprintf (f, "\tmov\t%r, %r\n", work_register, LR_REGNUM);
      asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
		   offset + 8);
      asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
		   offset + 12);
      asm_fprintf (f, "\tmov\t%r, %r\t\t%@ Backtrace structure created\n",
		   ARM_HARD_FRAME_POINTER_REGNUM, work_register);
    }
  else if (live_regs_mask)
    thumb_pushpop (f, live_regs_mask, 1);

  for (regno = 8; regno < 13; regno++)
    {
      if (regs_ever_live[regno] && !call_used_regs[regno]
	  && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
	high_regs_pushed++;
    }

  if (high_regs_pushed)
    {
      int pushable_regs = 0;
      int mask = live_regs_mask & 0xff;
      int next_hi_reg;

      for (next_hi_reg = 12; next_hi_reg > LAST_LO_REGNUM; next_hi_reg--)
	{
	  if (regs_ever_live[next_hi_reg] && !call_used_regs[next_hi_reg]
	      && !(TARGET_SINGLE_PIC_BASE
		   && (next_hi_reg == arm_pic_register)))
	    break;
	}

      pushable_regs = mask;

      if (pushable_regs == 0)
	{
	  /* Desperation time -- this probably will never happen.  */
	  if (regs_ever_live[LAST_ARG_REGNUM]
	      || !call_used_regs[LAST_ARG_REGNUM])
	    asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, LAST_ARG_REGNUM);
	  mask = 1 << LAST_ARG_REGNUM;
	}

      while (high_regs_pushed > 0)
	{
	  for (regno = LAST_LO_REGNUM; regno >= 0; regno--)
	    {
	      if (mask & (1 << regno))
		{
		  asm_fprintf (f, "\tmov\t%r, %r\n", regno, next_hi_reg);

		  high_regs_pushed--;

		  if (high_regs_pushed)
		    for (next_hi_reg--; next_hi_reg > LAST_LO_REGNUM;
			 next_hi_reg--)
		      {
			if (regs_ever_live[next_hi_reg]
			    && !call_used_regs[next_hi_reg]
			    && !(TARGET_SINGLE_PIC_BASE
				 && (next_hi_reg == arm_pic_register)))
			  break;
		      }
		  else
		    {
		      mask &= ~((1 << regno) - 1);
		      break;
		    }
		}
	    }

	  thumb_pushpop (f, mask, 1);
	}

      if (pushable_regs == 0
	  && (regs_ever_live[LAST_ARG_REGNUM]
	      || !call_used_regs[LAST_ARG_REGNUM]))
	asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
    }
}
/* Handle the case of a double word load into a low register from
   a computed memory address.  The computed address may involve a
   register which is overwritten by the load.  */

const char *
thumb_load_double_from_address (operands)
     rtx *operands;
{
  rtx addr;
  rtx base;
  rtx offset;
  rtx arg1;
  rtx arg2;

  if (GET_CODE (operands[0]) != REG)
    abort ();

  if (GET_CODE (operands[1]) != MEM)
    abort ();

  /* Get the memory address.  */
  addr = XEXP (operands[1], 0);

  /* Work out how the memory address is computed.  */
  switch (GET_CODE (addr))
    {
    case REG:
      operands[2] = gen_rtx (MEM, SImode,
			     plus_constant (XEXP (operands[1], 0), 4));

      if (REGNO (operands[0]) == REGNO (addr))
	{
	  output_asm_insn ("ldr\t%H0, %2", operands);
	  output_asm_insn ("ldr\t%0, %1", operands);
	}
      else
	{
	  output_asm_insn ("ldr\t%0, %1", operands);
	  output_asm_insn ("ldr\t%H0, %2", operands);
	}
      break;

    case CONST:
      /* Compute <address> + 4 for the high order load.  */
      operands[2] = gen_rtx (MEM, SImode,
			     plus_constant (XEXP (operands[1], 0), 4));

      output_asm_insn ("ldr\t%0, %1", operands);
      output_asm_insn ("ldr\t%H0, %2", operands);
      break;

    case PLUS:
      arg1 = XEXP (addr, 0);
      arg2 = XEXP (addr, 1);

      if (CONSTANT_P (arg1))
	base = arg2, offset = arg1;
      else
	base = arg1, offset = arg2;

      if (GET_CODE (base) != REG)
	abort ();

      /* Catch the case of <address> = <reg> + <reg> */
      if (GET_CODE (offset) == REG)
	{
	  int reg_offset = REGNO (offset);
	  int reg_base = REGNO (base);
	  int reg_dest = REGNO (operands[0]);

	  /* Add the base and offset registers together into the
	     higher destination register.  */
	  asm_fprintf (asm_out_file, "\tadd\t%r, %r, %r",
		       reg_dest + 1, reg_base, reg_offset);

	  /* Load the lower destination register from the address in
	     the higher destination register.  */
	  asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #0]",
		       reg_dest, reg_dest + 1);

	  /* Load the higher destination register from its own address
	     plus 4.  */
	  asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #4]",
		       reg_dest + 1, reg_dest + 1);
	}
      else
	{
	  /* Compute <address> + 4 for the high order load.  */
	  operands[2] = gen_rtx (MEM, SImode,
				 plus_constant (XEXP (operands[1], 0), 4));

	  /* If the computed address is held in the low order register
	     then load the high order register first, otherwise always
	     load the low order register first.  */
	  if (REGNO (operands[0]) == REGNO (base))
	    {
	      output_asm_insn ("ldr\t%H0, %2", operands);
	      output_asm_insn ("ldr\t%0, %1", operands);
	    }
	  else
	    {
	      output_asm_insn ("ldr\t%0, %1", operands);
	      output_asm_insn ("ldr\t%H0, %2", operands);
	    }
	}
      break;

    case LABEL_REF:
      /* With no registers to worry about we can just load the value
	 directly.  */
      operands[2] = gen_rtx (MEM, SImode,
			     plus_constant (XEXP (operands[1], 0), 4));

      output_asm_insn ("ldr\t%H0, %2", operands);
      output_asm_insn ("ldr\t%0, %1", operands);
      break;

    default:
      abort ();
      break;
    }

  return "";
}
const char *
thumb_output_move_mem_multiple (n, operands)
     int n;
     rtx * operands;
{
  rtx tmp;

  switch (n)
    {
    case 2:
      if (REGNO (operands[4]) > REGNO (operands[5]))
	{
	  tmp = operands[4];
	  operands[4] = operands[5];
	  operands[5] = tmp;
	}
      output_asm_insn ("ldmia\t%1!, {%4, %5}", operands);
      output_asm_insn ("stmia\t%0!, {%4, %5}", operands);
      break;

    case 3:
      if (REGNO (operands[4]) > REGNO (operands[5]))
	{
	  tmp = operands[4];
	  operands[4] = operands[5];
	  operands[5] = tmp;
	}
      if (REGNO (operands[5]) > REGNO (operands[6]))
	{
	  tmp = operands[5];
	  operands[5] = operands[6];
	  operands[6] = tmp;
	}
      if (REGNO (operands[4]) > REGNO (operands[5]))
	{
	  tmp = operands[4];
	  operands[4] = operands[5];
	  operands[5] = tmp;
	}

      output_asm_insn ("ldmia\t%1!, {%4, %5, %6}", operands);
      output_asm_insn ("stmia\t%0!, {%4, %5, %6}", operands);
      break;

    default:
      abort ();
    }

  return "";
}
/* Routines for generating rtl.  */

void
thumb_expand_movstrqi (operands)
     rtx * operands;
{
  rtx out = copy_to_mode_reg (SImode, XEXP (operands[0], 0));
  rtx in  = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
  HOST_WIDE_INT len = INTVAL (operands[2]);
  HOST_WIDE_INT offset = 0;

  while (len >= 12)
    {
      emit_insn (gen_movmem12b (out, in, out, in));
      len -= 12;
    }

  if (len >= 8)
    {
      emit_insn (gen_movmem8b (out, in, out, in));
      len -= 8;
    }

  if (len >= 4)
    {
      rtx reg = gen_reg_rtx (SImode);
      emit_insn (gen_movsi (reg, gen_rtx (MEM, SImode, in)));
      emit_insn (gen_movsi (gen_rtx (MEM, SImode, out), reg));
      len -= 4;
      offset += 4;
    }

  if (len >= 2)
    {
      rtx reg = gen_reg_rtx (HImode);
      emit_insn (gen_movhi (reg, gen_rtx (MEM, HImode,
					  plus_constant (in, offset))));
      emit_insn (gen_movhi (gen_rtx (MEM, HImode, plus_constant (out, offset)),
			    reg));
      len -= 2;
      offset += 2;
    }

  if (len)
    {
      rtx reg = gen_reg_rtx (QImode);
      emit_insn (gen_movqi (reg, gen_rtx (MEM, QImode,
					  plus_constant (in, offset))));
      emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (out, offset)),
			    reg));
    }
}
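/* Illustrative sketch (unused): the expander above decomposes a
   fixed-length copy greedily into 12-byte and 8-byte multi-register
   blocks, then word, halfword and byte moves.  The same schedule in
   plain C; CHUNK is a hypothetical callback standing in for the insn
   emission:  */
static void
copy_schedule (long len, void (*chunk) (int size))
{
  while (len >= 12) { chunk (12); len -= 12; }	/* ldmia/stmia x3  */
  if (len >= 8)     { chunk (8);  len -= 8;  }	/* ldmia/stmia x2  */
  if (len >= 4)     { chunk (4);  len -= 4;  }	/* word move  */
  if (len >= 2)     { chunk (2);  len -= 2;  }	/* halfword move  */
  if (len >= 1)     chunk (1);			/* final byte  */
}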
int
thumb_cmp_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return ((GET_CODE (op) == CONST_INT
	   && (unsigned HOST_WIDE_INT) (INTVAL (op)) < 256)
	  || register_operand (op, mode));
}
static const char *
thumb_condition_code (x, invert)
     rtx x;
     int invert;
{
  static const char * const conds[] =
  {
    "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
    "hi", "ls", "ge", "lt", "gt", "le"
  };
  int val;

  switch (GET_CODE (x))
    {
    case EQ: val = 0; break;
    case NE: val = 1; break;
    case GEU: val = 2; break;
    case LTU: val = 3; break;
    case GTU: val = 8; break;
    case LEU: val = 9; break;
    case GE: val = 10; break;
    case LT: val = 11; break;
    case GT: val = 12; break;
    case LE: val = 13; break;
    default:
      abort ();
    }

  return conds[val ^ invert];
}
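/* Illustrative sketch (unused): the conds[] table above pairs each
   condition with its inverse at the adjacent index (eq/ne, cs/cc,
   hi/ls, ...), so XOR-ing the index with 1 inverts the condition with
   no second table.  Restated standalone:  */
static const char *
cond_name (int val, int invert)
{
  static const char * const conds[] =
  {
    "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
    "hi", "ls", "ge", "lt", "gt", "le"
  };

  return conds[val ^ (invert != 0)];	/* e.g. (0, 1) yields "ne".  */
}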
/* Handle storing a half-word to memory during reload.  */
void
thumb_reload_out_hi (operands)
     rtx * operands;
{
  emit_insn (gen_thumb_movhi_clobber (operands[0], operands[1], operands[2]));
}

/* Handle reading a half-word from memory during reload.  */
void
thumb_reload_in_hi (operands)
     rtx * operands ATTRIBUTE_UNUSED;
{
  abort ();
}
/* Return the length of a function name prefix
   that starts with the character 'c'.  */
static int
arm_get_strip_length (char c)
{
  switch (c)
    {
      ARM_NAME_ENCODING_LENGTHS
      default: return 0;
    }
}

/* Return a pointer to a function's name with any
   and all prefix encodings stripped from it.  */
const char *
arm_strip_name_encoding (const char * name)
{
  int skip;

  while ((skip = arm_get_strip_length (* name)))
    name += skip;

  return name;
}
#ifdef AOF_ASSEMBLER
/* Special functions only needed when producing AOF syntax assembler.  */

rtx aof_pic_label = NULL_RTX;

struct pic_chain
{
  struct pic_chain * next;
  const char * symname;
};

static struct pic_chain * aof_pic_chain = NULL;

rtx
aof_pic_entry (x)
     rtx x;
{
  struct pic_chain ** chainp;
  int offset;

  if (aof_pic_label == NULL_RTX)
    {
      /* We mark this here and not in arm_add_gc_roots() to avoid
	 polluting even more code with ifdefs, and because it never
	 contains anything useful until we assign to it here.  */
      ggc_add_rtx_root (&aof_pic_label, 1);
      aof_pic_label = gen_rtx_SYMBOL_REF (Pmode, "x$adcons");
    }

  for (offset = 0, chainp = &aof_pic_chain; *chainp;
       offset += 4, chainp = &(*chainp)->next)
    if ((*chainp)->symname == XSTR (x, 0))
      return plus_constant (aof_pic_label, offset);

  *chainp = (struct pic_chain *) xmalloc (sizeof (struct pic_chain));
  (*chainp)->next = NULL;
  (*chainp)->symname = XSTR (x, 0);
  return plus_constant (aof_pic_label, offset);
}
void
aof_dump_pic_table (f)
     FILE * f;
{
  struct pic_chain * chain;

  if (aof_pic_chain == NULL)
    return;

  asm_fprintf (f, "\tAREA |%r$$adcons|, BASED %r\n",
	       PIC_OFFSET_TABLE_REGNUM,
	       PIC_OFFSET_TABLE_REGNUM);
  fputs ("|x$adcons|\n", f);

  for (chain = aof_pic_chain; chain; chain = chain->next)
    {
      fputs ("\tDCD\t", f);
      assemble_name (f, chain->symname);
      fputs ("\n", f);
    }
}

int arm_text_section_count = 1;

char *
aof_text_section ()
{
  static char buf [100];
  sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
	   arm_text_section_count++);
  if (flag_pic)
    strcat (buf, ", PIC, REENTRANT");
  return buf;
}

static int arm_data_section_count = 1;

char *
aof_data_section ()
{
  static char buf [100];
  sprintf (buf, "\tAREA |C$$data%d|, DATA", arm_data_section_count++);
  return buf;
}
/* The AOF assembler is religiously strict about declarations of
   imported and exported symbols, so that it is impossible to declare
   a function as imported near the beginning of the file, and then to
   export it later on.  It is, however, possible to delay the decision
   until all the functions in the file have been compiled.  To get
   around this, we maintain a list of the imports and exports, and
   delete from it any that are subsequently defined.  At the end of
   compilation we spit the remainder of the list out before the END
   directive.  */

struct import
{
  struct import * next;
  const char * name;
};

static struct import * imports_list = NULL;

void
aof_add_import (name)
     const char * name;
{
  struct import * new;

  for (new = imports_list; new; new = new->next)
    if (new->name == name)
      return;

  new = (struct import *) xmalloc (sizeof (struct import));
  new->next = imports_list;
  imports_list = new;
  new->name = name;
}

void
aof_delete_import (name)
     const char * name;
{
  struct import ** old;

  for (old = &imports_list; *old; old = & (*old)->next)
    {
      if ((*old)->name == name)
	{
	  *old = (*old)->next;
	  return;
	}
    }
}

int arm_main_function = 0;

void
aof_dump_imports (f)
     FILE * f;
{
  /* The AOF assembler needs this to cause the startup code to be extracted
     from the library.  Bringing in __main causes the whole thing to work
     automagically.  */
  if (arm_main_function)
    {
      text_section ();
      fputs ("\tIMPORT __main\n", f);
      fputs ("\tDCD __main\n", f);
    }

  /* Now dump the remaining imports.  */
  while (imports_list)
    {
      fprintf (f, "\tIMPORT\t");
      assemble_name (f, imports_list->name);
      fputc ('\n', f);
      imports_list = imports_list->next;
    }
}
#endif /* AOF_ASSEMBLER */
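/* Illustrative sketch (unused): aof_delete_import above walks the list
   with a pointer to the link field itself, so unlinking needs no
   special case for the head node.  The same pattern on a generic node
   type invented for this sketch:  */
struct example_node { struct example_node *next; int key; };

static void
unlink_key (struct example_node **head, int key)
{
  struct example_node **link;

  for (link = head; *link; link = &(*link)->next)
    if ((*link)->key == key)
      {
	*link = (*link)->next;	/* Splice the node out in place.  */
	return;
      }
}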
#ifdef OBJECT_FORMAT_ELF
/* Switch to an arbitrary section NAME with attributes as specified
   by FLAGS.  ALIGN specifies any known alignment requirements for
   the section; 0 if the default should be used.

   Differs from the default elf version only in the prefix character
   used before the section type.  */

static void
arm_elf_asm_named_section (name, flags)
     const char * name;
     unsigned int flags;
{
  char flagchars [8], *f = flagchars;
  const char * type;

  if (!(flags & SECTION_DEBUG))
    *f++ = 'a';
  if (flags & SECTION_WRITE)
    *f++ = 'w';
  if (flags & SECTION_CODE)
    *f++ = 'x';
  if (flags & SECTION_SMALL)
    *f++ = 's';
  if (flags & SECTION_MERGE)
    *f++ = 'M';
  if (flags & SECTION_STRINGS)
    *f++ = 'S';
  *f = '\0';

  if (flags & SECTION_BSS)
    type = "nobits";
  else
    type = "progbits";

  if (flags & SECTION_ENTSIZE)
    fprintf (asm_out_file, "\t.section\t%s,\"%s\",%%%s,%d\n",
	     name, flagchars, type, flags & SECTION_ENTSIZE);
  else
    fprintf (asm_out_file, "\t.section\t%s,\"%s\",%%%s\n",
	     name, flagchars, type);
}
#endif