Remove spurious test of XScale and HARD_FLOAT flags
[official-gcc.git] / gcc / config / arm / arm.c
blob cdcc3667f7478d3cc453c1b2ac6ea5a40a8e2eaa
1 /* Output routines for GCC for ARM.
2 Copyright (C) 1991, 93, 94, 95, 96, 97, 98, 99, 2000 Free Software Foundation, Inc.
3 Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 and Martin Simmons (@harleqn.co.uk).
5 More major hacks by Richard Earnshaw (rearnsha@arm.com).
7 This file is part of GNU CC.
9 GNU CC is free software; you can redistribute it and/or modify
10 it under the terms of the GNU General Public License as published by
11 the Free Software Foundation; either version 2, or (at your option)
12 any later version.
14 GNU CC is distributed in the hope that it will be useful,
15 but WITHOUT ANY WARRANTY; without even the implied warranty of
16 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 GNU General Public License for more details.
19 You should have received a copy of the GNU General Public License
20 along with GNU CC; see the file COPYING. If not, write to
21 the Free Software Foundation, 59 Temple Place - Suite 330,
22 Boston, MA 02111-1307, USA. */
24 #include "config.h"
25 #include "system.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "obstack.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "real.h"
32 #include "insn-config.h"
33 #include "conditions.h"
34 #include "insn-flags.h"
35 #include "output.h"
36 #include "insn-attr.h"
37 #include "flags.h"
38 #include "reload.h"
39 #include "function.h"
40 #include "expr.h"
41 #include "toplev.h"
42 #include "recog.h"
43 #include "ggc.h"
44 #include "except.h"
45 #include "c-pragma.h"
46 #include "tm_p.h"
48 /* Forward definitions of types. */
49 typedef struct minipool_node Mnode;
50 typedef struct minipool_fixup Mfix;
52 /* In order to improve the layout of the prototypes below
53 some short type abbreviations are defined here. */
54 #define Hint HOST_WIDE_INT
55 #define Mmode enum machine_mode
56 #define Ulong unsigned long
58 /* Forward function declarations. */
59 static void arm_add_gc_roots PARAMS ((void));
60 static int arm_gen_constant PARAMS ((enum rtx_code, Mmode, Hint, rtx, rtx, int, int));
61 static int arm_naked_function_p PARAMS ((tree));
62 static Ulong bit_count PARAMS ((signed int));
63 static int const_ok_for_op PARAMS ((Hint, enum rtx_code));
64 static int eliminate_lr2ip PARAMS ((rtx *));
65 static rtx emit_multi_reg_push PARAMS ((int));
66 static rtx emit_sfm PARAMS ((int, int));
67 static const char * fp_const_from_val PARAMS ((REAL_VALUE_TYPE *));
68 static arm_cc get_arm_condition_code PARAMS ((rtx));
69 static void init_fpa_table PARAMS ((void));
70 static Hint int_log2 PARAMS ((Hint));
71 static rtx is_jump_table PARAMS ((rtx));
72 static const char * output_multi_immediate PARAMS ((rtx *, const char *, const char *, int, Hint));
73 static void print_multi_reg PARAMS ((FILE *, const char *, int, int, int));
74 static Mmode select_dominance_cc_mode PARAMS ((rtx, rtx, Hint));
75 static const char * shift_op PARAMS ((rtx, Hint *));
76 static void arm_init_machine_status PARAMS ((struct function *));
77 static void arm_mark_machine_status PARAMS ((struct function *));
78 static int number_of_first_bit_set PARAMS ((int));
79 static void replace_symbols_in_block PARAMS ((tree, rtx, rtx));
80 static void thumb_exit PARAMS ((FILE *, int, rtx));
81 static void thumb_pushpop PARAMS ((FILE *, int, int));
82 static const char * thumb_condition_code PARAMS ((rtx, int));
83 static rtx is_jump_table PARAMS ((rtx));
84 static Hint get_jump_table_size PARAMS ((rtx));
85 static Mnode * move_minipool_fix_forward_ref PARAMS ((Mnode *, Mnode *, Hint));
86 static Mnode * add_minipool_forward_ref PARAMS ((Mfix *));
87 static Mnode * move_minipool_fix_backward_ref PARAMS ((Mnode *, Mnode *, Hint));
88 static Mnode * add_minipool_backward_ref PARAMS ((Mfix *));
89 static void assign_minipool_offsets PARAMS ((Mfix *));
90 static void arm_print_value PARAMS ((FILE *, rtx));
91 static void dump_minipool PARAMS ((rtx));
92 static int arm_barrier_cost PARAMS ((rtx));
93 static Mfix * create_fix_barrier PARAMS ((Mfix *, Hint));
94 static void push_minipool_barrier PARAMS ((rtx, Hint));
95 static void push_minipool_fix PARAMS ((rtx, Hint, rtx *, Mmode, rtx));
96 static void note_invalid_constants PARAMS ((rtx, Hint));
97 static int current_file_function_operand PARAMS ((rtx));
99 #undef Hint
100 #undef Mmode
101 #undef Ulong
103 /* Obstack for minipool constant handling. */
104 static struct obstack minipool_obstack;
105 static char *minipool_startobj;
107 #define obstack_chunk_alloc xmalloc
108 #define obstack_chunk_free free
110 /* The maximum number of insns skipped which will be conditionalised if
111 possible. */
112 static int max_insns_skipped = 5;
114 extern FILE * asm_out_file;
116 /* True if we are currently building a constant table. */
117 int making_const_table;
119 /* Define the information needed to generate branch insns. This is
120 stored from the compare operation. */
121 rtx arm_compare_op0, arm_compare_op1;
123 /* What type of floating point are we tuning for? */
124 enum floating_point_type arm_fpu;
126 /* What type of floating point instructions are available? */
127 enum floating_point_type arm_fpu_arch;
129 /* What program mode is the cpu running in? 26-bit mode or 32-bit mode. */
130 enum prog_mode_type arm_prgmode;
132 /* Set by the -mfp=... option. */
133 const char * target_fp_name = NULL;
136 /* Used to parse the -mstructure_size_boundary command line option. */
136 const char * structure_size_string = NULL;
137 int arm_structure_size_boundary = DEFAULT_STRUCTURE_SIZE_BOUNDARY;
139 /* Bit values used to identify processor capabilities. */
140 #define FL_CO_PROC (1 << 0) /* Has external co-processor bus */
141 #define FL_FAST_MULT (1 << 1) /* Fast multiply */
142 #define FL_MODE26 (1 << 2) /* 26-bit mode support */
143 #define FL_MODE32 (1 << 3) /* 32-bit mode support */
144 #define FL_ARCH4 (1 << 4) /* Architecture rel 4 */
145 #define FL_ARCH5 (1 << 5) /* Architecture rel 5 */
146 #define FL_THUMB (1 << 6) /* Thumb aware */
147 #define FL_LDSCHED (1 << 7) /* Load scheduling necessary */
148 #define FL_STRONG (1 << 8) /* StrongARM */
149 #define FL_ARCH5E (1 << 9) /* El Segundo extensions to v5 */
150 #define FL_XSCALE (1 << 10) /* XScale */
152 /* The bits in this mask specify which instructions we are
153 allowed to generate. */
154 static int insn_flags = 0;
156 /* The bits in this mask specify which instruction scheduling options should
157 be used. Note - there is an overlap with the FL_FAST_MULT flag. For some
158 hardware we want to be able to generate the multiply instructions, but to
159 tune as if they were not present in the architecture. */
160 static int tune_flags = 0;
162 /* The following are used in the arm.md file as equivalents to bits
163 in the above two flag variables. */
165 /* Nonzero if this is an "M" variant of the processor. */
166 int arm_fast_multiply = 0;
168 /* Nonzero if this chip supports the ARM Architecture 4 extensions. */
169 int arm_arch4 = 0;
171 /* Nonzero if this chip supports the ARM Architecture 5 extensions. */
172 int arm_arch5 = 0;
174 /* Nonzero if this chip can benefit from load scheduling. */
175 int arm_ld_sched = 0;
177 /* Nonzero if this chip is a StrongARM. */
178 int arm_is_strong = 0;
180 /* Nonzero if this chip is an XScale. */
181 int arm_is_xscale = 0;
183 /* Nonzero if this chip is an ARM6 or an ARM7. */
184 int arm_is_6_or_7 = 0;
186 /* Nonzero if generating Thumb instructions. */
187 int thumb_code = 0;
189 /* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
190 must report the mode of the memory reference from PRINT_OPERAND to
191 PRINT_OPERAND_ADDRESS. */
192 enum machine_mode output_memory_reference_mode;
194 /* Nonzero if the prologue must set up `fp'. */
195 int current_function_anonymous_args;
197 /* The register number to be used for the PIC offset register. */
198 const char * arm_pic_register_string = NULL;
199 int arm_pic_register = 9;
201 /* Set to 1 when a return insn is output; this means that the epilogue
202 is not needed. */
203 int return_used_this_function;
205 /* Set to 1 after arm_reorg has started. Reset at the start of
206 the next function. */
207 static int after_arm_reorg = 0;
209 /* The maximum number of insns to be used when loading a constant. */
210 static int arm_constant_limit = 3;
212 /* For an explanation of these variables, see final_prescan_insn below. */
213 int arm_ccfsm_state;
214 enum arm_cond_code arm_current_cc;
215 rtx arm_target_insn;
216 int arm_target_label;
218 /* The condition codes of the ARM, and the inverse function. */
219 const char * arm_condition_codes[] =
221 "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
222 "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
225 #define streq(string1, string2) (strcmp (string1, string2) == 0)
227 /* Initialization code. */
229 struct processors
231 const char * name;
232 unsigned int flags;
235 /* Not all of these give usefully different compilation alternatives,
236 but there is no simple way of generalizing them. */
237 static struct processors all_cores[] =
239 /* ARM Cores */
241 {"arm2", FL_CO_PROC | FL_MODE26 },
242 {"arm250", FL_CO_PROC | FL_MODE26 },
243 {"arm3", FL_CO_PROC | FL_MODE26 },
244 {"arm6", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
245 {"arm60", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
246 {"arm600", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
247 {"arm610", FL_MODE26 | FL_MODE32 },
248 {"arm620", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
249 {"arm7", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
250 /* arm7m doesn't exist on its own, but only with D (and I), but
251 those don't alter the code, so arm7m is sometimes used. */
252 {"arm7m", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
253 {"arm7d", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
254 {"arm7dm", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
255 {"arm7di", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
256 {"arm7dmi", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
257 {"arm70", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
258 {"arm700", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
259 {"arm700i", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
260 {"arm710", FL_MODE26 | FL_MODE32 },
261 {"arm720", FL_MODE26 | FL_MODE32 },
262 {"arm710c", FL_MODE26 | FL_MODE32 },
263 {"arm7100", FL_MODE26 | FL_MODE32 },
264 {"arm7500", FL_MODE26 | FL_MODE32 },
265 /* Doesn't have an external co-proc, but does have embedded fpu. */
266 {"arm7500fe", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
267 {"arm7tdmi", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
268 {"arm8", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
269 {"arm810", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
270 {"arm9", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
271 {"arm920", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
272 {"arm920t", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
273 {"arm9tdmi", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
274 {"strongarm", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
275 {"strongarm110", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
276 {"strongarm1100", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
277 {"xscale", FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_STRONG | FL_XSCALE | FL_ARCH5 },
279 {NULL, 0}
282 static struct processors all_architectures[] =
284 /* ARM Architectures */
286 { "armv2", FL_CO_PROC | FL_MODE26 },
287 { "armv2a", FL_CO_PROC | FL_MODE26 },
288 { "armv3", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
289 { "armv3m", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
290 { "armv4", FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 },
291 /* Strictly, FL_MODE26 is a permitted option for v4t, but there are no
292 implementations that support it, so we will leave it out for now. */
293 { "armv4t", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
294 { "armv5", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
295 { "armv5t", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
296 { "armv5te", FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 | FL_ARCH5E },
297 { NULL, 0 }
300 /* This is a magic structure. The 'string' field is magically filled in
301 with a pointer to the value specified by the user on the command line
302 assuming that the user has specified such a value. */
304 struct arm_cpu_select arm_select[] =
306 /* string name processors */
307 { NULL, "-mcpu=", all_cores },
308 { NULL, "-march=", all_architectures },
309 { NULL, "-mtune=", all_cores }
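/* Editor's note -- an illustrative aside, not part of the original
   file.  Given a hypothetical command line `-mcpu=strongarm -mtune=arm9',
   the option machinery fills in arm_select[0].string = "strongarm" and
   arm_select[2].string = "arm9"; arm_override_options () below then
   looks each name up in the corresponding processors table.  */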
312 /* Return the number of bits set in `value'. */
313 static unsigned long
314 bit_count (value)
315 signed int value;
317 unsigned long count = 0;
319 while (value)
321 value &= ~(value & -value);
322 ++count;
325 return count;
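/* Editor's note -- an illustrative aside, not part of the original
   file: `value & -value' isolates the lowest set bit, so each pass of
   the loop above clears exactly one bit.  This is the classic Kernighan
   popcount, equivalent to `value &= value - 1'; for example,
   bit_count (0xF0) returns 4 after exactly four iterations.  */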
328 /* Fix up any incompatible options that the user has specified.
329 This has now turned into a maze. */
330 void
331 arm_override_options ()
333 unsigned i;
335 /* Set up the flags based on the cpu/architecture selected by the user. */
336 for (i = ARRAY_SIZE (arm_select); i--;)
338 struct arm_cpu_select * ptr = arm_select + i;
340 if (ptr->string != NULL && ptr->string[0] != '\0')
342 const struct processors * sel;
344 for (sel = ptr->processors; sel->name != NULL; sel++)
345 if (streq (ptr->string, sel->name))
347 if (i == 2)
348 tune_flags = sel->flags;
349 else
351 /* If we have been given an architecture and a processor
352 make sure that they are compatible. We only generate
353 a warning though, and we prefer the CPU over the
354 architecture. */
355 if (insn_flags != 0 && (insn_flags ^ sel->flags))
356 warning ("switch -mcpu=%s conflicts with -march= switch",
357 ptr->string);
359 insn_flags = sel->flags;
362 break;
365 if (sel->name == NULL)
366 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
370 /* If the user did not specify a processor, choose one for them. */
371 if (insn_flags == 0)
373 struct processors * sel;
374 unsigned int sought;
375 static struct cpu_default
377 int cpu;
378 const char * name;
380 cpu_defaults[] =
382 { TARGET_CPU_arm2, "arm2" },
383 { TARGET_CPU_arm6, "arm6" },
384 { TARGET_CPU_arm610, "arm610" },
385 { TARGET_CPU_arm710, "arm710" },
386 { TARGET_CPU_arm7m, "arm7m" },
387 { TARGET_CPU_arm7500fe, "arm7500fe" },
388 { TARGET_CPU_arm7tdmi, "arm7tdmi" },
389 { TARGET_CPU_arm8, "arm8" },
390 { TARGET_CPU_arm810, "arm810" },
391 { TARGET_CPU_arm9, "arm9" },
392 { TARGET_CPU_strongarm, "strongarm" },
393 { TARGET_CPU_xscale, "xscale" },
394 { TARGET_CPU_generic, "arm" },
395 { 0, 0 }
397 struct cpu_default * def;
399 /* Find the default. */
400 for (def = cpu_defaults; def->name; def++)
401 if (def->cpu == TARGET_CPU_DEFAULT)
402 break;
404 /* Make sure we found the default CPU. */
405 if (def->name == NULL)
406 abort ();
408 /* Find the default CPU's flags. */
409 for (sel = all_cores; sel->name != NULL; sel++)
410 if (streq (def->name, sel->name))
411 break;
413 if (sel->name == NULL)
414 abort ();
416 insn_flags = sel->flags;
418 /* Now check to see if the user has specified some command line
419 switches that require certain abilities from the cpu. */
420 sought = 0;
422 if (TARGET_INTERWORK || TARGET_THUMB)
424 sought |= (FL_THUMB | FL_MODE32);
426 /* Force apcs-32 to be used for interworking. */
427 target_flags |= ARM_FLAG_APCS_32;
429 /* There are no ARM processors that support both APCS-26 and
430 interworking. Therefore we force FL_MODE26 to be removed
431 from insn_flags here (if it was set), so that the search
432 below will always be able to find a compatible processor. */
433 insn_flags &= ~FL_MODE26;
435 else if (!TARGET_APCS_32)
436 sought |= FL_MODE26;
438 if (sought != 0 && ((sought & insn_flags) != sought))
440 /* Try to locate a CPU type that supports all of the abilities
441 of the default CPU, plus the extra abilities requested by
442 the user. */
443 for (sel = all_cores; sel->name != NULL; sel++)
444 if ((sel->flags & sought) == (sought | insn_flags))
445 break;
447 if (sel->name == NULL)
449 unsigned int current_bit_count = 0;
450 struct processors * best_fit = NULL;
452 /* Ideally we would like to issue an error message here
453 saying that it was not possible to find a CPU compatible
454 with the default CPU, but which also supports the command
455 line options specified by the programmer, and so they
456 ought to use the -mcpu=<name> command line option to
457 override the default CPU type.
459 Unfortunately this does not work with multilibing. We
460 need to be able to support multilibs for -mapcs-26 and for
461 -mthumb-interwork and there is no CPU that can support both
462 options. Instead if we cannot find a cpu that has both the
463 characteristics of the default cpu and the given command line
464 options we scan the array again looking for a best match. */
465 for (sel = all_cores; sel->name != NULL; sel++)
466 if ((sel->flags & sought) == sought)
468 unsigned int count;
470 count = bit_count (sel->flags & insn_flags);
472 if (count >= current_bit_count)
474 best_fit = sel;
475 current_bit_count = count;
479 if (best_fit == NULL)
480 abort ();
481 else
482 sel = best_fit;
485 insn_flags = sel->flags;
489 /* If tuning has not been specified, tune for whichever processor or
490 architecture has been selected. */
491 if (tune_flags == 0)
492 tune_flags = insn_flags;
494 /* Make sure that the processor choice does not conflict with any of the
495 other command line choices. */
496 if (TARGET_APCS_32 && !(insn_flags & FL_MODE32))
498 /* If APCS-32 was not the default then it must have been set by the
499 user, so issue a warning message. If the user has specified
500 "-mapcs-32 -mcpu=arm2" then we loose here. */
501 if ((TARGET_DEFAULT & ARM_FLAG_APCS_32) == 0)
502 warning ("target CPU does not support APCS-32" );
503 target_flags &= ~ARM_FLAG_APCS_32;
505 else if (!TARGET_APCS_32 && !(insn_flags & FL_MODE26))
507 warning ("target CPU does not support APCS-26" );
508 target_flags |= ARM_FLAG_APCS_32;
511 if (TARGET_INTERWORK && !(insn_flags & FL_THUMB))
513 warning ("target CPU does not support interworking" );
514 target_flags &= ~ARM_FLAG_INTERWORK;
517 if (TARGET_THUMB && !(insn_flags & FL_THUMB))
519 warning ("target CPU does not supoport THUMB instructions.");
520 target_flags &= ~ARM_FLAG_THUMB;
523 if (TARGET_APCS_FRAME && TARGET_THUMB)
525 /* warning ("ignoring -mapcs-frame because -mthumb was used."); */
526 target_flags &= ~ARM_FLAG_APCS_FRAME;
529 /* TARGET_BACKTRACE calls leaf_function_p, which causes a crash if done
530 from here where no function is being compiled currently. */
531 if ((target_flags & (THUMB_FLAG_LEAF_BACKTRACE | THUMB_FLAG_BACKTRACE))
532 && TARGET_ARM)
533 warning ("enabling backtrace support is only meaningful when compiling for the Thumb.");
535 if (TARGET_ARM && TARGET_CALLEE_INTERWORKING)
536 warning ("enabling callee interworking support is only meaningful when compiling for the Thumb.");
538 if (TARGET_ARM && TARGET_CALLER_INTERWORKING)
539 warning ("enabling caller interworking support is only meaningful when compiling for the Thumb.");
541 /* If interworking is enabled then APCS-32 must be selected as well. */
542 if (TARGET_INTERWORK)
544 if (!TARGET_APCS_32)
545 warning ("interworking forces APCS-32 to be used" );
546 target_flags |= ARM_FLAG_APCS_32;
549 if (TARGET_APCS_STACK && !TARGET_APCS_FRAME)
551 warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
552 target_flags |= ARM_FLAG_APCS_FRAME;
555 if (TARGET_POKE_FUNCTION_NAME)
556 target_flags |= ARM_FLAG_APCS_FRAME;
558 if (TARGET_APCS_REENT && flag_pic)
559 fatal ("-fpic and -mapcs-reent are incompatible");
561 if (TARGET_APCS_REENT)
562 warning ("APCS reentrant code not supported. Ignored");
564 /* If this target is normally configured to use APCS frames, warn if they
565 are turned off and debugging is turned on. */
566 if (TARGET_ARM
567 && write_symbols != NO_DEBUG
568 && !TARGET_APCS_FRAME
569 && (TARGET_DEFAULT & ARM_FLAG_APCS_FRAME))
570 warning ("-g with -mno-apcs-frame may not give sensible debugging");
572 /* If stack checking is disabled, we can use r10 as the PIC register,
573 which keeps r9 available. */
574 if (flag_pic && !TARGET_APCS_STACK)
575 arm_pic_register = 10;
577 if (TARGET_APCS_FLOAT)
578 warning ("Passing floating point arguments in fp regs not yet supported");
580 /* Initialise boolean versions of the flags, for use in the arm.md file. */
581 arm_fast_multiply = (insn_flags & FL_FAST_MULT) != 0;
582 arm_arch4 = (insn_flags & FL_ARCH4) != 0;
583 arm_arch5 = (insn_flags & FL_ARCH5) != 0;
584 arm_is_xscale = (insn_flags & FL_XSCALE) != 0;
586 arm_ld_sched = (tune_flags & FL_LDSCHED) != 0;
587 arm_is_strong = (tune_flags & FL_STRONG) != 0;
588 thumb_code = (TARGET_ARM == 0);
589 arm_is_6_or_7 = (((tune_flags & (FL_MODE26 | FL_MODE32))
590 && !(tune_flags & FL_ARCH4))) != 0;
592 /* Default value for floating point code... if no co-processor
593 bus, then schedule for emulated floating point. Otherwise,
594 assume the user has an FPA.
595 Note: this does not prevent use of floating point instructions,
596 -msoft-float does that. */
597 arm_fpu = (tune_flags & FL_CO_PROC) ? FP_HARD : FP_SOFT3;
599 if (target_fp_name)
601 if (streq (target_fp_name, "2"))
602 arm_fpu_arch = FP_SOFT2;
603 else if (streq (target_fp_name, "3"))
604 arm_fpu_arch = FP_SOFT3;
605 else
606 fatal ("Invalid floating point emulation option: -mfpe-%s",
607 target_fp_name);
609 else
610 arm_fpu_arch = FP_DEFAULT;
612 if (TARGET_FPE && arm_fpu != FP_HARD)
613 arm_fpu = FP_SOFT2;
615 /* For arm2/3 there is no need to do any scheduling if there is only
616 a floating point emulator, or we are doing software floating-point. */
617 if ((TARGET_SOFT_FLOAT || arm_fpu != FP_HARD)
618 && (tune_flags & FL_MODE32) == 0)
619 flag_schedule_insns = flag_schedule_insns_after_reload = 0;
621 arm_prgmode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;
623 if (structure_size_string != NULL)
625 int size = strtol (structure_size_string, NULL, 0);
627 if (size == 8 || size == 32)
628 arm_structure_size_boundary = size;
629 else
630 warning ("Structure size boundary can only be set to 8 or 32");
633 if (arm_pic_register_string != NULL)
635 int pic_register;
637 if (!flag_pic)
638 warning ("-mpic-register= is useless without -fpic");
640 pic_register = decode_reg_name (arm_pic_register_string);
642 /* Prevent the user from choosing an obviously stupid PIC register. */
643 if (pic_register < 0 || call_used_regs[pic_register]
644 || pic_register == HARD_FRAME_POINTER_REGNUM
645 || pic_register == STACK_POINTER_REGNUM
646 || pic_register >= PC_REGNUM)
647 error ("Unable to use '%s' for PIC register", arm_pic_register_string);
648 else
649 arm_pic_register = pic_register;
652 if (TARGET_THUMB && flag_schedule_insns)
654 /* Don't warn since it's on by default in -O2. */
655 flag_schedule_insns = 0;
658 /* If optimizing for space, don't synthesize constants.
659 For processors with load scheduling, it never costs more than 2 cycles
660 to load a constant, and the load scheduler may well reduce that to 1. */
661 if (optimize_size || (tune_flags & FL_LDSCHED))
662 arm_constant_limit = 1;
664 if (arm_is_xscale)
665 arm_constant_limit = 2;
667 /* If optimizing for size, bump the number of instructions that we
668 are prepared to conditionally execute (even on a StrongARM).
669 Otherwise for the StrongARM, which has early execution of branches,
670 a sequence that is worth skipping is shorter. */
671 if (optimize_size)
672 max_insns_skipped = 6;
673 else if (arm_is_strong)
674 max_insns_skipped = 3;
676 /* Register global variables with the garbage collector. */
677 arm_add_gc_roots ();
680 static void
681 arm_add_gc_roots ()
683 ggc_add_rtx_root (&arm_compare_op0, 1);
684 ggc_add_rtx_root (&arm_compare_op1, 1);
685 ggc_add_rtx_root (&arm_target_insn, 1); /* Not sure this is really a root */
687 gcc_obstack_init(&minipool_obstack);
688 minipool_startobj = (char *) obstack_alloc (&minipool_obstack, 0);
691 /* Return 1 if it is possible to return using a single instruction. */
693 use_return_insn (iscond)
694 int iscond;
696 int regno;
698 /* Never use a return instruction before reload has run. */
699 if (!reload_completed
700 /* Or if the function is variadic. */
701 || current_function_pretend_args_size
702 || current_function_anonymous_args
703 /* Or if the function calls __builtin_eh_return (). */
704 || cfun->machine->eh_epilogue_sp_ofs != NULL
705 /* Or if there is no frame pointer and there is a stack adjustment. */
706 || ((get_frame_size () + current_function_outgoing_args_size != 0)
707 && !frame_pointer_needed))
708 return 0;
710 /* Can't be done if interworking with Thumb, and any registers have been
711 stacked. Similarly, on StrongARM, conditional returns are expensive
712 if they aren't taken and registers have been stacked. */
713 if (iscond && arm_is_strong && frame_pointer_needed)
714 return 0;
716 if ((iscond && arm_is_strong)
717 || TARGET_INTERWORK)
719 for (regno = 0; regno <= LAST_ARM_REGNUM; regno++)
720 if (regs_ever_live[regno] && !call_used_regs[regno])
721 return 0;
723 if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
724 return 0;
727 /* Can't be done if any of the FPU regs are pushed, since this also
728 requires an insn. */
729 if (TARGET_HARD_FLOAT)
730 for (regno = FIRST_ARM_FP_REGNUM; regno <= LAST_ARM_FP_REGNUM; regno++)
731 if (regs_ever_live[regno] && !call_used_regs[regno])
732 return 0;
734 /* If a function is naked, don't use the "return" insn. */
735 if (arm_naked_function_p (current_function_decl))
736 return 0;
738 return 1;
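/* Editor's note -- an illustrative aside, not part of the original
   file: when use_return_insn returns 1, the epilogue collapses into a
   single instruction -- `mov pc, lr' for a leaf function that stacks
   nothing, or one load-multiple of the form `ldmfd sp!, {..., pc}'
   when registers were pushed.  */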
741 /* Return TRUE if int I is a valid immediate ARM constant. */
744 const_ok_for_arm (i)
745 HOST_WIDE_INT i;
747 unsigned HOST_WIDE_INT mask = ~HOST_UINT (0xFF);
749 /* For machines with >32 bit HOST_WIDE_INT, the bits above bit 31 must
750 be all zero, or all one. */
751 if ((i & ~HOST_UINT (0xffffffff)) != 0
752 && ((i & ~HOST_UINT (0xffffffff))
753 != ((~HOST_UINT (0))
754 & ~HOST_UINT (0xffffffff))))
755 return FALSE;
757 /* Fast return for 0 and powers of 2 */
758 if ((i & (i - 1)) == 0)
759 return TRUE;
763 if ((i & mask & HOST_UINT (0xffffffff)) == 0)
764 return TRUE;
765 mask =
766 (mask << 2) | ((mask & HOST_UINT (0xffffffff))
767 >> (32 - 2)) | ~(HOST_UINT (0xffffffff));
768 } while (mask != ~HOST_UINT (0xFF));
770 return FALSE;
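/* Editor's note -- a minimal standalone sketch, not part of the
   original file.  The loop above encodes the ARM rule that a
   data-processing immediate must be an 8-bit value rotated right by an
   even amount.  Assuming 32-bit unsigned ints, the same test can be
   written as the hypothetical helper below (disabled, illustration
   only).  */
#if 0
static int
is_arm_immediate (i)
     unsigned int i;
{
  int r;

  for (r = 0; r < 32; r += 2)
    {
      /* Rotate I left by R bits; R == 0 is treated separately to
         avoid an undefined shift by 32.  */
      unsigned int rot = r ? ((i << r) | (i >> (32 - r))) : i;

      if ((rot & ~(unsigned int) 0xFF) == 0)
        return 1;
    }
  return 0;
}
#endif
/* For example, 0xFF000000 and 0x104 are encodable, while 0x101 is
   not: its two set bits do not fit in any rotated 8-bit window.  */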
773 /* Return true if I is a valid constant for the operation CODE. */
774 static int
775 const_ok_for_op (i, code)
776 HOST_WIDE_INT i;
777 enum rtx_code code;
779 if (const_ok_for_arm (i))
780 return 1;
782 switch (code)
784 case PLUS:
785 return const_ok_for_arm (ARM_SIGN_EXTEND (-i));
787 case MINUS: /* Should only occur with (MINUS I reg) => rsb */
788 case XOR:
789 case IOR:
790 return 0;
792 case AND:
793 return const_ok_for_arm (ARM_SIGN_EXTEND (~i));
795 default:
796 abort ();
800 /* Emit a sequence of insns to handle a large constant.
801 CODE is the code of the operation required, it can be any of SET, PLUS,
802 IOR, AND, XOR, MINUS;
803 MODE is the mode in which the operation is being performed;
804 VAL is the integer to operate on;
805 SOURCE is the other operand (a register, or a null-pointer for SET);
806 SUBTARGETS means it is safe to create scratch registers if that will
807 either produce a simpler sequence, or we will want to cse the values.
808 Return value is the number of insns emitted. */
811 arm_split_constant (code, mode, val, target, source, subtargets)
812 enum rtx_code code;
813 enum machine_mode mode;
814 HOST_WIDE_INT val;
815 rtx target;
816 rtx source;
817 int subtargets;
819 if (subtargets || code == SET
820 || (GET_CODE (target) == REG && GET_CODE (source) == REG
821 && REGNO (target) != REGNO (source)))
823 /* After arm_reorg has been called, we can't fix up expensive
824 constants by pushing them into memory so we must synthesise
825 them in-line, regardless of the cost. This is only likely to
826 be more costly on chips that have load delay slots and we are
827 compiling without running the scheduler (so no splitting
828 occurred before the final instruction emission).
830 Ref: gcc -O1 -mcpu=strongarm gcc.c-torture/compile/980506-2.c
832 if (!after_arm_reorg
833 && (arm_gen_constant (code, mode, val, target, source, 1, 0)
834 > arm_constant_limit + (code != SET)))
836 if (code == SET)
838 /* Currently SET is the only monadic value for CODE, all
839 the rest are dyadic. */
840 emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (val)));
841 return 1;
843 else
845 rtx temp = subtargets ? gen_reg_rtx (mode) : target;
847 emit_insn (gen_rtx_SET (VOIDmode, temp, GEN_INT (val)));
848 /* For MINUS, SOURCE is subtracted from the value, since we never
849 have subtraction of a constant. */
850 if (code == MINUS)
851 emit_insn (gen_rtx_SET (VOIDmode, target,
852 gen_rtx_MINUS (mode, temp, source)));
853 else
854 emit_insn (gen_rtx_SET (VOIDmode, target,
855 gen_rtx (code, mode, source, temp)));
856 return 2;
861 return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
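/* Editor's note -- an illustrative aside, not part of the original
   file.  For SET of the constant 0xFFF (twelve contiguous bits, so
   not a valid rotated 8-bit immediate) the splitting above yields a
   two-insn sequence along the lines of

     mov  rD, #0xFF0
     add  rD, rD, #0xF

   where the exact split is chosen by arm_gen_constant; the first call
   to arm_gen_constant, with its last argument clear, merely costs the
   sequence against arm_constant_limit.  */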
864 /* As above, but extra parameter GENERATE which, if clear, suppresses
865 RTL generation. */
866 static int
867 arm_gen_constant (code, mode, val, target, source, subtargets, generate)
868 enum rtx_code code;
869 enum machine_mode mode;
870 HOST_WIDE_INT val;
871 rtx target;
872 rtx source;
873 int subtargets;
874 int generate;
876 int can_invert = 0;
877 int can_negate = 0;
878 int can_negate_initial = 0;
879 int can_shift = 0;
880 int i;
881 int num_bits_set = 0;
882 int set_sign_bit_copies = 0;
883 int clear_sign_bit_copies = 0;
884 int clear_zero_bit_copies = 0;
885 int set_zero_bit_copies = 0;
886 int insns = 0;
887 unsigned HOST_WIDE_INT temp1, temp2;
888 unsigned HOST_WIDE_INT remainder = val & HOST_UINT (0xffffffff);
890 /* Find out which operations are safe for a given CODE. Also do a quick
891 check for degenerate cases; these can occur when DImode operations
892 are split. */
893 switch (code)
895 case SET:
896 can_invert = 1;
897 can_shift = 1;
898 can_negate = 1;
899 break;
901 case PLUS:
902 can_negate = 1;
903 can_negate_initial = 1;
904 break;
906 case IOR:
907 if (remainder == HOST_UINT (0xffffffff))
909 if (generate)
910 emit_insn (gen_rtx_SET (VOIDmode, target,
911 GEN_INT (ARM_SIGN_EXTEND (val))));
912 return 1;
914 if (remainder == 0)
916 if (reload_completed && rtx_equal_p (target, source))
917 return 0;
918 if (generate)
919 emit_insn (gen_rtx_SET (VOIDmode, target, source));
920 return 1;
922 break;
924 case AND:
925 if (remainder == 0)
927 if (generate)
928 emit_insn (gen_rtx_SET (VOIDmode, target, const0_rtx));
929 return 1;
931 if (remainder == HOST_UINT (0xffffffff))
933 if (reload_completed && rtx_equal_p (target, source))
934 return 0;
935 if (generate)
936 emit_insn (gen_rtx_SET (VOIDmode, target, source));
937 return 1;
939 can_invert = 1;
940 break;
942 case XOR:
943 if (remainder == 0)
945 if (reload_completed && rtx_equal_p (target, source))
946 return 0;
947 if (generate)
948 emit_insn (gen_rtx_SET (VOIDmode, target, source));
949 return 1;
951 if (remainder == HOST_UINT (0xffffffff))
953 if (generate)
954 emit_insn (gen_rtx_SET (VOIDmode, target,
955 gen_rtx_NOT (mode, source)));
956 return 1;
959 /* We don't know how to handle this yet below. */
960 abort ();
962 case MINUS:
963 /* We treat MINUS as (val - source), since (source - val) is always
964 passed as (source + (-val)). */
965 if (remainder == 0)
967 if (generate)
968 emit_insn (gen_rtx_SET (VOIDmode, target,
969 gen_rtx_NEG (mode, source)));
970 return 1;
972 if (const_ok_for_arm (val))
974 if (generate)
975 emit_insn (gen_rtx_SET (VOIDmode, target,
976 gen_rtx_MINUS (mode, GEN_INT (val),
977 source)));
978 return 1;
980 can_negate = 1;
982 break;
984 default:
985 abort ();
988 /* If we can do it in one insn get out quickly. */
989 if (const_ok_for_arm (val)
990 || (can_negate_initial && const_ok_for_arm (-val))
991 || (can_invert && const_ok_for_arm (~val)))
993 if (generate)
994 emit_insn (gen_rtx_SET (VOIDmode, target,
995 (source ? gen_rtx (code, mode, source,
996 GEN_INT (val))
997 : GEN_INT (val))));
998 return 1;
1001 /* Calculate a few attributes that may be useful for specific
1002 optimizations. */
1003 for (i = 31; i >= 0; i--)
1005 if ((remainder & (1 << i)) == 0)
1006 clear_sign_bit_copies++;
1007 else
1008 break;
1011 for (i = 31; i >= 0; i--)
1013 if ((remainder & (1 << i)) != 0)
1014 set_sign_bit_copies++;
1015 else
1016 break;
1019 for (i = 0; i <= 31; i++)
1021 if ((remainder & (1 << i)) == 0)
1022 clear_zero_bit_copies++;
1023 else
1024 break;
1027 for (i = 0; i <= 31; i++)
1029 if ((remainder & (1 << i)) != 0)
1030 set_zero_bit_copies++;
1031 else
1032 break;
1035 switch (code)
1037 case SET:
1038 /* See if we can do this by sign_extending a constant that is known
1039 to be negative. This is a good way of doing it, since the shift
1040 may well merge into a subsequent insn. */
1041 if (set_sign_bit_copies > 1)
1043 if (const_ok_for_arm
1044 (temp1 = ARM_SIGN_EXTEND (remainder
1045 << (set_sign_bit_copies - 1))))
1047 if (generate)
1049 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
1050 emit_insn (gen_rtx_SET (VOIDmode, new_src,
1051 GEN_INT (temp1)));
1052 emit_insn (gen_ashrsi3 (target, new_src,
1053 GEN_INT (set_sign_bit_copies - 1)));
1055 return 2;
1057 /* For an inverted constant, we will need to set the low bits,
1058 these will be shifted out of harm's way. */
1059 temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
1060 if (const_ok_for_arm (~temp1))
1062 if (generate)
1064 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
1065 emit_insn (gen_rtx_SET (VOIDmode, new_src,
1066 GEN_INT (temp1)));
1067 emit_insn (gen_ashrsi3 (target, new_src,
1068 GEN_INT (set_sign_bit_copies - 1)));
1070 return 2;
1074 /* See if we can generate this by setting the bottom (or the top)
1075 16 bits, and then shifting these into the other half of the
1076 word. We only look for the simplest cases; to do more would cost
1077 too much. Be careful, however, not to generate this when the
1078 alternative would take fewer insns. */
1079 if (val & HOST_UINT (0xffff0000))
1081 temp1 = remainder & HOST_UINT (0xffff0000);
1082 temp2 = remainder & 0x0000ffff;
1084 /* Overlaps outside this range are best done using other methods. */
1085 for (i = 9; i < 24; i++)
1087 if ((((temp2 | (temp2 << i))
1088 & HOST_UINT (0xffffffff)) == remainder)
1089 && !const_ok_for_arm (temp2))
1091 rtx new_src = (subtargets
1092 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
1093 : target);
1094 insns = arm_gen_constant (code, mode, temp2, new_src,
1095 source, subtargets, generate);
1096 source = new_src;
1097 if (generate)
1098 emit_insn (gen_rtx_SET
1099 (VOIDmode, target,
1100 gen_rtx_IOR (mode,
1101 gen_rtx_ASHIFT (mode, source,
1102 GEN_INT (i)),
1103 source)));
1104 return insns + 1;
1108 /* Don't duplicate cases already considered. */
1109 for (i = 17; i < 24; i++)
1111 if (((temp1 | (temp1 >> i)) == remainder)
1112 && !const_ok_for_arm (temp1))
1114 rtx new_src = (subtargets
1115 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
1116 : target);
1117 insns = arm_gen_constant (code, mode, temp1, new_src,
1118 source, subtargets, generate);
1119 source = new_src;
1120 if (generate)
1121 emit_insn
1122 (gen_rtx_SET (VOIDmode, target,
1123 gen_rtx_IOR
1124 (mode,
1125 gen_rtx_LSHIFTRT (mode, source,
1126 GEN_INT (i)),
1127 source)));
1128 return insns + 1;
1132 break;
1134 case IOR:
1135 case XOR:
1136 /* If we have IOR or XOR, and the constant can be loaded in a
1137 single instruction, and we can find a temporary to put it in,
1138 then this can be done in two instructions instead of 3-4. */
1139 if (subtargets
1140 /* TARGET can't be NULL if SUBTARGETS is 0 */
1141 || (reload_completed && !reg_mentioned_p (target, source)))
1143 if (const_ok_for_arm (ARM_SIGN_EXTEND (~val)))
1145 if (generate)
1147 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
1149 emit_insn (gen_rtx_SET (VOIDmode, sub, GEN_INT (val)));
1150 emit_insn (gen_rtx_SET (VOIDmode, target,
1151 gen_rtx (code, mode, source, sub)));
1153 return 2;
1157 if (code == XOR)
1158 break;
1160 if (set_sign_bit_copies > 8
1161 && (val & (-1 << (32 - set_sign_bit_copies))) == val)
1163 if (generate)
1165 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
1166 rtx shift = GEN_INT (set_sign_bit_copies);
1168 emit_insn (gen_rtx_SET (VOIDmode, sub,
1169 gen_rtx_NOT (mode,
1170 gen_rtx_ASHIFT (mode,
1171 source,
1172 shift))));
1173 emit_insn (gen_rtx_SET (VOIDmode, target,
1174 gen_rtx_NOT (mode,
1175 gen_rtx_LSHIFTRT (mode, sub,
1176 shift))));
1178 return 2;
1181 if (set_zero_bit_copies > 8
1182 && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
1184 if (generate)
1186 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
1187 rtx shift = GEN_INT (set_zero_bit_copies);
1189 emit_insn (gen_rtx_SET (VOIDmode, sub,
1190 gen_rtx_NOT (mode,
1191 gen_rtx_LSHIFTRT (mode,
1192 source,
1193 shift))));
1194 emit_insn (gen_rtx_SET (VOIDmode, target,
1195 gen_rtx_NOT (mode,
1196 gen_rtx_ASHIFT (mode, sub,
1197 shift))));
1199 return 2;
1202 if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~val)))
1204 if (generate)
1206 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
1207 emit_insn (gen_rtx_SET (VOIDmode, sub,
1208 gen_rtx_NOT (mode, source)));
1209 source = sub;
1210 if (subtargets)
1211 sub = gen_reg_rtx (mode);
1212 emit_insn (gen_rtx_SET (VOIDmode, sub,
1213 gen_rtx_AND (mode, source,
1214 GEN_INT (temp1))));
1215 emit_insn (gen_rtx_SET (VOIDmode, target,
1216 gen_rtx_NOT (mode, sub)));
1218 return 3;
1220 break;
1222 case AND:
1223 /* See if two shifts will do 2 or more insns' worth of work. */
1224 if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
1226 HOST_WIDE_INT shift_mask = (((HOST_UINT (0xffffffff))
1227 << (32 - clear_sign_bit_copies))
1228 & HOST_UINT (0xffffffff));
1230 if ((remainder | shift_mask) != HOST_UINT (0xffffffff))
1232 if (generate)
1234 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
1235 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
1236 new_src, source, subtargets, 1);
1237 source = new_src;
1239 else
1241 rtx targ = subtargets ? NULL_RTX : target;
1242 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
1243 targ, source, subtargets, 0);
1247 if (generate)
1249 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
1250 rtx shift = GEN_INT (clear_sign_bit_copies);
1252 emit_insn (gen_ashlsi3 (new_src, source, shift));
1253 emit_insn (gen_lshrsi3 (target, new_src, shift));
1256 return insns + 2;
1259 if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
1261 HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;
1263 if ((remainder | shift_mask) != HOST_UINT (0xffffffff))
1265 if (generate)
1267 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
1269 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
1270 new_src, source, subtargets, 1);
1271 source = new_src;
1273 else
1275 rtx targ = subtargets ? NULL_RTX : target;
1277 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
1278 targ, source, subtargets, 0);
1282 if (generate)
1284 rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
1285 rtx shift = GEN_INT (clear_zero_bit_copies);
1287 emit_insn (gen_lshrsi3 (new_src, source, shift));
1288 emit_insn (gen_ashlsi3 (target, new_src, shift));
1291 return insns + 2;
1294 break;
1296 default:
1297 break;
1300 for (i = 0; i < 32; i++)
1301 if (remainder & (1 << i))
1302 num_bits_set++;
1304 if (code == AND || (can_invert && num_bits_set > 16))
1305 remainder = (~remainder) & HOST_UINT (0xffffffff);
1306 else if (code == PLUS && num_bits_set > 16)
1307 remainder = (-remainder) & HOST_UINT (0xffffffff);
1308 else
1310 can_invert = 0;
1311 can_negate = 0;
1314 /* Now try and find a way of doing the job in either two or three
1315 instructions.
1316 We start by looking for the largest block of zeros that are aligned on
1317 a 2-bit boundary; we then fill up the temps, wrapping around to the
1318 top of the word when we drop off the bottom.
1319 In the worst case this code should produce no more than four insns. */
1321 int best_start = 0;
1322 int best_consecutive_zeros = 0;
1324 for (i = 0; i < 32; i += 2)
1326 int consecutive_zeros = 0;
1328 if (!(remainder & (3 << i)))
1330 while ((i < 32) && !(remainder & (3 << i)))
1332 consecutive_zeros += 2;
1333 i += 2;
1335 if (consecutive_zeros > best_consecutive_zeros)
1337 best_consecutive_zeros = consecutive_zeros;
1338 best_start = i - consecutive_zeros;
1340 i -= 2;
1344 /* Now start emitting the insns, starting with the one with the highest
1345 bit set: we do this so that the smallest number will be emitted last;
1346 this is more likely to be combinable with addressing insns. */
1347 i = best_start;
1350 int end;
1352 if (i <= 0)
1353 i += 32;
1354 if (remainder & (3 << (i - 2)))
1356 end = i - 8;
1357 if (end < 0)
1358 end += 32;
1359 temp1 = remainder & ((0x0ff << end)
1360 | ((i < end) ? (0xff >> (32 - end)) : 0));
1361 remainder &= ~temp1;
1363 if (generate)
1365 rtx new_src;
1367 if (code == SET)
1368 emit_insn (gen_rtx_SET (VOIDmode,
1369 new_src = (subtargets
1370 ? gen_reg_rtx (mode)
1371 : target),
1372 GEN_INT (can_invert
1373 ? ~temp1 : temp1)));
1374 else if (code == MINUS)
1375 emit_insn (gen_rtx_SET (VOIDmode,
1376 new_src = (subtargets
1377 ? gen_reg_rtx (mode)
1378 : target),
1379 gen_rtx (code, mode, GEN_INT (temp1),
1380 source)));
1381 else
1382 emit_insn (gen_rtx_SET (VOIDmode,
1383 new_src = (remainder
1384 ? (subtargets
1385 ? gen_reg_rtx (mode)
1386 : target)
1387 : target),
1388 gen_rtx (code, mode, source,
1389 GEN_INT (can_invert ? ~temp1
1390 : (can_negate
1391 ? -temp1
1392 : temp1)))));
1393 source = new_src;
1396 if (code == SET)
1398 can_invert = 0;
1399 code = PLUS;
1401 else if (code == MINUS)
1402 code = PLUS;
1404 insns++;
1405 i -= 6;
1407 i -= 2;
1408 } while (remainder);
1410 return insns;
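/* Editor's note -- an illustrative aside, not part of the original
   file.  The worst case promised above is four insns, one per byte
   chunk; e.g. SET of 0x12345678 becomes

     mov  rD, #0x12000000
     add  rD, rD, #0x340000
     add  rD, rD, #0x5600
     add  rD, rD, #0x78

   (SET switches to PLUS after the first insn, as the code above shows),
   each operand being an 8-bit value at an even rotation.  */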
1413 /* Canonicalize a comparison so that we are more likely to recognize it.
1414 This can be done for a few constant compares, where we can make the
1415 immediate value easier to load. */
1416 enum rtx_code
1417 arm_canonicalize_comparison (code, op1)
1418 enum rtx_code code;
1419 rtx * op1;
1421 unsigned HOST_WIDE_INT i = INTVAL (*op1);
1423 switch (code)
1425 case EQ:
1426 case NE:
1427 return code;
1429 case GT:
1430 case LE:
1431 if (i != (((HOST_UINT (1)) << (HOST_BITS_PER_WIDE_INT - 1)) - 1)
1432 && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
1434 *op1 = GEN_INT (i + 1);
1435 return code == GT ? GE : LT;
1437 break;
1439 case GE:
1440 case LT:
1441 if (i != ((HOST_UINT (1)) << (HOST_BITS_PER_WIDE_INT - 1))
1442 && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
1444 *op1 = GEN_INT (i - 1);
1445 return code == GE ? GT : LE;
1447 break;
1449 case GTU:
1450 case LEU:
1451 if (i != ~(HOST_UINT (0))
1452 && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
1454 *op1 = GEN_INT (i + 1);
1455 return code == GTU ? GEU : LTU;
1457 break;
1459 case GEU:
1460 case LTU:
1461 if (i != 0
1462 && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
1464 *op1 = GEN_INT (i - 1);
1465 return code == GEU ? GTU : LEU;
1467 break;
1469 default:
1470 abort ();
1473 return code;
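/* Editor's note -- an illustrative aside, not part of the original
   file: as an example of the canonicalisation above, (GT x 0x3FF)
   becomes (GE x 0x400), since 0x3FF (ten contiguous bits) cannot be
   loaded as an immediate but 0x400 (a single bit) can.  */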
1476 /* Decide whether a type should be returned in memory (true)
1477 or in a register (false). This is called by the macro
1478 RETURN_IN_MEMORY. */
1480 arm_return_in_memory (type)
1481 tree type;
1483 if (!AGGREGATE_TYPE_P (type))
1484 /* All simple types are returned in registers. */
1485 return 0;
1487 /* For the arm-wince targets we choose to be compatible with Microsoft's
1488 ARM and Thumb compilers, which always return aggregates in memory. */
1489 #ifndef ARM_WINCE
1491 if (int_size_in_bytes (type) > 4)
1492 /* All structures/unions bigger than one word are returned in memory. */
1493 return 1;
1495 if (TREE_CODE (type) == RECORD_TYPE)
1497 tree field;
1499 /* For a struct the APCS says that we only return in a register
1500 if the type is 'integer like' and every addressable element
1501 has an offset of zero. For practical purposes this means
1502 that the structure can have at most one non bit-field element
1503 and that this element must be the first one in the structure. */
1505 /* Find the first field, ignoring non FIELD_DECL things which will
1506 have been created by C++. */
1507 for (field = TYPE_FIELDS (type);
1508 field && TREE_CODE (field) != FIELD_DECL;
1509 field = TREE_CHAIN (field))
1510 continue;
1512 if (field == NULL)
1513 return 0; /* An empty structure. Allowed by an extension to ANSI C. */
1515 /* Check that the first field is valid for returning in a register. */
1517 /* ... Floats are not allowed */
1518 if (FLOAT_TYPE_P (TREE_TYPE (field)))
1519 return 1;
1521 /* ... Aggregates that are not themselves valid for returning in
1522 a register are not allowed. */
1523 if (RETURN_IN_MEMORY (TREE_TYPE (field)))
1524 return 1;
1526 /* Now check the remaining fields, if any. Only bitfields are allowed,
1527 since they are not addressable. */
1528 for (field = TREE_CHAIN (field);
1529 field;
1530 field = TREE_CHAIN (field))
1532 if (TREE_CODE (field) != FIELD_DECL)
1533 continue;
1535 if (!DECL_BIT_FIELD_TYPE (field))
1536 return 1;
1539 return 0;
1542 if (TREE_CODE (type) == UNION_TYPE)
1544 tree field;
1546 /* Unions can be returned in registers if every element is
1547 integral, or can be returned in an integer register. */
1548 for (field = TYPE_FIELDS (type);
1549 field;
1550 field = TREE_CHAIN (field))
1552 if (TREE_CODE (field) != FIELD_DECL)
1553 continue;
1555 if (FLOAT_TYPE_P (TREE_TYPE (field)))
1556 return 1;
1558 if (RETURN_IN_MEMORY (TREE_TYPE (field)))
1559 return 1;
1562 return 0;
1564 #endif /* not ARM_WINCE */
1566 /* Return all other types in memory. */
1567 return 1;
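/* Editor's note -- an illustrative aside, not part of the original
   file.  Under the rules above (non-WinCE), `struct { int i; }' comes
   back in a register, while `struct { float f; }' (first field is a
   float) and `struct { int i; int j; }' (wider than one word) are both
   returned in memory.  */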
1570 /* Initialize a variable CUM of type CUMULATIVE_ARGS
1571 for a call to a function whose data type is FNTYPE.
1572 For a library call, FNTYPE is NULL. */
1573 void
1574 arm_init_cumulative_args (pcum, fntype, libname, indirect)
1575 CUMULATIVE_ARGS * pcum;
1576 tree fntype;
1577 rtx libname ATTRIBUTE_UNUSED;
1578 int indirect ATTRIBUTE_UNUSED;
1580 /* On the ARM, the offset starts at 0. */
1581 pcum->nregs = ((fntype && aggregate_value_p (TREE_TYPE (fntype))) ? 1 : 0);
1583 pcum->call_cookie = CALL_NORMAL;
1585 if (TARGET_LONG_CALLS)
1586 pcum->call_cookie = CALL_LONG;
1588 /* Check for long call/short call attributes. The attributes
1589 override any command line option. */
1590 if (fntype)
1592 if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (fntype)))
1593 pcum->call_cookie = CALL_SHORT;
1594 else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (fntype)))
1595 pcum->call_cookie = CALL_LONG;
1599 /* Determine where to put an argument to a function.
1600 Value is zero to push the argument on the stack,
1601 or a hard register in which to store the argument.
1603 MODE is the argument's machine mode.
1604 TYPE is the data type of the argument (as a tree).
1605 This is null for libcalls where that information may
1606 not be available.
1607 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1608 the preceding args and about the function being called.
1609 NAMED is nonzero if this argument is a named parameter
1610 (otherwise it is an extra parameter matching an ellipsis). */
1612 arm_function_arg (pcum, mode, type, named)
1613 CUMULATIVE_ARGS * pcum;
1614 enum machine_mode mode;
1615 tree type ATTRIBUTE_UNUSED;
1616 int named;
1618 if (mode == VOIDmode)
1619 /* Compute operand 2 of the call insn. */
1620 return GEN_INT (pcum->call_cookie);
1622 if (!named || pcum->nregs >= NUM_ARG_REGS)
1623 return NULL_RTX;
1625 return gen_rtx_REG (mode, pcum->nregs);
1628 /* Encode the current state of the #pragma [no_]long_calls. */
1629 typedef enum
1631 OFF, /* No #pragma [no_]long_calls is in effect. */
1632 LONG, /* #pragma long_calls is in effect. */
1633 SHORT /* #pragma no_long_calls is in effect. */
1634 } arm_pragma_enum;
1636 static arm_pragma_enum arm_pragma_long_calls = OFF;
1638 void
1639 arm_pr_long_calls (pfile)
1640 cpp_reader *pfile ATTRIBUTE_UNUSED;
1642 arm_pragma_long_calls = LONG;
1645 void
1646 arm_pr_no_long_calls (pfile)
1647 cpp_reader *pfile ATTRIBUTE_UNUSED;
1649 arm_pragma_long_calls = SHORT;
1652 void
1653 arm_pr_long_calls_off (pfile)
1654 cpp_reader *pfile ATTRIBUTE_UNUSED;
1656 arm_pragma_long_calls = OFF;
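/* Editor's note -- an illustrative usage sketch, not part of the
   original file.  In user code the three handlers above correspond to

     #pragma long_calls
     void far_away (void);      -- receives the long_call attribute
     #pragma long_calls_off

   and arm_set_default_type_attributes (below) consults the state
   recorded here when each new function type is created.  */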
1660 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
1661 attribute for TYPE. The attributes in ATTRIBUTES have previously been
1662 assigned to TYPE. */
1664 arm_valid_type_attribute_p (type, attributes, identifier, args)
1665 tree type;
1666 tree attributes ATTRIBUTE_UNUSED;
1667 tree identifier;
1668 tree args;
1670 if ( TREE_CODE (type) != FUNCTION_TYPE
1671 && TREE_CODE (type) != METHOD_TYPE
1672 && TREE_CODE (type) != FIELD_DECL
1673 && TREE_CODE (type) != TYPE_DECL)
1674 return 0;
1676 /* Function calls made to this symbol must be done indirectly, because
1677 it may lie outside of the 26 bit addressing range of a normal function
1678 call. */
1679 if (is_attribute_p ("long_call", identifier))
1680 return (args == NULL_TREE);
1682 /* Whereas these functions are always known to reside within the 26 bit
1683 addressing range. */
1684 if (is_attribute_p ("short_call", identifier))
1685 return (args == NULL_TREE);
1687 return 0;
1690 /* Return 0 if the attributes for two types are incompatible, 1 if they
1691 are compatible, and 2 if they are nearly compatible (which causes a
1692 warning to be generated). */
1694 arm_comp_type_attributes (type1, type2)
1695 tree type1;
1696 tree type2;
1698 int l1, l2, s1, s2;
1700 /* Check for mismatch of non-default calling convention. */
1701 if (TREE_CODE (type1) != FUNCTION_TYPE)
1702 return 1;
1704 /* Check for mismatched call attributes. */
1705 l1 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type1)) != NULL;
1706 l2 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type2)) != NULL;
1707 s1 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type1)) != NULL;
1708 s2 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type2)) != NULL;
1710 /* Only bother to check if an attribute is defined. */
1711 if (l1 | l2 | s1 | s2)
1713 /* If one type has an attribute, the other must have the same attribute. */
1714 if ((l1 != l2) || (s1 != s2))
1715 return 0;
1717 /* Disallow mixed attributes. */
1718 if ((l1 & s2) || (l2 & s1))
1719 return 0;
1722 return 1;
1725 /* Encode long_call or short_call attribute by prefixing
1726 symbol name in DECL with a special character FLAG. */
1727 void
1728 arm_encode_call_attribute (decl, flag)
1729 tree decl;
1730 int flag;
1732 const char * str = XSTR (XEXP (DECL_RTL (decl), 0), 0);
1733 int len = strlen (str);
1734 char * newstr;
1736 if (TREE_CODE (decl) != FUNCTION_DECL)
1737 return;
1739 /* Do not allow weak functions to be treated as short call. */
1740 if (DECL_WEAK (decl) && flag == SHORT_CALL_FLAG_CHAR)
1741 return;
1743 newstr = alloca (len + 2);
1744 newstr[0] = flag;
1745 strcpy (newstr + 1, str);
1747 newstr = ggc_alloc_string (newstr, len + 1);
1748 XSTR (XEXP (DECL_RTL (decl), 0), 0) = newstr;
1751 /* Assigns default attributes to newly defined type. This is used to
1752 set short_call/long_call attributes for function types of
1753 functions defined inside corresponding #pragma scopes. */
1754 void
1755 arm_set_default_type_attributes (type)
1756 tree type;
1758 /* Add __attribute__ ((long_call)) to all functions when inside
1759 #pragma long_calls, or __attribute__ ((short_call)) when inside
1760 #pragma no_long_calls. */
1761 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
1763 tree type_attr_list, attr_name;
1764 type_attr_list = TYPE_ATTRIBUTES (type);
1766 if (arm_pragma_long_calls == LONG)
1767 attr_name = get_identifier ("long_call");
1768 else if (arm_pragma_long_calls == SHORT)
1769 attr_name = get_identifier ("short_call");
1770 else
1771 return;
1773 type_attr_list = tree_cons (attr_name, NULL_TREE, type_attr_list);
1774 TYPE_ATTRIBUTES (type) = type_attr_list;
1778 /* Return 1 if the operand is a SYMBOL_REF for a function known to be
1779 defined within the current compilation unit. If this cannot be
1780 determined, then 0 is returned. */
1781 static int
1782 current_file_function_operand (sym_ref)
1783 rtx sym_ref;
1785 /* This is a bit of a fib. A function will have a short call flag
1786 applied to its name if it has the short call attribute, or it has
1787 already been defined within the current compilation unit. */
1788 if (ENCODED_SHORT_CALL_ATTR_P (XSTR (sym_ref, 0)))
1789 return 1;
1791 /* The current function is always defined within the current compilation
1792 unit. If it is a weak definition, however, then this may not be the real
1793 definition of the function, and so we have to say no. */
1794 if (sym_ref == XEXP (DECL_RTL (current_function_decl), 0)
1795 && !DECL_WEAK (current_function_decl))
1796 return 1;
1798 /* We cannot make the determination - default to returning 0. */
1799 return 0;
1802 /* Return non-zero if a 32 bit "long_call" should be generated for
1803 this call. We generate a long_call if the function:
1805 a. has an __attribute__((long_call))
1806 or b. is within the scope of a #pragma long_calls
1807 or c. the -mlong-calls command line switch has been specified
1809 However we do not generate a long call if the function:
1811 d. has an __attribute__ ((short_call))
1812 or e. is inside the scope of a #pragma no_long_calls
1813 or f. has an __attribute__ ((section))
1814 or g. is defined within the current compilation unit.
1816 This function will be called by C fragments contained in the machine
1817 description file. CALL_REF and CALL_COOKIE correspond to the matched
1818 rtl operands. CALL_SYMBOL is used to distinguish between
1819 two different callers of the function. It is set to 1 in the
1820 "call_symbol" and "call_symbol_value" patterns and to 0 in the "call"
1821 and "call_value" patterns. This is because of the difference in the
1822 SYM_REFs passed by these patterns. */
1824 arm_is_longcall_p (sym_ref, call_cookie, call_symbol)
1825 rtx sym_ref;
1826 int call_cookie;
1827 int call_symbol;
1829 if (!call_symbol)
1831 if (GET_CODE (sym_ref) != MEM)
1832 return 0;
1834 sym_ref = XEXP (sym_ref, 0);
1837 if (GET_CODE (sym_ref) != SYMBOL_REF)
1838 return 0;
1840 if (call_cookie & CALL_SHORT)
1841 return 0;
1843 if (TARGET_LONG_CALLS && flag_function_sections)
1844 return 1;
1846 if (current_file_function_operand (sym_ref))
1847 return 0;
1849 return (call_cookie & CALL_LONG)
1850 || ENCODED_LONG_CALL_ATTR_P (XSTR (sym_ref, 0))
1851 || TARGET_LONG_CALLS;
1854 /* Return non-zero if it is ok to make a tail-call to DECL. */
1856 arm_function_ok_for_sibcall (decl)
1857 tree decl;
1859 int call_type = TARGET_LONG_CALLS ? CALL_LONG : CALL_NORMAL;
1861 /* Never tailcall something for which we have no decl, or if we
1862 are in Thumb mode. */
1863 if (decl == NULL || TARGET_THUMB)
1864 return 0;
1866 /* Get the calling method. */
1867 if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
1868 call_type = CALL_SHORT;
1869 else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
1870 call_type = CALL_LONG;
1872 /* Cannot tail-call to long calls, since these are out of range of
1873 a branch instruction. However, if not compiling PIC, we know
1874 we can reach the symbol if it is in this compilation unit. */
1875 if (call_type == CALL_LONG && (flag_pic || !TREE_ASM_WRITTEN (decl)))
1876 return 0;
1878 /* If we are interworking and the function is not declared static
1879 then we can't tail-call it unless we know that it exists in this
1880 compilation unit (since it might be a Thumb routine). */
1881 if (TARGET_INTERWORK && TREE_PUBLIC (decl) && !TREE_ASM_WRITTEN (decl))
1882 return 0;
1884 /* Everything else is ok. */
1885 return 1;
1890 legitimate_pic_operand_p (x)
1891 rtx x;
1893 if (CONSTANT_P (x)
1894 && flag_pic
1895 && (GET_CODE (x) == SYMBOL_REF
1896 || (GET_CODE (x) == CONST
1897 && GET_CODE (XEXP (x, 0)) == PLUS
1898 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)))
1899 return 0;
1901 return 1;
1905 legitimize_pic_address (orig, mode, reg)
1906 rtx orig;
1907 enum machine_mode mode;
1908 rtx reg;
1910 if (GET_CODE (orig) == SYMBOL_REF)
1912 rtx pic_ref, address;
1913 rtx insn;
1914 int subregs = 0;
1916 if (reg == 0)
1918 if (no_new_pseudos)
1919 abort ();
1920 else
1921 reg = gen_reg_rtx (Pmode);
1923 subregs = 1;
1926 #ifdef AOF_ASSEMBLER
1927 /* The AOF assembler can generate relocations for these directly, and
1928 understands that the PIC register has to be added into the offset. */
1929 insn = emit_insn (gen_pic_load_addr_based (reg, orig));
1930 #else
1931 if (subregs)
1932 address = gen_reg_rtx (Pmode);
1933 else
1934 address = reg;
1936 if (TARGET_ARM)
1937 emit_insn (gen_pic_load_addr_arm (address, orig));
1938 else
1939 emit_insn (gen_pic_load_addr_thumb (address, orig));
1941 pic_ref = gen_rtx_MEM (Pmode,
1942 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
1943 address));
1944 RTX_UNCHANGING_P (pic_ref) = 1;
1945 insn = emit_move_insn (reg, pic_ref);
1946 #endif
1947 current_function_uses_pic_offset_table = 1;
1948 /* Put a REG_EQUAL note on this insn, so that it can be optimized
1949 by the loop optimizer. */
1950 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, orig,
1951 REG_NOTES (insn));
1952 return reg;
1954 else if (GET_CODE (orig) == CONST)
1956 rtx base, offset;
1958 if (GET_CODE (XEXP (orig, 0)) == PLUS
1959 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
1960 return orig;
1962 if (reg == 0)
1964 if (no_new_pseudos)
1965 abort ();
1966 else
1967 reg = gen_reg_rtx (Pmode);
1970 if (GET_CODE (XEXP (orig, 0)) == PLUS)
1972 base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
1973 offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
1974 base == reg ? 0 : reg);
1976 else
1977 abort ();
1979 if (GET_CODE (offset) == CONST_INT)
1981 /* The base register doesn't really matter; we only want to
1982 test the index for the appropriate mode. */
1983 ARM_GO_IF_LEGITIMATE_INDEX (mode, 0, offset, win);
1985 if (!no_new_pseudos)
1986 offset = force_reg (Pmode, offset);
1987 else
1988 abort ();
1990 win:
1991 if (GET_CODE (offset) == CONST_INT)
1992 return plus_constant_for_output (base, INTVAL (offset));
1995 if (GET_MODE_SIZE (mode) > 4
1996 && (GET_MODE_CLASS (mode) == MODE_INT
1997 || TARGET_SOFT_FLOAT))
1999 emit_insn (gen_addsi3 (reg, base, offset));
2000 return reg;
2003 return gen_rtx_PLUS (Pmode, base, offset);
2005 else if (GET_CODE (orig) == LABEL_REF)
2007 current_function_uses_pic_offset_table = 1;
2009 if (NEED_GOT_RELOC)
2011 rtx pic_ref, address = gen_reg_rtx (Pmode);
2013 if (TARGET_ARM)
2014 emit_insn (gen_pic_load_addr_arm (address, orig));
2015 else
2016 emit_insn (gen_pic_load_addr_thumb (address, orig));
2018 pic_ref = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, address);
2020 emit_move_insn (address, pic_ref);
2021 return address;
2025 return orig;
2028 static rtx pic_rtx;
2031 is_pic (x)
2032 rtx x;
2034 if (x == pic_rtx)
2035 return 1;
2036 return 0;
2039 void
2040 arm_finalize_pic ()
2042 #ifndef AOF_ASSEMBLER
2043 rtx l1, pic_tmp, pic_tmp2, seq;
2044 rtx global_offset_table;
2046 if (current_function_uses_pic_offset_table == 0 || TARGET_SINGLE_PIC_BASE)
2047 return;
2049 if (!flag_pic)
2050 abort ();
2052 start_sequence ();
2053 l1 = gen_label_rtx ();
2055 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2056 /* On the ARM the PC register contains 'dot + 8' at the time of the
2057 addition; on the Thumb it is 'dot + 4'. */
2058 pic_tmp = plus_constant (gen_rtx_LABEL_REF (Pmode, l1), TARGET_ARM ? 8 : 4);
2059 if (GOT_PCREL)
2060 pic_tmp2 = gen_rtx_CONST (VOIDmode,
2061 gen_rtx_PLUS (Pmode, global_offset_table, pc_rtx));
2062 else
2063 pic_tmp2 = gen_rtx_CONST (VOIDmode, global_offset_table);
2065 pic_rtx = gen_rtx_CONST (Pmode, gen_rtx_MINUS (Pmode, pic_tmp2, pic_tmp));
2067 if (TARGET_ARM)
2069 emit_insn (gen_pic_load_addr_arm (pic_offset_table_rtx, pic_rtx));
2070 emit_insn (gen_pic_add_dot_plus_eight (pic_offset_table_rtx, l1));
2072 else
2074 emit_insn (gen_pic_load_addr_thumb (pic_offset_table_rtx, pic_rtx));
2075 emit_insn (gen_pic_add_dot_plus_four (pic_offset_table_rtx, l1));
2078 seq = gen_sequence ();
2079 end_sequence ();
2080 emit_insn_after (seq, get_insns ());
2082 /* Need to emit this whether or not we obey regdecls,
2083 since setjmp/longjmp can cause life info to screw up. */
2084 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
2085 #endif /* AOF_ASSEMBLER */
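/* Roughly the ARM sequence built above (illustrative; the label names
   are arbitrary).  Because the PC reads as L1 + 8 at the add, the PIC
   register ends up holding the address of the GOT:

	ldr	sl, L2
   L1:	add	sl, pc, sl
	...
   L2:	.word	_GLOBAL_OFFSET_TABLE_ - (L1 + 8)  */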
2088 #define REG_OR_SUBREG_REG(X) \
2089 (GET_CODE (X) == REG \
2090 || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))
2092 #define REG_OR_SUBREG_RTX(X) \
2093 (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))
2095 #ifndef COSTS_N_INSNS
2096 #define COSTS_N_INSNS(N) ((N) * 4 - 2)
2097 #endif
2100 arm_rtx_costs (x, code, outer)
2101 rtx x;
2102 enum rtx_code code;
2103 enum rtx_code outer;
2105 enum machine_mode mode = GET_MODE (x);
2106 enum rtx_code subcode;
2107 int extra_cost;
2109 if (TARGET_THUMB)
2111 switch (code)
2113 case ASHIFT:
2114 case ASHIFTRT:
2115 case LSHIFTRT:
2116 case ROTATERT:
2117 case PLUS:
2118 case MINUS:
2119 case COMPARE:
2120 case NEG:
2121 case NOT:
2122 return COSTS_N_INSNS (1);
2124 case MULT:
2125 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
2127 int cycles = 0;
2128 unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));
2130 while (i)
2132 i >>= 2;
2133 cycles++;
2135 return COSTS_N_INSNS (2) + cycles;
2137 return COSTS_N_INSNS (1) + 16;
2139 case SET:
2140 return (COSTS_N_INSNS (1)
2141 + 4 * ((GET_CODE (SET_SRC (x)) == MEM)
2142 + (GET_CODE (SET_DEST (x)) == MEM)));
2144 case CONST_INT:
2145 if (outer == SET)
2147 if ((unsigned HOST_WIDE_INT) INTVAL (x) < 256)
2148 return 0;
2149 if (thumb_shiftable_const (INTVAL (x)))
2150 return COSTS_N_INSNS (2);
2151 return COSTS_N_INSNS (3);
2153 else if (outer == PLUS
2154 && INTVAL (x) < 256 && INTVAL (x) > -256)
2155 return 0;
2156 else if (outer == COMPARE
2157 && (unsigned HOST_WIDE_INT) INTVAL (x) < 256)
2158 return 0;
2159 else if (outer == ASHIFT || outer == ASHIFTRT
2160 || outer == LSHIFTRT)
2161 return 0;
2162 return COSTS_N_INSNS (2);
2164 case CONST:
2165 case CONST_DOUBLE:
2166 case LABEL_REF:
2167 case SYMBOL_REF:
2168 return COSTS_N_INSNS (3);
2170 case UDIV:
2171 case UMOD:
2172 case DIV:
2173 case MOD:
2174 return 100;
2176 case TRUNCATE:
2177 return 99;
2179 case AND:
2180 case XOR:
2181 case IOR:
2182 /* XXX guess. */
2183 return 8;
2185 case ADDRESSOF:
2186 case MEM:
2187 /* XXX another guess. */
2188 /* Memory costs quite a lot for the first word, but subsequent words
2189 load at the equivalent of a single insn each. */
2190 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
2191 + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));
2193 case IF_THEN_ELSE:
2194 /* XXX a guess. */
2195 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
2196 return 14;
2197 return 2;
2199 case ZERO_EXTEND:
2200 /* XXX still guessing. */
2201 switch (GET_MODE (XEXP (x, 0)))
2203 case QImode:
2204 return (1 + (mode == DImode ? 4 : 0)
2205 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2207 case HImode:
2208 return (4 + (mode == DImode ? 4 : 0)
2209 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2211 case SImode:
2212 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2214 default:
2215 return 99;
2218 default:
2219 return 99;
2220 #if 0
2221 case FFS:
2222 case FLOAT:
2223 case FIX:
2224 case UNSIGNED_FIX:
2225 /* XXX guess */
2226 fprintf (stderr, "unexpected code for thumb in rtx_costs: %s\n",
2227 rtx_name[code]);
2228 abort ();
2229 #endif
2233 switch (code)
2235 case MEM:
2236 /* Memory costs quite a lot for the first word, but subsequent words
2237 load at the equivalent of a single insn each. */
2238 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
2239 + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));
2241 case DIV:
2242 case MOD:
2243 return 100;
2245 case ROTATE:
2246 if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
2247 return 4;
2248 /* Fall through */
2249 case ROTATERT:
2250 if (mode != SImode)
2251 return 8;
2252 /* Fall through */
2253 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
2254 if (mode == DImode)
2255 return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
2256 + ((GET_CODE (XEXP (x, 0)) == REG
2257 || (GET_CODE (XEXP (x, 0)) == SUBREG
2258 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
2259 ? 0 : 8));
2260 return (1 + ((GET_CODE (XEXP (x, 0)) == REG
2261 || (GET_CODE (XEXP (x, 0)) == SUBREG
2262 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
2263 ? 0 : 4)
2264 + ((GET_CODE (XEXP (x, 1)) == REG
2265 || (GET_CODE (XEXP (x, 1)) == SUBREG
2266 && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
2267 || (GET_CODE (XEXP (x, 1)) == CONST_INT))
2268 ? 0 : 4));
2270 case MINUS:
2271 if (mode == DImode)
2272 return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
2273 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
2274 || (GET_CODE (XEXP (x, 0)) == CONST_INT
2275 && const_ok_for_arm (INTVAL (XEXP (x, 0)))))
2276 ? 0 : 8));
2278 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2279 return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2280 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
2281 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
2282 ? 0 : 8)
2283 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
2284 || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
2285 && const_double_rtx_ok_for_fpu (XEXP (x, 0))))
2286 ? 0 : 8));
2288 if (((GET_CODE (XEXP (x, 0)) == CONST_INT
2289 && const_ok_for_arm (INTVAL (XEXP (x, 0)))
2290 && REG_OR_SUBREG_REG (XEXP (x, 1))))
2291 || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
2292 || subcode == ASHIFTRT || subcode == LSHIFTRT
2293 || subcode == ROTATE || subcode == ROTATERT
2294 || (subcode == MULT
2295 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
2296 && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
2297 (INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
2298 && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
2299 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
2300 || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
2301 && REG_OR_SUBREG_REG (XEXP (x, 0))))
2302 return 1;
2303 /* Fall through */
2305 case PLUS:
2306 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2307 return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
2308 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2309 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
2310 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
2311 ? 0 : 8));
2313 /* Fall through */
2314 case AND: case XOR: case IOR:
2315 extra_cost = 0;
2317 /* Normally the frame registers will be split into reg+const during
2318 reload, so it is a bad idea to combine them with other instructions,
2319 since then they might not be moved outside of loops. As a compromise
2320 we allow integration with ops that have a constant as their second
2321 operand. */
2322 if ((REG_OR_SUBREG_REG (XEXP (x, 0))
2323 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
2324 && GET_CODE (XEXP (x, 1)) != CONST_INT)
2325 || (REG_OR_SUBREG_REG (XEXP (x, 1))
2326 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 1)))))
2327 extra_cost = 4;
2329 if (mode == DImode)
2330 return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
2331 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2332 || (GET_CODE (XEXP (x, 1)) == CONST_INT
2333 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
2334 ? 0 : 8));
2336 if (REG_OR_SUBREG_REG (XEXP (x, 0)))
2337 return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
2338 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2339 || (GET_CODE (XEXP (x, 1)) == CONST_INT
2340 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
2341 ? 0 : 4));
2343 else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
2344 return (1 + extra_cost
2345 + ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
2346 || subcode == LSHIFTRT || subcode == ASHIFTRT
2347 || subcode == ROTATE || subcode == ROTATERT
2348 || (subcode == MULT
2349 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2350 && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
2351 (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0)))
2352 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
2353 && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
2354 || GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
2355 ? 0 : 4));
2357 return 8;
2359 case MULT:
2360 /* There is no point basing this on the tuning, since it is always the
2361 fast variant if it exists at all. */
2362 if (arm_fast_multiply && mode == DImode
2363 && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
2364 && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
2365 || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
2366 return 8;
2368 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2369 || mode == DImode)
2370 return 30;
2372 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
2374 unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
2375 & HOST_UINT (0xffffffff));
2376 int add_cost = const_ok_for_arm (i) ? 4 : 8;
2377 int j;
2379 /* Tune as appropriate. */
2380 int booth_unit_size = ((tune_flags & FL_FAST_MULT) ? 8 : 2);
2382 for (j = 0; i && j < 32; j += booth_unit_size)
2384 i >>= booth_unit_size;
2385 add_cost += 2;
2388 return add_cost;
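/* Worked example (illustrative): a multiply by 0xff on a core with
   FL_FAST_MULT needs one 8-bit Booth step, so the cost computed above
   is 4 (0xff is const_ok_for_arm) + 2 = 6.  */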
2391 return (((tune_flags & FL_FAST_MULT) ? 8 : 30)
2392 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
2393 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));
2395 case TRUNCATE:
2396 if (arm_fast_multiply && mode == SImode
2397 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
2398 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
2399 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
2400 == GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)))
2401 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ZERO_EXTEND
2402 || GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SIGN_EXTEND))
2403 return 8;
2404 return 99;
2406 case NEG:
2407 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2408 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
2409 /* Fall through */
2410 case NOT:
2411 if (mode == DImode)
2412 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
2414 return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
2416 case IF_THEN_ELSE:
2417 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
2418 return 14;
2419 return 2;
2421 case COMPARE:
2422 return 1;
2424 case ABS:
2425 return 4 + (mode == DImode ? 4 : 0);
2427 case SIGN_EXTEND:
2428 if (GET_MODE (XEXP (x, 0)) == QImode)
2429 return (4 + (mode == DImode ? 4 : 0)
2430 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2431 /* Fall through */
2432 case ZERO_EXTEND:
2433 switch (GET_MODE (XEXP (x, 0)))
2435 case QImode:
2436 return (1 + (mode == DImode ? 4 : 0)
2437 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2439 case HImode:
2440 return (4 + (mode == DImode ? 4 : 0)
2441 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2443 case SImode:
2444 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2446 default:
2447 break;
2449 abort ();
2451 case CONST_INT:
2452 if (const_ok_for_arm (INTVAL (x)))
2453 return outer == SET ? 2 : -1;
2454 else if (outer == AND
2455 && const_ok_for_arm (~INTVAL (x)))
2456 return -1;
2457 else if ((outer == COMPARE
2458 || outer == PLUS || outer == MINUS)
2459 && const_ok_for_arm (-INTVAL (x)))
2460 return -1;
2461 else
2462 return 5;
2464 case CONST:
2465 case LABEL_REF:
2466 case SYMBOL_REF:
2467 return 6;
2469 case CONST_DOUBLE:
2470 if (const_double_rtx_ok_for_fpu (x))
2471 return outer == SET ? 2 : -1;
2472 else if ((outer == COMPARE || outer == PLUS)
2473 && neg_const_double_rtx_ok_for_fpu (x))
2474 return -1;
2475 return 7;
2477 default:
2478 return 99;
2483 arm_adjust_cost (insn, link, dep, cost)
2484 rtx insn;
2485 rtx link;
2486 rtx dep;
2487 int cost;
2489 rtx i_pat, d_pat;
2491 /* Some true dependencies can have a higher cost depending
2492 on precisely how certain input operands are used. */
2493 if (arm_is_xscale
2494 && REG_NOTE_KIND (link) == 0
2495 && recog_memoized (insn) >= 0
2496 && recog_memoized (dep) >= 0)
2498 int shift_opnum = get_attr_shift (insn);
2499 enum attr_type attr_type = get_attr_type (dep);
2501 /* If nonzero, SHIFT_OPNUM contains the operand number of a shifted
2502 operand for INSN. If we have a shifted input operand and the
2503 instruction we depend on is another ALU instruction, then we may
2504 have to account for an additional stall. */
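/* For example (illustrative):

	mov	r1, #10			@ DEP: writes r1
	add	r0, r2, r1, lsl #2	@ INSN: uses r1 in the shifter

   On XScale the shifted operand is needed one cycle earlier than a
   plain ALU operand, so the dependency below is costed at 2.  */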
2505 if (shift_opnum != 0 && attr_type == TYPE_NORMAL)
2507 rtx shifted_operand;
2508 int opno;
2510 /* Get the shifted operand. */
2511 extract_insn (insn);
2512 shifted_operand = recog_data.operand[shift_opnum];
2514 /* Iterate over all the operands in DEP. If we write an operand
2515 that overlaps with SHIFTED_OPERAND, then we have to increase the
2516 cost of this dependency. */
2517 extract_insn (dep);
2518 preprocess_constraints ();
2519 for (opno = 0; opno < recog_data.n_operands; opno++)
2521 /* We can ignore strict inputs. */
2522 if (recog_data.operand_type[opno] == OP_IN)
2523 continue;
2525 if (reg_overlap_mentioned_p (recog_data.operand[opno],
2526 shifted_operand))
2527 return 2;
2532 /* XXX This is not strictly true for the FPA. */
2533 if (REG_NOTE_KIND (link) == REG_DEP_ANTI
2534 || REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
2535 return 0;
2537 /* Call insns don't incur a stall, even if they follow a load. */
2538 if (REG_NOTE_KIND (link) == 0
2539 && GET_CODE (insn) == CALL_INSN)
2540 return 1;
2542 if ((i_pat = single_set (insn)) != NULL
2543 && GET_CODE (SET_SRC (i_pat)) == MEM
2544 && (d_pat = single_set (dep)) != NULL
2545 && GET_CODE (SET_DEST (d_pat)) == MEM)
2547 /* This is a load after a store, there is no conflict if the load reads
2548 from a cached area. Assume that loads from the stack, and from the
2549 constant pool are cached, and that others will miss. This is a
2550 hack. */
2552 if (CONSTANT_POOL_ADDRESS_P (XEXP (SET_SRC (i_pat), 0))
2553 || reg_mentioned_p (stack_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
2554 || reg_mentioned_p (frame_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
2555 || reg_mentioned_p (hard_frame_pointer_rtx,
2556 XEXP (SET_SRC (i_pat), 0)))
2557 return 1;
2560 return cost;
2563 /* This code has been fixed for cross compilation. */
2565 static int fpa_consts_inited = 0;
2567 static const char * strings_fpa[8] =
2569 "0", "1", "2", "3",
2570 "4", "5", "0.5", "10"
2573 static REAL_VALUE_TYPE values_fpa[8];
2575 static void
2576 init_fpa_table ()
2578 int i;
2579 REAL_VALUE_TYPE r;
2581 for (i = 0; i < 8; i++)
2583 r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
2584 values_fpa[i] = r;
2587 fpa_consts_inited = 1;
2590 /* Return TRUE if rtx X is a valid immediate FPU constant. */
2593 const_double_rtx_ok_for_fpu (x)
2594 rtx x;
2596 REAL_VALUE_TYPE r;
2597 int i;
2599 if (!fpa_consts_inited)
2600 init_fpa_table ();
2602 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2603 if (REAL_VALUE_MINUS_ZERO (r))
2604 return 0;
2606 for (i = 0; i < 8; i++)
2607 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
2608 return 1;
2610 return 0;
2613 /* Return TRUE if rtx X is a valid immediate FPU constant when negated. */
2616 neg_const_double_rtx_ok_for_fpu (x)
2617 rtx x;
2619 REAL_VALUE_TYPE r;
2620 int i;
2622 if (!fpa_consts_inited)
2623 init_fpa_table ();
2625 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2626 r = REAL_VALUE_NEGATE (r);
2627 if (REAL_VALUE_MINUS_ZERO (r))
2628 return 0;
2630 for (i = 0; i < 8; i++)
2631 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
2632 return 1;
2634 return 0;
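/* Standalone sketch of the same table check using plain doubles (a
   hypothetical helper, not compiler code; the real routines must use
   REAL_VALUE_TYPE so that cross compilation works).  */
#if 0
static const double fpa_immediates[8] = { 0, 1, 2, 3, 4, 5, 0.5, 10 };

static int
ok_for_fpa_immediate (double d)
{
  int i;

  /* NB: the real test also rejects -0.0, which would compare equal
     to 0.0 here.  */
  for (i = 0; i < 8; i++)
    if (d == fpa_immediates[i])
      return 1;
  return 0;
}
#endif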
2637 /* Predicates for `match_operand' and `match_operator'. */
2639 /* s_register_operand is the same as register_operand, but it doesn't accept
2640 (SUBREG (MEM)...).
2642 This function exists because at the time it was put in it led to better
2643 code. SUBREG(MEM) always needs a reload in the places where
2644 s_register_operand is used, and this seemed to lead to excessive
2645 reloading. */
2648 s_register_operand (op, mode)
2649 register rtx op;
2650 enum machine_mode mode;
2652 if (GET_MODE (op) != mode && mode != VOIDmode)
2653 return 0;
2655 if (GET_CODE (op) == SUBREG)
2656 op = SUBREG_REG (op);
2658 /* We don't consider registers whose class is NO_REGS
2659 to be a register operand. */
2660 /* XXX might have to check for lo regs only for thumb ??? */
2661 return (GET_CODE (op) == REG
2662 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
2663 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
2666 /* Only accept reg, subreg(reg), const_int. */
2669 reg_or_int_operand (op, mode)
2670 register rtx op;
2671 enum machine_mode mode;
2673 if (GET_CODE (op) == CONST_INT)
2674 return 1;
2676 if (GET_MODE (op) != mode && mode != VOIDmode)
2677 return 0;
2679 if (GET_CODE (op) == SUBREG)
2680 op = SUBREG_REG (op);
2682 /* We don't consider registers whose class is NO_REGS
2683 to be a register operand. */
2684 return (GET_CODE (op) == REG
2685 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
2686 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
2689 /* Return 1 if OP is an item in memory, given that we are in reload. */
2692 arm_reload_memory_operand (op, mode)
2693 rtx op;
2694 enum machine_mode mode ATTRIBUTE_UNUSED;
2696 int regno = true_regnum (op);
2698 return (!CONSTANT_P (op)
2699 && (regno == -1
2700 || (GET_CODE (op) == REG
2701 && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2704 /* Return 1 if OP is a valid memory address, but not valid for a signed byte
2705 memory access (architecture V4).
2706 MODE is QImode if called when computing constraints, or VOIDmode when
2707 emitting patterns. In this latter case we cannot use memory_operand()
2708 because it will fail on badly formed MEMs, which is precisely what we are
2709 trying to catch. */
2711 bad_signed_byte_operand (op, mode)
2712 rtx op;
2713 enum machine_mode mode ATTRIBUTE_UNUSED;
2715 #if 0
2716 if ((mode == QImode && !memory_operand (op, mode)) || GET_CODE (op) != MEM)
2717 return 0;
2718 #endif
2719 if (GET_CODE (op) != MEM)
2720 return 0;
2722 op = XEXP (op, 0);
2724 /* A sum of anything more complex than reg + reg or reg + const is bad. */
2725 if ((GET_CODE (op) == PLUS || GET_CODE (op) == MINUS)
2726 && (!s_register_operand (XEXP (op, 0), VOIDmode)
2727 || (!s_register_operand (XEXP (op, 1), VOIDmode)
2728 && GET_CODE (XEXP (op, 1)) != CONST_INT)))
2729 return 1;
2731 /* Big constants are also bad. */
2732 if (GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT
2733 && (INTVAL (XEXP (op, 1)) > 0xff
2734 || -INTVAL (XEXP (op, 1)) > 0xff))
2735 return 1;
2737 /* Everything else is good, or will automatically be made so. */
2738 return 0;
2741 /* Return TRUE for valid operands for the rhs of an ARM instruction. */
2744 arm_rhs_operand (op, mode)
2745 rtx op;
2746 enum machine_mode mode;
2748 return (s_register_operand (op, mode)
2749 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
2752 /* Return TRUE for valid operands for the rhs of an ARM instruction, or a load. */
2756 arm_rhsm_operand (op, mode)
2757 rtx op;
2758 enum machine_mode mode;
2760 return (s_register_operand (op, mode)
2761 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
2762 || memory_operand (op, mode));
2765 /* Return TRUE for valid operands for the rhs of an ARM instruction, or a
2766 constant that is valid when negated. */
2769 arm_add_operand (op, mode)
2770 rtx op;
2771 enum machine_mode mode;
2773 if (TARGET_THUMB)
2774 return thumb_cmp_operand (op, mode);
2776 return (s_register_operand (op, mode)
2777 || (GET_CODE (op) == CONST_INT
2778 && (const_ok_for_arm (INTVAL (op))
2779 || const_ok_for_arm (-INTVAL (op)))));
2783 arm_not_operand (op, mode)
2784 rtx op;
2785 enum machine_mode mode;
2787 return (s_register_operand (op, mode)
2788 || (GET_CODE (op) == CONST_INT
2789 && (const_ok_for_arm (INTVAL (op))
2790 || const_ok_for_arm (~INTVAL (op)))));
2793 /* Return TRUE if the operand is a memory reference which contains an
2794 offsettable address. */
2796 offsettable_memory_operand (op, mode)
2797 register rtx op;
2798 enum machine_mode mode;
2800 if (mode == VOIDmode)
2801 mode = GET_MODE (op);
2803 return (mode == GET_MODE (op)
2804 && GET_CODE (op) == MEM
2805 && offsettable_address_p (reload_completed | reload_in_progress,
2806 mode, XEXP (op, 0)));
2809 /* Return TRUE if the operand is a memory reference which is word
2810 aligned, or can be made so by adjusting the offset. */
2812 alignable_memory_operand (op, mode)
2813 register rtx op;
2814 enum machine_mode mode;
2816 rtx reg;
2818 if (mode == VOIDmode)
2819 mode = GET_MODE (op);
2821 if (mode != GET_MODE (op) || GET_CODE (op) != MEM)
2822 return 0;
2824 op = XEXP (op, 0);
2826 return ((GET_CODE (reg = op) == REG
2827 || (GET_CODE (op) == SUBREG
2828 && GET_CODE (reg = SUBREG_REG (op)) == REG)
2829 || (GET_CODE (op) == PLUS
2830 && GET_CODE (XEXP (op, 1)) == CONST_INT
2831 && (GET_CODE (reg = XEXP (op, 0)) == REG
2832 || (GET_CODE (XEXP (op, 0)) == SUBREG
2833 && GET_CODE (reg = SUBREG_REG (XEXP (op, 0))) == REG))))
2834 && REGNO_POINTER_ALIGN (REGNO (reg)) >= 32);
2837 /* Similar to s_register_operand, but only accepts pseudos or hard
2838 registers in class FPU_REGS. */
2840 f_register_operand (op, mode)
2841 register rtx op;
2842 enum machine_mode mode;
2844 if (GET_MODE (op) != mode && mode != VOIDmode)
2845 return 0;
2847 if (GET_CODE (op) == SUBREG)
2848 op = SUBREG_REG (op);
2850 /* We don't consider registers whose class is NO_REGS
2851 to be a register operand. */
2852 return (GET_CODE (op) == REG
2853 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
2854 || REGNO_REG_CLASS (REGNO (op)) == FPU_REGS));
2857 /* Return TRUE for valid operands for the rhs of an FPU instruction. */
2860 fpu_rhs_operand (op, mode)
2861 rtx op;
2862 enum machine_mode mode;
2864 if (s_register_operand (op, mode))
2865 return TRUE;
2867 if (GET_MODE (op) != mode && mode != VOIDmode)
2868 return FALSE;
2870 if (GET_CODE (op) == CONST_DOUBLE)
2871 return const_double_rtx_ok_for_fpu (op);
2873 return FALSE;
2877 fpu_add_operand (op, mode)
2878 rtx op;
2879 enum machine_mode mode;
2881 if (s_register_operand (op, mode))
2882 return TRUE;
2884 if (GET_MODE (op) != mode && mode != VOIDmode)
2885 return FALSE;
2887 if (GET_CODE (op) == CONST_DOUBLE)
2888 return (const_double_rtx_ok_for_fpu (op)
2889 || neg_const_double_rtx_ok_for_fpu (op));
2891 return FALSE;
2894 /* Return nonzero if OP is a constant power of two. */
2897 power_of_two_operand (op, mode)
2898 rtx op;
2899 enum machine_mode mode ATTRIBUTE_UNUSED;
2901 if (GET_CODE (op) == CONST_INT)
2903 HOST_WIDE_INT value = INTVAL (op);
2904 return value != 0 && (value & (value - 1)) == 0;
2906 return FALSE;
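/* The test above uses the usual bit trick: a power of two has exactly
   one bit set, so clearing the lowest set bit (VALUE & (VALUE - 1))
   must leave zero.  E.g. 8 & 7 == 0, but 12 & 11 == 8.  The VALUE != 0
   guard is needed because 0 & -1 == 0 as well.  */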
2909 /* Return TRUE for a valid operand of a DImode operation.
2910 Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
2911 Note that this disallows MEM(REG+REG), but allows
2912 MEM(PRE/POST_INC/DEC(REG)). */
2915 di_operand (op, mode)
2916 rtx op;
2917 enum machine_mode mode;
2919 if (s_register_operand (op, mode))
2920 return TRUE;
2922 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
2923 return FALSE;
2925 if (GET_CODE (op) == SUBREG)
2926 op = SUBREG_REG (op);
2928 switch (GET_CODE (op))
2930 case CONST_DOUBLE:
2931 case CONST_INT:
2932 return TRUE;
2934 case MEM:
2935 return memory_address_p (DImode, XEXP (op, 0));
2937 default:
2938 return FALSE;
2942 /* Like di_operand, but don't accept constants. */
2944 nonimmediate_di_operand (op, mode)
2945 rtx op;
2946 enum machine_mode mode;
2948 if (s_register_operand (op, mode))
2949 return TRUE;
2951 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
2952 return FALSE;
2954 if (GET_CODE (op) == SUBREG)
2955 op = SUBREG_REG (op);
2957 if (GET_CODE (op) == MEM)
2958 return memory_address_p (DImode, XEXP (op, 0));
2960 return FALSE;
2963 /* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
2964 Either: REG, SUBREG, CONST_DOUBLE or MEM(DFmode_address).
2965 Note that this disallows MEM(REG+REG), but allows
2966 MEM(PRE/POST_INC/DEC(REG)). */
2969 soft_df_operand (op, mode)
2970 rtx op;
2971 enum machine_mode mode;
2973 if (s_register_operand (op, mode))
2974 return TRUE;
2976 if (mode != VOIDmode && GET_MODE (op) != mode)
2977 return FALSE;
2979 if (GET_CODE (op) == SUBREG && CONSTANT_P (SUBREG_REG (op)))
2980 return FALSE;
2982 if (GET_CODE (op) == SUBREG)
2983 op = SUBREG_REG (op);
2985 switch (GET_CODE (op))
2987 case CONST_DOUBLE:
2988 return TRUE;
2990 case MEM:
2991 return memory_address_p (DFmode, XEXP (op, 0));
2993 default:
2994 return FALSE;
2998 /* Like soft_df_operand, but don't accept constants. */
3000 nonimmediate_soft_df_operand (op, mode)
3001 rtx op;
3002 enum machine_mode mode;
3004 if (s_register_operand (op, mode))
3005 return TRUE;
3007 if (mode != VOIDmode && GET_MODE (op) != mode)
3008 return FALSE;
3010 if (GET_CODE (op) == SUBREG)
3011 op = SUBREG_REG (op);
3013 if (GET_CODE (op) == MEM)
3014 return memory_address_p (DFmode, XEXP (op, 0));
3015 return FALSE;
3018 /* Return TRUE for valid index operands. */
3020 index_operand (op, mode)
3021 rtx op;
3022 enum machine_mode mode;
3024 return (s_register_operand (op, mode)
3025 || (immediate_operand (op, mode)
3026 && (GET_CODE (op) != CONST_INT
3027 || (INTVAL (op) < 4096 && INTVAL (op) > -4096))));
3030 /* Return TRUE for valid shifts by a constant. This also accepts any
3031 power of two on the (somewhat overly relaxed) assumption that the
3032 shift operator in this case was a mult. */
3035 const_shift_operand (op, mode)
3036 rtx op;
3037 enum machine_mode mode;
3039 return (power_of_two_operand (op, mode)
3040 || (immediate_operand (op, mode)
3041 && (GET_CODE (op) != CONST_INT
3042 || (INTVAL (op) < 32 && INTVAL (op) > 0))));
3045 /* Return TRUE for arithmetic operators which can be combined with a multiply
3046 (shift). */
3049 shiftable_operator (x, mode)
3050 rtx x;
3051 enum machine_mode mode;
3053 if (GET_MODE (x) != mode)
3054 return FALSE;
3055 else
3057 enum rtx_code code = GET_CODE (x);
3059 return (code == PLUS || code == MINUS
3060 || code == IOR || code == XOR || code == AND);
3064 /* Return TRUE for binary logical operators. */
3067 logical_binary_operator (x, mode)
3068 rtx x;
3069 enum machine_mode mode;
3071 if (GET_MODE (x) != mode)
3072 return FALSE;
3073 else
3075 enum rtx_code code = GET_CODE (x);
3077 return (code == IOR || code == XOR || code == AND);
3081 /* Return TRUE for shift operators. */
3084 shift_operator (x, mode)
3085 rtx x;
3086 enum machine_mode mode;
3088 if (GET_MODE (x) != mode)
3089 return FALSE;
3090 else
3092 enum rtx_code code = GET_CODE (x);
3094 if (code == MULT)
3095 return power_of_two_operand (XEXP (x, 1), mode);
3097 return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
3098 || code == ROTATERT);
3102 /* Return TRUE if x is EQ or NE. */
3104 equality_operator (x, mode)
3105 rtx x;
3106 enum machine_mode mode ATTRIBUTE_UNUSED;
3108 return GET_CODE (x) == EQ || GET_CODE (x) == NE;
3111 /* Return TRUE if x is a comparison operator other than LTGT or UNEQ. */
3113 arm_comparison_operator (x, mode)
3114 rtx x;
3115 enum machine_mode mode;
3117 return (comparison_operator (x, mode)
3118 && GET_CODE (x) != LTGT
3119 && GET_CODE (x) != UNEQ);
3122 /* Return TRUE for SMIN SMAX UMIN UMAX operators. */
3124 minmax_operator (x, mode)
3125 rtx x;
3126 enum machine_mode mode;
3128 enum rtx_code code = GET_CODE (x);
3130 if (GET_MODE (x) != mode)
3131 return FALSE;
3133 return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
3136 /* Return TRUE if this is the condition code register; if we aren't given
3137 a mode, accept any class CCmode register. */
3139 cc_register (x, mode)
3140 rtx x;
3141 enum machine_mode mode;
3143 if (mode == VOIDmode)
3145 mode = GET_MODE (x);
3147 if (GET_MODE_CLASS (mode) != MODE_CC)
3148 return FALSE;
3151 if ( GET_MODE (x) == mode
3152 && GET_CODE (x) == REG
3153 && REGNO (x) == CC_REGNUM)
3154 return TRUE;
3156 return FALSE;
3159 /* Return TRUE if this is the condition code register; if we aren't given
3160 a mode, accept any class CCmode register which indicates a dominance
3161 expression. */
3163 dominant_cc_register (x, mode)
3164 rtx x;
3165 enum machine_mode mode;
3167 if (mode == VOIDmode)
3169 mode = GET_MODE (x);
3171 if (GET_MODE_CLASS (mode) != MODE_CC)
3172 return FALSE;
3175 if ( mode != CC_DNEmode && mode != CC_DEQmode
3176 && mode != CC_DLEmode && mode != CC_DLTmode
3177 && mode != CC_DGEmode && mode != CC_DGTmode
3178 && mode != CC_DLEUmode && mode != CC_DLTUmode
3179 && mode != CC_DGEUmode && mode != CC_DGTUmode)
3180 return FALSE;
3182 return cc_register (x, mode);
3185 /* Return TRUE if X references a SYMBOL_REF. */
3187 symbol_mentioned_p (x)
3188 rtx x;
3190 register const char * fmt;
3191 register int i;
3193 if (GET_CODE (x) == SYMBOL_REF)
3194 return 1;
3196 fmt = GET_RTX_FORMAT (GET_CODE (x));
3198 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3200 if (fmt[i] == 'E')
3202 register int j;
3204 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3205 if (symbol_mentioned_p (XVECEXP (x, i, j)))
3206 return 1;
3208 else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
3209 return 1;
3212 return 0;
3215 /* Return TRUE if X references a LABEL_REF. */
3217 label_mentioned_p (x)
3218 rtx x;
3220 register const char * fmt;
3221 register int i;
3223 if (GET_CODE (x) == LABEL_REF)
3224 return 1;
3226 fmt = GET_RTX_FORMAT (GET_CODE (x));
3227 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3229 if (fmt[i] == 'E')
3231 register int j;
3233 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3234 if (label_mentioned_p (XVECEXP (x, i, j)))
3235 return 1;
3237 else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
3238 return 1;
3241 return 0;
3244 enum rtx_code
3245 minmax_code (x)
3246 rtx x;
3248 enum rtx_code code = GET_CODE (x);
3250 if (code == SMAX)
3251 return GE;
3252 else if (code == SMIN)
3253 return LE;
3254 else if (code == UMIN)
3255 return LEU;
3256 else if (code == UMAX)
3257 return GEU;
3259 abort ();
3262 /* Return 1 if memory locations are adjacent. */
3264 adjacent_mem_locations (a, b)
3265 rtx a, b;
3267 int val0 = 0, val1 = 0;
3268 int reg0, reg1;
3270 if ((GET_CODE (XEXP (a, 0)) == REG
3271 || (GET_CODE (XEXP (a, 0)) == PLUS
3272 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
3273 && (GET_CODE (XEXP (b, 0)) == REG
3274 || (GET_CODE (XEXP (b, 0)) == PLUS
3275 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
3277 if (GET_CODE (XEXP (a, 0)) == PLUS)
3279 reg0 = REGNO (XEXP (XEXP (a, 0), 0));
3280 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
3282 else
3283 reg0 = REGNO (XEXP (a, 0));
3284 if (GET_CODE (XEXP (b, 0)) == PLUS)
3286 reg1 = REGNO (XEXP (XEXP (b, 0), 0));
3287 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
3289 else
3290 reg1 = REGNO (XEXP (b, 0));
3291 return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
3293 return 0;
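/* Example (illustrative): (mem (plus (reg r3) (const_int 4))) and
   (mem (plus (reg r3) (const_int 8))) are adjacent, as are
   (mem (reg r3)) and (mem (plus (reg r3) (const_int 4))), in either
   order.  */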
3296 /* Return 1 if OP is a load multiple operation. It is known to be a
3297 PARALLEL and the first section will be tested. */
3299 load_multiple_operation (op, mode)
3300 rtx op;
3301 enum machine_mode mode ATTRIBUTE_UNUSED;
3303 HOST_WIDE_INT count = XVECLEN (op, 0);
3304 int dest_regno;
3305 rtx src_addr;
3306 HOST_WIDE_INT i = 1, base = 0;
3307 rtx elt;
3309 if (count <= 1
3310 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
3311 return 0;
3313 /* Check to see if this might be a write-back. */
3314 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
3316 i++;
3317 base = 1;
3319 /* Now check it more carefully. */
3320 if (GET_CODE (SET_DEST (elt)) != REG
3321 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
3322 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
3323 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
3324 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
3325 return 0;
3328 /* Perform a quick check so we don't blow up below. */
3329 if (count <= i
3330 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
3331 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
3332 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
3333 return 0;
3335 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
3336 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);
3338 for (; i < count; i++)
3340 elt = XVECEXP (op, 0, i);
3342 if (GET_CODE (elt) != SET
3343 || GET_CODE (SET_DEST (elt)) != REG
3344 || GET_MODE (SET_DEST (elt)) != SImode
3345 || REGNO (SET_DEST (elt)) != (unsigned int)(dest_regno + i - base)
3346 || GET_CODE (SET_SRC (elt)) != MEM
3347 || GET_MODE (SET_SRC (elt)) != SImode
3348 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
3349 || !rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
3350 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
3351 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
3352 return 0;
3355 return 1;
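/* Illustrative shape of the RTL accepted above, for an
   "ldmia r1, {r4, r5}" with no write-back:

   (parallel
     [(set (reg:SI 4) (mem:SI (reg:SI 1)))
      (set (reg:SI 5) (mem:SI (plus:SI (reg:SI 1) (const_int 4))))])

   The destination registers must ascend consecutively and the
   offsets must step by 4.  */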
3358 /* Return 1 if OP is a store multiple operation. It is known to be a
3359 PARALLEL and the first section will be tested. */
3361 store_multiple_operation (op, mode)
3362 rtx op;
3363 enum machine_mode mode ATTRIBUTE_UNUSED;
3365 HOST_WIDE_INT count = XVECLEN (op, 0);
3366 int src_regno;
3367 rtx dest_addr;
3368 HOST_WIDE_INT i = 1, base = 0;
3369 rtx elt;
3371 if (count <= 1
3372 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
3373 return 0;
3375 /* Check to see if this might be a write-back. */
3376 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
3378 i++;
3379 base = 1;
3381 /* Now check it more carefully. */
3382 if (GET_CODE (SET_DEST (elt)) != REG
3383 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
3384 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
3385 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
3386 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
3387 return 0;
3390 /* Perform a quick check so we don't blow up below. */
3391 if (count <= i
3392 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
3393 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
3394 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
3395 return 0;
3397 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
3398 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);
3400 for (; i < count; i++)
3402 elt = XVECEXP (op, 0, i);
3404 if (GET_CODE (elt) != SET
3405 || GET_CODE (SET_SRC (elt)) != REG
3406 || GET_MODE (SET_SRC (elt)) != SImode
3407 || REGNO (SET_SRC (elt)) != (unsigned int)(src_regno + i - base)
3408 || GET_CODE (SET_DEST (elt)) != MEM
3409 || GET_MODE (SET_DEST (elt)) != SImode
3410 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
3411 || !rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
3412 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
3413 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
3414 return 0;
3417 return 1;
3421 load_multiple_sequence (operands, nops, regs, base, load_offset)
3422 rtx * operands;
3423 int nops;
3424 int * regs;
3425 int * base;
3426 HOST_WIDE_INT * load_offset;
3428 int unsorted_regs[4];
3429 HOST_WIDE_INT unsorted_offsets[4];
3430 int order[4];
3431 int base_reg = -1;
3432 int i;
3434 /* Can only handle 2, 3, or 4 insns at present, though could be easily
3435 extended if required. */
3436 if (nops < 2 || nops > 4)
3437 abort ();
3439 /* Loop over the operands and check that the memory references are
3440 suitable (i.e. immediate offsets from the same base register). At
3441 the same time, extract the target register and the memory
3442 offsets. */
3443 for (i = 0; i < nops; i++)
3445 rtx reg;
3446 rtx offset;
3448 /* Convert a subreg of a mem into the mem itself. */
3449 if (GET_CODE (operands[nops + i]) == SUBREG)
3450 operands[nops + i] = alter_subreg (operands[nops + i]);
3452 if (GET_CODE (operands[nops + i]) != MEM)
3453 abort ();
3455 /* Don't reorder volatile memory references; it doesn't seem worth
3456 looking for the case where the order is ok anyway. */
3457 if (MEM_VOLATILE_P (operands[nops + i]))
3458 return 0;
3460 offset = const0_rtx;
3462 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
3463 || (GET_CODE (reg) == SUBREG
3464 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3465 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
3466 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
3467 == REG)
3468 || (GET_CODE (reg) == SUBREG
3469 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3470 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
3471 == CONST_INT)))
3473 if (i == 0)
3475 base_reg = REGNO (reg);
3476 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
3477 ? REGNO (operands[i])
3478 : REGNO (SUBREG_REG (operands[i])));
3479 order[0] = 0;
3481 else
3483 if (base_reg != (int) REGNO (reg))
3484 /* Not addressed from the same base register. */
3485 return 0;
3487 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
3488 ? REGNO (operands[i])
3489 : REGNO (SUBREG_REG (operands[i])));
3490 if (unsorted_regs[i] < unsorted_regs[order[0]])
3491 order[0] = i;
3494 /* If it isn't an integer register, or if it overwrites the
3495 base register but isn't the last insn in the list, then
3496 we can't do this. */
3497 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14
3498 || (i != nops - 1 && unsorted_regs[i] == base_reg))
3499 return 0;
3501 unsorted_offsets[i] = INTVAL (offset);
3503 else
3504 /* Not a suitable memory address. */
3505 return 0;
3508 /* All the useful information has now been extracted from the
3509 operands into unsorted_regs and unsorted_offsets; additionally,
3510 order[0] has been set to the lowest numbered register in the
3511 list. Sort the registers into order, and check that the memory
3512 offsets are ascending and adjacent. */
3514 for (i = 1; i < nops; i++)
3516 int j;
3518 order[i] = order[i - 1];
3519 for (j = 0; j < nops; j++)
3520 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
3521 && (order[i] == order[i - 1]
3522 || unsorted_regs[j] < unsorted_regs[order[i]]))
3523 order[i] = j;
3525 /* Have we found a suitable register? If not, one must be used more
3526 than once. */
3527 if (order[i] == order[i - 1])
3528 return 0;
3530 /* Are the memory offsets adjacent and ascending? */
3531 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
3532 return 0;
3535 if (base)
3537 *base = base_reg;
3539 for (i = 0; i < nops; i++)
3540 regs[i] = unsorted_regs[order[i]];
3542 *load_offset = unsorted_offsets[order[0]];
3545 if (unsorted_offsets[order[0]] == 0)
3546 return 1; /* ldmia */
3548 if (unsorted_offsets[order[0]] == 4)
3549 return 2; /* ldmib */
3551 if (unsorted_offsets[order[nops - 1]] == 0)
3552 return 3; /* ldmda */
3554 if (unsorted_offsets[order[nops - 1]] == -4)
3555 return 4; /* ldmdb */
3557 /* For the ARM8, ARM9 and StrongARM, 2 ldr instructions are faster than an ldm
3558 if the offset isn't small enough. The reason 2 ldrs are faster
3559 is because these ARMs are able to do more than one cache access
3560 in a single cycle. The ARM9 and StrongARM have Harvard caches,
3561 whilst the ARM8 has a double bandwidth cache. This means that
3562 these cores can do both an instruction fetch and a data fetch in
3563 a single cycle, so the trick of calculating the address into a
3564 scratch register (one of the result regs) and then doing a load
3565 multiple actually becomes slower (and no smaller in code size).
3566 That is the transformation
3568 ldr rd1, [rbase + offset]
3569 ldr rd2, [rbase + offset + 4]
3571 to
3573 add rd1, rbase, offset
3574 ldmia rd1, {rd1, rd2}
3576 produces worse code -- '3 cycles + any stalls on rd2' instead of
3577 '2 cycles + any stalls on rd2'. On ARMs with only one cache
3578 access per cycle, the first sequence could never complete in less
3579 than 6 cycles, whereas the ldm sequence would only take 5 and
3580 would make better use of sequential accesses if not hitting the
3581 cache.
3583 We cheat here and test 'arm_ld_sched' which we currently know to
3584 only be true for the ARM8, ARM9 and StrongARM. If this ever
3585 changes, then the test below needs to be reworked. */
3586 if (nops == 2 && arm_ld_sched)
3587 return 0;
3589 /* Can't do it without setting up the offset, so only do this if that
3590 takes no more than one insn. */
3591 return (const_ok_for_arm (unsorted_offsets[order[0]])
3592 || const_ok_for_arm (-unsorted_offsets[order[0]])) ? 5 : 0;
3595 const char *
3596 emit_ldm_seq (operands, nops)
3597 rtx * operands;
3598 int nops;
3600 int regs[4];
3601 int base_reg;
3602 HOST_WIDE_INT offset;
3603 char buf[100];
3604 int i;
3606 switch (load_multiple_sequence (operands, nops, regs, &base_reg, &offset))
3608 case 1:
3609 strcpy (buf, "ldm%?ia\t");
3610 break;
3612 case 2:
3613 strcpy (buf, "ldm%?ib\t");
3614 break;
3616 case 3:
3617 strcpy (buf, "ldm%?da\t");
3618 break;
3620 case 4:
3621 strcpy (buf, "ldm%?db\t");
3622 break;
3624 case 5:
3625 if (offset >= 0)
3626 sprintf (buf, "add%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
3627 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
3628 (long) offset);
3629 else
3630 sprintf (buf, "sub%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
3631 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
3632 (long) -offset);
3633 output_asm_insn (buf, operands);
3634 base_reg = regs[0];
3635 strcpy (buf, "ldm%?ia\t");
3636 break;
3638 default:
3639 abort ();
3642 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
3643 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
3645 for (i = 1; i < nops; i++)
3646 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
3647 reg_names[regs[i]]);
3649 strcat (buf, "}\t%@ phole ldm");
3651 output_asm_insn (buf, operands);
3652 return "";
3656 store_multiple_sequence (operands, nops, regs, base, load_offset)
3657 rtx * operands;
3658 int nops;
3659 int * regs;
3660 int * base;
3661 HOST_WIDE_INT * load_offset;
3663 int unsorted_regs[4];
3664 HOST_WIDE_INT unsorted_offsets[4];
3665 int order[4];
3666 int base_reg = -1;
3667 int i;
3669 /* Can only handle 2, 3, or 4 insns at present, though could be easily
3670 extended if required. */
3671 if (nops < 2 || nops > 4)
3672 abort ();
3674 /* Loop over the operands and check that the memory references are
3675 suitable (i.e. immediate offsets from the same base register). At
3676 the same time, extract the target register and the memory
3677 offsets. */
3678 for (i = 0; i < nops; i++)
3680 rtx reg;
3681 rtx offset;
3683 /* Convert a subreg of a mem into the mem itself. */
3684 if (GET_CODE (operands[nops + i]) == SUBREG)
3685 operands[nops + i] = alter_subreg (operands[nops + i]);
3687 if (GET_CODE (operands[nops + i]) != MEM)
3688 abort ();
3690 /* Don't reorder volatile memory references; it doesn't seem worth
3691 looking for the case where the order is ok anyway. */
3692 if (MEM_VOLATILE_P (operands[nops + i]))
3693 return 0;
3695 offset = const0_rtx;
3697 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
3698 || (GET_CODE (reg) == SUBREG
3699 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3700 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
3701 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
3702 == REG)
3703 || (GET_CODE (reg) == SUBREG
3704 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3705 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
3706 == CONST_INT)))
3708 if (i == 0)
3710 base_reg = REGNO (reg);
3711 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
3712 ? REGNO (operands[i])
3713 : REGNO (SUBREG_REG (operands[i])));
3714 order[0] = 0;
3716 else
3718 if (base_reg != (int) REGNO (reg))
3719 /* Not addressed from the same base register. */
3720 return 0;
3722 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
3723 ? REGNO (operands[i])
3724 : REGNO (SUBREG_REG (operands[i])));
3725 if (unsorted_regs[i] < unsorted_regs[order[0]])
3726 order[0] = i;
3729 /* If it isn't an integer register, then we can't do this. */
3730 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14)
3731 return 0;
3733 unsorted_offsets[i] = INTVAL (offset);
3735 else
3736 /* Not a suitable memory address. */
3737 return 0;
3740 /* All the useful information has now been extracted from the
3741 operands into unsorted_regs and unsorted_offsets; additionally,
3742 order[0] has been set to the lowest numbered register in the
3743 list. Sort the registers into order, and check that the memory
3744 offsets are ascending and adjacent. */
3746 for (i = 1; i < nops; i++)
3748 int j;
3750 order[i] = order[i - 1];
3751 for (j = 0; j < nops; j++)
3752 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
3753 && (order[i] == order[i - 1]
3754 || unsorted_regs[j] < unsorted_regs[order[i]]))
3755 order[i] = j;
3757 /* Have we found a suitable register? If not, one must be used more
3758 than once. */
3759 if (order[i] == order[i - 1])
3760 return 0;
3762 /* Are the memory offsets adjacent and ascending? */
3763 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
3764 return 0;
3767 if (base)
3769 *base = base_reg;
3771 for (i = 0; i < nops; i++)
3772 regs[i] = unsorted_regs[order[i]];
3774 *load_offset = unsorted_offsets[order[0]];
3777 if (unsorted_offsets[order[0]] == 0)
3778 return 1; /* stmia */
3780 if (unsorted_offsets[order[0]] == 4)
3781 return 2; /* stmib */
3783 if (unsorted_offsets[order[nops - 1]] == 0)
3784 return 3; /* stmda */
3786 if (unsorted_offsets[order[nops - 1]] == -4)
3787 return 4; /* stmdb */
3789 return 0;
3792 const char *
3793 emit_stm_seq (operands, nops)
3794 rtx * operands;
3795 int nops;
3797 int regs[4];
3798 int base_reg;
3799 HOST_WIDE_INT offset;
3800 char buf[100];
3801 int i;
3803 switch (store_multiple_sequence (operands, nops, regs, &base_reg, &offset))
3805 case 1:
3806 strcpy (buf, "stm%?ia\t");
3807 break;
3809 case 2:
3810 strcpy (buf, "stm%?ib\t");
3811 break;
3813 case 3:
3814 strcpy (buf, "stm%?da\t");
3815 break;
3817 case 4:
3818 strcpy (buf, "stm%?db\t");
3819 break;
3821 default:
3822 abort ();
3825 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
3826 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
3828 for (i = 1; i < nops; i++)
3829 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
3830 reg_names[regs[i]]);
3832 strcat (buf, "}\t%@ phole stm");
3834 output_asm_insn (buf, operands);
3835 return "";
3839 multi_register_push (op, mode)
3840 rtx op;
3841 enum machine_mode mode ATTRIBUTE_UNUSED;
3843 if (GET_CODE (op) != PARALLEL
3844 || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
3845 || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
3846 || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != 2))
3847 return 0;
3849 return 1;
3852 /* Routines for use with attributes. */
3854 /* Return nonzero if ATTR is a valid attribute for DECL.
3855 ARGS are the arguments supplied with ATTR.
3858 Supported attributes:
3860 naked:
3861 don't output any prologue or epilogue code; the user is assumed
3862 to do the right thing.
3864 interfacearm:
3865 Always assume that this function will be entered in ARM mode,
3866 not Thumb mode, and that the caller wishes to be returned to in
3867 ARM mode. */
3869 arm_valid_machine_decl_attribute (decl, attr, args)
3870 tree decl;
3871 tree attr;
3872 tree args;
3874 if (args != NULL_TREE)
3875 return 0;
3877 if (is_attribute_p ("naked", attr))
3878 return TREE_CODE (decl) == FUNCTION_DECL;
3880 #ifdef ARM_PE
3881 if (is_attribute_p ("interfacearm", attr))
3882 return TREE_CODE (decl) == FUNCTION_DECL;
3883 #endif /* ARM_PE */
3885 return 0;
3888 /* Return non-zero if FUNC is a naked function. */
3889 static int
3890 arm_naked_function_p (func)
3891 tree func;
3893 tree a;
3895 if (TREE_CODE (func) != FUNCTION_DECL)
3896 abort ();
3898 a = lookup_attribute ("naked", DECL_MACHINE_ATTRIBUTES (func));
3899 return a != NULL_TREE;
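/* Hypothetical use of the "naked" attribute tested above; since no
   prologue or epilogue is emitted, the body is normally pure inline
   asm.  */
#if 0
void fiq_handler (void) __attribute__ ((naked));

void
fiq_handler (void)
{
  __asm__ volatile ("subs\tpc, lr, #4"); /* Return from FIQ.  */
}
#endif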
3902 /* Routines for use in generating RTL. */
3904 arm_gen_load_multiple (base_regno, count, from, up, write_back, unchanging_p,
3905 in_struct_p, scalar_p)
3906 int base_regno;
3907 int count;
3908 rtx from;
3909 int up;
3910 int write_back;
3911 int unchanging_p;
3912 int in_struct_p;
3913 int scalar_p;
3915 int i = 0, j;
3916 rtx result;
3917 int sign = up ? 1 : -1;
3918 rtx mem;
3920 /* XScale has load-store double instructions, but they have stricter
3921 alignment requirements than load-store multiple, so we cannot
3922 use them.
3924 For XScale ldm requires 2 + NREGS cycles to complete and blocks
3925 the pipeline until completion.
3927 NREGS CYCLES
3928 1 3
3929 2 4
3930 3 5
3931 4 6
3933 An ldr instruction takes 1-3 cycles, but does not block the
3934 pipeline.
3936 NREGS CYCLES
3937 1 1-3
3938 2 2-6
3939 3 3-9
3940 4 4-12
3942 Best case ldr will always win. However, the more ldr instructions
3943 we issue, the less likely we are to be able to schedule them well.
3944 Using ldr instructions also increases code size.
3946 As a compromise, we use ldr for counts of 1 or 2 regs, and ldm
3947 for counts of 3 or 4 regs. */
3948 if (arm_is_xscale && count <= 2 && ! optimize_size)
3950 rtx seq;
3952 start_sequence ();
3954 for (i = 0; i < count; i++)
3956 mem = gen_rtx_MEM (SImode, plus_constant (from, i * 4 * sign));
3957 RTX_UNCHANGING_P (mem) = unchanging_p;
3958 MEM_IN_STRUCT_P (mem) = in_struct_p;
3959 MEM_SCALAR_P (mem) = scalar_p;
3960 emit_move_insn (gen_rtx_REG (SImode, base_regno + i), mem);
3963 if (write_back)
3964 emit_move_insn (from, plus_constant (from, count * 4 * sign));
3966 seq = gen_sequence ();
3967 end_sequence ();
3969 return seq;
3972 result = gen_rtx_PARALLEL (VOIDmode,
3973 rtvec_alloc (count + (write_back ? 1 : 0)));
3974 if (write_back)
3976 XVECEXP (result, 0, 0)
3977 = gen_rtx_SET (GET_MODE (from), from,
3978 plus_constant (from, count * 4 * sign));
3979 i = 1;
3980 count++;
3983 for (j = 0; i < count; i++, j++)
3985 mem = gen_rtx_MEM (SImode, plus_constant (from, j * 4 * sign));
3986 RTX_UNCHANGING_P (mem) = unchanging_p;
3987 MEM_IN_STRUCT_P (mem) = in_struct_p;
3988 MEM_SCALAR_P (mem) = scalar_p;
3989 XVECEXP (result, 0, i)
3990 = gen_rtx_SET (VOIDmode, gen_rtx_REG (SImode, base_regno + j), mem);
3993 return result;
3997 arm_gen_store_multiple (base_regno, count, to, up, write_back, unchanging_p,
3998 in_struct_p, scalar_p)
3999 int base_regno;
4000 int count;
4001 rtx to;
4002 int up;
4003 int write_back;
4004 int unchanging_p;
4005 int in_struct_p;
4006 int scalar_p;
4008 int i = 0, j;
4009 rtx result;
4010 int sign = up ? 1 : -1;
4011 rtx mem;
4013 /* See arm_gen_load_multiple for discussion of
4014 the pros/cons of ldm/stm usage for XScale. */
4015 if (arm_is_xscale && count <= 2 && ! optimize_size)
4017 rtx seq;
4019 start_sequence ();
4021 for (i = 0; i < count; i++)
4023 mem = gen_rtx_MEM (SImode, plus_constant (to, i * 4 * sign));
4024 RTX_UNCHANGING_P (mem) = unchanging_p;
4025 MEM_IN_STRUCT_P (mem) = in_struct_p;
4026 MEM_SCALAR_P (mem) = scalar_p;
4027 emit_move_insn (mem, gen_rtx_REG (SImode, base_regno + i));
4030 if (write_back)
4031 emit_move_insn (to, plus_constant (to, count * 4 * sign));
4033 seq = gen_sequence ();
4034 end_sequence ();
4036 return seq;
4039 result = gen_rtx_PARALLEL (VOIDmode,
4040 rtvec_alloc (count + (write_back ? 1 : 0)));
4041 if (write_back)
4043 XVECEXP (result, 0, 0)
4044 = gen_rtx_SET (GET_MODE (to), to,
4045 plus_constant (to, count * 4 * sign));
4046 i = 1;
4047 count++;
4050 for (j = 0; i < count; i++, j++)
4052 mem = gen_rtx_MEM (SImode, plus_constant (to, j * 4 * sign));
4053 RTX_UNCHANGING_P (mem) = unchanging_p;
4054 MEM_IN_STRUCT_P (mem) = in_struct_p;
4055 MEM_SCALAR_P (mem) = scalar_p;
4057 XVECEXP (result, 0, i)
4058 = gen_rtx_SET (VOIDmode, mem, gen_rtx_REG (SImode, base_regno + j));
4061 return result;
4065 arm_gen_movstrqi (operands)
4066 rtx * operands;
4068 HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
4069 int i;
4070 rtx src, dst;
4071 rtx st_src, st_dst, fin_src, fin_dst;
4072 rtx part_bytes_reg = NULL;
4073 rtx mem;
4074 int dst_unchanging_p, dst_in_struct_p, src_unchanging_p, src_in_struct_p;
4075 int dst_scalar_p, src_scalar_p;
4077 if (GET_CODE (operands[2]) != CONST_INT
4078 || GET_CODE (operands[3]) != CONST_INT
4079 || INTVAL (operands[2]) > 64
4080 || INTVAL (operands[3]) & 3)
4081 return 0;
4083 st_dst = XEXP (operands[0], 0);
4084 st_src = XEXP (operands[1], 0);
4086 dst_unchanging_p = RTX_UNCHANGING_P (operands[0]);
4087 dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
4088 dst_scalar_p = MEM_SCALAR_P (operands[0]);
4089 src_unchanging_p = RTX_UNCHANGING_P (operands[1]);
4090 src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);
4091 src_scalar_p = MEM_SCALAR_P (operands[1]);
4093 fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
4094 fin_src = src = copy_to_mode_reg (SImode, st_src);
4096 in_words_to_go = NUM_INTS (INTVAL (operands[2]));
4097 out_words_to_go = INTVAL (operands[2]) / 4;
4098 last_bytes = INTVAL (operands[2]) & 3;
4100 if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
4101 part_bytes_reg = gen_rtx_REG (SImode, (in_words_to_go - 1) & 3);
4103 for (i = 0; in_words_to_go >= 2; i+=4)
4105 if (in_words_to_go > 4)
4106 emit_insn (arm_gen_load_multiple (0, 4, src, TRUE, TRUE,
4107 src_unchanging_p,
4108 src_in_struct_p,
4109 src_scalar_p));
4110 else
4111 emit_insn (arm_gen_load_multiple (0, in_words_to_go, src, TRUE,
4112 FALSE, src_unchanging_p,
4113 src_in_struct_p, src_scalar_p));
4115 if (out_words_to_go)
4117 if (out_words_to_go > 4)
4118 emit_insn (arm_gen_store_multiple (0, 4, dst, TRUE, TRUE,
4119 dst_unchanging_p,
4120 dst_in_struct_p,
4121 dst_scalar_p));
4122 else if (out_words_to_go != 1)
4123 emit_insn (arm_gen_store_multiple (0, out_words_to_go,
4124 dst, TRUE,
4125 (last_bytes == 0
4126 ? FALSE : TRUE),
4127 dst_unchanging_p,
4128 dst_in_struct_p,
4129 dst_scalar_p));
4130 else
4132 mem = gen_rtx_MEM (SImode, dst);
4133 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4134 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4135 MEM_SCALAR_P (mem) = dst_scalar_p;
4136 emit_move_insn (mem, gen_rtx_REG (SImode, 0));
4137 if (last_bytes != 0)
4138 emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
4142 in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
4143 out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
4146 /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do. */
4147 if (out_words_to_go)
4149 rtx sreg;
4151 mem = gen_rtx_MEM (SImode, src);
4152 RTX_UNCHANGING_P (mem) = src_unchanging_p;
4153 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
4154 MEM_SCALAR_P (mem) = src_scalar_p;
4155 emit_move_insn (sreg = gen_reg_rtx (SImode), mem);
4156 emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));
4158 mem = gen_rtx_MEM (SImode, dst);
4159 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4160 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4161 MEM_SCALAR_P (mem) = dst_scalar_p;
4162 emit_move_insn (mem, sreg);
4163 emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
4164 in_words_to_go--;
4166 if (in_words_to_go) /* Sanity check */
4167 abort ();
4170 if (in_words_to_go)
4172 if (in_words_to_go < 0)
4173 abort ();
4175 mem = gen_rtx_MEM (SImode, src);
4176 RTX_UNCHANGING_P (mem) = src_unchanging_p;
4177 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
4178 MEM_SCALAR_P (mem) = src_scalar_p;
4179 part_bytes_reg = copy_to_mode_reg (SImode, mem);
4182 if (last_bytes && part_bytes_reg == NULL)
4183 abort ();
4185 if (BYTES_BIG_ENDIAN && last_bytes)
4187 rtx tmp = gen_reg_rtx (SImode);
4189 /* The bytes we want are in the top end of the word. */
4190 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
4191 GEN_INT (8 * (4 - last_bytes))));
4192 part_bytes_reg = tmp;
4194 while (last_bytes)
4196 mem = gen_rtx_MEM (QImode, plus_constant (dst, last_bytes - 1));
4197 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4198 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4199 MEM_SCALAR_P (mem) = dst_scalar_p;
4200 emit_move_insn (mem, gen_rtx_SUBREG (QImode, part_bytes_reg, 0));
4202 if (--last_bytes)
4204 tmp = gen_reg_rtx (SImode);
4205 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
4206 part_bytes_reg = tmp;
4211 else
4213 if (last_bytes > 1)
4215 mem = gen_rtx_MEM (HImode, dst);
4216 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4217 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4218 MEM_SCALAR_P (mem) = dst_scalar_p;
4219 emit_move_insn (mem, gen_rtx_SUBREG (HImode, part_bytes_reg, 0));
4220 last_bytes -= 2;
4221 if (last_bytes)
4223 rtx tmp = gen_reg_rtx (SImode);
4225 emit_insn (gen_addsi3 (dst, dst, GEN_INT (2)));
4226 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (16)));
4227 part_bytes_reg = tmp;
4231 if (last_bytes)
4233 mem = gen_rtx_MEM (QImode, dst);
4234 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4235 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4236 MEM_SCALAR_P (mem) = dst_scalar_p;
4237 emit_move_insn (mem, gen_rtx_SUBREG (QImode, part_bytes_reg, 0));
4241 return 1;
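/* As a rough illustration (little-endian, word-aligned case): a
   13-byte block copy is expanded by the code above into one
   load-multiple that also fetches the word holding the tail byte, a
   store-multiple for the three whole words, and a single byte store,
   along the lines of:

	ldmia	src, {r0, r1, r2, r3}
	stmia	dst!, {r0, r1, r2}
	strb	r3, [dst]
*/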
4244 /* Generate a memory reference for a half word, such that it will be loaded
4245 into the top 16 bits of the word. We can assume that the address is
4246 known to be alignable and of the form reg, or plus (reg, const). */
4248 arm_gen_rotated_half_load (memref)
4249 rtx memref;
4251 HOST_WIDE_INT offset = 0;
4252 rtx base = XEXP (memref, 0);
4254 if (GET_CODE (base) == PLUS)
4256 offset = INTVAL (XEXP (base, 1));
4257 base = XEXP (base, 0);
4260 /* If we aren't allowed to generate unaligned addresses, then fail. */
4261 if (TARGET_MMU_TRAPS
4262 && ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 0)))
4263 return NULL;
4265 base = gen_rtx_MEM (SImode, plus_constant (base, offset & ~2));
4267 if ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 2))
4268 return base;
4270 return gen_rtx_ROTATE (SImode, base, GEN_INT (16));
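/* For instance (a sketch for the little-endian case): a halfword at
   offset 6 lies in the top half of the word at offset 4, so the plain
   word load is returned as-is; a halfword at offset 4 lies in the
   bottom half, so the load is wrapped in a (rotate ... 16) to move it
   into the top 16 bits. */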
4273 /* Select a dominance comparison mode if possible. We support three forms.
4274 COND_OR == 0 => (X && Y)
 4275    COND_OR == 1 => ((!X) || Y)
4276 COND_OR == 2 => (X || Y)
 4277    If we are unable to support a dominance comparison we return CCmode.
4278 This will then fail to match for the RTL expressions that generate this
4279 call. */
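/* A worked example: for (x == y || x >= y), cond1 is EQ and cond2 is
   GE, with COND_OR == 2.  Since x == y implies x >= y, EQ dominates
   GE, no swap is needed, and the switch below yields CC_DGEmode, so
   both tests can be evaluated from a single compare. */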
4281 static enum machine_mode
4282 select_dominance_cc_mode (x, y, cond_or)
4283 rtx x;
4284 rtx y;
4285 HOST_WIDE_INT cond_or;
4287 enum rtx_code cond1, cond2;
4288 int swapped = 0;
4290 /* Currently we will probably get the wrong result if the individual
4291 comparisons are not simple. This also ensures that it is safe to
4292 reverse a comparison if necessary. */
4293 if ((arm_select_cc_mode (cond1 = GET_CODE (x), XEXP (x, 0), XEXP (x, 1))
4294 != CCmode)
4295 || (arm_select_cc_mode (cond2 = GET_CODE (y), XEXP (y, 0), XEXP (y, 1))
4296 != CCmode))
4297 return CCmode;
4299 /* The if_then_else variant of this tests the second condition if the
4300 first passes, but is true if the first fails. Reverse the first
4301 condition to get a true "inclusive-or" expression. */
4302 if (cond_or == 1)
4303 cond1 = reverse_condition (cond1);
4305 /* If the comparisons are not equal, and one doesn't dominate the other,
4306 then we can't do this. */
4307 if (cond1 != cond2
4308 && !comparison_dominates_p (cond1, cond2)
4309 && (swapped = 1, !comparison_dominates_p (cond2, cond1)))
4310 return CCmode;
4312 if (swapped)
4314 enum rtx_code temp = cond1;
4315 cond1 = cond2;
4316 cond2 = temp;
4319 switch (cond1)
4321 case EQ:
4322 if (cond2 == EQ || !cond_or)
4323 return CC_DEQmode;
4325 switch (cond2)
4327 case LE: return CC_DLEmode;
4328 case LEU: return CC_DLEUmode;
4329 case GE: return CC_DGEmode;
4330 case GEU: return CC_DGEUmode;
4331 default: break;
4334 break;
4336 case LT:
4337 if (cond2 == LT || !cond_or)
4338 return CC_DLTmode;
4339 if (cond2 == LE)
4340 return CC_DLEmode;
4341 if (cond2 == NE)
4342 return CC_DNEmode;
4343 break;
4345 case GT:
4346 if (cond2 == GT || !cond_or)
4347 return CC_DGTmode;
4348 if (cond2 == GE)
4349 return CC_DGEmode;
4350 if (cond2 == NE)
4351 return CC_DNEmode;
4352 break;
4354 case LTU:
4355 if (cond2 == LTU || !cond_or)
4356 return CC_DLTUmode;
4357 if (cond2 == LEU)
4358 return CC_DLEUmode;
4359 if (cond2 == NE)
4360 return CC_DNEmode;
4361 break;
4363 case GTU:
4364 if (cond2 == GTU || !cond_or)
4365 return CC_DGTUmode;
4366 if (cond2 == GEU)
4367 return CC_DGEUmode;
4368 if (cond2 == NE)
4369 return CC_DNEmode;
4370 break;
4372 /* The remaining cases only occur when both comparisons are the
4373 same. */
4374 case NE:
4375 return CC_DNEmode;
4377 case LE:
4378 return CC_DLEmode;
4380 case GE:
4381 return CC_DGEmode;
4383 case LEU:
4384 return CC_DLEUmode;
4386 case GEU:
4387 return CC_DGEUmode;
4389 default:
4390 break;
4393 abort ();
4396 enum machine_mode
4397 arm_select_cc_mode (op, x, y)
4398 enum rtx_code op;
4399 rtx x;
4400 rtx y;
4402 /* All floating point compares return CCFP if it is an equality
4403 comparison, and CCFPE otherwise. */
4404 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
4406 switch (op)
4408 case EQ:
4409 case NE:
4410 case UNORDERED:
4411 case ORDERED:
4412 case UNLT:
4413 case UNLE:
4414 case UNGT:
4415 case UNGE:
4416 case UNEQ:
4417 case LTGT:
4418 return CCFPmode;
4420 case LT:
4421 case LE:
4422 case GT:
4423 case GE:
4424 return CCFPEmode;
4426 default:
4427 abort ();
4431 /* A compare with a shifted operand. Because of canonicalization, the
4432 comparison will have to be swapped when we emit the assembler. */
4433 if (GET_MODE (y) == SImode && GET_CODE (y) == REG
4434 && (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
4435 || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
4436 || GET_CODE (x) == ROTATERT))
4437 return CC_SWPmode;
4439 /* This is a special case that is used by combine to allow a
4440 comparison of a shifted byte load to be split into a zero-extend
4441 followed by a comparison of the shifted integer (only valid for
4442 equalities and unsigned inequalities). */
4443 if (GET_MODE (x) == SImode
4444 && GET_CODE (x) == ASHIFT
4445 && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 24
4446 && GET_CODE (XEXP (x, 0)) == SUBREG
4447 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == MEM
4448 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == QImode
4449 && (op == EQ || op == NE
4450 || op == GEU || op == GTU || op == LTU || op == LEU)
4451 && GET_CODE (y) == CONST_INT)
4452 return CC_Zmode;
 4454   /* A construct for a conditional compare: if the false arm contains
 4455      0, then both conditions must be true; otherwise either condition
4456 must be true. Not all conditions are possible, so CCmode is
4457 returned if it can't be done. */
4458 if (GET_CODE (x) == IF_THEN_ELSE
4459 && (XEXP (x, 2) == const0_rtx
4460 || XEXP (x, 2) == const1_rtx)
4461 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4462 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
4463 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
4464 INTVAL (XEXP (x, 2)));
4466 /* Alternate canonicalizations of the above. These are somewhat cleaner. */
4467 if (GET_CODE (x) == AND
4468 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4469 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
4470 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1), 0);
4472 if (GET_CODE (x) == IOR
4473 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4474 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
4475 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1), 2);
 4477   /* For an operation that sets the condition codes as a side-effect, the
 4478      V flag is not set correctly, so we can only use comparisons where
 4479      this doesn't matter.  (For LT and GE we can use "mi" and "pl"
 4480      instead.)  */
4481 if (GET_MODE (x) == SImode
4482 && y == const0_rtx
4483 && (op == EQ || op == NE || op == LT || op == GE)
4484 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
4485 || GET_CODE (x) == AND || GET_CODE (x) == IOR
4486 || GET_CODE (x) == XOR || GET_CODE (x) == MULT
4487 || GET_CODE (x) == NOT || GET_CODE (x) == NEG
4488 || GET_CODE (x) == LSHIFTRT
4489 || GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
4490 || GET_CODE (x) == ROTATERT || GET_CODE (x) == ZERO_EXTRACT))
4491 return CC_NOOVmode;
4493 if (GET_MODE (x) == QImode && (op == EQ || op == NE))
4494 return CC_Zmode;
4496 if (GET_MODE (x) == SImode && (op == LTU || op == GEU)
4497 && GET_CODE (x) == PLUS
4498 && (rtx_equal_p (XEXP (x, 0), y) || rtx_equal_p (XEXP (x, 1), y)))
4499 return CC_Cmode;
4501 return CCmode;
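/* For example, the CC_Cmode case above catches the unsigned overflow
   idiom `if (a + b < a)' on SImode values: combine presents
   (ltu (plus a b) a), one operand of the PLUS equals Y, and the
   comparison can then be done on the carry flag alone. */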
4504 /* X and Y are two things to compare using CODE. Emit the compare insn and
4505 return the rtx for register 0 in the proper mode. FP means this is a
4506 floating point compare: I don't think that it is needed on the arm. */
4509 arm_gen_compare_reg (code, x, y)
4510 enum rtx_code code;
4511 rtx x, y;
4513 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
4514 rtx cc_reg = gen_rtx_REG (mode, CC_REGNUM);
4516 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
4517 gen_rtx_COMPARE (mode, x, y)));
4519 return cc_reg;
4522 void
4523 arm_reload_in_hi (operands)
4524 rtx * operands;
4526 rtx ref = operands[1];
4527 rtx base, scratch;
4528 HOST_WIDE_INT offset = 0;
4530 if (GET_CODE (ref) == SUBREG)
4532 offset = SUBREG_WORD (ref) * UNITS_PER_WORD;
4533 if (BYTES_BIG_ENDIAN)
4534 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (ref)))
4535 - MIN (UNITS_PER_WORD,
4536 GET_MODE_SIZE (GET_MODE (SUBREG_REG (ref)))));
4537 ref = SUBREG_REG (ref);
4540 if (GET_CODE (ref) == REG)
4542 /* We have a pseudo which has been spilt onto the stack; there
4543 are two cases here: the first where there is a simple
4544 stack-slot replacement and a second where the stack-slot is
4545 out of range, or is used as a subreg. */
4546 if (reg_equiv_mem[REGNO (ref)])
4548 ref = reg_equiv_mem[REGNO (ref)];
4549 base = find_replacement (&XEXP (ref, 0));
4551 else
4552 /* The slot is out of range, or was dressed up in a SUBREG. */
4553 base = reg_equiv_address[REGNO (ref)];
4555 else
4556 base = find_replacement (&XEXP (ref, 0));
4558 /* Handle the case where the address is too complex to be offset by 1. */
4559 if (GET_CODE (base) == MINUS
4560 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
4562 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
4564 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
4565 base = base_plus;
4567 else if (GET_CODE (base) == PLUS)
4569 /* The addend must be CONST_INT, or we would have dealt with it above. */
4570 HOST_WIDE_INT hi, lo;
4572 offset += INTVAL (XEXP (base, 1));
4573 base = XEXP (base, 0);
4575 /* Rework the address into a legal sequence of insns. */
4576 /* Valid range for lo is -4095 -> 4095 */
4577 lo = (offset >= 0
4578 ? (offset & 0xfff)
4579 : -((-offset) & 0xfff));
 4581       /* Corner case: if lo is the max offset, then we would be out of range
4582 once we have added the additional 1 below, so bump the msb into the
4583 pre-loading insn(s). */
4584 if (lo == 4095)
4585 lo &= 0x7ff;
4587 hi = ((((offset - lo) & HOST_INT (0xffffffff))
4588 ^ HOST_INT (0x80000000))
4589 - HOST_INT (0x80000000));
4591 if (hi + lo != offset)
4592 abort ();
4594 if (hi != 0)
4596 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
4598 /* Get the base address; addsi3 knows how to handle constants
4599 that require more than one insn. */
4600 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
4601 base = base_plus;
4602 offset = lo;
4606 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
4607 emit_insn (gen_zero_extendqisi2 (scratch,
4608 gen_rtx_MEM (QImode,
4609 plus_constant (base,
4610 offset))));
4611 emit_insn (gen_zero_extendqisi2 (gen_rtx_SUBREG (SImode, operands[0], 0),
4612 gen_rtx_MEM (QImode,
4613 plus_constant (base,
4614 offset + 1))));
4615 if (!BYTES_BIG_ENDIAN)
4616 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
4617 gen_rtx_IOR (SImode,
4618 gen_rtx_ASHIFT
4619 (SImode,
4620 gen_rtx_SUBREG (SImode, operands[0], 0),
4621 GEN_INT (8)),
4622 scratch)));
4623 else
4624 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
4625 gen_rtx_IOR (SImode,
4626 gen_rtx_ASHIFT (SImode, scratch,
4627 GEN_INT (8)),
4628 gen_rtx_SUBREG (SImode, operands[0],
4629 0))));
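/* For the common little-endian, register-plus-small-offset case the
   code above emits, roughly:

	ldrb	scratch, [base, #offset]
	ldrb	out, [base, #offset + 1]
	orr	out, scratch, out, lsl #8
*/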
 4632 /* Handle storing a half-word to memory during reload by synthesising it as
4633 byte stores. Take care not to clobber the input values until after we
4634 have moved them somewhere safe. This code assumes that if the DImode
4635 scratch in operands[2] overlaps either the input value or output address
4636 in some way, then that value must die in this insn (we absolutely need
4637 two scratch registers for some corner cases). */
4638 void
4639 arm_reload_out_hi (operands)
4640 rtx * operands;
4642 rtx ref = operands[0];
4643 rtx outval = operands[1];
4644 rtx base, scratch;
4645 HOST_WIDE_INT offset = 0;
4647 if (GET_CODE (ref) == SUBREG)
4649 offset = SUBREG_WORD (ref) * UNITS_PER_WORD;
4650 if (BYTES_BIG_ENDIAN)
4651 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (ref)))
4652 - MIN (UNITS_PER_WORD,
4653 GET_MODE_SIZE (GET_MODE (SUBREG_REG (ref)))));
4654 ref = SUBREG_REG (ref);
4658 if (GET_CODE (ref) == REG)
4660 /* We have a pseudo which has been spilt onto the stack; there
4661 are two cases here: the first where there is a simple
4662 stack-slot replacement and a second where the stack-slot is
4663 out of range, or is used as a subreg. */
4664 if (reg_equiv_mem[REGNO (ref)])
4666 ref = reg_equiv_mem[REGNO (ref)];
4667 base = find_replacement (&XEXP (ref, 0));
4669 else
4670 /* The slot is out of range, or was dressed up in a SUBREG. */
4671 base = reg_equiv_address[REGNO (ref)];
4673 else
4674 base = find_replacement (&XEXP (ref, 0));
4676 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
4678 /* Handle the case where the address is too complex to be offset by 1. */
4679 if (GET_CODE (base) == MINUS
4680 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
4682 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
4684 /* Be careful not to destroy OUTVAL. */
4685 if (reg_overlap_mentioned_p (base_plus, outval))
4687 /* Updating base_plus might destroy outval, see if we can
4688 swap the scratch and base_plus. */
4689 if (!reg_overlap_mentioned_p (scratch, outval))
4691 rtx tmp = scratch;
4692 scratch = base_plus;
4693 base_plus = tmp;
4695 else
4697 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
 4699	      /* Be conservative and copy OUTVAL into the scratch now;
4700 this should only be necessary if outval is a subreg
4701 of something larger than a word. */
4702 /* XXX Might this clobber base? I can't see how it can,
4703 since scratch is known to overlap with OUTVAL, and
4704 must be wider than a word. */
4705 emit_insn (gen_movhi (scratch_hi, outval));
4706 outval = scratch_hi;
4710 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
4711 base = base_plus;
4713 else if (GET_CODE (base) == PLUS)
4715 /* The addend must be CONST_INT, or we would have dealt with it above. */
4716 HOST_WIDE_INT hi, lo;
4718 offset += INTVAL (XEXP (base, 1));
4719 base = XEXP (base, 0);
4721 /* Rework the address into a legal sequence of insns. */
4722 /* Valid range for lo is -4095 -> 4095 */
4723 lo = (offset >= 0
4724 ? (offset & 0xfff)
4725 : -((-offset) & 0xfff));
 4727       /* Corner case: if lo is the max offset, then we would be out of range
4728 once we have added the additional 1 below, so bump the msb into the
4729 pre-loading insn(s). */
4730 if (lo == 4095)
4731 lo &= 0x7ff;
4733 hi = ((((offset - lo) & HOST_INT (0xffffffff))
4734 ^ HOST_INT (0x80000000))
4735 - HOST_INT (0x80000000));
4737 if (hi + lo != offset)
4738 abort ();
4740 if (hi != 0)
4742 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
4744 /* Be careful not to destroy OUTVAL. */
4745 if (reg_overlap_mentioned_p (base_plus, outval))
4747 /* Updating base_plus might destroy outval, see if we
4748 can swap the scratch and base_plus. */
4749 if (!reg_overlap_mentioned_p (scratch, outval))
4751 rtx tmp = scratch;
4752 scratch = base_plus;
4753 base_plus = tmp;
4755 else
4757 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
 4759	      /* Be conservative and copy outval into scratch now;
4760 this should only be necessary if outval is a
4761 subreg of something larger than a word. */
4762 /* XXX Might this clobber base? I can't see how it
4763 can, since scratch is known to overlap with
4764 outval. */
4765 emit_insn (gen_movhi (scratch_hi, outval));
4766 outval = scratch_hi;
4770 /* Get the base address; addsi3 knows how to handle constants
4771 that require more than one insn. */
4772 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
4773 base = base_plus;
4774 offset = lo;
4778 if (BYTES_BIG_ENDIAN)
4780 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
4781 plus_constant (base, offset + 1)),
4782 gen_rtx_SUBREG (QImode, outval, 0)));
4783 emit_insn (gen_lshrsi3 (scratch,
4784 gen_rtx_SUBREG (SImode, outval, 0),
4785 GEN_INT (8)));
4786 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
4787 gen_rtx_SUBREG (QImode, scratch, 0)));
4789 else
4791 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
4792 gen_rtx_SUBREG (QImode, outval, 0)));
4793 emit_insn (gen_lshrsi3 (scratch,
4794 gen_rtx_SUBREG (SImode, outval, 0),
4795 GEN_INT (8)));
4796 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
4797 plus_constant (base, offset + 1)),
4798 gen_rtx_SUBREG (QImode, scratch, 0)));
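/* Likewise, for the little-endian case the sequence above amounts to,
   roughly:

	strb	outval, [base, #offset]
	mov	scratch, outval, lsr #8
	strb	scratch, [base, #offset + 1]
*/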
4802 /* Print a symbolic form of X to the debug file, F. */
4803 static void
4804 arm_print_value (f, x)
4805 FILE * f;
4806 rtx x;
4808 switch (GET_CODE (x))
4810 case CONST_INT:
4811 fprintf (f, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
4812 return;
4814 case CONST_DOUBLE:
4815 fprintf (f, "<0x%lx,0x%lx>", (long)XWINT (x, 2), (long)XWINT (x, 3));
4816 return;
4818 case CONST_STRING:
4819 fprintf (f, "\"%s\"", XSTR (x, 0));
4820 return;
4822 case SYMBOL_REF:
4823 fprintf (f, "`%s'", XSTR (x, 0));
4824 return;
4826 case LABEL_REF:
4827 fprintf (f, "L%d", INSN_UID (XEXP (x, 0)));
4828 return;
4830 case CONST:
4831 arm_print_value (f, XEXP (x, 0));
4832 return;
4834 case PLUS:
4835 arm_print_value (f, XEXP (x, 0));
4836 fprintf (f, "+");
4837 arm_print_value (f, XEXP (x, 1));
4838 return;
4840 case PC:
4841 fprintf (f, "pc");
4842 return;
4844 default:
4845 fprintf (f, "????");
4846 return;
4850 /* Routines for manipulation of the constant pool. */
4852 /* Arm instructions cannot load a large constant directly into a
4853 register; they have to come from a pc relative load. The constant
4854 must therefore be placed in the addressable range of the pc
4855 relative load. Depending on the precise pc relative load
4856 instruction the range is somewhere between 256 bytes and 4k. This
4857 means that we often have to dump a constant inside a function, and
4858 generate code to branch around it.
4860 It is important to minimize this, since the branches will slow
4861 things down and make the code larger.
4863 Normally we can hide the table after an existing unconditional
4864 branch so that there is no interruption of the flow, but in the
4865 worst case the code looks like this:
4867 ldr rn, L1
4869 b L2
4870 align
4871 L1: .long value
4875 ldr rn, L3
4877 b L4
4878 align
4879 L3: .long value
4883 We fix this by performing a scan after scheduling, which notices
4884 which instructions need to have their operands fetched from the
4885 constant table and builds the table.
4887 The algorithm starts by building a table of all the constants that
4888 need fixing up and all the natural barriers in the function (places
4889 where a constant table can be dropped without breaking the flow).
4890 For each fixup we note how far the pc-relative replacement will be
4891 able to reach and the offset of the instruction into the function.
4893 Having built the table we then group the fixes together to form
4894 tables that are as large as possible (subject to addressing
4895 constraints) and emit each table of constants after the last
4896 barrier that is within range of all the instructions in the group.
4897 If a group does not contain a barrier, then we forcibly create one
4898 by inserting a jump instruction into the flow. Once the table has
4899 been inserted, the insns are then modified to reference the
4900 relevant entry in the pool.
4902 Possible enhancements to the algorithm (not implemented) are:
4904 1) For some processors and object formats, there may be benefit in
4905 aligning the pools to the start of cache lines; this alignment
4906 would need to be taken into account when calculating addressability
4907 of a pool. */
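/* Concretely: after a pool has been dumped, each fix's *loc is
   rewritten (in arm_reorg below) to a MEM of minipool_vector_label
   plus the entry's offset, so the insn assembles to something like

	ldr	rn, .LCP0+8

   where .LCP0 stands for the pool label and 8 is the entry's offset
   (label name for illustration only). */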
4909 /* These typedefs are located at the start of this file, so that
4910 they can be used in the prototypes there. This comment is to
4911 remind readers of that fact so that the following structures
4912 can be understood more easily.
4914 typedef struct minipool_node Mnode;
4915 typedef struct minipool_fixup Mfix; */
4917 struct minipool_node
4919 /* Doubly linked chain of entries. */
4920 Mnode * next;
4921 Mnode * prev;
 4922   /* The maximum offset into the code at which this entry can be placed.  While
4923 pushing fixes for forward references, all entries are sorted in order
4924 of increasing max_address. */
4925 HOST_WIDE_INT max_address;
 4926   /* Similarly for an entry inserted for a backwards ref.  */
4927 HOST_WIDE_INT min_address;
4928 /* The number of fixes referencing this entry. This can become zero
4929 if we "unpush" an entry. In this case we ignore the entry when we
4930 come to emit the code. */
4931 int refcount;
4932 /* The offset from the start of the minipool. */
4933 HOST_WIDE_INT offset;
 4934   /* The value in the table.  */
4935 rtx value;
4936 /* The mode of value. */
4937 enum machine_mode mode;
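  /* The size of the value in the pool, in bytes; never less than a
     word (see MINIPOOL_FIX_SIZE below).  */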
4938 int fix_size;
4941 struct minipool_fixup
4943 Mfix * next;
4944 rtx insn;
4945 HOST_WIDE_INT address;
4946 rtx * loc;
4947 enum machine_mode mode;
4948 int fix_size;
4949 rtx value;
4950 Mnode * minipool;
4951 HOST_WIDE_INT forwards;
4952 HOST_WIDE_INT backwards;
 4955 /* Fixes smaller than a word need padding out to a word boundary.  */
4956 #define MINIPOOL_FIX_SIZE(mode) \
4957 (GET_MODE_SIZE ((mode)) >= 4 ? GET_MODE_SIZE ((mode)) : 4)
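/* For example, an HImode fix (2 bytes) is padded to 4, while a DImode
   fix keeps its natural size of 8, so pool entries always start on a
   word boundary. */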
4959 static Mnode * minipool_vector_head;
4960 static Mnode * minipool_vector_tail;
4961 static rtx minipool_vector_label;
4963 /* The linked list of all minipool fixes required for this function. */
4964 Mfix * minipool_fix_head;
4965 Mfix * minipool_fix_tail;
4966 /* The fix entry for the current minipool, once it has been placed. */
4967 Mfix * minipool_barrier;
4969 /* Determines if INSN is the start of a jump table. Returns the end
4970 of the TABLE or NULL_RTX. */
4971 static rtx
4972 is_jump_table (insn)
4973 rtx insn;
4975 rtx table;
4977 if (GET_CODE (insn) == JUMP_INSN
4978 && JUMP_LABEL (insn) != NULL
4979 && ((table = next_real_insn (JUMP_LABEL (insn)))
4980 == next_real_insn (insn))
4981 && table != NULL
4982 && GET_CODE (table) == JUMP_INSN
4983 && (GET_CODE (PATTERN (table)) == ADDR_VEC
4984 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
4985 return table;
4987 return NULL_RTX;
4990 static HOST_WIDE_INT
4991 get_jump_table_size (insn)
4992 rtx insn;
4994 rtx body = PATTERN (insn);
4995 int elt = GET_CODE (body) == ADDR_DIFF_VEC ? 1 : 0;
4997 return GET_MODE_SIZE (GET_MODE (body)) * XVECLEN (body, elt);
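/* For example, an ADDR_DIFF_VEC in SImode with 10 entries occupies
   4 * 10 == 40 bytes.  The ELT selection allows for the fact that in
   an ADDR_DIFF_VEC the vector of labels is operand 1 (operand 0 being
   the base label), while in an ADDR_VEC it is operand 0. */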
5000 /* Move a minipool fix MP from its current location to before MAX_MP.
5001 If MAX_MP is NULL, then MP doesn't need moving, but the addressing
 5002    constraints may need updating.  */
5003 static Mnode *
5004 move_minipool_fix_forward_ref (mp, max_mp, max_address)
5005 Mnode * mp;
5006 Mnode * max_mp;
5007 HOST_WIDE_INT max_address;
5009 /* This should never be true and the code below assumes these are
5010 different. */
5011 if (mp == max_mp)
5012 abort ();
5014 if (max_mp == NULL)
5016 if (max_address < mp->max_address)
5017 mp->max_address = max_address;
5019 else
5021 if (max_address > max_mp->max_address - mp->fix_size)
5022 mp->max_address = max_mp->max_address - mp->fix_size;
5023 else
5024 mp->max_address = max_address;
5026 /* Unlink MP from its current position. Since max_mp is non-null,
5027 mp->prev must be non-null. */
5028 mp->prev->next = mp->next;
5029 if (mp->next != NULL)
5030 mp->next->prev = mp->prev;
5031 else
5032 minipool_vector_tail = mp->prev;
5034 /* Re-insert it before MAX_MP. */
5035 mp->next = max_mp;
5036 mp->prev = max_mp->prev;
5037 max_mp->prev = mp;
5039 if (mp->prev != NULL)
5040 mp->prev->next = mp;
5041 else
5042 minipool_vector_head = mp;
5045 /* Save the new entry. */
5046 max_mp = mp;
 5048   /* Scan over the preceding entries and adjust their addresses as
5049 required. */
5050 while (mp->prev != NULL
5051 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
5053 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
5054 mp = mp->prev;
5057 return max_mp;
5060 /* Add a constant to the minipool for a forward reference. Returns the
5061 node added or NULL if the constant will not fit in this pool. */
5062 static Mnode *
5063 add_minipool_forward_ref (fix)
5064 Mfix * fix;
5066 /* If set, max_mp is the first pool_entry that has a lower
5067 constraint than the one we are trying to add. */
5068 Mnode * max_mp = NULL;
5069 HOST_WIDE_INT max_address = fix->address + fix->forwards;
5070 Mnode * mp;
5072 /* If this fix's address is greater than the address of the first
5073 entry, then we can't put the fix in this pool. We subtract the
5074 size of the current fix to ensure that if the table is fully
 5075      packed we still have enough room to insert this value by shuffling
5076 the other fixes forwards. */
5077 if (minipool_vector_head &&
5078 fix->address >= minipool_vector_head->max_address - fix->fix_size)
5079 return NULL;
5081 /* Scan the pool to see if a constant with the same value has
5082 already been added. While we are doing this, also note the
5083 location where we must insert the constant if it doesn't already
5084 exist. */
5085 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
5087 if (GET_CODE (fix->value) == GET_CODE (mp->value)
5088 && fix->mode == mp->mode
5089 && (GET_CODE (fix->value) != CODE_LABEL
5090 || (CODE_LABEL_NUMBER (fix->value)
5091 == CODE_LABEL_NUMBER (mp->value)))
5092 && rtx_equal_p (fix->value, mp->value))
5094 /* More than one fix references this entry. */
5095 mp->refcount++;
5096 return move_minipool_fix_forward_ref (mp, max_mp, max_address);
5099 /* Note the insertion point if necessary. */
5100 if (max_mp == NULL
5101 && mp->max_address > max_address)
5102 max_mp = mp;
5105 /* The value is not currently in the minipool, so we need to create
5106 a new entry for it. If MAX_MP is NULL, the entry will be put on
5107 the end of the list since the placement is less constrained than
5108 any existing entry. Otherwise, we insert the new fix before
 5109      MAX_MP and, if necessary, adjust the constraints on the other
5110 entries. */
5111 mp = xmalloc (sizeof (* mp));
5112 mp->fix_size = fix->fix_size;
5113 mp->mode = fix->mode;
5114 mp->value = fix->value;
5115 mp->refcount = 1;
5116 /* Not yet required for a backwards ref. */
5117 mp->min_address = -65536;
5119 if (max_mp == NULL)
5121 mp->max_address = max_address;
5122 mp->next = NULL;
5123 mp->prev = minipool_vector_tail;
5125 if (mp->prev == NULL)
5127 minipool_vector_head = mp;
5128 minipool_vector_label = gen_label_rtx ();
5130 else
5131 mp->prev->next = mp;
5133 minipool_vector_tail = mp;
5135 else
5137 if (max_address > max_mp->max_address - mp->fix_size)
5138 mp->max_address = max_mp->max_address - mp->fix_size;
5139 else
5140 mp->max_address = max_address;
5142 mp->next = max_mp;
5143 mp->prev = max_mp->prev;
5144 max_mp->prev = mp;
5145 if (mp->prev != NULL)
5146 mp->prev->next = mp;
5147 else
5148 minipool_vector_head = mp;
5151 /* Save the new entry. */
5152 max_mp = mp;
 5154   /* Scan over the preceding entries and adjust their addresses as
5155 required. */
5156 while (mp->prev != NULL
5157 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
5159 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
5160 mp = mp->prev;
5163 return max_mp;
5166 static Mnode *
5167 move_minipool_fix_backward_ref (mp, min_mp, min_address)
5168 Mnode * mp;
5169 Mnode * min_mp;
5170 HOST_WIDE_INT min_address;
5172 HOST_WIDE_INT offset;
5174 /* This should never be true, and the code below assumes these are
5175 different. */
5176 if (mp == min_mp)
5177 abort ();
5179 if (min_mp == NULL)
5181 if (min_address > mp->min_address)
5182 mp->min_address = min_address;
5184 else
5186 /* We will adjust this below if it is too loose. */
5187 mp->min_address = min_address;
5189 /* Unlink MP from its current position. Since min_mp is non-null,
5190 mp->next must be non-null. */
5191 mp->next->prev = mp->prev;
5192 if (mp->prev != NULL)
5193 mp->prev->next = mp->next;
5194 else
5195 minipool_vector_head = mp->next;
5197 /* Reinsert it after MIN_MP. */
5198 mp->prev = min_mp;
5199 mp->next = min_mp->next;
5200 min_mp->next = mp;
5201 if (mp->next != NULL)
5202 mp->next->prev = mp;
5203 else
5204 minipool_vector_tail = mp;
5207 min_mp = mp;
5209 offset = 0;
5210 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
5212 mp->offset = offset;
5213 if (mp->refcount > 0)
5214 offset += mp->fix_size;
5216 if (mp->next && mp->next->min_address < mp->min_address + mp->fix_size)
5217 mp->next->min_address = mp->min_address + mp->fix_size;
5220 return min_mp;
5223 /* Add a constant to the minipool for a backward reference. Returns the
5224 node added or NULL if the constant will not fit in this pool.
5226 Note that the code for insertion for a backwards reference can be
5227 somewhat confusing because the calculated offsets for each fix do
5228 not take into account the size of the pool (which is still under
 5229    construction).  */
5230 static Mnode *
5231 add_minipool_backward_ref (fix)
5232 Mfix * fix;
5234 /* If set, min_mp is the last pool_entry that has a lower constraint
5235 than the one we are trying to add. */
5236 Mnode * min_mp = NULL;
5237 /* This can be negative, since it is only a constraint. */
5238 HOST_WIDE_INT min_address = fix->address - fix->backwards;
5239 Mnode * mp;
5241 /* If we can't reach the current pool from this insn, or if we can't
5242 insert this entry at the end of the pool without pushing other
5243 fixes out of range, then we don't try. This ensures that we
5244 can't fail later on. */
5245 if (min_address >= minipool_barrier->address
5246 || (minipool_vector_tail->min_address + fix->fix_size
5247 >= minipool_barrier->address))
5248 return NULL;
5250 /* Scan the pool to see if a constant with the same value has
5251 already been added. While we are doing this, also note the
5252 location where we must insert the constant if it doesn't already
5253 exist. */
5254 for (mp = minipool_vector_tail; mp != NULL; mp = mp->prev)
5256 if (GET_CODE (fix->value) == GET_CODE (mp->value)
5257 && fix->mode == mp->mode
5258 && (GET_CODE (fix->value) != CODE_LABEL
5259 || (CODE_LABEL_NUMBER (fix->value)
5260 == CODE_LABEL_NUMBER (mp->value)))
5261 && rtx_equal_p (fix->value, mp->value)
5262 /* Check that there is enough slack to move this entry to the
5263 end of the table (this is conservative). */
5264 && (mp->max_address
5265 > (minipool_barrier->address
5266 + minipool_vector_tail->offset
5267 + minipool_vector_tail->fix_size)))
5269 mp->refcount++;
5270 return move_minipool_fix_backward_ref (mp, min_mp, min_address);
5273 if (min_mp != NULL)
5274 mp->min_address += fix->fix_size;
5275 else
5277 /* Note the insertion point if necessary. */
5278 if (mp->min_address < min_address)
5279 min_mp = mp;
5280 else if (mp->max_address
5281 < minipool_barrier->address + mp->offset + fix->fix_size)
5283 /* Inserting before this entry would push the fix beyond
5284 its maximum address (which can happen if we have
5285 re-located a forwards fix); force the new fix to come
5286 after it. */
5287 min_mp = mp;
5288 min_address = mp->min_address + fix->fix_size;
5293 /* We need to create a new entry. */
5294 mp = xmalloc (sizeof (* mp));
5295 mp->fix_size = fix->fix_size;
5296 mp->mode = fix->mode;
5297 mp->value = fix->value;
5298 mp->refcount = 1;
5299 mp->max_address = minipool_barrier->address + 65536;
5301 mp->min_address = min_address;
5303 if (min_mp == NULL)
5305 mp->prev = NULL;
5306 mp->next = minipool_vector_head;
5308 if (mp->next == NULL)
5310 minipool_vector_tail = mp;
5311 minipool_vector_label = gen_label_rtx ();
5313 else
5314 mp->next->prev = mp;
5316 minipool_vector_head = mp;
5318 else
5320 mp->next = min_mp->next;
5321 mp->prev = min_mp;
5322 min_mp->next = mp;
5324 if (mp->next != NULL)
5325 mp->next->prev = mp;
5326 else
5327 minipool_vector_tail = mp;
5330 /* Save the new entry. */
5331 min_mp = mp;
5333 if (mp->prev)
5334 mp = mp->prev;
5335 else
5336 mp->offset = 0;
5338 /* Scan over the following entries and adjust their offsets. */
5339 while (mp->next != NULL)
5341 if (mp->next->min_address < mp->min_address + mp->fix_size)
5342 mp->next->min_address = mp->min_address + mp->fix_size;
5344 if (mp->refcount)
5345 mp->next->offset = mp->offset + mp->fix_size;
5346 else
5347 mp->next->offset = mp->offset;
5349 mp = mp->next;
5352 return min_mp;
5355 static void
5356 assign_minipool_offsets (barrier)
5357 Mfix * barrier;
5359 HOST_WIDE_INT offset = 0;
5360 Mnode * mp;
5362 minipool_barrier = barrier;
5364 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
5366 mp->offset = offset;
5368 if (mp->refcount > 0)
5369 offset += mp->fix_size;
 5373 /* Output the literal table.  */
5374 static void
5375 dump_minipool (scan)
5376 rtx scan;
5378 Mnode * mp;
5379 Mnode * nmp;
5381 if (rtl_dump_file)
5382 fprintf (rtl_dump_file,
5383 ";; Emitting minipool after insn %u; address %ld\n",
5384 INSN_UID (scan), (unsigned long) minipool_barrier->address);
5386 scan = emit_label_after (gen_label_rtx (), scan);
5387 scan = emit_insn_after (gen_align_4 (), scan);
5388 scan = emit_label_after (minipool_vector_label, scan);
5390 for (mp = minipool_vector_head; mp != NULL; mp = nmp)
5392 if (mp->refcount > 0)
5394 if (rtl_dump_file)
5396 fprintf (rtl_dump_file,
5397 ";; Offset %u, min %ld, max %ld ",
5398 (unsigned) mp->offset, (unsigned long) mp->min_address,
5399 (unsigned long) mp->max_address);
5400 arm_print_value (rtl_dump_file, mp->value);
5401 fputc ('\n', rtl_dump_file);
5404 switch (mp->fix_size)
5406 #ifdef HAVE_consttable_1
5407 case 1:
5408 scan = emit_insn_after (gen_consttable_1 (mp->value), scan);
5409 break;
5411 #endif
5412 #ifdef HAVE_consttable_2
5413 case 2:
5414 scan = emit_insn_after (gen_consttable_2 (mp->value), scan);
5415 break;
5417 #endif
5418 #ifdef HAVE_consttable_4
5419 case 4:
5420 scan = emit_insn_after (gen_consttable_4 (mp->value), scan);
5421 break;
5423 #endif
5424 #ifdef HAVE_consttable_8
5425 case 8:
5426 scan = emit_insn_after (gen_consttable_8 (mp->value), scan);
5427 break;
5429 #endif
5430 default:
5431 abort ();
5432 break;
5436 nmp = mp->next;
5437 free (mp);
5440 minipool_vector_head = minipool_vector_tail = NULL;
5441 scan = emit_insn_after (gen_consttable_end (), scan);
5442 scan = emit_barrier_after (scan);
5445 /* Return the cost of forcibly inserting a barrier after INSN. */
5446 static int
5447 arm_barrier_cost (insn)
5448 rtx insn;
5450 /* Basing the location of the pool on the loop depth is preferable,
5451 but at the moment, the basic block information seems to be
5452 corrupt by this stage of the compilation. */
5453 int base_cost = 50;
5454 rtx next = next_nonnote_insn (insn);
5456 if (next != NULL && GET_CODE (next) == CODE_LABEL)
5457 base_cost -= 20;
5459 switch (GET_CODE (insn))
5461 case CODE_LABEL:
5462 /* It will always be better to place the table before the label, rather
5463 than after it. */
5464 return 50;
5466 case INSN:
5467 case CALL_INSN:
5468 return base_cost;
5470 case JUMP_INSN:
5471 return base_cost - 10;
5473 default:
5474 return base_cost + 10;
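/* A worked example: with the base cost of 50, an ordinary INSN
   followed immediately by a CODE_LABEL costs 50 - 20 = 30, a JUMP_INSN
   with no label after it costs 50 - 10 = 40, and a CODE_LABEL itself
   always costs 50; create_fix_barrier below keeps the cheapest
   position it finds. */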
5478 /* Find the best place in the insn stream in the range
5479 (FIX->address,MAX_ADDRESS) to forcibly insert a minipool barrier.
5480 Create the barrier by inserting a jump and add a new fix entry for
5481 it. */
5482 static Mfix *
5483 create_fix_barrier (fix, max_address)
5484 Mfix * fix;
5485 HOST_WIDE_INT max_address;
5487 HOST_WIDE_INT count = 0;
5488 rtx barrier;
5489 rtx from = fix->insn;
5490 rtx selected = from;
5491 int selected_cost;
5492 HOST_WIDE_INT selected_address;
5493 Mfix * new_fix;
5494 HOST_WIDE_INT max_count = max_address - fix->address;
5495 rtx label = gen_label_rtx ();
5497 selected_cost = arm_barrier_cost (from);
5498 selected_address = fix->address;
5500 while (from && count < max_count)
5502 rtx tmp;
5503 int new_cost;
5505 /* This code shouldn't have been called if there was a natural barrier
5506 within range. */
5507 if (GET_CODE (from) == BARRIER)
5508 abort ();
5510 /* Count the length of this insn. */
5511 count += get_attr_length (from);
5513 /* If there is a jump table, add its length. */
5514 tmp = is_jump_table (from);
5515 if (tmp != NULL)
5517 count += get_jump_table_size (tmp);
5519 /* Jump tables aren't in a basic block, so base the cost on
5520 the dispatch insn. If we select this location, we will
5521 still put the pool after the table. */
5522 new_cost = arm_barrier_cost (from);
5524 if (count < max_count && new_cost <= selected_cost)
5526 selected = tmp;
5527 selected_cost = new_cost;
5528 selected_address = fix->address + count;
5531 /* Continue after the dispatch table. */
5532 from = NEXT_INSN (tmp);
5533 continue;
5536 new_cost = arm_barrier_cost (from);
5538 if (count < max_count && new_cost <= selected_cost)
5540 selected = from;
5541 selected_cost = new_cost;
5542 selected_address = fix->address + count;
5545 from = NEXT_INSN (from);
5548 /* Create a new JUMP_INSN that branches around a barrier. */
5549 from = emit_jump_insn_after (gen_jump (label), selected);
5550 JUMP_LABEL (from) = label;
5551 barrier = emit_barrier_after (from);
5552 emit_label_after (label, barrier);
5554 /* Create a minipool barrier entry for the new barrier. */
5555 new_fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* new_fix));
5556 new_fix->insn = barrier;
5557 new_fix->address = selected_address;
5558 new_fix->next = fix->next;
5559 fix->next = new_fix;
5561 return new_fix;
5564 /* Record that there is a natural barrier in the insn stream at
5565 ADDRESS. */
5566 static void
5567 push_minipool_barrier (insn, address)
5568 rtx insn;
5569 HOST_WIDE_INT address;
5571 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
5573 fix->insn = insn;
5574 fix->address = address;
5576 fix->next = NULL;
5577 if (minipool_fix_head != NULL)
5578 minipool_fix_tail->next = fix;
5579 else
5580 minipool_fix_head = fix;
5582 minipool_fix_tail = fix;
5585 /* Record INSN, which will need fixing up to load a value from the
 5586    minipool.  ADDRESS is the offset of the insn from the start of the
5587 function; LOC is a pointer to the part of the insn which requires
5588 fixing; VALUE is the constant that must be loaded, which is of type
5589 MODE. */
5590 static void
5591 push_minipool_fix (insn, address, loc, mode, value)
5592 rtx insn;
5593 HOST_WIDE_INT address;
5594 rtx * loc;
5595 enum machine_mode mode;
5596 rtx value;
5598 Mfix * fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (* fix));
5600 #ifdef AOF_ASSEMBLER
 5601   /* PIC symbol references need to be converted into offsets into the
5602 based area. */
5603 /* XXX This shouldn't be done here. */
5604 if (flag_pic && GET_CODE (value) == SYMBOL_REF)
5605 value = aof_pic_entry (value);
5606 #endif /* AOF_ASSEMBLER */
5608 fix->insn = insn;
5609 fix->address = address;
5610 fix->loc = loc;
5611 fix->mode = mode;
5612 fix->fix_size = MINIPOOL_FIX_SIZE (mode);
5613 fix->value = value;
5614 fix->forwards = get_attr_pool_range (insn);
5615 fix->backwards = get_attr_neg_pool_range (insn);
5616 fix->minipool = NULL;
5618 /* If an insn doesn't have a range defined for it, then it isn't
5619 expecting to be reworked by this code. Better to abort now than
5620 to generate duff assembly code. */
5621 if (fix->forwards == 0 && fix->backwards == 0)
5622 abort ();
5624 if (rtl_dump_file)
5626 fprintf (rtl_dump_file,
5627 ";; %smode fixup for i%d; addr %lu, range (%ld,%ld): ",
5628 GET_MODE_NAME (mode),
5629 INSN_UID (insn), (unsigned long) address,
5630 -1 * (long)fix->backwards, (long)fix->forwards);
5631 arm_print_value (rtl_dump_file, fix->value);
5632 fprintf (rtl_dump_file, "\n");
5635 /* Add it to the chain of fixes. */
5636 fix->next = NULL;
5638 if (minipool_fix_head != NULL)
5639 minipool_fix_tail->next = fix;
5640 else
5641 minipool_fix_head = fix;
5643 minipool_fix_tail = fix;
5646 /* Scan INSN and note any of its operands that need fixing. */
5647 static void
5648 note_invalid_constants (insn, address)
5649 rtx insn;
5650 HOST_WIDE_INT address;
5652 int opno;
5654 extract_insn (insn);
5656 if (!constrain_operands (1))
5657 fatal_insn_not_found (insn);
5659 /* Fill in recog_op_alt with information about the constraints of this
5660 insn. */
5661 preprocess_constraints ();
5663 for (opno = 0; opno < recog_data.n_operands; opno++)
5665 /* Things we need to fix can only occur in inputs. */
5666 if (recog_data.operand_type[opno] != OP_IN)
5667 continue;
5669 /* If this alternative is a memory reference, then any mention
5670 of constants in this alternative is really to fool reload
5671 into allowing us to accept one there. We need to fix them up
5672 now so that we output the right code. */
5673 if (recog_op_alt[opno][which_alternative].memory_ok)
5675 rtx op = recog_data.operand[opno];
5677 if (CONSTANT_P (op))
5678 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
5679 recog_data.operand_mode[opno], op);
5680 #if 0
5681 /* RWE: Now we look correctly at the operands for the insn,
5682 this shouldn't be needed any more. */
5683 #ifndef AOF_ASSEMBLER
5684 /* XXX Is this still needed? */
5685 else if (GET_CODE (op) == UNSPEC && XINT (op, 1) == 3)
5686 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
5687 recog_data.operand_mode[opno],
5688 XVECEXP (op, 0, 0));
5689 #endif
5690 #endif
5691 else if (GET_CODE (op) == MEM
5692 && GET_CODE (XEXP (op, 0)) == SYMBOL_REF
5693 && CONSTANT_POOL_ADDRESS_P (XEXP (op, 0)))
5694 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
5695 recog_data.operand_mode[opno],
5696 get_pool_constant (XEXP (op, 0)));
5701 void
5702 arm_reorg (first)
5703 rtx first;
5705 rtx insn;
5706 HOST_WIDE_INT address = 0;
5707 Mfix * fix;
5709 minipool_fix_head = minipool_fix_tail = NULL;
5711 /* The first insn must always be a note, or the code below won't
5712 scan it properly. */
5713 if (GET_CODE (first) != NOTE)
5714 abort ();
5716 /* Scan all the insns and record the operands that will need fixing. */
5717 for (insn = next_nonnote_insn (first); insn; insn = next_nonnote_insn (insn))
5719 if (GET_CODE (insn) == BARRIER)
5720 push_minipool_barrier (insn, address);
5721 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
5722 || GET_CODE (insn) == JUMP_INSN)
5724 rtx table;
5726 note_invalid_constants (insn, address);
5727 address += get_attr_length (insn);
5729 /* If the insn is a vector jump, add the size of the table
5730 and skip the table. */
5731 if ((table = is_jump_table (insn)) != NULL)
5733 address += get_jump_table_size (table);
5734 insn = table;
5739 fix = minipool_fix_head;
5741 /* Now scan the fixups and perform the required changes. */
5742 while (fix)
5744 Mfix * ftmp;
5745 Mfix * fdel;
5746 Mfix * last_added_fix;
5747 Mfix * last_barrier = NULL;
5748 Mfix * this_fix;
5750 /* Skip any further barriers before the next fix. */
5751 while (fix && GET_CODE (fix->insn) == BARRIER)
5752 fix = fix->next;
5754 /* No more fixes. */
5755 if (fix == NULL)
5756 break;
5758 last_added_fix = NULL;
5760 for (ftmp = fix; ftmp; ftmp = ftmp->next)
5762 if (GET_CODE (ftmp->insn) == BARRIER)
5764 if (ftmp->address >= minipool_vector_head->max_address)
5765 break;
5767 last_barrier = ftmp;
5769 else if ((ftmp->minipool = add_minipool_forward_ref (ftmp)) == NULL)
5770 break;
5772 last_added_fix = ftmp; /* Keep track of the last fix added. */
5775 /* If we found a barrier, drop back to that; any fixes that we
5776 could have reached but come after the barrier will now go in
5777 the next mini-pool. */
5778 if (last_barrier != NULL)
5780 /* Reduce the refcount for those fixes that won't go into this
5781 pool after all. */
5782 for (fdel = last_barrier->next;
5783 fdel && fdel != ftmp;
5784 fdel = fdel->next)
5786 fdel->minipool->refcount--;
5787 fdel->minipool = NULL;
5790 ftmp = last_barrier;
5792 else
 5794 	  /* ftmp is the first fix that we can't fit into this pool and
 5795 	     there are no natural barriers that we could use.  Insert a
5796 new barrier in the code somewhere between the previous
5797 fix and this one, and arrange to jump around it. */
5798 HOST_WIDE_INT max_address;
5800 /* The last item on the list of fixes must be a barrier, so
5801 we can never run off the end of the list of fixes without
5802 last_barrier being set. */
5803 if (ftmp == NULL)
5804 abort ();
5806 max_address = minipool_vector_head->max_address;
5807 /* Check that there isn't another fix that is in range that
5808 we couldn't fit into this pool because the pool was
5809 already too large: we need to put the pool before such an
5810 instruction. */
5811 if (ftmp->address < max_address)
5812 max_address = ftmp->address;
5814 last_barrier = create_fix_barrier (last_added_fix, max_address);
5817 assign_minipool_offsets (last_barrier);
5819 while (ftmp)
5821 if (GET_CODE (ftmp->insn) != BARRIER
5822 && ((ftmp->minipool = add_minipool_backward_ref (ftmp))
5823 == NULL))
5824 break;
5826 ftmp = ftmp->next;
5829 /* Scan over the fixes we have identified for this pool, fixing them
5830 up and adding the constants to the pool itself. */
5831 for (this_fix = fix; this_fix && ftmp != this_fix;
5832 this_fix = this_fix->next)
5833 if (GET_CODE (this_fix->insn) != BARRIER)
5835 rtx addr
5836 = plus_constant (gen_rtx_LABEL_REF (VOIDmode,
5837 minipool_vector_label),
5838 this_fix->minipool->offset);
5839 *this_fix->loc = gen_rtx_MEM (this_fix->mode, addr);
5842 dump_minipool (last_barrier->insn);
5843 fix = ftmp;
5846 /* From now on we must synthesize any constants that we can't handle
5847 directly. This can happen if the RTL gets split during final
5848 instruction generation. */
5849 after_arm_reorg = 1;
5851 /* Free the minipool memory. */
5852 obstack_free (&minipool_obstack, minipool_startobj);
5855 /* Routines to output assembly language. */
5857 /* If the rtx is the correct value then return the string of the number.
5858 In this way we can ensure that valid double constants are generated even
5859 when cross compiling. */
5860 const char *
5861 fp_immediate_constant (x)
5862 rtx x;
5864 REAL_VALUE_TYPE r;
5865 int i;
5867 if (!fpa_consts_inited)
5868 init_fpa_table ();
5870 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
5871 for (i = 0; i < 8; i++)
5872 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
5873 return strings_fpa[i];
5875 abort ();
5878 /* As for fp_immediate_constant, but value is passed directly, not in rtx. */
5879 static const char *
5880 fp_const_from_val (r)
5881 REAL_VALUE_TYPE * r;
5883 int i;
5885 if (!fpa_consts_inited)
5886 init_fpa_table ();
5888 for (i = 0; i < 8; i++)
5889 if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
5890 return strings_fpa[i];
5892 abort ();
5895 /* Output the operands of a LDM/STM instruction to STREAM.
5896 MASK is the ARM register set mask of which only bits 0-15 are important.
 5897    INSTR is the possibly suffixed load or store instruction, REG is the
 5898    base register.  HAT is nonzero if a '^' must follow the register list.  */
5900 static void
5901 print_multi_reg (stream, instr, reg, mask, hat)
5902 FILE * stream;
5903 const char * instr;
5904 int reg;
5905 int mask;
5906 int hat;
5908 int i;
5909 int not_first = FALSE;
5911 fputc ('\t', stream);
5912 asm_fprintf (stream, instr, reg);
5913 fputs (", {", stream);
5915 for (i = 0; i <= LAST_ARM_REGNUM; i++)
5916 if (mask & (1 << i))
5918 if (not_first)
5919 fprintf (stream, ", ");
5921 asm_fprintf (stream, "%r", i);
5922 not_first = TRUE;
5925 fprintf (stream, "}%s\n", hat ? "^" : "");
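/* For example, a call along the lines of (instruction template for
   illustration only)

	print_multi_reg (stream, "ldmfd\t%r!", SP_REGNUM,
			 (1 << 4) | (1 << 5) | (1 << LR_REGNUM), FALSE);

   prints "ldmfd sp!, {r4, r5, lr}", with a trailing '^' appended when
   HAT is nonzero. */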
5928 /* Output a 'call' insn. */
5930 const char *
5931 output_call (operands)
5932 rtx * operands;
5934 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
5936 if (REGNO (operands[0]) == LR_REGNUM)
5938 operands[0] = gen_rtx_REG (SImode, IP_REGNUM);
5939 output_asm_insn ("mov%?\t%0, %|lr", operands);
5942 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
5944 if (TARGET_INTERWORK)
5945 output_asm_insn ("bx%?\t%0", operands);
5946 else
5947 output_asm_insn ("mov%?\t%|pc, %0", operands);
5949 return "";
5952 static int
5953 eliminate_lr2ip (x)
5954 rtx * x;
5956 int something_changed = 0;
5957 rtx x0 = * x;
5958 int code = GET_CODE (x0);
5959 register int i, j;
5960 register const char * fmt;
5962 switch (code)
5964 case REG:
5965 if (REGNO (x0) == LR_REGNUM)
5967 *x = gen_rtx_REG (SImode, IP_REGNUM);
5968 return 1;
5970 return 0;
5971 default:
5972 /* Scan through the sub-elements and change any references there. */
5973 fmt = GET_RTX_FORMAT (code);
5975 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5976 if (fmt[i] == 'e')
5977 something_changed |= eliminate_lr2ip (&XEXP (x0, i));
5978 else if (fmt[i] == 'E')
5979 for (j = 0; j < XVECLEN (x0, i); j++)
5980 something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));
5982 return something_changed;
5986 /* Output a 'call' insn that is a reference in memory. */
5988 const char *
5989 output_call_mem (operands)
5990 rtx * operands;
5992 operands[0] = copy_rtx (operands[0]); /* Be ultra careful. */
5993 /* Handle calls using lr by using ip (which may be clobbered in subr anyway). */
5994 if (eliminate_lr2ip (&operands[0]))
5995 output_asm_insn ("mov%?\t%|ip, %|lr", operands);
5997 if (TARGET_INTERWORK)
5999 output_asm_insn ("ldr%?\t%|ip, %0", operands);
6000 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
6001 output_asm_insn ("bx%?\t%|ip", operands);
6003 else
6005 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
6006 output_asm_insn ("ldr%?\t%|pc, %0", operands);
6009 return "";
 6013 /* Output a move from arm registers to an fpu register.
 6014    OPERANDS[0] is an fpu register.
 6015    OPERANDS[1] is the first of the arm registers that hold the value.  */
6017 const char *
6018 output_mov_long_double_fpu_from_arm (operands)
6019 rtx * operands;
6021 int arm_reg0 = REGNO (operands[1]);
6022 rtx ops[3];
6024 if (arm_reg0 == IP_REGNUM)
6025 abort ();
6027 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6028 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6029 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
6031 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
6032 output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);
6034 return "";
6037 /* Output a move from an fpu register to arm registers.
 6038    OPERANDS[0] is the first of the arm registers that hold the value.
6039 OPERANDS[1] is an fpu register. */
6041 const char *
6042 output_mov_long_double_arm_from_fpu (operands)
6043 rtx * operands;
6045 int arm_reg0 = REGNO (operands[0]);
6046 rtx ops[3];
6048 if (arm_reg0 == IP_REGNUM)
6049 abort ();
6051 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6052 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6053 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
6055 output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
6056 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
6057 return "";
 6060 /* Output a move from arm registers to arm registers of a long double.
6061 OPERANDS[0] is the destination.
6062 OPERANDS[1] is the source. */
6063 const char *
6064 output_mov_long_double_arm_from_arm (operands)
6065 rtx * operands;
6067 /* We have to be careful here because the two might overlap. */
6068 int dest_start = REGNO (operands[0]);
6069 int src_start = REGNO (operands[1]);
6070 rtx ops[2];
6071 int i;
6073 if (dest_start < src_start)
6075 for (i = 0; i < 3; i++)
6077 ops[0] = gen_rtx_REG (SImode, dest_start + i);
6078 ops[1] = gen_rtx_REG (SImode, src_start + i);
6079 output_asm_insn ("mov%?\t%0, %1", ops);
6082 else
6084 for (i = 2; i >= 0; i--)
6086 ops[0] = gen_rtx_REG (SImode, dest_start + i);
6087 ops[1] = gen_rtx_REG (SImode, src_start + i);
6088 output_asm_insn ("mov%?\t%0, %1", ops);
6092 return "";
 6096 /* Output a move from arm registers to an fpu register.
 6097    OPERANDS[0] is an fpu register.
 6098    OPERANDS[1] is the first register of an arm register pair.  */
6100 const char *
6101 output_mov_double_fpu_from_arm (operands)
6102 rtx * operands;
6104 int arm_reg0 = REGNO (operands[1]);
6105 rtx ops[2];
6107 if (arm_reg0 == IP_REGNUM)
6108 abort ();
6110 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6111 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6112 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
6113 output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
6114 return "";
6117 /* Output a move from an fpu register to arm registers.
 6118    OPERANDS[0] is the first register of an arm register pair.
6119 OPERANDS[1] is an fpu register. */
6121 const char *
6122 output_mov_double_arm_from_fpu (operands)
6123 rtx * operands;
6125 int arm_reg0 = REGNO (operands[0]);
6126 rtx ops[2];
6128 if (arm_reg0 == IP_REGNUM)
6129 abort ();
6131 ops[0] = gen_rtx_REG (SImode, arm_reg0);
6132 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
6133 output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
6134 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
6135 return "";
6138 /* Output a move between double words.
6139 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
6140 or MEM<-REG and all MEMs must be offsettable addresses. */
6142 const char *
6143 output_move_double (operands)
6144 rtx * operands;
6146 enum rtx_code code0 = GET_CODE (operands[0]);
6147 enum rtx_code code1 = GET_CODE (operands[1]);
6148 rtx otherops[3];
6150 if (code0 == REG)
6152 int reg0 = REGNO (operands[0]);
6154 otherops[0] = gen_rtx_REG (SImode, 1 + reg0);
6156 if (code1 == REG)
6158 int reg1 = REGNO (operands[1]);
6159 if (reg1 == IP_REGNUM)
6160 abort ();
6162 /* Ensure the second source is not overwritten. */
6163 if (reg1 == reg0 + (WORDS_BIG_ENDIAN ? -1 : 1))
6164 output_asm_insn ("mov%?\t%Q0, %Q1\n\tmov%?\t%R0, %R1", operands);
6165 else
6166 output_asm_insn ("mov%?\t%R0, %R1\n\tmov%?\t%Q0, %Q1", operands);
6168 else if (code1 == CONST_DOUBLE)
6170 if (GET_MODE (operands[1]) == DFmode)
6172 long l[2];
6173 union real_extract u;
6175 memcpy (&u, &CONST_DOUBLE_LOW (operands[1]), sizeof (u));
6176 REAL_VALUE_TO_TARGET_DOUBLE (u.d, l);
6177 otherops[1] = GEN_INT (l[1]);
6178 operands[1] = GEN_INT (l[0]);
6180 else if (GET_MODE (operands[1]) != VOIDmode)
6181 abort ();
6182 else if (WORDS_BIG_ENDIAN)
6185 otherops[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
6186 operands[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
6188 else
6191 otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
6192 operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
6195 output_mov_immediate (operands);
6196 output_mov_immediate (otherops);
6198 else if (code1 == CONST_INT)
6200 #if HOST_BITS_PER_WIDE_INT > 32
6201 /* If HOST_WIDE_INT is more than 32 bits, the intval tells us
6202 what the upper word is. */
6203 if (WORDS_BIG_ENDIAN)
6205 otherops[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
6206 operands[1] = GEN_INT (INTVAL (operands[1]) >> 32);
6208 else
6210 otherops[1] = GEN_INT (INTVAL (operands[1]) >> 32);
6211 operands[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
6213 #else
6214 /* Sign extend the intval into the high-order word. */
6215 if (WORDS_BIG_ENDIAN)
6217 otherops[1] = operands[1];
6218 operands[1] = (INTVAL (operands[1]) < 0
6219 ? constm1_rtx : const0_rtx);
6221 else
6222 otherops[1] = INTVAL (operands[1]) < 0 ? constm1_rtx : const0_rtx;
6223 #endif
6224 output_mov_immediate (otherops);
6225 output_mov_immediate (operands);
6227 else if (code1 == MEM)
6229 switch (GET_CODE (XEXP (operands[1], 0)))
6231 case REG:
6232 output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
6233 break;
6235 case PRE_INC:
6236 abort (); /* Should never happen now. */
6237 break;
6239 case PRE_DEC:
6240 output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
6241 break;
6243 case POST_INC:
6244 output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
6245 break;
6247 case POST_DEC:
6248 abort (); /* Should never happen now. */
6249 break;
6251 case LABEL_REF:
6252 case CONST:
6253 output_asm_insn ("adr%?\t%0, %1", operands);
6254 output_asm_insn ("ldm%?ia\t%0, %M0", operands);
6255 break;
6257 default:
6258 if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1),
6259 GET_MODE (XEXP (XEXP (operands[1], 0), 1))))
6261 otherops[0] = operands[0];
6262 otherops[1] = XEXP (XEXP (operands[1], 0), 0);
6263 otherops[2] = XEXP (XEXP (operands[1], 0), 1);
6264 if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
6266 if (GET_CODE (otherops[2]) == CONST_INT)
6268 switch (INTVAL (otherops[2]))
6270 case -8:
6271 output_asm_insn ("ldm%?db\t%1, %M0", otherops);
6272 return "";
6273 case -4:
6274 output_asm_insn ("ldm%?da\t%1, %M0", otherops);
6275 return "";
6276 case 4:
6277 output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
6278 return "";
6280 if (!(const_ok_for_arm (INTVAL (otherops[2]))))
6281 output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
6282 else
6283 output_asm_insn ("add%?\t%0, %1, %2", otherops);
6285 else
6286 output_asm_insn ("add%?\t%0, %1, %2", otherops);
6288 else
6289 output_asm_insn ("sub%?\t%0, %1, %2", otherops);
6291 return "ldm%?ia\t%0, %M0";
6293 else
6295 otherops[1] = adj_offsettable_operand (operands[1], 4);
6296 /* Take care of overlapping base/data reg. */
6297 if (reg_mentioned_p (operands[0], operands[1]))
6299 output_asm_insn ("ldr%?\t%0, %1", otherops);
6300 output_asm_insn ("ldr%?\t%0, %1", operands);
6302 else
6304 output_asm_insn ("ldr%?\t%0, %1", operands);
6305 output_asm_insn ("ldr%?\t%0, %1", otherops);
6310 else
6311 abort (); /* Constraints should prevent this. */
6313 else if (code0 == MEM && code1 == REG)
6315 if (REGNO (operands[1]) == IP_REGNUM)
6316 abort ();
6318 switch (GET_CODE (XEXP (operands[0], 0)))
6320 case REG:
6321 output_asm_insn ("stm%?ia\t%m0, %M1", operands);
6322 break;
6324 case PRE_INC:
6325 abort (); /* Should never happen now. */
6326 break;
6328 case PRE_DEC:
6329 output_asm_insn ("stm%?db\t%m0!, %M1", operands);
6330 break;
6332 case POST_INC:
6333 output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
6334 break;
6336 case POST_DEC:
6337 abort (); /* Should never happen now. */
6338 break;
6340 case PLUS:
6341 if (GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT)
6343 switch (INTVAL (XEXP (XEXP (operands[0], 0), 1)))
6345 case -8:
6346 output_asm_insn ("stm%?db\t%m0, %M1", operands);
6347 return "";
6349 case -4:
6350 output_asm_insn ("stm%?da\t%m0, %M1", operands);
6351 return "";
6353 case 4:
6354 output_asm_insn ("stm%?ib\t%m0, %M1", operands);
6355 return "";
6358 /* Fall through. */
6360 default:
6361 otherops[0] = adj_offsettable_operand (operands[0], 4);
6362 otherops[1] = gen_rtx_REG (SImode, 1 + REGNO (operands[1]));
6363 output_asm_insn ("str%?\t%1, %0", operands);
6364 output_asm_insn ("str%?\t%1, %0", otherops);
6367 else
6368 abort (); /* Constraints should prevent this. */
6370 return "";
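/* A sketch of two of the addressing cases handled above (hypothetical
   registers): a DImode load from (mem (plus (reg r3) (const_int 4)))
   becomes the single instruction "ldmib r3, {r0-r1}", while a larger
   offset such as #16 is handled by first computing the address, giving

	add	r0, r3, #16
	ldmia	r0, {r0-r1}  */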
6374 /* Output an arbitrary MOV reg, #n.
6375 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
6377 const char *
6378 output_mov_immediate (operands)
6379 rtx * operands;
6381 HOST_WIDE_INT n = INTVAL (operands[1]);
6382 int n_ones = 0;
6383 int i;
6385 /* Try to use one MOV. */
6386 if (const_ok_for_arm (n))
6388 output_asm_insn ("mov%?\t%0, %1", operands);
6389 return "";
6392 /* Try to use one MVN. */
6393 if (const_ok_for_arm (~n))
6395 operands[1] = GEN_INT (~n);
6396 output_asm_insn ("mvn%?\t%0, %1", operands);
6397 return "";
6400 /* If all else fails, make it out of ORRs or BICs as appropriate. */
6402 for (i = 0; i < 32; i++)
6403 if (n & 1 << i)
6404 n_ones++;
6406 if (n_ones > 16) /* Shorter to use MVN with BIC in this case. */
6407 output_multi_immediate (operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1, ~n);
6408 else
6409 output_multi_immediate (operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1, n);
6411 return "";
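/* Two illustrative cases (hypothetical register): #255 is a valid ARM
   immediate and becomes a single "mov r0, #255", and #0xfffffffe
   becomes the single "mvn r0, #1"; a constant such as #0x00ff00ff fits
   neither form and is built up as

	mov	r0, #255
	orr	r0, r0, #16711680	@ i.e. 0x00ff0000  */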
6415 /* Output an ADD r, s, #n where n may be too big for one instruction. If
6416 adding zero to one register, output nothing. */
6418 const char *
6419 output_add_immediate (operands)
6420 rtx * operands;
6422 HOST_WIDE_INT n = INTVAL (operands[2]);
6424 if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
6426 if (n < 0)
6427 output_multi_immediate (operands,
6428 "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
6429 -n);
6430 else
6431 output_multi_immediate (operands,
6432 "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
6436 return "";
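/* For instance (hypothetical registers): an ADD of #4100, which is not
   a valid ARM immediate, comes out as the pair

	add	r0, r1, #4
	add	r0, r0, #4096

   and a negative N is emitted the same way using SUBs.  */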
6439 /* Output a multiple immediate operation.
6440 OPERANDS is the vector of operands referred to in the output patterns.
6441 INSTR1 is the output pattern to use for the first constant.
6442 INSTR2 is the output pattern to use for subsequent constants.
6443 IMMED_OP is the index of the constant slot in OPERANDS.
6444 N is the constant value. */
6446 static const char *
6447 output_multi_immediate (operands, instr1, instr2, immed_op, n)
6448 rtx * operands;
6449 const char * instr1;
6450 const char * instr2;
6451 int immed_op;
6452 HOST_WIDE_INT n;
6454 #if HOST_BITS_PER_WIDE_INT > 32
6455 n &= HOST_UINT (0xffffffff);
6456 #endif
6458 if (n == 0)
6460 operands[immed_op] = const0_rtx;
6461 output_asm_insn (instr1, operands); /* Quick and easy output. */
6463 else
6465 int i;
6466 const char * instr = instr1;
6468 /* Note that n is never zero here (which would give no output). */
6469 for (i = 0; i < 32; i += 2)
6471 if (n & (3 << i))
6473 operands[immed_op] = GEN_INT (n & (255 << i));
6474 output_asm_insn (instr, operands);
6475 instr = instr2;
6476 i += 6;
6481 return "";
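/* The two-bit stepping in the loop above is what keeps each extracted
   chunk encodable: an ARM immediate is an 8-bit value rotated right by
   an even amount, so every (255 << i) mask with even i is a valid
   immediate. The "i += 6" plus the loop's own "i += 2" then skips just
   past the byte that has been emitted.  */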
6485 /* Return the appropriate ARM instruction for the operation code.
6486 The returned result should not be overwritten. OP is the rtx of the
6487 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
6488 was shifted. */
6490 const char *
6491 arithmetic_instr (op, shift_first_arg)
6492 rtx op;
6493 int shift_first_arg;
6495 switch (GET_CODE (op))
6497 case PLUS:
6498 return "add";
6500 case MINUS:
6501 return shift_first_arg ? "rsb" : "sub";
6503 case IOR:
6504 return "orr";
6506 case XOR:
6507 return "eor";
6509 case AND:
6510 return "and";
6512 default:
6513 abort ();
6518 /* Ensure valid constant shifts and return the appropriate shift mnemonic
6519 for the operation code. The returned result should not be overwritten.
6520 OP is the rtx code of the shift.
6521 On exit, *AMOUNTP will be -1 if the shift is by a register, or the
6522 constant shift amount otherwise. */
6524 static const char *
6525 shift_op (op, amountp)
6526 rtx op;
6527 HOST_WIDE_INT *amountp;
6529 const char * mnem;
6530 enum rtx_code code = GET_CODE (op);
6532 if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
6533 *amountp = -1;
6534 else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
6535 *amountp = INTVAL (XEXP (op, 1));
6536 else
6537 abort ();
6539 switch (code)
6541 case ASHIFT:
6542 mnem = "asl";
6543 break;
6545 case ASHIFTRT:
6546 mnem = "asr";
6547 break;
6549 case LSHIFTRT:
6550 mnem = "lsr";
6551 break;
6553 case ROTATERT:
6554 mnem = "ror";
6555 break;
6557 case MULT:
6558 /* We never have to worry about the amount being other than a
6559 power of 2, since this case can never be reloaded from a reg. */
6560 if (*amountp != -1)
6561 *amountp = int_log2 (*amountp);
6562 else
6563 abort ();
6564 return "asl";
6566 default:
6567 abort ();
6570 if (*amountp != -1)
6572 /* This is not 100% correct, but follows from the desire to merge
6573 multiplication by a power of 2 with the recognizer for a
6574 shift. >=32 is not a valid shift for "asl", so we must try and
6575 output a shift that produces the correct arithmetical result.
6576 Using lsr #32 is identical except for the fact that the carry bit
6577 is not set correctly if we set the flags; but we never use the
6578 carry bit from such an operation, so we can ignore that. */
6579 if (code == ROTATERT)
6580 *amountp &= 31; /* Rotate is just modulo 32. */
6581 else if (*amountp != (*amountp & 31))
6583 if (code == ASHIFT)
6584 mnem = "lsr";
6585 *amountp = 32;
6588 /* Shifts of 0 are no-ops. */
6589 if (*amountp == 0)
6590 return NULL;
6593 return mnem;
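/* Some illustrative inputs: (ashiftrt x (const_int 2)) yields "asr"
   with *AMOUNTP = 2; (mult x (const_int 8)) is folded to "asl" with
   *AMOUNTP = 3; and an over-large (ashift x (const_int 33)) degrades
   to "lsr" with *AMOUNTP = 32, which still produces the arithmetically
   correct (zero) result.  */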
6597 /* Obtain the shift count from POWER, which must be a power of two. */
6598 static HOST_WIDE_INT
6599 int_log2 (power)
6600 HOST_WIDE_INT power;
6602 HOST_WIDE_INT shift = 0;
6604 while ((((HOST_INT (1)) << shift) & power) == 0)
6606 if (shift > 31)
6607 abort ();
6608 shift++;
6611 return shift;
6614 /* Output a .ascii pseudo-op, keeping track of lengths. This is because
6615 /bin/as is horribly restrictive. */
6616 #define MAX_ASCII_LEN 51
6618 void
6619 output_ascii_pseudo_op (stream, p, len)
6620 FILE * stream;
6621 const unsigned char * p;
6622 int len;
6624 int i;
6625 int len_so_far = 0;
6627 fputs ("\t.ascii\t\"", stream);
6629 for (i = 0; i < len; i++)
6631 register int c = p[i];
6633 if (len_so_far >= MAX_ASCII_LEN)
6635 fputs ("\"\n\t.ascii\t\"", stream);
6636 len_so_far = 0;
6639 switch (c)
6641 case TARGET_TAB:
6642 fputs ("\\t", stream);
6643 len_so_far += 2;
6644 break;
6646 case TARGET_FF:
6647 fputs ("\\f", stream);
6648 len_so_far += 2;
6649 break;
6651 case TARGET_BS:
6652 fputs ("\\b", stream);
6653 len_so_far += 2;
6654 break;
6656 case TARGET_CR:
6657 fputs ("\\r", stream);
6658 len_so_far += 2;
6659 break;
6661 case TARGET_NEWLINE:
6662 fputs ("\\n", stream);
6663 c = p [i + 1];
6664 if ((c >= ' ' && c <= '~')
6665 || c == TARGET_TAB)
6666 /* This is a good place for a line break. */
6667 len_so_far = MAX_ASCII_LEN;
6668 else
6669 len_so_far += 2;
6670 break;
6672 case '\"':
6673 case '\\':
6674 putc ('\\', stream);
6675 len_so_far++;
6676 /* Drop through. */
6678 default:
6679 if (c >= ' ' && c <= '~')
6681 putc (c, stream);
6682 len_so_far++;
6684 else
6686 fprintf (stream, "\\%03o", c);
6687 len_so_far += 4;
6689 break;
6693 fputs ("\"\n", stream);
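/* For example, the four bytes 'h' 'i' '"' '\n' would be emitted as

	.ascii	"hi\"\n"

   with a fresh .ascii directive started whenever MAX_ASCII_LEN output
   characters have accumulated on the current line.  */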
6697 const char *
6698 output_return_instruction (operand, really_return, reverse)
6699 rtx operand;
6700 int really_return;
6701 int reverse;
6703 char instr[100];
6704 int reg, live_regs = 0;
6705 int volatile_func = arm_volatile_func ();
6707 /* If a function is naked, don't use the "return" insn. */
6708 if (arm_naked_function_p (current_function_decl))
6709 return "";
6711 return_used_this_function = 1;
6713 if (TARGET_ABORT_NORETURN && volatile_func)
6715 /* If this function was declared non-returning, and we have found a tail
6716 call, then we have to trust that the called function won't return. */
6717 if (really_return)
6719 rtx ops[2];
6721 /* Otherwise, trap an attempted return by aborting. */
6722 ops[0] = operand;
6723 ops[1] = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)"
6724 : "abort");
6725 assemble_external_libcall (ops[1]);
6726 output_asm_insn (reverse ? "bl%D0\t%a1" : "bl%d0\t%a1", ops);
6729 return "";
6732 if (current_function_calls_alloca && !really_return)
6733 abort ();
6735 for (reg = 0; reg <= 10; reg++)
6736 if (regs_ever_live[reg] && !call_used_regs[reg])
6737 live_regs++;
6739 if (!TARGET_APCS_FRAME
6740 && !frame_pointer_needed
6741 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
6742 && !call_used_regs[HARD_FRAME_POINTER_REGNUM])
6743 live_regs++;
6745 if (flag_pic && !TARGET_SINGLE_PIC_BASE
6746 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
6747 live_regs++;
6749 if (live_regs || regs_ever_live[LR_REGNUM])
6750 live_regs++;
6752 if (frame_pointer_needed)
6753 live_regs += 4;
6755 /* On some ARM architectures it is faster to use LDR rather than LDM to
6756 load a single register. On other architectures, the cost is the same. */
6757 if (live_regs == 1
6758 && regs_ever_live[LR_REGNUM]
6759 && !really_return)
6760 output_asm_insn (reverse ? "ldr%?%D0\t%|lr, [%|sp], #4"
6761 : "ldr%?%d0\t%|lr, [%|sp], #4", &operand);
6762 else if (live_regs == 1
6763 && regs_ever_live[LR_REGNUM]
6764 && TARGET_APCS_32)
6765 output_asm_insn (reverse ? "ldr%?%D0\t%|pc, [%|sp], #4"
6766 : "ldr%?%d0\t%|pc, [%|sp], #4", &operand);
6767 else if (live_regs)
6769 if (!regs_ever_live[LR_REGNUM])
6770 live_regs++;
6772 if (frame_pointer_needed)
6773 strcpy (instr,
6774 reverse ? "ldm%?%D0ea\t%|fp, {" : "ldm%?%d0ea\t%|fp, {");
6775 else
6776 strcpy (instr,
6777 reverse ? "ldm%?%D0fd\t%|sp!, {" : "ldm%?%d0fd\t%|sp!, {");
6779 for (reg = 0; reg <= 10; reg++)
6780 if (regs_ever_live[reg]
6781 && (!call_used_regs[reg]
6782 || (flag_pic && !TARGET_SINGLE_PIC_BASE
6783 && reg == PIC_OFFSET_TABLE_REGNUM)))
6785 strcat (instr, "%|");
6786 strcat (instr, reg_names[reg]);
6787 if (--live_regs)
6788 strcat (instr, ", ");
6791 if (frame_pointer_needed)
6793 strcat (instr, "%|");
6794 strcat (instr, reg_names[11]);
6795 strcat (instr, ", ");
6796 strcat (instr, "%|");
6797 strcat (instr, reg_names[13]);
6798 strcat (instr, ", ");
6799 strcat (instr, "%|");
6800 strcat (instr, TARGET_INTERWORK || (!really_return)
6801 ? reg_names[LR_REGNUM] : reg_names[PC_REGNUM]);
6803 else
6805 if (!TARGET_APCS_FRAME
6806 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
6807 && !call_used_regs[HARD_FRAME_POINTER_REGNUM])
6809 strcat (instr, "%|");
6810 strcat (instr, reg_names[HARD_FRAME_POINTER_REGNUM]);
6811 strcat (instr, ", ");
6814 strcat (instr, "%|");
6816 if (TARGET_INTERWORK && really_return)
6817 strcat (instr, reg_names[IP_REGNUM]);
6818 else
6819 strcat (instr, really_return ? reg_names[PC_REGNUM] : reg_names[LR_REGNUM]);
6822 strcat (instr, (TARGET_APCS_32 || !really_return) ? "}" : "}^");
6823 output_asm_insn (instr, &operand);
6825 if (TARGET_INTERWORK && really_return)
6827 strcpy (instr, "bx%?");
6828 strcat (instr, reverse ? "%D0" : "%d0");
6829 strcat (instr, "\t%|");
6830 strcat (instr, frame_pointer_needed ? "lr" : "ip");
6832 output_asm_insn (instr, &operand);
6835 else if (really_return)
6837 if (TARGET_INTERWORK)
6838 sprintf (instr, "bx%%?%%%s0\t%%|lr", reverse ? "D" : "d");
6839 else
6840 sprintf (instr, "mov%%?%%%s0%s\t%%|pc, %%|lr",
6841 reverse ? "D" : "d", TARGET_APCS_32 ? "" : "s");
6843 output_asm_insn (instr, &operand);
6846 return "";
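/* A typical instance (hypothetical register usage): for a function
   that saved r4-r6 and lr, with no frame pointer, on an APCS-32 target
   the whole return collapses to

	ldmfd	sp!, {r4, r5, r6, pc}

   popping the saved lr value straight into pc.  */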
6849 /* Return nonzero if optimizing and the current function is volatile.
6850 Such functions never return, and many memory cycles can be saved
6851 by not storing register values that will never be needed again.
6852 This optimization was added to speed up context switching in a
6853 kernel application. */
6854 int
6855 arm_volatile_func ()
6857 return (optimize > 0
6858 && current_function_nothrow
6859 && TREE_THIS_VOLATILE (current_function_decl));
6862 /* Write the function name into the code section, directly preceding
6863 the function prologue.
6865 Code will be output similar to this:
6867 .ascii "arm_poke_function_name", 0
6868 .align
6870 .word 0xff000000 + (t1 - t0)
6871 arm_poke_function_name
6872 mov ip, sp
6873 stmfd sp!, {fp, ip, lr, pc}
6874 sub fp, ip, #4
6876 When performing a stack backtrace, code can inspect the value
6877 of 'pc' stored at 'fp' + 0. If the trace function then looks
6878 at location pc - 12 and the top 8 bits are set, then we know
6879 that there is a function name embedded immediately preceding this
6880 location, whose length is given by ((pc[-3]) & 0x00ffffff).
6882 We assume that pc is declared as a pointer to an unsigned long.
6884 It is of no benefit to output the function name if we are assembling
6885 a leaf function. These function types will not contain a stack
6886 backtrace structure, therefore it is not possible to determine the
6887 function name. */
6889 void
6890 arm_poke_function_name (stream, name)
6891 FILE * stream;
6892 char * name;
6894 unsigned long alignlength;
6895 unsigned long length;
6896 rtx x;
6898 length = strlen (name) + 1;
6899 alignlength = ROUND_UP (length);
6901 ASM_OUTPUT_ASCII (stream, name, length);
6902 ASM_OUTPUT_ALIGN (stream, 2);
6903 x = GEN_INT (HOST_UINT(0xff000000) + alignlength);
6904 ASM_OUTPUT_INT (stream, x);
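/* A backtrace routine might recover the name along these lines (an
   illustrative sketch only; PC is assumed to be an unsigned long *
   holding the value saved at fp + 0):

     if ((pc[-3] & 0xff000000) == 0xff000000)
       name = (const char *) pc - 12 - (pc[-3] & 0x00ffffff);  */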
6907 /* The amount of stack adjustment that happens here, in output_return and in
6908 output_epilogue must be exactly the same as was calculated during reload,
6909 or things will point to the wrong place. The only time we can safely
6910 ignore this constraint is when a function has no arguments on the stack,
6911 no stack frame requirement and no live registers except for `lr'. If we
6912 can guarantee that by making all function calls into tail calls and that
6913 lr is not clobbered in any other way, then there is no need to push lr
6914 onto the stack. */
6915 void
6916 output_arm_prologue (f, frame_size)
6917 FILE * f;
6918 int frame_size;
6920 int reg, live_regs_mask = 0;
6921 int volatile_func = arm_volatile_func ();
6923 /* Nonzero if we must stuff some register arguments onto the stack as if
6924 they were passed there. */
6925 int store_arg_regs = 0;
6927 if (arm_ccfsm_state || arm_target_insn)
6928 abort (); /* Sanity check. */
6930 if (arm_naked_function_p (current_function_decl))
6931 return;
6933 return_used_this_function = 0;
6935 asm_fprintf (f, "\t%@ args = %d, pretend = %d, frame = %d\n",
6936 current_function_args_size,
6937 current_function_pretend_args_size, frame_size);
6938 asm_fprintf (f, "\t%@ frame_needed = %d, current_function_anonymous_args = %d\n",
6939 frame_pointer_needed,
6940 current_function_anonymous_args);
6942 if (volatile_func)
6943 asm_fprintf (f, "\t%@ Volatile function.\n");
6945 if (current_function_needs_context)
6946 asm_fprintf (f, "\t%@ Nested function.\n");
6948 if (current_function_anonymous_args && current_function_pretend_args_size)
6949 store_arg_regs = 1;
6951 for (reg = 0; reg <= 10; reg++)
6952 if (regs_ever_live[reg] && !call_used_regs[reg])
6953 live_regs_mask |= (1 << reg);
6955 if (!TARGET_APCS_FRAME
6956 && !frame_pointer_needed
6957 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
6958 && !call_used_regs[HARD_FRAME_POINTER_REGNUM])
6959 live_regs_mask |= (1 << HARD_FRAME_POINTER_REGNUM);
6961 if (flag_pic && !TARGET_SINGLE_PIC_BASE
6962 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
6963 live_regs_mask |= (1 << PIC_OFFSET_TABLE_REGNUM);
6965 if (frame_pointer_needed)
6966 live_regs_mask |= 0xD800;
6967 else if (regs_ever_live[LR_REGNUM])
6969 live_regs_mask |= 1 << LR_REGNUM;
6972 if (live_regs_mask)
6973 /* If a di mode load/store multiple is used, and the base register
6974 is r3, then r4 can become an ever live register without lr
6975 doing so; in this case we need to push lr as well, or we
6976 will fail to get a proper return. */
6977 live_regs_mask |= 1 << LR_REGNUM;
6979 #ifdef AOF_ASSEMBLER
6980 if (flag_pic)
6981 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, PIC_OFFSET_TABLE_REGNUM);
6982 #endif
6985 const char *
6986 arm_output_epilogue (really_return)
6987 int really_return;
6989 int reg;
6990 int live_regs_mask = 0;
6991 /* If we need this, then it will always be at least this much. */
6992 int floats_offset = 12;
6993 rtx operands[3];
6994 int frame_size = get_frame_size ();
6995 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
6996 FILE * f = asm_out_file;
6997 int volatile_func = arm_volatile_func ();
6998 int return_regnum;
7000 if (use_return_insn (FALSE) && return_used_this_function)
7001 return "";
7003 /* Naked functions don't have epilogues. */
7004 if (arm_naked_function_p (current_function_decl))
7005 return "";
7007 /* If we are throwing an exception, the address we want to jump to is in
7008 R1; otherwise, it's in LR. */
7009 return_regnum = eh_ofs ? 2 : LR_REGNUM;
7011 /* If we are throwing an exception, then we really must be doing a return,
7012 so we can't tail-call. */
7013 if (eh_ofs && !really_return)
7014 abort ();
7016 /* A volatile function should never return. Call abort. */
7017 if (TARGET_ABORT_NORETURN && volatile_func)
7019 rtx op;
7020 op = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)" : "abort");
7021 assemble_external_libcall (op);
7022 output_asm_insn ("bl\t%a0", &op);
7023 return "";
7026 for (reg = 0; reg <= 10; reg++)
7027 if (regs_ever_live[reg] && !call_used_regs[reg])
7029 live_regs_mask |= (1 << reg);
7030 floats_offset += 4;
7033 /* Handle the frame pointer as a special case. */
7034 if (!TARGET_APCS_FRAME
7035 && !frame_pointer_needed
7036 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
7037 && !call_used_regs[HARD_FRAME_POINTER_REGNUM])
7039 live_regs_mask |= (1 << HARD_FRAME_POINTER_REGNUM);
7040 floats_offset += 4;
7043 /* If we aren't loading the PIC register, don't stack it even though it may
7044 be live. */
7045 if (flag_pic && !TARGET_SINGLE_PIC_BASE
7046 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
7048 live_regs_mask |= (1 << PIC_OFFSET_TABLE_REGNUM);
7049 floats_offset += 4;
7052 if (frame_pointer_needed)
7054 if (arm_fpu_arch == FP_SOFT2)
7056 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
7057 if (regs_ever_live[reg] && !call_used_regs[reg])
7059 floats_offset += 12;
7060 asm_fprintf (f, "\tldfe\t%r, [%r, #-%d]\n",
7061 reg, FP_REGNUM, floats_offset);
7064 else
7066 int start_reg = LAST_ARM_FP_REGNUM;
7068 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
7070 if (regs_ever_live[reg] && !call_used_regs[reg])
7072 floats_offset += 12;
7074 /* We can't unstack more than four registers at once. */
7075 if (start_reg - reg == 3)
7077 asm_fprintf (f, "\tlfm\t%r, 4, [%r, #-%d]\n",
7078 reg, FP_REGNUM, floats_offset);
7079 start_reg = reg - 1;
7082 else
7084 if (reg != start_reg)
7085 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
7086 reg + 1, start_reg - reg,
7087 FP_REGNUM, floats_offset);
7088 start_reg = reg - 1;
7092 /* Just in case the last register checked also needs unstacking. */
7093 if (reg != start_reg)
7094 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
7095 reg + 1, start_reg - reg,
7096 FP_REGNUM, floats_offset);
7099 if (TARGET_INTERWORK)
7101 live_regs_mask |= 0x6800;
7102 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, live_regs_mask, FALSE);
7103 if (eh_ofs)
7104 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
7105 REGNO (eh_ofs));
7106 if (really_return)
7107 asm_fprintf (f, "\tbx\t%r\n", return_regnum);
7109 else if (eh_ofs || !really_return)
7111 live_regs_mask |= 0x6800;
7112 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, live_regs_mask, FALSE);
7113 if (eh_ofs)
7115 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
7116 REGNO (eh_ofs));
7117 /* Even in 26-bit mode we do a mov (rather than a movs)
7118 because we don't have the PSR bits set in the
7119 address. */
7120 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, return_regnum);
7123 else
7125 live_regs_mask |= 0xA800;
7126 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, live_regs_mask,
7127 TARGET_APCS_32 ? FALSE : TRUE);
7130 else
7132 /* Restore stack pointer if necessary. */
7133 if (frame_size + current_function_outgoing_args_size != 0)
7135 operands[0] = operands[1] = stack_pointer_rtx;
7136 operands[2] = GEN_INT (frame_size
7137 + current_function_outgoing_args_size);
7138 output_add_immediate (operands);
7141 if (arm_fpu_arch == FP_SOFT2)
7143 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
7144 if (regs_ever_live[reg] && !call_used_regs[reg])
7145 asm_fprintf (f, "\tldfe\t%r, [%r], #12\n",
7146 reg, SP_REGNUM);
7148 else
7150 int start_reg = FIRST_ARM_FP_REGNUM;
7152 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
7154 if (regs_ever_live[reg] && !call_used_regs[reg])
7156 if (reg - start_reg == 3)
7158 asm_fprintf (f, "\tlfmfd\t%r, 4, [%r]!\n",
7159 start_reg, SP_REGNUM);
7160 start_reg = reg + 1;
7163 else
7165 if (reg != start_reg)
7166 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
7167 start_reg, reg - start_reg,
7168 SP_REGNUM);
7170 start_reg = reg + 1;
7174 /* Just in case the last register checked also needs unstacking. */
7175 if (reg != start_reg)
7176 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
7177 start_reg, reg - start_reg, SP_REGNUM);
7180 if (current_function_pretend_args_size == 0 && regs_ever_live[LR_REGNUM])
7182 if (TARGET_INTERWORK)
7184 live_regs_mask |= 1 << LR_REGNUM;
7186 /* Handle LR on its own. */
7187 if (live_regs_mask == (1 << LR_REGNUM))
7189 if (eh_ofs)
7190 asm_fprintf (f, "\tadd\t%r, %r, #4\n", SP_REGNUM,
7191 SP_REGNUM);
7192 else
7193 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM,
7194 SP_REGNUM);
7196 else if (live_regs_mask != 0)
7197 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, live_regs_mask,
7198 FALSE);
7200 if (eh_ofs)
7201 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
7202 REGNO (eh_ofs));
7204 if (really_return)
7205 asm_fprintf (f, "\tbx\t%r\n", return_regnum);
7207 else if (eh_ofs)
7209 if (live_regs_mask == 0)
7210 asm_fprintf (f, "\tadd\t%r, %r, #4\n", SP_REGNUM, SP_REGNUM);
7211 else
7212 print_multi_reg (f, "\tldmfd\t%r!", SP_REGNUM,
7213 live_regs_mask | (1 << LR_REGNUM), FALSE);
7215 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
7216 REGNO (eh_ofs));
7217 /* Jump to the target; even in 26-bit mode. */
7218 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, return_regnum);
7220 else if (TARGET_APCS_32 && live_regs_mask == 0 && !really_return)
7221 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM, SP_REGNUM);
7222 else if (TARGET_APCS_32 && live_regs_mask == 0 && really_return)
7223 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", PC_REGNUM, SP_REGNUM);
7224 else if (!really_return)
7225 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM,
7226 live_regs_mask | (1 << LR_REGNUM), FALSE);
7227 else
7228 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM,
7229 live_regs_mask | (1 << PC_REGNUM),
7230 TARGET_APCS_32 ? FALSE : TRUE);
7232 else
7234 if (live_regs_mask || regs_ever_live[LR_REGNUM])
7236 /* Restore the integer regs, and the return address into lr. */
7237 live_regs_mask |= 1 << LR_REGNUM;
7239 if (live_regs_mask == (1 << LR_REGNUM))
7241 if (eh_ofs)
7242 asm_fprintf (f, "\tadd\t%r, %r, #4\n", SP_REGNUM,
7243 SP_REGNUM);
7244 else
7245 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM,
7246 SP_REGNUM);
7248 else if (live_regs_mask != 0)
7249 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, live_regs_mask,
7250 FALSE);
7253 if (current_function_pretend_args_size)
7255 /* Unwind the pre-pushed regs. */
7256 operands[0] = operands[1] = stack_pointer_rtx;
7257 operands[2] = GEN_INT (current_function_pretend_args_size);
7258 output_add_immediate (operands);
7261 if (eh_ofs)
7262 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
7263 REGNO (eh_ofs));
7265 if (really_return)
7267 /* And finally, go home. */
7268 if (TARGET_INTERWORK)
7269 asm_fprintf (f, "\tbx\t%r\n", return_regnum);
7270 else if (TARGET_APCS_32 || eh_ofs)
7271 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, return_regnum);
7272 else
7273 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, return_regnum);
7278 return "";
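/* As a concrete instance of the frame-pointer case above (hypothetical
   register usage): with only r4 saved, the epilogue reduces to the
   single instruction

	ldmea	fp, {r4, fp, sp, pc}

   which restores the saved registers, unwinds the frame and returns in
   one go.  */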
7281 void
7282 output_func_epilogue (frame_size)
7283 int frame_size;
7285 if (TARGET_THUMB)
7287 /* ??? Probably not safe to set this here, since it assumes that a
7288 function will be emitted as assembly immediately after we generate
7289 RTL for it. This does not happen for inline functions. */
7290 return_used_this_function = 0;
7292 else
7294 if (use_return_insn (FALSE)
7295 && return_used_this_function
7296 && (frame_size + current_function_outgoing_args_size) != 0
7297 && !frame_pointer_needed)
7298 abort ();
7300 /* Reset the ARM-specific per-function variables. */
7301 current_function_anonymous_args = 0;
7302 after_arm_reorg = 0;
7306 /* Generate and emit an insn that we will recognize as a push_multi.
7307 Unfortunately, since this insn does not reflect very well the actual
7308 semantics of the operation, we need to annotate the insn for the benefit
7309 of DWARF2 frame unwind information. */
7310 static rtx
7311 emit_multi_reg_push (mask)
7312 int mask;
7314 int num_regs = 0;
7315 int i, j;
7316 rtx par;
7317 rtx dwarf;
7318 int dwarf_par_index;
7319 rtx tmp, reg;
7321 for (i = 0; i <= LAST_ARM_REGNUM; i++)
7322 if (mask & (1 << i))
7323 num_regs++;
7325 if (num_regs == 0 || num_regs > 16)
7326 abort ();
7328 /* For the body of the insn we are going to generate an UNSPEC in
7329 parallel with several USEs. This allows the insn to be recognised
7330 by the push_multi pattern in the arm.md file. The insn looks
7331 something like this:
7333 (parallel [
7334 (set (mem:BLK (pre_dec:BLK (reg:SI sp))) (unspec:BLK [(reg:SI r4)] 2))
7335 (use (reg:SI 11 fp))
7336 (use (reg:SI 12 ip))
7337 (use (reg:SI 14 lr))
7338 (use (reg:SI 15 pc))
7341 For the frame note however, we try to be more explicit and actually
7342 show each register being stored into the stack frame, plus a (single)
7343 decrement of the stack pointer. We do it this way in order to be
7344 friendly to the stack unwinding code, which only wants to see a single
7345 stack decrement per instruction. The RTL we generate for the note looks
7346 something like this:
7348 (sequence [
7349 (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int -20)))
7350 (set (mem:SI (reg:SI sp)) (reg:SI r4))
7351 (set (mem:SI (plus:SI (reg:SI sp) (const_int 4))) (reg:SI fp))
7352 (set (mem:SI (plus:SI (reg:SI sp) (const_int 8))) (reg:SI ip))
7353 (set (mem:SI (plus:SI (reg:SI sp) (const_int 12))) (reg:SI lr))
7354 (set (mem:SI (plus:SI (reg:SI sp) (const_int 16))) (reg:SI pc))
7357 This sequence is used both by the code to support stack unwinding for
7358 exceptions handlers and the code to generate dwarf2 frame debugging. */
7360 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_regs));
7361 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (num_regs + 1));
7362 RTX_FRAME_RELATED_P (dwarf) = 1;
7363 dwarf_par_index = 1;
7365 for (i = 0; i <= LAST_ARM_REGNUM; i++)
7367 if (mask & (1 << i))
7369 reg = gen_rtx_REG (SImode, i);
7371 XVECEXP (par, 0, 0)
7372 = gen_rtx_SET (VOIDmode,
7373 gen_rtx_MEM (BLKmode,
7374 gen_rtx_PRE_DEC (BLKmode,
7375 stack_pointer_rtx)),
7376 gen_rtx_UNSPEC (BLKmode,
7377 gen_rtvec (1, reg),
7378 2));
7380 tmp = gen_rtx_SET (VOIDmode,
7381 gen_rtx_MEM (SImode, stack_pointer_rtx),
7382 reg);
7383 RTX_FRAME_RELATED_P (tmp) = 1;
7384 XVECEXP (dwarf, 0, dwarf_par_index) = tmp;
7385 dwarf_par_index ++;
7387 break;
7391 for (j = 1, i++; j < num_regs; i++)
7393 if (mask & (1 << i))
7395 reg = gen_rtx_REG (SImode, i);
7397 XVECEXP (par, 0, j) = gen_rtx_USE (VOIDmode, reg);
7399 tmp = gen_rtx_SET (VOIDmode,
7400 gen_rtx_MEM (SImode,
7401 gen_rtx_PLUS (SImode,
7402 stack_pointer_rtx,
7403 GEN_INT (4 * j))),
7404 reg);
7405 RTX_FRAME_RELATED_P (tmp) = 1;
7406 XVECEXP (dwarf, 0, dwarf_par_index ++) = tmp;
7408 j++;
7412 par = emit_insn (par);
7414 tmp = gen_rtx_SET (SImode,
7415 stack_pointer_rtx,
7416 gen_rtx_PLUS (SImode,
7417 stack_pointer_rtx,
7418 GEN_INT (-4 * num_regs)));
7419 RTX_FRAME_RELATED_P (tmp) = 1;
7420 XVECEXP (dwarf, 0, 0) = tmp;
7422 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
7423 REG_NOTES (par));
7424 return par;
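/* A typical call (illustrative): emit_multi_reg_push ((1 << 4) | (1 << LR_REGNUM))
   emits the insn for "stmfd sp!, {r4, lr}" together with a frame note
   recording one 8-byte stack decrement and the two individual stores.  */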
7427 static rtx
7428 emit_sfm (base_reg, count)
7429 int base_reg;
7430 int count;
7432 rtx par;
7433 rtx dwarf;
7434 rtx tmp, reg;
7435 int i;
7437 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
7438 dwarf = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
7439 RTX_FRAME_RELATED_P (dwarf) = 1;
7441 reg = gen_rtx_REG (XFmode, base_reg++);
7443 XVECEXP (par, 0, 0)
7444 = gen_rtx_SET (VOIDmode,
7445 gen_rtx_MEM (BLKmode,
7446 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
7447 gen_rtx_UNSPEC (BLKmode,
7448 gen_rtvec (1, reg),
7449 2));
7450 tmp
7451 = gen_rtx_SET (VOIDmode,
7452 gen_rtx_MEM (XFmode,
7453 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
7454 reg);
7455 RTX_FRAME_RELATED_P (tmp) = 1;
7456 XVECEXP (dwarf, 0, count - 1) = tmp;
7458 for (i = 1; i < count; i++)
7460 reg = gen_rtx_REG (XFmode, base_reg++);
7461 XVECEXP (par, 0, i) = gen_rtx_USE (VOIDmode, reg);
7463 tmp = gen_rtx_SET (VOIDmode,
7464 gen_rtx_MEM (XFmode,
7465 gen_rtx_PRE_DEC (BLKmode,
7466 stack_pointer_rtx)),
7467 reg);
7468 RTX_FRAME_RELATED_P (tmp) = 1;
7469 XVECEXP (dwarf, 0, count - i - 1) = tmp;
7472 par = emit_insn (par);
7473 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
7474 REG_NOTES (par));
7475 return par;
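/* For example (an illustrative sketch, assuming f0 is hard register
   16): emit_sfm (16, 3) builds the insn that the sfm pattern prints as
   "sfmfd f0, 3, [sp]!", with a DWARF note recording one store per
   register.  */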
7478 void
7479 arm_expand_prologue ()
7481 int reg;
7482 rtx amount = GEN_INT (-(get_frame_size ()
7483 + current_function_outgoing_args_size));
7484 int live_regs_mask = 0;
7485 int store_arg_regs = 0;
7486 /* If this function doesn't return, then there is no need to push
7487 the call-saved regs. */
7488 int volatile_func = arm_volatile_func ();
7489 rtx insn;
7490 rtx ip_rtx;
7491 int fp_offset = 0;
7494 /* Naked functions don't have prologues. */
7495 if (arm_naked_function_p (current_function_decl))
7496 return;
7498 if (current_function_anonymous_args && current_function_pretend_args_size)
7499 store_arg_regs = 1;
7501 if (!volatile_func)
7503 for (reg = 0; reg <= 10; reg++)
7504 if (regs_ever_live[reg] && !call_used_regs[reg])
7505 live_regs_mask |= 1 << reg;
7507 if (!TARGET_APCS_FRAME
7508 && !frame_pointer_needed
7509 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
7510 && !call_used_regs[HARD_FRAME_POINTER_REGNUM])
7511 live_regs_mask |= 1 << HARD_FRAME_POINTER_REGNUM;
7513 if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
7514 live_regs_mask |= 1 << PIC_OFFSET_TABLE_REGNUM;
7516 if (regs_ever_live[LR_REGNUM])
7517 live_regs_mask |= 1 << LR_REGNUM;
7520 ip_rtx = gen_rtx_REG (SImode, IP_REGNUM);
7522 if (frame_pointer_needed)
7524 if (current_function_needs_context)
7526 /* The static chain register is the same as the IP register
7527 used as a scratch register during stack frame creation.
7528 To get around this, we need to find somewhere to store IP
7529 whilst the frame is being created. We try the following
7530 places in order:
7532 1. An unused argument register.
7533 2. A slot on the stack above the frame. (This only
7534 works if the function is not a varargs function).
7536 If neither of these places is available, we abort (for now). */
7537 if (regs_ever_live[3] == 0)
7539 insn = gen_rtx_REG (SImode, 3);
7540 insn = gen_rtx_SET (SImode, insn, ip_rtx);
7541 insn = emit_insn (insn);
7542 RTX_FRAME_RELATED_P (insn) = 1;
7544 else if (current_function_pretend_args_size == 0)
7546 insn = gen_rtx_PRE_DEC (SImode, stack_pointer_rtx);
7547 insn = gen_rtx_MEM (SImode, insn);
7548 insn = gen_rtx_SET (VOIDmode, insn, ip_rtx);
7549 insn = emit_insn (insn);
7550 RTX_FRAME_RELATED_P (insn) = 1;
7551 fp_offset = 4;
7553 else
7554 /* FIXME - the way to handle this situation is to allow
7555 the pretend args to be dumped onto the stack, then
7556 reuse r3 to save IP. This would involve moving the
7557 copying of SP into IP until after the pretend args
7558 have been dumped, but this is not too hard. */
7559 error ("Unable to find a temporary location for static chain register");
7562 live_regs_mask |= 0xD800;
7564 if (fp_offset)
7566 insn = gen_rtx_PLUS (SImode, stack_pointer_rtx, GEN_INT (fp_offset));
7567 insn = gen_rtx_SET (SImode, ip_rtx, insn);
7569 else
7570 insn = gen_movsi (ip_rtx, stack_pointer_rtx);
7572 insn = emit_insn (insn);
7573 RTX_FRAME_RELATED_P (insn) = 1;
7576 if (current_function_pretend_args_size)
7578 if (store_arg_regs)
7579 insn = emit_multi_reg_push
7580 ((0xf0 >> (current_function_pretend_args_size / 4)) & 0xf);
7581 else
7582 insn = emit_insn
7583 (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
7584 GEN_INT (-current_function_pretend_args_size)));
7585 RTX_FRAME_RELATED_P (insn) = 1;
7588 if (live_regs_mask)
7590 /* If we have to push any regs, then we must push lr as well, or
7591 we won't get a proper return. */
7592 live_regs_mask |= 1 << LR_REGNUM;
7593 insn = emit_multi_reg_push (live_regs_mask);
7594 RTX_FRAME_RELATED_P (insn) = 1;
7597 /* For now the integer regs are still restored in arm_output_epilogue (). */
7599 if (!volatile_func)
7601 if (arm_fpu_arch == FP_SOFT2)
7603 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg --)
7604 if (regs_ever_live[reg] && !call_used_regs[reg])
7606 insn = gen_rtx_PRE_DEC (XFmode, stack_pointer_rtx);
7607 insn = gen_rtx_MEM (XFmode, insn);
7608 insn = emit_insn (gen_rtx_SET (VOIDmode, insn,
7609 gen_rtx_REG (XFmode, reg)));
7610 RTX_FRAME_RELATED_P (insn) = 1;
7613 else
7615 int start_reg = LAST_ARM_FP_REGNUM;
7617 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg --)
7619 if (regs_ever_live[reg] && !call_used_regs[reg])
7621 if (start_reg - reg == 3)
7623 insn = emit_sfm (reg, 4);
7624 RTX_FRAME_RELATED_P (insn) = 1;
7625 start_reg = reg - 1;
7628 else
7630 if (start_reg != reg)
7632 insn = emit_sfm (reg + 1, start_reg - reg);
7633 RTX_FRAME_RELATED_P (insn) = 1;
7635 start_reg = reg - 1;
7639 if (start_reg != reg)
7641 insn = emit_sfm (reg + 1, start_reg - reg);
7642 RTX_FRAME_RELATED_P (insn) = 1;
7647 if (frame_pointer_needed)
7649 insn = GEN_INT (-(4 + current_function_pretend_args_size + fp_offset));
7650 insn = emit_insn (gen_addsi3 (hard_frame_pointer_rtx, ip_rtx, insn));
7651 RTX_FRAME_RELATED_P (insn) = 1;
7653 if (current_function_needs_context)
7655 /* Recover the static chain register. */
7656 if (regs_ever_live [3] == 0)
7658 insn = gen_rtx_REG (SImode, 3);
7659 insn = gen_rtx_SET (SImode, ip_rtx, insn);
7660 insn = emit_insn (insn);
7661 RTX_FRAME_RELATED_P (insn) = 1;
7663 else /* if (current_function_pretend_args_size == 0) */
7665 insn = gen_rtx_PLUS (SImode, hard_frame_pointer_rtx, GEN_INT (4));
7666 insn = gen_rtx_MEM (SImode, insn);
7667 insn = gen_rtx_SET (SImode, ip_rtx, insn);
7668 insn = emit_insn (insn);
7669 RTX_FRAME_RELATED_P (insn) = 1;
7674 if (amount != const0_rtx)
7676 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
7677 amount));
7678 RTX_FRAME_RELATED_P (insn) = 1;
7680 /* If the frame pointer is needed, emit a special barrier that
7681 will prevent the scheduler from moving stores to the frame
7682 before the stack adjustment. */
7683 if (frame_pointer_needed)
7685 rtx unspec = gen_rtx_UNSPEC (SImode,
7686 gen_rtvec (2, stack_pointer_rtx,
7687 hard_frame_pointer_rtx), 4);
7689 emit_insn (gen_rtx_CLOBBER (VOIDmode,
7690 gen_rtx_MEM (BLKmode, unspec)));
7694 /* If we are profiling, make sure no instructions are scheduled before
7695 the call to mcount. Similarly if the user has requested no
7696 scheduling in the prolog. */
7697 if (profile_flag || profile_block_flag || TARGET_NO_SCHED_PRO)
7698 emit_insn (gen_blockage ());
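/* The canonical APCS frame sequence this expands to (illustrative, for
   a non-nested function with a frame pointer and no other saved regs) is:

	mov	ip, sp
	stmfd	sp!, {fp, ip, lr, pc}
	sub	fp, ip, #4
	sub	sp, sp, #<local frame size>  */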
7701 /* If CODE is 'd', then the X is a condition operand and the instruction
7702 should only be executed if the condition is true.
7703 if CODE is 'D', then the X is a condition operand and the instruction
7704 should only be executed if the condition is false: however, if the mode
7705 of the comparison is CCFPEmode, then always execute the instruction -- we
7706 do this because in these circumstances !GE does not necessarily imply LT;
7707 in these cases the instruction pattern will take care to make sure that
7708 an instruction containing %d will follow, thereby undoing the effects of
7709 doing this instruction unconditionally.
7710 If CODE is 'N' then X is a floating point operand that must be negated
7711 before output.
7712 If CODE is 'B' then output a bitwise inverted value of X (a const int).
7713 If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */
7715 void
7716 arm_print_operand (stream, x, code)
7717 FILE * stream;
7718 rtx x;
7719 int code;
7721 switch (code)
7723 case '@':
7724 fputs (ASM_COMMENT_START, stream);
7725 return;
7727 case '_':
7728 fputs (user_label_prefix, stream);
7729 return;
7731 case '|':
7732 fputs (REGISTER_PREFIX, stream);
7733 return;
7735 case '?':
7736 if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
7738 if (TARGET_THUMB || current_insn_predicate != NULL)
7739 abort ();
7741 fputs (arm_condition_codes[arm_current_cc], stream);
7743 else if (current_insn_predicate)
7745 enum arm_cond_code code;
7747 if (TARGET_THUMB)
7748 abort ();
7750 code = get_arm_condition_code (current_insn_predicate);
7751 fputs (arm_condition_codes[code], stream);
7753 return;
7755 case 'N':
7757 REAL_VALUE_TYPE r;
7758 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
7759 r = REAL_VALUE_NEGATE (r);
7760 fprintf (stream, "%s", fp_const_from_val (&r));
7762 return;
7764 case 'B':
7765 if (GET_CODE (x) == CONST_INT)
7767 HOST_WIDE_INT val;
7768 val = ARM_SIGN_EXTEND (~INTVAL (x));
7769 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
7771 else
7773 putc ('~', stream);
7774 output_addr_const (stream, x);
7776 return;
7778 case 'i':
7779 fprintf (stream, "%s", arithmetic_instr (x, 1));
7780 return;
7782 case 'I':
7783 fprintf (stream, "%s", arithmetic_instr (x, 0));
7784 return;
7786 case 'S':
7788 HOST_WIDE_INT val;
7789 const char * shift = shift_op (x, &val);
7791 if (shift)
7793 fprintf (stream, ", %s ", shift_op (x, &val));
7794 if (val == -1)
7795 arm_print_operand (stream, XEXP (x, 1), 0);
7796 else
7798 fputc ('#', stream);
7799 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
7803 return;
7805 /* An explanation of the 'Q', 'R' and 'H' register operands:
7807 In a pair of registers containing a DI or DF value the 'Q'
7808 operand returns the register number of the register containing
7809 the least significant part of the value. The 'R' operand returns
7810 the register number of the register containing the most
7811 significant part of the value.
7813 The 'H' operand returns the higher of the two register numbers.
7814 On a run where WORDS_BIG_ENDIAN is true the 'H' operand is the
7815 same as the 'Q' operand, since the most significant part of the
7816 value is held in the lower number register. The reverse is true
7817 on systems where WORDS_BIG_ENDIAN is false.
7819 The purpose of these operands is to distinguish between cases
7820 where the endian-ness of the values is important (for example
7821 when they are added together), and cases where the endian-ness
7822 is irrelevant, but the order of register operations is important.
7823 For example when loading a value from memory into a register
7824 pair, the endian-ness does not matter. Provided that the value
7825 from the lower memory address is put into the lower numbered
7826 register, and the value from the higher address is put into the
7827 higher numbered register, the load will work regardless of whether
7828 the value being loaded is big-wordian or little-wordian. The
7829 order of the two register loads can matter however, if the address
7830 of the memory location is actually held in one of the registers
7831 being overwritten by the load. */
7832 case 'Q':
7833 if (REGNO (x) > LAST_ARM_REGNUM)
7834 abort ();
7835 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 1 : 0));
7836 return;
7838 case 'R':
7839 if (REGNO (x) > LAST_ARM_REGNUM)
7840 abort ();
7841 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 0 : 1));
7842 return;
7844 case 'H':
7845 if (REGNO (x) > LAST_ARM_REGNUM)
7846 abort ();
7847 asm_fprintf (stream, "%r", REGNO (x) + 1);
7848 return;
7850 case 'm':
7851 asm_fprintf (stream, "%r",
7852 GET_CODE (XEXP (x, 0)) == REG
7853 ? REGNO (XEXP (x, 0)) : REGNO (XEXP (XEXP (x, 0), 0)));
7854 return;
7856 case 'M':
7857 asm_fprintf (stream, "{%r-%r}",
7858 REGNO (x),
7859 REGNO (x) + NUM_REGS (GET_MODE (x)) - 1);
7860 return;
7862 case 'd':
7863 if (!x)
7864 return;
7866 if (TARGET_ARM)
7867 fputs (arm_condition_codes[get_arm_condition_code (x)],
7868 stream);
7869 else
7870 fputs (thumb_condition_code (x, 0), stream);
7871 return;
7873 case 'D':
7874 if (!x)
7875 return;
7877 if (TARGET_ARM)
7878 fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
7879 (get_arm_condition_code (x))],
7880 stream);
7881 else
7882 fputs (thumb_condition_code (x, 1), stream);
7883 return;
7885 default:
7886 if (x == 0)
7887 abort ();
7889 if (GET_CODE (x) == REG)
7890 asm_fprintf (stream, "%r", REGNO (x));
7891 else if (GET_CODE (x) == MEM)
7893 output_memory_reference_mode = GET_MODE (x);
7894 output_address (XEXP (x, 0));
7896 else if (GET_CODE (x) == CONST_DOUBLE)
7897 fprintf (stream, "#%s", fp_immediate_constant (x));
7898 else if (GET_CODE (x) == NEG)
7899 abort (); /* This should never happen now. */
7900 else
7902 fputc ('#', stream);
7903 output_addr_const (stream, x);
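/* Illustrative uses of the operand codes above (little-endian, with a
   DImode value in the hypothetical pair r0/r1): "%Q0" prints r0, the
   least significant word, "%R0" prints r1, and "%H0" always prints r1;
   for a MEM such as (mem (reg r3)), "%m1" prints the bare base
   register "r3", while "%M0" prints a register range, "{r0-r1}".  */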
7908 /* A finite state machine takes care of noticing whether or not instructions
7909 can be conditionally executed, and thus decreases execution time and code
7910 size by deleting branch instructions. The fsm is controlled by
7911 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
7913 /* The states of the fsm controlling condition codes are:
7914 0: normal, do nothing special
7915 1: make ASM_OUTPUT_OPCODE not output this instruction
7916 2: make ASM_OUTPUT_OPCODE not output this instruction
7917 3: make instructions conditional
7918 4: make instructions conditional
7920 State transitions (state->state by whom under condition):
7921 0 -> 1 final_prescan_insn if the `target' is a label
7922 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
7923 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
7924 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
7925 3 -> 0 ASM_OUTPUT_INTERNAL_LABEL if the `target' label is reached
7926 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
7927 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
7928 (the target insn is arm_target_insn).
7930 If the jump clobbers the conditions then we use states 2 and 4.
7932 A similar thing can be done with conditional return insns.
7934 XXX In case the `target' is an unconditional branch, this conditionalising
7935 of the instructions always reduces code size, but not always execution
7936 time. But then, I want to reduce the code size to somewhere near what
7937 /bin/cc produces. */
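/* For instance, the fsm rewrites a skip over one instruction, such as

	cmp	r0, #0
	beq	.L1
	add	r1, r1, #1
   .L1:

   into "cmp r0, #0" followed by "addne r1, r1, #1", deleting both the
   branch and (if now unused) the label.  */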
7939 /* Returns the index of the ARM condition code string in
7940 `arm_condition_codes'. COMPARISON should be an rtx like
7941 `(eq (...) (...))'. */
7943 static enum arm_cond_code
7944 get_arm_condition_code (comparison)
7945 rtx comparison;
7947 enum machine_mode mode = GET_MODE (XEXP (comparison, 0));
7948 register int code;
7949 register enum rtx_code comp_code = GET_CODE (comparison);
7951 if (GET_MODE_CLASS (mode) != MODE_CC)
7952 mode = SELECT_CC_MODE (comp_code, XEXP (comparison, 0),
7953 XEXP (comparison, 1));
7955 switch (mode)
7957 case CC_DNEmode: code = ARM_NE; goto dominance;
7958 case CC_DEQmode: code = ARM_EQ; goto dominance;
7959 case CC_DGEmode: code = ARM_GE; goto dominance;
7960 case CC_DGTmode: code = ARM_GT; goto dominance;
7961 case CC_DLEmode: code = ARM_LE; goto dominance;
7962 case CC_DLTmode: code = ARM_LT; goto dominance;
7963 case CC_DGEUmode: code = ARM_CS; goto dominance;
7964 case CC_DGTUmode: code = ARM_HI; goto dominance;
7965 case CC_DLEUmode: code = ARM_LS; goto dominance;
7966 case CC_DLTUmode: code = ARM_CC;
7968 dominance:
7969 if (comp_code != EQ && comp_code != NE)
7970 abort ();
7972 if (comp_code == EQ)
7973 return ARM_INVERSE_CONDITION_CODE (code);
7974 return code;
7976 case CC_NOOVmode:
7977 switch (comp_code)
7979 case NE: return ARM_NE;
7980 case EQ: return ARM_EQ;
7981 case GE: return ARM_PL;
7982 case LT: return ARM_MI;
7983 default: abort ();
7986 case CC_Zmode:
7987 switch (comp_code)
7989 case NE: return ARM_NE;
7990 case EQ: return ARM_EQ;
7991 default: abort ();
7994 case CCFPEmode:
7995 case CCFPmode:
7996 /* These encodings assume that AC=1 in the FPA system control
7997 byte. This allows us to handle all cases except UNEQ and
7998 LTGT. */
7999 switch (comp_code)
8001 case GE: return ARM_GE;
8002 case GT: return ARM_GT;
8003 case LE: return ARM_LS;
8004 case LT: return ARM_MI;
8005 case NE: return ARM_NE;
8006 case EQ: return ARM_EQ;
8007 case ORDERED: return ARM_VC;
8008 case UNORDERED: return ARM_VS;
8009 case UNLT: return ARM_LT;
8010 case UNLE: return ARM_LE;
8011 case UNGT: return ARM_HI;
8012 case UNGE: return ARM_PL;
8013 /* UNEQ and LTGT do not have a representation. */
8014 case UNEQ: /* Fall through. */
8015 case LTGT: /* Fall through. */
8016 default: abort ();
8019 case CC_SWPmode:
8020 switch (comp_code)
8022 case NE: return ARM_NE;
8023 case EQ: return ARM_EQ;
8024 case GE: return ARM_LE;
8025 case GT: return ARM_LT;
8026 case LE: return ARM_GE;
8027 case LT: return ARM_GT;
8028 case GEU: return ARM_LS;
8029 case GTU: return ARM_CC;
8030 case LEU: return ARM_CS;
8031 case LTU: return ARM_HI;
8032 default: abort ();
8035 case CC_Cmode:
8036 switch (comp_code)
8038 case LTU: return ARM_CS;
8039 case GEU: return ARM_CC;
8040 default: abort ();
8043 case CCmode:
8044 switch (comp_code)
8046 case NE: return ARM_NE;
8047 case EQ: return ARM_EQ;
8048 case GE: return ARM_GE;
8049 case GT: return ARM_GT;
8050 case LE: return ARM_LE;
8051 case LT: return ARM_LT;
8052 case GEU: return ARM_CS;
8053 case GTU: return ARM_HI;
8054 case LEU: return ARM_LS;
8055 case LTU: return ARM_CC;
8056 default: abort ();
8059 default: abort ();
8062 abort ();
8066 void
8067 arm_final_prescan_insn (insn)
8068 rtx insn;
8070 /* BODY will hold the body of INSN. */
8071 register rtx body = PATTERN (insn);
8073 /* This will be 1 if trying to repeat the trick, and things need to be
8074 reversed if it appears to fail. */
8075 int reverse = 0;
8077 /* JUMP_CLOBBERS will be one if the condition codes are clobbered when
8078 the branch is taken, even if the rtl suggests otherwise. It also
8079 means that we have to grub around within the jump expression to find
8080 out what the conditions are when the jump isn't taken. */
8081 int jump_clobbers = 0;
8083 /* If we start with a return insn, we only succeed if we find another one. */
8084 int seeking_return = 0;
8086 /* START_INSN will hold the insn from where we start looking. This is the
8087 first insn after the following code_label if REVERSE is true. */
8088 rtx start_insn = insn;
8090 /* If in state 4, check if the target branch is reached, in order to
8091 change back to state 0. */
8092 if (arm_ccfsm_state == 4)
8094 if (insn == arm_target_insn)
8096 arm_target_insn = NULL;
8097 arm_ccfsm_state = 0;
8099 return;
8102 /* If in state 3, it is possible to repeat the trick, if this insn is an
8103 unconditional branch to a label, and immediately following this branch
8104 is the previous target label which is only used once, and the label this
8105 branch jumps to is not too far off. */
8106 if (arm_ccfsm_state == 3)
8108 if (simplejump_p (insn))
8110 start_insn = next_nonnote_insn (start_insn);
8111 if (GET_CODE (start_insn) == BARRIER)
8113 /* XXX Isn't this always a barrier? */
8114 start_insn = next_nonnote_insn (start_insn);
8116 if (GET_CODE (start_insn) == CODE_LABEL
8117 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
8118 && LABEL_NUSES (start_insn) == 1)
8119 reverse = TRUE;
8120 else
8121 return;
8123 else if (GET_CODE (body) == RETURN)
8125 start_insn = next_nonnote_insn (start_insn);
8126 if (GET_CODE (start_insn) == BARRIER)
8127 start_insn = next_nonnote_insn (start_insn);
8128 if (GET_CODE (start_insn) == CODE_LABEL
8129 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
8130 && LABEL_NUSES (start_insn) == 1)
8132 reverse = TRUE;
8133 seeking_return = 1;
8135 else
8136 return;
8138 else
8139 return;
8142 if (arm_ccfsm_state != 0 && !reverse)
8143 abort ();
8144 if (GET_CODE (insn) != JUMP_INSN)
8145 return;
8147 /* This jump might be paralleled with a clobber of the condition codes;
8148 the jump should always come first. */
8149 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
8150 body = XVECEXP (body, 0, 0);
8152 #if 0
8153 /* If this is a conditional return then we don't want to know */
8154 if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
8155 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
8156 && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
8157 || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
8158 return;
8159 #endif
8161 if (reverse
8162 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
8163 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
8165 int insns_skipped;
8166 int fail = FALSE, succeed = FALSE;
8167 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
8168 int then_not_else = TRUE;
8169 rtx this_insn = start_insn, label = 0;
8171 /* If the jump cannot be done with one instruction, we cannot
8172 conditionally execute the instruction in the inverse case. */
8173 if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
8175 jump_clobbers = 1;
8176 return;
8179 /* Register the insn jumped to. */
8180 if (reverse)
8182 if (!seeking_return)
8183 label = XEXP (SET_SRC (body), 0);
8185 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
8186 label = XEXP (XEXP (SET_SRC (body), 1), 0);
8187 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
8189 label = XEXP (XEXP (SET_SRC (body), 2), 0);
8190 then_not_else = FALSE;
8192 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
8193 seeking_return = 1;
8194 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
8196 seeking_return = 1;
8197 then_not_else = FALSE;
8199 else
8200 abort ();
8202 /* See how many insns this branch skips, and what kind of insns. If all
8203 insns are okay, and the label or unconditional branch to the same
8204 label is not too far away, succeed. */
8205 for (insns_skipped = 0;
8206 !fail && !succeed && insns_skipped++ < max_insns_skipped;)
8208 rtx scanbody;
8210 this_insn = next_nonnote_insn (this_insn);
8211 if (!this_insn)
8212 break;
8214 switch (GET_CODE (this_insn))
8216 case CODE_LABEL:
8217 /* Succeed if it is the target label, otherwise fail since
8218 control falls in from somewhere else. */
8219 if (this_insn == label)
8221 if (jump_clobbers)
8223 arm_ccfsm_state = 2;
8224 this_insn = next_nonnote_insn (this_insn);
8226 else
8227 arm_ccfsm_state = 1;
8228 succeed = TRUE;
8230 else
8231 fail = TRUE;
8232 break;
8234 case BARRIER:
8235 /* Succeed if the following insn is the target label.
8236 Otherwise fail.
8237 If return insns are used then the last insn in a function
8238 will be a barrier. */
8239 this_insn = next_nonnote_insn (this_insn);
8240 if (this_insn && this_insn == label)
8242 if (jump_clobbers)
8244 arm_ccfsm_state = 2;
8245 this_insn = next_nonnote_insn (this_insn);
8247 else
8248 arm_ccfsm_state = 1;
8249 succeed = TRUE;
8251 else
8252 fail = TRUE;
8253 break;
8255 case CALL_INSN:
8256 /* If using 32-bit addresses the cc is not preserved over
8257 calls. */
8258 if (TARGET_APCS_32)
8260 /* Succeed if the following insn is the target label,
8261 or if the following two insns are a barrier and
8262 the target label. */
8263 this_insn = next_nonnote_insn (this_insn);
8264 if (this_insn && GET_CODE (this_insn) == BARRIER)
8265 this_insn = next_nonnote_insn (this_insn);
8267 if (this_insn && this_insn == label
8268 && insns_skipped < max_insns_skipped)
8270 if (jump_clobbers)
8272 arm_ccfsm_state = 2;
8273 this_insn = next_nonnote_insn (this_insn);
8275 else
8276 arm_ccfsm_state = 1;
8277 succeed = TRUE;
8279 else
8280 fail = TRUE;
8282 break;
8284 case JUMP_INSN:
8285 /* If this is an unconditional branch to the same label, succeed.
8286 If it is to another label, do nothing. If it is conditional,
8287 fail. */
8288 /* XXX Probably, the tests for SET and the PC are unnecessary. */
8290 scanbody = PATTERN (this_insn);
8291 if (GET_CODE (scanbody) == SET
8292 && GET_CODE (SET_DEST (scanbody)) == PC)
8294 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
8295 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
8297 arm_ccfsm_state = 2;
8298 succeed = TRUE;
8300 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
8301 fail = TRUE;
8303 /* Fail if a conditional return is undesirable (eg on a
8304 StrongARM), but still allow this if optimizing for size. */
8305 else if (GET_CODE (scanbody) == RETURN
8306 && !use_return_insn (TRUE)
8307 && !optimize_size)
8308 fail = TRUE;
8309 else if (GET_CODE (scanbody) == RETURN
8310 && seeking_return)
8312 arm_ccfsm_state = 2;
8313 succeed = TRUE;
8315 else if (GET_CODE (scanbody) == PARALLEL)
8317 switch (get_attr_conds (this_insn))
8319 case CONDS_NOCOND:
8320 break;
8321 default:
8322 fail = TRUE;
8323 break;
8326 else
8327 fail = TRUE; /* Unrecognized jump (e.g. epilogue). */
8329 break;
8331 case INSN:
8332 /* Instructions using or affecting the condition codes make it
8333 fail. */
8334 scanbody = PATTERN (this_insn);
8335 if (!(GET_CODE (scanbody) == SET
8336 || GET_CODE (scanbody) == PARALLEL)
8337 || get_attr_conds (this_insn) != CONDS_NOCOND)
8338 fail = TRUE;
8339 break;
8341 default:
8342 break;
8345 if (succeed)
8347 if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
8348 arm_target_label = CODE_LABEL_NUMBER (label);
8349 else if (seeking_return || arm_ccfsm_state == 2)
8351 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
8353 this_insn = next_nonnote_insn (this_insn);
8354 if (this_insn && (GET_CODE (this_insn) == BARRIER
8355 || GET_CODE (this_insn) == CODE_LABEL))
8356 abort ();
8358 if (!this_insn)
8360 /* Oh dear! We ran off the end... give up. */
8361 recog (PATTERN (insn), insn, NULL_PTR);
8362 arm_ccfsm_state = 0;
8363 arm_target_insn = NULL;
8364 return;
8366 arm_target_insn = this_insn;
8368 else
8369 abort ();
8370 if (jump_clobbers)
8372 if (reverse)
8373 abort ();
8374 arm_current_cc =
8375 get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
8376 0), 0), 1));
8377 if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
8378 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
8379 if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
8380 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
8382 else
8384 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
8385 what it was. */
8386 if (!reverse)
8387 arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
8388 0));
8391 if (reverse || then_not_else)
8392 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
8395 /* Restore recog_data (getting the attributes of other insns can
8396 destroy this array, but final.c assumes that it remains intact
8397 across this call; since the insn has been recognized already we
8398 call recog directly). */
8399 recog (PATTERN (insn), insn, NULL_PTR);
8404 arm_regno_class (regno)
8405 int regno;
8407 if (TARGET_THUMB)
8409 if (regno == STACK_POINTER_REGNUM)
8410 return STACK_REG;
8411 if (regno == CC_REGNUM)
8412 return CC_REG;
8413 if (regno < 8)
8414 return LO_REGS;
8415 return HI_REGS;
8418 if ( regno <= LAST_ARM_REGNUM
8419 || regno == FRAME_POINTER_REGNUM
8420 || regno == ARG_POINTER_REGNUM)
8421 return GENERAL_REGS;
8423 if (regno == CC_REGNUM)
8424 return NO_REGS;
8426 return FPU_REGS;
8429 /* Handle a special case when computing the offset
8430 of an argument from the frame pointer. */
8432 arm_debugger_arg_offset (value, addr)
8433 int value;
8434 rtx addr;
8436 rtx insn;
8438 /* We are only interested in cases where dbxout_parms() failed to compute the offset. */
8439 if (value != 0)
8440 return 0;
8442 /* We can only cope with the case where the address is held in a register. */
8443 if (GET_CODE (addr) != REG)
8444 return 0;
8446 /* If we are using the frame pointer to point at the argument, then
8447 an offset of 0 is correct. */
8448 if (REGNO (addr) == (unsigned) HARD_FRAME_POINTER_REGNUM)
8449 return 0;
8451 /* If we are using the stack pointer to point at the
8452 argument, then an offset of 0 is correct. */
8453 if ((TARGET_THUMB || !frame_pointer_needed)
8454 && REGNO (addr) == SP_REGNUM)
8455 return 0;
8457 /* Oh dear. The argument is pointed to by a register rather
8458 than being held in a register, or being stored at a known
8459 offset from the frame pointer. Since GDB only understands
8460 those two kinds of argument we must translate the address
8461 held in the register into an offset from the frame pointer.
8462 We do this by searching through the insns for the function
8463 looking to see where this register gets its value. If the
8464 register is initialised from the frame pointer plus an offset
8465 then we are in luck and we can continue, otherwise we give up.
8467 This code is exercised by producing debugging information
8468 for a function with arguments like this:
8470 double func (double a, double b, int c, double d) {return d;}
8472 Without this code the stab for parameter 'd' will be set to
8473 an offset of 0 from the frame pointer, rather than 8. */
8475 /* The if() statement says:
8477 If the insn is a normal instruction
8478 and if the insn is setting the value in a register
8479 and if the register being set is the register holding the address of the argument
8480 and if the address is computed by an addition
8481 that involves adding to a register
8482 which is the frame pointer
8483 a constant integer
8485 then... */
8487 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
8489 if ( GET_CODE (insn) == INSN
8490 && GET_CODE (PATTERN (insn)) == SET
8491 && REGNO (XEXP (PATTERN (insn), 0)) == REGNO (addr)
8492 && GET_CODE (XEXP (PATTERN (insn), 1)) == PLUS
8493 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 0)) == REG
8494 && REGNO (XEXP (XEXP (PATTERN (insn), 1), 0)) == (unsigned) HARD_FRAME_POINTER_REGNUM
8495 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 1)) == CONST_INT
8498 value = INTVAL (XEXP (XEXP (PATTERN (insn), 1), 1));
8500 break;
8504 if (value == 0)
8506 debug_rtx (addr);
8507 warning ("Unable to compute real location of stacked parameter");
8508 value = 8; /* XXX magic hack */
8511 return value;
8514 #define def_builtin(NAME, TYPE, CODE) \
8515 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, NULL_PTR)
8517 void
8518 arm_init_builtins ()
8520 tree endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);
8521 tree int_endlink = tree_cons (NULL_TREE, integer_type_node, endlink);
8522 tree pchar_type_node = build_pointer_type (char_type_node);
8524 tree int_ftype_int, void_ftype_pchar;
8526 /* void func (char *) */
8527 void_ftype_pchar
8528 = build_function_type (void_type_node,
8529 tree_cons (NULL_TREE, pchar_type_node, endlink));
8531 /* int func (int) */
8532 int_ftype_int
8533 = build_function_type (integer_type_node, int_endlink);
8535 /* Initialize ARM V5 builtins. */
8536 if (arm_arch5)
8538 def_builtin ("__builtin_clz", int_ftype_int, ARM_BUILTIN_CLZ);
8539 def_builtin ("__builtin_prefetch", void_ftype_pchar,
8540 ARM_BUILTIN_PREFETCH);
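/* For illustration only (not part of this file's logic): given the
   signatures built above, user code compiled for a target where
   arm_arch5 is set could be expected to use these builtins like so:

       int leading_zeros (int x)  { return __builtin_clz (x); }
       void warm_cache (char * p) { __builtin_prefetch (p); }

   i.e. int (int) for CLZ and void (char *) for PREFETCH.  */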
8544 /* Expand an expression EXP that calls a built-in function,
8545 with result going to TARGET if that's convenient
8546 (and in mode MODE if that's convenient).
8547 SUBTARGET may be used as the target for computing one of EXP's operands.
8548 IGNORE is nonzero if the value is to be ignored. */
8551 arm_expand_builtin (exp, target, subtarget, mode, ignore)
8552 tree exp;
8553 rtx target;
8554 rtx subtarget ATTRIBUTE_UNUSED;
8555 enum machine_mode mode ATTRIBUTE_UNUSED;
8556 int ignore ATTRIBUTE_UNUSED;
8558 enum insn_code icode;
8559 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8560 tree arglist = TREE_OPERAND (exp, 1);
8561 tree arg0;
8562 rtx op0, pat;
8563 enum machine_mode tmode, mode0;
8564 int fcode = DECL_FUNCTION_CODE (fndecl);
8566 switch (fcode)
8568 default:
8569 break;
8571 case ARM_BUILTIN_CLZ:
8572 icode = CODE_FOR_clz;
8573 arg0 = TREE_VALUE (arglist);
8574 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
8575 tmode = insn_data[icode].operand[0].mode;
8576 mode0 = insn_data[icode].operand[1].mode;
8578 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
8579 op0 = copy_to_mode_reg (mode0, op0);
8580 if (target == 0
8581 || GET_MODE (target) != tmode
8582 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
8583 target = gen_reg_rtx (tmode);
8584 pat = GEN_FCN (icode) (target, op0);
8585 if (! pat)
8586 return 0;
8587 emit_insn (pat);
8588 return target;
8590 case ARM_BUILTIN_PREFETCH:
8591 icode = CODE_FOR_prefetch;
8592 arg0 = TREE_VALUE (arglist);
8593 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
8595 op0 = gen_rtx_MEM (SImode, copy_to_mode_reg (Pmode, op0));
8597 pat = GEN_FCN (icode) (op0);
8598 if (! pat)
8599 return 0;
8600 emit_insn (pat);
8601 return target;
8604 /* @@@ Should really do something sensible here. */
8605 return NULL_RTX;
8608 /* Recursively search through all of the blocks in a function
8609 checking to see if any of the variables created in that
8610 function match the RTX called 'orig'. If they do then
8611 replace them with the RTX called 'new'. */
8613 static void
8614 replace_symbols_in_block (block, orig, new)
8615 tree block;
8616 rtx orig;
8617 rtx new;
8619 for (; block; block = BLOCK_CHAIN (block))
8621 tree sym;
8623 if (!TREE_USED (block))
8624 continue;
8626 for (sym = BLOCK_VARS (block); sym; sym = TREE_CHAIN (sym))
8628 if ( (DECL_NAME (sym) == 0 && TREE_CODE (sym) != TYPE_DECL)
8629 || DECL_IGNORED_P (sym)
8630 || TREE_CODE (sym) != VAR_DECL
8631 || DECL_EXTERNAL (sym)
8632 || !rtx_equal_p (DECL_RTL (sym), orig)
8634 continue;
8636 DECL_RTL (sym) = new;
8639 replace_symbols_in_block (BLOCK_SUBBLOCKS (block), orig, new);
8643 /* Return the number (counting from 0) of the least significant set
8644 bit in MASK. */
8645 #ifdef __GNUC__
8646 inline
8647 #endif
8648 static int
8649 number_of_first_bit_set (mask)
8650 int mask;
8652 int bit;
8654 for (bit = 0;
8655 (mask & (1 << bit)) == 0;
8656 ++bit)
8657 continue;
8659 return bit;
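/* Worked example: number_of_first_bit_set (0x28) returns 3, since
   0x28 is binary 101000 and bit 3 is its least significant set bit.
   Note that the loop never terminates for a zero MASK, so callers
   must pass a non-zero value.  */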
8662 /* Generate code to return from a thumb function.
8663 If 'reg_containing_return_addr' is -1, then the return address is
8664 actually on the stack, at the stack pointer. */
8665 static void
8666 thumb_exit (f, reg_containing_return_addr, eh_ofs)
8667 FILE * f;
8668 int reg_containing_return_addr;
8669 rtx eh_ofs;
8671 unsigned regs_available_for_popping;
8672 unsigned regs_to_pop;
8673 int pops_needed;
8674 unsigned available;
8675 unsigned required;
8676 int mode;
8677 int size;
8678 int restore_a4 = FALSE;
8680 /* Compute the registers we need to pop. */
8681 regs_to_pop = 0;
8682 pops_needed = 0;
8684 /* There is an assumption here that, if eh_ofs is not NULL, the
8685 normal return address will have been pushed. */
8686 if (reg_containing_return_addr == -1 || eh_ofs)
8688 /* When we are generating a return for __builtin_eh_return,
8689 reg_containing_return_addr must specify the return regno. */
8690 if (eh_ofs && reg_containing_return_addr == -1)
8691 abort ();
8693 regs_to_pop |= 1 << LR_REGNUM;
8694 ++pops_needed;
8697 if (TARGET_BACKTRACE)
8699 /* Restore the (ARM) frame pointer and stack pointer. */
8700 regs_to_pop |= (1 << ARM_HARD_FRAME_POINTER_REGNUM) | (1 << SP_REGNUM);
8701 pops_needed += 2;
8704 /* If there is nothing to pop then just emit the BX instruction and
8705 return. */
8706 if (pops_needed == 0)
8708 if (eh_ofs)
8709 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
8711 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
8712 return;
8714 /* Otherwise if we are not supporting interworking and we have not created
8715 a backtrace structure and the function was not entered in ARM mode then
8716 just pop the return address straight into the PC. */
8717 else if (!TARGET_INTERWORK
8718 && !TARGET_BACKTRACE
8719 && !is_called_in_ARM_mode (current_function_decl))
8721 if (eh_ofs)
8723 asm_fprintf (f, "\tadd\t%r, #4\n", SP_REGNUM);
8724 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
8725 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
8727 else
8728 asm_fprintf (f, "\tpop\t{%r}\n", PC_REGNUM);
8730 return;
8733 /* Find out how many of the (return) argument registers we can corrupt. */
8734 regs_available_for_popping = 0;
8736 /* If returning via __builtin_eh_return, the bottom three registers
8737 all contain information needed for the return. */
8738 if (eh_ofs)
8739 size = 12;
8740 else
8742 #ifdef RTX_CODE
8743 /* Where possible, deduce the registers used from the function's
8744 return value. This is more reliable than examining
8745 regs_ever_live[] because that will be set if the register is
8746 ever used in the function, not just if the register is used
8747 to hold a return value. */
8749 if (current_function_return_rtx != 0)
8750 mode = GET_MODE (current_function_return_rtx);
8751 else
8752 #endif
8753 mode = DECL_MODE (DECL_RESULT (current_function_decl));
8755 size = GET_MODE_SIZE (mode);
8757 if (size == 0)
8759 /* In a void function we can use any argument register.
8760 In a function that returns a structure on the stack
8761 we can use the second and third argument registers. */
8762 if (mode == VOIDmode)
8763 regs_available_for_popping =
8764 (1 << ARG_REGISTER (1))
8765 | (1 << ARG_REGISTER (2))
8766 | (1 << ARG_REGISTER (3));
8767 else
8768 regs_available_for_popping =
8769 (1 << ARG_REGISTER (2))
8770 | (1 << ARG_REGISTER (3));
8772 else if (size <= 4)
8773 regs_available_for_popping =
8774 (1 << ARG_REGISTER (2))
8775 | (1 << ARG_REGISTER (3));
8776 else if (size <= 8)
8777 regs_available_for_popping =
8778 (1 << ARG_REGISTER (3));
8781 /* Match registers to be popped with registers into which we pop them. */
8782 for (available = regs_available_for_popping,
8783 required = regs_to_pop;
8784 required != 0 && available != 0;
8785 available &= ~(available & - available),
8786 required &= ~(required & - required))
8787 -- pops_needed;
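/* The update expressions above rely on (x & -x) isolating the lowest
   set bit of x in two's complement; e.g. if available is 0b1100 then
   available & -available is 0b0100, and the &= ~... step leaves
   0b1000.  Each iteration therefore pairs off one register to be
   popped with one register available to pop into.  */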
8789 /* If we have any popping registers left over, remove them. */
8790 if (available > 0)
8791 regs_available_for_popping &= ~available;
8793 /* Otherwise if we need another popping register we can use
8794 the fourth argument register. */
8795 else if (pops_needed)
8797 /* If we have not found any free argument registers and
8798 reg a4 contains the return address, we must move it. */
8799 if (regs_available_for_popping == 0
8800 && reg_containing_return_addr == LAST_ARG_REGNUM)
8802 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
8803 reg_containing_return_addr = LR_REGNUM;
8805 else if (size > 12)
8807 /* Register a4 is being used to hold part of the return value,
8808 but we have dire need of a free, low register. */
8809 restore_a4 = TRUE;
8811 asm_fprintf (f, "\tmov\t%r, %r\n",IP_REGNUM, LAST_ARG_REGNUM);
8814 if (reg_containing_return_addr != LAST_ARG_REGNUM)
8816 /* The fourth argument register is available. */
8817 regs_available_for_popping |= 1 << LAST_ARG_REGNUM;
8819 --pops_needed;
8823 /* Pop as many registers as we can. */
8824 thumb_pushpop (f, regs_available_for_popping, FALSE);
8826 /* Process the registers we popped. */
8827 if (reg_containing_return_addr == -1)
8829 /* The return address was popped into the lowest numbered register. */
8830 regs_to_pop &= ~(1 << LR_REGNUM);
8832 reg_containing_return_addr =
8833 number_of_first_bit_set (regs_available_for_popping);
8835 /* Remove this register from the mask of available registers, so that
8836 the return address will not be corrupted by further pops. */
8837 regs_available_for_popping &= ~(1 << reg_containing_return_addr);
8840 /* If we popped other registers then handle them here. */
8841 if (regs_available_for_popping)
8843 int frame_pointer;
8845 /* Work out which register currently contains the frame pointer. */
8846 frame_pointer = number_of_first_bit_set (regs_available_for_popping);
8848 /* Move it into the correct place. */
8849 asm_fprintf (f, "\tmov\t%r, %r\n",
8850 ARM_HARD_FRAME_POINTER_REGNUM, frame_pointer);
8852 /* (Temporarily) remove it from the mask of popped registers. */
8853 regs_available_for_popping &= ~(1 << frame_pointer);
8854 regs_to_pop &= ~(1 << ARM_HARD_FRAME_POINTER_REGNUM);
8856 if (regs_available_for_popping)
8858 int stack_pointer;
8860 /* We popped the stack pointer as well;
8861 find the register that contains it. */
8862 stack_pointer = number_of_first_bit_set (regs_available_for_popping);
8864 /* Move it into the stack register. */
8865 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, stack_pointer);
8867 /* At this point we have popped all necessary registers, so
8868 do not worry about restoring regs_available_for_popping
8869 to its correct value:
8871 assert (pops_needed == 0)
8872 assert (regs_available_for_popping == (1 << frame_pointer))
8873 assert (regs_to_pop == (1 << STACK_POINTER)) */
8875 else
8877 /* Since we have just moved the popped value into the frame
8878 pointer, the popping register is available for reuse, and
8879 we know that we still have the stack pointer left to pop. */
8880 regs_available_for_popping |= (1 << frame_pointer);
8884 /* If we still have registers left on the stack, but we no longer have
8885 any registers into which we can pop them, then we must move the return
8886 address into the link register and make available the register that
8887 contained it. */
8888 if (regs_available_for_popping == 0 && pops_needed > 0)
8890 regs_available_for_popping |= 1 << reg_containing_return_addr;
8892 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM,
8893 reg_containing_return_addr);
8895 reg_containing_return_addr = LR_REGNUM;
8898 /* If we have registers left on the stack then pop some more.
8899 We know that at most we will want to pop FP and SP. */
8900 if (pops_needed > 0)
8902 int popped_into;
8903 int move_to;
8905 thumb_pushpop (f, regs_available_for_popping, FALSE);
8907 /* We have popped either FP or SP.
8908 Move whichever one it is into the correct register. */
8909 popped_into = number_of_first_bit_set (regs_available_for_popping);
8910 move_to = number_of_first_bit_set (regs_to_pop);
8912 asm_fprintf (f, "\tmov\t%r, %r\n", move_to, popped_into);
8914 regs_to_pop &= ~(1 << move_to);
8916 --pops_needed;
8919 /* If we still have not popped everything then we must have only
8920 had one register available to us and we are now popping the SP. */
8921 if (pops_needed > 0)
8923 int popped_into;
8925 thumb_pushpop (f, regs_available_for_popping, FALSE);
8927 popped_into = number_of_first_bit_set (regs_available_for_popping);
8929 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, popped_into);
8931 /* assert (regs_to_pop == (1 << STACK_POINTER))
8932 assert (pops_needed == 1) */
8936 /* If necessary restore the a4 register. */
8937 if (restore_a4)
8939 if (reg_containing_return_addr != LR_REGNUM)
8941 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
8942 reg_containing_return_addr = LR_REGNUM;
8945 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
8948 if (eh_ofs)
8949 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
8951 /* Return to caller. */
8952 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
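/* By way of example: in the simplest case above, where nothing needs
   popping and the return address is still in LR, this function emits
   just

       bx	lr

   while the non-interworking, non-backtrace early exit reduces to a
   single

       pop	{pc}
*/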
8955 /* Emit code to push or pop registers to or from the stack. */
8956 static void
8957 thumb_pushpop (f, mask, push)
8958 FILE * f;
8959 int mask;
8960 int push;
8962 int regno;
8963 int lo_mask = mask & 0xFF;
8965 if (lo_mask == 0 && !push && (mask & (1 << 15)))
8967 /* Special case. Do not generate a POP PC statement here, do it in
8968 thumb_exit(). */
8969 thumb_exit (f, -1, NULL_RTX);
8970 return;
8973 fprintf (f, "\t%s\t{", push ? "push" : "pop");
8975 /* Look at the low registers first. */
8976 for (regno = 0; regno <= LAST_LO_REGNUM; regno++, lo_mask >>= 1)
8978 if (lo_mask & 1)
8980 asm_fprintf (f, "%r", regno);
8982 if ((lo_mask & ~1) != 0)
8983 fprintf (f, ", ");
8987 if (push && (mask & (1 << LR_REGNUM)))
8989 /* Catch pushing the LR. */
8990 if (mask & 0xFF)
8991 fprintf (f, ", ");
8993 asm_fprintf (f, "%r", LR_REGNUM);
8995 else if (!push && (mask & (1 << PC_REGNUM)))
8997 /* Catch popping the PC. */
8998 if (TARGET_INTERWORK || TARGET_BACKTRACE)
9000 /* The PC is never popped directly; instead
9001 it is popped into r3 and then BX is used. */
9002 fprintf (f, "}\n");
9004 thumb_exit (f, -1, NULL_RTX);
9006 return;
9008 else
9010 if (mask & 0xFF)
9011 fprintf (f, ", ");
9013 asm_fprintf (f, "%r", PC_REGNUM);
9017 fprintf (f, "}\n");
9020 void
9021 thumb_final_prescan_insn (insn)
9022 rtx insn;
9024 if (flag_print_asm_name)
9025 asm_fprintf (asm_out_file, "%@ 0x%04x\n",
9026 INSN_ADDRESSES (INSN_UID (insn)));
9030 thumb_shiftable_const (val)
9031 unsigned HOST_WIDE_INT val;
9033 unsigned HOST_WIDE_INT mask = 0xff;
9034 int i;
9036 if (val == 0) /* XXX */
9037 return 0;
9039 for (i = 0; i < 25; i++)
9040 if ((val & (mask << i)) == val)
9041 return 1;
9043 return 0;
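/* Examples: 0x000ff000 (0xff << 12) and 0xff000000 (0xff << 24, the
   largest shift tried) are accepted, while 0x101 is rejected because
   its set bits span more than one 8-bit window.  */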
9046 /* Returns non-zero if the current function contains,
9047 or might contain, a far jump. */
9049 thumb_far_jump_used_p (int in_prologue)
9051 rtx insn;
9053 /* This test is only important for leaf functions. */
9054 /* assert (!leaf_function_p ()); */
9056 /* If we have already decided that far jumps may be used,
9057 do not bother checking again, and always return true even if
9058 it turns out that they are not being used. Once we have made
9059 the decision that far jumps are present (and that hence the link
9060 register will be pushed onto the stack) we cannot go back on it. */
9061 if (cfun->machine->far_jump_used)
9062 return 1;
9064 /* If this function is not being called from the prologue/epilogue
9065 generation code then it must be being called from the
9066 INITIAL_ELIMINATION_OFFSET macro. */
9067 if (!in_prologue)
9069 /* In this case we know that we are being asked about the elimination
9070 of the arg pointer register. If that register is not being used,
9071 then there are no arguments on the stack, and we do not have to
9072 worry that a far jump might force the prologue to push the link
9073 register, changing the stack offsets. In this case we can just
9074 return false, since the presence of far jumps in the function will
9075 not affect stack offsets.
9077 If the arg pointer is live (or if it was live, but has now been
9078 eliminated and so set to dead) then we do have to test to see if
9079 the function might contain a far jump. This test can lead to some
9080 false negatives, since before reload is completed, the length of
9081 branch instructions is not known, so gcc defaults to returning their
9082 longest length, which in turn sets the far jump attribute to true.
9084 A false negative will not result in bad code being generated, but it
9085 will result in a needless push and pop of the link register. We
9086 hope that this does not occur too often. */
9087 if (regs_ever_live [ARG_POINTER_REGNUM])
9088 cfun->machine->arg_pointer_live = 1;
9089 else if (!cfun->machine->arg_pointer_live)
9090 return 0;
9093 /* Check to see if the function contains a branch
9094 insn with the far jump attribute set. */
9095 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9097 if (GET_CODE (insn) == JUMP_INSN
9098 /* Ignore tablejump patterns. */
9099 && GET_CODE (PATTERN (insn)) != ADDR_VEC
9100 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
9101 && get_attr_far_jump (insn) == FAR_JUMP_YES
9104 /* Record the fact that we have decided that
9105 the function does use far jumps. */
9106 cfun->machine->far_jump_used = 1;
9107 return 1;
9111 return 0;
9114 /* Return non-zero if FUNC must be entered in ARM mode. */
9116 is_called_in_ARM_mode (func)
9117 tree func;
9119 if (TREE_CODE (func) != FUNCTION_DECL)
9120 abort ();
9122 /* Ignore the problem of functions whose address is taken. */
9123 if (TARGET_CALLEE_INTERWORKING && TREE_PUBLIC (func))
9124 return TRUE;
9126 #ifdef ARM_PE
9127 return lookup_attribute ("interfacearm", DECL_MACHINE_ATTRIBUTES (func)) != NULL_TREE;
9128 #else
9129 return FALSE;
9130 #endif
9133 /* The bits which aren't usefully expanded as rtl. */
9134 const char *
9135 thumb_unexpanded_epilogue ()
9137 int regno;
9138 int live_regs_mask = 0;
9139 int high_regs_pushed = 0;
9140 int leaf_function = leaf_function_p ();
9141 int had_to_push_lr;
9142 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
9144 if (return_used_this_function)
9145 return "";
9147 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
9148 if (regs_ever_live[regno] && !call_used_regs[regno]
9149 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
9150 live_regs_mask |= 1 << regno;
9152 for (regno = 8; regno < 13; regno++)
9154 if (regs_ever_live[regno] && !call_used_regs[regno]
9155 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
9156 high_regs_pushed++;
9159 /* The prologue may have pushed some high registers to use as
9160 work registers, e.g. the testsuite file:
9161 gcc/testsuite/gcc/gcc.c-torture/execute/complex-2.c
9162 compiles to produce:
9163 push {r4, r5, r6, r7, lr}
9164 mov r7, r9
9165 mov r6, r8
9166 push {r6, r7}
9167 as part of the prologue. We have to undo that pushing here. */
9169 if (high_regs_pushed)
9171 int mask = live_regs_mask;
9172 int next_hi_reg;
9173 int size;
9174 int mode;
9176 #ifdef RTX_CODE
9177 /* Where possible, deduce the registers used from the function's return value.
9178 This is more reliable than examining regs_ever_live[] because that
9179 will be set if the register is ever used in the function, not just if
9180 the register is used to hold a return value. */
9182 if (current_function_return_rtx != 0)
9183 mode = GET_MODE (current_function_return_rtx);
9184 else
9185 #endif
9186 mode = DECL_MODE (DECL_RESULT (current_function_decl));
9188 size = GET_MODE_SIZE (mode);
9190 /* Unless we are returning a type of size > 12, register r3 is
9191 available. */
9192 if (size < 13)
9193 mask |= 1 << 3;
9195 if (mask == 0)
9196 /* Oh dear! We have no low registers into which we can pop
9197 high registers! */
9198 fatal ("No low registers available for popping high registers");
9200 for (next_hi_reg = 8; next_hi_reg < 13; next_hi_reg++)
9201 if (regs_ever_live[next_hi_reg] && !call_used_regs[next_hi_reg]
9202 && !(TARGET_SINGLE_PIC_BASE && (next_hi_reg == arm_pic_register)))
9203 break;
9205 while (high_regs_pushed)
9207 /* Find lo register(s) into which the high register(s) can
9208 be popped. */
9209 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
9211 if (mask & (1 << regno))
9212 high_regs_pushed--;
9213 if (high_regs_pushed == 0)
9214 break;
9217 mask &= (2 << regno) - 1; /* A noop if regno == 8 */
9219 /* Pop the values into the low register(s). */
9220 thumb_pushpop (asm_out_file, mask, 0);
9222 /* Move the value(s) into the high registers. */
9223 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
9225 if (mask & (1 << regno))
9227 asm_fprintf (asm_out_file, "\tmov\t%r, %r\n", next_hi_reg,
9228 regno);
9230 for (next_hi_reg++; next_hi_reg < 13; next_hi_reg++)
9231 if (regs_ever_live[next_hi_reg]
9232 && !call_used_regs[next_hi_reg]
9233 && !(TARGET_SINGLE_PIC_BASE
9234 && (next_hi_reg == arm_pic_register)))
9235 break;
9241 had_to_push_lr = (live_regs_mask || !leaf_function
9242 || thumb_far_jump_used_p (1));
9244 if (TARGET_BACKTRACE
9245 && ((live_regs_mask & 0xFF) == 0)
9246 && regs_ever_live [LAST_ARG_REGNUM] != 0)
9248 /* The stack backtrace structure creation code had to
9249 push R7 in order to get a work register, so we pop
9250 it now. */
9251 live_regs_mask |= (1 << LAST_LO_REGNUM);
9254 if (current_function_pretend_args_size == 0 || TARGET_BACKTRACE)
9256 if (had_to_push_lr
9257 && !is_called_in_ARM_mode (current_function_decl)
9258 && !eh_ofs)
9259 live_regs_mask |= 1 << PC_REGNUM;
9261 /* Either no argument registers were pushed or a backtrace
9262 structure was created which includes an adjusted stack
9263 pointer, so just pop everything. */
9264 if (live_regs_mask)
9265 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
9267 if (eh_ofs)
9268 thumb_exit (asm_out_file, 2, eh_ofs);
9269 /* We have either just popped the return address into the
9270 PC, or it was kept in LR for the entire function, or
9271 it is still on the stack because we do not want to
9272 return by doing a pop {pc}. */
9273 else if ((live_regs_mask & (1 << PC_REGNUM)) == 0)
9274 thumb_exit (asm_out_file,
9275 (had_to_push_lr
9276 && is_called_in_ARM_mode (current_function_decl)) ?
9277 -1 : LR_REGNUM, NULL_RTX);
9279 else
9281 /* Pop everything but the return address. */
9282 live_regs_mask &= ~(1 << PC_REGNUM);
9284 if (live_regs_mask)
9285 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
9287 if (had_to_push_lr)
9288 /* Get the return address into a temporary register. */
9289 thumb_pushpop (asm_out_file, 1 << LAST_ARG_REGNUM, 0);
9291 /* Remove the argument registers that were pushed onto the stack. */
9292 asm_fprintf (asm_out_file, "\tadd\t%r, %r, #%d\n",
9293 SP_REGNUM, SP_REGNUM,
9294 current_function_pretend_args_size);
9296 if (eh_ofs)
9297 thumb_exit (asm_out_file, 2, eh_ofs);
9298 else
9299 thumb_exit (asm_out_file,
9300 had_to_push_lr ? LAST_ARG_REGNUM : LR_REGNUM, NULL_RTX);
9303 return "";
9306 /* Functions to save and restore machine-specific function data. */
9308 static void
9309 arm_mark_machine_status (p)
9310 struct function * p;
9312 struct machine_function *machine = p->machine;
9314 ggc_mark_rtx (machine->ra_rtx);
9315 ggc_mark_rtx (machine->eh_epilogue_sp_ofs);
9318 static void
9319 arm_init_machine_status (p)
9320 struct function * p;
9322 p->machine =
9323 (struct machine_function *) xcalloc (1, sizeof (struct machine_function));
9326 /* Return an RTX indicating where the return address to the
9327 calling function can be found. */
9329 arm_return_addr (count, frame)
9330 int count;
9331 rtx frame ATTRIBUTE_UNUSED;
9333 rtx reg;
9335 if (count != 0)
9336 return NULL_RTX;
9338 reg = cfun->machine->ra_rtx;
9340 if (reg == NULL)
9342 rtx init;
9344 /* No rtx yet. Invent one, and initialize it for r14 (lr) in
9345 the prologue. */
9346 reg = gen_reg_rtx (Pmode);
9347 cfun->machine->ra_rtx = reg;
9349 if (!TARGET_APCS_32)
9350 init = gen_rtx_AND (Pmode, gen_rtx_REG (Pmode, LR_REGNUM),
9351 GEN_INT (RETURN_ADDR_MASK26));
9352 else
9353 init = gen_rtx_REG (Pmode, LR_REGNUM);
9355 init = gen_rtx_SET (VOIDmode, reg, init);
9357 /* Emit the insn to the prologue with the other argument copies. */
9358 push_topmost_sequence ();
9359 emit_insn_after (init, get_insns ());
9360 pop_topmost_sequence ();
9363 return reg;
9366 /* Do anything needed before RTL is emitted for each function. */
9367 void
9368 arm_init_expanders ()
9370 /* Arrange to initialize and mark the machine per-function status. */
9371 init_machine_status = arm_init_machine_status;
9372 mark_machine_status = arm_mark_machine_status;
9375 /* Generate the rest of a function's prologue. */
9376 void
9377 thumb_expand_prologue ()
9379 HOST_WIDE_INT amount = (get_frame_size ()
9380 + current_function_outgoing_args_size);
9382 /* Naked functions don't have prologues. */
9383 if (arm_naked_function_p (current_function_decl))
9384 return;
9386 if (frame_pointer_needed)
9387 emit_insn (gen_movsi (hard_frame_pointer_rtx, stack_pointer_rtx));
9389 if (amount)
9391 amount = ROUND_UP (amount);
9393 if (amount < 512)
9394 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
9395 GEN_INT (-amount)));
9396 else
9398 int regno;
9399 rtx reg;
9401 /* The stack decrement is too big for an immediate value in a single
9402 insn. In theory we could issue multiple subtracts, but after
9403 three of them it becomes more space efficient to place the full
9404 value in the constant pool and load it into a register. (Also the
9405 ARM debugger really likes to see only one stack decrement per
9406 function). So instead we look for a scratch register into which
9407 we can load the decrement, and then we subtract this from the
9408 stack pointer. Unfortunately on the thumb the only available
9409 scratch registers are the argument registers, and we cannot use
9410 these as they may hold arguments to the function. Instead we
9411 attempt to locate a call preserved register which is used by this
9412 function. If we can find one, then we know that it will have
9413 been pushed at the start of the prologue and so we can corrupt
9414 it now. */
9415 for (regno = LAST_ARG_REGNUM + 1; regno <= LAST_LO_REGNUM; regno++)
9416 if (regs_ever_live[regno]
9417 && !call_used_regs[regno] /* Paranoia */
9418 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register))
9419 && !(frame_pointer_needed
9420 && (regno == THUMB_HARD_FRAME_POINTER_REGNUM)))
9421 break;
9423 if (regno > LAST_LO_REGNUM) /* Very unlikely */
9425 rtx spare = gen_rtx (REG, SImode, IP_REGNUM);
9427 /* Choose an arbitrary, non-argument low register. */
9428 reg = gen_rtx (REG, SImode, LAST_LO_REGNUM);
9430 /* Save it by copying it into a high, scratch register. */
9431 emit_insn (gen_movsi (spare, reg));
9433 /* Decrement the stack. */
9434 emit_insn (gen_movsi (reg, GEN_INT (-amount)));
9435 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
9436 reg));
9438 /* Restore the low register's original value. */
9439 emit_insn (gen_movsi (reg, spare));
9441 /* Emit a USE of the restored scratch register, so that flow
9442 analysis will not consider the restore redundant. The
9443 register won't be used again in this function and isn't
9444 restored by the epilogue. */
9445 emit_insn (gen_rtx_USE (VOIDmode, reg));
9447 else
9449 reg = gen_rtx (REG, SImode, regno);
9451 emit_insn (gen_movsi (reg, GEN_INT (-amount)));
9452 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
9453 reg));
9458 if (profile_flag || profile_block_flag || TARGET_NO_SCHED_PRO)
9459 emit_insn (gen_blockage ());
9462 void
9463 thumb_expand_epilogue ()
9465 HOST_WIDE_INT amount = (get_frame_size ()
9466 + current_function_outgoing_args_size);
9468 /* Naked functions don't have epilogues. */
9469 if (arm_naked_function_p (current_function_decl))
9470 return;
9472 if (frame_pointer_needed)
9473 emit_insn (gen_movsi (stack_pointer_rtx, hard_frame_pointer_rtx));
9474 else if (amount)
9476 amount = ROUND_UP (amount);
9478 if (amount < 512)
9479 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
9480 GEN_INT (amount)));
9481 else
9483 /* r3 is always free in the epilogue. */
9484 rtx reg = gen_rtx (REG, SImode, LAST_ARG_REGNUM);
9486 emit_insn (gen_movsi (reg, GEN_INT (amount)));
9487 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
9491 /* Emit a USE (stack_pointer_rtx), so that
9492 the stack adjustment will not be deleted. */
9493 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
9495 if (profile_flag || profile_block_flag || TARGET_NO_SCHED_PRO)
9496 emit_insn (gen_blockage ());
9499 void
9500 output_thumb_prologue (f)
9501 FILE * f;
9503 int live_regs_mask = 0;
9504 int high_regs_pushed = 0;
9505 int store_arg_regs = 0;
9506 int regno;
9508 if (arm_naked_function_p (current_function_decl))
9509 return;
9511 if (is_called_in_ARM_mode (current_function_decl))
9513 const char * name;
9515 if (GET_CODE (DECL_RTL (current_function_decl)) != MEM)
9516 abort ();
9517 if (GET_CODE (XEXP (DECL_RTL (current_function_decl), 0)) != SYMBOL_REF)
9518 abort ();
9519 name = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
9521 /* Generate code sequence to switch us into Thumb mode. */
9522 /* The .code 32 directive has already been emitted by
9523 ASM_DECLARE_FUNCTION_NAME. */
9524 asm_fprintf (f, "\torr\t%r, %r, #1\n", IP_REGNUM, PC_REGNUM);
9525 asm_fprintf (f, "\tbx\t%r\n", IP_REGNUM);
9527 /* Generate a label, so that the debugger will notice the
9528 change in instruction sets. This label is also used by
9529 the assembler to bypass the ARM code when this function
9530 is called from a Thumb encoded function elsewhere in the
9531 same file. Hence the definition of STUB_NAME here must
9532 agree with the definition in gas/config/tc-arm.c */
9534 #define STUB_NAME ".real_start_of"
9536 asm_fprintf (f, "\t.code\t16\n");
9537 #ifdef ARM_PE
9538 if (arm_dllexport_name_p (name))
9539 name = arm_strip_name_encoding (name);
9540 #endif
9541 asm_fprintf (f, "\t.globl %s%U%s\n", STUB_NAME, name);
9542 asm_fprintf (f, "\t.thumb_func\n");
9543 asm_fprintf (f, "%s%U%s:\n", STUB_NAME, name);
9546 if (current_function_anonymous_args && current_function_pretend_args_size)
9547 store_arg_regs = 1;
9549 if (current_function_pretend_args_size)
9551 if (store_arg_regs)
9553 int num_pushes;
9555 asm_fprintf (f, "\tpush\t{");
9557 num_pushes = NUM_INTS (current_function_pretend_args_size);
9559 for (regno = LAST_ARG_REGNUM + 1 - num_pushes;
9560 regno <= LAST_ARG_REGNUM;
9561 regno++)
9562 asm_fprintf (f, "%r%s", regno,
9563 regno == LAST_ARG_REGNUM ? "" : ", ");
9565 asm_fprintf (f, "}\n");
9567 else
9568 asm_fprintf (f, "\tsub\t%r, %r, #%d\n",
9569 SP_REGNUM, SP_REGNUM,
9570 current_function_pretend_args_size);
9573 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
9574 if (regs_ever_live[regno] && !call_used_regs[regno]
9575 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
9576 live_regs_mask |= 1 << regno;
9578 if (live_regs_mask || !leaf_function_p () || thumb_far_jump_used_p (1))
9579 live_regs_mask |= 1 << LR_REGNUM;
9581 if (TARGET_BACKTRACE)
9583 int offset;
9584 int work_register = 0;
9585 int wr;
9587 /* We have been asked to create a stack backtrace structure.
9588 The code looks like this:
9590 0 .align 2
9591 0 func:
9592 0 sub SP, #16 Reserve space for 4 registers.
9593 2 push {R7} Get a work register.
9594 4 add R7, SP, #20 Get the stack pointer before the push.
9595 6 str R7, [SP, #8] Store the stack pointer (before reserving the space).
9596 8 mov R7, PC Get hold of the start of this code plus 12.
9597 10 str R7, [SP, #16] Store it.
9598 12 mov R7, FP Get hold of the current frame pointer.
9599 14 str R7, [SP, #4] Store it.
9600 16 mov R7, LR Get hold of the current return address.
9601 18 str R7, [SP, #12] Store it.
9602 20 add R7, SP, #16 Point at the start of the backtrace structure.
9603 22 mov FP, R7 Put this value into the frame pointer. */
9605 if ((live_regs_mask & 0xFF) == 0)
9607 /* See if the a4 register is free. */
9609 if (regs_ever_live [LAST_ARG_REGNUM] == 0)
9610 work_register = LAST_ARG_REGNUM;
9611 else /* We must push a register of our own */
9612 live_regs_mask |= (1 << LAST_LO_REGNUM);
9615 if (work_register == 0)
9617 /* Select a register from the list that will be pushed to
9618 use as our work register. */
9619 for (work_register = (LAST_LO_REGNUM + 1); work_register--;)
9620 if ((1 << work_register) & live_regs_mask)
9621 break;
9624 asm_fprintf
9625 (f, "\tsub\t%r, %r, #16\t%@ Create stack backtrace structure\n",
9626 SP_REGNUM, SP_REGNUM);
9628 if (live_regs_mask)
9629 thumb_pushpop (f, live_regs_mask, 1);
9631 for (offset = 0, wr = 1 << 15; wr != 0; wr >>= 1)
9632 if (wr & live_regs_mask)
9633 offset += 4;
9635 asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
9636 offset + 16 + current_function_pretend_args_size);
9638 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9639 offset + 4);
9641 /* Make sure that the instruction fetching the PC is in the right place
9642 to calculate "start of backtrace creation code + 12". */
9643 if (live_regs_mask)
9645 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
9646 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9647 offset + 12);
9648 asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
9649 ARM_HARD_FRAME_POINTER_REGNUM);
9650 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9651 offset);
9653 else
9655 asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
9656 ARM_HARD_FRAME_POINTER_REGNUM);
9657 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9658 offset);
9659 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
9660 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9661 offset + 12);
9664 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, LR_REGNUM);
9665 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9666 offset + 8);
9667 asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
9668 offset + 12);
9669 asm_fprintf (f, "\tmov\t%r, %r\t\t%@ Backtrace structure created\n",
9670 ARM_HARD_FRAME_POINTER_REGNUM, work_register);
9672 else if (live_regs_mask)
9673 thumb_pushpop (f, live_regs_mask, 1);
9675 for (regno = 8; regno < 13; regno++)
9677 if (regs_ever_live[regno] && !call_used_regs[regno]
9678 && !(TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
9679 high_regs_pushed++;
9682 if (high_regs_pushed)
9684 int pushable_regs = 0;
9685 int mask = live_regs_mask & 0xff;
9686 int next_hi_reg;
9688 for (next_hi_reg = 12; next_hi_reg > LAST_LO_REGNUM; next_hi_reg--)
9690 if (regs_ever_live[next_hi_reg] && !call_used_regs[next_hi_reg]
9691 && !(TARGET_SINGLE_PIC_BASE
9692 && (next_hi_reg == arm_pic_register)))
9693 break;
9696 pushable_regs = mask;
9698 if (pushable_regs == 0)
9700 /* Desperation time -- this probably will never happen. */
9701 if (regs_ever_live[LAST_ARG_REGNUM]
9702 || !call_used_regs[LAST_ARG_REGNUM])
9703 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, LAST_ARG_REGNUM);
9704 mask = 1 << LAST_ARG_REGNUM;
9707 while (high_regs_pushed > 0)
9709 for (regno = LAST_LO_REGNUM; regno >= 0; regno--)
9711 if (mask & (1 << regno))
9713 asm_fprintf (f, "\tmov\t%r, %r\n", regno, next_hi_reg);
9715 high_regs_pushed--;
9717 if (high_regs_pushed)
9718 for (next_hi_reg--; next_hi_reg > LAST_LO_REGNUM;
9719 next_hi_reg--)
9721 if (regs_ever_live[next_hi_reg]
9722 && !call_used_regs[next_hi_reg]
9723 && !(TARGET_SINGLE_PIC_BASE
9724 && (next_hi_reg == arm_pic_register)))
9725 break;
9727 else
9729 mask &= ~((1 << regno) - 1);
9730 break;
9735 thumb_pushpop (f, mask, 1);
9738 if (pushable_regs == 0
9739 && (regs_ever_live[LAST_ARG_REGNUM]
9740 || !call_used_regs[LAST_ARG_REGNUM]))
9741 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
9745 /* Handle the case of a double word load into a low register from
9746 a computed memory address. The computed address may involve a
9747 register which is overwritten by the load. */
9749 const char *
9750 thumb_load_double_from_address (operands)
9751 rtx * operands;
9753 rtx addr;
9754 rtx base;
9755 rtx offset;
9756 rtx arg1;
9757 rtx arg2;
9759 if (GET_CODE (operands[0]) != REG)
9760 fatal ("thumb_load_double_from_address: destination is not a register");
9762 if (GET_CODE (operands[1]) != MEM)
9764 debug_rtx (operands[1]);
9765 fatal ("thumb_load_double_from_address: source is not a computed memory address");
9768 /* Get the memory address. */
9769 addr = XEXP (operands[1], 0);
9771 /* Work out how the memory address is computed. */
9772 switch (GET_CODE (addr))
9774 case REG:
9775 operands[2] = gen_rtx (MEM, SImode,
9776 plus_constant (XEXP (operands[1], 0), 4));
9778 if (REGNO (operands[0]) == REGNO (addr))
9780 output_asm_insn ("ldr\t%H0, %2", operands);
9781 output_asm_insn ("ldr\t%0, %1", operands);
9783 else
9785 output_asm_insn ("ldr\t%0, %1", operands);
9786 output_asm_insn ("ldr\t%H0, %2", operands);
9788 break;
9790 case CONST:
9791 /* Compute <address> + 4 for the high order load. */
9792 operands[2] = gen_rtx (MEM, SImode,
9793 plus_constant (XEXP (operands[1], 0), 4));
9795 output_asm_insn ("ldr\t%0, %1", operands);
9796 output_asm_insn ("ldr\t%H0, %2", operands);
9797 break;
9799 case PLUS:
9800 arg1 = XEXP (addr, 0);
9801 arg2 = XEXP (addr, 1);
9803 if (CONSTANT_P (arg1))
9804 base = arg2, offset = arg1;
9805 else
9806 base = arg1, offset = arg2;
9808 if (GET_CODE (base) != REG)
9809 fatal ("thumb_load_double_from_address: base is not a register");
9811 /* Catch the case of <address> = <reg> + <reg> */
9812 if (GET_CODE (offset) == REG)
9814 int reg_offset = REGNO (offset);
9815 int reg_base = REGNO (base);
9816 int reg_dest = REGNO (operands[0]);
9818 /* Add the base and offset registers together into the
9819 higher destination register. */
9820 asm_fprintf (asm_out_file, "\tadd\t%r, %r, %r",
9821 reg_dest + 1, reg_base, reg_offset);
9823 /* Load the lower destination register from the address in
9824 the higher destination register. */
9825 asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #0]",
9826 reg_dest, reg_dest + 1);
9828 /* Load the higher destination register from its own address
9829 plus 4. */
9830 asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #4]",
9831 reg_dest + 1, reg_dest + 1);
9833 else
9835 /* Compute <address> + 4 for the high order load. */
9836 operands[2] = gen_rtx (MEM, SImode,
9837 plus_constant (XEXP (operands[1], 0), 4));
9839 /* If the computed address is held in the low order register
9840 then load the high order register first, otherwise always
9841 load the low order register first. */
9842 if (REGNO (operands[0]) == REGNO (base))
9844 output_asm_insn ("ldr\t%H0, %2", operands);
9845 output_asm_insn ("ldr\t%0, %1", operands);
9847 else
9849 output_asm_insn ("ldr\t%0, %1", operands);
9850 output_asm_insn ("ldr\t%H0, %2", operands);
9853 break;
9855 case LABEL_REF:
9856 /* With no registers to worry about we can just load the value
9857 directly. */
9858 operands[2] = gen_rtx (MEM, SImode,
9859 plus_constant (XEXP (operands[1], 0), 4));
9861 output_asm_insn ("ldr\t%H0, %2", operands);
9862 output_asm_insn ("ldr\t%0, %1", operands);
9863 break;
9865 default:
9866 debug_rtx (operands[1]);
9867 fatal ("thumb_load_double_from_address: Unhandled address calculation");
9868 break;
9871 return "";
9875 const char *
9876 thumb_output_move_mem_multiple (n, operands)
9877 int n;
9878 rtx * operands;
9880 rtx tmp;
9882 switch (n)
9884 case 2:
9885 if (REGNO (operands[4]) > REGNO (operands[5]))
9887 tmp = operands[4];
9888 operands[4] = operands[5];
9889 operands[5] = tmp;
9891 output_asm_insn ("ldmia\t%1!, {%4, %5}", operands);
9892 output_asm_insn ("stmia\t%0!, {%4, %5}", operands);
9893 break;
9895 case 3:
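/* The three compare-and-swap steps below form a small sorting
   network: they put operands[4..6] into ascending register order, so
   that the {%4, %5, %6} lists are emitted in the ascending order that
   assemblers expect for ldmia/stmia.  */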
9896 if (REGNO (operands[4]) > REGNO (operands[5]))
9898 tmp = operands[4];
9899 operands[4] = operands[5];
9900 operands[5] = tmp;
9902 if (REGNO (operands[5]) > REGNO (operands[6]))
9904 tmp = operands[5];
9905 operands[5] = operands[6];
9906 operands[6] = tmp;
9908 if (REGNO (operands[4]) > REGNO (operands[5]))
9910 tmp = operands[4];
9911 operands[4] = operands[5];
9912 operands[5] = tmp;
9915 output_asm_insn ("ldmia\t%1!, {%4, %5, %6}", operands);
9916 output_asm_insn ("stmia\t%0!, {%4, %5, %6}", operands);
9917 break;
9919 default:
9920 abort ();
9923 return "";
9926 /* Routines for generating rtl */
9928 void
9929 thumb_expand_movstrqi (operands)
9930 rtx * operands;
9932 rtx out = copy_to_mode_reg (SImode, XEXP (operands[0], 0));
9933 rtx in = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
9934 HOST_WIDE_INT len = INTVAL (operands[2]);
9935 HOST_WIDE_INT offset = 0;
9937 while (len >= 12)
9939 emit_insn (gen_movmem12b (out, in, out, in));
9940 len -= 12;
9943 if (len >= 8)
9945 emit_insn (gen_movmem8b (out, in, out, in));
9946 len -= 8;
9949 if (len >= 4)
9951 rtx reg = gen_reg_rtx (SImode);
9952 emit_insn (gen_movsi (reg, gen_rtx (MEM, SImode, in)));
9953 emit_insn (gen_movsi (gen_rtx (MEM, SImode, out), reg));
9954 len -= 4;
9955 offset += 4;
9958 if (len >= 2)
9960 rtx reg = gen_reg_rtx (HImode);
9961 emit_insn (gen_movhi (reg, gen_rtx (MEM, HImode,
9962 plus_constant (in, offset))));
9963 emit_insn (gen_movhi (gen_rtx (MEM, HImode, plus_constant (out, offset)),
9964 reg));
9965 len -= 2;
9966 offset += 2;
9969 if (len)
9971 rtx reg = gen_reg_rtx (QImode);
9972 emit_insn (gen_movqi (reg, gen_rtx (MEM, QImode,
9973 plus_constant (in, offset))));
9974 emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (out, offset)),
9975 reg));
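/* For instance, a 23-byte copy decomposes above as 12 + 8 + 2 + 1:
   one movmem12b, one movmem8b, one HImode move and one trailing
   QImode move.  */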
9980 thumb_cmp_operand (op, mode)
9981 rtx op;
9982 enum machine_mode mode;
9984 return ((GET_CODE (op) == CONST_INT
9985 && (unsigned HOST_WIDE_INT) (INTVAL (op)) < 256)
9986 || register_operand (op, mode));
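/* The < 256 test reflects the 8-bit immediate field of the Thumb CMP
   instruction; larger constants must first be loaded into a register
   and compared with the register form.  */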
9989 static const char *
9990 thumb_condition_code (x, invert)
9991 rtx x;
9992 int invert;
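/* The table below is laid out so that each condition is adjacent to
   its logical inverse (eq/ne, cs/cc, and so on); inverting a
   condition is then just a matter of flipping the low bit of the
   index, as the final return does.  */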
9994 static const char * conds[] =
9996 "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
9997 "hi", "ls", "ge", "lt", "gt", "le"
9999 int val;
10001 switch (GET_CODE (x))
10003 case EQ: val = 0; break;
10004 case NE: val = 1; break;
10005 case GEU: val = 2; break;
10006 case LTU: val = 3; break;
10007 case GTU: val = 8; break;
10008 case LEU: val = 9; break;
10009 case GE: val = 10; break;
10010 case LT: val = 11; break;
10011 case GT: val = 12; break;
10012 case LE: val = 13; break;
10013 default:
10014 abort ();
10017 return conds[val ^ invert];
10020 /* Handle storing a half-word to memory during reload. */
10021 void
10022 thumb_reload_out_hi (operands)
10023 rtx * operands;
10025 emit_insn (gen_thumb_movhi_clobber (operands[0], operands[1], operands[2]));
10028 /* Handle storing a half-word to memory during reload. */
10029 void
10030 thumb_reload_in_hi (operands)
10031 rtx * operands ATTRIBUTE_UNUSED;
10033 abort ();
10036 /* Return the length of a function name prefix
10037 that starts with the character 'c'. */
10038 static int
10039 arm_get_strip_length (char c)
10041 switch (c)
10043 ARM_NAME_ENCODING_LENGTHS
10044 default: return 0;
10048 /* Return a pointer to a function's name with any
10049 and all prefix encodings stripped from it. */
10050 const char *
10051 arm_strip_name_encoding (const char * name)
10053 int skip;
10055 while ((skip = arm_get_strip_length (* name)))
10056 name += skip;
10058 return name;
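/* A purely hypothetical example, since the actual prefix characters
   come from the target-defined ARM_NAME_ENCODING_LENGTHS: if '*' and
   '@' were registered as one-character encodings, then
   arm_strip_name_encoding ("*@foo") would return "foo", stripping
   prefixes until a character with strip length 0 is reached.  */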
10061 #ifdef AOF_ASSEMBLER
10062 /* Special functions only needed when producing AOF syntax assembler. */
10064 rtx aof_pic_label = NULL_RTX;
10065 struct pic_chain
10067 struct pic_chain * next;
10068 char * symname;
10071 static struct pic_chain * aof_pic_chain = NULL;
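/* aof_pic_entry below hands out consecutive 4-byte slots in the
   x$adcons table: the first distinct symbol seen gets offset 0, the
   next offset 4, and so on; aof_dump_pic_table later emits one DCD
   per chain entry in the same order, so the offsets returned here
   match the table layout.  */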
10074 aof_pic_entry (x)
10075 rtx x;
10077 struct pic_chain ** chainp;
10078 int offset;
10080 if (aof_pic_label == NULL_RTX)
10082 /* We mark this here and not in arm_add_gc_roots() to avoid
10083 polluting even more code with ifdefs, and because it never
10084 contains anything useful until we assign to it here. */
10085 ggc_add_rtx_root (&aof_pic_label, 1);
10086 aof_pic_label = gen_rtx_SYMBOL_REF (Pmode, "x$adcons");
10089 for (offset = 0, chainp = &aof_pic_chain; *chainp;
10090 offset += 4, chainp = &(*chainp)->next)
10091 if ((*chainp)->symname == XSTR (x, 0))
10092 return plus_constant (aof_pic_label, offset);
10094 *chainp = (struct pic_chain *) xmalloc (sizeof (struct pic_chain));
10095 (*chainp)->next = NULL;
10096 (*chainp)->symname = XSTR (x, 0);
10097 return plus_constant (aof_pic_label, offset);
10100 void
10101 aof_dump_pic_table (f)
10102 FILE * f;
10104 struct pic_chain * chain;
10106 if (aof_pic_chain == NULL)
10107 return;
10109 asm_fprintf (f, "\tAREA |%r$$adcons|, BASED %r\n",
10110 PIC_OFFSET_TABLE_REGNUM,
10111 PIC_OFFSET_TABLE_REGNUM);
10112 fputs ("|x$adcons|\n", f);
10114 for (chain = aof_pic_chain; chain; chain = chain->next)
10116 fputs ("\tDCD\t", f);
10117 assemble_name (f, chain->symname);
10118 fputs ("\n", f);
10122 int arm_text_section_count = 1;
10124 char *
10125 aof_text_section ()
10127 static char buf[100];
10128 sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
10129 arm_text_section_count++);
10130 if (flag_pic)
10131 strcat (buf, ", PIC, REENTRANT");
10132 return buf;
10135 static int arm_data_section_count = 1;
10137 char *
10138 aof_data_section ()
10140 static char buf[100];
10141 sprintf (buf, "\tAREA |C$$data%d|, DATA", arm_data_section_count++);
10142 return buf;
10145 /* The AOF assembler is religiously strict about declarations of
10146 imported and exported symbols, so that it is impossible to declare
10147 a function as imported near the beginning of the file, and then to
10148 export it later on. It is, however, possible to delay the decision
10149 until all the functions in the file have been compiled. To get
10150 around this, we maintain a list of the imports and exports, and
10151 delete from it any that are subsequently defined. At the end of
10152 compilation we spit the remainder of the list out before the END
10153 directive. */
10155 struct import
10157 struct import * next;
10158 char * name;
10161 static struct import * imports_list = NULL;
10163 void
10164 aof_add_import (name)
10165 char * name;
10167 struct import * new;
10169 for (new = imports_list; new; new = new->next)
10170 if (new->name == name)
10171 return;
10173 new = (struct import *) xmalloc (sizeof (struct import));
10174 new->next = imports_list;
10175 imports_list = new;
10176 new->name = name;
10179 void
10180 aof_delete_import (name)
10181 char * name;
10183 struct import ** old;
10185 for (old = &imports_list; *old; old = & (*old)->next)
10187 if ((*old)->name == name)
10189 *old = (*old)->next;
10190 return;
10195 int arm_main_function = 0;
10197 void
10198 aof_dump_imports (f)
10199 FILE * f;
10201 /* The AOF assembler needs this to cause the startup code to be extracted
10202 from the library. Bringing in __main causes the whole thing to work
10203 automagically. */
10204 if (arm_main_function)
10206 text_section ();
10207 fputs ("\tIMPORT __main\n", f);
10208 fputs ("\tDCD __main\n", f);
10211 /* Now dump the remaining imports. */
10212 while (imports_list)
10214 fprintf (f, "\tIMPORT\t");
10215 assemble_name (f, imports_list->name);
10216 fputc ('\n', f);
10217 imports_list = imports_list->next;
10220 #endif /* AOF_ASSEMBLER */