Use new tail-calling mechanism on ARM.
[official-gcc.git] / gcc / config / arm / arm.c
blob 206224d6a0a487f80aa836aeb01d1ba33e0b9e83
/* Output routines for GCC for ARM.
   Copyright (C) 1991, 93, 94, 95, 96, 97, 98, 99, 2000 Free Software Foundation, Inc.
   Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
   and Martin Simmons (@harleqn.co.uk).
   More major hacks by Richard Earnshaw (rearnsha@arm.com).

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-flags.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "reload.h"
#include "function.h"
#include "expr.h"
#include "toplev.h"
#include "recog.h"
#include "ggc.h"
#include "except.h"

/* Forward definitions of types.  */
typedef struct minipool_node   Mnode;
typedef struct minipool_fixup  Mfix;

/* In order to improve the layout of the prototypes below
   some short type abbreviations are defined here.  */
#define Hint   HOST_WIDE_INT
#define Mmode  enum machine_mode
#define Ulong  unsigned long

/* Forward function declarations.  */
static void arm_add_gc_roots PARAMS ((void));
static int arm_gen_constant PARAMS ((enum rtx_code, Mmode, Hint, rtx, rtx, int, int));
static int arm_naked_function_p PARAMS ((tree));
static Ulong bit_count PARAMS ((signed int));
static int const_ok_for_op PARAMS ((Hint, enum rtx_code));
static int eliminate_lr2ip PARAMS ((rtx *));
static rtx emit_multi_reg_push PARAMS ((int));
static rtx emit_sfm PARAMS ((int, int));
static char * fp_const_from_val PARAMS ((REAL_VALUE_TYPE *));
static arm_cc get_arm_condition_code PARAMS ((rtx));
static void init_fpa_table PARAMS ((void));
static Hint int_log2 PARAMS ((Hint));
static rtx is_jump_table PARAMS ((rtx));
static char * output_multi_immediate PARAMS ((rtx *, char *, char *, int, Hint));
static void print_multi_reg PARAMS ((FILE *, char *, int, int, int));
static Mmode select_dominance_cc_mode PARAMS ((rtx, rtx, Hint));
static char * shift_op PARAMS ((rtx, Hint *));
static void arm_init_machine_status PARAMS ((struct function *));
static void arm_mark_machine_status PARAMS ((struct function *));
static int number_of_first_bit_set PARAMS ((int));
static void replace_symbols_in_block PARAMS ((tree, rtx, rtx));
static void thumb_exit PARAMS ((FILE *, int, rtx));
static void thumb_pushpop PARAMS ((FILE *, int, int));
static char * thumb_condition_code PARAMS ((rtx, int));
static Hint get_jump_table_size PARAMS ((rtx));
static Mnode * move_minipool_fix_forward_ref PARAMS ((Mnode *, Mnode *, Hint));
static Mnode * add_minipool_forward_ref PARAMS ((Mfix *));
static Mnode * move_minipool_fix_backward_ref PARAMS ((Mnode *, Mnode *, Hint));
static Mnode * add_minipool_backward_ref PARAMS ((Mfix *));
static void assign_minipool_offsets PARAMS ((Mfix *));
static void arm_print_value PARAMS ((FILE *, rtx));
static void dump_minipool PARAMS ((rtx));
static int arm_barrier_cost PARAMS ((rtx));
static Mfix * create_fix_barrier PARAMS ((Mfix *, Hint));
static void push_minipool_barrier PARAMS ((rtx, Hint));
static void push_minipool_fix PARAMS ((rtx, Hint, rtx *, Mmode, rtx));
static void note_invalid_constants PARAMS ((rtx, Hint));

#undef Hint
#undef Mmode
#undef Ulong

/* The maximum number of insns skipped which will be conditionalised if
   possible.  */
static int max_insns_skipped = 5;

extern FILE * asm_out_file;

/* True if we are currently building a constant table.  */
int making_const_table;

/* Define the information needed to generate branch insns.  This is
   stored from the compare operation.  */
rtx arm_compare_op0, arm_compare_op1;

/* What type of floating point are we tuning for?  */
enum floating_point_type arm_fpu;

/* What type of floating point instructions are available?  */
enum floating_point_type arm_fpu_arch;

/* What program mode is the cpu running in?  26-bit mode or 32-bit mode.  */
enum prog_mode_type arm_prgmode;

/* Set by the -mfp=... option.  */
const char * target_fp_name = NULL;

/* Used to parse -mstructure_size_boundary command line option.  */
const char * structure_size_string = NULL;
int arm_structure_size_boundary = DEFAULT_STRUCTURE_SIZE_BOUNDARY;

/* Bit values used to identify processor capabilities.  */
#define FL_CO_PROC    (1 << 0)  /* Has external co-processor bus.  */
#define FL_FAST_MULT  (1 << 1)  /* Fast multiply.  */
#define FL_MODE26     (1 << 2)  /* 26-bit mode support.  */
#define FL_MODE32     (1 << 3)  /* 32-bit mode support.  */
#define FL_ARCH4      (1 << 4)  /* Architecture rel 4.  */
#define FL_ARCH5      (1 << 5)  /* Architecture rel 5.  */
#define FL_THUMB      (1 << 6)  /* Thumb aware.  */
#define FL_LDSCHED    (1 << 7)  /* Load scheduling necessary.  */
#define FL_STRONG     (1 << 8)  /* StrongARM.  */

/* The bits in this mask specify which instructions we are
   allowed to generate.  */
static int insn_flags = 0;

/* The bits in this mask specify which instruction scheduling options should
   be used.  Note - there is an overlap with FL_FAST_MULT.  For some
   hardware we want to be able to generate the multiply instructions, but to
   tune as if they were not present in the architecture.  */
static int tune_flags = 0;

/* The following are used in the arm.md file as equivalents to bits
   in the above two flag variables.  */

/* Nonzero if this is an "M" variant of the processor.  */
int arm_fast_multiply = 0;

/* Nonzero if this chip supports the ARM Architecture 4 extensions.  */
int arm_arch4 = 0;

/* Nonzero if this chip supports the ARM Architecture 5 extensions.  */
int arm_arch5 = 0;

/* Nonzero if this chip can benefit from load scheduling.  */
int arm_ld_sched = 0;

/* Nonzero if this chip is a StrongARM.  */
int arm_is_strong = 0;

/* Nonzero if this chip is an ARM6 or an ARM7.  */
int arm_is_6_or_7 = 0;

/* Nonzero if generating Thumb instructions.  */
int thumb_code = 0;

/* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
   must report the mode of the memory reference from PRINT_OPERAND to
   PRINT_OPERAND_ADDRESS.  */
enum machine_mode output_memory_reference_mode;

/* Nonzero if the prologue must set up `fp'.  */
int current_function_anonymous_args;

/* The register number to be used for the PIC offset register.  */
const char * arm_pic_register_string = NULL;
int arm_pic_register = 9;

/* Set to 1 when a return insn is output, this means that the epilogue
   is not needed.  */
int return_used_this_function;

/* Set to 1 after arm_reorg has started.  Reset to start at the start of
   the next function.  */
static int after_arm_reorg = 0;

/* The maximum number of insns to be used when loading a constant.  */
static int arm_constant_limit = 3;

/* For an explanation of these variables, see final_prescan_insn below.  */
int arm_ccfsm_state;
enum arm_cond_code arm_current_cc;
rtx arm_target_insn;
int arm_target_label;

/* The condition codes of the ARM, and the inverse function.  */
char * arm_condition_codes[] =
{
  "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
  "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
};

#define streq(string1, string2) (strcmp (string1, string2) == 0)

/* Initialization code.  */

struct processors
{
  char * name;
  unsigned int flags;
};

/* Not all of these give usefully different compilation alternatives,
   but there is no simple way of generalizing them.  */
static struct processors all_cores[] =
{
  /* ARM Cores */

  { "arm2",          FL_CO_PROC | FL_MODE26 },
  { "arm250",        FL_CO_PROC | FL_MODE26 },
  { "arm3",          FL_CO_PROC | FL_MODE26 },
  { "arm6",          FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  { "arm60",         FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  { "arm600",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  { "arm610",                     FL_MODE26 | FL_MODE32 },
  { "arm620",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  { "arm7",          FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  /* arm7m doesn't exist on its own, but only with D, (and I), but
     those don't alter the code, so arm7m is sometimes used.  */
  { "arm7m",         FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  { "arm7d",         FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  { "arm7dm",        FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  { "arm7di",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  { "arm7dmi",       FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  { "arm70",         FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  { "arm700",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  { "arm700i",       FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  { "arm710",                     FL_MODE26 | FL_MODE32 },
  { "arm720",                     FL_MODE26 | FL_MODE32 },
  { "arm710c",                    FL_MODE26 | FL_MODE32 },
  { "arm7100",                    FL_MODE26 | FL_MODE32 },
  { "arm7500",                    FL_MODE26 | FL_MODE32 },
  /* Doesn't have an external co-proc, but does have embedded fpu.  */
  { "arm7500fe",     FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  { "arm7tdmi",      FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  { "arm8",                       FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  { "arm810",                     FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  { "arm9",                                   FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  { "arm920",                                 FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  { "arm920t",                                FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  { "arm9tdmi",                               FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  { "strongarm",                  FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  { "strongarm110",               FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  { "strongarm1100",              FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },

  { NULL, 0 }
};

static struct processors all_architectures[] =
{
  /* ARM Architectures */

  { "armv2",   FL_CO_PROC | FL_MODE26 },
  { "armv2a",  FL_CO_PROC | FL_MODE26 },
  { "armv3",   FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  { "armv3m",  FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  { "armv4",   FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 },
  /* Strictly, FL_MODE26 is a permitted option for v4t, but there are no
     implementations that support it, so we will leave it out for now.  */
  { "armv4t",  FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  { "armv5",   FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
  { NULL, 0 }
};

/* This is a magic structure.  The 'string' field is magically filled in
   with a pointer to the value specified by the user on the command line
   assuming that the user has specified such a value.  */

struct arm_cpu_select arm_select[] =
{
  /* string     name          processors  */
  { NULL,      "-mcpu=",      all_cores },
  { NULL,      "-march=",     all_architectures },
  { NULL,      "-mtune=",     all_cores }
};

/* Return the number of bits set in VALUE.  */
static unsigned long
bit_count (value)
     signed int value;
{
  unsigned long count = 0;

  while (value)
    {
      value &= ~(value & -value);
      ++count;
    }

  return count;
}

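/* A worked example of the loop above (illustrative; not part of the
   original comments): in two's complement, (value & -value) isolates
   the least significant set bit, so each iteration clears exactly one
   bit.  For value == 0x14:

     0x14 & -0x14 == 0x04  ->  value becomes 0x10, count = 1
     0x10 & -0x10 == 0x10  ->  value becomes 0x00, count = 2

   giving bit_count (0x14) == 2.  */
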
/* Fix up any incompatible options that the user has specified.
   This has now turned into a maze.  */
void
arm_override_options ()
{
  unsigned i;

  /* Set up the flags based on the cpu/architecture selected by the user.  */
  for (i = sizeof (arm_select) / sizeof (arm_select[0]); i--;)
    {
      struct arm_cpu_select * ptr = arm_select + i;

      if (ptr->string != NULL && ptr->string[0] != '\0')
        {
          const struct processors * sel;

          for (sel = ptr->processors; sel->name != NULL; sel++)
            if (streq (ptr->string, sel->name))
              {
                if (i == 2)
                  tune_flags = sel->flags;
                else
                  {
                    /* If we have been given an architecture and a processor
                       make sure that they are compatible.  We only generate
                       a warning though, and we prefer the CPU over the
                       architecture.  */
                    if (insn_flags != 0 && (insn_flags ^ sel->flags))
                      warning ("switch -mcpu=%s conflicts with -march= switch",
                               ptr->string);

                    insn_flags = sel->flags;
                  }

                break;
              }

          if (sel->name == NULL)
            error ("bad value (%s) for %s switch", ptr->string, ptr->name);
        }
    }

  /* If the user did not specify a processor, choose one for them.  */
  if (insn_flags == 0)
    {
      struct processors * sel;
      unsigned int sought;
      static struct cpu_default
      {
        int cpu;
        char * name;
      }
      cpu_defaults[] =
      {
        { TARGET_CPU_arm2,      "arm2" },
        { TARGET_CPU_arm6,      "arm6" },
        { TARGET_CPU_arm610,    "arm610" },
        { TARGET_CPU_arm710,    "arm710" },
        { TARGET_CPU_arm7m,     "arm7m" },
        { TARGET_CPU_arm7500fe, "arm7500fe" },
        { TARGET_CPU_arm7tdmi,  "arm7tdmi" },
        { TARGET_CPU_arm8,      "arm8" },
        { TARGET_CPU_arm810,    "arm810" },
        { TARGET_CPU_arm9,      "arm9" },
        { TARGET_CPU_strongarm, "strongarm" },
        { TARGET_CPU_generic,   "arm" },
        { 0, 0 }
      };
      struct cpu_default * def;

      /* Find the default.  */
      for (def = cpu_defaults; def->name; def++)
        if (def->cpu == TARGET_CPU_DEFAULT)
          break;

      /* Make sure we found the default CPU.  */
      if (def->name == NULL)
        abort ();

      /* Find the default CPU's flags.  */
      for (sel = all_cores; sel->name != NULL; sel++)
        if (streq (def->name, sel->name))
          break;

      if (sel->name == NULL)
        abort ();

      insn_flags = sel->flags;

      /* Now check to see if the user has specified some command line
         switch that requires certain abilities from the cpu.  */
      sought = 0;

      if (TARGET_INTERWORK || TARGET_THUMB)
        {
          sought |= (FL_THUMB | FL_MODE32);

          /* Force apcs-32 to be used for interworking.  */
          target_flags |= ARM_FLAG_APCS_32;

          /* There are no ARM processors that support both APCS-26 and
             interworking.  Therefore we force FL_MODE26 to be removed
             from insn_flags here (if it was set), so that the search
             below will always be able to find a compatible processor.  */
          insn_flags &= ~FL_MODE26;
        }
      else if (! TARGET_APCS_32)
        sought |= FL_MODE26;

      if (sought != 0 && ((sought & insn_flags) != sought))
        {
          /* Try to locate a CPU type that supports all of the abilities
             of the default CPU, plus the extra abilities requested by
             the user.  */
          for (sel = all_cores; sel->name != NULL; sel++)
            if ((sel->flags & sought) == (sought | insn_flags))
              break;

          if (sel->name == NULL)
            {
              unsigned int current_bit_count = 0;
              struct processors * best_fit = NULL;

              /* Ideally we would like to issue an error message here
                 saying that it was not possible to find a CPU compatible
                 with the default CPU, but which also supports the command
                 line options specified by the programmer, and so they
                 ought to use the -mcpu=<name> command line option to
                 override the default CPU type.

                 Unfortunately this does not work with multilibing.  We
                 need to be able to support multilibs for -mapcs-26 and for
                 -mthumb-interwork and there is no CPU that can support both
                 options.  Instead if we cannot find a cpu that has both the
                 characteristics of the default cpu and the given command line
                 options we scan the array again looking for a best match.  */
              for (sel = all_cores; sel->name != NULL; sel++)
                if ((sel->flags & sought) == sought)
                  {
                    unsigned int count;

                    count = bit_count (sel->flags & insn_flags);

                    if (count >= current_bit_count)
                      {
                        best_fit = sel;
                        current_bit_count = count;
                      }
                  }

              if (best_fit == NULL)
                abort ();
              else
                sel = best_fit;
            }

          insn_flags = sel->flags;
        }
    }

  /* If tuning has not been specified, tune for whichever processor or
     architecture has been selected.  */
  if (tune_flags == 0)
    tune_flags = insn_flags;

  /* Make sure that the processor choice does not conflict with any of the
     other command line choices.  */
  if (TARGET_APCS_32 && !(insn_flags & FL_MODE32))
    {
      /* If APCS-32 was not the default then it must have been set by the
         user, so issue a warning message.  If the user has specified
         "-mapcs-32 -mcpu=arm2" then we lose here.  */
      if ((TARGET_DEFAULT & ARM_FLAG_APCS_32) == 0)
        warning ("target CPU does not support APCS-32");
      target_flags &= ~ARM_FLAG_APCS_32;
    }
  else if (! TARGET_APCS_32 && !(insn_flags & FL_MODE26))
    {
      warning ("target CPU does not support APCS-26");
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_INTERWORK && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support interworking");
      target_flags &= ~ARM_FLAG_INTERWORK;
    }

  if (TARGET_THUMB && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support THUMB instructions.");
      target_flags &= ~ARM_FLAG_THUMB;
    }

  if (TARGET_APCS_FRAME && TARGET_THUMB)
    {
      /* warning ("ignoring -mapcs-frame because -mthumb was used."); */
      target_flags &= ~ARM_FLAG_APCS_FRAME;
    }

  /* TARGET_BACKTRACE calls leaf_function_p, which causes a crash if done
     from here where no function is being compiled currently.  */
  if ((target_flags & (THUMB_FLAG_LEAF_BACKTRACE | THUMB_FLAG_BACKTRACE))
      && TARGET_ARM)
    warning ("enabling backtrace support is only meaningful when compiling for the Thumb.");

  if (TARGET_ARM && TARGET_CALLEE_INTERWORKING)
    warning ("enabling callee interworking support is only meaningful when compiling for the Thumb.");

  if (TARGET_ARM && TARGET_CALLER_INTERWORKING)
    warning ("enabling caller interworking support is only meaningful when compiling for the Thumb.");

  /* If interworking is enabled then APCS-32 must be selected as well.  */
  if (TARGET_INTERWORK)
    {
      if (! TARGET_APCS_32)
        warning ("interworking forces APCS-32 to be used");
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_APCS_STACK && ! TARGET_APCS_FRAME)
    {
      warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
      target_flags |= ARM_FLAG_APCS_FRAME;
    }

  if (TARGET_POKE_FUNCTION_NAME)
    target_flags |= ARM_FLAG_APCS_FRAME;

  if (TARGET_APCS_REENT && flag_pic)
    fatal ("-fpic and -mapcs-reent are incompatible");

  if (TARGET_APCS_REENT)
    warning ("APCS reentrant code not supported.  Ignored");

  /* If this target is normally configured to use APCS frames, warn if they
     are turned off and debugging is turned on.  */
  if (TARGET_ARM
      && write_symbols != NO_DEBUG
      && ! TARGET_APCS_FRAME
      && (TARGET_DEFAULT & ARM_FLAG_APCS_FRAME))
    warning ("-g with -mno-apcs-frame may not give sensible debugging");

  /* If stack checking is disabled, we can use r10 as the PIC register,
     which keeps r9 available.  */
  if (flag_pic && ! TARGET_APCS_STACK)
    arm_pic_register = 10;

  if (TARGET_APCS_FLOAT)
    warning ("Passing floating point arguments in fp regs not yet supported");

  /* Initialise boolean versions of the flags, for use in the arm.md file.  */
  arm_fast_multiply = (insn_flags & FL_FAST_MULT) != 0;
  arm_arch4         = (insn_flags & FL_ARCH4) != 0;
  arm_arch5         = (insn_flags & FL_ARCH5) != 0;

  arm_ld_sched      = (tune_flags & FL_LDSCHED) != 0;
  arm_is_strong     = (tune_flags & FL_STRONG) != 0;
  thumb_code        = (TARGET_ARM == 0);
  arm_is_6_or_7     = (((tune_flags & (FL_MODE26 | FL_MODE32))
                        && !(tune_flags & FL_ARCH4))) != 0;

  /* Default value for floating point code... if no co-processor
     bus, then schedule for emulated floating point.  Otherwise,
     assume the user has an FPA.
     Note: this does not prevent use of floating point instructions,
     -msoft-float does that.  */
  arm_fpu = (tune_flags & FL_CO_PROC) ? FP_HARD : FP_SOFT3;

  if (target_fp_name)
    {
      if (streq (target_fp_name, "2"))
        arm_fpu_arch = FP_SOFT2;
      else if (streq (target_fp_name, "3"))
        arm_fpu_arch = FP_SOFT3;
      else
        fatal ("Invalid floating point emulation option: -mfpe-%s",
               target_fp_name);
    }
  else
    arm_fpu_arch = FP_DEFAULT;

  if (TARGET_FPE && arm_fpu != FP_HARD)
    arm_fpu = FP_SOFT2;

  /* For arm2/3 there is no need to do any scheduling if there is only
     a floating point emulator, or we are doing software floating-point.  */
  if ((TARGET_SOFT_FLOAT || arm_fpu != FP_HARD)
      && (tune_flags & FL_MODE32) == 0)
    flag_schedule_insns = flag_schedule_insns_after_reload = 0;

  arm_prgmode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;

  if (structure_size_string != NULL)
    {
      int size = strtol (structure_size_string, NULL, 0);

      if (size == 8 || size == 32)
        arm_structure_size_boundary = size;
      else
        warning ("Structure size boundary can only be set to 8 or 32");
    }

  if (arm_pic_register_string != NULL)
    {
      int pic_register;

      if (! flag_pic)
        warning ("-mpic-register= is useless without -fpic");

      pic_register = decode_reg_name (arm_pic_register_string);

      /* Prevent the user from choosing an obviously stupid PIC register.  */
      if (pic_register < 0 || call_used_regs[pic_register]
          || pic_register == HARD_FRAME_POINTER_REGNUM
          || pic_register == STACK_POINTER_REGNUM
          || pic_register >= PC_REGNUM)
        error ("Unable to use '%s' for PIC register", arm_pic_register_string);
      else
        arm_pic_register = pic_register;
    }

  if (TARGET_THUMB && flag_schedule_insns)
    {
      /* Don't warn since it's on by default in -O2.  */
      flag_schedule_insns = 0;
    }

  /* If optimizing for space, don't synthesize constants.
     For processors with load scheduling, it never costs more than 2 cycles
     to load a constant, and the load scheduler may well reduce that to 1.  */
  if (optimize_size || (tune_flags & FL_LDSCHED))
    arm_constant_limit = 1;

  /* If optimizing for size, bump the number of instructions that we
     are prepared to conditionally execute (even on a StrongARM).
     Otherwise for the StrongARM, which has early execution of branches,
     a sequence that is worth skipping is shorter.  */
  if (optimize_size)
    max_insns_skipped = 6;
  else if (arm_is_strong)
    max_insns_skipped = 3;

  /* Register global variables with the garbage collector.  */
  arm_add_gc_roots ();
}

static void
arm_add_gc_roots ()
{
  ggc_add_rtx_root (&arm_compare_op0, 1);
  ggc_add_rtx_root (&arm_compare_op1, 1);
  ggc_add_rtx_root (&arm_target_insn, 1); /* Not sure this is really a root.  */
  /* XXX: What about the minipool tables?  */
}

/* Return 1 if it is possible to return using a single instruction.  */
int
use_return_insn (iscond)
     int iscond;
{
  int regno;

  /* Never use a return instruction before reload has run.  */
  if (! reload_completed
      /* Or if the function is variadic.  */
      || current_function_pretend_args_size
      || current_function_anonymous_args
      /* Or if the function calls __builtin_eh_return ().  */
      || cfun->machine->eh_epilogue_sp_ofs != NULL
      /* Or if there is no frame pointer and there is a stack adjustment.  */
      || ((get_frame_size () + current_function_outgoing_args_size != 0)
          && ! frame_pointer_needed))
    return 0;

  /* Can't be done if interworking with Thumb, and any registers have been
     stacked.  Similarly, on StrongARM, conditional returns are expensive
     if they aren't taken and registers have been stacked.  */
  if (iscond && arm_is_strong && frame_pointer_needed)
    return 0;

  if ((iscond && arm_is_strong)
      || TARGET_INTERWORK)
    {
      for (regno = 0; regno <= LAST_ARM_REGNUM; regno++)
        if (regs_ever_live[regno] && ! call_used_regs[regno])
          return 0;

      if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
        return 0;
    }

  /* Can't be done if any of the FPU regs are pushed, since this also
     requires an insn.  */
  if (TARGET_HARD_FLOAT)
    for (regno = FIRST_ARM_FP_REGNUM; regno <= LAST_ARM_FP_REGNUM; regno++)
      if (regs_ever_live[regno] && ! call_used_regs[regno])
        return 0;

  /* If a function is naked, don't use the "return" insn.  */
  if (arm_naked_function_p (current_function_decl))
    return 0;

  return 1;
}

/* Return TRUE if int I is a valid immediate ARM constant.  */
int
const_ok_for_arm (i)
     HOST_WIDE_INT i;
{
  unsigned HOST_WIDE_INT mask = ~ HOST_UINT (0xFF);

  /* For machines with >32 bit HOST_WIDE_INT, the bits above bit 31 must
     be all zero, or all one.  */
  if ((i & ~ HOST_UINT (0xffffffff)) != 0
      && ((i & ~ HOST_UINT (0xffffffff))
          != ((~ HOST_UINT (0))
              & ~ HOST_UINT (0xffffffff))))
    return FALSE;

  /* Fast return for 0 and powers of 2.  */
  if ((i & (i - 1)) == 0)
    return TRUE;

  do
    {
      if ((i & mask & HOST_UINT (0xffffffff)) == 0)
        return TRUE;
      mask =
        (mask << 2) | ((mask & HOST_UINT (0xffffffff))
                       >> (32 - 2)) | ~(HOST_UINT (0xffffffff));
    } while (mask != ~ HOST_UINT (0xFF));

  return FALSE;
}

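/* The loop above rotates an 8-bit window around the word in two-bit
   steps, which is exactly the ARM immediate rule: a data-processing
   immediate must be an 8-bit value rotated right by an even amount.
   A minimal standalone sketch of the same test follows; it is
   illustrative only (it assumes a plain 32-bit unsigned int and is
   not part of the build).  */
#if 0
static int
is_arm_immediate (unsigned int i)
{
  int rot;

  /* Try every even rotation; the constant is encodable if some
     rotation of it fits in the low 8 bits.  */
  for (rot = 0; rot < 32; rot += 2)
    {
      unsigned int v = rot ? ((i << rot) | (i >> (32 - rot))) : i;

      if ((v & ~0xFFu) == 0)
        return 1;   /* E.g. 0x0000FF00 and 0xFF000000 qualify.  */
    }

  return 0;         /* E.g. 0x00000101 does not.  */
}
#endif
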
/* Return true if I is a valid constant for the operation CODE.  */
static int
const_ok_for_op (i, code)
     HOST_WIDE_INT i;
     enum rtx_code code;
{
  if (const_ok_for_arm (i))
    return 1;

  switch (code)
    {
    case PLUS:
      return const_ok_for_arm (ARM_SIGN_EXTEND (-i));

    case MINUS:  /* Should only occur with (MINUS I reg) => rsb.  */
    case XOR:
    case IOR:
      return 0;

    case AND:
      return const_ok_for_arm (ARM_SIGN_EXTEND (~i));

    default:
      abort ();
    }
}

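/* Illustrative note (not from the original comments): the PLUS and AND
   cases above work because the md patterns can flip the opcode when the
   operand is negated or inverted.  For instance, -5 is not itself a
   valid ARM immediate, but const_ok_for_op (-5, PLUS) holds because
   (plus r1 -5) can be emitted as "sub r0, r1, #5"; likewise an AND with
   an un-encodable mask can become a BIC with the inverted mask.  */
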
/* Emit a sequence of insns to handle a large constant.
   CODE is the code of the operation required, it can be any of SET, PLUS,
   IOR, AND, XOR, MINUS;
   MODE is the mode in which the operation is being performed;
   VAL is the integer to operate on;
   SOURCE is the other operand (a register, or a null-pointer for SET);
   SUBTARGETS means it is safe to create scratch registers if that will
   either produce a simpler sequence, or we will want to cse the values.
   Return value is the number of insns emitted.  */
int
arm_split_constant (code, mode, val, target, source, subtargets)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
{
  if (subtargets || code == SET
      || (GET_CODE (target) == REG && GET_CODE (source) == REG
          && REGNO (target) != REGNO (source)))
    {
      /* After arm_reorg has been called, we can't fix up expensive
         constants by pushing them into memory so we must synthesise
         them in-line, regardless of the cost.  This is only likely to
         be more costly on chips that have load delay slots and we are
         compiling without running the scheduler (so no splitting
         occurred before the final instruction emission).

         Ref: gcc -O1 -mcpu=strongarm gcc.c-torture/compile/980506-2.c  */
      if (! after_arm_reorg
          && (arm_gen_constant (code, mode, val, target, source, 1, 0)
              > arm_constant_limit + (code != SET)))
        {
          if (code == SET)
            {
              /* Currently SET is the only monadic value for CODE, all
                 the rest are dyadic.  */
              emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (val)));
              return 1;
            }
          else
            {
              rtx temp = subtargets ? gen_reg_rtx (mode) : target;

              emit_insn (gen_rtx_SET (VOIDmode, temp, GEN_INT (val)));
              /* For MINUS, the value is subtracted from, since we never
                 have subtraction of a constant.  */
              if (code == MINUS)
                emit_insn (gen_rtx_SET (VOIDmode, target,
                                        gen_rtx_MINUS (mode, temp, source)));
              else
                emit_insn (gen_rtx_SET (VOIDmode, target,
                                        gen_rtx (code, mode, source, temp)));
              return 2;
            }
        }
    }

  return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
}

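/* Worked example (illustrative, not part of the original comments):
   with code == SET and val == 0x00ff00ff, the value itself is not a
   valid immediate but each byte-sized chunk is, so arm_gen_constant
   synthesises it in two insns:

        mov     rd, #0x000000ff
        add     rd, rd, #0x00ff0000

   (the SET is rewritten as a PLUS for the second chunk).  Since this
   estimate does not exceed arm_constant_limit, the in-line sequence is
   used; only costlier constants take the load-then-operate path above,
   and after arm_reorg such constants may end up in a literal pool.  */
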
/* As above, but extra parameter GENERATE which, if clear, suppresses
   RTL generation.  */
static int
arm_gen_constant (code, mode, val, target, source, subtargets, generate)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
     int generate;
{
  int can_invert = 0;
  int can_negate = 0;
  int can_negate_initial = 0;
  int can_shift = 0;
  int i;
  int num_bits_set = 0;
  int set_sign_bit_copies = 0;
  int clear_sign_bit_copies = 0;
  int clear_zero_bit_copies = 0;
  int set_zero_bit_copies = 0;
  int insns = 0;
  unsigned HOST_WIDE_INT temp1, temp2;
  unsigned HOST_WIDE_INT remainder = val & HOST_UINT (0xffffffff);

  /* Find out which operations are safe for a given CODE.  Also do a quick
     check for degenerate cases; these can occur when DImode operations
     are split.  */
  switch (code)
    {
    case SET:
      can_invert = 1;
      can_shift = 1;
      can_negate = 1;
      break;

    case PLUS:
      can_negate = 1;
      can_negate_initial = 1;
      break;

    case IOR:
      if (remainder == HOST_UINT (0xffffffff))
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    GEN_INT (ARM_SIGN_EXTEND (val))));
          return 1;
        }
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      break;

    case AND:
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, const0_rtx));
          return 1;
        }
      if (remainder == HOST_UINT (0xffffffff))
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      can_invert = 1;
      break;

    case XOR:
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      if (remainder == HOST_UINT (0xffffffff))
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_NOT (mode, source)));
          return 1;
        }

      /* We don't know how to handle this yet below.  */
      abort ();

    case MINUS:
      /* We treat MINUS as (val - source), since (source - val) is always
         passed as (source + (-val)).  */
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_NEG (mode, source)));
          return 1;
        }
      if (const_ok_for_arm (val))
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_MINUS (mode, GEN_INT (val),
                                                   source)));
          return 1;
        }
      can_negate = 1;

      break;

    default:
      abort ();
    }

  /* If we can do it in one insn get out quickly.  */
  if (const_ok_for_arm (val)
      || (can_negate_initial && const_ok_for_arm (-val))
      || (can_invert && const_ok_for_arm (~val)))
    {
      if (generate)
        emit_insn (gen_rtx_SET (VOIDmode, target,
                                (source ? gen_rtx (code, mode, source,
                                                   GEN_INT (val))
                                 : GEN_INT (val))));
      return 1;
    }

  /* Calculate a few attributes that may be useful for specific
     optimizations.  */
  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) == 0)
        clear_sign_bit_copies++;
      else
        break;
    }

  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) != 0)
        set_sign_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) == 0)
        clear_zero_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) != 0)
        set_zero_bit_copies++;
      else
        break;
    }

  switch (code)
    {
    case SET:
      /* See if we can do this by sign_extending a constant that is known
         to be negative.  This is a good way of doing it, since the shift
         may well merge into a subsequent insn.  */
      if (set_sign_bit_copies > 1)
        {
          if (const_ok_for_arm
              (temp1 = ARM_SIGN_EXTEND (remainder
                                        << (set_sign_bit_copies - 1))))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx_SET (VOIDmode, new_src,
                                          GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
          /* For an inverted constant, we will need to set the low bits,
             these will be shifted out of harm's way.  */
          temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
          if (const_ok_for_arm (~temp1))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx_SET (VOIDmode, new_src,
                                          GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
        }

      /* See if we can generate this by setting the bottom (or the top)
         16 bits, and then shifting these into the other half of the
         word.  We only look for the simplest cases, to do more would cost
         too much.  Be careful, however, not to generate this when the
         alternative would take fewer insns.  */
      if (val & HOST_UINT (0xffff0000))
        {
          temp1 = remainder & HOST_UINT (0xffff0000);
          temp2 = remainder & 0x0000ffff;

          /* Overlaps outside this range are best done using other methods.  */
          for (i = 9; i < 24; i++)
            {
              if ((((temp2 | (temp2 << i))
                    & HOST_UINT (0xffffffff)) == remainder)
                  && ! const_ok_for_arm (temp2))
                {
                  rtx new_src = (subtargets
                                 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
                                 : target);
                  insns = arm_gen_constant (code, mode, temp2, new_src,
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn (gen_rtx_SET
                               (VOIDmode, target,
                                gen_rtx_IOR (mode,
                                             gen_rtx_ASHIFT (mode, source,
                                                             GEN_INT (i)),
                                             source)));
                  return insns + 1;
                }
            }

          /* Don't duplicate cases already considered.  */
          for (i = 17; i < 24; i++)
            {
              if (((temp1 | (temp1 >> i)) == remainder)
                  && ! const_ok_for_arm (temp1))
                {
                  rtx new_src = (subtargets
                                 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
                                 : target);
                  insns = arm_gen_constant (code, mode, temp1, new_src,
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn
                      (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_IOR
                                    (mode,
                                     gen_rtx_LSHIFTRT (mode, source,
                                                       GEN_INT (i)),
                                     source)));
                  return insns + 1;
                }
            }
        }
      break;

    case IOR:
    case XOR:
      /* If we have IOR or XOR, and the constant can be loaded in a
         single instruction, and we can find a temporary to put it in,
         then this can be done in two instructions instead of 3-4.  */
      if (subtargets
          /* TARGET can't be NULL if SUBTARGETS is 0.  */
          || (reload_completed && ! reg_mentioned_p (target, source)))
        {
          if (const_ok_for_arm (ARM_SIGN_EXTEND (~ val)))
            {
              if (generate)
                {
                  rtx sub = subtargets ? gen_reg_rtx (mode) : target;

                  emit_insn (gen_rtx_SET (VOIDmode, sub, GEN_INT (val)));
                  emit_insn (gen_rtx_SET (VOIDmode, target,
                                          gen_rtx (code, mode, source, sub)));
                }
              return 2;
            }
        }

      if (code == XOR)
        break;

      if (set_sign_bit_copies > 8
          && (val & (-1 << (32 - set_sign_bit_copies))) == val)
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (set_sign_bit_copies);

              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_ASHIFT (mode,
                                                                   source,
                                                                   shift))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_LSHIFTRT (mode, sub,
                                                                     shift))));
            }
          return 2;
        }

      if (set_zero_bit_copies > 8
          && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (set_zero_bit_copies);

              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_LSHIFTRT (mode,
                                                                     source,
                                                                     shift))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_ASHIFT (mode, sub,
                                                                   shift))));
            }
          return 2;
        }

      if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~ val)))
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode, source)));
              source = sub;
              if (subtargets)
                sub = gen_reg_rtx (mode);
              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_AND (mode, source,
                                                   GEN_INT (temp1))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode, sub)));
            }
          return 3;
        }
      break;

    case AND:
      /* See if two shifts will do 2 or more insn's worth of work.  */
      if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
        {
          HOST_WIDE_INT shift_mask = (((HOST_UINT (0xffffffff))
                                       << (32 - clear_sign_bit_copies))
                                      & HOST_UINT (0xffffffff));

          if ((remainder | shift_mask) != HOST_UINT (0xffffffff))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            new_src, source, subtargets, 1);
                  source = new_src;
                }
              else
                {
                  rtx targ = subtargets ? NULL_RTX : target;
                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            targ, source, subtargets, 0);
                }
            }

          if (generate)
            {
              rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (clear_sign_bit_copies);

              emit_insn (gen_ashlsi3 (new_src, source, shift));
              emit_insn (gen_lshrsi3 (target, new_src, shift));
            }

          return insns + 2;
        }

      if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
        {
          HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;

          if ((remainder | shift_mask) != HOST_UINT (0xffffffff))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;

                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            new_src, source, subtargets, 1);
                  source = new_src;
                }
              else
                {
                  rtx targ = subtargets ? NULL_RTX : target;

                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            targ, source, subtargets, 0);
                }
            }

          if (generate)
            {
              rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (clear_zero_bit_copies);

              emit_insn (gen_lshrsi3 (new_src, source, shift));
              emit_insn (gen_ashlsi3 (target, new_src, shift));
            }

          return insns + 2;
        }

      break;

    default:
      break;
    }

  for (i = 0; i < 32; i++)
    if (remainder & (1 << i))
      num_bits_set++;

  if (code == AND || (can_invert && num_bits_set > 16))
    remainder = (~remainder) & HOST_UINT (0xffffffff);
  else if (code == PLUS && num_bits_set > 16)
    remainder = (-remainder) & HOST_UINT (0xffffffff);
  else
    {
      can_invert = 0;
      can_negate = 0;
    }

  /* Now try and find a way of doing the job in either two or three
     instructions.
     We start by looking for the largest block of zeros that are aligned on
     a 2-bit boundary, we then fill up the temps, wrapping around to the
     top of the word when we drop off the bottom.
     In the worst case this code should produce no more than four insns.  */
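  /* Illustrative trace (not part of the original comment): for
     remainder == 0x0f00000f the widest 2-bit-aligned run of zeros is
     bits 4..23, so best_start becomes 4.  The do-while below then
     peels off 8-bit-wide chunks on either side of that gap: first
     0x0000000f (its window wraps past bit 0), then 0x0f000000, each
     of which is a valid rotated 8-bit immediate, giving two insns.  */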
  {
    int best_start = 0;
    int best_consecutive_zeros = 0;

    for (i = 0; i < 32; i += 2)
      {
        int consecutive_zeros = 0;

        if (! (remainder & (3 << i)))
          {
            while ((i < 32) && ! (remainder & (3 << i)))
              {
                consecutive_zeros += 2;
                i += 2;
              }
            if (consecutive_zeros > best_consecutive_zeros)
              {
                best_consecutive_zeros = consecutive_zeros;
                best_start = i - consecutive_zeros;
              }
            i -= 2;
          }
      }

    /* Now start emitting the insns, starting with the one with the highest
       bit set: we do this so that the smallest number will be emitted last;
       this is more likely to be combinable with addressing insns.  */
    i = best_start;
    do
      {
        int end;

        if (i <= 0)
          i += 32;
        if (remainder & (3 << (i - 2)))
          {
            end = i - 8;
            if (end < 0)
              end += 32;
            temp1 = remainder & ((0x0ff << end)
                                 | ((i < end) ? (0xff >> (32 - end)) : 0));
            remainder &= ~temp1;

            if (generate)
              {
                rtx new_src;

                if (code == SET)
                  emit_insn (gen_rtx_SET (VOIDmode,
                                          new_src = (subtargets
                                                     ? gen_reg_rtx (mode)
                                                     : target),
                                          GEN_INT (can_invert
                                                   ? ~temp1 : temp1)));
                else if (code == MINUS)
                  emit_insn (gen_rtx_SET (VOIDmode,
                                          new_src = (subtargets
                                                     ? gen_reg_rtx (mode)
                                                     : target),
                                          gen_rtx (code, mode, GEN_INT (temp1),
                                                   source)));
                else
                  emit_insn (gen_rtx_SET (VOIDmode,
                                          new_src = (remainder
                                                     ? (subtargets
                                                        ? gen_reg_rtx (mode)
                                                        : target)
                                                     : target),
                                          gen_rtx (code, mode, source,
                                                   GEN_INT (can_invert ? ~temp1
                                                            : (can_negate
                                                               ? -temp1
                                                               : temp1)))));
                source = new_src;
              }

            if (code == SET)
              {
                can_invert = 0;
                code = PLUS;
              }
            else if (code == MINUS)
              code = PLUS;

            insns++;
            i -= 6;
          }
        i -= 2;
      } while (remainder);
  }

  return insns;
}

/* Canonicalize a comparison so that we are more likely to recognize it.
   This can be done for a few constant compares, where we can make the
   immediate value easier to load.  */
enum rtx_code
arm_canonicalize_comparison (code, op1)
     enum rtx_code code;
     rtx * op1;
{
  unsigned HOST_WIDE_INT i = INTVAL (*op1);

  switch (code)
    {
    case EQ:
    case NE:
      return code;

    case GT:
    case LE:
      if (i != (((HOST_UINT (1)) << (HOST_BITS_PER_WIDE_INT - 1))
                - 1)
          && (const_ok_for_arm (i + 1) || const_ok_for_arm (- (i + 1))))
        {
          *op1 = GEN_INT (i + 1);
          return code == GT ? GE : LT;
        }
      break;

    case GE:
    case LT:
      if (i != ((HOST_UINT (1)) << (HOST_BITS_PER_WIDE_INT - 1))
          && (const_ok_for_arm (i - 1) || const_ok_for_arm (- (i - 1))))
        {
          *op1 = GEN_INT (i - 1);
          return code == GE ? GT : LE;
        }
      break;

    case GTU:
    case LEU:
      if (i != ~ (HOST_UINT (0))
          && (const_ok_for_arm (i + 1) || const_ok_for_arm (- (i + 1))))
        {
          *op1 = GEN_INT (i + 1);
          return code == GTU ? GEU : LTU;
        }
      break;

    case GEU:
    case LTU:
      if (i != 0
          && (const_ok_for_arm (i - 1) || const_ok_for_arm (- (i - 1))))
        {
          *op1 = GEN_INT (i - 1);
          return code == GEU ? GTU : LEU;
        }
      break;

    default:
      abort ();
    }

  return code;
}

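/* Worked example (illustrative): 0x3ff is not a valid ARM immediate,
   but 0x400 is, so (GT x 0x3ff) is rewritten above as (GE x 0x400).
   The resulting "cmp rn, #0x400; bge ..." needs no extra constant
   load, whereas keeping 0x3ff would have required one.  */
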
/* Decide whether a type should be returned in memory (true)
   or in a register (false).  This is called by the macro
   RETURN_IN_MEMORY.  */
int
arm_return_in_memory (type)
     tree type;
{
  if (! AGGREGATE_TYPE_P (type))
    /* All simple types are returned in registers.  */
    return 0;

  /* For the arm-wince targets we choose to be compatible with Microsoft's
     ARM and Thumb compilers, which always return aggregates in memory.  */
#ifndef ARM_WINCE

  if (int_size_in_bytes (type) > 4)
    /* All structures/unions bigger than one word are returned in memory.  */
    return 1;

  if (TREE_CODE (type) == RECORD_TYPE)
    {
      tree field;

      /* For a struct the APCS says that we only return in a register
         if the type is 'integer like' and every addressable element
         has an offset of zero.  For practical purposes this means
         that the structure can have at most one non bit-field element
         and that this element must be the first one in the structure.  */

      /* Find the first field, ignoring non FIELD_DECL things which will
         have been created by C++.  */
      for (field = TYPE_FIELDS (type);
           field && TREE_CODE (field) != FIELD_DECL;
           field = TREE_CHAIN (field))
        continue;

      if (field == NULL)
        return 0; /* An empty structure.  Allowed by an extension to ANSI C.  */

      /* Check that the first field is valid for returning in a register.  */

      /* ... Floats are not allowed.  */
      if (FLOAT_TYPE_P (TREE_TYPE (field)))
        return 1;

      /* ... Aggregates that are not themselves valid for returning in
         a register are not allowed.  */
      if (RETURN_IN_MEMORY (TREE_TYPE (field)))
        return 1;

      /* Now check the remaining fields, if any.  Only bitfields are allowed,
         since they are not addressable.  */
      for (field = TREE_CHAIN (field);
           field;
           field = TREE_CHAIN (field))
        {
          if (TREE_CODE (field) != FIELD_DECL)
            continue;

          if (! DECL_BIT_FIELD_TYPE (field))
            return 1;
        }

      return 0;
    }

  if (TREE_CODE (type) == UNION_TYPE)
    {
      tree field;

      /* Unions can be returned in registers if every element is
         integral, or can be returned in an integer register.  */
      for (field = TYPE_FIELDS (type);
           field;
           field = TREE_CHAIN (field))
        {
          if (TREE_CODE (field) != FIELD_DECL)
            continue;

          if (FLOAT_TYPE_P (TREE_TYPE (field)))
            return 1;

          if (RETURN_IN_MEMORY (TREE_TYPE (field)))
            return 1;
        }

      return 0;
    }
#endif /* not ARM_WINCE */

  /* Return all other types in memory.  */
  return 1;
}

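/* Examples of the rules above (illustrative): under the APCS,
   struct { int i; } and struct { int i : 8; int j : 24; } come back in
   r0, because every addressable field sits at offset zero; whereas
   struct { float f; } and struct { short a; short b; } (two
   addressable fields) are returned in memory.  */
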
/* Initialize a variable CUM of type CUMULATIVE_ARGS
   for a call to a function whose data type is FNTYPE.
   For a library call, FNTYPE is NULL.  */
void
arm_init_cumulative_args (pcum, fntype, libname, indirect)
     CUMULATIVE_ARGS * pcum;
     tree fntype;
     rtx libname ATTRIBUTE_UNUSED;
     int indirect ATTRIBUTE_UNUSED;
{
  /* On the ARM, the offset starts at 0.  */
  pcum->nregs = ((fntype && aggregate_value_p (TREE_TYPE (fntype))) ? 1 : 0);

  pcum->call_cookie = CALL_NORMAL;

  if (TARGET_LONG_CALLS)
    pcum->call_cookie = CALL_LONG;

  /* Check for long call/short call attributes.  The attributes
     override any command line option.  */
  if (fntype)
    {
      if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (fntype)))
        pcum->call_cookie = CALL_SHORT;
      else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (fntype)))
        pcum->call_cookie = CALL_LONG;
    }
}

/* Determine where to put an argument to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).  */
rtx
arm_function_arg (pcum, mode, type, named)
     CUMULATIVE_ARGS * pcum;
     enum machine_mode mode;
     tree type ATTRIBUTE_UNUSED;
     int named;
{
  if (mode == VOIDmode)
    /* Compute operand 2 of the call insn.  */
    return GEN_INT (pcum->call_cookie);

  if (! named || pcum->nregs >= NUM_ARG_REGS)
    return NULL_RTX;

  return gen_rtx_REG (mode, pcum->nregs);
}

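/* Illustrative note: under the APCS, NUM_ARG_REGS is 4, so for
   int f (int a, int b, int c, int d, int e) the first four arguments
   arrive in r0-r3 and this function returns NULL_RTX for E, which is
   therefore pushed on the stack.  A struct-valued return bumps
   pcum->nregs in arm_init_cumulative_args above, so the return-value
   pointer occupies r0 and the visible arguments shift up a register.  */
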
/* Encode the current state of the #pragma [no_]long_calls.  */
typedef enum
{
  OFF,    /* No #pragma [no_]long_calls is in effect.  */
  LONG,   /* #pragma long_calls is in effect.  */
  SHORT   /* #pragma no_long_calls is in effect.  */
} arm_pragma_enum;

static arm_pragma_enum arm_pragma_long_calls = OFF;

/* Handle pragmas for compatibility with Intel's compilers.
   FIXME: This is incomplete, since it does not handle all
   the pragmas that the Intel compilers understand.  */
int
arm_process_pragma (p_getc, p_ungetc, pname)
     int (* p_getc) PARAMS ((void)) ATTRIBUTE_UNUSED;
     void (* p_ungetc) PARAMS ((int)) ATTRIBUTE_UNUSED;
     char * pname;
{
  /* Should be pragma 'far' or equivalent for callx/balx here.  */
  if (strcmp (pname, "long_calls") == 0)
    arm_pragma_long_calls = LONG;
  else if (strcmp (pname, "no_long_calls") == 0)
    arm_pragma_long_calls = SHORT;
  else if (strcmp (pname, "long_calls_off") == 0)
    arm_pragma_long_calls = OFF;
  else
    return 0;

  return 1;
}

/* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
   attribute for TYPE.  The attributes in ATTRIBUTES have previously been
   assigned to TYPE.  */
int
arm_valid_type_attribute_p (type, attributes, identifier, args)
     tree type;
     tree attributes ATTRIBUTE_UNUSED;
     tree identifier;
     tree args;
{
  if (   TREE_CODE (type) != FUNCTION_TYPE
      && TREE_CODE (type) != METHOD_TYPE
      && TREE_CODE (type) != FIELD_DECL
      && TREE_CODE (type) != TYPE_DECL)
    return 0;

  /* Function calls made to this symbol must be done indirectly, because
     it may lie outside of the 26 bit addressing range of a normal function
     call.  */
  if (is_attribute_p ("long_call", identifier))
    return (args == NULL_TREE);

  /* Whereas these functions are always known to reside within the 26 bit
     addressing range.  */
  if (is_attribute_p ("short_call", identifier))
    return (args == NULL_TREE);

  return 0;
}

/* Return 0 if the attributes for two types are incompatible, 1 if they
   are compatible, and 2 if they are nearly compatible (which causes a
   warning to be generated).  */
int
arm_comp_type_attributes (type1, type2)
     tree type1;
     tree type2;
{
  int l1, l2, s1, s2;

  /* Check for mismatch of non-default calling convention.  */
  if (TREE_CODE (type1) != FUNCTION_TYPE)
    return 1;

  /* Check for mismatched call attributes.  */
  l1 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type1)) != NULL;
  l2 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type2)) != NULL;
  s1 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type1)) != NULL;
  s2 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type2)) != NULL;

  /* Only bother to check if an attribute is defined.  */
  if (l1 | l2 | s1 | s2)
    {
      /* If one type has an attribute, the other must have the same attribute.  */
      if ((l1 != l2) || (s1 != s2))
        return 0;

      /* Disallow mixed attributes.  */
      if ((l1 & s2) || (l2 & s1))
        return 0;
    }

  return 1;
}

/* Encode long_call or short_call attribute by prefixing
   symbol name in DECL with a special character FLAG.  */
void
arm_encode_call_attribute (decl, flag)
     tree decl;
     char flag;
{
  const char * str = XSTR (XEXP (DECL_RTL (decl), 0), 0);
  int len = strlen (str);
  char * newstr;

  if (TREE_CODE (decl) != FUNCTION_DECL)
    return;

  /* Do not allow weak functions to be treated as short call.  */
  if (DECL_WEAK (decl) && flag == SHORT_CALL_FLAG_CHAR)
    return;

  if (ggc_p)
    newstr = ggc_alloc_string (NULL, len + 2);
  else
    newstr = permalloc (len + 2);

  sprintf (newstr, "%c%s", flag, str);

  XSTR (XEXP (DECL_RTL (decl), 0), 0) = newstr;
}

/* Assigns default attributes to newly defined type.  This is used to
   set short_call/long_call attributes for function types of
   functions defined inside corresponding #pragma scopes.  */
void
arm_set_default_type_attributes (type)
     tree type;
{
  /* Add __attribute__ ((long_call)) to all functions, when
     inside #pragma long_calls or __attribute__ ((short_call)),
     when inside #pragma no_long_calls.  */
  if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
    {
      tree type_attr_list, attr_name;
      type_attr_list = TYPE_ATTRIBUTES (type);

      if (arm_pragma_long_calls == LONG)
        attr_name = get_identifier ("long_call");
      else if (arm_pragma_long_calls == SHORT)
        attr_name = get_identifier ("short_call");
      else
        return;

      type_attr_list = tree_cons (attr_name, NULL_TREE, type_attr_list);
      TYPE_ATTRIBUTES (type) = type_attr_list;
    }
}

/* Return 1 if the operand is a SYMBOL_REF for a function known to be
   defined within the current compilation unit.  If this cannot be
   determined, then 0 is returned.  */
static int
current_file_function_operand (sym_ref)
     rtx sym_ref;
{
  /* This is a bit of a fib.  A function will have a short call flag
     applied to its name if it has the short call attribute, or it has
     already been defined within the current compilation unit.  */
  if (ENCODED_SHORT_CALL_ATTR_P (XSTR (sym_ref, 0)))
    return 1;

  /* The current function is always defined within the current compilation
     unit.  If it is a weak definition, however, then this may not be the
     real definition of the function, and so we have to say no.  */
  if (sym_ref == XEXP (DECL_RTL (current_function_decl), 0)
      && ! DECL_WEAK (current_function_decl))
    return 1;

  /* We cannot make the determination - default to returning 0.  */
  return 0;
}

/* Return non-zero if a 32 bit "long_call" should be generated for
   this call.  We generate a long_call if the function:

        a.  has an __attribute__ ((long_call))
   or   b.  is within the scope of a #pragma long_calls
   or   c.  the -mlong-calls command line switch has been specified

   However we do not generate a long call if the function:

        d.  has an __attribute__ ((short_call))
   or   e.  is inside the scope of a #pragma no_long_calls
   or   f.  has an __attribute__ ((section))
   or   g.  is defined within the current compilation unit.

   This function will be called by C fragments contained in the machine
   description file.  CALL_REF and CALL_COOKIE correspond to the matched
   rtl operands.  CALL_SYMBOL is used to distinguish between
   two different callers of the function.  It is set to 1 in the
   "call_symbol" and "call_symbol_value" patterns and to 0 in the "call"
   and "call_value" patterns.  This is because of the difference in the
   SYM_REFs passed by these patterns.  */
int
arm_is_longcall_p (sym_ref, call_cookie, call_symbol)
     rtx sym_ref;
     int call_cookie;
     int call_symbol;
{
  if (! call_symbol)
    {
      if (GET_CODE (sym_ref) != MEM)
        return 0;

      sym_ref = XEXP (sym_ref, 0);
    }

  if (GET_CODE (sym_ref) != SYMBOL_REF)
    return 0;

  if (call_cookie & CALL_SHORT)
    return 0;

  if (TARGET_LONG_CALLS && flag_function_sections)
    return 1;

  if (current_file_function_operand (sym_ref))
    return 0;

  return (call_cookie & CALL_LONG)
    || ENCODED_LONG_CALL_ATTR_P (XSTR (sym_ref, 0))
    || TARGET_LONG_CALLS;
}

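/* Usage sketch (illustrative): with the rules above,

     void far_func (void) __attribute__ ((long_call));

   forces calls to far_func to load the address into a register and
   branch through it, rather than using a plain "bl"; while a function
   already defined in this translation unit is still called with "bl"
   even under -mlong-calls, because rule (g) short-circuits the cookie
   check via current_file_function_operand.  */
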
int
legitimate_pic_operand_p (x)
     rtx x;
{
  if (CONSTANT_P (x)
      && flag_pic
      && (GET_CODE (x) == SYMBOL_REF
          || (GET_CODE (x) == CONST
              && GET_CODE (XEXP (x, 0)) == PLUS
              && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)))
    return 0;

  return 1;
}

rtx
legitimize_pic_address (orig, mode, reg)
     rtx orig;
     enum machine_mode mode;
     rtx reg;
{
  if (GET_CODE (orig) == SYMBOL_REF)
    {
      rtx pic_ref, address;
      rtx insn;
      int subregs = 0;

      if (reg == 0)
        {
          if (reload_in_progress || reload_completed)
            abort ();
          else
            reg = gen_reg_rtx (Pmode);

          subregs = 1;
        }

#ifdef AOF_ASSEMBLER
      /* The AOF assembler can generate relocations for these directly, and
         understands that the PIC register has to be added into the offset.  */
      insn = emit_insn (gen_pic_load_addr_based (reg, orig));
#else
      if (subregs)
        address = gen_reg_rtx (Pmode);
      else
        address = reg;

      emit_insn (gen_pic_load_addr (address, orig));

      pic_ref = gen_rtx_MEM (Pmode,
                             gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
                                           address));
      RTX_UNCHANGING_P (pic_ref) = 1;
      insn = emit_move_insn (reg, pic_ref);
#endif
      current_function_uses_pic_offset_table = 1;
      /* Put a REG_EQUAL note on this insn, so that it can be optimized
         by loop.  */
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, orig,
                                            REG_NOTES (insn));
      return reg;
    }
  else if (GET_CODE (orig) == CONST)
    {
      rtx base, offset;

      if (GET_CODE (XEXP (orig, 0)) == PLUS
          && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
        return orig;

      if (reg == 0)
        {
          if (reload_in_progress || reload_completed)
            abort ();
          else
            reg = gen_reg_rtx (Pmode);
        }

      if (GET_CODE (XEXP (orig, 0)) == PLUS)
        {
          base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
          offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
                                           base == reg ? 0 : reg);
        }
      else
        abort ();

      if (GET_CODE (offset) == CONST_INT)
        {
          /* The base register doesn't really matter, we only want to
             test the index for the appropriate mode.  */
          GO_IF_LEGITIMATE_INDEX (mode, 0, offset, win);

          if (! reload_in_progress && ! reload_completed)
            offset = force_reg (Pmode, offset);
          else
            abort ();

        win:
          if (GET_CODE (offset) == CONST_INT)
            return plus_constant_for_output (base, INTVAL (offset));
        }

      if (GET_MODE_SIZE (mode) > 4
          && (GET_MODE_CLASS (mode) == MODE_INT
              || TARGET_SOFT_FLOAT))
        {
          emit_insn (gen_addsi3 (reg, base, offset));
          return reg;
        }

      return gen_rtx_PLUS (Pmode, base, offset);
    }
  else if (GET_CODE (orig) == LABEL_REF)
    {
      current_function_uses_pic_offset_table = 1;

      if (NEED_GOT_RELOC)
        {
          rtx pic_ref, address = gen_reg_rtx (Pmode);

          emit_insn (gen_pic_load_addr (address, orig));
          pic_ref = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, address);

          emit_move_insn (address, pic_ref);
          return address;
        }
    }

  return orig;
}

1965 static rtx pic_rtx;
1968 is_pic (x)
1969 rtx x;
1971 if (x == pic_rtx)
1972 return 1;
1973 return 0;
1976 void
1977 arm_finalize_pic ()
1979 #ifndef AOF_ASSEMBLER
1980 rtx l1, pic_tmp, pic_tmp2, seq;
1981 rtx global_offset_table;
1983 if (current_function_uses_pic_offset_table == 0 || TARGET_SINGLE_PIC_BASE)
1984 return;
1986 if (! flag_pic)
1987 abort ();
1989 start_sequence ();
1990 l1 = gen_label_rtx ();
1992 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
1993 /* On the ARM the PC register contains 'dot + 8' at the time of the
1994 addition; on the Thumb it is 'dot + 4'. */
1995 pic_tmp = plus_constant (gen_rtx_LABEL_REF (Pmode, l1), TARGET_ARM ? 8 : 4);
1996 if (GOT_PCREL)
1997 pic_tmp2 = gen_rtx_CONST (VOIDmode,
1998 gen_rtx_PLUS (Pmode, global_offset_table, pc_rtx));
1999 else
2000 pic_tmp2 = gen_rtx_CONST (VOIDmode, global_offset_table);
2002 pic_rtx = gen_rtx_CONST (Pmode, gen_rtx_MINUS (Pmode, pic_tmp2, pic_tmp));
2004 emit_insn (gen_pic_load_addr (pic_offset_table_rtx, pic_rtx));
2005 if (TARGET_ARM)
2006 emit_insn (gen_pic_add_dot_plus_eight (pic_offset_table_rtx, l1));
2007 else
2008 emit_insn (gen_pic_add_dot_plus_four (pic_offset_table_rtx, l1));
2010 seq = gen_sequence ();
2011 end_sequence ();
2012 emit_insn_after (seq, get_insns ());
2014 /* Need to emit this whether or not we obey regdecls,
2015 since setjmp/longjmp can cause life info to screw up. */
2016 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
2017 #endif /* AOF_ASSEMBLER */
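/* Editorial sketch (illustrative only; register and label names are
   assumed, not taken from the original source): the sequence built
   above typically assembles to something like

	ldr	sl, .LPIC	@ pic_load_addr
   .LPIC1:
	add	sl, pc, sl	@ pic_add_dot_plus_eight (+4 on Thumb)
	...
   .LPIC:
	.word	_GLOBAL_OFFSET_TABLE_ - (.LPIC1 + 8)

   leaving the PIC register holding the GOT address on entry.  */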
2020 #define REG_OR_SUBREG_REG(X) \
2021 (GET_CODE (X) == REG \
2022 || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))
2024 #define REG_OR_SUBREG_RTX(X) \
2025 (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))
2027 #ifndef COSTS_N_INSNS
2028 #define COSTS_N_INSNS(N) ((N) * 4 - 2)
2029 #endif
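/* Editorial example, assuming the fallback definition above:
   COSTS_N_INSNS (1) == 2 and COSTS_N_INSNS (2) == 6, so each extra
   instruction adds 4 to the cost while single-insn operations stay
   clearly cheapest.  */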
2032 arm_rtx_costs (x, code, outer)
2033 rtx x;
2034 enum rtx_code code;
2035 enum rtx_code outer;
2037 enum machine_mode mode = GET_MODE (x);
2038 enum rtx_code subcode;
2039 int extra_cost;
2041 if (TARGET_THUMB)
2043 switch (code)
2045 case ASHIFT:
2046 case ASHIFTRT:
2047 case LSHIFTRT:
2048 case ROTATERT:
2049 case PLUS:
2050 case MINUS:
2051 case COMPARE:
2052 case NEG:
2053 case NOT:
2054 return COSTS_N_INSNS (1);
2056 case MULT:
2057 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
2059 int cycles = 0;
2060 unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));
2062 while (i)
2064 i >>= 2;
2065 cycles ++;
2067 return COSTS_N_INSNS (2) + cycles;
2069 return COSTS_N_INSNS (1) + 16;
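/* Editorial worked example: for a constant multiplier of 0x15 the
   loop above shifts i = 0x15 -> 0x5 -> 0x1 -> 0, i.e. three 2-bit
   steps, giving COSTS_N_INSNS (2) + 3; a non-constant multiplier
   falls through to the flat COSTS_N_INSNS (1) + 16 estimate.  */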
2071 case SET:
2072 return (COSTS_N_INSNS (1)
2073 + 4 * ((GET_CODE (SET_SRC (x)) == MEM)
2074 + (GET_CODE (SET_DEST (x)) == MEM)));
2076 case CONST_INT:
2077 if (outer == SET)
2079 if ((unsigned HOST_WIDE_INT) INTVAL (x) < 256)
2080 return 0;
2081 if (thumb_shiftable_const (INTVAL (x)))
2082 return COSTS_N_INSNS (2);
2083 return COSTS_N_INSNS (3);
2085 else if (outer == PLUS
2086 && INTVAL (x) < 256 && INTVAL (x) > -256)
2087 return 0;
2088 else if (outer == COMPARE
2089 && (unsigned HOST_WIDE_INT) INTVAL (x) < 256)
2090 return 0;
2091 else if (outer == ASHIFT || outer == ASHIFTRT
2092 || outer == LSHIFTRT)
2093 return 0;
2094 return COSTS_N_INSNS (2);
2096 case CONST:
2097 case CONST_DOUBLE:
2098 case LABEL_REF:
2099 case SYMBOL_REF:
2100 return COSTS_N_INSNS (3);
2102 case UDIV:
2103 case UMOD:
2104 case DIV:
2105 case MOD:
2106 return 100;
2108 case TRUNCATE:
2109 return 99;
2111 case AND:
2112 case XOR:
2113 case IOR:
2114 /* XXX guess. */
2115 return 8;
2117 case ADDRESSOF:
2118 case MEM:
2119 /* XXX another guess. */
2120 /* Memory costs quite a lot for the first word, but subsequent words
2121 load at the equivalent of a single insn each. */
2122 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
2123 + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));
2125 case IF_THEN_ELSE:
2126 /* XXX a guess. */
2127 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
2128 return 14;
2129 return 2;
2131 case ZERO_EXTEND:
2132 /* XXX still guessing. */
2133 switch (GET_MODE (XEXP (x, 0)))
2135 case QImode:
2136 return (1 + (mode == DImode ? 4 : 0)
2137 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2139 case HImode:
2140 return (4 + (mode == DImode ? 4 : 0)
2141 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2143 case SImode:
2144 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2146 default:
2147 return 99;
2150 default:
2151 return 99;
2152 #if 0
2153 case FFS:
2154 case FLOAT:
2155 case FIX:
2156 case UNSIGNED_FIX:
2157 /* XXX guess */
2158 fprintf (stderr, "unexpected code for thumb in rtx_costs: %s\n",
2159 rtx_name[code]);
2160 abort ();
2161 #endif
2165 switch (code)
2167 case MEM:
2168 /* Memory costs quite a lot for the first word, but subsequent words
2169 load at the equivalent of a single insn each. */
2170 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
2171 + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));
2173 case DIV:
2174 case MOD:
2175 return 100;
2177 case ROTATE:
2178 if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
2179 return 4;
2180 /* Fall through */
2181 case ROTATERT:
2182 if (mode != SImode)
2183 return 8;
2184 /* Fall through */
2185 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
2186 if (mode == DImode)
2187 return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
2188 + ((GET_CODE (XEXP (x, 0)) == REG
2189 || (GET_CODE (XEXP (x, 0)) == SUBREG
2190 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
2191 ? 0 : 8));
2192 return (1 + ((GET_CODE (XEXP (x, 0)) == REG
2193 || (GET_CODE (XEXP (x, 0)) == SUBREG
2194 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
2195 ? 0 : 4)
2196 + ((GET_CODE (XEXP (x, 1)) == REG
2197 || (GET_CODE (XEXP (x, 1)) == SUBREG
2198 && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
2199 || (GET_CODE (XEXP (x, 1)) == CONST_INT))
2200 ? 0 : 4));
2202 case MINUS:
2203 if (mode == DImode)
2204 return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
2205 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
2206 || (GET_CODE (XEXP (x, 0)) == CONST_INT
2207 && const_ok_for_arm (INTVAL (XEXP (x, 0)))))
2208 ? 0 : 8));
2210 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2211 return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2212 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
2213 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
2214 ? 0 : 8)
2215 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
2216 || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
2217 && const_double_rtx_ok_for_fpu (XEXP (x, 0))))
2218 ? 0 : 8));
2220 if (((GET_CODE (XEXP (x, 0)) == CONST_INT
2221 && const_ok_for_arm (INTVAL (XEXP (x, 0)))
2222 && REG_OR_SUBREG_REG (XEXP (x, 1))))
2223 || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
2224 || subcode == ASHIFTRT || subcode == LSHIFTRT
2225 || subcode == ROTATE || subcode == ROTATERT
2226 || (subcode == MULT
2227 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
2228 && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
2229 (INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
2230 && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
2231 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
2232 || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
2233 && REG_OR_SUBREG_REG (XEXP (x, 0))))
2234 return 1;
2235 /* Fall through */
2237 case PLUS:
2238 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2239 return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
2240 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2241 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
2242 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
2243 ? 0 : 8));
2245 /* Fall through */
2246 case AND: case XOR: case IOR:
2247 extra_cost = 0;
2249 /* Normally the frame registers will be split into reg+const during
2250 reload, so it is a bad idea to combine them with other instructions,
2251 since then they might not be moved outside of loops. As a compromise
2252 we allow integration with ops that have a constant as their second
2253 operand. */
2254 if ((REG_OR_SUBREG_REG (XEXP (x, 0))
2255 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
2256 && GET_CODE (XEXP (x, 1)) != CONST_INT)
2257 || (REG_OR_SUBREG_REG (XEXP (x, 1))
2258 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 1)))))
2259 extra_cost = 4;
2261 if (mode == DImode)
2262 return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
2263 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2264 || (GET_CODE (XEXP (x, 1)) == CONST_INT
2265 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
2266 ? 0 : 8));
2268 if (REG_OR_SUBREG_REG (XEXP (x, 0)))
2269 return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
2270 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2271 || (GET_CODE (XEXP (x, 1)) == CONST_INT
2272 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
2273 ? 0 : 4));
2275 else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
2276 return (1 + extra_cost
2277 + ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
2278 || subcode == LSHIFTRT || subcode == ASHIFTRT
2279 || subcode == ROTATE || subcode == ROTATERT
2280 || (subcode == MULT
2281 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2282 && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
2283 (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0)))
2284 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
2285 && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
2286 || GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
2287 ? 0 : 4));
2289 return 8;
2291 case MULT:
2292 /* There is no point basing this on the tuning, since it is always the
2293 fast variant if it exists at all. */
2294 if (arm_fast_multiply && mode == DImode
2295 && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
2296 && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
2297 || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
2298 return 8;
2300 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2301 || mode == DImode)
2302 return 30;
2304 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
2306 unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
2307 & HOST_UINT (0xffffffff));
2308 int add_cost = const_ok_for_arm (i) ? 4 : 8;
2309 int j;
2311 /* Tune as appropriate. */
2312 int booth_unit_size = ((tune_flags & FL_FAST_MULT) ? 8 : 2);
2314 for (j = 0; i && j < 32; j += booth_unit_size)
2316 i >>= booth_unit_size;
2317 add_cost += 2;
2320 return add_cost;
2323 return (((tune_flags & FL_FAST_MULT) ? 8 : 30)
2324 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
2325 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));
2327 case TRUNCATE:
2328 if (arm_fast_multiply && mode == SImode
2329 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
2330 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
2331 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
2332 == GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)))
2333 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ZERO_EXTEND
2334 || GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SIGN_EXTEND))
2335 return 8;
2336 return 99;
2338 case NEG:
2339 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2340 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
2341 /* Fall through */
2342 case NOT:
2343 if (mode == DImode)
2344 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
2346 return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
2348 case IF_THEN_ELSE:
2349 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
2350 return 14;
2351 return 2;
2353 case COMPARE:
2354 return 1;
2356 case ABS:
2357 return 4 + (mode == DImode ? 4 : 0);
2359 case SIGN_EXTEND:
2360 if (GET_MODE (XEXP (x, 0)) == QImode)
2361 return (4 + (mode == DImode ? 4 : 0)
2362 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2363 /* Fall through */
2364 case ZERO_EXTEND:
2365 switch (GET_MODE (XEXP (x, 0)))
2367 case QImode:
2368 return (1 + (mode == DImode ? 4 : 0)
2369 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2371 case HImode:
2372 return (4 + (mode == DImode ? 4 : 0)
2373 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2375 case SImode:
2376 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2378 default:
2379 break;
2381 abort ();
2383 case CONST_INT:
2384 if (const_ok_for_arm (INTVAL (x)))
2385 return outer == SET ? 2 : -1;
2386 else if (outer == AND
2387 && const_ok_for_arm (~ INTVAL (x)))
2388 return -1;
2389 else if ((outer == COMPARE
2390 || outer == PLUS || outer == MINUS)
2391 && const_ok_for_arm (- INTVAL (x)))
2392 return -1;
2393 else
2394 return 5;
2396 case CONST:
2397 case LABEL_REF:
2398 case SYMBOL_REF:
2399 return 6;
2401 case CONST_DOUBLE:
2402 if (const_double_rtx_ok_for_fpu (x))
2403 return outer == SET ? 2 : -1;
2404 else if ((outer == COMPARE || outer == PLUS)
2405 && neg_const_double_rtx_ok_for_fpu (x))
2406 return -1;
2407 return 7;
2409 default:
2410 return 99;
2415 arm_adjust_cost (insn, link, dep, cost)
2416 rtx insn;
2417 rtx link;
2418 rtx dep;
2419 int cost;
2421 rtx i_pat, d_pat;
2423 /* XXX This is not strictly true for the FPA. */
2424 if (REG_NOTE_KIND (link) == REG_DEP_ANTI
2425 || REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
2426 return 0;
2428 /* Call insns don't incur a stall, even if they follow a load. */
2429 if (REG_NOTE_KIND (link) == 0
2430 && GET_CODE (insn) == CALL_INSN)
2431 return 1;
2433 if ((i_pat = single_set (insn)) != NULL
2434 && GET_CODE (SET_SRC (i_pat)) == MEM
2435 && (d_pat = single_set (dep)) != NULL
2436 && GET_CODE (SET_DEST (d_pat)) == MEM)
2438 /* This is a load after a store, there is no conflict if the load reads
2439 from a cached area. Assume that loads from the stack, and from the
2440 constant pool are cached, and that others will miss. This is a
2441 hack. */
2443 if (CONSTANT_POOL_ADDRESS_P (XEXP (SET_SRC (i_pat), 0))
2444 || reg_mentioned_p (stack_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
2445 || reg_mentioned_p (frame_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
2446 || reg_mentioned_p (hard_frame_pointer_rtx,
2447 XEXP (SET_SRC (i_pat), 0)))
2448 return 1;
2451 return cost;
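/* Editorial illustration of the rule above: a load that immediately
   follows a store incurs no penalty when its address is in the
   constant pool or based on the stack or frame pointer (assumed
   cached, so the cost collapses to 1); a load through an arbitrary
   pointer keeps the full COST passed in.  */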
2454 /* This code has been fixed for cross compilation. */
2456 static int fpa_consts_inited = 0;
2458 char * strings_fpa[8] =
2460 "0", "1", "2", "3",
2461 "4", "5", "0.5", "10"
2464 static REAL_VALUE_TYPE values_fpa[8];
2466 static void
2467 init_fpa_table ()
2469 int i;
2470 REAL_VALUE_TYPE r;
2472 for (i = 0; i < 8; i++)
2474 r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
2475 values_fpa[i] = r;
2478 fpa_consts_inited = 1;
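/* Editorial note: the table above means the only FPA immediates are
   0, 1, 2, 3, 4, 5, 0.5 and 10.  So, for example, 4.0 is directly
   encodable, -4.0 only passes the negated test further down, and 3.5
   must be loaded from memory.  */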
2481 /* Return TRUE if rtx X is a valid immediate FPU constant. */
2484 const_double_rtx_ok_for_fpu (x)
2485 rtx x;
2487 REAL_VALUE_TYPE r;
2488 int i;
2490 if (!fpa_consts_inited)
2491 init_fpa_table ();
2493 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2494 if (REAL_VALUE_MINUS_ZERO (r))
2495 return 0;
2497 for (i = 0; i < 8; i++)
2498 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
2499 return 1;
2501 return 0;
2504 /* Return TRUE if rtx X is a valid immediate FPU constant when negated. */
2507 neg_const_double_rtx_ok_for_fpu (x)
2508 rtx x;
2510 REAL_VALUE_TYPE r;
2511 int i;
2513 if (!fpa_consts_inited)
2514 init_fpa_table ();
2516 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2517 r = REAL_VALUE_NEGATE (r);
2518 if (REAL_VALUE_MINUS_ZERO (r))
2519 return 0;
2521 for (i = 0; i < 8; i++)
2522 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
2523 return 1;
2525 return 0;
2528 /* Predicates for `match_operand' and `match_operator'. */
2530 /* s_register_operand is the same as register_operand, but it doesn't accept
2531 (SUBREG (MEM)...).
2533 This function exists because at the time it was put in it led to better
2534 code. SUBREG(MEM) always needs a reload in the places where
2535 s_register_operand is used, and this seemed to lead to excessive
2536 reloading. */
2539 s_register_operand (op, mode)
2540 register rtx op;
2541 enum machine_mode mode;
2543 if (GET_MODE (op) != mode && mode != VOIDmode)
2544 return 0;
2546 if (GET_CODE (op) == SUBREG)
2547 op = SUBREG_REG (op);
2549 /* We don't consider registers whose class is NO_REGS
2550 to be a register operand. */
2551 /* XXX might have to check for lo regs only for thumb ??? */
2552 return (GET_CODE (op) == REG
2553 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
2554 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
2557 /* Only accept reg, subreg(reg), const_int. */
2560 reg_or_int_operand (op, mode)
2561 register rtx op;
2562 enum machine_mode mode;
2564 if (GET_CODE (op) == CONST_INT)
2565 return 1;
2567 if (GET_MODE (op) != mode && mode != VOIDmode)
2568 return 0;
2570 if (GET_CODE (op) == SUBREG)
2571 op = SUBREG_REG (op);
2573 /* We don't consider registers whose class is NO_REGS
2574 to be a register operand. */
2575 return (GET_CODE (op) == REG
2576 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
2577 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
2580 /* Return 1 if OP is an item in memory, given that we are in reload. */
2583 arm_reload_memory_operand (op, mode)
2584 rtx op;
2585 enum machine_mode mode ATTRIBUTE_UNUSED;
2587 int regno = true_regnum (op);
2589 return (! CONSTANT_P (op)
2590 && (regno == -1
2591 || (GET_CODE (op) == REG
2592 && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2595 /* Return 1 if OP is a valid memory address, but not valid for a signed byte
2596 memory access (architecture V4).
2597 MODE is QImode if called when computing constraints, or VOIDmode when
2598 emitting patterns. In this latter case we cannot use memory_operand()
2599 because it will fail on badly formed MEMs, which is precisely what we are
2600 trying to catch. */
2602 bad_signed_byte_operand (op, mode)
2603 rtx op;
2604 enum machine_mode mode ATTRIBUTE_UNUSED;
2606 #if 0
2607 if ((mode == QImode && ! memory_operand (op, mode)) || GET_CODE (op) != MEM)
2608 return 0;
2609 #endif
2610 if (GET_CODE (op) != MEM)
2611 return 0;
2613 op = XEXP (op, 0);
2615 /* A sum of anything more complex than reg + reg or reg + const is bad. */
2616 if ((GET_CODE (op) == PLUS || GET_CODE (op) == MINUS)
2617 && (! s_register_operand (XEXP (op, 0), VOIDmode)
2618 || (! s_register_operand (XEXP (op, 1), VOIDmode)
2619 && GET_CODE (XEXP (op, 1)) != CONST_INT)))
2620 return 1;
2622 /* Big constants are also bad. */
2623 if (GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT
2624 && (INTVAL (XEXP (op, 1)) > 0xff
2625 || -INTVAL (XEXP (op, 1)) > 0xff))
2626 return 1;
2628 /* Everything else is good, or will automatically be made so. */
2629 return 0;
2632 /* Return TRUE for valid operands for the rhs of an ARM instruction. */
2635 arm_rhs_operand (op, mode)
2636 rtx op;
2637 enum machine_mode mode;
2639 return (s_register_operand (op, mode)
2640 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
2643 /* Return TRUE for valid operands for the rhs of an ARM instruction,
2644 or a load. */
2647 arm_rhsm_operand (op, mode)
2648 rtx op;
2649 enum machine_mode mode;
2651 return (s_register_operand (op, mode)
2652 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
2653 || memory_operand (op, mode));
2656 /* Return TRUE for valid operands for the rhs of an ARM instruction, or if a
2657 constant that is valid when negated. */
2660 arm_add_operand (op, mode)
2661 rtx op;
2662 enum machine_mode mode;
2664 if (TARGET_THUMB)
2665 return thumb_cmp_operand (op, mode);
2667 return (s_register_operand (op, mode)
2668 || (GET_CODE (op) == CONST_INT
2669 && (const_ok_for_arm (INTVAL (op))
2670 || const_ok_for_arm (-INTVAL (op)))));
2674 arm_not_operand (op, mode)
2675 rtx op;
2676 enum machine_mode mode;
2678 return (s_register_operand (op, mode)
2679 || (GET_CODE (op) == CONST_INT
2680 && (const_ok_for_arm (INTVAL (op))
2681 || const_ok_for_arm (~INTVAL (op)))));
2684 /* Return TRUE if the operand is a memory reference which contains an
2685 offsettable address. */
2687 offsettable_memory_operand (op, mode)
2688 register rtx op;
2689 enum machine_mode mode;
2691 if (mode == VOIDmode)
2692 mode = GET_MODE (op);
2694 return (mode == GET_MODE (op)
2695 && GET_CODE (op) == MEM
2696 && offsettable_address_p (reload_completed | reload_in_progress,
2697 mode, XEXP (op, 0)));
2700 /* Return TRUE if the operand is a memory reference which is, or can be
2701 made word aligned by adjusting the offset. */
2703 alignable_memory_operand (op, mode)
2704 register rtx op;
2705 enum machine_mode mode;
2707 rtx reg;
2709 if (mode == VOIDmode)
2710 mode = GET_MODE (op);
2712 if (mode != GET_MODE (op) || GET_CODE (op) != MEM)
2713 return 0;
2715 op = XEXP (op, 0);
2717 return ((GET_CODE (reg = op) == REG
2718 || (GET_CODE (op) == SUBREG
2719 && GET_CODE (reg = SUBREG_REG (op)) == REG)
2720 || (GET_CODE (op) == PLUS
2721 && GET_CODE (XEXP (op, 1)) == CONST_INT
2722 && (GET_CODE (reg = XEXP (op, 0)) == REG
2723 || (GET_CODE (XEXP (op, 0)) == SUBREG
2724 && GET_CODE (reg = SUBREG_REG (XEXP (op, 0))) == REG))))
2725 && REGNO_POINTER_ALIGN (REGNO (reg)) >= 32);
2728 /* Similar to s_register_operand, but does not allow hard integer
2729 registers. */
2731 f_register_operand (op, mode)
2732 register rtx op;
2733 enum machine_mode mode;
2735 if (GET_MODE (op) != mode && mode != VOIDmode)
2736 return 0;
2738 if (GET_CODE (op) == SUBREG)
2739 op = SUBREG_REG (op);
2741 /* We don't consider registers whose class is NO_REGS
2742 to be a register operand. */
2743 return (GET_CODE (op) == REG
2744 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
2745 || REGNO_REG_CLASS (REGNO (op)) == FPU_REGS));
2748 /* Return TRUE for valid operands for the rhs of an FPU instruction. */
2751 fpu_rhs_operand (op, mode)
2752 rtx op;
2753 enum machine_mode mode;
2755 if (s_register_operand (op, mode))
2756 return TRUE;
2758 if (GET_MODE (op) != mode && mode != VOIDmode)
2759 return FALSE;
2761 if (GET_CODE (op) == CONST_DOUBLE)
2762 return const_double_rtx_ok_for_fpu (op);
2764 return FALSE;
2768 fpu_add_operand (op, mode)
2769 rtx op;
2770 enum machine_mode mode;
2772 if (s_register_operand (op, mode))
2773 return TRUE;
2775 if (GET_MODE (op) != mode && mode != VOIDmode)
2776 return FALSE;
2778 if (GET_CODE (op) == CONST_DOUBLE)
2779 return (const_double_rtx_ok_for_fpu (op)
2780 || neg_const_double_rtx_ok_for_fpu (op));
2782 return FALSE;
2785 /* Return nonzero if OP is a constant power of two. */
2788 power_of_two_operand (op, mode)
2789 rtx op;
2790 enum machine_mode mode ATTRIBUTE_UNUSED;
2792 if (GET_CODE (op) == CONST_INT)
2794 HOST_WIDE_INT value = INTVAL (op);
2795 return value != 0 && (value & (value - 1)) == 0;
2797 return FALSE;
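/* Editorial example of the bit trick above: 8 is 1000 in binary and
   8 & 7 == 0, so it is accepted; 12 is 1100 and 12 & 11 == 1000 != 0,
   so it is rejected; the value != 0 guard keeps 0 out as well.  */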
2800 /* Return TRUE for a valid operand of a DImode operation.
2801 Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
2802 Note that this disallows MEM(REG+REG), but allows
2803 MEM(PRE/POST_INC/DEC(REG)). */
2806 di_operand (op, mode)
2807 rtx op;
2808 enum machine_mode mode;
2810 if (s_register_operand (op, mode))
2811 return TRUE;
2813 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
2814 return FALSE;
2816 if (GET_CODE (op) == SUBREG)
2817 op = SUBREG_REG (op);
2819 switch (GET_CODE (op))
2821 case CONST_DOUBLE:
2822 case CONST_INT:
2823 return TRUE;
2825 case MEM:
2826 return memory_address_p (DImode, XEXP (op, 0));
2828 default:
2829 return FALSE;
2833 /* Like di_operand, but don't accept constants. */
2835 nonimmediate_di_operand (op, mode)
2836 rtx op;
2837 enum machine_mode mode;
2839 if (s_register_operand (op, mode))
2840 return TRUE;
2842 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
2843 return FALSE;
2845 if (GET_CODE (op) == SUBREG)
2846 op = SUBREG_REG (op);
2848 if (GET_CODE (op) == MEM)
2849 return memory_address_p (DImode, XEXP (op, 0));
2851 return FALSE;
2854 /* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
2855 Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
2856 Note that this disallows MEM(REG+REG), but allows
2857 MEM(PRE/POST_INC/DEC(REG)). */
2860 soft_df_operand (op, mode)
2861 rtx op;
2862 enum machine_mode mode;
2864 if (s_register_operand (op, mode))
2865 return TRUE;
2867 if (mode != VOIDmode && GET_MODE (op) != mode)
2868 return FALSE;
2870 if (GET_CODE (op) == SUBREG && CONSTANT_P (SUBREG_REG (op)))
2871 return FALSE;
2873 if (GET_CODE (op) == SUBREG)
2874 op = SUBREG_REG (op);
2876 switch (GET_CODE (op))
2878 case CONST_DOUBLE:
2879 return TRUE;
2881 case MEM:
2882 return memory_address_p (DFmode, XEXP (op, 0));
2884 default:
2885 return FALSE;
2889 /* Like soft_df_operand, but don't accept constants. */
2891 nonimmediate_soft_df_operand (op, mode)
2892 rtx op;
2893 enum machine_mode mode;
2895 if (s_register_operand (op, mode))
2896 return TRUE;
2898 if (mode != VOIDmode && GET_MODE (op) != mode)
2899 return FALSE;
2901 if (GET_CODE (op) == SUBREG)
2902 op = SUBREG_REG (op);
2904 if (GET_CODE (op) == MEM)
2905 return memory_address_p (DFmode, XEXP (op, 0));
2906 return FALSE;
2909 /* Return TRUE for valid index operands. */
2911 index_operand (op, mode)
2912 rtx op;
2913 enum machine_mode mode;
2915 return (s_register_operand (op, mode)
2916 || (immediate_operand (op, mode)
2917 && (GET_CODE (op) != CONST_INT
2918 || (INTVAL (op) < 4096 && INTVAL (op) > -4096))));
2921 /* Return TRUE for valid shifts by a constant. This also accepts any
2922 power of two on the (somewhat overly relaxed) assumption that the
2923 shift operator in this case was a mult. */
2926 const_shift_operand (op, mode)
2927 rtx op;
2928 enum machine_mode mode;
2930 return (power_of_two_operand (op, mode)
2931 || (immediate_operand (op, mode)
2932 && (GET_CODE (op) != CONST_INT
2933 || (INTVAL (op) < 32 && INTVAL (op) > 0))));
2936 /* Return TRUE for arithmetic operators which can be combined with a multiply
2937 (shift). */
2940 shiftable_operator (x, mode)
2941 rtx x;
2942 enum machine_mode mode;
2944 if (GET_MODE (x) != mode)
2945 return FALSE;
2946 else
2948 enum rtx_code code = GET_CODE (x);
2950 return (code == PLUS || code == MINUS
2951 || code == IOR || code == XOR || code == AND);
2955 /* Return TRUE for binary logical operators. */
2958 logical_binary_operator (x, mode)
2959 rtx x;
2960 enum machine_mode mode;
2962 if (GET_MODE (x) != mode)
2963 return FALSE;
2964 else
2966 enum rtx_code code = GET_CODE (x);
2968 return (code == IOR || code == XOR || code == AND);
2972 /* Return TRUE for shift operators. */
2975 shift_operator (x, mode)
2976 rtx x;
2977 enum machine_mode mode;
2979 if (GET_MODE (x) != mode)
2980 return FALSE;
2981 else
2983 enum rtx_code code = GET_CODE (x);
2985 if (code == MULT)
2986 return power_of_two_operand (XEXP (x, 1), mode);
2988 return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
2989 || code == ROTATERT);
2993 /* Return TRUE if x is EQ or NE. */
2995 equality_operator (x, mode)
2996 rtx x;
2997 enum machine_mode mode ATTRIBUTE_UNUSED;
2999 return GET_CODE (x) == EQ || GET_CODE (x) == NE;
3002 /* Return TRUE for SMIN SMAX UMIN UMAX operators. */
3004 minmax_operator (x, mode)
3005 rtx x;
3006 enum machine_mode mode;
3008 enum rtx_code code = GET_CODE (x);
3010 if (GET_MODE (x) != mode)
3011 return FALSE;
3013 return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
3016 /* Return TRUE if this is the condition code register; if we aren't given
3017 a mode, accept any CCmode-class register. */
3019 cc_register (x, mode)
3020 rtx x;
3021 enum machine_mode mode;
3023 if (mode == VOIDmode)
3025 mode = GET_MODE (x);
3027 if (GET_MODE_CLASS (mode) != MODE_CC)
3028 return FALSE;
3031 if ( GET_MODE (x) == mode
3032 && GET_CODE (x) == REG
3033 && REGNO (x) == CC_REGNUM)
3034 return TRUE;
3036 return FALSE;
3039 /* Return TRUE if this is the condition code register; if we aren't given
3040 a mode, accept any CCmode-class register which indicates a dominance
3041 expression. */
3043 dominant_cc_register (x, mode)
3044 rtx x;
3045 enum machine_mode mode;
3047 if (mode == VOIDmode)
3049 mode = GET_MODE (x);
3051 if (GET_MODE_CLASS (mode) != MODE_CC)
3052 return FALSE;
3055 if ( mode != CC_DNEmode && mode != CC_DEQmode
3056 && mode != CC_DLEmode && mode != CC_DLTmode
3057 && mode != CC_DGEmode && mode != CC_DGTmode
3058 && mode != CC_DLEUmode && mode != CC_DLTUmode
3059 && mode != CC_DGEUmode && mode != CC_DGTUmode)
3060 return FALSE;
3062 return cc_register (x, mode);
3065 /* Return TRUE if X references a SYMBOL_REF. */
3067 symbol_mentioned_p (x)
3068 rtx x;
3070 register const char * fmt;
3071 register int i;
3073 if (GET_CODE (x) == SYMBOL_REF)
3074 return 1;
3076 fmt = GET_RTX_FORMAT (GET_CODE (x));
3078 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3080 if (fmt[i] == 'E')
3082 register int j;
3084 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3085 if (symbol_mentioned_p (XVECEXP (x, i, j)))
3086 return 1;
3088 else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
3089 return 1;
3092 return 0;
3095 /* Return TRUE if X references a LABEL_REF. */
3097 label_mentioned_p (x)
3098 rtx x;
3100 register const char * fmt;
3101 register int i;
3103 if (GET_CODE (x) == LABEL_REF)
3104 return 1;
3106 fmt = GET_RTX_FORMAT (GET_CODE (x));
3107 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3109 if (fmt[i] == 'E')
3111 register int j;
3113 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3114 if (label_mentioned_p (XVECEXP (x, i, j)))
3115 return 1;
3117 else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
3118 return 1;
3121 return 0;
3124 enum rtx_code
3125 minmax_code (x)
3126 rtx x;
3128 enum rtx_code code = GET_CODE (x);
3130 if (code == SMAX)
3131 return GE;
3132 else if (code == SMIN)
3133 return LE;
3134 else if (code == UMIN)
3135 return LEU;
3136 else if (code == UMAX)
3137 return GEU;
3139 abort ();
3142 /* Return 1 if memory locations are adjacent. */
3144 adjacent_mem_locations (a, b)
3145 rtx a, b;
3147 int val0 = 0, val1 = 0;
3148 int reg0, reg1;
3150 if ((GET_CODE (XEXP (a, 0)) == REG
3151 || (GET_CODE (XEXP (a, 0)) == PLUS
3152 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
3153 && (GET_CODE (XEXP (b, 0)) == REG
3154 || (GET_CODE (XEXP (b, 0)) == PLUS
3155 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
3157 if (GET_CODE (XEXP (a, 0)) == PLUS)
3159 reg0 = REGNO (XEXP (XEXP (a, 0), 0));
3160 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
3162 else
3163 reg0 = REGNO (XEXP (a, 0));
3164 if (GET_CODE (XEXP (b, 0)) == PLUS)
3166 reg1 = REGNO (XEXP (XEXP (b, 0), 0));
3167 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
3169 else
3170 reg1 = REGNO (XEXP (b, 0));
3171 return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
3173 return 0;
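/* Editorial example: (mem (reg r3)) and (mem (plus (reg r3) 4)) are
   adjacent in either order (val0 - val1 == +/-4); (mem (reg r3)) and
   (mem (plus (reg r3) 8)) are not, and addresses based on different
   registers always fail.  */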
3176 /* Return 1 if OP is a load multiple operation. It is known to be
3177 parallel and the first section will be tested. */
3179 load_multiple_operation (op, mode)
3180 rtx op;
3181 enum machine_mode mode ATTRIBUTE_UNUSED;
3183 HOST_WIDE_INT count = XVECLEN (op, 0);
3184 int dest_regno;
3185 rtx src_addr;
3186 HOST_WIDE_INT i = 1, base = 0;
3187 rtx elt;
3189 if (count <= 1
3190 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
3191 return 0;
3193 /* Check to see if this might be a write-back. */
3194 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
3196 i++;
3197 base = 1;
3199 /* Now check it more carefully. */
3200 if (GET_CODE (SET_DEST (elt)) != REG
3201 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
3202 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
3203 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
3204 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
3205 return 0;
3208 /* Perform a quick check so we don't blow up below. */
3209 if (count <= i
3210 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
3211 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
3212 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
3213 return 0;
3215 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
3216 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);
3218 for (; i < count; i++)
3220 elt = XVECEXP (op, 0, i);
3222 if (GET_CODE (elt) != SET
3223 || GET_CODE (SET_DEST (elt)) != REG
3224 || GET_MODE (SET_DEST (elt)) != SImode
3225 || REGNO (SET_DEST (elt)) != (unsigned int)(dest_regno + i - base)
3226 || GET_CODE (SET_SRC (elt)) != MEM
3227 || GET_MODE (SET_SRC (elt)) != SImode
3228 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
3229 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
3230 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
3231 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
3232 return 0;
3235 return 1;
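/* Editorial sketch of the PARALLEL shape accepted above, shown with
   write-back for a three-register load (an ldmia rb!, {r4-r6}):

     (parallel [(set (reg rb) (plus (reg rb) (const_int 12)))
                (set (reg:SI r4) (mem:SI (reg rb)))
                (set (reg:SI r5) (mem:SI (plus (reg rb) (const_int 4))))
                (set (reg:SI r6) (mem:SI (plus (reg rb) (const_int 8))))])

   with consecutive destination registers and offsets ascending by 4.  */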
3238 /* Return 1 if OP is a store multiple operation. It is known to be
3239 parallel and the first section will be tested. */
3241 store_multiple_operation (op, mode)
3242 rtx op;
3243 enum machine_mode mode ATTRIBUTE_UNUSED;
3245 HOST_WIDE_INT count = XVECLEN (op, 0);
3246 int src_regno;
3247 rtx dest_addr;
3248 HOST_WIDE_INT i = 1, base = 0;
3249 rtx elt;
3251 if (count <= 1
3252 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
3253 return 0;
3255 /* Check to see if this might be a write-back. */
3256 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
3258 i++;
3259 base = 1;
3261 /* Now check it more carefully. */
3262 if (GET_CODE (SET_DEST (elt)) != REG
3263 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
3264 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
3265 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
3266 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
3267 return 0;
3270 /* Perform a quick check so we don't blow up below. */
3271 if (count <= i
3272 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
3273 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
3274 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
3275 return 0;
3277 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
3278 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);
3280 for (; i < count; i++)
3282 elt = XVECEXP (op, 0, i);
3284 if (GET_CODE (elt) != SET
3285 || GET_CODE (SET_SRC (elt)) != REG
3286 || GET_MODE (SET_SRC (elt)) != SImode
3287 || REGNO (SET_SRC (elt)) != (unsigned int)(src_regno + i - base)
3288 || GET_CODE (SET_DEST (elt)) != MEM
3289 || GET_MODE (SET_DEST (elt)) != SImode
3290 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
3291 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
3292 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
3293 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
3294 return 0;
3297 return 1;
3301 load_multiple_sequence (operands, nops, regs, base, load_offset)
3302 rtx * operands;
3303 int nops;
3304 int * regs;
3305 int * base;
3306 HOST_WIDE_INT * load_offset;
3308 int unsorted_regs[4];
3309 HOST_WIDE_INT unsorted_offsets[4];
3310 int order[4];
3311 int base_reg = -1;
3312 int i;
3314 /* Can only handle 2, 3, or 4 insns at present, though could be easily
3315 extended if required. */
3316 if (nops < 2 || nops > 4)
3317 abort ();
3319 /* Loop over the operands and check that the memory references are
3320 suitable (i.e. immediate offsets from the same base register). At
3321 the same time, extract the target register, and the memory
3322 offsets. */
3323 for (i = 0; i < nops; i++)
3325 rtx reg;
3326 rtx offset;
3328 /* Convert a subreg of a mem into the mem itself. */
3329 if (GET_CODE (operands[nops + i]) == SUBREG)
3330 operands[nops + i] = alter_subreg (operands[nops + i]);
3332 if (GET_CODE (operands[nops + i]) != MEM)
3333 abort ();
3335 /* Don't reorder volatile memory references; it doesn't seem worth
3336 looking for the case where the order is ok anyway. */
3337 if (MEM_VOLATILE_P (operands[nops + i]))
3338 return 0;
3340 offset = const0_rtx;
3342 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
3343 || (GET_CODE (reg) == SUBREG
3344 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3345 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
3346 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
3347 == REG)
3348 || (GET_CODE (reg) == SUBREG
3349 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3350 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
3351 == CONST_INT)))
3353 if (i == 0)
3355 base_reg = REGNO (reg);
3356 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
3357 ? REGNO (operands[i])
3358 : REGNO (SUBREG_REG (operands[i])));
3359 order[0] = 0;
3361 else
3363 if (base_reg != (int) REGNO (reg))
3364 /* Not addressed from the same base register. */
3365 return 0;
3367 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
3368 ? REGNO (operands[i])
3369 : REGNO (SUBREG_REG (operands[i])));
3370 if (unsorted_regs[i] < unsorted_regs[order[0]])
3371 order[0] = i;
3374 /* If it isn't an integer register, or if it overwrites the
3375 base register but isn't the last insn in the list, then
3376 we can't do this. */
3377 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14
3378 || (i != nops - 1 && unsorted_regs[i] == base_reg))
3379 return 0;
3381 unsorted_offsets[i] = INTVAL (offset);
3383 else
3384 /* Not a suitable memory address. */
3385 return 0;
3388 /* All the useful information has now been extracted from the
3389 operands into unsorted_regs and unsorted_offsets; additionally,
3390 order[0] has been set to the lowest numbered register in the
3391 list. Sort the registers into order, and check that the memory
3392 offsets are ascending and adjacent. */
3394 for (i = 1; i < nops; i++)
3396 int j;
3398 order[i] = order[i - 1];
3399 for (j = 0; j < nops; j++)
3400 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
3401 && (order[i] == order[i - 1]
3402 || unsorted_regs[j] < unsorted_regs[order[i]]))
3403 order[i] = j;
3405 /* Have we found a suitable register? If not, one must be used more
3406 than once. */
3407 if (order[i] == order[i - 1])
3408 return 0;
3410 /* Is the memory address adjacent and ascending? */
3411 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
3412 return 0;
3415 if (base)
3417 *base = base_reg;
3419 for (i = 0; i < nops; i++)
3420 regs[i] = unsorted_regs[order[i]];
3422 *load_offset = unsorted_offsets[order[0]];
3425 if (unsorted_offsets[order[0]] == 0)
3426 return 1; /* ldmia */
3428 if (unsorted_offsets[order[0]] == 4)
3429 return 2; /* ldmib */
3431 if (unsorted_offsets[order[nops - 1]] == 0)
3432 return 3; /* ldmda */
3434 if (unsorted_offsets[order[nops - 1]] == -4)
3435 return 4; /* ldmdb */
3437 /* For ARM8,9 & StrongARM, 2 ldr instructions are faster than an ldm
3438 if the offset isn't small enough. The reason 2 ldrs are faster
3439 is because these ARMs are able to do more than one cache access
3440 in a single cycle. The ARM9 and StrongARM have Harvard caches,
3441 whilst the ARM8 has a double bandwidth cache. This means that
3442 these cores can do both an instruction fetch and a data fetch in
3443 a single cycle, so the trick of calculating the address into a
3444 scratch register (one of the result regs) and then doing a load
3445 multiple actually becomes slower (and no smaller in code size).
3446 That is the transformation
3448 ldr rd1, [rbase + offset]
3449 ldr rd2, [rbase + offset + 4]
3451 to
3453 add rd1, rbase, offset
3454 ldmia rd1, {rd1, rd2}
3456 produces worse code -- '3 cycles + any stalls on rd2' instead of
3457 '2 cycles + any stalls on rd2'. On ARMs with only one cache
3458 access per cycle, the first sequence could never complete in less
3459 than 6 cycles, whereas the ldm sequence would only take 5 and
3460 would make better use of sequential accesses if not hitting the
3461 cache.
3463 We cheat here and test 'arm_ld_sched' which we currently know to
3464 only be true for the ARM8, ARM9 and StrongARM. If this ever
3465 changes, then the test below needs to be reworked. */
3466 if (nops == 2 && arm_ld_sched)
3467 return 0;
3469 /* Can't do it without setting up the offset, only do this if it takes
3470 no more than one insn. */
3471 return (const_ok_for_arm (unsorted_offsets[order[0]])
3472 || const_ok_for_arm (-unsorted_offsets[order[0]])) ? 5 : 0;
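/* Editorial example of the return codes above: loads of r0 from
   [r4, #0] and r1 from [r4, #4] give 1 (ldmia); offsets 4 and 8 give
   2 (ldmib); offsets -4 and 0 give 3 (ldmda); offsets -8 and -4 give
   4 (ldmdb); any other adjacent run gives 5 (base adjustment first)
   or 0 when two plain ldrs are preferable.  */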
3475 char *
3476 emit_ldm_seq (operands, nops)
3477 rtx * operands;
3478 int nops;
3480 int regs[4];
3481 int base_reg;
3482 HOST_WIDE_INT offset;
3483 char buf[100];
3484 int i;
3486 switch (load_multiple_sequence (operands, nops, regs, &base_reg, &offset))
3488 case 1:
3489 strcpy (buf, "ldm%?ia\t");
3490 break;
3492 case 2:
3493 strcpy (buf, "ldm%?ib\t");
3494 break;
3496 case 3:
3497 strcpy (buf, "ldm%?da\t");
3498 break;
3500 case 4:
3501 strcpy (buf, "ldm%?db\t");
3502 break;
3504 case 5:
3505 if (offset >= 0)
3506 sprintf (buf, "add%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
3507 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
3508 (long) offset);
3509 else
3510 sprintf (buf, "sub%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
3511 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
3512 (long) -offset);
3513 output_asm_insn (buf, operands);
3514 base_reg = regs[0];
3515 strcpy (buf, "ldm%?ia\t");
3516 break;
3518 default:
3519 abort ();
3522 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
3523 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
3525 for (i = 1; i < nops; i++)
3526 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
3527 reg_names[regs[i]]);
3529 strcat (buf, "}\t%@ phole ldm");
3531 output_asm_insn (buf, operands);
3532 return "";
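/* Editorial illustration: for case 5 with base r4, offset 8 and
   target registers r0/r1, the code above would emit roughly

	add	r0, r4, #8
	ldmia	r0, {r0, r1}	@ phole ldm

   computing the address into the first destination register.  */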
3536 store_multiple_sequence (operands, nops, regs, base, load_offset)
3537 rtx * operands;
3538 int nops;
3539 int * regs;
3540 int * base;
3541 HOST_WIDE_INT * load_offset;
3543 int unsorted_regs[4];
3544 HOST_WIDE_INT unsorted_offsets[4];
3545 int order[4];
3546 int base_reg = -1;
3547 int i;
3549 /* Can only handle 2, 3, or 4 insns at present, though could be easily
3550 extended if required. */
3551 if (nops < 2 || nops > 4)
3552 abort ();
3554 /* Loop over the operands and check that the memory references are
3555 suitable (i.e. immediate offsets from the same base register). At
3556 the same time, extract the target register, and the memory
3557 offsets. */
3558 for (i = 0; i < nops; i++)
3560 rtx reg;
3561 rtx offset;
3563 /* Convert a subreg of a mem into the mem itself. */
3564 if (GET_CODE (operands[nops + i]) == SUBREG)
3565 operands[nops + i] = alter_subreg (operands[nops + i]);
3567 if (GET_CODE (operands[nops + i]) != MEM)
3568 abort ();
3570 /* Don't reorder volatile memory references; it doesn't seem worth
3571 looking for the case where the order is ok anyway. */
3572 if (MEM_VOLATILE_P (operands[nops + i]))
3573 return 0;
3575 offset = const0_rtx;
3577 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
3578 || (GET_CODE (reg) == SUBREG
3579 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3580 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
3581 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
3582 == REG)
3583 || (GET_CODE (reg) == SUBREG
3584 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3585 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
3586 == CONST_INT)))
3588 if (i == 0)
3590 base_reg = REGNO (reg);
3591 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
3592 ? REGNO (operands[i])
3593 : REGNO (SUBREG_REG (operands[i])));
3594 order[0] = 0;
3596 else
3598 if (base_reg != (int) REGNO (reg))
3599 /* Not addressed from the same base register. */
3600 return 0;
3602 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
3603 ? REGNO (operands[i])
3604 : REGNO (SUBREG_REG (operands[i])));
3605 if (unsorted_regs[i] < unsorted_regs[order[0]])
3606 order[0] = i;
3609 /* If it isn't an integer register, then we can't do this. */
3610 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14)
3611 return 0;
3613 unsorted_offsets[i] = INTVAL (offset);
3615 else
3616 /* Not a suitable memory address. */
3617 return 0;
3620 /* All the useful information has now been extracted from the
3621 operands into unsorted_regs and unsorted_offsets; additionally,
3622 order[0] has been set to the lowest numbered register in the
3623 list. Sort the registers into order, and check that the memory
3624 offsets are ascending and adjacent. */
3626 for (i = 1; i < nops; i++)
3628 int j;
3630 order[i] = order[i - 1];
3631 for (j = 0; j < nops; j++)
3632 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
3633 && (order[i] == order[i - 1]
3634 || unsorted_regs[j] < unsorted_regs[order[i]]))
3635 order[i] = j;
3637 /* Have we found a suitable register? If not, one must be used more
3638 than once. */
3639 if (order[i] == order[i - 1])
3640 return 0;
3642 /* Is the memory address adjacent and ascending? */
3643 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
3644 return 0;
3647 if (base)
3649 *base = base_reg;
3651 for (i = 0; i < nops; i++)
3652 regs[i] = unsorted_regs[order[i]];
3654 *load_offset = unsorted_offsets[order[0]];
3657 if (unsorted_offsets[order[0]] == 0)
3658 return 1; /* stmia */
3660 if (unsorted_offsets[order[0]] == 4)
3661 return 2; /* stmib */
3663 if (unsorted_offsets[order[nops - 1]] == 0)
3664 return 3; /* stmda */
3666 if (unsorted_offsets[order[nops - 1]] == -4)
3667 return 4; /* stmdb */
3669 return 0;
3672 char *
3673 emit_stm_seq (operands, nops)
3674 rtx * operands;
3675 int nops;
3677 int regs[4];
3678 int base_reg;
3679 HOST_WIDE_INT offset;
3680 char buf[100];
3681 int i;
3683 switch (store_multiple_sequence (operands, nops, regs, &base_reg, &offset))
3685 case 1:
3686 strcpy (buf, "stm%?ia\t");
3687 break;
3689 case 2:
3690 strcpy (buf, "stm%?ib\t");
3691 break;
3693 case 3:
3694 strcpy (buf, "stm%?da\t");
3695 break;
3697 case 4:
3698 strcpy (buf, "stm%?db\t");
3699 break;
3701 default:
3702 abort ();
3705 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
3706 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
3708 for (i = 1; i < nops; i++)
3709 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
3710 reg_names[regs[i]]);
3712 strcat (buf, "}\t%@ phole stm");
3714 output_asm_insn (buf, operands);
3715 return "";
3719 multi_register_push (op, mode)
3720 rtx op;
3721 enum machine_mode mode ATTRIBUTE_UNUSED;
3723 if (GET_CODE (op) != PARALLEL
3724 || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
3725 || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
3726 || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != 2))
3727 return 0;
3729 return 1;
3732 /* Routines for use with attributes. */
3734 /* Return nonzero if ATTR is a valid attribute for DECL.
3735 ARGS are the arguments supplied with ATTR.
3738 Supported attributes:
3740 naked:
3741 don't output any prologue or epilogue code, the user is assumed
3742 to do the right thing.
3744 interfacearm:
3745 Always assume that this function will be entered in ARM mode,
3746 not Thumb mode, and that the caller wishes to be returned to in
3747 ARM mode. */
3749 arm_valid_machine_decl_attribute (decl, attr, args)
3750 tree decl;
3751 tree attr;
3752 tree args;
3754 if (args != NULL_TREE)
3755 return 0;
3757 if (is_attribute_p ("naked", attr))
3758 return TREE_CODE (decl) == FUNCTION_DECL;
3760 #ifdef ARM_PE
3761 if (is_attribute_p ("interfacearm", attr))
3762 return TREE_CODE (decl) == FUNCTION_DECL;
3763 #endif /* ARM_PE */
3765 return 0;
3768 /* Return non-zero if FUNC is a naked function. */
3769 static int
3770 arm_naked_function_p (func)
3771 tree func;
3773 tree a;
3775 if (TREE_CODE (func) != FUNCTION_DECL)
3776 abort ();
3778 a = lookup_attribute ("naked", DECL_MACHINE_ATTRIBUTES (func));
3779 return a != NULL_TREE;
3782 /* Routines for use in generating RTL. */
3784 arm_gen_load_multiple (base_regno, count, from, up, write_back, unchanging_p,
3785 in_struct_p, scalar_p)
3786 int base_regno;
3787 int count;
3788 rtx from;
3789 int up;
3790 int write_back;
3791 int unchanging_p;
3792 int in_struct_p;
3793 int scalar_p;
3795 int i = 0, j;
3796 rtx result;
3797 int sign = up ? 1 : -1;
3798 rtx mem;
3800 result = gen_rtx_PARALLEL (VOIDmode,
3801 rtvec_alloc (count + (write_back ? 1 : 0)));
3802 if (write_back)
3804 XVECEXP (result, 0, 0)
3805 = gen_rtx_SET (GET_MODE (from), from,
3806 plus_constant (from, count * 4 * sign));
3807 i = 1;
3808 count++;
3811 for (j = 0; i < count; i++, j++)
3813 mem = gen_rtx_MEM (SImode, plus_constant (from, j * 4 * sign));
3814 RTX_UNCHANGING_P (mem) = unchanging_p;
3815 MEM_IN_STRUCT_P (mem) = in_struct_p;
3816 MEM_SCALAR_P (mem) = scalar_p;
3817 XVECEXP (result, 0, i)
3818 = gen_rtx_SET (VOIDmode, gen_rtx_REG (SImode, base_regno + j), mem);
3821 return result;
3825 arm_gen_store_multiple (base_regno, count, to, up, write_back, unchanging_p,
3826 in_struct_p, scalar_p)
3827 int base_regno;
3828 int count;
3829 rtx to;
3830 int up;
3831 int write_back;
3832 int unchanging_p;
3833 int in_struct_p;
3834 int scalar_p;
3836 int i = 0, j;
3837 rtx result;
3838 int sign = up ? 1 : -1;
3839 rtx mem;
3841 result = gen_rtx_PARALLEL (VOIDmode,
3842 rtvec_alloc (count + (write_back ? 1 : 0)));
3843 if (write_back)
3845 XVECEXP (result, 0, 0)
3846 = gen_rtx_SET (GET_MODE (to), to,
3847 plus_constant (to, count * 4 * sign));
3848 i = 1;
3849 count++;
3852 for (j = 0; i < count; i++, j++)
3854 mem = gen_rtx_MEM (SImode, plus_constant (to, j * 4 * sign));
3855 RTX_UNCHANGING_P (mem) = unchanging_p;
3856 MEM_IN_STRUCT_P (mem) = in_struct_p;
3857 MEM_SCALAR_P (mem) = scalar_p;
3859 XVECEXP (result, 0, i)
3860 = gen_rtx_SET (VOIDmode, mem, gen_rtx_REG (SImode, base_regno + j));
3863 return result;
3867 arm_gen_movstrqi (operands)
3868 rtx * operands;
3870 HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
3871 int i;
3872 rtx src, dst;
3873 rtx st_src, st_dst, fin_src, fin_dst;
3874 rtx part_bytes_reg = NULL;
3875 rtx mem;
3876 int dst_unchanging_p, dst_in_struct_p, src_unchanging_p, src_in_struct_p;
3877 int dst_scalar_p, src_scalar_p;
3879 if (GET_CODE (operands[2]) != CONST_INT
3880 || GET_CODE (operands[3]) != CONST_INT
3881 || INTVAL (operands[2]) > 64
3882 || INTVAL (operands[3]) & 3)
3883 return 0;
3885 st_dst = XEXP (operands[0], 0);
3886 st_src = XEXP (operands[1], 0);
3888 dst_unchanging_p = RTX_UNCHANGING_P (operands[0]);
3889 dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
3890 dst_scalar_p = MEM_SCALAR_P (operands[0]);
3891 src_unchanging_p = RTX_UNCHANGING_P (operands[1]);
3892 src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);
3893 src_scalar_p = MEM_SCALAR_P (operands[1]);
3895 fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
3896 fin_src = src = copy_to_mode_reg (SImode, st_src);
3898 in_words_to_go = NUM_INTS (INTVAL (operands[2]));
3899 out_words_to_go = INTVAL (operands[2]) / 4;
3900 last_bytes = INTVAL (operands[2]) & 3;
3902 if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
3903 part_bytes_reg = gen_rtx_REG (SImode, (in_words_to_go - 1) & 3);
3905 for (i = 0; in_words_to_go >= 2; i+=4)
3907 if (in_words_to_go > 4)
3908 emit_insn (arm_gen_load_multiple (0, 4, src, TRUE, TRUE,
3909 src_unchanging_p,
3910 src_in_struct_p,
3911 src_scalar_p));
3912 else
3913 emit_insn (arm_gen_load_multiple (0, in_words_to_go, src, TRUE,
3914 FALSE, src_unchanging_p,
3915 src_in_struct_p, src_scalar_p));
3917 if (out_words_to_go)
3919 if (out_words_to_go > 4)
3920 emit_insn (arm_gen_store_multiple (0, 4, dst, TRUE, TRUE,
3921 dst_unchanging_p,
3922 dst_in_struct_p,
3923 dst_scalar_p));
3924 else if (out_words_to_go != 1)
3925 emit_insn (arm_gen_store_multiple (0, out_words_to_go,
3926 dst, TRUE,
3927 (last_bytes == 0
3928 ? FALSE : TRUE),
3929 dst_unchanging_p,
3930 dst_in_struct_p,
3931 dst_scalar_p));
3932 else
3934 mem = gen_rtx_MEM (SImode, dst);
3935 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
3936 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
3937 MEM_SCALAR_P (mem) = dst_scalar_p;
3938 emit_move_insn (mem, gen_rtx_REG (SImode, 0));
3939 if (last_bytes != 0)
3940 emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
3944 in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
3945 out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
3948 /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do. */
3949 if (out_words_to_go)
3951 rtx sreg;
3953 mem = gen_rtx_MEM (SImode, src);
3954 RTX_UNCHANGING_P (mem) = src_unchanging_p;
3955 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
3956 MEM_SCALAR_P (mem) = src_scalar_p;
3957 emit_move_insn (sreg = gen_reg_rtx (SImode), mem);
3958 emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));
3960 mem = gen_rtx_MEM (SImode, dst);
3961 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
3962 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
3963 MEM_SCALAR_P (mem) = dst_scalar_p;
3964 emit_move_insn (mem, sreg);
3965 emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
3966 in_words_to_go--;
3968 if (in_words_to_go) /* Sanity check */
3969 abort ();
3972 if (in_words_to_go)
3974 if (in_words_to_go < 0)
3975 abort ();
3977 mem = gen_rtx_MEM (SImode, src);
3978 RTX_UNCHANGING_P (mem) = src_unchanging_p;
3979 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
3980 MEM_SCALAR_P (mem) = src_scalar_p;
3981 part_bytes_reg = copy_to_mode_reg (SImode, mem);
3984 if (last_bytes && part_bytes_reg == NULL)
3985 abort ();
3987 if (BYTES_BIG_ENDIAN && last_bytes)
3989 rtx tmp = gen_reg_rtx (SImode);
3991 /* The bytes we want are in the top end of the word. */
3992 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
3993 GEN_INT (8 * (4 - last_bytes))));
3994 part_bytes_reg = tmp;
3996 while (last_bytes)
3998 mem = gen_rtx_MEM (QImode, plus_constant (dst, last_bytes - 1));
3999 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4000 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4001 MEM_SCALAR_P (mem) = dst_scalar_p;
4002 emit_move_insn (mem, gen_rtx_SUBREG (QImode, part_bytes_reg, 0));
4004 if (--last_bytes)
4006 tmp = gen_reg_rtx (SImode);
4007 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
4008 part_bytes_reg = tmp;
4013 else
4015 if (last_bytes > 1)
4017 mem = gen_rtx_MEM (HImode, dst);
4018 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4019 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4020 MEM_SCALAR_P (mem) = dst_scalar_p;
4021 emit_move_insn (mem, gen_rtx_SUBREG (HImode, part_bytes_reg, 0));
4022 last_bytes -= 2;
4023 if (last_bytes)
4025 rtx tmp = gen_reg_rtx (SImode);
4027 emit_insn (gen_addsi3 (dst, dst, GEN_INT (2)));
4028 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (16)));
4029 part_bytes_reg = tmp;
4033 if (last_bytes)
4035 mem = gen_rtx_MEM (QImode, dst);
4036 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4037 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4038 MEM_SCALAR_P (mem) = dst_scalar_p;
4039 emit_move_insn (mem, gen_rtx_SUBREG (QImode, part_bytes_reg, 0));
4043 return 1;
4046 /* Generate a memory reference for a half word, such that it will be loaded
4047 into the top 16 bits of the word. We can assume that the address is
4048 known to be alignable and of the form reg, or plus (reg, const). */
4050 arm_gen_rotated_half_load (memref)
4051 rtx memref;
4053 HOST_WIDE_INT offset = 0;
4054 rtx base = XEXP (memref, 0);
4056 if (GET_CODE (base) == PLUS)
4058 offset = INTVAL (XEXP (base, 1));
4059 base = XEXP (base, 0);
4062 /* If we aren't allowed to generate unaligned addresses, then fail. */
4063 if (TARGET_MMU_TRAPS
4064 && ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 0)))
4065 return NULL;
4067 base = gen_rtx_MEM (SImode, plus_constant (base, offset & ~2));
4069 if ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 2))
4070 return base;
4072 return gen_rtx_ROTATE (SImode, base, GEN_INT (16));
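/* Editorial example, little-endian case: a halfword at offset 0 is
   fetched by loading the containing word and rotating it by 16 so the
   halfword lands in the top 16 bits; for a halfword at offset 2 the
   plain word load already leaves it in the top half, so the MEM is
   returned unrotated.  */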
4075 static enum machine_mode
4076 select_dominance_cc_mode (x, y, cond_or)
4077 rtx x;
4078 rtx y;
4079 HOST_WIDE_INT cond_or;
4081 enum rtx_code cond1, cond2;
4082 int swapped = 0;
4084 /* Currently we will probably get the wrong result if the individual
4085 comparisons are not simple. This also ensures that it is safe to
4086 reverse a comparison if necessary. */
4087 if ((arm_select_cc_mode (cond1 = GET_CODE (x), XEXP (x, 0), XEXP (x, 1))
4088 != CCmode)
4089 || (arm_select_cc_mode (cond2 = GET_CODE (y), XEXP (y, 0), XEXP (y, 1))
4090 != CCmode))
4091 return CCmode;
4093 if (cond_or)
4094 cond1 = reverse_condition (cond1);
4096 /* If the comparisons are not equal, and one doesn't dominate the other,
4097 then we can't do this. */
4098 if (cond1 != cond2
4099 && ! comparison_dominates_p (cond1, cond2)
4100 && (swapped = 1, ! comparison_dominates_p (cond2, cond1)))
4101 return CCmode;
4103 if (swapped)
4105 enum rtx_code temp = cond1;
4106 cond1 = cond2;
4107 cond2 = temp;
4110 switch (cond1)
4112 case EQ:
4113 if (cond2 == EQ || ! cond_or)
4114 return CC_DEQmode;
4116 switch (cond2)
4118 case LE: return CC_DLEmode;
4119 case LEU: return CC_DLEUmode;
4120 case GE: return CC_DGEmode;
4121 case GEU: return CC_DGEUmode;
4122 default: break;
4125 break;
4127 case LT:
4128 if (cond2 == LT || ! cond_or)
4129 return CC_DLTmode;
4130 if (cond2 == LE)
4131 return CC_DLEmode;
4132 if (cond2 == NE)
4133 return CC_DNEmode;
4134 break;
4136 case GT:
4137 if (cond2 == GT || ! cond_or)
4138 return CC_DGTmode;
4139 if (cond2 == GE)
4140 return CC_DGEmode;
4141 if (cond2 == NE)
4142 return CC_DNEmode;
4143 break;
4145 case LTU:
4146 if (cond2 == LTU || ! cond_or)
4147 return CC_DLTUmode;
4148 if (cond2 == LEU)
4149 return CC_DLEUmode;
4150 if (cond2 == NE)
4151 return CC_DNEmode;
4152 break;
4154 case GTU:
4155 if (cond2 == GTU || ! cond_or)
4156 return CC_DGTUmode;
4157 if (cond2 == GEU)
4158 return CC_DGEUmode;
4159 if (cond2 == NE)
4160 return CC_DNEmode;
4161 break;
4163 /* The remaining cases only occur when both comparisons are the
4164 same. */
4165 case NE:
4166 return CC_DNEmode;
4168 case LE:
4169 return CC_DLEmode;
4171 case GE:
4172 return CC_DGEmode;
4174 case LEU:
4175 return CC_DLEUmode;
4177 case GEU:
4178 return CC_DGEUmode;
4180 default:
4181 break;
4184 abort ();
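/* Illustrative sketch, not part of the build: the dominance modes
   above rest on identities like the ones below, which can be checked
   exhaustively on the host.  For instance CC_DLEmode works because a
   pair such as (a < b) OR (a == b) collapses to the single dominating
   test (a <= b).  The function name is hypothetical.  */
#if 0
#include <assert.h>

static void
check_dominance_identities (void)
{
  int a, b;

  for (a = -4; a <= 4; a++)
    for (b = -4; b <= 4; b++)
      {
        /* OR cases: the weaker comparison dominates.  */
        assert (((a < b) || (a == b)) == (a <= b));
        assert (((a == b) || (a <= b)) == (a <= b));
        /* AND case: the stronger comparison dominates.  */
        assert (((a == b) && (a <= b)) == (a == b));
      }
}
#endif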
4187 enum machine_mode
4188 arm_select_cc_mode (op, x, y)
4189 enum rtx_code op;
4190 rtx x;
4191 rtx y;
4193 /* All floating point compares return CCFP if it is an equality
4194 comparison, and CCFPE otherwise. */
4195 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
4196 return (op == EQ || op == NE) ? CCFPmode : CCFPEmode;
4198 /* A compare with a shifted operand. Because of canonicalization, the
4199 comparison will have to be swapped when we emit the assembler. */
4200 if (GET_MODE (y) == SImode && GET_CODE (y) == REG
4201 && (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
4202 || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
4203 || GET_CODE (x) == ROTATERT))
4204 return CC_SWPmode;
4206 /* This is a special case that is used by combine to allow a
4207 comparison of a shifted byte load to be split into a zero-extend
4208 followed by a comparison of the shifted integer (only valid for
4209 equalities and unsigned inequalities). */
4210 if (GET_MODE (x) == SImode
4211 && GET_CODE (x) == ASHIFT
4212 && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 24
4213 && GET_CODE (XEXP (x, 0)) == SUBREG
4214 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == MEM
4215 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == QImode
4216 && (op == EQ || op == NE
4217 || op == GEU || op == GTU || op == LTU || op == LEU)
4218 && GET_CODE (y) == CONST_INT)
4219 return CC_Zmode;
4221 /* For an operation that sets the condition codes as a side-effect, the
4222 V flag is not set correctly, so we can only use comparisons where
4223 this doesn't matter. (For LT and GE we can use "mi" and "pl"
4224 instead.) */
4225 if (GET_MODE (x) == SImode
4226 && y == const0_rtx
4227 && (op == EQ || op == NE || op == LT || op == GE)
4228 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
4229 || GET_CODE (x) == AND || GET_CODE (x) == IOR
4230 || GET_CODE (x) == XOR || GET_CODE (x) == MULT
4231 || GET_CODE (x) == NOT || GET_CODE (x) == NEG
4232 || GET_CODE (x) == LSHIFTRT
4233 || GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
4234 || GET_CODE (x) == ROTATERT || GET_CODE (x) == ZERO_EXTRACT))
4235 return CC_NOOVmode;
4237 /* A construct for a conditional compare: if the false arm contains
4238 0, then both conditions must be true; otherwise either condition
4239 must be true. Not all conditions are possible, so CCmode is
4240 returned if it can't be done. */
4241 if (GET_CODE (x) == IF_THEN_ELSE
4242 && (XEXP (x, 2) == const0_rtx
4243 || XEXP (x, 2) == const1_rtx)
4244 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4245 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
4246 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
4247 INTVAL (XEXP (x, 2)));
4249 if (GET_MODE (x) == QImode && (op == EQ || op == NE))
4250 return CC_Zmode;
4252 if (GET_MODE (x) == SImode && (op == LTU || op == GEU)
4253 && GET_CODE (x) == PLUS
4254 && (rtx_equal_p (XEXP (x, 0), y) || rtx_equal_p (XEXP (x, 1), y)))
4255 return CC_Cmode;
4257 return CCmode;
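/* Illustrative sketch, not part of the build: the CC_Cmode case above
   matches the classic unsigned-overflow idiom, where the LTU/GEU
   comparison is answered directly by the carry flag of the addition
   itself.  The helper name is hypothetical.  */
#if 0
#include <assert.h>
#include <limits.h>

static int
add_carries (unsigned int a, unsigned int b)
{
  /* (a + b < a) is true exactly when the addition wraps, i.e. when
     the hardware add would set the carry flag.  */
  return a + b < a;
}

static void
add_carries_example (void)
{
  assert (add_carries (UINT_MAX, 1));
  assert (! add_carries (1, 2));
}
#endif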
4260 /* X and Y are two things to compare using CODE. Emit the compare insn and
4261 return the rtx for register 0 in the proper mode. (There is no separate
4262 FP argument; floating point compares are distinguished by the mode of X.) */
4265 arm_gen_compare_reg (code, x, y)
4266 enum rtx_code code;
4267 rtx x, y;
4269 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
4270 rtx cc_reg = gen_rtx_REG (mode, CC_REGNUM);
4272 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
4273 gen_rtx_COMPARE (mode, x, y)));
4275 return cc_reg;
4278 void
4279 arm_reload_in_hi (operands)
4280 rtx * operands;
4282 rtx ref = operands[1];
4283 rtx base, scratch;
4284 HOST_WIDE_INT offset = 0;
4286 if (GET_CODE (ref) == SUBREG)
4288 offset = SUBREG_WORD (ref) * UNITS_PER_WORD;
4289 if (BYTES_BIG_ENDIAN)
4290 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (ref)))
4291 - MIN (UNITS_PER_WORD,
4292 GET_MODE_SIZE (GET_MODE (SUBREG_REG (ref)))));
4293 ref = SUBREG_REG (ref);
4296 if (GET_CODE (ref) == REG)
4298 /* We have a pseudo which has been spilt onto the stack; there
4299 are two cases here: the first where there is a simple
4300 stack-slot replacement and a second where the stack-slot is
4301 out of range, or is used as a subreg. */
4302 if (reg_equiv_mem[REGNO (ref)])
4304 ref = reg_equiv_mem[REGNO (ref)];
4305 base = find_replacement (&XEXP (ref, 0));
4307 else
4308 /* The slot is out of range, or was dressed up in a SUBREG. */
4309 base = reg_equiv_address[REGNO (ref)];
4311 else
4312 base = find_replacement (&XEXP (ref, 0));
4314 /* Handle the case where the address is too complex to be offset by 1. */
4315 if (GET_CODE (base) == MINUS
4316 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
4318 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
4320 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
4321 base = base_plus;
4323 else if (GET_CODE (base) == PLUS)
4325 /* The addend must be CONST_INT, or we would have dealt with it above. */
4326 HOST_WIDE_INT hi, lo;
4328 offset += INTVAL (XEXP (base, 1));
4329 base = XEXP (base, 0);
4331 /* Rework the address into a legal sequence of insns. */
4332 /* Valid range for lo is -4095 -> 4095 */
4333 lo = (offset >= 0
4334 ? (offset & 0xfff)
4335 : -((-offset) & 0xfff));
4337 /* Corner case: if LO is the maximum offset, then we would be out of range
4338 once we have added the additional 1 below, so bump the msb into the
4339 pre-loading insn(s). */
4340 if (lo == 4095)
4341 lo &= 0x7ff;
4343 hi = ((((offset - lo) & HOST_INT (0xffffffff))
4344 ^ HOST_INT (0x80000000))
4345 - HOST_INT (0x80000000));
4347 if (hi + lo != offset)
4348 abort ();
4350 if (hi != 0)
4352 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
4354 /* Get the base address; addsi3 knows how to handle constants
4355 that require more than one insn. */
4356 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
4357 base = base_plus;
4358 offset = lo;
4362 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
4363 emit_insn (gen_zero_extendqisi2 (scratch,
4364 gen_rtx_MEM (QImode,
4365 plus_constant (base,
4366 offset))));
4367 emit_insn (gen_zero_extendqisi2 (gen_rtx_SUBREG (SImode, operands[0], 0),
4368 gen_rtx_MEM (QImode,
4369 plus_constant (base,
4370 offset + 1))));
4371 if (! BYTES_BIG_ENDIAN)
4372 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
4373 gen_rtx_IOR (SImode,
4374 gen_rtx_ASHIFT
4375 (SImode,
4376 gen_rtx_SUBREG (SImode, operands[0], 0),
4377 GEN_INT (8)),
4378 scratch)));
4379 else
4380 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
4381 gen_rtx_IOR (SImode,
4382 gen_rtx_ASHIFT (SImode, scratch,
4383 GEN_INT (8)),
4384 gen_rtx_SUBREG (SImode, operands[0],
4385 0))));
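/* Illustrative sketch, not part of the build: the HI/LO split used by
   the PLUS case above, checked on the host.  LO must remain a legal
   +/-4095 byte offset even after the extra 1 for the second byte, and
   HI + LO must reproduce OFFSET exactly.  This assumes a 32-bit int
   (the real code masks explicitly for wider hosts); the helper name
   is hypothetical.  */
#if 0
#include <assert.h>

static void
split_reload_offset (int offset, int *hi, int *lo)
{
  *lo = offset >= 0 ? (offset & 0xfff) : -((-offset) & 0xfff);
  /* Keep LO + 1 within range, as in the corner case above.  */
  if (*lo == 4095)
    *lo &= 0x7ff;
  *hi = offset - *lo;
}

static void
split_reload_offset_example (void)
{
  int hi, lo;

  split_reload_offset (4095, &hi, &lo);
  assert (hi == 2048 && lo == 2047 && hi + lo == 4095);

  split_reload_offset (-4100, &hi, &lo);
  assert (hi == -4096 && lo == -4 && hi + lo == -4100);
}
#endif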
4388 /* Handle storing a half-word to memory during reload by synthesising it as two
4389 byte stores. Take care not to clobber the input values until after we
4390 have moved them somewhere safe. This code assumes that if the DImode
4391 scratch in operands[2] overlaps either the input value or output address
4392 in some way, then that value must die in this insn (we absolutely need
4393 two scratch registers for some corner cases). */
4394 void
4395 arm_reload_out_hi (operands)
4396 rtx * operands;
4398 rtx ref = operands[0];
4399 rtx outval = operands[1];
4400 rtx base, scratch;
4401 HOST_WIDE_INT offset = 0;
4403 if (GET_CODE (ref) == SUBREG)
4405 offset = SUBREG_WORD (ref) * UNITS_PER_WORD;
4406 if (BYTES_BIG_ENDIAN)
4407 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (ref)))
4408 - MIN (UNITS_PER_WORD,
4409 GET_MODE_SIZE (GET_MODE (SUBREG_REG (ref)))));
4410 ref = SUBREG_REG (ref);
4414 if (GET_CODE (ref) == REG)
4416 /* We have a pseudo which has been spilt onto the stack; there
4417 are two cases here: the first where there is a simple
4418 stack-slot replacement and a second where the stack-slot is
4419 out of range, or is used as a subreg. */
4420 if (reg_equiv_mem[REGNO (ref)])
4422 ref = reg_equiv_mem[REGNO (ref)];
4423 base = find_replacement (&XEXP (ref, 0));
4425 else
4426 /* The slot is out of range, or was dressed up in a SUBREG. */
4427 base = reg_equiv_address[REGNO (ref)];
4429 else
4430 base = find_replacement (&XEXP (ref, 0));
4432 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
4434 /* Handle the case where the address is too complex to be offset by 1. */
4435 if (GET_CODE (base) == MINUS
4436 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
4438 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
4440 /* Be careful not to destroy OUTVAL. */
4441 if (reg_overlap_mentioned_p (base_plus, outval))
4443 /* Updating base_plus might destroy outval; see if we can
4444 swap the scratch and base_plus. */
4445 if (! reg_overlap_mentioned_p (scratch, outval))
4447 rtx tmp = scratch;
4448 scratch = base_plus;
4449 base_plus = tmp;
4451 else
4453 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
4455 /* Be conservative and copy OUTVAL into the scratch now;
4456 this should only be necessary if outval is a subreg
4457 of something larger than a word. */
4458 /* XXX Might this clobber base? I can't see how it can,
4459 since scratch is known to overlap with OUTVAL, and
4460 must be wider than a word. */
4461 emit_insn (gen_movhi (scratch_hi, outval));
4462 outval = scratch_hi;
4466 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
4467 base = base_plus;
4469 else if (GET_CODE (base) == PLUS)
4471 /* The addend must be CONST_INT, or we would have dealt with it above. */
4472 HOST_WIDE_INT hi, lo;
4474 offset += INTVAL (XEXP (base, 1));
4475 base = XEXP (base, 0);
4477 /* Rework the address into a legal sequence of insns. */
4478 /* Valid range for lo is -4095 -> 4095 */
4479 lo = (offset >= 0
4480 ? (offset & 0xfff)
4481 : -((-offset) & 0xfff));
4483 /* Corner case: if LO is the maximum offset, then we would be out of range
4484 once we have added the additional 1 below, so bump the msb into the
4485 pre-loading insn(s). */
4486 if (lo == 4095)
4487 lo &= 0x7ff;
4489 hi = ((((offset - lo) & HOST_INT (0xffffffff))
4490 ^ HOST_INT (0x80000000))
4491 - HOST_INT (0x80000000));
4493 if (hi + lo != offset)
4494 abort ();
4496 if (hi != 0)
4498 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
4500 /* Be careful not to destroy OUTVAL. */
4501 if (reg_overlap_mentioned_p (base_plus, outval))
4503 /* Updating base_plus might destroy outval; see if we
4504 can swap the scratch and base_plus. */
4505 if (! reg_overlap_mentioned_p (scratch, outval))
4507 rtx tmp = scratch;
4508 scratch = base_plus;
4509 base_plus = tmp;
4511 else
4513 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
4515 /* Be conservative and copy outval into scratch now;
4516 this should only be necessary if outval is a
4517 subreg of something larger than a word. */
4518 /* XXX Might this clobber base? I can't see how it
4519 can, since scratch is known to overlap with
4520 outval. */
4521 emit_insn (gen_movhi (scratch_hi, outval));
4522 outval = scratch_hi;
4526 /* Get the base address; addsi3 knows how to handle constants
4527 that require more than one insn. */
4528 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
4529 base = base_plus;
4530 offset = lo;
4534 if (BYTES_BIG_ENDIAN)
4536 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
4537 plus_constant (base, offset + 1)),
4538 gen_rtx_SUBREG (QImode, outval, 0)));
4539 emit_insn (gen_lshrsi3 (scratch,
4540 gen_rtx_SUBREG (SImode, outval, 0),
4541 GEN_INT (8)));
4542 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
4543 gen_rtx_SUBREG (QImode, scratch, 0)));
4545 else
4547 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
4548 gen_rtx_SUBREG (QImode, outval, 0)));
4549 emit_insn (gen_lshrsi3 (scratch,
4550 gen_rtx_SUBREG (SImode, outval, 0),
4551 GEN_INT (8)));
4552 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
4553 plus_constant (base, offset + 1)),
4554 gen_rtx_SUBREG (QImode, scratch, 0)));
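/* Illustrative sketch, not part of the build: the two byte stores
   emitted above implement this host-side picture of a halfword store,
   with the byte order selected by BYTES_BIG_ENDIAN.  Helper names are
   hypothetical.  */
#if 0
static void
store_half_le (unsigned char *p, unsigned int val)
{
  p[0] = val & 0xff;         /* first movqi            */
  p[1] = (val >> 8) & 0xff;  /* lshrsi3 + second movqi */
}

static void
store_half_be (unsigned char *p, unsigned int val)
{
  p[1] = val & 0xff;
  p[0] = (val >> 8) & 0xff;
}
#endif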
4558 /* Print a symbolic form of X to the debug file, F. */
4559 static void
4560 arm_print_value (f, x)
4561 FILE * f;
4562 rtx x;
4564 switch (GET_CODE (x))
4566 case CONST_INT:
4567 fprintf (f, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
4568 return;
4570 case CONST_DOUBLE:
4571 fprintf (f, "<0x%lx,0x%lx>", (long)XWINT (x, 2), (long)XWINT (x, 3));
4572 return;
4574 case CONST_STRING:
4575 fprintf (f, "\"%s\"", XSTR (x, 0));
4576 return;
4578 case SYMBOL_REF:
4579 fprintf (f, "`%s'", XSTR (x, 0));
4580 return;
4582 case LABEL_REF:
4583 fprintf (f, "L%d", INSN_UID (XEXP (x, 0)));
4584 return;
4586 case CONST:
4587 arm_print_value (f, XEXP (x, 0));
4588 return;
4590 case PLUS:
4591 arm_print_value (f, XEXP (x, 0));
4592 fprintf (f, "+");
4593 arm_print_value (f, XEXP (x, 1));
4594 return;
4596 case PC:
4597 fprintf (f, "pc");
4598 return;
4600 default:
4601 fprintf (f, "????");
4602 return;
4606 /* Routines for manipulation of the constant pool. */
4608 /* Arm instructions cannot load a large constant directly into a
4609 register; such constants have to come from a pc relative load. The constant
4610 must therefore be placed in the addressable range of the pc
4611 relative load. Depending on the precise pc relative load
4612 instruction the range is somewhere between 256 bytes and 4k. This
4613 means that we often have to dump a constant inside a function, and
4614 generate code to branch around it.
4616 It is important to minimize this, since the branches will slow
4617 things down and make the code larger.
4619 Normally we can hide the table after an existing unconditional
4620 branch so that there is no interruption of the flow, but in the
4621 worst case the code looks like this:
4623 ldr rn, L1
4625 b L2
4626 align
4627 L1: .long value
4631 ldr rn, L3
4633 b L4
4634 align
4635 L3: .long value
4639 We fix this by performing a scan after scheduling, which notices
4640 which instructions need to have their operands fetched from the
4641 constant table and builds the table.
4643 The algorithm starts by building a table of all the constants that
4644 need fixing up and all the natural barriers in the function (places
4645 where a constant table can be dropped without breaking the flow).
4646 For each fixup we note how far the pc-relative replacement will be
4647 able to reach and the offset of the instruction into the function.
4649 Having built the table we then group the fixes together to form
4650 tables that are as large as possible (subject to addressing
4651 constraints) and emit each table of constants after the last
4652 barrier that is within range of all the instructions in the group.
4653 If a group does not contain a barrier, then we forcibly create one
4654 by inserting a jump instruction into the flow. Once the table has
4655 been inserted, the insns are then modified to reference the
4656 relevant entry in the pool.
4658 Possible enhancements to the algorithm (not implemented) are:
4660 1) For some processors and object formats, there may be benefit in
4661 aligning the pools to the start of cache lines; this alignment
4662 would need to be taken into account when calculating addressability
4663 of a pool. */
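/* Illustrative sketch, not part of the build: the grouping step in
   miniature.  Walk the fixes in address order and keep extending the
   current pool while every member can still reach it; the pool's
   placement bound is the tightest max address seen so far.  Barriers,
   pool growth and backward references are deliberately omitted; all
   names are hypothetical.  */
#if 0
struct fix_sketch
{
  long address;   /* offset of the insn in the function */
  long forwards;  /* how far forward its load can reach */
};

static int
group_fixes (const struct fix_sketch *fixes, int n, int first)
{
  long max_address = fixes[first].address + fixes[first].forwards;
  int i;

  for (i = first; i < n; i++)
    {
      if (fixes[i].address >= max_address)
        break;  /* this fix can no longer reach the pool */
      if (fixes[i].address + fixes[i].forwards < max_address)
        max_address = fixes[i].address + fixes[i].forwards;
    }
  /* Fixes [first, i) share one pool, emitted before max_address.  */
  return i;
}
#endif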
4665 /* These typedefs are located at the start of this file, so that
4666 they can be used in the prototypes there. This comment is to
4667 remind readers of that fact so that the following structures
4668 can be understood more easily.
4670 typedef struct minipool_node Mnode;
4671 typedef struct minipool_fixup Mfix; */
4673 struct minipool_node
4675 /* Doubly linked chain of entries. */
4676 Mnode * next;
4677 Mnode * prev;
4678 /* The maximum offset into the code at which this entry can be placed. While
4679 pushing fixes for forward references, all entries are sorted in order
4680 of increasing max_address. */
4681 HOST_WIDE_INT max_address;
4682 /* Similarly for an entry inserted for a backwards ref. */
4683 HOST_WIDE_INT min_address;
4684 /* The number of fixes referencing this entry. This can become zero
4685 if we "unpush" an entry. In this case we ignore the entry when we
4686 come to emit the code. */
4687 int refcount;
4688 /* The offset from the start of the minipool. */
4689 HOST_WIDE_INT offset;
4690 /* The value in the table. */
4691 rtx value;
4692 /* The mode of value. */
4693 enum machine_mode mode;
4694 int fix_size;
4697 struct minipool_fixup
4699 Mfix * next;
4700 rtx insn;
4701 HOST_WIDE_INT address;
4702 rtx * loc;
4703 enum machine_mode mode;
4704 int fix_size;
4705 rtx value;
4706 Mnode * minipool;
4707 HOST_WIDE_INT forwards;
4708 HOST_WIDE_INT backwards;
4711 /* Fixes less than a word need padding out to a word boundary. */
4712 #define MINIPOOL_FIX_SIZE(mode) \
4713 (GET_MODE_SIZE ((mode)) >= 4 ? GET_MODE_SIZE ((mode)) : 4)
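/* Illustrative sketch, not part of the build: the padding rule above
   gives pool-entry sizes of 4, 4, 4 and 8 bytes for 1-, 2-, 4- and
   8-byte modes respectively; anything smaller than a word is rounded
   up.  The macro name below is hypothetical.  */
#if 0
#include <assert.h>

#define POOL_ENTRY_SIZE(bytes) ((bytes) >= 4 ? (bytes) : 4)

static void
pool_entry_size_example (void)
{
  assert (POOL_ENTRY_SIZE (1) == 4);  /* QImode */
  assert (POOL_ENTRY_SIZE (2) == 4);  /* HImode */
  assert (POOL_ENTRY_SIZE (4) == 4);  /* SImode */
  assert (POOL_ENTRY_SIZE (8) == 8);  /* DImode */
}
#endif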
4715 static Mnode * minipool_vector_head;
4716 static Mnode * minipool_vector_tail;
4717 static rtx minipool_vector_label;
4719 /* The linked list of all minipool fixes required for this function. */
4720 Mfix * minipool_fix_head;
4721 Mfix * minipool_fix_tail;
4722 /* The fix entry for the current minipool, once it has been placed. */
4723 Mfix * minipool_barrier;
4725 /* Determines if INSN is the start of a jump table. Returns the end
4726 of the TABLE or NULL_RTX. */
4727 static rtx
4728 is_jump_table (insn)
4729 rtx insn;
4731 rtx table;
4733 if (GET_CODE (insn) == JUMP_INSN
4734 && JUMP_LABEL (insn) != NULL
4735 && ((table = next_real_insn (JUMP_LABEL (insn)))
4736 == next_real_insn (insn))
4737 && table != NULL
4738 && GET_CODE (table) == JUMP_INSN
4739 && (GET_CODE (PATTERN (table)) == ADDR_VEC
4740 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
4741 return table;
4743 return NULL_RTX;
4746 static HOST_WIDE_INT
4747 get_jump_table_size (insn)
4748 rtx insn;
4750 rtx body = PATTERN (insn);
4751 int elt = GET_CODE (body) == ADDR_DIFF_VEC ? 1 : 0;
4753 return GET_MODE_SIZE (GET_MODE (body)) * XVECLEN (body, elt);
4756 /* Move a minipool fix MP from its current location to before MAX_MP.
4757 If MAX_MP is NULL, then MP doesn't need moving, but the addressing
4758 constraints may need updating. */
4759 static Mnode *
4760 move_minipool_fix_forward_ref (mp, max_mp, max_address)
4761 Mnode * mp;
4762 Mnode * max_mp;
4763 HOST_WIDE_INT max_address;
4765 /* This should never be true and the code below assumes these are
4766 different. */
4767 if (mp == max_mp)
4768 abort ();
4770 if (max_mp == NULL)
4772 if (max_address < mp->max_address)
4773 mp->max_address = max_address;
4775 else
4777 if (max_address > max_mp->max_address - mp->fix_size)
4778 mp->max_address = max_mp->max_address - mp->fix_size;
4779 else
4780 mp->max_address = max_address;
4782 /* Unlink MP from its current position. Since max_mp is non-null,
4783 mp->prev must be non-null. */
4784 mp->prev->next = mp->next;
4785 if (mp->next != NULL)
4786 mp->next->prev = mp->prev;
4787 else
4788 minipool_vector_tail = mp->prev;
4790 /* Re-insert it before MAX_MP. */
4791 mp->next = max_mp;
4792 mp->prev = max_mp->prev;
4793 max_mp->prev = mp;
4795 if (mp->prev != NULL)
4796 mp->prev->next = mp;
4797 else
4798 minipool_vector_head = mp;
4801 /* Save the new entry. */
4802 max_mp = mp;
4804 /* Scan over the preceding entries and adjust their addresses as
4805 required. */
4806 while (mp->prev != NULL
4807 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
4809 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
4810 mp = mp->prev;
4813 return max_mp;
4816 /* Add a constant to the minipool for a forward reference. Returns the
4817 node added or NULL if the constant will not fit in this pool. */
4818 static Mnode *
4819 add_minipool_forward_ref (fix)
4820 Mfix * fix;
4822 /* If set, max_mp is the first pool_entry that has a lower
4823 constraint than the one we are trying to add. */
4824 Mnode * max_mp = NULL;
4825 HOST_WIDE_INT max_address = fix->address + fix->forwards;
4826 Mnode * mp;
4828 /* If this fix's address is greater than the address of the first
4829 entry, then we can't put the fix in this pool. We subtract the
4830 size of the current fix to ensure that if the table is fully
4831 packed we still have enough room to insert this value by shuffling
4832 the other fixes forwards. */
4833 if (minipool_vector_head &&
4834 fix->address >= minipool_vector_head->max_address - fix->fix_size)
4835 return NULL;
4837 /* Scan the pool to see if a constant with the same value has
4838 already been added. While we are doing this, also note the
4839 location where we must insert the constant if it doesn't already
4840 exist. */
4841 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
4843 if (GET_CODE (fix->value) == GET_CODE (mp->value)
4844 && fix->mode == mp->mode
4845 && (GET_CODE (fix->value) != CODE_LABEL
4846 || (CODE_LABEL_NUMBER (fix->value)
4847 == CODE_LABEL_NUMBER (mp->value)))
4848 && rtx_equal_p (fix->value, mp->value))
4850 /* More than one fix references this entry. */
4851 mp->refcount++;
4852 return move_minipool_fix_forward_ref (mp, max_mp, max_address);
4855 /* Note the insertion point if necessary. */
4856 if (max_mp == NULL
4857 && mp->max_address > max_address)
4858 max_mp = mp;
4861 /* The value is not currently in the minipool, so we need to create
4862 a new entry for it. If MAX_MP is NULL, the entry will be put on
4863 the end of the list since the placement is less constrained than
4864 any existing entry. Otherwise, we insert the new fix before
4865 MAX_MP and, if necessary, adjust the constraints on the other
4866 entries. */
4867 mp = xmalloc (sizeof (* mp));
4868 mp->fix_size = fix->fix_size;
4869 mp->mode = fix->mode;
4870 mp->value = fix->value;
4871 mp->refcount = 1;
4872 /* Not yet required for a backwards ref. */
4873 mp->min_address = -65536;
4875 if (max_mp == NULL)
4877 mp->max_address = max_address;
4878 mp->next = NULL;
4879 mp->prev = minipool_vector_tail;
4881 if (mp->prev == NULL)
4883 minipool_vector_head = mp;
4884 minipool_vector_label = gen_label_rtx ();
4886 else
4887 mp->prev->next = mp;
4889 minipool_vector_tail = mp;
4891 else
4893 if (max_address > max_mp->max_address - mp->fix_size)
4894 mp->max_address = max_mp->max_address - mp->fix_size;
4895 else
4896 mp->max_address = max_address;
4898 mp->next = max_mp;
4899 mp->prev = max_mp->prev;
4900 max_mp->prev = mp;
4901 if (mp->prev != NULL)
4902 mp->prev->next = mp;
4903 else
4904 minipool_vector_head = mp;
4907 /* Save the new entry. */
4908 max_mp = mp;
4910 /* Scan over the preceding entries and adjust their addresses as
4911 required. */
4912 while (mp->prev != NULL
4913 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
4915 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
4916 mp = mp->prev;
4919 return max_mp;
4922 static Mnode *
4923 move_minipool_fix_backward_ref (mp, min_mp, min_address)
4924 Mnode * mp;
4925 Mnode * min_mp;
4926 HOST_WIDE_INT min_address;
4928 HOST_WIDE_INT offset;
4930 /* This should never be true, and the code below assumes these are
4931 different. */
4932 if (mp == min_mp)
4933 abort ();
4935 if (min_mp == NULL)
4937 if (min_address > mp->min_address)
4938 mp->min_address = min_address;
4940 else
4942 /* We will adjust this below if it is too loose. */
4943 mp->min_address = min_address;
4945 /* Unlink MP from its current position. Since min_mp is non-null,
4946 mp->next must be non-null. */
4947 mp->next->prev = mp->prev;
4948 if (mp->prev != NULL)
4949 mp->prev->next = mp->next;
4950 else
4951 minipool_vector_head = mp->next;
4953 /* Reinsert it after MIN_MP. */
4954 mp->prev = min_mp;
4955 mp->next = min_mp->next;
4956 min_mp->next = mp;
4957 if (mp->next != NULL)
4958 mp->next->prev = mp;
4959 else
4960 minipool_vector_tail = mp;
4963 min_mp = mp;
4965 offset = 0;
4966 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
4968 mp->offset = offset;
4969 if (mp->refcount > 0)
4970 offset += mp->fix_size;
4972 if (mp->next && mp->next->min_address < mp->min_address + mp->fix_size)
4973 mp->next->min_address = mp->min_address + mp->fix_size;
4976 return min_mp;
4979 /* Add a constant to the minipool for a backward reference. Returns the
4980 node added or NULL if the constant will not fit in this pool.
4982 Note that the code for insertion for a backwards reference can be
4983 somewhat confusing because the calculated offsets for each fix do
4984 not take into account the size of the pool (which is still under
4985 construction). */
4986 static Mnode *
4987 add_minipool_backward_ref (fix)
4988 Mfix * fix;
4990 /* If set, min_mp is the last pool_entry that has a lower constraint
4991 than the one we are trying to add. */
4992 Mnode * min_mp = NULL;
4993 /* This can be negative, since it is only a constraint. */
4994 HOST_WIDE_INT min_address = fix->address - fix->backwards;
4995 Mnode * mp;
4997 /* If we can't reach the current pool from this insn, or if we can't
4998 insert this entry at the end of the pool without pushing other
4999 fixes out of range, then we don't try. This ensures that we
5000 can't fail later on. */
5001 if (min_address >= minipool_barrier->address
5002 || (minipool_vector_tail->min_address + fix->fix_size
5003 >= minipool_barrier->address))
5004 return NULL;
5006 /* Scan the pool to see if a constant with the same value has
5007 already been added. While we are doing this, also note the
5008 location where we must insert the constant if it doesn't already
5009 exist. */
5010 for (mp = minipool_vector_tail; mp != NULL; mp = mp->prev)
5012 if (GET_CODE (fix->value) == GET_CODE (mp->value)
5013 && fix->mode == mp->mode
5014 && (GET_CODE (fix->value) != CODE_LABEL
5015 || (CODE_LABEL_NUMBER (fix->value)
5016 == CODE_LABEL_NUMBER (mp->value)))
5017 && rtx_equal_p (fix->value, mp->value)
5018 /* Check that there is enough slack to move this entry to the
5019 end of the table (this is conservative). */
5020 && (mp->max_address
5021 > (minipool_barrier->address
5022 + minipool_vector_tail->offset
5023 + minipool_vector_tail->fix_size)))
5025 mp->refcount++;
5026 return move_minipool_fix_backward_ref (mp, min_mp, min_address);
5029 if (min_mp != NULL)
5030 mp->min_address += fix->fix_size;
5031 else
5033 /* Note the insertion point if necessary. */
5034 if (mp->min_address < min_address)
5035 min_mp = mp;
5036 else if (mp->max_address
5037 < minipool_barrier->address + mp->offset + fix->fix_size)
5039 /* Inserting before this entry would push the fix beyond
5040 its maximum address (which can happen if we have
5041 re-located a forwards fix); force the new fix to come
5042 after it. */
5043 min_mp = mp;
5044 min_address = mp->min_address + fix->fix_size;
5049 /* We need to create a new entry. */
5050 mp = xmalloc (sizeof (* mp));
5051 mp->fix_size = fix->fix_size;
5052 mp->mode = fix->mode;
5053 mp->value = fix->value;
5054 mp->refcount = 1;
5055 mp->max_address = minipool_barrier->address + 65536;
5057 mp->min_address = min_address;
5059 if (min_mp == NULL)
5061 mp->prev = NULL;
5062 mp->next = minipool_vector_head;
5064 if (mp->next == NULL)
5066 minipool_vector_tail = mp;
5067 minipool_vector_label = gen_label_rtx ();
5069 else
5070 mp->next->prev = mp;
5072 minipool_vector_head = mp;
5074 else
5076 mp->next = min_mp->next;
5077 mp->prev = min_mp;
5078 min_mp->next = mp;
5080 if (mp->next != NULL)
5081 mp->next->prev = mp;
5082 else
5083 minipool_vector_tail = mp;
5086 /* Save the new entry. */
5087 min_mp = mp;
5089 if (mp->prev)
5090 mp = mp->prev;
5091 else
5092 mp->offset = 0;
5094 /* Scan over the following entries and adjust their offsets. */
5095 while (mp->next != NULL)
5097 if (mp->next->min_address < mp->min_address + mp->fix_size)
5098 mp->next->min_address = mp->min_address + mp->fix_size;
5100 if (mp->refcount)
5101 mp->next->offset = mp->offset + mp->fix_size;
5102 else
5103 mp->next->offset = mp->offset;
5105 mp = mp->next;
5108 return min_mp;
5111 static void
5112 assign_minipool_offsets (barrier)
5113 Mfix * barrier;
5115 HOST_WIDE_INT offset = 0;
5116 Mnode * mp;
5118 minipool_barrier = barrier;
5120 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
5122 mp->offset = offset;
5124 if (mp->refcount > 0)
5125 offset += mp->fix_size;
5129 /* Output the literal table. */
5130 static void
5131 dump_minipool (scan)
5132 rtx scan;
5134 Mnode * mp;
5135 Mnode * nmp;
5137 if (rtl_dump_file)
5138 fprintf (rtl_dump_file,
5139 ";; Emitting minipool after insn %u; address %ld\n",
5140 INSN_UID (scan), (unsigned long) minipool_barrier->address);
5142 scan = emit_label_after (gen_label_rtx (), scan);
5143 scan = emit_insn_after (gen_align_4 (), scan);
5144 scan = emit_label_after (minipool_vector_label, scan);
5146 for (mp = minipool_vector_head; mp != NULL; mp = nmp)
5148 if (mp->refcount > 0)
5150 if (rtl_dump_file)
5152 fprintf (rtl_dump_file,
5153 ";; Offset %u, min %ld, max %ld ",
5154 (unsigned) mp->offset, (unsigned long) mp->min_address,
5155 (unsigned long) mp->max_address);
5156 arm_print_value (rtl_dump_file, mp->value);
5157 fputc ('\n', rtl_dump_file);
5160 switch (mp->fix_size)
5162 #ifdef HAVE_consttable_1
5163 case 1:
5164 scan = emit_insn_after (gen_consttable_1 (mp->value), scan);
5165 break;
5167 #endif
5168 #ifdef HAVE_consttable_2
5169 case 2:
5170 scan = emit_insn_after (gen_consttable_2 (mp->value), scan);
5171 break;
5173 #endif
5174 #ifdef HAVE_consttable_4
5175 case 4:
5176 scan = emit_insn_after (gen_consttable_4 (mp->value), scan);
5177 break;
5179 #endif
5180 #ifdef HAVE_consttable_8
5181 case 8:
5182 scan = emit_insn_after (gen_consttable_8 (mp->value), scan);
5183 break;
5185 #endif
5186 default:
5187 abort ();
5188 break;
5192 nmp = mp->next;
5193 free (mp);
5196 minipool_vector_head = minipool_vector_tail = NULL;
5197 scan = emit_insn_after (gen_consttable_end (), scan);
5198 scan = emit_barrier_after (scan);
5201 /* Return the cost of forcibly inserting a barrier after INSN. */
5202 static int
5203 arm_barrier_cost (insn)
5204 rtx insn;
5206 /* Basing the location of the pool on the loop depth is preferable,
5207 but at the moment, the basic block information seems to be
5208 corrupt by this stage of the compilation. */
5209 int base_cost = 50;
5210 rtx next = next_nonnote_insn (insn);
5212 if (next != NULL && GET_CODE (next) == CODE_LABEL)
5213 base_cost -= 20;
5215 switch (GET_CODE (insn))
5217 case CODE_LABEL:
5218 /* It will always be better to place the table before the label, rather
5219 than after it. */
5220 return 50;
5222 case INSN:
5223 case CALL_INSN:
5224 return base_cost;
5226 case JUMP_INSN:
5227 return base_cost - 10;
5229 default:
5230 return base_cost + 10;
5234 /* Find the best place in the insn stream in the range
5235 (FIX->address,MAX_ADDRESS) to forcibly insert a minipool barrier.
5236 Create the barrier by inserting a jump and add a new fix entry for
5237 it. */
5238 static Mfix *
5239 create_fix_barrier (fix, max_address)
5240 Mfix * fix;
5241 HOST_WIDE_INT max_address;
5243 HOST_WIDE_INT count = 0;
5244 rtx barrier;
5245 rtx from = fix->insn;
5246 rtx selected = from;
5247 int selected_cost;
5248 HOST_WIDE_INT selected_address;
5249 Mfix * new_fix;
5250 HOST_WIDE_INT max_count = max_address - fix->address;
5251 rtx label = gen_label_rtx ();
5253 selected_cost = arm_barrier_cost (from);
5254 selected_address = fix->address;
5256 while (from && count < max_count)
5258 rtx tmp;
5259 int new_cost;
5261 /* This code shouldn't have been called if there was a natural barrier
5262 within range. */
5263 if (GET_CODE (from) == BARRIER)
5264 abort ();
5266 /* Count the length of this insn. */
5267 count += get_attr_length (from);
5269 /* If there is a jump table, add its length. */
5270 tmp = is_jump_table (from);
5271 if (tmp != NULL)
5273 count += get_jump_table_size (tmp);
5275 /* Jump tables aren't in a basic block, so base the cost on
5276 the dispatch insn. If we select this location, we will
5277 still put the pool after the table. */
5278 new_cost = arm_barrier_cost (from);
5280 if (count < max_count && new_cost <= selected_cost)
5282 selected = tmp;
5283 selected_cost = new_cost;
5284 selected_address = fix->address + count;
5287 /* Continue after the dispatch table. */
5288 from = NEXT_INSN (tmp);
5289 continue;
5292 new_cost = arm_barrier_cost (from);
5294 if (count < max_count && new_cost <= selected_cost)
5296 selected = from;
5297 selected_cost = new_cost;
5298 selected_address = fix->address + count;
5301 from = NEXT_INSN (from);
5304 /* Create a new JUMP_INSN that branches around a barrier. */
5305 from = emit_jump_insn_after (gen_jump (label), selected);
5306 JUMP_LABEL (from) = label;
5307 barrier = emit_barrier_after (from);
5308 emit_label_after (label, barrier);
5310 /* Create a minipool barrier entry for the new barrier. */
5311 new_fix = (Mfix *) oballoc (sizeof (* new_fix));
5312 new_fix->insn = barrier;
5313 new_fix->address = selected_address;
5314 new_fix->next = fix->next;
5315 fix->next = new_fix;
5317 return new_fix;
5320 /* Record that there is a natural barrier in the insn stream at
5321 ADDRESS. */
5322 static void
5323 push_minipool_barrier (insn, address)
5324 rtx insn;
5325 HOST_WIDE_INT address;
5327 Mfix * fix = (Mfix *) oballoc (sizeof (* fix));
5329 fix->insn = insn;
5330 fix->address = address;
5332 fix->next = NULL;
5333 if (minipool_fix_head != NULL)
5334 minipool_fix_tail->next = fix;
5335 else
5336 minipool_fix_head = fix;
5338 minipool_fix_tail = fix;
5341 /* Record INSN, which will need fixing up to load a value from the
5342 minipool. ADDRESS is the offset of the insn since the start of the
5343 function; LOC is a pointer to the part of the insn which requires
5344 fixing; VALUE is the constant that must be loaded, which is of type
5345 MODE. */
5346 static void
5347 push_minipool_fix (insn, address, loc, mode, value)
5348 rtx insn;
5349 HOST_WIDE_INT address;
5350 rtx * loc;
5351 enum machine_mode mode;
5352 rtx value;
5354 Mfix * fix = (Mfix *) oballoc (sizeof (* fix));
5356 #ifdef AOF_ASSEMBLER
5357 /* PIC symbol references need to be converted into offsets into the
5358 based area. */
5359 /* XXX This shouldn't be done here. */
5360 if (flag_pic && GET_CODE (value) == SYMBOL_REF)
5361 value = aof_pic_entry (value);
5362 #endif /* AOF_ASSEMBLER */
5364 fix->insn = insn;
5365 fix->address = address;
5366 fix->loc = loc;
5367 fix->mode = mode;
5368 fix->fix_size = MINIPOOL_FIX_SIZE (mode);
5369 fix->value = value;
5370 fix->forwards = get_attr_pool_range (insn);
5371 fix->backwards = get_attr_neg_pool_range (insn);
5372 fix->minipool = NULL;
5374 /* If an insn doesn't have a range defined for it, then it isn't
5375 expecting to be reworked by this code. Better to abort now than
5376 to generate duff assembly code. */
5377 if (fix->forwards == 0 && fix->backwards == 0)
5378 abort ();
5380 if (rtl_dump_file)
5382 fprintf (rtl_dump_file,
5383 ";; %smode fixup for i%d; addr %lu, range (%ld,%ld): ",
5384 GET_MODE_NAME (mode),
5385 INSN_UID (insn), (unsigned long) address,
5386 -1 * (long)fix->backwards, (long)fix->forwards);
5387 arm_print_value (rtl_dump_file, fix->value);
5388 fprintf (rtl_dump_file, "\n");
5391 /* Add it to the chain of fixes. */
5392 fix->next = NULL;
5394 if (minipool_fix_head != NULL)
5395 minipool_fix_tail->next = fix;
5396 else
5397 minipool_fix_head = fix;
5399 minipool_fix_tail = fix;
5402 /* Scan INSN and note any of its operands that need fixing. */
5403 static void
5404 note_invalid_constants (insn, address)
5405 rtx insn;
5406 HOST_WIDE_INT address;
5408 int opno;
5410 extract_insn (insn);
5412 if (! constrain_operands (1))
5413 fatal_insn_not_found (insn);
5415 /* Fill in recog_op_alt with information about the constraints of this
5416 insn. */
5417 preprocess_constraints ();
5419 for (opno = 0; opno < recog_data.n_operands; opno++)
5421 /* Things we need to fix can only occur in inputs. */
5422 if (recog_data.operand_type[opno] != OP_IN)
5423 continue;
5425 /* If this alternative is a memory reference, then any mention
5426 of constants in this alternative is really to fool reload
5427 into allowing us to accept one there. We need to fix them up
5428 now so that we output the right code. */
5429 if (recog_op_alt[opno][which_alternative].memory_ok)
5431 rtx op = recog_data.operand[opno];
5433 if (CONSTANT_P (op))
5434 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
5435 recog_data.operand_mode[opno], op);
5436 #if 0
5437 /* RWE: Now we look correctly at the operands for the insn,
5438 this shouldn't be needed any more. */
5439 #ifndef AOF_ASSEMBLER
5440 /* XXX Is this still needed? */
5441 else if (GET_CODE (op) == UNSPEC && XINT (op, 1) == 3)
5442 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
5443 recog_data.operand_mode[opno],
5444 XVECEXP (op, 0, 0));
5445 #endif
5446 #endif
5447 else if (GET_CODE (op) == MEM
5448 && GET_CODE (XEXP (op, 0)) == SYMBOL_REF
5449 && CONSTANT_POOL_ADDRESS_P (XEXP (op, 0)))
5450 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
5451 recog_data.operand_mode[opno],
5452 get_pool_constant (XEXP (op, 0)));
5457 void
5458 arm_reorg (first)
5459 rtx first;
5461 rtx insn;
5462 HOST_WIDE_INT address = 0;
5463 Mfix * fix;
5465 minipool_fix_head = minipool_fix_tail = NULL;
5467 /* The first insn must always be a note, or the code below won't
5468 scan it properly. */
5469 if (GET_CODE (first) != NOTE)
5470 abort ();
5472 /* Scan all the insns and record the operands that will need fixing. */
5473 for (insn = next_nonnote_insn (first); insn; insn = next_nonnote_insn (insn))
5476 if (GET_CODE (insn) == BARRIER)
5477 push_minipool_barrier (insn, address);
5478 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
5479 || GET_CODE (insn) == JUMP_INSN)
5481 rtx table;
5483 note_invalid_constants (insn, address);
5484 address += get_attr_length (insn);
5486 /* If the insn is a vector jump, add the size of the table
5487 and skip the table. */
5488 if ((table = is_jump_table (insn)) != NULL)
5490 address += get_jump_table_size (table);
5491 insn = table;
5496 fix = minipool_fix_head;
5498 /* Now scan the fixups and perform the required changes. */
5499 while (fix)
5501 Mfix * ftmp;
5502 Mfix * fdel;
5503 Mfix * last_added_fix;
5504 Mfix * last_barrier = NULL;
5505 Mfix * this_fix;
5507 /* Skip any further barriers before the next fix. */
5508 while (fix && GET_CODE (fix->insn) == BARRIER)
5509 fix = fix->next;
5511 /* No more fixes. */
5512 if (fix == NULL)
5513 break;
5515 last_added_fix = NULL;
5517 for (ftmp = fix; ftmp; ftmp = ftmp->next)
5519 if (GET_CODE (ftmp->insn) == BARRIER)
5521 if (ftmp->address >= minipool_vector_head->max_address)
5522 break;
5524 last_barrier = ftmp;
5526 else if ((ftmp->minipool = add_minipool_forward_ref (ftmp)) == NULL)
5527 break;
5529 last_added_fix = ftmp; /* Keep track of the last fix added. */
5532 /* If we found a barrier, drop back to that; any fixes that we
5533 could have reached but come after the barrier will now go in
5534 the next mini-pool. */
5535 if (last_barrier != NULL)
5537 /* Reduce the refcount for those fixes that won't go into this
5538 pool after all. */
5539 for (fdel = last_barrier->next;
5540 fdel && fdel != ftmp;
5541 fdel = fdel->next)
5543 fdel->minipool->refcount--;
5544 fdel->minipool = NULL;
5547 ftmp = last_barrier;
5549 else
5551 /* ftmp is the first fix that we can't fit into this pool and
5552 there are no natural barriers that we could use. Insert a
5553 new barrier in the code somewhere between the previous
5554 fix and this one, and arrange to jump around it. */
5555 HOST_WIDE_INT max_address;
5557 /* The last item on the list of fixes must be a barrier, so
5558 we can never run off the end of the list of fixes without
5559 last_barrier being set. */
5560 if (ftmp == NULL)
5561 abort ();
5563 max_address = minipool_vector_head->max_address;
5564 /* Check that there isn't another fix that is in range that
5565 we couldn't fit into this pool because the pool was
5566 already too large: we need to put the pool before such an
5567 instruction. */
5568 if (ftmp->address < max_address)
5569 max_address = ftmp->address;
5571 last_barrier = create_fix_barrier (last_added_fix, max_address);
5574 assign_minipool_offsets (last_barrier);
5576 while (ftmp)
5578 if (GET_CODE (ftmp->insn) != BARRIER
5579 && ((ftmp->minipool = add_minipool_backward_ref (ftmp))
5580 == NULL))
5581 break;
5583 ftmp = ftmp->next;
5586 /* Scan over the fixes we have identified for this pool, fixing them
5587 up and adding the constants to the pool itself. */
5588 for (this_fix = fix; this_fix && ftmp != this_fix;
5589 this_fix = this_fix->next)
5590 if (GET_CODE (this_fix->insn) != BARRIER)
5592 rtx addr
5593 = plus_constant (gen_rtx_LABEL_REF (VOIDmode,
5594 minipool_vector_label),
5595 this_fix->minipool->offset);
5596 *this_fix->loc = gen_rtx_MEM (this_fix->mode, addr);
5599 dump_minipool (last_barrier->insn);
5600 fix = ftmp;
5603 /* From now on we must synthesize any constants that we can't handle
5604 directly. This can happen if the RTL gets split during final
5605 instruction generation. */
5606 after_arm_reorg = 1;
5609 /* Routines to output assembly language. */
5611 /* If the rtx is one of the valid FPA immediates then return its string
5612 representation. In this way we can ensure that valid double constants are
5613 generated even when cross compiling. */
5614 char *
5615 fp_immediate_constant (x)
5616 rtx x;
5618 REAL_VALUE_TYPE r;
5619 int i;
5621 if (!fpa_consts_inited)
5622 init_fpa_table ();
5624 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
5625 for (i = 0; i < 8; i++)
5626 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
5627 return strings_fpa[i];
5629 abort ();
5632 /* As for fp_immediate_constant, but value is passed directly, not in rtx. */
5633 static char *
5634 fp_const_from_val (r)
5635 REAL_VALUE_TYPE * r;
5637 int i;
5639 if (! fpa_consts_inited)
5640 init_fpa_table ();
5642 for (i = 0; i < 8; i++)
5643 if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
5644 return strings_fpa[i];
5646 abort ();
5649 /* Output the operands of a LDM/STM instruction to STREAM.
5650 MASK is the ARM register set mask of which only bits 0-15 are important.
5651 INSTR is the (possibly suffixed) instruction template, with REG as its base
5652 register operand. HAT is nonzero if a hat must follow the register list. */
5654 static void
5655 print_multi_reg (stream, instr, reg, mask, hat)
5656 FILE * stream;
5657 char * instr;
5658 int reg;
5659 int mask;
5660 int hat;
5662 int i;
5663 int not_first = FALSE;
5665 fputc ('\t', stream);
5666 asm_fprintf (stream, instr, reg);
5667 fputs (", {", stream);
5669 for (i = 0; i <= LAST_ARM_REGNUM; i++)
5670 if (mask & (1 << i))
5672 if (not_first)
5673 fprintf (stream, ", ");
5675 asm_fprintf (stream, "%r", i);
5676 not_first = TRUE;
5679 fprintf (stream, "}%s\n", hat ? "^" : "");
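/* Illustrative sketch, not part of the build: the mask-walking loop
   above, modelled with plain printf.  Only bits 0-15 of the mask
   matter; e.g. mask 0x4011 prints "{r0, r4, r14}".  The helper name
   is hypothetical.  */
#if 0
#include <stdio.h>

static void
print_reg_list (unsigned int mask)
{
  int i;
  int not_first = 0;

  printf ("{");
  for (i = 0; i <= 15; i++)
    if (mask & (1u << i))
      {
        printf (not_first ? ", r%d" : "r%d", i);
        not_first = 1;
      }
  printf ("}\n");
}
#endif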
5682 /* Output a 'call' insn. */
5684 char *
5685 output_call (operands)
5686 rtx * operands;
5688 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
5690 if (REGNO (operands[0]) == LR_REGNUM)
5692 operands[0] = gen_rtx_REG (SImode, IP_REGNUM);
5693 output_asm_insn ("mov%?\t%0, %|lr", operands);
5696 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
5698 if (TARGET_INTERWORK)
5699 output_asm_insn ("bx%?\t%0", operands);
5700 else
5701 output_asm_insn ("mov%?\t%|pc, %0", operands);
5703 return "";
5706 static int
5707 eliminate_lr2ip (x)
5708 rtx * x;
5710 int something_changed = 0;
5711 rtx x0 = * x;
5712 int code = GET_CODE (x0);
5713 register int i, j;
5714 register const char * fmt;
5716 switch (code)
5718 case REG:
5719 if (REGNO (x0) == LR_REGNUM)
5721 *x = gen_rtx_REG (SImode, IP_REGNUM);
5722 return 1;
5724 return 0;
5725 default:
5726 /* Scan through the sub-elements and change any references there. */
5727 fmt = GET_RTX_FORMAT (code);
5729 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5730 if (fmt[i] == 'e')
5731 something_changed |= eliminate_lr2ip (&XEXP (x0, i));
5732 else if (fmt[i] == 'E')
5733 for (j = 0; j < XVECLEN (x0, i); j++)
5734 something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));
5736 return something_changed;
5740 /* Output a 'call' insn that is a reference in memory. */
5742 char *
5743 output_call_mem (operands)
5744 rtx * operands;
5746 operands[0] = copy_rtx (operands[0]); /* Be ultra careful. */
5747 /* Handle calls using lr by using ip (which may be clobbered in subr anyway). */
5748 if (eliminate_lr2ip (&operands[0]))
5749 output_asm_insn ("mov%?\t%|ip, %|lr", operands);
5751 if (TARGET_INTERWORK)
5753 output_asm_insn ("ldr%?\t%|ip, %0", operands);
5754 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
5755 output_asm_insn ("bx%?\t%|ip", operands);
5757 else
5759 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
5760 output_asm_insn ("ldr%?\t%|pc, %0", operands);
5763 return "";
5767 /* Output a move from arm registers to an fpu register.
5768 OPERANDS[0] is an fpu register.
5769 OPERANDS[1] is the first register of an arm register pair. */
5771 char *
5772 output_mov_long_double_fpu_from_arm (operands)
5773 rtx * operands;
5775 int arm_reg0 = REGNO (operands[1]);
5776 rtx ops[3];
5778 if (arm_reg0 == IP_REGNUM)
5779 abort ();
5781 ops[0] = gen_rtx_REG (SImode, arm_reg0);
5782 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
5783 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
5785 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
5786 output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);
5788 return "";
5791 /* Output a move from an fpu register to arm registers.
5792 OPERANDS[0] is the first register of an arm register pair.
5793 OPERANDS[1] is an fpu register. */
5795 char *
5796 output_mov_long_double_arm_from_fpu (operands)
5797 rtx * operands;
5799 int arm_reg0 = REGNO (operands[0]);
5800 rtx ops[3];
5802 if (arm_reg0 == IP_REGNUM)
5803 abort ();
5805 ops[0] = gen_rtx_REG (SImode, arm_reg0);
5806 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
5807 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
5809 output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
5810 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
5811 return "";
5814 /* Output a move from arm registers to arm registers of a long double.
5815 OPERANDS[0] is the destination.
5816 OPERANDS[1] is the source. */
5817 char *
5818 output_mov_long_double_arm_from_arm (operands)
5819 rtx * operands;
5821 /* We have to be careful here because the two might overlap. */
5822 int dest_start = REGNO (operands[0]);
5823 int src_start = REGNO (operands[1]);
5824 rtx ops[2];
5825 int i;
5827 if (dest_start < src_start)
5829 for (i = 0; i < 3; i++)
5831 ops[0] = gen_rtx_REG (SImode, dest_start + i);
5832 ops[1] = gen_rtx_REG (SImode, src_start + i);
5833 output_asm_insn ("mov%?\t%0, %1", ops);
5836 else
5838 for (i = 2; i >= 0; i--)
5840 ops[0] = gen_rtx_REG (SImode, dest_start + i);
5841 ops[1] = gen_rtx_REG (SImode, src_start + i);
5842 output_asm_insn ("mov%?\t%0, %1", ops);
5846 return "";
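/* Illustrative sketch, not part of the build: the same copy-direction
   rule that memmove uses.  When the destination register range starts
   below the source range we copy upwards, otherwise downwards, so an
   overlapping three-register move never reads an already-clobbered
   register.  Modelled with an array standing in for the register
   file; the helper name is hypothetical.  */
#if 0
static void
move_three_words (int *regs, int dest_start, int src_start)
{
  int i;

  if (dest_start < src_start)
    for (i = 0; i < 3; i++)
      regs[dest_start + i] = regs[src_start + i];
  else
    for (i = 2; i >= 0; i--)
      regs[dest_start + i] = regs[src_start + i];
}
#endif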
5850 /* Output a move from arm registers to an fpu register.
5851 OPERANDS[0] is an fpu register.
5852 OPERANDS[1] is the first register of an arm register pair. */
5854 char *
5855 output_mov_double_fpu_from_arm (operands)
5856 rtx * operands;
5858 int arm_reg0 = REGNO (operands[1]);
5859 rtx ops[2];
5861 if (arm_reg0 == IP_REGNUM)
5862 abort ();
5864 ops[0] = gen_rtx_REG (SImode, arm_reg0);
5865 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
5866 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
5867 output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
5868 return "";
5871 /* Output a move from an fpu register to arm registers.
5872 OPERANDS[0] is the first register of an arm register pair.
5873 OPERANDS[1] is an fpu register. */
5875 char *
5876 output_mov_double_arm_from_fpu (operands)
5877 rtx * operands;
5879 int arm_reg0 = REGNO (operands[0]);
5880 rtx ops[2];
5882 if (arm_reg0 == IP_REGNUM)
5883 abort ();
5885 ops[0] = gen_rtx_REG (SImode, arm_reg0);
5886 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
5887 output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
5888 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
5889 return "";
5892 /* Output a move between double words.
5893 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
5894 or MEM<-REG and all MEMs must be offsettable addresses. */
5896 char *
5897 output_move_double (operands)
5898 rtx * operands;
5900 enum rtx_code code0 = GET_CODE (operands[0]);
5901 enum rtx_code code1 = GET_CODE (operands[1]);
5902 rtx otherops[3];
5904 if (code0 == REG)
5906 int reg0 = REGNO (operands[0]);
5908 otherops[0] = gen_rtx_REG (SImode, 1 + reg0);
5910 if (code1 == REG)
5912 int reg1 = REGNO (operands[1]);
5913 if (reg1 == IP_REGNUM)
5914 abort ();
5916 /* Ensure the second source is not overwritten. */
5917 if (reg1 == reg0 + (WORDS_BIG_ENDIAN ? -1 : 1))
5918 output_asm_insn ("mov%?\t%Q0, %Q1\n\tmov%?\t%R0, %R1", operands);
5919 else
5920 output_asm_insn ("mov%?\t%R0, %R1\n\tmov%?\t%Q0, %Q1", operands);
5922 else if (code1 == CONST_DOUBLE)
5924 if (GET_MODE (operands[1]) == DFmode)
5926 long l[2];
5927 union real_extract u;
5929 bcopy ((char *) &CONST_DOUBLE_LOW (operands[1]), (char *) &u,
5930 sizeof (u));
5931 REAL_VALUE_TO_TARGET_DOUBLE (u.d, l);
5932 otherops[1] = GEN_INT (l[1]);
5933 operands[1] = GEN_INT (l[0]);
5935 else if (GET_MODE (operands[1]) != VOIDmode)
5936 abort ();
5937 else if (WORDS_BIG_ENDIAN)
5940 otherops[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
5941 operands[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
5943 else
5946 otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
5947 operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
5950 output_mov_immediate (operands);
5951 output_mov_immediate (otherops);
5953 else if (code1 == CONST_INT)
5955 #if HOST_BITS_PER_WIDE_INT > 32
5956 /* If HOST_WIDE_INT is more than 32 bits, the intval tells us
5957 what the upper word is. */
5958 if (WORDS_BIG_ENDIAN)
5960 otherops[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
5961 operands[1] = GEN_INT (INTVAL (operands[1]) >> 32);
5963 else
5965 otherops[1] = GEN_INT (INTVAL (operands[1]) >> 32);
5966 operands[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
5968 #else
5969 /* Sign extend the intval into the high-order word. */
5970 if (WORDS_BIG_ENDIAN)
5972 otherops[1] = operands[1];
5973 operands[1] = (INTVAL (operands[1]) < 0
5974 ? constm1_rtx : const0_rtx);
5976 else
5977 otherops[1] = INTVAL (operands[1]) < 0 ? constm1_rtx : const0_rtx;
5978 #endif
5979 output_mov_immediate (otherops);
5980 output_mov_immediate (operands);
5982 else if (code1 == MEM)
5984 switch (GET_CODE (XEXP (operands[1], 0)))
5986 case REG:
5987 output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
5988 break;
5990 case PRE_INC:
5991 abort (); /* Should never happen now. */
5992 break;
5994 case PRE_DEC:
5995 output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
5996 break;
5998 case POST_INC:
5999 output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
6000 break;
6002 case POST_DEC:
6003 abort (); /* Should never happen now. */
6004 break;
6006 case LABEL_REF:
6007 case CONST:
6008 output_asm_insn ("adr%?\t%0, %1", operands);
6009 output_asm_insn ("ldm%?ia\t%0, %M0", operands);
6010 break;
6012 default:
6013 if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1),
6014 GET_MODE (XEXP (XEXP (operands[1], 0), 1))))
6016 otherops[0] = operands[0];
6017 otherops[1] = XEXP (XEXP (operands[1], 0), 0);
6018 otherops[2] = XEXP (XEXP (operands[1], 0), 1);
6019 if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
6021 if (GET_CODE (otherops[2]) == CONST_INT)
6023 switch (INTVAL (otherops[2]))
6025 case -8:
6026 output_asm_insn ("ldm%?db\t%1, %M0", otherops);
6027 return "";
6028 case -4:
6029 output_asm_insn ("ldm%?da\t%1, %M0", otherops);
6030 return "";
6031 case 4:
6032 output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
6033 return "";
6035 if (!(const_ok_for_arm (INTVAL (otherops[2]))))
6036 output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
6037 else
6038 output_asm_insn ("add%?\t%0, %1, %2", otherops);
6040 else
6041 output_asm_insn ("add%?\t%0, %1, %2", otherops);
6043 else
6044 output_asm_insn ("sub%?\t%0, %1, %2", otherops);
6046 return "ldm%?ia\t%0, %M0";
6048 else
6050 otherops[1] = adj_offsettable_operand (operands[1], 4);
6051 /* Take care of overlapping base/data reg. */
6052 if (reg_mentioned_p (operands[0], operands[1]))
6054 output_asm_insn ("ldr%?\t%0, %1", otherops);
6055 output_asm_insn ("ldr%?\t%0, %1", operands);
6057 else
6059 output_asm_insn ("ldr%?\t%0, %1", operands);
6060 output_asm_insn ("ldr%?\t%0, %1", otherops);
6065 else
6066 abort (); /* Constraints should prevent this. */
6068 else if (code0 == MEM && code1 == REG)
6070 if (REGNO (operands[1]) == IP_REGNUM)
6071 abort ();
6073 switch (GET_CODE (XEXP (operands[0], 0)))
6075 case REG:
6076 output_asm_insn ("stm%?ia\t%m0, %M1", operands);
6077 break;
6079 case PRE_INC:
6080 abort (); /* Should never happen now. */
6081 break;
6083 case PRE_DEC:
6084 output_asm_insn ("stm%?db\t%m0!, %M1", operands);
6085 break;
6087 case POST_INC:
6088 output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
6089 break;
6091 case POST_DEC:
6092 abort (); /* Should never happen now. */
6093 break;
6095 case PLUS:
6096 if (GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT)
6098 switch (INTVAL (XEXP (XEXP (operands[0], 0), 1)))
6100 case -8:
6101 output_asm_insn ("stm%?db\t%m0, %M1", operands);
6102 return "";
6104 case -4:
6105 output_asm_insn ("stm%?da\t%m0, %M1", operands);
6106 return "";
6108 case 4:
6109 output_asm_insn ("stm%?ib\t%m0, %M1", operands);
6110 return "";
6113 /* Fall through */
6115 default:
6116 otherops[0] = adj_offsettable_operand (operands[0], 4);
6117 otherops[1] = gen_rtx_REG (SImode, 1 + REGNO (operands[1]));
6118 output_asm_insn ("str%?\t%1, %0", operands);
6119 output_asm_insn ("str%?\t%1, %0", otherops);
6122 else
6123 abort (); /* Constraints should prevent this */
6125 return "";
6129 /* Output an arbitrary MOV reg, #n.
6130 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
6132 char *
6133 output_mov_immediate (operands)
6134 rtx * operands;
6136 HOST_WIDE_INT n = INTVAL (operands[1]);
6137 int n_ones = 0;
6138 int i;
6140 /* Try to use one MOV */
6141 if (const_ok_for_arm (n))
6143 output_asm_insn ("mov%?\t%0, %1", operands);
6144 return "";
6147 /* Try to use one MVN */
6148 if (const_ok_for_arm (~n))
6150 operands[1] = GEN_INT (~n);
6151 output_asm_insn ("mvn%?\t%0, %1", operands);
6152 return "";
6155 /* If all else fails, make it out of ORRs or BICs as appropriate. */
6157 for (i=0; i < 32; i++)
6158 if (n & 1 << i)
6159 n_ones++;
6161 if (n_ones > 16) /* Shorter to use MVN with BIC in this case. */
6162 output_multi_immediate (operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1, ~n);
6163 else
6164 output_multi_immediate (operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1, n);
6166 return "";
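/* Illustrative sketch, not part of the build: the popcount heuristic
   above.  A constant with more than 16 bits set has fewer clear bits
   than set bits, so building its complement with MVN and clearing
   bits with BIC needs no more instructions than MOV plus ORRs.  The
   helper name is hypothetical.  */
#if 0
static int
prefer_mvn_bic (unsigned int n)
{
  int ones = 0;
  int i;

  for (i = 0; i < 32; i++)
    if (n & (1u << i))
      ones++;

  return ones > 16;  /* e.g. prefer_mvn_bic (0xfffffff0) is true */
}
#endif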
6170 /* Output an ADD r, s, #n where n may be too big for one instruction. If
6171 adding zero to one register, output nothing. */
6173 char *
6174 output_add_immediate (operands)
6175 rtx * operands;
6177 HOST_WIDE_INT n = INTVAL (operands[2]);
6179 if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
6181 if (n < 0)
6182 output_multi_immediate (operands,
6183 "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
6184 -n);
6185 else
6186 output_multi_immediate (operands,
6187 "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
6191 return "";
6194 /* Output a multiple immediate operation.
6195 OPERANDS is the vector of operands referred to in the output patterns.
6196 INSTR1 is the output pattern to use for the first constant.
6197 INSTR2 is the output pattern to use for subsequent constants.
6198 IMMED_OP is the index of the constant slot in OPERANDS.
6199 N is the constant value. */
6201 static char *
6202 output_multi_immediate (operands, instr1, instr2, immed_op, n)
6203 rtx * operands;
6204 char * instr1, * instr2;
6205 int immed_op;
6206 HOST_WIDE_INT n;
6208 #if HOST_BITS_PER_WIDE_INT > 32
6209 n &= HOST_UINT (0xffffffff);
6210 #endif
6212 if (n == 0)
6214 operands[immed_op] = const0_rtx;
6215 output_asm_insn (instr1, operands); /* Quick and easy output. */
6217 else
6219 int i;
6220 char *instr = instr1;
6222 /* Note that n is never zero here (which would give no output). */
6223 for (i = 0; i < 32; i += 2)
6225 if (n & (3 << i))
6227 operands[immed_op] = GEN_INT (n & (255 << i));
6228 output_asm_insn (instr, operands);
6229 instr = instr2;
6230 i += 6;
6234 return "";
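/* Illustrative sketch, not part of the build: a host version of the
   splitting loop above.  Each step peels off an 8-bit field starting
   at an even bit position, matching the ARM immediate rotation rule.
   For example 0x00012300 splits into the two valid immediates 0x2300
   and 0x10000.  The helper name is hypothetical.  */
#if 0
#include <stdio.h>

static void
split_immediate (unsigned int n)
{
  int i;

  for (i = 0; i < 32; i += 2)
    if (n & (3u << i))
      {
        printf ("chunk 0x%x\n", n & (255u << i));
        /* The next chunk starts at least 8 bits further up.  */
        i += 6;
      }
}
#endif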
6238 /* Return the appropriate ARM instruction for the operation code.
6239 The returned result should not be overwritten. OP is the rtx of the
6240 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
6241 was shifted. */
6243 char *
6244 arithmetic_instr (op, shift_first_arg)
6245 rtx op;
6246 int shift_first_arg;
6248 switch (GET_CODE (op))
6250 case PLUS:
6251 return "add";
6253 case MINUS:
6254 return shift_first_arg ? "rsb" : "sub";
6256 case IOR:
6257 return "orr";
6259 case XOR:
6260 return "eor";
6262 case AND:
6263 return "and";
6265 default:
6266 abort ();
6271 /* Ensure valid constant shifts and return the appropriate shift mnemonic
6272 for the operation code. The returned result should not be overwritten.
6273 OP is the rtx code of the shift.
6274 On exit, *AMOUNTP will be -1 if the shift is by a register, or the
6275 constant shift amount otherwise. */
6277 static char *
6278 shift_op (op, amountp)
6279 rtx op;
6280 HOST_WIDE_INT *amountp;
6282 char * mnem;
6283 enum rtx_code code = GET_CODE (op);
6285 if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
6286 *amountp = -1;
6287 else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
6288 *amountp = INTVAL (XEXP (op, 1));
6289 else
6290 abort ();
6292 switch (code)
6294 case ASHIFT:
6295 mnem = "asl";
6296 break;
6298 case ASHIFTRT:
6299 mnem = "asr";
6300 break;
6302 case LSHIFTRT:
6303 mnem = "lsr";
6304 break;
6306 case ROTATERT:
6307 mnem = "ror";
6308 break;
6310 case MULT:
6311 /* We never have to worry about the amount being other than a
6312 power of 2, since this case can never be reloaded from a reg. */
6313 if (*amountp != -1)
6314 *amountp = int_log2 (*amountp);
6315 else
6316 abort ();
6317 return "asl";
6319 default:
6320 abort ();
6323 if (*amountp != -1)
6325 /* This is not 100% correct, but follows from the desire to merge
6326 multiplication by a power of 2 with the recognizer for a
6327 shift. >=32 is not a valid shift for "asl", so we must try to
6328 output a shift that produces the correct arithmetic result.
6329 Using lsr #32 is identical except for the fact that the carry bit
6330 is not set correctly if we set the flags; but we never use the
6331 carry bit from such an operation, so we can ignore that. */
6332 if (code == ROTATERT)
6333 *amountp &= 31; /* Rotate is just modulo 32 */
6334 else if (*amountp != (*amountp & 31))
6336 if (code == ASHIFT)
6337 mnem = "lsr";
6338 *amountp = 32;
6341 /* Shifts of 0 are no-ops. */
6342 if (*amountp == 0)
6343 return NULL;
6346 return mnem;
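/* Editorial illustration: a (mult x 8) operand produces "asl" with
   *AMOUNTP set to 3 (int_log2 of 8); an (ashift x 40) operand is
   rewritten as "lsr" with *AMOUNTP set to 32, since "asl #40" is not
   encodable but a logical right shift by 32 gives the same all-zero
   result.  A shift by zero makes shift_op return NULL, so no shift
   is printed at all.  */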
6350 /* Return log2 of POWER, which must be a power of two. */
6351 static HOST_WIDE_INT
6352 int_log2 (power)
6353 HOST_WIDE_INT power;
6355 HOST_WIDE_INT shift = 0;
6357 while ((((HOST_INT (1)) << shift) & power) == 0)
6359 if (shift > 31)
6360 abort ();
6361 shift++;
6364 return shift;
6367 /* Output a .ascii pseudo-op, keeping track of lengths. This is because
6368 /bin/as is horribly restrictive. */
6369 #define MAX_ASCII_LEN 51
6371 void
6372 output_ascii_pseudo_op (stream, p, len)
6373 FILE * stream;
6374 const unsigned char * p;
6375 int len;
6377 int i;
6378 int len_so_far = 0;
6380 fputs ("\t.ascii\t\"", stream);
6382 for (i = 0; i < len; i++)
6384 register int c = p[i];
6386 if (len_so_far >= MAX_ASCII_LEN)
6388 fputs ("\"\n\t.ascii\t\"", stream);
6389 len_so_far = 0;
6392 switch (c)
6394 case TARGET_TAB:
6395 fputs ("\\t", stream);
6396 len_so_far += 2;
6397 break;
6399 case TARGET_FF:
6400 fputs ("\\f", stream);
6401 len_so_far += 2;
6402 break;
6404 case TARGET_BS:
6405 fputs ("\\b", stream);
6406 len_so_far += 2;
6407 break;
6409 case TARGET_CR:
6410 fputs ("\\r", stream);
6411 len_so_far += 2;
6412 break;
6414 case TARGET_NEWLINE:
6415 fputs ("\\n", stream);
6416 c = p [i + 1];
6417 if ((c >= ' ' && c <= '~')
6418 || c == TARGET_TAB)
6419 /* This is a good place for a line break. */
6420 len_so_far = MAX_ASCII_LEN;
6421 else
6422 len_so_far += 2;
6423 break;
6425 case '\"':
6426 case '\\':
6427 putc ('\\', stream);
6428 len_so_far ++;
6429 /* drop through. */
6431 default:
6432 if (c >= ' ' && c <= '~')
6434 putc (c, stream);
6435 len_so_far ++;
6437 else
6439 fprintf (stream, "\\%03o", c);
6440 len_so_far += 4;
6442 break;
6446 fputs ("\"\n", stream);
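/* Editorial illustration: for the three-character string "hi\n" the
   code above emits

	.ascii	"hi\n"

   and a string longer than MAX_ASCII_LEN is split across several
   .ascii directives, since some older versions of /bin/as cannot
   cope with long ones.  */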
6450 char *
6451 output_return_instruction (operand, really_return, reverse)
6452 rtx operand;
6453 int really_return;
6454 int reverse;
6456 char instr[100];
6457 int reg, live_regs = 0;
6458 int volatile_func = arm_volatile_func ();
6460 /* If a function is naked, don't use the "return" insn. */
6461 if (arm_naked_function_p (current_function_decl))
6462 return "";
6464 return_used_this_function = 1;
6466 if (TARGET_ABORT_NORETURN && volatile_func)
6468 /* If this function was declared non-returning, and we have found a tail
6469 call, then we have to trust that the called function won't return. */
6470 if (really_return)
6472 rtx ops[2];
6474 /* Otherwise, trap an attempted return by aborting. */
6475 ops[0] = operand;
6476 ops[1] = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)"
6477 : "abort");
6478 assemble_external_libcall (ops[1]);
6479 output_asm_insn (reverse ? "bl%D0\t%a1" : "bl%d0\t%a1", ops);
6482 return "";
6485 if (current_function_calls_alloca && ! really_return)
6486 abort ();
6488 for (reg = 0; reg <= 10; reg++)
6489 if (regs_ever_live[reg] && ! call_used_regs[reg])
6490 live_regs++;
6492 if (! TARGET_APCS_FRAME
6493 && ! frame_pointer_needed
6494 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
6495 && ! call_used_regs[HARD_FRAME_POINTER_REGNUM])
6496 live_regs++;
6498 if (flag_pic && ! TARGET_SINGLE_PIC_BASE
6499 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
6500 live_regs++;
6502 if (live_regs || regs_ever_live[LR_REGNUM])
6503 live_regs++;
6505 if (frame_pointer_needed)
6506 live_regs += 4;
6508 /* On some ARM architectures it is faster to use LDR rather than LDM to
6509 load a single register. On other architectures, the cost is the same. */
6510 if (live_regs == 1
6511 && regs_ever_live[LR_REGNUM]
6512 && ! really_return)
6513 output_asm_insn (reverse ? "ldr%?%D0\t%|lr, [%|sp], #4"
6514 : "ldr%?%d0\t%|lr, [%|sp], #4", &operand);
6515 else if (live_regs == 1
6516 && regs_ever_live[LR_REGNUM]
6517 && TARGET_APCS_32)
6518 output_asm_insn (reverse ? "ldr%?%D0\t%|pc, [%|sp], #4"
6519 : "ldr%?%d0\t%|pc, [%|sp], #4", &operand);
6520 else if (live_regs)
6522 if (! regs_ever_live[LR_REGNUM])
6523 live_regs++;
6525 if (frame_pointer_needed)
6526 strcpy (instr,
6527 reverse ? "ldm%?%D0ea\t%|fp, {" : "ldm%?%d0ea\t%|fp, {");
6528 else
6529 strcpy (instr,
6530 reverse ? "ldm%?%D0fd\t%|sp!, {" : "ldm%?%d0fd\t%|sp!, {");
6532 for (reg = 0; reg <= 10; reg++)
6533 if (regs_ever_live[reg]
6534 && (! call_used_regs[reg]
6535 || (flag_pic && ! TARGET_SINGLE_PIC_BASE
6536 && reg == PIC_OFFSET_TABLE_REGNUM)))
6538 strcat (instr, "%|");
6539 strcat (instr, reg_names[reg]);
6540 if (--live_regs)
6541 strcat (instr, ", ");
6544 if (frame_pointer_needed)
6546 strcat (instr, "%|");
6547 strcat (instr, reg_names[11]);
6548 strcat (instr, ", ");
6549 strcat (instr, "%|");
6550 strcat (instr, reg_names[13]);
6551 strcat (instr, ", ");
6552 strcat (instr, "%|");
6553 strcat (instr, TARGET_INTERWORK || (! really_return)
6554 ? reg_names[LR_REGNUM] : reg_names[PC_REGNUM] );
6556 else
6558 if (! TARGET_APCS_FRAME
6559 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
6560 && ! call_used_regs[HARD_FRAME_POINTER_REGNUM])
6562 strcat (instr, "%|");
6563 strcat (instr, reg_names[HARD_FRAME_POINTER_REGNUM]);
6564 strcat (instr, ", ");
6567 strcat (instr, "%|");
6569 if (TARGET_INTERWORK && really_return)
6570 strcat (instr, reg_names[IP_REGNUM]);
6571 else
6572 strcat (instr, really_return ? reg_names[PC_REGNUM] : reg_names[LR_REGNUM]);
6575 strcat (instr, (TARGET_APCS_32 || !really_return) ? "}" : "}^");
6576 output_asm_insn (instr, &operand);
6578 if (TARGET_INTERWORK && really_return)
6580 strcpy (instr, "bx%?");
6581 strcat (instr, reverse ? "%D0" : "%d0");
6582 strcat (instr, "\t%|");
6583 strcat (instr, frame_pointer_needed ? "lr" : "ip");
6585 output_asm_insn (instr, & operand);
6588 else if (really_return)
6590 if (TARGET_INTERWORK)
6591 sprintf (instr, "bx%%?%%%s0\t%%|lr", reverse ? "D" : "d");
6592 else
6593 sprintf (instr, "mov%%?%%%s0%s\t%%|pc, %%|lr",
6594 reverse ? "D" : "d", TARGET_APCS_32 ? "" : "s");
6596 output_asm_insn (instr, & operand);
6599 return "";
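/* Editorial illustration (assuming TARGET_APCS_32 and no
   interworking): a function that saved no registers returns with

	mov	pc, lr

   while one that pushed r4 and lr returns with the single instruction

	ldmfd	sp!, {r4, pc}

   The %d0/%D0 sequences let the whole return be conditionalised by
   the ccfsm in arm_final_prescan_insn below.  */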
6602 /* Return nonzero if optimizing and the current function is volatile.
6603 Such functions never return, and many memory cycles can be saved
6604 by not storing register values that will never be needed again.
6605 This optimization was added to speed up context switching in a
6606 kernel application. */
6608 arm_volatile_func ()
6610 return (optimize > 0
6611 && current_function_nothrow
6612 && TREE_THIS_VOLATILE (current_function_decl));
6615 /* Write the function name into the code section, directly preceding
6616 the function prologue.
6618 Code will be output similar to this:
6619 t0
6620 .ascii "arm_poke_function_name", 0
6621 .align
6622 t1
6623 .word 0xff000000 + (t1 - t0)
6624 arm_poke_function_name
6625 mov ip, sp
6626 stmfd sp!, {fp, ip, lr, pc}
6627 sub fp, ip, #4
6629 When performing a stack backtrace, code can inspect the value
6630 of 'pc' stored at 'fp' + 0. If the trace function then looks
6631 at location pc - 12 and the top 8 bits are set, then we know
6632 that there is a function name embedded immediately preceding this
6633 location, whose length is given by ((pc[-3]) & ~0xff000000).
6635 We assume that pc is declared as a pointer to an unsigned long.
6637 It is of no benefit to output the function name if we are assembling
6638 a leaf function. These function types will not contain a stack
6639 backtrace structure, therefore it is not possible to determine the
6640 function name. */
6642 void
6643 arm_poke_function_name (stream, name)
6644 FILE * stream;
6645 char * name;
6647 unsigned long alignlength;
6648 unsigned long length;
6649 rtx x;
6651 length = strlen (name) + 1;
6652 alignlength = ROUND_UP (length);
6654 ASM_OUTPUT_ASCII (stream, name, length);
6655 ASM_OUTPUT_ALIGN (stream, 2);
6656 x = GEN_INT (HOST_UINT(0xff000000) + alignlength);
6657 ASM_OUTPUT_INT (stream, x);
6660 /* The amount of stack adjustment that happens here, in output_return and in
6661 output_epilogue must be exactly the same as was calculated during reload,
6662 or things will point to the wrong place. The only time we can safely
6663 ignore this constraint is when a function has no arguments on the stack,
6664 no stack frame requirement and no live registers except for `lr'. If we
6665 can guarantee that by making all function calls into tail calls and that
6666 lr is not clobbered in any other way, then there is no need to push lr
6667 onto the stack. */
6668 void
6669 output_arm_prologue (f, frame_size)
6670 FILE * f;
6671 int frame_size;
6673 int reg, live_regs_mask = 0;
6674 int volatile_func = arm_volatile_func ();
6676 /* Nonzero if we must stuff some register arguments onto the stack as if
6677 they were passed there. */
6678 int store_arg_regs = 0;
6680 if (arm_ccfsm_state || arm_target_insn)
6681 abort (); /* Sanity check. */
6683 if (arm_naked_function_p (current_function_decl))
6684 return;
6686 return_used_this_function = 0;
6688 asm_fprintf (f, "\t%@ args = %d, pretend = %d, frame = %d\n",
6689 current_function_args_size,
6690 current_function_pretend_args_size, frame_size);
6691 asm_fprintf (f, "\t%@ frame_needed = %d, current_function_anonymous_args = %d\n",
6692 frame_pointer_needed,
6693 current_function_anonymous_args);
6695 if (volatile_func)
6696 asm_fprintf (f, "\t%@ Volatile function.\n");
6698 if (current_function_anonymous_args && current_function_pretend_args_size)
6699 store_arg_regs = 1;
6701 for (reg = 0; reg <= 10; reg++)
6702 if (regs_ever_live[reg] && ! call_used_regs[reg])
6703 live_regs_mask |= (1 << reg);
6705 if (! TARGET_APCS_FRAME
6706 && ! frame_pointer_needed
6707 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
6708 && ! call_used_regs[HARD_FRAME_POINTER_REGNUM])
6709 live_regs_mask |= (1 << HARD_FRAME_POINTER_REGNUM);
6711 if (flag_pic && ! TARGET_SINGLE_PIC_BASE
6712 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
6713 live_regs_mask |= (1 << PIC_OFFSET_TABLE_REGNUM);
6715 if (frame_pointer_needed)
6716 live_regs_mask |= 0xD800;
6717 else if (regs_ever_live[LR_REGNUM])
6719 live_regs_mask |= 1 << LR_REGNUM;
6722 if (live_regs_mask)
6723 /* If a DImode load/store multiple is used, and the base register
6724 is r3, then r4 can become ever-live without lr doing so; in
6725 this case we need to push lr as well, or we will fail to get
6726 a proper return. */
6727 live_regs_mask |= 1 << LR_REGNUM;
6729 #ifdef AOF_ASSEMBLER
6730 if (flag_pic)
6731 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, PIC_OFFSET_TABLE_REGNUM);
6732 #endif
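/* Editorial note: output_arm_prologue only emits assembler comments
   describing the frame layout, for example

	@ args = 0, pretend = 0, frame = 8
	@ frame_needed = 1, current_function_anonymous_args = 0

   The prologue instructions themselves are generated as RTL by
   arm_expand_prologue below.  */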
6735 char *
6736 arm_output_epilogue (really_return)
6737 int really_return;
6739 int reg;
6740 int live_regs_mask = 0;
6741 /* If we need this, then it will always be at least this much. */
6742 int floats_offset = 12;
6743 rtx operands[3];
6744 int frame_size = get_frame_size ();
6745 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
6746 FILE * f = asm_out_file;
6747 int volatile_func = arm_volatile_func ();
6748 int return_regnum;
6750 if (use_return_insn (FALSE) && return_used_this_function)
6751 return "";
6753 /* Naked functions don't have epilogues. */
6754 if (arm_naked_function_p (current_function_decl))
6755 return "";
6757 /* If we are throwing an exception, the address we want to jump to is in
6758 R1; otherwise, it's in LR. */
6759 return_regnum = eh_ofs ? 2 : LR_REGNUM;
6761 /* If we are throwing an exception, then we really must be doing a return,
6762 so we can't tail-call. */
6763 if (eh_ofs && ! really_return)
6764 abort ();
6766 /* A volatile function should never return. Call abort. */
6767 if (TARGET_ABORT_NORETURN && volatile_func)
6769 rtx op;
6770 op = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)" : "abort");
6771 assemble_external_libcall (op);
6772 output_asm_insn ("bl\t%a0", &op);
6773 return "";
6776 for (reg = 0; reg <= 10; reg++)
6777 if (regs_ever_live[reg] && ! call_used_regs[reg])
6779 live_regs_mask |= (1 << reg);
6780 floats_offset += 4;
6783 /* Handle the frame pointer as a special case. */
6784 if (! TARGET_APCS_FRAME
6785 && ! frame_pointer_needed
6786 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
6787 && ! call_used_regs[HARD_FRAME_POINTER_REGNUM])
6789 live_regs_mask |= (1 << HARD_FRAME_POINTER_REGNUM);
6790 floats_offset += 4;
6793 /* If we aren't loading the PIC register, don't stack it even though it may
6794 be live. */
6795 if (flag_pic && ! TARGET_SINGLE_PIC_BASE
6796 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
6798 live_regs_mask |= (1 << PIC_OFFSET_TABLE_REGNUM);
6799 floats_offset += 4;
6802 if (frame_pointer_needed)
6804 if (arm_fpu_arch == FP_SOFT2)
6806 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
6807 if (regs_ever_live[reg] && ! call_used_regs[reg])
6809 floats_offset += 12;
6810 asm_fprintf (f, "\tldfe\t%r, [%r, #-%d]\n",
6811 reg, FP_REGNUM, floats_offset);
6814 else
6816 int start_reg = LAST_ARM_FP_REGNUM;
6818 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
6820 if (regs_ever_live[reg] && ! call_used_regs[reg])
6822 floats_offset += 12;
6824 /* We can't unstack more than four registers at once. */
6825 if (start_reg - reg == 3)
6827 asm_fprintf (f, "\tlfm\t%r, 4, [%r, #-%d]\n",
6828 reg, FP_REGNUM, floats_offset);
6829 start_reg = reg - 1;
6832 else
6834 if (reg != start_reg)
6835 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
6836 reg + 1, start_reg - reg,
6837 FP_REGNUM, floats_offset);
6838 start_reg = reg - 1;
6842 /* Just in case the last register checked also needs unstacking. */
6843 if (reg != start_reg)
6844 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
6845 reg + 1, start_reg - reg,
6846 FP_REGNUM, floats_offset);
6849 if (TARGET_INTERWORK)
6851 live_regs_mask |= 0x6800;
6852 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, live_regs_mask, FALSE);
6853 if (eh_ofs)
6854 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
6855 REGNO (eh_ofs));
6856 if (really_return)
6857 asm_fprintf (f, "\tbx\t%r\n", return_regnum);
6859 else if (eh_ofs || ! really_return)
6861 live_regs_mask |= 0x6800;
6862 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, live_regs_mask, FALSE);
6863 if (eh_ofs)
6865 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
6866 REGNO (eh_ofs));
6867 /* Even in 26-bit mode we do a mov (rather than a movs)
6868 because we don't have the PSR bits set in the
6869 address. */
6870 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, return_regnum);
6873 else
6875 live_regs_mask |= 0xA800;
6876 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, live_regs_mask,
6877 TARGET_APCS_32 ? FALSE : TRUE);
6880 else
6882 /* Restore stack pointer if necessary. */
6883 if (frame_size + current_function_outgoing_args_size != 0)
6885 operands[0] = operands[1] = stack_pointer_rtx;
6886 operands[2] = GEN_INT (frame_size
6887 + current_function_outgoing_args_size);
6888 output_add_immediate (operands);
6891 if (arm_fpu_arch == FP_SOFT2)
6893 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
6894 if (regs_ever_live[reg] && ! call_used_regs[reg])
6895 asm_fprintf (f, "\tldfe\t%r, [%r], #12\n",
6896 reg, SP_REGNUM);
6898 else
6900 int start_reg = FIRST_ARM_FP_REGNUM;
6902 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
6904 if (regs_ever_live[reg] && ! call_used_regs[reg])
6906 if (reg - start_reg == 3)
6908 asm_fprintf (f, "\tlfmfd\t%r, 4, [%r]!\n",
6909 start_reg, SP_REGNUM);
6910 start_reg = reg + 1;
6913 else
6915 if (reg != start_reg)
6916 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
6917 start_reg, reg - start_reg,
6918 SP_REGNUM);
6920 start_reg = reg + 1;
6924 /* Just in case the last register checked also needs unstacking. */
6925 if (reg != start_reg)
6926 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
6927 start_reg, reg - start_reg, SP_REGNUM);
6930 if (current_function_pretend_args_size == 0 && regs_ever_live[LR_REGNUM])
6932 if (TARGET_INTERWORK)
6934 live_regs_mask |= 1 << LR_REGNUM;
6936 /* Handle LR on its own. */
6937 if (live_regs_mask == (1 << LR_REGNUM))
6939 if (eh_ofs)
6940 asm_fprintf (f, "\tadd\t%r, %r, #4\n", SP_REGNUM,
6941 SP_REGNUM);
6942 else
6943 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM,
6944 SP_REGNUM);
6946 else if (live_regs_mask != 0)
6947 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, live_regs_mask,
6948 FALSE);
6950 if (eh_ofs)
6951 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
6952 REGNO (eh_ofs));
6954 if (really_return)
6955 asm_fprintf (f, "\tbx\t%r\n", return_regnum);
6957 else if (eh_ofs)
6959 if (live_regs_mask == 0)
6960 asm_fprintf (f, "\tadd\t%r, %r, #4\n", SP_REGNUM, SP_REGNUM);
6961 else
6962 print_multi_reg (f, "\tldmfd\t%r!", SP_REGNUM,
6963 live_regs_mask | (1 << LR_REGNUM), FALSE);
6965 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
6966 REGNO (eh_ofs));
6967 /* Jump to the target; even in 26-bit mode. */
6968 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, return_regnum);
6970 else if (TARGET_APCS_32 && live_regs_mask == 0 && ! really_return)
6971 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM, SP_REGNUM);
6972 else if (TARGET_APCS_32 && live_regs_mask == 0 && really_return)
6973 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", PC_REGNUM, SP_REGNUM);
6974 else if (! really_return)
6975 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM,
6976 live_regs_mask | (1 << LR_REGNUM), FALSE);
6977 else
6978 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM,
6979 live_regs_mask | (1 << PC_REGNUM),
6980 TARGET_APCS_32 ? FALSE : TRUE);
6982 else
6984 if (live_regs_mask || regs_ever_live[LR_REGNUM])
6986 /* Restore the integer regs, and the return address into lr. */
6987 live_regs_mask |= 1 << LR_REGNUM;
6989 if (live_regs_mask == (1 << LR_REGNUM))
6991 if (eh_ofs)
6992 asm_fprintf (f, "\tadd\t%r, %r, #4\n", SP_REGNUM,
6993 SP_REGNUM);
6994 else
6995 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM,
6996 SP_REGNUM);
6998 else if (live_regs_mask != 0)
6999 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, live_regs_mask,
7000 FALSE);
7003 if (current_function_pretend_args_size)
7005 /* Unwind the pre-pushed regs. */
7006 operands[0] = operands[1] = stack_pointer_rtx;
7007 operands[2] = GEN_INT (current_function_pretend_args_size);
7008 output_add_immediate (operands);
7011 if (eh_ofs)
7012 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
7013 REGNO (eh_ofs));
7015 if (really_return)
7017 /* And finally, go home. */
7018 if (TARGET_INTERWORK)
7019 asm_fprintf (f, "\tbx\t%r\n", return_regnum);
7020 else if (TARGET_APCS_32 || eh_ofs)
7021 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, return_regnum);
7022 else
7023 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, return_regnum);
7028 return "";
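/* Editorial illustration: for a function with a standard APCS frame,
   the common exit path above restores everything with one instruction,

	ldmea	fp, {fp, sp, pc}

   (mask 0xA800 selects fp, sp and pc), reloading the caller's frame
   pointer, stack pointer and return address in a single LDM.  */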
7031 void
7032 output_func_epilogue (frame_size)
7033 int frame_size;
7035 if (TARGET_THUMB)
7037 /* ??? Probably not safe to set this here, since it assumes that a
7038 function will be emitted as assembly immediately after we generate
7039 RTL for it. This does not happen for inline functions. */
7040 return_used_this_function = 0;
7042 else
7044 if (use_return_insn (FALSE)
7045 && return_used_this_function
7046 && (frame_size + current_function_outgoing_args_size) != 0
7047 && ! frame_pointer_needed)
7048 abort ();
7050 /* Reset the ARM-specific per-function variables. */
7051 current_function_anonymous_args = 0;
7052 after_arm_reorg = 0;
7056 /* Generate and emit an insn that we will recognize as a push_multi.
7057 Unfortunately, since this insn does not reflect very well the actual
7058 semantics of the operation, we need to annotate the insn for the benefit
7059 of DWARF2 frame unwind information. */
7060 static rtx
7061 emit_multi_reg_push (mask)
7062 int mask;
7064 int num_regs = 0;
7065 int i, j;
7066 rtx par;
7067 rtx dwarf;
7068 rtx tmp, reg;
7070 for (i = 0; i <= LAST_ARM_REGNUM; i++)
7071 if (mask & (1 << i))
7072 num_regs ++;
7074 if (num_regs == 0 || num_regs > 16)
7075 abort ();
7077 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_regs));
7078 dwarf = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_regs));
7079 RTX_FRAME_RELATED_P (dwarf) = 1;
7081 for (i = 0; i <= LAST_ARM_REGNUM; i++)
7083 if (mask & (1 << i))
7085 reg = gen_rtx_REG (SImode, i);
7087 XVECEXP (par, 0, 0)
7088 = gen_rtx_SET (VOIDmode,
7089 gen_rtx_MEM (BLKmode,
7090 gen_rtx_PRE_DEC (BLKmode,
7091 stack_pointer_rtx)),
7092 gen_rtx_UNSPEC (BLKmode,
7093 gen_rtvec (1, reg),
7094 2));
7096 tmp = gen_rtx_SET (VOIDmode,
7097 gen_rtx_MEM (SImode,
7098 gen_rtx_PRE_DEC (BLKmode,
7099 stack_pointer_rtx)),
7100 reg);
7101 RTX_FRAME_RELATED_P (tmp) = 1;
7102 XVECEXP (dwarf, 0, num_regs - 1) = tmp;
7104 break;
7108 for (j = 1, i++; j < num_regs; i++)
7110 if (mask & (1 << i))
7112 reg = gen_rtx_REG (SImode, i);
7114 XVECEXP (par, 0, j) = gen_rtx_USE (VOIDmode, reg);
7116 tmp = gen_rtx_SET (VOIDmode,
7117 gen_rtx_MEM (SImode,
7118 gen_rtx_PRE_DEC (BLKmode,
7119 stack_pointer_rtx)),
7120 reg);
7121 RTX_FRAME_RELATED_P (tmp) = 1;
7122 XVECEXP (dwarf, 0, num_regs - j - 1) = tmp;
7124 j++;
7128 par = emit_insn (par);
7129 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
7130 REG_NOTES (par));
7131 return par;
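/* Editorial note: for a MASK of (1 << 4) | (1 << LR_REGNUM) the
   PARALLEL built above is recognized as a push_multi and output as

	stmfd	sp!, {r4, lr}

   while the attached REG_FRAME_RELATED_EXPR note re-describes the
   store as individual pre-decrement SETs of r4 and lr, which is the
   form the DWARF2 unwind machinery understands.  */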
7134 static rtx
7135 emit_sfm (base_reg, count)
7136 int base_reg;
7137 int count;
7139 rtx par;
7140 rtx dwarf;
7141 rtx tmp, reg;
7142 int i;
7144 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
7145 dwarf = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
7146 RTX_FRAME_RELATED_P (dwarf) = 1;
7148 reg = gen_rtx_REG (XFmode, base_reg++);
7150 XVECEXP (par, 0, 0)
7151 = gen_rtx_SET (VOIDmode,
7152 gen_rtx_MEM (BLKmode,
7153 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
7154 gen_rtx_UNSPEC (BLKmode,
7155 gen_rtvec (1, reg),
7156 2));
6158 tmp = gen_rtx_SET (VOIDmode,
7159 gen_rtx_MEM (XFmode,
7160 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
7161 reg);
7162 RTX_FRAME_RELATED_P (tmp) = 1;
7163 XVECEXP (dwarf, 0, count - 1) = tmp;
7165 for (i = 1; i < count; i++)
7167 reg = gen_rtx_REG (XFmode, base_reg++);
7168 XVECEXP (par, 0, i) = gen_rtx_USE (VOIDmode, reg);
7170 tmp = gen_rtx_SET (VOIDmode,
7171 gen_rtx_MEM (XFmode,
7172 gen_rtx_PRE_DEC (BLKmode,
7173 stack_pointer_rtx)),
7174 reg);
7175 RTX_FRAME_RELATED_P (tmp) = 1;
7176 XVECEXP (dwarf, 0, count - i - 1) = tmp;
7179 par = emit_insn (par);
7180 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
7181 REG_NOTES (par));
7182 return par;
7185 void
7186 arm_expand_prologue ()
7188 int reg;
7189 rtx amount = GEN_INT (-(get_frame_size ()
7190 + current_function_outgoing_args_size));
7191 int live_regs_mask = 0;
7192 int store_arg_regs = 0;
7193 /* If this function doesn't return, then there is no need to push
7194 the call-saved regs. */
7195 int volatile_func = arm_volatile_func ();
7196 rtx insn;
7198 /* Naked functions don't have prologues. */
7199 if (arm_naked_function_p (current_function_decl))
7200 return;
7202 if (current_function_anonymous_args && current_function_pretend_args_size)
7203 store_arg_regs = 1;
7205 if (! volatile_func)
7207 for (reg = 0; reg <= 10; reg++)
7208 if (regs_ever_live[reg] && ! call_used_regs[reg])
7209 live_regs_mask |= 1 << reg;
7211 if (! TARGET_APCS_FRAME
7212 && ! frame_pointer_needed
7213 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
7214 && ! call_used_regs[HARD_FRAME_POINTER_REGNUM])
7215 live_regs_mask |= 1 << HARD_FRAME_POINTER_REGNUM;
7217 if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
7218 live_regs_mask |= 1 << PIC_OFFSET_TABLE_REGNUM;
7220 if (regs_ever_live[LR_REGNUM])
7221 live_regs_mask |= 1 << LR_REGNUM;
7224 if (frame_pointer_needed)
7226 live_regs_mask |= 0xD800;
7227 insn = emit_insn (gen_movsi (gen_rtx_REG (SImode, IP_REGNUM),
7228 stack_pointer_rtx));
7229 RTX_FRAME_RELATED_P (insn) = 1;
7232 if (current_function_pretend_args_size)
7234 if (store_arg_regs)
7235 insn = emit_multi_reg_push
7236 ((0xf0 >> (current_function_pretend_args_size / 4)) & 0xf);
7237 else
7238 insn = emit_insn
7239 (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
7240 GEN_INT (-current_function_pretend_args_size)));
7241 RTX_FRAME_RELATED_P (insn) = 1;
7244 if (live_regs_mask)
7246 /* If we have to push any regs, then we must push lr as well, or
7247 we won't get a proper return. */
7248 live_regs_mask |= 1 << LR_REGNUM;
7249 insn = emit_multi_reg_push (live_regs_mask);
7250 RTX_FRAME_RELATED_P (insn) = 1;
7253 /* For now the integer regs are still popped in arm_output_epilogue (). */
7255 if (! volatile_func)
7257 if (arm_fpu_arch == FP_SOFT2)
7259 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg --)
7260 if (regs_ever_live[reg] && ! call_used_regs[reg])
7262 insn = gen_rtx_PRE_DEC (XFmode, stack_pointer_rtx);
7263 insn = gen_rtx_MEM (XFmode, insn);
7264 insn = emit_insn (gen_rtx_SET (VOIDmode, insn,
7265 gen_rtx_REG (XFmode, reg)));
7266 RTX_FRAME_RELATED_P (insn) = 1;
7269 else
7271 int start_reg = LAST_ARM_FP_REGNUM;
7273 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg --)
7275 if (regs_ever_live[reg] && ! call_used_regs[reg])
7277 if (start_reg - reg == 3)
7279 insn = emit_sfm (reg, 4);
7280 RTX_FRAME_RELATED_P (insn) = 1;
7281 start_reg = reg - 1;
7284 else
7286 if (start_reg != reg)
7288 insn = emit_sfm (reg + 1, start_reg - reg);
7289 RTX_FRAME_RELATED_P (insn) = 1;
7291 start_reg = reg - 1;
7295 if (start_reg != reg)
7297 insn = emit_sfm (reg + 1, start_reg - reg);
7298 RTX_FRAME_RELATED_P (insn) = 1;
7303 if (frame_pointer_needed)
7305 insn = GEN_INT (-(4 + current_function_pretend_args_size));
7306 insn = emit_insn (gen_addsi3 (hard_frame_pointer_rtx,
7307 gen_rtx_REG (SImode, IP_REGNUM),
7308 insn));
7309 RTX_FRAME_RELATED_P (insn) = 1;
7312 if (amount != const0_rtx)
7314 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
7315 amount));
7316 RTX_FRAME_RELATED_P (insn) = 1;
7317 emit_insn (gen_rtx_CLOBBER (VOIDmode,
7318 gen_rtx_MEM (BLKmode, stack_pointer_rtx)));
7321 /* If we are profiling, make sure no instructions are scheduled before
7322 the call to mcount. Similarly if the user has requested no
7323 scheduling in the prologue. */
7324 if (profile_flag || profile_block_flag || TARGET_NO_SCHED_PRO)
7325 emit_insn (gen_blockage ());
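/* Editorial illustration: for a function that needs an APCS frame
   and 8 bytes of locals, the RTL emitted above corresponds to

	mov	ip, sp
	stmfd	sp!, {fp, ip, lr, pc}
	sub	fp, ip, #4
	sub	sp, sp, #8

   with each insn marked RTX_FRAME_RELATED_P for the unwinder.  */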
7328 /* If CODE is 'd', then the X is a condition operand and the instruction
7329 should only be executed if the condition is true.
7330 if CODE is 'D', then the X is a condition operand and the instruction
7331 should only be executed if the condition is false: however, if the mode
7332 of the comparison is CCFPEmode, then always execute the instruction -- we
7333 do this because in these circumstances !GE does not necessarily imply LT;
7334 in these cases the instruction pattern will take care to make sure that
7335 an instruction containing %d will follow, thereby undoing the effects of
7336 doing this instruction unconditionally.
7337 If CODE is 'N' then X is a floating point operand that must be negated
7338 before output.
7339 If CODE is 'B' then output a bitwise inverted value of X (a const int).
7340 If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */
7342 void
7343 arm_print_operand (stream, x, code)
7344 FILE * stream;
7345 rtx x;
7346 int code;
7348 switch (code)
7350 case '@':
7351 fputs (ASM_COMMENT_START, stream);
7352 return;
7354 case '_':
7355 fputs (user_label_prefix, stream);
7356 return;
7358 case '|':
7359 fputs (REGISTER_PREFIX, stream);
7360 return;
7362 case '?':
7363 if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
7364 fputs (arm_condition_codes[arm_current_cc], stream);
7365 return;
7367 case 'N':
7369 REAL_VALUE_TYPE r;
7370 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
7371 r = REAL_VALUE_NEGATE (r);
7372 fprintf (stream, "%s", fp_const_from_val (&r));
7374 return;
7376 case 'B':
7377 if (GET_CODE (x) == CONST_INT)
7379 HOST_WIDE_INT val;
7380 val = ARM_SIGN_EXTEND (~ INTVAL (x));
7381 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
7383 else
7385 putc ('~', stream);
7386 output_addr_const (stream, x);
7388 return;
7390 case 'i':
7391 fprintf (stream, "%s", arithmetic_instr (x, 1));
7392 return;
7394 case 'I':
7395 fprintf (stream, "%s", arithmetic_instr (x, 0));
7396 return;
7398 case 'S':
7400 HOST_WIDE_INT val;
7401 char * shift = shift_op (x, & val);
7403 if (shift)
7405 fprintf (stream, ", %s ", shift_op (x, & val));
7406 if (val == -1)
7407 arm_print_operand (stream, XEXP (x, 1), 0);
7408 else
7410 fputc ('#', stream);
7411 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
7415 return;
7417 /* An explanation of the 'Q', 'R' and 'H' register operands:
7419 In a pair of registers containing a DI or DF value the 'Q'
7420 operand returns the register number of the register containing
7421 the least significant part of the value. The 'R' operand returns
7422 the register number of the register containing the most
7423 significant part of the value.
7425 The 'H' operand returns the higher of the two register numbers.
7426 On a run where WORDS_BIG_ENDIAN is true the 'H' operand is the
7427 same as the 'Q' operand, since the most significant part of the
7428 value is held in the lower number register. The reverse is true
7429 on systems where WORDS_BIG_ENDIAN is false.
7431 The purpose of these operands is to distinguish between cases
7432 where the endian-ness of the values is important (for example
7433 when they are added together), and cases where the endian-ness
7434 is irrelevant, but the order of register operations is important.
7435 For example when loading a value from memory into a register
7436 pair, the endian-ness does not matter. Provided that the value
7437 from the lower memory address is put into the lower numbered
7438 register, and the value from the higher address is put into the
7439 higher numbered register, the load will work regardless of whether
7440 the value being loaded is big-wordian or little-wordian. The
7441 order of the two register loads can matter however, if the address
7442 of the memory location is actually held in one of the registers
7443 being overwritten by the load. */
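/* Editorial illustration: for a DImode value held in r0/r1 on a
   little-endian target, %Q prints r0 (the least significant word),
   %R prints r1 and %H prints r1; with WORDS_BIG_ENDIAN, %Q and %H
   both print r1 while %R prints r0.  */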
7444 case 'Q':
7445 if (REGNO (x) > LAST_ARM_REGNUM)
7446 abort ();
7447 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 1 : 0));
7448 return;
7450 case 'R':
7451 if (REGNO (x) > LAST_ARM_REGNUM)
7452 abort ();
7453 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 0 : 1));
7454 return;
7456 case 'H':
7457 if (REGNO (x) > LAST_ARM_REGNUM)
7458 abort ();
7459 asm_fprintf (stream, "%r", REGNO (x) + 1);
7460 return;
7462 case 'm':
7463 asm_fprintf (stream, "%r",
7464 GET_CODE (XEXP (x, 0)) == REG
7465 ? REGNO (XEXP (x, 0)) : REGNO (XEXP (XEXP (x, 0), 0)));
7466 return;
7468 case 'M':
7469 asm_fprintf (stream, "{%r-%r}",
7470 REGNO (x),
7471 REGNO (x) + NUM_REGS (GET_MODE (x)) - 1);
7472 return;
7474 case 'd':
7475 if (! x)
7476 return;
7478 if (TARGET_ARM)
7479 fputs (arm_condition_codes[get_arm_condition_code (x)],
7480 stream);
7481 else
7482 fputs (thumb_condition_code (x, 0), stream);
7483 return;
7485 case 'D':
7486 if (! x)
7487 return;
7489 if (TARGET_ARM)
7490 fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
7491 (get_arm_condition_code (x))],
7492 stream);
7493 else
7494 fputs (thumb_condition_code (x, 1), stream);
7495 return;
7497 default:
7498 if (x == 0)
7499 abort ();
7501 if (GET_CODE (x) == REG)
7502 asm_fprintf (stream, "%r", REGNO (x));
7503 else if (GET_CODE (x) == MEM)
7505 output_memory_reference_mode = GET_MODE (x);
7506 output_address (XEXP (x, 0));
7508 else if (GET_CODE (x) == CONST_DOUBLE)
7509 fprintf (stream, "#%s", fp_immediate_constant (x));
7510 else if (GET_CODE (x) == NEG)
7511 abort (); /* This should never happen now. */
7512 else
7514 fputc ('#', stream);
7515 output_addr_const (stream, x);
7520 /* A finite state machine takes care of noticing whether or not instructions
7521 can be conditionally executed, and thus decreases execution time and code
7522 size by deleting branch instructions. The fsm is controlled by
7523 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
7525 /* The states of the fsm controlling condition codes are:
7526 0: normal, do nothing special
7527 1: make ASM_OUTPUT_OPCODE not output this instruction
7528 2: make ASM_OUTPUT_OPCODE not output this instruction
7529 3: make instructions conditional
7530 4: make instructions conditional
7532 State transitions (state->state by whom under condition):
7533 0 -> 1 final_prescan_insn if the `target' is a label
7534 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
7535 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
7536 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
7537 3 -> 0 ASM_OUTPUT_INTERNAL_LABEL if the `target' label is reached
7538 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
7539 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
7540 (the target insn is arm_target_insn).
7542 If the jump clobbers the conditions then we use states 2 and 4.
7544 A similar thing can be done with conditional return insns.
7546 XXX In case the `target' is an unconditional branch, this conditionalising
7547 of the instructions always reduces code size, but not always execution
7548 time. But then, I want to reduce the code size to somewhere near what
7549 /bin/cc produces. */
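/* Editorial illustration of the transformation: a sequence such as

	cmp	r0, #0
	beq	.L1
	mov	r1, #1
   .L1:

   is reduced to

	cmp	r0, #0
	movne	r1, #1

   by suppressing the branch and conditionalising the skipped insn.  */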
7551 /* Returns the index of the ARM condition code string in
7552 `arm_condition_codes'. COMPARISON should be an rtx like
7553 `(eq (...) (...))'. */
7555 static enum arm_cond_code
7556 get_arm_condition_code (comparison)
7557 rtx comparison;
7559 enum machine_mode mode = GET_MODE (XEXP (comparison, 0));
7560 register int code;
7561 register enum rtx_code comp_code = GET_CODE (comparison);
7563 if (GET_MODE_CLASS (mode) != MODE_CC)
7564 mode = SELECT_CC_MODE (comp_code, XEXP (comparison, 0),
7565 XEXP (comparison, 1));
7567 switch (mode)
7569 case CC_DNEmode: code = ARM_NE; goto dominance;
7570 case CC_DEQmode: code = ARM_EQ; goto dominance;
7571 case CC_DGEmode: code = ARM_GE; goto dominance;
7572 case CC_DGTmode: code = ARM_GT; goto dominance;
7573 case CC_DLEmode: code = ARM_LE; goto dominance;
7574 case CC_DLTmode: code = ARM_LT; goto dominance;
7575 case CC_DGEUmode: code = ARM_CS; goto dominance;
7576 case CC_DGTUmode: code = ARM_HI; goto dominance;
7577 case CC_DLEUmode: code = ARM_LS; goto dominance;
7578 case CC_DLTUmode: code = ARM_CC;
7580 dominance:
7581 if (comp_code != EQ && comp_code != NE)
7582 abort ();
7584 if (comp_code == EQ)
7585 return ARM_INVERSE_CONDITION_CODE (code);
7586 return code;
7588 case CC_NOOVmode:
7589 switch (comp_code)
7591 case NE: return ARM_NE;
7592 case EQ: return ARM_EQ;
7593 case GE: return ARM_PL;
7594 case LT: return ARM_MI;
7595 default: abort ();
7598 case CC_Zmode:
7599 case CCFPmode:
7600 switch (comp_code)
7602 case NE: return ARM_NE;
7603 case EQ: return ARM_EQ;
7604 default: abort ();
7607 case CCFPEmode:
7608 switch (comp_code)
7610 case GE: return ARM_GE;
7611 case GT: return ARM_GT;
7612 case LE: return ARM_LS;
7613 case LT: return ARM_MI;
7614 default: abort ();
7617 case CC_SWPmode:
7618 switch (comp_code)
7620 case NE: return ARM_NE;
7621 case EQ: return ARM_EQ;
7622 case GE: return ARM_LE;
7623 case GT: return ARM_LT;
7624 case LE: return ARM_GE;
7625 case LT: return ARM_GT;
7626 case GEU: return ARM_LS;
7627 case GTU: return ARM_CC;
7628 case LEU: return ARM_CS;
7629 case LTU: return ARM_HI;
7630 default: abort ();
7633 case CC_Cmode:
7634 switch (comp_code)
7636 case LTU: return ARM_CS;
7637 case GEU: return ARM_CC;
7638 default: abort ();
7641 case CCmode:
7642 switch (comp_code)
7644 case NE: return ARM_NE;
7645 case EQ: return ARM_EQ;
7646 case GE: return ARM_GE;
7647 case GT: return ARM_GT;
7648 case LE: return ARM_LE;
7649 case LT: return ARM_LT;
7650 case GEU: return ARM_CS;
7651 case GTU: return ARM_HI;
7652 case LEU: return ARM_LS;
7653 case LTU: return ARM_CC;
7654 default: abort ();
7657 default: abort ();
7660 abort ();
7664 void
7665 arm_final_prescan_insn (insn)
7666 rtx insn;
7668 /* BODY will hold the body of INSN. */
7669 register rtx body = PATTERN (insn);
7671 /* This will be 1 if trying to repeat the trick, and things need to be
7672 reversed if it appears to fail. */
7673 int reverse = 0;
7675 /* JUMP_CLOBBERS will be nonzero if the condition codes are clobbered
7676 when the branch is taken, even if the rtl suggests otherwise. It also
7677 means that we have to grub around within the jump expression to find
7678 out what the conditions are when the jump isn't taken. */
7679 int jump_clobbers = 0;
7681 /* If we start with a return insn, we only succeed if we find another one. */
7682 int seeking_return = 0;
7684 /* START_INSN will hold the insn from where we start looking. This is the
7685 first insn after the following code_label if REVERSE is true. */
7686 rtx start_insn = insn;
7688 /* If in state 4, check if the target branch is reached, in order to
7689 change back to state 0. */
7690 if (arm_ccfsm_state == 4)
7692 if (insn == arm_target_insn)
7694 arm_target_insn = NULL;
7695 arm_ccfsm_state = 0;
7697 return;
7700 /* If in state 3, it is possible to repeat the trick, if this insn is an
7701 unconditional branch to a label, and immediately following this branch
7702 is the previous target label which is only used once, and the label this
7703 branch jumps to is not too far off. */
7704 if (arm_ccfsm_state == 3)
7706 if (simplejump_p (insn))
7708 start_insn = next_nonnote_insn (start_insn);
7709 if (GET_CODE (start_insn) == BARRIER)
7711 /* XXX Isn't this always a barrier? */
7712 start_insn = next_nonnote_insn (start_insn);
7714 if (GET_CODE (start_insn) == CODE_LABEL
7715 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
7716 && LABEL_NUSES (start_insn) == 1)
7717 reverse = TRUE;
7718 else
7719 return;
7721 else if (GET_CODE (body) == RETURN)
7723 start_insn = next_nonnote_insn (start_insn);
7724 if (GET_CODE (start_insn) == BARRIER)
7725 start_insn = next_nonnote_insn (start_insn);
7726 if (GET_CODE (start_insn) == CODE_LABEL
7727 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
7728 && LABEL_NUSES (start_insn) == 1)
7730 reverse = TRUE;
7731 seeking_return = 1;
7733 else
7734 return;
7736 else
7737 return;
7740 if (arm_ccfsm_state != 0 && !reverse)
7741 abort ();
7742 if (GET_CODE (insn) != JUMP_INSN)
7743 return;
7745 /* This jump might be paralleled with a clobber of the condition codes;
7746 the jump should always come first. */
7747 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
7748 body = XVECEXP (body, 0, 0);
7750 #if 0
7751 /* If this is a conditional return then we don't want to know */
7752 if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
7753 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
7754 && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
7755 || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
7756 return;
7757 #endif
7759 if (reverse
7760 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
7761 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
7763 int insns_skipped;
7764 int fail = FALSE, succeed = FALSE;
7765 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
7766 int then_not_else = TRUE;
7767 rtx this_insn = start_insn, label = 0;
7769 if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
7771 /* The code below is wrong for these, and I haven't time to
7772 fix it now. So we just do the safe thing and return. This
7773 whole function needs re-writing anyway. */
7774 jump_clobbers = 1;
7775 return;
7778 /* Register the insn jumped to. */
7779 if (reverse)
7781 if (!seeking_return)
7782 label = XEXP (SET_SRC (body), 0);
7784 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
7785 label = XEXP (XEXP (SET_SRC (body), 1), 0);
7786 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
7788 label = XEXP (XEXP (SET_SRC (body), 2), 0);
7789 then_not_else = FALSE;
7791 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
7792 seeking_return = 1;
7793 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
7795 seeking_return = 1;
7796 then_not_else = FALSE;
7798 else
7799 abort ();
7801 /* See how many insns this branch skips, and what kind of insns. If all
7802 insns are okay, and the label or unconditional branch to the same
7803 label is not too far away, succeed. */
7804 for (insns_skipped = 0;
7805 !fail && !succeed && insns_skipped++ < max_insns_skipped;)
7807 rtx scanbody;
7809 this_insn = next_nonnote_insn (this_insn);
7810 if (!this_insn)
7811 break;
7813 switch (GET_CODE (this_insn))
7815 case CODE_LABEL:
7816 /* Succeed if it is the target label, otherwise fail since
7817 control falls in from somewhere else. */
7818 if (this_insn == label)
7820 if (jump_clobbers)
7822 arm_ccfsm_state = 2;
7823 this_insn = next_nonnote_insn (this_insn);
7825 else
7826 arm_ccfsm_state = 1;
7827 succeed = TRUE;
7829 else
7830 fail = TRUE;
7831 break;
7833 case BARRIER:
7834 /* Succeed if the following insn is the target label.
7835 Otherwise fail.
7836 If return insns are used then the last insn in a function
7837 will be a barrier. */
7838 this_insn = next_nonnote_insn (this_insn);
7839 if (this_insn && this_insn == label)
7841 if (jump_clobbers)
7843 arm_ccfsm_state = 2;
7844 this_insn = next_nonnote_insn (this_insn);
7846 else
7847 arm_ccfsm_state = 1;
7848 succeed = TRUE;
7850 else
7851 fail = TRUE;
7852 break;
7854 case CALL_INSN:
7855 /* If using 32-bit addresses the cc is not preserved over
7856 calls. */
7857 if (TARGET_APCS_32)
7859 /* Succeed if the following insn is the target label,
7860 or if the following two insns are a barrier and
7861 the target label. */
7862 this_insn = next_nonnote_insn (this_insn);
7863 if (this_insn && GET_CODE (this_insn) == BARRIER)
7864 this_insn = next_nonnote_insn (this_insn);
7866 if (this_insn && this_insn == label
7867 && insns_skipped < max_insns_skipped)
7869 if (jump_clobbers)
7871 arm_ccfsm_state = 2;
7872 this_insn = next_nonnote_insn (this_insn);
7874 else
7875 arm_ccfsm_state = 1;
7876 succeed = TRUE;
7878 else
7879 fail = TRUE;
7881 break;
7883 case JUMP_INSN:
7884 /* If this is an unconditional branch to the same label, succeed.
7885 If it is to another label, do nothing. If it is conditional,
7886 fail. */
7887 /* XXX Probably, the tests for SET and the PC are unnecessary. */
7889 scanbody = PATTERN (this_insn);
7890 if (GET_CODE (scanbody) == SET
7891 && GET_CODE (SET_DEST (scanbody)) == PC)
7893 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
7894 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
7896 arm_ccfsm_state = 2;
7897 succeed = TRUE;
7899 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
7900 fail = TRUE;
7902 /* Fail if a conditional return is undesirable (e.g. on a
7903 StrongARM), but still allow this if optimizing for size. */
7904 else if (GET_CODE (scanbody) == RETURN
7905 && ! use_return_insn (TRUE)
7906 && ! optimize_size)
7907 fail = TRUE;
7908 else if (GET_CODE (scanbody) == RETURN
7909 && seeking_return)
7911 arm_ccfsm_state = 2;
7912 succeed = TRUE;
7914 else if (GET_CODE (scanbody) == PARALLEL)
7916 switch (get_attr_conds (this_insn))
7918 case CONDS_NOCOND:
7919 break;
7920 default:
7921 fail = TRUE;
7922 break;
7925 break;
7927 case INSN:
7928 /* Instructions using or affecting the condition codes make it
7929 fail. */
7930 scanbody = PATTERN (this_insn);
7931 if (! (GET_CODE (scanbody) == SET
7932 || GET_CODE (scanbody) == PARALLEL)
7933 || get_attr_conds (this_insn) != CONDS_NOCOND)
7934 fail = TRUE;
7935 break;
7937 default:
7938 break;
7941 if (succeed)
7943 if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
7944 arm_target_label = CODE_LABEL_NUMBER (label);
7945 else if (seeking_return || arm_ccfsm_state == 2)
7947 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
7949 this_insn = next_nonnote_insn (this_insn);
7950 if (this_insn && (GET_CODE (this_insn) == BARRIER
7951 || GET_CODE (this_insn) == CODE_LABEL))
7952 abort ();
7954 if (!this_insn)
7956 /* Oh dear! We ran off the end... give up. */
7957 recog (PATTERN (insn), insn, NULL_PTR);
7958 arm_ccfsm_state = 0;
7959 arm_target_insn = NULL;
7960 return;
7962 arm_target_insn = this_insn;
7964 else
7965 abort ();
7966 if (jump_clobbers)
7968 if (reverse)
7969 abort ();
7970 arm_current_cc =
7971 get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
7972 0), 0), 1));
7973 if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
7974 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
7975 if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
7976 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
7978 else
7980 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
7981 what it was. */
7982 if (!reverse)
7983 arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
7984 0));
7987 if (reverse || then_not_else)
7988 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
7991 /* Restore recog_data (getting the attributes of other insns can
7992 destroy this array, but final.c assumes that it remains intact
7993 across this call; since the insn has been recognized already we
7994 call recog directly). */
7995 recog (PATTERN (insn), insn, NULL_PTR);
8000 arm_regno_class (regno)
8001 int regno;
8003 if (TARGET_THUMB)
8005 if (regno == STACK_POINTER_REGNUM)
8006 return STACK_REG;
8007 if (regno == CC_REGNUM)
8008 return CC_REG;
8009 if (regno < 8)
8010 return LO_REGS;
8011 return HI_REGS;
8014 if ( regno <= LAST_ARM_REGNUM
8015 || regno == FRAME_POINTER_REGNUM
8016 || regno == ARG_POINTER_REGNUM)
8017 return GENERAL_REGS;
8019 if (regno == CC_REGNUM)
8020 return NO_REGS;
8022 return FPU_REGS;
8025 /* Handle a special case when computing the offset
8026 of an argument from the frame pointer. */
8028 arm_debugger_arg_offset (value, addr)
8029 int value;
8030 rtx addr;
8032 rtx insn;
8034 /* We are only interested if dbxout_parms() failed to compute the offset. */
8035 if (value != 0)
8036 return 0;
8038 /* We can only cope with the case where the address is held in a register. */
8039 if (GET_CODE (addr) != REG)
8040 return 0;
8042 /* If we are using the frame pointer to point at the argument, then
8043 an offset of 0 is correct. */
8044 if (REGNO (addr) == HARD_FRAME_POINTER_REGNUM)
8045 return 0;
8047 /* If we are using the stack pointer to point at the
8048 argument, then an offset of 0 is correct. */
8049 if ((TARGET_THUMB || ! frame_pointer_needed)
8050 && REGNO (addr) == SP_REGNUM)
8051 return 0;
8053 /* Oh dear. The argument is pointed to by a register rather
8054 than being held in a register, or being stored at a known
8055 offset from the frame pointer. Since GDB only understands
8056 those two kinds of argument we must translate the address
8057 held in the register into an offset from the frame pointer.
8058 We do this by searching through the insns for the function
8059 looking to see where this register gets its value. If the
8060 register is initialised from the frame pointer plus an offset
8061 then we are in luck and we can continue, otherwise we give up.
8063 This code is exercised by producing debugging information
8064 for a function with arguments like this:
8066 double func (double a, double b, int c, double d) {return d;}
8068 Without this code the stab for parameter 'd' will be set to
8069 an offset of 0 from the frame pointer, rather than 8. */
8071 /* The if() statement says:
8073 If the insn is a normal instruction
8074 and if the insn is setting the value in a register
8075 and if the register being set is the register holding the address of the argument
8076 and if the address is computed by an addition
8077 that involves adding to a register
8078 which is the frame pointer
8079 a constant integer
8081 then... */
8083 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
8085 if ( GET_CODE (insn) == INSN
8086 && GET_CODE (PATTERN (insn)) == SET
8087 && REGNO (XEXP (PATTERN (insn), 0)) == REGNO (addr)
8088 && GET_CODE (XEXP (PATTERN (insn), 1)) == PLUS
8089 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 0)) == REG
8090 && REGNO (XEXP (XEXP (PATTERN (insn), 1), 0)) == HARD_FRAME_POINTER_REGNUM
8091 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 1)) == CONST_INT
8094 value = INTVAL (XEXP (XEXP (PATTERN (insn), 1), 1));
8096 break;
8100 if (value == 0)
8102 debug_rtx (addr);
8103 warning ("Unable to compute real location of stacked parameter");
8104 value = 8; /* XXX magic hack */
8107 return value;
8111 /* Recursively search through all of the blocks in a function
8112 checking to see if any of the variables created in that
8113 function match the RTX called 'orig'. If they do then
8114 replace them with the RTX called 'new'. */
8116 static void
8117 replace_symbols_in_block (block, orig, new)
8118 tree block;
8119 rtx orig;
8120 rtx new;
8122 for (; block; block = BLOCK_CHAIN (block))
8124 tree sym;
8126 if (! TREE_USED (block))
8127 continue;
8129 for (sym = BLOCK_VARS (block); sym; sym = TREE_CHAIN (sym))
8131 if ( (DECL_NAME (sym) == 0 && TREE_CODE (sym) != TYPE_DECL)
8132 || DECL_IGNORED_P (sym)
8133 || TREE_CODE (sym) != VAR_DECL
8134 || DECL_EXTERNAL (sym)
8135 || ! rtx_equal_p (DECL_RTL (sym), orig)
8137 continue;
8139 DECL_RTL (sym) = new;
8142 replace_symbols_in_block (BLOCK_SUBBLOCKS (block), orig, new);
8146 /* Return the number (counting from 0) of the least significant set
8147 bit in MASK. */
8148 #ifdef __GNUC__
8149 inline
8150 #endif
8151 static int
8152 number_of_first_bit_set (mask)
8153 int mask;
8155 int bit;
8157 for (bit = 0;
8158 (mask & (1 << bit)) == 0;
8159 ++ bit)
8160 continue;
8162 return bit;
8165 /* Generate code to return from a thumb function.
8166 If 'reg_containing_return_addr' is -1, then the return address is
8167 actually on the stack, at the stack pointer. */
8168 static void
8169 thumb_exit (f, reg_containing_return_addr, eh_ofs)
8170 FILE * f;
8171 int reg_containing_return_addr;
8172 rtx eh_ofs;
8174 unsigned regs_available_for_popping;
8175 unsigned regs_to_pop;
8176 int pops_needed;
8177 unsigned available;
8178 unsigned required;
8179 int mode;
8180 int size;
8181 int restore_a4 = FALSE;
8183 /* Compute the registers we need to pop. */
8184 regs_to_pop = 0;
8185 pops_needed = 0;
8187 /* There is an assumption here, that if eh_ofs is not NULL, the
8188 normal return address will have been pushed. */
8189 if (reg_containing_return_addr == -1 || eh_ofs)
8191 /* When we are generating a return for __builtin_eh_return,
8192 reg_containing_return_addr must specify the return regno. */
8193 if (eh_ofs && reg_containing_return_addr == -1)
8194 abort ();
8196 regs_to_pop |= 1 << LR_REGNUM;
8197 ++ pops_needed;
8200 if (TARGET_BACKTRACE)
8202 /* Restore the (ARM) frame pointer and stack pointer. */
8203 regs_to_pop |= (1 << ARM_HARD_FRAME_POINTER_REGNUM) | (1 << SP_REGNUM);
8204 pops_needed += 2;
8207 /* If there is nothing to pop then just emit the BX instruction and
8208 return. */
8209 if (pops_needed == 0)
8211 if (eh_ofs)
8212 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
8214 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
8215 return;
8217 /* Otherwise if we are not supporting interworking and we have not created
8218 a backtrace structure and the function was not entered in ARM mode then
8219 just pop the return address straight into the PC. */
8220 else if ( ! TARGET_INTERWORK
8221 && ! TARGET_BACKTRACE
8222 && ! is_called_in_ARM_mode (current_function_decl))
8224 if (eh_ofs)
8226 asm_fprintf (f, "\tadd\t%r, #4\n", SP_REGNUM);
8227 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
8228 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
8230 else
8231 asm_fprintf (f, "\tpop\t{%r}\n", PC_REGNUM);
8233 return;
8236 /* Find out how many of the (return) argument registers we can corrupt. */
8237 regs_available_for_popping = 0;
8239 /* If returning via __builtin_eh_return, the bottom three registers
8240 all contain information needed for the return. */
8241 if (eh_ofs)
8242 size = 12;
8243 else
8245 #ifdef RTX_CODE
8246 /* If possible, deduce the registers used from the function's
8247 return value. This is more reliable than examining
8248 regs_ever_live[] because that will be set if the register is
8249 ever used in the function, not just if the register is used
8250 to hold a return value. */
8252 if (current_function_return_rtx != 0)
8253 mode = GET_MODE (current_function_return_rtx);
8254 else
8255 #endif
8256 mode = DECL_MODE (DECL_RESULT (current_function_decl));
8258 size = GET_MODE_SIZE (mode);
8260 if (size == 0)
8262 /* In a void function we can use any argument register.
8263 In a function that returns a structure on the stack
8264 we can use the second and third argument registers. */
8265 if (mode == VOIDmode)
8266 regs_available_for_popping =
8267 (1 << ARG_REGISTER (1))
8268 | (1 << ARG_REGISTER (2))
8269 | (1 << ARG_REGISTER (3));
8270 else
8271 regs_available_for_popping =
8272 (1 << ARG_REGISTER (2))
8273 | (1 << ARG_REGISTER (3));
8275 else if (size <= 4)
8276 regs_available_for_popping =
8277 (1 << ARG_REGISTER (2))
8278 | (1 << ARG_REGISTER (3));
8279 else if (size <= 8)
8280 regs_available_for_popping =
8281 (1 << ARG_REGISTER (3));
8284 /* Match registers to be popped with registers into which we pop them. */
8285 for (available = regs_available_for_popping,
8286 required = regs_to_pop;
8287 required != 0 && available != 0;
8288 available &= ~(available & - available),
8289 required &= ~(required & - required))
8290 -- pops_needed;
8292 /* If we have any popping registers left over, remove them. */
8293 if (available > 0)
8294 regs_available_for_popping &= ~ available;
8296 /* Otherwise if we need another popping register we can use
8297 the fourth argument register. */
8298 else if (pops_needed)
8300 /* If we have not found any free argument registers and
8301 reg a4 contains the return address, we must move it. */
8302 if (regs_available_for_popping == 0
8303 && reg_containing_return_addr == LAST_ARG_REGNUM)
8305 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
8306 reg_containing_return_addr = LR_REGNUM;
8308 else if (size > 12)
8310 /* Register a4 is being used to hold part of the return value,
8311 but we have dire need of a free, low register. */
8312 restore_a4 = TRUE;
8314 asm_fprintf (f, "\tmov\t%r, %r\n",IP_REGNUM, LAST_ARG_REGNUM);
8317 if (reg_containing_return_addr != LAST_ARG_REGNUM)
8319 /* The fourth argument register is available. */
8320 regs_available_for_popping |= 1 << LAST_ARG_REGNUM;
8322 -- pops_needed;
8326 /* Pop as many registers as we can. */
8327 thumb_pushpop (f, regs_available_for_popping, FALSE);
8329 /* Process the registers we popped. */
8330 if (reg_containing_return_addr == -1)
8332 /* The return address was popped into the lowest numbered register. */
8333 regs_to_pop &= ~ (1 << LR_REGNUM);
8335 reg_containing_return_addr =
8336 number_of_first_bit_set (regs_available_for_popping);
8338 /* Remove this register from the mask of available registers, so that
8339 the return address will not be corrupted by further pops. */
8340 regs_available_for_popping &= ~ (1 << reg_containing_return_addr);
8343 /* If we popped other registers then handle them here. */
8344 if (regs_available_for_popping)
8346 int frame_pointer;
8348 /* Work out which register currently contains the frame pointer. */
8349 frame_pointer = number_of_first_bit_set (regs_available_for_popping);
8351 /* Move it into the correct place. */
8352 asm_fprintf (f, "\tmov\t%r, %r\n",
8353 ARM_HARD_FRAME_POINTER_REGNUM, frame_pointer);
8355 /* (Temporarily) remove it from the mask of popped registers. */
8356 regs_available_for_popping &= ~ (1 << frame_pointer);
8357 regs_to_pop &= ~ (1 << ARM_HARD_FRAME_POINTER_REGNUM);
8359 if (regs_available_for_popping)
8361 int stack_pointer;
8363 /* We popped the stack pointer as well;
8364 find the register that contains it. */
8365 stack_pointer = number_of_first_bit_set (regs_available_for_popping);
8367 /* Move it into the stack register. */
8368 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, stack_pointer);
8370 /* At this point we have popped all necessary registers, so
8371 do not worry about restoring regs_available_for_popping
8372 to its correct value:
8374 assert (pops_needed == 0)
8375 assert (regs_available_for_popping == (1 << frame_pointer))
8376 assert (regs_to_pop == (1 << STACK_POINTER)) */
8378 else
8380 /* Since we have just moved the popped value into the frame
8381 pointer, the popping register is available for reuse, and
8382 we know that we still have the stack pointer left to pop. */
8383 regs_available_for_popping |= (1 << frame_pointer);
8387 /* If we still have registers left on the stack, but we no longer have
8388 any registers into which we can pop them, then we must move the return
8389 address into the link register and make available the register that
8390 contained it. */
8391 if (regs_available_for_popping == 0 && pops_needed > 0)
8393 regs_available_for_popping |= 1 << reg_containing_return_addr;
8395 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM,
8396 reg_containing_return_addr);
8398 reg_containing_return_addr = LR_REGNUM;
8401 /* If we have registers left on the stack then pop some more.
8402 We know that at most we will want to pop FP and SP. */
8403 if (pops_needed > 0)
8405 int popped_into;
8406 int move_to;
8408 thumb_pushpop (f, regs_available_for_popping, FALSE);
8410 /* We have popped either FP or SP.
8411 Move whichever one it is into the correct register. */
8412 popped_into = number_of_first_bit_set (regs_available_for_popping);
8413 move_to = number_of_first_bit_set (regs_to_pop);
8415 asm_fprintf (f, "\tmov\t%r, %r\n", move_to, popped_into);
8417 regs_to_pop &= ~ (1 << move_to);
8419 -- pops_needed;
8422 /* If we still have not popped everything then we must have only
8423 had one register available to us and we are now popping the SP. */
8424 if (pops_needed > 0)
8426 int popped_into;
8428 thumb_pushpop (f, regs_available_for_popping, FALSE);
8430 popped_into = number_of_first_bit_set (regs_available_for_popping);
8432 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, popped_into);
8434 assert (regs_to_pop == (1 << STACK_POINTER))
8435 assert (pops_needed == 1)
8439 /* If necessary restore the a4 register. */
8440 if (restore_a4)
8442 if (reg_containing_return_addr != LR_REGNUM)
8444 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
8445 reg_containing_return_addr = LR_REGNUM;
8448 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
8451 if (eh_ofs)
8452 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
8454 /* Return to caller. */
8455 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
8458 /* Emit code to push or pop registers to or from the stack. */
8459 static void
8460 thumb_pushpop (f, mask, push)
8461 FILE * f;
8462 int mask;
8463 int push;
8465 int regno;
8466 int lo_mask = mask & 0xFF;
8468 if (lo_mask == 0 && ! push && (mask & (1 << 15)))
8470 /* Special case. Do not generate a POP PC statement here; do it in
8471 thumb_exit(). */
8472 thumb_exit (f, -1, NULL_RTX);
8473 return;
8476 fprintf (f, "\t%s\t{", push ? "push" : "pop");
8478 /* Look at the low registers first. */
8479 for (regno = 0; regno <= LAST_LO_REGNUM; regno ++, lo_mask >>= 1)
8481 if (lo_mask & 1)
8483 asm_fprintf (f, "%r", regno);
8485 if ((lo_mask & ~1) != 0)
8486 fprintf (f, ", ");
8490 if (push && (mask & (1 << LR_REGNUM)))
8492 /* Catch pushing the LR. */
8493 if (mask & 0xFF)
8494 fprintf (f, ", ");
8496 asm_fprintf (f, "%r", LR_REGNUM);
8498 else if (!push && (mask & (1 << PC_REGNUM)))
8500 /* Catch popping the PC. */
8501 if (TARGET_INTERWORK || TARGET_BACKTRACE)
8503 /* The PC is never popped directly; instead
8504 it is popped into r3 and then BX is used. */
8505 fprintf (f, "}\n");
8507 thumb_exit (f, -1, NULL_RTX);
8509 return;
8511 else
8513 if (mask & 0xFF)
8514 fprintf (f, ", ");
8516 asm_fprintf (f, "%r", PC_REGNUM);
8520 fprintf (f, "}\n");
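/* Example of the output produced above (a sketch, not a quotation
   from a real compilation): with MASK == 0x40F0, i.e. r4-r7 plus
   LR_REGNUM (14), and PUSH nonzero this emits

     push	{r4, r5, r6, r7, lr}

   while MASK == 0x80F0 with PUSH zero emits `pop {r4, r5, r6, r7, pc}'
   on targets without interworking or backtrace support.  */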
8523 void
8524 thumb_final_prescan_insn (insn)
8525 rtx insn;
8527 extern int * insn_addresses;
8529 if (flag_print_asm_name)
8530 asm_fprintf (asm_out_file, "%@ 0x%04x\n", insn_addresses[INSN_UID (insn)]);
8534 thumb_shiftable_const (val)
8535 unsigned HOST_WIDE_INT val;
8537 unsigned HOST_WIDE_INT mask = 0xff;
8538 int i;
8540 if (val == 0) /* XXX */
8541 return 0;
8543 for (i = 0; i < 25; i++)
8544 if ((val & (mask << i)) == val)
8545 return 1;
8547 return 0;
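/* Illustrative values for the window test above (a sketch, not part
   of the original file); the function accepts any 8-bit constant
   shifted left by 0 to 24 bits, i.e. values which a MOV immediate
   followed by an LSL can synthesise.  */
#if 0
static void
thumb_shiftable_const_examples ()
{
  thumb_shiftable_const (0x000000ff);	/* Returns 1: 0xff << 0.  */
  thumb_shiftable_const (0x00ff0000);	/* Returns 1: 0xff << 16.  */
  thumb_shiftable_const (0x000001ff);	/* Returns 0: needs 9 bits.  */
}
#endif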
8550 /* Returns non-zero if the current function contains,
8551 or might contain, a far jump. */
8553 thumb_far_jump_used_p (int in_prologue)
8555 rtx insn;
8557 /* This test is only important for leaf functions. */
8558 /* assert (! leaf_function_p ()); */
8560 /* If we have already decided that far jumps may be used,
8561 do not bother checking again, and always return true even if
8562 it turns out that they are not being used. Once we have made
8563 the decision that far jumps are present (and that hence the link
8564 register will be pushed onto the stack) we cannot go back on it. */
8565 if (cfun->machine->far_jump_used)
8566 return 1;
8568 /* If this function is not being called from the prologue/epilogue
8569 generation code then it must be being called from the
8570 INITIAL_ELIMINATION_OFFSET macro. */
8571 if (! in_prologue)
8573 /* In this case we know that we are being asked about the elimination
8574 of the arg pointer register. If that register is not being used,
8575 then there are no arguments on the stack, and we do not have to
8576 worry that a far jump might force the prologue to push the link
8577 register, changing the stack offsets. In this case we can just
8578 return false, since the presence of far jumps in the function will
8579 not affect stack offsets.
8581 If the arg pointer is live (or if it was live, but has now been
8582 eliminated and so set to dead) then we do have to test to see if
8583 the function might contain a far jump. This test can lead to some
8584 false positives, since before reload is completed the length of
8585 branch instructions is not known, so gcc defaults to returning their
8586 longest length, which in turn sets the far jump attribute to true.
8588 A false positive will not result in bad code being generated, but it
8589 will result in a needless push and pop of the link register. We
8590 hope that this does not occur too often. */
8591 if (regs_ever_live [ARG_POINTER_REGNUM])
8592 cfun->machine->arg_pointer_live = 1;
8593 else if (! cfun->machine->arg_pointer_live)
8594 return 0;
8597 /* Check to see if the function contains a branch
8598 insn with the far jump attribute set. */
8599 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
8601 if (GET_CODE (insn) == JUMP_INSN
8602 /* Ignore tablejump patterns. */
8603 && GET_CODE (PATTERN (insn)) != ADDR_VEC
8604 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
8605 && get_attr_far_jump (insn) == FAR_JUMP_YES
8608 /* Record the fact that we have decided that
8609 the function does use far jumps. */
8610 cfun->machine->far_jump_used = 1;
8611 return 1;
8615 return 0;
8618 /* Return non-zero if FUNC must be entered in ARM mode. */
8620 is_called_in_ARM_mode (func)
8621 tree func;
8623 if (TREE_CODE (func) != FUNCTION_DECL)
8624 abort ();
8626 /* Ignore the problem about functions whose address is taken. */
8627 if (TARGET_CALLEE_INTERWORKING && TREE_PUBLIC (func))
8628 return TRUE;
8630 #ifdef ARM_PE
8631 return lookup_attribute ("interfacearm", DECL_MACHINE_ATTRIBUTES (func)) != NULL_TREE;
8632 #else
8633 return FALSE;
8634 #endif
8637 /* The bits which aren't usefully expanded as rtl. */
8638 char *
8639 thumb_unexpanded_epilogue ()
8641 int regno;
8642 int live_regs_mask = 0;
8643 int high_regs_pushed = 0;
8644 int leaf_function = leaf_function_p ();
8645 int had_to_push_lr;
8646 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
8648 if (return_used_this_function)
8649 return "";
8651 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
8652 if (regs_ever_live[regno] && ! call_used_regs[regno]
8653 && ! (TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
8654 live_regs_mask |= 1 << regno;
8656 for (regno = 8; regno < 13; regno++)
8658 if (regs_ever_live[regno] && ! call_used_regs[regno]
8659 && ! (TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
8660 high_regs_pushed ++;
8663 /* The prologue may have pushed some high registers to use as
8664 work registers, e.g. the testsuite file:
8665 gcc/testsuite/gcc.c-torture/execute/complex-2.c
8666 compiles to produce:
8667 push {r4, r5, r6, r7, lr}
8668 mov r7, r9
8669 mov r6, r8
8670 push {r6, r7}
8671 as part of the prologue. We have to undo that pushing here. */
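/* For that example the code below aims to emit the inverse sequence
   (a sketch, assuming r3 and r4 are free and a direct pop of the PC
   is permitted):

     pop	{r3, r4}
     mov	r8, r3
     mov	r9, r4
     pop	{r4, r5, r6, r7, pc}

   restoring the high registers via low work registers before the
   final pop.  */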
8673 if (high_regs_pushed)
8675 int mask = live_regs_mask;
8676 int next_hi_reg;
8677 int size;
8678 int mode;
8680 #ifdef RTX_CODE
8681 /* We can deduce the registers used from the function's return value.
8682 This is more reliable than examining regs_ever_live[] because that
8683 will be set if the register is ever used in the function, not just if
8684 the register is used to hold a return value. */
8686 if (current_function_return_rtx != 0)
8687 mode = GET_MODE (current_function_return_rtx);
8688 else
8689 #endif
8690 mode = DECL_MODE (DECL_RESULT (current_function_decl));
8692 size = GET_MODE_SIZE (mode);
8694 /* Unless we are returning a type of size > 12, register r3 is
8695 available. */
8696 if (size < 13)
8697 mask |= 1 << 3;
8699 if (mask == 0)
8700 /* Oh dear! We have no low registers into which we can pop
8701 high registers! */
8702 fatal ("No low registers available for popping high registers");
8704 for (next_hi_reg = 8; next_hi_reg < 13; next_hi_reg++)
8705 if (regs_ever_live[next_hi_reg] && ! call_used_regs[next_hi_reg]
8706 && ! (TARGET_SINGLE_PIC_BASE && (next_hi_reg == arm_pic_register)))
8707 break;
8709 while (high_regs_pushed)
8711 /* Find lo register(s) into which the high register(s) can
8712 be popped. */
8713 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
8715 if (mask & (1 << regno))
8716 high_regs_pushed--;
8717 if (high_regs_pushed == 0)
8718 break;
8721 mask &= (2 << regno) - 1; /* A noop if regno == 8 */
8723 /* Pop the values into the low register(s). */
8724 thumb_pushpop (asm_out_file, mask, 0);
8726 /* Move the value(s) into the high registers. */
8727 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
8729 if (mask & (1 << regno))
8731 asm_fprintf (asm_out_file, "\tmov\t%r, %r\n", next_hi_reg,
8732 regno);
8734 for (next_hi_reg++; next_hi_reg < 13; next_hi_reg++)
8735 if (regs_ever_live[next_hi_reg]
8736 && ! call_used_regs[next_hi_reg]
8737 && ! (TARGET_SINGLE_PIC_BASE
8738 && (next_hi_reg == arm_pic_register)))
8739 break;
8745 had_to_push_lr = (live_regs_mask || ! leaf_function
8746 || thumb_far_jump_used_p (1));
8748 if (TARGET_BACKTRACE
8749 && ((live_regs_mask & 0xFF) == 0)
8750 && regs_ever_live [LAST_ARG_REGNUM] != 0)
8752 /* The stack backtrace structure creation code had to
8753 push R7 in order to get a work register, so we pop
8754 it now. */
8755 live_regs_mask |= (1 << LAST_LO_REGNUM);
8758 if (current_function_pretend_args_size == 0 || TARGET_BACKTRACE)
8760 if (had_to_push_lr
8761 && ! is_called_in_ARM_mode (current_function_decl)
8762 && ! eh_ofs)
8763 live_regs_mask |= 1 << PC_REGNUM;
8765 /* Either no argument registers were pushed or a backtrace
8766 structure was created which includes an adjusted stack
8767 pointer, so just pop everything. */
8768 if (live_regs_mask)
8769 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
8771 if (eh_ofs)
8772 thumb_exit (asm_out_file, 2, eh_ofs);
8773 /* We have either just popped the return address into the
8774 PC, or it was kept in LR for the entire function, or
8775 it is still on the stack because we do not want to
8776 return by doing a pop {pc}. */
8777 else if ((live_regs_mask & (1 << PC_REGNUM)) == 0)
8778 thumb_exit (asm_out_file,
8779 (had_to_push_lr
8780 && is_called_in_ARM_mode (current_function_decl)) ?
8781 -1 : LR_REGNUM, NULL_RTX);
8783 else
8785 /* Pop everything but the return address. */
8786 live_regs_mask &= ~ (1 << PC_REGNUM);
8788 if (live_regs_mask)
8789 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
8791 if (had_to_push_lr)
8792 /* Get the return address into a temporary register. */
8793 thumb_pushpop (asm_out_file, 1 << LAST_ARG_REGNUM, 0);
8795 /* Remove the argument registers that were pushed onto the stack. */
8796 asm_fprintf (asm_out_file, "\tadd\t%r, %r, #%d\n",
8797 SP_REGNUM, SP_REGNUM,
8798 current_function_pretend_args_size);
8800 if (eh_ofs)
8801 thumb_exit (asm_out_file, 2, eh_ofs);
8802 else
8803 thumb_exit (asm_out_file,
8804 had_to_push_lr ? LAST_ARG_REGNUM : LR_REGNUM, NULL_RTX);
8807 return "";
8810 /* Functions to save and restore machine-specific function data. */
8812 static void
8813 arm_mark_machine_status (p)
8814 struct function * p;
8816 struct machine_function *machine = p->machine;
8818 ggc_mark_rtx (machine->ra_rtx);
8819 ggc_mark_rtx (machine->eh_epilogue_sp_ofs);
8822 static void
8823 arm_init_machine_status (p)
8824 struct function * p;
8826 p->machine =
8827 (struct machine_function *) xcalloc (1, sizeof (struct machine_function));
8830 /* Return an RTX indicating where the return address to the
8831 calling function can be found. */
8833 arm_return_addr (count, frame)
8834 int count;
8835 rtx frame ATTRIBUTE_UNUSED;
8837 rtx reg;
8839 if (count != 0)
8840 return NULL_RTX;
8842 reg = cfun->machine->ra_rtx;
8844 if (reg == NULL)
8846 rtx init;
8848 /* No rtx yet. Invent one, and initialize it from r14 (lr) in
8849 the prologue. */
8850 reg = gen_reg_rtx (Pmode);
8851 cfun->machine->ra_rtx = reg;
8853 if (! TARGET_APCS_32)
8854 init = gen_rtx_AND (Pmode, gen_rtx_REG (Pmode, LR_REGNUM),
8855 GEN_INT (RETURN_ADDR_MASK26));
8856 else
8857 init = gen_rtx_REG (Pmode, LR_REGNUM);
8859 init = gen_rtx_SET (VOIDmode, reg, init);
8861 /* Emit the insn to the prologue with the other argument copies. */
8862 push_topmost_sequence ();
8863 emit_insn_after (init, get_insns ());
8864 pop_topmost_sequence ();
8867 return reg;
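/* arm_return_addr is the expansion used for count == 0 requests such
   as __builtin_return_address (0); a minimal use (a sketch, not part
   of this file):

     void *
     whence ()
     {
       return __builtin_return_address (0);
     }

   On 26-bit (non-APCS-32) targets the condition flags live in the
   top bits of r14, hence the masking with RETURN_ADDR_MASK26.  */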
8870 /* Do anything needed before RTL is emitted for each function. */
8871 void
8872 arm_init_expanders ()
8874 /* Arrange to initialize and mark the machine per-function status. */
8875 init_machine_status = arm_init_machine_status;
8876 mark_machine_status = arm_mark_machine_status;
8879 /* Generate the rest of a function's prologue. */
8880 void
8881 thumb_expand_prologue ()
8883 HOST_WIDE_INT amount = (get_frame_size ()
8884 + current_function_outgoing_args_size);
8886 /* Naked functions don't have prologues. */
8887 if (arm_naked_function_p (current_function_decl))
8888 return;
8890 if (frame_pointer_needed)
8891 emit_insn (gen_movsi (hard_frame_pointer_rtx, stack_pointer_rtx));
8893 if (amount)
8895 amount = ROUND_UP (amount);
8897 if (amount < 512)
8898 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
8899 GEN_INT (- amount)));
8900 else
8902 int regno;
8903 rtx reg;
8905 /* The stack decrement is too big for an immediate value in a single
8906 insn. In theory we could issue multiple subtracts, but after
8907 three of them it becomes more space efficient to place the full
8908 value in the constant pool and load into a register. (Also the
8909 ARM debugger really likes to see only one stack decrement per
8910 function). So instead we look for a scratch register into which
8911 we can load the decrement, and then we subtract this from the
8912 stack pointer. Unfortunately on the thumb the only available
8913 scratch registers are the argument registers, and we cannot use
8914 these as they may hold arguments to the function. Instead we
8915 attempt to locate a call preserved register which is used by this
8916 function. If we can find one, then we know that it will have
8917 been pushed at the start of the prologue and so we can corrupt
8918 it now. */
8919 for (regno = LAST_ARG_REGNUM + 1; regno <= LAST_LO_REGNUM; regno++)
8920 if (regs_ever_live[regno]
8921 && ! call_used_regs[regno] /* Paranoia */
8922 && ! (TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register))
8923 && ! (frame_pointer_needed
8924 && (regno == THUMB_HARD_FRAME_POINTER_REGNUM)))
8925 break;
8927 if (regno > LAST_LO_REGNUM) /* Very unlikely */
8929 rtx spare = gen_rtx (REG, SImode, IP_REGNUM);
8931 /* Choose an arbitrary, non-argument low register. */
8932 reg = gen_rtx (REG, SImode, LAST_LO_REGNUM);
8934 /* Save it by copying it into a high, scratch register. */
8935 emit_insn (gen_movsi (spare, reg));
8937 /* Decrement the stack. */
8938 emit_insn (gen_movsi (reg, GEN_INT (- amount)));
8939 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
8940 reg));
8942 /* Restore the low register's original value. */
8943 emit_insn (gen_movsi (reg, spare));
8945 /* Emit a USE of the restored scratch register, so that flow
8946 analysis will not consider the restore redundant. The
8947 register won't be used again in this function and isn't
8948 restored by the epilogue. */
8949 emit_insn (gen_rtx_USE (VOIDmode, reg));
8951 else
8953 reg = gen_rtx (REG, SImode, regno);
8955 emit_insn (gen_movsi (reg, GEN_INT (- amount)));
8956 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
8957 reg));
8962 if (profile_flag || profile_block_flag || TARGET_NO_SCHED_PRO)
8963 emit_insn (gen_blockage ());
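/* Output sketch for the two stack-decrement paths above (illustrative
   only, assuming a 1024 byte frame): when AMOUNT fits in an immediate
   the prologue is simply `sub sp, #N'; otherwise, if r4 is a pushed
   call-saved register, the emitted RTL corresponds to

     ldr	r4, .Lc		@ .Lc holds -1024 in the minipool
     add	sp, sp, r4

   r4 is not restored here; its entry value was already saved by the
   register push at the start of the prologue.  */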
8966 void
8967 thumb_expand_epilogue ()
8969 HOST_WIDE_INT amount = (get_frame_size ()
8970 + current_function_outgoing_args_size);
8972 /* Naked functions don't have epilogues. */
8973 if (arm_naked_function_p (current_function_decl))
8974 return;
8976 if (frame_pointer_needed)
8977 emit_insn (gen_movsi (stack_pointer_rtx, hard_frame_pointer_rtx));
8978 else if (amount)
8980 amount = ROUND_UP (amount);
8982 if (amount < 512)
8983 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
8984 GEN_INT (amount)));
8985 else
8987 /* r3 is always free in the epilogue. */
8988 rtx reg = gen_rtx (REG, SImode, LAST_ARG_REGNUM);
8990 emit_insn (gen_movsi (reg, GEN_INT (amount)));
8991 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
8995 /* Emit a USE (stack_pointer_rtx), so that
8996 the stack adjustment will not be deleted. */
8997 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
8999 if (profile_flag || profile_block_flag || TARGET_NO_SCHED_PRO)
9000 emit_insn (gen_blockage ());
9003 void
9004 output_thumb_prologue (f)
9005 FILE * f;
9007 int live_regs_mask = 0;
9008 int high_regs_pushed = 0;
9009 int store_arg_regs = 0;
9010 int regno;
9012 if (arm_naked_function_p (current_function_decl))
9013 return;
9015 if (is_called_in_ARM_mode (current_function_decl))
9017 const char * name;
9019 if (GET_CODE (DECL_RTL (current_function_decl)) != MEM)
9020 abort ();
9021 if (GET_CODE (XEXP (DECL_RTL (current_function_decl), 0)) != SYMBOL_REF)
9022 abort ();
9023 name = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
9025 /* Generate code sequence to switch us into Thumb mode. */
9026 /* The .code 32 directive has already been emitted by
9027 ASM_DECLARE_FUNCTION_NAME */
9028 asm_fprintf (f, "\torr\t%r, %r, #1\n", IP_REGNUM, PC_REGNUM);
9029 asm_fprintf (f, "\tbx\t%r\n", IP_REGNUM);
9031 /* Generate a label, so that the debugger will notice the
9032 change in instruction sets. This label is also used by
9033 the assembler to bypass the ARM code when this function
9034 is called from a Thumb encoded function elsewhere in the
9035 same file. Hence the definition of STUB_NAME here must
9036 agree with the definition in gas/config/tc-arm.c */
9038 #define STUB_NAME ".real_start_of"
9040 asm_fprintf (f, "\t.code\t16\n");
9041 #ifdef ARM_PE
9042 if (arm_dllexport_name_p (name))
9043 name = arm_strip_name_encoding (name);
9044 #endif
9045 asm_fprintf (f, "\t.globl %s%U%s\n", STUB_NAME, name);
9046 asm_fprintf (f, "\t.thumb_func\n");
9047 asm_fprintf (f, "%s%U%s:\n", STUB_NAME, name);
9050 if (current_function_anonymous_args && current_function_pretend_args_size)
9051 store_arg_regs = 1;
9053 if (current_function_pretend_args_size)
9055 if (store_arg_regs)
9057 int num_pushes;
9059 asm_fprintf (f, "\tpush\t{");
9061 num_pushes = NUM_INTS (current_function_pretend_args_size);
9063 for (regno = LAST_ARG_REGNUM + 1 - num_pushes;
9064 regno <= LAST_ARG_REGNUM;
9065 regno ++)
9066 asm_fprintf (f, "%r%s", regno,
9067 regno == LAST_ARG_REGNUM ? "" : ", ");
9069 asm_fprintf (f, "}\n");
9071 else
9072 asm_fprintf (f, "\tsub\t%r, %r, #%d\n",
9073 SP_REGNUM, SP_REGNUM,
9074 current_function_pretend_args_size);
9077 for (regno = 0; regno <= LAST_LO_REGNUM; regno ++)
9078 if (regs_ever_live[regno] && ! call_used_regs[regno]
9079 && ! (TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
9080 live_regs_mask |= 1 << regno;
9082 if (live_regs_mask || ! leaf_function_p () || thumb_far_jump_used_p (1))
9083 live_regs_mask |= 1 << LR_REGNUM;
9085 if (TARGET_BACKTRACE)
9087 int offset;
9088 int work_register = 0;
9089 int wr;
9091 /* We have been asked to create a stack backtrace structure.
9092 The code looks like this:
9094 0 .align 2
9095 0 func:
9096 0 sub SP, #16 Reserve space for 4 registers.
9097 2 push {R7} Get a work register.
9098 4 add R7, SP, #20 Get the stack pointer before the push.
9099 6 str R7, [SP, #8] Store the stack pointer (before reserving the space).
9100 8 mov R7, PC Get hold of the start of this code plus 12.
9101 10 str R7, [SP, #16] Store it.
9102 12 mov R7, FP Get hold of the current frame pointer.
9103 14 str R7, [SP, #4] Store it.
9104 16 mov R7, LR Get hold of the current return address.
9105 18 str R7, [SP, #12] Store it.
9106 20 add R7, SP, #16 Point at the start of the backtrace structure.
9107 22 mov FP, R7 Put this value into the frame pointer. */
9109 if ((live_regs_mask & 0xFF) == 0)
9111 /* See if the a4 register is free. */
9113 if (regs_ever_live [LAST_ARG_REGNUM] == 0)
9114 work_register = LAST_ARG_REGNUM;
9115 else /* We must push a register of our own */
9116 live_regs_mask |= (1 << LAST_LO_REGNUM);
9119 if (work_register == 0)
9121 /* Select a register from the list that will be pushed to
9122 use as our work register. */
9123 for (work_register = (LAST_LO_REGNUM + 1); work_register--;)
9124 if ((1 << work_register) & live_regs_mask)
9125 break;
9128 asm_fprintf
9129 (f, "\tsub\t%r, %r, #16\t%@ Create stack backtrace structure\n",
9130 SP_REGNUM, SP_REGNUM);
9132 if (live_regs_mask)
9133 thumb_pushpop (f, live_regs_mask, 1);
9135 for (offset = 0, wr = 1 << 15; wr != 0; wr >>= 1)
9136 if (wr & live_regs_mask)
9137 offset += 4;
9139 asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
9140 offset + 16 + current_function_pretend_args_size);
9142 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9143 offset + 4);
9145 /* Make sure that the instruction fetching the PC is in the right place
9146 to calculate "start of backtrace creation code + 12". */
9147 if (live_regs_mask)
9149 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
9150 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9151 offset + 12);
9152 asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
9153 ARM_HARD_FRAME_POINTER_REGNUM);
9154 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9155 offset);
9157 else
9159 asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
9160 ARM_HARD_FRAME_POINTER_REGNUM);
9161 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9162 offset);
9163 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
9164 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9165 offset + 12);
9168 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, LR_REGNUM);
9169 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9170 offset + 8);
9171 asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
9172 offset + 12);
9173 asm_fprintf (f, "\tmov\t%r, %r\t\t%@ Backtrace structure created\n",
9174 ARM_HARD_FRAME_POINTER_REGNUM, work_register);
9176 else if (live_regs_mask)
9177 thumb_pushpop (f, live_regs_mask, 1);
9179 for (regno = 8; regno < 13; regno++)
9181 if (regs_ever_live[regno] && ! call_used_regs[regno]
9182 && ! (TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
9183 high_regs_pushed ++;
9186 if (high_regs_pushed)
9188 int pushable_regs = 0;
9189 int mask = live_regs_mask & 0xff;
9190 int next_hi_reg;
9192 for (next_hi_reg = 12; next_hi_reg > LAST_LO_REGNUM; next_hi_reg--)
9194 if (regs_ever_live[next_hi_reg] && ! call_used_regs[next_hi_reg]
9195 && ! (TARGET_SINGLE_PIC_BASE
9196 && (next_hi_reg == arm_pic_register)))
9197 break;
9200 pushable_regs = mask;
9202 if (pushable_regs == 0)
9204 /* Desperation time -- this probably will never happen. */
9205 if (regs_ever_live[LAST_ARG_REGNUM]
9206 || ! call_used_regs[LAST_ARG_REGNUM])
9207 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, LAST_ARG_REGNUM);
9208 mask = 1 << LAST_ARG_REGNUM;
9211 while (high_regs_pushed > 0)
9213 for (regno = LAST_LO_REGNUM; regno >= 0; regno--)
9215 if (mask & (1 << regno))
9217 asm_fprintf (f, "\tmov\t%r, %r\n", regno, next_hi_reg);
9219 high_regs_pushed --;
9221 if (high_regs_pushed)
9222 for (next_hi_reg--; next_hi_reg > LAST_LO_REGNUM;
9223 next_hi_reg--)
9225 if (regs_ever_live[next_hi_reg]
9226 && ! call_used_regs[next_hi_reg]
9227 && ! (TARGET_SINGLE_PIC_BASE
9228 && (next_hi_reg == arm_pic_register)))
9229 break;
9231 else
9233 mask &= ~ ((1 << regno) - 1);
9234 break;
9239 thumb_pushpop (f, mask, 1);
9242 if (pushable_regs == 0
9243 && (regs_ever_live[LAST_ARG_REGNUM]
9244 || ! call_used_regs[LAST_ARG_REGNUM]))
9245 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
9249 /* Handle the case of a double word load into a low register from
9250 a computed memory address. The computed address may involve a
9251 register which is overwritten by the load. */
9253 char *
9254 thumb_load_double_from_address (operands)
9255 rtx * operands;
9257 rtx addr;
9258 rtx base;
9259 rtx offset;
9260 rtx arg1;
9261 rtx arg2;
9263 if (GET_CODE (operands[0]) != REG)
9264 fatal ("thumb_load_double_from_address: destination is not a register");
9266 if (GET_CODE (operands[1]) != MEM)
9268 debug_rtx (operands[1]);
9269 fatal ("thumb_load_double_from_address: source is not a computed memory address");
9272 /* Get the memory address. */
9273 addr = XEXP (operands[1], 0);
9275 /* Work out how the memory address is computed. */
9276 switch (GET_CODE (addr))
9278 case REG:
9279 operands[2] = gen_rtx (MEM, SImode,
9280 plus_constant (XEXP (operands[1], 0), 4));
9282 if (REGNO (operands[0]) == REGNO (addr))
9284 output_asm_insn ("ldr\t%H0, %2", operands);
9285 output_asm_insn ("ldr\t%0, %1", operands);
9287 else
9289 output_asm_insn ("ldr\t%0, %1", operands);
9290 output_asm_insn ("ldr\t%H0, %2", operands);
9292 break;
9294 case CONST:
9295 /* Compute <address> + 4 for the high order load. */
9296 operands[2] = gen_rtx (MEM, SImode,
9297 plus_constant (XEXP (operands[1], 0), 4));
9299 output_asm_insn ("ldr\t%0, %1", operands);
9300 output_asm_insn ("ldr\t%H0, %2", operands);
9301 break;
9303 case PLUS:
9304 arg1 = XEXP (addr, 0);
9305 arg2 = XEXP (addr, 1);
9307 if (CONSTANT_P (arg1))
9308 base = arg2, offset = arg1;
9309 else
9310 base = arg1, offset = arg2;
9312 if (GET_CODE (base) != REG)
9313 fatal ("thumb_load_double_from_address: base is not a register");
9315 /* Catch the case of <address> = <reg> + <reg> */
9316 if (GET_CODE (offset) == REG)
9318 int reg_offset = REGNO (offset);
9319 int reg_base = REGNO (base);
9320 int reg_dest = REGNO (operands[0]);
9322 /* Add the base and offset registers together into the
9323 higher destination register. */
9324 asm_fprintf (asm_out_file, "\tadd\t%r, %r, %r",
9325 reg_dest + 1, reg_base, reg_offset);
9327 /* Load the lower destination register from the address in
9328 the higher destination register. */
9329 asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #0]",
9330 reg_dest, reg_dest + 1);
9332 /* Load the higher destination register from its own address
9333 plus 4. */
9334 asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #4]",
9335 reg_dest + 1, reg_dest + 1);
9337 else
9339 /* Compute <address> + 4 for the high order load. */
9340 operands[2] = gen_rtx (MEM, SImode,
9341 plus_constant (XEXP (operands[1], 0), 4));
9343 /* If the computed address is held in the low order register
9344 then load the high order register first, otherwise always
9345 load the low order register first. */
9346 if (REGNO (operands[0]) == REGNO (base))
9348 output_asm_insn ("ldr\t%H0, %2", operands);
9349 output_asm_insn ("ldr\t%0, %1", operands);
9351 else
9353 output_asm_insn ("ldr\t%0, %1", operands);
9354 output_asm_insn ("ldr\t%H0, %2", operands);
9357 break;
9359 case LABEL_REF:
9360 /* With no registers to worry about we can just load the value
9361 directly. */
9362 operands[2] = gen_rtx (MEM, SImode,
9363 plus_constant (XEXP (operands[1], 0), 4));
9365 output_asm_insn ("ldr\t%H0, %2", operands);
9366 output_asm_insn ("ldr\t%0, %1", operands);
9367 break;
9369 default:
9370 debug_rtx (operands[1]);
9371 fatal ("thumb_load_double_from_address: Unhandled address calculation");
9372 break;
9375 return "";
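/* Overlap sketch for the REG and PLUS cases above (not part of the
   original file): if operands[0] is r2 and the base register is also
   r2, loading the low word first would destroy the address, so the
   high word is fetched first:

     ldr	r3, [r2, #4]	@ high word while the base is intact
     ldr	r2, [r2]	@ low word clobbers the base last  */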
9379 char *
9380 thumb_output_move_mem_multiple (n, operands)
9381 int n;
9382 rtx * operands;
9384 rtx tmp;
9386 switch (n)
9388 case 2:
9389 if (REGNO (operands[2]) > REGNO (operands[3]))
9391 tmp = operands[2];
9392 operands[2] = operands[3];
9393 operands[3] = tmp;
9395 output_asm_insn ("ldmia\t%1!, {%2, %3}", operands);
9396 output_asm_insn ("stmia\t%0!, {%2, %3}", operands);
9397 break;
9399 case 3:
9400 if (REGNO (operands[2]) > REGNO (operands[3]))
9402 tmp = operands[2];
9403 operands[2] = operands[3];
9404 operands[3] = tmp;
9406 if (REGNO (operands[3]) > REGNO (operands[4]))
9408 tmp = operands[3];
9409 operands[3] = operands[4];
9410 operands[4] = tmp;
9412 if (REGNO (operands[2]) > REGNO (operands[3]))
9414 tmp = operands[2];
9415 operands[2] = operands[3];
9416 operands[3] = tmp;
9419 output_asm_insn ("ldmia\t%1!, {%2, %3, %4}", operands);
9420 output_asm_insn ("stmia\t%0!, {%2, %3, %4}", operands);
9421 break;
9423 default:
9424 abort ();
9427 return "";
9430 /* Routines for generating rtl */
9432 void
9433 thumb_expand_movstrqi (operands)
9434 rtx * operands;
9436 rtx out = copy_to_mode_reg (SImode, XEXP (operands[0], 0));
9437 rtx in = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
9438 HOST_WIDE_INT len = INTVAL (operands[2]);
9439 HOST_WIDE_INT offset = 0;
9441 while (len >= 12)
9443 emit_insn (gen_movmem12b (out, in));
9444 len -= 12;
9447 if (len >= 8)
9449 emit_insn (gen_movmem8b (out, in));
9450 len -= 8;
9453 if (len >= 4)
9455 rtx reg = gen_reg_rtx (SImode);
9456 emit_insn (gen_movsi (reg, gen_rtx (MEM, SImode, in)));
9457 emit_insn (gen_movsi (gen_rtx (MEM, SImode, out), reg));
9458 len -= 4;
9459 offset += 4;
9462 if (len >= 2)
9464 rtx reg = gen_reg_rtx (HImode);
9465 emit_insn (gen_movhi (reg, gen_rtx (MEM, HImode,
9466 plus_constant (in, offset))));
9467 emit_insn (gen_movhi (gen_rtx (MEM, HImode, plus_constant (out, offset)),
9468 reg));
9469 len -= 2;
9470 offset += 2;
9473 if (len)
9475 rtx reg = gen_reg_rtx (QImode);
9476 emit_insn (gen_movqi (reg, gen_rtx (MEM, QImode,
9477 plus_constant (in, offset))));
9478 emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (out, offset)),
9479 reg));
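/* Decomposition sketch (not part of the original file): a 23 byte
   copy is expanded as one movmem12b (12 bytes), one movmem8b (8
   bytes), a halfword move (2 bytes) and a byte move (1 byte).  The
   12 and 8 byte patterns post-increment IN and OUT, so OFFSET only
   has to track the trailing word, halfword and byte moves.  */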
9484 thumb_cmp_operand (op, mode)
9485 rtx op;
9486 enum machine_mode mode;
9488 return ((GET_CODE (op) == CONST_INT
9489 && (unsigned HOST_WIDE_INT) (INTVAL (op)) < 256)
9490 || register_operand (op, mode));
9493 static char *
9494 thumb_condition_code (x, invert)
9495 rtx x;
9496 int invert;
9498 static char * conds[] =
9500 "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
9501 "hi", "ls", "ge", "lt", "gt", "le"
9503 int val;
9505 switch (GET_CODE (x))
9507 case EQ: val = 0; break;
9508 case NE: val = 1; break;
9509 case GEU: val = 2; break;
9510 case LTU: val = 3; break;
9511 case GTU: val = 8; break;
9512 case LEU: val = 9; break;
9513 case GE: val = 10; break;
9514 case LT: val = 11; break;
9515 case GT: val = 12; break;
9516 case LE: val = 13; break;
9517 default:
9518 abort ();
9521 return conds[val ^ invert];
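/* Examples (a sketch): the table pairs each condition with its
   logical inverse, so `val ^ invert' flips between them:

     GE  with invert == 0 yields "ge"; with invert == 1 yields "lt".
     GEU with invert == 0 yields "cs"; with invert == 1 yields "cc".  */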
9524 /* Handle storing a half-word to memory during reload. */
9525 void
9526 thumb_reload_out_hi (operands)
9527 rtx * operands;
9529 emit_insn (gen_thumb_movhi_clobber (operands[0], operands[1], operands[2]));
9532 /* Handle storing a half-word to memory during reload. */
9533 void
9534 thumb_reload_in_hi (operands)
9535 rtx * operands ATTRIBUTE_UNUSED;
9537 abort ();
9540 /* Return the length of a function name prefix
9541 that starts with the character 'c'. */
9542 static int
9543 arm_get_strip_length (char c)
9545 switch (c)
9547 ARM_NAME_ENCODING_LENGTHS
9548 default: return 0;
9552 /* Return a pointer to a function's name with any
9553 and all prefix encodings stripped from it. */
9554 const char *
9555 arm_strip_name_encoding (const char * name)
9557 int skip;
9559 while ((skip = arm_get_strip_length (* name)))
9560 name += skip;
9562 return name;
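/* Usage sketch, not part of the original file.  The '*' prefix here
   is an assumption for illustration; it stands for any character
   given a non-zero strip length by ARM_NAME_ENCODING_LENGTHS.  */
#if 0
static const char *
strip_example ()
{
  /* Skips the (assumed) one-character '*' encoding, giving "foo".  */
  return arm_strip_name_encoding ("*foo");
}
#endif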
9565 #ifdef AOF_ASSEMBLER
9566 /* Special functions only needed when producing AOF syntax assembler. */
9568 rtx aof_pic_label = NULL_RTX;
9569 struct pic_chain
9571 struct pic_chain * next;
9572 char * symname;
9575 static struct pic_chain * aof_pic_chain = NULL;
9578 aof_pic_entry (x)
9579 rtx x;
9581 struct pic_chain ** chainp;
9582 int offset;
9584 if (aof_pic_label == NULL_RTX)
9586 /* We mark this here and not in arm_add_gc_roots() to avoid
9587 polluting even more code with ifdefs, and because it never
9588 contains anything useful until we assign to it here. */
9589 ggc_add_rtx_root (& aof_pic_label, 1);
9590 /* This needs to persist throughout the compilation. */
9591 end_temporary_allocation ();
9592 aof_pic_label = gen_rtx_SYMBOL_REF (Pmode, "x$adcons");
9593 resume_temporary_allocation ();
9596 for (offset = 0, chainp = &aof_pic_chain; *chainp;
9597 offset += 4, chainp = &(*chainp)->next)
9598 if ((*chainp)->symname == XSTR (x, 0))
9599 return plus_constant (aof_pic_label, offset);
9601 *chainp = (struct pic_chain *) xmalloc (sizeof (struct pic_chain));
9602 (*chainp)->next = NULL;
9603 (*chainp)->symname = XSTR (x, 0);
9604 return plus_constant (aof_pic_label, offset);
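/* Chain behaviour sketch (not part of the original file): the first
   call with a given SYMBOL_REF appends an entry, later calls return
   the same offset:

     aof_pic_entry (a)  =>  x$adcons + 0
     aof_pic_entry (b)  =>  x$adcons + 4
     aof_pic_entry (a)  =>  x$adcons + 0

   aof_dump_pic_table then emits one DCD per chain entry.  */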
9607 void
9608 aof_dump_pic_table (f)
9609 FILE * f;
9611 struct pic_chain * chain;
9613 if (aof_pic_chain == NULL)
9614 return;
9616 asm_fprintf (f, "\tAREA |%r$$adcons|, BASED %r\n",
9617 PIC_OFFSET_TABLE_REGNUM,
9618 PIC_OFFSET_TABLE_REGNUM);
9619 fputs ("|x$adcons|\n", f);
9621 for (chain = aof_pic_chain; chain; chain = chain->next)
9623 fputs ("\tDCD\t", f);
9624 assemble_name (f, chain->symname);
9625 fputs ("\n", f);
9629 int arm_text_section_count = 1;
9631 char *
9632 aof_text_section ()
9634 static char buf[100];
9635 sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
9636 arm_text_section_count++);
9637 if (flag_pic)
9638 strcat (buf, ", PIC, REENTRANT");
9639 return buf;
9642 static int arm_data_section_count = 1;
9644 char *
9645 aof_data_section ()
9647 static char buf[100];
9648 sprintf (buf, "\tAREA |C$$data%d|, DATA", arm_data_section_count++);
9649 return buf;
9652 /* The AOF assembler is religiously strict about declarations of
9653 imported and exported symbols, so that it is impossible to declare
9654 a function as imported near the beginning of the file, and then to
9655 export it later on. It is, however, possible to delay the decision
9656 until all the functions in the file have been compiled. To get
9657 around this, we maintain a list of the imports and exports, and
9658 delete from it any that are subsequently defined. At the end of
9659 compilation we spit the remainder of the list out before the END
9660 directive. */
9662 struct import
9664 struct import * next;
9665 char * name;
9668 static struct import * imports_list = NULL;
9670 void
9671 aof_add_import (name)
9672 char * name;
9674 struct import * new;
9676 for (new = imports_list; new; new = new->next)
9677 if (new->name == name)
9678 return;
9680 new = (struct import *) xmalloc (sizeof (struct import));
9681 new->next = imports_list;
9682 imports_list = new;
9683 new->name = name;
9686 void
9687 aof_delete_import (name)
9688 char * name;
9690 struct import ** old;
9692 for (old = &imports_list; *old; old = & (*old)->next)
9694 if ((*old)->name == name)
9696 *old = (*old)->next;
9697 return;
9702 int arm_main_function = 0;
9704 void
9705 aof_dump_imports (f)
9706 FILE * f;
9708 /* The AOF assembler needs this to cause the startup code to be extracted
9709 from the library. Bringing in __main causes the whole thing to work
9710 automagically. */
9711 if (arm_main_function)
9713 text_section ();
9714 fputs ("\tIMPORT __main\n", f);
9715 fputs ("\tDCD __main\n", f);
9718 /* Now dump the remaining imports. */
9719 while (imports_list)
9721 fprintf (f, "\tIMPORT\t");
9722 assemble_name (f, imports_list->name);
9723 fputc ('\n', f);
9724 imports_list = imports_list->next;
9727 #endif /* AOF_ASSEMBLER */