/* Output routines for GCC for ARM.
   Copyright (C) 1991, 93, 94, 95, 96, 97, 98, 99, 2000 Free Software Foundation, Inc.
   Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
   and Martin Simmons (@harleqn.co.uk).
   More major hacks by Richard Earnshaw (rearnsha@arm.com).

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-flags.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "reload.h"
#include "function.h"
#include "expr.h"
#include "toplev.h"
#include "recog.h"
#include "ggc.h"
#include "except.h"
/* Forward definitions of types.  */
typedef struct minipool_node    Mnode;
typedef struct minipool_fixup   Mfix;

/* In order to improve the layout of the prototypes below
   some short type abbreviations are defined here.  */
#define Hint  HOST_WIDE_INT
#define Mmode enum machine_mode
#define Ulong unsigned long

/* Forward function declarations.  */
static void arm_add_gc_roots PARAMS ((void));
static int arm_gen_constant PARAMS ((enum rtx_code, Mmode, Hint, rtx, rtx, int, int));
static int arm_naked_function_p PARAMS ((tree));
static Ulong bit_count PARAMS ((signed int));
static int const_ok_for_op PARAMS ((Hint, enum rtx_code));
static int eliminate_lr2ip PARAMS ((rtx *));
static rtx emit_multi_reg_push PARAMS ((int));
static rtx emit_sfm PARAMS ((int, int));
static const char * fp_const_from_val PARAMS ((REAL_VALUE_TYPE *));
static arm_cc get_arm_condition_code PARAMS ((rtx));
static void init_fpa_table PARAMS ((void));
static Hint int_log2 PARAMS ((Hint));
static rtx is_jump_table PARAMS ((rtx));
static const char * output_multi_immediate PARAMS ((rtx *, const char *, const char *, int, Hint));
static void print_multi_reg PARAMS ((FILE *, const char *, int, int, int));
static Mmode select_dominance_cc_mode PARAMS ((rtx, rtx, Hint));
static const char * shift_op PARAMS ((rtx, Hint *));
static void arm_init_machine_status PARAMS ((struct function *));
static void arm_mark_machine_status PARAMS ((struct function *));
static int number_of_first_bit_set PARAMS ((int));
static void replace_symbols_in_block PARAMS ((tree, rtx, rtx));
static void thumb_exit PARAMS ((FILE *, int, rtx));
static void thumb_pushpop PARAMS ((FILE *, int, int));
static const char * thumb_condition_code PARAMS ((rtx, int));
static Hint get_jump_table_size PARAMS ((rtx));
static Mnode * move_minipool_fix_forward_ref PARAMS ((Mnode *, Mnode *, Hint));
static Mnode * add_minipool_forward_ref PARAMS ((Mfix *));
static Mnode * move_minipool_fix_backward_ref PARAMS ((Mnode *, Mnode *, Hint));
static Mnode * add_minipool_backward_ref PARAMS ((Mfix *));
static void assign_minipool_offsets PARAMS ((Mfix *));
static void arm_print_value PARAMS ((FILE *, rtx));
static void dump_minipool PARAMS ((rtx));
static int arm_barrier_cost PARAMS ((rtx));
static Mfix * create_fix_barrier PARAMS ((Mfix *, Hint));
static void push_minipool_barrier PARAMS ((rtx, Hint));
static void push_minipool_fix PARAMS ((rtx, Hint, rtx *, Mmode, rtx));
static void note_invalid_constants PARAMS ((rtx, Hint));

#undef Hint
#undef Mmode
#undef Ulong
/* The maximum number of insns skipped which will be conditionalised if
   possible.  */
static int max_insns_skipped = 5;

extern FILE * asm_out_file;

/* True if we are currently building a constant table.  */
int making_const_table;

/* Define the information needed to generate branch insns.  This is
   stored from the compare operation.  */
rtx arm_compare_op0, arm_compare_op1;

/* What type of floating point are we tuning for?  */
enum floating_point_type arm_fpu;

/* What type of floating point instructions are available?  */
enum floating_point_type arm_fpu_arch;

/* What program mode is the cpu running in?  26-bit mode or 32-bit mode.  */
enum prog_mode_type arm_prgmode;

/* Set by the -mfp=... option.  */
const char * target_fp_name = NULL;

/* Used to parse the -mstructure_size_boundary command line option.  */
const char * structure_size_string = NULL;
int arm_structure_size_boundary = DEFAULT_STRUCTURE_SIZE_BOUNDARY;

/* Bit values used to identify processor capabilities.  */
#define FL_CO_PROC    (1 << 0)  /* Has external co-processor bus.  */
#define FL_FAST_MULT  (1 << 1)  /* Fast multiply.  */
#define FL_MODE26     (1 << 2)  /* 26-bit mode support.  */
#define FL_MODE32     (1 << 3)  /* 32-bit mode support.  */
#define FL_ARCH4      (1 << 4)  /* Architecture rel 4.  */
#define FL_ARCH5      (1 << 5)  /* Architecture rel 5.  */
#define FL_THUMB      (1 << 6)  /* Thumb aware.  */
#define FL_LDSCHED    (1 << 7)  /* Load scheduling necessary.  */
#define FL_STRONG     (1 << 8)  /* StrongARM.  */

/* The bits in this mask specify which instructions we are
   allowed to generate.  */
static int insn_flags = 0;

/* The bits in this mask specify which instruction scheduling options should
   be used.  Note - there is an overlap with FL_FAST_MULT.  For some
   hardware we want to be able to generate the multiply instructions, but to
   tune as if they were not present in the architecture.  */
static int tune_flags = 0;

/* The following are used in the arm.md file as equivalents to bits
   in the above two flag variables.  */

/* Nonzero if this is an "M" variant of the processor.  */
int arm_fast_multiply = 0;

/* Nonzero if this chip supports the ARM Architecture 4 extensions.  */
int arm_arch4 = 0;

/* Nonzero if this chip supports the ARM Architecture 5 extensions.  */
int arm_arch5 = 0;

/* Nonzero if this chip can benefit from load scheduling.  */
int arm_ld_sched = 0;

/* Nonzero if this chip is a StrongARM.  */
int arm_is_strong = 0;

/* Nonzero if this chip is an ARM6 or an ARM7.  */
int arm_is_6_or_7 = 0;

/* Nonzero if generating Thumb instructions.  */
int thumb_code = 0;

/* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
   must report the mode of the memory reference from PRINT_OPERAND to
   PRINT_OPERAND_ADDRESS.  */
enum machine_mode output_memory_reference_mode;

/* Nonzero if the prologue must set up `fp'.  */
int current_function_anonymous_args;

/* The register number to be used for the PIC offset register.  */
const char * arm_pic_register_string = NULL;
int arm_pic_register = 9;

/* Set to 1 when a return insn is output, this means that the epilogue
   is not needed.  */
int return_used_this_function;

/* Set to 1 after arm_reorg has started.  Reset to zero at the start of
   the next function.  */
static int after_arm_reorg = 0;

/* The maximum number of insns to be used when loading a constant.  */
static int arm_constant_limit = 3;

/* For an explanation of these variables, see final_prescan_insn below.  */
int arm_ccfsm_state;
enum arm_cond_code arm_current_cc;
rtx arm_target_insn;
int arm_target_label;

/* The condition codes of the ARM, and the inverse function.  */
const char * arm_condition_codes[] =
{
  "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
  "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
};

#define streq(string1, string2) (strcmp (string1, string2) == 0)
/* Initialization code.  */

struct processors
{
  const char * name;
  unsigned int flags;
};

/* Not all of these give usefully different compilation alternatives,
   but there is no simple way of generalizing them.  */
static struct processors all_cores[] =
{
  /* ARM Cores */

  {"arm2",          FL_CO_PROC | FL_MODE26 },
  {"arm250",        FL_CO_PROC | FL_MODE26 },
  {"arm3",          FL_CO_PROC | FL_MODE26 },
  {"arm6",          FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm60",         FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm600",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm610",                     FL_MODE26 | FL_MODE32 },
  {"arm620",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7",          FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  /* arm7m doesn't exist on its own, but only with D, (and I), but
     those don't alter the code, so arm7m is sometimes used.  */
  {"arm7m",         FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7d",         FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dm",        FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7di",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dmi",       FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm70",         FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700",        FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700i",       FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm710",                     FL_MODE26 | FL_MODE32 },
  {"arm720",                     FL_MODE26 | FL_MODE32 },
  {"arm710c",                    FL_MODE26 | FL_MODE32 },
  {"arm7100",                    FL_MODE26 | FL_MODE32 },
  {"arm7500",                    FL_MODE26 | FL_MODE32 },
  /* Doesn't have an external co-proc, but does have embedded fpu.  */
  {"arm7500fe",     FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7tdmi",      FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  {"arm8",          FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm810",        FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm9",          FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm920",        FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm920t",       FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm9tdmi",      FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"strongarm",     FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm110",  FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm1100", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },

  {NULL, 0}
};

static struct processors all_architectures[] =
{
  /* ARM Architectures */

  { "armv2",   FL_CO_PROC | FL_MODE26 },
  { "armv2a",  FL_CO_PROC | FL_MODE26 },
  { "armv3",   FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  { "armv3m",  FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  { "armv4",   FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 },
  /* Strictly, FL_MODE26 is a permitted option for v4t, but there are no
     implementations that support it, so we will leave it out for now.  */
  { "armv4t",  FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  { "armv5",   FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
  { NULL, 0 }
};

/* This is a magic structure.  The 'string' field is magically filled in
   with a pointer to the value specified by the user on the command line
   assuming that the user has specified such a value.  */

struct arm_cpu_select arm_select[] =
{
  /* string     name          processors  */
  { NULL,     "-mcpu=",     all_cores  },
  { NULL,     "-march=",    all_architectures },
  { NULL,     "-mtune=",    all_cores }
};
/* Return the number of bits set in VALUE.  */
static unsigned long
bit_count (value)
     signed int value;
{
  unsigned long count = 0;

  while (value)
    {
      value &= ~(value & -value);
      ++count;
    }

  return count;
}
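
/* The loop above relies on (VALUE & -VALUE) isolating the least
   significant set bit in a two's complement representation, so each
   iteration clears exactly one bit.  For example, for VALUE == 0x14
   the loop sees 0x14 -> 0x10 -> 0 and returns 2.  */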
/* Fix up any incompatible options that the user has specified.
   This has now turned into a maze.  */
void
arm_override_options ()
{
  unsigned i;

  /* Set up the flags based on the cpu/architecture selected by the user.  */
  for (i = sizeof (arm_select) / sizeof (arm_select[0]); i--;)
    {
      struct arm_cpu_select * ptr = arm_select + i;

      if (ptr->string != NULL && ptr->string[0] != '\0')
        {
          const struct processors * sel;

          for (sel = ptr->processors; sel->name != NULL; sel++)
            if (streq (ptr->string, sel->name))
              {
                if (i == 2)
                  tune_flags = sel->flags;
                else
                  {
                    /* If we have been given an architecture and a processor
                       make sure that they are compatible.  We only generate
                       a warning though, and we prefer the CPU over the
                       architecture.  */
                    if (insn_flags != 0 && (insn_flags ^ sel->flags))
                      warning ("switch -mcpu=%s conflicts with -march= switch",
                               ptr->string);

                    insn_flags = sel->flags;
                  }

                break;
              }

          if (sel->name == NULL)
            error ("bad value (%s) for %s switch", ptr->string, ptr->name);
        }
    }

  /* If the user did not specify a processor, choose one for them.  */
  if (insn_flags == 0)
    {
      struct processors * sel;
      unsigned int sought;
      static struct cpu_default
      {
        int cpu;
        const char * name;
      }
      cpu_defaults[] =
      {
        { TARGET_CPU_arm2,      "arm2" },
        { TARGET_CPU_arm6,      "arm6" },
        { TARGET_CPU_arm610,    "arm610" },
        { TARGET_CPU_arm710,    "arm710" },
        { TARGET_CPU_arm7m,     "arm7m" },
        { TARGET_CPU_arm7500fe, "arm7500fe" },
        { TARGET_CPU_arm7tdmi,  "arm7tdmi" },
        { TARGET_CPU_arm8,      "arm8" },
        { TARGET_CPU_arm810,    "arm810" },
        { TARGET_CPU_arm9,      "arm9" },
        { TARGET_CPU_strongarm, "strongarm" },
        { TARGET_CPU_generic,   "arm" },
        { 0, 0 }
      };
      struct cpu_default * def;

      /* Find the default.  */
      for (def = cpu_defaults; def->name; def++)
        if (def->cpu == TARGET_CPU_DEFAULT)
          break;

      /* Make sure we found the default CPU.  */
      if (def->name == NULL)
        abort ();

      /* Find the default CPU's flags.  */
      for (sel = all_cores; sel->name != NULL; sel++)
        if (streq (def->name, sel->name))
          break;

      if (sel->name == NULL)
        abort ();

      insn_flags = sel->flags;

      /* Now check to see if the user has specified some command line
         switch that require certain abilities from the cpu.  */
      sought = 0;

      if (TARGET_INTERWORK || TARGET_THUMB)
        {
          sought |= (FL_THUMB | FL_MODE32);

          /* Force apcs-32 to be used for interworking.  */
          target_flags |= ARM_FLAG_APCS_32;

          /* There are no ARM processors that support both APCS-26 and
             interworking.  Therefore we force FL_MODE26 to be removed
             from insn_flags here (if it was set), so that the search
             below will always be able to find a compatible processor.  */
          insn_flags &= ~FL_MODE26;
        }
      else if (! TARGET_APCS_32)
        sought |= FL_MODE26;

      if (sought != 0 && ((sought & insn_flags) != sought))
        {
          /* Try to locate a CPU type that supports all of the abilities
             of the default CPU, plus the extra abilities requested by
             the user.  */
          for (sel = all_cores; sel->name != NULL; sel++)
            if ((sel->flags & sought) == (sought | insn_flags))
              break;

          if (sel->name == NULL)
            {
              unsigned int        current_bit_count = 0;
              struct processors * best_fit = NULL;

              /* Ideally we would like to issue an error message here
                 saying that it was not possible to find a CPU compatible
                 with the default CPU, but which also supports the command
                 line options specified by the programmer, and so they
                 ought to use the -mcpu=<name> command line option to
                 override the default CPU type.

                 Unfortunately this does not work with multilibing.  We
                 need to be able to support multilibs for -mapcs-26 and for
                 -mthumb-interwork and there is no CPU that can support both
                 options.  Instead if we cannot find a cpu that has both the
                 characteristics of the default cpu and the given command line
                 options we scan the array again looking for a best match.  */
              for (sel = all_cores; sel->name != NULL; sel++)
                if ((sel->flags & sought) == sought)
                  {
                    unsigned int count;

                    count = bit_count (sel->flags & insn_flags);

                    if (count >= current_bit_count)
                      {
                        best_fit = sel;
                        current_bit_count = count;
                      }
                  }

              if (best_fit == NULL)
                abort ();
              else
                sel = best_fit;
            }

          insn_flags = sel->flags;
        }
    }

  /* If tuning has not been specified, tune for whichever processor or
     architecture has been selected.  */
  if (tune_flags == 0)
    tune_flags = insn_flags;

  /* Make sure that the processor choice does not conflict with any of the
     other command line choices.  */
  if (TARGET_APCS_32 && !(insn_flags & FL_MODE32))
    {
      /* If APCS-32 was not the default then it must have been set by the
         user, so issue a warning message.  If the user has specified
         "-mapcs-32 -mcpu=arm2" then we lose here.  */
      if ((TARGET_DEFAULT & ARM_FLAG_APCS_32) == 0)
        warning ("target CPU does not support APCS-32");
      target_flags &= ~ARM_FLAG_APCS_32;
    }
  else if (! TARGET_APCS_32 && !(insn_flags & FL_MODE26))
    {
      warning ("target CPU does not support APCS-26");
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_INTERWORK && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support interworking");
      target_flags &= ~ARM_FLAG_INTERWORK;
    }

  if (TARGET_THUMB && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support THUMB instructions.");
      target_flags &= ~ARM_FLAG_THUMB;
    }

  if (TARGET_APCS_FRAME && TARGET_THUMB)
    {
      /* warning ("ignoring -mapcs-frame because -mthumb was used."); */
      target_flags &= ~ARM_FLAG_APCS_FRAME;
    }

  /* TARGET_BACKTRACE calls leaf_function_p, which causes a crash if done
     from here where no function is being compiled currently.  */
  if ((target_flags & (THUMB_FLAG_LEAF_BACKTRACE | THUMB_FLAG_BACKTRACE))
      && TARGET_ARM)
    warning ("enabling backtrace support is only meaningful when compiling for the Thumb.");

  if (TARGET_ARM && TARGET_CALLEE_INTERWORKING)
    warning ("enabling callee interworking support is only meaningful when compiling for the Thumb.");

  if (TARGET_ARM && TARGET_CALLER_INTERWORKING)
    warning ("enabling caller interworking support is only meaningful when compiling for the Thumb.");

  /* If interworking is enabled then APCS-32 must be selected as well.  */
  if (TARGET_INTERWORK)
    {
      if (! TARGET_APCS_32)
        warning ("interworking forces APCS-32 to be used");
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_APCS_STACK && ! TARGET_APCS_FRAME)
    {
      warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
      target_flags |= ARM_FLAG_APCS_FRAME;
    }

  if (TARGET_POKE_FUNCTION_NAME)
    target_flags |= ARM_FLAG_APCS_FRAME;

  if (TARGET_APCS_REENT && flag_pic)
    fatal ("-fpic and -mapcs-reent are incompatible");

  if (TARGET_APCS_REENT)
    warning ("APCS reentrant code not supported.  Ignored");

  /* If this target is normally configured to use APCS frames, warn if they
     are turned off and debugging is turned on.  */
  if (TARGET_ARM
      && write_symbols != NO_DEBUG
      && ! TARGET_APCS_FRAME
      && (TARGET_DEFAULT & ARM_FLAG_APCS_FRAME))
    warning ("-g with -mno-apcs-frame may not give sensible debugging");

  /* If stack checking is disabled, we can use r10 as the PIC register,
     which keeps r9 available.  */
  if (flag_pic && ! TARGET_APCS_STACK)
    arm_pic_register = 10;

  if (TARGET_APCS_FLOAT)
    warning ("Passing floating point arguments in fp regs not yet supported");

  /* Initialise boolean versions of the flags, for use in the arm.md file.  */
  arm_fast_multiply = (insn_flags & FL_FAST_MULT) != 0;
  arm_arch4         = (insn_flags & FL_ARCH4) != 0;
  arm_arch5         = (insn_flags & FL_ARCH5) != 0;

  arm_ld_sched      = (tune_flags & FL_LDSCHED) != 0;
  arm_is_strong     = (tune_flags & FL_STRONG) != 0;
  thumb_code        = (TARGET_ARM == 0);
  arm_is_6_or_7     = (((tune_flags & (FL_MODE26 | FL_MODE32))
                       && !(tune_flags & FL_ARCH4))) != 0;

  /* Default value for floating point code... if no co-processor
     bus, then schedule for emulated floating point.  Otherwise,
     assume the user has an FPA.
     Note: this does not prevent use of floating point instructions,
     -msoft-float does that.  */
  arm_fpu = (tune_flags & FL_CO_PROC) ? FP_HARD : FP_SOFT3;

  if (target_fp_name)
    {
      if (streq (target_fp_name, "2"))
        arm_fpu_arch = FP_SOFT2;
      else if (streq (target_fp_name, "3"))
        arm_fpu_arch = FP_SOFT3;
      else
        fatal ("Invalid floating point emulation option: -mfpe-%s",
               target_fp_name);
    }
  else
    arm_fpu_arch = FP_DEFAULT;

  if (TARGET_FPE && arm_fpu != FP_HARD)
    arm_fpu = FP_SOFT2;

  /* For arm2/3 there is no need to do any scheduling if there is only
     a floating point emulator, or we are doing software floating-point.  */
  if ((TARGET_SOFT_FLOAT || arm_fpu != FP_HARD)
      && (tune_flags & FL_MODE32) == 0)
    flag_schedule_insns = flag_schedule_insns_after_reload = 0;

  arm_prgmode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;

  if (structure_size_string != NULL)
    {
      int size = strtol (structure_size_string, NULL, 0);

      if (size == 8 || size == 32)
        arm_structure_size_boundary = size;
      else
        warning ("Structure size boundary can only be set to 8 or 32");
    }

  if (arm_pic_register_string != NULL)
    {
      int pic_register;

      if (! flag_pic)
        warning ("-mpic-register= is useless without -fpic");

      pic_register = decode_reg_name (arm_pic_register_string);

      /* Prevent the user from choosing an obviously stupid PIC register.  */
      if (pic_register < 0 || call_used_regs[pic_register]
          || pic_register == HARD_FRAME_POINTER_REGNUM
          || pic_register == STACK_POINTER_REGNUM
          || pic_register >= PC_REGNUM)
        error ("Unable to use '%s' for PIC register", arm_pic_register_string);
      else
        arm_pic_register = pic_register;
    }

  if (TARGET_THUMB && flag_schedule_insns)
    {
      /* Don't warn since it's on by default in -O2.  */
      flag_schedule_insns = 0;
    }

  /* If optimizing for space, don't synthesize constants.
     For processors with load scheduling, it never costs more than 2 cycles
     to load a constant, and the load scheduler may well reduce that to 1.  */
  if (optimize_size || (tune_flags & FL_LDSCHED))
    arm_constant_limit = 1;

  /* If optimizing for size, bump the number of instructions that we
     are prepared to conditionally execute (even on a StrongARM).
     Otherwise for the StrongARM, which has early execution of branches,
     a sequence that is worth skipping is shorter.  */
  if (optimize_size)
    max_insns_skipped = 6;
  else if (arm_is_strong)
    max_insns_skipped = 3;

  /* Register global variables with the garbage collector.  */
  arm_add_gc_roots ();
}
static void
arm_add_gc_roots ()
{
  ggc_add_rtx_root (&arm_compare_op0, 1);
  ggc_add_rtx_root (&arm_compare_op1, 1);
  ggc_add_rtx_root (&arm_target_insn, 1); /* Not sure this is really a root.  */
  /* XXX: What about the minipool tables?  */
}
/* Return 1 if it is possible to return using a single instruction.  */
int
use_return_insn (iscond)
     int iscond;
{
  int regno;

  /* Never use a return instruction before reload has run.  */
  if (! reload_completed
      /* Or if the function is variadic.  */
      || current_function_pretend_args_size
      || current_function_anonymous_args
      /* Or if the function calls __builtin_eh_return ().  */
      || cfun->machine->eh_epilogue_sp_ofs != NULL
      /* Or if there is no frame pointer and there is a stack adjustment.  */
      || ((get_frame_size () + current_function_outgoing_args_size != 0)
          && ! frame_pointer_needed))
    return 0;

  /* Can't be done if interworking with Thumb, and any registers have been
     stacked.  Similarly, on StrongARM, conditional returns are expensive
     if they aren't taken and registers have been stacked.  */
  if (iscond && arm_is_strong && frame_pointer_needed)
    return 0;

  if ((iscond && arm_is_strong)
      || TARGET_INTERWORK)
    {
      for (regno = 0; regno <= LAST_ARM_REGNUM; regno++)
        if (regs_ever_live[regno] && ! call_used_regs[regno])
          return 0;

      if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
        return 0;
    }

  /* Can't be done if any of the FPU regs are pushed, since this also
     requires an insn.  */
  if (TARGET_HARD_FLOAT)
    for (regno = FIRST_ARM_FP_REGNUM; regno <= LAST_ARM_FP_REGNUM; regno++)
      if (regs_ever_live[regno] && ! call_used_regs[regno])
        return 0;

  /* If a function is naked, don't use the "return" insn.  */
  if (arm_naked_function_p (current_function_decl))
    return 0;

  return 1;
}
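
/* When this returns 1 the epilogue can be a single instruction; for
   example a leaf function that saves no registers and needs no stack
   adjustment can return with just "mov pc, lr" (or "bx lr" when
   interworking with Thumb), which is what the tests above guard.  */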
/* Return TRUE if int I is a valid immediate ARM constant.  */
int
const_ok_for_arm (i)
     HOST_WIDE_INT i;
{
  unsigned HOST_WIDE_INT mask = ~ HOST_UINT (0xFF);

  /* For machines with >32 bit HOST_WIDE_INT, the bits above bit 31 must
     be all zero, or all one.  */
  if ((i & ~ HOST_UINT (0xffffffff)) != 0
      && ((i & ~ HOST_UINT (0xffffffff))
          != ((~ HOST_UINT (0))
              & ~ HOST_UINT (0xffffffff))))
    return FALSE;

  /* Fast return for 0 and powers of 2.  */
  if ((i & (i - 1)) == 0)
    return TRUE;

  do
    {
      if ((i & mask & HOST_UINT (0xffffffff)) == 0)
        return TRUE;
      mask =
        (mask << 2) | ((mask & HOST_UINT (0xffffffff))
                       >> (32 - 2)) | ~(HOST_UINT (0xffffffff));
    }
  while (mask != ~ HOST_UINT (0xFF));

  return FALSE;
}
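
/* A valid ARM immediate is an 8-bit value rotated right by an even
   amount; the loop above tries each of the sixteen possible
   rotations.  For example 0xFF, 0x3FC (0xFF << 2) and 0xFF000000 are
   all representable, while 0x101 and 0x1FE (an odd rotation) are
   not.  */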
/* Return true if I is a valid constant for the operation CODE.  */
static int
const_ok_for_op (i, code)
     HOST_WIDE_INT i;
     enum rtx_code code;
{
  if (const_ok_for_arm (i))
    return 1;

  switch (code)
    {
    case PLUS:
      return const_ok_for_arm (ARM_SIGN_EXTEND (-i));

    case MINUS:		/* Should only occur with (MINUS I reg) => rsb */
    case XOR:
    case IOR:
      return 0;

    case AND:
      return const_ok_for_arm (ARM_SIGN_EXTEND (~i));

    default:
      abort ();
    }
}
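
/* The PLUS and AND cases above cover constants that become encodable
   when the operation is replaced by its complement: for example
   adding -2 can be done as a SUB of 2, and ANDing with ~0xFF can be
   done as BIC 0xFF.  */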
/* Emit a sequence of insns to handle a large constant.
   CODE is the code of the operation required, it can be any of SET, PLUS,
   IOR, AND, XOR, MINUS;
   MODE is the mode in which the operation is being performed;
   VAL is the integer to operate on;
   SOURCE is the other operand (a register, or a null-pointer for SET);
   SUBTARGETS means it is safe to create scratch registers if that will
   either produce a simpler sequence, or we will want to cse the values.
   Return value is the number of insns emitted.  */
int
arm_split_constant (code, mode, val, target, source, subtargets)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
{
  if (subtargets || code == SET
      || (GET_CODE (target) == REG && GET_CODE (source) == REG
          && REGNO (target) != REGNO (source)))
    {
      /* After arm_reorg has been called, we can't fix up expensive
         constants by pushing them into memory so we must synthesise
         them in-line, regardless of the cost.  This is only likely to
         be more costly on chips that have load delay slots and we are
         compiling without running the scheduler (so no splitting
         occurred before the final instruction emission).

         Ref: gcc -O1 -mcpu=strongarm gcc.c-torture/compile/980506-2.c  */
      if (! after_arm_reorg
          && (arm_gen_constant (code, mode, val, target, source, 1, 0)
              > arm_constant_limit + (code != SET)))
        {
          if (code == SET)
            {
              /* Currently SET is the only monadic value for CODE, all
                 the rest are dyadic.  */
              emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (val)));
              return 1;
            }
          else
            {
              rtx temp = subtargets ? gen_reg_rtx (mode) : target;

              emit_insn (gen_rtx_SET (VOIDmode, temp, GEN_INT (val)));
              /* For MINUS, the value is subtracted from, since we never
                 have subtraction of a constant.  */
              if (code == MINUS)
                emit_insn (gen_rtx_SET (VOIDmode, target,
                                        gen_rtx_MINUS (mode, temp, source)));
              else
                emit_insn (gen_rtx_SET (VOIDmode, target,
                                        gen_rtx (code, mode, source, temp)));
              return 2;
            }
        }
    }

  return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
}
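
/* For example (illustrative): a SET of the unencodable immediate
   0xFFF can be synthesised in two instructions,

       mov     rD, #0xF00
       orr     rD, rD, #0xFF

   and arm_gen_constant below makes this kind of choice; the code
   above only decides whether synthesising beats a load from the
   constant pool.  */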
/* As above, but extra parameter GENERATE which, if clear, suppresses
   RTL generation.  */
static int
arm_gen_constant (code, mode, val, target, source, subtargets, generate)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
     int generate;
{
  int can_invert = 0;
  int can_negate = 0;
  int can_negate_initial = 0;
  int can_shift = 0;
  int i;
  int num_bits_set = 0;
  int set_sign_bit_copies = 0;
  int clear_sign_bit_copies = 0;
  int clear_zero_bit_copies = 0;
  int set_zero_bit_copies = 0;
  int insns = 0;
  unsigned HOST_WIDE_INT temp1, temp2;
  unsigned HOST_WIDE_INT remainder = val & HOST_UINT (0xffffffff);

  /* Find out which operations are safe for a given CODE.  Also do a quick
     check for degenerate cases; these can occur when DImode operations
     are split.  */
  switch (code)
    {
    case SET:
      can_invert = 1;
      can_shift = 1;
      can_negate = 1;
      break;

    case PLUS:
      can_negate = 1;
      can_negate_initial = 1;
      break;

    case IOR:
      if (remainder == HOST_UINT (0xffffffff))
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    GEN_INT (ARM_SIGN_EXTEND (val))));
          return 1;
        }
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      break;

    case AND:
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, const0_rtx));
          return 1;
        }
      if (remainder == HOST_UINT (0xffffffff))
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      can_invert = 1;
      break;

    case XOR:
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      if (remainder == HOST_UINT (0xffffffff))
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_NOT (mode, source)));
          return 1;
        }

      /* We don't know how to handle this yet below.  */
      abort ();

    case MINUS:
      /* We treat MINUS as (val - source), since (source - val) is always
         passed as (source + (-val)).  */
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_NEG (mode, source)));
          return 1;
        }
      if (const_ok_for_arm (val))
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_MINUS (mode, GEN_INT (val),
                                                   source)));
          return 1;
        }
      can_negate = 1;

      break;

    default:
      abort ();
    }

  /* If we can do it in one insn get out quickly.  */
  if (const_ok_for_arm (val)
      || (can_negate_initial && const_ok_for_arm (-val))
      || (can_invert && const_ok_for_arm (~val)))
    {
      if (generate)
        emit_insn (gen_rtx_SET (VOIDmode, target,
                                (source ? gen_rtx (code, mode, source,
                                                   GEN_INT (val))
                                 : GEN_INT (val))));
      return 1;
    }

  /* Calculate a few attributes that may be useful for specific
     optimizations.  */
  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) == 0)
        clear_sign_bit_copies++;
      else
        break;
    }

  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) != 0)
        set_sign_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) == 0)
        clear_zero_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) != 0)
        set_zero_bit_copies++;
      else
        break;
    }
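
  /* For example, with remainder == 0x0000ff00 the four loops above
     yield clear_sign_bit_copies == 16, set_sign_bit_copies == 0,
     clear_zero_bit_copies == 8 and set_zero_bit_copies == 0.  */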
  switch (code)
    {
    case SET:
      /* See if we can do this by sign_extending a constant that is known
         to be negative.  This is a good way of doing it, since the shift
         may well merge into a subsequent insn.  */
      if (set_sign_bit_copies > 1)
        {
          if (const_ok_for_arm
              (temp1 = ARM_SIGN_EXTEND (remainder
                                        << (set_sign_bit_copies - 1))))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx_SET (VOIDmode, new_src,
                                          GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
          /* For an inverted constant, we will need to set the low bits,
             these will be shifted out of harm's way.  */
          temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
          if (const_ok_for_arm (~temp1))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx_SET (VOIDmode, new_src,
                                          GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
        }

      /* See if we can generate this by setting the bottom (or the top)
         16 bits, and then shifting these into the other half of the
         word.  We only look for the simplest cases, to do more would cost
         too much.  Be careful, however, not to generate this when the
         alternative would take fewer insns.  */
      if (val & HOST_UINT (0xffff0000))
        {
          temp1 = remainder & HOST_UINT (0xffff0000);
          temp2 = remainder & 0x0000ffff;

          /* Overlaps outside this range are best done using other methods.  */
          for (i = 9; i < 24; i++)
            {
              if ((((temp2 | (temp2 << i))
                    & HOST_UINT (0xffffffff)) == remainder)
                  && ! const_ok_for_arm (temp2))
                {
                  rtx new_src = (subtargets
                                 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
                                 : target);
                  insns = arm_gen_constant (code, mode, temp2, new_src,
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn (gen_rtx_SET
                               (VOIDmode, target,
                                gen_rtx_IOR (mode,
                                             gen_rtx_ASHIFT (mode, source,
                                                             GEN_INT (i)),
                                             source)));
                  return insns + 1;
                }
            }

          /* Don't duplicate cases already considered.  */
          for (i = 17; i < 24; i++)
            {
              if (((temp1 | (temp1 >> i)) == remainder)
                  && ! const_ok_for_arm (temp1))
                {
                  rtx new_src = (subtargets
                                 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
                                 : target);
                  insns = arm_gen_constant (code, mode, temp1, new_src,
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn
                      (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_IOR
                                    (mode,
                                     gen_rtx_LSHIFTRT (mode, source,
                                                       GEN_INT (i)),
                                     source)));
                  return insns + 1;
                }
            }
        }
      break;

    case IOR:
    case XOR:
      /* If we have IOR or XOR, and the constant can be loaded in a
         single instruction, and we can find a temporary to put it in,
         then this can be done in two instructions instead of 3-4.  */
      if (subtargets
          /* TARGET can't be NULL if SUBTARGETS is 0.  */
          || (reload_completed && ! reg_mentioned_p (target, source)))
        {
          if (const_ok_for_arm (ARM_SIGN_EXTEND (~ val)))
            {
              if (generate)
                {
                  rtx sub = subtargets ? gen_reg_rtx (mode) : target;

                  emit_insn (gen_rtx_SET (VOIDmode, sub, GEN_INT (val)));
                  emit_insn (gen_rtx_SET (VOIDmode, target,
                                          gen_rtx (code, mode, source, sub)));
                }
              return 2;
            }
        }

      if (code == XOR)
        break;

      if (set_sign_bit_copies > 8
          && (val & (-1 << (32 - set_sign_bit_copies))) == val)
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (set_sign_bit_copies);

              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_ASHIFT (mode,
                                                                   source,
                                                                   shift))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_LSHIFTRT (mode, sub,
                                                                     shift))));
            }
          return 2;
        }

      if (set_zero_bit_copies > 8
          && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (set_zero_bit_copies);

              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_LSHIFTRT (mode,
                                                                     source,
                                                                     shift))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_ASHIFT (mode, sub,
                                                                   shift))));
            }
          return 2;
        }

      if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~ val)))
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode, source)));
              source = sub;
              if (subtargets)
                sub = gen_reg_rtx (mode);
              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_AND (mode, source,
                                                   GEN_INT (temp1))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode, sub)));
            }
          return 3;
        }
      break;

    case AND:
      /* See if two shifts will do 2 or more insn's worth of work.  */
      if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
        {
          HOST_WIDE_INT shift_mask = (((HOST_UINT (0xffffffff))
                                       << (32 - clear_sign_bit_copies))
                                      & HOST_UINT (0xffffffff));

          if ((remainder | shift_mask) != HOST_UINT (0xffffffff))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            new_src, source, subtargets, 1);
                  source = new_src;
                }
              else
                {
                  rtx targ = subtargets ? NULL_RTX : target;
                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            targ, source, subtargets, 0);
                }
            }

          if (generate)
            {
              rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (clear_sign_bit_copies);

              emit_insn (gen_ashlsi3 (new_src, source, shift));
              emit_insn (gen_lshrsi3 (target, new_src, shift));
            }

          return insns + 2;
        }

      if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
        {
          HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;

          if ((remainder | shift_mask) != HOST_UINT (0xffffffff))
            {
              if (generate)
                {
                  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;

                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            new_src, source, subtargets, 1);
                  source = new_src;
                }
              else
                {
                  rtx targ = subtargets ? NULL_RTX : target;

                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            targ, source, subtargets, 0);
                }
            }

          if (generate)
            {
              rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (clear_zero_bit_copies);

              emit_insn (gen_lshrsi3 (new_src, source, shift));
              emit_insn (gen_ashlsi3 (target, new_src, shift));
            }

          return insns + 2;
        }

      break;

    default:
      break;
    }

  for (i = 0; i < 32; i++)
    if (remainder & (1 << i))
      num_bits_set++;

  if (code == AND || (can_invert && num_bits_set > 16))
    remainder = (~remainder) & HOST_UINT (0xffffffff);
  else if (code == PLUS && num_bits_set > 16)
    remainder = (-remainder) & HOST_UINT (0xffffffff);
  else
    {
      can_invert = 0;
      can_negate = 0;
    }

  /* Now try and find a way of doing the job in either two or three
     instructions.
     We start by looking for the largest block of zeros that are aligned on
     a 2-bit boundary, we then fill up the temps, wrapping around to the
     top of the word when we drop off the bottom.
     In the worst case this code should produce no more than four insns.  */
  {
    int best_start = 0;
    int best_consecutive_zeros = 0;

    for (i = 0; i < 32; i += 2)
      {
        int consecutive_zeros = 0;

        if (! (remainder & (3 << i)))
          {
            while ((i < 32) && ! (remainder & (3 << i)))
              {
                consecutive_zeros += 2;
                i += 2;
              }
            if (consecutive_zeros > best_consecutive_zeros)
              {
                best_consecutive_zeros = consecutive_zeros;
                best_start = i - consecutive_zeros;
              }
            i -= 2;
          }
      }

    /* Now start emitting the insns, starting with the one with the highest
       bit set: we do this so that the smallest number will be emitted last;
       this is more likely to be combinable with addressing insns.  */
    i = best_start;
    do
      {
        int end;

        if (i <= 0)
          i += 32;
        if (remainder & (3 << (i - 2)))
          {
            end = i - 8;
            if (end < 0)
              end += 32;
            temp1 = remainder & ((0x0ff << end)
                                 | ((i < end) ? (0xff >> (32 - end)) : 0));
            remainder &= ~temp1;

            if (generate)
              {
                rtx new_src;

                if (code == SET)
                  emit_insn (gen_rtx_SET (VOIDmode,
                                          new_src = (subtargets
                                                     ? gen_reg_rtx (mode)
                                                     : target),
                                          GEN_INT (can_invert
                                                   ? ~temp1 : temp1)));
                else if (code == MINUS)
                  emit_insn (gen_rtx_SET (VOIDmode,
                                          new_src = (subtargets
                                                     ? gen_reg_rtx (mode)
                                                     : target),
                                          gen_rtx (code, mode, GEN_INT (temp1),
                                                   source)));
                else
                  emit_insn (gen_rtx_SET (VOIDmode,
                                          new_src = (remainder
                                                     ? (subtargets
                                                        ? gen_reg_rtx (mode)
                                                        : target)
                                                     : target),
                                          gen_rtx (code, mode, source,
                                                   GEN_INT (can_invert ? ~temp1
                                                            : (can_negate
                                                               ? -temp1
                                                               : temp1)))));
                source = new_src;
              }

            if (code == SET)
              {
                can_invert = 0;
                code = PLUS;
              }
            else if (code == MINUS)
              code = PLUS;

            insns++;
            i -= 6;
          }
        i -= 2;
      }
    while (remainder);
  }

  return insns;
}
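
/* In the worst case the chunking loop above emits four instructions;
   for example (illustrative, assuming a SET of 0x12345678 with no
   cheaper pattern available):

       mov     rD, #0x12000000
       add     rD, rD, #0x00340000
       add     rD, rD, #0x00005600
       add     rD, rD, #0x00000078  */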
/* Canonicalize a comparison so that we are more likely to recognize it.
   This can be done for a few constant compares, where we can make the
   immediate value easier to load.  */
enum rtx_code
arm_canonicalize_comparison (code, op1)
     enum rtx_code code;
     rtx * op1;
{
  unsigned HOST_WIDE_INT i = INTVAL (*op1);

  switch (code)
    {
    case EQ:
    case NE:
      return code;

    case GT:
    case LE:
      if (i != (((HOST_UINT (1)) << (HOST_BITS_PER_WIDE_INT - 1))
                - 1)
          && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
        {
          *op1 = GEN_INT (i + 1);
          return code == GT ? GE : LT;
        }
      break;

    case GE:
    case LT:
      if (i != ((HOST_UINT (1)) << (HOST_BITS_PER_WIDE_INT - 1))
          && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
        {
          *op1 = GEN_INT (i - 1);
          return code == GE ? GT : LE;
        }
      break;

    case GTU:
    case LEU:
      if (i != ~(HOST_UINT (0))
          && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
        {
          *op1 = GEN_INT (i + 1);
          return code == GTU ? GEU : LTU;
        }
      break;

    case GEU:
    case LTU:
      if (i != 0
          && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
        {
          *op1 = GEN_INT (i - 1);
          return code == GEU ? GTU : LEU;
        }
      break;

    default:
      abort ();
    }

  return code;
}
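
/* For example, "x > 0x3FF" would need the unencodable immediate
   0x3FF, but the equivalent "x >= 0x400" uses 0x400, a valid rotated
   8-bit constant, so GT is rewritten as GE here.  */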
/* Decide whether a type should be returned in memory (true)
   or in a register (false).  This is called by the macro
   RETURN_IN_MEMORY.  */
int
arm_return_in_memory (type)
     tree type;
{
  if (! AGGREGATE_TYPE_P (type))
    /* All simple types are returned in registers.  */
    return 0;

  /* For the arm-wince targets we choose to be compatible with Microsoft's
     ARM and Thumb compilers, which always return aggregates in memory.  */
#ifndef ARM_WINCE

  if (int_size_in_bytes (type) > 4)
    /* All structures/unions bigger than one word are returned in memory.  */
    return 1;

  if (TREE_CODE (type) == RECORD_TYPE)
    {
      tree field;

      /* For a struct the APCS says that we only return in a register
         if the type is 'integer like' and every addressable element
         has an offset of zero.  For practical purposes this means
         that the structure can have at most one non bit-field element
         and that this element must be the first one in the structure.  */

      /* Find the first field, ignoring non FIELD_DECL things which will
         have been created by C++.  */
      for (field = TYPE_FIELDS (type);
           field && TREE_CODE (field) != FIELD_DECL;
           field = TREE_CHAIN (field))
        continue;

      if (field == NULL)
        return 0; /* An empty structure.  Allowed by an extension to ANSI C.  */

      /* Check that the first field is valid for returning in a register.  */

      /* ... Floats are not allowed.  */
      if (FLOAT_TYPE_P (TREE_TYPE (field)))
        return 1;

      /* ... Aggregates that are not themselves valid for returning in
         a register are not allowed.  */
      if (RETURN_IN_MEMORY (TREE_TYPE (field)))
        return 1;

      /* Now check the remaining fields, if any.  Only bitfields are allowed,
         since they are not addressable.  */
      for (field = TREE_CHAIN (field);
           field;
           field = TREE_CHAIN (field))
        {
          if (TREE_CODE (field) != FIELD_DECL)
            continue;

          if (! DECL_BIT_FIELD_TYPE (field))
            return 1;
        }

      return 0;
    }

  if (TREE_CODE (type) == UNION_TYPE)
    {
      tree field;

      /* Unions can be returned in registers if every element is
         integral, or can be returned in an integer register.  */
      for (field = TYPE_FIELDS (type);
           field;
           field = TREE_CHAIN (field))
        {
          if (TREE_CODE (field) != FIELD_DECL)
            continue;

          if (FLOAT_TYPE_P (TREE_TYPE (field)))
            return 1;

          if (RETURN_IN_MEMORY (TREE_TYPE (field)))
            return 1;
        }

      return 0;
    }
#endif /* not ARM_WINCE */

  /* Return all other types in memory.  */
  return 1;
}
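
/* Illustrative consequences of the APCS rules above:
       struct { int i; }          -- returned in a register;
       struct { int i; int j; }   -- 8 bytes, returned in memory;
       struct { float f; }        -- returned in memory, since floats
                                     are not 'integer like'.  */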
/* Initialize a variable CUM of type CUMULATIVE_ARGS
   for a call to a function whose data type is FNTYPE.
   For a library call, FNTYPE is NULL.  */
void
arm_init_cumulative_args (pcum, fntype, libname, indirect)
     CUMULATIVE_ARGS * pcum;
     tree fntype;
     rtx libname ATTRIBUTE_UNUSED;
     int indirect ATTRIBUTE_UNUSED;
{
  /* On the ARM, the offset starts at 0.  */
  pcum->nregs = ((fntype && aggregate_value_p (TREE_TYPE (fntype))) ? 1 : 0);

  pcum->call_cookie = CALL_NORMAL;

  if (TARGET_LONG_CALLS)
    pcum->call_cookie = CALL_LONG;

  /* Check for long call/short call attributes.  The attributes
     override any command line option.  */
  if (fntype)
    {
      if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (fntype)))
        pcum->call_cookie = CALL_SHORT;
      else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (fntype)))
        pcum->call_cookie = CALL_LONG;
    }
}
/* Determine where to put an argument to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).  */
rtx
arm_function_arg (pcum, mode, type, named)
     CUMULATIVE_ARGS * pcum;
     enum machine_mode mode;
     tree type ATTRIBUTE_UNUSED;
     int named;
{
  if (mode == VOIDmode)
    /* Compute operand 2 of the call insn.  */
    return GEN_INT (pcum->call_cookie);

  if (! named || pcum->nregs >= NUM_ARG_REGS)
    return NULL_RTX;

  return gen_rtx_REG (mode, pcum->nregs);
}
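
/* For example, the first NUM_ARG_REGS argument words (r0-r3 under
   the APCS) are passed in registers, so for f (int a, int b) A
   arrives in r0 and B in r1, while a fifth word-sized argument
   would be pushed on the stack.  */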
/* Encode the current state of the #pragma [no_]long_calls.  */
typedef enum
{
  OFF,    /* No #pragma [no_]long_calls is in effect.  */
  LONG,   /* #pragma long_calls is in effect.  */
  SHORT   /* #pragma no_long_calls is in effect.  */
} arm_pragma_enum;

static arm_pragma_enum arm_pragma_long_calls = OFF;

/* Handle pragmas for compatibility with Intel's compilers.
   FIXME: This is incomplete, since it does not handle all
   the pragmas that the Intel compilers understand.  */
int
arm_process_pragma (p_getc, p_ungetc, pname)
     int (* p_getc) PARAMS ((void)) ATTRIBUTE_UNUSED;
     void (* p_ungetc) PARAMS ((int)) ATTRIBUTE_UNUSED;
     char * pname;
{
  /* Should be pragma 'far' or equivalent for callx/balx here.  */
  if (strcmp (pname, "long_calls") == 0)
    arm_pragma_long_calls = LONG;
  else if (strcmp (pname, "no_long_calls") == 0)
    arm_pragma_long_calls = SHORT;
  else if (strcmp (pname, "long_calls_off") == 0)
    arm_pragma_long_calls = OFF;
  else
    return 0;

  return 1;
}
/* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
   attribute for TYPE.  The attributes in ATTRIBUTES have previously been
   assigned to TYPE.  */
int
arm_valid_type_attribute_p (type, attributes, identifier, args)
     tree type;
     tree attributes ATTRIBUTE_UNUSED;
     tree identifier;
     tree args;
{
  if (   TREE_CODE (type) != FUNCTION_TYPE
      && TREE_CODE (type) != METHOD_TYPE
      && TREE_CODE (type) != FIELD_DECL
      && TREE_CODE (type) != TYPE_DECL)
    return 0;

  /* Function calls made to this symbol must be done indirectly, because
     it may lie outside of the 26 bit addressing range of a normal function
     call.  */
  if (is_attribute_p ("long_call", identifier))
    return (args == NULL_TREE);

  /* Whereas these functions are always known to reside within the 26 bit
     addressing range.  */
  if (is_attribute_p ("short_call", identifier))
    return (args == NULL_TREE);

  return 0;
}

/* Return 0 if the attributes for two types are incompatible, 1 if they
   are compatible, and 2 if they are nearly compatible (which causes a
   warning to be generated).  */
int
arm_comp_type_attributes (type1, type2)
     tree type1;
     tree type2;
{
  int l1, l2, s1, s2;

  /* Check for mismatch of non-default calling convention.  */
  if (TREE_CODE (type1) != FUNCTION_TYPE)
    return 1;

  /* Check for mismatched call attributes.  */
  l1 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type1)) != NULL;
  l2 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type2)) != NULL;
  s1 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type1)) != NULL;
  s2 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type2)) != NULL;

  /* Only bother to check if an attribute is defined.  */
  if (l1 | l2 | s1 | s2)
    {
      /* If one type has an attribute, the other must have the same attribute.  */
      if ((l1 != l2) || (s1 != s2))
        return 0;

      /* Disallow mixed attributes.  */
      if ((l1 & s2) || (l2 & s1))
        return 0;
    }

  return 1;
}
/* Encode long_call or short_call attribute by prefixing
   symbol name in DECL with a special character FLAG.  */
void
arm_encode_call_attribute (decl, flag)
     tree decl;
     int flag;
{
  const char * str = XSTR (XEXP (DECL_RTL (decl), 0), 0);
  int len = strlen (str);
  char * newstr;

  if (TREE_CODE (decl) != FUNCTION_DECL)
    return;

  /* Do not allow weak functions to be treated as short call.  */
  if (DECL_WEAK (decl) && flag == SHORT_CALL_FLAG_CHAR)
    return;

  if (ggc_p)
    newstr = ggc_alloc_string (NULL, len + 2);
  else
    newstr = permalloc (len + 2);

  sprintf (newstr, "%c%s", flag, str);

  XSTR (XEXP (DECL_RTL (decl), 0), 0) = newstr;
}

/* Assigns default attributes to newly defined type.  This is used to
   set short_call/long_call attributes for function types of
   functions defined inside corresponding #pragma scopes.  */
void
arm_set_default_type_attributes (type)
     tree type;
{
  /* Add __attribute__ ((long_call)) to all functions, when
     inside #pragma long_calls or __attribute__ ((short_call)),
     when inside #pragma no_long_calls.  */
  if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
    {
      tree type_attr_list, attr_name;
      type_attr_list = TYPE_ATTRIBUTES (type);

      if (arm_pragma_long_calls == LONG)
        attr_name = get_identifier ("long_call");
      else if (arm_pragma_long_calls == SHORT)
        attr_name = get_identifier ("short_call");
      else
        return;

      type_attr_list = tree_cons (attr_name, NULL_TREE, type_attr_list);
      TYPE_ATTRIBUTES (type) = type_attr_list;
    }
}
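
/* For example (illustrative):

       #pragma long_calls
       extern void foo (void);

   gives FOO's function type the long_call attribute, so calls to FOO
   are compiled as long calls even though the declaration carries no
   explicit attribute.  */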
/* Return 1 if the operand is a SYMBOL_REF for a function known to be
   defined within the current compilation unit.  If this cannot be
   determined, then 0 is returned.  */
static int
current_file_function_operand (sym_ref)
     rtx sym_ref;
{
  /* This is a bit of a fib.  A function will have a short call flag
     applied to its name if it has the short call attribute, or it has
     already been defined within the current compilation unit.  */
  if (ENCODED_SHORT_CALL_ATTR_P (XSTR (sym_ref, 0)))
    return 1;

  /* The current function is always defined within the current compilation
     unit.  If it is a weak definition however, then this may not be the real
     definition of the function, and so we have to say no.  */
  if (sym_ref == XEXP (DECL_RTL (current_function_decl), 0)
      && ! DECL_WEAK (current_function_decl))
    return 1;

  /* We cannot make the determination - default to returning 0.  */
  return 0;
}
/* Return non-zero if a 32 bit "long_call" should be generated for
   this call.  We generate a long_call if the function:

        a.  has an __attribute__ ((long_call))
   or   b.  is within the scope of a #pragma long_calls
   or   c.  the -mlong-calls command line switch has been specified

   However we do not generate a long call if the function:

        d.  has an __attribute__ ((short_call))
   or   e.  is inside the scope of a #pragma no_long_calls
   or   f.  has an __attribute__ ((section))
   or   g.  is defined within the current compilation unit.

   This function will be called by C fragments contained in the machine
   description file.  CALL_REF and CALL_COOKIE correspond to the matched
   rtl operands.  CALL_SYMBOL is used to distinguish between
   two different callers of the function.  It is set to 1 in the
   "call_symbol" and "call_symbol_value" patterns and to 0 in the "call"
   and "call_value" patterns.  This is because of the difference in the
   SYM_REFs passed by these patterns.  */
int
arm_is_longcall_p (sym_ref, call_cookie, call_symbol)
     rtx sym_ref;
     int call_cookie;
     int call_symbol;
{
  if (! call_symbol)
    {
      if (GET_CODE (sym_ref) != MEM)
        return 0;

      sym_ref = XEXP (sym_ref, 0);
    }

  if (GET_CODE (sym_ref) != SYMBOL_REF)
    return 0;

  if (call_cookie & CALL_SHORT)
    return 0;

  if (TARGET_LONG_CALLS && flag_function_sections)
    return 1;

  if (current_file_function_operand (sym_ref))
    return 0;

  return (call_cookie & CALL_LONG)
    || ENCODED_LONG_CALL_ATTR_P (XSTR (sym_ref, 0))
    || TARGET_LONG_CALLS;
}
int
legitimate_pic_operand_p (x)
     rtx x;
{
  if (CONSTANT_P (x)
      && flag_pic
      && (GET_CODE (x) == SYMBOL_REF
          || (GET_CODE (x) == CONST
              && GET_CODE (XEXP (x, 0)) == PLUS
              && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)))
    return 0;

  return 1;
}
rtx
legitimize_pic_address (orig, mode, reg)
     rtx orig;
     enum machine_mode mode;
     rtx reg;
{
  if (GET_CODE (orig) == SYMBOL_REF)
    {
      rtx pic_ref, address;
      rtx insn;
      int subregs = 0;

      if (reg == 0)
        {
          if (reload_in_progress || reload_completed)
            abort ();
          else
            reg = gen_reg_rtx (Pmode);

          subregs = 1;
        }

#ifdef AOF_ASSEMBLER
      /* The AOF assembler can generate relocations for these directly, and
         understands that the PIC register has to be added into the offset.  */
      insn = emit_insn (gen_pic_load_addr_based (reg, orig));
#else
      if (subregs)
        address = gen_reg_rtx (Pmode);
      else
        address = reg;

      emit_insn (gen_pic_load_addr (address, orig));

      pic_ref = gen_rtx_MEM (Pmode,
                             gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
                                           address));
      RTX_UNCHANGING_P (pic_ref) = 1;
      insn = emit_move_insn (reg, pic_ref);
#endif
      current_function_uses_pic_offset_table = 1;
      /* Put a REG_EQUAL note on this insn, so that it can be optimized
         by loop.  */
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, orig,
                                            REG_NOTES (insn));
      return reg;
    }
  else if (GET_CODE (orig) == CONST)
    {
      rtx base, offset;

      if (GET_CODE (XEXP (orig, 0)) == PLUS
          && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
        return orig;

      if (reg == 0)
        {
          if (reload_in_progress || reload_completed)
            abort ();
          else
            reg = gen_reg_rtx (Pmode);
        }

      if (GET_CODE (XEXP (orig, 0)) == PLUS)
        {
          base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
          offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
                                           base == reg ? 0 : reg);
        }
      else
        abort ();

      if (GET_CODE (offset) == CONST_INT)
        {
          /* The base register doesn't really matter, we only want to
             test the index for the appropriate mode.  */
          GO_IF_LEGITIMATE_INDEX (mode, 0, offset, win);

          if (! reload_in_progress && ! reload_completed)
            offset = force_reg (Pmode, offset);
          else
            abort ();

        win:
          if (GET_CODE (offset) == CONST_INT)
            return plus_constant_for_output (base, INTVAL (offset));
        }

      if (GET_MODE_SIZE (mode) > 4
          && (GET_MODE_CLASS (mode) == MODE_INT
              || TARGET_SOFT_FLOAT))
        {
          emit_insn (gen_addsi3 (reg, base, offset));
          return reg;
        }

      return gen_rtx_PLUS (Pmode, base, offset);
    }
  else if (GET_CODE (orig) == LABEL_REF)
    {
      current_function_uses_pic_offset_table = 1;

      if (NEED_GOT_RELOC)
        {
          rtx pic_ref, address = gen_reg_rtx (Pmode);

          emit_insn (gen_pic_load_addr (address, orig));
          pic_ref = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, address);

          emit_move_insn (address, pic_ref);
          return address;
        }
    }

  return orig;
}
1965 static rtx pic_rtx;
1968 is_pic (x)
1969 rtx x;
1971 if (x == pic_rtx)
1972 return 1;
1973 return 0;
1976 void
1977 arm_finalize_pic ()
1979 #ifndef AOF_ASSEMBLER
1980 rtx l1, pic_tmp, pic_tmp2, seq;
1981 rtx global_offset_table;
1983 if (current_function_uses_pic_offset_table == 0 || TARGET_SINGLE_PIC_BASE)
1984 return;
1986 if (! flag_pic)
1987 abort ();
1989 start_sequence ();
1990 l1 = gen_label_rtx ();
1992 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
1993 /* On the ARM the PC register contains 'dot + 8' at the time of the
1994 addition; on the Thumb it is 'dot + 4'. */
1995 pic_tmp = plus_constant (gen_rtx_LABEL_REF (Pmode, l1), TARGET_ARM ? 8 : 4);
1996 if (GOT_PCREL)
1997 pic_tmp2 = gen_rtx_CONST (VOIDmode,
1998 gen_rtx_PLUS (Pmode, global_offset_table, pc_rtx));
1999 else
2000 pic_tmp2 = gen_rtx_CONST (VOIDmode, global_offset_table);
2002 pic_rtx = gen_rtx_CONST (Pmode, gen_rtx_MINUS (Pmode, pic_tmp2, pic_tmp));
2004 emit_insn (gen_pic_load_addr (pic_offset_table_rtx, pic_rtx));
2005 if (TARGET_ARM)
2006 emit_insn (gen_pic_add_dot_plus_eight (pic_offset_table_rtx, l1));
2007 else
2008 emit_insn (gen_pic_add_dot_plus_four (pic_offset_table_rtx, l1));
2010 seq = gen_sequence ();
2011 end_sequence ();
2012 emit_insn_after (seq, get_insns ());
2014 /* Need to emit this whether or not we obey regdecls,
2015 since setjmp/longjmp can cause life info to screw up. */
2016 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
2017 #endif /* AOF_ASSEMBLER */
2020 #define REG_OR_SUBREG_REG(X) \
2021 (GET_CODE (X) == REG \
2022 || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))
2024 #define REG_OR_SUBREG_RTX(X) \
2025 (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))
2027 #ifndef COSTS_N_INSNS
2028 #define COSTS_N_INSNS(N) ((N) * 4 - 2)
2029 #endif
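/* Worked values for the fallback definition above: COSTS_N_INSNS (1) == 2,
   COSTS_N_INSNS (2) == 6 and COSTS_N_INSNS (3) == 10. */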
2032 arm_rtx_costs (x, code, outer)
2033 rtx x;
2034 enum rtx_code code;
2035 enum rtx_code outer;
2037 enum machine_mode mode = GET_MODE (x);
2038 enum rtx_code subcode;
2039 int extra_cost;
2041 if (TARGET_THUMB)
2043 switch (code)
2045 case ASHIFT:
2046 case ASHIFTRT:
2047 case LSHIFTRT:
2048 case ROTATERT:
2049 case PLUS:
2050 case MINUS:
2051 case COMPARE:
2052 case NEG:
2053 case NOT:
2054 return COSTS_N_INSNS (1);
2056 case MULT:
2057 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
2059 int cycles = 0;
2060 unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));
2062 while (i)
2064 i >>= 2;
2065 cycles ++;
2067 return COSTS_N_INSNS (2) + cycles;
2069 return COSTS_N_INSNS (1) + 16;
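/* The CONST_INT branch above charges one cycle per two bits of the
   multiplier. A worked example (illustrative only): for x * 20, i is
   10100 in binary and needs three two-bit shifts to reach zero, giving
   COSTS_N_INSNS (2) + 3 == 9. */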
2071 case SET:
2072 return (COSTS_N_INSNS (1)
2073 + 4 * ((GET_CODE (SET_SRC (x)) == MEM)
2074 + (GET_CODE (SET_DEST (x)) == MEM)));
2076 case CONST_INT:
2077 if (outer == SET)
2079 if ((unsigned HOST_WIDE_INT) INTVAL (x) < 256)
2080 return 0;
2081 if (thumb_shiftable_const (INTVAL (x)))
2082 return COSTS_N_INSNS (2);
2083 return COSTS_N_INSNS (3);
2085 else if (outer == PLUS
2086 && INTVAL (x) < 256 && INTVAL (x) > -256)
2087 return 0;
2088 else if (outer == COMPARE
2089 && (unsigned HOST_WIDE_INT) INTVAL (x) < 256)
2090 return 0;
2091 else if (outer == ASHIFT || outer == ASHIFTRT
2092 || outer == LSHIFTRT)
2093 return 0;
2094 return COSTS_N_INSNS (2);
2096 case CONST:
2097 case CONST_DOUBLE:
2098 case LABEL_REF:
2099 case SYMBOL_REF:
2100 return COSTS_N_INSNS (3);
2102 case UDIV:
2103 case UMOD:
2104 case DIV:
2105 case MOD:
2106 return 100;
2108 case TRUNCATE:
2109 return 99;
2111 case AND:
2112 case XOR:
2113 case IOR:
2114 /* XXX guess. */
2115 return 8;
2117 case ADDRESSOF:
2118 case MEM:
2119 /* XXX another guess. */
2120 /* Memory costs quite a lot for the first word, but subsequent words
2121 load at the equivalent of a single insn each. */
2122 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
2123 + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));
2125 case IF_THEN_ELSE:
2126 /* XXX a guess. */
2127 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
2128 return 14;
2129 return 2;
2131 case ZERO_EXTEND:
2132 /* XXX still guessing. */
2133 switch (GET_MODE (XEXP (x, 0)))
2135 case QImode:
2136 return (1 + (mode == DImode ? 4 : 0)
2137 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2139 case HImode:
2140 return (4 + (mode == DImode ? 4 : 0)
2141 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2143 case SImode:
2144 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2146 default:
2147 return 99;
2150 default:
2151 return 99;
2152 #if 0
2153 case FFS:
2154 case FLOAT:
2155 case FIX:
2156 case UNSIGNED_FIX:
2157 /* XXX guess */
2158 fprintf (stderr, "unexpected code for thumb in rtx_costs: %s\n",
2159 rtx_name[code]);
2160 abort ();
2161 #endif
2165 switch (code)
2167 case MEM:
2168 /* Memory costs quite a lot for the first word, but subsequent words
2169 load at the equivalent of a single insn each. */
2170 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
2171 + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));
2173 case DIV:
2174 case MOD:
2175 return 100;
2177 case ROTATE:
2178 if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
2179 return 4;
2180 /* Fall through */
2181 case ROTATERT:
2182 if (mode != SImode)
2183 return 8;
2184 /* Fall through */
2185 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
2186 if (mode == DImode)
2187 return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
2188 + ((GET_CODE (XEXP (x, 0)) == REG
2189 || (GET_CODE (XEXP (x, 0)) == SUBREG
2190 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
2191 ? 0 : 8));
2192 return (1 + ((GET_CODE (XEXP (x, 0)) == REG
2193 || (GET_CODE (XEXP (x, 0)) == SUBREG
2194 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
2195 ? 0 : 4)
2196 + ((GET_CODE (XEXP (x, 1)) == REG
2197 || (GET_CODE (XEXP (x, 1)) == SUBREG
2198 && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
2199 || (GET_CODE (XEXP (x, 1)) == CONST_INT))
2200 ? 0 : 4));
2202 case MINUS:
2203 if (mode == DImode)
2204 return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
2205 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
2206 || (GET_CODE (XEXP (x, 0)) == CONST_INT
2207 && const_ok_for_arm (INTVAL (XEXP (x, 0)))))
2208 ? 0 : 8));
2210 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2211 return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2212 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
2213 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
2214 ? 0 : 8)
2215 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
2216 || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
2217 && const_double_rtx_ok_for_fpu (XEXP (x, 0))))
2218 ? 0 : 8));
2220 if (((GET_CODE (XEXP (x, 0)) == CONST_INT
2221 && const_ok_for_arm (INTVAL (XEXP (x, 0)))
2222 && REG_OR_SUBREG_REG (XEXP (x, 1))))
2223 || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
2224 || subcode == ASHIFTRT || subcode == LSHIFTRT
2225 || subcode == ROTATE || subcode == ROTATERT
2226 || (subcode == MULT
2227 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
2228 && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
2229 (INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
2230 && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
2231 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
2232 || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
2233 && REG_OR_SUBREG_REG (XEXP (x, 0))))
2234 return 1;
2235 /* Fall through */
2237 case PLUS:
2238 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2239 return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
2240 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2241 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
2242 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
2243 ? 0 : 8));
2245 /* Fall through */
2246 case AND: case XOR: case IOR:
2247 extra_cost = 0;
2249 /* Normally the frame registers will be split into reg+const during
2250 reload, so it is a bad idea to combine them with other instructions,
2251 since then they might not be moved outside of loops. As a compromise
2252 we allow integration with ops that have a constant as their second
2253 operand. */
2254 if ((REG_OR_SUBREG_REG (XEXP (x, 0))
2255 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
2256 && GET_CODE (XEXP (x, 1)) != CONST_INT)
2257 || (REG_OR_SUBREG_REG (XEXP (x, 0))
2258 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))))
2259 extra_cost = 4;
2261 if (mode == DImode)
2262 return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
2263 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2264 || (GET_CODE (XEXP (x, 1)) == CONST_INT
2265 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
2266 ? 0 : 8));
2268 if (REG_OR_SUBREG_REG (XEXP (x, 0)))
2269 return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
2270 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
2271 || (GET_CODE (XEXP (x, 1)) == CONST_INT
2272 && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
2273 ? 0 : 4));
2275 else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
2276 return (1 + extra_cost
2277 + ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
2278 || subcode == LSHIFTRT || subcode == ASHIFTRT
2279 || subcode == ROTATE || subcode == ROTATERT
2280 || (subcode == MULT
2281 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2282 && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
2283 (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0)))
2284 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
2285 && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
2286 || GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
2287 ? 0 : 4));
2289 return 8;
2291 case MULT:
2292 /* There is no point basing this on the tuning, since it is always the
2293 fast variant if it exists at all. */
2294 if (arm_fast_multiply && mode == DImode
2295 && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
2296 && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
2297 || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
2298 return 8;
2300 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2301 || mode == DImode)
2302 return 30;
2304 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
2306 unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
2307 & HOST_UINT (0xffffffff));
2308 int add_cost = const_ok_for_arm (i) ? 4 : 8;
2309 int j;
2311 /* Tune as appropriate. */
2312 int booth_unit_size = ((tune_flags & FL_FAST_MULT) ? 8 : 2);
2314 for (j = 0; i && j < 32; j += booth_unit_size)
2316 i >>= booth_unit_size;
2317 add_cost += 2;
2320 return add_cost;
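/* A worked example for the loop above (illustrative only): multiplying by
   0xFF on a core without FL_FAST_MULT (booth_unit_size == 2) takes four
   Booth steps (0xFF -> 0x3F -> 0xF -> 0x3 -> 0), and since
   const_ok_for_arm (0xFF) holds, add_cost is 4 + 4 * 2 == 12. */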
2323 return (((tune_flags & FL_FAST_MULT) ? 8 : 30)
2324 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
2325 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));
2327 case TRUNCATE:
2328 if (arm_fast_multiply && mode == SImode
2329 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
2330 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
2331 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
2332 == GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)))
2333 && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ZERO_EXTEND
2334 || GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SIGN_EXTEND))
2335 return 8;
2336 return 99;
2338 case NEG:
2339 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2340 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
2341 /* Fall through */
2342 case NOT:
2343 if (mode == DImode)
2344 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
2346 return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
2348 case IF_THEN_ELSE:
2349 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
2350 return 14;
2351 return 2;
2353 case COMPARE:
2354 return 1;
2356 case ABS:
2357 return 4 + (mode == DImode ? 4 : 0);
2359 case SIGN_EXTEND:
2360 if (GET_MODE (XEXP (x, 0)) == QImode)
2361 return (4 + (mode == DImode ? 4 : 0)
2362 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2363 /* Fall through */
2364 case ZERO_EXTEND:
2365 switch (GET_MODE (XEXP (x, 0)))
2367 case QImode:
2368 return (1 + (mode == DImode ? 4 : 0)
2369 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2371 case HImode:
2372 return (4 + (mode == DImode ? 4 : 0)
2373 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2375 case SImode:
2376 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
2378 default:
2379 break;
2381 abort ();
2383 case CONST_INT:
2384 if (const_ok_for_arm (INTVAL (x)))
2385 return outer == SET ? 2 : -1;
2386 else if (outer == AND
2387 && const_ok_for_arm (~ INTVAL (x)))
2388 return -1;
2389 else if ((outer == COMPARE
2390 || outer == PLUS || outer == MINUS)
2391 && const_ok_for_arm (- INTVAL (x)))
2392 return -1;
2393 else
2394 return 5;
2396 case CONST:
2397 case LABEL_REF:
2398 case SYMBOL_REF:
2399 return 6;
2401 case CONST_DOUBLE:
2402 if (const_double_rtx_ok_for_fpu (x))
2403 return outer == SET ? 2 : -1;
2404 else if ((outer == COMPARE || outer == PLUS)
2405 && neg_const_double_rtx_ok_for_fpu (x))
2406 return -1;
2407 return 7;
2409 default:
2410 return 99;
2415 arm_adjust_cost (insn, link, dep, cost)
2416 rtx insn;
2417 rtx link;
2418 rtx dep;
2419 int cost;
2421 rtx i_pat, d_pat;
2423 /* XXX This is not strictly true for the FPA. */
2424 if (REG_NOTE_KIND (link) == REG_DEP_ANTI
2425 || REG_NOTE_KIND (link) == REG_DEP_OUTPUT)
2426 return 0;
2428 /* Call insns don't incur a stall, even if they follow a load. */
2429 if (REG_NOTE_KIND (link) == 0
2430 && GET_CODE (insn) == CALL_INSN)
2431 return 1;
2433 if ((i_pat = single_set (insn)) != NULL
2434 && GET_CODE (SET_SRC (i_pat)) == MEM
2435 && (d_pat = single_set (dep)) != NULL
2436 && GET_CODE (SET_DEST (d_pat)) == MEM)
2438 /* This is a load after a store; there is no conflict if the load reads
2439 from a cached area. Assume that loads from the stack, and from the
2440 constant pool are cached, and that others will miss. This is a
2441 hack. */
2443 if (CONSTANT_POOL_ADDRESS_P (XEXP (SET_SRC (i_pat), 0))
2444 || reg_mentioned_p (stack_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
2445 || reg_mentioned_p (frame_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
2446 || reg_mentioned_p (hard_frame_pointer_rtx,
2447 XEXP (SET_SRC (i_pat), 0)))
2448 return 1;
2451 return cost;
2454 /* This code has been fixed for cross compilation. */
2456 static int fpa_consts_inited = 0;
2458 static const char * strings_fpa[8] =
2460 "0", "1", "2", "3",
2461 "4", "5", "0.5", "10"
2464 static REAL_VALUE_TYPE values_fpa[8];
2466 static void
2467 init_fpa_table ()
2469 int i;
2470 REAL_VALUE_TYPE r;
2472 for (i = 0; i < 8; i++)
2474 r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
2475 values_fpa[i] = r;
2478 fpa_consts_inited = 1;
2481 /* Return TRUE if rtx X is a valid immediate FPU constant. */
2484 const_double_rtx_ok_for_fpu (x)
2485 rtx x;
2487 REAL_VALUE_TYPE r;
2488 int i;
2490 if (!fpa_consts_inited)
2491 init_fpa_table ();
2493 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2494 if (REAL_VALUE_MINUS_ZERO (r))
2495 return 0;
2497 for (i = 0; i < 8; i++)
2498 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
2499 return 1;
2501 return 0;
2504 /* Return TRUE if rtx X is a valid immediate FPU constant when negated. */
2507 neg_const_double_rtx_ok_for_fpu (x)
2508 rtx x;
2510 REAL_VALUE_TYPE r;
2511 int i;
2513 if (!fpa_consts_inited)
2514 init_fpa_table ();
2516 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2517 r = REAL_VALUE_NEGATE (r);
2518 if (REAL_VALUE_MINUS_ZERO (r))
2519 return 0;
2521 for (i = 0; i < 8; i++)
2522 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
2523 return 1;
2525 return 0;
2528 /* Predicates for `match_operand' and `match_operator'. */
2530 /* s_register_operand is the same as register_operand, but it doesn't accept
2531 (SUBREG (MEM)...).
2533 This function exists because, at the time it was added, it led to better
2534 code. SUBREG(MEM) always needs a reload in the places where
2535 s_register_operand is used, and this seemed to lead to excessive
2536 reloading. */
2539 s_register_operand (op, mode)
2540 register rtx op;
2541 enum machine_mode mode;
2543 if (GET_MODE (op) != mode && mode != VOIDmode)
2544 return 0;
2546 if (GET_CODE (op) == SUBREG)
2547 op = SUBREG_REG (op);
2549 /* We don't consider registers whose class is NO_REGS
2550 to be a register operand. */
2551 /* XXX might have to check for lo regs only for thumb ??? */
2552 return (GET_CODE (op) == REG
2553 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
2554 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
2557 /* Only accept reg, subreg(reg), const_int. */
2560 reg_or_int_operand (op, mode)
2561 register rtx op;
2562 enum machine_mode mode;
2564 if (GET_CODE (op) == CONST_INT)
2565 return 1;
2567 if (GET_MODE (op) != mode && mode != VOIDmode)
2568 return 0;
2570 if (GET_CODE (op) == SUBREG)
2571 op = SUBREG_REG (op);
2573 /* We don't consider registers whose class is NO_REGS
2574 to be a register operand. */
2575 return (GET_CODE (op) == REG
2576 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
2577 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
2580 /* Return 1 if OP is an item in memory, given that we are in reload. */
2583 arm_reload_memory_operand (op, mode)
2584 rtx op;
2585 enum machine_mode mode ATTRIBUTE_UNUSED;
2587 int regno = true_regnum (op);
2589 return (! CONSTANT_P (op)
2590 && (regno == -1
2591 || (GET_CODE (op) == REG
2592 && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2595 /* Return 1 if OP is a valid memory address, but not valid for a signed byte
2596 memory access (architecture V4).
2597 MODE is QImode if called when computing constraints, or VOIDmode when
2598 emitting patterns. In this latter case we cannot use memory_operand()
2599 because it will fail on badly formed MEMs, which is precisely what we are
2600 trying to catch. */
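/* The restrictions below reflect the fact that the v4 signed byte load
   (ldrsb) uses addressing mode 3, which has only an 8-bit immediate offset
   and no shifted index, unlike the 12-bit offset available to ldr/ldrb. */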
2602 bad_signed_byte_operand (op, mode)
2603 rtx op;
2604 enum machine_mode mode ATTRIBUTE_UNUSED;
2606 #if 0
2607 if ((mode == QImode && ! memory_operand (op, mode)) || GET_CODE (op) != MEM)
2608 return 0;
2609 #endif
2610 if (GET_CODE (op) != MEM)
2611 return 0;
2613 op = XEXP (op, 0);
2615 /* A sum of anything more complex than reg + reg or reg + const is bad. */
2616 if ((GET_CODE (op) == PLUS || GET_CODE (op) == MINUS)
2617 && (! s_register_operand (XEXP (op, 0), VOIDmode)
2618 || (! s_register_operand (XEXP (op, 1), VOIDmode)
2619 && GET_CODE (XEXP (op, 1)) != CONST_INT)))
2620 return 1;
2622 /* Big constants are also bad. */
2623 if (GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT
2624 && (INTVAL (XEXP (op, 1)) > 0xff
2625 || -INTVAL (XEXP (op, 1)) > 0xff))
2626 return 1;
2628 /* Everything else is good, or can automatically be made so. */
2629 return 0;
2632 /* Return TRUE for valid operands for the rhs of an ARM instruction. */
2635 arm_rhs_operand (op, mode)
2636 rtx op;
2637 enum machine_mode mode;
2639 return (s_register_operand (op, mode)
2640 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
2643 /* Return TRUE for valid operands for the rhs of an ARM instruction, or a load. */
2647 arm_rhsm_operand (op, mode)
2648 rtx op;
2649 enum machine_mode mode;
2651 return (s_register_operand (op, mode)
2652 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
2653 || memory_operand (op, mode));
2656 /* Return TRUE for valid operands for the rhs of an ARM instruction, or a
2657 constant that is valid when negated. */
2660 arm_add_operand (op, mode)
2661 rtx op;
2662 enum machine_mode mode;
2664 if (TARGET_THUMB)
2665 return thumb_cmp_operand (op, mode);
2667 return (s_register_operand (op, mode)
2668 || (GET_CODE (op) == CONST_INT
2669 && (const_ok_for_arm (INTVAL (op))
2670 || const_ok_for_arm (-INTVAL (op)))));
2674 arm_not_operand (op, mode)
2675 rtx op;
2676 enum machine_mode mode;
2678 return (s_register_operand (op, mode)
2679 || (GET_CODE (op) == CONST_INT
2680 && (const_ok_for_arm (INTVAL (op))
2681 || const_ok_for_arm (~INTVAL (op)))));
2684 /* Return TRUE if the operand is a memory reference which contains an
2685 offsettable address. */
2687 offsettable_memory_operand (op, mode)
2688 register rtx op;
2689 enum machine_mode mode;
2691 if (mode == VOIDmode)
2692 mode = GET_MODE (op);
2694 return (mode == GET_MODE (op)
2695 && GET_CODE (op) == MEM
2696 && offsettable_address_p (reload_completed | reload_in_progress,
2697 mode, XEXP (op, 0)));
2700 /* Return TRUE if the operand is a memory reference which is, or can be
2701 made word aligned by adjusting the offset. */
2703 alignable_memory_operand (op, mode)
2704 register rtx op;
2705 enum machine_mode mode;
2707 rtx reg;
2709 if (mode == VOIDmode)
2710 mode = GET_MODE (op);
2712 if (mode != GET_MODE (op) || GET_CODE (op) != MEM)
2713 return 0;
2715 op = XEXP (op, 0);
2717 return ((GET_CODE (reg = op) == REG
2718 || (GET_CODE (op) == SUBREG
2719 && GET_CODE (reg = SUBREG_REG (op)) == REG)
2720 || (GET_CODE (op) == PLUS
2721 && GET_CODE (XEXP (op, 1)) == CONST_INT
2722 && (GET_CODE (reg = XEXP (op, 0)) == REG
2723 || (GET_CODE (XEXP (op, 0)) == SUBREG
2724 && GET_CODE (reg = SUBREG_REG (XEXP (op, 0))) == REG))))
2725 && REGNO_POINTER_ALIGN (REGNO (reg)) >= 32);
2728 /* Similar to s_register_operand, but does not allow hard integer
2729 registers. */
2731 f_register_operand (op, mode)
2732 register rtx op;
2733 enum machine_mode mode;
2735 if (GET_MODE (op) != mode && mode != VOIDmode)
2736 return 0;
2738 if (GET_CODE (op) == SUBREG)
2739 op = SUBREG_REG (op);
2741 /* We don't consider registers whose class is NO_REGS
2742 to be a register operand. */
2743 return (GET_CODE (op) == REG
2744 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
2745 || REGNO_REG_CLASS (REGNO (op)) == FPU_REGS));
2748 /* Return TRUE for valid operands for the rhs of an FPU instruction. */
2751 fpu_rhs_operand (op, mode)
2752 rtx op;
2753 enum machine_mode mode;
2755 if (s_register_operand (op, mode))
2756 return TRUE;
2758 if (GET_MODE (op) != mode && mode != VOIDmode)
2759 return FALSE;
2761 if (GET_CODE (op) == CONST_DOUBLE)
2762 return const_double_rtx_ok_for_fpu (op);
2764 return FALSE;
2768 fpu_add_operand (op, mode)
2769 rtx op;
2770 enum machine_mode mode;
2772 if (s_register_operand (op, mode))
2773 return TRUE;
2775 if (GET_MODE (op) != mode && mode != VOIDmode)
2776 return FALSE;
2778 if (GET_CODE (op) == CONST_DOUBLE)
2779 return (const_double_rtx_ok_for_fpu (op)
2780 || neg_const_double_rtx_ok_for_fpu (op));
2782 return FALSE;
2785 /* Return nonzero if OP is a constant power of two. */
2788 power_of_two_operand (op, mode)
2789 rtx op;
2790 enum machine_mode mode ATTRIBUTE_UNUSED;
2792 if (GET_CODE (op) == CONST_INT)
2794 HOST_WIDE_INT value = INTVAL (op);
2795 return value != 0 && (value & (value - 1)) == 0;
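/* The test above is the usual power-of-two bit trick: clearing the lowest
   set bit of a power of two leaves zero, e.g. 8 & 7 == 0, whereas
   12 & 11 == 8. */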
2797 return FALSE;
2800 /* Return TRUE for a valid operand of a DImode operation.
2801 Either: REG, SUBREG, CONST_DOUBLE or MEM(DImode_address).
2802 Note that this disallows MEM(REG+REG), but allows
2803 MEM(PRE/POST_INC/DEC(REG)). */
2806 di_operand (op, mode)
2807 rtx op;
2808 enum machine_mode mode;
2810 if (s_register_operand (op, mode))
2811 return TRUE;
2813 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
2814 return FALSE;
2816 if (GET_CODE (op) == SUBREG)
2817 op = SUBREG_REG (op);
2819 switch (GET_CODE (op))
2821 case CONST_DOUBLE:
2822 case CONST_INT:
2823 return TRUE;
2825 case MEM:
2826 return memory_address_p (DImode, XEXP (op, 0));
2828 default:
2829 return FALSE;
2833 /* Like di_operand, but don't accept constants. */
2835 nonimmediate_di_operand (op, mode)
2836 rtx op;
2837 enum machine_mode mode;
2839 if (s_register_operand (op, mode))
2840 return TRUE;
2842 if (mode != VOIDmode && GET_MODE (op) != VOIDmode && GET_MODE (op) != DImode)
2843 return FALSE;
2845 if (GET_CODE (op) == SUBREG)
2846 op = SUBREG_REG (op);
2848 if (GET_CODE (op) == MEM)
2849 return memory_address_p (DImode, XEXP (op, 0));
2851 return FALSE;
2854 /* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
2855 Either: REG, SUBREG, CONST_DOUBLE or MEM(DFmode_address).
2856 Note that this disallows MEM(REG+REG), but allows
2857 MEM(PRE/POST_INC/DEC(REG)). */
2860 soft_df_operand (op, mode)
2861 rtx op;
2862 enum machine_mode mode;
2864 if (s_register_operand (op, mode))
2865 return TRUE;
2867 if (mode != VOIDmode && GET_MODE (op) != mode)
2868 return FALSE;
2870 if (GET_CODE (op) == SUBREG && CONSTANT_P (SUBREG_REG (op)))
2871 return FALSE;
2873 if (GET_CODE (op) == SUBREG)
2874 op = SUBREG_REG (op);
2876 switch (GET_CODE (op))
2878 case CONST_DOUBLE:
2879 return TRUE;
2881 case MEM:
2882 return memory_address_p (DFmode, XEXP (op, 0));
2884 default:
2885 return FALSE;
2889 /* Like soft_df_operand, but don't accept constants. */
2891 nonimmediate_soft_df_operand (op, mode)
2892 rtx op;
2893 enum machine_mode mode;
2895 if (s_register_operand (op, mode))
2896 return TRUE;
2898 if (mode != VOIDmode && GET_MODE (op) != mode)
2899 return FALSE;
2901 if (GET_CODE (op) == SUBREG)
2902 op = SUBREG_REG (op);
2904 if (GET_CODE (op) == MEM)
2905 return memory_address_p (DFmode, XEXP (op, 0));
2906 return FALSE;
2909 /* Return TRUE for valid index operands. */
2911 index_operand (op, mode)
2912 rtx op;
2913 enum machine_mode mode;
2915 return (s_register_operand (op, mode)
2916 || (immediate_operand (op, mode)
2917 && (GET_CODE (op) != CONST_INT
2918 || (INTVAL (op) < 4096 && INTVAL (op) > -4096))));
2921 /* Return TRUE for valid shifts by a constant. This also accepts any
2922 power of two on the (somewhat overly relaxed) assumption that the
2923 shift operator in this case was a mult. */
2926 const_shift_operand (op, mode)
2927 rtx op;
2928 enum machine_mode mode;
2930 return (power_of_two_operand (op, mode)
2931 || (immediate_operand (op, mode)
2932 && (GET_CODE (op) != CONST_INT
2933 || (INTVAL (op) < 32 && INTVAL (op) > 0))));
2936 /* Return TRUE for arithmetic operators which can be combined with a multiply
2937 (shift). */
2940 shiftable_operator (x, mode)
2941 rtx x;
2942 enum machine_mode mode;
2944 if (GET_MODE (x) != mode)
2945 return FALSE;
2946 else
2948 enum rtx_code code = GET_CODE (x);
2950 return (code == PLUS || code == MINUS
2951 || code == IOR || code == XOR || code == AND);
2955 /* Return TRUE for binary logical operators. */
2958 logical_binary_operator (x, mode)
2959 rtx x;
2960 enum machine_mode mode;
2962 if (GET_MODE (x) != mode)
2963 return FALSE;
2964 else
2966 enum rtx_code code = GET_CODE (x);
2968 return (code == IOR || code == XOR || code == AND);
2972 /* Return TRUE for shift operators. */
2975 shift_operator (x, mode)
2976 rtx x;
2977 enum machine_mode mode;
2979 if (GET_MODE (x) != mode)
2980 return FALSE;
2981 else
2983 enum rtx_code code = GET_CODE (x);
2985 if (code == MULT)
2986 return power_of_two_operand (XEXP (x, 1), mode);
2988 return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
2989 || code == ROTATERT);
2993 /* Return TRUE if x is EQ or NE. */
2995 equality_operator (x, mode)
2996 rtx x;
2997 enum machine_mode mode ATTRIBUTE_UNUSED;
2999 return GET_CODE (x) == EQ || GET_CODE (x) == NE;
3002 /* Return TRUE for SMIN SMAX UMIN UMAX operators. */
3004 minmax_operator (x, mode)
3005 rtx x;
3006 enum machine_mode mode;
3008 enum rtx_code code = GET_CODE (x);
3010 if (GET_MODE (x) != mode)
3011 return FALSE;
3013 return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
3016 /* Return TRUE if this is the condition code register; if we aren't given
3017 a mode, accept any class CCmode register. */
3019 cc_register (x, mode)
3020 rtx x;
3021 enum machine_mode mode;
3023 if (mode == VOIDmode)
3025 mode = GET_MODE (x);
3027 if (GET_MODE_CLASS (mode) != MODE_CC)
3028 return FALSE;
3031 if ( GET_MODE (x) == mode
3032 && GET_CODE (x) == REG
3033 && REGNO (x) == CC_REGNUM)
3034 return TRUE;
3036 return FALSE;
3039 /* Return TRUE if this is the condition code register; if we aren't given
3040 a mode, accept any class CCmode register which indicates a dominance
3041 expression. */
3043 dominant_cc_register (x, mode)
3044 rtx x;
3045 enum machine_mode mode;
3047 if (mode == VOIDmode)
3049 mode = GET_MODE (x);
3051 if (GET_MODE_CLASS (mode) != MODE_CC)
3052 return FALSE;
3055 if ( mode != CC_DNEmode && mode != CC_DEQmode
3056 && mode != CC_DLEmode && mode != CC_DLTmode
3057 && mode != CC_DGEmode && mode != CC_DGTmode
3058 && mode != CC_DLEUmode && mode != CC_DLTUmode
3059 && mode != CC_DGEUmode && mode != CC_DGTUmode)
3060 return FALSE;
3062 return cc_register (x, mode);
3065 /* Return TRUE if X references a SYMBOL_REF. */
3067 symbol_mentioned_p (x)
3068 rtx x;
3070 register const char * fmt;
3071 register int i;
3073 if (GET_CODE (x) == SYMBOL_REF)
3074 return 1;
3076 fmt = GET_RTX_FORMAT (GET_CODE (x));
3078 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3080 if (fmt[i] == 'E')
3082 register int j;
3084 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3085 if (symbol_mentioned_p (XVECEXP (x, i, j)))
3086 return 1;
3088 else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
3089 return 1;
3092 return 0;
3095 /* Return TRUE if X references a LABEL_REF. */
3097 label_mentioned_p (x)
3098 rtx x;
3100 register const char * fmt;
3101 register int i;
3103 if (GET_CODE (x) == LABEL_REF)
3104 return 1;
3106 fmt = GET_RTX_FORMAT (GET_CODE (x));
3107 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3109 if (fmt[i] == 'E')
3111 register int j;
3113 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3114 if (label_mentioned_p (XVECEXP (x, i, j)))
3115 return 1;
3117 else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
3118 return 1;
3121 return 0;
3124 enum rtx_code
3125 minmax_code (x)
3126 rtx x;
3128 enum rtx_code code = GET_CODE (x);
3130 if (code == SMAX)
3131 return GE;
3132 else if (code == SMIN)
3133 return LE;
3134 else if (code == UMIN)
3135 return LEU;
3136 else if (code == UMAX)
3137 return GEU;
3139 abort ();
3142 /* Return 1 if memory locations are adjacent. */
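/* For instance (an illustrative sketch), (mem (reg r4)) and
   (mem (plus (reg r4) (const_int 4))) are adjacent, as are [r4, #8] and
   [r4, #4]; the test below accepts a difference of 4 in either
   direction. */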
3144 adjacent_mem_locations (a, b)
3145 rtx a, b;
3147 int val0 = 0, val1 = 0;
3148 int reg0, reg1;
3150 if ((GET_CODE (XEXP (a, 0)) == REG
3151 || (GET_CODE (XEXP (a, 0)) == PLUS
3152 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
3153 && (GET_CODE (XEXP (b, 0)) == REG
3154 || (GET_CODE (XEXP (b, 0)) == PLUS
3155 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
3157 if (GET_CODE (XEXP (a, 0)) == PLUS)
3159 reg0 = REGNO (XEXP (XEXP (a, 0), 0));
3160 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
3162 else
3163 reg0 = REGNO (XEXP (a, 0));
3164 if (GET_CODE (XEXP (b, 0)) == PLUS)
3166 reg1 = REGNO (XEXP (XEXP (b, 0), 0));
3167 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
3169 else
3170 reg1 = REGNO (XEXP (b, 0));
3171 return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
3173 return 0;
3176 /* Return 1 if OP is a load multiple operation. It is known to be
3177 parallel and the first section will be tested. */
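/* A minimal example of the form accepted below (illustrative): for
   ldmia r4, {r0, r1} the PARALLEL is

	(parallel [(set (reg:SI 0) (mem:SI (reg:SI 4)))
		   (set (reg:SI 1) (mem:SI (plus:SI (reg:SI 4)
						    (const_int 4))))])

   with an optional leading write-back SET of the base register. */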
3179 load_multiple_operation (op, mode)
3180 rtx op;
3181 enum machine_mode mode ATTRIBUTE_UNUSED;
3183 HOST_WIDE_INT count = XVECLEN (op, 0);
3184 int dest_regno;
3185 rtx src_addr;
3186 HOST_WIDE_INT i = 1, base = 0;
3187 rtx elt;
3189 if (count <= 1
3190 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
3191 return 0;
3193 /* Check to see if this might be a write-back. */
3194 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
3196 i++;
3197 base = 1;
3199 /* Now check it more carefully. */
3200 if (GET_CODE (SET_DEST (elt)) != REG
3201 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
3202 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
3203 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
3204 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
3205 return 0;
3208 /* Perform a quick check so we don't blow up below. */
3209 if (count <= i
3210 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
3211 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
3212 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
3213 return 0;
3215 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
3216 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);
3218 for (; i < count; i++)
3220 elt = XVECEXP (op, 0, i);
3222 if (GET_CODE (elt) != SET
3223 || GET_CODE (SET_DEST (elt)) != REG
3224 || GET_MODE (SET_DEST (elt)) != SImode
3225 || REGNO (SET_DEST (elt)) != (unsigned int)(dest_regno + i - base)
3226 || GET_CODE (SET_SRC (elt)) != MEM
3227 || GET_MODE (SET_SRC (elt)) != SImode
3228 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
3229 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
3230 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
3231 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
3232 return 0;
3235 return 1;
3238 /* Return 1 if OP is a store multiple operation. It is known to be
3239 parallel and the first section will be tested. */
3241 store_multiple_operation (op, mode)
3242 rtx op;
3243 enum machine_mode mode ATTRIBUTE_UNUSED;
3245 HOST_WIDE_INT count = XVECLEN (op, 0);
3246 int src_regno;
3247 rtx dest_addr;
3248 HOST_WIDE_INT i = 1, base = 0;
3249 rtx elt;
3251 if (count <= 1
3252 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
3253 return 0;
3255 /* Check to see if this might be a write-back. */
3256 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
3258 i++;
3259 base = 1;
3261 /* Now check it more carefully. */
3262 if (GET_CODE (SET_DEST (elt)) != REG
3263 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
3264 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
3265 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
3266 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
3267 return 0;
3270 /* Perform a quick check so we don't blow up below. */
3271 if (count <= i
3272 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
3273 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
3274 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
3275 return 0;
3277 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
3278 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);
3280 for (; i < count; i++)
3282 elt = XVECEXP (op, 0, i);
3284 if (GET_CODE (elt) != SET
3285 || GET_CODE (SET_SRC (elt)) != REG
3286 || GET_MODE (SET_SRC (elt)) != SImode
3287 || REGNO (SET_SRC (elt)) != (unsigned int)(src_regno + i - base)
3288 || GET_CODE (SET_DEST (elt)) != MEM
3289 || GET_MODE (SET_DEST (elt)) != SImode
3290 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
3291 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
3292 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
3293 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
3294 return 0;
3297 return 1;
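/* Look at a sequence of NOPS register loads (operands[0..nops-1] are the
   destination registers, operands[nops..2*nops-1] the memory references)
   and decide whether they can be combined into one load-multiple. Returns
   0 if not, otherwise a code for the addressing variant: 1 = ldmia,
   2 = ldmib, 3 = ldmda, 4 = ldmdb, 5 = the base offset must first be
   computed into a scratch register. For example, loading r0 from [r4, #4]
   and r1 from [r4, #8] yields 2 (ldmib). */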
3301 load_multiple_sequence (operands, nops, regs, base, load_offset)
3302 rtx * operands;
3303 int nops;
3304 int * regs;
3305 int * base;
3306 HOST_WIDE_INT * load_offset;
3308 int unsorted_regs[4];
3309 HOST_WIDE_INT unsorted_offsets[4];
3310 int order[4];
3311 int base_reg = -1;
3312 int i;
3314 /* Can only handle 2, 3, or 4 insns at present, though could be easily
3315 extended if required. */
3316 if (nops < 2 || nops > 4)
3317 abort ();
3319 /* Loop over the operands and check that the memory references are
3320 suitable (i.e. immediate offsets from the same base register). At
3321 the same time, extract the target register, and the memory
3322 offsets. */
3323 for (i = 0; i < nops; i++)
3325 rtx reg;
3326 rtx offset;
3328 /* Convert a subreg of a mem into the mem itself. */
3329 if (GET_CODE (operands[nops + i]) == SUBREG)
3330 operands[nops + i] = alter_subreg (operands[nops + i]);
3332 if (GET_CODE (operands[nops + i]) != MEM)
3333 abort ();
3335 /* Don't reorder volatile memory references; it doesn't seem worth
3336 looking for the case where the order is ok anyway. */
3337 if (MEM_VOLATILE_P (operands[nops + i]))
3338 return 0;
3340 offset = const0_rtx;
3342 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
3343 || (GET_CODE (reg) == SUBREG
3344 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3345 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
3346 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
3347 == REG)
3348 || (GET_CODE (reg) == SUBREG
3349 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3350 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
3351 == CONST_INT)))
3353 if (i == 0)
3355 base_reg = REGNO (reg);
3356 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
3357 ? REGNO (operands[i])
3358 : REGNO (SUBREG_REG (operands[i])));
3359 order[0] = 0;
3361 else
3363 if (base_reg != (int) REGNO (reg))
3364 /* Not addressed from the same base register. */
3365 return 0;
3367 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
3368 ? REGNO (operands[i])
3369 : REGNO (SUBREG_REG (operands[i])));
3370 if (unsorted_regs[i] < unsorted_regs[order[0]])
3371 order[0] = i;
3374 /* If it isn't an integer register, or if it overwrites the
3375 base register but isn't the last insn in the list, then
3376 we can't do this. */
3377 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14
3378 || (i != nops - 1 && unsorted_regs[i] == base_reg))
3379 return 0;
3381 unsorted_offsets[i] = INTVAL (offset);
3383 else
3384 /* Not a suitable memory address. */
3385 return 0;
3388 /* All the useful information has now been extracted from the
3389 operands into unsorted_regs and unsorted_offsets; additionally,
3390 order[0] has been set to the lowest numbered register in the
3391 list. Sort the registers into order, and check that the memory
3392 offsets are ascending and adjacent. */
3394 for (i = 1; i < nops; i++)
3396 int j;
3398 order[i] = order[i - 1];
3399 for (j = 0; j < nops; j++)
3400 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
3401 && (order[i] == order[i - 1]
3402 || unsorted_regs[j] < unsorted_regs[order[i]]))
3403 order[i] = j;
3405 /* Have we found a suitable register? If not, one must be used more
3406 than once. */
3407 if (order[i] == order[i - 1])
3408 return 0;
3410 /* Are the memory offsets adjacent and ascending? */
3411 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
3412 return 0;
3415 if (base)
3417 *base = base_reg;
3419 for (i = 0; i < nops; i++)
3420 regs[i] = unsorted_regs[order[i]];
3422 *load_offset = unsorted_offsets[order[0]];
3425 if (unsorted_offsets[order[0]] == 0)
3426 return 1; /* ldmia */
3428 if (unsorted_offsets[order[0]] == 4)
3429 return 2; /* ldmib */
3431 if (unsorted_offsets[order[nops - 1]] == 0)
3432 return 3; /* ldmda */
3434 if (unsorted_offsets[order[nops - 1]] == -4)
3435 return 4; /* ldmdb */
3437 /* For ARM8,9 & StrongARM, 2 ldr instructions are faster than an ldm
3438 if the offset isn't small enough. The reason 2 ldrs are faster
3439 is because these ARMs are able to do more than one cache access
3440 in a single cycle. The ARM9 and StrongARM have Harvard caches,
3441 whilst the ARM8 has a double bandwidth cache. This means that
3442 these cores can do both an instruction fetch and a data fetch in
3443 a single cycle, so the trick of calculating the address into a
3444 scratch register (one of the result regs) and then doing a load
3445 multiple actually becomes slower (and no smaller in code size).
3446 That is the transformation
3448 ldr rd1, [rbase + offset]
3449 ldr rd2, [rbase + offset + 4]
to
3453 add rd1, rbase, offset
3454 ldmia rd1, {rd1, rd2}
3456 produces worse code -- '3 cycles + any stalls on rd2' instead of
3457 '2 cycles + any stalls on rd2'. On ARMs with only one cache
3458 access per cycle, the first sequence could never complete in less
3459 than 6 cycles, whereas the ldm sequence would only take 5 and
3460 would make better use of sequential accesses if not hitting the
3461 cache.
3463 We cheat here and test 'arm_ld_sched' which we currently know to
3464 only be true for the ARM8, ARM9 and StrongARM. If this ever
3465 changes, then the test below needs to be reworked. */
3466 if (nops == 2 && arm_ld_sched)
3467 return 0;
3469 /* Can't do it without setting up the offset, only do this if it takes
3470 no more than one insn. */
3471 return (const_ok_for_arm (unsorted_offsets[order[0]])
3472 || const_ok_for_arm (-unsorted_offsets[order[0]])) ? 5 : 0;
3475 const char *
3476 emit_ldm_seq (operands, nops)
3477 rtx * operands;
3478 int nops;
3480 int regs[4];
3481 int base_reg;
3482 HOST_WIDE_INT offset;
3483 char buf[100];
3484 int i;
3486 switch (load_multiple_sequence (operands, nops, regs, &base_reg, &offset))
3488 case 1:
3489 strcpy (buf, "ldm%?ia\t");
3490 break;
3492 case 2:
3493 strcpy (buf, "ldm%?ib\t");
3494 break;
3496 case 3:
3497 strcpy (buf, "ldm%?da\t");
3498 break;
3500 case 4:
3501 strcpy (buf, "ldm%?db\t");
3502 break;
3504 case 5:
3505 if (offset >= 0)
3506 sprintf (buf, "add%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
3507 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
3508 (long) offset);
3509 else
3510 sprintf (buf, "sub%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
3511 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
3512 (long) -offset);
3513 output_asm_insn (buf, operands);
3514 base_reg = regs[0];
3515 strcpy (buf, "ldm%?ia\t");
3516 break;
3518 default:
3519 abort ();
3522 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
3523 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
3525 for (i = 1; i < nops; i++)
3526 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
3527 reg_names[regs[i]]);
3529 strcat (buf, "}\t%@ phole ldm");
3531 output_asm_insn (buf, operands);
3532 return "";
3536 store_multiple_sequence (operands, nops, regs, base, load_offset)
3537 rtx * operands;
3538 int nops;
3539 int * regs;
3540 int * base;
3541 HOST_WIDE_INT * load_offset;
3543 int unsorted_regs[4];
3544 HOST_WIDE_INT unsorted_offsets[4];
3545 int order[4];
3546 int base_reg = -1;
3547 int i;
3549 /* Can only handle 2, 3, or 4 insns at present, though could be easily
3550 extended if required. */
3551 if (nops < 2 || nops > 4)
3552 abort ();
3554 /* Loop over the operands and check that the memory references are
3555 suitable (i.e. immediate offsets from the same base register). At
3556 the same time, extract the target register, and the memory
3557 offsets. */
3558 for (i = 0; i < nops; i++)
3560 rtx reg;
3561 rtx offset;
3563 /* Convert a subreg of a mem into the mem itself. */
3564 if (GET_CODE (operands[nops + i]) == SUBREG)
3565 operands[nops + i] = alter_subreg (operands[nops + i]);
3567 if (GET_CODE (operands[nops + i]) != MEM)
3568 abort ();
3570 /* Don't reorder volatile memory references; it doesn't seem worth
3571 looking for the case where the order is ok anyway. */
3572 if (MEM_VOLATILE_P (operands[nops + i]))
3573 return 0;
3575 offset = const0_rtx;
3577 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
3578 || (GET_CODE (reg) == SUBREG
3579 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3580 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
3581 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
3582 == REG)
3583 || (GET_CODE (reg) == SUBREG
3584 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
3585 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
3586 == CONST_INT)))
3588 if (i == 0)
3590 base_reg = REGNO (reg);
3591 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
3592 ? REGNO (operands[i])
3593 : REGNO (SUBREG_REG (operands[i])));
3594 order[0] = 0;
3596 else
3598 if (base_reg != (int) REGNO (reg))
3599 /* Not addressed from the same base register. */
3600 return 0;
3602 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
3603 ? REGNO (operands[i])
3604 : REGNO (SUBREG_REG (operands[i])));
3605 if (unsorted_regs[i] < unsorted_regs[order[0]])
3606 order[0] = i;
3609 /* If it isn't an integer register, then we can't do this. */
3610 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14)
3611 return 0;
3613 unsorted_offsets[i] = INTVAL (offset);
3615 else
3616 /* Not a suitable memory address. */
3617 return 0;
3620 /* All the useful information has now been extracted from the
3621 operands into unsorted_regs and unsorted_offsets; additionally,
3622 order[0] has been set to the lowest numbered register in the
3623 list. Sort the registers into order, and check that the memory
3624 offsets are ascending and adjacent. */
3626 for (i = 1; i < nops; i++)
3628 int j;
3630 order[i] = order[i - 1];
3631 for (j = 0; j < nops; j++)
3632 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
3633 && (order[i] == order[i - 1]
3634 || unsorted_regs[j] < unsorted_regs[order[i]]))
3635 order[i] = j;
3637 /* Have we found a suitable register? If not, one must be used more
3638 than once. */
3639 if (order[i] == order[i - 1])
3640 return 0;
3642 /* Are the memory offsets adjacent and ascending? */
3643 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
3644 return 0;
3647 if (base)
3649 *base = base_reg;
3651 for (i = 0; i < nops; i++)
3652 regs[i] = unsorted_regs[order[i]];
3654 *load_offset = unsorted_offsets[order[0]];
3657 if (unsorted_offsets[order[0]] == 0)
3658 return 1; /* stmia */
3660 if (unsorted_offsets[order[0]] == 4)
3661 return 2; /* stmib */
3663 if (unsorted_offsets[order[nops - 1]] == 0)
3664 return 3; /* stmda */
3666 if (unsorted_offsets[order[nops - 1]] == -4)
3667 return 4; /* stmdb */
3669 return 0;
3672 const char *
3673 emit_stm_seq (operands, nops)
3674 rtx * operands;
3675 int nops;
3677 int regs[4];
3678 int base_reg;
3679 HOST_WIDE_INT offset;
3680 char buf[100];
3681 int i;
3683 switch (store_multiple_sequence (operands, nops, regs, &base_reg, &offset))
3685 case 1:
3686 strcpy (buf, "stm%?ia\t");
3687 break;
3689 case 2:
3690 strcpy (buf, "stm%?ib\t");
3691 break;
3693 case 3:
3694 strcpy (buf, "stm%?da\t");
3695 break;
3697 case 4:
3698 strcpy (buf, "stm%?db\t");
3699 break;
3701 default:
3702 abort ();
3705 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
3706 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
3708 for (i = 1; i < nops; i++)
3709 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
3710 reg_names[regs[i]]);
3712 strcat (buf, "}\t%@ phole stm");
3714 output_asm_insn (buf, operands);
3715 return "";
3719 multi_register_push (op, mode)
3720 rtx op;
3721 enum machine_mode mode ATTRIBUTE_UNUSED;
3723 if (GET_CODE (op) != PARALLEL
3724 || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
3725 || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
3726 || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != 2))
3727 return 0;
3729 return 1;
3732 /* Routines for use with attributes. */
3734 /* Return nonzero if ATTR is a valid attribute for DECL.
3735 ATTRIBUTES are any existing attributes and ARGS are
3736 the arguments supplied with ATTR.
3738 Supported attributes:
3740 naked:
3741 don't output any prologue or epilogue code; the user is assumed
3742 to do the right thing.
3744 interfacearm:
3745 Always assume that this function will be entered in ARM mode,
3746 not Thumb mode, and that the caller wishes to be returned to in
3747 ARM mode. */
3749 arm_valid_machine_decl_attribute (decl, attr, args)
3750 tree decl;
3751 tree attr;
3752 tree args;
3754 if (args != NULL_TREE)
3755 return 0;
3757 if (is_attribute_p ("naked", attr))
3758 return TREE_CODE (decl) == FUNCTION_DECL;
3760 #ifdef ARM_PE
3761 if (is_attribute_p ("interfacearm", attr))
3762 return TREE_CODE (decl) == FUNCTION_DECL;
3763 #endif /* ARM_PE */
3765 return 0;
3768 /* Return non-zero if FUNC is a naked function. */
3769 static int
3770 arm_naked_function_p (func)
3771 tree func;
3773 tree a;
3775 if (TREE_CODE (func) != FUNCTION_DECL)
3776 abort ();
3778 a = lookup_attribute ("naked", DECL_MACHINE_ATTRIBUTES (func));
3779 return a != NULL_TREE;
3782 /* Routines for use in generating RTL. */
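/* Generate a PARALLEL of SETs implementing a load multiple of COUNT
   registers starting at BASE_REGNO from successive words at FROM. A
   sketch (not from the original sources) of the result for two registers
   with WRITE_BACK set:

	(parallel [(set (reg:SI 4) (plus:SI (reg:SI 4) (const_int 8)))
		   (set (reg:SI 0) (mem:SI (reg:SI 4)))
		   (set (reg:SI 1) (mem:SI (plus:SI (reg:SI 4)
						    (const_int 4))))])

   which is the form matched by load_multiple_operation above. */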
3784 arm_gen_load_multiple (base_regno, count, from, up, write_back, unchanging_p,
3785 in_struct_p, scalar_p)
3786 int base_regno;
3787 int count;
3788 rtx from;
3789 int up;
3790 int write_back;
3791 int unchanging_p;
3792 int in_struct_p;
3793 int scalar_p;
3795 int i = 0, j;
3796 rtx result;
3797 int sign = up ? 1 : -1;
3798 rtx mem;
3800 result = gen_rtx_PARALLEL (VOIDmode,
3801 rtvec_alloc (count + (write_back ? 1 : 0)));
3802 if (write_back)
3804 XVECEXP (result, 0, 0)
3805 = gen_rtx_SET (GET_MODE (from), from,
3806 plus_constant (from, count * 4 * sign));
3807 i = 1;
3808 count++;
3811 for (j = 0; i < count; i++, j++)
3813 mem = gen_rtx_MEM (SImode, plus_constant (from, j * 4 * sign));
3814 RTX_UNCHANGING_P (mem) = unchanging_p;
3815 MEM_IN_STRUCT_P (mem) = in_struct_p;
3816 MEM_SCALAR_P (mem) = scalar_p;
3817 XVECEXP (result, 0, i)
3818 = gen_rtx_SET (VOIDmode, gen_rtx_REG (SImode, base_regno + j), mem);
3821 return result;
3825 arm_gen_store_multiple (base_regno, count, to, up, write_back, unchanging_p,
3826 in_struct_p, scalar_p)
3827 int base_regno;
3828 int count;
3829 rtx to;
3830 int up;
3831 int write_back;
3832 int unchanging_p;
3833 int in_struct_p;
3834 int scalar_p;
3836 int i = 0, j;
3837 rtx result;
3838 int sign = up ? 1 : -1;
3839 rtx mem;
3841 result = gen_rtx_PARALLEL (VOIDmode,
3842 rtvec_alloc (count + (write_back ? 1 : 0)));
3843 if (write_back)
3845 XVECEXP (result, 0, 0)
3846 = gen_rtx_SET (GET_MODE (to), to,
3847 plus_constant (to, count * 4 * sign));
3848 i = 1;
3849 count++;
3852 for (j = 0; i < count; i++, j++)
3854 mem = gen_rtx_MEM (SImode, plus_constant (to, j * 4 * sign));
3855 RTX_UNCHANGING_P (mem) = unchanging_p;
3856 MEM_IN_STRUCT_P (mem) = in_struct_p;
3857 MEM_SCALAR_P (mem) = scalar_p;
3859 XVECEXP (result, 0, i)
3860 = gen_rtx_SET (VOIDmode, mem, gen_rtx_REG (SImode, base_regno + j));
3863 return result;
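/* Expand a memory block copy (movstrqi). Operands[2] is the byte count
   (a CONST_INT no larger than 64) and operands[3] the alignment, which the
   checks below require to be a multiple of four. Whole words are moved up
   to four at a time through r0-r3 with load/store multiples, and any
   trailing 1-3 bytes are stored with halfword/byte moves. Returns 1 on
   success, or 0 to fall back to the generic expander. */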
3867 arm_gen_movstrqi (operands)
3868 rtx * operands;
3870 HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
3871 int i;
3872 rtx src, dst;
3873 rtx st_src, st_dst, fin_src, fin_dst;
3874 rtx part_bytes_reg = NULL;
3875 rtx mem;
3876 int dst_unchanging_p, dst_in_struct_p, src_unchanging_p, src_in_struct_p;
3877 int dst_scalar_p, src_scalar_p;
3879 if (GET_CODE (operands[2]) != CONST_INT
3880 || GET_CODE (operands[3]) != CONST_INT
3881 || INTVAL (operands[2]) > 64
3882 || INTVAL (operands[3]) & 3)
3883 return 0;
3885 st_dst = XEXP (operands[0], 0);
3886 st_src = XEXP (operands[1], 0);
3888 dst_unchanging_p = RTX_UNCHANGING_P (operands[0]);
3889 dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
3890 dst_scalar_p = MEM_SCALAR_P (operands[0]);
3891 src_unchanging_p = RTX_UNCHANGING_P (operands[1]);
3892 src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);
3893 src_scalar_p = MEM_SCALAR_P (operands[1]);
3895 fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
3896 fin_src = src = copy_to_mode_reg (SImode, st_src);
3898 in_words_to_go = NUM_INTS (INTVAL (operands[2]));
3899 out_words_to_go = INTVAL (operands[2]) / 4;
3900 last_bytes = INTVAL (operands[2]) & 3;
3902 if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
3903 part_bytes_reg = gen_rtx_REG (SImode, (in_words_to_go - 1) & 3);
3905 for (i = 0; in_words_to_go >= 2; i+=4)
3907 if (in_words_to_go > 4)
3908 emit_insn (arm_gen_load_multiple (0, 4, src, TRUE, TRUE,
3909 src_unchanging_p,
3910 src_in_struct_p,
3911 src_scalar_p));
3912 else
3913 emit_insn (arm_gen_load_multiple (0, in_words_to_go, src, TRUE,
3914 FALSE, src_unchanging_p,
3915 src_in_struct_p, src_scalar_p));
3917 if (out_words_to_go)
3919 if (out_words_to_go > 4)
3920 emit_insn (arm_gen_store_multiple (0, 4, dst, TRUE, TRUE,
3921 dst_unchanging_p,
3922 dst_in_struct_p,
3923 dst_scalar_p));
3924 else if (out_words_to_go != 1)
3925 emit_insn (arm_gen_store_multiple (0, out_words_to_go,
3926 dst, TRUE,
3927 (last_bytes == 0
3928 ? FALSE : TRUE),
3929 dst_unchanging_p,
3930 dst_in_struct_p,
3931 dst_scalar_p));
3932 else
3934 mem = gen_rtx_MEM (SImode, dst);
3935 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
3936 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
3937 MEM_SCALAR_P (mem) = dst_scalar_p;
3938 emit_move_insn (mem, gen_rtx_REG (SImode, 0));
3939 if (last_bytes != 0)
3940 emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
3944 in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
3945 out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
3948 /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do. */
3949 if (out_words_to_go)
3951 rtx sreg;
3953 mem = gen_rtx_MEM (SImode, src);
3954 RTX_UNCHANGING_P (mem) = src_unchanging_p;
3955 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
3956 MEM_SCALAR_P (mem) = src_scalar_p;
3957 emit_move_insn (sreg = gen_reg_rtx (SImode), mem);
3958 emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));
3960 mem = gen_rtx_MEM (SImode, dst);
3961 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
3962 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
3963 MEM_SCALAR_P (mem) = dst_scalar_p;
3964 emit_move_insn (mem, sreg);
3965 emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
3966 in_words_to_go--;
3968 if (in_words_to_go) /* Sanity check */
3969 abort ();
3972 if (in_words_to_go)
3974 if (in_words_to_go < 0)
3975 abort ();
3977 mem = gen_rtx_MEM (SImode, src);
3978 RTX_UNCHANGING_P (mem) = src_unchanging_p;
3979 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
3980 MEM_SCALAR_P (mem) = src_scalar_p;
3981 part_bytes_reg = copy_to_mode_reg (SImode, mem);
3984 if (last_bytes && part_bytes_reg == NULL)
3985 abort ();
3987 if (BYTES_BIG_ENDIAN && last_bytes)
3989 rtx tmp = gen_reg_rtx (SImode);
3991 /* The bytes we want are in the top end of the word. */
3992 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
3993 GEN_INT (8 * (4 - last_bytes))));
3994 part_bytes_reg = tmp;
3996 while (last_bytes)
3998 mem = gen_rtx_MEM (QImode, plus_constant (dst, last_bytes - 1));
3999 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4000 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4001 MEM_SCALAR_P (mem) = dst_scalar_p;
4002 emit_move_insn (mem, gen_rtx_SUBREG (QImode, part_bytes_reg, 0));
4004 if (--last_bytes)
4006 tmp = gen_reg_rtx (SImode);
4007 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
4008 part_bytes_reg = tmp;
4013 else
4015 if (last_bytes > 1)
4017 mem = gen_rtx_MEM (HImode, dst);
4018 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4019 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4020 MEM_SCALAR_P (mem) = dst_scalar_p;
4021 emit_move_insn (mem, gen_rtx_SUBREG (HImode, part_bytes_reg, 0));
4022 last_bytes -= 2;
4023 if (last_bytes)
4025 rtx tmp = gen_reg_rtx (SImode);
4027 emit_insn (gen_addsi3 (dst, dst, GEN_INT (2)));
4028 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (16)));
4029 part_bytes_reg = tmp;
4033 if (last_bytes)
4035 mem = gen_rtx_MEM (QImode, dst);
4036 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
4037 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
4038 MEM_SCALAR_P (mem) = dst_scalar_p;
4039 emit_move_insn (mem, gen_rtx_SUBREG (QImode, part_bytes_reg, 0));
4043 return 1;
4046 /* Generate a memory reference for a half word, such that it will be loaded
4047 into the top 16 bits of the word. We can assume that the address is
4048 known to be alignable and of the form reg, or plus (reg, const). */
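/* For example (illustrative): on a little-endian target a halfword at
   [rN, #2] is already in the top 16 bits of the word at [rN], so the plain
   word load is returned; for a halfword at [rN] the loaded word is wrapped
   in a (rotate ... 16) so that the halfword again ends up at the top. */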
4050 arm_gen_rotated_half_load (memref)
4051 rtx memref;
4053 HOST_WIDE_INT offset = 0;
4054 rtx base = XEXP (memref, 0);
4056 if (GET_CODE (base) == PLUS)
4058 offset = INTVAL (XEXP (base, 1));
4059 base = XEXP (base, 0);
4062 /* If we aren't allowed to generate unaligned addresses, then fail. */
4063 if (TARGET_MMU_TRAPS
4064 && ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 0)))
4065 return NULL;
4067 base = gen_rtx_MEM (SImode, plus_constant (base, offset & ~2));
4069 if ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 2))
4070 return base;
4072 return gen_rtx_ROTATE (SImode, base, GEN_INT (16));
4075 static enum machine_mode
4076 select_dominance_cc_mode (x, y, cond_or)
4077 rtx x;
4078 rtx y;
4079 HOST_WIDE_INT cond_or;
4081 enum rtx_code cond1, cond2;
4082 int swapped = 0;
4084 /* Currently we will probably get the wrong result if the individual
4085 comparisons are not simple. This also ensures that it is safe to
4086 reverse a comparison if necessary. */
4087 if ((arm_select_cc_mode (cond1 = GET_CODE (x), XEXP (x, 0), XEXP (x, 1))
4088 != CCmode)
4089 || (arm_select_cc_mode (cond2 = GET_CODE (y), XEXP (y, 0), XEXP (y, 1))
4090 != CCmode))
4091 return CCmode;
4093 if (cond_or)
4094 cond1 = reverse_condition (cond1);
4096 /* If the comparisons are not equal, and one doesn't dominate the other,
4097 then we can't do this. */
4098 if (cond1 != cond2
4099 && ! comparison_dominates_p (cond1, cond2)
4100 && (swapped = 1, ! comparison_dominates_p (cond2, cond1)))
4101 return CCmode;
4103 if (swapped)
4105 enum rtx_code temp = cond1;
4106 cond1 = cond2;
4107 cond2 = temp;
4110 switch (cond1)
4112 case EQ:
4113 if (cond2 == EQ || ! cond_or)
4114 return CC_DEQmode;
4116 switch (cond2)
4118 case LE: return CC_DLEmode;
4119 case LEU: return CC_DLEUmode;
4120 case GE: return CC_DGEmode;
4121 case GEU: return CC_DGEUmode;
4122 default: break;
4125 break;
4127 case LT:
4128 if (cond2 == LT || ! cond_or)
4129 return CC_DLTmode;
4130 if (cond2 == LE)
4131 return CC_DLEmode;
4132 if (cond2 == NE)
4133 return CC_DNEmode;
4134 break;
4136 case GT:
4137 if (cond2 == GT || ! cond_or)
4138 return CC_DGTmode;
4139 if (cond2 == GE)
4140 return CC_DGEmode;
4141 if (cond2 == NE)
4142 return CC_DNEmode;
4143 break;
4145 case LTU:
4146 if (cond2 == LTU || ! cond_or)
4147 return CC_DLTUmode;
4148 if (cond2 == LEU)
4149 return CC_DLEUmode;
4150 if (cond2 == NE)
4151 return CC_DNEmode;
4152 break;
4154 case GTU:
4155 if (cond2 == GTU || ! cond_or)
4156 return CC_DGTUmode;
4157 if (cond2 == GEU)
4158 return CC_DGEUmode;
4159 if (cond2 == NE)
4160 return CC_DNEmode;
4161 break;
4163 /* The remaining cases only occur when both comparisons are the
4164 same. */
4165 case NE:
4166 return CC_DNEmode;
4168 case LE:
4169 return CC_DLEmode;
4171 case GE:
4172 return CC_DGEmode;
4174 case LEU:
4175 return CC_DLEUmode;
4177 case GEU:
4178 return CC_DGEUmode;
4180 default:
4181 break;
4184 abort ();
4187 enum machine_mode
4188 arm_select_cc_mode (op, x, y)
4189 enum rtx_code op;
4190 rtx x;
4191 rtx y;
4193 /* All floating point compares return CCFP if it is an equality
4194 comparison, and CCFPE otherwise. */
4195 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
4196 return (op == EQ || op == NE) ? CCFPmode : CCFPEmode;
4198 /* A compare with a shifted operand. Because of canonicalization, the
4199 comparison will have to be swapped when we emit the assembler. */
4200 if (GET_MODE (y) == SImode && GET_CODE (y) == REG
4201 && (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
4202 || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
4203 || GET_CODE (x) == ROTATERT))
4204 return CC_SWPmode;
4206 /* This is a special case that is used by combine to allow a
4207 comparison of a shifted byte load to be split into a zero-extend
4208 followed by a comparison of the shifted integer (only valid for
4209 equalities and unsigned inequalities). */
4210 if (GET_MODE (x) == SImode
4211 && GET_CODE (x) == ASHIFT
4212 && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 24
4213 && GET_CODE (XEXP (x, 0)) == SUBREG
4214 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == MEM
4215 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == QImode
4216 && (op == EQ || op == NE
4217 || op == GEU || op == GTU || op == LTU || op == LEU)
4218 && GET_CODE (y) == CONST_INT)
4219 return CC_Zmode;
4221 /* An operation that sets the condition codes as a side-effect does
4222 not set the V flag correctly, so we can only use comparisons where
4223 this doesn't matter. (For LT and GE we can use "mi" and "pl"
4224 instead.) */
4225 if (GET_MODE (x) == SImode
4226 && y == const0_rtx
4227 && (op == EQ || op == NE || op == LT || op == GE)
4228 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
4229 || GET_CODE (x) == AND || GET_CODE (x) == IOR
4230 || GET_CODE (x) == XOR || GET_CODE (x) == MULT
4231 || GET_CODE (x) == NOT || GET_CODE (x) == NEG
4232 || GET_CODE (x) == LSHIFTRT
4233 || GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
4234 || GET_CODE (x) == ROTATERT || GET_CODE (x) == ZERO_EXTRACT))
4235 return CC_NOOVmode;
4237 /* A construct for a conditional compare: if the false arm contains
4238 0, then both conditions must be true; otherwise either condition
4239 must be true. Not all conditions are possible, so CCmode is
4240 returned if it can't be done. */
4241 if (GET_CODE (x) == IF_THEN_ELSE
4242 && (XEXP (x, 2) == const0_rtx
4243 || XEXP (x, 2) == const1_rtx)
4244 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4245 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
4246 return select_dominance_cc_mode (XEXP (x, 0), XEXP (x, 1),
4247 INTVAL (XEXP (x, 2)));
4249 if (GET_MODE (x) == QImode && (op == EQ || op == NE))
4250 return CC_Zmode;
4252 if (GET_MODE (x) == SImode && (op == LTU || op == GEU)
4253 && GET_CODE (x) == PLUS
4254 && (rtx_equal_p (XEXP (x, 0), y) || rtx_equal_p (XEXP (x, 1), y)))
4255 return CC_Cmode;
4257 return CCmode;
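/* As an illustration of the CC_Cmode case above: the unsigned overflow
   test "(a + b) < a" compares (plus a b) against a with LTU, so only
   the carry flag is needed and a full compare can be avoided.  */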
4260 /* X and Y are two things to compare using CODE. Emit the compare insn and
4261 return the rtx for register 0 in the proper mode. FP means this is a
4262 floating point compare: I don't think that it is needed on the arm. */
4265 arm_gen_compare_reg (code, x, y)
4266 enum rtx_code code;
4267 rtx x, y;
4269 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
4270 rtx cc_reg = gen_rtx_REG (mode, CC_REGNUM);
4272 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
4273 gen_rtx_COMPARE (mode, x, y)));
4275 return cc_reg;
4278 void
4279 arm_reload_in_hi (operands)
4280 rtx * operands;
4282 rtx ref = operands[1];
4283 rtx base, scratch;
4284 HOST_WIDE_INT offset = 0;
4286 if (GET_CODE (ref) == SUBREG)
4288 offset = SUBREG_WORD (ref) * UNITS_PER_WORD;
4289 if (BYTES_BIG_ENDIAN)
4290 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (ref)))
4291 - MIN (UNITS_PER_WORD,
4292 GET_MODE_SIZE (GET_MODE (SUBREG_REG (ref)))));
4293 ref = SUBREG_REG (ref);
4296 if (GET_CODE (ref) == REG)
4298 /* We have a pseudo which has been spilt onto the stack; there
4299 are two cases here: the first where there is a simple
4300 stack-slot replacement and a second where the stack-slot is
4301 out of range, or is used as a subreg. */
4302 if (reg_equiv_mem[REGNO (ref)])
4304 ref = reg_equiv_mem[REGNO (ref)];
4305 base = find_replacement (&XEXP (ref, 0));
4307 else
4308 /* The slot is out of range, or was dressed up in a SUBREG. */
4309 base = reg_equiv_address[REGNO (ref)];
4311 else
4312 base = find_replacement (&XEXP (ref, 0));
4314 /* Handle the case where the address is too complex to be offset by 1. */
4315 if (GET_CODE (base) == MINUS
4316 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
4318 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
4320 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
4321 base = base_plus;
4323 else if (GET_CODE (base) == PLUS)
4325 /* The addend must be CONST_INT, or we would have dealt with it above. */
4326 HOST_WIDE_INT hi, lo;
4328 offset += INTVAL (XEXP (base, 1));
4329 base = XEXP (base, 0);
4331 /* Rework the address into a legal sequence of insns. */
4332 /* Valid range for lo is -4095 -> 4095. */
4333 lo = (offset >= 0
4334 ? (offset & 0xfff)
4335 : -((-offset) & 0xfff));
4337 /* Corner case: if lo is the max offset then we would be out of range
4338 once we have added the additional 1 below, so bump the msb into the
4339 pre-loading insn(s). */
4340 if (lo == 4095)
4341 lo &= 0x7ff;
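/* A worked example of the corner case: offset = 4095 gives lo = 4095,
   and the second byte load below would then need offset + 1 = 4096,
   which is out of range.  Masking instead gives lo = 2047 and
   hi = 2048, both of which can be handled.  */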
4343 hi = ((((offset - lo) & HOST_INT (0xffffffff))
4344 ^ HOST_INT (0x80000000))
4345 - HOST_INT (0x80000000));
4347 if (hi + lo != offset)
4348 abort ();
4350 if (hi != 0)
4352 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
4354 /* Get the base address; addsi3 knows how to handle constants
4355 that require more than one insn. */
4356 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
4357 base = base_plus;
4358 offset = lo;
4362 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
4363 emit_insn (gen_zero_extendqisi2 (scratch,
4364 gen_rtx_MEM (QImode,
4365 plus_constant (base,
4366 offset))));
4367 emit_insn (gen_zero_extendqisi2 (gen_rtx_SUBREG (SImode, operands[0], 0),
4368 gen_rtx_MEM (QImode,
4369 plus_constant (base,
4370 offset + 1))));
4371 if (! BYTES_BIG_ENDIAN)
4372 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
4373 gen_rtx_IOR (SImode,
4374 gen_rtx_ASHIFT
4375 (SImode,
4376 gen_rtx_SUBREG (SImode, operands[0], 0),
4377 GEN_INT (8)),
4378 scratch)));
4379 else
4380 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
4381 gen_rtx_IOR (SImode,
4382 gen_rtx_ASHIFT (SImode, scratch,
4383 GEN_INT (8)),
4384 gen_rtx_SUBREG (SImode, operands[0],
4385 0))));
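/* A sketch of the little-endian sequence emitted above (the register
   names are illustrative only):

	ldrb	scratch, [base, #offset]
	ldrb	dest, [base, #offset + 1]
	orr	dest, scratch, dest, asl #8

   The big-endian variant shifts the scratch byte instead.  */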
4388 /* Handle storing a half-word to memory during reload by synthesising it
4389 as two byte stores. Take care not to clobber the input values until after we
4390 have moved them somewhere safe. This code assumes that if the DImode
4391 scratch in operands[2] overlaps either the input value or output address
4392 in some way, then that value must die in this insn (we absolutely need
4393 two scratch registers for some corner cases). */
4394 void
4395 arm_reload_out_hi (operands)
4396 rtx * operands;
4398 rtx ref = operands[0];
4399 rtx outval = operands[1];
4400 rtx base, scratch;
4401 HOST_WIDE_INT offset = 0;
4403 if (GET_CODE (ref) == SUBREG)
4405 offset = SUBREG_WORD (ref) * UNITS_PER_WORD;
4406 if (BYTES_BIG_ENDIAN)
4407 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (ref)))
4408 - MIN (UNITS_PER_WORD,
4409 GET_MODE_SIZE (GET_MODE (SUBREG_REG (ref)))));
4410 ref = SUBREG_REG (ref);
4414 if (GET_CODE (ref) == REG)
4416 /* We have a pseudo which has been spilt onto the stack; there
4417 are two cases here: the first where there is a simple
4418 stack-slot replacement and a second where the stack-slot is
4419 out of range, or is used as a subreg. */
4420 if (reg_equiv_mem[REGNO (ref)])
4422 ref = reg_equiv_mem[REGNO (ref)];
4423 base = find_replacement (&XEXP (ref, 0));
4425 else
4426 /* The slot is out of range, or was dressed up in a SUBREG. */
4427 base = reg_equiv_address[REGNO (ref)];
4429 else
4430 base = find_replacement (&XEXP (ref, 0));
4432 scratch = gen_rtx_REG (SImode, REGNO (operands[2]));
4434 /* Handle the case where the address is too complex to be offset by 1. */
4435 if (GET_CODE (base) == MINUS
4436 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
4438 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
4440 /* Be careful not to destroy OUTVAL. */
4441 if (reg_overlap_mentioned_p (base_plus, outval))
4443 /* Updating base_plus might destroy outval; see if we can
4444 swap the scratch and base_plus. */
4445 if (! reg_overlap_mentioned_p (scratch, outval))
4447 rtx tmp = scratch;
4448 scratch = base_plus;
4449 base_plus = tmp;
4451 else
4453 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
4455 /* Be conservative and copy OUTVAL into the scratch now;
4456 this should only be necessary if outval is a subreg
4457 of something larger than a word. */
4458 /* XXX Might this clobber base? I can't see how it can,
4459 since scratch is known to overlap with OUTVAL, and
4460 must be wider than a word. */
4461 emit_insn (gen_movhi (scratch_hi, outval));
4462 outval = scratch_hi;
4466 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
4467 base = base_plus;
4469 else if (GET_CODE (base) == PLUS)
4471 /* The addend must be CONST_INT, or we would have dealt with it above. */
4472 HOST_WIDE_INT hi, lo;
4474 offset += INTVAL (XEXP (base, 1));
4475 base = XEXP (base, 0);
4477 /* Rework the address into a legal sequence of insns. */
4478 /* Valid range for lo is -4095 -> 4095. */
4479 lo = (offset >= 0
4480 ? (offset & 0xfff)
4481 : -((-offset) & 0xfff));
4483 /* Corner case: if lo is the max offset then we would be out of range
4484 once we have added the additional 1 below, so bump the msb into the
4485 pre-loading insn(s). */
4486 if (lo == 4095)
4487 lo &= 0x7ff;
4489 hi = ((((offset - lo) & HOST_INT (0xffffffff))
4490 ^ HOST_INT (0x80000000))
4491 - HOST_INT (0x80000000));
4493 if (hi + lo != offset)
4494 abort ();
4496 if (hi != 0)
4498 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
4500 /* Be careful not to destroy OUTVAL. */
4501 if (reg_overlap_mentioned_p (base_plus, outval))
4503 /* Updating base_plus might destroy outval; see if we
4504 can swap the scratch and base_plus. */
4505 if (! reg_overlap_mentioned_p (scratch, outval))
4507 rtx tmp = scratch;
4508 scratch = base_plus;
4509 base_plus = tmp;
4511 else
4513 rtx scratch_hi = gen_rtx_REG (HImode, REGNO (operands[2]));
4515 /* Be conservative and copy outval into scratch now;
4516 this should only be necessary if outval is a
4517 subreg of something larger than a word. */
4518 /* XXX Might this clobber base? I can't see how it
4519 can, since scratch is known to overlap with
4520 outval. */
4521 emit_insn (gen_movhi (scratch_hi, outval));
4522 outval = scratch_hi;
4526 /* Get the base address; addsi3 knows how to handle constants
4527 that require more than one insn. */
4528 emit_insn (gen_addsi3 (base_plus, base, GEN_INT (hi)));
4529 base = base_plus;
4530 offset = lo;
4534 if (BYTES_BIG_ENDIAN)
4536 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
4537 plus_constant (base, offset + 1)),
4538 gen_rtx_SUBREG (QImode, outval, 0)));
4539 emit_insn (gen_lshrsi3 (scratch,
4540 gen_rtx_SUBREG (SImode, outval, 0),
4541 GEN_INT (8)));
4542 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
4543 gen_rtx_SUBREG (QImode, scratch, 0)));
4545 else
4547 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, offset)),
4548 gen_rtx_SUBREG (QImode, outval, 0)));
4549 emit_insn (gen_lshrsi3 (scratch,
4550 gen_rtx_SUBREG (SImode, outval, 0),
4551 GEN_INT (8)));
4552 emit_insn (gen_movqi (gen_rtx_MEM (QImode,
4553 plus_constant (base, offset + 1)),
4554 gen_rtx_SUBREG (QImode, scratch, 0)));
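/* A sketch of the little-endian store sequence emitted above (the
   register names are illustrative only):

	strb	outval, [base, #offset]
	mov	scratch, outval, lsr #8
	strb	scratch, [base, #offset + 1]

   The big-endian variant writes the two bytes in the opposite order.  */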
4558 /* Print a symbolic form of X to the debug file, F. */
4559 static void
4560 arm_print_value (f, x)
4561 FILE * f;
4562 rtx x;
4564 switch (GET_CODE (x))
4566 case CONST_INT:
4567 fprintf (f, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
4568 return;
4570 case CONST_DOUBLE:
4571 fprintf (f, "<0x%lx,0x%lx>", (long)XWINT (x, 2), (long)XWINT (x, 3));
4572 return;
4574 case CONST_STRING:
4575 fprintf (f, "\"%s\"", XSTR (x, 0));
4576 return;
4578 case SYMBOL_REF:
4579 fprintf (f, "`%s'", XSTR (x, 0));
4580 return;
4582 case LABEL_REF:
4583 fprintf (f, "L%d", INSN_UID (XEXP (x, 0)));
4584 return;
4586 case CONST:
4587 arm_print_value (f, XEXP (x, 0));
4588 return;
4590 case PLUS:
4591 arm_print_value (f, XEXP (x, 0));
4592 fprintf (f, "+");
4593 arm_print_value (f, XEXP (x, 1));
4594 return;
4596 case PC:
4597 fprintf (f, "pc");
4598 return;
4600 default:
4601 fprintf (f, "????");
4602 return;
4606 /* Routines for manipulation of the constant pool. */
4608 /* Arm instructions cannot load a large constant directly into a
4609 register; it has to come from a pc relative load. The constant
4610 must therefore be placed in the addressable range of the pc
4611 relative load. Depending on the precise pc relative load
4612 instruction, the range is somewhere between 256 bytes and 4k. This
4613 means that we often have to dump a constant inside a function, and
4614 generate code to branch around it.
4616 It is important to minimize this, since the branches will slow
4617 things down and make the code larger.
4619 Normally we can hide the table after an existing unconditional
4620 branch so that there is no interruption of the flow, but in the
4621 worst case the code looks like this:
4623 ldr rn, L1
4625 b L2
4626 align
4627 L1: .long value
4631 ldr rn, L3
4633 b L4
4634 align
4635 L3: .long value
4639 We fix this by performing a scan after scheduling, which notices
4640 which instructions need to have their operands fetched from the
4641 constant table and builds the table.
4643 The algorithm starts by building a table of all the constants that
4644 need fixing up and all the natural barriers in the function (places
4645 where a constant table can be dropped without breaking the flow).
4646 For each fixup we note how far the pc-relative replacement will be
4647 able to reach and the offset of the instruction into the function.
4649 Having built the table we then group the fixes together to form
4650 tables that are as large as possible (subject to addressing
4651 constraints) and emit each table of constants after the last
4652 barrier that is within range of all the instructions in the group.
4653 If a group does not contain a barrier, then we forcibly create one
4654 by inserting a jump instruction into the flow. Once the table has
4655 been inserted, the insns are then modified to reference the
4656 relevant entry in the pool.
4658 Possible enhancements to the algorithm (not implemented) are:
4660 1) For some processors and object formats, there may be benefit in
4661 aligning the pools to the start of cache lines; this alignment
4662 would need to be taken into account when calculating addressability
4663 of a pool. */
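/* After the fixup pass the worst case shown above can usually be
   reduced to a single pool behind one branch, along the lines of this
   sketch:

	ldr rn, L1
	...
	ldr rm, L2
	b L3
	align
   L1:	.long value
   L2:	.long value2
   L3:	...  */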
4665 /* These typedefs are located at the start of this file, so that
4666 they can be used in the prototypes there. This comment is to
4667 remind readers of that fact so that the following structures
4668 can be understood more easily.
4670 typedef struct minipool_node Mnode;
4671 typedef struct minipool_fixup Mfix; */
4673 struct minipool_node
4675 /* Doubly linked chain of entries. */
4676 Mnode * next;
4677 Mnode * prev;
4678 /* The maximum offset into the code at which this entry can be placed. While
4679 pushing fixes for forward references, all entries are sorted in order
4680 of increasing max_address. */
4681 HOST_WIDE_INT max_address;
4683 /* Similarly for an entry inserted for a backwards ref. */
4683 HOST_WIDE_INT min_address;
4684 /* The number of fixes referencing this entry. This can become zero
4685 if we "unpush" an entry. In this case we ignore the entry when we
4686 come to emit the code. */
4687 int refcount;
4688 /* The offset from the start of the minipool. */
4689 HOST_WIDE_INT offset;
4691 /* The value in the table. */
4691 rtx value;
4692 /* The mode of value. */
4693 enum machine_mode mode;
4694 int fix_size; /* The size of the value, padded to a word. */
4697 struct minipool_fixup
4699 Mfix * next; /* Singly linked chain of fixes. */
4700 rtx insn; /* The insn requiring the fixup. */
4701 HOST_WIDE_INT address; /* Offset of the insn from the function start. */
4702 rtx * loc; /* Pointer to the part of the insn needing the fix. */
4703 enum machine_mode mode; /* The mode of the value to be loaded. */
4704 int fix_size; /* Its size, padded to a word. */
4705 rtx value; /* The constant that must be loaded. */
4706 Mnode * minipool; /* The pool entry serving this fix. */
4707 HOST_WIDE_INT forwards; /* How far forwards the insn can reach. */
4708 HOST_WIDE_INT backwards; /* And how far backwards. */
4711 /* Fixes less than a word need padding out to a word boundary. */
4712 #define MINIPOOL_FIX_SIZE(mode) \
4713 (GET_MODE_SIZE ((mode)) >= 4 ? GET_MODE_SIZE ((mode)) : 4)
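/* For example, MINIPOOL_FIX_SIZE (HImode) is 4, since a 2 byte value is
   padded out to a full word, while MINIPOOL_FIX_SIZE (DImode) is 8.  */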
4715 static Mnode * minipool_vector_head;
4716 static Mnode * minipool_vector_tail;
4717 static rtx minipool_vector_label;
4719 /* The linked list of all minipool fixes required for this function. */
4720 Mfix * minipool_fix_head;
4721 Mfix * minipool_fix_tail;
4722 /* The fix entry for the current minipool, once it has been placed. */
4723 Mfix * minipool_barrier;
4725 /* Determines if INSN is the start of a jump table. Returns the end
4726 of the TABLE or NULL_RTX. */
4727 static rtx
4728 is_jump_table (insn)
4729 rtx insn;
4731 rtx table;
4733 if (GET_CODE (insn) == JUMP_INSN
4734 && JUMP_LABEL (insn) != NULL
4735 && ((table = next_real_insn (JUMP_LABEL (insn)))
4736 == next_real_insn (insn))
4737 && table != NULL
4738 && GET_CODE (table) == JUMP_INSN
4739 && (GET_CODE (PATTERN (table)) == ADDR_VEC
4740 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
4741 return table;
4743 return NULL_RTX;
4746 static HOST_WIDE_INT
4747 get_jump_table_size (insn)
4748 rtx insn;
4750 rtx body = PATTERN (insn);
4751 int elt = GET_CODE (body) == ADDR_DIFF_VEC ? 1 : 0;
4753 return GET_MODE_SIZE (GET_MODE (body)) * XVECLEN (body, elt);
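/* For example, an ADDR_DIFF_VEC whose element mode is SImode and which
   holds eight offsets occupies 4 * 8 = 32 bytes.  */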
4756 /* Move a minipool fix MP from its current location to before MAX_MP.
4757 If MAX_MP is NULL, then MP doesn't need moving, but the addressing
4758 constraints may need updating. */
4759 static Mnode *
4760 move_minipool_fix_forward_ref (mp, max_mp, max_address)
4761 Mnode * mp;
4762 Mnode * max_mp;
4763 HOST_WIDE_INT max_address;
4765 /* This should never be true and the code below assumes these are
4766 different. */
4767 if (mp == max_mp)
4768 abort ();
4770 if (max_mp == NULL)
4772 if (max_address < mp->max_address)
4773 mp->max_address = max_address;
4775 else
4777 if (max_address > max_mp->max_address - mp->fix_size)
4778 mp->max_address = max_mp->max_address - mp->fix_size;
4779 else
4780 mp->max_address = max_address;
4782 /* Unlink MP from its current position. Since max_mp is non-null,
4783 mp->prev must be non-null. */
4784 mp->prev->next = mp->next;
4785 if (mp->next != NULL)
4786 mp->next->prev = mp->prev;
4787 else
4788 minipool_vector_tail = mp->prev;
4790 /* Re-insert it before MAX_MP. */
4791 mp->next = max_mp;
4792 mp->prev = max_mp->prev;
4793 max_mp->prev = mp;
4795 if (mp->prev != NULL)
4796 mp->prev->next = mp;
4797 else
4798 minipool_vector_head = mp;
4801 /* Save the new entry. */
4802 max_mp = mp;
4804 /* Scan over the preceding entries and adjust their addresses as
4805 required. */
4806 while (mp->prev != NULL
4807 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
4809 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
4810 mp = mp->prev;
4813 return max_mp;
4816 /* Add a constant to the minipool for a forward reference. Returns the
4817 node added or NULL if the constant will not fit in this pool. */
4818 static Mnode *
4819 add_minipool_forward_ref (fix)
4820 Mfix * fix;
4822 /* If set, max_mp is the first pool_entry that has a lower
4823 constraint than the one we are trying to add. */
4824 Mnode * max_mp = NULL;
4825 HOST_WIDE_INT max_address = fix->address + fix->forwards;
4826 Mnode * mp;
4828 /* If this fix's address is greater than the address of the first
4829 entry, then we can't put the fix in this pool. We subtract the
4830 size of the current fix to ensure that if the table is fully
4831 packed we still have enough room to insert this value by shuffling
4832 the other fixes forwards. */
4833 if (minipool_vector_head &&
4834 fix->address >= minipool_vector_head->max_address - fix->fix_size)
4835 return NULL;
4837 /* Scan the pool to see if a constant with the same value has
4838 already been added. While we are doing this, also note the
4839 location where we must insert the constant if it doesn't already
4840 exist. */
4841 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
4843 if (GET_CODE (fix->value) == GET_CODE (mp->value)
4844 && fix->mode == mp->mode
4845 && (GET_CODE (fix->value) != CODE_LABEL
4846 || (CODE_LABEL_NUMBER (fix->value)
4847 == CODE_LABEL_NUMBER (mp->value)))
4848 && rtx_equal_p (fix->value, mp->value))
4850 /* More than one fix references this entry. */
4851 mp->refcount++;
4852 return move_minipool_fix_forward_ref (mp, max_mp, max_address);
4855 /* Note the insertion point if necessary. */
4856 if (max_mp == NULL
4857 && mp->max_address > max_address)
4858 max_mp = mp;
4861 /* The value is not currently in the minipool, so we need to create
4862 a new entry for it. If MAX_MP is NULL, the entry will be put on
4863 the end of the list since the placement is less constrained than
4864 any existing entry. Otherwise, we insert the new fix before
4865 MAX_MP and, if necessary, adjust the constraints on the other
4866 entries. */
4867 mp = xmalloc (sizeof (* mp));
4868 mp->fix_size = fix->fix_size;
4869 mp->mode = fix->mode;
4870 mp->value = fix->value;
4871 mp->refcount = 1;
4872 /* Not yet required for a backwards ref. */
4873 mp->min_address = -65536;
4875 if (max_mp == NULL)
4877 mp->max_address = max_address;
4878 mp->next = NULL;
4879 mp->prev = minipool_vector_tail;
4881 if (mp->prev == NULL)
4883 minipool_vector_head = mp;
4884 minipool_vector_label = gen_label_rtx ();
4886 else
4887 mp->prev->next = mp;
4889 minipool_vector_tail = mp;
4891 else
4893 if (max_address > max_mp->max_address - mp->fix_size)
4894 mp->max_address = max_mp->max_address - mp->fix_size;
4895 else
4896 mp->max_address = max_address;
4898 mp->next = max_mp;
4899 mp->prev = max_mp->prev;
4900 max_mp->prev = mp;
4901 if (mp->prev != NULL)
4902 mp->prev->next = mp;
4903 else
4904 minipool_vector_head = mp;
4907 /* Save the new entry. */
4908 max_mp = mp;
4910 /* Scan over the preceding entries and adjust their addresses as
4911 required. */
4912 while (mp->prev != NULL
4913 && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
4915 mp->prev->max_address = mp->max_address - mp->prev->fix_size;
4916 mp = mp->prev;
4919 return max_mp;
4922 static Mnode *
4923 move_minipool_fix_backward_ref (mp, min_mp, min_address)
4924 Mnode * mp;
4925 Mnode * min_mp;
4926 HOST_WIDE_INT min_address;
4928 HOST_WIDE_INT offset;
4930 /* This should never be true, and the code below assumes these are
4931 different. */
4932 if (mp == min_mp)
4933 abort ();
4935 if (min_mp == NULL)
4937 if (min_address > mp->min_address)
4938 mp->min_address = min_address;
4940 else
4942 /* We will adjust this below if it is too loose. */
4943 mp->min_address = min_address;
4945 /* Unlink MP from its current position. Since min_mp is non-null,
4946 mp->next must be non-null. */
4947 mp->next->prev = mp->prev;
4948 if (mp->prev != NULL)
4949 mp->prev->next = mp->next;
4950 else
4951 minipool_vector_head = mp->next;
4953 /* Reinsert it after MIN_MP. */
4954 mp->prev = min_mp;
4955 mp->next = min_mp->next;
4956 min_mp->next = mp;
4957 if (mp->next != NULL)
4958 mp->next->prev = mp;
4959 else
4960 minipool_vector_tail = mp;
4963 min_mp = mp;
4965 offset = 0;
4966 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
4968 mp->offset = offset;
4969 if (mp->refcount > 0)
4970 offset += mp->fix_size;
4972 if (mp->next && mp->next->min_address < mp->min_address + mp->fix_size)
4973 mp->next->min_address = mp->min_address + mp->fix_size;
4976 return min_mp;
4979 /* Add a constant to the minipool for a backward reference. Returns the
4980 node added or NULL if the constant will not fit in this pool.
4982 Note that the code for insertion for a backwards reference can be
4983 somewhat confusing because the calculated offsets for each fix do
4984 not take into account the size of the pool (which is still under
4985 construction). */
4986 static Mnode *
4987 add_minipool_backward_ref (fix)
4988 Mfix * fix;
4990 /* If set, min_mp is the last pool_entry that has a lower constraint
4991 than the one we are trying to add. */
4992 Mnode * min_mp = NULL;
4993 /* This can be negative, since it is only a constraint. */
4994 HOST_WIDE_INT min_address = fix->address - fix->backwards;
4995 Mnode * mp;
4997 /* If we can't reach the current pool from this insn, or if we can't
4998 insert this entry at the end of the pool without pushing other
4999 fixes out of range, then we don't try. This ensures that we
5000 can't fail later on. */
5001 if (min_address >= minipool_barrier->address
5002 || (minipool_vector_tail->min_address + fix->fix_size
5003 >= minipool_barrier->address))
5004 return NULL;
5006 /* Scan the pool to see if a constant with the same value has
5007 already been added. While we are doing this, also note the
5008 location where we must insert the constant if it doesn't already
5009 exist. */
5010 for (mp = minipool_vector_tail; mp != NULL; mp = mp->prev)
5012 if (GET_CODE (fix->value) == GET_CODE (mp->value)
5013 && fix->mode == mp->mode
5014 && (GET_CODE (fix->value) != CODE_LABEL
5015 || (CODE_LABEL_NUMBER (fix->value)
5016 == CODE_LABEL_NUMBER (mp->value)))
5017 && rtx_equal_p (fix->value, mp->value)
5018 /* Check that there is enough slack to move this entry to the
5019 end of the table (this is conservative). */
5020 && (mp->max_address
5021 > (minipool_barrier->address
5022 + minipool_vector_tail->offset
5023 + minipool_vector_tail->fix_size)))
5025 mp->refcount++;
5026 return move_minipool_fix_backward_ref (mp, min_mp, min_address);
5029 if (min_mp != NULL)
5030 mp->min_address += fix->fix_size;
5031 else
5033 /* Note the insertion point if necessary. */
5034 if (mp->min_address < min_address)
5035 min_mp = mp;
5036 else if (mp->max_address
5037 < minipool_barrier->address + mp->offset + fix->fix_size)
5039 /* Inserting before this entry would push the fix beyond
5040 its maximum address (which can happen if we have
5041 re-located a forwards fix); force the new fix to come
5042 after it. */
5043 min_mp = mp;
5044 min_address = mp->min_address + fix->fix_size;
5049 /* We need to create a new entry. */
5050 mp = xmalloc (sizeof (* mp));
5051 mp->fix_size = fix->fix_size;
5052 mp->mode = fix->mode;
5053 mp->value = fix->value;
5054 mp->refcount = 1;
5055 mp->max_address = minipool_barrier->address + 65536;
5057 mp->min_address = min_address;
5059 if (min_mp == NULL)
5061 mp->prev = NULL;
5062 mp->next = minipool_vector_head;
5064 if (mp->next == NULL)
5066 minipool_vector_tail = mp;
5067 minipool_vector_label = gen_label_rtx ();
5069 else
5070 mp->next->prev = mp;
5072 minipool_vector_head = mp;
5074 else
5076 mp->next = min_mp->next;
5077 mp->prev = min_mp;
5078 min_mp->next = mp;
5080 if (mp->next != NULL)
5081 mp->next->prev = mp;
5082 else
5083 minipool_vector_tail = mp;
5086 /* Save the new entry. */
5087 min_mp = mp;
5089 if (mp->prev)
5090 mp = mp->prev;
5091 else
5092 mp->offset = 0;
5094 /* Scan over the following entries and adjust their offsets. */
5095 while (mp->next != NULL)
5097 if (mp->next->min_address < mp->min_address + mp->fix_size)
5098 mp->next->min_address = mp->min_address + mp->fix_size;
5100 if (mp->refcount)
5101 mp->next->offset = mp->offset + mp->fix_size;
5102 else
5103 mp->next->offset = mp->offset;
5105 mp = mp->next;
5108 return min_mp;
5111 static void
5112 assign_minipool_offsets (barrier)
5113 Mfix * barrier;
5115 HOST_WIDE_INT offset = 0;
5116 Mnode * mp;
5118 minipool_barrier = barrier;
5120 for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
5122 mp->offset = offset;
5124 if (mp->refcount > 0)
5125 offset += mp->fix_size;
5129 /* Output the literal table. */
5130 static void
5131 dump_minipool (scan)
5132 rtx scan;
5134 Mnode * mp;
5135 Mnode * nmp;
5137 if (rtl_dump_file)
5138 fprintf (rtl_dump_file,
5139 ";; Emitting minipool after insn %u; address %ld\n",
5140 INSN_UID (scan), (unsigned long) minipool_barrier->address);
5142 scan = emit_label_after (gen_label_rtx (), scan);
5143 scan = emit_insn_after (gen_align_4 (), scan);
5144 scan = emit_label_after (minipool_vector_label, scan);
5146 for (mp = minipool_vector_head; mp != NULL; mp = nmp)
5148 if (mp->refcount > 0)
5150 if (rtl_dump_file)
5152 fprintf (rtl_dump_file,
5153 ";; Offset %u, min %ld, max %ld ",
5154 (unsigned) mp->offset, (unsigned long) mp->min_address,
5155 (unsigned long) mp->max_address);
5156 arm_print_value (rtl_dump_file, mp->value);
5157 fputc ('\n', rtl_dump_file);
5160 switch (mp->fix_size)
5162 #ifdef HAVE_consttable_1
5163 case 1:
5164 scan = emit_insn_after (gen_consttable_1 (mp->value), scan);
5165 break;
5167 #endif
5168 #ifdef HAVE_consttable_2
5169 case 2:
5170 scan = emit_insn_after (gen_consttable_2 (mp->value), scan);
5171 break;
5173 #endif
5174 #ifdef HAVE_consttable_4
5175 case 4:
5176 scan = emit_insn_after (gen_consttable_4 (mp->value), scan);
5177 break;
5179 #endif
5180 #ifdef HAVE_consttable_8
5181 case 8:
5182 scan = emit_insn_after (gen_consttable_8 (mp->value), scan);
5183 break;
5185 #endif
5186 default:
5187 abort ();
5188 break;
5192 nmp = mp->next;
5193 free (mp);
5196 minipool_vector_head = minipool_vector_tail = NULL;
5197 scan = emit_insn_after (gen_consttable_end (), scan);
5198 scan = emit_barrier_after (scan);
5201 /* Return the cost of forcibly inserting a barrier after INSN. */
5202 static int
5203 arm_barrier_cost (insn)
5204 rtx insn;
5206 /* Basing the location of the pool on the loop depth is preferable,
5207 but at the moment, the basic block information seems to be
5208 corrupt by this stage of the compilation. */
5209 int base_cost = 50;
5210 rtx next = next_nonnote_insn (insn);
5212 if (next != NULL && GET_CODE (next) == CODE_LABEL)
5213 base_cost -= 20;
5215 switch (GET_CODE (insn))
5217 case CODE_LABEL:
5218 /* It will always be better to place the table before the label, rather
5219 than after it. */
5220 return 50;
5222 case INSN:
5223 case CALL_INSN:
5224 return base_cost;
5226 case JUMP_INSN:
5227 return base_cost - 10;
5229 default:
5230 return base_cost + 10;
5234 /* Find the best place in the insn stream in the range
5235 (FIX->address,MAX_ADDRESS) to forcibly insert a minipool barrier.
5236 Create the barrier by inserting a jump and add a new fix entry for
5237 it. */
5238 static Mfix *
5239 create_fix_barrier (fix, max_address)
5240 Mfix * fix;
5241 HOST_WIDE_INT max_address;
5243 HOST_WIDE_INT count = 0;
5244 rtx barrier;
5245 rtx from = fix->insn;
5246 rtx selected = from;
5247 int selected_cost;
5248 HOST_WIDE_INT selected_address;
5249 Mfix * new_fix;
5250 HOST_WIDE_INT max_count = max_address - fix->address;
5251 rtx label = gen_label_rtx ();
5253 selected_cost = arm_barrier_cost (from);
5254 selected_address = fix->address;
5256 while (from && count < max_count)
5258 rtx tmp;
5259 int new_cost;
5261 /* This code shouldn't have been called if there was a natural barrier
5262 within range. */
5263 if (GET_CODE (from) == BARRIER)
5264 abort ();
5266 /* Count the length of this insn. */
5267 count += get_attr_length (from);
5269 /* If there is a jump table, add its length. */
5270 tmp = is_jump_table (from);
5271 if (tmp != NULL)
5273 count += get_jump_table_size (tmp);
5275 /* Jump tables aren't in a basic block, so base the cost on
5276 the dispatch insn. If we select this location, we will
5277 still put the pool after the table. */
5278 new_cost = arm_barrier_cost (from);
5280 if (count < max_count && new_cost <= selected_cost)
5282 selected = tmp;
5283 selected_cost = new_cost;
5284 selected_address = fix->address + count;
5287 /* Continue after the dispatch table. */
5288 from = NEXT_INSN (tmp);
5289 continue;
5292 new_cost = arm_barrier_cost (from);
5294 if (count < max_count && new_cost <= selected_cost)
5296 selected = from;
5297 selected_cost = new_cost;
5298 selected_address = fix->address + count;
5301 from = NEXT_INSN (from);
5304 /* Create a new JUMP_INSN that branches around a barrier. */
5305 from = emit_jump_insn_after (gen_jump (label), selected);
5306 JUMP_LABEL (from) = label;
5307 barrier = emit_barrier_after (from);
5308 emit_label_after (label, barrier);
5310 /* Create a minipool barrier entry for the new barrier. */
5311 new_fix = (Mfix *) oballoc (sizeof (* new_fix));
5312 new_fix->insn = barrier;
5313 new_fix->address = selected_address;
5314 new_fix->next = fix->next;
5315 fix->next = new_fix;
5317 return new_fix;
5320 /* Record that there is a natural barrier in the insn stream at
5321 ADDRESS. */
5322 static void
5323 push_minipool_barrier (insn, address)
5324 rtx insn;
5325 HOST_WIDE_INT address;
5327 Mfix * fix = (Mfix *) oballoc (sizeof (* fix));
5329 fix->insn = insn;
5330 fix->address = address;
5332 fix->next = NULL;
5333 if (minipool_fix_head != NULL)
5334 minipool_fix_tail->next = fix;
5335 else
5336 minipool_fix_head = fix;
5338 minipool_fix_tail = fix;
5341 /* Record INSN, which will need fixing up to load a value from the
5342 minipool. ADDRESS is the offset of the insn from the start of the
5343 function; LOC is a pointer to the part of the insn which requires
5344 fixing; VALUE is the constant that must be loaded, which is of type
5345 MODE. */
5346 static void
5347 push_minipool_fix (insn, address, loc, mode, value)
5348 rtx insn;
5349 HOST_WIDE_INT address;
5350 rtx * loc;
5351 enum machine_mode mode;
5352 rtx value;
5354 Mfix * fix = (Mfix *) oballoc (sizeof (* fix));
5356 #ifdef AOF_ASSEMBLER
5357 /* PIC symbol references need to be converted into offsets into the
5358 based area. */
5359 /* XXX This shouldn't be done here. */
5360 if (flag_pic && GET_CODE (value) == SYMBOL_REF)
5361 value = aof_pic_entry (value);
5362 #endif /* AOF_ASSEMBLER */
5364 fix->insn = insn;
5365 fix->address = address;
5366 fix->loc = loc;
5367 fix->mode = mode;
5368 fix->fix_size = MINIPOOL_FIX_SIZE (mode);
5369 fix->value = value;
5370 fix->forwards = get_attr_pool_range (insn);
5371 fix->backwards = get_attr_neg_pool_range (insn);
5372 fix->minipool = NULL;
5374 /* If an insn doesn't have a range defined for it, then it isn't
5375 expecting to be reworked by this code. Better to abort now than
5376 to generate duff assembly code. */
5377 if (fix->forwards == 0 && fix->backwards == 0)
5378 abort ();
5380 if (rtl_dump_file)
5382 fprintf (rtl_dump_file,
5383 ";; %smode fixup for i%d; addr %lu, range (%ld,%ld): ",
5384 GET_MODE_NAME (mode),
5385 INSN_UID (insn), (unsigned long) address,
5386 -1 * (long)fix->backwards, (long)fix->forwards);
5387 arm_print_value (rtl_dump_file, fix->value);
5388 fprintf (rtl_dump_file, "\n");
5391 /* Add it to the chain of fixes. */
5392 fix->next = NULL;
5394 if (minipool_fix_head != NULL)
5395 minipool_fix_tail->next = fix;
5396 else
5397 minipool_fix_head = fix;
5399 minipool_fix_tail = fix;
5402 /* Scan INSN and note any of its operands that need fixing. */
5403 static void
5404 note_invalid_constants (insn, address)
5405 rtx insn;
5406 HOST_WIDE_INT address;
5408 int opno;
5410 extract_insn (insn);
5412 if (! constrain_operands (1))
5413 fatal_insn_not_found (insn);
5415 /* Fill in recog_op_alt with information about the constraints of this
5416 insn. */
5417 preprocess_constraints ();
5419 for (opno = 0; opno < recog_data.n_operands; opno++)
5421 /* Things we need to fix can only occur in inputs. */
5422 if (recog_data.operand_type[opno] != OP_IN)
5423 continue;
5425 /* If this alternative is a memory reference, then any mention
5426 of constants in this alternative is really to fool reload
5427 into allowing us to accept one there. We need to fix them up
5428 now so that we output the right code. */
5429 if (recog_op_alt[opno][which_alternative].memory_ok)
5431 rtx op = recog_data.operand[opno];
5433 if (CONSTANT_P (op))
5434 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
5435 recog_data.operand_mode[opno], op);
5436 #if 0
5437 /* RWE: Now that we look correctly at the operands for the insn,
5438 this shouldn't be needed any more. */
5439 #ifndef AOF_ASSEMBLER
5440 /* XXX Is this still needed? */
5441 else if (GET_CODE (op) == UNSPEC && XINT (op, 1) == 3)
5442 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
5443 recog_data.operand_mode[opno],
5444 XVECEXP (op, 0, 0));
5445 #endif
5446 #endif
5447 else if (GET_CODE (op) == MEM
5448 && GET_CODE (XEXP (op, 0)) == SYMBOL_REF
5449 && CONSTANT_POOL_ADDRESS_P (XEXP (op, 0)))
5450 push_minipool_fix (insn, address, recog_data.operand_loc[opno],
5451 recog_data.operand_mode[opno],
5452 get_pool_constant (XEXP (op, 0)));
5457 void
5458 arm_reorg (first)
5459 rtx first;
5461 rtx insn;
5462 HOST_WIDE_INT address = 0;
5463 Mfix * fix;
5465 minipool_fix_head = minipool_fix_tail = NULL;
5467 /* The first insn must always be a note, or the code below won't
5468 scan it properly. */
5469 if (GET_CODE (first) != NOTE)
5470 abort ();
5472 /* Scan all the insns and record the operands that will need fixing. */
5473 for (insn = next_nonnote_insn (first); insn; insn = next_nonnote_insn (insn))
5476 if (GET_CODE (insn) == BARRIER)
5477 push_minipool_barrier (insn, address);
5478 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
5479 || GET_CODE (insn) == JUMP_INSN)
5481 rtx table;
5483 note_invalid_constants (insn, address);
5484 address += get_attr_length (insn);
5486 /* If the insn is a vector jump, add the size of the table
5487 and skip the table. */
5488 if ((table = is_jump_table (insn)) != NULL)
5490 address += get_jump_table_size (table);
5491 insn = table;
5496 fix = minipool_fix_head;
5498 /* Now scan the fixups and perform the required changes. */
5499 while (fix)
5501 Mfix * ftmp;
5502 Mfix * fdel;
5503 Mfix * last_added_fix;
5504 Mfix * last_barrier = NULL;
5505 Mfix * this_fix;
5507 /* Skip any further barriers before the next fix. */
5508 while (fix && GET_CODE (fix->insn) == BARRIER)
5509 fix = fix->next;
5511 /* No more fixes. */
5512 if (fix == NULL)
5513 break;
5515 last_added_fix = NULL;
5517 for (ftmp = fix; ftmp; ftmp = ftmp->next)
5519 if (GET_CODE (ftmp->insn) == BARRIER)
5521 if (ftmp->address >= minipool_vector_head->max_address)
5522 break;
5524 last_barrier = ftmp;
5526 else if ((ftmp->minipool = add_minipool_forward_ref (ftmp)) == NULL)
5527 break;
5529 last_added_fix = ftmp; /* Keep track of the last fix added. */
5532 /* If we found a barrier, drop back to that; any fixes that we
5533 could have reached but come after the barrier will now go in
5534 the next mini-pool. */
5535 if (last_barrier != NULL)
5537 /* Reduce the refcount for those fixes that won't go into this
5538 pool after all. */
5539 for (fdel = last_barrier->next;
5540 fdel && fdel != ftmp;
5541 fdel = fdel->next)
5543 fdel->minipool->refcount--;
5544 fdel->minipool = NULL;
5547 ftmp = last_barrier;
5549 else
5551 /* ftmp is the first fix that we can't fit into this pool and
5552 there are no natural barriers that we could use. Insert a
5553 new barrier in the code somewhere between the previous
5554 fix and this one, and arrange to jump around it. */
5555 HOST_WIDE_INT max_address;
5557 /* The last item on the list of fixes must be a barrier, so
5558 we can never run off the end of the list of fixes without
5559 last_barrier being set. */
5560 if (ftmp == NULL)
5561 abort ();
5563 max_address = minipool_vector_head->max_address;
5564 /* Check that there isn't another fix that is in range that
5565 we couldn't fit into this pool because the pool was
5566 already too large: we need to put the pool before such an
5567 instruction. */
5568 if (ftmp->address < max_address)
5569 max_address = ftmp->address;
5571 last_barrier = create_fix_barrier (last_added_fix, max_address);
5574 assign_minipool_offsets (last_barrier);
5576 while (ftmp)
5578 if (GET_CODE (ftmp->insn) != BARRIER
5579 && ((ftmp->minipool = add_minipool_backward_ref (ftmp))
5580 == NULL))
5581 break;
5583 ftmp = ftmp->next;
5586 /* Scan over the fixes we have identified for this pool, fixing them
5587 up and adding the constants to the pool itself. */
5588 for (this_fix = fix; this_fix && ftmp != this_fix;
5589 this_fix = this_fix->next)
5590 if (GET_CODE (this_fix->insn) != BARRIER)
5592 rtx addr
5593 = plus_constant (gen_rtx_LABEL_REF (VOIDmode,
5594 minipool_vector_label),
5595 this_fix->minipool->offset);
5596 *this_fix->loc = gen_rtx_MEM (this_fix->mode, addr);
5599 dump_minipool (last_barrier->insn);
5600 fix = ftmp;
5603 /* From now on we must synthesize any constants that we can't handle
5604 directly. This can happen if the RTL gets split during final
5605 instruction generation. */
5606 after_arm_reorg = 1;
5609 /* Routines to output assembly language. */
5611 /* If the rtx is the correct value then return the string of the number.
5612 In this way we can ensure that valid double constants are generated even
5613 when cross compiling. */
5614 const char *
5615 fp_immediate_constant (x)
5616 rtx x;
5618 REAL_VALUE_TYPE r;
5619 int i;
5621 if (!fpa_consts_inited)
5622 init_fpa_table ();
5624 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
5625 for (i = 0; i < 8; i++)
5626 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
5627 return strings_fpa[i];
5629 abort ();
5632 /* As for fp_immediate_constant, but value is passed directly, not in rtx. */
5633 static const char *
5634 fp_const_from_val (r)
5635 REAL_VALUE_TYPE * r;
5637 int i;
5639 if (! fpa_consts_inited)
5640 init_fpa_table ();
5642 for (i = 0; i < 8; i++)
5643 if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
5644 return strings_fpa[i];
5646 abort ();
5649 /* Output the operands of a LDM/STM instruction to STREAM.
5650 MASK is the ARM register set mask of which only bits 0-15 are important.
5651 INSTR is the possibly suffixed base register. HAT is non-zero if a hat
5652 must follow the register list. */
5654 static void
5655 print_multi_reg (stream, instr, reg, mask, hat)
5656 FILE * stream;
5657 const char * instr;
5658 int reg;
5659 int mask;
5660 int hat;
5662 int i;
5663 int not_first = FALSE;
5665 fputc ('\t', stream);
5666 asm_fprintf (stream, instr, reg);
5667 fputs (", {", stream);
5669 for (i = 0; i <= LAST_ARM_REGNUM; i++)
5670 if (mask & (1 << i))
5672 if (not_first)
5673 fprintf (stream, ", ");
5675 asm_fprintf (stream, "%r", i);
5676 not_first = TRUE;
5679 fprintf (stream, "}%s\n", hat ? "^" : "");
5682 /* Output a 'call' insn. */
5684 const char *
5685 output_call (operands)
5686 rtx * operands;
5688 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
5690 if (REGNO (operands[0]) == LR_REGNUM)
5692 operands[0] = gen_rtx_REG (SImode, IP_REGNUM);
5693 output_asm_insn ("mov%?\t%0, %|lr", operands);
5696 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
5698 if (TARGET_INTERWORK)
5699 output_asm_insn ("bx%?\t%0", operands);
5700 else
5701 output_asm_insn ("mov%?\t%|pc, %0", operands);
5703 return "";
5706 static int
5707 eliminate_lr2ip (x)
5708 rtx * x;
5710 int something_changed = 0;
5711 rtx x0 = * x;
5712 int code = GET_CODE (x0);
5713 register int i, j;
5714 register const char * fmt;
5716 switch (code)
5718 case REG:
5719 if (REGNO (x0) == LR_REGNUM)
5721 *x = gen_rtx_REG (SImode, IP_REGNUM);
5722 return 1;
5724 return 0;
5725 default:
5726 /* Scan through the sub-elements and change any references there. */
5727 fmt = GET_RTX_FORMAT (code);
5729 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5730 if (fmt[i] == 'e')
5731 something_changed |= eliminate_lr2ip (&XEXP (x0, i));
5732 else if (fmt[i] == 'E')
5733 for (j = 0; j < XVECLEN (x0, i); j++)
5734 something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));
5736 return something_changed;
5740 /* Output a 'call' insn that is a reference in memory. */
5742 const char *
5743 output_call_mem (operands)
5744 rtx * operands;
5746 operands[0] = copy_rtx (operands[0]); /* Be ultra careful. */
5747 /* Handle calls using lr by using ip (which may be clobbered in subr anyway). */
5748 if (eliminate_lr2ip (&operands[0]))
5749 output_asm_insn ("mov%?\t%|ip, %|lr", operands);
5751 if (TARGET_INTERWORK)
5753 output_asm_insn ("ldr%?\t%|ip, %0", operands);
5754 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
5755 output_asm_insn ("bx%?\t%|ip", operands);
5757 else
5759 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
5760 output_asm_insn ("ldr%?\t%|pc, %0", operands);
5763 return "";
5767 /* Output a move from arm registers to an fpu register.
5768 OPERANDS[0] is an fpu register.
5769 OPERANDS[1] is the first of the three arm registers holding the value. */
5771 const char *
5772 output_mov_long_double_fpu_from_arm (operands)
5773 rtx * operands;
5775 int arm_reg0 = REGNO (operands[1]);
5776 rtx ops[3];
5778 if (arm_reg0 == IP_REGNUM)
5779 abort ();
5781 ops[0] = gen_rtx_REG (SImode, arm_reg0);
5782 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
5783 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
5785 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
5786 output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);
5788 return "";
5791 /* Output a move from an fpu register to arm registers.
5792 OPERANDS[0] is the first of the three arm registers holding the value.
5793 OPERANDS[1] is an fpu register. */
5795 const char *
5796 output_mov_long_double_arm_from_fpu (operands)
5797 rtx * operands;
5799 int arm_reg0 = REGNO (operands[0]);
5800 rtx ops[3];
5802 if (arm_reg0 == IP_REGNUM)
5803 abort ();
5805 ops[0] = gen_rtx_REG (SImode, arm_reg0);
5806 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
5807 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
5809 output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
5810 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
5811 return "";
5814 /* Output a move from arm registers to arm registers of a long double.
5815 OPERANDS[0] is the destination.
5816 OPERANDS[1] is the source. */
5817 const char *
5818 output_mov_long_double_arm_from_arm (operands)
5819 rtx * operands;
5821 /* We have to be careful here because the two might overlap. */
5822 int dest_start = REGNO (operands[0]);
5823 int src_start = REGNO (operands[1]);
5824 rtx ops[2];
5825 int i;
5827 if (dest_start < src_start)
5829 for (i = 0; i < 3; i++)
5831 ops[0] = gen_rtx_REG (SImode, dest_start + i);
5832 ops[1] = gen_rtx_REG (SImode, src_start + i);
5833 output_asm_insn ("mov%?\t%0, %1", ops);
5836 else
5838 for (i = 2; i >= 0; i--)
5840 ops[0] = gen_rtx_REG (SImode, dest_start + i);
5841 ops[1] = gen_rtx_REG (SImode, src_start + i);
5842 output_asm_insn ("mov%?\t%0, %1", ops);
5846 return "";
5850 /* Output a move from arm registers to an fpu register.
5851 OPERANDS[0] is an fpu register.
5852 OPERANDS[1] is the first register of an arm register pair. */
5854 const char *
5855 output_mov_double_fpu_from_arm (operands)
5856 rtx * operands;
5858 int arm_reg0 = REGNO (operands[1]);
5859 rtx ops[2];
5861 if (arm_reg0 == IP_REGNUM)
5862 abort ();
5864 ops[0] = gen_rtx_REG (SImode, arm_reg0);
5865 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
5866 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
5867 output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
5868 return "";
5871 /* Output a move from an fpu register to arm registers.
5872 OPERANDS[0] is the first register of an arm register pair.
5873 OPERANDS[1] is an fpu register. */
5875 const char *
5876 output_mov_double_arm_from_fpu (operands)
5877 rtx * operands;
5879 int arm_reg0 = REGNO (operands[0]);
5880 rtx ops[2];
5882 if (arm_reg0 == IP_REGNUM)
5883 abort ();
5885 ops[0] = gen_rtx_REG (SImode, arm_reg0);
5886 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
5887 output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
5888 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
5889 return "";
5892 /* Output a move between double words.
5893 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
5894 or MEM<-REG and all MEMs must be offsettable addresses. */
5896 const char *
5897 output_move_double (operands)
5898 rtx * operands;
5900 enum rtx_code code0 = GET_CODE (operands[0]);
5901 enum rtx_code code1 = GET_CODE (operands[1]);
5902 rtx otherops[3];
5904 if (code0 == REG)
5906 int reg0 = REGNO (operands[0]);
5908 otherops[0] = gen_rtx_REG (SImode, 1 + reg0);
5910 if (code1 == REG)
5912 int reg1 = REGNO (operands[1]);
5913 if (reg1 == IP_REGNUM)
5914 abort ();
5916 /* Ensure the second source is not overwritten. */
5917 if (reg1 == reg0 + (WORDS_BIG_ENDIAN ? -1 : 1))
5918 output_asm_insn ("mov%?\t%Q0, %Q1\n\tmov%?\t%R0, %R1", operands);
5919 else
5920 output_asm_insn ("mov%?\t%R0, %R1\n\tmov%?\t%Q0, %Q1", operands);
5922 else if (code1 == CONST_DOUBLE)
5924 if (GET_MODE (operands[1]) == DFmode)
5926 long l[2];
5927 union real_extract u;
5929 bcopy ((char *) &CONST_DOUBLE_LOW (operands[1]), (char *) &u,
5930 sizeof (u));
5931 REAL_VALUE_TO_TARGET_DOUBLE (u.d, l);
5932 otherops[1] = GEN_INT (l[1]);
5933 operands[1] = GEN_INT (l[0]);
5935 else if (GET_MODE (operands[1]) != VOIDmode)
5936 abort ();
5937 else if (WORDS_BIG_ENDIAN)
5940 otherops[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
5941 operands[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
5943 else
5946 otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
5947 operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
5950 output_mov_immediate (operands);
5951 output_mov_immediate (otherops);
5953 else if (code1 == CONST_INT)
5955 #if HOST_BITS_PER_WIDE_INT > 32
5956 /* If HOST_WIDE_INT is more than 32 bits, the intval tells us
5957 what the upper word is. */
5958 if (WORDS_BIG_ENDIAN)
5960 otherops[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
5961 operands[1] = GEN_INT (INTVAL (operands[1]) >> 32);
5963 else
5965 otherops[1] = GEN_INT (INTVAL (operands[1]) >> 32);
5966 operands[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
5968 #else
5969 /* Sign extend the intval into the high-order word. */
5970 if (WORDS_BIG_ENDIAN)
5972 otherops[1] = operands[1];
5973 operands[1] = (INTVAL (operands[1]) < 0
5974 ? constm1_rtx : const0_rtx);
5976 else
5977 otherops[1] = INTVAL (operands[1]) < 0 ? constm1_rtx : const0_rtx;
5978 #endif
5979 output_mov_immediate (otherops);
5980 output_mov_immediate (operands);
5982 else if (code1 == MEM)
5984 switch (GET_CODE (XEXP (operands[1], 0)))
5986 case REG:
5987 output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
5988 break;
5990 case PRE_INC:
5991 abort (); /* Should never happen now. */
5992 break;
5994 case PRE_DEC:
5995 output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
5996 break;
5998 case POST_INC:
5999 output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
6000 break;
6002 case POST_DEC:
6003 abort (); /* Should never happen now. */
6004 break;
6006 case LABEL_REF:
6007 case CONST:
6008 output_asm_insn ("adr%?\t%0, %1", operands);
6009 output_asm_insn ("ldm%?ia\t%0, %M0", operands);
6010 break;
6012 default:
6013 if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1),
6014 GET_MODE (XEXP (XEXP (operands[1], 0), 1))))
6016 otherops[0] = operands[0];
6017 otherops[1] = XEXP (XEXP (operands[1], 0), 0);
6018 otherops[2] = XEXP (XEXP (operands[1], 0), 1);
6019 if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
6021 if (GET_CODE (otherops[2]) == CONST_INT)
6023 switch (INTVAL (otherops[2]))
6025 case -8:
6026 output_asm_insn ("ldm%?db\t%1, %M0", otherops);
6027 return "";
6028 case -4:
6029 output_asm_insn ("ldm%?da\t%1, %M0", otherops);
6030 return "";
6031 case 4:
6032 output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
6033 return "";
6035 if (!(const_ok_for_arm (INTVAL (otherops[2]))))
6036 output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
6037 else
6038 output_asm_insn ("add%?\t%0, %1, %2", otherops);
6040 else
6041 output_asm_insn ("add%?\t%0, %1, %2", otherops);
6043 else
6044 output_asm_insn ("sub%?\t%0, %1, %2", otherops);
6046 return "ldm%?ia\t%0, %M0";
6048 else
6050 otherops[1] = adj_offsettable_operand (operands[1], 4);
6051 /* Take care of overlapping base/data reg. */
6052 if (reg_mentioned_p (operands[0], operands[1]))
6054 output_asm_insn ("ldr%?\t%0, %1", otherops);
6055 output_asm_insn ("ldr%?\t%0, %1", operands);
6057 else
6059 output_asm_insn ("ldr%?\t%0, %1", operands);
6060 output_asm_insn ("ldr%?\t%0, %1", otherops);
6065 else
6066 abort (); /* Constraints should prevent this. */
6068 else if (code0 == MEM && code1 == REG)
6070 if (REGNO (operands[1]) == IP_REGNUM)
6071 abort ();
6073 switch (GET_CODE (XEXP (operands[0], 0)))
6075 case REG:
6076 output_asm_insn ("stm%?ia\t%m0, %M1", operands);
6077 break;
6079 case PRE_INC:
6080 abort (); /* Should never happen now. */
6081 break;
6083 case PRE_DEC:
6084 output_asm_insn ("stm%?db\t%m0!, %M1", operands);
6085 break;
6087 case POST_INC:
6088 output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
6089 break;
6091 case POST_DEC:
6092 abort (); /* Should never happen now. */
6093 break;
6095 case PLUS:
6096 if (GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT)
6098 switch (INTVAL (XEXP (XEXP (operands[0], 0), 1)))
6100 case -8:
6101 output_asm_insn ("stm%?db\t%m0, %M1", operands);
6102 return "";
6104 case -4:
6105 output_asm_insn ("stm%?da\t%m0, %M1", operands);
6106 return "";
6108 case 4:
6109 output_asm_insn ("stm%?ib\t%m0, %M1", operands);
6110 return "";
6113 /* Fall through. */
6115 default:
6116 otherops[0] = adj_offsettable_operand (operands[0], 4);
6117 otherops[1] = gen_rtx_REG (SImode, 1 + REGNO (operands[1]));
6118 output_asm_insn ("str%?\t%1, %0", operands);
6119 output_asm_insn ("str%?\t%1, %0", otherops);
6122 else
6123 abort (); /* Constraints should prevent this. */
6125 return "";
6129 /* Output an arbitrary MOV reg, #n.
6130 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
6132 const char *
6133 output_mov_immediate (operands)
6134 rtx * operands;
6136 HOST_WIDE_INT n = INTVAL (operands[1]);
6137 int n_ones = 0;
6138 int i;
6140 /* Try to use one MOV. */
6141 if (const_ok_for_arm (n))
6143 output_asm_insn ("mov%?\t%0, %1", operands);
6144 return "";
6147 /* Try to use one MVN. */
6148 if (const_ok_for_arm (~n))
6150 operands[1] = GEN_INT (~n);
6151 output_asm_insn ("mvn%?\t%0, %1", operands);
6152 return "";
6155 /* If all else fails, make it out of ORRs or BICs as appropriate. */
6157 for (i=0; i < 32; i++)
6158 if (n & 1 << i)
6159 n_ones++;
6161 if (n_ones > 16) /* Shorter to use MVN with BIC in this case. */
6162 output_multi_immediate (operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1, ~n);
6163 else
6164 output_multi_immediate (operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1, n);
6166 return "";
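/* For example (a hypothetical constant): 0x00ff00ff is not a valid ARM
   immediate, nor is its complement, and it has exactly 16 set bits, so
   the MOV/ORR path is taken and output_multi_immediate below emits

	mov	r0, #255	@ 0x000000ff
	orr	r0, r0, #16711680	@ 0x00ff0000

   (assuming operand 0 is r0).  */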
6170 /* Output an ADD r, s, #n where n may be too big for one instruction. If
6171 adding zero to one register, output nothing. */
6173 const char *
6174 output_add_immediate (operands)
6175 rtx * operands;
6177 HOST_WIDE_INT n = INTVAL (operands[2]);
6179 if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
6181 if (n < 0)
6182 output_multi_immediate (operands,
6183 "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
6184 -n);
6185 else
6186 output_multi_immediate (operands,
6187 "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
6191 return "";
6194 /* Output a multiple immediate operation.
6195 OPERANDS is the vector of operands referred to in the output patterns.
6196 INSTR1 is the output pattern to use for the first constant.
6197 INSTR2 is the output pattern to use for subsequent constants.
6198 IMMED_OP is the index of the constant slot in OPERANDS.
6199 N is the constant value. */
6201 static const char *
6202 output_multi_immediate (operands, instr1, instr2, immed_op, n)
6203 rtx * operands;
6204 const char * instr1;
6205 const char * instr2;
6206 int immed_op;
6207 HOST_WIDE_INT n;
6209 #if HOST_BITS_PER_WIDE_INT > 32
6210 n &= HOST_UINT (0xffffffff);
6211 #endif
6213 if (n == 0)
6215 operands[immed_op] = const0_rtx;
6216 output_asm_insn (instr1, operands); /* Quick and easy output. */
6218 else
6220 int i;
6221 const char * instr = instr1;
6223 /* Note that n is never zero here (which would give no output). */
6224 for (i = 0; i < 32; i += 2)
6226 if (n & (3 << i))
6228 operands[immed_op] = GEN_INT (n & (255 << i));
6229 output_asm_insn (instr, operands);
6230 instr = instr2;
6231 i += 6;
6236 return "";
6240 /* Return the appropriate ARM instruction for the operation code.
6241 The returned result should not be overwritten. OP is the rtx of the
6242 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
6243 was shifted. */
6245 const char *
6246 arithmetic_instr (op, shift_first_arg)
6247 rtx op;
6248 int shift_first_arg;
6250 switch (GET_CODE (op))
6252 case PLUS:
6253 return "add";
6255 case MINUS:
6256 return shift_first_arg ? "rsb" : "sub";
6258 case IOR:
6259 return "orr";
6261 case XOR:
6262 return "eor";
6264 case AND:
6265 return "and";
6267 default:
6268 abort ();
6273 /* Ensure valid constant shifts and return the appropriate shift mnemonic
6274 for the operation code. The returned result should not be overwritten.
6275 OP is the rtx code of the shift.
6276 On exit, *AMOUNTP will be -1 if the shift is by a register, or the constant
6277 amount of the shift otherwise. */
6279 static const char *
6280 shift_op (op, amountp)
6281 rtx op;
6282 HOST_WIDE_INT *amountp;
6284 const char * mnem;
6285 enum rtx_code code = GET_CODE (op);
6287 if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
6288 *amountp = -1;
6289 else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
6290 *amountp = INTVAL (XEXP (op, 1));
6291 else
6292 abort ();
6294 switch (code)
6296 case ASHIFT:
6297 mnem = "asl";
6298 break;
6300 case ASHIFTRT:
6301 mnem = "asr";
6302 break;
6304 case LSHIFTRT:
6305 mnem = "lsr";
6306 break;
6308 case ROTATERT:
6309 mnem = "ror";
6310 break;
6312 case MULT:
6313 /* We never have to worry about the amount being other than a
6314 power of 2, since this case can never be reloaded from a reg. */
6315 if (*amountp != -1)
6316 *amountp = int_log2 (*amountp);
6317 else
6318 abort ();
6319 return "asl";
6321 default:
6322 abort ();
6325 if (*amountp != -1)
6327 /* This is not 100% correct, but follows from the desire to merge
6328 multiplication by a power of 2 with the recognizer for a
6329 shift. >=32 is not a valid shift for "asl", so we must try and
6330 output a shift that produces the correct arithmetical result.
6331 Using lsr #32 is identical except for the fact that the carry bit
6332 is not set correctly if we set the flags; but we never use the
6333 carry bit from such an operation, so we can ignore that. */
6334 if (code == ROTATERT)
6335 *amountp &= 31; /* Rotate is just modulo 32. */
6336 else if (*amountp != (*amountp & 31))
6338 if (code == ASHIFT)
6339 mnem = "lsr";
6340 *amountp = 32;
6343 /* Shifts of 0 are no-ops. */
6344 if (*amountp == 0)
6345 return NULL;
6348 return mnem;
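/* For instance (a sketch of the intent): (mult x 8) is output as
   "asl #3"; an out-of-range (ashift x 32) is downgraded to
   "lsr #32" as explained above; and a shift by zero produces no
   shift at all. */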
6352 /* Obtain the shift count from a POWER of two; e.g., 8 yields 3. */
6353 static HOST_WIDE_INT
6354 int_log2 (power)
6355 HOST_WIDE_INT power;
6357 HOST_WIDE_INT shift = 0;
6359 while ((((HOST_INT (1)) << shift) & power) == 0)
6361 if (shift > 31)
6362 abort ();
6363 shift++;
6366 return shift;
6369 /* Output a .ascii pseudo-op, keeping track of lengths. This is because
6370 /bin/as is horribly restrictive. */
6371 #define MAX_ASCII_LEN 51
6373 void
6374 output_ascii_pseudo_op (stream, p, len)
6375 FILE * stream;
6376 const unsigned char * p;
6377 int len;
6379 int i;
6380 int len_so_far = 0;
6382 fputs ("\t.ascii\t\"", stream);
6384 for (i = 0; i < len; i++)
6386 register int c = p[i];
6388 if (len_so_far >= MAX_ASCII_LEN)
6390 fputs ("\"\n\t.ascii\t\"", stream);
6391 len_so_far = 0;
6394 switch (c)
6396 case TARGET_TAB:
6397 fputs ("\\t", stream);
6398 len_so_far += 2;
6399 break;
6401 case TARGET_FF:
6402 fputs ("\\f", stream);
6403 len_so_far += 2;
6404 break;
6406 case TARGET_BS:
6407 fputs ("\\b", stream);
6408 len_so_far += 2;
6409 break;
6411 case TARGET_CR:
6412 fputs ("\\r", stream);
6413 len_so_far += 2;
6414 break;
6416 case TARGET_NEWLINE:
6417 fputs ("\\n", stream);
6418 c = p [i + 1];
6419 if ((c >= ' ' && c <= '~')
6420 || c == TARGET_TAB)
6421 /* This is a good place for a line break. */
6422 len_so_far = MAX_ASCII_LEN;
6423 else
6424 len_so_far += 2;
6425 break;
6427 case '\"':
6428 case '\\':
6429 putc ('\\', stream);
6430 len_so_far ++;
6431 /* Drop through. */
6433 default:
6434 if (c >= ' ' && c <= '~')
6436 putc (c, stream);
6437 len_so_far ++;
6439 else
6441 fprintf (stream, "\\%03o", c);
6442 len_so_far += 4;
6444 break;
6448 fputs ("\"\n", stream);
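/* For example (illustrative): a 60-character string is emitted as
   two .ascii directives, each holding roughly MAX_ASCII_LEN
   characters of escaped output, with quotes, backslashes and
   non-printing bytes escaped as above. */

/* Output the assembler for a function return. OPERAND holds the
   condition rtx, if any; REALLY_RETURN is zero when the registers
   should be restored without actually returning; REVERSE means the
   sense of the condition should be inverted. */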
6452 const char *
6453 output_return_instruction (operand, really_return, reverse)
6454 rtx operand;
6455 int really_return;
6456 int reverse;
6458 char instr[100];
6459 int reg, live_regs = 0;
6460 int volatile_func = arm_volatile_func ();
6462 /* If a function is naked, don't use the "return" insn. */
6463 if (arm_naked_function_p (current_function_decl))
6464 return "";
6466 return_used_this_function = 1;
6468 if (TARGET_ABORT_NORETURN && volatile_func)
6470 /* If this function was declared non-returning, and we have found a tail
6471 call, then we have to trust that the called function won't return. */
6472 if (really_return)
6474 rtx ops[2];
6476 /* Otherwise, trap an attempted return by aborting. */
6477 ops[0] = operand;
6478 ops[1] = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)"
6479 : "abort");
6480 assemble_external_libcall (ops[1]);
6481 output_asm_insn (reverse ? "bl%D0\t%a1" : "bl%d0\t%a1", ops);
6484 return "";
6487 if (current_function_calls_alloca && ! really_return)
6488 abort ();
6490 for (reg = 0; reg <= 10; reg++)
6491 if (regs_ever_live[reg] && ! call_used_regs[reg])
6492 live_regs++;
6494 if (! TARGET_APCS_FRAME
6495 && ! frame_pointer_needed
6496 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
6497 && ! call_used_regs[HARD_FRAME_POINTER_REGNUM])
6498 live_regs++;
6500 if (flag_pic && ! TARGET_SINGLE_PIC_BASE
6501 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
6502 live_regs++;
6504 if (live_regs || regs_ever_live[LR_REGNUM])
6505 live_regs++;
6507 if (frame_pointer_needed)
6508 live_regs += 4;
6510 /* On some ARM architectures it is faster to use LDR rather than LDM to
6511 load a single register. On other architectures, the cost is the same. */
6512 if (live_regs == 1
6513 && regs_ever_live[LR_REGNUM]
6514 && ! really_return)
6515 output_asm_insn (reverse ? "ldr%?%D0\t%|lr, [%|sp], #4"
6516 : "ldr%?%d0\t%|lr, [%|sp], #4", &operand);
6517 else if (live_regs == 1
6518 && regs_ever_live[LR_REGNUM]
6519 && TARGET_APCS_32)
6520 output_asm_insn (reverse ? "ldr%?%D0\t%|pc, [%|sp], #4"
6521 : "ldr%?%d0\t%|pc, [%|sp], #4", &operand);
6522 else if (live_regs)
6524 if (! regs_ever_live[LR_REGNUM])
6525 live_regs++;
6527 if (frame_pointer_needed)
6528 strcpy (instr,
6529 reverse ? "ldm%?%D0ea\t%|fp, {" : "ldm%?%d0ea\t%|fp, {");
6530 else
6531 strcpy (instr,
6532 reverse ? "ldm%?%D0fd\t%|sp!, {" : "ldm%?%d0fd\t%|sp!, {");
6534 for (reg = 0; reg <= 10; reg++)
6535 if (regs_ever_live[reg]
6536 && (! call_used_regs[reg]
6537 || (flag_pic && ! TARGET_SINGLE_PIC_BASE
6538 && reg == PIC_OFFSET_TABLE_REGNUM)))
6540 strcat (instr, "%|");
6541 strcat (instr, reg_names[reg]);
6542 if (--live_regs)
6543 strcat (instr, ", ");
6546 if (frame_pointer_needed)
6548 strcat (instr, "%|");
6549 strcat (instr, reg_names[11]);
6550 strcat (instr, ", ");
6551 strcat (instr, "%|");
6552 strcat (instr, reg_names[13]);
6553 strcat (instr, ", ");
6554 strcat (instr, "%|");
6555 strcat (instr, TARGET_INTERWORK || (! really_return)
6556 ? reg_names[LR_REGNUM] : reg_names[PC_REGNUM] );
6558 else
6560 if (! TARGET_APCS_FRAME
6561 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
6562 && ! call_used_regs[HARD_FRAME_POINTER_REGNUM])
6564 strcat (instr, "%|");
6565 strcat (instr, reg_names[HARD_FRAME_POINTER_REGNUM]);
6566 strcat (instr, ", ");
6569 strcat (instr, "%|");
6571 if (TARGET_INTERWORK && really_return)
6572 strcat (instr, reg_names[IP_REGNUM]);
6573 else
6574 strcat (instr, really_return ? reg_names[PC_REGNUM] : reg_names[LR_REGNUM]);
6577 strcat (instr, (TARGET_APCS_32 || !really_return) ? "}" : "}^");
6578 output_asm_insn (instr, &operand);
6580 if (TARGET_INTERWORK && really_return)
6582 strcpy (instr, "bx%?");
6583 strcat (instr, reverse ? "%D0" : "%d0");
6584 strcat (instr, "\t%|");
6585 strcat (instr, frame_pointer_needed ? "lr" : "ip");
6587 output_asm_insn (instr, & operand);
6590 else if (really_return)
6592 if (TARGET_INTERWORK)
6593 sprintf (instr, "bx%%?%%%s0\t%%|lr", reverse ? "D" : "d");
6594 else
6595 sprintf (instr, "mov%%?%%%s0%s\t%%|pc, %%|lr",
6596 reverse ? "D" : "d", TARGET_APCS_32 ? "" : "s");
6598 output_asm_insn (instr, & operand);
6601 return "";
6604 /* Return nonzero if optimizing and the current function is volatile.
6605 Such functions never return, and many memory cycles can be saved
6606 by not storing register values that will never be needed again.
6607 This optimization was added to speed up context switching in a
6608 kernel application. */
6610 arm_volatile_func ()
6612 return (optimize > 0
6613 && current_function_nothrow
6614 && TREE_THIS_VOLATILE (current_function_decl));
6617 /* Write the function name into the code section, directly preceding
6618 the function prologue.
6620 Code will be output similar to this:
6622 .ascii "arm_poke_function_name", 0
6623 .align
6625 .word 0xff000000 + (t1 - t0)
6626 arm_poke_function_name
6627 mov ip, sp
6628 stmfd sp!, {fp, ip, lr, pc}
6629 sub fp, ip, #4
6631 When performing a stack backtrace, code can inspect the value
6632 of 'pc' stored at 'fp' + 0. If the trace function then looks
6633 at location pc - 12 and the top 8 bits are set, then we know
6634 that there is a function name embedded immediately preceding this
6635 location, whose length is given by (pc[-3] & 0x00ffffff).
6637 We assume that pc is declared as a pointer to an unsigned long.
6639 It is of no benefit to output the function name if we are assembling
6640 a leaf function. These function types will not contain a stack
6641 backtrace structure, so it is not possible to determine the
6642 function name. */
6644 void
6645 arm_poke_function_name (stream, name)
6646 FILE * stream;
6647 char * name;
6649 unsigned long alignlength;
6650 unsigned long length;
6651 rtx x;
6653 length = strlen (name) + 1;
6654 alignlength = ROUND_UP (length);
6656 ASM_OUTPUT_ASCII (stream, name, length);
6657 ASM_OUTPUT_ALIGN (stream, 2);
6658 x = GEN_INT (HOST_UINT(0xff000000) + alignlength);
6659 ASM_OUTPUT_INT (stream, x);
6662 /* The amount of stack adjustment that happens here, in output_return_instruction
6663 and in arm_output_epilogue must be exactly the same as was calculated during reload,
6664 or things will point to the wrong place. The only time we can safely
6665 ignore this constraint is when a function has no arguments on the stack,
6666 no stack frame requirement and no live registers except for `lr'. If we
6667 can guarantee that by making all function calls into tail calls and that
6668 lr is not clobbered in any other way, then there is no need to push lr
6669 onto the stack. */
6670 void
6671 output_arm_prologue (f, frame_size)
6672 FILE * f;
6673 int frame_size;
6675 int reg, live_regs_mask = 0;
6676 int volatile_func = arm_volatile_func ();
6678 /* Nonzero if we must stuff some register arguments onto the stack as if
6679 they were passed there. */
6680 int store_arg_regs = 0;
6682 if (arm_ccfsm_state || arm_target_insn)
6683 abort (); /* Sanity check. */
6685 if (arm_naked_function_p (current_function_decl))
6686 return;
6688 return_used_this_function = 0;
6690 asm_fprintf (f, "\t%@ args = %d, pretend = %d, frame = %d\n",
6691 current_function_args_size,
6692 current_function_pretend_args_size, frame_size);
6693 asm_fprintf (f, "\t%@ frame_needed = %d, current_function_anonymous_args = %d\n",
6694 frame_pointer_needed,
6695 current_function_anonymous_args);
6697 if (volatile_func)
6698 asm_fprintf (f, "\t%@ Volatile function.\n");
6700 if (current_function_anonymous_args && current_function_pretend_args_size)
6701 store_arg_regs = 1;
6703 for (reg = 0; reg <= 10; reg++)
6704 if (regs_ever_live[reg] && ! call_used_regs[reg])
6705 live_regs_mask |= (1 << reg);
6707 if (! TARGET_APCS_FRAME
6708 && ! frame_pointer_needed
6709 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
6710 && ! call_used_regs[HARD_FRAME_POINTER_REGNUM])
6711 live_regs_mask |= (1 << HARD_FRAME_POINTER_REGNUM);
6713 if (flag_pic && ! TARGET_SINGLE_PIC_BASE
6714 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
6715 live_regs_mask |= (1 << PIC_OFFSET_TABLE_REGNUM);
6717 if (frame_pointer_needed)
6718 live_regs_mask |= 0xD800;
6719 else if (regs_ever_live[LR_REGNUM])
6721 live_regs_mask |= 1 << LR_REGNUM;
6724 if (live_regs_mask)
6725 /* If a DImode load/store multiple is used, and the base register
6726 is r3, then r4 can become an ever-live register without lr
6727 doing so; in this case we need to push lr as well, or we
6728 will fail to get a proper return. */
6729 live_regs_mask |= 1 << LR_REGNUM;
6731 #ifdef AOF_ASSEMBLER
6732 if (flag_pic)
6733 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, PIC_OFFSET_TABLE_REGNUM);
6734 #endif
6737 const char *
6738 arm_output_epilogue (really_return)
6739 int really_return;
6741 int reg;
6742 int live_regs_mask = 0;
6743 /* If we need this, then it will always be at least this much. */
6744 int floats_offset = 12;
6745 rtx operands[3];
6746 int frame_size = get_frame_size ();
6747 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
6748 FILE * f = asm_out_file;
6749 int volatile_func = arm_volatile_func ();
6750 int return_regnum;
6752 if (use_return_insn (FALSE) && return_used_this_function)
6753 return "";
6755 /* Naked functions don't have epilogues. */
6756 if (arm_naked_function_p (current_function_decl))
6757 return "";
6759 /* If we are throwing an exception, the address we want to jump to is in
6760 R2; otherwise, it's in LR. */
6761 return_regnum = eh_ofs ? 2 : LR_REGNUM;
6763 /* If we are throwing an exception, then we really must be doing a return,
6764 so we can't tail-call. */
6765 if (eh_ofs && ! really_return)
6766 abort();
6768 /* A volatile function should never return. Call abort. */
6769 if (TARGET_ABORT_NORETURN && volatile_func)
6771 rtx op;
6772 op = gen_rtx_SYMBOL_REF (Pmode, NEED_PLT_RELOC ? "abort(PLT)" : "abort");
6773 assemble_external_libcall (op);
6774 output_asm_insn ("bl\t%a0", &op);
6775 return "";
6778 for (reg = 0; reg <= 10; reg++)
6779 if (regs_ever_live[reg] && ! call_used_regs[reg])
6781 live_regs_mask |= (1 << reg);
6782 floats_offset += 4;
6785 /* Handle the frame pointer as a special case. */
6786 if (! TARGET_APCS_FRAME
6787 && ! frame_pointer_needed
6788 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
6789 && ! call_used_regs[HARD_FRAME_POINTER_REGNUM])
6791 live_regs_mask |= (1 << HARD_FRAME_POINTER_REGNUM);
6792 floats_offset += 4;
6795 /* If we aren't loading the PIC register, don't stack it even though it may
6796 be live. */
6797 if (flag_pic && ! TARGET_SINGLE_PIC_BASE
6798 && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
6800 live_regs_mask |= (1 << PIC_OFFSET_TABLE_REGNUM);
6801 floats_offset += 4;
6804 if (frame_pointer_needed)
6806 if (arm_fpu_arch == FP_SOFT2)
6808 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
6809 if (regs_ever_live[reg] && ! call_used_regs[reg])
6811 floats_offset += 12;
6812 asm_fprintf (f, "\tldfe\t%r, [%r, #-%d]\n",
6813 reg, FP_REGNUM, floats_offset);
6816 else
6818 int start_reg = LAST_ARM_FP_REGNUM;
6820 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg--)
6822 if (regs_ever_live[reg] && ! call_used_regs[reg])
6824 floats_offset += 12;
6826 /* We can't unstack more than four registers at once. */
6827 if (start_reg - reg == 3)
6829 asm_fprintf (f, "\tlfm\t%r, 4, [%r, #-%d]\n",
6830 reg, FP_REGNUM, floats_offset);
6831 start_reg = reg - 1;
6834 else
6836 if (reg != start_reg)
6837 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
6838 reg + 1, start_reg - reg,
6839 FP_REGNUM, floats_offset);
6840 start_reg = reg - 1;
6844 /* Just in case the last register checked also needs unstacking. */
6845 if (reg != start_reg)
6846 asm_fprintf (f, "\tlfm\t%r, %d, [%r, #-%d]\n",
6847 reg + 1, start_reg - reg,
6848 FP_REGNUM, floats_offset);
6851 if (TARGET_INTERWORK)
6853 live_regs_mask |= 0x6800;
6854 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, live_regs_mask, FALSE);
6855 if (eh_ofs)
6856 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
6857 REGNO (eh_ofs));
6858 if (really_return)
6859 asm_fprintf (f, "\tbx\t%r\n", return_regnum);
6861 else if (eh_ofs || ! really_return)
6863 live_regs_mask |= 0x6800;
6864 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, live_regs_mask, FALSE);
6865 if (eh_ofs)
6867 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
6868 REGNO (eh_ofs));
6869 /* Even in 26-bit mode we do a mov (rather than a movs)
6870 because we don't have the PSR bits set in the
6871 address. */
6872 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, return_regnum);
6875 else
6877 live_regs_mask |= 0xA800;
6878 print_multi_reg (f, "ldmea\t%r", FP_REGNUM, live_regs_mask,
6879 TARGET_APCS_32 ? FALSE : TRUE);
6882 else
6884 /* Restore stack pointer if necessary. */
6885 if (frame_size + current_function_outgoing_args_size != 0)
6887 operands[0] = operands[1] = stack_pointer_rtx;
6888 operands[2] = GEN_INT (frame_size
6889 + current_function_outgoing_args_size);
6890 output_add_immediate (operands);
6893 if (arm_fpu_arch == FP_SOFT2)
6895 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
6896 if (regs_ever_live[reg] && ! call_used_regs[reg])
6897 asm_fprintf (f, "\tldfe\t%r, [%r], #12\n",
6898 reg, SP_REGNUM);
6900 else
6902 int start_reg = FIRST_ARM_FP_REGNUM;
6904 for (reg = FIRST_ARM_FP_REGNUM; reg <= LAST_ARM_FP_REGNUM; reg++)
6906 if (regs_ever_live[reg] && ! call_used_regs[reg])
6908 if (reg - start_reg == 3)
6910 asm_fprintf (f, "\tlfmfd\t%r, 4, [%r]!\n",
6911 start_reg, SP_REGNUM);
6912 start_reg = reg + 1;
6915 else
6917 if (reg != start_reg)
6918 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
6919 start_reg, reg - start_reg,
6920 SP_REGNUM);
6922 start_reg = reg + 1;
6926 /* Just in case the last register checked also needs unstacking. */
6927 if (reg != start_reg)
6928 asm_fprintf (f, "\tlfmfd\t%r, %d, [%r]!\n",
6929 start_reg, reg - start_reg, SP_REGNUM);
6932 if (current_function_pretend_args_size == 0 && regs_ever_live[LR_REGNUM])
6934 if (TARGET_INTERWORK)
6936 live_regs_mask |= 1 << LR_REGNUM;
6938 /* Handle LR on its own. */
6939 if (live_regs_mask == (1 << LR_REGNUM))
6941 if (eh_ofs)
6942 asm_fprintf (f, "\tadd\t%r, %r, #4\n", SP_REGNUM,
6943 SP_REGNUM);
6944 else
6945 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM,
6946 SP_REGNUM);
6948 else if (live_regs_mask != 0)
6949 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, live_regs_mask,
6950 FALSE);
6952 if (eh_ofs)
6953 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
6954 REGNO (eh_ofs));
6956 if (really_return)
6957 asm_fprintf (f, "\tbx\t%r\n", return_regnum);
6959 else if (eh_ofs)
6961 if (live_regs_mask == 0)
6962 asm_fprintf (f, "\tadd\t%r, %r, #4\n", SP_REGNUM, SP_REGNUM);
6963 else
6964 print_multi_reg (f, "\tldmfd\t%r!", SP_REGNUM,
6965 live_regs_mask | (1 << LR_REGNUM), FALSE);
6967 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
6968 REGNO (eh_ofs));
6969 /* Jump to the target; even in 26-bit mode. */
6970 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, return_regnum);
6972 else if (TARGET_APCS_32 && live_regs_mask == 0 && ! really_return)
6973 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM, SP_REGNUM);
6974 else if (TARGET_APCS_32 && live_regs_mask == 0 && really_return)
6975 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", PC_REGNUM, SP_REGNUM);
6976 else if (! really_return)
6977 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM,
6978 live_regs_mask | (1 << LR_REGNUM), FALSE);
6979 else
6980 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM,
6981 live_regs_mask | (1 << PC_REGNUM),
6982 TARGET_APCS_32 ? FALSE : TRUE);
6984 else
6986 if (live_regs_mask || regs_ever_live[LR_REGNUM])
6988 /* Restore the integer regs, and the return address into lr. */
6989 live_regs_mask |= 1 << LR_REGNUM;
6991 if (live_regs_mask == (1 << LR_REGNUM))
6993 if (eh_ofs)
6994 asm_fprintf (f, "\tadd\t%r, %r, #4\n", SP_REGNUM,
6995 SP_REGNUM);
6996 else
6997 asm_fprintf (f, "\tldr\t%r, [%r], #4\n", LR_REGNUM,
6998 SP_REGNUM);
7000 else if (live_regs_mask != 0)
7001 print_multi_reg (f, "ldmfd\t%r!", SP_REGNUM, live_regs_mask,
7002 FALSE);
7005 if (current_function_pretend_args_size)
7007 /* Unwind the pre-pushed regs. */
7008 operands[0] = operands[1] = stack_pointer_rtx;
7009 operands[2] = GEN_INT (current_function_pretend_args_size);
7010 output_add_immediate (operands);
7013 if (eh_ofs)
7014 asm_fprintf (f, "\tadd\t%r, %r, %r\n", SP_REGNUM, SP_REGNUM,
7015 REGNO (eh_ofs));
7017 if (really_return)
7019 /* And finally, go home. */
7020 if (TARGET_INTERWORK)
7021 asm_fprintf (f, "\tbx\t%r\n", return_regnum);
7022 else if (TARGET_APCS_32 || eh_ofs)
7023 asm_fprintf (f, "\tmov\t%r, %r\n", PC_REGNUM, return_regnum);
7024 else
7025 asm_fprintf (f, "\tmovs\t%r, %r\n", PC_REGNUM, return_regnum);
7030 return "";
7033 void
7034 output_func_epilogue (frame_size)
7035 int frame_size;
7037 if (TARGET_THUMB)
7039 /* ??? Probably not safe to set this here, since it assumes that a
7040 function will be emitted as assembly immediately after we generate
7041 RTL for it. This does not happen for inline functions. */
7042 return_used_this_function = 0;
7044 else
7046 if (use_return_insn (FALSE)
7047 && return_used_this_function
7048 && (frame_size + current_function_outgoing_args_size) != 0
7049 && ! frame_pointer_needed)
7050 abort ();
7052 /* Reset the ARM-specific per-function variables. */
7053 current_function_anonymous_args = 0;
7054 after_arm_reorg = 0;
7058 /* Generate and emit an insn that we will recognize as a push_multi.
7059 Unfortunately, since this insn does not reflect very well the actual
7060 semantics of the operation, we need to annotate the insn for the benefit
7061 of DWARF2 frame unwind information. */
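/* For example (illustrative): a MASK selecting { r4, r5, lr }
   produces a single push_multi insn that assembles to

	stmfd	sp!, {r4, r5, lr}

   while the attached REG_FRAME_RELATED_EXPR note re-describes the
   operation as three individual pre-decrement word stores, which is
   the form the DWARF2 unwinder expects. */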
7062 static rtx
7063 emit_multi_reg_push (mask)
7064 int mask;
7066 int num_regs = 0;
7067 int i, j;
7068 rtx par;
7069 rtx dwarf;
7070 rtx tmp, reg;
7072 for (i = 0; i <= LAST_ARM_REGNUM; i++)
7073 if (mask & (1 << i))
7074 num_regs ++;
7076 if (num_regs == 0 || num_regs > 16)
7077 abort ();
7079 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_regs));
7080 dwarf = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_regs));
7081 RTX_FRAME_RELATED_P (dwarf) = 1;
7083 for (i = 0; i <= LAST_ARM_REGNUM; i++)
7085 if (mask & (1 << i))
7087 reg = gen_rtx_REG (SImode, i);
7089 XVECEXP (par, 0, 0)
7090 = gen_rtx_SET (VOIDmode,
7091 gen_rtx_MEM (BLKmode,
7092 gen_rtx_PRE_DEC (BLKmode,
7093 stack_pointer_rtx)),
7094 gen_rtx_UNSPEC (BLKmode,
7095 gen_rtvec (1, reg),
7096 2));
7098 tmp = gen_rtx_SET (VOIDmode,
7099 gen_rtx_MEM (SImode,
7100 gen_rtx_PRE_DEC (BLKmode,
7101 stack_pointer_rtx)),
7102 reg);
7103 RTX_FRAME_RELATED_P (tmp) = 1;
7104 XVECEXP (dwarf, 0, num_regs - 1) = tmp;
7106 break;
7110 for (j = 1, i++; j < num_regs; i++)
7112 if (mask & (1 << i))
7114 reg = gen_rtx_REG (SImode, i);
7116 XVECEXP (par, 0, j) = gen_rtx_USE (VOIDmode, reg);
7118 tmp = gen_rtx_SET (VOIDmode,
7119 gen_rtx_MEM (SImode,
7120 gen_rtx_PRE_DEC (BLKmode,
7121 stack_pointer_rtx)),
7122 reg);
7123 RTX_FRAME_RELATED_P (tmp) = 1;
7124 XVECEXP (dwarf, 0, num_regs - j - 1) = tmp;
7126 j++;
7130 par = emit_insn (par);
7131 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
7132 REG_NOTES (par));
7133 return par;
7136 static rtx
7137 emit_sfm (base_reg, count)
7138 int base_reg;
7139 int count;
7141 rtx par;
7142 rtx dwarf;
7143 rtx tmp, reg;
7144 int i;
7146 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
7147 dwarf = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
7148 RTX_FRAME_RELATED_P (dwarf) = 1;
7150 reg = gen_rtx_REG (XFmode, base_reg++);
7152 XVECEXP (par, 0, 0)
7153 = gen_rtx_SET (VOIDmode,
7154 gen_rtx_MEM (BLKmode,
7155 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
7156 gen_rtx_UNSPEC (BLKmode,
7157 gen_rtvec (1, reg),
7158 2));
7160 tmp = gen_rtx_SET (VOIDmode,
7161 gen_rtx_MEM (XFmode,
7162 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
7163 reg);
7164 RTX_FRAME_RELATED_P (tmp) = 1;
7165 XVECEXP (dwarf, 0, count - 1) = tmp;
7167 for (i = 1; i < count; i++)
7169 reg = gen_rtx_REG (XFmode, base_reg++);
7170 XVECEXP (par, 0, i) = gen_rtx_USE (VOIDmode, reg);
7172 tmp = gen_rtx_SET (VOIDmode,
7173 gen_rtx_MEM (XFmode,
7174 gen_rtx_PRE_DEC (BLKmode,
7175 stack_pointer_rtx)),
7176 reg);
7177 RTX_FRAME_RELATED_P (tmp) = 1;
7178 XVECEXP (dwarf, 0, count - i - 1) = tmp;
7181 par = emit_insn (par);
7182 REG_NOTES (par) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
7183 REG_NOTES (par));
7184 return par;
7187 void
7188 arm_expand_prologue ()
7190 int reg;
7191 rtx amount = GEN_INT (-(get_frame_size ()
7192 + current_function_outgoing_args_size));
7193 int live_regs_mask = 0;
7194 int store_arg_regs = 0;
7195 /* If this function doesn't return, then there is no need to push
7196 the call-saved regs. */
7197 int volatile_func = arm_volatile_func ();
7198 rtx insn;
7200 /* Naked functions don't have prologues. */
7201 if (arm_naked_function_p (current_function_decl))
7202 return;
7204 if (current_function_anonymous_args && current_function_pretend_args_size)
7205 store_arg_regs = 1;
7207 if (! volatile_func)
7209 for (reg = 0; reg <= 10; reg++)
7210 if (regs_ever_live[reg] && ! call_used_regs[reg])
7211 live_regs_mask |= 1 << reg;
7213 if (! TARGET_APCS_FRAME
7214 && ! frame_pointer_needed
7215 && regs_ever_live[HARD_FRAME_POINTER_REGNUM]
7216 && ! call_used_regs[HARD_FRAME_POINTER_REGNUM])
7217 live_regs_mask |= 1 << HARD_FRAME_POINTER_REGNUM;
7219 if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
7220 live_regs_mask |= 1 << PIC_OFFSET_TABLE_REGNUM;
7222 if (regs_ever_live[LR_REGNUM])
7223 live_regs_mask |= 1 << LR_REGNUM;
7226 if (frame_pointer_needed)
7228 live_regs_mask |= 0xD800;
7229 insn = emit_insn (gen_movsi (gen_rtx_REG (SImode, IP_REGNUM),
7230 stack_pointer_rtx));
7231 RTX_FRAME_RELATED_P (insn) = 1;
7234 if (current_function_pretend_args_size)
7236 if (store_arg_regs)
7237 insn = emit_multi_reg_push
7238 ((0xf0 >> (current_function_pretend_args_size / 4)) & 0xf);
7239 else
7240 insn = emit_insn
7241 (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
7242 GEN_INT (-current_function_pretend_args_size)));
7243 RTX_FRAME_RELATED_P (insn) = 1;
7246 if (live_regs_mask)
7248 /* If we have to push any regs, then we must push lr as well, or
7249 we won't get a proper return. */
7250 live_regs_mask |= 1 << LR_REGNUM;
7251 insn = emit_multi_reg_push (live_regs_mask);
7252 RTX_FRAME_RELATED_P (insn) = 1;
7255 /* For now the integer regs are still pushed in arm_output_epilogue (). */
7257 if (! volatile_func)
7259 if (arm_fpu_arch == FP_SOFT2)
7261 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg --)
7262 if (regs_ever_live[reg] && ! call_used_regs[reg])
7264 insn = gen_rtx_PRE_DEC (XFmode, stack_pointer_rtx);
7265 insn = gen_rtx_MEM (XFmode, insn);
7266 insn = emit_insn (gen_rtx_SET (VOIDmode, insn,
7267 gen_rtx_REG (XFmode, reg)));
7268 RTX_FRAME_RELATED_P (insn) = 1;
7271 else
7273 int start_reg = LAST_ARM_FP_REGNUM;
7275 for (reg = LAST_ARM_FP_REGNUM; reg >= FIRST_ARM_FP_REGNUM; reg --)
7277 if (regs_ever_live[reg] && ! call_used_regs[reg])
7279 if (start_reg - reg == 3)
7281 insn = emit_sfm (reg, 4);
7282 RTX_FRAME_RELATED_P (insn) = 1;
7283 start_reg = reg - 1;
7286 else
7288 if (start_reg != reg)
7290 insn = emit_sfm (reg + 1, start_reg - reg);
7291 RTX_FRAME_RELATED_P (insn) = 1;
7293 start_reg = reg - 1;
7297 if (start_reg != reg)
7299 insn = emit_sfm (reg + 1, start_reg - reg);
7300 RTX_FRAME_RELATED_P (insn) = 1;
7305 if (frame_pointer_needed)
7307 insn = GEN_INT (-(4 + current_function_pretend_args_size));
7308 insn = emit_insn (gen_addsi3 (hard_frame_pointer_rtx,
7309 gen_rtx_REG (SImode, IP_REGNUM),
7310 insn));
7311 RTX_FRAME_RELATED_P (insn) = 1;
7314 if (amount != const0_rtx)
7316 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
7317 amount));
7318 RTX_FRAME_RELATED_P (insn) = 1;
7319 emit_insn (gen_rtx_CLOBBER (VOIDmode,
7320 gen_rtx_MEM (BLKmode, stack_pointer_rtx)));
7323 /* If we are profiling, make sure no instructions are scheduled before
7324 the call to mcount. Similarly if the user has requested no
7325 scheduling in the prolog. */
7326 if (profile_flag || profile_block_flag || TARGET_NO_SCHED_PRO)
7327 emit_insn (gen_blockage ());
7330 /* If CODE is 'd', then the X is a condition operand and the instruction
7331 should only be executed if the condition is true.
7332 If CODE is 'D', then the X is a condition operand and the instruction
7333 should only be executed if the condition is false: however, if the mode
7334 of the comparison is CCFPEmode, then always execute the instruction -- we
7335 do this because in these circumstances !GE does not necessarily imply LT;
7336 in these cases the instruction pattern will take care to make sure that
7337 an instruction containing %d will follow, thereby undoing the effects of
7338 doing this instruction unconditionally.
7339 If CODE is 'N' then X is a floating point operand that must be negated
7340 before output.
7341 If CODE is 'B' then output a bitwise inverted value of X (a const int).
7342 If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */
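/* Some illustrative cases (sketches, not exhaustive): if X is the
   const_int 5, '%B' prints -6 (its bitwise inverse, sign extended);
   if X is a DImode value held in r0, '%M' prints "{r0-r1}"; and
   '%?' prints the current condition code whenever output is being
   conditionalised by the fsm below. */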
7344 void
7345 arm_print_operand (stream, x, code)
7346 FILE * stream;
7347 rtx x;
7348 int code;
7350 switch (code)
7352 case '@':
7353 fputs (ASM_COMMENT_START, stream);
7354 return;
7356 case '_':
7357 fputs (user_label_prefix, stream);
7358 return;
7360 case '|':
7361 fputs (REGISTER_PREFIX, stream);
7362 return;
7364 case '?':
7365 if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
7366 fputs (arm_condition_codes[arm_current_cc], stream);
7367 return;
7369 case 'N':
7371 REAL_VALUE_TYPE r;
7372 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
7373 r = REAL_VALUE_NEGATE (r);
7374 fprintf (stream, "%s", fp_const_from_val (&r));
7376 return;
7378 case 'B':
7379 if (GET_CODE (x) == CONST_INT)
7381 HOST_WIDE_INT val;
7382 val = ARM_SIGN_EXTEND (~ INTVAL (x));
7383 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
7385 else
7387 putc ('~', stream);
7388 output_addr_const (stream, x);
7390 return;
7392 case 'i':
7393 fprintf (stream, "%s", arithmetic_instr (x, 1));
7394 return;
7396 case 'I':
7397 fprintf (stream, "%s", arithmetic_instr (x, 0));
7398 return;
7400 case 'S':
7402 HOST_WIDE_INT val;
7403 const char * shift = shift_op (x, & val);
7405 if (shift)
7407 fprintf (stream, ", %s ", shift_op (x, & val));
7408 if (val == -1)
7409 arm_print_operand (stream, XEXP (x, 1), 0);
7410 else
7412 fputc ('#', stream);
7413 fprintf (stream, HOST_WIDE_INT_PRINT_DEC, val);
7417 return;
7419 /* An explanation of the 'Q', 'R' and 'H' register operands:
7421 In a pair of registers containing a DI or DF value the 'Q'
7422 operand returns the register number of the register containing
7423 the least significant part of the value. The 'R' operand returns
7424 the register number of the register containing the most
7425 significant part of the value.
7427 The 'H' operand returns the higher of the two register numbers.
7428 On a run where WORDS_BIG_ENDIAN is true the 'H' operand is the
7429 same as the 'Q' operand, since the most significant part of the
7430 value is held in the lower number register. The reverse is true
7431 on systems where WORDS_BIG_ENDIAN is false.
7433 The purpose of these operands is to distinguish between cases
7434 where the endian-ness of the values is important (for example
7435 when they are added together), and cases where the endian-ness
7436 is irrelevant, but the order of register operations is important.
7437 For example when loading a value from memory into a register
7438 pair, the endian-ness does not matter. Provided that the value
7439 from the lower memory address is put into the lower numbered
7440 register, and the value from the higher address is put into the
7441 higher numbered register, the load will work regardless of whether
7442 the value being loaded is big-wordian or little-wordian. The
7443 order of the two register loads can matter however, if the address
7444 of the memory location is actually held in one of the registers
7445 being overwritten by the load. */
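/* For example, on a little-endian target (WORDS_BIG_ENDIAN false),
   for a DImode value held in r0/r1, '%Q' prints r0 (the least
   significant word), '%R' prints r1 (the most significant word),
   and '%H' also prints r1, the higher-numbered register. */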
7446 case 'Q':
7447 if (REGNO (x) > LAST_ARM_REGNUM)
7448 abort ();
7449 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 1 : 0));
7450 return;
7452 case 'R':
7453 if (REGNO (x) > LAST_ARM_REGNUM)
7454 abort ();
7455 asm_fprintf (stream, "%r", REGNO (x) + (WORDS_BIG_ENDIAN ? 0 : 1));
7456 return;
7458 case 'H':
7459 if (REGNO (x) > LAST_ARM_REGNUM)
7460 abort ();
7461 asm_fprintf (stream, "%r", REGNO (x) + 1);
7462 return;
7464 case 'm':
7465 asm_fprintf (stream, "%r",
7466 GET_CODE (XEXP (x, 0)) == REG
7467 ? REGNO (XEXP (x, 0)) : REGNO (XEXP (XEXP (x, 0), 0)));
7468 return;
7470 case 'M':
7471 asm_fprintf (stream, "{%r-%r}",
7472 REGNO (x),
7473 REGNO (x) + NUM_REGS (GET_MODE (x)) - 1);
7474 return;
7476 case 'd':
7477 if (! x)
7478 return;
7480 if (TARGET_ARM)
7481 fputs (arm_condition_codes[get_arm_condition_code (x)],
7482 stream);
7483 else
7484 fputs (thumb_condition_code (x, 0), stream);
7485 return;
7487 case 'D':
7488 if (! x)
7489 return;
7491 if (TARGET_ARM)
7492 fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
7493 (get_arm_condition_code (x))],
7494 stream);
7495 else
7496 fputs (thumb_condition_code (x, 1), stream);
7497 return;
7499 default:
7500 if (x == 0)
7501 abort ();
7503 if (GET_CODE (x) == REG)
7504 asm_fprintf (stream, "%r", REGNO (x));
7505 else if (GET_CODE (x) == MEM)
7507 output_memory_reference_mode = GET_MODE (x);
7508 output_address (XEXP (x, 0));
7510 else if (GET_CODE (x) == CONST_DOUBLE)
7511 fprintf (stream, "#%s", fp_immediate_constant (x));
7512 else if (GET_CODE (x) == NEG)
7513 abort (); /* This should never happen now. */
7514 else
7516 fputc ('#', stream);
7517 output_addr_const (stream, x);
7522 /* A finite state machine takes care of noticing whether or not instructions
7523 can be conditionally executed, and thus decrease execution time and code
7524 size by deleting branch instructions. The fsm is controlled by
7525 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
7527 /* The states of the fsm controlling condition codes are:
7528 0: normal, do nothing special
7529 1: make ASM_OUTPUT_OPCODE not output this instruction
7530 2: make ASM_OUTPUT_OPCODE not output this instruction
7531 3: make instructions conditional
7532 4: make instructions conditional
7534 State transitions (state->state by whom under condition):
7535 0 -> 1 final_prescan_insn if the `target' is a label
7536 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
7537 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
7538 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
7539 3 -> 0 ASM_OUTPUT_INTERNAL_LABEL if the `target' label is reached
7540 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
7541 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
7542 (the target insn is arm_target_insn).
7544 If the jump clobbers the conditions then we use states 2 and 4.
7546 A similar thing can be done with conditional return insns.
7548 XXX In case the `target' is an unconditional branch, this conditionalising
7549 of the instructions always reduces code size, but not always execution
7550 time. But then, I want to reduce the code size to somewhere near what
7551 /bin/cc produces. */
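/* As an illustration (a sketch of the effect, not actual output),
   a conditional branch around a single instruction:

	cmp	r0, #0
	beq	.L1
	add	r1, r1, #1
   .L1:

   can be rewritten by this fsm as a conditionally executed
   instruction:

	cmp	r0, #0
	addne	r1, r1, #1

   saving both the branch and the label. */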
7553 /* Returns the index of the ARM condition code string in
7554 `arm_condition_codes'. COMPARISON should be an rtx like
7555 `(eq (...) (...))'. */
7557 static enum arm_cond_code
7558 get_arm_condition_code (comparison)
7559 rtx comparison;
7561 enum machine_mode mode = GET_MODE (XEXP (comparison, 0));
7562 register int code;
7563 register enum rtx_code comp_code = GET_CODE (comparison);
7565 if (GET_MODE_CLASS (mode) != MODE_CC)
7566 mode = SELECT_CC_MODE (comp_code, XEXP (comparison, 0),
7567 XEXP (comparison, 1));
7569 switch (mode)
7571 case CC_DNEmode: code = ARM_NE; goto dominance;
7572 case CC_DEQmode: code = ARM_EQ; goto dominance;
7573 case CC_DGEmode: code = ARM_GE; goto dominance;
7574 case CC_DGTmode: code = ARM_GT; goto dominance;
7575 case CC_DLEmode: code = ARM_LE; goto dominance;
7576 case CC_DLTmode: code = ARM_LT; goto dominance;
7577 case CC_DGEUmode: code = ARM_CS; goto dominance;
7578 case CC_DGTUmode: code = ARM_HI; goto dominance;
7579 case CC_DLEUmode: code = ARM_LS; goto dominance;
7580 case CC_DLTUmode: code = ARM_CC;
7582 dominance:
7583 if (comp_code != EQ && comp_code != NE)
7584 abort ();
7586 if (comp_code == EQ)
7587 return ARM_INVERSE_CONDITION_CODE (code);
7588 return code;
7590 case CC_NOOVmode:
7591 switch (comp_code)
7593 case NE: return ARM_NE;
7594 case EQ: return ARM_EQ;
7595 case GE: return ARM_PL;
7596 case LT: return ARM_MI;
7597 default: abort ();
7600 case CC_Zmode:
7601 case CCFPmode:
7602 switch (comp_code)
7604 case NE: return ARM_NE;
7605 case EQ: return ARM_EQ;
7606 default: abort ();
7609 case CCFPEmode:
7610 switch (comp_code)
7612 case GE: return ARM_GE;
7613 case GT: return ARM_GT;
7614 case LE: return ARM_LS;
7615 case LT: return ARM_MI;
7616 default: abort ();
7619 case CC_SWPmode:
7620 switch (comp_code)
7622 case NE: return ARM_NE;
7623 case EQ: return ARM_EQ;
7624 case GE: return ARM_LE;
7625 case GT: return ARM_LT;
7626 case LE: return ARM_GE;
7627 case LT: return ARM_GT;
7628 case GEU: return ARM_LS;
7629 case GTU: return ARM_CC;
7630 case LEU: return ARM_CS;
7631 case LTU: return ARM_HI;
7632 default: abort ();
7635 case CC_Cmode:
7636 switch (comp_code)
7638 case LTU: return ARM_CS;
7639 case GEU: return ARM_CC;
7640 default: abort ();
7643 case CCmode:
7644 switch (comp_code)
7646 case NE: return ARM_NE;
7647 case EQ: return ARM_EQ;
7648 case GE: return ARM_GE;
7649 case GT: return ARM_GT;
7650 case LE: return ARM_LE;
7651 case LT: return ARM_LT;
7652 case GEU: return ARM_CS;
7653 case GTU: return ARM_HI;
7654 case LEU: return ARM_LS;
7655 case LTU: return ARM_CC;
7656 default: abort ();
7659 default: abort ();
7662 abort ();
7666 void
7667 arm_final_prescan_insn (insn)
7668 rtx insn;
7670 /* BODY will hold the body of INSN. */
7671 register rtx body = PATTERN (insn);
7673 /* This will be 1 if trying to repeat the trick, and things need to be
7674 reversed if it appears to fail. */
7675 int reverse = 0;
7677 /* JUMP_CLOBBERS will be nonzero if the conditions are clobbered when a
7678 branch is taken, even if the rtl suggests otherwise. It also
7679 means that we have to grub around within the jump expression to find
7680 out what the conditions are when the jump isn't taken. */
7681 int jump_clobbers = 0;
7683 /* If we start with a return insn, we only succeed if we find another one. */
7684 int seeking_return = 0;
7686 /* START_INSN will hold the insn from where we start looking. This is the
7687 first insn after the following code_label if REVERSE is true. */
7688 rtx start_insn = insn;
7690 /* If in state 4, check if the target branch is reached, in order to
7691 change back to state 0. */
7692 if (arm_ccfsm_state == 4)
7694 if (insn == arm_target_insn)
7696 arm_target_insn = NULL;
7697 arm_ccfsm_state = 0;
7699 return;
7702 /* If in state 3, it is possible to repeat the trick, if this insn is an
7703 unconditional branch to a label, and immediately following this branch
7704 is the previous target label which is only used once, and the label this
7705 branch jumps to is not too far off. */
7706 if (arm_ccfsm_state == 3)
7708 if (simplejump_p (insn))
7710 start_insn = next_nonnote_insn (start_insn);
7711 if (GET_CODE (start_insn) == BARRIER)
7713 /* XXX Isn't this always a barrier? */
7714 start_insn = next_nonnote_insn (start_insn);
7716 if (GET_CODE (start_insn) == CODE_LABEL
7717 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
7718 && LABEL_NUSES (start_insn) == 1)
7719 reverse = TRUE;
7720 else
7721 return;
7723 else if (GET_CODE (body) == RETURN)
7725 start_insn = next_nonnote_insn (start_insn);
7726 if (GET_CODE (start_insn) == BARRIER)
7727 start_insn = next_nonnote_insn (start_insn);
7728 if (GET_CODE (start_insn) == CODE_LABEL
7729 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
7730 && LABEL_NUSES (start_insn) == 1)
7732 reverse = TRUE;
7733 seeking_return = 1;
7735 else
7736 return;
7738 else
7739 return;
7742 if (arm_ccfsm_state != 0 && !reverse)
7743 abort ();
7744 if (GET_CODE (insn) != JUMP_INSN)
7745 return;
7747 /* This jump might be paralleled with a clobber of the condition codes;
7748 the jump should always come first. */
7749 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
7750 body = XVECEXP (body, 0, 0);
7752 #if 0
7753 /* If this is a conditional return then we don't want to know. */
7754 if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
7755 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
7756 && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
7757 || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
7758 return;
7759 #endif
7761 if (reverse
7762 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
7763 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
7765 int insns_skipped;
7766 int fail = FALSE, succeed = FALSE;
7767 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
7768 int then_not_else = TRUE;
7769 rtx this_insn = start_insn, label = 0;
7771 if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
7773 /* The code below is wrong for these, and I haven't time to
7774 fix it now. So we just do the safe thing and return. This
7775 whole function needs re-writing anyway. */
7776 jump_clobbers = 1;
7777 return;
7780 /* Register the insn jumped to. */
7781 if (reverse)
7783 if (!seeking_return)
7784 label = XEXP (SET_SRC (body), 0);
7786 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
7787 label = XEXP (XEXP (SET_SRC (body), 1), 0);
7788 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
7790 label = XEXP (XEXP (SET_SRC (body), 2), 0);
7791 then_not_else = FALSE;
7793 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
7794 seeking_return = 1;
7795 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
7797 seeking_return = 1;
7798 then_not_else = FALSE;
7800 else
7801 abort ();
7803 /* See how many insns this branch skips, and what kind of insns. If all
7804 insns are okay, and the label or unconditional branch to the same
7805 label is not too far away, succeed. */
7806 for (insns_skipped = 0;
7807 !fail && !succeed && insns_skipped++ < max_insns_skipped;)
7809 rtx scanbody;
7811 this_insn = next_nonnote_insn (this_insn);
7812 if (!this_insn)
7813 break;
7815 switch (GET_CODE (this_insn))
7817 case CODE_LABEL:
7818 /* Succeed if it is the target label, otherwise fail since
7819 control falls in from somewhere else. */
7820 if (this_insn == label)
7822 if (jump_clobbers)
7824 arm_ccfsm_state = 2;
7825 this_insn = next_nonnote_insn (this_insn);
7827 else
7828 arm_ccfsm_state = 1;
7829 succeed = TRUE;
7831 else
7832 fail = TRUE;
7833 break;
7835 case BARRIER:
7836 /* Succeed if the following insn is the target label.
7837 Otherwise fail.
7838 If return insns are used then the last insn in a function
7839 will be a barrier. */
7840 this_insn = next_nonnote_insn (this_insn);
7841 if (this_insn && this_insn == label)
7843 if (jump_clobbers)
7845 arm_ccfsm_state = 2;
7846 this_insn = next_nonnote_insn (this_insn);
7848 else
7849 arm_ccfsm_state = 1;
7850 succeed = TRUE;
7852 else
7853 fail = TRUE;
7854 break;
7856 case CALL_INSN:
7857 /* If using 32-bit addresses the cc is not preserved over
7858 calls. */
7859 if (TARGET_APCS_32)
7861 /* Succeed if the following insn is the target label,
7862 or if the following two insns are a barrier and
7863 the target label. */
7864 this_insn = next_nonnote_insn (this_insn);
7865 if (this_insn && GET_CODE (this_insn) == BARRIER)
7866 this_insn = next_nonnote_insn (this_insn);
7868 if (this_insn && this_insn == label
7869 && insns_skipped < max_insns_skipped)
7871 if (jump_clobbers)
7873 arm_ccfsm_state = 2;
7874 this_insn = next_nonnote_insn (this_insn);
7876 else
7877 arm_ccfsm_state = 1;
7878 succeed = TRUE;
7880 else
7881 fail = TRUE;
7883 break;
7885 case JUMP_INSN:
7886 /* If this is an unconditional branch to the same label, succeed.
7887 If it is to another label, do nothing. If it is conditional,
7888 fail. */
7889 /* XXX Probably, the tests for SET and the PC are unnecessary. */
7891 scanbody = PATTERN (this_insn);
7892 if (GET_CODE (scanbody) == SET
7893 && GET_CODE (SET_DEST (scanbody)) == PC)
7895 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
7896 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
7898 arm_ccfsm_state = 2;
7899 succeed = TRUE;
7901 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
7902 fail = TRUE;
7904 /* Fail if a conditional return is undesirable (e.g. on a
7905 StrongARM), but still allow this if optimizing for size. */
7906 else if (GET_CODE (scanbody) == RETURN
7907 && ! use_return_insn (TRUE)
7908 && ! optimize_size)
7909 fail = TRUE;
7910 else if (GET_CODE (scanbody) == RETURN
7911 && seeking_return)
7913 arm_ccfsm_state = 2;
7914 succeed = TRUE;
7916 else if (GET_CODE (scanbody) == PARALLEL)
7918 switch (get_attr_conds (this_insn))
7920 case CONDS_NOCOND:
7921 break;
7922 default:
7923 fail = TRUE;
7924 break;
7927 break;
7929 case INSN:
7930 /* Instructions using or affecting the condition codes make it
7931 fail. */
7932 scanbody = PATTERN (this_insn);
7933 if (! (GET_CODE (scanbody) == SET
7934 || GET_CODE (scanbody) == PARALLEL)
7935 || get_attr_conds (this_insn) != CONDS_NOCOND)
7936 fail = TRUE;
7937 break;
7939 default:
7940 break;
7943 if (succeed)
7945 if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
7946 arm_target_label = CODE_LABEL_NUMBER (label);
7947 else if (seeking_return || arm_ccfsm_state == 2)
7949 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
7951 this_insn = next_nonnote_insn (this_insn);
7952 if (this_insn && (GET_CODE (this_insn) == BARRIER
7953 || GET_CODE (this_insn) == CODE_LABEL))
7954 abort ();
7956 if (!this_insn)
7958 /* Oh dear! We ran off the end... give up. */
7959 recog (PATTERN (insn), insn, NULL_PTR);
7960 arm_ccfsm_state = 0;
7961 arm_target_insn = NULL;
7962 return;
7964 arm_target_insn = this_insn;
7966 else
7967 abort ();
7968 if (jump_clobbers)
7970 if (reverse)
7971 abort ();
7972 arm_current_cc =
7973 get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
7974 0), 0), 1));
7975 if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
7976 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
7977 if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
7978 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
7980 else
7982 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
7983 what it was. */
7984 if (!reverse)
7985 arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
7986 0));
7989 if (reverse || then_not_else)
7990 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
7993 /* Restore recog_data (getting the attributes of other insns can
7994 destroy this array, but final.c assumes that it remains intact
7995 across this call; since the insn has been recognized already we
7996 call recog directly). */
7997 recog (PATTERN (insn), insn, NULL_PTR);
8002 arm_regno_class (regno)
8003 int regno;
8005 if (TARGET_THUMB)
8007 if (regno == STACK_POINTER_REGNUM)
8008 return STACK_REG;
8009 if (regno == CC_REGNUM)
8010 return CC_REG;
8011 if (regno < 8)
8012 return LO_REGS;
8013 return HI_REGS;
8016 if ( regno <= LAST_ARM_REGNUM
8017 || regno == FRAME_POINTER_REGNUM
8018 || regno == ARG_POINTER_REGNUM)
8019 return GENERAL_REGS;
8021 if (regno == CC_REGNUM)
8022 return NO_REGS;
8024 return FPU_REGS;
8027 /* Handle a special case when computing the offset
8028 of an argument from the frame pointer. */
8030 arm_debugger_arg_offset (value, addr)
8031 int value;
8032 rtx addr;
8034 rtx insn;
8036 /* We are only interested if dbxout_parms() failed to compute the offset. */
8037 if (value != 0)
8038 return 0;
8040 /* We can only cope with the case where the address is held in a register. */
8041 if (GET_CODE (addr) != REG)
8042 return 0;
8044 /* If we are using the frame pointer to point at the argument, then
8045 an offset of 0 is correct. */
8046 if (REGNO (addr) == (unsigned) HARD_FRAME_POINTER_REGNUM)
8047 return 0;
8049 /* If we are using the stack pointer to point at the
8050 argument, then an offset of 0 is correct. */
8051 if ((TARGET_THUMB || ! frame_pointer_needed)
8052 && REGNO (addr) == SP_REGNUM)
8053 return 0;
8055 /* Oh dear. The argument is pointed to by a register rather
8056 than being held in a register, or being stored at a known
8057 offset from the frame pointer. Since GDB only understands
8058 those two kinds of argument we must translate the address
8059 held in the register into an offset from the frame pointer.
8060 We do this by searching through the insns for the function
8061 looking to see where this register gets its value. If the
8062 register is initialised from the frame pointer plus an offset
8063 then we are in luck and we can continue, otherwise we give up.
8065 This code is exercised by producing debugging information
8066 for a function with arguments like this:
8068 double func (double a, double b, int c, double d) {return d;}
8070 Without this code the stab for parameter 'd' will be set to
8071 an offset of 0 from the frame pointer, rather than 8. */
8073 /* The if() statement says:
8075 If the insn is a normal instruction
8076 and if the insn is setting the value in a register
8077 and if the register being set is the register holding the address of the argument
8078 and if the address is computed by an addition
8079 that involves adding to a register
8080 which is the frame pointer
8081 a constant integer
8083 then... */
8085 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
8087 if ( GET_CODE (insn) == INSN
8088 && GET_CODE (PATTERN (insn)) == SET
8089 && REGNO (XEXP (PATTERN (insn), 0)) == REGNO (addr)
8090 && GET_CODE (XEXP (PATTERN (insn), 1)) == PLUS
8091 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 0)) == REG
8092 && REGNO (XEXP (XEXP (PATTERN (insn), 1), 0)) == (unsigned) HARD_FRAME_POINTER_REGNUM
8093 && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 1)) == CONST_INT
8096 value = INTVAL (XEXP (XEXP (PATTERN (insn), 1), 1));
8098 break;
8102 if (value == 0)
8104 debug_rtx (addr);
8105 warning ("Unable to compute real location of stacked parameter");
8106 value = 8; /* XXX magic hack */
8109 return value;
8113 /* Recursively search through all of the blocks in a function
8114 checking to see if any of the variables created in that
8115 function match the RTX called 'orig'. If they do then
8116 replace them with the RTX called 'new'. */
8118 static void
8119 replace_symbols_in_block (block, orig, new)
8120 tree block;
8121 rtx orig;
8122 rtx new;
8124 for (; block; block = BLOCK_CHAIN (block))
8126 tree sym;
8128 if (! TREE_USED (block))
8129 continue;
8131 for (sym = BLOCK_VARS (block); sym; sym = TREE_CHAIN (sym))
8133 if ( (DECL_NAME (sym) == 0 && TREE_CODE (sym) != TYPE_DECL)
8134 || DECL_IGNORED_P (sym)
8135 || TREE_CODE (sym) != VAR_DECL
8136 || DECL_EXTERNAL (sym)
8137 || ! rtx_equal_p (DECL_RTL (sym), orig)
8139 continue;
8141 DECL_RTL (sym) = new;
8144 replace_symbols_in_block (BLOCK_SUBBLOCKS (block), orig, new);
8148 /* Return the number (counting from 0) of the least significant set
8149 bit in MASK. */
8150 #ifdef __GNUC__
8151 inline
8152 #endif
8153 static int
8154 number_of_first_bit_set (mask)
8155 int mask;
8157 int bit;
8159 for (bit = 0;
8160 (mask & (1 << bit)) == 0;
8161 ++ bit)
8162 continue;
8164 return bit;
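/* For example, number_of_first_bit_set (0x18) is 3. MASK must be
   nonzero, or the loop above will never terminate. */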
8167 /* Generate code to return from a thumb function.
8168 If 'reg_containing_return_addr' is -1, then the return address is
8169 actually on the stack, at the stack pointer. */
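/* In the simplest case (illustrative): with no interworking, no
   backtrace structure and the function not entered in ARM mode, the
   return address is popped straight into the PC with

	pop	{pc}

   whereas with interworking the address must be popped into a
   register and returned through "bx". */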
8170 static void
8171 thumb_exit (f, reg_containing_return_addr, eh_ofs)
8172 FILE * f;
8173 int reg_containing_return_addr;
8174 rtx eh_ofs;
8176 unsigned regs_available_for_popping;
8177 unsigned regs_to_pop;
8178 int pops_needed;
8179 unsigned available;
8180 unsigned required;
8181 int mode;
8182 int size;
8183 int restore_a4 = FALSE;
8185 /* Compute the registers we need to pop. */
8186 regs_to_pop = 0;
8187 pops_needed = 0;
8189 /* There is an assumption here, that if eh_ofs is not NULL, the
8190 normal return address will have been pushed. */
8191 if (reg_containing_return_addr == -1 || eh_ofs)
8193 /* When we are generating a return for __builtin_eh_return,
8194 reg_containing_return_addr must specify the return regno. */
8195 if (eh_ofs && reg_containing_return_addr == -1)
8196 abort ();
8198 regs_to_pop |= 1 << LR_REGNUM;
8199 ++ pops_needed;
8202 if (TARGET_BACKTRACE)
8204 /* Restore the (ARM) frame pointer and stack pointer. */
8205 regs_to_pop |= (1 << ARM_HARD_FRAME_POINTER_REGNUM) | (1 << SP_REGNUM);
8206 pops_needed += 2;
8209 /* If there is nothing to pop then just emit the BX instruction and
8210 return. */
8211 if (pops_needed == 0)
8213 if (eh_ofs)
8214 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
8216 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
8217 return;
8219 /* Otherwise if we are not supporting interworking and we have not created
8220 a backtrace structure and the function was not entered in ARM mode then
8221 just pop the return address straight into the PC. */
8222 else if ( ! TARGET_INTERWORK
8223 && ! TARGET_BACKTRACE
8224 && ! is_called_in_ARM_mode (current_function_decl))
8226 if (eh_ofs)
8228 asm_fprintf (f, "\tadd\t%r, #4\n", SP_REGNUM);
8229 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
8230 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
8232 else
8233 asm_fprintf (f, "\tpop\t{%r}\n", PC_REGNUM);
8235 return;
8238 /* Find out how many of the (return) argument registers we can corrupt. */
8239 regs_available_for_popping = 0;
8241 /* If returning via __builtin_eh_return, the bottom three registers
8242 all contain information needed for the return. */
8243 if (eh_ofs)
8244 size = 12;
8245 else
8247 #ifdef RTX_CODE
8248 /* We can deduce the registers used from the function's
8249 return value. This is more reliable than examining
8250 regs_ever_live[] because that will be set if the register is
8251 ever used in the function, not just if the register is used
8252 to hold a return value. */
8254 if (current_function_return_rtx != 0)
8255 mode = GET_MODE (current_function_return_rtx);
8256 else
8257 #endif
8258 mode = DECL_MODE (DECL_RESULT (current_function_decl));
8260 size = GET_MODE_SIZE (mode);
8262 if (size == 0)
8264 /* In a void function we can use any argument register.
8265 In a function that returns a structure on the stack
8266 we can use the second and third argument registers. */
8267 if (mode == VOIDmode)
8268 regs_available_for_popping =
8269 (1 << ARG_REGISTER (1))
8270 | (1 << ARG_REGISTER (2))
8271 | (1 << ARG_REGISTER (3));
8272 else
8273 regs_available_for_popping =
8274 (1 << ARG_REGISTER (2))
8275 | (1 << ARG_REGISTER (3));
8277 else if (size <= 4)
8278 regs_available_for_popping =
8279 (1 << ARG_REGISTER (2))
8280 | (1 << ARG_REGISTER (3));
8281 else if (size <= 8)
8282 regs_available_for_popping =
8283 (1 << ARG_REGISTER (3));
8286 /* Match registers to be popped with registers into which we pop them. */
8287 for (available = regs_available_for_popping,
8288 required = regs_to_pop;
8289 required != 0 && available != 0;
8290 available &= ~(available & - available),
8291 required &= ~(required & - required))
8292 -- pops_needed;
8294 /* If we have any popping registers left over, remove them. */
8295 if (available > 0)
8296 regs_available_for_popping &= ~ available;
8298 /* Otherwise if we need another popping register we can use
8299 the fourth argument register. */
8300 else if (pops_needed)
8302 /* If we have not found any free argument registers and
8303 reg a4 contains the return address, we must move it. */
8304 if (regs_available_for_popping == 0
8305 && reg_containing_return_addr == LAST_ARG_REGNUM)
8307 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
8308 reg_containing_return_addr = LR_REGNUM;
8310 else if (size > 12)
8312 /* Register a4 is being used to hold part of the return value,
8313 but we have dire need of a free, low register. */
8314 restore_a4 = TRUE;
8316 asm_fprintf (f, "\tmov\t%r, %r\n",IP_REGNUM, LAST_ARG_REGNUM);
8319 if (reg_containing_return_addr != LAST_ARG_REGNUM)
8321 /* The fourth argument register is available. */
8322 regs_available_for_popping |= 1 << LAST_ARG_REGNUM;
8324 -- pops_needed;
8328 /* Pop as many registers as we can. */
8329 thumb_pushpop (f, regs_available_for_popping, FALSE);
8331 /* Process the registers we popped. */
8332 if (reg_containing_return_addr == -1)
8334 /* The return address was popped into the lowest numbered register. */
8335 regs_to_pop &= ~ (1 << LR_REGNUM);
8337 reg_containing_return_addr =
8338 number_of_first_bit_set (regs_available_for_popping);
8340 /* Remove this register from the mask of available registers, so that
8341 the return address will not be corrupted by further pops. */
8342 regs_available_for_popping &= ~ (1 << reg_containing_return_addr);
8345 /* If we popped other registers then handle them here. */
8346 if (regs_available_for_popping)
8348 int frame_pointer;
8350 /* Work out which register currently contains the frame pointer. */
8351 frame_pointer = number_of_first_bit_set (regs_available_for_popping);
8353 /* Move it into the correct place. */
8354 asm_fprintf (f, "\tmov\t%r, %r\n",
8355 ARM_HARD_FRAME_POINTER_REGNUM, frame_pointer);
8357 /* (Temporarily) remove it from the mask of popped registers. */
8358 regs_available_for_popping &= ~ (1 << frame_pointer);
8359 regs_to_pop &= ~ (1 << ARM_HARD_FRAME_POINTER_REGNUM);
8361 if (regs_available_for_popping)
8363 int stack_pointer;
8365 /* We popped the stack pointer as well;
8366 find the register that contains it. */
8367 stack_pointer = number_of_first_bit_set (regs_available_for_popping);
8369 /* Move it into the stack register. */
8370 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, stack_pointer);
8372 /* At this point we have popped all necessary registers, so
8373 do not worry about restoring regs_available_for_popping
8374 to its correct value:
8376 assert (pops_needed == 0)
8377 assert (regs_available_for_popping == (1 << frame_pointer))
8378 assert (regs_to_pop == (1 << STACK_POINTER)) */
8380 else
8382 /* Since we have just moved the popped value into the frame
8383 pointer, the popping register is available for reuse, and
8384 we know that we still have the stack pointer left to pop. */
8385 regs_available_for_popping |= (1 << frame_pointer);
8389 /* If we still have registers left on the stack, but we no longer have
8390 any registers into which we can pop them, then we must move the return
8391 address into the link register and make available the register that
8392 contained it. */
8393 if (regs_available_for_popping == 0 && pops_needed > 0)
8395 regs_available_for_popping |= 1 << reg_containing_return_addr;
8397 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM,
8398 reg_containing_return_addr);
8400 reg_containing_return_addr = LR_REGNUM;
8403 /* If we have registers left on the stack then pop some more.
8404 We know that at most we will want to pop FP and SP. */
8405 if (pops_needed > 0)
8407 int popped_into;
8408 int move_to;
8410 thumb_pushpop (f, regs_available_for_popping, FALSE);
8412 /* We have popped either FP or SP.
8413 Move whichever one it is into the correct register. */
8414 popped_into = number_of_first_bit_set (regs_available_for_popping);
8415 move_to = number_of_first_bit_set (regs_to_pop);
8417 asm_fprintf (f, "\tmov\t%r, %r\n", move_to, popped_into);
8419 regs_to_pop &= ~ (1 << move_to);
8421 -- pops_needed;
8424 /* If we still have not popped everything then we must have only
8425 had one register available to us and we are now popping the SP. */
8426 if (pops_needed > 0)
8428 int popped_into;
8430 thumb_pushpop (f, regs_available_for_popping, FALSE);
8432 popped_into = number_of_first_bit_set (regs_available_for_popping);
8434 asm_fprintf (f, "\tmov\t%r, %r\n", SP_REGNUM, popped_into);
8436 assert (regs_to_pop == (1 << STACK_POINTER))
8437 assert (pops_needed == 1)
8441 /* If necessary restore the a4 register. */
8442 if (restore_a4)
8444 if (reg_containing_return_addr != LR_REGNUM)
8446 asm_fprintf (f, "\tmov\t%r, %r\n", LR_REGNUM, LAST_ARG_REGNUM);
8447 reg_containing_return_addr = LR_REGNUM;
8450 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
8453 if (eh_ofs)
8454 asm_fprintf (f, "\tadd\t%r, %r\n", SP_REGNUM, REGNO (eh_ofs));
8456 /* Return to caller. */
8457 asm_fprintf (f, "\tbx\t%r\n", reg_containing_return_addr);
8460 /* Emit code to push or pop registers to or from the stack. */
8461 static void
8462 thumb_pushpop (f, mask, push)
8463 FILE * f;
8464 int mask;
8465 int push;
8467 int regno;
8468 int lo_mask = mask & 0xFF;
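8469 /* Thumb push/pop instructions can only directly name the low registers r0-r7 (plus LR on a push and PC on a pop). */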
8470 if (lo_mask == 0 && ! push && (mask & (1 << 15)))
8472 /* Special case. Do not generate a POP PC statement here; do it in
8473 thumb_exit() instead. */
8474 thumb_exit (f, -1, NULL_RTX);
8475 return;
8478 fprintf (f, "\t%s\t{", push ? "push" : "pop");
8480 /* Look at the low registers first. */
8481 for (regno = 0; regno <= LAST_LO_REGNUM; regno ++, lo_mask >>= 1)
8483 if (lo_mask & 1)
8485 asm_fprintf (f, "%r", regno);
8487 if ((lo_mask & ~1) != 0)
8488 fprintf (f, ", ");
8492 if (push && (mask & (1 << LR_REGNUM)))
8494 /* Catch pushing the LR. */
8495 if (mask & 0xFF)
8496 fprintf (f, ", ");
8498 asm_fprintf (f, "%r", LR_REGNUM);
8500 else if (!push && (mask & (1 << PC_REGNUM)))
8502 /* Catch popping the PC. */
8503 if (TARGET_INTERWORK || TARGET_BACKTRACE)
8505 /* The PC is never popped directly; instead
8506 it is popped into r3 and then BX is used. */
8507 fprintf (f, "}\n");
8509 thumb_exit (f, -1, NULL_RTX);
8511 return;
8513 else
8515 if (mask & 0xFF)
8516 fprintf (f, ", ");
8518 asm_fprintf (f, "%r", PC_REGNUM);
8522 fprintf (f, "}\n");
8525 void
8526 thumb_final_prescan_insn (insn)
8527 rtx insn;
8529 extern int * insn_addresses;
8531 if (flag_print_asm_name)
8532 asm_fprintf (asm_out_file, "%@ 0x%04x\n", insn_addresses[INSN_UID (insn)]);
8536 thumb_shiftable_const (val)
8537 unsigned HOST_WIDE_INT val;
8539 unsigned HOST_WIDE_INT mask = 0xff;
8540 int i;
8542 if (val == 0) /* XXX */
8543 return 0;
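8544 /* Accept any value that is an 8-bit constant shifted left by 0 to 24 bits. */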
8545 for (i = 0; i < 25; i++)
8546 if ((val & (mask << i)) == val)
8547 return 1;
8549 return 0;
8552 /* Returns non-zero if the current function contains,
8553 or might contain, a far jump. */
8555 thumb_far_jump_used_p (int in_prologue)
8557 rtx insn;
8559 /* This test is only important for leaf functions. */
8560 /* assert (! leaf_function_p ()); */
8562 /* If we have already decided that far jumps may be used,
8563 do not bother checking again, and always return true even if
8564 it turns out that they are not being used. Once we have made
8565 the decision that far jumps are present (and that hence the link
8566 register will be pushed onto the stack) we cannot go back on it. */
8567 if (cfun->machine->far_jump_used)
8568 return 1;
8570 /* If this function is not being called from the prologue/epilogue
8571 generation code then it must be being called from the
8572 INITIAL_ELIMINATION_OFFSET macro. */
8573 if (! in_prologue)
8575 /* In this case we know that we are being asked about the elimination
8576 of the arg pointer register. If that register is not being used,
8577 then there are no arguments on the stack, and we do not have to
8578 worry that a far jump might force the prologue to push the link
8579 register, changing the stack offsets. In this case we can just
8580 return false, since the presence of far jumps in the function will
8581 not affect stack offsets.
8583 If the arg pointer is live (or if it was live, but has now been
8584 eliminated and so set to dead) then we do have to test to see if
8585 the function might contain a far jump. This test can lead to some
8586 false positives, since before reload is completed, the length of
8587 branch instructions is not known, so gcc defaults to returning their
8588 longest length, which in turn sets the far jump attribute to true.
8590 A false positive will not result in bad code being generated, but it
8591 will result in a needless push and pop of the link register. We
8592 hope that this does not occur too often. */
8593 if (regs_ever_live [ARG_POINTER_REGNUM])
8594 cfun->machine->arg_pointer_live = 1;
8595 else if (! cfun->machine->arg_pointer_live)
8596 return 0;
8599 /* Check to see if the function contains a branch
8600 insn with the far jump attribute set. */
8601 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
8603 if (GET_CODE (insn) == JUMP_INSN
8604 /* Ignore tablejump patterns. */
8605 && GET_CODE (PATTERN (insn)) != ADDR_VEC
8606 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
8607 && get_attr_far_jump (insn) == FAR_JUMP_YES
8610 /* Record the fact that we have decided that
8611 the function does use far jumps. */
8612 cfun->machine->far_jump_used = 1;
8613 return 1;
8617 return 0;
8620 /* Return non-zero if FUNC must be entered in ARM mode. */
8622 is_called_in_ARM_mode (func)
8623 tree func;
8625 if (TREE_CODE (func) != FUNCTION_DECL)
8626 abort ();
8628 /* Ignore the problem of functions whose address is taken. */
8629 if (TARGET_CALLEE_INTERWORKING && TREE_PUBLIC (func))
8630 return TRUE;
8632 #ifdef ARM_PE
8633 return lookup_attribute ("interfacearm", DECL_MACHINE_ATTRIBUTES (func)) != NULL_TREE;
8634 #else
8635 return FALSE;
8636 #endif
8639 /* The bits which aren't usefully expanded as rtl. */
8640 const char *
8641 thumb_unexpanded_epilogue ()
8643 int regno;
8644 int live_regs_mask = 0;
8645 int high_regs_pushed = 0;
8646 int leaf_function = leaf_function_p ();
8647 int had_to_push_lr;
8648 rtx eh_ofs = cfun->machine->eh_epilogue_sp_ofs;
8650 if (return_used_this_function)
8651 return "";
8653 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
8654 if (regs_ever_live[regno] && ! call_used_regs[regno]
8655 && ! (TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
8656 live_regs_mask |= 1 << regno;
8658 for (regno = 8; regno < 13; regno++)
8660 if (regs_ever_live[regno] && ! call_used_regs[regno]
8661 && ! (TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
8662 high_regs_pushed ++;
8665 /* The prologue may have pushed some high registers to use as
8666 work registers, e.g. the testsuite file:
8667 gcc/testsuite/gcc/gcc.c-torture/execute/complex-2.c
8668 compiles to produce:
8669 push {r4, r5, r6, r7, lr}
8670 mov r7, r9
8671 mov r6, r8
8672 push {r6, r7}
8673 as part of the prologue. We have to undo that pushing here. */
8675 if (high_regs_pushed)
8677 int mask = live_regs_mask;
8678 int next_hi_reg;
8679 int size;
8680 int mode;
8682 #ifdef RTX_CODE
8683 /* See if we can deduce the registers used from the function's return value.
8684 This is more reliable than examining regs_ever_live[] because that
8685 will be set if the register is ever used in the function, not just if
8686 the register is used to hold a return value. */
8688 if (current_function_return_rtx != 0)
8689 mode = GET_MODE (current_function_return_rtx);
8690 else
8691 #endif
8692 mode = DECL_MODE (DECL_RESULT (current_function_decl));
8694 size = GET_MODE_SIZE (mode);
8696 /* Unless we are returning a type of size > 12, register r3 is
8697 available. */
8698 if (size < 13)
8699 mask |= 1 << 3;
8701 if (mask == 0)
8702 /* Oh dear! We have no low registers into which we can pop
8703 high registers! */
8704 fatal ("No low registers available for popping high registers");
8706 for (next_hi_reg = 8; next_hi_reg < 13; next_hi_reg++)
8707 if (regs_ever_live[next_hi_reg] && ! call_used_regs[next_hi_reg]
8708 && ! (TARGET_SINGLE_PIC_BASE && (next_hi_reg == arm_pic_register)))
8709 break;
8711 while (high_regs_pushed)
8713 /* Find lo register(s) into which the high register(s) can
8714 be popped. */
8715 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
8717 if (mask & (1 << regno))
8718 high_regs_pushed--;
8719 if (high_regs_pushed == 0)
8720 break;
8723 mask &= (2 << regno) - 1; /* A no-op if regno == 8. */
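8724 /* Trim the mask to just the low registers needed for this pop. */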
8725 /* Pop the values into the low register(s). */
8726 thumb_pushpop (asm_out_file, mask, 0);
8728 /* Move the value(s) into the high registers. */
8729 for (regno = 0; regno <= LAST_LO_REGNUM; regno++)
8731 if (mask & (1 << regno))
8733 asm_fprintf (asm_out_file, "\tmov\t%r, %r\n", next_hi_reg,
8734 regno);
8736 for (next_hi_reg++; next_hi_reg < 13; next_hi_reg++)
8737 if (regs_ever_live[next_hi_reg] &&
8738 ! call_used_regs[next_hi_reg]
8739 && ! (TARGET_SINGLE_PIC_BASE
8740 && (next_hi_reg == arm_pic_register)))
8741 break;
8747 had_to_push_lr = (live_regs_mask || ! leaf_function
8748 || thumb_far_jump_used_p (1));
8750 if (TARGET_BACKTRACE
8751 && ((live_regs_mask & 0xFF) == 0)
8752 && regs_ever_live [LAST_ARG_REGNUM] != 0)
8754 /* The stack backtrace structure creation code had to
8755 push R7 in order to get a work register, so we pop
8756 it now. */
8757 live_regs_mask |= (1 << LAST_LO_REGNUM);
8760 if (current_function_pretend_args_size == 0 || TARGET_BACKTRACE)
8762 if (had_to_push_lr
8763 && ! is_called_in_ARM_mode (current_function_decl)
8764 && ! eh_ofs)
8765 live_regs_mask |= 1 << PC_REGNUM;
8767 /* Either no argument registers were pushed or a backtrace
8768 structure was created which includes an adjusted stack
8769 pointer, so just pop everything. */
8770 if (live_regs_mask)
8771 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
8773 if (eh_ofs)
8774 thumb_exit (asm_out_file, 2, eh_ofs);
8775 /* We have either just popped the return address into the
8776 PC, or it was kept in LR for the entire function, or
8777 it is still on the stack because we do not want to
8778 return by doing a pop {pc}. */
8779 else if ((live_regs_mask & (1 << PC_REGNUM)) == 0)
8780 thumb_exit (asm_out_file,
8781 (had_to_push_lr
8782 && is_called_in_ARM_mode (current_function_decl)) ?
8783 -1 : LR_REGNUM, NULL_RTX);
8785 else
8787 /* Pop everything but the return address. */
8788 live_regs_mask &= ~ (1 << PC_REGNUM);
8790 if (live_regs_mask)
8791 thumb_pushpop (asm_out_file, live_regs_mask, FALSE);
8793 if (had_to_push_lr)
8794 /* Get the return address into a temporary register. */
8795 thumb_pushpop (asm_out_file, 1 << LAST_ARG_REGNUM, 0);
8797 /* Remove the argument registers that were pushed onto the stack. */
8798 asm_fprintf (asm_out_file, "\tadd\t%r, %r, #%d\n",
8799 SP_REGNUM, SP_REGNUM,
8800 current_function_pretend_args_size);
8802 if (eh_ofs)
8803 thumb_exit (asm_out_file, 2, eh_ofs);
8804 else
8805 thumb_exit (asm_out_file,
8806 had_to_push_lr ? LAST_ARG_REGNUM : LR_REGNUM, NULL_RTX);
8809 return "";
8812 /* Functions to save and restore machine-specific function data. */
8814 static void
8815 arm_mark_machine_status (p)
8816 struct function * p;
8818 struct machine_function *machine = p->machine;
8820 ggc_mark_rtx (machine->ra_rtx);
8821 ggc_mark_rtx (machine->eh_epilogue_sp_ofs);
8824 static void
8825 arm_init_machine_status (p)
8826 struct function * p;
8828 p->machine =
8829 (struct machine_function *) xcalloc (1, sizeof (struct machine_function));
8832 /* Return an RTX indicating where the return address to the
8833 calling function can be found. */
8835 arm_return_addr (count, frame)
8836 int count;
8837 rtx frame ATTRIBUTE_UNUSED;
8839 rtx reg;
8841 if (count != 0)
8842 return NULL_RTX;
8844 reg = cfun->machine->ra_rtx;
8846 if (reg == NULL)
8848 rtx init;
8850 /* No rtx yet. Invent one, and initialize it for r14 (lr) in
8851 the prologue. */
8852 reg = gen_reg_rtx (Pmode);
8853 cfun->machine->ra_rtx = reg;
8855 if (! TARGET_APCS_32)
8856 init = gen_rtx_AND (Pmode, gen_rtx_REG (Pmode, LR_REGNUM),
8857 GEN_INT (RETURN_ADDR_MASK26));
8858 else
8859 init = gen_rtx_REG (Pmode, LR_REGNUM);
8861 init = gen_rtx_SET (VOIDmode, reg, init);
8863 /* Emit the insn to the prologue with the other argument copies. */
8864 push_topmost_sequence ();
8865 emit_insn_after (init, get_insns ());
8866 pop_topmost_sequence ();
8869 return reg;
8872 /* Do anything needed before RTL is emitted for each function. */
8873 void
8874 arm_init_expanders ()
8876 /* Arrange to initialize and mark the machine per-function status. */
8877 init_machine_status = arm_init_machine_status;
8878 mark_machine_status = arm_mark_machine_status;
8881 /* Generate the rest of a function's prologue. */
8882 void
8883 thumb_expand_prologue ()
8885 HOST_WIDE_INT amount = (get_frame_size ()
8886 + current_function_outgoing_args_size);
8888 /* Naked functions don't have prologues. */
8889 if (arm_naked_function_p (current_function_decl))
8890 return;
8892 if (frame_pointer_needed)
8893 emit_insn (gen_movsi (hard_frame_pointer_rtx, stack_pointer_rtx));
8895 if (amount)
8897 amount = ROUND_UP (amount);
8899 if (amount < 512)
8900 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
8901 GEN_INT (- amount)));
8902 else
8904 int regno;
8905 rtx reg;
8907 /* The stack decrement is too big for an immediate value in a single
8908 insn. In theory we could issue multiple subtracts, but after
8909 three of them it becomes more space efficient to place the full
8910 value in the constant pool and load into a register. (Also the
8911 ARM debugger really likes to see only one stack decrement per
8912 function). So instead we look for a scratch register into which
8913 we can load the decrement, and then we subtract this from the
8914 stack pointer. Unfortunately on the thumb the only available
8915 scratch registers are the argument registers, and we cannot use
8916 these as they may hold arguments to the function. Instead we
8917 attempt to locate a call preserved register which is used by this
8918 function. If we can find one, then we know that it will have
8919 been pushed at the start of the prologue and so we can corrupt
8920 it now. */
8921 for (regno = LAST_ARG_REGNUM + 1; regno <= LAST_LO_REGNUM; regno++)
8922 if (regs_ever_live[regno]
8923 && ! call_used_regs[regno] /* Paranoia */
8924 && ! (TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register))
8925 && ! (frame_pointer_needed
8926 && (regno == THUMB_HARD_FRAME_POINTER_REGNUM)))
8927 break;
8929 if (regno > LAST_LO_REGNUM) /* Very unlikely */
8931 rtx spare = gen_rtx (REG, SImode, IP_REGNUM);
8933 /* Choose an arbitrary, non-argument low register. */
8934 reg = gen_rtx (REG, SImode, LAST_LO_REGNUM);
8936 /* Save it by copying it into a high, scratch register. */
8937 emit_insn (gen_movsi (spare, reg));
8939 /* Decrement the stack. */
8940 emit_insn (gen_movsi (reg, GEN_INT (- amount)));
8941 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
8942 reg));
8944 /* Restore the low register's original value. */
8945 emit_insn (gen_movsi (reg, spare));
8947 /* Emit a USE of the restored scratch register, so that flow
8948 analysis will not consider the restore redundant. The
8949 register won't be used again in this function and isn't
8950 restored by the epilogue. */
8951 emit_insn (gen_rtx_USE (VOIDmode, reg));
8953 else
8955 reg = gen_rtx (REG, SImode, regno);
8957 emit_insn (gen_movsi (reg, GEN_INT (- amount)));
8958 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
8959 reg));
8964 if (profile_flag || profile_block_flag || TARGET_NO_SCHED_PRO)
8965 emit_insn (gen_blockage ());
8968 void
8969 thumb_expand_epilogue ()
8971 HOST_WIDE_INT amount = (get_frame_size ()
8972 + current_function_outgoing_args_size);
8974 /* Naked functions don't have epilogues. */
8975 if (arm_naked_function_p (current_function_decl))
8976 return;
8978 if (frame_pointer_needed)
8979 emit_insn (gen_movsi (stack_pointer_rtx, hard_frame_pointer_rtx));
8980 else if (amount)
8982 amount = ROUND_UP (amount);
8984 if (amount < 512)
8985 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
8986 GEN_INT (amount)));
8987 else
8989 /* r3 is always free in the epilogue. */
8990 rtx reg = gen_rtx (REG, SImode, LAST_ARG_REGNUM);
8992 emit_insn (gen_movsi (reg, GEN_INT (amount)));
8993 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
8997 /* Emit a USE (stack_pointer_rtx), so that
8998 the stack adjustment will not be deleted. */
8999 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
9001 if (profile_flag || profile_block_flag || TARGET_NO_SCHED_PRO)
9002 emit_insn (gen_blockage ());
9005 void
9006 output_thumb_prologue (f)
9007 FILE * f;
9009 int live_regs_mask = 0;
9010 int high_regs_pushed = 0;
9011 int store_arg_regs = 0;
9012 int regno;
9014 if (arm_naked_function_p (current_function_decl))
9015 return;
9017 if (is_called_in_ARM_mode (current_function_decl))
9019 const char * name;
9021 if (GET_CODE (DECL_RTL (current_function_decl)) != MEM)
9022 abort ();
9023 if (GET_CODE (XEXP (DECL_RTL (current_function_decl), 0)) != SYMBOL_REF)
9024 abort ();
9025 name = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
9027 /* Generate code sequence to switch us into Thumb mode. */
9028 /* The .code 32 directive has already been emitted by
9029 ASM_DECLARE_FUNCTION_NAME. */
9030 asm_fprintf (f, "\torr\t%r, %r, #1\n", IP_REGNUM, PC_REGNUM);
9031 asm_fprintf (f, "\tbx\t%r\n", IP_REGNUM);
9033 /* Generate a label, so that the debugger will notice the
9034 change in instruction sets. This label is also used by
9035 the assembler to bypass the ARM code when this function
9036 is called from a Thumb encoded function elsewhere in the
9037 same file. Hence the definition of STUB_NAME here must
9038 agree with the definition in gas/config/tc-arm.c */
9040 #define STUB_NAME ".real_start_of"
9042 asm_fprintf (f, "\t.code\t16\n");
9043 #ifdef ARM_PE
9044 if (arm_dllexport_name_p (name))
9045 name = arm_strip_name_encoding (name);
9046 #endif
9047 asm_fprintf (f, "\t.globl %s%U%s\n", STUB_NAME, name);
9048 asm_fprintf (f, "\t.thumb_func\n");
9049 asm_fprintf (f, "%s%U%s:\n", STUB_NAME, name);
9052 if (current_function_anonymous_args && current_function_pretend_args_size)
9053 store_arg_regs = 1;
9055 if (current_function_pretend_args_size)
9057 if (store_arg_regs)
9059 int num_pushes;
9061 asm_fprintf (f, "\tpush\t{");
9063 num_pushes = NUM_INTS (current_function_pretend_args_size);
9065 for (regno = LAST_ARG_REGNUM + 1 - num_pushes;
9066 regno <= LAST_ARG_REGNUM;
9067 regno ++)
9068 asm_fprintf (f, "%r%s", regno,
9069 regno == LAST_ARG_REGNUM ? "" : ", ");
9071 asm_fprintf (f, "}\n");
9073 else
9074 asm_fprintf (f, "\tsub\t%r, %r, #%d\n",
9075 SP_REGNUM, SP_REGNUM,
9076 current_function_pretend_args_size);
9079 for (regno = 0; regno <= LAST_LO_REGNUM; regno ++)
9080 if (regs_ever_live[regno] && ! call_used_regs[regno]
9081 && ! (TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
9082 live_regs_mask |= 1 << regno;
9084 if (live_regs_mask || ! leaf_function_p () || thumb_far_jump_used_p (1))
9085 live_regs_mask |= 1 << LR_REGNUM;
9087 if (TARGET_BACKTRACE)
9089 int offset;
9090 int work_register = 0;
9091 int wr;
9093 /* We have been asked to create a stack backtrace structure.
9094 The code looks like this:
9096 0 .align 2
9097 0 func:
9098 0 sub SP, #16 Reserve space for 4 registers.
9099 2 push {R7} Get a work register.
9100 4 add R7, SP, #20 Get the stack pointer before the push.
9101 6 str R7, [SP, #8] Store the stack pointer (before reserving the space).
9102 8 mov R7, PC Get hold of the start of this code plus 12.
9103 10 str R7, [SP, #16] Store it.
9104 12 mov R7, FP Get hold of the current frame pointer.
9105 14 str R7, [SP, #4] Store it.
9106 16 mov R7, LR Get hold of the current return address.
9107 18 str R7, [SP, #12] Store it.
9108 20 add R7, SP, #16 Point at the start of the backtrace structure.
9109 22 mov FP, R7 Put this value into the frame pointer. */
9111 if ((live_regs_mask & 0xFF) == 0)
9113 /* See if the a4 register is free. */
9115 if (regs_ever_live [LAST_ARG_REGNUM] == 0)
9116 work_register = LAST_ARG_REGNUM;
9117 else /* We must push a register of our own */
9118 live_regs_mask |= (1 << LAST_LO_REGNUM);
9121 if (work_register == 0)
9123 /* Select a register from the list that will be pushed to
9124 use as our work register. */
9125 for (work_register = (LAST_LO_REGNUM + 1); work_register--;)
9126 if ((1 << work_register) & live_regs_mask)
9127 break;
9130 asm_fprintf
9131 (f, "\tsub\t%r, %r, #16\t%@ Create stack backtrace structure\n",
9132 SP_REGNUM, SP_REGNUM);
9134 if (live_regs_mask)
9135 thumb_pushpop (f, live_regs_mask, 1);
9137 for (offset = 0, wr = 1 << 15; wr != 0; wr >>= 1)
9138 if (wr & live_regs_mask)
9139 offset += 4;
9141 asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
9142 offset + 16 + current_function_pretend_args_size);
9144 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9145 offset + 4);
9147 /* Make sure that the instruction fetching the PC is in the right place
9148 to calculate "start of backtrace creation code + 12". */
9149 if (live_regs_mask)
9151 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
9152 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9153 offset + 12);
9154 asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
9155 ARM_HARD_FRAME_POINTER_REGNUM);
9156 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9157 offset);
9159 else
9161 asm_fprintf (f, "\tmov\t%r, %r\n", work_register,
9162 ARM_HARD_FRAME_POINTER_REGNUM);
9163 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9164 offset);
9165 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, PC_REGNUM);
9166 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9167 offset + 12);
9170 asm_fprintf (f, "\tmov\t%r, %r\n", work_register, LR_REGNUM);
9171 asm_fprintf (f, "\tstr\t%r, [%r, #%d]\n", work_register, SP_REGNUM,
9172 offset + 8);
9173 asm_fprintf (f, "\tadd\t%r, %r, #%d\n", work_register, SP_REGNUM,
9174 offset + 12);
9175 asm_fprintf (f, "\tmov\t%r, %r\t\t%@ Backtrace structure created\n",
9176 ARM_HARD_FRAME_POINTER_REGNUM, work_register);
9178 else if (live_regs_mask)
9179 thumb_pushpop (f, live_regs_mask, 1);
9181 for (regno = 8; regno < 13; regno++)
9183 if (regs_ever_live[regno] && ! call_used_regs[regno]
9184 && ! (TARGET_SINGLE_PIC_BASE && (regno == arm_pic_register)))
9185 high_regs_pushed ++;
9188 if (high_regs_pushed)
9190 int pushable_regs = 0;
9191 int mask = live_regs_mask & 0xff;
9192 int next_hi_reg;
9194 for (next_hi_reg = 12; next_hi_reg > LAST_LO_REGNUM; next_hi_reg--)
9196 if (regs_ever_live[next_hi_reg] && ! call_used_regs[next_hi_reg]
9197 && ! (TARGET_SINGLE_PIC_BASE
9198 && (next_hi_reg == arm_pic_register)))
9199 break;
9202 pushable_regs = mask;
9204 if (pushable_regs == 0)
9206 /* Desperation time -- this probably will never happen. */
9207 if (regs_ever_live[LAST_ARG_REGNUM]
9208 || ! call_used_regs[LAST_ARG_REGNUM])
9209 asm_fprintf (f, "\tmov\t%r, %r\n", IP_REGNUM, LAST_ARG_REGNUM);
9210 mask = 1 << LAST_ARG_REGNUM;
9213 while (high_regs_pushed > 0)
9215 for (regno = LAST_LO_REGNUM; regno >= 0; regno--)
9217 if (mask & (1 << regno))
9219 asm_fprintf (f, "\tmov\t%r, %r\n", regno, next_hi_reg);
9221 high_regs_pushed --;
9223 if (high_regs_pushed)
9224 for (next_hi_reg--; next_hi_reg > LAST_LO_REGNUM;
9225 next_hi_reg--)
9227 if (regs_ever_live[next_hi_reg]
9228 && ! call_used_regs[next_hi_reg]
9229 && ! (TARGET_SINGLE_PIC_BASE
9230 && (next_hi_reg == arm_pic_register)))
9231 break;
9233 else
9235 mask &= ~ ((1 << regno) - 1);
9236 break;
9241 thumb_pushpop (f, mask, 1);
9244 if (pushable_regs == 0
9245 && (regs_ever_live[LAST_ARG_REGNUM]
9246 || ! call_used_regs[LAST_ARG_REGNUM]))
9247 asm_fprintf (f, "\tmov\t%r, %r\n", LAST_ARG_REGNUM, IP_REGNUM);
9251 /* Handle the case of a double word load into a low register from
9252 a computed memory address. The computed address may involve a
9253 register which is overwritten by the load. */
9255 const char *
9256 thumb_load_double_from_address (operands)
9257 rtx * operands;
9259 rtx addr;
9260 rtx base;
9261 rtx offset;
9262 rtx arg1;
9263 rtx arg2;
9265 if (GET_CODE (operands[0]) != REG)
9266 fatal ("thumb_load_double_from_address: destination is not a register");
9268 if (GET_CODE (operands[1]) != MEM)
9270 debug_rtx (operands[1]);
9271 fatal ("thumb_load_double_from_address: source is not a computed memory address");
9274 /* Get the memory address. */
9275 addr = XEXP (operands[1], 0);
9277 /* Work out how the memory address is computed. */
9278 switch (GET_CODE (addr))
9280 case REG:
9281 operands[2] = gen_rtx (MEM, SImode,
9282 plus_constant (XEXP (operands[1], 0), 4));
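9283 /* If the destination overlaps the base register, load the high word first so the address is not clobbered before the second load. */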
9284 if (REGNO (operands[0]) == REGNO (addr))
9286 output_asm_insn ("ldr\t%H0, %2", operands);
9287 output_asm_insn ("ldr\t%0, %1", operands);
9289 else
9291 output_asm_insn ("ldr\t%0, %1", operands);
9292 output_asm_insn ("ldr\t%H0, %2", operands);
9294 break;
9296 case CONST:
9297 /* Compute <address> + 4 for the high order load. */
9298 operands[2] = gen_rtx (MEM, SImode,
9299 plus_constant (XEXP (operands[1], 0), 4));
9301 output_asm_insn ("ldr\t%0, %1", operands);
9302 output_asm_insn ("ldr\t%H0, %2", operands);
9303 break;
9305 case PLUS:
9306 arg1 = XEXP (addr, 0);
9307 arg2 = XEXP (addr, 1);
9309 if (CONSTANT_P (arg1))
9310 base = arg2, offset = arg1;
9311 else
9312 base = arg1, offset = arg2;
9314 if (GET_CODE (base) != REG)
9315 fatal ("thumb_load_double_from_address: base is not a register");
9317 /* Catch the case of <address> = <reg> + <reg> */
9318 if (GET_CODE (offset) == REG)
9320 int reg_offset = REGNO (offset);
9321 int reg_base = REGNO (base);
9322 int reg_dest = REGNO (operands[0]);
9324 /* Add the base and offset registers together into the
9325 higher destination register. */
9326 asm_fprintf (asm_out_file, "\tadd\t%r, %r, %r",
9327 reg_dest + 1, reg_base, reg_offset);
9329 /* Load the lower destination register from the address in
9330 the higher destination register. */
9331 asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #0]",
9332 reg_dest, reg_dest + 1);
9334 /* Load the higher destination register from its own address
9335 plus 4. */
9336 asm_fprintf (asm_out_file, "\tldr\t%r, [%r, #4]",
9337 reg_dest + 1, reg_dest + 1);
9339 else
9341 /* Compute <address> + 4 for the high order load. */
9342 operands[2] = gen_rtx (MEM, SImode,
9343 plus_constant (XEXP (operands[1], 0), 4));
9345 /* If the computed address is held in the low order register
9346 then load the high order register first, otherwise always
9347 load the low order register first. */
9348 if (REGNO (operands[0]) == REGNO (base))
9350 output_asm_insn ("ldr\t%H0, %2", operands);
9351 output_asm_insn ("ldr\t%0, %1", operands);
9353 else
9355 output_asm_insn ("ldr\t%0, %1", operands);
9356 output_asm_insn ("ldr\t%H0, %2", operands);
9359 break;
9361 case LABEL_REF:
9362 /* With no registers to worry about we can just load the value
9363 directly. */
9364 operands[2] = gen_rtx (MEM, SImode,
9365 plus_constant (XEXP (operands[1], 0), 4));
9367 output_asm_insn ("ldr\t%H0, %2", operands);
9368 output_asm_insn ("ldr\t%0, %1", operands);
9369 break;
9371 default:
9372 debug_rtx (operands[1]);
9373 fatal ("thumb_load_double_from_address: Unhandled address calculation");
9374 break;
9377 return "";
9381 const char *
9382 thumb_output_move_mem_multiple (n, operands)
9383 int n;
9384 rtx * operands;
9386 rtx tmp;
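9387 /* ldmia/stmia transfer the lowest-numbered register to/from the lowest address, so sort the register operands into ascending order. */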
9388 switch (n)
9390 case 2:
9391 if (REGNO (operands[2]) > REGNO (operands[3]))
9393 tmp = operands[2];
9394 operands[2] = operands[3];
9395 operands[3] = tmp;
9397 output_asm_insn ("ldmia\t%1!, {%2, %3}", operands);
9398 output_asm_insn ("stmia\t%0!, {%2, %3}", operands);
9399 break;
9401 case 3:
9402 if (REGNO (operands[2]) > REGNO (operands[3]))
9404 tmp = operands[2];
9405 operands[2] = operands[3];
9406 operands[3] = tmp;
9408 if (REGNO (operands[3]) > REGNO (operands[4]))
9410 tmp = operands[3];
9411 operands[3] = operands[4];
9412 operands[4] = tmp;
9414 if (REGNO (operands[2]) > REGNO (operands[3]))
9416 tmp = operands[2];
9417 operands[2] = operands[3];
9418 operands[3] = tmp;
9421 output_asm_insn ("ldmia\t%1!, {%2, %3, %4}", operands);
9422 output_asm_insn ("stmia\t%0!, {%2, %3, %4}", operands);
9423 break;
9425 default:
9426 abort ();
9429 return "";
9432 /* Routines for generating rtl. */
9434 void
9435 thumb_expand_movstrqi (operands)
9436 rtx * operands;
9438 rtx out = copy_to_mode_reg (SImode, XEXP (operands[0], 0));
9439 rtx in = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
9440 HOST_WIDE_INT len = INTVAL (operands[2]);
9441 HOST_WIDE_INT offset = 0;
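9442 /* Copy in decreasing chunk sizes: 12-byte and 8-byte multi-register moves first, then word, halfword and byte copies for the tail. */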
9443 while (len >= 12)
9445 emit_insn (gen_movmem12b (out, in));
9446 len -= 12;
9449 if (len >= 8)
9451 emit_insn (gen_movmem8b (out, in));
9452 len -= 8;
9455 if (len >= 4)
9457 rtx reg = gen_reg_rtx (SImode);
9458 emit_insn (gen_movsi (reg, gen_rtx (MEM, SImode, in)));
9459 emit_insn (gen_movsi (gen_rtx (MEM, SImode, out), reg));
9460 len -= 4;
9461 offset += 4;
9464 if (len >= 2)
9466 rtx reg = gen_reg_rtx (HImode);
9467 emit_insn (gen_movhi (reg, gen_rtx (MEM, HImode,
9468 plus_constant (in, offset))));
9469 emit_insn (gen_movhi (gen_rtx (MEM, HImode, plus_constant (out, offset)),
9470 reg));
9471 len -= 2;
9472 offset += 2;
9475 if (len)
9477 rtx reg = gen_reg_rtx (QImode);
9478 emit_insn (gen_movqi (reg, gen_rtx (MEM, QImode,
9479 plus_constant (in, offset))));
9480 emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (out, offset)),
9481 reg));
9486 thumb_cmp_operand (op, mode)
9487 rtx op;
9488 enum machine_mode mode;
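9489 /* Thumb CMP with an immediate operand only accepts values 0..255. */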
9490 return ((GET_CODE (op) == CONST_INT
9491 && (unsigned HOST_WIDE_INT) (INTVAL (op)) < 256)
9492 || register_operand (op, mode));
9495 static const char *
9496 thumb_condition_code (x, invert)
9497 rtx x;
9498 int invert;
9500 static const char * conds[] =
9502 "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
9503 "hi", "ls", "ge", "lt", "gt", "le"
9505 int val;
9507 switch (GET_CODE (x))
9509 case EQ: val = 0; break;
9510 case NE: val = 1; break;
9511 case GEU: val = 2; break;
9512 case LTU: val = 3; break;
9513 case GTU: val = 8; break;
9514 case LEU: val = 9; break;
9515 case GE: val = 10; break;
9516 case LT: val = 11; break;
9517 case GT: val = 12; break;
9518 case LE: val = 13; break;
9519 default:
9520 abort ();
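9521 /* Adjacent entries in CONDS are inverse conditions, so XOR-ing
9522 the index with INVERT selects the opposite condition code. */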
9523 return conds[val ^ invert];
9526 /* Handle storing a half-word to memory during reload. */
9527 void
9528 thumb_reload_out_hi (operands)
9529 rtx * operands;
9531 emit_insn (gen_thumb_movhi_clobber (operands[0], operands[1], operands[2]));
9534 /* Handle reading a half-word from memory during reload. */
9535 void
9536 thumb_reload_in_hi (operands)
9537 rtx * operands ATTRIBUTE_UNUSED;
9539 abort ();
9542 /* Return the length of a function name prefix
9543 that starts with the character 'c'. */
9544 static int
9545 arm_get_strip_length (char c)
9547 switch (c)
9549 ARM_NAME_ENCODING_LENGTHS
9550 default: return 0;
9554 /* Return a pointer to a function's name with any
9555 and all prefix encodings stripped from it. */
9556 const char *
9557 arm_strip_name_encoding (const char * name)
9559 int skip;
9561 while ((skip = arm_get_strip_length (* name)))
9562 name += skip;
9564 return name;
9567 #ifdef AOF_ASSEMBLER
9568 /* Special functions only needed when producing AOF syntax assembler. */
9570 rtx aof_pic_label = NULL_RTX;
9571 struct pic_chain
9573 struct pic_chain * next;
9574 char * symname;
9577 static struct pic_chain * aof_pic_chain = NULL;
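9578 /* Return an expression for the address of symbol X's slot in the PIC
9579 pool, appending a new 4-byte slot to the chain if X is not yet there. */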
9580 aof_pic_entry (x)
9581 rtx x;
9583 struct pic_chain ** chainp;
9584 int offset;
9586 if (aof_pic_label == NULL_RTX)
9588 /* We mark this here and not in arm_add_gc_roots() to avoid
9589 polluting even more code with ifdefs, and because it never
9590 contains anything useful until we assign to it here. */
9591 ggc_add_rtx_root (& aof_pic_label, 1);
9592 /* This needs to persist throughout the compilation. */
9593 end_temporary_allocation ();
9594 aof_pic_label = gen_rtx_SYMBOL_REF (Pmode, "x$adcons");
9595 resume_temporary_allocation ();
9598 for (offset = 0, chainp = &aof_pic_chain; *chainp;
9599 offset += 4, chainp = &(*chainp)->next)
9600 if ((*chainp)->symname == XSTR (x, 0))
9601 return plus_constant (aof_pic_label, offset);
9603 *chainp = (struct pic_chain *) xmalloc (sizeof (struct pic_chain));
9604 (*chainp)->next = NULL;
9605 (*chainp)->symname = XSTR (x, 0);
9606 return plus_constant (aof_pic_label, offset);
9609 void
9610 aof_dump_pic_table (f)
9611 FILE * f;
9613 struct pic_chain * chain;
9615 if (aof_pic_chain == NULL)
9616 return;
9618 asm_fprintf (f, "\tAREA |%r$$adcons|, BASED %r\n",
9619 PIC_OFFSET_TABLE_REGNUM,
9620 PIC_OFFSET_TABLE_REGNUM);
9621 fputs ("|x$adcons|\n", f);
9623 for (chain = aof_pic_chain; chain; chain = chain->next)
9625 fputs ("\tDCD\t", f);
9626 assemble_name (f, chain->symname);
9627 fputs ("\n", f);
9631 int arm_text_section_count = 1;
9633 char *
9634 aof_text_section ()
9636 static char buf[100];
9637 sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
9638 arm_text_section_count++);
9639 if (flag_pic)
9640 strcat (buf, ", PIC, REENTRANT");
9641 return buf;
9644 static int arm_data_section_count = 1;
9646 char *
9647 aof_data_section ()
9649 static char buf[100];
9650 sprintf (buf, "\tAREA |C$$data%d|, DATA", arm_data_section_count++);
9651 return buf;
9654 /* The AOF assembler is religiously strict about declarations of
9655 imported and exported symbols, so that it is impossible to declare
9656 a function as imported near the beginning of the file, and then to
9657 export it later on. It is, however, possible to delay the decision
9658 until all the functions in the file have been compiled. To get
9659 around this, we maintain a list of the imports and exports, and
9660 delete from it any that are subsequently defined. At the end of
9661 compilation we spit the remainder of the list out before the END
9662 directive. */
9664 struct import
9666 struct import * next;
9667 char * name;
9670 static struct import * imports_list = NULL;
9672 void
9673 aof_add_import (name)
9674 char * name;
9676 struct import * new;
9678 for (new = imports_list; new; new = new->next)
9679 if (new->name == name)
9680 return;
9682 new = (struct import *) xmalloc (sizeof (struct import));
9683 new->next = imports_list;
9684 imports_list = new;
9685 new->name = name;
9688 void
9689 aof_delete_import (name)
9690 char * name;
9692 struct import ** old;
9694 for (old = &imports_list; *old; old = & (*old)->next)
9696 if ((*old)->name == name)
9698 *old = (*old)->next;
9699 return;
9704 int arm_main_function = 0;
9706 void
9707 aof_dump_imports (f)
9708 FILE * f;
9710 /* The AOF assembler needs this to cause the startup code to be extracted
9711 from the library. Bringing in __main causes the whole thing to work
9712 automagically. */
9713 if (arm_main_function)
9715 text_section ();
9716 fputs ("\tIMPORT __main\n", f);
9717 fputs ("\tDCD __main\n", f);
9720 /* Now dump the remaining imports. */
9721 while (imports_list)
9723 fprintf (f, "\tIMPORT\t");
9724 assemble_name (f, imports_list->name);
9725 fputc ('\n', f);
9726 imports_list = imports_list->next;
9729 #endif /* AOF_ASSEMBLER */