/* Output routines for GCC for ARM/RISCiX.
   Copyright (C) 1991, 93, 94, 95, 96, 1997 Free Software Foundation, Inc.
   Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
   and Martin Simmons (@harleqn.co.uk).
   More major hacks by Richard Earnshaw (rwe11@cl.cam.ac.uk)

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include <stdio.h>
#include <string.h>
#include "assert.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-flags.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "reload.h"
#include "tree.h"
#include "expr.h"

/* The maximum number of insns skipped which will be conditionalised if
   possible.  */
#define MAX_INSNS_SKIPPED  5

/* Some function declarations.  */
extern FILE *asm_out_file;

static HOST_WIDE_INT int_log2 PROTO ((HOST_WIDE_INT));
static char *output_multi_immediate PROTO ((rtx *, char *, char *, int,
                                            HOST_WIDE_INT));
static int arm_gen_constant PROTO ((enum rtx_code, enum machine_mode,
                                    HOST_WIDE_INT, rtx, rtx, int, int));
static int arm_naked_function_p PROTO ((tree));
static void init_fpa_table PROTO ((void));
static enum machine_mode select_dominance_cc_mode PROTO ((enum rtx_code, rtx,
                                                          rtx, HOST_WIDE_INT));
static HOST_WIDE_INT add_constant PROTO ((rtx, enum machine_mode));
static void dump_table PROTO ((rtx));
static int fixit PROTO ((rtx, enum machine_mode, int));
static rtx find_barrier PROTO ((rtx, int));
static int broken_move PROTO ((rtx));
static char *fp_const_from_val PROTO ((REAL_VALUE_TYPE *));
static int eliminate_lr2ip PROTO ((rtx *));
static char *shift_op PROTO ((rtx, HOST_WIDE_INT *));
static int pattern_really_clobbers_lr PROTO ((rtx));
static int function_really_clobbers_lr PROTO ((rtx));
static void emit_multi_reg_push PROTO ((int));
static void emit_sfm PROTO ((int, int));
static enum arm_cond_code get_arm_condition_code PROTO ((rtx));

/* Define the information needed to generate branch insns.  This is
   stored from the compare operation.  */

rtx arm_compare_op0, arm_compare_op1;
int arm_compare_fp;

/* What type of cpu are we compiling for?  */
enum processor_type arm_cpu;

/* What type of floating point are we tuning for?  */
enum floating_point_type arm_fpu;

/* What type of floating point instructions are available?  */
enum floating_point_type arm_fpu_arch;

/* What program mode is the cpu running in?  26-bit mode or 32-bit mode.  */
enum prog_mode_type arm_prgmode;

/* Set by the -mfp=... option.  */
char *target_fp_name = NULL;

/* Nonzero if this is an "M" variant of the processor.  */
int arm_fast_multiply = 0;

/* Nonzero if this chip supports the ARM Architecture 4 extensions.  */
int arm_arch4 = 0;

/* Set to the features we should tune the code for (multiply speed etc).  */
int tune_flags = 0;

/* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
   must report the mode of the memory reference from PRINT_OPERAND to
   PRINT_OPERAND_ADDRESS.  */
enum machine_mode output_memory_reference_mode;

/* Nonzero if the prologue must set up `fp'.  */
int current_function_anonymous_args;

/* The register number to be used for the PIC offset register.  */
int arm_pic_register = 9;

/* Location counter of .text segment.  */
int arm_text_location = 0;

/* Set to one if we think that lr is only saved because of subroutine calls,
   but all of these can be `put after' return insns.  */
int lr_save_eliminated;

/* Set to 1 when a return insn is output, this means that the epilogue
   is not needed.  */
static int return_used_this_function;

static int arm_constant_limit = 3;

/* For an explanation of these variables, see final_prescan_insn below.  */
int arm_ccfsm_state;
enum arm_cond_code arm_current_cc;
rtx arm_target_insn;
int arm_target_label;

/* The condition codes of the ARM, and the inverse function.  */
char *arm_condition_codes[] =
{
  "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
  "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
};

static enum arm_cond_code get_arm_condition_code ();

/* Initialization code */

struct arm_cpu_select arm_select[4] =
{
  /* switch       name,           tune    arch */
  { (char *)0,    "--with-cpu=",  1,      1 },
  { (char *)0,    "-mcpu=",       1,      1 },
  { (char *)0,    "-march=",      0,      1 },
  { (char *)0,    "-mtune=",      1,      0 },
};

#define FL_CO_PROC    0x01            /* Has external co-processor bus */
#define FL_FAST_MULT  0x02            /* Fast multiply */
#define FL_MODE26     0x04            /* 26-bit mode support */
#define FL_MODE32     0x08            /* 32-bit mode support */
#define FL_ARCH4      0x10            /* Architecture rel 4 */
#define FL_THUMB      0x20            /* Thumb aware */

struct processors
{
  char *name;
  enum processor_type type;
  unsigned int flags;
};

/* Not all of these give usefully different compilation alternatives,
   but there is no simple way of generalizing them.  */
static struct processors all_procs[] =
{
  {"arm2",      PROCESSOR_ARM2, FL_CO_PROC | FL_MODE26},
  {"arm250",    PROCESSOR_ARM2, FL_CO_PROC | FL_MODE26},
  {"arm3",      PROCESSOR_ARM2, FL_CO_PROC | FL_MODE26},
  {"arm6",      PROCESSOR_ARM6, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm600",    PROCESSOR_ARM6, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm610",    PROCESSOR_ARM6, FL_MODE32 | FL_MODE26},
  {"arm7",      PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  /* arm7m doesn't exist on its own, only in conjunction with D (and I), but
     those don't alter the code, so arm7m is sometimes used.  */
  {"arm7m",     PROCESSOR_ARM7, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
                                 | FL_MODE26)},
  {"arm7dm",    PROCESSOR_ARM7, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
                                 | FL_MODE26)},
  {"arm7dmi",   PROCESSOR_ARM7, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
                                 | FL_MODE26)},
  {"arm700",    PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm710",    PROCESSOR_ARM7, FL_MODE32 | FL_MODE26},
  {"arm7100",   PROCESSOR_ARM7, FL_MODE32 | FL_MODE26},
  {"arm7500",   PROCESSOR_ARM7, FL_MODE32 | FL_MODE26},
  /* Doesn't really have an external co-proc, but does have embedded fpu.  */
  {"arm7500fe", PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm7tdmi",  PROCESSOR_ARM7, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
                                 | FL_ARCH4 | FL_THUMB)},
  {"arm8",      PROCESSOR_ARM8, (FL_FAST_MULT | FL_MODE32 | FL_MODE26
                                 | FL_ARCH4)},
  {"arm810",    PROCESSOR_ARM8, (FL_FAST_MULT | FL_MODE32 | FL_MODE26
                                 | FL_ARCH4)},
  {"strongarm", PROCESSOR_STARM, (FL_FAST_MULT | FL_MODE32 | FL_MODE26
                                  | FL_ARCH4)},
  {"strongarm110", PROCESSOR_STARM, (FL_FAST_MULT | FL_MODE32 | FL_MODE26
                                     | FL_ARCH4)},
  {"armv2",     PROCESSOR_NONE, FL_CO_PROC | FL_MODE26},
  {"armv2a",    PROCESSOR_NONE, FL_CO_PROC | FL_MODE26},
  {"armv3",     PROCESSOR_NONE, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"armv3m",    PROCESSOR_NONE, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
                                 | FL_MODE26)},
  {"armv4",     PROCESSOR_NONE, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
                                 | FL_MODE26 | FL_ARCH4)},
  /* Strictly, FL_MODE26 is a permitted option for v4t, but there are no
     implementations that support it, so we will leave it out for now.  */
  {"armv4t",    PROCESSOR_NONE, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
                                 | FL_ARCH4)},
  {NULL, 0, 0}
};

/* Fix up any incompatible options that the user has specified.
   This has now turned into a maze.  */
void
arm_override_options ()
{
  int arm_thumb_aware = 0;
  int flags = 0;
  int i;
  struct arm_cpu_select *ptr;
  static struct cpu_default {
    int cpu;
    char *name;
  } cpu_defaults[] = {
    { TARGET_CPU_arm2, "arm2" },
    { TARGET_CPU_arm6, "arm6" },
    { TARGET_CPU_arm610, "arm610" },
    { TARGET_CPU_arm7dm, "arm7dm" },
    { TARGET_CPU_arm7500fe, "arm7500fe" },
    { TARGET_CPU_arm7tdmi, "arm7tdmi" },
    { TARGET_CPU_arm8, "arm8" },
    { TARGET_CPU_arm810, "arm810" },
    { TARGET_CPU_strongarm, "strongarm" },
    { 0, 0 }
  };
  struct cpu_default *def;

  /* Set the default.  */
  for (def = &cpu_defaults[0]; def->name; ++def)
    if (def->cpu == TARGET_CPU_DEFAULT)
      break;
  if (! def->name)
    abort ();

  arm_select[0].string = def->name;

  for (i = 0; i < sizeof (arm_select) / sizeof (arm_select[0]); i++)
    {
      ptr = &arm_select[i];
      if (ptr->string != (char *)0 && ptr->string[0] != '\0')
        {
          struct processors *sel;

          for (sel = all_procs; sel->name != NULL; sel++)
            if (! strcmp (ptr->string, sel->name))
              {
                /* -march= is the only flag that can take an architecture
                   type, so if we match when the tune bit is set, the
                   option was invalid.  */
                if (ptr->set_tune_p)
                  {
                    if (sel->type == PROCESSOR_NONE)
                      continue;  /* It's an architecture, not a cpu.  */

                    arm_cpu = sel->type;
                    tune_flags = sel->flags;
                  }

                if (ptr->set_arch_p)
                  flags = sel->flags;

                break;
              }

          if (sel->name == NULL)
            error ("bad value (%s) for %s switch", ptr->string, ptr->name);
        }
    }

  if (write_symbols != NO_DEBUG && flag_omit_frame_pointer)
    warning ("-g with -fomit-frame-pointer may not give sensible debugging");

  if (TARGET_POKE_FUNCTION_NAME)
    target_flags |= ARM_FLAG_APCS_FRAME;

  if (TARGET_6)
    warning ("Option '-m6' deprecated.  Use: '-mapcs-32' or -mcpu=<proc>");

  if (TARGET_3)
    warning ("Option '-m3' deprecated.  Use: '-mapcs-26' or -mcpu=<proc>");

  if (TARGET_APCS_REENT && flag_pic)
    fatal ("-fpic and -mapcs-reent are incompatible");

  if (TARGET_APCS_REENT)
    warning ("APCS reentrant code not supported.");

  /* If stack checking is disabled, we can use r10 as the PIC register,
     which keeps r9 available.  */
  if (flag_pic && ! TARGET_APCS_STACK)
    arm_pic_register = 10;

  /* Well, I'm about to have a go, but pic is NOT going to be compatible
     with APCS reentrancy, since that requires too much support in the
     assembler and linker, and the ARMASM assembler seems to lack some
     required directives.  */
  if (flag_pic)
    warning ("Position independent code not supported.  Ignored");

  if (TARGET_APCS_FLOAT)
    warning ("Passing floating point arguments in fp regs not yet supported");

  if (TARGET_APCS_STACK && ! TARGET_APCS)
    {
      warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
      target_flags |= ARM_FLAG_APCS_FRAME;
    }

  /* Default is to tune for an FPA.  */
  arm_fpu = FP_HARD;

  /* Default value for floating point code... if no co-processor
     bus, then schedule for emulated floating point.  Otherwise,
     assume the user has an FPA.
     Note: this does not prevent use of floating point instructions,
     -msoft-float does that.  */
  if ((tune_flags & FL_CO_PROC) == 0)
    arm_fpu = FP_SOFT3;

  arm_fast_multiply = (flags & FL_FAST_MULT) != 0;
  arm_arch4 = (flags & FL_ARCH4) != 0;
  arm_thumb_aware = (flags & FL_THUMB) != 0;

  if (target_fp_name)
    {
      if (strcmp (target_fp_name, "2") == 0)
        arm_fpu_arch = FP_SOFT2;
      else if (strcmp (target_fp_name, "3") == 0)
        arm_fpu_arch = FP_HARD;
      else
        fatal ("Invalid floating point emulation option: -mfpe=%s",
               target_fp_name);
    }
  else
    arm_fpu_arch = FP_DEFAULT;

  if (TARGET_THUMB_INTERWORK && ! arm_thumb_aware)
    {
      warning ("This processor variant does not support Thumb interworking");
      target_flags &= ~ARM_FLAG_THUMB;
    }

  if (TARGET_FPE && arm_fpu != FP_HARD)
    arm_fpu = FP_SOFT2;

  /* For arm2/3 there is no need to do any scheduling if there is only
     a floating point emulator, or we are doing software floating-point.  */
  if ((TARGET_SOFT_FLOAT || arm_fpu != FP_HARD) && arm_cpu == PROCESSOR_ARM2)
    flag_schedule_insns = flag_schedule_insns_after_reload = 0;

  arm_prgmode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;
}

/* Return 1 if it is possible to return using a single instruction.  */

int
use_return_insn ()
{
  int regno;

  if (! reload_completed || current_function_pretend_args_size
      || current_function_anonymous_args
      || ((get_frame_size () + current_function_outgoing_args_size != 0)
          && ! (TARGET_APCS || frame_pointer_needed)))
    return 0;

  /* Can't be done if interworking with Thumb, and any registers have been
     stacked.  */
  if (TARGET_THUMB_INTERWORK)
    for (regno = 0; regno < 16; regno++)
      if (regs_ever_live[regno] && ! call_used_regs[regno])
        return 0;

  /* Can't be done if any of the FPU regs are pushed, since this also
     requires an insn.  */
  for (regno = 16; regno < 24; regno++)
    if (regs_ever_live[regno] && ! call_used_regs[regno])
      return 0;

  /* If a function is naked, don't use the "return" insn.  */
  if (arm_naked_function_p (current_function_decl))
    return 0;

  return 1;
}

/* Return TRUE if int I is a valid immediate ARM constant.  */

int
const_ok_for_arm (i)
     HOST_WIDE_INT i;
{
  unsigned HOST_WIDE_INT mask = ~0xFF;

  /* For machines with >32 bit HOST_WIDE_INT, the bits above bit 31 must
     be all zero, or all one.  */
  if ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff) != 0
      && ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff)
          != (((HOST_WIDE_INT) -1) & ~(unsigned HOST_WIDE_INT) 0xffffffff)))
    return FALSE;

  /* Fast return for 0 and powers of 2.  */
  if ((i & (i - 1)) == 0)
    return TRUE;

  do
    {
      if ((i & mask & (unsigned HOST_WIDE_INT) 0xffffffff) == 0)
        return TRUE;
      mask =
        (mask << 2) | ((mask & (unsigned HOST_WIDE_INT) 0xffffffff)
                       >> (32 - 2)) | ~((unsigned HOST_WIDE_INT) 0xffffffff);
    } while (mask != ~0xFF);

  return FALSE;
}

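/* Illustrative examples (added commentary, not part of the original
   source): an ARM data-processing immediate is an 8-bit value rotated
   right by an even amount, which is what the loop above searches for.
   So const_ok_for_arm accepts, e.g.:

       0x000000FF   (0xFF, rotation 0)
       0x00000FF0   (0xFF rotated right by 28)
       0x000003FC   (0xFF rotated right by 30)
       0xFF000000   (0xFF rotated right by 8)

   but rejects values whose set bits span more than 8 bits, such as
   0x00000101 or 0x00FF00FF; those need more than one instruction.  */
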
/* Return true if I is a valid constant for the operation CODE.  */
int
const_ok_for_op (i, code, mode)
     HOST_WIDE_INT i;
     enum rtx_code code;
     enum machine_mode mode;
{
  if (const_ok_for_arm (i))
    return 1;

  switch (code)
    {
    case PLUS:
      return const_ok_for_arm (ARM_SIGN_EXTEND (-i));

    case MINUS:  /* Should only occur with (MINUS I reg) => rsb */
    case XOR:
    case IOR:
      return 0;

    case AND:
      return const_ok_for_arm (ARM_SIGN_EXTEND (~i));

    default:
      abort ();
    }
}

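/* For example (added commentary): a PLUS of -256 (0xFFFFFF00) is not a
   valid immediate, but its negation 0x100 is, so the addition can be
   emitted as a subtract; likewise an AND with ~0xFF passes the test on
   the inverted value and can be emitted as BIC #0xFF.  */
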
/* Emit a sequence of insns to handle a large constant.
   CODE is the code of the operation required, it can be any of SET, PLUS,
   IOR, AND, XOR, MINUS;
   MODE is the mode in which the operation is being performed;
   VAL is the integer to operate on;
   SOURCE is the other operand (a register, or a null-pointer for SET);
   SUBTARGETS means it is safe to create scratch registers if that will
   either produce a simpler sequence, or we will want to cse the values.
   Return value is the number of insns emitted.  */

int
arm_split_constant (code, mode, val, target, source, subtargets)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
{
  if (subtargets || code == SET
      || (GET_CODE (target) == REG && GET_CODE (source) == REG
          && REGNO (target) != REGNO (source)))
    {
      rtx temp;

      if (arm_gen_constant (code, mode, val, target, source, 1, 0)
          > arm_constant_limit + (code != SET))
        {
          if (code == SET)
            {
              /* Currently SET is the only monadic value for CODE, all
                 the rest are dyadic.  */
              emit_insn (gen_rtx (SET, VOIDmode, target, GEN_INT (val)));
              return 1;
            }
          else
            {
              rtx temp = subtargets ? gen_reg_rtx (mode) : target;

              emit_insn (gen_rtx (SET, VOIDmode, temp, GEN_INT (val)));
              /* For MINUS, the value is subtracted from, since we never
                 have subtraction of a constant.  */
              if (code == MINUS)
                emit_insn (gen_rtx (SET, VOIDmode, target,
                                    gen_rtx (code, mode, temp, source)));
              else
                emit_insn (gen_rtx (SET, VOIDmode, target,
                                    gen_rtx (code, mode, source, temp)));
              return 2;
            }
        }
    }

  return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
}

/* As above, but extra parameter GENERATE which, if clear, suppresses
   RTL generation.  */

int
arm_gen_constant (code, mode, val, target, source, subtargets, generate)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
     int generate;
{
  int can_add = 0;
  int can_invert = 0;
  int can_negate = 0;
  int can_negate_initial = 0;
  int can_shift = 0;
  int i;
  int num_bits_set = 0;
  int set_sign_bit_copies = 0;
  int clear_sign_bit_copies = 0;
  int clear_zero_bit_copies = 0;
  int set_zero_bit_copies = 0;
  int insns = 0;
  rtx new_src;
  unsigned HOST_WIDE_INT temp1, temp2;
  unsigned HOST_WIDE_INT remainder = val & 0xffffffff;

  /* Find out which operations are safe for a given CODE.  Also do a quick
     check for degenerate cases; these can occur when DImode operations
     are split.  */
  switch (code)
    {
    case SET:
      can_invert = 1;
      can_shift = 1;
      can_negate = 1;
      break;

    case PLUS:
      can_negate = 1;
      can_negate_initial = 1;
      break;

    case IOR:
      if (remainder == 0xffffffff)
        {
          if (generate)
            emit_insn (gen_rtx (SET, VOIDmode, target,
                                GEN_INT (ARM_SIGN_EXTEND (val))));
          return 1;
        }
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx (SET, VOIDmode, target, source));
          return 1;
        }
      break;

    case AND:
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx (SET, VOIDmode, target, const0_rtx));
          return 1;
        }
      if (remainder == 0xffffffff)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx (SET, VOIDmode, target, source));
          return 1;
        }
      can_invert = 1;
      break;

    case XOR:
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx (SET, VOIDmode, target, source));
          return 1;
        }
      if (remainder == 0xffffffff)
        {
          if (generate)
            emit_insn (gen_rtx (SET, VOIDmode, target,
                                gen_rtx (NOT, mode, source)));
          return 1;
        }

      /* We don't know how to handle this yet below.  */
      abort ();

    case MINUS:
      /* We treat MINUS as (val - source), since (source - val) is always
         passed as (source + (-val)).  */
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx (SET, VOIDmode, target,
                                gen_rtx (NEG, mode, source)));
          return 1;
        }
      if (const_ok_for_arm (val))
        {
          if (generate)
            emit_insn (gen_rtx (SET, VOIDmode, target,
                                gen_rtx (MINUS, mode, GEN_INT (val), source)));
          return 1;
        }
      can_negate = 1;

      break;

    default:
      abort ();
    }

  /* If we can do it in one insn get out quickly.  */
  if (const_ok_for_arm (val)
      || (can_negate_initial && const_ok_for_arm (-val))
      || (can_invert && const_ok_for_arm (~val)))
    {
      if (generate)
        emit_insn (gen_rtx (SET, VOIDmode, target,
                            (source ? gen_rtx (code, mode, source,
                                               GEN_INT (val))
                             : GEN_INT (val))));
      return 1;
    }

  /* Calculate a few attributes that may be useful for specific
     optimizations.  */
  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) == 0)
        clear_sign_bit_copies++;
      else
        break;
    }

  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) != 0)
        set_sign_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) == 0)
        clear_zero_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) != 0)
        set_zero_bit_copies++;
      else
        break;
    }

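/* Example (added commentary): for remainder == 0x0001f800 the loops
   above compute clear_sign_bit_copies == 15 (bits 31..17 are clear),
   set_sign_bit_copies == 0, clear_zero_bit_copies == 11 (bits 0..10
   are clear) and set_zero_bit_copies == 0.  */
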
  switch (code)
    {
    case SET:
      /* See if we can do this by sign_extending a constant that is known
         to be negative.  This is a good way of doing it, since the shift
         may well merge into a subsequent insn.  */
      if (set_sign_bit_copies > 1)
        {
          if (const_ok_for_arm
              (temp1 = ARM_SIGN_EXTEND (remainder
                                        << (set_sign_bit_copies - 1))))
            {
              if (generate)
                {
                  new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx (SET, VOIDmode, new_src,
                                      GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
          /* For an inverted constant, we will need to set the low bits,
             these will be shifted out of harm's way.  */
          temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
          if (const_ok_for_arm (~temp1))
            {
              if (generate)
                {
                  new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx (SET, VOIDmode, new_src,
                                      GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
        }

      /* See if we can generate this by setting the bottom (or the top)
         16 bits, and then shifting these into the other half of the
         word.  We only look for the simplest cases, to do more would cost
         too much.  Be careful, however, not to generate this when the
         alternative would take fewer insns.  */
      if (val & 0xffff0000)
        {
          temp1 = remainder & 0xffff0000;
          temp2 = remainder & 0x0000ffff;

          /* Overlaps outside this range are best done using other methods.  */
          for (i = 9; i < 24; i++)
            {
              if ((((temp2 | (temp2 << i)) & 0xffffffff) == remainder)
                  && ! const_ok_for_arm (temp2))
                {
                  insns = arm_gen_constant (code, mode, temp2,
                                            new_src = (subtargets
                                                       ? gen_reg_rtx (mode)
                                                       : target),
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn (gen_rtx (SET, VOIDmode, target,
                                        gen_rtx (IOR, mode,
                                                 gen_rtx (ASHIFT, mode, source,
                                                          GEN_INT (i)),
                                                 source)));
                  return insns + 1;
                }
            }

          /* Don't duplicate cases already considered.  */
          for (i = 17; i < 24; i++)
            {
              if (((temp1 | (temp1 >> i)) == remainder)
                  && ! const_ok_for_arm (temp1))
                {
                  insns = arm_gen_constant (code, mode, temp1,
                                            new_src = (subtargets
                                                       ? gen_reg_rtx (mode)
                                                       : target),
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn (gen_rtx (SET, VOIDmode, target,
                                        gen_rtx (IOR, mode,
                                                 gen_rtx (LSHIFTRT, mode,
                                                          source, GEN_INT (i)),
                                                 source)));
                  return insns + 1;
                }
            }
        }
      break;

    case IOR:
    case XOR:
      /* If we have IOR or XOR, and the constant can be loaded in a
         single instruction, and we can find a temporary to put it in,
         then this can be done in two instructions instead of 3-4.  */
      if (subtargets
          || (reload_completed && ! reg_mentioned_p (target, source)))
        {
          if (const_ok_for_arm (ARM_SIGN_EXTEND (~ val)))
            {
              if (generate)
                {
                  rtx sub = subtargets ? gen_reg_rtx (mode) : target;

                  emit_insn (gen_rtx (SET, VOIDmode, sub, GEN_INT (val)));
                  emit_insn (gen_rtx (SET, VOIDmode, target,
                                      gen_rtx (code, mode, source, sub)));
                }
              return 2;
            }
        }

      if (code == XOR)
        break;

      if (set_sign_bit_copies > 8
          && (val & (-1 << (32 - set_sign_bit_copies))) == val)
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (set_sign_bit_copies);

              emit_insn (gen_rtx (SET, VOIDmode, sub,
                                  gen_rtx (NOT, mode,
                                           gen_rtx (ASHIFT, mode, source,
                                                    shift))));
              emit_insn (gen_rtx (SET, VOIDmode, target,
                                  gen_rtx (NOT, mode,
                                           gen_rtx (LSHIFTRT, mode, sub,
                                                    shift))));
            }
          return 2;
        }

      if (set_zero_bit_copies > 8
          && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (set_zero_bit_copies);

              emit_insn (gen_rtx (SET, VOIDmode, sub,
                                  gen_rtx (NOT, mode,
                                           gen_rtx (LSHIFTRT, mode, source,
                                                    shift))));
              emit_insn (gen_rtx (SET, VOIDmode, target,
                                  gen_rtx (NOT, mode,
                                           gen_rtx (ASHIFT, mode, sub,
                                                    shift))));
            }
          return 2;
        }

      if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~ val)))
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              emit_insn (gen_rtx (SET, VOIDmode, sub,
                                  gen_rtx (NOT, mode, source)));
              source = sub;
              if (subtargets)
                sub = gen_reg_rtx (mode);
              emit_insn (gen_rtx (SET, VOIDmode, sub,
                                  gen_rtx (AND, mode, source,
                                           GEN_INT (temp1))));
              emit_insn (gen_rtx (SET, VOIDmode, target,
                                  gen_rtx (NOT, mode, sub)));
            }
          return 3;
        }
      break;

    case AND:
      /* See if two shifts will do 2 or more insn's worth of work.  */
      if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
        {
          HOST_WIDE_INT shift_mask = ((0xffffffff
                                       << (32 - clear_sign_bit_copies))
                                      & 0xffffffff);
          rtx new_source;
          rtx shift;

          if ((remainder | shift_mask) != 0xffffffff)
            {
              if (generate)
                {
                  new_source = subtargets ? gen_reg_rtx (mode) : target;
                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            new_source, source, subtargets, 1);
                  source = new_source;
                }
              else
                insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                          new_source, source, subtargets, 0);
            }

          if (generate)
            {
              shift = GEN_INT (clear_sign_bit_copies);
              new_source = subtargets ? gen_reg_rtx (mode) : target;
              emit_insn (gen_ashlsi3 (new_source, source, shift));
              emit_insn (gen_lshrsi3 (target, new_source, shift));
            }

          return insns + 2;
        }

      if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
        {
          HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;
          rtx new_source;
          rtx shift;

          if ((remainder | shift_mask) != 0xffffffff)
            {
              if (generate)
                {
                  new_source = subtargets ? gen_reg_rtx (mode) : target;
                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            new_source, source, subtargets, 1);
                  source = new_source;
                }
              else
                insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                          new_source, source, subtargets, 0);
            }

          if (generate)
            {
              shift = GEN_INT (clear_zero_bit_copies);
              new_source = subtargets ? gen_reg_rtx (mode) : target;
              emit_insn (gen_lshrsi3 (new_source, source, shift));
              emit_insn (gen_ashlsi3 (target, new_source, shift));
            }

          return insns + 2;
        }

      break;

    default:
      break;
    }

  for (i = 0; i < 32; i++)
    if (remainder & (1 << i))
      num_bits_set++;

  if (code == AND || (can_invert && num_bits_set > 16))
    remainder = (~remainder) & 0xffffffff;
  else if (code == PLUS && num_bits_set > 16)
    remainder = (-remainder) & 0xffffffff;
  else
    {
      can_invert = 0;
      can_negate = 0;
    }

  /* Now try and find a way of doing the job in either two or three
     instructions.
     We start by looking for the largest block of zeros that are aligned on
     a 2-bit boundary, we then fill up the temps, wrapping around to the
     top of the word when we drop off the bottom.
     In the worst case this code should produce no more than four insns.  */
  {
    int best_start = 0;
    int best_consecutive_zeros = 0;

    for (i = 0; i < 32; i += 2)
      {
        int consecutive_zeros = 0;

        if (! (remainder & (3 << i)))
          {
            while ((i < 32) && ! (remainder & (3 << i)))
              {
                consecutive_zeros += 2;
                i += 2;
              }
            if (consecutive_zeros > best_consecutive_zeros)
              {
                best_consecutive_zeros = consecutive_zeros;
                best_start = i - consecutive_zeros;
              }
            i -= 2;
          }
      }

    /* Now start emitting the insns, starting with the one with the highest
       bit set: we do this so that the smallest number will be emitted last;
       this is more likely to be combinable with addressing insns.  */
    i = best_start;
    do
      {
        int end;

        if (i <= 0)
          i += 32;
        if (remainder & (3 << (i - 2)))
          {
            end = i - 8;
            if (end < 0)
              end += 32;
            temp1 = remainder & ((0x0ff << end)
                                 | ((i < end) ? (0xff >> (32 - end)) : 0));
            remainder &= ~temp1;

            if (code == SET)
              {
                if (generate)
                  emit_insn (gen_rtx (SET, VOIDmode,
                                      new_src = (subtargets
                                                 ? gen_reg_rtx (mode)
                                                 : target),
                                      GEN_INT (can_invert ? ~temp1 : temp1)));
                can_invert = 0;
                code = PLUS;
              }
            else if (code == MINUS)
              {
                if (generate)
                  emit_insn (gen_rtx (SET, VOIDmode,
                                      new_src = (subtargets
                                                 ? gen_reg_rtx (mode)
                                                 : target),
                                      gen_rtx (code, mode, GEN_INT (temp1),
                                               source)));
                code = PLUS;
              }
            else
              {
                if (generate)
                  emit_insn (gen_rtx (SET, VOIDmode,
                                      new_src = (remainder
                                                 ? (subtargets
                                                    ? gen_reg_rtx (mode)
                                                    : target)
                                                 : target),
                                      gen_rtx (code, mode, source,
                                               GEN_INT (can_invert ? ~temp1
                                                        : (can_negate
                                                           ? -temp1
                                                           : temp1)))));
              }

            insns++;
            source = new_src;
            i -= 6;
          }
        i -= 2;
      } while (remainder);
  }

  return insns;
}

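/* Worked example (added commentary): loading 0x00FF00FF with
   code == SET takes two instructions, since neither the value nor its
   inverse is a single rotated 8-bit immediate.  The block loop above
   moves one 8-bit block into the target and then adds in the other,
   roughly (rD standing for whatever target register is chosen):

       mov     rD, #0x000000FF
       add     rD, rD, #0x00FF0000
*/
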
/* Canonicalize a comparison so that we are more likely to recognize it.
   This can be done for a few constant compares, where we can make the
   immediate value easier to load.  */
enum rtx_code
arm_canonicalize_comparison (code, op1)
     enum rtx_code code;
     rtx *op1;
{
  HOST_WIDE_INT i = INTVAL (*op1);

  switch (code)
    {
    case EQ:
    case NE:
      return code;

    case GT:
    case LE:
      if (i != ((1 << (HOST_BITS_PER_WIDE_INT - 1)) - 1)
          && (const_ok_for_arm (i+1) || const_ok_for_arm (- (i+1))))
        {
          *op1 = GEN_INT (i+1);
          return code == GT ? GE : LT;
        }
      break;

    case GE:
    case LT:
      if (i != (1 << (HOST_BITS_PER_WIDE_INT - 1))
          && (const_ok_for_arm (i-1) || const_ok_for_arm (- (i-1))))
        {
          *op1 = GEN_INT (i-1);
          return code == GE ? GT : LE;
        }
      break;

    case GTU:
    case LEU:
      if (i != ~0
          && (const_ok_for_arm (i+1) || const_ok_for_arm (- (i+1))))
        {
          *op1 = GEN_INT (i + 1);
          return code == GTU ? GEU : LTU;
        }
      break;

    case GEU:
    case LTU:
      if (i != 0
          && (const_ok_for_arm (i - 1) || const_ok_for_arm (- (i - 1))))
        {
          *op1 = GEN_INT (i - 1);
          return code == GEU ? GTU : LEU;
        }
      break;

    default:
      abort ();
    }

  return code;
}

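/* Example (added commentary): for (GT x 0xFFF), 0xFFF is not a valid
   ARM immediate but 0x1000 is, so the code above rewrites the test as
   (GE x 0x1000), which the comparison patterns can then match with a
   single cmp instruction.  */
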
/* Handle aggregates that are not laid out in a BLKmode element.
   This is a sub-element of RETURN_IN_MEMORY.  */
int
arm_return_in_memory (type)
     tree type;
{
  if (TREE_CODE (type) == RECORD_TYPE)
    {
      tree field;

      /* For a struct, we can return in a register if every element was a
         bit-field.  */
      for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
        if (TREE_CODE (field) != FIELD_DECL
            || ! DECL_BIT_FIELD_TYPE (field))
          return 1;

      return 0;
    }
  else if (TREE_CODE (type) == UNION_TYPE)
    {
      tree field;

      /* Unions can be returned in registers if every element is
         integral, or can be returned in an integer register.  */
      for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
        {
          if (TREE_CODE (field) != FIELD_DECL
              || (AGGREGATE_TYPE_P (TREE_TYPE (field))
                  && RETURN_IN_MEMORY (TREE_TYPE (field)))
              || FLOAT_TYPE_P (TREE_TYPE (field)))
            return 1;
        }

      return 0;
    }

  /* XXX Not sure what should be done for other aggregates, so put them in
     memory.  */
  return 1;
}

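/* Example (added commentary): under the rules above a struct whose
   members are all bit-fields, such as

       struct { int a : 8; int b : 24; };

   is returned in a register, while a struct with any non-bit-field
   member, or a union containing an aggregate or floating-point member,
   is returned in memory.  */
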
int
legitimate_pic_operand_p (x)
     rtx x;
{
  if (CONSTANT_P (x) && flag_pic
      && (GET_CODE (x) == SYMBOL_REF
          || (GET_CODE (x) == CONST
              && GET_CODE (XEXP (x, 0)) == PLUS
              && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)))
    return 0;

  return 1;
}

rtx
legitimize_pic_address (orig, mode, reg)
     rtx orig;
     enum machine_mode mode;
     rtx reg;
{
  if (GET_CODE (orig) == SYMBOL_REF)
    {
      rtx pic_ref, address;
      rtx insn;
      int subregs = 0;

      if (reg == 0)
        {
          if (reload_in_progress || reload_completed)
            abort ();
          else
            reg = gen_reg_rtx (Pmode);

          subregs = 1;
        }

#ifdef AOF_ASSEMBLER
      /* The AOF assembler can generate relocations for these directly, and
         understands that the PIC register has to be added into the
         offset.  */
      insn = emit_insn (gen_pic_load_addr_based (reg, orig));
#else
      if (subregs)
        address = gen_reg_rtx (Pmode);
      else
        address = reg;

      emit_insn (gen_pic_load_addr (address, orig));

      pic_ref = gen_rtx (MEM, Pmode,
                         gen_rtx (PLUS, Pmode, pic_offset_table_rtx, address));
      RTX_UNCHANGING_P (pic_ref) = 1;
      insn = emit_move_insn (reg, pic_ref);
#endif
      current_function_uses_pic_offset_table = 1;
      /* Put a REG_EQUAL note on this insn, so that it can be optimized
         by loop.  */
      REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_EQUAL, orig,
                                  REG_NOTES (insn));
      return reg;
    }
  else if (GET_CODE (orig) == CONST)
    {
      rtx base, offset;

      if (GET_CODE (XEXP (orig, 0)) == PLUS
          && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
        return orig;

      if (reg == 0)
        {
          if (reload_in_progress || reload_completed)
            abort ();
          else
            reg = gen_reg_rtx (Pmode);
        }

      if (GET_CODE (XEXP (orig, 0)) == PLUS)
        {
          base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
          offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
                                           base == reg ? 0 : reg);
        }
      else
        abort ();

      if (GET_CODE (offset) == CONST_INT)
        {
          /* The base register doesn't really matter, we only want to
             test the index for the appropriate mode.  */
          GO_IF_LEGITIMATE_INDEX (mode, 0, offset, win);

          if (! reload_in_progress && ! reload_completed)
            offset = force_reg (Pmode, offset);
          else
            abort ();

        win:
          if (GET_CODE (offset) == CONST_INT)
            return plus_constant_for_output (base, INTVAL (offset));
        }

      if (GET_MODE_SIZE (mode) > 4
          && (GET_MODE_CLASS (mode) == MODE_INT
              || TARGET_SOFT_FLOAT))
        {
          emit_insn (gen_addsi3 (reg, base, offset));
          return reg;
        }

      return gen_rtx (PLUS, Pmode, base, offset);
    }
  else if (GET_CODE (orig) == LABEL_REF)
    current_function_uses_pic_offset_table = 1;

  return orig;
}

static rtx pic_rtx;

int
is_pic (x)
     rtx x;
{
  if (x == pic_rtx)
    return 1;
  return 0;
}

void
arm_finalize_pic ()
{
#ifndef AOF_ASSEMBLER
  rtx l1, pic_tmp, pic_tmp2, seq;
  rtx global_offset_table;

  if (current_function_uses_pic_offset_table == 0)
    return;

  if (! flag_pic)
    abort ();

  start_sequence ();
  l1 = gen_label_rtx ();

  global_offset_table = gen_rtx (SYMBOL_REF, Pmode, "_GLOBAL_OFFSET_TABLE_");
  /* The PC contains 'dot'+8, but the label L1 is on the next
     instruction, so the offset is only 'dot'+4.  */
  pic_tmp = gen_rtx (CONST, VOIDmode,
                     gen_rtx (PLUS, Pmode,
                              gen_rtx (LABEL_REF, VOIDmode, l1),
                              GEN_INT (4)));
  pic_tmp2 = gen_rtx (CONST, VOIDmode,
                      gen_rtx (PLUS, Pmode,
                               global_offset_table,
                               pc_rtx));

  pic_rtx = gen_rtx (CONST, Pmode,
                     gen_rtx (MINUS, Pmode, pic_tmp2, pic_tmp));

  emit_insn (gen_pic_load_addr (pic_offset_table_rtx, pic_rtx));
  emit_jump_insn (gen_pic_add_dot_plus_eight (l1, pic_offset_table_rtx));
  emit_label (l1);

  seq = gen_sequence ();
  end_sequence ();
  emit_insn_after (seq, get_insns ());

  /* Need to emit this whether or not we obey regdecls,
     since setjmp/longjmp can cause life info to screw up.  */
  emit_insn (gen_rtx (USE, VOIDmode, pic_offset_table_rtx));
#endif /* AOF_ASSEMBLER */
}

#define REG_OR_SUBREG_REG(X)                                            \
  (GET_CODE (X) == REG                                                  \
   || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))

#define REG_OR_SUBREG_RTX(X)                    \
   (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))

#define ARM_FRAME_RTX(X)                                        \
  ((X) == frame_pointer_rtx || (X) == stack_pointer_rtx         \
   || (X) == arg_pointer_rtx)

int
arm_rtx_costs (x, code, outer_code)
     rtx x;
     enum rtx_code code, outer_code;
{
  enum machine_mode mode = GET_MODE (x);
  enum rtx_code subcode;
  int extra_cost;

  switch (code)
    {
    case MEM:
      /* Memory costs quite a lot for the first word, but subsequent words
         load at the equivalent of a single insn each.  */
      return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
              + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));

    case DIV:
    case MOD:
      return 100;

    case ROTATE:
      if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
        return 4;
      /* Fall through */
    case ROTATERT:
      if (mode != SImode)
        return 8;
      /* Fall through */
    case ASHIFT: case LSHIFTRT: case ASHIFTRT:
      if (mode == DImode)
        return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
                + ((GET_CODE (XEXP (x, 0)) == REG
                    || (GET_CODE (XEXP (x, 0)) == SUBREG
                        && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
                   ? 0 : 8));
      return (1 + ((GET_CODE (XEXP (x, 0)) == REG
                    || (GET_CODE (XEXP (x, 0)) == SUBREG
                        && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
                   ? 0 : 4)
              + ((GET_CODE (XEXP (x, 1)) == REG
                  || (GET_CODE (XEXP (x, 1)) == SUBREG
                      && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
                  || (GET_CODE (XEXP (x, 1)) == CONST_INT))
                 ? 0 : 4));

    case MINUS:
      if (mode == DImode)
        return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
                + ((REG_OR_SUBREG_REG (XEXP (x, 0))
                    || (GET_CODE (XEXP (x, 0)) == CONST_INT
                        && const_ok_for_arm (INTVAL (XEXP (x, 0)))))
                   ? 0 : 8));

      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
        return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
                      || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
                          && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
                     ? 0 : 8)
                + ((REG_OR_SUBREG_REG (XEXP (x, 0))
                    || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
                        && const_double_rtx_ok_for_fpu (XEXP (x, 0))))
                   ? 0 : 8));

      if (((GET_CODE (XEXP (x, 0)) == CONST_INT
            && const_ok_for_arm (INTVAL (XEXP (x, 0)))
            && REG_OR_SUBREG_REG (XEXP (x, 1))))
          || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
               || subcode == ASHIFTRT || subcode == LSHIFTRT
               || subcode == ROTATE || subcode == ROTATERT
               || (subcode == MULT
                   && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
                   && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
                        (INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
              && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
              && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
                  || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
              && REG_OR_SUBREG_REG (XEXP (x, 0))))
        return 1;
      /* Fall through */

    case PLUS:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
        return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
                + ((REG_OR_SUBREG_REG (XEXP (x, 1))
                    || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
                        && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
                   ? 0 : 8));

      /* Fall through */
    case AND: case XOR: case IOR:
      extra_cost = 0;

      /* Normally the frame registers will be spilt into reg+const during
         reload, so it is a bad idea to combine them with other instructions,
         since then they might not be moved outside of loops.  As a compromise
         we allow integration with ops that have a constant as their second
         operand.  */
      if ((REG_OR_SUBREG_REG (XEXP (x, 0))
           && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
           && GET_CODE (XEXP (x, 1)) != CONST_INT)
          || (REG_OR_SUBREG_REG (XEXP (x, 0))
              && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))))
        extra_cost = 4;

      if (mode == DImode)
        return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
                + ((REG_OR_SUBREG_REG (XEXP (x, 1))
                    || (GET_CODE (XEXP (x, 1)) == CONST_INT
                        && const_ok_for_op (INTVAL (XEXP (x, 1)), code, mode)))
                   ? 0 : 8));

      if (REG_OR_SUBREG_REG (XEXP (x, 0)))
        return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
                + ((REG_OR_SUBREG_REG (XEXP (x, 1))
                    || (GET_CODE (XEXP (x, 1)) == CONST_INT
                        && const_ok_for_op (INTVAL (XEXP (x, 1)), code, mode)))
                   ? 0 : 4));

      else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
        return (1 + extra_cost
                + ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
                     || subcode == LSHIFTRT || subcode == ASHIFTRT
                     || subcode == ROTATE || subcode == ROTATERT
                     || (subcode == MULT
                         && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
                         && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
                              (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0))
                     && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
                     && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
                         || GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
                   ? 0 : 4));

      return 8;

    case MULT:
      /* There is no point basing this on the tuning, since it is always the
         fast variant if it exists at all.  */
      if (arm_fast_multiply && mode == DImode
          && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
          && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
              || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
        return 8;

      if (GET_MODE_CLASS (mode) == MODE_FLOAT
          || mode == DImode)
        return 30;

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        {
          unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
                                      & (unsigned HOST_WIDE_INT) 0xffffffff);
          int add_cost = const_ok_for_arm (i) ? 4 : 8;
          int j;
          /* Tune as appropriate */
          int booth_unit_size = ((tune_flags & FL_FAST_MULT) ? 8 : 2);

          for (j = 0; i && j < 32; j += booth_unit_size)
            {
              i >>= booth_unit_size;
              add_cost += 2;
            }

          return add_cost;
        }

      return (((tune_flags & FL_FAST_MULT) ? 8 : 30)
              + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
              + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));

    case TRUNCATE:
      if (arm_fast_multiply && mode == SImode
          && GET_CODE (XEXP (x, 0)) == LSHIFTRT
          && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
          && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
              == GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)))
          && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ZERO_EXTEND
              || GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SIGN_EXTEND))
        return 8;
      return 99;

    case NEG:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
        return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
      /* Fall through */
    case NOT:
      if (mode == DImode)
        return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);

      return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);

    case IF_THEN_ELSE:
      if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
        return 14;
      return 2;

    case COMPARE:
      return 1;

    case ABS:
      return 4 + (mode == DImode ? 4 : 0);

    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == QImode)
        return (4 + (mode == DImode ? 4 : 0)
                + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
      /* Fall through */
    case ZERO_EXTEND:
      switch (GET_MODE (XEXP (x, 0)))
        {
        case QImode:
          return (1 + (mode == DImode ? 4 : 0)
                  + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

        case HImode:
          return (4 + (mode == DImode ? 4 : 0)
                  + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

        case SImode:
          return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
        }
      abort ();

    default:
      return 99;
    }
}

int
arm_adjust_cost (insn, link, dep, cost)
     rtx insn;
     rtx link;
     rtx dep;
     int cost;
{
  rtx i_pat, d_pat;

  if ((i_pat = single_set (insn)) != NULL
      && GET_CODE (SET_SRC (i_pat)) == MEM
      && (d_pat = single_set (dep)) != NULL
      && GET_CODE (SET_DEST (d_pat)) == MEM)
    {
      /* This is a load after a store, there is no conflict if the load reads
         from a cached area.  Assume that loads from the stack, and from the
         constant pool are cached, and that others will miss.  This is a
         hack.  */

      /* debug_rtx (insn);
         debug_rtx (dep);
         debug_rtx (link);
         fprintf (stderr, "costs %d\n", cost); */

      if (CONSTANT_POOL_ADDRESS_P (XEXP (SET_SRC (i_pat), 0))
          || reg_mentioned_p (stack_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
          || reg_mentioned_p (frame_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
          || reg_mentioned_p (hard_frame_pointer_rtx,
                              XEXP (SET_SRC (i_pat), 0)))
        {
          /* fprintf (stderr, "***** Now 1\n"); */
          return 1;
        }
    }

  return cost;
}

/* This code has been fixed for cross compilation.  */

static int fpa_consts_inited = 0;

char *strings_fpa[8] = {
  "0", "1", "2", "3",
  "4", "5", "0.5", "10"
};

static REAL_VALUE_TYPE values_fpa[8];

static void
init_fpa_table ()
{
  int i;
  REAL_VALUE_TYPE r;

  for (i = 0; i < 8; i++)
    {
      r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
      values_fpa[i] = r;
    }

  fpa_consts_inited = 1;
}

/* Return TRUE if rtx X is a valid immediate FPU constant.  */

int
const_double_rtx_ok_for_fpu (x)
     rtx x;
{
  REAL_VALUE_TYPE r;
  int i;

  if (! fpa_consts_inited)
    init_fpa_table ();

  REAL_VALUE_FROM_CONST_DOUBLE (r, x);
  if (REAL_VALUE_MINUS_ZERO (r))
    return 0;

  for (i = 0; i < 8; i++)
    if (REAL_VALUES_EQUAL (r, values_fpa[i]))
      return 1;

  return 0;
}

/* Return TRUE if rtx X, when negated, is a valid immediate FPU constant.  */

int
neg_const_double_rtx_ok_for_fpu (x)
     rtx x;
{
  REAL_VALUE_TYPE r;
  int i;

  if (! fpa_consts_inited)
    init_fpa_table ();

  REAL_VALUE_FROM_CONST_DOUBLE (r, x);
  r = REAL_VALUE_NEGATE (r);
  if (REAL_VALUE_MINUS_ZERO (r))
    return 0;

  for (i = 0; i < 8; i++)
    if (REAL_VALUES_EQUAL (r, values_fpa[i]))
      return 1;

  return 0;
}

/* Predicates for `match_operand' and `match_operator'.  */

/* s_register_operand is the same as register_operand, but it doesn't accept
   (SUBREG (MEM)...).

   This function exists because at the time it was put in it led to better
   code.  SUBREG(MEM) always needs a reload in the places where
   s_register_operand is used, and this seemed to lead to excessive
   reloading.  */

int
s_register_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Only accept reg, subreg(reg), const_int.  */

int
reg_or_int_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    return 1;

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 if OP is an item in memory, given that we are in reload.  */

int
reload_memory_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  int regno = true_regnum (op);

  return (! CONSTANT_P (op)
          && (regno == -1
              || (GET_CODE (op) == REG
                  && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
}

/* Return TRUE for valid operands for the rhs of an ARM instruction.  */

int
arm_rhs_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
          || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
}

/* Return TRUE for valid operands for the rhs of an ARM instruction, or a
   load.  */

int
arm_rhsm_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
          || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
          || memory_operand (op, mode));
}

/* Return TRUE for valid operands for the rhs of an ARM instruction, or a
   constant that is valid when negated.  */

int
arm_add_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
          || (GET_CODE (op) == CONST_INT
              && (const_ok_for_arm (INTVAL (op))
                  || const_ok_for_arm (-INTVAL (op)))));
}

int
arm_not_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
          || (GET_CODE (op) == CONST_INT
              && (const_ok_for_arm (INTVAL (op))
                  || const_ok_for_arm (~INTVAL (op)))));
}

/* Return TRUE if the operand is a memory reference which contains an
   offsettable address.  */

int
offsettable_memory_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  return (mode == GET_MODE (op)
          && GET_CODE (op) == MEM
          && offsettable_address_p (reload_completed | reload_in_progress,
                                    mode, XEXP (op, 0)));
}

/* Return TRUE if the operand is a memory reference which is, or can be
   made word aligned by adjusting the offset.  */

int
alignable_memory_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  rtx reg;

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  if (mode != GET_MODE (op) || GET_CODE (op) != MEM)
    return 0;

  op = XEXP (op, 0);

  return ((GET_CODE (reg = op) == REG
           || (GET_CODE (op) == SUBREG
               && GET_CODE (reg = SUBREG_REG (op)) == REG)
           || (GET_CODE (op) == PLUS
               && GET_CODE (XEXP (op, 1)) == CONST_INT
               && (GET_CODE (reg = XEXP (op, 0)) == REG
                   || (GET_CODE (XEXP (op, 0)) == SUBREG
                       && GET_CODE (reg = SUBREG_REG (XEXP (op, 0))) == REG))))
          && REGNO_POINTER_ALIGN (REGNO (reg)) >= 4);
}

/* Similar to s_register_operand, but does not allow hard integer
   registers.  */

int
f_register_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) == FPU_REGS));
}

/* Return TRUE for valid operands for the rhs of an FPU instruction.  */

int
fpu_rhs_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;
  else if (GET_CODE (op) == CONST_DOUBLE)
    return (const_double_rtx_ok_for_fpu (op));

  return FALSE;
}

int
fpu_add_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;
  else if (GET_CODE (op) == CONST_DOUBLE)
    return (const_double_rtx_ok_for_fpu (op)
            || neg_const_double_rtx_ok_for_fpu (op));

  return FALSE;
}

/* Return nonzero if OP is a constant power of two.  */

int
power_of_two_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    {
      HOST_WIDE_INT value = INTVAL (op);
      return value != 0 && (value & (value - 1)) == 0;
    }
  return FALSE;
}

/* Return TRUE for a valid operand of a DImode operation.
   Either: REG, CONST_DOUBLE or MEM(DImode_address).
   Note that this disallows MEM(REG+REG), but allows
   MEM(PRE/POST_INC/DEC(REG)).  */

int
di_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;

  switch (GET_CODE (op))
    {
    case CONST_DOUBLE:
    case CONST_INT:
      return TRUE;

    case MEM:
      return memory_address_p (DImode, XEXP (op, 0));

    default:
      return FALSE;
    }
}

/* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
   Either: REG, CONST_DOUBLE or MEM(DImode_address).
   Note that this disallows MEM(REG+REG), but allows
   MEM(PRE/POST_INC/DEC(REG)).  */

int
soft_df_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;

  switch (GET_CODE (op))
    {
    case CONST_DOUBLE:
      return TRUE;

    case MEM:
      return memory_address_p (DFmode, XEXP (op, 0));

    default:
      return FALSE;
    }
}

/* Return TRUE for valid index operands.  */

int
index_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
          || (immediate_operand (op, mode)
              && INTVAL (op) < 4096 && INTVAL (op) > -4096));
}

/* Return TRUE for valid shifts by a constant.  This also accepts any
   power of two on the (somewhat overly relaxed) assumption that the
   shift operator in this case was a mult.  */

int
const_shift_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (power_of_two_operand (op, mode)
          || (immediate_operand (op, mode)
              && (INTVAL (op) < 32 && INTVAL (op) > 0)));
}

/* Return TRUE for arithmetic operators which can be combined with a multiply
   (shift).  */

int
shiftable_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (GET_MODE (x) != mode)
    return FALSE;
  else
    {
      enum rtx_code code = GET_CODE (x);

      return (code == PLUS || code == MINUS
              || code == IOR || code == XOR || code == AND);
    }
}

/* Return TRUE for shift operators.  */

int
shift_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (GET_MODE (x) != mode)
    return FALSE;
  else
    {
      enum rtx_code code = GET_CODE (x);

      if (code == MULT)
        return power_of_two_operand (XEXP (x, 1), mode);

      return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
              || code == ROTATERT);
    }
}

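/* Example (added commentary): address arithmetic represents a scaled
   index as a multiply, so (plus (mult r1 (const_int 4)) r0) is
   effectively (plus (ashift r1 (const_int 2)) r0); accepting MULT by a
   power of two here lets the patterns match both forms.  */
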
/* Return TRUE if x is EQ or NE.  */

int
equality_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  return GET_CODE (x) == EQ || GET_CODE (x) == NE;
}

/* Return TRUE for SMIN SMAX UMIN UMAX operators.  */

int
minmax_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  enum rtx_code code = GET_CODE (x);

  if (GET_MODE (x) != mode)
    return FALSE;

  return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
}

/* Return TRUE if this is the condition code register; if we aren't given
   a mode, accept any class CCmode register.  */

int
cc_register (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    {
      mode = GET_MODE (x);
      if (GET_MODE_CLASS (mode) != MODE_CC)
        return FALSE;
    }

  if (mode == GET_MODE (x) && GET_CODE (x) == REG && REGNO (x) == 24)
    return TRUE;

  return FALSE;
}

/* Return TRUE if this is the condition code register; if we aren't given
   a mode, accept any class CCmode register which indicates a dominance
   expression.  */

int
dominant_cc_register (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    {
      mode = GET_MODE (x);
      if (GET_MODE_CLASS (mode) != MODE_CC)
        return FALSE;
    }

  if (mode != CC_DNEmode && mode != CC_DEQmode
      && mode != CC_DLEmode && mode != CC_DLTmode
      && mode != CC_DGEmode && mode != CC_DGTmode
      && mode != CC_DLEUmode && mode != CC_DLTUmode
      && mode != CC_DGEUmode && mode != CC_DGTUmode)
    return FALSE;

  if (mode == GET_MODE (x) && GET_CODE (x) == REG && REGNO (x) == 24)
    return TRUE;

  return FALSE;
}

2099 /* Return TRUE if X references a SYMBOL_REF. */
2101 symbol_mentioned_p (x)
2102 rtx x;
2104 register char *fmt;
2105 register int i;
2107 if (GET_CODE (x) == SYMBOL_REF)
2108 return 1;
2110 fmt = GET_RTX_FORMAT (GET_CODE (x));
2111 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2113 if (fmt[i] == 'E')
2115 register int j;
2117 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2118 if (symbol_mentioned_p (XVECEXP (x, i, j)))
2119 return 1;
2121 else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
2122 return 1;
2125 return 0;
2128 /* Return TRUE if X references a LABEL_REF. */
2130 label_mentioned_p (x)
2131 rtx x;
2133 register char *fmt;
2134 register int i;
2136 if (GET_CODE (x) == LABEL_REF)
2137 return 1;
2139 fmt = GET_RTX_FORMAT (GET_CODE (x));
2140 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2142 if (fmt[i] == 'E')
2144 register int j;
2146 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2147 if (label_mentioned_p (XVECEXP (x, i, j)))
2148 return 1;
2150 else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
2151 return 1;
2154 return 0;
2157 enum rtx_code
2158 minmax_code (x)
2159 rtx x;
2161 enum rtx_code code = GET_CODE (x);
2163 if (code == SMAX)
2164 return GE;
2165 else if (code == SMIN)
2166 return LE;
2167 else if (code == UMIN)
2168 return LEU;
2169 else if (code == UMAX)
2170 return GEU;
2172 abort ();
2175 /* Return 1 if memory locations are adjacent */
2178 adjacent_mem_locations (a, b)
2179 rtx a, b;
2181 int val0 = 0, val1 = 0;
2182 int reg0, reg1;
2184 if ((GET_CODE (XEXP (a, 0)) == REG
2185 || (GET_CODE (XEXP (a, 0)) == PLUS
2186 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
2187 && (GET_CODE (XEXP (b, 0)) == REG
2188 || (GET_CODE (XEXP (b, 0)) == PLUS
2189 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
2191 if (GET_CODE (XEXP (a, 0)) == PLUS)
2193 reg0 = REGNO (XEXP (XEXP (a, 0), 0));
2194 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
2196 else
2197 reg0 = REGNO (XEXP (a, 0));
2198 if (GET_CODE (XEXP (b, 0)) == PLUS)
2200 reg1 = REGNO (XEXP (XEXP (b, 0), 0));
2201 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
2203 else
2204 reg1 = REGNO (XEXP (b, 0));
2205 return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
2207 return 0;
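/* Worked example (illustrative): with A = (mem (reg r4)) and
   B = (mem (plus (reg r4) (const_int 4))) the references are adjacent
   words and this returns 1; [r4] and [r4, #8] share a base but are not
   adjacent, so they return 0.  */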
2210 /* Return 1 if OP is a load multiple operation. It is known to be a
2211 PARALLEL and the first section will be tested. */
2214 load_multiple_operation (op, mode)
2215 rtx op;
2216 enum machine_mode mode;
2218 HOST_WIDE_INT count = XVECLEN (op, 0);
2219 int dest_regno;
2220 rtx src_addr;
2221 HOST_WIDE_INT i = 1, base = 0;
2222 rtx elt;
2224 if (count <= 1
2225 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
2226 return 0;
2228 /* Check to see if this might be a write-back */
2229 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
2231 i++;
2232 base = 1;
2234 /* Now check it more carefully */
2235 if (GET_CODE (SET_DEST (elt)) != REG
2236 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
2237 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
2238 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
2239 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 2) * 4
2240 || GET_CODE (XVECEXP (op, 0, count - 1)) != CLOBBER
2241 || GET_CODE (XEXP (XVECEXP (op, 0, count - 1), 0)) != REG
2242 || REGNO (XEXP (XVECEXP (op, 0, count - 1), 0))
2243 != REGNO (SET_DEST (elt)))
2244 return 0;
2246 count--;
2249 /* Perform a quick check so we don't blow up below. */
2250 if (count <= i
2251 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
2252 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
2253 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
2254 return 0;
2256 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
2257 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);
2259 for (; i < count; i++)
2261 rtx elt = XVECEXP (op, 0, i);
2263 if (GET_CODE (elt) != SET
2264 || GET_CODE (SET_DEST (elt)) != REG
2265 || GET_MODE (SET_DEST (elt)) != SImode
2266 || REGNO (SET_DEST (elt)) != dest_regno + i - base
2267 || GET_CODE (SET_SRC (elt)) != MEM
2268 || GET_MODE (SET_SRC (elt)) != SImode
2269 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
2270 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
2271 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
2272 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
2273 return 0;
2276 return 1;
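/* A minimal sketch (illustrative) of a PARALLEL accepted above, with no
   write-back:

     (parallel [(set (reg:SI 0) (mem:SI (reg:SI 4)))
                (set (reg:SI 1) (mem:SI (plus:SI (reg:SI 4) (const_int 4))))])

   i.e. consecutive registers loaded from consecutive words off one base.  */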
2279 /* Return 1 if OP is a store multiple operation. It is known to be a
2280 PARALLEL and the first section will be tested. */
2283 store_multiple_operation (op, mode)
2284 rtx op;
2285 enum machine_mode mode;
2287 HOST_WIDE_INT count = XVECLEN (op, 0);
2288 int src_regno;
2289 rtx dest_addr;
2290 HOST_WIDE_INT i = 1, base = 0;
2291 rtx elt;
2293 if (count <= 1
2294 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
2295 return 0;
2297 /* Check to see if this might be a write-back */
2298 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
2300 i++;
2301 base = 1;
2303 /* Now check it more carefully */
2304 if (GET_CODE (SET_DEST (elt)) != REG
2305 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
2306 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
2307 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
2308 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 2) * 4
2309 || GET_CODE (XVECEXP (op, 0, count - 1)) != CLOBBER
2310 || GET_CODE (XEXP (XVECEXP (op, 0, count - 1), 0)) != REG
2311 || REGNO (XEXP (XVECEXP (op, 0, count - 1), 0))
2312 != REGNO (SET_DEST (elt)))
2313 return 0;
2315 count--;
2318 /* Perform a quick check so we don't blow up below. */
2319 if (count <= i
2320 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
2321 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
2322 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
2323 return 0;
2325 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
2326 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);
2328 for (; i < count; i++)
2330 elt = XVECEXP (op, 0, i);
2332 if (GET_CODE (elt) != SET
2333 || GET_CODE (SET_SRC (elt)) != REG
2334 || GET_MODE (SET_SRC (elt)) != SImode
2335 || REGNO (SET_SRC (elt)) != src_regno + i - base
2336 || GET_CODE (SET_DEST (elt)) != MEM
2337 || GET_MODE (SET_DEST (elt)) != SImode
2338 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
2339 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
2340 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
2341 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
2342 return 0;
2345 return 1;
2349 load_multiple_sequence (operands, nops, regs, base, load_offset)
2350 rtx *operands;
2351 int nops;
2352 int *regs;
2353 int *base;
2354 HOST_WIDE_INT *load_offset;
2356 int unsorted_regs[4];
2357 HOST_WIDE_INT unsorted_offsets[4];
2358 int order[4];
2359 int base_reg;
2360 int i;
2362 /* Can only handle 2, 3, or 4 insns at present, though could be easily
2363 extended if required. */
2364 if (nops < 2 || nops > 4)
2365 abort ();
2367 /* Loop over the operands and check that the memory references are
2368 suitable (i.e. immediate offsets from the same base register). At
2369 the same time, extract the target register, and the memory
2370 offsets. */
2371 for (i = 0; i < nops; i++)
2373 rtx reg;
2374 rtx offset;
2376 /* Convert a subreg of a mem into the mem itself. */
2377 if (GET_CODE (operands[nops + i]) == SUBREG)
2378 operands[nops + i] = alter_subreg(operands[nops + i]);
2380 if (GET_CODE (operands[nops + i]) != MEM)
2381 abort ();
2383 /* Don't reorder volatile memory references; it doesn't seem worth
2384 looking for the case where the order is ok anyway. */
2385 if (MEM_VOLATILE_P (operands[nops + i]))
2386 return 0;
2388 offset = const0_rtx;
2390 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
2391 || (GET_CODE (reg) == SUBREG
2392 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
2393 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
2394 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
2395 == REG)
2396 || (GET_CODE (reg) == SUBREG
2397 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
2398 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
2399 == CONST_INT)))
2401 if (i == 0)
2403 base_reg = REGNO(reg);
2404 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
2405 ? REGNO (operands[i])
2406 : REGNO (SUBREG_REG (operands[i])));
2407 order[0] = 0;
2409 else
2411 if (base_reg != REGNO (reg))
2412 /* Not addressed from the same base register. */
2413 return 0;
2415 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
2416 ? REGNO (operands[i])
2417 : REGNO (SUBREG_REG (operands[i])));
2418 if (unsorted_regs[i] < unsorted_regs[order[0]])
2419 order[0] = i;
2422 /* If it isn't an integer register, or if it overwrites the
2423 base register but isn't the last insn in the list, then
2424 we can't do this. */
2425 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14
2426 || (i != nops - 1 && unsorted_regs[i] == base_reg))
2427 return 0;
2429 unsorted_offsets[i] = INTVAL (offset);
2431 else
2432 /* Not a suitable memory address. */
2433 return 0;
2436 /* All the useful information has now been extracted from the
2437 operands into unsorted_regs and unsorted_offsets; additionally,
2438 order[0] has been set to the lowest numbered register in the
2439 list. Sort the registers into order, and check that the memory
2440 offsets are ascending and adjacent. */
2442 for (i = 1; i < nops; i++)
2444 int j;
2446 order[i] = order[i - 1];
2447 for (j = 0; j < nops; j++)
2448 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
2449 && (order[i] == order[i - 1]
2450 || unsorted_regs[j] < unsorted_regs[order[i]]))
2451 order[i] = j;
2453 /* Have we found a suitable register? If not, one must be used more
2454 than once. */
2455 if (order[i] == order[i - 1])
2456 return 0;
2458 /* Is the memory address adjacent and ascending? */
2459 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
2460 return 0;
2463 if (base)
2465 *base = base_reg;
2467 for (i = 0; i < nops; i++)
2468 regs[i] = unsorted_regs[order[i]];
2470 *load_offset = unsorted_offsets[order[0]];
2473 if (unsorted_offsets[order[0]] == 0)
2474 return 1; /* ldmia */
2476 if (unsorted_offsets[order[0]] == 4)
2477 return 2; /* ldmib */
2479 if (unsorted_offsets[order[nops - 1]] == 0)
2480 return 3; /* ldmda */
2482 if (unsorted_offsets[order[nops - 1]] == -4)
2483 return 4; /* ldmdb */
2485 /* Can't do it without setting up the offset; only do this if it takes
2486 no more than one insn. */
2487 return (const_ok_for_arm (unsorted_offsets[order[0]])
2488 || const_ok_for_arm (-unsorted_offsets[order[0]])) ? 5 : 0;
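/* Worked example (illustrative) of the return codes above: loading
   r0-r2 from [r5, #0], [r5, #4], [r5, #8] yields 1 (ldmia); loading
   from [r5, #4], [r5, #8], [r5, #12] yields 2 (ldmib); offsets -8, -4,
   0 yield 3 (ldmda); offsets -12, -8, -4 yield 4 (ldmdb).  Code 5 means
   the base must first be adjusted with a single add or sub.  */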
2491 char *
2492 emit_ldm_seq (operands, nops)
2493 rtx *operands;
2494 int nops;
2496 int regs[4];
2497 int base_reg;
2498 HOST_WIDE_INT offset;
2499 char buf[100];
2500 int i;
2502 switch (load_multiple_sequence (operands, nops, regs, &base_reg, &offset))
2504 case 1:
2505 strcpy (buf, "ldm%?ia\t");
2506 break;
2508 case 2:
2509 strcpy (buf, "ldm%?ib\t");
2510 break;
2512 case 3:
2513 strcpy (buf, "ldm%?da\t");
2514 break;
2516 case 4:
2517 strcpy (buf, "ldm%?db\t");
2518 break;
2520 case 5:
2521 if (offset >= 0)
2522 sprintf (buf, "add%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
2523 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
2524 (long) offset);
2525 else
2526 sprintf (buf, "sub%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
2527 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
2528 (long) -offset);
2529 output_asm_insn (buf, operands);
2530 base_reg = regs[0];
2531 strcpy (buf, "ldm%?ia\t");
2532 break;
2534 default:
2535 abort ();
2538 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
2539 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
2541 for (i = 1; i < nops; i++)
2542 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
2543 reg_names[regs[i]]);
2545 strcat (buf, "}\t%@ phole ldm");
2547 output_asm_insn (buf, operands);
2548 return "";
2552 store_multiple_sequence (operands, nops, regs, base, load_offset)
2553 rtx *operands;
2554 int nops;
2555 int *regs;
2556 int *base;
2557 HOST_WIDE_INT *load_offset;
2559 int unsorted_regs[4];
2560 HOST_WIDE_INT unsorted_offsets[4];
2561 int order[4];
2562 int base_reg;
2563 int i;
2565 /* Can only handle 2, 3, or 4 insns at present, though could be easily
2566 extended if required. */
2567 if (nops < 2 || nops > 4)
2568 abort ();
2570 /* Loop over the operands and check that the memory references are
2571 suitable (i.e. immediate offsets from the same base register). At
2572 the same time, extract the target register, and the memory
2573 offsets. */
2574 for (i = 0; i < nops; i++)
2576 rtx reg;
2577 rtx offset;
2579 /* Convert a subreg of a mem into the mem itself. */
2580 if (GET_CODE (operands[nops + i]) == SUBREG)
2581 operands[nops + i] = alter_subreg(operands[nops + i]);
2583 if (GET_CODE (operands[nops + i]) != MEM)
2584 abort ();
2586 /* Don't reorder volatile memory references; it doesn't seem worth
2587 looking for the case where the order is ok anyway. */
2588 if (MEM_VOLATILE_P (operands[nops + i]))
2589 return 0;
2591 offset = const0_rtx;
2593 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
2594 || (GET_CODE (reg) == SUBREG
2595 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
2596 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
2597 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
2598 == REG)
2599 || (GET_CODE (reg) == SUBREG
2600 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
2601 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
2602 == CONST_INT)))
2604 if (i == 0)
2606 base_reg = REGNO(reg);
2607 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
2608 ? REGNO (operands[i])
2609 : REGNO (SUBREG_REG (operands[i])));
2610 order[0] = 0;
2612 else
2614 if (base_reg != REGNO (reg))
2615 /* Not addressed from the same base register. */
2616 return 0;
2618 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
2619 ? REGNO (operands[i])
2620 : REGNO (SUBREG_REG (operands[i])));
2621 if (unsorted_regs[i] < unsorted_regs[order[0]])
2622 order[0] = i;
2625 /* If it isn't an integer register, then we can't do this. */
2626 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14)
2627 return 0;
2629 unsorted_offsets[i] = INTVAL (offset);
2631 else
2632 /* Not a suitable memory address. */
2633 return 0;
2636 /* All the useful information has now been extracted from the
2637 operands into unsorted_regs and unsorted_offsets; additionally,
2638 order[0] has been set to the lowest numbered register in the
2639 list. Sort the registers into order, and check that the memory
2640 offsets are ascending and adjacent. */
2642 for (i = 1; i < nops; i++)
2644 int j;
2646 order[i] = order[i - 1];
2647 for (j = 0; j < nops; j++)
2648 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
2649 && (order[i] == order[i - 1]
2650 || unsorted_regs[j] < unsorted_regs[order[i]]))
2651 order[i] = j;
2653 /* Have we found a suitable register? If not, one must be used more
2654 than once. */
2655 if (order[i] == order[i - 1])
2656 return 0;
2658 /* Is the memory address adjacent and ascending? */
2659 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
2660 return 0;
2663 if (base)
2665 *base = base_reg;
2667 for (i = 0; i < nops; i++)
2668 regs[i] = unsorted_regs[order[i]];
2670 *load_offset = unsorted_offsets[order[0]];
2673 if (unsorted_offsets[order[0]] == 0)
2674 return 1; /* stmia */
2676 if (unsorted_offsets[order[0]] == 4)
2677 return 2; /* stmib */
2679 if (unsorted_offsets[order[nops - 1]] == 0)
2680 return 3; /* stmda */
2682 if (unsorted_offsets[order[nops - 1]] == -4)
2683 return 4; /* stmdb */
2685 return 0;
2688 char *
2689 emit_stm_seq (operands, nops)
2690 rtx *operands;
2691 int nops;
2693 int regs[4];
2694 int base_reg;
2695 HOST_WIDE_INT offset;
2696 char buf[100];
2697 int i;
2699 switch (store_multiple_sequence (operands, nops, regs, &base_reg, &offset))
2701 case 1:
2702 strcpy (buf, "stm%?ia\t");
2703 break;
2705 case 2:
2706 strcpy (buf, "stm%?ib\t");
2707 break;
2709 case 3:
2710 strcpy (buf, "stm%?da\t");
2711 break;
2713 case 4:
2714 strcpy (buf, "stm%?db\t");
2715 break;
2717 default:
2718 abort ();
2721 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
2722 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
2724 for (i = 1; i < nops; i++)
2725 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
2726 reg_names[regs[i]]);
2728 strcat (buf, "}\t%@ phole stm");
2730 output_asm_insn (buf, operands);
2731 return "";
2735 multi_register_push (op, mode)
2736 rtx op;
2737 enum machine_mode mode;
2739 if (GET_CODE (op) != PARALLEL
2740 || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
2741 || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
2742 || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != 2))
2743 return 0;
2745 return 1;
2749 /* Routines for use with attributes */
2751 /* Return nonzero if ATTR is a valid attribute for DECL.
2752 ATTRIBUTES are any existing attributes and ARGS are the arguments
2753 supplied with ATTR.
2755 Supported attributes:
2757 naked: don't output any prologue or epilogue code; the user is assumed
2758 to do the right thing. */
2761 arm_valid_machine_decl_attribute (decl, attributes, attr, args)
2762 tree decl;
2763 tree attributes;
2764 tree attr;
2765 tree args;
2767 if (args != NULL_TREE)
2768 return 0;
2770 if (is_attribute_p ("naked", attr))
2771 return TREE_CODE (decl) == FUNCTION_DECL;
2772 return 0;
2775 /* Return non-zero if FUNC is a naked function. */
2777 static int
2778 arm_naked_function_p (func)
2779 tree func;
2781 tree a;
2783 if (TREE_CODE (func) != FUNCTION_DECL)
2784 abort ();
2786 a = lookup_attribute ("naked", DECL_MACHINE_ATTRIBUTES (func));
2787 return a != NULL_TREE;
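/* Usage sketch (illustrative): a function declared as

       void start () __attribute__ ((naked));

   is recognised here, and the compiler then emits no prologue or
   epilogue code for it.  */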
2790 /* Routines for use in generating RTL */
2793 arm_gen_load_multiple (base_regno, count, from, up, write_back, unchanging_p,
2794 in_struct_p)
2795 int base_regno;
2796 int count;
2797 rtx from;
2798 int up;
2799 int write_back;
2800 int unchanging_p;
2801 int in_struct_p;
2803 int i = 0, j;
2804 rtx result;
2805 int sign = up ? 1 : -1;
2806 rtx mem;
2808 result = gen_rtx (PARALLEL, VOIDmode,
2809 rtvec_alloc (count + (write_back ? 2 : 0)));
2810 if (write_back)
2812 XVECEXP (result, 0, 0)
2813 = gen_rtx (SET, GET_MODE (from), from,
2814 plus_constant (from, count * 4 * sign));
2815 i = 1;
2816 count++;
2819 for (j = 0; i < count; i++, j++)
2821 mem = gen_rtx (MEM, SImode, plus_constant (from, j * 4 * sign));
2822 RTX_UNCHANGING_P (mem) = unchanging_p;
2823 MEM_IN_STRUCT_P (mem) = in_struct_p;
2825 XVECEXP (result, 0, i) = gen_rtx (SET, VOIDmode,
2826 gen_rtx (REG, SImode, base_regno + j),
2827 mem);
2830 if (write_back)
2831 XVECEXP (result, 0, i) = gen_rtx (CLOBBER, SImode, from);
2833 return result;
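/* For illustration (operand names hypothetical): a call such as

       arm_gen_load_multiple (0, 2, from, TRUE, TRUE, FALSE, FALSE);

   builds roughly

     (parallel [(set from (plus from (const_int 8)))
                (set (reg:SI 0) (mem:SI from))
                (set (reg:SI 1) (mem:SI (plus from (const_int 4))))
                (clobber from)])

   which is the write-back form accepted by load_multiple_operation.  */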
2837 arm_gen_store_multiple (base_regno, count, to, up, write_back, unchanging_p,
2838 in_struct_p)
2839 int base_regno;
2840 int count;
2841 rtx to;
2842 int up;
2843 int write_back;
2844 int unchanging_p;
2845 int in_struct_p;
2847 int i = 0, j;
2848 rtx result;
2849 int sign = up ? 1 : -1;
2850 rtx mem;
2852 result = gen_rtx (PARALLEL, VOIDmode,
2853 rtvec_alloc (count + (write_back ? 2 : 0)));
2854 if (write_back)
2856 XVECEXP (result, 0, 0)
2857 = gen_rtx (SET, GET_MODE (to), to,
2858 plus_constant (to, count * 4 * sign));
2859 i = 1;
2860 count++;
2863 for (j = 0; i < count; i++, j++)
2865 mem = gen_rtx (MEM, SImode, plus_constant (to, j * 4 * sign));
2866 RTX_UNCHANGING_P (mem) = unchanging_p;
2867 MEM_IN_STRUCT_P (mem) = in_struct_p;
2869 XVECEXP (result, 0, i) = gen_rtx (SET, VOIDmode, mem,
2870 gen_rtx (REG, SImode, base_regno + j));
2873 if (write_back)
2874 XVECEXP (result, 0, i) = gen_rtx (CLOBBER, SImode, to);
2876 return result;
2880 arm_gen_movstrqi (operands)
2881 rtx *operands;
2883 HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
2884 int i, r;
2885 rtx src, dst;
2886 rtx st_src, st_dst, end_src, end_dst, fin_src, fin_dst;
2887 rtx part_bytes_reg = NULL;
2888 rtx mem;
2889 int dst_unchanging_p, dst_in_struct_p, src_unchanging_p, src_in_struct_p;
2890 extern int optimize;
2892 if (GET_CODE (operands[2]) != CONST_INT
2893 || GET_CODE (operands[3]) != CONST_INT
2894 || INTVAL (operands[2]) > 64
2895 || INTVAL (operands[3]) & 3)
2896 return 0;
2898 st_dst = XEXP (operands[0], 0);
2899 st_src = XEXP (operands[1], 0);
2901 dst_unchanging_p = RTX_UNCHANGING_P (operands[0]);
2902 dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
2903 src_unchanging_p = RTX_UNCHANGING_P (operands[1]);
2904 src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);
2906 fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
2907 fin_src = src = copy_to_mode_reg (SImode, st_src);
2909 in_words_to_go = (INTVAL (operands[2]) + 3) / 4;
2910 out_words_to_go = INTVAL (operands[2]) / 4;
2911 last_bytes = INTVAL (operands[2]) & 3;
2913 if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
2914 part_bytes_reg = gen_rtx (REG, SImode, (in_words_to_go - 1) & 3);
2916 for (i = 0; in_words_to_go >= 2; i+=4)
2918 if (in_words_to_go > 4)
2919 emit_insn (arm_gen_load_multiple (0, 4, src, TRUE, TRUE,
2920 src_unchanging_p, src_in_struct_p));
2921 else
2922 emit_insn (arm_gen_load_multiple (0, in_words_to_go, src, TRUE,
2923 FALSE, src_unchanging_p,
2924 src_in_struct_p));
2926 if (out_words_to_go)
2928 if (out_words_to_go > 4)
2929 emit_insn (arm_gen_store_multiple (0, 4, dst, TRUE, TRUE,
2930 dst_unchanging_p,
2931 dst_in_struct_p));
2932 else if (out_words_to_go != 1)
2933 emit_insn (arm_gen_store_multiple (0, out_words_to_go,
2934 dst, TRUE,
2935 (last_bytes == 0
2936 ? FALSE : TRUE),
2937 dst_unchanging_p,
2938 dst_in_struct_p));
2939 else
2941 mem = gen_rtx (MEM, SImode, dst);
2942 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
2943 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
2944 emit_move_insn (mem, gen_rtx (REG, SImode, 0));
2945 if (last_bytes != 0)
2946 emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
2950 in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
2951 out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
2954 /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do. */
2955 if (out_words_to_go)
2957 rtx sreg;
2959 mem = gen_rtx (MEM, SImode, src);
2960 RTX_UNCHANGING_P (mem) = src_unchanging_p;
2961 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
2962 emit_move_insn (sreg = gen_reg_rtx (SImode), mem);
2963 emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));
2965 mem = gen_rtx (MEM, SImode, dst);
2966 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
2967 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
2968 emit_move_insn (mem, sreg);
2969 emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
2970 in_words_to_go--;
2972 if (in_words_to_go) /* Sanity check */
2973 abort ();
2976 if (in_words_to_go)
2978 if (in_words_to_go < 0)
2979 abort ();
2981 mem = gen_rtx (MEM, SImode, src);
2982 RTX_UNCHANGING_P (mem) = src_unchanging_p;
2983 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
2984 part_bytes_reg = copy_to_mode_reg (SImode, mem);
2987 if (BYTES_BIG_ENDIAN && last_bytes)
2989 rtx tmp = gen_reg_rtx (SImode);
2991 if (part_bytes_reg == NULL)
2992 abort ();
2994 /* The bytes we want are in the top end of the word */
2995 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
2996 GEN_INT (8 * (4 - last_bytes))));
2997 part_bytes_reg = tmp;
2999 while (last_bytes)
3001 mem = gen_rtx (MEM, QImode, plus_constant (dst, last_bytes - 1));
3002 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
3003 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
3004 emit_move_insn (mem, gen_rtx (SUBREG, QImode, part_bytes_reg, 0));
3005 if (--last_bytes)
3007 tmp = gen_reg_rtx (SImode);
3008 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
3009 part_bytes_reg = tmp;
3014 else
3016 while (last_bytes)
3018 if (part_bytes_reg == NULL)
3019 abort ();
3021 mem = gen_rtx (MEM, QImode, dst);
3022 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
3023 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
3024 emit_move_insn (mem, gen_rtx (SUBREG, QImode, part_bytes_reg, 0));
3025 if (--last_bytes)
3027 rtx tmp = gen_reg_rtx (SImode);
3029 emit_insn (gen_addsi3 (dst, dst, const1_rtx));
3030 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
3031 part_bytes_reg = tmp;
3036 return 1;
3039 /* Generate a memory reference for a half word, such that it will be loaded
3040 into the top 16 bits of the word. We can assume that the address is
3041 known to be alignable and of the form reg, or plus (reg, const). */
3043 gen_rotated_half_load (memref)
3044 rtx memref;
3046 HOST_WIDE_INT offset = 0;
3047 rtx base = XEXP (memref, 0);
3049 if (GET_CODE (base) == PLUS)
3051 offset = INTVAL (XEXP (base, 1));
3052 base = XEXP (base, 0);
3055 /* If we aren't allowed to generate unaligned addresses, then fail. */
3056 if (TARGET_SHORT_BY_BYTES
3057 && ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 0)))
3058 return NULL;
3060 base = gen_rtx (MEM, SImode, plus_constant (base, offset & ~2));
3062 if ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 2))
3063 return base;
3065 return gen_rtx (ROTATE, SImode, base, GEN_INT (16));
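/* Example (illustrative, little-endian): for a halfword at the address
   in r4 this returns (rotate:SI (mem:SI (reg r4)) (const_int 16)), so
   the loaded halfword ends up in the top 16 bits; for [r4, #2] the
   plain word load at r4 already leaves those bytes at the top.  */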
3068 static enum machine_mode
3069 select_dominance_cc_mode (op, x, y, cond_or)
3070 enum rtx_code op;
3071 rtx x;
3072 rtx y;
3073 HOST_WIDE_INT cond_or;
3075 enum rtx_code cond1, cond2;
3076 int swapped = 0;
3078 /* Currently we will probably get the wrong result if the individual
3079 comparisons are not simple. This also ensures that it is safe to
3080 reverse a comparison if necessary. */
3081 if ((arm_select_cc_mode (cond1 = GET_CODE (x), XEXP (x, 0), XEXP (x, 1))
3082 != CCmode)
3083 || (arm_select_cc_mode (cond2 = GET_CODE (y), XEXP (y, 0), XEXP (y, 1))
3084 != CCmode))
3085 return CCmode;
3087 if (cond_or)
3088 cond1 = reverse_condition (cond1);
3090 /* If the comparisons are not equal, and one doesn't dominate the other,
3091 then we can't do this. */
3092 if (cond1 != cond2
3093 && ! comparison_dominates_p (cond1, cond2)
3094 && (swapped = 1, ! comparison_dominates_p (cond2, cond1)))
3095 return CCmode;
3097 if (swapped)
3099 enum rtx_code temp = cond1;
3100 cond1 = cond2;
3101 cond2 = temp;
3104 switch (cond1)
3106 case EQ:
3107 if (cond2 == EQ || ! cond_or)
3108 return CC_DEQmode;
3110 switch (cond2)
3112 case LE: return CC_DLEmode;
3113 case LEU: return CC_DLEUmode;
3114 case GE: return CC_DGEmode;
3115 case GEU: return CC_DGEUmode;
3118 break;
3120 case LT:
3121 if (cond2 == LT || ! cond_or)
3122 return CC_DLTmode;
3123 if (cond2 == LE)
3124 return CC_DLEmode;
3125 if (cond2 == NE)
3126 return CC_DNEmode;
3127 break;
3129 case GT:
3130 if (cond2 == GT || ! cond_or)
3131 return CC_DGTmode;
3132 if (cond2 == GE)
3133 return CC_DGEmode;
3134 if (cond2 == NE)
3135 return CC_DNEmode;
3136 break;
3138 case LTU:
3139 if (cond2 == LTU || ! cond_or)
3140 return CC_DLTUmode;
3141 if (cond2 == LEU)
3142 return CC_DLEUmode;
3143 if (cond2 == NE)
3144 return CC_DNEmode;
3145 break;
3147 case GTU:
3148 if (cond2 == GTU || ! cond_or)
3149 return CC_DGTUmode;
3150 if (cond2 == GEU)
3151 return CC_DGEUmode;
3152 if (cond2 == NE)
3153 return CC_DNEmode;
3154 break;
3156 /* The remaining cases only occur when both comparisons are the
3157 same. */
3158 case NE:
3159 return CC_DNEmode;
3161 case LE:
3162 return CC_DLEmode;
3164 case GE:
3165 return CC_DGEmode;
3167 case LEU:
3168 return CC_DLEUmode;
3170 case GEU:
3171 return CC_DGEUmode;
3174 abort ();
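/* Worked example (illustrative): when the false arm of the caller's
   IF_THEN_ELSE is 0 (COND_OR == 0, an AND of the two conditions) and
   both X and Y are LT comparisons, the switch above returns CC_DLTmode,
   so one conditional compare tests both.  */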
3177 enum machine_mode
3178 arm_select_cc_mode (op, x, y)
3179 enum rtx_code op;
3180 rtx x;
3181 rtx y;
3183 /* All floating point compares return CCFP if it is an equality
3184 comparison, and CCFPE otherwise. */
3185 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3186 return (op == EQ || op == NE) ? CCFPmode : CCFPEmode;
3188 /* A compare with a shifted operand. Because of canonicalization, the
3189 comparison will have to be swapped when we emit the assembler. */
3190 if (GET_MODE (y) == SImode && GET_CODE (y) == REG
3191 && (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
3192 || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
3193 || GET_CODE (x) == ROTATERT))
3194 return CC_SWPmode;
3196 /* This is a special case that is used by combine to allow a
3197 comparison of a shifted byte load to be split into a zero-extend
3198 followed by a comparison of the shifted integer (only valid for
3199 equalities and unsigned inequalities). */
3200 if (GET_MODE (x) == SImode
3201 && GET_CODE (x) == ASHIFT
3202 && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 24
3203 && GET_CODE (XEXP (x, 0)) == SUBREG
3204 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == MEM
3205 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == QImode
3206 && (op == EQ || op == NE
3207 || op == GEU || op == GTU || op == LTU || op == LEU)
3208 && GET_CODE (y) == CONST_INT)
3209 return CC_Zmode;
3211 /* For an operation that sets the condition codes as a side-effect, the
3212 V flag is not set correctly, so we can only use comparisons where
3213 this doesn't matter. (For LT and GE we can use "mi" and "pl"
3214 instead.) */
3215 if (GET_MODE (x) == SImode
3216 && y == const0_rtx
3217 && (op == EQ || op == NE || op == LT || op == GE)
3218 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
3219 || GET_CODE (x) == AND || GET_CODE (x) == IOR
3220 || GET_CODE (x) == XOR || GET_CODE (x) == MULT
3221 || GET_CODE (x) == NOT || GET_CODE (x) == NEG
3222 || GET_CODE (x) == LSHIFTRT
3223 || GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
3224 || GET_CODE (x) == ROTATERT || GET_CODE (x) == ZERO_EXTRACT))
3225 return CC_NOOVmode;
3227 /* A construct for a conditional compare: if the false arm contains
3228 0, then both conditions must be true; otherwise either condition
3229 must be true. Not all conditions are possible, so CCmode is
3230 returned if it can't be done. */
3231 if (GET_CODE (x) == IF_THEN_ELSE
3232 && (XEXP (x, 2) == const0_rtx
3233 || XEXP (x, 2) == const1_rtx)
3234 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3235 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
3236 return select_dominance_cc_mode (op, XEXP (x, 0), XEXP (x, 1),
3237 INTVAL (XEXP (x, 2)));
3239 if (GET_MODE (x) == QImode && (op == EQ || op == NE))
3240 return CC_Zmode;
3242 if (GET_MODE (x) == SImode && (op == LTU || op == GEU)
3243 && GET_CODE (x) == PLUS
3244 && (rtx_equal_p (XEXP (x, 0), y) || rtx_equal_p (XEXP (x, 1), y)))
3245 return CC_Cmode;
3247 return CCmode;
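/* Example (illustrative): a test such as "if ((a & b) == 0)" can reach
   here as op == EQ with X = (and a b) and Y = const0_rtx; the AND sets
   the Z flag correctly but not V, so CC_NOOVmode is chosen above.  */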
3250 /* X and Y are two things to compare using CODE. Emit the compare insn and
3251 return the rtx for the cc register in the proper mode. FP means this is a
3252 floating point compare: I don't think that it is needed on the arm. */
3255 gen_compare_reg (code, x, y, fp)
3256 enum rtx_code code;
3257 rtx x, y;
3259 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
3260 rtx cc_reg = gen_rtx (REG, mode, 24);
3262 emit_insn (gen_rtx (SET, VOIDmode, cc_reg,
3263 gen_rtx (COMPARE, mode, x, y)));
3265 return cc_reg;
3268 void
3269 arm_reload_in_hi (operands)
3270 rtx *operands;
3272 rtx base = find_replacement (&XEXP (operands[1], 0));
3274 emit_insn (gen_zero_extendqisi2 (operands[2], gen_rtx (MEM, QImode, base)));
3275 emit_insn (gen_zero_extendqisi2 (gen_rtx (SUBREG, SImode, operands[0], 0),
3276 gen_rtx (MEM, QImode,
3277 plus_constant (base, 1))));
3278 if (BYTES_BIG_ENDIAN)
3279 emit_insn (gen_rtx (SET, VOIDmode, gen_rtx (SUBREG, SImode,
3280 operands[0], 0),
3281 gen_rtx (IOR, SImode,
3282 gen_rtx (ASHIFT, SImode,
3283 gen_rtx (SUBREG, SImode,
3284 operands[0], 0),
3285 GEN_INT (8)),
3286 operands[2])));
3287 else
3288 emit_insn (gen_rtx (SET, VOIDmode, gen_rtx (SUBREG, SImode,
3289 operands[0], 0),
3290 gen_rtx (IOR, SImode,
3291 gen_rtx (ASHIFT, SImode,
3292 operands[2],
3293 GEN_INT (8)),
3294 gen_rtx (SUBREG, SImode, operands[0], 0))));
3297 void
3298 arm_reload_out_hi (operands)
3299 rtx *operands;
3301 rtx base = find_replacement (&XEXP (operands[0], 0));
3303 if (BYTES_BIG_ENDIAN)
3305 emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (base, 1)),
3306 gen_rtx (SUBREG, QImode, operands[1], 0)));
3307 emit_insn (gen_lshrsi3 (operands[2],
3308 gen_rtx (SUBREG, SImode, operands[1], 0),
3309 GEN_INT (8)));
3310 emit_insn (gen_movqi (gen_rtx (MEM, QImode, base),
3311 gen_rtx (SUBREG, QImode, operands[2], 0)));
3313 else
3315 emit_insn (gen_movqi (gen_rtx (MEM, QImode, base),
3316 gen_rtx (SUBREG, QImode, operands[1], 0)));
3317 emit_insn (gen_lshrsi3 (operands[2],
3318 gen_rtx (SUBREG, SImode, operands[1], 0),
3319 GEN_INT (8)));
3320 emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (base, 1)),
3321 gen_rtx (SUBREG, QImode, operands[2], 0)));
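/* For illustration: on a little-endian target the sequence above stores
   roughly

       strb    rV, [rB]        @ low byte of operands[1]
       mov     rT, rV, lsr #8
       strb    rT, [rB, #1]    @ high byte via the scratch operands[2]

   where rV, rT and rB are stand-ins for the value, scratch and base
   registers.  */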
3325 /* Routines for manipulation of the constant pool. */
3326 /* This is unashamedly hacked from the version in sh.c, since the problem is
3327 extremely similar. */
3329 /* Arm instructions cannot load a large constant into a register;
3330 constants have to come from a pc relative load. The reference of a pc
3331 relative load instruction must be less than 1k in front of the instruction.
3332 This means that we often have to dump a constant inside a function, and
3333 generate code to branch around it.
3335 It is important to minimize this, since the branches will slow things
3336 down and make things bigger.
3338 Worst case code looks like:
3340 ldr rn, L1
3341 b L2
3342 align
3343 L1: .long value
3347 ldr rn, L3
3348 b L4
3349 align
3350 L3: .long value
3354 We fix this by performing a scan before scheduling, which notices which
3355 instructions need to have their operands fetched from the constant table
3356 and builds the table.
3359 The algorithm is:
3361 scan, find an instruction which needs a pcrel move. Look forward, find the
3362 last barrier which is within MAX_COUNT bytes of the requirement.
3363 If there isn't one, make one. Process all the instructions between
3364 the found instruction and the barrier.
3366 In the above example, we can tell that L3 is within 1k of L1, so
3367 the first move can be shrunk from the 2 insn+constant sequence into
3368 just 1 insn, and the constant moved to L3 to make:
3370 ldr rn, L1
3372 ldr rn, L3
3373 b L4
3374 align
3375 L1: .long value
3376 L3: .long value
3379 Then the second move becomes the target for the shortening process.
3383 typedef struct
3385 rtx value; /* Value in table */
3386 HOST_WIDE_INT next_offset;
3387 enum machine_mode mode; /* Mode of value */
3388 } pool_node;
3390 /* The maximum number of constants that can fit into one pool, since
3391 the pc relative range is 0...1020 bytes and constants are at least 4
3392 bytes long */
3394 #define MAX_POOL_SIZE (1020/4)
3395 static pool_node pool_vector[MAX_POOL_SIZE];
3396 static int pool_size;
3397 static rtx pool_vector_label;
3399 /* Add a constant to the pool and return its offset within the pool. */
3400 static HOST_WIDE_INT
3401 add_constant (x, mode)
3402 rtx x;
3403 enum machine_mode mode;
3405 int i;
3406 rtx lab;
3407 HOST_WIDE_INT offset;
3409 if (mode == SImode && GET_CODE (x) == MEM && CONSTANT_P (XEXP (x, 0))
3410 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
3411 x = get_pool_constant (XEXP (x, 0));
3412 #ifndef AOF_ASSEMBLER
3413 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == 3)
3414 x = XVECEXP (x, 0, 0);
3415 #endif
3417 #ifdef AOF_ASSEMBLER
3418 /* PIC Symbol references need to be converted into offsets into the
3419 based area. */
3420 if (flag_pic && GET_CODE (x) == SYMBOL_REF)
3421 x = aof_pic_entry (x);
3422 #endif /* AOF_ASSEMBLER */
3424 /* First see if we've already got it */
3425 for (i = 0; i < pool_size; i++)
3427 if (GET_CODE (x) == pool_vector[i].value->code
3428 && mode == pool_vector[i].mode)
3430 if (GET_CODE (x) == CODE_LABEL)
3432 if (XINT (x, 3) != XINT (pool_vector[i].value, 3))
3433 continue;
3435 if (rtx_equal_p (x, pool_vector[i].value))
3436 return pool_vector[i].next_offset - GET_MODE_SIZE (mode);
3440 /* Need a new one */
3441 pool_vector[pool_size].next_offset = GET_MODE_SIZE (mode);
3442 offset = 0;
3443 if (pool_size == 0)
3444 pool_vector_label = gen_label_rtx ();
3445 else
3446 pool_vector[pool_size].next_offset
3447 += (offset = pool_vector[pool_size - 1].next_offset);
3449 pool_vector[pool_size].value = x;
3450 pool_vector[pool_size].mode = mode;
3451 pool_size++;
3452 return offset;
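/* Worked example (illustrative): with an empty pool, adding an SImode
   constant returns offset 0 and sets next_offset to 4; adding a DFmode
   constant after that returns offset 4 and leaves next_offset at 12.  */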
3455 /* Output the literal table */
3456 static void
3457 dump_table (scan)
3458 rtx scan;
3460 int i;
3462 scan = emit_label_after (gen_label_rtx (), scan);
3463 scan = emit_insn_after (gen_align_4 (), scan);
3464 scan = emit_label_after (pool_vector_label, scan);
3466 for (i = 0; i < pool_size; i++)
3468 pool_node *p = pool_vector + i;
3470 switch (GET_MODE_SIZE (p->mode))
3472 case 4:
3473 scan = emit_insn_after (gen_consttable_4 (p->value), scan);
3474 break;
3476 case 8:
3477 scan = emit_insn_after (gen_consttable_8 (p->value), scan);
3478 break;
3480 default:
3481 abort ();
3482 break;
3486 scan = emit_insn_after (gen_consttable_end (), scan);
3487 scan = emit_barrier_after (scan);
3488 pool_size = 0;
3491 /* Nonzero if the src operand needs to be fixed up. */
3492 static int
3493 fixit (src, mode, destreg)
3494 rtx src;
3495 enum machine_mode mode;
3496 int destreg;
3498 if (CONSTANT_P (src))
3500 if (GET_CODE (src) == CONST_INT)
3501 return (! const_ok_for_arm (INTVAL (src))
3502 && ! const_ok_for_arm (~INTVAL (src)));
3503 if (GET_CODE (src) == CONST_DOUBLE)
3504 return (GET_MODE (src) == VOIDmode
3505 || destreg < 16
3506 || (! const_double_rtx_ok_for_fpu (src)
3507 && ! neg_const_double_rtx_ok_for_fpu (src)));
3508 return symbol_mentioned_p (src);
3510 #ifndef AOF_ASSEMBLER
3511 else if (GET_CODE (src) == UNSPEC && XINT (src, 1) == 3)
3512 return 1;
3513 #endif
3514 else
3515 return (mode == SImode && GET_CODE (src) == MEM
3516 && GET_CODE (XEXP (src, 0)) == SYMBOL_REF
3517 && CONSTANT_POOL_ADDRESS_P (XEXP (src, 0)));
3520 /* Find the last barrier less than MAX_COUNT bytes from FROM, or create one. */
3521 static rtx
3522 find_barrier (from, max_count)
3523 rtx from;
3524 int max_count;
3526 int count = 0;
3527 rtx found_barrier = 0;
3529 while (from && count < max_count)
3531 if (GET_CODE (from) == BARRIER)
3532 found_barrier = from;
3534 /* Count the length of this insn */
3535 if (GET_CODE (from) == INSN
3536 && GET_CODE (PATTERN (from)) == SET
3537 && CONSTANT_P (SET_SRC (PATTERN (from)))
3538 && CONSTANT_POOL_ADDRESS_P (SET_SRC (PATTERN (from))))
3540 rtx src = SET_SRC (PATTERN (from));
3541 count += 2;
3543 else
3544 count += get_attr_length (from);
3546 from = NEXT_INSN (from);
3549 if (!found_barrier)
3551 /* We didn't find a barrier in time to
3552 dump our stuff, so we'll make one */
3553 rtx label = gen_label_rtx ();
3555 if (from)
3556 from = PREV_INSN (from);
3557 else
3558 from = get_last_insn ();
3560 /* Walk back to be just before any jump */
3561 while (GET_CODE (from) == JUMP_INSN
3562 || GET_CODE (from) == NOTE
3563 || GET_CODE (from) == CODE_LABEL)
3564 from = PREV_INSN (from);
3566 from = emit_jump_insn_after (gen_jump (label), from);
3567 JUMP_LABEL (from) = label;
3568 found_barrier = emit_barrier_after (from);
3569 emit_label_after (label, found_barrier);
3570 return found_barrier;
3573 return found_barrier;
3576 /* Nonzero if the insn is a move instruction which needs to be fixed. */
3577 static int
3578 broken_move (insn)
3579 rtx insn;
3581 if (!INSN_DELETED_P (insn)
3582 && GET_CODE (insn) == INSN
3583 && GET_CODE (PATTERN (insn)) == SET)
3585 rtx pat = PATTERN (insn);
3586 rtx src = SET_SRC (pat);
3587 rtx dst = SET_DEST (pat);
3588 int destreg;
3589 enum machine_mode mode = GET_MODE (dst);
3590 if (dst == pc_rtx)
3591 return 0;
3593 if (GET_CODE (dst) == REG)
3594 destreg = REGNO (dst);
3595 else if (GET_CODE (dst) == SUBREG && GET_CODE (SUBREG_REG (dst)) == REG)
3596 destreg = REGNO (SUBREG_REG (dst));
else
return 0; /* Play safe: don't hand FIXIT an uninitialized DESTREG. */
3598 return fixit (src, mode, destreg);
3600 return 0;
3603 void
3604 arm_reorg (first)
3605 rtx first;
3607 rtx insn;
3608 int count_size;
3609 int regno;
3611 #if 0
3612 /* The ldr instruction can work with up to a 4k offset, and most constants
3613 will be loaded with one of these instructions; however, the adr
3614 instruction and the ldf instructions only work with a 1k offset. This
3615 code needs to be rewritten to use the 4k offset when possible, and to
3616 adjust when a 1k offset is needed. For now we just use a 1k offset
3617 from the start. */
3618 count_size = 4000;
3620 /* Floating point operands can't work further than 1024 bytes from the
3621 PC, so to make things simple we restrict all loads for such functions. */
3623 if (TARGET_HARD_FLOAT)
3624 for (regno = 16; regno < 24; regno++)
3625 if (regs_ever_live[regno])
3627 count_size = 1000;
3628 break;
3630 #else
3631 count_size = 1000;
3632 #endif /* 0 */
3634 for (insn = first; insn; insn = NEXT_INSN (insn))
3636 if (broken_move (insn))
3638 /* This is a broken move instruction; scan ahead looking for
3639 a barrier to stick the constant table behind. */
3640 rtx scan;
3641 rtx barrier = find_barrier (insn, count_size);
3643 /* Now find all the moves between the points and modify them */
3644 for (scan = insn; scan != barrier; scan = NEXT_INSN (scan))
3646 if (broken_move (scan))
3648 /* This is a broken move instruction; add it to the pool. */
3649 rtx pat = PATTERN (scan);
3650 rtx src = SET_SRC (pat);
3651 rtx dst = SET_DEST (pat);
3652 enum machine_mode mode = GET_MODE (dst);
3653 HOST_WIDE_INT offset;
3654 rtx newinsn = scan;
3655 rtx newsrc;
3656 rtx addr;
3657 int scratch;
3659 /* If this is an HImode constant load, convert it into
3660 an SImode constant load. Since the register is always
3661 32 bits this is safe. We have to do this, since the
3662 load pc-relative instruction only does a 32-bit load. */
3663 if (mode == HImode)
3665 mode = SImode;
3666 if (GET_CODE (dst) != REG)
3667 abort ();
3668 PUT_MODE (dst, SImode);
3671 offset = add_constant (src, mode);
3672 addr = plus_constant (gen_rtx (LABEL_REF, VOIDmode,
3673 pool_vector_label),
3674 offset);
3676 /* For wide moves to integer regs we need to split the
3677 address calculation off into a separate insn, so that
3678 the load can then be done with a load-multiple. This is
3679 safe, since we have already noted the length of such
3680 insns to be 8, and we are immediately over-writing the
3681 scratch we have grabbed with the final result. */
3682 if (GET_MODE_SIZE (mode) > 4
3683 && (scratch = REGNO (dst)) < 16)
3685 rtx reg = gen_rtx (REG, SImode, scratch);
3686 newinsn = emit_insn_after (gen_movaddr (reg, addr),
3687 newinsn);
3688 addr = reg;
3691 newsrc = gen_rtx (MEM, mode, addr);
3693 /* Build a jump insn wrapper around the move instead
3694 of an ordinary insn, because we want to have room for
3695 the target label rtx in fld[7], which an ordinary
3696 insn doesn't have. */
3697 newinsn = emit_jump_insn_after (gen_rtx (SET, VOIDmode,
3698 dst, newsrc),
3699 newinsn);
3700 JUMP_LABEL (newinsn) = pool_vector_label;
3702 /* But it's still an ordinary insn */
3703 PUT_CODE (newinsn, INSN);
3705 /* Kill old insn */
3706 delete_insn (scan);
3707 scan = newinsn;
3710 dump_table (barrier);
3711 insn = scan;
3717 /* Routines to output assembly language. */
3719 /* If the rtx is the correct value then return the string of the number.
3720 In this way we can ensure that valid double constants are generated even
3721 when cross compiling. */
3722 char *
3723 fp_immediate_constant (x)
3724 rtx x;
3726 REAL_VALUE_TYPE r;
3727 int i;
3729 if (!fpa_consts_inited)
3730 init_fpa_table ();
3732 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3733 for (i = 0; i < 8; i++)
3734 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
3735 return strings_fpa[i];
3737 abort ();
3740 /* As for fp_immediate_constant, but value is passed directly, not in rtx. */
3741 static char *
3742 fp_const_from_val (r)
3743 REAL_VALUE_TYPE *r;
3745 int i;
3747 if (! fpa_consts_inited)
3748 init_fpa_table ();
3750 for (i = 0; i < 8; i++)
3751 if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
3752 return strings_fpa[i];
3754 abort ();
3757 /* Output the operands of a LDM/STM instruction to STREAM.
3758 MASK is the ARM register set mask of which only bits 0-15 are important.
3759 INSTR is the instruction format string, including the possibly suffixed
3760 base register. HAT is nonzero if a hat (^) must follow the register list. */
3762 void
3763 print_multi_reg (stream, instr, mask, hat)
3764 FILE *stream;
3765 char *instr;
3766 int mask, hat;
3768 int i;
3769 int not_first = FALSE;
3771 fputc ('\t', stream);
3772 fprintf (stream, instr, REGISTER_PREFIX);
3773 fputs (", {", stream);
3774 for (i = 0; i < 16; i++)
3775 if (mask & (1 << i))
3777 if (not_first)
3778 fprintf (stream, ", ");
3779 fprintf (stream, "%s%s", REGISTER_PREFIX, reg_names[i]);
3780 not_first = TRUE;
3783 fprintf (stream, "}%s\n", hat ? "^" : "");
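/* Example (illustrative): called with a format such as "stmfd\t%ssp!"
   and mask 0x4030, this prints roughly

       stmfd   sp!, {r4, r5, lr}

   with a trailing ^ when HAT is nonzero.  */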
3786 /* Output a 'call' insn. */
3788 char *
3789 output_call (operands)
3790 rtx *operands;
3792 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
3794 if (REGNO (operands[0]) == 14)
3796 operands[0] = gen_rtx (REG, SImode, 12);
3797 output_asm_insn ("mov%?\t%0, %|lr", operands);
3799 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
3800 output_asm_insn ("mov%?\t%|pc, %0", operands);
3801 return "";
3804 static int
3805 eliminate_lr2ip (x)
3806 rtx *x;
3808 int something_changed = 0;
3809 rtx x0 = *x;
3810 int code = GET_CODE (x0);
3811 register int i, j;
3812 register char *fmt;
3814 switch (code)
3816 case REG:
3817 if (REGNO (x0) == 14)
3819 *x = gen_rtx (REG, SImode, 12);
3820 return 1;
3822 return 0;
3823 default:
3824 /* Scan through the sub-elements and change any references there */
3825 fmt = GET_RTX_FORMAT (code);
3826 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3827 if (fmt[i] == 'e')
3828 something_changed |= eliminate_lr2ip (&XEXP (x0, i));
3829 else if (fmt[i] == 'E')
3830 for (j = 0; j < XVECLEN (x0, i); j++)
3831 something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));
3832 return something_changed;
3836 /* Output a 'call' insn that is a reference in memory. */
3838 char *
3839 output_call_mem (operands)
3840 rtx *operands;
3842 operands[0] = copy_rtx (operands[0]); /* Be ultra careful */
3843 /* Handle calls using lr by using ip (which may be clobbered in subr anyway). */
3845 if (eliminate_lr2ip (&operands[0]))
3846 output_asm_insn ("mov%?\t%|ip, %|lr", operands);
3848 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
3849 output_asm_insn ("ldr%?\t%|pc, %0", operands);
3850 return "";
3854 /* Output a move from arm registers to an fpu register.
3855 OPERANDS[0] is an fpu register.
3856 OPERANDS[1] is the first register of an arm register pair. */
3858 char *
3859 output_mov_long_double_fpu_from_arm (operands)
3860 rtx *operands;
3862 int arm_reg0 = REGNO (operands[1]);
3863 rtx ops[3];
3865 if (arm_reg0 == 12)
3866 abort();
3868 ops[0] = gen_rtx (REG, SImode, arm_reg0);
3869 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
3870 ops[2] = gen_rtx (REG, SImode, 2 + arm_reg0);
3872 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
3873 output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);
3874 return "";
3877 /* Output a move from an fpu register to arm registers.
3878 OPERANDS[0] is the first register of an arm register pair.
3879 OPERANDS[1] is an fpu register. */
3881 char *
3882 output_mov_long_double_arm_from_fpu (operands)
3883 rtx *operands;
3885 int arm_reg0 = REGNO (operands[0]);
3886 rtx ops[3];
3888 if (arm_reg0 == 12)
3889 abort();
3891 ops[0] = gen_rtx (REG, SImode, arm_reg0);
3892 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
3893 ops[2] = gen_rtx (REG, SImode, 2 + arm_reg0);
3895 output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
3896 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
3897 return "";
3900 /* Output a move from arm registers to arm registers of a long double.
3901 OPERANDS[0] is the destination.
3902 OPERANDS[1] is the source. */
3903 char *
3904 output_mov_long_double_arm_from_arm (operands)
3905 rtx *operands;
3907 /* We have to be careful here because the two might overlap */
3908 int dest_start = REGNO (operands[0]);
3909 int src_start = REGNO (operands[1]);
3910 rtx ops[2];
3911 int i;
3913 if (dest_start < src_start)
3915 for (i = 0; i < 3; i++)
3917 ops[0] = gen_rtx (REG, SImode, dest_start + i);
3918 ops[1] = gen_rtx (REG, SImode, src_start + i);
3919 output_asm_insn ("mov%?\t%0, %1", ops);
3922 else
3924 for (i = 2; i >= 0; i--)
3926 ops[0] = gen_rtx (REG, SImode, dest_start + i);
3927 ops[1] = gen_rtx (REG, SImode, src_start + i);
3928 output_asm_insn ("mov%?\t%0, %1", ops);
3932 return "";
3936 /* Output a move from arm registers to an fpu register.
3937 OPERANDS[0] is an fpu register.
3938 OPERANDS[1] is the first register of an arm register pair. */
3940 char *
3941 output_mov_double_fpu_from_arm (operands)
3942 rtx *operands;
3944 int arm_reg0 = REGNO (operands[1]);
3945 rtx ops[2];
3947 if (arm_reg0 == 12)
3948 abort();
3949 ops[0] = gen_rtx (REG, SImode, arm_reg0);
3950 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
3951 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
3952 output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
3953 return "";
3956 /* Output a move from an fpu register to arm registers.
3957 OPERANDS[0] is the first register of an arm register pair.
3958 OPERANDS[1] is an fpu register. */
3960 char *
3961 output_mov_double_arm_from_fpu (operands)
3962 rtx *operands;
3964 int arm_reg0 = REGNO (operands[0]);
3965 rtx ops[2];
3967 if (arm_reg0 == 12)
3968 abort();
3970 ops[0] = gen_rtx (REG, SImode, arm_reg0);
3971 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
3972 output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
3973 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
3974 return "";
3977 /* Output a move between double words.
3978 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
3979 or MEM<-REG and all MEMs must be offsettable addresses. */
3981 char *
3982 output_move_double (operands)
3983 rtx *operands;
3985 enum rtx_code code0 = GET_CODE (operands[0]);
3986 enum rtx_code code1 = GET_CODE (operands[1]);
3987 rtx otherops[3];
3989 if (code0 == REG)
3991 int reg0 = REGNO (operands[0]);
3993 otherops[0] = gen_rtx (REG, SImode, 1 + reg0);
3994 if (code1 == REG)
3996 int reg1 = REGNO (operands[1]);
3997 if (reg1 == 12)
3998 abort();
4000 /* Ensure the second source is not overwritten */
4001 if (reg1 == reg0 + (WORDS_BIG_ENDIAN ? -1 : 1))
4002 output_asm_insn("mov%?\t%Q0, %Q1\n\tmov%?\t%R0, %R1", operands);
4003 else
4004 output_asm_insn("mov%?\t%R0, %R1\n\tmov%?\t%Q0, %Q1", operands);
4006 else if (code1 == CONST_DOUBLE)
4008 if (GET_MODE (operands[1]) == DFmode)
4010 long l[2];
4011 union real_extract u;
4013 bcopy ((char *) &CONST_DOUBLE_LOW (operands[1]), (char *) &u,
4014 sizeof (u));
4015 REAL_VALUE_TO_TARGET_DOUBLE (u.d, l);
4016 otherops[1] = GEN_INT(l[1]);
4017 operands[1] = GEN_INT(l[0]);
4019 else if (GET_MODE (operands[1]) != VOIDmode)
4020 abort ();
4021 else if (WORDS_BIG_ENDIAN)
4024 otherops[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
4025 operands[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
4027 else
4030 otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
4031 operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
4033 output_mov_immediate (operands);
4034 output_mov_immediate (otherops);
4036 else if (code1 == CONST_INT)
4038 #if HOST_BITS_PER_WIDE_INT > 32
4039 /* If HOST_WIDE_INT is more than 32 bits, the intval tells us
4040 what the upper word is. */
4041 if (WORDS_BIG_ENDIAN)
4043 otherops[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
4044 operands[1] = GEN_INT (INTVAL (operands[1]) >> 32);
4046 else
4048 otherops[1] = GEN_INT (INTVAL (operands[1]) >> 32);
4049 operands[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
4051 #else
4052 /* Sign extend the intval into the high-order word */
4053 if (WORDS_BIG_ENDIAN)
4055 otherops[1] = operands[1];
4056 operands[1] = (INTVAL (operands[1]) < 0
4057 ? constm1_rtx : const0_rtx);
4059 else
4060 otherops[1] = INTVAL (operands[1]) < 0 ? constm1_rtx : const0_rtx;
4061 #endif
4062 output_mov_immediate (otherops);
4063 output_mov_immediate (operands);
4065 else if (code1 == MEM)
4067 switch (GET_CODE (XEXP (operands[1], 0)))
4069 case REG:
4070 output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
4071 break;
4073 case PRE_INC:
4074 abort (); /* Should never happen now */
4075 break;
4077 case PRE_DEC:
4078 output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
4079 break;
4081 case POST_INC:
4082 output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
4083 break;
4085 case POST_DEC:
4086 abort (); /* Should never happen now */
4087 break;
4089 case LABEL_REF:
4090 case CONST:
4091 output_asm_insn ("adr%?\t%0, %1", operands);
4092 output_asm_insn ("ldm%?ia\t%0, %M0", operands);
4093 break;
4095 default:
4096 if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1)))
4098 otherops[0] = operands[0];
4099 otherops[1] = XEXP (XEXP (operands[1], 0), 0);
4100 otherops[2] = XEXP (XEXP (operands[1], 0), 1);
4101 if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
4103 if (GET_CODE (otherops[2]) == CONST_INT)
4105 switch (INTVAL (otherops[2]))
4107 case -8:
4108 output_asm_insn ("ldm%?db\t%1, %M0", otherops);
4109 return "";
4110 case -4:
4111 output_asm_insn ("ldm%?da\t%1, %M0", otherops);
4112 return "";
4113 case 4:
4114 output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
4115 return "";
4117 if (!(const_ok_for_arm (INTVAL (otherops[2]))))
4118 output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
4119 else
4120 output_asm_insn ("add%?\t%0, %1, %2", otherops);
4122 else
4123 output_asm_insn ("add%?\t%0, %1, %2", otherops);
4125 else
4126 output_asm_insn ("sub%?\t%0, %1, %2", otherops);
4127 return "ldm%?ia\t%0, %M0";
4129 else
4131 otherops[1] = adj_offsettable_operand (operands[1], 4);
4132 /* Take care of overlapping base/data reg. */
4133 if (reg_mentioned_p (operands[0], operands[1]))
4135 output_asm_insn ("ldr%?\t%0, %1", otherops);
4136 output_asm_insn ("ldr%?\t%0, %1", operands);
4138 else
4140 output_asm_insn ("ldr%?\t%0, %1", operands);
4141 output_asm_insn ("ldr%?\t%0, %1", otherops);
4146 else
4147 abort(); /* Constraints should prevent this */
4149 else if (code0 == MEM && code1 == REG)
4151 if (REGNO (operands[1]) == 12)
4152 abort();
4154 switch (GET_CODE (XEXP (operands[0], 0)))
4156 case REG:
4157 output_asm_insn ("stm%?ia\t%m0, %M1", operands);
4158 break;
4160 case PRE_INC:
4161 abort (); /* Should never happen now */
4162 break;
4164 case PRE_DEC:
4165 output_asm_insn ("stm%?db\t%m0!, %M1", operands);
4166 break;
4168 case POST_INC:
4169 output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
4170 break;
4172 case POST_DEC:
4173 abort (); /* Should never happen now */
4174 break;
4176 case PLUS:
4177 if (GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT)
4179 switch (INTVAL (XEXP (XEXP (operands[0], 0), 1)))
4181 case -8:
4182 output_asm_insn ("stm%?db\t%m0, %M1", operands);
4183 return "";
4185 case -4:
4186 output_asm_insn ("stm%?da\t%m0, %M1", operands);
4187 return "";
4189 case 4:
4190 output_asm_insn ("stm%?ib\t%m0, %M1", operands);
4191 return "";
4194 /* Fall through */
4196 default:
4197 otherops[0] = adj_offsettable_operand (operands[0], 4);
4198 otherops[1] = gen_rtx (REG, SImode, 1 + REGNO (operands[1]));
4199 output_asm_insn ("str%?\t%1, %0", operands);
4200 output_asm_insn ("str%?\t%1, %0", otherops);
4203 else
4204 abort(); /* Constraints should prevent this */
4206 return "";
4210 /* Output an arbitrary MOV reg, #n.
4211 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
4213 char *
4214 output_mov_immediate (operands)
4215 rtx *operands;
4217 HOST_WIDE_INT n = INTVAL (operands[1]);
4218 int n_ones = 0;
4219 int i;
4221 /* Try to use one MOV */
4222 if (const_ok_for_arm (n))
4224 output_asm_insn ("mov%?\t%0, %1", operands);
4225 return "";
4228 /* Try to use one MVN */
4229 if (const_ok_for_arm (~n))
4231 operands[1] = GEN_INT (~n);
4232 output_asm_insn ("mvn%?\t%0, %1", operands);
4233 return "";
4236 /* If all else fails, make it out of ORRs or BICs as appropriate. */
4238 for (i=0; i < 32; i++)
4239 if (n & 1 << i)
4240 n_ones++;
4242 if (n_ones > 16) /* Shorter to use MVN with BIC in this case. */
4243 output_multi_immediate(operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1,
4244 ~n);
4245 else
4246 output_multi_immediate(operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1,
4247 n);
4249 return "";
4253 /* Output an ADD r, s, #n where n may be too big for one instruction. If
4254 N is zero and the two registers are the same, output nothing. */
4256 char *
4257 output_add_immediate (operands)
4258 rtx *operands;
4260 HOST_WIDE_INT n = INTVAL (operands[2]);
4262 if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
4264 if (n < 0)
4265 output_multi_immediate (operands,
4266 "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
4267 -n);
4268 else
4269 output_multi_immediate (operands,
4270 "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
4274 return "";
4277 /* Output a multiple immediate operation.
4278 OPERANDS is the vector of operands referred to in the output patterns.
4279 INSTR1 is the output pattern to use for the first constant.
4280 INSTR2 is the output pattern to use for subsequent constants.
4281 IMMED_OP is the index of the constant slot in OPERANDS.
4282 N is the constant value. */
4284 static char *
4285 output_multi_immediate (operands, instr1, instr2, immed_op, n)
4286 rtx *operands;
4287 char *instr1, *instr2;
4288 int immed_op;
4289 HOST_WIDE_INT n;
4291 #if HOST_BITS_PER_WIDE_INT > 32
4292 n &= 0xffffffff;
4293 #endif
4295 if (n == 0)
4297 operands[immed_op] = const0_rtx;
4298 output_asm_insn (instr1, operands); /* Quick and easy output */
4300 else
4302 int i;
4303 char *instr = instr1;
4305 /* Note that n is never zero here (which would give no output) */
4306 for (i = 0; i < 32; i += 2)
4308 if (n & (3 << i))
4310 operands[immed_op] = GEN_INT (n & (255 << i));
4311 output_asm_insn (instr, operands);
4312 instr = instr2;
4313 i += 6;
4317 return "";
4321 /* Return the appropriate ARM instruction for the operation code.
4322 The returned result should not be overwritten. OP is the rtx of the
4323 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
4324 was shifted. */
4326 char *
4327 arithmetic_instr (op, shift_first_arg)
4328 rtx op;
4329 int shift_first_arg;
4331 switch (GET_CODE (op))
4333 case PLUS:
4334 return "add";
4336 case MINUS:
4337 return shift_first_arg ? "rsb" : "sub";
4339 case IOR:
4340 return "orr";
4342 case XOR:
4343 return "eor";
4345 case AND:
4346 return "and";
4348 default:
4349 abort ();
4354 /* Ensure valid constant shifts and return the appropriate shift mnemonic
4355 for the operation code. The returned result should not be overwritten.
4356 OP is the rtx code of the shift.
4357 On exit, *AMOUNTP will be -1 if the shift is by a register; otherwise it
4358 will be the constant shift amount. */
4360 static char *
4361 shift_op (op, amountp)
4362 rtx op;
4363 HOST_WIDE_INT *amountp;
4365 char *mnem;
4366 enum rtx_code code = GET_CODE (op);
4368 if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
4369 *amountp = -1;
4370 else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
4371 *amountp = INTVAL (XEXP (op, 1));
4372 else
4373 abort ();
4375 switch (code)
4377 case ASHIFT:
4378 mnem = "asl";
4379 break;
4381 case ASHIFTRT:
4382 mnem = "asr";
4383 break;
4385 case LSHIFTRT:
4386 mnem = "lsr";
4387 break;
4389 case ROTATERT:
4390 mnem = "ror";
4391 break;
4393 case MULT:
4394 /* We never have to worry about the amount being other than a
4395 power of 2, since this case can never be reloaded from a reg. */
4396 if (*amountp != -1)
4397 *amountp = int_log2 (*amountp);
4398 else
4399 abort ();
4400 return "asl";
4402 default:
4403 abort ();
4406 if (*amountp != -1)
4408 /* This is not 100% correct, but follows from the desire to merge
4409 multiplication by a power of 2 with the recognizer for a
4410 shift. >=32 is not a valid shift for "asl", so we must try and
4411 output a shift that produces the correct arithmetical result.
4412 Using lsr #32 is identical except for the fact that the carry bit
4413 is not set correctly if we set the flags; but we never use the
4414 carry bit from such an operation, so we can ignore that. */
4415 if (code == ROTATERT)
4416 *amountp &= 31; /* Rotate is just modulo 32 */
4417 else if (*amountp != (*amountp & 31))
4419 if (code == ASHIFT)
4420 mnem = "lsr";
4421 *amountp = 32;
4424 /* Shifts of 0 are no-ops. */
4425 if (*amountp == 0)
4426 return NULL;
4429 return mnem;
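/* Examples of the mapping above: (ashiftrt (reg) (const_int 2)) yields
   "asr" with *AMOUNTP == 2; (mult (reg) (const_int 8)) yields "asl"
   with *AMOUNTP == 3, the multiply being folded into a shift; a shift
   by a register yields its mnemonic with *AMOUNTP == -1.  */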
4433 /* Return the base-2 logarithm of POWER, which must be a power of two. */
4435 static HOST_WIDE_INT
4436 int_log2 (power)
4437 HOST_WIDE_INT power;
4439 HOST_WIDE_INT shift = 0;
4441 while (((((HOST_WIDE_INT) 1) << shift) & power) == 0)
4443 if (shift > 31)
4444 abort ();
4445 shift++;
4448 return shift;
4451 /* Output a .ascii pseudo-op, keeping track of lengths. This is because
4452 /bin/as is horribly restrictive. */
4454 void
4455 output_ascii_pseudo_op (stream, p, len)
4456 FILE *stream;
4457 unsigned char *p;
4458 int len;
4460 int i;
4461 int len_so_far = 1000; /* > 50: force a .ascii directive first time round. */
4462 int chars_so_far = 0;
4464 for (i = 0; i < len; i++)
4466 register int c = p[i];
4468 if (len_so_far > 50)
4470 if (chars_so_far)
4471 fputs ("\"\n", stream);
4472 fputs ("\t.ascii\t\"", stream);
4473 len_so_far = 0;
4474 chars_so_far = 0;
4477 if (c == '\"' || c == '\\')
4479 putc ('\\', stream);
4480 len_so_far++;
4483 if (c >= ' ' && c < 0177)
4485 putc (c, stream);
4486 len_so_far++;
4488 else
4490 fprintf (stream, "\\%03o", c);
4491 len_so_far += 4;
4494 chars_so_far++;
4497 fputs ("\"\n", stream);
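/* Example of the quoting and splitting above (illustrative only):

	output_ascii_pseudo_op (stdout, (unsigned char *) "hi\"\n", 4);

   prints

	.ascii "hi\"\012"

   and anything much longer than 50 characters is broken across
   several .ascii directives.  */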
4501 /* Try to determine whether a pattern really clobbers the link register.
4502 This information is useful when peepholing, so that lr need not be pushed
4503 if we combine a call followed by a return.
4504 NOTE: This code does not check for side-effect expressions in a SET_SRC:
4505 such a check should not be needed because these only update an existing
4506 value within a register; the register must still be set elsewhere within
4507 the function. */
4509 static int
4510 pattern_really_clobbers_lr (x)
4511 rtx x;
4513 int i;
4515 switch (GET_CODE (x))
4517 case SET:
4518 switch (GET_CODE (SET_DEST (x)))
4520 case REG:
4521 return REGNO (SET_DEST (x)) == 14;
4523 case SUBREG:
4524 if (GET_CODE (XEXP (SET_DEST (x), 0)) == REG)
4525 return REGNO (XEXP (SET_DEST (x), 0)) == 14;
4527 if (GET_CODE (XEXP (SET_DEST (x), 0)) == MEM)
4528 return 0;
4529 abort ();
4531 default:
4532 return 0;
4535 case PARALLEL:
4536 for (i = 0; i < XVECLEN (x, 0); i++)
4537 if (pattern_really_clobbers_lr (XVECEXP (x, 0, i)))
4538 return 1;
4539 return 0;
4541 case CLOBBER:
4542 switch (GET_CODE (XEXP (x, 0)))
4544 case REG:
4545 return REGNO (XEXP (x, 0)) == 14;
4547 case SUBREG:
4548 if (GET_CODE (XEXP (XEXP (x, 0), 0)) == REG)
4549 return REGNO (XEXP (XEXP (x, 0), 0)) == 14;
4550 abort ();
4552 default:
4553 return 0;
4556 case UNSPEC:
4557 return 1;
4559 default:
4560 return 0;
4564 static int
4565 function_really_clobbers_lr (first)
4566 rtx first;
4568 rtx insn, next;
4570 for (insn = first; insn; insn = next_nonnote_insn (insn))
4572 switch (GET_CODE (insn))
4574 case BARRIER:
4575 case NOTE:
4576 case CODE_LABEL:
4577 case JUMP_INSN: /* Jump insns only change the PC (and conds) */
4578 case INLINE_HEADER:
4579 break;
4581 case INSN:
4582 if (pattern_really_clobbers_lr (PATTERN (insn)))
4583 return 1;
4584 break;
4586 case CALL_INSN:
4587 /* Don't yet know how to handle those calls that are not to a
4588 SYMBOL_REF */
4589 if (GET_CODE (PATTERN (insn)) != PARALLEL)
4590 abort ();
4592 switch (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)))
4594 case CALL:
4595 if (GET_CODE (XEXP (XEXP (XVECEXP (PATTERN (insn), 0, 0), 0), 0))
4596 != SYMBOL_REF)
4597 return 1;
4598 break;
4600 case SET:
4601 if (GET_CODE (XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn),
4602 0, 0)), 0), 0))
4603 != SYMBOL_REF)
4604 return 1;
4605 break;
4607 default: /* Don't recognize it, be safe */
4608 return 1;
4611 /* A call can be made (by peepholing) not to clobber lr iff it is
4612 followed by a return. There may, however, be a USE insn in between
4613 if we are returning the result of the call.
4614 If we run off the end of the insn chain, then that means the
4615 call was at the end of the function. Unfortunately we don't
4616 have a return insn for the peephole to recognize, so we
4617 must reject this. (Can this be fixed by adding our own insn?) */
4618 if ((next = next_nonnote_insn (insn)) == NULL)
4619 return 1;
4621 /* No need to worry about lr if the call never returns */
4622 if (GET_CODE (next) == BARRIER)
4623 break;
4625 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == USE
4626 && (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
4627 && (REGNO (SET_DEST (XVECEXP (PATTERN (insn), 0, 0)))
4628 == REGNO (XEXP (PATTERN (next), 0))))
4629 if ((next = next_nonnote_insn (next)) == NULL)
4630 return 1;
4632 if (GET_CODE (next) == JUMP_INSN
4633 && GET_CODE (PATTERN (next)) == RETURN)
4634 break;
4635 return 1;
4637 default:
4638 abort ();
4642 /* We have reached the end of the chain so lr was _not_ clobbered */
4643 return 0;
4646 char *
4647 output_return_instruction (operand, really_return, reverse)
4648 rtx operand;
4649 int really_return;
4650 int reverse;
4652 char instr[100];
4653 int reg, live_regs = 0;
4654 int volatile_func = (optimize > 0
4655 && TREE_THIS_VOLATILE (current_function_decl));
4657 return_used_this_function = 1;
4659 if (volatile_func)
4661 rtx ops[2];
4662 /* If this function was declared non-returning, and we have found a tail
4663 call, then we have to trust that the called function won't return. */
4664 if (! really_return)
4665 return "";
4667 /* Otherwise, trap an attempted return by aborting. */
4668 ops[0] = operand;
4669 ops[1] = gen_rtx (SYMBOL_REF, Pmode, "abort");
4670 assemble_external_libcall (ops[1]);
4671 output_asm_insn (reverse ? "bl%D0\t%a1" : "bl%d0\t%a1", ops);
4672 return "";
4675 if (current_function_calls_alloca && ! really_return)
4676 abort ();
4678 for (reg = 0; reg <= 10; reg++)
4679 if (regs_ever_live[reg] && ! call_used_regs[reg])
4680 live_regs++;
4682 if (live_regs || (regs_ever_live[14] && ! lr_save_eliminated))
4683 live_regs++;
4685 if (frame_pointer_needed)
4686 live_regs += 4;
4688 if (live_regs)
4690 if (lr_save_eliminated || ! regs_ever_live[14])
4691 live_regs++;
4693 if (frame_pointer_needed)
4694 strcpy (instr,
4695 reverse ? "ldm%?%D0ea\t%|fp, {" : "ldm%?%d0ea\t%|fp, {");
4696 else
4697 strcpy (instr,
4698 reverse ? "ldm%?%D0fd\t%|sp!, {" : "ldm%?%d0fd\t%|sp!, {");
4700 for (reg = 0; reg <= 10; reg++)
4701 if (regs_ever_live[reg] && ! call_used_regs[reg])
4703 strcat (instr, "%|");
4704 strcat (instr, reg_names[reg]);
4705 if (--live_regs)
4706 strcat (instr, ", ");
4709 if (frame_pointer_needed)
4711 strcat (instr, "%|");
4712 strcat (instr, reg_names[11]);
4713 strcat (instr, ", ");
4714 strcat (instr, "%|");
4715 strcat (instr, reg_names[13]);
4716 strcat (instr, ", ");
4717 strcat (instr, "%|");
4718 strcat (instr, really_return ? reg_names[15] : reg_names[14]);
4720 else
4722 strcat (instr, "%|");
4723 strcat (instr, really_return ? reg_names[15] : reg_names[14]);
4725 strcat (instr, (TARGET_APCS_32 || !really_return) ? "}" : "}^");
4726 output_asm_insn (instr, &operand);
4728 else if (really_return)
4730 if (TARGET_THUMB_INTERWORK)
4731 sprintf (instr, "bx%%?%%%s\t%%|lr", reverse ? "D" : "d");
4732 else
4733 sprintf (instr, "mov%%?%%%s0%s\t%%|pc, %%|lr",
4734 reverse ? "D" : "d", TARGET_APCS_32 ? "" : "s");
4735 output_asm_insn (instr, &operand);
4738 return "";
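/* An example of the strings built above (assuming APCS-32, no frame
   pointer, and r4-r6 live): a real return produces

	ldmfd	sp!, {r4, r5, r6, pc}

   whereas in 26-bit mode the closing "}^" restores the PSW bits from
   the reloaded PC as well.  */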
4741 /* Return nonzero if optimizing and the current function is volatile.
4742 Such functions never return, and many memory cycles can be saved
4743 by not storing register values that will never be needed again.
4744 This optimization was added to speed up context switching in a
4745 kernel application. */
4747 int
4748 arm_volatile_func ()
4750 return (optimize > 0 && TREE_THIS_VOLATILE (current_function_decl));
4753 /* The amount of stack adjustment that happens here, in output_return and in
4754 output_epilogue must be exactly the same as was calculated during reload,
4755 or things will point to the wrong place. The only time we can safely
4756 ignore this constraint is when a function has no arguments on the stack,
4757 no stack frame requirement and no live registers except for `lr'. If we
4758 can guarantee that by making all function calls into tail calls and that
4759 lr is not clobbered in any other way, then there is no need to push lr
4760 onto the stack. */
4762 void
4763 output_func_prologue (f, frame_size)
4764 FILE *f;
4765 int frame_size;
4767 int reg, live_regs_mask = 0;
4768 rtx operands[3];
4769 int volatile_func = (optimize > 0
4770 && TREE_THIS_VOLATILE (current_function_decl));
4772 /* Nonzero if we must stuff some register arguments onto the stack as if
4773 they were passed there. */
4774 int store_arg_regs = 0;
4776 if (arm_ccfsm_state || arm_target_insn)
4777 abort (); /* Sanity check */
4779 if (arm_naked_function_p (current_function_decl))
4780 return;
4782 return_used_this_function = 0;
4783 lr_save_eliminated = 0;
4785 fprintf (f, "\t%s args = %d, pretend = %d, frame = %d\n",
4786 ASM_COMMENT_START, current_function_args_size,
4787 current_function_pretend_args_size, frame_size);
4788 fprintf (f, "\t%s frame_needed = %d, current_function_anonymous_args = %d\n",
4789 ASM_COMMENT_START, frame_pointer_needed,
4790 current_function_anonymous_args);
4792 if (volatile_func)
4793 fprintf (f, "\t%s Volatile function.\n", ASM_COMMENT_START);
4795 if (current_function_anonymous_args && current_function_pretend_args_size)
4796 store_arg_regs = 1;
4798 for (reg = 0; reg <= 10; reg++)
4799 if (regs_ever_live[reg] && ! call_used_regs[reg])
4800 live_regs_mask |= (1 << reg);
4802 if (frame_pointer_needed)
4803 live_regs_mask |= 0xD800;
4804 else if (regs_ever_live[14])
4806 if (! current_function_args_size
4807 && ! function_really_clobbers_lr (get_insns ()))
4808 lr_save_eliminated = 1;
4809 else
4810 live_regs_mask |= 0x4000;
4813 if (live_regs_mask)
4815 /* If a DImode load/store multiple is used, and the base register
4816 is r3, then r4 can become an ever-live register without lr
4817 doing so; in this case we need to push lr as well, or we
4818 will fail to get a proper return. */
4820 live_regs_mask |= 0x4000;
4821 lr_save_eliminated = 0;
4825 if (lr_save_eliminated)
4826 fprintf (f, "\t%s I don't think this function clobbers lr\n",
4827 ASM_COMMENT_START);
4829 #ifdef AOF_ASSEMBLER
4830 if (flag_pic)
4831 fprintf (f, "\tmov\t%sip, %s%s\n", REGISTER_PREFIX, REGISTER_PREFIX,
4832 reg_names[PIC_OFFSET_TABLE_REGNUM]);
4833 #endif
4837 void
4838 output_func_epilogue (f, frame_size)
4839 FILE *f;
4840 int frame_size;
4842 int reg, live_regs_mask = 0;
4843 /* If we need this then it will always be at least this much */
4844 int floats_offset = 12;
4845 rtx operands[3];
4846 int volatile_func = (optimize > 0
4847 && TREE_THIS_VOLATILE (current_function_decl));
4849 if (use_return_insn () && return_used_this_function)
4851 if ((frame_size + current_function_outgoing_args_size) != 0
4852 && !(frame_pointer_needed || TARGET_APCS))
4853 abort ();
4854 goto epilogue_done;
4857 /* Naked functions don't have epilogues. */
4858 if (arm_naked_function_p (current_function_decl))
4859 goto epilogue_done;
4861 /* A volatile function should never return. Call abort. */
4862 if (volatile_func)
4864 rtx op = gen_rtx (SYMBOL_REF, Pmode, "abort");
4865 assemble_external_libcall (op);
4866 output_asm_insn ("bl\t%a0", &op);
4867 goto epilogue_done;
4870 for (reg = 0; reg <= 10; reg++)
4871 if (regs_ever_live[reg] && ! call_used_regs[reg])
4873 live_regs_mask |= (1 << reg);
4874 floats_offset += 4;
4877 if (frame_pointer_needed)
4879 if (arm_fpu_arch == FP_SOFT2)
4881 for (reg = 23; reg > 15; reg--)
4882 if (regs_ever_live[reg] && ! call_used_regs[reg])
4884 floats_offset += 12;
4885 fprintf (f, "\tldfe\t%s%s, [%sfp, #-%d]\n", REGISTER_PREFIX,
4886 reg_names[reg], REGISTER_PREFIX, floats_offset);
4889 else
4891 int start_reg = 23;
4893 for (reg = 23; reg > 15; reg--)
4895 if (regs_ever_live[reg] && ! call_used_regs[reg])
4897 floats_offset += 12;
4898 /* We can't unstack more than four registers at once */
4899 if (start_reg - reg == 3)
4901 fprintf (f, "\tlfm\t%s%s, 4, [%sfp, #-%d]\n",
4902 REGISTER_PREFIX, reg_names[reg],
4903 REGISTER_PREFIX, floats_offset);
4904 start_reg = reg - 1;
4907 else
4909 if (reg != start_reg)
4910 fprintf (f, "\tlfm\t%s%s, %d, [%sfp, #-%d]\n",
4911 REGISTER_PREFIX, reg_names[reg + 1],
4912 start_reg - reg, REGISTER_PREFIX, floats_offset);
4914 start_reg = reg - 1;
4918 /* Just in case the last register checked also needs unstacking. */
4919 if (reg != start_reg)
4920 fprintf (f, "\tlfm\t%s%s, %d, [%sfp, #-%d]\n",
4921 REGISTER_PREFIX, reg_names[reg + 1],
4922 start_reg - reg, REGISTER_PREFIX, floats_offset);
4925 if (TARGET_THUMB_INTERWORK)
4927 live_regs_mask |= 0x6800;
4928 print_multi_reg (f, "ldmea\t%sfp", live_regs_mask, FALSE);
4929 fprintf (f, "\tbx\t%slr\n", REGISTER_PREFIX);
4931 else
4933 live_regs_mask |= 0xA800;
4934 print_multi_reg (f, "ldmea\t%sfp", live_regs_mask,
4935 TARGET_APCS_32 ? FALSE : TRUE);
4938 else
4940 /* Restore stack pointer if necessary. */
4941 if (frame_size + current_function_outgoing_args_size != 0)
4943 operands[0] = operands[1] = stack_pointer_rtx;
4944 operands[2] = GEN_INT (frame_size
4945 + current_function_outgoing_args_size);
4946 output_add_immediate (operands);
4949 if (arm_fpu_arch == FP_SOFT2)
4951 for (reg = 16; reg < 24; reg++)
4952 if (regs_ever_live[reg] && ! call_used_regs[reg])
4953 fprintf (f, "\tldfe\t%s%s, [%ssp], #12\n", REGISTER_PREFIX,
4954 reg_names[reg], REGISTER_PREFIX);
4956 else
4958 int start_reg = 16;
4960 for (reg = 16; reg < 24; reg++)
4962 if (regs_ever_live[reg] && ! call_used_regs[reg])
4964 if (reg - start_reg == 3)
4966 fprintf (f, "\tlfmfd\t%s%s, 4, [%ssp]!\n",
4967 REGISTER_PREFIX, reg_names[start_reg],
4968 REGISTER_PREFIX);
4969 start_reg = reg + 1;
4972 else
4974 if (reg != start_reg)
4975 fprintf (f, "\tlfmfd\t%s%s, %d, [%ssp]!\n",
4976 REGISTER_PREFIX, reg_names[start_reg],
4977 reg - start_reg, REGISTER_PREFIX);
4979 start_reg = reg + 1;
4983 /* Just in case the last register checked also needs unstacking. */
4984 if (reg != start_reg)
4985 fprintf (f, "\tlfmfd\t%s%s, %d, [%ssp]!\n",
4986 REGISTER_PREFIX, reg_names[start_reg],
4987 reg - start_reg, REGISTER_PREFIX);
4990 if (current_function_pretend_args_size == 0 && regs_ever_live[14])
4992 if (TARGET_THUMB_INTERWORK)
4994 if (! lr_save_eliminated)
4995 print_multi_reg (f, "ldmfd\t%ssp!", live_regs_mask | 0x4000,
4996 FALSE);
4998 fprintf (f, "\tbx\t%slr\n", REGISTER_PREFIX);
5000 else if (lr_save_eliminated)
5001 fprintf (f, (TARGET_APCS_32 ? "\tmov\t%spc, %slr\n"
5002 : "\tmovs\t%spc, %slr\n"),
5003 REGISTER_PREFIX, REGISTER_PREFIX);
5004 else
5005 print_multi_reg (f, "ldmfd\t%ssp!", live_regs_mask | 0x8000,
5006 TARGET_APCS_32 ? FALSE : TRUE);
5008 else
5010 if (live_regs_mask || regs_ever_live[14])
5012 /* Restore the integer regs, and the return address into lr */
5013 if (! lr_save_eliminated)
5014 live_regs_mask |= 0x4000;
5016 if (live_regs_mask != 0)
5017 print_multi_reg (f, "ldmfd\t%ssp!", live_regs_mask, FALSE);
5020 if (current_function_pretend_args_size)
5022 /* Unwind the pre-pushed regs */
5023 operands[0] = operands[1] = stack_pointer_rtx;
5024 operands[2] = gen_rtx (CONST_INT, VOIDmode,
5025 current_function_pretend_args_size);
5026 output_add_immediate (operands);
5028 /* And finally, go home */
5029 if (TARGET_THUMB_INTERWORK)
5030 fprintf (f, "\tbx\t%slr\n", REGISTER_PREFIX);
5031 else
5032 fprintf (f, (TARGET_APCS_32 ? "\tmov\t%spc, %slr\n"
5033 : "\tmovs\t%spc, %slr\n"),
5034 REGISTER_PREFIX, REGISTER_PREFIX);
5038 epilogue_done:
5040 current_function_anonymous_args = 0;
5043 static void
5044 emit_multi_reg_push (mask)
5045 int mask;
5047 int num_regs = 0;
5048 int i, j;
5049 rtx par;
5051 for (i = 0; i < 16; i++)
5052 if (mask & (1 << i))
5053 num_regs++;
5055 if (num_regs == 0 || num_regs > 16)
5056 abort ();
5058 par = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num_regs));
5060 for (i = 0; i < 16; i++)
5062 if (mask & (1 << i))
5064 XVECEXP (par, 0, 0)
5065 = gen_rtx (SET, VOIDmode, gen_rtx (MEM, BLKmode,
5066 gen_rtx (PRE_DEC, BLKmode,
5067 stack_pointer_rtx)),
5068 gen_rtx (UNSPEC, BLKmode,
5069 gen_rtvec (1, gen_rtx (REG, SImode, i)),
5070 2));
5071 break;
5075 for (j = 1, i++; j < num_regs; i++)
5077 if (mask & (1 << i))
5079 XVECEXP (par, 0, j)
5080 = gen_rtx (USE, VOIDmode, gen_rtx (REG, SImode, i));
5081 j++;
5085 emit_insn (par);
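/* Sketch of the PARALLEL built above, for a mask covering just r4 and
   r5:

	(parallel [(set (mem (pre_dec sp)) (unspec [(reg r4)] 2))
		   (use (reg r5))])

   Only the lowest register appears as a real store; the remaining
   registers of the store-multiple are recorded as USEs.  */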
5088 static void
5089 emit_sfm (base_reg, count)
5090 int base_reg;
5091 int count;
5093 rtx par;
5094 int i;
5096 par = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (count));
5098 XVECEXP (par, 0, 0) = gen_rtx (SET, VOIDmode,
5099 gen_rtx (MEM, BLKmode,
5100 gen_rtx (PRE_DEC, BLKmode,
5101 stack_pointer_rtx)),
5102 gen_rtx (UNSPEC, BLKmode,
5103 gen_rtvec (1, gen_rtx (REG, XFmode,
5104 base_reg++)),
5105 2));
5106 for (i = 1; i < count; i++)
5107 XVECEXP (par, 0, i) = gen_rtx (USE, VOIDmode,
5108 gen_rtx (REG, XFmode, base_reg++));
5110 emit_insn (par);
5113 void
5114 arm_expand_prologue ()
5116 int reg;
5117 rtx amount = GEN_INT (-(get_frame_size ()
5118 + current_function_outgoing_args_size));
5119 rtx push_insn;
5120 int num_regs;
5121 int live_regs_mask = 0;
5122 int store_arg_regs = 0;
5123 int volatile_func = (optimize > 0
5124 && TREE_THIS_VOLATILE (current_function_decl));
5126 /* Naked functions don't have prologues. */
5127 if (arm_naked_function_p (current_function_decl))
5128 return;
5130 if (current_function_anonymous_args && current_function_pretend_args_size)
5131 store_arg_regs = 1;
5133 if (! volatile_func)
5134 for (reg = 0; reg <= 10; reg++)
5135 if (regs_ever_live[reg] && ! call_used_regs[reg])
5136 live_regs_mask |= 1 << reg;
5138 if (! volatile_func && regs_ever_live[14])
5139 live_regs_mask |= 0x4000;
5141 if (frame_pointer_needed)
5143 live_regs_mask |= 0xD800;
5144 emit_insn (gen_movsi (gen_rtx (REG, SImode, 12),
5145 stack_pointer_rtx));
5148 if (current_function_pretend_args_size)
5150 if (store_arg_regs)
5151 emit_multi_reg_push ((0xf0 >> (current_function_pretend_args_size / 4))
5152 & 0xf); /* The top size/4 of {r0-r3}: e.g. 8 bytes => {r2, r3}. */
5153 else
5154 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
5155 GEN_INT (-current_function_pretend_args_size)));
5158 if (live_regs_mask)
5160 /* If we have to push any regs, then we must push lr as well, or
5161 we won't get a proper return. */
5162 live_regs_mask |= 0x4000;
5163 emit_multi_reg_push (live_regs_mask);
5166 /* For now the integer regs are still restored in output_func_epilogue (). */
5168 if (! volatile_func)
5170 if (arm_fpu_arch == FP_SOFT2)
5172 for (reg = 23; reg > 15; reg--)
5173 if (regs_ever_live[reg] && ! call_used_regs[reg])
5174 emit_insn (gen_rtx (SET, VOIDmode,
5175 gen_rtx (MEM, XFmode,
5176 gen_rtx (PRE_DEC, XFmode,
5177 stack_pointer_rtx)),
5178 gen_rtx (REG, XFmode, reg)));
5180 else
5182 int start_reg = 23;
5184 for (reg = 23; reg > 15; reg--)
5186 if (regs_ever_live[reg] && ! call_used_regs[reg])
5188 if (start_reg - reg == 3)
5190 emit_sfm (reg, 4);
5191 start_reg = reg - 1;
5194 else
5196 if (start_reg != reg)
5197 emit_sfm (reg + 1, start_reg - reg);
5198 start_reg = reg - 1;
5202 if (start_reg != reg)
5203 emit_sfm (reg + 1, start_reg - reg);
5207 if (frame_pointer_needed)
5208 emit_insn (gen_addsi3 (hard_frame_pointer_rtx, gen_rtx (REG, SImode, 12),
5209 (GEN_INT
5210 (-(4 + current_function_pretend_args_size)))));
5212 if (amount != const0_rtx)
5214 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, amount));
5215 emit_insn (gen_rtx (CLOBBER, VOIDmode,
5216 gen_rtx (MEM, BLKmode, stack_pointer_rtx)));
5219 /* If we are profiling, make sure no instructions are scheduled before
5220 the call to mcount. */
5221 if (profile_flag || profile_block_flag)
5222 emit_insn (gen_blockage ());
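/* A typical instruction sequence resulting from this prologue
   (assuming APCS register names, a frame pointer, r4 live and a
   16-byte frame):

	mov	ip, sp
	stmfd	sp!, {r4, fp, ip, lr, pc}
	sub	fp, ip, #4
	sub	sp, sp, #16
*/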
5226 /* If CODE is 'd', then X is a condition operand and the instruction
5227 should only be executed if the condition is true.
5228 If CODE is 'D', then X is a condition operand and the instruction
5229 should only be executed if the condition is false: however, if the mode
5230 of the comparison is CCFPEmode, then always execute the instruction -- we
5231 do this because in these circumstances !GE does not necessarily imply LT;
5232 in these cases the instruction pattern will take care to make sure that
5233 an instruction containing %d will follow, thereby undoing the effects of
5234 doing this instruction unconditionally.
5235 If CODE is 'N' then X is a floating point operand that must be negated
5236 before output.
5237 If CODE is 'B' then output a bitwise inverted value of X (a const int).
5238 If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */
5240 void
5241 arm_print_operand (stream, x, code)
5242 FILE *stream;
5243 rtx x;
5244 int code;
5246 switch (code)
5248 case '@':
5249 fputs (ASM_COMMENT_START, stream);
5250 return;
5252 case '|':
5253 fputs (REGISTER_PREFIX, stream);
5254 return;
5256 case '?':
5257 if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
5258 fputs (arm_condition_codes[arm_current_cc], stream);
5259 return;
5261 case 'N':
5263 REAL_VALUE_TYPE r;
5264 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
5265 r = REAL_VALUE_NEGATE (r);
5266 fprintf (stream, "%s", fp_const_from_val (&r));
5268 return;
5270 case 'B':
5271 if (GET_CODE (x) == CONST_INT)
5272 fprintf (stream,
5273 #if HOST_BITS_PER_WIDE_INT == HOST_BITS_PER_INT
5274 "%d",
5275 #else
5276 "%ld",
5277 #endif
5278 ARM_SIGN_EXTEND (~ INTVAL (x)));
5279 else
5281 putc ('~', stream);
5282 output_addr_const (stream, x);
5284 return;
5286 case 'i':
5287 fprintf (stream, "%s", arithmetic_instr (x, 1));
5288 return;
5290 case 'I':
5291 fprintf (stream, "%s", arithmetic_instr (x, 0));
5292 return;
5294 case 'S':
5296 HOST_WIDE_INT val;
5297 char *shift = shift_op (x, &val);
5299 if (shift)
5301 fprintf (stream, ", %s ", shift);
5302 if (val == -1)
5303 arm_print_operand (stream, XEXP (x, 1), 0);
5304 else
5305 fprintf (stream,
5306 #if HOST_BITS_PER_WIDE_INT == HOST_BITS_PER_INT
5307 "#%d",
5308 #else
5309 "#%ld",
5310 #endif
5311 val);
5314 return;
5316 case 'Q':
5317 if (REGNO (x) > 15)
5318 abort ();
5319 fputs (REGISTER_PREFIX, stream);
5320 fputs (reg_names[REGNO (x) + (WORDS_BIG_ENDIAN ? 1 : 0)], stream);
5321 return;
5323 case 'R':
5324 if (REGNO (x) > 15)
5325 abort ();
5326 fputs (REGISTER_PREFIX, stream);
5327 fputs (reg_names[REGNO (x) + (WORDS_BIG_ENDIAN ? 0 : 1)], stream);
5328 return;
5330 case 'm':
5331 fputs (REGISTER_PREFIX, stream);
5332 if (GET_CODE (XEXP (x, 0)) == REG)
5333 fputs (reg_names[REGNO (XEXP (x, 0))], stream);
5334 else
5335 fputs (reg_names[REGNO (XEXP (XEXP (x, 0), 0))], stream);
5336 return;
5338 case 'M':
5339 fprintf (stream, "{%s%s-%s%s}", REGISTER_PREFIX, reg_names[REGNO (x)],
5340 REGISTER_PREFIX, reg_names[REGNO (x) - 1
5341 + ((GET_MODE_SIZE (GET_MODE (x))
5342 + GET_MODE_SIZE (SImode) - 1)
5343 / GET_MODE_SIZE (SImode))]);
5344 return;
5346 case 'd':
5347 if (x)
5348 fputs (arm_condition_codes[get_arm_condition_code (x)],
5349 stream);
5350 return;
5352 case 'D':
5353 if (x)
5354 fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
5355 (get_arm_condition_code (x))],
5356 stream);
5357 return;
5359 default:
5360 if (x == 0)
5361 abort ();
5363 if (GET_CODE (x) == REG)
5365 fputs (REGISTER_PREFIX, stream);
5366 fputs (reg_names[REGNO (x)], stream);
5368 else if (GET_CODE (x) == MEM)
5370 output_memory_reference_mode = GET_MODE (x);
5371 output_address (XEXP (x, 0));
5373 else if (GET_CODE (x) == CONST_DOUBLE)
5374 fprintf (stream, "#%s", fp_immediate_constant (x));
5375 else if (GET_CODE (x) == NEG)
5376 abort (); /* This should never happen now. */
5377 else
5379 fputc ('#', stream);
5380 output_addr_const (stream, x);
5386 /* A finite state machine takes care of noticing whether or not instructions
5387 can be conditionally executed, and thus decrease execution time and code
5388 size by deleting branch instructions. The fsm is controlled by
5389 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
5391 /* The state of the fsm controlling condition codes are:
5392 0: normal, do nothing special
5393 1: make ASM_OUTPUT_OPCODE not output this instruction
5394 2: make ASM_OUTPUT_OPCODE not output this instruction
5395 3: make instructions conditional
5396 4: make instructions conditional
5398 State transitions (state->state by whom under condition):
5399 0 -> 1 final_prescan_insn if the `target' is a label
5400 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
5401 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
5402 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
5403 3 -> 0 ASM_OUTPUT_INTERNAL_LABEL if the `target' label is reached
5404 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
5405 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
5406 (the target insn is arm_target_insn).
5408 If the jump clobbers the conditions then we use states 2 and 4.
5410 A similar thing can be done with conditional return insns.
5412 XXX In case the `target' is an unconditional branch, this conditionalising
5413 of the instructions always reduces code size, but not always execution
5414 time. But then, I want to reduce the code size to somewhere near what
5415 /bin/cc produces. */
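/* For example, a conditional branch over a single insn, such as

	cmp	r0, #0
	beq	.L1
	add	r1, r1, #1
   .L1:

   is rewritten by this machinery as

	cmp	r0, #0
	addne	r1, r1, #1

   eliminating the branch entirely.  */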
5417 /* Returns the index of the ARM condition code string in
5418 `arm_condition_codes'. COMPARISON should be an rtx like
5419 `(eq (...) (...))'. */
5421 static enum arm_cond_code
5422 get_arm_condition_code (comparison)
5423 rtx comparison;
5425 enum machine_mode mode = GET_MODE (XEXP (comparison, 0));
5426 register int code;
5427 register enum rtx_code comp_code = GET_CODE (comparison);
5429 if (GET_MODE_CLASS (mode) != MODE_CC)
5430 mode = SELECT_CC_MODE (comp_code, XEXP (comparison, 0),
5431 XEXP (comparison, 1));
5433 switch (mode)
5435 case CC_DNEmode: code = ARM_NE; goto dominance;
5436 case CC_DEQmode: code = ARM_EQ; goto dominance;
5437 case CC_DGEmode: code = ARM_GE; goto dominance;
5438 case CC_DGTmode: code = ARM_GT; goto dominance;
5439 case CC_DLEmode: code = ARM_LE; goto dominance;
5440 case CC_DLTmode: code = ARM_LT; goto dominance;
5441 case CC_DGEUmode: code = ARM_CS; goto dominance;
5442 case CC_DGTUmode: code = ARM_HI; goto dominance;
5443 case CC_DLEUmode: code = ARM_LS; goto dominance;
5444 case CC_DLTUmode: code = ARM_CC;
5446 dominance:
5447 if (comp_code != EQ && comp_code != NE)
5448 abort ();
5450 if (comp_code == EQ)
5451 return ARM_INVERSE_CONDITION_CODE (code);
5452 return code;
5454 case CC_NOOVmode:
5455 switch (comp_code)
5457 case NE: return ARM_NE;
5458 case EQ: return ARM_EQ;
5459 case GE: return ARM_PL;
5460 case LT: return ARM_MI;
5461 default: abort ();
5464 case CC_Zmode:
5465 case CCFPmode:
5466 switch (comp_code)
5468 case NE: return ARM_NE;
5469 case EQ: return ARM_EQ;
5470 default: abort ();
5473 case CCFPEmode:
5474 switch (comp_code)
5476 case GE: return ARM_GE;
5477 case GT: return ARM_GT;
5478 case LE: return ARM_LS;
5479 case LT: return ARM_MI;
5480 default: abort ();
5483 case CC_SWPmode:
5484 switch (comp_code)
5486 case NE: return ARM_NE;
5487 case EQ: return ARM_EQ;
5488 case GE: return ARM_LE;
5489 case GT: return ARM_LT;
5490 case LE: return ARM_GE;
5491 case LT: return ARM_GT;
5492 case GEU: return ARM_LS;
5493 case GTU: return ARM_CC;
5494 case LEU: return ARM_CS;
5495 case LTU: return ARM_HI;
5496 default: abort ();
5499 case CC_Cmode:
5500 switch (comp_code)
5502 case LTU: return ARM_CS;
5503 case GEU: return ARM_CC;
5504 default: abort ();
5507 case CCmode:
5508 switch (comp_code)
5510 case NE: return ARM_NE;
5511 case EQ: return ARM_EQ;
5512 case GE: return ARM_GE;
5513 case GT: return ARM_GT;
5514 case LE: return ARM_LE;
5515 case LT: return ARM_LT;
5516 case GEU: return ARM_CS;
5517 case GTU: return ARM_HI;
5518 case LEU: return ARM_LS;
5519 case LTU: return ARM_CC;
5520 default: abort ();
5523 default: abort ();
5526 abort ();
5530 void
5531 final_prescan_insn (insn, opvec, noperands)
5532 rtx insn;
5533 rtx *opvec;
5534 int noperands;
5536 /* BODY will hold the body of INSN. */
5537 register rtx body = PATTERN (insn);
5539 /* This will be 1 if trying to repeat the trick, and things need to be
5540 reversed if it appears to fail. */
5541 int reverse = 0;
5543 /* JUMP_CLOBBERS being nonzero means that the condition codes are
5544 clobbered if the branch is taken, even if the rtl suggests otherwise. It
5545 also means that we have to grub around within the jump expression to find
5546 out what the conditions are when the jump isn't taken. */
5547 int jump_clobbers = 0;
5549 /* If we start with a return insn, we only succeed if we find another one. */
5550 int seeking_return = 0;
5552 /* START_INSN will hold the insn from where we start looking. This is the
5553 first insn after the following code_label if REVERSE is true. */
5554 rtx start_insn = insn;
5556 /* If in state 4, check if the target branch is reached, in order to
5557 change back to state 0. */
5558 if (arm_ccfsm_state == 4)
5560 if (insn == arm_target_insn)
5562 arm_target_insn = NULL;
5563 arm_ccfsm_state = 0;
5565 return;
5568 /* If in state 3, it is possible to repeat the trick, if this insn is an
5569 unconditional branch to a label, and immediately following this branch
5570 is the previous target label which is only used once, and the label this
5571 branch jumps to is not too far off. */
5572 if (arm_ccfsm_state == 3)
5574 if (simplejump_p (insn))
5576 start_insn = next_nonnote_insn (start_insn);
5577 if (GET_CODE (start_insn) == BARRIER)
5579 /* XXX Isn't this always a barrier? */
5580 start_insn = next_nonnote_insn (start_insn);
5582 if (GET_CODE (start_insn) == CODE_LABEL
5583 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
5584 && LABEL_NUSES (start_insn) == 1)
5585 reverse = TRUE;
5586 else
5587 return;
5589 else if (GET_CODE (body) == RETURN)
5591 start_insn = next_nonnote_insn (start_insn);
5592 if (GET_CODE (start_insn) == BARRIER)
5593 start_insn = next_nonnote_insn (start_insn);
5594 if (GET_CODE (start_insn) == CODE_LABEL
5595 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
5596 && LABEL_NUSES (start_insn) == 1)
5598 reverse = TRUE;
5599 seeking_return = 1;
5601 else
5602 return;
5604 else
5605 return;
5608 if (arm_ccfsm_state != 0 && !reverse)
5609 abort ();
5610 if (GET_CODE (insn) != JUMP_INSN)
5611 return;
5613 /* This jump might be paralleled with a clobber of the condition codes;
5614 the jump should always come first. */
5615 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
5616 body = XVECEXP (body, 0, 0);
5618 #if 0
5619 /* If this is a conditional return then we don't want to know */
5620 if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
5621 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
5622 && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
5623 || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
5624 return;
5625 #endif
5627 if (reverse
5628 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
5629 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
5631 int insns_skipped;
5632 int fail = FALSE, succeed = FALSE;
5633 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
5634 int then_not_else = TRUE;
5635 rtx this_insn = start_insn, label = 0;
5637 if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
5639 /* The code below is wrong for these, and I haven't time to
5640 fix it now. So we just do the safe thing and return. This
5641 whole function needs re-writing anyway. */
5642 jump_clobbers = 1;
5643 return;
5646 /* Register the insn jumped to. */
5647 if (reverse)
5649 if (!seeking_return)
5650 label = XEXP (SET_SRC (body), 0);
5652 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
5653 label = XEXP (XEXP (SET_SRC (body), 1), 0);
5654 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
5656 label = XEXP (XEXP (SET_SRC (body), 2), 0);
5657 then_not_else = FALSE;
5659 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
5660 seeking_return = 1;
5661 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
5663 seeking_return = 1;
5664 then_not_else = FALSE;
5666 else
5667 abort ();
5669 /* See how many insns this branch skips, and what kind of insns. If all
5670 insns are okay, and the label or unconditional branch to the same
5671 label is not too far away, succeed. */
5672 for (insns_skipped = 0;
5673 !fail && !succeed && insns_skipped++ < MAX_INSNS_SKIPPED;)
5675 rtx scanbody;
5677 this_insn = next_nonnote_insn (this_insn);
5678 if (!this_insn)
5679 break;
5681 scanbody = PATTERN (this_insn);
5683 switch (GET_CODE (this_insn))
5685 case CODE_LABEL:
5686 /* Succeed if it is the target label, otherwise fail since
5687 control falls in from somewhere else. */
5688 if (this_insn == label)
5690 if (jump_clobbers)
5692 arm_ccfsm_state = 2;
5693 this_insn = next_nonnote_insn (this_insn);
5695 else
5696 arm_ccfsm_state = 1;
5697 succeed = TRUE;
5699 else
5700 fail = TRUE;
5701 break;
5703 case BARRIER:
5704 /* Succeed if the following insn is the target label.
5705 Otherwise fail.
5706 If return insns are used then the last insn in a function
5707 will be a barrier. */
5708 this_insn = next_nonnote_insn (this_insn);
5709 if (this_insn && this_insn == label)
5711 if (jump_clobbers)
5713 arm_ccfsm_state = 2;
5714 this_insn = next_nonnote_insn (this_insn);
5716 else
5717 arm_ccfsm_state = 1;
5718 succeed = TRUE;
5720 else
5721 fail = TRUE;
5722 break;
5724 case CALL_INSN:
5725 /* If using 32-bit addresses the cc is not preserved over
5726 calls */
5727 if (TARGET_APCS_32)
5729 /* Succeed if the following insn is the target label,
5730 or if the following two insns are a barrier and
5731 the target label. */
5732 this_insn = next_nonnote_insn (this_insn);
5733 if (this_insn && GET_CODE (this_insn) == BARRIER)
5734 this_insn = next_nonnote_insn (this_insn);
5736 if (this_insn && this_insn == label
5737 && insns_skipped < MAX_INSNS_SKIPPED)
5739 if (jump_clobbers)
5741 arm_ccfsm_state = 2;
5742 this_insn = next_nonnote_insn (this_insn);
5744 else
5745 arm_ccfsm_state = 1;
5746 succeed = TRUE;
5748 else
5749 fail = TRUE;
5751 break;
5753 case JUMP_INSN:
5754 /* If this is an unconditional branch to the same label, succeed.
5755 If it is to another label, do nothing. If it is conditional,
5756 fail. */
5757 /* XXX Probably, the tests for the SET and the PC are unnecessary. */
5759 if (GET_CODE (scanbody) == SET
5760 && GET_CODE (SET_DEST (scanbody)) == PC)
5762 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
5763 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
5765 arm_ccfsm_state = 2;
5766 succeed = TRUE;
5768 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
5769 fail = TRUE;
5771 else if (GET_CODE (scanbody) == RETURN
5772 && seeking_return)
5774 arm_ccfsm_state = 2;
5775 succeed = TRUE;
5777 else if (GET_CODE (scanbody) == PARALLEL)
5779 switch (get_attr_conds (this_insn))
5781 case CONDS_NOCOND:
5782 break;
5783 default:
5784 fail = TRUE;
5785 break;
5788 break;
5790 case INSN:
5791 /* Instructions using or affecting the condition codes make it
5792 fail. */
5793 if ((GET_CODE (scanbody) == SET
5794 || GET_CODE (scanbody) == PARALLEL)
5795 && get_attr_conds (this_insn) != CONDS_NOCOND)
5796 fail = TRUE;
5797 break;
5799 default:
5800 break;
5803 if (succeed)
5805 if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
5806 arm_target_label = CODE_LABEL_NUMBER (label);
5807 else if (seeking_return || arm_ccfsm_state == 2)
5809 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
5811 this_insn = next_nonnote_insn (this_insn);
5812 if (this_insn && (GET_CODE (this_insn) == BARRIER
5813 || GET_CODE (this_insn) == CODE_LABEL))
5814 abort ();
5816 if (!this_insn)
5818 /* Oh, dear!  We ran off the end... give up. */
5819 recog (PATTERN (insn), insn, NULL_PTR);
5820 arm_ccfsm_state = 0;
5821 arm_target_insn = NULL;
5822 return;
5824 arm_target_insn = this_insn;
5826 else
5827 abort ();
5828 if (jump_clobbers)
5830 if (reverse)
5831 abort ();
5832 arm_current_cc =
5833 get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
5834 0), 0), 1));
5835 if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
5836 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
5837 if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
5838 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
5840 else
5842 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
5843 what it was. */
5844 if (!reverse)
5845 arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
5846 0));
5849 if (reverse || then_not_else)
5850 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
5852 /* Restore recog_operand (getting the attributes of other insns can
5853 destroy this array, but final.c assumes that it remains intact
5854 across this call; since the insn has been recognized already we
5855 call recog directly). */
5856 recog (PATTERN (insn), insn, NULL_PTR);
5860 #ifdef AOF_ASSEMBLER
5861 /* Special functions only needed when producing AOF syntax assembler. */
5863 rtx aof_pic_label = NULL_RTX;
5864 struct pic_chain
5866 struct pic_chain *next;
5867 char *symname;
5870 static struct pic_chain *aof_pic_chain = NULL;
5872 rtx
5873 aof_pic_entry (x)
5874 rtx x;
5876 struct pic_chain **chainp;
5877 int offset;
5879 if (aof_pic_label == NULL_RTX)
5881 /* This needs to persist throughout the compilation. */
5882 end_temporary_allocation ();
5883 aof_pic_label = gen_rtx (SYMBOL_REF, Pmode, "x$adcons");
5884 resume_temporary_allocation ();
5887 for (offset = 0, chainp = &aof_pic_chain; *chainp;
5888 offset += 4, chainp = &(*chainp)->next)
5889 if ((*chainp)->symname == XSTR (x, 0))
5890 return plus_constant (aof_pic_label, offset);
5892 *chainp = (struct pic_chain *) xmalloc (sizeof (struct pic_chain));
5893 (*chainp)->next = NULL;
5894 (*chainp)->symname = XSTR (x, 0);
5895 return plus_constant (aof_pic_label, offset);
5898 void
5899 aof_dump_pic_table (f)
5900 FILE *f;
5902 struct pic_chain *chain;
5904 if (aof_pic_chain == NULL)
5905 return;
5907 fprintf (f, "\tAREA |%s$$adcons|, BASED %s%s\n",
5908 reg_names[PIC_OFFSET_TABLE_REGNUM], REGISTER_PREFIX,
5909 reg_names[PIC_OFFSET_TABLE_REGNUM]);
5910 fputs ("|x$adcons|\n", f);
5912 for (chain = aof_pic_chain; chain; chain = chain->next)
5914 fputs ("\tDCD\t", f);
5915 assemble_name (f, chain->symname);
5916 fputs ("\n", f);
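/* For two entries `foo' and `bar' the dumped table looks like this
   (assuming REGISTER_PREFIX is empty and the PIC register is sl):

	AREA |sl$$adcons|, BASED sl
   |x$adcons|
	DCD foo
	DCD bar
*/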
5920 int arm_text_section_count = 1;
5922 char *
5923 aof_text_section ()
5925 static char buf[100];
5926 sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
5927 arm_text_section_count++);
5928 if (flag_pic)
5929 strcat (buf, ", PIC, REENTRANT");
5930 return buf;
5933 static int arm_data_section_count = 1;
5935 char *
5936 aof_data_section ()
5938 static char buf[100];
5939 sprintf (buf, "\tAREA |C$$data%d|, DATA", arm_data_section_count++);
5940 return buf;
5943 /* The AOF assembler is religiously strict about declarations of
5944 imported and exported symbols, so that it is impossible to declare
5945 a function as imported near the beginning of the file, and then to
5946 export it later on. It is, however, possible to delay the decision
5947 until all the functions in the file have been compiled. To get
5948 around this, we maintain a list of the imports and exports, and
5949 delete from it any that are subsequently defined. At the end of
5950 compilation we spit the remainder of the list out before the END
5951 directive. */
5953 struct import
5955 struct import *next;
5956 char *name;
5959 static struct import *imports_list = NULL;
5961 void
5962 aof_add_import (name)
5963 char *name;
5965 struct import *new;
5967 for (new = imports_list; new; new = new->next)
5968 if (new->name == name)
5969 return;
5971 new = (struct import *) xmalloc (sizeof (struct import));
5972 new->next = imports_list;
5973 imports_list = new;
5974 new->name = name;
5977 void
5978 aof_delete_import (name)
5979 char *name;
5981 struct import **old;
5983 for (old = &imports_list; *old; old = & (*old)->next)
5985 if ((*old)->name == name)
5987 *old = (*old)->next;
5988 return;
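/* The double indirection above unlinks the matching node without
   keeping a separate `previous' pointer: *old is always the link
   field that points at the node currently being examined.  */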
5993 int arm_main_function = 0;
5995 void
5996 aof_dump_imports (f)
5997 FILE *f;
5999 /* The AOF assembler needs this to cause the startup code to be extracted
6000 from the library. Bringing in __main causes the whole thing to work
6001 automagically. */
6002 if (arm_main_function)
6004 text_section ();
6005 fputs ("\tIMPORT __main\n", f);
6006 fputs ("\tDCD __main\n", f);
6009 /* Now dump the remaining imports. */
6010 while (imports_list)
6012 fprintf (f, "\tIMPORT\t");
6013 assemble_name (f, imports_list->name);
6014 fputc ('\n', f);
6015 imports_list = imports_list->next;
6018 #endif /* AOF_ASSEMBLER */