/* Output routines for GCC for ARM/RISCiX.
   Copyright (C) 1991, 93, 94, 95, 96, 97, 1998 Free Software Foundation, Inc.
   Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
   and Martin Simmons (@harleqn.co.uk).
   More major hacks by Richard Earnshaw (rwe11@cl.cam.ac.uk)

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-flags.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "reload.h"
#include "tree.h"
#include "expr.h"
/* The maximum number of insns skipped which will be conditionalised if
   possible.  */
#define MAX_INSNS_SKIPPED  5

/* Some function declarations.  */
extern FILE *asm_out_file;
static HOST_WIDE_INT int_log2 PROTO ((HOST_WIDE_INT));
static char *output_multi_immediate PROTO ((rtx *, char *, char *, int,
                                            HOST_WIDE_INT));
static int arm_gen_constant PROTO ((enum rtx_code, enum machine_mode,
                                    HOST_WIDE_INT, rtx, rtx, int, int));
static int arm_naked_function_p PROTO ((tree));
static void init_fpa_table PROTO ((void));
static enum machine_mode select_dominance_cc_mode PROTO ((enum rtx_code, rtx,
                                                          rtx, HOST_WIDE_INT));
static HOST_WIDE_INT add_constant PROTO ((rtx, enum machine_mode));
static void dump_table PROTO ((rtx));
static int fixit PROTO ((rtx, enum machine_mode, int));
static rtx find_barrier PROTO ((rtx, int));
static int broken_move PROTO ((rtx));
static char *fp_const_from_val PROTO ((REAL_VALUE_TYPE *));
static int eliminate_lr2ip PROTO ((rtx *));
static char *shift_op PROTO ((rtx, HOST_WIDE_INT *));
static int pattern_really_clobbers_lr PROTO ((rtx));
static int function_really_clobbers_lr PROTO ((rtx));
static void emit_multi_reg_push PROTO ((int));
static void emit_sfm PROTO ((int, int));
static enum arm_cond_code get_arm_condition_code PROTO ((rtx));
/* Define the information needed to generate branch insns.  This is
   stored from the compare operation.  */

rtx arm_compare_op0, arm_compare_op1;
int arm_compare_fp;

/* What type of cpu are we compiling for? */
enum processor_type arm_cpu;

/* What type of floating point are we tuning for? */
enum floating_point_type arm_fpu;

/* What type of floating point instructions are available? */
enum floating_point_type arm_fpu_arch;

/* What program mode is the cpu running in? 26-bit mode or 32-bit mode.  */
enum prog_mode_type arm_prgmode;

/* Set by the -mfp=... option.  */
char *target_fp_name = NULL;

/* Nonzero if this is an "M" variant of the processor.  */
int arm_fast_multiply = 0;

/* Nonzero if this chip supports the ARM Architecture 4 extensions.  */
int arm_arch4 = 0;

/* Set to the features we should tune the code for (multiply speed etc).  */
int tune_flags = 0;

/* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
   must report the mode of the memory reference from PRINT_OPERAND to
   PRINT_OPERAND_ADDRESS.  */
enum machine_mode output_memory_reference_mode;
/* Nonzero if the prologue must set up `fp'.  */
int current_function_anonymous_args;
/* The register number to be used for the PIC offset register.  */
int arm_pic_register = 9;

/* Location counter of .text segment.  */
int arm_text_location = 0;

/* Set to one if we think that lr is only saved because of subroutine calls,
   but all of these can be `put after' return insns.  */
int lr_save_eliminated;

/* Set to 1 when a return insn is output; this means that the epilogue
   is not needed.  */
static int return_used_this_function;

static int arm_constant_limit = 3;

/* For an explanation of these variables, see final_prescan_insn below.  */
int arm_ccfsm_state;
enum arm_cond_code arm_current_cc;
rtx arm_target_insn;
int arm_target_label;

/* The condition codes of the ARM, and the inverse function.  */
char *arm_condition_codes[] =
{
  "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
  "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
};

static enum arm_cond_code get_arm_condition_code ();
/* Initialization code.  */

struct arm_cpu_select arm_select[4] =
{
  /* switch     name,           tune    arch */
  { (char *)0,  "--with-cpu=",  1,      1 },
  { (char *)0,  "-mcpu=",       1,      1 },
  { (char *)0,  "-march=",      0,      1 },
  { (char *)0,  "-mtune=",      1,      0 },
};
#define FL_CO_PROC    0x01   /* Has external co-processor bus */
#define FL_FAST_MULT  0x02   /* Fast multiply */
#define FL_MODE26     0x04   /* 26-bit mode support */
#define FL_MODE32     0x08   /* 32-bit mode support */
#define FL_ARCH4      0x10   /* Architecture rel 4 */
#define FL_THUMB      0x20   /* Thumb aware */
struct processors
{
  char *name;
  enum processor_type type;
  unsigned int flags;
};

/* Not all of these give usefully different compilation alternatives,
   but there is no simple way of generalizing them.  */
static struct processors all_procs[] =
{
  {"arm2",      PROCESSOR_ARM2, FL_CO_PROC | FL_MODE26},
  {"arm250",    PROCESSOR_ARM2, FL_CO_PROC | FL_MODE26},
  {"arm3",      PROCESSOR_ARM2, FL_CO_PROC | FL_MODE26},
  {"arm6",      PROCESSOR_ARM6, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm600",    PROCESSOR_ARM6, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm610",    PROCESSOR_ARM6, FL_MODE32 | FL_MODE26},
  {"arm7",      PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  /* arm7m doesn't exist on its own, only in conjunction with D, (and I), but
     those don't alter the code, so it is sometimes known as the arm7m.  */
  {"arm7m",     PROCESSOR_ARM7, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
                                 | FL_MODE26)},
  {"arm7dm",    PROCESSOR_ARM7, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
                                 | FL_MODE26)},
  {"arm7dmi",   PROCESSOR_ARM7, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
                                 | FL_MODE26)},
  {"arm700",    PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm710",    PROCESSOR_ARM7, FL_MODE32 | FL_MODE26},
  {"arm7100",   PROCESSOR_ARM7, FL_MODE32 | FL_MODE26},
  {"arm7500",   PROCESSOR_ARM7, FL_MODE32 | FL_MODE26},
  /* Doesn't really have an external co-proc, but does have embedded fpu.  */
  {"arm7500fe", PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm7tdmi",  PROCESSOR_ARM7, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
                                 | FL_ARCH4 | FL_THUMB)},
  {"arm8",      PROCESSOR_ARM8, (FL_FAST_MULT | FL_MODE32 | FL_MODE26
                                 | FL_ARCH4)},
  {"arm810",    PROCESSOR_ARM8, (FL_FAST_MULT | FL_MODE32 | FL_MODE26
                                 | FL_ARCH4)},
  {"strongarm", PROCESSOR_STARM, (FL_FAST_MULT | FL_MODE32 | FL_MODE26
                                  | FL_ARCH4)},
  {"strongarm110", PROCESSOR_STARM, (FL_FAST_MULT | FL_MODE32 | FL_MODE26
                                     | FL_ARCH4)},
  {"armv2",     PROCESSOR_NONE, FL_CO_PROC | FL_MODE26},
  {"armv2a",    PROCESSOR_NONE, FL_CO_PROC | FL_MODE26},
  {"armv3",     PROCESSOR_NONE, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"armv3m",    PROCESSOR_NONE, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
                                 | FL_MODE26)},
  {"armv4",     PROCESSOR_NONE, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
                                 | FL_MODE26 | FL_ARCH4)},
  /* Strictly, FL_MODE26 is a permitted option for v4t, but there are no
     implementations that support it, so we will leave it out for now.  */
  {"armv4t",    PROCESSOR_NONE, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
                                 | FL_ARCH4)},
  {NULL, 0, 0}
};
/* Fix up any incompatible options that the user has specified.
   This has now turned into a maze.  */
void
arm_override_options ()
{
  int arm_thumb_aware = 0;
  int flags = 0;
  int i;
  struct arm_cpu_select *ptr;
  static struct cpu_default {
    int cpu;
    char *name;
  } cpu_defaults[] = {
    { TARGET_CPU_arm2, "arm2" },
    { TARGET_CPU_arm6, "arm6" },
    { TARGET_CPU_arm610, "arm610" },
    { TARGET_CPU_arm7dm, "arm7dm" },
    { TARGET_CPU_arm7500fe, "arm7500fe" },
    { TARGET_CPU_arm7tdmi, "arm7tdmi" },
    { TARGET_CPU_arm8, "arm8" },
    { TARGET_CPU_arm810, "arm810" },
    { TARGET_CPU_strongarm, "strongarm" },
    { 0, 0 }
  };
  struct cpu_default *def;

  /* Set the default.  */
  for (def = &cpu_defaults[0]; def->name; ++def)
    if (def->cpu == TARGET_CPU_DEFAULT)
      break;
  if (! def->name)
    abort ();

  arm_select[0].string = def->name;
  for (i = 0; i < sizeof (arm_select) / sizeof (arm_select[0]); i++)
    {
      ptr = &arm_select[i];
      if (ptr->string != (char *)0 && ptr->string[0] != '\0')
        {
          struct processors *sel;

          for (sel = all_procs; sel->name != NULL; sel++)
            if (! strcmp (ptr->string, sel->name))
              {
                /* -march= is the only flag that can take an architecture
                   type, so if we match when the tune bit is set, the
                   option was invalid.  */
                if (ptr->set_tune_p)
                  {
                    if (sel->type == PROCESSOR_NONE)
                      continue;   /* It's an architecture, not a cpu.  */

                    arm_cpu = sel->type;
                    tune_flags = sel->flags;
                  }

                if (ptr->set_arch_p)
                  flags = sel->flags;

                break;
              }

          if (sel->name == NULL)
            error ("bad value (%s) for %s switch", ptr->string, ptr->name);
        }
    }
  if (write_symbols != NO_DEBUG && flag_omit_frame_pointer)
    warning ("-g with -fomit-frame-pointer may not give sensible debugging");

  if (TARGET_POKE_FUNCTION_NAME)
    target_flags |= ARM_FLAG_APCS_FRAME;

  if (TARGET_6)
    warning ("Option '-m6' deprecated.  Use: '-mapcs-32' or -mcpu=<proc>");

  if (TARGET_3)
    warning ("Option '-m3' deprecated.  Use: '-mapcs-26' or -mcpu=<proc>");

  if (TARGET_APCS_REENT && flag_pic)
    fatal ("-fpic and -mapcs-reent are incompatible");

  if (TARGET_APCS_REENT)
    warning ("APCS reentrant code not supported.");
  /* If stack checking is disabled, we can use r10 as the PIC register,
     which keeps r9 available.  */
  if (flag_pic && ! TARGET_APCS_STACK)
    arm_pic_register = 10;

  /* Well, I'm about to have a go, but pic is NOT going to be compatible
     with APCS reentrancy, since that requires too much support in the
     assembler and linker, and the ARMASM assembler seems to lack some
     required directives.  */
  if (flag_pic)
    warning ("Position independent code not supported.  Ignored");

  if (TARGET_APCS_FLOAT)
    warning ("Passing floating point arguments in fp regs not yet supported");
  if (TARGET_APCS_STACK && ! TARGET_APCS)
    {
      warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
      target_flags |= ARM_FLAG_APCS_FRAME;
    }

  /* Default is to tune for an FPA.  */
  arm_fpu = FP_HARD;

  /* Default value for floating point code... if no co-processor
     bus, then schedule for emulated floating point.  Otherwise,
     assume the user has an FPA.
     Note: this does not prevent use of floating point instructions,
     -msoft-float does that.  */
  if ((tune_flags & FL_CO_PROC) == 0)
    arm_fpu = FP_SOFT3;
  arm_fast_multiply = (flags & FL_FAST_MULT) != 0;
  arm_arch4 = (flags & FL_ARCH4) != 0;
  arm_thumb_aware = (flags & FL_THUMB) != 0;

  if (target_fp_name)
    {
      if (strcmp (target_fp_name, "2") == 0)
        arm_fpu_arch = FP_SOFT2;
      else if (strcmp (target_fp_name, "3") == 0)
        arm_fpu_arch = FP_HARD;
      else
        fatal ("Invalid floating point emulation option: -mfpe=%s",
               target_fp_name);
    }
  else
    arm_fpu_arch = FP_DEFAULT;

  if (TARGET_THUMB_INTERWORK && ! arm_thumb_aware)
    {
      warning ("This processor variant does not support Thumb interworking");
      target_flags &= ~ARM_FLAG_THUMB;
    }

  if (TARGET_FPE && arm_fpu != FP_HARD)
    arm_fpu = FP_SOFT2;

  /* For arm2/3 there is no need to do any scheduling if there is only
     a floating point emulator, or we are doing software floating-point.  */
  if ((TARGET_SOFT_FLOAT || arm_fpu != FP_HARD) && arm_cpu == PROCESSOR_ARM2)
    flag_schedule_insns = flag_schedule_insns_after_reload = 0;

  arm_prgmode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;
}
/* Return 1 if it is possible to return using a single instruction.  */

int
use_return_insn ()
{
  int regno;

  if (!reload_completed || current_function_pretend_args_size
      || current_function_anonymous_args
      || ((get_frame_size () + current_function_outgoing_args_size != 0)
          && !(TARGET_APCS || frame_pointer_needed)))
    return 0;

  /* Can't be done if interworking with Thumb, and any registers have been
     stacked.  */
  if (TARGET_THUMB_INTERWORK)
    for (regno = 0; regno < 16; regno++)
      if (regs_ever_live[regno] && ! call_used_regs[regno])
        return 0;

  /* Can't be done if any of the FPU regs are pushed, since this also
     requires an insn.  */
  for (regno = 16; regno < 24; regno++)
    if (regs_ever_live[regno] && ! call_used_regs[regno])
      return 0;

  /* If a function is naked, don't use the "return" insn.  */
  if (arm_naked_function_p (current_function_decl))
    return 0;

  return 1;
}
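/* Note that registers 16-23 above are the FPA registers f0-f7 in this
   backend's numbering: if any of them (or, under Thumb interworking, any
   call-saved core register) was saved in the prologue, restoring it needs
   its own instruction, so a single-insn return is impossible.  */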
/* Return TRUE if int I is a valid immediate ARM constant.  */

int
const_ok_for_arm (i)
     HOST_WIDE_INT i;
{
  unsigned HOST_WIDE_INT mask = ~0xFF;

  /* For machines with >32 bit HOST_WIDE_INT, the bits above bit 31 must
     be all zero, or all one.  */
  if ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff) != 0
      && ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff)
          != (((HOST_WIDE_INT) -1) & ~(unsigned HOST_WIDE_INT) 0xffffffff)))
    return FALSE;

  /* Fast return for 0 and powers of 2.  */
  if ((i & (i - 1)) == 0)
    return TRUE;

  do
    {
      if ((i & mask & (unsigned HOST_WIDE_INT) 0xffffffff) == 0)
        return TRUE;
      mask =
        (mask << 2) | ((mask & (unsigned HOST_WIDE_INT) 0xffffffff)
                       >> (32 - 2)) | ~((unsigned HOST_WIDE_INT) 0xffffffff);
    } while (mask != ~0xFF);

  return FALSE;
}
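/* An ARM data-processing immediate is an 8-bit value rotated right by an
   even amount; the loop above simply tries all sixteen even rotations of
   an 8-bit window.  For example, 0x0000FF00 is valid (0xFF rotated right
   by 24), while 0x00000101 is not: its set bits span nine bit positions,
   which no rotated 8-bit field can cover.  */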
/* Return true if I is a valid constant for the operation CODE.  */

int
const_ok_for_op (i, code, mode)
     HOST_WIDE_INT i;
     enum rtx_code code;
     enum machine_mode mode;
{
  if (const_ok_for_arm (i))
    return 1;

  switch (code)
    {
    case PLUS:
      return const_ok_for_arm (ARM_SIGN_EXTEND (-i));

    case MINUS:         /* Should only occur with (MINUS I reg) => rsb */
    case XOR:
    case IOR:
      return 0;

    case AND:
      return const_ok_for_arm (ARM_SIGN_EXTEND (~i));

    default:
      abort ();
    }
}
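/* For example, PLUS with i == -2 is acceptable even though -2 itself is
   not encodable, since the addition can be emitted as a subtraction of 2;
   similarly AND with i == 0xFFFFFF00 can be emitted as a BIC of 0xFF.  */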
/* Emit a sequence of insns to handle a large constant.
   CODE is the code of the operation required, it can be any of SET, PLUS,
   IOR, AND, XOR, MINUS;
   MODE is the mode in which the operation is being performed;
   VAL is the integer to operate on;
   SOURCE is the other operand (a register, or a null-pointer for SET);
   SUBTARGETS means it is safe to create scratch registers if that will
   either produce a simpler sequence, or we will want to cse the values.
   Return value is the number of insns emitted.  */

int
arm_split_constant (code, mode, val, target, source, subtargets)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
{
  if (subtargets || code == SET
      || (GET_CODE (target) == REG && GET_CODE (source) == REG
          && REGNO (target) != REGNO (source)))
    {
      rtx temp;

      if (arm_gen_constant (code, mode, val, target, source, 1, 0)
          > arm_constant_limit + (code != SET))
        {
          if (code == SET)
            {
              /* Currently SET is the only monadic value for CODE, all
                 the rest are dyadic.  */
              emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (val)));
              return 1;
            }
          else
            {
              rtx temp = subtargets ? gen_reg_rtx (mode) : target;

              emit_insn (gen_rtx_SET (VOIDmode, temp, GEN_INT (val)));
              /* For MINUS, the value is subtracted from, since we never
                 have subtraction of a constant.  */
              if (code == MINUS)
                emit_insn (gen_rtx_SET (VOIDmode, target,
                                        gen_rtx (code, mode, temp, source)));
              else
                emit_insn (gen_rtx_SET (VOIDmode, target,
                                        gen_rtx (code, mode, source, temp)));
              return 2;
            }
        }
    }

  return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
}
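/* For example, a SET of 0xFFF (not a valid rotated immediate) typically
   splits into two insns along the lines of:
        mov     rD, #0xFF0
        add     rD, rD, #0xF
   arm_gen_constant below does the actual splitting; when the split would
   exceed arm_constant_limit insns, a plain move of the whole constant is
   emitted instead so that the literal-pool machinery (add_constant,
   dump_table and friends, declared above) can handle it later.  */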
/* As above, but extra parameter GENERATE which, if clear, suppresses
   RTL generation.  */

static int
arm_gen_constant (code, mode, val, target, source, subtargets, generate)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
     int generate;
{
  int can_add = 0;
  int can_invert = 0;
  int can_negate = 0;
  int can_negate_initial = 0;
  int can_shift = 0;
  int i;
  int num_bits_set = 0;
  int set_sign_bit_copies = 0;
  int clear_sign_bit_copies = 0;
  int clear_zero_bit_copies = 0;
  int set_zero_bit_copies = 0;
  int insns = 0;
  rtx new_src;
  unsigned HOST_WIDE_INT temp1, temp2;
  unsigned HOST_WIDE_INT remainder = val & 0xffffffff;

  /* Find out which operations are safe for a given CODE.  Also do a quick
     check for degenerate cases; these can occur when DImode operations
     are split.  */
  switch (code)
    {
    case SET:
      can_invert = 1;
      can_shift = 1;
      can_negate = 1;
      break;

    case PLUS:
      can_negate = 1;
      can_negate_initial = 1;
      break;

    case IOR:
      if (remainder == 0xffffffff)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    GEN_INT (ARM_SIGN_EXTEND (val))));
          return 1;
        }
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      break;

    case AND:
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, const0_rtx));
          return 1;
        }
      if (remainder == 0xffffffff)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      can_invert = 1;
      break;

    case XOR:
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target, source));
          return 1;
        }
      if (remainder == 0xffffffff)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_NOT (mode, source)));
          return 1;
        }

      /* We don't know how to handle this yet below.  */
      abort ();

    case MINUS:
      /* We treat MINUS as (val - source), since (source - val) is always
         passed as (source + (-val)).  */
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_NEG (mode, source)));
          return 1;
        }
      if (const_ok_for_arm (val))
        {
          if (generate)
            emit_insn (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_MINUS (mode, GEN_INT (val),
                                                   source)));
          return 1;
        }
      can_negate = 1;

      break;

    default:
      abort ();
    }
  /* If we can do it in one insn get out quickly.  */
  if (const_ok_for_arm (val)
      || (can_negate_initial && const_ok_for_arm (-val))
      || (can_invert && const_ok_for_arm (~val)))
    {
      if (generate)
        emit_insn (gen_rtx_SET (VOIDmode, target,
                                (source ? gen_rtx (code, mode, source,
                                                   GEN_INT (val))
                                 : GEN_INT (val))));
      return 1;
    }
  /* Calculate a few attributes that may be useful for specific
     optimizations.  */

  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) == 0)
        clear_sign_bit_copies++;
      else
        break;
    }

  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) != 0)
        set_sign_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) == 0)
        clear_zero_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) != 0)
        set_zero_bit_copies++;
      else
        break;
    }
  switch (code)
    {
    case SET:
      /* See if we can do this by sign_extending a constant that is known
         to be negative.  This is a good way of doing it, since the shift
         may well merge into a subsequent insn.  */
      if (set_sign_bit_copies > 1)
        {
          if (const_ok_for_arm
              (temp1 = ARM_SIGN_EXTEND (remainder
                                        << (set_sign_bit_copies - 1))))
            {
              if (generate)
                {
                  new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx_SET (VOIDmode, new_src,
                                          GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
          /* For an inverted constant, we will need to set the low bits,
             these will be shifted out of harm's way.  */
          temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
          if (const_ok_for_arm (~temp1))
            {
              if (generate)
                {
                  new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx_SET (VOIDmode, new_src,
                                          GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
        }

      /* See if we can generate this by setting the bottom (or the top)
         16 bits, and then shifting these into the other half of the
         word.  We only look for the simplest cases, to do more would cost
         too much.  Be careful, however, not to generate this when the
         alternative would take fewer insns.  */
      if (val & 0xffff0000)
        {
          temp1 = remainder & 0xffff0000;
          temp2 = remainder & 0x0000ffff;

          /* Overlaps outside this range are best done using other methods.  */
          for (i = 9; i < 24; i++)
            {
              if ((((temp2 | (temp2 << i)) & 0xffffffff) == remainder)
                  && ! const_ok_for_arm (temp2))
                {
                  insns = arm_gen_constant (code, mode, temp2,
                                            new_src = (subtargets
                                                       ? gen_reg_rtx (mode)
                                                       : target),
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn (gen_rtx_SET
                               (VOIDmode, target,
                                gen_rtx_IOR (mode,
                                             gen_rtx_ASHIFT (mode, source,
                                                             GEN_INT (i)),
                                             source)));
                  return insns + 1;
                }
            }

          /* Don't duplicate cases already considered.  */
          for (i = 17; i < 24; i++)
            {
              if (((temp1 | (temp1 >> i)) == remainder)
                  && ! const_ok_for_arm (temp1))
                {
                  insns = arm_gen_constant (code, mode, temp1,
                                            new_src = (subtargets
                                                       ? gen_reg_rtx (mode)
                                                       : target),
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn
                      (gen_rtx_SET (VOIDmode, target,
                                    gen_rtx_IOR
                                    (mode,
                                     gen_rtx_LSHIFTRT (mode, source,
                                                       GEN_INT (i)),
                                     source)));
                  return insns + 1;
                }
            }
        }
      break;
    case IOR:
    case XOR:
      /* If we have IOR or XOR, and the constant can be loaded in a
         single instruction, and we can find a temporary to put it in,
         then this can be done in two instructions instead of 3-4.  */
      if (subtargets
          || (reload_completed && ! reg_mentioned_p (target, source)))
        {
          if (const_ok_for_arm (ARM_SIGN_EXTEND (~ val)))
            {
              if (generate)
                {
                  rtx sub = subtargets ? gen_reg_rtx (mode) : target;

                  emit_insn (gen_rtx_SET (VOIDmode, sub, GEN_INT (val)));
                  emit_insn (gen_rtx_SET (VOIDmode, target,
                                          gen_rtx (code, mode, source, sub)));
                }
              return 2;
            }
        }

      if (code == XOR)
        break;

      if (set_sign_bit_copies > 8
          && (val & (-1 << (32 - set_sign_bit_copies))) == val)
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (set_sign_bit_copies);

              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_ASHIFT (mode,
                                                                   source,
                                                                   shift))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_LSHIFTRT (mode, sub,
                                                                     shift))));
            }
          return 2;
        }

      if (set_zero_bit_copies > 8
          && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (set_zero_bit_copies);

              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_LSHIFTRT (mode,
                                                                     source,
                                                                     shift))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode,
                                                   gen_rtx_ASHIFT (mode, sub,
                                                                   shift))));
            }
          return 2;
        }

      if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~ val)))
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_NOT (mode, source)));
              source = sub;
              if (subtargets)
                sub = gen_reg_rtx (mode);
              emit_insn (gen_rtx_SET (VOIDmode, sub,
                                      gen_rtx_AND (mode, source,
                                                   GEN_INT (temp1))));
              emit_insn (gen_rtx_SET (VOIDmode, target,
                                      gen_rtx_NOT (mode, sub)));
            }
          return 3;
        }
      break;
    case AND:
      /* See if two shifts will do 2 or more insn's worth of work.  */
      if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
        {
          HOST_WIDE_INT shift_mask = ((0xffffffff
                                       << (32 - clear_sign_bit_copies))
                                      & 0xffffffff);
          rtx new_source;
          rtx shift;

          if ((remainder | shift_mask) != 0xffffffff)
            {
              if (generate)
                {
                  new_source = subtargets ? gen_reg_rtx (mode) : target;
                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            new_source, source, subtargets, 1);
                  source = new_source;
                }
              else
                insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                          target, source, subtargets, 0);
            }

          if (generate)
            {
              shift = GEN_INT (clear_sign_bit_copies);
              new_source = subtargets ? gen_reg_rtx (mode) : target;
              emit_insn (gen_ashlsi3 (new_source, source, shift));
              emit_insn (gen_lshrsi3 (target, new_source, shift));
            }

          return insns + 2;
        }

      if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
        {
          HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;
          rtx new_source;
          rtx shift;

          if ((remainder | shift_mask) != 0xffffffff)
            {
              if (generate)
                {
                  new_source = subtargets ? gen_reg_rtx (mode) : target;
                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            new_source, source, subtargets, 1);
                  source = new_source;
                }
              else
                insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                          target, source, subtargets, 0);
            }

          if (generate)
            {
              shift = GEN_INT (clear_zero_bit_copies);
              new_source = subtargets ? gen_reg_rtx (mode) : target;
              emit_insn (gen_lshrsi3 (new_source, source, shift));
              emit_insn (gen_ashlsi3 (target, new_source, shift));
            }

          return insns + 2;
        }

      break;
    default:
      break;
    }

  for (i = 0; i < 32; i++)
    if (remainder & (1 << i))
      num_bits_set++;

  if (code == AND || (can_invert && num_bits_set > 16))
    remainder = (~remainder) & 0xffffffff;
  else if (code == PLUS && num_bits_set > 16)
    remainder = (-remainder) & 0xffffffff;
  else
    {
      can_invert = 0;
      can_negate = 0;
    }

  /* Now try and find a way of doing the job in either two or three
     instructions.
     We start by looking for the largest block of zeros that are aligned on
     a 2-bit boundary, we then fill up the temps, wrapping around to the
     top of the word when we drop off the bottom.
     In the worst case this code should produce no more than four insns.  */
  {
    int best_start = 0;
    int best_consecutive_zeros = 0;

    for (i = 0; i < 32; i += 2)
      {
        int consecutive_zeros = 0;

        if (! (remainder & (3 << i)))
          {
            while ((i < 32) && ! (remainder & (3 << i)))
              {
                consecutive_zeros += 2;
                i += 2;
              }
            if (consecutive_zeros > best_consecutive_zeros)
              {
                best_consecutive_zeros = consecutive_zeros;
                best_start = i - consecutive_zeros;
              }
            i -= 2;
          }
      }
    /* Now start emitting the insns, starting with the one with the highest
       bit set: we do this so that the smallest number will be emitted last;
       this is more likely to be combinable with addressing insns.  */
    i = best_start;
    do
      {
        int end;

        if (i <= 0)
          i += 32;
        if (remainder & (3 << (i - 2)))
          {
            end = i - 8;
            if (end < 0)
              end += 32;
            temp1 = remainder & ((0x0ff << end)
                                 | ((i < end) ? (0xff >> (32 - end)) : 0));
            remainder &= ~temp1;

            if (code == SET)
              {
                if (generate)
                  emit_insn (gen_rtx_SET (VOIDmode,
                                          new_src = (subtargets
                                                     ? gen_reg_rtx (mode)
                                                     : target),
                                          GEN_INT (can_invert
                                                   ? ~temp1 : temp1)));
                can_invert = 0;
                code = PLUS;
              }
            else if (code == MINUS)
              {
                if (generate)
                  emit_insn (gen_rtx_SET (VOIDmode,
                                          new_src = (subtargets
                                                     ? gen_reg_rtx (mode)
                                                     : target),
                                          gen_rtx (code, mode, GEN_INT (temp1),
                                                   source)));
                code = PLUS;
              }
            else
              {
                if (generate)
                  emit_insn (gen_rtx_SET (VOIDmode,
                                          new_src = (remainder
                                                     ? (subtargets
                                                        ? gen_reg_rtx (mode)
                                                        : target)
                                                     : target),
                                          gen_rtx (code, mode, source,
                                                   GEN_INT (can_invert ? ~temp1
                                                            : (can_negate
                                                               ? -temp1
                                                               : temp1)))));
              }

            insns++;
            source = new_src;
            i -= 6;
          }
        i -= 2;
      } while (remainder);
  }

  return insns;
}
/* Canonicalize a comparison so that we are more likely to recognize it.
   This can be done for a few constant compares, where we can make the
   immediate value easier to load.  */

enum rtx_code
arm_canonicalize_comparison (code, op1)
     enum rtx_code code;
     rtx *op1;
{
  HOST_WIDE_INT i = INTVAL (*op1);

  switch (code)
    {
    case EQ:
    case NE:
      return code;

    case GT:
    case LE:
      if (i != ((1 << (HOST_BITS_PER_WIDE_INT - 1)) - 1)
          && (const_ok_for_arm (i+1) || const_ok_for_arm (- (i+1))))
        {
          *op1 = GEN_INT (i+1);
          return code == GT ? GE : LT;
        }
      break;

    case GE:
    case LT:
      if (i != (1 << (HOST_BITS_PER_WIDE_INT - 1))
          && (const_ok_for_arm (i-1) || const_ok_for_arm (- (i-1))))
        {
          *op1 = GEN_INT (i-1);
          return code == GE ? GT : LE;
        }
      break;

    case GTU:
    case LEU:
      if (i != ~0
          && (const_ok_for_arm (i+1) || const_ok_for_arm (- (i+1))))
        {
          *op1 = GEN_INT (i + 1);
          return code == GTU ? GEU : LTU;
        }
      break;

    case GEU:
    case LTU:
      if (i != 0
          && (const_ok_for_arm (i - 1) || const_ok_for_arm (- (i - 1))))
        {
          *op1 = GEN_INT (i - 1);
          return code == GEU ? GTU : LEU;
        }
      break;

    default:
      abort ();
    }

  return code;
}
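/* For example, (x > 0xFFFF) cannot use 0xFFFF directly (it is not a valid
   rotated 8-bit immediate), but rewriting it as (x >= 0x10000) needs only
   the single-bit constant 0x10000, so the whole test stays one cmp.  */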
/* Handle aggregates that are not laid out in a BLKmode element.
   This is a sub-element of RETURN_IN_MEMORY.  */

int
arm_return_in_memory (type)
     tree type;
{
  if (TREE_CODE (type) == RECORD_TYPE)
    {
      tree field;

      /* For a struct, we can return in a register if every element was a
         bit-field.  */
      for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
        if (TREE_CODE (field) != FIELD_DECL
            || ! DECL_BIT_FIELD_TYPE (field))
          return 1;

      return 0;
    }
  else if (TREE_CODE (type) == UNION_TYPE)
    {
      tree field;

      /* Unions can be returned in registers if every element is
         integral, or can be returned in an integer register.  */
      for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
        {
          if (TREE_CODE (field) != FIELD_DECL
              || (AGGREGATE_TYPE_P (TREE_TYPE (field))
                  && RETURN_IN_MEMORY (TREE_TYPE (field)))
              || FLOAT_TYPE_P (TREE_TYPE (field)))
            return 1;
        }
      return 0;
    }
  /* XXX Not sure what should be done for other aggregates, so put them in
     memory.  */
  return 1;
}
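/* Under these rules a union of ints comes back in r0, while a struct with
   an ordinary (non-bit-field) member, or a union containing a float, is
   returned in memory through the hidden return-value pointer.  */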
int
legitimate_pic_operand_p (x)
     rtx x;
{
  if (CONSTANT_P (x) && flag_pic
      && (GET_CODE (x) == SYMBOL_REF
          || (GET_CODE (x) == CONST
              && GET_CODE (XEXP (x, 0)) == PLUS
              && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)))
    return 0;

  return 1;
}
rtx
legitimize_pic_address (orig, mode, reg)
     rtx orig;
     enum machine_mode mode;
     rtx reg;
{
  if (GET_CODE (orig) == SYMBOL_REF)
    {
      rtx pic_ref, address;
      rtx insn;
      int subregs = 0;

      if (reg == 0)
        {
          if (reload_in_progress || reload_completed)
            abort ();
          else
            reg = gen_reg_rtx (Pmode);

          subregs = 1;
        }

#ifdef AOF_ASSEMBLER
      /* The AOF assembler can generate relocations for these directly, and
         understands that the PIC register has to be added into the offset.
         */
      insn = emit_insn (gen_pic_load_addr_based (reg, orig));
#else
      if (subregs)
        address = gen_reg_rtx (Pmode);
      else
        address = reg;

      emit_insn (gen_pic_load_addr (address, orig));

      pic_ref = gen_rtx_MEM (Pmode,
                             gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
                                           address));
      RTX_UNCHANGING_P (pic_ref) = 1;
      insn = emit_move_insn (reg, pic_ref);
#endif
      current_function_uses_pic_offset_table = 1;
      /* Put a REG_EQUAL note on this insn, so that it can be optimized
         by loop.  */
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, orig,
                                            REG_NOTES (insn));
      return reg;
    }
  else if (GET_CODE (orig) == CONST)
    {
      rtx base, offset;

      if (GET_CODE (XEXP (orig, 0)) == PLUS
          && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
        return orig;

      if (reg == 0)
        {
          if (reload_in_progress || reload_completed)
            abort ();
          else
            reg = gen_reg_rtx (Pmode);
        }

      if (GET_CODE (XEXP (orig, 0)) == PLUS)
        {
          base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
          offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
                                           base == reg ? 0 : reg);
        }
      else
        abort ();

      if (GET_CODE (offset) == CONST_INT)
        {
          /* The base register doesn't really matter, we only want to
             test the index for the appropriate mode.  */
          GO_IF_LEGITIMATE_INDEX (mode, 0, offset, win);

          if (! reload_in_progress && ! reload_completed)
            offset = force_reg (Pmode, offset);
          else
            abort ();

        win:
          if (GET_CODE (offset) == CONST_INT)
            return plus_constant_for_output (base, INTVAL (offset));
        }

      if (GET_MODE_SIZE (mode) > 4
          && (GET_MODE_CLASS (mode) == MODE_INT
              || TARGET_SOFT_FLOAT))
        {
          emit_insn (gen_addsi3 (reg, base, offset));
          return reg;
        }

      return gen_rtx_PLUS (Pmode, base, offset);
    }
  else if (GET_CODE (orig) == LABEL_REF)
    current_function_uses_pic_offset_table = 1;

  return orig;
}
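/* For a global symbol the non-AOF path above thus yields the classic
   two-instruction GOT load, roughly:
        ldr     rA, Lc          @ Lc: word holding sym's GOT offset
        ldr     rD, [rPIC, rA]  @ fetch sym's address from the GOT
   with rPIC being the PIC offset register set up by arm_finalize_pic.  */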
static rtx pic_rtx;

int
is_pic (x)
     rtx x;
{
  if (x == pic_rtx)
    return 1;
  return 0;
}
void
arm_finalize_pic ()
{
#ifndef AOF_ASSEMBLER
  rtx l1, pic_tmp, pic_tmp2, seq;
  rtx global_offset_table;

  if (current_function_uses_pic_offset_table == 0)
    return;

  if (! flag_pic)
    abort ();

  start_sequence ();
  l1 = gen_label_rtx ();

  global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
  /* The PC contains 'dot'+8, but the label L1 is on the next
     instruction, so the offset is only 'dot'+4.  */
  pic_tmp = plus_constant (gen_rtx_LABEL_REF (Pmode, l1),
                           GEN_INT (4));
  pic_tmp2 = gen_rtx_CONST (VOIDmode,
                            gen_rtx_PLUS (Pmode, global_offset_table, pc_rtx));

  pic_rtx = gen_rtx_CONST (Pmode, gen_rtx_MINUS (Pmode, pic_tmp2, pic_tmp));

  emit_insn (gen_pic_load_addr (pic_offset_table_rtx, pic_rtx));
  emit_jump_insn (gen_pic_add_dot_plus_eight (l1, pic_offset_table_rtx));
  emit_label (l1);

  seq = gen_sequence ();
  end_sequence ();
  emit_insn_after (seq, get_insns ());

  /* Need to emit this whether or not we obey regdecls,
     since setjmp/longjmp can cause life info to screw up.  */
  emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
#endif /* AOF_ASSEMBLER */
}
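/* The emitted sequence is the standard PIC prologue, roughly:
        ldr     rPIC, Lc
        add     rPIC, pc, rPIC
   L1:  ...
   where Lc holds _GLOBAL_OFFSET_TABLE_ relative to L1+4.  At the add,
   the ARM pc reads as 'dot'+8, which is exactly &L1 + 4, so rPIC ends up
   pointing at the global offset table, as the 'dot'+8 comment above
   describes.  */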
#define REG_OR_SUBREG_REG(X)                                            \
  (GET_CODE (X) == REG                                                  \
   || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))

#define REG_OR_SUBREG_RTX(X)                    \
   (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))

#define ARM_FRAME_RTX(X)                                        \
  ((X) == frame_pointer_rtx || (X) == stack_pointer_rtx         \
   || (X) == arg_pointer_rtx)
int
arm_rtx_costs (x, code, outer_code)
     rtx x;
     enum rtx_code code, outer_code;
{
  enum machine_mode mode = GET_MODE (x);
  enum rtx_code subcode;
  int extra_cost;

  switch (code)
    {
    case MEM:
      /* Memory costs quite a lot for the first word, but subsequent words
         load at the equivalent of a single insn each.  */
      return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
              + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));

    case DIV:
    case MOD:
      return 100;

    case ROTATE:
      if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
        return 4;
      /* Fall through */
    case ROTATERT:
      if (mode != SImode)
        return 8;
      /* Fall through */
    case ASHIFT: case LSHIFTRT: case ASHIFTRT:
      if (mode == DImode)
        return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
                + ((GET_CODE (XEXP (x, 0)) == REG
                    || (GET_CODE (XEXP (x, 0)) == SUBREG
                        && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
                   ? 0 : 8));
      return (1 + ((GET_CODE (XEXP (x, 0)) == REG
                    || (GET_CODE (XEXP (x, 0)) == SUBREG
                        && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
                   ? 0 : 4)
              + ((GET_CODE (XEXP (x, 1)) == REG
                  || (GET_CODE (XEXP (x, 1)) == SUBREG
                      && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
                  || (GET_CODE (XEXP (x, 1)) == CONST_INT))
                 ? 0 : 4));
    case MINUS:
      if (mode == DImode)
        return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
                + ((REG_OR_SUBREG_REG (XEXP (x, 0))
                    || (GET_CODE (XEXP (x, 0)) == CONST_INT
                        && const_ok_for_arm (INTVAL (XEXP (x, 0)))))
                   ? 0 : 8));

      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
        return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
                      || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
                          && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
                     ? 0 : 8)
                + ((REG_OR_SUBREG_REG (XEXP (x, 0))
                    || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
                        && const_double_rtx_ok_for_fpu (XEXP (x, 0))))
                   ? 0 : 8));

      if (((GET_CODE (XEXP (x, 0)) == CONST_INT
            && const_ok_for_arm (INTVAL (XEXP (x, 0)))
            && REG_OR_SUBREG_REG (XEXP (x, 1))))
          || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
               || subcode == ASHIFTRT || subcode == LSHIFTRT
               || subcode == ROTATE || subcode == ROTATERT
               || (subcode == MULT
                   && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
                   && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
                        (INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
              && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
              && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
                  || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
              && REG_OR_SUBREG_REG (XEXP (x, 0))))
        return 1;
      /* Fall through */

    case PLUS:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
        return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
                + ((REG_OR_SUBREG_REG (XEXP (x, 1))
                    || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
                        && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
                   ? 0 : 8));

      /* Fall through */
    case AND: case XOR: case IOR:
      extra_cost = 0;

      /* Normally the frame registers will be spilt into reg+const during
         reload, so it is a bad idea to combine them with other instructions,
         since then they might not be moved outside of loops.  As a compromise
         we allow integration with ops that have a constant as their second
         operand.  */
      if ((REG_OR_SUBREG_REG (XEXP (x, 0))
           && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
           && GET_CODE (XEXP (x, 1)) != CONST_INT)
          || (REG_OR_SUBREG_REG (XEXP (x, 0))
              && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))))
        extra_cost = 4;

      if (mode == DImode)
        return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
                + ((REG_OR_SUBREG_REG (XEXP (x, 1))
                    || (GET_CODE (XEXP (x, 1)) == CONST_INT
                        && const_ok_for_op (INTVAL (XEXP (x, 1)), code, mode)))
                   ? 0 : 8));

      if (REG_OR_SUBREG_REG (XEXP (x, 0)))
        return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
                + ((REG_OR_SUBREG_REG (XEXP (x, 1))
                    || (GET_CODE (XEXP (x, 1)) == CONST_INT
                        && const_ok_for_op (INTVAL (XEXP (x, 1)), code, mode)))
                   ? 0 : 4));

      else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
        return (1 + extra_cost
                + ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
                     || subcode == LSHIFTRT || subcode == ASHIFTRT
                     || subcode == ROTATE || subcode == ROTATERT
                     || (subcode == MULT
                         && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
                         && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
                              (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0))
                     && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
                     && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
                         || GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
                   ? 0 : 4));

      return 8;
    case MULT:
      /* There is no point basing this on the tuning, since it is always the
         fast variant if it exists at all.  */
      if (arm_fast_multiply && mode == DImode
          && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
          && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
              || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
        return 8;

      if (GET_MODE_CLASS (mode) == MODE_FLOAT
          || mode == DImode)
        return 30;

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        {
          unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
                                      & (unsigned HOST_WIDE_INT) 0xffffffff);
          int add_cost = const_ok_for_arm (i) ? 4 : 8;
          int j;
          /* Tune as appropriate */
          int booth_unit_size = ((tune_flags & FL_FAST_MULT) ? 8 : 2);

          for (j = 0; i && j < 32; j += booth_unit_size)
            {
              i >>= booth_unit_size;
              add_cost += 2;
            }

          return add_cost;
        }

      return (((tune_flags & FL_FAST_MULT) ? 8 : 30)
              + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
              + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));

    case TRUNCATE:
      if (arm_fast_multiply && mode == SImode
          && GET_CODE (XEXP (x, 0)) == LSHIFTRT
          && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
          && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
              == GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)))
          && (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ZERO_EXTEND
              || GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == SIGN_EXTEND))
        return 8;
      return 99;
    case NEG:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
        return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
      /* Fall through */
    case NOT:
      if (mode == DImode)
        return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);

      return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);

    case IF_THEN_ELSE:
      if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
        return 14;
      return 2;

    case COMPARE:
      return 1;

    case ABS:
      return 4 + (mode == DImode ? 4 : 0);

    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == QImode)
        return (4 + (mode == DImode ? 4 : 0)
                + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
      /* Fall through */
    case ZERO_EXTEND:
      switch (GET_MODE (XEXP (x, 0)))
        {
        case QImode:
          return (1 + (mode == DImode ? 4 : 0)
                  + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

        case HImode:
          return (4 + (mode == DImode ? 4 : 0)
                  + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

        case SImode:
          return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
        }
      abort ();

    default:
      return 99;
    }
}
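/* Worked example of the Booth-step loop above: for x * 0x12345678 on an
   'M'-variant core (booth_unit_size == 8) the multiplier is consumed in
   four 8-bit steps, so the cost is 8 (the constant is not a valid
   immediate) plus 2 per step, i.e. 16; a plain core consumes only 2 bits
   per step and is charged correspondingly more.  */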
int
arm_adjust_cost (insn, link, dep, cost)
     rtx insn;
     rtx link;
     rtx dep;
     int cost;
{
  rtx i_pat, d_pat;

  if ((i_pat = single_set (insn)) != NULL
      && GET_CODE (SET_SRC (i_pat)) == MEM
      && (d_pat = single_set (dep)) != NULL
      && GET_CODE (SET_DEST (d_pat)) == MEM)
    {
      /* This is a load after a store, there is no conflict if the load reads
         from a cached area.  Assume that loads from the stack, and from the
         constant pool are cached, and that others will miss.  This is a
         hack.  */

/*    debug_rtx (insn);
      debug_rtx (dep);
      debug_rtx (link);
      fprintf (stderr, "costs %d\n", cost); */

      if (CONSTANT_POOL_ADDRESS_P (XEXP (SET_SRC (i_pat), 0))
          || reg_mentioned_p (stack_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
          || reg_mentioned_p (frame_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
          || reg_mentioned_p (hard_frame_pointer_rtx,
                              XEXP (SET_SRC (i_pat), 0)))
        {
          /* fprintf (stderr, "***** Now 1\n"); */
          return 1;
        }
    }

  return cost;
}
/* This code has been fixed for cross compilation.  */

static int fpa_consts_inited = 0;

char *strings_fpa[8] = {
  "0", "1", "2", "3",
  "4", "5", "0.5", "10"
};

static REAL_VALUE_TYPE values_fpa[8];

static void
init_fpa_table ()
{
  int i;
  REAL_VALUE_TYPE r;

  for (i = 0; i < 8; i++)
    {
      r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
      values_fpa[i] = r;
    }

  fpa_consts_inited = 1;
}
/* Return TRUE if rtx X is a valid immediate FPU constant.  */

int
const_double_rtx_ok_for_fpu (x)
     rtx x;
{
  REAL_VALUE_TYPE r;
  int i;

  if (!fpa_consts_inited)
    init_fpa_table ();

  REAL_VALUE_FROM_CONST_DOUBLE (r, x);
  if (REAL_VALUE_MINUS_ZERO (r))
    return 0;

  for (i = 0; i < 8; i++)
    if (REAL_VALUES_EQUAL (r, values_fpa[i]))
      return 1;

  return 0;
}
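/* The FPA can only encode the eight literals tabulated in strings_fpa
   (0, 1, 2, 3, 4, 5, 0.5 and 10), so for example 0.5 can be moved into a
   float register directly while 0.25 has to come from memory.  */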
/* Return TRUE if rtx X is a valid immediate FPU constant when negated.  */

int
neg_const_double_rtx_ok_for_fpu (x)
     rtx x;
{
  REAL_VALUE_TYPE r;
  int i;

  if (!fpa_consts_inited)
    init_fpa_table ();

  REAL_VALUE_FROM_CONST_DOUBLE (r, x);
  r = REAL_VALUE_NEGATE (r);
  if (REAL_VALUE_MINUS_ZERO (r))
    return 0;

  for (i = 0; i < 8; i++)
    if (REAL_VALUES_EQUAL (r, values_fpa[i]))
      return 1;

  return 0;
}
/* Predicates for `match_operand' and `match_operator'.  */

/* s_register_operand is the same as register_operand, but it doesn't accept
   (SUBREG (MEM)...).

   This function exists because at the time it was put in it led to better
   code.  SUBREG(MEM) always needs a reload in the places where
   s_register_operand is used, and this seemed to lead to excessive
   reloading.  */

int
s_register_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
/* Only accept reg, subreg(reg), const_int.  */

int
reg_or_int_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    return 1;

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
/* Return 1 if OP is an item in memory, given that we are in reload.  */

int
reload_memory_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  int regno = true_regnum (op);

  return (! CONSTANT_P (op)
          && (regno == -1
              || (GET_CODE (op) == REG
                  && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
}
/* Return TRUE for valid operands for the rhs of an ARM instruction.  */

int
arm_rhs_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
          || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
}

/* Return TRUE for valid operands for the rhs of an ARM instruction, or a
   load.  */

int
arm_rhsm_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
          || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
          || memory_operand (op, mode));
}
/* Return TRUE for valid operands for the rhs of an ARM instruction, or if a
   constant that is valid when negated.  */

int
arm_add_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
          || (GET_CODE (op) == CONST_INT
              && (const_ok_for_arm (INTVAL (op))
                  || const_ok_for_arm (-INTVAL (op)))));
}

int
arm_not_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
          || (GET_CODE (op) == CONST_INT
              && (const_ok_for_arm (INTVAL (op))
                  || const_ok_for_arm (~INTVAL (op)))));
}
/* Return TRUE if the operand is a memory reference which contains an
   offsettable address.  */

int
offsettable_memory_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  return (mode == GET_MODE (op)
          && GET_CODE (op) == MEM
          && offsettable_address_p (reload_completed | reload_in_progress,
                                    mode, XEXP (op, 0)));
}
/* Return TRUE if the operand is a memory reference which is, or can be
   made word aligned by adjusting the offset.  */

int
alignable_memory_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  rtx reg;

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  if (mode != GET_MODE (op) || GET_CODE (op) != MEM)
    return 0;

  op = XEXP (op, 0);

  return ((GET_CODE (reg = op) == REG
           || (GET_CODE (op) == SUBREG
               && GET_CODE (reg = SUBREG_REG (op)) == REG)
           || (GET_CODE (op) == PLUS
               && GET_CODE (XEXP (op, 1)) == CONST_INT
               && (GET_CODE (reg = XEXP (op, 0)) == REG
                   || (GET_CODE (XEXP (op, 0)) == SUBREG
                       && GET_CODE (reg = SUBREG_REG (XEXP (op, 0))) == REG))))
          && REGNO_POINTER_ALIGN (REGNO (reg)) >= 4);
}
/* Similar to s_register_operand, but does not allow hard integer
   registers.  */

int
f_register_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) == FPU_REGS));
}
/* Return TRUE for valid operands for the rhs of an FPU instruction.  */

int
fpu_rhs_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;
  else if (GET_CODE (op) == CONST_DOUBLE)
    return (const_double_rtx_ok_for_fpu (op));

  return FALSE;
}

int
fpu_add_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;
  else if (GET_CODE (op) == CONST_DOUBLE)
    return (const_double_rtx_ok_for_fpu (op)
            || neg_const_double_rtx_ok_for_fpu (op));

  return FALSE;
}
/* Return nonzero if OP is a constant power of two.  */

int
power_of_two_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    {
      HOST_WIDE_INT value = INTVAL (op);
      return value != 0 && (value & (value - 1)) == 0;
    }
  return FALSE;
}
/* Return TRUE for a valid operand of a DImode operation.
   Either: REG, CONST_DOUBLE or MEM(DImode_address).
   Note that this disallows MEM(REG+REG), but allows
   MEM(PRE/POST_INC/DEC(REG)).  */

int
di_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;

  switch (GET_CODE (op))
    {
    case CONST_DOUBLE:
    case CONST_INT:
      return TRUE;

    case MEM:
      return memory_address_p (DImode, XEXP (op, 0));

    default:
      return FALSE;
    }
}

/* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
   Either: REG, CONST_DOUBLE or MEM(DImode_address).
   Note that this disallows MEM(REG+REG), but allows
   MEM(PRE/POST_INC/DEC(REG)).  */

int
soft_df_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;

  switch (GET_CODE (op))
    {
    case CONST_DOUBLE:
      return TRUE;

    case MEM:
      return memory_address_p (DFmode, XEXP (op, 0));

    default:
      return FALSE;
    }
}
/* Return TRUE for valid index operands.  */

int
index_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
          || (immediate_operand (op, mode)
              && INTVAL (op) < 4096 && INTVAL (op) > -4096));
}

/* Return TRUE for valid shifts by a constant.  This also accepts any
   power of two on the (somewhat overly relaxed) assumption that the
   shift operator in this case was a mult.  */

int
const_shift_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (power_of_two_operand (op, mode)
          || (immediate_operand (op, mode)
              && (INTVAL (op) < 32 && INTVAL (op) > 0)));
}
/* Return TRUE for arithmetic operators which can be combined with a multiply
   (shift).  */

int
shiftable_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (GET_MODE (x) != mode)
    return FALSE;
  else
    {
      enum rtx_code code = GET_CODE (x);

      return (code == PLUS || code == MINUS
              || code == IOR || code == XOR || code == AND);
    }
}
/* Return TRUE for shift operators.  */

int
shift_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (GET_MODE (x) != mode)
    return FALSE;
  else
    {
      enum rtx_code code = GET_CODE (x);

      if (code == MULT)
        return power_of_two_operand (XEXP (x, 1), mode);

      return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
              || code == ROTATERT);
    }
}
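/* A multiply by a power of two is accepted here because the ARM barrel
   shifter makes it free: (mult x 8), for instance, is emitted as
   "x, LSL #3" in the shifter operand of another instruction.  */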
/* Return TRUE if x is EQ or NE.  */

int equality_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  return GET_CODE (x) == EQ || GET_CODE (x) == NE;
}

/* Return TRUE for SMIN SMAX UMIN UMAX operators.  */

int
minmax_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  enum rtx_code code = GET_CODE (x);

  if (GET_MODE (x) != mode)
    return FALSE;

  return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
}
/* Return TRUE if this is the condition code register; if we aren't given
   a mode, accept any class CCmode register.  */

int
cc_register (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    {
      mode = GET_MODE (x);
      if (GET_MODE_CLASS (mode) != MODE_CC)
        return FALSE;
    }

  if (mode == GET_MODE (x) && GET_CODE (x) == REG && REGNO (x) == 24)
    return TRUE;

  return FALSE;
}
/* Return TRUE if this is the condition code register; if we aren't given
   a mode, accept any class CCmode register which indicates a dominance
   expression.  */

int
dominant_cc_register (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    {
      mode = GET_MODE (x);
      if (GET_MODE_CLASS (mode) != MODE_CC)
        return FALSE;
    }

  if (mode != CC_DNEmode && mode != CC_DEQmode
      && mode != CC_DLEmode && mode != CC_DLTmode
      && mode != CC_DGEmode && mode != CC_DGTmode
      && mode != CC_DLEUmode && mode != CC_DLTUmode
      && mode != CC_DGEUmode && mode != CC_DGTUmode)
    return FALSE;

  if (mode == GET_MODE (x) && GET_CODE (x) == REG && REGNO (x) == 24)
    return TRUE;

  return FALSE;
}
/* Return TRUE if X references a SYMBOL_REF.  */

int
symbol_mentioned_p (x)
     rtx x;
{
  register char *fmt;
  register int i;

  if (GET_CODE (x) == SYMBOL_REF)
    return 1;

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          register int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (symbol_mentioned_p (XVECEXP (x, i, j)))
              return 1;
        }
      else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
        return 1;
    }

  return 0;
}
/* Return TRUE if X references a LABEL_REF.  */

int
label_mentioned_p (x)
     rtx x;
{
  register char *fmt;
  register int i;

  if (GET_CODE (x) == LABEL_REF)
    return 1;

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          register int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (label_mentioned_p (XVECEXP (x, i, j)))
              return 1;
        }
      else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
        return 1;
    }

  return 0;
}
enum rtx_code
minmax_code (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);

  if (code == SMAX)
    return GE;
  else if (code == SMIN)
    return LE;
  else if (code == UMIN)
    return LEU;
  else if (code == UMAX)
    return GEU;

  abort ();
}
/* Return 1 if memory locations are adjacent.  */

int
adjacent_mem_locations (a, b)
     rtx a, b;
{
  int val0 = 0, val1 = 0;
  int reg0, reg1;

  if ((GET_CODE (XEXP (a, 0)) == REG
       || (GET_CODE (XEXP (a, 0)) == PLUS
           && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
      && (GET_CODE (XEXP (b, 0)) == REG
          || (GET_CODE (XEXP (b, 0)) == PLUS
              && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
    {
      if (GET_CODE (XEXP (a, 0)) == PLUS)
        {
          reg0 = REGNO (XEXP (XEXP (a, 0), 0));
          val0 = INTVAL (XEXP (XEXP (a, 0), 1));
        }
      else
        reg0 = REGNO (XEXP (a, 0));
      if (GET_CODE (XEXP (b, 0)) == PLUS)
        {
          reg1 = REGNO (XEXP (XEXP (b, 0), 0));
          val1 = INTVAL (XEXP (XEXP (b, 0), 1));
        }
      else
        reg1 = REGNO (XEXP (b, 0));
      return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
    }
  return 0;
}
2211 /* Return 1 if OP is a load multiple operation. It is known to be a
2212 PARALLEL and the first section will be tested. */
2215 load_multiple_operation (op, mode)
2216 rtx op;
2217 enum machine_mode mode;
2219 HOST_WIDE_INT count = XVECLEN (op, 0);
2220 int dest_regno;
2221 rtx src_addr;
2222 HOST_WIDE_INT i = 1, base = 0;
2223 rtx elt;
2225 if (count <= 1
2226 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
2227 return 0;
2229 /* Check to see if this might be a write-back */
2230 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
2232 i++;
2233 base = 1;
2235 /* Now check it more carefully */
2236 if (GET_CODE (SET_DEST (elt)) != REG
2237 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
2238 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
2239 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
2240 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 2) * 4
2241 || GET_CODE (XVECEXP (op, 0, count - 1)) != CLOBBER
2242 || GET_CODE (XEXP (XVECEXP (op, 0, count - 1), 0)) != REG
2243 || REGNO (XEXP (XVECEXP (op, 0, count - 1), 0))
2244 != REGNO (SET_DEST (elt)))
2245 return 0;
2247 count--;
2250 /* Perform a quick check so we don't blow up below. */
2251 if (count <= i
2252 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
2253 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
2254 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
2255 return 0;
2257 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
2258 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);
2260 for (; i < count; i++)
2262 rtx elt = XVECEXP (op, 0, i);
2264 if (GET_CODE (elt) != SET
2265 || GET_CODE (SET_DEST (elt)) != REG
2266 || GET_MODE (SET_DEST (elt)) != SImode
2267 || REGNO (SET_DEST (elt)) != dest_regno + i - base
2268 || GET_CODE (SET_SRC (elt)) != MEM
2269 || GET_MODE (SET_SRC (elt)) != SImode
2270 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
2271 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
2272 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
2273 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
2274 return 0;
2277 return 1;
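/* For illustration only (not from the sources): a two-register load
   multiple with write-back from r4 that satisfies the checks above is
	(parallel [(set (reg:SI 4) (plus:SI (reg:SI 4) (const_int 8)))
		   (set (reg:SI 0) (mem:SI (reg:SI 4)))
		   (set (reg:SI 1) (mem:SI (plus:SI (reg:SI 4) (const_int 4))))
		   (clobber (reg:SI 4))])
   where the first SET and the trailing CLOBBER form the write-back of the
   base register, and the remaining SETs load consecutive registers from
   consecutive word offsets.  */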
2280 /* Return 1 if OP is a store multiple operation. It is known to be a
2281 PARALLEL and the first section will be tested. */
2284 store_multiple_operation (op, mode)
2285 rtx op;
2286 enum machine_mode mode;
2288 HOST_WIDE_INT count = XVECLEN (op, 0);
2289 int src_regno;
2290 rtx dest_addr;
2291 HOST_WIDE_INT i = 1, base = 0;
2292 rtx elt;
2294 if (count <= 1
2295 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
2296 return 0;
2298 /* Check to see if this might be a write-back */
2299 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
2301 i++;
2302 base = 1;
2304 /* Now check it more carefully */
2305 if (GET_CODE (SET_DEST (elt)) != REG
2306 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
2307 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
2308 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
2309 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 2) * 4
2310 || GET_CODE (XVECEXP (op, 0, count - 1)) != CLOBBER
2311 || GET_CODE (XEXP (XVECEXP (op, 0, count - 1), 0)) != REG
2312 || REGNO (XEXP (XVECEXP (op, 0, count - 1), 0))
2313 != REGNO (SET_DEST (elt)))
2314 return 0;
2316 count--;
2319 /* Perform a quick check so we don't blow up below. */
2320 if (count <= i
2321 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
2322 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
2323 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
2324 return 0;
2326 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
2327 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);
2329 for (; i < count; i++)
2331 elt = XVECEXP (op, 0, i);
2333 if (GET_CODE (elt) != SET
2334 || GET_CODE (SET_SRC (elt)) != REG
2335 || GET_MODE (SET_SRC (elt)) != SImode
2336 || REGNO (SET_SRC (elt)) != src_regno + i - base
2337 || GET_CODE (SET_DEST (elt)) != MEM
2338 || GET_MODE (SET_DEST (elt)) != SImode
2339 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
2340 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
2341 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
2342 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
2343 return 0;
2346 return 1;
2350 load_multiple_sequence (operands, nops, regs, base, load_offset)
2351 rtx *operands;
2352 int nops;
2353 int *regs;
2354 int *base;
2355 HOST_WIDE_INT *load_offset;
2357 int unsorted_regs[4];
2358 HOST_WIDE_INT unsorted_offsets[4];
2359 int order[4];
2360 int base_reg;
2361 int i;
2363 /* Can only handle 2, 3, or 4 insns at present, though could be easily
2364 extended if required. */
2365 if (nops < 2 || nops > 4)
2366 abort ();
2368 /* Loop over the operands and check that the memory references are
2369 suitable (i.e. immediate offsets from the same base register). At
2370 the same time, extract the target register and the memory
2371 offsets. */
2372 for (i = 0; i < nops; i++)
2374 rtx reg;
2375 rtx offset;
2377 /* Convert a subreg of a mem into the mem itself. */
2378 if (GET_CODE (operands[nops + i]) == SUBREG)
2379 operands[nops + i] = alter_subreg(operands[nops + i]);
2381 if (GET_CODE (operands[nops + i]) != MEM)
2382 abort ();
2384 /* Don't reorder volatile memory references; it doesn't seem worth
2385 looking for the case where the order is ok anyway. */
2386 if (MEM_VOLATILE_P (operands[nops + i]))
2387 return 0;
2389 offset = const0_rtx;
2391 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
2392 || (GET_CODE (reg) == SUBREG
2393 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
2394 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
2395 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
2396 == REG)
2397 || (GET_CODE (reg) == SUBREG
2398 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
2399 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
2400 == CONST_INT)))
2402 if (i == 0)
2404 base_reg = REGNO(reg);
2405 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
2406 ? REGNO (operands[i])
2407 : REGNO (SUBREG_REG (operands[i])));
2408 order[0] = 0;
2410 else
2412 if (base_reg != REGNO (reg))
2413 /* Not addressed from the same base register. */
2414 return 0;
2416 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
2417 ? REGNO (operands[i])
2418 : REGNO (SUBREG_REG (operands[i])));
2419 if (unsorted_regs[i] < unsorted_regs[order[0]])
2420 order[0] = i;
2423 /* If it isn't an integer register, or if it overwrites the
2424 base register but isn't the last insn in the list, then
2425 we can't do this. */
2426 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14
2427 || (i != nops - 1 && unsorted_regs[i] == base_reg))
2428 return 0;
2430 unsorted_offsets[i] = INTVAL (offset);
2432 else
2433 /* Not a suitable memory address. */
2434 return 0;
2437 /* All the useful information has now been extracted from the
2438 operands into unsorted_regs and unsorted_offsets; additionally,
2439 order[0] has been set to the lowest numbered register in the
2440 list. Sort the registers into order, and check that the memory
2441 offsets are ascending and adjacent. */
2443 for (i = 1; i < nops; i++)
2445 int j;
2447 order[i] = order[i - 1];
2448 for (j = 0; j < nops; j++)
2449 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
2450 && (order[i] == order[i - 1]
2451 || unsorted_regs[j] < unsorted_regs[order[i]]))
2452 order[i] = j;
2454 /* Have we found a suitable register? If not, one register must be
2455 used more than once. */
2456 if (order[i] == order[i - 1])
2457 return 0;
2459 /* Are the memory offsets adjacent and ascending? */
2460 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
2461 return 0;
2464 if (base)
2466 *base = base_reg;
2468 for (i = 0; i < nops; i++)
2469 regs[i] = unsorted_regs[order[i]];
2471 *load_offset = unsorted_offsets[order[0]];
2474 if (unsorted_offsets[order[0]] == 0)
2475 return 1; /* ldmia */
2477 if (unsorted_offsets[order[0]] == 4)
2478 return 2; /* ldmib */
2480 if (unsorted_offsets[order[nops - 1]] == 0)
2481 return 3; /* ldmda */
2483 if (unsorted_offsets[order[nops - 1]] == -4)
2484 return 4; /* ldmdb */
2486 /* Can't do it without setting up the offset, only do this if it takes
2487 no more than one insn. */
2488 return (const_ok_for_arm (unsorted_offsets[order[0]])
2489 || const_ok_for_arm (-unsorted_offsets[order[0]])) ? 5 : 0;
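/* A sketch of how the return value is chosen, assuming two loads of r0
   and r1 from a common base register: offsets {0, 4} give 1 (ldmia),
   {4, 8} give 2 (ldmib), {-4, 0} give 3 (ldmda), and {-8, -4} give 4
   (ldmdb).  Any other pair of adjacent, ascending offsets gives 5 if the
   lowest offset can be added to the base in a single insn, else 0.  */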
2492 char *
2493 emit_ldm_seq (operands, nops)
2494 rtx *operands;
2495 int nops;
2497 int regs[4];
2498 int base_reg;
2499 HOST_WIDE_INT offset;
2500 char buf[100];
2501 int i;
2503 switch (load_multiple_sequence (operands, nops, regs, &base_reg, &offset))
2505 case 1:
2506 strcpy (buf, "ldm%?ia\t");
2507 break;
2509 case 2:
2510 strcpy (buf, "ldm%?ib\t");
2511 break;
2513 case 3:
2514 strcpy (buf, "ldm%?da\t");
2515 break;
2517 case 4:
2518 strcpy (buf, "ldm%?db\t");
2519 break;
2521 case 5:
2522 if (offset >= 0)
2523 sprintf (buf, "add%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
2524 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
2525 (long) offset);
2526 else
2527 sprintf (buf, "sub%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
2528 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
2529 (long) -offset);
2530 output_asm_insn (buf, operands);
2531 base_reg = regs[0];
2532 strcpy (buf, "ldm%?ia\t");
2533 break;
2535 default:
2536 abort ();
2539 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
2540 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
2542 for (i = 1; i < nops; i++)
2543 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
2544 reg_names[regs[i]]);
2546 strcat (buf, "}\t%@ phole ldm");
2548 output_asm_insn (buf, operands);
2549 return "";
2553 store_multiple_sequence (operands, nops, regs, base, load_offset)
2554 rtx *operands;
2555 int nops;
2556 int *regs;
2557 int *base;
2558 HOST_WIDE_INT *load_offset;
2560 int unsorted_regs[4];
2561 HOST_WIDE_INT unsorted_offsets[4];
2562 int order[4];
2563 int base_reg;
2564 int i;
2566 /* Can only handle 2, 3, or 4 insns at present, though could be easily
2567 extended if required. */
2568 if (nops < 2 || nops > 4)
2569 abort ();
2571 /* Loop over the operands and check that the memory references are
2572 suitable (i.e. immediate offsets from the same base register). At
2573 the same time, extract the target register and the memory
2574 offsets. */
2575 for (i = 0; i < nops; i++)
2577 rtx reg;
2578 rtx offset;
2580 /* Convert a subreg of a mem into the mem itself. */
2581 if (GET_CODE (operands[nops + i]) == SUBREG)
2582 operands[nops + i] = alter_subreg(operands[nops + i]);
2584 if (GET_CODE (operands[nops + i]) != MEM)
2585 abort ();
2587 /* Don't reorder volatile memory references; it doesn't seem worth
2588 looking for the case where the order is ok anyway. */
2589 if (MEM_VOLATILE_P (operands[nops + i]))
2590 return 0;
2592 offset = const0_rtx;
2594 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
2595 || (GET_CODE (reg) == SUBREG
2596 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
2597 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
2598 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
2599 == REG)
2600 || (GET_CODE (reg) == SUBREG
2601 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
2602 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
2603 == CONST_INT)))
2605 if (i == 0)
2607 base_reg = REGNO(reg);
2608 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
2609 ? REGNO (operands[i])
2610 : REGNO (SUBREG_REG (operands[i])));
2611 order[0] = 0;
2613 else
2615 if (base_reg != REGNO (reg))
2616 /* Not addressed from the same base register. */
2617 return 0;
2619 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
2620 ? REGNO (operands[i])
2621 : REGNO (SUBREG_REG (operands[i])));
2622 if (unsorted_regs[i] < unsorted_regs[order[0]])
2623 order[0] = i;
2626 /* If it isn't an integer register, then we can't do this. */
2627 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14)
2628 return 0;
2630 unsorted_offsets[i] = INTVAL (offset);
2632 else
2633 /* Not a suitable memory address. */
2634 return 0;
2637 /* All the useful information has now been extracted from the
2638 operands into unsorted_regs and unsorted_offsets; additionally,
2639 order[0] has been set to the lowest numbered register in the
2640 list. Sort the registers into order, and check that the memory
2641 offsets are ascending and adjacent. */
2643 for (i = 1; i < nops; i++)
2645 int j;
2647 order[i] = order[i - 1];
2648 for (j = 0; j < nops; j++)
2649 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
2650 && (order[i] == order[i - 1]
2651 || unsorted_regs[j] < unsorted_regs[order[i]]))
2652 order[i] = j;
2654 /* Have we found a suitable register? If not, one register must be
2655 used more than once. */
2656 if (order[i] == order[i - 1])
2657 return 0;
2659 /* Are the memory offsets adjacent and ascending? */
2660 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
2661 return 0;
2664 if (base)
2666 *base = base_reg;
2668 for (i = 0; i < nops; i++)
2669 regs[i] = unsorted_regs[order[i]];
2671 *load_offset = unsorted_offsets[order[0]];
2674 if (unsorted_offsets[order[0]] == 0)
2675 return 1; /* stmia */
2677 if (unsorted_offsets[order[0]] == 4)
2678 return 2; /* stmib */
2680 if (unsorted_offsets[order[nops - 1]] == 0)
2681 return 3; /* stmda */
2683 if (unsorted_offsets[order[nops - 1]] == -4)
2684 return 4; /* stmdb */
2686 return 0;
2689 char *
2690 emit_stm_seq (operands, nops)
2691 rtx *operands;
2692 int nops;
2694 int regs[4];
2695 int base_reg;
2696 HOST_WIDE_INT offset;
2697 char buf[100];
2698 int i;
2700 switch (store_multiple_sequence (operands, nops, regs, &base_reg, &offset))
2702 case 1:
2703 strcpy (buf, "stm%?ia\t");
2704 break;
2706 case 2:
2707 strcpy (buf, "stm%?ib\t");
2708 break;
2710 case 3:
2711 strcpy (buf, "stm%?da\t");
2712 break;
2714 case 4:
2715 strcpy (buf, "stm%?db\t");
2716 break;
2718 default:
2719 abort ();
2722 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
2723 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
2725 for (i = 1; i < nops; i++)
2726 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
2727 reg_names[regs[i]]);
2729 strcat (buf, "}\t%@ phole stm");
2731 output_asm_insn (buf, operands);
2732 return "";
2736 multi_register_push (op, mode)
2737 rtx op;
2738 enum machine_mode mode;
2740 if (GET_CODE (op) != PARALLEL
2741 || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
2742 || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
2743 || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != 2))
2744 return 0;
2746 return 1;
2750 /* Routines for use with attributes */
2752 /* Return nonzero if ATTR is a valid attribute for DECL.
2753 ATTRIBUTES are any existing attributes and ARGS are the arguments
2754 supplied with ATTR.
2756 Supported attributes:
2758 naked: don't output any prologue or epilogue code; the user is assumed
2759 to do the right thing. */
2762 arm_valid_machine_decl_attribute (decl, attributes, attr, args)
2763 tree decl;
2764 tree attributes;
2765 tree attr;
2766 tree args;
2768 if (args != NULL_TREE)
2769 return 0;
2771 if (is_attribute_p ("naked", attr))
2772 return TREE_CODE (decl) == FUNCTION_DECL;
2773 return 0;
2776 /* Return non-zero if FUNC is a naked function. */
2778 static int
2779 arm_naked_function_p (func)
2780 tree func;
2782 tree a;
2784 if (TREE_CODE (func) != FUNCTION_DECL)
2785 abort ();
2787 a = lookup_attribute ("naked", DECL_MACHINE_ATTRIBUTES (func));
2788 return a != NULL_TREE;
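/* By way of example, a function declared in C as
	void handler (void) __attribute__ ((naked));
   gets no prologue or epilogue at all, so its body must provide whatever
   (usually asm) code is needed to return to the caller.  */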
2791 /* Routines for use in generating RTL */
2794 arm_gen_load_multiple (base_regno, count, from, up, write_back, unchanging_p,
2795 in_struct_p)
2796 int base_regno;
2797 int count;
2798 rtx from;
2799 int up;
2800 int write_back;
2801 int unchanging_p;
2802 int in_struct_p;
2804 int i = 0, j;
2805 rtx result;
2806 int sign = up ? 1 : -1;
2807 rtx mem;
2809 result = gen_rtx_PARALLEL (VOIDmode,
2810 rtvec_alloc (count + (write_back ? 2 : 0)));
2811 if (write_back)
2813 XVECEXP (result, 0, 0)
2814 = gen_rtx_SET (GET_MODE (from), from,
2815 plus_constant (from, count * 4 * sign));
2816 i = 1;
2817 count++;
2820 for (j = 0; i < count; i++, j++)
2822 mem = gen_rtx_MEM (SImode, plus_constant (from, j * 4 * sign));
2823 RTX_UNCHANGING_P (mem) = unchanging_p;
2824 MEM_IN_STRUCT_P (mem) = in_struct_p;
2826 XVECEXP (result, 0, i)
2827 = gen_rtx_SET (VOIDmode, gen_rtx_REG (SImode, base_regno + j), mem);
2830 if (write_back)
2831 XVECEXP (result, 0, i) = gen_rtx_CLOBBER (SImode, from);
2833 return result;
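#if 0
      /* Illustrative use only, not compiled: emit the RTL for an
	 "ldmia SRC!, {r0-r3}", i.e. load four words into r0..r3 from the
	 address in the pseudo SRC with write-back (compare the calls in
	 arm_gen_movstrqi below).  */
      emit_insn (arm_gen_load_multiple (0, 4, src, TRUE, TRUE, 0, 0));
#endif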
2837 arm_gen_store_multiple (base_regno, count, to, up, write_back, unchanging_p,
2838 in_struct_p)
2839 int base_regno;
2840 int count;
2841 rtx to;
2842 int up;
2843 int write_back;
2844 int unchanging_p;
2845 int in_struct_p;
2847 int i = 0, j;
2848 rtx result;
2849 int sign = up ? 1 : -1;
2850 rtx mem;
2852 result = gen_rtx_PARALLEL (VOIDmode,
2853 rtvec_alloc (count + (write_back ? 2 : 0)));
2854 if (write_back)
2856 XVECEXP (result, 0, 0)
2857 = gen_rtx_SET (GET_MODE (to), to,
2858 plus_constant (to, count * 4 * sign));
2859 i = 1;
2860 count++;
2863 for (j = 0; i < count; i++, j++)
2865 mem = gen_rtx_MEM (SImode, plus_constant (to, j * 4 * sign));
2866 RTX_UNCHANGING_P (mem) = unchanging_p;
2867 MEM_IN_STRUCT_P (mem) = in_struct_p;
2869 XVECEXP (result, 0, i)
2870 = gen_rtx_SET (VOIDmode, mem, gen_rtx_REG (SImode, base_regno + j));
2873 if (write_back)
2874 XVECEXP (result, 0, i) = gen_rtx_CLOBBER (SImode, to);
2876 return result;
2880 arm_gen_movstrqi (operands)
2881 rtx *operands;
2883 HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
2884 int i, r;
2885 rtx src, dst;
2886 rtx st_src, st_dst, end_src, end_dst, fin_src, fin_dst;
2887 rtx part_bytes_reg = NULL;
2888 rtx mem;
2889 int dst_unchanging_p, dst_in_struct_p, src_unchanging_p, src_in_struct_p;
2890 extern int optimize;
2892 if (GET_CODE (operands[2]) != CONST_INT
2893 || GET_CODE (operands[3]) != CONST_INT
2894 || INTVAL (operands[2]) > 64
2895 || INTVAL (operands[3]) & 3)
2896 return 0;
2898 st_dst = XEXP (operands[0], 0);
2899 st_src = XEXP (operands[1], 0);
2901 dst_unchanging_p = RTX_UNCHANGING_P (operands[0]);
2902 dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
2903 src_unchanging_p = RTX_UNCHANGING_P (operands[1]);
2904 src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);
2906 fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
2907 fin_src = src = copy_to_mode_reg (SImode, st_src);
2909 in_words_to_go = (INTVAL (operands[2]) + 3) / 4;
2910 out_words_to_go = INTVAL (operands[2]) / 4;
2911 last_bytes = INTVAL (operands[2]) & 3;
2913 if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
2914 part_bytes_reg = gen_rtx_REG (SImode, (in_words_to_go - 1) & 3);
2916 for (i = 0; in_words_to_go >= 2; i+=4)
2918 if (in_words_to_go > 4)
2919 emit_insn (arm_gen_load_multiple (0, 4, src, TRUE, TRUE,
2920 src_unchanging_p, src_in_struct_p));
2921 else
2922 emit_insn (arm_gen_load_multiple (0, in_words_to_go, src, TRUE,
2923 FALSE, src_unchanging_p,
2924 src_in_struct_p));
2926 if (out_words_to_go)
2928 if (out_words_to_go > 4)
2929 emit_insn (arm_gen_store_multiple (0, 4, dst, TRUE, TRUE,
2930 dst_unchanging_p,
2931 dst_in_struct_p));
2932 else if (out_words_to_go != 1)
2933 emit_insn (arm_gen_store_multiple (0, out_words_to_go,
2934 dst, TRUE,
2935 (last_bytes == 0
2936 ? FALSE : TRUE),
2937 dst_unchanging_p,
2938 dst_in_struct_p));
2939 else
2941 mem = gen_rtx_MEM (SImode, dst);
2942 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
2943 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
2944 emit_move_insn (mem, gen_rtx_REG (SImode, 0));
2945 if (last_bytes != 0)
2946 emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
2950 in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
2951 out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
2954 /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do. */
2955 if (out_words_to_go)
2957 rtx sreg;
2959 mem = gen_rtx_MEM (SImode, src);
2960 RTX_UNCHANGING_P (mem) = src_unchanging_p;
2961 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
2962 emit_move_insn (sreg = gen_reg_rtx (SImode), mem);
2963 emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));
2965 mem = gen_rtx_MEM (SImode, dst);
2966 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
2967 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
2968 emit_move_insn (mem, sreg);
2969 emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
2970 in_words_to_go--;
2972 if (in_words_to_go) /* Sanity check */
2973 abort ();
2976 if (in_words_to_go)
2978 if (in_words_to_go < 0)
2979 abort ();
2981 mem = gen_rtx_MEM (SImode, src);
2982 RTX_UNCHANGING_P (mem) = src_unchanging_p;
2983 MEM_IN_STRUCT_P (mem) = src_in_struct_p;
2984 part_bytes_reg = copy_to_mode_reg (SImode, mem);
2987 if (BYTES_BIG_ENDIAN && last_bytes)
2989 rtx tmp = gen_reg_rtx (SImode);
2991 if (part_bytes_reg == NULL)
2992 abort ();
2994 /* The bytes we want are in the top end of the word */
2995 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
2996 GEN_INT (8 * (4 - last_bytes))));
2997 part_bytes_reg = tmp;
2999 while (last_bytes)
3001 mem = gen_rtx_MEM (QImode, plus_constant (dst, last_bytes - 1));
3002 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
3003 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
3004 emit_move_insn (mem, gen_rtx_SUBREG (QImode, part_bytes_reg, 0));
3005 if (--last_bytes)
3007 tmp = gen_reg_rtx (SImode);
3008 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
3009 part_bytes_reg = tmp;
3014 else
3016 while (last_bytes)
3018 if (part_bytes_reg == NULL)
3019 abort ();
3021 mem = gen_rtx_MEM (QImode, dst);
3022 RTX_UNCHANGING_P (mem) = dst_unchanging_p;
3023 MEM_IN_STRUCT_P (mem) = dst_in_struct_p;
3024 emit_move_insn (mem, gen_rtx_SUBREG (QImode, part_bytes_reg, 0));
3025 if (--last_bytes)
3027 rtx tmp = gen_reg_rtx (SImode);
3029 emit_insn (gen_addsi3 (dst, dst, const1_rtx));
3030 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
3031 part_bytes_reg = tmp;
3036 return 1;
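/* As a worked example of the expansion above (illustrative): a 10-byte,
   word-aligned copy gives in_words_to_go == 3, out_words_to_go == 2 and
   last_bytes == 2.  All three words are fetched with one load multiple
   into r0-r2, the first two are stored with a store multiple, and the two
   live bytes of the third word are then stored from r2 one at a time.  */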
3039 /* Generate a memory reference for a half word, such that it will be loaded
3040 into the top 16 bits of the word. We can assume that the address is
3041 known to be alignable and of the form reg, or plus (reg, const). */
3043 gen_rotated_half_load (memref)
3044 rtx memref;
3046 HOST_WIDE_INT offset = 0;
3047 rtx base = XEXP (memref, 0);
3049 if (GET_CODE (base) == PLUS)
3051 offset = INTVAL (XEXP (base, 1));
3052 base = XEXP (base, 0);
3055 /* If we aren't allowed to generate unaligned addresses, then fail. */
3056 if (TARGET_SHORT_BY_BYTES
3057 && ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 0)))
3058 return NULL;
3060 base = gen_rtx_MEM (SImode, plus_constant (base, offset & ~2));
3062 if ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 2))
3063 return base;
3065 return gen_rtx_ROTATE (SImode, base, GEN_INT (16));
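/* For instance, on a little-endian target (illustrative): a halfword at
   [rb, #6] is fetched via the word at [rb, #4], where it already occupies
   the top 16 bits, so the plain (mem:SI ...) is returned; a halfword at
   [rb, #4] instead yields (rotate:SI (mem:SI ...) (const_int 16)).  */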
3068 static enum machine_mode
3069 select_dominance_cc_mode (op, x, y, cond_or)
3070 enum rtx_code op;
3071 rtx x;
3072 rtx y;
3073 HOST_WIDE_INT cond_or;
3075 enum rtx_code cond1, cond2;
3076 int swapped = 0;
3078 /* Currently we will probably get the wrong result if the individual
3079 comparisons are not simple. This also ensures that it is safe to
3080 reverse a comparison if necessary. */
3081 if ((arm_select_cc_mode (cond1 = GET_CODE (x), XEXP (x, 0), XEXP (x, 1))
3082 != CCmode)
3083 || (arm_select_cc_mode (cond2 = GET_CODE (y), XEXP (y, 0), XEXP (y, 1))
3084 != CCmode))
3085 return CCmode;
3087 if (cond_or)
3088 cond1 = reverse_condition (cond1);
3090 /* If the comparisons are not equal, and one doesn't dominate the other,
3091 then we can't do this. */
3092 if (cond1 != cond2
3093 && ! comparison_dominates_p (cond1, cond2)
3094 && (swapped = 1, ! comparison_dominates_p (cond2, cond1)))
3095 return CCmode;
3097 if (swapped)
3099 enum rtx_code temp = cond1;
3100 cond1 = cond2;
3101 cond2 = temp;
3104 switch (cond1)
3106 case EQ:
3107 if (cond2 == EQ || ! cond_or)
3108 return CC_DEQmode;
3110 switch (cond2)
3112 case LE: return CC_DLEmode;
3113 case LEU: return CC_DLEUmode;
3114 case GE: return CC_DGEmode;
3115 case GEU: return CC_DGEUmode;
3118 break;
3120 case LT:
3121 if (cond2 == LT || ! cond_or)
3122 return CC_DLTmode;
3123 if (cond2 == LE)
3124 return CC_DLEmode;
3125 if (cond2 == NE)
3126 return CC_DNEmode;
3127 break;
3129 case GT:
3130 if (cond2 == GT || ! cond_or)
3131 return CC_DGTmode;
3132 if (cond2 == GE)
3133 return CC_DGEmode;
3134 if (cond2 == NE)
3135 return CC_DNEmode;
3136 break;
3138 case LTU:
3139 if (cond2 == LTU || ! cond_or)
3140 return CC_DLTUmode;
3141 if (cond2 == LEU)
3142 return CC_DLEUmode;
3143 if (cond2 == NE)
3144 return CC_DNEmode;
3145 break;
3147 case GTU:
3148 if (cond2 == GTU || ! cond_or)
3149 return CC_DGTUmode;
3150 if (cond2 == GEU)
3151 return CC_DGEUmode;
3152 if (cond2 == NE)
3153 return CC_DNEmode;
3154 break;
3156 /* The remaining cases only occur when both comparisons are the
3157 same. */
3158 case NE:
3159 return CC_DNEmode;
3161 case LE:
3162 return CC_DLEmode;
3164 case GE:
3165 return CC_DGEmode;
3167 case LEU:
3168 return CC_DLEUmode;
3170 case GEU:
3171 return CC_DGEUmode;
3174 abort ();
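/* A concrete case of the above (illustrative): for the conjunction
   (and (lt a b) (lt c d)) -- a false arm of 0, so COND_OR is zero -- both
   conditions are LT and CC_DLTmode is returned, so that a single "lt"
   test of the resulting flags covers both comparisons.  */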
3177 enum machine_mode
3178 arm_select_cc_mode (op, x, y)
3179 enum rtx_code op;
3180 rtx x;
3181 rtx y;
3183 /* All floating point compares return CCFP if it is an equality
3184 comparison, and CCFPE otherwise. */
3185 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3186 return (op == EQ || op == NE) ? CCFPmode : CCFPEmode;
3188 /* A compare with a shifted operand. Because of canonicalization, the
3189 comparison will have to be swapped when we emit the assembler. */
3190 if (GET_MODE (y) == SImode && GET_CODE (y) == REG
3191 && (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
3192 || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
3193 || GET_CODE (x) == ROTATERT))
3194 return CC_SWPmode;
3196 /* This is a special case that is used by combine to allow a
3197 comparison of a shifted byte load to be split into a zero-extend
3198 followed by a comparison of the shifted integer (only valid for
3199 equalities and unsigned inequalities). */
3200 if (GET_MODE (x) == SImode
3201 && GET_CODE (x) == ASHIFT
3202 && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 24
3203 && GET_CODE (XEXP (x, 0)) == SUBREG
3204 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == MEM
3205 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == QImode
3206 && (op == EQ || op == NE
3207 || op == GEU || op == GTU || op == LTU || op == LEU)
3208 && GET_CODE (y) == CONST_INT)
3209 return CC_Zmode;
3211 /* For an operation that sets the condition codes as a side-effect, the
3212 V flag is not set correctly, so we can only use comparisons where
3213 this doesn't matter. (For LT and GE we can use "mi" and "pl"
3214 instead.) */
3215 if (GET_MODE (x) == SImode
3216 && y == const0_rtx
3217 && (op == EQ || op == NE || op == LT || op == GE)
3218 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
3219 || GET_CODE (x) == AND || GET_CODE (x) == IOR
3220 || GET_CODE (x) == XOR || GET_CODE (x) == MULT
3221 || GET_CODE (x) == NOT || GET_CODE (x) == NEG
3222 || GET_CODE (x) == LSHIFTRT
3223 || GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
3224 || GET_CODE (x) == ROTATERT || GET_CODE (x) == ZERO_EXTRACT))
3225 return CC_NOOVmode;
3227 /* A construct for a conditional compare, if the false arm contains
3228 0, then both conditions must be true, otherwise either condition
3229 must be true. Not all conditions are possible, so CCmode is
3230 returned if it can't be done. */
3231 if (GET_CODE (x) == IF_THEN_ELSE
3232 && (XEXP (x, 2) == const0_rtx
3233 || XEXP (x, 2) == const1_rtx)
3234 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3235 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
3236 return select_dominance_cc_mode (op, XEXP (x, 0), XEXP (x, 1),
3237 INTVAL (XEXP (x, 2)));
3239 if (GET_MODE (x) == QImode && (op == EQ || op == NE))
3240 return CC_Zmode;
3242 if (GET_MODE (x) == SImode && (op == LTU || op == GEU)
3243 && GET_CODE (x) == PLUS
3244 && (rtx_equal_p (XEXP (x, 0), y) || rtx_equal_p (XEXP (x, 1), y)))
3245 return CC_Cmode;
3247 return CCmode;
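/* Some examples of the selection above (illustrative): a floating point
   equality test gets CCFPmode and any other float comparison CCFPEmode;
   (eq (plus a b) (const_int 0)) gets CC_NOOVmode, since the V flag from
   adds is unusable; and (ltu (plus a b) b) gets CC_Cmode, which tests the
   carry out of the addition.  */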
3250 /* X and Y are two things to compare using CODE. Emit the compare insn and
3251 return the rtx for register 0 in the proper mode. FP means this is a
3252 floating point compare: I don't think that it is needed on the arm. */
3255 gen_compare_reg (code, x, y, fp)
3256 enum rtx_code code;
3257 rtx x, y;
3259 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
3260 rtx cc_reg = gen_rtx_REG (mode, 24);
3262 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
3263 gen_rtx_COMPARE (mode, x, y)));
3265 return cc_reg;
3268 void
3269 arm_reload_in_hi (operands)
3270 rtx *operands;
3272 rtx base = find_replacement (&XEXP (operands[1], 0));
3274 emit_insn (gen_zero_extendqisi2 (operands[2], gen_rtx_MEM (QImode, base)));
3275 /* Handle the case where the address is too complex to be offset by 1. */
3276 if (GET_CODE (base) == MINUS
3277 || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
3279 rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[0]));
3281 emit_insn (gen_rtx_SET (VOIDmode, base_plus, base));
3282 base = base_plus;
3285 emit_insn (gen_zero_extendqisi2 (gen_rtx_SUBREG (SImode, operands[0], 0),
3286 gen_rtx_MEM (QImode,
3287 plus_constant (base, 1))));
3288 if (BYTES_BIG_ENDIAN)
3289 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
3290 gen_rtx_IOR (SImode,
3291 gen_rtx_ASHIFT
3292 (SImode,
3293 gen_rtx_SUBREG (SImode, operands[0], 0),
3294 GEN_INT (8)),
3295 operands[2])));
3296 else
3297 emit_insn (gen_rtx_SET (VOIDmode, gen_rtx_SUBREG (SImode, operands[0], 0),
3298 gen_rtx_IOR (SImode,
3299 gen_rtx_ASHIFT (SImode, operands[2],
3300 GEN_INT (8)),
3301 gen_rtx_SUBREG (SImode, operands[0],
3302 0))));
3305 void
3306 arm_reload_out_hi (operands)
3307 rtx *operands;
3309 rtx base = find_replacement (&XEXP (operands[0], 0));
3311 if (BYTES_BIG_ENDIAN)
3313 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, 1)),
3314 gen_rtx_SUBREG (QImode, operands[1], 0)));
3315 emit_insn (gen_lshrsi3 (operands[2],
3316 gen_rtx_SUBREG (SImode, operands[1], 0),
3317 GEN_INT (8)));
3318 emit_insn (gen_movqi (gen_rtx_MEM (QImode, base),
3319 gen_rtx_SUBREG (QImode, operands[2], 0)));
3321 else
3323 emit_insn (gen_movqi (gen_rtx_MEM (QImode, base),
3324 gen_rtx_SUBREG (QImode, operands[1], 0)));
3325 emit_insn (gen_lshrsi3 (operands[2],
3326 gen_rtx_SUBREG (SImode, operands[1], 0),
3327 GEN_INT (8)));
3328 emit_insn (gen_movqi (gen_rtx_MEM (QImode, plus_constant (base, 1)),
3329 gen_rtx_SUBREG (QImode, operands[2], 0)));
3333 /* Routines for manipulation of the constant pool. */
3334 /* This is unashamedly hacked from the version in sh.c, since the problem is
3335 extremely similar. */
3337 /* Arm instructions cannot load a large constant into a register;
3338 constants have to come from a pc relative load. The referent of a pc
3339 relative load instruction must be less than 1k in front of the instruction.
3340 This means that we often have to dump a constant inside a function, and
3341 generate code to branch around it.
3343 It is important to minimize this, since the branches will slow things
3344 down and make things bigger.
3346 Worst case code looks like:
3348 ldr rn, L1
3349 b L2
3350 align
3351 L1: .long value
3355 ldr rn, L3
3356 b L4
3357 align
3358 L3: .long value
3362 We fix this by performing a scan before scheduling, which notices which
3363 instructions need to have their operands fetched from the constant table
3364 and builds the table.
3367 The algorithm is:
3369 Scan; find an instruction which needs a pcrel move. Look forward, find the
3370 last barrier which is within MAX_COUNT bytes of the requirement.
3371 If there isn't one, make one. Process all the instructions between
3372 the insn we found and the barrier.
3374 In the above example, we can tell that L3 is within 1k of L1, so
3375 the first move can be shrunk from the 2 insn+constant sequence into
3376 just 1 insn, and the constant moved to L3 to make:
3378 ldr rn, L1
3380 ldr rn, L3
3381 b L4
3382 align
3383 L1: .long value
3384 L3: .long value
3387 Then the second move becomes the target for the shortening process.
3391 typedef struct
3393 rtx value; /* Value in table */
3394 HOST_WIDE_INT next_offset;
3395 enum machine_mode mode; /* Mode of value */
3396 } pool_node;
3398 /* The maximum number of constants that can fit into one pool, since
3399 the pc relative range is 0...1020 bytes and constants are at least 4
3400 bytes long */
3402 #define MAX_POOL_SIZE (1020/4)
3403 static pool_node pool_vector[MAX_POOL_SIZE];
3404 static int pool_size;
3405 static rtx pool_vector_label;
3407 /* Add a constant to the pool and return its label. */
3408 static HOST_WIDE_INT
3409 add_constant (x, mode)
3410 rtx x;
3411 enum machine_mode mode;
3413 int i;
3414 rtx lab;
3415 HOST_WIDE_INT offset;
3417 if (mode == SImode && GET_CODE (x) == MEM && CONSTANT_P (XEXP (x, 0))
3418 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
3419 x = get_pool_constant (XEXP (x, 0));
3420 #ifndef AOF_ASSEMBLER
3421 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == 3)
3422 x = XVECEXP (x, 0, 0);
3423 #endif
3425 #ifdef AOF_ASSEMBLER
3426 /* PIC Symbol references need to be converted into offsets into the
3427 based area. */
3428 if (flag_pic && GET_CODE (x) == SYMBOL_REF)
3429 x = aof_pic_entry (x);
3430 #endif /* AOF_ASSEMBLER */
3432 /* First see if we've already got it */
3433 for (i = 0; i < pool_size; i++)
3435 if (GET_CODE (x) == pool_vector[i].value->code
3436 && mode == pool_vector[i].mode)
3438 if (GET_CODE (x) == CODE_LABEL)
3440 if (XINT (x, 3) != XINT (pool_vector[i].value, 3))
3441 continue;
3443 if (rtx_equal_p (x, pool_vector[i].value))
3444 return pool_vector[i].next_offset - GET_MODE_SIZE (mode);
3448 /* Need a new one */
3449 pool_vector[pool_size].next_offset = GET_MODE_SIZE (mode);
3450 offset = 0;
3451 if (pool_size == 0)
3452 pool_vector_label = gen_label_rtx ();
3453 else
3454 pool_vector[pool_size].next_offset
3455 += (offset = pool_vector[pool_size - 1].next_offset);
3457 pool_vector[pool_size].value = x;
3458 pool_vector[pool_size].mode = mode;
3459 pool_size++;
3460 return offset;
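/* For example (illustrative): starting from an empty pool, adding an
   SImode constant returns offset 0; adding a DFmode constant next returns
   offset 4; adding the first constant a second time just returns 0 again
   rather than growing the pool.  */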
3463 /* Output the literal table */
3464 static void
3465 dump_table (scan)
3466 rtx scan;
3468 int i;
3470 scan = emit_label_after (gen_label_rtx (), scan);
3471 scan = emit_insn_after (gen_align_4 (), scan);
3472 scan = emit_label_after (pool_vector_label, scan);
3474 for (i = 0; i < pool_size; i++)
3476 pool_node *p = pool_vector + i;
3478 switch (GET_MODE_SIZE (p->mode))
3480 case 4:
3481 scan = emit_insn_after (gen_consttable_4 (p->value), scan);
3482 break;
3484 case 8:
3485 scan = emit_insn_after (gen_consttable_8 (p->value), scan);
3486 break;
3488 default:
3489 abort ();
3490 break;
3494 scan = emit_insn_after (gen_consttable_end (), scan);
3495 scan = emit_barrier_after (scan);
3496 pool_size = 0;
3499 /* Nonzero if the src operand needs to be fixed up. */
3500 static int
3501 fixit (src, mode, destreg)
3502 rtx src;
3503 enum machine_mode mode;
3504 int destreg;
3506 if (CONSTANT_P (src))
3508 if (GET_CODE (src) == CONST_INT)
3509 return (! const_ok_for_arm (INTVAL (src))
3510 && ! const_ok_for_arm (~INTVAL (src)));
3511 if (GET_CODE (src) == CONST_DOUBLE)
3512 return (GET_MODE (src) == VOIDmode
3513 || destreg < 16
3514 || (! const_double_rtx_ok_for_fpu (src)
3515 && ! neg_const_double_rtx_ok_for_fpu (src)));
3516 return symbol_mentioned_p (src);
3518 #ifndef AOF_ASSEMBLER
3519 else if (GET_CODE (src) == UNSPEC && XINT (src, 1) == 3)
3520 return 1;
3521 #endif
3522 else
3523 return (mode == SImode && GET_CODE (src) == MEM
3524 && GET_CODE (XEXP (src, 0)) == SYMBOL_REF
3525 && CONSTANT_POOL_ADDRESS_P (XEXP (src, 0)));
3528 /* Find the last barrier less than MAX_COUNT bytes from FROM, or create one. */
3529 static rtx
3530 find_barrier (from, max_count)
3531 rtx from;
3532 int max_count;
3534 int count = 0;
3535 rtx found_barrier = 0;
3536 rtx last = from;
3538 while (from && count < max_count)
3540 if (GET_CODE (from) == BARRIER)
3541 found_barrier = from;
3543 /* Count the length of this insn */
3544 if (GET_CODE (from) == INSN
3545 && GET_CODE (PATTERN (from)) == SET
3546 && CONSTANT_P (SET_SRC (PATTERN (from)))
3547 && CONSTANT_POOL_ADDRESS_P (SET_SRC (PATTERN (from))))
3549 rtx src = SET_SRC (PATTERN (from));
3550 count += 8; /* The length of such insns is noted as 8; see arm_reorg. */
3552 else
3553 count += get_attr_length (from);
3555 last = from;
3556 from = NEXT_INSN (from);
3559 if (!found_barrier)
3561 /* We didn't find a barrier in time to
3562 dump our stuff, so we'll make one */
3563 rtx label = gen_label_rtx ();
3565 if (from)
3566 from = PREV_INSN (last);
3567 else
3568 from = get_last_insn ();
3570 /* Walk back to be just before any jump */
3571 while (GET_CODE (from) == JUMP_INSN
3572 || GET_CODE (from) == NOTE
3573 || GET_CODE (from) == CODE_LABEL)
3574 from = PREV_INSN (from);
3576 from = emit_jump_insn_after (gen_jump (label), from);
3577 JUMP_LABEL (from) = label;
3578 found_barrier = emit_barrier_after (from);
3579 emit_label_after (label, found_barrier);
3580 return found_barrier;
3583 return found_barrier;
3586 /* Nonzero if the insn is a move instruction which needs to be fixed. */
3587 static int
3588 broken_move (insn)
3589 rtx insn;
3591 if (!INSN_DELETED_P (insn)
3592 && GET_CODE (insn) == INSN
3593 && GET_CODE (PATTERN (insn)) == SET)
3595 rtx pat = PATTERN (insn);
3596 rtx src = SET_SRC (pat);
3597 rtx dst = SET_DEST (pat);
3598 int destreg;
3599 enum machine_mode mode = GET_MODE (dst);
3600 if (dst == pc_rtx)
3601 return 0;
3603 if (GET_CODE (dst) == REG)
3604 destreg = REGNO (dst);
3605 else if (GET_CODE (dst) == SUBREG && GET_CODE (SUBREG_REG (dst)) == REG)
3606 destreg = REGNO (SUBREG_REG (dst));
3608 return fixit (src, mode, destreg);
3610 return 0;
3613 void
3614 arm_reorg (first)
3615 rtx first;
3617 rtx insn;
3618 int count_size;
3619 int regno;
3621 #if 0
3622 /* The ldr instruction can work with up to a 4k offset, and most constants
3623 will be loaded with one of these instructions; however, the adr
3624 instruction and the ldf instructions only work with a 1k offset. This
3625 code needs to be rewritten to use the 4k offset when possible, and to
3626 adjust when a 1k offset is needed. For now we just use a 1k offset
3627 from the start. */
3628 count_size = 4000;
3630 /* Floating point operands can't work further than 1024 bytes from the
3631 PC, so to make things simple we restrict all loads for such functions. */
3633 if (TARGET_HARD_FLOAT)
3634 for (regno = 16; regno < 24; regno++)
3635 if (regs_ever_live[regno])
3637 count_size = 1000;
3638 break;
3640 #else
3641 count_size = 1000;
3642 #endif /* 0 */
3644 for (insn = first; insn; insn = NEXT_INSN (insn))
3646 if (broken_move (insn))
3648 /* This is a broken move instruction; scan ahead looking for
3649 a barrier to stick the constant table behind. */
3650 rtx scan;
3651 rtx barrier = find_barrier (insn, count_size);
3653 /* Now find all the moves between the points and modify them */
3654 for (scan = insn; scan != barrier; scan = NEXT_INSN (scan))
3656 if (broken_move (scan))
3658 /* This is a broken move instruction, add it to the pool */
3659 rtx pat = PATTERN (scan);
3660 rtx src = SET_SRC (pat);
3661 rtx dst = SET_DEST (pat);
3662 enum machine_mode mode = GET_MODE (dst);
3663 HOST_WIDE_INT offset;
3664 rtx newinsn = scan;
3665 rtx newsrc;
3666 rtx addr;
3667 int scratch;
3669 /* If this is an HImode constant load, convert it into
3670 an SImode constant load. Since the register is always
3671 32 bits this is safe. We have to do this, since the
3672 load pc-relative instruction only does a 32-bit load. */
3673 if (mode == HImode)
3675 mode = SImode;
3676 if (GET_CODE (dst) != REG)
3677 abort ();
3678 PUT_MODE (dst, SImode);
3681 offset = add_constant (src, mode);
3682 addr = plus_constant (gen_rtx_LABEL_REF (VOIDmode,
3683 pool_vector_label),
3684 offset);
3686 /* For wide moves to integer regs we need to split the
3687 address calculation off into a separate insn, so that
3688 the load can then be done with a load-multiple. This is
3689 safe, since we have already noted the length of such
3690 insns to be 8, and we are immediately over-writing the
3691 scratch we have grabbed with the final result. */
3692 if (GET_MODE_SIZE (mode) > 4
3693 && (scratch = REGNO (dst)) < 16)
3695 rtx reg = gen_rtx_REG (SImode, scratch);
3696 newinsn = emit_insn_after (gen_movaddr (reg, addr),
3697 newinsn);
3698 addr = reg;
3701 newsrc = gen_rtx_MEM (mode, addr);
3703 /* Build a jump insn wrapper around the move instead
3704 of an ordinary insn, because we want to have room for
3705 the target label rtx in fld[7], which an ordinary
3706 insn doesn't have. */
3707 newinsn = emit_jump_insn_after (gen_rtx_SET (VOIDmode,
3708 dst, newsrc),
3709 newinsn);
3710 JUMP_LABEL (newinsn) = pool_vector_label;
3712 /* But it's still an ordinary insn */
3713 PUT_CODE (newinsn, INSN);
3715 /* Kill old insn */
3716 delete_insn (scan);
3717 scan = newinsn;
3720 dump_table (barrier);
3721 insn = scan;
3727 /* Routines to output assembly language. */
3729 /* If the rtx is one of the valid FPA immediate constants, return the
3730 string of the number. In this way we can ensure that valid double
3731 constants are generated even when cross compiling. */
3732 char *
3733 fp_immediate_constant (x)
3734 rtx x;
3736 REAL_VALUE_TYPE r;
3737 int i;
3739 if (!fpa_consts_inited)
3740 init_fpa_table ();
3742 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3743 for (i = 0; i < 8; i++)
3744 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
3745 return strings_fpa[i];
3747 abort ();
3750 /* As for fp_immediate_constant, but value is passed directly, not in rtx. */
3751 static char *
3752 fp_const_from_val (r)
3753 REAL_VALUE_TYPE *r;
3755 int i;
3757 if (! fpa_consts_inited)
3758 init_fpa_table ();
3760 for (i = 0; i < 8; i++)
3761 if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
3762 return strings_fpa[i];
3764 abort ();
3767 /* Output the operands of a LDM/STM instruction to STREAM.
3768 MASK is the ARM register set mask of which only bits 0-15 are important.
3769 INSTR is the instruction format string, which includes the base
3770 register. HAT is nonzero if a hat (^) must follow the register list. */
3772 void
3773 print_multi_reg (stream, instr, mask, hat)
3774 FILE *stream;
3775 char *instr;
3776 int mask, hat;
3778 int i;
3779 int not_first = FALSE;
3781 fputc ('\t', stream);
3782 fprintf (stream, instr, REGISTER_PREFIX);
3783 fputs (", {", stream);
3784 for (i = 0; i < 16; i++)
3785 if (mask & (1 << i))
3787 if (not_first)
3788 fprintf (stream, ", ");
3789 fprintf (stream, "%s%s", REGISTER_PREFIX, reg_names[i]);
3790 not_first = TRUE;
3793 fprintf (stream, "}%s\n", hat ? "^" : "");
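/* The prologue code calls this roughly as follows (illustrative, assuming
   an empty REGISTER_PREFIX):
	print_multi_reg (stream, "stmfd\t%ssp!", 0x4003, FALSE);
   which prints "stmfd sp!, {r0, r1, lr}", bits 0, 1 and 14 of MASK.  */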
3796 /* Output a 'call' insn. */
3798 char *
3799 output_call (operands)
3800 rtx *operands;
3802 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
3804 if (REGNO (operands[0]) == 14)
3806 operands[0] = gen_rtx_REG (SImode, 12);
3807 output_asm_insn ("mov%?\t%0, %|lr", operands);
3809 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
3810 output_asm_insn ("mov%?\t%|pc, %0", operands);
3811 return "";
3814 static int
3815 eliminate_lr2ip (x)
3816 rtx *x;
3818 int something_changed = 0;
3819 rtx x0 = *x;
3820 int code = GET_CODE (x0);
3821 register int i, j;
3822 register char *fmt;
3824 switch (code)
3826 case REG:
3827 if (REGNO (x0) == 14)
3829 *x = gen_rtx_REG (SImode, 12);
3830 return 1;
3832 return 0;
3833 default:
3834 /* Scan through the sub-elements and change any references there */
3835 fmt = GET_RTX_FORMAT (code);
3836 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3837 if (fmt[i] == 'e')
3838 something_changed |= eliminate_lr2ip (&XEXP (x0, i));
3839 else if (fmt[i] == 'E')
3840 for (j = 0; j < XVECLEN (x0, i); j++)
3841 something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));
3842 return something_changed;
3846 /* Output a 'call' insn that is a reference in memory. */
3848 char *
3849 output_call_mem (operands)
3850 rtx *operands;
3852 operands[0] = copy_rtx (operands[0]); /* Be ultra careful */
3853 /* Handle calls using lr by using ip (which may be clobbered in subr anyway). */
3855 if (eliminate_lr2ip (&operands[0]))
3856 output_asm_insn ("mov%?\t%|ip, %|lr", operands);
3858 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
3859 output_asm_insn ("ldr%?\t%|pc, %0", operands);
3860 return "";
3864 /* Output a move from arm registers to an fpu register.
3865 OPERANDS[0] is an fpu register.
3866 OPERANDS[1] is the first register of an arm register pair. */
3868 char *
3869 output_mov_long_double_fpu_from_arm (operands)
3870 rtx *operands;
3872 int arm_reg0 = REGNO (operands[1]);
3873 rtx ops[3];
3875 if (arm_reg0 == 12)
3876 abort();
3878 ops[0] = gen_rtx_REG (SImode, arm_reg0);
3879 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
3880 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
3882 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
3883 output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);
3884 return "";
3887 /* Output a move from an fpu register to arm registers.
3888 OPERANDS[0] is the first register of an arm register pair.
3889 OPERANDS[1] is an fpu register. */
3891 char *
3892 output_mov_long_double_arm_from_fpu (operands)
3893 rtx *operands;
3895 int arm_reg0 = REGNO (operands[0]);
3896 rtx ops[3];
3898 if (arm_reg0 == 12)
3899 abort();
3901 ops[0] = gen_rtx_REG (SImode, arm_reg0);
3902 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
3903 ops[2] = gen_rtx_REG (SImode, 2 + arm_reg0);
3905 output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
3906 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
3907 return "";
3910 /* Output a move from arm registers to arm registers of a long double.
3911 OPERANDS[0] is the destination.
3912 OPERANDS[1] is the source. */
3913 char *
3914 output_mov_long_double_arm_from_arm (operands)
3915 rtx *operands;
3917 /* We have to be careful here because the two register ranges might overlap. */
3918 int dest_start = REGNO (operands[0]);
3919 int src_start = REGNO (operands[1]);
3920 rtx ops[2];
3921 int i;
3923 if (dest_start < src_start)
3925 for (i = 0; i < 3; i++)
3927 ops[0] = gen_rtx_REG (SImode, dest_start + i);
3928 ops[1] = gen_rtx_REG (SImode, src_start + i);
3929 output_asm_insn ("mov%?\t%0, %1", ops);
3932 else
3934 for (i = 2; i >= 0; i--)
3936 ops[0] = gen_rtx_REG (SImode, dest_start + i);
3937 ops[1] = gen_rtx_REG (SImode, src_start + i);
3938 output_asm_insn ("mov%?\t%0, %1", ops);
3942 return "";
3946 /* Output a move from arm registers to an fpu register.
3947 OPERANDS[0] is an fpu register.
3948 OPERANDS[1] is the first register of an arm register pair. */
3950 char *
3951 output_mov_double_fpu_from_arm (operands)
3952 rtx *operands;
3954 int arm_reg0 = REGNO (operands[1]);
3955 rtx ops[2];
3957 if (arm_reg0 == 12)
3958 abort();
3959 ops[0] = gen_rtx_REG (SImode, arm_reg0);
3960 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
3961 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
3962 output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
3963 return "";
3966 /* Output a move from an fpu register to arm registers.
3967 OPERANDS[0] is the first register of an arm register pair.
3968 OPERANDS[1] is an fpu register. */
3970 char *
3971 output_mov_double_arm_from_fpu (operands)
3972 rtx *operands;
3974 int arm_reg0 = REGNO (operands[0]);
3975 rtx ops[2];
3977 if (arm_reg0 == 12)
3978 abort();
3980 ops[0] = gen_rtx_REG (SImode, arm_reg0);
3981 ops[1] = gen_rtx_REG (SImode, 1 + arm_reg0);
3982 output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
3983 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
3984 return "";
3987 /* Output a move between double words.
3988 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
3989 or MEM<-REG and all MEMs must be offsettable addresses. */
3991 char *
3992 output_move_double (operands)
3993 rtx *operands;
3995 enum rtx_code code0 = GET_CODE (operands[0]);
3996 enum rtx_code code1 = GET_CODE (operands[1]);
3997 rtx otherops[3];
3999 if (code0 == REG)
4001 int reg0 = REGNO (operands[0]);
4003 otherops[0] = gen_rtx_REG (SImode, 1 + reg0);
4004 if (code1 == REG)
4006 int reg1 = REGNO (operands[1]);
4007 if (reg1 == 12)
4008 abort();
4010 /* Ensure the second source is not overwritten */
4011 if (reg1 == reg0 + (WORDS_BIG_ENDIAN ? -1 : 1))
4012 output_asm_insn("mov%?\t%Q0, %Q1\n\tmov%?\t%R0, %R1", operands);
4013 else
4014 output_asm_insn("mov%?\t%R0, %R1\n\tmov%?\t%Q0, %Q1", operands);
4016 else if (code1 == CONST_DOUBLE)
4018 if (GET_MODE (operands[1]) == DFmode)
4020 long l[2];
4021 union real_extract u;
4023 bcopy ((char *) &CONST_DOUBLE_LOW (operands[1]), (char *) &u,
4024 sizeof (u));
4025 REAL_VALUE_TO_TARGET_DOUBLE (u.d, l);
4026 otherops[1] = GEN_INT(l[1]);
4027 operands[1] = GEN_INT(l[0]);
4029 else if (GET_MODE (operands[1]) != VOIDmode)
4030 abort ();
4031 else if (WORDS_BIG_ENDIAN)
4034 otherops[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
4035 operands[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
4037 else
4040 otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
4041 operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
4043 output_mov_immediate (operands);
4044 output_mov_immediate (otherops);
4046 else if (code1 == CONST_INT)
4048 #if HOST_BITS_PER_WIDE_INT > 32
4049 /* If HOST_WIDE_INT is more than 32 bits, the intval tells us
4050 what the upper word is. */
4051 if (WORDS_BIG_ENDIAN)
4053 otherops[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
4054 operands[1] = GEN_INT (INTVAL (operands[1]) >> 32);
4056 else
4058 otherops[1] = GEN_INT (INTVAL (operands[1]) >> 32);
4059 operands[1] = GEN_INT (ARM_SIGN_EXTEND (INTVAL (operands[1])));
4061 #else
4062 /* Sign extend the intval into the high-order word */
4063 if (WORDS_BIG_ENDIAN)
4065 otherops[1] = operands[1];
4066 operands[1] = (INTVAL (operands[1]) < 0
4067 ? constm1_rtx : const0_rtx);
4069 else
4070 otherops[1] = INTVAL (operands[1]) < 0 ? constm1_rtx : const0_rtx;
4071 #endif
4072 output_mov_immediate (otherops);
4073 output_mov_immediate (operands);
4075 else if (code1 == MEM)
4077 switch (GET_CODE (XEXP (operands[1], 0)))
4079 case REG:
4080 output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
4081 break;
4083 case PRE_INC:
4084 abort (); /* Should never happen now */
4085 break;
4087 case PRE_DEC:
4088 output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
4089 break;
4091 case POST_INC:
4092 output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
4093 break;
4095 case POST_DEC:
4096 abort (); /* Should never happen now */
4097 break;
4099 case LABEL_REF:
4100 case CONST:
4101 output_asm_insn ("adr%?\t%0, %1", operands);
4102 output_asm_insn ("ldm%?ia\t%0, %M0", operands);
4103 break;
4105 default:
4106 if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1)))
4108 otherops[0] = operands[0];
4109 otherops[1] = XEXP (XEXP (operands[1], 0), 0);
4110 otherops[2] = XEXP (XEXP (operands[1], 0), 1);
4111 if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
4113 if (GET_CODE (otherops[2]) == CONST_INT)
4115 switch (INTVAL (otherops[2]))
4117 case -8:
4118 output_asm_insn ("ldm%?db\t%1, %M0", otherops);
4119 return "";
4120 case -4:
4121 output_asm_insn ("ldm%?da\t%1, %M0", otherops);
4122 return "";
4123 case 4:
4124 output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
4125 return "";
4127 if (!(const_ok_for_arm (INTVAL (otherops[2]))))
4128 output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
4129 else
4130 output_asm_insn ("add%?\t%0, %1, %2", otherops);
4132 else
4133 output_asm_insn ("add%?\t%0, %1, %2", otherops);
4135 else
4136 output_asm_insn ("sub%?\t%0, %1, %2", otherops);
4137 return "ldm%?ia\t%0, %M0";
4139 else
4141 otherops[1] = adj_offsettable_operand (operands[1], 4);
4142 /* Take care of overlapping base/data reg. */
4143 if (reg_mentioned_p (operands[0], operands[1]))
4145 output_asm_insn ("ldr%?\t%0, %1", otherops);
4146 output_asm_insn ("ldr%?\t%0, %1", operands);
4148 else
4150 output_asm_insn ("ldr%?\t%0, %1", operands);
4151 output_asm_insn ("ldr%?\t%0, %1", otherops);
4156 else
4157 abort(); /* Constraints should prevent this */
4159 else if (code0 == MEM && code1 == REG)
4161 if (REGNO (operands[1]) == 12)
4162 abort();
4164 switch (GET_CODE (XEXP (operands[0], 0)))
4166 case REG:
4167 output_asm_insn ("stm%?ia\t%m0, %M1", operands);
4168 break;
4170 case PRE_INC:
4171 abort (); /* Should never happen now */
4172 break;
4174 case PRE_DEC:
4175 output_asm_insn ("stm%?db\t%m0!, %M1", operands);
4176 break;
4178 case POST_INC:
4179 output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
4180 break;
4182 case POST_DEC:
4183 abort (); /* Should never happen now */
4184 break;
4186 case PLUS:
4187 if (GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT)
4189 switch (INTVAL (XEXP (XEXP (operands[0], 0), 1)))
4191 case -8:
4192 output_asm_insn ("stm%?db\t%m0, %M1", operands);
4193 return "";
4195 case -4:
4196 output_asm_insn ("stm%?da\t%m0, %M1", operands);
4197 return "";
4199 case 4:
4200 output_asm_insn ("stm%?ib\t%m0, %M1", operands);
4201 return "";
4204 /* Fall through */
4206 default:
4207 otherops[0] = adj_offsettable_operand (operands[0], 4);
4208 otherops[1] = gen_rtx_REG (SImode, 1 + REGNO (operands[1]));
4209 output_asm_insn ("str%?\t%1, %0", operands);
4210 output_asm_insn ("str%?\t%1, %0", otherops);
4213 else
4214 abort(); /* Constraints should prevent this */
4216 return "";
4220 /* Output an arbitrary MOV reg, #n.
4221 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
4223 char *
4224 output_mov_immediate (operands)
4225 rtx *operands;
4227 HOST_WIDE_INT n = INTVAL (operands[1]);
4228 int n_ones = 0;
4229 int i;
4231 /* Try to use one MOV */
4232 if (const_ok_for_arm (n))
4234 output_asm_insn ("mov%?\t%0, %1", operands);
4235 return "";
4238 /* Try to use one MVN */
4239 if (const_ok_for_arm (~n))
4241 operands[1] = GEN_INT (~n);
4242 output_asm_insn ("mvn%?\t%0, %1", operands);
4243 return "";
4246 /* If all else fails, make it out of ORRs or BICs as appropriate. */
4248 for (i=0; i < 32; i++)
4249 if (n & 1 << i)
4250 n_ones++;
4252 if (n_ones > 16) /* Shorter to use MVN with BIC in this case. */
4253 output_multi_immediate(operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1,
4254 ~n);
4255 else
4256 output_multi_immediate(operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1,
4257 n);
4259 return "";
4263 /* Output an ADD r, s, #n where n may be too big for one instruction.
4264 If we are adding zero to the same register, output nothing. */
4266 char *
4267 output_add_immediate (operands)
4268 rtx *operands;
4270 HOST_WIDE_INT n = INTVAL (operands[2]);
4272 if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
4274 if (n < 0)
4275 output_multi_immediate (operands,
4276 "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
4277 -n);
4278 else
4279 output_multi_immediate (operands,
4280 "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
4284 return "";
4287 /* Output a multiple immediate operation.
4288 OPERANDS is the vector of operands referred to in the output patterns.
4289 INSTR1 is the output pattern to use for the first constant.
4290 INSTR2 is the output pattern to use for subsequent constants.
4291 IMMED_OP is the index of the constant slot in OPERANDS.
4292 N is the constant value. */
4294 static char *
4295 output_multi_immediate (operands, instr1, instr2, immed_op, n)
4296 rtx *operands;
4297 char *instr1, *instr2;
4298 int immed_op;
4299 HOST_WIDE_INT n;
4301 #if HOST_BITS_PER_WIDE_INT > 32
4302 n &= 0xffffffff;
4303 #endif
4305 if (n == 0)
4307 operands[immed_op] = const0_rtx;
4308 output_asm_insn (instr1, operands); /* Quick and easy output */
4310 else
4312 int i;
4313 char *instr = instr1;
4315 /* Note that n is never zero here (which would give no output) */
4316 for (i = 0; i < 32; i += 2)
4318 if (n & (3 << i))
4320 operands[immed_op] = GEN_INT (n & (255 << i));
4321 output_asm_insn (instr, operands);
4322 instr = instr2;
4323 i += 6;
4327 return "";
4331 /* Return the appropriate ARM instruction for the operation code.
4332 The returned result should not be overwritten. OP is the rtx of the
4333 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
4334 was shifted. */
4336 char *
4337 arithmetic_instr (op, shift_first_arg)
4338 rtx op;
4339 int shift_first_arg;
4341 switch (GET_CODE (op))
4343 case PLUS:
4344 return "add";
4346 case MINUS:
4347 return shift_first_arg ? "rsb" : "sub";
4349 case IOR:
4350 return "orr";
4352 case XOR:
4353 return "eor";
4355 case AND:
4356 return "and";
4358 default:
4359 abort ();
4364 /* Ensure valid constant shifts and return the appropriate shift mnemonic
4365 for the operation code. The returned result should not be overwritten.
4366 OP is the rtx code of the shift.
4367 On exit, *AMOUNTP will be -1 if the shift is by a register; otherwise
4368 it will be the constant shift amount. */
4370 static char *
4371 shift_op (op, amountp)
4372 rtx op;
4373 HOST_WIDE_INT *amountp;
4375 char *mnem;
4376 enum rtx_code code = GET_CODE (op);
4378 if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
4379 *amountp = -1;
4380 else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
4381 *amountp = INTVAL (XEXP (op, 1));
4382 else
4383 abort ();
4385 switch (code)
4387 case ASHIFT:
4388 mnem = "asl";
4389 break;
4391 case ASHIFTRT:
4392 mnem = "asr";
4393 break;
4395 case LSHIFTRT:
4396 mnem = "lsr";
4397 break;
4399 case ROTATERT:
4400 mnem = "ror";
4401 break;
4403 case MULT:
4404 /* We never have to worry about the amount being other than a
4405 power of 2, since this case can never be reloaded from a reg. */
4406 if (*amountp != -1)
4407 *amountp = int_log2 (*amountp);
4408 else
4409 abort ();
4410 return "asl";
4412 default:
4413 abort ();
4416 if (*amountp != -1)
4418 /* This is not 100% correct, but follows from the desire to merge
4419 multiplication by a power of 2 with the recognizer for a
4420 shift. >=32 is not a valid shift for "asl", so we must try to
4421 output a shift that produces the correct arithmetical result.
4422 Using lsr #32 is identical except for the fact that the carry bit
4423 is not set correctly if we set the flags; but we never use the
4424 carry bit from such an operation, so we can ignore that. */
4425 if (code == ROTATERT)
4426 *amountp &= 31; /* Rotate is just modulo 32 */
4427 else if (*amountp != (*amountp & 31))
4429 if (code == ASHIFT)
4430 mnem = "lsr";
4431 *amountp = 32;
4434 /* Shifts of 0 are no-ops. */
4435 if (*amountp == 0)
4436 return NULL;
4439 return mnem;
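/* Illustrative examples of the mapping implemented above:

     (ashiftrt x (reg ...))	  -> "asr", *AMOUNTP = -1 (shift by register)
     (mult x (const_int 8))	  -> "asl", *AMOUNTP = 3 (power of two)
     (ashift x (const_int 33))	  -> "lsr", *AMOUNTP = 32 (arithmetically
				     zero, as discussed in the comment above)
     (rotatert x (const_int 33))  -> "ror", *AMOUNTP = 1 (modulo 32)  */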
4443 /* Obtain the shift count (the log2) of the power of two POWER. */
4445 static HOST_WIDE_INT
4446 int_log2 (power)
4447 HOST_WIDE_INT power;
4449 HOST_WIDE_INT shift = 0;
4451 while (((((HOST_WIDE_INT) 1) << shift) & power) == 0)
4453 if (shift > 31)
4454 abort ();
4455 shift++;
4458 return shift;
4461 /* Output a .ascii pseudo-op, keeping track of lengths. This is because
4462 /bin/as is horribly restrictive. */
4464 void
4465 output_ascii_pseudo_op (stream, p, len)
4466 FILE *stream;
4467 unsigned char *p;
4468 int len;
4470 int i;
4471 int len_so_far = 1000;
4472 int chars_so_far = 0;
4474 for (i = 0; i < len; i++)
4476 register int c = p[i];
4478 if (len_so_far > 50)
4480 if (chars_so_far)
4481 fputs ("\"\n", stream);
4482 fputs ("\t.ascii\t\"", stream);
4483 len_so_far = 0;
4484 chars_so_far = 0;
4487 if (c == '\"' || c == '\\')
4489 putc('\\', stream);
4490 len_so_far++;
4493 if (c >= ' ' && c < 0177)
4495 putc (c, stream);
4496 len_so_far++;
4498 else
4500 fprintf (stream, "\\%03o", c);
4501 len_so_far +=4;
4504 chars_so_far++;
4507 fputs ("\"\n", stream);
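/* Illustrative output: the string `He said "hi"' followed by a newline
   comes out as

	.ascii	"He said \"hi\"\012"

   with quote and backslash escaped and non-printing characters written as
   octal escapes.  LEN_SO_FAR starts at 1000 simply to force the `.ascii'
   header before the first character; a fresh directive is started roughly
   every 50 output characters to keep /bin/as happy.  */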
4511 /* Try to determine whether a pattern really clobbers the link register.
4512 This information is useful when peepholing, so that lr need not be pushed
4513 if we combine a call followed by a return.
4514 NOTE: This code does not check for side-effect expressions in a SET_SRC:
4515 such a check should not be needed because these only update an existing
4516 value within a register; the register must still be set elsewhere within
4517 the function. */
4519 static int
4520 pattern_really_clobbers_lr (x)
4521 rtx x;
4523 int i;
4525 switch (GET_CODE (x))
4527 case SET:
4528 switch (GET_CODE (SET_DEST (x)))
4530 case REG:
4531 return REGNO (SET_DEST (x)) == 14;
4533 case SUBREG:
4534 if (GET_CODE (XEXP (SET_DEST (x), 0)) == REG)
4535 return REGNO (XEXP (SET_DEST (x), 0)) == 14;
4537 if (GET_CODE (XEXP (SET_DEST (x), 0)) == MEM)
4538 return 0;
4539 abort ();
4541 default:
4542 return 0;
4545 case PARALLEL:
4546 for (i = 0; i < XVECLEN (x, 0); i++)
4547 if (pattern_really_clobbers_lr (XVECEXP (x, 0, i)))
4548 return 1;
4549 return 0;
4551 case CLOBBER:
4552 switch (GET_CODE (XEXP (x, 0)))
4554 case REG:
4555 return REGNO (XEXP (x, 0)) == 14;
4557 case SUBREG:
4558 if (GET_CODE (XEXP (XEXP (x, 0), 0)) == REG)
4559 return REGNO (XEXP (XEXP (x, 0), 0)) == 14;
4560 abort ();
4562 default:
4563 return 0;
4566 case UNSPEC:
4567 return 1;
4569 default:
4570 return 0;
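/* Illustrative classification (lr is register 14):

     (set (reg:SI 14) (plus ...))   -> 1, lr is written
     (clobber (reg:SI 14))	    -> 1
     (set (reg:SI 0) (reg:SI 14))   -> 0, lr is only read
     (unspec ...)		    -> 1, assumed clobbered to be safe  */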
4574 static int
4575 function_really_clobbers_lr (first)
4576 rtx first;
4578 rtx insn, next;
4580 for (insn = first; insn; insn = next_nonnote_insn (insn))
4582 switch (GET_CODE (insn))
4584 case BARRIER:
4585 case NOTE:
4586 case CODE_LABEL:
4587 case JUMP_INSN: /* Jump insns only change the PC (and conds) */
4588 case INLINE_HEADER:
4589 break;
4591 case INSN:
4592 if (pattern_really_clobbers_lr (PATTERN (insn)))
4593 return 1;
4594 break;
4596 case CALL_INSN:
4597 /* Don't yet know how to handle those calls that are not to a
4598 SYMBOL_REF */
4599 if (GET_CODE (PATTERN (insn)) != PARALLEL)
4600 abort ();
4602 switch (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)))
4604 case CALL:
4605 if (GET_CODE (XEXP (XEXP (XVECEXP (PATTERN (insn), 0, 0), 0), 0))
4606 != SYMBOL_REF)
4607 return 1;
4608 break;
4610 case SET:
4611 if (GET_CODE (XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn),
4612 0, 0)), 0), 0))
4613 != SYMBOL_REF)
4614 return 1;
4615 break;
4617 default: /* Don't recognize it, be safe */
4618 return 1;
4621 /* A call can be made (by peepholing) not to clobber lr iff it is
4622 followed by a return. There may, however, be a USE insn in between if
4623 we are returning the result of the call.
4624 If we run off the end of the insn chain, then that means the
4625 call was at the end of the function. Unfortunately we don't
4626 have a return insn for the peephole to recognize, so we
4627 must reject this. (Can this be fixed by adding our own insn?) */
4628 if ((next = next_nonnote_insn (insn)) == NULL)
4629 return 1;
4631 /* No need to worry about lr if the call never returns */
4632 if (GET_CODE (next) == BARRIER)
4633 break;
4635 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == USE
4636 && (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
4637 && (REGNO (SET_DEST (XVECEXP (PATTERN (insn), 0, 0)))
4638 == REGNO (XEXP (PATTERN (next), 0))))
4639 if ((next = next_nonnote_insn (next)) == NULL)
4640 return 1;
4642 if (GET_CODE (next) == JUMP_INSN
4643 && GET_CODE (PATTERN (next)) == RETURN)
4644 break;
4645 return 1;
4647 default:
4648 abort ();
4652 /* We have reached the end of the chain so lr was _not_ clobbered */
4653 return 0;
4656 char *
4657 output_return_instruction (operand, really_return, reverse)
4658 rtx operand;
4659 int really_return;
4660 int reverse;
4662 char instr[100];
4663 int reg, live_regs = 0;
4664 int volatile_func = (optimize > 0
4665 && TREE_THIS_VOLATILE (current_function_decl));
4667 return_used_this_function = 1;
4669 if (volatile_func)
4671 rtx ops[2];
4672 /* If this function was declared non-returning, and we have found a tail
4673 call, then we have to trust that the called function won't return. */
4674 if (! really_return)
4675 return "";
4677 /* Otherwise, trap an attempted return by aborting. */
4678 ops[0] = operand;
4679 ops[1] = gen_rtx_SYMBOL_REF (Pmode, "abort");
4680 assemble_external_libcall (ops[1]);
4681 output_asm_insn (reverse ? "bl%D0\t%a1" : "bl%d0\t%a1", ops);
4682 return "";
4685 if (current_function_calls_alloca && ! really_return)
4686 abort();
4688 for (reg = 0; reg <= 10; reg++)
4689 if (regs_ever_live[reg] && ! call_used_regs[reg])
4690 live_regs++;
4692 if (live_regs || (regs_ever_live[14] && ! lr_save_eliminated))
4693 live_regs++;
4695 if (frame_pointer_needed)
4696 live_regs += 4;
4698 if (live_regs)
4700 if (lr_save_eliminated || ! regs_ever_live[14])
4701 live_regs++;
4703 if (frame_pointer_needed)
4704 strcpy (instr,
4705 reverse ? "ldm%?%D0ea\t%|fp, {" : "ldm%?%d0ea\t%|fp, {");
4706 else
4707 strcpy (instr,
4708 reverse ? "ldm%?%D0fd\t%|sp!, {" : "ldm%?%d0fd\t%|sp!, {");
4710 for (reg = 0; reg <= 10; reg++)
4711 if (regs_ever_live[reg] && ! call_used_regs[reg])
4713 strcat (instr, "%|");
4714 strcat (instr, reg_names[reg]);
4715 if (--live_regs)
4716 strcat (instr, ", ");
4719 if (frame_pointer_needed)
4721 strcat (instr, "%|");
4722 strcat (instr, reg_names[11]);
4723 strcat (instr, ", ");
4724 strcat (instr, "%|");
4725 strcat (instr, reg_names[13]);
4726 strcat (instr, ", ");
4727 strcat (instr, "%|");
4728 strcat (instr, really_return ? reg_names[15] : reg_names[14]);
4730 else
4732 strcat (instr, "%|");
4733 strcat (instr, really_return ? reg_names[15] : reg_names[14]);
4735 strcat (instr, (TARGET_APCS_32 || !really_return) ? "}" : "}^");
4736 output_asm_insn (instr, &operand);
4738 else if (really_return)
4740 if (TARGET_THUMB_INTERWORK)
4741 sprintf (instr, "bx%%?%%%s\t%%|lr", reverse ? "D" : "d");
4742 else
4743 sprintf (instr, "mov%%?%%%s0%s\t%%|pc, %%|lr",
4744 reverse ? "D" : "d", TARGET_APCS_32 ? "" : "s");
4745 output_asm_insn (instr, &operand);
4748 return "";
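/* Illustrative return sequences (unconditional, APCS-32):

     no saved registers:	   mov	 pc, lr
     (with interworking:	   bx	 lr)
     saved {r4, r5}, no frame:	   ldmfd sp!, {r4, r5, pc}
     saved {r4}, frame pointer:	   ldmea fp, {r4, fp, sp, pc}

   In 26-bit mode the loads that write pc take `}^' (and the mov becomes
   `movs') so that the PSR bits are restored along with the return.  */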
4751 /* Return nonzero if optimizing and the current function is volatile.
4752 Such functions never return, and many memory cycles can be saved
4753 by not storing register values that will never be needed again.
4754 This optimization was added to speed up context switching in a
4755 kernel application. */
4757 int
4758 arm_volatile_func ()
4760 return (optimize > 0 && TREE_THIS_VOLATILE (current_function_decl));
4763 /* The amount of stack adjustment that happens here, in output_return and in
4764 output_epilogue must be exactly the same as was calculated during reload,
4765 or things will point to the wrong place. The only time we can safely
4766 ignore this constraint is when a function has no arguments on the stack,
4767 no stack frame requirement and no live registers except for `lr'. If we
4768 can guarantee that by making all function calls into tail calls and that
4769 lr is not clobbered in any other way, then there is no need to push lr
4770 onto the stack. */
4772 void
4773 output_func_prologue (f, frame_size)
4774 FILE *f;
4775 int frame_size;
4777 int reg, live_regs_mask = 0;
4778 rtx operands[3];
4779 int volatile_func = (optimize > 0
4780 && TREE_THIS_VOLATILE (current_function_decl));
4782 /* Nonzero if we must stuff some register arguments onto the stack as if
4783 they were passed there. */
4784 int store_arg_regs = 0;
4786 if (arm_ccfsm_state || arm_target_insn)
4787 abort (); /* Sanity check */
4789 if (arm_naked_function_p (current_function_decl))
4790 return;
4792 return_used_this_function = 0;
4793 lr_save_eliminated = 0;
4795 fprintf (f, "\t%s args = %d, pretend = %d, frame = %d\n",
4796 ASM_COMMENT_START, current_function_args_size,
4797 current_function_pretend_args_size, frame_size);
4798 fprintf (f, "\t%s frame_needed = %d, current_function_anonymous_args = %d\n",
4799 ASM_COMMENT_START, frame_pointer_needed,
4800 current_function_anonymous_args);
4802 if (volatile_func)
4803 fprintf (f, "\t%s Volatile function.\n", ASM_COMMENT_START);
4805 if (current_function_anonymous_args && current_function_pretend_args_size)
4806 store_arg_regs = 1;
4808 for (reg = 0; reg <= 10; reg++)
4809 if (regs_ever_live[reg] && ! call_used_regs[reg])
4810 live_regs_mask |= (1 << reg);
4812 if (frame_pointer_needed)
4813 live_regs_mask |= 0xD800;
4814 else if (regs_ever_live[14])
4816 if (! current_function_args_size
4817 && ! function_really_clobbers_lr (get_insns ()))
4818 lr_save_eliminated = 1;
4819 else
4820 live_regs_mask |= 0x4000;
4823 if (live_regs_mask)
4825 /* If a DImode load/store multiple is used, and the base register
4826 is r3, then r4 can become an ever-live register without lr
4827 doing so; in this case we need to push lr as well, or we
4828 will fail to get a proper return. */
4830 live_regs_mask |= 0x4000;
4831 lr_save_eliminated = 0;
4835 if (lr_save_eliminated)
4836 fprintf (f,"\t%s I don't think this function clobbers lr\n",
4837 ASM_COMMENT_START);
4839 #ifdef AOF_ASSEMBLER
4840 if (flag_pic)
4841 fprintf (f, "\tmov\t%sip, %s%s\n", REGISTER_PREFIX, REGISTER_PREFIX,
4842 reg_names[PIC_OFFSET_TABLE_REGNUM]);
4843 #endif
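/* Illustrative output of this routine for a small function, assuming `@'
   is ASM_COMMENT_START for this target:

	@ args = 0, pretend = 0, frame = 8
	@ frame_needed = 1, current_function_anonymous_args = 0

   The prologue instructions themselves are emitted as RTL by
   arm_expand_prologue () below; this routine only does the bookkeeping
   and prints these informational comments.  */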
4847 void
4848 output_func_epilogue (f, frame_size)
4849 FILE *f;
4850 int frame_size;
4852 int reg, live_regs_mask = 0;
4853 /* If we need this then it will always be at least this much: the saved lr, ip and fp words lie between the frame pointer and the first saved float register. */
4854 int floats_offset = 12;
4855 rtx operands[3];
4856 int volatile_func = (optimize > 0
4857 && TREE_THIS_VOLATILE (current_function_decl));
4859 if (use_return_insn() && return_used_this_function)
4861 if ((frame_size + current_function_outgoing_args_size) != 0
4862 && !(frame_pointer_needed || TARGET_APCS))
4863 abort ();
4864 goto epilogue_done;
4867 /* Naked functions don't have epilogues. */
4868 if (arm_naked_function_p (current_function_decl))
4869 goto epilogue_done;
4871 /* A volatile function should never return. Call abort. */
4872 if (volatile_func)
4874 rtx op = gen_rtx_SYMBOL_REF (Pmode, "abort");
4875 assemble_external_libcall (op);
4876 output_asm_insn ("bl\t%a0", &op);
4877 goto epilogue_done;
4880 for (reg = 0; reg <= 10; reg++)
4881 if (regs_ever_live[reg] && ! call_used_regs[reg])
4883 live_regs_mask |= (1 << reg);
4884 floats_offset += 4;
4887 if (frame_pointer_needed)
4889 if (arm_fpu_arch == FP_SOFT2)
4891 for (reg = 23; reg > 15; reg--)
4892 if (regs_ever_live[reg] && ! call_used_regs[reg])
4894 floats_offset += 12;
4895 fprintf (f, "\tldfe\t%s%s, [%sfp, #-%d]\n", REGISTER_PREFIX,
4896 reg_names[reg], REGISTER_PREFIX, floats_offset);
4899 else
4901 int start_reg = 23;
4903 for (reg = 23; reg > 15; reg--)
4905 if (regs_ever_live[reg] && ! call_used_regs[reg])
4907 floats_offset += 12;
4908 /* We can't unstack more than four registers at once */
4909 if (start_reg - reg == 3)
4911 fprintf (f, "\tlfm\t%s%s, 4, [%sfp, #-%d]\n",
4912 REGISTER_PREFIX, reg_names[reg],
4913 REGISTER_PREFIX, floats_offset);
4914 start_reg = reg - 1;
4917 else
4919 if (reg != start_reg)
4920 fprintf (f, "\tlfm\t%s%s, %d, [%sfp, #-%d]\n",
4921 REGISTER_PREFIX, reg_names[reg + 1],
4922 start_reg - reg, REGISTER_PREFIX, floats_offset);
4924 start_reg = reg - 1;
4928 /* Just in case the last register checked also needs unstacking. */
4929 if (reg != start_reg)
4930 fprintf (f, "\tlfm\t%s%s, %d, [%sfp, #-%d]\n",
4931 REGISTER_PREFIX, reg_names[reg + 1],
4932 start_reg - reg, REGISTER_PREFIX, floats_offset);
4935 if (TARGET_THUMB_INTERWORK)
4937 live_regs_mask |= 0x6800;
4938 print_multi_reg (f, "ldmea\t%sfp", live_regs_mask, FALSE);
4939 fprintf (f, "\tbx\t%slr\n", REGISTER_PREFIX);
4941 else
4943 live_regs_mask |= 0xA800;
4944 print_multi_reg (f, "ldmea\t%sfp", live_regs_mask,
4945 TARGET_APCS_32 ? FALSE : TRUE);
4948 else
4950 /* Restore stack pointer if necessary. */
4951 if (frame_size + current_function_outgoing_args_size != 0)
4953 operands[0] = operands[1] = stack_pointer_rtx;
4954 operands[2] = GEN_INT (frame_size
4955 + current_function_outgoing_args_size);
4956 output_add_immediate (operands);
4959 if (arm_fpu_arch == FP_SOFT2)
4961 for (reg = 16; reg < 24; reg++)
4962 if (regs_ever_live[reg] && ! call_used_regs[reg])
4963 fprintf (f, "\tldfe\t%s%s, [%ssp], #12\n", REGISTER_PREFIX,
4964 reg_names[reg], REGISTER_PREFIX);
4966 else
4968 int start_reg = 16;
4970 for (reg = 16; reg < 24; reg++)
4972 if (regs_ever_live[reg] && ! call_used_regs[reg])
4974 if (reg - start_reg == 3)
4976 fprintf (f, "\tlfmfd\t%s%s, 4, [%ssp]!\n",
4977 REGISTER_PREFIX, reg_names[start_reg],
4978 REGISTER_PREFIX);
4979 start_reg = reg + 1;
4982 else
4984 if (reg != start_reg)
4985 fprintf (f, "\tlfmfd\t%s%s, %d, [%ssp]!\n",
4986 REGISTER_PREFIX, reg_names[start_reg],
4987 reg - start_reg, REGISTER_PREFIX);
4989 start_reg = reg + 1;
4993 /* Just in case the last register checked also needs unstacking. */
4994 if (reg != start_reg)
4995 fprintf (f, "\tlfmfd\t%s%s, %d, [%ssp]!\n",
4996 REGISTER_PREFIX, reg_names[start_reg],
4997 reg - start_reg, REGISTER_PREFIX);
5000 if (current_function_pretend_args_size == 0 && regs_ever_live[14])
5002 if (TARGET_THUMB_INTERWORK)
5004 if (! lr_save_eliminated)
5005 print_multi_reg(f, "ldmfd\t%ssp!", live_regs_mask | 0x4000,
5006 FALSE);
5008 fprintf (f, "\tbx\t%slr\n", REGISTER_PREFIX);
5010 else if (lr_save_eliminated)
5011 fprintf (f, (TARGET_APCS_32 ? "\tmov\t%spc, %slr\n"
5012 : "\tmovs\t%spc, %slr\n"),
5013 REGISTER_PREFIX, REGISTER_PREFIX);
5014 else
5015 print_multi_reg (f, "ldmfd\t%ssp!", live_regs_mask | 0x8000,
5016 TARGET_APCS_32 ? FALSE : TRUE);
5018 else
5020 if (live_regs_mask || regs_ever_live[14])
5022 /* Restore the integer regs, and the return address into lr */
5023 if (! lr_save_eliminated)
5024 live_regs_mask |= 0x4000;
5026 if (live_regs_mask != 0)
5027 print_multi_reg (f, "ldmfd\t%ssp!", live_regs_mask, FALSE);
5030 if (current_function_pretend_args_size)
5032 /* Unwind the pre-pushed regs */
5033 operands[0] = operands[1] = stack_pointer_rtx;
5034 operands[2] = GEN_INT (current_function_pretend_args_size);
5035 output_add_immediate (operands);
5037 /* And finally, go home */
5038 if (TARGET_THUMB_INTERWORK)
5039 fprintf (f, "\tbx\t%slr\n", REGISTER_PREFIX);
5040 else
5041 fprintf (f, (TARGET_APCS_32 ? "\tmov\t%spc, %slr\n"
5042 : "\tmovs\t%spc, %slr\n"),
5043 REGISTER_PREFIX, REGISTER_PREFIX);
5047 epilogue_done:
5049 current_function_anonymous_args = 0;
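/* Illustrative epilogues (APCS-32) matching the two main paths above:

   with a frame pointer:
	ldmea	fp, {r4, fp, sp, pc}

   without one, with 8 bytes of frame:
	add	sp, sp, #8
	ldmfd	sp!, {r4, pc}

   In 26-bit mode the final load takes `^' (or `movs pc, lr' is used) so
   that the flags are restored by the return.  */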
5052 static void
5053 emit_multi_reg_push (mask)
5054 int mask;
5056 int num_regs = 0;
5057 int i, j;
5058 rtx par;
5060 for (i = 0; i < 16; i++)
5061 if (mask & (1 << i))
5062 num_regs++;
5064 if (num_regs == 0 || num_regs > 16)
5065 abort ();
5067 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_regs));
5069 for (i = 0; i < 16; i++)
5071 if (mask & (1 << i))
5073 XVECEXP (par, 0, 0)
5074 = gen_rtx_SET (VOIDmode,
5075 gen_rtx_MEM (BLKmode,
5076 gen_rtx_PRE_DEC (BLKmode,
5077 stack_pointer_rtx)),
5078 gen_rtx_UNSPEC (BLKmode,
5079 gen_rtvec (1,
5080 gen_rtx_REG (SImode, i)),
5081 2));
5082 break;
5086 for (j = 1, i++; j < num_regs; i++)
5088 if (mask & (1 << i))
5090 XVECEXP (par, 0, j)
5091 = gen_rtx_USE (VOIDmode, gen_rtx_REG (SImode, i));
5092 j++;
5096 emit_insn (par);
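/* The PARALLEL built above for a MASK covering {r4, r5, lr} has the shape
   (illustrative):

	(parallel [(set (mem:BLK (pre_dec:BLK (reg:SI 13)))
			(unspec:BLK [(reg:SI 4)] 2))
		   (use (reg:SI 5))
		   (use (reg:SI 14))])

   which the store-multiple pattern in the md file prints as
   `stmfd sp!, {r4, r5, lr}'.  */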
5099 static void
5100 emit_sfm (base_reg, count)
5101 int base_reg;
5102 int count;
5104 rtx par;
5105 int i;
5107 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
5109 XVECEXP (par, 0, 0)
5110 = gen_rtx_SET (VOIDmode,
5111 gen_rtx_MEM (BLKmode,
5112 gen_rtx_PRE_DEC (BLKmode, stack_pointer_rtx)),
5113 gen_rtx_UNSPEC (BLKmode,
5114 gen_rtvec (1, gen_rtx_REG (XFmode,
5115 base_reg++)),
5116 2));
5118 for (i = 1; i < count; i++)
5119 XVECEXP (par, 0, i) = gen_rtx_USE (VOIDmode,
5120 gen_rtx_REG (XFmode, base_reg++));
5122 emit_insn (par);
5125 void
5126 arm_expand_prologue ()
5128 int reg;
5129 rtx amount = GEN_INT (-(get_frame_size ()
5130 + current_function_outgoing_args_size));
5131 rtx push_insn;
5132 int num_regs;
5133 int live_regs_mask = 0;
5134 int store_arg_regs = 0;
5135 int volatile_func = (optimize > 0
5136 && TREE_THIS_VOLATILE (current_function_decl));
5138 /* Naked functions don't have prologues. */
5139 if (arm_naked_function_p (current_function_decl))
5140 return;
5142 if (current_function_anonymous_args && current_function_pretend_args_size)
5143 store_arg_regs = 1;
5145 if (! volatile_func)
5146 for (reg = 0; reg <= 10; reg++)
5147 if (regs_ever_live[reg] && ! call_used_regs[reg])
5148 live_regs_mask |= 1 << reg;
5150 if (! volatile_func && regs_ever_live[14])
5151 live_regs_mask |= 0x4000;
5153 if (frame_pointer_needed)
5155 live_regs_mask |= 0xD800;
5156 emit_insn (gen_movsi (gen_rtx_REG (SImode, 12),
5157 stack_pointer_rtx));
5160 if (current_function_pretend_args_size)
5162 if (store_arg_regs)
5163 emit_multi_reg_push ((0xf0 >> (current_function_pretend_args_size / 4))
5164 & 0xf);
5165 else
5166 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
5167 GEN_INT (-current_function_pretend_args_size)));
5170 if (live_regs_mask)
5172 /* If we have to push any regs, then we must push lr as well, or
5173 we won't get a proper return. */
5174 live_regs_mask |= 0x4000;
5175 emit_multi_reg_push (live_regs_mask);
5178 /* For now the integer regs are still restored in output_func_epilogue (). */
5180 if (! volatile_func)
5182 if (arm_fpu_arch == FP_SOFT2)
5184 for (reg = 23; reg > 15; reg--)
5185 if (regs_ever_live[reg] && ! call_used_regs[reg])
5186 emit_insn (gen_rtx_SET
5187 (VOIDmode,
5188 gen_rtx_MEM (XFmode,
5189 gen_rtx_PRE_DEC (XFmode,
5190 stack_pointer_rtx)),
5191 gen_rtx_REG (XFmode, reg)));
5193 else
5195 int start_reg = 23;
5197 for (reg = 23; reg > 15; reg--)
5199 if (regs_ever_live[reg] && ! call_used_regs[reg])
5201 if (start_reg - reg == 3)
5203 emit_sfm (reg, 4);
5204 start_reg = reg - 1;
5207 else
5209 if (start_reg != reg)
5210 emit_sfm (reg + 1, start_reg - reg);
5211 start_reg = reg - 1;
5215 if (start_reg != reg)
5216 emit_sfm (reg + 1, start_reg - reg);
5220 if (frame_pointer_needed)
5221 emit_insn (gen_addsi3 (hard_frame_pointer_rtx, gen_rtx_REG (SImode, 12),
5222 (GEN_INT
5223 (-(4 + current_function_pretend_args_size)))));
5225 if (amount != const0_rtx)
5227 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, amount));
5228 emit_insn (gen_rtx_CLOBBER (VOIDmode,
5229 gen_rtx_MEM (BLKmode, stack_pointer_rtx)));
5232 /* If we are profiling, make sure no instructions are scheduled before
5233 the call to mcount. */
5234 if (profile_flag || profile_block_flag)
5235 emit_insn (gen_blockage ());
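/* A sketch of the code this expands to, for a function with a frame
   pointer, an 8-byte frame and r4 live (illustrative):

	mov	ip, sp
	stmfd	sp!, {r4, fp, ip, lr, pc}
	sub	fp, ip, #4
	sub	sp, sp, #8

   With anonymous arguments, the incoming argument registers are stuffed
   first: 8 bytes of pretend args gives (0xf0 >> (8 / 4)) & 0xf = r2 and
   r3, i.e. `stmfd sp!, {r2, r3}'.  */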
5239 /* If CODE is 'd', then the X is a condition operand and the instruction
5240 should only be executed if the condition is true.
5241 If CODE is 'D', then the X is a condition operand and the instruction
5242 should only be executed if the condition is false: however, if the mode
5243 of the comparison is CCFPEmode, then always execute the instruction -- we
5244 do this because in these circumstances !GE does not necessarily imply LT;
5245 in these cases the instruction pattern will take care to make sure that
5246 an instruction containing %d will follow, thereby undoing the effects of
5247 doing this instruction unconditionally.
5248 If CODE is 'N' then X is a floating point operand that must be negated
5249 before output.
5250 If CODE is 'B' then output a bitwise inverted value of X (a const int).
5251 If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */
5253 void
5254 arm_print_operand (stream, x, code)
5255 FILE *stream;
5256 rtx x;
5257 int code;
5259 switch (code)
5261 case '@':
5262 fputs (ASM_COMMENT_START, stream);
5263 return;
5265 case '|':
5266 fputs (REGISTER_PREFIX, stream);
5267 return;
5269 case '?':
5270 if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
5271 fputs (arm_condition_codes[arm_current_cc], stream);
5272 return;
5274 case 'N':
5276 REAL_VALUE_TYPE r;
5277 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
5278 r = REAL_VALUE_NEGATE (r);
5279 fprintf (stream, "%s", fp_const_from_val (&r));
5281 return;
5283 case 'B':
5284 if (GET_CODE (x) == CONST_INT)
5285 fprintf (stream,
5286 #if HOST_BITS_PER_WIDE_INT == HOST_BITS_PER_INT
5287 "%d",
5288 #else
5289 "%ld",
5290 #endif
5291 ARM_SIGN_EXTEND (~ INTVAL (x)));
5292 else
5294 putc ('~', stream);
5295 output_addr_const (stream, x);
5297 return;
5299 case 'i':
5300 fprintf (stream, "%s", arithmetic_instr (x, 1));
5301 return;
5303 case 'I':
5304 fprintf (stream, "%s", arithmetic_instr (x, 0));
5305 return;
5307 case 'S':
5309 HOST_WIDE_INT val;
5310 char *shift = shift_op (x, &val);
5312 if (shift)
5314 fprintf (stream, ", %s ", shift);
5315 if (val == -1)
5316 arm_print_operand (stream, XEXP (x, 1), 0);
5317 else
5318 fprintf (stream,
5319 #if HOST_BITS_PER_WIDE_INT == HOST_BITS_PER_INT
5320 "#%d",
5321 #else
5322 "#%ld",
5323 #endif
5324 val);
5327 return;
5329 case 'Q':
5330 if (REGNO (x) > 15)
5331 abort ();
5332 fputs (REGISTER_PREFIX, stream);
5333 fputs (reg_names[REGNO (x) + (WORDS_BIG_ENDIAN ? 1 : 0)], stream);
5334 return;
5336 case 'R':
5337 if (REGNO (x) > 15)
5338 abort ();
5339 fputs (REGISTER_PREFIX, stream);
5340 fputs (reg_names[REGNO (x) + (WORDS_BIG_ENDIAN ? 0 : 1)], stream);
5341 return;
5343 case 'm':
5344 fputs (REGISTER_PREFIX, stream);
5345 if (GET_CODE (XEXP (x, 0)) == REG)
5346 fputs (reg_names[REGNO (XEXP (x, 0))], stream);
5347 else
5348 fputs (reg_names[REGNO (XEXP (XEXP (x, 0), 0))], stream);
5349 return;
5351 case 'M':
5352 fprintf (stream, "{%s%s-%s%s}", REGISTER_PREFIX, reg_names[REGNO (x)],
5353 REGISTER_PREFIX, reg_names[REGNO (x) - 1
5354 + ((GET_MODE_SIZE (GET_MODE (x))
5355 + GET_MODE_SIZE (SImode) - 1)
5356 / GET_MODE_SIZE (SImode))]);
5357 return;
5359 case 'd':
5360 if (x)
5361 fputs (arm_condition_codes[get_arm_condition_code (x)],
5362 stream);
5363 return;
5365 case 'D':
5366 if (x)
5367 fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
5368 (get_arm_condition_code (x))],
5369 stream);
5370 return;
5372 default:
5373 if (x == 0)
5374 abort ();
5376 if (GET_CODE (x) == REG)
5378 fputs (REGISTER_PREFIX, stream);
5379 fputs (reg_names[REGNO (x)], stream);
5381 else if (GET_CODE (x) == MEM)
5383 output_memory_reference_mode = GET_MODE (x);
5384 output_address (XEXP (x, 0));
5386 else if (GET_CODE (x) == CONST_DOUBLE)
5387 fprintf (stream, "#%s", fp_immediate_constant (x));
5388 else if (GET_CODE (x) == NEG)
5389 abort (); /* This should never happen now. */
5390 else
5392 fputc ('#', stream);
5393 output_addr_const (stream, x);
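/* Illustrative expansions, with r0 and r1 as operands 0 and 1:

     "mov%?\t%0, %1"	-> "mov r0, r1", or e.g. "moveq r0, r1" while the
			   conditional execution fsm below is active
     %B of (const_int 5)	    -> "-6" (the bitwise inverse)
     %M of (reg:DI r4)		    -> "{r4-r5}"
     %S of (ashift x (const_int 3)) -> ", asl #3"  */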
5399 /* A finite state machine takes care of noticing whether or not instructions
5400 can be conditionally executed, and thus decrease execution time and code
5401 size by deleting branch instructions. The fsm is controlled by
5402 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
5404 /* The states of the fsm controlling the condition codes are:
5405 0: normal, do nothing special
5406 1: make ASM_OUTPUT_OPCODE not output this instruction
5407 2: make ASM_OUTPUT_OPCODE not output this instruction
5408 3: make instructions conditional
5409 4: make instructions conditional
5411 State transitions (state->state by whom under condition):
5412 0 -> 1 final_prescan_insn if the `target' is a label
5413 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
5414 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
5415 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
5416 3 -> 0 ASM_OUTPUT_INTERNAL_LABEL if the `target' label is reached
5417 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
5418 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
5419 (the target insn is arm_target_insn).
5421 If the jump clobbers the conditions then we use states 2 and 4.
5423 A similar thing can be done with conditional return insns.
5425 XXX In case the `target' is an unconditional branch, this conditionalising
5426 of the instructions always reduces code size, but not always execution
5427 time. But then, I want to reduce the code size to somewhere near what
5428 /bin/cc produces. */
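/* A worked example of the transformation (illustrative):

	cmp	r0, #0
	beq	.L2
	add	r1, r1, #1
   .L2:	...

   final_prescan_insn sees that the beq skips a single safe insn and goes
   to state 1; ASM_OUTPUT_OPCODE then swallows the branch (state 3) and
   the skipped insn is printed with the inverse condition:

	cmp	r0, #0
	addne	r1, r1, #1  */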
5430 /* Returns the index of the ARM condition code string in
5431 `arm_condition_codes'. COMPARISON should be an rtx like
5432 `(eq (...) (...))'. */
5434 static enum arm_cond_code
5435 get_arm_condition_code (comparison)
5436 rtx comparison;
5438 enum machine_mode mode = GET_MODE (XEXP (comparison, 0));
5439 register int code;
5440 register enum rtx_code comp_code = GET_CODE (comparison);
5442 if (GET_MODE_CLASS (mode) != MODE_CC)
5443 mode = SELECT_CC_MODE (comp_code, XEXP (comparison, 0),
5444 XEXP (comparison, 1));
5446 switch (mode)
5448 case CC_DNEmode: code = ARM_NE; goto dominance;
5449 case CC_DEQmode: code = ARM_EQ; goto dominance;
5450 case CC_DGEmode: code = ARM_GE; goto dominance;
5451 case CC_DGTmode: code = ARM_GT; goto dominance;
5452 case CC_DLEmode: code = ARM_LE; goto dominance;
5453 case CC_DLTmode: code = ARM_LT; goto dominance;
5454 case CC_DGEUmode: code = ARM_CS; goto dominance;
5455 case CC_DGTUmode: code = ARM_HI; goto dominance;
5456 case CC_DLEUmode: code = ARM_LS; goto dominance;
5457 case CC_DLTUmode: code = ARM_CC;
5459 dominance:
5460 if (comp_code != EQ && comp_code != NE)
5461 abort ();
5463 if (comp_code == EQ)
5464 return ARM_INVERSE_CONDITION_CODE (code);
5465 return code;
5467 case CC_NOOVmode:
5468 switch (comp_code)
5470 case NE: return ARM_NE;
5471 case EQ: return ARM_EQ;
5472 case GE: return ARM_PL;
5473 case LT: return ARM_MI;
5474 default: abort ();
5477 case CC_Zmode:
5478 case CCFPmode:
5479 switch (comp_code)
5481 case NE: return ARM_NE;
5482 case EQ: return ARM_EQ;
5483 default: abort ();
5486 case CCFPEmode:
5487 switch (comp_code)
5489 case GE: return ARM_GE;
5490 case GT: return ARM_GT;
5491 case LE: return ARM_LS;
5492 case LT: return ARM_MI;
5493 default: abort ();
5496 case CC_SWPmode:
5497 switch (comp_code)
5499 case NE: return ARM_NE;
5500 case EQ: return ARM_EQ;
5501 case GE: return ARM_LE;
5502 case GT: return ARM_LT;
5503 case LE: return ARM_GE;
5504 case LT: return ARM_GT;
5505 case GEU: return ARM_LS;
5506 case GTU: return ARM_CC;
5507 case LEU: return ARM_CS;
5508 case LTU: return ARM_HI;
5509 default: abort ();
5512 case CC_Cmode:
5513 switch (comp_code)
5515 case LTU: return ARM_CS;
5516 case GEU: return ARM_CC;
5517 default: abort ();
5520 case CCmode:
5521 switch (comp_code)
5523 case NE: return ARM_NE;
5524 case EQ: return ARM_EQ;
5525 case GE: return ARM_GE;
5526 case GT: return ARM_GT;
5527 case LE: return ARM_LE;
5528 case LT: return ARM_LT;
5529 case GEU: return ARM_CS;
5530 case GTU: return ARM_HI;
5531 case LEU: return ARM_LS;
5532 case LTU: return ARM_CC;
5533 default: abort ();
5536 default: abort ();
5539 abort ();
5543 void
5544 final_prescan_insn (insn, opvec, noperands)
5545 rtx insn;
5546 rtx *opvec;
5547 int noperands;
5549 /* BODY will hold the body of INSN. */
5550 register rtx body = PATTERN (insn);
5552 /* This will be 1 if we are trying to repeat the trick (see below), in
5553 which case things need to be reversed if the attempt appears to fail. */
5554 int reverse = 0;
5556 /* JUMP_CLOBBERS will be nonzero if the condition codes are clobbered when
5557 the branch is taken, even if the rtl suggests otherwise. It also
5558 means that we have to grub around within the jump expression to find
5559 out what the conditions are when the jump isn't taken. */
5560 int jump_clobbers = 0;
5562 /* If we start with a return insn, we only succeed if we find another one. */
5563 int seeking_return = 0;
5565 /* START_INSN will hold the insn from where we start looking. This is the
5566 first insn after the following code_label if REVERSE is true. */
5567 rtx start_insn = insn;
5569 /* If in state 4, check if the target branch is reached, in order to
5570 change back to state 0. */
5571 if (arm_ccfsm_state == 4)
5573 if (insn == arm_target_insn)
5575 arm_target_insn = NULL;
5576 arm_ccfsm_state = 0;
5578 return;
5581 /* If in state 3, it is possible to repeat the trick, if this insn is an
5582 unconditional branch to a label, and immediately following this branch
5583 is the previous target label which is only used once, and the label this
5584 branch jumps to is not too far off. */
5585 if (arm_ccfsm_state == 3)
5587 if (simplejump_p (insn))
5589 start_insn = next_nonnote_insn (start_insn);
5590 if (GET_CODE (start_insn) == BARRIER)
5592 /* XXX Isn't this always a barrier? */
5593 start_insn = next_nonnote_insn (start_insn);
5595 if (GET_CODE (start_insn) == CODE_LABEL
5596 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
5597 && LABEL_NUSES (start_insn) == 1)
5598 reverse = TRUE;
5599 else
5600 return;
5602 else if (GET_CODE (body) == RETURN)
5604 start_insn = next_nonnote_insn (start_insn);
5605 if (GET_CODE (start_insn) == BARRIER)
5606 start_insn = next_nonnote_insn (start_insn);
5607 if (GET_CODE (start_insn) == CODE_LABEL
5608 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
5609 && LABEL_NUSES (start_insn) == 1)
5611 reverse = TRUE;
5612 seeking_return = 1;
5614 else
5615 return;
5617 else
5618 return;
5621 if (arm_ccfsm_state != 0 && !reverse)
5622 abort ();
5623 if (GET_CODE (insn) != JUMP_INSN)
5624 return;
5626 /* This jump might be paralleled with a clobber of the condition codes;
5627 the jump should always come first. */
5628 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
5629 body = XVECEXP (body, 0, 0);
5631 #if 0
5632 /* If this is a conditional return then we don't want to know */
5633 if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
5634 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
5635 && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
5636 || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
5637 return;
5638 #endif
5640 if (reverse
5641 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
5642 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
5644 int insns_skipped;
5645 int fail = FALSE, succeed = FALSE;
5646 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
5647 int then_not_else = TRUE;
5648 rtx this_insn = start_insn, label = 0;
5650 if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
5652 /* The code below is wrong for these, and I haven't time to
5653 fix it now. So we just do the safe thing and return. This
5654 whole function needs re-writing anyway. */
5655 jump_clobbers = 1;
5656 return;
5659 /* Register the insn jumped to. */
5660 if (reverse)
5662 if (!seeking_return)
5663 label = XEXP (SET_SRC (body), 0);
5665 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
5666 label = XEXP (XEXP (SET_SRC (body), 1), 0);
5667 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
5669 label = XEXP (XEXP (SET_SRC (body), 2), 0);
5670 then_not_else = FALSE;
5672 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
5673 seeking_return = 1;
5674 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
5676 seeking_return = 1;
5677 then_not_else = FALSE;
5679 else
5680 abort ();
5682 /* See how many insns this branch skips, and what kind of insns. If all
5683 insns are okay, and the label or unconditional branch to the same
5684 label is not too far away, succeed. */
5685 for (insns_skipped = 0;
5686 !fail && !succeed && insns_skipped++ < MAX_INSNS_SKIPPED;)
5688 rtx scanbody;
5690 this_insn = next_nonnote_insn (this_insn);
5691 if (!this_insn)
5692 break;
5694 scanbody = PATTERN (this_insn);
5696 switch (GET_CODE (this_insn))
5698 case CODE_LABEL:
5699 /* Succeed if it is the target label, otherwise fail since
5700 control falls in from somewhere else. */
5701 if (this_insn == label)
5703 if (jump_clobbers)
5705 arm_ccfsm_state = 2;
5706 this_insn = next_nonnote_insn (this_insn);
5708 else
5709 arm_ccfsm_state = 1;
5710 succeed = TRUE;
5712 else
5713 fail = TRUE;
5714 break;
5716 case BARRIER:
5717 /* Succeed if the following insn is the target label.
5718 Otherwise fail.
5719 If return insns are used then the last insn in a function
5720 will be a barrier. */
5721 this_insn = next_nonnote_insn (this_insn);
5722 if (this_insn && this_insn == label)
5724 if (jump_clobbers)
5726 arm_ccfsm_state = 2;
5727 this_insn = next_nonnote_insn (this_insn);
5729 else
5730 arm_ccfsm_state = 1;
5731 succeed = TRUE;
5733 else
5734 fail = TRUE;
5735 break;
5737 case CALL_INSN:
5738 /* If using 32-bit addresses the cc is not preserved over
5739 calls */
5740 if (TARGET_APCS_32)
5742 /* Succeed if the following insn is the target label,
5743 or if the following two insns are a barrier and
5744 the target label. */
5745 this_insn = next_nonnote_insn (this_insn);
5746 if (this_insn && GET_CODE (this_insn) == BARRIER)
5747 this_insn = next_nonnote_insn (this_insn);
5749 if (this_insn && this_insn == label
5750 && insns_skipped < MAX_INSNS_SKIPPED)
5752 if (jump_clobbers)
5754 arm_ccfsm_state = 2;
5755 this_insn = next_nonnote_insn (this_insn);
5757 else
5758 arm_ccfsm_state = 1;
5759 succeed = TRUE;
5761 else
5762 fail = TRUE;
5764 break;
5766 case JUMP_INSN:
5767 /* If this is an unconditional branch to the same label, succeed.
5768 If it is to another label, do nothing. If it is conditional,
5769 fail. */
5770 /* XXX Probably, the tests for the SET and the PC are unnecessary. */
5772 if (GET_CODE (scanbody) == SET
5773 && GET_CODE (SET_DEST (scanbody)) == PC)
5775 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
5776 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
5778 arm_ccfsm_state = 2;
5779 succeed = TRUE;
5781 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
5782 fail = TRUE;
5784 else if (GET_CODE (scanbody) == RETURN
5785 && seeking_return)
5787 arm_ccfsm_state = 2;
5788 succeed = TRUE;
5790 else if (GET_CODE (scanbody) == PARALLEL)
5792 switch (get_attr_conds (this_insn))
5794 case CONDS_NOCOND:
5795 break;
5796 default:
5797 fail = TRUE;
5798 break;
5801 break;
5803 case INSN:
5804 /* Instructions using or affecting the condition codes make it
5805 fail. */
5806 if ((GET_CODE (scanbody) == SET
5807 || GET_CODE (scanbody) == PARALLEL)
5808 && get_attr_conds (this_insn) != CONDS_NOCOND)
5809 fail = TRUE;
5810 break;
5812 default:
5813 break;
5816 if (succeed)
5818 if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
5819 arm_target_label = CODE_LABEL_NUMBER (label);
5820 else if (seeking_return || arm_ccfsm_state == 2)
5822 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
5824 this_insn = next_nonnote_insn (this_insn);
5825 if (this_insn && (GET_CODE (this_insn) == BARRIER
5826 || GET_CODE (this_insn) == CODE_LABEL))
5827 abort ();
5829 if (!this_insn)
5831 /* Oh, dear!  We ran off the end of the insn chain; give up. */
5832 recog (PATTERN (insn), insn, NULL_PTR);
5833 arm_ccfsm_state = 0;
5834 arm_target_insn = NULL;
5835 return;
5837 arm_target_insn = this_insn;
5839 else
5840 abort ();
5841 if (jump_clobbers)
5843 if (reverse)
5844 abort ();
5845 arm_current_cc =
5846 get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
5847 0), 0), 1));
5848 if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
5849 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
5850 if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
5851 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
5853 else
5855 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
5856 what it was. */
5857 if (!reverse)
5858 arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
5859 0));
5862 if (reverse || then_not_else)
5863 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
5865 /* Restore recog_operand (getting the attributes of other insns can
5866 destroy this array, but final.c assumes that it remains intact
5867 across this call; since the insn has already been recognized we
5868 call recog directly). */
5869 recog (PATTERN (insn), insn, NULL_PTR);
5873 #ifdef AOF_ASSEMBLER
5874 /* Special functions only needed when producing AOF syntax assembler. */
5876 rtx aof_pic_label = NULL_RTX;
5877 struct pic_chain
5879 struct pic_chain *next;
5880 char *symname;
5883 static struct pic_chain *aof_pic_chain = NULL;
5885 rtx
5886 aof_pic_entry (x)
5887 rtx x;
5889 struct pic_chain **chainp;
5890 int offset;
5892 if (aof_pic_label == NULL_RTX)
5894 /* This needs to persist throughout the compilation. */
5895 end_temporary_allocation ();
5896 aof_pic_label = gen_rtx_SYMBOL_REF (Pmode, "x$adcons");
5897 resume_temporary_allocation ();
5900 for (offset = 0, chainp = &aof_pic_chain; *chainp;
5901 offset += 4, chainp = &(*chainp)->next)
5902 if ((*chainp)->symname == XSTR (x, 0))
5903 return plus_constant (aof_pic_label, offset);
5905 *chainp = (struct pic_chain *) xmalloc (sizeof (struct pic_chain));
5906 (*chainp)->next = NULL;
5907 (*chainp)->symname = XSTR (x, 0);
5908 return plus_constant (aof_pic_label, offset);
5911 void
5912 aof_dump_pic_table (f)
5913 FILE *f;
5915 struct pic_chain *chain;
5917 if (aof_pic_chain == NULL)
5918 return;
5920 fprintf (f, "\tAREA |%s$$adcons|, BASED %s%s\n",
5921 reg_names[PIC_OFFSET_TABLE_REGNUM], REGISTER_PREFIX,
5922 reg_names[PIC_OFFSET_TABLE_REGNUM]);
5923 fputs ("|x$adcons|\n", f);
5925 for (chain = aof_pic_chain; chain; chain = chain->next)
5927 fputs ("\tDCD\t", f);
5928 assemble_name (f, chain->symname);
5929 fputs ("\n", f);
5933 int arm_text_section_count = 1;
5935 char *
5936 aof_text_section ()
5938 static char buf[100];
5939 sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
5940 arm_text_section_count++);
5941 if (flag_pic)
5942 strcat (buf, ", PIC, REENTRANT");
5943 return buf;
5946 static int arm_data_section_count = 1;
5948 char *
5949 aof_data_section ()
5951 static char buf[100];
5952 sprintf (buf, "\tAREA |C$$data%d|, DATA", arm_data_section_count++);
5953 return buf;
5956 /* The AOF assembler is religiously strict about declarations of
5957 imported and exported symbols, so that it is impossible to declare
5958 a function as imported near the beginning of the file, and then to
5959 export it later on. It is, however, possible to delay the decision
5960 until all the functions in the file have been compiled. To get
5961 around this, we maintain a list of the imports and exports, and
5962 delete from it any that are subsequently defined. At the end of
5963 compilation we spit the remainder of the list out before the END
5964 directive. */
5966 struct import
5968 struct import *next;
5969 char *name;
5972 static struct import *imports_list = NULL;
5974 void
5975 aof_add_import (name)
5976 char *name;
5978 struct import *new;
5980 for (new = imports_list; new; new = new->next)
5981 if (new->name == name)
5982 return;
5984 new = (struct import *) xmalloc (sizeof (struct import));
5985 new->next = imports_list;
5986 imports_list = new;
5987 new->name = name;
5990 void
5991 aof_delete_import (name)
5992 char *name;
5994 struct import **old;
5996 for (old = &imports_list; *old; old = & (*old)->next)
5998 if ((*old)->name == name)
6000 *old = (*old)->next;
6001 return;
6006 int arm_main_function = 0;
6008 void
6009 aof_dump_imports (f)
6010 FILE *f;
6012 /* The AOF assembler needs this to cause the startup code to be extracted
6013 from the library. Bringing in __main causes the whole thing to work
6014 automagically. */
6015 if (arm_main_function)
6017 text_section ();
6018 fputs ("\tIMPORT __main\n", f);
6019 fputs ("\tDCD __main\n", f);
6022 /* Now dump the remaining imports. */
6023 while (imports_list)
6025 fprintf (f, "\tIMPORT\t");
6026 assemble_name (f, imports_list->name);
6027 fputc ('\n', f);
6028 imports_list = imports_list->next;
6031 #endif /* AOF_ASSEMBLER */