/* Output routines for GCC for ARM/RISCiX.
   Copyright (C) 1991, 1993, 1994, 1995 Free Software Foundation, Inc.
   Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
   and Martin Simmons (@harleqn.co.uk).
   More major hacks by Richard Earnshaw (rwe11@cl.cam.ac.uk)

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include <stdio.h>
#include <string.h>
#include "assert.h"
#include "config.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-flags.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "reload.h"
#include "tree.h"
#include "expr.h"
/* The maximum number of insns skipped which will be conditionalised if
   possible.  */
#define MAX_INSNS_SKIPPED  5

/* Some function declarations.  */
extern FILE *asm_out_file;
extern char *output_multi_immediate ();
extern void arm_increase_location ();

HOST_WIDE_INT int_log2 PROTO ((HOST_WIDE_INT));
static int get_prologue_size PROTO ((void));
static int arm_gen_constant PROTO ((enum rtx_code, enum machine_mode,
				    HOST_WIDE_INT, rtx, rtx, int, int));
/* Define the information needed to generate branch insns.  This is
   stored from the compare operation.  */

rtx arm_compare_op0, arm_compare_op1;
int arm_compare_fp;

/* What type of cpu are we compiling for?  */
enum processor_type arm_cpu;

/* What type of floating point are we compiling for?  */
enum floating_point_type arm_fpu;

/* What program mode is the cpu running in?  26-bit mode or 32-bit mode.  */
enum prog_mode_type arm_prgmode;

char *target_cpu_name = ARM_CPU_NAME;
char *target_fpe_name = NULL;

/* Nonzero if this is an "M" variant of the processor.  */
int arm_fast_multiply = 0;

/* Nonzero if this chip supports the ARM Architecture 4 extensions.  */
int arm_arch4 = 0;
/* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
   must report the mode of the memory reference from PRINT_OPERAND to
   PRINT_OPERAND_ADDRESS.  */
enum machine_mode output_memory_reference_mode;

/* Nonzero if the prologue must setup `fp'.  */
int current_function_anonymous_args;

/* Location counter of .text segment.  */
int arm_text_location = 0;

/* Set to one if we think that lr is only saved because of subroutine calls,
   but all of these can be `put after' return insns.  */
int lr_save_eliminated;

/* A hash table is used to store text segment labels and their associated
   offset from the start of the text segment.  */
struct label_offset
{
  char *name;
  int offset;
  struct label_offset *cdr;
};

#define LABEL_HASH_SIZE  257

static struct label_offset *offset_table[LABEL_HASH_SIZE];

/* Set to 1 when a return insn is output, this means that the epilogue
   is not needed.  */
static int return_used_this_function;

static int arm_constant_limit = 3;

/* For an explanation of these variables, see final_prescan_insn below.  */
int arm_ccfsm_state;
int arm_current_cc;
rtx arm_target_insn;
int arm_target_label;

/* The condition codes of the ARM, and the inverse function.  */
char *arm_condition_codes[] =
{
  "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
  "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
};

#define ARM_INVERSE_CONDITION_CODE(X)  ((X) ^ 1)
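
/* The table above is ordered exactly as the ARM condition-code encoding,
   in which each condition and its inverse differ only in the bottom bit;
   so, for example, "eq" (index 0) ^ 1 gives "ne" (index 1), and "gt"
   (index 12) ^ 1 gives "le" (index 13), which is what makes the simple
   XOR in ARM_INVERSE_CONDITION_CODE sufficient.  */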
/* Initialization code.  */

#define FL_CO_PROC    0x01	/* Has external co-processor bus */
#define FL_FAST_MULT  0x02	/* Fast multiply */
#define FL_MODE26     0x04	/* 26-bit mode support */
#define FL_MODE32     0x08	/* 32-bit mode support */
#define FL_ARCH4      0x10	/* Architecture rel 4 */
#define FL_THUMB      0x20	/* Thumb aware */

struct processors
{
  char *name;
  enum processor_type type;
  unsigned int flags;
};

/* Not all of these give usefully different compilation alternatives,
   but there is no simple way of generalizing them.  */
static struct processors all_procs[] =
{
  {"arm2",	PROCESSOR_ARM2, FL_CO_PROC | FL_MODE26},
  {"arm250",	PROCESSOR_ARM2, FL_CO_PROC | FL_MODE26},
  {"arm3",	PROCESSOR_ARM2, FL_CO_PROC | FL_MODE26},
  {"arm6",	PROCESSOR_ARM6, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm60",	PROCESSOR_ARM6, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm600",	PROCESSOR_ARM6, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm610",	PROCESSOR_ARM6, FL_MODE32 | FL_MODE26},
  {"arm620",	PROCESSOR_ARM6, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm7",	PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm70",	PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm7d",	PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm7di",	PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm7dm",	PROCESSOR_ARM7, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
				 | FL_MODE26)},
  {"arm7dmi",	PROCESSOR_ARM7, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
				 | FL_MODE26)},
  {"arm700",	PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm700i",	PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm710",	PROCESSOR_ARM7, FL_MODE32 | FL_MODE26},
  {"arm710c",	PROCESSOR_ARM7, FL_MODE32 | FL_MODE26},
  {"arm7500",	PROCESSOR_ARM7, FL_MODE32 | FL_MODE26},
  {"arm7tdmi",	PROCESSOR_ARM7, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
				 | FL_ARCH4 | FL_THUMB)},
  {NULL, 0, 0}
};
/* Fix up any incompatible options that the user has specified.
   This has now turned into a maze.  */
void
arm_override_options ()
{
  int arm_thumb_aware = 0;

  if (write_symbols != NO_DEBUG && flag_omit_frame_pointer)
    warning ("-g with -fomit-frame-pointer may not give sensible debugging");

  if (TARGET_POKE_FUNCTION_NAME)
    target_flags |= ARM_FLAG_APCS_FRAME;

  if (TARGET_6)
    {
      warning ("Option '-m6' deprecated.  Use: '-mapcs-32' or -mcpu-<proc>");
      target_flags |= ARM_FLAG_APCS_32;
      arm_cpu = PROCESSOR_ARM6;
    }

  if (TARGET_3)
    {
      warning ("Option '-m3' deprecated.  Use: '-mapcs-26' or -mcpu-<proc>");
      target_flags &= ~ARM_FLAG_APCS_32;
      arm_cpu = PROCESSOR_ARM2;
    }

  if ((TARGET_3 || TARGET_6) && target_cpu_name != NULL)
    fatal ("Incompatible mix of old and new options.  -m%d and -mcpu-%s",
	   TARGET_3 ? 3 : 6, target_cpu_name);

  if (TARGET_APCS_REENT && flag_pic)
    fatal ("-fpic and -mapcs-reent are incompatible");

  if (TARGET_APCS_REENT)
    warning ("APCS reentrant code not supported.  Ignored");

  if (flag_pic)
    warning ("Position independent code not supported.  Ignored");

  if (TARGET_APCS_FLOAT)
    warning ("Passing floating point arguments in fp regs not yet supported");

  if (TARGET_APCS_STACK && ! TARGET_APCS)
    {
      warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
      target_flags |= ARM_FLAG_APCS_FRAME;
    }

  arm_cpu = TARGET_6 ? PROCESSOR_ARM6 : PROCESSOR_ARM2;
  arm_fpu = FP_HARD;

  if (target_cpu_name != NULL)
    {
      char *c = target_cpu_name;
      struct processors *proc;

      /* Match against the supported types.  */
      for (proc = all_procs; proc->name != NULL; proc++)
	if (strcmp (proc->name, c) == 0)
	  break;

      if (proc->name)
	{
	  arm_cpu = proc->type;

	  /* Default value for floating point code... if no co-processor
	     bus, then schedule for emulated floating point.  Otherwise,
	     assume the user has an FPA, unless overridden with -mfpe-...  */
	  if ((proc->flags & FL_CO_PROC) == 0)
	    arm_fpu = FP_SOFT3;
	  else
	    arm_fpu = FP_HARD;

	  arm_fast_multiply = (proc->flags & FL_FAST_MULT) != 0;
	  arm_arch4 = (proc->flags & FL_ARCH4) != 0;
	  arm_thumb_aware = (proc->flags & FL_THUMB) != 0;

	  /* Processors with a load delay slot can load constants faster
	     from the pool than it takes to construct them, so reduce the
	     complexity of the constant that we will try to generate
	     inline.  */
	}
      else
	fatal ("Unrecognized cpu type: %s", target_cpu_name);
    }

  if (target_fpe_name)
    {
      if (strcmp (target_fpe_name, "2") == 0)
	arm_fpu = FP_SOFT2;
      else if (strcmp (target_fpe_name, "3") == 0)
	arm_fpu = FP_SOFT3;
      else
	fatal ("Invalid floating point emulation option: -mfpe-%s",
	       target_fpe_name);
    }

  if (TARGET_THUMB_INTERWORK && ! arm_thumb_aware)
    {
      warning ("This processor variant does not support Thumb interworking");
      target_flags &= ~ARM_FLAG_THUMB;
    }

  if (TARGET_FPE && arm_fpu != FP_HARD)
    arm_fpu = FP_SOFT2;

  /* For arm2/3 there is no need to do any scheduling if there is only
     a floating point emulator, or we are doing software floating-point.  */
  if ((TARGET_SOFT_FLOAT || arm_fpu != FP_HARD) && arm_cpu == PROCESSOR_ARM2)
    flag_schedule_insns = flag_schedule_insns_after_reload = 0;

  arm_prgmode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;
}
/* Return 1 if it is possible to return using a single instruction.  */

int
use_return_insn ()
{
  int regno;

  if (!reload_completed || current_function_pretend_args_size
      || current_function_anonymous_args
      || (get_frame_size () && !(TARGET_APCS || frame_pointer_needed)))
    return 0;

  /* Can't be done if any of the FPU regs are pushed, since this also
     requires an insn.  */
  for (regno = 20; regno < 24; regno++)
    if (regs_ever_live[regno])
      return 0;

  return 1;
}
/* Return TRUE if int I is a valid immediate ARM constant.  */
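
/* An ARM data-processing immediate is an 8-bit value rotated right by an
   even amount (0, 2, ..., 30).  For example, 0x000000ff, 0x00ff0000 and
   0xff000000 are all valid, but 0x00000101 is not, since its set bits do
   not fit within any single 8-bit window.  The loop below simply tests I
   against the complement of each such window in turn.  */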
int
const_ok_for_arm (i)
     HOST_WIDE_INT i;
{
  unsigned HOST_WIDE_INT mask = ~0xFF;

  /* Fast return for 0 and powers of 2.  */
  if ((i & (i - 1)) == 0)
    return TRUE;

  do
    {
      if ((i & mask & (unsigned HOST_WIDE_INT) 0xffffffff) == 0)
	return TRUE;
      mask =
	(mask << 2) | ((mask & (unsigned HOST_WIDE_INT) 0xffffffff)
		       >> (32 - 2)) | ~((unsigned HOST_WIDE_INT) 0xffffffff);
    } while (mask != ~0xFF);

  return FALSE;
}
/* Return true if I is a valid constant for the operation CODE.  */

int
const_ok_for_op (i, code, mode)
     HOST_WIDE_INT i;
     enum rtx_code code;
     enum machine_mode mode;
{
  if (const_ok_for_arm (i))
    return 1;

  switch (code)
    {
    case PLUS:
      return const_ok_for_arm (ARM_SIGN_EXTEND (-i));

    case MINUS:		/* Should only occur with (MINUS I reg) => rsb */
    case XOR:
    case IOR:
      return 0;

    case AND:
      return const_ok_for_arm (ARM_SIGN_EXTEND (~i));

    default:
      abort ();
    }
}
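
/* The cases above exploit the fact that some operations have a natural
   inverse: for example, a PLUS of 0xffffff00 can be output as a SUB of
   0x100, and an AND with 0xffffff00 can become a BIC with 0xff, which is
   why PLUS tests -I and AND tests ~I.  */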
/* Emit a sequence of insns to handle a large constant.
   CODE is the code of the operation required, it can be any of SET, PLUS,
   IOR, AND, XOR, MINUS;
   MODE is the mode in which the operation is being performed;
   VAL is the integer to operate on;
   SOURCE is the other operand (a register, or a null-pointer for SET);
   SUBTARGETS means it is safe to create scratch registers if that will
   either produce a simpler sequence, or we will want to cse the values.
   Return value is the number of insns emitted.  */
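
/* For example, (set r0 #0x00f000f0) cannot be a single MOV, since
   0x00f000f0 is not a valid immediate, but it decomposes into two
   valid immediates, giving a sequence along the lines of

	mov	r0, #0x00f00000
	add	r0, r0, #0x000000f0

   arm_gen_constant below searches for decompositions of this kind.  */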
int
arm_split_constant (code, mode, val, target, source, subtargets)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
{
  if (subtargets || code == SET
      || (GET_CODE (target) == REG && GET_CODE (source) == REG
	  && REGNO (target) != REGNO (source)))
    {
      rtx temp;

      if (arm_gen_constant (code, mode, val, target, source, 1, 0)
	  > arm_constant_limit + (code != SET))
	{
	  if (code == SET)
	    {
	      /* Currently SET is the only monadic value for CODE, all
		 the rest are diadic.  */
	      emit_insn (gen_rtx (SET, VOIDmode, target, GEN_INT (val)));
	      return 1;
	    }
	  else
	    {
	      rtx temp = subtargets ? gen_reg_rtx (mode) : target;

	      emit_insn (gen_rtx (SET, VOIDmode, temp, GEN_INT (val)));
	      /* For MINUS, the value is subtracted from, since we never
		 have subtraction of a constant.  */
	      if (code == MINUS)
		emit_insn (gen_rtx (SET, VOIDmode, target,
				    gen_rtx (code, mode, temp, source)));
	      else
		emit_insn (gen_rtx (SET, VOIDmode, target,
				    gen_rtx (code, mode, source, temp)));
	      return 2;
	    }
	}
    }

  return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
}
/* As above, but extra parameter GENERATE which, if clear, suppresses
   RTL generation.  */
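
/* Note that arm_split_constant above first calls this function with
   GENERATE clear, simply to count the insns a sequence would need; RTL
   is only emitted once that count has been compared against
   arm_constant_limit.  */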
static int
arm_gen_constant (code, mode, val, target, source, subtargets, generate)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
     int generate;
{
  int can_add = 0;
  int can_invert = 0;
  int can_negate = 0;
  int can_negate_initial = 0;
  int can_shift = 0;
  int i;
  int num_bits_set = 0;
  int set_sign_bit_copies = 0;
  int clear_sign_bit_copies = 0;
  int clear_zero_bit_copies = 0;
  int set_zero_bit_copies = 0;
  int insns = 0;
  rtx new_src;
  unsigned HOST_WIDE_INT temp1, temp2;
  unsigned HOST_WIDE_INT remainder = val & 0xffffffff;

  /* Find out which operations are safe for a given CODE.  Also do a quick
     check for degenerate cases; these can occur when DImode operations
     are split.  */
  switch (code)
    {
    case SET:
      can_invert = 1;
      can_shift = 1;
      can_negate = 1;
      break;

    case PLUS:
      can_negate = 1;
      can_negate_initial = 1;
      break;

    case IOR:
      if (remainder == 0xffffffff)
	{
	  if (generate)
	    emit_insn (gen_rtx (SET, VOIDmode, target,
				GEN_INT (ARM_SIGN_EXTEND (val))));
	  return 1;
	}
      if (remainder == 0)
	{
	  if (reload_completed && rtx_equal_p (target, source))
	    return 0;
	  if (generate)
	    emit_insn (gen_rtx (SET, VOIDmode, target, source));
	  return 1;
	}
      break;

    case AND:
      if (remainder == 0)
	{
	  if (generate)
	    emit_insn (gen_rtx (SET, VOIDmode, target, const0_rtx));
	  return 1;
	}
      if (remainder == 0xffffffff)
	{
	  if (reload_completed && rtx_equal_p (target, source))
	    return 0;
	  if (generate)
	    emit_insn (gen_rtx (SET, VOIDmode, target, source));
	  return 1;
	}
      can_invert = 1;
      break;

    case XOR:
      if (remainder == 0)
	{
	  if (reload_completed && rtx_equal_p (target, source))
	    return 0;
	  if (generate)
	    emit_insn (gen_rtx (SET, VOIDmode, target, source));
	  return 1;
	}
      if (remainder == 0xffffffff)
	{
	  if (generate)
	    emit_insn (gen_rtx (SET, VOIDmode, target,
				gen_rtx (NOT, mode, source)));
	  return 1;
	}

      /* We don't know how to handle this yet below.  */
      abort ();

    case MINUS:
      /* We treat MINUS as (val - source), since (source - val) is always
	 passed as (source + (-val)).  */
      if (remainder == 0)
	{
	  if (generate)
	    emit_insn (gen_rtx (SET, VOIDmode, target,
				gen_rtx (NEG, mode, source)));
	  return 1;
	}
      if (const_ok_for_arm (val))
	{
	  if (generate)
	    emit_insn (gen_rtx (SET, VOIDmode, target,
				gen_rtx (MINUS, mode, GEN_INT (val), source)));
	  return 1;
	}
      can_negate = 1;

      break;

    default:
      abort ();
    }

  /* If we can do it in one insn get out quickly.  */
  if (const_ok_for_arm (val)
      || (can_negate_initial && const_ok_for_arm (-val))
      || (can_invert && const_ok_for_arm (~val)))
    {
      if (generate)
	emit_insn (gen_rtx (SET, VOIDmode, target,
			    (source ? gen_rtx (code, mode, source,
					       GEN_INT (val))
			     : GEN_INT (val))));
      return 1;
    }

  /* Calculate a few attributes that may be useful for specific
     optimizations.  */

  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) == 0)
	clear_sign_bit_copies++;
      else
	break;
    }

  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) != 0)
	set_sign_bit_copies++;
      else
	break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) == 0)
	clear_zero_bit_copies++;
      else
	break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) != 0)
	set_zero_bit_copies++;
      else
	break;
    }
  switch (code)
    {
    case SET:
      /* See if we can do this by sign_extending a constant that is known
	 to be negative.  This is a good way of doing it, since the shift
	 may well merge into a subsequent insn.  */
      if (set_sign_bit_copies > 1)
	{
	  if (const_ok_for_arm
	      (temp1 = ARM_SIGN_EXTEND (remainder
					<< (set_sign_bit_copies - 1))))
	    {
	      if (generate)
		{
		  new_src = subtargets ? gen_reg_rtx (mode) : target;
		  emit_insn (gen_rtx (SET, VOIDmode, new_src,
				      GEN_INT (temp1)));
		  emit_insn (gen_ashrsi3 (target, new_src,
					  GEN_INT (set_sign_bit_copies - 1)));
		}
	      return 2;
	    }

	  /* For an inverted constant, we will need to set the low bits,
	     these will be shifted out of harm's way.  */
	  temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
	  if (const_ok_for_arm (~temp1))
	    {
	      if (generate)
		{
		  new_src = subtargets ? gen_reg_rtx (mode) : target;
		  emit_insn (gen_rtx (SET, VOIDmode, new_src,
				      GEN_INT (temp1)));
		  emit_insn (gen_ashrsi3 (target, new_src,
					  GEN_INT (set_sign_bit_copies - 1)));
		}
	      return 2;
	    }
	}

      /* See if we can generate this by setting the bottom (or the top)
	 16 bits, and then shifting these into the other half of the
	 word.  We only look for the simplest cases, to do more would cost
	 too much.  Be careful, however, not to generate this when the
	 alternative would take fewer insns.  */
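      /* For example, 0x12341234 would take four insns built up a byte at
	 a time, but the bottom half replicated into the top half covers
	 it, so it can be done as 0x1234 (two insns) followed by a single
	 "orr rd, rd, rd, asl #16".  */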
      if (val & 0xffff0000)
	{
	  temp1 = remainder & 0xffff0000;
	  temp2 = remainder & 0x0000ffff;

	  /* Overlaps outside this range are best done using other methods.  */
	  for (i = 9; i < 24; i++)
	    {
	      if ((((temp2 | (temp2 << i)) & 0xffffffff) == remainder)
		  && ! const_ok_for_arm (temp2))
		{
		  insns = arm_gen_constant (code, mode, temp2,
					    new_src = (subtargets
						       ? gen_reg_rtx (mode)
						       : target),
					    source, subtargets, generate);
		  source = new_src;
		  if (generate)
		    emit_insn (gen_rtx (SET, VOIDmode, target,
					gen_rtx (IOR, mode,
						 gen_rtx (ASHIFT, mode, source,
							  GEN_INT (i)),
						 source)));
		  return insns + 1;
		}
	    }

	  /* Don't duplicate cases already considered.  */
	  for (i = 17; i < 24; i++)
	    {
	      if (((temp1 | (temp1 >> i)) == remainder)
		  && ! const_ok_for_arm (temp1))
		{
		  insns = arm_gen_constant (code, mode, temp1,
					    new_src = (subtargets
						       ? gen_reg_rtx (mode)
						       : target),
					    source, subtargets, generate);
		  source = new_src;
		  if (generate)
		    emit_insn (gen_rtx (SET, VOIDmode, target,
					gen_rtx (IOR, mode,
						 gen_rtx (LSHIFTRT, mode,
							  source, GEN_INT (i)),
						 source)));
		  return insns + 1;
		}
	    }
	}
      break;
    case IOR:
    case XOR:
      /* If we have IOR or XOR, and the inverse of the constant can be loaded
	 in a single instruction, and we can find a temporary to put it in,
	 then this can be done in two instructions instead of 3-4.  */
      if (subtargets
	  || (reload_completed && ! reg_mentioned_p (target, source)))
	{
	  if (const_ok_for_arm (ARM_SIGN_EXTEND (~ val)))
	    {
	      if (generate)
		{
		  rtx sub = subtargets ? gen_reg_rtx (mode) : target;

		  emit_insn (gen_rtx (SET, VOIDmode, sub,
				      GEN_INT (ARM_SIGN_EXTEND (~ val))));
		  emit_insn (gen_rtx (SET, VOIDmode, target,
				      gen_rtx (code, mode, source, sub)));
		}
	      return 2;
	    }
	}

      if (code == XOR)
	break;

      if (set_sign_bit_copies > 8
	  && (val & (-1 << (32 - set_sign_bit_copies))) == val)
	{
	  if (generate)
	    {
	      rtx sub = subtargets ? gen_reg_rtx (mode) : target;
	      rtx shift = GEN_INT (set_sign_bit_copies);

	      emit_insn (gen_rtx (SET, VOIDmode, sub,
				  gen_rtx (NOT, mode,
					   gen_rtx (ASHIFT, mode, source,
						    shift))));
	      emit_insn (gen_rtx (SET, VOIDmode, target,
				  gen_rtx (NOT, mode,
					   gen_rtx (LSHIFTRT, mode, sub,
						    shift))));
	    }
	  return 2;
	}
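
      /* The MVN-with-shift pair above sets the top SET_SIGN_BIT_COPIES
	 bits of SOURCE: e.g. for (ior reg 0xfff00000),
	 "mvn sub, source, asl #12" then "mvn target, sub, lsr #12" forces
	 the top twelve bits to one without ever loading the constant.  */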
      if (set_zero_bit_copies > 8
	  && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
	{
	  if (generate)
	    {
	      rtx sub = subtargets ? gen_reg_rtx (mode) : target;
	      rtx shift = GEN_INT (set_zero_bit_copies);

	      emit_insn (gen_rtx (SET, VOIDmode, sub,
				  gen_rtx (NOT, mode,
					   gen_rtx (LSHIFTRT, mode, source,
						    shift))));
	      emit_insn (gen_rtx (SET, VOIDmode, target,
				  gen_rtx (NOT, mode,
					   gen_rtx (ASHIFT, mode, sub,
						    shift))));
	    }
	  return 2;
	}

      if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~ val)))
	{
	  if (generate)
	    {
	      rtx sub = subtargets ? gen_reg_rtx (mode) : target;

	      emit_insn (gen_rtx (SET, VOIDmode, sub,
				  gen_rtx (NOT, mode, source)));
	      source = sub;
	      if (subtargets)
		sub = gen_reg_rtx (mode);
	      emit_insn (gen_rtx (SET, VOIDmode, sub,
				  gen_rtx (AND, mode, source,
					   GEN_INT (temp1))));
	      emit_insn (gen_rtx (SET, VOIDmode, target,
				  gen_rtx (NOT, mode, sub)));
	    }
	  return 3;
	}
      break;
    case AND:
      /* See if two shifts will do 2 or more insns' worth of work.  */
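      /* For example, (and reg 0x0000ffff): neither the mask nor its
	 inverse is a valid immediate, but since the mask just keeps the
	 bottom 16 bits, "mov rd, source, asl #16" followed by
	 "mov target, rd, lsr #16" does the whole job in two insns.  */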
      if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
	{
	  HOST_WIDE_INT shift_mask = ((0xffffffff
				       << (32 - clear_sign_bit_copies))
				      & 0xffffffff);
	  rtx new_source;
	  rtx shift;

	  if ((remainder | shift_mask) != 0xffffffff)
	    {
	      if (generate)
		{
		  new_source = subtargets ? gen_reg_rtx (mode) : target;
		  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
					    new_source, source, subtargets, 1);
		  source = new_source;
		}
	      else
		insns = arm_gen_constant (AND, mode, remainder | shift_mask,
					  new_source, source, subtargets, 0);
	    }

	  if (generate)
	    {
	      shift = GEN_INT (clear_sign_bit_copies);
	      new_source = subtargets ? gen_reg_rtx (mode) : target;
	      emit_insn (gen_ashlsi3 (new_source, source, shift));
	      emit_insn (gen_lshrsi3 (target, new_source, shift));
	    }

	  return insns + 2;
	}

      if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
	{
	  HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;
	  rtx new_source;
	  rtx shift;

	  if ((remainder | shift_mask) != 0xffffffff)
	    {
	      if (generate)
		{
		  new_source = subtargets ? gen_reg_rtx (mode) : target;
		  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
					    new_source, source, subtargets, 1);
		  source = new_source;
		}
	      else
		insns = arm_gen_constant (AND, mode, remainder | shift_mask,
					  new_source, source, subtargets, 0);
	    }

	  if (generate)
	    {
	      shift = GEN_INT (clear_zero_bit_copies);
	      new_source = subtargets ? gen_reg_rtx (mode) : target;
	      emit_insn (gen_lshrsi3 (new_source, source, shift));
	      emit_insn (gen_ashlsi3 (target, new_source, shift));
	    }

	  return insns + 2;
	}

      break;

    default:
      break;
    }
  for (i = 0; i < 32; i++)
    if (remainder & (1 << i))
      num_bits_set++;

  if (code == AND || (can_invert && num_bits_set > 16))
    remainder = (~remainder) & 0xffffffff;
  else if (code == PLUS && num_bits_set > 16)
    remainder = (-remainder) & 0xffffffff;
  else
    {
      can_invert = 0;
      can_negate = 0;
    }

  /* Now try and find a way of doing the job in either two or three
     instructions.
     We start by looking for the largest block of zeros that are aligned on
     a 2-bit boundary, we then fill up the temps, wrapping around to the
     top of the word when we drop off the bottom.
     In the worst case this code should produce no more than four insns.  */
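  /* For example, 0x12345678 has none of the special structure handled
     above, so it ends up built from 8-bit chunks, each a valid rotated
     immediate, along the lines of:

	mov	rd, #0x12000000
	add	rd, rd, #0x00340000
	add	rd, rd, #0x00005600
	add	rd, rd, #0x00000078  */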
  {
    int best_start = 0;
    int best_consecutive_zeros = 0;

    for (i = 0; i < 32; i += 2)
      {
	int consecutive_zeros = 0;

	if (! (remainder & (3 << i)))
	  {
	    while ((i < 32) && ! (remainder & (3 << i)))
	      {
		consecutive_zeros += 2;
		i += 2;
	      }
	    if (consecutive_zeros > best_consecutive_zeros)
	      {
		best_consecutive_zeros = consecutive_zeros;
		best_start = i - consecutive_zeros;
	      }
	    i -= 2;
	  }
      }

    /* Now start emitting the insns, starting with the one with the highest
       bit set: we do this so that the smallest number will be emitted last;
       this is more likely to be combinable with addressing insns.  */
    i = best_start;
    do
      {
	int end;

	if (i <= 0)
	  i += 32;
	if (remainder & (3 << (i - 2)))
	  {
	    end = i - 8;
	    if (end < 0)
	      end += 32;
	    temp1 = remainder & ((0x0ff << end)
				 | ((i < end) ? (0xff >> (32 - end)) : 0));
	    remainder &= ~temp1;

	    if (code == SET)
	      {
		if (generate)
		  emit_insn (gen_rtx (SET, VOIDmode,
				      new_src = (subtargets
						 ? gen_reg_rtx (mode)
						 : target),
				      GEN_INT (can_invert ? ~temp1 : temp1)));
		can_invert = 0;
		code = PLUS;
	      }
	    else if (code == MINUS)
	      {
		if (generate)
		  emit_insn (gen_rtx (SET, VOIDmode,
				      new_src = (subtargets
						 ? gen_reg_rtx (mode)
						 : target),
				      gen_rtx (code, mode, GEN_INT (temp1),
					       source)));
		code = PLUS;
	      }
	    else
	      {
		if (generate)
		  emit_insn (gen_rtx (SET, VOIDmode,
				      new_src = (remainder
						 ? (subtargets
						    ? gen_reg_rtx (mode)
						    : target)
						 : target),
				      gen_rtx (code, mode, source,
					       GEN_INT (can_invert ? ~temp1
							: (can_negate
							   ? -temp1
							   : temp1)))));
	      }

	    insns++;
	    source = new_src;
	    i -= 6;
	  }
	i -= 2;
      } while (remainder);
  }

  return insns;
}
/* Handle aggregates that are not laid out in a BLKmode element.
   This is a sub-element of RETURN_IN_MEMORY.  */

int
arm_return_in_memory (type)
     tree type;
{
  if (TREE_CODE (type) == RECORD_TYPE)
    {
      tree field;

      /* For a struct, we can return in a register if every element was a
	 bit-field.  */
      for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
	if (TREE_CODE (field) != FIELD_DECL
	    || ! DECL_BIT_FIELD_TYPE (field))
	  return 1;

      return 0;
    }
  else if (TREE_CODE (type) == UNION_TYPE)
    {
      tree field;

      /* Unions can be returned in registers if every element is
	 integral, or can be returned in an integer register.  */
      for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
	{
	  if (TREE_CODE (field) != FIELD_DECL
	      || (AGGREGATE_TYPE_P (TREE_TYPE (field))
		  && RETURN_IN_MEMORY (TREE_TYPE (field)))
	      || FLOAT_TYPE_P (TREE_TYPE (field)))
	    return 1;
	}

      return 0;
    }

  /* XXX Not sure what should be done for other aggregates, so put them in
     memory.  */
  return 1;
}
#define REG_OR_SUBREG_REG(X)						\
  (GET_CODE (X) == REG							\
   || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))

#define REG_OR_SUBREG_RTX(X)			\
  (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))

#define ARM_FRAME_RTX(X)					\
  ((X) == frame_pointer_rtx || (X) == stack_pointer_rtx	\
   || (X) == arg_pointer_rtx)
int
arm_rtx_costs (x, code, outer_code)
     rtx x;
     enum rtx_code code, outer_code;
{
  enum machine_mode mode = GET_MODE (x);
  enum rtx_code subcode;
  int extra_cost;

  switch (code)
    {
    case MEM:
      /* Memory costs quite a lot for the first word, but subsequent words
	 load at the equivalent of a single insn each.  */
      return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
	      + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));

    case DIV:
    case MOD:
      return 100;

    case ROTATE:
      if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
	return 4;
      /* Fall through */
    case ROTATERT:
      if (mode != SImode)
	return 8;
      /* Fall through */
    case ASHIFT: case LSHIFTRT: case ASHIFTRT:
      if (mode == DImode)
	return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
		+ ((GET_CODE (XEXP (x, 0)) == REG
		    || (GET_CODE (XEXP (x, 0)) == SUBREG
			&& GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
		   ? 0 : 8));
      return (1 + ((GET_CODE (XEXP (x, 0)) == REG
		    || (GET_CODE (XEXP (x, 0)) == SUBREG
			&& GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
		   ? 0 : 4)
	      + ((GET_CODE (XEXP (x, 1)) == REG
		  || (GET_CODE (XEXP (x, 1)) == SUBREG
		      && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
		  || (GET_CODE (XEXP (x, 1)) == CONST_INT))
		 ? 0 : 4));
    case MINUS:
      if (mode == DImode)
	return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 0))
		    || (GET_CODE (XEXP (x, 0)) == CONST_INT
			&& const_ok_for_arm (INTVAL (XEXP (x, 0)))))
		   ? 0 : 8));

      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
	return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
		      || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
			  && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
		     ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 0))
		    || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
			&& const_double_rtx_ok_for_fpu (XEXP (x, 0))))
		   ? 0 : 8));

      if (((GET_CODE (XEXP (x, 0)) == CONST_INT
	    && const_ok_for_arm (INTVAL (XEXP (x, 0)))
	    && REG_OR_SUBREG_REG (XEXP (x, 1))))
	  || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
	       || subcode == ASHIFTRT || subcode == LSHIFTRT
	       || subcode == ROTATE || subcode == ROTATERT
	       || (subcode == MULT
		   && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
		   && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
			(INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
	      && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
	      && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
		  || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
	      && REG_OR_SUBREG_REG (XEXP (x, 0))))
	return 1;
      /* Fall through */

    case PLUS:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
	return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 1))
		    || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
			&& const_double_rtx_ok_for_fpu (XEXP (x, 1))))
		   ? 0 : 8));

      /* Fall through */
    case AND: case XOR: case IOR:
      extra_cost = 0;

      /* Normally the frame registers will be split into reg+const during
	 reload, so it is a bad idea to combine them with other instructions,
	 since then they might not be moved outside of loops.  As a compromise
	 we allow integration with ops that have a constant as their second
	 operand.  */
      if ((REG_OR_SUBREG_REG (XEXP (x, 0))
	   && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
	   && GET_CODE (XEXP (x, 1)) != CONST_INT)
	  || (REG_OR_SUBREG_REG (XEXP (x, 0))
	      && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))))
	extra_cost = 4;

      if (mode == DImode)
	return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 1))
		    || (GET_CODE (XEXP (x, 1)) == CONST_INT
			&& const_ok_for_op (INTVAL (XEXP (x, 1)), code, mode)))
		   ? 0 : 8));

      if (REG_OR_SUBREG_REG (XEXP (x, 0)))
	return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 1))
		    || (GET_CODE (XEXP (x, 1)) == CONST_INT
			&& const_ok_for_op (INTVAL (XEXP (x, 1)), code, mode)))
		   ? 0 : 4));

      else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
	return (1 + extra_cost
		+ ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
		     || subcode == LSHIFTRT || subcode == ASHIFTRT
		     || subcode == ROTATE || subcode == ROTATERT
		     || (subcode == MULT
			 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
			 && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
			      (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0))
		     && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
		     && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
			 || GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
		   ? 0 : 4));

      return 8;
    case MULT:
      if (arm_fast_multiply && mode == DImode
	  && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
	  && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
	      || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
	return 8;

      if (GET_MODE_CLASS (mode) == MODE_FLOAT
	  || mode == DImode)
	return 30;

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	{
	  unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
				      & (unsigned HOST_WIDE_INT) 0xffffffff);
	  int add_cost = const_ok_for_arm (i) ? 4 : 8;
	  int j;
	  int booth_unit_size = (arm_fast_multiply ? 8 : 2);

	  for (j = 0; i && j < 32; j += booth_unit_size)
	    {
	      i >>= booth_unit_size;
	      add_cost += 2;
	    }

	  return add_cost;
	}

      return ((arm_fast_multiply ? 8 : 30)
	      + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
	      + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));

    case NEG:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
	return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
      /* Fall through */
    case NOT:
      if (mode == DImode)
	return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);

      return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);

    case IF_THEN_ELSE:
      if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
	return 14;
      return 2;

    case COMPARE:
      return 1;

    case ABS:
      return 4 + (mode == DImode ? 4 : 0);

    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == QImode)
	return (4 + (mode == DImode ? 4 : 0)
		+ (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
      /* Fall through */
    case ZERO_EXTEND:
      switch (GET_MODE (XEXP (x, 0)))
	{
	case QImode:
	  return (1 + (mode == DImode ? 4 : 0)
		  + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

	case HImode:
	  return (4 + (mode == DImode ? 4 : 0)
		  + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

	case SImode:
	  return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
	}

      abort ();

    default:
      return 99;
    }
}
/* This code has been fixed for cross compilation.  */

static int fpa_consts_inited = 0;

char *strings_fpa[8] = {
  "0",   "1",   "2",   "3",
  "4",   "5",   "0.5", "10"
};

static REAL_VALUE_TYPE values_fpa[8];

static void
init_fpa_table ()
{
  int i;
  REAL_VALUE_TYPE r;

  for (i = 0; i < 8; i++)
    {
      r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
      values_fpa[i] = r;
    }

  fpa_consts_inited = 1;
}

/* Return TRUE if rtx X is a valid immediate FPU constant.  */

int
const_double_rtx_ok_for_fpu (x)
     rtx x;
{
  REAL_VALUE_TYPE r;
  int i;

  if (!fpa_consts_inited)
    init_fpa_table ();

  REAL_VALUE_FROM_CONST_DOUBLE (r, x);
  if (REAL_VALUE_MINUS_ZERO (r))
    return 0;

  for (i = 0; i < 8; i++)
    if (REAL_VALUES_EQUAL (r, values_fpa[i]))
      return 1;

  return 0;
}
/* Return TRUE if rtx X is a valid immediate FPU constant when negated.  */

int
neg_const_double_rtx_ok_for_fpu (x)
     rtx x;
{
  REAL_VALUE_TYPE r;
  int i;

  if (!fpa_consts_inited)
    init_fpa_table ();

  REAL_VALUE_FROM_CONST_DOUBLE (r, x);
  r = REAL_VALUE_NEGATE (r);
  if (REAL_VALUE_MINUS_ZERO (r))
    return 0;

  for (i = 0; i < 8; i++)
    if (REAL_VALUES_EQUAL (r, values_fpa[i]))
      return 1;

  return 0;
}
/* Predicates for `match_operand' and `match_operator'.  */

/* s_register_operand is the same as register_operand, but it doesn't accept
   (SUBREG (MEM)...).  */

int
s_register_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Only accept reg, subreg(reg), const_int.  */

int
reg_or_int_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    return 1;

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 if OP is an item in memory, given that we are in reload.  */

int
reload_memory_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  int regno = true_regnum (op);

  return (! CONSTANT_P (op)
	  && (regno == -1
	      || (GET_CODE (op) == REG
		  && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
}

/* Return TRUE for valid operands for the rhs of an ARM instruction.  */

int
arm_rhs_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
}

/* Return TRUE for valid operands for the rhs of an ARM instruction, or a
   load.  */

int
arm_rhsm_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
	  || memory_operand (op, mode));
}

/* Return TRUE for valid operands for the rhs of an ARM instruction, or a
   constant that is valid when negated.  */

int
arm_add_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
	      && (const_ok_for_arm (INTVAL (op))
		  || const_ok_for_arm (-INTVAL (op)))));
}

int
arm_not_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
	      && (const_ok_for_arm (INTVAL (op))
		  || const_ok_for_arm (~INTVAL (op)))));
}

/* Return TRUE for valid operands for the rhs of an FPU instruction.  */

int
fpu_rhs_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;
  else if (GET_CODE (op) == CONST_DOUBLE)
    return (const_double_rtx_ok_for_fpu (op));

  return FALSE;
}

int
fpu_add_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;
  else if (GET_CODE (op) == CONST_DOUBLE)
    return (const_double_rtx_ok_for_fpu (op)
	    || neg_const_double_rtx_ok_for_fpu (op));

  return FALSE;
}

/* Return nonzero if OP is a constant power of two.  */

int
power_of_two_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    {
      HOST_WIDE_INT value = INTVAL (op);

      return value != 0 && (value & (value - 1)) == 0;
    }

  return FALSE;
}
/* Return TRUE for a valid operand of a DImode operation.
   Either: REG, CONST_DOUBLE or MEM(DImode_address).
   Note that this disallows MEM(REG+REG), but allows
   MEM(PRE/POST_INC/DEC(REG)).  */

int
di_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;

  switch (GET_CODE (op))
    {
    case CONST_DOUBLE:
    case CONST_INT:
      return TRUE;

    case MEM:
      return memory_address_p (DImode, XEXP (op, 0));

    default:
      return FALSE;
    }
}

/* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
   Either: REG, CONST_DOUBLE or MEM(DImode_address).
   Note that this disallows MEM(REG+REG), but allows
   MEM(PRE/POST_INC/DEC(REG)).  */

int
soft_df_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;

  switch (GET_CODE (op))
    {
    case CONST_DOUBLE:
      return TRUE;

    case MEM:
      return memory_address_p (DFmode, XEXP (op, 0));

    default:
      return FALSE;
    }
}

/* Return TRUE for valid index operands.  */

int
index_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
	  || (immediate_operand (op, mode)
	      && INTVAL (op) < 4096 && INTVAL (op) > -4096));
}
/* Return TRUE for valid shifts by a constant.  This also accepts any
   power of two on the (somewhat overly relaxed) assumption that the
   shift operator in this case was a mult.  */
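
/* For example, an index expression such as (plus reg (mult reg 4)) is
   canonicalized with a MULT even though the machine instruction is
   really a shift, so the constant 4 here stands for a left shift by 2.  */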
int
const_shift_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (power_of_two_operand (op, mode)
	  || (immediate_operand (op, mode)
	      && (INTVAL (op) < 32 && INTVAL (op) > 0)));
}
/* Return TRUE for arithmetic operators which can be combined with a multiply
   (shift).  */

int
shiftable_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (GET_MODE (x) != mode)
    return FALSE;
  else
    {
      enum rtx_code code = GET_CODE (x);

      return (code == PLUS || code == MINUS
	      || code == IOR || code == XOR || code == AND);
    }
}

/* Return TRUE for shift operators.  */

int
shift_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (GET_MODE (x) != mode)
    return FALSE;
  else
    {
      enum rtx_code code = GET_CODE (x);

      if (code == MULT)
	return power_of_two_operand (XEXP (x, 1), mode);

      return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
	      || code == ROTATERT);
    }
}

/* Return TRUE if X is EQ or NE.  */

int
equality_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  return GET_CODE (x) == EQ || GET_CODE (x) == NE;
}

/* Return TRUE for SMIN SMAX UMIN UMAX operators.  */

int
minmax_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  enum rtx_code code = GET_CODE (x);

  if (GET_MODE (x) != mode)
    return FALSE;

  return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
}

/* Return TRUE if this is the condition code register; if we aren't given
   a mode, accept any class CCmode register.  */

int
cc_register (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    {
      mode = GET_MODE (x);
      if (GET_MODE_CLASS (mode) != MODE_CC)
	return FALSE;
    }

  if (mode == GET_MODE (x) && GET_CODE (x) == REG && REGNO (x) == 24)
    return TRUE;

  return FALSE;
}

/* Return TRUE if this is the condition code register; if we aren't given
   a mode, accept any mode in class CC_MODE that is reversible.  */

int
reversible_cc_register (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    {
      mode = GET_MODE (x);
      if (GET_MODE_CLASS (mode) != MODE_CC
	  && GET_CODE (x) == REG && REGNO (x) == 24)
	abort ();
      if (GET_MODE_CLASS (mode) != MODE_CC
	  || (! flag_fast_math && ! REVERSIBLE_CC_MODE (mode)))
	return FALSE;
    }

  if (mode == GET_MODE (x) && GET_CODE (x) == REG && REGNO (x) == 24)
    return TRUE;

  return FALSE;
}
/* Return TRUE if X references a SYMBOL_REF.  */

int
symbol_mentioned_p (x)
     rtx x;
{
  register char *fmt;
  register int i;

  if (GET_CODE (x) == SYMBOL_REF)
    return 1;

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  register int j;

	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (symbol_mentioned_p (XVECEXP (x, i, j)))
	      return 1;
	}
      else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
	return 1;
    }

  return 0;
}

/* Return TRUE if X references a LABEL_REF.  */

int
label_mentioned_p (x)
     rtx x;
{
  register char *fmt;
  register int i;

  if (GET_CODE (x) == LABEL_REF)
    return 1;

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  register int j;

	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (label_mentioned_p (XVECEXP (x, i, j)))
	      return 1;
	}
      else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
	return 1;
    }

  return 0;
}

enum rtx_code
minmax_code (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);

  if (code == SMAX)
    return GE;
  else if (code == SMIN)
    return LE;
  else if (code == UMIN)
    return LEU;
  else if (code == UMAX)
    return GEU;

  abort ();
}
/* Return 1 if memory locations are adjacent.  */

int
adjacent_mem_locations (a, b)
     rtx a, b;
{
  int val0 = 0, val1 = 0;
  int reg0, reg1;

  if ((GET_CODE (XEXP (a, 0)) == REG
       || (GET_CODE (XEXP (a, 0)) == PLUS
	   && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
      && (GET_CODE (XEXP (b, 0)) == REG
	  || (GET_CODE (XEXP (b, 0)) == PLUS
	      && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
    {
      if (GET_CODE (XEXP (a, 0)) == PLUS)
	{
	  reg0 = REGNO (XEXP (XEXP (a, 0), 0));
	  val0 = INTVAL (XEXP (XEXP (a, 0), 1));
	}
      else
	reg0 = REGNO (XEXP (a, 0));

      if (GET_CODE (XEXP (b, 0)) == PLUS)
	{
	  reg1 = REGNO (XEXP (XEXP (b, 0), 0));
	  val1 = INTVAL (XEXP (XEXP (b, 0), 1));
	}
      else
	reg1 = REGNO (XEXP (b, 0));

      return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
    }

  return 0;
}
/* Return 1 if OP is a load multiple operation.  It is known to be
   parallel and the first section will be tested.  */

int
load_multiple_operation (op, mode)
     rtx op;
     enum machine_mode mode;
{
  HOST_WIDE_INT count = XVECLEN (op, 0);
  int dest_regno;
  rtx src_addr;
  HOST_WIDE_INT i = 1, base = 0;
  rtx elt;

  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET)
    return 0;

  /* Check to see if this might be a write-back.  */
  if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
    {
      i++;
      base = 1;

      /* Now check it more carefully.  */
      if (GET_CODE (SET_DEST (elt)) != REG
	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
	  || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
	  || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
	  || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 2) * 4
	  || GET_CODE (XVECEXP (op, 0, count - 1)) != CLOBBER
	  || GET_CODE (XEXP (XVECEXP (op, 0, count - 1), 0)) != REG
	  || REGNO (XEXP (XVECEXP (op, 0, count - 1), 0))
	      != REGNO (SET_DEST (elt)))
	return 0;

      count--;
    }

  /* Perform a quick check so we don't blow up below.  */
  if (count <= i
      || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
    return 0;

  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);

  for (; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != SImode
	  || REGNO (SET_DEST (elt)) != dest_regno + i - base
	  || GET_CODE (SET_SRC (elt)) != MEM
	  || GET_MODE (SET_SRC (elt)) != SImode
	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
	  || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
	  || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
	  || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
	return 0;
    }

  return 1;
}
/* Return 1 if OP is a store multiple operation.  It is known to be
   parallel and the first section will be tested.  */

int
store_multiple_operation (op, mode)
     rtx op;
     enum machine_mode mode;
{
  HOST_WIDE_INT count = XVECLEN (op, 0);
  int src_regno;
  rtx dest_addr;
  HOST_WIDE_INT i = 1, base = 0;
  rtx elt;

  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET)
    return 0;

  /* Check to see if this might be a write-back.  */
  if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
    {
      i++;
      base = 1;

      /* Now check it more carefully.  */
      if (GET_CODE (SET_DEST (elt)) != REG
	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
	  || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
	  || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
	  || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 2) * 4
	  || GET_CODE (XVECEXP (op, 0, count - 1)) != CLOBBER
	  || GET_CODE (XEXP (XVECEXP (op, 0, count - 1), 0)) != REG
	  || REGNO (XEXP (XVECEXP (op, 0, count - 1), 0))
	      != REGNO (SET_DEST (elt)))
	return 0;

      count--;
    }

  /* Perform a quick check so we don't blow up below.  */
  if (count <= i
      || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
      || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
    return 0;

  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);

  for (; i < count; i++)
    {
      elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_SRC (elt)) != REG
	  || GET_MODE (SET_SRC (elt)) != SImode
	  || REGNO (SET_SRC (elt)) != src_regno + i - base
	  || GET_CODE (SET_DEST (elt)) != MEM
	  || GET_MODE (SET_DEST (elt)) != SImode
	  || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
	  || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
	  || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
	  || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
	return 0;
    }

  return 1;
}
int
multi_register_push (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) != PARALLEL
      || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
      || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
      || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != 2))
    return 0;

  return 1;
}

/* Routines for use with attributes.  */

int
const_pool_offset (symbol)
     rtx symbol;
{
  return get_pool_offset (symbol) - get_pool_size () - get_prologue_size ();
}
/* Routines for use in generating RTL.  */

rtx
arm_gen_load_multiple (base_regno, count, from, up, write_back)
     int base_regno;
     int count;
     rtx from;
     int up;
     int write_back;
{
  int i = 0, j;
  rtx result;
  int sign = up ? 1 : -1;

  result = gen_rtx (PARALLEL, VOIDmode,
		    rtvec_alloc (count + (write_back ? 2 : 0)));
  if (write_back)
    {
      XVECEXP (result, 0, 0)
	= gen_rtx (SET, GET_MODE (from), from,
		   plus_constant (from, count * 4 * sign));
      i = 1;
      count++;
    }

  for (j = 0; i < count; i++, j++)
    {
      XVECEXP (result, 0, i)
	= gen_rtx (SET, VOIDmode, gen_rtx (REG, SImode, base_regno + j),
		   gen_rtx (MEM, SImode,
			    plus_constant (from, j * 4 * sign)));
    }

  if (write_back)
    XVECEXP (result, 0, i) = gen_rtx (CLOBBER, SImode, from);

  return result;
}

rtx
arm_gen_store_multiple (base_regno, count, to, up, write_back)
     int base_regno;
     int count;
     rtx to;
     int up;
     int write_back;
{
  int i = 0, j;
  rtx result;
  int sign = up ? 1 : -1;

  result = gen_rtx (PARALLEL, VOIDmode,
		    rtvec_alloc (count + (write_back ? 2 : 0)));
  if (write_back)
    {
      XVECEXP (result, 0, 0)
	= gen_rtx (SET, GET_MODE (to), to,
		   plus_constant (to, count * 4 * sign));
      i = 1;
      count++;
    }

  for (j = 0; i < count; i++, j++)
    {
      XVECEXP (result, 0, i)
	= gen_rtx (SET, VOIDmode,
		   gen_rtx (MEM, SImode, plus_constant (to, j * 4 * sign)),
		   gen_rtx (REG, SImode, base_regno + j));
    }

  if (write_back)
    XVECEXP (result, 0, i) = gen_rtx (CLOBBER, SImode, to);

  return result;
}
int
arm_gen_movstrqi (operands)
     rtx *operands;
{
  HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
  int i, r;
  rtx src, dst;
  rtx st_src, st_dst, end_src, end_dst, fin_src, fin_dst;
  rtx part_bytes_reg = NULL;
  extern int optimize;

  if (GET_CODE (operands[2]) != CONST_INT
      || GET_CODE (operands[3]) != CONST_INT
      || INTVAL (operands[2]) > 64
      || INTVAL (operands[3]) & 3)
    return 0;

  st_dst = XEXP (operands[0], 0);
  st_src = XEXP (operands[1], 0);
  fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
  fin_src = src = copy_to_mode_reg (SImode, st_src);

  in_words_to_go = (INTVAL (operands[2]) + 3) / 4;
  out_words_to_go = INTVAL (operands[2]) / 4;
  last_bytes = INTVAL (operands[2]) & 3;

  if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
    part_bytes_reg = gen_rtx (REG, SImode, (in_words_to_go - 1) & 3);

  for (i = 0; in_words_to_go >= 2; i += 4)
    {
      emit_insn (arm_gen_load_multiple (0, (in_words_to_go > 4
					    ? 4 : in_words_to_go),
					src, TRUE, TRUE));
      if (out_words_to_go)
	{
	  if (out_words_to_go != 1)
	    emit_insn (arm_gen_store_multiple (0, (out_words_to_go > 4
						   ? 4 : out_words_to_go),
					       dst, TRUE, TRUE));
	  else
	    {
	      emit_move_insn (gen_rtx (MEM, SImode, dst),
			      gen_rtx (REG, SImode, 0));
	      emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
	    }
	}

      in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
      out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
    }

  /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do.  */
  if (out_words_to_go)
    {
      rtx sreg;

      emit_move_insn (sreg = gen_reg_rtx (SImode), gen_rtx (MEM, SImode, src));
      emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));
      emit_move_insn (gen_rtx (MEM, SImode, dst), sreg);
      emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
      in_words_to_go--;

      if (in_words_to_go)	/* Sanity check */
	abort ();
    }

  if (in_words_to_go)
    {
      if (in_words_to_go < 0)
	abort ();

      part_bytes_reg = copy_to_mode_reg (SImode, gen_rtx (MEM, SImode, src));
      emit_insn (gen_addsi3 (src, src, GEN_INT (4)));
    }

  if (BYTES_BIG_ENDIAN && last_bytes)
    {
      rtx tmp = gen_reg_rtx (SImode);

      if (part_bytes_reg == NULL)
	abort ();

      /* The bytes we want are in the top end of the word.  */
      emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
			      GEN_INT (8 * (4 - last_bytes))));
      part_bytes_reg = tmp;

      while (last_bytes)
	{
	  emit_move_insn (gen_rtx (MEM, QImode,
				   plus_constant (dst, last_bytes - 1)),
			  gen_rtx (SUBREG, QImode, part_bytes_reg, 0));
	  if (--last_bytes)
	    {
	      tmp = gen_reg_rtx (SImode);
	      emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
	      part_bytes_reg = tmp;
	    }
	}
    }
  else
    {
      while (last_bytes)
	{
	  if (part_bytes_reg == NULL)
	    abort ();

	  emit_move_insn (gen_rtx (MEM, QImode, dst),
			  gen_rtx (SUBREG, QImode, part_bytes_reg, 0));
	  emit_insn (gen_addsi3 (dst, dst, const1_rtx));
	  if (--last_bytes)
	    {
	      rtx tmp = gen_reg_rtx (SImode);

	      emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
	      part_bytes_reg = tmp;
	    }
	}
    }

  return 1;
}
/* X and Y are two things to compare using CODE.  Emit the compare insn and
   return the rtx for the condition code register in the proper mode.  FP
   means this is a floating point compare: I don't think that it is needed
   on the arm.  */

rtx
gen_compare_reg (code, x, y, fp)
     enum rtx_code code;
     rtx x, y;
     int fp;
{
  enum machine_mode mode = SELECT_CC_MODE (code, x, y);
  rtx cc_reg = gen_rtx (REG, mode, 24);

  emit_insn (gen_rtx (SET, VOIDmode, cc_reg,
		      gen_rtx (COMPARE, mode, x, y)));

  return cc_reg;
}
void
arm_reload_in_hi (operands)
     rtx *operands;
{
  rtx base = find_replacement (&XEXP (operands[1], 0));

  emit_insn (gen_zero_extendqisi2 (operands[2], gen_rtx (MEM, QImode, base)));
  emit_insn (gen_zero_extendqisi2 (gen_rtx (SUBREG, SImode, operands[0], 0),
				   gen_rtx (MEM, QImode,
					    plus_constant (base, 1))));
  if (BYTES_BIG_ENDIAN)
    emit_insn (gen_rtx (SET, VOIDmode, gen_rtx (SUBREG, SImode,
						operands[0], 0),
			gen_rtx (IOR, SImode,
				 gen_rtx (ASHIFT, SImode,
					  gen_rtx (SUBREG, SImode,
						   operands[0], 0),
					  GEN_INT (8)),
				 operands[2])));
  else
    emit_insn (gen_rtx (SET, VOIDmode, gen_rtx (SUBREG, SImode,
						operands[0], 0),
			gen_rtx (IOR, SImode,
				 gen_rtx (ASHIFT, SImode,
					  operands[2],
					  GEN_INT (8)),
				 gen_rtx (SUBREG, SImode, operands[0], 0))));
}

void
arm_reload_out_hi (operands)
     rtx *operands;
{
  rtx base = find_replacement (&XEXP (operands[0], 0));

  if (BYTES_BIG_ENDIAN)
    {
      emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (base, 1)),
			    gen_rtx (SUBREG, QImode, operands[1], 0)));
      emit_insn (gen_lshrsi3 (operands[2],
			      gen_rtx (SUBREG, SImode, operands[1], 0),
			      GEN_INT (8)));
      emit_insn (gen_movqi (gen_rtx (MEM, QImode, base),
			    gen_rtx (SUBREG, QImode, operands[2], 0)));
    }
  else
    {
      emit_insn (gen_movqi (gen_rtx (MEM, QImode, base),
			    gen_rtx (SUBREG, QImode, operands[1], 0)));
      emit_insn (gen_lshrsi3 (operands[2],
			      gen_rtx (SUBREG, SImode, operands[1], 0),
			      GEN_INT (8)));
      emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (base, 1)),
			    gen_rtx (SUBREG, QImode, operands[2], 0)));
    }
}
/* Check to see if a branch is forwards or backwards.  Return TRUE if it
   is backwards.  */

int
arm_backwards_branch (from, to)
     int from, to;
{
  return insn_addresses[to] <= insn_addresses[from];
}

/* Check to see if a branch is within the distance that can be done using
   an arithmetic expression.  */

int
short_branch (from, to)
     int from, to;
{
  int delta = insn_addresses[from] + 8 - insn_addresses[to];

  return abs (delta) < 980;	/* A small margin for safety */
}

/* Check to see that the insn isn't the target of the conditionalizing
   code.  */

int
arm_insn_not_targeted (insn)
     rtx insn;
{
  return insn != arm_target_insn;
}
2219 /* Routines for manipulation of the constant pool. */
2220 /* This is unashamedly hacked from the version in sh.c, since the problem is
2221 extremely similar. */
2223 /* Arm instructions cannot load a large constant into a register;
2224 constants have to come from a pc relative load. The reference of a pc
2225 relative load instruction must be less than 1k in front of the instruction.
2226 This means that we often have to dump a constant inside a function, and
2227 generate code to branch around it.
2229 It is important to minimize this, since the branches will slow things
2230 down and make things bigger.
2232 Worst case code looks like:
2234 ldr rn, L1
2235 b L2
2236 align
2237 L1: .long value
2241 ldr rn, L3
2242 b L4
2243 align
2244 L3: .long value
2248 We fix this by performing a scan before scheduling, which notices which
2249 instructions need to have their operands fetched from the constant table
2250 and builds the table.
2253 The algorithm is:
2255 scan; find an instruction which needs a pcrel move. Look forward, find the
2256 last barrier which is within MAX_COUNT bytes of the requirement.
2257 If there isn't one, make one. Process all the instructions between
2258 the insn we found and the barrier.
2260 In the above example, we can tell that L3 is within 1k of L1, so
2261 the first move can be shrunk from the 2 insn+constant sequence into
2262 just 1 insn, and the constant moved to L3 to make:
2264 ldr rn, L1
2266 ldr rn, L3
2267 b L4
2268 align
2269 L1: .long value
2270 L3: .long value
2273 Then the second move becomes the target for the shortening process. */
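/* A minimal sketch of the scan just described, with the rtl stripped
   away; the toy type and helper names here are illustrative only, the
   real implementation being arm_reorg () below. */
#if 0
struct toy_insn { int needs_pool; struct toy_insn *next; };
extern struct toy_insn *toy_find_barrier ();	/* cf. find_barrier () */

static void
toy_reorg (first)
     struct toy_insn *first;
{
  struct toy_insn *insn, *scan, *barrier;

  for (insn = first; insn; insn = insn->next)
    if (insn->needs_pool)			/* cf. broken_move () */
      {
	barrier = toy_find_barrier (insn, 1000);
	for (scan = insn; scan != barrier; scan = scan->next)
	  if (scan->needs_pool)
	    scan->needs_pool = 0;		/* stands in for the rewrite */
	insn = barrier;				/* table would be dumped here */
      }
}
#endif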
2277 typedef struct
2279 rtx value; /* Value in table */
2280 HOST_WIDE_INT next_offset;
2281 enum machine_mode mode; /* Mode of value */
2282 } pool_node;
2284 /* The maximum number of constants that can fit into one pool, since
2285 the pc relative range is 0...1020 bytes and constants are at least 4
2286 bytes long */
2288 #define MAX_POOL_SIZE (1020/4)
2289 static pool_node pool_vector[MAX_POOL_SIZE];
2290 static int pool_size;
2291 static rtx pool_vector_label;
2293 /* Add a constant to the pool and return its label. */
2294 static HOST_WIDE_INT
2295 add_constant (x, mode)
2296 rtx x;
2297 enum machine_mode mode;
2299 int i;
2300 rtx lab;
2301 HOST_WIDE_INT offset;
2303 if (mode == SImode && GET_CODE (x) == MEM && CONSTANT_P (XEXP (x, 0))
2304 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
2305 x = get_pool_constant (XEXP (x, 0));
2306 #ifndef AOF_ASSEMBLER
2307 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == 3)
2308 x = XVECEXP (x, 0, 0);
2309 #endif
2311 /* First see if we've already got it */
2312 for (i = 0; i < pool_size; i++)
2314 if (GET_CODE (x) == pool_vector[i].value->code
2315 && mode == pool_vector[i].mode)
2317 if (GET_CODE (x) == CODE_LABEL)
2319 if (XINT (x, 3) != XINT (pool_vector[i].value, 3))
2320 continue;
2322 if (rtx_equal_p (x, pool_vector[i].value))
2323 return pool_vector[i].next_offset - GET_MODE_SIZE (mode);
2327 /* Need a new one */
2328 pool_vector[pool_size].next_offset = GET_MODE_SIZE (mode);
2329 offset = 0;
2330 if (pool_size == 0)
2331 pool_vector_label = gen_label_rtx ();
2332 else
2333 pool_vector[pool_size].next_offset
2334 += (offset = pool_vector[pool_size - 1].next_offset);
2336 pool_vector[pool_size].value = x;
2337 pool_vector[pool_size].mode = mode;
2338 pool_size++;
2339 return offset;
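/* A worked example of the bookkeeping above (assuming an SImode constant
   occupies 4 bytes and a DFmode constant 8): the first SImode constant
   added gets offset 0 and next_offset 4; a DFmode constant added next
   gets offset 4 and next_offset 12; adding the first constant again is
   caught by the search loop and returns 4 - GET_MODE_SIZE (SImode) = 0. */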
2342 /* Output the literal table */
2343 static void
2344 dump_table (scan)
2345 rtx scan;
2347 int i;
2349 scan = emit_label_after (gen_label_rtx (), scan);
2350 scan = emit_insn_after (gen_align_4 (), scan);
2351 scan = emit_label_after (pool_vector_label, scan);
2353 for (i = 0; i < pool_size; i++)
2355 pool_node *p = pool_vector + i;
2357 switch (GET_MODE_SIZE (p->mode))
2359 case 4:
2360 scan = emit_insn_after (gen_consttable_4 (p->value), scan);
2361 break;
2363 case 8:
2364 scan = emit_insn_after (gen_consttable_8 (p->value), scan);
2365 break;
2367 default:
2368 abort ();
2369 break;
2373 scan = emit_insn_after (gen_consttable_end (), scan);
2374 scan = emit_barrier_after (scan);
2375 pool_size = 0;
2378 /* Nonzero if the src operand needs to be fixed up */
2379 static int
2380 fixit (src, mode, destreg)
2381 rtx src;
2382 enum machine_mode mode;
2383 int destreg;
2385 if (CONSTANT_P (src))
2387 if (GET_CODE (src) == CONST_INT)
2388 return (! const_ok_for_arm (INTVAL (src))
2389 && ! const_ok_for_arm (~INTVAL (src)));
2390 if (GET_CODE (src) == CONST_DOUBLE)
2391 return (GET_MODE (src) == VOIDmode
2392 || destreg < 16
2393 || (! const_double_rtx_ok_for_fpu (src)
2394 && ! neg_const_double_rtx_ok_for_fpu (src)));
2395 return symbol_mentioned_p (src);
2397 #ifndef AOF_ASSEMBLER
2398 else if (GET_CODE (src) == UNSPEC && XINT (src, 1) == 3)
2399 return 1;
2400 #endif
2401 else
2402 return (mode == SImode && GET_CODE (src) == MEM
2403 && GET_CODE (XEXP (src, 0)) == SYMBOL_REF
2404 && CONSTANT_POOL_ADDRESS_P (XEXP (src, 0)));
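/* For example (assuming const_ok_for_arm accepts the usual ARM
   data-processing immediates, i.e. an 8-bit value rotated right by an
   even amount): a load of 0x12345678 needs fixing, since neither it nor
   its complement can be encoded, while 0xff000000 does not, since it is
   0xff rotated right by 8. */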
2407 /* Find the last barrier less than MAX_COUNT bytes from FROM, or create one. */
2408 static rtx
2409 find_barrier (from, max_count)
2410 rtx from;
2411 int max_count;
2413 int count = 0;
2414 rtx found_barrier = 0;
2416 while (from && count < max_count)
2418 if (GET_CODE (from) == BARRIER)
2419 found_barrier = from;
2421 /* Count the length of this insn */
2422 if (GET_CODE (from) == INSN
2423 && GET_CODE (PATTERN (from)) == SET
2424 && CONSTANT_P (SET_SRC (PATTERN (from)))
2425 && CONSTANT_POOL_ADDRESS_P (SET_SRC (PATTERN (from))))
2427 rtx src = SET_SRC (PATTERN (from));
2428 count += 2;
2430 else
2431 count += get_attr_length (from);
2433 from = NEXT_INSN (from);
2436 if (!found_barrier)
2438 /* We didn't find a barrier in time to
2439 dump our stuff, so we'll make one */
2440 rtx label = gen_label_rtx ();
2442 if (from)
2443 from = PREV_INSN (from);
2444 else
2445 from = get_last_insn ();
2447 /* Walk back to be just before any jump */
2448 while (GET_CODE (from) == JUMP_INSN
2449 || GET_CODE (from) == NOTE
2450 || GET_CODE (from) == CODE_LABEL)
2451 from = PREV_INSN (from);
2453 from = emit_jump_insn_after (gen_jump (label), from);
2454 JUMP_LABEL (from) = label;
2455 found_barrier = emit_barrier_after (from);
2456 emit_label_after (label, found_barrier);
2457 return found_barrier;
2460 return found_barrier;
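/* When a barrier has to be created, the code above should leave the
   insn stream looking roughly like this sketch:

	b	Lnew
	<barrier>
	...constant table dumped here by the caller...
   Lnew:

   so that execution branches around the table. */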
2463 /* Nonzero if the insn is a move instruction which needs to be fixed. */
2464 static int
2465 broken_move (insn)
2466 rtx insn;
2468 if (!INSN_DELETED_P (insn)
2469 && GET_CODE (insn) == INSN
2470 && GET_CODE (PATTERN (insn)) == SET)
2472 rtx pat = PATTERN (insn);
2473 rtx src = SET_SRC (pat);
2474 rtx dst = SET_DEST (pat);
2475 int destreg = -1; /* Stays -1 if DST is not (a subreg of) a register. */
2476 enum machine_mode mode = GET_MODE (dst);
2477 if (dst == pc_rtx)
2478 return 0;
2480 if (GET_CODE (dst) == REG)
2481 destreg = REGNO (dst);
2482 else if (GET_CODE (dst) == SUBREG && GET_CODE (SUBREG_REG (dst)) == REG)
2483 destreg = REGNO (SUBREG_REG (dst));
2485 return fixit (src, mode, destreg);
2487 return 0;
2490 void
2491 arm_reorg (first)
2492 rtx first;
2494 rtx insn;
2495 int count_size;
2496 int regno;
2498 #if 0
2499 /* The ldr instruction can work with up to a 4k offset, and most constants
2500 will be loaded with one of these instructions; however, the adr
2501 instruction and the ldf instructions only work with a 1k offset. This
2502 code needs to be rewritten to use the 4k offset when possible, and to
2503 adjust when a 1k offset is needed. For now we just use a 1k offset
2504 from the start. */
2505 count_size = 4000;
2507 /* Floating point operands can't work further than 1024 bytes from the
2508 PC, so to make things simple we restrict all loads for such functions. */
2510 if (TARGET_HARD_FLOAT)
2511 for (regno = 16; regno < 24; regno++)
2512 if (regs_ever_live[regno])
2514 count_size = 1000;
2515 break;
2517 #else
2518 count_size = 1000;
2519 #endif /* 0 */
2521 for (insn = first; insn; insn = NEXT_INSN (insn))
2523 if (broken_move (insn))
2525 /* This is a broken move instruction; scan ahead looking for
2526 a barrier to stick the constant table behind */
2527 rtx scan;
2528 rtx barrier = find_barrier (insn, count_size);
2530 /* Now find all the moves between the points and modify them */
2531 for (scan = insn; scan != barrier; scan = NEXT_INSN (scan))
2533 if (broken_move (scan))
2535 /* This is a broken move instruction; add it to the pool */
2536 rtx pat = PATTERN (scan);
2537 rtx src = SET_SRC (pat);
2538 rtx dst = SET_DEST (pat);
2539 enum machine_mode mode = GET_MODE (dst);
2540 HOST_WIDE_INT offset;
2541 rtx newinsn = scan;
2542 rtx newsrc;
2543 rtx addr;
2544 int scratch;
2546 /* If this is an HImode constant load, convert it into
2547 an SImode constant load. Since the register is always
2548 32 bits this is safe. We have to do this, since the
2549 load pc-relative instruction only does a 32-bit load. */
2550 if (mode == HImode)
2552 mode = SImode;
2553 if (GET_CODE (dst) != REG)
2554 abort ();
2555 PUT_MODE (dst, SImode);
2558 offset = add_constant (src, mode);
2559 addr = plus_constant (gen_rtx (LABEL_REF, VOIDmode,
2560 pool_vector_label),
2561 offset);
2563 /* For wide moves to integer regs we need to split the
2564 address calculation off into a separate insn, so that
2565 the load can then be done with a load-multiple. This is
2566 safe, since we have already noted the length of such
2567 insns to be 8, and we are immediately over-writing the
2568 scratch we have grabbed with the final result. */
2569 if (GET_MODE_SIZE (mode) > 4
2570 && (scratch = REGNO (dst)) < 16)
2572 rtx reg = gen_rtx (REG, SImode, scratch);
2573 newinsn = emit_insn_after (gen_movaddr (reg, addr),
2574 newinsn);
2575 addr = reg;
2578 newsrc = gen_rtx (MEM, mode, addr);
2580 /* Build a jump insn wrapper around the move instead
2581 of an ordinary insn, because we want to have room for
2582 the target label rtx in fld[7], which an ordinary
2583 insn doesn't have. */
2584 newinsn = emit_jump_insn_after (gen_rtx (SET, VOIDmode,
2585 dst, newsrc),
2586 newinsn);
2587 JUMP_LABEL (newinsn) = pool_vector_label;
2589 /* But it's still an ordinary insn */
2590 PUT_CODE (newinsn, INSN);
2592 /* Kill old insn */
2593 delete_insn (scan);
2594 scan = newinsn;
2597 dump_table (barrier);
2598 insn = scan;
2604 /* Routines to output assembly language. */
2606 /* If the rtx is a valid FPA immediate constant, return the string
2607 representation of the number. In this way we can ensure that valid double
2608 constants are generated even when cross compiling. */
2609 char *
2610 fp_immediate_constant (x)
2611 rtx x;
2613 REAL_VALUE_TYPE r;
2614 int i;
2616 if (!fpa_consts_inited)
2617 init_fpa_table ();
2619 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2620 for (i = 0; i < 8; i++)
2621 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
2622 return strings_fpa[i];
2624 abort ();
2627 /* As for fp_immediate_constant, but value is passed directly, not in rtx. */
2628 static char *
2629 fp_const_from_val (r)
2630 REAL_VALUE_TYPE *r;
2632 int i;
2634 if (! fpa_consts_inited)
2635 init_fpa_table ();
2637 for (i = 0; i < 8; i++)
2638 if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
2639 return strings_fpa[i];
2641 abort ();
2644 /* Output the operands of a LDM/STM instruction to STREAM.
2645 MASK is the ARM register set mask of which only bits 0-15 are important.
2646 INSTR is the format for the instruction and its (possibly suffixed) base
2647 register. HAT is nonzero if a hat (^) must follow the register list. */
2649 void
2650 print_multi_reg (stream, instr, mask, hat)
2651 FILE *stream;
2652 char *instr;
2653 int mask, hat;
2655 int i;
2656 int not_first = FALSE;
2658 fputc ('\t', stream);
2659 fprintf (stream, instr, REGISTER_PREFIX);
2660 fputs (", {", stream);
2661 for (i = 0; i < 16; i++)
2662 if (mask & (1 << i))
2664 if (not_first)
2665 fprintf (stream, ", ");
2666 fprintf (stream, "%s%s", REGISTER_PREFIX, reg_names[i]);
2667 not_first = TRUE;
2670 fprintf (stream, "}%s\n", hat ? "^" : "");
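/* For example (assuming an empty REGISTER_PREFIX and the default
   register names), print_multi_reg (f, "ldmfd\t%ssp!", 0x4010, FALSE)
   would emit roughly "ldmfd sp!, {r4, lr}". */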
2673 /* Output a 'call' insn. */
2675 char *
2676 output_call (operands)
2677 rtx *operands;
2679 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
2681 if (REGNO (operands[0]) == 14)
2683 operands[0] = gen_rtx (REG, SImode, 12);
2684 output_asm_insn ("mov%?\t%0, %|lr", operands);
2686 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
2687 output_asm_insn ("mov%?\t%|pc, %0", operands);
2688 return "";
2691 static int
2692 eliminate_lr2ip (x)
2693 rtx *x;
2695 int something_changed = 0;
2696 rtx x0 = *x;
2697 int code = GET_CODE (x0);
2698 register int i, j;
2699 register char *fmt;
2701 switch (code)
2703 case REG:
2704 if (REGNO (x0) == 14)
2706 *x = gen_rtx (REG, SImode, 12);
2707 return 1;
2709 return 0;
2710 default:
2711 /* Scan through the sub-elements and change any references there */
2712 fmt = GET_RTX_FORMAT (code);
2713 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2714 if (fmt[i] == 'e')
2715 something_changed |= eliminate_lr2ip (&XEXP (x0, i));
2716 else if (fmt[i] == 'E')
2717 for (j = 0; j < XVECLEN (x0, i); j++)
2718 something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));
2719 return something_changed;
2723 /* Output a 'call' insn that is a reference in memory. */
2725 char *
2726 output_call_mem (operands)
2727 rtx *operands;
2729 operands[0] = copy_rtx (operands[0]); /* Be ultra careful */
2730 /* Handle calls using lr by using ip (which may be clobbered in subr anyway). */
2732 if (eliminate_lr2ip (&operands[0]))
2733 output_asm_insn ("mov%?\t%|ip, %|lr", operands);
2735 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
2736 output_asm_insn ("ldr%?\t%|pc, %0", operands);
2737 return "";
2741 /* Output a move from arm registers to an fpu register.
2742 OPERANDS[0] is an fpu register.
2743 OPERANDS[1] is the first register of an arm register pair. */
2745 char *
2746 output_mov_long_double_fpu_from_arm (operands)
2747 rtx *operands;
2749 int arm_reg0 = REGNO (operands[1]);
2750 rtx ops[3];
2752 if (arm_reg0 == 12)
2753 abort();
2755 ops[0] = gen_rtx (REG, SImode, arm_reg0);
2756 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
2757 ops[2] = gen_rtx (REG, SImode, 2 + arm_reg0);
2759 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
2760 output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);
2761 return "";
2764 /* Output a move from an fpu register to arm registers.
2765 OPERANDS[0] is the first register of an arm register pair.
2766 OPERANDS[1] is an fpu register. */
2768 char *
2769 output_mov_long_double_arm_from_fpu (operands)
2770 rtx *operands;
2772 int arm_reg0 = REGNO (operands[0]);
2773 rtx ops[3];
2775 if (arm_reg0 == 12)
2776 abort();
2778 ops[0] = gen_rtx (REG, SImode, arm_reg0);
2779 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
2780 ops[2] = gen_rtx (REG, SImode, 2 + arm_reg0);
2782 output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
2783 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
2784 return "";
2787 /* Output a move from arm registers to arm registers of a long double.
2788 OPERANDS[0] is the destination.
2789 OPERANDS[1] is the source. */
2790 char *
2791 output_mov_long_double_arm_from_arm (operands)
2792 rtx *operands;
2794 /* We have to be careful here because the two might overlap */
2795 int dest_start = REGNO (operands[0]);
2796 int src_start = REGNO (operands[1]);
2797 rtx ops[2];
2798 int i;
2800 if (dest_start < src_start)
2802 for (i = 0; i < 3; i++)
2804 ops[0] = gen_rtx (REG, SImode, dest_start + i);
2805 ops[1] = gen_rtx (REG, SImode, src_start + i);
2806 output_asm_insn ("mov%?\t%0, %1", ops);
2809 else
2811 for (i = 2; i >= 0; i--)
2813 ops[0] = gen_rtx (REG, SImode, dest_start + i);
2814 ops[1] = gen_rtx (REG, SImode, src_start + i);
2815 output_asm_insn ("mov%?\t%0, %1", ops);
2819 return "";
2823 /* Output a move from arm registers to an fpu register.
2824 OPERANDS[0] is an fpu register.
2825 OPERANDS[1] is the first register of an arm register pair. */
2827 char *
2828 output_mov_double_fpu_from_arm (operands)
2829 rtx *operands;
2831 int arm_reg0 = REGNO (operands[1]);
2832 rtx ops[2];
2834 if (arm_reg0 == 12)
2835 abort();
2836 ops[0] = gen_rtx (REG, SImode, arm_reg0);
2837 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
2838 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
2839 output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
2840 return "";
2843 /* Output a move from an fpu register to arm registers.
2844 OPERANDS[0] is the first register of an arm register pair.
2845 OPERANDS[1] is an fpu register. */
2847 char *
2848 output_mov_double_arm_from_fpu (operands)
2849 rtx *operands;
2851 int arm_reg0 = REGNO (operands[0]);
2852 rtx ops[2];
2854 if (arm_reg0 == 12)
2855 abort();
2857 ops[0] = gen_rtx (REG, SImode, arm_reg0);
2858 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
2859 output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
2860 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
2861 return "";
2864 /* Output a move between double words.
2865 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
2866 or MEM<-REG, and all MEMs must be offsettable addresses. */
2868 char *
2869 output_move_double (operands)
2870 rtx *operands;
2872 enum rtx_code code0 = GET_CODE (operands[0]);
2873 enum rtx_code code1 = GET_CODE (operands[1]);
2874 rtx otherops[2];
2876 if (code0 == REG)
2878 int reg0 = REGNO (operands[0]);
2880 otherops[0] = gen_rtx (REG, SImode, 1 + reg0);
2881 if (code1 == REG)
2883 int reg1 = REGNO (operands[1]);
2884 if (reg1 == 12)
2885 abort();
2887 otherops[1] = gen_rtx (REG, SImode, 1 + reg1);
2889 /* Ensure the second source is not overwritten */
2890 if (reg0 == 1 + reg1)
2892 output_asm_insn("mov%?\t%0, %1", otherops);
2893 output_asm_insn("mov%?\t%0, %1", operands);
2895 else
2897 output_asm_insn("mov%?\t%0, %1", operands);
2898 output_asm_insn("mov%?\t%0, %1", otherops);
2901 else if (code1 == CONST_DOUBLE)
2903 otherops[1] = gen_rtx (CONST_INT, VOIDmode,
2904 CONST_DOUBLE_HIGH (operands[1]));
2905 operands[1] = gen_rtx (CONST_INT, VOIDmode,
2906 CONST_DOUBLE_LOW (operands[1]));
2907 output_mov_immediate (operands, FALSE, "");
2908 output_mov_immediate (otherops, FALSE, "");
2910 else if (code1 == CONST_INT)
2912 otherops[1] = const0_rtx;
2913 /* sign extend the intval into the high-order word */
2914 /* Note: output_mov_immediate may clobber operands[1], so we
2915 put this out first */
2916 if (INTVAL (operands[1]) < 0)
2917 output_asm_insn ("mvn%?\t%0, %1", otherops);
2918 else
2919 output_asm_insn ("mov%?\t%0, %1", otherops);
2920 output_mov_immediate (operands, FALSE, "");
2922 else if (code1 == MEM)
2924 switch (GET_CODE (XEXP (operands[1], 0)))
2926 case REG:
2927 output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
2928 break;
2930 case PRE_INC:
2931 abort (); /* Should never happen now */
2932 break;
2934 case PRE_DEC:
2935 output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
2936 break;
2938 case POST_INC:
2939 output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
2940 break;
2942 case POST_DEC:
2943 abort (); /* Should never happen now */
2944 break;
2946 case LABEL_REF:
2947 case CONST:
2948 output_asm_insn ("adr%?\t%0, %1", operands);
2949 output_asm_insn ("ldm%?ia\t%0, %M0", operands);
2950 break;
2952 default:
2953 if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1)))
2955 otherops[0] = operands[0];
2956 otherops[1] = XEXP (XEXP (operands[1], 0), 0);
2957 otherops[2] = XEXP (XEXP (operands[1], 0), 1);
2958 if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
2960 if (GET_CODE (otherops[2]) == CONST_INT)
2962 switch (INTVAL (otherops[2]))
2964 case -8:
2965 output_asm_insn ("ldm%?db\t%1, %M0", otherops);
2966 return "";
2967 case -4:
2968 output_asm_insn ("ldm%?da\t%1, %M0", otherops);
2969 return "";
2970 case 4:
2971 output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
2972 return "";
2974 if (!(const_ok_for_arm (INTVAL (otherops[2]))))
2975 output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
2976 else
2977 output_asm_insn ("add%?\t%0, %1, %2", otherops);
2979 else
2980 output_asm_insn ("add%?\t%0, %1, %2", otherops);
2982 else
2983 output_asm_insn ("sub%?\t%0, %1, %2", otherops);
2984 return "ldm%?ia\t%0, %M0";
2986 else
2988 otherops[1] = adj_offsettable_operand (operands[1], 4);
2989 /* Take care of overlapping base/data reg. */
2990 if (reg_mentioned_p (operands[0], operands[1]))
2992 output_asm_insn ("ldr%?\t%0, %1", otherops);
2993 output_asm_insn ("ldr%?\t%0, %1", operands);
2995 else
2997 output_asm_insn ("ldr%?\t%0, %1", operands);
2998 output_asm_insn ("ldr%?\t%0, %1", otherops);
3003 else
3004 abort(); /* Constraints should prevent this */
3006 else if (code0 == MEM && code1 == REG)
3008 if (REGNO (operands[1]) == 12)
3009 abort();
3011 switch (GET_CODE (XEXP (operands[0], 0)))
3013 case REG:
3014 output_asm_insn ("stm%?ia\t%m0, %M1", operands);
3015 break;
3017 case PRE_INC:
3018 abort (); /* Should never happen now */
3019 break;
3021 case PRE_DEC:
3022 output_asm_insn ("stm%?db\t%m0!, %M1", operands);
3023 break;
3025 case POST_INC:
3026 output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
3027 break;
3029 case POST_DEC:
3030 abort (); /* Should never happen now */
3031 break;
3033 case PLUS:
3034 if (GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT)
3036 switch (INTVAL (XEXP (XEXP (operands[0], 0), 1)))
3038 case -8:
3039 output_asm_insn ("stm%?db\t%m0, %M1", operands);
3040 return "";
3042 case -4:
3043 output_asm_insn ("stm%?da\t%m0, %M1", operands);
3044 return "";
3046 case 4:
3047 output_asm_insn ("stm%?ib\t%m0, %M1", operands);
3048 return "";
3051 /* Fall through */
3053 default:
3054 otherops[0] = adj_offsettable_operand (operands[0], 4);
3055 otherops[1] = gen_rtx (REG, SImode, 1 + REGNO (operands[1]));
3056 output_asm_insn ("str%?\t%1, %0", operands);
3057 output_asm_insn ("str%?\t%1, %0", otherops);
3060 else
3061 abort(); /* Constraints should prevent this */
3063 return "";
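/* Note that the special PLUS offsets handled above map directly onto the
   ldm/stm addressing modes: -8 -> db, -4 -> da, +4 -> ib, while a plain
   register base uses ia; any other offset is first computed into the
   destination register (for loads) or handled as two separate word
   stores. */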
3067 /* Output an arbitrary MOV reg, #n.
3068 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
3070 char *
3071 output_mov_immediate (operands)
3072 rtx *operands;
3074 HOST_WIDE_INT n = INTVAL (operands[1]);
3075 int n_ones = 0;
3076 int i;
3078 /* Try to use one MOV */
3079 if (const_ok_for_arm (n))
3081 output_asm_insn ("mov%?\t%0, %1", operands);
3082 return "";
3085 /* Try to use one MVN */
3086 if (const_ok_for_arm (~n))
3088 operands[1] = GEN_INT (~n);
3089 output_asm_insn ("mvn%?\t%0, %1", operands);
3090 return "";
3093 /* If all else fails, make it out of ORRs or BICs as appropriate. */
3095 for (i=0; i < 32; i++)
3096 if (n & 1 << i)
3097 n_ones++;
3099 if (n_ones > 16) /* Shorter to use MVN with BIC in this case. */
3100 output_multi_immediate(operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1,
3101 ~n);
3102 else
3103 output_multi_immediate(operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1, n);
3106 return "";
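/* A sketch of the test const_ok_for_arm is assumed to perform: an ARM
   data-processing immediate is an 8-bit value rotated right by an even
   amount. Illustrative only, not the real implementation. */
#if 0
static int
rotated_imm8_p (n)
     unsigned long n;
{
  int rot;
  unsigned long v;

  n &= 0xffffffff;
  for (rot = 0; rot < 32; rot += 2)
    {
      /* Rotating N left by ROT undoes a rotate right by ROT, so if the
	 result fits in 8 bits then N is a valid immediate. */
      v = rot == 0 ? n : (((n << rot) | (n >> (32 - rot))) & 0xffffffff);
      if (v < 256)
	return 1;
    }
  return 0;
}
#endif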
3110 /* Output an ADD r, s, #n where n may be too big for one instruction. If
3111 n is zero and r is the same register as s, output nothing. */
3113 char *
3114 output_add_immediate (operands)
3115 rtx *operands;
3117 HOST_WIDE_INT n = INTVAL (operands[2]);
3119 if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
3121 if (n < 0)
3122 output_multi_immediate (operands,
3123 "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
3124 -n);
3125 else
3126 output_multi_immediate (operands,
3127 "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2, n);
3131 return "";
3134 /* Output a multiple immediate operation.
3135 OPERANDS is the vector of operands referred to in the output patterns.
3136 INSTR1 is the output pattern to use for the first constant.
3137 INSTR2 is the output pattern to use for subsequent constants.
3138 IMMED_OP is the index of the constant slot in OPERANDS.
3139 N is the constant value. */
3141 char *
3142 output_multi_immediate (operands, instr1, instr2, immed_op, n)
3143 rtx *operands;
3144 char *instr1, *instr2;
3145 int immed_op;
3146 HOST_WIDE_INT n;
3148 #if HOST_BITS_PER_WIDE_INT > 32
3149 n &= 0xffffffff;
3150 #endif
3152 if (n == 0)
3154 operands[immed_op] = const0_rtx;
3155 output_asm_insn (instr1, operands); /* Quick and easy output */
3157 else
3159 int i;
3160 char *instr = instr1;
3162 /* Note that n is never zero here (which would give no output) */
3163 for (i = 0; i < 32; i += 2)
3165 if (n & (3 << i))
3167 operands[immed_op] = GEN_INT (n & (255 << i));
3168 output_asm_insn (instr, operands);
3169 instr = instr2;
3170 i += 6;
3174 return "";
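/* Each constant emitted by the loop above is an 8-bit field of N starting
   at an even bit position, so each is itself a valid ARM immediate, and at
   most four instructions are ever needed for a 32-bit constant. E.g. with
   n == 0x00f000ff this emits two instructions, one for the 0xff field and
   one for the 0x00f00000 field. */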
3178 /* Return the appropriate ARM instruction for the operation code.
3179 The returned result should not be overwritten. OP is the rtx of the
3180 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
3181 was shifted. */
3183 char *
3184 arithmetic_instr (op, shift_first_arg)
3185 rtx op;
3186 int shift_first_arg;
3188 switch (GET_CODE (op))
3190 case PLUS:
3191 return "add";
3193 case MINUS:
3194 return shift_first_arg ? "rsb" : "sub";
3196 case IOR:
3197 return "orr";
3199 case XOR:
3200 return "eor";
3202 case AND:
3203 return "and";
3205 default:
3206 abort ();
3211 /* Ensure valid constant shifts and return the appropriate shift mnemonic
3212 for the operation code. The returned result should not be overwritten.
3213 OP is the rtx code of the shift.
3214 On exit, *AMOUNTP will be -1 if the shift is by a register, otherwise it
3215 will be the constant shift amount. */
3217 static char *
3218 shift_op (op, amountp)
3219 rtx op;
3220 HOST_WIDE_INT *amountp;
3222 char *mnem;
3223 enum rtx_code code = GET_CODE (op);
3225 if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
3226 *amountp = -1;
3227 else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
3228 *amountp = INTVAL (XEXP (op, 1));
3229 else
3230 abort ();
3232 switch (code)
3234 case ASHIFT:
3235 mnem = "asl";
3236 break;
3238 case ASHIFTRT:
3239 mnem = "asr";
3240 break;
3242 case LSHIFTRT:
3243 mnem = "lsr";
3244 break;
3246 case ROTATERT:
3247 mnem = "ror";
3248 break;
3250 case MULT:
3251 /* We never have to worry about the amount being other than a
3252 power of 2, since this case can never be reloaded from a reg. */
3253 if (*amountp != -1)
3254 *amountp = int_log2 (*amountp);
3255 else
3256 abort ();
3257 return "asl";
3259 default:
3260 abort ();
3263 if (*amountp != -1)
3265 /* This is not 100% correct, but follows from the desire to merge
3266 multiplication by a power of 2 with the recognizer for a
3267 shift. >=32 is not a valid shift for "asl", so we must try and
3268 output a shift that produces the correct arithmetical result.
3269 Using lsr #32 is identical except for the fact that the carry bit
3270 is not set correctly if we set the flags; but we never use the
3271 carry bit from such an operation, so we can ignore that. */
3272 if (code == ROTATERT)
3273 *amountp &= 31; /* Rotate is just modulo 32 */
3274 else if (*amountp != (*amountp & 31))
3276 if (code == ASHIFT)
3277 mnem = "lsr";
3278 *amountp = 32;
3281 /* Shifts of 0 are no-ops. */
3282 if (*amountp == 0)
3283 return NULL;
3286 return mnem;
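/* So, for example, (mult x 8) comes out as "asl" with *AMOUNTP == 3,
   while an over-long left shift such as (ashift x 33) comes out as
   "lsr" with *AMOUNTP == 32, which gives the arithmetically correct
   zero result. */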
3290 /* Obtain the shift count from POWER, a power of two. */
3292 HOST_WIDE_INT
3293 int_log2 (power)
3294 HOST_WIDE_INT power;
3296 HOST_WIDE_INT shift = 0;
3298 while (((((HOST_WIDE_INT) 1) << shift) & power) == 0)
3300 if (shift > 31)
3301 abort ();
3302 shift++;
3305 return shift;
3308 /* Output a .ascii pseudo-op, keeping track of lengths. This is because
3309 /bin/as is horribly restrictive. */
3311 void
3312 output_ascii_pseudo_op (stream, p, len)
3313 FILE *stream;
3314 unsigned char *p;
3315 int len;
3317 int i;
3318 int len_so_far = 1000;
3319 int chars_so_far = 0;
3321 for (i = 0; i < len; i++)
3323 register int c = p[i];
3325 if (len_so_far > 50)
3327 if (chars_so_far)
3328 fputs ("\"\n", stream);
3329 fputs ("\t.ascii\t\"", stream);
3330 len_so_far = 0;
3331 arm_increase_location (chars_so_far);
3332 chars_so_far = 0;
3335 if (c == '\"' || c == '\\')
3337 putc('\\', stream);
3338 len_so_far++;
3341 if (c >= ' ' && c < 0177)
3343 putc (c, stream);
3344 len_so_far++;
3346 else
3348 fprintf (stream, "\\%03o", c);
3349 len_so_far +=4;
3352 chars_so_far++;
3355 fputs ("\"\n", stream);
3356 arm_increase_location (chars_so_far);
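/* For example, emitting the three characters 'a', '"', '\n' produces
   something like:

	.ascii "a\"\012"

   with a fresh .ascii directive started whenever the current one passes
   the 50-character limit above. */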
3360 /* Try to determine whether a pattern really clobbers the link register.
3361 This information is useful when peepholing, so that lr need not be pushed
3362 if we combine a call followed by a return.
3363 NOTE: This code does not check for side-effect expressions in a SET_SRC:
3364 such a check should not be needed because these only update an existing
3365 value within a register; the register must still be set elsewhere within
3366 the function. */
3368 static int
3369 pattern_really_clobbers_lr (x)
3370 rtx x;
3372 int i;
3374 switch (GET_CODE (x))
3376 case SET:
3377 switch (GET_CODE (SET_DEST (x)))
3379 case REG:
3380 return REGNO (SET_DEST (x)) == 14;
3382 case SUBREG:
3383 if (GET_CODE (XEXP (SET_DEST (x), 0)) == REG)
3384 return REGNO (XEXP (SET_DEST (x), 0)) == 14;
3386 if (GET_CODE (XEXP (SET_DEST (x), 0)) == MEM)
3387 return 0;
3388 abort ();
3390 default:
3391 return 0;
3394 case PARALLEL:
3395 for (i = 0; i < XVECLEN (x, 0); i++)
3396 if (pattern_really_clobbers_lr (XVECEXP (x, 0, i)))
3397 return 1;
3398 return 0;
3400 case CLOBBER:
3401 switch (GET_CODE (XEXP (x, 0)))
3403 case REG:
3404 return REGNO (XEXP (x, 0)) == 14;
3406 case SUBREG:
3407 if (GET_CODE (XEXP (XEXP (x, 0), 0)) == REG)
3408 return REGNO (XEXP (XEXP (x, 0), 0)) == 14;
3409 abort ();
3411 default:
3412 return 0;
3415 case UNSPEC:
3416 return 1;
3418 default:
3419 return 0;
3423 static int
3424 function_really_clobbers_lr (first)
3425 rtx first;
3427 rtx insn, next;
3429 for (insn = first; insn; insn = next_nonnote_insn (insn))
3431 switch (GET_CODE (insn))
3433 case BARRIER:
3434 case NOTE:
3435 case CODE_LABEL:
3436 case JUMP_INSN: /* Jump insns only change the PC (and conds) */
3437 case INLINE_HEADER:
3438 break;
3440 case INSN:
3441 if (pattern_really_clobbers_lr (PATTERN (insn)))
3442 return 1;
3443 break;
3445 case CALL_INSN:
3446 /* Don't yet know how to handle those calls that are not to a
3447 SYMBOL_REF */
3448 if (GET_CODE (PATTERN (insn)) != PARALLEL)
3449 abort ();
3451 switch (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)))
3453 case CALL:
3454 if (GET_CODE (XEXP (XEXP (XVECEXP (PATTERN (insn), 0, 0), 0), 0))
3455 != SYMBOL_REF)
3456 return 1;
3457 break;
3459 case SET:
3460 if (GET_CODE (XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn),
3461 0, 0)), 0), 0))
3462 != SYMBOL_REF)
3463 return 1;
3464 break;
3466 default: /* Don't recognize it, be safe */
3467 return 1;
3470 /* A call can be made (by peepholing) not to clobber lr iff it is
3471 followed by a return. There may, however, be a use insn in between
3472 if we are returning the result of the call.
3473 If we run off the end of the insn chain, then that means the
3474 call was at the end of the function. Unfortunately we don't
3475 have a return insn for the peephole to recognize, so we
3476 must reject this. (Can this be fixed by adding our own insn?) */
3477 if ((next = next_nonnote_insn (insn)) == NULL)
3478 return 1;
3480 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == USE
3481 && (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
3482 && (REGNO (SET_DEST (XVECEXP (PATTERN (insn), 0, 0)))
3483 == REGNO (XEXP (PATTERN (next), 0))))
3484 if ((next = next_nonnote_insn (next)) == NULL)
3485 return 1;
3487 if (GET_CODE (next) == JUMP_INSN
3488 && GET_CODE (PATTERN (next)) == RETURN)
3489 break;
3490 return 1;
3492 default:
3493 abort ();
3497 /* We have reached the end of the chain so lr was _not_ clobbered */
3498 return 0;
3501 char *
3502 output_return_instruction (operand, really_return)
3503 rtx operand;
3504 int really_return;
3506 char instr[100];
3507 int reg, live_regs = 0;
3508 int volatile_func = (optimize > 0
3509 && TREE_THIS_VOLATILE (current_function_decl));
3511 return_used_this_function = 1;
3513 if (volatile_func)
3515 rtx ops[2];
3516 /* If this function was declared non-returning, and we have found a tail
3517 call, then we have to trust that the called function won't return. */
3518 if (! really_return)
3519 return "";
3521 /* Otherwise, trap an attempted return by aborting. */
3522 ops[0] = operand;
3523 ops[1] = gen_rtx (SYMBOL_REF, Pmode, "abort");
3524 assemble_external_libcall (ops[1]);
3525 output_asm_insn ("bl%d0\t%a1", ops);
3526 return "";
3529 if (current_function_calls_alloca && ! really_return)
3530 abort();
3532 for (reg = 0; reg <= 10; reg++)
3533 if (regs_ever_live[reg] && ! call_used_regs[reg])
3534 live_regs++;
3536 if (live_regs || (regs_ever_live[14] && ! lr_save_eliminated))
3537 live_regs++;
3539 if (frame_pointer_needed)
3540 live_regs += 4;
3542 if (live_regs)
3544 if (lr_save_eliminated || ! regs_ever_live[14])
3545 live_regs++;
3547 if (frame_pointer_needed)
3548 strcpy (instr, "ldm%?%d0ea\t%|fp, {");
3549 else
3550 strcpy (instr, "ldm%?%d0fd\t%|sp!, {");
3552 for (reg = 0; reg <= 10; reg++)
3553 if (regs_ever_live[reg] && ! call_used_regs[reg])
3555 strcat (instr, "%|");
3556 strcat (instr, reg_names[reg]);
3557 if (--live_regs)
3558 strcat (instr, ", ");
3561 if (frame_pointer_needed)
3563 strcat (instr, "%|");
3564 strcat (instr, reg_names[11]);
3565 strcat (instr, ", ");
3566 strcat (instr, "%|");
3567 strcat (instr, reg_names[13]);
3568 strcat (instr, ", ");
3569 strcat (instr, "%|");
3570 strcat (instr, really_return ? reg_names[15] : reg_names[14]);
3572 else
3574 strcat (instr, "%|");
3575 strcat (instr, really_return ? reg_names[15] : reg_names[14]);
3577 strcat (instr, (TARGET_APCS_32 || !really_return) ? "}" : "}^");
3578 output_asm_insn (instr, &operand);
3580 else if (really_return)
3582 strcpy (instr, (TARGET_APCS_32
3583 ? "mov%?%d0\t%|pc, %|lr" : "mov%?%d0s\t%|pc, %|lr"));
3584 output_asm_insn (instr, &operand);
3587 return "";
3590 /* Return nonzero if optimizing and the current function is volatile.
3591 Such functions never return, and many memory cycles can be saved
3592 by not storing register values that will never be needed again.
3593 This optimization was added to speed up context switching in a
3594 kernel application. */
3597 arm_volatile_func ()
3599 return (optimize > 0 && TREE_THIS_VOLATILE (current_function_decl));
3602 /* Return the size of the prologue. It's not too bad if we slightly
3603 over-estimate. */
3605 static int
3606 get_prologue_size ()
3608 return profile_flag ? 12 : 0;
3611 /* The amount of stack adjustment that happens here, in output_return and in
3612 output_epilogue must be exactly the same as was calculated during reload,
3613 or things will point to the wrong place. The only time we can safely
3614 ignore this constraint is when a function has no arguments on the stack,
3615 no stack frame requirement and no live registers except for `lr'. If we
3616 can guarantee that by making all function calls into tail calls and that
3617 lr is not clobbered in any other way, then there is no need to push lr
3618 onto the stack. */
3620 void
3621 output_func_prologue (f, frame_size)
3622 FILE *f;
3623 int frame_size;
3625 int reg, live_regs_mask = 0;
3626 rtx operands[3];
3627 int volatile_func = (optimize > 0
3628 && TREE_THIS_VOLATILE (current_function_decl));
3630 /* Nonzero if we must stuff some register arguments onto the stack as if
3631 they were passed there. */
3632 int store_arg_regs = 0;
3634 if (arm_ccfsm_state || arm_target_insn)
3635 abort (); /* Sanity check */
3637 return_used_this_function = 0;
3638 lr_save_eliminated = 0;
3640 fprintf (f, "\t%s args = %d, pretend = %d, frame = %d\n",
3641 ASM_COMMENT_START, current_function_args_size,
3642 current_function_pretend_args_size, frame_size);
3643 fprintf (f, "\t%s frame_needed = %d, current_function_anonymous_args = %d\n",
3644 ASM_COMMENT_START, frame_pointer_needed,
3645 current_function_anonymous_args);
3647 if (volatile_func)
3648 fprintf (f, "\t%s Volatile function.\n", ASM_COMMENT_START);
3650 if (current_function_anonymous_args && current_function_pretend_args_size)
3651 store_arg_regs = 1;
3653 for (reg = 0; reg <= 10; reg++)
3654 if (regs_ever_live[reg] && ! call_used_regs[reg])
3655 live_regs_mask |= (1 << reg);
3657 if (frame_pointer_needed)
3658 live_regs_mask |= 0xD800;
3659 else if (regs_ever_live[14])
3661 if (! current_function_args_size
3662 && ! function_really_clobbers_lr (get_insns ()))
3663 lr_save_eliminated = 1;
3664 else
3665 live_regs_mask |= 0x4000;
3668 if (live_regs_mask)
3670 /* If a DImode load/store multiple is used, and the base register
3671 is r3, then r4 can become an ever-live register without lr
3672 doing so; in this case we need to push lr as well, or we
3673 will fail to get a proper return. */
3675 live_regs_mask |= 0x4000;
3676 lr_save_eliminated = 0;
3680 if (lr_save_eliminated)
3681 fprintf (f,"\t%s I don't think this function clobbers lr\n",
3682 ASM_COMMENT_START);
3686 void
3687 output_func_epilogue (f, frame_size)
3688 FILE *f;
3689 int frame_size;
3691 int reg, live_regs_mask = 0, code_size = 0;
3692 /* If we need this then it will always be at least this much */
3693 int floats_offset = 24;
3694 rtx operands[3];
3695 int volatile_func = (optimize > 0
3696 && TREE_THIS_VOLATILE (current_function_decl));
3698 if (use_return_insn() && return_used_this_function)
3700 if (frame_size && !(frame_pointer_needed || TARGET_APCS))
3702 abort ();
3704 goto epilogue_done;
3707 /* A volatile function should never return. Call abort. */
3708 if (volatile_func)
3710 rtx op = gen_rtx (SYMBOL_REF, Pmode, "abort");
3711 assemble_external_libcall (op);
3712 output_asm_insn ("bl\t%a0", &op);
3713 code_size = 4;
3714 goto epilogue_done;
3717 for (reg = 0; reg <= 10; reg++)
3718 if (regs_ever_live[reg] && ! call_used_regs[reg])
3720 live_regs_mask |= (1 << reg);
3721 floats_offset += 4;
3724 if (frame_pointer_needed)
3726 for (reg = 23; reg > 15; reg--)
3727 if (regs_ever_live[reg] && ! call_used_regs[reg])
3729 fprintf (f, "\tldfe\t%s%s, [%sfp, #-%d]\n", REGISTER_PREFIX,
3730 reg_names[reg], REGISTER_PREFIX, floats_offset);
3731 floats_offset += 12;
3732 code_size += 4;
3735 live_regs_mask |= 0xA800;
3736 print_multi_reg (f, "ldmea\t%sfp", live_regs_mask,
3737 TARGET_APCS_32 ? FALSE : TRUE);
3738 code_size += 4;
3740 else
3742 /* Restore stack pointer if necessary. */
3743 if (frame_size)
3745 operands[0] = operands[1] = stack_pointer_rtx;
3746 operands[2] = gen_rtx (CONST_INT, VOIDmode, frame_size);
3747 output_add_immediate (operands);
3750 for (reg = 16; reg < 24; reg++)
3751 if (regs_ever_live[reg] && ! call_used_regs[reg])
3753 fprintf (f, "\tldfe\t%s%s, [%ssp], #12\n", REGISTER_PREFIX,
3754 reg_names[reg], REGISTER_PREFIX);
3755 code_size += 4;
3757 if (current_function_pretend_args_size == 0 && regs_ever_live[14])
3759 print_multi_reg (f, "ldmfd\t%ssp!", live_regs_mask | 0x8000,
3760 TARGET_APCS_32 ? FALSE : TRUE);
3761 code_size += 4;
3763 else
3765 if (live_regs_mask || regs_ever_live[14])
3767 live_regs_mask |= 0x4000;
3768 print_multi_reg (f, "ldmfd\t%ssp!", live_regs_mask, FALSE);
3769 code_size += 4;
3771 if (current_function_pretend_args_size)
3773 operands[0] = operands[1] = stack_pointer_rtx;
3774 operands[2] = gen_rtx (CONST_INT, VOIDmode,
3775 current_function_pretend_args_size);
3776 output_add_immediate (operands);
3778 fprintf (f, (TARGET_APCS_32 ? "\tmov\t%spc, %slr\n"
3779 : "\tmovs\t%spc, %slr\n"),
3780 REGISTER_PREFIX, REGISTER_PREFIX);
3781 code_size += 4;
3785 epilogue_done:
3787 /* insn_addresses isn't allocated when not optimizing */
3789 if (optimize > 0)
3790 arm_increase_location (code_size
3791 + insn_addresses[INSN_UID (get_last_insn ())]
3792 + get_prologue_size ());
3794 current_function_anonymous_args = 0;
3797 static void
3798 emit_multi_reg_push (mask)
3799 int mask;
3801 int num_regs = 0;
3802 int i, j;
3803 rtx par;
3805 for (i = 0; i < 16; i++)
3806 if (mask & (1 << i))
3807 num_regs++;
3809 if (num_regs == 0 || num_regs > 16)
3810 abort ();
3812 par = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num_regs));
3814 for (i = 0; i < 16; i++)
3816 if (mask & (1 << i))
3818 XVECEXP (par, 0, 0)
3819 = gen_rtx (SET, VOIDmode, gen_rtx (MEM, BLKmode,
3820 gen_rtx (PRE_DEC, BLKmode,
3821 stack_pointer_rtx)),
3822 gen_rtx (UNSPEC, BLKmode,
3823 gen_rtvec (1, gen_rtx (REG, SImode, i)),
3824 2));
3825 break;
3829 for (j = 1, i++; j < num_regs; i++)
3831 if (mask & (1 << i))
3833 XVECEXP (par, 0, j)
3834 = gen_rtx (USE, VOIDmode, gen_rtx (REG, SImode, i));
3835 j++;
3838 emit_insn (par);
3841 void
3842 arm_expand_prologue ()
3844 int reg;
3845 rtx amount = GEN_INT (- get_frame_size ());
3846 rtx push_insn;
3847 int num_regs;
3848 int live_regs_mask = 0;
3849 int store_arg_regs = 0;
3850 int volatile_func = (optimize > 0
3851 && TREE_THIS_VOLATILE (current_function_decl));
3853 if (current_function_anonymous_args && current_function_pretend_args_size)
3854 store_arg_regs = 1;
3856 if (! volatile_func)
3857 for (reg = 0; reg <= 10; reg++)
3858 if (regs_ever_live[reg] && ! call_used_regs[reg])
3859 live_regs_mask |= 1 << reg;
3861 if (! volatile_func && regs_ever_live[14])
3862 live_regs_mask |= 0x4000;
3864 if (frame_pointer_needed)
3866 live_regs_mask |= 0xD800;
3867 emit_insn (gen_movsi (gen_rtx (REG, SImode, 12),
3868 stack_pointer_rtx));
3871 if (current_function_pretend_args_size)
3873 if (store_arg_regs)
3874 emit_multi_reg_push ((0xf0 >> (current_function_pretend_args_size / 4))
3875 & 0xf);
3876 else
3877 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
3878 GEN_INT (-current_function_pretend_args_size)));
3881 if (live_regs_mask)
3883 /* If we have to push any regs, then we must push lr as well, or
3884 we won't get a proper return. */
3885 live_regs_mask |= 0x4000;
3886 emit_multi_reg_push (live_regs_mask);
3889 /* For now the integer regs are still pushed in output_func_epilogue (). */
3891 if (! volatile_func)
3892 for (reg = 23; reg > 15; reg--)
3893 if (regs_ever_live[reg] && ! call_used_regs[reg])
3894 emit_insn (gen_rtx (SET, VOIDmode,
3895 gen_rtx (MEM, XFmode,
3896 gen_rtx (PRE_DEC, XFmode,
3897 stack_pointer_rtx)),
3898 gen_rtx (REG, XFmode, reg)));
3900 if (frame_pointer_needed)
3901 emit_insn (gen_addsi3 (hard_frame_pointer_rtx, gen_rtx (REG, SImode, 12),
3902 (GEN_INT
3903 (-(4 + current_function_pretend_args_size)))));
3905 if (amount != const0_rtx)
3907 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, amount));
3908 emit_insn (gen_rtx (CLOBBER, VOIDmode,
3909 gen_rtx (MEM, BLKmode, stack_pointer_rtx)));
3912 /* If we are profiling, make sure no instructions are scheduled before
3913 the call to mcount. */
3914 if (profile_flag || profile_block_flag)
3915 emit_insn (gen_blockage ());
3919 /* If CODE is 'd', then X is a condition operand and the instruction
3920 should only be executed if the condition is true.
3921 If CODE is 'D', then X is a condition operand and the instruction
3922 should only be executed if the condition is false: however, if the mode
3923 of the comparison is CCFPEmode, then always execute the instruction -- we
3924 do this because in these circumstances !GE does not necessarily imply LT;
3925 in these cases the instruction pattern will take care to make sure that
3926 an instruction containing %d will follow, thereby undoing the effects of
3927 doing this instruction unconditionally.
3928 If CODE is 'N' then X is a floating point operand that must be negated
3929 before output.
3930 If CODE is 'B' then output a bitwise inverted value of X (a const int).
3931 If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */
3933 void
3934 arm_print_operand (stream, x, code)
3935 FILE *stream;
3936 rtx x;
3937 int code;
3939 switch (code)
3941 case '@':
3942 fputs (ASM_COMMENT_START, stream);
3943 return;
3945 case '|':
3946 fputs (REGISTER_PREFIX, stream);
3947 return;
3949 case '?':
3950 if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
3951 fputs (arm_condition_codes[arm_current_cc], stream);
3952 return;
3954 case 'N':
3956 REAL_VALUE_TYPE r;
3957 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3958 r = REAL_VALUE_NEGATE (r);
3959 fprintf (stream, "%s", fp_const_from_val (&r));
3961 return;
3963 case 'B':
3964 if (GET_CODE (x) == CONST_INT)
3965 fprintf (stream,
3966 #if HOST_BITS_PER_WIDE_INT == HOST_BITS_PER_INT
3967 "%d",
3968 #else
3969 "%ld",
3970 #endif
3971 ARM_SIGN_EXTEND (~ INTVAL (x)));
3972 else
3974 putc ('~', stream);
3975 output_addr_const (stream, x);
3977 return;
3979 case 'i':
3980 fprintf (stream, "%s", arithmetic_instr (x, 1));
3981 return;
3983 case 'I':
3984 fprintf (stream, "%s", arithmetic_instr (x, 0));
3985 return;
3987 case 'S':
3989 HOST_WIDE_INT val;
3990 char *shift = shift_op (x, &val);
3992 if (shift)
3994 fprintf (stream, ", %s ", shift_op (x, &val));
3995 if (val == -1)
3996 arm_print_operand (stream, XEXP (x, 1), 0);
3997 else
3998 fprintf (stream,
3999 #if HOST_BITS_PER_WIDE_INT == HOST_BITS_PER_INT
4000 "#%d",
4001 #else
4002 "#%ld",
4003 #endif
4004 val);
4007 return;
4009 case 'R':
4010 if (REGNO (x) > 15)
4011 abort ();
4012 fputs (REGISTER_PREFIX, stream);
4013 fputs (reg_names[REGNO (x) + 1], stream);
4014 return;
4016 case 'm':
4017 fputs (REGISTER_PREFIX, stream);
4018 if (GET_CODE (XEXP (x, 0)) == REG)
4019 fputs (reg_names[REGNO (XEXP (x, 0))], stream);
4020 else
4021 fputs (reg_names[REGNO (XEXP (XEXP (x, 0), 0))], stream);
4022 return;
4024 case 'M':
4025 fprintf (stream, "{%s%s-%s%s}", REGISTER_PREFIX, reg_names[REGNO (x)],
4026 REGISTER_PREFIX, reg_names[REGNO (x) - 1
4027 + ((GET_MODE_SIZE (GET_MODE (x))
4028 + GET_MODE_SIZE (SImode) - 1)
4029 / GET_MODE_SIZE (SImode))]);
4030 return;
4032 case 'd':
4033 if (x)
4034 fputs (arm_condition_codes[get_arm_condition_code (x)],
4035 stream);
4036 return;
4038 case 'D':
4039 if (x && (flag_fast_math
4040 || GET_CODE (x) == EQ || GET_CODE (x) == NE
4041 || (GET_MODE (XEXP (x, 0)) != CCFPEmode
4042 && (GET_MODE_CLASS (GET_MODE (XEXP (x, 0)))
4043 != MODE_FLOAT))))
4044 fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
4045 (get_arm_condition_code (x))],
4046 stream);
4047 return;
4049 default:
4050 if (x == 0)
4051 abort ();
4053 if (GET_CODE (x) == REG)
4055 fputs (REGISTER_PREFIX, stream);
4056 fputs (reg_names[REGNO (x)], stream);
4058 else if (GET_CODE (x) == MEM)
4060 output_memory_reference_mode = GET_MODE (x);
4061 output_address (XEXP (x, 0));
4063 else if (GET_CODE (x) == CONST_DOUBLE)
4064 fprintf (stream, "#%s", fp_immediate_constant (x));
4065 else if (GET_CODE (x) == NEG)
4066 abort (); /* This should never happen now. */
4067 else
4069 fputc ('#', stream);
4070 output_addr_const (stream, x);
4075 /* Increase the `arm_text_location' by AMOUNT if we're in the text
4076 segment. */
4078 void
4079 arm_increase_location (amount)
4080 int amount;
4082 if (in_text_section ())
4083 arm_text_location += amount;
4087 /* Output a label definition. If this label is within the .text segment, it
4088 is stored in OFFSET_TABLE, to be used when building `llc' instructions.
4089 Maybe GCC remembers names not starting with a `*' for a long time, but this
4090 is a minority anyway, so we just make a copy. Do not store the leading `*'
4091 if the name starts with one. */
4093 void
4094 arm_asm_output_label (stream, name)
4095 FILE *stream;
4096 char *name;
4098 char *real_name, *s;
4099 struct label_offset *cur;
4100 int hash = 0;
4102 ARM_OUTPUT_LABEL (stream, name);
4103 if (! in_text_section ())
4104 return;
4106 if (name[0] == '*')
4108 real_name = xmalloc (1 + strlen (&name[1]));
4109 strcpy (real_name, &name[1]);
4111 else
4113 real_name = xmalloc (2 + strlen (name));
4114 strcpy (real_name, USER_LABEL_PREFIX);
4115 strcat (real_name, name);
4117 for (s = real_name; *s; s++)
4118 hash += *s;
4120 hash = hash % LABEL_HASH_SIZE;
4121 cur = (struct label_offset *) xmalloc (sizeof (struct label_offset));
4122 cur->name = real_name;
4123 cur->offset = arm_text_location;
4124 cur->cdr = offset_table[hash];
4125 offset_table[hash] = cur;
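/* A lookup over this table would follow the same hash; a sketch along
   these lines (the real consumer of the table lives elsewhere in the
   port, so the name here is made up): */
#if 0
static struct label_offset *
find_label_offset (name)
     char *name;
{
  char *s;
  struct label_offset *cur;
  int hash = 0;

  for (s = name; *s; s++)
    hash += *s;
  hash %= LABEL_HASH_SIZE;

  for (cur = offset_table[hash]; cur; cur = cur->cdr)
    if (strcmp (cur->name, name) == 0)
      return cur;
  return NULL;
}
#endif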
4128 /* Output code resembling an .lcomm directive. /bin/as doesn't have this
4129 directive, hence this hack, which works by reserving some `.space' in the
4130 bss segment directly.
4132 XXX This is a severe hack, which is guaranteed NOT to work since it doesn't
4133 define STATIC COMMON space but merely STATIC BSS space. */
4135 void
4136 output_lcomm_directive (stream, name, size, rounded)
4137 FILE *stream;
4138 char *name;
4139 int size, rounded;
4141 fprintf (stream, "\n\t.bss\t%s .lcomm\n", ASM_COMMENT_START);
4142 assemble_name (stream, name);
4143 fprintf (stream, ":\t.space\t%d\n", rounded);
4144 if (in_text_section ())
4145 fputs ("\n\t.text\n", stream);
4146 else
4147 fputs ("\n\t.data\n", stream);
4150 /* A finite state machine takes care of noticing whether or not instructions
4151 can be conditionally executed, and thus decrease execution time and code
4152 size by deleting branch instructions. The fsm is controlled by
4153 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
4155 /* The states of the fsm controlling condition codes are:
4156 0: normal, do nothing special
4157 1: make ASM_OUTPUT_OPCODE not output this instruction
4158 2: make ASM_OUTPUT_OPCODE not output this instruction
4159 3: make instructions conditional
4160 4: make instructions conditional
4162 State transitions (state->state by whom under condition):
4163 0 -> 1 final_prescan_insn if the `target' is a label
4164 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
4165 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
4166 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
4167 3 -> 0 ASM_OUTPUT_INTERNAL_LABEL if the `target' label is reached
4168 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
4169 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
4170 (the target insn is arm_target_insn).
4172 If the jump clobbers the conditions then we use states 2 and 4.
4174 A similar thing can be done with conditional return insns.
4176 XXX In case the `target' is an unconditional branch, this conditionalising
4177 of the instructions always reduces code size, but not always execution
4178 time. But then, I want to reduce the code size to somewhere near what
4179 /bin/cc produces. */
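/* The states above, as an illustrative enum (the code itself just uses
   the bare integers in arm_ccfsm_state; these names are invented): */
#if 0
enum ccfsm_state
{
  CCFSM_NORMAL = 0,		/* 0: do nothing special */
  CCFSM_SKIP_BRANCH_TO_LABEL,	/* 1: branch not output; conds usable */
  CCFSM_SKIP_BRANCH_CLOBBER,	/* 2: branch not output; conds clobbered */
  CCFSM_CONDITIONALISE,		/* 3: conditionalising up to the label */
  CCFSM_CONDITIONALISE_CLOBBER	/* 4: conditionalising up to the branch */
};
#endif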
4181 /* Returns the index of the ARM condition code string in
4182 `arm_condition_codes'. COMPARISON should be an rtx like
4183 `(eq (...) (...))'. */
4186 get_arm_condition_code (comparison)
4187 rtx comparison;
4189 switch (GET_CODE (comparison))
4191 case NE: return (1);
4192 case EQ: return (0);
4193 case GE: return (10);
4194 case GT: return (12);
4195 case LE: return (13);
4196 case LT: return (11);
4197 case GEU: return (2);
4198 case GTU: return (8);
4199 case LEU: return (9);
4200 case LTU: return (3);
4201 default: abort ();
4203 /*NOTREACHED*/
4204 return (42);
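/* The indices returned above follow the ARM condition-field encoding,
   so arm_condition_codes[] is presumably ordered "eq", "ne", "cs",
   "cc", "mi", "pl", "vs", "vc", "hi", "ls", "ge", "lt", "gt", "le",
   "al", "nv": e.g. GEU maps to 2 ("cs", carry set) and GTU to 8
   ("hi", unsigned higher). */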
4208 void
4209 final_prescan_insn (insn, opvec, noperands)
4210 rtx insn;
4211 rtx *opvec;
4212 int noperands;
4214 /* BODY will hold the body of INSN. */
4215 register rtx body = PATTERN (insn);
4217 /* This will be 1 if trying to repeat the trick, and things need to be
4218 reversed if it appears to fail. */
4219 int reverse = 0;
4221 /* JUMP_CLOBBERS nonzero implies that the condition codes are clobbered
4222 if the branch is taken, even if the rtl suggests otherwise. It also
4223 means that we have to grub around within the jump expression to find
4224 out what the conditions are when the jump isn't taken. */
4225 int jump_clobbers = 0;
4227 /* If we start with a return insn, we only succeed if we find another one. */
4228 int seeking_return = 0;
4230 /* START_INSN will hold the insn from where we start looking. This is the
4231 first insn after the following code_label if REVERSE is true. */
4232 rtx start_insn = insn;
4234 /* If in state 4, check if the target branch is reached, in order to
4235 change back to state 0. */
4236 if (arm_ccfsm_state == 4)
4238 if (insn == arm_target_insn)
4240 arm_target_insn = NULL;
4241 arm_ccfsm_state = 0;
4243 return;
4246 /* If in state 3, it is possible to repeat the trick, if this insn is an
4247 unconditional branch to a label, and immediately following this branch
4248 is the previous target label which is only used once, and the label this
4249 branch jumps to is not too far off. */
4250 if (arm_ccfsm_state == 3)
4252 if (simplejump_p (insn))
4254 start_insn = next_nonnote_insn (start_insn);
4255 if (GET_CODE (start_insn) == BARRIER)
4257 /* XXX Isn't this always a barrier? */
4258 start_insn = next_nonnote_insn (start_insn);
4260 if (GET_CODE (start_insn) == CODE_LABEL
4261 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
4262 && LABEL_NUSES (start_insn) == 1)
4263 reverse = TRUE;
4264 else
4265 return;
4267 else if (GET_CODE (body) == RETURN)
4269 start_insn = next_nonnote_insn (start_insn);
4270 if (GET_CODE (start_insn) == BARRIER)
4271 start_insn = next_nonnote_insn (start_insn);
4272 if (GET_CODE (start_insn) == CODE_LABEL
4273 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
4274 && LABEL_NUSES (start_insn) == 1)
4276 reverse = TRUE;
4277 seeking_return = 1;
4279 else
4280 return;
4282 else
4283 return;
4286 if (arm_ccfsm_state != 0 && !reverse)
4287 abort ();
4288 if (GET_CODE (insn) != JUMP_INSN)
4289 return;
4291 /* This jump might be paralleled with a clobber of the condition codes;
4292 the jump should always come first */
4293 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
4294 body = XVECEXP (body, 0, 0);
4296 #if 0
4297 /* If this is a conditional return then we don't want to know */
4298 if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
4299 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
4300 && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
4301 || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
4302 return;
4303 #endif
4305 if (reverse
4306 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
4307 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
4309 int insns_skipped = 0, fail = FALSE, succeed = FALSE;
4310 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
4311 int then_not_else = TRUE;
4312 rtx this_insn = start_insn, label = 0;
4314 if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
4316 /* The code below is wrong for these, and I haven't time to
4317 fix it now. So we just do the safe thing and return. This
4318 whole function needs re-writing anyway. */
4319 jump_clobbers = 1;
4320 return;
4323 /* Register the insn jumped to. */
4324 if (reverse)
4326 if (!seeking_return)
4327 label = XEXP (SET_SRC (body), 0);
4329 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
4330 label = XEXP (XEXP (SET_SRC (body), 1), 0);
4331 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
4333 label = XEXP (XEXP (SET_SRC (body), 2), 0);
4334 then_not_else = FALSE;
4336 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
4337 seeking_return = 1;
4338 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
4340 seeking_return = 1;
4341 then_not_else = FALSE;
4343 else
4344 abort ();
4346 /* See how many insns this branch skips, and what kind of insns. If all
4347 insns are okay, and the label or unconditional branch to the same
4348 label is not too far away, succeed. */
4349 for (insns_skipped = 0;
4350 !fail && !succeed && insns_skipped < MAX_INSNS_SKIPPED;
4351 insns_skipped++)
4353 rtx scanbody;
4355 this_insn = next_nonnote_insn (this_insn);
4356 if (!this_insn)
4357 break;
4359 scanbody = PATTERN (this_insn);
4361 switch (GET_CODE (this_insn))
4363 case CODE_LABEL:
4364 /* Succeed if it is the target label, otherwise fail since
4365 control falls in from somewhere else. */
4366 if (this_insn == label)
4368 if (jump_clobbers)
4370 arm_ccfsm_state = 2;
4371 this_insn = next_nonnote_insn (this_insn);
4373 else
4374 arm_ccfsm_state = 1;
4375 succeed = TRUE;
4377 else
4378 fail = TRUE;
4379 break;
4381 case BARRIER:
4382 /* Succeed if the following insn is the target label.
4383 Otherwise fail.
4384 If return insns are used then the last insn in a function
4385 will be a barrier. */
4386 this_insn = next_nonnote_insn (this_insn);
4387 if (this_insn && this_insn == label)
4389 if (jump_clobbers)
4391 arm_ccfsm_state = 2;
4392 this_insn = next_nonnote_insn (this_insn);
4394 else
4395 arm_ccfsm_state = 1;
4396 succeed = TRUE;
4398 else
4399 fail = TRUE;
4400 break;
4402 case CALL_INSN:
4403 /* If using 32-bit addresses the cc is not preserved over
4404 calls */
4405 if (TARGET_APCS_32)
4406 fail = TRUE;
4407 break;
4409 case JUMP_INSN:
4410 /* If this is an unconditional branch to the same label, succeed.
4411 If it is to another label, do nothing. If it is conditional,
4412 fail. */
4413 /* XXX Probably, the tests for the SET and the PC are unnecessary. */
4415 if (GET_CODE (scanbody) == SET
4416 && GET_CODE (SET_DEST (scanbody)) == PC)
4418 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
4419 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
4421 arm_ccfsm_state = 2;
4422 succeed = TRUE;
4424 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
4425 fail = TRUE;
4427 else if (GET_CODE (scanbody) == RETURN
4428 && seeking_return)
4430 arm_ccfsm_state = 2;
4431 succeed = TRUE;
4433 else if (GET_CODE (scanbody) == PARALLEL)
4435 switch (get_attr_conds (this_insn))
4437 case CONDS_NOCOND:
4438 break;
4439 default:
4440 fail = TRUE;
4441 break;
4444 break;
4446 case INSN:
4447 /* Instructions using or affecting the condition codes make it
4448 fail. */
4449 if ((GET_CODE (scanbody) == SET
4450 || GET_CODE (scanbody) == PARALLEL)
4451 && get_attr_conds (this_insn) != CONDS_NOCOND)
4452 fail = TRUE;
4453 break;
4455 default:
4456 break;
      if (succeed)
        {
          if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
            arm_target_label = CODE_LABEL_NUMBER (label);
          else if (seeking_return || arm_ccfsm_state == 2)
            {
              while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
                {
                  this_insn = next_nonnote_insn (this_insn);
                  if (this_insn && (GET_CODE (this_insn) == BARRIER
                                    || GET_CODE (this_insn) == CODE_LABEL))
                    abort ();
                }
              if (!this_insn)
                {
                  /* Oh dear!  We ran off the end; give up.  */
                  recog (PATTERN (insn), insn, NULL_PTR);
                  arm_ccfsm_state = 0;
                  arm_target_insn = NULL;
                  return;
                }
              arm_target_insn = this_insn;
            }
          else
            abort ();
          if (jump_clobbers)
            {
              if (reverse)
                abort ();
              arm_current_cc =
                get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
                                                          0), 0), 1));
              if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
                arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
              if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
                arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
            }
          else
            {
              /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
                 what it was.  */
              if (!reverse)
                arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
                                                               0));
            }

          if (reverse || then_not_else)
            arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
        }

      /* Restore recog_operand (getting the attributes of other insns can
         destroy this array, but final.c assumes that it remains intact
         across this call; since the insn has been recognized already we
         call recog direct).  */
      recog (PATTERN (insn), insn, NULL_PTR);
    }
}
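/* A sketch of how the state machine above is driven (this describes the
   standard GCC mechanism; the exact macro definition lives in arm.h and is
   not quoted verbatim here): final.c invokes this function, via the
   FINAL_PRESCAN_INSN target macro, on each insn just before outputting it.
   The output templates then test arm_ccfsm_state and print the condition
   selected by arm_current_cc on each insn in the skipped region (on this
   port presumably via the `%?' template prefix -- an assumption about
   arm.md, which is not shown here).  */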
#ifdef AOF_ASSEMBLER
/* Special functions only needed when producing AOF syntax assembler.  */

int arm_text_section_count = 1;

char *
aof_text_section (in_readonly)
     int in_readonly;
{
  static char buf[100];
  if (in_readonly)
    return "";
  sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
           arm_text_section_count++);
  if (flag_pic)
    strcat (buf, ", PIC, REENTRANT");
  return buf;
}
static int arm_data_section_count = 1;

char *
aof_data_section ()
{
  static char buf[100];
  sprintf (buf, "\tAREA |C$$data%d|, DATA", arm_data_section_count++);
  return buf;
}
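/* For instance, the first text and data sections of a file come out as

	AREA |C$$code1|, CODE, READONLY
	AREA |C$$data1|, DATA

   with ", PIC, REENTRANT" appended to the code AREA when compiling
   position-independent code.  */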
/* The AOF assembler is religiously strict about declarations of
   imported and exported symbols, so that it is impossible to declare
   a function as imported near the beginning of the file, and then to
   export it later on.  It is, however, possible to delay the decision
   until all the functions in the file have been compiled.  To get
   around this, we maintain a list of the imports and exports, and
   delete from it any that are subsequently defined.  At the end of
   compilation we spit the remainder of the list out before the END
   directive.  */
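/* A sketch of the intended call pattern (inferred from the comment above,
   not a verbatim trace of the compiler):

	aof_add_import (name);	   symbol referenced; may need an IMPORT
	aof_delete_import (name);  symbol subsequently defined in this file
	aof_dump_imports (f);	   at end of compilation, emit what is left

   so that only symbols which remain genuinely external are IMPORTed.  */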
struct import
{
  struct import *next;
  char *name;
};

static struct import *imports_list = NULL;

void
aof_add_import (name)
     char *name;
{
  struct import *new;

  for (new = imports_list; new; new = new->next)
    if (new->name == name)
      return;

  new = (struct import *) xmalloc (sizeof (struct import));
  new->next = imports_list;
  imports_list = new;
  new->name = name;
}
void
aof_delete_import (name)
     char *name;
{
  struct import **old;

  for (old = &imports_list; *old; old = & (*old)->next)
    {
      if ((*old)->name == name)
        {
          *old = (*old)->next;
          return;
        }
    }
}
int arm_main_function = 0;

void
aof_dump_imports (f)
     FILE *f;
{
  /* The AOF assembler needs this to cause the startup code to be extracted
     from the library.  Bringing in __main causes the whole thing to work
     automagically.  */
  if (arm_main_function)
    {
      text_section ();
      fputs ("\tIMPORT __main\n", f);
      fputs ("\tDCD __main\n", f);
    }

  /* Now dump the remaining imports.  */
  while (imports_list)
    {
      fprintf (f, "\tIMPORT\t");
      assemble_name (f, imports_list->name);
      fputc ('\n', f);
      imports_list = imports_list->next;
    }
}
#endif /* AOF_ASSEMBLER */