/* Output routines for GCC for ARM/RISCiX.
   Copyright (C) 1991, 1993, 1994, 1995 Free Software Foundation, Inc.
   Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
   and Martin Simmons (@harleqn.co.uk).
   More major hacks by Richard Earnshaw (rwe11@cl.cam.ac.uk)

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include <stdio.h>
#include <string.h>
#include "assert.h"
#include "config.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-flags.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "reload.h"
#include "tree.h"
#include "expr.h"

/* The maximum number of insns skipped which will be conditionalised if
   possible.  */
#define MAX_INSNS_SKIPPED  5

/* Some function declarations.  */
extern FILE *asm_out_file;
extern char *output_multi_immediate ();
extern void arm_increase_location ();

HOST_WIDE_INT int_log2 PROTO ((HOST_WIDE_INT));
static int get_prologue_size PROTO ((void));
static int arm_gen_constant PROTO ((enum rtx_code, enum machine_mode,
                                    HOST_WIDE_INT, rtx, rtx, int, int));
/* Define the information needed to generate branch insns.  This is
   stored from the compare operation.  */

rtx arm_compare_op0, arm_compare_op1;
int arm_compare_fp;

/* What type of cpu are we compiling for?  */
enum processor_type arm_cpu;

/* What type of floating point are we compiling for?  */
enum floating_point_type arm_fpu;

/* What program mode is the cpu running in?  26-bit mode or 32-bit mode */
enum prog_mode_type arm_prgmode;

char *target_cpu_name = ARM_CPU_NAME;
char *target_fpe_name = NULL;

/* Nonzero if this is an "M" variant of the processor.  */
int arm_fast_multiply = 0;

/* Nonzero if this chip supports the ARM Architecture 4 extensions */
int arm_arch4 = 0;

/* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
   must report the mode of the memory reference from PRINT_OPERAND to
   PRINT_OPERAND_ADDRESS.  */
enum machine_mode output_memory_reference_mode;

/* Nonzero if the prologue must set up `fp'.  */
int current_function_anonymous_args;

/* Location counter of .text segment.  */
int arm_text_location = 0;

/* Set to one if we think that lr is only saved because of subroutine calls,
   but all of these can be `put after' return insns */
int lr_save_eliminated;

/* A hash table is used to store text segment labels and their associated
   offset from the start of the text segment.  */
struct label_offset
{
  char *name;
  int offset;
  struct label_offset *cdr;
};

#define LABEL_HASH_SIZE  257

static struct label_offset *offset_table[LABEL_HASH_SIZE];

/* Set to 1 when a return insn is output, this means that the epilogue
   is not needed.  */
static int return_used_this_function;

static int arm_constant_limit = 3;

/* For an explanation of these variables, see final_prescan_insn below.  */
int arm_ccfsm_state;
int arm_current_cc;
rtx arm_target_insn;
int arm_target_label;

/* The condition codes of the ARM, and the inverse function.  */
char *arm_condition_codes[] =
{
  "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
  "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
};

#define ARM_INVERSE_CONDITION_CODE(X)  ((X) ^ 1)
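/* Illustrative note (not part of the original source): the table above is
   laid out so that each condition code sits next to its logical inverse,
   which is why XOR with 1 yields the inverse code.  For example:

     arm_condition_codes[0]  == "eq",  ARM_INVERSE_CONDITION_CODE (0)  == 1  ("ne")
     arm_condition_codes[2]  == "cs",  ARM_INVERSE_CONDITION_CODE (2)  == 3  ("cc")
     arm_condition_codes[10] == "ge",  ARM_INVERSE_CONDITION_CODE (10) == 11 ("lt")  */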
/* Initialization code */

#define FL_CO_PROC    0x01            /* Has external co-processor bus */
#define FL_FAST_MULT  0x02            /* Fast multiply */
#define FL_MODE26     0x04            /* 26-bit mode support */
#define FL_MODE32     0x08            /* 32-bit mode support */
#define FL_ARCH4      0x10            /* Architecture rel 4 */
#define FL_THUMB      0x20            /* Thumb aware */

struct processors
{
  char *name;
  enum processor_type type;
  unsigned int flags;
};

/* Not all of these give usefully different compilation alternatives,
   but there is no simple way of generalizing them.  */
static struct processors all_procs[] =
{
  {"arm2",     PROCESSOR_ARM2, FL_CO_PROC | FL_MODE26},
  {"arm250",   PROCESSOR_ARM2, FL_CO_PROC | FL_MODE26},
  {"arm3",     PROCESSOR_ARM2, FL_CO_PROC | FL_MODE26},
  {"arm6",     PROCESSOR_ARM6, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm60",    PROCESSOR_ARM6, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm600",   PROCESSOR_ARM6, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm610",   PROCESSOR_ARM6, FL_MODE32 | FL_MODE26},
  {"arm620",   PROCESSOR_ARM6, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm7",     PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm70",    PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm7d",    PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm7di",   PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm7dm",   PROCESSOR_ARM7, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
                                | FL_MODE26)},
  {"arm7dmi",  PROCESSOR_ARM7, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
                                | FL_MODE26)},
  {"arm700",   PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm700i",  PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm710",   PROCESSOR_ARM7, FL_MODE32 | FL_MODE26},
  {"arm710c",  PROCESSOR_ARM7, FL_MODE32 | FL_MODE26},
  {"arm7500",  PROCESSOR_ARM7, FL_MODE32 | FL_MODE26},
  {"arm7tdmi", PROCESSOR_ARM7, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
                                | FL_ARCH4 | FL_THUMB)},
  {NULL, 0, 0}
};
/* Fix up any incompatible options that the user has specified.
   This has now turned into a maze.  */
void
arm_override_options ()
{
  int arm_thumb_aware = 0;

  if (write_symbols != NO_DEBUG && flag_omit_frame_pointer)
    warning ("-g with -fomit-frame-pointer may not give sensible debugging");

  if (TARGET_POKE_FUNCTION_NAME)
    target_flags |= ARM_FLAG_APCS_FRAME;

  if (TARGET_6)
    {
      warning ("Option '-m6' deprecated.  Use: '-mapcs-32' or -mcpu-<proc>");
      target_flags |= ARM_FLAG_APCS_32;
      arm_cpu = PROCESSOR_ARM6;
    }

  if (TARGET_3)
    {
      warning ("Option '-m3' deprecated.  Use: '-mapcs-26' or -mcpu-<proc>");
      target_flags &= ~ARM_FLAG_APCS_32;
      arm_cpu = PROCESSOR_ARM2;
    }

  if ((TARGET_3 || TARGET_6) && target_cpu_name != NULL)
    fatal ("Incompatible mix of old and new options.  -m%d and -mcpu-%s",
           TARGET_3 ? 3 : 6, target_cpu_name);

  if (TARGET_APCS_REENT && flag_pic)
    fatal ("-fpic and -mapcs-reent are incompatible");

  if (TARGET_APCS_REENT)
    warning ("APCS reentrant code not supported.  Ignored");

  if (flag_pic)
    warning ("Position independent code not supported.  Ignored");

  if (TARGET_APCS_FLOAT)
    warning ("Passing floating point arguments in fp regs not yet supported");

  if (TARGET_APCS_STACK && ! TARGET_APCS)
    {
      warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
      target_flags |= ARM_FLAG_APCS_FRAME;
    }

  arm_cpu = TARGET_6 ? PROCESSOR_ARM6 : PROCESSOR_ARM2;
  arm_fpu = FP_HARD;

  if (target_cpu_name != NULL)
    {
      char *c = target_cpu_name;
      struct processors *proc;

      /* Match against the supported types.  */
      for (proc = all_procs; proc->name != NULL; proc++)
        {
          if (strcmp (proc->name, c) == 0)
            break;
        }

      if (proc->name)
        {
          arm_cpu = proc->type;

          /* Default value for floating point code... if no co-processor
             bus, then schedule for emulated floating point.  Otherwise,
             assume the user has an FPA, unless overridden with -mfpe-...  */
          if ((proc->flags & FL_CO_PROC) == 0)
            arm_fpu = FP_SOFT3;
          else
            arm_fpu = FP_HARD;
          arm_fast_multiply = (proc->flags & FL_FAST_MULT) != 0;
          arm_arch4 = (proc->flags & FL_ARCH4) != 0;
          arm_thumb_aware = (proc->flags & FL_THUMB) != 0;
          /* Processors with a load delay slot can load constants faster
             from the pool than it takes to construct them, so reduce the
             complexity of the constant that we will try to generate
             inline.  */
        }
      else
        fatal ("Unrecognized cpu type: %s", target_cpu_name);
    }

  if (target_fpe_name)
    {
      if (strcmp (target_fpe_name, "2") == 0)
        arm_fpu = FP_SOFT2;
      else if (strcmp (target_fpe_name, "3") == 0)
        arm_fpu = FP_SOFT3;
      else
        fatal ("Invalid floating point emulation option: -mfpe-%s",
               target_fpe_name);
    }

  if (TARGET_THUMB_INTERWORK && ! arm_thumb_aware)
    {
      warning ("This processor variant does not support Thumb interworking");
      target_flags &= ~ARM_FLAG_THUMB;
    }

  if (TARGET_FPE && arm_fpu != FP_HARD)
    arm_fpu = FP_SOFT2;

  /* For arm2/3 there is no need to do any scheduling if there is only
     a floating point emulator, or we are doing software floating-point.  */
  if ((TARGET_SOFT_FLOAT || arm_fpu != FP_HARD) && arm_cpu == PROCESSOR_ARM2)
    flag_schedule_insns = flag_schedule_insns_after_reload = 0;

  arm_prgmode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;
}
/* Return 1 if it is possible to return using a single instruction */

int
use_return_insn ()
{
  int regno;

  if (!reload_completed || current_function_pretend_args_size
      || current_function_anonymous_args
      || (get_frame_size () && !(TARGET_APCS || frame_pointer_needed)))
    return 0;

  /* Can't be done if any of the FPU regs are pushed, since this also
     requires an insn */
  for (regno = 20; regno < 24; regno++)
    if (regs_ever_live[regno])
      return 0;

  return 1;
}

/* Return TRUE if int I is a valid immediate ARM constant.  */

int
const_ok_for_arm (i)
     HOST_WIDE_INT i;
{
  unsigned HOST_WIDE_INT mask = ~0xFF;

  /* Fast return for 0 and powers of 2 */
  if ((i & (i - 1)) == 0)
    return TRUE;

  do
    {
      if ((i & mask & (unsigned HOST_WIDE_INT) 0xffffffff) == 0)
        return TRUE;
      mask =
        (mask << 2) | ((mask & (unsigned HOST_WIDE_INT) 0xffffffff)
                       >> (32 - 2)) | ~((unsigned HOST_WIDE_INT) 0xffffffff);
    } while (mask != ~0xFF);

  return FALSE;
}
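/* Illustrative note (not part of the original source): an ARM
   data-processing immediate is an 8-bit value rotated right by an even
   amount, which is what the loop above tests by stepping the mask two bits
   at a time.  For example:

     0x000000ff   valid (0xff, no rotation)
     0x00ff0000   valid (0xff rotated right by 16)
     0xf000000f   valid (0xff rotated right by 4)
     0x00000101   invalid (the set bits do not fit in 8 bits under any
                  even rotation)  */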
/* Return true if I is a valid constant for the operation CODE.  */

int
const_ok_for_op (i, code, mode)
     HOST_WIDE_INT i;
     enum rtx_code code;
     enum machine_mode mode;
{
  if (const_ok_for_arm (i))
    return 1;

  switch (code)
    {
    case PLUS:
      return const_ok_for_arm (ARM_SIGN_EXTEND (-i));

    case MINUS:         /* Should only occur with (MINUS I reg) => rsb */
    case XOR:
    case IOR:
      return 0;

    case AND:
      return const_ok_for_arm (ARM_SIGN_EXTEND (~i));

    default:
      abort ();
    }
}
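/* Illustrative note (not part of the original source): the PLUS and AND
   cases above succeed because the ARM has instructions that take the
   negated or inverted immediate instead.  A minimal sketch:

     add  r0, r1, #-66     @ -66 is not encodable, but
     sub  r0, r1, #66      @ SUB with the negated constant is

     and  r0, r1, #~0xff   @ 0xffffff00 is not encodable, but
     bic  r0, r1, #0xff    @ BIC with the inverted constant is  */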
/* Emit a sequence of insns to handle a large constant.
   CODE is the code of the operation required, it can be any of SET, PLUS,
   IOR, AND, XOR, MINUS;
   MODE is the mode in which the operation is being performed;
   VAL is the integer to operate on;
   SOURCE is the other operand (a register, or a null-pointer for SET);
   SUBTARGETS means it is safe to create scratch registers if that will
   either produce a simpler sequence, or we will want to cse the values.
   Return value is the number of insns emitted.  */

int
arm_split_constant (code, mode, val, target, source, subtargets)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
{
  if (subtargets || code == SET
      || (GET_CODE (target) == REG && GET_CODE (source) == REG
          && REGNO (target) != REGNO (source)))
    {
      rtx temp;

      if (arm_gen_constant (code, mode, val, target, source, 1, 0)
          > arm_constant_limit + (code != SET))
        {
          if (code == SET)
            {
              /* Currently SET is the only monadic value for CODE, all
                 the rest are dyadic.  */
              emit_insn (gen_rtx (SET, VOIDmode, target, GEN_INT (val)));
              return 1;
            }
          else
            {
              rtx temp = subtargets ? gen_reg_rtx (mode) : target;

              emit_insn (gen_rtx (SET, VOIDmode, temp, GEN_INT (val)));
              /* For MINUS, the value is subtracted from, since we never
                 have subtraction of a constant.  */
              if (code == MINUS)
                emit_insn (gen_rtx (SET, VOIDmode, target,
                                    gen_rtx (code, mode, temp, source)));
              else
                emit_insn (gen_rtx (SET, VOIDmode, target,
                                    gen_rtx (code, mode, source, temp)));
              return 2;
            }
        }
    }

  return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
}
/* As above, but extra parameter GENERATE which, if clear, suppresses
   RTL generation.  */
static int
arm_gen_constant (code, mode, val, target, source, subtargets, generate)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
     int generate;
{
  int can_add = 0;
  int can_invert = 0;
  int can_negate = 0;
  int can_negate_initial = 0;
  int can_shift = 0;
  int i;
  int num_bits_set = 0;
  int set_sign_bit_copies = 0;
  int clear_sign_bit_copies = 0;
  int clear_zero_bit_copies = 0;
  int set_zero_bit_copies = 0;
  int insns = 0;
  rtx new_src;
  unsigned HOST_WIDE_INT temp1, temp2;
  unsigned HOST_WIDE_INT remainder = val & 0xffffffff;

  /* Find out which operations are safe for a given CODE.  Also do a quick
     check for degenerate cases; these can occur when DImode operations
     are split.  */
  switch (code)
    {
    case SET:
      can_invert = 1;
      can_shift = 1;
      can_negate = 1;
      break;

    case PLUS:
      can_negate = 1;
      can_negate_initial = 1;
      break;

    case IOR:
      if (remainder == 0xffffffff)
        {
          if (generate)
            emit_insn (gen_rtx (SET, VOIDmode, target,
                                GEN_INT (ARM_SIGN_EXTEND (val))));
          return 1;
        }
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx (SET, VOIDmode, target, source));
          return 1;
        }
      break;

    case AND:
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx (SET, VOIDmode, target, const0_rtx));
          return 1;
        }
      if (remainder == 0xffffffff)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx (SET, VOIDmode, target, source));
          return 1;
        }
      can_invert = 1;
      break;

    case XOR:
      if (remainder == 0)
        {
          if (reload_completed && rtx_equal_p (target, source))
            return 0;
          if (generate)
            emit_insn (gen_rtx (SET, VOIDmode, target, source));
          return 1;
        }
      if (remainder == 0xffffffff)
        {
          if (generate)
            emit_insn (gen_rtx (SET, VOIDmode, target,
                                gen_rtx (NOT, mode, source)));
          return 1;
        }

      /* We don't know how to handle this yet below.  */
      abort ();

    case MINUS:
      /* We treat MINUS as (val - source), since (source - val) is always
         passed as (source + (-val)).  */
      if (remainder == 0)
        {
          if (generate)
            emit_insn (gen_rtx (SET, VOIDmode, target,
                                gen_rtx (NEG, mode, source)));
          return 1;
        }
      if (const_ok_for_arm (val))
        {
          if (generate)
            emit_insn (gen_rtx (SET, VOIDmode, target,
                                gen_rtx (MINUS, mode, GEN_INT (val), source)));
          return 1;
        }
      can_negate = 1;

      break;

    default:
      abort ();
    }

  /* If we can do it in one insn get out quickly */
  if (const_ok_for_arm (val)
      || (can_negate_initial && const_ok_for_arm (-val))
      || (can_invert && const_ok_for_arm (~val)))
    {
      if (generate)
        emit_insn (gen_rtx (SET, VOIDmode, target,
                            (source ? gen_rtx (code, mode, source,
                                               GEN_INT (val))
                             : GEN_INT (val))));
      return 1;
    }

  /* Calculate a few attributes that may be useful for specific
     optimizations.  */

  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) == 0)
        clear_sign_bit_copies++;
      else
        break;
    }

  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) != 0)
        set_sign_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) == 0)
        clear_zero_bit_copies++;
      else
        break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) != 0)
        set_zero_bit_copies++;
      else
        break;
    }
  switch (code)
    {
    case SET:
      /* See if we can do this by sign_extending a constant that is known
         to be negative.  This is a good way of doing it, since the shift
         may well merge into a subsequent insn.  */
      if (set_sign_bit_copies > 1)
        {
          if (const_ok_for_arm
              (temp1 = ARM_SIGN_EXTEND (remainder
                                        << (set_sign_bit_copies - 1))))
            {
              if (generate)
                {
                  new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx (SET, VOIDmode, new_src,
                                      GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
          /* For an inverted constant, we will need to set the low bits,
             these will be shifted out of harm's way.  */
          temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
          if (const_ok_for_arm (~temp1))
            {
              if (generate)
                {
                  new_src = subtargets ? gen_reg_rtx (mode) : target;
                  emit_insn (gen_rtx (SET, VOIDmode, new_src,
                                      GEN_INT (temp1)));
                  emit_insn (gen_ashrsi3 (target, new_src,
                                          GEN_INT (set_sign_bit_copies - 1)));
                }
              return 2;
            }
        }

      /* See if we can generate this by setting the bottom (or the top)
         16 bits, and then shifting these into the other half of the
         word.  We only look for the simplest cases, to do more would cost
         too much.  Be careful, however, not to generate this when the
         alternative would take fewer insns.  */
      if (val & 0xffff0000)
        {
          temp1 = remainder & 0xffff0000;
          temp2 = remainder & 0x0000ffff;

          /* Overlaps outside this range are best done using other methods.  */
          for (i = 9; i < 24; i++)
            {
              if ((((temp2 | (temp2 << i)) & 0xffffffff) == remainder)
                  && ! const_ok_for_arm (temp2))
                {
                  insns = arm_gen_constant (code, mode, temp2,
                                            new_src = (subtargets
                                                       ? gen_reg_rtx (mode)
                                                       : target),
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn (gen_rtx (SET, VOIDmode, target,
                                        gen_rtx (IOR, mode,
                                                 gen_rtx (ASHIFT, mode, source,
                                                          GEN_INT (i)),
                                                 source)));
                  return insns + 1;
                }
            }

          /* Don't duplicate cases already considered.  */
          for (i = 17; i < 24; i++)
            {
              if (((temp1 | (temp1 >> i)) == remainder)
                  && ! const_ok_for_arm (temp1))
                {
                  insns = arm_gen_constant (code, mode, temp1,
                                            new_src = (subtargets
                                                       ? gen_reg_rtx (mode)
                                                       : target),
                                            source, subtargets, generate);
                  source = new_src;
                  if (generate)
                    emit_insn (gen_rtx (SET, VOIDmode, target,
                                        gen_rtx (IOR, mode,
                                                 gen_rtx (LSHIFTRT, mode,
                                                          source, GEN_INT (i)),
                                                 source)));
                  return insns + 1;
                }
            }
        }
      break;
    case IOR:
    case XOR:
      /* If we have IOR or XOR, and the inverse of the constant can be loaded
         in a single instruction, and we can find a temporary to put it in,
         then this can be done in two instructions instead of 3-4.  */
      if (subtargets
          || (reload_completed && ! reg_mentioned_p (target, source)))
        {
          if (const_ok_for_arm (ARM_SIGN_EXTEND (~ val)))
            {
              if (generate)
                {
                  rtx sub = subtargets ? gen_reg_rtx (mode) : target;

                  emit_insn (gen_rtx (SET, VOIDmode, sub,
                                      GEN_INT (ARM_SIGN_EXTEND (~ val))));
                  emit_insn (gen_rtx (SET, VOIDmode, target,
                                      gen_rtx (code, mode, source, sub)));
                }
              return 2;
            }
        }

      if (code == XOR)
        break;

      if (set_sign_bit_copies > 8
          && (val & (-1 << (32 - set_sign_bit_copies))) == val)
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (set_sign_bit_copies);

              emit_insn (gen_rtx (SET, VOIDmode, sub,
                                  gen_rtx (NOT, mode,
                                           gen_rtx (ASHIFT, mode, source,
                                                    shift))));
              emit_insn (gen_rtx (SET, VOIDmode, target,
                                  gen_rtx (NOT, mode,
                                           gen_rtx (LSHIFTRT, mode, sub,
                                                    shift))));
            }
          return 2;
        }

      if (set_zero_bit_copies > 8
          && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              rtx shift = GEN_INT (set_zero_bit_copies);

              emit_insn (gen_rtx (SET, VOIDmode, sub,
                                  gen_rtx (NOT, mode,
                                           gen_rtx (LSHIFTRT, mode, source,
                                                    shift))));
              emit_insn (gen_rtx (SET, VOIDmode, target,
                                  gen_rtx (NOT, mode,
                                           gen_rtx (ASHIFT, mode, sub,
                                                    shift))));
            }
          return 2;
        }

      if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~ val)))
        {
          if (generate)
            {
              rtx sub = subtargets ? gen_reg_rtx (mode) : target;
              emit_insn (gen_rtx (SET, VOIDmode, sub,
                                  gen_rtx (NOT, mode, source)));
              source = sub;
              if (subtargets)
                sub = gen_reg_rtx (mode);
              emit_insn (gen_rtx (SET, VOIDmode, sub,
                                  gen_rtx (AND, mode, source,
                                           GEN_INT (temp1))));
              emit_insn (gen_rtx (SET, VOIDmode, target,
                                  gen_rtx (NOT, mode, sub)));
            }
          return 3;
        }
      break;
    case AND:
      /* See if two shifts will do 2 or more insn's worth of work.  */
      if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
        {
          HOST_WIDE_INT shift_mask = ((0xffffffff
                                       << (32 - clear_sign_bit_copies))
                                      & 0xffffffff);
          rtx new_source;
          rtx shift;

          if ((remainder | shift_mask) != 0xffffffff)
            {
              if (generate)
                {
                  new_source = subtargets ? gen_reg_rtx (mode) : target;
                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            new_source, source, subtargets, 1);
                  source = new_source;
                }
              else
                insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                          new_source, source, subtargets, 0);
            }

          if (generate)
            {
              shift = GEN_INT (clear_sign_bit_copies);
              new_source = subtargets ? gen_reg_rtx (mode) : target;
              emit_insn (gen_ashlsi3 (new_source, source, shift));
              emit_insn (gen_lshrsi3 (target, new_source, shift));
            }

          return insns + 2;
        }

      if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
        {
          HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;
          rtx new_source;
          rtx shift;

          if ((remainder | shift_mask) != 0xffffffff)
            {
              if (generate)
                {
                  new_source = subtargets ? gen_reg_rtx (mode) : target;
                  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                            new_source, source, subtargets, 1);
                  source = new_source;
                }
              else
                insns = arm_gen_constant (AND, mode, remainder | shift_mask,
                                          new_source, source, subtargets, 0);
            }

          if (generate)
            {
              shift = GEN_INT (clear_zero_bit_copies);
              new_source = subtargets ? gen_reg_rtx (mode) : target;
              emit_insn (gen_lshrsi3 (new_source, source, shift));
              emit_insn (gen_ashlsi3 (target, new_source, shift));
            }

          return insns + 2;
        }
      break;
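      /* Illustrative note (not part of the original source): the two-shift
         trick above turns an AND with a contiguous mask into two
         single-cycle shifts.  E.g. for target = source & 0x0000ffff
         (clear_sign_bit_copies == 16) the generated code is:

           mov  target, source, asl #16
           mov  target, target, lsr #16

         which avoids loading 0x0000ffff from the constant pool.  */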
    default:
      break;
    }

  for (i = 0; i < 32; i++)
    if (remainder & (1 << i))
      num_bits_set++;

  if (code == AND || (can_invert && num_bits_set > 16))
    remainder = (~remainder) & 0xffffffff;
  else if (code == PLUS && num_bits_set > 16)
    remainder = (-remainder) & 0xffffffff;
  else
    {
      can_invert = 0;
      can_negate = 0;
    }

  /* Now try and find a way of doing the job in either two or three
     instructions.
     We start by looking for the largest block of zeros that are aligned on
     a 2-bit boundary, we then fill up the temps, wrapping around to the
     top of the word when we drop off the bottom.
     In the worst case this code should produce no more than four insns.  */
  {
    int best_start = 0;
    int best_consecutive_zeros = 0;

    for (i = 0; i < 32; i += 2)
      {
        int consecutive_zeros = 0;

        if (! (remainder & (3 << i)))
          {
            while ((i < 32) && ! (remainder & (3 << i)))
              {
                consecutive_zeros += 2;
                i += 2;
              }
            if (consecutive_zeros > best_consecutive_zeros)
              {
                best_consecutive_zeros = consecutive_zeros;
                best_start = i - consecutive_zeros;
              }
            i -= 2;
          }
      }

    /* Now start emitting the insns, starting with the one with the highest
       bit set: we do this so that the smallest number will be emitted last;
       this is more likely to be combinable with addressing insns.  */
    i = best_start;
    do
      {
        int end;

        if (i <= 0)
          i += 32;
        if (remainder & (3 << (i - 2)))
          {
            end = i - 8;
            if (end < 0)
              end += 32;
            temp1 = remainder & ((0x0ff << end)
                                 | ((i < end) ? (0xff >> (32 - end)) : 0));
            remainder &= ~temp1;

            if (code == SET)
              {
                if (generate)
                  emit_insn (gen_rtx (SET, VOIDmode,
                                      new_src = (subtargets
                                                 ? gen_reg_rtx (mode)
                                                 : target),
                                      GEN_INT (can_invert ? ~temp1 : temp1)));
                can_invert = 0;
                code = PLUS;
              }
            else if (code == MINUS)
              {
                if (generate)
                  emit_insn (gen_rtx (SET, VOIDmode,
                                      new_src = (subtargets
                                                 ? gen_reg_rtx (mode)
                                                 : target),
                                      gen_rtx (code, mode, GEN_INT (temp1),
                                               source)));
                code = PLUS;
              }
            else
              {
                if (generate)
                  emit_insn (gen_rtx (SET, VOIDmode,
                                      new_src = (remainder
                                                 ? (subtargets
                                                    ? gen_reg_rtx (mode)
                                                    : target)
                                                 : target),
                                      gen_rtx (code, mode, source,
                                               GEN_INT (can_invert ? ~temp1
                                                        : (can_negate
                                                           ? -temp1
                                                           : temp1)))));
              }

            insns++;
            source = new_src;
            i -= 6;
          }
        i -= 2;
      } while (remainder);
  }

  return insns;
}
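/* Illustrative note (not part of the original source): when no special
   pattern applies, the loop above emits one rotated 8-bit chunk at a time,
   switching CODE to PLUS after the first insn.  For example, a SET of
   0x12345678 can come out as:

     mov  rd, #0x12000000
     add  rd, rd, #0x00340000
     add  rd, rd, #0x00005600
     add  rd, rd, #0x00000078

   matching the "no more than four insns" worst case noted above.  */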
/* Handle aggregates that are not laid out in a BLKmode element.
   This is a sub-element of RETURN_IN_MEMORY.  */

int
arm_return_in_memory (type)
     tree type;
{
  if (TREE_CODE (type) == RECORD_TYPE)
    {
      tree field;

      /* For a struct, we can return in a register if every element was a
         bit-field.  */
      for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
        if (TREE_CODE (field) != FIELD_DECL
            || ! DECL_BIT_FIELD_TYPE (field))
          return 1;

      return 0;
    }
  else if (TREE_CODE (type) == UNION_TYPE)
    {
      tree field;

      /* Unions can be returned in registers if every element is
         integral, or can be returned in an integer register.  */
      for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
        {
          if (TREE_CODE (field) != FIELD_DECL
              || (AGGREGATE_TYPE_P (TREE_TYPE (field))
                  && RETURN_IN_MEMORY (TREE_TYPE (field)))
              || FLOAT_TYPE_P (TREE_TYPE (field)))
            return 1;
        }

      return 0;
    }

  /* XXX Not sure what should be done for other aggregates, so put them in
     memory.  */
  return 1;
}
#define REG_OR_SUBREG_REG(X)                                            \
  (GET_CODE (X) == REG                                                  \
   || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))

#define REG_OR_SUBREG_RTX(X)                    \
  (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))

#define ARM_FRAME_RTX(X)                                        \
  ((X) == frame_pointer_rtx || (X) == stack_pointer_rtx         \
   || (X) == arg_pointer_rtx)
int
arm_rtx_costs (x, code, outer_code)
     rtx x;
     enum rtx_code code, outer_code;
{
  enum machine_mode mode = GET_MODE (x);
  enum rtx_code subcode;
  int extra_cost;

  switch (code)
    {
    case MEM:
      /* Memory costs quite a lot for the first word, but subsequent words
         load at the equivalent of a single insn each.  */
      return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
              + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));

    case DIV:
    case MOD:
      return 100;

    case ROTATE:
      if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
        return 4;
      /* Fall through */
    case ROTATERT:
      if (mode != SImode)
        return 8;
      /* Fall through */
    case ASHIFT: case LSHIFTRT: case ASHIFTRT:
      if (mode == DImode)
        return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
                + ((GET_CODE (XEXP (x, 0)) == REG
                    || (GET_CODE (XEXP (x, 0)) == SUBREG
                        && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
                   ? 0 : 8));
      return (1 + ((GET_CODE (XEXP (x, 0)) == REG
                    || (GET_CODE (XEXP (x, 0)) == SUBREG
                        && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
                   ? 0 : 4)
              + ((GET_CODE (XEXP (x, 1)) == REG
                  || (GET_CODE (XEXP (x, 1)) == SUBREG
                      && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
                  || (GET_CODE (XEXP (x, 1)) == CONST_INT))
                 ? 0 : 4));

    case MINUS:
      if (mode == DImode)
        return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
                + ((REG_OR_SUBREG_REG (XEXP (x, 0))
                    || (GET_CODE (XEXP (x, 0)) == CONST_INT
                        && const_ok_for_arm (INTVAL (XEXP (x, 0)))))
                   ? 0 : 8));

      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
        return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
                      || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
                          && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
                     ? 0 : 8)
                + ((REG_OR_SUBREG_REG (XEXP (x, 0))
                    || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
                        && const_double_rtx_ok_for_fpu (XEXP (x, 0))))
                   ? 0 : 8));

      if (((GET_CODE (XEXP (x, 0)) == CONST_INT
            && const_ok_for_arm (INTVAL (XEXP (x, 0)))
            && REG_OR_SUBREG_REG (XEXP (x, 1))))
          || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
               || subcode == ASHIFTRT || subcode == LSHIFTRT
               || subcode == ROTATE || subcode == ROTATERT
               || (subcode == MULT
                   && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
                   && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
                        (INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
              && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
              && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
                  || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
              && REG_OR_SUBREG_REG (XEXP (x, 0))))
        return 1;
      /* Fall through */

    case PLUS:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
        return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
                + ((REG_OR_SUBREG_REG (XEXP (x, 1))
                    || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
                        && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
                   ? 0 : 8));

      /* Fall through */
    case AND: case XOR: case IOR:
      extra_cost = 0;

      /* Normally the frame registers will be spilt into reg+const during
         reload, so it is a bad idea to combine them with other instructions,
         since then they might not be moved outside of loops.  As a compromise
         we allow integration with ops that have a constant as their second
         operand.  */
      if ((REG_OR_SUBREG_REG (XEXP (x, 0))
           && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
           && GET_CODE (XEXP (x, 1)) != CONST_INT)
          || (REG_OR_SUBREG_REG (XEXP (x, 0))
              && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))))
        extra_cost = 4;

      if (mode == DImode)
        return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
                + ((REG_OR_SUBREG_REG (XEXP (x, 1))
                    || (GET_CODE (XEXP (x, 1)) == CONST_INT
                        && const_ok_for_op (INTVAL (XEXP (x, 1)), code, mode)))
                   ? 0 : 8));

      if (REG_OR_SUBREG_REG (XEXP (x, 0)))
        return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
                + ((REG_OR_SUBREG_REG (XEXP (x, 1))
                    || (GET_CODE (XEXP (x, 1)) == CONST_INT
                        && const_ok_for_op (INTVAL (XEXP (x, 1)), code, mode)))
                   ? 0 : 4));

      else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
        return (1 + extra_cost
                + ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
                     || subcode == LSHIFTRT || subcode == ASHIFTRT
                     || subcode == ROTATE || subcode == ROTATERT
                     || (subcode == MULT
                         && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
                         && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
                              (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0))
                    && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
                    && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
                        || GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
                   ? 0 : 4));

      return 8;
    case MULT:
      if (arm_fast_multiply && mode == DImode
          && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
          && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
              || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
        return 8;

      if (GET_MODE_CLASS (mode) == MODE_FLOAT
          || mode == DImode)
        return 30;

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        {
          unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
                                      & (unsigned HOST_WIDE_INT) 0xffffffff);
          int add_cost = const_ok_for_arm (i) ? 4 : 8;
          int j;
          int booth_unit_size = (arm_fast_multiply ? 8 : 2);

          for (j = 0; i && j < 32; j += booth_unit_size)
            {
              i >>= booth_unit_size;
              add_cost += 2;
            }

          return add_cost;
        }
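      /* Illustrative note (not part of the original source): the loop above
         models how many stages of the (Booth-encoded) multiplier the
         constant consumes, booth_unit_size bits per stage.  E.g. for x * 100
         with arm_fast_multiply: 100 is a valid immediate so add_cost starts
         at 4, and a single 8-bit stage clears I, giving 6.  Without the fast
         multiplier, the same constant takes four 2-bit stages, giving 12.  */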
      return ((arm_fast_multiply ? 8 : 30)
              + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
              + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));
    case NEG:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
        return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
      /* Fall through */
    case NOT:
      if (mode == DImode)
        return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);

      return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);

    case IF_THEN_ELSE:
      if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
        return 14;
      return 2;

    case COMPARE:
      return 1;

    case ABS:
      return 4 + (mode == DImode ? 4 : 0);

    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == QImode)
        return (4 + (mode == DImode ? 4 : 0)
                + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
      /* Fall through */
    case ZERO_EXTEND:
      switch (GET_MODE (XEXP (x, 0)))
        {
        case QImode:
          return (1 + (mode == DImode ? 4 : 0)
                  + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

        case HImode:
          return (4 + (mode == DImode ? 4 : 0)
                  + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

        case SImode:
          return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
        }
      abort ();

    default:
      return 99;
    }
}
/* This code has been fixed for cross compilation.  */

static int fpa_consts_inited = 0;

char *strings_fpa[8] = {
  "0", "1", "2", "3",
  "4", "5", "0.5", "10"
};

static REAL_VALUE_TYPE values_fpa[8];

static void
init_fpa_table ()
{
  int i;
  REAL_VALUE_TYPE r;

  for (i = 0; i < 8; i++)
    {
      r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
      values_fpa[i] = r;
    }

  fpa_consts_inited = 1;
}

/* Return TRUE if rtx X is a valid immediate FPU constant.  */

int
const_double_rtx_ok_for_fpu (x)
     rtx x;
{
  REAL_VALUE_TYPE r;
  int i;

  if (!fpa_consts_inited)
    init_fpa_table ();

  REAL_VALUE_FROM_CONST_DOUBLE (r, x);
  if (REAL_VALUE_MINUS_ZERO (r))
    return 0;

  for (i = 0; i < 8; i++)
    if (REAL_VALUES_EQUAL (r, values_fpa[i]))
      return 1;

  return 0;
}

/* Return TRUE if rtx X is a valid immediate FPU constant when negated.  */

int
neg_const_double_rtx_ok_for_fpu (x)
     rtx x;
{
  REAL_VALUE_TYPE r;
  int i;

  if (!fpa_consts_inited)
    init_fpa_table ();

  REAL_VALUE_FROM_CONST_DOUBLE (r, x);
  r = REAL_VALUE_NEGATE (r);
  if (REAL_VALUE_MINUS_ZERO (r))
    return 0;

  for (i = 0; i < 8; i++)
    if (REAL_VALUES_EQUAL (r, values_fpa[i]))
      return 1;

  return 0;
}
/* Predicates for `match_operand' and `match_operator'.  */

/* s_register_operand is the same as register_operand, but it doesn't accept
   (SUBREG (MEM)...).  */

int
s_register_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Only accept reg, subreg(reg), const_int.  */

int
reg_or_int_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    return 1;

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 if OP is an item in memory, given that we are in reload.  */

int
reload_memory_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  int regno = true_regnum (op);

  return (! CONSTANT_P (op)
          && (regno == -1
              || (GET_CODE (op) == REG
                  && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
}

/* Return TRUE for valid operands for the rhs of an ARM instruction.  */

int
arm_rhs_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
          || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
}

/* Return TRUE for valid operands for the rhs of an ARM instruction, or a
   load.  */

int
arm_rhsm_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
          || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
          || memory_operand (op, mode));
}

/* Return TRUE for valid operands for the rhs of an ARM instruction, or a
   constant that is valid when negated.  */

int
arm_add_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
          || (GET_CODE (op) == CONST_INT
              && (const_ok_for_arm (INTVAL (op))
                  || const_ok_for_arm (-INTVAL (op)))));
}

int
arm_not_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
          || (GET_CODE (op) == CONST_INT
              && (const_ok_for_arm (INTVAL (op))
                  || const_ok_for_arm (~INTVAL (op)))));
}

/* Return TRUE for valid operands for the rhs of an FPU instruction.  */

int
fpu_rhs_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;
  else if (GET_CODE (op) == CONST_DOUBLE)
    return (const_double_rtx_ok_for_fpu (op));

  return FALSE;
}

int
fpu_add_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;
  else if (GET_CODE (op) == CONST_DOUBLE)
    return (const_double_rtx_ok_for_fpu (op)
            || neg_const_double_rtx_ok_for_fpu (op));

  return FALSE;
}

/* Return nonzero if OP is a constant power of two.  */

int
power_of_two_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    {
      HOST_WIDE_INT value = INTVAL (op);
      return value != 0 && (value & (value - 1)) == 0;
    }
  return FALSE;
}
/* Return TRUE for a valid operand of a DImode operation.
   Either: REG, CONST_DOUBLE or MEM(DImode_address).
   Note that this disallows MEM(REG+REG), but allows
   MEM(PRE/POST_INC/DEC(REG)).  */

int
di_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;

  switch (GET_CODE (op))
    {
    case CONST_DOUBLE:
    case CONST_INT:
      return TRUE;

    case MEM:
      return memory_address_p (DImode, XEXP (op, 0));

    default:
      return FALSE;
    }
}

/* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
   Either: REG, CONST_DOUBLE or MEM(DImode_address).
   Note that this disallows MEM(REG+REG), but allows
   MEM(PRE/POST_INC/DEC(REG)).  */

int
soft_df_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;

  switch (GET_CODE (op))
    {
    case CONST_DOUBLE:
      return TRUE;

    case MEM:
      return memory_address_p (DFmode, XEXP (op, 0));

    default:
      return FALSE;
    }
}

/* Return TRUE for valid index operands.  */

int
index_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
          || (immediate_operand (op, mode)
              && INTVAL (op) < 4096 && INTVAL (op) > -4096));
}

/* Return TRUE for valid shifts by a constant.  This also accepts any
   power of two on the (somewhat overly relaxed) assumption that the
   shift operator in this case was a mult.  */

int
const_shift_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (power_of_two_operand (op, mode)
          || (immediate_operand (op, mode)
              && (INTVAL (op) < 32 && INTVAL (op) > 0)));
}
/* Return TRUE for arithmetic operators which can be combined with a multiply
   (shift).  */

int
shiftable_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (GET_MODE (x) != mode)
    return FALSE;
  else
    {
      enum rtx_code code = GET_CODE (x);

      return (code == PLUS || code == MINUS
              || code == IOR || code == XOR || code == AND);
    }
}

/* Return TRUE for shift operators.  */

int
shift_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (GET_MODE (x) != mode)
    return FALSE;
  else
    {
      enum rtx_code code = GET_CODE (x);

      if (code == MULT)
        return power_of_two_operand (XEXP (x, 1));

      return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
              || code == ROTATERT);
    }
}

/* Return TRUE if x is EQ or NE.  */

int
equality_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  return GET_CODE (x) == EQ || GET_CODE (x) == NE;
}

/* Return TRUE for SMIN SMAX UMIN UMAX operators.  */

int
minmax_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  enum rtx_code code = GET_CODE (x);

  if (GET_MODE (x) != mode)
    return FALSE;

  return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
}
/* Return TRUE if this is the condition code register, if we aren't given
   a mode, accept any class CCmode register.  */

int
cc_register (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    {
      mode = GET_MODE (x);
      if (GET_MODE_CLASS (mode) != MODE_CC)
        return FALSE;
    }

  if (mode == GET_MODE (x) && GET_CODE (x) == REG && REGNO (x) == 24)
    return TRUE;

  return FALSE;
}

/* Return TRUE if this is the condition code register, if we aren't given
   a mode, accept any mode in class CC_MODE that is reversible.  */

int
reversible_cc_register (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    {
      mode = GET_MODE (x);
      if (GET_MODE_CLASS (mode) != MODE_CC
          && GET_CODE (x) == REG && REGNO (x) == 24)
        abort ();
      if (GET_MODE_CLASS (mode) != MODE_CC
          || (! flag_fast_math && ! REVERSIBLE_CC_MODE (mode)))
        return FALSE;
    }

  if (mode == GET_MODE (x) && GET_CODE (x) == REG && REGNO (x) == 24)
    return TRUE;

  return FALSE;
}
/* Return TRUE if X references a SYMBOL_REF.  */

int
symbol_mentioned_p (x)
     rtx x;
{
  register char *fmt;
  register int i;

  if (GET_CODE (x) == SYMBOL_REF)
    return 1;

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          register int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (symbol_mentioned_p (XVECEXP (x, i, j)))
              return 1;
        }
      else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
        return 1;
    }

  return 0;
}

/* Return TRUE if X references a LABEL_REF.  */

int
label_mentioned_p (x)
     rtx x;
{
  register char *fmt;
  register int i;

  if (GET_CODE (x) == LABEL_REF)
    return 1;

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          register int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (label_mentioned_p (XVECEXP (x, i, j)))
              return 1;
        }
      else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
        return 1;
    }

  return 0;
}
enum rtx_code
minmax_code (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);

  if (code == SMAX)
    return GE;
  else if (code == SMIN)
    return LE;
  else if (code == UMIN)
    return LEU;
  else if (code == UMAX)
    return GEU;

  abort ();
}

/* Return 1 if memory locations are adjacent.  */

int
adjacent_mem_locations (a, b)
     rtx a, b;
{
  int val0 = 0, val1 = 0;
  int reg0, reg1;

  if ((GET_CODE (XEXP (a, 0)) == REG
       || (GET_CODE (XEXP (a, 0)) == PLUS
           && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
      && (GET_CODE (XEXP (b, 0)) == REG
          || (GET_CODE (XEXP (b, 0)) == PLUS
              && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
    {
      if (GET_CODE (XEXP (a, 0)) == PLUS)
        {
          reg0 = REGNO (XEXP (XEXP (a, 0), 0));
          val0 = INTVAL (XEXP (XEXP (a, 0), 1));
        }
      else
        reg0 = REGNO (XEXP (a, 0));
      if (GET_CODE (XEXP (b, 0)) == PLUS)
        {
          reg1 = REGNO (XEXP (XEXP (b, 0), 0));
          val1 = INTVAL (XEXP (XEXP (b, 0), 1));
        }
      else
        reg1 = REGNO (XEXP (b, 0));
      return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
    }
  return 0;
}
/* Return 1 if OP is a load multiple operation.  It is known to be
   parallel and the first section will be tested.  */

int
load_multiple_operation (op, mode)
     rtx op;
     enum machine_mode mode;
{
  HOST_WIDE_INT count = XVECLEN (op, 0);
  int dest_regno;
  rtx src_addr;
  HOST_WIDE_INT i = 1, base = 0;
  rtx elt;

  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET)
    return 0;

  /* Check to see if this might be a write-back */
  if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
    {
      i++;
      base = 1;

      /* Now check it more carefully */
      if (GET_CODE (SET_DEST (elt)) != REG
          || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
          || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
          || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
          || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 2) * 4
          || GET_CODE (XVECEXP (op, 0, count - 1)) != CLOBBER
          || GET_CODE (XEXP (XVECEXP (op, 0, count - 1), 0)) != REG
          || REGNO (XEXP (XVECEXP (op, 0, count - 1), 0))
             != REGNO (SET_DEST (elt)))
        return 0;

      count--;
    }

  /* Perform a quick check so we don't blow up below.  */
  if (count <= i
      || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
    return 0;

  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);

  for (; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != SET
          || GET_CODE (SET_DEST (elt)) != REG
          || GET_MODE (SET_DEST (elt)) != SImode
          || REGNO (SET_DEST (elt)) != dest_regno + i - base
          || GET_CODE (SET_SRC (elt)) != MEM
          || GET_MODE (SET_SRC (elt)) != SImode
          || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
          || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
          || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
          || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
        return 0;
    }

  return 1;
}
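/* Illustrative note (not part of the original source): a minimal sketch of
   a PARALLEL that this predicate accepts, a three-register load from the
   address in register B (no write-back):

     (parallel
       [(set (reg:SI 0) (mem:SI (reg:SI B)))
        (set (reg:SI 1) (mem:SI (plus:SI (reg:SI B) (const_int 4))))
        (set (reg:SI 2) (mem:SI (plus:SI (reg:SI B) (const_int 8))))])

   i.e. consecutive destination registers loaded from consecutive words, as
   built by arm_gen_load_multiple below.  */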
/* Return 1 if OP is a store multiple operation.  It is known to be
   parallel and the first section will be tested.  */

int
store_multiple_operation (op, mode)
     rtx op;
     enum machine_mode mode;
{
  HOST_WIDE_INT count = XVECLEN (op, 0);
  int src_regno;
  rtx dest_addr;
  HOST_WIDE_INT i = 1, base = 0;
  rtx elt;

  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET)
    return 0;

  /* Check to see if this might be a write-back */
  if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
    {
      i++;
      base = 1;

      /* Now check it more carefully */
      if (GET_CODE (SET_DEST (elt)) != REG
          || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
          || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
          || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
          || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 2) * 4
          || GET_CODE (XVECEXP (op, 0, count - 1)) != CLOBBER
          || GET_CODE (XEXP (XVECEXP (op, 0, count - 1), 0)) != REG
          || REGNO (XEXP (XVECEXP (op, 0, count - 1), 0))
             != REGNO (SET_DEST (elt)))
        return 0;

      count--;
    }

  /* Perform a quick check so we don't blow up below.  */
  if (count <= i
      || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
      || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
    return 0;

  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);

  for (; i < count; i++)
    {
      elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != SET
          || GET_CODE (SET_SRC (elt)) != REG
          || GET_MODE (SET_SRC (elt)) != SImode
          || REGNO (SET_SRC (elt)) != src_regno + i - base
          || GET_CODE (SET_DEST (elt)) != MEM
          || GET_MODE (SET_DEST (elt)) != SImode
          || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
          || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
          || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
          || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
        return 0;
    }

  return 1;
}

int
multi_register_push (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) != PARALLEL
      || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
      || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
      || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != 2))
    return 0;

  return 1;
}
/* Routines for use with attributes.  */

int
const_pool_offset (symbol)
     rtx symbol;
{
  return get_pool_offset (symbol) - get_pool_size () - get_prologue_size ();
}

/* Routines for use in generating RTL.  */

rtx
arm_gen_load_multiple (base_regno, count, from, up, write_back)
     int base_regno;
     int count;
     rtx from;
     int up;
     int write_back;
{
  int i = 0, j;
  rtx result;
  int sign = up ? 1 : -1;

  result = gen_rtx (PARALLEL, VOIDmode,
                    rtvec_alloc (count + (write_back ? 2 : 0)));
  if (write_back)
    {
      XVECEXP (result, 0, 0)
        = gen_rtx (SET, GET_MODE (from), from,
                   plus_constant (from, count * 4 * sign));
      i = 1;
      count++;
    }

  for (j = 0; i < count; i++, j++)
    {
      XVECEXP (result, 0, i)
        = gen_rtx (SET, VOIDmode, gen_rtx (REG, SImode, base_regno + j),
                   gen_rtx (MEM, SImode,
                            plus_constant (from, j * 4 * sign)));
    }

  if (write_back)
    XVECEXP (result, 0, i) = gen_rtx (CLOBBER, SImode, from);

  return result;
}

rtx
arm_gen_store_multiple (base_regno, count, to, up, write_back)
     int base_regno;
     int count;
     rtx to;
     int up;
     int write_back;
{
  int i = 0, j;
  rtx result;
  int sign = up ? 1 : -1;

  result = gen_rtx (PARALLEL, VOIDmode,
                    rtvec_alloc (count + (write_back ? 2 : 0)));
  if (write_back)
    {
      XVECEXP (result, 0, 0)
        = gen_rtx (SET, GET_MODE (to), to,
                   plus_constant (to, count * 4 * sign));
      i = 1;
      count++;
    }

  for (j = 0; i < count; i++, j++)
    {
      XVECEXP (result, 0, i)
        = gen_rtx (SET, VOIDmode,
                   gen_rtx (MEM, SImode, plus_constant (to, j * 4 * sign)),
                   gen_rtx (REG, SImode, base_regno + j));
    }

  if (write_back)
    XVECEXP (result, 0, i) = gen_rtx (CLOBBER, SImode, to);

  return result;
}
int
arm_gen_movstrqi (operands)
     rtx *operands;
{
  HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
  int i, r;
  rtx src, dst;
  rtx st_src, st_dst, end_src, end_dst, fin_src, fin_dst;
  rtx part_bytes_reg = NULL;
  extern int optimize;

  if (GET_CODE (operands[2]) != CONST_INT
      || GET_CODE (operands[3]) != CONST_INT
      || INTVAL (operands[2]) > 64
      || INTVAL (operands[3]) & 3)
    return 0;

  st_dst = XEXP (operands[0], 0);
  st_src = XEXP (operands[1], 0);
  fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
  fin_src = src = copy_to_mode_reg (SImode, st_src);

  in_words_to_go = (INTVAL (operands[2]) + 3) / 4;
  out_words_to_go = INTVAL (operands[2]) / 4;
  last_bytes = INTVAL (operands[2]) & 3;

  if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
    part_bytes_reg = gen_rtx (REG, SImode, (in_words_to_go - 1) & 3);

  for (i = 0; in_words_to_go >= 2; i += 4)
    {
      emit_insn (arm_gen_load_multiple (0, (in_words_to_go > 4
                                            ? 4 : in_words_to_go),
                                        src, TRUE, TRUE));
      if (out_words_to_go)
        {
          if (out_words_to_go != 1)
            emit_insn (arm_gen_store_multiple (0, (out_words_to_go > 4
                                                   ? 4 : out_words_to_go),
                                               dst, TRUE, TRUE));
          else
            {
              emit_move_insn (gen_rtx (MEM, SImode, dst),
                              gen_rtx (REG, SImode, 0));
              emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
            }
        }

      in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
      out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
    }

  /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do.  */
  if (out_words_to_go)
    {
      rtx sreg;

      emit_move_insn (sreg = gen_reg_rtx (SImode), gen_rtx (MEM, SImode, src));
      emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));
      emit_move_insn (gen_rtx (MEM, SImode, dst), sreg);
      emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
      in_words_to_go--;

      if (in_words_to_go)       /* Sanity check */
        abort ();
    }

  if (in_words_to_go)
    {
      if (in_words_to_go < 0)
        abort ();

      part_bytes_reg = copy_to_mode_reg (SImode, gen_rtx (MEM, SImode, src));
      emit_insn (gen_addsi3 (src, src, GEN_INT (4)));
    }

  if (BYTES_BIG_ENDIAN && last_bytes)
    {
      rtx tmp = gen_reg_rtx (SImode);

      if (part_bytes_reg == NULL)
        abort ();

      /* The bytes we want are in the top end of the word */
      emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
                              GEN_INT (8 * (4 - last_bytes))));
      part_bytes_reg = tmp;

      while (last_bytes)
        {
          emit_move_insn (gen_rtx (MEM, QImode,
                                   plus_constant (dst, last_bytes - 1)),
                          gen_rtx (SUBREG, QImode, part_bytes_reg, 0));
          if (--last_bytes)
            {
              tmp = gen_reg_rtx (SImode);
              emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
              part_bytes_reg = tmp;
            }
        }
    }
  else
    {
      while (last_bytes)
        {
          if (part_bytes_reg == NULL)
            abort ();

          emit_move_insn (gen_rtx (MEM, QImode, dst),
                          gen_rtx (SUBREG, QImode, part_bytes_reg, 0));
          emit_insn (gen_addsi3 (dst, dst, const1_rtx));
          if (--last_bytes)
            {
              rtx tmp = gen_reg_rtx (SImode);
              emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
              part_bytes_reg = tmp;
            }
        }
    }

  return 1;
}
/* X and Y are two things to compare using CODE.  Emit the compare insn and
   return the rtx for register 0 in the proper mode.  FP means this is a
   floating point compare: I don't think that it is needed on the arm.  */

rtx
gen_compare_reg (code, x, y, fp)
     enum rtx_code code;
     rtx x, y;
{
  enum machine_mode mode = SELECT_CC_MODE (code, x, y);
  rtx cc_reg = gen_rtx (REG, mode, 24);

  emit_insn (gen_rtx (SET, VOIDmode, cc_reg,
                      gen_rtx (COMPARE, mode, x, y)));

  return cc_reg;
}
void
arm_reload_in_hi (operands)
     rtx *operands;
{
  rtx base = find_replacement (&XEXP (operands[1], 0));

  emit_insn (gen_zero_extendqisi2 (operands[2], gen_rtx (MEM, QImode, base)));
  emit_insn (gen_zero_extendqisi2 (gen_rtx (SUBREG, SImode, operands[0], 0),
                                   gen_rtx (MEM, QImode,
                                            plus_constant (base, 1))));
  if (BYTES_BIG_ENDIAN)
    emit_insn (gen_rtx (SET, VOIDmode, gen_rtx (SUBREG, SImode,
                                                operands[0], 0),
                        gen_rtx (IOR, SImode,
                                 gen_rtx (ASHIFT, SImode,
                                          gen_rtx (SUBREG, SImode,
                                                   operands[0], 0),
                                          GEN_INT (8)),
                                 operands[2])));
  else
    emit_insn (gen_rtx (SET, VOIDmode, gen_rtx (SUBREG, SImode,
                                                operands[0], 0),
                        gen_rtx (IOR, SImode,
                                 gen_rtx (ASHIFT, SImode,
                                          operands[2],
                                          GEN_INT (8)),
                                 gen_rtx (SUBREG, SImode, operands[0], 0))));
}

void
arm_reload_out_hi (operands)
     rtx *operands;
{
  rtx base = find_replacement (&XEXP (operands[0], 0));

  if (BYTES_BIG_ENDIAN)
    {
      emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (base, 1)),
                            gen_rtx (SUBREG, QImode, operands[1], 0)));
      emit_insn (gen_lshrsi3 (operands[2],
                              gen_rtx (SUBREG, SImode, operands[1], 0),
                              GEN_INT (8)));
      emit_insn (gen_movqi (gen_rtx (MEM, QImode, base),
                            gen_rtx (SUBREG, QImode, operands[2], 0)));
    }
  else
    {
      emit_insn (gen_movqi (gen_rtx (MEM, QImode, base),
                            gen_rtx (SUBREG, QImode, operands[1], 0)));
      emit_insn (gen_lshrsi3 (operands[2],
                              gen_rtx (SUBREG, SImode, operands[1], 0),
                              GEN_INT (8)));
      emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (base, 1)),
                            gen_rtx (SUBREG, QImode, operands[2], 0)));
    }
}
/* Check to see if a branch is forwards or backwards.  Return TRUE if it
   is backwards.  */

int
arm_backwards_branch (from, to)
     int from, to;
{
  return insn_addresses[to] <= insn_addresses[from];
}

/* Check to see if a branch is within the distance that can be done using
   an arithmetic expression.  */

int
short_branch (from, to)
     int from, to;
{
  int delta = insn_addresses[from] + 8 - insn_addresses[to];

  return abs (delta) < 980;     /* A small margin for safety */
}

/* Check to see that the insn isn't the target of the conditionalizing
   code.  */

int
arm_insn_not_targeted (insn)
     rtx insn;
{
  return insn != arm_target_insn;
}
2219 /* Routines for manipulation of the constant pool. */
2220 /* This is unashamedly hacked from the version in sh.c, since the problem is
2221 extremely similar. */
2223 /* Arm instructions cannot load a large constant into a register; such
2224 constants have to come from a pc relative load. The target of a pc
2225 relative load instruction must be less than 1k in front of the instruction.
2226 This means that we often have to dump a constant inside a function, and
2227 generate code to branch around it.
2229 It is important to minimize this, since the branches will slow things
2230 down and make things bigger.
2232 Worst case code looks like:
2234 ldr rn, L1
2235 b L2
2236 align
2237 L1: .long value
2241 ldr rn, L3
2242 b L4
2243 align
2244 L3: .long value
2248 We fix this by performing a scan before scheduling, which notices which
2249 instructions need to have their operands fetched from the constant table
2250 and builds the table.
2253 The algorithm is:
2255 scan, find an instruction which needs a pcrel move. Look forward, find the
2256 last barrier which is within MAX_COUNT bytes of the requirement.
2257 If there isn't one, make one. Process all the instructions between
2258 the find and the barrier.
2260 In the above example, we can tell that L3 is within 1k of L1, so
2261 the first move can be shrunk from the 2 insn+constant sequence into
2262 just 1 insn, and the constant moved to L3 to make:
2264 ldr rn, L1
2266 ldr rn, L3
2267 b L4
2268 align
2269 L1: .long value
2270 L3: .long value
2273 Then the second move becomes the target for the shortening process.
2277 typedef struct
2279 rtx value; /* Value in table */
2280 HOST_WIDE_INT next_offset;
2281 enum machine_mode mode; /* Mode of value */
2282 } pool_node;
2284 /* The maximum number of constants that can fit into one pool, since
2285 the pc relative range is 0...1020 bytes and constants are at least 4
2286 bytes long */
2288 #define MAX_POOL_SIZE (1020/4)
2289 static pool_node pool_vector[MAX_POOL_SIZE];
2290 static int pool_size;
2291 static rtx pool_vector_label;
2293 /* Add a constant to the pool and return its label. */
2294 static HOST_WIDE_INT
2295 add_constant (x, mode)
2296 rtx x;
2297 enum machine_mode mode;
2299 int i;
2300 rtx lab;
2301 HOST_WIDE_INT offset;
2303 if (mode == SImode && GET_CODE (x) == MEM && CONSTANT_P (XEXP (x, 0))
2304 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
2305 x = get_pool_constant (XEXP (x, 0));
2306 #ifndef AOF_ASSEMBLER
2307 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == 3)
2308 x = XVECEXP (x, 0, 0);
2309 #endif
2311 /* First see if we've already got it */
2312 for (i = 0; i < pool_size; i++)
2314 if (GET_CODE (x) == GET_CODE (pool_vector[i].value)
2315 && mode == pool_vector[i].mode)
2317 if (GET_CODE (x) == CODE_LABEL)
2319 if (XINT (x, 3) != XINT (pool_vector[i].value, 3))
2320 continue;
2322 if (rtx_equal_p (x, pool_vector[i].value))
2323 return pool_vector[i].next_offset - GET_MODE_SIZE (mode);
2327 /* Need a new one */
2328 pool_vector[pool_size].next_offset = GET_MODE_SIZE (mode);
2329 offset = 0;
2330 if (pool_size == 0)
2331 pool_vector_label = gen_label_rtx ();
2332 else
2333 pool_vector[pool_size].next_offset
2334 += (offset = pool_vector[pool_size - 1].next_offset);
2336 pool_vector[pool_size].value = x;
2337 pool_vector[pool_size].mode = mode;
2338 pool_size++;
2339 return offset;
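/* For illustration: the first SImode constant added lands at offset 0 and
   sets its next_offset to 4; a second, different SImode constant then
   returns offset 4, and so on. The caller forms the load address as
   pool_vector_label plus the returned offset (see arm_reorg below). */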
2342 /* Output the literal table */
2343 static void
2344 dump_table (scan)
2345 rtx scan;
2347 int i;
2349 scan = emit_label_after (gen_label_rtx (), scan);
2350 scan = emit_insn_after (gen_align_4 (), scan);
2351 scan = emit_label_after (pool_vector_label, scan);
2353 for (i = 0; i < pool_size; i++)
2355 pool_node *p = pool_vector + i;
2357 switch (GET_MODE_SIZE (p->mode))
2359 case 4:
2360 scan = emit_insn_after (gen_consttable_4 (p->value), scan);
2361 break;
2363 case 8:
2364 scan = emit_insn_after (gen_consttable_8 (p->value), scan);
2365 break;
2367 default:
2368 abort ();
2369 break;
2373 scan = emit_insn_after (gen_consttable_end (), scan);
2374 scan = emit_barrier_after (scan);
2375 pool_size = 0;
2378 /* Nonzero if the src operand needs to be fixed up. */
2379 static int
2380 fixit (src, mode, destreg)
2381 rtx src;
2382 enum machine_mode mode;
2383 int destreg;
2385 if (CONSTANT_P (src))
2387 if (GET_CODE (src) == CONST_INT)
2388 return (! const_ok_for_arm (INTVAL (src))
2389 && ! const_ok_for_arm (~INTVAL (src)));
2390 if (GET_CODE (src) == CONST_DOUBLE)
2391 return (GET_MODE (src) == VOIDmode
2392 || destreg < 16
2393 || (! const_double_rtx_ok_for_fpu (src)
2394 && ! neg_const_double_rtx_ok_for_fpu (src)));
2395 return symbol_mentioned_p (src);
2397 #ifndef AOF_ASSEMBLER
2398 else if (GET_CODE (src) == UNSPEC && XINT (src, 1) == 3)
2399 return 1;
2400 #endif
2401 else
2402 return (mode == SImode && GET_CODE (src) == MEM
2403 && GET_CODE (XEXP (src, 0)) == SYMBOL_REF
2404 && CONSTANT_POOL_ADDRESS_P (XEXP (src, 0)));
2407 /* Find the last barrier less than MAX_COUNT bytes from FROM, or create one. */
2408 static rtx
2409 find_barrier (from, max_count)
2410 rtx from;
2411 int max_count;
2413 int count = 0;
2414 rtx found_barrier = 0;
2416 while (from && count < max_count)
2418 if (GET_CODE (from) == BARRIER)
2419 found_barrier = from;
2421 /* Count the length of this insn */
2422 if (GET_CODE (from) == INSN
2423 && GET_CODE (PATTERN (from)) == SET
2424 && CONSTANT_P (SET_SRC (PATTERN (from)))
2425 && CONSTANT_POOL_ADDRESS_P (SET_SRC (PATTERN (from))))
2427 rtx src = SET_SRC (PATTERN (from));
2428 count += 2;
2430 else
2431 count += get_attr_length (from);
2433 from = NEXT_INSN (from);
2436 if (!found_barrier)
2438 /* We didn't find a barrier in time to
2439 dump our stuff, so we'll make one */
2440 rtx label = gen_label_rtx ();
2442 if (from)
2443 from = PREV_INSN (from);
2444 else
2445 from = get_last_insn ();
2447 /* Walk back to be just before any jump */
2448 while (GET_CODE (from) == JUMP_INSN
2449 || GET_CODE (from) == NOTE
2450 || GET_CODE (from) == CODE_LABEL)
2451 from = PREV_INSN (from);
2453 from = emit_jump_insn_after (gen_jump (label), from);
2454 JUMP_LABEL (from) = label;
2455 found_barrier = emit_barrier_after (from);
2456 emit_label_after (label, found_barrier);
2457 return found_barrier;
2460 return found_barrier;
2463 /* Nonzero if the insn is a move instruction which needs to be fixed. */
2464 static int
2465 broken_move (insn)
2466 rtx insn;
2468 if (!INSN_DELETED_P (insn)
2469 && GET_CODE (insn) == INSN
2470 && GET_CODE (PATTERN (insn)) == SET)
2472 rtx pat = PATTERN (insn);
2473 rtx src = SET_SRC (pat);
2474 rtx dst = SET_DEST (pat);
2475 int destreg = 0; /* DST may be neither a REG nor a SUBREG of one; keep this defined. */
2476 enum machine_mode mode = GET_MODE (dst);
2477 if (dst == pc_rtx)
2478 return 0;
2480 if (GET_CODE (dst) == REG)
2481 destreg = REGNO (dst);
2482 else if (GET_CODE (dst) == SUBREG && GET_CODE (SUBREG_REG (dst)) == REG)
2483 destreg = REGNO (SUBREG_REG (dst));
2485 return fixit (src, mode, destreg);
2487 return 0;
2490 void
2491 arm_reorg (first)
2492 rtx first;
2494 rtx insn;
2495 int count_size;
2496 int regno;
2498 #if 0
2499 /* The ldr instruction can work with up to a 4k offset, and most constants
2500 will be loaded with one of these instructions; however, the adr
2501 instruction and the ldf instructions only work with a 1k offset. This
2502 code needs to be rewritten to use the 4k offset when possible, and to
2503 adjust when a 1k offset is needed. For now we just use a 1k offset
2504 from the start. */
2505 count_size = 4000;
2507 /* Floating point operands can't work further than 1024 bytes from the
2508 PC, so to make things simple we restrict all loads for such functions. */
2510 if (TARGET_HARD_FLOAT)
2511 for (regno = 16; regno < 24; regno++)
2512 if (regs_ever_live[regno])
2514 count_size = 1000;
2515 break;
2517 #else
2518 count_size = 1000;
2519 #endif /* 0 */
2521 for (insn = first; insn; insn = NEXT_INSN (insn))
2523 if (broken_move (insn))
2525 /* This is a broken move instruction, scan ahead looking for
2526 a barrier to stick the constant table behind */
2527 rtx scan;
2528 rtx barrier = find_barrier (insn, count_size);
2530 /* Now find all the moves between the points and modify them */
2531 for (scan = insn; scan != barrier; scan = NEXT_INSN (scan))
2533 if (broken_move (scan))
2535 /* This is a broken move instruction, add it to the pool */
2536 rtx pat = PATTERN (scan);
2537 rtx src = SET_SRC (pat);
2538 rtx dst = SET_DEST (pat);
2539 enum machine_mode mode = GET_MODE (dst);
2540 HOST_WIDE_INT offset;
2541 rtx newinsn = scan;
2542 rtx newsrc;
2543 rtx addr;
2544 int scratch;
2546 /* If this is an HImode constant load, convert it into
2547 an SImode constant load. Since the register is always
2548 32 bits this is safe. We have to do this, since the
2549 load pc-relative instruction only does a 32-bit load. */
2550 if (mode == HImode)
2552 mode = SImode;
2553 if (GET_CODE (dst) != REG)
2554 abort ();
2555 PUT_MODE (dst, SImode);
2558 offset = add_constant (src, mode);
2559 addr = plus_constant (gen_rtx (LABEL_REF, VOIDmode,
2560 pool_vector_label),
2561 offset);
2563 /* For wide moves to integer regs we need to split the
2564 address calculation off into a separate insn, so that
2565 the load can then be done with a load-multiple. This is
2566 safe, since we have already noted the length of such
2567 insns to be 8, and we are immediately over-writing the
2568 scratch we have grabbed with the final result. */
2569 if (GET_MODE_SIZE (mode) > 4
2570 && (scratch = REGNO (dst)) < 16)
2572 rtx reg = gen_rtx (REG, SImode, scratch);
2573 newinsn = emit_insn_after (gen_movaddr (reg, addr),
2574 newinsn);
2575 addr = reg;
2578 newsrc = gen_rtx (MEM, mode, addr);
2580 /* Build a jump insn wrapper around the move instead
2581 of an ordinary insn, because we want to have room for
2582 the target label rtx in fld[7], which an ordinary
2583 insn doesn't have. */
2584 newinsn = emit_jump_insn_after (gen_rtx (SET, VOIDmode,
2585 dst, newsrc),
2586 newinsn);
2587 JUMP_LABEL (newinsn) = pool_vector_label;
2589 /* But it's still an ordinary insn */
2590 PUT_CODE (newinsn, INSN);
2592 /* Kill old insn */
2593 delete_insn (scan);
2594 scan = newinsn;
2597 dump_table (barrier);
2598 insn = scan;
2604 /* Routines to output assembly language. */
2606 /* If the rtx is the correct value then return the string of the number.
2607 In this way we can ensure that valid double constants are generated even
2608 when cross compiling. */
2609 char *
2610 fp_immediate_constant (x)
2611 rtx x;
2613 REAL_VALUE_TYPE r;
2614 int i;
2616 if (!fpa_consts_inited)
2617 init_fpa_table ();
2619 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2620 for (i = 0; i < 8; i++)
2621 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
2622 return strings_fpa[i];
2624 abort ();
2627 /* As for fp_immediate_constant, but value is passed directly, not in rtx. */
2628 static char *
2629 fp_const_from_val (r)
2630 REAL_VALUE_TYPE *r;
2632 int i;
2634 if (! fpa_consts_inited)
2635 init_fpa_table ();
2637 for (i = 0; i < 8; i++)
2638 if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
2639 return strings_fpa[i];
2641 abort ();
2644 /* Output the operands of a LDM/STM instruction to STREAM.
2645 MASK is the ARM register set mask of which only bits 0-15 are important.
2646 INSTR is the instruction template, which includes the base register. HAT
2647 is nonzero if a hat (^) must follow the register list. */
2649 void
2650 print_multi_reg (stream, instr, mask, hat)
2651 FILE *stream;
2652 char *instr;
2653 int mask, hat;
2655 int i;
2656 int not_first = FALSE;
2658 fputc ('\t', stream);
2659 fprintf (stream, instr, REGISTER_PREFIX);
2660 fputs (", {", stream);
2661 for (i = 0; i < 16; i++)
2662 if (mask & (1 << i))
2664 if (not_first)
2665 fprintf (stream, ", ");
2666 fprintf (stream, "%s%s", REGISTER_PREFIX, reg_names[i]);
2667 not_first = TRUE;
2670 fprintf (stream, "}%s\n", hat ? "^" : "");
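/* For example (assuming an empty REGISTER_PREFIX), the call
   print_multi_reg (f, "ldmfd\t%ssp!", 0x8030, FALSE) would emit:

	ldmfd	sp!, {r4, r5, pc}

   since bits 4, 5 and 15 are set in the mask. */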
2673 /* Output a 'call' insn. */
2675 char *
2676 output_call (operands)
2677 rtx *operands;
2679 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
2681 if (REGNO (operands[0]) == 14)
2683 operands[0] = gen_rtx (REG, SImode, 12);
2684 output_asm_insn ("mov%?\t%0, %|lr", operands);
2686 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
2687 output_asm_insn ("mov%?\t%|pc, %0", operands);
2688 return "";
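/* The mov lr, pc / mov pc, %0 pair above works because reading the pc
   yields the address of the current instruction plus 8; lr therefore ends
   up holding the address of the instruction after the register jump,
   which is exactly the return address we want. */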
2691 static int
2692 eliminate_lr2ip (x)
2693 rtx *x;
2695 int something_changed = 0;
2696 rtx x0 = *x;
2697 int code = GET_CODE (x0);
2698 register int i, j;
2699 register char *fmt;
2701 switch (code)
2703 case REG:
2704 if (REGNO (x0) == 14)
2706 *x = gen_rtx (REG, SImode, 12);
2707 return 1;
2709 return 0;
2710 default:
2711 /* Scan through the sub-elements and change any references there */
2712 fmt = GET_RTX_FORMAT (code);
2713 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2714 if (fmt[i] == 'e')
2715 something_changed |= eliminate_lr2ip (&XEXP (x0, i));
2716 else if (fmt[i] == 'E')
2717 for (j = 0; j < XVECLEN (x0, i); j++)
2718 something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));
2719 return something_changed;
2723 /* Output a 'call' insn that is a reference in memory. */
2725 char *
2726 output_call_mem (operands)
2727 rtx *operands;
2729 operands[0] = copy_rtx (operands[0]); /* Be ultra careful */
2730 /* Handle calls using lr by using ip (which may be clobbered in subr anyway). */
2732 if (eliminate_lr2ip (&operands[0]))
2733 output_asm_insn ("mov%?\t%|ip, %|lr", operands);
2735 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
2736 output_asm_insn ("ldr%?\t%|pc, %0", operands);
2737 return "";
2741 /* Output a move from arm registers to an fpu register.
2742 OPERANDS[0] is an fpu register.
2743 OPERANDS[1] is the first register of an arm register pair. */
2745 char *
2746 output_mov_long_double_fpu_from_arm (operands)
2747 rtx *operands;
2749 int arm_reg0 = REGNO (operands[1]);
2750 rtx ops[3];
2752 if (arm_reg0 == 12)
2753 abort();
2755 ops[0] = gen_rtx (REG, SImode, arm_reg0);
2756 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
2757 ops[2] = gen_rtx (REG, SImode, 2 + arm_reg0);
2759 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
2760 output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);
2761 return "";
2764 /* Output a move from an fpu register to arm registers.
2765 OPERANDS[0] is the first register of an arm register pair.
2766 OPERANDS[1] is an fpu register. */
2768 char *
2769 output_mov_long_double_arm_from_fpu (operands)
2770 rtx *operands;
2772 int arm_reg0 = REGNO (operands[0]);
2773 rtx ops[3];
2775 if (arm_reg0 == 12)
2776 abort();
2778 ops[0] = gen_rtx (REG, SImode, arm_reg0);
2779 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
2780 ops[2] = gen_rtx (REG, SImode, 2 + arm_reg0);
2782 output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
2783 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
2784 return "";
2787 /* Output a move from arm registers to arm registers of a long double.
2788 OPERANDS[0] is the destination.
2789 OPERANDS[1] is the source. */
2790 char *
2791 output_mov_long_double_arm_from_arm (operands)
2792 rtx *operands;
2794 /* We have to be careful here because the two might overlap */
2795 int dest_start = REGNO (operands[0]);
2796 int src_start = REGNO (operands[1]);
2797 rtx ops[2];
2798 int i;
2800 if (dest_start < src_start)
2802 for (i = 0; i < 3; i++)
2804 ops[0] = gen_rtx (REG, SImode, dest_start + i);
2805 ops[1] = gen_rtx (REG, SImode, src_start + i);
2806 output_asm_insn ("mov%?\t%0, %1", ops);
2809 else
2811 for (i = 2; i >= 0; i--)
2813 ops[0] = gen_rtx (REG, SImode, dest_start + i);
2814 ops[1] = gen_rtx (REG, SImode, src_start + i);
2815 output_asm_insn ("mov%?\t%0, %1", ops);
2819 return "";
2823 /* Output a move from arm registers to an fpu register.
2824 OPERANDS[0] is an fpu register.
2825 OPERANDS[1] is the first register of an arm register pair. */
2827 char *
2828 output_mov_double_fpu_from_arm (operands)
2829 rtx *operands;
2831 int arm_reg0 = REGNO (operands[1]);
2832 rtx ops[2];
2834 if (arm_reg0 == 12)
2835 abort();
2836 ops[0] = gen_rtx (REG, SImode, arm_reg0);
2837 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
2838 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
2839 output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
2840 return "";
2843 /* Output a move from an fpu register to arm registers.
2844 OPERANDS[0] is the first register of an arm register pair.
2845 OPERANDS[1] is an fpu register. */
2847 char *
2848 output_mov_double_arm_from_fpu (operands)
2849 rtx *operands;
2851 int arm_reg0 = REGNO (operands[0]);
2852 rtx ops[2];
2854 if (arm_reg0 == 12)
2855 abort();
2857 ops[0] = gen_rtx (REG, SImode, arm_reg0);
2858 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
2859 output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
2860 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
2861 return "";
2864 /* Output a move between double words.
2865 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
2866 or MEM<-REG and all MEMs must be offsettable addresses. */
2868 char *
2869 output_move_double (operands)
2870 rtx *operands;
2872 enum rtx_code code0 = GET_CODE (operands[0]);
2873 enum rtx_code code1 = GET_CODE (operands[1]);
2874 rtx otherops[2];
2876 if (code0 == REG)
2878 int reg0 = REGNO (operands[0]);
2880 otherops[0] = gen_rtx (REG, SImode, 1 + reg0);
2881 if (code1 == REG)
2883 int reg1 = REGNO (operands[1]);
2884 if (reg1 == 12)
2885 abort();
2887 /* Ensure the second source is not overwritten */
2888 if (reg1 == reg0 + (WORDS_BIG_ENDIAN ? -1 : 1))
2889 output_asm_insn("mov%?\t%Q0, %Q1\n\tmov%?\t%R0, %R1", operands);
2890 else
2891 output_asm_insn("mov%?\t%R0, %R1\n\tmov%?\t%Q0, %Q1", operands);
2893 else if (code1 == CONST_DOUBLE)
2895 if (GET_MODE (operands[1]) == DFmode)
2897 long l[2];
2898 union real_extract u;
2900 bcopy ((char *) &CONST_DOUBLE_LOW (operands[1]), (char *) &u,
2901 sizeof (u));
2902 REAL_VALUE_TO_TARGET_DOUBLE (u.d, l);
2903 otherops[1] = GEN_INT(l[1]);
2904 operands[1] = GEN_INT(l[0]);
2906 else if (GET_MODE (operands[1]) != VOIDmode)
2907 abort ();
2908 else if (WORDS_BIG_ENDIAN)
2911 otherops[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
2912 operands[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
2914 else
2917 otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
2918 operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
2920 output_mov_immediate (operands);
2921 output_mov_immediate (otherops);
2923 else if (code1 == CONST_INT)
2925 /* Sign extend the intval into the high-order word. */
2926 if (WORDS_BIG_ENDIAN)
2928 otherops[1] = operands[1];
2929 operands[1] = (INTVAL (operands[1]) < 0
2930 ? constm1_rtx : const0_rtx);
2932 else
2933 otherops[1] = INTVAL (operands[1]) < 0 ? constm1_rtx : const0_rtx;
2934 output_mov_immediate (otherops);
2935 output_mov_immediate (operands);
2937 else if (code1 == MEM)
2939 switch (GET_CODE (XEXP (operands[1], 0)))
2941 case REG:
2942 output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
2943 break;
2945 case PRE_INC:
2946 abort (); /* Should never happen now */
2947 break;
2949 case PRE_DEC:
2950 output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
2951 break;
2953 case POST_INC:
2954 output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
2955 break;
2957 case POST_DEC:
2958 abort (); /* Should never happen now */
2959 break;
2961 case LABEL_REF:
2962 case CONST:
2963 output_asm_insn ("adr%?\t%0, %1", operands);
2964 output_asm_insn ("ldm%?ia\t%0, %M0", operands);
2965 break;
2967 default:
2968 if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1)))
2970 otherops[0] = operands[0];
2971 otherops[1] = XEXP (XEXP (operands[1], 0), 0);
2972 otherops[2] = XEXP (XEXP (operands[1], 0), 1);
2973 if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
2975 if (GET_CODE (otherops[2]) == CONST_INT)
2977 switch (INTVAL (otherops[2]))
2979 case -8:
2980 output_asm_insn ("ldm%?db\t%1, %M0", otherops);
2981 return "";
2982 case -4:
2983 output_asm_insn ("ldm%?da\t%1, %M0", otherops);
2984 return "";
2985 case 4:
2986 output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
2987 return "";
2989 if (!(const_ok_for_arm (INTVAL (otherops[2]))))
2990 output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
2991 else
2992 output_asm_insn ("add%?\t%0, %1, %2", otherops);
2994 else
2995 output_asm_insn ("add%?\t%0, %1, %2", otherops);
2997 else
2998 output_asm_insn ("sub%?\t%0, %1, %2", otherops);
2999 return "ldm%?ia\t%0, %M0";
3001 else
3003 otherops[1] = adj_offsettable_operand (operands[1], 4);
3004 /* Take care of overlapping base/data reg. */
3005 if (reg_mentioned_p (operands[0], operands[1]))
3007 output_asm_insn ("ldr%?\t%0, %1", otherops);
3008 output_asm_insn ("ldr%?\t%0, %1", operands);
3010 else
3012 output_asm_insn ("ldr%?\t%0, %1", operands);
3013 output_asm_insn ("ldr%?\t%0, %1", otherops);
3018 else
3019 abort(); /* Constraints should prevent this */
3021 else if (code0 == MEM && code1 == REG)
3023 if (REGNO (operands[1]) == 12)
3024 abort();
3026 switch (GET_CODE (XEXP (operands[0], 0)))
3028 case REG:
3029 output_asm_insn ("stm%?ia\t%m0, %M1", operands);
3030 break;
3032 case PRE_INC:
3033 abort (); /* Should never happen now */
3034 break;
3036 case PRE_DEC:
3037 output_asm_insn ("stm%?db\t%m0!, %M1", operands);
3038 break;
3040 case POST_INC:
3041 output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
3042 break;
3044 case POST_DEC:
3045 abort (); /* Should never happen now */
3046 break;
3048 case PLUS:
3049 if (GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT)
3051 switch (INTVAL (XEXP (XEXP (operands[0], 0), 1)))
3053 case -8:
3054 output_asm_insn ("stm%?db\t%m0, %M1", operands);
3055 return "";
3057 case -4:
3058 output_asm_insn ("stm%?da\t%m0, %M1", operands);
3059 return "";
3061 case 4:
3062 output_asm_insn ("stm%?ib\t%m0, %M1", operands);
3063 return "";
3066 /* Fall through */
3068 default:
3069 otherops[0] = adj_offsettable_operand (operands[0], 4);
3070 otherops[1] = gen_rtx (REG, SImode, 1 + REGNO (operands[1]));
3071 output_asm_insn ("str%?\t%1, %0", operands);
3072 output_asm_insn ("str%?\t%1, %0", otherops);
3075 else
3076 abort(); /* Constraints should prevent this */
3078 return "";
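/* For illustration: a DImode load from a plain register address, say
   (set (reg:DI r0) (mem:DI (reg:SI r3))), comes out of the REG case
   above as

	ldmia	r3, {r0-r1}

   while the PLUS cases with constant offsets of 4, -4 and -8 use the
   ib, da and db addressing variants rather than a separate address
   calculation. */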
3082 /* Output an arbitrary MOV reg, #n.
3083 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
3085 char *
3086 output_mov_immediate (operands)
3087 rtx *operands;
3089 HOST_WIDE_INT n = INTVAL (operands[1]);
3090 int n_ones = 0;
3091 int i;
3093 /* Try to use one MOV */
3094 if (const_ok_for_arm (n))
3096 output_asm_insn ("mov%?\t%0, %1", operands);
3097 return "";
3100 /* Try to use one MVN */
3101 if (const_ok_for_arm (~n))
3103 operands[1] = GEN_INT (~n);
3104 output_asm_insn ("mvn%?\t%0, %1", operands);
3105 return "";
3108 /* If all else fails, make it out of ORRs or BICs as appropriate. */
3110 for (i=0; i < 32; i++)
3111 if (n & 1 << i)
3112 n_ones++;
3114 if (n_ones > 16) /* Shorter to use MVN with BIC in this case. */
3115 output_multi_immediate(operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1,
3116 ~n);
3117 else
3118 output_multi_immediate(operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1,
3119 n);
3121 return "";
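/* Examples: 0xFF is a valid immediate, so it is a single mov; 0xFFFFFF00
   is the complement of one, so it becomes mvn %0, #0xFF; a value such as
   0x000F000F fails both tests and is built up piecewise, roughly:

	mov	r0, #0xF
	orr	r0, r0, #0xF0000

   (see output_multi_immediate below for the chunking). */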
3125 /* Output an ADD r, s, #n where n may be too big for one instruction. If
3126 adding zero to one register, output nothing. */
3128 char *
3129 output_add_immediate (operands)
3130 rtx *operands;
3132 HOST_WIDE_INT n = INTVAL (operands[2]);
3134 if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
3136 if (n < 0)
3137 output_multi_immediate (operands,
3138 "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
3139 -n);
3140 else
3141 output_multi_immediate (operands,
3142 "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
3146 return "";
3149 /* Output a multiple immediate operation.
3150 OPERANDS is the vector of operands referred to in the output patterns.
3151 INSTR1 is the output pattern to use for the first constant.
3152 INSTR2 is the output pattern to use for subsequent constants.
3153 IMMED_OP is the index of the constant slot in OPERANDS.
3154 N is the constant value. */
3156 char *
3157 output_multi_immediate (operands, instr1, instr2, immed_op, n)
3158 rtx *operands;
3159 char *instr1, *instr2;
3160 int immed_op;
3161 HOST_WIDE_INT n;
3163 #if HOST_BITS_PER_WIDE_INT > 32
3164 n &= 0xffffffff;
3165 #endif
3167 if (n == 0)
3169 operands[immed_op] = const0_rtx;
3170 output_asm_insn (instr1, operands); /* Quick and easy output */
3172 else
3174 int i;
3175 char *instr = instr1;
3177 /* Note that n is never zero here (which would give no output) */
3178 for (i = 0; i < 32; i += 2)
3180 if (n & (3 << i))
3182 operands[immed_op] = GEN_INT (n & (255 << i));
3183 output_asm_insn (instr, operands);
3184 instr = instr2;
3185 i += 6;
3189 return "";
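/* The loop above scans the constant two bits at a time; when it finds a
   set bit it emits the whole aligned 8-bit chunk (n & (255 << i)) and then
   skips past it (the i += 6 here plus the loop's own i += 2). Every such
   chunk is a byte value at an even rotation, which is exactly what the
   ARM immediate encoding can represent. */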
3193 /* Return the appropriate ARM instruction for the operation code.
3194 The returned result should not be overwritten. OP is the rtx of the
3195 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
3196 was shifted. */
3198 char *
3199 arithmetic_instr (op, shift_first_arg)
3200 rtx op;
3201 int shift_first_arg;
3203 switch (GET_CODE (op))
3205 case PLUS:
3206 return "add";
3208 case MINUS:
3209 return shift_first_arg ? "rsb" : "sub";
3211 case IOR:
3212 return "orr";
3214 case XOR:
3215 return "eor";
3217 case AND:
3218 return "and";
3220 default:
3221 abort ();
3226 /* Ensure valid constant shifts and return the appropriate shift mnemonic
3227 for the operation code. The returned result should not be overwritten.
3228 OP is the rtx code of the shift.
3229 On exit, *AMOUNTP will be -1 if the shift is by a register, or the
3230 constant amount otherwise. */
3232 static char *
3233 shift_op (op, amountp)
3234 rtx op;
3235 HOST_WIDE_INT *amountp;
3237 char *mnem;
3238 enum rtx_code code = GET_CODE (op);
3240 if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
3241 *amountp = -1;
3242 else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
3243 *amountp = INTVAL (XEXP (op, 1));
3244 else
3245 abort ();
3247 switch (code)
3249 case ASHIFT:
3250 mnem = "asl";
3251 break;
3253 case ASHIFTRT:
3254 mnem = "asr";
3255 break;
3257 case LSHIFTRT:
3258 mnem = "lsr";
3259 break;
3261 case ROTATERT:
3262 mnem = "ror";
3263 break;
3265 case MULT:
3266 /* We never have to worry about the amount being other than a
3267 power of 2, since this case can never be reloaded from a reg. */
3268 if (*amountp != -1)
3269 *amountp = int_log2 (*amountp);
3270 else
3271 abort ();
3272 return "asl";
3274 default:
3275 abort ();
3278 if (*amountp != -1)
3280 /* This is not 100% correct, but follows from the desire to merge
3281 multiplication by a power of 2 with the recognizer for a
3282 shift. >=32 is not a valid shift for "asl", so we must try and
3283 output a shift that produces the correct arithmetical result.
3284 Using lsr #32 is identical except for the fact that the carry bit
3285 is not set correctly if we set the flags; but we never use the
3286 carry bit from such an operation, so we can ignore that. */
3287 if (code == ROTATERT)
3288 *amountp &= 31; /* Rotate is just modulo 32 */
3289 else if (*amountp != (*amountp & 31))
3291 if (code == ASHIFT)
3292 mnem = "lsr";
3293 *amountp = 32;
3296 /* Shifts of 0 are no-ops. */
3297 if (*amountp == 0)
3298 return NULL;
3301 return mnem;
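/* For example, a multiply by 8 that has been merged into an operand is
   printed as a shift: int_log2 (8) gives 3, so the operand suffix comes
   out as ", asl #3". A constant ASHIFT of 32 or more is printed as
   "lsr #32" instead, which gives the correct arithmetic result (zero)
   for the reasons given above. */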
3305 /* Obtain the shift from the POWER of two. */
3307 HOST_WIDE_INT
3308 int_log2 (power)
3309 HOST_WIDE_INT power;
3311 HOST_WIDE_INT shift = 0;
3313 while (((((HOST_WIDE_INT) 1) << shift) & power) == 0)
3315 if (shift > 31)
3316 abort ();
3317 shift++;
3320 return shift;
3323 /* Output a .ascii pseudo-op, keeping track of lengths. This is because
3324 /bin/as is horribly restrictive. */
3326 void
3327 output_ascii_pseudo_op (stream, p, len)
3328 FILE *stream;
3329 unsigned char *p;
3330 int len;
3332 int i;
3333 int len_so_far = 1000; /* Force a .ascii directive before the first character. */
3334 int chars_so_far = 0;
3336 for (i = 0; i < len; i++)
3338 register int c = p[i];
3340 if (len_so_far > 50)
3342 if (chars_so_far)
3343 fputs ("\"\n", stream);
3344 fputs ("\t.ascii\t\"", stream);
3345 len_so_far = 0;
3346 arm_increase_location (chars_so_far);
3347 chars_so_far = 0;
3350 if (c == '\"' || c == '\\')
3352 putc('\\', stream);
3353 len_so_far++;
3356 if (c >= ' ' && c < 0177)
3358 putc (c, stream);
3359 len_so_far++;
3361 else
3363 fprintf (stream, "\\%03o", c);
3364 len_so_far += 4;
3367 chars_so_far++;
3370 fputs ("\"\n", stream);
3371 arm_increase_location (chars_so_far);
3375 /* Try to determine whether a pattern really clobbers the link register.
3376 This information is useful when peepholing, so that lr need not be pushed
3377 if we combine a call followed by a return.
3378 NOTE: This code does not check for side-effect expressions in a SET_SRC:
3379 such a check should not be needed because these only update an existing
3380 value within a register; the register must still be set elsewhere within
3381 the function. */
3383 static int
3384 pattern_really_clobbers_lr (x)
3385 rtx x;
3387 int i;
3389 switch (GET_CODE (x))
3391 case SET:
3392 switch (GET_CODE (SET_DEST (x)))
3394 case REG:
3395 return REGNO (SET_DEST (x)) == 14;
3397 case SUBREG:
3398 if (GET_CODE (XEXP (SET_DEST (x), 0)) == REG)
3399 return REGNO (XEXP (SET_DEST (x), 0)) == 14;
3401 if (GET_CODE (XEXP (SET_DEST (x), 0)) == MEM)
3402 return 0;
3403 abort ();
3405 default:
3406 return 0;
3409 case PARALLEL:
3410 for (i = 0; i < XVECLEN (x, 0); i++)
3411 if (pattern_really_clobbers_lr (XVECEXP (x, 0, i)))
3412 return 1;
3413 return 0;
3415 case CLOBBER:
3416 switch (GET_CODE (XEXP (x, 0)))
3418 case REG:
3419 return REGNO (XEXP (x, 0)) == 14;
3421 case SUBREG:
3422 if (GET_CODE (XEXP (XEXP (x, 0), 0)) == REG)
3423 return REGNO (XEXP (XEXP (x, 0), 0)) == 14;
3424 abort ();
3426 default:
3427 return 0;
3430 case UNSPEC:
3431 return 1;
3433 default:
3434 return 0;
3438 static int
3439 function_really_clobbers_lr (first)
3440 rtx first;
3442 rtx insn, next;
3444 for (insn = first; insn; insn = next_nonnote_insn (insn))
3446 switch (GET_CODE (insn))
3448 case BARRIER:
3449 case NOTE:
3450 case CODE_LABEL:
3451 case JUMP_INSN: /* Jump insns only change the PC (and conds) */
3452 case INLINE_HEADER:
3453 break;
3455 case INSN:
3456 if (pattern_really_clobbers_lr (PATTERN (insn)))
3457 return 1;
3458 break;
3460 case CALL_INSN:
3461 /* Don't yet know how to handle those calls that are not to a
3462 SYMBOL_REF */
3463 if (GET_CODE (PATTERN (insn)) != PARALLEL)
3464 abort ();
3466 switch (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)))
3468 case CALL:
3469 if (GET_CODE (XEXP (XEXP (XVECEXP (PATTERN (insn), 0, 0), 0), 0))
3470 != SYMBOL_REF)
3471 return 1;
3472 break;
3474 case SET:
3475 if (GET_CODE (XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn),
3476 0, 0)), 0), 0))
3477 != SYMBOL_REF)
3478 return 1;
3479 break;
3481 default: /* Don't recognize it, be safe */
3482 return 1;
3485 /* A call can be made (by peepholing) not to clobber lr iff it is
3486 followed by a return. There may, however, be a use insn iff
3487 we are returning the result of the call.
3488 If we run off the end of the insn chain, then that means the
3489 call was at the end of the function. Unfortunately we don't
3490 have a return insn for the peephole to recognize, so we
3491 must reject this. (Can this be fixed by adding our own insn?) */
3492 if ((next = next_nonnote_insn (insn)) == NULL)
3493 return 1;
3495 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == USE
3496 && (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
3497 && (REGNO (SET_DEST (XVECEXP (PATTERN (insn), 0, 0)))
3498 == REGNO (XEXP (PATTERN (next), 0))))
3499 if ((next = next_nonnote_insn (next)) == NULL)
3500 return 1;
3502 if (GET_CODE (next) == JUMP_INSN
3503 && GET_CODE (PATTERN (next)) == RETURN)
3504 break;
3505 return 1;
3507 default:
3508 abort ();
3512 /* We have reached the end of the chain so lr was _not_ clobbered */
3513 return 0;
3516 char *
3517 output_return_instruction (operand, really_return)
3518 rtx operand;
3519 int really_return;
3521 char instr[100];
3522 int reg, live_regs = 0;
3523 int volatile_func = (optimize > 0
3524 && TREE_THIS_VOLATILE (current_function_decl));
3526 return_used_this_function = 1;
3528 if (volatile_func)
3530 rtx ops[2];
3531 /* If this function was declared non-returning, and we have found a tail
3532 call, then we have to trust that the called function won't return. */
3533 if (! really_return)
3534 return "";
3536 /* Otherwise, trap an attempted return by aborting. */
3537 ops[0] = operand;
3538 ops[1] = gen_rtx (SYMBOL_REF, Pmode, "abort");
3539 assemble_external_libcall (ops[1]);
3540 output_asm_insn ("bl%d0\t%a1", ops);
3541 return "";
3544 if (current_function_calls_alloca && ! really_return)
3545 abort();
3547 for (reg = 0; reg <= 10; reg++)
3548 if (regs_ever_live[reg] && ! call_used_regs[reg])
3549 live_regs++;
3551 if (live_regs || (regs_ever_live[14] && ! lr_save_eliminated))
3552 live_regs++;
3554 if (frame_pointer_needed)
3555 live_regs += 4;
3557 if (live_regs)
3559 if (lr_save_eliminated || ! regs_ever_live[14])
3560 live_regs++;
3562 if (frame_pointer_needed)
3563 strcpy (instr, "ldm%?%d0ea\t%|fp, {");
3564 else
3565 strcpy (instr, "ldm%?%d0fd\t%|sp!, {");
3567 for (reg = 0; reg <= 10; reg++)
3568 if (regs_ever_live[reg] && ! call_used_regs[reg])
3570 strcat (instr, "%|");
3571 strcat (instr, reg_names[reg]);
3572 if (--live_regs)
3573 strcat (instr, ", ");
3576 if (frame_pointer_needed)
3578 strcat (instr, "%|");
3579 strcat (instr, reg_names[11]);
3580 strcat (instr, ", ");
3581 strcat (instr, "%|");
3582 strcat (instr, reg_names[13]);
3583 strcat (instr, ", ");
3584 strcat (instr, "%|");
3585 strcat (instr, really_return ? reg_names[15] : reg_names[14]);
3587 else
3589 strcat (instr, "%|");
3590 strcat (instr, really_return ? reg_names[15] : reg_names[14]);
3592 strcat (instr, (TARGET_APCS_32 || !really_return) ? "}" : "}^");
3593 output_asm_insn (instr, &operand);
3595 else if (really_return)
3597 strcpy (instr, (TARGET_APCS_32
3598 ? "mov%?%d0\t%|pc, %|lr" : "mov%?%d0s\t%|pc, %|lr"));
3599 output_asm_insn (instr, &operand);
3602 return "";
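/* Ignoring the %? and %d0 condition expansions, typical outputs from this
   routine on a 32-bit APCS target would be, for a function that saved r4
   behind a frame pointer:

	ldmea	fp, {r4, fp, sp, pc}

   and, for a leaf function with nothing saved, simply:

	mov	pc, lr
*/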
3605 /* Return nonzero if optimizing and the current function is volatile.
3606 Such functions never return, and many memory cycles can be saved
3607 by not storing register values that will never be needed again.
3608 This optimization was added to speed up context switching in a
3609 kernel application. */
3612 arm_volatile_func ()
3614 return (optimize > 0 && TREE_THIS_VOLATILE (current_function_decl));
3617 /* Return the size of the prologue. It's not too bad if we slightly
3618 over-estimate. */
3620 static int
3621 get_prologue_size ()
3623 return profile_flag ? 12 : 0;
3626 /* The amount of stack adjustment that happens here, in output_return and in
3627 output_epilogue must be exactly the same as was calculated during reload,
3628 or things will point to the wrong place. The only time we can safely
3629 ignore this constraint is when a function has no arguments on the stack,
3630 no stack frame requirement and no live registers except for `lr'. If we
3631 can guarantee that by making all function calls into tail calls and that
3632 lr is not clobbered in any other way, then there is no need to push lr
3633 onto the stack. */
3635 void
3636 output_func_prologue (f, frame_size)
3637 FILE *f;
3638 int frame_size;
3640 int reg, live_regs_mask = 0;
3641 rtx operands[3];
3642 int volatile_func = (optimize > 0
3643 && TREE_THIS_VOLATILE (current_function_decl));
3645 /* Nonzero if we must stuff some register arguments onto the stack as if
3646 they were passed there. */
3647 int store_arg_regs = 0;
3649 if (arm_ccfsm_state || arm_target_insn)
3650 abort (); /* Sanity check */
3652 return_used_this_function = 0;
3653 lr_save_eliminated = 0;
3655 fprintf (f, "\t%s args = %d, pretend = %d, frame = %d\n",
3656 ASM_COMMENT_START, current_function_args_size,
3657 current_function_pretend_args_size, frame_size);
3658 fprintf (f, "\t%s frame_needed = %d, current_function_anonymous_args = %d\n",
3659 ASM_COMMENT_START, frame_pointer_needed,
3660 current_function_anonymous_args);
3662 if (volatile_func)
3663 fprintf (f, "\t%s Volatile function.\n", ASM_COMMENT_START);
3665 if (current_function_anonymous_args && current_function_pretend_args_size)
3666 store_arg_regs = 1;
3668 for (reg = 0; reg <= 10; reg++)
3669 if (regs_ever_live[reg] && ! call_used_regs[reg])
3670 live_regs_mask |= (1 << reg);
3672 if (frame_pointer_needed)
3673 live_regs_mask |= 0xD800;
3674 else if (regs_ever_live[14])
3676 if (! current_function_args_size
3677 && ! function_really_clobbers_lr (get_insns ()))
3678 lr_save_eliminated = 1;
3679 else
3680 live_regs_mask |= 0x4000;
3683 if (live_regs_mask)
3685 /* If a DImode load/store multiple is used, and the base register
3686 is r3, then r4 can become an ever-live register without lr
3687 doing so; in this case we need to push lr as well, or we
3688 will fail to get a proper return. */
3690 live_regs_mask |= 0x4000;
3691 lr_save_eliminated = 0;
3695 if (lr_save_eliminated)
3696 fprintf (f,"\t%s I don't think this function clobbers lr\n",
3697 ASM_COMMENT_START);
3701 void
3702 output_func_epilogue (f, frame_size)
3703 FILE *f;
3704 int frame_size;
3706 int reg, live_regs_mask = 0, code_size = 0;
3707 /* If we need this then it will always be at least this much. */
3708 int floats_offset = 24;
3709 rtx operands[3];
3710 int volatile_func = (optimize > 0
3711 && TREE_THIS_VOLATILE (current_function_decl));
3713 if (use_return_insn() && return_used_this_function)
3715 if (frame_size && !(frame_pointer_needed || TARGET_APCS))
3717 abort ();
3719 goto epilogue_done;
3722 /* A volatile function should never return. Call abort. */
3723 if (volatile_func)
3725 rtx op = gen_rtx (SYMBOL_REF, Pmode, "abort");
3726 assemble_external_libcall (op);
3727 output_asm_insn ("bl\t%a0", &op);
3728 code_size = 4;
3729 goto epilogue_done;
3732 for (reg = 0; reg <= 10; reg++)
3733 if (regs_ever_live[reg] && ! call_used_regs[reg])
3735 live_regs_mask |= (1 << reg);
3736 floats_offset += 4;
3739 if (frame_pointer_needed)
3741 for (reg = 23; reg > 15; reg--)
3742 if (regs_ever_live[reg] && ! call_used_regs[reg])
3744 fprintf (f, "\tldfe\t%s%s, [%sfp, #-%d]\n", REGISTER_PREFIX,
3745 reg_names[reg], REGISTER_PREFIX, floats_offset);
3746 floats_offset += 12;
3747 code_size += 4;
3750 live_regs_mask |= 0xA800;
3751 print_multi_reg (f, "ldmea\t%sfp", live_regs_mask,
3752 TARGET_APCS_32 ? FALSE : TRUE);
3753 code_size += 4;
3755 else
3757 /* Restore stack pointer if necessary. */
3758 if (frame_size)
3760 operands[0] = operands[1] = stack_pointer_rtx;
3761 operands[2] = gen_rtx (CONST_INT, VOIDmode, frame_size);
3762 output_add_immediate (operands);
3765 for (reg = 16; reg < 24; reg++)
3766 if (regs_ever_live[reg] && ! call_used_regs[reg])
3768 fprintf (f, "\tldfe\t%s%s, [%ssp], #12\n", REGISTER_PREFIX,
3769 reg_names[reg], REGISTER_PREFIX);
3770 code_size += 4;
3772 if (current_function_pretend_args_size == 0 && regs_ever_live[14])
3774 print_multi_reg (f, "ldmfd\t%ssp!", live_regs_mask | 0x8000,
3775 TARGET_APCS_32 ? FALSE : TRUE);
3776 code_size += 4;
3778 else
3780 if (live_regs_mask || regs_ever_live[14])
3782 live_regs_mask |= 0x4000;
3783 print_multi_reg (f, "ldmfd\t%ssp!", live_regs_mask, FALSE);
3784 code_size += 4;
3786 if (current_function_pretend_args_size)
3788 operands[0] = operands[1] = stack_pointer_rtx;
3789 operands[2] = gen_rtx (CONST_INT, VOIDmode,
3790 current_function_pretend_args_size);
3791 output_add_immediate (operands);
3793 fprintf (f, (TARGET_APCS_32 ? "\tmov\t%spc, %slr\n"
3794 : "\tmovs\t%spc, %slr\n"),
3795 REGISTER_PREFIX, REGISTER_PREFIX);
3796 code_size += 4;
3800 epilogue_done:
3802 /* insn_addresses isn't allocated when not optimizing */
3804 if (optimize > 0)
3805 arm_increase_location (code_size
3806 + insn_addresses[INSN_UID (get_last_insn ())]
3807 + get_prologue_size ());
3809 current_function_anonymous_args = 0;
3812 static void
3813 emit_multi_reg_push (mask)
3814 int mask;
3816 int num_regs = 0;
3817 int i, j;
3818 rtx par;
3820 for (i = 0; i < 16; i++)
3821 if (mask & (1 << i))
3822 num_regs++;
3824 if (num_regs == 0 || num_regs > 16)
3825 abort ();
3827 par = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num_regs));
3829 for (i = 0; i < 16; i++)
3831 if (mask & (1 << i))
3833 XVECEXP (par, 0, 0)
3834 = gen_rtx (SET, VOIDmode, gen_rtx (MEM, BLKmode,
3835 gen_rtx (PRE_DEC, BLKmode,
3836 stack_pointer_rtx)),
3837 gen_rtx (UNSPEC, BLKmode,
3838 gen_rtvec (1, gen_rtx (REG, SImode, i)),
3839 2));
3840 break;
3844 for (j = 1, i++; j < num_regs; i++)
3846 if (mask & (1 << i))
3848 XVECEXP (par, 0, j)
3849 = gen_rtx (USE, VOIDmode, gen_rtx (REG, SImode, i));
3850 j++;
3853 emit_insn (par);
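/* A sketch of the rtl built here: emit_multi_reg_push (0x4010), i.e. r4
   and lr, creates a PARALLEL whose first element is the pre-decrement
   store

	(set (mem:BLK (pre_dec:BLK (reg:SI sp)))
	     (unspec:BLK [(reg:SI 4)] 2))

   with (use (reg:SI 14)) alongside it; this shape is intended to be
   matched by the store-multiple push pattern in arm.md and printed as a
   single stmfd. */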
3856 void
3857 arm_expand_prologue ()
3859 int reg;
3860 rtx amount = GEN_INT (- get_frame_size ());
3861 rtx push_insn;
3862 int num_regs;
3863 int live_regs_mask = 0;
3864 int store_arg_regs = 0;
3865 int volatile_func = (optimize > 0
3866 && TREE_THIS_VOLATILE (current_function_decl));
3868 if (current_function_anonymous_args && current_function_pretend_args_size)
3869 store_arg_regs = 1;
3871 if (! volatile_func)
3872 for (reg = 0; reg <= 10; reg++)
3873 if (regs_ever_live[reg] && ! call_used_regs[reg])
3874 live_regs_mask |= 1 << reg;
3876 if (! volatile_func && regs_ever_live[14])
3877 live_regs_mask |= 0x4000;
3879 if (frame_pointer_needed)
3881 live_regs_mask |= 0xD800;
3882 emit_insn (gen_movsi (gen_rtx (REG, SImode, 12),
3883 stack_pointer_rtx));
3886 if (current_function_pretend_args_size)
3888 if (store_arg_regs)
3889 emit_multi_reg_push ((0xf0 >> (current_function_pretend_args_size / 4))
3890 & 0xf);
3891 else
3892 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
3893 GEN_INT (-current_function_pretend_args_size)));
3896 if (live_regs_mask)
3898 /* If we have to push any regs, then we must push lr as well, or
3899 we won't get a proper return. */
3900 live_regs_mask |= 0x4000;
3901 emit_multi_reg_push (live_regs_mask);
3904 /* For now the integer regs are still pushed in output_func_epilogue (). */
3906 if (! volatile_func)
3907 for (reg = 23; reg > 15; reg--)
3908 if (regs_ever_live[reg] && ! call_used_regs[reg])
3909 emit_insn (gen_rtx (SET, VOIDmode,
3910 gen_rtx (MEM, XFmode,
3911 gen_rtx (PRE_DEC, XFmode,
3912 stack_pointer_rtx)),
3913 gen_rtx (REG, XFmode, reg)));
3915 if (frame_pointer_needed)
3916 emit_insn (gen_addsi3 (hard_frame_pointer_rtx, gen_rtx (REG, SImode, 12),
3917 (GEN_INT
3918 (-(4 + current_function_pretend_args_size)))));
3920 if (amount != const0_rtx)
3922 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, amount));
3923 emit_insn (gen_rtx (CLOBBER, VOIDmode,
3924 gen_rtx (MEM, BLKmode, stack_pointer_rtx)));
3927 /* If we are profiling, make sure no instructions are scheduled before
3928 the call to mcount. */
3929 if (profile_flag || profile_block_flag)
3930 emit_insn (gen_blockage ());
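/* Putting the pieces together, a typical frame-pointer prologue produced
   by this function (no pretend args) looks roughly like:

	mov	ip, sp
	stmfd	sp!, {fp, ip, lr, pc}
	sub	fp, ip, #4
	sub	sp, sp, #<frame size>

   where fp is r11 and ip is r12. */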
3934 /* If CODE is 'd', then X is a condition operand and the instruction
3935 should only be executed if the condition is true.
3936 If CODE is 'D', then X is a condition operand and the instruction
3937 should only be executed if the condition is false: however, if the mode
3938 of the comparison is CCFPEmode, then always execute the instruction -- we
3939 do this because in these circumstances !GE does not necessarily imply LT;
3940 in these cases the instruction pattern will take care to make sure that
3941 an instruction containing %d will follow, thereby undoing the effects of
3942 doing this instruction unconditionally.
3943 If CODE is 'N' then X is a floating point operand that must be negated
3944 before output.
3945 If CODE is 'B' then output a bitwise inverted value of X (a const int).
3946 If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */
3948 void
3949 arm_print_operand (stream, x, code)
3950 FILE *stream;
3951 rtx x;
3952 int code;
3954 switch (code)
3956 case '@':
3957 fputs (ASM_COMMENT_START, stream);
3958 return;
3960 case '|':
3961 fputs (REGISTER_PREFIX, stream);
3962 return;
3964 case '?':
3965 if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
3966 fputs (arm_condition_codes[arm_current_cc], stream);
3967 return;
3969 case 'N':
3971 REAL_VALUE_TYPE r;
3972 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3973 r = REAL_VALUE_NEGATE (r);
3974 fprintf (stream, "%s", fp_const_from_val (&r));
3976 return;
3978 case 'B':
3979 if (GET_CODE (x) == CONST_INT)
3980 fprintf (stream,
3981 #if HOST_BITS_PER_WIDE_INT == HOST_BITS_PER_INT
3982 "%d",
3983 #else
3984 "%ld",
3985 #endif
3986 ARM_SIGN_EXTEND (~ INTVAL (x)));
3987 else
3989 putc ('~', stream);
3990 output_addr_const (stream, x);
3992 return;
3994 case 'i':
3995 fprintf (stream, "%s", arithmetic_instr (x, 1));
3996 return;
3998 case 'I':
3999 fprintf (stream, "%s", arithmetic_instr (x, 0));
4000 return;
4002 case 'S':
4004 HOST_WIDE_INT val;
4005 char *shift = shift_op (x, &val);
4007 if (shift)
4009 fprintf (stream, ", %s ", shift);
4010 if (val == -1)
4011 arm_print_operand (stream, XEXP (x, 1), 0);
4012 else
4013 fprintf (stream,
4014 #if HOST_BITS_PER_WIDE_INT == HOST_BITS_PER_INT
4015 "#%d",
4016 #else
4017 "#%ld",
4018 #endif
4019 val);
4022 return;
4024 case 'Q':
4025 if (REGNO (x) > 15)
4026 abort ();
4027 fputs (REGISTER_PREFIX, stream);
4028 fputs (reg_names[REGNO (x) + (WORDS_BIG_ENDIAN ? 1 : 0)], stream);
4029 return;
4031 case 'R':
4032 if (REGNO (x) > 15)
4033 abort ();
4034 fputs (REGISTER_PREFIX, stream);
4035 fputs (reg_names[REGNO (x) + (WORDS_BIG_ENDIAN ? 0 : 1)], stream);
4036 return;
4038 case 'm':
4039 fputs (REGISTER_PREFIX, stream);
4040 if (GET_CODE (XEXP (x, 0)) == REG)
4041 fputs (reg_names[REGNO (XEXP (x, 0))], stream);
4042 else
4043 fputs (reg_names[REGNO (XEXP (XEXP (x, 0), 0))], stream);
4044 return;
4046 case 'M':
4047 fprintf (stream, "{%s%s-%s%s}", REGISTER_PREFIX, reg_names[REGNO (x)],
4048 REGISTER_PREFIX, reg_names[REGNO (x) - 1
4049 + ((GET_MODE_SIZE (GET_MODE (x))
4050 + GET_MODE_SIZE (SImode) - 1)
4051 / GET_MODE_SIZE (SImode))]);
4052 return;
4054 case 'd':
4055 if (x)
4056 fputs (arm_condition_codes[get_arm_condition_code (x)],
4057 stream);
4058 return;
4060 case 'D':
4061 if (x && (flag_fast_math
4062 || GET_CODE (x) == EQ || GET_CODE (x) == NE
4063 || (GET_MODE (XEXP (x, 0)) != CCFPEmode
4064 && (GET_MODE_CLASS (GET_MODE (XEXP (x, 0)))
4065 != MODE_FLOAT))))
4066 fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
4067 (get_arm_condition_code (x))],
4068 stream);
4069 return;
4071 default:
4072 if (x == 0)
4073 abort ();
4075 if (GET_CODE (x) == REG)
4077 fputs (REGISTER_PREFIX, stream);
4078 fputs (reg_names[REGNO (x)], stream);
4080 else if (GET_CODE (x) == MEM)
4082 output_memory_reference_mode = GET_MODE (x);
4083 output_address (XEXP (x, 0));
4085 else if (GET_CODE (x) == CONST_DOUBLE)
4086 fprintf (stream, "#%s", fp_immediate_constant (x));
4087 else if (GET_CODE (x) == NEG)
4088 abort (); /* This should never happen now. */
4089 else
4091 fputc ('#', stream);
4092 output_addr_const (stream, x);
4097 /* Increase the `arm_text_location' by AMOUNT if we're in the text
4098 segment. */
4100 void
4101 arm_increase_location (amount)
4102 int amount;
4104 if (in_text_section ())
4105 arm_text_location += amount;
4109 /* Output a label definition. If this label is within the .text segment, it
4110 is stored in OFFSET_TABLE, to be used when building `llc' instructions.
4111 Maybe GCC remembers names not starting with a `*' for a long time, but this
4112 is a minority anyway, so we just make a copy. Do not store the leading `*'
4113 if the name starts with one. */
4115 void
4116 arm_asm_output_label (stream, name)
4117 FILE *stream;
4118 char *name;
4120 char *real_name, *s;
4121 struct label_offset *cur;
4122 int hash = 0;
4124 ARM_OUTPUT_LABEL (stream, name);
4125 if (! in_text_section ())
4126 return;
4128 if (name[0] == '*')
4130 real_name = xmalloc (1 + strlen (&name[1]));
4131 strcpy (real_name, &name[1]);
4133 else
4135 real_name = xmalloc (2 + strlen (name));
4136 strcpy (real_name, USER_LABEL_PREFIX);
4137 strcat (real_name, name);
4139 for (s = real_name; *s; s++)
4140 hash += *s;
4142 hash = hash % LABEL_HASH_SIZE;
4143 cur = (struct label_offset *) xmalloc (sizeof (struct label_offset));
4144 cur->name = real_name;
4145 cur->offset = arm_text_location;
4146 cur->cdr = offset_table[hash];
4147 offset_table[hash] = cur;
4150 /* Output code resembling an .lcomm directive. /bin/as doesn't have this
4151 directive hence this hack, which works by reserving some `.space' in the
4152 bss segment directly.
4154 XXX This is a severe hack, which is guaranteed NOT to work since it doesn't
4155 define STATIC COMMON space but merely STATIC BSS space. */
4157 void
4158 output_lcomm_directive (stream, name, size, rounded)
4159 FILE *stream;
4160 char *name;
4161 int size, rounded;
4163 fprintf (stream, "\n\t.bss\t%s .lcomm\n", ASM_COMMENT_START);
4164 assemble_name (stream, name);
4165 fprintf (stream, ":\t.space\t%d\n", rounded);
4166 if (in_text_section ())
4167 fputs ("\n\t.text\n", stream);
4168 else
4169 fputs ("\n\t.data\n", stream);
4172 /* A finite state machine takes care of noticing whether or not instructions
4173 can be conditionally executed, and thus decrease execution time and code
4174 size by deleting branch instructions. The fsm is controlled by
4175 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
4177 /* The states of the fsm controlling condition codes are:
4178 0: normal, do nothing special
4179 1: make ASM_OUTPUT_OPCODE not output this instruction
4180 2: make ASM_OUTPUT_OPCODE not output this instruction
4181 3: make instructions conditional
4182 4: make instructions conditional
4184 State transitions (state->state by whom under condition):
4185 0 -> 1 final_prescan_insn if the `target' is a label
4186 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
4187 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
4188 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
4189 3 -> 0 ASM_OUTPUT_INTERNAL_LABEL if the `target' label is reached
4190 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
4191 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
4192 (the target insn is arm_target_insn).
4194 If the jump clobbers the conditions then we use states 2 and 4.
4196 A similar thing can be done with conditional return insns.
4198 XXX In case the `target' is an unconditional branch, this conditionalising
4199 of the instructions always reduces code size, but not always execution
4200 time. But then, I want to reduce the code size to somewhere near what
4201 /bin/cc produces. */
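/* As a concrete example of what this fsm achieves, a sequence such as

	cmp	r0, #0
	beq	L1
	add	r1, r1, #1
   L1:	...

   is output instead as

	cmp	r0, #0
	addne	r1, r1, #1
   L1:	...

   i.e. the branch is suppressed and the skipped instruction is executed
   under the inverse condition. */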
4203 /* Returns the index of the ARM condition code string in
4204 `arm_condition_codes'. COMPARISON should be an rtx like
4205 `(eq (...) (...))'. */
4208 get_arm_condition_code (comparison)
4209 rtx comparison;
4211 switch (GET_CODE (comparison))
4213 case NE: return (1);
4214 case EQ: return (0);
4215 case GE: return (10);
4216 case GT: return (12);
4217 case LE: return (13);
4218 case LT: return (11);
4219 case GEU: return (2);
4220 case GTU: return (8);
4221 case LEU: return (9);
4222 case LTU: return (3);
4223 default: abort ();
4225 /*NOTREACHED*/
4226 return (42);
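/* Given the usual ordering of arm_condition_codes ("eq", "ne", "cs",
   "cc", "mi", "pl", "vs", "vc", "hi", "ls", "ge", "lt", "gt", "le", ...),
   the indices above map EQ to "eq", GEU to "cs", GTU to "hi", LTU to "cc"
   and so on. */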
4230 void
4231 final_prescan_insn (insn, opvec, noperands)
4232 rtx insn;
4233 rtx *opvec;
4234 int noperands;
4236 /* BODY will hold the body of INSN. */
4237 register rtx body = PATTERN (insn);
4239 /* This will be 1 if trying to repeat the trick, and things need to be
4240 reversed if it appears to fail. */
4241 int reverse = 0;
4243 /* JUMP_CLOBBERS will be nonzero if the condition codes are clobbered when
4244 the branch is taken, even if the rtl suggests otherwise. It also
4245 means that we have to grub around within the jump expression to find
4246 out what the conditions are when the jump isn't taken. */
4247 int jump_clobbers = 0;
4249 /* If we start with a return insn, we only succeed if we find another one. */
4250 int seeking_return = 0;
4252 /* START_INSN will hold the insn from where we start looking. This is the
4253 first insn after the following code_label if REVERSE is true. */
4254 rtx start_insn = insn;
4256 /* If in state 4, check if the target branch is reached, in order to
4257 change back to state 0. */
4258 if (arm_ccfsm_state == 4)
4260 if (insn == arm_target_insn)
4262 arm_target_insn = NULL;
4263 arm_ccfsm_state = 0;
4265 return;
4268 /* If in state 3, it is possible to repeat the trick, if this insn is an
4269 unconditional branch to a label, and immediately following this branch
4270 is the previous target label which is only used once, and the label this
4271 branch jumps to is not too far off. */
4272 if (arm_ccfsm_state == 3)
4274 if (simplejump_p (insn))
4276 start_insn = next_nonnote_insn (start_insn);
4277 if (GET_CODE (start_insn) == BARRIER)
4279 /* XXX Isn't this always a barrier? */
4280 start_insn = next_nonnote_insn (start_insn);
4282 if (GET_CODE (start_insn) == CODE_LABEL
4283 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
4284 && LABEL_NUSES (start_insn) == 1)
4285 reverse = TRUE;
4286 else
4287 return;
4289 else if (GET_CODE (body) == RETURN)
4291 start_insn = next_nonnote_insn (start_insn);
4292 if (GET_CODE (start_insn) == BARRIER)
4293 start_insn = next_nonnote_insn (start_insn);
4294 if (GET_CODE (start_insn) == CODE_LABEL
4295 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
4296 && LABEL_NUSES (start_insn) == 1)
4298 reverse = TRUE;
4299 seeking_return = 1;
4301 else
4302 return;
4304 else
4305 return;
4308 if (arm_ccfsm_state != 0 && !reverse)
4309 abort ();
4310 if (GET_CODE (insn) != JUMP_INSN)
4311 return;
4313 /* This jump might be paralleled with a clobber of the condition codes;
4314 the jump should always come first. */
4315 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
4316 body = XVECEXP (body, 0, 0);
4318 #if 0
4319 /* If this is a conditional return then we don't want to know */
4320 if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
4321 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
4322 && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
4323 || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
4324 return;
4325 #endif
4327 if (reverse
4328 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
4329 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
4331 int insns_skipped = 0, fail = FALSE, succeed = FALSE;
4332 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
4333 int then_not_else = TRUE;
4334 rtx this_insn = start_insn, label = 0;
4336 if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
4338 /* The code below is wrong for these, and I haven't time to
4339 fix it now. So we just do the safe thing and return. This
4340 whole function needs re-writing anyway. */
4341 jump_clobbers = 1;
4342 return;
4345 /* Register the insn jumped to. */
4346 if (reverse)
4348 if (!seeking_return)
4349 label = XEXP (SET_SRC (body), 0);
4351 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
4352 label = XEXP (XEXP (SET_SRC (body), 1), 0);
4353 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
4355 label = XEXP (XEXP (SET_SRC (body), 2), 0);
4356 then_not_else = FALSE;
4358 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
4359 seeking_return = 1;
4360 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
4362 seeking_return = 1;
4363 then_not_else = FALSE;
4365 else
4366 abort ();
4368 /* See how many insns this branch skips, and what kind of insns. If all
4369 insns are okay, and the label or unconditional branch to the same
4370 label is not too far away, succeed. */
4371 for (insns_skipped = 0;
4372 !fail && !succeed && insns_skipped < MAX_INSNS_SKIPPED;
4373 insns_skipped++)
4375 rtx scanbody;
4377 this_insn = next_nonnote_insn (this_insn);
4378 if (!this_insn)
4379 break;
4381 scanbody = PATTERN (this_insn);
4383 switch (GET_CODE (this_insn))
4385 case CODE_LABEL:
4386 /* Succeed if it is the target label, otherwise fail since
4387 control falls in from somewhere else. */
4388 if (this_insn == label)
4390 if (jump_clobbers)
4392 arm_ccfsm_state = 2;
4393 this_insn = next_nonnote_insn (this_insn);
4395 else
4396 arm_ccfsm_state = 1;
4397 succeed = TRUE;
4399 else
4400 fail = TRUE;
4401 break;
4403 case BARRIER:
4404 /* Succeed if the following insn is the target label.
4405 Otherwise fail.
4406 If return insns are used then the last insn in a function
4407 will be a barrier. */
4408 this_insn = next_nonnote_insn (this_insn);
4409 if (this_insn && this_insn == label)
4411 if (jump_clobbers)
4413 arm_ccfsm_state = 2;
4414 this_insn = next_nonnote_insn (this_insn);
4416 else
4417 arm_ccfsm_state = 1;
4418 succeed = TRUE;
4420 else
4421 fail = TRUE;
4422 break;
4424 case CALL_INSN:
4425 /* If using 32-bit addresses the cc is not preserved over
4426 calls */
              if (TARGET_APCS_32)
                fail = TRUE;
              break;

            case JUMP_INSN:
              /* If this is an unconditional branch to the same label, succeed.
                 If it is to another label, do nothing.  If it is conditional,
                 fail.  */
              /* XXX Probably, the tests for the SET and the PC are
                 unnecessary.  */

              if (GET_CODE (scanbody) == SET
                  && GET_CODE (SET_DEST (scanbody)) == PC)
                {
                  if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
                      && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
                    {
                      arm_ccfsm_state = 2;
                      succeed = TRUE;
                    }
                  else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
                    fail = TRUE;
                }
              else if (GET_CODE (scanbody) == RETURN
                       && seeking_return)
                {
                  arm_ccfsm_state = 2;
                  succeed = TRUE;
                }
              else if (GET_CODE (scanbody) == PARALLEL)
                {
                  switch (get_attr_conds (this_insn))
                    {
                    case CONDS_NOCOND:
                      break;
                    default:
                      fail = TRUE;
                      break;
                    }
                }
              break;

            case INSN:
              /* Instructions using or affecting the condition codes make it
                 fail.  */
              if ((GET_CODE (scanbody) == SET
                   || GET_CODE (scanbody) == PARALLEL)
                  && get_attr_conds (this_insn) != CONDS_NOCOND)
                fail = TRUE;
              break;

            default:
              break;
            }
        }
      if (succeed)
        {
          if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
            arm_target_label = CODE_LABEL_NUMBER (label);
          else if (seeking_return || arm_ccfsm_state == 2)
            {
              while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
                {
                  this_insn = next_nonnote_insn (this_insn);
                  if (this_insn && (GET_CODE (this_insn) == BARRIER
                                    || GET_CODE (this_insn) == CODE_LABEL))
                    abort ();
                }
              if (!this_insn)
                {
                  /* Oh dear!  We ran off the end; give up.  */
                  recog (PATTERN (insn), insn, NULL_PTR);
                  arm_ccfsm_state = 0;
                  arm_target_insn = NULL;
                  return;
                }
              arm_target_insn = this_insn;
            }
          else
            abort ();
          if (jump_clobbers)
            {
              if (reverse)
                abort ();
              arm_current_cc =
                get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
                                                          0), 0), 1));
              if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
                arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
              if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
                arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
            }
          else
            {
              /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted
                 from what it was.  */
              if (!reverse)
                arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
                                                               0));
            }

          if (reverse || then_not_else)
            arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
        }

      /* Restore recog_operand (getting the attributes of other insns can
         destroy this array, but final.c assumes that it remains intact
         across this call; since the insn has been recognized already we
         call recog directly).  */
      recog (PATTERN (insn), insn, NULL_PTR);
    }
}
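
/* As an illustration of what the state machine above achieves: a branch
   over a single instruction, such as

        bne     .L1
        add     r0, r0, #1
   .L1:

   can instead be output as the one conditionalised instruction

        addeq   r0, r0, #1

   (a sketch only; the actual output depends on the surrounding insns).  */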
#ifdef AOF_ASSEMBLER
/* Special functions only needed when producing AOF syntax assembler.  */

int arm_text_section_count = 1;

char *
aof_text_section (in_readonly)
     int in_readonly;
{
  static char buf[100];
  if (in_readonly)
    return "";
  sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
           arm_text_section_count++);
  if (flag_pic)
    strcat (buf, ", PIC, REENTRANT");
  return buf;
}
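
/* For example, the first code area is announced as

        AREA |C$$code1|, CODE, READONLY

   with ", PIC, REENTRANT" appended when position-independent code is
   being generated.  */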
static int arm_data_section_count = 1;

char *
aof_data_section ()
{
  static char buf[100];
  sprintf (buf, "\tAREA |C$$data%d|, DATA", arm_data_section_count++);
  return buf;
}
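
/* Similarly, data areas are announced as, for example,

        AREA |C$$data1|, DATA

   with the counter giving each area a distinct name.  */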
/* The AOF assembler is religiously strict about declarations of
   imported and exported symbols, so that it is impossible to declare
   a function as imported near the beginning of the file and then to
   export it later on.  It is, however, possible to delay the decision
   until all the functions in the file have been compiled.  To get
   around this, we maintain a list of the imports and exports, and
   delete from it any that are subsequently defined.  At the end of
   compilation we spit the remainder of the list out before the END
   directive.  */
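
/* For instance (a hypothetical example), a file that calls printf but
   never defines it will still have printf on the list at the end of
   compilation, so

        IMPORT  printf

   is emitted just before the END directive.  */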
struct import
{
  struct import *next;
  char *name;
};

static struct import *imports_list = NULL;

void
aof_add_import (name)
     char *name;
{
  struct import *new;

  for (new = imports_list; new; new = new->next)
    if (new->name == name)
      return;

  new = (struct import *) xmalloc (sizeof (struct import));
  new->next = imports_list;
  imports_list = new;
  new->name = name;
}
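
/* Note that the duplicate check above, and the deletion below, compare
   NAME by pointer rather than with strcmp (); this assumes the same
   string is passed each time a given symbol is mentioned.  */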
void
aof_delete_import (name)
     char *name;
{
  struct import **old;

  for (old = &imports_list; *old; old = & (*old)->next)
    {
      if ((*old)->name == name)
        {
          *old = (*old)->next;
          return;
        }
    }
}
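
/* Typical use of the list (a sketch): aof_add_import is called when a
   symbol is first referenced, aof_delete_import when the symbol turns
   out to be defined in this file, and aof_dump_imports emits IMPORT
   directives for whatever remains at the end of compilation.  The
   pointer-to-pointer walk above lets the head of the list be unlinked
   without a special case.  */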
int arm_main_function = 0;

void
aof_dump_imports (f)
     FILE *f;
{
  /* The AOF assembler needs this to cause the startup code to be extracted
     from the library.  Bringing in __main causes the whole thing to work
     automagically.  */
  if (arm_main_function)
    {
      text_section ();
      fputs ("\tIMPORT __main\n", f);
      fputs ("\tDCD __main\n", f);
    }

  /* Now dump the remaining imports.  */
  while (imports_list)
    {
      fprintf (f, "\tIMPORT\t");
      assemble_name (f, imports_list->name);
      fputc ('\n', f);
      imports_list = imports_list->next;
    }
}
#endif /* AOF_ASSEMBLER */