/* Output routines for GCC for ARM/RISCiX.
   Copyright (C) 1991, 1993, 1994, 1995, 1996 Free Software Foundation, Inc.
   Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
   and Martin Simmons (@harleqn.co.uk).
   More major hacks by Richard Earnshaw (rwe11@cl.cam.ac.uk)

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include <stdio.h>
#include <string.h>
#include "assert.h"
#include "config.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-flags.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "reload.h"
#include "tree.h"
#include "expr.h"
/* The maximum number of insns skipped which will be conditionalised if
   possible.  */
#define MAX_INSNS_SKIPPED  5

/* Some function declarations.  */
extern FILE *asm_out_file;
extern char *output_multi_immediate ();
extern void arm_increase_location ();

HOST_WIDE_INT int_log2 PROTO ((HOST_WIDE_INT));
static int get_prologue_size PROTO ((void));
static int arm_gen_constant PROTO ((enum rtx_code, enum machine_mode,
				    HOST_WIDE_INT, rtx, rtx, int, int));
/* Define the information needed to generate branch insns.  This is
   stored from the compare operation.  */
rtx arm_compare_op0, arm_compare_op1;
int arm_compare_fp;

/* What type of cpu are we compiling for?  */
enum processor_type arm_cpu;

/* What type of floating point are we compiling for?  */
enum floating_point_type arm_fpu;

/* What program mode is the cpu running in?  26-bit mode or 32-bit mode.  */
enum prog_mode_type arm_prgmode;

char *target_cpu_name = ARM_CPU_NAME;
char *target_fpe_name = NULL;

/* Nonzero if this is an "M" variant of the processor.  */
int arm_fast_multiply = 0;

/* Nonzero if this chip supports the ARM Architecture 4 extensions.  */
int arm_arch4 = 0;

/* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
   must report the mode of the memory reference from PRINT_OPERAND to
   PRINT_OPERAND_ADDRESS.  */
enum machine_mode output_memory_reference_mode;

/* Nonzero if the prologue must set up `fp'.  */
int current_function_anonymous_args;

/* Location counter of .text segment.  */
int arm_text_location = 0;

/* Set to one if we think that lr is only saved because of subroutine calls,
   but all of these can be `put after' return insns.  */
int lr_save_eliminated;

/* A hash table is used to store text segment labels and their associated
   offset from the start of the text segment.  */
struct label_offset
{
  char *name;
  int offset;
  struct label_offset *cdr;
};

#define LABEL_HASH_SIZE  257

static struct label_offset *offset_table[LABEL_HASH_SIZE];

/* Set to 1 when a return insn is output; this means that the epilogue
   is not needed.  */
static int return_used_this_function;

static int arm_constant_limit = 3;

/* For an explanation of these variables, see final_prescan_insn below.  */
int arm_ccfsm_state;
enum arm_cond_code arm_current_cc;
rtx arm_target_insn;
int arm_target_label;

/* The condition codes of the ARM, and the inverse function.  */
char *arm_condition_codes[] =
{
  "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
  "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
};

static enum arm_cond_code get_arm_condition_code ();
/* Initialization code */

struct arm_cpu_select arm_select[3] =
{
  /* switch	name,		tune	arch */
  { (char *)0,	"--with-cpu=",	1,	1 },
  { (char *)0,	"-mcpu=",	1,	1 },
  { (char *)0,	"-mtune=",	1,	0 },
};

#define FL_CO_PROC    0x01	/* Has external co-processor bus */
#define FL_FAST_MULT  0x02	/* Fast multiply */
#define FL_MODE26     0x04	/* 26-bit mode support */
#define FL_MODE32     0x08	/* 32-bit mode support */
#define FL_ARCH4      0x10	/* Architecture rel 4 */
#define FL_THUMB      0x20	/* Thumb aware */

struct processors
{
  char *name;
  enum processor_type type;
  unsigned int flags;
};

/* Not all of these give usefully different compilation alternatives,
   but there is no simple way of generalizing them.  */
static struct processors all_procs[] =
{
  {"arm2",	PROCESSOR_ARM2, FL_CO_PROC | FL_MODE26},
  {"arm250",	PROCESSOR_ARM2, FL_CO_PROC | FL_MODE26},
  {"arm3",	PROCESSOR_ARM2, FL_CO_PROC | FL_MODE26},
  {"arm6",	PROCESSOR_ARM6, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm60",	PROCESSOR_ARM6, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm600",	PROCESSOR_ARM6, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm610",	PROCESSOR_ARM6, FL_MODE32 | FL_MODE26},
  {"arm620",	PROCESSOR_ARM6, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm7",	PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm70",	PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm7d",	PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm7di",	PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm7dm",	PROCESSOR_ARM7, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
				 | FL_MODE26)},
  {"arm7dmi",	PROCESSOR_ARM7, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
				 | FL_MODE26)},
  {"arm700",	PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm700i",	PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
  {"arm710",	PROCESSOR_ARM7, FL_MODE32 | FL_MODE26},
  {"arm710c",	PROCESSOR_ARM7, FL_MODE32 | FL_MODE26},
  {"arm7100",	PROCESSOR_ARM7, FL_MODE32 | FL_MODE26},
  {"arm7500",	PROCESSOR_ARM7, FL_MODE32 | FL_MODE26},
  {"arm7tdmi",	PROCESSOR_ARM7, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
				 | FL_ARCH4 | FL_THUMB)},
  {NULL, 0, 0}
};
/* Fix up any incompatible options that the user has specified.
   This has now turned into a maze.  */
void
arm_override_options ()
{
  int arm_thumb_aware = 0;
  int flags = 0;
  int i;
  struct arm_cpu_select *ptr;

  arm_cpu = PROCESSOR_DEFAULT;
  arm_select[0].string = TARGET_CPU_DEFAULT;

  for (i = 0; i < sizeof (arm_select) / sizeof (arm_select[0]); i++)
    {
      ptr = &arm_select[i];
      if (ptr->string != (char *)0 && ptr->string[0] != '\0')
	{
	  struct processors *sel;

	  for (sel = all_procs; sel->name != NULL; sel++)
	    if (! strcmp (ptr->string, sel->name))
	      {
		if (ptr->set_tune_p)
		  arm_cpu = sel->type;

		if (ptr->set_arch_p)
		  flags = sel->flags;
		break;
	      }

	  if (sel->name == NULL)
	    error ("bad value (%s) for %s switch", ptr->string, ptr->name);
	}
    }

  if (write_symbols != NO_DEBUG && flag_omit_frame_pointer)
    warning ("-g with -fomit-frame-pointer may not give sensible debugging");

  if (TARGET_POKE_FUNCTION_NAME)
    target_flags |= ARM_FLAG_APCS_FRAME;

  if (TARGET_6)
    {
      warning ("Option '-m6' deprecated.  Use: '-mapcs-32' or -mcpu=<proc>");
      target_flags |= ARM_FLAG_APCS_32;
      arm_cpu = PROCESSOR_ARM6;
    }

  if (TARGET_3)
    {
      warning ("Option '-m3' deprecated.  Use: '-mapcs-26' or -mcpu=<proc>");
      target_flags &= ~ARM_FLAG_APCS_32;
      arm_cpu = PROCESSOR_ARM2;
    }

  if (TARGET_APCS_REENT && flag_pic)
    fatal ("-fpic and -mapcs-reent are incompatible");

  if (TARGET_APCS_REENT)
    warning ("APCS reentrant code not supported.  Ignored");

  if (flag_pic)
    warning ("Position independent code not supported.  Ignored");

  if (TARGET_APCS_FLOAT)
    warning ("Passing floating point arguments in fp regs not yet supported");

  if (TARGET_APCS_STACK && ! TARGET_APCS)
    {
      warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
      target_flags |= ARM_FLAG_APCS_FRAME;
    }

  arm_fpu = FP_HARD;

  /* Default value for floating point code... if no co-processor
     bus, then schedule for emulated floating point.  Otherwise,
     assume the user has an FPA, unless overridden with -mfpe-...  */
  if ((flags & FL_CO_PROC) == 0)
    arm_fpu = FP_SOFT3;
  else
    arm_fpu = FP_HARD;
  arm_fast_multiply = (flags & FL_FAST_MULT) != 0;
  arm_arch4 = (flags & FL_ARCH4) != 0;
  arm_thumb_aware = (flags & FL_THUMB) != 0;

  if (target_fpe_name)
    {
      if (strcmp (target_fpe_name, "2") == 0)
	arm_fpu = FP_SOFT2;
      else if (strcmp (target_fpe_name, "3") == 0)
	arm_fpu = FP_SOFT3;
      else
	fatal ("Invalid floating point emulation option: -mfpe-%s",
	       target_fpe_name);
    }

  if (TARGET_THUMB_INTERWORK && ! arm_thumb_aware)
    {
      warning ("This processor variant does not support Thumb interworking");
      target_flags &= ~ARM_FLAG_THUMB;
    }

  if (TARGET_FPE && arm_fpu != FP_HARD)
    arm_fpu = FP_SOFT2;

  /* For arm2/3 there is no need to do any scheduling if there is only
     a floating point emulator, or we are doing software floating-point.  */
  if ((TARGET_SOFT_FLOAT || arm_fpu != FP_HARD) && arm_cpu == PROCESSOR_ARM2)
    flag_schedule_insns = flag_schedule_insns_after_reload = 0;

  arm_prgmode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;
}
/* Return 1 if it is possible to return using a single instruction.  */

int
use_return_insn ()
{
  int regno;

  if (! reload_completed || current_function_pretend_args_size
      || current_function_anonymous_args
      || (get_frame_size () && ! (TARGET_APCS || frame_pointer_needed)))
    return 0;

  /* Can't be done if any of the FPU regs are pushed, since this also
     requires an insn.  */
  for (regno = 20; regno < 24; regno++)
    if (regs_ever_live[regno])
      return 0;

  /* If a function is naked, don't use the "return" insn.  */
  if (arm_naked_function_p (current_function_decl))
    return 0;

  return 1;
}
/* Return TRUE if int I is a valid immediate ARM constant.  */

int
const_ok_for_arm (i)
     HOST_WIDE_INT i;
{
  unsigned HOST_WIDE_INT mask = ~0xFF;

  /* Fast return for 0 and powers of 2 */
  if ((i & (i - 1)) == 0)
    return TRUE;

  do
    {
      if ((i & mask & (unsigned HOST_WIDE_INT) 0xffffffff) == 0)
	return TRUE;
      mask =
	  (mask << 2) | ((mask & (unsigned HOST_WIDE_INT) 0xffffffff)
			 >> (32 - 2)) | ~((unsigned HOST_WIDE_INT) 0xffffffff);
    } while (mask != ~0xFF);

  return FALSE;
}
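
/* Illustrative note (not part of the original source): an ARM
   data-processing immediate is an 8-bit value rotated right by an even
   amount, which is exactly what the mask walk above tests.  For example:

       const_ok_for_arm (0xFF)        => TRUE   (8 bits, no rotation)
       const_ok_for_arm (0x3FC)       => TRUE   (0xFF rotated right by 30)
       const_ok_for_arm (0xFF000000)  => TRUE   (0xFF rotated right by 8)
       const_ok_for_arm (0x101)       => FALSE  (set bits span 9 bits)

   These values are given only as a worked example of the encoding rule.  */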
/* Return true if I is a valid constant for the operation CODE.  */

int
const_ok_for_op (i, code, mode)
     HOST_WIDE_INT i;
     enum rtx_code code;
     enum machine_mode mode;
{
  if (const_ok_for_arm (i))
    return 1;

  switch (code)
    {
    case PLUS:
      return const_ok_for_arm (ARM_SIGN_EXTEND (-i));

    case MINUS:		/* Should only occur with (MINUS I reg) => rsb */
    case XOR:
    case IOR:
      return 0;

    case AND:
      return const_ok_for_arm (ARM_SIGN_EXTEND (~i));

    default:
      abort ();
    }
}
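
/* Illustrative note (not part of the original source): PLUS and AND accept
   a constant whenever the dual operation can encode it, because the insn
   can be rewritten using that dual:

       x + (-256)       =>  sub rd, rn, #256     (-i is encodable)
       x & 0xFFFFFF00   =>  bic rd, rn, #0xFF    (~i is encodable)

   IOR and XOR have no such dual on the ARM, hence the 0 above.  */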
/* Emit a sequence of insns to handle a large constant.
   CODE is the code of the operation required; it can be any of SET, PLUS,
   IOR, AND, XOR, MINUS;
   MODE is the mode in which the operation is being performed;
   VAL is the integer to operate on;
   SOURCE is the other operand (a register, or a null-pointer for SET);
   SUBTARGETS means it is safe to create scratch registers if that will
   either produce a simpler sequence, or we will want to cse the values.
   Return value is the number of insns emitted.  */

int
arm_split_constant (code, mode, val, target, source, subtargets)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
{
  if (subtargets || code == SET
      || (GET_CODE (target) == REG && GET_CODE (source) == REG
	  && REGNO (target) != REGNO (source)))
    {
      rtx temp;

      if (arm_gen_constant (code, mode, val, target, source, 1, 0)
	  > arm_constant_limit + (code != SET))
	{
	  if (code == SET)
	    {
	      /* Currently SET is the only monadic value for CODE; all
		 the rest are dyadic.  */
	      emit_insn (gen_rtx (SET, VOIDmode, target, GEN_INT (val)));
	      return 1;
	    }
	  else
	    {
	      rtx temp = subtargets ? gen_reg_rtx (mode) : target;

	      emit_insn (gen_rtx (SET, VOIDmode, temp, GEN_INT (val)));
	      /* For MINUS, the value is subtracted from, since we never
		 have subtraction of a constant.  */
	      if (code == MINUS)
		emit_insn (gen_rtx (SET, VOIDmode, target,
				    gen_rtx (code, mode, temp, source)));
	      else
		emit_insn (gen_rtx (SET, VOIDmode, target,
				    gen_rtx (code, mode, source, temp)));
	      return 2;
	    }
	}
    }

  return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
}
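
/* Illustrative usage sketch (hypothetical values, not from the original
   source): a movsi expander faced with an awkward immediate might call

       arm_split_constant (SET, SImode, 0x0000ffff, operands[0], NULL_RTX,
			   ! reload_in_progress);

   which emits a short mov/add (or mvn/sub) sequence for the constant and
   returns the number of insns it used.  */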
/* As above, but extra parameter GENERATE which, if clear, suppresses
   RTL generation.  */

static int
arm_gen_constant (code, mode, val, target, source, subtargets, generate)
     enum rtx_code code;
     enum machine_mode mode;
     HOST_WIDE_INT val;
     rtx target;
     rtx source;
     int subtargets;
     int generate;
{
  int can_add = 0;
  int can_invert = 0;
  int can_negate = 0;
  int can_negate_initial = 0;
  int can_shift = 0;
  int i;
  int num_bits_set = 0;
  int set_sign_bit_copies = 0;
  int clear_sign_bit_copies = 0;
  int clear_zero_bit_copies = 0;
  int set_zero_bit_copies = 0;
  int insns = 0;
  rtx new_src;
  unsigned HOST_WIDE_INT temp1, temp2;
  unsigned HOST_WIDE_INT remainder = val & 0xffffffff;

  /* Find out which operations are safe for a given CODE.  Also do a quick
     check for degenerate cases; these can occur when DImode operations
     are split.  */
  switch (code)
    {
    case SET:
      can_invert = 1;
      can_shift = 1;
      can_negate = 1;
      break;

    case PLUS:
      can_negate = 1;
      can_negate_initial = 1;
      break;

    case IOR:
      if (remainder == 0xffffffff)
	{
	  if (generate)
	    emit_insn (gen_rtx (SET, VOIDmode, target,
				GEN_INT (ARM_SIGN_EXTEND (val))));
	  return 1;
	}
      if (remainder == 0)
	{
	  if (reload_completed && rtx_equal_p (target, source))
	    return 0;
	  if (generate)
	    emit_insn (gen_rtx (SET, VOIDmode, target, source));
	  return 1;
	}
      break;

    case AND:
      if (remainder == 0)
	{
	  if (generate)
	    emit_insn (gen_rtx (SET, VOIDmode, target, const0_rtx));
	  return 1;
	}
      if (remainder == 0xffffffff)
	{
	  if (reload_completed && rtx_equal_p (target, source))
	    return 0;
	  if (generate)
	    emit_insn (gen_rtx (SET, VOIDmode, target, source));
	  return 1;
	}
      can_invert = 1;
      break;

    case XOR:
      if (remainder == 0)
	{
	  if (reload_completed && rtx_equal_p (target, source))
	    return 0;
	  if (generate)
	    emit_insn (gen_rtx (SET, VOIDmode, target, source));
	  return 1;
	}
      if (remainder == 0xffffffff)
	{
	  if (generate)
	    emit_insn (gen_rtx (SET, VOIDmode, target,
				gen_rtx (NOT, mode, source)));
	  return 1;
	}

      /* We don't know how to handle this yet below.  */
      abort ();

    case MINUS:
      /* We treat MINUS as (val - source), since (source - val) is always
	 passed as (source + (-val)).  */
      if (remainder == 0)
	{
	  if (generate)
	    emit_insn (gen_rtx (SET, VOIDmode, target,
				gen_rtx (NEG, mode, source)));
	  return 1;
	}
      if (const_ok_for_arm (val))
	{
	  if (generate)
	    emit_insn (gen_rtx (SET, VOIDmode, target,
				gen_rtx (MINUS, mode, GEN_INT (val), source)));
	  return 1;
	}
      can_negate = 1;

      break;

    default:
      abort ();
    }

  /* If we can do it in one insn get out quickly.  */
  if (const_ok_for_arm (val)
      || (can_negate_initial && const_ok_for_arm (-val))
      || (can_invert && const_ok_for_arm (~val)))
    {
      if (generate)
	emit_insn (gen_rtx (SET, VOIDmode, target,
			    (source ? gen_rtx (code, mode, source,
					       GEN_INT (val))
			     : GEN_INT (val))));
      return 1;
    }

  /* Calculate a few attributes that may be useful for specific
     optimizations.  */

  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) == 0)
	clear_sign_bit_copies++;
      else
	break;
    }

  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) != 0)
	set_sign_bit_copies++;
      else
	break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) == 0)
	clear_zero_bit_copies++;
      else
	break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) != 0)
	set_zero_bit_copies++;
      else
	break;
    }
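
  /* Illustrative note (not part of the original source): for
     remainder == 0xfffff00f the four counters above come out as

	 clear_sign_bit_copies == 0   (bit 31 is set)
	 set_sign_bit_copies   == 20  (bits 31..12 are all ones)
	 clear_zero_bit_copies == 0   (bit 0 is set)
	 set_zero_bit_copies   == 4   (bits 3..0 are all ones)

     The SET/IOR/AND strategies below key off these run lengths.  */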
  switch (code)
    {
    case SET:
      /* See if we can do this by sign_extending a constant that is known
	 to be negative.  This is a good way of doing it, since the shift
	 may well merge into a subsequent insn.  */
      if (set_sign_bit_copies > 1)
	{
	  if (const_ok_for_arm
	      (temp1 = ARM_SIGN_EXTEND (remainder
					<< (set_sign_bit_copies - 1))))
	    {
	      if (generate)
		{
		  new_src = subtargets ? gen_reg_rtx (mode) : target;
		  emit_insn (gen_rtx (SET, VOIDmode, new_src,
				      GEN_INT (temp1)));
		  emit_insn (gen_ashrsi3 (target, new_src,
					  GEN_INT (set_sign_bit_copies - 1)));
		}
	      return 2;
	    }
	  /* For an inverted constant, we will need to set the low bits;
	     these will be shifted out of harm's way.  */
	  temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
	  if (const_ok_for_arm (~temp1))
	    {
	      if (generate)
		{
		  new_src = subtargets ? gen_reg_rtx (mode) : target;
		  emit_insn (gen_rtx (SET, VOIDmode, new_src,
				      GEN_INT (temp1)));
		  emit_insn (gen_ashrsi3 (target, new_src,
					  GEN_INT (set_sign_bit_copies - 1)));
		}
	      return 2;
	    }
	}

      /* See if we can generate this by setting the bottom (or the top)
	 16 bits, and then shifting these into the other half of the
	 word.  We only look for the simplest cases, to do more would cost
	 too much.  Be careful, however, not to generate this when the
	 alternative would take fewer insns.  */
      if (val & 0xffff0000)
	{
	  temp1 = remainder & 0xffff0000;
	  temp2 = remainder & 0x0000ffff;

	  /* Overlaps outside this range are best done using other methods.  */
	  for (i = 9; i < 24; i++)
	    {
	      if ((((temp2 | (temp2 << i)) & 0xffffffff) == remainder)
		  && ! const_ok_for_arm (temp2))
		{
		  insns = arm_gen_constant (code, mode, temp2,
					    new_src = (subtargets
						       ? gen_reg_rtx (mode)
						       : target),
					    source, subtargets, generate);
		  source = new_src;
		  if (generate)
		    emit_insn (gen_rtx (SET, VOIDmode, target,
					gen_rtx (IOR, mode,
						 gen_rtx (ASHIFT, mode, source,
							  GEN_INT (i)),
						 source)));
		  return insns + 1;
		}
	    }

	  /* Don't duplicate cases already considered.  */
	  for (i = 17; i < 24; i++)
	    {
	      if (((temp1 | (temp1 >> i)) == remainder)
		  && ! const_ok_for_arm (temp1))
		{
		  insns = arm_gen_constant (code, mode, temp1,
					    new_src = (subtargets
						       ? gen_reg_rtx (mode)
						       : target),
					    source, subtargets, generate);
		  source = new_src;
		  if (generate)
		    emit_insn (gen_rtx (SET, VOIDmode, target,
					gen_rtx (IOR, mode,
						 gen_rtx (LSHIFTRT, mode,
							  source, GEN_INT (i)),
						 source)));
		  return insns + 1;
		}
	    }
	}
      break;
    case IOR:
    case XOR:
      /* If we have IOR or XOR, and the constant can be loaded in a
	 single instruction, and we can find a temporary to put it in,
	 then this can be done in two instructions instead of 3-4.  */
      if (subtargets
	  || (reload_completed && ! reg_mentioned_p (target, source)))
	{
	  if (const_ok_for_arm (ARM_SIGN_EXTEND (~ val)))
	    {
	      if (generate)
		{
		  rtx sub = subtargets ? gen_reg_rtx (mode) : target;

		  emit_insn (gen_rtx (SET, VOIDmode, sub, GEN_INT (val)));
		  emit_insn (gen_rtx (SET, VOIDmode, target,
				      gen_rtx (code, mode, source, sub)));
		}
	      return 2;
	    }
	}

      if (code == XOR)
	break;

      if (set_sign_bit_copies > 8
	  && (val & (-1 << (32 - set_sign_bit_copies))) == val)
	{
	  if (generate)
	    {
	      rtx sub = subtargets ? gen_reg_rtx (mode) : target;
	      rtx shift = GEN_INT (set_sign_bit_copies);

	      emit_insn (gen_rtx (SET, VOIDmode, sub,
				  gen_rtx (NOT, mode,
					   gen_rtx (ASHIFT, mode, source,
						    shift))));
	      emit_insn (gen_rtx (SET, VOIDmode, target,
				  gen_rtx (NOT, mode,
					   gen_rtx (LSHIFTRT, mode, sub,
						    shift))));
	    }
	  return 2;
	}

      if (set_zero_bit_copies > 8
	  && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
	{
	  if (generate)
	    {
	      rtx sub = subtargets ? gen_reg_rtx (mode) : target;
	      rtx shift = GEN_INT (set_zero_bit_copies);

	      emit_insn (gen_rtx (SET, VOIDmode, sub,
				  gen_rtx (NOT, mode,
					   gen_rtx (LSHIFTRT, mode, source,
						    shift))));
	      emit_insn (gen_rtx (SET, VOIDmode, target,
				  gen_rtx (NOT, mode,
					   gen_rtx (ASHIFT, mode, sub,
						    shift))));
	    }
	  return 2;
	}

      if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~ val)))
	{
	  if (generate)
	    {
	      rtx sub = subtargets ? gen_reg_rtx (mode) : target;
	      emit_insn (gen_rtx (SET, VOIDmode, sub,
				  gen_rtx (NOT, mode, source)));
	      source = sub;
	      if (subtargets)
		sub = gen_reg_rtx (mode);
	      emit_insn (gen_rtx (SET, VOIDmode, sub,
				  gen_rtx (AND, mode, source,
					   GEN_INT (temp1))));
	      emit_insn (gen_rtx (SET, VOIDmode, target,
				  gen_rtx (NOT, mode, sub)));
	    }
	  return 3;
	}
      break;
    case AND:
      /* See if two shifts will do 2 or more insns' worth of work.  */
      if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
	{
	  HOST_WIDE_INT shift_mask = ((0xffffffff
				       << (32 - clear_sign_bit_copies))
				      & 0xffffffff);
	  rtx new_source;
	  rtx shift;

	  if ((remainder | shift_mask) != 0xffffffff)
	    {
	      if (generate)
		{
		  new_source = subtargets ? gen_reg_rtx (mode) : target;
		  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
					    new_source, source, subtargets, 1);
		  source = new_source;
		}
	      else
		/* Not generating; pass TARGET rather than the uninitialized
		   NEW_SOURCE, since we only want the insn count.  */
		insns = arm_gen_constant (AND, mode, remainder | shift_mask,
					  target, source, subtargets, 0);
	    }

	  if (generate)
	    {
	      shift = GEN_INT (clear_sign_bit_copies);
	      new_source = subtargets ? gen_reg_rtx (mode) : target;
	      emit_insn (gen_ashlsi3 (new_source, source, shift));
	      emit_insn (gen_lshrsi3 (target, new_source, shift));
	    }

	  return insns + 2;
	}

      if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
	{
	  HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;
	  rtx new_source;
	  rtx shift;

	  if ((remainder | shift_mask) != 0xffffffff)
	    {
	      if (generate)
		{
		  new_source = subtargets ? gen_reg_rtx (mode) : target;
		  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
					    new_source, source, subtargets, 1);
		  source = new_source;
		}
	      else
		insns = arm_gen_constant (AND, mode, remainder | shift_mask,
					  target, source, subtargets, 0);
	    }

	  if (generate)
	    {
	      shift = GEN_INT (clear_zero_bit_copies);
	      new_source = subtargets ? gen_reg_rtx (mode) : target;
	      emit_insn (gen_lshrsi3 (new_source, source, shift));
	      emit_insn (gen_ashlsi3 (target, new_source, shift));
	    }

	  return insns + 2;
	}
      break;

    default:
      break;
    }
  for (i = 0; i < 32; i++)
    if (remainder & (1 << i))
      num_bits_set++;

  if (code == AND || (can_invert && num_bits_set > 16))
    remainder = (~remainder) & 0xffffffff;
  else if (code == PLUS && num_bits_set > 16)
    remainder = (-remainder) & 0xffffffff;
  else
    {
      can_invert = 0;
      can_negate = 0;
    }

  /* Now try and find a way of doing the job in either two or three
     instructions.
     We start by looking for the largest block of zeros that are aligned on
     a 2-bit boundary; we then fill up the temps, wrapping around to the
     top of the word when we drop off the bottom.
     In the worst case this code should produce no more than four insns.  */
  {
    int best_start = 0;
    int best_consecutive_zeros = 0;

    for (i = 0; i < 32; i += 2)
      {
	int consecutive_zeros = 0;

	if (! (remainder & (3 << i)))
	  {
	    while ((i < 32) && ! (remainder & (3 << i)))
	      {
		consecutive_zeros += 2;
		i += 2;
	      }
	    if (consecutive_zeros > best_consecutive_zeros)
	      {
		best_consecutive_zeros = consecutive_zeros;
		best_start = i - consecutive_zeros;
	      }
	    i -= 2;
	  }
      }

    /* Now start emitting the insns, starting with the one with the highest
       bit set: we do this so that the smallest number will be emitted last;
       this is more likely to be combinable with addressing insns.  */
    i = best_start;
    do
      {
	int end;

	if (i <= 0)
	  i += 32;
	if (remainder & (3 << (i - 2)))
	  {
	    end = i - 8;
	    if (end < 0)
	      end += 32;
	    temp1 = remainder & ((0x0ff << end)
				 | ((i < end) ? (0xff >> (32 - end)) : 0));
	    remainder &= ~temp1;

	    if (code == SET)
	      {
		if (generate)
		  emit_insn (gen_rtx (SET, VOIDmode,
				      new_src = (subtargets
						 ? gen_reg_rtx (mode)
						 : target),
				      GEN_INT (can_invert ? ~temp1 : temp1)));
		can_invert = 0;
		code = PLUS;
	      }
	    else if (code == MINUS)
	      {
		if (generate)
		  emit_insn (gen_rtx (SET, VOIDmode,
				      new_src = (subtargets
						 ? gen_reg_rtx (mode)
						 : target),
				      gen_rtx (code, mode, GEN_INT (temp1),
					       source)));
		code = PLUS;
	      }
	    else
	      {
		if (generate)
		  emit_insn (gen_rtx (SET, VOIDmode,
				      new_src = (remainder
						 ? (subtargets
						    ? gen_reg_rtx (mode)
						    : target)
						 : target),
				      gen_rtx (code, mode, source,
					       GEN_INT (can_invert ? ~temp1
							: (can_negate
							   ? -temp1
							   : temp1)))));
	      }

	    insns++;
	    source = new_src;
	    i -= 6;
	  }
	i -= 2;
      } while (remainder);
  }

  return insns;
}
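
/* Worked example (illustrative only, not part of the original source):
   for (set r0 #0xffff), which no single rotated 8-bit immediate can
   express, the loop above emits the highest 8-bit block first:

       mov r0, #0xff00
       add r0, r0, #0xff

   and returns 2.  */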
/* Canonicalize a comparison so that we are more likely to recognize it.
   This can be done for a few constant compares, where we can make the
   immediate value easier to load.  */
enum rtx_code
arm_canonicalize_comparison (code, op1)
     enum rtx_code code;
     rtx *op1;
{
  HOST_WIDE_INT i = INTVAL (*op1);

  switch (code)
    {
    case EQ:
    case NE:
      return code;

    case GT:
    case LE:
      if (i != (((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)) - 1)
	  && (const_ok_for_arm (i+1) || const_ok_for_arm (- (i+1))))
	{
	  *op1 = GEN_INT (i+1);
	  return code == GT ? GE : LT;
	}
      break;

    case GE:
    case LT:
      if (i != ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
	  && (const_ok_for_arm (i-1) || const_ok_for_arm (- (i-1))))
	{
	  *op1 = GEN_INT (i-1);
	  return code == GE ? GT : LE;
	}
      break;

    case GTU:
    case LEU:
      if (i != ~0
	  && (const_ok_for_arm (i+1) || const_ok_for_arm (- (i+1))))
	{
	  *op1 = GEN_INT (i + 1);
	  return code == GTU ? GEU : LTU;
	}
      break;

    case GEU:
    case LTU:
      if (i != 0
	  && (const_ok_for_arm (i - 1) || const_ok_for_arm (- (i - 1))))
	{
	  *op1 = GEN_INT (i - 1);
	  return code == GEU ? GTU : LEU;
	}
      break;

    default:
      abort ();
    }

  return code;
}
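
/* Illustrative example (not part of the original source): for (x >= 0x101)
   the constant 0x101 is not a valid ARM immediate, but 0x100 is, so the
   code above rewrites the test as (x > 0x100) and the constant can then
   be encoded directly in the comparison insn.  */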
/* Handle aggregates that are not laid out in a BLKmode element.
   This is a sub-element of RETURN_IN_MEMORY.  */

int
arm_return_in_memory (type)
     tree type;
{
  if (TREE_CODE (type) == RECORD_TYPE)
    {
      tree field;

      /* For a struct, we can return in a register if every element was a
	 bit-field.  */
      for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
	if (TREE_CODE (field) != FIELD_DECL
	    || ! DECL_BIT_FIELD_TYPE (field))
	  return 1;

      return 0;
    }
  else if (TREE_CODE (type) == UNION_TYPE)
    {
      tree field;

      /* Unions can be returned in registers if every element is
	 integral, or can be returned in an integer register.  */
      for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
	{
	  if (TREE_CODE (field) != FIELD_DECL
	      || (AGGREGATE_TYPE_P (TREE_TYPE (field))
		  && RETURN_IN_MEMORY (TREE_TYPE (field)))
	      || FLOAT_TYPE_P (TREE_TYPE (field)))
	    return 1;
	}

      return 0;
    }

  /* XXX Not sure what should be done for other aggregates, so put them in
     memory.  */
  return 1;
}
#define REG_OR_SUBREG_REG(X)						\
  (GET_CODE (X) == REG							\
   || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))

#define REG_OR_SUBREG_RTX(X)				\
  (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))

#define ARM_FRAME_RTX(X)					\
  ((X) == frame_pointer_rtx || (X) == stack_pointer_rtx		\
   || (X) == arg_pointer_rtx)
int
arm_rtx_costs (x, code, outer_code)
     rtx x;
     enum rtx_code code, outer_code;
{
  enum machine_mode mode = GET_MODE (x);
  enum rtx_code subcode;
  int extra_cost;

  switch (code)
    {
    case MEM:
      /* Memory costs quite a lot for the first word, but subsequent words
	 load at the equivalent of a single insn each.  */
      return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
	      + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));

    case DIV:
    case MOD:
      return 100;

    case ROTATE:
      if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
	return 4;
      /* Fall through */
    case ROTATERT:
      if (mode != SImode)
	return 8;
      /* Fall through */
    case ASHIFT: case LSHIFTRT: case ASHIFTRT:
      if (mode == DImode)
	return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
		+ ((GET_CODE (XEXP (x, 0)) == REG
		    || (GET_CODE (XEXP (x, 0)) == SUBREG
			&& GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
		   ? 0 : 8));
      return (1 + ((GET_CODE (XEXP (x, 0)) == REG
		    || (GET_CODE (XEXP (x, 0)) == SUBREG
			&& GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
		   ? 0 : 4)
	      + ((GET_CODE (XEXP (x, 1)) == REG
		  || (GET_CODE (XEXP (x, 1)) == SUBREG
		      && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
		  || (GET_CODE (XEXP (x, 1)) == CONST_INT))
		 ? 0 : 4));

    case MINUS:
      if (mode == DImode)
	return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 0))
		    || (GET_CODE (XEXP (x, 0)) == CONST_INT
			&& const_ok_for_arm (INTVAL (XEXP (x, 0)))))
		   ? 0 : 8));

      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
	return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
		      || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
			  && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
		     ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 0))
		    || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
			&& const_double_rtx_ok_for_fpu (XEXP (x, 0))))
		   ? 0 : 8));

      if (((GET_CODE (XEXP (x, 0)) == CONST_INT
	    && const_ok_for_arm (INTVAL (XEXP (x, 0)))
	    && REG_OR_SUBREG_REG (XEXP (x, 1))))
	  || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
	       || subcode == ASHIFTRT || subcode == LSHIFTRT
	       || subcode == ROTATE || subcode == ROTATERT
	       || (subcode == MULT
		   && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
		   && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
			(INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
	      && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
	      && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
		  || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
	      && REG_OR_SUBREG_REG (XEXP (x, 0))))
	return 1;
      /* Fall through */

    case PLUS:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
	return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 1))
		    || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
			&& const_double_rtx_ok_for_fpu (XEXP (x, 1))))
		   ? 0 : 8));

      /* Fall through */
    case AND: case XOR: case IOR:
      extra_cost = 0;

      /* Normally the frame registers will be split into reg+const during
	 reload, so it is a bad idea to combine them with other instructions,
	 since then they might not be moved outside of loops.  As a compromise
	 we allow integration with ops that have a constant as their second
	 operand.  */
      if ((REG_OR_SUBREG_REG (XEXP (x, 0))
	   && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
	   && GET_CODE (XEXP (x, 1)) != CONST_INT)
	  || (REG_OR_SUBREG_REG (XEXP (x, 1))
	      && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 1)))))
	extra_cost = 4;

      if (mode == DImode)
	return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 1))
		    || (GET_CODE (XEXP (x, 1)) == CONST_INT
			&& const_ok_for_op (INTVAL (XEXP (x, 1)), code, mode)))
		   ? 0 : 8));

      if (REG_OR_SUBREG_REG (XEXP (x, 0)))
	return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 1))
		    || (GET_CODE (XEXP (x, 1)) == CONST_INT
			&& const_ok_for_op (INTVAL (XEXP (x, 1)), code, mode)))
		   ? 0 : 4));

      else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
	return (1 + extra_cost
		+ ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
		     || subcode == LSHIFTRT || subcode == ASHIFTRT
		     || subcode == ROTATE || subcode == ROTATERT
		     || (subcode == MULT
			 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
			 && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
			      (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0))
		     && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
		     && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
			 || GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
		   ? 0 : 4));

      return 8;

    case MULT:
      if (arm_fast_multiply && mode == DImode
	  && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
	  && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
	      || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
	return 8;

      if (GET_MODE_CLASS (mode) == MODE_FLOAT
	  || mode == DImode)
	return 30;

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	{
	  unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
				      & (unsigned HOST_WIDE_INT) 0xffffffff);
	  int add_cost = const_ok_for_arm (i) ? 4 : 8;
	  int j;
	  int booth_unit_size = (arm_fast_multiply ? 8 : 2);

	  for (j = 0; i && j < 32; j += booth_unit_size)
	    {
	      i >>= booth_unit_size;
	      add_cost += 2;
	    }

	  return add_cost;
	}
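
      /* Illustrative example (not part of the original source): on a
	 non-M part (booth_unit_size == 2) a multiply by 0xFF retires two
	 bits per step, so the loop above charges 4 for the encodable
	 constant plus 4 * 2 for the four steps, i.e. 12.  */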
      return ((arm_fast_multiply ? 8 : 30)
	      + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
	      + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));

    case NEG:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
	return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
      /* Fall through */
    case NOT:
      if (mode == DImode)
	return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);

      return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);

    case IF_THEN_ELSE:
      if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
	return 14;
      return 2;

    case COMPARE:
      return 1;

    case ABS:
      return 4 + (mode == DImode ? 4 : 0);

    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == QImode)
	return (4 + (mode == DImode ? 4 : 0)
		+ (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
      /* Fall through */
    case ZERO_EXTEND:
      switch (GET_MODE (XEXP (x, 0)))
	{
	case QImode:
	  return (1 + (mode == DImode ? 4 : 0)
		  + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

	case HImode:
	  return (4 + (mode == DImode ? 4 : 0)
		  + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

	case SImode:
	  return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
	}
      abort ();

    default:
      return 99;
    }
}
/* This code has been fixed for cross compilation.  */

static int fpa_consts_inited = 0;

char *strings_fpa[8] = {
  "0",   "1",   "2",   "3",
  "4",   "5",   "0.5", "10"
};

static REAL_VALUE_TYPE values_fpa[8];

static void
init_fpa_table ()
{
  int i;
  REAL_VALUE_TYPE r;

  for (i = 0; i < 8; i++)
    {
      r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
      values_fpa[i] = r;
    }

  fpa_consts_inited = 1;
}

/* Return TRUE if rtx X is a valid immediate FPU constant.  */

int
const_double_rtx_ok_for_fpu (x)
     rtx x;
{
  REAL_VALUE_TYPE r;
  int i;

  if (! fpa_consts_inited)
    init_fpa_table ();

  REAL_VALUE_FROM_CONST_DOUBLE (r, x);
  if (REAL_VALUE_MINUS_ZERO (r))
    return 0;

  for (i = 0; i < 8; i++)
    if (REAL_VALUES_EQUAL (r, values_fpa[i]))
      return 1;

  return 0;
}
/* Return TRUE if rtx X is a valid negated immediate FPU constant.  */

int
neg_const_double_rtx_ok_for_fpu (x)
     rtx x;
{
  REAL_VALUE_TYPE r;
  int i;

  if (! fpa_consts_inited)
    init_fpa_table ();

  REAL_VALUE_FROM_CONST_DOUBLE (r, x);
  r = REAL_VALUE_NEGATE (r);
  if (REAL_VALUE_MINUS_ZERO (r))
    return 0;

  for (i = 0; i < 8; i++)
    if (REAL_VALUES_EQUAL (r, values_fpa[i]))
      return 1;

  return 0;
}
/* Predicates for `match_operand' and `match_operator'.  */

/* s_register_operand is the same as register_operand, but it doesn't accept
   (SUBREG (MEM)...).

   This function exists because at the time it was put in it led to better
   code.  SUBREG(MEM) always needs a reload in the places where
   s_register_operand is used, and this seemed to lead to excessive
   reloading.  */

int
s_register_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Only accept reg, subreg(reg), const_int.  */

int
reg_or_int_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    return 1;

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
/* Return 1 if OP is an item in memory, given that we are in reload.  */

int
reload_memory_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  int regno = true_regnum (op);

  return (! CONSTANT_P (op)
	  && (regno == -1
	      || (GET_CODE (op) == REG
		  && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
}

/* Return TRUE for valid operands for the rhs of an ARM instruction.  */

int
arm_rhs_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
}

/* Return TRUE for valid operands for the rhs of an ARM instruction, or a
   load.  */

int
arm_rhsm_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
	  || memory_operand (op, mode));
}

/* Return TRUE for valid operands for the rhs of an ARM instruction, or a
   constant that is valid when negated.  */

int
arm_add_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
	      && (const_ok_for_arm (INTVAL (op))
		  || const_ok_for_arm (-INTVAL (op)))));
}

int
arm_not_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
	      && (const_ok_for_arm (INTVAL (op))
		  || const_ok_for_arm (~INTVAL (op)))));
}
/* Return TRUE if the operand is a memory reference which contains an
   offsettable address.  */

int
offsettable_memory_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  return (mode == GET_MODE (op)
	  && GET_CODE (op) == MEM
	  && offsettable_address_p (reload_completed | reload_in_progress,
				    mode, XEXP (op, 0)));
}

/* Return TRUE if the operand is a memory reference which is, or can be
   made word aligned by adjusting the offset.  */

int
alignable_memory_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  rtx reg;

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  if (mode != GET_MODE (op) || GET_CODE (op) != MEM)
    return 0;

  op = XEXP (op, 0);

  return ((GET_CODE (reg = op) == REG
	   || (GET_CODE (op) == SUBREG
	       && GET_CODE (reg = SUBREG_REG (op)) == REG)
	   || (GET_CODE (op) == PLUS
	       && GET_CODE (XEXP (op, 1)) == CONST_INT
	       && (GET_CODE (reg = XEXP (op, 0)) == REG
		   || (GET_CODE (XEXP (op, 0)) == SUBREG
		       && GET_CODE (reg = SUBREG_REG (XEXP (op, 0))) == REG))))
	  && REGNO_POINTER_ALIGN (REGNO (reg)) >= 4);
}
/* Return TRUE for valid operands for the rhs of an FPU instruction.  */

int
fpu_rhs_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;
  else if (GET_CODE (op) == CONST_DOUBLE)
    return (const_double_rtx_ok_for_fpu (op));

  return FALSE;
}

int
fpu_add_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;
  else if (GET_CODE (op) == CONST_DOUBLE)
    return (const_double_rtx_ok_for_fpu (op)
	    || neg_const_double_rtx_ok_for_fpu (op));

  return FALSE;
}
/* Return nonzero if OP is a constant power of two.  */

int
power_of_two_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == CONST_INT)
    {
      HOST_WIDE_INT value = INTVAL (op);
      return value != 0 && (value & (value - 1)) == 0;
    }
  return FALSE;
}
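
/* Illustrative note (not part of the original source): (value & (value - 1))
   clears the lowest set bit, so it is zero exactly when VALUE is a power
   of two; e.g. 0x80 & 0x7F == 0, while 0x90 & 0x8F == 0x80 != 0.  */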
/* Return TRUE for a valid operand of a DImode operation.
   Either: REG, CONST_DOUBLE or MEM(DImode_address).
   Note that this disallows MEM(REG+REG), but allows
   MEM(PRE/POST_INC/DEC(REG)).  */

int
di_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;

  switch (GET_CODE (op))
    {
    case CONST_DOUBLE:
    case CONST_INT:
      return TRUE;

    case MEM:
      return memory_address_p (DImode, XEXP (op, 0));

    default:
      return FALSE;
    }
}

/* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
   Either: REG, CONST_DOUBLE or MEM(DImode_address).
   Note that this disallows MEM(REG+REG), but allows
   MEM(PRE/POST_INC/DEC(REG)).  */

int
soft_df_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (s_register_operand (op, mode))
    return TRUE;

  switch (GET_CODE (op))
    {
    case CONST_DOUBLE:
      return TRUE;

    case MEM:
      return memory_address_p (DFmode, XEXP (op, 0));

    default:
      return FALSE;
    }
}
/* Return TRUE for valid index operands.  */

int
index_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (s_register_operand (op, mode)
	  || (immediate_operand (op, mode)
	      && INTVAL (op) < 4096 && INTVAL (op) > -4096));
}

/* Return TRUE for valid shifts by a constant.  This also accepts any
   power of two on the (somewhat overly relaxed) assumption that the
   shift operator in this case was a mult.  */

int
const_shift_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (power_of_two_operand (op, mode)
	  || (immediate_operand (op, mode)
	      && (INTVAL (op) < 32 && INTVAL (op) > 0)));
}
/* Return TRUE for arithmetic operators which can be combined with a multiply
   (shift).  */

int
shiftable_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (GET_MODE (x) != mode)
    return FALSE;
  else
    {
      enum rtx_code code = GET_CODE (x);

      return (code == PLUS || code == MINUS
	      || code == IOR || code == XOR || code == AND);
    }
}

/* Return TRUE for shift operators.  */

int
shift_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (GET_MODE (x) != mode)
    return FALSE;
  else
    {
      enum rtx_code code = GET_CODE (x);

      if (code == MULT)
	return power_of_two_operand (XEXP (x, 1), mode);

      return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
	      || code == ROTATERT);
    }
}
/* Return TRUE if x is EQ or NE.  */

int
equality_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  return GET_CODE (x) == EQ || GET_CODE (x) == NE;
}

/* Return TRUE for SMIN SMAX UMIN UMAX operators.  */

int
minmax_operator (x, mode)
     rtx x;
     enum machine_mode mode;
{
  enum rtx_code code = GET_CODE (x);

  if (GET_MODE (x) != mode)
    return FALSE;

  return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
}

/* Return TRUE if this is the condition code register; if we aren't given
   a mode, accept any class CCmode register.  */
int
cc_register (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    {
      mode = GET_MODE (x);
      if (GET_MODE_CLASS (mode) != MODE_CC)
	return FALSE;
    }

  if (mode == GET_MODE (x) && GET_CODE (x) == REG && REGNO (x) == 24)
    return TRUE;

  return FALSE;
}

/* Return TRUE if this is the condition code register; if we aren't given
   a mode, accept any class CCmode register which indicates a dominance
   expression.  */

int
dominant_cc_register (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    {
      mode = GET_MODE (x);
      if (GET_MODE_CLASS (mode) != MODE_CC)
	return FALSE;
    }

  if (mode != CC_DNEmode && mode != CC_DEQmode
      && mode != CC_DLEmode && mode != CC_DLTmode
      && mode != CC_DGEmode && mode != CC_DGTmode
      && mode != CC_DLEUmode && mode != CC_DLTUmode
      && mode != CC_DGEUmode && mode != CC_DGTUmode)
    return FALSE;

  if (mode == GET_MODE (x) && GET_CODE (x) == REG && REGNO (x) == 24)
    return TRUE;

  return FALSE;
}
/* Return TRUE if X references a SYMBOL_REF.  */

int
symbol_mentioned_p (x)
     rtx x;
{
  register char *fmt;
  register int i;

  if (GET_CODE (x) == SYMBOL_REF)
    return 1;

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  register int j;

	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (symbol_mentioned_p (XVECEXP (x, i, j)))
	      return 1;
	}
      else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
	return 1;
    }

  return 0;
}

/* Return TRUE if X references a LABEL_REF.  */

int
label_mentioned_p (x)
     rtx x;
{
  register char *fmt;
  register int i;

  if (GET_CODE (x) == LABEL_REF)
    return 1;

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  register int j;

	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (label_mentioned_p (XVECEXP (x, i, j)))
	      return 1;
	}
      else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
	return 1;
    }

  return 0;
}
enum rtx_code
minmax_code (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);

  if (code == SMAX)
    return GE;
  else if (code == SMIN)
    return LE;
  else if (code == UMIN)
    return LEU;
  else if (code == UMAX)
    return GEU;

  abort ();
}
/* Return 1 if memory locations are adjacent.  */

int
adjacent_mem_locations (a, b)
     rtx a, b;
{
  int val0 = 0, val1 = 0;
  int reg0, reg1;

  if ((GET_CODE (XEXP (a, 0)) == REG
       || (GET_CODE (XEXP (a, 0)) == PLUS
	   && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
      && (GET_CODE (XEXP (b, 0)) == REG
	  || (GET_CODE (XEXP (b, 0)) == PLUS
	      && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
    {
      if (GET_CODE (XEXP (a, 0)) == PLUS)
	{
	  reg0 = REGNO (XEXP (XEXP (a, 0), 0));
	  val0 = INTVAL (XEXP (XEXP (a, 0), 1));
	}
      else
	reg0 = REGNO (XEXP (a, 0));
      if (GET_CODE (XEXP (b, 0)) == PLUS)
	{
	  reg1 = REGNO (XEXP (XEXP (b, 0), 0));
	  val1 = INTVAL (XEXP (XEXP (b, 0), 1));
	}
      else
	reg1 = REGNO (XEXP (b, 0));
      return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
    }
  return 0;
}
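
/* Illustrative example (not part of the original source): (mem (reg 4))
   and (mem (plus (reg 4) (const_int 4))) are adjacent in either order,
   which makes the pair a candidate for an ldm/stm combination.  */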
/* Return 1 if OP is a load multiple operation.  It is known to be
   parallel and the first section will be tested.  */

int
load_multiple_operation (op, mode)
     rtx op;
     enum machine_mode mode;
{
  HOST_WIDE_INT count = XVECLEN (op, 0);
  int dest_regno;
  rtx src_addr;
  HOST_WIDE_INT i = 1, base = 0;
  rtx elt;

  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET)
    return 0;

  /* Check to see if this might be a write-back.  */
  if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
    {
      i++;
      base = 1;

      /* Now check it more carefully.  */
      if (GET_CODE (SET_DEST (elt)) != REG
	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
	  || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
	  || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
	  || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 2) * 4
	  || GET_CODE (XVECEXP (op, 0, count - 1)) != CLOBBER
	  || GET_CODE (XEXP (XVECEXP (op, 0, count - 1), 0)) != REG
	  || REGNO (XEXP (XVECEXP (op, 0, count - 1), 0))
	     != REGNO (SET_DEST (elt)))
	return 0;

      count--;
    }

  /* Perform a quick check so we don't blow up below.  */
  if (count <= i
      || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
    return 0;

  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);

  for (; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != SImode
	  || REGNO (SET_DEST (elt)) != dest_regno + i - base
	  || GET_CODE (SET_SRC (elt)) != MEM
	  || GET_MODE (SET_SRC (elt)) != SImode
	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
	  || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
	  || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
	  || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
	return 0;
    }

  return 1;
}
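
/* Illustrative example (not part of the original source): a two-register
   load multiple without write-back has the shape

       (parallel [(set (reg:SI 0) (mem:SI (reg:SI 4)))
		  (set (reg:SI 1) (mem:SI (plus:SI (reg:SI 4)
						   (const_int 4))))])

   which the checks above accept with dest_regno == 0 and src_addr being
   (reg:SI 4).  */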
/* Return 1 if OP is a store multiple operation.  It is known to be
   parallel and the first section will be tested.  */

int
store_multiple_operation (op, mode)
     rtx op;
     enum machine_mode mode;
{
  HOST_WIDE_INT count = XVECLEN (op, 0);
  int src_regno;
  rtx dest_addr;
  HOST_WIDE_INT i = 1, base = 0;
  rtx elt;

  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET)
    return 0;

  /* Check to see if this might be a write-back.  */
  if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
    {
      i++;
      base = 1;

      /* Now check it more carefully.  */
      if (GET_CODE (SET_DEST (elt)) != REG
	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
	  || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
	  || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
	  || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 2) * 4
	  || GET_CODE (XVECEXP (op, 0, count - 1)) != CLOBBER
	  || GET_CODE (XEXP (XVECEXP (op, 0, count - 1), 0)) != REG
	  || REGNO (XEXP (XVECEXP (op, 0, count - 1), 0))
	     != REGNO (SET_DEST (elt)))
	return 0;

      count--;
    }

  /* Perform a quick check so we don't blow up below.  */
  if (count <= i
      || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
      || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
    return 0;

  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);

  for (; i < count; i++)
    {
      elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_SRC (elt)) != REG
	  || GET_MODE (SET_SRC (elt)) != SImode
	  || REGNO (SET_SRC (elt)) != src_regno + i - base
	  || GET_CODE (SET_DEST (elt)) != MEM
	  || GET_MODE (SET_DEST (elt)) != SImode
	  || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
	  || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
	  || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
	  || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
	return 0;
    }

  return 1;
}
int
load_multiple_sequence (operands, nops, regs, base, load_offset)
     rtx *operands;
     int nops;
     int *regs;
     int *base;
     HOST_WIDE_INT *load_offset;
{
  int unsorted_regs[4];
  HOST_WIDE_INT unsorted_offsets[4];
  int order[4];
  int base_reg;
  int i;

  /* Can only handle 2, 3, or 4 insns at present, though could be easily
     extended if required.  */
  if (nops < 2 || nops > 4)
    abort ();

  /* Loop over the operands and check that the memory references are
     suitable (i.e. immediate offsets from the same base register).  At
     the same time, extract the target register, and the memory
     offsets.  */
  for (i = 0; i < nops; i++)
    {
      rtx reg;
      rtx offset;

      if (GET_CODE (operands[nops + i]) != MEM)
	abort ();

      /* Don't reorder volatile memory references; it doesn't seem worth
	 looking for the case where the order is ok anyway.  */
      if (MEM_VOLATILE_P (operands[nops + i]))
	return 0;

      offset = const0_rtx;

      if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
	   || (GET_CODE (reg) == SUBREG
	       && GET_CODE (reg = SUBREG_REG (reg)) == REG))
	  || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
	      && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
		   == REG)
		  || (GET_CODE (reg) == SUBREG
		      && GET_CODE (reg = SUBREG_REG (reg)) == REG))
	      && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
		  == CONST_INT)))
	{
	  if (i == 0)
	    {
	      base_reg = REGNO (reg);
	      unsorted_regs[0] = (GET_CODE (operands[i]) == REG
				  ? REGNO (operands[i])
				  : REGNO (SUBREG_REG (operands[i])));
	      order[0] = 0;
	    }
	  else
	    {
	      if (base_reg != REGNO (reg))
		/* Not addressed from the same base register.  */
		return 0;

	      unsorted_regs[i] = (GET_CODE (operands[i]) == REG
				  ? REGNO (operands[i])
				  : REGNO (SUBREG_REG (operands[i])));
	      if (unsorted_regs[i] < unsorted_regs[order[0]])
		order[0] = i;
	    }

	  /* If it isn't an integer register, or if it overwrites the
	     base register but isn't the last insn in the list, then
	     we can't do this.  */
	  if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14
	      || (i != nops - 1 && unsorted_regs[i] == base_reg))
	    return 0;

	  unsorted_offsets[i] = INTVAL (offset);
	}
      else
	/* Not a suitable memory address.  */
	return 0;
    }

  /* All the useful information has now been extracted from the
     operands into unsorted_regs and unsorted_offsets; additionally,
     order[0] has been set to the lowest numbered register in the
     list.  Sort the registers into order, and check that the memory
     offsets are ascending and adjacent.  */

  for (i = 1; i < nops; i++)
    {
      int j;

      order[i] = order[i - 1];
      for (j = 0; j < nops; j++)
	if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
	    && (order[i] == order[i - 1]
		|| unsorted_regs[j] < unsorted_regs[order[i]]))
	  order[i] = j;

      /* Have we found a suitable register?  If not, one must be used more
	 than once.  */
      if (order[i] == order[i - 1])
	return 0;

      /* Are the memory offsets adjacent and ascending?  */
      if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
	return 0;
    }

  if (base)
    {
      *base = base_reg;

      for (i = 0; i < nops; i++)
	regs[i] = unsorted_regs[order[i]];

      *load_offset = unsorted_offsets[order[0]];
    }

  if (unsorted_offsets[order[0]] == 0)
    return 1; /* ldmia */

  if (unsorted_offsets[order[0]] == 4)
    return 2; /* ldmib */

  if (unsorted_offsets[order[nops - 1]] == 0)
    return 3; /* ldmda */

  if (unsorted_offsets[order[nops - 1]] == -4)
    return 4; /* ldmdb */

  /* Can't do it without setting up the offset, only do this if it takes
     no more than one insn.  */
  return (const_ok_for_arm (unsorted_offsets[order[0]])
	  || const_ok_for_arm (-unsorted_offsets[order[0]])) ? 5 : 0;
}
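
/* Illustrative note (not part of the original source): the return codes
   map onto the ARM load-multiple addressing modes.  Loading r0-r3 from
   offsets 0,4,8,12 off the base register gives code 1 (ldmia); offsets
   4,8,12,16 give code 2 (ldmib); offsets -12,-8,-4,0 give code 3 (ldmda);
   and offsets -16,-12,-8,-4 give code 4 (ldmdb).  */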
char *
emit_ldm_seq (operands, nops)
     rtx *operands;
     int nops;
{
  int regs[4];
  int base_reg;
  HOST_WIDE_INT offset;
  char buf[100];
  int i;

  switch (load_multiple_sequence (operands, nops, regs, &base_reg, &offset))
    {
    case 1:
      strcpy (buf, "ldm%?ia\t");
      break;

    case 2:
      strcpy (buf, "ldm%?ib\t");
      break;

    case 3:
      strcpy (buf, "ldm%?da\t");
      break;

    case 4:
      strcpy (buf, "ldm%?db\t");
      break;

    case 5:
      if (offset >= 0)
	sprintf (buf, "add%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
		 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
		 (long) offset);
      else
	sprintf (buf, "sub%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
		 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
		 (long) -offset);
      output_asm_insn (buf, operands);
      base_reg = regs[0];
      strcpy (buf, "ldm%?ia\t");
      break;

    default:
      abort ();
    }

  sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
	   reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);

  for (i = 1; i < nops; i++)
    sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
	     reg_names[regs[i]]);

  strcat (buf, "}\t%@ phole ldm");

  output_asm_insn (buf, operands);
  return "";
}
2217 store_multiple_sequence (operands, nops, regs, base, load_offset)
2218 rtx *operands;
2219 int nops;
2220 int *regs;
2221 int *base;
2222 HOST_WIDE_INT *load_offset;
2224 int unsorted_regs[4];
2225 HOST_WIDE_INT unsorted_offsets[4];
2226 int order[4];
2227 int base_reg;
2228 int i;
2230 /* Can only handle 2, 3, or 4 insns at present, though could be easily
2231 extended if required. */
2232 if (nops < 2 || nops > 4)
2233 abort ();
2235 /* Loop over the operands and check that the memory references are
2236      suitable (i.e. immediate offsets from the same base register).  At
2237 the same time, extract the target register, and the memory
2238 offsets. */
2239 for (i = 0; i < nops; i++)
2241 rtx reg;
2242 rtx offset;
2244 if (GET_CODE (operands[nops + i]) != MEM)
2245 abort ();
2247 /* Don't reorder volatile memory references; it doesn't seem worth
2248 looking for the case where the order is ok anyway. */
2249 if (MEM_VOLATILE_P (operands[nops + i]))
2250 return 0;
2252 offset = const0_rtx;
2254 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
2255 || (GET_CODE (reg) == SUBREG
2256 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
2257 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
2258 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
2259 == REG)
2260 || (GET_CODE (reg) == SUBREG
2261 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
2262 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
2263 == CONST_INT)))
2265 if (i == 0)
2267 base_reg = REGNO(reg);
2268 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
2269 ? REGNO (operands[i])
2270 : REGNO (SUBREG_REG (operands[i])));
2271 order[0] = 0;
2273 else
2275 if (base_reg != REGNO (reg))
2276 /* Not addressed from the same base register. */
2277 return 0;
2279 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
2280 ? REGNO (operands[i])
2281 : REGNO (SUBREG_REG (operands[i])));
2282 if (unsorted_regs[i] < unsorted_regs[order[0]])
2283 order[0] = i;
2286 /* If it isn't an integer register, then we can't do this. */
2287 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14)
2288 return 0;
2290 unsorted_offsets[i] = INTVAL (offset);
2292 else
2293 /* Not a suitable memory address. */
2294 return 0;
2297 /* All the useful information has now been extracted from the
2298 operands into unsorted_regs and unsorted_offsets; additionally,
2299 order[0] has been set to the lowest numbered register in the
2300 list. Sort the registers into order, and check that the memory
2301 offsets are ascending and adjacent. */
2303 for (i = 1; i < nops; i++)
2305 int j;
2307 order[i] = order[i - 1];
2308 for (j = 0; j < nops; j++)
2309 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
2310 && (order[i] == order[i - 1]
2311 || unsorted_regs[j] < unsorted_regs[order[i]]))
2312 order[i] = j;
2314      /* Have we found a suitable register? If not, one must be used more
2315	 than once. */
2316 if (order[i] == order[i - 1])
2317 return 0;
2319 /* Is the memory address adjacent and ascending? */
2320 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
2321 return 0;
2324 if (base)
2326 *base = base_reg;
2328 for (i = 0; i < nops; i++)
2329 regs[i] = unsorted_regs[order[i]];
2331 *load_offset = unsorted_offsets[order[0]];
2334 if (unsorted_offsets[order[0]] == 0)
2335 return 1; /* stmia */
2337 if (unsorted_offsets[order[0]] == 4)
2338 return 2; /* stmib */
2340 if (unsorted_offsets[order[nops - 1]] == 0)
2341 return 3; /* stmda */
2343 if (unsorted_offsets[order[nops - 1]] == -4)
2344 return 4; /* stmdb */
2346 return 0;
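   /* An illustrative example (hypothetical registers and offsets): for

		str	r1, [r4, #-4]
		str	r2, [r4]

      the sorted offsets are -4 and 0; the lowest is neither 0 nor 4, but
      the highest is 0, so 3 is returned and emit_stm_seq below prints

		stmda	r4, {r1, r2}  */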
2349 char *
2350 emit_stm_seq (operands, nops)
2351 rtx *operands;
2352 int nops;
2354 int regs[4];
2355 int base_reg;
2356 HOST_WIDE_INT offset;
2357 char buf[100];
2358 int i;
2360 switch (store_multiple_sequence (operands, nops, regs, &base_reg, &offset))
2362 case 1:
2363 strcpy (buf, "stm%?ia\t");
2364 break;
2366 case 2:
2367 strcpy (buf, "stm%?ib\t");
2368 break;
2370 case 3:
2371 strcpy (buf, "stm%?da\t");
2372 break;
2374 case 4:
2375 strcpy (buf, "stm%?db\t");
2376 break;
2378 default:
2379 abort ();
2382 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
2383 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
2385 for (i = 1; i < nops; i++)
2386 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
2387 reg_names[regs[i]]);
2389 strcat (buf, "}\t%@ phole stm");
2391 output_asm_insn (buf, operands);
2392 return "";
2396 multi_register_push (op, mode)
2397 rtx op;
2398 enum machine_mode mode;
2400 if (GET_CODE (op) != PARALLEL
2401 || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
2402 || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
2403 || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != 2))
2404 return 0;
2406 return 1;
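/* For illustration, the shape accepted above (with hypothetical
   registers) is the PARALLEL built by emit_multi_reg_push later in
   this file:

	(parallel [(set (mem (pre_dec (reg sp)))
			(unspec [(reg r4)] 2))
		   (use (reg r5))
		   ...])

   where the UNSPEC index 2 marks a multi-register push.  */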
2410 /* Routines for use with attributes */
2413 const_pool_offset (symbol)
2414 rtx symbol;
2416 return get_pool_offset (symbol) - get_pool_size () - get_prologue_size ();
2419 /* Return nonzero if ATTR is a valid attribute for DECL.
2420 ATTRIBUTES are any existing attributes and ARGS are the arguments
2421 supplied with ATTR.
2423 Supported attributes:
2425    naked: don't output any prologue or epilogue code; the user is assumed
2426 to do the right thing. */
2429 arm_valid_machine_decl_attribute (decl, attributes, attr, args)
2430 tree decl;
2431 tree attributes;
2432 tree attr;
2433 tree args;
2435 if (args != NULL_TREE)
2436 return 0;
2438 if (is_attribute_p ("naked", attr))
2439 return TREE_CODE (decl) == FUNCTION_DECL;
2440 return 0;
2443 /* Return non-zero if FUNC is a naked function. */
2445 static int
2446 arm_naked_function_p (func)
2447 tree func;
2449 tree a;
2451 if (TREE_CODE (func) != FUNCTION_DECL)
2452 abort ();
2454 a = lookup_attribute ("naked", DECL_MACHINE_ATTRIBUTES (func));
2455 return a != NULL_TREE;
2458 /* Routines for use in generating RTL */
2460 rtx
2461 arm_gen_load_multiple (base_regno, count, from, up, write_back)
2462 int base_regno;
2463 int count;
2464 rtx from;
2465 int up;
2466 int write_back;
2468 int i = 0, j;
2469 rtx result;
2470 int sign = up ? 1 : -1;
2472 result = gen_rtx (PARALLEL, VOIDmode,
2473 rtvec_alloc (count + (write_back ? 2 : 0)));
2474 if (write_back)
2476 XVECEXP (result, 0, 0)
2477 = gen_rtx (SET, GET_MODE (from), from,
2478 plus_constant (from, count * 4 * sign));
2479 i = 1;
2480 count++;
2483 for (j = 0; i < count; i++, j++)
2485 XVECEXP (result, 0, i)
2486 = gen_rtx (SET, VOIDmode, gen_rtx (REG, SImode, base_regno + j),
2487 gen_rtx (MEM, SImode,
2488 plus_constant (from, j * 4 * sign)));
2491 if (write_back)
2492 XVECEXP (result, 0, i) = gen_rtx (CLOBBER, SImode, from);
2494 return result;
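/* For illustration (hypothetical arguments), arm_gen_load_multiple
   (4, 2, from, TRUE, TRUE) builds

	(parallel [(set (reg from) (plus (reg from) (const_int 8)))
		   (set (reg:SI 4) (mem:SI (reg from)))
		   (set (reg:SI 5) (mem:SI (plus (reg from) (const_int 4))))
		   (clobber (reg from))])

   which is intended to match a load-multiple pattern in arm.md.  */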
2497 rtx
2498 arm_gen_store_multiple (base_regno, count, to, up, write_back)
2499 int base_regno;
2500 int count;
2501 rtx to;
2502 int up;
2503 int write_back;
2505 int i = 0, j;
2506 rtx result;
2507 int sign = up ? 1 : -1;
2509 result = gen_rtx (PARALLEL, VOIDmode,
2510 rtvec_alloc (count + (write_back ? 2 : 0)));
2511 if (write_back)
2513 XVECEXP (result, 0, 0)
2514 = gen_rtx (SET, GET_MODE (to), to,
2515 plus_constant (to, count * 4 * sign));
2516 i = 1;
2517 count++;
2520 for (j = 0; i < count; i++, j++)
2522 XVECEXP (result, 0, i)
2523 = gen_rtx (SET, VOIDmode,
2524 gen_rtx (MEM, SImode, plus_constant (to, j * 4 * sign)),
2525 gen_rtx (REG, SImode, base_regno + j));
2528 if (write_back)
2529 XVECEXP (result, 0, i) = gen_rtx (CLOBBER, SImode, to);
2531 return result;
2535 arm_gen_movstrqi (operands)
2536 rtx *operands;
2538 HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
2539 int i, r;
2540 rtx src, dst;
2541 rtx st_src, st_dst, end_src, end_dst, fin_src, fin_dst;
2542 rtx part_bytes_reg = NULL;
2543 extern int optimize;
2545 if (GET_CODE (operands[2]) != CONST_INT
2546 || GET_CODE (operands[3]) != CONST_INT
2547 || INTVAL (operands[2]) > 64
2548 || INTVAL (operands[3]) & 3)
2549 return 0;
2551 st_dst = XEXP (operands[0], 0);
2552 st_src = XEXP (operands[1], 0);
2553 fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
2554 fin_src = src = copy_to_mode_reg (SImode, st_src);
2556 in_words_to_go = (INTVAL (operands[2]) + 3) / 4;
2557 out_words_to_go = INTVAL (operands[2]) / 4;
2558 last_bytes = INTVAL (operands[2]) & 3;
2560 if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
2561 part_bytes_reg = gen_rtx (REG, SImode, (in_words_to_go - 1) & 3);
2563 for (i = 0; in_words_to_go >= 2; i+=4)
2565 if (in_words_to_go > 4)
2566 emit_insn (arm_gen_load_multiple (0, 4, src, TRUE, TRUE));
2567 else
2568 emit_insn (arm_gen_load_multiple (0, in_words_to_go, src, TRUE,
2569 FALSE));
2571 if (out_words_to_go)
2573 if (out_words_to_go > 4)
2574 emit_insn (arm_gen_store_multiple (0, 4, dst, TRUE, TRUE));
2575 else if (out_words_to_go != 1)
2576 emit_insn (arm_gen_store_multiple (0, out_words_to_go,
2577 dst, TRUE,
2578 (last_bytes == 0
2579 ? FALSE : TRUE)));
2580 else
2582 emit_move_insn (gen_rtx (MEM, SImode, dst),
2583 gen_rtx (REG, SImode, 0));
2584 if (last_bytes != 0)
2585 emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
2589 in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
2590 out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
2593 /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do. */
2594 if (out_words_to_go)
2596 rtx sreg;
2598 emit_move_insn (sreg = gen_reg_rtx (SImode), gen_rtx (MEM, SImode, src));
2599 emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));
2600 emit_move_insn (gen_rtx (MEM, SImode, dst), sreg);
2601 emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
2602 in_words_to_go--;
2604 if (in_words_to_go) /* Sanity check */
2605 abort ();
2608 if (in_words_to_go)
2610 if (in_words_to_go < 0)
2611 abort ();
2613 part_bytes_reg = copy_to_mode_reg (SImode, gen_rtx (MEM, SImode, src));
2616 if (BYTES_BIG_ENDIAN && last_bytes)
2618 rtx tmp = gen_reg_rtx (SImode);
2620 if (part_bytes_reg == NULL)
2621 abort ();
2623 /* The bytes we want are in the top end of the word */
2624 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
2625 GEN_INT (8 * (4 - last_bytes))));
2626 part_bytes_reg = tmp;
2628 while (last_bytes)
2630 emit_move_insn (gen_rtx (MEM, QImode,
2631 plus_constant (dst, last_bytes - 1)),
2632 gen_rtx (SUBREG, QImode, part_bytes_reg, 0));
2633 if (--last_bytes)
2635 tmp = gen_reg_rtx (SImode);
2636 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
2637 part_bytes_reg = tmp;
2642 else
2644 while (last_bytes)
2646 if (part_bytes_reg == NULL)
2647 abort ();
2649 emit_move_insn (gen_rtx (MEM, QImode, dst),
2650 gen_rtx (SUBREG, QImode, part_bytes_reg, 0));
2651 if (--last_bytes)
2653 rtx tmp = gen_reg_rtx (SImode);
2655 emit_insn (gen_addsi3 (dst, dst, const1_rtx));
2656 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
2657 part_bytes_reg = tmp;
2662 return 1;
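/* A worked example (illustrative sizes): copying 10 bytes gives
   in_words_to_go = 3, out_words_to_go = 2 and last_bytes = 2.  The main
   loop emits one 3-word load multiple and one 2-word store multiple,
   the third word loaded is left in part_bytes_reg, and the tail loop
   then stores the remaining 2 bytes one at a time.  */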
2665 /* Generate a memory reference for a half word, such that it will be loaded
2666 into the top 16 bits of the word. We can assume that the address is
2667 known to be alignable and of the form reg, or plus (reg, const). */
2668 rtx
2669 gen_rotated_half_load (memref)
2670 rtx memref;
2672 HOST_WIDE_INT offset = 0;
2673 rtx base = XEXP (memref, 0);
2675 if (GET_CODE (base) == PLUS)
2677 offset = INTVAL (XEXP (base, 1));
2678 base = XEXP (base, 0);
2681   /* If we aren't allowed to generate unaligned addresses, then fail. */
2682 if (TARGET_SHORT_BY_BYTES
2683 && ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 0)))
2684 return NULL;
2686 base = gen_rtx (MEM, SImode, plus_constant (base, offset & ~2));
2688 if ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 2))
2689 return base;
2691 return gen_rtx (ROTATE, SImode, base, GEN_INT (16));
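/* For illustration (little-endian, hypothetical base register rb): a
   half word at [rb, #0] sits in the low 16 bits of the word, so
   (rotate (mem:SI (reg rb)) 16) is returned to move it to the top; a
   half word at [rb, #2] is already in the top 16 bits, so the plain
   word load is returned unchanged.  */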
2694 static enum machine_mode
2695 select_dominance_cc_mode (op, x, y, cond_or)
2696 enum rtx_code op;
2697 rtx x;
2698 rtx y;
2699 HOST_WIDE_INT cond_or;
2701 enum rtx_code cond1, cond2;
2702 int swapped = 0;
2704   /* Currently we will probably get the wrong result if the individual
2705      comparisons are not simple.  This also ensures that it is safe to
2706      reverse a comparison if necessary. */
2707 if ((arm_select_cc_mode (cond1 = GET_CODE (x), XEXP (x, 0), XEXP (x, 1))
2708 != CCmode)
2709 || (arm_select_cc_mode (cond2 = GET_CODE (y), XEXP (y, 0), XEXP (y, 1))
2710 != CCmode))
2711 return CCmode;
2713 if (cond_or)
2714 cond1 = reverse_condition (cond1);
2716 /* If the comparisons are not equal, and one doesn't dominate the other,
2717 then we can't do this. */
2718 if (cond1 != cond2
2719 && ! comparison_dominates_p (cond1, cond2)
2720 && (swapped = 1, ! comparison_dominates_p (cond2, cond1)))
2721 return CCmode;
2723 if (swapped)
2725 enum rtx_code temp = cond1;
2726 cond1 = cond2;
2727 cond2 = temp;
2730 switch (cond1)
2732 case EQ:
2733 if (cond2 == EQ || ! cond_or)
2734 return CC_DEQmode;
2736 switch (cond2)
2738 case LE: return CC_DLEmode;
2739 case LEU: return CC_DLEUmode;
2740 case GE: return CC_DGEmode;
2741 case GEU: return CC_DGEUmode;
2744 break;
2746 case LT:
2747 if (cond2 == LT || ! cond_or)
2748 return CC_DLTmode;
2749 if (cond2 == LE)
2750 return CC_DLEmode;
2751 if (cond2 == NE)
2752 return CC_DNEmode;
2753 break;
2755 case GT:
2756 if (cond2 == GT || ! cond_or)
2757 return CC_DGTmode;
2758 if (cond2 == GE)
2759 return CC_DGEmode;
2760 if (cond2 == NE)
2761 return CC_DNEmode;
2762 break;
2764 case LTU:
2765 if (cond2 == LTU || ! cond_or)
2766 return CC_DLTUmode;
2767 if (cond2 == LEU)
2768 return CC_DLEUmode;
2769 if (cond2 == NE)
2770 return CC_DNEmode;
2771 break;
2773 case GTU:
2774 if (cond2 == GTU || ! cond_or)
2775 return CC_DGTUmode;
2776 if (cond2 == GEU)
2777 return CC_DGEUmode;
2778 if (cond2 == NE)
2779 return CC_DNEmode;
2780 break;
2782 /* The remaining cases only occur when both comparisons are the
2783 same. */
2784 case NE:
2785 return CC_DNEmode;
2787 case LE:
2788 return CC_DLEmode;
2790 case GE:
2791 return CC_DGEmode;
2793 case LEU:
2794 return CC_DLEUmode;
2796 case GEU:
2797 return CC_DGEUmode;
2800 abort ();
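/* An illustrative case: for (a < b) && (c <= d), i.e. COND_OR == 0,
   cond1 is LT and cond2 is LE; LT dominates LE, so CC_DLTmode is
   returned and one conditional-compare sequence can test both.  */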
2803 enum machine_mode
2804 arm_select_cc_mode (op, x, y)
2805 enum rtx_code op;
2806 rtx x;
2807 rtx y;
2809 /* All floating point compares return CCFP if it is an equality
2810 comparison, and CCFPE otherwise. */
2811 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2812 return (op == EQ || op == NE) ? CCFPmode : CCFPEmode;
2814 /* A compare with a shifted operand. Because of canonicalization, the
2815 comparison will have to be swapped when we emit the assembler. */
2816 if (GET_MODE (y) == SImode && GET_CODE (y) == REG
2817 && (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
2818 || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
2819 || GET_CODE (x) == ROTATERT))
2820 return CC_SWPmode;
2822   /* This is a special case that is used by combine to allow a
2823      comparison of a shifted byte load to be split into a zero-extend
2824      followed by a comparison of the shifted integer (only valid for
2825      equalities and unsigned inequalities). */
2826 if (GET_MODE (x) == SImode
2827 && GET_CODE (x) == ASHIFT
2828 && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 24
2829 && GET_CODE (XEXP (x, 0)) == SUBREG
2830 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == MEM
2831 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == QImode
2832 && (op == EQ || op == NE
2833 || op == GEU || op == GTU || op == LTU || op == LEU)
2834 && GET_CODE (y) == CONST_INT)
2835 return CC_Zmode;
2837   /* An operation that sets the condition codes as a side-effect: the
2838      V flag is not set correctly, so we can only use comparisons where
2839      this doesn't matter.  (For LT and GE we can use "mi" and "pl"
2840      instead.) */
2841 if (GET_MODE (x) == SImode
2842 && y == const0_rtx
2843 && (op == EQ || op == NE || op == LT || op == GE)
2844 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
2845 || GET_CODE (x) == AND || GET_CODE (x) == IOR
2846 || GET_CODE (x) == XOR || GET_CODE (x) == MULT
2847 || GET_CODE (x) == NOT || GET_CODE (x) == NEG
2848 || GET_CODE (x) == LSHIFTRT
2849 || GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
2850 || GET_CODE (x) == ROTATERT || GET_CODE (x) == ZERO_EXTRACT))
2851 return CC_NOOVmode;
2853 /* A construct for a conditional compare, if the false arm contains
2854 0, then both conditions must be true, otherwise either condition
2855 must be true. Not all conditions are possible, so CCmode is
2856 returned if it can't be done. */
2857 if (GET_CODE (x) == IF_THEN_ELSE
2858 && (XEXP (x, 2) == const0_rtx
2859 || XEXP (x, 2) == const1_rtx)
2860 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
2861 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
2862 return select_dominance_cc_mode (op, XEXP (x, 0), XEXP (x, 1),
2863 INTVAL (XEXP (x, 2)));
2865 if (GET_MODE (x) == QImode && (op == EQ || op == NE))
2866 return CC_Zmode;
2868 if (GET_MODE (x) == SImode && (op == LTU || op == GEU)
2869 && GET_CODE (x) == PLUS
2870 && (rtx_equal_p (XEXP (x, 0), y) || rtx_equal_p (XEXP (x, 1), y)))
2871 return CC_Cmode;
2873 return CCmode;
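/* Some illustrative mappings (hypothetical operands): comparing
   (plus a b) against zero for EQ yields CC_NOOVmode, so an adds
   instruction can set the flags itself, while any non-equality
   floating point comparison yields CCFPEmode rather than CCFPmode.  */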
2876 /* X and Y are two things to compare using CODE. Emit the compare insn and
2877    return the rtx for the condition code register in the proper mode.  FP means this is a
2878 floating point compare: I don't think that it is needed on the arm. */
2880 rtx
2881 gen_compare_reg (code, x, y, fp)
2882 enum rtx_code code;
2883 rtx x, y;
2885 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
2886 rtx cc_reg = gen_rtx (REG, mode, 24);
2888 emit_insn (gen_rtx (SET, VOIDmode, cc_reg,
2889 gen_rtx (COMPARE, mode, x, y)));
2891 return cc_reg;
2894 void
2895 arm_reload_in_hi (operands)
2896 rtx *operands;
2898 rtx base = find_replacement (&XEXP (operands[1], 0));
2900 emit_insn (gen_zero_extendqisi2 (operands[2], gen_rtx (MEM, QImode, base)));
2901 emit_insn (gen_zero_extendqisi2 (gen_rtx (SUBREG, SImode, operands[0], 0),
2902 gen_rtx (MEM, QImode,
2903 plus_constant (base, 1))));
2904 if (BYTES_BIG_ENDIAN)
2905 emit_insn (gen_rtx (SET, VOIDmode, gen_rtx (SUBREG, SImode,
2906 operands[0], 0),
2907 gen_rtx (IOR, SImode,
2908 gen_rtx (ASHIFT, SImode,
2909 gen_rtx (SUBREG, SImode,
2910 operands[0], 0),
2911 GEN_INT (8)),
2912 operands[2])));
2913 else
2914 emit_insn (gen_rtx (SET, VOIDmode, gen_rtx (SUBREG, SImode,
2915 operands[0], 0),
2916 gen_rtx (IOR, SImode,
2917 gen_rtx (ASHIFT, SImode,
2918 operands[2],
2919 GEN_INT (8)),
2920 gen_rtx (SUBREG, SImode, operands[0], 0))));
2923 void
2924 arm_reload_out_hi (operands)
2925 rtx *operands;
2927 rtx base = find_replacement (&XEXP (operands[0], 0));
2929 if (BYTES_BIG_ENDIAN)
2931 emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (base, 1)),
2932 gen_rtx (SUBREG, QImode, operands[1], 0)));
2933 emit_insn (gen_lshrsi3 (operands[2],
2934 gen_rtx (SUBREG, SImode, operands[1], 0),
2935 GEN_INT (8)));
2936 emit_insn (gen_movqi (gen_rtx (MEM, QImode, base),
2937 gen_rtx (SUBREG, QImode, operands[2], 0)));
2939 else
2941 emit_insn (gen_movqi (gen_rtx (MEM, QImode, base),
2942 gen_rtx (SUBREG, QImode, operands[1], 0)));
2943 emit_insn (gen_lshrsi3 (operands[2],
2944 gen_rtx (SUBREG, SImode, operands[1], 0),
2945 GEN_INT (8)));
2946 emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (base, 1)),
2947 gen_rtx (SUBREG, QImode, operands[2], 0)));
2951 /* Check to see if a branch is forwards or backwards. Return TRUE if it
2952 is backwards. */
2955 arm_backwards_branch (from, to)
2956 int from, to;
2958 return insn_addresses[to] <= insn_addresses[from];
2961 /* Check to see if a branch is within the distance that can be done using
2962 an arithmetic expression. */
2964 short_branch (from, to)
2965 int from, to;
2967 int delta = insn_addresses[from] + 8 - insn_addresses[to];
2969 return abs (delta) < 980; /* A small margin for safety */
2972 /* Check to see that the insn isn't the target of the conditionalizing
2973 code */
2975 arm_insn_not_targeted (insn)
2976 rtx insn;
2978 return insn != arm_target_insn;
2982 /* Routines for manipulation of the constant pool. */
2983 /* This is unashamedly hacked from the version in sh.c, since the problem is
2984 extremely similar. */
2986 /* ARM instructions cannot load a large constant into a register;
2987    constants have to come from a pc-relative load.  The reference of a
2988    pc-relative load instruction must be less than 1k in front of the instruction.
2989 This means that we often have to dump a constant inside a function, and
2990 generate code to branch around it.
2992 It is important to minimize this, since the branches will slow things
2993 down and make things bigger.
2995 Worst case code looks like:
2997 ldr rn, L1
2998 b L2
2999 align
3000 L1: .long value
3004 ldr rn, L3
3005 b L4
3006 align
3007 L3: .long value
3011 We fix this by performing a scan before scheduling, which notices which
3012 instructions need to have their operands fetched from the constant table
3013 and builds the table.
3016 The algorithm is:
3018    Scan, find an instruction which needs a pcrel move.  Look forward, find the
3019 last barrier which is within MAX_COUNT bytes of the requirement.
3020 If there isn't one, make one. Process all the instructions between
3021 the find and the barrier.
3023 In the above example, we can tell that L3 is within 1k of L1, so
3024 the first move can be shrunk from the 2 insn+constant sequence into
3025 just 1 insn, and the constant moved to L3 to make:
3027 ldr rn, L1
3029 ldr rn, L3
3030 b L4
3031 align
3032 L1: .long value
3033 L3: .long value
3036 Then the second move becomes the target for the shortening process.
3040 typedef struct
3042 rtx value; /* Value in table */
3043 HOST_WIDE_INT next_offset;
3044 enum machine_mode mode; /* Mode of value */
3045 } pool_node;
3047 /* The maximum number of constants that can fit into one pool, since
3048 the pc relative range is 0...1020 bytes and constants are at least 4
3049 bytes long */
3051 #define MAX_POOL_SIZE (1020/4)
3052 static pool_node pool_vector[MAX_POOL_SIZE];
3053 static int pool_size;
3054 static rtx pool_vector_label;
3056 /* Add a constant to the pool and return its label. */
3057 static HOST_WIDE_INT
3058 add_constant (x, mode)
3059 rtx x;
3060 enum machine_mode mode;
3062 int i;
3063 rtx lab;
3064 HOST_WIDE_INT offset;
3066 if (mode == SImode && GET_CODE (x) == MEM && CONSTANT_P (XEXP (x, 0))
3067 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
3068 x = get_pool_constant (XEXP (x, 0));
3069 #ifndef AOF_ASSEMBLER
3070 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == 3)
3071 x = XVECEXP (x, 0, 0);
3072 #endif
3074 /* First see if we've already got it */
3075 for (i = 0; i < pool_size; i++)
3077 if (GET_CODE (x) == pool_vector[i].value->code
3078 && mode == pool_vector[i].mode)
3080 if (GET_CODE (x) == CODE_LABEL)
3082 if (XINT (x, 3) != XINT (pool_vector[i].value, 3))
3083 continue;
3085 if (rtx_equal_p (x, pool_vector[i].value))
3086 return pool_vector[i].next_offset - GET_MODE_SIZE (mode);
3090 /* Need a new one */
3091 pool_vector[pool_size].next_offset = GET_MODE_SIZE (mode);
3092 offset = 0;
3093 if (pool_size == 0)
3094 pool_vector_label = gen_label_rtx ();
3095 else
3096 pool_vector[pool_size].next_offset
3097 += (offset = pool_vector[pool_size - 1].next_offset);
3099 pool_vector[pool_size].value = x;
3100 pool_vector[pool_size].mode = mode;
3101 pool_size++;
3102 return offset;
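/* For illustration: adding an SImode constant to an empty pool returns
   offset 0 and records next_offset = 4; adding a DFmode constant next
   returns offset 4 and records next_offset = 12, which is the layout
   dump_table below will emit.  */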
3105 /* Output the literal table */
3106 static void
3107 dump_table (scan)
3108 rtx scan;
3110 int i;
3112 scan = emit_label_after (gen_label_rtx (), scan);
3113 scan = emit_insn_after (gen_align_4 (), scan);
3114 scan = emit_label_after (pool_vector_label, scan);
3116 for (i = 0; i < pool_size; i++)
3118 pool_node *p = pool_vector + i;
3120 switch (GET_MODE_SIZE (p->mode))
3122 case 4:
3123 scan = emit_insn_after (gen_consttable_4 (p->value), scan);
3124 break;
3126 case 8:
3127 scan = emit_insn_after (gen_consttable_8 (p->value), scan);
3128 break;
3130 default:
3131 abort ();
3132 break;
3136 scan = emit_insn_after (gen_consttable_end (), scan);
3137 scan = emit_barrier_after (scan);
3138 pool_size = 0;
3141 /* Nonzero if the src operand needs to be fixed up. */
3142 static int
3143 fixit (src, mode, destreg)
3144 rtx src;
3145 enum machine_mode mode;
3146 int destreg;
3148 if (CONSTANT_P (src))
3150 if (GET_CODE (src) == CONST_INT)
3151 return (! const_ok_for_arm (INTVAL (src))
3152 && ! const_ok_for_arm (~INTVAL (src)));
3153 if (GET_CODE (src) == CONST_DOUBLE)
3154 return (GET_MODE (src) == VOIDmode
3155 || destreg < 16
3156 || (! const_double_rtx_ok_for_fpu (src)
3157 && ! neg_const_double_rtx_ok_for_fpu (src)));
3158 return symbol_mentioned_p (src);
3160 #ifndef AOF_ASSEMBLER
3161 else if (GET_CODE (src) == UNSPEC && XINT (src, 1) == 3)
3162 return 1;
3163 #endif
3164 else
3165 return (mode == SImode && GET_CODE (src) == MEM
3166 && GET_CODE (XEXP (src, 0)) == SYMBOL_REF
3167 && CONSTANT_POOL_ADDRESS_P (XEXP (src, 0)));
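/* For illustration: a load of the constant 0x12345678 needs fixing,
   since neither it nor its complement is a valid immediate, whereas
   0xff (directly) and 0xffffff00 (via MVN) can be generated in one
   insn and are left alone.  */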
3170 /* Find the last barrier less than MAX_COUNT bytes from FROM, or create one. */
3171 static rtx
3172 find_barrier (from, max_count)
3173 rtx from;
3174 int max_count;
3176 int count = 0;
3177 rtx found_barrier = 0;
3179 while (from && count < max_count)
3181 if (GET_CODE (from) == BARRIER)
3182 found_barrier = from;
3184 /* Count the length of this insn */
3185 if (GET_CODE (from) == INSN
3186 && GET_CODE (PATTERN (from)) == SET
3187 && CONSTANT_P (SET_SRC (PATTERN (from)))
3188 && CONSTANT_POOL_ADDRESS_P (SET_SRC (PATTERN (from))))
3190 rtx src = SET_SRC (PATTERN (from));
3191 count += 2;
3193 else
3194 count += get_attr_length (from);
3196 from = NEXT_INSN (from);
3199 if (!found_barrier)
3201 /* We didn't find a barrier in time to
3202 dump our stuff, so we'll make one */
3203 rtx label = gen_label_rtx ();
3205 if (from)
3206 from = PREV_INSN (from);
3207 else
3208 from = get_last_insn ();
3210 /* Walk back to be just before any jump */
3211 while (GET_CODE (from) == JUMP_INSN
3212 || GET_CODE (from) == NOTE
3213 || GET_CODE (from) == CODE_LABEL)
3214 from = PREV_INSN (from);
3216 from = emit_jump_insn_after (gen_jump (label), from);
3217 JUMP_LABEL (from) = label;
3218 found_barrier = emit_barrier_after (from);
3219 emit_label_after (label, found_barrier);
3220 return found_barrier;
3223 return found_barrier;
3226 /* Nonzero if the insn is a move instruction which needs to be fixed. */
3227 static int
3228 broken_move (insn)
3229 rtx insn;
3231 if (!INSN_DELETED_P (insn)
3232 && GET_CODE (insn) == INSN
3233 && GET_CODE (PATTERN (insn)) == SET)
3235 rtx pat = PATTERN (insn);
3236 rtx src = SET_SRC (pat);
3237 rtx dst = SET_DEST (pat);
3238 int destreg;
3239 enum machine_mode mode = GET_MODE (dst);
3240 if (dst == pc_rtx)
3241 return 0;
3243 if (GET_CODE (dst) == REG)
3244 destreg = REGNO (dst);
3245 else if (GET_CODE (dst) == SUBREG && GET_CODE (SUBREG_REG (dst)) == REG)
3246 destreg = REGNO (SUBREG_REG (dst));
3248 return fixit (src, mode, destreg);
3250 return 0;
3253 void
3254 arm_reorg (first)
3255 rtx first;
3257 rtx insn;
3258 int count_size;
3259 int regno;
3261 #if 0
3262 /* The ldr instruction can work with up to a 4k offset, and most constants
3263 will be loaded with one of these instructions; however, the adr
3264 instruction and the ldf instructions only work with a 1k offset. This
3265 code needs to be rewritten to use the 4k offset when possible, and to
3266 adjust when a 1k offset is needed. For now we just use a 1k offset
3267 from the start. */
3268 count_size = 4000;
3270 /* Floating point operands can't work further than 1024 bytes from the
3271      PC, so to make things simple we restrict all loads for such functions. */
3273 if (TARGET_HARD_FLOAT)
3274 for (regno = 16; regno < 24; regno++)
3275 if (regs_ever_live[regno])
3277 count_size = 1000;
3278 break;
3280 #else
3281 count_size = 1000;
3282 #endif /* 0 */
3284 for (insn = first; insn; insn = NEXT_INSN (insn))
3286 if (broken_move (insn))
3288 /* This is a broken move instruction, scan ahead looking for
3289 a barrier to stick the constant table behind */
3290 rtx scan;
3291 rtx barrier = find_barrier (insn, count_size);
3293 /* Now find all the moves between the points and modify them */
3294 for (scan = insn; scan != barrier; scan = NEXT_INSN (scan))
3296 if (broken_move (scan))
3298 /* This is a broken move instruction, add it to the pool */
3299 rtx pat = PATTERN (scan);
3300 rtx src = SET_SRC (pat);
3301 rtx dst = SET_DEST (pat);
3302 enum machine_mode mode = GET_MODE (dst);
3303 HOST_WIDE_INT offset;
3304 rtx newinsn = scan;
3305 rtx newsrc;
3306 rtx addr;
3307 int scratch;
3309 /* If this is an HImode constant load, convert it into
3310 an SImode constant load. Since the register is always
3311 32 bits this is safe. We have to do this, since the
3312 load pc-relative instruction only does a 32-bit load. */
3313 if (mode == HImode)
3315 mode = SImode;
3316 if (GET_CODE (dst) != REG)
3317 abort ();
3318 PUT_MODE (dst, SImode);
3321 offset = add_constant (src, mode);
3322 addr = plus_constant (gen_rtx (LABEL_REF, VOIDmode,
3323 pool_vector_label),
3324 offset);
3326 /* For wide moves to integer regs we need to split the
3327 address calculation off into a separate insn, so that
3328 the load can then be done with a load-multiple. This is
3329 safe, since we have already noted the length of such
3330 insns to be 8, and we are immediately over-writing the
3331 scratch we have grabbed with the final result. */
3332 if (GET_MODE_SIZE (mode) > 4
3333 && (scratch = REGNO (dst)) < 16)
3335 rtx reg = gen_rtx (REG, SImode, scratch);
3336 newinsn = emit_insn_after (gen_movaddr (reg, addr),
3337 newinsn);
3338 addr = reg;
3341 newsrc = gen_rtx (MEM, mode, addr);
3343 /* Build a jump insn wrapper around the move instead
3344 of an ordinary insn, because we want to have room for
3345 the target label rtx in fld[7], which an ordinary
3346 insn doesn't have. */
3347 newinsn = emit_jump_insn_after (gen_rtx (SET, VOIDmode,
3348 dst, newsrc),
3349 newinsn);
3350 JUMP_LABEL (newinsn) = pool_vector_label;
3352 /* But it's still an ordinary insn */
3353 PUT_CODE (newinsn, INSN);
3355 /* Kill old insn */
3356 delete_insn (scan);
3357 scan = newinsn;
3360 dump_table (barrier);
3361 insn = scan;
3367 /* Routines to output assembly language. */
3369 /* If the rtx is one of the eight valid FPA immediate values, return the
3370    string for that number.  In this way we can ensure that valid double
3371    constants are generated even when cross-compiling. */
3372 char *
3373 fp_immediate_constant (x)
3374 rtx x;
3376 REAL_VALUE_TYPE r;
3377 int i;
3379 if (!fpa_consts_inited)
3380 init_fpa_table ();
3382 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3383 for (i = 0; i < 8; i++)
3384 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
3385 return strings_fpa[i];
3387 abort ();
3390 /* As for fp_immediate_constant, but value is passed directly, not in rtx. */
3391 static char *
3392 fp_const_from_val (r)
3393 REAL_VALUE_TYPE *r;
3395 int i;
3397 if (! fpa_consts_inited)
3398 init_fpa_table ();
3400 for (i = 0; i < 8; i++)
3401 if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
3402 return strings_fpa[i];
3404 abort ();
3407 /* Output the operands of a LDM/STM instruction to STREAM.
3408 MASK is the ARM register set mask of which only bits 0-15 are important.
3409    INSTR is the (possibly suffixed) instruction, including the base register.
3410    HAT is nonzero if a hat must follow the register list. */
3412 void
3413 print_multi_reg (stream, instr, mask, hat)
3414 FILE *stream;
3415 char *instr;
3416 int mask, hat;
3418 int i;
3419 int not_first = FALSE;
3421 fputc ('\t', stream);
3422 fprintf (stream, instr, REGISTER_PREFIX);
3423 fputs (", {", stream);
3424 for (i = 0; i < 16; i++)
3425 if (mask & (1 << i))
3427 if (not_first)
3428 fprintf (stream, ", ");
3429 fprintf (stream, "%s%s", REGISTER_PREFIX, reg_names[i]);
3430 not_first = TRUE;
3433 fprintf (stream, "}%s\n", hat ? "^" : "");
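/* An illustrative call (hypothetical arguments): MASK = 0x4010 (bits 4
   and 14) with INSTR "ldmfd\t%ssp!" and HAT = 0 prints

	ldmfd	sp!, {r4, lr}

   assuming the default register names and an empty REGISTER_PREFIX.  */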
3436 /* Output a 'call' insn. */
3438 char *
3439 output_call (operands)
3440 rtx *operands;
3442   /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
3444 if (REGNO (operands[0]) == 14)
3446 operands[0] = gen_rtx (REG, SImode, 12);
3447 output_asm_insn ("mov%?\t%0, %|lr", operands);
3449 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
3450 output_asm_insn ("mov%?\t%|pc, %0", operands);
3451 return "";
3454 static int
3455 eliminate_lr2ip (x)
3456 rtx *x;
3458 int something_changed = 0;
3459 rtx x0 = *x;
3460 int code = GET_CODE (x0);
3461 register int i, j;
3462 register char *fmt;
3464 switch (code)
3466 case REG:
3467 if (REGNO (x0) == 14)
3469 *x = gen_rtx (REG, SImode, 12);
3470 return 1;
3472 return 0;
3473 default:
3474 /* Scan through the sub-elements and change any references there */
3475 fmt = GET_RTX_FORMAT (code);
3476 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3477 if (fmt[i] == 'e')
3478 something_changed |= eliminate_lr2ip (&XEXP (x0, i));
3479 else if (fmt[i] == 'E')
3480 for (j = 0; j < XVECLEN (x0, i); j++)
3481 something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));
3482 return something_changed;
3486 /* Output a 'call' insn that is a reference in memory. */
3488 char *
3489 output_call_mem (operands)
3490 rtx *operands;
3492 operands[0] = copy_rtx (operands[0]); /* Be ultra careful */
3493   /* Handle calls using lr by using ip (which may be clobbered in subr anyway). */
3495 if (eliminate_lr2ip (&operands[0]))
3496 output_asm_insn ("mov%?\t%|ip, %|lr", operands);
3498 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
3499 output_asm_insn ("ldr%?\t%|pc, %0", operands);
3500 return "";
3504 /* Output a move from arm registers to an fpu register.
3505    OPERANDS[0] is an fpu register.
3506    OPERANDS[1] is the first register of an arm register pair. */
3508 char *
3509 output_mov_long_double_fpu_from_arm (operands)
3510 rtx *operands;
3512 int arm_reg0 = REGNO (operands[1]);
3513 rtx ops[3];
3515 if (arm_reg0 == 12)
3516 abort();
3518 ops[0] = gen_rtx (REG, SImode, arm_reg0);
3519 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
3520 ops[2] = gen_rtx (REG, SImode, 2 + arm_reg0);
3522 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
3523 output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);
3524 return "";
3527 /* Output a move from an fpu register to arm registers.
3528    OPERANDS[0] is the first register of an arm register pair.
3529 OPERANDS[1] is an fpu register. */
3531 char *
3532 output_mov_long_double_arm_from_fpu (operands)
3533 rtx *operands;
3535 int arm_reg0 = REGNO (operands[0]);
3536 rtx ops[3];
3538 if (arm_reg0 == 12)
3539 abort();
3541 ops[0] = gen_rtx (REG, SImode, arm_reg0);
3542 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
3543 ops[2] = gen_rtx (REG, SImode, 2 + arm_reg0);
3545 output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
3546 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
3547 return "";
3550 /* Output a move from arm registers to arm registers of a long double.
3551 OPERANDS[0] is the destination.
3552 OPERANDS[1] is the source. */
3553 char *
3554 output_mov_long_double_arm_from_arm (operands)
3555 rtx *operands;
3557 /* We have to be careful here because the two might overlap */
3558 int dest_start = REGNO (operands[0]);
3559 int src_start = REGNO (operands[1]);
3560 rtx ops[2];
3561 int i;
3563 if (dest_start < src_start)
3565 for (i = 0; i < 3; i++)
3567 ops[0] = gen_rtx (REG, SImode, dest_start + i);
3568 ops[1] = gen_rtx (REG, SImode, src_start + i);
3569 output_asm_insn ("mov%?\t%0, %1", ops);
3572 else
3574 for (i = 2; i >= 0; i--)
3576 ops[0] = gen_rtx (REG, SImode, dest_start + i);
3577 ops[1] = gen_rtx (REG, SImode, src_start + i);
3578 output_asm_insn ("mov%?\t%0, %1", ops);
3582 return "";
3586 /* Output a move from arm registers to an fpu register.
3587    OPERANDS[0] is an fpu register.
3588    OPERANDS[1] is the first register of an arm register pair. */
3590 char *
3591 output_mov_double_fpu_from_arm (operands)
3592 rtx *operands;
3594 int arm_reg0 = REGNO (operands[1]);
3595 rtx ops[2];
3597 if (arm_reg0 == 12)
3598 abort();
3599 ops[0] = gen_rtx (REG, SImode, arm_reg0);
3600 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
3601 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
3602 output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
3603 return "";
3606 /* Output a move from an fpu register to arm registers.
3607    OPERANDS[0] is the first register of an arm register pair.
3608 OPERANDS[1] is an fpu register. */
3610 char *
3611 output_mov_double_arm_from_fpu (operands)
3612 rtx *operands;
3614 int arm_reg0 = REGNO (operands[0]);
3615 rtx ops[2];
3617 if (arm_reg0 == 12)
3618 abort();
3620 ops[0] = gen_rtx (REG, SImode, arm_reg0);
3621 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
3622 output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
3623 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
3624 return "";
3627 /* Output a move between double words.
3628 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
3629 or MEM<-REG and all MEMs must be offsettable addresses. */
3631 char *
3632 output_move_double (operands)
3633 rtx *operands;
3635 enum rtx_code code0 = GET_CODE (operands[0]);
3636 enum rtx_code code1 = GET_CODE (operands[1]);
3637 rtx otherops[2];
3639 if (code0 == REG)
3641 int reg0 = REGNO (operands[0]);
3643 otherops[0] = gen_rtx (REG, SImode, 1 + reg0);
3644 if (code1 == REG)
3646 int reg1 = REGNO (operands[1]);
3647 if (reg1 == 12)
3648 abort();
3650 /* Ensure the second source is not overwritten */
3651 if (reg1 == reg0 + (WORDS_BIG_ENDIAN ? -1 : 1))
3652 output_asm_insn("mov%?\t%Q0, %Q1\n\tmov%?\t%R0, %R1", operands);
3653 else
3654 output_asm_insn("mov%?\t%R0, %R1\n\tmov%?\t%Q0, %Q1", operands);
3656 else if (code1 == CONST_DOUBLE)
3658 if (GET_MODE (operands[1]) == DFmode)
3660 long l[2];
3661 union real_extract u;
3663 bcopy ((char *) &CONST_DOUBLE_LOW (operands[1]), (char *) &u,
3664 sizeof (u));
3665 REAL_VALUE_TO_TARGET_DOUBLE (u.d, l);
3666 otherops[1] = GEN_INT(l[1]);
3667 operands[1] = GEN_INT(l[0]);
3669 else if (GET_MODE (operands[1]) != VOIDmode)
3670 abort ();
3671 else if (WORDS_BIG_ENDIAN)
3674 otherops[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
3675 operands[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
3677 else
3680 otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
3681 operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
3683 output_mov_immediate (operands);
3684 output_mov_immediate (otherops);
3686 else if (code1 == CONST_INT)
3688 /* sign extend the intval into the high-order word */
3689 if (WORDS_BIG_ENDIAN)
3691 otherops[1] = operands[1];
3692 operands[1] = (INTVAL (operands[1]) < 0
3693 ? constm1_rtx : const0_rtx);
3695 else
3696 otherops[1] = INTVAL (operands[1]) < 0 ? constm1_rtx : const0_rtx;
3697 output_mov_immediate (otherops);
3698 output_mov_immediate (operands);
3700 else if (code1 == MEM)
3702 switch (GET_CODE (XEXP (operands[1], 0)))
3704 case REG:
3705 output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
3706 break;
3708 case PRE_INC:
3709 abort (); /* Should never happen now */
3710 break;
3712 case PRE_DEC:
3713 output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
3714 break;
3716 case POST_INC:
3717 output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
3718 break;
3720 case POST_DEC:
3721 abort (); /* Should never happen now */
3722 break;
3724 case LABEL_REF:
3725 case CONST:
3726 output_asm_insn ("adr%?\t%0, %1", operands);
3727 output_asm_insn ("ldm%?ia\t%0, %M0", operands);
3728 break;
3730 default:
3731 if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1)))
3733 otherops[0] = operands[0];
3734 otherops[1] = XEXP (XEXP (operands[1], 0), 0);
3735 otherops[2] = XEXP (XEXP (operands[1], 0), 1);
3736 if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
3738 if (GET_CODE (otherops[2]) == CONST_INT)
3740 switch (INTVAL (otherops[2]))
3742 case -8:
3743 output_asm_insn ("ldm%?db\t%1, %M0", otherops);
3744 return "";
3745 case -4:
3746 output_asm_insn ("ldm%?da\t%1, %M0", otherops);
3747 return "";
3748 case 4:
3749 output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
3750 return "";
3752 if (!(const_ok_for_arm (INTVAL (otherops[2]))))
3753 output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
3754 else
3755 output_asm_insn ("add%?\t%0, %1, %2", otherops);
3757 else
3758 output_asm_insn ("add%?\t%0, %1, %2", otherops);
3760 else
3761 output_asm_insn ("sub%?\t%0, %1, %2", otherops);
3762 return "ldm%?ia\t%0, %M0";
3764 else
3766 otherops[1] = adj_offsettable_operand (operands[1], 4);
3767 /* Take care of overlapping base/data reg. */
3768 if (reg_mentioned_p (operands[0], operands[1]))
3770 output_asm_insn ("ldr%?\t%0, %1", otherops);
3771 output_asm_insn ("ldr%?\t%0, %1", operands);
3773 else
3775 output_asm_insn ("ldr%?\t%0, %1", operands);
3776 output_asm_insn ("ldr%?\t%0, %1", otherops);
3781 else
3782 abort(); /* Constraints should prevent this */
3784 else if (code0 == MEM && code1 == REG)
3786 if (REGNO (operands[1]) == 12)
3787 abort();
3789 switch (GET_CODE (XEXP (operands[0], 0)))
3791 case REG:
3792 output_asm_insn ("stm%?ia\t%m0, %M1", operands);
3793 break;
3795 case PRE_INC:
3796 abort (); /* Should never happen now */
3797 break;
3799 case PRE_DEC:
3800 output_asm_insn ("stm%?db\t%m0!, %M1", operands);
3801 break;
3803 case POST_INC:
3804 output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
3805 break;
3807 case POST_DEC:
3808 abort (); /* Should never happen now */
3809 break;
3811 case PLUS:
3812 if (GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT)
3814 switch (INTVAL (XEXP (XEXP (operands[0], 0), 1)))
3816 case -8:
3817 output_asm_insn ("stm%?db\t%m0, %M1", operands);
3818 return "";
3820 case -4:
3821 output_asm_insn ("stm%?da\t%m0, %M1", operands);
3822 return "";
3824 case 4:
3825 output_asm_insn ("stm%?ib\t%m0, %M1", operands);
3826 return "";
3829 /* Fall through */
3831 default:
3832 otherops[0] = adj_offsettable_operand (operands[0], 4);
3833 otherops[1] = gen_rtx (REG, SImode, 1 + REGNO (operands[1]));
3834 output_asm_insn ("str%?\t%1, %0", operands);
3835 output_asm_insn ("str%?\t%1, %0", otherops);
3838 else
3839 abort(); /* Constraints should prevent this */
3841 return "";
3845 /* Output an arbitrary MOV reg, #n.
3846 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
3848 char *
3849 output_mov_immediate (operands)
3850 rtx *operands;
3852 HOST_WIDE_INT n = INTVAL (operands[1]);
3853 int n_ones = 0;
3854 int i;
3856 /* Try to use one MOV */
3857 if (const_ok_for_arm (n))
3859 output_asm_insn ("mov%?\t%0, %1", operands);
3860 return "";
3863 /* Try to use one MVN */
3864 if (const_ok_for_arm (~n))
3866 operands[1] = GEN_INT (~n);
3867 output_asm_insn ("mvn%?\t%0, %1", operands);
3868 return "";
3871 /* If all else fails, make it out of ORRs or BICs as appropriate. */
3873 for (i=0; i < 32; i++)
3874 if (n & 1 << i)
3875 n_ones++;
3877 if (n_ones > 16) /* Shorter to use MVN with BIC in this case. */
3878 output_multi_immediate(operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1,
3879 ~n);
3880 else
3881     output_multi_immediate(operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1,
3882			    n);
3884 return "";
3888 /* Output an ADD r, s, #n where n may be too big for one instruction. If
3889 adding zero to one register, output nothing. */
3891 char *
3892 output_add_immediate (operands)
3893 rtx *operands;
3895 HOST_WIDE_INT n = INTVAL (operands[2]);
3897 if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
3899 if (n < 0)
3900 output_multi_immediate (operands,
3901 "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
3902 -n);
3903 else
3904 output_multi_immediate (operands,
3905 "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
3909 return "";
3912 /* Output a multiple immediate operation.
3913 OPERANDS is the vector of operands referred to in the output patterns.
3914 INSTR1 is the output pattern to use for the first constant.
3915 INSTR2 is the output pattern to use for subsequent constants.
3916 IMMED_OP is the index of the constant slot in OPERANDS.
3917 N is the constant value. */
3919 char *
3920 output_multi_immediate (operands, instr1, instr2, immed_op, n)
3921 rtx *operands;
3922 char *instr1, *instr2;
3923 int immed_op;
3924 HOST_WIDE_INT n;
3926 #if HOST_BITS_PER_WIDE_INT > 32
3927 n &= 0xffffffff;
3928 #endif
3930 if (n == 0)
3932 operands[immed_op] = const0_rtx;
3933 output_asm_insn (instr1, operands); /* Quick and easy output */
3935 else
3937 int i;
3938 char *instr = instr1;
3940 /* Note that n is never zero here (which would give no output) */
3941 for (i = 0; i < 32; i += 2)
3943 if (n & (3 << i))
3945 operands[immed_op] = GEN_INT (n & (255 << i));
3946 output_asm_insn (instr, operands);
3947 instr = instr2;
3948 i += 6;
3952 return "";
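/* A worked example (hypothetical value): for N = 0x10003 the loop first
   matches bits 0-1 and emits INSTR1 with #3, then matches bit 16 and
   emits INSTR2 with #65536, giving for output_mov_immediate

	mov	r0, #3
	orr	r0, r0, #65536  */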
3956 /* Return the appropriate ARM instruction for the operation code.
3957 The returned result should not be overwritten. OP is the rtx of the
3958 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
3959 was shifted. */
3961 char *
3962 arithmetic_instr (op, shift_first_arg)
3963 rtx op;
3964 int shift_first_arg;
3966 switch (GET_CODE (op))
3968 case PLUS:
3969 return "add";
3971 case MINUS:
3972 return shift_first_arg ? "rsb" : "sub";
3974 case IOR:
3975 return "orr";
3977 case XOR:
3978 return "eor";
3980 case AND:
3981 return "and";
3983 default:
3984 abort ();
3989 /* Ensure valid constant shifts and return the appropriate shift mnemonic
3990 for the operation code. The returned result should not be overwritten.
3991 OP is the rtx code of the shift.
3992    On exit, *AMOUNTP will be -1 if the shift is by a register, or the
3993    shift amount if it is by a constant. */
3995 static char *
3996 shift_op (op, amountp)
3997 rtx op;
3998 HOST_WIDE_INT *amountp;
4000 char *mnem;
4001 enum rtx_code code = GET_CODE (op);
4003 if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
4004 *amountp = -1;
4005 else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
4006 *amountp = INTVAL (XEXP (op, 1));
4007 else
4008 abort ();
4010 switch (code)
4012 case ASHIFT:
4013 mnem = "asl";
4014 break;
4016 case ASHIFTRT:
4017 mnem = "asr";
4018 break;
4020 case LSHIFTRT:
4021 mnem = "lsr";
4022 break;
4024 case ROTATERT:
4025 mnem = "ror";
4026 break;
4028 case MULT:
4029 /* We never have to worry about the amount being other than a
4030 power of 2, since this case can never be reloaded from a reg. */
4031 if (*amountp != -1)
4032 *amountp = int_log2 (*amountp);
4033 else
4034 abort ();
4035 return "asl";
4037 default:
4038 abort ();
4041 if (*amountp != -1)
4043 /* This is not 100% correct, but follows from the desire to merge
4044 multiplication by a power of 2 with the recognizer for a
4045 shift. >=32 is not a valid shift for "asl", so we must try and
4046 output a shift that produces the correct arithmetical result.
4047 Using lsr #32 is identical except for the fact that the carry bit
4048 is not set correctly if we set the flags; but we never use the
4049 carry bit from such an operation, so we can ignore that. */
4050 if (code == ROTATERT)
4051 *amountp &= 31; /* Rotate is just modulo 32 */
4052 else if (*amountp != (*amountp & 31))
4054 if (code == ASHIFT)
4055 mnem = "lsr";
4056 *amountp = 32;
4059 /* Shifts of 0 are no-ops. */
4060 if (*amountp == 0)
4061 return NULL;
4064 return mnem;
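/* Illustrative cases (hypothetical operands): (mult x 8) returns "asl"
   with *AMOUNTP set to 3; (ashift x 40) is out of range, so "lsr" is
   returned with *AMOUNTP forced to 32, which gives the same
   arithmetical result.  */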
4068 /* Obtain the shift from the POWER of two. */
4070 HOST_WIDE_INT
4071 int_log2 (power)
4072 HOST_WIDE_INT power;
4074 HOST_WIDE_INT shift = 0;
4076 while (((((HOST_WIDE_INT) 1) << shift) & power) == 0)
4078 if (shift > 31)
4079 abort ();
4080 shift++;
4083 return shift;
4086 /* Output a .ascii pseudo-op, keeping track of lengths. This is because
4087 /bin/as is horribly restrictive. */
4089 void
4090 output_ascii_pseudo_op (stream, p, len)
4091 FILE *stream;
4092 unsigned char *p;
4093 int len;
4095 int i;
4096 int len_so_far = 1000;
4097 int chars_so_far = 0;
4099 for (i = 0; i < len; i++)
4101 register int c = p[i];
4103 if (len_so_far > 50)
4105 if (chars_so_far)
4106 fputs ("\"\n", stream);
4107 fputs ("\t.ascii\t\"", stream);
4108 len_so_far = 0;
4109 arm_increase_location (chars_so_far);
4110 chars_so_far = 0;
4113 if (c == '\"' || c == '\\')
4115 putc('\\', stream);
4116 len_so_far++;
4119 if (c >= ' ' && c < 0177)
4121 putc (c, stream);
4122 len_so_far++;
4124 else
4126 fprintf (stream, "\\%03o", c);
4127 len_so_far +=4;
4130 chars_so_far++;
4133 fputs ("\"\n", stream);
4134 arm_increase_location (chars_so_far);
4138 /* Try to determine whether a pattern really clobbers the link register.
4139 This information is useful when peepholing, so that lr need not be pushed
4140 if we combine a call followed by a return.
4141 NOTE: This code does not check for side-effect expressions in a SET_SRC:
4142 such a check should not be needed because these only update an existing
4143 value within a register; the register must still be set elsewhere within
4144 the function. */
4146 static int
4147 pattern_really_clobbers_lr (x)
4148 rtx x;
4150 int i;
4152 switch (GET_CODE (x))
4154 case SET:
4155 switch (GET_CODE (SET_DEST (x)))
4157 case REG:
4158 return REGNO (SET_DEST (x)) == 14;
4160 case SUBREG:
4161 if (GET_CODE (XEXP (SET_DEST (x), 0)) == REG)
4162 return REGNO (XEXP (SET_DEST (x), 0)) == 14;
4164 if (GET_CODE (XEXP (SET_DEST (x), 0)) == MEM)
4165 return 0;
4166 abort ();
4168 default:
4169 return 0;
4172 case PARALLEL:
4173 for (i = 0; i < XVECLEN (x, 0); i++)
4174 if (pattern_really_clobbers_lr (XVECEXP (x, 0, i)))
4175 return 1;
4176 return 0;
4178 case CLOBBER:
4179 switch (GET_CODE (XEXP (x, 0)))
4181 case REG:
4182 return REGNO (XEXP (x, 0)) == 14;
4184 case SUBREG:
4185 if (GET_CODE (XEXP (XEXP (x, 0), 0)) == REG)
4186 return REGNO (XEXP (XEXP (x, 0), 0)) == 14;
4187 abort ();
4189 default:
4190 return 0;
4193 case UNSPEC:
4194 return 1;
4196 default:
4197 return 0;
4201 static int
4202 function_really_clobbers_lr (first)
4203 rtx first;
4205 rtx insn, next;
4207 for (insn = first; insn; insn = next_nonnote_insn (insn))
4209 switch (GET_CODE (insn))
4211 case BARRIER:
4212 case NOTE:
4213 case CODE_LABEL:
4214 case JUMP_INSN: /* Jump insns only change the PC (and conds) */
4215 case INLINE_HEADER:
4216 break;
4218 case INSN:
4219 if (pattern_really_clobbers_lr (PATTERN (insn)))
4220 return 1;
4221 break;
4223 case CALL_INSN:
4224 /* Don't yet know how to handle those calls that are not to a
4225 SYMBOL_REF */
4226 if (GET_CODE (PATTERN (insn)) != PARALLEL)
4227 abort ();
4229 switch (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)))
4231 case CALL:
4232 if (GET_CODE (XEXP (XEXP (XVECEXP (PATTERN (insn), 0, 0), 0), 0))
4233 != SYMBOL_REF)
4234 return 1;
4235 break;
4237 case SET:
4238 if (GET_CODE (XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn),
4239 0, 0)), 0), 0))
4240 != SYMBOL_REF)
4241 return 1;
4242 break;
4244 default: /* Don't recognize it, be safe */
4245 return 1;
4248 /* A call can be made (by peepholing) not to clobber lr iff it is
4249 followed by a return. There may, however, be a use insn iff
4250 we are returning the result of the call.
4251 If we run off the end of the insn chain, then that means the
4252 call was at the end of the function. Unfortunately we don't
4253 have a return insn for the peephole to recognize, so we
4254 must reject this. (Can this be fixed by adding our own insn?) */
4255 if ((next = next_nonnote_insn (insn)) == NULL)
4256 return 1;
4258 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == USE
4259 && (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
4260 && (REGNO (SET_DEST (XVECEXP (PATTERN (insn), 0, 0)))
4261 == REGNO (XEXP (PATTERN (next), 0))))
4262 if ((next = next_nonnote_insn (next)) == NULL)
4263 return 1;
4265 if (GET_CODE (next) == JUMP_INSN
4266 && GET_CODE (PATTERN (next)) == RETURN)
4267 break;
4268 return 1;
4270 default:
4271 abort ();
4275 /* We have reached the end of the chain so lr was _not_ clobbered */
4276 return 0;
4279 char *
4280 output_return_instruction (operand, really_return, reverse)
4281 rtx operand;
4282 int really_return;
4283 int reverse;
4285 char instr[100];
4286 int reg, live_regs = 0;
4287 int volatile_func = (optimize > 0
4288 && TREE_THIS_VOLATILE (current_function_decl));
4290 return_used_this_function = 1;
4292 if (volatile_func)
4294 rtx ops[2];
4295 /* If this function was declared non-returning, and we have found a tail
4296 call, then we have to trust that the called function won't return. */
4297 if (! really_return)
4298 return "";
4300 /* Otherwise, trap an attempted return by aborting. */
4301 ops[0] = operand;
4302 ops[1] = gen_rtx (SYMBOL_REF, Pmode, "abort");
4303 assemble_external_libcall (ops[1]);
4304 output_asm_insn (reverse ? "bl%D0\t%a1" : "bl%d0\t%a1", ops);
4305 return "";
4308 if (current_function_calls_alloca && ! really_return)
4309 abort();
4311 for (reg = 0; reg <= 10; reg++)
4312 if (regs_ever_live[reg] && ! call_used_regs[reg])
4313 live_regs++;
4315 if (live_regs || (regs_ever_live[14] && ! lr_save_eliminated))
4316 live_regs++;
4318 if (frame_pointer_needed)
4319 live_regs += 4;
4321 if (live_regs)
4323 if (lr_save_eliminated || ! regs_ever_live[14])
4324 live_regs++;
4326 if (frame_pointer_needed)
4327 strcpy (instr,
4328 reverse ? "ldm%?%D0ea\t%|fp, {" : "ldm%?%d0ea\t%|fp, {");
4329 else
4330 strcpy (instr,
4331 reverse ? "ldm%?%D0fd\t%|sp!, {" : "ldm%?%d0fd\t%|sp!, {");
4333 for (reg = 0; reg <= 10; reg++)
4334 if (regs_ever_live[reg] && ! call_used_regs[reg])
4336 strcat (instr, "%|");
4337 strcat (instr, reg_names[reg]);
4338 if (--live_regs)
4339 strcat (instr, ", ");
4342 if (frame_pointer_needed)
4344 strcat (instr, "%|");
4345 strcat (instr, reg_names[11]);
4346 strcat (instr, ", ");
4347 strcat (instr, "%|");
4348 strcat (instr, reg_names[13]);
4349 strcat (instr, ", ");
4350 strcat (instr, "%|");
4351 strcat (instr, really_return ? reg_names[15] : reg_names[14]);
4353 else
4355 strcat (instr, "%|");
4356 strcat (instr, really_return ? reg_names[15] : reg_names[14]);
4358 strcat (instr, (TARGET_APCS_32 || !really_return) ? "}" : "}^");
4359 output_asm_insn (instr, &operand);
4361 else if (really_return)
4363 sprintf (instr, "mov%%?%%%s0%s\t%%|pc, %%|lr",
4364 reverse ? "D" : "d", TARGET_APCS_32 ? "" : "s");
4365 output_asm_insn (instr, &operand);
4368 return "";
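/* For illustration (hypothetical function): if only r4 and r5 are
   live and no frame pointer is needed, the return collapses to the
   single insn

	ldmfd	sp!, {r4, r5, pc}

   with lr in place of pc when REALLY_RETURN is false.  */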
4371 /* Return nonzero if optimizing and the current function is volatile.
4372 Such functions never return, and many memory cycles can be saved
4373 by not storing register values that will never be needed again.
4374 This optimization was added to speed up context switching in a
4375 kernel application. */
4378 arm_volatile_func ()
4380 return (optimize > 0 && TREE_THIS_VOLATILE (current_function_decl));
4383 /* Return the size of the prologue. It's not too bad if we slightly
4384 over-estimate. */
4386 static int
4387 get_prologue_size ()
4389 return profile_flag ? 12 : 0;
4392 /* The amount of stack adjustment that happens here, in output_return and in
4393 output_epilogue must be exactly the same as was calculated during reload,
4394 or things will point to the wrong place. The only time we can safely
4395 ignore this constraint is when a function has no arguments on the stack,
4396    no stack frame requirement and no live registers except for `lr'.  If we
4397 can guarantee that by making all function calls into tail calls and that
4398 lr is not clobbered in any other way, then there is no need to push lr
4399 onto the stack. */
4401 void
4402 output_func_prologue (f, frame_size)
4403 FILE *f;
4404 int frame_size;
4406 int reg, live_regs_mask = 0;
4407 rtx operands[3];
4408 int volatile_func = (optimize > 0
4409 && TREE_THIS_VOLATILE (current_function_decl));
4411 /* Nonzero if we must stuff some register arguments onto the stack as if
4412 they were passed there. */
4413 int store_arg_regs = 0;
4415 if (arm_ccfsm_state || arm_target_insn)
4416 abort (); /* Sanity check */
4418 if (arm_naked_function_p (current_function_decl))
4419 return;
4421 return_used_this_function = 0;
4422 lr_save_eliminated = 0;
4424 fprintf (f, "\t%s args = %d, pretend = %d, frame = %d\n",
4425 ASM_COMMENT_START, current_function_args_size,
4426 current_function_pretend_args_size, frame_size);
4427 fprintf (f, "\t%s frame_needed = %d, current_function_anonymous_args = %d\n",
4428 ASM_COMMENT_START, frame_pointer_needed,
4429 current_function_anonymous_args);
4431 if (volatile_func)
4432 fprintf (f, "\t%s Volatile function.\n", ASM_COMMENT_START);
4434 if (current_function_anonymous_args && current_function_pretend_args_size)
4435 store_arg_regs = 1;
4437 for (reg = 0; reg <= 10; reg++)
4438 if (regs_ever_live[reg] && ! call_used_regs[reg])
4439 live_regs_mask |= (1 << reg);
4441 if (frame_pointer_needed)
4442 live_regs_mask |= 0xD800;
4443 else if (regs_ever_live[14])
4445 if (! current_function_args_size
4446 && ! function_really_clobbers_lr (get_insns ()))
4447 lr_save_eliminated = 1;
4448 else
4449 live_regs_mask |= 0x4000;
4452 if (live_regs_mask)
4454       /* If a DImode load/store multiple is used, and the base register
4455	  is r3, then r4 can become an ever-live register without lr
4456	  doing so; in this case we need to push lr as well, or we
4457	  will fail to get a proper return. */
4459 live_regs_mask |= 0x4000;
4460 lr_save_eliminated = 0;
4464 if (lr_save_eliminated)
4465 fprintf (f,"\t%s I don't think this function clobbers lr\n",
4466 ASM_COMMENT_START);
4470 void
4471 output_func_epilogue (f, frame_size)
4472 FILE *f;
4473 int frame_size;
4475 int reg, live_regs_mask = 0, code_size = 0;
4476   /* If we need this then it will always be at least this much. */
4477 int floats_offset = 24;
4478 rtx operands[3];
4479 int volatile_func = (optimize > 0
4480 && TREE_THIS_VOLATILE (current_function_decl));
4482 if (use_return_insn() && return_used_this_function)
4484 if (frame_size && !(frame_pointer_needed || TARGET_APCS))
4486 abort ();
4488 goto epilogue_done;
4491 /* Naked functions don't have epilogues. */
4492 if (arm_naked_function_p (current_function_decl))
4493 goto epilogue_done;
4495 /* A volatile function should never return. Call abort. */
4496 if (volatile_func)
4498 rtx op = gen_rtx (SYMBOL_REF, Pmode, "abort");
4499 assemble_external_libcall (op);
4500 output_asm_insn ("bl\t%a0", &op);
4501 code_size = 4;
4502 goto epilogue_done;
4505 for (reg = 0; reg <= 10; reg++)
4506 if (regs_ever_live[reg] && ! call_used_regs[reg])
4508 live_regs_mask |= (1 << reg);
4509 floats_offset += 4;
4512 if (frame_pointer_needed)
4514 for (reg = 23; reg > 15; reg--)
4515 if (regs_ever_live[reg] && ! call_used_regs[reg])
4517 fprintf (f, "\tldfe\t%s%s, [%sfp, #-%d]\n", REGISTER_PREFIX,
4518 reg_names[reg], REGISTER_PREFIX, floats_offset);
4519 floats_offset += 12;
4520 code_size += 4;
4523 live_regs_mask |= 0xA800;
4524 print_multi_reg (f, "ldmea\t%sfp", live_regs_mask,
4525 TARGET_APCS_32 ? FALSE : TRUE);
4526 code_size += 4;
4528 else
4530 /* Restore stack pointer if necessary. */
4531 if (frame_size)
4533 operands[0] = operands[1] = stack_pointer_rtx;
4534 operands[2] = gen_rtx (CONST_INT, VOIDmode, frame_size);
4535 output_add_immediate (operands);
4538 for (reg = 16; reg < 24; reg++)
4539 if (regs_ever_live[reg] && ! call_used_regs[reg])
4541 fprintf (f, "\tldfe\t%s%s, [%ssp], #12\n", REGISTER_PREFIX,
4542 reg_names[reg], REGISTER_PREFIX);
4543 code_size += 4;
4545 if (current_function_pretend_args_size == 0 && regs_ever_live[14])
4547 print_multi_reg (f, "ldmfd\t%ssp!", live_regs_mask | 0x8000,
4548 TARGET_APCS_32 ? FALSE : TRUE);
4549 code_size += 4;
4551 else
4553 if (live_regs_mask || regs_ever_live[14])
4555 live_regs_mask |= 0x4000;
4556 print_multi_reg (f, "ldmfd\t%ssp!", live_regs_mask, FALSE);
4557 code_size += 4;
4559 if (current_function_pretend_args_size)
4561 operands[0] = operands[1] = stack_pointer_rtx;
4562 operands[2] = gen_rtx (CONST_INT, VOIDmode,
4563 current_function_pretend_args_size);
4564 output_add_immediate (operands);
4566 fprintf (f, (TARGET_APCS_32 ? "\tmov\t%spc, %slr\n"
4567 : "\tmovs\t%spc, %slr\n"),
4568 REGISTER_PREFIX, REGISTER_PREFIX);
4569 code_size += 4;
4573 epilogue_done:
4575 /* ??? Only adjust the location counter when optimizing; the stated
4576 reason (that insn_addresses isn't allocated otherwise) is suspect. */
4578 if (optimize > 0)
4579 arm_increase_location (code_size
4580 + insn_addresses[INSN_UID (get_last_insn ())]
4581 + get_prologue_size ());
4583 current_function_anonymous_args = 0;
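/* Worked example (illustrative): in the frame-pointer case above,
   live_regs_mask |= 0xA800 adds fp (bit 11), sp (bit 13) and pc (bit
   15), so a function that also saved r4 returns with roughly

       ldmea   fp, {r4, fp, sp, pc}

   while the frameless path restores any float registers with ldfe,
   pops with "ldmfd sp!, ...", and finishes with "mov pc, lr" (or
   "movs" when not compiling for APCS-32).  */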
4586 static void
4587 emit_multi_reg_push (mask)
4588 int mask;
4590 int num_regs = 0;
4591 int i, j;
4592 rtx par;
4594 for (i = 0; i < 16; i++)
4595 if (mask & (1 << i))
4596 num_regs++;
4598 if (num_regs == 0 || num_regs > 16)
4599 abort ();
4601 par = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num_regs));
4603 for (i = 0; i < 16; i++)
4605 if (mask & (1 << i))
4607 XVECEXP (par, 0, 0)
4608 = gen_rtx (SET, VOIDmode, gen_rtx (MEM, BLKmode,
4609 gen_rtx (PRE_DEC, BLKmode,
4610 stack_pointer_rtx)),
4611 gen_rtx (UNSPEC, BLKmode,
4612 gen_rtvec (1, gen_rtx (REG, SImode, i)),
4613 2));
4614 break;
4618 for (j = 1, i++; j < num_regs; i++)
4620 if (mask & (1 << i))
4622 XVECEXP (par, 0, j)
4623 = gen_rtx (USE, VOIDmode, gen_rtx (REG, SImode, i));
4624 j++;
4627 emit_insn (par);
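/* Usage sketch (illustrative): to push r4, r5 and lr in a single insn a
   caller would build a mask with one bit per register,

       emit_multi_reg_push ((1 << 4) | (1 << 5) | (1 << 14));

   Note that only the lowest register's store is expressed as a real
   SET above; the remaining registers are recorded as USEs, which is
   enough for the matching store-multiple pattern without spelling out
   every memory write in the RTL.  */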
4630 void
4631 arm_expand_prologue ()
4633 int reg;
4634 rtx amount = GEN_INT (- get_frame_size ());
4635 rtx push_insn;
4636 int num_regs;
4637 int live_regs_mask = 0;
4638 int store_arg_regs = 0;
4639 int volatile_func = (optimize > 0
4640 && TREE_THIS_VOLATILE (current_function_decl));
4642 /* Naked functions don't have prologues. */
4643 if (arm_naked_function_p (current_function_decl))
4644 return;
4646 if (current_function_anonymous_args && current_function_pretend_args_size)
4647 store_arg_regs = 1;
4649 if (! volatile_func)
4650 for (reg = 0; reg <= 10; reg++)
4651 if (regs_ever_live[reg] && ! call_used_regs[reg])
4652 live_regs_mask |= 1 << reg;
4654 if (! volatile_func && regs_ever_live[14])
4655 live_regs_mask |= 0x4000;
4657 if (frame_pointer_needed)
4659 live_regs_mask |= 0xD800;
4660 emit_insn (gen_movsi (gen_rtx (REG, SImode, 12),
4661 stack_pointer_rtx));
4664 if (current_function_pretend_args_size)
4666 if (store_arg_regs)
4667 emit_multi_reg_push ((0xf0 >> (current_function_pretend_args_size / 4))
4668 & 0xf);
4669 else
4670 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
4671 GEN_INT (-current_function_pretend_args_size)));
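  /* Worked example (illustrative): with 8 bytes of pretend args the
     store_arg_regs mask above is (0xf0 >> (8 / 4)) & 0xf
     == 0x3c & 0xf == 0xc, selecting r2 and r3 -- exactly the two
     argument registers that must be stored as if the caller had
     passed them on the stack.  */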
4674 if (live_regs_mask)
4676 /* If we have to push any regs, then we must push lr as well, or
4677 we won't get a proper return. */
4678 live_regs_mask |= 0x4000;
4679 emit_multi_reg_push (live_regs_mask);
4682 /* For now the integer regs are still pushed in output_func_epilogue (). */
4684 if (! volatile_func)
4685 for (reg = 23; reg > 15; reg--)
4686 if (regs_ever_live[reg] && ! call_used_regs[reg])
4687 emit_insn (gen_rtx (SET, VOIDmode,
4688 gen_rtx (MEM, XFmode,
4689 gen_rtx (PRE_DEC, XFmode,
4690 stack_pointer_rtx)),
4691 gen_rtx (REG, XFmode, reg)));
4693 if (frame_pointer_needed)
4694 emit_insn (gen_addsi3 (hard_frame_pointer_rtx, gen_rtx (REG, SImode, 12),
4695 (GEN_INT
4696 (-(4 + current_function_pretend_args_size)))));
4698 if (amount != const0_rtx)
4700 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, amount));
4701 emit_insn (gen_rtx (CLOBBER, VOIDmode,
4702 gen_rtx (MEM, BLKmode, stack_pointer_rtx)));
4705 /* If we are profiling, make sure no instructions are scheduled before
4706 the call to mcount. */
4707 if (profile_flag || profile_block_flag)
4708 emit_insn (gen_blockage ());
4712 /* If CODE is 'd', then X is a condition operand and the instruction
4713 should only be executed if the condition is true.
4714 If CODE is 'D', then X is a condition operand and the instruction
4715 should only be executed if the condition is false: however, if the mode
4716 of the comparison is CCFPEmode, then always execute the instruction -- we
4717 do this because in these circumstances !GE does not necessarily imply LT;
4718 in these cases the instruction pattern will take care to make sure that
4719 an instruction containing %d will follow, thereby undoing the effects of
4720 doing this instruction unconditionally.
4721 If CODE is 'N' then X is a floating point operand that must be negated
4722 before output.
4723 If CODE is 'B' then output a bitwise inverted value of X (a const int).
4724 If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */
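/* Illustrative sketches (not actual templates from the machine
   description) of how these codes are used in output templates:

       add%d2\t%0, %0, #1      @ only executed if condition %2 holds
       mvn%?\t%0, %B1          @ %B prints ~INTVAL, so the constant
                               @ 0xffffff00 is emitted as #255
  */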
4726 void
4727 arm_print_operand (stream, x, code)
4728 FILE *stream;
4729 rtx x;
4730 int code;
4732 switch (code)
4734 case '@':
4735 fputs (ASM_COMMENT_START, stream);
4736 return;
4738 case '|':
4739 fputs (REGISTER_PREFIX, stream);
4740 return;
4742 case '?':
4743 if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
4744 fputs (arm_condition_codes[arm_current_cc], stream);
4745 return;
4747 case 'N':
4749 REAL_VALUE_TYPE r;
4750 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
4751 r = REAL_VALUE_NEGATE (r);
4752 fprintf (stream, "%s", fp_const_from_val (&r));
4754 return;
4756 case 'B':
4757 if (GET_CODE (x) == CONST_INT)
4758 fprintf (stream,
4759 #if HOST_BITS_PER_WIDE_INT == HOST_BITS_PER_INT
4760 "%d",
4761 #else
4762 "%ld",
4763 #endif
4764 ARM_SIGN_EXTEND (~ INTVAL (x)));
4765 else
4767 putc ('~', stream);
4768 output_addr_const (stream, x);
4770 return;
4772 case 'i':
4773 fprintf (stream, "%s", arithmetic_instr (x, 1));
4774 return;
4776 case 'I':
4777 fprintf (stream, "%s", arithmetic_instr (x, 0));
4778 return;
4780 case 'S':
4782 HOST_WIDE_INT val;
4783 char *shift = shift_op (x, &val);
4785 if (shift)
4787 fprintf (stream, ", %s ", shift);
4788 if (val == -1)
4789 arm_print_operand (stream, XEXP (x, 1), 0);
4790 else
4791 fprintf (stream,
4792 #if HOST_BITS_PER_WIDE_INT == HOST_BITS_PER_INT
4793 "#%d",
4794 #else
4795 "#%ld",
4796 #endif
4797 val);
4800 return;
4802 case 'Q':
4803 if (REGNO (x) > 15)
4804 abort ();
4805 fputs (REGISTER_PREFIX, stream);
4806 fputs (reg_names[REGNO (x) + (WORDS_BIG_ENDIAN ? 1 : 0)], stream);
4807 return;
4809 case 'R':
4810 if (REGNO (x) > 15)
4811 abort ();
4812 fputs (REGISTER_PREFIX, stream);
4813 fputs (reg_names[REGNO (x) + (WORDS_BIG_ENDIAN ? 0 : 1)], stream);
4814 return;
4816 case 'm':
4817 fputs (REGISTER_PREFIX, stream);
4818 if (GET_CODE (XEXP (x, 0)) == REG)
4819 fputs (reg_names[REGNO (XEXP (x, 0))], stream);
4820 else
4821 fputs (reg_names[REGNO (XEXP (XEXP (x, 0), 0))], stream);
4822 return;
4824 case 'M':
4825 fprintf (stream, "{%s%s-%s%s}", REGISTER_PREFIX, reg_names[REGNO (x)],
4826 REGISTER_PREFIX, reg_names[REGNO (x) - 1
4827 + ((GET_MODE_SIZE (GET_MODE (x))
4828 + GET_MODE_SIZE (SImode) - 1)
4829 / GET_MODE_SIZE (SImode))]);
4830 return;
4832 case 'd':
4833 if (x)
4834 fputs (arm_condition_codes[get_arm_condition_code (x)],
4835 stream);
4836 return;
4838 case 'D':
4839 if (x)
4840 fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
4841 (get_arm_condition_code (x))],
4842 stream);
4843 return;
4845 default:
4846 if (x == 0)
4847 abort ();
4849 if (GET_CODE (x) == REG)
4851 fputs (REGISTER_PREFIX, stream);
4852 fputs (reg_names[REGNO (x)], stream);
4854 else if (GET_CODE (x) == MEM)
4856 output_memory_reference_mode = GET_MODE (x);
4857 output_address (XEXP (x, 0));
4859 else if (GET_CODE (x) == CONST_DOUBLE)
4860 fprintf (stream, "#%s", fp_immediate_constant (x));
4861 else if (GET_CODE (x) == NEG)
4862 abort (); /* This should never happen now. */
4863 else
4865 fputc ('#', stream);
4866 output_addr_const (stream, x);
4871 /* Increase the `arm_text_location' by AMOUNT if we're in the text
4872 segment. */
4874 void
4875 arm_increase_location (amount)
4876 int amount;
4878 if (in_text_section ())
4879 arm_text_location += amount;
4883 /* Output a label definition. If this label is within the .text segment, it
4884 is stored in OFFSET_TABLE, to be used when building `llc' instructions.
4885 GCC may keep names not starting with a `*' alive for a long time, but
4886 such names are in the minority anyway, so we just make a copy. Do not
4887 store the leading `*' if the name starts with one. */
4889 void
4890 arm_asm_output_label (stream, name)
4891 FILE *stream;
4892 char *name;
4894 char *real_name, *s;
4895 struct label_offset *cur;
4896 int hash = 0;
4898 ARM_OUTPUT_LABEL (stream, name);
4899 if (! in_text_section ())
4900 return;
4902 if (name[0] == '*')
4904 real_name = xmalloc (1 + strlen (&name[1]));
4905 strcpy (real_name, &name[1]);
4907 else
4909 real_name = xmalloc (2 + strlen (name));
4910 strcpy (real_name, USER_LABEL_PREFIX);
4911 strcat (real_name, name);
4913 for (s = real_name; *s; s++)
4914 hash += *s;
4916 hash = hash % LABEL_HASH_SIZE;
4917 cur = (struct label_offset *) xmalloc (sizeof (struct label_offset));
4918 cur->name = real_name;
4919 cur->offset = arm_text_location;
4920 cur->cdr = offset_table[hash];
4921 offset_table[hash] = cur;
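/* Lookup sketch (illustrative; find_label_offset is a hypothetical
   helper, not part of this file): an offset recorded above is found
   again by rehashing the name the same way and walking the chain.  */

static struct label_offset *
find_label_offset (name)
     char *name;
{
  int hash = 0;
  char *s;
  struct label_offset *cur;

  for (s = name; *s; s++)
    hash += *s;

  for (cur = offset_table[hash % LABEL_HASH_SIZE]; cur; cur = cur->cdr)
    if (! strcmp (cur->name, name))
      return cur;

  return NULL;
}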
4924 /* Output code resembling an .lcomm directive. /bin/as doesn't have this
4925 directive, hence this hack, which works by reserving some `.space' in the
4926 bss segment directly.
4928 XXX This is a severe hack, which is guaranteed NOT to work since it doesn't
4929 define STATIC COMMON space but merely STATIC BSS space. */
4931 void
4932 output_lcomm_directive (stream, name, size, align)
4933 FILE *stream;
4934 char *name;
4935 int size, align;
4937 bss_section ();
4938 ASM_OUTPUT_ALIGN (stream, floor_log2 (align / BITS_PER_UNIT));
4939 ARM_OUTPUT_LABEL (stream, name);
4940 fprintf (stream, "\t.space\t%d\n", size);
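/* Worked example (illustrative): output_lcomm_directive (f, "buf", 64, 32)
   emits roughly

       .bss
       .align  2
   buf:
       .space  64

   i.e. 64 zeroed bytes in the bss segment, aligned to a 2^2 = 4 byte
   boundary (floor_log2 (32 / BITS_PER_UNIT)).  */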
4943 /* A finite state machine takes care of noticing whether or not instructions
4944 can be conditionally executed, and thus decrease execution time and code
4945 size by deleting branch instructions. The fsm is controlled by
4946 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
4948 /* The states of the fsm controlling condition codes are:
4949 0: normal, do nothing special
4950 1: make ASM_OUTPUT_OPCODE not output this instruction
4951 2: make ASM_OUTPUT_OPCODE not output this instruction
4952 3: make instructions conditional
4953 4: make instructions conditional
4955 State transitions (state->state by whom under condition):
4956 0 -> 1 final_prescan_insn if the `target' is a label
4957 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
4958 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
4959 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
4960 3 -> 0 ASM_OUTPUT_INTERNAL_LABEL if the `target' label is reached
4961 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
4962 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
4963 (the target insn is arm_target_insn).
4965 If the jump clobbers the conditions then we use states 2 and 4.
4967 A similar thing can be done with conditional return insns.
4969 XXX In case the `target' is an unconditional branch, this conditionalising
4970 of the instructions always reduces code size, but not always execution
4971 time. But then, I want to reduce the code size to somewhere near what
4972 /bin/cc produces. */
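/* Worked example (illustrative): the fsm turns a short branch-over
   sequence such as

       cmp     r0, #0
       beq     .L1
       add     r1, r1, #1
   .L1:

   into the conditionally executed form

       cmp     r0, #0
       addne   r1, r1, #1

   removing the branch at the cost of one conditionalised instruction.  */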
4974 /* Returns the index of the ARM condition code string in
4975 `arm_condition_codes'. COMPARISON should be an rtx like
4976 `(eq (...) (...))'. */
4978 static enum arm_cond_code
4979 get_arm_condition_code (comparison)
4980 rtx comparison;
4982 enum machine_mode mode = GET_MODE (XEXP (comparison, 0));
4983 register int code;
4984 register enum rtx_code comp_code = GET_CODE (comparison);
4986 if (GET_MODE_CLASS (mode) != MODE_CC)
4987 mode = SELECT_CC_MODE (comp_code, XEXP (comparison, 0),
4988 XEXP (comparison, 1));
4990 switch (mode)
4992 case CC_DNEmode: code = ARM_NE; goto dominance;
4993 case CC_DEQmode: code = ARM_EQ; goto dominance;
4994 case CC_DGEmode: code = ARM_GE; goto dominance;
4995 case CC_DGTmode: code = ARM_GT; goto dominance;
4996 case CC_DLEmode: code = ARM_LE; goto dominance;
4997 case CC_DLTmode: code = ARM_LT; goto dominance;
4998 case CC_DGEUmode: code = ARM_CS; goto dominance;
4999 case CC_DGTUmode: code = ARM_HI; goto dominance;
5000 case CC_DLEUmode: code = ARM_LS; goto dominance;
5001 case CC_DLTUmode: code = ARM_CC;
5003 dominance:
5004 if (comp_code != EQ && comp_code != NE)
5005 abort ();
5007 if (comp_code == EQ)
5008 return ARM_INVERSE_CONDITION_CODE (code);
5009 return code;
5011 case CC_NOOVmode:
5012 switch (comp_code)
5014 case NE: return ARM_NE;
5015 case EQ: return ARM_EQ;
5016 case GE: return ARM_PL;
5017 case LT: return ARM_MI;
5018 default: abort ();
5021 case CC_Zmode:
5022 case CCFPmode:
5023 switch (comp_code)
5025 case NE: return ARM_NE;
5026 case EQ: return ARM_EQ;
5027 default: abort ();
5030 case CCFPEmode:
5031 switch (comp_code)
5033 case GE: return ARM_GE;
5034 case GT: return ARM_GT;
5035 case LE: return ARM_LS;
5036 case LT: return ARM_MI;
5037 default: abort ();
5040 case CC_SWPmode:
5041 switch (comp_code)
5043 case NE: return ARM_NE;
5044 case EQ: return ARM_EQ;
5045 case GE: return ARM_LE;
5046 case GT: return ARM_LT;
5047 case LE: return ARM_GE;
5048 case LT: return ARM_GT;
5049 case GEU: return ARM_LS;
5050 case GTU: return ARM_CC;
5051 case LEU: return ARM_CS;
5052 case LTU: return ARM_HI;
5053 default: abort ();
5056 case CC_Cmode:
5057 switch (comp_code)
5059 case LTU: return ARM_CS;
5060 case GEU: return ARM_CC;
5061 default: abort ();
5064 case CCmode:
5065 switch (comp_code)
5067 case NE: return ARM_NE;
5068 case EQ: return ARM_EQ;
5069 case GE: return ARM_GE;
5070 case GT: return ARM_GT;
5071 case LE: return ARM_LE;
5072 case LT: return ARM_LT;
5073 case GEU: return ARM_CS;
5074 case GTU: return ARM_HI;
5075 case LEU: return ARM_LS;
5076 case LTU: return ARM_CC;
5077 default: abort ();
5080 default: abort ();
5083 abort ();
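/* Worked example (illustrative): in CC_SWPmode the comparison operands
   were swapped when the flags were set, so (gt (x) (y)) must test the
   flags as if it were (lt (y) (x)); hence GT maps to ARM_LT above, and
   the unsigned GEU similarly maps to ARM_LS.  */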
5087 void
5088 final_prescan_insn (insn, opvec, noperands)
5089 rtx insn;
5090 rtx *opvec;
5091 int noperands;
5093 /* BODY will hold the body of INSN. */
5094 register rtx body = PATTERN (insn);
5096 /* This will be 1 if trying to repeat the trick, and things need to be
5097 reversed if it appears to fail. */
5098 int reverse = 0;
5100 /* Nonzero implies that the condition codes are clobbered if a branch
5101 is taken, even if the rtl suggests otherwise. It also
5102 means that we have to grub around within the jump expression to find
5103 out what the conditions are when the jump isn't taken. */
5104 int jump_clobbers = 0;
5106 /* If we start with a return insn, we only succeed if we find another one. */
5107 int seeking_return = 0;
5109 /* START_INSN will hold the insn from where we start looking. This is the
5110 first insn after the following code_label if REVERSE is true. */
5111 rtx start_insn = insn;
5113 /* If in state 4, check if the target branch is reached, in order to
5114 change back to state 0. */
5115 if (arm_ccfsm_state == 4)
5117 if (insn == arm_target_insn)
5119 arm_target_insn = NULL;
5120 arm_ccfsm_state = 0;
5122 return;
5125 /* If in state 3, it is possible to repeat the trick, if this insn is an
5126 unconditional branch to a label, and immediately following this branch
5127 is the previous target label which is only used once, and the label this
5128 branch jumps to is not too far off. */
5129 if (arm_ccfsm_state == 3)
5131 if (simplejump_p (insn))
5133 start_insn = next_nonnote_insn (start_insn);
5134 if (GET_CODE (start_insn) == BARRIER)
5136 /* XXX Isn't this always a barrier? */
5137 start_insn = next_nonnote_insn (start_insn);
5139 if (GET_CODE (start_insn) == CODE_LABEL
5140 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
5141 && LABEL_NUSES (start_insn) == 1)
5142 reverse = TRUE;
5143 else
5144 return;
5146 else if (GET_CODE (body) == RETURN)
5148 start_insn = next_nonnote_insn (start_insn);
5149 if (GET_CODE (start_insn) == BARRIER)
5150 start_insn = next_nonnote_insn (start_insn);
5151 if (GET_CODE (start_insn) == CODE_LABEL
5152 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
5153 && LABEL_NUSES (start_insn) == 1)
5155 reverse = TRUE;
5156 seeking_return = 1;
5158 else
5159 return;
5161 else
5162 return;
5165 if (arm_ccfsm_state != 0 && !reverse)
5166 abort ();
5167 if (GET_CODE (insn) != JUMP_INSN)
5168 return;
5170 /* This jump might be paralleled with a clobber of the condition codes;
5171 the jump should always come first. */
5172 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
5173 body = XVECEXP (body, 0, 0);
5175 #if 0
5176 /* If this is a conditional return then we don't want to know */
5177 if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
5178 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
5179 && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
5180 || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
5181 return;
5182 #endif
5184 if (reverse
5185 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
5186 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
5188 int insns_skipped;
5189 int fail = FALSE, succeed = FALSE;
5190 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
5191 int then_not_else = TRUE;
5192 rtx this_insn = start_insn, label = 0;
5194 if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
5196 /* The code below is wrong for these, and I haven't time to
5197 fix it now. So we just do the safe thing and return. This
5198 whole function needs re-writing anyway. */
5199 jump_clobbers = 1;
5200 return;
5203 /* Register the insn jumped to. */
5204 if (reverse)
5206 if (!seeking_return)
5207 label = XEXP (SET_SRC (body), 0);
5209 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
5210 label = XEXP (XEXP (SET_SRC (body), 1), 0);
5211 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
5213 label = XEXP (XEXP (SET_SRC (body), 2), 0);
5214 then_not_else = FALSE;
5216 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
5217 seeking_return = 1;
5218 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
5220 seeking_return = 1;
5221 then_not_else = FALSE;
5223 else
5224 abort ();
5226 /* See how many insns this branch skips, and what kind of insns. If all
5227 insns are okay, and the label or unconditional branch to the same
5228 label is not too far away, succeed. */
5229 for (insns_skipped = 0;
5230 !fail && !succeed && insns_skipped++ < MAX_INSNS_SKIPPED;)
5232 rtx scanbody;
5234 this_insn = next_nonnote_insn (this_insn);
5235 if (!this_insn)
5236 break;
5238 scanbody = PATTERN (this_insn);
5240 switch (GET_CODE (this_insn))
5242 case CODE_LABEL:
5243 /* Succeed if it is the target label, otherwise fail since
5244 control falls in from somewhere else. */
5245 if (this_insn == label)
5247 if (jump_clobbers)
5249 arm_ccfsm_state = 2;
5250 this_insn = next_nonnote_insn (this_insn);
5252 else
5253 arm_ccfsm_state = 1;
5254 succeed = TRUE;
5256 else
5257 fail = TRUE;
5258 break;
5260 case BARRIER:
5261 /* Succeed if the following insn is the target label.
5262 Otherwise fail.
5263 If return insns are used then the last insn in a function
5264 will be a barrier. */
5265 this_insn = next_nonnote_insn (this_insn);
5266 if (this_insn && this_insn == label)
5268 if (jump_clobbers)
5270 arm_ccfsm_state = 2;
5271 this_insn = next_nonnote_insn (this_insn);
5273 else
5274 arm_ccfsm_state = 1;
5275 succeed = TRUE;
5277 else
5278 fail = TRUE;
5279 break;
5281 case CALL_INSN:
5282 /* If using 32-bit addresses the cc is not preserved over
5283 calls */
5284 if (TARGET_APCS_32)
5286 /* Succeed if the following insn is the target label,
5287 or if the following two insns are a barrier and
5288 the target label. */
5289 this_insn = next_nonnote_insn (this_insn);
5290 if (this_insn && GET_CODE (this_insn) == BARRIER)
5291 this_insn = next_nonnote_insn (this_insn);
5293 if (this_insn && this_insn == label
5294 && insns_skipped < MAX_INSNS_SKIPPED)
5296 if (jump_clobbers)
5298 arm_ccfsm_state = 2;
5299 this_insn = next_nonnote_insn (this_insn);
5301 else
5302 arm_ccfsm_state = 1;
5303 succeed = TRUE;
5305 else
5306 fail = TRUE;
5308 break;
5310 case JUMP_INSN:
5311 /* If this is an unconditional branch to the same label, succeed.
5312 If it is to another label, do nothing. If it is conditional,
5313 fail. */
5314 /* XXX Probably, the tests for the SET and the PC are unnecessary. */
5316 if (GET_CODE (scanbody) == SET
5317 && GET_CODE (SET_DEST (scanbody)) == PC)
5319 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
5320 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
5322 arm_ccfsm_state = 2;
5323 succeed = TRUE;
5325 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
5326 fail = TRUE;
5328 else if (GET_CODE (scanbody) == RETURN
5329 && seeking_return)
5331 arm_ccfsm_state = 2;
5332 succeed = TRUE;
5334 else if (GET_CODE (scanbody) == PARALLEL)
5336 switch (get_attr_conds (this_insn))
5338 case CONDS_NOCOND:
5339 break;
5340 default:
5341 fail = TRUE;
5342 break;
5345 break;
5347 case INSN:
5348 /* Instructions using or affecting the condition codes make it
5349 fail. */
5350 if ((GET_CODE (scanbody) == SET
5351 || GET_CODE (scanbody) == PARALLEL)
5352 && get_attr_conds (this_insn) != CONDS_NOCOND)
5353 fail = TRUE;
5354 break;
5356 default:
5357 break;
5360 if (succeed)
5362 if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
5363 arm_target_label = CODE_LABEL_NUMBER (label);
5364 else if (seeking_return || arm_ccfsm_state == 2)
5366 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
5368 this_insn = next_nonnote_insn (this_insn);
5369 if (this_insn && (GET_CODE (this_insn) == BARRIER
5370 || GET_CODE (this_insn) == CODE_LABEL))
5371 abort ();
5373 if (!this_insn)
5375 /* Oh dear! We ran off the end; give up. */
5376 recog (PATTERN (insn), insn, NULL_PTR);
5377 arm_ccfsm_state = 0;
5378 arm_target_insn = NULL;
5379 return;
5381 arm_target_insn = this_insn;
5383 else
5384 abort ();
5385 if (jump_clobbers)
5387 if (reverse)
5388 abort ();
5389 arm_current_cc =
5390 get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
5391 0), 0), 1));
5392 if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
5393 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
5394 if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
5395 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
5397 else
5399 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
5400 what it was. */
5401 if (!reverse)
5402 arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
5403 0));
5406 if (reverse || then_not_else)
5407 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
5409 /* Restore recog_operand (getting the attributes of other insns can
5410 destroy this array, but final.c assumes that it remains intact
5411 across this call; since the insn has been recognized already we
5412 call recog directly). */
5413 recog (PATTERN (insn), insn, NULL_PTR);
5417 #ifdef AOF_ASSEMBLER
5418 /* Special functions only needed when producing AOF syntax assembler. */
5420 int arm_text_section_count = 1;
5422 char *
5423 aof_text_section ()
5425 static char buf[100];
5426 sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
5427 arm_text_section_count++);
5428 if (flag_pic)
5429 strcat (buf, ", PIC, REENTRANT");
5430 return buf;
5433 static int arm_data_section_count = 1;
5435 char *
5436 aof_data_section ()
5438 static char buf[100];
5439 sprintf (buf, "\tAREA |C$$data%d|, DATA", arm_data_section_count++);
5440 return buf;
5443 /* The AOF assembler is religiously strict about declarations of
5444 imported and exported symbols, so that it is impossible to declare
5445 a function as imported near the beginning of the file, and then to
5446 export it later on. It is, however, possible to delay the decision
5447 until all the functions in the file have been compiled. To get
5448 around this, we maintain a list of the imports and exports, and
5449 delete from it any that are subsequently defined. At the end of
5450 compilation we spit the remainder of the list out before the END
5451 directive. */
5453 struct import
5455 struct import *next;
5456 char *name;
5459 static struct import *imports_list = NULL;
5461 void
5462 aof_add_import (name)
5463 char *name;
5465 struct import *new;
5467 for (new = imports_list; new; new = new->next)
5468 if (new->name == name)
5469 return;
5471 new = (struct import *) xmalloc (sizeof (struct import));
5472 new->next = imports_list;
5473 imports_list = new;
5474 new->name = name;
5477 void
5478 aof_delete_import (name)
5479 char *name;
5481 struct import **old;
5483 for (old = &imports_list; *old; old = & (*old)->next)
5485 if ((*old)->name == name)
5487 *old = (*old)->next;
5488 return;
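/* Usage sketch (illustrative): a symbol referenced before it is known
   to be defined locally is first queued with

       aof_add_import (name);

   and later, when a definition is seen, removed again with

       aof_delete_import (name);

   Both routines compare name pointers rather than string contents, so
   the same canonical identifier string must be passed each time.  */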
5493 int arm_main_function = 0;
5495 void
5496 aof_dump_imports (f)
5497 FILE *f;
5499 /* The AOF assembler needs this to cause the startup code to be extracted
5500 from the library. Bringing in __main causes the whole thing to work
5501 automagically. */
5502 if (arm_main_function)
5504 text_section ();
5505 fputs ("\tIMPORT __main\n", f);
5506 fputs ("\tDCD __main\n", f);
5509 /* Now dump the remaining imports. */
5510 while (imports_list)
5512 fprintf (f, "\tIMPORT\t");
5513 assemble_name (f, imports_list->name);
5514 fputc ('\n', f);
5515 imports_list = imports_list->next;
5518 #endif /* AOF_ASSEMBLER */