1 /* Output routines for GCC for ARM/RISCiX.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996 Free Software Foundation, Inc.
3 Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 and Martin Simmons (@harleqn.co.uk).
5 More major hacks by Richard Earnshaw (rwe11@cl.cam.ac.uk)
7 This file is part of GNU CC.
9 GNU CC is free software; you can redistribute it and/or modify
10 it under the terms of the GNU General Public License as published by
11 the Free Software Foundation; either version 2, or (at your option)
12 any later version.
14 GNU CC is distributed in the hope that it will be useful,
15 but WITHOUT ANY WARRANTY; without even the implied warranty of
16 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 GNU General Public License for more details.
19 You should have received a copy of the GNU General Public License
20 along with GNU CC; see the file COPYING. If not, write to
21 the Free Software Foundation, 59 Temple Place - Suite 330,
22 Boston, MA 02111-1307, USA. */
24 #include <stdio.h>
25 #include <string.h>
26 #include "assert.h"
27 #include "config.h"
28 #include "rtl.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "real.h"
32 #include "insn-config.h"
33 #include "conditions.h"
34 #include "insn-flags.h"
35 #include "output.h"
36 #include "insn-attr.h"
37 #include "flags.h"
38 #include "reload.h"
39 #include "tree.h"
40 #include "expr.h"
42 /* The maximum number of insns skipped which will be conditionalised if
43 possible. */
44 #define MAX_INSNS_SKIPPED 5
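/* Illustrative note (added, not in the original source): the ARM can
   execute almost every instruction conditionally, so a short forward
   branch such as

	cmp	r0, #0
	beq	1f
	add	r1, r1, #1
	mov	r2, r1
   1:

   can instead be emitted as

	cmp	r0, #0
	addne	r1, r1, #1
	movne	r2, r1

   final_prescan_insn (see below) performs this transformation for runs
   of up to MAX_INSNS_SKIPPED instructions.  */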
46 /* Some function declarations. */
47 extern FILE *asm_out_file;
48 extern char *output_multi_immediate ();
49 extern void arm_increase_location ();
51 HOST_WIDE_INT int_log2 PROTO ((HOST_WIDE_INT));
52 static int get_prologue_size PROTO ((void));
53 static int arm_gen_constant PROTO ((enum rtx_code, enum machine_mode,
54 HOST_WIDE_INT, rtx, rtx, int, int));
56 /* Define the information needed to generate branch insns. This is
57 stored from the compare operation. */
59 rtx arm_compare_op0, arm_compare_op1;
60 int arm_compare_fp;
62 /* What type of cpu are we compiling for? */
63 enum processor_type arm_cpu;
65 /* What type of floating point are we compiling for? */
66 enum floating_point_type arm_fpu;
68 /* What program mode is the cpu running in? 26-bit mode or 32-bit mode */
69 enum prog_mode_type arm_prgmode;
71 char *target_cpu_name = ARM_CPU_NAME;
72 char *target_fpe_name = NULL;
74 /* Nonzero if this is an "M" variant of the processor. */
75 int arm_fast_multiply = 0;
77 /* Nonzero if this chip supports the ARM Architecture 4 extensions.  */
78 int arm_arch4 = 0;
80 /* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
81 must report the mode of the memory reference from PRINT_OPERAND to
82 PRINT_OPERAND_ADDRESS. */
83 enum machine_mode output_memory_reference_mode;
85 /* Nonzero if the current function has anonymous arguments.  */
86 int current_function_anonymous_args;
88 /* Location counter of .text segment. */
89 int arm_text_location = 0;
91 /* Set to one if we think that lr is only saved because of subroutine calls,
92 but all of these can be `put after' return insns */
93 int lr_save_eliminated;
95 /* A hash table is used to store text segment labels and their associated
96 offset from the start of the text segment. */
97 struct label_offset
98 {
99 char *name;
100 int offset;
101 struct label_offset *cdr;
102 };
104 #define LABEL_HASH_SIZE 257
106 static struct label_offset *offset_table[LABEL_HASH_SIZE];
108 /* Set to 1 when a return insn is output, this means that the epilogue
109 is not needed. */
111 static int return_used_this_function;
113 static int arm_constant_limit = 3;
115 /* For an explanation of these variables, see final_prescan_insn below. */
116 int arm_ccfsm_state;
117 enum arm_cond_code arm_current_cc;
118 rtx arm_target_insn;
119 int arm_target_label;
121 /* The condition codes of the ARM, and the inverse function. */
122 char *arm_condition_codes[] =
123 {
124 "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
125 "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
126 };
128 static enum arm_cond_code get_arm_condition_code ();
131 /* Initialization code */
133 struct arm_cpu_select arm_select[3] =
134 {
135 /* switch name, tune arch */
136 { (char *)0, "--with-cpu=", 1, 1 },
137 { (char *)0, "-mcpu=", 1, 1 },
138 { (char *)0, "-mtune=", 1, 0 },
139 };
141 #define FL_CO_PROC 0x01 /* Has external co-processor bus */
142 #define FL_FAST_MULT 0x02 /* Fast multiply */
143 #define FL_MODE26 0x04 /* 26-bit mode support */
144 #define FL_MODE32 0x08 /* 32-bit mode support */
145 #define FL_ARCH4 0x10 /* Architecture rel 4 */
146 #define FL_THUMB 0x20 /* Thumb aware */
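/* Example (added note): the arm7dm entry below carries
   FL_CO_PROC | FL_FAST_MULT | FL_MODE32 | FL_MODE26 == 0x0f, i.e. a
   co-processor bus, the M-variant fast multiplier and both program
   modes, but no architecture 4 or Thumb support.  */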
147 struct processors
148 {
149 char *name;
150 enum processor_type type;
151 unsigned int flags;
152 };
154 /* Not all of these give usefully different compilation alternatives,
155 but there is no simple way of generalizing them. */
156 static struct processors all_procs[] =
157 {
158 {"arm2", PROCESSOR_ARM2, FL_CO_PROC | FL_MODE26},
159 {"arm250", PROCESSOR_ARM2, FL_CO_PROC | FL_MODE26},
160 {"arm3", PROCESSOR_ARM2, FL_CO_PROC | FL_MODE26},
161 {"arm6", PROCESSOR_ARM6, FL_CO_PROC | FL_MODE32 | FL_MODE26},
162 {"arm60", PROCESSOR_ARM6, FL_CO_PROC | FL_MODE32 | FL_MODE26},
163 {"arm600", PROCESSOR_ARM6, FL_CO_PROC | FL_MODE32 | FL_MODE26},
164 {"arm610", PROCESSOR_ARM6, FL_MODE32 | FL_MODE26},
165 {"arm620", PROCESSOR_ARM6, FL_CO_PROC | FL_MODE32 | FL_MODE26},
166 {"arm7", PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
167 {"arm70", PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
168 {"arm7d", PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
169 {"arm7di", PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
170 {"arm7dm", PROCESSOR_ARM7, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
171 | FL_MODE26)},
172 {"arm7dmi", PROCESSOR_ARM7, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
173 | FL_MODE26)},
174 {"arm700", PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
175 {"arm700i", PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
176 {"arm710", PROCESSOR_ARM7, FL_MODE32 | FL_MODE26},
177 {"arm710c", PROCESSOR_ARM7, FL_MODE32 | FL_MODE26},
178 {"arm7100", PROCESSOR_ARM7, FL_MODE32 | FL_MODE26},
179 {"arm7500", PROCESSOR_ARM7, FL_MODE32 | FL_MODE26},
180 {"arm7tdmi", PROCESSOR_ARM7, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
181 | FL_ARCH4 | FL_THUMB)},
182 {NULL, 0, 0}
183 };
185 /* Fix up any incompatible options that the user has specified.
186 This has now turned into a maze. */
187 void
188 arm_override_options ()
190 int arm_thumb_aware = 0;
191 int flags = 0;
192 int i;
193 struct arm_cpu_select *ptr;
195 arm_cpu = PROCESSOR_DEFAULT;
196 arm_select[0].string = TARGET_CPU_DEFAULT;
198 for (i = 0; i < sizeof (arm_select) / sizeof (arm_select[0]); i++)
200 ptr = &arm_select[i];
201 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
203 struct processors *sel;
205 for (sel = all_procs; sel->name != NULL; sel++)
206 if (! strcmp (ptr->string, sel->name))
208 if (ptr->set_tune_p)
209 arm_cpu = sel->type;
211 if (ptr->set_arch_p)
212 flags = sel->flags;
213 break;
216 if (sel->name == NULL)
217 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
221 if (write_symbols != NO_DEBUG && flag_omit_frame_pointer)
222 warning ("-g with -fomit-frame-pointer may not give sensible debugging");
224 if (TARGET_POKE_FUNCTION_NAME)
225 target_flags |= ARM_FLAG_APCS_FRAME;
227 if (TARGET_6)
229 warning ("Option '-m6' deprecated. Use: '-mapcs-32' or -mcpu=<proc>");
230 target_flags |= ARM_FLAG_APCS_32;
231 arm_cpu = PROCESSOR_ARM6;
234 if (TARGET_3)
236 warning ("Option '-m3' deprecated. Use: '-mapcs-26' or -mcpu=<proc>");
237 target_flags &= ~ARM_FLAG_APCS_32;
238 arm_cpu = PROCESSOR_ARM2;
241 if (TARGET_APCS_REENT && flag_pic)
242 fatal ("-fpic and -mapcs-reent are incompatible");
244 if (TARGET_APCS_REENT)
245 warning ("APCS reentrant code not supported. Ignored");
247 if (flag_pic)
248 warning ("Position independent code not supported. Ignored");
250 if (TARGET_APCS_FLOAT)
251 warning ("Passing floating point arguments in fp regs not yet supported");
253 if (TARGET_APCS_STACK && ! TARGET_APCS)
255 warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
256 target_flags |= ARM_FLAG_APCS_FRAME;
259 arm_fpu = FP_HARD;
261 /* Default value for floating point code... if no co-processor
262 bus, then schedule for emulated floating point. Otherwise,
263 assume the user has an FPA, unless overridden with -mfpe-... */
264 if ((flags & FL_CO_PROC) == 0)
265 arm_fpu = FP_SOFT3;
266 else
267 arm_fpu = FP_HARD;
268 arm_fast_multiply = (flags & FL_FAST_MULT) != 0;
269 arm_arch4 = (flags & FL_ARCH4) != 0;
270 arm_thumb_aware = (flags & FL_THUMB) != 0;
272 if (target_fpe_name)
274 if (strcmp (target_fpe_name, "2") == 0)
275 arm_fpu = FP_SOFT2;
276 else if (strcmp (target_fpe_name, "3") == 0)
277 arm_fpu = FP_SOFT3;
278 else
279 fatal ("Invalid floating point emulation option: -mfpe-%s",
280 target_fpe_name);
283 if (TARGET_THUMB_INTERWORK && ! arm_thumb_aware)
285 warning ("This processor variant does not support Thumb interworking");
286 target_flags &= ~ARM_FLAG_THUMB;
289 if (TARGET_FPE && arm_fpu != FP_HARD)
290 arm_fpu = FP_SOFT2;
292 /* For arm2/3 there is no need to do any scheduling if there is only
293 a floating point emulator, or we are doing software floating-point. */
294 if ((TARGET_SOFT_FLOAT || arm_fpu != FP_HARD) && arm_cpu == PROCESSOR_ARM2)
295 flag_schedule_insns = flag_schedule_insns_after_reload = 0;
297 arm_prgmode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;
300 #define MAX_LINE 79
302 struct asm_option
303 {
304 char *string;
305 int *variable;
306 int on_value;
307 };
309 static int
310 output_option (file, type, name, pos)
311 FILE *file;
312 char *type;
313 char *name;
314 int pos;
316 int type_len = strlen (type);
317 int name_len = strlen (name);
319 if (1 + type_len + name_len + pos > MAX_LINE)
321 fprintf (file, "\n%s %s%s", ASM_COMMENT_START, type, name);
322 return 3 + type_len + name_len;
324 fprintf (file, " %s%s", type, name);
325 return pos + 1 + type_len + name_len;
328 static struct { char *name; int value; } m_options[] = TARGET_SWITCHES;
329 extern char *version_string, *language_string;
331 void
332 output_options (file, f_options, f_len, W_options, W_len)
333 FILE *file;
334 struct asm_option *f_options;
335 int f_len;
336 struct asm_option *W_options;
337 int W_len;
339 int j;
340 int flags = target_flags;
341 int pos = 32767;
343 fprintf (file, "%s %s %s", ASM_COMMENT_START, language_string,
344 version_string);
346 if (optimize)
348 char opt_string[20];
349 sprintf (opt_string, "%d", optimize);
350 pos = output_option (file, "-O", opt_string, pos);
353 if (profile_flag)
354 pos = output_option (file, "-p", "", pos);
356 if (inhibit_warnings)
357 pos = output_option (file, "-w", "", pos);
359 for (j = 0; j < f_len; j++)
361 if (*f_options[j].variable == f_options[j].on_value)
362 pos = output_option (file, "-f", f_options[j].string, pos);
365 for (j = 0; j < W_len; j++)
367 if (*W_options[j].variable == W_options[j].on_value)
368 pos = output_option (file, "-W", W_options[j].string, pos);
371 for (j = 0; j < sizeof m_options / sizeof m_options[0]; j++)
373 if (m_options[j].name[0] != '\0'
374 && m_options[j].value > 0
375 && ((m_options[j].value & flags) == m_options[j].value))
377 pos = output_option (file, "-m", m_options[j].name, pos);
378 flags &= ~ m_options[j].value;
382 for (j = 0; j < sizeof (arm_select) / sizeof(arm_select[0]); j++)
383 if (arm_select[j].string != (char *)0)
384 pos = output_option (file, arm_select[j].name, arm_select[j].string,
385 pos);
387 fputs ("\n\n", file);
392 /* Return 1 if it is possible to return using a single instruction */
395 use_return_insn ()
397 int regno;
399 if (!reload_completed || current_function_pretend_args_size
400 || current_function_anonymous_args
401 || (get_frame_size () && !(TARGET_APCS || frame_pointer_needed)))
402 return 0;
404 /* Can't be done if any of the FPU regs are pushed, since this also
405 requires an insn */
406 for (regno = 20; regno < 24; regno++)
407 if (regs_ever_live[regno])
408 return 0;
410 /* If a function is naked, don't use the "return" insn. */
411 if (arm_naked_function_p (current_function_decl))
412 return 0;
414 return 1;
417 /* Return TRUE if int I is a valid immediate ARM constant. */
420 const_ok_for_arm (i)
421 HOST_WIDE_INT i;
423 unsigned HOST_WIDE_INT mask = ~0xFF;
425 /* Fast return for 0 and powers of 2 */
426 if ((i & (i - 1)) == 0)
427 return TRUE;
429 do
430 {
431 if ((i & mask & (unsigned HOST_WIDE_INT) 0xffffffff) == 0)
432 return TRUE;
433 mask =
434 (mask << 2) | ((mask & (unsigned HOST_WIDE_INT) 0xffffffff)
435 >> (32 - 2)) | ~((unsigned HOST_WIDE_INT) 0xffffffff);
436 } while (mask != ~0xFF);
438 return FALSE;
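/* Worked example (added commentary): an ARM data-processing immediate
   is an 8-bit value rotated right by an even amount, and the loop above
   tests this by rotating MASK (initially ~0xFF) two bits at a time.
   Hence 0x0000ab00 is accepted (0xab rotated right by 24), while
   0x0000ab01 spans more than eight significant bits and is rejected.  */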
441 /* Return true if I is a valid constant for the operation CODE. */
443 const_ok_for_op (i, code, mode)
444 HOST_WIDE_INT i;
445 enum rtx_code code;
446 enum machine_mode mode;
448 if (const_ok_for_arm (i))
449 return 1;
451 switch (code)
453 case PLUS:
454 return const_ok_for_arm (ARM_SIGN_EXTEND (-i));
456 case MINUS: /* Should only occur with (MINUS I reg) => rsb */
457 case XOR:
458 case IOR:
459 return 0;
461 case AND:
462 return const_ok_for_arm (ARM_SIGN_EXTEND (~i));
464 default:
465 abort ();
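/* Examples (added commentary): for PLUS the negated constant is also
   acceptable because the insn can become a SUB, so adding -66 can be
   emitted as `sub rD, rN, #66'; for AND the inverted constant works
   because of BIC, so ANDing with ~0xff can be emitted as
   `bic rD, rN, #0xff'.  */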
469 /* Emit a sequence of insns to handle a large constant.
470 CODE is the code of the operation required, it can be any of SET, PLUS,
471 IOR, AND, XOR, MINUS;
472 MODE is the mode in which the operation is being performed;
473 VAL is the integer to operate on;
474 SOURCE is the other operand (a register, or a null-pointer for SET);
475 SUBTARGETS means it is safe to create scratch registers if that will
476 either produce a simpler sequence, or we will want to cse the values.
477 Return value is the number of insns emitted. */
480 arm_split_constant (code, mode, val, target, source, subtargets)
481 enum rtx_code code;
482 enum machine_mode mode;
483 HOST_WIDE_INT val;
484 rtx target;
485 rtx source;
486 int subtargets;
488 if (subtargets || code == SET
489 || (GET_CODE (target) == REG && GET_CODE (source) == REG
490 && REGNO (target) != REGNO (source)))
492 rtx temp;
494 if (arm_gen_constant (code, mode, val, target, source, 1, 0)
495 > arm_constant_limit + (code != SET))
497 if (code == SET)
499 /* Currently SET is the only monadic value for CODE; all
500 the rest are dyadic.  */
501 emit_insn (gen_rtx (SET, VOIDmode, target, GEN_INT (val)));
502 return 1;
504 else
506 rtx temp = subtargets ? gen_reg_rtx (mode) : target;
508 emit_insn (gen_rtx (SET, VOIDmode, temp, GEN_INT (val)));
509 /* For MINUS, the value is subtracted from, since we never
510 have subtraction of a constant. */
511 if (code == MINUS)
512 emit_insn (gen_rtx (SET, VOIDmode, target,
513 gen_rtx (code, mode, temp, source)));
514 else
515 emit_insn (gen_rtx (SET, VOIDmode, target,
516 gen_rtx (code, mode, source, temp)));
517 return 2;
522 return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
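/* Note (added commentary): the test above only synthesizes the constant
   with data-processing insns when the sequence stays within
   arm_constant_limit insns (plus one when CODE needs a separate mov);
   otherwise the plain SET emitted above typically ends up as a single
   ldr from the literal pool.  */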
525 /* As above, but extra parameter GENERATE which, if clear, suppresses
526 RTL generation. */
528 arm_gen_constant (code, mode, val, target, source, subtargets, generate)
529 enum rtx_code code;
530 enum machine_mode mode;
531 HOST_WIDE_INT val;
532 rtx target;
533 rtx source;
534 int subtargets;
535 int generate;
537 int can_add = 0;
538 int can_invert = 0;
539 int can_negate = 0;
540 int can_negate_initial = 0;
541 int can_shift = 0;
542 int i;
543 int num_bits_set = 0;
544 int set_sign_bit_copies = 0;
545 int clear_sign_bit_copies = 0;
546 int clear_zero_bit_copies = 0;
547 int set_zero_bit_copies = 0;
548 int insns = 0;
549 rtx new_src;
550 unsigned HOST_WIDE_INT temp1, temp2;
551 unsigned HOST_WIDE_INT remainder = val & 0xffffffff;
553 /* Find out which operations are safe for a given CODE.  Also do a quick
554 check for degenerate cases; these can occur when DImode operations
555 are split. */
556 switch (code)
558 case SET:
559 can_invert = 1;
560 can_shift = 1;
561 can_negate = 1;
562 break;
564 case PLUS:
565 can_negate = 1;
566 can_negate_initial = 1;
567 break;
569 case IOR:
570 if (remainder == 0xffffffff)
572 if (generate)
573 emit_insn (gen_rtx (SET, VOIDmode, target,
574 GEN_INT (ARM_SIGN_EXTEND (val))));
575 return 1;
577 if (remainder == 0)
579 if (reload_completed && rtx_equal_p (target, source))
580 return 0;
581 if (generate)
582 emit_insn (gen_rtx (SET, VOIDmode, target, source));
583 return 1;
585 break;
587 case AND:
588 if (remainder == 0)
590 if (generate)
591 emit_insn (gen_rtx (SET, VOIDmode, target, const0_rtx));
592 return 1;
594 if (remainder == 0xffffffff)
596 if (reload_completed && rtx_equal_p (target, source))
597 return 0;
598 if (generate)
599 emit_insn (gen_rtx (SET, VOIDmode, target, source));
600 return 1;
602 can_invert = 1;
603 break;
605 case XOR:
606 if (remainder == 0)
608 if (reload_completed && rtx_equal_p (target, source))
609 return 0;
610 if (generate)
611 emit_insn (gen_rtx (SET, VOIDmode, target, source));
612 return 1;
614 if (remainder == 0xffffffff)
616 if (generate)
617 emit_insn (gen_rtx (SET, VOIDmode, target,
618 gen_rtx (NOT, mode, source)));
619 return 1;
622 /* The code below does not yet know how to handle this case.  */
623 abort ();
625 case MINUS:
626 /* We treat MINUS as (val - source), since (source - val) is always
627 passed as (source + (-val)). */
628 if (remainder == 0)
630 if (generate)
631 emit_insn (gen_rtx (SET, VOIDmode, target,
632 gen_rtx (NEG, mode, source)));
633 return 1;
635 if (const_ok_for_arm (val))
637 if (generate)
638 emit_insn (gen_rtx (SET, VOIDmode, target,
639 gen_rtx (MINUS, mode, GEN_INT (val), source)));
640 return 1;
642 can_negate = 1;
644 break;
646 default:
647 abort ();
650 /* If we can do it in one insn, get out quickly.  */
651 if (const_ok_for_arm (val)
652 || (can_negate_initial && const_ok_for_arm (-val))
653 || (can_invert && const_ok_for_arm (~val)))
655 if (generate)
656 emit_insn (gen_rtx (SET, VOIDmode, target,
657 (source ? gen_rtx (code, mode, source,
658 GEN_INT (val))
659 : GEN_INT (val))));
660 return 1;
664 /* Calculate a few attributes that may be useful for specific
665 optimizations. */
667 for (i = 31; i >= 0; i--)
669 if ((remainder & (1 << i)) == 0)
670 clear_sign_bit_copies++;
671 else
672 break;
675 for (i = 31; i >= 0; i--)
677 if ((remainder & (1 << i)) != 0)
678 set_sign_bit_copies++;
679 else
680 break;
683 for (i = 0; i <= 31; i++)
685 if ((remainder & (1 << i)) == 0)
686 clear_zero_bit_copies++;
687 else
688 break;
691 for (i = 0; i <= 31; i++)
693 if ((remainder & (1 << i)) != 0)
694 set_zero_bit_copies++;
695 else
696 break;
699 switch (code)
701 case SET:
702 /* See if we can do this by sign_extending a constant that is known
703 to be negative.  This is a good way of doing it, since the shift
704 may well merge into a subsequent insn. */
705 if (set_sign_bit_copies > 1)
707 if (const_ok_for_arm
708 (temp1 = ARM_SIGN_EXTEND (remainder
709 << (set_sign_bit_copies - 1))))
711 if (generate)
713 new_src = subtargets ? gen_reg_rtx (mode) : target;
714 emit_insn (gen_rtx (SET, VOIDmode, new_src,
715 GEN_INT (temp1)));
716 emit_insn (gen_ashrsi3 (target, new_src,
717 GEN_INT (set_sign_bit_copies - 1)));
719 return 2;
721 /* For an inverted constant, we will need to set the low bits;
722 these will be shifted out of harm's way.  */
723 temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
724 if (const_ok_for_arm (~temp1))
726 if (generate)
728 new_src = subtargets ? gen_reg_rtx (mode) : target;
729 emit_insn (gen_rtx (SET, VOIDmode, new_src,
730 GEN_INT (temp1)));
731 emit_insn (gen_ashrsi3 (target, new_src,
732 GEN_INT (set_sign_bit_copies - 1)));
734 return 2;
738 /* See if we can generate this by setting the bottom (or the top)
739 16 bits, and then shifting these into the other half of the
740 word.  We only look for the simplest cases; to do more would cost
741 too much. Be careful, however, not to generate this when the
742 alternative would take fewer insns. */
743 if (val & 0xffff0000)
745 temp1 = remainder & 0xffff0000;
746 temp2 = remainder & 0x0000ffff;
748 /* Overlaps outside this range are best done using other methods. */
749 for (i = 9; i < 24; i++)
751 if ((((temp2 | (temp2 << i)) & 0xffffffff) == remainder)
752 && ! const_ok_for_arm (temp2))
754 insns = arm_gen_constant (code, mode, temp2,
755 new_src = (subtargets
756 ? gen_reg_rtx (mode)
757 : target),
758 source, subtargets, generate);
759 source = new_src;
760 if (generate)
761 emit_insn (gen_rtx (SET, VOIDmode, target,
762 gen_rtx (IOR, mode,
763 gen_rtx (ASHIFT, mode, source,
764 GEN_INT (i)),
765 source)));
766 return insns + 1;
770 /* Don't duplicate cases already considered. */
771 for (i = 17; i < 24; i++)
773 if (((temp1 | (temp1 >> i)) == remainder)
774 && ! const_ok_for_arm (temp1))
776 insns = arm_gen_constant (code, mode, temp1,
777 new_src = (subtargets
778 ? gen_reg_rtx (mode)
779 : target),
780 source, subtargets, generate);
781 source = new_src;
782 if (generate)
783 emit_insn (gen_rtx (SET, VOIDmode, target,
784 gen_rtx (IOR, mode,
785 gen_rtx (LSHIFTRT, mode,
786 source, GEN_INT (i)),
787 source)));
788 return insns + 1;
792 break;
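/* Example of the halfword trick above (added): with code == SET and
   val == 0x01210121, temp2 == 0x121 is not a valid immediate, so the
   low half is built first and then copied into the top half, e.g.

	mov	rD, #0x21
	orr	rD, rD, #0x100
	orr	rD, rD, rD, asl #16

   three insns in total.  */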
794 case IOR:
795 case XOR:
796 /* If we have IOR or XOR, and the inverse of the constant can be loaded
797 in a single instruction, and we can find a temporary to put it in,
798 then this can be done in two instructions instead of 3-4. */
799 if (subtargets
800 || (reload_completed && ! reg_mentioned_p (target, source)))
802 if (const_ok_for_arm (ARM_SIGN_EXTEND (~ val)))
804 if (generate)
806 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
808 emit_insn (gen_rtx (SET, VOIDmode, sub,
809 GEN_INT (ARM_SIGN_EXTEND (val))));
810 emit_insn (gen_rtx (SET, VOIDmode, target,
811 gen_rtx (code, mode, source, sub)));
813 return 2;
817 if (code == XOR)
818 break;
820 if (set_sign_bit_copies > 8
821 && (val & (-1 << (32 - set_sign_bit_copies))) == val)
823 if (generate)
825 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
826 rtx shift = GEN_INT (set_sign_bit_copies);
828 emit_insn (gen_rtx (SET, VOIDmode, sub,
829 gen_rtx (NOT, mode,
830 gen_rtx (ASHIFT, mode, source,
831 shift))));
832 emit_insn (gen_rtx (SET, VOIDmode, target,
833 gen_rtx (NOT, mode,
834 gen_rtx (LSHIFTRT, mode, sub,
835 shift))));
837 return 2;
840 if (set_zero_bit_copies > 8
841 && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
843 if (generate)
845 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
846 rtx shift = GEN_INT (set_zero_bit_copies);
848 emit_insn (gen_rtx (SET, VOIDmode, sub,
849 gen_rtx (NOT, mode,
850 gen_rtx (LSHIFTRT, mode, source,
851 shift))));
852 emit_insn (gen_rtx (SET, VOIDmode, target,
853 gen_rtx (NOT, mode,
854 gen_rtx (ASHIFT, mode, sub,
855 shift))));
857 return 2;
860 if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~ val)))
862 if (generate)
864 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
865 emit_insn (gen_rtx (SET, VOIDmode, sub,
866 gen_rtx (NOT, mode, source)));
867 source = sub;
868 if (subtargets)
869 sub = gen_reg_rtx (mode);
870 emit_insn (gen_rtx (SET, VOIDmode, sub,
871 gen_rtx (AND, mode, source,
872 GEN_INT (temp1))));
873 emit_insn (gen_rtx (SET, VOIDmode, target,
874 gen_rtx (NOT, mode, sub)));
876 return 3;
878 break;
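/* Note on the three-insn fallback above (added): it relies on De
   Morgan's law, ~(~source & ~val) == source | val, so an IOR with an
   awkward constant whose inverse is a valid immediate costs
   MVN + AND + MVN.  */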
880 case AND:
881 /* See if two shifts will do 2 or more insns' worth of work.  */
882 if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
884 HOST_WIDE_INT shift_mask = ((0xffffffff
885 << (32 - clear_sign_bit_copies))
886 & 0xffffffff);
887 rtx new_source;
888 rtx shift;
890 if ((remainder | shift_mask) != 0xffffffff)
892 if (generate)
894 new_source = subtargets ? gen_reg_rtx (mode) : target;
895 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
896 new_source, source, subtargets, 1);
897 source = new_source;
899 else
900 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
901 new_source, source, subtargets, 0);
904 if (generate)
906 shift = GEN_INT (clear_sign_bit_copies);
907 new_source = subtargets ? gen_reg_rtx (mode) : target;
908 emit_insn (gen_ashlsi3 (new_source, source, shift));
909 emit_insn (gen_lshrsi3 (target, new_source, shift));
912 return insns + 2;
915 if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
917 HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;
918 rtx new_source;
919 rtx shift;
921 if ((remainder | shift_mask) != 0xffffffff)
923 if (generate)
925 new_source = subtargets ? gen_reg_rtx (mode) : target;
926 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
927 new_source, source, subtargets, 1);
928 source = new_source;
930 else
931 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
932 new_source, source, subtargets, 0);
935 if (generate)
937 shift = GEN_INT (clear_zero_bit_copies);
938 new_source = subtargets ? gen_reg_rtx (mode) : target;
939 emit_insn (gen_lshrsi3 (new_source, source, shift));
940 emit_insn (gen_ashlsi3 (target, new_source, shift));
943 return insns + 2;
946 break;
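/* Example of the two-shift AND above (added): val == 0x0000ffff is not
   a valid immediate and neither is its inverse, but with
   clear_sign_bit_copies == 16 the mask can be applied as

	mov	rD, rS, asl #16
	mov	rD, rD, lsr #16

   clearing the top sixteen bits in two insns.  */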
948 default:
949 break;
952 for (i = 0; i < 32; i++)
953 if (remainder & (1 << i))
954 num_bits_set++;
956 if (code == AND || (can_invert && num_bits_set > 16))
957 remainder = (~remainder) & 0xffffffff;
958 else if (code == PLUS && num_bits_set > 16)
959 remainder = (-remainder) & 0xffffffff;
960 else
962 can_invert = 0;
963 can_negate = 0;
966 /* Now try and find a way of doing the job in either two or three
967 instructions.
968 We start by looking for the largest block of zeros that is aligned on
969 a 2-bit boundary; we then fill up the temps, wrapping around to the
970 top of the word when we drop off the bottom.
971 In the worst case this code should produce no more than four insns. */
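/* Illustration (added): for code == SET and val == 0x89abcdef the
   chunking below emits the constant in 8-bit pieces on even rotations,
   for instance

	mov	rD, #0x89000000
	orr	rD, rD, #0xab0000
	orr	rD, rD, #0xcd00
	orr	rD, rD, #0xef

   which is the four-insn worst case mentioned above.  */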
973 int best_start = 0;
974 int best_consecutive_zeros = 0;
976 for (i = 0; i < 32; i += 2)
978 int consecutive_zeros = 0;
980 if (! (remainder & (3 << i)))
982 while ((i < 32) && ! (remainder & (3 << i)))
984 consecutive_zeros += 2;
985 i += 2;
987 if (consecutive_zeros > best_consecutive_zeros)
989 best_consecutive_zeros = consecutive_zeros;
990 best_start = i - consecutive_zeros;
992 i -= 2;
996 /* Now start emitting the insns, starting with the one with the highest
997 bit set: we do this so that the smallest number will be emitted last;
998 this is more likely to be combinable with addressing insns. */
999 i = best_start;
1002 int end;
1004 if (i <= 0)
1005 i += 32;
1006 if (remainder & (3 << (i - 2)))
1008 end = i - 8;
1009 if (end < 0)
1010 end += 32;
1011 temp1 = remainder & ((0x0ff << end)
1012 | ((i < end) ? (0xff >> (32 - end)) : 0));
1013 remainder &= ~temp1;
1015 if (code == SET)
1017 if (generate)
1018 emit_insn (gen_rtx (SET, VOIDmode,
1019 new_src = (subtargets
1020 ? gen_reg_rtx (mode)
1021 : target),
1022 GEN_INT (can_invert ? ~temp1 : temp1)));
1023 can_invert = 0;
1024 code = PLUS;
1026 else if (code == MINUS)
1028 if (generate)
1029 emit_insn (gen_rtx (SET, VOIDmode,
1030 new_src = (subtargets
1031 ? gen_reg_rtx (mode)
1032 : target),
1033 gen_rtx (code, mode, GEN_INT (temp1),
1034 source)));
1035 code = PLUS;
1037 else
1039 if (generate)
1040 emit_insn (gen_rtx (SET, VOIDmode,
1041 new_src = (remainder
1042 ? (subtargets
1043 ? gen_reg_rtx (mode)
1044 : target)
1045 : target),
1046 gen_rtx (code, mode, source,
1047 GEN_INT (can_invert ? ~temp1
1048 : (can_negate
1049 ? -temp1
1050 : temp1)))));
1053 insns++;
1054 source = new_src;
1055 i -= 6;
1057 i -= 2;
1058 } while (remainder);
1060 return insns;
1063 /* Canonicalize a comparison so that we are more likely to recognize it.
1064 This can be done for a few constant compares, where we can make the
1065 immediate value easier to load. */
1066 enum rtx_code
1067 arm_canonicalize_comparison (code, op1)
1068 enum rtx_code code;
1069 rtx *op1;
1071 HOST_WIDE_INT i = INTVAL (*op1);
1073 switch (code)
1075 case EQ:
1076 case NE:
1077 return code;
1079 case GT:
1080 case LE:
1081 if (i != (((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)) - 1)
1082 && (const_ok_for_arm (i+1) || const_ok_for_arm (- (i+1))))
1084 *op1 = GEN_INT (i+1);
1085 return code == GT ? GE : LT;
1087 break;
1089 case GE:
1090 case LT:
1091 if (i != ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
1092 && (const_ok_for_arm (i-1) || const_ok_for_arm (- (i-1))))
1094 *op1 = GEN_INT (i-1);
1095 return code == GE ? GT : LE;
1097 break;
1099 case GTU:
1100 case LEU:
1101 if (i != ~0
1102 && (const_ok_for_arm (i+1) || const_ok_for_arm (- (i+1))))
1104 *op1 = GEN_INT (i + 1);
1105 return code == GTU ? GEU : LTU;
1107 break;
1109 case GEU:
1110 case LTU:
1111 if (i != 0
1112 && (const_ok_for_arm (i - 1) || const_ok_for_arm (- (i - 1))))
1114 *op1 = GEN_INT (i - 1);
1115 return code == GEU ? GTU : LEU;
1117 break;
1119 default:
1120 abort ();
1123 return code;
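/* Worked example (added): `x > 0xffffff' would need the invalid
   immediate 0xffffff, but i+1 == 0x1000000 is a valid immediate, so GT
   is rewritten as `x >= 0x1000000', i.e. GE with *op1 incremented.  */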
1127 /* Handle aggregates that are not laid out in a BLKmode element.
1128 This is a sub-element of RETURN_IN_MEMORY. */
1130 arm_return_in_memory (type)
1131 tree type;
1133 if (TREE_CODE (type) == RECORD_TYPE)
1135 tree field;
1137 /* For a struct, we can return in a register if every element was a
1138 bit-field. */
1139 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1140 if (TREE_CODE (field) != FIELD_DECL
1141 || ! DECL_BIT_FIELD_TYPE (field))
1142 return 1;
1144 return 0;
1146 else if (TREE_CODE (type) == UNION_TYPE)
1148 tree field;
1150 /* Unions can be returned in registers if every element is
1151 integral, or can be returned in an integer register. */
1152 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1154 if (TREE_CODE (field) != FIELD_DECL
1155 || (AGGREGATE_TYPE_P (TREE_TYPE (field))
1156 && RETURN_IN_MEMORY (TREE_TYPE (field)))
1157 || FLOAT_TYPE_P (TREE_TYPE (field)))
1158 return 1;
1160 return 0;
1162 /* XXX Not sure what should be done for other aggregates, so put them in
1163 memory. */
1164 return 1;
1167 #define REG_OR_SUBREG_REG(X) \
1168 (GET_CODE (X) == REG \
1169 || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))
1171 #define REG_OR_SUBREG_RTX(X) \
1172 (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))
1174 #define ARM_FRAME_RTX(X) \
1175 ((X) == frame_pointer_rtx || (X) == stack_pointer_rtx \
1176 || (X) == arg_pointer_rtx)
1179 arm_rtx_costs (x, code, outer_code)
1180 rtx x;
1181 enum rtx_code code, outer_code;
1183 enum machine_mode mode = GET_MODE (x);
1184 enum rtx_code subcode;
1185 int extra_cost;
1187 switch (code)
1189 case MEM:
1190 /* Memory costs quite a lot for the first word, but subsequent words
1191 load at the equivalent of a single insn each. */
1192 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
1193 + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));
1195 case DIV:
1196 case MOD:
1197 return 100;
1199 case ROTATE:
1200 if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
1201 return 4;
1202 /* Fall through */
1203 case ROTATERT:
1204 if (mode != SImode)
1205 return 8;
1206 /* Fall through */
1207 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
1208 if (mode == DImode)
1209 return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
1210 + ((GET_CODE (XEXP (x, 0)) == REG
1211 || (GET_CODE (XEXP (x, 0)) == SUBREG
1212 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
1213 ? 0 : 8));
1214 return (1 + ((GET_CODE (XEXP (x, 0)) == REG
1215 || (GET_CODE (XEXP (x, 0)) == SUBREG
1216 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
1217 ? 0 : 4)
1218 + ((GET_CODE (XEXP (x, 1)) == REG
1219 || (GET_CODE (XEXP (x, 1)) == SUBREG
1220 && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
1221 || (GET_CODE (XEXP (x, 1)) == CONST_INT))
1222 ? 0 : 4));
1224 case MINUS:
1225 if (mode == DImode)
1226 return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
1227 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
1228 || (GET_CODE (XEXP (x, 0)) == CONST_INT
1229 && const_ok_for_arm (INTVAL (XEXP (x, 0)))))
1230 ? 0 : 8));
1232 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
1233 return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
1234 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
1235 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
1236 ? 0 : 8)
1237 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
1238 || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
1239 && const_double_rtx_ok_for_fpu (XEXP (x, 0))))
1240 ? 0 : 8));
1242 if (((GET_CODE (XEXP (x, 0)) == CONST_INT
1243 && const_ok_for_arm (INTVAL (XEXP (x, 0)))
1244 && REG_OR_SUBREG_REG (XEXP (x, 1))))
1245 || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
1246 || subcode == ASHIFTRT || subcode == LSHIFTRT
1247 || subcode == ROTATE || subcode == ROTATERT
1248 || (subcode == MULT
1249 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
1250 && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
1251 (INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
1252 && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
1253 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
1254 || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
1255 && REG_OR_SUBREG_REG (XEXP (x, 0))))
1256 return 1;
1257 /* Fall through */
1259 case PLUS:
1260 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
1261 return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
1262 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
1263 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
1264 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
1265 ? 0 : 8));
1267 /* Fall through */
1268 case AND: case XOR: case IOR:
1269 extra_cost = 0;
1271 /* Normally the frame registers will be split into reg+const during
1272 reload, so it is a bad idea to combine them with other instructions,
1273 since then they might not be moved outside of loops. As a compromise
1274 we allow integration with ops that have a constant as their second
1275 operand. */
1276 if ((REG_OR_SUBREG_REG (XEXP (x, 0))
1277 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
1278 && GET_CODE (XEXP (x, 1)) != CONST_INT)
1279 || (REG_OR_SUBREG_REG (XEXP (x, 1))
1280 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 1)))))
1281 extra_cost = 4;
1283 if (mode == DImode)
1284 return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
1285 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
1286 || (GET_CODE (XEXP (x, 1)) == CONST_INT
1287 && const_ok_for_op (INTVAL (XEXP (x, 1)), code, mode)))
1288 ? 0 : 8));
1290 if (REG_OR_SUBREG_REG (XEXP (x, 0)))
1291 return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
1292 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
1293 || (GET_CODE (XEXP (x, 1)) == CONST_INT
1294 && const_ok_for_op (INTVAL (XEXP (x, 1)), code, mode)))
1295 ? 0 : 4));
1297 else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
1298 return (1 + extra_cost
1299 + ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
1300 || subcode == LSHIFTRT || subcode == ASHIFTRT
1301 || subcode == ROTATE || subcode == ROTATERT
1302 || (subcode == MULT
1303 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
1304 && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
1305 (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0))
1306 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
1307 && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
1308 || GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
1309 ? 0 : 4));
1311 return 8;
1313 case MULT:
1314 if (arm_fast_multiply && mode == DImode
1315 && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
1316 && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
1317 || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
1318 return 8;
1320 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1321 || mode == DImode)
1322 return 30;
1324 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
1326 unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
1327 & (unsigned HOST_WIDE_INT) 0xffffffff);
1328 int add_cost = const_ok_for_arm (i) ? 4 : 8;
1329 int j;
1330 int booth_unit_size = (arm_fast_multiply ? 8 : 2);
1332 for (j = 0; i && j < 32; j += booth_unit_size)
1334 i >>= booth_unit_size;
1335 add_cost += 2;
1338 return add_cost;
1341 return ((arm_fast_multiply ? 8 : 30)
1342 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
1343 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));
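/* Illustration of the Booth cost loop above (added): for a multiply by
   0x12345 on an M-variant core (booth_unit_size == 8), the constant is
   not a valid immediate (base cost 8) and is consumed in three 8-bit
   steps of 2 each, giving add_cost == 14.  */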
1345 case NEG:
1346 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
1347 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
1348 /* Fall through */
1349 case NOT:
1350 if (mode == DImode)
1351 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
1353 return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
1355 case IF_THEN_ELSE:
1356 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
1357 return 14;
1358 return 2;
1360 case COMPARE:
1361 return 1;
1363 case ABS:
1364 return 4 + (mode == DImode ? 4 : 0);
1366 case SIGN_EXTEND:
1367 if (GET_MODE (XEXP (x, 0)) == QImode)
1368 return (4 + (mode == DImode ? 4 : 0)
1369 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
1370 /* Fall through */
1371 case ZERO_EXTEND:
1372 switch (GET_MODE (XEXP (x, 0)))
1374 case QImode:
1375 return (1 + (mode == DImode ? 4 : 0)
1376 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
1378 case HImode:
1379 return (4 + (mode == DImode ? 4 : 0)
1380 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
1382 case SImode:
1383 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
1385 abort ();
1387 default:
1388 return 99;
1392 /* This code has been fixed for cross compilation. */
1394 static int fpa_consts_inited = 0;
1396 char *strings_fpa[8] = {
1397 "0", "1", "2", "3",
1398 "4", "5", "0.5", "10"
1401 static REAL_VALUE_TYPE values_fpa[8];
1403 static void
1404 init_fpa_table ()
1406 int i;
1407 REAL_VALUE_TYPE r;
1409 for (i = 0; i < 8; i++)
1411 r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
1412 values_fpa[i] = r;
1415 fpa_consts_inited = 1;
1418 /* Return TRUE if rtx X is a valid immediate FPU constant. */
1421 const_double_rtx_ok_for_fpu (x)
1422 rtx x;
1424 REAL_VALUE_TYPE r;
1425 int i;
1427 if (!fpa_consts_inited)
1428 init_fpa_table ();
1430 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
1431 if (REAL_VALUE_MINUS_ZERO (r))
1432 return 0;
1434 for (i = 0; i < 8; i++)
1435 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
1436 return 1;
1438 return 0;
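/* Note (added): the FPA can only encode the eight values in strings_fpa
   as immediates, so 2.0 and 10.0 are accepted here while e.g. 7.0 is
   not; -1.0 fails this test but passes the negated variant below.  */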
1441 /* Return TRUE if rtx X is a valid immediate FPU constant when negated.  */
1444 neg_const_double_rtx_ok_for_fpu (x)
1445 rtx x;
1447 REAL_VALUE_TYPE r;
1448 int i;
1450 if (!fpa_consts_inited)
1451 init_fpa_table ();
1453 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
1454 r = REAL_VALUE_NEGATE (r);
1455 if (REAL_VALUE_MINUS_ZERO (r))
1456 return 0;
1458 for (i = 0; i < 8; i++)
1459 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
1460 return 1;
1462 return 0;
1465 /* Predicates for `match_operand' and `match_operator'. */
1467 /* s_register_operand is the same as register_operand, but it doesn't accept
1468 (SUBREG (MEM)...).
1470 This function exists because at the time it was put in it led to better
1471 code. SUBREG(MEM) always needs a reload in the places where
1472 s_register_operand is used, and this seemed to lead to excessive
1473 reloading. */
1476 s_register_operand (op, mode)
1477 register rtx op;
1478 enum machine_mode mode;
1480 if (GET_MODE (op) != mode && mode != VOIDmode)
1481 return 0;
1483 if (GET_CODE (op) == SUBREG)
1484 op = SUBREG_REG (op);
1486 /* We don't consider registers whose class is NO_REGS
1487 to be a register operand. */
1488 return (GET_CODE (op) == REG
1489 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1490 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1493 /* Only accept reg, subreg(reg), const_int. */
1496 reg_or_int_operand (op, mode)
1497 register rtx op;
1498 enum machine_mode mode;
1500 if (GET_CODE (op) == CONST_INT)
1501 return 1;
1503 if (GET_MODE (op) != mode && mode != VOIDmode)
1504 return 0;
1506 if (GET_CODE (op) == SUBREG)
1507 op = SUBREG_REG (op);
1509 /* We don't consider registers whose class is NO_REGS
1510 to be a register operand. */
1511 return (GET_CODE (op) == REG
1512 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1513 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1516 /* Return 1 if OP is an item in memory, given that we are in reload. */
1519 reload_memory_operand (op, mode)
1520 rtx op;
1521 enum machine_mode mode;
1523 int regno = true_regnum (op);
1525 return (! CONSTANT_P (op)
1526 && (regno == -1
1527 || (GET_CODE (op) == REG
1528 && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
1531 /* Return TRUE for valid operands for the rhs of an ARM instruction. */
1534 arm_rhs_operand (op, mode)
1535 rtx op;
1536 enum machine_mode mode;
1538 return (s_register_operand (op, mode)
1539 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
1542 /* Return TRUE for valid operands for the rhs of an ARM instruction, or a load.  */
1546 arm_rhsm_operand (op, mode)
1547 rtx op;
1548 enum machine_mode mode;
1550 return (s_register_operand (op, mode)
1551 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
1552 || memory_operand (op, mode));
1555 /* Return TRUE for valid operands for the rhs of an ARM instruction, or a
1556 constant that is valid when negated.  */
1559 arm_add_operand (op, mode)
1560 rtx op;
1561 enum machine_mode mode;
1563 return (s_register_operand (op, mode)
1564 || (GET_CODE (op) == CONST_INT
1565 && (const_ok_for_arm (INTVAL (op))
1566 || const_ok_for_arm (-INTVAL (op)))));
1570 arm_not_operand (op, mode)
1571 rtx op;
1572 enum machine_mode mode;
1574 return (s_register_operand (op, mode)
1575 || (GET_CODE (op) == CONST_INT
1576 && (const_ok_for_arm (INTVAL (op))
1577 || const_ok_for_arm (~INTVAL (op)))));
1580 /* Return TRUE if the operand is a memory reference which contains an
1581 offsettable address. */
1583 offsettable_memory_operand (op, mode)
1584 register rtx op;
1585 enum machine_mode mode;
1587 if (mode == VOIDmode)
1588 mode = GET_MODE (op);
1590 return (mode == GET_MODE (op)
1591 && GET_CODE (op) == MEM
1592 && offsettable_address_p (reload_completed | reload_in_progress,
1593 mode, XEXP (op, 0)));
1596 /* Return TRUE if the operand is a memory reference which is, or can be
1597 made word aligned by adjusting the offset. */
1599 alignable_memory_operand (op, mode)
1600 register rtx op;
1601 enum machine_mode mode;
1603 rtx reg;
1605 if (mode == VOIDmode)
1606 mode = GET_MODE (op);
1608 if (mode != GET_MODE (op) || GET_CODE (op) != MEM)
1609 return 0;
1611 op = XEXP (op, 0);
1613 return ((GET_CODE (reg = op) == REG
1614 || (GET_CODE (op) == SUBREG
1615 && GET_CODE (reg = SUBREG_REG (op)) == REG)
1616 || (GET_CODE (op) == PLUS
1617 && GET_CODE (XEXP (op, 1)) == CONST_INT
1618 && (GET_CODE (reg = XEXP (op, 0)) == REG
1619 || (GET_CODE (XEXP (op, 0)) == SUBREG
1620 && GET_CODE (reg = SUBREG_REG (XEXP (op, 0))) == REG))))
1621 && REGNO_POINTER_ALIGN (REGNO (reg)) >= 4);
1624 /* Return TRUE for valid operands for the rhs of an FPU instruction. */
1627 fpu_rhs_operand (op, mode)
1628 rtx op;
1629 enum machine_mode mode;
1631 if (s_register_operand (op, mode))
1632 return TRUE;
1633 else if (GET_CODE (op) == CONST_DOUBLE)
1634 return (const_double_rtx_ok_for_fpu (op));
1636 return FALSE;
1640 fpu_add_operand (op, mode)
1641 rtx op;
1642 enum machine_mode mode;
1644 if (s_register_operand (op, mode))
1645 return TRUE;
1646 else if (GET_CODE (op) == CONST_DOUBLE)
1647 return (const_double_rtx_ok_for_fpu (op)
1648 || neg_const_double_rtx_ok_for_fpu (op));
1650 return FALSE;
1653 /* Return nonzero if OP is a constant power of two. */
1656 power_of_two_operand (op, mode)
1657 rtx op;
1658 enum machine_mode mode;
1660 if (GET_CODE (op) == CONST_INT)
1662 HOST_WIDE_INT value = INTVAL(op);
1663 return value != 0 && (value & (value - 1)) == 0;
1665 return FALSE;
1668 /* Return TRUE for a valid operand of a DImode operation.
1669 Either: REG, CONST_INT, CONST_DOUBLE or MEM(DImode_address).
1670 Note that this disallows MEM(REG+REG), but allows
1671 MEM(PRE/POST_INC/DEC(REG)). */
1674 di_operand (op, mode)
1675 rtx op;
1676 enum machine_mode mode;
1678 if (s_register_operand (op, mode))
1679 return TRUE;
1681 switch (GET_CODE (op))
1683 case CONST_DOUBLE:
1684 case CONST_INT:
1685 return TRUE;
1687 case MEM:
1688 return memory_address_p (DImode, XEXP (op, 0));
1690 default:
1691 return FALSE;
1695 /* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
1696 Either: REG, CONST_DOUBLE or MEM(DFmode_address).
1697 Note that this disallows MEM(REG+REG), but allows
1698 MEM(PRE/POST_INC/DEC(REG)). */
1701 soft_df_operand (op, mode)
1702 rtx op;
1703 enum machine_mode mode;
1705 if (s_register_operand (op, mode))
1706 return TRUE;
1708 switch (GET_CODE (op))
1710 case CONST_DOUBLE:
1711 return TRUE;
1713 case MEM:
1714 return memory_address_p (DFmode, XEXP (op, 0));
1716 default:
1717 return FALSE;
1721 /* Return TRUE for valid index operands. */
1724 index_operand (op, mode)
1725 rtx op;
1726 enum machine_mode mode;
1728 return (s_register_operand(op, mode)
1729 || (immediate_operand (op, mode)
1730 && INTVAL (op) < 4096 && INTVAL (op) > -4096));
1733 /* Return TRUE for valid shifts by a constant. This also accepts any
1734 power of two on the (somewhat overly relaxed) assumption that the
1735 shift operator in this case was a mult. */
1738 const_shift_operand (op, mode)
1739 rtx op;
1740 enum machine_mode mode;
1742 return (power_of_two_operand (op, mode)
1743 || (immediate_operand (op, mode)
1744 && (INTVAL (op) < 32 && INTVAL (op) > 0)));
1747 /* Return TRUE for arithmetic operators which can be combined with a multiply
1748 (shift). */
1751 shiftable_operator (x, mode)
1752 rtx x;
1753 enum machine_mode mode;
1755 if (GET_MODE (x) != mode)
1756 return FALSE;
1757 else
1759 enum rtx_code code = GET_CODE (x);
1761 return (code == PLUS || code == MINUS
1762 || code == IOR || code == XOR || code == AND);
1766 /* Return TRUE for shift operators. */
1769 shift_operator (x, mode)
1770 rtx x;
1771 enum machine_mode mode;
1773 if (GET_MODE (x) != mode)
1774 return FALSE;
1775 else
1777 enum rtx_code code = GET_CODE (x);
1779 if (code == MULT)
1780 return power_of_two_operand (XEXP (x, 1), mode);
1782 return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
1783 || code == ROTATERT);
1786 /* Return TRUE if X is EQ or NE.  */
1787 int equality_operator (x, mode)
1788 rtx x;
1789 enum machine_mode mode;
1791 return GET_CODE (x) == EQ || GET_CODE (x) == NE;
1794 /* Return TRUE for SMIN SMAX UMIN UMAX operators. */
1797 minmax_operator (x, mode)
1798 rtx x;
1799 enum machine_mode mode;
1801 enum rtx_code code = GET_CODE (x);
1803 if (GET_MODE (x) != mode)
1804 return FALSE;
1806 return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
1811 /* Return TRUE if this is the condition code register; if we aren't given
1812 a mode, accept any class CCmode register.  */
1815 cc_register (x, mode)
1816 rtx x;
1817 enum machine_mode mode;
1819 if (mode == VOIDmode)
1821 mode = GET_MODE (x);
1822 if (GET_MODE_CLASS (mode) != MODE_CC)
1823 return FALSE;
1826 if (mode == GET_MODE (x) && GET_CODE (x) == REG && REGNO (x) == 24)
1827 return TRUE;
1829 return FALSE;
1832 /* Return TRUE if this is the condition code register; if we aren't given
1833 a mode, accept any class CCmode register which indicates a dominance
1834 expression.  */
1837 dominant_cc_register (x, mode)
1838 rtx x;
1839 enum machine_mode mode;
1841 if (mode == VOIDmode)
1843 mode = GET_MODE (x);
1844 if (GET_MODE_CLASS (mode) != MODE_CC)
1845 return FALSE;
1848 if (mode != CC_DNEmode && mode != CC_DEQmode
1849 && mode != CC_DLEmode && mode != CC_DLTmode
1850 && mode != CC_DGEmode && mode != CC_DGTmode
1851 && mode != CC_DLEUmode && mode != CC_DLTUmode
1852 && mode != CC_DGEUmode && mode != CC_DGTUmode)
1853 return FALSE;
1855 if (mode == GET_MODE (x) && GET_CODE (x) == REG && REGNO (x) == 24)
1856 return TRUE;
1858 return FALSE;
1861 /* Return TRUE if X references a SYMBOL_REF. */
1863 symbol_mentioned_p (x)
1864 rtx x;
1866 register char *fmt;
1867 register int i;
1869 if (GET_CODE (x) == SYMBOL_REF)
1870 return 1;
1872 fmt = GET_RTX_FORMAT (GET_CODE (x));
1873 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
1875 if (fmt[i] == 'E')
1877 register int j;
1879 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1880 if (symbol_mentioned_p (XVECEXP (x, i, j)))
1881 return 1;
1883 else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
1884 return 1;
1887 return 0;
1890 /* Return TRUE if X references a LABEL_REF. */
1892 label_mentioned_p (x)
1893 rtx x;
1895 register char *fmt;
1896 register int i;
1898 if (GET_CODE (x) == LABEL_REF)
1899 return 1;
1901 fmt = GET_RTX_FORMAT (GET_CODE (x));
1902 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
1904 if (fmt[i] == 'E')
1906 register int j;
1908 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1909 if (label_mentioned_p (XVECEXP (x, i, j)))
1910 return 1;
1912 else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
1913 return 1;
1916 return 0;
1919 enum rtx_code
1920 minmax_code (x)
1921 rtx x;
1923 enum rtx_code code = GET_CODE (x);
1925 if (code == SMAX)
1926 return GE;
1927 else if (code == SMIN)
1928 return LE;
1929 else if (code == UMIN)
1930 return LEU;
1931 else if (code == UMAX)
1932 return GEU;
1934 abort ();
1937 /* Return 1 if memory locations are adjacent */
1940 adjacent_mem_locations (a, b)
1941 rtx a, b;
1943 int val0 = 0, val1 = 0;
1944 int reg0, reg1;
1946 if ((GET_CODE (XEXP (a, 0)) == REG
1947 || (GET_CODE (XEXP (a, 0)) == PLUS
1948 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
1949 && (GET_CODE (XEXP (b, 0)) == REG
1950 || (GET_CODE (XEXP (b, 0)) == PLUS
1951 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
1953 if (GET_CODE (XEXP (a, 0)) == PLUS)
1955 reg0 = REGNO (XEXP (XEXP (a, 0), 0));
1956 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
1958 else
1959 reg0 = REGNO (XEXP (a, 0));
1960 if (GET_CODE (XEXP (b, 0)) == PLUS)
1962 reg1 = REGNO (XEXP (XEXP (b, 0), 0));
1963 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
1965 else
1966 reg1 = REGNO (XEXP (b, 0));
1967 return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
1969 return 0;
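/* Example (added): `[r4, #4]' and `[r4, #8]' are adjacent (same base
   register, offsets differing by exactly 4 in either order) and so are
   candidates for a load/store-multiple; `[r4, #4]' and `[r5, #8]' are
   not.  */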
1972 /* Return 1 if OP is a load multiple operation.  It is known to be a
1973 PARALLEL and the first section will be tested.  */
1976 load_multiple_operation (op, mode)
1977 rtx op;
1978 enum machine_mode mode;
1980 HOST_WIDE_INT count = XVECLEN (op, 0);
1981 int dest_regno;
1982 rtx src_addr;
1983 HOST_WIDE_INT i = 1, base = 0;
1984 rtx elt;
1986 if (count <= 1
1987 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
1988 return 0;
1990 /* Check to see if this might be a write-back */
1991 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
1993 i++;
1994 base = 1;
1996 /* Now check it more carefully */
1997 if (GET_CODE (SET_DEST (elt)) != REG
1998 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
1999 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
2000 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
2001 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 2) * 4
2002 || GET_CODE (XVECEXP (op, 0, count - 1)) != CLOBBER
2003 || GET_CODE (XEXP (XVECEXP (op, 0, count - 1), 0)) != REG
2004 || REGNO (XEXP (XVECEXP (op, 0, count - 1), 0))
2005 != REGNO (SET_DEST (elt)))
2006 return 0;
2008 count--;
2011 /* Perform a quick check so we don't blow up below. */
2012 if (count <= i
2013 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
2014 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
2015 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
2016 return 0;
2018 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
2019 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);
2021 for (; i < count; i++)
2023 rtx elt = XVECEXP (op, 0, i);
2025 if (GET_CODE (elt) != SET
2026 || GET_CODE (SET_DEST (elt)) != REG
2027 || GET_MODE (SET_DEST (elt)) != SImode
2028 || REGNO (SET_DEST (elt)) != dest_regno + i - base
2029 || GET_CODE (SET_SRC (elt)) != MEM
2030 || GET_MODE (SET_SRC (elt)) != SImode
2031 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
2032 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
2033 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
2034 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
2035 return 0;
2038 return 1;
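/* For illustration (added), the simplest PARALLEL accepted above is a
   two-register load with no write-back:

	(parallel
	  [(set (reg:SI 0) (mem:SI (reg:SI 4)))
	   (set (reg:SI 1) (mem:SI (plus:SI (reg:SI 4) (const_int 4))))])

   destination registers ascend and successive addresses step by 4 from
   the same base.  */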
2041 /* Return 1 if OP is a store multiple operation.  It is known to be a
2042 PARALLEL and the first section will be tested.  */
2045 store_multiple_operation (op, mode)
2046 rtx op;
2047 enum machine_mode mode;
2049 HOST_WIDE_INT count = XVECLEN (op, 0);
2050 int src_regno;
2051 rtx dest_addr;
2052 HOST_WIDE_INT i = 1, base = 0;
2053 rtx elt;
2055 if (count <= 1
2056 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
2057 return 0;
2059 /* Check to see if this might be a write-back */
2060 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
2062 i++;
2063 base = 1;
2065 /* Now check it more carefully */
2066 if (GET_CODE (SET_DEST (elt)) != REG
2067 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
2068 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
2069 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
2070 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 2) * 4
2071 || GET_CODE (XVECEXP (op, 0, count - 1)) != CLOBBER
2072 || GET_CODE (XEXP (XVECEXP (op, 0, count - 1), 0)) != REG
2073 || REGNO (XEXP (XVECEXP (op, 0, count - 1), 0))
2074 != REGNO (SET_DEST (elt)))
2075 return 0;
2077 count--;
2080 /* Perform a quick check so we don't blow up below. */
2081 if (count <= i
2082 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
2083 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
2084 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
2085 return 0;
2087 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
2088 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);
2090 for (; i < count; i++)
2092 elt = XVECEXP (op, 0, i);
2094 if (GET_CODE (elt) != SET
2095 || GET_CODE (SET_SRC (elt)) != REG
2096 || GET_MODE (SET_SRC (elt)) != SImode
2097 || REGNO (SET_SRC (elt)) != src_regno + i - base
2098 || GET_CODE (SET_DEST (elt)) != MEM
2099 || GET_MODE (SET_DEST (elt)) != SImode
2100 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
2101 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
2102 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
2103 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
2104 return 0;
2107 return 1;
2111 load_multiple_sequence (operands, nops, regs, base, load_offset)
2112 rtx *operands;
2113 int nops;
2114 int *regs;
2115 int *base;
2116 HOST_WIDE_INT *load_offset;
2118 int unsorted_regs[4];
2119 HOST_WIDE_INT unsorted_offsets[4];
2120 int order[4];
2121 int base_reg;
2122 int i;
2124 /* Can only handle 2, 3, or 4 insns at present, though could be easily
2125 extended if required. */
2126 if (nops < 2 || nops > 4)
2127 abort ();
2129 /* Loop over the operands and check that the memory references are
2130 suitable (ie immediate offsets from the same base register). At
2131 the same time, extract the target register, and the memory
2132 offsets. */
2133 for (i = 0; i < nops; i++)
2135 rtx reg;
2136 rtx offset;
2138 if (GET_CODE (operands[nops + i]) != MEM)
2139 abort ();
2141 /* Don't reorder volatile memory references; it doesn't seem worth
2142 looking for the case where the order is ok anyway. */
2143 if (MEM_VOLATILE_P (operands[nops + i]))
2144 return 0;
2146 offset = const0_rtx;
2148 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
2149 || (GET_CODE (reg) == SUBREG
2150 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
2151 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
2152 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
2153 == REG)
2154 || (GET_CODE (reg) == SUBREG
2155 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
2156 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
2157 == CONST_INT)))
2159 if (i == 0)
2161 base_reg = REGNO(reg);
2162 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
2163 ? REGNO (operands[i])
2164 : REGNO (SUBREG_REG (operands[i])));
2165 order[0] = 0;
2167 else
2169 if (base_reg != REGNO (reg))
2170 /* Not addressed from the same base register. */
2171 return 0;
2173 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
2174 ? REGNO (operands[i])
2175 : REGNO (SUBREG_REG (operands[i])));
2176 if (unsorted_regs[i] < unsorted_regs[order[0]])
2177 order[0] = i;
2180 /* If it isn't an integer register, or if it overwrites the
2181 base register but isn't the last insn in the list, then
2182 we can't do this. */
2183 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14
2184 || (i != nops - 1 && unsorted_regs[i] == base_reg))
2185 return 0;
2187 unsorted_offsets[i] = INTVAL (offset);
2189 else
2190 /* Not a suitable memory address. */
2191 return 0;
2194 /* All the useful information has now been extracted from the
2195 operands into unsorted_regs and unsorted_offsets; additionally,
2196 order[0] has been set to the lowest numbered register in the
2197 list. Sort the registers into order, and check that the memory
2198 offsets are ascending and adjacent. */
2200 for (i = 1; i < nops; i++)
2202 int j;
2204 order[i] = order[i - 1];
2205 for (j = 0; j < nops; j++)
2206 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
2207 && (order[i] == order[i - 1]
2208 || unsorted_regs[j] < unsorted_regs[order[i]]))
2209 order[i] = j;
2211 /* Have we found a suitable register?  If not, one must be used more
2212 than once.  */
2213 if (order[i] == order[i - 1])
2214 return 0;
2216 /* Are the memory offsets adjacent and ascending? */
2217 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
2218 return 0;
2221 if (base)
2223 *base = base_reg;
2225 for (i = 0; i < nops; i++)
2226 regs[i] = unsorted_regs[order[i]];
2228 *load_offset = unsorted_offsets[order[0]];
2231 if (unsorted_offsets[order[0]] == 0)
2232 return 1; /* ldmia */
2234 if (unsorted_offsets[order[0]] == 4)
2235 return 2; /* ldmib */
2237 if (unsorted_offsets[order[nops - 1]] == 0)
2238 return 3; /* ldmda */
2240 if (unsorted_offsets[order[nops - 1]] == -4)
2241 return 4; /* ldmdb */
2243 /* Can't do it without setting up the offset; only do this if it takes
2244 no more than one insn. */
2245 return (const_ok_for_arm (unsorted_offsets[order[0]])
2246 || const_ok_for_arm (-unsorted_offsets[order[0]])) ? 5 : 0;
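/* For illustration (register numbers are arbitrary): loading r4 from
   [r0] and r5 from [r0, #4] gives offsets 0 and 4 and returns 1
   (ldmia); offsets 4 and 8 return 2 (ldmib); offsets -4 and 0 return
   3 (ldmda); offsets -8 and -4 return 4 (ldmdb).  Any other adjacent
   ascending pair needs the base adjusting first, and returns 5 only
   when that adjustment fits in a single add or sub.  */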
2249 char *
2250 emit_ldm_seq (operands, nops)
2251 rtx *operands;
2252 int nops;
2254 int regs[4];
2255 int base_reg;
2256 HOST_WIDE_INT offset;
2257 char buf[100];
2258 int i;
2260 switch (load_multiple_sequence (operands, nops, regs, &base_reg, &offset))
2262 case 1:
2263 strcpy (buf, "ldm%?ia\t");
2264 break;
2266 case 2:
2267 strcpy (buf, "ldm%?ib\t");
2268 break;
2270 case 3:
2271 strcpy (buf, "ldm%?da\t");
2272 break;
2274 case 4:
2275 strcpy (buf, "ldm%?db\t");
2276 break;
2278 case 5:
2279 if (offset >= 0)
2280 sprintf (buf, "add%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
2281 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
2282 (long) offset);
2283 else
2284 sprintf (buf, "sub%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
2285 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
2286 (long) -offset);
2287 output_asm_insn (buf, operands);
2288 base_reg = regs[0];
2289 strcpy (buf, "ldm%?ia\t");
2290 break;
2292 default:
2293 abort ();
2296 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
2297 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
2299 for (i = 1; i < nops; i++)
2300 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
2301 reg_names[regs[i]]);
2303 strcat (buf, "}\t%@ phole ldm");
2305 output_asm_insn (buf, operands);
2306 return "";
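/* A sketch of the case-5 fallback above (arbitrary register numbers):
   for destinations r4 and r5 loaded from [r2, #16] and [r2, #20] the
   output is roughly

	add	r4, r2, #16
	ldmia	r4, {r4, r5}

   The lowest destination register doubles as the temporary base.  This
   is safe because no writeback is used, so the base register is simply
   overwritten by the value loaded into it.  */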
2310 store_multiple_sequence (operands, nops, regs, base, load_offset)
2311 rtx *operands;
2312 int nops;
2313 int *regs;
2314 int *base;
2315 HOST_WIDE_INT *load_offset;
2317 int unsorted_regs[4];
2318 HOST_WIDE_INT unsorted_offsets[4];
2319 int order[4];
2320 int base_reg;
2321 int i;
2323 /* Can only handle 2, 3, or 4 insns at present, though could be easily
2324 extended if required. */
2325 if (nops < 2 || nops > 4)
2326 abort ();
2328 /* Loop over the operands and check that the memory references are
2329 suitable (i.e. immediate offsets from the same base register). At
2330 the same time, extract the target register, and the memory
2331 offsets. */
2332 for (i = 0; i < nops; i++)
2334 rtx reg;
2335 rtx offset;
2337 if (GET_CODE (operands[nops + i]) != MEM)
2338 abort ();
2340 /* Don't reorder volatile memory references; it doesn't seem worth
2341 looking for the case where the order is ok anyway. */
2342 if (MEM_VOLATILE_P (operands[nops + i]))
2343 return 0;
2345 offset = const0_rtx;
2347 if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
2348 || (GET_CODE (reg) == SUBREG
2349 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
2350 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
2351 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
2352 == REG)
2353 || (GET_CODE (reg) == SUBREG
2354 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
2355 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
2356 == CONST_INT)))
2358 if (i == 0)
2360 base_reg = REGNO(reg);
2361 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
2362 ? REGNO (operands[i])
2363 : REGNO (SUBREG_REG (operands[i])));
2364 order[0] = 0;
2366 else
2368 if (base_reg != REGNO (reg))
2369 /* Not addressed from the same base register. */
2370 return 0;
2372 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
2373 ? REGNO (operands[i])
2374 : REGNO (SUBREG_REG (operands[i])));
2375 if (unsorted_regs[i] < unsorted_regs[order[0]])
2376 order[0] = i;
2379 /* If it isn't an integer register, then we can't do this. */
2380 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14)
2381 return 0;
2383 unsorted_offsets[i] = INTVAL (offset);
2385 else
2386 /* Not a suitable memory address. */
2387 return 0;
2390 /* All the useful information has now been extracted from the
2391 operands into unsorted_regs and unsorted_offsets; additionally,
2392 order[0] has been set to the lowest numbered register in the
2393 list. Sort the registers into order, and check that the memory
2394 offsets are ascending and adjacent. */
2396 for (i = 1; i < nops; i++)
2398 int j;
2400 order[i] = order[i - 1];
2401 for (j = 0; j < nops; j++)
2402 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
2403 && (order[i] == order[i - 1]
2404 || unsorted_regs[j] < unsorted_regs[order[i]]))
2405 order[i] = j;
2407 /* Have we found a suitable register? If not, one must be used more
2408 than once. */
2409 if (order[i] == order[i - 1])
2410 return 0;
2412 /* Is the memory address adjacent and ascending? */
2413 if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
2414 return 0;
2417 if (base)
2419 *base = base_reg;
2421 for (i = 0; i < nops; i++)
2422 regs[i] = unsorted_regs[order[i]];
2424 *load_offset = unsorted_offsets[order[0]];
2427 if (unsorted_offsets[order[0]] == 0)
2428 return 1; /* stmia */
2430 if (unsorted_offsets[order[0]] == 4)
2431 return 2; /* stmib */
2433 if (unsorted_offsets[order[nops - 1]] == 0)
2434 return 3; /* stmda */
2436 if (unsorted_offsets[order[nops - 1]] == -4)
2437 return 4; /* stmdb */
2439 return 0;
2442 char *
2443 emit_stm_seq (operands, nops)
2444 rtx *operands;
2445 int nops;
2447 int regs[4];
2448 int base_reg;
2449 HOST_WIDE_INT offset;
2450 char buf[100];
2451 int i;
2453 switch (store_multiple_sequence (operands, nops, regs, &base_reg, &offset))
2455 case 1:
2456 strcpy (buf, "stm%?ia\t");
2457 break;
2459 case 2:
2460 strcpy (buf, "stm%?ib\t");
2461 break;
2463 case 3:
2464 strcpy (buf, "stm%?da\t");
2465 break;
2467 case 4:
2468 strcpy (buf, "stm%?db\t");
2469 break;
2471 default:
2472 abort ();
2475 sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
2476 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
2478 for (i = 1; i < nops; i++)
2479 sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
2480 reg_names[regs[i]]);
2482 strcat (buf, "}\t%@ phole stm");
2484 output_asm_insn (buf, operands);
2485 return "";
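/* For example (arbitrary register numbers): stores of r4 and r5 to
   [r2] and [r2, #4] come out as "stmia r2, {r4, r5}".  Note there is
   no case-5 fallback here: the registers still hold the data to be
   stored, so none of them can be reused as an adjusted base, and
   store_multiple_sequence simply returns 0 for offsets that would
   need adjusting.  */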
2489 multi_register_push (op, mode)
2490 rtx op;
2491 enum machine_mode mode;
2493 if (GET_CODE (op) != PARALLEL
2494 || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
2495 || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
2496 || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != 2))
2497 return 0;
2499 return 1;
2503 /* Routines for use with attributes */
2506 const_pool_offset (symbol)
2507 rtx symbol;
2509 return get_pool_offset (symbol) - get_pool_size () - get_prologue_size ();
2512 /* Return nonzero if ATTR is a valid attribute for DECL.
2513 ATTRIBUTES are any existing attributes and ARGS are the arguments
2514 supplied with ATTR.
2516 Supported attributes:
2518 naked: don't output any prologue or epilogue code, the user is assumed
2519 to do the right thing. */
2522 arm_valid_machine_decl_attribute (decl, attributes, attr, args)
2523 tree decl;
2524 tree attributes;
2525 tree attr;
2526 tree args;
2528 if (args != NULL_TREE)
2529 return 0;
2531 if (is_attribute_p ("naked", attr))
2532 return TREE_CODE (decl) == FUNCTION_DECL;
2533 return 0;
2536 /* Return non-zero if FUNC is a naked function. */
2538 static int
2539 arm_naked_function_p (func)
2540 tree func;
2542 tree a;
2544 if (TREE_CODE (func) != FUNCTION_DECL)
2545 abort ();
2547 a = lookup_attribute ("naked", DECL_MACHINE_ATTRIBUTES (func));
2548 return a != NULL_TREE;
2551 /* Routines for use in generating RTL */
2554 arm_gen_load_multiple (base_regno, count, from, up, write_back)
2555 int base_regno;
2556 int count;
2557 rtx from;
2558 int up;
2559 int write_back;
2561 int i = 0, j;
2562 rtx result;
2563 int sign = up ? 1 : -1;
2565 result = gen_rtx (PARALLEL, VOIDmode,
2566 rtvec_alloc (count + (write_back ? 2 : 0)));
2567 if (write_back)
2569 XVECEXP (result, 0, 0)
2570 = gen_rtx (SET, GET_MODE (from), from,
2571 plus_constant (from, count * 4 * sign));
2572 i = 1;
2573 count++;
2576 for (j = 0; i < count; i++, j++)
2578 XVECEXP (result, 0, i)
2579 = gen_rtx (SET, VOIDmode, gen_rtx (REG, SImode, base_regno + j),
2580 gen_rtx (MEM, SImode,
2581 plus_constant (from, j * 4 * sign)));
2584 if (write_back)
2585 XVECEXP (result, 0, i) = gen_rtx (CLOBBER, SImode, from);
2587 return result;
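/* An illustrative example of the RTL built above: with FROM a base
   address register, arm_gen_load_multiple (4, 2, from, TRUE, TRUE)
   should produce

     (parallel [(set from (plus from (const_int 8)))
		(set (reg:SI 4) (mem:SI from))
		(set (reg:SI 5) (mem:SI (plus from (const_int 4))))
		(clobber from)])

   which the load-multiple patterns in arm.md can then match.  */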
2591 arm_gen_store_multiple (base_regno, count, to, up, write_back)
2592 int base_regno;
2593 int count;
2594 rtx to;
2595 int up;
2596 int write_back;
2598 int i = 0, j;
2599 rtx result;
2600 int sign = up ? 1 : -1;
2602 result = gen_rtx (PARALLEL, VOIDmode,
2603 rtvec_alloc (count + (write_back ? 2 : 0)));
2604 if (write_back)
2606 XVECEXP (result, 0, 0)
2607 = gen_rtx (SET, GET_MODE (to), to,
2608 plus_constant (to, count * 4 * sign));
2609 i = 1;
2610 count++;
2613 for (j = 0; i < count; i++, j++)
2615 XVECEXP (result, 0, i)
2616 = gen_rtx (SET, VOIDmode,
2617 gen_rtx (MEM, SImode, plus_constant (to, j * 4 * sign)),
2618 gen_rtx (REG, SImode, base_regno + j));
2621 if (write_back)
2622 XVECEXP (result, 0, i) = gen_rtx (CLOBBER, SImode, to);
2624 return result;
2628 arm_gen_movstrqi (operands)
2629 rtx *operands;
2631 HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
2632 int i, r;
2633 rtx src, dst;
2634 rtx st_src, st_dst, end_src, end_dst, fin_src, fin_dst;
2635 rtx part_bytes_reg = NULL;
2636 extern int optimize;
2638 if (GET_CODE (operands[2]) != CONST_INT
2639 || GET_CODE (operands[3]) != CONST_INT
2640 || INTVAL (operands[2]) > 64
2641 || INTVAL (operands[3]) & 3)
2642 return 0;
2644 st_dst = XEXP (operands[0], 0);
2645 st_src = XEXP (operands[1], 0);
2646 fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
2647 fin_src = src = copy_to_mode_reg (SImode, st_src);
2649 in_words_to_go = (INTVAL (operands[2]) + 3) / 4;
2650 out_words_to_go = INTVAL (operands[2]) / 4;
2651 last_bytes = INTVAL (operands[2]) & 3;
2653 if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
2654 part_bytes_reg = gen_rtx (REG, SImode, (in_words_to_go - 1) & 3);
2656 for (i = 0; in_words_to_go >= 2; i+=4)
2658 if (in_words_to_go > 4)
2659 emit_insn (arm_gen_load_multiple (0, 4, src, TRUE, TRUE));
2660 else
2661 emit_insn (arm_gen_load_multiple (0, in_words_to_go, src, TRUE,
2662 FALSE));
2664 if (out_words_to_go)
2666 if (out_words_to_go > 4)
2667 emit_insn (arm_gen_store_multiple (0, 4, dst, TRUE, TRUE));
2668 else if (out_words_to_go != 1)
2669 emit_insn (arm_gen_store_multiple (0, out_words_to_go,
2670 dst, TRUE,
2671 (last_bytes == 0
2672 ? FALSE : TRUE)));
2673 else
2675 emit_move_insn (gen_rtx (MEM, SImode, dst),
2676 gen_rtx (REG, SImode, 0));
2677 if (last_bytes != 0)
2678 emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
2682 in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
2683 out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
2686 /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do. */
2687 if (out_words_to_go)
2689 rtx sreg;
2691 emit_move_insn (sreg = gen_reg_rtx (SImode), gen_rtx (MEM, SImode, src));
2692 emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));
2693 emit_move_insn (gen_rtx (MEM, SImode, dst), sreg);
2694 emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
2695 in_words_to_go--;
2697 if (in_words_to_go) /* Sanity check */
2698 abort ();
2701 if (in_words_to_go)
2703 if (in_words_to_go < 0)
2704 abort ();
2706 part_bytes_reg = copy_to_mode_reg (SImode, gen_rtx (MEM, SImode, src));
2709 if (BYTES_BIG_ENDIAN && last_bytes)
2711 rtx tmp = gen_reg_rtx (SImode);
2713 if (part_bytes_reg == NULL)
2714 abort ();
2716 /* The bytes we want are in the top end of the word */
2717 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
2718 GEN_INT (8 * (4 - last_bytes))));
2719 part_bytes_reg = tmp;
2721 while (last_bytes)
2723 emit_move_insn (gen_rtx (MEM, QImode,
2724 plus_constant (dst, last_bytes - 1)),
2725 gen_rtx (SUBREG, QImode, part_bytes_reg, 0));
2726 if (--last_bytes)
2728 tmp = gen_reg_rtx (SImode);
2729 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
2730 part_bytes_reg = tmp;
2735 else
2737 while (last_bytes)
2739 if (part_bytes_reg == NULL)
2740 abort ();
2742 emit_move_insn (gen_rtx (MEM, QImode, dst),
2743 gen_rtx (SUBREG, QImode, part_bytes_reg, 0));
2744 if (--last_bytes)
2746 rtx tmp = gen_reg_rtx (SImode);
2748 emit_insn (gen_addsi3 (dst, dst, const1_rtx));
2749 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
2750 part_bytes_reg = tmp;
2755 return 1;
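/* A worked example (illustrative): for a 10-byte copy, in_words_to_go
   is 3, out_words_to_go is 2 and last_bytes is 2.  One pass of the
   loop emits a 3-word load multiple into r0-r2 and a 2-word store
   multiple with writeback (writeback because last_bytes is nonzero).
   The third word is then sitting in r2, which is where part_bytes_reg
   was pointed above, and the trailing two bytes are stored one strb
   at a time, shifting part_bytes_reg right by 8 between stores.  */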
2758 /* Generate a memory reference for a half word, such that it will be loaded
2759 into the top 16 bits of the word. We can assume that the address is
2760 known to be alignable and of the form reg, or plus (reg, const). */
2762 gen_rotated_half_load (memref)
2763 rtx memref;
2765 HOST_WIDE_INT offset = 0;
2766 rtx base = XEXP (memref, 0);
2768 if (GET_CODE (base) == PLUS)
2770 offset = INTVAL (XEXP (base, 1));
2771 base = XEXP (base, 0);
2774 /* If we aren't allowed to generate unaligned addresses, then fail. */
2775 if (TARGET_SHORT_BY_BYTES
2776 && ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 0)))
2777 return NULL;
2779 base = gen_rtx (MEM, SImode, plus_constant (base, offset & ~2));
2781 if ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 2))
2782 return base;
2784 return gen_rtx (ROTATE, SImode, base, GEN_INT (16));
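/* For instance (illustrative): on a little-endian target a halfword at
   (plus reg 2) already occupies the top half of the aligned word, so
   the bare SImode load of the word is returned; a halfword at offset 0
   is wrapped in (rotate ... 16) to move it into the top 16 bits.  */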
2787 static enum machine_mode
2788 select_dominance_cc_mode (op, x, y, cond_or)
2789 enum rtx_code op;
2790 rtx x;
2791 rtx y;
2792 HOST_WIDE_INT cond_or;
2794 enum rtx_code cond1, cond2;
2795 int swapped = 0;
2797 /* Currently we will probably get the wrong result if the individual
2798 comparisons are not simple. This also ensures that it is safe to
2799 reverse a comparison if necessary. */
2800 if ((arm_select_cc_mode (cond1 = GET_CODE (x), XEXP (x, 0), XEXP (x, 1))
2801 != CCmode)
2802 || (arm_select_cc_mode (cond2 = GET_CODE (y), XEXP (y, 0), XEXP (y, 1))
2803 != CCmode))
2804 return CCmode;
2806 if (cond_or)
2807 cond1 = reverse_condition (cond1);
2809 /* If the comparisons are not equal, and one doesn't dominate the other,
2810 then we can't do this. */
2811 if (cond1 != cond2
2812 && ! comparison_dominates_p (cond1, cond2)
2813 && (swapped = 1, ! comparison_dominates_p (cond2, cond1)))
2814 return CCmode;
2816 if (swapped)
2818 enum rtx_code temp = cond1;
2819 cond1 = cond2;
2820 cond2 = temp;
2823 switch (cond1)
2825 case EQ:
2826 if (cond2 == EQ || ! cond_or)
2827 return CC_DEQmode;
2829 switch (cond2)
2831 case LE: return CC_DLEmode;
2832 case LEU: return CC_DLEUmode;
2833 case GE: return CC_DGEmode;
2834 case GEU: return CC_DGEUmode;
2837 break;
2839 case LT:
2840 if (cond2 == LT || ! cond_or)
2841 return CC_DLTmode;
2842 if (cond2 == LE)
2843 return CC_DLEmode;
2844 if (cond2 == NE)
2845 return CC_DNEmode;
2846 break;
2848 case GT:
2849 if (cond2 == GT || ! cond_or)
2850 return CC_DGTmode;
2851 if (cond2 == GE)
2852 return CC_DGEmode;
2853 if (cond2 == NE)
2854 return CC_DNEmode;
2855 break;
2857 case LTU:
2858 if (cond2 == LTU || ! cond_or)
2859 return CC_DLTUmode;
2860 if (cond2 == LEU)
2861 return CC_DLEUmode;
2862 if (cond2 == NE)
2863 return CC_DNEmode;
2864 break;
2866 case GTU:
2867 if (cond2 == GTU || ! cond_or)
2868 return CC_DGTUmode;
2869 if (cond2 == GEU)
2870 return CC_DGEUmode;
2871 if (cond2 == NE)
2872 return CC_DNEmode;
2873 break;
2875 /* The remaining cases only occur when both comparisons are the
2876 same. */
2877 case NE:
2878 return CC_DNEmode;
2880 case LE:
2881 return CC_DLEmode;
2883 case GE:
2884 return CC_DGEmode;
2886 case LEU:
2887 return CC_DLEUmode;
2889 case GEU:
2890 return CC_DGEUmode;
2893 abort ();
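/* An illustrative case: for (if_then_else (lt x y) (le x y)
   (const_int 0)) -- both conditions must hold, so cond_or is 0 --
   LT dominates LE and the switch above returns CC_DLTmode, a mode in
   which one combined comparison can test both conditions.  */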
2896 enum machine_mode
2897 arm_select_cc_mode (op, x, y)
2898 enum rtx_code op;
2899 rtx x;
2900 rtx y;
2902 /* All floating point compares return CCFP if it is an equality
2903 comparison, and CCFPE otherwise. */
2904 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2905 return (op == EQ || op == NE) ? CCFPmode : CCFPEmode;
2907 /* A compare with a shifted operand. Because of canonicalization, the
2908 comparison will have to be swapped when we emit the assembler. */
2909 if (GET_MODE (y) == SImode && GET_CODE (y) == REG
2910 && (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
2911 || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
2912 || GET_CODE (x) == ROTATERT))
2913 return CC_SWPmode;
2915 /* This is a special case that is used by combine to allow a
2916 comparison of a shifted byte load to be split into a zero-extend
2917 followed by a comparison of the shifted integer (only valid for
2918 equalities and unsigned inequalities). */
2919 if (GET_MODE (x) == SImode
2920 && GET_CODE (x) == ASHIFT
2921 && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 24
2922 && GET_CODE (XEXP (x, 0)) == SUBREG
2923 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == MEM
2924 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == QImode
2925 && (op == EQ || op == NE
2926 || op == GEU || op == GTU || op == LTU || op == LEU)
2927 && GET_CODE (y) == CONST_INT)
2928 return CC_Zmode;
2930 /* For an operation that sets the condition codes as a side-effect, the
2931 V flag is not set correctly, so we can only use comparisons where
2932 this doesn't matter. (For LT and GE we can use "mi" and "pl"
2933 instead.) */
2934 if (GET_MODE (x) == SImode
2935 && y == const0_rtx
2936 && (op == EQ || op == NE || op == LT || op == GE)
2937 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
2938 || GET_CODE (x) == AND || GET_CODE (x) == IOR
2939 || GET_CODE (x) == XOR || GET_CODE (x) == MULT
2940 || GET_CODE (x) == NOT || GET_CODE (x) == NEG
2941 || GET_CODE (x) == LSHIFTRT
2942 || GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
2943 || GET_CODE (x) == ROTATERT || GET_CODE (x) == ZERO_EXTRACT))
2944 return CC_NOOVmode;
2946 /* A construct for a conditional compare: if the false arm contains
2947 0, then both conditions must be true, otherwise either condition
2948 must be true. Not all conditions are possible, so CCmode is
2949 returned if it can't be done. */
2950 if (GET_CODE (x) == IF_THEN_ELSE
2951 && (XEXP (x, 2) == const0_rtx
2952 || XEXP (x, 2) == const1_rtx)
2953 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
2954 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<')
2955 return select_dominance_cc_mode (op, XEXP (x, 0), XEXP (x, 1),
2956 INTVAL (XEXP (x, 2)));
2958 if (GET_MODE (x) == QImode && (op == EQ || op == NE))
2959 return CC_Zmode;
2961 if (GET_MODE (x) == SImode && (op == LTU || op == GEU)
2962 && GET_CODE (x) == PLUS
2963 && (rtx_equal_p (XEXP (x, 0), y) || rtx_equal_p (XEXP (x, 1), y)))
2964 return CC_Cmode;
2966 return CCmode;
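/* For example (illustrative): a comparison such as
   (compare (ashift (reg r1) (const_int 2)) (reg r0)) takes the
   CC_SWPmode case above, since canonicalization has put the shifted
   operand on the left; the assembler output "cmp r0, r1, asl #2" then
   tests the swapped condition.  */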
2969 /* X and Y are two things to compare using CODE. Emit the compare insn and
2970 return the rtx for the cc reg in the proper mode. FP means this is a
2971 floating point compare: I don't think that it is needed on the arm. */
2974 gen_compare_reg (code, x, y, fp)
2975 enum rtx_code code;
2976 rtx x, y;
2978 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
2979 rtx cc_reg = gen_rtx (REG, mode, 24);
2981 emit_insn (gen_rtx (SET, VOIDmode, cc_reg,
2982 gen_rtx (COMPARE, mode, x, y)));
2984 return cc_reg;
2987 void
2988 arm_reload_in_hi (operands)
2989 rtx *operands;
2991 rtx base = find_replacement (&XEXP (operands[1], 0));
2993 emit_insn (gen_zero_extendqisi2 (operands[2], gen_rtx (MEM, QImode, base)));
2994 emit_insn (gen_zero_extendqisi2 (gen_rtx (SUBREG, SImode, operands[0], 0),
2995 gen_rtx (MEM, QImode,
2996 plus_constant (base, 1))));
2997 if (BYTES_BIG_ENDIAN)
2998 emit_insn (gen_rtx (SET, VOIDmode, gen_rtx (SUBREG, SImode,
2999 operands[0], 0),
3000 gen_rtx (IOR, SImode,
3001 gen_rtx (ASHIFT, SImode,
3002 gen_rtx (SUBREG, SImode,
3003 operands[0], 0),
3004 GEN_INT (8)),
3005 operands[2])));
3006 else
3007 emit_insn (gen_rtx (SET, VOIDmode, gen_rtx (SUBREG, SImode,
3008 operands[0], 0),
3009 gen_rtx (IOR, SImode,
3010 gen_rtx (ASHIFT, SImode,
3011 operands[2],
3012 GEN_INT (8)),
3013 gen_rtx (SUBREG, SImode, operands[0], 0))));
3016 void
3017 arm_reload_out_hi (operands)
3018 rtx *operands;
3020 rtx base = find_replacement (&XEXP (operands[0], 0));
3022 if (BYTES_BIG_ENDIAN)
3024 emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (base, 1)),
3025 gen_rtx (SUBREG, QImode, operands[1], 0)));
3026 emit_insn (gen_lshrsi3 (operands[2],
3027 gen_rtx (SUBREG, SImode, operands[1], 0),
3028 GEN_INT (8)));
3029 emit_insn (gen_movqi (gen_rtx (MEM, QImode, base),
3030 gen_rtx (SUBREG, QImode, operands[2], 0)));
3032 else
3034 emit_insn (gen_movqi (gen_rtx (MEM, QImode, base),
3035 gen_rtx (SUBREG, QImode, operands[1], 0)));
3036 emit_insn (gen_lshrsi3 (operands[2],
3037 gen_rtx (SUBREG, SImode, operands[1], 0),
3038 GEN_INT (8)));
3039 emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (base, 1)),
3040 gen_rtx (SUBREG, QImode, operands[2], 0)));
3044 /* Check to see if a branch is forwards or backwards. Return TRUE if it
3045 is backwards. */
3048 arm_backwards_branch (from, to)
3049 int from, to;
3051 return insn_addresses[to] <= insn_addresses[from];
3054 /* Check to see if a branch is within the distance that can be done using
3055 an arithmetic expression. */
3057 short_branch (from, to)
3058 int from, to;
3060 int delta = insn_addresses[from] + 8 - insn_addresses[to];
3062 return abs (delta) < 980; /* A small margin for safety */
3065 /* Check to see that the insn isn't the target of the conditionalizing
3066 code */
3068 arm_insn_not_targeted (insn)
3069 rtx insn;
3071 return insn != arm_target_insn;
3075 /* Routines for manipulation of the constant pool. */
3076 /* This is unashamedly hacked from the version in sh.c, since the problem is
3077 extremely similar. */
3079 /* Arm instructions cannot load a large constant into a register;
3080 constants have to come from a pc relative load. The reference of a pc
3081 relative load instruction must be less than 1k in front of the instruction.
3082 This means that we often have to dump a constant inside a function, and
3083 generate code to branch around it.
3085 It is important to minimize this, since the branches will slow things
3086 down and make things bigger.
3088 Worst case code looks like:
3090 ldr rn, L1
3091 b L2
3092 align
3093 L1: .long value
3097 ldr rn, L3
3098 b L4
3099 align
3100 L3: .long value
3104 We fix this by performing a scan before scheduling, which notices which
3105 instructions need to have their operands fetched from the constant table
3106 and builds the table.
3109 The algorithm is:
3111 scan: find an instruction which needs a pcrel move. Look forward, find the
3112 last barrier which is within MAX_COUNT bytes of the requirement.
3113 If there isn't one, make one. Process all the instructions between
3114 the found instruction and the barrier.
3116 In the above example, we can tell that L3 is within 1k of L1, so
3117 the first move can be shrunk from the 2 insn+constant sequence into
3118 just 1 insn, and the constant moved to L3 to make:
3120 ldr rn, L1
3122 ldr rn, L3
3123 b L4
3124 align
3125 L1: .long value
3126 L3: .long value
3129 Then the second move becomes the target for the shortening process.
3133 typedef struct
3135 rtx value; /* Value in table */
3136 HOST_WIDE_INT next_offset;
3137 enum machine_mode mode; /* Mode of value */
3138 } pool_node;
3140 /* The maximum number of constants that can fit into one pool, since
3141 the pc relative range is 0...1020 bytes and constants are at least 4
3142 bytes long */
3144 #define MAX_POOL_SIZE (1020/4)
3145 static pool_node pool_vector[MAX_POOL_SIZE];
3146 static int pool_size;
3147 static rtx pool_vector_label;
3149 /* Add a constant to the pool and return its label. */
3150 static HOST_WIDE_INT
3151 add_constant (x, mode)
3152 rtx x;
3153 enum machine_mode mode;
3155 int i;
3156 rtx lab;
3157 HOST_WIDE_INT offset;
3159 if (mode == SImode && GET_CODE (x) == MEM && CONSTANT_P (XEXP (x, 0))
3160 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
3161 x = get_pool_constant (XEXP (x, 0));
3162 #ifndef AOF_ASSEMBLER
3163 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == 3)
3164 x = XVECEXP (x, 0, 0);
3165 #endif
3167 /* First see if we've already got it */
3168 for (i = 0; i < pool_size; i++)
3170 if (GET_CODE (x) == pool_vector[i].value->code
3171 && mode == pool_vector[i].mode)
3173 if (GET_CODE (x) == CODE_LABEL)
3175 if (XINT (x, 3) != XINT (pool_vector[i].value, 3))
3176 continue;
3178 if (rtx_equal_p (x, pool_vector[i].value))
3179 return pool_vector[i].next_offset - GET_MODE_SIZE (mode);
3183 /* Need a new one */
3184 pool_vector[pool_size].next_offset = GET_MODE_SIZE (mode);
3185 offset = 0;
3186 if (pool_size == 0)
3187 pool_vector_label = gen_label_rtx ();
3188 else
3189 pool_vector[pool_size].next_offset
3190 += (offset = pool_vector[pool_size - 1].next_offset);
3192 pool_vector[pool_size].value = x;
3193 pool_vector[pool_size].mode = mode;
3194 pool_size++;
3195 return offset;
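/* So, for illustration: the first SImode constant added to an empty
   pool gets offset 0 and next_offset 4; a DFmode constant added next
   would get offset 4 and push next_offset to 12.  The returned offset
   is the value's displacement from pool_vector_label.  */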
3198 /* Output the literal table */
3199 static void
3200 dump_table (scan)
3201 rtx scan;
3203 int i;
3205 scan = emit_label_after (gen_label_rtx (), scan);
3206 scan = emit_insn_after (gen_align_4 (), scan);
3207 scan = emit_label_after (pool_vector_label, scan);
3209 for (i = 0; i < pool_size; i++)
3211 pool_node *p = pool_vector + i;
3213 switch (GET_MODE_SIZE (p->mode))
3215 case 4:
3216 scan = emit_insn_after (gen_consttable_4 (p->value), scan);
3217 break;
3219 case 8:
3220 scan = emit_insn_after (gen_consttable_8 (p->value), scan);
3221 break;
3223 default:
3224 abort ();
3225 break;
3229 scan = emit_insn_after (gen_consttable_end (), scan);
3230 scan = emit_barrier_after (scan);
3231 pool_size = 0;
3234 /* Nonzero if the src operand needs to be fixed up */
3235 static int
3236 fixit (src, mode, destreg)
3237 rtx src;
3238 enum machine_mode mode;
3239 int destreg;
3241 if (CONSTANT_P (src))
3243 if (GET_CODE (src) == CONST_INT)
3244 return (! const_ok_for_arm (INTVAL (src))
3245 && ! const_ok_for_arm (~INTVAL (src)));
3246 if (GET_CODE (src) == CONST_DOUBLE)
3247 return (GET_MODE (src) == VOIDmode
3248 || destreg < 16
3249 || (! const_double_rtx_ok_for_fpu (src)
3250 && ! neg_const_double_rtx_ok_for_fpu (src)));
3251 return symbol_mentioned_p (src);
3253 #ifndef AOF_ASSEMBLER
3254 else if (GET_CODE (src) == UNSPEC && XINT (src, 1) == 3)
3255 return 1;
3256 #endif
3257 else
3258 return (mode == SImode && GET_CODE (src) == MEM
3259 && GET_CODE (XEXP (src, 0)) == SYMBOL_REF
3260 && CONSTANT_POOL_ADDRESS_P (XEXP (src, 0)));
3263 /* Find the last barrier less than MAX_COUNT bytes from FROM, or create one. */
3264 static rtx
3265 find_barrier (from, max_count)
3266 rtx from;
3267 int max_count;
3269 int count = 0;
3270 rtx found_barrier = 0;
3272 while (from && count < max_count)
3274 if (GET_CODE (from) == BARRIER)
3275 found_barrier = from;
3277 /* Count the length of this insn */
3278 if (GET_CODE (from) == INSN
3279 && GET_CODE (PATTERN (from)) == SET
3280 && CONSTANT_P (SET_SRC (PATTERN (from)))
3281 && CONSTANT_POOL_ADDRESS_P (SET_SRC (PATTERN (from))))
3283 rtx src = SET_SRC (PATTERN (from));
3284 count += 2;
3286 else
3287 count += get_attr_length (from);
3289 from = NEXT_INSN (from);
3292 if (!found_barrier)
3294 /* We didn't find a barrier in time to
3295 dump our stuff, so we'll make one */
3296 rtx label = gen_label_rtx ();
3298 if (from)
3299 from = PREV_INSN (from);
3300 else
3301 from = get_last_insn ();
3303 /* Walk back to be just before any jump */
3304 while (GET_CODE (from) == JUMP_INSN
3305 || GET_CODE (from) == NOTE
3306 || GET_CODE (from) == CODE_LABEL)
3307 from = PREV_INSN (from);
3309 from = emit_jump_insn_after (gen_jump (label), from);
3310 JUMP_LABEL (from) = label;
3311 found_barrier = emit_barrier_after (from);
3312 emit_label_after (label, found_barrier);
3313 return found_barrier;
3316 return found_barrier;
3319 /* Nonzero if the insn is a move instruction which needs to be fixed. */
3320 static int
3321 broken_move (insn)
3322 rtx insn;
3324 if (!INSN_DELETED_P (insn)
3325 && GET_CODE (insn) == INSN
3326 && GET_CODE (PATTERN (insn)) == SET)
3328 rtx pat = PATTERN (insn);
3329 rtx src = SET_SRC (pat);
3330 rtx dst = SET_DEST (pat);
3331 int destreg;
3332 enum machine_mode mode = GET_MODE (dst);
3333 if (dst == pc_rtx)
3334 return 0;
3336 if (GET_CODE (dst) == REG)
3337 destreg = REGNO (dst);
3338 else if (GET_CODE (dst) == SUBREG && GET_CODE (SUBREG_REG (dst)) == REG)
3339 destreg = REGNO (SUBREG_REG (dst));
3341 return fixit (src, mode, destreg);
3343 return 0;
3346 void
3347 arm_reorg (first)
3348 rtx first;
3350 rtx insn;
3351 int count_size;
3352 int regno;
3354 #if 0
3355 /* The ldr instruction can work with up to a 4k offset, and most constants
3356 will be loaded with one of these instructions; however, the adr
3357 instruction and the ldf instructions only work with a 1k offset. This
3358 code needs to be rewritten to use the 4k offset when possible, and to
3359 adjust when a 1k offset is needed. For now we just use a 1k offset
3360 from the start. */
3361 count_size = 4000;
3363 /* Floating point operands can't work further than 1024 bytes from the
3364 PC, so to make things simple we restrict all loads for such functions.
3366 if (TARGET_HARD_FLOAT)
3367 for (regno = 16; regno < 24; regno++)
3368 if (regs_ever_live[regno])
3370 count_size = 1000;
3371 break;
3373 #else
3374 count_size = 1000;
3375 #endif /* 0 */
3377 for (insn = first; insn; insn = NEXT_INSN (insn))
3379 if (broken_move (insn))
3381 /* This is a broken move instruction, scan ahead looking for
3382 a barrier to stick the constant table behind */
3383 rtx scan;
3384 rtx barrier = find_barrier (insn, count_size);
3386 /* Now find all the moves between the points and modify them */
3387 for (scan = insn; scan != barrier; scan = NEXT_INSN (scan))
3389 if (broken_move (scan))
3391 /* This is a broken move instruction, add it to the pool */
3392 rtx pat = PATTERN (scan);
3393 rtx src = SET_SRC (pat);
3394 rtx dst = SET_DEST (pat);
3395 enum machine_mode mode = GET_MODE (dst);
3396 HOST_WIDE_INT offset;
3397 rtx newinsn = scan;
3398 rtx newsrc;
3399 rtx addr;
3400 int scratch;
3402 /* If this is an HImode constant load, convert it into
3403 an SImode constant load. Since the register is always
3404 32 bits this is safe. We have to do this, since the
3405 load pc-relative instruction only does a 32-bit load. */
3406 if (mode == HImode)
3408 mode = SImode;
3409 if (GET_CODE (dst) != REG)
3410 abort ();
3411 PUT_MODE (dst, SImode);
3414 offset = add_constant (src, mode);
3415 addr = plus_constant (gen_rtx (LABEL_REF, VOIDmode,
3416 pool_vector_label),
3417 offset);
3419 /* For wide moves to integer regs we need to split the
3420 address calculation off into a separate insn, so that
3421 the load can then be done with a load-multiple. This is
3422 safe, since we have already noted the length of such
3423 insns to be 8, and we are immediately over-writing the
3424 scratch we have grabbed with the final result. */
3425 if (GET_MODE_SIZE (mode) > 4
3426 && (scratch = REGNO (dst)) < 16)
3428 rtx reg = gen_rtx (REG, SImode, scratch);
3429 newinsn = emit_insn_after (gen_movaddr (reg, addr),
3430 newinsn);
3431 addr = reg;
3434 newsrc = gen_rtx (MEM, mode, addr);
3436 /* Build a jump insn wrapper around the move instead
3437 of an ordinary insn, because we want to have room for
3438 the target label rtx in fld[7], which an ordinary
3439 insn doesn't have. */
3440 newinsn = emit_jump_insn_after (gen_rtx (SET, VOIDmode,
3441 dst, newsrc),
3442 newinsn);
3443 JUMP_LABEL (newinsn) = pool_vector_label;
3445 /* But it's still an ordinary insn */
3446 PUT_CODE (newinsn, INSN);
3448 /* Kill old insn */
3449 delete_insn (scan);
3450 scan = newinsn;
3453 dump_table (barrier);
3454 insn = scan;
3460 /* Routines to output assembly language. */
3462 /* If the rtx is a valid FPA immediate constant, return the string of the number.
3463 In this way we can ensure that valid double constants are generated even
3464 when cross compiling. */
3465 char *
3466 fp_immediate_constant (x)
3467 rtx x;
3469 REAL_VALUE_TYPE r;
3470 int i;
3472 if (!fpa_consts_inited)
3473 init_fpa_table ();
3475 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3476 for (i = 0; i < 8; i++)
3477 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
3478 return strings_fpa[i];
3480 abort ();
3483 /* As for fp_immediate_constant, but value is passed directly, not in rtx. */
3484 static char *
3485 fp_const_from_val (r)
3486 REAL_VALUE_TYPE *r;
3488 int i;
3490 if (! fpa_consts_inited)
3491 init_fpa_table ();
3493 for (i = 0; i < 8; i++)
3494 if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
3495 return strings_fpa[i];
3497 abort ();
3500 /* Output the operands of a LDM/STM instruction to STREAM.
3501 MASK is the ARM register set mask of which only bits 0-15 are important.
3502 INSTR is the instruction template; REGISTER_PREFIX is substituted for
3503 its %s. HAT is nonzero if a hat (^) must follow the register list. */
3505 void
3506 print_multi_reg (stream, instr, mask, hat)
3507 FILE *stream;
3508 char *instr;
3509 int mask, hat;
3511 int i;
3512 int not_first = FALSE;
3514 fputc ('\t', stream);
3515 fprintf (stream, instr, REGISTER_PREFIX);
3516 fputs (", {", stream);
3517 for (i = 0; i < 16; i++)
3518 if (mask & (1 << i))
3520 if (not_first)
3521 fprintf (stream, ", ");
3522 fprintf (stream, "%s%s", REGISTER_PREFIX, reg_names[i]);
3523 not_first = TRUE;
3526 fprintf (stream, "}%s\n", hat ? "^" : "");
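/* For example (illustrative, and assuming REGISTER_PREFIX is empty):
   print_multi_reg (f, "ldmea\t%sfp", 0xA810, FALSE) writes

	ldmea	fp, {r4, fp, sp, pc}

   since mask bit 4 selects r4 and bits 11, 13 and 15 select fp, sp
   and pc.  */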
3529 /* Output a 'call' insn. */
3531 char *
3532 output_call (operands)
3533 rtx *operands;
3535 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
3537 if (REGNO (operands[0]) == 14)
3539 operands[0] = gen_rtx (REG, SImode, 12);
3540 output_asm_insn ("mov%?\t%0, %|lr", operands);
3542 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
3543 output_asm_insn ("mov%?\t%|pc, %0", operands);
3544 return "";
3547 static int
3548 eliminate_lr2ip (x)
3549 rtx *x;
3551 int something_changed = 0;
3552 rtx x0 = *x;
3553 int code = GET_CODE (x0);
3554 register int i, j;
3555 register char *fmt;
3557 switch (code)
3559 case REG:
3560 if (REGNO (x0) == 14)
3562 *x = gen_rtx (REG, SImode, 12);
3563 return 1;
3565 return 0;
3566 default:
3567 /* Scan through the sub-elements and change any references there */
3568 fmt = GET_RTX_FORMAT (code);
3569 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3570 if (fmt[i] == 'e')
3571 something_changed |= eliminate_lr2ip (&XEXP (x0, i));
3572 else if (fmt[i] == 'E')
3573 for (j = 0; j < XVECLEN (x0, i); j++)
3574 something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));
3575 return something_changed;
3579 /* Output a 'call' insn that is a reference in memory. */
3581 char *
3582 output_call_mem (operands)
3583 rtx *operands;
3585 operands[0] = copy_rtx (operands[0]); /* Be ultra careful */
3586 /* Handle calls using lr by using ip (which may be clobbered in subr anyway). */
3588 if (eliminate_lr2ip (&operands[0]))
3589 output_asm_insn ("mov%?\t%|ip, %|lr", operands);
3591 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
3592 output_asm_insn ("ldr%?\t%|pc, %0", operands);
3593 return "";
3597 /* Output a move from arm registers to an fpu register.
3598 OPERANDS[0] is an fpu register.
3599 OPERANDS[1] is the first register of an arm register pair. */
3601 char *
3602 output_mov_long_double_fpu_from_arm (operands)
3603 rtx *operands;
3605 int arm_reg0 = REGNO (operands[1]);
3606 rtx ops[3];
3608 if (arm_reg0 == 12)
3609 abort();
3611 ops[0] = gen_rtx (REG, SImode, arm_reg0);
3612 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
3613 ops[2] = gen_rtx (REG, SImode, 2 + arm_reg0);
3615 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
3616 output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);
3617 return "";
3620 /* Output a move from an fpu register to arm registers.
3621 OPERANDS[0] is the first register of an arm register pair.
3622 OPERANDS[1] is an fpu register. */
3624 char *
3625 output_mov_long_double_arm_from_fpu (operands)
3626 rtx *operands;
3628 int arm_reg0 = REGNO (operands[0]);
3629 rtx ops[3];
3631 if (arm_reg0 == 12)
3632 abort();
3634 ops[0] = gen_rtx (REG, SImode, arm_reg0);
3635 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
3636 ops[2] = gen_rtx (REG, SImode, 2 + arm_reg0);
3638 output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
3639 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
3640 return "";
3643 /* Output a move from arm registers to arm registers of a long double.
3644 OPERANDS[0] is the destination.
3645 OPERANDS[1] is the source. */
3646 char *
3647 output_mov_long_double_arm_from_arm (operands)
3648 rtx *operands;
3650 /* We have to be careful here because the two might overlap */
3651 int dest_start = REGNO (operands[0]);
3652 int src_start = REGNO (operands[1]);
3653 rtx ops[2];
3654 int i;
3656 if (dest_start < src_start)
3658 for (i = 0; i < 3; i++)
3660 ops[0] = gen_rtx (REG, SImode, dest_start + i);
3661 ops[1] = gen_rtx (REG, SImode, src_start + i);
3662 output_asm_insn ("mov%?\t%0, %1", ops);
3665 else
3667 for (i = 2; i >= 0; i--)
3669 ops[0] = gen_rtx (REG, SImode, dest_start + i);
3670 ops[1] = gen_rtx (REG, SImode, src_start + i);
3671 output_asm_insn ("mov%?\t%0, %1", ops);
3675 return "";
3679 /* Output a move from arm registers to an fpu register.
3680 OPERANDS[0] is an fpu register.
3681 OPERANDS[1] is the first register of an arm register pair. */
3683 char *
3684 output_mov_double_fpu_from_arm (operands)
3685 rtx *operands;
3687 int arm_reg0 = REGNO (operands[1]);
3688 rtx ops[2];
3690 if (arm_reg0 == 12)
3691 abort();
3692 ops[0] = gen_rtx (REG, SImode, arm_reg0);
3693 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
3694 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
3695 output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
3696 return "";
3699 /* Output a move from an fpu register to arm registers.
3700 OPERANDS[0] is the first register of an arm register pair.
3701 OPERANDS[1] is an fpu register. */
3703 char *
3704 output_mov_double_arm_from_fpu (operands)
3705 rtx *operands;
3707 int arm_reg0 = REGNO (operands[0]);
3708 rtx ops[2];
3710 if (arm_reg0 == 12)
3711 abort();
3713 ops[0] = gen_rtx (REG, SImode, arm_reg0);
3714 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
3715 output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
3716 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
3717 return "";
3720 /* Output a move between double words.
3721 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
3722 or MEM<-REG and all MEMs must be offsettable addresses. */
3724 char *
3725 output_move_double (operands)
3726 rtx *operands;
3728 enum rtx_code code0 = GET_CODE (operands[0]);
3729 enum rtx_code code1 = GET_CODE (operands[1]);
3730 rtx otherops[2];
3732 if (code0 == REG)
3734 int reg0 = REGNO (operands[0]);
3736 otherops[0] = gen_rtx (REG, SImode, 1 + reg0);
3737 if (code1 == REG)
3739 int reg1 = REGNO (operands[1]);
3740 if (reg1 == 12)
3741 abort();
3743 /* Ensure the second source is not overwritten */
3744 if (reg1 == reg0 + (WORDS_BIG_ENDIAN ? -1 : 1))
3745 output_asm_insn("mov%?\t%Q0, %Q1\n\tmov%?\t%R0, %R1", operands);
3746 else
3747 output_asm_insn("mov%?\t%R0, %R1\n\tmov%?\t%Q0, %Q1", operands);
3749 else if (code1 == CONST_DOUBLE)
3751 if (GET_MODE (operands[1]) == DFmode)
3753 long l[2];
3754 union real_extract u;
3756 bcopy ((char *) &CONST_DOUBLE_LOW (operands[1]), (char *) &u,
3757 sizeof (u));
3758 REAL_VALUE_TO_TARGET_DOUBLE (u.d, l);
3759 otherops[1] = GEN_INT(l[1]);
3760 operands[1] = GEN_INT(l[0]);
3762 else if (GET_MODE (operands[1]) != VOIDmode)
3763 abort ();
3764 else if (WORDS_BIG_ENDIAN)
3767 otherops[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
3768 operands[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
3770 else
3773 otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
3774 operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
3776 output_mov_immediate (operands);
3777 output_mov_immediate (otherops);
3779 else if (code1 == CONST_INT)
3781 /* sign extend the intval into the high-order word */
3782 if (WORDS_BIG_ENDIAN)
3784 otherops[1] = operands[1];
3785 operands[1] = (INTVAL (operands[1]) < 0
3786 ? constm1_rtx : const0_rtx);
3788 else
3789 otherops[1] = INTVAL (operands[1]) < 0 ? constm1_rtx : const0_rtx;
3790 output_mov_immediate (otherops);
3791 output_mov_immediate (operands);
3793 else if (code1 == MEM)
3795 switch (GET_CODE (XEXP (operands[1], 0)))
3797 case REG:
3798 output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
3799 break;
3801 case PRE_INC:
3802 abort (); /* Should never happen now */
3803 break;
3805 case PRE_DEC:
3806 output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
3807 break;
3809 case POST_INC:
3810 output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
3811 break;
3813 case POST_DEC:
3814 abort (); /* Should never happen now */
3815 break;
3817 case LABEL_REF:
3818 case CONST:
3819 output_asm_insn ("adr%?\t%0, %1", operands);
3820 output_asm_insn ("ldm%?ia\t%0, %M0", operands);
3821 break;
3823 default:
3824 if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1)))
3826 otherops[0] = operands[0];
3827 otherops[1] = XEXP (XEXP (operands[1], 0), 0);
3828 otherops[2] = XEXP (XEXP (operands[1], 0), 1);
3829 if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
3831 if (GET_CODE (otherops[2]) == CONST_INT)
3833 switch (INTVAL (otherops[2]))
3835 case -8:
3836 output_asm_insn ("ldm%?db\t%1, %M0", otherops);
3837 return "";
3838 case -4:
3839 output_asm_insn ("ldm%?da\t%1, %M0", otherops);
3840 return "";
3841 case 4:
3842 output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
3843 return "";
3845 if (!(const_ok_for_arm (INTVAL (otherops[2]))))
3846 output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
3847 else
3848 output_asm_insn ("add%?\t%0, %1, %2", otherops);
3850 else
3851 output_asm_insn ("add%?\t%0, %1, %2", otherops);
3853 else
3854 output_asm_insn ("sub%?\t%0, %1, %2", otherops);
3855 return "ldm%?ia\t%0, %M0";
3857 else
3859 otherops[1] = adj_offsettable_operand (operands[1], 4);
3860 /* Take care of overlapping base/data reg. */
3861 if (reg_mentioned_p (operands[0], operands[1]))
3863 output_asm_insn ("ldr%?\t%0, %1", otherops);
3864 output_asm_insn ("ldr%?\t%0, %1", operands);
3866 else
3868 output_asm_insn ("ldr%?\t%0, %1", operands);
3869 output_asm_insn ("ldr%?\t%0, %1", otherops);
3874 else
3875 abort(); /* Constraints should prevent this */
3877 else if (code0 == MEM && code1 == REG)
3879 if (REGNO (operands[1]) == 12)
3880 abort();
3882 switch (GET_CODE (XEXP (operands[0], 0)))
3884 case REG:
3885 output_asm_insn ("stm%?ia\t%m0, %M1", operands);
3886 break;
3888 case PRE_INC:
3889 abort (); /* Should never happen now */
3890 break;
3892 case PRE_DEC:
3893 output_asm_insn ("stm%?db\t%m0!, %M1", operands);
3894 break;
3896 case POST_INC:
3897 output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
3898 break;
3900 case POST_DEC:
3901 abort (); /* Should never happen now */
3902 break;
3904 case PLUS:
3905 if (GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT)
3907 switch (INTVAL (XEXP (XEXP (operands[0], 0), 1)))
3909 case -8:
3910 output_asm_insn ("stm%?db\t%m0, %M1", operands);
3911 return "";
3913 case -4:
3914 output_asm_insn ("stm%?da\t%m0, %M1", operands);
3915 return "";
3917 case 4:
3918 output_asm_insn ("stm%?ib\t%m0, %M1", operands);
3919 return "";
3922 /* Fall through */
3924 default:
3925 otherops[0] = adj_offsettable_operand (operands[0], 4);
3926 otherops[1] = gen_rtx (REG, SImode, 1 + REGNO (operands[1]));
3927 output_asm_insn ("str%?\t%1, %0", operands);
3928 output_asm_insn ("str%?\t%1, %0", otherops);
3931 else
3932 abort(); /* Constraints should prevent this */
3934 return "";
3938 /* Output an arbitrary MOV reg, #n.
3939 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
3941 char *
3942 output_mov_immediate (operands)
3943 rtx *operands;
3945 HOST_WIDE_INT n = INTVAL (operands[1]);
3946 int n_ones = 0;
3947 int i;
3949 /* Try to use one MOV */
3950 if (const_ok_for_arm (n))
3952 output_asm_insn ("mov%?\t%0, %1", operands);
3953 return "";
3956 /* Try to use one MVN */
3957 if (const_ok_for_arm (~n))
3959 operands[1] = GEN_INT (~n);
3960 output_asm_insn ("mvn%?\t%0, %1", operands);
3961 return "";
3964 /* If all else fails, make it out of ORRs or BICs as appropriate. */
3966 for (i=0; i < 32; i++)
3967 if (n & 1 << i)
3968 n_ones++;
3970 if (n_ones > 16) /* Shorter to use MVN with BIC in this case. */
3971 output_multi_immediate(operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1,
3972 ~n);
3973 else
3974 output_multi_immediate(operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1,
3977 return "";
3981 /* Output an ADD r, s, #n where n may be too big for one instruction. If
3982 adding zero to one register, output nothing. */
3984 char *
3985 output_add_immediate (operands)
3986 rtx *operands;
3988 HOST_WIDE_INT n = INTVAL (operands[2]);
3990 if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
3992 if (n < 0)
3993 output_multi_immediate (operands,
3994 "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
3995 -n);
3996 else
3997 output_multi_immediate (operands,
3998 "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
4002 return "";
4005 /* Output a multiple immediate operation.
4006 OPERANDS is the vector of operands referred to in the output patterns.
4007 INSTR1 is the output pattern to use for the first constant.
4008 INSTR2 is the output pattern to use for subsequent constants.
4009 IMMED_OP is the index of the constant slot in OPERANDS.
4010 N is the constant value. */
4012 char *
4013 output_multi_immediate (operands, instr1, instr2, immed_op, n)
4014 rtx *operands;
4015 char *instr1, *instr2;
4016 int immed_op;
4017 HOST_WIDE_INT n;
4019 #if HOST_BITS_PER_WIDE_INT > 32
4020 n &= 0xffffffff;
4021 #endif
4023 if (n == 0)
4025 operands[immed_op] = const0_rtx;
4026 output_asm_insn (instr1, operands); /* Quick and easy output */
4028 else
4030 int i;
4031 char *instr = instr1;
4033 /* Note that n is never zero here (which would give no output) */
4034 for (i = 0; i < 32; i += 2)
4036 if (n & (3 << i))
4038 operands[immed_op] = GEN_INT (n & (255 << i));
4039 output_asm_insn (instr, operands);
4040 instr = instr2;
4041 i += 6;
4045 return "";
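/* A worked example (illustrative, with %0 standing for r0, say): for
   n = 0xF000F neither a MOV nor an MVN is valid, so the 2-bit scan
   above picks out the chunks 0xF and 0xF0000 and emits

	mov	r0, #15
	orr	r0, r0, #983040

   Each chunk is an 8-bit value rotated by an even amount, which is
   exactly what an ARM data-processing immediate can encode.  */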
4049 /* Return the appropriate ARM instruction for the operation code.
4050 The returned result should not be overwritten. OP is the rtx of the
4051 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
4052 was shifted. */
4054 char *
4055 arithmetic_instr (op, shift_first_arg)
4056 rtx op;
4057 int shift_first_arg;
4059 switch (GET_CODE (op))
4061 case PLUS:
4062 return "add";
4064 case MINUS:
4065 return shift_first_arg ? "rsb" : "sub";
4067 case IOR:
4068 return "orr";
4070 case XOR:
4071 return "eor";
4073 case AND:
4074 return "and";
4076 default:
4077 abort ();
4082 /* Ensure valid constant shifts and return the appropriate shift mnemonic
4083 for the operation code. The returned result should not be overwritten.
4084 OP is the rtx code of the shift.
4085 On exit, *AMOUNTP will be -1 if the shift is by a register, or the
4086 constant shift amount otherwise. */
4088 static char *
4089 shift_op (op, amountp)
4090 rtx op;
4091 HOST_WIDE_INT *amountp;
4093 char *mnem;
4094 enum rtx_code code = GET_CODE (op);
4096 if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
4097 *amountp = -1;
4098 else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
4099 *amountp = INTVAL (XEXP (op, 1));
4100 else
4101 abort ();
4103 switch (code)
4105 case ASHIFT:
4106 mnem = "asl";
4107 break;
4109 case ASHIFTRT:
4110 mnem = "asr";
4111 break;
4113 case LSHIFTRT:
4114 mnem = "lsr";
4115 break;
4117 case ROTATERT:
4118 mnem = "ror";
4119 break;
4121 case MULT:
4122 /* We never have to worry about the amount being other than a
4123 power of 2, since this case can never be reloaded from a reg. */
4124 if (*amountp != -1)
4125 *amountp = int_log2 (*amountp);
4126 else
4127 abort ();
4128 return "asl";
4130 default:
4131 abort ();
4134 if (*amountp != -1)
4136 /* This is not 100% correct, but follows from the desire to merge
4137 multiplication by a power of 2 with the recognizer for a
4138 shift. >=32 is not a valid shift for "asl", so we must try and
4139 output a shift that produces the correct arithmetical result.
4140 Using lsr #32 is identical except for the fact that the carry bit
4141 is not set correctly if we set the flags; but we never use the
4142 carry bit from such an operation, so we can ignore that. */
4143 if (code == ROTATERT)
4144 *amountp &= 31; /* Rotate is just modulo 32 */
4145 else if (*amountp != (*amountp & 31))
4147 if (code == ASHIFT)
4148 mnem = "lsr";
4149 *amountp = 32;
4152 /* Shifts of 0 are no-ops. */
4153 if (*amountp == 0)
4154 return NULL;
4157 return mnem;
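/* Illustrative results: (mult x 8) comes back as "asl" with *amountp
   set to 3 via int_log2; a shift by a register returns its mnemonic
   with *amountp == -1; and an ASHIFT by 32 or more is rewritten as
   "lsr" #32, which gives the arithmetically correct zero result
   without using an invalid asl shift count.  */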
4161 /* Obtain the shift from the POWER of two. */
4163 HOST_WIDE_INT
4164 int_log2 (power)
4165 HOST_WIDE_INT power;
4167 HOST_WIDE_INT shift = 0;
4169 while (((((HOST_WIDE_INT) 1) << shift) & power) == 0)
4171 if (shift > 31)
4172 abort ();
4173 shift++;
4176 return shift;
4179 /* Output a .ascii pseudo-op, keeping track of lengths. This is because
4180 /bin/as is horribly restrictive. */
4182 void
4183 output_ascii_pseudo_op (stream, p, len)
4184 FILE *stream;
4185 unsigned char *p;
4186 int len;
4188 int i;
4189 int len_so_far = 1000;
4190 int chars_so_far = 0;
4192 for (i = 0; i < len; i++)
4194 register int c = p[i];
4196 if (len_so_far > 50)
4198 if (chars_so_far)
4199 fputs ("\"\n", stream);
4200 fputs ("\t.ascii\t\"", stream);
4201 len_so_far = 0;
4202 arm_increase_location (chars_so_far);
4203 chars_so_far = 0;
4206 if (c == '\"' || c == '\\')
4208 putc('\\', stream);
4209 len_so_far++;
4212 if (c >= ' ' && c < 0177)
4214 putc (c, stream);
4215 len_so_far++;
4217 else
4219 fprintf (stream, "\\%03o", c);
4220 len_so_far +=4;
4223 chars_so_far++;
4226 fputs ("\"\n", stream);
4227 arm_increase_location (chars_so_far);
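/* For instance (illustrative): given the bytes 'h', 'i', '"' and
   '\n' this emits

	.ascii	"hi\"\012"

   quoting the double-quote, printing the newline as a three-digit
   octal escape, and starting a fresh .ascii directive roughly every
   50 characters to keep /bin/as happy.  */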
4231 /* Try to determine whether a pattern really clobbers the link register.
4232 This information is useful when peepholing, so that lr need not be pushed
4233 if we combine a call followed by a return.
4234 NOTE: This code does not check for side-effect expressions in a SET_SRC:
4235 such a check should not be needed because these only update an existing
4236 value within a register; the register must still be set elsewhere within
4237 the function. */
4239 static int
4240 pattern_really_clobbers_lr (x)
4241 rtx x;
4243 int i;
4245 switch (GET_CODE (x))
4247 case SET:
4248 switch (GET_CODE (SET_DEST (x)))
4250 case REG:
4251 return REGNO (SET_DEST (x)) == 14;
4253 case SUBREG:
4254 if (GET_CODE (XEXP (SET_DEST (x), 0)) == REG)
4255 return REGNO (XEXP (SET_DEST (x), 0)) == 14;
4257 if (GET_CODE (XEXP (SET_DEST (x), 0)) == MEM)
4258 return 0;
4259 abort ();
4261 default:
4262 return 0;
4265 case PARALLEL:
4266 for (i = 0; i < XVECLEN (x, 0); i++)
4267 if (pattern_really_clobbers_lr (XVECEXP (x, 0, i)))
4268 return 1;
4269 return 0;
4271 case CLOBBER:
4272 switch (GET_CODE (XEXP (x, 0)))
4274 case REG:
4275 return REGNO (XEXP (x, 0)) == 14;
4277 case SUBREG:
4278 if (GET_CODE (XEXP (XEXP (x, 0), 0)) == REG)
4279 return REGNO (XEXP (XEXP (x, 0), 0)) == 14;
4280 abort ();
4282 default:
4283 return 0;
4286 case UNSPEC:
4287 return 1;
4289 default:
4290 return 0;
4294 static int
4295 function_really_clobbers_lr (first)
4296 rtx first;
4298 rtx insn, next;
4300 for (insn = first; insn; insn = next_nonnote_insn (insn))
4302 switch (GET_CODE (insn))
4304 case BARRIER:
4305 case NOTE:
4306 case CODE_LABEL:
4307 case JUMP_INSN: /* Jump insns only change the PC (and conds) */
4308 case INLINE_HEADER:
4309 break;
4311 case INSN:
4312 if (pattern_really_clobbers_lr (PATTERN (insn)))
4313 return 1;
4314 break;
4316 case CALL_INSN:
4317 /* Don't yet know how to handle those calls that are not to a
4318 SYMBOL_REF */
4319 if (GET_CODE (PATTERN (insn)) != PARALLEL)
4320 abort ();
4322 switch (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)))
4324 case CALL:
4325 if (GET_CODE (XEXP (XEXP (XVECEXP (PATTERN (insn), 0, 0), 0), 0))
4326 != SYMBOL_REF)
4327 return 1;
4328 break;
4330 case SET:
4331 if (GET_CODE (XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn),
4332 0, 0)), 0), 0))
4333 != SYMBOL_REF)
4334 return 1;
4335 break;
4337 default: /* Don't recognize it, be safe */
4338 return 1;
4341 /* A call can be made (by peepholing) not to clobber lr iff it is
4342 followed by a return. There may, however, be a use insn iff
4343 we are returning the result of the call.
4344 If we run off the end of the insn chain, then that means the
4345 call was at the end of the function. Unfortunately we don't
4346 have a return insn for the peephole to recognize, so we
4347 must reject this. (Can this be fixed by adding our own insn?) */
4348 if ((next = next_nonnote_insn (insn)) == NULL)
4349 return 1;
4351 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == USE
4352 && (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
4353 && (REGNO (SET_DEST (XVECEXP (PATTERN (insn), 0, 0)))
4354 == REGNO (XEXP (PATTERN (next), 0))))
4355 if ((next = next_nonnote_insn (next)) == NULL)
4356 return 1;
4358 if (GET_CODE (next) == JUMP_INSN
4359 && GET_CODE (PATTERN (next)) == RETURN)
4360 break;
4361 return 1;
4363 default:
4364 abort ();
4368 /* We have reached the end of the chain so lr was _not_ clobbered */
4369 return 0;
4372 char *
4373 output_return_instruction (operand, really_return, reverse)
4374 rtx operand;
4375 int really_return;
4376 int reverse;
4378 char instr[100];
4379 int reg, live_regs = 0;
4380 int volatile_func = (optimize > 0
4381 && TREE_THIS_VOLATILE (current_function_decl));
4383 return_used_this_function = 1;
4385 if (volatile_func)
4387 rtx ops[2];
4388 /* If this function was declared non-returning, and we have found a tail
4389 call, then we have to trust that the called function won't return. */
4390 if (! really_return)
4391 return "";
4393 /* Otherwise, trap an attempted return by aborting. */
4394 ops[0] = operand;
4395 ops[1] = gen_rtx (SYMBOL_REF, Pmode, "abort");
4396 assemble_external_libcall (ops[1]);
4397 output_asm_insn (reverse ? "bl%D0\t%a1" : "bl%d0\t%a1", ops);
4398 return "";
4401 if (current_function_calls_alloca && ! really_return)
4402 abort();
4404 for (reg = 0; reg <= 10; reg++)
4405 if (regs_ever_live[reg] && ! call_used_regs[reg])
4406 live_regs++;
4408 if (live_regs || (regs_ever_live[14] && ! lr_save_eliminated))
4409 live_regs++;
4411 if (frame_pointer_needed)
4412 live_regs += 4;
4414 if (live_regs)
4416 if (lr_save_eliminated || ! regs_ever_live[14])
4417 live_regs++;
4419 if (frame_pointer_needed)
4420 strcpy (instr,
4421 reverse ? "ldm%?%D0ea\t%|fp, {" : "ldm%?%d0ea\t%|fp, {");
4422 else
4423 strcpy (instr,
4424 reverse ? "ldm%?%D0fd\t%|sp!, {" : "ldm%?%d0fd\t%|sp!, {");
4426 for (reg = 0; reg <= 10; reg++)
4427 if (regs_ever_live[reg] && ! call_used_regs[reg])
4429 strcat (instr, "%|");
4430 strcat (instr, reg_names[reg]);
4431 if (--live_regs)
4432 strcat (instr, ", ");
4435 if (frame_pointer_needed)
4437 strcat (instr, "%|");
4438 strcat (instr, reg_names[11]);
4439 strcat (instr, ", ");
4440 strcat (instr, "%|");
4441 strcat (instr, reg_names[13]);
4442 strcat (instr, ", ");
4443 strcat (instr, "%|");
4444 strcat (instr, really_return ? reg_names[15] : reg_names[14]);
4446 else
4448 strcat (instr, "%|");
4449 strcat (instr, really_return ? reg_names[15] : reg_names[14]);
4451 strcat (instr, (TARGET_APCS_32 || !really_return) ? "}" : "}^");
4452 output_asm_insn (instr, &operand);
4454 else if (really_return)
4456 sprintf (instr, "mov%%?%%%s0%s\t%%|pc, %%|lr",
4457 reverse ? "D" : "d", TARGET_APCS_32 ? "" : "s");
4458 output_asm_insn (instr, &operand);
4461 return "";
4464 /* Return nonzero if optimizing and the current function is volatile.
4465 Such functions never return, and many memory cycles can be saved
4466 by not storing register values that will never be needed again.
4467 This optimization was added to speed up context switching in a
4468 kernel application. */
4471 arm_volatile_func ()
4473 return (optimize > 0 && TREE_THIS_VOLATILE (current_function_decl));
4476 /* Return the size of the prologue. It's not too bad if we slightly
4477 over-estimate. */
4479 static int
4480 get_prologue_size ()
4482 return profile_flag ? 12 : 0;
4485 /* The amount of stack adjustment that happens here, in output_return and in
4486 output_epilogue must be exactly the same as was calculated during reload,
4487 or things will point to the wrong place. The only time we can safely
4488 ignore this constraint is when a function has no arguments on the stack,
4489 no stack frame requirement and no live registers except for `lr'. If we
4490 can guarantee that by making all function calls into tail calls and that
4491 lr is not clobbered in any other way, then there is no need to push lr
4492 onto the stack. */
4494 void
4495 output_func_prologue (f, frame_size)
4496 FILE *f;
4497 int frame_size;
4499 int reg, live_regs_mask = 0;
4500 rtx operands[3];
4501 int volatile_func = (optimize > 0
4502 && TREE_THIS_VOLATILE (current_function_decl));
4504 /* Nonzero if we must stuff some register arguments onto the stack as if
4505 they were passed there. */
4506 int store_arg_regs = 0;
4508 if (arm_ccfsm_state || arm_target_insn)
4509 abort (); /* Sanity check */
4511 if (arm_naked_function_p (current_function_decl))
4512 return;
4514 return_used_this_function = 0;
4515 lr_save_eliminated = 0;
4517 fprintf (f, "\t%s args = %d, pretend = %d, frame = %d\n",
4518 ASM_COMMENT_START, current_function_args_size,
4519 current_function_pretend_args_size, frame_size);
4520 fprintf (f, "\t%s frame_needed = %d, current_function_anonymous_args = %d\n",
4521 ASM_COMMENT_START, frame_pointer_needed,
4522 current_function_anonymous_args);
4524 if (volatile_func)
4525 fprintf (f, "\t%s Volatile function.\n", ASM_COMMENT_START);
4527 if (current_function_anonymous_args && current_function_pretend_args_size)
4528 store_arg_regs = 1;
4530 for (reg = 0; reg <= 10; reg++)
4531 if (regs_ever_live[reg] && ! call_used_regs[reg])
4532 live_regs_mask |= (1 << reg);
4534 if (frame_pointer_needed)
4535 live_regs_mask |= 0xD800;
4536 else if (regs_ever_live[14])
4538 if (! current_function_args_size
4539 && ! function_really_clobbers_lr (get_insns ()))
4540 lr_save_eliminated = 1;
4541 else
4542 live_regs_mask |= 0x4000;
4545 if (live_regs_mask)
4547 /* If a DImode load/store multiple is used, and the base register
4548 is r3, then r4 can become an ever-live register without lr
4549 becoming one too; in this case we need to push lr as well, or we
4550 will fail to get a proper return. */
4552 live_regs_mask |= 0x4000;
4553 lr_save_eliminated = 0;
4557 if (lr_save_eliminated)
4558 fprintf (f, "\t%s I don't think this function clobbers lr\n",
4559 ASM_COMMENT_START);
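/* A sketch of the register-mask encoding used above; illustrative only.
   Bit N of LIVE_REGS_MASK selects hard register N, so with the APCS
   names fp = r11, ip = r12, sp = r13, lr = r14, pc = r15:

	0x4000 = 1 << 14			 -> {lr}
	0x8000 = 1 << 15			 -> {pc}
	0xD800 = (1<<11)|(1<<12)|(1<<14)|(1<<15) -> {fp, ip, lr, pc}
	0xA800 = (1<<11)|(1<<13)|(1<<15)	 -> {fp, sp, pc}

   The hypothetical helper below merely decodes such a mask for
   debugging; it is not used anywhere in the compiler.  */
#if 0
static void
debug_print_reg_mask (f, mask)
     FILE *f;
     int mask;
{
  int reg;
  int first = 1;

  fputc ('{', f);
  for (reg = 0; reg < 16; reg++)
    if (mask & (1 << reg))
      {
	fprintf (f, first ? "%s%s" : ", %s%s", REGISTER_PREFIX,
		 reg_names[reg]);
	first = 0;
      }
  fputs ("}\n", f);
}
#endif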
4563 void
4564 output_func_epilogue (f, frame_size)
4565 FILE *f;
4566 int frame_size;
4568 int reg, live_regs_mask = 0, code_size = 0;
4569 /* If we need this, then it will always be at least this much. */
4570 int floats_offset = 24;
4571 rtx operands[3];
4572 int volatile_func = (optimize > 0
4573 && TREE_THIS_VOLATILE (current_function_decl));
4575 if (use_return_insn () && return_used_this_function)
4577 if (frame_size && !(frame_pointer_needed || TARGET_APCS))
4579 abort ();
4581 goto epilogue_done;
4584 /* Naked functions don't have epilogues. */
4585 if (arm_naked_function_p (current_function_decl))
4586 goto epilogue_done;
4588 /* A volatile function should never return. Call abort. */
4589 if (volatile_func)
4591 rtx op = gen_rtx (SYMBOL_REF, Pmode, "abort");
4592 assemble_external_libcall (op);
4593 output_asm_insn ("bl\t%a0", &op);
4594 code_size = 4;
4595 goto epilogue_done;
4598 for (reg = 0; reg <= 10; reg++)
4599 if (regs_ever_live[reg] && ! call_used_regs[reg])
4601 live_regs_mask |= (1 << reg);
4602 floats_offset += 4;
4605 if (frame_pointer_needed)
4607 for (reg = 23; reg > 15; reg--)
4608 if (regs_ever_live[reg] && ! call_used_regs[reg])
4610 fprintf (f, "\tldfe\t%s%s, [%sfp, #-%d]\n", REGISTER_PREFIX,
4611 reg_names[reg], REGISTER_PREFIX, floats_offset);
4612 floats_offset += 12;
4613 code_size += 4;
4616 live_regs_mask |= 0xA800;
4617 print_multi_reg (f, "ldmea\t%sfp", live_regs_mask,
4618 TARGET_APCS_32 ? FALSE : TRUE);
4619 code_size += 4;
4621 else
4623 /* Restore stack pointer if necessary. */
4624 if (frame_size)
4626 operands[0] = operands[1] = stack_pointer_rtx;
4627 operands[2] = gen_rtx (CONST_INT, VOIDmode, frame_size);
4628 output_add_immediate (operands);
4631 for (reg = 16; reg < 24; reg++)
4632 if (regs_ever_live[reg] && ! call_used_regs[reg])
4634 fprintf (f, "\tldfe\t%s%s, [%ssp], #12\n", REGISTER_PREFIX,
4635 reg_names[reg], REGISTER_PREFIX);
4636 code_size += 4;
4638 if (current_function_pretend_args_size == 0 && regs_ever_live[14])
4640 print_multi_reg (f, "ldmfd\t%ssp!", live_regs_mask | 0x8000,
4641 TARGET_APCS_32 ? FALSE : TRUE);
4642 code_size += 4;
4644 else
4646 if (live_regs_mask || regs_ever_live[14])
4648 live_regs_mask |= 0x4000;
4649 print_multi_reg (f, "ldmfd\t%ssp!", live_regs_mask, FALSE);
4650 code_size += 4;
4652 if (current_function_pretend_args_size)
4654 operands[0] = operands[1] = stack_pointer_rtx;
4655 operands[2] = gen_rtx (CONST_INT, VOIDmode,
4656 current_function_pretend_args_size);
4657 output_add_immediate (operands);
4659 fprintf (f, (TARGET_APCS_32 ? "\tmov\t%spc, %slr\n"
4660 : "\tmovs\t%spc, %slr\n"),
4661 REGISTER_PREFIX, REGISTER_PREFIX);
4662 code_size += 4;
4666 epilogue_done:
4668 /* insn_addresses isn't allocated when not optimizing */
4670 if (optimize > 0)
4671 arm_increase_location (code_size
4672 + insn_addresses[INSN_UID (get_last_insn ())]
4673 + get_prologue_size ());
4675 current_function_anonymous_args = 0;
4678 static void
4679 emit_multi_reg_push (mask)
4680 int mask;
4682 int num_regs = 0;
4683 int i, j;
4684 rtx par;
4686 for (i = 0; i < 16; i++)
4687 if (mask & (1 << i))
4688 num_regs++;
4690 if (num_regs == 0 || num_regs > 16)
4691 abort ();
4693 par = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num_regs));
4695 for (i = 0; i < 16; i++)
4697 if (mask & (1 << i))
4699 XVECEXP (par, 0, 0)
4700 = gen_rtx (SET, VOIDmode, gen_rtx (MEM, BLKmode,
4701 gen_rtx (PRE_DEC, BLKmode,
4702 stack_pointer_rtx)),
4703 gen_rtx (UNSPEC, BLKmode,
4704 gen_rtvec (1, gen_rtx (REG, SImode, i)),
4705 2));
4706 break;
4710 for (j = 1, i++; j < num_regs; i++)
4712 if (mask & (1 << i))
4714 XVECEXP (par, 0, j)
4715 = gen_rtx (USE, VOIDmode, gen_rtx (REG, SImode, i));
4716 j++;
4719 emit_insn (par);
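/* A usage sketch, for illustration only.  A call such as

	emit_multi_reg_push (0x4070);		(bits 4, 5, 6 and 14 set)

   builds the PARALLEL above and eventually assembles to

	stmfd	sp!, {r4, r5, r6, lr}

   The first element carries the actual store-multiple as a SET; the
   remaining elements are USEs that record which other registers take
   part.  */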
4722 void
4723 arm_expand_prologue ()
4725 int reg;
4726 rtx amount = GEN_INT (- get_frame_size ());
4727 rtx push_insn;
4728 int num_regs;
4729 int live_regs_mask = 0;
4730 int store_arg_regs = 0;
4731 int volatile_func = (optimize > 0
4732 && TREE_THIS_VOLATILE (current_function_decl));
4734 /* Naked functions don't have prologues. */
4735 if (arm_naked_function_p (current_function_decl))
4736 return;
4738 if (current_function_anonymous_args && current_function_pretend_args_size)
4739 store_arg_regs = 1;
4741 if (! volatile_func)
4742 for (reg = 0; reg <= 10; reg++)
4743 if (regs_ever_live[reg] && ! call_used_regs[reg])
4744 live_regs_mask |= 1 << reg;
4746 if (! volatile_func && regs_ever_live[14])
4747 live_regs_mask |= 0x4000;
4749 if (frame_pointer_needed)
4751 live_regs_mask |= 0xD800;
4752 emit_insn (gen_movsi (gen_rtx (REG, SImode, 12),
4753 stack_pointer_rtx));
4756 if (current_function_pretend_args_size)
4758 if (store_arg_regs)
4759 emit_multi_reg_push ((0xf0 >> (current_function_pretend_args_size / 4))
4760 & 0xf);
4761 else
4762 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
4763 GEN_INT (-current_function_pretend_args_size)));
4766 if (live_regs_mask)
4768 /* If we have to push any regs, then we must push lr as well, or
4769 we won't get a proper return. */
4770 live_regs_mask |= 0x4000;
4771 emit_multi_reg_push (live_regs_mask);
4774 /* The floating-point regs are pushed here; for now they are still popped in output_func_epilogue (). */
4776 if (! volatile_func)
4777 for (reg = 23; reg > 15; reg--)
4778 if (regs_ever_live[reg] && ! call_used_regs[reg])
4779 emit_insn (gen_rtx (SET, VOIDmode,
4780 gen_rtx (MEM, XFmode,
4781 gen_rtx (PRE_DEC, XFmode,
4782 stack_pointer_rtx)),
4783 gen_rtx (REG, XFmode, reg)));
4785 if (frame_pointer_needed)
4786 emit_insn (gen_addsi3 (hard_frame_pointer_rtx, gen_rtx (REG, SImode, 12),
4787 (GEN_INT
4788 (-(4 + current_function_pretend_args_size)))));
4790 if (amount != const0_rtx)
4792 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, amount));
4793 emit_insn (gen_rtx (CLOBBER, VOIDmode,
4794 gen_rtx (MEM, BLKmode, stack_pointer_rtx)));
4797 /* If we are profiling, make sure no instructions are scheduled before
4798 the call to mcount. */
4799 if (profile_flag || profile_block_flag)
4800 emit_insn (gen_blockage ());
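/* A worked example of the anonymous-argument mask computed above; a
   sketch, not normative.  With 8 bytes of pretend args (two words):

	0xf0 >> (8 / 4) = 0x3c,   0x3c & 0xf = 0xc   ->  {r2, r3}

   so only the last two argument registers are stuffed onto the stack,
   making the register and stacked arguments contiguous.  */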
4804 /* If CODE is 'd', then X is a condition operand and the instruction
4805 should only be executed if the condition is true.
4806 If CODE is 'D', then X is a condition operand and the instruction
4807 should only be executed if the condition is false: however, if the mode
4808 of the comparison is CCFPEmode, then always execute the instruction -- we
4809 do this because in these circumstances !GE does not necessarily imply LT;
4810 in these cases the instruction pattern will take care to make sure that
4811 an instruction containing %d will follow, thereby undoing the effects of
4812 doing this instruction unconditionally.
4813 If CODE is 'N' then X is a floating point operand that must be negated
4814 before output.
4815 If CODE is 'B' then output a bitwise inverted value of X (a const int).
4816 If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */
4818 void
4819 arm_print_operand (stream, x, code)
4820 FILE *stream;
4821 rtx x;
4822 int code;
4824 switch (code)
4826 case '@':
4827 fputs (ASM_COMMENT_START, stream);
4828 return;
4830 case '|':
4831 fputs (REGISTER_PREFIX, stream);
4832 return;
4834 case '?':
4835 if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
4836 fputs (arm_condition_codes[arm_current_cc], stream);
4837 return;
4839 case 'N':
4841 REAL_VALUE_TYPE r;
4842 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
4843 r = REAL_VALUE_NEGATE (r);
4844 fprintf (stream, "%s", fp_const_from_val (&r));
4846 return;
4848 case 'B':
4849 if (GET_CODE (x) == CONST_INT)
4850 fprintf (stream,
4851 #if HOST_BITS_PER_WIDE_INT == HOST_BITS_PER_INT
4852 "%d",
4853 #else
4854 "%ld",
4855 #endif
4856 ARM_SIGN_EXTEND (~ INTVAL (x)));
4857 else
4859 putc ('~', stream);
4860 output_addr_const (stream, x);
4862 return;
4864 case 'i':
4865 fprintf (stream, "%s", arithmetic_instr (x, 1));
4866 return;
4868 case 'I':
4869 fprintf (stream, "%s", arithmetic_instr (x, 0));
4870 return;
4872 case 'S':
4874 HOST_WIDE_INT val;
4875 char *shift = shift_op (x, &val);
4877 if (shift)
4879 fprintf (stream, ", %s ", shift);
4880 if (val == -1)
4881 arm_print_operand (stream, XEXP (x, 1), 0);
4882 else
4883 fprintf (stream,
4884 #if HOST_BITS_PER_WIDE_INT == HOST_BITS_PER_INT
4885 "#%d",
4886 #else
4887 "#%ld",
4888 #endif
4889 val);
4892 return;
4894 case 'Q':
4895 if (REGNO (x) > 15)
4896 abort ();
4897 fputs (REGISTER_PREFIX, stream);
4898 fputs (reg_names[REGNO (x) + (WORDS_BIG_ENDIAN ? 1 : 0)], stream);
4899 return;
4901 case 'R':
4902 if (REGNO (x) > 15)
4903 abort ();
4904 fputs (REGISTER_PREFIX, stream);
4905 fputs (reg_names[REGNO (x) + (WORDS_BIG_ENDIAN ? 0 : 1)], stream);
4906 return;
4908 case 'm':
4909 fputs (REGISTER_PREFIX, stream);
4910 if (GET_CODE (XEXP (x, 0)) == REG)
4911 fputs (reg_names[REGNO (XEXP (x, 0))], stream);
4912 else
4913 fputs (reg_names[REGNO (XEXP (XEXP (x, 0), 0))], stream);
4914 return;
4916 case 'M':
4917 fprintf (stream, "{%s%s-%s%s}", REGISTER_PREFIX, reg_names[REGNO (x)],
4918 REGISTER_PREFIX, reg_names[REGNO (x) - 1
4919 + ((GET_MODE_SIZE (GET_MODE (x))
4920 + GET_MODE_SIZE (SImode) - 1)
4921 / GET_MODE_SIZE (SImode))]);
4922 return;
4924 case 'd':
4925 if (x)
4926 fputs (arm_condition_codes[get_arm_condition_code (x)],
4927 stream);
4928 return;
4930 case 'D':
4931 if (x)
4932 fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
4933 (get_arm_condition_code (x))],
4934 stream);
4935 return;
4937 default:
4938 if (x == 0)
4939 abort ();
4941 if (GET_CODE (x) == REG)
4943 fputs (REGISTER_PREFIX, stream);
4944 fputs (reg_names[REGNO (x)], stream);
4946 else if (GET_CODE (x) == MEM)
4948 output_memory_reference_mode = GET_MODE (x);
4949 output_address (XEXP (x, 0));
4951 else if (GET_CODE (x) == CONST_DOUBLE)
4952 fprintf (stream, "#%s", fp_immediate_constant (x));
4953 else if (GET_CODE (x) == NEG)
4954 abort (); /* This should never happen now. */
4955 else
4957 fputc ('#', stream);
4958 output_addr_const (stream, x);
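/* Some sample expansions of the codes handled above; a sketch assuming
   REGISTER_PREFIX is empty and the current condition is EQ:

	"%|fp"				-> "fp"
	"%?"				-> "eq"  (only while conditionalising)
	"mov%d0..." with X an EQ test	-> "moveq..."
	"mov%D0..." with X an EQ test	-> "movne..."
	"%M" with X a DImode r0		-> "{r0-r1}"  */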
4963 /* Increase the `arm_text_location' by AMOUNT if we're in the text
4964 segment. */
4966 void
4967 arm_increase_location (amount)
4968 int amount;
4970 if (in_text_section ())
4971 arm_text_location += amount;
4975 /* Output a label definition. If this label is within the .text segment, it
4976 is stored in OFFSET_TABLE, to be used when building `llc' instructions.
4977 GCC may remember names not starting with a `*' for a long time, but such
4978 names are in the minority anyway, so we just make a copy. Do not store
4979 the leading `*' if the name starts with one. */
4981 void
4982 arm_asm_output_label (stream, name)
4983 FILE *stream;
4984 char *name;
4986 char *real_name, *s;
4987 struct label_offset *cur;
4988 int hash = 0;
4990 ARM_OUTPUT_LABEL (stream, name);
4991 if (! in_text_section ())
4992 return;
4994 if (name[0] == '*')
4996 real_name = xmalloc (1 + strlen (&name[1]));
4997 strcpy (real_name, &name[1]);
4999 else
5001 real_name = xmalloc (2 + strlen (name));
5002 strcpy (real_name, USER_LABEL_PREFIX);
5003 strcat (real_name, name);
5005 for (s = real_name; *s; s++)
5006 hash += *s;
5008 hash = hash % LABEL_HASH_SIZE;
5009 cur = (struct label_offset *) xmalloc (sizeof (struct label_offset));
5010 cur->name = real_name;
5011 cur->offset = arm_text_location;
5012 cur->cdr = offset_table[hash];
5013 offset_table[hash] = cur;
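/* A sketch of how an entry recorded above might be looked up again.
   `find_label_offset' is hypothetical and unused; the real consumers
   of OFFSET_TABLE appear elsewhere.  */
#if 0
static struct label_offset *
find_label_offset (name)
     char *name;
{
  struct label_offset *cur;
  char *s;
  int hash = 0;

  /* Same additive hash that arm_asm_output_label uses when storing.  */
  for (s = name; *s; s++)
    hash += *s;
  hash = hash % LABEL_HASH_SIZE;

  for (cur = offset_table[hash]; cur; cur = cur->cdr)
    if (! strcmp (cur->name, name))
      return cur;
  return NULL;
}
#endif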
5016 /* Output code resembling an .lcomm directive. /bin/as doesn't have this
5017 directive, hence this hack, which works by reserving some `.space' in the
5018 bss segment directly.
5020 XXX This is a severe hack, which is guaranteed NOT to work since it doesn't
5021 define STATIC COMMON space but merely STATIC BSS space. */
5023 void
5024 output_lcomm_directive (stream, name, size, align)
5025 FILE *stream;
5026 char *name;
5027 int size, align;
5029 bss_section ();
5030 ASM_OUTPUT_ALIGN (stream, floor_log2 (align / BITS_PER_UNIT));
5031 ARM_OUTPUT_LABEL (stream, name);
5032 fprintf (stream, "\t.space\t%d\n", size);
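/* An illustrative sketch of the output: a call such as

	output_lcomm_directive (stream, "buffer", 64, 32);

   emits, into the bss section, something like

	.align	2
   buffer:
	.space	64

   assuming BITS_PER_UNIT is 8, so the 32-bit alignment request becomes
   floor_log2 (4) = 2.  */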
5035 /* A finite state machine takes care of noticing whether or not instructions
5036 can be conditionally executed, and thus decrease execution time and code
5037 size by deleting branch instructions. The fsm is controlled by
5038 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
5040 /* The states of the fsm controlling condition codes are:
5041 0: normal, do nothing special
5042 1: make ASM_OUTPUT_OPCODE not output this instruction
5043 2: make ASM_OUTPUT_OPCODE not output this instruction
5044 3: make instructions conditional
5045 4: make instructions conditional
5047 State transitions (state->state by whom under condition):
5048 0 -> 1 final_prescan_insn if the `target' is a label
5049 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
5050 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
5051 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
5052 3 -> 0 ASM_OUTPUT_INTERNAL_LABEL if the `target' label is reached
5053 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
5054 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
5055 (the target insn is arm_target_insn).
5057 If the jump clobbers the conditions then we use states 2 and 4.
5059 A similar thing can be done with conditional return insns.
5061 XXX In case the `target' is an unconditional branch, this conditionalising
5062 of the instructions always reduces code size, but not always execution
5063 time. But then, I want to reduce the code size to somewhere near what
5064 /bin/cc produces. */
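/* An illustrative example of the transformation (a sketch, not taken
   from real compiler output).  For `if (x == 0) x = 1;' the naive
   output would be

	cmp	r0, #0
	bne	.L1
	mov	r0, #1
   .L1:

   After conditionalising (states 0 -> 1 -> 3 -> 0) the branch and the
   label disappear:

	cmp	r0, #0
	moveq	r0, #1

   The skipped instruction now executes only when the branch would not
   have been taken, i.e. under the inverse of the branch condition.  */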
5066 /* Returns the index of the ARM condition code string in
5067 `arm_condition_codes'. COMPARISON should be an rtx like
5068 `(eq (...) (...))'. */
5070 static enum arm_cond_code
5071 get_arm_condition_code (comparison)
5072 rtx comparison;
5074 enum machine_mode mode = GET_MODE (XEXP (comparison, 0));
5075 register int code;
5076 register enum rtx_code comp_code = GET_CODE (comparison);
5078 if (GET_MODE_CLASS (mode) != MODE_CC)
5079 mode = SELECT_CC_MODE (comp_code, XEXP (comparison, 0),
5080 XEXP (comparison, 1));
5082 switch (mode)
5084 case CC_DNEmode: code = ARM_NE; goto dominance;
5085 case CC_DEQmode: code = ARM_EQ; goto dominance;
5086 case CC_DGEmode: code = ARM_GE; goto dominance;
5087 case CC_DGTmode: code = ARM_GT; goto dominance;
5088 case CC_DLEmode: code = ARM_LE; goto dominance;
5089 case CC_DLTmode: code = ARM_LT; goto dominance;
5090 case CC_DGEUmode: code = ARM_CS; goto dominance;
5091 case CC_DGTUmode: code = ARM_HI; goto dominance;
5092 case CC_DLEUmode: code = ARM_LS; goto dominance;
5093 case CC_DLTUmode: code = ARM_CC;
5095 dominance:
5096 if (comp_code != EQ && comp_code != NE)
5097 abort ();
5099 if (comp_code == EQ)
5100 return ARM_INVERSE_CONDITION_CODE (code);
5101 return code;
5103 case CC_NOOVmode:
5104 switch (comp_code)
5106 case NE: return ARM_NE;
5107 case EQ: return ARM_EQ;
5108 case GE: return ARM_PL;
5109 case LT: return ARM_MI;
5110 default: abort ();
5113 case CC_Zmode:
5114 case CCFPmode:
5115 switch (comp_code)
5117 case NE: return ARM_NE;
5118 case EQ: return ARM_EQ;
5119 default: abort ();
5122 case CCFPEmode:
5123 switch (comp_code)
5125 case GE: return ARM_GE;
5126 case GT: return ARM_GT;
5127 case LE: return ARM_LS;
5128 case LT: return ARM_MI;
5129 default: abort ();
5132 case CC_SWPmode:
5133 switch (comp_code)
5135 case NE: return ARM_NE;
5136 case EQ: return ARM_EQ;
5137 case GE: return ARM_LE;
5138 case GT: return ARM_LT;
5139 case LE: return ARM_GE;
5140 case LT: return ARM_GT;
5141 case GEU: return ARM_LS;
5142 case GTU: return ARM_CC;
5143 case LEU: return ARM_CS;
5144 case LTU: return ARM_HI;
5145 default: abort ();
5148 case CC_Cmode:
5149 switch (comp_code)
5151 case LTU: return ARM_CS;
5152 case GEU: return ARM_CC;
5153 default: abort ();
5156 case CCmode:
5157 switch (comp_code)
5159 case NE: return ARM_NE;
5160 case EQ: return ARM_EQ;
5161 case GE: return ARM_GE;
5162 case GT: return ARM_GT;
5163 case LE: return ARM_LE;
5164 case LT: return ARM_LT;
5165 case GEU: return ARM_CS;
5166 case GTU: return ARM_HI;
5167 case LEU: return ARM_LS;
5168 case LTU: return ARM_CC;
5169 default: abort ();
5172 default: abort ();
5175 abort ();
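/* Why CC_SWPmode reverses the sense of the test above: if the operands
   were swapped when the flags were set, the flags describe the
   comparison of B with A rather than A with B, so for example

	a > b   <=>  b < a	GT is tested as LT
	a >= b  <=>  b <= a	GE is tested as LE
	a >= b  (unsigned)	GEU is tested as LS

   and similarly for the remaining unsigned variants.  */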
5179 void
5180 final_prescan_insn (insn, opvec, noperands)
5181 rtx insn;
5182 rtx *opvec;
5183 int noperands;
5185 /* BODY will hold the body of INSN. */
5186 register rtx body = PATTERN (insn);
5188 /* REVERSE will be 1 if we are trying to repeat the trick, and things
5189 need to be reversed if it appears to fail. */
5190 int reverse = 0;
5192 /* Nonzero JUMP_CLOBBERS means that the condition codes are clobbered if
5193 the branch is taken, even if the rtl suggests otherwise. It also
5194 means that we have to grub around within the jump expression to find
5195 out what the conditions are when the jump isn't taken. */
5196 int jump_clobbers = 0;
5198 /* If we start with a return insn, we only succeed if we find another one. */
5199 int seeking_return = 0;
5201 /* START_INSN will hold the insn from where we start looking. This is the
5202 first insn after the following code_label if REVERSE is true. */
5203 rtx start_insn = insn;
5205 /* If in state 4, check if the target branch is reached, in order to
5206 change back to state 0. */
5207 if (arm_ccfsm_state == 4)
5209 if (insn == arm_target_insn)
5211 arm_target_insn = NULL;
5212 arm_ccfsm_state = 0;
5214 return;
5217 /* If in state 3, it is possible to repeat the trick, if this insn is an
5218 unconditional branch to a label, and immediately following this branch
5219 is the previous target label which is only used once, and the label this
5220 branch jumps to is not too far off. */
5221 if (arm_ccfsm_state == 3)
5223 if (simplejump_p (insn))
5225 start_insn = next_nonnote_insn (start_insn);
5226 if (GET_CODE (start_insn) == BARRIER)
5228 /* XXX Isn't this always a barrier? */
5229 start_insn = next_nonnote_insn (start_insn);
5231 if (GET_CODE (start_insn) == CODE_LABEL
5232 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
5233 && LABEL_NUSES (start_insn) == 1)
5234 reverse = TRUE;
5235 else
5236 return;
5238 else if (GET_CODE (body) == RETURN)
5240 start_insn = next_nonnote_insn (start_insn);
5241 if (GET_CODE (start_insn) == BARRIER)
5242 start_insn = next_nonnote_insn (start_insn);
5243 if (GET_CODE (start_insn) == CODE_LABEL
5244 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
5245 && LABEL_NUSES (start_insn) == 1)
5247 reverse = TRUE;
5248 seeking_return = 1;
5250 else
5251 return;
5253 else
5254 return;
5257 if (arm_ccfsm_state != 0 && !reverse)
5258 abort ();
5259 if (GET_CODE (insn) != JUMP_INSN)
5260 return;
5262 /* This jump might be paralleled with a clobber of the condition codes;
5263 the jump should always come first. */
5264 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
5265 body = XVECEXP (body, 0, 0);
5267 #if 0
5268 /* If this is a conditional return then we don't want to know */
5269 if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
5270 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
5271 && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
5272 || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
5273 return;
5274 #endif
5276 if (reverse
5277 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
5278 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
5280 int insns_skipped;
5281 int fail = FALSE, succeed = FALSE;
5282 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
5283 int then_not_else = TRUE;
5284 rtx this_insn = start_insn, label = 0;
5286 if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
5288 /* The code below is wrong for these, and I haven't time to
5289 fix it now. So we just do the safe thing and return. This
5290 whole function needs re-writing anyway. */
5291 jump_clobbers = 1;
5292 return;
5295 /* Register the insn jumped to. */
5296 if (reverse)
5298 if (!seeking_return)
5299 label = XEXP (SET_SRC (body), 0);
5301 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
5302 label = XEXP (XEXP (SET_SRC (body), 1), 0);
5303 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
5305 label = XEXP (XEXP (SET_SRC (body), 2), 0);
5306 then_not_else = FALSE;
5308 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
5309 seeking_return = 1;
5310 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
5312 seeking_return = 1;
5313 then_not_else = FALSE;
5315 else
5316 abort ();
5318 /* See how many insns this branch skips, and what kind of insns. If all
5319 insns are okay, and the label or unconditional branch to the same
5320 label is not too far away, succeed. */
5321 for (insns_skipped = 0;
5322 !fail && !succeed && insns_skipped++ < MAX_INSNS_SKIPPED;)
5324 rtx scanbody;
5326 this_insn = next_nonnote_insn (this_insn);
5327 if (!this_insn)
5328 break;
5330 scanbody = PATTERN (this_insn);
5332 switch (GET_CODE (this_insn))
5334 case CODE_LABEL:
5335 /* Succeed if it is the target label, otherwise fail since
5336 control falls in from somewhere else. */
5337 if (this_insn == label)
5339 if (jump_clobbers)
5341 arm_ccfsm_state = 2;
5342 this_insn = next_nonnote_insn (this_insn);
5344 else
5345 arm_ccfsm_state = 1;
5346 succeed = TRUE;
5348 else
5349 fail = TRUE;
5350 break;
5352 case BARRIER:
5353 /* Succeed if the following insn is the target label.
5354 Otherwise fail.
5355 If return insns are used then the last insn in a function
5356 will be a barrier. */
5357 this_insn = next_nonnote_insn (this_insn);
5358 if (this_insn && this_insn == label)
5360 if (jump_clobbers)
5362 arm_ccfsm_state = 2;
5363 this_insn = next_nonnote_insn (this_insn);
5365 else
5366 arm_ccfsm_state = 1;
5367 succeed = TRUE;
5369 else
5370 fail = TRUE;
5371 break;
5373 case CALL_INSN:
5374 /* If using 32-bit addresses, the cc is not preserved over
5375 calls. */
5376 if (TARGET_APCS_32)
5378 /* Succeed if the following insn is the target label,
5379 or if the following two insns are a barrier and
5380 the target label. */
5381 this_insn = next_nonnote_insn (this_insn);
5382 if (this_insn && GET_CODE (this_insn) == BARRIER)
5383 this_insn = next_nonnote_insn (this_insn);
5385 if (this_insn && this_insn == label
5386 && insns_skipped < MAX_INSNS_SKIPPED)
5388 if (jump_clobbers)
5390 arm_ccfsm_state = 2;
5391 this_insn = next_nonnote_insn (this_insn);
5393 else
5394 arm_ccfsm_state = 1;
5395 succeed = TRUE;
5397 else
5398 fail = TRUE;
5400 break;
5402 case JUMP_INSN:
5403 /* If this is an unconditional branch to the same label, succeed.
5404 If it is to another label, do nothing. If it is conditional,
5405 fail. */
5406 /* XXX Probably, the tests for the SET and the PC are unnecessary. */
5408 if (GET_CODE (scanbody) == SET
5409 && GET_CODE (SET_DEST (scanbody)) == PC)
5411 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
5412 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
5414 arm_ccfsm_state = 2;
5415 succeed = TRUE;
5417 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
5418 fail = TRUE;
5420 else if (GET_CODE (scanbody) == RETURN
5421 && seeking_return)
5423 arm_ccfsm_state = 2;
5424 succeed = TRUE;
5426 else if (GET_CODE (scanbody) == PARALLEL)
5428 switch (get_attr_conds (this_insn))
5430 case CONDS_NOCOND:
5431 break;
5432 default:
5433 fail = TRUE;
5434 break;
5437 break;
5439 case INSN:
5440 /* Instructions using or affecting the condition codes make it
5441 fail. */
5442 if ((GET_CODE (scanbody) == SET
5443 || GET_CODE (scanbody) == PARALLEL)
5444 && get_attr_conds (this_insn) != CONDS_NOCOND)
5445 fail = TRUE;
5446 break;
5448 default:
5449 break;
5452 if (succeed)
5454 if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
5455 arm_target_label = CODE_LABEL_NUMBER (label);
5456 else if (seeking_return || arm_ccfsm_state == 2)
5458 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
5460 this_insn = next_nonnote_insn (this_insn);
5461 if (this_insn && (GET_CODE (this_insn) == BARRIER
5462 || GET_CODE (this_insn) == CODE_LABEL))
5463 abort ();
5465 if (!this_insn)
5467 /* Oh dear! We ran off the end... give up. */
5468 recog (PATTERN (insn), insn, NULL_PTR);
5469 arm_ccfsm_state = 0;
5470 arm_target_insn = NULL;
5471 return;
5473 arm_target_insn = this_insn;
5475 else
5476 abort ();
5477 if (jump_clobbers)
5479 if (reverse)
5480 abort ();
5481 arm_current_cc =
5482 get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
5483 0), 0), 1));
5484 if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
5485 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
5486 if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
5487 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
5489 else
5491 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
5492 what it was. */
5493 if (!reverse)
5494 arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
5495 0));
5498 if (reverse || then_not_else)
5499 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
5501 /* Restore recog_operand (getting the attributes of other insns can
5502 destroy this array, but final.c assumes that it remains intact
5503 across this call; since the insn has been recognized already we
5504 call recog directly). */
5505 recog (PATTERN (insn), insn, NULL_PTR);
5509 #ifdef AOF_ASSEMBLER
5510 /* Special functions only needed when producing AOF syntax assembler. */
5512 int arm_text_section_count = 1;
5514 char *
5515 aof_text_section ()
5517 static char buf[100];
5518 sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
5519 arm_text_section_count++);
5520 if (flag_pic)
5521 strcat (buf, ", PIC, REENTRANT");
5522 return buf;
5525 static int arm_data_section_count = 1;
5527 char *
5528 aof_data_section ()
5530 static char buf[100];
5531 sprintf (buf, "\tAREA |C$$data%d|, DATA", arm_data_section_count++);
5532 return buf;
5535 /* The AOF assembler is religiously strict about declarations of
5536 imported and exported symbols, so that it is impossible to declare
5537 a function as imported near the beginning of the file, and then to
5538 export it later on. It is, however, possible to delay the decision
5539 until all the functions in the file have been compiled. To get
5540 around this, we maintain a list of the imports and exports, and
5541 delete from it any that are subsequently defined. At the end of
5542 compilation we spit the remainder of the list out before the END
5543 directive. */
5545 struct import
5547 struct import *next;
5548 char *name;
5551 static struct import *imports_list = NULL;
5553 void
5554 aof_add_import (name)
5555 char *name;
5557 struct import *new;
5559 for (new = imports_list; new; new = new->next)
5560 if (new->name == name)
5561 return;
5563 new = (struct import *) xmalloc (sizeof (struct import));
5564 new->next = imports_list;
5565 imports_list = new;
5566 new->name = name;
5569 void
5570 aof_delete_import (name)
5571 char *name;
5573 struct import **old;
5575 for (old = &imports_list; *old; old = & (*old)->next)
5577 if ((*old)->name == name)
5579 *old = (*old)->next;
5580 return;
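/* The expected calling pattern, sketched for illustration:

	aof_add_import (name);		on the first external reference
	aof_delete_import (name);	when a definition is later seen

   Whatever survives to the end of compilation is what aof_dump_imports
   writes out, just before the END directive.  */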
5585 int arm_main_function = 0;
5587 void
5588 aof_dump_imports (f)
5589 FILE *f;
5591 /* The AOF assembler needs this to cause the startup code to be extracted
5592 from the library. Bringing in __main causes the whole thing to work
5593 automagically. */
5594 if (arm_main_function)
5596 text_section ();
5597 fputs ("\tIMPORT __main\n", f);
5598 fputs ("\tDCD __main\n", f);
5601 /* Now dump the remaining imports. */
5602 while (imports_list)
5604 fprintf (f, "\tIMPORT\t");
5605 assemble_name (f, imports_list->name);
5606 fputc ('\n', f);
5607 imports_list = imports_list->next;
5610 #endif /* AOF_ASSEMBLER */