gcc/config/arm/arm.c
1 /* Output routines for GCC for ARM/RISCiX.
2 Copyright (C) 1991, 1993, 1994 Free Software Foundation, Inc.
3 Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 and Martin Simmons (@harleqn.co.uk).
5 More major hacks by Richard Earnshaw (rwe11@cl.cam.ac.uk)
7 This file is part of GNU CC.
9 GNU CC is free software; you can redistribute it and/or modify
10 it under the terms of the GNU General Public License as published by
11 the Free Software Foundation; either version 2, or (at your option)
12 any later version.
14 GNU CC is distributed in the hope that it will be useful,
15 but WITHOUT ANY WARRANTY; without even the implied warranty of
16 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 GNU General Public License for more details.
19 You should have received a copy of the GNU General Public License
20 along with GNU CC; see the file COPYING. If not, write to
21 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
23 #include <stdio.h>
24 #include <string.h>
25 #include "assert.h"
26 #include "config.h"
27 #include "rtl.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "real.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
34 #include "output.h"
35 #include "insn-attr.h"
36 #include "flags.h"
37 #include "reload.h"
39 /* The maximum number of insns skipped which will be conditionalised if
40 possible. */
41 #define MAX_INSNS_SKIPPED 5
43 /* Some function declarations. */
44 extern FILE *asm_out_file;
45 extern char *output_multi_immediate ();
46 extern void arm_increase_location ();
48 HOST_WIDE_INT int_log2 PROTO ((HOST_WIDE_INT));
49 static int get_prologue_size PROTO ((void));
51 /* Define the information needed to generate branch insns. This is
52 stored from the compare operation. */
54 rtx arm_compare_op0, arm_compare_op1;
55 int arm_compare_fp;
57 /* What type of cpu are we compiling for? */
59 enum processor_type arm_cpu;
61 /* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
62 must report the mode of the memory reference from PRINT_OPERAND to
63 PRINT_OPERAND_ADDRESS. */
64 enum machine_mode output_memory_reference_mode;
66 /* Nonzero if the prologue must setup `fp'. */
67 int current_function_anonymous_args;
69 /* Location counter of .text segment. */
70 int arm_text_location = 0;
72 /* Set to one if we think that lr is only saved because of subroutine calls,
73 but all of these can be `put after' return insns */
74 int lr_save_eliminated;
76 /* A hash table is used to store text segment labels and their associated
77 offset from the start of the text segment. */
78 struct label_offset
80 char *name;
81 int offset;
82 struct label_offset *cdr;
85 #define LABEL_HASH_SIZE 257
87 static struct label_offset *offset_table[LABEL_HASH_SIZE];
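/* Illustrative sketch, not part of the original file: the table above is
   indexed by summing the characters of a label name and reducing modulo
   LABEL_HASH_SIZE, as done inline in arm_asm_output_label and
   output_load_symbol below.  A hypothetical helper expressing the same
   computation:  */

static int
example_label_hash (name)
     char *name;
{
  char *s;
  int hash = 0;

  for (s = name; *s; s++)
    hash += *s;
  return hash % LABEL_HASH_SIZE;
}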
 89 /* Set to 1 when a return insn is output; this means that the epilogue
90 is not needed. */
92 static int return_used_this_function;
94 /* For an explanation of these variables, see final_prescan_insn below. */
95 int arm_ccfsm_state;
96 int arm_current_cc;
97 rtx arm_target_insn;
98 int arm_target_label;
100 /* Return 1 if it is possible to return using a single instruction */
103 use_return_insn ()
105 int regno;
 107   if (!reload_completed || current_function_pretend_args_size
108 || current_function_anonymous_args
109 || (get_frame_size () && !(TARGET_APCS || frame_pointer_needed)))
110 return 0;
112 /* Can't be done if any of the FPU regs are pushed, since this also
113 requires an insn */
114 for (regno = 20; regno < 24; regno++)
115 if (regs_ever_live[regno])
116 return 0;
118 return 1;
121 /* Return TRUE if int I is a valid immediate ARM constant. */
124 const_ok_for_arm (i)
125 HOST_WIDE_INT i;
127 unsigned HOST_WIDE_INT mask = ~0xFF;
131 if ((i & mask & (unsigned HOST_WIDE_INT) 0xffffffff) == 0)
132 return TRUE;
133 mask =
134 (mask << 2) | ((mask & (unsigned HOST_WIDE_INT) 0xffffffff)
135 >> (32 - 2)) | ~((unsigned HOST_WIDE_INT) 0xffffffff);
136 } while (mask != ~0xFF);
138 return FALSE;
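/* Illustrative sketch, not part of the original file: the loop above scans
   all even rotations of an 8-bit window, so I is accepted exactly when it
   is an 8-bit value rotated right by an even amount within 32 bits (the
   ARM data-processing immediate encoding).  A hypothetical equivalent
   written with an explicit rotation count:  */

static int
example_arm_immediate_p (i)
     unsigned long i;
{
  int rot;

  i &= 0xffffffff;
  for (rot = 0; rot < 32; rot += 2)
    {
      /* Rotate I left by ROT bits within a 32-bit word.  */
      unsigned long v
	= ((i << rot) | (rot ? i >> (32 - rot) : 0)) & 0xffffffff;

      if ((v & ~(unsigned long) 0xff) == 0)
	return 1;
    }
  return 0;
}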
141 /* This code has been fixed for cross compilation. */
143 static int fpa_consts_inited = 0;
145 char *strings_fpa[8] = {
146 "0.0",
147 "1.0",
148 "2.0",
149 "3.0",
150 "4.0",
151 "5.0",
152 "0.5",
153 "10.0"
156 static REAL_VALUE_TYPE values_fpa[8];
158 static void
159 init_fpa_table ()
161 int i;
162 REAL_VALUE_TYPE r;
164 for (i = 0; i < 8; i++)
166 r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
167 values_fpa[i] = r;
170 fpa_consts_inited = 1;
173 /* Return TRUE if rtx X is a valid immediate FPU constant. */
176 const_double_rtx_ok_for_fpu (x)
177 rtx x;
179 REAL_VALUE_TYPE r;
180 int i;
182 if (!fpa_consts_inited)
183 init_fpa_table ();
185 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
186 if (REAL_VALUE_MINUS_ZERO (r))
187 return 0;
189 for (i = 0; i < 8; i++)
190 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
191 return 1;
193 return 0;
 196 /* Return TRUE if rtx X, when negated, is a valid immediate FPU constant. */
199 neg_const_double_rtx_ok_for_fpu (x)
200 rtx x;
202 REAL_VALUE_TYPE r;
203 int i;
205 if (!fpa_consts_inited)
206 init_fpa_table ();
208 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
209 r = REAL_VALUE_NEGATE (r);
210 if (REAL_VALUE_MINUS_ZERO (r))
211 return 0;
213 for (i = 0; i < 8; i++)
214 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
215 return 1;
217 return 0;
220 /* Predicates for `match_operand' and `match_operator'. */
222 /* s_register_operand is the same as register_operand, but it doesn't accept
223 (SUBREG (MEM)...). */
226 s_register_operand (op, mode)
227 register rtx op;
228 enum machine_mode mode;
230 if (GET_MODE (op) != mode && mode != VOIDmode)
231 return 0;
233 if (GET_CODE (op) == SUBREG)
234 op = SUBREG_REG (op);
236 /* We don't consider registers whose class is NO_REGS
237 to be a register operand. */
238 return (GET_CODE (op) == REG
239 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
240 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
243 /* Return 1 if OP is an item in memory, given that we are in reload. */
246 reload_memory_operand (op, mode)
247 rtx op;
248 enum machine_mode mode;
250 int regno = true_regnum (op);
252 return (! CONSTANT_P (op)
253 && (regno == -1
254 || (GET_CODE (op) == REG
255 && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
258 /* Return TRUE for valid operands for the rhs of an ARM instruction. */
261 arm_rhs_operand (op, mode)
262 rtx op;
263 enum machine_mode mode;
265 return (s_register_operand (op, mode)
266 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
 269 /* Return TRUE for valid operands for the rhs of an ARM instruction, or a load. */
273 arm_rhsm_operand (op, mode)
274 rtx op;
275 enum machine_mode mode;
277 return (s_register_operand (op, mode)
278 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
279 || memory_operand (op, mode));
 282 /* Return TRUE for valid operands for the rhs of an ARM instruction, or a
283 constant that is valid when negated. */
286 arm_add_operand (op, mode)
287 rtx op;
288 enum machine_mode mode;
290 return (s_register_operand (op, mode)
291 || (GET_CODE (op) == CONST_INT
292 && (const_ok_for_arm (INTVAL (op))
293 || const_ok_for_arm (-INTVAL (op)))));
297 arm_not_operand (op, mode)
298 rtx op;
299 enum machine_mode mode;
301 return (s_register_operand (op, mode)
302 || (GET_CODE (op) == CONST_INT
303 && (const_ok_for_arm (INTVAL (op))
304 || const_ok_for_arm (~INTVAL (op)))));
307 /* Return TRUE for valid operands for the rhs of an FPU instruction. */
310 fpu_rhs_operand (op, mode)
311 rtx op;
312 enum machine_mode mode;
314 if (s_register_operand (op, mode))
315 return TRUE;
316 else if (GET_CODE (op) == CONST_DOUBLE)
317 return (const_double_rtx_ok_for_fpu (op));
319 return FALSE;
323 fpu_add_operand (op, mode)
324 rtx op;
325 enum machine_mode mode;
327 if (s_register_operand (op, mode))
328 return TRUE;
329 else if (GET_CODE (op) == CONST_DOUBLE)
330 return (const_double_rtx_ok_for_fpu (op)
331 || neg_const_double_rtx_ok_for_fpu (op));
333 return FALSE;
336 /* Return nonzero if OP is a constant power of two. */
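/* For example, 8 & 7 == 0, so 8 is accepted, while 12 & 11 == 8, so 12 is
   rejected; the value != 0 test below excludes zero, which would otherwise
   also satisfy the AND test.  */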
339 power_of_two_operand (op, mode)
340 rtx op;
341 enum machine_mode mode;
343 if (GET_CODE (op) == CONST_INT)
345 HOST_WIDE_INT value = INTVAL(op);
346 return value != 0 && (value & (value - 1)) == 0;
348 return FALSE;
351 /* Return TRUE for a valid operand of a DImode operation.
352 Either: REG, CONST_DOUBLE or MEM(DImode_address).
353 Note that this disallows MEM(REG+REG), but allows
354 MEM(PRE/POST_INC/DEC(REG)). */
357 di_operand (op, mode)
358 rtx op;
359 enum machine_mode mode;
361 if (s_register_operand (op, mode))
362 return TRUE;
364 switch (GET_CODE (op))
366 case CONST_DOUBLE:
367 case CONST_INT:
368 return TRUE;
370 case MEM:
371 return memory_address_p (DImode, XEXP (op, 0));
373 default:
374 return FALSE;
378 /* Return TRUE for valid index operands. */
381 index_operand (op, mode)
382 rtx op;
383 enum machine_mode mode;
385 return (s_register_operand(op, mode)
386 || (immediate_operand (op, mode)
387 && INTVAL (op) < 4096 && INTVAL (op) > -4096));
390 /* Return TRUE for valid shifts by a constant. This also accepts any
391 power of two on the (somewhat overly relaxed) assumption that the
392 shift operator in this case was a mult. */
395 const_shift_operand (op, mode)
396 rtx op;
397 enum machine_mode mode;
399 return (power_of_two_operand (op, mode)
400 || (immediate_operand (op, mode)
401 && (INTVAL (op) < 32 && INTVAL (op) > 0)));
404 /* Return TRUE for arithmetic operators which can be combined with a multiply
405 (shift). */
408 shiftable_operator (x, mode)
409 rtx x;
410 enum machine_mode mode;
412 if (GET_MODE (x) != mode)
413 return FALSE;
414 else
416 enum rtx_code code = GET_CODE (x);
418 return (code == PLUS || code == MINUS
419 || code == IOR || code == XOR || code == AND);
423 /* Return TRUE for shift operators. */
426 shift_operator (x, mode)
427 rtx x;
428 enum machine_mode mode;
430 if (GET_MODE (x) != mode)
431 return FALSE;
432 else
434 enum rtx_code code = GET_CODE (x);
436 if (code == MULT)
437 return power_of_two_operand (XEXP (x, 1));
439 return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
443 int equality_operator (x, mode)
444 rtx x;
445 enum machine_mode mode;
447 return GET_CODE (x) == EQ || GET_CODE (x) == NE;
450 /* Return TRUE for SMIN SMAX UMIN UMAX operators. */
453 minmax_operator (x, mode)
454 rtx x;
455 enum machine_mode mode;
457 enum rtx_code code = GET_CODE (x);
459 if (GET_MODE (x) != mode)
460 return FALSE;
462 return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
465 /* return TRUE if x is EQ or NE */
 467 /* Return TRUE if this is the condition code register; if we aren't given
 468    a mode, accept any class CCmode register. */
471 cc_register (x, mode)
472 rtx x;
473 enum machine_mode mode;
475 if (mode == VOIDmode)
477 mode = GET_MODE (x);
478 if (GET_MODE_CLASS (mode) != MODE_CC)
479 return FALSE;
482 if (mode == GET_MODE (x) && GET_CODE (x) == REG && REGNO (x) == 24)
483 return TRUE;
485 return FALSE;
488 enum rtx_code
489 minmax_code (x)
490 rtx x;
492 enum rtx_code code = GET_CODE (x);
494 if (code == SMAX)
495 return GE;
496 else if (code == SMIN)
497 return LE;
498 else if (code == UMIN)
499 return LEU;
500 else if (code == UMAX)
501 return GEU;
503 abort ();
506 /* Return 1 if memory locations are adjacent */
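/* For example, two MEMs addressed as [r4, #4] and [r4, #8] (same base
   register, offsets differing by exactly one word, in either order) are
   considered adjacent; anything else is not.  */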
509 adjacent_mem_locations (a, b)
510 rtx a, b;
512 int val0 = 0, val1 = 0;
513 int reg0, reg1;
515 if ((GET_CODE (XEXP (a, 0)) == REG
516 || (GET_CODE (XEXP (a, 0)) == PLUS
517 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
518 && (GET_CODE (XEXP (b, 0)) == REG
519 || (GET_CODE (XEXP (b, 0)) == PLUS
520 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
522 if (GET_CODE (XEXP (a, 0)) == PLUS)
524 reg0 = REGNO (XEXP (XEXP (a, 0), 0));
525 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
527 else
528 reg0 = REGNO (XEXP (a, 0));
529 if (GET_CODE (XEXP (b, 0)) == PLUS)
531 reg1 = REGNO (XEXP (XEXP (b, 0), 0));
532 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
534 else
535 reg1 = REGNO (XEXP (b, 0));
536 return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
538 return 0;
541 /* Return 1 if OP is a load multiple operation. It is known to be
542 parallel and the first section will be tested. */
545 load_multiple_operation (op, mode)
546 rtx op;
547 enum machine_mode mode;
549 HOST_WIDE_INT count = XVECLEN (op, 0);
550 int dest_regno;
551 rtx src_addr;
552 HOST_WIDE_INT i = 1, base = 0;
553 rtx elt;
555 if (count <= 1
556 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
557 return 0;
559 /* Check to see if this might be a write-back */
560 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
562 i++;
563 base = 1;
565 /* Now check it more carefully */
566 if (GET_CODE (SET_DEST (elt)) != REG
567 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
568 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
569 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
570 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 2) * 4
571 || GET_CODE (XVECEXP (op, 0, count - 1)) != CLOBBER
572 || GET_CODE (XEXP (XVECEXP (op, 0, count - 1), 0)) != REG
573 || REGNO (XEXP (XVECEXP (op, 0, count - 1), 0))
574 != REGNO (SET_DEST (elt)))
575 return 0;
577 count--;
580 /* Perform a quick check so we don't blow up below. */
581 if (count <= i
582 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
583 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
584 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
585 return 0;
587 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
588 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);
590 for (; i < count; i++)
592 rtx elt = XVECEXP (op, 0, i);
594 if (GET_CODE (elt) != SET
595 || GET_CODE (SET_DEST (elt)) != REG
596 || GET_MODE (SET_DEST (elt)) != SImode
597 || REGNO (SET_DEST (elt)) != dest_regno + i - base
598 || GET_CODE (SET_SRC (elt)) != MEM
599 || GET_MODE (SET_SRC (elt)) != SImode
600 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
601 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
602 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
603 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
604 return 0;
607 return 1;
610 /* Return 1 if OP is a store multiple operation. It is known to be
611 parallel and the first section will be tested. */
614 store_multiple_operation (op, mode)
615 rtx op;
616 enum machine_mode mode;
618 HOST_WIDE_INT count = XVECLEN (op, 0);
619 int src_regno;
620 rtx dest_addr;
621 HOST_WIDE_INT i = 1, base = 0;
622 rtx elt;
624 if (count <= 1
625 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
626 return 0;
628 /* Check to see if this might be a write-back */
629 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
631 i++;
632 base = 1;
634 /* Now check it more carefully */
635 if (GET_CODE (SET_DEST (elt)) != REG
636 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
637 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
638 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
639 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 2) * 4
640 || GET_CODE (XVECEXP (op, 0, count - 1)) != CLOBBER
641 || GET_CODE (XEXP (XVECEXP (op, 0, count - 1), 0)) != REG
642 || REGNO (XEXP (XVECEXP (op, 0, count - 1), 0))
643 != REGNO (SET_DEST (elt)))
644 return 0;
646 count--;
649 /* Perform a quick check so we don't blow up below. */
650 if (count <= i
651 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
652 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
653 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
654 return 0;
656 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
657 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);
659 for (; i < count; i++)
661 elt = XVECEXP (op, 0, i);
663 if (GET_CODE (elt) != SET
664 || GET_CODE (SET_SRC (elt)) != REG
665 || GET_MODE (SET_SRC (elt)) != SImode
666 || REGNO (SET_SRC (elt)) != src_regno + i - base
667 || GET_CODE (SET_DEST (elt)) != MEM
668 || GET_MODE (SET_DEST (elt)) != SImode
669 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
670 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
671 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
672 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
673 return 0;
676 return 1;
679 /* Routines for use with attributes */
682 const_pool_offset (symbol)
683 rtx (symbol);
685 return get_pool_offset (symbol) - get_pool_size () - get_prologue_size ();
688 /* Routines for use in generating RTL */
691 arm_gen_load_multiple (base_regno, count, from, up, write_back)
692 int base_regno;
693 int count;
694 rtx from;
695 int up;
696 int write_back;
698 int i = 0, j;
699 rtx result;
700 int sign = up ? 1 : -1;
702 result = gen_rtx (PARALLEL, VOIDmode,
703 rtvec_alloc (count + (write_back ? 2 : 0)));
704 if (write_back)
706 XVECEXP (result, 0, 0)
707 = gen_rtx (SET, GET_MODE (from), from,
708 plus_constant (from, count * 4 * sign));
709 i = 1;
710 count++;
713 for (j = 0; i < count; i++, j++)
715 XVECEXP (result, 0, i)
716 = gen_rtx (SET, VOIDmode, gen_rtx (REG, SImode, base_regno + j),
717 gen_rtx (MEM, SImode,
718 plus_constant (from, j * 4 * sign)));
721 if (write_back)
722 XVECEXP (result, 0, i) = gen_rtx (CLOBBER, SImode, from);
724 return result;
728 arm_gen_store_multiple (base_regno, count, to, up, write_back)
729 int base_regno;
730 int count;
731 rtx to;
732 int up;
733 int write_back;
735 int i = 0, j;
736 rtx result;
737 int sign = up ? 1 : -1;
739 result = gen_rtx (PARALLEL, VOIDmode,
740 rtvec_alloc (count + (write_back ? 2 : 0)));
741 if (write_back)
743 XVECEXP (result, 0, 0)
744 = gen_rtx (SET, GET_MODE (to), to,
745 plus_constant (to, count * 4 * sign));
746 i = 1;
747 count++;
750 for (j = 0; i < count; i++, j++)
752 XVECEXP (result, 0, i)
753 = gen_rtx (SET, VOIDmode,
754 gen_rtx (MEM, SImode, plus_constant (to, j * 4 * sign)),
755 gen_rtx (REG, SImode, base_regno + j));
758 if (write_back)
759 XVECEXP (result, 0, i) = gen_rtx (CLOBBER, SImode, to);
761 return result;
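/* Illustrative usage sketch, not part of the original file: the two
   generators above build the PARALLEL rtx used by the load/store multiple
   patterns.  Assuming they return that PARALLEL as in the original file, a
   hypothetical caller wanting "ldmia FROM, {r4-r7}" (four SImode loads from
   consecutive words at the address in FROM, ascending, no write-back)
   could write:  */

static rtx
example_gen_ldm_r4_r7 (from)
     rtx from;
{
  return arm_gen_load_multiple (4, 4, from, TRUE, FALSE);
}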
764 /* X and Y are two things to compare using CODE. Emit the compare insn and
765 return the rtx for register 0 in the proper mode. FP means this is a
766 floating point compare: I don't think that it is needed on the arm. */
769 gen_compare_reg (code, x, y, fp)
770 enum rtx_code code;
771 rtx x, y;
773 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
774 rtx cc_reg = gen_rtx (REG, mode, 24);
776 emit_insn (gen_rtx (SET, VOIDmode, cc_reg,
777 gen_rtx (COMPARE, mode, x, y)));
779 return cc_reg;
782 void
783 arm_reload_out_hi (operands)
784 rtx *operands;
786 rtx base = find_replacement (&XEXP (operands[0], 0));
788 emit_insn (gen_rtx (SET, VOIDmode,
789 gen_rtx (MEM, QImode, base),
790 gen_rtx (SUBREG, QImode, operands[1], 0)));
791 emit_insn (gen_rtx (SET, VOIDmode, operands[2],
792 gen_rtx (LSHIFTRT, SImode,
793 gen_rtx (SUBREG, SImode, operands[1], 0),
794 GEN_INT (8))));
795 emit_insn (gen_rtx (SET, VOIDmode,
796 gen_rtx (MEM, QImode,
797 plus_constant (base, 1)),
798 gen_rtx (SUBREG, QImode, operands[2], 0)));
801 /* Check to see if a branch is forwards or backwards. Return TRUE if it
802 is backwards. */
805 arm_backwards_branch (from, to)
806 int from, to;
808 return insn_addresses[to] <= insn_addresses[from];
811 /* Check to see if a branch is within the distance that can be done using
812 an arithmetic expression. */
814 short_branch (from, to)
815 int from, to;
817 int delta = insn_addresses[from] + 8 - insn_addresses[to];
819 return abs (delta) < 980; /* A small margin for safety */
822 /* Check to see that the insn isn't the target of the conditionalizing
823 code */
825 arm_insn_not_targeted (insn)
826 rtx insn;
828 return insn != arm_target_insn;
832 /* Routines to output assembly language. */
834 /* If the rtx is the correct value then return the string of the number.
835 In this way we can ensure that valid double constants are generated even
836 when cross compiling. */
837 char *
838 fp_immediate_constant (x)
839 rtx (x);
841 REAL_VALUE_TYPE r;
842 int i;
844 if (!fpa_consts_inited)
845 init_fpa_table ();
847 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
848 for (i = 0; i < 8; i++)
849 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
850 return strings_fpa[i];
852 abort ();
856 /* Output the operands of a LDM/STM instruction to STREAM.
857 MASK is the ARM register set mask of which only bits 0-15 are important.
 858    INSTR is the possibly suffixed base register.  HAT is nonzero if a hat
 859    must follow the register list. */
861 void
862 print_multi_reg (stream, instr, mask, hat)
863 FILE *stream;
864 char *instr;
865 int mask, hat;
867 int i;
868 int not_first = FALSE;
870 fprintf (stream, "\t%s, {", instr);
871 for (i = 0; i < 16; i++)
872 if (mask & (1 << i))
874 if (not_first)
875 fprintf (stream, ", ");
876 fprintf (stream, "%s", reg_names[i]);
877 not_first = TRUE;
880 fprintf (stream, "}%s\n", hat ? "^" : "");
883 /* Output a 'call' insn. */
885 char *
886 output_call (operands)
887 rtx *operands;
889 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
891 if (REGNO (operands[0]) == 14)
893 operands[0] = gen_rtx (REG, SImode, 12);
894 output_asm_insn ("mov\t%0, lr", operands);
896 output_asm_insn ("mov\tlr, pc", operands);
897 output_asm_insn ("mov\tpc, %0", operands);
898 return "";
901 static int
902 eliminate_lr2ip (x)
903 rtx *x;
905 int something_changed = 0;
906 rtx x0 = *x;
907 int code = GET_CODE (x0);
908 register int i, j;
909 register char *fmt;
911 switch (code)
913 case REG:
914 if (REGNO (x0) == 14)
916 *x = gen_rtx (REG, SImode, 12);
917 return 1;
919 return 0;
920 default:
921 /* Scan through the sub-elements and change any references there */
922 fmt = GET_RTX_FORMAT (code);
923 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
924 if (fmt[i] == 'e')
925 something_changed |= eliminate_lr2ip (&XEXP (x0, i));
926 else if (fmt[i] == 'E')
927 for (j = 0; j < XVECLEN (x0, i); j++)
928 something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));
929 return something_changed;
933 /* Output a 'call' insn that is a reference in memory. */
935 char *
936 output_call_mem (operands)
937 rtx *operands;
939 operands[0] = copy_rtx (operands[0]); /* Be ultra careful */
 940   /* Handle calls using lr by using ip (which may be clobbered in subr anyway). */
942 if (eliminate_lr2ip (&operands[0]))
943 output_asm_insn ("mov\tip, lr", operands);
945 output_asm_insn ("mov\tlr, pc", operands);
946 output_asm_insn ("ldr\tpc, %0", operands);
947 return "";
 951 /* Output a move from arm registers to an fpu register.
 952    OPERANDS[0] is an fpu register.
 953    OPERANDS[1] is the first register of an arm register pair.  */
955 char *
956 output_mov_long_double_fpu_from_arm (operands)
957 rtx *operands;
959 int arm_reg0 = REGNO (operands[1]);
960 rtx ops[3];
962 if (arm_reg0 == 12)
963 abort();
965 ops[0] = gen_rtx (REG, SImode, arm_reg0);
966 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
967 ops[2] = gen_rtx (REG, SImode, 2 + arm_reg0);
969 output_asm_insn ("stmfd\tsp!, {%0, %1, %2}", ops);
970 output_asm_insn ("ldfe\t%0, [sp], #12", operands);
971 return "";
974 /* Output a move from an fpu register to arm registers.
 975    OPERANDS[0] is the first register of an arm register pair.
976 OPERANDS[1] is an fpu register. */
978 char *
979 output_mov_long_double_arm_from_fpu (operands)
980 rtx *operands;
982 int arm_reg0 = REGNO (operands[0]);
983 rtx ops[3];
985 if (arm_reg0 == 12)
986 abort();
988 ops[0] = gen_rtx (REG, SImode, arm_reg0);
989 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
990 ops[2] = gen_rtx (REG, SImode, 2 + arm_reg0);
992 output_asm_insn ("stfe\t%1, [sp, #-12]!", operands);
993 output_asm_insn ("ldmfd\tsp!, {%0, %1, %2}", ops);
994 return "";
997 /* Output a move from arm registers to arm registers of a long double
998 OPERANDS[0] is the destination.
999 OPERANDS[1] is the source. */
1000 char *
1001 output_mov_long_double_arm_from_arm (operands)
1002 rtx *operands;
1004 /* We have to be careful here because the two might overlap */
1005 int dest_start = REGNO (operands[0]);
1006 int src_start = REGNO (operands[1]);
1007 rtx ops[2];
1008 int i;
1010 if (dest_start < src_start)
1012 for (i = 0; i < 3; i++)
1014 ops[0] = gen_rtx (REG, SImode, dest_start + i);
1015 ops[1] = gen_rtx (REG, SImode, src_start + i);
1016 output_asm_insn ("mov\t%0, %1", ops);
1019 else
1021 for (i = 2; i >= 0; i--)
1023 ops[0] = gen_rtx (REG, SImode, dest_start + i);
1024 ops[1] = gen_rtx (REG, SImode, src_start + i);
1025 output_asm_insn ("mov\t%0, %1", ops);
1029 return "";
 1033 /* Output a move from arm registers to an fpu register.
 1034    OPERANDS[0] is an fpu register.
 1035    OPERANDS[1] is the first register of an arm register pair.  */
1037 char *
1038 output_mov_double_fpu_from_arm (operands)
1039 rtx *operands;
1041 int arm_reg0 = REGNO (operands[1]);
1042 rtx ops[2];
1044 if (arm_reg0 == 12)
1045 abort();
1046 ops[0] = gen_rtx (REG, SImode, arm_reg0);
1047 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
1048 output_asm_insn ("stmfd\tsp!, {%0, %1}", ops);
1049 output_asm_insn ("ldfd\t%0, [sp], #8", operands);
1050 return "";
1053 /* Output a move from an fpu register to arm registers.
 1054    OPERANDS[0] is the first register of an arm register pair.
1055 OPERANDS[1] is an fpu register. */
1057 char *
1058 output_mov_double_arm_from_fpu (operands)
1059 rtx *operands;
1061 int arm_reg0 = REGNO (operands[0]);
1062 rtx ops[2];
1064 if (arm_reg0 == 12)
1065 abort();
1067 ops[0] = gen_rtx (REG, SImode, arm_reg0);
1068 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
1069 output_asm_insn ("stfd\t%1, [sp, #-8]!", operands);
1070 output_asm_insn ("ldmfd\tsp!, {%0, %1}", ops);
1071 return "";
1074 /* Output a move between double words.
1075 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
1076 or MEM<-REG and all MEMs must be offsettable addresses. */
1078 char *
1079 output_move_double (operands)
1080 rtx *operands;
1082 enum rtx_code code0 = GET_CODE (operands[0]);
1083 enum rtx_code code1 = GET_CODE (operands[1]);
1084 rtx otherops[2];
1086 if (code0 == REG)
1088 int reg0 = REGNO (operands[0]);
1090 otherops[0] = gen_rtx (REG, SImode, 1 + reg0);
1091 if (code1 == REG)
1093 int reg1 = REGNO (operands[1]);
1094 if (reg1 == 12)
1095 abort();
1097 otherops[1] = gen_rtx (REG, SImode, 1 + reg1);
1099 /* Ensure the second source is not overwritten */
1100 if (reg0 == 1 + reg1)
1102 output_asm_insn("mov\t%0, %1", otherops);
1103 output_asm_insn("mov\t%0, %1", operands);
1105 else
1107 output_asm_insn("mov\t%0, %1", operands);
1108 output_asm_insn("mov\t%0, %1", otherops);
1111 else if (code1 == CONST_DOUBLE)
1113 otherops[1] = gen_rtx (CONST_INT, VOIDmode,
1114 CONST_DOUBLE_HIGH (operands[1]));
1115 operands[1] = gen_rtx (CONST_INT, VOIDmode,
1116 CONST_DOUBLE_LOW (operands[1]));
1117 output_mov_immediate (operands, FALSE, "");
1118 output_mov_immediate (otherops, FALSE, "");
1120 else if (code1 == CONST_INT)
1122 otherops[1] = const0_rtx;
1123 /* sign extend the intval into the high-order word */
1124 /* Note: output_mov_immediate may clobber operands[1], so we
1125 put this out first */
1126 if (INTVAL (operands[1]) < 0)
1127 output_asm_insn ("mvn\t%0, %1", otherops);
1128 else
1129 output_asm_insn ("mov\t%0, %1", otherops);
1130 output_mov_immediate (operands, FALSE, "");
1132 else if (code1 == MEM)
1134 switch (GET_CODE (XEXP (operands[1], 0)))
1136 case REG:
 1137         /* Handle the simple case where the address is [r, #0] more
 1138            efficiently.  */
1139 operands[1] = XEXP (operands[1], 0);
1140 output_asm_insn ("ldmia\t%1, %M0", operands);
1141 break;
1142 case PRE_INC:
1143 operands[1] = XEXP (XEXP (operands[1], 0), 0);
1144 output_asm_insn ("add\t%1, %1, #8", operands);
1145 output_asm_insn ("ldmia\t%1, %M0", operands);
1146 break;
1147 case PRE_DEC:
1148 operands[1] = XEXP (XEXP (operands[1], 0), 0);
1149 output_asm_insn ("sub\t%1, %1, #8", operands);
1150 output_asm_insn ("ldmia\t%1, %M0", operands);
1151 break;
1152 case POST_INC:
1153 operands[1] = XEXP (XEXP (operands[1], 0), 0);
1154 output_asm_insn ("ldmia\t%1!, %M0", operands);
1155 break;
1156 case POST_DEC:
1157 operands[1] = XEXP (XEXP (operands[1], 0), 0);
1158 output_asm_insn ("ldmia\t%1, %M0", operands);
1159 output_asm_insn ("sub\t%1, %1, #8", operands);
1160 break;
1161 default:
1162 otherops[1] = adj_offsettable_operand (operands[1], 4);
1163 /* Take care of overlapping base/data reg. */
1164 if (reg_mentioned_p (operands[0], operands[1]))
1166 output_asm_insn ("ldr\t%0, %1", otherops);
1167 output_asm_insn ("ldr\t%0, %1", operands);
1169 else
1171 output_asm_insn ("ldr\t%0, %1", operands);
1172 output_asm_insn ("ldr\t%0, %1", otherops);
1176 else abort(); /* Constraints should prevent this */
1178 else if (code0 == MEM && code1 == REG)
1180 if (REGNO (operands[1]) == 12)
1181 abort();
1182 switch (GET_CODE (XEXP (operands[0], 0)))
1184 case REG:
1185 operands[0] = XEXP (operands[0], 0);
1186 output_asm_insn ("stmia\t%0, %M1", operands);
1187 break;
1188 case PRE_INC:
1189 operands[0] = XEXP (XEXP (operands[0], 0), 0);
1190 output_asm_insn ("add\t%0, %0, #8", operands);
1191 output_asm_insn ("stmia\t%0, %M1", operands);
1192 break;
1193 case PRE_DEC:
1194 operands[0] = XEXP (XEXP (operands[0], 0), 0);
1195 output_asm_insn ("sub\t%0, %0, #8", operands);
1196 output_asm_insn ("stmia\t%0, %M1", operands);
1197 break;
1198 case POST_INC:
1199 operands[0] = XEXP (XEXP (operands[0], 0), 0);
1200 output_asm_insn ("stmia\t%0!, %M1", operands);
1201 break;
1202 case POST_DEC:
1203 operands[0] = XEXP (XEXP (operands[0], 0), 0);
1204 output_asm_insn ("stmia\t%0, %M1", operands);
1205 output_asm_insn ("sub\t%0, %0, #8", operands);
1206 break;
1207 default:
1208 otherops[0] = adj_offsettable_operand (operands[0], 4);
1209 otherops[1] = gen_rtx (REG, SImode, 1 + REGNO (operands[1]));
1210 output_asm_insn ("str\t%1, %0", operands);
1211 output_asm_insn ("str\t%1, %0", otherops);
1214 else abort(); /* Constraints should prevent this */
1216 return("");
1217 } /* output_move_double */
1220 /* Output an arbitrary MOV reg, #n.
1221 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
1223 char *
1224 output_mov_immediate (operands)
1225 rtx *operands;
1227 HOST_WIDE_INT n = INTVAL (operands[1]);
1228 int n_ones = 0;
1229 int i;
1231 /* Try to use one MOV */
1232 if (const_ok_for_arm (n))
1234 output_asm_insn ("mov\t%0, %1", operands);
1235 return "";
1238 /* Try to use one MVN */
1239 if (const_ok_for_arm (~n))
1241 operands[1] = GEN_INT (~n);
1242 output_asm_insn ("mvn\t%0, %1", operands);
1243 return "";
1246 /* If all else fails, make it out of ORRs or BICs as appropriate. */
1248 for (i=0; i < 32; i++)
1249 if (n & 1 << i)
1250 n_ones++;
1252 if (n_ones > 16) /* Shorter to use MVN with BIC in this case. */
1253 output_multi_immediate(operands, "mvn\t%0, %1", "bic\t%0, %0, %1", 1, ~n);
1254 else
1255 output_multi_immediate(operands, "mov\t%0, %1", "orr\t%0, %0, %1", 1, n);
1257 return "";
1261 /* Output an ADD r, s, #n where n may be too big for one instruction. If
1262 adding zero to one register, output nothing. */
1264 char *
1265 output_add_immediate (operands)
1266 rtx *operands;
1268 HOST_WIDE_INT n = INTVAL (operands[2]);
1270 if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
1272 if (n < 0)
1273 output_multi_immediate (operands,
1274 "sub\t%0, %1, %2", "sub\t%0, %0, %2", 2, -n);
1275 else
1276 output_multi_immediate (operands,
1277 "add\t%0, %1, %2", "add\t%0, %0, %2", 2, n);
1280 return "";
1284 /* Output a multiple immediate operation.
1285 OPERANDS is the vector of operands referred to in the output patterns.
1286 INSTR1 is the output pattern to use for the first constant.
1287 INSTR2 is the output pattern to use for subsequent constants.
1288 IMMED_OP is the index of the constant slot in OPERANDS.
1289 N is the constant value. */
1291 char *
1292 output_multi_immediate (operands, instr1, instr2, immed_op, n)
1293 rtx *operands;
1294 char *instr1, *instr2;
1295 int immed_op;
1296 HOST_WIDE_INT n;
1298 #if HOST_BITS_PER_WIDE_INT > 32
1299 n &= 0xffffffff;
1300 #endif
1302 if (n == 0)
1304 operands[immed_op] = const0_rtx;
1305 output_asm_insn (instr1, operands); /* Quick and easy output */
1307 else
1309 int i;
1310 char *instr = instr1;
1312 /* Note that n is never zero here (which would give no output) */
1313 for (i = 0; i < 32; i += 2)
1315 if (n & (3 << i))
1317 operands[immed_op] = GEN_INT (n & (255 << i));
1318 output_asm_insn (instr, operands);
1319 instr = instr2;
1320 i += 6;
1324 return "";
1325 } /* output_multi_immediate */
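/* Illustrative example, not part of the original file: for N = 0x00f000ff
   the scan above finds two byte-sized chunks, 0x000000ff (starting at bit 0)
   and 0x00f00000 (starting at bit 20), so the constant is built with one
   INSTR1 followed by one INSTR2; e.g. a MOV of 0xff and then an ORR of
   0xf00000 when called from output_mov_immediate.  */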
1328 /* Return the appropriate ARM instruction for the operation code.
1329 The returned result should not be overwritten. OP is the rtx of the
1330 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
1331 was shifted. */
1333 char *
1334 arithmetic_instr (op, shift_first_arg)
1335 rtx op;
1336 int shift_first_arg;
1338 switch (GET_CODE(op))
1340 case PLUS:
1341 return "add";
1343 case MINUS:
1344 return shift_first_arg ? "rsb" : "sub";
1346 case IOR:
1347 return "orr";
1349 case XOR:
1350 return "eor";
1352 case AND:
1353 return "and";
1355 default:
1356 abort ();
1361 /* Ensure valid constant shifts and return the appropriate shift mnemonic
1362 for the operation code. The returned result should not be overwritten.
1363 OP is the rtx code of the shift.
1364 SHIFT_PTR points to the shift size operand. */
1366 char *
1367 shift_instr (op, shift_ptr)
1368 enum rtx_code op;
1369 rtx *shift_ptr;
1371 int min_shift = 0;
1372 int max_shift = 31;
1373 char *mnem;
1375 switch (op)
1377 case ASHIFT:
1378 mnem = "asl";
1379 break;
1381 case ASHIFTRT:
1382 mnem = "asr";
1383 max_shift = 32;
1384 break;
1386 case LSHIFTRT:
1387 mnem = "lsr";
1388 max_shift = 32;
1389 break;
1391 case MULT:
1392 *shift_ptr = GEN_INT (int_log2 (INTVAL (*shift_ptr)));
1393 return "asl";
1395 default:
1396 abort ();
1399 if (GET_CODE (*shift_ptr) == CONST_INT)
1401 int shift = INTVAL (*shift_ptr);
1403 if (shift < min_shift)
1404 *shift_ptr = gen_rtx (CONST_INT, VOIDmode, 0);
1405 else if (shift > max_shift)
1406 *shift_ptr = gen_rtx (CONST_INT, VOIDmode, max_shift);
1408 return (mnem);
1409 } /* shift_instr */
1412 /* Obtain the shift from the POWER of two. */
1414 HOST_WIDE_INT
1415 int_log2 (power)
1416 HOST_WIDE_INT power;
1418 HOST_WIDE_INT shift = 0;
1420 while (((1 << shift) & power) == 0)
1422 if (shift > 31)
1423 abort ();
1424 shift++;
1427 return shift;
1431 /* Output an arithmetic instruction which may set the condition code.
1432 OPERANDS[0] is the destination register.
1433 OPERANDS[1] is the arithmetic operator expression.
1434 OPERANDS[2] is the left hand argument.
1435 OPERANDS[3] is the right hand argument.
1436 CONST_FIRST_ARG is TRUE if the first argument of the operator was constant.
1437 SET_COND is TRUE when the condition code should be set. */
1439 char *
1440 output_arithmetic (operands, const_first_arg, set_cond)
1441 rtx *operands;
1442 int const_first_arg;
1443 int set_cond;
1445 char mnemonic[80];
1446 char *instr = arithmetic_instr (operands[1], const_first_arg);
1448 sprintf (mnemonic, "%s%s\t%%0, %%2, %%3", instr, set_cond ? "s" : "");
1449 output_asm_insn (mnemonic, operands);
1450 return "";
1454 /* Output an arithmetic instruction with a shift.
1455 OPERANDS[0] is the destination register.
1456 OPERANDS[1] is the arithmetic operator expression.
1457 OPERANDS[2] is the unshifted register.
1458 OPERANDS[3] is the shift operator expression.
1459 OPERANDS[4] is the shifted register.
1460 OPERANDS[5] is the shift constant or register.
1461 SHIFT_FIRST_ARG is TRUE if the first argument of the operator was shifted.
1462 SET_COND is TRUE when the condition code should be set. */
1464 char *
1465 output_arithmetic_with_shift (operands, shift_first_arg, set_cond)
1466 rtx *operands;
1467 int shift_first_arg;
1468 int set_cond;
1470 char mnemonic[80];
1471 char *instr = arithmetic_instr (operands[1], shift_first_arg);
1472 char *condbit = set_cond ? "s" : "";
1473 char *shift = shift_instr (GET_CODE (operands[3]), &operands[5]);
1475 sprintf (mnemonic, "%s%s\t%%0, %%2, %%4, %s %%5", instr, condbit, shift);
1476 output_asm_insn (mnemonic, operands);
1477 return "";
1480 /* Output an arithmetic instruction with a power of two multiplication.
1481 OPERANDS[0] is the destination register.
1482 OPERANDS[1] is the arithmetic operator expression.
1483 OPERANDS[2] is the unmultiplied register.
1484 OPERANDS[3] is the multiplied register.
1485 OPERANDS[4] is the constant multiple (power of two).
1486 SHIFT_FIRST_ARG is TRUE if the first arg of the operator was multiplied. */
1488 char *
1489 output_arithmetic_with_immediate_multiply (operands, shift_first_arg)
1490 rtx *operands;
1491 int shift_first_arg;
1493 char mnemonic[80];
1494 char *instr = arithmetic_instr (operands[1], shift_first_arg);
1495 HOST_WIDE_INT shift = int_log2 (INTVAL (operands[4]));
1497 sprintf (mnemonic, "%s\t%%0, %%2, %%3, asl#%d", instr, (int) shift);
1498 output_asm_insn (mnemonic, operands);
1499 return "";
1503 /* Output a move with a shift.
1504 OP is the shift rtx code.
1505 OPERANDS[0] = destination register.
1506 OPERANDS[1] = source register.
1507 OPERANDS[2] = shift constant or register. */
1509 char *
1510 output_shifted_move (op, operands)
1511 enum rtx_code op;
1512 rtx *operands;
1514 char mnemonic[80];
1516 if (GET_CODE (operands[2]) == CONST_INT && INTVAL (operands[2]) == 0)
1517 sprintf (mnemonic, "mov\t%%0, %%1");
1518 else
1519 sprintf (mnemonic, "mov\t%%0, %%1, %s %%2",
1520 shift_instr (op, &operands[2]));
1522 output_asm_insn (mnemonic, operands);
1523 return "";
1526 char *
1527 output_shift_compare (operands, neg)
1528 rtx *operands;
1529 int neg;
1531 char buf[80];
1533 if (neg)
1534 sprintf (buf, "cmn\t%%1, %%3, %s %%4", shift_instr (GET_CODE (operands[2]),
1535 &operands[4]));
1536 else
1537 sprintf (buf, "cmp\t%%1, %%3, %s %%4", shift_instr (GET_CODE (operands[2]),
1538 &operands[4]));
1539 output_asm_insn (buf, operands);
1540 return "";
1543 /* Output a .ascii pseudo-op, keeping track of lengths. This is because
1544 /bin/as is horribly restrictive. */
1546 void
1547 output_ascii_pseudo_op (stream, p, len)
1548 FILE *stream;
1549 unsigned char *p;
1550 int len;
1552 int i;
1553 int len_so_far = 1000;
1554 int chars_so_far = 0;
1556 for (i = 0; i < len; i++)
1558 register int c = p[i];
1560 if (len_so_far > 50)
1562 if (chars_so_far)
1563 fputs ("\"\n", stream);
1564 fputs ("\t.ascii\t\"", stream);
1565 len_so_far = 0;
1566 arm_increase_location (chars_so_far);
1567 chars_so_far = 0;
1570 if (c == '\"' || c == '\\')
1572 putc('\\', stream);
1573 len_so_far++;
1576 if (c >= ' ' && c < 0177)
1578 putc (c, stream);
1579 len_so_far++;
1581 else
1583 fprintf (stream, "\\%03o", c);
1584 len_so_far +=4;
1587 chars_so_far++;
1590 fputs ("\"\n", stream);
1591 arm_increase_location (chars_so_far);
1595 /* Try to determine whether a pattern really clobbers the link register.
1596 This information is useful when peepholing, so that lr need not be pushed
1597 if we combine a call followed by a return.
1598 NOTE: This code does not check for side-effect expressions in a SET_SRC:
1599 such a check should not be needed because these only update an existing
1600 value within a register; the register must still be set elsewhere within
1601 the function. */
1603 static int
1604 pattern_really_clobbers_lr (x)
1605 rtx x;
1607 int i;
1609 switch (GET_CODE (x))
1611 case SET:
1612 switch (GET_CODE (SET_DEST (x)))
1614 case REG:
1615 return REGNO (SET_DEST (x)) == 14;
1617 case SUBREG:
1618 if (GET_CODE (XEXP (SET_DEST (x), 0)) == REG)
1619 return REGNO (XEXP (SET_DEST (x), 0)) == 14;
1621 if (GET_CODE (XEXP (SET_DEST (x), 0)) == MEM)
1622 return 0;
1623 abort ();
1625 default:
1626 return 0;
1629 case PARALLEL:
1630 for (i = 0; i < XVECLEN (x, 0); i++)
1631 if (pattern_really_clobbers_lr (XVECEXP (x, 0, i)))
1632 return 1;
1633 return 0;
1635 case CLOBBER:
1636 switch (GET_CODE (XEXP (x, 0)))
1638 case REG:
1639 return REGNO (XEXP (x, 0)) == 14;
1641 case SUBREG:
1642 if (GET_CODE (XEXP (XEXP (x, 0), 0)) == REG)
1643 return REGNO (XEXP (XEXP (x, 0), 0)) == 14;
1644 abort ();
1646 default:
1647 return 0;
1650 case UNSPEC:
1651 return 1;
1653 default:
1654 return 0;
1658 static int
1659 function_really_clobbers_lr (first)
1660 rtx first;
1662 rtx insn, next;
1664 for (insn = first; insn; insn = next_nonnote_insn (insn))
1666 switch (GET_CODE (insn))
1668 case BARRIER:
1669 case NOTE:
1670 case CODE_LABEL:
1671 case JUMP_INSN: /* Jump insns only change the PC (and conds) */
1672 case INLINE_HEADER:
1673 break;
1675 case INSN:
1676 if (pattern_really_clobbers_lr (PATTERN (insn)))
1677 return 1;
1678 break;
1680 case CALL_INSN:
1681 /* Don't yet know how to handle those calls that are not to a
1682 SYMBOL_REF */
1683 if (GET_CODE (PATTERN (insn)) != PARALLEL)
1684 abort ();
1686 switch (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)))
1688 case CALL:
1689 if (GET_CODE (XEXP (XEXP (XVECEXP (PATTERN (insn), 0, 0), 0), 0))
1690 != SYMBOL_REF)
1691 return 1;
1692 break;
1694 case SET:
1695 if (GET_CODE (XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn),
1696 0, 0)), 0), 0))
1697 != SYMBOL_REF)
1698 return 1;
1699 break;
1701 default: /* Don't recognize it, be safe */
1702 return 1;
1705 /* A call can be made (by peepholing) not to clobber lr iff it is
1706 followed by a return. There may, however, be a use insn iff
1707 we are returning the result of the call.
1708 If we run off the end of the insn chain, then that means the
1709 call was at the end of the function. Unfortunately we don't
1710 have a return insn for the peephole to recognize, so we
1711 must reject this. (Can this be fixed by adding our own insn?) */
1712 if ((next = next_nonnote_insn (insn)) == NULL)
1713 return 1;
1715 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == USE
1716 && (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1717 && (REGNO (SET_DEST (XVECEXP (PATTERN (insn), 0, 0)))
1718 == REGNO (XEXP (PATTERN (next), 0))))
1719 if ((next = next_nonnote_insn (next)) == NULL)
1720 return 1;
1722 if (GET_CODE (next) == JUMP_INSN
1723 && GET_CODE (PATTERN (next)) == RETURN)
1724 break;
1725 return 1;
1727 default:
1728 abort ();
1732 /* We have reached the end of the chain so lr was _not_ clobbered */
1733 return 0;
1736 char *
1737 output_return_instruction (operand, really_return)
1738 rtx operand;
1739 int really_return;
1741 char instr[100];
1742 int reg, live_regs = 0;
1744 if (current_function_calls_alloca && ! really_return)
1745 abort();
1747 for (reg = 0; reg <= 10; reg++)
1748 if (regs_ever_live[reg] && ! call_used_regs[reg])
1749 live_regs++;
1751 if (live_regs || (regs_ever_live[14] && ! lr_save_eliminated))
1752 live_regs++;
1754 if (frame_pointer_needed)
1755 live_regs += 4;
1757 if (live_regs)
1759 if (lr_save_eliminated || ! regs_ever_live[14])
1760 live_regs++;
1762 if (frame_pointer_needed)
1763 strcpy (instr, "ldm%d0ea\tfp, {");
1764 else
1765 strcpy (instr, "ldm%d0fd\tsp!, {");
1767 for (reg = 0; reg <= 10; reg++)
1768 if (regs_ever_live[reg] && ! call_used_regs[reg])
1770 strcat (instr, reg_names[reg]);
1771 if (--live_regs)
1772 strcat (instr, ", ");
1775 if (frame_pointer_needed)
1777 strcat (instr, reg_names[11]);
1778 strcat (instr, ", ");
1779 strcat (instr, reg_names[13]);
1780 strcat (instr, ", ");
1781 strcat (instr, really_return ? reg_names[15] : reg_names[14]);
1783 else
1784 strcat (instr, really_return ? reg_names[15] : reg_names[14]);
1785 strcat (instr, (TARGET_6 || !really_return) ? "}" : "}^");
1786 output_asm_insn (instr, &operand);
1788 else if (really_return)
1790 strcpy (instr, TARGET_6 ? "mov%d0\tpc, lr" : "mov%d0s\tpc, lr");
1791 output_asm_insn (instr, &operand);
1794 return_used_this_function = 1;
1795 return "";
1798 /* Return the size of the prologue. It's not too bad if we slightly
1799 over-estimate. */
1801 static int
1802 get_prologue_size ()
1804 int amount = 0;
1805 int regno;
1807 /* Until we know which registers are really used return the maximum. */
1808 if (! reload_completed)
1809 return 24;
1811 /* Look for integer regs that have to be saved. */
1812 for (regno = 0; regno < 15; regno++)
1813 if (regs_ever_live[regno] && ! call_used_regs[regno])
1815 amount = 4;
1816 break;
1819 /* Clobbering lr when none of the other regs have been saved also requires
1820 a save. */
1821 if (regs_ever_live[14])
1822 amount = 4;
1824 /* If we need to push a stack frame then there is an extra instruction to
1825 preserve the current value of the stack pointer. */
1826 if (frame_pointer_needed)
1827 amount = 8;
1829 /* Now look for floating-point regs that need saving. We need an
1830 instruction per register. */
1831 for (regno = 16; regno < 24; regno++)
1832 if (regs_ever_live[regno] && ! call_used_regs[regno])
1833 amount += 4;
1835 if (current_function_anonymous_args && current_function_pretend_args_size)
1836 amount += 4;
1838 return amount;
1841 /* The amount of stack adjustment that happens here, in output_return and in
1842 output_epilogue must be exactly the same as was calculated during reload,
1843 or things will point to the wrong place. The only time we can safely
1844 ignore this constraint is when a function has no arguments on the stack,
 1845    no stack frame requirement and no live registers except for `lr'.  If we
1846 can guarantee that by making all function calls into tail calls and that
1847 lr is not clobbered in any other way, then there is no need to push lr
1848 onto the stack. */
1850 void
1851 output_func_prologue (f, frame_size)
1852 FILE *f;
1853 int frame_size;
1855 int reg, live_regs_mask = 0;
1856 rtx operands[3];
1858 /* Nonzero if we must stuff some register arguments onto the stack as if
1859 they were passed there. */
1860 int store_arg_regs = 0;
1862 if (arm_ccfsm_state || arm_target_insn)
1863 abort (); /* Sanity check */
1865 return_used_this_function = 0;
1866 lr_save_eliminated = 0;
1868 fprintf (f, "\t@ args = %d, pretend = %d, frame = %d\n",
1869 current_function_args_size, current_function_pretend_args_size,
1870 frame_size);
1871 fprintf (f, "\t@ frame_needed = %d, current_function_anonymous_args = %d\n",
1872 frame_pointer_needed, current_function_anonymous_args);
1874 if (current_function_anonymous_args && current_function_pretend_args_size)
1875 store_arg_regs = 1;
1877 for (reg = 0; reg <= 10; reg++)
1878 if (regs_ever_live[reg] && ! call_used_regs[reg])
1879 live_regs_mask |= (1 << reg);
1881 if (frame_pointer_needed)
1883 live_regs_mask |= 0xD800;
1884 fputs ("\tmov\tip, sp\n", f);
1886 else if (regs_ever_live[14])
1888 if (! current_function_args_size
1889 && ! function_really_clobbers_lr (get_insns ()))
1891 fprintf (f,"\t@ I don't think this function clobbers lr\n");
1892 lr_save_eliminated = 1;
1894 else
1895 live_regs_mask |= 0x4000;
1898 /* If CURRENT_FUNCTION_PRETEND_ARGS_SIZE, adjust the stack pointer to make
 1899    room.  If also STORE_ARG_REGS, store the argument registers involved in
1900 the created slot (this is for stdarg and varargs). */
1901 if (current_function_pretend_args_size)
1903 if (store_arg_regs)
1905 int arg_size, mask = 0;
1907 assert (current_function_pretend_args_size <= 16);
1908 for (reg = 3, arg_size = current_function_pretend_args_size;
1909 arg_size > 0; reg--, arg_size -= 4)
1910 mask |= (1 << reg);
1911 print_multi_reg (f, "stmfd\tsp!", mask, FALSE);
1913 else
1915 operands[0] = operands[1] = stack_pointer_rtx;
1916 operands[2] = gen_rtx (CONST_INT, VOIDmode,
1917 -current_function_pretend_args_size);
1918 output_add_immediate (operands);
1922 if (live_regs_mask)
 1924       /* If a DImode load/store multiple is used, and the base register
 1925          is r3, then r4 can become an ever live register without lr
 1926          doing so; in this case we need to push lr as well, or we
 1927          will fail to get a proper return. */
1929 live_regs_mask |= 0x4000;
1930 lr_save_eliminated = 0;
1932 /* Now push all the call-saved regs onto the stack */
1933 print_multi_reg (f, "stmfd\tsp!", live_regs_mask, FALSE);
1936 for (reg = 23; reg > 15; reg--)
1937 if (regs_ever_live[reg] && !call_used_regs[reg])
1938 fprintf (f, "\tstfe\t%s, [sp, #-12]!\n", reg_names[reg]);
1940 if (frame_pointer_needed)
1942 /* Make `fp' point to saved value of `pc'. */
1944 operands[0] = gen_rtx (REG, SImode, HARD_FRAME_POINTER_REGNUM);
1945 operands[1] = gen_rtx (REG, SImode, 12);
1946 operands[2] = GEN_INT ( - (4 + current_function_pretend_args_size));
1947 output_add_immediate (operands);
1950 if (frame_size)
1952 operands[0] = operands[1] = stack_pointer_rtx;
1953 operands[2] = GEN_INT (-frame_size);
1954 output_add_immediate (operands);
1959 void
1960 output_func_epilogue (f, frame_size)
1961 FILE *f;
1962 int frame_size;
1964 int reg, live_regs_mask = 0, code_size = 0;
 1965   /* If we need this then it will always be at least this much */
1966 int floats_offset = 24;
1967 rtx operands[3];
1969 if (use_return_insn() && return_used_this_function)
1971 if (frame_size && !(frame_pointer_needed || TARGET_APCS))
1973 abort ();
1975 goto epilogue_done;
1978 for (reg = 0; reg <= 10; reg++)
1979 if (regs_ever_live[reg] && ! call_used_regs[reg])
1981 live_regs_mask |= (1 << reg);
1982 floats_offset += 4;
1985 if (frame_pointer_needed)
1987 for (reg = 23; reg > 15; reg--)
1988 if (regs_ever_live[reg] && ! call_used_regs[reg])
1990 fprintf (f, "\tldfe\t%s, [fp, #-%d]\n", reg_names[reg],
1991 floats_offset);
1992 floats_offset += 12;
1993 code_size += 4;
1996 live_regs_mask |= 0xA800;
1997 print_multi_reg (f, "ldmea\tfp", live_regs_mask,
1998 TARGET_6 ? FALSE : TRUE);
1999 code_size += 4;
2001 else
2003 /* Restore stack pointer if necessary. */
2004 if (frame_size)
2006 operands[0] = operands[1] = stack_pointer_rtx;
2007 operands[2] = gen_rtx (CONST_INT, VOIDmode, frame_size);
2008 output_add_immediate (operands);
2011 for (reg = 16; reg < 24; reg++)
2012 if (regs_ever_live[reg] && ! call_used_regs[reg])
2014 fprintf (f, "\tldfe\t%s, [sp], #12\n", reg_names[reg]);
2015 code_size += 4;
2017 if (current_function_pretend_args_size == 0 && regs_ever_live[14])
2019 print_multi_reg (f, "ldmfd\tsp!", live_regs_mask | 0x8000,
2020 TARGET_6 ? FALSE : TRUE);
2021 code_size += 4;
2023 else
2025 if (live_regs_mask || regs_ever_live[14])
2027 live_regs_mask |= 0x4000;
2028 print_multi_reg (f, "ldmfd\tsp!", live_regs_mask, FALSE);
2029 code_size += 4;
2031 if (current_function_pretend_args_size)
2033 operands[0] = operands[1] = stack_pointer_rtx;
2034 operands[2] = gen_rtx (CONST_INT, VOIDmode,
2035 current_function_pretend_args_size);
2036 output_add_immediate (operands);
2038 fputs (TARGET_6 ? "\tmov\tpc, lr\n" : "\tmovs\tpc, lr\n", f);
2039 code_size += 4;
2043 epilogue_done:
2045 /* insn_addresses isn't allocated when not optimizing */
2047 if (optimize > 0)
2048 arm_increase_location (code_size
2049 + insn_addresses[INSN_UID (get_last_insn ())]
2050 + get_prologue_size ());
2052 current_function_anonymous_args = 0;
2055 /* Increase the `arm_text_location' by AMOUNT if we're in the text
2056 segment. */
2058 void
2059 arm_increase_location (amount)
2060 int amount;
2062 if (in_text_section ())
2063 arm_text_location += amount;
2067 /* Output a label definition. If this label is within the .text segment, it
2068 is stored in OFFSET_TABLE, to be used when building `llc' instructions.
2069 Maybe GCC remembers names not starting with a `*' for a long time, but this
2070 is a minority anyway, so we just make a copy. Do not store the leading `*'
2071 if the name starts with one. */
2073 void
2074 arm_asm_output_label (stream, name)
2075 FILE *stream;
2076 char *name;
2078 char *real_name, *s;
2079 struct label_offset *cur;
2080 int hash = 0;
2082 assemble_name (stream, name);
2083 fputs (":\n", stream);
2084 if (! in_text_section ())
2085 return;
2087 if (name[0] == '*')
2089 real_name = xmalloc (1 + strlen (&name[1]));
2090 strcpy (real_name, &name[1]);
2092 else
2094 real_name = xmalloc (2 + strlen (name));
2095 strcpy (real_name, "_");
2096 strcat (real_name, name);
2098 for (s = real_name; *s; s++)
2099 hash += *s;
2101 hash = hash % LABEL_HASH_SIZE;
2102 cur = (struct label_offset *) xmalloc (sizeof (struct label_offset));
2103 cur->name = real_name;
2104 cur->offset = arm_text_location;
2105 cur->cdr = offset_table[hash];
2106 offset_table[hash] = cur;
2109 /* Load a symbol that is known to be in the text segment into a register.
2110 This should never be called when not optimizing. */
2112 char *
2113 output_load_symbol (insn, operands)
2114 rtx insn;
2115 rtx *operands;
2117 char *s;
2118 char *name = XSTR (operands[1], 0);
2119 struct label_offset *he;
2120 int hash = 0;
2121 int offset;
2122 unsigned int mask, never_mask = 0xffffffff;
2123 int shift, inst;
2124 char buffer[100];
2126 if (optimize == 0 || *name != '*')
2127 abort ();
2129 for (s = &name[1]; *s; s++)
2130 hash += *s;
2132 hash = hash % LABEL_HASH_SIZE;
2133 he = offset_table[hash];
2134 while (he && strcmp (he->name, &name[1]))
2135 he = he->cdr;
2137 if (!he)
2138 abort ();
2140 offset = (arm_text_location + insn_addresses[INSN_UID (insn)]
2141 + get_prologue_size () + 8 - he->offset);
2142 if (offset < 0)
2143 abort ();
 2145   /* When generating the instructions, we never mask out the bits that we
 2146      think will always be zero, then if a mistake has occurred somewhere, the
 2147      assembler will spot it and generate an error.  */
2149 /* If the symbol is word aligned then we might be able to reduce the
2150 number of loads. */
2151 shift = ((offset & 3) == 0) ? 2 : 0;
2153 /* Clear the bits from NEVER_MASK that will be orred in with the individual
2154 instructions. */
2155 for (; shift < 32; shift += 8)
2157 mask = 0xff << shift;
2158 if ((offset & mask) || ((unsigned) offset) > mask)
2159 never_mask &= ~mask;
2162 inst = 8;
2163 mask = 0xff << (shift - 32);
2165 while (mask && (never_mask & mask) == 0)
2167 if (inst == 8)
2169 strcpy (buffer, "sub\t%0, pc, #(8 + . -%a1)");
2170 if ((never_mask | mask) != 0xffffffff)
2171 sprintf (buffer + strlen (buffer), " & 0x%x", mask | never_mask);
2173 else
2174 sprintf (buffer, "sub\t%%0, %%0, #(%d + . -%%a1) & 0x%x",
2175 inst, mask | never_mask);
2177 output_asm_insn (buffer, operands);
2178 mask <<= 8;
2179 inst -= 4;
2182 return "";
2185 /* Output code resembling an .lcomm directive. /bin/as doesn't have this
2186 directive hence this hack, which works by reserving some `.space' in the
2187 bss segment directly.
2189 XXX This is a severe hack, which is guaranteed NOT to work since it doesn't
2190 define STATIC COMMON space but merely STATIC BSS space. */
2192 void
2193 output_lcomm_directive (stream, name, size, rounded)
2194 FILE *stream;
2195 char *name;
2196 int size, rounded;
2198 fputs ("\n\t.bss\t@ .lcomm\n", stream);
2199 assemble_name (stream, name);
2200 fprintf (stream, ":\t.space\t%d\n", rounded);
2201 if (in_text_section ())
2202 fputs ("\n\t.text\n", stream);
2203 else
2204 fputs ("\n\t.data\n", stream);
2207 /* A finite state machine takes care of noticing whether or not instructions
2208 can be conditionally executed, and thus decrease execution time and code
2209 size by deleting branch instructions. The fsm is controlled by
2210 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
 2212 /* The states of the fsm controlling condition codes are:
2213 0: normal, do nothing special
2214 1: make ASM_OUTPUT_OPCODE not output this instruction
2215 2: make ASM_OUTPUT_OPCODE not output this instruction
2216 3: make instructions conditional
2217 4: make instructions conditional
2219 State transitions (state->state by whom under condition):
2220 0 -> 1 final_prescan_insn if the `target' is a label
2221 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
2222 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
2223 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
2224 3 -> 0 ASM_OUTPUT_INTERNAL_LABEL if the `target' label is reached
2225 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
2226 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
2227 (the target insn is arm_target_insn).
2229 If the jump clobbers the conditions then we use states 2 and 4.
2231 A similar thing can be done with conditional return insns.
2233 XXX In case the `target' is an unconditional branch, this conditionalising
2234 of the instructions always reduces code size, but not always execution
2235 time. But then, I want to reduce the code size to somewhere near what
2236 /bin/cc produces. */
2238 /* The condition codes of the ARM, and the inverse function. */
2239 char *arm_condition_codes[] =
2241 "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
2242 "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
2245 #define ARM_INVERSE_CONDITION_CODE(X) ((X) ^ 1)
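/* The table above lists the codes in complementary pairs (eq/ne, cs/cc,
   mi/pl, vs/vc, hi/ls, ge/lt, gt/le, al/nv), so inverting the least
   significant bit of an index yields the inverse condition; for example
   ARM_INVERSE_CONDITION_CODE (0) == 1 maps "eq" to "ne".  */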
2247 /* Returns the index of the ARM condition code string in
2248 `arm_condition_codes'. COMPARISON should be an rtx like
2249 `(eq (...) (...))'. */
2252 get_arm_condition_code (comparison)
2253 rtx comparison;
2255 switch (GET_CODE (comparison))
2257 case NE: return (1);
2258 case EQ: return (0);
2259 case GE: return (10);
2260 case GT: return (12);
2261 case LE: return (13);
2262 case LT: return (11);
2263 case GEU: return (2);
2264 case GTU: return (8);
2265 case LEU: return (9);
2266 case LTU: return (3);
2267 default: abort ();
2269 /*NOTREACHED*/
2270 return (42);
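/* A usage sketch (COND and STREAM are hypothetical; COND is an rtx such as
   (geu (reg) (reg))):

        index = get_arm_condition_code (cond);          => yields 2
        fputs (arm_condition_codes[index], stream);     => prints "cs"

   i.e. an unsigned >= comparison selects the carry-set condition.  */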
2274 void
2275 final_prescan_insn (insn, opvec, noperands)
2276 rtx insn;
2277 rtx *opvec;
2278 int noperands;
2280 /* BODY will hold the body of INSN. */
2281 register rtx body = PATTERN (insn);
2283 /* This will be 1 if we are trying to repeat the trick (see the fsm comment
2284 above), and things need to be reversed if the attempt appears to fail. */
2285 int reverse = 0;
2287 /* JUMP_CLOBBERS being one implies that the condition codes are clobbered
2288 if the branch is taken, even if the rtl suggests otherwise. It also
2289 means that we have to grub around within the jump expression to find
2290 out what the conditions are when the jump isn't taken. */
2291 int jump_clobbers = 0;
2293 /* If we start with a return insn, we only succeed if we find another one. */
2294 int seeking_return = 0;
2296 /* START_INSN will hold the insn from where we start looking. This is the
2297 first insn after the following code_label if REVERSE is true. */
2298 rtx start_insn = insn;
2300 /* If in state 4, check if the target branch is reached, in order to
2301 change back to state 0. */
2302 if (arm_ccfsm_state == 4)
2304 if (insn == arm_target_insn)
2306 arm_target_insn = NULL;
2307 arm_ccfsm_state = 0;
2309 return;
2312 /* If in state 3, it is possible to repeat the trick, if this insn is an
2313 unconditional branch to a label, and immediately following this branch
2314 is the previous target label which is only used once, and the label this
2315 branch jumps to is not too far off. */
2316 if (arm_ccfsm_state == 3)
2318 if (simplejump_p (insn))
2320 start_insn = next_nonnote_insn (start_insn);
2321 if (GET_CODE (start_insn) == BARRIER)
2323 /* XXX Isn't this always a barrier? */
2324 start_insn = next_nonnote_insn (start_insn);
2326 if (GET_CODE (start_insn) == CODE_LABEL
2327 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
2328 && LABEL_NUSES (start_insn) == 1)
2329 reverse = TRUE;
2330 else
2331 return;
2333 else if (GET_CODE (body) == RETURN)
2335 start_insn = next_nonnote_insn (start_insn);
2336 if (GET_CODE (start_insn) == BARRIER)
2337 start_insn = next_nonnote_insn (start_insn);
2338 if (GET_CODE (start_insn) == CODE_LABEL
2339 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
2340 && LABEL_NUSES (start_insn) == 1)
2342 reverse = TRUE;
2343 seeking_return = 1;
2345 else
2346 return;
2348 else
2349 return;
2352 if (arm_ccfsm_state != 0 && !reverse)
2353 abort ();
2354 if (GET_CODE (insn) != JUMP_INSN)
2355 return;
2357 /* This jump might be in a PARALLEL with a clobber of the condition codes;
2358 the jump should always come first. */
2359 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
2360 body = XVECEXP (body, 0, 0);
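/* i.e. for rtl of the (assumed) shape

        (parallel [(set (pc) (if_then_else ...))
                   (clobber (reg ...))])

   only element 0, the jump itself, is examined from here on.  */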
2362 #if 0
2363 /* If this is a conditional return then we don't want to know */
2364 if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
2365 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
2366 && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
2367 || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
2368 return;
2369 #endif
2371 if (reverse
2372 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
2373 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
2375 int insns_skipped = 0, fail = FALSE, succeed = FALSE;
2376 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
2377 int then_not_else = TRUE;
2378 rtx this_insn = start_insn, label = 0;
2380 if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
2381 jump_clobbers = 1;
2383 /* Register the insn jumped to. */
2384 if (reverse)
2386 if (!seeking_return)
2387 label = XEXP (SET_SRC (body), 0);
2389 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
2390 label = XEXP (XEXP (SET_SRC (body), 1), 0);
2391 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
2393 label = XEXP (XEXP (SET_SRC (body), 2), 0);
2394 then_not_else = FALSE;
2396 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
2397 seeking_return = 1;
2398 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
2400 seeking_return = 1;
2401 then_not_else = FALSE;
2403 else
2404 abort ();
2406 /* See how many insns this branch skips, and what kind of insns. If all
2407 insns are okay, and the label or unconditional branch to the same
2408 label is not too far away, succeed. */
2409 for (insns_skipped = 0;
2410 !fail && !succeed && insns_skipped < MAX_INSNS_SKIPPED;
2411 insns_skipped++)
2413 rtx scanbody;
2415 this_insn = next_nonnote_insn (this_insn);
2416 if (!this_insn)
2417 break;
2419 scanbody = PATTERN (this_insn);
2421 switch (GET_CODE (this_insn))
2423 case CODE_LABEL:
2424 /* Succeed if it is the target label, otherwise fail since
2425 control falls in from somewhere else. */
2426 if (this_insn == label)
2428 if (jump_clobbers)
2430 arm_ccfsm_state = 2;
2431 this_insn = next_nonnote_insn (this_insn);
2433 else
2434 arm_ccfsm_state = 1;
2435 succeed = TRUE;
2437 else
2438 fail = TRUE;
2439 break;
2441 case BARRIER:
2442 /* Succeed if the following insn is the target label.
2443 Otherwise fail.
2444 If return insns are used then the last insn in a function
2445 will be a barrier. */
2446 this_insn = next_nonnote_insn (this_insn);
2447 if (this_insn && this_insn == label)
2449 if (jump_clobbers)
2451 arm_ccfsm_state = 2;
2452 this_insn = next_nonnote_insn (this_insn);
2454 else
2455 arm_ccfsm_state = 1;
2456 succeed = TRUE;
2458 else
2459 fail = TRUE;
2460 break;
2462 case CALL_INSN:
2463 /* The ARM 6xx uses full 32-bit addresses, so the cc is not
2464 preserved over calls. */
2465 if (TARGET_6)
2466 fail = TRUE;
2467 break;
2468 case JUMP_INSN:
2469 /* If this is an unconditional branch to the same label, succeed.
2470 If it is to another label, do nothing. If it is conditional,
2471 fail. */
2472 /* XXX Probably, the tests for the SET and the PC are unnecessary. */
2474 if (GET_CODE (scanbody) == SET
2475 && GET_CODE (SET_DEST (scanbody)) == PC)
2477 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
2478 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
2480 arm_ccfsm_state = 2;
2481 succeed = TRUE;
2483 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
2484 fail = TRUE;
2486 else if (GET_CODE (scanbody) == RETURN
2487 && seeking_return)
2489 arm_ccfsm_state = 2;
2490 succeed = TRUE;
2492 else if (GET_CODE (scanbody) == PARALLEL)
2494 switch (get_attr_conds (this_insn))
2496 case CONDS_NOCOND:
2497 break;
2498 default:
2499 fail = TRUE;
2500 break;
2503 break;
2505 case INSN:
2506 /* Instructions using or affecting the condition codes make it
2507 fail. */
2508 if ((GET_CODE (scanbody) == SET
2509 || GET_CODE (scanbody) == PARALLEL)
2510 && get_attr_conds (this_insn) != CONDS_NOCOND)
2511 fail = TRUE;
2512 break;
2514 default:
2515 break;
2518 if (succeed)
2520 if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
2521 arm_target_label = CODE_LABEL_NUMBER (label);
2522 else if (seeking_return || arm_ccfsm_state == 2)
2524 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
2526 this_insn = next_nonnote_insn (this_insn);
2527 if (this_insn && (GET_CODE (this_insn) == BARRIER
2528 || GET_CODE (this_insn) == CODE_LABEL))
2529 abort ();
2531 if (!this_insn)
2533 /* Oh dear!  We ran off the end... give up. */
2534 recog (PATTERN (insn), insn, NULL_PTR);
2535 arm_ccfsm_state = 0;
2536 arm_target_insn = NULL;
2537 return;
2539 arm_target_insn = this_insn;
2541 else
2542 abort ();
2543 if (jump_clobbers)
2545 if (reverse)
2546 abort ();
2547 arm_current_cc =
2548 get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
2549 0), 0), 1));
2550 if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
2551 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
2552 if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
2553 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
2555 else
2557 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
2558 what it was. */
2559 if (!reverse)
2560 arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
2561 0));
2564 if (reverse || then_not_else)
2565 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
2567 /* Restore recog_operand (getting the attributes of other insns can
2568 destroy this array, but final.c assumes that it remains intact
2569 across this call; since the insn has been recognized already we
2570 call recog directly). */
2571 recog (PATTERN (insn), insn, NULL_PTR);
2575 /* EOF */