Major rewrite -- See ChangeLog for details
[official-gcc.git] / gcc / config / arm / arm.c
blobfa10ce23201e3baaf4a194e9dee23a628ee7444f
1 /* Output routines for GCC for ARM/RISCiX.
2 Copyright (C) 1991, 1993 Free Software Foundation, Inc.
3 Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 and Martin Simmons (@harleqn.co.uk).
5 More major hacks by Richard Earnshaw (rwe11@cl.cam.ac.uk)
7 This file is part of GNU CC.
9 GNU CC is free software; you can redistribute it and/or modify
10 it under the terms of the GNU General Public License as published by
11 the Free Software Foundation; either version 2, or (at your option)
12 any later version.
14 GNU CC is distributed in the hope that it will be useful,
15 but WITHOUT ANY WARRANTY; without even the implied warranty of
16 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 GNU General Public License for more details.
19 You should have received a copy of the GNU General Public License
20 along with GNU CC; see the file COPYING. If not, write to
21 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
23 #include <stdio.h>
24 #include "assert.h"
25 #include "config.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "real.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-flags.h"
33 #include "output.h"
34 #include "insn-attr.h"
35 #include "flags.h"
37 /* The maximum number of insns skipped which will be conditionalised if
38 possible. */
39 #define MAX_INSNS_SKIPPED 5
41 /* Some function declarations. */
42 extern FILE *asm_out_file;
43 extern char *output_multi_immediate ();
44 extern char *arm_output_asm_insn ();
45 extern void arm_increase_location ();
47 /* Define the information needed to generate branch insns. This is
48 stored from the compare operation. */
50 rtx arm_compare_op0, arm_compare_op1;
51 int arm_compare_fp;
53 /* What type of cpu are we compiling for? */
55 enum processor_type arm_cpu;
57 /* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
58 must report the mode of the memory reference from PRINT_OPERAND to
59 PRINT_OPERAND_ADDRESS. */
60 int output_memory_reference_mode;
62 /* Nonzero if the prologue must setup `fp'. */
63 int current_function_anonymous_args;
65 /* Location counter of .text segment. */
66 int arm_text_location = 0;
68 /* Set to one if we think that lr is only saved because of subroutine calls,
69 but all of these can be `put after' return insns */
70 int lr_save_eliminated;
/* A hash table is used to store text segment labels and their associated
   offset from the start of the text segment.  */

struct label_offset
{
  char *name;			/* The label's name.  */
  int offset;			/* Offset from the start of .text.  */
  struct label_offset *cdr;	/* Next entry in the same hash bucket.  */
};

#define LABEL_HASH_SIZE 257

static struct label_offset *offset_table[LABEL_HASH_SIZE];
85 /* Set to 1 when a return insn is output, this means that the epilogue
86 is not needed. */
88 static int return_used_this_function;
90 /* For an explanation of these variables, see final_prescan_insn below. */
91 int arm_ccfsm_state;
92 int arm_current_cc;
93 rtx arm_target_insn;
94 int arm_target_label;
96 /* Return 1 if it is possible to return using a single instruction */
98 int
99 use_return_insn ()
101 int regno;
103 if (!reload_completed ||current_function_pretend_args_size
104 || current_function_anonymous_args
105 || (get_frame_size () && !(TARGET_APCS || frame_pointer_needed)))
106 return 0;
108 /* Can't be done if any of the FPU regs are pushed, since this also
109 requires an insn */
110 for (regno = 20; regno < 24; regno++)
111 if (regs_ever_live[regno])
112 return 0;
114 return 1;
/* Return the number of mov instructions needed to get the constant VALUE
   into a register.  */

int
arm_const_nmoves (value)
     register int value;
{
  /* Work on an unsigned copy.  The old code shifted the signed value
     right, so a constant with the top bit set sign-extended on each
     shift and the count was wrong (or the loop never terminated).  */
  register unsigned int remainder = (unsigned int) value;
  register int insns;

  if (remainder == 0)
    return 1;

  /* Each iteration accounts for one 8-bit chunk of the constant.  */
  for (insns = 0; remainder; insns++, remainder &= ~(unsigned int) 0xff)
    /* Rotate right two bits at a time until a non-zero pair of bits
       sits in the low position.  */
    while ((remainder & 3) == 0)
      remainder = (remainder >> 2) | ((remainder & 3) << 30);

  return insns;
} /* arm_const_nmoves */
135 /* Return TRUE if int I is a valid immediate ARM constant. */
138 const_ok_for_arm (i)
139 HOST_WIDE_INT i;
141 unsigned HOST_WIDE_INT mask = ~0xFF;
145 if ((i & mask & 0xffffffffu) == 0)
146 return(TRUE);
147 mask = (mask << 2) | ((mask & 0xffffffffu) >> (32 - 2)) | ~0xffffffffu;
148 } while (mask != ~0xFF);
150 return (FALSE);
151 } /* const_ok_for_arm */
153 /* This code has been fixed for cross compilation. */
155 static int fpa_consts_inited = 0;
157 char *strings_fpa[8] = {
158 "0.0",
159 "1.0",
160 "2.0",
161 "3.0",
162 "4.0",
163 "5.0",
164 "0.5",
165 "10.0"
168 static REAL_VALUE_TYPE values_fpa[8];
170 static void
171 init_fpa_table ()
173 int i;
174 REAL_VALUE_TYPE r;
176 for (i = 0; i < 8; i++)
178 r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
179 values_fpa[i] = r;
181 fpa_consts_inited = 1;
184 /* Return TRUE if rtx X is a valid immediate FPU constant. */
187 const_double_rtx_ok_for_fpu (x)
188 rtx x;
190 REAL_VALUE_TYPE r;
191 int i;
193 if (!fpa_consts_inited)
194 init_fpa_table ();
196 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
197 if (REAL_VALUE_MINUS_ZERO (r))
198 return 0;
199 for (i = 0; i < 8; i++)
200 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
201 return 1;
202 return 0;
203 } /* const_double_rtx_ok_for_fpu */
205 /* Return TRUE if rtx X is a valid immediate FPU constant. */
208 neg_const_double_rtx_ok_for_fpu (x)
209 rtx x;
211 REAL_VALUE_TYPE r;
212 int i;
214 if (!fpa_consts_inited)
215 init_fpa_table ();
217 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
218 r = REAL_VALUE_NEGATE (r);
219 if (REAL_VALUE_MINUS_ZERO (r))
220 return 0;
221 for (i = 0; i < 8; i++)
222 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
223 return 1;
224 return 0;
225 } /* neg_const_double_rtx_ok_for_fpu */
227 /* Predicates for `match_operand' and `match_operator'. */
229 /* s_register_operand is the same as register_operand, but it doesn't accept
230 (SUBREG (MEM)...). */
233 s_register_operand (op, mode)
234 register rtx op;
235 enum machine_mode mode;
237 if (GET_MODE (op) != mode && mode != VOIDmode)
238 return 0;
240 if (GET_CODE (op) == SUBREG)
242 op = SUBREG_REG (op);
245 /* We don't consider registers whose class is NO_REGS
246 to be a register operand. */
247 return (GET_CODE (op) == REG
248 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
249 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
252 /* Return 1 if OP is an item in memory, given that we are in reload. */
255 reload_memory_operand (op, mode)
256 rtx op;
257 enum machine_mode mode;
259 int regno = true_regnum (op);
261 return (! CONSTANT_P (op)
262 && (regno == -1
263 || (GET_CODE (op) == REG
264 && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
267 /* Return TRUE for valid operands for the rhs of an ARM instruction. */
270 arm_rhs_operand (op, mode)
271 rtx op;
272 enum machine_mode mode;
274 return (s_register_operand (op, mode)
275 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
276 } /* arm_rhs_operand */
278 /* Return TRUE for valid operands for the rhs of an ARM instruction, or a load.
282 arm_rhsm_operand (op, mode)
283 rtx op;
284 enum machine_mode mode;
286 return (s_register_operand (op, mode)
287 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
288 || memory_operand (op, mode));
289 } /* arm_rhs_operand */
291 /* Return TRUE for valid operands for the rhs of an ARM instruction, or if a
292 constant that is valid when negated. */
295 arm_add_operand (op, mode)
296 rtx op;
297 enum machine_mode mode;
299 return (s_register_operand (op, mode)
300 || (GET_CODE (op) == CONST_INT
301 && (const_ok_for_arm (INTVAL (op))
302 || const_ok_for_arm (-INTVAL (op)))));
303 } /* arm_rhs_operand */
306 arm_not_operand (op, mode)
307 rtx op;
308 enum machine_mode mode;
310 return (s_register_operand (op, mode)
311 || (GET_CODE (op) == CONST_INT
312 && (const_ok_for_arm (INTVAL (op))
313 || const_ok_for_arm (~INTVAL (op)))));
314 } /* arm_rhs_operand */
316 /* Return TRUE for valid operands for the rhs of an FPU instruction. */
319 fpu_rhs_operand (op, mode)
320 rtx op;
321 enum machine_mode mode;
323 if (s_register_operand (op, mode))
324 return(TRUE);
325 else if (GET_CODE (op) == CONST_DOUBLE)
326 return (const_double_rtx_ok_for_fpu (op));
327 else return (FALSE);
328 } /* fpu_rhs_operand */
331 fpu_add_operand (op, mode)
332 rtx op;
333 enum machine_mode mode;
335 if (s_register_operand (op, mode))
336 return(TRUE);
337 else if (GET_CODE (op) == CONST_DOUBLE)
338 return const_double_rtx_ok_for_fpu (op)
339 || neg_const_double_rtx_ok_for_fpu (op);
340 return (FALSE);
343 /* Return nonzero if OP is a constant power of two. */
346 power_of_two_operand (op, mode)
347 rtx op;
348 enum machine_mode mode;
350 if (GET_CODE (op) == CONST_INT)
352 int value = INTVAL(op);
353 return (value != 0 && (value & (value-1)) == 0);
355 return (FALSE);
356 } /* power_of_two_operand */
358 /* Return TRUE for a valid operand of a DImode operation.
359 Either: REG, CONST_DOUBLE or MEM(DImode_address).
360 Note that this disallows MEM(REG+REG), but allows
361 MEM(PRE/POST_INC/DEC(REG)). */
364 di_operand (op, mode)
365 rtx op;
366 enum machine_mode mode;
368 if (s_register_operand (op, mode))
369 return (TRUE);
371 switch (GET_CODE (op))
373 case CONST_DOUBLE:
374 case CONST_INT:
375 return (TRUE);
376 case MEM:
377 return (memory_address_p (DImode, XEXP (op, 0)));
378 default:
379 return (FALSE);
381 } /* di_operand */
383 /* Return TRUE for valid index operands. */
386 index_operand (op, mode)
387 rtx op;
388 enum machine_mode mode;
390 return (s_register_operand(op, mode)
391 || (immediate_operand (op, mode)
392 && INTVAL (op) < 4096 && INTVAL (op) > -4096));
393 } /* index_operand */
395 /* Return TRUE for valid shifts by a constant. This also accepts any
396 power of two on the (somewhat overly relaxed) assumption that the
397 shift operator in this case was a mult. */
400 const_shift_operand (op, mode)
401 rtx op;
402 enum machine_mode mode;
404 return (power_of_two_operand (op, mode)
405 || (immediate_operand (op, mode)
406 && (INTVAL (op) < 32 && INTVAL (op) > 0)));
407 } /* const_shift_operand */
409 /* Return TRUE for arithmetic operators which can be combined with a multiply
410 (shift). */
413 shiftable_operator (x, mode)
414 rtx x;
415 enum machine_mode mode;
417 if (GET_MODE (x) != mode)
418 return FALSE;
419 else
421 enum rtx_code code = GET_CODE (x);
423 return (code == PLUS || code == MINUS
424 || code == IOR || code == XOR || code == AND);
426 } /* shiftable_operator */
428 /* Return TRUE for shift operators. */
431 shift_operator (x, mode)
432 rtx x;
433 enum machine_mode mode;
435 if (GET_MODE (x) != mode)
436 return FALSE;
437 else
439 enum rtx_code code = GET_CODE (x);
441 if (code == MULT)
442 return power_of_two_operand (XEXP (x, 1));
443 return (code == ASHIFT || code == LSHIFT
444 || code == ASHIFTRT || code == LSHIFTRT);
446 } /* shift_operator */
448 int equality_operator (x, mode)
449 rtx x;
450 enum machine_mode mode;
452 return (GET_CODE (x) == EQ || GET_CODE (x) == NE);
455 /* Return TRUE for SMIN SMAX UMIN UMAX operators. */
458 minmax_operator (x, mode)
459 rtx x;
460 enum machine_mode mode;
462 enum rtx_code code = GET_CODE (x);
464 if (GET_MODE (x) != mode)
465 return FALSE;
466 return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
467 } /* minmax_operator */
469 /* return TRUE if x is EQ or NE */
471 /* Return TRUE if this is the condition code register, if we aren't given
472 a mode, accept any class CCmode register */
475 cc_register (x, mode)
476 rtx x;
477 enum machine_mode mode;
479 if (mode == VOIDmode)
481 mode = GET_MODE (x);
482 if (GET_MODE_CLASS (mode) != MODE_CC)
483 return FALSE;
485 if (mode == GET_MODE (x) && GET_CODE (x) == REG && REGNO (x) == 24)
486 return TRUE;
487 return FALSE;
490 enum rtx_code
491 minmax_code (x)
492 rtx x;
494 enum rtx_code code = GET_CODE (x);
496 if (code == SMAX)
497 return GE;
498 if (code == SMIN)
499 return LE;
500 if (code == UMIN)
501 return LEU;
502 if (code == UMAX)
503 return GEU;
504 abort ();
507 /* Return 1 if memory locations are adjacent */
509 adjacent_mem_locations (a, b)
510 rtx a, b;
512 int val0 = 0, val1 = 0;
513 int reg0, reg1;
515 if ((GET_CODE (XEXP (a, 0)) == REG
516 || (GET_CODE (XEXP (a, 0)) == PLUS
517 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
518 && (GET_CODE (XEXP (b, 0)) == REG
519 || (GET_CODE (XEXP (b, 0)) == PLUS
520 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
522 if (GET_CODE (XEXP (a, 0)) == PLUS)
524 reg0 = REGNO (XEXP (XEXP (a, 0), 0));
525 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
527 else
528 reg0 = REGNO (XEXP (a, 0));
529 if (GET_CODE (XEXP (b, 0)) == PLUS)
531 reg1 = REGNO (XEXP (XEXP (b, 0), 0));
532 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
534 else
535 reg1 = REGNO (XEXP (b, 0));
536 return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
538 return 0;
541 /* Return 1 if OP is a load multiple operation. It is known to be
542 parallel and the first section will be tested. */
544 load_multiple_operation (op, mode)
545 rtx op;
546 enum machine_mode mode;
548 int count = XVECLEN (op, 0);
549 int dest_regno;
550 rtx src_addr;
551 int i = 1, base = 0;
552 rtx elt;
554 if (count <= 1
555 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
556 return 0;
558 /* Check to see if this might be a write-back */
559 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
561 i++;
562 base = 1;
564 /* Now check it more carefully */
565 if (GET_CODE (SET_DEST (elt)) != REG
566 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
567 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
568 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
569 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 2) * 4
570 || GET_CODE (XVECEXP (op, 0, count - 1)) != CLOBBER
571 || GET_CODE (XEXP (XVECEXP (op, 0, count - 1), 0)) != REG
572 || REGNO (XEXP (XVECEXP (op, 0, count - 1), 0))
573 != REGNO (SET_DEST (elt)))
574 return 0;
575 count--;
578 /* Perform a quick check so we don't blow up below. */
579 if (count <= i
580 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
581 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
582 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
583 return 0;
585 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
586 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);
588 for (; i < count; i++)
590 rtx elt = XVECEXP (op, 0, i);
592 if (GET_CODE (elt) != SET
593 || GET_CODE (SET_DEST (elt)) != REG
594 || GET_MODE (SET_DEST (elt)) != SImode
595 || REGNO (SET_DEST (elt)) != dest_regno + i - base
596 || GET_CODE (SET_SRC (elt)) != MEM
597 || GET_MODE (SET_SRC (elt)) != SImode
598 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
599 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
600 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
601 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
602 return 0;
605 return 1;
608 /* Return 1 if OP is a store multiple operation. It is known to be
609 parallel and the first section will be tested. */
611 store_multiple_operation (op, mode)
612 rtx op;
613 enum machine_mode mode;
615 int count = XVECLEN (op, 0);
616 int src_regno;
617 rtx dest_addr;
618 int i = 1, base = 0;
619 rtx elt;
621 if (count <= 1
622 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
623 return 0;
625 /* Check to see if this might be a write-back */
626 if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
628 i++;
629 base = 1;
631 /* Now check it more carefully */
632 if (GET_CODE (SET_DEST (elt)) != REG
633 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
634 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
635 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
636 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 2) * 4
637 || GET_CODE (XVECEXP (op, 0, count - 1)) != CLOBBER
638 || GET_CODE (XEXP (XVECEXP (op, 0, count - 1), 0)) != REG
639 || REGNO (XEXP (XVECEXP (op, 0, count - 1), 0))
640 != REGNO (SET_DEST (elt)))
641 return 0;
642 count--;
645 /* Perform a quick check so we don't blow up below. */
646 if (count <= i
647 || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
648 || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
649 || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
650 return 0;
652 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
653 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);
655 for (; i < count; i++)
657 elt = XVECEXP (op, 0, i);
659 if (GET_CODE (elt) != SET
660 || GET_CODE (SET_SRC (elt)) != REG
661 || GET_MODE (SET_SRC (elt)) != SImode
662 || REGNO (SET_SRC (elt)) != src_regno + i - base
663 || GET_CODE (SET_DEST (elt)) != MEM
664 || GET_MODE (SET_DEST (elt)) != SImode
665 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
666 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
667 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
668 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
669 return 0;
672 return 1;
675 /* Routines for use in generating RTL */
677 rtx arm_gen_load_multiple (base_regno, count, from, up, write_back)
678 int base_regno;
679 int count;
680 rtx from;
681 int up;
682 int write_back;
684 int i = 0, j;
685 rtx result;
686 int sign = up ? 1 : -1;
688 result = gen_rtx (PARALLEL, VOIDmode,
689 rtvec_alloc (count + (write_back ? 2 : 0)));
690 if (write_back)
692 XVECEXP (result, 0, 0)
693 = gen_rtx (SET, GET_MODE (from), from,
694 plus_constant (from, count * 4 * sign));
695 i = 1;
696 count++;
698 for (j = 0; i < count; i++, j++)
700 XVECEXP (result, 0, i)
701 = gen_rtx (SET, VOIDmode, gen_rtx (REG, SImode, base_regno + j),
702 gen_rtx (MEM, SImode,
703 plus_constant (from, j * 4 * sign)));
705 if (write_back)
706 XVECEXP (result, 0, i) = gen_rtx (CLOBBER, SImode, from);
708 return result;
711 rtx arm_gen_store_multiple (base_regno, count, to, up, write_back)
712 int base_regno;
713 int count;
714 rtx to;
715 int up;
716 int write_back;
718 int i = 0, j;
719 rtx result;
720 int sign = up ? 1 : -1;
722 result = gen_rtx (PARALLEL, VOIDmode,
723 rtvec_alloc (count + (write_back ? 2 : 0)));
724 if (write_back)
726 XVECEXP (result, 0, 0)
727 = gen_rtx (SET, GET_MODE (to), to,
728 plus_constant (to, count * 4 * sign));
729 i = 1;
730 count++;
732 for (j = 0; i < count; i++, j++)
734 XVECEXP (result, 0, i)
735 = gen_rtx (SET, VOIDmode,
736 gen_rtx (MEM, SImode, plus_constant (to, j * 4 * sign)),
737 gen_rtx (REG, SImode, base_regno + j));
739 if (write_back)
740 XVECEXP (result, 0, i) = gen_rtx (CLOBBER, SImode, to);
742 return result;
745 /* X and Y are two things to compare using CODE. Emit the compare insn and
746 return the rtx for register 0 in the proper mode. FP means this is a
747 floating point compare: I don't think that it is needed on the arm. */
750 gen_compare_reg (code, x, y, fp)
751 enum rtx_code code;
752 rtx x, y;
754 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
755 rtx cc_reg = gen_rtx (REG, mode, 24);
757 emit_insn (gen_rtx (SET, VOIDmode, cc_reg,
758 gen_rtx (COMPARE, mode, x, y)));
760 return cc_reg;
764 /* Check to see if a branch is forwards or backwards. Return TRUE if it
765 is backwards. */
768 arm_backwards_branch (from, to)
769 int from, to;
771 return (insn_addresses[to] < insn_addresses[from]);
774 /* Check to see if a branch is within the distance that can be done using
775 an arithmetic expression. */
777 short_branch (from, to)
778 int from, to;
780 int delta = insn_addresses[from] + 2 - insn_addresses[to];
782 return abs (delta) < 245; /* A small margin for safety */
785 /* Check to see that the insn isn't the target of the conditionalizing
786 code */
788 arm_insn_not_targeted (insn)
789 rtx insn;
791 return insn != arm_target_insn;
795 /* Routines to output assembly language. */
797 /* fp_immediate_constant
798 if the rtx is the correct value then return the string of the number.
799 In this way we can ensure that valid double constants are generated even
800 when cross compiling. */
801 char *
802 fp_immediate_constant (x)
803 rtx (x);
805 REAL_VALUE_TYPE r;
806 int i;
808 if (!fpa_consts_inited)
809 init_fpa_table ();
811 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
812 for (i = 0; i < 8; i++)
813 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
814 return strings_fpa[i];
815 abort ();
819 /* Output the operands of a LDM/STM instruction to STREAM.
820 MASK is the ARM register set mask of which only bits 0-15 are important.
821 INSTR is the possibly suffixed base register. HAT unequals zero if a hat
822 must follow the register list. */
824 void
825 print_multi_reg (stream, instr, mask, hat)
826 FILE *stream;
827 char *instr;
828 int mask, hat;
830 int i;
831 int not_first = FALSE;
833 fprintf (stream, "\t%s, {", instr);
834 for (i = 0; i < 16; i++)
835 if (mask & (1 << i))
837 if (not_first)
838 fprintf (stream, ", ");
839 fprintf (stream, "%s", reg_names[i]);
840 not_first = TRUE;
842 fprintf (stream, "}%s\n", hat ? "^" : "");
843 } /* print_multi_reg */
845 /* Output a 'call' insn. */
847 char *
848 output_call (operands)
849 rtx operands[];
851 operands[0] = XEXP (operands[0], 0);
853 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
855 if (REGNO (operands[0]) == 14)
857 operands[0] = gen_rtx (REG, SImode, 12);
858 arm_output_asm_insn ("mov\t%0, lr", operands);
860 arm_output_asm_insn ("mov\tlr, pc", operands);
861 arm_output_asm_insn ("mov\tpc, %0", operands);
862 return ("");
863 } /* output_call */
865 static int
866 eliminate_lr2ip (x)
867 rtx *x;
869 int something_changed = 0;
870 rtx x0 = *x;
871 int code = GET_CODE (x0);
872 register int i, j;
873 register char *fmt;
875 switch (code)
877 case REG:
878 if (REGNO (x0) == 14)
880 *x = gen_rtx (REG, SImode, 12);
881 return 1;
883 return 0;
884 default:
885 /* Scan through the sub-elements and change any references there */
886 fmt = GET_RTX_FORMAT (code);
887 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
888 if (fmt[i] == 'e')
889 something_changed |= eliminate_lr2ip (&XEXP (x0, i));
890 else if (fmt[i] == 'E')
891 for (j = 0; j < XVECLEN (x0, i); j++)
892 something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));
893 return something_changed;
897 /* Output a 'call' insn that is a reference in memory. */
899 char *
900 output_call_mem (operands)
901 rtx operands[];
903 operands[0] = copy_rtx (operands[0]); /* Be ultra careful */
904 /* Handle calls using lr by using ip (which may be clobbered in subr anyway).
906 if (eliminate_lr2ip (&operands[0]))
907 arm_output_asm_insn ("mov\tip, lr", operands);
908 arm_output_asm_insn ("mov\tlr, pc", operands);
909 arm_output_asm_insn ("ldr\tpc, %0", operands);
910 return ("");
911 } /* output_call */
914 /* Output a move from arm registers to an fpu registers.
915 OPERANDS[0] is an fpu register.
916 OPERANDS[1] is the first registers of an arm register pair. */
918 char *
919 output_mov_long_double_fpu_from_arm (operands)
920 rtx operands[];
922 int arm_reg0 = REGNO (operands[1]);
923 rtx ops[3];
925 if (arm_reg0 == 12)
926 abort();
927 ops[0] = gen_rtx (REG, SImode, arm_reg0);
928 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
929 ops[2] = gen_rtx (REG, SImode, 2 + arm_reg0);
931 arm_output_asm_insn ("stmfd\tsp!, {%0, %1, %2}", ops);
932 arm_output_asm_insn ("ldfe\t%0, [sp], #12", operands);
933 return ("");
934 } /* output_mov_long_double_fpu_from_arm */
936 /* Output a move from an fpu register to arm registers.
937 OPERANDS[0] is the first registers of an arm register pair.
938 OPERANDS[1] is an fpu register. */
940 char *
941 output_mov_long_double_arm_from_fpu (operands)
942 rtx operands[];
944 int arm_reg0 = REGNO (operands[0]);
945 rtx ops[3];
947 if (arm_reg0 == 12)
948 abort();
949 ops[0] = gen_rtx (REG, SImode, arm_reg0);
950 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
951 ops[2] = gen_rtx (REG, SImode, 2 + arm_reg0);
953 arm_output_asm_insn ("stfe\t%1, [sp, #-12]!", operands);
954 arm_output_asm_insn ("ldmfd\tsp!, {%0, %1, %2}", ops);
955 return("");
956 } /* output_mov_long_double_arm_from_fpu */
958 /* Output a move from arm registers to arm registers of a long double
959 OPERANDS[0] is the destination.
960 OPERANDS[1] is the source. */
961 char *
962 output_mov_long_double_arm_from_arm (operands)
963 rtx operands[];
965 /* We have to be careful here because the two might overlap */
966 int dest_start = REGNO (operands[0]);
967 int src_start = REGNO (operands[1]);
968 rtx ops[2];
969 int i;
971 if (dest_start < src_start)
973 for (i = 0; i < 3; i++)
975 ops[0] = gen_rtx (REG, SImode, dest_start + i);
976 ops[1] = gen_rtx (REG, SImode, src_start + i);
977 arm_output_asm_insn ("mov\t%0, %1", ops);
980 else
982 for (i = 2; i >= 0; i--)
984 ops[0] = gen_rtx (REG, SImode, dest_start + i);
985 ops[1] = gen_rtx (REG, SImode, src_start + i);
986 arm_output_asm_insn ("mov\t%0, %1", ops);
989 return "";
993 /* Output a move from arm registers to an fpu registers.
994 OPERANDS[0] is an fpu register.
995 OPERANDS[1] is the first registers of an arm register pair. */
997 char *
998 output_mov_double_fpu_from_arm (operands)
999 rtx operands[];
1001 int arm_reg0 = REGNO (operands[1]);
1002 rtx ops[2];
1004 if (arm_reg0 == 12)
1005 abort();
1006 ops[0] = gen_rtx (REG, SImode, arm_reg0);
1007 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
1008 arm_output_asm_insn ("stmfd\tsp!, {%0, %1}", ops);
1009 arm_output_asm_insn ("ldfd\t%0, [sp], #8", operands);
1010 return ("");
1011 } /* output_mov_double_fpu_from_arm */
1013 /* Output a move from an fpu register to arm registers.
1014 OPERANDS[0] is the first registers of an arm register pair.
1015 OPERANDS[1] is an fpu register. */
1017 char *
1018 output_mov_double_arm_from_fpu (operands)
1019 rtx operands[];
1021 int arm_reg0 = REGNO (operands[0]);
1022 rtx ops[2];
1024 if (arm_reg0 == 12)
1025 abort();
1026 ops[0] = gen_rtx (REG, SImode, arm_reg0);
1027 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
1028 arm_output_asm_insn ("stfd\t%1, [sp, #-8]!", operands);
1029 arm_output_asm_insn ("ldmfd\tsp!, {%0, %1}", ops);
1030 return("");
1031 } /* output_mov_double_arm_from_fpu */
1033 /* Output a move between double words.
1034 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
1035 or MEM<-REG and all MEMs must be offsettable addresses. */
1037 char *
1038 output_move_double (operands)
1039 rtx operands[];
1041 enum rtx_code code0 = GET_CODE (operands[0]);
1042 enum rtx_code code1 = GET_CODE (operands[1]);
1043 rtx otherops[2];
1045 if (code0 == REG)
1047 int reg0 = REGNO (operands[0]);
1049 otherops[0] = gen_rtx (REG, SImode, 1 + reg0);
1050 if (code1 == REG)
1052 int reg1 = REGNO (operands[1]);
1053 if (reg1 == 12)
1054 abort();
1055 otherops[1] = gen_rtx (REG, SImode, 1 + reg1);
1057 /* Ensure the second source is not overwritten */
1058 if (reg0 == 1 + reg1)
1060 arm_output_asm_insn("mov\t%0, %1", otherops);
1061 arm_output_asm_insn("mov\t%0, %1", operands);
1063 else
1065 arm_output_asm_insn("mov\t%0, %1", operands);
1066 arm_output_asm_insn("mov\t%0, %1", otherops);
1069 else if (code1 == CONST_DOUBLE)
1071 otherops[1] = gen_rtx (CONST_INT, VOIDmode,
1072 CONST_DOUBLE_HIGH (operands[1]));
1073 operands[1] = gen_rtx (CONST_INT, VOIDmode,
1074 CONST_DOUBLE_LOW (operands[1]));
1075 output_mov_immediate (operands, FALSE, "");
1076 output_mov_immediate (otherops, FALSE, "");
1078 else if (code1 == CONST_INT)
1080 otherops[1] = const0_rtx;
1081 /* sign extend the intval into the high-order word */
1082 /* Note: output_mov_immediate may clobber operands[1], so we
1083 put this out first */
1084 if (INTVAL (operands[1]) < 0)
1085 arm_output_asm_insn ("mvn\t%0, %1", otherops);
1086 else
1087 arm_output_asm_insn ("mov\t%0, %1", otherops);
1088 output_mov_immediate (operands, FALSE, "");
1090 else if (code1 == MEM)
1092 switch (GET_CODE (XEXP (operands[1], 0)))
1094 case REG:
1095 /* Handle the simple case where address is [r, #0] more
1096 efficient. */
1097 operands[1] = XEXP (operands[1], 0);
1098 arm_output_asm_insn ("ldmia\t%1, %M0", operands);
1099 break;
1100 case PRE_INC:
1101 operands[1] = XEXP (XEXP (operands[1], 0), 0);
1102 arm_output_asm_insn ("add\t%1, %1, #8", operands);
1103 arm_output_asm_insn ("ldmia\t%1, %M0", operands);
1104 break;
1105 case PRE_DEC:
1106 operands[1] = XEXP (XEXP (operands[1], 0), 0);
1107 arm_output_asm_insn ("sub\t%1, %1, #8", operands);
1108 arm_output_asm_insn ("ldmia\t%1, %M0", operands);
1109 break;
1110 case POST_INC:
1111 operands[1] = XEXP (XEXP (operands[1], 0), 0);
1112 arm_output_asm_insn ("ldmia\t%1!, %M0", operands);
1113 break;
1114 case POST_DEC:
1115 operands[1] = XEXP (XEXP (operands[1], 0), 0);
1116 arm_output_asm_insn ("ldmia\t%1, %M0", operands);
1117 arm_output_asm_insn ("sub\t%1, %1, #8", operands);
1118 break;
1119 default:
1120 otherops[1] = adj_offsettable_operand (operands[1], 4);
1121 /* Take care of overlapping base/data reg. */
1122 if (reg_mentioned_p (operands[0], operands[1]))
1124 arm_output_asm_insn ("ldr\t%0, %1", otherops);
1125 arm_output_asm_insn ("ldr\t%0, %1", operands);
1127 else
1129 arm_output_asm_insn ("ldr\t%0, %1", operands);
1130 arm_output_asm_insn ("ldr\t%0, %1", otherops);
1134 else abort(); /* Constraints should prevent this */
1136 else if (code0 == MEM && code1 == REG)
1138 if (REGNO (operands[1]) == 12)
1139 abort();
1140 switch (GET_CODE (XEXP (operands[0], 0)))
1142 case REG:
1143 operands[0] = XEXP (operands[0], 0);
1144 arm_output_asm_insn ("stmia\t%0, %M1", operands);
1145 break;
1146 case PRE_INC:
1147 operands[0] = XEXP (XEXP (operands[0], 0), 0);
1148 arm_output_asm_insn ("add\t%0, %0, #8", operands);
1149 arm_output_asm_insn ("stmia\t%0, %M1", operands);
1150 break;
1151 case PRE_DEC:
1152 operands[0] = XEXP (XEXP (operands[0], 0), 0);
1153 arm_output_asm_insn ("sub\t%0, %0, #8", operands);
1154 arm_output_asm_insn ("stmia\t%0, %M1", operands);
1155 break;
1156 case POST_INC:
1157 operands[0] = XEXP (XEXP (operands[0], 0), 0);
1158 arm_output_asm_insn ("stmia\t%0!, %M1", operands);
1159 break;
1160 case POST_DEC:
1161 operands[0] = XEXP (XEXP (operands[0], 0), 0);
1162 arm_output_asm_insn ("stmia\t%0, %M1", operands);
1163 arm_output_asm_insn ("sub\t%0, %0, #8", operands);
1164 break;
1165 default:
1166 otherops[0] = adj_offsettable_operand (operands[0], 4);
1167 otherops[1] = gen_rtx (REG, SImode, 1 + REGNO (operands[1]));
1168 arm_output_asm_insn ("str\t%1, %0", operands);
1169 arm_output_asm_insn ("str\t%1, %0", otherops);
1172 else abort(); /* Constraints should prevent this */
1174 return("");
1175 } /* output_move_double */
1178 /* Output an arbitrary MOV reg, #n.
1179 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
1181 char *
1182 output_mov_immediate (operands)
1183 rtx operands[2];
1185 int n = INTVAL (operands[1]);
1186 int n_ones = 0;
1187 int i;
1189 /* Try to use one MOV */
1191 if (const_ok_for_arm (n))
1192 return (arm_output_asm_insn ("mov\t%0, %1", operands));
1194 /* Try to use one MVN */
1196 if (const_ok_for_arm(~n))
1198 operands[1] = gen_rtx (CONST_INT, VOIDmode, ~n);
1199 return (arm_output_asm_insn ("mvn\t%0, %1", operands));
1202 /* If all else fails, make it out of ORRs or BICs as appropriate. */
1204 for (i=0; i < 32; i++)
1205 if (n & 1 << i)
1206 n_ones++;
1208 if (n_ones > 16) /* Shorter to use MVN with BIC in this case. */
1209 output_multi_immediate(operands, "mvn\t%0, %1", "bic\t%0, %0, %1", 1, ~n);
1210 else
1211 output_multi_immediate(operands, "mov\t%0, %1", "orr\t%0, %0, %1", 1, n);
1212 return("");
1213 } /* output_mov_immediate */
1216 /* Output an ADD r, s, #n where n may be too big for one instruction. If
1217 adding zero to one register, output nothing. */
1219 char *
1220 output_add_immediate (operands)
1221 rtx operands[3];
1223 int n = INTVAL (operands[2]);
1225 if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
1227 if (n < 0)
1228 output_multi_immediate (operands,
1229 "sub\t%0, %1, %2", "sub\t%0, %0, %2", 2, -n);
1230 else
1231 output_multi_immediate (operands,
1232 "add\t%0, %1, %2", "add\t%0, %0, %2", 2, n);
1234 return("");
1235 } /* output_add_immediate */
/* Output a multiple immediate operation.
   OPERANDS is the vector of operands referred to in the output patterns.
   INSTR1 is the output pattern to use for the first constant.
   INSTR2 is the output pattern to use for subsequent constants.
   IMMED_OP is the index of the constant slot in OPERANDS.
   N is the constant value.
   The constant is split into byte-sized chunks aligned on even bit
   positions, each of which is a legal ARM immediate.  Returns "".  */

char *
output_multi_immediate (operands, instr1, instr2, immed_op, n)
     rtx operands[];
     char *instr1, *instr2;
     int immed_op, n;
{
  if (n == 0)
    {
      operands[immed_op] = const0_rtx;
      arm_output_asm_insn (instr1, operands); /* Quick and easy output */
    }
  else
    {
      int i;
      char *instr = instr1;

      /* Note that n is never zero here (which would give no output) */
      for (i = 0; i < 32; i += 2)
        {
          if (n & (3 << i))
            {
              /* Emit the 8-bit chunk starting at bit I; after the first
                 instruction switch to the follow-on pattern INSTR2.  */
              operands[immed_op] = gen_rtx (CONST_INT, VOIDmode,
                                            n & (255 << i));
              arm_output_asm_insn (instr, operands);
              instr = instr2;
              /* Skip past the byte just handled (loop adds 2 more).  */
              i += 6;
            }
        }
    }

  return ("");
} /* output_multi_immediate */
1279 /* Return the appropriate ARM instruction for the operation code.
1280 The returned result should not be overwritten. OP is the rtx of the
1281 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
1282 was shifted. */
1284 char *
1285 arithmetic_instr (op, shift_first_arg)
1286 rtx op;
1288 switch (GET_CODE(op))
1290 case PLUS:
1291 return ("add");
1292 case MINUS:
1293 if (shift_first_arg)
1294 return ("rsb");
1295 else
1296 return ("sub");
1297 case IOR:
1298 return ("orr");
1299 case XOR:
1300 return ("eor");
1301 case AND:
1302 return ("and");
1303 default:
1304 abort();
1306 return (""); /* stupid cc */
1307 } /* arithmetic_instr */
/* Ensure valid constant shifts and return the appropriate shift mnemonic
   for the operation code.  The returned result should not be overwritten.
   OP is the rtx code of the shift.
   SHIFT_PTR points to the shift size operand, which may be rewritten in
   place: out-of-range constant shifts are clamped, and a MULT by a power
   of two is converted into its equivalent ASL count.  */

char *
shift_instr (op, shift_ptr)
     enum rtx_code op;
     rtx *shift_ptr;
{
  int min_shift = 0;
  int max_shift = 31;
  char *mnem;

  switch (op)
    {
    case ASHIFT:
      mnem = "asl";
      break;
    case LSHIFT:
      mnem = "lsl";
      break;
    case ASHIFTRT:
      mnem = "asr";
      max_shift = 32;		/* Right shifts may encode a count of 32.  */
      break;
    case LSHIFTRT:
      mnem = "lsr";
      max_shift = 32;
      break;
    case MULT:
      /* Multiplication by 2^k is a left shift by k.  */
      *shift_ptr = gen_rtx (CONST_INT, VOIDmode,
                            int_log2 (INTVAL (*shift_ptr)));
      return ("asl");
    default:
      abort ();
    }

  /* Clamp a constant shift count into the encodable range.  */
  if (GET_CODE (*shift_ptr) == CONST_INT)
    {
      int shift = INTVAL (*shift_ptr);

      if (shift < min_shift)
        *shift_ptr = gen_rtx (CONST_INT, VOIDmode, 0);
      else if (shift > max_shift)
        *shift_ptr = gen_rtx (CONST_INT, VOIDmode, max_shift);
    }

  return (mnem);
} /* shift_instr */
/* Obtain the shift from the POWER of two.
   Returns the index of the least significant set bit in POWER.
   Aborts if POWER is zero, since no bit would ever match.  */

int
int_log2 (power)
     unsigned int power;
{
  int shift;

  /* Bound the shift count explicitly: shifting by >= 32 is undefined
     behaviour in C.  The old loop tested `(1 << shift) & power' in the
     loop condition and only then checked the bound, so for POWER == 0
     it evaluated `1 << 32' before it could abort.  */
  for (shift = 0; shift <= 31; shift++)
    if (((unsigned int) 1 << shift) & power)
      return (shift);

  abort ();
  return (0);			/* not reached */
} /* int_log2 */
1379 /* Output an arithmetic instruction which may set the condition code.
1380 OPERANDS[0] is the destination register.
1381 OPERANDS[1] is the arithmetic operator expression.
1382 OPERANDS[2] is the left hand argument.
1383 OPERANDS[3] is the right hand argument.
1384 CONST_FIRST_ARG is TRUE if the first argument of the operator was constant.
1385 SET_COND is TRUE when the condition code should be set. */
1387 char *
1388 output_arithmetic (operands, const_first_arg, set_cond)
1389 rtx operands[4];
1390 int const_first_arg;
1391 int set_cond;
1393 char mnemonic[80];
1394 char *instr = arithmetic_instr (operands[1], const_first_arg);
1396 sprintf (mnemonic, "%s%s\t%%0, %%2, %%3", instr, set_cond ? "s" : "");
1397 return (arm_output_asm_insn (mnemonic, operands));
1398 } /* output_arithmetic */
/* Output an arithmetic instruction with a shift.
   OPERANDS[0] is the destination register.
   OPERANDS[1] is the arithmetic operator expression.
   OPERANDS[2] is the unshifted register.
   OPERANDS[3] is the shift operator expression.
   OPERANDS[4] is the shifted register.
   OPERANDS[5] is the shift constant or register.
   SHIFT_FIRST_ARG is TRUE if the first argument of the operator was shifted.
   SET_COND is TRUE when the condition code should be set.
   Note that shift_instr may rewrite OPERANDS[5] in place (clamping or
   converting a MULT count).  Returns "".  */

char *
output_arithmetic_with_shift (operands, shift_first_arg, set_cond)
     rtx operands[6];
     int shift_first_arg;
     int set_cond;
{
  char mnemonic[80];
  char *instr = arithmetic_instr (operands[1], shift_first_arg);
  char *condbit = set_cond ? "s" : "";
  char *shift = shift_instr (GET_CODE (operands[3]), &operands[5]);

  /* e.g. "adds\t%0, %2, %4, lsl %5" */
  sprintf (mnemonic, "%s%s\t%%0, %%2, %%4, %s %%5", instr, condbit, shift);
  return (arm_output_asm_insn (mnemonic, operands));
} /* output_arithmetic_with_shift */
/* Output an arithmetic instruction with a power of two multiplication.
   OPERANDS[0] is the destination register.
   OPERANDS[1] is the arithmetic operator expression.
   OPERANDS[2] is the unmultiplied register.
   OPERANDS[3] is the multiplied register.
   OPERANDS[4] is the constant multiple (power of two).
   SHIFT_FIRST_ARG is TRUE if the first arg of the operator was multiplied.
   The multiply is folded into the instruction's shifter as an ASL by
   log2 of the constant.  Returns "".  */

char *
output_arithmetic_with_immediate_multiply (operands, shift_first_arg)
     rtx operands[5];
     int shift_first_arg;
{
  char mnemonic[80];
  char *instr = arithmetic_instr (operands[1], shift_first_arg);
  int shift = int_log2 (INTVAL (operands[4]));

  sprintf (mnemonic, "%s\t%%0, %%2, %%3, asl#%d", instr, shift);
  return (arm_output_asm_insn (mnemonic, operands));
} /* output_arithmetic_with_immediate_multiply */
/* Output a move with a shift.
   OP is the shift rtx code.
   OPERANDS[0] = destination register.
   OPERANDS[1] = source register.
   OPERANDS[2] = shift constant or register.
   A shift by constant zero degenerates to a plain MOV.  Returns "".  */

char *
output_shifted_move (op, operands)
     enum rtx_code op;
     rtx operands[2];
{
  char mnemonic[80];

  if (GET_CODE (operands[2]) == CONST_INT && INTVAL (operands[2]) == 0)
    sprintf (mnemonic, "mov\t%%0, %%1");
  else
    sprintf (mnemonic, "mov\t%%0, %%1, %s %%2",
             shift_instr (op, &operands[2]));
  return (arm_output_asm_insn (mnemonic, operands));
} /* output_shifted_move */
1470 char *
1471 output_shift_compare (operands, neg)
1472 rtx *operands;
1473 int neg;
1475 char buf[80];
1477 if (neg)
1478 sprintf (buf, "cmn\t%%1, %%3, %s %%4", shift_instr (GET_CODE (operands[2]),
1479 &operands[4]));
1480 else
1481 sprintf (buf, "cmp\t%%1, %%3, %s %%4", shift_instr (GET_CODE (operands[2]),
1482 &operands[4]));
1483 return arm_output_asm_insn (buf, operands);
1484 } /* output_shift_compare */
/* Output a .ascii pseudo-op, keeping track of lengths.  This is because
   /bin/as is horribly restrictive.
   STREAM is the output file, P the bytes to emit, LEN how many.
   The string is broken into multiple .ascii directives of bounded
   length; quote and backslash are escaped, and non-printing bytes are
   emitted as octal escapes.  arm_increase_location is told how many
   object bytes each directive contributes.  */

void
output_ascii_pseudo_op (stream, p, len)
     FILE *stream;
     char *p;
     int len;
{
  int i;
  int len_so_far = 1000;	/* Force a new .ascii on the first byte.  */
  int chars_so_far = 0;		/* Object bytes in the current directive.  */

  for (i = 0; i < len; i++)
    {
      register int c = p[i];

      /* Start a fresh directive once the current one gets long.  */
      if (len_so_far > 50)
        {
          if (chars_so_far)
            fputs ("\"\n", stream);
          fputs ("\t.ascii\t\"", stream);
          len_so_far = 0;
          arm_increase_location (chars_so_far);
          chars_so_far = 0;
        }

      /* Escape characters the assembler would misparse.  */
      if (c == '\"' || c == '\\')
        {
          putc ('\\', stream);
          len_so_far++;
        }

      if (c >= ' ' && c < 0177)
        {
          putc (c, stream);
          len_so_far++;
        }
      else
        {
          /* Non-printing byte: three-digit octal escape.  */
          fprintf (stream, "\\%03o", c);
          len_so_far += 4;
        }

      chars_so_far++;
    }

  fputs ("\"\n", stream);
  arm_increase_location (chars_so_far);
} /* output_ascii_pseudo_op */
/* Try to determine whether a pattern really clobbers the link register.
   This information is useful when peepholing, so that lr need not be pushed
   if we combine a call followed by a return.
   X is an insn pattern; returns nonzero when X sets or clobbers hard
   register 14 (lr), directly or through a SUBREG, or is an UNSPEC
   (assumed unsafe).  PARALLELs are checked element-wise.  */

static int
pattern_really_clobbers_lr (x)
     rtx x;
{
  int i;

  switch (GET_CODE (x))
    {
    case SET:
      switch (GET_CODE (SET_DEST (x)))
        {
        case REG:
          return REGNO (SET_DEST (x)) == 14;
        case SUBREG:
          if (GET_CODE (XEXP (SET_DEST (x), 0)) == REG)
            return REGNO (XEXP (SET_DEST (x), 0)) == 14;
          /* A SUBREG of anything but a REG is unexpected here.  */
          abort ();
        default:
          return 0;
        }
    case PARALLEL:
      /* Any element clobbering lr makes the whole pattern do so.  */
      for (i = 0; i < XVECLEN (x, 0); i++)
        if (pattern_really_clobbers_lr (XVECEXP (x, 0, i)))
          return 1;
      return 0;
    case CLOBBER:
      switch (GET_CODE (XEXP (x, 0)))
        {
        case REG:
          return REGNO (XEXP (x, 0)) == 14;
        case SUBREG:
          if (GET_CODE (XEXP (XEXP (x, 0), 0)) == REG)
            return REGNO (XEXP (XEXP (x, 0), 0)) == 14;
          abort ();
        default:
          return 0;
        }
    case UNSPEC:
      /* Unknown semantics: assume the worst.  */
      return 1;
    default:
      return 0;
    }
} /* pattern_really_clobbers_lr */
/* Scan the insn chain starting at FIRST and return nonzero if any insn
   can clobber the link register, so that the prologue must save lr.
   A CALL_INSN normally clobbers lr, but a call immediately followed by
   a return (optionally with an intervening USE of the call's result)
   can be peepholed into a tail call, so that case does not count.  */

static int
function_really_clobbers_lr (first)
     rtx first;
{
  rtx insn, next;

  for (insn = first; insn; insn = next_nonnote_insn (insn))
    {
      switch (GET_CODE (insn))
        {
        case BARRIER:
        case NOTE:
        case CODE_LABEL:
        case JUMP_INSN:		/* Jump insns only change the PC (and conds) */
        case INLINE_HEADER:
          break;
        case INSN:
          if (pattern_really_clobbers_lr (PATTERN (insn)))
            return 1;
          break;
        case CALL_INSN:
          /* Don't yet know how to handle those calls that are not to a
             SYMBOL_REF */
          if (GET_CODE (PATTERN (insn)) != PARALLEL)
            abort ();
          switch (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)))
            {
            case CALL:
              /* Indirect calls (target not a SYMBOL_REF) always count.  */
              if (GET_CODE (XEXP (XEXP (XVECEXP (PATTERN (insn), 0, 0), 0), 0))
                  != SYMBOL_REF)
                return 1;
              break;
            case SET:
              /* Call with a value: dig the address out of the SET_SRC.  */
              if (GET_CODE (XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn),
                                                          0, 0)), 0), 0))
                  != SYMBOL_REF)
                return 1;
              break;
            default:		/* Don't recognize it, be safe */
              return 1;
            }
          /* A call can be made (by peepholing) not to clobber lr iff it is
             followed by a return.  There may, however, be a use insn iff
             we are returning the result of the call.
             If we run off the end of the insn chain, then that means the
             call was at the end of the function.  Unfortunately we don't
             have a return insn for the peephole to recognize, so we
             must reject this.  (Can this be fixed by adding our own insn?) */
          if ((next = next_nonnote_insn (insn)) == NULL)
            return 1;
          /* Skip over a USE of this call's result register, if any.  */
          if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == USE
              && (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
              && (REGNO (SET_DEST (XVECEXP (PATTERN (insn), 0, 0)))
                  == REGNO (XEXP (PATTERN (next), 0))))
            if ((next = next_nonnote_insn (next)) == NULL)
              return 1;
          if (GET_CODE (next) == JUMP_INSN
              && GET_CODE (PATTERN (next)) == RETURN)
            break;		/* Call + return: lr survives.  */
          return 1;
        default:
          abort ();
        }
    }

  /* We have reached the end of the chain so lr was _not_ clobbered */
  return 0;
} /* function_really_clobbers_lr */
/* Generate the assembly for a (possibly conditional) function return.
   OPERAND carries the condition for the "%d0" conditional-suffix
   substitution; REALLY_RETURN is false when we only pop the saved
   registers without loading pc (i.e. a partial epilogue).
   Builds an LDM that restores the callee-saved registers (and fp/sp
   when a frame pointer is in use), loading pc directly when returning;
   "^" restores PSR flags on non-ARM6 targets.  Sets
   return_used_this_function so the epilogue knows.  Returns "".  */

char *
output_return_instruction (operand, really_return)
     rtx operand;
     int really_return;
{
  char instr[100];
  int reg, live_regs = 0;

  /* alloca frames cannot use the simple return path.  */
  if (current_function_calls_alloca && !really_return)
    abort ();

  /* Count the live callee-saved registers r4..r9.  */
  for (reg = 4; reg < 10; reg++)
    if (regs_ever_live[reg])
      live_regs++;

  if (live_regs || (regs_ever_live[14] && !lr_save_eliminated))
    live_regs++;

  if (frame_pointer_needed)
    live_regs += 4;

  if (live_regs)
    {
      if (lr_save_eliminated || !regs_ever_live[14])
        live_regs++;
      if (frame_pointer_needed)
        strcpy (instr, "ldm%d0ea\tfp, {");
      else
        strcpy (instr, "ldm%d0fd\tsp!, {");
      /* List r4..r9; live_regs counts down to place the commas.  */
      for (reg = 4; reg < 10; reg++)
        if (regs_ever_live[reg])
          {
            strcat (instr, reg_names[reg]);
            if (--live_regs)
              strcat (instr, ", ");
          }
      if (frame_pointer_needed)
        {
          /* Restore fp and sp, then pc (or lr when not returning).  */
          strcat (instr, reg_names[11]);
          strcat (instr, ", ");
          strcat (instr, reg_names[13]);
          strcat (instr, ", ");
          strcat (instr, really_return ? reg_names[15] : reg_names[14]);
        }
      else
        strcat (instr, really_return ? reg_names[15] : reg_names[14]);
      /* "}^" also restores the PSR; not wanted on ARM6-style cores.  */
      strcat (instr, (TARGET_6 || !really_return) ? "}" : "}^");
      arm_output_asm_insn (instr, &operand);
    }
  else if (really_return)
    {
      /* Nothing to pop: a bare MOV pc, lr (with S-bit pre-ARM6).  */
      strcpy (instr, TARGET_6 ? "mov%d0\tpc, lr" : "mov%d0s\tpc, lr");
      arm_output_asm_insn (instr, &operand);
    }

  return_used_this_function = 1;
  return "";
} /* output_return_instruction */
1709 /* The amount of stack adjustment that happens here, in output_return and in
1710 output_epilogue must be exactly the same as was calculated during reload,
1711 or things will point to the wrong place. The only time we can safely
1712 ignore this constraint is when a function has no arguments on the stack,
   no stack frame requirement and no live registers except for `lr'.  If we
1714 can guarantee that by making all function calls into tail calls and that
1715 lr is not clobbered in any other way, then there is no need to push lr
1716 onto the stack. */
/* Emit the function prologue to F.  FRAME_SIZE is the size of the local
   variable area.
   Steps: announce frame info as comments, decide whether lr must be
   saved, push pretend (stdarg) argument registers or make room for
   them, push the live callee-saved integer registers, spill live FP
   registers, establish the frame pointer, and finally allocate the
   local frame.  Keeps code_size in step with the bytes emitted so that
   arm_text_location stays accurate.  */

void
output_prologue (f, frame_size)
     FILE *f;
     int frame_size;
{
  int reg, live_regs_mask = 0, code_size = 0;
  rtx operands[3];
  /* Nonzero if we must stuff some register arguments onto the stack as if
     they were passed there.  */
  int store_arg_regs = 0;

  return_used_this_function = 0;
  lr_save_eliminated = 0;

  fprintf (f, "\t@ args = %d, pretend = %d, frame = %d\n",
           current_function_args_size, current_function_pretend_args_size,
           frame_size);
  fprintf (f, "\t@ frame_needed = %d, current_function_anonymous_args = %d\n",
           frame_pointer_needed, current_function_anonymous_args);

  if (current_function_anonymous_args && current_function_pretend_args_size)
    store_arg_regs = 1;

  /* Collect the live callee-saved registers r4..r9.  */
  for (reg = 4; reg < 10; reg++)
    if (regs_ever_live[reg])
      live_regs_mask |= (1 << reg);

  if (frame_pointer_needed)
    {
      /* Save fp, ip, lr, pc (mask 0xD800) and snapshot sp in ip.  */
      live_regs_mask |= 0xD800;
      fputs ("\tmov\tip, sp\n", f);
      code_size += 4;
    }
  else if (regs_ever_live[14])
    {
      /* lr is live, but if nothing really clobbers it we can skip the
         save and let calls be peepholed into tail calls.  */
      if (! current_function_args_size
          && !function_really_clobbers_lr (get_insns ()))
        {
          fprintf (f,"\t@ I don't think this function clobbers lr\n");
          lr_save_eliminated = 1;
        }
      else
        live_regs_mask |= 0x4000;
    }

  /* If CURRENT_FUNCTION_PRETEND_ARGS_SIZE, adjust the stack pointer to make
     room.  If also STORE_ARG_REGS store the argument registers involved in
     the created slot (this is for stdarg and varargs).  */
  if (current_function_pretend_args_size)
    {
      if (store_arg_regs)
        {
          int arg_size, mask = 0;

          assert (current_function_pretend_args_size <= 16);
          /* Push the highest-numbered arg registers down to cover the
             pretend area (at most r0..r3).  */
          for (reg = 3, arg_size = current_function_pretend_args_size;
               arg_size > 0; reg--, arg_size -= 4)
            mask |= (1 << reg);
          print_multi_reg (f, "stmfd\tsp!", mask, FALSE);
          code_size += 4;
        }
      else
        {
          operands[0] = operands[1] = stack_pointer_rtx;
          operands[2] = gen_rtx (CONST_INT, VOIDmode,
                                 -current_function_pretend_args_size);
          output_add_immediate (operands);
        }
    }

  if (live_regs_mask)
    {
      /* if a di mode load/store multiple is used, and the base register
         is r3, then r4 can become an ever live register without lr
         doing so, in this case we need to push lr as well, or we
         will fail to get a proper return.  */
      live_regs_mask |= 0x4000;
      lr_save_eliminated = 0;
      print_multi_reg (f, "stmfd\tsp!", live_regs_mask, FALSE);
      code_size += 4;
    }

  /* Spill live floating-point registers f4..f7 (hard regs 20..23).  */
  for (reg = 23; reg > 19; reg--)
    if (regs_ever_live[reg])
      {
        fprintf (f, "\tstfe\t%s, [sp, #-12]!\n", reg_names[reg]);
        code_size += 4;
      }

  if (frame_pointer_needed)
    {
      /* Make `fp' point to saved value of `pc'.  */
      operands[0] = gen_rtx (REG, SImode, HARD_FRAME_POINTER_REGNUM);
      operands[1] = gen_rtx (REG, SImode, 12);
      operands[2] = gen_rtx (CONST_INT, VOIDmode,
                             - (4 + current_function_pretend_args_size));
      output_add_immediate (operands);
    }

  if (frame_size)
    {
      operands[0] = operands[1] = stack_pointer_rtx;
      operands[2] = gen_rtx (CONST_INT, VOIDmode, -frame_size);
      output_add_immediate (operands);
    }

  arm_increase_location (code_size);
} /* output_prologue */
/* Emit the function epilogue to F, undoing output_prologue.
   FRAME_SIZE is the size of the local variable area.
   If a return insn already did the work (return_used_this_function),
   nothing is emitted.  Otherwise restore FP registers, pop the saved
   integer registers, deallocate the pretend-args area and return.  The
   stack adjustments here must match output_prologue exactly.  */

void
output_epilogue (f, frame_size)
     FILE *f;
     int frame_size;
{
  /* NOTE(review): fp_needed is set but never used in this function.  */
  int reg, live_regs_mask = 0, code_size = 0, fp_needed = 0;
  /* If we need this then it will always be at least this much */
  int floats_offset = 24;
  rtx operands[3];

  if (use_return_insn () && return_used_this_function)
    {
      /* A return insn can only have handled a frame when fp or APCS
         frames made the layout recoverable.  */
      if (frame_size && !(frame_pointer_needed || TARGET_APCS))
        abort ();
      return;
    }

  for (reg = 4; reg <= 10; reg++)
    if (regs_ever_live[reg])
      {
        live_regs_mask |= (1 << reg);
        floats_offset += 4;	/* Each saved reg pushes the floats deeper.  */
      }

  if (frame_pointer_needed)
    {
      /* Reload FP registers by fixed offsets from fp.  */
      for (reg = 23; reg >= 20; reg--)
        if (regs_ever_live[reg])
          {
            fprintf (f, "\tldfe\t%s, [fp, #-%d]\n", reg_names[reg],
                     floats_offset);
            floats_offset += 12;
            code_size += 4;
          }
      /* Restore fp, sp and pc (mask 0xA800) in one LDM; "^" form
         (last arg TRUE) also restores the PSR on pre-ARM6 cores.  */
      live_regs_mask |= 0xA800;
      print_multi_reg (f, "ldmea\tfp", live_regs_mask,
                       TARGET_6 ? FALSE : TRUE);
      code_size += 4;
    }
  else
    {
      /* Restore stack pointer if necessary.  */
      if (frame_size)
        {
          operands[0] = operands[1] = stack_pointer_rtx;
          operands[2] = gen_rtx (CONST_INT, VOIDmode, frame_size);
          output_add_immediate (operands);
        }

      /* Pop FP registers in the reverse of the prologue's pushes.  */
      for (reg = 20; reg < 24; reg++)
        if (regs_ever_live[reg])
          {
            fprintf (f, "\tldfe\t%s, [sp], #12\n", reg_names[reg]);
            code_size += 4;
          }

      if (current_function_pretend_args_size == 0 && regs_ever_live[14])
        {
          /* Pop straight into pc (bit 15) to return in one insn.  */
          print_multi_reg (f, "ldmfd\tsp!", live_regs_mask | 0x8000,
                           TARGET_6 ? FALSE : TRUE);
          code_size += 4;
        }
      else
        {
          if (live_regs_mask || regs_ever_live[14])
            {
              live_regs_mask |= 0x4000;	/* Pop lr too.  */
              print_multi_reg (f, "ldmfd\tsp!", live_regs_mask, FALSE);
              code_size += 4;
            }
          if (current_function_pretend_args_size)
            {
              /* Unstack the pretend (stdarg) argument area.  */
              operands[0] = operands[1] = stack_pointer_rtx;
              operands[2] = gen_rtx (CONST_INT, VOIDmode,
                                     current_function_pretend_args_size);
              output_add_immediate (operands);
            }
          fputs (TARGET_6 ? "\tmov\tpc, lr\n" : "\tmovs\tpc, lr\n", f);
          code_size += 4;
        }
    }

  arm_increase_location (code_size);
  current_function_anonymous_args = 0;
} /* output_epilogue */
/* Increase the `arm_text_location' by AMOUNT if we're in the text
   segment.  Outside the text section the counter is deliberately left
   alone, since it only tracks text-section offsets for `llc' loads.  */

void
arm_increase_location (amount)
     int amount;
{
  if (in_text_section ())
    arm_text_location += amount;
} /* arm_increase_location */
/* Like output_asm_insn (), but also increases the arm_text_location (if in
   the .text segment, of course, even though this will always be true).
   Returns the empty string.
   Assumes every emitted instruction is exactly 4 bytes long.  */

char *
arm_output_asm_insn (template, operands)
     char *template;
     rtx *operands;
{
  extern FILE *asm_out_file;

  output_asm_insn (template, operands);
  if (in_text_section ())
    arm_text_location += 4;
  /* Flush so partial output is visible if the compiler dies here.  */
  fflush (asm_out_file);
  return ("");
} /* arm_output_asm_insn */
/* Output a label definition.  If this label is within the .text segment, it
   is stored in OFFSET_TABLE, to be used when building `llc' instructions.
   Maybe GCC remembers names not starting with a `*' for a long time, but this
   is a minority anyway, so we just make a copy.  Do not store the leading `*'
   if the name starts with one.
   Names without a `*' get the assembler's `_' prefix prepended before
   being hashed.  The table entries are never freed.  */

void
arm_asm_output_label (stream, name)
     FILE *stream;
     char *name;
{
  char *real_name, *s;
  struct label_offset *cur;
  int hash = 0;

  assemble_name (stream, name);
  fputs (":\n", stream);
  /* Only text-section labels are needed for llc offset calculations.  */
  if (! in_text_section ())
    return;

  if (name[0] == '*')
    {
      /* Verbatim name: store it without the `*'.  */
      real_name = xmalloc (1 + strlen (&name[1]));
      strcpy (real_name, &name[1]);
    }
  else
    {
      /* Ordinary name: store it with the `_' the assembler will see.  */
      real_name = xmalloc (2 + strlen (name));
      strcpy (real_name, "_");
      strcat (real_name, name);
    }

  /* Simple additive hash over the characters of the stored name.  */
  for (s = real_name; *s; s++)
    hash += *s;
  hash = hash % LABEL_HASH_SIZE;
  cur = (struct label_offset *) xmalloc (sizeof (struct label_offset));
  cur->name = real_name;
  cur->offset = arm_text_location;
  cur->cdr = offset_table[hash];
  offset_table[hash] = cur;
} /* arm_asm_output_label */
/* Output the instructions needed to perform what Martin's /bin/as called
   llc: load an SImode thing from the function's constant pool.

   XXX This could be enhanced in that we do not really need a pointer in the
   constant pool pointing to the real thing.  If we can address this pointer,
   we can also address what it is pointing at, in fact, anything in the text
   segment which has been defined already within this .s file.
   OPERANDS[0] is the destination register; OPERANDS[1] is a MEM whose
   address symbol (which must start with `*') was recorded in
   offset_table by arm_asm_output_label.  Honors any pending conditional
   execution state from the ccfsm.  Returns "".  */

char *
arm_output_llc (operands)
     rtx *operands;
{
  char *s, *name = XSTR (XEXP (operands[1], 0), 0);
  struct label_offset *he;
  int hash = 0, conditional = (arm_ccfsm_state == 3 || arm_ccfsm_state == 4);

  if (*name != '*')
    abort ();

  /* Look the label up with the same hash arm_asm_output_label used.  */
  for (s = &name[1]; *s; s++)
    hash += *s;
  hash = hash % LABEL_HASH_SIZE;
  he = offset_table[hash];
  while (he && strcmp (he->name, &name[1]))
    he = he->cdr;

  if (!he)
    abort ();

  /* Within LDR's 12-bit immediate range of pc?  (pc reads as . + 8)  */
  if (arm_text_location + 8 - he->offset < 4095)
    {
      fprintf (asm_out_file, "\tldr%s\t%s, [pc, #%s - . - 8]\n",
               conditional ? arm_condition_codes[arm_current_cc] : "",
               reg_names[REGNO (operands[0])], &name[1]);
      arm_increase_location (4);
      return ("");
    }
  else
    {
      /* Too far: first SUB the 4K-aligned distance into the destination
         register, then LDR with the remaining sub-4K offset.  */
      int offset = - (arm_text_location + 8 - he->offset);
      char *reg_name = reg_names[REGNO (operands[0])];

      /* ??? This is a hack, assuming the constant pool never is more than
         (1 + 255) * 4096 == 1Meg away from the PC.  */
      if (offset > 1000000)
        abort ();

      fprintf (asm_out_file, "\tsub%s\t%s, pc, #(8 + . - %s) & ~4095\n",
               conditional ? arm_condition_codes[arm_current_cc] : "",
               reg_name, &name[1]);
      fprintf (asm_out_file, "\tldr%s\t%s, [%s, #- ((4 + . - %s) & 4095)]\n",
               conditional ? arm_condition_codes[arm_current_cc] : "",
               reg_name, reg_name, &name[1]);
      arm_increase_location (8);
    }

  return ("");
} /* arm_output_llc */
/* output_load_symbol ()
   load a symbol that is known to be in the text segment into a register.
   OPERANDS[0] is the destination register; OPERANDS[1] is a SYMBOL_REF
   whose name (starting with `*') is in offset_table.  The address is
   synthesized as pc minus the known distance, built up 8 bits at a
   time with SUB instructions; word-aligned symbols can use 10-bit
   chunks and so may need fewer instructions.  Returns "".  */

char *
output_load_symbol (operands)
     rtx *operands;
{
  char *s, *name = XSTR (operands[1], 0);
  struct label_offset *he;
  int hash = 0;
  int offset;

  if (*name != '*')
    abort ();

  /* Same hash scheme as arm_asm_output_label.  */
  for (s = &name[1]; *s; s++)
    hash += *s;
  hash = hash % LABEL_HASH_SIZE;
  he = offset_table[hash];
  while (he && strcmp (he->name, &name[1]))
    he = he->cdr;

  if (!he)
    abort ();

  /* The symbol must already have been emitted, i.e. lie behind us.  */
  offset = (arm_text_location + 8 - he->offset);
  if (offset < 0)
    abort ();

  /* If the symbol is word aligned then we might be able to reduce the
     number of loads */
  if ((offset & 3) == 0)
    {
      /* 10-bit chunks: the low two bits are known to be zero.  */
      arm_output_asm_insn ("sub\t%0, pc, #(8 + . -%a1) & 1023", operands);
      if (offset > 0x3ff)
        {
          arm_output_asm_insn ("sub\t%0, %0, #(4 + . -%a1) & 261120",
                               operands);
          if (offset > 0x3ffff)
            {
              arm_output_asm_insn ("sub\t%0, %0, #(. -%a1) & 66846720",
                                   operands);
              if (offset > 0x3ffffff)
                arm_output_asm_insn ("sub\t%0, %0, #(. - 4 -%a1) & -67108864",
                                     operands);
            }
        }
    }
  else
    {
      /* Unaligned: plain 8-bit chunks.  */
      arm_output_asm_insn ("sub\t%0, pc, #(8 + . -%a1) & 255", operands);
      if (offset > 0x0ff)
        {
          arm_output_asm_insn ("sub\t%0, %0, #(4 + . -%a1) & 65280", operands);
          if (offset > 0x0ffff)
            {
              arm_output_asm_insn ("sub\t%0, %0, #(. -%a1) & 16711680",
                                   operands);
              if (offset > 0x0ffffff)
                arm_output_asm_insn ("sub\t%0, %0, #(. - 4 -%a1) & -16777216",
                                     operands);
            }
        }
    }

  return "";
} /* output_load_symbol */
/* Output code resembling an .lcomm directive.  /bin/as doesn't have this
   directive hence this hack, which works by reserving some `.space' in the
   bss segment directly.

   XXX This is a severe hack, which is guaranteed NOT to work since it doesn't
   define STATIC COMMON space but merely STATIC BSS space.  */

void
output_lcomm_directive (stream, name, size, rounded)
     FILE *stream;
     char *name;
     int size, rounded;
{
  /* Reserve ROUNDED bytes in bss under NAME (SIZE itself is not used).  */
  fputs ("\n\t.bss\t@ .lcomm\n", stream);
  assemble_name (stream, name);
  fprintf (stream, ":\t.space\t%d\n", rounded);
  /* Switch back to the section we were emitting before the hack.  */
  fputs (in_text_section () ? "\n\t.text\n" : "\n\t.data\n", stream);
} /* output_lcomm_directive */
2140 /* A finite state machine takes care of noticing whether or not instructions
2141 can be conditionally executed, and thus decrease execution time and code
2142 size by deleting branch instructions. The fsm is controlled by
2143 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
2145 /* The state of the fsm controlling condition codes are:
2146 0: normal, do nothing special
2147 1: make ASM_OUTPUT_OPCODE not output this instruction
2148 2: make ASM_OUTPUT_OPCODE not output this instruction
2149 3: make instructions conditional
2150 4: make instructions conditional
2152 State transitions (state->state by whom under condition):
2153 0 -> 1 final_prescan_insn if the `target' is a label
2154 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
2155 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
2156 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
2157 3 -> 0 ASM_OUTPUT_INTERNAL_LABEL if the `target' label is reached
2158 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
2159 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
2160 (the target insn is arm_target_insn).
2162 If the jump clobbers the conditions then we use states 2 and 4.
2164 A similar thing can be done with conditional return insns.
2166 XXX In case the `target' is an unconditional branch, this conditionalising
2167 of the instructions always reduces code size, but not always execution
2168 time. But then, I want to reduce the code size to somewhere near what
2169 /bin/cc produces. */
/* The condition codes of the ARM, and the inverse function.
   Indexed by the values that get_arm_condition_code returns; each pair
   of adjacent entries are logical inverses of one another, which is
   what makes the XOR-with-1 inversion below work.  */
char *arm_condition_codes[] =
{
  "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
  "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
};

/* Flip a condition-code index to its inverse (eq<->ne, cs<->cc, ...).  */
#define ARM_INVERSE_CONDITION_CODE(X)  ((X) ^ 1)
/* Returns the index of the ARM condition code string in
   `arm_condition_codes'.  COMPARISON should be an rtx like
   `(eq (...) (...))'.  Aborts on any rtx code that is not a
   comparison.  */

int
get_arm_condition_code (comparison)
     rtx comparison;
{
  switch (GET_CODE (comparison))
    {
    case NE: return (1);	/* "ne" */
    case EQ: return (0);	/* "eq" */
    case GE: return (10);	/* "ge" */
    case GT: return (12);	/* "gt" */
    case LE: return (13);	/* "le" */
    case LT: return (11);	/* "lt" */
    case GEU: return (2);	/* "cs" */
    case GTU: return (8);	/* "hi" */
    case LEU: return (9);	/* "ls" */
    case LTU: return (3);	/* "cc" */
    default: abort ();
    }
  /*NOTREACHED*/
  return (42);
} /* get_arm_condition_code */
2207 void
2208 final_prescan_insn (insn, opvec, noperands)
2209 rtx insn;
2210 rtx *opvec;
2211 int noperands;
2213 /* BODY will hold the body of INSN. */
2214 register rtx body = PATTERN (insn);
2216 /* This will be 1 if trying to repeat the trick, and things need to be
2217 reversed if it appears to fail. */
2218 int reverse = 0;
2220 /* JUMP_CLOBBERS will be one implies that the conditions if a branch is
2221 taken are clobbered, even if the rtl suggests otherwise. It also
2222 means that we have to grub around within the jump expression to find
2223 out what the conditions are when the jump isn't taken. */
2224 int jump_clobbers = 0;
2226 /* If we start with a return insn, we only succeed if we find another one. */
2227 int seeking_return = 0;
2229 /* START_INSN will hold the insn from where we start looking. This is the
2230 first insn after the following code_label if REVERSE is true. */
2231 rtx start_insn = insn;
2233 /* If in state 4, check if the target branch is reached, in order to
2234 change back to state 0. */
2235 if (arm_ccfsm_state == 4)
2237 if (insn == arm_target_insn)
2238 arm_ccfsm_state = 0;
2239 return;
2242 /* If in state 3, it is possible to repeat the trick, if this insn is an
2243 unconditional branch to a label, and immediately following this branch
2244 is the previous target label which is only used once, and the label this
2245 branch jumps to is not too far off. */
2246 if (arm_ccfsm_state == 3)
2248 if (simplejump_p (insn))
2250 start_insn = next_nonnote_insn (start_insn);
2251 if (GET_CODE (start_insn) == BARRIER)
2253 /* XXX Isn't this always a barrier? */
2254 start_insn = next_nonnote_insn (start_insn);
2256 if (GET_CODE (start_insn) == CODE_LABEL
2257 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
2258 && LABEL_NUSES (start_insn) == 1)
2259 reverse = TRUE;
2260 else
2261 return;
2263 else if (GET_CODE (body) == RETURN)
2265 start_insn = next_nonnote_insn (start_insn);
2266 if (GET_CODE (start_insn) == BARRIER)
2267 start_insn = next_nonnote_insn (start_insn);
2268 if (GET_CODE (start_insn) == CODE_LABEL
2269 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
2270 && LABEL_NUSES (start_insn) == 1)
2272 reverse = TRUE;
2273 seeking_return = 1;
2275 else
2276 return;
2278 else
2279 return;
2282 if (arm_ccfsm_state != 0 && !reverse)
2283 abort ();
2284 if (GET_CODE (insn) != JUMP_INSN)
2285 return;
   /* This jump might be paralleled with a clobber of the condition codes;
2288 the jump should always come first */
2289 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
2290 body = XVECEXP (body, 0, 0);
2292 #if 0
2293 /* If this is a conditional return then we don't want to know */
2294 if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
2295 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
2296 && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
2297 || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
2298 return;
2299 #endif
2301 if (reverse
2302 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
2303 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
2305 int insns_skipped = 0, fail = FALSE, succeed = FALSE;
2306 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
2307 int then_not_else = TRUE;
2308 rtx this_insn = start_insn, label = 0;
2310 if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
2311 jump_clobbers = 1;
2313 /* Register the insn jumped to. */
2314 if (reverse)
2316 if (!seeking_return)
2317 label = XEXP (SET_SRC (body), 0);
2319 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
2320 label = XEXP (XEXP (SET_SRC (body), 1), 0);
2321 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
2323 label = XEXP (XEXP (SET_SRC (body), 2), 0);
2324 then_not_else = FALSE;
2326 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
2327 seeking_return = 1;
2328 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
2330 seeking_return = 1;
2331 then_not_else = FALSE;
2333 else
2334 abort ();
2336 /* See how many insns this branch skips, and what kind of insns. If all
2337 insns are okay, and the label or unconditional branch to the same
2338 label is not too far away, succeed. */
2339 for (insns_skipped = 0;
2340 !fail && !succeed && insns_skipped < MAX_INSNS_SKIPPED;
2341 insns_skipped++)
2343 rtx scanbody;
2345 this_insn = next_nonnote_insn (this_insn);
2346 if (!this_insn)
2347 break;
2349 scanbody = PATTERN (this_insn);
2351 switch (GET_CODE (this_insn))
2353 case CODE_LABEL:
2354 /* Succeed if it is the target label, otherwise fail since
2355 control falls in from somewhere else. */
2356 if (this_insn == label)
2358 if (jump_clobbers)
2360 arm_ccfsm_state = 2;
2361 this_insn = next_nonnote_insn (this_insn);
2363 else
2364 arm_ccfsm_state = 1;
2365 succeed = TRUE;
2367 else
2368 fail = TRUE;
2369 break;
2371 case BARRIER:
2372 /* Succeed if the following insn is the target label.
2373 Otherwise fail.
2374 If return insns are used then the last insn in a function
2375 will be a barrier. */
2376 this_insn = next_nonnote_insn (this_insn);
2377 if (this_insn && this_insn == label)
2379 if (jump_clobbers)
2381 arm_ccfsm_state = 2;
2382 this_insn = next_nonnote_insn (this_insn);
2384 else
2385 arm_ccfsm_state = 1;
2386 succeed = TRUE;
2388 else
2389 fail = TRUE;
2390 break;
2392 case CALL_INSN:
2393 /* The arm 6xx uses full 32 bit addresses so the cc is not
2394 preserved over calls */
2395 if (TARGET_6)
2396 fail = TRUE;
2397 break;
2398 case JUMP_INSN:
2399 /* If this is an unconditional branch to the same label, succeed.
2400 If it is to another label, do nothing. If it is conditional,
2401 fail. */
2402 /* XXX Probably, the test for the SET and the PC are unnecessary. */
2404 if (GET_CODE (scanbody) == SET
2405 && GET_CODE (SET_DEST (scanbody)) == PC)
2407 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
2408 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
2410 arm_ccfsm_state = 2;
2411 succeed = TRUE;
2413 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
2414 fail = TRUE;
2416 else if (GET_CODE (scanbody) == RETURN
2417 && seeking_return)
2419 arm_ccfsm_state = 2;
2420 succeed = TRUE;
2422 else if (GET_CODE (scanbody) == PARALLEL)
2424 switch (get_attr_conds (this_insn))
2426 case CONDS_NOCOND:
2427 break;
2428 default:
2429 fail = TRUE;
2430 break;
2433 break;
2435 case INSN:
2436 /* Instructions using or affecting the condition codes make it
2437 fail. */
2438 if ((GET_CODE (scanbody) == SET
2439 || GET_CODE (scanbody) == PARALLEL)
2440 && get_attr_conds (this_insn) != CONDS_NOCOND)
2441 fail = TRUE;
2442 break;
2444 default:
2445 break;
2448 if (succeed)
2450 if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
2451 arm_target_label = CODE_LABEL_NUMBER (label);
2452 else if (seeking_return || arm_ccfsm_state == 2)
2454 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
2456 this_insn = next_nonnote_insn (this_insn);
2457 if (this_insn && (GET_CODE (this_insn) == BARRIER
2458 || GET_CODE (this_insn) == CODE_LABEL))
2459 abort ();
2461 if (!this_insn)
2463 /* Oh, dear! we ran off the end.. give up */
2464 recog (PATTERN (insn), insn, NULL_PTR);
2465 arm_ccfsm_state = 0;
2466 return;
2468 arm_target_insn = this_insn;
2470 else
2471 abort ();
2472 if (jump_clobbers)
2474 if (reverse)
2475 abort ();
2476 arm_current_cc =
2477 get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
2478 0), 0), 1));
2479 if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
2480 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
2481 if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
2482 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
2484 else
2486 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
2487 what it was. */
2488 if (!reverse)
2489 arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
2490 0));
2493 if (reverse || then_not_else)
2494 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
2496 /* restore recog_operand (getting the attributes of other insns can
2497 destroy this array, but final.c assumes that it remains intact
2498 across this call; since the insn has been recognized already we
2499 call recog direct). */
2500 recog (PATTERN (insn), insn, NULL_PTR);
2502 } /* final_prescan_insn */
2504 /* EOF */