1 /* Output routines for GCC for ARM/RISCiX.
2 Copyright (C) 1991, 1993 Free Software Foundation, Inc.
3 Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 and Martin Simmons (@harleqn.co.uk).
5 More major hacks by Richard Earnshaw (rwe11@cl.cam.ac.uk)
7 This file is part of GNU CC.
9 GNU CC is free software; you can redistribute it and/or modify
10 it under the terms of the GNU General Public License as published by
11 the Free Software Foundation; either version 2, or (at your option)
14 GNU CC is distributed in the hope that it will be useful,
15 but WITHOUT ANY WARRANTY; without even the implied warranty of
16 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 GNU General Public License for more details.
19 You should have received a copy of the GNU General Public License
20 along with GNU CC; see the file COPYING. If not, write to
21 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
28 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-flags.h"
34 #include "insn-attr.h"
37 /* The maximum number of insns skipped which will be conditionalised if
39 #define MAX_INSNS_SKIPPED 5
41 /* Some function declarations. */
42 extern FILE *asm_out_file
;
43 extern char *output_multi_immediate ();
44 extern char *arm_output_asm_insn ();
45 extern void arm_increase_location ();
47 /* Define the information needed to generate branch insns. This is
48 stored from the compare operation. */
50 rtx arm_compare_op0
, arm_compare_op1
;
53 /* What type of cpu are we compiling for? */
55 enum processor_type arm_cpu
;
57 /* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
58 must report the mode of the memory reference from PRINT_OPERAND to
59 PRINT_OPERAND_ADDRESS. */
60 int output_memory_reference_mode
;
62 /* Nonzero if the prologue must setup `fp'. */
63 int current_function_anonymous_args
;
65 /* Location counter of .text segment. */
66 int arm_text_location
= 0;
68 /* Set to one if we think that lr is only saved because of subroutine calls,
69 but all of these can be `put after' return insns */
70 int lr_save_eliminated
;
72 /* A hash table is used to store text segment labels and their associated
73 offset from the start of the text segment. */
78 struct label_offset
*cdr
;
81 #define LABEL_HASH_SIZE 257
83 static struct label_offset
*offset_table
[LABEL_HASH_SIZE
];
85 /* Set to 1 when a return insn is output, this means that the epilogue
88 static int return_used_this_function
;
90 /* For an explanation of these variables, see final_prescan_insn below. */
96 /* Return 1 if it is possible to return using a single instruction */
103 if (!reload_completed
||current_function_pretend_args_size
104 || current_function_anonymous_args
105 || (get_frame_size () && !(TARGET_APCS
|| frame_pointer_needed
)))
108 /* Can't be done if any of the FPU regs are pushed, since this also
110 for (regno
= 20; regno
< 24; regno
++)
111 if (regs_ever_live
[regno
])
/* Return the number of mov instructions needed to get the constant VALUE into
   a register, building it one rotated 8-bit chunk at a time.  */

int
arm_const_nmoves (value)
     int value;
{
  int i;
  /* Work on an unsigned copy.  The original code "rotated" with
     `value = (value >> 2) | ((value & 3) << 30)', but inside the loop
     (value & 3) is always zero, so that was really a signed right shift:
     it sign-extends and never terminates for negative values.  */
  unsigned int v = (unsigned int) value;

  if (v == 0)
    return (1);			/* A single mov #0.  */

  for (i = 0; v; i++, v &= ~(unsigned int) 0xff)
    /* Discard the guaranteed-zero low bits so the next 8-bit chunk is
       aligned at the bottom; since the bits shifted out are zero, a
       logical shift is equivalent to the intended 32-bit rotate.  */
    while ((v & 3) == 0)
      v >>= 2;

  return (i);
} /* arm_const_nmoves */
135 /* Return TRUE if int I is a valid immediate ARM constant. */
141 unsigned HOST_WIDE_INT mask
= ~0xFF;
145 if ((i
& mask
& 0xffffffffu
) == 0)
147 mask
= (mask
<< 2) | ((mask
& 0xffffffffu
) >> (32 - 2)) | ~0xffffffffu
;
148 } while (mask
!= ~0xFF);
151 } /* const_ok_for_arm */
153 /* This code has been fixed for cross compilation. */
155 static int fpa_consts_inited
= 0;
157 char *strings_fpa
[8] = {
168 static REAL_VALUE_TYPE values_fpa
[8];
176 for (i
= 0; i
< 8; i
++)
178 r
= REAL_VALUE_ATOF (strings_fpa
[i
], DFmode
);
181 fpa_consts_inited
= 1;
184 /* Return TRUE if rtx X is a valid immediate FPU constant. */
187 const_double_rtx_ok_for_fpu (x
)
193 if (!fpa_consts_inited
)
196 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
197 if (REAL_VALUE_MINUS_ZERO (r
))
199 for (i
= 0; i
< 8; i
++)
200 if (REAL_VALUES_EQUAL (r
, values_fpa
[i
]))
203 } /* const_double_rtx_ok_for_fpu */
205 /* Return TRUE if rtx X is a valid immediate FPU constant. */
208 neg_const_double_rtx_ok_for_fpu (x
)
214 if (!fpa_consts_inited
)
217 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
218 r
= REAL_VALUE_NEGATE (r
);
219 if (REAL_VALUE_MINUS_ZERO (r
))
221 for (i
= 0; i
< 8; i
++)
222 if (REAL_VALUES_EQUAL (r
, values_fpa
[i
]))
225 } /* neg_const_double_rtx_ok_for_fpu */
227 /* Predicates for `match_operand' and `match_operator'. */
229 /* s_register_operand is the same as register_operand, but it doesn't accept
230 (SUBREG (MEM)...). */
233 s_register_operand (op
, mode
)
235 enum machine_mode mode
;
237 if (GET_MODE (op
) != mode
&& mode
!= VOIDmode
)
240 if (GET_CODE (op
) == SUBREG
)
242 op
= SUBREG_REG (op
);
245 /* We don't consider registers whose class is NO_REGS
246 to be a register operand. */
247 return (GET_CODE (op
) == REG
248 && (REGNO (op
) >= FIRST_PSEUDO_REGISTER
249 || REGNO_REG_CLASS (REGNO (op
)) != NO_REGS
));
252 /* Return 1 if OP is an item in memory, given that we are in reload. */
255 reload_memory_operand (op
, mode
)
257 enum machine_mode mode
;
259 int regno
= true_regnum (op
);
261 return (! CONSTANT_P (op
)
263 || (GET_CODE (op
) == REG
264 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)));
267 /* Return TRUE for valid operands for the rhs of an ARM instruction. */
270 arm_rhs_operand (op
, mode
)
272 enum machine_mode mode
;
274 return (s_register_operand (op
, mode
)
275 || (GET_CODE (op
) == CONST_INT
&& const_ok_for_arm (INTVAL (op
))));
276 } /* arm_rhs_operand */
278 /* Return TRUE for valid operands for the rhs of an ARM instruction, or a load.
282 arm_rhsm_operand (op
, mode
)
284 enum machine_mode mode
;
286 return (s_register_operand (op
, mode
)
287 || (GET_CODE (op
) == CONST_INT
&& const_ok_for_arm (INTVAL (op
)))
288 || memory_operand (op
, mode
));
289 } /* arm_rhs_operand */
291 /* Return TRUE for valid operands for the rhs of an ARM instruction, or if a
292 constant that is valid when negated. */
295 arm_add_operand (op
, mode
)
297 enum machine_mode mode
;
299 return (s_register_operand (op
, mode
)
300 || (GET_CODE (op
) == CONST_INT
301 && (const_ok_for_arm (INTVAL (op
))
302 || const_ok_for_arm (-INTVAL (op
)))));
303 } /* arm_rhs_operand */
306 arm_not_operand (op
, mode
)
308 enum machine_mode mode
;
310 return (s_register_operand (op
, mode
)
311 || (GET_CODE (op
) == CONST_INT
312 && (const_ok_for_arm (INTVAL (op
))
313 || const_ok_for_arm (~INTVAL (op
)))));
314 } /* arm_rhs_operand */
316 /* Return TRUE for valid operands for the rhs of an FPU instruction. */
319 fpu_rhs_operand (op
, mode
)
321 enum machine_mode mode
;
323 if (s_register_operand (op
, mode
))
325 else if (GET_CODE (op
) == CONST_DOUBLE
)
326 return (const_double_rtx_ok_for_fpu (op
));
328 } /* fpu_rhs_operand */
331 fpu_add_operand (op
, mode
)
333 enum machine_mode mode
;
335 if (s_register_operand (op
, mode
))
337 else if (GET_CODE (op
) == CONST_DOUBLE
)
338 return const_double_rtx_ok_for_fpu (op
)
339 || neg_const_double_rtx_ok_for_fpu (op
);
343 /* Return nonzero if OP is a constant power of two. */
346 power_of_two_operand (op
, mode
)
348 enum machine_mode mode
;
350 if (GET_CODE (op
) == CONST_INT
)
352 int value
= INTVAL(op
);
353 return (value
!= 0 && (value
& (value
-1)) == 0);
356 } /* power_of_two_operand */
358 /* Return TRUE for a valid operand of a DImode operation.
359 Either: REG, CONST_DOUBLE or MEM(DImode_address).
360 Note that this disallows MEM(REG+REG), but allows
361 MEM(PRE/POST_INC/DEC(REG)). */
364 di_operand (op
, mode
)
366 enum machine_mode mode
;
368 if (s_register_operand (op
, mode
))
371 switch (GET_CODE (op
))
377 return (memory_address_p (DImode
, XEXP (op
, 0)));
383 /* Return TRUE for valid index operands. */
386 index_operand (op
, mode
)
388 enum machine_mode mode
;
390 return (s_register_operand(op
, mode
)
391 || (immediate_operand (op
, mode
)
392 && INTVAL (op
) < 4096 && INTVAL (op
) > -4096));
393 } /* index_operand */
395 /* Return TRUE for valid shifts by a constant. This also accepts any
396 power of two on the (somewhat overly relaxed) assumption that the
397 shift operator in this case was a mult. */
400 const_shift_operand (op
, mode
)
402 enum machine_mode mode
;
404 return (power_of_two_operand (op
, mode
)
405 || (immediate_operand (op
, mode
)
406 && (INTVAL (op
) < 32 && INTVAL (op
) > 0)));
407 } /* const_shift_operand */
409 /* Return TRUE for arithmetic operators which can be combined with a multiply
413 shiftable_operator (x
, mode
)
415 enum machine_mode mode
;
417 if (GET_MODE (x
) != mode
)
421 enum rtx_code code
= GET_CODE (x
);
423 return (code
== PLUS
|| code
== MINUS
424 || code
== IOR
|| code
== XOR
|| code
== AND
);
426 } /* shiftable_operator */
428 /* Return TRUE for shift operators. */
431 shift_operator (x
, mode
)
433 enum machine_mode mode
;
435 if (GET_MODE (x
) != mode
)
439 enum rtx_code code
= GET_CODE (x
);
442 return power_of_two_operand (XEXP (x
, 1));
443 return (code
== ASHIFT
|| code
== LSHIFT
444 || code
== ASHIFTRT
|| code
== LSHIFTRT
);
446 } /* shift_operator */
448 int equality_operator (x
, mode
)
450 enum machine_mode mode
;
452 return (GET_CODE (x
) == EQ
|| GET_CODE (x
) == NE
);
455 /* Return TRUE for SMIN SMAX UMIN UMAX operators. */
458 minmax_operator (x
, mode
)
460 enum machine_mode mode
;
462 enum rtx_code code
= GET_CODE (x
);
464 if (GET_MODE (x
) != mode
)
466 return code
== SMIN
|| code
== SMAX
|| code
== UMIN
|| code
== UMAX
;
467 } /* minmax_operator */
469 /* return TRUE if x is EQ or NE */
471 /* Return TRUE if this is the condition code register, if we aren't given
472 a mode, accept any class CCmode register */
475 cc_register (x
, mode
)
477 enum machine_mode mode
;
479 if (mode
== VOIDmode
)
482 if (GET_MODE_CLASS (mode
) != MODE_CC
)
485 if (mode
== GET_MODE (x
) && GET_CODE (x
) == REG
&& REGNO (x
) == 24)
494 enum rtx_code code
= GET_CODE (x
);
507 /* Return 1 if memory locations are adjacent */
509 adjacent_mem_locations (a
, b
)
512 int val0
= 0, val1
= 0;
515 if ((GET_CODE (XEXP (a
, 0)) == REG
516 || (GET_CODE (XEXP (a
, 0)) == PLUS
517 && GET_CODE (XEXP (XEXP (a
, 0), 1)) == CONST_INT
))
518 && (GET_CODE (XEXP (b
, 0)) == REG
519 || (GET_CODE (XEXP (b
, 0)) == PLUS
520 && GET_CODE (XEXP (XEXP (b
, 0), 1)) == CONST_INT
)))
522 if (GET_CODE (XEXP (a
, 0)) == PLUS
)
524 reg0
= REGNO (XEXP (XEXP (a
, 0), 0));
525 val0
= INTVAL (XEXP (XEXP (a
, 0), 1));
528 reg0
= REGNO (XEXP (a
, 0));
529 if (GET_CODE (XEXP (b
, 0)) == PLUS
)
531 reg1
= REGNO (XEXP (XEXP (b
, 0), 0));
532 val1
= INTVAL (XEXP (XEXP (b
, 0), 1));
535 reg1
= REGNO (XEXP (b
, 0));
536 return (reg0
== reg1
) && ((val1
- val0
) == 4 || (val0
- val1
) == 4);
541 /* Return 1 if OP is a load multiple operation. It is known to be
542 parallel and the first section will be tested. */
544 load_multiple_operation (op
, mode
)
546 enum machine_mode mode
;
548 int count
= XVECLEN (op
, 0);
555 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
)
558 /* Check to see if this might be a write-back */
559 if (GET_CODE (SET_SRC (elt
= XVECEXP (op
, 0, 0))) == PLUS
)
564 /* Now check it more carefully */
565 if (GET_CODE (SET_DEST (elt
)) != REG
566 || GET_CODE (XEXP (SET_SRC (elt
), 0)) != REG
567 || REGNO (XEXP (SET_SRC (elt
), 0)) != REGNO (SET_DEST (elt
))
568 || GET_CODE (XEXP (SET_SRC (elt
), 1)) != CONST_INT
569 || INTVAL (XEXP (SET_SRC (elt
), 1)) != (count
- 2) * 4
570 || GET_CODE (XVECEXP (op
, 0, count
- 1)) != CLOBBER
571 || GET_CODE (XEXP (XVECEXP (op
, 0, count
- 1), 0)) != REG
572 || REGNO (XEXP (XVECEXP (op
, 0, count
- 1), 0))
573 != REGNO (SET_DEST (elt
)))
578 /* Perform a quick check so we don't blow up below. */
580 || GET_CODE (XVECEXP (op
, 0, i
- 1)) != SET
581 || GET_CODE (SET_DEST (XVECEXP (op
, 0, i
- 1))) != REG
582 || GET_CODE (SET_SRC (XVECEXP (op
, 0, i
- 1))) != MEM
)
585 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, i
- 1)));
586 src_addr
= XEXP (SET_SRC (XVECEXP (op
, 0, i
- 1)), 0);
588 for (; i
< count
; i
++)
590 rtx elt
= XVECEXP (op
, 0, i
);
592 if (GET_CODE (elt
) != SET
593 || GET_CODE (SET_DEST (elt
)) != REG
594 || GET_MODE (SET_DEST (elt
)) != SImode
595 || REGNO (SET_DEST (elt
)) != dest_regno
+ i
- base
596 || GET_CODE (SET_SRC (elt
)) != MEM
597 || GET_MODE (SET_SRC (elt
)) != SImode
598 || GET_CODE (XEXP (SET_SRC (elt
), 0)) != PLUS
599 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt
), 0), 0), src_addr
)
600 || GET_CODE (XEXP (XEXP (SET_SRC (elt
), 0), 1)) != CONST_INT
601 || INTVAL (XEXP (XEXP (SET_SRC (elt
), 0), 1)) != (i
- base
) * 4)
608 /* Return 1 if OP is a store multiple operation. It is known to be
609 parallel and the first section will be tested. */
611 store_multiple_operation (op
, mode
)
613 enum machine_mode mode
;
615 int count
= XVECLEN (op
, 0);
622 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
)
625 /* Check to see if this might be a write-back */
626 if (GET_CODE (SET_SRC (elt
= XVECEXP (op
, 0, 0))) == PLUS
)
631 /* Now check it more carefully */
632 if (GET_CODE (SET_DEST (elt
)) != REG
633 || GET_CODE (XEXP (SET_SRC (elt
), 0)) != REG
634 || REGNO (XEXP (SET_SRC (elt
), 0)) != REGNO (SET_DEST (elt
))
635 || GET_CODE (XEXP (SET_SRC (elt
), 1)) != CONST_INT
636 || INTVAL (XEXP (SET_SRC (elt
), 1)) != (count
- 2) * 4
637 || GET_CODE (XVECEXP (op
, 0, count
- 1)) != CLOBBER
638 || GET_CODE (XEXP (XVECEXP (op
, 0, count
- 1), 0)) != REG
639 || REGNO (XEXP (XVECEXP (op
, 0, count
- 1), 0))
640 != REGNO (SET_DEST (elt
)))
645 /* Perform a quick check so we don't blow up below. */
647 || GET_CODE (XVECEXP (op
, 0, i
- 1)) != SET
648 || GET_CODE (SET_DEST (XVECEXP (op
, 0, i
- 1))) != MEM
649 || GET_CODE (SET_SRC (XVECEXP (op
, 0, i
- 1))) != REG
)
652 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, i
- 1)));
653 dest_addr
= XEXP (SET_DEST (XVECEXP (op
, 0, i
- 1)), 0);
655 for (; i
< count
; i
++)
657 elt
= XVECEXP (op
, 0, i
);
659 if (GET_CODE (elt
) != SET
660 || GET_CODE (SET_SRC (elt
)) != REG
661 || GET_MODE (SET_SRC (elt
)) != SImode
662 || REGNO (SET_SRC (elt
)) != src_regno
+ i
- base
663 || GET_CODE (SET_DEST (elt
)) != MEM
664 || GET_MODE (SET_DEST (elt
)) != SImode
665 || GET_CODE (XEXP (SET_DEST (elt
), 0)) != PLUS
666 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt
), 0), 0), dest_addr
)
667 || GET_CODE (XEXP (XEXP (SET_DEST (elt
), 0), 1)) != CONST_INT
668 || INTVAL (XEXP (XEXP (SET_DEST (elt
), 0), 1)) != (i
- base
) * 4)
675 /* Routines for use in generating RTL */
677 rtx
arm_gen_load_multiple (base_regno
, count
, from
, up
, write_back
)
686 int sign
= up
? 1 : -1;
688 result
= gen_rtx (PARALLEL
, VOIDmode
,
689 rtvec_alloc (count
+ (write_back
? 2 : 0)));
692 XVECEXP (result
, 0, 0)
693 = gen_rtx (SET
, GET_MODE (from
), from
,
694 plus_constant (from
, count
* 4 * sign
));
698 for (j
= 0; i
< count
; i
++, j
++)
700 XVECEXP (result
, 0, i
)
701 = gen_rtx (SET
, VOIDmode
, gen_rtx (REG
, SImode
, base_regno
+ j
),
702 gen_rtx (MEM
, SImode
,
703 plus_constant (from
, j
* 4 * sign
)));
706 XVECEXP (result
, 0, i
) = gen_rtx (CLOBBER
, SImode
, from
);
711 rtx
arm_gen_store_multiple (base_regno
, count
, to
, up
, write_back
)
720 int sign
= up
? 1 : -1;
722 result
= gen_rtx (PARALLEL
, VOIDmode
,
723 rtvec_alloc (count
+ (write_back
? 2 : 0)));
726 XVECEXP (result
, 0, 0)
727 = gen_rtx (SET
, GET_MODE (to
), to
,
728 plus_constant (to
, count
* 4 * sign
));
732 for (j
= 0; i
< count
; i
++, j
++)
734 XVECEXP (result
, 0, i
)
735 = gen_rtx (SET
, VOIDmode
,
736 gen_rtx (MEM
, SImode
, plus_constant (to
, j
* 4 * sign
)),
737 gen_rtx (REG
, SImode
, base_regno
+ j
));
740 XVECEXP (result
, 0, i
) = gen_rtx (CLOBBER
, SImode
, to
);
745 /* X and Y are two things to compare using CODE. Emit the compare insn and
746 return the rtx for register 0 in the proper mode. FP means this is a
747 floating point compare: I don't think that it is needed on the arm. */
750 gen_compare_reg (code
, x
, y
, fp
)
754 enum machine_mode mode
= SELECT_CC_MODE (code
, x
, y
);
755 rtx cc_reg
= gen_rtx (REG
, mode
, 24);
757 emit_insn (gen_rtx (SET
, VOIDmode
, cc_reg
,
758 gen_rtx (COMPARE
, mode
, x
, y
)));
764 /* Check to see if a branch is forwards or backwards. Return TRUE if it
768 arm_backwards_branch (from
, to
)
771 return (insn_addresses
[to
] < insn_addresses
[from
]);
774 /* Check to see if a branch is within the distance that can be done using
775 an arithmetic expression. */
777 short_branch (from
, to
)
780 int delta
= insn_addresses
[from
] + 2 - insn_addresses
[to
];
782 return abs (delta
) < 245; /* A small margin for safety */
785 /* Check to see that the insn isn't the target of the conditionalizing
788 arm_insn_not_targeted (insn
)
791 return insn
!= arm_target_insn
;
795 /* Routines to output assembly language. */
797 /* fp_immediate_constant
798 if the rtx is the correct value then return the string of the number.
799 In this way we can ensure that valid double constants are generated even
800 when cross compiling. */
802 fp_immediate_constant (x
)
808 if (!fpa_consts_inited
)
811 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
812 for (i
= 0; i
< 8; i
++)
813 if (REAL_VALUES_EQUAL (r
, values_fpa
[i
]))
814 return strings_fpa
[i
];
819 /* Output the operands of a LDM/STM instruction to STREAM.
820 MASK is the ARM register set mask of which only bits 0-15 are important.
821 INSTR is the possibly suffixed base register. HAT unequals zero if a hat
822 must follow the register list. */
825 print_multi_reg (stream
, instr
, mask
, hat
)
831 int not_first
= FALSE
;
833 fprintf (stream
, "\t%s, {", instr
);
834 for (i
= 0; i
< 16; i
++)
838 fprintf (stream
, ", ");
839 fprintf (stream
, "%s", reg_names
[i
]);
842 fprintf (stream
, "}%s\n", hat
? "^" : "");
843 } /* print_multi_reg */
845 /* Output a 'call' insn. */
848 output_call (operands
)
851 operands
[0] = XEXP (operands
[0], 0);
853 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
855 if (REGNO (operands
[0]) == 14)
857 operands
[0] = gen_rtx (REG
, SImode
, 12);
858 arm_output_asm_insn ("mov\t%0, lr", operands
);
860 arm_output_asm_insn ("mov\tlr, pc", operands
);
861 arm_output_asm_insn ("mov\tpc, %0", operands
);
869 int something_changed
= 0;
871 int code
= GET_CODE (x0
);
878 if (REGNO (x0
) == 14)
880 *x
= gen_rtx (REG
, SImode
, 12);
885 /* Scan through the sub-elements and change any references there */
886 fmt
= GET_RTX_FORMAT (code
);
887 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
889 something_changed
|= eliminate_lr2ip (&XEXP (x0
, i
));
890 else if (fmt
[i
] == 'E')
891 for (j
= 0; j
< XVECLEN (x0
, i
); j
++)
892 something_changed
|= eliminate_lr2ip (&XVECEXP (x0
, i
, j
));
893 return something_changed
;
897 /* Output a 'call' insn that is a reference in memory. */
900 output_call_mem (operands
)
903 operands
[0] = copy_rtx (operands
[0]); /* Be ultra careful */
904 /* Handle calls using lr by using ip (which may be clobbered in subr anyway).
906 if (eliminate_lr2ip (&operands
[0]))
907 arm_output_asm_insn ("mov\tip, lr", operands
);
908 arm_output_asm_insn ("mov\tlr, pc", operands
);
909 arm_output_asm_insn ("ldr\tpc, %0", operands
);
914 /* Output a move from arm registers to an fpu registers.
915 OPERANDS[0] is an fpu register.
916 OPERANDS[1] is the first registers of an arm register pair. */
919 output_mov_long_double_fpu_from_arm (operands
)
922 int arm_reg0
= REGNO (operands
[1]);
927 ops
[0] = gen_rtx (REG
, SImode
, arm_reg0
);
928 ops
[1] = gen_rtx (REG
, SImode
, 1 + arm_reg0
);
929 ops
[2] = gen_rtx (REG
, SImode
, 2 + arm_reg0
);
931 arm_output_asm_insn ("stmfd\tsp!, {%0, %1, %2}", ops
);
932 arm_output_asm_insn ("ldfe\t%0, [sp], #12", operands
);
934 } /* output_mov_long_double_fpu_from_arm */
936 /* Output a move from an fpu register to arm registers.
937 OPERANDS[0] is the first registers of an arm register pair.
938 OPERANDS[1] is an fpu register. */
941 output_mov_long_double_arm_from_fpu (operands
)
944 int arm_reg0
= REGNO (operands
[0]);
949 ops
[0] = gen_rtx (REG
, SImode
, arm_reg0
);
950 ops
[1] = gen_rtx (REG
, SImode
, 1 + arm_reg0
);
951 ops
[2] = gen_rtx (REG
, SImode
, 2 + arm_reg0
);
953 arm_output_asm_insn ("stfe\t%1, [sp, #-12]!", operands
);
954 arm_output_asm_insn ("ldmfd\tsp!, {%0, %1, %2}", ops
);
956 } /* output_mov_long_double_arm_from_fpu */
958 /* Output a move from arm registers to arm registers of a long double
959 OPERANDS[0] is the destination.
960 OPERANDS[1] is the source. */
962 output_mov_long_double_arm_from_arm (operands
)
965 /* We have to be careful here because the two might overlap */
966 int dest_start
= REGNO (operands
[0]);
967 int src_start
= REGNO (operands
[1]);
971 if (dest_start
< src_start
)
973 for (i
= 0; i
< 3; i
++)
975 ops
[0] = gen_rtx (REG
, SImode
, dest_start
+ i
);
976 ops
[1] = gen_rtx (REG
, SImode
, src_start
+ i
);
977 arm_output_asm_insn ("mov\t%0, %1", ops
);
982 for (i
= 2; i
>= 0; i
--)
984 ops
[0] = gen_rtx (REG
, SImode
, dest_start
+ i
);
985 ops
[1] = gen_rtx (REG
, SImode
, src_start
+ i
);
986 arm_output_asm_insn ("mov\t%0, %1", ops
);
993 /* Output a move from arm registers to an fpu registers.
994 OPERANDS[0] is an fpu register.
995 OPERANDS[1] is the first registers of an arm register pair. */
998 output_mov_double_fpu_from_arm (operands
)
1001 int arm_reg0
= REGNO (operands
[1]);
1006 ops
[0] = gen_rtx (REG
, SImode
, arm_reg0
);
1007 ops
[1] = gen_rtx (REG
, SImode
, 1 + arm_reg0
);
1008 arm_output_asm_insn ("stmfd\tsp!, {%0, %1}", ops
);
1009 arm_output_asm_insn ("ldfd\t%0, [sp], #8", operands
);
1011 } /* output_mov_double_fpu_from_arm */
1013 /* Output a move from an fpu register to arm registers.
1014 OPERANDS[0] is the first registers of an arm register pair.
1015 OPERANDS[1] is an fpu register. */
1018 output_mov_double_arm_from_fpu (operands
)
1021 int arm_reg0
= REGNO (operands
[0]);
1026 ops
[0] = gen_rtx (REG
, SImode
, arm_reg0
);
1027 ops
[1] = gen_rtx (REG
, SImode
, 1 + arm_reg0
);
1028 arm_output_asm_insn ("stfd\t%1, [sp, #-8]!", operands
);
1029 arm_output_asm_insn ("ldmfd\tsp!, {%0, %1}", ops
);
1031 } /* output_mov_double_arm_from_fpu */
1033 /* Output a move between double words.
1034 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
1035 or MEM<-REG and all MEMs must be offsettable addresses. */
1038 output_move_double (operands
)
1041 enum rtx_code code0
= GET_CODE (operands
[0]);
1042 enum rtx_code code1
= GET_CODE (operands
[1]);
1047 int reg0
= REGNO (operands
[0]);
1049 otherops
[0] = gen_rtx (REG
, SImode
, 1 + reg0
);
1052 int reg1
= REGNO (operands
[1]);
1055 otherops
[1] = gen_rtx (REG
, SImode
, 1 + reg1
);
1057 /* Ensure the second source is not overwritten */
1058 if (reg0
== 1 + reg1
)
1060 arm_output_asm_insn("mov\t%0, %1", otherops
);
1061 arm_output_asm_insn("mov\t%0, %1", operands
);
1065 arm_output_asm_insn("mov\t%0, %1", operands
);
1066 arm_output_asm_insn("mov\t%0, %1", otherops
);
1069 else if (code1
== CONST_DOUBLE
)
1071 otherops
[1] = gen_rtx (CONST_INT
, VOIDmode
,
1072 CONST_DOUBLE_HIGH (operands
[1]));
1073 operands
[1] = gen_rtx (CONST_INT
, VOIDmode
,
1074 CONST_DOUBLE_LOW (operands
[1]));
1075 output_mov_immediate (operands
, FALSE
, "");
1076 output_mov_immediate (otherops
, FALSE
, "");
1078 else if (code1
== CONST_INT
)
1080 otherops
[1] = const0_rtx
;
1081 /* sign extend the intval into the high-order word */
1082 /* Note: output_mov_immediate may clobber operands[1], so we
1083 put this out first */
1084 if (INTVAL (operands
[1]) < 0)
1085 arm_output_asm_insn ("mvn\t%0, %1", otherops
);
1087 arm_output_asm_insn ("mov\t%0, %1", otherops
);
1088 output_mov_immediate (operands
, FALSE
, "");
1090 else if (code1
== MEM
)
1092 switch (GET_CODE (XEXP (operands
[1], 0)))
1095 /* Handle the simple case where address is [r, #0] more
1097 operands
[1] = XEXP (operands
[1], 0);
1098 arm_output_asm_insn ("ldmia\t%1, %M0", operands
);
1101 operands
[1] = XEXP (XEXP (operands
[1], 0), 0);
1102 arm_output_asm_insn ("add\t%1, %1, #8", operands
);
1103 arm_output_asm_insn ("ldmia\t%1, %M0", operands
);
1106 operands
[1] = XEXP (XEXP (operands
[1], 0), 0);
1107 arm_output_asm_insn ("sub\t%1, %1, #8", operands
);
1108 arm_output_asm_insn ("ldmia\t%1, %M0", operands
);
1111 operands
[1] = XEXP (XEXP (operands
[1], 0), 0);
1112 arm_output_asm_insn ("ldmia\t%1!, %M0", operands
);
1115 operands
[1] = XEXP (XEXP (operands
[1], 0), 0);
1116 arm_output_asm_insn ("ldmia\t%1, %M0", operands
);
1117 arm_output_asm_insn ("sub\t%1, %1, #8", operands
);
1120 otherops
[1] = adj_offsettable_operand (operands
[1], 4);
1121 /* Take care of overlapping base/data reg. */
1122 if (reg_mentioned_p (operands
[0], operands
[1]))
1124 arm_output_asm_insn ("ldr\t%0, %1", otherops
);
1125 arm_output_asm_insn ("ldr\t%0, %1", operands
);
1129 arm_output_asm_insn ("ldr\t%0, %1", operands
);
1130 arm_output_asm_insn ("ldr\t%0, %1", otherops
);
1134 else abort(); /* Constraints should prevent this */
1136 else if (code0
== MEM
&& code1
== REG
)
1138 if (REGNO (operands
[1]) == 12)
1140 switch (GET_CODE (XEXP (operands
[0], 0)))
1143 operands
[0] = XEXP (operands
[0], 0);
1144 arm_output_asm_insn ("stmia\t%0, %M1", operands
);
1147 operands
[0] = XEXP (XEXP (operands
[0], 0), 0);
1148 arm_output_asm_insn ("add\t%0, %0, #8", operands
);
1149 arm_output_asm_insn ("stmia\t%0, %M1", operands
);
1152 operands
[0] = XEXP (XEXP (operands
[0], 0), 0);
1153 arm_output_asm_insn ("sub\t%0, %0, #8", operands
);
1154 arm_output_asm_insn ("stmia\t%0, %M1", operands
);
1157 operands
[0] = XEXP (XEXP (operands
[0], 0), 0);
1158 arm_output_asm_insn ("stmia\t%0!, %M1", operands
);
1161 operands
[0] = XEXP (XEXP (operands
[0], 0), 0);
1162 arm_output_asm_insn ("stmia\t%0, %M1", operands
);
1163 arm_output_asm_insn ("sub\t%0, %0, #8", operands
);
1166 otherops
[0] = adj_offsettable_operand (operands
[0], 4);
1167 otherops
[1] = gen_rtx (REG
, SImode
, 1 + REGNO (operands
[1]));
1168 arm_output_asm_insn ("str\t%1, %0", operands
);
1169 arm_output_asm_insn ("str\t%1, %0", otherops
);
1172 else abort(); /* Constraints should prevent this */
1175 } /* output_move_double */
1178 /* Output an arbitrary MOV reg, #n.
1179 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
1182 output_mov_immediate (operands
)
1185 int n
= INTVAL (operands
[1]);
1189 /* Try to use one MOV */
1191 if (const_ok_for_arm (n
))
1192 return (arm_output_asm_insn ("mov\t%0, %1", operands
));
1194 /* Try to use one MVN */
1196 if (const_ok_for_arm(~n
))
1198 operands
[1] = gen_rtx (CONST_INT
, VOIDmode
, ~n
);
1199 return (arm_output_asm_insn ("mvn\t%0, %1", operands
));
1202 /* If all else fails, make it out of ORRs or BICs as appropriate. */
1204 for (i
=0; i
< 32; i
++)
1208 if (n_ones
> 16) /* Shorter to use MVN with BIC in this case. */
1209 output_multi_immediate(operands
, "mvn\t%0, %1", "bic\t%0, %0, %1", 1, ~n
);
1211 output_multi_immediate(operands
, "mov\t%0, %1", "orr\t%0, %0, %1", 1, n
);
1213 } /* output_mov_immediate */
1216 /* Output an ADD r, s, #n where n may be too big for one instruction. If
1217 adding zero to one register, output nothing. */
1220 output_add_immediate (operands
)
1223 int n
= INTVAL (operands
[2]);
1225 if (n
!= 0 || REGNO (operands
[0]) != REGNO (operands
[1]))
1228 output_multi_immediate (operands
,
1229 "sub\t%0, %1, %2", "sub\t%0, %0, %2", 2, -n
);
1231 output_multi_immediate (operands
,
1232 "add\t%0, %1, %2", "add\t%0, %0, %2", 2, n
);
1235 } /* output_add_immediate */
1238 /* Output a multiple immediate operation.
1239 OPERANDS is the vector of operands referred to in the output patterns.
1240 INSTR1 is the output pattern to use for the first constant.
1241 INSTR2 is the output pattern to use for subsequent constants.
1242 IMMED_OP is the index of the constant slot in OPERANDS.
1243 N is the constant value. */
1246 output_multi_immediate (operands
, instr1
, instr2
, immed_op
, n
)
1248 char *instr1
, *instr2
;
1253 operands
[immed_op
] = const0_rtx
;
1254 arm_output_asm_insn (instr1
, operands
); /* Quick and easy output */
1259 char *instr
= instr1
;
1261 /* Note that n is never zero here (which would give no output) */
1263 for (i
= 0; i
< 32; i
+= 2)
1267 operands
[immed_op
] = gen_rtx (CONST_INT
, VOIDmode
,
1269 arm_output_asm_insn (instr
, operands
);
1276 } /* output_multi_immediate */
1279 /* Return the appropriate ARM instruction for the operation code.
1280 The returned result should not be overwritten. OP is the rtx of the
1281 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
1285 arithmetic_instr (op
, shift_first_arg
)
1288 switch (GET_CODE(op
))
1293 if (shift_first_arg
)
1306 return (""); /* stupid cc */
1307 } /* arithmetic_instr */
1310 /* Ensure valid constant shifts and return the appropriate shift mnemonic
1311 for the operation code. The returned result should not be overwritten.
1312 OP is the rtx code of the shift.
1313 SHIFT_PTR points to the shift size operand. */
1316 shift_instr (op
, shift_ptr
)
1341 *shift_ptr
= gen_rtx (CONST_INT
, VOIDmode
,
1342 int_log2 (INTVAL (*shift_ptr
)));
1348 if (GET_CODE (*shift_ptr
) == CONST_INT
)
1350 int shift
= INTVAL (*shift_ptr
);
1352 if (shift
< min_shift
)
1353 *shift_ptr
= gen_rtx (CONST_INT
, VOIDmode
, 0);
1354 else if (shift
> max_shift
)
1355 *shift_ptr
= gen_rtx (CONST_INT
, VOIDmode
, max_shift
);
/* Obtain the shift from the POWER of two.  POWER must have exactly one set
   bit in its low 32 bits; otherwise abort () is called.  */

int
int_log2 (power)
     unsigned int power;
{
  int shift = 0;

  /* Use an unsigned 1 so that testing bit 31 never left-shifts a signed
     one into the sign bit (undefined behaviour), and abort before the
     shift count itself can reach 32.  */
  while (((1u << shift) & power) == 0)
    {
      shift++;
      if (shift > 31)
	abort ();
    }

  return (shift);
} /* int_log2 */
1379 /* Output an arithmetic instruction which may set the condition code.
1380 OPERANDS[0] is the destination register.
1381 OPERANDS[1] is the arithmetic operator expression.
1382 OPERANDS[2] is the left hand argument.
1383 OPERANDS[3] is the right hand argument.
1384 CONST_FIRST_ARG is TRUE if the first argument of the operator was constant.
1385 SET_COND is TRUE when the condition code should be set. */
1388 output_arithmetic (operands
, const_first_arg
, set_cond
)
1390 int const_first_arg
;
1394 char *instr
= arithmetic_instr (operands
[1], const_first_arg
);
1396 sprintf (mnemonic
, "%s%s\t%%0, %%2, %%3", instr
, set_cond
? "s" : "");
1397 return (arm_output_asm_insn (mnemonic
, operands
));
1398 } /* output_arithmetic */
1401 /* Output an arithmetic instruction with a shift.
1402 OPERANDS[0] is the destination register.
1403 OPERANDS[1] is the arithmetic operator expression.
1404 OPERANDS[2] is the unshifted register.
1405 OPERANDS[3] is the shift operator expression.
1406 OPERANDS[4] is the shifted register.
1407 OPERANDS[5] is the shift constant or register.
1408 SHIFT_FIRST_ARG is TRUE if the first argument of the operator was shifted.
1409 SET_COND is TRUE when the condition code should be set. */
1412 output_arithmetic_with_shift (operands
, shift_first_arg
, set_cond
)
1414 int shift_first_arg
;
1418 char *instr
= arithmetic_instr (operands
[1], shift_first_arg
);
1419 char *condbit
= set_cond
? "s" : "";
1420 char *shift
= shift_instr (GET_CODE (operands
[3]), &operands
[5]);
1422 sprintf (mnemonic
, "%s%s\t%%0, %%2, %%4, %s %%5", instr
, condbit
, shift
);
1423 return (arm_output_asm_insn (mnemonic
, operands
));
1424 } /* output_arithmetic_with_shift */
1427 /* Output an arithmetic instruction with a power of two multiplication.
1428 OPERANDS[0] is the destination register.
1429 OPERANDS[1] is the arithmetic operator expression.
1430 OPERANDS[2] is the unmultiplied register.
1431 OPERANDS[3] is the multiplied register.
1432 OPERANDS[4] is the constant multiple (power of two).
1433 SHIFT_FIRST_ARG is TRUE if the first arg of the operator was multiplied. */
1436 output_arithmetic_with_immediate_multiply (operands
, shift_first_arg
)
1438 int shift_first_arg
;
1441 char *instr
= arithmetic_instr (operands
[1], shift_first_arg
);
1442 int shift
= int_log2 (INTVAL (operands
[4]));
1444 sprintf (mnemonic
, "%s\t%%0, %%2, %%3, asl#%d", instr
, shift
);
1445 return (arm_output_asm_insn (mnemonic
, operands
));
1446 } /* output_arithmetic_with_immediate_multiply */
1449 /* Output a move with a shift.
1450 OP is the shift rtx code.
1451 OPERANDS[0] = destination register.
1452 OPERANDS[1] = source register.
1453 OPERANDS[2] = shift constant or register. */
1456 output_shifted_move (op
, operands
)
1462 if (GET_CODE (operands
[2]) == CONST_INT
&& INTVAL (operands
[2]) == 0)
1463 sprintf (mnemonic
, "mov\t%%0, %%1");
1465 sprintf (mnemonic
, "mov\t%%0, %%1, %s %%2",
1466 shift_instr (op
, &operands
[2]));
1467 return (arm_output_asm_insn (mnemonic
, operands
));
1468 } /* output_shifted_move */
1471 output_shift_compare (operands
, neg
)
1478 sprintf (buf
, "cmn\t%%1, %%3, %s %%4", shift_instr (GET_CODE (operands
[2]),
1481 sprintf (buf
, "cmp\t%%1, %%3, %s %%4", shift_instr (GET_CODE (operands
[2]),
1483 return arm_output_asm_insn (buf
, operands
);
1484 } /* output_shift_compare */
1486 /* Output a .ascii pseudo-op, keeping track of lengths. This is because
1487 /bin/as is horribly restrictive. */
1490 output_ascii_pseudo_op (stream
, p
, len
)
1496 int len_so_far
= 1000;
1497 int chars_so_far
= 0;
1499 for (i
= 0; i
< len
; i
++)
1501 register int c
= p
[i
];
1503 if (len_so_far
> 50)
1506 fputs ("\"\n", stream
);
1507 fputs ("\t.ascii\t\"", stream
);
1509 arm_increase_location (chars_so_far
);
1513 if (c
== '\"' || c
== '\\')
1518 if (c
>= ' ' && c
< 0177)
1525 fprintf (stream
, "\\%03o", c
);
1530 fputs ("\"\n", stream
);
1531 arm_increase_location (chars_so_far
);
1532 } /* output_ascii_pseudo_op */
1535 /* Try to determine whether a pattern really clobbers the link register.
1536 This information is useful when peepholing, so that lr need not be pushed
1537 if we combine a call followed by a return */
1540 pattern_really_clobbers_lr (x
)
1545 switch (GET_CODE (x
))
1548 switch (GET_CODE (SET_DEST (x
)))
1551 return REGNO (SET_DEST (x
)) == 14;
1553 if (GET_CODE (XEXP (SET_DEST (x
), 0)) == REG
)
1554 return REGNO (XEXP (SET_DEST (x
), 0)) == 14;
1560 for (i
= 0; i
< XVECLEN (x
, 0); i
++)
1561 if (pattern_really_clobbers_lr (XVECEXP (x
, 0, i
)))
1565 switch (GET_CODE (XEXP (x
, 0)))
1568 return REGNO (XEXP (x
, 0)) == 14;
1570 if (GET_CODE (XEXP (XEXP (x
, 0), 0)) == REG
)
1571 return REGNO (XEXP (XEXP (x
, 0), 0)) == 14;
1584 function_really_clobbers_lr (first
)
1589 for (insn
= first
; insn
; insn
= next_nonnote_insn (insn
))
1591 switch (GET_CODE (insn
))
1596 case JUMP_INSN
: /* Jump insns only change the PC (and conds) */
1600 if (pattern_really_clobbers_lr (PATTERN (insn
)))
1604 /* Don't yet know how to handle those calls that are not to a
1606 if (GET_CODE (PATTERN (insn
)) != PARALLEL
)
1608 switch (GET_CODE (XVECEXP (PATTERN (insn
), 0, 0)))
1611 if (GET_CODE (XEXP (XEXP (XVECEXP (PATTERN (insn
), 0, 0), 0), 0))
1616 if (GET_CODE (XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn
),
1621 default: /* Don't recognize it, be safe */
1624 /* A call can be made (by peepholing) not to clobber lr iff it is
1625 followed by a return. There may, however, be a use insn iff
1626 we are returning the result of the call.
1627 If we run off the end of the insn chain, then that means the
1628 call was at the end of the function. Unfortunately we don't
1629 have a return insn for the peephole to recognize, so we
1630 must reject this. (Can this be fixed by adding our own insn?) */
1631 if ((next
= next_nonnote_insn (insn
)) == NULL
)
1633 if (GET_CODE (next
) == INSN
&& GET_CODE (PATTERN (next
)) == USE
1634 && (GET_CODE (XVECEXP (PATTERN (insn
), 0, 0)) == SET
)
1635 && (REGNO (SET_DEST (XVECEXP (PATTERN (insn
), 0, 0)))
1636 == REGNO (XEXP (PATTERN (next
), 0))))
1637 if ((next
= next_nonnote_insn (next
)) == NULL
)
1639 if (GET_CODE (next
) == JUMP_INSN
1640 && GET_CODE (PATTERN (next
)) == RETURN
)
1647 /* We have reached the end of the chain so lr was _not_ clobbered */
1652 output_return_instruction (operand
, really_return
)
1657 int reg
, live_regs
= 0;
1659 if (current_function_calls_alloca
&& !really_return
)
1662 for (reg
= 4; reg
< 10; reg
++)
1663 if (regs_ever_live
[reg
])
1666 if (live_regs
|| (regs_ever_live
[14] && !lr_save_eliminated
))
1669 if (frame_pointer_needed
)
1674 if (lr_save_eliminated
|| !regs_ever_live
[14])
1676 if (frame_pointer_needed
)
1677 strcpy (instr
, "ldm%d0ea\tfp, {");
1679 strcpy (instr
, "ldm%d0fd\tsp!, {");
1680 for (reg
= 4; reg
< 10; reg
++)
1681 if (regs_ever_live
[reg
])
1683 strcat (instr
, reg_names
[reg
]);
1685 strcat (instr
, ", ");
1687 if (frame_pointer_needed
)
1689 strcat (instr
, reg_names
[11]);
1690 strcat (instr
, ", ");
1691 strcat (instr
, reg_names
[13]);
1692 strcat (instr
, ", ");
1693 strcat (instr
, really_return
? reg_names
[15] : reg_names
[14]);
1696 strcat (instr
, really_return
? reg_names
[15] : reg_names
[14]);
1697 strcat (instr
, (TARGET_6
|| !really_return
) ? "}" : "}^");
1698 arm_output_asm_insn (instr
, &operand
);
1700 else if (really_return
)
1702 strcpy (instr
, TARGET_6
? "mov%d0\tpc, lr" : "mov%d0s\tpc, lr");
1703 arm_output_asm_insn (instr
, &operand
);
1705 return_used_this_function
= 1;
1709 /* The amount of stack adjustment that happens here, in output_return and in
1710 output_epilogue must be exactly the same as was calculated during reload,
1711 or things will point to the wrong place. The only time we can safely
1712 ignore this constraint is when a function has no arguments on the stack,
1713 no stack frame requirement and no live registers execpt for `lr'. If we
1714 can guarantee that by making all function calls into tail calls and that
1715 lr is not clobbered in any other way, then there is no need to push lr
1719 output_prologue (f
, frame_size
)
1723 int reg
, live_regs_mask
= 0, code_size
= 0;
1726 /* Nonzero if we must stuff some register arguments onto the stack as if
1727 they were passed there. */
1728 int store_arg_regs
= 0;
1730 return_used_this_function
= 0;
1731 lr_save_eliminated
= 0;
1733 fprintf (f
, "\t@ args = %d, pretend = %d, frame = %d\n",
1734 current_function_args_size
, current_function_pretend_args_size
,
1736 fprintf (f
, "\t@ frame_needed = %d, current_function_anonymous_args = %d\n",
1737 frame_pointer_needed
, current_function_anonymous_args
);
1739 if (current_function_anonymous_args
&& current_function_pretend_args_size
)
1742 for (reg
= 4; reg
< 10; reg
++)
1743 if (regs_ever_live
[reg
])
1744 live_regs_mask
|= (1 << reg
);
1746 if (frame_pointer_needed
)
1748 live_regs_mask
|= 0xD800;
1749 fputs ("\tmov\tip, sp\n", f
);
1752 else if (regs_ever_live
[14])
1754 if (! current_function_args_size
1755 && !function_really_clobbers_lr (get_insns ()))
1757 fprintf (f
,"\t@ I don't think this function clobbers lr\n");
1758 lr_save_eliminated
= 1;
1761 live_regs_mask
|= 0x4000;
1764 /* If CURRENT_FUNCTION_PRETEND_ARGS_SIZE, adjust the stack pointer to make
1765 room. If also STORE_ARG_REGS store the argument registers involved in
1766 the created slot (this is for stdarg and varargs). */
1767 if (current_function_pretend_args_size
)
1771 int arg_size
, mask
= 0;
1773 assert (current_function_pretend_args_size
<= 16);
1774 for (reg
= 3, arg_size
= current_function_pretend_args_size
;
1775 arg_size
> 0; reg
--, arg_size
-= 4)
1777 print_multi_reg (f
, "stmfd\tsp!", mask
, FALSE
);
1782 operands
[0] = operands
[1] = stack_pointer_rtx
;
1783 operands
[2] = gen_rtx (CONST_INT
, VOIDmode
,
1784 -current_function_pretend_args_size
);
1785 output_add_immediate (operands
);
1791 /* if a di mode load/store multiple is used, and the base register
1792 is r3, then r4 can become an ever live register without lr
1793 doing so, in this case we need to push lr as well, or we
1794 will fail to get a proper return. */
1796 live_regs_mask
|= 0x4000;
1797 lr_save_eliminated
= 0;
1798 print_multi_reg (f
, "stmfd\tsp!", live_regs_mask
, FALSE
);
1802 for (reg
= 23; reg
> 19; reg
--)
1803 if (regs_ever_live
[reg
])
1805 fprintf (f
, "\tstfe\t%s, [sp, #-12]!\n", reg_names
[reg
]);
1809 if (frame_pointer_needed
)
1811 /* Make `fp' point to saved value of `pc'. */
1813 operands
[0] = gen_rtx (REG
, SImode
, HARD_FRAME_POINTER_REGNUM
);
1814 operands
[1] = gen_rtx (REG
, SImode
, 12);
1815 operands
[2] = gen_rtx (CONST_INT
, VOIDmode
,
1816 - (4 + current_function_pretend_args_size
));
1817 output_add_immediate (operands
);
1822 operands
[0] = operands
[1] = stack_pointer_rtx
;
1823 operands
[2] = gen_rtx (CONST_INT
, VOIDmode
, -frame_size
);
1824 output_add_immediate (operands
);
1827 arm_increase_location (code_size
);
1828 } /* output_prologue */
1832 output_epilogue (f
, frame_size
)
1836 int reg
, live_regs_mask
= 0, code_size
= 0, fp_needed
= 0;
1837 /* If we need this then it will always be at lesat this much */
1838 int floats_offset
= 24;
1841 if (use_return_insn() && return_used_this_function
)
1843 if (frame_size
&& !(frame_pointer_needed
|| TARGET_APCS
))
1850 for (reg
= 4; reg
<= 10; reg
++)
1851 if (regs_ever_live
[reg
])
1853 live_regs_mask
|= (1 << reg
);
1858 if (frame_pointer_needed
)
1860 for (reg
= 23; reg
>= 20; reg
--)
1861 if (regs_ever_live
[reg
])
1863 fprintf (f
, "\tldfe\t%s, [fp, #-%d]\n", reg_names
[reg
],
1865 floats_offset
+= 12;
1869 live_regs_mask
|= 0xA800;
1870 print_multi_reg (f
, "ldmea\tfp", live_regs_mask
,
1871 TARGET_6
? FALSE
: TRUE
);
1876 /* Restore stack pointer if necessary. */
1879 operands
[0] = operands
[1] = stack_pointer_rtx
;
1880 operands
[2] = gen_rtx (CONST_INT
, VOIDmode
, frame_size
);
1881 output_add_immediate (operands
);
1884 for (reg
= 20; reg
< 24; reg
++)
1885 if (regs_ever_live
[reg
])
1887 fprintf (f
, "\tldfe\t%s, [sp], #12\n", reg_names
[reg
]);
1890 if (current_function_pretend_args_size
== 0 && regs_ever_live
[14])
1892 print_multi_reg (f
, "ldmfd\tsp!", live_regs_mask
| 0x8000,
1893 TARGET_6
? FALSE
: TRUE
);
1898 if (live_regs_mask
|| regs_ever_live
[14])
1900 live_regs_mask
|= 0x4000;
1901 print_multi_reg (f
, "ldmfd\tsp!", live_regs_mask
, FALSE
);
1904 if (current_function_pretend_args_size
)
1906 operands
[0] = operands
[1] = stack_pointer_rtx
;
1907 operands
[2] = gen_rtx (CONST_INT
, VOIDmode
,
1908 current_function_pretend_args_size
);
1909 output_add_immediate (operands
);
1911 fputs (TARGET_6
? "\tmov\tpc, lr\n" : "\tmovs\tpc, lr\n", f
);
1915 arm_increase_location (code_size
);
1916 current_function_anonymous_args
= 0;
1917 } /* output_epilogue */
1919 /* Increase the `arm_text_location' by AMOUNT if we're in the text
1923 arm_increase_location (amount
)
1926 if (in_text_section ())
1927 arm_text_location
+= amount
;
1928 } /* arm_increase_location */
1931 /* Like output_asm_insn (), but also increases the arm_text_location (if in
1932 the .text segment, of course, even though this will always be true).
1933 Returns the empty string. */
1936 arm_output_asm_insn (template, operands
)
1940 extern FILE *asm_out_file
;
1942 output_asm_insn (template, operands
);
1943 if (in_text_section ())
1944 arm_text_location
+= 4;
1945 fflush (asm_out_file
);
1947 } /* arm_output_asm_insn */
1950 /* Output a label definition. If this label is within the .text segment, it
1951 is stored in OFFSET_TABLE, to be used when building `llc' instructions.
1952 Maybe GCC remembers names not starting with a `*' for a long time, but this
1953 is a minority anyway, so we just make a copy. Do not store the leading `*'
1954 if the name starts with one. */
1957 arm_asm_output_label (stream
, name
)
1961 char *real_name
, *s
;
1962 struct label_offset
*cur
;
1965 assemble_name (stream
, name
);
1966 fputs (":\n", stream
);
1967 if (! in_text_section ())
1972 real_name
= xmalloc (1 + strlen (&name
[1]));
1973 strcpy (real_name
, &name
[1]);
1977 real_name
= xmalloc (2 + strlen (name
));
1978 strcpy (real_name
, "_");
1979 strcat (real_name
, name
);
1981 for (s
= real_name
; *s
; s
++)
1983 hash
= hash
% LABEL_HASH_SIZE
;
1984 cur
= (struct label_offset
*) xmalloc (sizeof (struct label_offset
));
1985 cur
->name
= real_name
;
1986 cur
->offset
= arm_text_location
;
1987 cur
->cdr
= offset_table
[hash
];
1988 offset_table
[hash
] = cur
;
1989 } /* arm_asm_output_label */
1992 /* Output the instructions needed to perform what Martin's /bin/as called
1993 llc: load an SImode thing from the function's constant pool.
1995 XXX This could be enhanced in that we do not really need a pointer in the
1996 constant pool pointing to the real thing. If we can address this pointer,
1997 we can also address what it is pointing at, in fact, anything in the text
1998 segment which has been defined already within this .s file. */
2001 arm_output_llc (operands
)
2004 char *s
, *name
= XSTR (XEXP (operands
[1], 0), 0);
2005 struct label_offset
*he
;
2006 int hash
= 0, conditional
= (arm_ccfsm_state
== 3 || arm_ccfsm_state
== 4);
2011 for (s
= &name
[1]; *s
; s
++)
2013 hash
= hash
% LABEL_HASH_SIZE
;
2014 he
= offset_table
[hash
];
2015 while (he
&& strcmp (he
->name
, &name
[1]))
2021 if (arm_text_location
+ 8 - he
->offset
< 4095)
2023 fprintf (asm_out_file
, "\tldr%s\t%s, [pc, #%s - . - 8]\n",
2024 conditional
? arm_condition_codes
[arm_current_cc
] : "",
2025 reg_names
[REGNO (operands
[0])], &name
[1]);
2026 arm_increase_location (4);
2031 int offset
= - (arm_text_location
+ 8 - he
->offset
);
2032 char *reg_name
= reg_names
[REGNO (operands
[0])];
2034 /* ??? This is a hack, assuming the constant pool never is more than
2035 (1 + 255) * 4096 == 1Meg away from the PC. */
2037 if (offset
> 1000000)
2040 fprintf (asm_out_file
, "\tsub%s\t%s, pc, #(8 + . - %s) & ~4095\n",
2041 conditional
? arm_condition_codes
[arm_current_cc
] : "",
2042 reg_name
, &name
[1]);
2043 fprintf (asm_out_file
, "\tldr%s\t%s, [%s, #- ((4 + . - %s) & 4095)]\n",
2044 conditional
? arm_condition_codes
[arm_current_cc
] : "",
2045 reg_name
, reg_name
, &name
[1]);
2046 arm_increase_location (8);
2049 } /* arm_output_llc */
2051 /* output_load_symbol ()
2052 load a symbol that is known to be in the text segment into a register */
2055 output_load_symbol (operands
)
2058 char *s
, *name
= XSTR (operands
[1], 0);
2059 struct label_offset
*he
;
2066 for (s
= &name
[1]; *s
; s
++)
2068 hash
= hash
% LABEL_HASH_SIZE
;
2069 he
= offset_table
[hash
];
2070 while (he
&& strcmp (he
->name
, &name
[1]))
2076 offset
= (arm_text_location
+ 8 - he
->offset
);
2080 /* If the symbol is word aligned then we might be able to reduce the
2082 if ((offset
& 3) == 0)
2084 arm_output_asm_insn ("sub\t%0, pc, #(8 + . -%a1) & 1023", operands
);
2087 arm_output_asm_insn ("sub\t%0, %0, #(4 + . -%a1) & 261120",
2089 if (offset
> 0x3ffff)
2091 arm_output_asm_insn ("sub\t%0, %0, #(. -%a1) & 66846720",
2093 if (offset
> 0x3ffffff)
2094 arm_output_asm_insn ("sub\t%0, %0, #(. - 4 -%a1) & -67108864",
2101 arm_output_asm_insn ("sub\t%0, pc, #(8 + . -%a1) & 255", operands
);
2104 arm_output_asm_insn ("sub\t%0, %0, #(4 + . -%a1) & 65280", operands
);
2105 if (offset
> 0x0ffff)
2107 arm_output_asm_insn ("sub\t%0, %0, #(. -%a1) & 16711680",
2109 if (offset
> 0x0ffffff)
2110 arm_output_asm_insn ("sub\t%0, %0, #(. - 4 -%a1) & -16777216",
2118 /* Output code resembling an .lcomm directive. /bin/as doesn't have this
2119 directive hence this hack, which works by reserving some `.space' in the
2120 bss segment directly.
2122 XXX This is a severe hack, which is guaranteed NOT to work since it doesn't
2123 define STATIC COMMON space but merely STATIC BSS space. */
2126 output_lcomm_directive (stream
, name
, size
, rounded
)
2131 fputs ("\n\t.bss\t@ .lcomm\n", stream
);
2132 assemble_name (stream
, name
);
2133 fprintf (stream
, ":\t.space\t%d\n", rounded
);
2134 if (in_text_section ())
2135 fputs ("\n\t.text\n", stream
);
2137 fputs ("\n\t.data\n", stream
);
2138 } /* output_lcomm_directive */
2140 /* A finite state machine takes care of noticing whether or not instructions
2141 can be conditionally executed, and thus decrease execution time and code
2142 size by deleting branch instructions. The fsm is controlled by
2143 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
2145 /* The state of the fsm controlling condition codes are:
2146 0: normal, do nothing special
2147 1: make ASM_OUTPUT_OPCODE not output this instruction
2148 2: make ASM_OUTPUT_OPCODE not output this instruction
2149 3: make instructions conditional
2150 4: make instructions conditional
2152 State transitions (state->state by whom under condition):
2153 0 -> 1 final_prescan_insn if the `target' is a label
2154 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
2155 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
2156 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
2157 3 -> 0 ASM_OUTPUT_INTERNAL_LABEL if the `target' label is reached
2158 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
2159 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
2160 (the target insn is arm_target_insn).
2162 If the jump clobbers the conditions then we use states 2 and 4.
2164 A similar thing can be done with conditional return insns.
2166 XXX In case the `target' is an unconditional branch, this conditionalising
2167 of the instructions always reduces code size, but not always execution
2168 time. But then, I want to reduce the code size to somewhere near what
2169 /bin/cc produces. */
2171 /* The condition codes of the ARM, and the inverse function. */
2172 char *arm_condition_codes
[] =
2174 "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
2175 "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
2178 #define ARM_INVERSE_CONDITION_CODE(X) ((X) ^ 1)
2180 /* Returns the index of the ARM condition code string in
2181 `arm_condition_codes'. COMPARISON should be an rtx like
2182 `(eq (...) (...))'. */
2185 get_arm_condition_code (comparison
)
2188 switch (GET_CODE (comparison
))
2190 case NE
: return (1);
2191 case EQ
: return (0);
2192 case GE
: return (10);
2193 case GT
: return (12);
2194 case LE
: return (13);
2195 case LT
: return (11);
2196 case GEU
: return (2);
2197 case GTU
: return (8);
2198 case LEU
: return (9);
2199 case LTU
: return (3);
2204 } /* get_arm_condition_code */
2208 final_prescan_insn (insn
, opvec
, noperands
)
2213 /* BODY will hold the body of INSN. */
2214 register rtx body
= PATTERN (insn
);
2216 /* This will be 1 if trying to repeat the trick, and things need to be
2217 reversed if it appears to fail. */
2220 /* JUMP_CLOBBERS will be one implies that the conditions if a branch is
2221 taken are clobbered, even if the rtl suggests otherwise. It also
2222 means that we have to grub around within the jump expression to find
2223 out what the conditions are when the jump isn't taken. */
2224 int jump_clobbers
= 0;
2226 /* If we start with a return insn, we only succeed if we find another one. */
2227 int seeking_return
= 0;
2229 /* START_INSN will hold the insn from where we start looking. This is the
2230 first insn after the following code_label if REVERSE is true. */
2231 rtx start_insn
= insn
;
2233 /* If in state 4, check if the target branch is reached, in order to
2234 change back to state 0. */
2235 if (arm_ccfsm_state
== 4)
2237 if (insn
== arm_target_insn
)
2238 arm_ccfsm_state
= 0;
2242 /* If in state 3, it is possible to repeat the trick, if this insn is an
2243 unconditional branch to a label, and immediately following this branch
2244 is the previous target label which is only used once, and the label this
2245 branch jumps to is not too far off. */
2246 if (arm_ccfsm_state
== 3)
2248 if (simplejump_p (insn
))
2250 start_insn
= next_nonnote_insn (start_insn
);
2251 if (GET_CODE (start_insn
) == BARRIER
)
2253 /* XXX Isn't this always a barrier? */
2254 start_insn
= next_nonnote_insn (start_insn
);
2256 if (GET_CODE (start_insn
) == CODE_LABEL
2257 && CODE_LABEL_NUMBER (start_insn
) == arm_target_label
2258 && LABEL_NUSES (start_insn
) == 1)
2263 else if (GET_CODE (body
) == RETURN
)
2265 start_insn
= next_nonnote_insn (start_insn
);
2266 if (GET_CODE (start_insn
) == BARRIER
)
2267 start_insn
= next_nonnote_insn (start_insn
);
2268 if (GET_CODE (start_insn
) == CODE_LABEL
2269 && CODE_LABEL_NUMBER (start_insn
) == arm_target_label
2270 && LABEL_NUSES (start_insn
) == 1)
2282 if (arm_ccfsm_state
!= 0 && !reverse
)
2284 if (GET_CODE (insn
) != JUMP_INSN
)
2287 /* This jump might be paralled with a clobber of the condition codes
2288 the jump should always come first */
2289 if (GET_CODE (body
) == PARALLEL
&& XVECLEN (body
, 0) > 0)
2290 body
= XVECEXP (body
, 0, 0);
2293 /* If this is a conditional return then we don't want to know */
2294 if (GET_CODE (body
) == SET
&& GET_CODE (SET_DEST (body
)) == PC
2295 && GET_CODE (SET_SRC (body
)) == IF_THEN_ELSE
2296 && (GET_CODE (XEXP (SET_SRC (body
), 1)) == RETURN
2297 || GET_CODE (XEXP (SET_SRC (body
), 2)) == RETURN
))
2302 || (GET_CODE (body
) == SET
&& GET_CODE (SET_DEST (body
)) == PC
2303 && GET_CODE (SET_SRC (body
)) == IF_THEN_ELSE
))
2305 int insns_skipped
= 0, fail
= FALSE
, succeed
= FALSE
;
2306 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
2307 int then_not_else
= TRUE
;
2308 rtx this_insn
= start_insn
, label
= 0;
2310 if (get_attr_conds (insn
) == CONDS_JUMP_CLOB
)
2313 /* Register the insn jumped to. */
2316 if (!seeking_return
)
2317 label
= XEXP (SET_SRC (body
), 0);
2319 else if (GET_CODE (XEXP (SET_SRC (body
), 1)) == LABEL_REF
)
2320 label
= XEXP (XEXP (SET_SRC (body
), 1), 0);
2321 else if (GET_CODE (XEXP (SET_SRC (body
), 2)) == LABEL_REF
)
2323 label
= XEXP (XEXP (SET_SRC (body
), 2), 0);
2324 then_not_else
= FALSE
;
2326 else if (GET_CODE (XEXP (SET_SRC (body
), 1)) == RETURN
)
2328 else if (GET_CODE (XEXP (SET_SRC (body
), 2)) == RETURN
)
2331 then_not_else
= FALSE
;
2336 /* See how many insns this branch skips, and what kind of insns. If all
2337 insns are okay, and the label or unconditional branch to the same
2338 label is not too far away, succeed. */
2339 for (insns_skipped
= 0;
2340 !fail
&& !succeed
&& insns_skipped
< MAX_INSNS_SKIPPED
;
2345 this_insn
= next_nonnote_insn (this_insn
);
2349 scanbody
= PATTERN (this_insn
);
2351 switch (GET_CODE (this_insn
))
2354 /* Succeed if it is the target label, otherwise fail since
2355 control falls in from somewhere else. */
2356 if (this_insn
== label
)
2360 arm_ccfsm_state
= 2;
2361 this_insn
= next_nonnote_insn (this_insn
);
2364 arm_ccfsm_state
= 1;
2372 /* Succeed if the following insn is the target label.
2374 If return insns are used then the last insn in a function
2375 will be a barrier. */
2376 this_insn
= next_nonnote_insn (this_insn
);
2377 if (this_insn
&& this_insn
== label
)
2381 arm_ccfsm_state
= 2;
2382 this_insn
= next_nonnote_insn (this_insn
);
2385 arm_ccfsm_state
= 1;
2393 /* The arm 6xx uses full 32 bit addresses so the cc is not
2394 preserved over calls */
2399 /* If this is an unconditional branch to the same label, succeed.
2400 If it is to another label, do nothing. If it is conditional,
2402 /* XXX Probably, the test for the SET and the PC are unnecessary. */
2404 if (GET_CODE (scanbody
) == SET
2405 && GET_CODE (SET_DEST (scanbody
)) == PC
)
2407 if (GET_CODE (SET_SRC (scanbody
)) == LABEL_REF
2408 && XEXP (SET_SRC (scanbody
), 0) == label
&& !reverse
)
2410 arm_ccfsm_state
= 2;
2413 else if (GET_CODE (SET_SRC (scanbody
)) == IF_THEN_ELSE
)
2416 else if (GET_CODE (scanbody
) == RETURN
2419 arm_ccfsm_state
= 2;
2422 else if (GET_CODE (scanbody
) == PARALLEL
)
2424 switch (get_attr_conds (this_insn
))
2436 /* Instructions using or affecting the condition codes make it
2438 if ((GET_CODE (scanbody
) == SET
2439 || GET_CODE (scanbody
) == PARALLEL
)
2440 && get_attr_conds (this_insn
) != CONDS_NOCOND
)
2450 if ((!seeking_return
) && (arm_ccfsm_state
== 1 || reverse
))
2451 arm_target_label
= CODE_LABEL_NUMBER (label
);
2452 else if (seeking_return
|| arm_ccfsm_state
== 2)
2454 while (this_insn
&& GET_CODE (PATTERN (this_insn
)) == USE
)
2456 this_insn
= next_nonnote_insn (this_insn
);
2457 if (this_insn
&& (GET_CODE (this_insn
) == BARRIER
2458 || GET_CODE (this_insn
) == CODE_LABEL
))
2463 /* Oh, dear! we ran off the end.. give up */
2464 recog (PATTERN (insn
), insn
, NULL_PTR
);
2465 arm_ccfsm_state
= 0;
2468 arm_target_insn
= this_insn
;
2477 get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body
),
2479 if (GET_CODE (XEXP (XEXP (SET_SRC (body
), 0), 0)) == AND
)
2480 arm_current_cc
= ARM_INVERSE_CONDITION_CODE (arm_current_cc
);
2481 if (GET_CODE (XEXP (SET_SRC (body
), 0)) == NE
)
2482 arm_current_cc
= ARM_INVERSE_CONDITION_CODE (arm_current_cc
);
2486 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
2489 arm_current_cc
= get_arm_condition_code (XEXP (SET_SRC (body
),
2493 if (reverse
|| then_not_else
)
2494 arm_current_cc
= ARM_INVERSE_CONDITION_CODE (arm_current_cc
);
2496 /* restore recog_operand (getting the attributes of other insns can
2497 destroy this array, but final.c assumes that it remains intact
2498 accross this call; since the insn has been recognized already we
2499 call recog direct). */
2500 recog (PATTERN (insn
), insn
, NULL_PTR
);
2502 } /* final_prescan_insn */