1 /* Output routines for GCC for ARM/RISCiX.
2 Copyright (C) 1991, 1993, 1994 Free Software Foundation, Inc.
3 Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 and Martin Simmons (@harleqn.co.uk).
5 More major hacks by Richard Earnshaw (rwe11@cl.cam.ac.uk)
7 This file is part of GNU CC.
9 GNU CC is free software; you can redistribute it and/or modify
10 it under the terms of the GNU General Public License as published by
11 the Free Software Foundation; either version 2, or (at your option)
12 any later version.
14 GNU CC is distributed in the hope that it will be useful,
15 but WITHOUT ANY WARRANTY; without even the implied warranty of
16 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 GNU General Public License for more details.
19 You should have received a copy of the GNU General Public License
20 along with GNU CC; see the file COPYING. If not, write to
21 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
35 #include "insn-attr.h"
39 /* The maximum number of insns skipped which will be conditionalised if
41 #define MAX_INSNS_SKIPPED 5
43 /* Some function declarations. */
44 extern FILE *asm_out_file
;
45 extern char *output_multi_immediate ();
46 extern void arm_increase_location ();
48 HOST_WIDE_INT int_log2
PROTO ((HOST_WIDE_INT
));
49 static int get_prologue_size
PROTO ((void));
51 /* Define the information needed to generate branch insns. This is
52 stored from the compare operation. */
54 rtx arm_compare_op0
, arm_compare_op1
;
57 /* What type of cpu are we compiling for? */
59 enum processor_type arm_cpu
;
61 /* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
62 must report the mode of the memory reference from PRINT_OPERAND to
63 PRINT_OPERAND_ADDRESS. */
64 enum machine_mode output_memory_reference_mode
;
66 /* Nonzero if the prologue must set up `fp'. */
67 int current_function_anonymous_args
;
69 /* Location counter of .text segment. */
70 int arm_text_location
= 0;
72 /* Set to one if we think that lr is only saved because of subroutine calls,
73 but all of these can be `put after' return insns. */
74 int lr_save_eliminated
;
76 /* A hash table is used to store text segment labels and their associated
77 offset from the start of the text segment. */
82 struct label_offset
*cdr
;
85 #define LABEL_HASH_SIZE 257
87 static struct label_offset
*offset_table
[LABEL_HASH_SIZE
];
89 /* Set to 1 when a return insn is output, this means that the epilogue
92 static int return_used_this_function
;
94 /* For an explanation of these variables, see final_prescan_insn below. */
100 /* Return 1 if it is possible to return using a single instruction */
107 if (!reload_completed
||current_function_pretend_args_size
108 || current_function_anonymous_args
109 || (get_frame_size () && !(TARGET_APCS
|| frame_pointer_needed
)))
112 /* Can't be done if any of the FPU regs are pushed, since this also
114 for (regno
= 20; regno
< 24; regno
++)
115 if (regs_ever_live
[regno
])
121 /* Return TRUE if int I is a valid immediate ARM constant. */
127 unsigned HOST_WIDE_INT mask
= ~0xFF;
131 if ((i
& mask
& (unsigned HOST_WIDE_INT
) 0xffffffff) == 0)
134 (mask
<< 2) | ((mask
& (unsigned HOST_WIDE_INT
) 0xffffffff)
135 >> (32 - 2)) | ~((unsigned HOST_WIDE_INT
) 0xffffffff);
136 } while (mask
!= ~0xFF);
141 /* This code has been fixed for cross compilation. */
143 static int fpa_consts_inited
= 0;
145 char *strings_fpa
[8] = {
156 static REAL_VALUE_TYPE values_fpa
[8];
164 for (i
= 0; i
< 8; i
++)
166 r
= REAL_VALUE_ATOF (strings_fpa
[i
], DFmode
);
170 fpa_consts_inited
= 1;
173 /* Return TRUE if rtx X is a valid immediate FPU constant. */
176 const_double_rtx_ok_for_fpu (x
)
182 if (!fpa_consts_inited
)
185 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
186 if (REAL_VALUE_MINUS_ZERO (r
))
189 for (i
= 0; i
< 8; i
++)
190 if (REAL_VALUES_EQUAL (r
, values_fpa
[i
]))
196 /* Return TRUE if rtx X is a valid immediate FPU constant. */
199 neg_const_double_rtx_ok_for_fpu (x
)
205 if (!fpa_consts_inited
)
208 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
209 r
= REAL_VALUE_NEGATE (r
);
210 if (REAL_VALUE_MINUS_ZERO (r
))
213 for (i
= 0; i
< 8; i
++)
214 if (REAL_VALUES_EQUAL (r
, values_fpa
[i
]))
220 /* Predicates for `match_operand' and `match_operator'. */
222 /* s_register_operand is the same as register_operand, but it doesn't accept
223 (SUBREG (MEM)...). */
226 s_register_operand (op
, mode
)
228 enum machine_mode mode
;
230 if (GET_MODE (op
) != mode
&& mode
!= VOIDmode
)
233 if (GET_CODE (op
) == SUBREG
)
234 op
= SUBREG_REG (op
);
236 /* We don't consider registers whose class is NO_REGS
237 to be a register operand. */
238 return (GET_CODE (op
) == REG
239 && (REGNO (op
) >= FIRST_PSEUDO_REGISTER
240 || REGNO_REG_CLASS (REGNO (op
)) != NO_REGS
));
243 /* Return 1 if OP is an item in memory, given that we are in reload. */
246 reload_memory_operand (op
, mode
)
248 enum machine_mode mode
;
250 int regno
= true_regnum (op
);
252 return (! CONSTANT_P (op
)
254 || (GET_CODE (op
) == REG
255 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)));
258 /* Return TRUE for valid operands for the rhs of an ARM instruction. */
261 arm_rhs_operand (op
, mode
)
263 enum machine_mode mode
;
265 return (s_register_operand (op
, mode
)
266 || (GET_CODE (op
) == CONST_INT
&& const_ok_for_arm (INTVAL (op
))));
269 /* Return TRUE for valid operands for the rhs of an ARM instruction, or a load.
273 arm_rhsm_operand (op
, mode
)
275 enum machine_mode mode
;
277 return (s_register_operand (op
, mode
)
278 || (GET_CODE (op
) == CONST_INT
&& const_ok_for_arm (INTVAL (op
)))
279 || memory_operand (op
, mode
));
282 /* Return TRUE for valid operands for the rhs of an ARM instruction, or if a
283 constant that is valid when negated. */
286 arm_add_operand (op
, mode
)
288 enum machine_mode mode
;
290 return (s_register_operand (op
, mode
)
291 || (GET_CODE (op
) == CONST_INT
292 && (const_ok_for_arm (INTVAL (op
))
293 || const_ok_for_arm (-INTVAL (op
)))));
297 arm_not_operand (op
, mode
)
299 enum machine_mode mode
;
301 return (s_register_operand (op
, mode
)
302 || (GET_CODE (op
) == CONST_INT
303 && (const_ok_for_arm (INTVAL (op
))
304 || const_ok_for_arm (~INTVAL (op
)))));
307 /* Return TRUE for valid operands for the rhs of an FPU instruction. */
310 fpu_rhs_operand (op
, mode
)
312 enum machine_mode mode
;
314 if (s_register_operand (op
, mode
))
316 else if (GET_CODE (op
) == CONST_DOUBLE
)
317 return (const_double_rtx_ok_for_fpu (op
));
323 fpu_add_operand (op
, mode
)
325 enum machine_mode mode
;
327 if (s_register_operand (op
, mode
))
329 else if (GET_CODE (op
) == CONST_DOUBLE
)
330 return (const_double_rtx_ok_for_fpu (op
)
331 || neg_const_double_rtx_ok_for_fpu (op
));
336 /* Return nonzero if OP is a constant power of two. */
339 power_of_two_operand (op
, mode
)
341 enum machine_mode mode
;
343 if (GET_CODE (op
) == CONST_INT
)
345 HOST_WIDE_INT value
= INTVAL(op
);
346 return value
!= 0 && (value
& (value
- 1)) == 0;
351 /* Return TRUE for a valid operand of a DImode operation.
352 Either: REG, CONST_DOUBLE or MEM(DImode_address).
353 Note that this disallows MEM(REG+REG), but allows
354 MEM(PRE/POST_INC/DEC(REG)). */
357 di_operand (op
, mode
)
359 enum machine_mode mode
;
361 if (s_register_operand (op
, mode
))
364 switch (GET_CODE (op
))
371 return memory_address_p (DImode
, XEXP (op
, 0));
378 /* Return TRUE for valid index operands. */
381 index_operand (op
, mode
)
383 enum machine_mode mode
;
385 return (s_register_operand(op
, mode
)
386 || (immediate_operand (op
, mode
)
387 && INTVAL (op
) < 4096 && INTVAL (op
) > -4096));
390 /* Return TRUE for valid shifts by a constant. This also accepts any
391 power of two on the (somewhat overly relaxed) assumption that the
392 shift operator in this case was a mult. */
395 const_shift_operand (op
, mode
)
397 enum machine_mode mode
;
399 return (power_of_two_operand (op
, mode
)
400 || (immediate_operand (op
, mode
)
401 && (INTVAL (op
) < 32 && INTVAL (op
) > 0)));
404 /* Return TRUE for arithmetic operators which can be combined with a multiply
408 shiftable_operator (x
, mode
)
410 enum machine_mode mode
;
412 if (GET_MODE (x
) != mode
)
416 enum rtx_code code
= GET_CODE (x
);
418 return (code
== PLUS
|| code
== MINUS
419 || code
== IOR
|| code
== XOR
|| code
== AND
);
423 /* Return TRUE for shift operators. */
426 shift_operator (x
, mode
)
428 enum machine_mode mode
;
430 if (GET_MODE (x
) != mode
)
434 enum rtx_code code
= GET_CODE (x
);
437 return power_of_two_operand (XEXP (x
, 1));
439 return (code
== ASHIFT
|| code
== ASHIFTRT
|| code
== LSHIFTRT
);
443 int equality_operator (x
, mode
)
445 enum machine_mode mode
;
447 return GET_CODE (x
) == EQ
|| GET_CODE (x
) == NE
;
450 /* Return TRUE for SMIN SMAX UMIN UMAX operators. */
453 minmax_operator (x
, mode
)
455 enum machine_mode mode
;
457 enum rtx_code code
= GET_CODE (x
);
459 if (GET_MODE (x
) != mode
)
462 return code
== SMIN
|| code
== SMAX
|| code
== UMIN
|| code
== UMAX
;
465 /* Return TRUE if x is EQ or NE. */
467 /* Return TRUE if this is the condition code register, if we aren't given
468 a mode, accept any class CCmode register */
471 cc_register (x
, mode
)
473 enum machine_mode mode
;
475 if (mode
== VOIDmode
)
478 if (GET_MODE_CLASS (mode
) != MODE_CC
)
482 if (mode
== GET_MODE (x
) && GET_CODE (x
) == REG
&& REGNO (x
) == 24)
492 enum rtx_code code
= GET_CODE (x
);
496 else if (code
== SMIN
)
498 else if (code
== UMIN
)
500 else if (code
== UMAX
)
506 /* Return 1 if memory locations are adjacent */
509 adjacent_mem_locations (a
, b
)
512 int val0
= 0, val1
= 0;
515 if ((GET_CODE (XEXP (a
, 0)) == REG
516 || (GET_CODE (XEXP (a
, 0)) == PLUS
517 && GET_CODE (XEXP (XEXP (a
, 0), 1)) == CONST_INT
))
518 && (GET_CODE (XEXP (b
, 0)) == REG
519 || (GET_CODE (XEXP (b
, 0)) == PLUS
520 && GET_CODE (XEXP (XEXP (b
, 0), 1)) == CONST_INT
)))
522 if (GET_CODE (XEXP (a
, 0)) == PLUS
)
524 reg0
= REGNO (XEXP (XEXP (a
, 0), 0));
525 val0
= INTVAL (XEXP (XEXP (a
, 0), 1));
528 reg0
= REGNO (XEXP (a
, 0));
529 if (GET_CODE (XEXP (b
, 0)) == PLUS
)
531 reg1
= REGNO (XEXP (XEXP (b
, 0), 0));
532 val1
= INTVAL (XEXP (XEXP (b
, 0), 1));
535 reg1
= REGNO (XEXP (b
, 0));
536 return (reg0
== reg1
) && ((val1
- val0
) == 4 || (val0
- val1
) == 4);
541 /* Return 1 if OP is a load multiple operation. It is known to be
542 parallel and the first section will be tested. */
545 load_multiple_operation (op
, mode
)
547 enum machine_mode mode
;
549 HOST_WIDE_INT count
= XVECLEN (op
, 0);
552 HOST_WIDE_INT i
= 1, base
= 0;
556 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
)
559 /* Check to see if this might be a write-back */
560 if (GET_CODE (SET_SRC (elt
= XVECEXP (op
, 0, 0))) == PLUS
)
565 /* Now check it more carefully */
566 if (GET_CODE (SET_DEST (elt
)) != REG
567 || GET_CODE (XEXP (SET_SRC (elt
), 0)) != REG
568 || REGNO (XEXP (SET_SRC (elt
), 0)) != REGNO (SET_DEST (elt
))
569 || GET_CODE (XEXP (SET_SRC (elt
), 1)) != CONST_INT
570 || INTVAL (XEXP (SET_SRC (elt
), 1)) != (count
- 2) * 4
571 || GET_CODE (XVECEXP (op
, 0, count
- 1)) != CLOBBER
572 || GET_CODE (XEXP (XVECEXP (op
, 0, count
- 1), 0)) != REG
573 || REGNO (XEXP (XVECEXP (op
, 0, count
- 1), 0))
574 != REGNO (SET_DEST (elt
)))
580 /* Perform a quick check so we don't blow up below. */
582 || GET_CODE (XVECEXP (op
, 0, i
- 1)) != SET
583 || GET_CODE (SET_DEST (XVECEXP (op
, 0, i
- 1))) != REG
584 || GET_CODE (SET_SRC (XVECEXP (op
, 0, i
- 1))) != MEM
)
587 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, i
- 1)));
588 src_addr
= XEXP (SET_SRC (XVECEXP (op
, 0, i
- 1)), 0);
590 for (; i
< count
; i
++)
592 rtx elt
= XVECEXP (op
, 0, i
);
594 if (GET_CODE (elt
) != SET
595 || GET_CODE (SET_DEST (elt
)) != REG
596 || GET_MODE (SET_DEST (elt
)) != SImode
597 || REGNO (SET_DEST (elt
)) != dest_regno
+ i
- base
598 || GET_CODE (SET_SRC (elt
)) != MEM
599 || GET_MODE (SET_SRC (elt
)) != SImode
600 || GET_CODE (XEXP (SET_SRC (elt
), 0)) != PLUS
601 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt
), 0), 0), src_addr
)
602 || GET_CODE (XEXP (XEXP (SET_SRC (elt
), 0), 1)) != CONST_INT
603 || INTVAL (XEXP (XEXP (SET_SRC (elt
), 0), 1)) != (i
- base
) * 4)
610 /* Return 1 if OP is a store multiple operation. It is known to be
611 parallel and the first section will be tested. */
614 store_multiple_operation (op
, mode
)
616 enum machine_mode mode
;
618 HOST_WIDE_INT count
= XVECLEN (op
, 0);
621 HOST_WIDE_INT i
= 1, base
= 0;
625 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
)
628 /* Check to see if this might be a write-back */
629 if (GET_CODE (SET_SRC (elt
= XVECEXP (op
, 0, 0))) == PLUS
)
634 /* Now check it more carefully */
635 if (GET_CODE (SET_DEST (elt
)) != REG
636 || GET_CODE (XEXP (SET_SRC (elt
), 0)) != REG
637 || REGNO (XEXP (SET_SRC (elt
), 0)) != REGNO (SET_DEST (elt
))
638 || GET_CODE (XEXP (SET_SRC (elt
), 1)) != CONST_INT
639 || INTVAL (XEXP (SET_SRC (elt
), 1)) != (count
- 2) * 4
640 || GET_CODE (XVECEXP (op
, 0, count
- 1)) != CLOBBER
641 || GET_CODE (XEXP (XVECEXP (op
, 0, count
- 1), 0)) != REG
642 || REGNO (XEXP (XVECEXP (op
, 0, count
- 1), 0))
643 != REGNO (SET_DEST (elt
)))
649 /* Perform a quick check so we don't blow up below. */
651 || GET_CODE (XVECEXP (op
, 0, i
- 1)) != SET
652 || GET_CODE (SET_DEST (XVECEXP (op
, 0, i
- 1))) != MEM
653 || GET_CODE (SET_SRC (XVECEXP (op
, 0, i
- 1))) != REG
)
656 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, i
- 1)));
657 dest_addr
= XEXP (SET_DEST (XVECEXP (op
, 0, i
- 1)), 0);
659 for (; i
< count
; i
++)
661 elt
= XVECEXP (op
, 0, i
);
663 if (GET_CODE (elt
) != SET
664 || GET_CODE (SET_SRC (elt
)) != REG
665 || GET_MODE (SET_SRC (elt
)) != SImode
666 || REGNO (SET_SRC (elt
)) != src_regno
+ i
- base
667 || GET_CODE (SET_DEST (elt
)) != MEM
668 || GET_MODE (SET_DEST (elt
)) != SImode
669 || GET_CODE (XEXP (SET_DEST (elt
), 0)) != PLUS
670 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt
), 0), 0), dest_addr
)
671 || GET_CODE (XEXP (XEXP (SET_DEST (elt
), 0), 1)) != CONST_INT
672 || INTVAL (XEXP (XEXP (SET_DEST (elt
), 0), 1)) != (i
- base
) * 4)
679 /* Routines for use with attributes */
682 const_pool_offset (symbol
)
685 return get_pool_offset (symbol
) - get_pool_size () - get_prologue_size ();
688 /* Routines for use in generating RTL */
691 arm_gen_load_multiple (base_regno
, count
, from
, up
, write_back
)
700 int sign
= up
? 1 : -1;
702 result
= gen_rtx (PARALLEL
, VOIDmode
,
703 rtvec_alloc (count
+ (write_back
? 2 : 0)));
706 XVECEXP (result
, 0, 0)
707 = gen_rtx (SET
, GET_MODE (from
), from
,
708 plus_constant (from
, count
* 4 * sign
));
713 for (j
= 0; i
< count
; i
++, j
++)
715 XVECEXP (result
, 0, i
)
716 = gen_rtx (SET
, VOIDmode
, gen_rtx (REG
, SImode
, base_regno
+ j
),
717 gen_rtx (MEM
, SImode
,
718 plus_constant (from
, j
* 4 * sign
)));
722 XVECEXP (result
, 0, i
) = gen_rtx (CLOBBER
, SImode
, from
);
728 arm_gen_store_multiple (base_regno
, count
, to
, up
, write_back
)
737 int sign
= up
? 1 : -1;
739 result
= gen_rtx (PARALLEL
, VOIDmode
,
740 rtvec_alloc (count
+ (write_back
? 2 : 0)));
743 XVECEXP (result
, 0, 0)
744 = gen_rtx (SET
, GET_MODE (to
), to
,
745 plus_constant (to
, count
* 4 * sign
));
750 for (j
= 0; i
< count
; i
++, j
++)
752 XVECEXP (result
, 0, i
)
753 = gen_rtx (SET
, VOIDmode
,
754 gen_rtx (MEM
, SImode
, plus_constant (to
, j
* 4 * sign
)),
755 gen_rtx (REG
, SImode
, base_regno
+ j
));
759 XVECEXP (result
, 0, i
) = gen_rtx (CLOBBER
, SImode
, to
);
764 /* X and Y are two things to compare using CODE. Emit the compare insn and
765 return the rtx for register 0 in the proper mode. FP means this is a
766 floating point compare: I don't think that it is needed on the arm. */
769 gen_compare_reg (code
, x
, y
, fp
)
773 enum machine_mode mode
= SELECT_CC_MODE (code
, x
, y
);
774 rtx cc_reg
= gen_rtx (REG
, mode
, 24);
776 emit_insn (gen_rtx (SET
, VOIDmode
, cc_reg
,
777 gen_rtx (COMPARE
, mode
, x
, y
)));
783 arm_reload_out_hi (operands
)
786 rtx base
= find_replacement (&XEXP (operands
[0], 0));
788 emit_insn (gen_rtx (SET
, VOIDmode
,
789 gen_rtx (MEM
, QImode
, base
),
790 gen_rtx (SUBREG
, QImode
, operands
[1], 0)));
791 emit_insn (gen_rtx (SET
, VOIDmode
, operands
[2],
792 gen_rtx (LSHIFTRT
, SImode
,
793 gen_rtx (SUBREG
, SImode
, operands
[1], 0),
795 emit_insn (gen_rtx (SET
, VOIDmode
,
796 gen_rtx (MEM
, QImode
,
797 plus_constant (base
, 1)),
798 gen_rtx (SUBREG
, QImode
, operands
[2], 0)));
801 /* Check to see if a branch is forwards or backwards. Return TRUE if it
is backwards. */
805 arm_backwards_branch (from
, to
)
808 return insn_addresses
[to
] <= insn_addresses
[from
];
811 /* Check to see if a branch is within the distance that can be done using
812 an arithmetic expression. */
814 short_branch (from
, to
)
817 int delta
= insn_addresses
[from
] + 8 - insn_addresses
[to
];
819 return abs (delta
) < 980; /* A small margin for safety */
822 /* Check to see that the insn isn't the target of the conditionalizing
825 arm_insn_not_targeted (insn
)
828 return insn
!= arm_target_insn
;
832 /* Routines to output assembly language. */
834 /* If the rtx is the correct value then return the string of the number.
835 In this way we can ensure that valid double constants are generated even
836 when cross compiling. */
838 fp_immediate_constant (x
)
844 if (!fpa_consts_inited
)
847 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
848 for (i
= 0; i
< 8; i
++)
849 if (REAL_VALUES_EQUAL (r
, values_fpa
[i
]))
850 return strings_fpa
[i
];
856 /* Output the operands of a LDM/STM instruction to STREAM.
857 MASK is the ARM register set mask of which only bits 0-15 are important.
858 INSTR is the possibly suffixed base register. HAT unequals zero if a hat
859 must follow the register list. */
862 print_multi_reg (stream
, instr
, mask
, hat
)
868 int not_first
= FALSE
;
870 fprintf (stream
, "\t%s, {", instr
);
871 for (i
= 0; i
< 16; i
++)
875 fprintf (stream
, ", ");
876 fprintf (stream
, "%s", reg_names
[i
]);
880 fprintf (stream
, "}%s\n", hat
? "^" : "");
883 /* Output a 'call' insn. */
886 output_call (operands
)
889 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
891 if (REGNO (operands
[0]) == 14)
893 operands
[0] = gen_rtx (REG
, SImode
, 12);
894 output_asm_insn ("mov\t%0, lr", operands
);
896 output_asm_insn ("mov\tlr, pc", operands
);
897 output_asm_insn ("mov\tpc, %0", operands
);
905 int something_changed
= 0;
907 int code
= GET_CODE (x0
);
914 if (REGNO (x0
) == 14)
916 *x
= gen_rtx (REG
, SImode
, 12);
921 /* Scan through the sub-elements and change any references there */
922 fmt
= GET_RTX_FORMAT (code
);
923 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
925 something_changed
|= eliminate_lr2ip (&XEXP (x0
, i
));
926 else if (fmt
[i
] == 'E')
927 for (j
= 0; j
< XVECLEN (x0
, i
); j
++)
928 something_changed
|= eliminate_lr2ip (&XVECEXP (x0
, i
, j
));
929 return something_changed
;
933 /* Output a 'call' insn that is a reference in memory. */
936 output_call_mem (operands
)
939 operands
[0] = copy_rtx (operands
[0]); /* Be ultra careful */
940 /* Handle calls using lr by using ip (which may be clobbered in subr anyway).
942 if (eliminate_lr2ip (&operands
[0]))
943 output_asm_insn ("mov\tip, lr", operands
);
945 output_asm_insn ("mov\tlr, pc", operands
);
946 output_asm_insn ("ldr\tpc, %0", operands
);
951 /* Output a move from arm registers to an fpu register.
952 OPERANDS[0] is an fpu register.
953 OPERANDS[1] is the first register of an arm register pair. */
956 output_mov_long_double_fpu_from_arm (operands
)
959 int arm_reg0
= REGNO (operands
[1]);
965 ops
[0] = gen_rtx (REG
, SImode
, arm_reg0
);
966 ops
[1] = gen_rtx (REG
, SImode
, 1 + arm_reg0
);
967 ops
[2] = gen_rtx (REG
, SImode
, 2 + arm_reg0
);
969 output_asm_insn ("stmfd\tsp!, {%0, %1, %2}", ops
);
970 output_asm_insn ("ldfe\t%0, [sp], #12", operands
);
974 /* Output a move from an fpu register to arm registers.
975 OPERANDS[0] is the first register of an arm register pair.
976 OPERANDS[1] is an fpu register. */
979 output_mov_long_double_arm_from_fpu (operands
)
982 int arm_reg0
= REGNO (operands
[0]);
988 ops
[0] = gen_rtx (REG
, SImode
, arm_reg0
);
989 ops
[1] = gen_rtx (REG
, SImode
, 1 + arm_reg0
);
990 ops
[2] = gen_rtx (REG
, SImode
, 2 + arm_reg0
);
992 output_asm_insn ("stfe\t%1, [sp, #-12]!", operands
);
993 output_asm_insn ("ldmfd\tsp!, {%0, %1, %2}", ops
);
997 /* Output a move from arm registers to arm registers of a long double
998 OPERANDS[0] is the destination.
999 OPERANDS[1] is the source. */
1001 output_mov_long_double_arm_from_arm (operands
)
1004 /* We have to be careful here because the two might overlap */
1005 int dest_start
= REGNO (operands
[0]);
1006 int src_start
= REGNO (operands
[1]);
1010 if (dest_start
< src_start
)
1012 for (i
= 0; i
< 3; i
++)
1014 ops
[0] = gen_rtx (REG
, SImode
, dest_start
+ i
);
1015 ops
[1] = gen_rtx (REG
, SImode
, src_start
+ i
);
1016 output_asm_insn ("mov\t%0, %1", ops
);
1021 for (i
= 2; i
>= 0; i
--)
1023 ops
[0] = gen_rtx (REG
, SImode
, dest_start
+ i
);
1024 ops
[1] = gen_rtx (REG
, SImode
, src_start
+ i
);
1025 output_asm_insn ("mov\t%0, %1", ops
);
1033 /* Output a move from arm registers to an fpu register.
1034 OPERANDS[0] is an fpu register.
1035 OPERANDS[1] is the first register of an arm register pair. */
1038 output_mov_double_fpu_from_arm (operands
)
1041 int arm_reg0
= REGNO (operands
[1]);
1046 ops
[0] = gen_rtx (REG
, SImode
, arm_reg0
);
1047 ops
[1] = gen_rtx (REG
, SImode
, 1 + arm_reg0
);
1048 output_asm_insn ("stmfd\tsp!, {%0, %1}", ops
);
1049 output_asm_insn ("ldfd\t%0, [sp], #8", operands
);
1053 /* Output a move from an fpu register to arm registers.
1054 OPERANDS[0] is the first register of an arm register pair.
1055 OPERANDS[1] is an fpu register. */
1058 output_mov_double_arm_from_fpu (operands
)
1061 int arm_reg0
= REGNO (operands
[0]);
1067 ops
[0] = gen_rtx (REG
, SImode
, arm_reg0
);
1068 ops
[1] = gen_rtx (REG
, SImode
, 1 + arm_reg0
);
1069 output_asm_insn ("stfd\t%1, [sp, #-8]!", operands
);
1070 output_asm_insn ("ldmfd\tsp!, {%0, %1}", ops
);
1074 /* Output a move between double words.
1075 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
1076 or MEM<-REG and all MEMs must be offsettable addresses. */
1079 output_move_double (operands
)
1082 enum rtx_code code0
= GET_CODE (operands
[0]);
1083 enum rtx_code code1
= GET_CODE (operands
[1]);
1088 int reg0
= REGNO (operands
[0]);
1090 otherops
[0] = gen_rtx (REG
, SImode
, 1 + reg0
);
1093 int reg1
= REGNO (operands
[1]);
1097 otherops
[1] = gen_rtx (REG
, SImode
, 1 + reg1
);
1099 /* Ensure the second source is not overwritten */
1100 if (reg0
== 1 + reg1
)
1102 output_asm_insn("mov\t%0, %1", otherops
);
1103 output_asm_insn("mov\t%0, %1", operands
);
1107 output_asm_insn("mov\t%0, %1", operands
);
1108 output_asm_insn("mov\t%0, %1", otherops
);
1111 else if (code1
== CONST_DOUBLE
)
1113 otherops
[1] = gen_rtx (CONST_INT
, VOIDmode
,
1114 CONST_DOUBLE_HIGH (operands
[1]));
1115 operands
[1] = gen_rtx (CONST_INT
, VOIDmode
,
1116 CONST_DOUBLE_LOW (operands
[1]));
1117 output_mov_immediate (operands
, FALSE
, "");
1118 output_mov_immediate (otherops
, FALSE
, "");
1120 else if (code1
== CONST_INT
)
1122 otherops
[1] = const0_rtx
;
1123 /* sign extend the intval into the high-order word */
1124 /* Note: output_mov_immediate may clobber operands[1], so we
1125 put this out first */
1126 if (INTVAL (operands
[1]) < 0)
1127 output_asm_insn ("mvn\t%0, %1", otherops
);
1129 output_asm_insn ("mov\t%0, %1", otherops
);
1130 output_mov_immediate (operands
, FALSE
, "");
1132 else if (code1
== MEM
)
1134 switch (GET_CODE (XEXP (operands
[1], 0)))
1137 /* Handle the simple case where address is [r, #0] more
1139 operands
[1] = XEXP (operands
[1], 0);
1140 output_asm_insn ("ldmia\t%1, %M0", operands
);
1143 operands
[1] = XEXP (XEXP (operands
[1], 0), 0);
1144 output_asm_insn ("add\t%1, %1, #8", operands
);
1145 output_asm_insn ("ldmia\t%1, %M0", operands
);
1148 operands
[1] = XEXP (XEXP (operands
[1], 0), 0);
1149 output_asm_insn ("sub\t%1, %1, #8", operands
);
1150 output_asm_insn ("ldmia\t%1, %M0", operands
);
1153 operands
[1] = XEXP (XEXP (operands
[1], 0), 0);
1154 output_asm_insn ("ldmia\t%1!, %M0", operands
);
1157 operands
[1] = XEXP (XEXP (operands
[1], 0), 0);
1158 output_asm_insn ("ldmia\t%1, %M0", operands
);
1159 output_asm_insn ("sub\t%1, %1, #8", operands
);
1162 otherops
[1] = adj_offsettable_operand (operands
[1], 4);
1163 /* Take care of overlapping base/data reg. */
1164 if (reg_mentioned_p (operands
[0], operands
[1]))
1166 output_asm_insn ("ldr\t%0, %1", otherops
);
1167 output_asm_insn ("ldr\t%0, %1", operands
);
1171 output_asm_insn ("ldr\t%0, %1", operands
);
1172 output_asm_insn ("ldr\t%0, %1", otherops
);
1176 else abort(); /* Constraints should prevent this */
1178 else if (code0
== MEM
&& code1
== REG
)
1180 if (REGNO (operands
[1]) == 12)
1182 switch (GET_CODE (XEXP (operands
[0], 0)))
1185 operands
[0] = XEXP (operands
[0], 0);
1186 output_asm_insn ("stmia\t%0, %M1", operands
);
1189 operands
[0] = XEXP (XEXP (operands
[0], 0), 0);
1190 output_asm_insn ("add\t%0, %0, #8", operands
);
1191 output_asm_insn ("stmia\t%0, %M1", operands
);
1194 operands
[0] = XEXP (XEXP (operands
[0], 0), 0);
1195 output_asm_insn ("sub\t%0, %0, #8", operands
);
1196 output_asm_insn ("stmia\t%0, %M1", operands
);
1199 operands
[0] = XEXP (XEXP (operands
[0], 0), 0);
1200 output_asm_insn ("stmia\t%0!, %M1", operands
);
1203 operands
[0] = XEXP (XEXP (operands
[0], 0), 0);
1204 output_asm_insn ("stmia\t%0, %M1", operands
);
1205 output_asm_insn ("sub\t%0, %0, #8", operands
);
1208 otherops
[0] = adj_offsettable_operand (operands
[0], 4);
1209 otherops
[1] = gen_rtx (REG
, SImode
, 1 + REGNO (operands
[1]));
1210 output_asm_insn ("str\t%1, %0", operands
);
1211 output_asm_insn ("str\t%1, %0", otherops
);
1214 else abort(); /* Constraints should prevent this */
1217 } /* output_move_double */
1220 /* Output an arbitrary MOV reg, #n.
1221 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
1224 output_mov_immediate (operands
)
1227 HOST_WIDE_INT n
= INTVAL (operands
[1]);
1231 /* Try to use one MOV */
1232 if (const_ok_for_arm (n
))
1234 output_asm_insn ("mov\t%0, %1", operands
);
1238 /* Try to use one MVN */
1239 if (const_ok_for_arm (~n
))
1241 operands
[1] = GEN_INT (~n
);
1242 output_asm_insn ("mvn\t%0, %1", operands
);
1246 /* If all else fails, make it out of ORRs or BICs as appropriate. */
1248 for (i
=0; i
< 32; i
++)
1252 if (n_ones
> 16) /* Shorter to use MVN with BIC in this case. */
1253 output_multi_immediate(operands
, "mvn\t%0, %1", "bic\t%0, %0, %1", 1, ~n
);
1255 output_multi_immediate(operands
, "mov\t%0, %1", "orr\t%0, %0, %1", 1, n
);
1261 /* Output an ADD r, s, #n where n may be too big for one instruction. If
1262 adding zero to one register, output nothing. */
1265 output_add_immediate (operands
)
1268 HOST_WIDE_INT n
= INTVAL (operands
[2]);
1270 if (n
!= 0 || REGNO (operands
[0]) != REGNO (operands
[1]))
1273 output_multi_immediate (operands
,
1274 "sub\t%0, %1, %2", "sub\t%0, %0, %2", 2, -n
);
1276 output_multi_immediate (operands
,
1277 "add\t%0, %1, %2", "add\t%0, %0, %2", 2, n
);
1284 /* Output a multiple immediate operation.
1285 OPERANDS is the vector of operands referred to in the output patterns.
1286 INSTR1 is the output pattern to use for the first constant.
1287 INSTR2 is the output pattern to use for subsequent constants.
1288 IMMED_OP is the index of the constant slot in OPERANDS.
1289 N is the constant value. */
1292 output_multi_immediate (operands
, instr1
, instr2
, immed_op
, n
)
1294 char *instr1
, *instr2
;
1298 #if HOST_BITS_PER_WIDE_INT > 32
1304 operands
[immed_op
] = const0_rtx
;
1305 output_asm_insn (instr1
, operands
); /* Quick and easy output */
1310 char *instr
= instr1
;
1312 /* Note that n is never zero here (which would give no output) */
1313 for (i
= 0; i
< 32; i
+= 2)
1317 operands
[immed_op
] = GEN_INT (n
& (255 << i
));
1318 output_asm_insn (instr
, operands
);
1325 } /* output_multi_immediate */
1328 /* Return the appropriate ARM instruction for the operation code.
1329 The returned result should not be overwritten. OP is the rtx of the
1330 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
1334 arithmetic_instr (op
, shift_first_arg
)
1336 int shift_first_arg
;
1338 switch (GET_CODE(op
))
1344 return shift_first_arg
? "rsb" : "sub";
1361 /* Ensure valid constant shifts and return the appropriate shift mnemonic
1362 for the operation code. The returned result should not be overwritten.
1363 OP is the rtx code of the shift.
1364 SHIFT_PTR points to the shift size operand. */
1367 shift_instr (op
, shift_ptr
)
1392 *shift_ptr
= GEN_INT (int_log2 (INTVAL (*shift_ptr
)));
1399 if (GET_CODE (*shift_ptr
) == CONST_INT
)
1401 int shift
= INTVAL (*shift_ptr
);
1403 if (shift
< min_shift
)
1404 *shift_ptr
= gen_rtx (CONST_INT
, VOIDmode
, 0);
1405 else if (shift
> max_shift
)
1406 *shift_ptr
= gen_rtx (CONST_INT
, VOIDmode
, max_shift
);
1412 /* Obtain the shift from the POWER of two. */
1416 HOST_WIDE_INT power
;
1418 HOST_WIDE_INT shift
= 0;
1420 while (((1 << shift
) & power
) == 0)
1431 /* Output an arithmetic instruction which may set the condition code.
1432 OPERANDS[0] is the destination register.
1433 OPERANDS[1] is the arithmetic operator expression.
1434 OPERANDS[2] is the left hand argument.
1435 OPERANDS[3] is the right hand argument.
1436 CONST_FIRST_ARG is TRUE if the first argument of the operator was constant.
1437 SET_COND is TRUE when the condition code should be set. */
1440 output_arithmetic (operands
, const_first_arg
, set_cond
)
1442 int const_first_arg
;
1446 char *instr
= arithmetic_instr (operands
[1], const_first_arg
);
1448 sprintf (mnemonic
, "%s%s\t%%0, %%2, %%3", instr
, set_cond
? "s" : "");
1449 output_asm_insn (mnemonic
, operands
);
1454 /* Output an arithmetic instruction with a shift.
1455 OPERANDS[0] is the destination register.
1456 OPERANDS[1] is the arithmetic operator expression.
1457 OPERANDS[2] is the unshifted register.
1458 OPERANDS[3] is the shift operator expression.
1459 OPERANDS[4] is the shifted register.
1460 OPERANDS[5] is the shift constant or register.
1461 SHIFT_FIRST_ARG is TRUE if the first argument of the operator was shifted.
1462 SET_COND is TRUE when the condition code should be set. */
1465 output_arithmetic_with_shift (operands
, shift_first_arg
, set_cond
)
1467 int shift_first_arg
;
1471 char *instr
= arithmetic_instr (operands
[1], shift_first_arg
);
1472 char *condbit
= set_cond
? "s" : "";
1473 char *shift
= shift_instr (GET_CODE (operands
[3]), &operands
[5]);
1475 sprintf (mnemonic
, "%s%s\t%%0, %%2, %%4, %s %%5", instr
, condbit
, shift
);
1476 output_asm_insn (mnemonic
, operands
);
1480 /* Output an arithmetic instruction with a power of two multiplication.
1481 OPERANDS[0] is the destination register.
1482 OPERANDS[1] is the arithmetic operator expression.
1483 OPERANDS[2] is the unmultiplied register.
1484 OPERANDS[3] is the multiplied register.
1485 OPERANDS[4] is the constant multiple (power of two).
1486 SHIFT_FIRST_ARG is TRUE if the first arg of the operator was multiplied. */
1489 output_arithmetic_with_immediate_multiply (operands
, shift_first_arg
)
1491 int shift_first_arg
;
1494 char *instr
= arithmetic_instr (operands
[1], shift_first_arg
);
1495 HOST_WIDE_INT shift
= int_log2 (INTVAL (operands
[4]));
1497 sprintf (mnemonic
, "%s\t%%0, %%2, %%3, asl#%d", instr
, (int) shift
);
1498 output_asm_insn (mnemonic
, operands
);
1503 /* Output a move with a shift.
1504 OP is the shift rtx code.
1505 OPERANDS[0] = destination register.
1506 OPERANDS[1] = source register.
1507 OPERANDS[2] = shift constant or register. */
1510 output_shifted_move (op
, operands
)
1516 if (GET_CODE (operands
[2]) == CONST_INT
&& INTVAL (operands
[2]) == 0)
1517 sprintf (mnemonic
, "mov\t%%0, %%1");
1519 sprintf (mnemonic
, "mov\t%%0, %%1, %s %%2",
1520 shift_instr (op
, &operands
[2]));
1522 output_asm_insn (mnemonic
, operands
);
1527 output_shift_compare (operands
, neg
)
1534 sprintf (buf
, "cmn\t%%1, %%3, %s %%4", shift_instr (GET_CODE (operands
[2]),
1537 sprintf (buf
, "cmp\t%%1, %%3, %s %%4", shift_instr (GET_CODE (operands
[2]),
1539 output_asm_insn (buf
, operands
);
/* Output a .ascii pseudo-op, keeping track of lengths.  This is because
   /bin/as is horribly restrictive.  Long strings are broken into chunks
   of at most ~50 output characters, and arm_increase_location is called
   to keep the text-location counter in step with the emitted bytes.  */

void
output_ascii_pseudo_op (stream, p, len)
     FILE *stream;
     unsigned char *p;
     int len;
{
  int i;
  /* Start beyond the limit so the first character opens a new .ascii.  */
  int len_so_far = 1000;
  int chars_so_far = 0;

  for (i = 0; i < len; i++)
    {
      register int c = p[i];

      /* Close the current string and start a new one when the assembler's
	 line-length limit approaches.  */
      if (len_so_far > 50)
	{
	  /* NOTE(review): body reconstructed from a mangled copy; the
	     original guarded the closing quote on the first chunk —
	     confirm against arm.c.  */
	  if (chars_so_far)
	    fputs ("\"\n", stream);
	  fputs ("\t.ascii\t\"", stream);
	  len_so_far = 0;
	  arm_increase_location (chars_so_far);
	  chars_so_far = 0;
	}

      /* Quote characters that are special inside a .ascii string.  */
      if (c == '\"' || c == '\\')
	{
	  putc ('\\', stream);
	  len_so_far++;
	}

      if (c >= ' ' && c < 0177)
	{
	  putc (c, stream);
	  len_so_far++;
	}
      else
	{
	  /* Non-printing characters are emitted as octal escapes.  */
	  fprintf (stream, "\\%03o", c);
	  len_so_far += 4;
	}

      chars_so_far++;
    }

  fputs ("\"\n", stream);
  arm_increase_location (chars_so_far);
}
1595 /* Try to determine whether a pattern really clobbers the link register.
1596 This information is useful when peepholing, so that lr need not be pushed
1597 if we combine a call followed by a return.
1598 NOTE: This code does not check for side-effect expressions in a SET_SRC:
1599 such a check should not be needed because these only update an existing
1600 value within a register; the register must still be set elsewhere within
1604 pattern_really_clobbers_lr (x
)
1609 switch (GET_CODE (x
))
1612 switch (GET_CODE (SET_DEST (x
)))
1615 return REGNO (SET_DEST (x
)) == 14;
1618 if (GET_CODE (XEXP (SET_DEST (x
), 0)) == REG
)
1619 return REGNO (XEXP (SET_DEST (x
), 0)) == 14;
1621 if (GET_CODE (XEXP (SET_DEST (x
), 0)) == MEM
)
1630 for (i
= 0; i
< XVECLEN (x
, 0); i
++)
1631 if (pattern_really_clobbers_lr (XVECEXP (x
, 0, i
)))
1636 switch (GET_CODE (XEXP (x
, 0)))
1639 return REGNO (XEXP (x
, 0)) == 14;
1642 if (GET_CODE (XEXP (XEXP (x
, 0), 0)) == REG
)
1643 return REGNO (XEXP (XEXP (x
, 0), 0)) == 14;
1659 function_really_clobbers_lr (first
)
1664 for (insn
= first
; insn
; insn
= next_nonnote_insn (insn
))
1666 switch (GET_CODE (insn
))
1671 case JUMP_INSN
: /* Jump insns only change the PC (and conds) */
1676 if (pattern_really_clobbers_lr (PATTERN (insn
)))
1681 /* Don't yet know how to handle those calls that are not to a
1683 if (GET_CODE (PATTERN (insn
)) != PARALLEL
)
1686 switch (GET_CODE (XVECEXP (PATTERN (insn
), 0, 0)))
1689 if (GET_CODE (XEXP (XEXP (XVECEXP (PATTERN (insn
), 0, 0), 0), 0))
1695 if (GET_CODE (XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn
),
1701 default: /* Don't recognize it, be safe */
1705 /* A call can be made (by peepholing) not to clobber lr iff it is
1706 followed by a return. There may, however, be a use insn iff
1707 we are returning the result of the call.
1708 If we run off the end of the insn chain, then that means the
1709 call was at the end of the function. Unfortunately we don't
1710 have a return insn for the peephole to recognize, so we
1711 must reject this. (Can this be fixed by adding our own insn?) */
1712 if ((next
= next_nonnote_insn (insn
)) == NULL
)
1715 if (GET_CODE (next
) == INSN
&& GET_CODE (PATTERN (next
)) == USE
1716 && (GET_CODE (XVECEXP (PATTERN (insn
), 0, 0)) == SET
)
1717 && (REGNO (SET_DEST (XVECEXP (PATTERN (insn
), 0, 0)))
1718 == REGNO (XEXP (PATTERN (next
), 0))))
1719 if ((next
= next_nonnote_insn (next
)) == NULL
)
1722 if (GET_CODE (next
) == JUMP_INSN
1723 && GET_CODE (PATTERN (next
)) == RETURN
)
1732 /* We have reached the end of the chain so lr was _not_ clobbered */
1737 output_return_instruction (operand
, really_return
)
1742 int reg
, live_regs
= 0;
1744 if (current_function_calls_alloca
&& ! really_return
)
1747 for (reg
= 0; reg
<= 10; reg
++)
1748 if (regs_ever_live
[reg
] && ! call_used_regs
[reg
])
1751 if (live_regs
|| (regs_ever_live
[14] && ! lr_save_eliminated
))
1754 if (frame_pointer_needed
)
1759 if (lr_save_eliminated
|| ! regs_ever_live
[14])
1762 if (frame_pointer_needed
)
1763 strcpy (instr
, "ldm%d0ea\tfp, {");
1765 strcpy (instr
, "ldm%d0fd\tsp!, {");
1767 for (reg
= 0; reg
<= 10; reg
++)
1768 if (regs_ever_live
[reg
] && ! call_used_regs
[reg
])
1770 strcat (instr
, reg_names
[reg
]);
1772 strcat (instr
, ", ");
1775 if (frame_pointer_needed
)
1777 strcat (instr
, reg_names
[11]);
1778 strcat (instr
, ", ");
1779 strcat (instr
, reg_names
[13]);
1780 strcat (instr
, ", ");
1781 strcat (instr
, really_return
? reg_names
[15] : reg_names
[14]);
1784 strcat (instr
, really_return
? reg_names
[15] : reg_names
[14]);
1785 strcat (instr
, (TARGET_6
|| !really_return
) ? "}" : "}^");
1786 output_asm_insn (instr
, &operand
);
1788 else if (really_return
)
1790 strcpy (instr
, TARGET_6
? "mov%d0\tpc, lr" : "mov%d0s\tpc, lr");
1791 output_asm_insn (instr
, &operand
);
1794 return_used_this_function
= 1;
1798 /* Return the size of the prologue. It's not too bad if we slightly
1802 get_prologue_size ()
1807 /* Until we know which registers are really used return the maximum. */
1808 if (! reload_completed
)
1811 /* Look for integer regs that have to be saved. */
1812 for (regno
= 0; regno
< 15; regno
++)
1813 if (regs_ever_live
[regno
] && ! call_used_regs
[regno
])
1819 /* Clobbering lr when none of the other regs have been saved also requires
1821 if (regs_ever_live
[14])
1824 /* If we need to push a stack frame then there is an extra instruction to
1825 preserve the current value of the stack pointer. */
1826 if (frame_pointer_needed
)
1829 /* Now look for floating-point regs that need saving. We need an
1830 instruction per register. */
1831 for (regno
= 16; regno
< 24; regno
++)
1832 if (regs_ever_live
[regno
] && ! call_used_regs
[regno
])
1835 if (current_function_anonymous_args
&& current_function_pretend_args_size
)
1841 /* The amount of stack adjustment that happens here, in output_return and in
1842 output_epilogue must be exactly the same as was calculated during reload,
1843 or things will point to the wrong place. The only time we can safely
1844 ignore this constraint is when a function has no arguments on the stack,
1845 no stack frame requirement and no live registers execpt for `lr'. If we
1846 can guarantee that by making all function calls into tail calls and that
1847 lr is not clobbered in any other way, then there is no need to push lr
1851 output_func_prologue (f
, frame_size
)
1855 int reg
, live_regs_mask
= 0;
1858 /* Nonzero if we must stuff some register arguments onto the stack as if
1859 they were passed there. */
1860 int store_arg_regs
= 0;
1862 if (arm_ccfsm_state
|| arm_target_insn
)
1863 abort (); /* Sanity check */
1865 return_used_this_function
= 0;
1866 lr_save_eliminated
= 0;
1868 fprintf (f
, "\t@ args = %d, pretend = %d, frame = %d\n",
1869 current_function_args_size
, current_function_pretend_args_size
,
1871 fprintf (f
, "\t@ frame_needed = %d, current_function_anonymous_args = %d\n",
1872 frame_pointer_needed
, current_function_anonymous_args
);
1874 if (current_function_anonymous_args
&& current_function_pretend_args_size
)
1877 for (reg
= 0; reg
<= 10; reg
++)
1878 if (regs_ever_live
[reg
] && ! call_used_regs
[reg
])
1879 live_regs_mask
|= (1 << reg
);
1881 if (frame_pointer_needed
)
1883 live_regs_mask
|= 0xD800;
1884 fputs ("\tmov\tip, sp\n", f
);
1886 else if (regs_ever_live
[14])
1888 if (! current_function_args_size
1889 && ! function_really_clobbers_lr (get_insns ()))
1891 fprintf (f
,"\t@ I don't think this function clobbers lr\n");
1892 lr_save_eliminated
= 1;
1895 live_regs_mask
|= 0x4000;
1898 /* If CURRENT_FUNCTION_PRETEND_ARGS_SIZE, adjust the stack pointer to make
1899 room. If also STORE_ARG_REGS store the argument registers involved in
1900 the created slot (this is for stdarg and varargs). */
1901 if (current_function_pretend_args_size
)
1905 int arg_size
, mask
= 0;
1907 assert (current_function_pretend_args_size
<= 16);
1908 for (reg
= 3, arg_size
= current_function_pretend_args_size
;
1909 arg_size
> 0; reg
--, arg_size
-= 4)
1911 print_multi_reg (f
, "stmfd\tsp!", mask
, FALSE
);
1915 operands
[0] = operands
[1] = stack_pointer_rtx
;
1916 operands
[2] = gen_rtx (CONST_INT
, VOIDmode
,
1917 -current_function_pretend_args_size
);
1918 output_add_immediate (operands
);
1924 /* if a di mode load/store multiple is used, and the base register
1925 is r3, then r4 can become an ever live register without lr
1926 doing so, in this case we need to push lr as well, or we
1927 will fail to get a proper return. */
1929 live_regs_mask
|= 0x4000;
1930 lr_save_eliminated
= 0;
1932 /* Now push all the call-saved regs onto the stack */
1933 print_multi_reg (f
, "stmfd\tsp!", live_regs_mask
, FALSE
);
1936 for (reg
= 23; reg
> 15; reg
--)
1937 if (regs_ever_live
[reg
] && !call_used_regs
[reg
])
1938 fprintf (f
, "\tstfe\t%s, [sp, #-12]!\n", reg_names
[reg
]);
1940 if (frame_pointer_needed
)
1942 /* Make `fp' point to saved value of `pc'. */
1944 operands
[0] = gen_rtx (REG
, SImode
, HARD_FRAME_POINTER_REGNUM
);
1945 operands
[1] = gen_rtx (REG
, SImode
, 12);
1946 operands
[2] = GEN_INT ( - (4 + current_function_pretend_args_size
));
1947 output_add_immediate (operands
);
1952 operands
[0] = operands
[1] = stack_pointer_rtx
;
1953 operands
[2] = GEN_INT (-frame_size
);
1954 output_add_immediate (operands
);
1960 output_func_epilogue (f
, frame_size
)
1964 int reg
, live_regs_mask
= 0, code_size
= 0;
1965 /* If we need this then it will always be at lesat this much */
1966 int floats_offset
= 24;
1969 if (use_return_insn() && return_used_this_function
)
1971 if (frame_size
&& !(frame_pointer_needed
|| TARGET_APCS
))
1978 for (reg
= 0; reg
<= 10; reg
++)
1979 if (regs_ever_live
[reg
] && ! call_used_regs
[reg
])
1981 live_regs_mask
|= (1 << reg
);
1985 if (frame_pointer_needed
)
1987 for (reg
= 23; reg
> 15; reg
--)
1988 if (regs_ever_live
[reg
] && ! call_used_regs
[reg
])
1990 fprintf (f
, "\tldfe\t%s, [fp, #-%d]\n", reg_names
[reg
],
1992 floats_offset
+= 12;
1996 live_regs_mask
|= 0xA800;
1997 print_multi_reg (f
, "ldmea\tfp", live_regs_mask
,
1998 TARGET_6
? FALSE
: TRUE
);
2003 /* Restore stack pointer if necessary. */
2006 operands
[0] = operands
[1] = stack_pointer_rtx
;
2007 operands
[2] = gen_rtx (CONST_INT
, VOIDmode
, frame_size
);
2008 output_add_immediate (operands
);
2011 for (reg
= 16; reg
< 24; reg
++)
2012 if (regs_ever_live
[reg
] && ! call_used_regs
[reg
])
2014 fprintf (f
, "\tldfe\t%s, [sp], #12\n", reg_names
[reg
]);
2017 if (current_function_pretend_args_size
== 0 && regs_ever_live
[14])
2019 print_multi_reg (f
, "ldmfd\tsp!", live_regs_mask
| 0x8000,
2020 TARGET_6
? FALSE
: TRUE
);
2025 if (live_regs_mask
|| regs_ever_live
[14])
2027 live_regs_mask
|= 0x4000;
2028 print_multi_reg (f
, "ldmfd\tsp!", live_regs_mask
, FALSE
);
2031 if (current_function_pretend_args_size
)
2033 operands
[0] = operands
[1] = stack_pointer_rtx
;
2034 operands
[2] = gen_rtx (CONST_INT
, VOIDmode
,
2035 current_function_pretend_args_size
);
2036 output_add_immediate (operands
);
2038 fputs (TARGET_6
? "\tmov\tpc, lr\n" : "\tmovs\tpc, lr\n", f
);
2045 /* insn_addresses isn't allocated when not optimizing */
2048 arm_increase_location (code_size
2049 + insn_addresses
[INSN_UID (get_last_insn ())]
2050 + get_prologue_size ());
2052 current_function_anonymous_args
= 0;
2055 /* Increase the `arm_text_location' by AMOUNT if we're in the text
2059 arm_increase_location (amount
)
2062 if (in_text_section ())
2063 arm_text_location
+= amount
;
2067 /* Output a label definition. If this label is within the .text segment, it
2068 is stored in OFFSET_TABLE, to be used when building `llc' instructions.
2069 Maybe GCC remembers names not starting with a `*' for a long time, but this
2070 is a minority anyway, so we just make a copy. Do not store the leading `*'
2071 if the name starts with one. */
2074 arm_asm_output_label (stream
, name
)
2078 char *real_name
, *s
;
2079 struct label_offset
*cur
;
2082 assemble_name (stream
, name
);
2083 fputs (":\n", stream
);
2084 if (! in_text_section ())
2089 real_name
= xmalloc (1 + strlen (&name
[1]));
2090 strcpy (real_name
, &name
[1]);
2094 real_name
= xmalloc (2 + strlen (name
));
2095 strcpy (real_name
, "_");
2096 strcat (real_name
, name
);
2098 for (s
= real_name
; *s
; s
++)
2101 hash
= hash
% LABEL_HASH_SIZE
;
2102 cur
= (struct label_offset
*) xmalloc (sizeof (struct label_offset
));
2103 cur
->name
= real_name
;
2104 cur
->offset
= arm_text_location
;
2105 cur
->cdr
= offset_table
[hash
];
2106 offset_table
[hash
] = cur
;
2109 /* Load a symbol that is known to be in the text segment into a register.
2110 This should never be called when not optimizing. */
2113 output_load_symbol (insn
, operands
)
2118 char *name
= XSTR (operands
[1], 0);
2119 struct label_offset
*he
;
2122 unsigned int mask
, never_mask
= 0xffffffff;
2126 if (optimize
== 0 || *name
!= '*')
2129 for (s
= &name
[1]; *s
; s
++)
2132 hash
= hash
% LABEL_HASH_SIZE
;
2133 he
= offset_table
[hash
];
2134 while (he
&& strcmp (he
->name
, &name
[1]))
2140 offset
= (arm_text_location
+ insn_addresses
[INSN_UID (insn
)]
2141 + get_prologue_size () + 8 - he
->offset
);
2145 /* When generating the instructions, we never mask out the bits that we
2146 think will be always zero, then if a mistake has occureed somewhere, the
2147 assembler will spot it and generate an error. */
2149 /* If the symbol is word aligned then we might be able to reduce the
2151 shift
= ((offset
& 3) == 0) ? 2 : 0;
2153 /* Clear the bits from NEVER_MASK that will be orred in with the individual
2155 for (; shift
< 32; shift
+= 8)
2157 mask
= 0xff << shift
;
2158 if ((offset
& mask
) || ((unsigned) offset
) > mask
)
2159 never_mask
&= ~mask
;
2163 mask
= 0xff << (shift
- 32);
2165 while (mask
&& (never_mask
& mask
) == 0)
2169 strcpy (buffer
, "sub\t%0, pc, #(8 + . -%a1)");
2170 if ((never_mask
| mask
) != 0xffffffff)
2171 sprintf (buffer
+ strlen (buffer
), " & 0x%x", mask
| never_mask
);
2174 sprintf (buffer
, "sub\t%%0, %%0, #(%d + . -%%a1) & 0x%x",
2175 inst
, mask
| never_mask
);
2177 output_asm_insn (buffer
, operands
);
/* Output code resembling an .lcomm directive.  /bin/as doesn't have this
   directive hence this hack, which works by reserving some `.space' in the
   bss segment directly.

   XXX This is a severe hack, which is guaranteed NOT to work since it doesn't
   define STATIC COMMON space but merely STATIC BSS space.  */

void
output_lcomm_directive (stream, name, size, rounded)
     FILE *stream;
     char *name;
     int size, rounded;
{
  fputs ("\n\t.bss\t@ .lcomm\n", stream);
  assemble_name (stream, name);
  fprintf (stream, ":\t.space\t%d\n", rounded);
  /* Switch back to whichever section we were in before.  */
  if (in_text_section ())
    fputs ("\n\t.text\n", stream);
  else
    fputs ("\n\t.data\n", stream);
}
2207 /* A finite state machine takes care of noticing whether or not instructions
2208 can be conditionally executed, and thus decrease execution time and code
2209 size by deleting branch instructions. The fsm is controlled by
2210 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
2212 /* The state of the fsm controlling condition codes are:
2213 0: normal, do nothing special
2214 1: make ASM_OUTPUT_OPCODE not output this instruction
2215 2: make ASM_OUTPUT_OPCODE not output this instruction
2216 3: make instructions conditional
2217 4: make instructions conditional
2219 State transitions (state->state by whom under condition):
2220 0 -> 1 final_prescan_insn if the `target' is a label
2221 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
2222 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
2223 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
2224 3 -> 0 ASM_OUTPUT_INTERNAL_LABEL if the `target' label is reached
2225 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
2226 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
2227 (the target insn is arm_target_insn).
2229 If the jump clobbers the conditions then we use states 2 and 4.
2231 A similar thing can be done with conditional return insns.
2233 XXX In case the `target' is an unconditional branch, this conditionalising
2234 of the instructions always reduces code size, but not always execution
2235 time. But then, I want to reduce the code size to somewhere near what
2236 /bin/cc produces. */
/* The condition codes of the ARM, and the inverse function.  The table is
   ordered so that each code's logical inverse is at index (i ^ 1).  */
char *arm_condition_codes[] =
{
  "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
  "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
};

#define ARM_INVERSE_CONDITION_CODE(X)  ((X) ^ 1)
2247 /* Returns the index of the ARM condition code string in
2248 `arm_condition_codes'. COMPARISON should be an rtx like
2249 `(eq (...) (...))'. */
2252 get_arm_condition_code (comparison
)
2255 switch (GET_CODE (comparison
))
2257 case NE
: return (1);
2258 case EQ
: return (0);
2259 case GE
: return (10);
2260 case GT
: return (12);
2261 case LE
: return (13);
2262 case LT
: return (11);
2263 case GEU
: return (2);
2264 case GTU
: return (8);
2265 case LEU
: return (9);
2266 case LTU
: return (3);
2275 final_prescan_insn (insn
, opvec
, noperands
)
2280 /* BODY will hold the body of INSN. */
2281 register rtx body
= PATTERN (insn
);
2283 /* This will be 1 if trying to repeat the trick, and things need to be
2284 reversed if it appears to fail. */
2287 /* JUMP_CLOBBERS will be one implies that the conditions if a branch is
2288 taken are clobbered, even if the rtl suggests otherwise. It also
2289 means that we have to grub around within the jump expression to find
2290 out what the conditions are when the jump isn't taken. */
2291 int jump_clobbers
= 0;
2293 /* If we start with a return insn, we only succeed if we find another one. */
2294 int seeking_return
= 0;
2296 /* START_INSN will hold the insn from where we start looking. This is the
2297 first insn after the following code_label if REVERSE is true. */
2298 rtx start_insn
= insn
;
2300 /* If in state 4, check if the target branch is reached, in order to
2301 change back to state 0. */
2302 if (arm_ccfsm_state
== 4)
2304 if (insn
== arm_target_insn
)
2306 arm_target_insn
= NULL
;
2307 arm_ccfsm_state
= 0;
2312 /* If in state 3, it is possible to repeat the trick, if this insn is an
2313 unconditional branch to a label, and immediately following this branch
2314 is the previous target label which is only used once, and the label this
2315 branch jumps to is not too far off. */
2316 if (arm_ccfsm_state
== 3)
2318 if (simplejump_p (insn
))
2320 start_insn
= next_nonnote_insn (start_insn
);
2321 if (GET_CODE (start_insn
) == BARRIER
)
2323 /* XXX Isn't this always a barrier? */
2324 start_insn
= next_nonnote_insn (start_insn
);
2326 if (GET_CODE (start_insn
) == CODE_LABEL
2327 && CODE_LABEL_NUMBER (start_insn
) == arm_target_label
2328 && LABEL_NUSES (start_insn
) == 1)
2333 else if (GET_CODE (body
) == RETURN
)
2335 start_insn
= next_nonnote_insn (start_insn
);
2336 if (GET_CODE (start_insn
) == BARRIER
)
2337 start_insn
= next_nonnote_insn (start_insn
);
2338 if (GET_CODE (start_insn
) == CODE_LABEL
2339 && CODE_LABEL_NUMBER (start_insn
) == arm_target_label
2340 && LABEL_NUSES (start_insn
) == 1)
2352 if (arm_ccfsm_state
!= 0 && !reverse
)
2354 if (GET_CODE (insn
) != JUMP_INSN
)
2357 /* This jump might be paralled with a clobber of the condition codes
2358 the jump should always come first */
2359 if (GET_CODE (body
) == PARALLEL
&& XVECLEN (body
, 0) > 0)
2360 body
= XVECEXP (body
, 0, 0);
2363 /* If this is a conditional return then we don't want to know */
2364 if (GET_CODE (body
) == SET
&& GET_CODE (SET_DEST (body
)) == PC
2365 && GET_CODE (SET_SRC (body
)) == IF_THEN_ELSE
2366 && (GET_CODE (XEXP (SET_SRC (body
), 1)) == RETURN
2367 || GET_CODE (XEXP (SET_SRC (body
), 2)) == RETURN
))
2372 || (GET_CODE (body
) == SET
&& GET_CODE (SET_DEST (body
)) == PC
2373 && GET_CODE (SET_SRC (body
)) == IF_THEN_ELSE
))
2375 int insns_skipped
= 0, fail
= FALSE
, succeed
= FALSE
;
2376 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
2377 int then_not_else
= TRUE
;
2378 rtx this_insn
= start_insn
, label
= 0;
2380 if (get_attr_conds (insn
) == CONDS_JUMP_CLOB
)
2383 /* Register the insn jumped to. */
2386 if (!seeking_return
)
2387 label
= XEXP (SET_SRC (body
), 0);
2389 else if (GET_CODE (XEXP (SET_SRC (body
), 1)) == LABEL_REF
)
2390 label
= XEXP (XEXP (SET_SRC (body
), 1), 0);
2391 else if (GET_CODE (XEXP (SET_SRC (body
), 2)) == LABEL_REF
)
2393 label
= XEXP (XEXP (SET_SRC (body
), 2), 0);
2394 then_not_else
= FALSE
;
2396 else if (GET_CODE (XEXP (SET_SRC (body
), 1)) == RETURN
)
2398 else if (GET_CODE (XEXP (SET_SRC (body
), 2)) == RETURN
)
2401 then_not_else
= FALSE
;
2406 /* See how many insns this branch skips, and what kind of insns. If all
2407 insns are okay, and the label or unconditional branch to the same
2408 label is not too far away, succeed. */
2409 for (insns_skipped
= 0;
2410 !fail
&& !succeed
&& insns_skipped
< MAX_INSNS_SKIPPED
;
2415 this_insn
= next_nonnote_insn (this_insn
);
2419 scanbody
= PATTERN (this_insn
);
2421 switch (GET_CODE (this_insn
))
2424 /* Succeed if it is the target label, otherwise fail since
2425 control falls in from somewhere else. */
2426 if (this_insn
== label
)
2430 arm_ccfsm_state
= 2;
2431 this_insn
= next_nonnote_insn (this_insn
);
2434 arm_ccfsm_state
= 1;
2442 /* Succeed if the following insn is the target label.
2444 If return insns are used then the last insn in a function
2445 will be a barrier. */
2446 this_insn
= next_nonnote_insn (this_insn
);
2447 if (this_insn
&& this_insn
== label
)
2451 arm_ccfsm_state
= 2;
2452 this_insn
= next_nonnote_insn (this_insn
);
2455 arm_ccfsm_state
= 1;
2463 /* The arm 6xx uses full 32 bit addresses so the cc is not
2464 preserved over calls */
2469 /* If this is an unconditional branch to the same label, succeed.
2470 If it is to another label, do nothing. If it is conditional,
2472 /* XXX Probably, the test for the SET and the PC are unnecessary. */
2474 if (GET_CODE (scanbody
) == SET
2475 && GET_CODE (SET_DEST (scanbody
)) == PC
)
2477 if (GET_CODE (SET_SRC (scanbody
)) == LABEL_REF
2478 && XEXP (SET_SRC (scanbody
), 0) == label
&& !reverse
)
2480 arm_ccfsm_state
= 2;
2483 else if (GET_CODE (SET_SRC (scanbody
)) == IF_THEN_ELSE
)
2486 else if (GET_CODE (scanbody
) == RETURN
2489 arm_ccfsm_state
= 2;
2492 else if (GET_CODE (scanbody
) == PARALLEL
)
2494 switch (get_attr_conds (this_insn
))
2506 /* Instructions using or affecting the condition codes make it
2508 if ((GET_CODE (scanbody
) == SET
2509 || GET_CODE (scanbody
) == PARALLEL
)
2510 && get_attr_conds (this_insn
) != CONDS_NOCOND
)
2520 if ((!seeking_return
) && (arm_ccfsm_state
== 1 || reverse
))
2521 arm_target_label
= CODE_LABEL_NUMBER (label
);
2522 else if (seeking_return
|| arm_ccfsm_state
== 2)
2524 while (this_insn
&& GET_CODE (PATTERN (this_insn
)) == USE
)
2526 this_insn
= next_nonnote_insn (this_insn
);
2527 if (this_insn
&& (GET_CODE (this_insn
) == BARRIER
2528 || GET_CODE (this_insn
) == CODE_LABEL
))
2533 /* Oh, dear! we ran off the end.. give up */
2534 recog (PATTERN (insn
), insn
, NULL_PTR
);
2535 arm_ccfsm_state
= 0;
2536 arm_target_insn
= NULL
;
2539 arm_target_insn
= this_insn
;
2548 get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body
),
2550 if (GET_CODE (XEXP (XEXP (SET_SRC (body
), 0), 0)) == AND
)
2551 arm_current_cc
= ARM_INVERSE_CONDITION_CODE (arm_current_cc
);
2552 if (GET_CODE (XEXP (SET_SRC (body
), 0)) == NE
)
2553 arm_current_cc
= ARM_INVERSE_CONDITION_CODE (arm_current_cc
);
2557 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
2560 arm_current_cc
= get_arm_condition_code (XEXP (SET_SRC (body
),
2564 if (reverse
|| then_not_else
)
2565 arm_current_cc
= ARM_INVERSE_CONDITION_CODE (arm_current_cc
);
2567 /* restore recog_operand (getting the attributes of other insns can
2568 destroy this array, but final.c assumes that it remains intact
2569 accross this call; since the insn has been recognized already we
2570 call recog direct). */
2571 recog (PATTERN (insn
), insn
, NULL_PTR
);