1 /* Output routines for GCC for ARM/RISCiX.
2 Copyright (C) 1991, 1993, 1994 Free Software Foundation, Inc.
3 Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 and Martin Simmons (@harleqn.co.uk).
5 More major hacks by Richard Earnshaw (rwe11@cl.cam.ac.uk)
7 This file is part of GNU CC.
9 GNU CC is free software; you can redistribute it and/or modify
10 it under the terms of the GNU General Public License as published by
11 the Free Software Foundation; either version 2, or (at your option)
12 any later version.
14 GNU CC is distributed in the hope that it will be useful,
15 but WITHOUT ANY WARRANTY; without even the implied warranty of
16 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 GNU General Public License for more details.
19 You should have received a copy of the GNU General Public License
20 along with GNU CC; see the file COPYING. If not, write to
21 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
35 #include "insn-attr.h"
39 /* The maximum number of insns skipped which will be conditionalised if
40    possible.  */
41 #define MAX_INSNS_SKIPPED 5
43 /* Some function declarations. */
44 extern FILE *asm_out_file
;
45 extern char *output_multi_immediate ();
46 extern void arm_increase_location ();
48 HOST_WIDE_INT int_log2
PROTO ((HOST_WIDE_INT
));
49 static int get_prologue_size
PROTO ((void));
51 /* Define the information needed to generate branch insns. This is
52 stored from the compare operation. */
54 rtx arm_compare_op0
, arm_compare_op1
;
57 /* What type of cpu are we compiling for? */
59 enum processor_type arm_cpu
;
61 /* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
62 must report the mode of the memory reference from PRINT_OPERAND to
63 PRINT_OPERAND_ADDRESS. */
64 enum machine_mode output_memory_reference_mode
;
66 /* Nonzero if the prologue must setup `fp'. */
67 int current_function_anonymous_args
;
69 /* Location counter of .text segment. */
70 int arm_text_location
= 0;
72 /* Set to one if we think that lr is only saved because of subroutine calls,
73 but all of these can be `put after' return insns */
74 int lr_save_eliminated
;
76 /* A hash table is used to store text segment labels and their associated
77 offset from the start of the text segment. */
82 struct label_offset
*cdr
;
/* Number of buckets in the offset_table hash of text-segment labels
   declared just below.  257 is prime.  */
85 #define LABEL_HASH_SIZE 257
87 static struct label_offset
*offset_table
[LABEL_HASH_SIZE
];
89 /* Set to 1 when a return insn is output, this means that the epilogue
90    is not needed.  */
92 static int return_used_this_function
;
94 /* For an explanation of these variables, see final_prescan_insn below. */
100 /* The condition codes of the ARM, and the inverse function. */
101 char *arm_condition_codes
[] =
103 "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
104 "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
/* Invert an ARM condition code.  The arm_condition_codes table above
   lists the codes in inverse pairs ("eq"/"ne", "cs"/"cc", "mi"/"pl", ...),
   so XOR-ing the low bit of the index yields the inverse condition.  */
107 #define ARM_INVERSE_CONDITION_CODE(X) ((X) ^ 1)
109 /* Return 1 if it is possible to return using a single instruction */
116 if (!reload_completed
||current_function_pretend_args_size
117 || current_function_anonymous_args
118 || (get_frame_size () && !(TARGET_APCS
|| frame_pointer_needed
)))
121 /* Can't be done if any of the FPU regs are pushed, since this also
123 for (regno
= 20; regno
< 24; regno
++)
124 if (regs_ever_live
[regno
])
130 /* Return TRUE if int I is a valid immediate ARM constant. */
136 unsigned HOST_WIDE_INT mask
= ~0xFF;
140 if ((i
& mask
& (unsigned HOST_WIDE_INT
) 0xffffffff) == 0)
143 (mask
<< 2) | ((mask
& (unsigned HOST_WIDE_INT
) 0xffffffff)
144 >> (32 - 2)) | ~((unsigned HOST_WIDE_INT
) 0xffffffff);
145 } while (mask
!= ~0xFF);
150 /* This code has been fixed for cross compilation. */
152 static int fpa_consts_inited
= 0;
154 char *strings_fpa
[8] = {
165 static REAL_VALUE_TYPE values_fpa
[8];
173 for (i
= 0; i
< 8; i
++)
175 r
= REAL_VALUE_ATOF (strings_fpa
[i
], DFmode
);
179 fpa_consts_inited
= 1;
182 /* Return TRUE if rtx X is a valid immediate FPU constant. */
185 const_double_rtx_ok_for_fpu (x
)
191 if (!fpa_consts_inited
)
194 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
195 if (REAL_VALUE_MINUS_ZERO (r
))
198 for (i
= 0; i
< 8; i
++)
199 if (REAL_VALUES_EQUAL (r
, values_fpa
[i
]))
205 /* Return TRUE if rtx X is a valid immediate FPU constant. */
208 neg_const_double_rtx_ok_for_fpu (x
)
214 if (!fpa_consts_inited
)
217 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
218 r
= REAL_VALUE_NEGATE (r
);
219 if (REAL_VALUE_MINUS_ZERO (r
))
222 for (i
= 0; i
< 8; i
++)
223 if (REAL_VALUES_EQUAL (r
, values_fpa
[i
]))
229 /* Predicates for `match_operand' and `match_operator'. */
231 /* s_register_operand is the same as register_operand, but it doesn't accept
232 (SUBREG (MEM)...). */
235 s_register_operand (op
, mode
)
237 enum machine_mode mode
;
239 if (GET_MODE (op
) != mode
&& mode
!= VOIDmode
)
242 if (GET_CODE (op
) == SUBREG
)
243 op
= SUBREG_REG (op
);
245 /* We don't consider registers whose class is NO_REGS
246 to be a register operand. */
247 return (GET_CODE (op
) == REG
248 && (REGNO (op
) >= FIRST_PSEUDO_REGISTER
249 || REGNO_REG_CLASS (REGNO (op
)) != NO_REGS
));
252 /* Return 1 if OP is an item in memory, given that we are in reload. */
255 reload_memory_operand (op
, mode
)
257 enum machine_mode mode
;
259 int regno
= true_regnum (op
);
261 return (! CONSTANT_P (op
)
263 || (GET_CODE (op
) == REG
264 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)));
267 /* Return TRUE for valid operands for the rhs of an ARM instruction. */
270 arm_rhs_operand (op
, mode
)
272 enum machine_mode mode
;
274 return (s_register_operand (op
, mode
)
275 || (GET_CODE (op
) == CONST_INT
&& const_ok_for_arm (INTVAL (op
))));
278 /* Return TRUE for valid operands for the rhs of an ARM instruction, or a load.
282 arm_rhsm_operand (op
, mode
)
284 enum machine_mode mode
;
286 return (s_register_operand (op
, mode
)
287 || (GET_CODE (op
) == CONST_INT
&& const_ok_for_arm (INTVAL (op
)))
288 || memory_operand (op
, mode
));
291 /* Return TRUE for valid operands for the rhs of an ARM instruction, or if a
292 constant that is valid when negated. */
295 arm_add_operand (op
, mode
)
297 enum machine_mode mode
;
299 return (s_register_operand (op
, mode
)
300 || (GET_CODE (op
) == CONST_INT
301 && (const_ok_for_arm (INTVAL (op
))
302 || const_ok_for_arm (-INTVAL (op
)))));
306 arm_not_operand (op
, mode
)
308 enum machine_mode mode
;
310 return (s_register_operand (op
, mode
)
311 || (GET_CODE (op
) == CONST_INT
312 && (const_ok_for_arm (INTVAL (op
))
313 || const_ok_for_arm (~INTVAL (op
)))));
316 /* Return TRUE for valid operands for the rhs of an FPU instruction. */
319 fpu_rhs_operand (op
, mode
)
321 enum machine_mode mode
;
323 if (s_register_operand (op
, mode
))
325 else if (GET_CODE (op
) == CONST_DOUBLE
)
326 return (const_double_rtx_ok_for_fpu (op
));
332 fpu_add_operand (op
, mode
)
334 enum machine_mode mode
;
336 if (s_register_operand (op
, mode
))
338 else if (GET_CODE (op
) == CONST_DOUBLE
)
339 return (const_double_rtx_ok_for_fpu (op
)
340 || neg_const_double_rtx_ok_for_fpu (op
));
345 /* Return nonzero if OP is a constant power of two. */
348 power_of_two_operand (op
, mode
)
350 enum machine_mode mode
;
352 if (GET_CODE (op
) == CONST_INT
)
354 HOST_WIDE_INT value
= INTVAL(op
);
355 return value
!= 0 && (value
& (value
- 1)) == 0;
360 /* Return TRUE for a valid operand of a DImode operation.
361 Either: REG, CONST_DOUBLE or MEM(DImode_address).
362 Note that this disallows MEM(REG+REG), but allows
363 MEM(PRE/POST_INC/DEC(REG)). */
366 di_operand (op
, mode
)
368 enum machine_mode mode
;
370 if (s_register_operand (op
, mode
))
373 switch (GET_CODE (op
))
380 return memory_address_p (DImode
, XEXP (op
, 0));
387 /* Return TRUE for valid index operands. */
390 index_operand (op
, mode
)
392 enum machine_mode mode
;
394 return (s_register_operand(op
, mode
)
395 || (immediate_operand (op
, mode
)
396 && INTVAL (op
) < 4096 && INTVAL (op
) > -4096));
399 /* Return TRUE for valid shifts by a constant. This also accepts any
400 power of two on the (somewhat overly relaxed) assumption that the
401 shift operator in this case was a mult. */
404 const_shift_operand (op
, mode
)
406 enum machine_mode mode
;
408 return (power_of_two_operand (op
, mode
)
409 || (immediate_operand (op
, mode
)
410 && (INTVAL (op
) < 32 && INTVAL (op
) > 0)));
413 /* Return TRUE for arithmetic operators which can be combined with a multiply
417 shiftable_operator (x
, mode
)
419 enum machine_mode mode
;
421 if (GET_MODE (x
) != mode
)
425 enum rtx_code code
= GET_CODE (x
);
427 return (code
== PLUS
|| code
== MINUS
428 || code
== IOR
|| code
== XOR
|| code
== AND
);
432 /* Return TRUE for shift operators. */
435 shift_operator (x
, mode
)
437 enum machine_mode mode
;
439 if (GET_MODE (x
) != mode
)
443 enum rtx_code code
= GET_CODE (x
);
446 return power_of_two_operand (XEXP (x
, 1));
448 return (code
== ASHIFT
|| code
== ASHIFTRT
|| code
== LSHIFTRT
);
452 int equality_operator (x
, mode
)
454 enum machine_mode mode
;
456 return GET_CODE (x
) == EQ
|| GET_CODE (x
) == NE
;
459 /* Return TRUE for SMIN SMAX UMIN UMAX operators. */
462 minmax_operator (x
, mode
)
464 enum machine_mode mode
;
466 enum rtx_code code
= GET_CODE (x
);
468 if (GET_MODE (x
) != mode
)
471 return code
== SMIN
|| code
== SMAX
|| code
== UMIN
|| code
== UMAX
;
474 /* return TRUE if x is EQ or NE */
476 /* Return TRUE if this is the condition code register, if we aren't given
477 a mode, accept any class CCmode register */
480 cc_register (x
, mode
)
482 enum machine_mode mode
;
484 if (mode
== VOIDmode
)
487 if (GET_MODE_CLASS (mode
) != MODE_CC
)
491 if (mode
== GET_MODE (x
) && GET_CODE (x
) == REG
&& REGNO (x
) == 24)
501 enum rtx_code code
= GET_CODE (x
);
505 else if (code
== SMIN
)
507 else if (code
== UMIN
)
509 else if (code
== UMAX
)
515 /* Return 1 if memory locations are adjacent */
518 adjacent_mem_locations (a
, b
)
521 int val0
= 0, val1
= 0;
524 if ((GET_CODE (XEXP (a
, 0)) == REG
525 || (GET_CODE (XEXP (a
, 0)) == PLUS
526 && GET_CODE (XEXP (XEXP (a
, 0), 1)) == CONST_INT
))
527 && (GET_CODE (XEXP (b
, 0)) == REG
528 || (GET_CODE (XEXP (b
, 0)) == PLUS
529 && GET_CODE (XEXP (XEXP (b
, 0), 1)) == CONST_INT
)))
531 if (GET_CODE (XEXP (a
, 0)) == PLUS
)
533 reg0
= REGNO (XEXP (XEXP (a
, 0), 0));
534 val0
= INTVAL (XEXP (XEXP (a
, 0), 1));
537 reg0
= REGNO (XEXP (a
, 0));
538 if (GET_CODE (XEXP (b
, 0)) == PLUS
)
540 reg1
= REGNO (XEXP (XEXP (b
, 0), 0));
541 val1
= INTVAL (XEXP (XEXP (b
, 0), 1));
544 reg1
= REGNO (XEXP (b
, 0));
545 return (reg0
== reg1
) && ((val1
- val0
) == 4 || (val0
- val1
) == 4);
550 /* Return 1 if OP is a load multiple operation. It is known to be
551 parallel and the first section will be tested. */
554 load_multiple_operation (op
, mode
)
556 enum machine_mode mode
;
558 HOST_WIDE_INT count
= XVECLEN (op
, 0);
561 HOST_WIDE_INT i
= 1, base
= 0;
565 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
)
568 /* Check to see if this might be a write-back */
569 if (GET_CODE (SET_SRC (elt
= XVECEXP (op
, 0, 0))) == PLUS
)
574 /* Now check it more carefully */
575 if (GET_CODE (SET_DEST (elt
)) != REG
576 || GET_CODE (XEXP (SET_SRC (elt
), 0)) != REG
577 || REGNO (XEXP (SET_SRC (elt
), 0)) != REGNO (SET_DEST (elt
))
578 || GET_CODE (XEXP (SET_SRC (elt
), 1)) != CONST_INT
579 || INTVAL (XEXP (SET_SRC (elt
), 1)) != (count
- 2) * 4
580 || GET_CODE (XVECEXP (op
, 0, count
- 1)) != CLOBBER
581 || GET_CODE (XEXP (XVECEXP (op
, 0, count
- 1), 0)) != REG
582 || REGNO (XEXP (XVECEXP (op
, 0, count
- 1), 0))
583 != REGNO (SET_DEST (elt
)))
589 /* Perform a quick check so we don't blow up below. */
591 || GET_CODE (XVECEXP (op
, 0, i
- 1)) != SET
592 || GET_CODE (SET_DEST (XVECEXP (op
, 0, i
- 1))) != REG
593 || GET_CODE (SET_SRC (XVECEXP (op
, 0, i
- 1))) != MEM
)
596 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, i
- 1)));
597 src_addr
= XEXP (SET_SRC (XVECEXP (op
, 0, i
- 1)), 0);
599 for (; i
< count
; i
++)
601 rtx elt
= XVECEXP (op
, 0, i
);
603 if (GET_CODE (elt
) != SET
604 || GET_CODE (SET_DEST (elt
)) != REG
605 || GET_MODE (SET_DEST (elt
)) != SImode
606 || REGNO (SET_DEST (elt
)) != dest_regno
+ i
- base
607 || GET_CODE (SET_SRC (elt
)) != MEM
608 || GET_MODE (SET_SRC (elt
)) != SImode
609 || GET_CODE (XEXP (SET_SRC (elt
), 0)) != PLUS
610 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt
), 0), 0), src_addr
)
611 || GET_CODE (XEXP (XEXP (SET_SRC (elt
), 0), 1)) != CONST_INT
612 || INTVAL (XEXP (XEXP (SET_SRC (elt
), 0), 1)) != (i
- base
) * 4)
619 /* Return 1 if OP is a store multiple operation. It is known to be
620 parallel and the first section will be tested. */
623 store_multiple_operation (op
, mode
)
625 enum machine_mode mode
;
627 HOST_WIDE_INT count
= XVECLEN (op
, 0);
630 HOST_WIDE_INT i
= 1, base
= 0;
634 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
)
637 /* Check to see if this might be a write-back */
638 if (GET_CODE (SET_SRC (elt
= XVECEXP (op
, 0, 0))) == PLUS
)
643 /* Now check it more carefully */
644 if (GET_CODE (SET_DEST (elt
)) != REG
645 || GET_CODE (XEXP (SET_SRC (elt
), 0)) != REG
646 || REGNO (XEXP (SET_SRC (elt
), 0)) != REGNO (SET_DEST (elt
))
647 || GET_CODE (XEXP (SET_SRC (elt
), 1)) != CONST_INT
648 || INTVAL (XEXP (SET_SRC (elt
), 1)) != (count
- 2) * 4
649 || GET_CODE (XVECEXP (op
, 0, count
- 1)) != CLOBBER
650 || GET_CODE (XEXP (XVECEXP (op
, 0, count
- 1), 0)) != REG
651 || REGNO (XEXP (XVECEXP (op
, 0, count
- 1), 0))
652 != REGNO (SET_DEST (elt
)))
658 /* Perform a quick check so we don't blow up below. */
660 || GET_CODE (XVECEXP (op
, 0, i
- 1)) != SET
661 || GET_CODE (SET_DEST (XVECEXP (op
, 0, i
- 1))) != MEM
662 || GET_CODE (SET_SRC (XVECEXP (op
, 0, i
- 1))) != REG
)
665 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, i
- 1)));
666 dest_addr
= XEXP (SET_DEST (XVECEXP (op
, 0, i
- 1)), 0);
668 for (; i
< count
; i
++)
670 elt
= XVECEXP (op
, 0, i
);
672 if (GET_CODE (elt
) != SET
673 || GET_CODE (SET_SRC (elt
)) != REG
674 || GET_MODE (SET_SRC (elt
)) != SImode
675 || REGNO (SET_SRC (elt
)) != src_regno
+ i
- base
676 || GET_CODE (SET_DEST (elt
)) != MEM
677 || GET_MODE (SET_DEST (elt
)) != SImode
678 || GET_CODE (XEXP (SET_DEST (elt
), 0)) != PLUS
679 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt
), 0), 0), dest_addr
)
680 || GET_CODE (XEXP (XEXP (SET_DEST (elt
), 0), 1)) != CONST_INT
681 || INTVAL (XEXP (XEXP (SET_DEST (elt
), 0), 1)) != (i
- base
) * 4)
688 /* Routines for use with attributes */
691 const_pool_offset (symbol
)
694 return get_pool_offset (symbol
) - get_pool_size () - get_prologue_size ();
697 /* Routines for use in generating RTL */
700 arm_gen_load_multiple (base_regno
, count
, from
, up
, write_back
)
709 int sign
= up
? 1 : -1;
711 result
= gen_rtx (PARALLEL
, VOIDmode
,
712 rtvec_alloc (count
+ (write_back
? 2 : 0)));
715 XVECEXP (result
, 0, 0)
716 = gen_rtx (SET
, GET_MODE (from
), from
,
717 plus_constant (from
, count
* 4 * sign
));
722 for (j
= 0; i
< count
; i
++, j
++)
724 XVECEXP (result
, 0, i
)
725 = gen_rtx (SET
, VOIDmode
, gen_rtx (REG
, SImode
, base_regno
+ j
),
726 gen_rtx (MEM
, SImode
,
727 plus_constant (from
, j
* 4 * sign
)));
731 XVECEXP (result
, 0, i
) = gen_rtx (CLOBBER
, SImode
, from
);
737 arm_gen_store_multiple (base_regno
, count
, to
, up
, write_back
)
746 int sign
= up
? 1 : -1;
748 result
= gen_rtx (PARALLEL
, VOIDmode
,
749 rtvec_alloc (count
+ (write_back
? 2 : 0)));
752 XVECEXP (result
, 0, 0)
753 = gen_rtx (SET
, GET_MODE (to
), to
,
754 plus_constant (to
, count
* 4 * sign
));
759 for (j
= 0; i
< count
; i
++, j
++)
761 XVECEXP (result
, 0, i
)
762 = gen_rtx (SET
, VOIDmode
,
763 gen_rtx (MEM
, SImode
, plus_constant (to
, j
* 4 * sign
)),
764 gen_rtx (REG
, SImode
, base_regno
+ j
));
768 XVECEXP (result
, 0, i
) = gen_rtx (CLOBBER
, SImode
, to
);
773 /* X and Y are two things to compare using CODE. Emit the compare insn and
774 return the rtx for register 0 in the proper mode. FP means this is a
775 floating point compare: I don't think that it is needed on the arm. */
778 gen_compare_reg (code
, x
, y
, fp
)
782 enum machine_mode mode
= SELECT_CC_MODE (code
, x
, y
);
783 rtx cc_reg
= gen_rtx (REG
, mode
, 24);
785 emit_insn (gen_rtx (SET
, VOIDmode
, cc_reg
,
786 gen_rtx (COMPARE
, mode
, x
, y
)));
792 arm_reload_out_hi (operands
)
795 rtx base
= find_replacement (&XEXP (operands
[0], 0));
797 emit_insn (gen_rtx (SET
, VOIDmode
,
798 gen_rtx (MEM
, QImode
, base
),
799 gen_rtx (SUBREG
, QImode
, operands
[1], 0)));
800 emit_insn (gen_rtx (SET
, VOIDmode
, operands
[2],
801 gen_rtx (LSHIFTRT
, SImode
,
802 gen_rtx (SUBREG
, SImode
, operands
[1], 0),
804 emit_insn (gen_rtx (SET
, VOIDmode
,
805 gen_rtx (MEM
, QImode
,
806 plus_constant (base
, 1)),
807 gen_rtx (SUBREG
, QImode
, operands
[2], 0)));
810 /* Check to see if a branch is forwards or backwards. Return TRUE if it
814 arm_backwards_branch (from
, to
)
817 return insn_addresses
[to
] <= insn_addresses
[from
];
820 /* Check to see if a branch is within the distance that can be done using
821 an arithmetic expression. */
823 short_branch (from
, to
)
826 int delta
= insn_addresses
[from
] + 8 - insn_addresses
[to
];
828 return abs (delta
) < 980; /* A small margin for safety */
831 /* Check to see that the insn isn't the target of the conditionalizing
834 arm_insn_not_targeted (insn
)
837 return insn
!= arm_target_insn
;
841 /* Routines to output assembly language. */
843 /* If the rtx is the correct value then return the string of the number.
844 In this way we can ensure that valid double constants are generated even
845 when cross compiling. */
847 fp_immediate_constant (x
)
853 if (!fpa_consts_inited
)
856 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
857 for (i
= 0; i
< 8; i
++)
858 if (REAL_VALUES_EQUAL (r
, values_fpa
[i
]))
859 return strings_fpa
[i
];
864 /* As for fp_immediate_constant, but value is passed directly, not in rtx. */
866 fp_const_from_val (r
)
871 if (! fpa_consts_inited
)
874 for (i
= 0; i
< 8; i
++)
875 if (REAL_VALUES_EQUAL (*r
, values_fpa
[i
]))
876 return strings_fpa
[i
];
881 /* Output the operands of a LDM/STM instruction to STREAM.
882 MASK is the ARM register set mask of which only bits 0-15 are important.
883 INSTR is the possibly suffixed base register. HAT unequals zero if a hat
884 must follow the register list. */
887 print_multi_reg (stream
, instr
, mask
, hat
)
893 int not_first
= FALSE
;
895 fprintf (stream
, "\t%s, {", instr
);
896 for (i
= 0; i
< 16; i
++)
900 fprintf (stream
, ", ");
901 fprintf (stream
, "%s", reg_names
[i
]);
905 fprintf (stream
, "}%s\n", hat
? "^" : "");
908 /* Output a 'call' insn. */
911 output_call (operands
)
914 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
916 if (REGNO (operands
[0]) == 14)
918 operands
[0] = gen_rtx (REG
, SImode
, 12);
919 output_asm_insn ("mov%?\t%0, lr", operands
);
921 output_asm_insn ("mov%?\tlr, pc", operands
);
922 output_asm_insn ("mov%?\tpc, %0", operands
);
930 int something_changed
= 0;
932 int code
= GET_CODE (x0
);
939 if (REGNO (x0
) == 14)
941 *x
= gen_rtx (REG
, SImode
, 12);
946 /* Scan through the sub-elements and change any references there */
947 fmt
= GET_RTX_FORMAT (code
);
948 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
950 something_changed
|= eliminate_lr2ip (&XEXP (x0
, i
));
951 else if (fmt
[i
] == 'E')
952 for (j
= 0; j
< XVECLEN (x0
, i
); j
++)
953 something_changed
|= eliminate_lr2ip (&XVECEXP (x0
, i
, j
));
954 return something_changed
;
958 /* Output a 'call' insn that is a reference in memory. */
961 output_call_mem (operands
)
964 operands
[0] = copy_rtx (operands
[0]); /* Be ultra careful */
965 /* Handle calls using lr by using ip (which may be clobbered in subr anyway).
967 if (eliminate_lr2ip (&operands
[0]))
968 output_asm_insn ("mov%?\tip, lr", operands
);
970 output_asm_insn ("mov%?\tlr, pc", operands
);
971 output_asm_insn ("ldr%?\tpc, %0", operands
);
976 /* Output a move from arm registers to an fpu registers.
977 OPERANDS[0] is an fpu register.
978 OPERANDS[1] is the first registers of an arm register pair. */
981 output_mov_long_double_fpu_from_arm (operands
)
984 int arm_reg0
= REGNO (operands
[1]);
990 ops
[0] = gen_rtx (REG
, SImode
, arm_reg0
);
991 ops
[1] = gen_rtx (REG
, SImode
, 1 + arm_reg0
);
992 ops
[2] = gen_rtx (REG
, SImode
, 2 + arm_reg0
);
994 output_asm_insn ("stm%?fd\tsp!, {%0, %1, %2}", ops
);
995 output_asm_insn ("ldf%?e\t%0, [sp], #12", operands
);
999 /* Output a move from an fpu register to arm registers.
1000 OPERANDS[0] is the first registers of an arm register pair.
1001 OPERANDS[1] is an fpu register. */
1004 output_mov_long_double_arm_from_fpu (operands
)
1007 int arm_reg0
= REGNO (operands
[0]);
1013 ops
[0] = gen_rtx (REG
, SImode
, arm_reg0
);
1014 ops
[1] = gen_rtx (REG
, SImode
, 1 + arm_reg0
);
1015 ops
[2] = gen_rtx (REG
, SImode
, 2 + arm_reg0
);
1017 output_asm_insn ("stf%?e\t%1, [sp, #-12]!", operands
);
1018 output_asm_insn ("ldm%?fd\tsp!, {%0, %1, %2}", ops
);
1022 /* Output a move from arm registers to arm registers of a long double
1023 OPERANDS[0] is the destination.
1024 OPERANDS[1] is the source. */
1026 output_mov_long_double_arm_from_arm (operands
)
1029 /* We have to be careful here because the two might overlap */
1030 int dest_start
= REGNO (operands
[0]);
1031 int src_start
= REGNO (operands
[1]);
1035 if (dest_start
< src_start
)
1037 for (i
= 0; i
< 3; i
++)
1039 ops
[0] = gen_rtx (REG
, SImode
, dest_start
+ i
);
1040 ops
[1] = gen_rtx (REG
, SImode
, src_start
+ i
);
1041 output_asm_insn ("mov%?\t%0, %1", ops
);
1046 for (i
= 2; i
>= 0; i
--)
1048 ops
[0] = gen_rtx (REG
, SImode
, dest_start
+ i
);
1049 ops
[1] = gen_rtx (REG
, SImode
, src_start
+ i
);
1050 output_asm_insn ("mov%?\t%0, %1", ops
);
1058 /* Output a move from arm registers to an fpu registers.
1059 OPERANDS[0] is an fpu register.
1060 OPERANDS[1] is the first registers of an arm register pair. */
1063 output_mov_double_fpu_from_arm (operands
)
1066 int arm_reg0
= REGNO (operands
[1]);
1071 ops
[0] = gen_rtx (REG
, SImode
, arm_reg0
);
1072 ops
[1] = gen_rtx (REG
, SImode
, 1 + arm_reg0
);
1073 output_asm_insn ("stm%?fd\tsp!, {%0, %1}", ops
);
1074 output_asm_insn ("ldf%?d\t%0, [sp], #8", operands
);
1078 /* Output a move from an fpu register to arm registers.
1079 OPERANDS[0] is the first registers of an arm register pair.
1080 OPERANDS[1] is an fpu register. */
1083 output_mov_double_arm_from_fpu (operands
)
1086 int arm_reg0
= REGNO (operands
[0]);
1092 ops
[0] = gen_rtx (REG
, SImode
, arm_reg0
);
1093 ops
[1] = gen_rtx (REG
, SImode
, 1 + arm_reg0
);
1094 output_asm_insn ("stf%?d\t%1, [sp, #-8]!", operands
);
1095 output_asm_insn ("ldm%?fd\tsp!, {%0, %1}", ops
);
1099 /* Output a move between double words.
1100 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
1101 or MEM<-REG and all MEMs must be offsettable addresses. */
1104 output_move_double (operands
)
1107 enum rtx_code code0
= GET_CODE (operands
[0]);
1108 enum rtx_code code1
= GET_CODE (operands
[1]);
1113 int reg0
= REGNO (operands
[0]);
1115 otherops
[0] = gen_rtx (REG
, SImode
, 1 + reg0
);
1118 int reg1
= REGNO (operands
[1]);
1122 otherops
[1] = gen_rtx (REG
, SImode
, 1 + reg1
);
1124 /* Ensure the second source is not overwritten */
1125 if (reg0
== 1 + reg1
)
1127 output_asm_insn("mov%?\t%0, %1", otherops
);
1128 output_asm_insn("mov%?\t%0, %1", operands
);
1132 output_asm_insn("mov%?\t%0, %1", operands
);
1133 output_asm_insn("mov%?\t%0, %1", otherops
);
1136 else if (code1
== CONST_DOUBLE
)
1138 otherops
[1] = gen_rtx (CONST_INT
, VOIDmode
,
1139 CONST_DOUBLE_HIGH (operands
[1]));
1140 operands
[1] = gen_rtx (CONST_INT
, VOIDmode
,
1141 CONST_DOUBLE_LOW (operands
[1]));
1142 output_mov_immediate (operands
, FALSE
, "");
1143 output_mov_immediate (otherops
, FALSE
, "");
1145 else if (code1
== CONST_INT
)
1147 otherops
[1] = const0_rtx
;
1148 /* sign extend the intval into the high-order word */
1149 /* Note: output_mov_immediate may clobber operands[1], so we
1150 put this out first */
1151 if (INTVAL (operands
[1]) < 0)
1152 output_asm_insn ("mvn%?\t%0, %1", otherops
);
1154 output_asm_insn ("mov%?\t%0, %1", otherops
);
1155 output_mov_immediate (operands
, FALSE
, "");
1157 else if (code1
== MEM
)
1159 switch (GET_CODE (XEXP (operands
[1], 0)))
1162 /* Handle the simple case where address is [r, #0] more
1164 output_asm_insn ("ldm%?ia\t%m1, %M0", operands
);
1167 output_asm_insn ("add%?\t%m1, %m1, #8", operands
);
1168 output_asm_insn ("ldm%?ia\t%m1, %M0", operands
);
1171 output_asm_insn ("sub%?\t%m1, %m1, #8", operands
);
1172 output_asm_insn ("ldm%?ia\t%m1, %M0", operands
);
1175 output_asm_insn ("ldm%?ia\t%m1!, %M0", operands
);
1178 output_asm_insn ("ldm%?ia\t%m1, %M0", operands
);
1179 output_asm_insn ("sub%?\t%m1, %m1, #8", operands
);
1182 otherops
[1] = adj_offsettable_operand (operands
[1], 4);
1183 /* Take care of overlapping base/data reg. */
1184 if (reg_mentioned_p (operands
[0], operands
[1]))
1186 output_asm_insn ("ldr%?\t%0, %1", otherops
);
1187 output_asm_insn ("ldr%?\t%0, %1", operands
);
1191 output_asm_insn ("ldr%?\t%0, %1", operands
);
1192 output_asm_insn ("ldr%?\t%0, %1", otherops
);
1196 else abort(); /* Constraints should prevent this */
1198 else if (code0
== MEM
&& code1
== REG
)
1200 if (REGNO (operands
[1]) == 12)
1202 switch (GET_CODE (XEXP (operands
[0], 0)))
1205 output_asm_insn ("stm%?ia\t%m0, %M1", operands
);
1208 output_asm_insn ("add%?\t%m0, %m0, #8", operands
);
1209 output_asm_insn ("stm%?ia\t%m0, %M1", operands
);
1212 output_asm_insn ("sub%?\t%m0, %m0, #8", operands
);
1213 output_asm_insn ("stm%?ia\t%m0, %M1", operands
);
1216 output_asm_insn ("stm%?ia\t%m0!, %M1", operands
);
1219 output_asm_insn ("stm%?ia\t%m0, %M1", operands
);
1220 output_asm_insn ("sub%?\t%m0, %m0, #8", operands
);
1223 otherops
[0] = adj_offsettable_operand (operands
[0], 4);
1224 otherops
[1] = gen_rtx (REG
, SImode
, 1 + REGNO (operands
[1]));
1225 output_asm_insn ("str%?\t%1, %0", operands
);
1226 output_asm_insn ("str%?\t%1, %0", otherops
);
1229 else abort(); /* Constraints should prevent this */
1235 /* Output an arbitrary MOV reg, #n.
1236 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
1239 output_mov_immediate (operands
)
1242 HOST_WIDE_INT n
= INTVAL (operands
[1]);
1246 /* Try to use one MOV */
1247 if (const_ok_for_arm (n
))
1249 output_asm_insn ("mov%?\t%0, %1", operands
);
1253 /* Try to use one MVN */
1254 if (const_ok_for_arm (~n
))
1256 operands
[1] = GEN_INT (~n
);
1257 output_asm_insn ("mvn%?\t%0, %1", operands
);
1261 /* If all else fails, make it out of ORRs or BICs as appropriate. */
1263 for (i
=0; i
< 32; i
++)
1267 if (n_ones
> 16) /* Shorter to use MVN with BIC in this case. */
1268 output_multi_immediate(operands
, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1,
1271 output_multi_immediate(operands
, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1,
1278 /* Output an ADD r, s, #n where n may be too big for one instruction. If
1279 adding zero to one register, output nothing. */
1282 output_add_immediate (operands
)
1285 HOST_WIDE_INT n
= INTVAL (operands
[2]);
1287 if (n
!= 0 || REGNO (operands
[0]) != REGNO (operands
[1]))
1290 output_multi_immediate (operands
,
1291 "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
1294 output_multi_immediate (operands
,
1295 "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
1302 /* Output a multiple immediate operation.
1303 OPERANDS is the vector of operands referred to in the output patterns.
1304 INSTR1 is the output pattern to use for the first constant.
1305 INSTR2 is the output pattern to use for subsequent constants.
1306 IMMED_OP is the index of the constant slot in OPERANDS.
1307 N is the constant value. */
1310 output_multi_immediate (operands
, instr1
, instr2
, immed_op
, n
)
1312 char *instr1
, *instr2
;
1316 #if HOST_BITS_PER_WIDE_INT > 32
1322 operands
[immed_op
] = const0_rtx
;
1323 output_asm_insn (instr1
, operands
); /* Quick and easy output */
1328 char *instr
= instr1
;
1330 /* Note that n is never zero here (which would give no output) */
1331 for (i
= 0; i
< 32; i
+= 2)
1335 operands
[immed_op
] = GEN_INT (n
& (255 << i
));
1336 output_asm_insn (instr
, operands
);
1346 /* Return the appropriate ARM instruction for the operation code.
1347 The returned result should not be overwritten. OP is the rtx of the
1348 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
1352 arithmetic_instr (op
, shift_first_arg
)
1354 int shift_first_arg
;
1356 switch (GET_CODE (op
))
1362 return shift_first_arg
? "rsb" : "sub";
1379 /* Ensure valid constant shifts and return the appropriate shift mnemonic
1380 for the operation code. The returned result should not be overwritten.
1381 OP is the rtx code of the shift.
1382 On exit, *AMOUNTP will be -1 if the shift is by a register, or a constant
1386 shift_op (op
, amountp
)
1388 HOST_WIDE_INT
*amountp
;
1394 if (GET_CODE (XEXP (op
, 1)) == REG
|| GET_CODE (XEXP (op
, 1)) == SUBREG
)
1396 else if (GET_CODE (XEXP (op
, 1)) == CONST_INT
)
1397 *amountp
= INTVAL (XEXP (op
, 1));
1401 switch (GET_CODE (op
))
1424 *amountp
= int_log2 (*amountp
);
1434 && (*amountp
< min_shift
|| *amountp
> max_shift
))
1440 /* Obtain the shift from the POWER of two. */
1444 HOST_WIDE_INT power
;
1446 HOST_WIDE_INT shift
= 0;
1448 while (((1 << shift
) & power
) == 0)
1458 /* Output a .ascii pseudo-op, keeping track of lengths. This is because
1459 /bin/as is horribly restrictive. */
1462 output_ascii_pseudo_op (stream
, p
, len
)
1468 int len_so_far
= 1000;
1469 int chars_so_far
= 0;
1471 for (i
= 0; i
< len
; i
++)
1473 register int c
= p
[i
];
1475 if (len_so_far
> 50)
1478 fputs ("\"\n", stream
);
1479 fputs ("\t.ascii\t\"", stream
);
1481 arm_increase_location (chars_so_far
);
1485 if (c
== '\"' || c
== '\\')
1491 if (c
>= ' ' && c
< 0177)
1498 fprintf (stream
, "\\%03o", c
);
1505 fputs ("\"\n", stream
);
1506 arm_increase_location (chars_so_far
);
1510 /* Try to determine whether a pattern really clobbers the link register.
1511 This information is useful when peepholing, so that lr need not be pushed
1512 if we combine a call followed by a return.
1513 NOTE: This code does not check for side-effect expressions in a SET_SRC:
1514 such a check should not be needed because these only update an existing
1515 value within a register; the register must still be set elsewhere within
1519 pattern_really_clobbers_lr (x
)
1524 switch (GET_CODE (x
))
1527 switch (GET_CODE (SET_DEST (x
)))
1530 return REGNO (SET_DEST (x
)) == 14;
1533 if (GET_CODE (XEXP (SET_DEST (x
), 0)) == REG
)
1534 return REGNO (XEXP (SET_DEST (x
), 0)) == 14;
1536 if (GET_CODE (XEXP (SET_DEST (x
), 0)) == MEM
)
1545 for (i
= 0; i
< XVECLEN (x
, 0); i
++)
1546 if (pattern_really_clobbers_lr (XVECEXP (x
, 0, i
)))
1551 switch (GET_CODE (XEXP (x
, 0)))
1554 return REGNO (XEXP (x
, 0)) == 14;
1557 if (GET_CODE (XEXP (XEXP (x
, 0), 0)) == REG
)
1558 return REGNO (XEXP (XEXP (x
, 0), 0)) == 14;
/* Walk the insn chain starting at FIRST and return nonzero if any insn
   can clobber the link register; used by output_func_prologue to decide
   whether the push of lr can be eliminated.  NOTE(review): mangled text
   with lines missing from view — comments are best-effort.  */
1574 function_really_clobbers_lr (first
)
1579 for (insn
= first
; insn
; insn
= next_nonnote_insn (insn
))
1581 switch (GET_CODE (insn
))
1586 case JUMP_INSN
: /* Jump insns only change the PC (and conds) */
/* Ordinary insns: delegate the pattern check.  */
1591 if (pattern_really_clobbers_lr (PATTERN (insn
)))
1596 /* Don't yet know how to handle those calls that are not to a
1598 if (GET_CODE (PATTERN (insn
)) != PARALLEL
)
1601 switch (GET_CODE (XVECEXP (PATTERN (insn
), 0, 0)))
1604 if (GET_CODE (XEXP (XEXP (XVECEXP (PATTERN (insn
), 0, 0), 0), 0))
1610 if (GET_CODE (XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn
),
1616 default: /* Don't recognize it, be safe */
1620 /* A call can be made (by peepholing) not to clobber lr iff it is
1621 followed by a return. There may, however, be a use insn iff
1622 we are returning the result of the call.
1623 If we run off the end of the insn chain, then that means the
1624 call was at the end of the function. Unfortunately we don't
1625 have a return insn for the peephole to recognize, so we
1626 must reject this. (Can this be fixed by adding our own insn?) */
1627 if ((next
= next_nonnote_insn (insn
)) == NULL
)
/* Skip over a USE of the call's result register, if one follows.  */
1630 if (GET_CODE (next
) == INSN
&& GET_CODE (PATTERN (next
)) == USE
1631 && (GET_CODE (XVECEXP (PATTERN (insn
), 0, 0)) == SET
)
1632 && (REGNO (SET_DEST (XVECEXP (PATTERN (insn
), 0, 0)))
1633 == REGNO (XEXP (PATTERN (next
), 0))))
1634 if ((next
= next_nonnote_insn (next
)) == NULL
)
/* The call is harmless only if followed directly by a return.  */
1637 if (GET_CODE (next
) == JUMP_INSN
1638 && GET_CODE (PATTERN (next
)) == RETURN
)
1647 /* We have reached the end of the chain so lr was _not_ clobbered */
/* Emit the assembler text for a function return.  OPERAND carries the
   condition operand (%d0); REALLY_RETURN is nonzero when the instruction
   should actually return (load pc) rather than merely restore registers
   (load lr).  NOTE(review): mangled text, lines missing from view —
   comments are best-effort inferences from the visible tokens.  */
1652 output_return_instruction (operand
, really_return
)
1657 int reg
, live_regs
= 0;
/* Presumably alloca frames cannot use the simple return — verify.  */
1659 if (current_function_calls_alloca
&& ! really_return
)
/* Count the call-saved integer registers (r0-r10) that are live.  */
1662 for (reg
= 0; reg
<= 10; reg
++)
1663 if (regs_ever_live
[reg
] && ! call_used_regs
[reg
])
/* Something was pushed, so a multi-register load is needed.  */
1666 if (live_regs
|| (regs_ever_live
[14] && ! lr_save_eliminated
))
1669 if (frame_pointer_needed
)
1674 if (lr_save_eliminated
|| ! regs_ever_live
[14])
/* Choose the addressing mode: descending from fp, or pop via sp.  */
1677 if (frame_pointer_needed
)
1678 strcpy (instr
, "ldm%?%d0ea\tfp, {");
1680 strcpy (instr
, "ldm%?%d0fd\tsp!, {");
/* Append every saved integer register to the register list.  */
1682 for (reg
= 0; reg
<= 10; reg
++)
1683 if (regs_ever_live
[reg
] && ! call_used_regs
[reg
])
1685 strcat (instr
, reg_names
[reg
]);
1687 strcat (instr
, ", ");
/* With a frame pointer we restore fp, sp and pc/lr as well.  */
1690 if (frame_pointer_needed
)
1692 strcat (instr
, reg_names
[11]);
1693 strcat (instr
, ", ");
1694 strcat (instr
, reg_names
[13]);
1695 strcat (instr
, ", ");
1696 strcat (instr
, really_return
? reg_names
[15] : reg_names
[14]);
1699 strcat (instr
, really_return
? reg_names
[15] : reg_names
[14]);
/* "}^" restores PSR bits on a 26-bit return; plain "}" otherwise.  */
1700 strcat (instr
, (TARGET_6
|| !really_return
) ? "}" : "}^");
1701 output_asm_insn (instr
, &operand
);
/* Nothing was pushed: a simple mov pc, lr suffices.  */
1703 else if (really_return
)
1705 strcpy (instr
, TARGET_6
? "mov%?%d0\tpc, lr" : "mov%?%d0s\tpc, lr");
1706 output_asm_insn (instr
, &operand
);
/* Record that this function used an inline return sequence.  */
1709 return_used_this_function
= 1;
1713 /* Return the size of the prologue. It's not too bad if we slightly
/* NOTE(review): counts prologue instructions to estimate its byte size;
   interior lines are missing from view, so the accumulation statements
   themselves are not visible here — confirm against a pristine copy.  */
1717 get_prologue_size ()
1722 /* Until we know which registers are really used return the maximum. */
1723 if (! reload_completed
)
1726 /* Look for integer regs that have to be saved. */
1727 for (regno
= 0; regno
< 15; regno
++)
1728 if (regs_ever_live
[regno
] && ! call_used_regs
[regno
])
1734 /* Clobbering lr when none of the other regs have been saved also requires
1736 if (regs_ever_live
[14])
1739 /* If we need to push a stack frame then there is an extra instruction to
1740 preserve the current value of the stack pointer. */
1741 if (frame_pointer_needed
)
1744 /* Now look for floating-point regs that need saving. We need an
1745 instruction per register. */
1746 for (regno
= 16; regno
< 24; regno
++)
1747 if (regs_ever_live
[regno
] && ! call_used_regs
[regno
])
/* Anonymous (stdarg) args need extra pushes for the pretend-args slot.  */
1750 if (current_function_anonymous_args
&& current_function_pretend_args_size
)
1756 /* The amount of stack adjustment that happens here, in output_return and in
1757 output_epilogue must be exactly the same as was calculated during reload,
1758 or things will point to the wrong place. The only time we can safely
1759 ignore this constraint is when a function has no arguments on the stack,
1760 no stack frame requirement and no live registers except for `lr'. If we
1761 can guarantee that by making all function calls into tail calls and that
1762 lr is not clobbered in any other way, then there is no need to push lr
/* Emit the function prologue to stream F for a frame of FRAME_SIZE bytes.
   NOTE(review): mangled text, lines missing from view — comments below
   are best-effort inferences.  */
1766 output_func_prologue (f
, frame_size
)
1770 int reg
, live_regs_mask
= 0;
1773 /* Nonzero if we must stuff some register arguments onto the stack as if
1774 they were passed there. */
1775 int store_arg_regs
= 0;
/* The conditional-execution FSM must be idle at a function boundary.  */
1777 if (arm_ccfsm_state
|| arm_target_insn
)
1778 abort (); /* Sanity check */
1780 return_used_this_function
= 0;
1781 lr_save_eliminated
= 0;
/* Emit an assembler comment describing the frame layout.  */
1783 fprintf (f
, "\t@ args = %d, pretend = %d, frame = %d\n",
1784 current_function_args_size
, current_function_pretend_args_size
,
1786 fprintf (f
, "\t@ frame_needed = %d, current_function_anonymous_args = %d\n",
1787 frame_pointer_needed
, current_function_anonymous_args
);
1789 if (current_function_anonymous_args
&& current_function_pretend_args_size
)
/* Build the mask of call-saved integer registers (r0-r10) to push.  */
1792 for (reg
= 0; reg
<= 10; reg
++)
1793 if (regs_ever_live
[reg
] && ! call_used_regs
[reg
])
1794 live_regs_mask
|= (1 << reg
);
/* 0xD800 = fp, ip, lr, pc; save ip so fp can be derived from it.  */
1796 if (frame_pointer_needed
)
1798 live_regs_mask
|= 0xD800;
1799 fputs ("\tmov\tip, sp\n", f
);
/* Try to prove lr need not be saved at all.  */
1801 else if (regs_ever_live
[14])
1803 if (! current_function_args_size
1804 && ! function_really_clobbers_lr (get_insns ()))
1806 fprintf (f
,"\t@ I don't think this function clobbers lr\n");
1807 lr_save_eliminated
= 1;
1810 live_regs_mask
|= 0x4000;
1813 /* If CURRENT_FUNCTION_PRETEND_ARGS_SIZE, adjust the stack pointer to make
1814 room. If also STORE_ARG_REGS store the argument registers involved in
1815 the created slot (this is for stdarg and varargs). */
1816 if (current_function_pretend_args_size
)
1820 int arg_size
, mask
= 0;
1822 assert (current_function_pretend_args_size
<= 16);
1823 for (reg
= 3, arg_size
= current_function_pretend_args_size
;
1824 arg_size
> 0; reg
--, arg_size
-= 4)
1826 print_multi_reg (f
, "stmfd\tsp!", mask
, FALSE
);
/* Otherwise just drop sp to create the pretend-args slot.  */
1830 operands
[0] = operands
[1] = stack_pointer_rtx
;
1831 operands
[2] = gen_rtx (CONST_INT
, VOIDmode
,
1832 -current_function_pretend_args_size
);
1833 output_add_immediate (operands
);
1839 /* if a di mode load/store multiple is used, and the base register
1840 is r3, then r4 can become an ever live register without lr
1841 doing so, in this case we need to push lr as well, or we
1842 will fail to get a proper return. */
1844 live_regs_mask
|= 0x4000;
1845 lr_save_eliminated
= 0;
1847 /* Now push all the call-saved regs onto the stack */
1848 print_multi_reg (f
, "stmfd\tsp!", live_regs_mask
, FALSE
);
/* Save the live FP registers (f4-f7 range) one at a time.  */
1851 for (reg
= 23; reg
> 15; reg
--)
1852 if (regs_ever_live
[reg
] && !call_used_regs
[reg
])
1853 fprintf (f
, "\tstfe\t%s, [sp, #-12]!\n", reg_names
[reg
]);
1855 if (frame_pointer_needed
)
1857 /* Make `fp' point to saved value of `pc'. */
1859 operands
[0] = gen_rtx (REG
, SImode
, HARD_FRAME_POINTER_REGNUM
);
1860 operands
[1] = gen_rtx (REG
, SImode
, 12);
1861 operands
[2] = GEN_INT ( - (4 + current_function_pretend_args_size
));
1862 output_add_immediate (operands
);
/* Finally allocate the local frame itself.  */
1867 operands
[0] = operands
[1] = stack_pointer_rtx
;
1868 operands
[2] = GEN_INT (-frame_size
);
1869 output_add_immediate (operands
);
/* Emit the function epilogue to stream F, undoing a frame of FRAME_SIZE
   bytes; mirrors output_func_prologue.  NOTE(review): mangled text, lines
   missing from view — comments are best-effort inferences.  */
1875 output_func_epilogue (f
, frame_size
)
1879 int reg
, live_regs_mask
= 0, code_size
= 0;
1880 /* If we need this then it will always be at least this much */
1881 int floats_offset
= 24;
/* Nothing to do if a return insn already handled the epilogue.  */
1884 if (use_return_insn() && return_used_this_function
)
1886 if (frame_size
&& !(frame_pointer_needed
|| TARGET_APCS
))
/* Rebuild the mask of saved call-saved integer registers.  */
1893 for (reg
= 0; reg
<= 10; reg
++)
1894 if (regs_ever_live
[reg
] && ! call_used_regs
[reg
])
1896 live_regs_mask
|= (1 << reg
);
1900 if (frame_pointer_needed
)
/* Reload FP registers from their fp-relative save slots.  */
1902 for (reg
= 23; reg
> 15; reg
--)
1903 if (regs_ever_live
[reg
] && ! call_used_regs
[reg
])
1905 fprintf (f
, "\tldfe\t%s, [fp, #-%d]\n", reg_names
[reg
],
1907 floats_offset
+= 12;
/* 0xA800 = fp, sp, pc: restore them with a descending multi-load.  */
1911 live_regs_mask
|= 0xA800;
1912 print_multi_reg (f
, "ldmea\tfp", live_regs_mask
,
1913 TARGET_6
? FALSE
: TRUE
);
1918 /* Restore stack pointer if necessary. */
1921 operands
[0] = operands
[1] = stack_pointer_rtx
;
1922 operands
[2] = gen_rtx (CONST_INT
, VOIDmode
, frame_size
);
1923 output_add_immediate (operands
);
/* Pop FP registers saved with post-increment stores.  */
1926 for (reg
= 16; reg
< 24; reg
++)
1927 if (regs_ever_live
[reg
] && ! call_used_regs
[reg
])
1929 fprintf (f
, "\tldfe\t%s, [sp], #12\n", reg_names
[reg
]);
/* If lr was saved and no pretend args, pop straight into pc.  */
1932 if (current_function_pretend_args_size
== 0 && regs_ever_live
[14])
1934 print_multi_reg (f
, "ldmfd\tsp!", live_regs_mask
| 0x8000,
1935 TARGET_6
? FALSE
: TRUE
);
/* Otherwise pop into lr and return separately below.  */
1940 if (live_regs_mask
|| regs_ever_live
[14])
1942 live_regs_mask
|= 0x4000;
1943 print_multi_reg (f
, "ldmfd\tsp!", live_regs_mask
, FALSE
);
1946 if (current_function_pretend_args_size
)
1948 operands
[0] = operands
[1] = stack_pointer_rtx
;
1949 operands
[2] = gen_rtx (CONST_INT
, VOIDmode
,
1950 current_function_pretend_args_size
);
1951 output_add_immediate (operands
);
1953 fputs (TARGET_6
? "\tmov\tpc, lr\n" : "\tmovs\tpc, lr\n", f
);
1960 /* insn_addresses isn't allocated when not optimizing */
1963 arm_increase_location (code_size
1964 + insn_addresses
[INSN_UID (get_last_insn ())]
1965 + get_prologue_size ());
/* Reset per-function state for the next function.  */
1967 current_function_anonymous_args
= 0;
1970 /* If CODE is 'd', then the X is a condition operand and the instruction
1971 should only be executed if the condition is true.
1972 if CODE is 'D', then the X is a condition operand and the instruction
1973 should only be executed if the condition is false: however, if the mode
1974 of the comparison is CCFPEmode, then always execute the instruction -- we
1975 do this because in these circumstances !GE does not necessarily imply LT;
1976 in these cases the instruction pattern will take care to make sure that
1977 an instruction containing %d will follow, thereby undoing the effects of
1978 doing this instruction unconditionally.
1979 If CODE is 'N' then X is a floating point operand that must be negated
1981 If CODE is 'B' then output a bitwise inverted value of X (a const int).
1982 If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */
/* NOTE(review): mangled text; the case labels selecting each CODE letter
   are missing from view, so interior comments mark the apparent cases.  */
1985 arm_print_operand (stream
, x
, code
)
/* '@'-style case (presumably): emit the assembler comment character.  */
1993 fputc (ARM_COMMENT_CHAR
, stream
);
1997 fputs (ARM_REG_PREFIX
, stream
);
/* Conditionalised output: emit the current condition suffix.  */
2001 if (arm_ccfsm_state
== 3 || arm_ccfsm_state
== 4)
2002 fputs (arm_condition_codes
[arm_current_cc
], stream
);
/* 'N': negate a floating-point constant before printing.  */
2008 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
2009 r
= REAL_VALUE_NEGATE (r
);
2010 fprintf (stream
, "%s", fp_const_from_val (&r
));
/* 'B': bitwise-inverted constant.  */
2015 if (GET_CODE (x
) == CONST_INT
)
2017 #if HOST_BITS_PER_WIDE_INT == HOST_BITS_PER_INT
2022 ARM_SIGN_EXTEND (~ INTVAL (x
)));
2026 output_addr_const (stream
, x
);
/* Arithmetic-instruction mnemonic cases.  */
2031 fprintf (stream
, "%s", arithmetic_instr (x
, 1));
2035 fprintf (stream
, "%s", arithmetic_instr (x
, 0));
/* Shift operator with its amount operand.  */
2042 fprintf (stream
, "%s ", shift_op (x
, &val
));
2044 arm_print_operand (stream
, XEXP (x
, 1), 0);
2047 #if HOST_BITS_PER_WIDE_INT == HOST_BITS_PER_INT
/* High word of a two-register value.  */
2059 fputs (reg_names
[REGNO (x
) + 1], stream
);
2063 if (GET_CODE (XEXP (x
, 0)) == REG
)
2064 fputs (reg_names
[REGNO (XEXP (x
, 0))], stream
);
2066 fputs (reg_names
[REGNO (XEXP (XEXP (x
, 0), 0))], stream
);
/* 'M': ldm/stm style register range {rN-rM}.  */
2070 fprintf (stream
, "{%s-%s}", reg_names
[REGNO (x
)],
2071 reg_names
[REGNO (x
) - 1
2072 + ((GET_MODE_SIZE (GET_MODE (x
))
2073 + GET_MODE_SIZE (SImode
) - 1)
2074 / GET_MODE_SIZE (SImode
))]);
/* 'd': condition suffix for the comparison X.  */
2079 fputs (arm_condition_codes
[get_arm_condition_code (x
)],
/* 'D': inverted condition, guarded against unsafe FP inversions.  */
2084 if (x
&& (flag_fast_math
2085 || GET_CODE (x
) == EQ
|| GET_CODE (x
) == NE
2086 || (GET_MODE (XEXP (x
, 0)) != CCFPEmode
2087 && (GET_MODE_CLASS (GET_MODE (XEXP (x
, 0)))
2089 fputs (arm_condition_codes
[ARM_INVERSE_CONDITION_CODE
2090 (get_arm_condition_code (x
))],
/* Default: print a register, memory reference or constant operand.  */
2098 if (GET_CODE (x
) == REG
)
2099 fputs (reg_names
[REGNO (x
)], stream
);
2100 else if (GET_CODE (x
) == MEM
)
2102 output_memory_reference_mode
= GET_MODE (x
);
2103 output_address (XEXP (x
, 0));
2105 else if (GET_CODE (x
) == CONST_DOUBLE
)
2106 fprintf (stream
, "#%s", fp_immediate_constant (x
));
2107 else if (GET_CODE (x
) == NEG
)
2108 abort (); /* This should never happen now. */
2111 fputc ('#', stream
);
2112 output_addr_const (stream
, x
);
2117 /* Increase the `arm_text_location' by AMOUNT if we're in the text
2121 arm_increase_location (amount
)
2124 if (in_text_section ())
2125 arm_text_location
+= amount
;
2129 /* Output a label definition. If this label is within the .text segment, it
2130 is stored in OFFSET_TABLE, to be used when building `llc' instructions.
2131 Maybe GCC remembers names not starting with a `*' for a long time, but this
2132 is a minority anyway, so we just make a copy. Do not store the leading `*'
2133 if the name starts with one. */
/* NOTE(review): mangled text; some lines (hashing loop body, early
   return) are missing from view — comments are best-effort.  */
2136 arm_asm_output_label (stream
, name
)
2140 char *real_name
, *s
;
2141 struct label_offset
*cur
;
/* Emit the label itself.  */
2144 assemble_name (stream
, name
);
2145 fputs (":\n", stream
);
/* Only text-section labels are recorded in the offset table.  */
2146 if (! in_text_section ())
/* Name starts with `*': copy it without the leading star.  */
2151 real_name
= xmalloc (1 + strlen (&name
[1]));
2152 strcpy (real_name
, &name
[1]);
/* Otherwise store it with the assembler's `_' prefix prepended.  */
2156 real_name
= xmalloc (2 + strlen (name
));
2157 strcpy (real_name
, "_");
2158 strcat (real_name
, name
);
/* Hash the stored name (accumulation statements not visible here).  */
2160 for (s
= real_name
; *s
; s
++)
2163 hash
= hash
% LABEL_HASH_SIZE
;
/* Prepend a new entry to the hash bucket's chain.  */
2164 cur
= (struct label_offset
*) xmalloc (sizeof (struct label_offset
));
2165 cur
->name
= real_name
;
2166 cur
->offset
= arm_text_location
;
2167 cur
->cdr
= offset_table
[hash
];
2168 offset_table
[hash
] = cur
;
2171 /* Load a symbol that is known to be in the text segment into a register.
2172 This should never be called when not optimizing. */
/* NOTE(review): mangled text; the instruction-emission loop tail and the
   first `add' template lines are missing from view — comments below are
   best-effort inferences from the visible tokens.  */
2175 output_load_symbol (insn
, operands
)
2180 char *name
= XSTR (operands
[1], 0);
2181 struct label_offset
*he
;
2184 unsigned int mask
, never_mask
= 0xffffffff;
/* Only optimized, `*'-prefixed (text-segment) symbols are handled.  */
2188 if (optimize
== 0 || *name
!= '*')
/* Hash the name (sans `*') and look it up in the offset table.  */
2191 for (s
= &name
[1]; *s
; s
++)
2194 hash
= hash
% LABEL_HASH_SIZE
;
2195 he
= offset_table
[hash
];
2196 while (he
&& strcmp (he
->name
, &name
[1]))
/* Distance from the insn (pc + 8 pipeline offset) back to the label.  */
2202 offset
= (arm_text_location
+ insn_addresses
[INSN_UID (insn
)]
2203 + get_prologue_size () + 8 - he
->offset
);
2207 /* When generating the instructions, we never mask out the bits that we
2208 think will be always zero, then if a mistake has occurred somewhere, the
2209 assembler will spot it and generate an error. */
2211 /* If the symbol is word aligned then we might be able to reduce the
2213 shift
= ((offset
& 3) == 0) ? 2 : 0;
2215 /* Clear the bits from NEVER_MASK that will be orred in with the individual
2217 for (; shift
< 32; shift
+= 8)
2219 mask
= 0xff << shift
;
2220 if ((offset
& mask
) || ((unsigned) offset
) > mask
)
2221 never_mask
&= ~mask
;
2225 mask
= 0xff << (shift
- 32);
2227 while (mask
&& (never_mask
& mask
) == 0)
/* First instruction: pc-relative sub, optionally byte-masked.  */
2231 strcpy (buffer
, "sub%?\t%0, pc, #(8 + . -%a1)");
2232 if ((never_mask
| mask
) != 0xffffffff)
2233 sprintf (buffer
+ strlen (buffer
), " & 0x%x", mask
| never_mask
);
/* Subsequent instructions refine the remaining byte ranges.  */
2236 sprintf (buffer
, "sub%%?\t%%0, %%0, #(%d + . -%%a1) & 0x%x",
2237 inst
, mask
| never_mask
);
2239 output_asm_insn (buffer
, operands
);
/* Output code resembling an .lcomm directive.  /bin/as doesn't have this
   directive hence this hack, which works by reserving some `.space' in the
   bss segment directly.

   XXX This is a severe hack, which is guaranteed NOT to work since it doesn't
   define STATIC COMMON space but merely STATIC BSS space.

   STREAM is the assembler output; NAME the symbol; ROUNDED the rounded-up
   size actually reserved.  SIZE is unused but kept for the macro
   interface.  NOTE(review): reconstructed from a line-mangled original;
   the parameter declarations and the `else' were not visible — confirm
   against a pristine copy.  */

void
output_lcomm_directive (stream, name, size, rounded)
     FILE *stream;
     char *name;
     int size, rounded;
{
  fputs ("\n\t.bss\t@ .lcomm\n", stream);
  assemble_name (stream, name);
  fprintf (stream, ":\t.space\t%d\n", rounded);
  /* Switch back to whichever section we were emitting into before.  */
  if (in_text_section ())
    fputs ("\n\t.text\n", stream);
  else
    fputs ("\n\t.data\n", stream);
}
2269 /* A finite state machine takes care of noticing whether or not instructions
2270 can be conditionally executed, and thus decrease execution time and code
2271 size by deleting branch instructions. The fsm is controlled by
2272 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
2274 /* The state of the fsm controlling condition codes are:
2275 0: normal, do nothing special
2276 1: make ASM_OUTPUT_OPCODE not output this instruction
2277 2: make ASM_OUTPUT_OPCODE not output this instruction
2278 3: make instructions conditional
2279 4: make instructions conditional
2281 State transitions (state->state by whom under condition):
2282 0 -> 1 final_prescan_insn if the `target' is a label
2283 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
2284 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
2285 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
2286 3 -> 0 ASM_OUTPUT_INTERNAL_LABEL if the `target' label is reached
2287 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
2288 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
2289 (the target insn is arm_target_insn).
2291 If the jump clobbers the conditions then we use states 2 and 4.
2293 A similar thing can be done with conditional return insns.
2295 XXX In case the `target' is an unconditional branch, this conditionalising
2296 of the instructions always reduces code size, but not always execution
2297 time. But then, I want to reduce the code size to somewhere near what
2298 /bin/cc produces. */
2300 /* Returns the index of the ARM condition code string in
2301 `arm_condition_codes'. COMPARISON should be an rtx like
2302 `(eq (...) (...))'. */
2305 get_arm_condition_code (comparison
)
2308 switch (GET_CODE (comparison
))
2310 case NE
: return (1);
2311 case EQ
: return (0);
2312 case GE
: return (10);
2313 case GT
: return (12);
2314 case LE
: return (13);
2315 case LT
: return (11);
2316 case GEU
: return (2);
2317 case GTU
: return (8);
2318 case LEU
: return (9);
2319 case LTU
: return (3);
2328 final_prescan_insn (insn
, opvec
, noperands
)
2333 /* BODY will hold the body of INSN. */
2334 register rtx body
= PATTERN (insn
);
2336 /* This will be 1 if trying to repeat the trick, and things need to be
2337 reversed if it appears to fail. */
2340 /* JUMP_CLOBBERS will be one implies that the conditions if a branch is
2341 taken are clobbered, even if the rtl suggests otherwise. It also
2342 means that we have to grub around within the jump expression to find
2343 out what the conditions are when the jump isn't taken. */
2344 int jump_clobbers
= 0;
2346 /* If we start with a return insn, we only succeed if we find another one. */
2347 int seeking_return
= 0;
2349 /* START_INSN will hold the insn from where we start looking. This is the
2350 first insn after the following code_label if REVERSE is true. */
2351 rtx start_insn
= insn
;
2353 /* If in state 4, check if the target branch is reached, in order to
2354 change back to state 0. */
2355 if (arm_ccfsm_state
== 4)
2357 if (insn
== arm_target_insn
)
2359 arm_target_insn
= NULL
;
2360 arm_ccfsm_state
= 0;
2365 /* If in state 3, it is possible to repeat the trick, if this insn is an
2366 unconditional branch to a label, and immediately following this branch
2367 is the previous target label which is only used once, and the label this
2368 branch jumps to is not too far off. */
2369 if (arm_ccfsm_state
== 3)
2371 if (simplejump_p (insn
))
2373 start_insn
= next_nonnote_insn (start_insn
);
2374 if (GET_CODE (start_insn
) == BARRIER
)
2376 /* XXX Isn't this always a barrier? */
2377 start_insn
= next_nonnote_insn (start_insn
);
2379 if (GET_CODE (start_insn
) == CODE_LABEL
2380 && CODE_LABEL_NUMBER (start_insn
) == arm_target_label
2381 && LABEL_NUSES (start_insn
) == 1)
2386 else if (GET_CODE (body
) == RETURN
)
2388 start_insn
= next_nonnote_insn (start_insn
);
2389 if (GET_CODE (start_insn
) == BARRIER
)
2390 start_insn
= next_nonnote_insn (start_insn
);
2391 if (GET_CODE (start_insn
) == CODE_LABEL
2392 && CODE_LABEL_NUMBER (start_insn
) == arm_target_label
2393 && LABEL_NUSES (start_insn
) == 1)
2405 if (arm_ccfsm_state
!= 0 && !reverse
)
2407 if (GET_CODE (insn
) != JUMP_INSN
)
2410 /* This jump might be paralled with a clobber of the condition codes
2411 the jump should always come first */
2412 if (GET_CODE (body
) == PARALLEL
&& XVECLEN (body
, 0) > 0)
2413 body
= XVECEXP (body
, 0, 0);
2416 /* If this is a conditional return then we don't want to know */
2417 if (GET_CODE (body
) == SET
&& GET_CODE (SET_DEST (body
)) == PC
2418 && GET_CODE (SET_SRC (body
)) == IF_THEN_ELSE
2419 && (GET_CODE (XEXP (SET_SRC (body
), 1)) == RETURN
2420 || GET_CODE (XEXP (SET_SRC (body
), 2)) == RETURN
))
2425 || (GET_CODE (body
) == SET
&& GET_CODE (SET_DEST (body
)) == PC
2426 && GET_CODE (SET_SRC (body
)) == IF_THEN_ELSE
))
2428 int insns_skipped
= 0, fail
= FALSE
, succeed
= FALSE
;
2429 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
2430 int then_not_else
= TRUE
;
2431 rtx this_insn
= start_insn
, label
= 0;
2433 if (get_attr_conds (insn
) == CONDS_JUMP_CLOB
)
2436 /* Register the insn jumped to. */
2439 if (!seeking_return
)
2440 label
= XEXP (SET_SRC (body
), 0);
2442 else if (GET_CODE (XEXP (SET_SRC (body
), 1)) == LABEL_REF
)
2443 label
= XEXP (XEXP (SET_SRC (body
), 1), 0);
2444 else if (GET_CODE (XEXP (SET_SRC (body
), 2)) == LABEL_REF
)
2446 label
= XEXP (XEXP (SET_SRC (body
), 2), 0);
2447 then_not_else
= FALSE
;
2449 else if (GET_CODE (XEXP (SET_SRC (body
), 1)) == RETURN
)
2451 else if (GET_CODE (XEXP (SET_SRC (body
), 2)) == RETURN
)
2454 then_not_else
= FALSE
;
2459 /* See how many insns this branch skips, and what kind of insns. If all
2460 insns are okay, and the label or unconditional branch to the same
2461 label is not too far away, succeed. */
2462 for (insns_skipped
= 0;
2463 !fail
&& !succeed
&& insns_skipped
< MAX_INSNS_SKIPPED
;
2468 this_insn
= next_nonnote_insn (this_insn
);
2472 scanbody
= PATTERN (this_insn
);
2474 switch (GET_CODE (this_insn
))
2477 /* Succeed if it is the target label, otherwise fail since
2478 control falls in from somewhere else. */
2479 if (this_insn
== label
)
2483 arm_ccfsm_state
= 2;
2484 this_insn
= next_nonnote_insn (this_insn
);
2487 arm_ccfsm_state
= 1;
2495 /* Succeed if the following insn is the target label.
2497 If return insns are used then the last insn in a function
2498 will be a barrier. */
2499 this_insn
= next_nonnote_insn (this_insn
);
2500 if (this_insn
&& this_insn
== label
)
2504 arm_ccfsm_state
= 2;
2505 this_insn
= next_nonnote_insn (this_insn
);
2508 arm_ccfsm_state
= 1;
2516 /* The arm 6xx uses full 32 bit addresses so the cc is not
2517 preserved over calls */
2522 /* If this is an unconditional branch to the same label, succeed.
2523 If it is to another label, do nothing. If it is conditional,
2525 /* XXX Probably, the test for the SET and the PC are unnecessary. */
2527 if (GET_CODE (scanbody
) == SET
2528 && GET_CODE (SET_DEST (scanbody
)) == PC
)
2530 if (GET_CODE (SET_SRC (scanbody
)) == LABEL_REF
2531 && XEXP (SET_SRC (scanbody
), 0) == label
&& !reverse
)
2533 arm_ccfsm_state
= 2;
2536 else if (GET_CODE (SET_SRC (scanbody
)) == IF_THEN_ELSE
)
2539 else if (GET_CODE (scanbody
) == RETURN
2542 arm_ccfsm_state
= 2;
2545 else if (GET_CODE (scanbody
) == PARALLEL
)
2547 switch (get_attr_conds (this_insn
))
2559 /* Instructions using or affecting the condition codes make it
2561 if ((GET_CODE (scanbody
) == SET
2562 || GET_CODE (scanbody
) == PARALLEL
)
2563 && get_attr_conds (this_insn
) != CONDS_NOCOND
)
2573 if ((!seeking_return
) && (arm_ccfsm_state
== 1 || reverse
))
2574 arm_target_label
= CODE_LABEL_NUMBER (label
);
2575 else if (seeking_return
|| arm_ccfsm_state
== 2)
2577 while (this_insn
&& GET_CODE (PATTERN (this_insn
)) == USE
)
2579 this_insn
= next_nonnote_insn (this_insn
);
2580 if (this_insn
&& (GET_CODE (this_insn
) == BARRIER
2581 || GET_CODE (this_insn
) == CODE_LABEL
))
2586 /* Oh, dear! we ran off the end.. give up */
2587 recog (PATTERN (insn
), insn
, NULL_PTR
);
2588 arm_ccfsm_state
= 0;
2589 arm_target_insn
= NULL
;
2592 arm_target_insn
= this_insn
;
2601 get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body
),
2603 if (GET_CODE (XEXP (XEXP (SET_SRC (body
), 0), 0)) == AND
)
2604 arm_current_cc
= ARM_INVERSE_CONDITION_CODE (arm_current_cc
);
2605 if (GET_CODE (XEXP (SET_SRC (body
), 0)) == NE
)
2606 arm_current_cc
= ARM_INVERSE_CONDITION_CODE (arm_current_cc
);
2610 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
2613 arm_current_cc
= get_arm_condition_code (XEXP (SET_SRC (body
),
2617 if (reverse
|| then_not_else
)
2618 arm_current_cc
= ARM_INVERSE_CONDITION_CODE (arm_current_cc
);
2620 /* restore recog_operand (getting the attributes of other insns can
2621 destroy this array, but final.c assumes that it remains intact
2622 accross this call; since the insn has been recognized already we
2623 call recog direct). */
2624 recog (PATTERN (insn
), insn
, NULL_PTR
);