1 /* Output routines for GCC for ARM/RISCiX.
2 Copyright (C) 1991, 1993, 1994 Free Software Foundation, Inc.
3 Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 and Martin Simmons (@harleqn.co.uk).
5 More major hacks by Richard Earnshaw (rwe11@cl.cam.ac.uk)
7 This file is part of GNU CC.
9 GNU CC is free software; you can redistribute it and/or modify
10 it under the terms of the GNU General Public License as published by
11 the Free Software Foundation; either version 2, or (at your option)
14 GNU CC is distributed in the hope that it will be useful,
15 but WITHOUT ANY WARRANTY; without even the implied warranty of
16 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 GNU General Public License for more details.
19 You should have received a copy of the GNU General Public License
20 along with GNU CC; see the file COPYING. If not, write to
21 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
35 #include "insn-attr.h"
41 /* The maximum number of insns skipped which will be conditionalised if
43 #define MAX_INSNS_SKIPPED 5
45 /* Some function declarations. */
46 extern FILE *asm_out_file
;
47 extern char *output_multi_immediate ();
48 extern void arm_increase_location ();
50 HOST_WIDE_INT int_log2
PROTO ((HOST_WIDE_INT
));
51 static int get_prologue_size
PROTO ((void));
53 /* Define the information needed to generate branch insns. This is
54 stored from the compare operation. */
56 rtx arm_compare_op0
, arm_compare_op1
;
59 /* What type of cpu are we compiling for? */
60 enum processor_type arm_cpu
;
62 /* What type of floating point are we compiling for? */
63 enum floating_point_type arm_fpu
;
65 /* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
66 must report the mode of the memory reference from PRINT_OPERAND to
67 PRINT_OPERAND_ADDRESS. */
68 enum machine_mode output_memory_reference_mode
;
70 /* Nonzero if the prologue must setup `fp'. */
71 int current_function_anonymous_args
;
73 /* Location counter of .text segment. */
74 int arm_text_location
= 0;
76 /* Set to one if we think that lr is only saved because of subroutine calls,
77 but all of these can be `put after' return insns */
78 int lr_save_eliminated
;
80 /* A hash table is used to store text segment labels and their associated
81 offset from the start of the text segment. */
86 struct label_offset
*cdr
;
/* Number of buckets in the label-offset hash table below (257 is prime). */
89 #define LABEL_HASH_SIZE 257
91 static struct label_offset
*offset_table
[LABEL_HASH_SIZE
];
93 /* Set to 1 when a return insn is output, this means that the epilogue
96 static int return_used_this_function
;
98 /* For an explanation of these variables, see final_prescan_insn below. */
102 int arm_target_label
;
104 /* The condition codes of the ARM, and the inverse function. */
105 char *arm_condition_codes
[] =
107 "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
108 "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
/* The entries of arm_condition_codes are laid out in complementary pairs
   (eq/ne, cs/cc, mi/pl, ...), so flipping the low bit of an index yields
   the index of the inverse condition.  */
111 #define ARM_INVERSE_CONDITION_CODE(X) ((X) ^ 1)
113 /* Return 1 if it is possible to return using a single instruction */
120 if (!reload_completed
||current_function_pretend_args_size
121 || current_function_anonymous_args
122 || (get_frame_size () && !(TARGET_APCS
|| frame_pointer_needed
)))
125 /* Can't be done if any of the FPU regs are pushed, since this also
127 for (regno
= 20; regno
< 24; regno
++)
128 if (regs_ever_live
[regno
])
134 /* Return TRUE if int I is a valid immediate ARM constant. */
140 unsigned HOST_WIDE_INT mask
= ~0xFF;
142 /* Fast return for 0 and powers of 2 */
143 if ((i
& (i
- 1)) == 0)
148 if ((i
& mask
& (unsigned HOST_WIDE_INT
) 0xffffffff) == 0)
151 (mask
<< 2) | ((mask
& (unsigned HOST_WIDE_INT
) 0xffffffff)
152 >> (32 - 2)) | ~((unsigned HOST_WIDE_INT
) 0xffffffff);
153 } while (mask
!= ~0xFF);
158 /* Return true if I is a valid constant for the operation CODE. */
160 const_ok_for_op (i
, code
, mode
)
163 enum machine_mode mode
;
165 if (const_ok_for_arm (i
))
171 return const_ok_for_arm (ARM_SIGN_EXTEND (-i
));
173 case MINUS
: /* Should only occur with (MINUS I reg) => rsb */
179 return const_ok_for_arm (ARM_SIGN_EXTEND (~i
));
186 /* Emit a sequence of insns to handle a large constant.
187 CODE is the code of the operation required, it can be any of SET, PLUS,
188 IOR, AND, XOR, MINUS;
189 MODE is the mode in which the operation is being performed;
190 VAL is the integer to operate on;
191 SOURCE is the other operand (a register, or a null-pointer for SET);
192 SUBTARGETS means it is safe to create scratch registers if that will
193 either produce a simpler sequence, or we will want to cse the values. */
196 arm_split_constant (code
, mode
, val
, target
, source
, subtargets
)
198 enum machine_mode mode
;
207 int can_negate_initial
= 0;
210 int num_bits_set
= 0;
211 int set_sign_bit_copies
= 0;
212 int clear_sign_bit_copies
= 0;
213 int clear_zero_bit_copies
= 0;
214 int set_zero_bit_copies
= 0;
217 unsigned HOST_WIDE_INT temp1
, temp2
;
218 unsigned HOST_WIDE_INT remainder
= val
& 0xffffffff;
220 /* find out which operations are safe for a given CODE. Also do a quick
221 check for degenerate cases; these can occur when DImode operations
233 can_negate_initial
= 1;
237 if (remainder
== 0xffffffff)
239 emit_insn (gen_rtx (SET
, VOIDmode
, target
,
240 GEN_INT (ARM_SIGN_EXTEND (val
))));
245 if (reload_completed
&& rtx_equal_p (target
, source
))
247 emit_insn (gen_rtx (SET
, VOIDmode
, target
, source
));
255 emit_insn (gen_rtx (SET
, VOIDmode
, target
, const0_rtx
));
258 if (remainder
== 0xffffffff)
260 if (reload_completed
&& rtx_equal_p (target
, source
))
262 emit_insn (gen_rtx (SET
, VOIDmode
, target
, source
));
271 if (reload_completed
&& rtx_equal_p (target
, source
))
273 emit_insn (gen_rtx (SET
, VOIDmode
, target
, source
));
276 if (remainder
== 0xffffffff)
278 emit_insn (gen_rtx (SET
, VOIDmode
, target
,
279 gen_rtx (NOT
, mode
, source
)));
283 /* We don't know how to handle this yet below. */
287 /* We treat MINUS as (val - source), since (source - val) is always
288 passed as (source + (-val)). */
291 emit_insn (gen_rtx (SET
, VOIDmode
, target
,
292 gen_rtx (NEG
, mode
, source
)));
295 if (const_ok_for_arm (val
))
297 emit_insn (gen_rtx (SET
, VOIDmode
, target
,
298 gen_rtx (MINUS
, mode
, GEN_INT (val
), source
)));
309 /* If we can do it in one insn get out quickly */
310 if (const_ok_for_arm (val
)
311 || (can_negate_initial
&& const_ok_for_arm (-val
))
312 || (can_invert
&& const_ok_for_arm (~val
)))
314 emit_insn (gen_rtx (SET
, VOIDmode
, target
,
315 (source
? gen_rtx (code
, mode
, source
,
316 GEN_INT (val
)) : GEN_INT (val
))));
321 /* Calculate a few attributes that may be useful for specific
324 for (i
= 31; i
>= 0; i
--)
326 if ((remainder
& (1 << i
)) == 0)
327 clear_sign_bit_copies
++;
332 for (i
= 31; i
>= 0; i
--)
334 if ((remainder
& (1 << i
)) != 0)
335 set_sign_bit_copies
++;
340 for (i
= 0; i
<= 31; i
++)
342 if ((remainder
& (1 << i
)) == 0)
343 clear_zero_bit_copies
++;
348 for (i
= 0; i
<= 31; i
++)
350 if ((remainder
& (1 << i
)) != 0)
351 set_zero_bit_copies
++;
359 /* See if we can do this by sign_extending a constant that is known
360 to be negative. This is a good way of doing it, since the shift
361 may well merge into a subsequent insn. */
362 if (set_sign_bit_copies
> 1)
365 (temp1
= ARM_SIGN_EXTEND (remainder
366 << (set_sign_bit_copies
- 1))))
368 new_src
= subtargets
? gen_reg_rtx (mode
) : target
;
369 emit_insn (gen_rtx (SET
, VOIDmode
, new_src
, GEN_INT (temp1
)));
370 emit_insn (gen_ashrsi3 (target
, new_src
,
371 GEN_INT (set_sign_bit_copies
- 1)));
374 /* For an inverted constant, we will need to set the low bits,
375 these will be shifted out of harm's way. */
376 temp1
|= (1 << (set_sign_bit_copies
- 1)) - 1;
377 if (const_ok_for_arm (~temp1
))
379 new_src
= subtargets
? gen_reg_rtx (mode
) : target
;
380 emit_insn (gen_rtx (SET
, VOIDmode
, new_src
, GEN_INT (temp1
)));
381 emit_insn (gen_ashrsi3 (target
, new_src
,
382 GEN_INT (set_sign_bit_copies
- 1)));
387 /* See if we can generate this by setting the bottom (or the top)
388 16 bits, and then shifting these into the other half of the
389 word. We only look for the simplest cases, to do more would cost
390 too much. Be careful, however, not to generate this when the
391 alternative would take fewer insns. */
392 if (val
& 0xffff0000)
394 temp1
= remainder
& 0xffff0000;
395 temp2
= remainder
& 0x0000ffff;
397 /* Overlaps outside this range are best done using other methods. */
398 for (i
= 9; i
< 24; i
++)
400 if ((((temp2
| (temp2
<< i
)) & 0xffffffff) == remainder
)
401 && ! const_ok_for_arm (temp2
))
404 = arm_split_constant (code
, mode
, temp2
,
406 = subtargets
? gen_reg_rtx (mode
)
410 emit_insn (gen_rtx (SET
, VOIDmode
, target
,
412 gen_rtx (ASHIFT
, mode
, source
,
419 /* Don't duplicate cases already considered. */
420 for (i
= 17; i
< 24; i
++)
422 if (((temp1
| (temp1
>> i
)) == remainder
)
423 && ! const_ok_for_arm (temp1
))
426 = arm_split_constant (code
, mode
, temp1
,
428 = subtargets
? gen_reg_rtx (mode
)
432 emit_insn (gen_rtx (SET
, VOIDmode
, target
,
434 gen_rtx (LSHIFTRT
, mode
, source
,
445 /* If we have IOR or XOR, and the inverse of the constant can be loaded
446 in a single instruction, and we can find a temporary to put it in,
447 then this can be done in two instructions instead of 3-4. */
449 || (reload_completed
&& ! reg_mentioned_p (target
, source
)))
451 if (const_ok_for_arm (ARM_SIGN_EXTEND (~ val
)))
453 rtx sub
= subtargets
? gen_reg_rtx (mode
) : target
;
455 emit_insn (gen_rtx (SET
, VOIDmode
, sub
,
456 GEN_INT (ARM_SIGN_EXTEND (~ val
))));
457 emit_insn (gen_rtx (SET
, VOIDmode
, target
,
458 gen_rtx (code
, mode
, source
, sub
)));
466 if (set_sign_bit_copies
> 8
467 && (val
& (-1 << (32 - set_sign_bit_copies
))) == val
)
469 rtx sub
= subtargets
? gen_reg_rtx (mode
) : target
;
470 rtx shift
= GEN_INT (set_sign_bit_copies
);
472 emit_insn (gen_rtx (SET
, VOIDmode
, sub
,
474 gen_rtx (ASHIFT
, mode
, source
,
476 emit_insn (gen_rtx (SET
, VOIDmode
, target
,
478 gen_rtx (LSHIFTRT
, mode
, sub
,
483 if (set_zero_bit_copies
> 8
484 && (remainder
& ((1 << set_zero_bit_copies
) - 1)) == remainder
)
486 rtx sub
= subtargets
? gen_reg_rtx (mode
) : target
;
487 rtx shift
= GEN_INT (set_zero_bit_copies
);
489 emit_insn (gen_rtx (SET
, VOIDmode
, sub
,
491 gen_rtx (LSHIFTRT
, mode
, source
,
493 emit_insn (gen_rtx (SET
, VOIDmode
, target
,
495 gen_rtx (ASHIFT
, mode
, sub
,
500 if (const_ok_for_arm (temp1
= ARM_SIGN_EXTEND (~ val
)))
502 rtx sub
= subtargets
? gen_reg_rtx (mode
) : target
;
503 emit_insn (gen_rtx (SET
, VOIDmode
, sub
,
504 gen_rtx (NOT
, mode
, source
)));
507 sub
= gen_reg_rtx (mode
);
508 emit_insn (gen_rtx (SET
, VOIDmode
, sub
,
509 gen_rtx (AND
, mode
, source
, GEN_INT (temp1
))));
510 emit_insn (gen_rtx (SET
, VOIDmode
, target
,
511 gen_rtx (NOT
, mode
, sub
)));
517 /* See if two shifts will do 2 or more insn's worth of work. */
518 if (clear_sign_bit_copies
>= 16 && clear_sign_bit_copies
< 24)
520 HOST_WIDE_INT shift_mask
= ((0xffffffff
521 << (32 - clear_sign_bit_copies
))
524 rtx shift
= GEN_INT (clear_sign_bit_copies
);
526 if ((remainder
| shift_mask
) != 0xffffffff)
528 new_source
= subtargets
? gen_reg_rtx (mode
) : target
;
529 insns
= arm_split_constant (AND
, mode
, remainder
| shift_mask
,
530 new_source
, source
, subtargets
);
534 new_source
= subtargets
? gen_reg_rtx (mode
) : target
;
535 emit_insn (gen_ashlsi3 (new_source
, source
, shift
));
536 emit_insn (gen_lshrsi3 (target
, new_source
, shift
));
540 if (clear_zero_bit_copies
>= 16 && clear_zero_bit_copies
< 24)
542 HOST_WIDE_INT shift_mask
= (1 << clear_zero_bit_copies
) - 1;
544 rtx shift
= GEN_INT (clear_zero_bit_copies
);
546 if ((remainder
| shift_mask
) != 0xffffffff)
548 new_source
= subtargets
? gen_reg_rtx (mode
) : target
;
549 insns
= arm_split_constant (AND
, mode
, remainder
| shift_mask
,
550 new_source
, source
, subtargets
);
554 new_source
= subtargets
? gen_reg_rtx (mode
) : target
;
555 emit_insn (gen_lshrsi3 (new_source
, source
, shift
));
556 emit_insn (gen_ashlsi3 (target
, new_source
, shift
));
566 for (i
= 0; i
< 32; i
++)
567 if (remainder
& (1 << i
))
570 if (code
== AND
|| (can_invert
&& num_bits_set
> 16))
571 remainder
= (~remainder
) & 0xffffffff;
572 else if (code
== PLUS
&& num_bits_set
> 16)
573 remainder
= (-remainder
) & 0xffffffff;
580 /* Now try and find a way of doing the job in either two or three
582 We start by looking for the largest block of zeros that are aligned on
583 a 2-bit boundary, we then fill up the temps, wrapping around to the
584 top of the word when we drop off the bottom.
585 In the worst case this code should produce no more than four insns. */
588 int best_consecutive_zeros
= 0;
590 for (i
= 0; i
< 32; i
+= 2)
592 int consecutive_zeros
= 0;
594 if (! (remainder
& (3 << i
)))
596 while ((i
< 32) && ! (remainder
& (3 << i
)))
598 consecutive_zeros
+= 2;
601 if (consecutive_zeros
> best_consecutive_zeros
)
603 best_consecutive_zeros
= consecutive_zeros
;
604 best_start
= i
- consecutive_zeros
;
610 /* Now start emitting the insns, starting with the one with the highest
611 bit set: we do this so that the smallest number will be emitted last;
612 this is more likely to be combinable with addressing insns. */
620 if (remainder
& (3 << (i
- 2)))
625 temp1
= remainder
& ((0x0ff << end
)
626 | ((i
< end
) ? (0xff >> (32 - end
)) : 0));
631 emit_insn (gen_rtx (SET
, VOIDmode
,
632 new_src
= (subtargets
? gen_reg_rtx (mode
)
634 GEN_INT (can_invert
? ~temp1
: temp1
)));
638 else if (code
== MINUS
)
640 emit_insn (gen_rtx (SET
, VOIDmode
,
641 new_src
= (subtargets
? gen_reg_rtx (mode
)
643 gen_rtx (code
, mode
, GEN_INT (temp1
),
649 emit_insn (gen_rtx (SET
, VOIDmode
,
650 new_src
= remainder
? (subtargets
653 gen_rtx (code
, mode
, source
,
654 GEN_INT (can_invert
? ~temp1
656 ? -temp1
: temp1
)))));
/* Nonzero if X is a REG, or a SUBREG whose inner expression is a REG.  */
669 #define REG_OR_SUBREG_REG(X) \
670 (GET_CODE (X) == REG \
671 || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))
/* Strip any SUBREG wrapper: X itself when it is a REG, otherwise the
   rtx inside the SUBREG.  Only valid when REG_OR_SUBREG_REG (X) holds.  */
673 #define REG_OR_SUBREG_RTX(X) \
674 (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))
/* Nonzero if X is one of the frame-related pointer rtxes: the frame
   pointer, the stack pointer, or the argument pointer.  */
676 #define ARM_FRAME_RTX(X) \
677 ((X) == frame_pointer_rtx || (X) == stack_pointer_rtx \
678 || (X) == arg_pointer_rtx)
681 arm_rtx_costs (x
, code
, outer_code
)
683 enum rtx_code code
, outer_code
;
685 enum machine_mode mode
= GET_MODE (x
);
686 enum rtx_code subcode
;
692 /* Memory costs quite a lot for the first word, but subsequent words
693 load at the equivalent of a single insn each. */
694 return (10 + 4 * ((GET_MODE_SIZE (mode
) - 1) / UNITS_PER_WORD
)
695 + (CONSTANT_POOL_ADDRESS_P (x
) ? 4 : 0));
702 if (mode
== SImode
&& GET_CODE (XEXP (x
, 1)) == REG
)
709 case ASHIFT
: case LSHIFTRT
: case ASHIFTRT
:
711 return (8 + (GET_CODE (XEXP (x
, 1)) == CONST_INT
? 0 : 8)
712 + ((GET_CODE (XEXP (x
, 0)) == REG
713 || (GET_CODE (XEXP (x
, 0)) == SUBREG
714 && GET_CODE (SUBREG_REG (XEXP (x
, 0))) == REG
))
716 return (1 + ((GET_CODE (XEXP (x
, 0)) == REG
717 || (GET_CODE (XEXP (x
, 0)) == SUBREG
718 && GET_CODE (SUBREG_REG (XEXP (x
, 0))) == REG
))
720 + ((GET_CODE (XEXP (x
, 1)) == REG
721 || (GET_CODE (XEXP (x
, 1)) == SUBREG
722 && GET_CODE (SUBREG_REG (XEXP (x
, 1))) == REG
)
723 || (GET_CODE (XEXP (x
, 1)) == CONST_INT
))
728 return (4 + (REG_OR_SUBREG_REG (XEXP (x
, 1)) ? 0 : 8)
729 + ((REG_OR_SUBREG_REG (XEXP (x
, 0))
730 || (GET_CODE (XEXP (x
, 0)) == CONST_INT
731 && const_ok_for_arm (INTVAL (XEXP (x
, 0)))))
734 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
)
735 return (2 + ((REG_OR_SUBREG_REG (XEXP (x
, 1))
736 || (GET_CODE (XEXP (x
, 1)) == CONST_DOUBLE
737 && const_double_rtx_ok_for_fpu (XEXP (x
, 1))))
739 + ((REG_OR_SUBREG_REG (XEXP (x
, 0))
740 || (GET_CODE (XEXP (x
, 0)) == CONST_DOUBLE
741 && const_double_rtx_ok_for_fpu (XEXP (x
, 0))))
744 if (((GET_CODE (XEXP (x
, 0)) == CONST_INT
745 && const_ok_for_arm (INTVAL (XEXP (x
, 0)))
746 && REG_OR_SUBREG_REG (XEXP (x
, 1))))
747 || (((subcode
= GET_CODE (XEXP (x
, 1))) == ASHIFT
748 || subcode
== ASHIFTRT
|| subcode
== LSHIFTRT
749 || subcode
== ROTATE
|| subcode
== ROTATERT
751 && GET_CODE (XEXP (XEXP (x
, 1), 1)) == CONST_INT
752 && ((INTVAL (XEXP (XEXP (x
, 1), 1)) &
753 (INTVAL (XEXP (XEXP (x
, 1), 1)) - 1)) == 0)))
754 && REG_OR_SUBREG_REG (XEXP (XEXP (x
, 1), 0))
755 && (REG_OR_SUBREG_REG (XEXP (XEXP (x
, 1), 1))
756 || GET_CODE (XEXP (XEXP (x
, 1), 1)) == CONST_INT
)
757 && REG_OR_SUBREG_REG (XEXP (x
, 0))))
762 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
)
763 return (2 + (REG_OR_SUBREG_REG (XEXP (x
, 0)) ? 0 : 8)
764 + ((REG_OR_SUBREG_REG (XEXP (x
, 1))
765 || (GET_CODE (XEXP (x
, 1)) == CONST_DOUBLE
766 && const_double_rtx_ok_for_fpu (XEXP (x
, 1))))
770 case AND
: case XOR
: case IOR
:
773 /* Normally the frame registers will be spilt into reg+const during
774 reload, so it is a bad idea to combine them with other instructions,
775 since then they might not be moved outside of loops. As a compromise
776 we allow integration with ops that have a constant as their second
778 if ((REG_OR_SUBREG_REG (XEXP (x
, 0))
779 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x
, 0)))
780 && GET_CODE (XEXP (x
, 1)) != CONST_INT
)
781 || (REG_OR_SUBREG_REG (XEXP (x
, 0))
782 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x
, 0)))))
786 return (4 + extra_cost
+ (REG_OR_SUBREG_REG (XEXP (x
, 0)) ? 0 : 8)
787 + ((REG_OR_SUBREG_REG (XEXP (x
, 1))
788 || (GET_CODE (XEXP (x
, 1)) == CONST_INT
789 && const_ok_for_op (INTVAL (XEXP (x
, 1)), code
, mode
)))
792 if (REG_OR_SUBREG_REG (XEXP (x
, 0)))
793 return (1 + (GET_CODE (XEXP (x
, 1)) == CONST_INT
? 0 : extra_cost
)
794 + ((REG_OR_SUBREG_REG (XEXP (x
, 1))
795 || (GET_CODE (XEXP (x
, 1)) == CONST_INT
796 && const_ok_for_op (INTVAL (XEXP (x
, 1)), code
, mode
)))
799 else if (REG_OR_SUBREG_REG (XEXP (x
, 1)))
800 return (1 + extra_cost
801 + ((((subcode
= GET_CODE (XEXP (x
, 0))) == ASHIFT
802 || subcode
== LSHIFTRT
|| subcode
== ASHIFTRT
803 || subcode
== ROTATE
|| subcode
== ROTATERT
805 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
806 && ((INTVAL (XEXP (XEXP (x
, 0), 1)) &
807 (INTVAL (XEXP (XEXP (x
, 0), 1)) - 1)) == 0))
808 && (REG_OR_SUBREG_REG (XEXP (XEXP (x
, 0), 0)))
809 && ((REG_OR_SUBREG_REG (XEXP (XEXP (x
, 0), 1)))
810 || GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
)))
816 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
820 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
822 HOST_WIDE_INT i
= INTVAL (XEXP (x
, 1)) & 0xffffffff;
823 int add_cost
= const_ok_for_arm (i
) ? 4 : 8;
826 /* This will need adjusting for ARM's with fast multiplies */
827 for (j
= 0; i
&& j
< 32; j
+= 2)
836 return (30 + (REG_OR_SUBREG_REG (XEXP (x
, 0)) ? 0 : 4)
837 + (REG_OR_SUBREG_REG (XEXP (x
, 1)) ? 0 : 4));
840 if (GET_MODE_CLASS (mode
) == MODE_FLOAT
)
841 return 4 + (REG_OR_SUBREG_REG (XEXP (x
, 0)) ? 0 : 6);
845 return 4 + (REG_OR_SUBREG_REG (XEXP (x
, 0)) ? 0 : 4);
847 return 1 + (REG_OR_SUBREG_REG (XEXP (x
, 0)) ? 0 : 4);
850 if (GET_CODE (XEXP (x
, 1)) == PC
|| GET_CODE (XEXP (x
, 2)) == PC
)
858 return 4 + (mode
== DImode
? 4 : 0);
861 if (GET_MODE (XEXP (x
, 0)) == QImode
)
862 return (4 + (mode
== DImode
? 4 : 0)
863 + (GET_CODE (XEXP (x
, 0)) == MEM
? 10 : 0));
866 switch (GET_MODE (XEXP (x
, 0)))
869 return (1 + (mode
== DImode
? 4 : 0)
870 + (GET_CODE (XEXP (x
, 0)) == MEM
? 10 : 0));
873 return (4 + (mode
== DImode
? 4 : 0)
874 + (GET_CODE (XEXP (x
, 0)) == MEM
? 10 : 0));
877 return (1 + (GET_CODE (XEXP (x
, 0)) == MEM
? 10 : 0));
886 /* This code has been fixed for cross compilation. */
888 static int fpa_consts_inited
= 0;
890 char *strings_fpa
[8] = {
901 static REAL_VALUE_TYPE values_fpa
[8];
909 for (i
= 0; i
< 8; i
++)
911 r
= REAL_VALUE_ATOF (strings_fpa
[i
], DFmode
);
915 fpa_consts_inited
= 1;
918 /* Return TRUE if rtx X is a valid immediate FPU constant. */
921 const_double_rtx_ok_for_fpu (x
)
927 if (!fpa_consts_inited
)
930 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
931 if (REAL_VALUE_MINUS_ZERO (r
))
934 for (i
= 0; i
< 8; i
++)
935 if (REAL_VALUES_EQUAL (r
, values_fpa
[i
]))
941 /* Return TRUE if rtx X is a valid immediate FPU constant. */
944 neg_const_double_rtx_ok_for_fpu (x
)
950 if (!fpa_consts_inited
)
953 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
954 r
= REAL_VALUE_NEGATE (r
);
955 if (REAL_VALUE_MINUS_ZERO (r
))
958 for (i
= 0; i
< 8; i
++)
959 if (REAL_VALUES_EQUAL (r
, values_fpa
[i
]))
965 /* Predicates for `match_operand' and `match_operator'. */
967 /* s_register_operand is the same as register_operand, but it doesn't accept
968 (SUBREG (MEM)...). */
971 s_register_operand (op
, mode
)
973 enum machine_mode mode
;
975 if (GET_MODE (op
) != mode
&& mode
!= VOIDmode
)
978 if (GET_CODE (op
) == SUBREG
)
979 op
= SUBREG_REG (op
);
981 /* We don't consider registers whose class is NO_REGS
982 to be a register operand. */
983 return (GET_CODE (op
) == REG
984 && (REGNO (op
) >= FIRST_PSEUDO_REGISTER
985 || REGNO_REG_CLASS (REGNO (op
)) != NO_REGS
));
988 /* Only accept reg, subreg(reg), const_int. */
991 reg_or_int_operand (op
, mode
)
993 enum machine_mode mode
;
995 if (GET_CODE (op
) == CONST_INT
)
998 if (GET_MODE (op
) != mode
&& mode
!= VOIDmode
)
1001 if (GET_CODE (op
) == SUBREG
)
1002 op
= SUBREG_REG (op
);
1004 /* We don't consider registers whose class is NO_REGS
1005 to be a register operand. */
1006 return (GET_CODE (op
) == REG
1007 && (REGNO (op
) >= FIRST_PSEUDO_REGISTER
1008 || REGNO_REG_CLASS (REGNO (op
)) != NO_REGS
));
1011 /* Return 1 if OP is an item in memory, given that we are in reload. */
1014 reload_memory_operand (op
, mode
)
1016 enum machine_mode mode
;
1018 int regno
= true_regnum (op
);
1020 return (! CONSTANT_P (op
)
1022 || (GET_CODE (op
) == REG
1023 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)));
1026 /* Return TRUE for valid operands for the rhs of an ARM instruction. */
1029 arm_rhs_operand (op
, mode
)
1031 enum machine_mode mode
;
1033 return (s_register_operand (op
, mode
)
1034 || (GET_CODE (op
) == CONST_INT
&& const_ok_for_arm (INTVAL (op
))));
1037 /* Return TRUE for valid operands for the rhs of an ARM instruction, or a load.
1041 arm_rhsm_operand (op
, mode
)
1043 enum machine_mode mode
;
1045 return (s_register_operand (op
, mode
)
1046 || (GET_CODE (op
) == CONST_INT
&& const_ok_for_arm (INTVAL (op
)))
1047 || memory_operand (op
, mode
));
1050 /* Return TRUE for valid operands for the rhs of an ARM instruction, or if a
1051 constant that is valid when negated. */
1054 arm_add_operand (op
, mode
)
1056 enum machine_mode mode
;
1058 return (s_register_operand (op
, mode
)
1059 || (GET_CODE (op
) == CONST_INT
1060 && (const_ok_for_arm (INTVAL (op
))
1061 || const_ok_for_arm (-INTVAL (op
)))));
1065 arm_not_operand (op
, mode
)
1067 enum machine_mode mode
;
1069 return (s_register_operand (op
, mode
)
1070 || (GET_CODE (op
) == CONST_INT
1071 && (const_ok_for_arm (INTVAL (op
))
1072 || const_ok_for_arm (~INTVAL (op
)))));
1075 /* Return TRUE for valid operands for the rhs of an FPU instruction. */
1078 fpu_rhs_operand (op
, mode
)
1080 enum machine_mode mode
;
1082 if (s_register_operand (op
, mode
))
1084 else if (GET_CODE (op
) == CONST_DOUBLE
)
1085 return (const_double_rtx_ok_for_fpu (op
));
1091 fpu_add_operand (op
, mode
)
1093 enum machine_mode mode
;
1095 if (s_register_operand (op
, mode
))
1097 else if (GET_CODE (op
) == CONST_DOUBLE
)
1098 return (const_double_rtx_ok_for_fpu (op
)
1099 || neg_const_double_rtx_ok_for_fpu (op
));
1104 /* Return nonzero if OP is a constant power of two. */
1107 power_of_two_operand (op
, mode
)
1109 enum machine_mode mode
;
1111 if (GET_CODE (op
) == CONST_INT
)
1113 HOST_WIDE_INT value
= INTVAL(op
);
1114 return value
!= 0 && (value
& (value
- 1)) == 0;
1119 /* Return TRUE for a valid operand of a DImode operation.
1120 Either: REG, CONST_DOUBLE or MEM(DImode_address).
1121 Note that this disallows MEM(REG+REG), but allows
1122 MEM(PRE/POST_INC/DEC(REG)). */
1125 di_operand (op
, mode
)
1127 enum machine_mode mode
;
1129 if (s_register_operand (op
, mode
))
1132 switch (GET_CODE (op
))
1139 return memory_address_p (DImode
, XEXP (op
, 0));
1146 /* Return TRUE for valid index operands. */
1149 index_operand (op
, mode
)
1151 enum machine_mode mode
;
1153 return (s_register_operand(op
, mode
)
1154 || (immediate_operand (op
, mode
)
1155 && INTVAL (op
) < 4096 && INTVAL (op
) > -4096));
1158 /* Return TRUE for valid shifts by a constant. This also accepts any
1159 power of two on the (somewhat overly relaxed) assumption that the
1160 shift operator in this case was a mult. */
1163 const_shift_operand (op
, mode
)
1165 enum machine_mode mode
;
1167 return (power_of_two_operand (op
, mode
)
1168 || (immediate_operand (op
, mode
)
1169 && (INTVAL (op
) < 32 && INTVAL (op
) > 0)));
1172 /* Return TRUE for arithmetic operators which can be combined with a multiply
1176 shiftable_operator (x
, mode
)
1178 enum machine_mode mode
;
1180 if (GET_MODE (x
) != mode
)
1184 enum rtx_code code
= GET_CODE (x
);
1186 return (code
== PLUS
|| code
== MINUS
1187 || code
== IOR
|| code
== XOR
|| code
== AND
);
1191 /* Return TRUE for shift operators. */
1194 shift_operator (x
, mode
)
1196 enum machine_mode mode
;
1198 if (GET_MODE (x
) != mode
)
1202 enum rtx_code code
= GET_CODE (x
);
1205 return power_of_two_operand (XEXP (x
, 1));
1207 return (code
== ASHIFT
|| code
== ASHIFTRT
|| code
== LSHIFTRT
1208 || code
== ROTATERT
);
1212 int equality_operator (x
, mode
)
1214 enum machine_mode mode
;
1216 return GET_CODE (x
) == EQ
|| GET_CODE (x
) == NE
;
1219 /* Return TRUE for SMIN SMAX UMIN UMAX operators. */
1222 minmax_operator (x
, mode
)
1224 enum machine_mode mode
;
1226 enum rtx_code code
= GET_CODE (x
);
1228 if (GET_MODE (x
) != mode
)
1231 return code
== SMIN
|| code
== SMAX
|| code
== UMIN
|| code
== UMAX
;
1234 /* Return TRUE if x is EQ or NE. */
1236 /* Return TRUE if this is the condition code register, if we aren't given
1237 a mode, accept any class CCmode register */
1240 cc_register (x
, mode
)
1242 enum machine_mode mode
;
1244 if (mode
== VOIDmode
)
1246 mode
= GET_MODE (x
);
1247 if (GET_MODE_CLASS (mode
) != MODE_CC
)
1251 if (mode
== GET_MODE (x
) && GET_CODE (x
) == REG
&& REGNO (x
) == 24)
1257 /* Return TRUE if this is the condition code register, if we aren't given
1258 a mode, accept any mode in class CC_MODE that is reversible */
1261 reversible_cc_register (x
, mode
)
1263 enum machine_mode mode
;
1265 if (mode
== VOIDmode
)
1267 mode
= GET_MODE (x
);
1268 if (GET_MODE_CLASS (mode
) != MODE_CC
1269 && GET_CODE (x
) == REG
&& REGNO (x
) == 24)
1271 if (GET_MODE_CLASS (mode
) != MODE_CC
1272 || (! flag_fast_math
&& ! REVERSIBLE_CC_MODE (mode
)))
1276 if (mode
== GET_MODE (x
) && GET_CODE (x
) == REG
&& REGNO (x
) == 24)
1286 enum rtx_code code
= GET_CODE (x
);
1290 else if (code
== SMIN
)
1292 else if (code
== UMIN
)
1294 else if (code
== UMAX
)
1300 /* Return 1 if memory locations are adjacent */
1303 adjacent_mem_locations (a
, b
)
1306 int val0
= 0, val1
= 0;
1309 if ((GET_CODE (XEXP (a
, 0)) == REG
1310 || (GET_CODE (XEXP (a
, 0)) == PLUS
1311 && GET_CODE (XEXP (XEXP (a
, 0), 1)) == CONST_INT
))
1312 && (GET_CODE (XEXP (b
, 0)) == REG
1313 || (GET_CODE (XEXP (b
, 0)) == PLUS
1314 && GET_CODE (XEXP (XEXP (b
, 0), 1)) == CONST_INT
)))
1316 if (GET_CODE (XEXP (a
, 0)) == PLUS
)
1318 reg0
= REGNO (XEXP (XEXP (a
, 0), 0));
1319 val0
= INTVAL (XEXP (XEXP (a
, 0), 1));
1322 reg0
= REGNO (XEXP (a
, 0));
1323 if (GET_CODE (XEXP (b
, 0)) == PLUS
)
1325 reg1
= REGNO (XEXP (XEXP (b
, 0), 0));
1326 val1
= INTVAL (XEXP (XEXP (b
, 0), 1));
1329 reg1
= REGNO (XEXP (b
, 0));
1330 return (reg0
== reg1
) && ((val1
- val0
) == 4 || (val0
- val1
) == 4);
1335 /* Return 1 if OP is a load multiple operation. It is known to be
1336 parallel and the first section will be tested. */
1339 load_multiple_operation (op
, mode
)
1341 enum machine_mode mode
;
1343 HOST_WIDE_INT count
= XVECLEN (op
, 0);
1346 HOST_WIDE_INT i
= 1, base
= 0;
1350 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
)
1353 /* Check to see if this might be a write-back */
1354 if (GET_CODE (SET_SRC (elt
= XVECEXP (op
, 0, 0))) == PLUS
)
1359 /* Now check it more carefully */
1360 if (GET_CODE (SET_DEST (elt
)) != REG
1361 || GET_CODE (XEXP (SET_SRC (elt
), 0)) != REG
1362 || REGNO (XEXP (SET_SRC (elt
), 0)) != REGNO (SET_DEST (elt
))
1363 || GET_CODE (XEXP (SET_SRC (elt
), 1)) != CONST_INT
1364 || INTVAL (XEXP (SET_SRC (elt
), 1)) != (count
- 2) * 4
1365 || GET_CODE (XVECEXP (op
, 0, count
- 1)) != CLOBBER
1366 || GET_CODE (XEXP (XVECEXP (op
, 0, count
- 1), 0)) != REG
1367 || REGNO (XEXP (XVECEXP (op
, 0, count
- 1), 0))
1368 != REGNO (SET_DEST (elt
)))
1374 /* Perform a quick check so we don't blow up below. */
1376 || GET_CODE (XVECEXP (op
, 0, i
- 1)) != SET
1377 || GET_CODE (SET_DEST (XVECEXP (op
, 0, i
- 1))) != REG
1378 || GET_CODE (SET_SRC (XVECEXP (op
, 0, i
- 1))) != MEM
)
1381 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, i
- 1)));
1382 src_addr
= XEXP (SET_SRC (XVECEXP (op
, 0, i
- 1)), 0);
1384 for (; i
< count
; i
++)
1386 rtx elt
= XVECEXP (op
, 0, i
);
1388 if (GET_CODE (elt
) != SET
1389 || GET_CODE (SET_DEST (elt
)) != REG
1390 || GET_MODE (SET_DEST (elt
)) != SImode
1391 || REGNO (SET_DEST (elt
)) != dest_regno
+ i
- base
1392 || GET_CODE (SET_SRC (elt
)) != MEM
1393 || GET_MODE (SET_SRC (elt
)) != SImode
1394 || GET_CODE (XEXP (SET_SRC (elt
), 0)) != PLUS
1395 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt
), 0), 0), src_addr
)
1396 || GET_CODE (XEXP (XEXP (SET_SRC (elt
), 0), 1)) != CONST_INT
1397 || INTVAL (XEXP (XEXP (SET_SRC (elt
), 0), 1)) != (i
- base
) * 4)
1404 /* Return 1 if OP is a store multiple operation. It is known to be
1405 parallel and the first section will be tested. */
1408 store_multiple_operation (op
, mode
)
1410 enum machine_mode mode
;
1412 HOST_WIDE_INT count
= XVECLEN (op
, 0);
1415 HOST_WIDE_INT i
= 1, base
= 0;
1419 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
)
1422 /* Check to see if this might be a write-back */
1423 if (GET_CODE (SET_SRC (elt
= XVECEXP (op
, 0, 0))) == PLUS
)
1428 /* Now check it more carefully */
1429 if (GET_CODE (SET_DEST (elt
)) != REG
1430 || GET_CODE (XEXP (SET_SRC (elt
), 0)) != REG
1431 || REGNO (XEXP (SET_SRC (elt
), 0)) != REGNO (SET_DEST (elt
))
1432 || GET_CODE (XEXP (SET_SRC (elt
), 1)) != CONST_INT
1433 || INTVAL (XEXP (SET_SRC (elt
), 1)) != (count
- 2) * 4
1434 || GET_CODE (XVECEXP (op
, 0, count
- 1)) != CLOBBER
1435 || GET_CODE (XEXP (XVECEXP (op
, 0, count
- 1), 0)) != REG
1436 || REGNO (XEXP (XVECEXP (op
, 0, count
- 1), 0))
1437 != REGNO (SET_DEST (elt
)))
1443 /* Perform a quick check so we don't blow up below. */
1445 || GET_CODE (XVECEXP (op
, 0, i
- 1)) != SET
1446 || GET_CODE (SET_DEST (XVECEXP (op
, 0, i
- 1))) != MEM
1447 || GET_CODE (SET_SRC (XVECEXP (op
, 0, i
- 1))) != REG
)
1450 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, i
- 1)));
1451 dest_addr
= XEXP (SET_DEST (XVECEXP (op
, 0, i
- 1)), 0);
1453 for (; i
< count
; i
++)
1455 elt
= XVECEXP (op
, 0, i
);
1457 if (GET_CODE (elt
) != SET
1458 || GET_CODE (SET_SRC (elt
)) != REG
1459 || GET_MODE (SET_SRC (elt
)) != SImode
1460 || REGNO (SET_SRC (elt
)) != src_regno
+ i
- base
1461 || GET_CODE (SET_DEST (elt
)) != MEM
1462 || GET_MODE (SET_DEST (elt
)) != SImode
1463 || GET_CODE (XEXP (SET_DEST (elt
), 0)) != PLUS
1464 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt
), 0), 0), dest_addr
)
1465 || GET_CODE (XEXP (XEXP (SET_DEST (elt
), 0), 1)) != CONST_INT
1466 || INTVAL (XEXP (XEXP (SET_DEST (elt
), 0), 1)) != (i
- base
) * 4)
1474 multi_register_push (op
, mode
)
1476 enum machine_mode mode
;
1478 if (GET_CODE (op
) != PARALLEL
1479 || (GET_CODE (XVECEXP (op
, 0, 0)) != SET
)
1480 || (GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) != UNSPEC
)
1481 || (XINT (SET_SRC (XVECEXP (op
, 0, 0)), 1) != 2))
1488 /* Routines for use with attributes */
1491 const_pool_offset (symbol
)
1494 return get_pool_offset (symbol
) - get_pool_size () - get_prologue_size ();
1497 /* Routines for use in generating RTL */
1500 arm_gen_load_multiple (base_regno
, count
, from
, up
, write_back
)
1509 int sign
= up
? 1 : -1;
1511 result
= gen_rtx (PARALLEL
, VOIDmode
,
1512 rtvec_alloc (count
+ (write_back
? 2 : 0)));
1515 XVECEXP (result
, 0, 0)
1516 = gen_rtx (SET
, GET_MODE (from
), from
,
1517 plus_constant (from
, count
* 4 * sign
));
1522 for (j
= 0; i
< count
; i
++, j
++)
1524 XVECEXP (result
, 0, i
)
1525 = gen_rtx (SET
, VOIDmode
, gen_rtx (REG
, SImode
, base_regno
+ j
),
1526 gen_rtx (MEM
, SImode
,
1527 plus_constant (from
, j
* 4 * sign
)));
1531 XVECEXP (result
, 0, i
) = gen_rtx (CLOBBER
, SImode
, from
);
1537 arm_gen_store_multiple (base_regno
, count
, to
, up
, write_back
)
1546 int sign
= up
? 1 : -1;
1548 result
= gen_rtx (PARALLEL
, VOIDmode
,
1549 rtvec_alloc (count
+ (write_back
? 2 : 0)));
1552 XVECEXP (result
, 0, 0)
1553 = gen_rtx (SET
, GET_MODE (to
), to
,
1554 plus_constant (to
, count
* 4 * sign
));
1559 for (j
= 0; i
< count
; i
++, j
++)
1561 XVECEXP (result
, 0, i
)
1562 = gen_rtx (SET
, VOIDmode
,
1563 gen_rtx (MEM
, SImode
, plus_constant (to
, j
* 4 * sign
)),
1564 gen_rtx (REG
, SImode
, base_regno
+ j
));
1568 XVECEXP (result
, 0, i
) = gen_rtx (CLOBBER
, SImode
, to
);
1574 arm_gen_movstrqi (operands
)
1577 HOST_WIDE_INT in_words_to_go
, out_words_to_go
, last_bytes
;
1579 rtx const_sxteen
= gen_rtx (CONST_INT
, SImode
, 16);
1581 rtx st_src
, st_dst
, end_src
, end_dst
, fin_src
, fin_dst
;
1582 rtx part_bytes_reg
= NULL
;
1583 extern int optimize
;
1585 if (GET_CODE (operands
[2]) != CONST_INT
1586 || GET_CODE (operands
[3]) != CONST_INT
1587 || INTVAL (operands
[2]) > 64
1588 || INTVAL (operands
[3]) & 3)
1591 st_dst
= XEXP (operands
[0], 0);
1592 st_src
= XEXP (operands
[1], 0);
1593 fin_dst
= dst
= copy_to_mode_reg (SImode
, st_dst
);
1594 fin_src
= src
= copy_to_mode_reg (SImode
, st_src
);
1596 in_words_to_go
= (INTVAL (operands
[2]) + 3) / 4;
1597 out_words_to_go
= INTVAL (operands
[2]) / 4;
1598 last_bytes
= INTVAL (operands
[2]) & 3;
1600 if (out_words_to_go
!= in_words_to_go
&& ((in_words_to_go
- 1) & 3) != 0)
1601 part_bytes_reg
= gen_rtx (REG
, SImode
, (in_words_to_go
- 1) & 3);
1603 for (i
= 0; in_words_to_go
>= 2; i
+=4)
1605 emit_insn (arm_gen_load_multiple (0, (in_words_to_go
> 4
1606 ? 4 : in_words_to_go
),
1608 if (out_words_to_go
)
1610 if (out_words_to_go
!= 1)
1611 emit_insn (arm_gen_store_multiple (0, (out_words_to_go
> 4
1612 ? 4 : out_words_to_go
),
1616 emit_move_insn (gen_rtx (MEM
, SImode
, dst
),
1617 gen_rtx (REG
, SImode
, 0));
1618 emit_insn (gen_addsi3 (dst
, dst
, GEN_INT (4)));
1622 in_words_to_go
-= in_words_to_go
< 4 ? in_words_to_go
: 4;
1623 out_words_to_go
-= out_words_to_go
< 4 ? out_words_to_go
: 4;
1626 /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do. */
1627 if (out_words_to_go
)
1631 emit_move_insn (sreg
= gen_reg_rtx (SImode
), gen_rtx (MEM
, SImode
, src
));
1632 emit_move_insn (fin_src
= gen_reg_rtx (SImode
), plus_constant (src
, 4));
1633 emit_move_insn (gen_rtx (MEM
, SImode
, dst
), sreg
);
1634 emit_move_insn (fin_dst
= gen_reg_rtx (SImode
), plus_constant (dst
, 4));
1637 if (in_words_to_go
) /* Sanity check */
1643 if (in_words_to_go
< 0)
1646 part_bytes_reg
= copy_to_mode_reg (SImode
, gen_rtx (MEM
, SImode
, src
));
1647 emit_insn (gen_addsi3 (src
, src
, GEN_INT (4)));
1650 if (BYTES_BIG_ENDIAN
&& last_bytes
)
1652 rtx tmp
= gen_reg_rtx (SImode
);
1654 if (part_bytes_reg
== NULL
)
1657 /* The bytes we want are in the top end of the word */
1658 emit_insn (gen_lshrsi3 (tmp
, part_bytes_reg
,
1659 GEN_INT (8 * (4 - last_bytes
))));
1660 part_bytes_reg
= tmp
;
1664 emit_move_insn (gen_rtx (MEM
, QImode
,
1665 plus_constant (dst
, last_bytes
- 1)),
1666 gen_rtx (SUBREG
, QImode
, part_bytes_reg
, 0));
1669 tmp
= gen_reg_rtx (SImode
);
1670 emit_insn (gen_lshrsi3 (tmp
, part_bytes_reg
, GEN_INT (8)));
1671 part_bytes_reg
= tmp
;
1680 if (part_bytes_reg
== NULL
)
1683 emit_move_insn (gen_rtx (MEM
, QImode
, dst
),
1684 gen_rtx (SUBREG
, QImode
, part_bytes_reg
, 0));
1685 emit_insn (gen_addsi3 (dst
, dst
, const1_rtx
));
1688 rtx tmp
= gen_reg_rtx (SImode
);
1689 emit_insn (gen_lshrsi3 (tmp
, part_bytes_reg
, GEN_INT (8)));
1690 part_bytes_reg
= tmp
;
1698 /* X and Y are two things to compare using CODE. Emit the compare insn and
1699 return the rtx for register 0 in the proper mode. FP means this is a
1700 floating point compare: I don't think that it is needed on the arm. */
1703 gen_compare_reg (code
, x
, y
, fp
)
1707 enum machine_mode mode
= SELECT_CC_MODE (code
, x
, y
);
1708 rtx cc_reg
= gen_rtx (REG
, mode
, 24);
1710 emit_insn (gen_rtx (SET
, VOIDmode
, cc_reg
,
1711 gen_rtx (COMPARE
, mode
, x
, y
)));
1717 arm_reload_in_hi (operands
)
1720 rtx base
= find_replacement (&XEXP (operands
[1], 0));
1722 emit_insn (gen_zero_extendqisi2 (operands
[2], gen_rtx (MEM
, QImode
, base
)));
1723 emit_insn (gen_zero_extendqisi2 (gen_rtx (SUBREG
, SImode
, operands
[0], 0),
1724 gen_rtx (MEM
, QImode
,
1725 plus_constant (base
, 1))));
1726 if (BYTES_BIG_ENDIAN
)
1727 emit_insn (gen_rtx (SET
, VOIDmode
, gen_rtx (SUBREG
, SImode
,
1729 gen_rtx (IOR
, SImode
,
1730 gen_rtx (ASHIFT
, SImode
,
1731 gen_rtx (SUBREG
, SImode
,
1736 emit_insn (gen_rtx (SET
, VOIDmode
, gen_rtx (SUBREG
, SImode
,
1738 gen_rtx (IOR
, SImode
,
1739 gen_rtx (ASHIFT
, SImode
,
1742 gen_rtx (SUBREG
, SImode
, operands
[0], 0))));
1746 arm_reload_out_hi (operands
)
1749 rtx base
= find_replacement (&XEXP (operands
[0], 0));
1751 if (BYTES_BIG_ENDIAN
)
1753 emit_insn (gen_movqi (gen_rtx (MEM
, QImode
, plus_constant (base
, 1)),
1754 gen_rtx (SUBREG
, QImode
, operands
[1], 0)));
1755 emit_insn (gen_lshrsi3 (operands
[2],
1756 gen_rtx (SUBREG
, SImode
, operands
[1], 0),
1758 emit_insn (gen_movqi (gen_rtx (MEM
, QImode
, base
),
1759 gen_rtx (SUBREG
, QImode
, operands
[2], 0)));
1763 emit_insn (gen_movqi (gen_rtx (MEM
, QImode
, base
),
1764 gen_rtx (SUBREG
, QImode
, operands
[1], 0)));
1765 emit_insn (gen_lshrsi3 (operands
[2],
1766 gen_rtx (SUBREG
, SImode
, operands
[1], 0),
1768 emit_insn (gen_movqi (gen_rtx (MEM
, QImode
, plus_constant (base
, 1)),
1769 gen_rtx (SUBREG
, QImode
, operands
[2], 0)));
1773 /* Check to see if a branch is forwards or backwards. Return TRUE if it
1777 arm_backwards_branch (from
, to
)
1780 return insn_addresses
[to
] <= insn_addresses
[from
];
1783 /* Check to see if a branch is within the distance that can be done using
1784 an arithmetic expression. */
1786 short_branch (from
, to
)
1789 int delta
= insn_addresses
[from
] + 8 - insn_addresses
[to
];
1791 return abs (delta
) < 980; /* A small margin for safety */
1794 /* Check to see that the insn isn't the target of the conditionalizing
1797 arm_insn_not_targeted (insn
)
1800 return insn
!= arm_target_insn
;
1804 /* Routines to output assembly language. */
1806 /* If the rtx is the correct value then return the string of the number.
1807 In this way we can ensure that valid double constants are generated even
1808 when cross compiling. */
1810 fp_immediate_constant (x
)
1816 if (!fpa_consts_inited
)
1819 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
1820 for (i
= 0; i
< 8; i
++)
1821 if (REAL_VALUES_EQUAL (r
, values_fpa
[i
]))
1822 return strings_fpa
[i
];
1827 /* As for fp_immediate_constant, but value is passed directly, not in rtx. */
1829 fp_const_from_val (r
)
1834 if (! fpa_consts_inited
)
1837 for (i
= 0; i
< 8; i
++)
1838 if (REAL_VALUES_EQUAL (*r
, values_fpa
[i
]))
1839 return strings_fpa
[i
];
1844 /* Output the operands of a LDM/STM instruction to STREAM.
1845 MASK is the ARM register set mask of which only bits 0-15 are important.
1846 INSTR is the possibly suffixed base register. HAT unequals zero if a hat
1847 must follow the register list. */
1850 print_multi_reg (stream
, instr
, mask
, hat
)
1856 int not_first
= FALSE
;
1858 fputc ('\t', stream
);
1859 fprintf (stream
, instr
, ARM_REG_PREFIX
);
1860 fputs (", {", stream
);
1861 for (i
= 0; i
< 16; i
++)
1862 if (mask
& (1 << i
))
1865 fprintf (stream
, ", ");
1866 fprintf (stream
, "%s%s", ARM_REG_PREFIX
, reg_names
[i
]);
1870 fprintf (stream
, "}%s\n", hat
? "^" : "");
1873 /* Output a 'call' insn. */
1876 output_call (operands
)
1879 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
1881 if (REGNO (operands
[0]) == 14)
1883 operands
[0] = gen_rtx (REG
, SImode
, 12);
1884 output_asm_insn ("mov%?\t%0, %|lr", operands
);
1886 output_asm_insn ("mov%?\t%|lr, %|pc", operands
);
1887 output_asm_insn ("mov%?\t%|pc, %0", operands
);
1895 int something_changed
= 0;
1897 int code
= GET_CODE (x0
);
1904 if (REGNO (x0
) == 14)
1906 *x
= gen_rtx (REG
, SImode
, 12);
1911 /* Scan through the sub-elements and change any references there */
1912 fmt
= GET_RTX_FORMAT (code
);
1913 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
1915 something_changed
|= eliminate_lr2ip (&XEXP (x0
, i
));
1916 else if (fmt
[i
] == 'E')
1917 for (j
= 0; j
< XVECLEN (x0
, i
); j
++)
1918 something_changed
|= eliminate_lr2ip (&XVECEXP (x0
, i
, j
));
1919 return something_changed
;
1923 /* Output a 'call' insn that is a reference in memory. */
1926 output_call_mem (operands
)
1929 operands
[0] = copy_rtx (operands
[0]); /* Be ultra careful */
1930 /* Handle calls using lr by using ip (which may be clobbered in subr anyway).
1932 if (eliminate_lr2ip (&operands
[0]))
1933 output_asm_insn ("mov%?\t%|ip, %|lr", operands
);
1935 output_asm_insn ("mov%?\t%|lr, %|pc", operands
);
1936 output_asm_insn ("ldr%?\t%|pc, %0", operands
);
1941 /* Output a move from arm registers to an fpu registers.
1942 OPERANDS[0] is an fpu register.
1943 OPERANDS[1] is the first registers of an arm register pair. */
1946 output_mov_long_double_fpu_from_arm (operands
)
1949 int arm_reg0
= REGNO (operands
[1]);
1955 ops
[0] = gen_rtx (REG
, SImode
, arm_reg0
);
1956 ops
[1] = gen_rtx (REG
, SImode
, 1 + arm_reg0
);
1957 ops
[2] = gen_rtx (REG
, SImode
, 2 + arm_reg0
);
1959 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops
);
1960 output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands
);
1964 /* Output a move from an fpu register to arm registers.
1965 OPERANDS[0] is the first registers of an arm register pair.
1966 OPERANDS[1] is an fpu register. */
1969 output_mov_long_double_arm_from_fpu (operands
)
1972 int arm_reg0
= REGNO (operands
[0]);
1978 ops
[0] = gen_rtx (REG
, SImode
, arm_reg0
);
1979 ops
[1] = gen_rtx (REG
, SImode
, 1 + arm_reg0
);
1980 ops
[2] = gen_rtx (REG
, SImode
, 2 + arm_reg0
);
1982 output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands
);
1983 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops
);
1987 /* Output a move from arm registers to arm registers of a long double
1988 OPERANDS[0] is the destination.
1989 OPERANDS[1] is the source. */
1991 output_mov_long_double_arm_from_arm (operands
)
1994 /* We have to be careful here because the two might overlap */
1995 int dest_start
= REGNO (operands
[0]);
1996 int src_start
= REGNO (operands
[1]);
2000 if (dest_start
< src_start
)
2002 for (i
= 0; i
< 3; i
++)
2004 ops
[0] = gen_rtx (REG
, SImode
, dest_start
+ i
);
2005 ops
[1] = gen_rtx (REG
, SImode
, src_start
+ i
);
2006 output_asm_insn ("mov%?\t%0, %1", ops
);
2011 for (i
= 2; i
>= 0; i
--)
2013 ops
[0] = gen_rtx (REG
, SImode
, dest_start
+ i
);
2014 ops
[1] = gen_rtx (REG
, SImode
, src_start
+ i
);
2015 output_asm_insn ("mov%?\t%0, %1", ops
);
2023 /* Output a move from arm registers to an fpu registers.
2024 OPERANDS[0] is an fpu register.
2025 OPERANDS[1] is the first registers of an arm register pair. */
2028 output_mov_double_fpu_from_arm (operands
)
2031 int arm_reg0
= REGNO (operands
[1]);
2036 ops
[0] = gen_rtx (REG
, SImode
, arm_reg0
);
2037 ops
[1] = gen_rtx (REG
, SImode
, 1 + arm_reg0
);
2038 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops
);
2039 output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands
);
2043 /* Output a move from an fpu register to arm registers.
2044 OPERANDS[0] is the first registers of an arm register pair.
2045 OPERANDS[1] is an fpu register. */
2048 output_mov_double_arm_from_fpu (operands
)
2051 int arm_reg0
= REGNO (operands
[0]);
2057 ops
[0] = gen_rtx (REG
, SImode
, arm_reg0
);
2058 ops
[1] = gen_rtx (REG
, SImode
, 1 + arm_reg0
);
2059 output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands
);
2060 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops
);
2064 /* Output a move between double words.
2065 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
2066 or MEM<-REG and all MEMs must be offsettable addresses. */
2069 output_move_double (operands
)
2072 enum rtx_code code0
= GET_CODE (operands
[0]);
2073 enum rtx_code code1
= GET_CODE (operands
[1]);
2078 int reg0
= REGNO (operands
[0]);
2080 otherops
[0] = gen_rtx (REG
, SImode
, 1 + reg0
);
2083 int reg1
= REGNO (operands
[1]);
2087 otherops
[1] = gen_rtx (REG
, SImode
, 1 + reg1
);
2089 /* Ensure the second source is not overwritten */
2090 if (reg0
== 1 + reg1
)
2092 output_asm_insn("mov%?\t%0, %1", otherops
);
2093 output_asm_insn("mov%?\t%0, %1", operands
);
2097 output_asm_insn("mov%?\t%0, %1", operands
);
2098 output_asm_insn("mov%?\t%0, %1", otherops
);
2101 else if (code1
== CONST_DOUBLE
)
2103 otherops
[1] = gen_rtx (CONST_INT
, VOIDmode
,
2104 CONST_DOUBLE_HIGH (operands
[1]));
2105 operands
[1] = gen_rtx (CONST_INT
, VOIDmode
,
2106 CONST_DOUBLE_LOW (operands
[1]));
2107 output_mov_immediate (operands
, FALSE
, "");
2108 output_mov_immediate (otherops
, FALSE
, "");
2110 else if (code1
== CONST_INT
)
2112 otherops
[1] = const0_rtx
;
2113 /* sign extend the intval into the high-order word */
2114 /* Note: output_mov_immediate may clobber operands[1], so we
2115 put this out first */
2116 if (INTVAL (operands
[1]) < 0)
2117 output_asm_insn ("mvn%?\t%0, %1", otherops
);
2119 output_asm_insn ("mov%?\t%0, %1", otherops
);
2120 output_mov_immediate (operands
, FALSE
, "");
2122 else if (code1
== MEM
)
2124 switch (GET_CODE (XEXP (operands
[1], 0)))
2127 /* Handle the simple case where address is [r, #0] more
2129 output_asm_insn ("ldm%?ia\t%m1, %M0", operands
);
2132 output_asm_insn ("add%?\t%m1, %m1, #8", operands
);
2133 output_asm_insn ("ldm%?ia\t%m1, %M0", operands
);
2136 output_asm_insn ("sub%?\t%m1, %m1, #8", operands
);
2137 output_asm_insn ("ldm%?ia\t%m1, %M0", operands
);
2140 output_asm_insn ("ldm%?ia\t%m1!, %M0", operands
);
2143 output_asm_insn ("ldm%?ia\t%m1, %M0", operands
);
2144 output_asm_insn ("sub%?\t%m1, %m1, #8", operands
);
2147 otherops
[1] = adj_offsettable_operand (operands
[1], 4);
2148 /* Take care of overlapping base/data reg. */
2149 if (reg_mentioned_p (operands
[0], operands
[1]))
2151 output_asm_insn ("ldr%?\t%0, %1", otherops
);
2152 output_asm_insn ("ldr%?\t%0, %1", operands
);
2156 output_asm_insn ("ldr%?\t%0, %1", operands
);
2157 output_asm_insn ("ldr%?\t%0, %1", otherops
);
2161 else abort(); /* Constraints should prevent this */
2163 else if (code0
== MEM
&& code1
== REG
)
2165 if (REGNO (operands
[1]) == 12)
2167 switch (GET_CODE (XEXP (operands
[0], 0)))
2170 output_asm_insn ("stm%?ia\t%m0, %M1", operands
);
2173 output_asm_insn ("add%?\t%m0, %m0, #8", operands
);
2174 output_asm_insn ("stm%?ia\t%m0, %M1", operands
);
2177 output_asm_insn ("sub%?\t%m0, %m0, #8", operands
);
2178 output_asm_insn ("stm%?ia\t%m0, %M1", operands
);
2181 output_asm_insn ("stm%?ia\t%m0!, %M1", operands
);
2184 output_asm_insn ("stm%?ia\t%m0, %M1", operands
);
2185 output_asm_insn ("sub%?\t%m0, %m0, #8", operands
);
2188 otherops
[0] = adj_offsettable_operand (operands
[0], 4);
2189 otherops
[1] = gen_rtx (REG
, SImode
, 1 + REGNO (operands
[1]));
2190 output_asm_insn ("str%?\t%1, %0", operands
);
2191 output_asm_insn ("str%?\t%1, %0", otherops
);
2194 else abort(); /* Constraints should prevent this */
2200 /* Output an arbitrary MOV reg, #n.
2201 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
2204 output_mov_immediate (operands
)
2207 HOST_WIDE_INT n
= INTVAL (operands
[1]);
2211 /* Try to use one MOV */
2212 if (const_ok_for_arm (n
))
2214 output_asm_insn ("mov%?\t%0, %1", operands
);
2218 /* Try to use one MVN */
2219 if (const_ok_for_arm (~n
))
2221 operands
[1] = GEN_INT (~n
);
2222 output_asm_insn ("mvn%?\t%0, %1", operands
);
2226 /* If all else fails, make it out of ORRs or BICs as appropriate. */
2228 for (i
=0; i
< 32; i
++)
2232 if (n_ones
> 16) /* Shorter to use MVN with BIC in this case. */
2233 output_multi_immediate(operands
, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1,
2236 output_multi_immediate(operands
, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1,
2243 /* Output an ADD r, s, #n where n may be too big for one instruction. If
2244 adding zero to one register, output nothing. */
2247 output_add_immediate (operands
)
2250 HOST_WIDE_INT n
= INTVAL (operands
[2]);
2252 if (n
!= 0 || REGNO (operands
[0]) != REGNO (operands
[1]))
2255 output_multi_immediate (operands
,
2256 "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
2259 output_multi_immediate (operands
,
2260 "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
2267 /* Output a multiple immediate operation.
2268 OPERANDS is the vector of operands referred to in the output patterns.
2269 INSTR1 is the output pattern to use for the first constant.
2270 INSTR2 is the output pattern to use for subsequent constants.
2271 IMMED_OP is the index of the constant slot in OPERANDS.
2272 N is the constant value. */
2275 output_multi_immediate (operands
, instr1
, instr2
, immed_op
, n
)
2277 char *instr1
, *instr2
;
2281 #if HOST_BITS_PER_WIDE_INT > 32
2287 operands
[immed_op
] = const0_rtx
;
2288 output_asm_insn (instr1
, operands
); /* Quick and easy output */
2293 char *instr
= instr1
;
2295 /* Note that n is never zero here (which would give no output) */
2296 for (i
= 0; i
< 32; i
+= 2)
2300 operands
[immed_op
] = GEN_INT (n
& (255 << i
));
2301 output_asm_insn (instr
, operands
);
2311 /* Return the appropriate ARM instruction for the operation code.
2312 The returned result should not be overwritten. OP is the rtx of the
2313 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
2317 arithmetic_instr (op
, shift_first_arg
)
2319 int shift_first_arg
;
2321 switch (GET_CODE (op
))
2327 return shift_first_arg
? "rsb" : "sub";
2344 /* Ensure valid constant shifts and return the appropriate shift mnemonic
2345 for the operation code. The returned result should not be overwritten.
2346 OP is the rtx code of the shift.
2347 On exit, *AMOUNTP will be -1 if the shift is by a register, or a constant
2351 shift_op (op
, amountp
)
2353 HOST_WIDE_INT
*amountp
;
2356 enum rtx_code code
= GET_CODE (op
);
2358 if (GET_CODE (XEXP (op
, 1)) == REG
|| GET_CODE (XEXP (op
, 1)) == SUBREG
)
2360 else if (GET_CODE (XEXP (op
, 1)) == CONST_INT
)
2361 *amountp
= INTVAL (XEXP (op
, 1));
2384 /* We never have to worry about the amount being other than a
2385 power of 2, since this case can never be reloaded from a reg. */
2387 *amountp
= int_log2 (*amountp
);
2398 /* This is not 100% correct, but follows from the desire to merge
2399 multiplication by a power of 2 with the recognizer for a
2400 shift. >=32 is not a valid shift for "asl", so we must try and
2401 output a shift that produces the correct arithmetical result.
2402 Using lsr #32 is idendical except for the fact that the carry bit
2403 is not set correctly if we set the flags; but we never use the
2404 carry bit from such an operation, so we can ignore that. */
2405 if (code
== ROTATERT
)
2406 *amountp
&= 31; /* Rotate is just modulo 32 */
2407 else if (*amountp
!= (*amountp
& 31))
2414 /* Shifts of 0 are no-ops. */
2423 /* Obtain the shift from the POWER of two. */
2427 HOST_WIDE_INT power
;
2429 HOST_WIDE_INT shift
= 0;
2431 while (((1 << shift
) & power
) == 0)
2441 /* Output a .ascii pseudo-op, keeping track of lengths. This is because
2442 /bin/as is horribly restrictive. */
2445 output_ascii_pseudo_op (stream
, p
, len
)
2451 int len_so_far
= 1000;
2452 int chars_so_far
= 0;
2454 for (i
= 0; i
< len
; i
++)
2456 register int c
= p
[i
];
2458 if (len_so_far
> 50)
2461 fputs ("\"\n", stream
);
2462 fputs ("\t.ascii\t\"", stream
);
2464 arm_increase_location (chars_so_far
);
2468 if (c
== '\"' || c
== '\\')
2474 if (c
>= ' ' && c
< 0177)
2481 fprintf (stream
, "\\%03o", c
);
2488 fputs ("\"\n", stream
);
2489 arm_increase_location (chars_so_far
);
2493 /* Try to determine whether a pattern really clobbers the link register.
2494 This information is useful when peepholing, so that lr need not be pushed
2495 if we combine a call followed by a return.
2496 NOTE: This code does not check for side-effect expressions in a SET_SRC:
2497 such a check should not be needed because these only update an existing
2498 value within a register; the register must still be set elsewhere within
2502 pattern_really_clobbers_lr (x
)
2507 switch (GET_CODE (x
))
2510 switch (GET_CODE (SET_DEST (x
)))
2513 return REGNO (SET_DEST (x
)) == 14;
2516 if (GET_CODE (XEXP (SET_DEST (x
), 0)) == REG
)
2517 return REGNO (XEXP (SET_DEST (x
), 0)) == 14;
2519 if (GET_CODE (XEXP (SET_DEST (x
), 0)) == MEM
)
2528 for (i
= 0; i
< XVECLEN (x
, 0); i
++)
2529 if (pattern_really_clobbers_lr (XVECEXP (x
, 0, i
)))
2534 switch (GET_CODE (XEXP (x
, 0)))
2537 return REGNO (XEXP (x
, 0)) == 14;
2540 if (GET_CODE (XEXP (XEXP (x
, 0), 0)) == REG
)
2541 return REGNO (XEXP (XEXP (x
, 0), 0)) == 14;
2557 function_really_clobbers_lr (first
)
2562 for (insn
= first
; insn
; insn
= next_nonnote_insn (insn
))
2564 switch (GET_CODE (insn
))
2569 case JUMP_INSN
: /* Jump insns only change the PC (and conds) */
2574 if (pattern_really_clobbers_lr (PATTERN (insn
)))
2579 /* Don't yet know how to handle those calls that are not to a
2581 if (GET_CODE (PATTERN (insn
)) != PARALLEL
)
2584 switch (GET_CODE (XVECEXP (PATTERN (insn
), 0, 0)))
2587 if (GET_CODE (XEXP (XEXP (XVECEXP (PATTERN (insn
), 0, 0), 0), 0))
2593 if (GET_CODE (XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn
),
2599 default: /* Don't recognize it, be safe */
2603 /* A call can be made (by peepholing) not to clobber lr iff it is
2604 followed by a return. There may, however, be a use insn iff
2605 we are returning the result of the call.
2606 If we run off the end of the insn chain, then that means the
2607 call was at the end of the function. Unfortunately we don't
2608 have a return insn for the peephole to recognize, so we
2609 must reject this. (Can this be fixed by adding our own insn?) */
2610 if ((next
= next_nonnote_insn (insn
)) == NULL
)
2613 if (GET_CODE (next
) == INSN
&& GET_CODE (PATTERN (next
)) == USE
2614 && (GET_CODE (XVECEXP (PATTERN (insn
), 0, 0)) == SET
)
2615 && (REGNO (SET_DEST (XVECEXP (PATTERN (insn
), 0, 0)))
2616 == REGNO (XEXP (PATTERN (next
), 0))))
2617 if ((next
= next_nonnote_insn (next
)) == NULL
)
2620 if (GET_CODE (next
) == JUMP_INSN
2621 && GET_CODE (PATTERN (next
)) == RETURN
)
2630 /* We have reached the end of the chain so lr was _not_ clobbered */
2635 output_return_instruction (operand
, really_return
)
2640 int reg
, live_regs
= 0;
2641 int volatile_func
= (optimize
> 0
2642 && TREE_THIS_VOLATILE (current_function_decl
));
2644 return_used_this_function
= 1;
2649 /* If this function was declared non-returning, and we have found a tail
2650 call, then we have to trust that the called function won't return. */
2651 if (! really_return
)
2654 /* Otherwise, trap an attempted return by aborting. */
2656 ops
[1] = gen_rtx (SYMBOL_REF
, Pmode
, "abort");
2657 output_asm_insn ("bl%d0\t%a1", ops
);
2661 if (current_function_calls_alloca
&& ! really_return
)
2664 for (reg
= 0; reg
<= 10; reg
++)
2665 if (regs_ever_live
[reg
] && ! call_used_regs
[reg
])
2668 if (live_regs
|| (regs_ever_live
[14] && ! lr_save_eliminated
))
2671 if (frame_pointer_needed
)
2676 if (lr_save_eliminated
|| ! regs_ever_live
[14])
2679 if (frame_pointer_needed
)
2680 strcpy (instr
, "ldm%?%d0ea\t%|fp, {");
2682 strcpy (instr
, "ldm%?%d0fd\t%|sp!, {");
2684 for (reg
= 0; reg
<= 10; reg
++)
2685 if (regs_ever_live
[reg
] && ! call_used_regs
[reg
])
2687 strcat (instr
, "%|");
2688 strcat (instr
, reg_names
[reg
]);
2690 strcat (instr
, ", ");
2693 if (frame_pointer_needed
)
2695 strcat (instr
, "%|");
2696 strcat (instr
, reg_names
[11]);
2697 strcat (instr
, ", ");
2698 strcat (instr
, "%|");
2699 strcat (instr
, reg_names
[13]);
2700 strcat (instr
, ", ");
2701 strcat (instr
, "%|");
2702 strcat (instr
, really_return
? reg_names
[15] : reg_names
[14]);
2706 strcat (instr
, "%|");
2707 strcat (instr
, really_return
? reg_names
[15] : reg_names
[14]);
2709 strcat (instr
, (TARGET_6
|| !really_return
) ? "}" : "}^");
2710 output_asm_insn (instr
, &operand
);
2712 else if (really_return
)
2715 TARGET_6
? "mov%?%d0\t%|pc, lr" : "mov%?%d0s\t%|pc, %|lr");
2716 output_asm_insn (instr
, &operand
);
2723 arm_volatile_func ()
2725 return (optimize
> 0 && TREE_THIS_VOLATILE (current_function_decl
));
2728 /* Return the size of the prologue. It's not too bad if we slightly
2732 get_prologue_size ()
2734 return profile_flag
? 12 : 0;
2737 /* The amount of stack adjustment that happens here, in output_return and in
2738 output_epilogue must be exactly the same as was calculated during reload,
2739 or things will point to the wrong place. The only time we can safely
2740 ignore this constraint is when a function has no arguments on the stack,
2741 no stack frame requirement and no live registers execpt for `lr'. If we
2742 can guarantee that by making all function calls into tail calls and that
2743 lr is not clobbered in any other way, then there is no need to push lr
2747 output_func_prologue (f
, frame_size
)
2751 int reg
, live_regs_mask
= 0;
2753 int volatile_func
= (optimize
> 0
2754 && TREE_THIS_VOLATILE (current_function_decl
));
2756 /* Nonzero if we must stuff some register arguments onto the stack as if
2757 they were passed there. */
2758 int store_arg_regs
= 0;
2760 if (arm_ccfsm_state
|| arm_target_insn
)
2761 abort (); /* Sanity check */
2763 return_used_this_function
= 0;
2764 lr_save_eliminated
= 0;
2766 fprintf (f
, "\t%c args = %d, pretend = %d, frame = %d\n",
2767 ARM_COMMENT_CHAR
, current_function_args_size
,
2768 current_function_pretend_args_size
, frame_size
);
2769 fprintf (f
, "\t%c frame_needed = %d, current_function_anonymous_args = %d\n",
2770 ARM_COMMENT_CHAR
, frame_pointer_needed
,
2771 current_function_anonymous_args
);
2774 fprintf (f
, "\t%c Volatile function.\n", ARM_COMMENT_CHAR
);
2776 if (current_function_anonymous_args
&& current_function_pretend_args_size
)
2779 for (reg
= 0; reg
<= 10; reg
++)
2780 if (regs_ever_live
[reg
] && ! call_used_regs
[reg
])
2781 live_regs_mask
|= (1 << reg
);
2783 if (frame_pointer_needed
)
2784 live_regs_mask
|= 0xD800;
2785 else if (regs_ever_live
[14])
2787 if (! current_function_args_size
2788 && ! function_really_clobbers_lr (get_insns ()))
2789 lr_save_eliminated
= 1;
2791 live_regs_mask
|= 0x4000;
2796 /* if a di mode load/store multiple is used, and the base register
2797 is r3, then r4 can become an ever live register without lr
2798 doing so, in this case we need to push lr as well, or we
2799 will fail to get a proper return. */
2801 live_regs_mask
|= 0x4000;
2802 lr_save_eliminated
= 0;
2806 if (lr_save_eliminated
)
2807 fprintf (f
,"\t%c I don't think this function clobbers lr\n",
2813 output_func_epilogue (f
, frame_size
)
2817 int reg
, live_regs_mask
= 0, code_size
= 0;
2818 /* If we need this then it will always be at lesat this much */
2819 int floats_offset
= 24;
2821 int volatile_func
= (optimize
> 0
2822 && TREE_THIS_VOLATILE (current_function_decl
));
2824 if (use_return_insn() && return_used_this_function
)
2826 if (frame_size
&& !(frame_pointer_needed
|| TARGET_APCS
))
2833 /* A volatile function should never return. Call abort. */
2836 rtx op
= gen_rtx (SYMBOL_REF
, Pmode
, "abort");
2837 output_asm_insn ("bl\t%a0", &op
);
2842 for (reg
= 0; reg
<= 10; reg
++)
2843 if (regs_ever_live
[reg
] && ! call_used_regs
[reg
])
2845 live_regs_mask
|= (1 << reg
);
2849 if (frame_pointer_needed
)
2851 for (reg
= 23; reg
> 15; reg
--)
2852 if (regs_ever_live
[reg
] && ! call_used_regs
[reg
])
2854 fprintf (f
, "\tldfe\t%s%s, [%sfp, #-%d]\n", ARM_REG_PREFIX
,
2855 reg_names
[reg
], ARM_REG_PREFIX
, floats_offset
);
2856 floats_offset
+= 12;
2860 live_regs_mask
|= 0xA800;
2861 print_multi_reg (f
, "ldmea\t%sfp", live_regs_mask
,
2862 TARGET_6
? FALSE
: TRUE
);
2867 /* Restore stack pointer if necessary. */
2870 operands
[0] = operands
[1] = stack_pointer_rtx
;
2871 operands
[2] = gen_rtx (CONST_INT
, VOIDmode
, frame_size
);
2872 output_add_immediate (operands
);
2875 for (reg
= 16; reg
< 24; reg
++)
2876 if (regs_ever_live
[reg
] && ! call_used_regs
[reg
])
2878 fprintf (f
, "\tldfe\t%s%s, [%ssp], #12\n", ARM_REG_PREFIX
,
2879 reg_names
[reg
], ARM_REG_PREFIX
);
2882 if (current_function_pretend_args_size
== 0 && regs_ever_live
[14])
2884 print_multi_reg (f
, "ldmfd\t%ssp!", live_regs_mask
| 0x8000,
2885 TARGET_6
? FALSE
: TRUE
);
2890 if (live_regs_mask
|| regs_ever_live
[14])
2892 live_regs_mask
|= 0x4000;
2893 print_multi_reg (f
, "ldmfd\t%ssp!", live_regs_mask
, FALSE
);
2896 if (current_function_pretend_args_size
)
2898 operands
[0] = operands
[1] = stack_pointer_rtx
;
2899 operands
[2] = gen_rtx (CONST_INT
, VOIDmode
,
2900 current_function_pretend_args_size
);
2901 output_add_immediate (operands
);
2904 TARGET_6
? "\tmov\t%spc, %slr\n" : "\tmovs\t%spc, %slr\n",
2905 ARM_REG_PREFIX
, ARM_REG_PREFIX
, f
);
2912 /* insn_addresses isn't allocated when not optimizing */
2915 arm_increase_location (code_size
2916 + insn_addresses
[INSN_UID (get_last_insn ())]
2917 + get_prologue_size ());
2919 current_function_anonymous_args
= 0;
2923 emit_multi_reg_push (mask
)
2930 for (i
= 0; i
< 16; i
++)
2931 if (mask
& (1 << i
))
2934 if (num_regs
== 0 || num_regs
> 16)
2937 par
= gen_rtx (PARALLEL
, VOIDmode
, rtvec_alloc (num_regs
));
2939 for (i
= 0; i
< 16; i
++)
2941 if (mask
& (1 << i
))
2944 = gen_rtx (SET
, VOIDmode
, gen_rtx (MEM
, BLKmode
,
2945 gen_rtx (PRE_DEC
, BLKmode
,
2946 stack_pointer_rtx
)),
2947 gen_rtx (UNSPEC
, BLKmode
,
2948 gen_rtvec (1, gen_rtx (REG
, SImode
, i
)),
2954 for (j
= 1, i
++; j
< num_regs
; i
++)
2956 if (mask
& (1 << i
))
2959 = gen_rtx (USE
, VOIDmode
, gen_rtx (REG
, SImode
, i
));
/* Emit RTL for the prologue of the current function: compute the set of
   live call-saved registers, push them (together with lr), set up the
   frame pointer when required, and allocate the local stack frame.
   NOTE(review): several original source lines (braces, declarations,
   some statements) are elided in this extract; code left byte-identical.  */
2967 arm_expand_prologue ()
/* AMOUNT is the (negated) size of the local frame to allocate.  */
2970 rtx amount
= GEN_INT (- get_frame_size ());
2973 int live_regs_mask
= 0;
2974 int store_arg_regs
= 0;
/* A `volatile' function (TREE_THIS_VOLATILE) never returns, so no
   registers need saving for it -- only checked when optimizing.  */
2975 int volatile_func
= (optimize
> 0
2976 && TREE_THIS_VOLATILE (current_function_decl
));
/* Anonymous (varargs) arguments with pretend args mean the incoming
   argument registers must be stored.  NOTE(review): the body of this
   conditional is elided in this extract.  */
2978 if (current_function_anonymous_args
&& current_function_pretend_args_size
)
/* Build a mask of the live call-saved core registers r0-r10.  */
2981 if (! volatile_func
)
2982 for (reg
= 0; reg
<= 10; reg
++)
2983 if (regs_ever_live
[reg
] && ! call_used_regs
[reg
])
2984 live_regs_mask
|= 1 << reg
;
/* lr (bit 14) must be saved if it is live.  */
2986 if (! volatile_func
&& regs_ever_live
[14])
2987 live_regs_mask
|= 0x4000;
/* With a frame pointer we also save fp, ip, lr and pc
   (0xD800 = bits 11, 12, 14 and 15), and preserve the incoming sp
   in ip (r12) so the frame pointer can be set from it below.  */
2989 if (frame_pointer_needed
)
2991 live_regs_mask
|= 0xD800;
2992 emit_insn (gen_movsi (gen_rtx (REG
, SImode
, 12),
2993 stack_pointer_rtx
))__;
/* Push the argument registers covered by the pretend-args area, then
   drop sp below it.  NOTE(review): intervening lines elided.  */
2996 if (current_function_pretend_args_size
)
2999 emit_multi_reg_push ((0xf0 >> (current_function_pretend_args_size
/ 4))
3002 emit_insn (gen_addsi3 (stack_pointer_rtx
, stack_pointer_rtx
,
3003 GEN_INT (-current_function_pretend_args_size
)));
3008 /* If we have to push any regs, then we must push lr as well, or
3009 we won't get a proper return. */
3010 live_regs_mask
|= 0x4000;
3011 emit_multi_reg_push (live_regs_mask
);
3014 /* For now the integer regs are still pushed in output_func_epilogue (). */
/* Push any live call-saved floating point registers (regs 16-23),
   one at a time in XFmode via pre-decrement stores.  */
3016 if (! volatile_func
)
3017 for (reg
= 23; reg
> 15; reg
--)
3018 if (regs_ever_live
[reg
] && ! call_used_regs
[reg
])
3019 emit_insn (gen_rtx (SET
, VOIDmode
,
3020 gen_rtx (MEM
, XFmode
,
3021 gen_rtx (PRE_DEC
, XFmode
,
3022 stack_pointer_rtx
)),
3023 gen_rtx (REG
, XFmode
, reg
)));
/* Set the hard frame pointer from the copy of sp saved in ip above,
   adjusted past the saved pc slot and the pretend-args area.  */
3025 if (frame_pointer_needed
)
3026 emit_insn (gen_addsi3 (hard_frame_pointer_rtx
, gen_rtx (REG
, SImode
, 12),
3028 (-(4 + current_function_pretend_args_size
)))));
/* Allocate the local frame (AMOUNT is already negated).  The CLOBBER
   of memory presumably keeps the scheduler from moving memory accesses
   across the stack adjustment -- TODO confirm.  */
3030 if (amount
!= const0_rtx
)
3032 emit_insn (gen_addsi3 (stack_pointer_rtx
, stack_pointer_rtx
, amount
));
3033 emit_insn (gen_rtx (CLOBBER
, VOIDmode
,
3034 gen_rtx (MEM
, BLKmode
, stack_pointer_rtx
)));
3037 /* If we are profiling, make sure no instructions are scheduled before
3038 the call to mcount. */
3039 if (profile_flag
|| profile_block_flag
)
3040 emit_insn (gen_blockage ());
3044 /* If CODE is 'd', then the X is a condition operand and the instruction
3045 should only be executed if the condition is true.
3046 if CODE is 'D', then the X is a condition operand and the instruction
3047 should only be executed if the condition is false: however, if the mode
3048 of the comparison is CCFPEmode, then always execute the instruction -- we
3049 do this because in these circumstances !GE does not necessarily imply LT;
3050 in these cases the instruction pattern will take care to make sure that
3051 an instruction containing %d will follow, thereby undoing the effects of
3052 doing this instruction unconditionally.
3053 If CODE is 'N' then X is a floating point operand that must be negated
3055 If CODE is 'B' then output a bitwise inverted value of X (a const int).
3056 If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */
/* Print operand X to STREAM according to the modifier CODE.
   NOTE(review): the switch structure and most case labels are elided
   in this extract; code left byte-identical.  */
3059 arm_print_operand (stream
, x
, code
)
3067 fputc (ARM_COMMENT_CHAR
, stream
);
3071 fputs (ARM_REG_PREFIX
, stream
);
/* Conditional-execution suffix: when the ccfsm is conditionalising
   (state 3 or 4), emit the current condition code.  */
3075 if (arm_ccfsm_state
== 3 || arm_ccfsm_state
== 4)
3076 fputs (arm_condition_codes
[arm_current_cc
], stream
);
/* 'N': negate a floating point constant before printing it.  */
3082 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
3083 r
= REAL_VALUE_NEGATE (r
);
3084 fprintf (stream
, "%s", fp_const_from_val (&r
));
/* 'B': print the bitwise complement of a CONST_INT, sign-extended
   to the target word size.  */
3089 if (GET_CODE (x
) == CONST_INT
)
3091 #if HOST_BITS_PER_WIDE_INT == HOST_BITS_PER_INT
3096 ARM_SIGN_EXTEND (~ INTVAL (x
)));
3100 output_addr_const (stream
, x
);
/* Arithmetic-instruction mnemonics (shifted vs. plain forms).  */
3105 fprintf (stream
, "%s", arithmetic_instr (x
, 1));
3109 fprintf (stream
, "%s", arithmetic_instr (x
, 0));
/* Shift operator and amount for a shifted operand.  */
3115 char *shift
= shift_op (x
, &val
);
3119 fprintf (stream
, ", %s ", shift_op (x
, &val
));
3121 arm_print_operand (stream
, XEXP (x
, 1), 0);
3124 #if HOST_BITS_PER_WIDE_INT == HOST_BITS_PER_INT
/* Print the register one above X -- presumably the high half of a
   two-register value; TODO confirm against the elided case label.  */
3137 fputs (ARM_REG_PREFIX
, stream
);
3138 fputs (reg_names
[REGNO (x
) + 1], stream
);
/* Print the base register of a memory operand (plain REG address,
   else the register inside the auto-modify address).  */
3142 fputs (ARM_REG_PREFIX
, stream
);
3143 if (GET_CODE (XEXP (x
, 0)) == REG
)
3144 fputs (reg_names
[REGNO (XEXP (x
, 0))], stream
);
3146 fputs (reg_names
[REGNO (XEXP (XEXP (x
, 0), 0))], stream
);
/* 'M': ldm/stm style register range {first-last}, where the length is
   the operand's size rounded up to whole SImode words.  */
3150 fprintf (stream
, "{%s%s-%s%s}", ARM_REG_PREFIX
, reg_names
[REGNO (x
)],
3151 ARM_REG_PREFIX
, reg_names
[REGNO (x
) - 1
3152 + ((GET_MODE_SIZE (GET_MODE (x
))
3153 + GET_MODE_SIZE (SImode
) - 1)
3154 / GET_MODE_SIZE (SImode
))]);
/* 'd': condition code for executing when the comparison is true.  */
3159 fputs (arm_condition_codes
[get_arm_condition_code (x
)],
/* 'D': inverse condition code -- but only when inversion is safe
   (always for EQ/NE or -ffast-math; otherwise not for CCFPEmode,
   per the header comment above).  */
3164 if (x
&& (flag_fast_math
3165 || GET_CODE (x
) == EQ
|| GET_CODE (x
) == NE
3166 || (GET_MODE (XEXP (x
, 0)) != CCFPEmode
3167 && (GET_MODE_CLASS (GET_MODE (XEXP (x
, 0)))
3169 fputs (arm_condition_codes
[ARM_INVERSE_CONDITION_CODE
3170 (get_arm_condition_code (x
))],
/* Default (no modifier): print a register, memory reference,
   floating constant, or immediate.  */
3178 if (GET_CODE (x
) == REG
)
3180 fputs (ARM_REG_PREFIX
, stream
);
3181 fputs (reg_names
[REGNO (x
)], stream
);
3183 else if (GET_CODE (x
) == MEM
)
/* Record the mode so PRINT_OPERAND_ADDRESS can size auto-modify
   addresses (see output_memory_reference_mode's declaration).  */
3185 output_memory_reference_mode
= GET_MODE (x
);
3186 output_address (XEXP (x
, 0));
3188 else if (GET_CODE (x
) == CONST_DOUBLE
)
3189 fprintf (stream
, "#%s", fp_immediate_constant (x
));
3190 else if (GET_CODE (x
) == NEG
)
3191 abort (); /* This should never happen now. */
3194 fputc ('#', stream
);
3195 output_addr_const (stream
, x
);
3200 /* Increase the `arm_text_location' by AMOUNT if we're in the text
   section; tracks the current offset within .text so that label
   offsets recorded by arm_asm_output_label below are correct.  */
3204 arm_increase_location (amount
)
3207 if (in_text_section ())
3208 arm_text_location
+= amount
;
3212 /* Output a label definition. If this label is within the .text segment, it
3213 is stored in OFFSET_TABLE, to be used when building `llc' instructions.
3214 Maybe GCC remembers names not starting with a `*' for a long time, but this
3215 is a minority anyway, so we just make a copy. Do not store the leading `*'
3216 if the name starts with one. */
/* NOTE(review): several original lines (function body braces, the hash
   accumulation statement, early return for non-text labels) are elided
   in this extract; code left byte-identical.  */
3219 arm_asm_output_label (stream
, name
)
3223 char *real_name
, *s
;
3224 struct label_offset
*cur
;
/* Emit the label itself: "name:\n".  */
3227 assemble_name (stream
, name
);
3228 fputs (":\n", stream
);
/* Only text-section labels are recorded in the offset table.  */
3229 if (! in_text_section ())
/* Name starts with `*': copy it without the leading `*'.  */
3234 real_name
= xmalloc (1 + strlen (&name
[1]));
3235 strcpy (real_name
, &name
[1]);
/* Otherwise prepend the assembler underscore prefix.  */
3239 real_name
= xmalloc (2 + strlen (name
));
3240 strcpy (real_name
, "_");
3241 strcat (real_name
, name
);
/* Hash the stored name (accumulation statement elided), reduce
   modulo the table size, and chain a new entry recording the label's
   current text offset onto the bucket.  */
3243 for (s
= real_name
; *s
; s
++)
3246 hash
= hash
% LABEL_HASH_SIZE
;
3247 cur
= (struct label_offset
*) xmalloc (sizeof (struct label_offset
));
3248 cur
->name
= real_name
;
3249 cur
->offset
= arm_text_location
;
3250 cur
->cdr
= offset_table
[hash
];
3251 offset_table
[hash
] = cur
;
3254 /* Load a symbol that is known to be in the text segment into a register.
3255 This should never be called when not optimizing. */
/* The symbol's address is synthesised pc-relatively as a sequence of
   `sub' instructions, each handling one byte of the offset.
   NOTE(review): several original lines are elided in this extract
   (abort on the sanity check, hash accumulation, loop bodies);
   code left byte-identical.  */
3258 output_load_symbol (insn
, operands
)
3263 char *name
= XSTR (operands
[1], 0);
3264 struct label_offset
*he
;
/* NEVER_MASK accumulates the byte lanes believed to be zero in the
   offset; start with all bits set and clear lanes that are needed.  */
3267 unsigned int mask
, never_mask
= 0xffffffff;
/* Sanity check: must be optimizing and the name must be the
   `*'-prefixed form recorded by arm_asm_output_label.  */
3271 if (optimize
== 0 || *name
!= '*')
/* Look the label up in the offset table (same hashing as
   arm_asm_output_label; accumulation statement elided).  */
3274 for (s
= &name
[1]; *s
; s
++)
3277 hash
= hash
% LABEL_HASH_SIZE
;
3278 he
= offset_table
[hash
];
3279 while (he
&& strcmp (he
->name
, &name
[1]))
/* Distance from this insn (plus the prologue emitted before the body
   and the ARM's 8-byte pc read-ahead) back to the label.  */
3285 offset
= (arm_text_location
+ insn_addresses
[INSN_UID (insn
)]
3286 + get_prologue_size () + 8 - he
->offset
);
3290 /* When generating the instructions, we never mask out the bits that we
3291 think will be always zero, then if a mistake has occurred somewhere, the
3292 assembler will spot it and generate an error. */
3294 /* If the symbol is word aligned then we might be able to reduce the
3296 shift
= ((offset
& 3) == 0) ? 2 : 0;
3298 /* Clear the bits from NEVER_MASK that will be ORed in with the individual
3300 for (; shift
< 32; shift
+= 8)
3302 mask
= 0xff << shift
;
3303 if ((offset
& mask
) || ((unsigned) offset
) > mask
)
3304 never_mask
&= ~mask
;
/* Step back to the last byte lane that was actually needed.  */
3308 mask
= 0xff << (shift
- 32);
3310 while (mask
&& (never_mask
& mask
) == 0)
/* First instruction subtracts from pc; the assembler expression
   `8 + . - %a1' accounts for the pc read-ahead.  */
3314 strcpy (buffer
, "sub%?\t%0, %|pc, #(8 + . -%a1)");
3315 if ((never_mask
| mask
) != 0xffffffff)
3316 sprintf (buffer
+ strlen (buffer
), " & 0x%x", mask
| never_mask
);
/* Subsequent instructions refine the result one byte lane at a time.  */
3319 sprintf (buffer
, "sub%%?\t%%0, %%0, #(%d + . -%%a1) & 0x%x",
3320 inst
, mask
| never_mask
);
3322 output_asm_insn (buffer
, operands
);
3330 /* Output code resembling an .lcomm directive. /bin/as doesn't have this
3331 directive hence this hack, which works by reserving some `.space' in the
3332 bss segment directly.
3334 XXX This is a severe hack, which is guaranteed NOT to work since it doesn't
3335 define STATIC COMMON space but merely STATIC BSS space. */
/* Emits ".bss", "name: .space rounded", then switches back to whichever
   of .text/.data was current.  SIZE is unused here; ROUNDED is the
   space actually reserved.  */
3338 output_lcomm_directive (stream
, name
, size
, rounded
)
3343 fprintf (stream
, "\n\t.bss\t%c .lcomm\n", ARM_COMMENT_CHAR
);
3344 assemble_name (stream
, name
);
3345 fprintf (stream
, ":\t.space\t%d\n", rounded
);
/* Restore the section that was in effect before the directive.  */
3346 if (in_text_section ())
3347 fputs ("\n\t.text\n", stream
);
3349 fputs ("\n\t.data\n", stream
);
3352 /* A finite state machine takes care of noticing whether or not instructions
3353 can be conditionally executed, and thus decrease execution time and code
3354 size by deleting branch instructions. The fsm is controlled by
3355 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
3357 /* The state of the fsm controlling condition codes are:
3358 0: normal, do nothing special
3359 1: make ASM_OUTPUT_OPCODE not output this instruction
3360 2: make ASM_OUTPUT_OPCODE not output this instruction
3361 3: make instructions conditional
3362 4: make instructions conditional
3364 State transitions (state->state by whom under condition):
3365 0 -> 1 final_prescan_insn if the `target' is a label
3366 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
3367 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
3368 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
3369 3 -> 0 ASM_OUTPUT_INTERNAL_LABEL if the `target' label is reached
3370 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
3371 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
3372 (the target insn is arm_target_insn).
3374 If the jump clobbers the conditions then we use states 2 and 4.
3376 A similar thing can be done with conditional return insns.
3378 XXX In case the `target' is an unconditional branch, this conditionalising
3379 of the instructions always reduces code size, but not always execution
3380 time. But then, I want to reduce the code size to somewhere near what
3381 /bin/cc produces. */
3383 /* Returns the index of the ARM condition code string in
3384 `arm_condition_codes'. COMPARISON should be an rtx like
3385 `(eq (...) (...))'. */
/* NOTE(review): the default case (presumably an abort) is elided in
   this extract; code left byte-identical.  The returned integers
   index the arm_condition_codes table defined elsewhere in the file
   -- the pairing NE/EQ, GE/LT, GT/LE, GEU/LTU, GTU/LEU differs by
   one bit so ARM_INVERSE_CONDITION_CODE can flip them.  TODO confirm
   against the table's definition.  */
3388 get_arm_condition_code (comparison
)
3391 switch (GET_CODE (comparison
))
3393 case NE
: return (1);
3394 case EQ
: return (0);
3395 case GE
: return (10);
3396 case GT
: return (12);
3397 case LE
: return (13);
3398 case LT
: return (11);
3399 case GEU
: return (2);
3400 case GTU
: return (8);
3401 case LEU
: return (9);
3402 case LTU
: return (3);
3411 final_prescan_insn (insn
, opvec
, noperands
)
3416 /* BODY will hold the body of INSN. */
3417 register rtx body
= PATTERN (insn
);
3419 /* This will be 1 if trying to repeat the trick, and things need to be
3420 reversed if it appears to fail. */
3423 /* JUMP_CLOBBERS will be one implies that the conditions if a branch is
3424 taken are clobbered, even if the rtl suggests otherwise. It also
3425 means that we have to grub around within the jump expression to find
3426 out what the conditions are when the jump isn't taken. */
3427 int jump_clobbers
= 0;
3429 /* If we start with a return insn, we only succeed if we find another one. */
3430 int seeking_return
= 0;
3432 /* START_INSN will hold the insn from where we start looking. This is the
3433 first insn after the following code_label if REVERSE is true. */
3434 rtx start_insn
= insn
;
3436 /* If in state 4, check if the target branch is reached, in order to
3437 change back to state 0. */
3438 if (arm_ccfsm_state
== 4)
3440 if (insn
== arm_target_insn
)
3442 arm_target_insn
= NULL
;
3443 arm_ccfsm_state
= 0;
3448 /* If in state 3, it is possible to repeat the trick, if this insn is an
3449 unconditional branch to a label, and immediately following this branch
3450 is the previous target label which is only used once, and the label this
3451 branch jumps to is not too far off. */
3452 if (arm_ccfsm_state
== 3)
3454 if (simplejump_p (insn
))
3456 start_insn
= next_nonnote_insn (start_insn
);
3457 if (GET_CODE (start_insn
) == BARRIER
)
3459 /* XXX Isn't this always a barrier? */
3460 start_insn
= next_nonnote_insn (start_insn
);
3462 if (GET_CODE (start_insn
) == CODE_LABEL
3463 && CODE_LABEL_NUMBER (start_insn
) == arm_target_label
3464 && LABEL_NUSES (start_insn
) == 1)
3469 else if (GET_CODE (body
) == RETURN
)
3471 start_insn
= next_nonnote_insn (start_insn
);
3472 if (GET_CODE (start_insn
) == BARRIER
)
3473 start_insn
= next_nonnote_insn (start_insn
);
3474 if (GET_CODE (start_insn
) == CODE_LABEL
3475 && CODE_LABEL_NUMBER (start_insn
) == arm_target_label
3476 && LABEL_NUSES (start_insn
) == 1)
3488 if (arm_ccfsm_state
!= 0 && !reverse
)
3490 if (GET_CODE (insn
) != JUMP_INSN
)
3493 /* This jump might be paralleled with a clobber of the condition codes;
3494 the jump should always come first.  */
3495 if (GET_CODE (body
) == PARALLEL
&& XVECLEN (body
, 0) > 0)
3496 body
= XVECEXP (body
, 0, 0);
3499 /* If this is a conditional return then we don't want to know */
3500 if (GET_CODE (body
) == SET
&& GET_CODE (SET_DEST (body
)) == PC
3501 && GET_CODE (SET_SRC (body
)) == IF_THEN_ELSE
3502 && (GET_CODE (XEXP (SET_SRC (body
), 1)) == RETURN
3503 || GET_CODE (XEXP (SET_SRC (body
), 2)) == RETURN
))
3508 || (GET_CODE (body
) == SET
&& GET_CODE (SET_DEST (body
)) == PC
3509 && GET_CODE (SET_SRC (body
)) == IF_THEN_ELSE
))
3511 int insns_skipped
= 0, fail
= FALSE
, succeed
= FALSE
;
3512 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
3513 int then_not_else
= TRUE
;
3514 rtx this_insn
= start_insn
, label
= 0;
3516 if (get_attr_conds (insn
) == CONDS_JUMP_CLOB
)
3518 /* The code below is wrong for these, and I haven't time to
3519 fix it now. So we just do the safe thing and return. This
3520 whole function needs re-writing anyway. */
3525 /* Register the insn jumped to. */
3528 if (!seeking_return
)
3529 label
= XEXP (SET_SRC (body
), 0);
3531 else if (GET_CODE (XEXP (SET_SRC (body
), 1)) == LABEL_REF
)
3532 label
= XEXP (XEXP (SET_SRC (body
), 1), 0);
3533 else if (GET_CODE (XEXP (SET_SRC (body
), 2)) == LABEL_REF
)
3535 label
= XEXP (XEXP (SET_SRC (body
), 2), 0);
3536 then_not_else
= FALSE
;
3538 else if (GET_CODE (XEXP (SET_SRC (body
), 1)) == RETURN
)
3540 else if (GET_CODE (XEXP (SET_SRC (body
), 2)) == RETURN
)
3543 then_not_else
= FALSE
;
3548 /* See how many insns this branch skips, and what kind of insns. If all
3549 insns are okay, and the label or unconditional branch to the same
3550 label is not too far away, succeed. */
3551 for (insns_skipped
= 0;
3552 !fail
&& !succeed
&& insns_skipped
< MAX_INSNS_SKIPPED
;
3557 this_insn
= next_nonnote_insn (this_insn
);
3561 scanbody
= PATTERN (this_insn
);
3563 switch (GET_CODE (this_insn
))
3566 /* Succeed if it is the target label, otherwise fail since
3567 control falls in from somewhere else. */
3568 if (this_insn
== label
)
3572 arm_ccfsm_state
= 2;
3573 this_insn
= next_nonnote_insn (this_insn
);
3576 arm_ccfsm_state
= 1;
3584 /* Succeed if the following insn is the target label.
3586 If return insns are used then the last insn in a function
3587 will be a barrier. */
3588 this_insn
= next_nonnote_insn (this_insn
);
3589 if (this_insn
&& this_insn
== label
)
3593 arm_ccfsm_state
= 2;
3594 this_insn
= next_nonnote_insn (this_insn
);
3597 arm_ccfsm_state
= 1;
3605 /* The arm 6xx uses full 32 bit addresses so the cc is not
3606 preserved over calls */
3611 /* If this is an unconditional branch to the same label, succeed.
3612 If it is to another label, do nothing. If it is conditional,
3614 /* XXX Probably, the test for the SET and the PC are unnecessary. */
3616 if (GET_CODE (scanbody
) == SET
3617 && GET_CODE (SET_DEST (scanbody
)) == PC
)
3619 if (GET_CODE (SET_SRC (scanbody
)) == LABEL_REF
3620 && XEXP (SET_SRC (scanbody
), 0) == label
&& !reverse
)
3622 arm_ccfsm_state
= 2;
3625 else if (GET_CODE (SET_SRC (scanbody
)) == IF_THEN_ELSE
)
3628 else if (GET_CODE (scanbody
) == RETURN
3631 arm_ccfsm_state
= 2;
3634 else if (GET_CODE (scanbody
) == PARALLEL
)
3636 switch (get_attr_conds (this_insn
))
3648 /* Instructions using or affecting the condition codes make it
3650 if ((GET_CODE (scanbody
) == SET
3651 || GET_CODE (scanbody
) == PARALLEL
)
3652 && get_attr_conds (this_insn
) != CONDS_NOCOND
)
3662 if ((!seeking_return
) && (arm_ccfsm_state
== 1 || reverse
))
3663 arm_target_label
= CODE_LABEL_NUMBER (label
);
3664 else if (seeking_return
|| arm_ccfsm_state
== 2)
3666 while (this_insn
&& GET_CODE (PATTERN (this_insn
)) == USE
)
3668 this_insn
= next_nonnote_insn (this_insn
);
3669 if (this_insn
&& (GET_CODE (this_insn
) == BARRIER
3670 || GET_CODE (this_insn
) == CODE_LABEL
))
3675 /* Oh, dear! we ran off the end.. give up */
3676 recog (PATTERN (insn
), insn
, NULL_PTR
);
3677 arm_ccfsm_state
= 0;
3678 arm_target_insn
= NULL
;
3681 arm_target_insn
= this_insn
;
3690 get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body
),
3692 if (GET_CODE (XEXP (XEXP (SET_SRC (body
), 0), 0)) == AND
)
3693 arm_current_cc
= ARM_INVERSE_CONDITION_CODE (arm_current_cc
);
3694 if (GET_CODE (XEXP (SET_SRC (body
), 0)) == NE
)
3695 arm_current_cc
= ARM_INVERSE_CONDITION_CODE (arm_current_cc
);
3699 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
3702 arm_current_cc
= get_arm_condition_code (XEXP (SET_SRC (body
),
3706 if (reverse
|| then_not_else
)
3707 arm_current_cc
= ARM_INVERSE_CONDITION_CODE (arm_current_cc
);
3709 /* restore recog_operand (getting the attributes of other insns can
3710 destroy this array, but final.c assumes that it remains intact
3711 across this call; since the insn has been recognized already we
3712 call recog direct). */
3713 recog (PATTERN (insn
), insn
, NULL_PTR
);