/* Subroutines for insn-output.c for Pyramid 90x, 9000, and MIServer Series.
   Copyright (C) 1989, 1991 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* Some output-actions in pyr.md need these.  */
#include "config.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-flags.h"
#include "output.h"
#include "insn-attr.h"
#include "tree.h"

/*
 * Do FUNCTION_ARG.
 * This cannot be defined as a macro on pyramids, because Pyramid Technology's
 * C compiler dies on (several equivalent definitions of) this macro.
 * The only way around this cc bug was to make this a function.
 * While it would be possible to use a macro version for gcc, it seems
 * more reliable to have a single version of the code.
 */
void *
pyr_function_arg (cum, mode, type, named)
     CUMULATIVE_ARGS cum;
     enum machine_mode mode;
     tree type;
{
  return (void *)(FUNCTION_ARG_HELPER (cum, mode, type, named));
}

/* Do the hard part of PARAM_SAFE_FOR_REG_P.
 * This cannot be defined as a macro on pyramids, because Pyramid Technology's
 * C compiler dies on (several equivalent definitions of) this macro.
 * The only way around this cc bug was to make this a function.
 */
int
inner_param_safe_helper (type)
     tree type;
{
  return (INNER_PARAM_SAFE_HELPER (type));
}

/* Return 1 if OP is a non-indexed operand of mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked to make sure it isn't indexed.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   the caller wants.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   It is useful to compare this with general_operand().  They should
   be identical except for one line.

   This function seems necessary because of the non-orthogonality of
   Pyramid insns.
   For any 2-operand insn, and any combination of operand modes,
   if indexing is valid for the insn's second operand, it is invalid
   for the first operand to be indexed.  */

extern int volatile_ok;

int
nonindexed_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  register RTX_CODE code = GET_CODE (op);
  int mode_altering_drug = 0;

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT)
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
            && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  while (code == SUBREG)
    {
      op = SUBREG_REG (op);
      code = GET_CODE (op);
#if 0
      /* No longer needed, since (SUBREG (MEM...))
         will load the MEM into a reload reg in the MEM's own mode.  */
      mode_altering_drug = 1;
#endif
    }
  if (code == REG)
    return 1;
  if (code == CONST_DOUBLE)
    return LEGITIMATE_CONSTANT_P (op);
  if (code == MEM)
    {
      register rtx y = XEXP (op, 0);
      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;
      GO_IF_NONINDEXED_ADDRESS (y, win);
    }
  return 0;

 win:
  if (mode_altering_drug)
    return ! mode_dependent_address_p (XEXP (op, 0));
  return 1;
}

/* Return non-zero if the rtx OP has an immediate component.  An
   immediate component or additive term equal to zero is rejected
   due to assembler problems.  */

int
has_direct_base (op)
     rtx op;
{
  if ((CONSTANT_ADDRESS_P (op)
       && op != const0_rtx)
      || (GET_CODE (op) == PLUS
          && ((CONSTANT_ADDRESS_P (XEXP (op, 1))
               && XEXP (op, 1) != const0_rtx)
              || (CONSTANT_ADDRESS_P (XEXP (op, 0))
                  && XEXP (op, 0) != const0_rtx))))
    return 1;

  return 0;
}
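
/* A worked case (illustrative): for the address (plus:SI (reg:SI 4)
   (const_int 8)) the PLUS arm matches and 1 is returned, while for
   (plus:SI (reg:SI 4) (const_int 0)) the additive term is the shared
   const0_rtx, so both arms fail and 0 is returned.  */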

/* Return non-zero if the rtx OP has a (scaled) index.  */

int
has_index (op)
     rtx op;
{
  if (GET_CODE (op) == PLUS
      && (GET_CODE (XEXP (op, 0)) == MULT
          || GET_CODE (XEXP (op, 1)) == MULT))
    return 1;
  else
    return 0;
}
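
/* Illustrative example: (plus:SI (mult:SI (reg:SI 5) (const_int 4))
   (reg:SI 4)) is the scaled-index form recognized here, so 1 is
   returned; a plain (plus:SI (reg:SI 4) (const_int 8)) yields 0.  */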

int swap_operands;

/* weird_memory_memory -- return 1 if OP0 and OP1 can be compared (or
   exchanged with xchw) with one instruction.  If the operands need to
   be swapped, set the global variable SWAP_OPERANDS.  This function
   silently assumes that both OP0 and OP1 are valid memory references.  */

int
weird_memory_memory (op0, op1)
     rtx op0, op1;
{
  RTX_CODE code0, code1;

  op0 = XEXP (op0, 0);
  op1 = XEXP (op1, 0);
  code0 = GET_CODE (op0);
  code1 = GET_CODE (op1);

  swap_operands = 0;

  if (code1 == REG || code1 == SUBREG)
    return 1;
  if (code0 == REG || code0 == SUBREG)
    {
      swap_operands = 1;
      return 1;
    }
  if (has_direct_base (op0) && has_direct_base (op1))
    {
      if (has_index (op1))
        {
          if (has_index (op0))
            return 0;
          swap_operands = 1;
        }
      return 1;
    }
  return 0;
}
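
/* A sketch of the cases (illustrative operands, not from the original
   sources): if OP1's address is a plain register, the comparison can be
   emitted as-is; if instead only OP0's address is a plain register, the
   operands must be exchanged.  E.g. comparing (mem:SI (reg:SI 4))
   against (mem:SI (plus:SI (reg:SI 5) (const_int 8))) returns 1 with
   swap_operands set to 1.  */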

int
signed_comparison (x, mode)
     rtx x;
     enum machine_mode mode;
{
  return ! TRULY_UNSIGNED_COMPARE_P (GET_CODE (x));
}

extern rtx force_reg ();
rtx test_op0, test_op1;
enum machine_mode test_mode;

/* Sign-extend or zero-extend constant X from FROM_MODE to TO_MODE.  */

rtx
extend_const (x, extop, from_mode, to_mode)
     rtx x;
     RTX_CODE extop;
     enum machine_mode from_mode, to_mode;
{
  int val;
  int negative;

  if (from_mode == to_mode)
    return x;
  if (GET_CODE (x) != CONST_INT)
    abort ();
  val = INTVAL (x);
  negative = val & (1 << (GET_MODE_BITSIZE (from_mode) - 1));
  if (GET_MODE_BITSIZE (from_mode) == HOST_BITS_PER_INT)
    abort ();
  if (negative && extop == SIGN_EXTEND)
    val = val | ((-1) << (GET_MODE_BITSIZE (from_mode)));
  else
    val = val & ~((-1) << (GET_MODE_BITSIZE (from_mode)));
  if (GET_MODE_BITSIZE (to_mode) == HOST_BITS_PER_INT)
    return gen_rtx (CONST_INT, VOIDmode, val);
  return gen_rtx (CONST_INT, VOIDmode,
                  val & ~((-1) << (GET_MODE_BITSIZE (to_mode))));
}

rtx
ensure_extended (op, extop, from_mode)
     rtx op;
     RTX_CODE extop;
     enum machine_mode from_mode;
{
  if (GET_CODE (op) == CONST_INT)
    return extend_const (op, extop, from_mode, SImode);
  else
    return force_reg (SImode, gen_rtx (extop, SImode, op));
}

/* Emit rtl for a branch, as well as any delayed (integer) compare insns.
   The compare insn to perform is determined by the global variables
   test_op0 and test_op1.  */

void
extend_and_branch (extop)
     RTX_CODE extop;
{
  rtx op0, op1;
  RTX_CODE code0, code1;

  op0 = test_op0, op1 = test_op1;
  if (op0 == 0)
    return;

  code0 = GET_CODE (op0);
  if (op1)
    code1 = GET_CODE (op1);
  test_op0 = test_op1 = 0;

  if (op1 == 0)
    {
      op0 = ensure_extended (op0, extop, test_mode);
      emit_insn (gen_rtx (SET, VOIDmode, cc0_rtx, op0));
    }
  else
    {
      if (CONSTANT_P (op0) && CONSTANT_P (op1))
        {
          op0 = ensure_extended (op0, extop, test_mode);
          op1 = ensure_extended (op1, extop, test_mode);
        }
      else if (extop == ZERO_EXTEND && test_mode == HImode)
        {
          /* Pyramids have no unsigned "cmphi" instructions.  We need to
             zero extend unsigned halfwords into temporary registers.  */
          op0 = ensure_extended (op0, extop, test_mode);
          op1 = ensure_extended (op1, extop, test_mode);
        }
      else if (CONSTANT_P (op0))
        {
          op0 = ensure_extended (op0, extop, test_mode);
          op1 = ensure_extended (op1, extop, test_mode);
        }
      else if (CONSTANT_P (op1))
        {
          op1 = ensure_extended (op1, extop, test_mode);
          op0 = ensure_extended (op0, extop, test_mode);
        }
      else if ((code0 == REG || code0 == SUBREG)
               && (code1 == REG || code1 == SUBREG))
        {
          /* I could do this case without extension, by using the virtual
             register address (but that would lose for global regs).  */
          op0 = ensure_extended (op0, extop, test_mode);
          op1 = ensure_extended (op1, extop, test_mode);
        }
      else if (code0 == MEM && code1 == MEM)
        {
          /* Load into a reg if the address combination can't be handled
             directly.  */
          if (! weird_memory_memory (op0, op1))
            op0 = force_reg (test_mode, op0);
        }

      emit_insn (gen_rtx (SET, VOIDmode, cc0_rtx,
                          gen_rtx (COMPARE, VOIDmode, op0, op1)));
    }
}
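
/* An assumed usage sketch (the pyr.md patterns themselves are not shown
   here): a narrow compare, e.g. in HImode, records its operands in
   test_op0/test_op1 and its mode in test_mode instead of emitting
   anything; the branch pattern then calls extend_and_branch
   (ZERO_EXTEND) for an unsigned condition, or extend_and_branch
   (SIGN_EXTEND) for a signed one, which widens the pending operands to
   SImode as above and emits the real test or compare against cc0.  */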

/* Return non-zero if the two single-word moves with operands[0]
   and operands[1] for the first single-word move, and operands[2]
   and operands[3] for the second single-word move, can be combined
   into a double-word move.

   The criterion is whether the operands are in consecutive memory cells,
   registers, etc.  */

int
movdi_possible (operands)
     rtx operands[];
{
  int cnst_diff0, cnst_diff1;
  RTX_CODE code0 = GET_CODE (operands[0]);
  RTX_CODE code1 = GET_CODE (operands[1]);

  /* Don't dare to combine (possibly overlapping) memory -> memory moves.  */
  /* It would be possible to detect the cases where we dare, by using
     constant_diff (operands[0], operands[1])!!!  */
  if (code0 == MEM && code1 == MEM)
    return 0;

  cnst_diff0 = consecutive_operands (operands[0], operands[2]);
  if (cnst_diff0 == 0)
    return 0;

  cnst_diff1 = consecutive_operands (operands[1], operands[3]);
  if (cnst_diff1 == 0)
    return 0;

  if (cnst_diff0 & cnst_diff1)
    {
      /* The source and destination operands are consecutive.  */

      /* If the first move writes into the source of the second move,
         we cannot combine.  */
      if ((code0 == REG
           && reg_overlap_mentioned_p (operands[0], operands[3]))
          || (code0 == SUBREG
              && subreg_overlap_mentioned_p (operands[0], operands[3])))
        return 0;

      if (cnst_diff0 & 1)
        /* operands[0],[1] have higher addresses than operands[2],[3].  */
        swap_operands = 1;
      else
        /* operands[0],[1] have lower addresses than operands[2],[3].  */
        swap_operands = 0;
      return 1;
    }
  return 0;
}
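
/* A worked case (illustrative register numbers): for the two word moves
   (set (reg:SI 1) (reg:SI 3)) and (set (reg:SI 0) (reg:SI 2)), i.e.
   operands[0] = (reg 1), operands[1] = (reg 3), operands[2] = (reg 0),
   operands[3] = (reg 2), both consecutive_operands calls return 1, the
   first destination does not overlap the remaining source, so the moves
   combine and 1 is returned with swap_operands set as above.  */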

/* Like reg_overlap_mentioned_p, but accepts a subreg rtx instead
   of a reg.  */

int
subreg_overlap_mentioned_p (subreg, x)
     rtx subreg, x;
{
  rtx reg = SUBREG_REG (subreg);
  int regno = REGNO (reg) + SUBREG_WORD (subreg);
  int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (subreg));

  return refers_to_regno_p (regno, endregno, x, 0);
}

/* Return 1 if OP0 is a consecutive operand to OP1, 2 if OP1 is a
   consecutive operand to OP0.

   This function is used to determine if addresses are consecutive,
   and therefore possible to combine to fewer instructions.  */

int
consecutive_operands (op0, op1)
     rtx op0, op1;
{
  RTX_CODE code0, code1;
  int cnst_diff;
  int regno_off0, regno_off1;

  code0 = GET_CODE (op0);
  code1 = GET_CODE (op1);

  regno_off0 = 0;
  if (code0 == SUBREG)
    {
      if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))) <= UNITS_PER_WORD)
        return 0;
      regno_off0 = SUBREG_WORD (op0);
      op0 = SUBREG_REG (op0);
      code0 = REG;
    }

  regno_off1 = 0;
  if (code1 == SUBREG)
    {
      if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1))) <= UNITS_PER_WORD)
        return 0;
      regno_off1 = SUBREG_WORD (op1);
      op1 = SUBREG_REG (op1);
      code1 = REG;
    }

  if (code0 != code1)
    return 0;

  switch (code0)
    {
    case CONST_INT:
      /* Cannot permit any symbolic constants, even if the consecutive
         operand is 0, since a movl really performs sign extension.  */
      if (code1 != CONST_INT)
        return 0;
      if ((INTVAL (op0) == 0 && INTVAL (op1) == 0)
          || (INTVAL (op0) == -1 && INTVAL (op1) == -1))
        return 3;
      if ((INTVAL (op0) == 0 && INTVAL (op1) > 0)
          || (INTVAL (op0) == -1 && INTVAL (op1) < 0))
        return 2;
      if ((INTVAL (op1) == 0 && INTVAL (op0) > 0)
          || (INTVAL (op1) == -1 && INTVAL (op0) < 0))
        return 1;
      return 0;

    case REG:
      regno_off0 = REGNO (op0) + regno_off0;
      regno_off1 = REGNO (op1) + regno_off1;

      cnst_diff = regno_off0 - regno_off1;
      if (cnst_diff == 1)
        {
          /* movl with the highest numbered parameter (local) register as
             source or destination doesn't wrap to the lowest numbered local
             (temporary) register.  */

          if (regno_off0 % 16 != 0)
            return 1;
          else
            return 0;
        }
      else if (cnst_diff == -1)
        {
          if (regno_off1 % 16 != 0)
            return 2;
          else
            return 0;
        }
      return 0;

    case MEM:
      op0 = XEXP (op0, 0);
      op1 = XEXP (op1, 0);
      if (GET_CODE (op0) == CONST)
        op0 = XEXP (op0, 0);
      if (GET_CODE (op1) == CONST)
        op1 = XEXP (op1, 0);

      cnst_diff = constant_diff (op0, op1);

      if (cnst_diff)
        {
          if (cnst_diff == 4)
            return 1;
          else if (cnst_diff == -4)
            return 2;
        }
      return 0;
    }
  return 0;
}
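
/* Worked cases (illustrative): for (reg:SI 3) and (reg:SI 2) the
   register numbers differ by 1, so 1 is returned (OP0 follows OP1).
   For (mem:SI (reg:SI 4)) and (mem:SI (plus:SI (reg:SI 4)
   (const_int 4))), constant_diff gives -4, so 2 is returned (OP1
   follows OP0).  For (const_int 0) and (const_int 0), 3 is returned:
   either order works, since movl sign-extends.  */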

/* Return the constant difference of the rtx expressions OP0 and OP1,
   or 0 if they don't have a constant difference.

   This function is used to determine if addresses are consecutive,
   and therefore possible to combine to fewer instructions.  */

int
constant_diff (op0, op1)
     rtx op0, op1;
{
  RTX_CODE code0, code1;
  int cnst_diff;

  code0 = GET_CODE (op0);
  code1 = GET_CODE (op1);

  if (code0 != code1)
    {
      if (code0 == PLUS)
        {
          if (GET_CODE (XEXP (op0, 1)) == CONST_INT
              && rtx_equal_p (op1, XEXP (op0, 0)))
            return INTVAL (XEXP (op0, 1));
        }
      else if (code1 == PLUS)
        {
          if (GET_CODE (XEXP (op1, 1)) == CONST_INT
              && rtx_equal_p (op0, XEXP (op1, 0)))
            return -INTVAL (XEXP (op1, 1));
        }
      return 0;
    }

  if (code0 == CONST_INT)
    return INTVAL (op0) - INTVAL (op1);

  if (code0 == PLUS)
    {
      cnst_diff = constant_diff (XEXP (op0, 0), XEXP (op1, 0));
      if (cnst_diff)
        return (rtx_equal_p (XEXP (op0, 1), XEXP (op1, 1)))
          ? cnst_diff : 0;
      cnst_diff = constant_diff (XEXP (op0, 1), XEXP (op1, 1));
      if (cnst_diff)
        return (rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0)))
          ? cnst_diff : 0;
    }

  return 0;
}
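
/* Worked cases (illustrative): constant_diff ((plus (reg 4)
   (const_int 4)), (reg 4)) takes the code0 == PLUS branch and returns
   4; with the arguments exchanged it returns -4; and for two PLUS
   expressions sharing (reg 4) with const_int terms 8 and 4, the
   recursive case returns 8 - 4 = 4.  */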

/* Return non-zero if we can tell that OP, a register, was last set by a
   sign extension from FROM_MODE, or by a load in FROM_MODE, scanning
   backward from INSN.  */

int
already_sign_extended (insn, from_mode, op)
     rtx insn;
     enum machine_mode from_mode;
     rtx op;
{
  rtx xinsn, xdest, xsrc;

  for (;;)
    {
      insn = PREV_INSN (insn);
      if (insn == 0)
        return 0;
      if (GET_CODE (insn) == NOTE || GET_CODE (insn) == JUMP_INSN)
        continue;
      if (GET_CODE (insn) == CALL_INSN && ! call_used_regs[REGNO (op)])
        continue;
      if (GET_CODE (insn) != INSN)
        return 0;
      xinsn = PATTERN (insn);

      if (GET_CODE (xinsn) != SET)
        return 0;

      xdest = SET_DEST (xinsn);
      xsrc = SET_SRC (xinsn);

      if (GET_CODE (xdest) == SUBREG)
        return 0;

      if ( ! REG_P (xdest))
        continue;

      if (REGNO (op) == REGNO (xdest)
          && ((GET_CODE (xsrc) == SIGN_EXTEND
               && GET_MODE (XEXP (xsrc, 0)) == from_mode)
              || (GET_CODE (xsrc) == MEM
                  && GET_MODE (xsrc) == from_mode)))
        return 1;

      /* The register is modified by another operation.  */
      if (reg_overlap_mentioned_p (xdest, op))
        return 0;
    }
}
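
/* An illustrative scan: asking about (reg:SI 3) with FROM_MODE == HImode
   returns 1 if the first relevant insn found above is, say,
   (set (reg:SI 3) (sign_extend:SI (mem:HI (reg:SI 4)))), but 0 as soon
   as an intervening insn such as (set (reg:SI 3) (plus:SI ...)) is
   seen, since the register is then modified by another operation.  */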

/* Return the assembler template for a double-word move from operands[1]
   to operands[0], outputting the move of the first word directly when
   two insns are needed.  */

char *
output_move_double (operands)
     rtx operands[];
{
  if (GET_CODE (operands[1]) == CONST_DOUBLE)
    {
      if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT)
        {
          /* In an integer, the low-order word is in CONST_DOUBLE_LOW.  */
          rtx const_op = operands[1];

          if ((CONST_DOUBLE_HIGH (const_op) == 0
               && CONST_DOUBLE_LOW (const_op) >= 0)
              || (CONST_DOUBLE_HIGH (const_op) == -1
                  && CONST_DOUBLE_LOW (const_op) < 0))
            {
              /* The value is reproduced by sign-extending the low word,
                 so a single movl will do.  */
              operands[1] = gen_rtx (CONST_INT, VOIDmode,
                                     CONST_DOUBLE_LOW (const_op));
              return "movl %1,%0";
            }
          operands[1] = gen_rtx (CONST_INT, VOIDmode,
                                 CONST_DOUBLE_HIGH (const_op));
          output_asm_insn ("movw %1,%0", operands);
          operands[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 1);
          operands[1] = gen_rtx (CONST_INT, VOIDmode,
                                 CONST_DOUBLE_LOW (const_op));
          return "movw %1,%0";
        }
      else
        {
          /* In a real, the low-address word is in CONST_DOUBLE_LOW.  */
          rtx const_op = operands[1];

          if ((CONST_DOUBLE_LOW (const_op) == 0
               && CONST_DOUBLE_HIGH (const_op) >= 0)
              || (CONST_DOUBLE_LOW (const_op) == -1
                  && CONST_DOUBLE_HIGH (const_op) < 0))
            {
              operands[1] = gen_rtx (CONST_INT, VOIDmode,
                                     CONST_DOUBLE_HIGH (const_op));
              return "movl %1,%0";
            }
          operands[1] = gen_rtx (CONST_INT, VOIDmode,
                                 CONST_DOUBLE_LOW (const_op));
          output_asm_insn ("movw %1,%0", operands);
          operands[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 1);
          operands[1] = gen_rtx (CONST_INT, VOIDmode,
                                 CONST_DOUBLE_HIGH (const_op));
          return "movw %1,%0";
        }
    }

  return "movl %1,%0";
}
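
/* A worked case (illustrative): for a DImode integer constant with
   high word 1 and low word 2, the single-movl test above fails, so the
   "movw %1,%0" template is output once with the high word, operands[0]
   is advanced to the following register, and the template is returned
   again for the low word.  A constant such as 5 (high word 0, low word
   positive) is instead handled by the single sign-extending movl.  */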

/* Output a shift insn, after having reduced the integer argument to
   avoid assembler warnings.  */

char *
output_shift (pattern, op2, mod)
     char *pattern;
     rtx op2;
     int mod;
{
  if (GET_CODE (op2) == CONST_INT)
    {
      int cnt = INTVAL (op2) % mod;

      if (cnt == 0)
        {
          cc_status = cc_prev_status;
          return "";
        }

      op2 = gen_rtx (CONST_INT, VOIDmode, cnt);
    }
  return pattern;
}
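
/* Illustrative uses (call sites assumed): with MOD == 32, a shift count
   of (const_int 33) is reduced to 1 before output, and a count of
   (const_int 32) becomes 0, in which case the insn is elided entirely:
   the empty template is returned and cc_status is restored, since no
   instruction was emitted to change the condition codes.  */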

/* Return non-zero if the code of this rtx pattern is a relop.  */

int
relop (op, mode)
     rtx op;
     enum machine_mode mode;
{
  switch (GET_CODE (op))
    {
    case EQ:
    case NE:
    case LT:
    case LE:
    case GE:
    case GT:
    case LTU:
    case LEU:
    case GEU:
    case GTU:
      return 1;
    }
  return 0;
}

/* Update cc_status to describe the condition codes after INSN, whose
   pattern is EXP.  */

void
notice_update_cc (EXP, INSN)
     rtx EXP, INSN;
{
  switch (GET_CODE (EXP))
    {
    case SET:
      switch (GET_CODE (SET_DEST (EXP)))
        {
        case CC0:
          cc_status.mdep = 0;
          cc_status.flags = 0;
          cc_status.value1 = 0;
          cc_status.value2 = SET_SRC (EXP);
          break;

        case PC:
          break;

        case REG:
          switch (GET_CODE (SET_SRC (EXP)))
            {
            case CALL:
              goto call;

            case MEM:
              if (GET_MODE (SET_SRC (EXP)) == QImode
                  || GET_MODE (SET_SRC (EXP)) == HImode)
                {
                  cc_status.mdep = 0;
                  cc_status.flags = CC_NO_OVERFLOW;
                  cc_status.value1 = SET_DEST (EXP);
                  cc_status.value2 = SET_SRC (EXP);
                  break;
                }
              /* else: Fall through.  */

            case CONST_INT:
            case SYMBOL_REF:
            case LABEL_REF:
            case CONST:
            case CONST_DOUBLE:
            case REG:
              if (cc_status.value1
                  && reg_overlap_mentioned_p (SET_DEST (EXP),
                                              cc_status.value1))
                cc_status.value1 = 0;
              if (cc_status.value2
                  && reg_overlap_mentioned_p (SET_DEST (EXP),
                                              cc_status.value2))
                cc_status.value2 = 0;
              break;

            case UDIV:
            case UMOD:
              cc_status.mdep = CC_VALID_FOR_UNSIGNED;
              cc_status.flags = CC_NO_OVERFLOW;
              cc_status.value1 = SET_DEST (EXP);
              cc_status.value2 = SET_SRC (EXP);
              break;

            default:
              cc_status.mdep = 0;
              cc_status.flags = CC_NO_OVERFLOW;
              cc_status.value1 = SET_DEST (EXP);
              cc_status.value2 = SET_SRC (EXP);
              break;
            }
          break;

        case MEM:
          switch (GET_CODE (SET_SRC (EXP)))
            {
            case REG:
              if (GET_MODE (SET_SRC (EXP)) == QImode
                  || GET_MODE (SET_SRC (EXP)) == HImode)
                {
                  cc_status.mdep = 0;
                  cc_status.flags = CC_NO_OVERFLOW;
                  cc_status.value1 = SET_DEST (EXP);
                  cc_status.value2 = SET_SRC (EXP);
                  break;
                }
              /* else: Fall through.  */

            case CONST_INT:
            case SYMBOL_REF:
            case LABEL_REF:
            case CONST:
            case CONST_DOUBLE:
            case MEM:
              /* Need to forget cc_status about memory positions each
                 time a memory store is made, even if the memory store
                 insn in question doesn't modify the condition codes.  */
              if (cc_status.value1
                  && GET_CODE (cc_status.value1) == MEM)
                cc_status.value1 = 0;
              if (cc_status.value2
                  && GET_CODE (cc_status.value2) == MEM)
                cc_status.value2 = 0;
              break;

            case SIGN_EXTEND:
            case FLOAT_EXTEND:
            case FLOAT_TRUNCATE:
            case FIX:
              cc_status.mdep = 0;
              cc_status.flags = CC_NO_OVERFLOW;
              cc_status.value1 = SET_DEST (EXP);
              cc_status.value2 = SET_SRC (EXP);
              break;

            default:
              CC_STATUS_INIT;
            }
          break;

        default:
          CC_STATUS_INIT;
        }
      break;

    case CALL:
    call:
      /* Do calls preserve the condition codes?  (At least forget
         cc_status expressions if they refer to registers
         not preserved across calls.  Also forget expressions
         about memory contents.)  */
      if (cc_status.value1
          && (refers_to_regno_p (PYR_TREG (0), PYR_TREG (15),
                                 cc_status.value1, 0)
              || GET_CODE (cc_status.value1) == MEM))
        cc_status.value1 = 0;
      if (cc_status.value2
          && (refers_to_regno_p (PYR_TREG (0), PYR_TREG (15),
                                 cc_status.value2, 0)
              || GET_CODE (cc_status.value2) == MEM))
        cc_status.value2 = 0;
      break;

    default:
      CC_STATUS_INIT;
    }
}
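
/* An illustrative consequence of the tracking above (example insns
   assumed): after (set (reg:SI 2) (plus:SI (reg:SI 2) (const_int 1))),
   cc_status records value1 = (reg:SI 2) with CC_NO_OVERFLOW, so final
   can delete a following compare of (reg:SI 2) against zero; a call or
   a store through memory instead reaches the cases that forget any
   cc_status component referring to call-clobbered registers or to
   memory.  */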

/* Restore the previous cc_status, forgetting any part of it that
   depended on OP.  */

void
forget_cc_if_dependent (op)
     rtx op;
{
  cc_status = cc_prev_status;
  if (cc_status.value1 && reg_overlap_mentioned_p (op, cc_status.value1))
    cc_status.value1 = 0;
  if (cc_status.value2 && reg_overlap_mentioned_p (op, cc_status.value2))
    cc_status.value2 = 0;
}