1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987, 1988, 91-97, 1998 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
25 #include "insn-config.h"
26 #include "insn-attr.h"
27 #include "insn-flags.h"
28 #include "insn-codes.h"
31 #include "hard-reg-set.h"
35 #ifndef STACK_PUSH_CODE
36 #ifdef STACK_GROWS_DOWNWARD
37 #define STACK_PUSH_CODE PRE_DEC
39 #define STACK_PUSH_CODE PRE_INC
43 /* Import from final.c: */
44 extern rtx
alter_subreg ();
46 static rtx
*find_single_use_1
PROTO((rtx
, rtx
*));
48 /* Nonzero means allow operands to be volatile.
49 This should be 0 if you are generating rtl, such as if you are calling
50 the functions in optabs.c and expmed.c (most of the time).
51 This should be 1 if all valid insns need to be recognized,
52 such as in regclass.c and final.c and reload.c.
54 init_recog and init_recog_no_volatile are responsible for setting this. */
58 /* On return from `constrain_operands', indicate which alternative
61 int which_alternative
;
63 /* Nonzero after end of reload pass.
64 Set to 1 or 0 by toplev.c.
65 Controls the significance of (SUBREG (MEM)). */
69 /* Initialize data used by the function `recog'.
70 This must be called once in the compilation of a function
71 before any insn recognition may be done in the function. */
74 init_recog_no_volatile ()
85 /* Try recognizing the instruction INSN,
86 and return the code number that results.
87 Remember the code so that repeated calls do not
88 need to spend the time for actual rerecognition.
90 This function is the normal interface to instruction recognition.
91 The automatically-generated function `recog' is normally called
92 through this one. (The only exception is in combine.c.) */
/* NOTE(review): extraction-garbled fragment -- the function header is
   missing from this view; per the comment above this body presumably
   belongs to `recog_memoized'.  INSN_CODE is used as the memo cache:
   it is only recomputed via `recog' when still negative.  */
98 if (INSN_CODE (insn
) < 0)
99 INSN_CODE (insn
) = recog (PATTERN (insn
), insn
, NULL_PTR
);
100 return INSN_CODE (insn
);
103 /* Check that X is an insn-body for an `asm' with operands
104 and that the operands mentioned in it are legitimate. */
/* NOTE(review): garbled fragment -- the function header's return type,
   local declarations and the early-return lines are missing from this
   view.  The visible logic decodes the asm operands into a stack
   buffer and validates each one.  */
107 check_asm_operands (x
)
110 int noperands
= asm_noperands (x
);
119 operands
= (rtx
*) alloca (noperands
* sizeof (rtx
));
120 decode_asm_operands (x
, operands
, NULL_PTR
, NULL_PTR
, NULL_PTR
);
122 for (i
= 0; i
< noperands
; i
++)
/* Every asm operand must satisfy general_operand (checked modeless).  */
123 if (!general_operand (operands
[i
], VOIDmode
))
129 /* Static data for the next two routines.
131 The maximum number of changes supported is defined as the maximum
132 number of operands times 5. This allows for repeated substitutions
133 inside complex indexed address, or, alternatively, changes in up
136 #define MAX_CHANGE_LOCS (MAX_RECOG_OPERANDS * 5)
/* Insns (or MEMs) whose pending changes are recorded.  */
138 static rtx change_objects
[MAX_CHANGE_LOCS
];
/* Saved INSN_CODEs so a cancelled change can restore them.  */
139 static int change_old_codes
[MAX_CHANGE_LOCS
];
/* Addresses of the locations within the objects that were changed.  */
140 static rtx
*change_locs
[MAX_CHANGE_LOCS
];
/* The rtx values originally stored at those locations.  */
141 static rtx change_olds
[MAX_CHANGE_LOCS
];
/* Number of changes currently buffered in the arrays above.  */
143 static int num_changes
= 0;
145 /* Validate a proposed change to OBJECT. LOC is the location in the rtl for
146 at which NEW will be placed. If OBJECT is zero, no validation is done,
147 the change is simply made.
149 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
150 will be called with the address and mode as parameters. If OBJECT is
151 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
154 IN_GROUP is non-zero if this is part of a group of changes that must be
155 performed as a group. In that case, the changes will be stored. The
156 function `apply_change_group' will validate and apply the changes.
158 If IN_GROUP is zero, this is a single change. Try to recognize the insn
159 or validate the memory reference with the change applied. If the result
160 is not valid for the machine, suppress the change and return zero.
161 Otherwise, perform the change and return 1. */
164 validate_change (object
, loc
, new, in_group
)
/* NOTE(review): garbled -- the declaration of OLD (presumably *LOC), the
   store of NEW into *LOC, and some early returns are missing from this
   view.  */
172 if (old
== new || rtx_equal_p (old
, new))
175 if (num_changes
>= MAX_CHANGE_LOCS
176 || (in_group
== 0 && num_changes
!= 0))
181 /* Save the information describing this change. */
182 change_objects
[num_changes
] = object
;
183 change_locs
[num_changes
] = loc
;
184 change_olds
[num_changes
] = old
;
186 if (object
&& GET_CODE (object
) != MEM
)
188 /* Set INSN_CODE to force rerecognition of insn. Save old code in
190 change_old_codes
[num_changes
] = INSN_CODE (object
);
191 INSN_CODE (object
) = -1;
196 /* If we are making a group of changes, return 1. Otherwise, validate the
197 change group we made. */
202 return apply_change_group ();
205 /* Apply a group of changes previously issued with `validate_change'.
206 Return 1 if all changes are valid, zero otherwise. */
209 apply_change_group ()
/* NOTE(review): garbled fragment -- the return type, local declarations
   and several return/cancel paths are missing from this view.  */
213 /* The changes have been applied and all INSN_CODEs have been reset to force
216 The changes are valid if we aren't given an object, or if we are
217 given a MEM and it still is a valid address, or if this is in insn
218 and it is recognized. In the latter case, if reload has completed,
219 we also require that the operands meet the constraints for
220 the insn. We do not allow modifying an ASM_OPERANDS after reload
221 has completed because verifying the constraints is too difficult. */
223 for (i
= 0; i
< num_changes
; i
++)
225 rtx object
= change_objects
[i
];
230 if (GET_CODE (object
) == MEM
)
232 if (! memory_address_p (GET_MODE (object
), XEXP (object
, 0)))
235 else if ((recog_memoized (object
) < 0
236 && (asm_noperands (PATTERN (object
)) < 0
237 || ! check_asm_operands (PATTERN (object
))
238 || reload_completed
))
240 && (insn_extract (object
),
241 ! constrain_operands (INSN_CODE (object
), 1))))
243 rtx pat
= PATTERN (object
);
245 /* Perhaps we couldn't recognize the insn because there were
246 extra CLOBBERs at the end. If so, try to re-recognize
247 without the last CLOBBER (later iterations will cause each of
248 them to be eliminated, in turn). But don't do this if we
249 have an ASM_OPERAND. */
250 if (GET_CODE (pat
) == PARALLEL
251 && GET_CODE (XVECEXP (pat
, 0, XVECLEN (pat
, 0) - 1)) == CLOBBER
252 && asm_noperands (PATTERN (object
)) < 0)
256 if (XVECLEN (pat
, 0) == 2)
257 newpat
= XVECEXP (pat
, 0, 0);
263 = gen_rtx_PARALLEL (VOIDmode
,
264 gen_rtvec (XVECLEN (pat
, 0) - 1));
265 for (j
= 0; j
< XVECLEN (newpat
, 0); j
++)
266 XVECEXP (newpat
, 0, j
) = XVECEXP (pat
, 0, j
);
269 /* Add a new change to this group to replace the pattern
270 with this new pattern. Then consider this change
271 as having succeeded. The change we added will
272 cause the entire call to fail if things remain invalid.
274 Note that this can lose if a later change than the one
275 we are processing specified &XVECEXP (PATTERN (object), 0, X)
276 but this shouldn't occur. */
278 validate_change (object
, &PATTERN (object
), newpat
, 1);
280 else if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
)
281 /* If this insn is a CLOBBER or USE, it is always valid, but is
289 if (i
== num_changes
)
301 /* Return the number of changes so far in the current group. */
/* NOTE(review): the body of `num_validated_changes' (presumably just
   `return num_changes;') is missing from this extraction.  */
304 num_validated_changes ()
309 /* Retract the changes numbered NUM and up. */
/* NOTE(review): garbled -- the `cancel_changes' header and the final
   `num_changes = num;' reset are missing from this view.  The loop
   below restores each saved rtx and INSN_CODE in reverse order.  */
317 /* Back out all the changes. Do this in the opposite order in which
319 for (i
= num_changes
- 1; i
>= num
; i
--)
321 *change_locs
[i
] = change_olds
[i
];
322 if (change_objects
[i
] && GET_CODE (change_objects
[i
]) != MEM
)
323 INSN_CODE (change_objects
[i
]) = change_old_codes
[i
];
328 /* Replace every occurrence of FROM in X with TO. Mark each change with
329 validate_change passing OBJECT. */
/* NOTE(review): heavily garbled -- the switch dispatch on CODE, the
   declarations of `fmt'/`i'/`j'/`newmem', several case labels and
   early returns are missing from this view.  Only the visible
   fragments are annotated below.  */
332 validate_replace_rtx_1 (loc
, from
, to
, object
)
334 rtx from
, to
, object
;
338 register rtx x
= *loc
;
339 enum rtx_code code
= GET_CODE (x
);
341 /* X matches FROM if it is the same rtx or they are both referring to the
342 same register in the same mode. Avoid calling rtx_equal_p unless the
343 operands look similar. */
346 || (GET_CODE (x
) == REG
&& GET_CODE (from
) == REG
347 && GET_MODE (x
) == GET_MODE (from
)
348 && REGNO (x
) == REGNO (from
))
349 || (GET_CODE (x
) == GET_CODE (from
) && GET_MODE (x
) == GET_MODE (from
)
350 && rtx_equal_p (x
, from
)))
352 validate_change (object
, loc
, to
, 1);
356 /* For commutative or comparison operations, try replacing each argument
357 separately and seeing if we made any changes. If so, put a constant
359 if (GET_RTX_CLASS (code
) == '<' || GET_RTX_CLASS (code
) == 'c')
361 int prev_changes
= num_changes
;
363 validate_replace_rtx_1 (&XEXP (x
, 0), from
, to
, object
);
364 validate_replace_rtx_1 (&XEXP (x
, 1), from
, to
, object
);
365 if (prev_changes
!= num_changes
&& CONSTANT_P (XEXP (x
, 0)))
367 validate_change (object
, loc
,
368 gen_rtx (GET_RTX_CLASS (code
) == 'c' ? code
369 : swap_condition (code
),
370 GET_MODE (x
), XEXP (x
, 1), XEXP (x
, 0)),
377 /* Note that if CODE's RTX_CLASS is "c" or "<" we will have already
378 done the substitution, otherwise we won't. */
383 /* If we have have a PLUS whose second operand is now a CONST_INT, use
384 plus_constant to try to simplify it. */
385 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
&& XEXP (x
, 1) == to
)
386 validate_change (object
, loc
, plus_constant (XEXP (x
, 0), INTVAL (to
)),
391 if (GET_CODE (to
) == CONST_INT
&& XEXP (x
, 1) == from
)
393 validate_change (object
, loc
,
394 plus_constant (XEXP (x
, 0), - INTVAL (to
)),
402 /* In these cases, the operation to be performed depends on the mode
403 of the operand. If we are replacing the operand with a VOIDmode
404 constant, we lose the information. So try to simplify the operation
405 in that case. If it fails, substitute in something that we know
406 won't be recognized. */
407 if (GET_MODE (to
) == VOIDmode
408 && (XEXP (x
, 0) == from
409 || (GET_CODE (XEXP (x
, 0)) == REG
&& GET_CODE (from
) == REG
410 && GET_MODE (XEXP (x
, 0)) == GET_MODE (from
)
411 && REGNO (XEXP (x
, 0)) == REGNO (from
))))
413 rtx
new = simplify_unary_operation (code
, GET_MODE (x
), to
,
416 new = gen_rtx_CLOBBER (GET_MODE (x
), const0_rtx
);
418 validate_change (object
, loc
, new, 1);
424 /* If we have a SUBREG of a register that we are replacing and we are
425 replacing it with a MEM, make a new MEM and try replacing the
426 SUBREG with it. Don't do this if the MEM has a mode-dependent address
427 or if we would be widening it. */
429 if (SUBREG_REG (x
) == from
430 && GET_CODE (from
) == REG
431 && GET_CODE (to
) == MEM
432 && ! mode_dependent_address_p (XEXP (to
, 0))
433 && ! MEM_VOLATILE_P (to
)
434 && GET_MODE_SIZE (GET_MODE (x
)) <= GET_MODE_SIZE (GET_MODE (to
)))
436 int offset
= SUBREG_WORD (x
) * UNITS_PER_WORD
;
437 enum machine_mode mode
= GET_MODE (x
);
440 if (BYTES_BIG_ENDIAN
)
441 offset
+= (MIN (UNITS_PER_WORD
,
442 GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))))
443 - MIN (UNITS_PER_WORD
, GET_MODE_SIZE (mode
)));
445 new = gen_rtx_MEM (mode
, plus_constant (XEXP (to
, 0), offset
));
446 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (to
);
447 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (to
);
448 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (to
);
449 validate_change (object
, loc
, new, 1);
456 /* If we are replacing a register with memory, try to change the memory
457 to be the mode required for memory in extract operations (this isn't
458 likely to be an insertion operation; if it was, nothing bad will
459 happen, we might just fail in some cases). */
461 if (XEXP (x
, 0) == from
&& GET_CODE (from
) == REG
&& GET_CODE (to
) == MEM
462 && GET_CODE (XEXP (x
, 1)) == CONST_INT
463 && GET_CODE (XEXP (x
, 2)) == CONST_INT
464 && ! mode_dependent_address_p (XEXP (to
, 0))
465 && ! MEM_VOLATILE_P (to
))
467 enum machine_mode wanted_mode
= VOIDmode
;
468 enum machine_mode is_mode
= GET_MODE (to
);
469 int width
= INTVAL (XEXP (x
, 1));
470 int pos
= INTVAL (XEXP (x
, 2));
473 if (code
== ZERO_EXTRACT
)
474 wanted_mode
= insn_operand_mode
[(int) CODE_FOR_extzv
][1];
477 if (code
== SIGN_EXTRACT
)
478 wanted_mode
= insn_operand_mode
[(int) CODE_FOR_extv
][1];
481 /* If we have a narrower mode, we can do something. */
482 if (wanted_mode
!= VOIDmode
483 && GET_MODE_SIZE (wanted_mode
) < GET_MODE_SIZE (is_mode
))
485 int offset
= pos
/ BITS_PER_UNIT
;
488 /* If the bytes and bits are counted differently, we
489 must adjust the offset. */
490 if (BYTES_BIG_ENDIAN
!= BITS_BIG_ENDIAN
)
491 offset
= (GET_MODE_SIZE (is_mode
) - GET_MODE_SIZE (wanted_mode
)
494 pos
%= GET_MODE_BITSIZE (wanted_mode
);
496 newmem
= gen_rtx_MEM (wanted_mode
,
497 plus_constant (XEXP (to
, 0), offset
));
498 RTX_UNCHANGING_P (newmem
) = RTX_UNCHANGING_P (to
);
499 MEM_VOLATILE_P (newmem
) = MEM_VOLATILE_P (to
);
500 MEM_IN_STRUCT_P (newmem
) = MEM_IN_STRUCT_P (to
);
502 validate_change (object
, &XEXP (x
, 2), GEN_INT (pos
), 1);
503 validate_change (object
, &XEXP (x
, 0), newmem
, 1);
/* Generic walk: recurse into every sub-expression ('e') and every
   element of every sub-vector ('E') of X.  */
513 fmt
= GET_RTX_FORMAT (code
);
514 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
517 validate_replace_rtx_1 (&XEXP (x
, i
), from
, to
, object
);
518 else if (fmt
[i
] == 'E')
519 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
520 validate_replace_rtx_1 (&XVECEXP (x
, i
, j
), from
, to
, object
);
524 /* Try replacing every occurrence of FROM in INSN with TO. After all
525 changes have been made, validate by seeing if INSN is still valid. */
/* NOTE(review): the return type and parameter declarations are missing
   from this extraction.  Substitutes throughout INSN's pattern as one
   change group, then validates the whole group at once.  */
528 validate_replace_rtx (from
, to
, insn
)
531 validate_replace_rtx_1 (&PATTERN (insn
), from
, to
, insn
);
532 return apply_change_group ();
536 /* Return 1 if the insn using CC0 set by INSN does not contain
537 any ordered tests applied to the condition codes.
538 EQ and NE tests do not count. */
/* NOTE(review): garbled -- the function header's return type and the
   conservative early return for a null NEXT (mentioned in the comment
   below) are missing from this view.  */
541 next_insn_tests_no_inequality (insn
)
544 register rtx next
= next_cc0_user (insn
);
546 /* If there is no next insn, we have to take the conservative choice. */
550 return ((GET_CODE (next
) == JUMP_INSN
551 || GET_CODE (next
) == INSN
552 || GET_CODE (next
) == CALL_INSN
)
553 && ! inequality_comparisons_p (PATTERN (next
)));
556 #if 0 /* This is useless since the insn that sets the cc's
557 must be followed immediately by the use of them. */
558 /* Return 1 if the CC value set up by INSN is not used. */
/* NOTE(review): dead code (inside #if 0), kept verbatim; the loop body's
   continue/return statements are also missing from this extraction.  */
561 next_insns_test_no_inequality (insn
)
564 register rtx next
= NEXT_INSN (insn
);
566 for (; next
!= 0; next
= NEXT_INSN (next
))
568 if (GET_CODE (next
) == CODE_LABEL
569 || GET_CODE (next
) == BARRIER
)
571 if (GET_CODE (next
) == NOTE
)
573 if (inequality_comparisons_p (PATTERN (next
)))
575 if (sets_cc0_p (PATTERN (next
)) == 1)
577 if (! reg_mentioned_p (cc0_rtx
, PATTERN (next
)))
585 /* This is used by find_single_use to locate an rtx that contains exactly one
586 use of DEST, which is typically either a REG or CC0. It returns a
587 pointer to the innermost rtx expression containing DEST. Appearances of
588 DEST that are being used to totally replace it are not counted. */
/* NOTE(review): garbled -- the static return type, the switch on CODE
   with its fast-path case labels, and the declarations of `x', `fmt',
   `result' and `this_result' are missing from this view.  */
591 find_single_use_1 (dest
, loc
)
596 enum rtx_code code
= GET_CODE (x
);
613 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
614 of a REG that occupies all of the REG, the insn uses DEST if
615 it is mentioned in the destination or the source. Otherwise, we
616 need just check the source. */
617 if (GET_CODE (SET_DEST (x
)) != CC0
618 && GET_CODE (SET_DEST (x
)) != PC
619 && GET_CODE (SET_DEST (x
)) != REG
620 && ! (GET_CODE (SET_DEST (x
)) == SUBREG
621 && GET_CODE (SUBREG_REG (SET_DEST (x
))) == REG
622 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x
))))
623 + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
)
624 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x
)))
625 + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
))))
628 return find_single_use_1 (dest
, &SET_SRC (x
));
632 return find_single_use_1 (dest
, &XEXP (x
, 0));
638 /* If it wasn't one of the common cases above, check each expression and
639 vector of this code. Look for a unique usage of DEST. */
641 fmt
= GET_RTX_FORMAT (code
);
642 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
646 if (dest
== XEXP (x
, i
)
647 || (GET_CODE (dest
) == REG
&& GET_CODE (XEXP (x
, i
)) == REG
648 && REGNO (dest
) == REGNO (XEXP (x
, i
))))
651 this_result
= find_single_use_1 (dest
, &XEXP (x
, i
));
654 result
= this_result
;
655 else if (this_result
)
656 /* Duplicate usage. */
659 else if (fmt
[i
] == 'E')
663 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
665 if (XVECEXP (x
, i
, j
) == dest
666 || (GET_CODE (dest
) == REG
667 && GET_CODE (XVECEXP (x
, i
, j
)) == REG
668 && REGNO (XVECEXP (x
, i
, j
)) == REGNO (dest
)))
671 this_result
= find_single_use_1 (dest
, &XVECEXP (x
, i
, j
));
674 result
= this_result
;
675 else if (this_result
)
684 /* See if DEST, produced in INSN, is used only a single time in the
685 sequel. If so, return a pointer to the innermost rtx expression in which
688 If PLOC is non-zero, *PLOC is set to the insn containing the single use.
690 This routine will return usually zero either before flow is called (because
691 there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
692 note can't be trusted).
694 If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
695 care about REG_DEAD notes or LOG_LINKS.
697 Otherwise, we find the single use by finding an insn that has a
698 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
699 only referenced once in that insn, we know that it must be the first
700 and last insn referencing DEST. */
/* NOTE(review): garbled -- the return type, local declarations, the CC0
   dispatch, the stores through PLOC, and several early `return 0's are
   missing from this view.  */
703 find_single_use (dest
, insn
, ploc
)
715 next
= NEXT_INSN (insn
);
717 || (GET_CODE (next
) != INSN
&& GET_CODE (next
) != JUMP_INSN
))
720 result
= find_single_use_1 (dest
, &PATTERN (next
));
727 if (reload_completed
|| reload_in_progress
|| GET_CODE (dest
) != REG
)
730 for (next
= next_nonnote_insn (insn
);
731 next
!= 0 && GET_CODE (next
) != CODE_LABEL
;
732 next
= next_nonnote_insn (next
))
733 if (GET_RTX_CLASS (GET_CODE (next
)) == 'i' && dead_or_set_p (next
, dest
))
735 for (link
= LOG_LINKS (next
); link
; link
= XEXP (link
, 1))
736 if (XEXP (link
, 0) == insn
)
741 result
= find_single_use_1 (dest
, &PATTERN (next
));
751 /* Return 1 if OP is a valid general operand for machine mode MODE.
752 This is either a register reference, a memory reference,
753 or a constant. In the case of a memory reference, the address
754 is checked for general validity for the target machine.
756 Register and memory references must have mode MODE in order to be valid,
757 but some constants have no machine mode and are valid for any mode.
759 If MODE is VOIDmode, OP is checked for validity for whatever mode
762 The main use of this function is as a predicate in match_operand
763 expressions in the machine description.
765 For an explanation of this function's behavior for registers of
766 class NO_REGS, see the comment for `register_operand'. */
/* NOTE(review): garbled -- the return type, the dispatch structure
   (presumably tests on CODE for constant/SUBREG/REG/MEM), the `win:'
   label and several return statements are missing from this view.  */
769 general_operand (op
, mode
)
771 enum machine_mode mode
;
773 register enum rtx_code code
= GET_CODE (op
);
774 int mode_altering_drug
= 0;
776 if (mode
== VOIDmode
)
777 mode
= GET_MODE (op
);
779 /* Don't accept CONST_INT or anything similar
780 if the caller wants something floating. */
781 if (GET_MODE (op
) == VOIDmode
&& mode
!= VOIDmode
782 && GET_MODE_CLASS (mode
) != MODE_INT
783 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
787 return ((GET_MODE (op
) == VOIDmode
|| GET_MODE (op
) == mode
)
788 #ifdef LEGITIMATE_PIC_OPERAND_P
789 && (! flag_pic
|| LEGITIMATE_PIC_OPERAND_P (op
))
791 && LEGITIMATE_CONSTANT_P (op
));
793 /* Except for certain constants with VOIDmode, already checked for,
794 OP's mode must match MODE if MODE specifies a mode. */
796 if (GET_MODE (op
) != mode
)
801 #ifdef INSN_SCHEDULING
802 /* On machines that have insn scheduling, we want all memory
803 reference to be explicit, so outlaw paradoxical SUBREGs. */
804 if (GET_CODE (SUBREG_REG (op
)) == MEM
805 && GET_MODE_SIZE (mode
) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op
))))
809 op
= SUBREG_REG (op
);
810 code
= GET_CODE (op
);
812 /* No longer needed, since (SUBREG (MEM...))
813 will load the MEM into a reload reg in the MEM's own mode. */
814 mode_altering_drug
= 1;
819 /* A register whose class is NO_REGS is not a general operand. */
820 return (REGNO (op
) >= FIRST_PSEUDO_REGISTER
821 || REGNO_REG_CLASS (REGNO (op
)) != NO_REGS
);
825 register rtx y
= XEXP (op
, 0);
826 if (! volatile_ok
&& MEM_VOLATILE_P (op
))
828 if (GET_CODE (y
) == ADDRESSOF
)
830 /* Use the mem's mode, since it will be reloaded thus. */
831 mode
= GET_MODE (op
);
832 GO_IF_LEGITIMATE_ADDRESS (mode
, y
, win
);
835 /* Pretend this is an operand for now; we'll run force_operand
836 on its replacement in fixup_var_refs_1. */
837 if (code
== ADDRESSOF
)
843 if (mode_altering_drug
)
844 return ! mode_dependent_address_p (XEXP (op
, 0));
848 /* Return 1 if OP is a valid memory address for a memory reference
851 The main use of this function is as a predicate in match_operand
852 expressions in the machine description. */
/* Delegates entirely to memory_address_p.  NOTE(review): the return
   type and the `rtx op;' parameter declaration are missing from this
   extraction.  */
855 address_operand (op
, mode
)
857 enum machine_mode mode
;
859 return memory_address_p (mode
, op
);
862 /* Return 1 if OP is a register reference of mode MODE.
863 If MODE is VOIDmode, accept a register in any mode.
865 The main use of this function is as a predicate in match_operand
866 expressions in the machine description.
868 As a special exception, registers whose class is NO_REGS are
869 not accepted by `register_operand'. The reason for this change
870 is to allow the representation of special architecture artifacts
871 (such as a condition code register) without extending the rtl
872 definitions. Since registers of class NO_REGS cannot be used
873 as registers in any case where register classes are examined,
874 it is most consistent to keep this function from accepting them. */
/* NOTE(review): garbled -- the return type and some early `return 0'
   lines are missing from this view.  */
877 register_operand (op
, mode
)
879 enum machine_mode mode
;
881 if (GET_MODE (op
) != mode
&& mode
!= VOIDmode
)
884 if (GET_CODE (op
) == SUBREG
)
886 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
887 because it is guaranteed to be reloaded into one.
888 Just make sure the MEM is valid in itself.
889 (Ideally, (SUBREG (MEM)...) should not exist after reload,
890 but currently it does result from (SUBREG (REG)...) where the
891 reg went on the stack.) */
892 if (! reload_completed
&& GET_CODE (SUBREG_REG (op
)) == MEM
)
893 return general_operand (op
, mode
);
895 #ifdef CLASS_CANNOT_CHANGE_SIZE
896 if (GET_CODE (SUBREG_REG (op
)) == REG
897 && REGNO (SUBREG_REG (op
)) < FIRST_PSEUDO_REGISTER
898 && TEST_HARD_REG_BIT (reg_class_contents
[(int) CLASS_CANNOT_CHANGE_SIZE
],
899 REGNO (SUBREG_REG (op
)))
900 && (GET_MODE_SIZE (mode
)
901 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (op
))))
902 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op
))) != MODE_COMPLEX_INT
903 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op
))) != MODE_COMPLEX_FLOAT
)
907 op
= SUBREG_REG (op
);
910 /* We don't consider registers whose class is NO_REGS
911 to be a register operand. */
912 return (GET_CODE (op
) == REG
913 && (REGNO (op
) >= FIRST_PSEUDO_REGISTER
914 || REGNO_REG_CLASS (REGNO (op
)) != NO_REGS
));
917 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
918 or a hard register. */
/* Accepts a SCRATCH, or a REG numbered below FIRST_PSEUDO_REGISTER
   (i.e. a hard register), in the requested mode.  NOTE(review): the
   return type and parameter declaration lines are missing from this
   extraction.  */
921 scratch_operand (op
, mode
)
923 enum machine_mode mode
;
925 return (GET_MODE (op
) == mode
926 && (GET_CODE (op
) == SCRATCH
927 || (GET_CODE (op
) == REG
928 && REGNO (op
) < FIRST_PSEUDO_REGISTER
)));
931 /* Return 1 if OP is a valid immediate operand for mode MODE.
933 The main use of this function is as a predicate in match_operand
934 expressions in the machine description. */
/* NOTE(review): the return type and the early `return 0' after the
   float-mode check are missing from this extraction.  */
937 immediate_operand (op
, mode
)
939 enum machine_mode mode
;
941 /* Don't accept CONST_INT or anything similar
942 if the caller wants something floating. */
943 if (GET_MODE (op
) == VOIDmode
&& mode
!= VOIDmode
944 && GET_MODE_CLASS (mode
) != MODE_INT
945 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
948 return (CONSTANT_P (op
)
949 && (GET_MODE (op
) == mode
|| mode
== VOIDmode
950 || GET_MODE (op
) == VOIDmode
)
951 #ifdef LEGITIMATE_PIC_OPERAND_P
952 && (! flag_pic
|| LEGITIMATE_PIC_OPERAND_P (op
))
954 && LEGITIMATE_CONSTANT_P (op
));
957 /* Returns 1 if OP is an operand that is a CONST_INT. */
/* MODE is not examined here: any CONST_INT matches.  NOTE(review): the
   return type and `rtx op;' declaration are missing from this
   extraction.  */
960 const_int_operand (op
, mode
)
962 enum machine_mode mode
;
964 return GET_CODE (op
) == CONST_INT
;
967 /* Returns 1 if OP is an operand that is a constant integer or constant
968 floating-point number. */
/* Accepts CONST_DOUBLE or CONST_INT whose mode matches MODE (VOIDmode
   on either side matches anything).  NOTE(review): the return type and
   the early `return 0' are missing from this extraction.  */
971 const_double_operand (op
, mode
)
973 enum machine_mode mode
;
975 /* Don't accept CONST_INT or anything similar
976 if the caller wants something floating. */
977 if (GET_MODE (op
) == VOIDmode
&& mode
!= VOIDmode
978 && GET_MODE_CLASS (mode
) != MODE_INT
979 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
982 return ((GET_CODE (op
) == CONST_DOUBLE
|| GET_CODE (op
) == CONST_INT
)
983 && (mode
== VOIDmode
|| GET_MODE (op
) == mode
984 || GET_MODE (op
) == VOIDmode
));
987 /* Return 1 if OP is a general operand that is not an immediate operand. */
/* general_operand minus anything CONSTANT_P.  NOTE(review): the return
   type and `rtx op;' declaration are missing from this extraction.  */
990 nonimmediate_operand (op
, mode
)
992 enum machine_mode mode
;
994 return (general_operand (op
, mode
) && ! CONSTANT_P (op
));
997 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
/* NOTE(review): garbled -- the return type and several early `return 0'
   lines are missing from this view.  The constant branch mirrors
   immediate_operand; the rest mirrors register_operand.  */
1000 nonmemory_operand (op
, mode
)
1002 enum machine_mode mode
;
1004 if (CONSTANT_P (op
))
1006 /* Don't accept CONST_INT or anything similar
1007 if the caller wants something floating. */
1008 if (GET_MODE (op
) == VOIDmode
&& mode
!= VOIDmode
1009 && GET_MODE_CLASS (mode
) != MODE_INT
1010 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
1013 return ((GET_MODE (op
) == VOIDmode
|| GET_MODE (op
) == mode
)
1014 #ifdef LEGITIMATE_PIC_OPERAND_P
1015 && (! flag_pic
|| LEGITIMATE_PIC_OPERAND_P (op
))
1017 && LEGITIMATE_CONSTANT_P (op
));
1020 if (GET_MODE (op
) != mode
&& mode
!= VOIDmode
)
1023 if (GET_CODE (op
) == SUBREG
)
1025 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1026 because it is guaranteed to be reloaded into one.
1027 Just make sure the MEM is valid in itself.
1028 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1029 but currently it does result from (SUBREG (REG)...) where the
1030 reg went on the stack.) */
1031 if (! reload_completed
&& GET_CODE (SUBREG_REG (op
)) == MEM
)
1032 return general_operand (op
, mode
);
1033 op
= SUBREG_REG (op
);
1036 /* We don't consider registers whose class is NO_REGS
1037 to be a register operand. */
1038 return (GET_CODE (op
) == REG
1039 && (REGNO (op
) >= FIRST_PSEUDO_REGISTER
1040 || REGNO_REG_CLASS (REGNO (op
)) != NO_REGS
));
1043 /* Return 1 if OP is a valid operand that stands for pushing a
1044 value of mode MODE onto the stack.
1046 The main use of this function is as a predicate in match_operand
1047 expressions in the machine description. */
/* NOTE(review): garbled -- the return type, `return 0' lines, and the
   step that narrows OP to its address (presumably `op = XEXP (op, 0);'
   before the STACK_PUSH_CODE test) are missing from this view.  */
1050 push_operand (op
, mode
)
1052 enum machine_mode mode
;
1054 if (GET_CODE (op
) != MEM
)
1057 if (GET_MODE (op
) != mode
)
1062 if (GET_CODE (op
) != STACK_PUSH_CODE
)
1065 return XEXP (op
, 0) == stack_pointer_rtx
;
1068 /* Return 1 if ADDR is a valid memory address for mode MODE. */
/* NOTE(review): garbled -- the return type, the `return 0' fall-through
   and the `win:' label that GO_IF_LEGITIMATE_ADDRESS jumps to are
   missing from this view.  ADDRESSOF is accepted unconditionally.  */
1071 memory_address_p (mode
, addr
)
1072 enum machine_mode mode
;
1075 if (GET_CODE (addr
) == ADDRESSOF
)
1078 GO_IF_LEGITIMATE_ADDRESS (mode
, addr
, win
);
1085 /* Return 1 if OP is a valid memory reference with mode MODE,
1086 including a valid address.
1088 The main use of this function is as a predicate in match_operand
1089 expressions in the machine description. */
/* NOTE(review): the return type, the declaration of INNER (presumably
   initialized to OP), and a `return 0' are missing from this
   extraction.  After reload, a SUBREG of a MEM also counts.  */
1092 memory_operand (op
, mode
)
1094 enum machine_mode mode
;
1098 if (! reload_completed
)
1099 /* Note that no SUBREG is a memory operand before end of reload pass,
1100 because (SUBREG (MEM...)) forces reloading into a register. */
1101 return GET_CODE (op
) == MEM
&& general_operand (op
, mode
);
1103 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1107 if (GET_CODE (inner
) == SUBREG
)
1108 inner
= SUBREG_REG (inner
);
1110 return (GET_CODE (inner
) == MEM
&& general_operand (op
, mode
));
1113 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1114 that is, a memory reference whose address is a general_operand. */
/* NOTE(review): garbled -- the return type and some `return 0' lines
   are missing from this view.  The SUBREG branch compensates for the
   byte offset a big-endian SUBREG-of-MEM implies.  */
1117 indirect_operand (op
, mode
)
1119 enum machine_mode mode
;
1121 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1122 if (! reload_completed
1123 && GET_CODE (op
) == SUBREG
&& GET_CODE (SUBREG_REG (op
)) == MEM
)
1125 register int offset
= SUBREG_WORD (op
) * UNITS_PER_WORD
;
1126 rtx inner
= SUBREG_REG (op
);
1128 if (BYTES_BIG_ENDIAN
)
1129 offset
-= (MIN (UNITS_PER_WORD
, GET_MODE_SIZE (GET_MODE (op
)))
1130 - MIN (UNITS_PER_WORD
, GET_MODE_SIZE (GET_MODE (inner
))));
1132 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1135 /* The only way that we can have a general_operand as the resulting
1136 address is if OFFSET is zero and the address already is an operand
1137 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1140 return ((offset
== 0 && general_operand (XEXP (inner
, 0), Pmode
))
1141 || (GET_CODE (XEXP (inner
, 0)) == PLUS
1142 && GET_CODE (XEXP (XEXP (inner
, 0), 1)) == CONST_INT
1143 && INTVAL (XEXP (XEXP (inner
, 0), 1)) == -offset
1144 && general_operand (XEXP (XEXP (inner
, 0), 0), Pmode
)));
1147 return (GET_CODE (op
) == MEM
1148 && memory_operand (op
, mode
)
1149 && general_operand (XEXP (op
, 0), Pmode
));
1152 /* Return 1 if this is a comparison operator. This allows the use of
1153 MATCH_OPERATOR to recognize all the branch insns. */
/* True when OP's rtx class is '<' and its mode matches MODE (or MODE
   is VOIDmode).  NOTE(review): the return type and `rtx op;'
   declaration are missing from this extraction.  */
1156 comparison_operator (op
, mode
)
1158 enum machine_mode mode
;
1160 return ((mode
== VOIDmode
|| GET_MODE (op
) == mode
)
1161 && GET_RTX_CLASS (GET_CODE (op
)) == '<');
1164 /* If BODY is an insn body that uses ASM_OPERANDS,
1165 return the number of operands (both input and output) in the insn.
1166 Otherwise return -1. */
/* NOTE(review): garbled -- the return type, the declarations of `i' and
   `n_sets', and several `return -1' lines are missing from this view.
   The cases below distinguish 0, 1 and multiple asm outputs.  */
1169 asm_noperands (body
)
1172 if (GET_CODE (body
) == ASM_OPERANDS
)
1173 /* No output operands: return number of input operands. */
1174 return ASM_OPERANDS_INPUT_LENGTH (body
);
1175 if (GET_CODE (body
) == SET
&& GET_CODE (SET_SRC (body
)) == ASM_OPERANDS
)
1176 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1177 return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body
)) + 1;
1178 else if (GET_CODE (body
) == PARALLEL
1179 && GET_CODE (XVECEXP (body
, 0, 0)) == SET
1180 && GET_CODE (SET_SRC (XVECEXP (body
, 0, 0))) == ASM_OPERANDS
)
1182 /* Multiple output operands, or 1 output plus some clobbers:
1183 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1187 /* Count backwards through CLOBBERs to determine number of SETs. */
1188 for (i
= XVECLEN (body
, 0); i
> 0; i
--)
1190 if (GET_CODE (XVECEXP (body
, 0, i
- 1)) == SET
)
1192 if (GET_CODE (XVECEXP (body
, 0, i
- 1)) != CLOBBER
)
1196 /* N_SETS is now number of output operands. */
1199 /* Verify that all the SETs we have
1200 came from a single original asm_operands insn
1201 (so that invalid combinations are blocked). */
1202 for (i
= 0; i
< n_sets
; i
++)
1204 rtx elt
= XVECEXP (body
, 0, i
);
1205 if (GET_CODE (elt
) != SET
)
1207 if (GET_CODE (SET_SRC (elt
)) != ASM_OPERANDS
)
1209 /* If these ASM_OPERANDS rtx's came from different original insns
1210 then they aren't allowed together. */
1211 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt
))
1212 != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body
, 0, 0))))
1215 return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body
, 0, 0)))
1218 else if (GET_CODE (body
) == PARALLEL
1219 && GET_CODE (XVECEXP (body
, 0, 0)) == ASM_OPERANDS
)
1221 /* 0 outputs, but some clobbers:
1222 body is [(asm_operands ...) (clobber (reg ...))...]. */
1225 /* Make sure all the other parallel things really are clobbers. */
1226 for (i
= XVECLEN (body
, 0) - 1; i
> 0; i
--)
1227 if (GET_CODE (XVECEXP (body
, 0, i
)) != CLOBBER
)
1230 return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body
, 0, 0));
1236 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1237 copy its operands (both input and output) into the vector OPERANDS,
1238 the locations of the operands within the insn into the vector OPERAND_LOCS,
1239 and the constraints for the operands into CONSTRAINTS.
1240 Write the modes of the operands into MODES.
1241 Return the assembler-template.
1243 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1244 we don't store that info. */
1247 decode_asm_operands (body
, operands
, operand_locs
, constraints
, modes
)
1252 enum machine_mode
*modes
;
1258 if (GET_CODE (body
) == SET
&& GET_CODE (SET_SRC (body
)) == ASM_OPERANDS
)
1260 rtx asmop
= SET_SRC (body
);
1261 /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
1263 noperands
= ASM_OPERANDS_INPUT_LENGTH (asmop
) + 1;
1265 for (i
= 1; i
< noperands
; i
++)
1268 operand_locs
[i
] = &ASM_OPERANDS_INPUT (asmop
, i
- 1);
1270 operands
[i
] = ASM_OPERANDS_INPUT (asmop
, i
- 1);
1272 constraints
[i
] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop
, i
- 1);
1274 modes
[i
] = ASM_OPERANDS_INPUT_MODE (asmop
, i
- 1);
1277 /* The output is in the SET.
1278 Its constraint is in the ASM_OPERANDS itself. */
1280 operands
[0] = SET_DEST (body
);
1282 operand_locs
[0] = &SET_DEST (body
);
1284 constraints
[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop
);
1286 modes
[0] = GET_MODE (SET_DEST (body
));
1287 template = ASM_OPERANDS_TEMPLATE (asmop
);
1289 else if (GET_CODE (body
) == ASM_OPERANDS
)
1292 /* No output operands: BODY is (asm_operands ....). */
1294 noperands
= ASM_OPERANDS_INPUT_LENGTH (asmop
);
1296 /* The input operands are found in the 1st element vector. */
1297 /* Constraints for inputs are in the 2nd element vector. */
1298 for (i
= 0; i
< noperands
; i
++)
1301 operand_locs
[i
] = &ASM_OPERANDS_INPUT (asmop
, i
);
1303 operands
[i
] = ASM_OPERANDS_INPUT (asmop
, i
);
1305 constraints
[i
] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop
, i
);
1307 modes
[i
] = ASM_OPERANDS_INPUT_MODE (asmop
, i
);
1309 template = ASM_OPERANDS_TEMPLATE (asmop
);
1311 else if (GET_CODE (body
) == PARALLEL
1312 && GET_CODE (XVECEXP (body
, 0, 0)) == SET
)
1314 rtx asmop
= SET_SRC (XVECEXP (body
, 0, 0));
1315 int nparallel
= XVECLEN (body
, 0); /* Includes CLOBBERs. */
1316 int nin
= ASM_OPERANDS_INPUT_LENGTH (asmop
);
1317 int nout
= 0; /* Does not include CLOBBERs. */
1319 /* At least one output, plus some CLOBBERs. */
1321 /* The outputs are in the SETs.
1322 Their constraints are in the ASM_OPERANDS itself. */
1323 for (i
= 0; i
< nparallel
; i
++)
1325 if (GET_CODE (XVECEXP (body
, 0, i
)) == CLOBBER
)
1326 break; /* Past last SET */
1329 operands
[i
] = SET_DEST (XVECEXP (body
, 0, i
));
1331 operand_locs
[i
] = &SET_DEST (XVECEXP (body
, 0, i
));
1333 constraints
[i
] = XSTR (SET_SRC (XVECEXP (body
, 0, i
)), 1);
1335 modes
[i
] = GET_MODE (SET_DEST (XVECEXP (body
, 0, i
)));
1339 for (i
= 0; i
< nin
; i
++)
1342 operand_locs
[i
+ nout
] = &ASM_OPERANDS_INPUT (asmop
, i
);
1344 operands
[i
+ nout
] = ASM_OPERANDS_INPUT (asmop
, i
);
1346 constraints
[i
+ nout
] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop
, i
);
1348 modes
[i
+ nout
] = ASM_OPERANDS_INPUT_MODE (asmop
, i
);
1351 template = ASM_OPERANDS_TEMPLATE (asmop
);
1353 else if (GET_CODE (body
) == PARALLEL
1354 && GET_CODE (XVECEXP (body
, 0, 0)) == ASM_OPERANDS
)
1356 /* No outputs, but some CLOBBERs. */
1358 rtx asmop
= XVECEXP (body
, 0, 0);
1359 int nin
= ASM_OPERANDS_INPUT_LENGTH (asmop
);
1361 for (i
= 0; i
< nin
; i
++)
1364 operand_locs
[i
] = &ASM_OPERANDS_INPUT (asmop
, i
);
1366 operands
[i
] = ASM_OPERANDS_INPUT (asmop
, i
);
1368 constraints
[i
] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop
, i
);
1370 modes
[i
] = ASM_OPERANDS_INPUT_MODE (asmop
, i
);
1373 template = ASM_OPERANDS_TEMPLATE (asmop
);
1379 /* Given an rtx *P, if it is a sum containing an integer constant term,
1380 return the location (type rtx *) of the pointer to that constant term.
1381 Otherwise, return a null pointer. */
1384 find_constant_term_loc (p
)
1388 register enum rtx_code code
= GET_CODE (*p
);
1390 /* If *P IS such a constant term, P is its location. */
1392 if (code
== CONST_INT
|| code
== SYMBOL_REF
|| code
== LABEL_REF
1396 /* Otherwise, if not a sum, it has no constant term. */
1398 if (GET_CODE (*p
) != PLUS
)
1401 /* If one of the summands is constant, return its location. */
1403 if (XEXP (*p
, 0) && CONSTANT_P (XEXP (*p
, 0))
1404 && XEXP (*p
, 1) && CONSTANT_P (XEXP (*p
, 1)))
1407 /* Otherwise, check each summand for containing a constant term. */
1409 if (XEXP (*p
, 0) != 0)
1411 tem
= find_constant_term_loc (&XEXP (*p
, 0));
1416 if (XEXP (*p
, 1) != 0)
1418 tem
= find_constant_term_loc (&XEXP (*p
, 1));
1426 /* Return 1 if OP is a memory reference
1427 whose address contains no side effects
1428 and remains valid after the addition
1429 of a positive integer less than the
1430 size of the object being referenced.
1432 We assume that the original address is valid and do not check it.
1434 This uses strict_memory_address_p as a subroutine, so
1435 don't use it before reload. */
1438 offsettable_memref_p (op
)
1441 return ((GET_CODE (op
) == MEM
)
1442 && offsettable_address_p (1, GET_MODE (op
), XEXP (op
, 0)));
1445 /* Similar, but don't require a strictly valid mem ref:
1446 consider pseudo-regs valid as index or base regs. */
1449 offsettable_nonstrict_memref_p (op
)
1452 return ((GET_CODE (op
) == MEM
)
1453 && offsettable_address_p (0, GET_MODE (op
), XEXP (op
, 0)));
1456 /* Return 1 if Y is a memory address which contains no side effects
1457 and would remain valid after the addition of a positive integer
1458 less than the size of that mode.
1460 We assume that the original address is valid and do not check it.
1461 We do check that it is valid for narrower modes.
1463 If STRICTP is nonzero, we require a strictly valid address,
1464 for the sake of use in reload.c. */
1467 offsettable_address_p (strictp
, mode
, y
)
1469 enum machine_mode mode
;
1472 register enum rtx_code ycode
= GET_CODE (y
);
1476 int (*addressp
) () = (strictp
? strict_memory_address_p
: memory_address_p
);
1478 if (CONSTANT_ADDRESS_P (y
))
1481 /* Adjusting an offsettable address involves changing to a narrower mode.
1482 Make sure that's OK. */
1484 if (mode_dependent_address_p (y
))
1487 /* If the expression contains a constant term,
1488 see if it remains valid when max possible offset is added. */
1490 if ((ycode
== PLUS
) && (y2
= find_constant_term_loc (&y1
)))
1495 *y2
= plus_constant (*y2
, GET_MODE_SIZE (mode
) - 1);
1496 /* Use QImode because an odd displacement may be automatically invalid
1497 for any wider mode. But it should be valid for a single byte. */
1498 good
= (*addressp
) (QImode
, y
);
1500 /* In any case, restore old contents of memory. */
1505 if (ycode
== PRE_DEC
|| ycode
== PRE_INC
1506 || ycode
== POST_DEC
|| ycode
== POST_INC
)
1509 /* The offset added here is chosen as the maximum offset that
1510 any instruction could need to add when operating on something
1511 of the specified mode. We assume that if Y and Y+c are
1512 valid addresses then so is Y+d for all 0<d<c. */
1514 z
= plus_constant_for_output (y
, GET_MODE_SIZE (mode
) - 1);
1516 /* Use QImode because an odd displacement may be automatically invalid
1517 for any wider mode. But it should be valid for a single byte. */
1518 return (*addressp
) (QImode
, z
);
1521 /* Return 1 if ADDR is an address-expression whose effect depends
1522 on the mode of the memory reference it is used in.
1524 Autoincrement addressing is a typical example of mode-dependence
1525 because the amount of the increment depends on the mode. */
1528 mode_dependent_address_p (addr
)
1531 GO_IF_MODE_DEPENDENT_ADDRESS (addr
, win
);
1537 /* Return 1 if OP is a general operand
1538 other than a memory ref with a mode dependent address. */
1541 mode_independent_operand (op
, mode
)
1542 enum machine_mode mode
;
1547 if (! general_operand (op
, mode
))
1550 if (GET_CODE (op
) != MEM
)
1553 addr
= XEXP (op
, 0);
1554 GO_IF_MODE_DEPENDENT_ADDRESS (addr
, lose
);
1560 /* Given an operand OP that is a valid memory reference
1561 which satisfies offsettable_memref_p,
1562 return a new memory reference whose address has been adjusted by OFFSET.
1563 OFFSET should be positive and less than the size of the object referenced.
1567 adj_offsettable_operand (op
, offset
)
1571 register enum rtx_code code
= GET_CODE (op
);
1575 register rtx y
= XEXP (op
, 0);
1578 if (CONSTANT_ADDRESS_P (y
))
1580 new = gen_rtx_MEM (GET_MODE (op
),
1581 plus_constant_for_output (y
, offset
));
1582 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op
);
1586 if (GET_CODE (y
) == PLUS
)
1589 register rtx
*const_loc
;
1593 const_loc
= find_constant_term_loc (&z
);
1596 *const_loc
= plus_constant_for_output (*const_loc
, offset
);
1601 new = gen_rtx_MEM (GET_MODE (op
), plus_constant_for_output (y
, offset
));
1602 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op
);
1608 #ifdef REGISTER_CONSTRAINTS
1610 /* Check the operands of an insn (found in recog_operands)
1611 against the insn's operand constraints (found via INSN_CODE_NUM)
1612 and return 1 if they are valid.
1614 WHICH_ALTERNATIVE is set to a number which indicates which
1615 alternative of constraints was matched: 0 for the first alternative,
1616 1 for the next, etc.
1618 In addition, when two operands are match
1619 and it happens that the output operand is (reg) while the
1620 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
1621 make the output operand look like the input.
1622 This is because the output operand is the one the template will print.
1624 This is used in final, just before printing the assembler code and by
1625 the routines that determine an insn's attribute.
1627 If STRICT is a positive non-zero value, it means that we have been
1628 called after reload has been completed. In that case, we must
1629 do all checks strictly. If it is zero, it means that we have been called
1630 before reload has completed. In that case, we first try to see if we can
1631 find an alternative that matches strictly. If not, we try again, this
1632 time assuming that reload will fix up the insn. This provides a "best
1633 guess" for the alternative and is used to compute attributes of insns prior
1634 to reload. A negative value of STRICT is used for this internal call. */
1642 constrain_operands (insn_code_num
, strict
)
1646 char *constraints
[MAX_RECOG_OPERANDS
];
1647 int matching_operands
[MAX_RECOG_OPERANDS
];
1648 enum op_type
{OP_IN
, OP_OUT
, OP_INOUT
} op_types
[MAX_RECOG_OPERANDS
];
1649 int earlyclobber
[MAX_RECOG_OPERANDS
];
1651 int noperands
= insn_n_operands
[insn_code_num
];
1653 struct funny_match funny_match
[MAX_RECOG_OPERANDS
];
1654 int funny_match_index
;
1655 int nalternatives
= insn_n_alternatives
[insn_code_num
];
1657 if (noperands
== 0 || nalternatives
== 0)
1660 for (c
= 0; c
< noperands
; c
++)
1662 constraints
[c
] = insn_operand_constraint
[insn_code_num
][c
];
1663 matching_operands
[c
] = -1;
1664 op_types
[c
] = OP_IN
;
1667 which_alternative
= 0;
1669 while (which_alternative
< nalternatives
)
1673 funny_match_index
= 0;
1675 for (opno
= 0; opno
< noperands
; opno
++)
1677 register rtx op
= recog_operand
[opno
];
1678 enum machine_mode mode
= GET_MODE (op
);
1679 register char *p
= constraints
[opno
];
1684 earlyclobber
[opno
] = 0;
1686 /* A unary operator may be accepted by the predicate, but it
1687 is irrelevant for matching constraints. */
1688 if (GET_RTX_CLASS (GET_CODE (op
)) == '1')
1691 if (GET_CODE (op
) == SUBREG
)
1693 if (GET_CODE (SUBREG_REG (op
)) == REG
1694 && REGNO (SUBREG_REG (op
)) < FIRST_PSEUDO_REGISTER
)
1695 offset
= SUBREG_WORD (op
);
1696 op
= SUBREG_REG (op
);
1699 /* An empty constraint or empty alternative
1700 allows anything which matched the pattern. */
1701 if (*p
== 0 || *p
== ',')
1704 while (*p
&& (c
= *p
++) != ',')
1707 case '?': case '!': case '*': case '%':
1711 /* Ignore rest of this alternative as far as
1712 constraint checking is concerned. */
1713 while (*p
&& *p
!= ',')
1718 op_types
[opno
] = OP_OUT
;
1722 op_types
[opno
] = OP_INOUT
;
1726 earlyclobber
[opno
] = 1;
1729 case '0': case '1': case '2': case '3': case '4':
1730 case '5': case '6': case '7': case '8': case '9':
1732 /* This operand must be the same as a previous one.
1733 This kind of constraint is used for instructions such
1734 as add when they take only two operands.
1736 Note that the lower-numbered operand is passed first.
1738 If we are not testing strictly, assume that this constraint
1739 will be satisfied. */
1743 val
= operands_match_p (recog_operand
[c
- '0'],
1744 recog_operand
[opno
]);
1746 matching_operands
[opno
] = c
- '0';
1747 matching_operands
[c
- '0'] = opno
;
1751 /* If output is *x and input is *--x,
1752 arrange later to change the output to *--x as well,
1753 since the output op is the one that will be printed. */
1754 if (val
== 2 && strict
> 0)
1756 funny_match
[funny_match_index
].this = opno
;
1757 funny_match
[funny_match_index
++].other
= c
- '0';
1762 /* p is used for address_operands. When we are called by
1763 gen_reload, no one will have checked that the address is
1764 strictly valid, i.e., that all pseudos requiring hard regs
1765 have gotten them. */
1767 || (strict_memory_address_p
1768 (insn_operand_mode
[insn_code_num
][opno
], op
)))
1772 /* No need to check general_operand again;
1773 it was done in insn-recog.c. */
1775 /* Anything goes unless it is a REG and really has a hard reg
1776 but the hard reg is not in the class GENERAL_REGS. */
1778 || GENERAL_REGS
== ALL_REGS
1779 || GET_CODE (op
) != REG
1780 || (reload_in_progress
1781 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)
1782 || reg_fits_class_p (op
, GENERAL_REGS
, offset
, mode
))
1789 && GET_CODE (op
) == REG
1790 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)
1791 || (strict
== 0 && GET_CODE (op
) == SCRATCH
)
1792 || (GET_CODE (op
) == REG
1793 && ((GENERAL_REGS
== ALL_REGS
1794 && REGNO (op
) < FIRST_PSEUDO_REGISTER
)
1795 || reg_fits_class_p (op
, GENERAL_REGS
,
1801 /* This is used for a MATCH_SCRATCH in the cases when
1802 we don't actually need anything. So anything goes
1808 if (GET_CODE (op
) == MEM
1809 /* Before reload, accept what reload can turn into mem. */
1810 || (strict
< 0 && CONSTANT_P (op
))
1811 /* During reload, accept a pseudo */
1812 || (reload_in_progress
&& GET_CODE (op
) == REG
1813 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
))
1818 if (GET_CODE (op
) == MEM
1819 && (GET_CODE (XEXP (op
, 0)) == PRE_DEC
1820 || GET_CODE (XEXP (op
, 0)) == POST_DEC
))
1825 if (GET_CODE (op
) == MEM
1826 && (GET_CODE (XEXP (op
, 0)) == PRE_INC
1827 || GET_CODE (XEXP (op
, 0)) == POST_INC
))
1832 #ifndef REAL_ARITHMETIC
1833 /* Match any CONST_DOUBLE, but only if
1834 we can examine the bits of it reliably. */
1835 if ((HOST_FLOAT_FORMAT
!= TARGET_FLOAT_FORMAT
1836 || HOST_BITS_PER_WIDE_INT
!= BITS_PER_WORD
)
1837 && GET_MODE (op
) != VOIDmode
&& ! flag_pretend_float
)
1840 if (GET_CODE (op
) == CONST_DOUBLE
)
1845 if (GET_CODE (op
) == CONST_DOUBLE
)
1851 if (GET_CODE (op
) == CONST_DOUBLE
1852 && CONST_DOUBLE_OK_FOR_LETTER_P (op
, c
))
1857 if (GET_CODE (op
) == CONST_INT
1858 || (GET_CODE (op
) == CONST_DOUBLE
1859 && GET_MODE (op
) == VOIDmode
))
1862 if (CONSTANT_P (op
))
1867 if (GET_CODE (op
) == CONST_INT
1868 || (GET_CODE (op
) == CONST_DOUBLE
1869 && GET_MODE (op
) == VOIDmode
))
1881 if (GET_CODE (op
) == CONST_INT
1882 && CONST_OK_FOR_LETTER_P (INTVAL (op
), c
))
1886 #ifdef EXTRA_CONSTRAINT
1892 if (EXTRA_CONSTRAINT (op
, c
))
1898 if (GET_CODE (op
) == MEM
1899 && ((strict
> 0 && ! offsettable_memref_p (op
))
1901 && !(CONSTANT_P (op
) || GET_CODE (op
) == MEM
))
1902 || (reload_in_progress
1903 && !(GET_CODE (op
) == REG
1904 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
))))
1909 if ((strict
> 0 && offsettable_memref_p (op
))
1910 || (strict
== 0 && offsettable_nonstrict_memref_p (op
))
1911 /* Before reload, accept what reload can handle. */
1913 && (CONSTANT_P (op
) || GET_CODE (op
) == MEM
))
1914 /* During reload, accept a pseudo */
1915 || (reload_in_progress
&& GET_CODE (op
) == REG
1916 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
))
1923 && GET_CODE (op
) == REG
1924 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)
1925 || (strict
== 0 && GET_CODE (op
) == SCRATCH
)
1926 || (GET_CODE (op
) == REG
1927 && reg_fits_class_p (op
, REG_CLASS_FROM_LETTER (c
),
1932 constraints
[opno
] = p
;
1933 /* If this operand did not win somehow,
1934 this alternative loses. */
1938 /* This alternative won; the operands are ok.
1939 Change whichever operands this alternative says to change. */
1944 /* See if any earlyclobber operand conflicts with some other
1948 for (eopno
= 0; eopno
< noperands
; eopno
++)
1949 /* Ignore earlyclobber operands now in memory,
1950 because we would often report failure when we have
1951 two memory operands, one of which was formerly a REG. */
1952 if (earlyclobber
[eopno
]
1953 && GET_CODE (recog_operand
[eopno
]) == REG
)
1954 for (opno
= 0; opno
< noperands
; opno
++)
1955 if ((GET_CODE (recog_operand
[opno
]) == MEM
1956 || op_types
[opno
] != OP_OUT
)
1958 /* Ignore things like match_operator operands. */
1959 && *insn_operand_constraint
[insn_code_num
][opno
] != 0
1960 && ! (matching_operands
[opno
] == eopno
1961 && operands_match_p (recog_operand
[opno
],
1962 recog_operand
[eopno
]))
1963 && ! safe_from_earlyclobber (recog_operand
[opno
],
1964 recog_operand
[eopno
]))
1969 while (--funny_match_index
>= 0)
1971 recog_operand
[funny_match
[funny_match_index
].other
]
1972 = recog_operand
[funny_match
[funny_match_index
].this];
1979 which_alternative
++;
1982 /* If we are about to reject this, but we are not to test strictly,
1983 try a very loose test. Only return failure if it fails also. */
1985 return constrain_operands (insn_code_num
, -1);
1990 /* Return 1 iff OPERAND (assumed to be a REG rtx)
1991 is a hard reg in class CLASS when its regno is offset by OFFSET
1992 and changed to mode MODE.
1993 If REG occupies multiple hard regs, all of them must be in CLASS. */
1996 reg_fits_class_p (operand
, class, offset
, mode
)
1998 register enum reg_class
class;
2000 enum machine_mode mode
;
2002 register int regno
= REGNO (operand
);
2003 if (regno
< FIRST_PSEUDO_REGISTER
2004 && TEST_HARD_REG_BIT (reg_class_contents
[(int) class],
2009 for (sr
= HARD_REGNO_NREGS (regno
, mode
) - 1;
2011 if (! TEST_HARD_REG_BIT (reg_class_contents
[(int) class],
2020 #endif /* REGISTER_CONSTRAINTS */