/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991-6, 1997 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "insn-config.h"
#include "insn-attr.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "hard-reg-set.h"

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Import from final.c: */
extern rtx alter_subreg ();

int strict_memory_address_p ();
int memory_address_p ();
/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in regclass.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;
/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;
/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;
/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile ()
{
  volatile_ok = 0;
}
/* Try recognizing the instruction INSN,
   and return the code number that results.
   Remember the code so that repeated calls do not
   need to spend the time for actual rerecognition.

   This function is the normal interface to instruction recognition.
   The automatically-generated function `recog' is normally called
   through this one.  (The only exception is in combine.c.)  */

int
recog_memoized (insn)
     rtx insn;
{
  if (INSN_CODE (insn) < 0)
    INSN_CODE (insn) = recog (PATTERN (insn), insn, NULL_PTR);
  return INSN_CODE (insn);
}
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (x)
     rtx x;
{
  int noperands = asm_noperands (x);
  rtx *operands;
  int i;

  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = (rtx *) alloca (noperands * sizeof (rtx));
  decode_asm_operands (x, operands, NULL_PTR, NULL_PTR, NULL_PTR);

  for (i = 0; i < noperands; i++)
    if (!general_operand (operands[i], VOIDmode))
      return 0;

  return 1;
}
/* Static data for the next two routines.

   The maximum number of changes supported is defined as the maximum
   number of operands times 5.  This allows for repeated substitutions
   inside complex indexed addresses, or, alternatively, changes in up
   to 5 insns.  */

#define MAX_CHANGE_LOCS (MAX_RECOG_OPERANDS * 5)

static rtx change_objects[MAX_CHANGE_LOCS];
static int change_old_codes[MAX_CHANGE_LOCS];
static rtx *change_locs[MAX_CHANGE_LOCS];
static rtx change_olds[MAX_CHANGE_LOCS];

static int num_changes = 0;
/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is non-zero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */
int
validate_change (object, loc, new, in_group)
     rtx object;
     rtx *loc;
     rtx new;
     int in_group;
{
  rtx old = *loc;

  if (old == new || rtx_equal_p (old, new))
    return 1;

  if (num_changes >= MAX_CHANGE_LOCS
      || (in_group == 0 && num_changes != 0))
    abort ();

  *loc = new;

  /* Save the information describing this change.  */
  change_objects[num_changes] = object;
  change_locs[num_changes] = loc;
  change_olds[num_changes] = old;

  if (object && GET_CODE (object) != MEM)
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      change_old_codes[num_changes] = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
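/* Illustrative sketch (not part of the original file): a caller that wants
   to rewrite two locations of one insn atomically can queue both changes
   with IN_GROUP nonzero and commit them together; `insn', `x', `op0',
   `op1' and `success' below are hypothetical placeholders.

       validate_change (insn, &XEXP (x, 0), op0, 1);
       validate_change (insn, &XEXP (x, 1), op1, 1);
       if (apply_change_group ())
         success = 1;

   If apply_change_group fails, both changes are backed out and the insn is
   left exactly as it was.  */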
/* Apply a group of changes previously issued with `validate_change'.
   Return 1 if all changes are valid, zero otherwise.  */

int
apply_change_group ()
{
  int i;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  We do not allow modifying an ASM_OPERANDS after reload
     has completed because verifying the constraints is too difficult.  */

  for (i = 0; i < num_changes; i++)
    {
      rtx object = change_objects[i];

      if (object == 0)
        continue;

      if (GET_CODE (object) == MEM)
        {
          if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
            break;
        }
      else if ((recog_memoized (object) < 0
                && (asm_noperands (PATTERN (object)) < 0
                    || ! check_asm_operands (PATTERN (object))
                    || reload_completed))
               || (reload_completed
                   && (insn_extract (object),
                       ! constrain_operands (INSN_CODE (object), 1))))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat = gen_rtx (PARALLEL, VOIDmode,
                                    gen_rtvec (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
    }

  if (i == num_changes)
    {
      num_changes = 0;
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}
/* Return the number of changes so far in the current group.  */

int
num_validated_changes ()
{
  return num_changes;
}
/* Retract the changes numbered NUM and up.  */

void
cancel_changes (num)
     int num;
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *change_locs[i] = change_olds[i];
      if (change_objects[i] && GET_CODE (change_objects[i]) != MEM)
        INSN_CODE (change_objects[i]) = change_old_codes[i];
    }
  num_changes = num;
}
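/* Illustrative sketch (not part of the original file): num_validated_changes
   and cancel_changes let a caller roll back only its own tentative changes;
   `insn', `loc', `new' and `still_ok' are hypothetical placeholders.

       int before = num_validated_changes ();
       validate_change (insn, loc, new, 1);
       if (! still_ok)
         cancel_changes (before);

   Only the changes numbered BEFORE and up are retracted, so changes queued
   earlier by an outer caller are left pending.  */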
/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (loc, from, to, object)
     rtx *loc;
     rtx from, to, object;
{
  register int i, j;
  register char *fmt;
  register rtx x = *loc;
  enum rtx_code code = GET_CODE (x);

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */
  if (x == from
      || (GET_CODE (x) == REG && GET_CODE (from) == REG
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_change (object, loc, to, 1);
      return;
    }
  /* For commutative or comparison operations, try replacing each argument
     separately and seeing if we made any changes.  If so, put a constant
     argument last.  */
  if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
    {
      int prev_changes = num_changes;

      validate_replace_rtx_1 (&XEXP (x, 0), from, to, object);
      validate_replace_rtx_1 (&XEXP (x, 1), from, to, object);
      if (prev_changes != num_changes && CONSTANT_P (XEXP (x, 0)))
        validate_change (object, loc,
                         gen_rtx (GET_RTX_CLASS (code) == 'c' ? code
                                  : swap_condition (code),
                                  GET_MODE (x), XEXP (x, 1), XEXP (x, 0)),
                         1);
    }
  /* Note that if CODE's RTX_CLASS is "c" or "<" we will have already
     done the substitution, otherwise we won't.  */

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         plus_constant to try to simplify it.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
        validate_change (object, loc, plus_constant (XEXP (x, 0), INTVAL (to)),
                         1);
      return;

    case MINUS:
      if (GET_CODE (to) == CONST_INT && XEXP (x, 1) == from)
        {
          validate_change (object, loc,
                           plus_constant (XEXP (x, 0), - INTVAL (to)),
                           1);
          return;
        }
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      /* In these cases, the operation to be performed depends on the mode
         of the operand.  If we are replacing the operand with a VOIDmode
         constant, we lose the information.  So try to simplify the operation
         in that case.  If it fails, substitute in something that we know
         won't be recognized.  */
      if (GET_MODE (to) == VOIDmode
          && (XEXP (x, 0) == from
              || (GET_CODE (XEXP (x, 0)) == REG && GET_CODE (from) == REG
                  && GET_MODE (XEXP (x, 0)) == GET_MODE (from)
                  && REGNO (XEXP (x, 0)) == REGNO (from))))
        {
          rtx new = simplify_unary_operation (code, GET_MODE (x), to,
                                              GET_MODE (from));
          if (new == 0)
            new = gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);

          validate_change (object, loc, new, 1);
          return;
        }
      break;
    case SUBREG:
      /* If we have a SUBREG of a register that we are replacing and we are
         replacing it with a MEM, make a new MEM and try replacing the
         SUBREG with it.  Don't do this if the MEM has a mode-dependent address
         or if we would be widening it.  */

      if (SUBREG_REG (x) == from
          && GET_CODE (from) == REG
          && GET_CODE (to) == MEM
          && ! mode_dependent_address_p (XEXP (to, 0))
          && ! MEM_VOLATILE_P (to)
          && GET_MODE_SIZE (GET_MODE (x)) <= GET_MODE_SIZE (GET_MODE (to)))
        {
          int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
          enum machine_mode mode = GET_MODE (x);
          rtx new;

          if (BYTES_BIG_ENDIAN)
            offset += (MIN (UNITS_PER_WORD,
                            GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
                       - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));

          new = gen_rtx (MEM, mode, plus_constant (XEXP (to, 0), offset));
          MEM_VOLATILE_P (new) = MEM_VOLATILE_P (to);
          RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (to);
          MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (to);
          validate_change (object, loc, new, 1);
          return;
        }
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (XEXP (x, 0) == from && GET_CODE (from) == REG && GET_CODE (to) == MEM
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && GET_CODE (XEXP (x, 2)) == CONST_INT
          && ! mode_dependent_address_p (XEXP (to, 0))
          && ! MEM_VOLATILE_P (to))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (to);
          int width = INTVAL (XEXP (x, 1));
          int pos = INTVAL (XEXP (x, 2));

#ifdef HAVE_extzv
          if (code == ZERO_EXTRACT)
            wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
#endif
#ifdef HAVE_extv
          if (code == SIGN_EXTRACT)
            wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
#endif

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset = (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode)
                          - offset);

              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = gen_rtx (MEM, wanted_mode,
                                plus_constant (XEXP (to, 0), offset));
              RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (to);
              MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (to);
              MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (to);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }
      break;

    default:
      break;
    }
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
    }
}
/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (from, to, insn)
     rtx from, to, insn;
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
  return apply_change_group ();
}
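/* Illustrative sketch (not part of the original file): replacing every use
   of one rtx by another inside a single insn, keeping the result only if
   the insn still matches some pattern; `old_reg', `new_reg', `insn' and
   `changed' are hypothetical placeholders.

       if (validate_replace_rtx (old_reg, new_reg, insn))
         changed = 1;

   On failure every substitution is undone, so the insn is never left in a
   half-rewritten state.  */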
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (insn)
     rtx insn;
{
  register rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return ((GET_CODE (next) == JUMP_INSN
           || GET_CODE (next) == INSN
           || GET_CODE (next) == CALL_INSN)
          && ! inequality_comparisons_p (PATTERN (next)));
}
#if 0 /* This is useless since the insn that sets the cc's
         must be followed immediately by the use of them.  */
/* Return 1 if the CC value set up by INSN is not used.  */

int
next_insns_test_no_inequality (insn)
     rtx insn;
{
  register rtx next = NEXT_INSN (insn);

  for (; next != 0; next = NEXT_INSN (next))
    {
      if (GET_CODE (next) == CODE_LABEL
          || GET_CODE (next) == BARRIER)
        return 1;
      if (GET_CODE (next) == NOTE)
        continue;
      if (inequality_comparisons_p (PATTERN (next)))
        return 0;
      if (sets_cc0_p (PATTERN (next)) == 1)
        return 1;
      if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
        return 1;
    }
  return 1;
}
#endif
/* This is used by find_single_use to locate an rtx that contains exactly one
   use of DEST, which is typically either a REG or CC0.  It returns a
   pointer to the innermost rtx expression containing DEST.  Appearances of
   DEST that are being used to totally replace it are not counted.  */

static rtx *
find_single_use_1 (dest, loc)
     rtx dest;
     rtx *loc;
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *result = 0;
  rtx *this_result;
  int i;
  char *fmt;

  switch (code)
    {
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CLOBBER:
      return 0;

    case SET:
      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn uses DEST if
         it is mentioned in the destination or the source.  Otherwise, we
         need just check the source.  */
      if (GET_CODE (SET_DEST (x)) != CC0
          && GET_CODE (SET_DEST (x)) != PC
          && GET_CODE (SET_DEST (x)) != REG
          && ! (GET_CODE (SET_DEST (x)) == SUBREG
                && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
        break;

      return find_single_use_1 (dest, &SET_SRC (x));

    case MEM:
    case SUBREG:
      return find_single_use_1 (dest, &XEXP (x, 0));
    }

  /* If it wasn't one of the common cases above, check each expression and
     vector of this code.  Look for a unique usage of DEST.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (dest == XEXP (x, i)
              || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
                  && REGNO (dest) == REGNO (XEXP (x, i))))
            this_result = loc;
          else
            this_result = find_single_use_1 (dest, &XEXP (x, i));

          if (result == 0)
            result = this_result;
          else if (this_result)
            /* Duplicate usage.  */
            return 0;
        }
      else if (fmt[i] == 'E')
        {
          int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            {
              if (XVECEXP (x, i, j) == dest
                  || (GET_CODE (dest) == REG
                      && GET_CODE (XVECEXP (x, i, j)) == REG
                      && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
                this_result = loc;
              else
                this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));

              if (result == 0)
                result = this_result;
              else if (this_result)
                return 0;
            }
        }
    }

  return result;
}
/* See if DEST, produced in INSN, is used only a single time in the
   sequel.  If so, return a pointer to the innermost rtx expression in which
   it is used.

   If PLOC is non-zero, *PLOC is set to the insn containing the single use.

   This routine will return usually zero either before flow is called (because
   there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
   note can't be trusted).

   If DEST is cc0_rtx, we look only at the next insn.  In that case, we don't
   care about REG_DEAD notes or LOG_LINKS.

   Otherwise, we find the single use by finding an insn that has a
   LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST.  If DEST is
   only referenced once in that insn, we know that it must be the first
   and last insn referencing DEST.  */

rtx *
find_single_use (dest, insn, ploc)
     rtx dest;
     rtx insn;
     rtx *ploc;
{
  rtx next;
  rtx *result;
  rtx link;

  if (dest == cc0_rtx)
    {
      next = NEXT_INSN (insn);
      if (next == 0
          || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
        return 0;

      result = find_single_use_1 (dest, &PATTERN (next));
      if (result && ploc)
        *ploc = next;
      return result;
    }

  if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
    return 0;

  for (next = next_nonnote_insn (insn);
       next != 0 && GET_CODE (next) != CODE_LABEL;
       next = next_nonnote_insn (next))
    if (GET_RTX_CLASS (GET_CODE (next)) == 'i' && dead_or_set_p (next, dest))
      {
        for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
          if (XEXP (link, 0) == insn)
            break;

        if (link)
          {
            result = find_single_use_1 (dest, &PATTERN (next));
            if (ploc)
              *ploc = next;
            return result;
          }
      }

  return 0;
}
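/* Illustrative sketch (not part of the original file): a combine-like caller
   asks whether the value set by INSN is used exactly once later on, and if
   so where; `dest' and `insn' are hypothetical placeholders.

       rtx use_insn;
       rtx *usep = find_single_use (dest, insn, &use_insn);

   If USEP is nonzero, *USEP is the innermost expression within USE_INSN
   that uses DEST; a zero result means either no use was found or uniqueness
   could not be established (e.g. before flow or after reload, as described
   above).  */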
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it might have.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  register enum rtx_code code = GET_CODE (op);
  int mode_altering_drug = 0;

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
#ifdef LEGITIMATE_PIC_OPERAND_P
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
            && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs.  */
      if (GET_CODE (SUBREG_REG (op)) == MEM
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
        return 0;
#endif

      op = SUBREG_REG (op);
      code = GET_CODE (op);

#if 0
      /* No longer needed, since (SUBREG (MEM...))
         will load the MEM into a reload reg in the MEM's own mode.  */
      mode_altering_drug = 1;
#endif
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      register rtx y = XEXP (op, 0);
      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;
      /* Use the mem's mode, since it will be reloaded thus.  */
      mode = GET_MODE (op);
      GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
    }

  return 0;

 win:
  if (mode_altering_drug)
    return ! mode_dependent_address_p (XEXP (op, 0));
  return 1;
}
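/* Illustrative note (not part of the original file): in a machine
   description this predicate typically appears as

       (match_operand:SI 1 "general_operand" "g")

   which accepts a register, a valid memory reference, or a constant usable
   in SImode, exactly the cases tested above.  */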
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  return memory_address_p (mode, op);
}
/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
        return general_operand (op, mode);

#ifdef CLASS_CANNOT_CHANGE_SIZE
      if (GET_CODE (SUBREG_REG (op)) == REG
          && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER
          && TEST_HARD_REG_BIT (reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE],
                                REGNO (SUBREG_REG (op)))
          && (GET_MODE_SIZE (mode)
              != GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
          && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_FLOAT)
        return 0;
#endif

      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
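/* Illustrative note (not part of the original file): a typical move
   pattern's output operand is written as

       (match_operand:SI 0 "register_operand" "=r")

   so that only a register (or, before reload, a SUBREG of a MEM that will
   be reloaded into one) can match; the constraint letter then selects the
   register class.  */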
/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  return (GET_MODE (op) == mode
          && (GET_CODE (op) == SCRATCH
              || (GET_CODE (op) == REG
                  && REGNO (op) < FIRST_PSEUDO_REGISTER)));
}
/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
          && LEGITIMATE_CONSTANT_P (op));
}
/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  return GET_CODE (op) == CONST_INT;
}
/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}
/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}
/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
         if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
          && GET_MODE_CLASS (mode) != MODE_INT
          && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
        return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
#ifdef LEGITIMATE_PIC_OPERAND_P
              && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
              && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
        return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) != MEM)
    return 0;

  if (GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_PUSH_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}
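/* Illustrative note (not part of the original file): on a machine where
   STACK_PUSH_CODE is PRE_DEC, the kind of rtl accepted here looks like

       (mem:SI (pre_dec:SI (reg:SI sp)))

   where `sp' stands for the stack pointer register; as the code above
   shows, only the mode, the push code and the stack-pointer register are
   checked.  */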
/* Return 1 if ADDR is a valid memory address for mode MODE.  */

int
memory_address_p (mode, addr)
     enum machine_mode mode;
     register rtx addr;
{
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
}
/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return GET_CODE (op) == MEM && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (GET_CODE (inner) == MEM && general_operand (op, mode));
}
/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
    {
      register int offset = SUBREG_WORD (op) * UNITS_PER_WORD;
      rtx inner = SUBREG_REG (op);

      if (BYTES_BIG_ENDIAN)
        offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (op)))
                   - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (inner))));

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (GET_CODE (op) == MEM
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}
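/* Illustrative note (not part of the original file): after reload,
   (mem:SI (reg:SI base)) satisfies indirect_operand, while
   (mem:SI (plus:SI (reg:SI base) (const_int 8))) does not, because a PLUS
   address is not itself a general_operand.  */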
/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && GET_RTX_CLASS (GET_CODE (op)) == '<');
}
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (body)
     rtx body;
{
  if (GET_CODE (body) == ASM_OPERANDS)
    /* No output operands: return number of input operands.  */
    return ASM_OPERANDS_INPUT_LENGTH (body);
  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
    return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
    {
      /* Multiple output operands, or 1 output plus some clobbers:
         body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
      int i;
      int n_sets;

      /* Count backwards through CLOBBERs to determine number of SETs.  */
      for (i = XVECLEN (body, 0); i > 0; i--)
        {
          if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
            break;
          if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
            return -1;
        }

      /* N_SETS is now number of output operands.  */
      n_sets = i;

      /* Verify that all the SETs we have
         came from a single original asm_operands insn
         (so that invalid combinations are blocked).  */
      for (i = 0; i < n_sets; i++)
        {
          rtx elt = XVECEXP (body, 0, i);
          if (GET_CODE (elt) != SET)
            return -1;
          if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
            return -1;
          /* If these ASM_OPERANDS rtx's came from different original insns
             then they aren't allowed together.  */
          if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
              != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
            return -1;
        }
      return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
              + n_sets);
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* 0 outputs, but some clobbers:
         body is [(asm_operands ...) (clobber (reg ...))...].  */
      int i;

      /* Make sure all the other parallel things really are clobbers.  */
      for (i = XVECLEN (body, 0) - 1; i > 0; i--)
        if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
          return -1;

      return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
    }
  else
    return -1;
}
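/* Illustrative note (not part of the original file): for a statement such
   as

       asm ("foo %1,%0" : "=r" (x) : "g" (y));

   the insn body is (set (reg ...) (asm_operands ...)) and asm_noperands
   returns 2, one output plus one input; adding register clobbers only wraps
   the same structure in a PARALLEL, which the cases above unwrap.  */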
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

char *
decode_asm_operands (body, operands, operand_locs, constraints, modes)
     rtx body;
     rtx *operands;
     rtx **operand_locs;
     char **constraints;
     enum machine_mode *modes;
{
  register int i;
  int noperands;
  char *template = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      for (i = 1; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
        }

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      rtx asmop = body;
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.  */
      /* Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == SET)
    {
      rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
      int nout = 0;             /* Does not include CLOBBERs.  */

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
         Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
        {
          if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
            break;              /* Past last SET */

          if (operands)
            operands[i] = SET_DEST (XVECEXP (body, 0, i));
          if (operand_locs)
            operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
          if (constraints)
            constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
          if (modes)
            modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
          nout++;
        }

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs.  */

      rtx asmop = XVECEXP (body, 0, 0);
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }

  return template;
}
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

static rtx *
find_constant_term_loc (p)
     rtx *p;
{
  register rtx *tem;
  register enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
        return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
        return tem;
    }

  return 0;
}
/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (op)
     rtx op;
{
  return ((GET_CODE (op) == MEM)
          && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (op)
     rtx op;
{
  return ((GET_CODE (op) == MEM)
          && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
}
/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid after the addition of a positive integer
   less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_p (strictp, mode, y)
     int strictp;
     enum machine_mode mode;
     register rtx y;
{
  register enum rtx_code ycode = GET_CODE (y);
  register rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) () = (strictp ? strict_memory_address_p : memory_address_p);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y))
    return 0;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (*y2, GET_MODE_SIZE (mode) - 1);
      /* Use QImode because an odd displacement may be automatically invalid
         for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (ycode == PRE_DEC || ycode == PRE_INC
      || ycode == POST_DEC || ycode == POST_INC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  */

  z = plus_constant_for_output (y, GET_MODE_SIZE (mode) - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z);
}
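/* Illustrative note (not part of the original file): for MODE == DImode and
   Y == (plus (reg B) (const_int 4)), the code above temporarily turns the
   constant term into (const_int 4 + GET_MODE_SIZE (DImode) - 1) and asks
   whether the result is still a valid QImode address, i.e. whether every
   byte of the DImode reference can be reached by offsetting Y.  */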
/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */

int
mode_dependent_address_p (addr)
     rtx addr;
{
  GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
  return 0;

 win:
  return 1;
}
/* Return 1 if OP is a general operand
   other than a memory ref with a mode dependent address.  */

int
mode_independent_operand (op, mode)
     enum machine_mode mode;
     rtx op;
{
  rtx addr;

  if (! general_operand (op, mode))
    return 0;

  if (GET_CODE (op) != MEM)
    return 1;

  addr = XEXP (op, 0);
  GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
  return 1;

 lose:
  return 0;
}
/* Given an operand OP that is a valid memory reference
   which satisfies offsettable_memref_p,
   return a new memory reference whose address has been adjusted by OFFSET.
   OFFSET should be positive and less than the size of the object referenced.  */

rtx
adj_offsettable_operand (op, offset)
     rtx op;
     int offset;
{
  register enum rtx_code code = GET_CODE (op);

  if (code == MEM)
    {
      register rtx y = XEXP (op, 0);
      register rtx new;

      if (CONSTANT_ADDRESS_P (y))
        {
          new = gen_rtx (MEM, GET_MODE (op), plus_constant_for_output (y, offset));
          RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op);
          return new;
        }

      if (GET_CODE (y) == PLUS)
        {
          rtx z = y;
          register rtx *const_loc;

          op = copy_rtx (op);
          z = XEXP (op, 0);
          const_loc = find_constant_term_loc (&z);
          if (const_loc)
            {
              *const_loc = plus_constant_for_output (*const_loc, offset);
              return op;
            }
        }

      new = gen_rtx (MEM, GET_MODE (op), plus_constant_for_output (y, offset));
      RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op);
      return new;
    }

  abort ();
}
#ifdef REGISTER_CONSTRAINTS

/* Check the operands of an insn (found in recog_operands)
   against the insn's operand constraints (found via INSN_CODE_NUM)
   and return 1 if they are valid.

   WHICH_ALTERNATIVE is set to a number which indicates which
   alternative of constraints was matched: 0 for the first alternative,
   1 for the next, etc.

   In addition, when two operands are required to match
   and it happens that the output operand is (reg) while the
   input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
   make the output operand look like the input.
   This is because the output operand is the one the template will print.

   This is used in final, just before printing the assembler code and by
   the routines that determine an insn's attribute.

   If STRICT is a positive non-zero value, it means that we have been
   called after reload has been completed.  In that case, we must
   do all checks strictly.  If it is zero, it means that we have been called
   before reload has completed.  In that case, we first try to see if we can
   find an alternative that matches strictly.  If not, we try again, this
   time assuming that reload will fix up the insn.  This provides a "best
   guess" for the alternative and is used to compute attributes of insns prior
   to reload.  A negative value of STRICT is used for this internal call.  */

struct funny_match
{
  int this, other;
};
int
constrain_operands (insn_code_num, strict)
     int insn_code_num;
     int strict;
{
  char *constraints[MAX_RECOG_OPERANDS];
  int matching_operands[MAX_RECOG_OPERANDS];
  enum op_type {OP_IN, OP_OUT, OP_INOUT} op_types[MAX_RECOG_OPERANDS];
  int earlyclobber[MAX_RECOG_OPERANDS];
  register int c;
  int noperands = insn_n_operands[insn_code_num];

  struct funny_match funny_match[MAX_RECOG_OPERANDS];
  int funny_match_index;
  int nalternatives = insn_n_alternatives[insn_code_num];

  if (noperands == 0 || nalternatives == 0)
    return 1;

  for (c = 0; c < noperands; c++)
    {
      constraints[c] = insn_operand_constraint[insn_code_num][c];
      matching_operands[c] = -1;
      op_types[c] = OP_IN;
    }

  which_alternative = 0;

  while (which_alternative < nalternatives)
    {
      register int opno;
      int lose = 0;
      funny_match_index = 0;

      for (opno = 0; opno < noperands; opno++)
        {
          register rtx op = recog_operand[opno];
          enum machine_mode mode = GET_MODE (op);
          register char *p = constraints[opno];
          int offset = 0;
          int win = 0;
          int val;

          earlyclobber[opno] = 0;

          /* A unary operator may be accepted by the predicate, but it
             is irrelevant for matching constraints.  */
          if (GET_RTX_CLASS (GET_CODE (op)) == '1')
            op = XEXP (op, 0);

          if (GET_CODE (op) == SUBREG)
            {
              if (GET_CODE (SUBREG_REG (op)) == REG
                  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
                offset = SUBREG_WORD (op);
              op = SUBREG_REG (op);
            }

          /* An empty constraint or empty alternative
             allows anything which matched the pattern.  */
          if (*p == 0 || *p == ',')
            win = 1;
          while (*p && (c = *p++) != ',')
            switch (c)
              {
              case '?':
              case '!':
              case '*':
              case '%':
                break;

              case '#':
                /* Ignore rest of this alternative as far as
                   constraint checking is concerned.  */
                while (*p && *p != ',')
                  p++;
                break;

              case '=':
                op_types[opno] = OP_OUT;
                break;

              case '+':
                op_types[opno] = OP_INOUT;
                break;

              case '&':
                earlyclobber[opno] = 1;
                break;

              case '0':
              case '1':
              case '2':
              case '3':
              case '4':
              case '5':
              case '6':
              case '7':
              case '8':
              case '9':
                /* This operand must be the same as a previous one.
                   This kind of constraint is used for instructions such
                   as add when they take only two operands.

                   Note that the lower-numbered operand is passed first.

                   If we are not testing strictly, assume that this constraint
                   will be satisfied.  */
                if (strict < 0)
                  val = 1;
                else
                  val = operands_match_p (recog_operand[c - '0'],
                                          recog_operand[opno]);

                matching_operands[opno] = c - '0';
                matching_operands[c - '0'] = opno;

                if (val != 0)
                  win = 1;
                /* If output is *x and input is *--x,
                   arrange later to change the output to *--x as well,
                   since the output op is the one that will be printed.  */
                if (val == 2 && strict > 0)
                  {
                    funny_match[funny_match_index].this = opno;
                    funny_match[funny_match_index++].other = c - '0';
                  }
                break;

              case 'p':
                /* p is used for address_operands.  When we are called by
                   gen_reload, no one will have checked that the address is
                   strictly valid, i.e., that all pseudos requiring hard regs
                   have gotten them.  */
                if (strict <= 0
                    || (strict_memory_address_p
                        (insn_operand_mode[insn_code_num][opno], op)))
                  win = 1;
                break;

                /* No need to check general_operand again;
                   it was done in insn-recog.c.  */
              case 'g':
                /* Anything goes unless it is a REG and really has a hard reg
                   but the hard reg is not in the class GENERAL_REGS.  */
                if (strict < 0
                    || GENERAL_REGS == ALL_REGS
                    || GET_CODE (op) != REG
                    || (reload_in_progress
                        && REGNO (op) >= FIRST_PSEUDO_REGISTER)
                    || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
                  win = 1;
                break;

              case 'r':
                if (strict < 0
                    || (strict == 0
                        && GET_CODE (op) == REG
                        && REGNO (op) >= FIRST_PSEUDO_REGISTER)
                    || (strict == 0 && GET_CODE (op) == SCRATCH)
                    || (GET_CODE (op) == REG
                        && ((GENERAL_REGS == ALL_REGS
                             && REGNO (op) < FIRST_PSEUDO_REGISTER)
                            || reg_fits_class_p (op, GENERAL_REGS,
                                                 offset, mode))))
                  win = 1;
                break;

              case 'X':
                /* This is used for a MATCH_SCRATCH in the cases when
                   we don't actually need anything.  So anything goes
                   any time.  */
                win = 1;
                break;

              case 'm':
                if (GET_CODE (op) == MEM
                    /* Before reload, accept what reload can turn into mem.  */
                    || (strict < 0 && CONSTANT_P (op))
                    /* During reload, accept a pseudo  */
                    || (reload_in_progress && GET_CODE (op) == REG
                        && REGNO (op) >= FIRST_PSEUDO_REGISTER))
                  win = 1;
                break;

              case '<':
                if (GET_CODE (op) == MEM
                    && (GET_CODE (XEXP (op, 0)) == PRE_DEC
                        || GET_CODE (XEXP (op, 0)) == POST_DEC))
                  win = 1;
                break;

              case '>':
                if (GET_CODE (op) == MEM
                    && (GET_CODE (XEXP (op, 0)) == PRE_INC
                        || GET_CODE (XEXP (op, 0)) == POST_INC))
                  win = 1;
                break;

              case 'E':
#ifndef REAL_ARITHMETIC
                /* Match any CONST_DOUBLE, but only if
                   we can examine the bits of it reliably.  */
                if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
                     || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
                    && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
                  break;
#endif
                if (GET_CODE (op) == CONST_DOUBLE)
                  win = 1;
                break;

              case 'F':
                if (GET_CODE (op) == CONST_DOUBLE)
                  win = 1;
                break;

              case 'G':
              case 'H':
                if (GET_CODE (op) == CONST_DOUBLE
                    && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
                  win = 1;
                break;

              case 's':
                if (GET_CODE (op) == CONST_INT
                    || (GET_CODE (op) == CONST_DOUBLE
                        && GET_MODE (op) == VOIDmode))
                  break;
              case 'i':
                if (CONSTANT_P (op))
                  win = 1;
                break;

              case 'n':
                if (GET_CODE (op) == CONST_INT
                    || (GET_CODE (op) == CONST_DOUBLE
                        && GET_MODE (op) == VOIDmode))
                  win = 1;
                break;

              case 'I':
              case 'J':
              case 'K':
              case 'L':
              case 'M':
              case 'N':
              case 'O':
              case 'P':
                if (GET_CODE (op) == CONST_INT
                    && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
                  win = 1;
                break;

#ifdef EXTRA_CONSTRAINT
              case 'Q':
              case 'R':
              case 'S':
              case 'T':
              case 'U':
                if (EXTRA_CONSTRAINT (op, c))
                  win = 1;
                break;
#endif

              case 'V':
                if (GET_CODE (op) == MEM
                    && ((strict > 0 && ! offsettable_memref_p (op))
                        || (strict < 0
                            && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
                        || (reload_in_progress
                            && !(GET_CODE (op) == REG
                                 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
                  win = 1;
                break;

              case 'o':
                if ((strict > 0 && offsettable_memref_p (op))
                    || (strict == 0 && offsettable_nonstrict_memref_p (op))
                    /* Before reload, accept what reload can handle.  */
                    || (strict < 0
                        && (CONSTANT_P (op) || GET_CODE (op) == MEM))
                    /* During reload, accept a pseudo  */
                    || (reload_in_progress && GET_CODE (op) == REG
                        && REGNO (op) >= FIRST_PSEUDO_REGISTER))
                  win = 1;
                break;

              default:
                if (strict < 0
                    || (strict == 0
                        && GET_CODE (op) == REG
                        && REGNO (op) >= FIRST_PSEUDO_REGISTER)
                    || (strict == 0 && GET_CODE (op) == SCRATCH)
                    || (GET_CODE (op) == REG
                        && reg_fits_class_p (op, REG_CLASS_FROM_LETTER (c),
                                             offset, mode)))
                  win = 1;
              }

          constraints[opno] = p;
          /* If this operand did not win somehow,
             this alternative loses.  */
          if (! win)
            lose = 1;
        }
      /* This alternative won; the operands are ok.
         Change whichever operands this alternative says to change.  */
      if (! lose)
        {
          int opno, eopno;

          /* See if any earlyclobber operand conflicts with some other
             operand.  */

          if (strict > 0)
            for (eopno = 0; eopno < noperands; eopno++)
              /* Ignore earlyclobber operands now in memory,
                 because we would often report failure when we have
                 two memory operands, one of which was formerly a REG.  */
              if (earlyclobber[eopno]
                  && GET_CODE (recog_operand[eopno]) == REG)
                for (opno = 0; opno < noperands; opno++)
                  if ((GET_CODE (recog_operand[opno]) == MEM
                       || op_types[opno] != OP_OUT)
                      && opno != eopno
                      /* Ignore things like match_operator operands.  */
                      && *insn_operand_constraint[insn_code_num][opno] != 0
                      && ! (matching_operands[opno] == eopno
                            && rtx_equal_p (recog_operand[opno],
                                            recog_operand[eopno]))
                      && ! safe_from_earlyclobber (recog_operand[opno],
                                                   recog_operand[eopno]))
                    lose = 1;

          if (! lose)
            {
              while (--funny_match_index >= 0)
                {
                  recog_operand[funny_match[funny_match_index].other]
                    = recog_operand[funny_match[funny_match_index].this];
                }

              return 1;
            }
        }

      which_alternative++;
    }

  /* If we are about to reject this, but we are not to test strictly,
     try a very loose test.  Only return failure if it fails also.  */
  if (strict == 0)
    return constrain_operands (insn_code_num, -1);
  else
    return 0;
}
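/* Illustrative note (not part of the original file): for an insn whose
   operand 0 has the constraint string "=r,m" and whose operand 1 has
   "rm,r", the loop above first tests alternative 0 (register output,
   register-or-memory input) and then alternative 1; on success
   which_alternative is left at the index of the alternative that
   matched.  */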
/* Return 1 iff OPERAND (assumed to be a REG rtx)
   is a hard reg in class CLASS when its regno is offset by OFFSET
   and changed to mode MODE.
   If REG occupies multiple hard regs, all of them must be in CLASS.  */

int
reg_fits_class_p (operand, class, offset, mode)
     rtx operand;
     register enum reg_class class;
     int offset;
     enum machine_mode mode;
{
  register int regno = REGNO (operand);
  if (regno < FIRST_PSEUDO_REGISTER
      && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
                            regno + offset))
    {
      register int sr;
      regno += offset;
      for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
           sr > 0; sr--)
        if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
                                 regno + sr))
          break;
      return sr == 0;
    }

  return 0;
}

#endif /* REGISTER_CONSTRAINTS */