/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "insn-config.h"
#include "insn-attr.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "hard-reg-set.h"
#include "basic-block.h"

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif

static void validate_replace_rtx_1	PARAMS ((rtx *, rtx, rtx, rtx));
static rtx *find_single_use_1		PARAMS ((rtx, rtx *));
static rtx *find_constant_term_loc	PARAMS ((rtx *));
static int insn_invalid_p		PARAMS ((rtx));
/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in regclass.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

struct recog_data recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */
int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

init_recog_no_volatile ()
/* Try recognizing the instruction INSN,
   and return the code number that results.
   Remember the code so that repeated calls do not
   need to spend the time for actual rerecognition.

   This function is the normal interface to instruction recognition.
   The automatically-generated function `recog' is normally called
   through this one.  (The only exception is in combine.c.)  */

recog_memoized (insn)
  if (INSN_CODE (insn) < 0)
    INSN_CODE (insn) = recog (PATTERN (insn), insn, NULL_PTR);
  return INSN_CODE (insn);
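
/* Illustrative usage sketch (not part of the original file): a pass that
   wants to know whether INSN matches a particular named pattern can compare
   the memoized code against a generated CODE_FOR_* value.  INSN is a
   hypothetical insn owned by the caller, and CODE_FOR_addsi3 stands in for
   whatever pattern the target actually defines.  */
#if 0
  if (recog_memoized (insn) < 0)
    ;  /* INSN matches no pattern in the machine description.  */
  else if (INSN_CODE (insn) == CODE_FOR_addsi3)
    ;  /* INSN was recognized as the (hypothetical) addsi3 pattern.  */
#endif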
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

check_asm_operands (x)
  const char **constraints;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;

  noperands = asm_noperands (x);
  operands = (rtx *) alloca (noperands * sizeof (rtx));
  constraints = (const char **) alloca (noperands * sizeof (char *));

  decode_asm_operands (x, operands, NULL_PTR, constraints, NULL_PTR);

  for (i = 0; i < noperands; i++)
      const char *c = constraints[i];

      if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
	c = constraints[c[0] - '0'];

      if (! asm_operand_ok (operands[i], c))
/* Static data for the next two routines.  */

typedef struct change_t

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;
/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is non-zero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

validate_change (object, loc, new, in_group)
  if (old == new || rtx_equal_p (old, new))

  if (in_group == 0 && num_changes != 0)

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
      if (changes_allocated == 0)
	/* This value allows for repeated substitutions inside complex
	   indexed addresses, or changes in up to 5 insns.  */
	changes_allocated = MAX_RECOG_OPERANDS * 5;
	changes_allocated *= 2;

	(change_t *) xrealloc (changes,
			       sizeof (change_t) * changes_allocated);

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;

  if (object && GET_CODE (object) != MEM)
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
	 case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  return apply_change_group ();
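
/* Illustrative usage sketch (not part of the original file): replacing the
   second operand of a hypothetical single-SET insn with NEW_RTX as a lone
   change (IN_GROUP == 0).  INSN and NEW_RTX are assumed to be supplied by
   the caller.  */
#if 0
  if (validate_change (insn, &XEXP (SET_SRC (PATTERN (insn)), 1), new_rtx, 0))
    ;  /* The change was applied and INSN still matches some pattern.  */
  else
    ;  /* The change was rejected and backed out.  */
#endif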
/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.  */

insn_invalid_p (insn)
  int icode = recog_memoized (insn);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  if (is_asm && ! check_asm_operands (PATTERN (insn)))

  if (! is_asm && icode < 0)

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
      if (! constrain_operands (1))
/* Apply a group of changes previously issued with `validate_change'.
   Return 1 if all changes are valid, zero otherwise.  */

apply_change_group ()
  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = 0; i < num_changes; i++)
      rtx object = changes[i].object;

      if (GET_CODE (object) == MEM)
	  if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
      else if (insn_invalid_p (object))
	  rtx pat = PATTERN (object);

	  /* Perhaps we couldn't recognize the insn because there were
	     extra CLOBBERs at the end.  If so, try to re-recognize
	     without the last CLOBBER (later iterations will cause each of
	     them to be eliminated, in turn).  But don't do this if we
	     have an ASM_OPERAND.  */
	  if (GET_CODE (pat) == PARALLEL
	      && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
	      && asm_noperands (PATTERN (object)) < 0)
	      if (XVECLEN (pat, 0) == 2)
		newpat = XVECEXP (pat, 0, 0);
		  = gen_rtx_PARALLEL (VOIDmode,
				      gen_rtvec (XVECLEN (pat, 0) - 1));
		  for (j = 0; j < XVECLEN (newpat, 0); j++)
		    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);

	      /* Add a new change to this group to replace the pattern
		 with this new pattern.  Then consider this change
		 as having succeeded.  The change we added will
		 cause the entire call to fail if things remain invalid.

		 Note that this can lose if a later change than the one
		 we are processing specified &XVECEXP (PATTERN (object), 0, X)
		 but this shouldn't occur.  */

	      validate_change (object, &PATTERN (object), newpat, 1);
	  else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
	    /* If this insn is a CLOBBER or USE, it is always valid, but is
	       never recognized.  */

  if (i == num_changes)
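
/* Illustrative usage sketch (not part of the original file): queueing two
   related replacements as a group (IN_GROUP == 1) and validating them
   together.  INSN, DEST_RTX and SRC_RTX are hypothetical values owned by
   the caller.  */
#if 0
  validate_change (insn, &SET_DEST (PATTERN (insn)), dest_rtx, 1);
  validate_change (insn, &SET_SRC (PATTERN (insn)), src_rtx, 1);
  if (! apply_change_group ())
    ;  /* Neither change was kept; INSN is unchanged.  */
#endif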
/* Return the number of changes so far in the current group.  */

num_validated_changes ()

/* Retract the changes numbered NUM and up.  */

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
      *changes[i].loc = changes[i].old;
      if (changes[i].object && GET_CODE (changes[i].object) != MEM)
	INSN_CODE (changes[i].object) = changes[i].old_code;
/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

validate_replace_rtx_1 (loc, from, to, object)
     rtx from, to, object;
  register const char *fmt;
  register rtx x = *loc;
  enum rtx_code code = GET_CODE (x);

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

      || (GET_CODE (x) == REG && GET_CODE (from) == REG
	  && GET_MODE (x) == GET_MODE (from)
	  && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
	  && rtx_equal_p (x, from)))
      validate_change (object, loc, to, 1);

  /* For commutative or comparison operations, try replacing each argument
     separately and seeing if we made any changes.  If so, put a constant
     argument last.  */
  if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
      int prev_changes = num_changes;

      validate_replace_rtx_1 (&XEXP (x, 0), from, to, object);
      validate_replace_rtx_1 (&XEXP (x, 1), from, to, object);
      if (prev_changes != num_changes && CONSTANT_P (XEXP (x, 0)))
	  validate_change (object, loc,
			   gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
					   : swap_condition (code),
					   GET_MODE (x), XEXP (x, 1),

  /* Note that if CODE's RTX_CLASS is "c" or "<" we will have already
     done the substitution, otherwise we won't.  */

      /* If we have a PLUS whose second operand is now a CONST_INT, use
	 plus_constant to try to simplify it.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
	validate_change (object, loc, plus_constant (XEXP (x, 0), INTVAL (to)),

      if (GET_CODE (to) == CONST_INT && XEXP (x, 1) == from)
	  validate_change (object, loc,
			   plus_constant (XEXP (x, 0), - INTVAL (to)),

      /* In these cases, the operation to be performed depends on the mode
	 of the operand.  If we are replacing the operand with a VOIDmode
	 constant, we lose the information.  So try to simplify the operation
	 in that case.  If it fails, substitute in something that we know
	 won't be recognized.  */
      if (GET_MODE (to) == VOIDmode
	  && (XEXP (x, 0) == from
	      || (GET_CODE (XEXP (x, 0)) == REG && GET_CODE (from) == REG
		  && GET_MODE (XEXP (x, 0)) == GET_MODE (from)
		  && REGNO (XEXP (x, 0)) == REGNO (from))))
	  rtx new = simplify_unary_operation (code, GET_MODE (x), to,
	    new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);

	  validate_change (object, loc, new, 1);

      /* If we have a SUBREG of a register that we are replacing and we are
	 replacing it with a MEM, make a new MEM and try replacing the
	 SUBREG with it.  Don't do this if the MEM has a mode-dependent address
	 or if we would be widening it.  */

      if (SUBREG_REG (x) == from
	  && GET_CODE (from) == REG
	  && GET_CODE (to) == MEM
	  && ! mode_dependent_address_p (XEXP (to, 0))
	  && ! MEM_VOLATILE_P (to)
	  && GET_MODE_SIZE (GET_MODE (x)) <= GET_MODE_SIZE (GET_MODE (to)))
	  int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
	  enum machine_mode mode = GET_MODE (x);

	  if (BYTES_BIG_ENDIAN)
	    offset += (MIN (UNITS_PER_WORD,
			    GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
		       - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));

	  new = gen_rtx_MEM (mode, plus_constant (XEXP (to, 0), offset));
	  MEM_COPY_ATTRIBUTES (new, to);
	  validate_change (object, loc, new, 1);

      /* If we are replacing a register with memory, try to change the memory
	 to be the mode required for memory in extract operations (this isn't
	 likely to be an insertion operation; if it was, nothing bad will
	 happen, we might just fail in some cases).  */

      if (XEXP (x, 0) == from && GET_CODE (from) == REG && GET_CODE (to) == MEM
	  && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && GET_CODE (XEXP (x, 2)) == CONST_INT
	  && ! mode_dependent_address_p (XEXP (to, 0))
	  && ! MEM_VOLATILE_P (to))
	  enum machine_mode wanted_mode = VOIDmode;
	  enum machine_mode is_mode = GET_MODE (to);
	  int pos = INTVAL (XEXP (x, 2));

	  if (code == ZERO_EXTRACT)
	      wanted_mode = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
	      if (wanted_mode == VOIDmode)
		wanted_mode = word_mode;
	  if (code == SIGN_EXTRACT)
	      wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
	      if (wanted_mode == VOIDmode)
		wanted_mode = word_mode;

	  /* If we have a narrower mode, we can do something.  */
	  if (wanted_mode != VOIDmode
	      && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
	      int offset = pos / BITS_PER_UNIT;

	      /* If the bytes and bits are counted differently, we
		 must adjust the offset.  */
	      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
		offset = (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode)

	      pos %= GET_MODE_BITSIZE (wanted_mode);

	      newmem = gen_rtx_MEM (wanted_mode,
				    plus_constant (XEXP (to, 0), offset));
	      MEM_COPY_ATTRIBUTES (newmem, to);

	      validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
	      validate_change (object, &XEXP (x, 0), newmem, 1);

  /* For commutative or comparison operations we've already performed
     replacements.  Don't try to perform them again.  */
  if (GET_RTX_CLASS (code) != '<' && GET_RTX_CLASS (code) != 'c')
      fmt = GET_RTX_FORMAT (code);
      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	    validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
	  else if (fmt[i] == 'E')
	    for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	      validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

validate_replace_rtx (from, to, insn)
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
  return apply_change_group ();
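
/* Illustrative usage sketch (not part of the original file): rewriting every
   use of one pseudo-register as another throughout INSN and keeping the
   result only if INSN is still recognizable.  OLD_REG and NEW_REG are
   hypothetical REG rtxes owned by the caller.  */
#if 0
  if (! validate_replace_rtx (old_reg, new_reg, insn))
    ;  /* INSN was left exactly as it was before the call.  */
#endif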
/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

validate_replace_rtx_group (from, to, insn)
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  After all changes have been made, validate by seeing if
   INSN is still valid.  */

validate_replace_src (from, to, insn)
  if ((GET_CODE (insn) != INSN && GET_CODE (insn) != JUMP_INSN)
      || GET_CODE (PATTERN (insn)) != SET)

  validate_replace_rtx_1 (&SET_SRC (PATTERN (insn)), from, to, insn);
  if (GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
    validate_replace_rtx_1 (&XEXP (SET_DEST (PATTERN (insn)), 0),
  return apply_change_group ();
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

next_insn_tests_no_inequality (insn)
  register rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */

  return ((GET_CODE (next) == JUMP_INSN
	   || GET_CODE (next) == INSN
	   || GET_CODE (next) == CALL_INSN)
	  && ! inequality_comparisons_p (PATTERN (next)));

#if 0  /* This is useless since the insn that sets the cc's
	  must be followed immediately by the use of them.  */
/* Return 1 if the CC value set up by INSN is not used.  */

next_insns_test_no_inequality (insn)
  register rtx next = NEXT_INSN (insn);

  for (; next != 0; next = NEXT_INSN (next))
      if (GET_CODE (next) == CODE_LABEL
	  || GET_CODE (next) == BARRIER)
      if (GET_CODE (next) == NOTE)
      if (inequality_comparisons_p (PATTERN (next)))
      if (sets_cc0_p (PATTERN (next)) == 1)
      if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
/* This is used by find_single_use to locate an rtx that contains exactly one
   use of DEST, which is typically either a REG or CC0.  It returns a
   pointer to the innermost rtx expression containing DEST.  Appearances of
   DEST that are being used to totally replace it are not counted.  */

find_single_use_1 (dest, loc)
  enum rtx_code code = GET_CODE (x);

      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
	 of a REG that occupies all of the REG, the insn uses DEST if
	 it is mentioned in the destination or the source.  Otherwise, we
	 need just check the source.  */
      if (GET_CODE (SET_DEST (x)) != CC0
	  && GET_CODE (SET_DEST (x)) != PC
	  && GET_CODE (SET_DEST (x)) != REG
	  && ! (GET_CODE (SET_DEST (x)) == SUBREG
		&& GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
		&& (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
		      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
		    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
			 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))

      return find_single_use_1 (dest, &SET_SRC (x));

      return find_single_use_1 (dest, &XEXP (x, 0));

  /* If it wasn't one of the common cases above, check each expression and
     vector of this code.  Look for a unique usage of DEST.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	  if (dest == XEXP (x, i)
	      || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
		  && REGNO (dest) == REGNO (XEXP (x, i))))
	    this_result = find_single_use_1 (dest, &XEXP (x, i));

	    result = this_result;
	  else if (this_result)
	    /* Duplicate usage.  */

      else if (fmt[i] == 'E')
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	      if (XVECEXP (x, i, j) == dest
		  || (GET_CODE (dest) == REG
		      && GET_CODE (XVECEXP (x, i, j)) == REG
		      && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
		this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));

		result = this_result;
	      else if (this_result)
/* See if DEST, produced in INSN, is used only a single time in the
   sequel.  If so, return a pointer to the innermost rtx expression in which
   it is used.

   If PLOC is non-zero, *PLOC is set to the insn containing the single use.

   This routine will usually return zero either before flow is called (because
   there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
   note can't be trusted).

   If DEST is cc0_rtx, we look only at the next insn.  In that case, we don't
   care about REG_DEAD notes or LOG_LINKS.

   Otherwise, we find the single use by finding an insn that has a
   LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST.  If DEST is
   only referenced once in that insn, we know that it must be the first
   and last insn referencing DEST.  */

find_single_use (dest, insn, ploc)
      next = NEXT_INSN (insn);
	  || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))

      result = find_single_use_1 (dest, &PATTERN (next));

  if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)

  for (next = next_nonnote_insn (insn);
       next != 0 && GET_CODE (next) != CODE_LABEL;
       next = next_nonnote_insn (next))
    if (GET_RTX_CLASS (GET_CODE (next)) == 'i' && dead_or_set_p (next, dest))
	for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
	  if (XEXP (link, 0) == insn)

	    result = find_single_use_1 (dest, &PATTERN (next));
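
/* Illustrative usage sketch (not part of the original file): asking whether
   the value DEST computed by INSN is consumed by exactly one later insn,
   the way combine-like passes do.  DEST and INSN are hypothetical values
   owned by the caller.  */
#if 0
  rtx use_insn;
  rtx *use_loc = find_single_use (dest, insn, &use_insn);

  if (use_loc != 0)
    ;  /* *USE_LOC is the unique reference to DEST, inside USE_INSN.  */
#endif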
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

general_operand (op, mode)
     enum machine_mode mode;
  register enum rtx_code code = GET_CODE (op);
  int mode_altering_drug = 0;

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)

    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
#ifdef LEGITIMATE_PIC_OPERAND_P
	    && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	    && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
	 references to be explicit, so outlaw paradoxical SUBREGs.  */
      if (GET_CODE (SUBREG_REG (op)) == MEM
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))

      op = SUBREG_REG (op);
      code = GET_CODE (op);
      /* No longer needed, since (SUBREG (MEM...))
	 will load the MEM into a reload reg in the MEM's own mode.  */
      mode_altering_drug = 1;

    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
	    || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

      register rtx y = XEXP (op, 0);
      if (! volatile_ok && MEM_VOLATILE_P (op))
      if (GET_CODE (y) == ADDRESSOF)

      /* Use the mem's mode, since it will be reloaded thus.  */
      mode = GET_MODE (op);
      GO_IF_LEGITIMATE_ADDRESS (mode, y, win);

  /* Pretend this is an operand for now; we'll run force_operand
     on its replacement in fixup_var_refs_1.  */
  if (code == ADDRESSOF)

  if (mode_altering_drug)
    return ! mode_dependent_address_p (XEXP (op, 0));
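
/* Illustrative usage sketch (not part of the original file): general_operand
   is normally referenced from match_operand in the machine description, but
   it can also be called directly to ask whether X is something an ordinary
   SImode move could accept.  X is a hypothetical rtx owned by the caller.  */
#if 0
  if (general_operand (x, SImode))
    ;  /* X is a valid SImode register, memory, or constant operand.  */
#endif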
/* Return 1 if OP is a valid memory address for a memory reference
   in mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

address_operand (op, mode)
     enum machine_mode mode;
  return memory_address_p (mode, op);
/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

register_operand (op, mode)
     enum machine_mode mode;
  if (GET_MODE (op) != mode && mode != VOIDmode)

  if (GET_CODE (op) == SUBREG)
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
	return general_operand (op, mode);

#ifdef CLASS_CANNOT_CHANGE_SIZE
      if (GET_CODE (SUBREG_REG (op)) == REG
	  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER
	  && TEST_HARD_REG_BIT (reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE],
				REGNO (SUBREG_REG (op)))
	  && (GET_MODE_SIZE (mode)
	      != GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
	  && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_FLOAT)

      op = SUBREG_REG (op);

  /* If we have an ADDRESSOF, consider it valid since it will be
     converted into something that will not be a MEM.  */
  if (GET_CODE (op) == ADDRESSOF)

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));

/* Return 1 for a register in Pmode; ignore the tested mode.  */

pmode_register_operand (op, mode)
     enum machine_mode mode ATTRIBUTE_UNUSED;
  return register_operand (op, Pmode);

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

scratch_operand (op, mode)
     enum machine_mode mode;
  if (GET_MODE (op) != mode && mode != VOIDmode)

  return (GET_CODE (op) == SCRATCH
	  || (GET_CODE (op) == REG
	      && REGNO (op) < FIRST_PSEUDO_REGISTER));
/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

immediate_operand (op, mode)
     enum machine_mode mode;
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)

  /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
     result in 0/1.  It seems a safe assumption that this is
     in range for everyone.  */
  if (GET_CODE (op) == CONSTANT_P_RTX)

  return (CONSTANT_P (op)
	  && (GET_MODE (op) == mode || mode == VOIDmode
	      || GET_MODE (op) == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
	  && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	  && LEGITIMATE_CONSTANT_P (op));

/* Returns 1 if OP is an operand that is a CONST_INT.  */

const_int_operand (op, mode)
     enum machine_mode mode ATTRIBUTE_UNUSED;
  return GET_CODE (op) == CONST_INT;

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

const_double_operand (op, mode)
     enum machine_mode mode;
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)

  return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
	  && (mode == VOIDmode || GET_MODE (op) == mode
	      || GET_MODE (op) == VOIDmode));

/* Return 1 if OP is a general operand that is not an immediate operand.  */

nonimmediate_operand (op, mode)
     enum machine_mode mode;
  return (general_operand (op, mode) && ! CONSTANT_P (op));
/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

nonmemory_operand (op, mode)
     enum machine_mode mode;
  if (CONSTANT_P (op))
      /* Don't accept CONST_INT or anything similar
	 if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
	  && GET_MODE_CLASS (mode) != MODE_INT
	  && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
#ifdef LEGITIMATE_PIC_OPERAND_P
	      && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	      && LEGITIMATE_CONSTANT_P (op));

  if (GET_MODE (op) != mode && mode != VOIDmode)

  if (GET_CODE (op) == SUBREG)
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
	return general_operand (op, mode);
      op = SUBREG_REG (op);

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

push_operand (op, mode)
     enum machine_mode mode;
  if (GET_CODE (op) != MEM)

  if (mode != VOIDmode && GET_MODE (op) != mode)

  if (GET_CODE (op) != STACK_PUSH_CODE)

  return XEXP (op, 0) == stack_pointer_rtx;
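
/* Illustrative usage sketch (not part of the original file): on a target
   whose stack grows downward, the kind of rtx push_operand accepts looks
   like the MEM built below.  Pmode and word_mode stand in for whatever
   the target actually uses.  */
#if 0
  rtx push = gen_rtx_MEM (word_mode,
			  gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx));

  if (push_operand (push, word_mode))
    ;  /* PUSH denotes pushing one word onto the stack.  */
#endif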
/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

pop_operand (op, mode)
     enum machine_mode mode;
  if (GET_CODE (op) != MEM)

  if (mode != VOIDmode && GET_MODE (op) != mode)

  if (GET_CODE (op) != STACK_POP_CODE)

  return XEXP (op, 0) == stack_pointer_rtx;

/* Return 1 if ADDR is a valid memory address for mode MODE.  */

memory_address_p (mode, addr)
     enum machine_mode mode ATTRIBUTE_UNUSED;
  if (GET_CODE (addr) == ADDRESSOF)

  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

memory_operand (op, mode)
     enum machine_mode mode;
  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return GET_CODE (op) == MEM && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)

  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (GET_CODE (inner) == MEM && general_operand (op, mode));
/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

indirect_operand (op, mode)
     enum machine_mode mode;
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
      register int offset = SUBREG_WORD (op) * UNITS_PER_WORD;
      rtx inner = SUBREG_REG (op);

      if (BYTES_BIG_ENDIAN)
	offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (op)))
		   - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (inner))));

      if (mode != VOIDmode && GET_MODE (op) != mode)

      /* The only way that we can have a general_operand as the resulting
	 address is if OFFSET is zero and the address already is an operand
	 or if the address is (plus Y (const_int -OFFSET)) and Y is an
	 operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
	      || (GET_CODE (XEXP (inner, 0)) == PLUS
		  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
		  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
		  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));

  return (GET_CODE (op) == MEM
	  && memory_operand (op, mode)
	  && general_operand (XEXP (op, 0), Pmode));
/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

comparison_operator (op, mode)
     enum machine_mode mode;
  return ((mode == VOIDmode || GET_MODE (op) == mode)
	  && GET_RTX_CLASS (GET_CODE (op)) == '<');
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

asm_noperands (body)
  if (GET_CODE (body) == ASM_OPERANDS)
    /* No output operands: return number of input operands.  */
    return ASM_OPERANDS_INPUT_LENGTH (body);
  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
    return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
  else if (GET_CODE (body) == PARALLEL
	   && GET_CODE (XVECEXP (body, 0, 0)) == SET
	   && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
      /* Multiple output operands, or 1 output plus some clobbers:
	 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */

      /* Count backwards through CLOBBERs to determine number of SETs.  */
      for (i = XVECLEN (body, 0); i > 0; i--)
	  if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
	  if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)

      /* N_SETS is now number of output operands.  */

      /* Verify that all the SETs we have
	 came from a single original asm_operands insn
	 (so that invalid combinations are blocked).  */
      for (i = 0; i < n_sets; i++)
	  rtx elt = XVECEXP (body, 0, i);
	  if (GET_CODE (elt) != SET)
	  if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
	  /* If these ASM_OPERANDS rtx's came from different original insns
	     then they aren't allowed together.  */
	  if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
	      != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))

      return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
  else if (GET_CODE (body) == PARALLEL
	   && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
      /* 0 outputs, but some clobbers:
	 body is [(asm_operands ...) (clobber (reg ...))...].  */

      /* Make sure all the other parallel things really are clobbers.  */
      for (i = XVECLEN (body, 0) - 1; i > 0; i--)
	if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)

      return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
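
/* Illustrative usage sketch (not part of the original file): the usual way
   callers distinguish an asm-with-operands insn from an ordinary one.
   INSN is a hypothetical insn owned by the caller.  */
#if 0
  int n = asm_noperands (PATTERN (insn));

  if (n >= 0)
    ;  /* INSN is an `asm' with N total input and output operands.  */
#endif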
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

decode_asm_operands (body, operands, operand_locs, constraints, modes)
     const char **constraints;
     enum machine_mode *modes;
  const char *template = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
      rtx asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      for (i = 1; i < noperands; i++)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);

      /* The output is in the SET.
	 Its constraint is in the ASM_OPERANDS itself.  */
	operands[0] = SET_DEST (body);
	operand_locs[0] = &SET_DEST (body);
	constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
	modes[0] = GET_MODE (SET_DEST (body));
      template = ASM_OPERANDS_TEMPLATE (asmop);
  else if (GET_CODE (body) == ASM_OPERANDS)
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.  */
      /* Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i);
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);

      template = ASM_OPERANDS_TEMPLATE (asmop);
  else if (GET_CODE (body) == PARALLEL
	   && GET_CODE (XVECEXP (body, 0, 0)) == SET)
      rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
      int nout = 0;		/* Does not include CLOBBERs.  */

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
	 Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
	  if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
	    break;		/* Past last SET */

	    operands[i] = SET_DEST (XVECEXP (body, 0, i));
	    operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
	    constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
	    modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));

      for (i = 0; i < nin; i++)
	    operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
	    operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
	    constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	    modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);

      template = ASM_OPERANDS_TEMPLATE (asmop);
  else if (GET_CODE (body) == PARALLEL
	   && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
      /* No outputs, but some CLOBBERs.  */

      rtx asmop = XVECEXP (body, 0, 0);
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i);
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);

      template = ASM_OPERANDS_TEMPLATE (asmop);
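
/* Illustrative usage sketch (not part of the original file): pulling the
   operands, constraints and modes out of an asm pattern, mirroring what
   check_asm_operands does above.  X is a hypothetical asm pattern whose
   operand count is bounded by MAX_RECOG_OPERANDS.  */
#if 0
  rtx operands[MAX_RECOG_OPERANDS];
  const char *constraints[MAX_RECOG_OPERANDS];
  enum machine_mode modes[MAX_RECOG_OPERANDS];
  const char *templ;

  templ = decode_asm_operands (x, operands, NULL_PTR, constraints, modes);
#endif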
/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

asm_operand_ok (op, constraint)
     const char *constraint;
  /* Use constrain_operands after reload.  */
  if (reload_completed)

      switch (*constraint++)
	case '0': case '1': case '2': case '3': case '4':
	case '5': case '6': case '7': case '8': case '9':
	  /* For best results, our caller should have given us the
	     proper matching constraint, but we can't actually fail
	     the check if they didn't.  Indicate that results are
	     inconclusive.  */
	  if (address_operand (op, VOIDmode))

	case 'V': /* non-offsettable */
	  if (memory_operand (op, VOIDmode))

	case 'o': /* offsettable */
	  if (offsettable_nonstrict_memref_p (op))

	  /* ??? Before flow, auto inc/dec insns are not supposed to exist,
	     excepting those that expand_call created.  Further, on some
	     machines which do not have generalized auto inc/dec, an inc/dec
	     is not a memory_operand.

	     Match any memory and hope things are resolved after reload.  */

	  if (GET_CODE (op) == MEM
		  || GET_CODE (XEXP (op, 0)) == PRE_DEC
		  || GET_CODE (XEXP (op, 0)) == POST_DEC))

	  if (GET_CODE (op) == MEM
		  || GET_CODE (XEXP (op, 0)) == PRE_INC
		  || GET_CODE (XEXP (op, 0)) == POST_INC))

#ifndef REAL_ARITHMETIC
	  /* Match any floating double constant, but only if
	     we can examine the bits of it reliably.  */
	  if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
	       || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
	      && GET_MODE (op) != VOIDmode && ! flag_pretend_float)

	  if (GET_CODE (op) == CONST_DOUBLE)

	  if (GET_CODE (op) == CONST_DOUBLE
	      && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'G'))

	  if (GET_CODE (op) == CONST_DOUBLE
	      && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'H'))

	  if (GET_CODE (op) == CONST_INT
	      || (GET_CODE (op) == CONST_DOUBLE
		  && GET_MODE (op) == VOIDmode))

#ifdef LEGITIMATE_PIC_OPERAND_P
	      && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))

	  if (GET_CODE (op) == CONST_INT
	      || (GET_CODE (op) == CONST_DOUBLE
		  && GET_MODE (op) == VOIDmode))

	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'))
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'J'))
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'))
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'N'))
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'O'))
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'P'))

	  if (general_operand (op, VOIDmode))

#ifdef EXTRA_CONSTRAINT
	  if (EXTRA_CONSTRAINT (op, 'Q'))
	  if (EXTRA_CONSTRAINT (op, 'R'))
	  if (EXTRA_CONSTRAINT (op, 'S'))
	  if (EXTRA_CONSTRAINT (op, 'T'))
	  if (EXTRA_CONSTRAINT (op, 'U'))

	  if (GET_MODE (op) == BLKmode)
	  if (register_operand (op, VOIDmode))
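
/* Illustrative usage sketch (not part of the original file): testing one asm
   operand against a single-letter constraint string, the way
   check_asm_operands does for each operand above.  OP is a hypothetical rtx
   owned by the caller.  */
#if 0
  if (asm_operand_ok (op, "r") > 0)
    ;  /* OP is definitely acceptable for an "r" (register) constraint.  */
#endif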
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

find_constant_term_loc (p)
  register enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
      tem = find_constant_term_loc (&XEXP (*p, 0));

  if (XEXP (*p, 1) != 0)
      tem = find_constant_term_loc (&XEXP (*p, 1));
/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

offsettable_memref_p (op)
  return ((GET_CODE (op) == MEM)
	  && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

offsettable_nonstrict_memref_p (op)
  return ((GET_CODE (op) == MEM)
	  && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));

/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid after the addition of a positive integer
   less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

offsettable_address_p (strictp, mode, y)
     enum machine_mode mode;
  register enum rtx_code ycode = GET_CODE (y);
  int (*addressp) PARAMS ((enum machine_mode, rtx)) =
    (strictp ? strict_memory_address_p : memory_address_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y))

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
      *y2 = plus_constant (*y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
	 for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y);

      /* In any case, restore old contents of memory.  */

  if (ycode == PRE_DEC || ycode == PRE_INC
      || ycode == POST_DEC || ycode == POST_INC)

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  */

  z = plus_constant_for_output (y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z);
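
/* Illustrative usage sketch (not part of the original file): a reload-time
   caller asking whether MEM's address survives adding a small offset, so
   that the MEM can be accessed piecewise.  MEM is a hypothetical memory
   reference owned by the caller.  */
#if 0
  if (offsettable_memref_p (mem))
    ;  /* plus_constant (XEXP (mem, 0), delta) stays a valid address
	  for any 0 <= delta < GET_MODE_SIZE (GET_MODE (mem)).  */
#endif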
/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */

mode_dependent_address_p (addr)
     rtx addr ATTRIBUTE_UNUSED; /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS.  */
  GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);

  /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
 win: ATTRIBUTE_UNUSED_LABEL

/* Return 1 if OP is a general operand
   other than a memory ref with a mode dependent address.  */

mode_independent_operand (op, mode)
     enum machine_mode mode;
  if (! general_operand (op, mode))

  if (GET_CODE (op) != MEM)

  addr = XEXP (op, 0);
  GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);

  /* Label `lose' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
 lose: ATTRIBUTE_UNUSED_LABEL
/* Given an operand OP that is a valid memory reference which
   satisfies offsettable_memref_p, return a new memory reference whose
   address has been adjusted by OFFSET.  OFFSET should be positive and
   less than the size of the object referenced.  */

adj_offsettable_operand (op, offset)
  register enum rtx_code code = GET_CODE (op);
      register rtx y = XEXP (op, 0);

      if (CONSTANT_ADDRESS_P (y))
	  new = gen_rtx_MEM (GET_MODE (op),
			     plus_constant_for_output (y, offset));
	  MEM_COPY_ATTRIBUTES (new, op);

      if (GET_CODE (y) == PLUS)
	  register rtx *const_loc;

	  const_loc = find_constant_term_loc (&z);
	      *const_loc = plus_constant_for_output (*const_loc, offset);

      new = gen_rtx_MEM (GET_MODE (op), plus_constant_for_output (y, offset));
      MEM_COPY_ATTRIBUTES (new, op);
/* Analyze INSN and fill in recog_data.  */

  rtx body = PATTERN (insn);

  recog_data.n_operands = 0;
  recog_data.n_alternatives = 0;
  recog_data.n_dups = 0;

  switch (GET_CODE (body))
      recog_data.n_operands = noperands = asm_noperands (body);
	  /* This insn is an `asm' with operands.  */

	  /* expand_asm_operands makes sure there aren't too many operands.  */
	  if (noperands > MAX_RECOG_OPERANDS)

	  /* Now get the operand values and constraints out of the insn.  */
	  decode_asm_operands (body, recog_data.operand,
			       recog_data.operand_loc,
			       recog_data.constraints,
			       recog_data.operand_mode);
	      const char *p = recog_data.constraints[0];
	      recog_data.n_alternatives = 1;
		recog_data.n_alternatives += (*p++ == ',');

      /* Ordinary insn: recognize it, get the operands via insn_extract
	 and get the constraints.  */

      icode = recog_memoized (insn);
	fatal_insn_not_found (insn);

      recog_data.n_operands = noperands = insn_data[icode].n_operands;
      recog_data.n_alternatives = insn_data[icode].n_alternatives;
      recog_data.n_dups = insn_data[icode].n_dups;

      insn_extract (insn);

      for (i = 0; i < noperands; i++)
	  recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
	  recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;

  for (i = 0; i < noperands; i++)
    recog_data.operand_type[i]
      = (recog_data.constraints[i][0] == '=' ? OP_OUT
	 : recog_data.constraints[i][0] == '+' ? OP_INOUT

  if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
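
/* Illustrative usage sketch (not part of the original file): a late pass
   inspecting the operands of INSN through the recog_data global that
   extract_insn fills in.  INSN is a hypothetical insn owned by the
   caller.  */
#if 0
  int i;

  extract_insn (insn);
  for (i = 0; i < recog_data.n_operands; i++)
    if (recog_data.operand_type[i] == OP_OUT)
      ;  /* recog_data.operand[i] is an output operand of INSN.  */
#endif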
/* After calling extract_insn, you can use this function to extract some
   information from the constraint strings into a more usable form.
   The collected data is stored in recog_op_alt.  */

preprocess_constraints ()
  memset (recog_op_alt, 0, sizeof recog_op_alt);
  for (i = 0; i < recog_data.n_operands; i++)
      struct operand_alternative *op_alt;
      const char *p = recog_data.constraints[i];

      op_alt = recog_op_alt[i];

      for (j = 0; j < recog_data.n_alternatives; j++)
	  op_alt[j].class = NO_REGS;
	  op_alt[j].constraint = p;
	  op_alt[j].matches = -1;
	  op_alt[j].matched = -1;

	  if (*p == '\0' || *p == ',')
	      op_alt[j].anything_ok = 1;

	  while (c != ',' && c != '\0');
	  if (c == ',' || c == '\0')

	      case '=': case '+': case '*': case '%':
	      case 'E': case 'F': case 'G': case 'H':
	      case 's': case 'i': case 'n':
	      case 'I': case 'J': case 'K': case 'L':
	      case 'M': case 'N': case 'O': case 'P':
#ifdef EXTRA_CONSTRAINT
	      case 'Q': case 'R': case 'S': case 'T': case 'U':
		/* These don't say anything we care about.  */

		op_alt[j].reject += 6;
		op_alt[j].reject += 600;
		op_alt[j].earlyclobber = 1;

	      case '0': case '1': case '2': case '3': case '4':
	      case '5': case '6': case '7': case '8': case '9':
		op_alt[j].matches = c - '0';
		recog_op_alt[op_alt[j].matches][j].matched = i;

		op_alt[j].memory_ok = 1;
		op_alt[j].decmem_ok = 1;
		op_alt[j].incmem_ok = 1;
		op_alt[j].nonoffmem_ok = 1;
		op_alt[j].offmem_ok = 1;
		op_alt[j].anything_ok = 1;

		op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) BASE_REG_CLASS];
		op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
		op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) REG_CLASS_FROM_LETTER ((unsigned char) c)];
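
/* Illustrative usage sketch (not part of the original file): a pass that
   combines extract_insn with preprocess_constraints and then reads the
   per-operand, per-alternative summary in recog_op_alt.  INSN is a
   hypothetical insn owned by the caller.  */
#if 0
  extract_insn (insn);
  preprocess_constraints ();

  if (recog_op_alt[0][0].memory_ok)
    ;  /* Operand 0 may be a MEM in the first constraint alternative.  */
#endif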
/* Check the operands of an insn against the insn's operand constraints
   and return 1 if they are valid.
   The information about the insn's operands, constraints, operand modes
   etc. is obtained from the global variables set up by extract_insn.

   WHICH_ALTERNATIVE is set to a number which indicates which
   alternative of constraints was matched: 0 for the first alternative,
   1 for the next, etc.

   In addition, when two operands are required to match
   and it happens that the output operand is (reg) while the
   input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
   make the output operand look like the input.
   This is because the output operand is the one the template will print.

   This is used in final, just before printing the assembler code and by
   the routines that determine an insn's attribute.

   If STRICT is a positive non-zero value, it means that we have been
   called after reload has been completed.  In that case, we must
   do all checks strictly.  If it is zero, it means that we have been called
   before reload has completed.  In that case, we first try to see if we can
   find an alternative that matches strictly.  If not, we try again, this
   time assuming that reload will fix up the insn.  This provides a "best
   guess" for the alternative and is used to compute attributes of insns prior
   to reload.  A negative value of STRICT is used for this internal call.  */
constrain_operands (strict)
  const char *constraints[MAX_RECOG_OPERANDS];
  int matching_operands[MAX_RECOG_OPERANDS];
  int earlyclobber[MAX_RECOG_OPERANDS];
  struct funny_match funny_match[MAX_RECOG_OPERANDS];
  int funny_match_index;

  if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)

  for (c = 0; c < recog_data.n_operands; c++)
      constraints[c] = recog_data.constraints[c];
      matching_operands[c] = -1;

  which_alternative = 0;

  while (which_alternative < recog_data.n_alternatives)
      funny_match_index = 0;

      for (opno = 0; opno < recog_data.n_operands; opno++)
	  register rtx op = recog_data.operand[opno];
	  enum machine_mode mode = GET_MODE (op);
	  register const char *p = constraints[opno];

	  earlyclobber[opno] = 0;

	  /* A unary operator may be accepted by the predicate, but it
	     is irrelevant for matching constraints.  */
	  if (GET_RTX_CLASS (GET_CODE (op)) == '1')

	  if (GET_CODE (op) == SUBREG)
	      if (GET_CODE (SUBREG_REG (op)) == REG
		  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
		offset = SUBREG_WORD (op);
	      op = SUBREG_REG (op);

	  /* An empty constraint or empty alternative
	     allows anything which matched the pattern.  */
	  if (*p == 0 || *p == ',')

	  while (*p && (c = *p++) != ',')
	      case '?':  case '!': case '*':  case '%':

		/* Ignore rest of this alternative as far as
		   constraint checking is concerned.  */
		while (*p && *p != ',')

		earlyclobber[opno] = 1;

	      case '0': case '1': case '2': case '3': case '4':
	      case '5': case '6': case '7': case '8': case '9':

		/* This operand must be the same as a previous one.
		   This kind of constraint is used for instructions such
		   as add when they take only two operands.

		   Note that the lower-numbered operand is passed first.

		   If we are not testing strictly, assume that this constraint
		   will be satisfied.  */

		    rtx op1 = recog_data.operand[c - '0'];
		    rtx op2 = recog_data.operand[opno];

		    /* A unary operator may be accepted by the predicate,
		       but it is irrelevant for matching constraints.  */
		    if (GET_RTX_CLASS (GET_CODE (op1)) == '1')
		      op1 = XEXP (op1, 0);
		    if (GET_RTX_CLASS (GET_CODE (op2)) == '1')
		      op2 = XEXP (op2, 0);

		    val = operands_match_p (op1, op2);

		matching_operands[opno] = c - '0';
		matching_operands[c - '0'] = opno;

		/* If output is *x and input is *--x,
		   arrange later to change the output to *--x as well,
		   since the output op is the one that will be printed.  */
		if (val == 2 && strict > 0)
		    funny_match[funny_match_index].this = opno;
		    funny_match[funny_match_index++].other = c - '0';

		/* p is used for address_operands.  When we are called by
		   gen_reload, no one will have checked that the address is
		   strictly valid, i.e., that all pseudos requiring hard regs
		   have gotten them.  */
		    || (strict_memory_address_p (recog_data.operand_mode[opno],

		/* No need to check general_operand again;
		   it was done in insn-recog.c.  */

		/* Anything goes unless it is a REG and really has a hard reg
		   but the hard reg is not in the class GENERAL_REGS.  */
		    || GENERAL_REGS == ALL_REGS
		    || GET_CODE (op) != REG
		    || (reload_in_progress
			&& REGNO (op) >= FIRST_PSEUDO_REGISTER)
		    || reg_fits_class_p (op, GENERAL_REGS, offset, mode))

		     && GET_CODE (op) == REG
		     && REGNO (op) >= FIRST_PSEUDO_REGISTER)
		    || (strict == 0 && GET_CODE (op) == SCRATCH)
		    || (GET_CODE (op) == REG
			&& ((GENERAL_REGS == ALL_REGS
			     && REGNO (op) < FIRST_PSEUDO_REGISTER)
			    || reg_fits_class_p (op, GENERAL_REGS,
2368 /* This is used for a MATCH_SCRATCH in the cases when
2369 we don't actually need anything. So anything goes
              case 'm':
                if (GET_CODE (op) == MEM
                    /* Before reload, accept what reload can turn into mem.  */
                    || (strict < 0 && CONSTANT_P (op))
                    /* During reload, accept a pseudo  */
                    || (reload_in_progress && GET_CODE (op) == REG
                        && REGNO (op) >= FIRST_PSEUDO_REGISTER))
                  win = 1;
                break;

              case '<':
                if (GET_CODE (op) == MEM
                    && (GET_CODE (XEXP (op, 0)) == PRE_DEC
                        || GET_CODE (XEXP (op, 0)) == POST_DEC))
                  win = 1;
                break;

              case '>':
                if (GET_CODE (op) == MEM
                    && (GET_CODE (XEXP (op, 0)) == PRE_INC
                        || GET_CODE (XEXP (op, 0)) == POST_INC))
                  win = 1;
                break;
              case 'E':
#ifndef REAL_ARITHMETIC
                /* Match any CONST_DOUBLE, but only if
                   we can examine the bits of it reliably.  */
                if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
                     || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
                    && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
                  break;
#endif
                if (GET_CODE (op) == CONST_DOUBLE)
                  win = 1;
                break;

              case 'F':
                if (GET_CODE (op) == CONST_DOUBLE)
                  win = 1;
                break;

              case 'G':
              case 'H':
                if (GET_CODE (op) == CONST_DOUBLE
                    && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
                  win = 1;
                break;
              case 's':
                if (GET_CODE (op) == CONST_INT
                    || (GET_CODE (op) == CONST_DOUBLE
                        && GET_MODE (op) == VOIDmode))
                  break;
              case 'i':
                if (CONSTANT_P (op))
                  win = 1;
                break;

              case 'n':
                if (GET_CODE (op) == CONST_INT
                    || (GET_CODE (op) == CONST_DOUBLE
                        && GET_MODE (op) == VOIDmode))
                  win = 1;
                break;

              case 'I':  case 'J':  case 'K':  case 'L':
              case 'M':  case 'N':  case 'O':  case 'P':
                if (GET_CODE (op) == CONST_INT
                    && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
                  win = 1;
                break;
#ifdef EXTRA_CONSTRAINT
              case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
                if (EXTRA_CONSTRAINT (op, c))
                  win = 1;
                break;
#endif

              case 'V':
                if (GET_CODE (op) == MEM
                    && ((strict > 0 && ! offsettable_memref_p (op))
                        || (strict < 0
                            && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
                        || (reload_in_progress
                            && !(GET_CODE (op) == REG
                                 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
                  win = 1;
                break;
              case 'o':
                if ((strict > 0 && offsettable_memref_p (op))
                    || (strict == 0 && offsettable_nonstrict_memref_p (op))
                    /* Before reload, accept what reload can handle.  */
                    || (strict < 0
                        && (CONSTANT_P (op) || GET_CODE (op) == MEM))
                    /* During reload, accept a pseudo  */
                    || (reload_in_progress && GET_CODE (op) == REG
                        && REGNO (op) >= FIRST_PSEUDO_REGISTER))
                  win = 1;
                break;
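
              /* Any other letter is a machine-specific register-class
                 constraint, mapped through REG_CLASS_FROM_LETTER below;
                 a backend might, for example, use 'f' for its floating
                 point registers (the letter is purely illustrative).  */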
              default:
                if (strict < 0
                    || (strict == 0
                        && GET_CODE (op) == REG
                        && REGNO (op) >= FIRST_PSEUDO_REGISTER)
                    || (strict == 0 && GET_CODE (op) == SCRATCH)
                    || (GET_CODE (op) == REG
                        && reg_fits_class_p (op, REG_CLASS_FROM_LETTER (c),
                                             offset, mode)))
                  win = 1;
              }
          constraints[opno] = p;
          /* If this operand did not win somehow,
             this alternative loses.  */
          if (! win)
            lose = 1;
        }
      /* This alternative won; the operands are ok.
         Change whichever operands this alternative says to change.  */
      if (! lose)
        {
          int opno, eopno;
          /* See if any earlyclobber operand conflicts with some other
             operand.  */

          if (strict > 0)
            for (eopno = 0; eopno < recog_data.n_operands; eopno++)
              /* Ignore earlyclobber operands now in memory,
                 because we would often report failure when we have
                 two memory operands, one of which was formerly a REG.  */
              if (earlyclobber[eopno]
                  && GET_CODE (recog_data.operand[eopno]) == REG)
                for (opno = 0; opno < recog_data.n_operands; opno++)
                  if ((GET_CODE (recog_data.operand[opno]) == MEM
                       || recog_data.operand_type[opno] != OP_OUT)
                      && opno != eopno
                      /* Ignore things like match_operator operands.  */
                      && *recog_data.constraints[opno] != 0
                      && ! (matching_operands[opno] == eopno
                            && operands_match_p (recog_data.operand[opno],
                                                 recog_data.operand[eopno]))
                      && ! safe_from_earlyclobber (recog_data.operand[opno],
                                                   recog_data.operand[eopno]))
                    lose = 1;
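
          /* The loop above enforces that an operand marked '&' (written
             before the insn has finished using its inputs) does not share
             a register with any input operand, except one it is explicitly
             matched to or one that safe_from_earlyclobber says cannot
             conflict.  */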

          if (! lose)
            {
              while (--funny_match_index >= 0)
                {
                  recog_data.operand[funny_match[funny_match_index].other]
                    = recog_data.operand[funny_match[funny_match_index].this];
                }

              return 1;
            }
        }

      which_alternative++;
    }

  /* If we are about to reject this, but we are not to test strictly,
     try a very loose test.  Only return failure if it fails also.  */
  if (strict == 0)
    return constrain_operands (-1);
  else
    return 0;
}
/* Return 1 iff OPERAND (assumed to be a REG rtx)
   is a hard reg in class CLASS when its regno is offset by OFFSET
   and changed to mode MODE.
   If REG occupies multiple hard regs, all of them must be in CLASS.  */

int
reg_fits_class_p (operand, class, offset, mode)
     rtx operand;
     register enum reg_class class;
     int offset;
     enum machine_mode mode;
{
  register int regno = REGNO (operand);
  if (regno < FIRST_PSEUDO_REGISTER
      && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
                            regno + offset))
    {
      register int sr;
      regno += offset;
      for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
           sr > 0; sr--)
        if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
                                 regno + sr))
          break;
      return sr == 0;
    }

  return 0;
}
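
/* For example, a DImode value kept in 32-bit hard registers typically
   occupies HARD_REGNO_NREGS == 2 consecutive registers, and the loop
   above insists that both of them belong to CLASS.  */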

/* Split all insns in the function.  If UPD_LIFE, update life info after.  */

void
split_all_insns (upd_life)
     int upd_life;
{
  sbitmap blocks;
  int changed;
  int i;

  blocks = sbitmap_alloc (n_basic_blocks);
  sbitmap_zero (blocks);
  changed = 0;

  for (i = n_basic_blocks - 1; i >= 0; --i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, next;

      for (insn = bb->head; insn ; insn = next)
        {
          rtx set;

          /* Can't use `next_real_insn' because that might go across
             CODE_LABELS and short-out basic blocks.  */
          next = NEXT_INSN (insn);
          if (GET_CODE (insn) != INSN)
            ;

          /* Don't split no-op move insns.  These should silently
             disappear later in final.  Splitting such insns would
             break the code that handles REG_NO_CONFLICT blocks.  */

          else if ((set = single_set (insn)) != NULL
                   && rtx_equal_p (SET_SRC (set), SET_DEST (set)))
            {
              /* Nops get in the way while scheduling, so delete them
                 now if register allocation has already been done.  It
                 is too risky to try to do this before register
                 allocation, and there are unlikely to be very many
                 nops then anyways.  */
              if (reload_completed)
                {
                  PUT_CODE (insn, NOTE);
                  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
                  NOTE_SOURCE_FILE (insn) = 0;
                }
            }
          else
            {
              /* Split insns here to get max fine-grain parallelism.  */
              rtx first = PREV_INSN (insn);
              rtx last = try_split (PATTERN (insn), insn, 1);

              if (last != insn)
                {
                  SET_BIT (blocks, i);
                  changed = 1;

                  /* try_split returns the NOTE that INSN became.  */
                  first = NEXT_INSN (first);
                  PUT_CODE (insn, NOTE);
                  NOTE_SOURCE_FILE (insn) = 0;
                  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;

                  if (insn == bb->end)
                    {
                      bb->end = last;
                      break;
                    }
                }
            }

          if (insn == bb->end)
            break;
        }

      /* ??? When we're called from just after reload, the CFG is in bad
         shape, and we may have fallen off the end.  This could be fixed
         by having reload not try to delete unreachable code.  Otherwise
         assert we found the end insn.  */
      if (insn == NULL && upd_life)
        abort ();
    }

  if (changed && upd_life)
    {
      compute_bb_for_insn (get_max_uid ());
      count_or_remove_death_notes (blocks, 1);
      update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
    }

  sbitmap_free (blocks);
}
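
/* The peephole2 pass below matches the machine description's
   define_peephole2 patterns against short windows of insns.  The helpers
   keep a small circular buffer of the insns currently in the window,
   together with the set of registers live before each one, so that the
   patterns can test register liveness and request free scratch
   registers.  */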

#ifdef HAVE_peephole2

struct peep2_insn_data
{
  rtx insn;
  regset live_before;
};

static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
static int peep2_current;

/* A non-insn marker indicating the last insn of the block.
   The live_before regset for this element is correct, indicating
   global_live_at_end for the block.  */
#define PEEP2_EOB pc_rtx

/* Return the Nth non-note insn after `current', or return NULL_RTX if it
   does not exist.  Used by the recognizer to find the next insn to match
   in a multi-insn pattern.  */

rtx
peep2_next_insn (n)
     int n;
{
  if (n >= MAX_INSNS_PER_PEEP2 + 1)
    abort ();

  n += peep2_current;
  if (n >= MAX_INSNS_PER_PEEP2 + 1)
    n -= MAX_INSNS_PER_PEEP2 + 1;

  if (peep2_insn_data[n].insn == PEEP2_EOB)
    return NULL_RTX;

  return peep2_insn_data[n].insn;
}
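
/* The insn buffer is circular: if, say, MAX_INSNS_PER_PEEP2 were 5 and
   peep2_current were 4, peep2_next_insn (3) would look at slot
   (4 + 3) - (5 + 1) = 1.  (The value 5 is purely illustrative; the real
   limit is generated from the machine description.)  */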

/* Return true if REGNO is dead before the Nth non-note insn
   after `current'.  */

int
peep2_regno_dead_p (ofs, regno)
     int ofs;
     int regno;
{
  if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
    abort ();

  ofs += peep2_current;
  if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
    ofs -= MAX_INSNS_PER_PEEP2 + 1;

  if (peep2_insn_data[ofs].insn == NULL_RTX)
    abort ();

  return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
}

/* Similarly for a REG.  */

int
peep2_reg_dead_p (ofs, reg)
     int ofs;
     rtx reg;
{
  int regno, n;

  if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
    abort ();

  ofs += peep2_current;
  if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
    ofs -= MAX_INSNS_PER_PEEP2 + 1;

  if (peep2_insn_data[ofs].insn == NULL_RTX)
    abort ();

  regno = REGNO (reg);
  n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
  while (--n >= 0)
    if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
      return 0;
  return 1;
}

/* Try to find a hard register of mode MODE, matching the register class in
   CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
   remains available until the end of LAST_INSN.  LAST_INSN may be NULL_RTX,
   in which case the only condition is that the register must be available
   before CURRENT_INSN.

   Registers that already have bits set in REG_SET will not be considered.

   If an appropriate register is available, it will be returned and the
   corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
   returned.  */

rtx
peep2_find_free_register (from, to, class_str, mode, reg_set)
     int from, to;
     const char *class_str;
     enum machine_mode mode;
     HARD_REG_SET *reg_set;
{
  static int search_ofs;
  enum reg_class class;
  HARD_REG_SET live;
  int i;

  if (from >= MAX_INSNS_PER_PEEP2 + 1 || to >= MAX_INSNS_PER_PEEP2 + 1)
    abort ();

  from += peep2_current;
  if (from >= MAX_INSNS_PER_PEEP2 + 1)
    from -= MAX_INSNS_PER_PEEP2 + 1;
  to += peep2_current;
  if (to >= MAX_INSNS_PER_PEEP2 + 1)
    to -= MAX_INSNS_PER_PEEP2 + 1;

  if (peep2_insn_data[from].insn == NULL_RTX)
    abort ();
  REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);

  while (from != to)
    {
      HARD_REG_SET this_live;

      if (++from >= MAX_INSNS_PER_PEEP2 + 1)
        from = 0;
      if (peep2_insn_data[from].insn == NULL_RTX)
        abort ();
      REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
      IOR_HARD_REG_SET (live, this_live);
    }

  class = (class_str[0] == 'r' ? GENERAL_REGS
           : REG_CLASS_FROM_LETTER (class_str[0]));

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int raw_regno, regno, success, j;

      /* Distribute the free registers as much as possible.  */
      raw_regno = search_ofs + i;
      if (raw_regno >= FIRST_PSEUDO_REGISTER)
        raw_regno -= FIRST_PSEUDO_REGISTER;
#ifdef REG_ALLOC_ORDER
      regno = reg_alloc_order[raw_regno];
#else
      regno = raw_regno;
#endif

      /* Don't allocate fixed registers.  */
      if (fixed_regs[regno])
        continue;
      /* Make sure the register is of the right class.  */
      if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno))
        continue;
      /* And can support the mode we need.  */
      if (! HARD_REGNO_MODE_OK (regno, mode))
        continue;
      /* And that we don't create an extra save/restore.  */
      if (! call_used_regs[regno] && ! regs_ever_live[regno])
        continue;
      /* And we don't clobber traceback for noreturn functions.  */
      if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
          && (! reload_completed || frame_pointer_needed))
        continue;

      success = 1;
      for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
        {
          if (TEST_HARD_REG_BIT (*reg_set, regno + j)
              || TEST_HARD_REG_BIT (live, regno + j))
            {
              success = 0;
              break;
            }
        }
      if (success)
        {
          for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
            SET_HARD_REG_BIT (*reg_set, regno + j);

          /* Start the next search with the next register.  */
          if (++raw_regno >= FIRST_PSEUDO_REGISTER)
            raw_regno = 0;
          search_ofs = raw_regno;

          return gen_rtx_REG (mode, regno);
        }
    }

  search_ofs = 0;
  return NULL_RTX;
}
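
/* For illustration only (not taken from any real machine description),
   a define_peephole2's C code might ask for a free SImode scratch
   register roughly like this:

        HARD_REG_SET used;
        CLEAR_HARD_REG_SET (used);
        if (peep2_find_free_register (0, 1, "r", SImode, &used))
          ... use the returned REG as a temporary ...

   The surrounding names are hypothetical; only peep2_find_free_register
   and the CLEAR_HARD_REG_SET macro are real.  */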

/* Perform the peephole2 optimization pass.  */

void
peephole2_optimize (dump_file)
     FILE *dump_file ATTRIBUTE_UNUSED;
{
  regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
  rtx insn, prev;
  regset live;
  int i, b;
#ifdef HAVE_conditional_execution
  sbitmap blocks;
#endif

  /* Initialize the regsets we're going to use.  */
  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    peep2_insn_data[i].live_before = INITIALIZE_REG_SET (rs_heads[i]);
  live = INITIALIZE_REG_SET (rs_heads[i]);

#ifdef HAVE_conditional_execution
  blocks = sbitmap_alloc (n_basic_blocks);
  sbitmap_zero (blocks);
#else
  count_or_remove_death_notes (NULL, 1);
#endif

  for (b = n_basic_blocks - 1; b >= 0; --b)
    {
      basic_block bb = BASIC_BLOCK (b);
      struct propagate_block_info *pbi;

      /* Indicate that all slots except the last hold invalid data.  */
      for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
        peep2_insn_data[i].insn = NULL_RTX;

      /* Indicate that the last slot contains live_after data.  */
      peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
      peep2_current = MAX_INSNS_PER_PEEP2;

      /* Start up propagation.  */
      COPY_REG_SET (live, bb->global_live_at_end);
      COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);

#ifdef HAVE_conditional_execution
      pbi = init_propagate_block_info (bb, live, NULL, 0);
#else
      pbi = init_propagate_block_info (bb, live, NULL, PROP_DEATH_NOTES);
#endif

      for (insn = bb->end; ; insn = prev)
        {
          prev = PREV_INSN (insn);
          if (INSN_P (insn))
            {
              rtx try;
              int match_len;

              /* Record this insn.  */
              if (--peep2_current < 0)
                peep2_current = MAX_INSNS_PER_PEEP2;
              peep2_insn_data[peep2_current].insn = insn;
              propagate_one_insn (pbi, insn);
              COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);

              /* Match the peephole.  */
              try = peephole2_insns (PATTERN (insn), insn, &match_len);
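
              /* peephole2_insns is generated from the machine description's
                 define_peephole2 patterns.  TRY is now the replacement insn
                 sequence, or NULL if no pattern matched; MATCH_LEN is the
                 offset, in recorded insns, of the last insn of the matched
                 window, used below to find the end of the chain being
                 replaced.  */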
              if (try != NULL)
                {
                  i = match_len + peep2_current;
                  if (i >= MAX_INSNS_PER_PEEP2 + 1)
                    i -= MAX_INSNS_PER_PEEP2 + 1;

                  /* Replace the old sequence with the new.  */
                  flow_delete_insn_chain (insn, peep2_insn_data[i].insn);
                  try = emit_insn_after (try, prev);

                  /* Adjust the basic block boundaries.  */
                  if (peep2_insn_data[i].insn == bb->end)
                    bb->end = try;
                  if (insn == bb->head)
                    bb->head = NEXT_INSN (prev);

#ifdef HAVE_conditional_execution
                  /* With conditional execution, we cannot back up the
                     live information so easily, since the conditional
                     death data structures are not so self-contained.
                     So record that we've made a modification to this
                     block and update life information at the end.  */
                  SET_BIT (blocks, b);

                  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
                    peep2_insn_data[i].insn = NULL_RTX;
                  peep2_insn_data[peep2_current].insn = PEEP2_EOB;
#else
                  /* Back up lifetime information past the end of the
                     newly created sequence.  */
                  if (++i >= MAX_INSNS_PER_PEEP2 + 1)
                    i = 0;
                  COPY_REG_SET (live, peep2_insn_data[i].live_before);

                  /* Update life information for the new sequence.  */
                  do
                    {
                      if (INSN_P (try))
                        {
                          if (--i < 0)
                            i = MAX_INSNS_PER_PEEP2;
                          peep2_insn_data[i].insn = try;
                          propagate_one_insn (pbi, try);
                          COPY_REG_SET (peep2_insn_data[i].live_before, live);
                        }
                      try = PREV_INSN (try);
                    }
                  while (try != prev);

                  /* ??? Should verify that LIVE now matches what we
                     had before the new sequence.  */

                  peep2_current = i;
#endif
                }
            }

          if (insn == bb->head)
            break;
        }

      free_propagate_block_info (pbi);
    }

  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    FREE_REG_SET (peep2_insn_data[i].live_before);
  FREE_REG_SET (live);

#ifdef HAVE_conditional_execution
  count_or_remove_death_notes (blocks, 1);
  update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
  sbitmap_free (blocks);
#endif
}
#endif /* HAVE_peephole2 */