1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
27 #include "insn-config.h"
28 #include "insn-attr.h"
29 #include "insn-flags.h"
30 #include "insn-codes.h"
31 #include "hard-reg-set.h"
38 #include "basic-block.h"
/* Default push/pop addressing codes when the target does not override
   them: a downward-growing stack pushes with a pre-decrement and pops
   with a post-increment; an upward-growing stack is the mirror image.
   (Restored missing #else/#endif lines so the conditionals balance.)  */

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
58 static void validate_replace_rtx_1
PARAMS ((rtx
*, rtx
, rtx
, rtx
));
59 static rtx
*find_single_use_1
PARAMS ((rtx
, rtx
*));
60 static rtx
*find_constant_term_loc
PARAMS ((rtx
*));
61 static int insn_invalid_p
PARAMS ((rtx
));
63 /* Nonzero means allow operands to be volatile.
64 This should be 0 if you are generating rtl, such as if you are calling
65 the functions in optabs.c and expmed.c (most of the time).
66 This should be 1 if all valid insns need to be recognized,
67 such as in regclass.c and final.c and reload.c.
69 init_recog and init_recog_no_volatile are responsible for setting this. */
73 struct recog_data recog_data
;
75 /* Contains a vector of operand_alternative structures for every operand.
76 Set up by preprocess_constraints. */
77 struct operand_alternative recog_op_alt
[MAX_RECOG_OPERANDS
][MAX_RECOG_ALTERNATIVES
];
79 /* On return from `constrain_operands', indicate which alternative
82 int which_alternative
;
84 /* Nonzero after end of reload pass.
85 Set to 1 or 0 by toplev.c.
86 Controls the significance of (SUBREG (MEM)). */
90 /* Initialize data used by the function `recog'.
91 This must be called once in the compilation of a function
92 before any insn recognition may be done in the function. */
95 init_recog_no_volatile ()
106 /* Try recognizing the instruction INSN,
107 and return the code number that results.
108 Remember the code so that repeated calls do not
109 need to spend the time for actual rerecognition.
111 This function is the normal interface to instruction recognition.
112 The automatically-generated function `recog' is normally called
113 through this one. (The only exception is in combine.c.) */
116 recog_memoized_1 (insn
)
119 if (INSN_CODE (insn
) < 0)
120 INSN_CODE (insn
) = recog (PATTERN (insn
), insn
, NULL_PTR
);
121 return INSN_CODE (insn
);
124 /* Check that X is an insn-body for an `asm' with operands
125 and that the operands mentioned in it are legitimate. */
128 check_asm_operands (x
)
133 const char **constraints
;
136 /* Post-reload, be more strict with things. */
137 if (reload_completed
)
139 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
140 extract_insn (make_insn_raw (x
));
141 constrain_operands (1);
142 return which_alternative
>= 0;
145 noperands
= asm_noperands (x
);
151 operands
= (rtx
*) alloca (noperands
* sizeof (rtx
));
152 constraints
= (const char **) alloca (noperands
* sizeof (char *));
154 decode_asm_operands (x
, operands
, NULL_PTR
, constraints
, NULL_PTR
);
156 for (i
= 0; i
< noperands
; i
++)
158 const char *c
= constraints
[i
];
161 if (ISDIGIT ((unsigned char)c
[0]) && c
[1] == '\0')
162 c
= constraints
[c
[0] - '0'];
164 if (! asm_operand_ok (operands
[i
], c
))
171 /* Static data for the next two routines. */
173 typedef struct change_t
181 static change_t
*changes
;
182 static int changes_allocated
;
184 static int num_changes
= 0;
186 /* Validate a proposed change to OBJECT. LOC is the location in the rtl for
187 at which NEW will be placed. If OBJECT is zero, no validation is done,
188 the change is simply made.
190 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
191 will be called with the address and mode as parameters. If OBJECT is
192 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
195 IN_GROUP is non-zero if this is part of a group of changes that must be
196 performed as a group. In that case, the changes will be stored. The
197 function `apply_change_group' will validate and apply the changes.
199 If IN_GROUP is zero, this is a single change. Try to recognize the insn
200 or validate the memory reference with the change applied. If the result
201 is not valid for the machine, suppress the change and return zero.
202 Otherwise, perform the change and return 1. */
205 validate_change (object
, loc
, new, in_group
)
213 if (old
== new || rtx_equal_p (old
, new))
216 if (in_group
== 0 && num_changes
!= 0)
221 /* Save the information describing this change. */
222 if (num_changes
>= changes_allocated
)
224 if (changes_allocated
== 0)
225 /* This value allows for repeated substitutions inside complex
226 indexed addresses, or changes in up to 5 insns. */
227 changes_allocated
= MAX_RECOG_OPERANDS
* 5;
229 changes_allocated
*= 2;
232 (change_t
*) xrealloc (changes
,
233 sizeof (change_t
) * changes_allocated
);
236 changes
[num_changes
].object
= object
;
237 changes
[num_changes
].loc
= loc
;
238 changes
[num_changes
].old
= old
;
240 if (object
&& GET_CODE (object
) != MEM
)
242 /* Set INSN_CODE to force rerecognition of insn. Save old code in
244 changes
[num_changes
].old_code
= INSN_CODE (object
);
245 INSN_CODE (object
) = -1;
250 /* If we are making a group of changes, return 1. Otherwise, validate the
251 change group we made. */
256 return apply_change_group ();
259 /* This subroutine of apply_change_group verifies whether the changes to INSN
260 were valid; i.e. whether INSN can still be recognized. */
263 insn_invalid_p (insn
)
266 int icode
= recog_memoized (insn
);
267 int is_asm
= icode
< 0 && asm_noperands (PATTERN (insn
)) >= 0;
269 if (is_asm
&& ! check_asm_operands (PATTERN (insn
)))
271 if (! is_asm
&& icode
< 0)
274 /* After reload, verify that all constraints are satisfied. */
275 if (reload_completed
)
279 if (! constrain_operands (1))
286 /* Apply a group of changes previously issued with `validate_change'.
287 Return 1 if all changes are valid, zero otherwise. */
290 apply_change_group ()
294 /* The changes have been applied and all INSN_CODEs have been reset to force
297 The changes are valid if we aren't given an object, or if we are
298 given a MEM and it still is a valid address, or if this is in insn
299 and it is recognized. In the latter case, if reload has completed,
300 we also require that the operands meet the constraints for
303 for (i
= 0; i
< num_changes
; i
++)
305 rtx object
= changes
[i
].object
;
310 if (GET_CODE (object
) == MEM
)
312 if (! memory_address_p (GET_MODE (object
), XEXP (object
, 0)))
315 else if (insn_invalid_p (object
))
317 rtx pat
= PATTERN (object
);
319 /* Perhaps we couldn't recognize the insn because there were
320 extra CLOBBERs at the end. If so, try to re-recognize
321 without the last CLOBBER (later iterations will cause each of
322 them to be eliminated, in turn). But don't do this if we
323 have an ASM_OPERAND. */
324 if (GET_CODE (pat
) == PARALLEL
325 && GET_CODE (XVECEXP (pat
, 0, XVECLEN (pat
, 0) - 1)) == CLOBBER
326 && asm_noperands (PATTERN (object
)) < 0)
330 if (XVECLEN (pat
, 0) == 2)
331 newpat
= XVECEXP (pat
, 0, 0);
337 = gen_rtx_PARALLEL (VOIDmode
,
338 rtvec_alloc (XVECLEN (pat
, 0) - 1));
339 for (j
= 0; j
< XVECLEN (newpat
, 0); j
++)
340 XVECEXP (newpat
, 0, j
) = XVECEXP (pat
, 0, j
);
343 /* Add a new change to this group to replace the pattern
344 with this new pattern. Then consider this change
345 as having succeeded. The change we added will
346 cause the entire call to fail if things remain invalid.
348 Note that this can lose if a later change than the one
349 we are processing specified &XVECEXP (PATTERN (object), 0, X)
350 but this shouldn't occur. */
352 validate_change (object
, &PATTERN (object
), newpat
, 1);
354 else if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
)
355 /* If this insn is a CLOBBER or USE, it is always valid, but is
363 if (i
== num_changes
)
375 /* Return the number of changes so far in the current group. */
378 num_validated_changes ()
383 /* Retract the changes numbered NUM and up. */
391 /* Back out all the changes. Do this in the opposite order in which
393 for (i
= num_changes
- 1; i
>= num
; i
--)
395 *changes
[i
].loc
= changes
[i
].old
;
396 if (changes
[i
].object
&& GET_CODE (changes
[i
].object
) != MEM
)
397 INSN_CODE (changes
[i
].object
) = changes
[i
].old_code
;
402 /* Replace every occurrence of FROM in X with TO. Mark each change with
403 validate_change passing OBJECT. */
406 validate_replace_rtx_1 (loc
, from
, to
, object
)
408 rtx from
, to
, object
;
411 register const char *fmt
;
412 register rtx x
= *loc
;
418 /* X matches FROM if it is the same rtx or they are both referring to the
419 same register in the same mode. Avoid calling rtx_equal_p unless the
420 operands look similar. */
423 || (GET_CODE (x
) == REG
&& GET_CODE (from
) == REG
424 && GET_MODE (x
) == GET_MODE (from
)
425 && REGNO (x
) == REGNO (from
))
426 || (GET_CODE (x
) == GET_CODE (from
) && GET_MODE (x
) == GET_MODE (from
)
427 && rtx_equal_p (x
, from
)))
429 validate_change (object
, loc
, to
, 1);
433 /* For commutative or comparison operations, try replacing each argument
434 separately and seeing if we made any changes. If so, put a constant
436 if (GET_RTX_CLASS (code
) == '<' || GET_RTX_CLASS (code
) == 'c')
438 int prev_changes
= num_changes
;
440 validate_replace_rtx_1 (&XEXP (x
, 0), from
, to
, object
);
441 validate_replace_rtx_1 (&XEXP (x
, 1), from
, to
, object
);
442 if (prev_changes
!= num_changes
&& CONSTANT_P (XEXP (x
, 0)))
444 validate_change (object
, loc
,
445 gen_rtx_fmt_ee (GET_RTX_CLASS (code
) == 'c' ? code
446 : swap_condition (code
),
447 GET_MODE (x
), XEXP (x
, 1),
455 /* Note that if CODE's RTX_CLASS is "c" or "<" we will have already
456 done the substitution, otherwise we won't. */
461 /* If we have a PLUS whose second operand is now a CONST_INT, use
462 plus_constant to try to simplify it. */
463 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
&& XEXP (x
, 1) == to
)
464 validate_change (object
, loc
, plus_constant (XEXP (x
, 0), INTVAL (to
)),
469 if (GET_CODE (to
) == CONST_INT
&& XEXP (x
, 1) == from
)
471 validate_change (object
, loc
,
472 plus_constant (XEXP (x
, 0), - INTVAL (to
)),
480 /* In these cases, the operation to be performed depends on the mode
481 of the operand. If we are replacing the operand with a VOIDmode
482 constant, we lose the information. So try to simplify the operation
484 if (GET_MODE (to
) == VOIDmode
485 && (rtx_equal_p (XEXP (x
, 0), from
)
486 || (GET_CODE (XEXP (x
, 0)) == SUBREG
487 && rtx_equal_p (SUBREG_REG (XEXP (x
, 0)), from
))))
491 /* If there is a subreg involved, crop to the portion of the
492 constant that we are interested in. */
493 if (GET_CODE (XEXP (x
, 0)) == SUBREG
)
494 to
= operand_subword (to
, SUBREG_WORD (XEXP (x
, 0)),
497 /* If the above didn't fail, perform the extension from the
498 mode of the operand (and not the mode of FROM). */
500 new = simplify_unary_operation (code
, GET_MODE (x
), to
,
501 GET_MODE (XEXP (x
, 0)));
503 /* If any of the above failed, substitute in something that
504 we know won't be recognized. */
506 new = gen_rtx_CLOBBER (GET_MODE (x
), const0_rtx
);
508 validate_change (object
, loc
, new, 1);
514 /* In case we are replacing by constant, attempt to simplify it to non-SUBREG
515 expression. We can't do this later, since the information about inner mode
517 if (CONSTANT_P (to
) && rtx_equal_p (SUBREG_REG (x
), from
))
519 if (GET_MODE_SIZE (GET_MODE (x
)) == UNITS_PER_WORD
520 && GET_MODE_SIZE (GET_MODE (from
)) > UNITS_PER_WORD
521 && GET_MODE_CLASS (GET_MODE (x
)) == MODE_INT
)
523 rtx temp
= operand_subword (to
, SUBREG_WORD (x
),
527 validate_change (object
, loc
, temp
, 1);
531 if (subreg_lowpart_p (x
))
533 rtx
new = gen_lowpart_if_possible (GET_MODE (x
), to
);
536 validate_change (object
, loc
, new, 1);
541 /* A paradoxical SUBREG of a VOIDmode constant is the same constant,
542 since we are saying that the high bits don't matter. */
543 if (GET_MODE (to
) == VOIDmode
544 && GET_MODE_SIZE (GET_MODE (x
)) > GET_MODE_SIZE (GET_MODE (from
)))
546 validate_change (object
, loc
, to
, 1);
551 /* Changing mode twice with SUBREG => just change it once,
552 or not at all if changing back to starting mode. */
553 if (GET_CODE (to
) == SUBREG
554 && rtx_equal_p (SUBREG_REG (x
), from
))
556 if (GET_MODE (x
) == GET_MODE (SUBREG_REG (to
))
557 && SUBREG_WORD (x
) == 0 && SUBREG_WORD (to
) == 0)
559 validate_change (object
, loc
, SUBREG_REG (to
), 1);
563 validate_change (object
, loc
,
564 gen_rtx_SUBREG (GET_MODE (x
), SUBREG_REG (to
),
565 SUBREG_WORD (x
) + SUBREG_WORD (to
)), 1);
569 /* If we have a SUBREG of a register that we are replacing and we are
570 replacing it with a MEM, make a new MEM and try replacing the
571 SUBREG with it. Don't do this if the MEM has a mode-dependent address
572 or if we would be widening it. */
574 if (GET_CODE (from
) == REG
575 && GET_CODE (to
) == MEM
576 && rtx_equal_p (SUBREG_REG (x
), from
)
577 && ! mode_dependent_address_p (XEXP (to
, 0))
578 && ! MEM_VOLATILE_P (to
)
579 && GET_MODE_SIZE (GET_MODE (x
)) <= GET_MODE_SIZE (GET_MODE (to
)))
581 int offset
= SUBREG_WORD (x
) * UNITS_PER_WORD
;
582 enum machine_mode mode
= GET_MODE (x
);
585 if (BYTES_BIG_ENDIAN
)
586 offset
+= (MIN (UNITS_PER_WORD
,
587 GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))))
588 - MIN (UNITS_PER_WORD
, GET_MODE_SIZE (mode
)));
590 new = gen_rtx_MEM (mode
, plus_constant (XEXP (to
, 0), offset
));
591 MEM_COPY_ATTRIBUTES (new, to
);
592 validate_change (object
, loc
, new, 1);
599 /* If we are replacing a register with memory, try to change the memory
600 to be the mode required for memory in extract operations (this isn't
601 likely to be an insertion operation; if it was, nothing bad will
602 happen, we might just fail in some cases). */
604 if (GET_CODE (from
) == REG
&& GET_CODE (to
) == MEM
605 && rtx_equal_p (XEXP (x
, 0), from
)
606 && GET_CODE (XEXP (x
, 1)) == CONST_INT
607 && GET_CODE (XEXP (x
, 2)) == CONST_INT
608 && ! mode_dependent_address_p (XEXP (to
, 0))
609 && ! MEM_VOLATILE_P (to
))
611 enum machine_mode wanted_mode
= VOIDmode
;
612 enum machine_mode is_mode
= GET_MODE (to
);
613 int pos
= INTVAL (XEXP (x
, 2));
616 if (code
== ZERO_EXTRACT
)
618 wanted_mode
= insn_data
[(int) CODE_FOR_extzv
].operand
[1].mode
;
619 if (wanted_mode
== VOIDmode
)
620 wanted_mode
= word_mode
;
624 if (code
== SIGN_EXTRACT
)
626 wanted_mode
= insn_data
[(int) CODE_FOR_extv
].operand
[1].mode
;
627 if (wanted_mode
== VOIDmode
)
628 wanted_mode
= word_mode
;
632 /* If we have a narrower mode, we can do something. */
633 if (wanted_mode
!= VOIDmode
634 && GET_MODE_SIZE (wanted_mode
) < GET_MODE_SIZE (is_mode
))
636 int offset
= pos
/ BITS_PER_UNIT
;
639 /* If the bytes and bits are counted differently, we
640 must adjust the offset. */
641 if (BYTES_BIG_ENDIAN
!= BITS_BIG_ENDIAN
)
642 offset
= (GET_MODE_SIZE (is_mode
) - GET_MODE_SIZE (wanted_mode
)
645 pos
%= GET_MODE_BITSIZE (wanted_mode
);
647 newmem
= gen_rtx_MEM (wanted_mode
,
648 plus_constant (XEXP (to
, 0), offset
));
649 MEM_COPY_ATTRIBUTES (newmem
, to
);
651 validate_change (object
, &XEXP (x
, 2), GEN_INT (pos
), 1);
652 validate_change (object
, &XEXP (x
, 0), newmem
, 1);
662 /* For commutative or comparison operations we've already performed
663 replacements. Don't try to perform them again. */
664 if (GET_RTX_CLASS (code
) != '<' && GET_RTX_CLASS (code
) != 'c')
666 fmt
= GET_RTX_FORMAT (code
);
667 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
670 validate_replace_rtx_1 (&XEXP (x
, i
), from
, to
, object
);
671 else if (fmt
[i
] == 'E')
672 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
673 validate_replace_rtx_1 (&XVECEXP (x
, i
, j
), from
, to
, object
);
678 /* Try replacing every occurrence of FROM in subexpression LOC of INSN
679 with TO. After all changes have been made, validate by seeing
680 if INSN is still valid. */
683 validate_replace_rtx_subexp (from
, to
, insn
, loc
)
684 rtx from
, to
, insn
, *loc
;
686 validate_replace_rtx_1 (loc
, from
, to
, insn
);
687 return apply_change_group ();
690 /* Try replacing every occurrence of FROM in INSN with TO. After all
691 changes have been made, validate by seeing if INSN is still valid. */
694 validate_replace_rtx (from
, to
, insn
)
697 validate_replace_rtx_1 (&PATTERN (insn
), from
, to
, insn
);
698 return apply_change_group ();
701 /* Try replacing every occurrence of FROM in INSN with TO. After all
702 changes have been made, validate by seeing if INSN is still valid. */
705 validate_replace_rtx_group (from
, to
, insn
)
708 validate_replace_rtx_1 (&PATTERN (insn
), from
, to
, insn
);
711 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
712 SET_DESTs. After all changes have been made, validate by seeing if
713 INSN is still valid. */
716 validate_replace_src (from
, to
, insn
)
719 if ((GET_CODE (insn
) != INSN
&& GET_CODE (insn
) != JUMP_INSN
)
720 || GET_CODE (PATTERN (insn
)) != SET
)
723 validate_replace_rtx_1 (&SET_SRC (PATTERN (insn
)), from
, to
, insn
);
724 if (GET_CODE (SET_DEST (PATTERN (insn
))) == MEM
)
725 validate_replace_rtx_1 (&XEXP (SET_DEST (PATTERN (insn
)), 0),
727 return apply_change_group ();
731 /* Return 1 if the insn using CC0 set by INSN does not contain
732 any ordered tests applied to the condition codes.
733 EQ and NE tests do not count. */
736 next_insn_tests_no_inequality (insn
)
739 register rtx next
= next_cc0_user (insn
);
741 /* If there is no next insn, we have to take the conservative choice. */
745 return ((GET_CODE (next
) == JUMP_INSN
746 || GET_CODE (next
) == INSN
747 || GET_CODE (next
) == CALL_INSN
)
748 && ! inequality_comparisons_p (PATTERN (next
)));
#if 0 /* This is useless since the insn that sets the cc's
	 must be followed immediately by the use of them.  */
/* Return 1 if the CC value set up by INSN is not used.  */

int
next_insns_test_no_inequality (insn)
     rtx insn;
{
  register rtx next = NEXT_INSN (insn);

  for (; next != 0; next = NEXT_INSN (next))
    {
      /* A label or barrier ends the window in which the CC is live.  */
      if (GET_CODE (next) == CODE_LABEL
	  || GET_CODE (next) == BARRIER)
	return 1;
      if (GET_CODE (next) == NOTE)
	continue;
      if (inequality_comparisons_p (PATTERN (next)))
	return 0;
      /* A new CC0 set supersedes the one made by INSN.  */
      if (sets_cc0_p (PATTERN (next)) == 1)
	return 1;
      if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
	return 1;
    }
  return 1;
}
#endif
780 /* This is used by find_single_use to locate an rtx that contains exactly one
781 use of DEST, which is typically either a REG or CC0. It returns a
782 pointer to the innermost rtx expression containing DEST. Appearances of
783 DEST that are being used to totally replace it are not counted. */
786 find_single_use_1 (dest
, loc
)
791 enum rtx_code code
= GET_CODE (x
);
808 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
809 of a REG that occupies all of the REG, the insn uses DEST if
810 it is mentioned in the destination or the source. Otherwise, we
811 need just check the source. */
812 if (GET_CODE (SET_DEST (x
)) != CC0
813 && GET_CODE (SET_DEST (x
)) != PC
814 && GET_CODE (SET_DEST (x
)) != REG
815 && ! (GET_CODE (SET_DEST (x
)) == SUBREG
816 && GET_CODE (SUBREG_REG (SET_DEST (x
))) == REG
817 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x
))))
818 + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
)
819 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x
)))
820 + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
))))
823 return find_single_use_1 (dest
, &SET_SRC (x
));
827 return find_single_use_1 (dest
, &XEXP (x
, 0));
833 /* If it wasn't one of the common cases above, check each expression and
834 vector of this code. Look for a unique usage of DEST. */
836 fmt
= GET_RTX_FORMAT (code
);
837 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
841 if (dest
== XEXP (x
, i
)
842 || (GET_CODE (dest
) == REG
&& GET_CODE (XEXP (x
, i
)) == REG
843 && REGNO (dest
) == REGNO (XEXP (x
, i
))))
846 this_result
= find_single_use_1 (dest
, &XEXP (x
, i
));
849 result
= this_result
;
850 else if (this_result
)
851 /* Duplicate usage. */
854 else if (fmt
[i
] == 'E')
858 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
860 if (XVECEXP (x
, i
, j
) == dest
861 || (GET_CODE (dest
) == REG
862 && GET_CODE (XVECEXP (x
, i
, j
)) == REG
863 && REGNO (XVECEXP (x
, i
, j
)) == REGNO (dest
)))
866 this_result
= find_single_use_1 (dest
, &XVECEXP (x
, i
, j
));
869 result
= this_result
;
870 else if (this_result
)
879 /* See if DEST, produced in INSN, is used only a single time in the
880 sequel. If so, return a pointer to the innermost rtx expression in which
883 If PLOC is non-zero, *PLOC is set to the insn containing the single use.
885 This routine will return usually zero either before flow is called (because
886 there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
887 note can't be trusted).
889 If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
890 care about REG_DEAD notes or LOG_LINKS.
892 Otherwise, we find the single use by finding an insn that has a
893 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
894 only referenced once in that insn, we know that it must be the first
895 and last insn referencing DEST. */
898 find_single_use (dest
, insn
, ploc
)
910 next
= NEXT_INSN (insn
);
912 || (GET_CODE (next
) != INSN
&& GET_CODE (next
) != JUMP_INSN
))
915 result
= find_single_use_1 (dest
, &PATTERN (next
));
922 if (reload_completed
|| reload_in_progress
|| GET_CODE (dest
) != REG
)
925 for (next
= next_nonnote_insn (insn
);
926 next
!= 0 && GET_CODE (next
) != CODE_LABEL
;
927 next
= next_nonnote_insn (next
))
928 if (INSN_P (next
) && dead_or_set_p (next
, dest
))
930 for (link
= LOG_LINKS (next
); link
; link
= XEXP (link
, 1))
931 if (XEXP (link
, 0) == insn
)
936 result
= find_single_use_1 (dest
, &PATTERN (next
));
946 /* Return 1 if OP is a valid general operand for machine mode MODE.
947 This is either a register reference, a memory reference,
948 or a constant. In the case of a memory reference, the address
949 is checked for general validity for the target machine.
951 Register and memory references must have mode MODE in order to be valid,
952 but some constants have no machine mode and are valid for any mode.
954 If MODE is VOIDmode, OP is checked for validity for whatever mode
957 The main use of this function is as a predicate in match_operand
958 expressions in the machine description.
960 For an explanation of this function's behavior for registers of
961 class NO_REGS, see the comment for `register_operand'. */
964 general_operand (op
, mode
)
966 enum machine_mode mode
;
968 register enum rtx_code code
= GET_CODE (op
);
969 int mode_altering_drug
= 0;
971 if (mode
== VOIDmode
)
972 mode
= GET_MODE (op
);
974 /* Don't accept CONST_INT or anything similar
975 if the caller wants something floating. */
976 if (GET_MODE (op
) == VOIDmode
&& mode
!= VOIDmode
977 && GET_MODE_CLASS (mode
) != MODE_INT
978 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
982 return ((GET_MODE (op
) == VOIDmode
|| GET_MODE (op
) == mode
984 #ifdef LEGITIMATE_PIC_OPERAND_P
985 && (! flag_pic
|| LEGITIMATE_PIC_OPERAND_P (op
))
987 && LEGITIMATE_CONSTANT_P (op
));
989 /* Except for certain constants with VOIDmode, already checked for,
990 OP's mode must match MODE if MODE specifies a mode. */
992 if (GET_MODE (op
) != mode
)
997 #ifdef INSN_SCHEDULING
998 /* On machines that have insn scheduling, we want all memory
999 reference to be explicit, so outlaw paradoxical SUBREGs. */
1000 if (GET_CODE (SUBREG_REG (op
)) == MEM
1001 && GET_MODE_SIZE (mode
) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op
))))
1005 op
= SUBREG_REG (op
);
1006 code
= GET_CODE (op
);
1008 /* No longer needed, since (SUBREG (MEM...))
1009 will load the MEM into a reload reg in the MEM's own mode. */
1010 mode_altering_drug
= 1;
1015 /* A register whose class is NO_REGS is not a general operand. */
1016 return (REGNO (op
) >= FIRST_PSEUDO_REGISTER
1017 || REGNO_REG_CLASS (REGNO (op
)) != NO_REGS
);
1021 register rtx y
= XEXP (op
, 0);
1023 if (! volatile_ok
&& MEM_VOLATILE_P (op
))
1026 if (GET_CODE (y
) == ADDRESSOF
)
1029 /* Use the mem's mode, since it will be reloaded thus. */
1030 mode
= GET_MODE (op
);
1031 GO_IF_LEGITIMATE_ADDRESS (mode
, y
, win
);
1034 /* Pretend this is an operand for now; we'll run force_operand
1035 on its replacement in fixup_var_refs_1. */
1036 if (code
== ADDRESSOF
)
1042 if (mode_altering_drug
)
1043 return ! mode_dependent_address_p (XEXP (op
, 0));
1047 /* Return 1 if OP is a valid memory address for a memory reference
1050 The main use of this function is as a predicate in match_operand
1051 expressions in the machine description. */
1054 address_operand (op
, mode
)
1056 enum machine_mode mode
;
1058 return memory_address_p (mode
, op
);
1061 /* Return 1 if OP is a register reference of mode MODE.
1062 If MODE is VOIDmode, accept a register in any mode.
1064 The main use of this function is as a predicate in match_operand
1065 expressions in the machine description.
1067 As a special exception, registers whose class is NO_REGS are
1068 not accepted by `register_operand'. The reason for this change
1069 is to allow the representation of special architecture artifacts
1070 (such as a condition code register) without extending the rtl
1071 definitions. Since registers of class NO_REGS cannot be used
1072 as registers in any case where register classes are examined,
1073 it is most consistent to keep this function from accepting them. */
1076 register_operand (op
, mode
)
1078 enum machine_mode mode
;
1080 if (GET_MODE (op
) != mode
&& mode
!= VOIDmode
)
1083 if (GET_CODE (op
) == SUBREG
)
1085 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1086 because it is guaranteed to be reloaded into one.
1087 Just make sure the MEM is valid in itself.
1088 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1089 but currently it does result from (SUBREG (REG)...) where the
1090 reg went on the stack.) */
1091 if (! reload_completed
&& GET_CODE (SUBREG_REG (op
)) == MEM
)
1092 return general_operand (op
, mode
);
1094 #ifdef CLASS_CANNOT_CHANGE_MODE
1095 if (GET_CODE (SUBREG_REG (op
)) == REG
1096 && REGNO (SUBREG_REG (op
)) < FIRST_PSEUDO_REGISTER
1097 && (TEST_HARD_REG_BIT
1098 (reg_class_contents
[(int) CLASS_CANNOT_CHANGE_MODE
],
1099 REGNO (SUBREG_REG (op
))))
1100 && CLASS_CANNOT_CHANGE_MODE_P (mode
, GET_MODE (SUBREG_REG (op
)))
1101 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op
))) != MODE_COMPLEX_INT
1102 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op
))) != MODE_COMPLEX_FLOAT
)
1106 op
= SUBREG_REG (op
);
1109 /* If we have an ADDRESSOF, consider it valid since it will be
1110 converted into something that will not be a MEM. */
1111 if (GET_CODE (op
) == ADDRESSOF
)
1114 /* We don't consider registers whose class is NO_REGS
1115 to be a register operand. */
1116 return (GET_CODE (op
) == REG
1117 && (REGNO (op
) >= FIRST_PSEUDO_REGISTER
1118 || REGNO_REG_CLASS (REGNO (op
)) != NO_REGS
));
1121 /* Return 1 for a register in Pmode; ignore the tested mode. */
1124 pmode_register_operand (op
, mode
)
1126 enum machine_mode mode ATTRIBUTE_UNUSED
;
1128 return register_operand (op
, Pmode
);
1131 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1132 or a hard register. */
1135 scratch_operand (op
, mode
)
1137 enum machine_mode mode
;
1139 if (GET_MODE (op
) != mode
&& mode
!= VOIDmode
)
1142 return (GET_CODE (op
) == SCRATCH
1143 || (GET_CODE (op
) == REG
1144 && REGNO (op
) < FIRST_PSEUDO_REGISTER
));
1147 /* Return 1 if OP is a valid immediate operand for mode MODE.
1149 The main use of this function is as a predicate in match_operand
1150 expressions in the machine description. */
1153 immediate_operand (op
, mode
)
1155 enum machine_mode mode
;
1157 /* Don't accept CONST_INT or anything similar
1158 if the caller wants something floating. */
1159 if (GET_MODE (op
) == VOIDmode
&& mode
!= VOIDmode
1160 && GET_MODE_CLASS (mode
) != MODE_INT
1161 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
1164 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
1165 result in 0/1. It seems a safe assumption that this is
1166 in range for everyone. */
1167 if (GET_CODE (op
) == CONSTANT_P_RTX
)
1170 return (CONSTANT_P (op
)
1171 && (GET_MODE (op
) == mode
|| mode
== VOIDmode
1172 || GET_MODE (op
) == VOIDmode
)
1173 #ifdef LEGITIMATE_PIC_OPERAND_P
1174 && (! flag_pic
|| LEGITIMATE_PIC_OPERAND_P (op
))
1176 && LEGITIMATE_CONSTANT_P (op
));
1179 /* Returns 1 if OP is an operand that is a CONST_INT. */
1182 const_int_operand (op
, mode
)
1184 enum machine_mode mode ATTRIBUTE_UNUSED
;
1186 return GET_CODE (op
) == CONST_INT
;
1189 /* Returns 1 if OP is an operand that is a constant integer or constant
1190 floating-point number. */
1193 const_double_operand (op
, mode
)
1195 enum machine_mode mode
;
1197 /* Don't accept CONST_INT or anything similar
1198 if the caller wants something floating. */
1199 if (GET_MODE (op
) == VOIDmode
&& mode
!= VOIDmode
1200 && GET_MODE_CLASS (mode
) != MODE_INT
1201 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
1204 return ((GET_CODE (op
) == CONST_DOUBLE
|| GET_CODE (op
) == CONST_INT
)
1205 && (mode
== VOIDmode
|| GET_MODE (op
) == mode
1206 || GET_MODE (op
) == VOIDmode
));
1209 /* Return 1 if OP is a general operand that is not an immediate operand. */
1212 nonimmediate_operand (op
, mode
)
1214 enum machine_mode mode
;
1216 return (general_operand (op
, mode
) && ! CONSTANT_P (op
));
1219 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1222 nonmemory_operand (op
, mode
)
1224 enum machine_mode mode
;
1226 if (CONSTANT_P (op
))
1228 /* Don't accept CONST_INT or anything similar
1229 if the caller wants something floating. */
1230 if (GET_MODE (op
) == VOIDmode
&& mode
!= VOIDmode
1231 && GET_MODE_CLASS (mode
) != MODE_INT
1232 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
1235 return ((GET_MODE (op
) == VOIDmode
|| GET_MODE (op
) == mode
1236 || mode
== VOIDmode
)
1237 #ifdef LEGITIMATE_PIC_OPERAND_P
1238 && (! flag_pic
|| LEGITIMATE_PIC_OPERAND_P (op
))
1240 && LEGITIMATE_CONSTANT_P (op
));
1243 if (GET_MODE (op
) != mode
&& mode
!= VOIDmode
)
1246 if (GET_CODE (op
) == SUBREG
)
1248 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1249 because it is guaranteed to be reloaded into one.
1250 Just make sure the MEM is valid in itself.
1251 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1252 but currently it does result from (SUBREG (REG)...) where the
1253 reg went on the stack.) */
1254 if (! reload_completed
&& GET_CODE (SUBREG_REG (op
)) == MEM
)
1255 return general_operand (op
, mode
);
1256 op
= SUBREG_REG (op
);
1259 /* We don't consider registers whose class is NO_REGS
1260 to be a register operand. */
1261 return (GET_CODE (op
) == REG
1262 && (REGNO (op
) >= FIRST_PSEUDO_REGISTER
1263 || REGNO_REG_CLASS (REGNO (op
)) != NO_REGS
));
1266 /* Return 1 if OP is a valid operand that stands for pushing a
1267 value of mode MODE onto the stack.
1269 The main use of this function is as a predicate in match_operand
1270 expressions in the machine description. */
1273 push_operand (op
, mode
)
1275 enum machine_mode mode
;
1277 if (GET_CODE (op
) != MEM
)
1280 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1285 if (GET_CODE (op
) != STACK_PUSH_CODE
)
1288 return XEXP (op
, 0) == stack_pointer_rtx
;
1291 /* Return 1 if OP is a valid operand that stands for popping a
1292 value of mode MODE off the stack.
1294 The main use of this function is as a predicate in match_operand
1295 expressions in the machine description. */
1298 pop_operand (op
, mode
)
1300 enum machine_mode mode
;
1302 if (GET_CODE (op
) != MEM
)
1305 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1310 if (GET_CODE (op
) != STACK_POP_CODE
)
1313 return XEXP (op
, 0) == stack_pointer_rtx
;
1316 /* Return 1 if ADDR is a valid memory address for mode MODE. */
1319 memory_address_p (mode
, addr
)
1320 enum machine_mode mode ATTRIBUTE_UNUSED
;
1323 if (GET_CODE (addr
) == ADDRESSOF
)
1326 GO_IF_LEGITIMATE_ADDRESS (mode
, addr
, win
);
1333 /* Return 1 if OP is a valid memory reference with mode MODE,
1334 including a valid address.
1336 The main use of this function is as a predicate in match_operand
1337 expressions in the machine description. */
1340 memory_operand (op
, mode
)
1342 enum machine_mode mode
;
1346 if (! reload_completed
)
1347 /* Note that no SUBREG is a memory operand before end of reload pass,
1348 because (SUBREG (MEM...)) forces reloading into a register. */
1349 return GET_CODE (op
) == MEM
&& general_operand (op
, mode
);
1351 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1355 if (GET_CODE (inner
) == SUBREG
)
1356 inner
= SUBREG_REG (inner
);
1358 return (GET_CODE (inner
) == MEM
&& general_operand (op
, mode
));
1361 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1362 that is, a memory reference whose address is a general_operand. */
1365 indirect_operand (op
, mode
)
1367 enum machine_mode mode
;
1369 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1370 if (! reload_completed
1371 && GET_CODE (op
) == SUBREG
&& GET_CODE (SUBREG_REG (op
)) == MEM
)
1373 register int offset
= SUBREG_WORD (op
) * UNITS_PER_WORD
;
1374 rtx inner
= SUBREG_REG (op
);
1376 if (BYTES_BIG_ENDIAN
)
1377 offset
-= (MIN (UNITS_PER_WORD
, GET_MODE_SIZE (GET_MODE (op
)))
1378 - MIN (UNITS_PER_WORD
, GET_MODE_SIZE (GET_MODE (inner
))));
1380 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1383 /* The only way that we can have a general_operand as the resulting
1384 address is if OFFSET is zero and the address already is an operand
1385 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1388 return ((offset
== 0 && general_operand (XEXP (inner
, 0), Pmode
))
1389 || (GET_CODE (XEXP (inner
, 0)) == PLUS
1390 && GET_CODE (XEXP (XEXP (inner
, 0), 1)) == CONST_INT
1391 && INTVAL (XEXP (XEXP (inner
, 0), 1)) == -offset
1392 && general_operand (XEXP (XEXP (inner
, 0), 0), Pmode
)));
1395 return (GET_CODE (op
) == MEM
1396 && memory_operand (op
, mode
)
1397 && general_operand (XEXP (op
, 0), Pmode
));
1400 /* Return 1 if this is a comparison operator. This allows the use of
1401 MATCH_OPERATOR to recognize all the branch insns. */
1404 comparison_operator (op
, mode
)
1406 enum machine_mode mode
;
1408 return ((mode
== VOIDmode
|| GET_MODE (op
) == mode
)
1409 && GET_RTX_CLASS (GET_CODE (op
)) == '<');
1412 /* If BODY is an insn body that uses ASM_OPERANDS,
1413 return the number of operands (both input and output) in the insn.
1414 Otherwise return -1. */
1417 asm_noperands (body
)
1420 switch (GET_CODE (body
))
1423 /* No output operands: return number of input operands. */
1424 return ASM_OPERANDS_INPUT_LENGTH (body
);
1426 if (GET_CODE (SET_SRC (body
)) == ASM_OPERANDS
)
1427 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1428 return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body
)) + 1;
1432 if (GET_CODE (XVECEXP (body
, 0, 0)) == SET
1433 && GET_CODE (SET_SRC (XVECEXP (body
, 0, 0))) == ASM_OPERANDS
)
1435 /* Multiple output operands, or 1 output plus some clobbers:
1436 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1440 /* Count backwards through CLOBBERs to determine number of SETs. */
1441 for (i
= XVECLEN (body
, 0); i
> 0; i
--)
1443 if (GET_CODE (XVECEXP (body
, 0, i
- 1)) == SET
)
1445 if (GET_CODE (XVECEXP (body
, 0, i
- 1)) != CLOBBER
)
1449 /* N_SETS is now number of output operands. */
1452 /* Verify that all the SETs we have
1453 came from a single original asm_operands insn
1454 (so that invalid combinations are blocked). */
1455 for (i
= 0; i
< n_sets
; i
++)
1457 rtx elt
= XVECEXP (body
, 0, i
);
1458 if (GET_CODE (elt
) != SET
)
1460 if (GET_CODE (SET_SRC (elt
)) != ASM_OPERANDS
)
1462 /* If these ASM_OPERANDS rtx's came from different original insns
1463 then they aren't allowed together. */
1464 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt
))
1465 != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body
, 0, 0))))
1468 return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body
, 0, 0)))
1471 else if (GET_CODE (XVECEXP (body
, 0, 0)) == ASM_OPERANDS
)
1473 /* 0 outputs, but some clobbers:
1474 body is [(asm_operands ...) (clobber (reg ...))...]. */
1477 /* Make sure all the other parallel things really are clobbers. */
1478 for (i
= XVECLEN (body
, 0) - 1; i
> 0; i
--)
1479 if (GET_CODE (XVECEXP (body
, 0, i
)) != CLOBBER
)
1482 return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body
, 0, 0));
1491 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1492 copy its operands (both input and output) into the vector OPERANDS,
1493 the locations of the operands within the insn into the vector OPERAND_LOCS,
1494 and the constraints for the operands into CONSTRAINTS.
1495 Write the modes of the operands into MODES.
1496 Return the assembler-template.
1498 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1499 we don't store that info. */
1502 decode_asm_operands (body
, operands
, operand_locs
, constraints
, modes
)
1506 const char **constraints
;
1507 enum machine_mode
*modes
;
1511 const char *template = 0;
1513 if (GET_CODE (body
) == SET
&& GET_CODE (SET_SRC (body
)) == ASM_OPERANDS
)
1515 rtx asmop
= SET_SRC (body
);
1516 /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
1518 noperands
= ASM_OPERANDS_INPUT_LENGTH (asmop
) + 1;
1520 for (i
= 1; i
< noperands
; i
++)
1523 operand_locs
[i
] = &ASM_OPERANDS_INPUT (asmop
, i
- 1);
1525 operands
[i
] = ASM_OPERANDS_INPUT (asmop
, i
- 1);
1527 constraints
[i
] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop
, i
- 1);
1529 modes
[i
] = ASM_OPERANDS_INPUT_MODE (asmop
, i
- 1);
1532 /* The output is in the SET.
1533 Its constraint is in the ASM_OPERANDS itself. */
1535 operands
[0] = SET_DEST (body
);
1537 operand_locs
[0] = &SET_DEST (body
);
1539 constraints
[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop
);
1541 modes
[0] = GET_MODE (SET_DEST (body
));
1542 template = ASM_OPERANDS_TEMPLATE (asmop
);
1544 else if (GET_CODE (body
) == ASM_OPERANDS
)
1547 /* No output operands: BODY is (asm_operands ....). */
1549 noperands
= ASM_OPERANDS_INPUT_LENGTH (asmop
);
1551 /* The input operands are found in the 1st element vector. */
1552 /* Constraints for inputs are in the 2nd element vector. */
1553 for (i
= 0; i
< noperands
; i
++)
1556 operand_locs
[i
] = &ASM_OPERANDS_INPUT (asmop
, i
);
1558 operands
[i
] = ASM_OPERANDS_INPUT (asmop
, i
);
1560 constraints
[i
] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop
, i
);
1562 modes
[i
] = ASM_OPERANDS_INPUT_MODE (asmop
, i
);
1564 template = ASM_OPERANDS_TEMPLATE (asmop
);
1566 else if (GET_CODE (body
) == PARALLEL
1567 && GET_CODE (XVECEXP (body
, 0, 0)) == SET
)
1569 rtx asmop
= SET_SRC (XVECEXP (body
, 0, 0));
1570 int nparallel
= XVECLEN (body
, 0); /* Includes CLOBBERs. */
1571 int nin
= ASM_OPERANDS_INPUT_LENGTH (asmop
);
1572 int nout
= 0; /* Does not include CLOBBERs. */
1574 /* At least one output, plus some CLOBBERs. */
1576 /* The outputs are in the SETs.
1577 Their constraints are in the ASM_OPERANDS itself. */
1578 for (i
= 0; i
< nparallel
; i
++)
1580 if (GET_CODE (XVECEXP (body
, 0, i
)) == CLOBBER
)
1581 break; /* Past last SET */
1584 operands
[i
] = SET_DEST (XVECEXP (body
, 0, i
));
1586 operand_locs
[i
] = &SET_DEST (XVECEXP (body
, 0, i
));
1588 constraints
[i
] = XSTR (SET_SRC (XVECEXP (body
, 0, i
)), 1);
1590 modes
[i
] = GET_MODE (SET_DEST (XVECEXP (body
, 0, i
)));
1594 for (i
= 0; i
< nin
; i
++)
1597 operand_locs
[i
+ nout
] = &ASM_OPERANDS_INPUT (asmop
, i
);
1599 operands
[i
+ nout
] = ASM_OPERANDS_INPUT (asmop
, i
);
1601 constraints
[i
+ nout
] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop
, i
);
1603 modes
[i
+ nout
] = ASM_OPERANDS_INPUT_MODE (asmop
, i
);
1606 template = ASM_OPERANDS_TEMPLATE (asmop
);
1608 else if (GET_CODE (body
) == PARALLEL
1609 && GET_CODE (XVECEXP (body
, 0, 0)) == ASM_OPERANDS
)
1611 /* No outputs, but some CLOBBERs. */
1613 rtx asmop
= XVECEXP (body
, 0, 0);
1614 int nin
= ASM_OPERANDS_INPUT_LENGTH (asmop
);
1616 for (i
= 0; i
< nin
; i
++)
1619 operand_locs
[i
] = &ASM_OPERANDS_INPUT (asmop
, i
);
1621 operands
[i
] = ASM_OPERANDS_INPUT (asmop
, i
);
1623 constraints
[i
] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop
, i
);
1625 modes
[i
] = ASM_OPERANDS_INPUT_MODE (asmop
, i
);
1628 template = ASM_OPERANDS_TEMPLATE (asmop
);
1634 /* Check if an asm_operand matches it's constraints.
1635 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
1638 asm_operand_ok (op
, constraint
)
1640 const char *constraint
;
1644 /* Use constrain_operands after reload. */
1645 if (reload_completed
)
1650 char c
= *constraint
++;
1664 case '0': case '1': case '2': case '3': case '4':
1665 case '5': case '6': case '7': case '8': case '9':
1666 /* For best results, our caller should have given us the
1667 proper matching constraint, but we can't actually fail
1668 the check if they didn't. Indicate that results are
1674 if (address_operand (op
, VOIDmode
))
1679 case 'V': /* non-offsettable */
1680 if (memory_operand (op
, VOIDmode
))
1684 case 'o': /* offsettable */
1685 if (offsettable_nonstrict_memref_p (op
))
1690 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1691 excepting those that expand_call created. Further, on some
1692 machines which do not have generalized auto inc/dec, an inc/dec
1693 is not a memory_operand.
1695 Match any memory and hope things are resolved after reload. */
1697 if (GET_CODE (op
) == MEM
1699 || GET_CODE (XEXP (op
, 0)) == PRE_DEC
1700 || GET_CODE (XEXP (op
, 0)) == POST_DEC
))
1705 if (GET_CODE (op
) == MEM
1707 || GET_CODE (XEXP (op
, 0)) == PRE_INC
1708 || GET_CODE (XEXP (op
, 0)) == POST_INC
))
1713 #ifndef REAL_ARITHMETIC
1714 /* Match any floating double constant, but only if
1715 we can examine the bits of it reliably. */
1716 if ((HOST_FLOAT_FORMAT
!= TARGET_FLOAT_FORMAT
1717 || HOST_BITS_PER_WIDE_INT
!= BITS_PER_WORD
)
1718 && GET_MODE (op
) != VOIDmode
&& ! flag_pretend_float
)
1724 if (GET_CODE (op
) == CONST_DOUBLE
)
1729 if (GET_CODE (op
) == CONST_DOUBLE
1730 && CONST_DOUBLE_OK_FOR_LETTER_P (op
, 'G'))
1734 if (GET_CODE (op
) == CONST_DOUBLE
1735 && CONST_DOUBLE_OK_FOR_LETTER_P (op
, 'H'))
1740 if (GET_CODE (op
) == CONST_INT
1741 || (GET_CODE (op
) == CONST_DOUBLE
1742 && GET_MODE (op
) == VOIDmode
))
1748 #ifdef LEGITIMATE_PIC_OPERAND_P
1749 && (! flag_pic
|| LEGITIMATE_PIC_OPERAND_P (op
))
1756 if (GET_CODE (op
) == CONST_INT
1757 || (GET_CODE (op
) == CONST_DOUBLE
1758 && GET_MODE (op
) == VOIDmode
))
1763 if (GET_CODE (op
) == CONST_INT
1764 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I'))
1768 if (GET_CODE (op
) == CONST_INT
1769 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'J'))
1773 if (GET_CODE (op
) == CONST_INT
1774 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'K'))
1778 if (GET_CODE (op
) == CONST_INT
1779 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'L'))
1783 if (GET_CODE (op
) == CONST_INT
1784 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'M'))
1788 if (GET_CODE (op
) == CONST_INT
1789 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'N'))
1793 if (GET_CODE (op
) == CONST_INT
1794 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'O'))
1798 if (GET_CODE (op
) == CONST_INT
1799 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'P'))
1807 if (general_operand (op
, VOIDmode
))
1812 /* For all other letters, we first check for a register class,
1813 otherwise it is an EXTRA_CONSTRAINT. */
1814 if (REG_CLASS_FROM_LETTER (c
) != NO_REGS
)
1817 if (GET_MODE (op
) == BLKmode
)
1819 if (register_operand (op
, VOIDmode
))
1822 #ifdef EXTRA_CONSTRAINT
1823 if (EXTRA_CONSTRAINT (op
, c
))
1833 /* Given an rtx *P, if it is a sum containing an integer constant term,
1834 return the location (type rtx *) of the pointer to that constant term.
1835 Otherwise, return a null pointer. */
1838 find_constant_term_loc (p
)
1842 register enum rtx_code code
= GET_CODE (*p
);
1844 /* If *P IS such a constant term, P is its location. */
1846 if (code
== CONST_INT
|| code
== SYMBOL_REF
|| code
== LABEL_REF
1850 /* Otherwise, if not a sum, it has no constant term. */
1852 if (GET_CODE (*p
) != PLUS
)
1855 /* If one of the summands is constant, return its location. */
1857 if (XEXP (*p
, 0) && CONSTANT_P (XEXP (*p
, 0))
1858 && XEXP (*p
, 1) && CONSTANT_P (XEXP (*p
, 1)))
1861 /* Otherwise, check each summand for containing a constant term. */
1863 if (XEXP (*p
, 0) != 0)
1865 tem
= find_constant_term_loc (&XEXP (*p
, 0));
1870 if (XEXP (*p
, 1) != 0)
1872 tem
= find_constant_term_loc (&XEXP (*p
, 1));
1880 /* Return 1 if OP is a memory reference
1881 whose address contains no side effects
1882 and remains valid after the addition
1883 of a positive integer less than the
1884 size of the object being referenced.
1886 We assume that the original address is valid and do not check it.
1888 This uses strict_memory_address_p as a subroutine, so
1889 don't use it before reload. */
1892 offsettable_memref_p (op
)
1895 return ((GET_CODE (op
) == MEM
)
1896 && offsettable_address_p (1, GET_MODE (op
), XEXP (op
, 0)));
1899 /* Similar, but don't require a strictly valid mem ref:
1900 consider pseudo-regs valid as index or base regs. */
1903 offsettable_nonstrict_memref_p (op
)
1906 return ((GET_CODE (op
) == MEM
)
1907 && offsettable_address_p (0, GET_MODE (op
), XEXP (op
, 0)));
1910 /* Return 1 if Y is a memory address which contains no side effects
1911 and would remain valid after the addition of a positive integer
1912 less than the size of that mode.
1914 We assume that the original address is valid and do not check it.
1915 We do check that it is valid for narrower modes.
1917 If STRICTP is nonzero, we require a strictly valid address,
1918 for the sake of use in reload.c. */
1921 offsettable_address_p (strictp
, mode
, y
)
1923 enum machine_mode mode
;
1926 register enum rtx_code ycode
= GET_CODE (y
);
1930 int (*addressp
) PARAMS ((enum machine_mode
, rtx
)) =
1931 (strictp
? strict_memory_address_p
: memory_address_p
);
1932 unsigned int mode_sz
= GET_MODE_SIZE (mode
);
1934 if (CONSTANT_ADDRESS_P (y
))
1937 /* Adjusting an offsettable address involves changing to a narrower mode.
1938 Make sure that's OK. */
1940 if (mode_dependent_address_p (y
))
1943 /* ??? How much offset does an offsettable BLKmode reference need?
1944 Clearly that depends on the situation in which it's being used.
1945 However, the current situation in which we test 0xffffffff is
1946 less than ideal. Caveat user. */
1948 mode_sz
= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
;
1950 /* If the expression contains a constant term,
1951 see if it remains valid when max possible offset is added. */
1953 if ((ycode
== PLUS
) && (y2
= find_constant_term_loc (&y1
)))
1958 *y2
= plus_constant (*y2
, mode_sz
- 1);
1959 /* Use QImode because an odd displacement may be automatically invalid
1960 for any wider mode. But it should be valid for a single byte. */
1961 good
= (*addressp
) (QImode
, y
);
1963 /* In any case, restore old contents of memory. */
1968 if (GET_RTX_CLASS (ycode
) == 'a')
1971 /* The offset added here is chosen as the maximum offset that
1972 any instruction could need to add when operating on something
1973 of the specified mode. We assume that if Y and Y+c are
1974 valid addresses then so is Y+d for all 0<d<c. */
1976 z
= plus_constant_for_output (y
, mode_sz
- 1);
1978 /* Use QImode because an odd displacement may be automatically invalid
1979 for any wider mode. But it should be valid for a single byte. */
1980 return (*addressp
) (QImode
, z
);
1983 /* Return 1 if ADDR is an address-expression whose effect depends
1984 on the mode of the memory reference it is used in.
1986 Autoincrement addressing is a typical example of mode-dependence
1987 because the amount of the increment depends on the mode. */
1990 mode_dependent_address_p (addr
)
1991 rtx addr ATTRIBUTE_UNUSED
; /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */
1993 GO_IF_MODE_DEPENDENT_ADDRESS (addr
, win
);
1995 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
1996 win
: ATTRIBUTE_UNUSED_LABEL
2000 /* Return 1 if OP is a general operand
2001 other than a memory ref with a mode dependent address. */
2004 mode_independent_operand (op
, mode
)
2005 enum machine_mode mode
;
2010 if (! general_operand (op
, mode
))
2013 if (GET_CODE (op
) != MEM
)
2016 addr
= XEXP (op
, 0);
2017 GO_IF_MODE_DEPENDENT_ADDRESS (addr
, lose
);
2019 /* Label `lose' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
2020 lose
: ATTRIBUTE_UNUSED_LABEL
2024 /* Given an operand OP that is a valid memory reference which
2025 satisfies offsettable_memref_p, return a new memory reference whose
2026 address has been adjusted by OFFSET. OFFSET should be positive and
2027 less than the size of the object referenced. */
2030 adj_offsettable_operand (op
, offset
)
2034 register enum rtx_code code
= GET_CODE (op
);
2038 register rtx y
= XEXP (op
, 0);
2041 if (CONSTANT_ADDRESS_P (y
))
2043 new = gen_rtx_MEM (GET_MODE (op
),
2044 plus_constant_for_output (y
, offset
));
2045 MEM_COPY_ATTRIBUTES (new, op
);
2049 if (GET_CODE (y
) == PLUS
)
2052 register rtx
*const_loc
;
2056 const_loc
= find_constant_term_loc (&z
);
2059 *const_loc
= plus_constant_for_output (*const_loc
, offset
);
2064 new = gen_rtx_MEM (GET_MODE (op
), plus_constant_for_output (y
, offset
));
2065 MEM_COPY_ATTRIBUTES (new, op
);
2071 /* Like extract_insn, but save insn extracted and don't extract again, when
2072 called again for the same insn expecting that recog_data still contain the
2073 valid information. This is used primary by gen_attr infrastructure that
2074 often does extract insn again and again. */
2076 extract_insn_cached (insn
)
2079 if (recog_data
.insn
== insn
&& INSN_CODE (insn
) >= 0)
2081 extract_insn (insn
);
2082 recog_data
.insn
= insn
;
2084 /* Do cached extract_insn, constrain_operand and complain about failures.
2085 Used by insn_attrtab. */
2087 extract_constrain_insn_cached (insn
)
2090 extract_insn_cached (insn
);
2091 if (which_alternative
== -1
2092 && !constrain_operands (reload_completed
))
2093 fatal_insn_not_found (insn
);
2095 /* Do cached constrain_operand and complain about failures. */
2097 constrain_operands_cached (strict
)
2100 if (which_alternative
== -1)
2101 return constrain_operands (strict
);
2106 /* Analyze INSN and fill in recog_data. */
2115 rtx body
= PATTERN (insn
);
2117 recog_data
.insn
= NULL
;
2118 recog_data
.n_operands
= 0;
2119 recog_data
.n_alternatives
= 0;
2120 recog_data
.n_dups
= 0;
2121 which_alternative
= -1;
2123 switch (GET_CODE (body
))
2133 if (GET_CODE (SET_SRC (body
)) == ASM_OPERANDS
)
2138 if ((GET_CODE (XVECEXP (body
, 0, 0)) == SET
2139 && GET_CODE (SET_SRC (XVECEXP (body
, 0, 0))) == ASM_OPERANDS
)
2140 || GET_CODE (XVECEXP (body
, 0, 0)) == ASM_OPERANDS
)
2146 recog_data
.n_operands
= noperands
= asm_noperands (body
);
2149 /* This insn is an `asm' with operands. */
2151 /* expand_asm_operands makes sure there aren't too many operands. */
2152 if (noperands
> MAX_RECOG_OPERANDS
)
2155 /* Now get the operand values and constraints out of the insn. */
2156 decode_asm_operands (body
, recog_data
.operand
,
2157 recog_data
.operand_loc
,
2158 recog_data
.constraints
,
2159 recog_data
.operand_mode
);
2162 const char *p
= recog_data
.constraints
[0];
2163 recog_data
.n_alternatives
= 1;
2165 recog_data
.n_alternatives
+= (*p
++ == ',');
2169 fatal_insn_not_found (insn
);
2173 /* Ordinary insn: recognize it, get the operands via insn_extract
2174 and get the constraints. */
2176 icode
= recog_memoized (insn
);
2178 fatal_insn_not_found (insn
);
2180 recog_data
.n_operands
= noperands
= insn_data
[icode
].n_operands
;
2181 recog_data
.n_alternatives
= insn_data
[icode
].n_alternatives
;
2182 recog_data
.n_dups
= insn_data
[icode
].n_dups
;
2184 insn_extract (insn
);
2186 for (i
= 0; i
< noperands
; i
++)
2188 recog_data
.constraints
[i
] = insn_data
[icode
].operand
[i
].constraint
;
2189 recog_data
.operand_mode
[i
] = insn_data
[icode
].operand
[i
].mode
;
2190 /* VOIDmode match_operands gets mode from their real operand. */
2191 if (recog_data
.operand_mode
[i
] == VOIDmode
)
2192 recog_data
.operand_mode
[i
] = GET_MODE (recog_data
.operand
[i
]);
2195 for (i
= 0; i
< noperands
; i
++)
2196 recog_data
.operand_type
[i
]
2197 = (recog_data
.constraints
[i
][0] == '=' ? OP_OUT
2198 : recog_data
.constraints
[i
][0] == '+' ? OP_INOUT
2201 if (recog_data
.n_alternatives
> MAX_RECOG_ALTERNATIVES
)
2205 /* After calling extract_insn, you can use this function to extract some
2206 information from the constraint strings into a more usable form.
2207 The collected data is stored in recog_op_alt. */
2209 preprocess_constraints ()
2213 memset (recog_op_alt
, 0, sizeof recog_op_alt
);
2214 for (i
= 0; i
< recog_data
.n_operands
; i
++)
2217 struct operand_alternative
*op_alt
;
2218 const char *p
= recog_data
.constraints
[i
];
2220 op_alt
= recog_op_alt
[i
];
2222 for (j
= 0; j
< recog_data
.n_alternatives
; j
++)
2224 op_alt
[j
].class = NO_REGS
;
2225 op_alt
[j
].constraint
= p
;
2226 op_alt
[j
].matches
= -1;
2227 op_alt
[j
].matched
= -1;
2229 if (*p
== '\0' || *p
== ',')
2231 op_alt
[j
].anything_ok
= 1;
2241 while (c
!= ',' && c
!= '\0');
2242 if (c
== ',' || c
== '\0')
2247 case '=': case '+': case '*': case '%':
2248 case 'E': case 'F': case 'G': case 'H':
2249 case 's': case 'i': case 'n':
2250 case 'I': case 'J': case 'K': case 'L':
2251 case 'M': case 'N': case 'O': case 'P':
2252 /* These don't say anything we care about. */
2256 op_alt
[j
].reject
+= 6;
2259 op_alt
[j
].reject
+= 600;
2262 op_alt
[j
].earlyclobber
= 1;
2265 case '0': case '1': case '2': case '3': case '4':
2266 case '5': case '6': case '7': case '8': case '9':
2267 op_alt
[j
].matches
= c
- '0';
2268 recog_op_alt
[op_alt
[j
].matches
][j
].matched
= i
;
2272 op_alt
[j
].memory_ok
= 1;
2275 op_alt
[j
].decmem_ok
= 1;
2278 op_alt
[j
].incmem_ok
= 1;
2281 op_alt
[j
].nonoffmem_ok
= 1;
2284 op_alt
[j
].offmem_ok
= 1;
2287 op_alt
[j
].anything_ok
= 1;
2291 op_alt
[j
].is_address
= 1;
2292 op_alt
[j
].class = reg_class_subunion
[(int) op_alt
[j
].class][(int) BASE_REG_CLASS
];
2296 op_alt
[j
].class = reg_class_subunion
[(int) op_alt
[j
].class][(int) GENERAL_REGS
];
2300 op_alt
[j
].class = reg_class_subunion
[(int) op_alt
[j
].class][(int) REG_CLASS_FROM_LETTER ((unsigned char)c
)];
2308 /* Check the operands of an insn against the insn's operand constraints
2309 and return 1 if they are valid.
2310 The information about the insn's operands, constraints, operand modes
2311 etc. is obtained from the global variables set up by extract_insn.
2313 WHICH_ALTERNATIVE is set to a number which indicates which
2314 alternative of constraints was matched: 0 for the first alternative,
2315 1 for the next, etc.
2317 In addition, when two operands are match
2318 and it happens that the output operand is (reg) while the
2319 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2320 make the output operand look like the input.
2321 This is because the output operand is the one the template will print.
2323 This is used in final, just before printing the assembler code and by
2324 the routines that determine an insn's attribute.
2326 If STRICT is a positive non-zero value, it means that we have been
2327 called after reload has been completed. In that case, we must
2328 do all checks strictly. If it is zero, it means that we have been called
2329 before reload has completed. In that case, we first try to see if we can
2330 find an alternative that matches strictly. If not, we try again, this
2331 time assuming that reload will fix up the insn. This provides a "best
2332 guess" for the alternative and is used to compute attributes of insns prior
2333 to reload. A negative value of STRICT is used for this internal call. */
2341 constrain_operands (strict
)
2344 const char *constraints
[MAX_RECOG_OPERANDS
];
2345 int matching_operands
[MAX_RECOG_OPERANDS
];
2346 int earlyclobber
[MAX_RECOG_OPERANDS
];
2349 struct funny_match funny_match
[MAX_RECOG_OPERANDS
];
2350 int funny_match_index
;
2352 which_alternative
= 0;
2353 if (recog_data
.n_operands
== 0 || recog_data
.n_alternatives
== 0)
2356 for (c
= 0; c
< recog_data
.n_operands
; c
++)
2358 constraints
[c
] = recog_data
.constraints
[c
];
2359 matching_operands
[c
] = -1;
2366 funny_match_index
= 0;
2368 for (opno
= 0; opno
< recog_data
.n_operands
; opno
++)
2370 register rtx op
= recog_data
.operand
[opno
];
2371 enum machine_mode mode
= GET_MODE (op
);
2372 register const char *p
= constraints
[opno
];
2377 earlyclobber
[opno
] = 0;
2379 /* A unary operator may be accepted by the predicate, but it
2380 is irrelevant for matching constraints. */
2381 if (GET_RTX_CLASS (GET_CODE (op
)) == '1')
2384 if (GET_CODE (op
) == SUBREG
)
2386 if (GET_CODE (SUBREG_REG (op
)) == REG
2387 && REGNO (SUBREG_REG (op
)) < FIRST_PSEUDO_REGISTER
)
2388 offset
= SUBREG_WORD (op
);
2389 op
= SUBREG_REG (op
);
2392 /* An empty constraint or empty alternative
2393 allows anything which matched the pattern. */
2394 if (*p
== 0 || *p
== ',')
2397 while (*p
&& (c
= *p
++) != ',')
2400 case '?': case '!': case '*': case '%':
2405 /* Ignore rest of this alternative as far as
2406 constraint checking is concerned. */
2407 while (*p
&& *p
!= ',')
2412 earlyclobber
[opno
] = 1;
2415 case '0': case '1': case '2': case '3': case '4':
2416 case '5': case '6': case '7': case '8': case '9':
2418 /* This operand must be the same as a previous one.
2419 This kind of constraint is used for instructions such
2420 as add when they take only two operands.
2422 Note that the lower-numbered operand is passed first.
2424 If we are not testing strictly, assume that this constraint
2425 will be satisfied. */
2430 rtx op1
= recog_data
.operand
[c
- '0'];
2431 rtx op2
= recog_data
.operand
[opno
];
2433 /* A unary operator may be accepted by the predicate,
2434 but it is irrelevant for matching constraints. */
2435 if (GET_RTX_CLASS (GET_CODE (op1
)) == '1')
2436 op1
= XEXP (op1
, 0);
2437 if (GET_RTX_CLASS (GET_CODE (op2
)) == '1')
2438 op2
= XEXP (op2
, 0);
2440 val
= operands_match_p (op1
, op2
);
2443 matching_operands
[opno
] = c
- '0';
2444 matching_operands
[c
- '0'] = opno
;
2448 /* If output is *x and input is *--x,
2449 arrange later to change the output to *--x as well,
2450 since the output op is the one that will be printed. */
2451 if (val
== 2 && strict
> 0)
2453 funny_match
[funny_match_index
].this = opno
;
2454 funny_match
[funny_match_index
++].other
= c
- '0';
2459 /* p is used for address_operands. When we are called by
2460 gen_reload, no one will have checked that the address is
2461 strictly valid, i.e., that all pseudos requiring hard regs
2462 have gotten them. */
2464 || (strict_memory_address_p (recog_data
.operand_mode
[opno
],
2469 /* No need to check general_operand again;
2470 it was done in insn-recog.c. */
2472 /* Anything goes unless it is a REG and really has a hard reg
2473 but the hard reg is not in the class GENERAL_REGS. */
2475 || GENERAL_REGS
== ALL_REGS
2476 || GET_CODE (op
) != REG
2477 || (reload_in_progress
2478 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)
2479 || reg_fits_class_p (op
, GENERAL_REGS
, offset
, mode
))
2484 /* This is used for a MATCH_SCRATCH in the cases when
2485 we don't actually need anything. So anything goes
2491 if (GET_CODE (op
) == MEM
2492 /* Before reload, accept what reload can turn into mem. */
2493 || (strict
< 0 && CONSTANT_P (op
))
2494 /* During reload, accept a pseudo */
2495 || (reload_in_progress
&& GET_CODE (op
) == REG
2496 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
))
2501 if (GET_CODE (op
) == MEM
2502 && (GET_CODE (XEXP (op
, 0)) == PRE_DEC
2503 || GET_CODE (XEXP (op
, 0)) == POST_DEC
))
2508 if (GET_CODE (op
) == MEM
2509 && (GET_CODE (XEXP (op
, 0)) == PRE_INC
2510 || GET_CODE (XEXP (op
, 0)) == POST_INC
))
2515 #ifndef REAL_ARITHMETIC
2516 /* Match any CONST_DOUBLE, but only if
2517 we can examine the bits of it reliably. */
2518 if ((HOST_FLOAT_FORMAT
!= TARGET_FLOAT_FORMAT
2519 || HOST_BITS_PER_WIDE_INT
!= BITS_PER_WORD
)
2520 && GET_MODE (op
) != VOIDmode
&& ! flag_pretend_float
)
2523 if (GET_CODE (op
) == CONST_DOUBLE
)
2528 if (GET_CODE (op
) == CONST_DOUBLE
)
2534 if (GET_CODE (op
) == CONST_DOUBLE
2535 && CONST_DOUBLE_OK_FOR_LETTER_P (op
, c
))
2540 if (GET_CODE (op
) == CONST_INT
2541 || (GET_CODE (op
) == CONST_DOUBLE
2542 && GET_MODE (op
) == VOIDmode
))
2545 if (CONSTANT_P (op
))
2550 if (GET_CODE (op
) == CONST_INT
2551 || (GET_CODE (op
) == CONST_DOUBLE
2552 && GET_MODE (op
) == VOIDmode
))
2564 if (GET_CODE (op
) == CONST_INT
2565 && CONST_OK_FOR_LETTER_P (INTVAL (op
), c
))
2570 if (GET_CODE (op
) == MEM
2571 && ((strict
> 0 && ! offsettable_memref_p (op
))
2573 && !(CONSTANT_P (op
) || GET_CODE (op
) == MEM
))
2574 || (reload_in_progress
2575 && !(GET_CODE (op
) == REG
2576 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
))))
2581 if ((strict
> 0 && offsettable_memref_p (op
))
2582 || (strict
== 0 && offsettable_nonstrict_memref_p (op
))
2583 /* Before reload, accept what reload can handle. */
2585 && (CONSTANT_P (op
) || GET_CODE (op
) == MEM
))
2586 /* During reload, accept a pseudo */
2587 || (reload_in_progress
&& GET_CODE (op
) == REG
2588 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
))
2594 enum reg_class
class;
2596 class = (c
== 'r' ? GENERAL_REGS
: REG_CLASS_FROM_LETTER (c
));
2597 if (class != NO_REGS
)
2601 && GET_CODE (op
) == REG
2602 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)
2603 || (strict
== 0 && GET_CODE (op
) == SCRATCH
)
2604 || (GET_CODE (op
) == REG
2605 && reg_fits_class_p (op
, class, offset
, mode
)))
2608 #ifdef EXTRA_CONSTRAINT
2609 else if (EXTRA_CONSTRAINT (op
, c
))
2616 constraints
[opno
] = p
;
2617 /* If this operand did not win somehow,
2618 this alternative loses. */
2622 /* This alternative won; the operands are ok.
2623 Change whichever operands this alternative says to change. */
2628 /* See if any earlyclobber operand conflicts with some other
2632 for (eopno
= 0; eopno
< recog_data
.n_operands
; eopno
++)
2633 /* Ignore earlyclobber operands now in memory,
2634 because we would often report failure when we have
2635 two memory operands, one of which was formerly a REG. */
2636 if (earlyclobber
[eopno
]
2637 && GET_CODE (recog_data
.operand
[eopno
]) == REG
)
2638 for (opno
= 0; opno
< recog_data
.n_operands
; opno
++)
2639 if ((GET_CODE (recog_data
.operand
[opno
]) == MEM
2640 || recog_data
.operand_type
[opno
] != OP_OUT
)
2642 /* Ignore things like match_operator operands. */
2643 && *recog_data
.constraints
[opno
] != 0
2644 && ! (matching_operands
[opno
] == eopno
2645 && operands_match_p (recog_data
.operand
[opno
],
2646 recog_data
.operand
[eopno
]))
2647 && ! safe_from_earlyclobber (recog_data
.operand
[opno
],
2648 recog_data
.operand
[eopno
]))
2653 while (--funny_match_index
>= 0)
2655 recog_data
.operand
[funny_match
[funny_match_index
].other
]
2656 = recog_data
.operand
[funny_match
[funny_match_index
].this];
2663 which_alternative
++;
2665 while (which_alternative
< recog_data
.n_alternatives
);
2667 which_alternative
= -1;
2668 /* If we are about to reject this, but we are not to test strictly,
2669 try a very loose test. Only return failure if it fails also. */
2671 return constrain_operands (-1);
2676 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2677 is a hard reg in class CLASS when its regno is offset by OFFSET
2678 and changed to mode MODE.
2679 If REG occupies multiple hard regs, all of them must be in CLASS. */
/* NOTE(review): this chunk is extraction-garbled -- the leading integers
   (2676, 2682, ...) are original source line numbers fused into the text,
   statements are split mid-expression across physical lines, and several
   interior lines are missing entirely (the K&R declarations of OPERAND and
   OFFSET, and the final return statements).  Verify every detail against
   upstream GCC recog.c before relying on this text.  */
2682 reg_fits_class_p (operand
, class, offset
, mode
)
/* K&R-style parameter declarations.  CLASS is the register class to test
   membership in; MODE is the mode the register would be used in.  */
2684 register enum reg_class
class;
2686 enum machine_mode mode
;
/* Only hard registers (regno < FIRST_PSEUDO_REGISTER) can be tested
   against reg_class_contents; pseudos presumably fail the check.  */
2688 register int regno
= REGNO (operand
);
2689 if (regno
< FIRST_PSEUDO_REGISTER
2690 && TEST_HARD_REG_BIT (reg_class_contents
[(int) class],
/* Multi-word values: per the head comment, every hard reg the value
   occupies (count from HARD_REGNO_NREGS) must be in CLASS; the loop
   below re-tests each successive regno.  The loop body and the function's
   return statements are missing from this extraction.  */
2695 for (sr
= HARD_REGNO_NREGS (regno
, mode
) - 1;
2697 if (! TEST_HARD_REG_BIT (reg_class_contents
[(int) class],
2706 /* Split all insns in the function. If UPD_LIFE, update life info after. */
/* NOTE(review): extraction-garbled text -- embedded integers are original
   source line numbers, statements are split across physical lines, and many
   interior lines (declarations of `i', `insn', `set', `changed'; several
   braces and branch bodies) are missing.  Compare with upstream recog.c.  */
2709 split_all_insns (upd_life
)
/* Bitmap of basic-block indices whose insn stream was changed by
   splitting; used at the end to limit the life-info update.  */
2716 blocks
= sbitmap_alloc (n_basic_blocks
);
2717 sbitmap_zero (blocks
);
/* Walk all basic blocks, last to first.  */
2720 for (i
= n_basic_blocks
- 1; i
>= 0; --i
)
2722 basic_block bb
= BASIC_BLOCK (i
);
/* Walk each insn in the block, precomputing `next' because the
   current insn may be turned into a NOTE or replaced below.  */
2725 for (insn
= bb
->head
; insn
; insn
= next
)
2729 /* Can't use `next_real_insn' because that might go across
2730 CODE_LABELS and short-out basic blocks. */
2731 next
= NEXT_INSN (insn
);
2732 if (! INSN_P (insn
))
2735 /* Don't split no-op move insns. These should silently
2736 disappear later in final. Splitting such insns would
2737 break the code that handles REG_NO_CONFLICT blocks. */
2739 else if ((set
= single_set (insn
)) != NULL
2740 && rtx_equal_p (SET_SRC (set
), SET_DEST (set
)))
2742 /* Nops get in the way while scheduling, so delete them
2743 now if register allocation has already been done. It
2744 is too risky to try to do this before register
2745 allocation, and there are unlikely to be very many
2746 nops then anyways. */
2747 if (reload_completed
)
/* Deleting here means demoting the insn to a NOTE_INSN_DELETED
   note in place, not unlinking it from the chain.  */
2749 PUT_CODE (insn
, NOTE
);
2750 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
2751 NOTE_SOURCE_FILE (insn
) = 0;
2756 /* Split insns here to get max fine-grain parallelism. */
2757 rtx first
= PREV_INSN (insn
);
2758 rtx last
= try_split (PATTERN (insn
), insn
, 1);
/* Record that this block changed so its life info gets refreshed.  */
2762 SET_BIT (blocks
, i
);
2765 /* try_split returns the NOTE that INSN became. */
2766 PUT_CODE (insn
, NOTE
);
2767 NOTE_SOURCE_FILE (insn
) = 0;
2768 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
2770 /* ??? Coddle to md files that generate subregs in post-
2771 reload splitters instead of computing the proper
2773 if (reload_completed
&& first
!= last
)
/* Walk the new insns produced by the split and clean up any
   stray subregs the machine description may have created.  */
2775 first
= NEXT_INSN (first
);
2779 cleanup_subreg_operands (first
);
2782 first
= NEXT_INSN (first
);
/* Stop after processing the block's recorded end insn.  */
2786 if (insn
== bb
->end
)
2794 if (insn
== bb
->end
)
2798 /* ??? When we're called from just after reload, the CFG is in bad
2799 shape, and we may have fallen off the end. This could be fixed
2800 by having reload not try to delete unreachable code. Otherwise
2801 assert we found the end insn. */
2802 if (insn
== NULL
&& upd_life
)
/* If anything was split and the caller asked for it, recompute
   insn->block mapping and update life info for changed blocks only.  */
2806 if (changed
&& upd_life
)
2808 compute_bb_for_insn (get_max_uid ());
2809 count_or_remove_death_notes (blocks
, 1);
2810 update_life_info (blocks
, UPDATE_LIFE_LOCAL
, PROP_DEATH_NOTES
);
2813 sbitmap_free (blocks
);
2816 #ifdef HAVE_peephole2
/* Per-slot state for the peephole2 pass: each slot pairs an insn with the
   register-liveness set before it.  The struct's member list is missing
   from this extraction; upstream it holds `rtx insn' and a live_before
   regset -- TODO confirm against recog.c.  */
2817 struct peep2_insn_data
/* Circular window of the last MAX_INSNS_PER_PEEP2 + 1 insns seen,
   indexed modulo the window size via peep2_current.  */
2823 static struct peep2_insn_data peep2_insn_data
[MAX_INSNS_PER_PEEP2
+ 1];
2824 static int peep2_current
;
2826 /* A non-insn marker indicating the last insn of the block.
2827 The live_before regset for this element is correct, indicating
2828 global_live_at_end for the block. */
2829 #define PEEP2_EOB pc_rtx
2831 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2832 does not exist. Used by the recognizer to find the next insn to match
2833 in a multi-insn pattern. */
/* NOTE(review): the function's return type and signature lines are missing
   from this extraction (upstream this is `rtx peep2_next_insn (n)') -- the
   body below starts mid-function.  Verify against recog.c.  */
/* Bounds-check the requested offset; the branch body (presumably an
   abort) is missing here.  */
2839 if (n
>= MAX_INSNS_PER_PEEP2
+ 1)
/* Translate N into an index in the circular buffer: the addition of
   peep2_current is missing from this extraction, then wrap modulo the
   window size.  */
2843 if (n
>= MAX_INSNS_PER_PEEP2
+ 1)
2844 n
-= MAX_INSNS_PER_PEEP2
+ 1;
/* PEEP2_EOB marks the end of the block; there is no Nth insn then
   (the taken-branch body, presumably `return NULL_RTX', is missing).  */
2846 if (peep2_insn_data
[n
].insn
== PEEP2_EOB
)
2848 return peep2_insn_data
[n
].insn
;
2851 /* Return true if REGNO is dead before the Nth non-note insn
2855 peep2_regno_dead_p (ofs
, regno
)
/* NOTE(review): the return type, parameter declarations, and the bodies of
   the two guard branches (presumably aborts) are missing from this
   extraction.  Verify against upstream recog.c.  */
/* OFS must lie within the peephole window.  */
2859 if (ofs
>= MAX_INSNS_PER_PEEP2
+ 1)
/* Convert the caller-relative offset into a circular-buffer index.  */
2862 ofs
+= peep2_current
;
2863 if (ofs
>= MAX_INSNS_PER_PEEP2
+ 1)
2864 ofs
-= MAX_INSNS_PER_PEEP2
+ 1;
/* A NULL insn slot means no insn was recorded there yet.  */
2866 if (peep2_insn_data
[ofs
].insn
== NULL_RTX
)
/* Dead iff REGNO is absent from the live-before set of that slot.  */
2869 return ! REGNO_REG_SET_P (peep2_insn_data
[ofs
].live_before
, regno
);
2872 /* Similarly for a REG. */
2875 peep2_reg_dead_p (ofs
, reg
)
/* NOTE(review): extraction-garbled; return type, parameter declarations,
   guard-branch bodies, the `while (--n >= 0)' loop header, and the final
   return statements are missing.  Verify against upstream recog.c.  */
/* Same window bounds check and circular-index translation as
   peep2_regno_dead_p above.  */
2881 if (ofs
>= MAX_INSNS_PER_PEEP2
+ 1)
2884 ofs
+= peep2_current
;
2885 if (ofs
>= MAX_INSNS_PER_PEEP2
+ 1)
2886 ofs
-= MAX_INSNS_PER_PEEP2
+ 1;
2888 if (peep2_insn_data
[ofs
].insn
== NULL_RTX
)
/* A multi-word REG is dead only if every hard reg it occupies is dead;
   N counts the hard regs covered by REG in its mode.  */
2891 regno
= REGNO (reg
);
2892 n
= HARD_REGNO_NREGS (regno
, GET_MODE (reg
));
/* If any covered regno is live before the slot, REG is not dead
   (the surrounding loop and returns are missing here).  */
2894 if (REGNO_REG_SET_P (peep2_insn_data
[ofs
].live_before
, regno
+ n
))
2899 /* Try to find a hard register of mode MODE, matching the register class in
2900 CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
2901 remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
2902 in which case the only condition is that the register must be available
2903 before CURRENT_INSN.
2904 Registers that already have bits set in REG_SET will not be considered.
2906 If an appropriate register is available, it will be returned and the
2907 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
/* NOTE(review): extraction-garbled -- embedded integers are original line
   numbers; the return type, several declarations (FROM/TO, `live', `i'),
   brace lines, and failure-path returns are missing.  Verify against
   upstream recog.c.  */
2911 peep2_find_free_register (from
, to
, class_str
, mode
, reg_set
)
2913 const char *class_str
;
2914 enum machine_mode mode
;
2915 HARD_REG_SET
*reg_set
;
/* `search_ofs' is static so successive calls start scanning at different
   registers, spreading allocations across the register file.  */
2917 static int search_ofs
;
2918 enum reg_class
class;
/* Window bounds check on both endpoints.  */
2922 if (from
>= MAX_INSNS_PER_PEEP2
+ 1 || to
>= MAX_INSNS_PER_PEEP2
+ 1)
/* Translate FROM and TO into circular-buffer indices.  */
2925 from
+= peep2_current
;
2926 if (from
>= MAX_INSNS_PER_PEEP2
+ 1)
2927 from
-= MAX_INSNS_PER_PEEP2
+ 1;
2928 to
+= peep2_current
;
2929 if (to
>= MAX_INSNS_PER_PEEP2
+ 1)
2930 to
-= MAX_INSNS_PER_PEEP2
+ 1;
2932 if (peep2_insn_data
[from
].insn
== NULL_RTX
)
/* Seed `live' with the registers live before FROM ...  */
2934 REG_SET_TO_HARD_REG_SET (live
, peep2_insn_data
[from
].live_before
)
;
/* ... then (in a loop whose header is missing here) OR in the live sets
   of every slot up to TO, so the candidate stays free across the range.  */
2938 HARD_REG_SET this_live
;
2940 if (++from
>= MAX_INSNS_PER_PEEP2
+ 1)
2942 if (peep2_insn_data
[from
].insn
== NULL_RTX
)
2944 REG_SET_TO_HARD_REG_SET (this_live
, peep2_insn_data
[from
].live_before
)
;
2945 IOR_HARD_REG_SET (live
, this_live
);
/* Only the first letter of CLASS_STR is consulted: 'r' means
   GENERAL_REGS, anything else goes through REG_CLASS_FROM_LETTER.  */
2948 class = (class_str
[0] == 'r' ? GENERAL_REGS
2949 : REG_CLASS_FROM_LETTER (class_str
[0]));
/* Scan all hard registers, starting at search_ofs for distribution.  */
2951 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2953 int raw_regno
, regno
, success
, j
;
2955 /* Distribute the free registers as much as possible. */
2956 raw_regno
= search_ofs
+ i
;
2957 if (raw_regno
>= FIRST_PSEUDO_REGISTER
)
2958 raw_regno
-= FIRST_PSEUDO_REGISTER
;
2959 #ifdef REG_ALLOC_ORDER
/* Honor the target's preferred allocation order when defined; the
   #else arm (regno = raw_regno) is missing from this extraction.  */
2960 regno
= reg_alloc_order
[raw_regno
];
2965 /* Don't allocate fixed registers. */
2966 if (fixed_regs
[regno
])
2968 /* Make sure the register is of the right class. */
2969 if (! TEST_HARD_REG_BIT (reg_class_contents
[class], regno
))
2971 /* And can support the mode we need. */
2972 if (! HARD_REGNO_MODE_OK (regno
, mode
))
2974 /* And that we don't create an extra save/restore. */
2975 if (! call_used_regs
[regno
] && ! regs_ever_live
[regno
])
2977 /* And we don't clobber traceback for noreturn functions. */
2978 if ((regno
== FRAME_POINTER_REGNUM
|| regno
== HARD_FRAME_POINTER_REGNUM
)
2979 && (! reload_completed
|| frame_pointer_needed
))
/* Check that every hard reg the mode needs is free of both the
   caller's REG_SET and the computed live set.  */
2983 for (j
= HARD_REGNO_NREGS (regno
, mode
) - 1; j
>= 0; j
--)
2985 if (TEST_HARD_REG_BIT (*reg_set
, regno
+ j
)
2986 || TEST_HARD_REG_BIT (live
, regno
+ j
))
/* Success: claim all covered regnos in the caller's REG_SET.  */
2994 for (j
= HARD_REGNO_NREGS (regno
, mode
) - 1; j
>= 0; j
--)
2995 SET_HARD_REG_BIT (*reg_set
, regno
+ j
);
2997 /* Start the next search with the next register. */
2998 if (++raw_regno
>= FIRST_PSEUDO_REGISTER
)
3000 search_ofs
= raw_regno
;
3002 return gen_rtx_REG (mode
, regno
);
3010 /* Perform the peephole2 optimization pass. */
/* NOTE(review): extraction-garbled -- embedded integers are original line
   numbers; local declarations (live, blocks, b, i, insn, prev, try,
   match_len, changed), many braces, #else/#endif lines, and the function's
   closing brace are missing.  Verify against upstream recog.c.  */
3013 peephole2_optimize (dump_file
)
3014 FILE *dump_file ATTRIBUTE_UNUSED
;
/* One regset head per window slot plus one for the running `live' set.  */
3016 regset_head rs_heads
[MAX_INSNS_PER_PEEP2
+ 2];
3020 #ifdef HAVE_conditional_execution
3025 /* Initialize the regsets we're going to use. */
3026 for (i
= 0; i
< MAX_INSNS_PER_PEEP2
+ 1; ++i
)
3027 peep2_insn_data
[i
].live_before
= INITIALIZE_REG_SET (rs_heads
[i
]);
/* The loop leaves i == MAX_INSNS_PER_PEEP2 + 1, so `live' takes the
   final spare regset head.  */
3028 live
= INITIALIZE_REG_SET (rs_heads
[i
]);
3030 #ifdef HAVE_conditional_execution
/* With conditional execution, record modified blocks in a bitmap and
   strip all death notes up front; life info is rebuilt at the end.  */
3031 blocks
= sbitmap_alloc (n_basic_blocks
);
3032 sbitmap_zero (blocks
);
3035 count_or_remove_death_notes (NULL
, 1);
/* Process blocks last to first; insns are walked backwards so the
   live-before sets can be computed by backward propagation.  */
3038 for (b
= n_basic_blocks
- 1; b
>= 0; --b
)
3040 basic_block bb
= BASIC_BLOCK (b
);
3041 struct propagate_block_info
*pbi
;
3043 /* Indicate that all slots except the last holds invalid data. */
3044 for (i
= 0; i
< MAX_INSNS_PER_PEEP2
; ++i
)
3045 peep2_insn_data
[i
].insn
= NULL_RTX
;
3047 /* Indicate that the last slot contains live_after data. */
3048 peep2_insn_data
[MAX_INSNS_PER_PEEP2
].insn
= PEEP2_EOB
;
3049 peep2_current
= MAX_INSNS_PER_PEEP2
;
3051 /* Start up propagation. */
3052 COPY_REG_SET (live
, bb
->global_live_at_end
);
3053 COPY_REG_SET (peep2_insn_data
[MAX_INSNS_PER_PEEP2
].live_before
, live
);
3055 #ifdef HAVE_conditional_execution
/* Death notes are regenerated globally afterwards, so don't ask the
   propagator for them here ...  */
3056 pbi
= init_propagate_block_info (bb
, live
, NULL
, 0);
/* ... whereas without conditional execution they are maintained
   incrementally (this is presumably the #else arm; the directive
   itself is missing from the extraction).  */
3058 pbi
= init_propagate_block_info (bb
, live
, NULL
, PROP_DEATH_NOTES
);
/* Walk insns backward from the block end; `prev' is captured before
   the insn may be deleted/replaced.  */
3061 for (insn
= bb
->end
; ; insn
= prev
)
3063 prev
= PREV_INSN (insn
);
3069 /* Record this insn. */
3070 if (--peep2_current
< 0)
3071 peep2_current
= MAX_INSNS_PER_PEEP2
;
3072 peep2_insn_data
[peep2_current
].insn
= insn
;
3073 propagate_one_insn (pbi
, insn
);
3074 COPY_REG_SET (peep2_insn_data
[peep2_current
].live_before
, live
);
3076 /* Match the peephole. */
3077 try = peephole2_insns (PATTERN (insn
), insn
, &match_len
);
/* On a match, `i' indexes the last insn of the matched sequence in
   the circular window.  */
3080 i
= match_len
+ peep2_current
;
3081 if (i
>= MAX_INSNS_PER_PEEP2
+ 1)
3082 i
-= MAX_INSNS_PER_PEEP2
+ 1;
3084 /* Replace the old sequence with the new. */
3085 flow_delete_insn_chain (insn
, peep2_insn_data
[i
].insn
);
3086 try = emit_insn_after (try, prev
);
3088 /* Adjust the basic block boundaries. */
3089 if (peep2_insn_data
[i
].insn
== bb
->end
)
3091 if (insn
== bb
->head
)
3092 bb
->head
= NEXT_INSN (prev
);
3094 #ifdef HAVE_conditional_execution
3095 /* With conditional execution, we cannot back up the
3096 live information so easily, since the conditional
3097 death data structures are not so self-contained.
3098 So record that we've made a modification to this
3099 block and update life information at the end. */
3100 SET_BIT (blocks
, b
);
/* Invalidate the whole window after a replacement; only the
   end-of-block marker slot stays meaningful.  */
3103 for (i
= 0; i
< MAX_INSNS_PER_PEEP2
+ 1; ++i
)
3104 peep2_insn_data
[i
].insn
= NULL_RTX
;
3105 peep2_insn_data
[peep2_current
].insn
= PEEP2_EOB
;
3107 /* Back up lifetime information past the end of the
3108 newly created sequence. */
3109 if (++i
>= MAX_INSNS_PER_PEEP2
+ 1)
3111 COPY_REG_SET (live
, peep2_insn_data
[i
].live_before
);
3113 /* Update life information for the new sequence. */
/* Walk the emitted insns backward (do/while closed at 3126),
   recording each in the window and re-propagating liveness.  */
3119 i
= MAX_INSNS_PER_PEEP2
;
3120 peep2_insn_data
[i
].insn
= try;
3121 propagate_one_insn (pbi
, try);
3122 COPY_REG_SET (peep2_insn_data
[i
].live_before
, live
);
3124 try = PREV_INSN (try);
3126 while (try != prev
);
3128 /* ??? Should verify that LIVE now matches what we
3129 had before the new sequence. */
/* Loop exit: stop once the block head has been processed.  */
3136 if (insn
== bb
->head
)
3140 free_propagate_block_info (pbi
);
/* Tear down the per-pass regsets.  */
3143 for (i
= 0; i
< MAX_INSNS_PER_PEEP2
+ 1; ++i
)
3144 FREE_REG_SET (peep2_insn_data
[i
].live_before
);
3145 FREE_REG_SET (live
);
3147 #ifdef HAVE_conditional_execution
/* Rebuild death notes / life info for the blocks we touched.  */
3148 count_or_remove_death_notes (blocks
, 1);
3149 update_life_info (blocks
, UPDATE_LIFE_LOCAL
, PROP_DEATH_NOTES
);
3150 sbitmap_free (blocks
);
3153 #endif /* HAVE_peephole2 */