/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "coretypes.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx);
static rtx *find_single_use_1 (rtx, rtx *);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx);
/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in regclass.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data recog_data;
/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;
/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}
/* Try recognizing the instruction INSN,
   and return the code number that results.
   Remember the code so that repeated calls do not
   need to spend the time for actual rerecognition.

   This function is the normal interface to instruction recognition.
   The automatically-generated function `recog' is normally called
   through this one.  (The only exception is in combine.c.)  */

int
recog_memoized_1 (rtx insn)
{
  if (INSN_CODE (insn) < 0)
    INSN_CODE (insn) = recog (PATTERN (insn), insn, 0);
  return INSN_CODE (insn);
}
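
/* Illustrative sketch (not part of the original source): a typical
   caller memoizes recognition and bails out when the insn matches no
   pattern.  The helper name and the `insn' variable are hypothetical;
   recog_memoized is the usual macro wrapper around recog_memoized_1.  */
#if 0
static void
example_check_recognized (rtx insn)
{
  /* A negative code means no define_insn pattern matched.  */
  if (recog_memoized (insn) < 0)
    fatal_insn_not_found (insn);
}
#endif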
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = alloca (noperands * sizeof (rtx));
  constraints = alloca (noperands * sizeof (char *));

  decode_asm_operands (x, operands, NULL, constraints, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
        c = constraints[c[0] - '0'];

      if (! asm_operand_ok (operands[i], c))
        return 0;
    }

  return 1;
}
/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;
/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

int
validate_change (rtx object, rtx *loc, rtx new, int in_group)
{
  rtx old = *loc;

  if (old == new || rtx_equal_p (old, new))
    return 1;

  if (in_group == 0 && num_changes != 0)
    abort ();

  *loc = new;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes = xrealloc (changes, sizeof (change_t) * changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;

  if (object && GET_CODE (object) != MEM)
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.  */

int
insn_invalid_p (rtx insn)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}
/* Return number of changes made and not validated yet.  */

int
num_changes_pending (void)
{
  return num_changes;
}
/* Apply a group of changes previously issued with `validate_change'.
   Return 1 if all changes are valid, zero otherwise.  */

int
apply_change_group (void)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is in insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (GET_CODE (object) == MEM)
        {
          if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
            break;
        }
      else if (insn_invalid_p (object))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  if (i == num_changes)
    {
      basic_block bb;

      for (i = 0; i < num_changes; i++)
        if (changes[i].object
            && INSN_P (changes[i].object)
            && (bb = BLOCK_FOR_INSN (changes[i].object)))
          bb->flags |= BB_DIRTY;

      num_changes = 0;
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}
/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && GET_CODE (changes[i].object) != MEM)
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
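
/* Illustrative sketch (not part of the original source): the usual
   protocol for the change-group machinery above.  Queue tentative edits
   with IN_GROUP nonzero, then commit them all at once.  The variables
   `insn', `loc' and `new_rtx' are hypothetical.  */
#if 0
static int
example_try_edit (rtx insn, rtx *loc, rtx new_rtx)
{
  /* Queue the change; nothing is verified yet because IN_GROUP is 1.  */
  validate_change (insn, loc, new_rtx, 1);

  /* Commit: re-recognizes INSN and keeps the edit only if it is still
     a valid instruction.  On failure apply_change_group itself calls
     cancel_changes (0); cancel_changes with a checkpoint returned by
     num_validated_changes would roll back by hand.  */
  return apply_change_group ();
}
#endif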
/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;
  rtx new;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (GET_CODE (x) == REG && GET_CODE (from) == REG
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.  */

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
    }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */

  if ((GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_change (object, loc,
                       gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
                                       : swap_condition (code),
                                       GET_MODE (x), XEXP (x, 1),
                                       XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                    op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new)
            new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                             SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new)
        validate_change (object, loc, new, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (GET_CODE (XEXP (x, 0)) == MEM
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && GET_CODE (XEXP (x, 2)) == CONST_INT
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extzv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}
/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
}
/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx insn;			/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}

/* Same as validate_replace_src_group, but validate by seeing if
   INSN is still valid.  */

int
validate_replace_src (rtx from, rtx to, rtx insn)
{
  validate_replace_src_group (from, to, insn);
  return apply_change_group ();
}
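
/* Illustrative sketch (not part of the original source): replacing a
   pseudo with another rtx throughout an insn and validating the result
   in one call.  `insn', `old_reg' and `new_reg' are hypothetical.  */
#if 0
static int
example_substitute_reg (rtx insn, rtx old_reg, rtx new_reg)
{
  /* Replaces every use of OLD_REG, then re-recognizes INSN; returns
     nonzero and keeps the edits only if INSN is still valid.  */
  if (validate_replace_rtx (old_reg, new_reg, insn))
    return 1;

  /* validate_replace_src does the same but skips SET_DESTs.  */
  return validate_replace_src (old_reg, new_reg, insn);
}
#endif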
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return ((GET_CODE (next) == JUMP_INSN
           || GET_CODE (next) == INSN
           || GET_CODE (next) == CALL_INSN)
          && ! inequality_comparisons_p (PATTERN (next)));
}
/* This is used by find_single_use to locate an rtx that contains exactly one
   use of DEST, which is typically either a REG or CC0.  It returns a
   pointer to the innermost rtx expression containing DEST.  Appearances of
   DEST that are being used to totally replace it are not counted.  */

static rtx *
find_single_use_1 (rtx dest, rtx *loc)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *result = 0;
  rtx *this_result;
  int i;
  const char *fmt;

  switch (code)
    {
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CLOBBER:
      return 0;

    case SET:
      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn uses DEST if
         it is mentioned in the destination or the source.  Otherwise, we
         need just check the source.  */
      if (GET_CODE (SET_DEST (x)) != CC0
          && GET_CODE (SET_DEST (x)) != PC
          && GET_CODE (SET_DEST (x)) != REG
          && ! (GET_CODE (SET_DEST (x)) == SUBREG
                && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
        break;

      return find_single_use_1 (dest, &SET_SRC (x));

    case MEM:
    case SUBREG:
      return find_single_use_1 (dest, &XEXP (x, 0));

    default:
      break;
    }

  /* If it wasn't one of the common cases above, check each expression and
     vector of this code.  Look for a unique usage of DEST.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (dest == XEXP (x, i)
              || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
                  && REGNO (dest) == REGNO (XEXP (x, i))))
            this_result = loc;
          else
            this_result = find_single_use_1 (dest, &XEXP (x, i));

          if (result == 0)
            result = this_result;
          else if (this_result)
            /* Duplicate usage.  */
            return 0;
        }
      else if (fmt[i] == 'E')
        {
          int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            {
              if (XVECEXP (x, i, j) == dest
                  || (GET_CODE (dest) == REG
                      && GET_CODE (XVECEXP (x, i, j)) == REG
                      && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
                this_result = loc;
              else
                this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));

              if (result == 0)
                result = this_result;
              else if (this_result)
                return 0;
            }
        }
    }

  return result;
}
/* See if DEST, produced in INSN, is used only a single time in the
   sequel.  If so, return a pointer to the innermost rtx expression in which
   it is used.

   If PLOC is nonzero, *PLOC is set to the insn containing the single use.

   This routine will return usually zero either before flow is called (because
   there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
   note can't be trusted).

   If DEST is cc0_rtx, we look only at the next insn.  In that case, we don't
   care about REG_DEAD notes or LOG_LINKS.

   Otherwise, we find the single use by finding an insn that has a
   LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST.  If DEST is
   only referenced once in that insn, we know that it must be the first
   and last insn referencing DEST.  */

rtx *
find_single_use (rtx dest, rtx insn, rtx *ploc)
{
  rtx next;
  rtx *result;
  rtx link;

#ifdef HAVE_cc0
  if (dest == cc0_rtx)
    {
      next = NEXT_INSN (insn);
      if (next == 0
          || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
        return 0;

      result = find_single_use_1 (dest, &PATTERN (next));
      if (result && ploc)
        *ploc = next;
      return result;
    }
#endif

  if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
    return 0;

  for (next = next_nonnote_insn (insn);
       next != 0 && GET_CODE (next) != CODE_LABEL;
       next = next_nonnote_insn (next))
    if (INSN_P (next) && dead_or_set_p (next, dest))
      {
        for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
          if (XEXP (link, 0) == insn)
            break;

        if (link)
          {
            result = find_single_use_1 (dest, &PATTERN (next));
            if (ploc)
              *ploc = next;
            return result;
          }
      }

  return 0;
}
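
/* Illustrative sketch (not part of the original source): how a
   combine-like pass might use find_single_use.  `def_insn' and `dest'
   are hypothetical.  */
#if 0
static void
example_single_use (rtx def_insn, rtx dest)
{
  rtx use_insn;
  rtx *use_loc = find_single_use (dest, def_insn, &use_insn);

  if (use_loc)
    {
      /* DEST is referenced exactly once, at *USE_LOC inside USE_INSN,
         so it may be safe to substitute the defining expression there
         and delete DEF_INSN.  */
    }
}
#endif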
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it might have.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
            && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         reference to be explicit, so outlaw paradoxical SUBREGs.  */
      if (GET_CODE (sub) == MEM
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
          && GET_CODE (sub) == MEM)
        return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      if (GET_CODE (y) == ADDRESSOF)
        return 1;

      /* Use the mem's mode, since it will be reloaded thus.  */
      mode = GET_MODE (op);
      GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
    }

  /* Pretend this is an operand for now; we'll run force_operand
     on its replacement in fixup_var_refs_1.  */
  if (code == ADDRESSOF)
    return 1;

  return 0;

 win:
  return 1;
}
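
/* Illustrative sketch (not part of the original source): the operand
   predicates in this file are normally referenced by name from machine
   descriptions, but they can also be called directly.  The pseudo
   register number 100 is an arbitrary choice for illustration.  */
#if 0
static void
example_predicates (void)
{
  rtx reg = gen_rtx_REG (SImode, 100);	/* a pseudo register */
  rtx cst = GEN_INT (42);

  general_operand (reg, SImode);	/* 1: registers are general */
  general_operand (cst, SImode);	/* 1 if 42 is LEGITIMATE_CONSTANT_P */
  register_operand (cst, SImode);	/* 0: constants aren't registers */
  immediate_operand (cst, SImode);	/* 1: a CONST_INT is immediate */
}
#endif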
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */
int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && GET_CODE (sub) == MEM)
        return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (GET_CODE (sub) == REG
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
        return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
    }

  /* If we have an ADDRESSOF, consider it valid since it will be
     converted into something that will not be a MEM.  */
  if (GET_CODE (op) == ADDRESSOF)
    return 1;

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (GET_CODE (op) == REG
              && REGNO (op) < FIRST_PSEUDO_REGISTER));
}
/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
     result in 0/1.  It seems a safe assumption that this is
     in range for everyone.  */
  if (GET_CODE (op) == CONSTANT_P_RTX)
    return 1;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
          && LEGITIMATE_CONSTANT_P (op));
}
/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) != CONST_INT)
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}
/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}

/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}
/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
         if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
          && GET_MODE_CLASS (mode) != MODE_INT
          && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
        return 0;

      if (GET_CODE (op) == CONST_INT
          && mode != VOIDmode
          && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
        return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
               || mode == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
              && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
              && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
        return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (GET_CODE (op) != MEM)
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
#ifdef STACK_GROWS_DOWNWARD
          || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
          || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
          )
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) != MEM)
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}
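
/* Illustrative sketch (not part of the original source): on a
   STACK_GROWS_DOWNWARD target with no PUSH_ROUNDING padding, the rtl
   that push_operand accepts for SImode looks like

       (mem:SI (pre_dec:SI (reg:SI sp)))

   and pop_operand accepts the mirror image

       (mem:SI (post_inc:SI (reg:SI sp)))

   where `sp' stands for the stack pointer register.  */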
/* Return 1 if ADDR is a valid memory address for mode MODE.  */

int
memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx addr)
{
  if (GET_CODE (addr) == ADDRESSOF)
    return 1;

  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
}
/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, enum machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return GET_CODE (op) == MEM && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (GET_CODE (inner) == MEM && general_operand (op, mode));
}
/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (GET_CODE (op) == MEM
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}
/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && GET_RTX_CLASS (GET_CODE (op)) == '<');
}
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (rtx body)
{
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* No output operands: return number of input operands.  */
      return ASM_OPERANDS_INPUT_LENGTH (body);
    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
        /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
        return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
      else
        return -1;
    case PARALLEL:
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET
          && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
          int i;
          int n_sets;

          /* Count backwards through CLOBBERs to determine number of SETs.  */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands.  */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked).  */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original insns
                 then they aren't allowed together.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
                return -1;
            }
          return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
                  + n_sets);
        }
      else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...].  */
          int i;

          /* Make sure all the other parallel things really are clobbers.  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;

          return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
        }
      else
        return -1;
    default:
      return -1;
    }
}
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
                     const char **constraints, enum machine_mode *modes)
{
  int i;
  int noperands;
  const char *template = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      for (i = 1; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
        }

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      rtx asmop = body;
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.  */
      /* Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
      int nout = 0;		/* Does not include CLOBBERs.  */

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
         Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
        {
          if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
            break;		/* Past last SET */

          if (operands)
            operands[i] = SET_DEST (XVECEXP (body, 0, i));
          if (operand_locs)
            operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
          if (constraints)
            constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
          if (modes)
            modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
          nout++;
        }

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs.  */

      rtx asmop = XVECEXP (body, 0, 0);
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }

  return template;
}
/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint)
{
  int result = 0;

  /* Use constrain_operands after reload.  */
  if (reload_completed)
    abort ();

  while (*constraint)
    {
      char c = *constraint;
      int len;
      switch (c)
        {
        case ',':
          constraint++;
          continue;
        case '=':
        case '+':
        case '*':
        case '%':
        case '!':
        case '#':
        case '&':
        case '?':
          break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          /* For best results, our caller should have given us the
             proper matching constraint, but we can't actually fail
             the check if they didn't.  Indicate that results are
             inconclusive.  */
          do
            constraint++;
          while (ISDIGIT (*constraint));
          if (! result)
            result = -1;
          continue;

        case 'p':
          if (address_operand (op, VOIDmode))
            result = 1;
          break;

        case 'm':
        case 'V': /* non-offsettable */
          if (memory_operand (op, VOIDmode))
            result = 1;
          break;

        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            result = 1;
          break;

        case '<':
          /* ??? Before flow, auto inc/dec insns are not supposed to exist,
             excepting those that expand_call created.  Further, on some
             machines which do not have generalized auto inc/dec, an inc/dec
             is not a memory_operand.

             Match any memory and hope things are resolved after reload.  */

          if (GET_CODE (op) == MEM
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_DEC
                  || GET_CODE (XEXP (op, 0)) == POST_DEC))
            result = 1;
          break;

        case '>':
          if (GET_CODE (op) == MEM
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_INC
                  || GET_CODE (XEXP (op, 0)) == POST_INC))
            result = 1;
          break;

        case 'E':
        case 'F':
          if (GET_CODE (op) == CONST_DOUBLE
              || (GET_CODE (op) == CONST_VECTOR
                  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
            result = 1;
          break;

        case 'G':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
            result = 1;
          break;
        case 'H':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
            result = 1;
          break;

        case 's':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            break;
          /* Fall through.  */

        case 'i':
          if (CONSTANT_P (op)
#ifdef LEGITIMATE_PIC_OPERAND_P
              && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
              )
            result = 1;
          break;

        case 'n':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            result = 1;
          break;

        case 'I':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
            result = 1;
          break;
        case 'J':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
            result = 1;
          break;
        case 'K':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
            result = 1;
          break;
        case 'L':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
            result = 1;
          break;
        case 'M':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
            result = 1;
          break;
        case 'N':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
            result = 1;
          break;
        case 'O':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
            result = 1;
          break;
        case 'P':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
            result = 1;
          break;

        case 'X':
          result = 1;
          break;

        case 'g':
          if (general_operand (op, VOIDmode))
            result = 1;
          break;

        default:
          /* For all other letters, we first check for a register class,
             otherwise it is an EXTRA_CONSTRAINT.  */
          if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
            {
            case 'r':
              if (GET_MODE (op) == BLKmode)
                break;
              if (register_operand (op, VOIDmode))
                result = 1;
            }
#ifdef EXTRA_CONSTRAINT_STR
          else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
            result = 1;
          else if (EXTRA_MEMORY_CONSTRAINT (c, constraint)
                   /* Every memory operand can be reloaded to fit.  */
                   && memory_operand (op, VOIDmode))
            result = 1;
          else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint)
                   /* Every address operand can be reloaded to fit.  */
                   && address_operand (op, VOIDmode))
            result = 1;
#endif
          break;
        }
      len = CONSTRAINT_LEN (c, constraint);
      do
        constraint++;
      while (--len && *constraint);
      if (len)
        return 0;
    }

  return result;
}
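
/* Illustrative sketch (not part of the original source): checking a
   single asm operand against a constraint string before reload.  The
   operand and constraint are hypothetical, and the meaning of 'I' is
   target-specific.  */
#if 0
static void
example_asm_check (void)
{
  rtx op = GEN_INT (3);

  /* "rI" accepts a register or (on most targets) a small immediate;
     > 0 means ok, 0 means bad, < 0 means inconclusive.  */
  if (asm_operand_ok (op, "rI") <= 0)
    error ("impossible constraint in `asm'");
}
#endif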
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

static rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
        return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
        return tem;
    }

  return 0;
}
/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (rtx op)
{
  return ((GET_CODE (op) == MEM)
          && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (rtx op)
{
  return ((GET_CODE (op) == MEM)
          && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
}
/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid after the addition of a positive integer
   less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_p (int strictp, enum machine_mode mode, rtx y)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) (enum machine_mode, rtx) =
    (strictp ? strict_memory_address_p : memory_address_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y))
    return 0;

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (*y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
         for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (GET_RTX_CLASS (ycode) == 'a')
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
    z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
                        plus_constant (XEXP (y, 1), mode_sz - 1));
  else
    z = plus_constant (y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z);
}
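
/* Illustrative sketch (not part of the original source): an address
   can be valid yet not offsettable.  On a hypothetical reg+offset
   machine whose displacement field tops out at 32767, the address
   below is fine for a 1-byte access, but adding mode_sz - 1 for
   SImode pushes it out of range.  */
#if 0
static void
example_offsettable (void)
{
  rtx addr = plus_constant (stack_pointer_rtx, 32765);

  /* Valid for a single byte, but SImode needs addr+3 to be valid too,
     so this returns 0 and the address must be reloaded first.  */
  offsettable_address_p (0, SImode, addr);
}
#endif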
/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */

int
mode_dependent_address_p (rtx addr ATTRIBUTE_UNUSED
                          /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS.  */)
{
  GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
  return 0;
  /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
 win: ATTRIBUTE_UNUSED_LABEL
  return 1;
}
/* Like extract_insn, but save insn extracted and don't extract again, when
   called again for the same insn expecting that recog_data still contain the
   valid information.  This is used primarily by gen_attr infrastructure that
   often does extract insn again and again.  */
void
extract_insn_cached (rtx insn)
{
  if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
    return;
  extract_insn (insn);
  recog_data.insn = insn;
}

/* Do cached extract_insn, constrain_operands and complain about failures.
   Used by insn_attrtab.  */
void
extract_constrain_insn_cached (rtx insn)
{
  extract_insn_cached (insn);
  if (which_alternative == -1
      && !constrain_operands (reload_completed))
    fatal_insn_not_found (insn);
}

/* Do cached constrain_operands and complain about failures.  */
int
constrain_operands_cached (int strict)
{
  if (which_alternative == -1)
    return constrain_operands (strict);
  else
    return 1;
}
/* Analyze INSN and fill in recog_data.  */

void
extract_insn (rtx insn)
{
  int i;
  int icode;
  int noperands;
  rtx body = PATTERN (insn);

  recog_data.insn = NULL;
  recog_data.n_operands = 0;
  recog_data.n_alternatives = 0;
  recog_data.n_dups = 0;
  which_alternative = -1;

  switch (GET_CODE (body))
    {
    case USE:
    case CLOBBER:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
        goto asm_insn;
      else
        goto normal_insn;
    case PARALLEL:
      if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
          || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
        goto asm_insn;
      else
        goto normal_insn;
    case ASM_OPERANDS:
    asm_insn:
      recog_data.n_operands = noperands = asm_noperands (body);
      if (noperands >= 0)
        {
          /* This insn is an `asm' with operands.  */

          /* expand_asm_operands makes sure there aren't too many operands.  */
          if (noperands > MAX_RECOG_OPERANDS)
            abort ();

          /* Now get the operand values and constraints out of the insn.  */
          decode_asm_operands (body, recog_data.operand,
                               recog_data.operand_loc,
                               recog_data.constraints,
                               recog_data.operand_mode);
          if (noperands > 0)
            {
              const char *p = recog_data.constraints[0];
              recog_data.n_alternatives = 1;
              while (*p)
                recog_data.n_alternatives += (*p++ == ',');
            }
          break;
        }
      fatal_insn_not_found (insn);

    default:
    normal_insn:
      /* Ordinary insn: recognize it, get the operands via insn_extract
         and get the constraints.  */

      icode = recog_memoized (insn);
      if (icode < 0)
        fatal_insn_not_found (insn);

      recog_data.n_operands = noperands = insn_data[icode].n_operands;
      recog_data.n_alternatives = insn_data[icode].n_alternatives;
      recog_data.n_dups = insn_data[icode].n_dups;

      insn_extract (insn);

      for (i = 0; i < noperands; i++)
        {
          recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
          recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
          /* VOIDmode match_operands gets mode from their real operand.  */
          if (recog_data.operand_mode[i] == VOIDmode)
            recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
        }
    }
  for (i = 0; i < noperands; i++)
    recog_data.operand_type[i]
      = (recog_data.constraints[i][0] == '=' ? OP_OUT
         : recog_data.constraints[i][0] == '+' ? OP_INOUT
         : OP_IN);

  if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
    abort ();
}
/* After calling extract_insn, you can use this function to extract some
   information from the constraint strings into a more usable form.
   The collected data is stored in recog_op_alt.  */
void
preprocess_constraints (void)
{
  int i;

  for (i = 0; i < recog_data.n_operands; i++)
    memset (recog_op_alt[i], 0, (recog_data.n_alternatives
                                 * sizeof (struct operand_alternative)));

  for (i = 0; i < recog_data.n_operands; i++)
    {
      int j;
      struct operand_alternative *op_alt;
      const char *p = recog_data.constraints[i];

      op_alt = recog_op_alt[i];

      for (j = 0; j < recog_data.n_alternatives; j++)
        {
          op_alt[j].class = NO_REGS;
          op_alt[j].constraint = p;
          op_alt[j].matches = -1;
          op_alt[j].matched = -1;

          if (*p == '\0' || *p == ',')
            {
              op_alt[j].anything_ok = 1;
              continue;
            }

          for (;;)
            {
              char c = *p;
              if (c == '#')
                do
                  c = *++p;
                while (c != ',' && c != '\0');
              if (c == ',' || c == '\0')
                {
                  p++;
                  break;
                }

              switch (c)
                {
                case '=': case '+': case '*': case '%':
                case 'E': case 'F': case 'G': case 'H':
                case 's': case 'i': case 'n':
                case 'I': case 'J': case 'K': case 'L':
                case 'M': case 'N': case 'O': case 'P':
                  /* These don't say anything we care about.  */
                  break;

                case '?':
                  op_alt[j].reject += 6;
                  break;
                case '!':
                  op_alt[j].reject += 600;
                  break;
                case '&':
                  op_alt[j].earlyclobber = 1;
                  break;

                case '0': case '1': case '2': case '3': case '4':
                case '5': case '6': case '7': case '8': case '9':
                  {
                    char *end;
                    op_alt[j].matches = strtoul (p, &end, 10);
                    recog_op_alt[op_alt[j].matches][j].matched = i;
                    p = end;
                  }
                  continue;

                case 'm':
                  op_alt[j].memory_ok = 1;
                  break;
                case '<':
                  op_alt[j].decmem_ok = 1;
                  break;
                case '>':
                  op_alt[j].incmem_ok = 1;
                  break;
                case 'V':
                  op_alt[j].nonoffmem_ok = 1;
                  break;
                case 'o':
                  op_alt[j].offmem_ok = 1;
                  break;
                case 'X':
                  op_alt[j].anything_ok = 1;
                  break;

                case 'p':
                  op_alt[j].is_address = 1;
                  op_alt[j].class = reg_class_subunion[(int) op_alt[j].class]
                    [(int) MODE_BASE_REG_CLASS (VOIDmode)];
                  break;

                case 'g': case 'r':
                  op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
                  break;

                default:
                  if (EXTRA_MEMORY_CONSTRAINT (c, p))
                    {
                      op_alt[j].memory_ok = 1;
                      break;
                    }
                  if (EXTRA_ADDRESS_CONSTRAINT (c, p))
                    {
                      op_alt[j].is_address = 1;
                      op_alt[j].class
                        = (reg_class_subunion
                           [(int) op_alt[j].class]
                           [(int) MODE_BASE_REG_CLASS (VOIDmode)]);
                      break;
                    }

                  op_alt[j].class
                    = (reg_class_subunion
                       [(int) op_alt[j].class]
                       [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
                  break;
                }
              p += CONSTRAINT_LEN (c, p);
            }
        }
    }
}
2221 /* Check the operands of an insn against the insn's operand constraints
2222 and return 1 if they are valid.
2223 The information about the insn's operands, constraints, operand modes
2224 etc. is obtained from the global variables set up by extract_insn.
2226 WHICH_ALTERNATIVE is set to a number which indicates which
2227 alternative of constraints was matched: 0 for the first alternative,
2228 1 for the next, etc.
2230 In addition, when two operands are required to match
2231 and it happens that the output operand is (reg) while the
2232 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2233 make the output operand look like the input.
2234 This is because the output operand is the one the template will print.
2236 This is used in final, just before printing the assembler code and by
2237 the routines that determine an insn's attribute.
2239 If STRICT is a positive nonzero value, it means that we have been
2240 called after reload has been completed. In that case, we must
2241 do all checks strictly. If it is zero, it means that we have been called
2242 before reload has completed. In that case, we first try to see if we can
2243 find an alternative that matches strictly. If not, we try again, this
2244 time assuming that reload will fix up the insn. This provides a "best
2245 guess" for the alternative and is used to compute attributes of insns prior
2246 to reload. A negative value of STRICT is used for this internal call. */
struct funny_match
{
  int this, other;
};

int
constrain_operands (int strict)
{
  const char *constraints[MAX_RECOG_OPERANDS];
  int matching_operands[MAX_RECOG_OPERANDS];
  int earlyclobber[MAX_RECOG_OPERANDS];
  int c;

  struct funny_match funny_match[MAX_RECOG_OPERANDS];
  int funny_match_index;

  which_alternative = 0;
  if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
    return 1;

  for (c = 0; c < recog_data.n_operands; c++)
    {
      constraints[c] = recog_data.constraints[c];
      matching_operands[c] = -1;
    }

  do
    {
      int opno;
      int lose = 0;
      funny_match_index = 0;

      for (opno = 0; opno < recog_data.n_operands; opno++)
	{
	  rtx op = recog_data.operand[opno];
	  enum machine_mode mode = GET_MODE (op);
	  const char *p = constraints[opno];
	  int offset = 0;
	  int win = 0;
	  int val;
	  int len;

	  earlyclobber[opno] = 0;

	  /* A unary operator may be accepted by the predicate, but it
	     is irrelevant for matching constraints.  */
	  if (GET_RTX_CLASS (GET_CODE (op)) == '1')
	    op = XEXP (op, 0);

	  if (GET_CODE (op) == SUBREG)
	    {
	      if (GET_CODE (SUBREG_REG (op)) == REG
		  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
		offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
					      GET_MODE (SUBREG_REG (op)),
					      SUBREG_BYTE (op),
					      GET_MODE (op));
	      op = SUBREG_REG (op);
	    }

	  /* An empty constraint or empty alternative
	     allows anything which matched the pattern.  */
	  if (*p == 0 || *p == ',')
	    win = 1;

	  do
	    switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
	      {
	      case '\0':
		len = 0;
		break;
	      case ',':
		c = '\0';
		break;

	      case '?': case '!': case '*': case '%':
	      case '=': case '+':
		break;

	      case '#':
		/* Ignore rest of this alternative as far as
		   constraint checking is concerned.  */
		do
		  p++;
		while (*p && *p != ',');
		len = 0;
		break;

	      case '&':
		earlyclobber[opno] = 1;
		break;

	      case '0': case '1': case '2': case '3': case '4':
	      case '5': case '6': case '7': case '8': case '9':
		{
		  /* This operand must be the same as a previous one.
		     This kind of constraint is used for instructions such
		     as add when they take only two operands.

		     Note that the lower-numbered operand is passed first.

		     If we are not testing strictly, assume that this
		     constraint will be satisfied.  */

		  char *end;
		  int match;

		  match = strtoul (p, &end, 10);
		  p = end;

		  if (strict < 0)
		    val = 1;
		  else
		    {
		      rtx op1 = recog_data.operand[match];
		      rtx op2 = recog_data.operand[opno];

		      /* A unary operator may be accepted by the predicate,
			 but it is irrelevant for matching constraints.  */
		      if (GET_RTX_CLASS (GET_CODE (op1)) == '1')
			op1 = XEXP (op1, 0);
		      if (GET_RTX_CLASS (GET_CODE (op2)) == '1')
			op2 = XEXP (op2, 0);

		      val = operands_match_p (op1, op2);
		    }

		  matching_operands[opno] = match;
		  matching_operands[match] = opno;

		  if (val != 0)
		    win = 1;

		  /* If output is *x and input is *--x, arrange later
		     to change the output to *--x as well, since the
		     output op is the one that will be printed.  */
		  if (val == 2 && strict > 0)
		    {
		      funny_match[funny_match_index].this = opno;
		      funny_match[funny_match_index++].other = match;
		    }
		}
		len = 0;
		break;

	      case 'p':
		/* p is used for address_operands.  When we are called by
		   gen_reload, no one will have checked that the address is
		   strictly valid, i.e., that all pseudos requiring hard regs
		   have gotten them.  */
		if (strict <= 0
		    || (strict_memory_address_p (recog_data.operand_mode[opno],
						 op)))
		  win = 1;
		break;

		/* No need to check general_operand again;
		   it was done in insn-recog.c.  */
	      case 'g':
		/* Anything goes unless it is a REG and really has a hard reg
		   but the hard reg is not in the class GENERAL_REGS.  */
		if (strict < 0
		    || GENERAL_REGS == ALL_REGS
		    || GET_CODE (op) != REG
		    || (reload_in_progress
			&& REGNO (op) >= FIRST_PSEUDO_REGISTER)
		    || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
		  win = 1;
		break;

	      case 'X':
		/* This is used for a MATCH_SCRATCH in the cases when
		   we don't actually need anything.  So anything goes
		   any time.  */
		win = 1;
		break;

	      case 'm':
		if (GET_CODE (op) == MEM
		    /* Before reload, accept what reload can turn into mem.  */
		    || (strict < 0 && CONSTANT_P (op))
		    /* During reload, accept a pseudo  */
		    || (reload_in_progress && GET_CODE (op) == REG
			&& REGNO (op) >= FIRST_PSEUDO_REGISTER))
		  win = 1;
		break;

	      case '<':
		if (GET_CODE (op) == MEM
		    && (GET_CODE (XEXP (op, 0)) == PRE_DEC
			|| GET_CODE (XEXP (op, 0)) == POST_DEC))
		  win = 1;
		break;

	      case '>':
		if (GET_CODE (op) == MEM
		    && (GET_CODE (XEXP (op, 0)) == PRE_INC
			|| GET_CODE (XEXP (op, 0)) == POST_INC))
		  win = 1;
		break;

	      case 'E':
	      case 'F':
		if (GET_CODE (op) == CONST_DOUBLE
		    || (GET_CODE (op) == CONST_VECTOR
			&& GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
		  win = 1;
		break;

	      case 'G':
	      case 'H':
		if (GET_CODE (op) == CONST_DOUBLE
		    && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
		  win = 1;
		break;

	      case 's':
		if (GET_CODE (op) == CONST_INT
		    || (GET_CODE (op) == CONST_DOUBLE
			&& GET_MODE (op) == VOIDmode))
		  break;
	      case 'i':
		if (CONSTANT_P (op))
		  win = 1;
		break;

	      case 'n':
		if (GET_CODE (op) == CONST_INT
		    || (GET_CODE (op) == CONST_DOUBLE
			&& GET_MODE (op) == VOIDmode))
		  win = 1;
		break;

	      case 'I': case 'J': case 'K': case 'L':
	      case 'M': case 'N': case 'O': case 'P':
		if (GET_CODE (op) == CONST_INT
		    && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
		  win = 1;
		break;

	      case 'V':
		if (GET_CODE (op) == MEM
		    && ((strict > 0 && ! offsettable_memref_p (op))
			|| (strict < 0
			    && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
			|| (reload_in_progress
			    && !(GET_CODE (op) == REG
				 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
		  win = 1;
		break;

	      case 'o':
		if ((strict > 0 && offsettable_memref_p (op))
		    || (strict == 0 && offsettable_nonstrict_memref_p (op))
		    /* Before reload, accept what reload can handle.  */
		    || (strict < 0
			&& (CONSTANT_P (op) || GET_CODE (op) == MEM))
		    /* During reload, accept a pseudo  */
		    || (reload_in_progress && GET_CODE (op) == REG
			&& REGNO (op) >= FIRST_PSEUDO_REGISTER))
		  win = 1;
		break;

	      default:
		{
		  enum reg_class class;

		  class = (c == 'r'
			   ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
		  if (class != NO_REGS)
		    {
		      if (strict < 0
			  || (strict == 0
			      && GET_CODE (op) == REG
			      && REGNO (op) >= FIRST_PSEUDO_REGISTER)
			  || (strict == 0 && GET_CODE (op) == SCRATCH)
			  || (GET_CODE (op) == REG
			      && reg_fits_class_p (op, class, offset, mode)))
			win = 1;
		    }
#ifdef EXTRA_CONSTRAINT_STR
		  else if (EXTRA_CONSTRAINT_STR (op, c, p))
		    win = 1;

		  else if (EXTRA_MEMORY_CONSTRAINT (c, p)
			   /* Every memory operand can be reloaded to fit.  */
			   && ((strict < 0 && GET_CODE (op) == MEM)
			       /* Before reload, accept what reload can turn
				  into mem.  */
			       || (strict < 0 && CONSTANT_P (op))
			       /* During reload, accept a pseudo  */
			       || (reload_in_progress && GET_CODE (op) == REG
				   && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
		    win = 1;
		  else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
			   /* Every address operand can be reloaded to fit.  */
			   && strict < 0)
		    win = 1;
#endif
		  break;
		}
	      }
	  while (p += len, c);

	  constraints[opno] = p;
	  /* If this operand did not win somehow,
	     this alternative loses.  */
	  if (! win)
	    lose = 1;
	}
      /* This alternative won; the operands are ok.
	 Change whichever operands this alternative says to change.  */
      if (! lose)
	{
	  int opno, eopno;

	  /* See if any earlyclobber operand conflicts with some other
	     operand.  */

	  if (strict > 0)
	    for (eopno = 0; eopno < recog_data.n_operands; eopno++)
	      /* Ignore earlyclobber operands now in memory,
		 because we would often report failure when we have
		 two memory operands, one of which was formerly a REG.  */
	      if (earlyclobber[eopno]
		  && GET_CODE (recog_data.operand[eopno]) == REG)
		for (opno = 0; opno < recog_data.n_operands; opno++)
		  if ((GET_CODE (recog_data.operand[opno]) == MEM
		       || recog_data.operand_type[opno] != OP_OUT)
		      && opno != eopno
		      /* Ignore things like match_operator operands.  */
		      && *recog_data.constraints[opno] != 0
		      && ! (matching_operands[opno] == eopno
			    && operands_match_p (recog_data.operand[opno],
						 recog_data.operand[eopno]))
		      && ! safe_from_earlyclobber (recog_data.operand[opno],
						   recog_data.operand[eopno]))
		    lose = 1;

	  if (! lose)
	    {
	      while (--funny_match_index >= 0)
		{
		  recog_data.operand[funny_match[funny_match_index].other]
		    = recog_data.operand[funny_match[funny_match_index].this];
		}

	      return 1;
	    }
	}

      which_alternative++;
    }
  while (which_alternative < recog_data.n_alternatives);

  which_alternative = -1;
  /* If we are about to reject this, but we are not to test strictly,
     try a very loose test.  Only return failure if it fails also.  */
  if (strict == 0)
    return constrain_operands (-1);
  else
    return 0;
}
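
/* Usage sketch (illustrative only): the usual calling sequence pairs
   extract_insn with constrain_operands, passing a positive STRICT only
   once reload has completed.  A hypothetical validity check might look
   like this:

     static int
     insn_constraints_ok (rtx insn)
     {
       extract_insn (insn);
       return constrain_operands (reload_completed);
     }

   On success, which_alternative holds the index of the alternative that
   matched, as described above.  */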
/* Return 1 iff OPERAND (assumed to be a REG rtx)
   is a hard reg in class CLASS when its regno is offset by OFFSET
   and changed to mode MODE.
   If REG occupies multiple hard regs, all of them must be in CLASS.  */

int
reg_fits_class_p (rtx operand, enum reg_class class, int offset,
		  enum machine_mode mode)
{
  int regno = REGNO (operand);
  if (regno < FIRST_PSEUDO_REGISTER
      && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
			    regno + offset))
    {
      int sr;
      regno += offset;
      for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
	   sr > 0; sr--)
	if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
				 regno + sr))
	  break;
      return sr == 0;
    }

  return 0;
}
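
/* Example (illustrative): on a target whose hard register 0 belongs to
   GENERAL_REGS and supports SImode, the call

     reg_fits_class_p (gen_rtx_REG (SImode, 0), GENERAL_REGS, 0, SImode)

   returns 1.  The OFFSET argument lets callers such as
   constrain_operands test where a SUBREG of the register would land
   rather than the register itself.  */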
/* Split single instruction.  Helper function for split_all_insns.
   Return last insn in the sequence if successful, or NULL if
   unsuccessful.  */

static rtx
split_insn (rtx insn)
{
  rtx set;
  if (!INSN_P (insn))
    ;
  /* Don't split no-op move insns.  These should silently
     disappear later in final.  Splitting such insns would
     break the code that handles REG_NO_CONFLICT blocks.  */
  else if ((set = single_set (insn)) != NULL && set_noop_p (set))
    {
      /* Nops get in the way while scheduling, so delete them
	 now if register allocation has already been done.  It
	 is too risky to try to do this before register
	 allocation, and there are unlikely to be very many
	 nops then anyways.  */
      if (reload_completed)
	delete_insn_and_edges (insn);
    }
  else
    {
      /* Split insns here to get max fine-grain parallelism.  */
      rtx first = PREV_INSN (insn);
      rtx last = try_split (PATTERN (insn), insn, 1);

      if (last != insn)
	{
	  /* try_split returns the NOTE that INSN became.  */
	  PUT_CODE (insn, NOTE);
	  NOTE_SOURCE_FILE (insn) = 0;
	  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;

	  /* ??? Coddle to md files that generate subregs in post-
	     reload splitters instead of computing the proper
	     hard register.  */
	  if (reload_completed && first != last)
	    {
	      first = NEXT_INSN (first);
	      while (1)
		{
		  if (INSN_P (first))
		    cleanup_subreg_operands (first);
		  if (first == last)
		    break;
		  first = NEXT_INSN (first);
		}
	    }
	  return last;
	}
    }

  return NULL_RTX;
}
/* Split all insns in the function.  If UPD_LIFE, update life info after.  */

void
split_all_insns (int upd_life)
{
  sbitmap blocks;
  bool changed;
  basic_block bb;

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (blocks);
  changed = false;

  FOR_EACH_BB_REVERSE (bb)
    {
      rtx insn, next;
      bool finish = false;

      for (insn = bb->head; !finish ; insn = next)
	{
	  rtx last;

	  /* Can't use `next_real_insn' because that might go across
	     CODE_LABELS and short-out basic blocks.  */
	  next = NEXT_INSN (insn);
	  finish = (insn == bb->end);
	  last = split_insn (insn);
	  if (last)
	    {
	      /* The split sequence may include barrier, but the
		 BB boundary we are interested in will be set to previous
		 one.  */
	      while (GET_CODE (last) == BARRIER)
		last = PREV_INSN (last);
	      SET_BIT (blocks, bb->index);
	      changed = true;
	    }
	}
    }

  if (changed)
    {
      int old_last_basic_block = last_basic_block;

      find_many_sub_basic_blocks (blocks);

      if (old_last_basic_block != last_basic_block && upd_life)
	blocks = sbitmap_resize (blocks, last_basic_block, 1);
    }

  if (changed && upd_life)
    update_life_info (blocks, UPDATE_LIFE_GLOBAL_RM_NOTES,
		      PROP_DEATH_NOTES | PROP_REG_INFO);

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  sbitmap_free (blocks);
}
/* Same as split_all_insns, but do not expect CFG to be available.
   Used by machine dependent reorg passes.  */

void
split_all_insns_noflow (void)
{
  rtx next, insn;

  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      split_insn (insn);
    }
}
#ifdef HAVE_peephole2

struct peep2_insn_data
{
  rtx insn;
  regset live_before;
};

static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
static int peep2_current;

/* A non-insn marker indicating the last insn of the block.
   The live_before regset for this element is correct, indicating
   global_live_at_end for the block.  */
#define PEEP2_EOB pc_rtx

/* Return the Nth non-note insn after `current', or return NULL_RTX if it
   does not exist.  Used by the recognizer to find the next insn to match
   in a multi-insn pattern.  */
rtx
peep2_next_insn (int n)
{
  if (n >= MAX_INSNS_PER_PEEP2 + 1)
    abort ();

  n += peep2_current;
  if (n >= MAX_INSNS_PER_PEEP2 + 1)
    n -= MAX_INSNS_PER_PEEP2 + 1;

  if (peep2_insn_data[n].insn == PEEP2_EOB)
    return NULL_RTX;

  return peep2_insn_data[n].insn;
}
/* Return true if REGNO is dead before the Nth non-note insn
   after `current'.  */

int
peep2_regno_dead_p (int ofs, int regno)
{
  if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
    abort ();

  ofs += peep2_current;
  if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
    ofs -= MAX_INSNS_PER_PEEP2 + 1;

  if (peep2_insn_data[ofs].insn == NULL_RTX)
    abort ();

  return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
}
/* Similarly for a REG.  */

int
peep2_reg_dead_p (int ofs, rtx reg)
{
  int regno, n;

  if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
    abort ();

  ofs += peep2_current;
  if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
    ofs -= MAX_INSNS_PER_PEEP2 + 1;

  if (peep2_insn_data[ofs].insn == NULL_RTX)
    abort ();

  regno = REGNO (reg);
  n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
  while (--n >= 0)
    if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
      return 0;
  return 1;
}
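
/* Sketch of the intended use from a machine description (a hypothetical
   pattern, not taken from any real .md file): a define_peephole2 can
   test liveness in its condition string, e.g.

     (define_peephole2
       [(set (match_operand:SI 0 "register_operand" "")
	     (match_operand:SI 1 "register_operand" ""))
	(set (match_dup 1) (match_dup 0))]
       "peep2_reg_dead_p (2, operands[1])"
       [(set (match_dup 0) (match_dup 1))])

   Here peep2_reg_dead_p (2, operands[1]) asks whether operand 1 is dead
   after the two matched insns, i.e. before the next slot in the buffer
   maintained above.  */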
/* Try to find a hard register of mode MODE, matching the register class in
   CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
   remains available until the end of LAST_INSN.  LAST_INSN may be NULL_RTX,
   in which case the only condition is that the register must be available
   before CURRENT_INSN.

   Registers that already have bits set in REG_SET will not be considered.

   If an appropriate register is available, it will be returned and the
   corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
   returned.  */

rtx
peep2_find_free_register (int from, int to, const char *class_str,
			  enum machine_mode mode, HARD_REG_SET *reg_set)
{
  static int search_ofs;
  enum reg_class class;
  HARD_REG_SET live;
  int i;

  if (from >= MAX_INSNS_PER_PEEP2 + 1 || to >= MAX_INSNS_PER_PEEP2 + 1)
    abort ();

  from += peep2_current;
  if (from >= MAX_INSNS_PER_PEEP2 + 1)
    from -= MAX_INSNS_PER_PEEP2 + 1;
  to += peep2_current;
  if (to >= MAX_INSNS_PER_PEEP2 + 1)
    to -= MAX_INSNS_PER_PEEP2 + 1;

  if (peep2_insn_data[from].insn == NULL_RTX)
    abort ();
  REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);

  while (from != to)
    {
      HARD_REG_SET this_live;

      if (++from >= MAX_INSNS_PER_PEEP2 + 1)
	from = 0;
      if (peep2_insn_data[from].insn == NULL_RTX)
	abort ();
      REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
      IOR_HARD_REG_SET (live, this_live);
    }

  class = (class_str[0] == 'r' ? GENERAL_REGS
	   : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int raw_regno, regno, success, j;

      /* Distribute the free registers as much as possible.  */
      raw_regno = search_ofs + i;
      if (raw_regno >= FIRST_PSEUDO_REGISTER)
	raw_regno -= FIRST_PSEUDO_REGISTER;
#ifdef REG_ALLOC_ORDER
      regno = reg_alloc_order[raw_regno];
#else
      regno = raw_regno;
#endif

      /* Don't allocate fixed registers.  */
      if (fixed_regs[regno])
	continue;
      /* Make sure the register is of the right class.  */
      if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno))
	continue;
      /* And can support the mode we need.  */
      if (! HARD_REGNO_MODE_OK (regno, mode))
	continue;
      /* And that we don't create an extra save/restore.  */
      if (! call_used_regs[regno] && ! regs_ever_live[regno])
	continue;
      /* And we don't clobber traceback for noreturn functions.  */
      if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
	  && (! reload_completed || frame_pointer_needed))
	continue;

      success = 1;
      for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
	{
	  if (TEST_HARD_REG_BIT (*reg_set, regno + j)
	      || TEST_HARD_REG_BIT (live, regno + j))
	    {
	      success = 0;
	      break;
	    }
	}
      if (success)
	{
	  for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
	    SET_HARD_REG_BIT (*reg_set, regno + j);

	  /* Start the next search with the next register.  */
	  if (++raw_regno >= FIRST_PSEUDO_REGISTER)
	    raw_regno = 0;
	  search_ofs = raw_regno;

	  return gen_rtx_REG (mode, regno);
	}
    }

  search_ofs = 0;
  return NULL_RTX;
}
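
/* Sketch (hypothetical) of how this function is reached: a
   define_peephole2 that begins with a match_scratch, such as

     (define_peephole2
       [(match_scratch:SI 2 "r")
	(set (match_operand:SI 0 "memory_operand" "")
	     (match_operand:SI 1 "memory_operand" ""))]
       ""
       [(set (match_dup 2) (match_dup 1))
	(set (match_dup 0) (match_dup 2))])

   is compiled by genrecog into code that calls
   peep2_find_free_register; if no register of class 'r' is free across
   the matched insns, the peephole simply does not apply.  */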
/* Perform the peephole2 optimization pass.  */

void
peephole2_optimize (FILE *dump_file ATTRIBUTE_UNUSED)
{
  regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
  rtx insn, prev;
  regset live;
  int i;
  basic_block bb;
#ifdef HAVE_conditional_execution
  sbitmap blocks;
  bool changed;
#endif
  bool do_cleanup_cfg = false;
  bool do_rebuild_jump_labels = false;

  /* Initialize the regsets we're going to use.  */
  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    peep2_insn_data[i].live_before = INITIALIZE_REG_SET (rs_heads[i]);
  live = INITIALIZE_REG_SET (rs_heads[i]);

#ifdef HAVE_conditional_execution
  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (blocks);
  changed = false;
#else
  count_or_remove_death_notes (NULL, 1);
#endif

  FOR_EACH_BB_REVERSE (bb)
    {
      struct propagate_block_info *pbi;

      /* Indicate that all slots except the last holds invalid data.  */
      for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
	peep2_insn_data[i].insn = NULL_RTX;

      /* Indicate that the last slot contains live_after data.  */
      peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
      peep2_current = MAX_INSNS_PER_PEEP2;

      /* Start up propagation.  */
      COPY_REG_SET (live, bb->global_live_at_end);
      COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);

#ifdef HAVE_conditional_execution
      pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
#else
      pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
#endif

      for (insn = bb->end; ; insn = prev)
	{
	  prev = PREV_INSN (insn);
	  if (INSN_P (insn))
	    {
	      rtx try, before_try, x;
	      int match_len;
	      rtx note;
	      bool was_call = false;

	      /* Record this insn.  */
	      if (--peep2_current < 0)
		peep2_current = MAX_INSNS_PER_PEEP2;
	      peep2_insn_data[peep2_current].insn = insn;
	      propagate_one_insn (pbi, insn);
	      COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);

	      /* Match the peephole.  */
	      try = peephole2_insns (PATTERN (insn), insn, &match_len);
	      if (try != NULL)
		{
		  /* If we are splitting a CALL_INSN, look for the CALL_INSN
		     in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
		     cfg-related call notes.  */
		  for (i = 0; i <= match_len; ++i)
		    {
		      int j;
		      rtx old_insn, new_insn, note;

		      j = i + peep2_current;
		      if (j >= MAX_INSNS_PER_PEEP2 + 1)
			j -= MAX_INSNS_PER_PEEP2 + 1;
		      old_insn = peep2_insn_data[j].insn;
		      if (GET_CODE (old_insn) != CALL_INSN)
			continue;
		      was_call = true;

		      new_insn = try;
		      while (new_insn != NULL_RTX)
			{
			  if (GET_CODE (new_insn) == CALL_INSN)
			    break;
			  new_insn = NEXT_INSN (new_insn);
			}

		      if (new_insn == NULL_RTX)
			abort ();

		      CALL_INSN_FUNCTION_USAGE (new_insn)
			= CALL_INSN_FUNCTION_USAGE (old_insn);

		      for (note = REG_NOTES (old_insn);
			   note;
			   note = XEXP (note, 1))
			switch (REG_NOTE_KIND (note))
			  {
			  case REG_NORETURN:
			  case REG_SETJMP:
			  case REG_ALWAYS_RETURN:
			    REG_NOTES (new_insn)
			      = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
						   XEXP (note, 0),
						   REG_NOTES (new_insn));
			  default:
			    /* Discard all other reg notes.  */
			    break;
			  }

		      /* Croak if there is another call in the sequence.  */
		      while (++i <= match_len)
			{
			  j = i + peep2_current;
			  if (j >= MAX_INSNS_PER_PEEP2 + 1)
			    j -= MAX_INSNS_PER_PEEP2 + 1;
			  old_insn = peep2_insn_data[j].insn;
			  if (GET_CODE (old_insn) == CALL_INSN)
			    abort ();
			}
		      break;
		    }

		  i = match_len + peep2_current;
		  if (i >= MAX_INSNS_PER_PEEP2 + 1)
		    i -= MAX_INSNS_PER_PEEP2 + 1;

		  note = find_reg_note (peep2_insn_data[i].insn,
					REG_EH_REGION, NULL_RTX);

		  /* Replace the old sequence with the new.  */
		  try = emit_insn_after_setloc (try, peep2_insn_data[i].insn,
						INSN_LOCATOR (peep2_insn_data[i].insn));
		  before_try = PREV_INSN (insn);
		  delete_insn_chain (insn, peep2_insn_data[i].insn);

		  /* Re-insert the EH_REGION notes.  */
		  if (note || (was_call && nonlocal_goto_handler_labels))
		    {
		      edge eh_edge;

		      for (eh_edge = bb->succ; eh_edge
			   ; eh_edge = eh_edge->succ_next)
			if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
			  break;

		      for (x = try ; x != before_try ; x = PREV_INSN (x))
			if (GET_CODE (x) == CALL_INSN
			    || (flag_non_call_exceptions
				&& may_trap_p (PATTERN (x))
				&& !find_reg_note (x, REG_EH_REGION, NULL)))
			  {
			    if (note)
			      REG_NOTES (x)
				= gen_rtx_EXPR_LIST (REG_EH_REGION,
						     XEXP (note, 0),
						     REG_NOTES (x));

			    if (x != bb->end && eh_edge)
			      {
				edge nfte, nehe;
				int flags;

				nfte = split_block (bb, x);
				flags = (eh_edge->flags
					 & (EDGE_EH | EDGE_ABNORMAL));
				if (GET_CODE (x) == CALL_INSN)
				  flags |= EDGE_ABNORMAL_CALL;
				nehe = make_edge (nfte->src, eh_edge->dest,
						  flags);

				nehe->probability = eh_edge->probability;
				nfte->probability
				  = REG_BR_PROB_BASE - nehe->probability;

				do_cleanup_cfg |= purge_dead_edges (nfte->dest);
#ifdef HAVE_conditional_execution
				SET_BIT (blocks, nfte->dest->index);
				changed = true;
#endif
				bb = nfte->src;
				eh_edge = nehe;
			      }
			  }

		      /* Converting possibly trapping insn to non-trapping is
			 possible.  Zap dummy outgoing edges.  */
		      do_cleanup_cfg |= purge_dead_edges (bb);
		    }

#ifdef HAVE_conditional_execution
		  /* With conditional execution, we cannot back up the
		     live information so easily, since the conditional
		     death data structures are not so self-contained.
		     So record that we've made a modification to this
		     block and update life information at the end.  */
		  SET_BIT (blocks, bb->index);
		  changed = true;

		  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
		    peep2_insn_data[i].insn = NULL_RTX;
		  peep2_insn_data[peep2_current].insn = PEEP2_EOB;
#else
		  /* Back up lifetime information past the end of the
		     newly created sequence.  */
		  if (++i >= MAX_INSNS_PER_PEEP2 + 1)
		    i = 0;
		  COPY_REG_SET (live, peep2_insn_data[i].live_before);

		  /* Update life information for the new sequence.  */
		  x = try;
		  do
		    {
		      if (INSN_P (x))
			{
			  if (--i < 0)
			    i = MAX_INSNS_PER_PEEP2;
			  peep2_insn_data[i].insn = x;
			  propagate_one_insn (pbi, x);
			  COPY_REG_SET (peep2_insn_data[i].live_before, live);
			}
		      x = PREV_INSN (x);
		    }
		  while (x != prev);

		  /* ??? Should verify that LIVE now matches what we
		     had before the new sequence.  */

		  peep2_current = i;
#endif

		  /* If we generated a jump instruction, it won't have
		     JUMP_LABEL set.  Recompute after we're done.  */
		  for (x = try; x != before_try; x = PREV_INSN (x))
		    if (GET_CODE (x) == JUMP_INSN)
		      {
			do_rebuild_jump_labels = true;
			break;
		      }
		}
	    }

	  if (insn == bb->head)
	    break;
	}

      free_propagate_block_info (pbi);
    }

  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    FREE_REG_SET (peep2_insn_data[i].live_before);
  FREE_REG_SET (live);

  if (do_rebuild_jump_labels)
    rebuild_jump_labels (get_insns ());

  /* If we eliminated EH edges, we may be able to merge blocks.  Further,
     we've changed global life since exception handlers are no longer
     reachable.  */
  if (do_cleanup_cfg)
    {
      cleanup_cfg (0);
      update_life_info (0, UPDATE_LIFE_GLOBAL_RM_NOTES, PROP_DEATH_NOTES);
    }
#ifdef HAVE_conditional_execution
  else
    {
      count_or_remove_death_notes (blocks, 1);
      update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
    }
  sbitmap_free (blocks);
#endif
}
#endif /* HAVE_peephole2 */
/* Common predicates for use with define_bypass.  */

/* True if the dependency between OUT_INSN and IN_INSN is on the store
   data not the address operand(s) of the store.  IN_INSN must be
   single_set.  OUT_INSN must be either a single_set or a PARALLEL with
   SETs inside.  */

int
store_data_bypass_p (rtx out_insn, rtx in_insn)
{
  rtx out_set, in_set;

  in_set = single_set (in_insn);
  if (! in_set)
    abort ();

  if (GET_CODE (SET_DEST (in_set)) != MEM)
    return false;

  out_set = single_set (out_insn);
  if (out_set)
    {
      if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
	return false;
    }
  else
    {
      rtx out_pat;
      int i;

      out_pat = PATTERN (out_insn);
      if (GET_CODE (out_pat) != PARALLEL)
	abort ();

      for (i = 0; i < XVECLEN (out_pat, 0); i++)
	{
	  rtx exp = XVECEXP (out_pat, 0, i);

	  if (GET_CODE (exp) == CLOBBER)
	    continue;

	  if (GET_CODE (exp) != SET)
	    abort ();

	  if (reg_mentioned_p (SET_DEST (exp), SET_DEST (in_set)))
	    return false;
	}
    }

  return true;
}
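
/* Sketch (hypothetical pipeline description) of the intended use:

     (define_bypass 1 "cpu_alu" "cpu_store" "store_data_bypass_p")

   This grants the shorter latency only when the result of the ALU insn
   feeds the store's data operand; a dependency through the address
   operands keeps the default latency, which is exactly the distinction
   tested above.  */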
/* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
   condition, and not the THEN or ELSE branch.  OUT_INSN may be either a single
   or multiple set; IN_INSN should be single_set for truth, but for convenience
   of insn categorization may be any JUMP or CALL insn.  */

int
if_test_bypass_p (rtx out_insn, rtx in_insn)
{
  rtx out_set, in_set;

  in_set = single_set (in_insn);
  if (! in_set)
    {
      if (GET_CODE (in_insn) == JUMP_INSN || GET_CODE (in_insn) == CALL_INSN)
	return false;
      abort ();
    }

  if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
    return false;
  in_set = SET_SRC (in_set);

  out_set = single_set (out_insn);
  if (out_set)
    {
      if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
	  || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
	return false;
    }
  else
    {
      rtx out_pat;
      int i;

      out_pat = PATTERN (out_insn);
      if (GET_CODE (out_pat) != PARALLEL)
	abort ();

      for (i = 0; i < XVECLEN (out_pat, 0); i++)
	{
	  rtx exp = XVECEXP (out_pat, 0, i);

	  if (GET_CODE (exp) == CLOBBER)
	    continue;

	  if (GET_CODE (exp) != SET)
	    abort ();

	  /* Check each SET's destination, not OUT_SET, which is null in
	     this branch.  */
	  if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
	      || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
	    return false;
	}
    }

  return true;
}
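
/* Sketch (hypothetical) of the matching define_bypass usage:

     (define_bypass 1 "cpu_compare" "cpu_cmov" "if_test_bypass_p")

   The reduced latency then applies only when the first insn's result is
   consumed by the IF_THEN_ELSE condition of the conditional move, not
   by either of the values being selected.  */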