1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
25 #include "coretypes.h"
29 #include "insn-config.h"
30 #include "insn-attr.h"
31 #include "hard-reg-set.h"
39 #include "basic-block.h"
/* Default push addressing mode: pre-decrement when the stack grows
   downward, pre-increment otherwise.  Targets may override.  */
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Default pop addressing mode: the mirror image of STACK_PUSH_CODE.  */
#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
/* Forward declarations for file-local helpers defined below.  */
static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx);
static rtx *find_single_use_1 (rtx, rtx *);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx);
/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in regclass.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

static int volatile_ok;

/* Operand data for the insn currently being recognized; filled in by
   extract_insn and read by constrain_operands and friends.  */
struct recog_data recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;
/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;
/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  /* Disallow volatile operands; see the comment on volatile_ok.  */
  volatile_ok = 0;
}
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.
   Returns nonzero when every operand satisfies its constraint.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = alloca (noperands * sizeof (rtx));
  constraints = alloca (noperands * sizeof (char *));

  decode_asm_operands (x, operands, NULL, constraints, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      /* A single digit is a matching constraint: use the constraint of
         the operand it refers to.  */
      if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
        c = constraints[c[0] - '0'];

      if (! asm_operand_ok (operands[i], c))
        return 0;
    }

  return 1;
}
/* Static data for the next two routines.  */

/* One pending (possibly grouped) replacement: where it was made, what
   was there before, and the insn's previous INSN_CODE so the change can
   be backed out by cancel_changes.  */
typedef struct change_t
{
  rtx object;      /* Insn or MEM being changed (0 for unchecked changes).  */
  int old_code;    /* Saved INSN_CODE of OBJECT, restored on cancel.  */
  rtx *loc;        /* Location within OBJECT where the change was made.  */
  rtx old;         /* RTX previously at *LOC.  */
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;
/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

int
validate_change (rtx object, rtx *loc, rtx new, int in_group)
{
  rtx old = *loc;

  if (old == new || rtx_equal_p (old, new))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes = xrealloc (changes, sizeof (change_t) * changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
239 /* Function to be passed to for_each_rtx to test whether a piece of
240 RTL contains any mem/v. */
242 volatile_mem_p (rtx
*x
, void *data ATTRIBUTE_UNUSED
)
244 return (MEM_P (*x
) && MEM_VOLATILE_P (*x
));
/* Same as validate_change, but doesn't support groups, and it accepts
   volatile mems if they're already present in the original insn.  */

int
validate_change_maybe_volatile (rtx object, rtx *loc, rtx new)
{
  int result;

  if (validate_change (object, loc, new, 0))
    return 1;

  if (volatile_ok
      /* If there isn't a volatile MEM, there's nothing we can do.  */
      || !for_each_rtx (&PATTERN (object), volatile_mem_p, 0)
      /* Make sure we're not adding or removing volatile MEMs.  */
      || for_each_rtx (loc, volatile_mem_p, 0)
      || for_each_rtx (&new, volatile_mem_p, 0)
      || !insn_invalid_p (object))
    return 0;

  /* Temporarily accept volatile operands and retry; the insn already
     contained a volatile MEM so we are not introducing one.  */
  volatile_ok = 1;

  gcc_assert (!insn_invalid_p (object));

  result = validate_change (object, loc, new, 0);

  volatile_ok = 0;

  return result;
}
/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.
   Returns 1 if INSN is now invalid, 0 if it recognizes (possibly after
   adding needed CLOBBERs); on success INSN_CODE is updated.  */

int
insn_invalid_p (rtx insn)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operand aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}
/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}
/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is in insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (MEM_P (object))
        {
          if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
            break;
        }
      else if (insn_invalid_p (object))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  return (i == num_changes);
}
/* A group of changes has previously been issued with validate_change and
   verified with verify_changes.  Update the BB_DIRTY flags of the affected
   blocks, and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  basic_block bb;

  for (i = 0; i < num_changes; i++)
    if (changes[i].object
        && INSN_P (changes[i].object)
        && (bb = BLOCK_FOR_INSN (changes[i].object)))
      bb->flags |= BB_DIRTY;

  num_changes = 0;
}
/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}
/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}
/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      /* Restore the saved INSN_CODE for insns (MEMs have none).  */
      if (changes[i].object && !MEM_P (changes[i].object))
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

/* NOTE(review): the switch-case labels of this function were lost in
   extraction and have been restored from upstream recog.c — verify
   against version control.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;
  rtx new;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     done by regmove, so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        {
          if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
              && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
            {
              /* Verify that operands are really shared.  */
              gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
                          == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
                                                              (x, 0, j))));
              validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
                                      from, to, object);
            }
          else
            validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object);
        }
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
        else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_change (object, loc,
                       gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
                                       : swap_condition (code),
                                       GET_MODE (x), XEXP (x, 1),
                                       XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                    op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new)
            new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                             SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new)
        validate_change (object, loc, new, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && GET_CODE (XEXP (x, 2)) == CONST_INT
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extzv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}
/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn);
  return apply_change_group ();
}
/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
  return apply_change_group ();
}
/* Try replacing every occurrence of FROM in INSN with TO.  The changes
   are accumulated in the current group; the caller is responsible for
   applying or canceling them.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
}
/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;                     /* Old RTX */
  rtx to;                       /* New RTX */
  rtx insn;                     /* Insn in which substitution is occurring.  */
};
/* note_uses callback: perform the FROM->TO replacement described by
   DATA (a struct validate_replace_src_data) inside *X.  */
static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn);
}
/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  Changes are accumulated in the current group.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
          && ! inequality_comparisons_p (PATTERN (next)));
}
/* This is used by find_single_use to locate an rtx that contains exactly one
   use of DEST, which is typically either a REG or CC0.  It returns a
   pointer to the innermost rtx expression containing DEST.  Appearances of
   DEST that are being used to totally replace it are not counted.  */

/* NOTE(review): the leading switch cases were lost in extraction and
   restored from upstream recog.c — verify against version control.  */

static rtx *
find_single_use_1 (rtx dest, rtx *loc)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *result = 0;
  rtx *this_result;
  int i;
  const char *fmt;

  switch (code)
    {
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CLOBBER:
      return 0;

    case SET:
      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn uses DEST if
         it is mentioned in the destination or the source.  Otherwise, we
         need just check the source.  */
      if (GET_CODE (SET_DEST (x)) != CC0
          && GET_CODE (SET_DEST (x)) != PC
          && !REG_P (SET_DEST (x))
          && ! (GET_CODE (SET_DEST (x)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (x)))
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
        break;

      return find_single_use_1 (dest, &SET_SRC (x));

    case MEM:
    case SUBREG:
      return find_single_use_1 (dest, &XEXP (x, 0));

    default:
      break;
    }

  /* If it wasn't one of the common cases above, check each expression and
     vector of this code.  Look for a unique usage of DEST.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (dest == XEXP (x, i)
              || (REG_P (dest) && REG_P (XEXP (x, i))
                  && REGNO (dest) == REGNO (XEXP (x, i))))
            this_result = loc;
          else
            this_result = find_single_use_1 (dest, &XEXP (x, i));

          if (result == 0)
            result = this_result;
          else if (this_result)
            /* Duplicate usage.  */
            return 0;
        }
      else if (fmt[i] == 'E')
        {
          int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            {
              if (XVECEXP (x, i, j) == dest
                  || (REG_P (dest)
                      && REG_P (XVECEXP (x, i, j))
                      && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
                this_result = loc;
              else
                this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));

              if (result == 0)
                result = this_result;
              else if (this_result)
                /* Duplicate usage.  */
                return 0;
            }
        }
    }

  return result;
}
/* See if DEST, produced in INSN, is used only a single time in the
   sequel.  If so, return a pointer to the innermost rtx expression in which
   it is used.

   If PLOC is nonzero, *PLOC is set to the insn containing the single use.

   This routine will return usually zero either before flow is called (because
   there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
   note can't be trusted).

   If DEST is cc0_rtx, we look only at the next insn.  In that case, we don't
   care about REG_DEAD notes or LOG_LINKS.

   Otherwise, we find the single use by finding an insn that has a
   LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST.  If DEST is
   only referenced once in that insn, we know that it must be the first
   and last insn referencing DEST.  */

rtx *
find_single_use (rtx dest, rtx insn, rtx *ploc)
{
  rtx next;
  rtx *result;
  rtx link;

#ifdef HAVE_cc0
  if (dest == cc0_rtx)
    {
      next = NEXT_INSN (insn);
      if (next == 0
          || (!NONJUMP_INSN_P (next) && !JUMP_P (next)))
        return 0;

      result = find_single_use_1 (dest, &PATTERN (next));
      if (result && ploc)
        *ploc = next;
      return result;
    }
#endif

  if (reload_completed || reload_in_progress || !REG_P (dest))
    return 0;

  for (next = next_nonnote_insn (insn);
       next != 0 && !LABEL_P (next);
       next = next_nonnote_insn (next))
    if (INSN_P (next) && dead_or_set_p (next, dest))
      {
        /* DEST dies here; make sure this insn is linked back to INSN.  */
        for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
          if (XEXP (link, 0) == insn)
            break;

        if (link)
          {
            result = find_single_use_1 (dest, &PATTERN (next));
            if (ploc)
              *ploc = next;
            return result;
          }
      }

  return 0;
}
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   OP has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
            && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         reference to be explicit, so outlaw paradoxical SUBREGs.
         However, we must allow them after reload so that they can
         get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
          && MEM_P (sub))
        return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      /* Use the mem's mode, since it will be reloaded thus.  */
      if (memory_address_p (GET_MODE (op), y))
        return 1;
    }

  return 0;
}
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}
/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (sub))
        return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
        return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}
/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (REG_P (op)
              && REGNO (op) < FIRST_PSEUDO_REGISTER));
}
/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
          && LEGITIMATE_CONSTANT_P (op));
}
/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) != CONST_INT)
    return 0;

  /* Reject values that do not fit in MODE.  */
  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}
/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}
1184 /* Return 1 if OP is a general operand that is not an immediate operand. */
1187 nonimmediate_operand (rtx op
, enum machine_mode mode
)
1189 return (general_operand (op
, mode
) && ! CONSTANT_P (op
));
/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
         if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
          && GET_MODE_CLASS (mode) != MODE_INT
          && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
        return 0;

      if (GET_CODE (op) == CONST_INT
          && mode != VOIDmode
          && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
        return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
               || mode == VOIDmode)
              && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
              && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (SUBREG_REG (op)))
        return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      /* No rounding needed: the simple pre-inc/dec push form suffices.  */
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      /* With rounding, the push must be a PRE_MODIFY adjusting the stack
         pointer by exactly the rounded size, in the stack's direction.  */
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
#ifdef STACK_GROWS_DOWNWARD
          || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
          || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
          )
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}
1309 /* Return 1 if ADDR is a valid memory address for mode MODE. */
1312 memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED
, rtx addr
)
1314 GO_IF_LEGITIMATE_ADDRESS (mode
, addr
, win
);
1321 /* Return 1 if OP is a valid memory reference with mode MODE,
1322 including a valid address.
1324 The main use of this function is as a predicate in match_operand
1325 expressions in the machine description. */
1328 memory_operand (rtx op
, enum machine_mode mode
)
1332 if (! reload_completed
)
1333 /* Note that no SUBREG is a memory operand before end of reload pass,
1334 because (SUBREG (MEM...)) forces reloading into a register. */
1335 return MEM_P (op
) && general_operand (op
, mode
);
1337 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1341 if (GET_CODE (inner
) == SUBREG
)
1342 inner
= SUBREG_REG (inner
);
1344 return (MEM_P (inner
) && general_operand (op
, mode
));
1347 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1348 that is, a memory reference whose address is a general_operand. */
1351 indirect_operand (rtx op
, enum machine_mode mode
)
1353 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1354 if (! reload_completed
1355 && GET_CODE (op
) == SUBREG
&& MEM_P (SUBREG_REG (op
)))
1357 int offset
= SUBREG_BYTE (op
);
1358 rtx inner
= SUBREG_REG (op
);
1360 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1363 /* The only way that we can have a general_operand as the resulting
1364 address is if OFFSET is zero and the address already is an operand
1365 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1368 return ((offset
== 0 && general_operand (XEXP (inner
, 0), Pmode
))
1369 || (GET_CODE (XEXP (inner
, 0)) == PLUS
1370 && GET_CODE (XEXP (XEXP (inner
, 0), 1)) == CONST_INT
1371 && INTVAL (XEXP (XEXP (inner
, 0), 1)) == -offset
1372 && general_operand (XEXP (XEXP (inner
, 0), 0), Pmode
)));
1376 && memory_operand (op
, mode
)
1377 && general_operand (XEXP (op
, 0), Pmode
));
1380 /* Return 1 if this is a comparison operator. This allows the use of
1381 MATCH_OPERATOR to recognize all the branch insns. */
1384 comparison_operator (rtx op
, enum machine_mode mode
)
1386 return ((mode
== VOIDmode
|| GET_MODE (op
) == mode
)
1387 && COMPARISON_P (op
));
1390 /* If BODY is an insn body that uses ASM_OPERANDS,
1391 return the number of operands (both input and output) in the insn.
1392 Otherwise return -1. */
1395 asm_noperands (rtx body
)
1397 switch (GET_CODE (body
))
1400 /* No output operands: return number of input operands. */
1401 return ASM_OPERANDS_INPUT_LENGTH (body
);
1403 if (GET_CODE (SET_SRC (body
)) == ASM_OPERANDS
)
1404 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1405 return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body
)) + 1;
1409 if (GET_CODE (XVECEXP (body
, 0, 0)) == SET
1410 && GET_CODE (SET_SRC (XVECEXP (body
, 0, 0))) == ASM_OPERANDS
)
1412 /* Multiple output operands, or 1 output plus some clobbers:
1413 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1417 /* Count backwards through CLOBBERs to determine number of SETs. */
1418 for (i
= XVECLEN (body
, 0); i
> 0; i
--)
1420 if (GET_CODE (XVECEXP (body
, 0, i
- 1)) == SET
)
1422 if (GET_CODE (XVECEXP (body
, 0, i
- 1)) != CLOBBER
)
1426 /* N_SETS is now number of output operands. */
1429 /* Verify that all the SETs we have
1430 came from a single original asm_operands insn
1431 (so that invalid combinations are blocked). */
1432 for (i
= 0; i
< n_sets
; i
++)
1434 rtx elt
= XVECEXP (body
, 0, i
);
1435 if (GET_CODE (elt
) != SET
)
1437 if (GET_CODE (SET_SRC (elt
)) != ASM_OPERANDS
)
1439 /* If these ASM_OPERANDS rtx's came from different original insns
1440 then they aren't allowed together. */
1441 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt
))
1442 != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body
, 0, 0))))
1445 return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body
, 0, 0)))
1448 else if (GET_CODE (XVECEXP (body
, 0, 0)) == ASM_OPERANDS
)
1450 /* 0 outputs, but some clobbers:
1451 body is [(asm_operands ...) (clobber (reg ...))...]. */
1454 /* Make sure all the other parallel things really are clobbers. */
1455 for (i
= XVECLEN (body
, 0) - 1; i
> 0; i
--)
1456 if (GET_CODE (XVECEXP (body
, 0, i
)) != CLOBBER
)
1459 return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body
, 0, 0));
1468 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1469 copy its operands (both input and output) into the vector OPERANDS,
1470 the locations of the operands within the insn into the vector OPERAND_LOCS,
1471 and the constraints for the operands into CONSTRAINTS.
1472 Write the modes of the operands into MODES.
1473 Return the assembler-template.
1475 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1476 we don't store that info. */
1479 decode_asm_operands (rtx body
, rtx
*operands
, rtx
**operand_locs
,
1480 const char **constraints
, enum machine_mode
*modes
)
1484 const char *template = 0;
1486 if (GET_CODE (body
) == SET
&& GET_CODE (SET_SRC (body
)) == ASM_OPERANDS
)
1488 rtx asmop
= SET_SRC (body
);
1489 /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
1491 noperands
= ASM_OPERANDS_INPUT_LENGTH (asmop
) + 1;
1493 for (i
= 1; i
< noperands
; i
++)
1496 operand_locs
[i
] = &ASM_OPERANDS_INPUT (asmop
, i
- 1);
1498 operands
[i
] = ASM_OPERANDS_INPUT (asmop
, i
- 1);
1500 constraints
[i
] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop
, i
- 1);
1502 modes
[i
] = ASM_OPERANDS_INPUT_MODE (asmop
, i
- 1);
1505 /* The output is in the SET.
1506 Its constraint is in the ASM_OPERANDS itself. */
1508 operands
[0] = SET_DEST (body
);
1510 operand_locs
[0] = &SET_DEST (body
);
1512 constraints
[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop
);
1514 modes
[0] = GET_MODE (SET_DEST (body
));
1515 template = ASM_OPERANDS_TEMPLATE (asmop
);
1517 else if (GET_CODE (body
) == ASM_OPERANDS
)
1520 /* No output operands: BODY is (asm_operands ....). */
1522 noperands
= ASM_OPERANDS_INPUT_LENGTH (asmop
);
1524 /* The input operands are found in the 1st element vector. */
1525 /* Constraints for inputs are in the 2nd element vector. */
1526 for (i
= 0; i
< noperands
; i
++)
1529 operand_locs
[i
] = &ASM_OPERANDS_INPUT (asmop
, i
);
1531 operands
[i
] = ASM_OPERANDS_INPUT (asmop
, i
);
1533 constraints
[i
] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop
, i
);
1535 modes
[i
] = ASM_OPERANDS_INPUT_MODE (asmop
, i
);
1537 template = ASM_OPERANDS_TEMPLATE (asmop
);
1539 else if (GET_CODE (body
) == PARALLEL
1540 && GET_CODE (XVECEXP (body
, 0, 0)) == SET
1541 && GET_CODE (SET_SRC (XVECEXP (body
, 0, 0))) == ASM_OPERANDS
)
1543 rtx asmop
= SET_SRC (XVECEXP (body
, 0, 0));
1544 int nparallel
= XVECLEN (body
, 0); /* Includes CLOBBERs. */
1545 int nin
= ASM_OPERANDS_INPUT_LENGTH (asmop
);
1546 int nout
= 0; /* Does not include CLOBBERs. */
1548 /* At least one output, plus some CLOBBERs. */
1550 /* The outputs are in the SETs.
1551 Their constraints are in the ASM_OPERANDS itself. */
1552 for (i
= 0; i
< nparallel
; i
++)
1554 if (GET_CODE (XVECEXP (body
, 0, i
)) == CLOBBER
)
1555 break; /* Past last SET */
1558 operands
[i
] = SET_DEST (XVECEXP (body
, 0, i
));
1560 operand_locs
[i
] = &SET_DEST (XVECEXP (body
, 0, i
));
1562 constraints
[i
] = XSTR (SET_SRC (XVECEXP (body
, 0, i
)), 1);
1564 modes
[i
] = GET_MODE (SET_DEST (XVECEXP (body
, 0, i
)));
1568 for (i
= 0; i
< nin
; i
++)
1571 operand_locs
[i
+ nout
] = &ASM_OPERANDS_INPUT (asmop
, i
);
1573 operands
[i
+ nout
] = ASM_OPERANDS_INPUT (asmop
, i
);
1575 constraints
[i
+ nout
] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop
, i
);
1577 modes
[i
+ nout
] = ASM_OPERANDS_INPUT_MODE (asmop
, i
);
1580 template = ASM_OPERANDS_TEMPLATE (asmop
);
1582 else if (GET_CODE (body
) == PARALLEL
1583 && GET_CODE (XVECEXP (body
, 0, 0)) == ASM_OPERANDS
)
1585 /* No outputs, but some CLOBBERs. */
1587 rtx asmop
= XVECEXP (body
, 0, 0);
1588 int nin
= ASM_OPERANDS_INPUT_LENGTH (asmop
);
1590 for (i
= 0; i
< nin
; i
++)
1593 operand_locs
[i
] = &ASM_OPERANDS_INPUT (asmop
, i
);
1595 operands
[i
] = ASM_OPERANDS_INPUT (asmop
, i
);
1597 constraints
[i
] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop
, i
);
1599 modes
[i
] = ASM_OPERANDS_INPUT_MODE (asmop
, i
);
1602 template = ASM_OPERANDS_TEMPLATE (asmop
);
1608 /* Check if an asm_operand matches its constraints.
1609 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
1612 asm_operand_ok (rtx op
, const char *constraint
)
1616 /* Use constrain_operands after reload. */
1617 gcc_assert (!reload_completed
);
1621 char c
= *constraint
;
1638 case '0': case '1': case '2': case '3': case '4':
1639 case '5': case '6': case '7': case '8': case '9':
1640 /* For best results, our caller should have given us the
1641 proper matching constraint, but we can't actually fail
1642 the check if they didn't. Indicate that results are
1646 while (ISDIGIT (*constraint
));
1652 if (address_operand (op
, VOIDmode
))
1657 case 'V': /* non-offsettable */
1658 if (memory_operand (op
, VOIDmode
))
1662 case 'o': /* offsettable */
1663 if (offsettable_nonstrict_memref_p (op
))
1668 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1669 excepting those that expand_call created. Further, on some
1670 machines which do not have generalized auto inc/dec, an inc/dec
1671 is not a memory_operand.
1673 Match any memory and hope things are resolved after reload. */
1677 || GET_CODE (XEXP (op
, 0)) == PRE_DEC
1678 || GET_CODE (XEXP (op
, 0)) == POST_DEC
))
1685 || GET_CODE (XEXP (op
, 0)) == PRE_INC
1686 || GET_CODE (XEXP (op
, 0)) == POST_INC
))
1692 if (GET_CODE (op
) == CONST_DOUBLE
1693 || (GET_CODE (op
) == CONST_VECTOR
1694 && GET_MODE_CLASS (GET_MODE (op
)) == MODE_VECTOR_FLOAT
))
1699 if (GET_CODE (op
) == CONST_DOUBLE
1700 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op
, 'G', constraint
))
1704 if (GET_CODE (op
) == CONST_DOUBLE
1705 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op
, 'H', constraint
))
1710 if (GET_CODE (op
) == CONST_INT
1711 || (GET_CODE (op
) == CONST_DOUBLE
1712 && GET_MODE (op
) == VOIDmode
))
1717 if (CONSTANT_P (op
) && (! flag_pic
|| LEGITIMATE_PIC_OPERAND_P (op
)))
1722 if (GET_CODE (op
) == CONST_INT
1723 || (GET_CODE (op
) == CONST_DOUBLE
1724 && GET_MODE (op
) == VOIDmode
))
1729 if (GET_CODE (op
) == CONST_INT
1730 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), 'I', constraint
))
1734 if (GET_CODE (op
) == CONST_INT
1735 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), 'J', constraint
))
1739 if (GET_CODE (op
) == CONST_INT
1740 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), 'K', constraint
))
1744 if (GET_CODE (op
) == CONST_INT
1745 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), 'L', constraint
))
1749 if (GET_CODE (op
) == CONST_INT
1750 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), 'M', constraint
))
1754 if (GET_CODE (op
) == CONST_INT
1755 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), 'N', constraint
))
1759 if (GET_CODE (op
) == CONST_INT
1760 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), 'O', constraint
))
1764 if (GET_CODE (op
) == CONST_INT
1765 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), 'P', constraint
))
1774 if (general_operand (op
, VOIDmode
))
1779 /* For all other letters, we first check for a register class,
1780 otherwise it is an EXTRA_CONSTRAINT. */
1781 if (REG_CLASS_FROM_CONSTRAINT (c
, constraint
) != NO_REGS
)
1784 if (GET_MODE (op
) == BLKmode
)
1786 if (register_operand (op
, VOIDmode
))
1789 #ifdef EXTRA_CONSTRAINT_STR
1790 else if (EXTRA_CONSTRAINT_STR (op
, c
, constraint
))
1792 else if (EXTRA_MEMORY_CONSTRAINT (c
, constraint
)
1793 /* Every memory operand can be reloaded to fit. */
1794 && memory_operand (op
, VOIDmode
))
1796 else if (EXTRA_ADDRESS_CONSTRAINT (c
, constraint
)
1797 /* Every address operand can be reloaded to fit. */
1798 && address_operand (op
, VOIDmode
))
1803 len
= CONSTRAINT_LEN (c
, constraint
);
1806 while (--len
&& *constraint
);
1814 /* Given an rtx *P, if it is a sum containing an integer constant term,
1815 return the location (type rtx *) of the pointer to that constant term.
1816 Otherwise, return a null pointer. */
1819 find_constant_term_loc (rtx
*p
)
1822 enum rtx_code code
= GET_CODE (*p
);
1824 /* If *P IS such a constant term, P is its location. */
1826 if (code
== CONST_INT
|| code
== SYMBOL_REF
|| code
== LABEL_REF
1830 /* Otherwise, if not a sum, it has no constant term. */
1832 if (GET_CODE (*p
) != PLUS
)
1835 /* If one of the summands is constant, return its location. */
1837 if (XEXP (*p
, 0) && CONSTANT_P (XEXP (*p
, 0))
1838 && XEXP (*p
, 1) && CONSTANT_P (XEXP (*p
, 1)))
1841 /* Otherwise, check each summand for containing a constant term. */
1843 if (XEXP (*p
, 0) != 0)
1845 tem
= find_constant_term_loc (&XEXP (*p
, 0));
1850 if (XEXP (*p
, 1) != 0)
1852 tem
= find_constant_term_loc (&XEXP (*p
, 1));
1860 /* Return 1 if OP is a memory reference
1861 whose address contains no side effects
1862 and remains valid after the addition
1863 of a positive integer less than the
1864 size of the object being referenced.
1866 We assume that the original address is valid and do not check it.
1868 This uses strict_memory_address_p as a subroutine, so
1869 don't use it before reload. */
1872 offsettable_memref_p (rtx op
)
1874 return ((MEM_P (op
))
1875 && offsettable_address_p (1, GET_MODE (op
), XEXP (op
, 0)));
1878 /* Similar, but don't require a strictly valid mem ref:
1879 consider pseudo-regs valid as index or base regs. */
1882 offsettable_nonstrict_memref_p (rtx op
)
1884 return ((MEM_P (op
))
1885 && offsettable_address_p (0, GET_MODE (op
), XEXP (op
, 0)));
1888 /* Return 1 if Y is a memory address which contains no side effects
1889 and would remain valid after the addition of a positive integer
1890 less than the size of that mode.
1892 We assume that the original address is valid and do not check it.
1893 We do check that it is valid for narrower modes.
1895 If STRICTP is nonzero, we require a strictly valid address,
1896 for the sake of use in reload.c. */
1899 offsettable_address_p (int strictp
, enum machine_mode mode
, rtx y
)
1901 enum rtx_code ycode
= GET_CODE (y
);
1905 int (*addressp
) (enum machine_mode
, rtx
) =
1906 (strictp
? strict_memory_address_p
: memory_address_p
);
1907 unsigned int mode_sz
= GET_MODE_SIZE (mode
);
1909 if (CONSTANT_ADDRESS_P (y
))
1912 /* Adjusting an offsettable address involves changing to a narrower mode.
1913 Make sure that's OK. */
1915 if (mode_dependent_address_p (y
))
1918 /* ??? How much offset does an offsettable BLKmode reference need?
1919 Clearly that depends on the situation in which it's being used.
1920 However, the current situation in which we test 0xffffffff is
1921 less than ideal. Caveat user. */
1923 mode_sz
= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
;
1925 /* If the expression contains a constant term,
1926 see if it remains valid when max possible offset is added. */
1928 if ((ycode
== PLUS
) && (y2
= find_constant_term_loc (&y1
)))
1933 *y2
= plus_constant (*y2
, mode_sz
- 1);
1934 /* Use QImode because an odd displacement may be automatically invalid
1935 for any wider mode. But it should be valid for a single byte. */
1936 good
= (*addressp
) (QImode
, y
);
1938 /* In any case, restore old contents of memory. */
1943 if (GET_RTX_CLASS (ycode
) == RTX_AUTOINC
)
1946 /* The offset added here is chosen as the maximum offset that
1947 any instruction could need to add when operating on something
1948 of the specified mode. We assume that if Y and Y+c are
1949 valid addresses then so is Y+d for all 0<d<c. adjust_address will
1950 go inside a LO_SUM here, so we do so as well. */
1951 if (GET_CODE (y
) == LO_SUM
1953 && mode_sz
<= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
)
1954 z
= gen_rtx_LO_SUM (GET_MODE (y
), XEXP (y
, 0),
1955 plus_constant (XEXP (y
, 1), mode_sz
- 1));
1957 z
= plus_constant (y
, mode_sz
- 1);
1959 /* Use QImode because an odd displacement may be automatically invalid
1960 for any wider mode. But it should be valid for a single byte. */
1961 return (*addressp
) (QImode
, z
);
1964 /* Return 1 if ADDR is an address-expression whose effect depends
1965 on the mode of the memory reference it is used in.
1967 Autoincrement addressing is a typical example of mode-dependence
1968 because the amount of the increment depends on the mode. */
1971 mode_dependent_address_p (rtx addr ATTRIBUTE_UNUSED
/* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */)
1973 GO_IF_MODE_DEPENDENT_ADDRESS (addr
, win
);
1975 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
1976 win
: ATTRIBUTE_UNUSED_LABEL
1980 /* Like extract_insn, but save insn extracted and don't extract again, when
1981 called again for the same insn expecting that recog_data still contain the
1982 valid information. This is used primary by gen_attr infrastructure that
1983 often does extract insn again and again. */
1985 extract_insn_cached (rtx insn
)
1987 if (recog_data
.insn
== insn
&& INSN_CODE (insn
) >= 0)
1989 extract_insn (insn
);
1990 recog_data
.insn
= insn
;
1992 /* Do cached extract_insn, constrain_operands and complain about failures.
1993 Used by insn_attrtab. */
1995 extract_constrain_insn_cached (rtx insn
)
1997 extract_insn_cached (insn
);
1998 if (which_alternative
== -1
1999 && !constrain_operands (reload_completed
))
2000 fatal_insn_not_found (insn
);
2002 /* Do cached constrain_operands and complain about failures. */
2004 constrain_operands_cached (int strict
)
2006 if (which_alternative
== -1)
2007 return constrain_operands (strict
);
2012 /* Analyze INSN and fill in recog_data. */
2015 extract_insn (rtx insn
)
2020 rtx body
= PATTERN (insn
);
2022 recog_data
.insn
= NULL
;
2023 recog_data
.n_operands
= 0;
2024 recog_data
.n_alternatives
= 0;
2025 recog_data
.n_dups
= 0;
2026 which_alternative
= -1;
2028 switch (GET_CODE (body
))
2038 if (GET_CODE (SET_SRC (body
)) == ASM_OPERANDS
)
2043 if ((GET_CODE (XVECEXP (body
, 0, 0)) == SET
2044 && GET_CODE (SET_SRC (XVECEXP (body
, 0, 0))) == ASM_OPERANDS
)
2045 || GET_CODE (XVECEXP (body
, 0, 0)) == ASM_OPERANDS
)
2051 recog_data
.n_operands
= noperands
= asm_noperands (body
);
2054 /* This insn is an `asm' with operands. */
2056 /* expand_asm_operands makes sure there aren't too many operands. */
2057 gcc_assert (noperands
<= MAX_RECOG_OPERANDS
);
2059 /* Now get the operand values and constraints out of the insn. */
2060 decode_asm_operands (body
, recog_data
.operand
,
2061 recog_data
.operand_loc
,
2062 recog_data
.constraints
,
2063 recog_data
.operand_mode
);
2066 const char *p
= recog_data
.constraints
[0];
2067 recog_data
.n_alternatives
= 1;
2069 recog_data
.n_alternatives
+= (*p
++ == ',');
2073 fatal_insn_not_found (insn
);
2077 /* Ordinary insn: recognize it, get the operands via insn_extract
2078 and get the constraints. */
2080 icode
= recog_memoized (insn
);
2082 fatal_insn_not_found (insn
);
2084 recog_data
.n_operands
= noperands
= insn_data
[icode
].n_operands
;
2085 recog_data
.n_alternatives
= insn_data
[icode
].n_alternatives
;
2086 recog_data
.n_dups
= insn_data
[icode
].n_dups
;
2088 insn_extract (insn
);
2090 for (i
= 0; i
< noperands
; i
++)
2092 recog_data
.constraints
[i
] = insn_data
[icode
].operand
[i
].constraint
;
2093 recog_data
.operand_mode
[i
] = insn_data
[icode
].operand
[i
].mode
;
2094 /* VOIDmode match_operands gets mode from their real operand. */
2095 if (recog_data
.operand_mode
[i
] == VOIDmode
)
2096 recog_data
.operand_mode
[i
] = GET_MODE (recog_data
.operand
[i
]);
2099 for (i
= 0; i
< noperands
; i
++)
2100 recog_data
.operand_type
[i
]
2101 = (recog_data
.constraints
[i
][0] == '=' ? OP_OUT
2102 : recog_data
.constraints
[i
][0] == '+' ? OP_INOUT
2105 gcc_assert (recog_data
.n_alternatives
<= MAX_RECOG_ALTERNATIVES
);
2108 /* After calling extract_insn, you can use this function to extract some
2109 information from the constraint strings into a more usable form.
2110 The collected data is stored in recog_op_alt. */
2112 preprocess_constraints (void)
2116 for (i
= 0; i
< recog_data
.n_operands
; i
++)
2117 memset (recog_op_alt
[i
], 0, (recog_data
.n_alternatives
2118 * sizeof (struct operand_alternative
)));
2120 for (i
= 0; i
< recog_data
.n_operands
; i
++)
2123 struct operand_alternative
*op_alt
;
2124 const char *p
= recog_data
.constraints
[i
];
2126 op_alt
= recog_op_alt
[i
];
2128 for (j
= 0; j
< recog_data
.n_alternatives
; j
++)
2130 op_alt
[j
].cl
= NO_REGS
;
2131 op_alt
[j
].constraint
= p
;
2132 op_alt
[j
].matches
= -1;
2133 op_alt
[j
].matched
= -1;
2135 if (*p
== '\0' || *p
== ',')
2137 op_alt
[j
].anything_ok
= 1;
2147 while (c
!= ',' && c
!= '\0');
2148 if (c
== ',' || c
== '\0')
2156 case '=': case '+': case '*': case '%':
2157 case 'E': case 'F': case 'G': case 'H':
2158 case 's': case 'i': case 'n':
2159 case 'I': case 'J': case 'K': case 'L':
2160 case 'M': case 'N': case 'O': case 'P':
2161 /* These don't say anything we care about. */
2165 op_alt
[j
].reject
+= 6;
2168 op_alt
[j
].reject
+= 600;
2171 op_alt
[j
].earlyclobber
= 1;
2174 case '0': case '1': case '2': case '3': case '4':
2175 case '5': case '6': case '7': case '8': case '9':
2178 op_alt
[j
].matches
= strtoul (p
, &end
, 10);
2179 recog_op_alt
[op_alt
[j
].matches
][j
].matched
= i
;
2185 op_alt
[j
].memory_ok
= 1;
2188 op_alt
[j
].decmem_ok
= 1;
2191 op_alt
[j
].incmem_ok
= 1;
2194 op_alt
[j
].nonoffmem_ok
= 1;
2197 op_alt
[j
].offmem_ok
= 1;
2200 op_alt
[j
].anything_ok
= 1;
2204 op_alt
[j
].is_address
= 1;
2205 op_alt
[j
].cl
= reg_class_subunion
[(int) op_alt
[j
].cl
]
2206 [(int) MODE_BASE_REG_CLASS (VOIDmode
)];
2212 reg_class_subunion
[(int) op_alt
[j
].cl
][(int) GENERAL_REGS
];
2216 if (EXTRA_MEMORY_CONSTRAINT (c
, p
))
2218 op_alt
[j
].memory_ok
= 1;
2221 if (EXTRA_ADDRESS_CONSTRAINT (c
, p
))
2223 op_alt
[j
].is_address
= 1;
2225 = (reg_class_subunion
2226 [(int) op_alt
[j
].cl
]
2227 [(int) MODE_BASE_REG_CLASS (VOIDmode
)]);
2232 = (reg_class_subunion
2233 [(int) op_alt
[j
].cl
]
2234 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c
, p
)]);
2237 p
+= CONSTRAINT_LEN (c
, p
);
2243 /* Check the operands of an insn against the insn's operand constraints
2244 and return 1 if they are valid.
2245 The information about the insn's operands, constraints, operand modes
2246 etc. is obtained from the global variables set up by extract_insn.
2248 WHICH_ALTERNATIVE is set to a number which indicates which
2249 alternative of constraints was matched: 0 for the first alternative,
2250 1 for the next, etc.
2252 In addition, when two operands are required to match
2253 and it happens that the output operand is (reg) while the
2254 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2255 make the output operand look like the input.
2256 This is because the output operand is the one the template will print.
2258 This is used in final, just before printing the assembler code and by
2259 the routines that determine an insn's attribute.
2261 If STRICT is a positive nonzero value, it means that we have been
2262 called after reload has been completed. In that case, we must
2263 do all checks strictly. If it is zero, it means that we have been called
2264 before reload has completed. In that case, we first try to see if we can
2265 find an alternative that matches strictly. If not, we try again, this
2266 time assuming that reload will fix up the insn. This provides a "best
2267 guess" for the alternative and is used to compute attributes of insns prior
2268 to reload. A negative value of STRICT is used for this internal call. */
2276 constrain_operands (int strict
)
2278 const char *constraints
[MAX_RECOG_OPERANDS
];
2279 int matching_operands
[MAX_RECOG_OPERANDS
];
2280 int earlyclobber
[MAX_RECOG_OPERANDS
];
2283 struct funny_match funny_match
[MAX_RECOG_OPERANDS
];
2284 int funny_match_index
;
2286 which_alternative
= 0;
2287 if (recog_data
.n_operands
== 0 || recog_data
.n_alternatives
== 0)
2290 for (c
= 0; c
< recog_data
.n_operands
; c
++)
2292 constraints
[c
] = recog_data
.constraints
[c
];
2293 matching_operands
[c
] = -1;
2298 int seen_earlyclobber_at
= -1;
2301 funny_match_index
= 0;
2303 for (opno
= 0; opno
< recog_data
.n_operands
; opno
++)
2305 rtx op
= recog_data
.operand
[opno
];
2306 enum machine_mode mode
= GET_MODE (op
);
2307 const char *p
= constraints
[opno
];
2313 earlyclobber
[opno
] = 0;
2315 /* A unary operator may be accepted by the predicate, but it
2316 is irrelevant for matching constraints. */
2320 if (GET_CODE (op
) == SUBREG
)
2322 if (REG_P (SUBREG_REG (op
))
2323 && REGNO (SUBREG_REG (op
)) < FIRST_PSEUDO_REGISTER
)
2324 offset
= subreg_regno_offset (REGNO (SUBREG_REG (op
)),
2325 GET_MODE (SUBREG_REG (op
)),
2328 op
= SUBREG_REG (op
);
2331 /* An empty constraint or empty alternative
2332 allows anything which matched the pattern. */
2333 if (*p
== 0 || *p
== ',')
2337 switch (c
= *p
, len
= CONSTRAINT_LEN (c
, p
), c
)
2346 case '?': case '!': case '*': case '%':
2351 /* Ignore rest of this alternative as far as
2352 constraint checking is concerned. */
2355 while (*p
&& *p
!= ',');
2360 earlyclobber
[opno
] = 1;
2361 if (seen_earlyclobber_at
< 0)
2362 seen_earlyclobber_at
= opno
;
2365 case '0': case '1': case '2': case '3': case '4':
2366 case '5': case '6': case '7': case '8': case '9':
2368 /* This operand must be the same as a previous one.
2369 This kind of constraint is used for instructions such
2370 as add when they take only two operands.
2372 Note that the lower-numbered operand is passed first.
2374 If we are not testing strictly, assume that this
2375 constraint will be satisfied. */
2380 match
= strtoul (p
, &end
, 10);
2387 rtx op1
= recog_data
.operand
[match
];
2388 rtx op2
= recog_data
.operand
[opno
];
2390 /* A unary operator may be accepted by the predicate,
2391 but it is irrelevant for matching constraints. */
2393 op1
= XEXP (op1
, 0);
2395 op2
= XEXP (op2
, 0);
2397 val
= operands_match_p (op1
, op2
);
2400 matching_operands
[opno
] = match
;
2401 matching_operands
[match
] = opno
;
2406 /* If output is *x and input is *--x, arrange later
2407 to change the output to *--x as well, since the
2408 output op is the one that will be printed. */
2409 if (val
== 2 && strict
> 0)
2411 funny_match
[funny_match_index
].this = opno
;
2412 funny_match
[funny_match_index
++].other
= match
;
2419 /* p is used for address_operands. When we are called by
2420 gen_reload, no one will have checked that the address is
2421 strictly valid, i.e., that all pseudos requiring hard regs
2422 have gotten them. */
2424 || (strict_memory_address_p (recog_data
.operand_mode
[opno
],
2429 /* No need to check general_operand again;
2430 it was done in insn-recog.c. */
2432 /* Anything goes unless it is a REG and really has a hard reg
2433 but the hard reg is not in the class GENERAL_REGS. */
2435 || GENERAL_REGS
== ALL_REGS
2437 || (reload_in_progress
2438 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)
2439 || reg_fits_class_p (op
, GENERAL_REGS
, offset
, mode
))
2444 /* This is used for a MATCH_SCRATCH in the cases when
2445 we don't actually need anything. So anything goes
2451 /* Memory operands must be valid, to the extent
2452 required by STRICT. */
2456 && !strict_memory_address_p (GET_MODE (op
),
2460 && !memory_address_p (GET_MODE (op
), XEXP (op
, 0)))
2464 /* Before reload, accept what reload can turn into mem. */
2465 else if (strict
< 0 && CONSTANT_P (op
))
2467 /* During reload, accept a pseudo */
2468 else if (reload_in_progress
&& REG_P (op
)
2469 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)
2475 && (GET_CODE (XEXP (op
, 0)) == PRE_DEC
2476 || GET_CODE (XEXP (op
, 0)) == POST_DEC
))
2482 && (GET_CODE (XEXP (op
, 0)) == PRE_INC
2483 || GET_CODE (XEXP (op
, 0)) == POST_INC
))
2489 if (GET_CODE (op
) == CONST_DOUBLE
2490 || (GET_CODE (op
) == CONST_VECTOR
2491 && GET_MODE_CLASS (GET_MODE (op
)) == MODE_VECTOR_FLOAT
))
2497 if (GET_CODE (op
) == CONST_DOUBLE
2498 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op
, c
, p
))
2503 if (GET_CODE (op
) == CONST_INT
2504 || (GET_CODE (op
) == CONST_DOUBLE
2505 && GET_MODE (op
) == VOIDmode
))
2508 if (CONSTANT_P (op
))
2513 if (GET_CODE (op
) == CONST_INT
2514 || (GET_CODE (op
) == CONST_DOUBLE
2515 && GET_MODE (op
) == VOIDmode
))
2527 if (GET_CODE (op
) == CONST_INT
2528 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), c
, p
))
2534 && ((strict
> 0 && ! offsettable_memref_p (op
))
2536 && !(CONSTANT_P (op
) || MEM_P (op
)))
2537 || (reload_in_progress
2539 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
))))
2544 if ((strict
> 0 && offsettable_memref_p (op
))
2545 || (strict
== 0 && offsettable_nonstrict_memref_p (op
))
2546 /* Before reload, accept what reload can handle. */
2548 && (CONSTANT_P (op
) || MEM_P (op
)))
2549 /* During reload, accept a pseudo */
2550 || (reload_in_progress
&& REG_P (op
)
2551 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
))
2560 ? GENERAL_REGS
: REG_CLASS_FROM_CONSTRAINT (c
, p
));
2566 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)
2567 || (strict
== 0 && GET_CODE (op
) == SCRATCH
)
2569 && reg_fits_class_p (op
, cl
, offset
, mode
)))
2572 #ifdef EXTRA_CONSTRAINT_STR
2573 else if (EXTRA_CONSTRAINT_STR (op
, c
, p
))
2576 else if (EXTRA_MEMORY_CONSTRAINT (c
, p
)
2577 /* Every memory operand can be reloaded to fit. */
2578 && ((strict
< 0 && MEM_P (op
))
2579 /* Before reload, accept what reload can turn
2581 || (strict
< 0 && CONSTANT_P (op
))
2582 /* During reload, accept a pseudo */
2583 || (reload_in_progress
&& REG_P (op
)
2584 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)))
2586 else if (EXTRA_ADDRESS_CONSTRAINT (c
, p
)
2587 /* Every address operand can be reloaded to fit. */
2594 while (p
+= len
, c
);
2596 constraints
[opno
] = p
;
2597 /* If this operand did not win somehow,
2598 this alternative loses. */
2602 /* This alternative won; the operands are ok.
2603 Change whichever operands this alternative says to change. */
2608 /* See if any earlyclobber operand conflicts with some other
2611 if (strict
> 0 && seen_earlyclobber_at
>= 0)
2612 for (eopno
= seen_earlyclobber_at
;
2613 eopno
< recog_data
.n_operands
;
2615 /* Ignore earlyclobber operands now in memory,
2616 because we would often report failure when we have
2617 two memory operands, one of which was formerly a REG. */
2618 if (earlyclobber
[eopno
]
2619 && REG_P (recog_data
.operand
[eopno
]))
2620 for (opno
= 0; opno
< recog_data
.n_operands
; opno
++)
2621 if ((MEM_P (recog_data
.operand
[opno
])
2622 || recog_data
.operand_type
[opno
] != OP_OUT
)
2624 /* Ignore things like match_operator operands. */
2625 && *recog_data
.constraints
[opno
] != 0
2626 && ! (matching_operands
[opno
] == eopno
2627 && operands_match_p (recog_data
.operand
[opno
],
2628 recog_data
.operand
[eopno
]))
2629 && ! safe_from_earlyclobber (recog_data
.operand
[opno
],
2630 recog_data
.operand
[eopno
]))
2635 while (--funny_match_index
>= 0)
2637 recog_data
.operand
[funny_match
[funny_match_index
].other
]
2638 = recog_data
.operand
[funny_match
[funny_match_index
].this];
2645 which_alternative
++;
2647 while (which_alternative
< recog_data
.n_alternatives
);
2649 which_alternative
= -1;
2650 /* If we are about to reject this, but we are not to test strictly,
2651 try a very loose test. Only return failure if it fails also. */
2653 return constrain_operands (-1);
2658 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2659 is a hard reg in class CLASS when its regno is offset by OFFSET
2660 and changed to mode MODE.
2661 If REG occupies multiple hard regs, all of them must be in CLASS. */
/* NOTE(review): garbled extraction of GCC recog.c -- the original file's
   line numbers are fused into the text and interior lines (return type,
   braces, the loop bounds/body, and the return statements) are missing.
   Do not assume this compiles; verify against upstream recog.c.  */
2664 reg_fits_class_p (rtx operand
, enum reg_class cl
, int offset
,
2665 enum machine_mode mode
)
/* Only hard registers (regno < FIRST_PSEUDO_REGISTER) can be tested
   against the reg_class_contents bit set.  */
2667 int regno
= REGNO (operand
);
2668 if (regno
< FIRST_PSEUDO_REGISTER
2669 && TEST_HARD_REG_BIT (reg_class_contents
[(int) cl
],
/* Presumably walks the additional hard regs the value occupies in MODE
   (hard_regno_nregs) and requires each to be in CL -- TODO confirm;
   the loop condition, body and result are partly missing here.  */
2674 for (sr
= hard_regno_nregs
[regno
][mode
] - 1;
2676 if (! TEST_HARD_REG_BIT (reg_class_contents
[(int) cl
],
2685 /* Split single instruction. Helper function for split_all_insns and
2686 split_all_insns_noflow. Return last insn in the sequence if successful,
2687 or NULL if unsuccessful. */
/* NOTE(review): garbled extraction of GCC recog.c -- original line
   numbers are fused in and interior lines (return type, braces, the
   early-return checks, final return) are missing.  Verify against
   upstream recog.c.  */
2690 split_insn (rtx insn
)
2692 /* Split insns here to get max fine-grain parallelism. */
/* FIRST anchors the position before INSN so the emitted sequence can be
   walked afterwards; try_split does the actual splitting.  */
2693 rtx first
= PREV_INSN (insn
);
2694 rtx last
= try_split (PATTERN (insn
), insn
, 1);
2699 /* try_split returns the NOTE that INSN became. */
2700 SET_INSN_DELETED (insn
);
2702 /* ??? Coddle to md files that generate subregs in post-reload
2703 splitters instead of computing the proper hard register. */
/* Post-reload only: walk the new insns and simplify stray subregs of
   hard registers -- the loop structure around these lines is missing
   from this extraction.  */
2704 if (reload_completed
&& first
!= last
)
2706 first
= NEXT_INSN (first
);
2710 cleanup_subreg_operands (first
);
2713 first
= NEXT_INSN (first
);
2719 /* Split all insns in the function. If UPD_LIFE, update life info after. */
/* NOTE(review): garbled extraction of GCC recog.c -- original line
   numbers are fused in and many interior lines (declarations of
   `blocks', `changed', `bb', `insn', `next'; braces; several
   conditions) are missing.  Comments describe only what the surviving
   fragments show.  */
2722 split_all_insns (int upd_life
)
/* Bitmap of basic blocks whose boundaries may have changed; used later
   to rebuild sub-basic-blocks and (optionally) update life info.  */
2728 blocks
= sbitmap_alloc (last_basic_block
);
2729 sbitmap_zero (blocks
);
2732 FOR_EACH_BB_REVERSE (bb
)
2735 bool finish
= false;
2737 for (insn
= BB_HEAD (bb
); !finish
; insn
= next
)
2739 /* Can't use `next_real_insn' because that might go across
2740 CODE_LABELS and short-out basic blocks. */
/* NEXT is captured before INSN can be deleted below; FINISH latches
   when the block's last insn has been visited.  */
2741 next
= NEXT_INSN (insn
);
2742 finish
= (insn
== BB_END (bb
));
2745 rtx set
= single_set (insn
);
2747 /* Don't split no-op move insns. These should silently
2748 disappear later in final. Splitting such insns would
2749 break the code that handles REG_NO_CONFLICT blocks. */
2750 if (set
&& set_noop_p (set
))
2752 /* Nops get in the way while scheduling, so delete them
2753 now if register allocation has already been done. It
2754 is too risky to try to do this before register
2755 allocation, and there are unlikely to be very many
2756 nops then anyways. */
2757 if (reload_completed
)
2759 /* If the no-op set has a REG_UNUSED note, we need
2760 to update liveness information. */
2761 if (find_reg_note (insn
, REG_UNUSED
, NULL_RTX
))
2763 SET_BIT (blocks
, bb
->index
);
/* presumably also sets `changed' here -- the line is missing.  */
2766 /* ??? Is life info affected by deleting edges? */
2767 delete_insn_and_edges (insn
);
/* Non-noop path: actually split, and record the block as modified when
   the split succeeded -- the success test is missing here.  */
2772 rtx last
= split_insn (insn
);
2775 /* The split sequence may include barrier, but the
2776 BB boundary we are interested in will be set to
2779 while (BARRIER_P (last
))
2780 last
= PREV_INSN (last
);
2781 SET_BIT (blocks
, bb
->index
);
/* After the walk: rebuild CFG structure inside the touched blocks, and
   grow the bitmap if new basic blocks were created.  */
2791 int old_last_basic_block
= last_basic_block
;
2793 find_many_sub_basic_blocks (blocks
);
2795 if (old_last_basic_block
!= last_basic_block
&& upd_life
)
2796 blocks
= sbitmap_resize (blocks
, last_basic_block
, 1);
2799 if (changed
&& upd_life
)
2800 update_life_info (blocks
, UPDATE_LIFE_GLOBAL_RM_NOTES
,
2803 #ifdef ENABLE_CHECKING
2804 verify_flow_info ();
2807 sbitmap_free (blocks
);
2810 /* Same as split_all_insns, but do not expect CFG to be available.
2811 Used by machine dependent reorg passes. */
/* NOTE(review): garbled extraction of GCC recog.c -- return type,
   braces, the INSN_P filter and the else-branch that calls split_insn
   are missing from this chunk.  Verify against upstream recog.c.  */
2814 split_all_insns_noflow (void)
2818 for (insn
= get_insns (); insn
; insn
= next
)
/* NEXT is captured first because INSN may be deleted below.  */
2820 next
= NEXT_INSN (insn
);
2823 /* Don't split no-op move insns. These should silently
2824 disappear later in final. Splitting such insns would
2825 break the code that handles REG_NO_CONFLICT blocks. */
2826 rtx set
= single_set (insn
);
2827 if (set
&& set_noop_p (set
))
2829 /* Nops get in the way while scheduling, so delete them
2830 now if register allocation has already been done. It
2831 is too risky to try to do this before register
2832 allocation, and there are unlikely to be very many
2835 ??? Should we use delete_insn when the CFG isn't valid? */
2836 if (reload_completed
)
2837 delete_insn_and_edges (insn
);
2845 #ifdef HAVE_peephole2
/* NOTE(review): the struct body (original lines 2847-2851) was dropped
   by this extraction -- upstream it holds the insn and its live-before
   regset; confirm against recog.c before relying on that.  */
2846 struct peep2_insn_data
/* Ring buffer of the last MAX_INSNS_PER_PEEP2 insns scanned, plus one
   extra slot for the end-of-block marker; peep2_current indexes the
   current position in the ring.  */
2852 static struct peep2_insn_data peep2_insn_data
[MAX_INSNS_PER_PEEP2
+ 1];
2853 static int peep2_current
;
2855 /* A non-insn marker indicating the last insn of the block.
2856 The live_before regset for this element is correct, indicating
2857 global_live_at_end for the block. */
2858 #define PEEP2_EOB pc_rtx
2860 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2861 does not exist. Used by the recognizer to find the next insn to match
2862 in a multi-insn pattern. */
/* NOTE(review): garbled extraction of GCC recog.c -- the return type,
   the `n += peep2_current' line and the NULL_RTX return are missing.
   Verify against upstream recog.c.  */
2865 peep2_next_insn (int n
)
2867 gcc_assert (n
< MAX_INSNS_PER_PEEP2
+ 1);
/* Wrap the ring-buffer index (the preceding `n += peep2_current' is
   missing from this extraction).  */
2870 if (n
>= MAX_INSNS_PER_PEEP2
+ 1)
2871 n
-= MAX_INSNS_PER_PEEP2
+ 1;
/* The end-of-block marker is not a real insn; presumably NULL_RTX is
   returned in that case -- the line is missing here.  */
2873 if (peep2_insn_data
[n
].insn
== PEEP2_EOB
)
2875 return peep2_insn_data
[n
].insn
;
2878 /* Return true if REGNO is dead before the Nth non-note insn
/* NOTE(review): garbled extraction of GCC recog.c -- return type and
   braces are missing.  Verify against upstream recog.c.  */
2882 peep2_regno_dead_p (int ofs
, int regno
)
2884 gcc_assert (ofs
< MAX_INSNS_PER_PEEP2
+ 1);
/* Convert the caller-relative offset into a ring-buffer index.  */
2886 ofs
+= peep2_current
;
2887 if (ofs
>= MAX_INSNS_PER_PEEP2
+ 1)
2888 ofs
-= MAX_INSNS_PER_PEEP2
+ 1;
2890 gcc_assert (peep2_insn_data
[ofs
].insn
!= NULL_RTX
);
/* Dead iff the register is not in the live-before set of that slot.  */
2892 return ! REGNO_REG_SET_P (peep2_insn_data
[ofs
].live_before
, regno
);
2895 /* Similarly for a REG. */
/* NOTE(review): garbled extraction of GCC recog.c -- return type,
   braces, the loop over the register's words and the returns are
   missing.  Verify against upstream recog.c.  */
2898 peep2_reg_dead_p (int ofs
, rtx reg
)
2902 gcc_assert (ofs
< MAX_INSNS_PER_PEEP2
+ 1);
/* Convert the caller-relative offset into a ring-buffer index.  */
2904 ofs
+= peep2_current
;
2905 if (ofs
>= MAX_INSNS_PER_PEEP2
+ 1)
2906 ofs
-= MAX_INSNS_PER_PEEP2
+ 1;
2908 gcc_assert (peep2_insn_data
[ofs
].insn
!= NULL_RTX
);
/* A multi-word REG is dead only if every hard reg it occupies is dead;
   presumably a loop decrements N checking each regno -- the loop header
   and returns are missing from this extraction.  */
2910 regno
= REGNO (reg
);
2911 n
= hard_regno_nregs
[regno
][GET_MODE (reg
)];
2913 if (REGNO_REG_SET_P (peep2_insn_data
[ofs
].live_before
, regno
+ n
))
2918 /* Try to find a hard register of mode MODE, matching the register class in
2919 CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
2920 remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
2921 in which case the only condition is that the register must be available
2922 before CURRENT_INSN.
2923 Registers that already have bits set in REG_SET will not be considered.
2925 If an appropriate register is available, it will be returned and the
2926 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
/* NOTE(review): garbled extraction of GCC recog.c -- the return type,
   several declarations (`cl', `live', `i'), braces, `continue'
   statements after each filter, and the final NULL_RTX return are
   missing.  Comments below only describe surviving fragments.  */
2930 peep2_find_free_register (int from
, int to
, const char *class_str
,
2931 enum machine_mode mode
, HARD_REG_SET
*reg_set
)
/* Static so consecutive calls rotate through the register file instead
   of always handing back the same register.  */
2933 static int search_ofs
;
2938 gcc_assert (from
< MAX_INSNS_PER_PEEP2
+ 1);
2939 gcc_assert (to
< MAX_INSNS_PER_PEEP2
+ 1);
/* Convert both offsets to ring-buffer indices.  */
2941 from
+= peep2_current
;
2942 if (from
>= MAX_INSNS_PER_PEEP2
+ 1)
2943 from
-= MAX_INSNS_PER_PEEP2
+ 1;
2944 to
+= peep2_current
;
2945 if (to
>= MAX_INSNS_PER_PEEP2
+ 1)
2946 to
-= MAX_INSNS_PER_PEEP2
+ 1;
2948 gcc_assert (peep2_insn_data
[from
].insn
!= NULL_RTX
);
2949 REG_SET_TO_HARD_REG_SET (live
, peep2_insn_data
[from
].live_before
);
/* Accumulate liveness over every insn in [FROM, TO] -- the enclosing
   `while (from != to)' loop header is missing from this extraction.  */
2953 HARD_REG_SET this_live
;
2955 if (++from
>= MAX_INSNS_PER_PEEP2
+ 1)
2957 gcc_assert (peep2_insn_data
[from
].insn
!= NULL_RTX
);
2958 REG_SET_TO_HARD_REG_SET (this_live
, peep2_insn_data
[from
].live_before
);
2959 IOR_HARD_REG_SET (live
, this_live
);
/* 'r' means GENERAL_REGS; anything else is decoded via the target's
   constraint letter mapping.  */
2962 cl
= (class_str
[0] == 'r' ? GENERAL_REGS
2963 : REG_CLASS_FROM_CONSTRAINT (class_str
[0], class_str
));
2965 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2967 int raw_regno
, regno
, success
, j
;
2969 /* Distribute the free registers as much as possible. */
2970 raw_regno
= search_ofs
+ i
;
2971 if (raw_regno
>= FIRST_PSEUDO_REGISTER
)
2972 raw_regno
-= FIRST_PSEUDO_REGISTER
;
2973 #ifdef REG_ALLOC_ORDER
2974 regno
= reg_alloc_order
[raw_regno
];
/* The #else branch (regno = raw_regno) is missing here.  */
2979 /* Don't allocate fixed registers. */
/* Each filter below presumably ends in `continue' -- those lines are
   missing from this extraction.  */
2980 if (fixed_regs
[regno
])
2982 /* Make sure the register is of the right class. */
2983 if (! TEST_HARD_REG_BIT (reg_class_contents
[cl
], regno
))
2985 /* And can support the mode we need. */
2986 if (! HARD_REGNO_MODE_OK (regno
, mode
))
2988 /* And that we don't create an extra save/restore. */
2989 if (! call_used_regs
[regno
] && ! regs_ever_live
[regno
])
2991 /* And we don't clobber traceback for noreturn functions. */
2992 if ((regno
== FRAME_POINTER_REGNUM
|| regno
== HARD_FRAME_POINTER_REGNUM
)
2993 && (! reload_completed
|| frame_pointer_needed
))
/* Check every hard reg the mode occupies against both the caller's
   exclusion set and the accumulated live set.  */
2997 for (j
= hard_regno_nregs
[regno
][mode
] - 1; j
>= 0; j
--)
2999 if (TEST_HARD_REG_BIT (*reg_set
, regno
+ j
)
3000 || TEST_HARD_REG_BIT (live
, regno
+ j
))
/* On success: claim the register in REG_SET and remember where to
   resume the next search.  */
3008 for (j
= hard_regno_nregs
[regno
][mode
] - 1; j
>= 0; j
--)
3009 SET_HARD_REG_BIT (*reg_set
, regno
+ j
);
3011 /* Start the next search with the next register. */
3012 if (++raw_regno
>= FIRST_PSEUDO_REGISTER
)
3014 search_ofs
= raw_regno
;
3016 return gen_rtx_REG (mode
, regno
);
3024 /* Perform the peephole2 optimization pass. */
/* NOTE(review): garbled extraction of GCC recog.c -- the return type,
   most declarations, braces, and many statements are missing from this
   chunk, and `®_obstack' below is mojibake for `&reg_obstack' (the
   HTML entity &reg got decoded).  Comments only describe surviving
   fragments; verify against upstream recog.c.  */
3027 peephole2_optimize (FILE *dump_file ATTRIBUTE_UNUSED
)
3033 #ifdef HAVE_conditional_execution
3037 bool do_cleanup_cfg
= false;
3038 bool do_global_life_update
= false;
3039 bool do_rebuild_jump_labels
= false;
3041 /* Initialize the regsets we're going to use. */
3042 for (i
= 0; i
< MAX_INSNS_PER_PEEP2
+ 1; ++i
)
3043 peep2_insn_data
[i
].live_before
= ALLOC_REG_SET (®_obstack
);
3044 live
= ALLOC_REG_SET (®_obstack
);
3046 #ifdef HAVE_conditional_execution
3047 blocks
= sbitmap_alloc (last_basic_block
);
3048 sbitmap_zero (blocks
);
/* With conditional execution, death notes are stripped up front and
   recomputed at the end for modified blocks.  */
3051 count_or_remove_death_notes (NULL
, 1);
3054 FOR_EACH_BB_REVERSE (bb
)
3056 struct propagate_block_info
*pbi
;
3057 reg_set_iterator rsi
;
3060 /* Indicate that all slots except the last holds invalid data. */
3061 for (i
= 0; i
< MAX_INSNS_PER_PEEP2
; ++i
)
3062 peep2_insn_data
[i
].insn
= NULL_RTX
;
3064 /* Indicate that the last slot contains live_after data. */
3065 peep2_insn_data
[MAX_INSNS_PER_PEEP2
].insn
= PEEP2_EOB
;
3066 peep2_current
= MAX_INSNS_PER_PEEP2
;
3068 /* Start up propagation. */
3069 COPY_REG_SET (live
, bb
->il
.rtl
->global_live_at_end
);
3070 COPY_REG_SET (peep2_insn_data
[MAX_INSNS_PER_PEEP2
].live_before
, live
);
3072 #ifdef HAVE_conditional_execution
3073 pbi
= init_propagate_block_info (bb
, live
, NULL
, NULL
, 0);
3075 pbi
= init_propagate_block_info (bb
, live
, NULL
, NULL
, PROP_DEATH_NOTES
);
/* Walk the block backwards; PREV is captured first because INSN may be
   replaced by a matched peephole sequence.  */
3078 for (insn
= BB_END (bb
); ; insn
= prev
)
3080 prev
= PREV_INSN (insn
);
/* `try' is a legal identifier in C; upstream renamed it later for C++
   compatibility.  */
3083 rtx
try, before_try
, x
;
3086 bool was_call
= false;
3088 /* Record this insn. */
3089 if (--peep2_current
< 0)
3090 peep2_current
= MAX_INSNS_PER_PEEP2
;
3091 peep2_insn_data
[peep2_current
].insn
= insn
;
3092 propagate_one_insn (pbi
, insn
);
3093 COPY_REG_SET (peep2_insn_data
[peep2_current
].live_before
, live
);
3095 /* Match the peephole. */
/* MATCH_LEN comes back as the number of additional insns consumed by
   the pattern (0-based).  */
3096 try = peephole2_insns (PATTERN (insn
), insn
, &match_len
);
3099 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3100 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3101 cfg-related call notes. */
3102 for (i
= 0; i
<= match_len
; ++i
)
3105 rtx old_insn
, new_insn
, note
;
/* Map the match offset back to a ring-buffer slot.  */
3107 j
= i
+ peep2_current
;
3108 if (j
>= MAX_INSNS_PER_PEEP2
+ 1)
3109 j
-= MAX_INSNS_PER_PEEP2
+ 1;
3110 old_insn
= peep2_insn_data
[j
].insn
;
3111 if (!CALL_P (old_insn
))
/* Find the (single) CALL_P insn in the replacement sequence; the
   initialization of new_insn is missing from this extraction.  */
3116 while (new_insn
!= NULL_RTX
)
3118 if (CALL_P (new_insn
))
3120 new_insn
= NEXT_INSN (new_insn
);
3123 gcc_assert (new_insn
!= NULL_RTX
);
3125 CALL_INSN_FUNCTION_USAGE (new_insn
)
3126 = CALL_INSN_FUNCTION_USAGE (old_insn
);
/* Copy over only the cfg-relevant reg notes from the old call.  */
3128 for (note
= REG_NOTES (old_insn
);
3130 note
= XEXP (note
, 1))
3131 switch (REG_NOTE_KIND (note
))
3135 REG_NOTES (new_insn
)
3136 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note
),
3138 REG_NOTES (new_insn
));
3140 /* Discard all other reg notes. */
3144 /* Croak if there is another call in the sequence. */
3145 while (++i
<= match_len
)
3147 j
= i
+ peep2_current
;
3148 if (j
>= MAX_INSNS_PER_PEEP2
+ 1)
3149 j
-= MAX_INSNS_PER_PEEP2
+ 1;
3150 old_insn
= peep2_insn_data
[j
].insn
;
3151 gcc_assert (!CALL_P (old_insn
));
/* I indexes the LAST insn of the matched span.  */
3156 i
= match_len
+ peep2_current
;
3157 if (i
>= MAX_INSNS_PER_PEEP2
+ 1)
3158 i
-= MAX_INSNS_PER_PEEP2
+ 1;
3160 note
= find_reg_note (peep2_insn_data
[i
].insn
,
3161 REG_EH_REGION
, NULL_RTX
);
3163 /* Replace the old sequence with the new. */
3164 try = emit_insn_after_setloc (try, peep2_insn_data
[i
].insn
,
3165 INSN_LOCATOR (peep2_insn_data
[i
].insn
));
3166 before_try
= PREV_INSN (insn
);
3167 delete_insn_chain (insn
, peep2_insn_data
[i
].insn
);
3169 /* Re-insert the EH_REGION notes. */
3170 if (note
|| (was_call
&& nonlocal_goto_handler_labels
))
3175 FOR_EACH_EDGE (eh_edge
, ei
, bb
->succs
)
3176 if (eh_edge
->flags
& (EDGE_EH
| EDGE_ABNORMAL_CALL
))
/* Walk the new insns backwards, re-attaching REG_EH_REGION to any insn
   that can throw; the CALL_P test is among the missing lines.  */
3179 for (x
= try ; x
!= before_try
; x
= PREV_INSN (x
))
3181 || (flag_non_call_exceptions
3182 && may_trap_p (PATTERN (x
))
3183 && !find_reg_note (x
, REG_EH_REGION
, NULL
)))
3187 = gen_rtx_EXPR_LIST (REG_EH_REGION
,
/* If the throwing insn is not the block end, the block must be split
   and a new EH edge created with matching probabilities.  */
3191 if (x
!= BB_END (bb
) && eh_edge
)
3196 nfte
= split_block (bb
, x
);
3197 flags
= (eh_edge
->flags
3198 & (EDGE_EH
| EDGE_ABNORMAL
));
3200 flags
|= EDGE_ABNORMAL_CALL
;
3201 nehe
= make_edge (nfte
->src
, eh_edge
->dest
,
3204 nehe
->probability
= eh_edge
->probability
;
3206 = REG_BR_PROB_BASE
- nehe
->probability
;
3208 do_cleanup_cfg
|= purge_dead_edges (nfte
->dest
);
3209 #ifdef HAVE_conditional_execution
3210 SET_BIT (blocks
, nfte
->dest
->index
);
3218 /* Converting possibly trapping insn to non-trapping is
3219 possible. Zap dummy outgoing edges. */
3220 do_cleanup_cfg
|= purge_dead_edges (bb
);
3223 #ifdef HAVE_conditional_execution
3224 /* With conditional execution, we cannot back up the
3225 live information so easily, since the conditional
3226 death data structures are not so self-contained.
3227 So record that we've made a modification to this
3228 block and update life information at the end. */
3229 SET_BIT (blocks
, bb
->index
);
/* Reset the ring buffer: the matched insns no longer exist.  */
3232 for (i
= 0; i
< MAX_INSNS_PER_PEEP2
+ 1; ++i
)
3233 peep2_insn_data
[i
].insn
= NULL_RTX
;
3234 peep2_insn_data
[peep2_current
].insn
= PEEP2_EOB
;
3236 /* Back up lifetime information past the end of the
3237 newly created sequence. */
3238 if (++i
>= MAX_INSNS_PER_PEEP2
+ 1)
3240 COPY_REG_SET (live
, peep2_insn_data
[i
].live_before
);
3242 /* Update life information for the new sequence. */
/* Presumably a backwards loop over the new insns re-running
   propagate_one_insn; the loop header is missing here.  */
3249 i
= MAX_INSNS_PER_PEEP2
;
3250 peep2_insn_data
[i
].insn
= x
;
3251 propagate_one_insn (pbi
, x
);
3252 COPY_REG_SET (peep2_insn_data
[i
].live_before
, live
);
3258 /* ??? Should verify that LIVE now matches what we
3259 had before the new sequence. */
3264 /* If we generated a jump instruction, it won't have
3265 JUMP_LABEL set. Recompute after we're done. */
3266 for (x
= try; x
!= before_try
; x
= PREV_INSN (x
))
3269 do_rebuild_jump_labels
= true;
/* Loop exit: stop once the block head has been processed.  */
3275 if (insn
== BB_HEAD (bb
))
3279 /* Some peepholes can decide the don't need one or more of their
3280 inputs. If this happens, local life update is not enough. */
3281 EXECUTE_IF_AND_COMPL_IN_BITMAP (bb
->il
.rtl
->global_live_at_start
, live
,
3284 do_global_life_update
= true;
3288 free_propagate_block_info (pbi
);
/* Pass-level cleanup of the per-slot regsets.  */
3291 for (i
= 0; i
< MAX_INSNS_PER_PEEP2
+ 1; ++i
)
3292 FREE_REG_SET (peep2_insn_data
[i
].live_before
);
3293 FREE_REG_SET (live
);
3295 if (do_rebuild_jump_labels
)
3296 rebuild_jump_labels (get_insns ());
3298 /* If we eliminated EH edges, we may be able to merge blocks. Further,
3299 we've changed global life since exception handlers are no longer
3304 do_global_life_update
= true;
3306 if (do_global_life_update
)
3307 update_life_info (0, UPDATE_LIFE_GLOBAL_RM_NOTES
, PROP_DEATH_NOTES
);
3308 #ifdef HAVE_conditional_execution
3311 count_or_remove_death_notes (blocks
, 1);
3312 update_life_info (blocks
, UPDATE_LIFE_LOCAL
, PROP_DEATH_NOTES
);
3314 sbitmap_free (blocks
);
3317 #endif /* HAVE_peephole2 */
3319 /* Common predicates for use with define_bypass. */
3321 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3322 data not the address operand(s) of the store. IN_INSN must be
3323 single_set. OUT_INSN must be either a single_set or a PARALLEL with
/* NOTE(review): garbled extraction of GCC recog.c -- the return type,
   braces, and the return statements (including the final `return
   true') are missing from this chunk.  Verify against upstream.  */
3327 store_data_bypass_p (rtx out_insn
, rtx in_insn
)
3329 rtx out_set
, in_set
;
3331 in_set
= single_set (in_insn
);
3332 gcc_assert (in_set
);
/* IN_INSN must actually be a store for the question to make sense.  */
3334 if (!MEM_P (SET_DEST (in_set
)))
/* Single-set OUT_INSN: the bypass applies iff the produced register
   does not feed the store ADDRESS.  */
3337 out_set
= single_set (out_insn
);
3340 if (reg_mentioned_p (SET_DEST (out_set
), SET_DEST (in_set
)))
/* PARALLEL OUT_INSN: every member SET must avoid the store address;
   CLOBBERs are ignored.  */
3348 out_pat
= PATTERN (out_insn
);
3349 gcc_assert (GET_CODE (out_pat
) == PARALLEL
);
3351 for (i
= 0; i
< XVECLEN (out_pat
, 0); i
++)
3353 rtx exp
= XVECEXP (out_pat
, 0, i
);
3355 if (GET_CODE (exp
) == CLOBBER
)
3358 gcc_assert (GET_CODE (exp
) == SET
);
3360 if (reg_mentioned_p (SET_DEST (exp
), SET_DEST (in_set
)))
3368 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3369 condition, and not the THEN or ELSE branch. OUT_INSN may be either a single
3370 or multiple set; IN_INSN should be single_set for truth, but for convenience
3371 of insn categorization may be any JUMP or CALL insn. */
3374 if_test_bypass_p (rtx out_insn
, rtx in_insn
)
3376 rtx out_set
, in_set
;
3378 in_set
= single_set (in_insn
);
3381 gcc_assert (JUMP_P (in_insn
) || CALL_P (in_insn
));
3385 if (GET_CODE (SET_SRC (in_set
)) != IF_THEN_ELSE
)
3387 in_set
= SET_SRC (in_set
);
3389 out_set
= single_set (out_insn
);
3392 if (reg_mentioned_p (SET_DEST (out_set
), XEXP (in_set
, 1))
3393 || reg_mentioned_p (SET_DEST (out_set
), XEXP (in_set
, 2)))
3401 out_pat
= PATTERN (out_insn
);
3402 gcc_assert (GET_CODE (out_pat
) == PARALLEL
);
3404 for (i
= 0; i
< XVECLEN (out_pat
, 0); i
++)
3406 rtx exp
= XVECEXP (out_pat
, 0, i
);
3408 if (GET_CODE (exp
) == CLOBBER
)
3411 gcc_assert (GET_CODE (exp
) == SET
);
3413 if (reg_mentioned_p (SET_DEST (out_set
), XEXP (in_set
, 1))
3414 || reg_mentioned_p (SET_DEST (out_set
), XEXP (in_set
, 2)))