/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "addresses.h"
#include "basic-block.h"
#include "tree-pass.h"
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
#ifndef HAVE_ATTR_enabled
static inline bool
get_attr_enabled (rtx insn ATTRIBUTE_UNUSED)
{
  return true;
}
#endif
static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx, bool);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx);
/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in regclass.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data recog_data;
/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];
/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;
/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;
/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = XALLOCAVEC (rtx, noperands);
  constraints = XALLOCAVEC (const char *, noperands);

  decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
        c = constraints[c[0] - '0'];

      if (! asm_operand_ok (operands[i], c))
        return 0;
    }

  return 1;
}
/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
  bool unshare;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;
/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW_RTX will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

static bool
validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group,
                   bool unshare)
{
  rtx old = *loc;

  if (old == new_rtx || rtx_equal_p (old, new_rtx))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new_rtx;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes = XRESIZEVEC (change_t, changes, changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  changes[num_changes].unshare = unshare;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
/* Wrapper for validate_change_1, with the UNSHARE argument defaulting
   to false.  */

bool
validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, false);
}
/* Wrapper for validate_change_1, with the UNSHARE argument defaulting
   to true.  */

bool
validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, true);
}
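
/* A minimal usage sketch (hypothetical caller code, not from the original
   sources): queue two dependent edits on one insn with IN_GROUP set, then
   let apply_change_group accept both or roll both back atomically.

       rtx set = single_set (insn);
       validate_change (insn, &SET_SRC (set), new_src, 1);
       validate_change (insn, &SET_DEST (set), new_dest, 1);
       if (! apply_change_group ())
         ;  (both edits were undone; the insn is unchanged)

   NEW_SRC and NEW_DEST stand for replacement rtxes built by the caller.  */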
/* Keep X canonicalized if some changes have made it non-canonical; only
   modifies the operands of X, not (for example) its code.  Simplifications
   are not the job of this routine.

   Return true if anything was changed.  */
bool
canonicalize_change_group (rtx insn, rtx x)
{
  if (COMMUTATIVE_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      /* Oops, the caller has made X no longer canonical.
         Let's redo the changes in the correct order.  */
      rtx tem = XEXP (x, 0);
      validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_change (insn, &XEXP (x, 1), tem, 1);
      return true;
    }
  else
    return false;
}
/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.  */

int
insn_invalid_p (rtx insn)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}
/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}
/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (MEM_P (object))
        {
          if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
            break;
        }
      else if (insn_invalid_p (object))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  return (i == num_changes);
}
/* A group of changes has previously been issued with validate_change
   and verified with verify_changes.  Call df_insn_rescan for each of
   the insns changed and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  rtx last_object = NULL;

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (changes[i].unshare)
        *changes[i].loc = copy_rtx (*changes[i].loc);

      /* Avoid unnecessary rescanning when multiple changes to same instruction
         are made.  */
      if (object)
        {
          if (object != last_object && last_object && INSN_P (last_object))
            df_insn_rescan (last_object);
          last_object = object;
        }
    }

  if (last_object && INSN_P (last_object))
    df_insn_rescan (last_object);
  num_changes = 0;
}
/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}
/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}
/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
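
/* Usage sketch (hypothetical caller code, not from the original sources):
   a pass can checkpoint the current group, try one more change, and
   retract only the tail of the group if it does not validate.

       int checkpoint = num_validated_changes ();
       validate_change (insn, loc, new_rtx, 1);
       if (! verify_changes (checkpoint))
         cancel_changes (checkpoint);

   Earlier, already-verified changes in the group are left in place.  */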
/* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
   rtx.  */

static void
simplify_while_replacing (rtx *loc, rtx to, rtx object,
                          enum machine_mode op0_mode)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx new_rtx;

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_unshare_change (object, loc,
                               gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
                                               : swap_condition (code),
                                               GET_MODE (x), XEXP (x, 1),
                                               XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                        op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new_rtx)
            new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new_rtx, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                                 SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new_rtx)
        validate_change (object, loc, new_rtx, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && GET_CODE (XEXP (x, 2)) == CONST_INT
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extzv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}
/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object,
                        bool simplify)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_unshare_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     done by regmove, so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        {
          if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
              && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
            {
              /* Verify that operands are really shared.  */
              gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
                          == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
                                                              (x, 0, j))));
              validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
                                      from, to, object, simplify);
            }
          else
            validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
                                    simplify);
        }
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
        else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
                                    simplify);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */
  if (simplify)
    simplify_while_replacing (loc, to, object, op0_mode);
}
/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  return apply_change_group ();
}
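
/* Usage sketch (hypothetical pseudo registers, not from the original
   sources): rewrite every use of one register and keep the result only
   if the insn still matches some pattern.

       if (validate_replace_rtx (old_reg, new_reg, insn))
         ;  (every occurrence of old_reg in insn now reads new_reg)

   On failure, apply_change_group has already rolled everything back.  */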
/* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
   is a part of INSN.  After all changes have been made, validate by seeing if
   INSN is still valid.
   validate_replace_rtx (from, to, insn) is equivalent to
   validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */

int
validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, true);
  return apply_change_group ();
}
/* Same as above, but do not simplify rtx afterwards.  */
int
validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
                                      rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, false);
  return apply_change_group ();
}
/* Try replacing every occurrence of FROM in INSN with TO.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
}
/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx insn;			/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
}
/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}
/* Try to simplify INSN.
   Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
   pattern and return true if something was simplified.  */

bool
validate_simplify_insn (rtx insn)
{
  int i;
  rtx pat = NULL;
  rtx newpat = NULL;

  pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    {
      newpat = simplify_rtx (SET_SRC (pat));
      if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
        validate_change (insn, &SET_SRC (pat), newpat, 1);
      newpat = simplify_rtx (SET_DEST (pat));
      if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
        validate_change (insn, &SET_DEST (pat), newpat, 1);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
        rtx s = XVECEXP (pat, 0, i);

        if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
          {
            newpat = simplify_rtx (SET_SRC (s));
            if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
              validate_change (insn, &SET_SRC (s), newpat, 1);
            newpat = simplify_rtx (SET_DEST (s));
            if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
              validate_change (insn, &SET_DEST (s), newpat, 1);
          }
      }
  return ((num_changes_pending () > 0) && (apply_change_group () > 0));
}
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
          && ! inequality_comparisons_p (PATTERN (next)));
}
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it might have.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
            && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs.
         However, we must allow them after reload so that they can
         get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
          && MEM_P (sub))
        return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      /* Use the mem's mode, since it will be reloaded thus.  */
      if (memory_address_p (GET_MODE (op), y))
        return 1;
    }

  return 0;
}
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}
/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (sub))
        return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
        return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}
/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (REG_P (op)
              && REGNO (op) < FIRST_PSEUDO_REGISTER));
}
/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
          && LEGITIMATE_CONSTANT_P (op));
}
/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) != CONST_INT)
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}
/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}
/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}
/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
         if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
          && GET_MODE_CLASS (mode) != MODE_INT
          && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
        return 0;

      if (GET_CODE (op) == CONST_INT
          && mode != VOIDmode
          && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
        return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
               || mode == VOIDmode)
              && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
              && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (SUBREG_REG (op)))
        return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
#ifdef STACK_GROWS_DOWNWARD
          || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
          || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
          )
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
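
/* Illustrative forms (not from the original sources): on a
   STACK_GROWS_DOWNWARD target without padding, a push of an SImode value
   looks like (mem:SI (pre_dec:SI (reg sp))); when PUSH_ROUNDING pads a
   QImode push out to 4 bytes, the accepted form is instead
   (mem:QI (pre_modify:SI (reg sp) (plus:SI (reg sp) (const_int -4)))).  */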
/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}
/* Return 1 if ADDR is a valid memory address for mode MODE.  */

int
memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx addr)
{
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
}
/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, enum machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}
/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}
/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && COMPARISON_P (op));
}
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (const_rtx body)
{
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* No output operands: return number of input operands.  */
      return ASM_OPERANDS_INPUT_LENGTH (body);
    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
        /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
        return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
      else
        return -1;
    case PARALLEL:
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET
          && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
          int i;
          int n_sets;

          /* Count backwards through CLOBBERs to determine number of SETs.  */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands.  */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked).  */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original insns
                 then they aren't allowed together.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
                return -1;
            }
          return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
                  + n_sets);
        }
      else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...].  */
          int i;

          /* Make sure all the other parallel things really are clobbers.  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;

          return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
        }
      else
        return -1;
    default:
      return -1;
    }
}
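
/* Illustrative counts (not from the original sources): a bare
   (asm_operands ...) with two inputs yields 2; (set (reg) (asm_operands
   ...)) with two inputs yields 3; a PARALLEL of two such SETs plus
   clobbers yields the number of inputs plus 2.  */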
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
                     const char **constraints, enum machine_mode *modes,
                     location_t *loc)
{
  int i;
  int noperands;
  rtx asmop = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      for (i = 1; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
        }

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      asmop = body;
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.  */
      /* Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
    {
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin;
      int nout = 0;		/* Does not include CLOBBERs.  */

      asmop = SET_SRC (XVECEXP (body, 0, 0));
      nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
         Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
        {
          if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
            break;		/* Past last SET */

          if (operands)
            operands[i] = SET_DEST (XVECEXP (body, 0, i));
          if (operand_locs)
            operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
          if (constraints)
            constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
          if (modes)
            modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
          nout++;
        }

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs.  */

      int nin;

      asmop = XVECEXP (body, 0, 0);
      nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
    }

  if (loc)
    *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);

  return ASM_OPERANDS_TEMPLATE (asmop);
}
/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint)
{
  int result = 0;

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  while (*constraint)
    {
      char c = *constraint;
      int len;
      switch (c)
        {
        case ',':
          constraint++;
          continue;
        case '=':
        case '+':
        case '*':
        case '%':
        case '!':
        case '#':
        case '&':
        case '?':
          break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          /* For best results, our caller should have given us the
             proper matching constraint, but we can't actually fail
             the check if they didn't.  Indicate that results are
             inconclusive.  */
          do
            constraint++;
          while (ISDIGIT (*constraint));
          if (! result)
            result = -1;
          continue;

        case 'p':
          if (address_operand (op, VOIDmode))
            result = 1;
          break;

        case TARGET_MEM_CONSTRAINT:
        case 'V': /* non-offsettable */
          if (memory_operand (op, VOIDmode))
            result = 1;
          break;

        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            result = 1;
          break;

        case '<':
          /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed to exist,
             excepting those that expand_call created.  Further, on some
             machines which do not have generalized auto inc/dec, an inc/dec
             is not a memory_operand.

             Match any memory and hope things are resolved after reload.  */

          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_DEC
                  || GET_CODE (XEXP (op, 0)) == POST_DEC))
            result = 1;
          break;

        case '>':
          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_INC
                  || GET_CODE (XEXP (op, 0)) == POST_INC))
            result = 1;
          break;

        case 'E':
        case 'F':
          if (GET_CODE (op) == CONST_DOUBLE
              || (GET_CODE (op) == CONST_VECTOR
                  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
            result = 1;
          break;

        case 'G':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
            result = 1;
          break;
        case 'H':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
            result = 1;
          break;

        case 's':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            break;
          /* Fall through.  */

        case 'i':
          if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
            result = 1;
          break;

        case 'n':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            result = 1;
          break;

        case 'I':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
            result = 1;
          break;
        case 'J':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
            result = 1;
          break;
        case 'K':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
            result = 1;
          break;
        case 'L':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
            result = 1;
          break;
        case 'M':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
            result = 1;
          break;
        case 'N':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
            result = 1;
          break;
        case 'O':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
            result = 1;
          break;
        case 'P':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
            result = 1;
          break;

        case 'X':
          result = 1;
          break;

        case 'g':
          if (general_operand (op, VOIDmode))
            result = 1;
          break;

        default:
          /* For all other letters, we first check for a register class,
             otherwise it is an EXTRA_CONSTRAINT.  */
          if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
            {
            case 'r':
              if (GET_MODE (op) == BLKmode)
                break;
              if (register_operand (op, VOIDmode))
                result = 1;
            }
#ifdef EXTRA_CONSTRAINT_STR
          else if (EXTRA_MEMORY_CONSTRAINT (c, constraint))
            /* Every memory operand can be reloaded to fit.  */
            result = result || memory_operand (op, VOIDmode);
          else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint))
            /* Every address operand can be reloaded to fit.  */
            result = result || address_operand (op, VOIDmode);
          else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
            result = 1;
#endif
          break;
        }
      len = CONSTRAINT_LEN (c, constraint);
      do
        constraint++;
      while (--len && *constraint);
      if (len)
        return 0;
    }

  return result;
}
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

static rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
        return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
        return tem;
    }

  return 0;
}
/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
}
/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid after the addition of a positive integer
   less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_p (int strictp, enum machine_mode mode, rtx y)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) (enum machine_mode, rtx) =
    (strictp ? strict_memory_address_p : memory_address_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y))
    return 0;

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (*y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
         for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
    z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
                        plus_constant (XEXP (y, 1), mode_sz - 1));
  else
    z = plus_constant (y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z);
}
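
/* Illustrative cases (not from the original sources): for an SImode
   reference, (plus (reg) (const_int 8)) is offsettable when
   (plus (reg) (const_int 11)) is still a valid QImode address (the
   mode_sz - 1 = 3 probe above), while any autoincrement address is
   rejected outright.  */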
/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */

int
mode_dependent_address_p (rtx addr)
{
  /* Auto-increment addressing with anything other than post_modify
     or pre_modify always introduces a mode dependency.  Catch such
     cases now instead of deferring to the target.  */
  if (GET_CODE (addr) == PRE_INC
      || GET_CODE (addr) == POST_INC
      || GET_CODE (addr) == PRE_DEC
      || GET_CODE (addr) == POST_DEC)
    return 1;

  GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
  return 0;
  /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
 win: ATTRIBUTE_UNUSED_LABEL
  return 1;
}
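
/* For example (not from the original sources), (post_inc:SI (reg:SI 1))
   is mode-dependent, since the increment applied equals the size of the
   mode actually accessed, whereas (plus:SI (reg:SI 1) (const_int 4)) is
   normally mode-independent; targets flag further cases via
   GO_IF_MODE_DEPENDENT_ADDRESS.  */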
/* Like extract_insn, but save the insn extracted and don't extract again
   when called again for the same insn, expecting that recog_data still
   contains valid information.  This is used primarily by the gen_attr
   infrastructure, which often extracts the same insn repeatedly.  */
void
extract_insn_cached (rtx insn)
{
  if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
    return;
  extract_insn (insn);
  recog_data.insn = insn;
}

/* Do cached extract_insn, constrain_operands and complain about failures.
   Used by insn_attrtab.  */
void
extract_constrain_insn_cached (rtx insn)
{
  extract_insn_cached (insn);
  if (which_alternative == -1
      && !constrain_operands (reload_completed))
    fatal_insn_not_found (insn);
}

/* Do cached constrain_operands and complain about failures.  */
int
constrain_operands_cached (int strict)
{
  if (which_alternative == -1)
    return constrain_operands (strict);
  else
    return 1;
}
/* Analyze INSN and fill in recog_data.  */

void
extract_insn (rtx insn)
{
  int i;
  int icode;
  int noperands;
  rtx body = PATTERN (insn);

  recog_data.n_operands = 0;
  recog_data.n_alternatives = 0;
  recog_data.n_dups = 0;

  switch (GET_CODE (body))
    {
    case USE:
    case CLOBBER:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
        goto asm_insn;
      else
        goto normal_insn;
    case PARALLEL:
      if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
          || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
        goto asm_insn;
      else
        goto normal_insn;
    case ASM_OPERANDS:
    asm_insn:
      recog_data.n_operands = noperands = asm_noperands (body);
      if (noperands >= 0)
        {
          /* This insn is an `asm' with operands.  */

          /* expand_asm_operands makes sure there aren't too many operands.  */
          gcc_assert (noperands <= MAX_RECOG_OPERANDS);

          /* Now get the operand values and constraints out of the insn.  */
          decode_asm_operands (body, recog_data.operand,
                               recog_data.operand_loc,
                               recog_data.constraints,
                               recog_data.operand_mode, NULL);
          if (noperands > 0)
            {
              const char *p = recog_data.constraints[0];
              recog_data.n_alternatives = 1;
              while (*p)
                recog_data.n_alternatives += (*p++ == ',');
            }
          break;
        }
      fatal_insn_not_found (insn);

    default:
    normal_insn:
      /* Ordinary insn: recognize it, get the operands via insn_extract
         and get the constraints.  */

      icode = recog_memoized (insn);
      if (icode < 0)
        fatal_insn_not_found (insn);

      recog_data.n_operands = noperands = insn_data[icode].n_operands;
      recog_data.n_alternatives = insn_data[icode].n_alternatives;
      recog_data.n_dups = insn_data[icode].n_dups;

      insn_extract (insn);

      for (i = 0; i < noperands; i++)
        {
          recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
          recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
          /* VOIDmode match_operands gets mode from their real operand.  */
          if (recog_data.operand_mode[i] == VOIDmode)
            recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
        }
    }
  for (i = 0; i < noperands; i++)
    recog_data.operand_type[i]
      = (recog_data.constraints[i][0] == '=' ? OP_OUT
         : recog_data.constraints[i][0] == '+' ? OP_INOUT
         : OP_IN);

  gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);

  if (INSN_CODE (insn) < 0)
    for (i = 0; i < recog_data.n_alternatives; i++)
      recog_data.alternative_enabled_p[i] = true;
  else
    {
      recog_data.insn = insn;
      for (i = 0; i < recog_data.n_alternatives; i++)
        {
          which_alternative = i;
          recog_data.alternative_enabled_p[i] = get_attr_enabled (insn);
        }
    }

  recog_data.insn = NULL;
  which_alternative = -1;
}
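
/* Usage sketch (hypothetical pass code, not from the original sources):
   fill recog_data for an insn and then walk its operands.

       int i;
       extract_insn (insn);
       if (! constrain_operands (reload_completed))
         fatal_insn_not_found (insn);
       for (i = 0; i < recog_data.n_operands; i++)
         inspect (recog_data.operand[i]);   (inspect is hypothetical)
*/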
/* After calling extract_insn, you can use this function to extract some
   information from the constraint strings into a more usable form.
   The collected data is stored in recog_op_alt.  */
void
preprocess_constraints (void)
{
  int i;

  for (i = 0; i < recog_data.n_operands; i++)
    memset (recog_op_alt[i], 0, (recog_data.n_alternatives
                                 * sizeof (struct operand_alternative)));

  for (i = 0; i < recog_data.n_operands; i++)
    {
      int j;
      struct operand_alternative *op_alt;
      const char *p = recog_data.constraints[i];

      op_alt = recog_op_alt[i];

      for (j = 0; j < recog_data.n_alternatives; j++)
        {
          op_alt[j].cl = NO_REGS;
          op_alt[j].constraint = p;
          op_alt[j].matches = -1;
          op_alt[j].matched = -1;

          if (!recog_data.alternative_enabled_p[j])
            {
              p = skip_alternative (p);
              continue;
            }

          if (*p == '\0' || *p == ',')
            {
              op_alt[j].anything_ok = 1;
              continue;
            }

          for (;;)
            {
              char c = *p;
              if (c == '#')
                do
                  c = *++p;
                while (c != ',' && c != '\0');
              if (c == ',' || c == '\0')
                {
                  p++;
                  break;
                }

              switch (c)
                {
                case '=': case '+': case '*': case '%':
                case 'E': case 'F': case 'G': case 'H':
                case 's': case 'i': case 'n':
                case 'I': case 'J': case 'K': case 'L':
                case 'M': case 'N': case 'O': case 'P':
                  /* These don't say anything we care about.  */
                  break;

                case '?':
                  op_alt[j].reject += 6;
                  break;
                case '!':
                  op_alt[j].reject += 600;
                  break;
                case '&':
                  op_alt[j].earlyclobber = 1;
                  break;

                case '0': case '1': case '2': case '3': case '4':
                case '5': case '6': case '7': case '8': case '9':
                  {
                    char *end;
                    op_alt[j].matches = strtoul (p, &end, 10);
                    recog_op_alt[op_alt[j].matches][j].matched = i;
                    p = end;
                  }
                  continue;

                case TARGET_MEM_CONSTRAINT:
                  op_alt[j].memory_ok = 1;
                  break;
                case '<':
                  op_alt[j].decmem_ok = 1;
                  break;
                case '>':
                  op_alt[j].incmem_ok = 1;
                  break;
                case 'V':
                  op_alt[j].nonoffmem_ok = 1;
                  break;
                case 'o':
                  op_alt[j].offmem_ok = 1;
                  break;
                case 'X':
                  op_alt[j].anything_ok = 1;
                  break;

                case 'p':
                  op_alt[j].is_address = 1;
                  op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
                      [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
                  break;

                case 'g':
                case 'r':
                  op_alt[j].cl =
                   reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
                  break;

                default:
                  if (EXTRA_MEMORY_CONSTRAINT (c, p))
                    {
                      op_alt[j].memory_ok = 1;
                      break;
                    }
                  if (EXTRA_ADDRESS_CONSTRAINT (c, p))
                    {
                      op_alt[j].is_address = 1;
                      op_alt[j].cl
                        = (reg_class_subunion
                           [(int) op_alt[j].cl]
                           [(int) base_reg_class (VOIDmode, ADDRESS,
                                                  SCRATCH)]);
                      break;
                    }

                  op_alt[j].cl
                    = (reg_class_subunion
                       [(int) op_alt[j].cl]
                       [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
                  break;
                }
              p += CONSTRAINT_LEN (c, p);
            }
        }
    }
}
2211 /* Check the operands of an insn against the insn's operand constraints
2212 and return 1 if they are valid.
2213 The information about the insn's operands, constraints, operand modes
2214 etc. is obtained from the global variables set up by extract_insn.
2216 WHICH_ALTERNATIVE is set to a number which indicates which
2217 alternative of constraints was matched: 0 for the first alternative,
2218 1 for the next, etc.
2220 In addition, when two operands are required to match
2221 and it happens that the output operand is (reg) while the
2222 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2223 make the output operand look like the input.
2224 This is because the output operand is the one the template will print.
2226 This is used in final, just before printing the assembler code and by
2227 the routines that determine an insn's attribute.
2229 If STRICT is a positive nonzero value, it means that we have been
2230 called after reload has been completed. In that case, we must
2231 do all checks strictly. If it is zero, it means that we have been called
2232 before reload has completed. In that case, we first try to see if we can
2233 find an alternative that matches strictly. If not, we try again, this
2234 time assuming that reload will fix up the insn. This provides a "best
2235 guess" for the alternative and is used to compute attributes of insns prior
2236 to reload. A negative value of STRICT is used for this internal call. */
2244 constrain_operands (int strict
)
2246 const char *constraints
[MAX_RECOG_OPERANDS
];
2247 int matching_operands
[MAX_RECOG_OPERANDS
];
2248 int earlyclobber
[MAX_RECOG_OPERANDS
];
2251 struct funny_match funny_match
[MAX_RECOG_OPERANDS
];
2252 int funny_match_index
;
2254 which_alternative
= 0;
2255 if (recog_data
.n_operands
== 0 || recog_data
.n_alternatives
== 0)
2258 for (c
= 0; c
< recog_data
.n_operands
; c
++)
2260 constraints
[c
] = recog_data
.constraints
[c
];
2261 matching_operands
[c
] = -1;
2266 int seen_earlyclobber_at
= -1;
2269 funny_match_index
= 0;
2271 if (!recog_data
.alternative_enabled_p
[which_alternative
])
2275 for (i
= 0; i
< recog_data
.n_operands
; i
++)
2276 constraints
[i
] = skip_alternative (constraints
[i
]);
2278 which_alternative
++;
      for (opno = 0; opno < recog_data.n_operands; opno++)
	{
	  rtx op = recog_data.operand[opno];
	  enum machine_mode mode = GET_MODE (op);
	  const char *p = constraints[opno];
	  int offset = 0;
	  int win = 0;
	  int val;
	  int len;

	  earlyclobber[opno] = 0;

	  /* A unary operator may be accepted by the predicate, but it
	     is irrelevant for matching constraints.  */
	  if (UNARY_P (op))
	    op = XEXP (op, 0);

	  if (GET_CODE (op) == SUBREG)
	    {
	      if (REG_P (SUBREG_REG (op))
		  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
		offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
					      GET_MODE (SUBREG_REG (op)),
					      SUBREG_BYTE (op),
					      GET_MODE (op));
	      op = SUBREG_REG (op);
	    }

	  /* An empty constraint or empty alternative
	     allows anything which matched the pattern.  */
	  if (*p == 0 || *p == ',')
	    win = 1;

	  do
	    switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
	      {
	      case '\0':
		len = 0;
		break;
	      case ',':
		c = '\0';
		break;

	      case '?':  case '!': case '*':  case '%':
	      case '=':  case '+':
		break;

	      case '#':
		/* Ignore rest of this alternative as far as
		   constraint checking is concerned.  */
		do
		  p++;
		while (*p && *p != ',');
		len = 0;
		break;

	      case '&':
		earlyclobber[opno] = 1;
		if (seen_earlyclobber_at < 0)
		  seen_earlyclobber_at = opno;
		break;

	      case '0': case '1': case '2': case '3': case '4':
	      case '5': case '6': case '7': case '8': case '9':
		{
		  /* This operand must be the same as a previous one.
		     This kind of constraint is used for instructions such
		     as add when they take only two operands.

		     Note that the lower-numbered operand is passed first.

		     If we are not testing strictly, assume that this
		     constraint will be satisfied.  */

		  char *end;
		  int match;

		  match = strtoul (p, &end, 10);
		  p = end;
		  len = 0;

		  if (strict < 0)
		    val = 1;
		  else
		    {
		      rtx op1 = recog_data.operand[match];
		      rtx op2 = recog_data.operand[opno];

		      /* A unary operator may be accepted by the predicate,
			 but it is irrelevant for matching constraints.  */
		      if (UNARY_P (op1))
			op1 = XEXP (op1, 0);
		      if (UNARY_P (op2))
			op2 = XEXP (op2, 0);

		      val = operands_match_p (op1, op2);
		    }

		  matching_operands[opno] = match;
		  matching_operands[match] = opno;

		  if (val != 0)
		    win = 1;

		  /* If output is *x and input is *--x, arrange later
		     to change the output to *--x as well, since the
		     output op is the one that will be printed.  */
		  if (val == 2 && strict > 0)
		    {
		      funny_match[funny_match_index].this_op = opno;
		      funny_match[funny_match_index++].other = match;
		    }
		}
		break;
	      case 'p':
		/* p is used for address_operands.  When we are called by
		   gen_reload, no one will have checked that the address is
		   strictly valid, i.e., that all pseudos requiring hard regs
		   have gotten them.  */
		if (strict <= 0
		    || (strict_memory_address_p (recog_data.operand_mode[opno],
						 op)))
		  win = 1;
		break;

		/* No need to check general_operand again;
		   it was done in insn-recog.c.  Well, except that reload
		   doesn't check the validity of its replacements, but
		   that should only matter when there's a bug.  */
	      case 'g':
		/* Anything goes unless it is a REG and really has a hard reg
		   but the hard reg is not in the class GENERAL_REGS.  */
		if (strict > 0)
		  {
		    if (!REG_P (op)
			|| GENERAL_REGS == ALL_REGS
			|| (reload_in_progress
			    && REGNO (op) >= FIRST_PSEUDO_REGISTER)
			|| reg_fits_class_p (op, GENERAL_REGS, offset, mode))
		      win = 1;
		  }
		else if (strict < 0 || general_operand (op, mode))
		  win = 1;
		break;

	      case 'X':
		/* This is used for a MATCH_SCRATCH in the cases when
		   we don't actually need anything.  So anything goes
		   any time.  */
		win = 1;
		break;

	      case TARGET_MEM_CONSTRAINT:
		/* Memory operands must be valid, to the extent
		   required by STRICT.  */
		if (MEM_P (op))
		  {
		    if (strict > 0
			&& !strict_memory_address_p (GET_MODE (op),
						     XEXP (op, 0)))
		      break;
		    if (strict == 0
			&& !memory_address_p (GET_MODE (op), XEXP (op, 0)))
		      break;
		    win = 1;
		  }
		/* Before reload, accept what reload can turn into mem.  */
		else if (strict < 0 && CONSTANT_P (op))
		  win = 1;
		/* During reload, accept a pseudo  */
		else if (reload_in_progress && REG_P (op)
			 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
		  win = 1;
		break;

	      case '<':
		if (MEM_P (op)
		    && (GET_CODE (XEXP (op, 0)) == PRE_DEC
			|| GET_CODE (XEXP (op, 0)) == POST_DEC))
		  win = 1;
		break;

	      case '>':
		if (MEM_P (op)
		    && (GET_CODE (XEXP (op, 0)) == PRE_INC
			|| GET_CODE (XEXP (op, 0)) == POST_INC))
		  win = 1;
		break;
	      case 'E':
	      case 'F':
		if (GET_CODE (op) == CONST_DOUBLE
		    || (GET_CODE (op) == CONST_VECTOR
			&& GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
		  win = 1;
		break;

	      case 'G':
	      case 'H':
		if (GET_CODE (op) == CONST_DOUBLE
		    && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
		  win = 1;
		break;

	      case 's':
		if (GET_CODE (op) == CONST_INT
		    || (GET_CODE (op) == CONST_DOUBLE
			&& GET_MODE (op) == VOIDmode))
		  break;
	      case 'i':
		if (CONSTANT_P (op))
		  win = 1;
		break;

	      case 'n':
		if (GET_CODE (op) == CONST_INT
		    || (GET_CODE (op) == CONST_DOUBLE
			&& GET_MODE (op) == VOIDmode))
		  win = 1;
		break;

	      case 'I': case 'J': case 'K': case 'L':
	      case 'M': case 'N': case 'O': case 'P':
		if (GET_CODE (op) == CONST_INT
		    && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
		  win = 1;
		break;
	      case 'V':
		if (MEM_P (op)
		    && ((strict > 0 && ! offsettable_memref_p (op))
			|| (strict < 0
			    && !(CONSTANT_P (op) || MEM_P (op)))
			|| (reload_in_progress
			    && !(REG_P (op)
				 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
		  win = 1;
		break;

	      case 'o':
		if ((strict > 0 && offsettable_memref_p (op))
		    || (strict == 0 && offsettable_nonstrict_memref_p (op))
		    /* Before reload, accept what reload can handle.  */
		    || (strict < 0
			&& (CONSTANT_P (op) || MEM_P (op)))
		    /* During reload, accept a pseudo  */
		    || (reload_in_progress && REG_P (op)
			&& REGNO (op) >= FIRST_PSEUDO_REGISTER))
		  win = 1;
		break;

	      default:
		{
		  enum reg_class cl;

		  cl = (c == 'r'
			   ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
		  if (cl != NO_REGS)
		    {
		      if (strict < 0
			  || (strict == 0
			      && REG_P (op)
			      && REGNO (op) >= FIRST_PSEUDO_REGISTER)
			  || (strict == 0 && GET_CODE (op) == SCRATCH)
			  || (REG_P (op)
			      && reg_fits_class_p (op, cl, offset, mode)))
			win = 1;
		    }
#ifdef EXTRA_CONSTRAINT_STR
		  else if (EXTRA_CONSTRAINT_STR (op, c, p))
		    win = 1;

		  else if (EXTRA_MEMORY_CONSTRAINT (c, p)
			   /* Every memory operand can be reloaded to fit.  */
			   && ((strict < 0 && MEM_P (op))
			       /* Before reload, accept what reload can turn
				  into mem.  */
			       || (strict < 0 && CONSTANT_P (op))
			       /* During reload, accept a pseudo  */
			       || (reload_in_progress && REG_P (op)
				   && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
		    win = 1;
		  else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
			   /* Every address operand can be reloaded to fit.  */
			   && strict < 0)
		    win = 1;
#endif
		  break;
		}
	      }
	  while (p += len, c);
	  constraints[opno] = p;
	  /* If this operand did not win somehow,
	     this alternative loses.  */
	  if (! win)
	    lose = 1;
	}
      /* This alternative won; the operands are ok.
	 Change whichever operands this alternative says to change.  */
      if (! lose)
	{
	  int opno, eopno;

	  /* See if any earlyclobber operand conflicts with some other
	     operand.  */

	  if (strict > 0 && seen_earlyclobber_at >= 0)
	    for (eopno = seen_earlyclobber_at;
		 eopno < recog_data.n_operands;
		 eopno++)
	      /* Ignore earlyclobber operands now in memory,
		 because we would often report failure when we have
		 two memory operands, one of which was formerly a REG.  */
	      if (earlyclobber[eopno]
		  && REG_P (recog_data.operand[eopno]))
		for (opno = 0; opno < recog_data.n_operands; opno++)
		  if ((MEM_P (recog_data.operand[opno])
		       || recog_data.operand_type[opno] != OP_OUT)
		      && opno != eopno
		      /* Ignore things like match_operator operands.  */
		      && *recog_data.constraints[opno] != 0
		      && ! (matching_operands[opno] == eopno
			    && operands_match_p (recog_data.operand[opno],
						 recog_data.operand[eopno]))
		      && ! safe_from_earlyclobber (recog_data.operand[opno],
						   recog_data.operand[eopno]))
		    lose = 1;

	  if (! lose)
	    {
	      while (--funny_match_index >= 0)
		{
		  recog_data.operand[funny_match[funny_match_index].other]
		    = recog_data.operand[funny_match[funny_match_index].this_op];
		}

	      return 1;
	    }
	}

      which_alternative++;
    }
  while (which_alternative < recog_data.n_alternatives);

  which_alternative = -1;
  /* If we are about to reject this, but we are not to test strictly,
     try a very loose test.  Only return failure if it fails also.  */
  if (strict == 0)
    return constrain_operands (-1);
  else
    return 0;
}
/* Return 1 iff OPERAND (assumed to be a REG rtx)
   is a hard reg in class CLASS when its regno is offset by OFFSET
   and changed to mode MODE.
   If REG occupies multiple hard regs, all of them must be in CLASS.  */

int
reg_fits_class_p (rtx operand, enum reg_class cl, int offset,
		  enum machine_mode mode)
{
  int regno = REGNO (operand);

  if (cl == NO_REGS)
    return 0;

  return (regno < FIRST_PSEUDO_REGISTER
	  && in_hard_reg_set_p (reg_class_contents[(int) cl],
				mode, regno + offset));
}
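
/* Illustrative example (a sketch): on a target whose hard register 0
   belongs to GENERAL_REGS, one would expect

     reg_fits_class_p (gen_rtx_REG (SImode, 0), GENERAL_REGS, 0, SImode)

   to return 1, while any pseudo (REGNO >= FIRST_PSEUDO_REGISTER) yields 0
   no matter what class is asked about.  */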
/* Split single instruction.  Helper function for split_all_insns and
   split_all_insns_noflow.  Return last insn in the sequence if successful,
   or NULL if unsuccessful.  */

static rtx
split_insn (rtx insn)
{
  /* Split insns here to get max fine-grain parallelism.  */
  rtx first = PREV_INSN (insn);
  rtx last = try_split (PATTERN (insn), insn, 1);
  rtx insn_set, last_set, note;

  if (last == insn)
    return NULL_RTX;

  /* If the original instruction was a single set that was known to be
     equivalent to a constant, see if we can say the same about the last
     instruction in the split sequence.  The two instructions must set
     the same destination.  */
  insn_set = single_set (insn);
  if (insn_set)
    {
      last_set = single_set (last);
      if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
	{
	  note = find_reg_equal_equiv_note (insn);
	  if (note && CONSTANT_P (XEXP (note, 0)))
	    set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
	  else if (CONSTANT_P (SET_SRC (insn_set)))
	    set_unique_reg_note (last, REG_EQUAL, SET_SRC (insn_set));
	}
    }

  /* try_split returns the NOTE that INSN became.  */
  SET_INSN_DELETED (insn);

  /* ??? Coddle to md files that generate subregs in post-reload
     splitters instead of computing the proper hard register.  */
  if (reload_completed && first != last)
    {
      first = NEXT_INSN (first);
      while (1)
	{
	  if (INSN_P (first))
	    cleanup_subreg_operands (first);
	  if (first == last)
	    break;
	  first = NEXT_INSN (first);
	}
    }

  return last;
}
/* Split all insns in the function.  */

void
split_all_insns (void)
{
  sbitmap blocks;
  bool changed;
  basic_block bb;

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (blocks);
  changed = false;

  FOR_EACH_BB_REVERSE (bb)
    {
      rtx insn, next;
      bool finish = false;

      rtl_profile_for_bb (bb);
      for (insn = BB_HEAD (bb); !finish ; insn = next)
	{
	  /* Can't use `next_real_insn' because that might go across
	     CODE_LABELS and short-out basic blocks.  */
	  next = NEXT_INSN (insn);
	  finish = (insn == BB_END (bb));
	  if (INSN_P (insn))
	    {
	      rtx set = single_set (insn);

	      /* Don't split no-op move insns.  These should silently
		 disappear later in final.  Splitting such insns would
		 break the code that handles LIBCALL blocks.  */
	      if (set && set_noop_p (set))
		{
		  /* Nops get in the way while scheduling, so delete them
		     now if register allocation has already been done.  It
		     is too risky to try to do this before register
		     allocation, and there are unlikely to be very many
		     nops then anyways.  */
		  if (reload_completed)
		    delete_insn_and_edges (insn);
		}
	      else
		{
		  rtx last = split_insn (insn);
		  if (last)
		    {
		      /* The split sequence may include barrier, but the
			 BB boundary we are interested in will be set to
			 previous one.  */
		      while (BARRIER_P (last))
			last = PREV_INSN (last);
		      SET_BIT (blocks, bb->index);
		      changed = true;
		    }
		}
	    }
	}
    }

  default_rtl_profile ();
  if (changed)
    find_many_sub_basic_blocks (blocks);

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  sbitmap_free (blocks);
}
/* Same as split_all_insns, but do not expect CFG to be available.
   Used by machine dependent reorg passes.  */

unsigned int
split_all_insns_noflow (void)
{
  rtx next, insn;

  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (INSN_P (insn))
	{
	  /* Don't split no-op move insns.  These should silently
	     disappear later in final.  Splitting such insns would
	     break the code that handles LIBCALL blocks.  */
	  rtx set = single_set (insn);
	  if (set && set_noop_p (set))
	    {
	      /* Nops get in the way while scheduling, so delete them
		 now if register allocation has already been done.  It
		 is too risky to try to do this before register
		 allocation, and there are unlikely to be very many
		 nops then anyways.

		 ??? Should we use delete_insn when the CFG isn't valid?  */
	      if (reload_completed)
		delete_insn_and_edges (insn);
	    }
	  else
	    split_insn (insn);
	}
    }
  return 0;
}
#ifdef HAVE_peephole2

struct peep2_insn_data
{
  rtx insn;
  regset live_before;
};

static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
static int peep2_current;

/* The number of instructions available to match a peep2.  */
int peep2_current_count;

/* A non-insn marker indicating the last insn of the block.
   The live_before regset for this element is correct, indicating
   DF_LIVE_OUT for the block.  */
#define PEEP2_EOB	pc_rtx
/* Return the Nth non-note insn after `current', or return NULL_RTX if it
   does not exist.  Used by the recognizer to find the next insn to match
   in a multi-insn pattern.  */

rtx
peep2_next_insn (int n)
{
  gcc_assert (n <= peep2_current_count);

  n += peep2_current;
  if (n >= MAX_INSNS_PER_PEEP2 + 1)
    n -= MAX_INSNS_PER_PEEP2 + 1;

  return peep2_insn_data[n].insn;
}
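
/* Worked example (a sketch): if MAX_INSNS_PER_PEEP2 were 5, the ring has
   six slots.  With peep2_current == 4, peep2_next_insn (3) computes
   4 + 3 = 7, wraps to 7 - 6 = 1, and returns peep2_insn_data[1].insn.  */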
/* Return true if REGNO is dead before the Nth non-note insn
   after `current'.  */

int
peep2_regno_dead_p (int ofs, int regno)
{
  gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);

  ofs += peep2_current;
  if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
    ofs -= MAX_INSNS_PER_PEEP2 + 1;

  gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);

  return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
}
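
/* Illustrative use (a sketch of an .md file, not C): a define_peephole2
   condition may require that a hard register be dead after the matched
   window, e.g.

     (define_peephole2
       [...]
       "peep2_regno_dead_p (1, FLAGS_REG)"
       [...])

   where FLAGS_REG is a target-specific hard register number.  */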
/* Similarly for a REG.  */

int
peep2_reg_dead_p (int ofs, rtx reg)
{
  int regno, n;

  gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);

  ofs += peep2_current;
  if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
    ofs -= MAX_INSNS_PER_PEEP2 + 1;

  gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);

  regno = REGNO (reg);
  n = hard_regno_nregs[regno][GET_MODE (reg)];
  while (--n >= 0)
    if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
      return 0;
  return 1;
}
/* Try to find a hard register of mode MODE, matching the register class in
   CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
   remains available until the end of LAST_INSN.  LAST_INSN may be NULL_RTX,
   in which case the only condition is that the register must be available
   before CURRENT_INSN.

   Registers that already have bits set in REG_SET will not be considered.

   If an appropriate register is available, it will be returned and the
   corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
   returned.  */

rtx
peep2_find_free_register (int from, int to, const char *class_str,
			  enum machine_mode mode, HARD_REG_SET *reg_set)
{
  static int search_ofs;
  enum reg_class cl;
  HARD_REG_SET live;
  int i;

  gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
  gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);

  from += peep2_current;
  if (from >= MAX_INSNS_PER_PEEP2 + 1)
    from -= MAX_INSNS_PER_PEEP2 + 1;
  to += peep2_current;
  if (to >= MAX_INSNS_PER_PEEP2 + 1)
    to -= MAX_INSNS_PER_PEEP2 + 1;

  gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
  REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);

  while (from != to)
    {
      HARD_REG_SET this_live;

      if (++from >= MAX_INSNS_PER_PEEP2 + 1)
	from = 0;
      gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
      REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
      IOR_HARD_REG_SET (live, this_live);
    }

  cl = (class_str[0] == 'r' ? GENERAL_REGS
	   : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int raw_regno, regno, success, j;

      /* Distribute the free registers as much as possible.  */
      raw_regno = search_ofs + i;
      if (raw_regno >= FIRST_PSEUDO_REGISTER)
	raw_regno -= FIRST_PSEUDO_REGISTER;
#ifdef REG_ALLOC_ORDER
      regno = reg_alloc_order[raw_regno];
#else
      regno = raw_regno;
#endif

      /* Don't allocate fixed registers.  */
      if (fixed_regs[regno])
	continue;
      /* Don't allocate global registers.  */
      if (global_regs[regno])
	continue;
      /* Make sure the register is of the right class.  */
      if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno))
	continue;
      /* And can support the mode we need.  */
      if (! HARD_REGNO_MODE_OK (regno, mode))
	continue;
      /* And that we don't create an extra save/restore.  */
      if (! call_used_regs[regno] && ! df_regs_ever_live_p (regno))
	continue;
      if (! targetm.hard_regno_scratch_ok (regno))
	continue;

      /* And we don't clobber traceback for noreturn functions.  */
      if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
	  && (! reload_completed || frame_pointer_needed))
	continue;

      success = 1;
      for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
	{
	  if (TEST_HARD_REG_BIT (*reg_set, regno + j)
	      || TEST_HARD_REG_BIT (live, regno + j))
	    {
	      success = 0;
	      break;
	    }
	}
      if (success)
	{
	  add_to_hard_reg_set (reg_set, mode, regno);

	  /* Start the next search with the next register.  */
	  if (++raw_regno >= FIRST_PSEUDO_REGISTER)
	    raw_regno = 0;
	  search_ofs = raw_regno;

	  return gen_rtx_REG (mode, regno);
	}
    }

  search_ofs = 0;
  return NULL_RTX;
}
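
/* Illustrative use (a sketch): the code generated for a define_peephole2
   containing (match_scratch:SI 2 "r") is assumed to allocate its scratch
   roughly as

     operands[2] = peep2_find_free_register (0, 1, "r", SImode,
					      &_regs_allocated);

   rejecting the match when NULL_RTX comes back.  */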
/* Perform the peephole2 optimization pass.  */

static void
peephole2_optimize (void)
{
  rtx insn, prev;
  bitmap live;
  int i;
  basic_block bb;
  bool do_cleanup_cfg = false;
  bool do_rebuild_jump_labels = false;

  df_set_flags (DF_LR_RUN_DCE);
  df_analyze ();

  /* Initialize the regsets we're going to use.  */
  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
  live = BITMAP_ALLOC (&reg_obstack);

  FOR_EACH_BB_REVERSE (bb)
    {
      rtl_profile_for_bb (bb);

      /* Indicate that all slots except the last holds invalid data.  */
      for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
	peep2_insn_data[i].insn = NULL_RTX;
      peep2_current_count = 0;

      /* Indicate that the last slot contains live_after data.  */
      peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
      peep2_current = MAX_INSNS_PER_PEEP2;

      /* Start up propagation.  */
      bitmap_copy (live, DF_LR_OUT (bb));
      df_simulate_artificial_refs_at_end (bb, live);
      bitmap_copy (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
      for (insn = BB_END (bb); ; insn = prev)
	{
	  prev = PREV_INSN (insn);
	  if (INSN_P (insn))
	    {
	      rtx attempt, before_try, x;
	      int match_len;
	      rtx note;
	      bool was_call = false;

	      /* Record this insn.  */
	      if (--peep2_current < 0)
		peep2_current = MAX_INSNS_PER_PEEP2;
	      if (peep2_current_count < MAX_INSNS_PER_PEEP2
		  && peep2_insn_data[peep2_current].insn == NULL_RTX)
		peep2_current_count++;
	      peep2_insn_data[peep2_current].insn = insn;
	      df_simulate_one_insn (bb, insn, live);
	      COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);

	      if (RTX_FRAME_RELATED_P (insn))
		{
		  /* If an insn has RTX_FRAME_RELATED_P set, peephole
		     substitution would lose the
		     REG_FRAME_RELATED_EXPR that is attached.  */
		  peep2_current_count = 0;
		  attempt = NULL;
		}
	      else
		/* Match the peephole.  */
		attempt = peephole2_insns (PATTERN (insn), insn, &match_len);

	      if (attempt != NULL)
		{
		  /* If we are splitting a CALL_INSN, look for the CALL_INSN
		     in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
		     cfg-related call notes.  */
		  for (i = 0; i <= match_len; ++i)
		    {
		      int j;
		      rtx old_insn, new_insn, note;

		      j = i + peep2_current;
		      if (j >= MAX_INSNS_PER_PEEP2 + 1)
			j -= MAX_INSNS_PER_PEEP2 + 1;
		      old_insn = peep2_insn_data[j].insn;
		      if (!CALL_P (old_insn))
			continue;
		      was_call = true;

		      new_insn = attempt;
		      while (new_insn != NULL_RTX)
			{
			  if (CALL_P (new_insn))
			    break;
			  new_insn = NEXT_INSN (new_insn);
			}

		      gcc_assert (new_insn != NULL_RTX);

		      CALL_INSN_FUNCTION_USAGE (new_insn)
			= CALL_INSN_FUNCTION_USAGE (old_insn);

		      for (note = REG_NOTES (old_insn);
			   note;
			   note = XEXP (note, 1))
			switch (REG_NOTE_KIND (note))
			  {
			  case REG_NORETURN:
			  case REG_SETJMP:
			    add_reg_note (new_insn, REG_NOTE_KIND (note),
					  XEXP (note, 0));
			    break;

			  default:
			    /* Discard all other reg notes.  */
			    break;
			  }

		      /* Croak if there is another call in the sequence.  */
		      while (++i <= match_len)
			{
			  j = i + peep2_current;
			  if (j >= MAX_INSNS_PER_PEEP2 + 1)
			    j -= MAX_INSNS_PER_PEEP2 + 1;
			  old_insn = peep2_insn_data[j].insn;
			  gcc_assert (!CALL_P (old_insn));
			}
		      break;
		    }
		  i = match_len + peep2_current;
		  if (i >= MAX_INSNS_PER_PEEP2 + 1)
		    i -= MAX_INSNS_PER_PEEP2 + 1;

		  note = find_reg_note (peep2_insn_data[i].insn,
					REG_EH_REGION, NULL_RTX);

		  /* Replace the old sequence with the new.  */
		  attempt = emit_insn_after_setloc (attempt,
						    peep2_insn_data[i].insn,
						    INSN_LOCATOR (peep2_insn_data[i].insn));
		  before_try = PREV_INSN (insn);
		  delete_insn_chain (insn, peep2_insn_data[i].insn, false);

		  /* Re-insert the EH_REGION notes.  */
		  if (note || (was_call && nonlocal_goto_handler_labels))
		    {
		      edge eh_edge;
		      edge_iterator ei;

		      FOR_EACH_EDGE (eh_edge, ei, bb->succs)
			if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
			  break;

		      for (x = attempt; x != before_try; x = PREV_INSN (x))
			if (CALL_P (x)
			    || (flag_non_call_exceptions
				&& may_trap_p (PATTERN (x))
				&& !find_reg_note (x, REG_EH_REGION, NULL)))
			  {
			    if (note)
			      add_reg_note (x, REG_EH_REGION, XEXP (note, 0));

			    if (x != BB_END (bb) && eh_edge)
			      {
				edge nfte, nehe;
				int flags;

				nfte = split_block (bb, x);
				flags = (eh_edge->flags
					 & (EDGE_EH | EDGE_ABNORMAL));
				if (CALL_P (x))
				  flags |= EDGE_ABNORMAL_CALL;
				nehe = make_edge (nfte->src, eh_edge->dest,
						  flags);

				nehe->probability = eh_edge->probability;
				nfte->probability
				  = REG_BR_PROB_BASE - nehe->probability;

				do_cleanup_cfg |= purge_dead_edges (nfte->dest);
				bb = nfte->src;
				eh_edge = nehe;
			      }
			  }

		      /* Converting possibly trapping insn to non-trapping is
			 possible.  Zap dummy outgoing edges.  */
		      do_cleanup_cfg |= purge_dead_edges (bb);
		    }
#ifdef HAVE_conditional_execution
		  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
		    peep2_insn_data[i].insn = NULL_RTX;
		  peep2_insn_data[peep2_current].insn = PEEP2_EOB;
		  peep2_current_count = 0;
#else
		  /* Back up lifetime information past the end of the
		     newly created sequence.  */
		  if (++i >= MAX_INSNS_PER_PEEP2 + 1)
		    i = 0;
		  bitmap_copy (live, peep2_insn_data[i].live_before);

		  /* Update life information for the new sequence.  */
		  x = attempt;
		  do
		    {
		      if (INSN_P (x))
			{
			  if (--i < 0)
			    i = MAX_INSNS_PER_PEEP2;
			  if (peep2_current_count < MAX_INSNS_PER_PEEP2
			      && peep2_insn_data[i].insn == NULL_RTX)
			    peep2_current_count++;
			  peep2_insn_data[i].insn = x;
			  df_insn_rescan (x);
			  df_simulate_one_insn (bb, x, live);
			  bitmap_copy (peep2_insn_data[i].live_before, live);
			}
		      x = PREV_INSN (x);
		    }
		  while (x != prev);

		  peep2_current = i;
#endif
		  /* If we generated a jump instruction, it won't have
		     JUMP_LABEL set.  Recompute after we're done.  */
		  for (x = attempt; x != before_try; x = PREV_INSN (x))
		    if (JUMP_P (x))
		      {
			do_rebuild_jump_labels = true;
			break;
		      }
		}
	    }

	  if (insn == BB_HEAD (bb))
	    break;
	}
    }

  default_rtl_profile ();
  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    BITMAP_FREE (peep2_insn_data[i].live_before);
  BITMAP_FREE (live);
  if (do_rebuild_jump_labels)
    rebuild_jump_labels (get_insns ());
}
#endif /* HAVE_peephole2 */
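
/* Illustrative context (a sketch of an .md file, not C): the pass above
   drives target patterns of the form

     (define_peephole2
       [(set (match_operand:SI 0 "register_operand" "")
	     (match_operand:SI 1 "register_operand" ""))
	...]
       "condition, typically using peep2_reg_dead_p and friends"
       [(replacement insn patterns)])

   replacing the matched window of insns whenever the condition holds.  */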
/* Common predicates for use with define_bypass.  */

/* True if the dependency between OUT_INSN and IN_INSN is on the store
   data not the address operand(s) of the store.  IN_INSN and OUT_INSN
   must be either a single_set or a PARALLEL with SETs inside.  */

int
store_data_bypass_p (rtx out_insn, rtx in_insn)
{
  rtx out_set, in_set;
  rtx out_pat, in_pat;
  rtx out_exp, in_exp;
  int i, j;

  in_set = single_set (in_insn);
  if (in_set)
    {
      if (!MEM_P (SET_DEST (in_set)))
	return false;

      out_set = single_set (out_insn);
      if (out_set)
	{
	  if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
	    return false;
	}
      else
	{
	  out_pat = PATTERN (out_insn);

	  if (GET_CODE (out_pat) != PARALLEL)
	    return false;

	  for (i = 0; i < XVECLEN (out_pat, 0); i++)
	    {
	      out_exp = XVECEXP (out_pat, 0, i);

	      if (GET_CODE (out_exp) == CLOBBER)
		continue;

	      gcc_assert (GET_CODE (out_exp) == SET);

	      if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
		return false;
	    }
	}
    }
  else
    {
      in_pat = PATTERN (in_insn);
      gcc_assert (GET_CODE (in_pat) == PARALLEL);

      for (i = 0; i < XVECLEN (in_pat, 0); i++)
	{
	  in_exp = XVECEXP (in_pat, 0, i);

	  if (GET_CODE (in_exp) == CLOBBER)
	    continue;

	  gcc_assert (GET_CODE (in_exp) == SET);

	  if (!MEM_P (SET_DEST (in_exp)))
	    continue;

	  out_set = single_set (out_insn);
	  if (out_set)
	    {
	      if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp)))
		return false;
	    }
	  else
	    {
	      out_pat = PATTERN (out_insn);
	      gcc_assert (GET_CODE (out_pat) == PARALLEL);

	      for (j = 0; j < XVECLEN (out_pat, 0); j++)
		{
		  out_exp = XVECEXP (out_pat, 0, j);

		  if (GET_CODE (out_exp) == CLOBBER)
		    continue;

		  gcc_assert (GET_CODE (out_exp) == SET);

		  if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp)))
		    return false;
		}
	    }
	}
    }

  return true;
}
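
/* Illustrative use (a sketch of an .md file, not C): a DFA scheduling
   description can shorten the producer-to-store latency when only the
   stored data is forwarded, e.g.

     (define_bypass 1 "my_alu" "my_store" "store_data_bypass_p")

   where "my_alu" and "my_store" are hypothetical insn reservation names.  */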
/* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
   condition, and not the THEN or ELSE branch.  OUT_INSN may be either a single
   or multiple set; IN_INSN should be single_set for truth, but for convenience
   of insn categorization may be any JUMP or CALL insn.  */

int
if_test_bypass_p (rtx out_insn, rtx in_insn)
{
  rtx out_set, in_set;

  in_set = single_set (in_insn);
  if (! in_set)
    {
      gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
      return false;
    }

  if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
    return false;
  in_set = SET_SRC (in_set);

  out_set = single_set (out_insn);
  if (out_set)
    {
      if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
	  || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
	return false;
    }
  else
    {
      rtx out_pat;
      int i;

      out_pat = PATTERN (out_insn);
      gcc_assert (GET_CODE (out_pat) == PARALLEL);

      for (i = 0; i < XVECLEN (out_pat, 0); i++)
	{
	  rtx exp = XVECEXP (out_pat, 0, i);

	  if (GET_CODE (exp) == CLOBBER)
	    continue;

	  gcc_assert (GET_CODE (exp) == SET);

	  if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
	      || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
	    return false;
	}
    }

  return true;
}
static bool
gate_handle_peephole2 (void)
{
  return (optimize > 0 && flag_peephole2);
}

static unsigned int
rest_of_handle_peephole2 (void)
{
#ifdef HAVE_peephole2
  peephole2_optimize ();
#endif
  return 0;
}

struct rtl_opt_pass pass_peephole2 =
{
 {
  RTL_PASS,
  "peephole2",                          /* name */
  gate_handle_peephole2,                /* gate */
  rest_of_handle_peephole2,             /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_PEEPHOLE2,                         /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_dump_func                        /* todo_flags_finish */
 }
};
3446 struct rtl_opt_pass pass_split_all_insns
=
3450 "split1", /* name */
3452 rest_of_handle_split_all_insns
, /* execute */
3455 0, /* static_pass_number */
3457 0, /* properties_required */
3458 0, /* properties_provided */
3459 0, /* properties_destroyed */
3460 0, /* todo_flags_start */
3461 TODO_dump_func
/* todo_flags_finish */
static unsigned int
rest_of_handle_split_after_reload (void)
{
  /* If optimizing, then go ahead and split insns now.  */
#ifndef STACK_REGS
  if (optimize > 0)
#endif
    split_all_insns ();
  return 0;
}

struct rtl_opt_pass pass_split_after_reload =
{
 {
  RTL_PASS,
  "split2",                             /* name */
  NULL,                                 /* gate */
  rest_of_handle_split_after_reload,    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};
static bool
gate_handle_split_before_regstack (void)
{
#if defined (HAVE_ATTR_length) && defined (STACK_REGS)
  /* If flow2 creates new instructions which need splitting
     and scheduling after reload is not done, they might not be
     split until final which doesn't allow splitting
     if HAVE_ATTR_length.  */
# ifdef INSN_SCHEDULING
  return (optimize && !flag_schedule_insns_after_reload);
# else
  return (optimize);
# endif
#else
  return 0;
#endif
}

static unsigned int
rest_of_handle_split_before_regstack (void)
{
  split_all_insns ();
  return 0;
}

struct rtl_opt_pass pass_split_before_regstack =
{
 {
  RTL_PASS,
  "split3",                             /* name */
  gate_handle_split_before_regstack,    /* gate */
  rest_of_handle_split_before_regstack, /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};
static bool
gate_handle_split_before_sched2 (void)
{
#ifdef INSN_SCHEDULING
  return optimize > 0 && flag_schedule_insns_after_reload;
#else
  return 0;
#endif
}

static unsigned int
rest_of_handle_split_before_sched2 (void)
{
#ifdef INSN_SCHEDULING
  split_all_insns ();
#endif
  return 0;
}

struct rtl_opt_pass pass_split_before_sched2 =
{
 {
  RTL_PASS,
  "split4",                             /* name */
  gate_handle_split_before_sched2,      /* gate */
  rest_of_handle_split_before_sched2,   /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_flow |
  TODO_dump_func                        /* todo_flags_finish */
 }
};
/* The placement of the splitting that we do for shorten_branches
   depends on whether regstack is used by the target or not.  */
static bool
gate_do_final_split (void)
{
#if defined (HAVE_ATTR_length) && !defined (STACK_REGS)
  return 1;
#else
  return 0;
#endif
}

struct rtl_opt_pass pass_split_for_shorten_branches =
{
 {
  RTL_PASS,
  "split5",                             /* name */
  gate_do_final_split,                  /* gate */
  split_all_insns_noflow,               /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_rtl_sharing /* todo_flags_finish */
 }
};