1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
25 #include "coretypes.h"
29 #include "insn-config.h"
30 #include "insn-attr.h"
31 #include "hard-reg-set.h"
34 #include "addresses.h"
40 #include "basic-block.h"
45 #include "tree-pass.h"
48 #ifndef STACK_PUSH_CODE
49 #ifdef STACK_GROWS_DOWNWARD
50 #define STACK_PUSH_CODE PRE_DEC
52 #define STACK_PUSH_CODE PRE_INC
56 #ifndef STACK_POP_CODE
57 #ifdef STACK_GROWS_DOWNWARD
58 #define STACK_POP_CODE POST_INC
60 #define STACK_POP_CODE POST_DEC
64 #ifndef HAVE_ATTR_enabled
66 get_attr_enabled (rtx insn ATTRIBUTE_UNUSED
)
72 static void validate_replace_rtx_1 (rtx
*, rtx
, rtx
, rtx
);
73 static void validate_replace_src_1 (rtx
*, void *);
74 static rtx
split_insn (rtx
);
76 /* Nonzero means allow operands to be volatile.
77 This should be 0 if you are generating rtl, such as if you are calling
78 the functions in optabs.c and expmed.c (most of the time).
79 This should be 1 if all valid insns need to be recognized,
80 such as in regclass.c and final.c and reload.c.
82 init_recog and init_recog_no_volatile are responsible for setting this. */
86 struct recog_data recog_data
;
88 /* Contains a vector of operand_alternative structures for every operand.
89 Set up by preprocess_constraints. */
90 struct operand_alternative recog_op_alt
[MAX_RECOG_OPERANDS
][MAX_RECOG_ALTERNATIVES
];
92 /* On return from `constrain_operands', indicate which alternative
95 int which_alternative
;
97 /* Nonzero after end of reload pass.
98 Set to 1 or 0 by toplev.c.
99 Controls the significance of (SUBREG (MEM)). */
101 int reload_completed
;
103 /* Nonzero after thread_prologue_and_epilogue_insns has run. */
104 int epilogue_completed
;
106 /* Initialize data used by the function `recog'.
107 This must be called once in the compilation of a function
108 before any insn recognition may be done in the function. */
111 init_recog_no_volatile (void)
123 /* Check that X is an insn-body for an `asm' with operands
124 and that the operands mentioned in it are legitimate. */
/* NOTE(review): extraction lacunae -- the return type, the local
   declarations (noperands, operands, i) and several early-return
   statements are on original lines missing from this fragment.  */
127 check_asm_operands (rtx x
)
131 const char **constraints
;
134 /* Post-reload, be more strict with things. */
135 if (reload_completed
)
137 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
138 extract_insn (make_insn_raw (x
));
139 constrain_operands (1);
140 return which_alternative
>= 0;
143 noperands
= asm_noperands (x
);
/* NOTE(review): the checks of NOPERANDS (negative / zero) that the full
   source performs here are on missing lines.  */
149 operands
= XALLOCAVEC (rtx
, noperands
);
150 constraints
= XALLOCAVEC (const char *, noperands
);
152 decode_asm_operands (x
, operands
, NULL
, constraints
, NULL
, NULL
);
/* Validate each operand against its (possibly indirected) constraint.  */
154 for (i
= 0; i
< noperands
; i
++)
156 const char *c
= constraints
[i
];
/* A purely numeric constraint means "same as operand N": substitute
   that operand's constraint string.  */
159 if (ISDIGIT ((unsigned char) c
[0]) && c
[1] == '\0')
160 c
= constraints
[c
[0] - '0'];
162 if (! asm_operand_ok (operands
[i
], c
))
169 /* Static data for the next two routines. */
171 typedef struct change_t
180 static change_t
*changes
;
181 static int changes_allocated
;
183 static int num_changes
= 0;
185 /* Validate a proposed change to OBJECT. LOC is the location in the rtl
186 at which NEW_RTX will be placed. If OBJECT is zero, no validation is done,
187 the change is simply made.
189 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
190 will be called with the address and mode as parameters. If OBJECT is
191 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
194 IN_GROUP is nonzero if this is part of a group of changes that must be
195 performed as a group. In that case, the changes will be stored. The
196 function `apply_change_group' will validate and apply the changes.
198 If IN_GROUP is zero, this is a single change. Try to recognize the insn
199 or validate the memory reference with the change applied. If the result
200 is not valid for the machine, suppress the change and return zero.
201 Otherwise, perform the change and return 1. */
204 validate_change_1 (rtx object
, rtx
*loc
, rtx new_rtx
, bool in_group
, bool unshare
)
208 if (old
== new_rtx
|| rtx_equal_p (old
, new_rtx
))
211 gcc_assert (in_group
!= 0 || num_changes
== 0);
215 /* Save the information describing this change. */
216 if (num_changes
>= changes_allocated
)
218 if (changes_allocated
== 0)
219 /* This value allows for repeated substitutions inside complex
220 indexed addresses, or changes in up to 5 insns. */
221 changes_allocated
= MAX_RECOG_OPERANDS
* 5;
223 changes_allocated
*= 2;
225 changes
= XRESIZEVEC (change_t
, changes
, changes_allocated
);
228 changes
[num_changes
].object
= object
;
229 changes
[num_changes
].loc
= loc
;
230 changes
[num_changes
].old
= old
;
231 changes
[num_changes
].unshare
= unshare
;
233 if (object
&& !MEM_P (object
))
235 /* Set INSN_CODE to force rerecognition of insn. Save old code in
237 changes
[num_changes
].old_code
= INSN_CODE (object
);
238 INSN_CODE (object
) = -1;
243 /* If we are making a group of changes, return 1. Otherwise, validate the
244 change group we made. */
249 return apply_change_group ();
252 /* Wrapper for validate_change_1 without the UNSHARE argument defaulting
   to false (the replacement rtx is used as-is, never copied).
   NOTE(review): the return type, braces and part of this comment are on
   original lines missing from this extraction.  */
256 validate_change (rtx object
, rtx
*loc
, rtx new_rtx
, bool in_group
)
/* Delegate with UNSHARE == false.  */
258 return validate_change_1 (object
, loc
, new_rtx
, in_group
, false);
261 /* Wrapper for validate_change_1 without the UNSHARE argument defaulting
   to true: when the change group is later confirmed, the stored location
   is copied via copy_rtx (see confirm_change_group).
   NOTE(review): the return type, braces and part of this comment are on
   original lines missing from this extraction.  */
265 validate_unshare_change (rtx object
, rtx
*loc
, rtx new_rtx
, bool in_group
)
/* Delegate with UNSHARE == true.  */
267 return validate_change_1 (object
, loc
, new_rtx
, in_group
, true);
271 /* Keep X canonicalized if some changes have made it non-canonical; only
272 modifies the operands of X, not (for example) its code. Simplifications
273 are not the job of this routine.
275 Return true if anything was changed. */
277 canonicalize_change_group (rtx insn
, rtx x
)
279 if (COMMUTATIVE_P (x
)
280 && swap_commutative_operands_p (XEXP (x
, 0), XEXP (x
, 1)))
282 /* Oops, the caller has made X no longer canonical.
283 Let's redo the changes in the correct order. */
284 rtx tem
= XEXP (x
, 0);
285 validate_change (insn
, &XEXP (x
, 0), XEXP (x
, 1), 1);
286 validate_change (insn
, &XEXP (x
, 1), tem
, 1);
294 /* This subroutine of apply_change_group verifies whether the changes to INSN
295 were valid; i.e. whether INSN can still be recognized. */
298 insn_invalid_p (rtx insn
)
300 rtx pat
= PATTERN (insn
);
301 int num_clobbers
= 0;
302 /* If we are before reload and the pattern is a SET, see if we can add
304 int icode
= recog (pat
, insn
,
305 (GET_CODE (pat
) == SET
306 && ! reload_completed
&& ! reload_in_progress
)
307 ? &num_clobbers
: 0);
308 int is_asm
= icode
< 0 && asm_noperands (PATTERN (insn
)) >= 0;
311 /* If this is an asm and the operand aren't legal, then fail. Likewise if
312 this is not an asm and the insn wasn't recognized. */
313 if ((is_asm
&& ! check_asm_operands (PATTERN (insn
)))
314 || (!is_asm
&& icode
< 0))
317 /* If we have to add CLOBBERs, fail if we have to add ones that reference
318 hard registers since our callers can't know if they are live or not.
319 Otherwise, add them. */
320 if (num_clobbers
> 0)
324 if (added_clobbers_hard_reg_p (icode
))
327 newpat
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (num_clobbers
+ 1));
328 XVECEXP (newpat
, 0, 0) = pat
;
329 add_clobbers (newpat
, icode
);
330 PATTERN (insn
) = pat
= newpat
;
333 /* After reload, verify that all constraints are satisfied. */
334 if (reload_completed
)
338 if (! constrain_operands (1))
342 INSN_CODE (insn
) = icode
;
346 /* Return number of changes made and not validated yet. */
348 num_changes_pending (void)
353 /* Tentatively apply the changes numbered NUM and up.
354 Return 1 if all changes are valid, zero otherwise. */
357 verify_changes (int num
)
360 rtx last_validated
= NULL_RTX
;
362 /* The changes have been applied and all INSN_CODEs have been reset to force
365 The changes are valid if we aren't given an object, or if we are
366 given a MEM and it still is a valid address, or if this is in insn
367 and it is recognized. In the latter case, if reload has completed,
368 we also require that the operands meet the constraints for
371 for (i
= num
; i
< num_changes
; i
++)
373 rtx object
= changes
[i
].object
;
375 /* If there is no object to test or if it is the same as the one we
376 already tested, ignore it. */
377 if (object
== 0 || object
== last_validated
)
382 if (! memory_address_p (GET_MODE (object
), XEXP (object
, 0)))
385 else if (insn_invalid_p (object
))
387 rtx pat
= PATTERN (object
);
389 /* Perhaps we couldn't recognize the insn because there were
390 extra CLOBBERs at the end. If so, try to re-recognize
391 without the last CLOBBER (later iterations will cause each of
392 them to be eliminated, in turn). But don't do this if we
393 have an ASM_OPERAND. */
394 if (GET_CODE (pat
) == PARALLEL
395 && GET_CODE (XVECEXP (pat
, 0, XVECLEN (pat
, 0) - 1)) == CLOBBER
396 && asm_noperands (PATTERN (object
)) < 0)
400 if (XVECLEN (pat
, 0) == 2)
401 newpat
= XVECEXP (pat
, 0, 0);
407 = gen_rtx_PARALLEL (VOIDmode
,
408 rtvec_alloc (XVECLEN (pat
, 0) - 1));
409 for (j
= 0; j
< XVECLEN (newpat
, 0); j
++)
410 XVECEXP (newpat
, 0, j
) = XVECEXP (pat
, 0, j
);
413 /* Add a new change to this group to replace the pattern
414 with this new pattern. Then consider this change
415 as having succeeded. The change we added will
416 cause the entire call to fail if things remain invalid.
418 Note that this can lose if a later change than the one
419 we are processing specified &XVECEXP (PATTERN (object), 0, X)
420 but this shouldn't occur. */
422 validate_change (object
, &PATTERN (object
), newpat
, 1);
425 else if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
)
426 /* If this insn is a CLOBBER or USE, it is always valid, but is
432 last_validated
= object
;
435 return (i
== num_changes
);
438 /* A group of changes has previously been issued with validate_change
439 and verified with verify_changes. Call df_insn_rescan for each of
440 the insn changed and clear num_changes. */
443 confirm_change_group (void)
446 rtx last_object
= NULL
;
448 for (i
= 0; i
< num_changes
; i
++)
450 rtx object
= changes
[i
].object
;
452 if (changes
[i
].unshare
)
453 *changes
[i
].loc
= copy_rtx (*changes
[i
].loc
);
455 /* Avoid unnecessary rescanning when multiple changes to same instruction
459 if (object
!= last_object
&& last_object
&& INSN_P (last_object
))
460 df_insn_rescan (last_object
);
461 last_object
= object
;
465 if (last_object
&& INSN_P (last_object
))
466 df_insn_rescan (last_object
);
470 /* Apply a group of changes previously issued with `validate_change'.
471 If all changes are valid, call confirm_change_group and return 1,
472 otherwise, call cancel_changes and return 0. */
475 apply_change_group (void)
/* NOTE(review): fragment -- the else branch (cancel_changes) and both
   return statements are on original lines missing from this
   extraction.  */
477 if (verify_changes (0))
479 confirm_change_group ();
490 /* Return the number of changes so far in the current group. */
493 num_validated_changes (void)
498 /* Retract the changes numbered NUM and up. */
501 cancel_changes (int num
)
505 /* Back out all the changes. Do this in the opposite order in which
507 for (i
= num_changes
- 1; i
>= num
; i
--)
509 *changes
[i
].loc
= changes
[i
].old
;
510 if (changes
[i
].object
&& !MEM_P (changes
[i
].object
))
511 INSN_CODE (changes
[i
].object
) = changes
[i
].old_code
;
516 /* Replace every occurrence of FROM in X with TO. Mark each change with
517 validate_change passing OBJECT. */
520 validate_replace_rtx_1 (rtx
*loc
, rtx from
, rtx to
, rtx object
)
526 enum machine_mode op0_mode
= VOIDmode
;
527 int prev_changes
= num_changes
;
534 fmt
= GET_RTX_FORMAT (code
);
536 op0_mode
= GET_MODE (XEXP (x
, 0));
538 /* X matches FROM if it is the same rtx or they are both referring to the
539 same register in the same mode. Avoid calling rtx_equal_p unless the
540 operands look similar. */
543 || (REG_P (x
) && REG_P (from
)
544 && GET_MODE (x
) == GET_MODE (from
)
545 && REGNO (x
) == REGNO (from
))
546 || (GET_CODE (x
) == GET_CODE (from
) && GET_MODE (x
) == GET_MODE (from
)
547 && rtx_equal_p (x
, from
)))
549 validate_unshare_change (object
, loc
, to
, 1);
553 /* Call ourself recursively to perform the replacements.
554 We must not replace inside already replaced expression, otherwise we
555 get infinite recursion for replacements like (reg X)->(subreg (reg X))
556 done by regmove, so we must special case shared ASM_OPERANDS. */
558 if (GET_CODE (x
) == PARALLEL
)
560 for (j
= XVECLEN (x
, 0) - 1; j
>= 0; j
--)
562 if (j
&& GET_CODE (XVECEXP (x
, 0, j
)) == SET
563 && GET_CODE (SET_SRC (XVECEXP (x
, 0, j
))) == ASM_OPERANDS
)
565 /* Verify that operands are really shared. */
566 gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x
, 0, 0)))
567 == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
569 validate_replace_rtx_1 (&SET_DEST (XVECEXP (x
, 0, j
)),
573 validate_replace_rtx_1 (&XVECEXP (x
, 0, j
), from
, to
, object
);
577 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
580 validate_replace_rtx_1 (&XEXP (x
, i
), from
, to
, object
);
581 else if (fmt
[i
] == 'E')
582 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
583 validate_replace_rtx_1 (&XVECEXP (x
, i
, j
), from
, to
, object
);
586 /* If we didn't substitute, there is nothing more to do. */
587 if (num_changes
== prev_changes
)
590 /* Allow substituted expression to have different mode. This is used by
591 regmove to change mode of pseudo register. */
592 if (fmt
[0] == 'e' && GET_MODE (XEXP (x
, 0)) != VOIDmode
)
593 op0_mode
= GET_MODE (XEXP (x
, 0));
595 /* Do changes needed to keep rtx consistent. Don't do any other
596 simplifications, as it is not our job. */
598 if (SWAPPABLE_OPERANDS_P (x
)
599 && swap_commutative_operands_p (XEXP (x
, 0), XEXP (x
, 1)))
601 validate_unshare_change (object
, loc
,
602 gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x
) ? code
603 : swap_condition (code
),
604 GET_MODE (x
), XEXP (x
, 1),
613 /* If we have a PLUS whose second operand is now a CONST_INT, use
614 simplify_gen_binary to try to simplify it.
615 ??? We may want later to remove this, once simplification is
616 separated from this function. */
617 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
&& XEXP (x
, 1) == to
)
618 validate_change (object
, loc
,
620 (PLUS
, GET_MODE (x
), XEXP (x
, 0), XEXP (x
, 1)), 1);
623 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
624 || GET_CODE (XEXP (x
, 1)) == CONST_DOUBLE
)
625 validate_change (object
, loc
,
627 (PLUS
, GET_MODE (x
), XEXP (x
, 0),
628 simplify_gen_unary (NEG
,
629 GET_MODE (x
), XEXP (x
, 1),
634 if (GET_MODE (XEXP (x
, 0)) == VOIDmode
)
636 new_rtx
= simplify_gen_unary (code
, GET_MODE (x
), XEXP (x
, 0),
638 /* If any of the above failed, substitute in something that
639 we know won't be recognized. */
641 new_rtx
= gen_rtx_CLOBBER (GET_MODE (x
), const0_rtx
);
642 validate_change (object
, loc
, new_rtx
, 1);
646 /* All subregs possible to simplify should be simplified. */
647 new_rtx
= simplify_subreg (GET_MODE (x
), SUBREG_REG (x
), op0_mode
,
650 /* Subregs of VOIDmode operands are incorrect. */
651 if (!new_rtx
&& GET_MODE (SUBREG_REG (x
)) == VOIDmode
)
652 new_rtx
= gen_rtx_CLOBBER (GET_MODE (x
), const0_rtx
);
654 validate_change (object
, loc
, new_rtx
, 1);
658 /* If we are replacing a register with memory, try to change the memory
659 to be the mode required for memory in extract operations (this isn't
660 likely to be an insertion operation; if it was, nothing bad will
661 happen, we might just fail in some cases). */
663 if (MEM_P (XEXP (x
, 0))
664 && GET_CODE (XEXP (x
, 1)) == CONST_INT
665 && GET_CODE (XEXP (x
, 2)) == CONST_INT
666 && !mode_dependent_address_p (XEXP (XEXP (x
, 0), 0))
667 && !MEM_VOLATILE_P (XEXP (x
, 0)))
669 enum machine_mode wanted_mode
= VOIDmode
;
670 enum machine_mode is_mode
= GET_MODE (XEXP (x
, 0));
671 int pos
= INTVAL (XEXP (x
, 2));
673 if (GET_CODE (x
) == ZERO_EXTRACT
)
675 enum machine_mode new_mode
676 = mode_for_extraction (EP_extzv
, 1);
677 if (new_mode
!= MAX_MACHINE_MODE
)
678 wanted_mode
= new_mode
;
680 else if (GET_CODE (x
) == SIGN_EXTRACT
)
682 enum machine_mode new_mode
683 = mode_for_extraction (EP_extv
, 1);
684 if (new_mode
!= MAX_MACHINE_MODE
)
685 wanted_mode
= new_mode
;
688 /* If we have a narrower mode, we can do something. */
689 if (wanted_mode
!= VOIDmode
690 && GET_MODE_SIZE (wanted_mode
) < GET_MODE_SIZE (is_mode
))
692 int offset
= pos
/ BITS_PER_UNIT
;
695 /* If the bytes and bits are counted differently, we
696 must adjust the offset. */
697 if (BYTES_BIG_ENDIAN
!= BITS_BIG_ENDIAN
)
699 (GET_MODE_SIZE (is_mode
) - GET_MODE_SIZE (wanted_mode
) -
702 pos
%= GET_MODE_BITSIZE (wanted_mode
);
704 newmem
= adjust_address_nv (XEXP (x
, 0), wanted_mode
, offset
);
706 validate_change (object
, &XEXP (x
, 2), GEN_INT (pos
), 1);
707 validate_change (object
, &XEXP (x
, 0), newmem
, 1);
718 /* Try replacing every occurrence of FROM in INSN with TO. After all
719 changes have been made, validate by seeing if INSN is still valid. */
722 validate_replace_rtx (rtx from
, rtx to
, rtx insn
)
/* Queue every replacement inside the insn's pattern as one group...  */
724 validate_replace_rtx_1 (&PATTERN (insn
), from
, to
, insn
);
/* ...then validate and either confirm or cancel the whole group.  */
725 return apply_change_group ();
728 /* Try replacing every occurrence of FROM in INSN with TO. */
/* Group variant: changes are queued but NOT validated here; the caller
   must finish with apply_change_group or cancel_changes.  */
731 validate_replace_rtx_group (rtx from
, rtx to
, rtx insn
)
733 validate_replace_rtx_1 (&PATTERN (insn
), from
, to
, insn
);
736 /* Function called by note_uses to replace used subexpressions. */
737 struct validate_replace_src_data
739 rtx from
; /* Old RTX */
740 rtx to
; /* New RTX */
741 rtx insn
; /* Insn in which substitution is occurring. */
/* note_uses callback: X points at a used subexpression, DATA is the
   struct validate_replace_src_data describing the substitution.  */
745 validate_replace_src_1 (rtx
*x
, void *data
)
747 struct validate_replace_src_data
*d
748 = (struct validate_replace_src_data
*) data
;
/* Perform the FROM -> TO replacement within *X, recording the changes
   against D->insn in the current change group.  */
750 validate_replace_rtx_1 (x
, d
->from
, d
->to
, d
->insn
);
753 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
   replacement inside destinations (note_uses visits only the used,
   i.e. source, subexpressions) -- TODO confirm wording against the
   full original comment, whose continuation lines are missing here.
   NOTE(review): the assignments that fill in D (d.from, d.to, d.insn)
   are also on missing lines.  */
757 validate_replace_src_group (rtx from
, rtx to
, rtx insn
)
759 struct validate_replace_src_data d
;
/* Walk the used subexpressions of the pattern with our callback.  */
764 note_uses (&PATTERN (insn
), validate_replace_src_1
, &d
);
767 /* Try simplify INSN.
768 Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
769 pattern and return true if something was simplified. */
772 validate_simplify_insn (rtx insn
)
778 pat
= PATTERN (insn
);
780 if (GET_CODE (pat
) == SET
)
782 newpat
= simplify_rtx (SET_SRC (pat
));
783 if (newpat
&& !rtx_equal_p (SET_SRC (pat
), newpat
))
784 validate_change (insn
, &SET_SRC (pat
), newpat
, 1);
785 newpat
= simplify_rtx (SET_DEST (pat
));
786 if (newpat
&& !rtx_equal_p (SET_DEST (pat
), newpat
))
787 validate_change (insn
, &SET_DEST (pat
), newpat
, 1);
789 else if (GET_CODE (pat
) == PARALLEL
)
790 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
792 rtx s
= XVECEXP (pat
, 0, i
);
794 if (GET_CODE (XVECEXP (pat
, 0, i
)) == SET
)
796 newpat
= simplify_rtx (SET_SRC (s
));
797 if (newpat
&& !rtx_equal_p (SET_SRC (s
), newpat
))
798 validate_change (insn
, &SET_SRC (s
), newpat
, 1);
799 newpat
= simplify_rtx (SET_DEST (s
));
800 if (newpat
&& !rtx_equal_p (SET_DEST (s
), newpat
))
801 validate_change (insn
, &SET_DEST (s
), newpat
, 1);
804 return ((num_changes_pending () > 0) && (apply_change_group () > 0));
808 /* Return 1 if the insn using CC0 set by INSN does not contain
809 any ordered tests applied to the condition codes.
810 EQ and NE tests do not count. */
813 next_insn_tests_no_inequality (rtx insn
)
815 rtx next
= next_cc0_user (insn
);
817 /* If there is no next insn, we have to take the conservative choice. */
/* NOTE(review): the early return for NEXT == 0 is on an original line
   missing from this extraction.  */
821 return (INSN_P (next
)
822 && ! inequality_comparisons_p (PATTERN (next
)));
826 /* Return 1 if OP is a valid general operand for machine mode MODE.
827 This is either a register reference, a memory reference,
828 or a constant. In the case of a memory reference, the address
829 is checked for general validity for the target machine.
831 Register and memory references must have mode MODE in order to be valid,
832 but some constants have no machine mode and are valid for any mode.
834 If MODE is VOIDmode, OP is checked for validity for whatever mode
837 The main use of this function is as a predicate in match_operand
838 expressions in the machine description.
840 For an explanation of this function's behavior for registers of
841 class NO_REGS, see the comment for `register_operand'. */
844 general_operand (rtx op
, enum machine_mode mode
)
846 enum rtx_code code
= GET_CODE (op
);
848 if (mode
== VOIDmode
)
849 mode
= GET_MODE (op
);
851 /* Don't accept CONST_INT or anything similar
852 if the caller wants something floating. */
853 if (GET_MODE (op
) == VOIDmode
&& mode
!= VOIDmode
854 && GET_MODE_CLASS (mode
) != MODE_INT
855 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
858 if (GET_CODE (op
) == CONST_INT
860 && trunc_int_for_mode (INTVAL (op
), mode
) != INTVAL (op
))
864 return ((GET_MODE (op
) == VOIDmode
|| GET_MODE (op
) == mode
866 && (! flag_pic
|| LEGITIMATE_PIC_OPERAND_P (op
))
867 && LEGITIMATE_CONSTANT_P (op
));
869 /* Except for certain constants with VOIDmode, already checked for,
870 OP's mode must match MODE if MODE specifies a mode. */
872 if (GET_MODE (op
) != mode
)
877 rtx sub
= SUBREG_REG (op
);
879 #ifdef INSN_SCHEDULING
880 /* On machines that have insn scheduling, we want all memory
881 reference to be explicit, so outlaw paradoxical SUBREGs.
882 However, we must allow them after reload so that they can
883 get cleaned up by cleanup_subreg_operands. */
884 if (!reload_completed
&& MEM_P (sub
)
885 && GET_MODE_SIZE (mode
) > GET_MODE_SIZE (GET_MODE (sub
)))
888 /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
889 may result in incorrect reference. We should simplify all valid
890 subregs of MEM anyway. But allow this after reload because we
891 might be called from cleanup_subreg_operands.
893 ??? This is a kludge. */
894 if (!reload_completed
&& SUBREG_BYTE (op
) != 0
898 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
899 create such rtl, and we must reject it. */
900 if (SCALAR_FLOAT_MODE_P (GET_MODE (op
))
901 && GET_MODE_SIZE (GET_MODE (op
)) > GET_MODE_SIZE (GET_MODE (sub
)))
905 code
= GET_CODE (op
);
909 /* A register whose class is NO_REGS is not a general operand. */
910 return (REGNO (op
) >= FIRST_PSEUDO_REGISTER
911 || REGNO_REG_CLASS (REGNO (op
)) != NO_REGS
);
915 rtx y
= XEXP (op
, 0);
917 if (! volatile_ok
&& MEM_VOLATILE_P (op
))
920 /* Use the mem's mode, since it will be reloaded thus. */
921 if (memory_address_p (GET_MODE (op
), y
))
928 /* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.
931 The main use of this function is as a predicate in match_operand
932 expressions in the machine description. */
935 address_operand (rtx op
, enum machine_mode mode
)
/* Simply ask the target whether OP is a legitimate address for MODE.  */
937 return memory_address_p (mode
, op
);
940 /* Return 1 if OP is a register reference of mode MODE.
941 If MODE is VOIDmode, accept a register in any mode.
943 The main use of this function is as a predicate in match_operand
944 expressions in the machine description.
946 As a special exception, registers whose class is NO_REGS are
947 not accepted by `register_operand'. The reason for this change
948 is to allow the representation of special architecture artifacts
949 (such as a condition code register) without extending the rtl
950 definitions. Since registers of class NO_REGS cannot be used
951 as registers in any case where register classes are examined,
952 it is most consistent to keep this function from accepting them. */
955 register_operand (rtx op
, enum machine_mode mode
)
957 if (GET_MODE (op
) != mode
&& mode
!= VOIDmode
)
960 if (GET_CODE (op
) == SUBREG
)
962 rtx sub
= SUBREG_REG (op
);
964 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
965 because it is guaranteed to be reloaded into one.
966 Just make sure the MEM is valid in itself.
967 (Ideally, (SUBREG (MEM)...) should not exist after reload,
968 but currently it does result from (SUBREG (REG)...) where the
969 reg went on the stack.) */
970 if (! reload_completed
&& MEM_P (sub
))
971 return general_operand (op
, mode
);
973 #ifdef CANNOT_CHANGE_MODE_CLASS
975 && REGNO (sub
) < FIRST_PSEUDO_REGISTER
976 && REG_CANNOT_CHANGE_MODE_P (REGNO (sub
), GET_MODE (sub
), mode
)
977 && GET_MODE_CLASS (GET_MODE (sub
)) != MODE_COMPLEX_INT
978 && GET_MODE_CLASS (GET_MODE (sub
)) != MODE_COMPLEX_FLOAT
)
982 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
983 create such rtl, and we must reject it. */
984 if (SCALAR_FLOAT_MODE_P (GET_MODE (op
))
985 && GET_MODE_SIZE (GET_MODE (op
)) > GET_MODE_SIZE (GET_MODE (sub
)))
991 /* We don't consider registers whose class is NO_REGS
992 to be a register operand. */
994 && (REGNO (op
) >= FIRST_PSEUDO_REGISTER
995 || REGNO_REG_CLASS (REGNO (op
)) != NO_REGS
));
998 /* Return 1 for a register in Pmode; ignore the tested mode. */
1001 pmode_register_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
/* MODE is deliberately ignored: OP must be a register operand in the
   target's pointer mode.  */
1003 return register_operand (op
, Pmode
);
1006 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1007 or a hard register. */
1010 scratch_operand (rtx op
, enum machine_mode mode
)
/* Reject a mode mismatch (VOIDmode matches anything).  */
1012 if (GET_MODE (op
) != mode
&& mode
!= VOIDmode
)
/* NOTE(review): the "return 0;" body of the guard above and part of the
   hard-register alternative (REG_P test) are on original lines missing
   from this extraction.  */
1015 return (GET_CODE (op
) == SCRATCH
1017 && REGNO (op
) < FIRST_PSEUDO_REGISTER
));
1020 /* Return 1 if OP is a valid immediate operand for mode MODE.
1022 The main use of this function is as a predicate in match_operand
1023 expressions in the machine description. */
1026 immediate_operand (rtx op
, enum machine_mode mode
)
1028 /* Don't accept CONST_INT or anything similar
1029 if the caller wants something floating. */
1030 if (GET_MODE (op
) == VOIDmode
&& mode
!= VOIDmode
1031 && GET_MODE_CLASS (mode
) != MODE_INT
1032 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
1035 if (GET_CODE (op
) == CONST_INT
1037 && trunc_int_for_mode (INTVAL (op
), mode
) != INTVAL (op
))
1040 return (CONSTANT_P (op
)
1041 && (GET_MODE (op
) == mode
|| mode
== VOIDmode
1042 || GET_MODE (op
) == VOIDmode
)
1043 && (! flag_pic
|| LEGITIMATE_PIC_OPERAND_P (op
))
1044 && LEGITIMATE_CONSTANT_P (op
));
1047 /* Returns 1 if OP is an operand that is a CONST_INT. */
1050 const_int_operand (rtx op
, enum machine_mode mode
)
/* Must literally be a CONST_INT rtx.  */
1052 if (GET_CODE (op
) != CONST_INT
)
/* When a mode is specified, the value must be representable in it:
   truncating to MODE must not change the value.
   NOTE(review): the return statements are on original lines missing
   from this extraction.  */
1055 if (mode
!= VOIDmode
1056 && trunc_int_for_mode (INTVAL (op
), mode
) != INTVAL (op
))
1062 /* Returns 1 if OP is an operand that is a constant integer or constant
1063 floating-point number. */
1066 const_double_operand (rtx op
, enum machine_mode mode
)
1068 /* Don't accept CONST_INT or anything similar
1069 if the caller wants something floating. */
1070 if (GET_MODE (op
) == VOIDmode
&& mode
!= VOIDmode
1071 && GET_MODE_CLASS (mode
) != MODE_INT
1072 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
1075 return ((GET_CODE (op
) == CONST_DOUBLE
|| GET_CODE (op
) == CONST_INT
)
1076 && (mode
== VOIDmode
|| GET_MODE (op
) == mode
1077 || GET_MODE (op
) == VOIDmode
));
1080 /* Return 1 if OP is a general operand that is not an immediate operand. */
1083 nonimmediate_operand (rtx op
, enum machine_mode mode
)
/* I.e. a register or memory reference of mode MODE, never a constant.  */
1085 return (general_operand (op
, mode
) && ! CONSTANT_P (op
));
1088 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1091 nonmemory_operand (rtx op
, enum machine_mode mode
)
1093 if (CONSTANT_P (op
))
1095 /* Don't accept CONST_INT or anything similar
1096 if the caller wants something floating. */
1097 if (GET_MODE (op
) == VOIDmode
&& mode
!= VOIDmode
1098 && GET_MODE_CLASS (mode
) != MODE_INT
1099 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
1102 if (GET_CODE (op
) == CONST_INT
1104 && trunc_int_for_mode (INTVAL (op
), mode
) != INTVAL (op
))
1107 return ((GET_MODE (op
) == VOIDmode
|| GET_MODE (op
) == mode
1108 || mode
== VOIDmode
)
1109 && (! flag_pic
|| LEGITIMATE_PIC_OPERAND_P (op
))
1110 && LEGITIMATE_CONSTANT_P (op
));
1113 if (GET_MODE (op
) != mode
&& mode
!= VOIDmode
)
1116 if (GET_CODE (op
) == SUBREG
)
1118 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1119 because it is guaranteed to be reloaded into one.
1120 Just make sure the MEM is valid in itself.
1121 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1122 but currently it does result from (SUBREG (REG)...) where the
1123 reg went on the stack.) */
1124 if (! reload_completed
&& MEM_P (SUBREG_REG (op
)))
1125 return general_operand (op
, mode
);
1126 op
= SUBREG_REG (op
);
1129 /* We don't consider registers whose class is NO_REGS
1130 to be a register operand. */
1132 && (REGNO (op
) >= FIRST_PSEUDO_REGISTER
1133 || REGNO_REG_CLASS (REGNO (op
)) != NO_REGS
));
1136 /* Return 1 if OP is a valid operand that stands for pushing a
1137 value of mode MODE onto the stack.
1139 The main use of this function is as a predicate in match_operand
1140 expressions in the machine description. */
1143 push_operand (rtx op
, enum machine_mode mode
)
1145 unsigned int rounded_size
= GET_MODE_SIZE (mode
);
1147 #ifdef PUSH_ROUNDING
1148 rounded_size
= PUSH_ROUNDING (rounded_size
);
1154 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1159 if (rounded_size
== GET_MODE_SIZE (mode
))
1161 if (GET_CODE (op
) != STACK_PUSH_CODE
)
1166 if (GET_CODE (op
) != PRE_MODIFY
1167 || GET_CODE (XEXP (op
, 1)) != PLUS
1168 || XEXP (XEXP (op
, 1), 0) != XEXP (op
, 0)
1169 || GET_CODE (XEXP (XEXP (op
, 1), 1)) != CONST_INT
1170 #ifdef STACK_GROWS_DOWNWARD
1171 || INTVAL (XEXP (XEXP (op
, 1), 1)) != - (int) rounded_size
1173 || INTVAL (XEXP (XEXP (op
, 1), 1)) != (int) rounded_size
1179 return XEXP (op
, 0) == stack_pointer_rtx
;
1182 /* Return 1 if OP is a valid operand that stands for popping a
1183 value of mode MODE off the stack.
1185 The main use of this function is as a predicate in match_operand
1186 expressions in the machine description. */
1189 pop_operand (rtx op
, enum machine_mode mode
)
/* NOTE(review): the initial MEM_P check, the "op = XEXP (op, 0);" step
   and the "return 0;" bodies of the guards below are on original lines
   missing from this extraction.  */
1194 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
/* The address must use the target's stack-pop code (POST_INC or
   POST_DEC depending on STACK_GROWS_DOWNWARD -- see the macro block at
   the top of the file).  */
1199 if (GET_CODE (op
) != STACK_POP_CODE
)
/* And the auto-modified register must be the stack pointer.  */
1202 return XEXP (op
, 0) == stack_pointer_rtx
;
1205 /* Return 1 if ADDR is a valid memory address for mode MODE. */
1208 memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED
, rtx addr
)
/* GO_IF_LEGITIMATE_ADDRESS jumps to the "win" label on success.
   NOTE(review): the "return 0;", "win:" label and "return 1;" lines are
   on original lines missing from this extraction.  */
1210 GO_IF_LEGITIMATE_ADDRESS (mode
, addr
, win
);
1217 /* Return 1 if OP is a valid memory reference with mode MODE,
1218 including a valid address.
1220 The main use of this function is as a predicate in match_operand
1221 expressions in the machine description. */
1224 memory_operand (rtx op
, enum machine_mode mode
)
/* Before reload, a SUBREG is never a memory operand.  */
1228 if (! reload_completed
)
1229 /* Note that no SUBREG is a memory operand before end of reload pass,
1230 because (SUBREG (MEM...)) forces reloading into a register. */
1231 return MEM_P (op
) && general_operand (op
, mode
);
1233 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
/* NOTE(review): the "return 0;" for the mode check and the declaration
   initializing INNER (presumably "inner = op;") are on original lines
   missing from this extraction.  */
1237 if (GET_CODE (inner
) == SUBREG
)
1238 inner
= SUBREG_REG (inner
);
/* After reload, accept (SUBREG (MEM ...)) by looking through the
   SUBREG to the underlying MEM.  */
1240 return (MEM_P (inner
) && general_operand (op
, mode
));
1243 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1244 that is, a memory reference whose address is a general_operand. */
1247 indirect_operand (rtx op
, enum machine_mode mode
)
1249 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1250 if (! reload_completed
1251 && GET_CODE (op
) == SUBREG
&& MEM_P (SUBREG_REG (op
)))
1253 int offset
= SUBREG_BYTE (op
);
1254 rtx inner
= SUBREG_REG (op
);
1256 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1259 /* The only way that we can have a general_operand as the resulting
1260 address is if OFFSET is zero and the address already is an operand
1261 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1264 return ((offset
== 0 && general_operand (XEXP (inner
, 0), Pmode
))
1265 || (GET_CODE (XEXP (inner
, 0)) == PLUS
1266 && GET_CODE (XEXP (XEXP (inner
, 0), 1)) == CONST_INT
1267 && INTVAL (XEXP (XEXP (inner
, 0), 1)) == -offset
1268 && general_operand (XEXP (XEXP (inner
, 0), 0), Pmode
)));
1272 && memory_operand (op
, mode
)
1273 && general_operand (XEXP (op
, 0), Pmode
));
1276 /* Return 1 if this is a comparison operator. This allows the use of
1277 MATCH_OPERATOR to recognize all the branch insns. */
1280 comparison_operator (rtx op
, enum machine_mode mode
)
1282 return ((mode
== VOIDmode
|| GET_MODE (op
) == mode
)
1283 && COMPARISON_P (op
));
1286 /* If BODY is an insn body that uses ASM_OPERANDS,
1287 return the number of operands (both input and output) in the insn.
1288 Otherwise return -1. */
1291 asm_noperands (const_rtx body
)
1293 switch (GET_CODE (body
))
1296 /* No output operands: return number of input operands. */
1297 return ASM_OPERANDS_INPUT_LENGTH (body
);
1299 if (GET_CODE (SET_SRC (body
)) == ASM_OPERANDS
)
1300 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1301 return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body
)) + 1;
1305 if (GET_CODE (XVECEXP (body
, 0, 0)) == SET
1306 && GET_CODE (SET_SRC (XVECEXP (body
, 0, 0))) == ASM_OPERANDS
)
1308 /* Multiple output operands, or 1 output plus some clobbers:
1309 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1313 /* Count backwards through CLOBBERs to determine number of SETs. */
1314 for (i
= XVECLEN (body
, 0); i
> 0; i
--)
1316 if (GET_CODE (XVECEXP (body
, 0, i
- 1)) == SET
)
1318 if (GET_CODE (XVECEXP (body
, 0, i
- 1)) != CLOBBER
)
1322 /* N_SETS is now number of output operands. */
1325 /* Verify that all the SETs we have
1326 came from a single original asm_operands insn
1327 (so that invalid combinations are blocked). */
1328 for (i
= 0; i
< n_sets
; i
++)
1330 rtx elt
= XVECEXP (body
, 0, i
);
1331 if (GET_CODE (elt
) != SET
)
1333 if (GET_CODE (SET_SRC (elt
)) != ASM_OPERANDS
)
1335 /* If these ASM_OPERANDS rtx's came from different original insns
1336 then they aren't allowed together. */
1337 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt
))
1338 != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body
, 0, 0))))
1341 return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body
, 0, 0)))
1344 else if (GET_CODE (XVECEXP (body
, 0, 0)) == ASM_OPERANDS
)
1346 /* 0 outputs, but some clobbers:
1347 body is [(asm_operands ...) (clobber (reg ...))...]. */
1350 /* Make sure all the other parallel things really are clobbers. */
1351 for (i
= XVECLEN (body
, 0) - 1; i
> 0; i
--)
1352 if (GET_CODE (XVECEXP (body
, 0, i
)) != CLOBBER
)
1355 return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body
, 0, 0));
1364 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1365 copy its operands (both input and output) into the vector OPERANDS,
1366 the locations of the operands within the insn into the vector OPERAND_LOCS,
1367 and the constraints for the operands into CONSTRAINTS.
1368 Write the modes of the operands into MODES.
1369 Return the assembler-template.
1371 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1372 we don't store that info. */
1375 decode_asm_operands (rtx body
, rtx
*operands
, rtx
**operand_locs
,
1376 const char **constraints
, enum machine_mode
*modes
,
1383 if (GET_CODE (body
) == SET
&& GET_CODE (SET_SRC (body
)) == ASM_OPERANDS
)
1385 asmop
= SET_SRC (body
);
1386 /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
1388 noperands
= ASM_OPERANDS_INPUT_LENGTH (asmop
) + 1;
1390 for (i
= 1; i
< noperands
; i
++)
1393 operand_locs
[i
] = &ASM_OPERANDS_INPUT (asmop
, i
- 1);
1395 operands
[i
] = ASM_OPERANDS_INPUT (asmop
, i
- 1);
1397 constraints
[i
] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop
, i
- 1);
1399 modes
[i
] = ASM_OPERANDS_INPUT_MODE (asmop
, i
- 1);
1402 /* The output is in the SET.
1403 Its constraint is in the ASM_OPERANDS itself. */
1405 operands
[0] = SET_DEST (body
);
1407 operand_locs
[0] = &SET_DEST (body
);
1409 constraints
[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop
);
1411 modes
[0] = GET_MODE (SET_DEST (body
));
1413 else if (GET_CODE (body
) == ASM_OPERANDS
)
1416 /* No output operands: BODY is (asm_operands ....). */
1418 noperands
= ASM_OPERANDS_INPUT_LENGTH (asmop
);
1420 /* The input operands are found in the 1st element vector. */
1421 /* Constraints for inputs are in the 2nd element vector. */
1422 for (i
= 0; i
< noperands
; i
++)
1425 operand_locs
[i
] = &ASM_OPERANDS_INPUT (asmop
, i
);
1427 operands
[i
] = ASM_OPERANDS_INPUT (asmop
, i
);
1429 constraints
[i
] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop
, i
);
1431 modes
[i
] = ASM_OPERANDS_INPUT_MODE (asmop
, i
);
1434 else if (GET_CODE (body
) == PARALLEL
1435 && GET_CODE (XVECEXP (body
, 0, 0)) == SET
1436 && GET_CODE (SET_SRC (XVECEXP (body
, 0, 0))) == ASM_OPERANDS
)
1438 int nparallel
= XVECLEN (body
, 0); /* Includes CLOBBERs. */
1440 int nout
= 0; /* Does not include CLOBBERs. */
1442 asmop
= SET_SRC (XVECEXP (body
, 0, 0));
1443 nin
= ASM_OPERANDS_INPUT_LENGTH (asmop
);
1445 /* At least one output, plus some CLOBBERs. */
1447 /* The outputs are in the SETs.
1448 Their constraints are in the ASM_OPERANDS itself. */
1449 for (i
= 0; i
< nparallel
; i
++)
1451 if (GET_CODE (XVECEXP (body
, 0, i
)) == CLOBBER
)
1452 break; /* Past last SET */
1455 operands
[i
] = SET_DEST (XVECEXP (body
, 0, i
));
1457 operand_locs
[i
] = &SET_DEST (XVECEXP (body
, 0, i
));
1459 constraints
[i
] = XSTR (SET_SRC (XVECEXP (body
, 0, i
)), 1);
1461 modes
[i
] = GET_MODE (SET_DEST (XVECEXP (body
, 0, i
)));
1465 for (i
= 0; i
< nin
; i
++)
1468 operand_locs
[i
+ nout
] = &ASM_OPERANDS_INPUT (asmop
, i
);
1470 operands
[i
+ nout
] = ASM_OPERANDS_INPUT (asmop
, i
);
1472 constraints
[i
+ nout
] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop
, i
);
1474 modes
[i
+ nout
] = ASM_OPERANDS_INPUT_MODE (asmop
, i
);
1477 else if (GET_CODE (body
) == PARALLEL
1478 && GET_CODE (XVECEXP (body
, 0, 0)) == ASM_OPERANDS
)
1480 /* No outputs, but some CLOBBERs. */
1484 asmop
= XVECEXP (body
, 0, 0);
1485 nin
= ASM_OPERANDS_INPUT_LENGTH (asmop
);
1487 for (i
= 0; i
< nin
; i
++)
1490 operand_locs
[i
] = &ASM_OPERANDS_INPUT (asmop
, i
);
1492 operands
[i
] = ASM_OPERANDS_INPUT (asmop
, i
);
1494 constraints
[i
] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop
, i
);
1496 modes
[i
] = ASM_OPERANDS_INPUT_MODE (asmop
, i
);
1502 *loc
= ASM_OPERANDS_SOURCE_LOCATION (asmop
);
1504 return ASM_OPERANDS_TEMPLATE (asmop
);
1507 /* Check if an asm_operand matches its constraints.
1508 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
1511 asm_operand_ok (rtx op
, const char *constraint
)
1515 /* Use constrain_operands after reload. */
1516 gcc_assert (!reload_completed
);
1520 char c
= *constraint
;
1537 case '0': case '1': case '2': case '3': case '4':
1538 case '5': case '6': case '7': case '8': case '9':
1539 /* For best results, our caller should have given us the
1540 proper matching constraint, but we can't actually fail
1541 the check if they didn't. Indicate that results are
1545 while (ISDIGIT (*constraint
));
1551 if (address_operand (op
, VOIDmode
))
1555 case TARGET_MEM_CONSTRAINT
:
1556 case 'V': /* non-offsettable */
1557 if (memory_operand (op
, VOIDmode
))
1561 case 'o': /* offsettable */
1562 if (offsettable_nonstrict_memref_p (op
))
1567 /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed to exist,
1568 excepting those that expand_call created. Further, on some
1569 machines which do not have generalized auto inc/dec, an inc/dec
1570 is not a memory_operand.
1572 Match any memory and hope things are resolved after reload. */
1576 || GET_CODE (XEXP (op
, 0)) == PRE_DEC
1577 || GET_CODE (XEXP (op
, 0)) == POST_DEC
))
1584 || GET_CODE (XEXP (op
, 0)) == PRE_INC
1585 || GET_CODE (XEXP (op
, 0)) == POST_INC
))
1591 if (GET_CODE (op
) == CONST_DOUBLE
1592 || (GET_CODE (op
) == CONST_VECTOR
1593 && GET_MODE_CLASS (GET_MODE (op
)) == MODE_VECTOR_FLOAT
))
1598 if (GET_CODE (op
) == CONST_DOUBLE
1599 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op
, 'G', constraint
))
1603 if (GET_CODE (op
) == CONST_DOUBLE
1604 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op
, 'H', constraint
))
1609 if (GET_CODE (op
) == CONST_INT
1610 || (GET_CODE (op
) == CONST_DOUBLE
1611 && GET_MODE (op
) == VOIDmode
))
1616 if (CONSTANT_P (op
) && (! flag_pic
|| LEGITIMATE_PIC_OPERAND_P (op
)))
1621 if (GET_CODE (op
) == CONST_INT
1622 || (GET_CODE (op
) == CONST_DOUBLE
1623 && GET_MODE (op
) == VOIDmode
))
1628 if (GET_CODE (op
) == CONST_INT
1629 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), 'I', constraint
))
1633 if (GET_CODE (op
) == CONST_INT
1634 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), 'J', constraint
))
1638 if (GET_CODE (op
) == CONST_INT
1639 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), 'K', constraint
))
1643 if (GET_CODE (op
) == CONST_INT
1644 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), 'L', constraint
))
1648 if (GET_CODE (op
) == CONST_INT
1649 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), 'M', constraint
))
1653 if (GET_CODE (op
) == CONST_INT
1654 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), 'N', constraint
))
1658 if (GET_CODE (op
) == CONST_INT
1659 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), 'O', constraint
))
1663 if (GET_CODE (op
) == CONST_INT
1664 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), 'P', constraint
))
1673 if (general_operand (op
, VOIDmode
))
1678 /* For all other letters, we first check for a register class,
1679 otherwise it is an EXTRA_CONSTRAINT. */
1680 if (REG_CLASS_FROM_CONSTRAINT (c
, constraint
) != NO_REGS
)
1683 if (GET_MODE (op
) == BLKmode
)
1685 if (register_operand (op
, VOIDmode
))
1688 #ifdef EXTRA_CONSTRAINT_STR
1689 else if (EXTRA_MEMORY_CONSTRAINT (c
, constraint
))
1690 /* Every memory operand can be reloaded to fit. */
1691 result
= result
|| memory_operand (op
, VOIDmode
);
1692 else if (EXTRA_ADDRESS_CONSTRAINT (c
, constraint
))
1693 /* Every address operand can be reloaded to fit. */
1694 result
= result
|| address_operand (op
, VOIDmode
);
1695 else if (EXTRA_CONSTRAINT_STR (op
, c
, constraint
))
1700 len
= CONSTRAINT_LEN (c
, constraint
);
1703 while (--len
&& *constraint
);
1711 /* Given an rtx *P, if it is a sum containing an integer constant term,
1712 return the location (type rtx *) of the pointer to that constant term.
1713 Otherwise, return a null pointer. */
1716 find_constant_term_loc (rtx
*p
)
1719 enum rtx_code code
= GET_CODE (*p
);
1721 /* If *P IS such a constant term, P is its location. */
1723 if (code
== CONST_INT
|| code
== SYMBOL_REF
|| code
== LABEL_REF
1727 /* Otherwise, if not a sum, it has no constant term. */
1729 if (GET_CODE (*p
) != PLUS
)
1732 /* If one of the summands is constant, return its location. */
1734 if (XEXP (*p
, 0) && CONSTANT_P (XEXP (*p
, 0))
1735 && XEXP (*p
, 1) && CONSTANT_P (XEXP (*p
, 1)))
1738 /* Otherwise, check each summand for containing a constant term. */
1740 if (XEXP (*p
, 0) != 0)
1742 tem
= find_constant_term_loc (&XEXP (*p
, 0));
1747 if (XEXP (*p
, 1) != 0)
1749 tem
= find_constant_term_loc (&XEXP (*p
, 1));
1757 /* Return 1 if OP is a memory reference
1758 whose address contains no side effects
1759 and remains valid after the addition
1760 of a positive integer less than the
1761 size of the object being referenced.
1763 We assume that the original address is valid and do not check it.
1765 This uses strict_memory_address_p as a subroutine, so
1766 don't use it before reload. */
1769 offsettable_memref_p (rtx op
)
1771 return ((MEM_P (op
))
1772 && offsettable_address_p (1, GET_MODE (op
), XEXP (op
, 0)));
1775 /* Similar, but don't require a strictly valid mem ref:
1776 consider pseudo-regs valid as index or base regs. */
1779 offsettable_nonstrict_memref_p (rtx op
)
1781 return ((MEM_P (op
))
1782 && offsettable_address_p (0, GET_MODE (op
), XEXP (op
, 0)));
1785 /* Return 1 if Y is a memory address which contains no side effects
1786 and would remain valid after the addition of a positive integer
1787 less than the size of that mode.
1789 We assume that the original address is valid and do not check it.
1790 We do check that it is valid for narrower modes.
1792 If STRICTP is nonzero, we require a strictly valid address,
1793 for the sake of use in reload.c. */
1796 offsettable_address_p (int strictp
, enum machine_mode mode
, rtx y
)
1798 enum rtx_code ycode
= GET_CODE (y
);
1802 int (*addressp
) (enum machine_mode
, rtx
) =
1803 (strictp
? strict_memory_address_p
: memory_address_p
);
1804 unsigned int mode_sz
= GET_MODE_SIZE (mode
);
1806 if (CONSTANT_ADDRESS_P (y
))
1809 /* Adjusting an offsettable address involves changing to a narrower mode.
1810 Make sure that's OK. */
1812 if (mode_dependent_address_p (y
))
1815 /* ??? How much offset does an offsettable BLKmode reference need?
1816 Clearly that depends on the situation in which it's being used.
1817 However, the current situation in which we test 0xffffffff is
1818 less than ideal. Caveat user. */
1820 mode_sz
= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
;
1822 /* If the expression contains a constant term,
1823 see if it remains valid when max possible offset is added. */
1825 if ((ycode
== PLUS
) && (y2
= find_constant_term_loc (&y1
)))
1830 *y2
= plus_constant (*y2
, mode_sz
- 1);
1831 /* Use QImode because an odd displacement may be automatically invalid
1832 for any wider mode. But it should be valid for a single byte. */
1833 good
= (*addressp
) (QImode
, y
);
1835 /* In any case, restore old contents of memory. */
1840 if (GET_RTX_CLASS (ycode
) == RTX_AUTOINC
)
1843 /* The offset added here is chosen as the maximum offset that
1844 any instruction could need to add when operating on something
1845 of the specified mode. We assume that if Y and Y+c are
1846 valid addresses then so is Y+d for all 0<d<c. adjust_address will
1847 go inside a LO_SUM here, so we do so as well. */
1848 if (GET_CODE (y
) == LO_SUM
1850 && mode_sz
<= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
)
1851 z
= gen_rtx_LO_SUM (GET_MODE (y
), XEXP (y
, 0),
1852 plus_constant (XEXP (y
, 1), mode_sz
- 1));
1854 z
= plus_constant (y
, mode_sz
- 1);
1856 /* Use QImode because an odd displacement may be automatically invalid
1857 for any wider mode. But it should be valid for a single byte. */
1858 return (*addressp
) (QImode
, z
);
1861 /* Return 1 if ADDR is an address-expression whose effect depends
1862 on the mode of the memory reference it is used in.
1864 Autoincrement addressing is a typical example of mode-dependence
1865 because the amount of the increment depends on the mode. */
1868 mode_dependent_address_p (rtx addr
)
1870 /* Auto-increment addressing with anything other than post_modify
1871 or pre_modify always introduces a mode dependency. Catch such
1872 cases now instead of deferring to the target. */
1873 if (GET_CODE (addr
) == PRE_INC
1874 || GET_CODE (addr
) == POST_INC
1875 || GET_CODE (addr
) == PRE_DEC
1876 || GET_CODE (addr
) == POST_DEC
)
1879 GO_IF_MODE_DEPENDENT_ADDRESS (addr
, win
);
1881 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
1882 win
: ATTRIBUTE_UNUSED_LABEL
1886 /* Like extract_insn, but save insn extracted and don't extract again, when
1887 called again for the same insn expecting that recog_data still contain the
1888 valid information. This is used primary by gen_attr infrastructure that
1889 often does extract insn again and again. */
1891 extract_insn_cached (rtx insn
)
1893 if (recog_data
.insn
== insn
&& INSN_CODE (insn
) >= 0)
1895 extract_insn (insn
);
1896 recog_data
.insn
= insn
;
1899 /* Do cached extract_insn, constrain_operands and complain about failures.
1900 Used by insn_attrtab. */
1902 extract_constrain_insn_cached (rtx insn
)
1904 extract_insn_cached (insn
);
1905 if (which_alternative
== -1
1906 && !constrain_operands (reload_completed
))
1907 fatal_insn_not_found (insn
);
1910 /* Do cached constrain_operands and complain about failures. */
1912 constrain_operands_cached (int strict
)
1914 if (which_alternative
== -1)
1915 return constrain_operands (strict
);
1920 /* Analyze INSN and fill in recog_data. */
1923 extract_insn (rtx insn
)
1928 rtx body
= PATTERN (insn
);
1930 recog_data
.n_operands
= 0;
1931 recog_data
.n_alternatives
= 0;
1932 recog_data
.n_dups
= 0;
1934 switch (GET_CODE (body
))
1944 if (GET_CODE (SET_SRC (body
)) == ASM_OPERANDS
)
1949 if ((GET_CODE (XVECEXP (body
, 0, 0)) == SET
1950 && GET_CODE (SET_SRC (XVECEXP (body
, 0, 0))) == ASM_OPERANDS
)
1951 || GET_CODE (XVECEXP (body
, 0, 0)) == ASM_OPERANDS
)
1957 recog_data
.n_operands
= noperands
= asm_noperands (body
);
1960 /* This insn is an `asm' with operands. */
1962 /* expand_asm_operands makes sure there aren't too many operands. */
1963 gcc_assert (noperands
<= MAX_RECOG_OPERANDS
);
1965 /* Now get the operand values and constraints out of the insn. */
1966 decode_asm_operands (body
, recog_data
.operand
,
1967 recog_data
.operand_loc
,
1968 recog_data
.constraints
,
1969 recog_data
.operand_mode
, NULL
);
1972 const char *p
= recog_data
.constraints
[0];
1973 recog_data
.n_alternatives
= 1;
1975 recog_data
.n_alternatives
+= (*p
++ == ',');
1979 fatal_insn_not_found (insn
);
1983 /* Ordinary insn: recognize it, get the operands via insn_extract
1984 and get the constraints. */
1986 icode
= recog_memoized (insn
);
1988 fatal_insn_not_found (insn
);
1990 recog_data
.n_operands
= noperands
= insn_data
[icode
].n_operands
;
1991 recog_data
.n_alternatives
= insn_data
[icode
].n_alternatives
;
1992 recog_data
.n_dups
= insn_data
[icode
].n_dups
;
1994 insn_extract (insn
);
1996 for (i
= 0; i
< noperands
; i
++)
1998 recog_data
.constraints
[i
] = insn_data
[icode
].operand
[i
].constraint
;
1999 recog_data
.operand_mode
[i
] = insn_data
[icode
].operand
[i
].mode
;
2000 /* VOIDmode match_operands gets mode from their real operand. */
2001 if (recog_data
.operand_mode
[i
] == VOIDmode
)
2002 recog_data
.operand_mode
[i
] = GET_MODE (recog_data
.operand
[i
]);
2005 for (i
= 0; i
< noperands
; i
++)
2006 recog_data
.operand_type
[i
]
2007 = (recog_data
.constraints
[i
][0] == '=' ? OP_OUT
2008 : recog_data
.constraints
[i
][0] == '+' ? OP_INOUT
2011 gcc_assert (recog_data
.n_alternatives
<= MAX_RECOG_ALTERNATIVES
);
2013 if (INSN_CODE (insn
) < 0)
2014 for (i
= 0; i
< recog_data
.n_alternatives
; i
++)
2015 recog_data
.alternative_enabled_p
[i
] = true;
2018 recog_data
.insn
= insn
;
2019 for (i
= 0; i
< recog_data
.n_alternatives
; i
++)
2021 which_alternative
= i
;
2022 recog_data
.alternative_enabled_p
[i
] = get_attr_enabled (insn
);
2026 recog_data
.insn
= NULL
;
2027 which_alternative
= -1;
2030 /* After calling extract_insn, you can use this function to extract some
2031 information from the constraint strings into a more usable form.
2032 The collected data is stored in recog_op_alt. */
2034 preprocess_constraints (void)
2038 for (i
= 0; i
< recog_data
.n_operands
; i
++)
2039 memset (recog_op_alt
[i
], 0, (recog_data
.n_alternatives
2040 * sizeof (struct operand_alternative
)));
2042 for (i
= 0; i
< recog_data
.n_operands
; i
++)
2045 struct operand_alternative
*op_alt
;
2046 const char *p
= recog_data
.constraints
[i
];
2048 op_alt
= recog_op_alt
[i
];
2050 for (j
= 0; j
< recog_data
.n_alternatives
; j
++)
2052 op_alt
[j
].cl
= NO_REGS
;
2053 op_alt
[j
].constraint
= p
;
2054 op_alt
[j
].matches
= -1;
2055 op_alt
[j
].matched
= -1;
2057 if (!recog_data
.alternative_enabled_p
[j
])
2059 p
= skip_alternative (p
);
2063 if (*p
== '\0' || *p
== ',')
2065 op_alt
[j
].anything_ok
= 1;
2075 while (c
!= ',' && c
!= '\0');
2076 if (c
== ',' || c
== '\0')
2084 case '=': case '+': case '*': case '%':
2085 case 'E': case 'F': case 'G': case 'H':
2086 case 's': case 'i': case 'n':
2087 case 'I': case 'J': case 'K': case 'L':
2088 case 'M': case 'N': case 'O': case 'P':
2089 /* These don't say anything we care about. */
2093 op_alt
[j
].reject
+= 6;
2096 op_alt
[j
].reject
+= 600;
2099 op_alt
[j
].earlyclobber
= 1;
2102 case '0': case '1': case '2': case '3': case '4':
2103 case '5': case '6': case '7': case '8': case '9':
2106 op_alt
[j
].matches
= strtoul (p
, &end
, 10);
2107 recog_op_alt
[op_alt
[j
].matches
][j
].matched
= i
;
2112 case TARGET_MEM_CONSTRAINT
:
2113 op_alt
[j
].memory_ok
= 1;
2116 op_alt
[j
].decmem_ok
= 1;
2119 op_alt
[j
].incmem_ok
= 1;
2122 op_alt
[j
].nonoffmem_ok
= 1;
2125 op_alt
[j
].offmem_ok
= 1;
2128 op_alt
[j
].anything_ok
= 1;
2132 op_alt
[j
].is_address
= 1;
2133 op_alt
[j
].cl
= reg_class_subunion
[(int) op_alt
[j
].cl
]
2134 [(int) base_reg_class (VOIDmode
, ADDRESS
, SCRATCH
)];
2140 reg_class_subunion
[(int) op_alt
[j
].cl
][(int) GENERAL_REGS
];
2144 if (EXTRA_MEMORY_CONSTRAINT (c
, p
))
2146 op_alt
[j
].memory_ok
= 1;
2149 if (EXTRA_ADDRESS_CONSTRAINT (c
, p
))
2151 op_alt
[j
].is_address
= 1;
2153 = (reg_class_subunion
2154 [(int) op_alt
[j
].cl
]
2155 [(int) base_reg_class (VOIDmode
, ADDRESS
,
2161 = (reg_class_subunion
2162 [(int) op_alt
[j
].cl
]
2163 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c
, p
)]);
2166 p
+= CONSTRAINT_LEN (c
, p
);
2172 /* Check the operands of an insn against the insn's operand constraints
2173 and return 1 if they are valid.
2174 The information about the insn's operands, constraints, operand modes
2175 etc. is obtained from the global variables set up by extract_insn.
2177 WHICH_ALTERNATIVE is set to a number which indicates which
2178 alternative of constraints was matched: 0 for the first alternative,
2179 1 for the next, etc.
2181 In addition, when two operands are required to match
2182 and it happens that the output operand is (reg) while the
2183 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2184 make the output operand look like the input.
2185 This is because the output operand is the one the template will print.
2187 This is used in final, just before printing the assembler code and by
2188 the routines that determine an insn's attribute.
2190 If STRICT is a positive nonzero value, it means that we have been
2191 called after reload has been completed. In that case, we must
2192 do all checks strictly. If it is zero, it means that we have been called
2193 before reload has completed. In that case, we first try to see if we can
2194 find an alternative that matches strictly. If not, we try again, this
2195 time assuming that reload will fix up the insn. This provides a "best
2196 guess" for the alternative and is used to compute attributes of insns prior
2197 to reload. A negative value of STRICT is used for this internal call. */
2205 constrain_operands (int strict
)
2207 const char *constraints
[MAX_RECOG_OPERANDS
];
2208 int matching_operands
[MAX_RECOG_OPERANDS
];
2209 int earlyclobber
[MAX_RECOG_OPERANDS
];
2212 struct funny_match funny_match
[MAX_RECOG_OPERANDS
];
2213 int funny_match_index
;
2215 which_alternative
= 0;
2216 if (recog_data
.n_operands
== 0 || recog_data
.n_alternatives
== 0)
2219 for (c
= 0; c
< recog_data
.n_operands
; c
++)
2221 constraints
[c
] = recog_data
.constraints
[c
];
2222 matching_operands
[c
] = -1;
2227 int seen_earlyclobber_at
= -1;
2230 funny_match_index
= 0;
2232 if (!recog_data
.alternative_enabled_p
[which_alternative
])
2236 for (i
= 0; i
< recog_data
.n_operands
; i
++)
2237 constraints
[i
] = skip_alternative (constraints
[i
]);
2239 which_alternative
++;
2243 for (opno
= 0; opno
< recog_data
.n_operands
; opno
++)
2245 rtx op
= recog_data
.operand
[opno
];
2246 enum machine_mode mode
= GET_MODE (op
);
2247 const char *p
= constraints
[opno
];
2253 earlyclobber
[opno
] = 0;
2255 /* A unary operator may be accepted by the predicate, but it
2256 is irrelevant for matching constraints. */
2260 if (GET_CODE (op
) == SUBREG
)
2262 if (REG_P (SUBREG_REG (op
))
2263 && REGNO (SUBREG_REG (op
)) < FIRST_PSEUDO_REGISTER
)
2264 offset
= subreg_regno_offset (REGNO (SUBREG_REG (op
)),
2265 GET_MODE (SUBREG_REG (op
)),
2268 op
= SUBREG_REG (op
);
2271 /* An empty constraint or empty alternative
2272 allows anything which matched the pattern. */
2273 if (*p
== 0 || *p
== ',')
2277 switch (c
= *p
, len
= CONSTRAINT_LEN (c
, p
), c
)
2286 case '?': case '!': case '*': case '%':
2291 /* Ignore rest of this alternative as far as
2292 constraint checking is concerned. */
2295 while (*p
&& *p
!= ',');
2300 earlyclobber
[opno
] = 1;
2301 if (seen_earlyclobber_at
< 0)
2302 seen_earlyclobber_at
= opno
;
2305 case '0': case '1': case '2': case '3': case '4':
2306 case '5': case '6': case '7': case '8': case '9':
2308 /* This operand must be the same as a previous one.
2309 This kind of constraint is used for instructions such
2310 as add when they take only two operands.
2312 Note that the lower-numbered operand is passed first.
2314 If we are not testing strictly, assume that this
2315 constraint will be satisfied. */
2320 match
= strtoul (p
, &end
, 10);
2327 rtx op1
= recog_data
.operand
[match
];
2328 rtx op2
= recog_data
.operand
[opno
];
2330 /* A unary operator may be accepted by the predicate,
2331 but it is irrelevant for matching constraints. */
2333 op1
= XEXP (op1
, 0);
2335 op2
= XEXP (op2
, 0);
2337 val
= operands_match_p (op1
, op2
);
2340 matching_operands
[opno
] = match
;
2341 matching_operands
[match
] = opno
;
2346 /* If output is *x and input is *--x, arrange later
2347 to change the output to *--x as well, since the
2348 output op is the one that will be printed. */
2349 if (val
== 2 && strict
> 0)
2351 funny_match
[funny_match_index
].this_op
= opno
;
2352 funny_match
[funny_match_index
++].other
= match
;
2359 /* p is used for address_operands. When we are called by
2360 gen_reload, no one will have checked that the address is
2361 strictly valid, i.e., that all pseudos requiring hard regs
2362 have gotten them. */
2364 || (strict_memory_address_p (recog_data
.operand_mode
[opno
],
2369 /* No need to check general_operand again;
2370 it was done in insn-recog.c. Well, except that reload
2371 doesn't check the validity of its replacements, but
2372 that should only matter when there's a bug. */
2374 /* Anything goes unless it is a REG and really has a hard reg
2375 but the hard reg is not in the class GENERAL_REGS. */
2379 || GENERAL_REGS
== ALL_REGS
2380 || (reload_in_progress
2381 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)
2382 || reg_fits_class_p (op
, GENERAL_REGS
, offset
, mode
))
2385 else if (strict
< 0 || general_operand (op
, mode
))
2390 /* This is used for a MATCH_SCRATCH in the cases when
2391 we don't actually need anything. So anything goes
2396 case TARGET_MEM_CONSTRAINT
:
2397 /* Memory operands must be valid, to the extent
2398 required by STRICT. */
2402 && !strict_memory_address_p (GET_MODE (op
),
2406 && !memory_address_p (GET_MODE (op
), XEXP (op
, 0)))
2410 /* Before reload, accept what reload can turn into mem. */
2411 else if (strict
< 0 && CONSTANT_P (op
))
2413 /* During reload, accept a pseudo */
2414 else if (reload_in_progress
&& REG_P (op
)
2415 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)
2421 && (GET_CODE (XEXP (op
, 0)) == PRE_DEC
2422 || GET_CODE (XEXP (op
, 0)) == POST_DEC
))
2428 && (GET_CODE (XEXP (op
, 0)) == PRE_INC
2429 || GET_CODE (XEXP (op
, 0)) == POST_INC
))
2435 if (GET_CODE (op
) == CONST_DOUBLE
2436 || (GET_CODE (op
) == CONST_VECTOR
2437 && GET_MODE_CLASS (GET_MODE (op
)) == MODE_VECTOR_FLOAT
))
2443 if (GET_CODE (op
) == CONST_DOUBLE
2444 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op
, c
, p
))
2449 if (GET_CODE (op
) == CONST_INT
2450 || (GET_CODE (op
) == CONST_DOUBLE
2451 && GET_MODE (op
) == VOIDmode
))
2454 if (CONSTANT_P (op
))
2459 if (GET_CODE (op
) == CONST_INT
2460 || (GET_CODE (op
) == CONST_DOUBLE
2461 && GET_MODE (op
) == VOIDmode
))
2473 if (GET_CODE (op
) == CONST_INT
2474 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), c
, p
))
2480 && ((strict
> 0 && ! offsettable_memref_p (op
))
2482 && !(CONSTANT_P (op
) || MEM_P (op
)))
2483 || (reload_in_progress
2485 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
))))
2490 if ((strict
> 0 && offsettable_memref_p (op
))
2491 || (strict
== 0 && offsettable_nonstrict_memref_p (op
))
2492 /* Before reload, accept what reload can handle. */
2494 && (CONSTANT_P (op
) || MEM_P (op
)))
2495 /* During reload, accept a pseudo */
2496 || (reload_in_progress
&& REG_P (op
)
2497 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
))
2506 ? GENERAL_REGS
: REG_CLASS_FROM_CONSTRAINT (c
, p
));
2512 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)
2513 || (strict
== 0 && GET_CODE (op
) == SCRATCH
)
2515 && reg_fits_class_p (op
, cl
, offset
, mode
)))
2518 #ifdef EXTRA_CONSTRAINT_STR
2519 else if (EXTRA_CONSTRAINT_STR (op
, c
, p
))
2522 else if (EXTRA_MEMORY_CONSTRAINT (c
, p
)
2523 /* Every memory operand can be reloaded to fit. */
2524 && ((strict
< 0 && MEM_P (op
))
2525 /* Before reload, accept what reload can turn
2527 || (strict
< 0 && CONSTANT_P (op
))
2528 /* During reload, accept a pseudo */
2529 || (reload_in_progress
&& REG_P (op
)
2530 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)))
2532 else if (EXTRA_ADDRESS_CONSTRAINT (c
, p
)
2533 /* Every address operand can be reloaded to fit. */
2540 while (p
+= len
, c
);
2542 constraints
[opno
] = p
;
2543 /* If this operand did not win somehow,
2544 this alternative loses. */
2548 /* This alternative won; the operands are ok.
2549 Change whichever operands this alternative says to change. */
2554 /* See if any earlyclobber operand conflicts with some other
2557 if (strict
> 0 && seen_earlyclobber_at
>= 0)
2558 for (eopno
= seen_earlyclobber_at
;
2559 eopno
< recog_data
.n_operands
;
2561 /* Ignore earlyclobber operands now in memory,
2562 because we would often report failure when we have
2563 two memory operands, one of which was formerly a REG. */
2564 if (earlyclobber
[eopno
]
2565 && REG_P (recog_data
.operand
[eopno
]))
2566 for (opno
= 0; opno
< recog_data
.n_operands
; opno
++)
2567 if ((MEM_P (recog_data
.operand
[opno
])
2568 || recog_data
.operand_type
[opno
] != OP_OUT
)
2570 /* Ignore things like match_operator operands. */
2571 && *recog_data
.constraints
[opno
] != 0
2572 && ! (matching_operands
[opno
] == eopno
2573 && operands_match_p (recog_data
.operand
[opno
],
2574 recog_data
.operand
[eopno
]))
2575 && ! safe_from_earlyclobber (recog_data
.operand
[opno
],
2576 recog_data
.operand
[eopno
]))
2581 while (--funny_match_index
>= 0)
2583 recog_data
.operand
[funny_match
[funny_match_index
].other
]
2584 = recog_data
.operand
[funny_match
[funny_match_index
].this_op
];
2591 which_alternative
++;
2593 while (which_alternative
< recog_data
.n_alternatives
);
2595 which_alternative
= -1;
2596 /* If we are about to reject this, but we are not to test strictly,
2597 try a very loose test. Only return failure if it fails also. */
2599 return constrain_operands (-1);
2604 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2605 is a hard reg in class CLASS when its regno is offset by OFFSET
2606 and changed to mode MODE.
2607 If REG occupies multiple hard regs, all of them must be in CLASS. */
2610 reg_fits_class_p (rtx operand
, enum reg_class cl
, int offset
,
2611 enum machine_mode mode
)
2613 int regno
= REGNO (operand
);
2618 return (regno
< FIRST_PSEUDO_REGISTER
2619 && in_hard_reg_set_p (reg_class_contents
[(int) cl
],
2620 mode
, regno
+ offset
));
2623 /* Split single instruction. Helper function for split_all_insns and
2624 split_all_insns_noflow. Return last insn in the sequence if successful,
2625 or NULL if unsuccessful. */
2628 split_insn (rtx insn
)
2630 /* Split insns here to get max fine-grain parallelism. */
2631 rtx first
= PREV_INSN (insn
);
2632 rtx last
= try_split (PATTERN (insn
), insn
, 1);
2637 /* try_split returns the NOTE that INSN became. */
2638 SET_INSN_DELETED (insn
);
2640 /* ??? Coddle to md files that generate subregs in post-reload
2641 splitters instead of computing the proper hard register. */
2642 if (reload_completed
&& first
!= last
)
2644 first
= NEXT_INSN (first
);
2648 cleanup_subreg_operands (first
);
2651 first
= NEXT_INSN (first
);
2657 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2660 split_all_insns (void)
2666 blocks
= sbitmap_alloc (last_basic_block
);
2667 sbitmap_zero (blocks
);
2670 FOR_EACH_BB_REVERSE (bb
)
2673 bool finish
= false;
2675 rtl_profile_for_bb (bb
);
2676 for (insn
= BB_HEAD (bb
); !finish
; insn
= next
)
2678 /* Can't use `next_real_insn' because that might go across
2679 CODE_LABELS and short-out basic blocks. */
2680 next
= NEXT_INSN (insn
);
2681 finish
= (insn
== BB_END (bb
));
2684 rtx set
= single_set (insn
);
2686 /* Don't split no-op move insns. These should silently
2687 disappear later in final. Splitting such insns would
2688 break the code that handles LIBCALL blocks. */
2689 if (set
&& set_noop_p (set
))
2691 /* Nops get in the way while scheduling, so delete them
2692 now if register allocation has already been done. It
2693 is too risky to try to do this before register
2694 allocation, and there are unlikely to be very many
2695 nops then anyways. */
2696 if (reload_completed
)
2697 delete_insn_and_edges (insn
);
2701 rtx last
= split_insn (insn
);
2704 /* The split sequence may include barrier, but the
2705 BB boundary we are interested in will be set to
2708 while (BARRIER_P (last
))
2709 last
= PREV_INSN (last
);
2710 SET_BIT (blocks
, bb
->index
);
2718 default_rtl_profile ();
2720 find_many_sub_basic_blocks (blocks
);
2722 #ifdef ENABLE_CHECKING
2723 verify_flow_info ();
2726 sbitmap_free (blocks
);
2729 /* Same as split_all_insns, but do not expect CFG to be available.
2730 Used by machine dependent reorg passes. */
2733 split_all_insns_noflow (void)
2737 for (insn
= get_insns (); insn
; insn
= next
)
2739 next
= NEXT_INSN (insn
);
2742 /* Don't split no-op move insns. These should silently
2743 disappear later in final. Splitting such insns would
2744 break the code that handles LIBCALL blocks. */
2745 rtx set
= single_set (insn
);
2746 if (set
&& set_noop_p (set
))
2748 /* Nops get in the way while scheduling, so delete them
2749 now if register allocation has already been done. It
2750 is too risky to try to do this before register
2751 allocation, and there are unlikely to be very many
2754 ??? Should we use delete_insn when the CFG isn't valid? */
2755 if (reload_completed
)
2756 delete_insn_and_edges (insn
);
2765 #ifdef HAVE_peephole2
2766 struct peep2_insn_data
2772 static struct peep2_insn_data peep2_insn_data
[MAX_INSNS_PER_PEEP2
+ 1];
2773 static int peep2_current
;
2774 /* The number of instructions available to match a peep2. */
2775 int peep2_current_count
;
2777 /* A non-insn marker indicating the last insn of the block.
2778 The live_before regset for this element is correct, indicating
2779 DF_LIVE_OUT for the block. */
2780 #define PEEP2_EOB pc_rtx
2782 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2783 does not exist. Used by the recognizer to find the next insn to match
2784 in a multi-insn pattern. */
2787 peep2_next_insn (int n
)
2789 gcc_assert (n
<= peep2_current_count
);
2792 if (n
>= MAX_INSNS_PER_PEEP2
+ 1)
2793 n
-= MAX_INSNS_PER_PEEP2
+ 1;
2795 return peep2_insn_data
[n
].insn
;
2798 /* Return true if REGNO is dead before the Nth non-note insn
2802 peep2_regno_dead_p (int ofs
, int regno
)
2804 gcc_assert (ofs
< MAX_INSNS_PER_PEEP2
+ 1);
2806 ofs
+= peep2_current
;
2807 if (ofs
>= MAX_INSNS_PER_PEEP2
+ 1)
2808 ofs
-= MAX_INSNS_PER_PEEP2
+ 1;
2810 gcc_assert (peep2_insn_data
[ofs
].insn
!= NULL_RTX
);
2812 return ! REGNO_REG_SET_P (peep2_insn_data
[ofs
].live_before
, regno
);
2815 /* Similarly for a REG. */
2818 peep2_reg_dead_p (int ofs
, rtx reg
)
2822 gcc_assert (ofs
< MAX_INSNS_PER_PEEP2
+ 1);
2824 ofs
+= peep2_current
;
2825 if (ofs
>= MAX_INSNS_PER_PEEP2
+ 1)
2826 ofs
-= MAX_INSNS_PER_PEEP2
+ 1;
2828 gcc_assert (peep2_insn_data
[ofs
].insn
!= NULL_RTX
);
2830 regno
= REGNO (reg
);
2831 n
= hard_regno_nregs
[regno
][GET_MODE (reg
)];
2833 if (REGNO_REG_SET_P (peep2_insn_data
[ofs
].live_before
, regno
+ n
))
2838 /* Try to find a hard register of mode MODE, matching the register class in
2839 CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
2840 remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
2841 in which case the only condition is that the register must be available
2842 before CURRENT_INSN.
2843 Registers that already have bits set in REG_SET will not be considered.
2845 If an appropriate register is available, it will be returned and the
2846 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
2850 peep2_find_free_register (int from
, int to
, const char *class_str
,
2851 enum machine_mode mode
, HARD_REG_SET
*reg_set
)
2853 static int search_ofs
;
2858 gcc_assert (from
< MAX_INSNS_PER_PEEP2
+ 1);
2859 gcc_assert (to
< MAX_INSNS_PER_PEEP2
+ 1);
2861 from
+= peep2_current
;
2862 if (from
>= MAX_INSNS_PER_PEEP2
+ 1)
2863 from
-= MAX_INSNS_PER_PEEP2
+ 1;
2864 to
+= peep2_current
;
2865 if (to
>= MAX_INSNS_PER_PEEP2
+ 1)
2866 to
-= MAX_INSNS_PER_PEEP2
+ 1;
2868 gcc_assert (peep2_insn_data
[from
].insn
!= NULL_RTX
);
2869 REG_SET_TO_HARD_REG_SET (live
, peep2_insn_data
[from
].live_before
);
2873 HARD_REG_SET this_live
;
2875 if (++from
>= MAX_INSNS_PER_PEEP2
+ 1)
2877 gcc_assert (peep2_insn_data
[from
].insn
!= NULL_RTX
);
2878 REG_SET_TO_HARD_REG_SET (this_live
, peep2_insn_data
[from
].live_before
);
2879 IOR_HARD_REG_SET (live
, this_live
);
2882 cl
= (class_str
[0] == 'r' ? GENERAL_REGS
2883 : REG_CLASS_FROM_CONSTRAINT (class_str
[0], class_str
));
2885 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2887 int raw_regno
, regno
, success
, j
;
2889 /* Distribute the free registers as much as possible. */
2890 raw_regno
= search_ofs
+ i
;
2891 if (raw_regno
>= FIRST_PSEUDO_REGISTER
)
2892 raw_regno
-= FIRST_PSEUDO_REGISTER
;
2893 #ifdef REG_ALLOC_ORDER
2894 regno
= reg_alloc_order
[raw_regno
];
2899 /* Don't allocate fixed registers. */
2900 if (fixed_regs
[regno
])
2902 /* Don't allocate global registers. */
2903 if (global_regs
[regno
])
2905 /* Make sure the register is of the right class. */
2906 if (! TEST_HARD_REG_BIT (reg_class_contents
[cl
], regno
))
2908 /* And can support the mode we need. */
2909 if (! HARD_REGNO_MODE_OK (regno
, mode
))
2911 /* And that we don't create an extra save/restore. */
2912 if (! call_used_regs
[regno
] && ! df_regs_ever_live_p (regno
))
2914 if (! targetm
.hard_regno_scratch_ok (regno
))
2917 /* And we don't clobber traceback for noreturn functions. */
2918 if ((regno
== FRAME_POINTER_REGNUM
|| regno
== HARD_FRAME_POINTER_REGNUM
)
2919 && (! reload_completed
|| frame_pointer_needed
))
2923 for (j
= hard_regno_nregs
[regno
][mode
] - 1; j
>= 0; j
--)
2925 if (TEST_HARD_REG_BIT (*reg_set
, regno
+ j
)
2926 || TEST_HARD_REG_BIT (live
, regno
+ j
))
2934 add_to_hard_reg_set (reg_set
, mode
, regno
);
2936 /* Start the next search with the next register. */
2937 if (++raw_regno
>= FIRST_PSEUDO_REGISTER
)
2939 search_ofs
= raw_regno
;
2941 return gen_rtx_REG (mode
, regno
);
2949 /* Perform the peephole2 optimization pass. */
2952 peephole2_optimize (void)
2958 bool do_cleanup_cfg
= false;
2959 bool do_rebuild_jump_labels
= false;
2961 df_set_flags (DF_LR_RUN_DCE
);
2964 /* Initialize the regsets we're going to use. */
2965 for (i
= 0; i
< MAX_INSNS_PER_PEEP2
+ 1; ++i
)
2966 peep2_insn_data
[i
].live_before
= BITMAP_ALLOC (®_obstack
);
2967 live
= BITMAP_ALLOC (®_obstack
);
2969 FOR_EACH_BB_REVERSE (bb
)
2971 rtl_profile_for_bb (bb
);
2972 /* Indicate that all slots except the last holds invalid data. */
2973 for (i
= 0; i
< MAX_INSNS_PER_PEEP2
; ++i
)
2974 peep2_insn_data
[i
].insn
= NULL_RTX
;
2975 peep2_current_count
= 0;
2977 /* Indicate that the last slot contains live_after data. */
2978 peep2_insn_data
[MAX_INSNS_PER_PEEP2
].insn
= PEEP2_EOB
;
2979 peep2_current
= MAX_INSNS_PER_PEEP2
;
2981 /* Start up propagation. */
2982 bitmap_copy (live
, DF_LR_OUT (bb
));
2983 df_simulate_artificial_refs_at_end (bb
, live
);
2984 bitmap_copy (peep2_insn_data
[MAX_INSNS_PER_PEEP2
].live_before
, live
);
2986 for (insn
= BB_END (bb
); ; insn
= prev
)
2988 prev
= PREV_INSN (insn
);
2991 rtx attempt
, before_try
, x
;
2994 bool was_call
= false;
2996 /* Record this insn. */
2997 if (--peep2_current
< 0)
2998 peep2_current
= MAX_INSNS_PER_PEEP2
;
2999 if (peep2_current_count
< MAX_INSNS_PER_PEEP2
3000 && peep2_insn_data
[peep2_current
].insn
== NULL_RTX
)
3001 peep2_current_count
++;
3002 peep2_insn_data
[peep2_current
].insn
= insn
;
3003 df_simulate_one_insn (bb
, insn
, live
);
3004 COPY_REG_SET (peep2_insn_data
[peep2_current
].live_before
, live
);
3006 if (RTX_FRAME_RELATED_P (insn
))
3008 /* If an insn has RTX_FRAME_RELATED_P set, peephole
3009 substitution would lose the
3010 REG_FRAME_RELATED_EXPR that is attached. */
3011 peep2_current_count
= 0;
3015 /* Match the peephole. */
3016 attempt
= peephole2_insns (PATTERN (insn
), insn
, &match_len
);
3018 if (attempt
!= NULL
)
3020 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3021 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3022 cfg-related call notes. */
3023 for (i
= 0; i
<= match_len
; ++i
)
3026 rtx old_insn
, new_insn
, note
;
3028 j
= i
+ peep2_current
;
3029 if (j
>= MAX_INSNS_PER_PEEP2
+ 1)
3030 j
-= MAX_INSNS_PER_PEEP2
+ 1;
3031 old_insn
= peep2_insn_data
[j
].insn
;
3032 if (!CALL_P (old_insn
))
3037 while (new_insn
!= NULL_RTX
)
3039 if (CALL_P (new_insn
))
3041 new_insn
= NEXT_INSN (new_insn
);
3044 gcc_assert (new_insn
!= NULL_RTX
);
3046 CALL_INSN_FUNCTION_USAGE (new_insn
)
3047 = CALL_INSN_FUNCTION_USAGE (old_insn
);
3049 for (note
= REG_NOTES (old_insn
);
3051 note
= XEXP (note
, 1))
3052 switch (REG_NOTE_KIND (note
))
3056 add_reg_note (new_insn
, REG_NOTE_KIND (note
),
3060 /* Discard all other reg notes. */
3064 /* Croak if there is another call in the sequence. */
3065 while (++i
<= match_len
)
3067 j
= i
+ peep2_current
;
3068 if (j
>= MAX_INSNS_PER_PEEP2
+ 1)
3069 j
-= MAX_INSNS_PER_PEEP2
+ 1;
3070 old_insn
= peep2_insn_data
[j
].insn
;
3071 gcc_assert (!CALL_P (old_insn
));
3076 i
= match_len
+ peep2_current
;
3077 if (i
>= MAX_INSNS_PER_PEEP2
+ 1)
3078 i
-= MAX_INSNS_PER_PEEP2
+ 1;
3080 note
= find_reg_note (peep2_insn_data
[i
].insn
,
3081 REG_EH_REGION
, NULL_RTX
);
3083 /* Replace the old sequence with the new. */
3084 attempt
= emit_insn_after_setloc (attempt
,
3085 peep2_insn_data
[i
].insn
,
3086 INSN_LOCATOR (peep2_insn_data
[i
].insn
));
3087 before_try
= PREV_INSN (insn
);
3088 delete_insn_chain (insn
, peep2_insn_data
[i
].insn
, false);
3090 /* Re-insert the EH_REGION notes. */
3091 if (note
|| (was_call
&& nonlocal_goto_handler_labels
))
3096 FOR_EACH_EDGE (eh_edge
, ei
, bb
->succs
)
3097 if (eh_edge
->flags
& (EDGE_EH
| EDGE_ABNORMAL_CALL
))
3100 for (x
= attempt
; x
!= before_try
; x
= PREV_INSN (x
))
3102 || (flag_non_call_exceptions
3103 && may_trap_p (PATTERN (x
))
3104 && !find_reg_note (x
, REG_EH_REGION
, NULL
)))
3107 add_reg_note (x
, REG_EH_REGION
, XEXP (note
, 0));
3109 if (x
!= BB_END (bb
) && eh_edge
)
3114 nfte
= split_block (bb
, x
);
3115 flags
= (eh_edge
->flags
3116 & (EDGE_EH
| EDGE_ABNORMAL
));
3118 flags
|= EDGE_ABNORMAL_CALL
;
3119 nehe
= make_edge (nfte
->src
, eh_edge
->dest
,
3122 nehe
->probability
= eh_edge
->probability
;
3124 = REG_BR_PROB_BASE
- nehe
->probability
;
3126 do_cleanup_cfg
|= purge_dead_edges (nfte
->dest
);
3132 /* Converting possibly trapping insn to non-trapping is
3133 possible. Zap dummy outgoing edges. */
3134 do_cleanup_cfg
|= purge_dead_edges (bb
);
3137 #ifdef HAVE_conditional_execution
3138 for (i
= 0; i
< MAX_INSNS_PER_PEEP2
+ 1; ++i
)
3139 peep2_insn_data
[i
].insn
= NULL_RTX
;
3140 peep2_insn_data
[peep2_current
].insn
= PEEP2_EOB
;
3141 peep2_current_count
= 0;
3143 /* Back up lifetime information past the end of the
3144 newly created sequence. */
3145 if (++i
>= MAX_INSNS_PER_PEEP2
+ 1)
3147 bitmap_copy (live
, peep2_insn_data
[i
].live_before
);
3149 /* Update life information for the new sequence. */
3156 i
= MAX_INSNS_PER_PEEP2
;
3157 if (peep2_current_count
< MAX_INSNS_PER_PEEP2
3158 && peep2_insn_data
[i
].insn
== NULL_RTX
)
3159 peep2_current_count
++;
3160 peep2_insn_data
[i
].insn
= x
;
3162 df_simulate_one_insn (bb
, x
, live
);
3163 bitmap_copy (peep2_insn_data
[i
].live_before
, live
);
3172 /* If we generated a jump instruction, it won't have
3173 JUMP_LABEL set. Recompute after we're done. */
3174 for (x
= attempt
; x
!= before_try
; x
= PREV_INSN (x
))
3177 do_rebuild_jump_labels
= true;
3183 if (insn
== BB_HEAD (bb
))
3188 default_rtl_profile ();
3189 for (i
= 0; i
< MAX_INSNS_PER_PEEP2
+ 1; ++i
)
3190 BITMAP_FREE (peep2_insn_data
[i
].live_before
);
3192 if (do_rebuild_jump_labels
)
3193 rebuild_jump_labels (get_insns ());
3195 #endif /* HAVE_peephole2 */
3197 /* Common predicates for use with define_bypass. */
3199 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3200 data not the address operand(s) of the store. IN_INSN and OUT_INSN
3201 must be either a single_set or a PARALLEL with SETs inside. */
3204 store_data_bypass_p (rtx out_insn
, rtx in_insn
)
3206 rtx out_set
, in_set
;
3207 rtx out_pat
, in_pat
;
3208 rtx out_exp
, in_exp
;
3211 in_set
= single_set (in_insn
);
3214 if (!MEM_P (SET_DEST (in_set
)))
3217 out_set
= single_set (out_insn
);
3220 if (reg_mentioned_p (SET_DEST (out_set
), SET_DEST (in_set
)))
3225 out_pat
= PATTERN (out_insn
);
3227 if (GET_CODE (out_pat
) != PARALLEL
)
3230 for (i
= 0; i
< XVECLEN (out_pat
, 0); i
++)
3232 out_exp
= XVECEXP (out_pat
, 0, i
);
3234 if (GET_CODE (out_exp
) == CLOBBER
)
3237 gcc_assert (GET_CODE (out_exp
) == SET
);
3239 if (reg_mentioned_p (SET_DEST (out_exp
), SET_DEST (in_set
)))
3246 in_pat
= PATTERN (in_insn
);
3247 gcc_assert (GET_CODE (in_pat
) == PARALLEL
);
3249 for (i
= 0; i
< XVECLEN (in_pat
, 0); i
++)
3251 in_exp
= XVECEXP (in_pat
, 0, i
);
3253 if (GET_CODE (in_exp
) == CLOBBER
)
3256 gcc_assert (GET_CODE (in_exp
) == SET
);
3258 if (!MEM_P (SET_DEST (in_exp
)))
3261 out_set
= single_set (out_insn
);
3264 if (reg_mentioned_p (SET_DEST (out_set
), SET_DEST (in_exp
)))
3269 out_pat
= PATTERN (out_insn
);
3270 gcc_assert (GET_CODE (out_pat
) == PARALLEL
);
3272 for (j
= 0; j
< XVECLEN (out_pat
, 0); j
++)
3274 out_exp
= XVECEXP (out_pat
, 0, j
);
3276 if (GET_CODE (out_exp
) == CLOBBER
)
3279 gcc_assert (GET_CODE (out_exp
) == SET
);
3281 if (reg_mentioned_p (SET_DEST (out_exp
), SET_DEST (in_exp
)))
3291 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3292 condition, and not the THEN or ELSE branch. OUT_INSN may be either a single
3293 or multiple set; IN_INSN should be single_set for truth, but for convenience
3294 of insn categorization may be any JUMP or CALL insn. */
3297 if_test_bypass_p (rtx out_insn
, rtx in_insn
)
3299 rtx out_set
, in_set
;
3301 in_set
= single_set (in_insn
);
3304 gcc_assert (JUMP_P (in_insn
) || CALL_P (in_insn
));
3308 if (GET_CODE (SET_SRC (in_set
)) != IF_THEN_ELSE
)
3310 in_set
= SET_SRC (in_set
);
3312 out_set
= single_set (out_insn
);
3315 if (reg_mentioned_p (SET_DEST (out_set
), XEXP (in_set
, 1))
3316 || reg_mentioned_p (SET_DEST (out_set
), XEXP (in_set
, 2)))
3324 out_pat
= PATTERN (out_insn
);
3325 gcc_assert (GET_CODE (out_pat
) == PARALLEL
);
3327 for (i
= 0; i
< XVECLEN (out_pat
, 0); i
++)
3329 rtx exp
= XVECEXP (out_pat
, 0, i
);
3331 if (GET_CODE (exp
) == CLOBBER
)
3334 gcc_assert (GET_CODE (exp
) == SET
);
3336 if (reg_mentioned_p (SET_DEST (out_set
), XEXP (in_set
, 1))
3337 || reg_mentioned_p (SET_DEST (out_set
), XEXP (in_set
, 2)))
3346 gate_handle_peephole2 (void)
3348 return (optimize
> 0 && flag_peephole2
);
/* Execute function for the peephole2 pass; a no-op when the target
   defines no peephole2 patterns.  */
static unsigned int
rest_of_handle_peephole2 (void)
{
#ifdef HAVE_peephole2
  peephole2_optimize ();
#endif
  return 0;
}
3360 struct rtl_opt_pass pass_peephole2
=
3364 "peephole2", /* name */
3365 gate_handle_peephole2
, /* gate */
3366 rest_of_handle_peephole2
, /* execute */
3369 0, /* static_pass_number */
3370 TV_PEEPHOLE2
, /* tv_id */
3371 0, /* properties_required */
3372 0, /* properties_provided */
3373 0, /* properties_destroyed */
3374 0, /* todo_flags_start */
3375 TODO_df_finish
| TODO_verify_rtl_sharing
|
3376 TODO_dump_func
/* todo_flags_finish */
3381 rest_of_handle_split_all_insns (void)
3387 struct rtl_opt_pass pass_split_all_insns
=
3391 "split1", /* name */
3393 rest_of_handle_split_all_insns
, /* execute */
3396 0, /* static_pass_number */
3398 0, /* properties_required */
3399 0, /* properties_provided */
3400 0, /* properties_destroyed */
3401 0, /* todo_flags_start */
3402 TODO_dump_func
/* todo_flags_finish */
3407 rest_of_handle_split_after_reload (void)
3409 /* If optimizing, then go ahead and split insns now. */
3417 struct rtl_opt_pass pass_split_after_reload
=
3421 "split2", /* name */
3423 rest_of_handle_split_after_reload
, /* execute */
3426 0, /* static_pass_number */
3428 0, /* properties_required */
3429 0, /* properties_provided */
3430 0, /* properties_destroyed */
3431 0, /* todo_flags_start */
3432 TODO_dump_func
/* todo_flags_finish */
3437 gate_handle_split_before_regstack (void)
3439 #if defined (HAVE_ATTR_length) && defined (STACK_REGS)
3440 /* If flow2 creates new instructions which need splitting
3441 and scheduling after reload is not done, they might not be
3442 split until final which doesn't allow splitting
3443 if HAVE_ATTR_length. */
3444 # ifdef INSN_SCHEDULING
3445 return (optimize
&& !flag_schedule_insns_after_reload
);
3455 rest_of_handle_split_before_regstack (void)
3461 struct rtl_opt_pass pass_split_before_regstack
=
3465 "split3", /* name */
3466 gate_handle_split_before_regstack
, /* gate */
3467 rest_of_handle_split_before_regstack
, /* execute */
3470 0, /* static_pass_number */
3472 0, /* properties_required */
3473 0, /* properties_provided */
3474 0, /* properties_destroyed */
3475 0, /* todo_flags_start */
3476 TODO_dump_func
/* todo_flags_finish */
3481 gate_handle_split_before_sched2 (void)
3483 #ifdef INSN_SCHEDULING
3484 return optimize
> 0 && flag_schedule_insns_after_reload
;
3491 rest_of_handle_split_before_sched2 (void)
3493 #ifdef INSN_SCHEDULING
3499 struct rtl_opt_pass pass_split_before_sched2
=
3503 "split4", /* name */
3504 gate_handle_split_before_sched2
, /* gate */
3505 rest_of_handle_split_before_sched2
, /* execute */
3508 0, /* static_pass_number */
3510 0, /* properties_required */
3511 0, /* properties_provided */
3512 0, /* properties_destroyed */
3513 0, /* todo_flags_start */
3515 TODO_dump_func
/* todo_flags_finish */
3519 /* The placement of the splitting that we do for shorten_branches
3520 depends on whether regstack is used by the target or not. */
3522 gate_do_final_split (void)
3524 #if defined (HAVE_ATTR_length) && !defined (STACK_REGS)
3531 struct rtl_opt_pass pass_split_for_shorten_branches
=
3535 "split5", /* name */
3536 gate_do_final_split
, /* gate */
3537 split_all_insns_noflow
, /* execute */
3540 0, /* static_pass_number */
3542 0, /* properties_required */
3543 0, /* properties_provided */
3544 0, /* properties_destroyed */
3545 0, /* todo_flags_start */
3546 TODO_dump_func
| TODO_verify_rtl_sharing
/* todo_flags_finish */