/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "addresses.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "real.h"
#include "toplev.h"
#include "basic-block.h"
#include "output.h"
#include "reload.h"
#include "target.h"
#include "timevar.h"
#include "tree-pass.h"
#include "df.h"
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
#ifndef HAVE_ATTR_enabled
static inline bool
get_attr_enabled (rtx insn ATTRIBUTE_UNUSED)
{
  return true;
}
#endif
static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx);
/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in regclass.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data recog_data;
/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];
/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;
/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;
/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;
/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = XALLOCAVEC (rtx, noperands);
  constraints = XALLOCAVEC (const char *, noperands);

  decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
        c = constraints[c[0] - '0'];

      if (! asm_operand_ok (operands[i], c))
        return 0;
    }

  return 1;
}
/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
  bool unshare;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;
/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */
static bool
validate_change_1 (rtx object, rtx *loc, rtx new, bool in_group, bool unshare)
{
  rtx old = *loc;

  if (old == new || rtx_equal_p (old, new))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes = XRESIZEVEC (change_t, changes, changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  changes[num_changes].unshare = unshare;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
/* Wrapper for validate_change_1 without the UNSHARE argument defaulting
   to false.  */

bool
validate_change (rtx object, rtx *loc, rtx new, bool in_group)
{
  return validate_change_1 (object, loc, new, in_group, false);
}
/* Wrapper for validate_change_1 without the UNSHARE argument defaulting
   to true.  */

bool
validate_unshare_change (rtx object, rtx *loc, rtx new, bool in_group)
{
  return validate_change_1 (object, loc, new, in_group, true);
}
/* Keep X canonicalized if some changes have made it non-canonical; only
   modifies the operands of X, not (for example) its code.  Simplifications
   are not the job of this routine.

   Return true if anything was changed.  */
int
canonicalize_change_group (rtx insn, rtx x)
{
  if (COMMUTATIVE_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      /* Oops, the caller has made X no longer canonical.
         Let's redo the changes in the correct order.  */
      rtx tem = XEXP (x, 0);
      validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_change (insn, &XEXP (x, 1), tem, 1);
      return 1;
    }
  else
    return 0;
}
/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.  */

int
insn_invalid_p (rtx insn)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}
/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}
/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is in insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (MEM_P (object))
        {
          if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
            break;
        }
      else if (insn_invalid_p (object))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  return (i == num_changes);
}
/* A group of changes has previously been issued with validate_change
   and verified with verify_changes.  Call df_insn_rescan for each of
   the insns changed and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  rtx last_object = NULL;

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (changes[i].unshare)
        *changes[i].loc = copy_rtx (*changes[i].loc);

      /* Avoid unnecessary rescanning when multiple changes to same instruction
         are made.  */
      if (object)
        {
          if (object != last_object && last_object && INSN_P (last_object))
            df_insn_rescan (last_object);
          last_object = object;
        }
    }

  if (last_object && INSN_P (last_object))
    df_insn_rescan (last_object);
  num_changes = 0;
}
/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}
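
/* Usage sketch (illustrative, not part of the original file): callers queue
   several edits with IN_GROUP nonzero and then commit or abandon them
   atomically.  PAT, X and Y below are hypothetical.

     validate_change (insn, &XEXP (pat, 0), x, 1);
     validate_change (insn, &XEXP (pat, 1), y, 1);
     if (! apply_change_group ())
       handle_failure ();   -- hypothetical; both edits were backed out

   Either both replacements survive re-recognition or neither does.  */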
/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}
/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
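
/* Sketch (illustrative, not from the original file): changes can also be
   unwound back to a checkpoint rather than all at once.  LOC and X are
   hypothetical.

     int checkpoint = num_validated_changes ();
     validate_change (insn, loc, x, 1);
     if (! verify_changes (checkpoint))
       cancel_changes (checkpoint);   -- retract only the new edits
*/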
/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;
  rtx new;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_unshare_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     done by regmove, so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        {
          if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
              && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
            {
              /* Verify that operands are really shared.  */
              gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
                          == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
                                                              (x, 0, j))));
              validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
                                      from, to, object);
            }
          else
            validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object);
        }
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
        else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_unshare_change (object, loc,
                               gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
                                               : swap_condition (code),
                                               GET_MODE (x), XEXP (x, 1),
                                               XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                    op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new)
            new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                             SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new)
        validate_change (object, loc, new, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && GET_CODE (XEXP (x, 2)) == CONST_INT
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extzv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}
/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
  return apply_change_group ();
}
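
/* Sketch (illustrative, not from the original file): replace every use of a
   pseudo in INSN by a constant and keep the result only if INSN still
   matches a pattern.  REG and INSN are hypothetical.

     if (validate_replace_rtx (reg, GEN_INT (42), insn))
       ...   -- INSN was re-recognized with the constant in place
*/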
/* Try replacing every occurrence of FROM in INSN with TO.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
}
/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx insn;			/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}
/* Try to simplify INSN.
   Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
   pattern and return true if something was simplified.  */

bool
validate_simplify_insn (rtx insn)
{
  int i;
  rtx pat = NULL;
  rtx newpat = NULL;

  pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    {
      newpat = simplify_rtx (SET_SRC (pat));
      if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
        validate_change (insn, &SET_SRC (pat), newpat, 1);
      newpat = simplify_rtx (SET_DEST (pat));
      if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
        validate_change (insn, &SET_DEST (pat), newpat, 1);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
        rtx s = XVECEXP (pat, 0, i);

        if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
          {
            newpat = simplify_rtx (SET_SRC (s));
            if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
              validate_change (insn, &SET_SRC (s), newpat, 1);
            newpat = simplify_rtx (SET_DEST (s));
            if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
              validate_change (insn, &SET_DEST (s), newpat, 1);
          }
      }
  return ((num_changes_pending () > 0) && (apply_change_group () > 0));
}
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
          && ! inequality_comparisons_p (PATTERN (next)));
}
#endif
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it might have.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
            && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         reference to be explicit, so outlaw paradoxical SUBREGs.
         However, we must allow them after reload so that they can
         get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
          && MEM_P (sub))
        return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      /* Use the mem's mode, since it will be reloaded thus.  */
      if (memory_address_p (GET_MODE (op), y))
        return 1;
    }

  return 0;
}
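
/* Illustrative machine-description use (hypothetical pattern, not from a
   real .md file): general_operand is the usual workhorse predicate behind
   match_operand.

     (define_insn "*movsi_sketch"
       [(set (match_operand:SI 0 "nonimmediate_operand" "=r,m")
             (match_operand:SI 1 "general_operand" "rmi,r"))]
       ""
       "...")
*/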
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}
/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (sub))
        return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
        return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}
/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (REG_P (op)
              && REGNO (op) < FIRST_PSEUDO_REGISTER));
}
/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
          && LEGITIMATE_CONSTANT_P (op));
}
/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) != CONST_INT)
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}
/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}
/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}
/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
         if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
          && GET_MODE_CLASS (mode) != MODE_INT
          && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
        return 0;

      if (GET_CODE (op) == CONST_INT
          && mode != VOIDmode
          && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
        return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
               || mode == VOIDmode)
              && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
              && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (SUBREG_REG (op)))
        return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
#ifdef STACK_GROWS_DOWNWARD
          || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
          || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
          )
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
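
/* For intuition (illustrative, not from the original file): on a machine
   where the stack grows downward and no PUSH_ROUNDING padding is needed,
   an SImode push operand has the shape

     (mem:SI (pre_dec:P (reg:P sp)))

   while a padded push uses
   (mem:SI (pre_modify:P (reg:P sp) (plus:P (reg:P sp) (const_int -N)))).  */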
/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}
/* Return 1 if ADDR is a valid memory address for mode MODE.  */

int
memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx addr)
{
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
}
/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, enum machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}
/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}
/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && COMPARISON_P (op));
}
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (const_rtx body)
{
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* No output operands: return number of input operands.  */
      return ASM_OPERANDS_INPUT_LENGTH (body);
    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
        /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
        return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
      else
        return -1;
    case PARALLEL:
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET
          && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
          int i;
          int n_sets;

          /* Count backwards through CLOBBERs to determine number of SETs.  */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands.  */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked).  */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original insns
                 then they aren't allowed together.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
                return -1;
            }
          return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
                  + n_sets);
        }
      else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...].  */
          int i;

          /* Make sure all the other parallel things really are clobbers.  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;

          return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
        }
      else
        return -1;
    default:
      return -1;
    }
}
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
                     const char **constraints, enum machine_mode *modes,
                     location_t *loc)
{
  int i;
  int noperands;
  rtx asmop = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      for (i = 1; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
        }

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      asmop = body;
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.  */
      /* Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
    {
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin;
      int nout = 0;		/* Does not include CLOBBERs.  */

      asmop = SET_SRC (XVECEXP (body, 0, 0));
      nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
         Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
        {
          if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
            break;		/* Past last SET */

          if (operands)
            operands[i] = SET_DEST (XVECEXP (body, 0, i));
          if (operand_locs)
            operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
          if (constraints)
            constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
          if (modes)
            modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
          nout++;
        }

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs.  */

      int nin;

      asmop = XVECEXP (body, 0, 0);
      nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
    }

  if (loc)
    *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);

  return ASM_OPERANDS_TEMPLATE (asmop);
}
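
/* Sketch (illustrative): callers that need only part of the decoded
   information pass NULL for the rest, as check_asm_operands does above:

     decode_asm_operands (body, operands, NULL, constraints, NULL, NULL);
*/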
/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint)
{
  int result = 0;

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  while (*constraint)
    {
      char c = *constraint;
      int len;
      switch (c)
        {
        case ',':
          constraint++;
          continue;
        case '=':
        case '+':
        case '*':
        case '%':
        case '!':
        case '#':
        case '&':
        case '?':
          break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          /* For best results, our caller should have given us the
             proper matching constraint, but we can't actually fail
             the check if they didn't.  Indicate that results are
             inconclusive.  */
          do
            constraint++;
          while (ISDIGIT (*constraint));
          if (! result)
            result = -1;
          continue;

        case 'p':
          if (address_operand (op, VOIDmode))
            result = 1;
          break;

        case TARGET_MEM_CONSTRAINT:
        case 'V': /* non-offsettable */
          if (memory_operand (op, VOIDmode))
            result = 1;
          break;

        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            result = 1;
          break;

        case '<':
          /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed to exist,
             excepting those that expand_call created.  Further, on some
             machines which do not have generalized auto inc/dec, an inc/dec
             is not a memory_operand.

             Match any memory and hope things are resolved after reload.  */

          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_DEC
                  || GET_CODE (XEXP (op, 0)) == POST_DEC))
            result = 1;
          break;

        case '>':
          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_INC
                  || GET_CODE (XEXP (op, 0)) == POST_INC))
            result = 1;
          break;

        case 'E':
        case 'F':
          if (GET_CODE (op) == CONST_DOUBLE
              || (GET_CODE (op) == CONST_VECTOR
                  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
            result = 1;
          break;

        case 'G':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
            result = 1;
          break;
        case 'H':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
            result = 1;
          break;

        case 's':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            break;
          /* Fall through.  */

        case 'i':
          if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
            result = 1;
          break;

        case 'n':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            result = 1;
          break;

        case 'I':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
            result = 1;
          break;
        case 'J':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
            result = 1;
          break;
        case 'K':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
            result = 1;
          break;
        case 'L':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
            result = 1;
          break;
        case 'M':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
            result = 1;
          break;
        case 'N':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
            result = 1;
          break;
        case 'O':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
            result = 1;
          break;
        case 'P':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
            result = 1;
          break;

        case 'X':
          result = 1;
          break;

        case 'g':
          if (general_operand (op, VOIDmode))
            result = 1;
          break;

        default:
          /* For all other letters, we first check for a register class,
             otherwise it is an EXTRA_CONSTRAINT.  */
          if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
            {
            case 'r':
              if (GET_MODE (op) == BLKmode)
                break;
              if (register_operand (op, VOIDmode))
                result = 1;
            }
#ifdef EXTRA_CONSTRAINT_STR
          else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
            result = 1;
          else if (EXTRA_MEMORY_CONSTRAINT (c, constraint)
                   /* Every memory operand can be reloaded to fit.  */
                   && memory_operand (op, VOIDmode))
            result = 1;
          else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint)
                   /* Every address operand can be reloaded to fit.  */
                   && address_operand (op, VOIDmode))
            result = 1;
#endif
          break;
        }
      len = CONSTRAINT_LEN (c, constraint);
      do
        constraint++;
      while (--len && *constraint);
      if (len)
        return 0;
    }

  return result;
}
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

static rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
        return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
        return tem;
    }

  return 0;
}
/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
}
/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid after the addition of a positive integer
   less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_p (int strictp, enum machine_mode mode, rtx y)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) (enum machine_mode, rtx) =
    (strictp ? strict_memory_address_p : memory_address_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y))
    return 0;

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (*y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
         for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;

      return good;
    }

  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
    z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
                        plus_constant (XEXP (y, 1), mode_sz - 1));
  else
    z = plus_constant (y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z);
}
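
/* For intuition (illustrative): for a 4-byte SImode reference this
   effectively asks whether both ADDR and ADDR+3 are legitimate QImode
   addresses, relying on the assumption that the offsets in between are
   then legitimate as well.  */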
/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */

int
mode_dependent_address_p (rtx addr)
{
  /* Auto-increment addressing with anything other than post_modify
     or pre_modify always introduces a mode dependency.  Catch such
     cases now instead of deferring to the target.  */
  if (GET_CODE (addr) == PRE_INC
      || GET_CODE (addr) == POST_INC
      || GET_CODE (addr) == PRE_DEC
      || GET_CODE (addr) == POST_DEC)
    return 1;

  GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
  return 0;
  /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
 win: ATTRIBUTE_UNUSED_LABEL
  return 1;
}
/* Like extract_insn, but save insn extracted and don't extract again, when
   called again for the same insn expecting that recog_data still contains the
   valid information.  This is used primarily by gen_attr infrastructure that
   often does extract insn again and again.  */
void
extract_insn_cached (rtx insn)
{
  if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
    return;
  extract_insn (insn);
  recog_data.insn = insn;
}

/* Do cached extract_insn, constrain_operands and complain about failures.
   Used by insn_attrtab.  */
void
extract_constrain_insn_cached (rtx insn)
{
  extract_insn_cached (insn);
  if (which_alternative == -1
      && !constrain_operands (reload_completed))
    fatal_insn_not_found (insn);
}

/* Do cached constrain_operands and complain about failures.  */
int
constrain_operands_cached (int strict)
{
  if (which_alternative == -1)
    return constrain_operands (strict);
  else
    return 1;
}
/* Analyze INSN and fill in recog_data.  */

void
extract_insn (rtx insn)
{
  int i;
  int icode;
  int noperands;
  rtx body = PATTERN (insn);

  recog_data.n_operands = 0;
  recog_data.n_alternatives = 0;
  recog_data.n_dups = 0;

  switch (GET_CODE (body))
    {
    case USE:
    case CLOBBER:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
        goto asm_insn;
      else
        goto normal_insn;
    case PARALLEL:
      if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
          || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
        goto asm_insn;
      else
        goto normal_insn;
    case ASM_OPERANDS:
    asm_insn:
      recog_data.n_operands = noperands = asm_noperands (body);
      if (noperands >= 0)
        {
          /* This insn is an `asm' with operands.  */

          /* expand_asm_operands makes sure there aren't too many operands.  */
          gcc_assert (noperands <= MAX_RECOG_OPERANDS);

          /* Now get the operand values and constraints out of the insn.  */
          decode_asm_operands (body, recog_data.operand,
                               recog_data.operand_loc,
                               recog_data.constraints,
                               recog_data.operand_mode, NULL);
          if (noperands > 0)
            {
              const char *p = recog_data.constraints[0];
              recog_data.n_alternatives = 1;
              while (*p)
                recog_data.n_alternatives += (*p++ == ',');
            }
          break;
        }
      fatal_insn_not_found (insn);

    default:
    normal_insn:
      /* Ordinary insn: recognize it, get the operands via insn_extract
         and get the constraints.  */

      icode = recog_memoized (insn);
      if (icode < 0)
        fatal_insn_not_found (insn);

      recog_data.n_operands = noperands = insn_data[icode].n_operands;
      recog_data.n_alternatives = insn_data[icode].n_alternatives;
      recog_data.n_dups = insn_data[icode].n_dups;

      insn_extract (insn);

      for (i = 0; i < noperands; i++)
        {
          recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
          recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
          /* VOIDmode match_operands gets mode from their real operand.  */
          if (recog_data.operand_mode[i] == VOIDmode)
            recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
        }
    }
  for (i = 0; i < noperands; i++)
    recog_data.operand_type[i]
      = (recog_data.constraints[i][0] == '=' ? OP_OUT
         : recog_data.constraints[i][0] == '+' ? OP_INOUT
         : OP_IN);

  gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);

  if (INSN_CODE (insn) < 0)
    for (i = 0; i < recog_data.n_alternatives; i++)
      recog_data.alternative_enabled_p[i] = true;
  else
    {
      recog_data.insn = insn;
      for (i = 0; i < recog_data.n_alternatives; i++)
        {
          which_alternative = i;
          recog_data.alternative_enabled_p[i] = get_attr_enabled (insn);
        }
    }

  recog_data.insn = NULL;
  which_alternative = -1;
}
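
/* Sketch (illustrative, not from the original file): after extract_insn,
   operands are available positionally through recog_data; WALK_INPUT below
   is hypothetical.

     extract_insn (insn);
     for (i = 0; i < recog_data.n_operands; i++)
       if (recog_data.operand_type[i] != OP_OUT)
         walk_input (recog_data.operand[i]);
*/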
/* After calling extract_insn, you can use this function to extract some
   information from the constraint strings into a more usable form.
   The collected data is stored in recog_op_alt.  */
void
preprocess_constraints (void)
{
  int i;

  for (i = 0; i < recog_data.n_operands; i++)
    memset (recog_op_alt[i], 0, (recog_data.n_alternatives
                                 * sizeof (struct operand_alternative)));

  for (i = 0; i < recog_data.n_operands; i++)
    {
      int j;
      struct operand_alternative *op_alt;
      const char *p = recog_data.constraints[i];

      op_alt = recog_op_alt[i];

      for (j = 0; j < recog_data.n_alternatives; j++)
        {
          op_alt[j].cl = NO_REGS;
          op_alt[j].constraint = p;
          op_alt[j].matches = -1;
          op_alt[j].matched = -1;

          if (!recog_data.alternative_enabled_p[j])
            {
              p = skip_alternative (p);
              continue;
            }

          if (*p == '\0' || *p == ',')
            {
              op_alt[j].anything_ok = 1;
              continue;
            }

          for (;;)
            {
              char c = *p;
              if (c == '#')
                do
                  c = *++p;
                while (c != ',' && c != '\0');
              if (c == ',' || c == '\0')
                {
                  p++;
                  break;
                }

              switch (c)
                {
                case '=': case '+': case '*': case '%':
                case 'E': case 'F': case 'G': case 'H':
                case 's': case 'i': case 'n':
                case 'I': case 'J': case 'K': case 'L':
                case 'M': case 'N': case 'O': case 'P':
                  /* These don't say anything we care about.  */
                  break;

                case '?':
                  op_alt[j].reject += 6;
                  break;
                case '!':
                  op_alt[j].reject += 600;
                  break;
                case '&':
                  op_alt[j].earlyclobber = 1;
                  break;

                case '0': case '1': case '2': case '3': case '4':
                case '5': case '6': case '7': case '8': case '9':
                  {
                    char *end;
                    op_alt[j].matches = strtoul (p, &end, 10);
                    recog_op_alt[op_alt[j].matches][j].matched = i;
                    p = end;
                  }
                  continue;

                case TARGET_MEM_CONSTRAINT:
                  op_alt[j].memory_ok = 1;
                  break;
                case '<':
                  op_alt[j].decmem_ok = 1;
                  break;
                case '>':
                  op_alt[j].incmem_ok = 1;
                  break;
                case 'V':
                  op_alt[j].nonoffmem_ok = 1;
                  break;
                case 'o':
                  op_alt[j].offmem_ok = 1;
                  break;
                case 'X':
                  op_alt[j].anything_ok = 1;
                  break;

                case 'p':
                  op_alt[j].is_address = 1;
                  op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
                    [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
                  break;

                case 'g':
                case 'r':
                  op_alt[j].cl =
                   reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
                  break;

                default:
                  if (EXTRA_MEMORY_CONSTRAINT (c, p))
                    {
                      op_alt[j].memory_ok = 1;
                      break;
                    }
                  if (EXTRA_ADDRESS_CONSTRAINT (c, p))
                    {
                      op_alt[j].is_address = 1;
                      op_alt[j].cl
                        = (reg_class_subunion
                           [(int) op_alt[j].cl]
                           [(int) base_reg_class (VOIDmode, ADDRESS,
                                                  SCRATCH)]);
                      break;
                    }

                  op_alt[j].cl
                    = (reg_class_subunion
                       [(int) op_alt[j].cl]
                       [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
                  break;
                }
              p += CONSTRAINT_LEN (c, p);
            }
        }
    }
}
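
/* Sketch (illustrative, not from the original file): consumers such as
   register allocators read the preprocessed table instead of reparsing
   constraint strings.  OPNO is hypothetical.

     extract_insn (insn);
     preprocess_constraints ();
     cl = recog_op_alt[opno][which_alternative].cl;  -- allowed register class
*/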
2174 /* Check the operands of an insn against the insn's operand constraints
2175 and return 1 if they are valid.
2176 The information about the insn's operands, constraints, operand modes
2177 etc. is obtained from the global variables set up by extract_insn.
2179 WHICH_ALTERNATIVE is set to a number which indicates which
2180 alternative of constraints was matched: 0 for the first alternative,
2181 1 for the next, etc.
2183 In addition, when two operands are required to match
2184 and it happens that the output operand is (reg) while the
2185 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2186 make the output operand look like the input.
2187 This is because the output operand is the one the template will print.
2189 This is used in final, just before printing the assembler code and by
2190 the routines that determine an insn's attribute.
2192 If STRICT is a positive nonzero value, it means that we have been
2193 called after reload has been completed. In that case, we must
2194 do all checks strictly. If it is zero, it means that we have been called
2195 before reload has completed. In that case, we first try to see if we can
2196 find an alternative that matches strictly. If not, we try again, this
2197 time assuming that reload will fix up the insn. This provides a "best
2198 guess" for the alternative and is used to compute attributes of insns prior
2199 to reload. A negative value of STRICT is used for this internal call. */
struct funny_match
{
  int this, other;
};

int
constrain_operands (int strict)
{
  const char *constraints[MAX_RECOG_OPERANDS];
  int matching_operands[MAX_RECOG_OPERANDS];
  int earlyclobber[MAX_RECOG_OPERANDS];
  int c;

  struct funny_match funny_match[MAX_RECOG_OPERANDS];
  int funny_match_index;

  which_alternative = 0;
  if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
    return 1;

  for (c = 0; c < recog_data.n_operands; c++)
    {
      constraints[c] = recog_data.constraints[c];
      matching_operands[c] = -1;
    }

  do
    {
      int seen_earlyclobber_at = -1;
      int opno;
      int lose = 0;
      funny_match_index = 0;

      if (!recog_data.alternative_enabled_p[which_alternative])
	{
	  int i;

	  for (i = 0; i < recog_data.n_operands; i++)
	    constraints[i] = skip_alternative (constraints[i]);

	  which_alternative++;
	  continue;
	}

      for (opno = 0; opno < recog_data.n_operands; opno++)
	{
	  rtx op = recog_data.operand[opno];
	  enum machine_mode mode = GET_MODE (op);
	  const char *p = constraints[opno];
	  int offset = 0;
	  int win = 0;
	  int val;
	  int len;

	  earlyclobber[opno] = 0;

	  /* A unary operator may be accepted by the predicate, but it
	     is irrelevant for matching constraints.  */
	  if (UNARY_P (op))
	    op = XEXP (op, 0);

	  if (GET_CODE (op) == SUBREG)
	    {
	      if (REG_P (SUBREG_REG (op))
		  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
		offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
					      GET_MODE (SUBREG_REG (op)),
					      SUBREG_BYTE (op),
					      GET_MODE (op));
	      op = SUBREG_REG (op);
	    }

	  /* An empty constraint or empty alternative
	     allows anything which matched the pattern.  */
	  if (*p == 0 || *p == ',')
	    win = 1;

	  do
	    switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
	      {
	      case '\0':
		len = 0;
		break;
	      case ',':
		c = '\0';
		break;

	      case '?':  case '!': case '*':  case '%':
	      case '=':  case '+':
		break;

	      case '#':
		/* Ignore rest of this alternative as far as
		   constraint checking is concerned.  */
		do
		  p++;
		while (*p && *p != ',');
		len = 0;
		break;

	      case '&':
		earlyclobber[opno] = 1;
		if (seen_earlyclobber_at < 0)
		  seen_earlyclobber_at = opno;
		break;

	      case '0':  case '1':  case '2':  case '3':  case '4':
	      case '5':  case '6':  case '7':  case '8':  case '9':
		{
		  /* This operand must be the same as a previous one.
		     This kind of constraint is used for instructions such
		     as add when they take only two operands.

		     Note that the lower-numbered operand is passed first.

		     If we are not testing strictly, assume that this
		     constraint will be satisfied.  */

		  char *end;
		  int match;

		  match = strtoul (p, &end, 10);
		  if (strict < 0)
		    val = 1;
		  else
		    {
		      rtx op1 = recog_data.operand[match];
		      rtx op2 = recog_data.operand[opno];

		      /* A unary operator may be accepted by the predicate,
			 but it is irrelevant for matching constraints.  */
		      if (UNARY_P (op1))
			op1 = XEXP (op1, 0);
		      if (UNARY_P (op2))
			op2 = XEXP (op2, 0);

		      val = operands_match_p (op1, op2);
		    }

		  matching_operands[opno] = match;
		  matching_operands[match] = opno;

		  if (val != 0)
		    win = 1;

		  /* If output is *x and input is *--x, arrange later
		     to change the output to *--x as well, since the
		     output op is the one that will be printed.  */
		  if (val == 2 && strict > 0)
		    {
		      funny_match[funny_match_index].this = opno;
		      funny_match[funny_match_index++].other = match;
		    }

		  len = end - p;
		}
		break;

	      case 'p':
		/* p is used for address_operands.  When we are called by
		   gen_reload, no one will have checked that the address is
		   strictly valid, i.e., that all pseudos requiring hard regs
		   have gotten them.  */
		if (strict <= 0
		    || (strict_memory_address_p (recog_data.operand_mode[opno],
						 op)))
		  win = 1;
		break;

		/* No need to check general_operand again;
		   it was done in insn-recog.c.  Well, except that reload
		   doesn't check the validity of its replacements, but
		   that should only matter when there's a bug.  */
	      case 'g':
		/* Anything goes unless it is a REG and really has a hard reg
		   but the hard reg is not in the class GENERAL_REGS.  */
		if (REG_P (op))
		  {
		    if (strict < 0
			|| GENERAL_REGS == ALL_REGS
			|| (reload_in_progress
			    && REGNO (op) >= FIRST_PSEUDO_REGISTER)
			|| reg_fits_class_p (op, GENERAL_REGS, offset, mode))
		      win = 1;
		  }
		else if (strict < 0 || general_operand (op, mode))
		  win = 1;
		break;

	      case 'X':
		/* This is used for a MATCH_SCRATCH in the cases when
		   we don't actually need anything.  So anything goes
		   any time.  */
		win = 1;
		break;

	      case TARGET_MEM_CONSTRAINT:
		/* Memory operands must be valid, to the extent
		   required by STRICT.  */
		if (MEM_P (op))
		  {
		    if (strict > 0
			&& !strict_memory_address_p (GET_MODE (op),
						     XEXP (op, 0)))
		      break;
		    if (strict == 0
			&& !memory_address_p (GET_MODE (op), XEXP (op, 0)))
		      break;
		    win = 1;
		  }
		/* Before reload, accept what reload can turn into mem.  */
		else if (strict < 0 && CONSTANT_P (op))
		  win = 1;
		/* During reload, accept a pseudo  */
		else if (reload_in_progress && REG_P (op)
			 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
		  win = 1;
		break;

	      case '<':
		if (MEM_P (op)
		    && (GET_CODE (XEXP (op, 0)) == PRE_DEC
			|| GET_CODE (XEXP (op, 0)) == POST_DEC))
		  win = 1;
		break;

	      case '>':
		if (MEM_P (op)
		    && (GET_CODE (XEXP (op, 0)) == PRE_INC
			|| GET_CODE (XEXP (op, 0)) == POST_INC))
		  win = 1;
		break;

	      case 'E':
	      case 'F':
		if (GET_CODE (op) == CONST_DOUBLE
		    || (GET_CODE (op) == CONST_VECTOR
			&& GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
		  win = 1;
		break;

	      case 'G':
	      case 'H':
		if (GET_CODE (op) == CONST_DOUBLE
		    && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
		  win = 1;
		break;

	      case 's':
		if (GET_CODE (op) == CONST_INT
		    || (GET_CODE (op) == CONST_DOUBLE
			&& GET_MODE (op) == VOIDmode))
		  break;
	      case 'i':
		if (CONSTANT_P (op))
		  win = 1;
		break;

	      case 'n':
		if (GET_CODE (op) == CONST_INT
		    || (GET_CODE (op) == CONST_DOUBLE
			&& GET_MODE (op) == VOIDmode))
		  win = 1;
		break;

	      case 'I':
	      case 'J':
	      case 'K':
	      case 'L':
	      case 'M':
	      case 'N':
	      case 'O':
	      case 'P':
		if (GET_CODE (op) == CONST_INT
		    && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
		  win = 1;
		break;

	      case 'V':
		if (MEM_P (op)
		    && ((strict > 0 && ! offsettable_memref_p (op))
			|| (strict < 0
			    && !(CONSTANT_P (op) || MEM_P (op)))
			|| (reload_in_progress
			    && !(REG_P (op)
				 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
		  win = 1;
		break;

	      case 'o':
		if ((strict > 0 && offsettable_memref_p (op))
		    || (strict == 0 && offsettable_nonstrict_memref_p (op))
		    /* Before reload, accept what reload can handle.  */
		    || (strict < 0
			&& (CONSTANT_P (op) || MEM_P (op)))
		    /* During reload, accept a pseudo  */
		    || (reload_in_progress && REG_P (op)
			&& REGNO (op) >= FIRST_PSEUDO_REGISTER))
		  win = 1;
		break;

	      default:
		{
		  enum reg_class cl;

		  cl = (c == 'r'
			? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
		  if (cl != NO_REGS)
		    {
		      if (strict < 0
			  || (strict == 0
			      && REG_P (op)
			      && REGNO (op) >= FIRST_PSEUDO_REGISTER)
			  || (strict == 0 && GET_CODE (op) == SCRATCH)
			  || (REG_P (op)
			      && reg_fits_class_p (op, cl, offset, mode)))
			win = 1;
		    }
#ifdef EXTRA_CONSTRAINT_STR
		  else if (EXTRA_CONSTRAINT_STR (op, c, p))
		    win = 1;

		  else if (EXTRA_MEMORY_CONSTRAINT (c, p)
			   /* Every memory operand can be reloaded to fit.  */
			   && ((strict < 0 && MEM_P (op))
			       /* Before reload, accept what reload can turn
				  into mem.  */
			       || (strict < 0 && CONSTANT_P (op))
			       /* During reload, accept a pseudo  */
			       || (reload_in_progress && REG_P (op)
				   && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
		    win = 1;
		  else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
			   /* Every address operand can be reloaded to fit.  */
			   && strict < 0)
		    win = 1;
#endif
		  break;
		}
	      }
	  while (p += len, c);

	  constraints[opno] = p;
	  /* If this operand did not win somehow,
	     this alternative loses.  */
	  if (! win)
	    lose = 1;
	}
      /* This alternative won; the operands are ok.
	 Change whichever operands this alternative says to change.  */
      if (! lose)
	{
	  int opno, eopno;

	  /* See if any earlyclobber operand conflicts with some other
	     operand.  */

	  if (strict > 0 && seen_earlyclobber_at >= 0)
	    for (eopno = seen_earlyclobber_at;
		 eopno < recog_data.n_operands;
		 eopno++)
	      /* Ignore earlyclobber operands now in memory,
		 because we would often report failure when we have
		 two memory operands, one of which was formerly a REG.  */
	      if (earlyclobber[eopno]
		  && REG_P (recog_data.operand[eopno]))
		for (opno = 0; opno < recog_data.n_operands; opno++)
		  if ((MEM_P (recog_data.operand[opno])
		       || recog_data.operand_type[opno] != OP_OUT)
		      && opno != eopno
		      /* Ignore things like match_operator operands.  */
		      && *recog_data.constraints[opno] != 0
		      && ! (matching_operands[opno] == eopno
			    && operands_match_p (recog_data.operand[opno],
						 recog_data.operand[eopno]))
		      && ! safe_from_earlyclobber (recog_data.operand[opno],
						   recog_data.operand[eopno]))
		    lose = 1;

	  if (! lose)
	    {
	      while (--funny_match_index >= 0)
		{
		  recog_data.operand[funny_match[funny_match_index].other]
		    = recog_data.operand[funny_match[funny_match_index].this];
		}

	      return 1;
	    }
	}

      which_alternative++;
    }
  while (which_alternative < recog_data.n_alternatives);

  which_alternative = -1;
  /* If we are about to reject this, but we are not to test strictly,
     try a very loose test.  Only return failure if it fails also.  */
  if (strict == 0)
    return constrain_operands (-1);
  else
    return 0;
}
/* Return 1 iff OPERAND (assumed to be a REG rtx)
   is a hard reg in class CLASS when its regno is offset by OFFSET
   and changed to mode MODE.
   If REG occupies multiple hard regs, all of them must be in CLASS.  */

int
reg_fits_class_p (rtx operand, enum reg_class cl, int offset,
		  enum machine_mode mode)
{
  int regno = REGNO (operand);

  if (cl == NO_REGS)
    return 0;

  return (regno < FIRST_PSEUDO_REGISTER
	  && in_hard_reg_set_p (reg_class_contents[(int) cl],
				mode, regno + offset));
}
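/* Editorial example (not in the original source): checking whether hard
   register 0, viewed in SImode, falls in GENERAL_REGS:

     reg_fits_class_p (gen_rtx_REG (SImode, 0), GENERAL_REGS, 0, SImode);

   The OFFSET argument exists so that a caller looking through a SUBREG
   can test the hard register the subreg would actually use.  */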
/* Split single instruction.  Helper function for split_all_insns and
   split_all_insns_noflow.  Return last insn in the sequence if successful,
   or NULL if unsuccessful.  */

static rtx
split_insn (rtx insn)
{
  /* Split insns here to get max fine-grain parallelism.  */
  rtx first = PREV_INSN (insn);
  rtx last = try_split (PATTERN (insn), insn, 1);

  if (last == insn)
    return NULL_RTX;

  /* try_split returns the NOTE that INSN became.  */
  SET_INSN_DELETED (insn);

  /* ??? Coddle to md files that generate subregs in post-reload
     splitters instead of computing the proper hard register.  */
  if (reload_completed && first != last)
    {
      first = NEXT_INSN (first);
      for (;;)
	{
	  if (INSN_P (first))
	    cleanup_subreg_operands (first);
	  if (first == last)
	    break;
	  first = NEXT_INSN (first);
	}
    }
  return last;
}
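/* Editorial sketch (hypothetical pattern, not from this file): the
   splitting performed here is driven by define_split patterns in the
   target .md file, which try_split matches and expands, e.g.

   (define_split
     [(set (match_operand:DI 0 "register_operand" "")
	   (match_operand:DI 1 "register_operand" ""))]
     "reload_completed"
     [(set (match_dup 2) (match_dup 3))
      (set (match_dup 4) (match_dup 5))]
     "... C code preparing operands 2..5 from the DImode halves ...")
*/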
/* Split all insns in the function.  */

void
split_all_insns (void)
{
  sbitmap blocks;
  bool changed;
  basic_block bb;

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (blocks);
  changed = false;

  FOR_EACH_BB_REVERSE (bb)
    {
      rtx insn, next;
      bool finish = false;

      for (insn = BB_HEAD (bb); !finish ; insn = next)
	{
	  /* Can't use `next_real_insn' because that might go across
	     CODE_LABELS and short-out basic blocks.  */
	  next = NEXT_INSN (insn);
	  finish = (insn == BB_END (bb));
	  if (INSN_P (insn))
	    {
	      rtx set = single_set (insn);

	      /* Don't split no-op move insns.  These should silently
		 disappear later in final.  Splitting such insns would
		 break the code that handles LIBCALL blocks.  */
	      if (set && set_noop_p (set))
		{
		  /* Nops get in the way while scheduling, so delete them
		     now if register allocation has already been done.  It
		     is too risky to try to do this before register
		     allocation, and there are unlikely to be very many
		     nops then anyways.  */
		  if (reload_completed)
		    delete_insn_and_edges (insn);
		}
	      else
		{
		  rtx last = split_insn (insn);
		  if (last)
		    {
		      /* The split sequence may include barrier, but the
			 BB boundary we are interested in will be set to
			 previous one.  */
		      while (BARRIER_P (last))
			last = PREV_INSN (last);
		      SET_BIT (blocks, bb->index);
		      changed = true;
		    }
		}
	    }
	}
    }

  if (changed)
    find_many_sub_basic_blocks (blocks);

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  sbitmap_free (blocks);
}
/* Same as split_all_insns, but do not expect CFG to be available.
   Used by machine dependent reorg passes.  */

unsigned int
split_all_insns_noflow (void)
{
  rtx next, insn;

  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (INSN_P (insn))
	{
	  /* Don't split no-op move insns.  These should silently
	     disappear later in final.  Splitting such insns would
	     break the code that handles LIBCALL blocks.  */
	  rtx set = single_set (insn);
	  if (set && set_noop_p (set))
	    {
	      /* Nops get in the way while scheduling, so delete them
		 now if register allocation has already been done.  It
		 is too risky to try to do this before register
		 allocation, and there are unlikely to be very many
		 nops then anyways.

		 ??? Should we use delete_insn when the CFG isn't valid?  */
	      if (reload_completed)
		delete_insn_and_edges (insn);
	    }
	  else
	    split_insn (insn);
	}
    }
  return 0;
}
#ifdef HAVE_peephole2
struct peep2_insn_data
{
  rtx insn;
  regset live_before;
};

static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
static int peep2_current;

/* The number of instructions available to match a peep2.  */
int peep2_current_count;

/* A non-insn marker indicating the last insn of the block.
   The live_before regset for this element is correct, indicating
   DF_LIVE_OUT for the block.  */
#define PEEP2_EOB	pc_rtx
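/* Editorial note (a sketch, not part of the original source):
   peep2_insn_data is treated as a circular buffer of
   MAX_INSNS_PER_PEEP2 + 1 slots, with peep2_current indexing the most
   recently recorded insn.  All slot arithmetic therefore wraps modulo
   the buffer size:

     int slot = n + peep2_current;
     if (slot >= MAX_INSNS_PER_PEEP2 + 1)
       slot -= MAX_INSNS_PER_PEEP2 + 1;

   The accessors below all follow this pattern.  */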
/* Return the Nth non-note insn after `current', or return NULL_RTX if it
   does not exist.  Used by the recognizer to find the next insn to match
   in a multi-insn pattern.  */

rtx
peep2_next_insn (int n)
{
  gcc_assert (n <= peep2_current_count);

  n += peep2_current;
  if (n >= MAX_INSNS_PER_PEEP2 + 1)
    n -= MAX_INSNS_PER_PEEP2 + 1;

  return peep2_insn_data[n].insn;
}
/* Return true if REGNO is dead before the Nth non-note insn
   after `current'.  */

int
peep2_regno_dead_p (int ofs, int regno)
{
  gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);

  ofs += peep2_current;
  if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
    ofs -= MAX_INSNS_PER_PEEP2 + 1;

  gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);

  return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
}
/* Similarly for a REG.  */

int
peep2_reg_dead_p (int ofs, rtx reg)
{
  int regno, n;

  gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);

  ofs += peep2_current;
  if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
    ofs -= MAX_INSNS_PER_PEEP2 + 1;

  gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);

  regno = REGNO (reg);
  n = hard_regno_nregs[regno][GET_MODE (reg)];
  while (--n >= 0)
    if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
      return 0;
  return 1;
}
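/* Editorial example (hypothetical pattern, not from this file): these
   predicates are typically used in define_peephole2 conditions in the
   target .md file, e.g.

   (define_peephole2
     [(set (match_operand:SI 0 "register_operand" "")
	   (match_operand:SI 1 "register_operand" ""))]
     "peep2_reg_dead_p (1, operands[1])"
     [... replacement pattern reusing operand 1's register ...])
*/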
/* Try to find a hard register of mode MODE, matching the register class in
   CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
   remains available until the end of LAST_INSN.  LAST_INSN may be NULL_RTX,
   in which case the only condition is that the register must be available
   before CURRENT_INSN.

   Registers that already have bits set in REG_SET will not be considered.

   If an appropriate register is available, it will be returned and the
   corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
   returned.  */

rtx
peep2_find_free_register (int from, int to, const char *class_str,
			  enum machine_mode mode, HARD_REG_SET *reg_set)
{
  static int search_ofs;
  enum reg_class cl;
  HARD_REG_SET live;
  int i;

  gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
  gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);

  from += peep2_current;
  if (from >= MAX_INSNS_PER_PEEP2 + 1)
    from -= MAX_INSNS_PER_PEEP2 + 1;
  to += peep2_current;
  if (to >= MAX_INSNS_PER_PEEP2 + 1)
    to -= MAX_INSNS_PER_PEEP2 + 1;

  gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
  REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);

  while (from != to)
    {
      HARD_REG_SET this_live;

      if (++from >= MAX_INSNS_PER_PEEP2 + 1)
	from = 0;
      gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
      REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
      IOR_HARD_REG_SET (live, this_live);
    }

  cl = (class_str[0] == 'r' ? GENERAL_REGS
	: REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int raw_regno, regno, success, j;

      /* Distribute the free registers as much as possible.  */
      raw_regno = search_ofs + i;
      if (raw_regno >= FIRST_PSEUDO_REGISTER)
	raw_regno -= FIRST_PSEUDO_REGISTER;
#ifdef REG_ALLOC_ORDER
      regno = reg_alloc_order[raw_regno];
#else
      regno = raw_regno;
#endif

      /* Don't allocate fixed registers.  */
      if (fixed_regs[regno])
	continue;
      /* Don't allocate global registers.  */
      if (global_regs[regno])
	continue;
      /* Make sure the register is of the right class.  */
      if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno))
	continue;
      /* And can support the mode we need.  */
      if (! HARD_REGNO_MODE_OK (regno, mode))
	continue;
      /* And that we don't create an extra save/restore.  */
      if (! call_used_regs[regno] && ! df_regs_ever_live_p (regno))
	continue;
      if (! targetm.hard_regno_scratch_ok (regno))
	continue;

      /* And we don't clobber traceback for noreturn functions.  */
      if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
	  && (! reload_completed || frame_pointer_needed))
	continue;

      success = 1;
      for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
	{
	  if (TEST_HARD_REG_BIT (*reg_set, regno + j)
	      || TEST_HARD_REG_BIT (live, regno + j))
	    {
	      success = 0;
	      break;
	    }
	}
      if (success)
	{
	  add_to_hard_reg_set (reg_set, mode, regno);

	  /* Start the next search with the next register.  */
	  if (++raw_regno >= FIRST_PSEUDO_REGISTER)
	    raw_regno = 0;
	  search_ofs = raw_regno;

	  return gen_rtx_REG (mode, regno);
	}
    }

  search_ofs = 0;
  return NULL_RTX;
}
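/* Editorial example (hypothetical, not from this file): a target
   define_peephole2 preparation statement might grab a scratch register
   like so:

     rtx scratch = peep2_find_free_register (0, 1, "r", SImode,
					     &usable_regs);
     if (scratch == NULL_RTX)
       FAIL;

   where usable_regs is a HARD_REG_SET the pattern cleared beforehand.  */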
/* Perform the peephole2 optimization pass.  */

static void
peephole2_optimize (void)
{
  rtx insn, prev;
  bitmap live;
  int i;
  basic_block bb;
  bool do_cleanup_cfg = false;
  bool do_rebuild_jump_labels = false;

  df_set_flags (DF_LR_RUN_DCE);
  df_analyze ();

  /* Initialize the regsets we're going to use.  */
  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
  live = BITMAP_ALLOC (&reg_obstack);

  FOR_EACH_BB_REVERSE (bb)
    {
      /* Indicate that all slots except the last holds invalid data.  */
      for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
	peep2_insn_data[i].insn = NULL_RTX;
      peep2_current_count = 0;

      /* Indicate that the last slot contains live_after data.  */
      peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
      peep2_current = MAX_INSNS_PER_PEEP2;

      /* Start up propagation.  */
      bitmap_copy (live, DF_LR_OUT (bb));
      df_simulate_artificial_refs_at_end (bb, live);
      bitmap_copy (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);

      for (insn = BB_END (bb); ; insn = prev)
	{
	  prev = PREV_INSN (insn);
	  if (INSN_P (insn))
	    {
	      rtx try, before_try, x;
	      int match_len;
	      rtx note;
	      bool was_call = false;

	      /* Record this insn.  */
	      if (--peep2_current < 0)
		peep2_current = MAX_INSNS_PER_PEEP2;
	      if (peep2_current_count < MAX_INSNS_PER_PEEP2
		  && peep2_insn_data[peep2_current].insn == NULL_RTX)
		peep2_current_count++;
	      peep2_insn_data[peep2_current].insn = insn;
	      df_simulate_one_insn (bb, insn, live);
	      COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);

	      if (RTX_FRAME_RELATED_P (insn))
		{
		  /* If an insn has RTX_FRAME_RELATED_P set, peephole
		     substitution would lose the
		     REG_FRAME_RELATED_EXPR that is attached.  */
		  peep2_current_count = 0;
		  try = NULL;
		}
	      else
		/* Match the peephole.  */
		try = peephole2_insns (PATTERN (insn), insn, &match_len);

	      if (try != NULL)
		{
		  /* If we are splitting a CALL_INSN, look for the CALL_INSN
		     in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
		     cfg-related call notes.  */
		  for (i = 0; i <= match_len; ++i)
		    {
		      int j;
		      rtx old_insn, new_insn, note;

		      j = i + peep2_current;
		      if (j >= MAX_INSNS_PER_PEEP2 + 1)
			j -= MAX_INSNS_PER_PEEP2 + 1;
		      old_insn = peep2_insn_data[j].insn;
		      if (!CALL_P (old_insn))
			continue;
		      was_call = true;

		      new_insn = try;
		      while (new_insn != NULL_RTX)
			{
			  if (CALL_P (new_insn))
			    break;
			  new_insn = NEXT_INSN (new_insn);
			}

		      gcc_assert (new_insn != NULL_RTX);

		      CALL_INSN_FUNCTION_USAGE (new_insn)
			= CALL_INSN_FUNCTION_USAGE (old_insn);

		      for (note = REG_NOTES (old_insn);
			   note;
			   note = XEXP (note, 1))
			switch (REG_NOTE_KIND (note))
			  {
			  case REG_NORETURN:
			  case REG_SETJMP:
			    add_reg_note (new_insn, REG_NOTE_KIND (note),
					  XEXP (note, 0));
			    break;
			  default:
			    /* Discard all other reg notes.  */
			    break;
			  }

		      /* Croak if there is another call in the sequence.  */
		      while (++i <= match_len)
			{
			  j = i + peep2_current;
			  if (j >= MAX_INSNS_PER_PEEP2 + 1)
			    j -= MAX_INSNS_PER_PEEP2 + 1;
			  old_insn = peep2_insn_data[j].insn;
			  gcc_assert (!CALL_P (old_insn));
			}
		      break;
		    }

		  i = match_len + peep2_current;
		  if (i >= MAX_INSNS_PER_PEEP2 + 1)
		    i -= MAX_INSNS_PER_PEEP2 + 1;

		  note = find_reg_note (peep2_insn_data[i].insn,
					REG_EH_REGION, NULL_RTX);

		  /* Replace the old sequence with the new.  */
		  try = emit_insn_after_setloc (try, peep2_insn_data[i].insn,
						INSN_LOCATOR (peep2_insn_data[i].insn));
		  before_try = PREV_INSN (insn);
		  delete_insn_chain (insn, peep2_insn_data[i].insn, false);

		  /* Re-insert the EH_REGION notes.  */
		  if (note || (was_call && nonlocal_goto_handler_labels))
		    {
		      edge eh_edge;
		      edge_iterator ei;

		      FOR_EACH_EDGE (eh_edge, ei, bb->succs)
			if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
			  break;

		      for (x = try ; x != before_try ; x = PREV_INSN (x))
			if (CALL_P (x)
			    || (flag_non_call_exceptions
				&& may_trap_p (PATTERN (x))
				&& !find_reg_note (x, REG_EH_REGION, NULL)))
			  {
			    if (note)
			      add_reg_note (x, REG_EH_REGION, XEXP (note, 0));

			    if (x != BB_END (bb) && eh_edge)
			      {
				edge nfte, nehe;
				int flags;

				nfte = split_block (bb, x);
				flags = (eh_edge->flags
					 & (EDGE_EH | EDGE_ABNORMAL));
				if (CALL_P (x))
				  flags |= EDGE_ABNORMAL_CALL;
				nehe = make_edge (nfte->src, eh_edge->dest,
						  flags);

				nehe->probability = eh_edge->probability;
				nfte->probability
				  = REG_BR_PROB_BASE - nehe->probability;

				do_cleanup_cfg |= purge_dead_edges (nfte->dest);
				bb = nfte->src;
				eh_edge = nehe;
			      }
			  }

		      /* Converting possibly trapping insn to non-trapping is
			 possible.  Zap dummy outgoing edges.  */
		      do_cleanup_cfg |= purge_dead_edges (bb);
		    }

#ifdef HAVE_conditional_execution
		  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
		    peep2_insn_data[i].insn = NULL_RTX;
		  peep2_insn_data[peep2_current].insn = PEEP2_EOB;
		  peep2_current_count = 0;
#else
		  /* Back up lifetime information past the end of the
		     newly created sequence.  */
		  if (++i >= MAX_INSNS_PER_PEEP2 + 1)
		    i = 0;
		  bitmap_copy (live, peep2_insn_data[i].live_before);

		  /* Update life information for the new sequence.  */
		  x = try;
		  do
		    {
		      if (INSN_P (x))
			{
			  if (--i < 0)
			    i = MAX_INSNS_PER_PEEP2;
			  if (peep2_current_count < MAX_INSNS_PER_PEEP2
			      && peep2_insn_data[i].insn == NULL_RTX)
			    peep2_current_count++;
			  peep2_insn_data[i].insn = x;
			  df_insn_rescan (x);
			  df_simulate_one_insn (bb, x, live);
			  bitmap_copy (peep2_insn_data[i].live_before, live);
			}
		      x = PREV_INSN (x);
		    }
		  while (x != prev);

		  peep2_current = i;
#endif

		  /* If we generated a jump instruction, it won't have
		     JUMP_LABEL set.  Recompute after we're done.  */
		  for (x = try; x != before_try; x = PREV_INSN (x))
		    if (JUMP_P (x))
		      {
			do_rebuild_jump_labels = true;
			break;
		      }
		}
	    }

	  if (insn == BB_HEAD (bb))
	    break;
	}
    }

  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    BITMAP_FREE (peep2_insn_data[i].live_before);
  BITMAP_FREE (live);
  if (do_rebuild_jump_labels)
    rebuild_jump_labels (get_insns ());
}
#endif /* HAVE_peephole2 */
/* Common predicates for use with define_bypass.  */

/* True if the dependency between OUT_INSN and IN_INSN is on the store
   data not the address operand(s) of the store.  IN_INSN and OUT_INSN
   must be either a single_set or a PARALLEL with SETs inside.  */

int
store_data_bypass_p (rtx out_insn, rtx in_insn)
{
  rtx out_set, in_set;
  rtx out_pat, in_pat;
  rtx out_exp, in_exp;
  int i, j;

  in_set = single_set (in_insn);
  if (in_set)
    {
      if (!MEM_P (SET_DEST (in_set)))
	return false;

      out_set = single_set (out_insn);
      if (out_set)
	{
	  if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
	    return false;
	}
      else
	{
	  out_pat = PATTERN (out_insn);

	  if (GET_CODE (out_pat) != PARALLEL)
	    return false;

	  for (i = 0; i < XVECLEN (out_pat, 0); i++)
	    {
	      out_exp = XVECEXP (out_pat, 0, i);

	      if (GET_CODE (out_exp) == CLOBBER)
		continue;

	      gcc_assert (GET_CODE (out_exp) == SET);

	      if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
		return false;
	    }
	}
    }
  else
    {
      in_pat = PATTERN (in_insn);
      gcc_assert (GET_CODE (in_pat) == PARALLEL);

      for (i = 0; i < XVECLEN (in_pat, 0); i++)
	{
	  in_exp = XVECEXP (in_pat, 0, i);

	  if (GET_CODE (in_exp) == CLOBBER)
	    continue;

	  gcc_assert (GET_CODE (in_exp) == SET);

	  if (!MEM_P (SET_DEST (in_exp)))
	    return false;

	  out_set = single_set (out_insn);
	  if (out_set)
	    {
	      if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp)))
		return false;
	    }
	  else
	    {
	      out_pat = PATTERN (out_insn);
	      gcc_assert (GET_CODE (out_pat) == PARALLEL);

	      for (j = 0; j < XVECLEN (out_pat, 0); j++)
		{
		  out_exp = XVECEXP (out_pat, 0, j);

		  if (GET_CODE (out_exp) == CLOBBER)
		    continue;

		  gcc_assert (GET_CODE (out_exp) == SET);

		  if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp)))
		    return false;
		}
	    }
	}
    }

  return true;
}
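/* Editorial example (hypothetical unit names, not from this file): these
   predicates are referenced by name from define_bypass clauses in a
   target's scheduling description, e.g.

   (define_bypass 1 "my_store" "my_load" "store_data_bypass_p")

   which grants the shorter latency only when the dependency is on the
   store's data operand rather than its address.  */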
/* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
   condition, and not the THEN or ELSE branch.  OUT_INSN may be either a single
   or multiple set; IN_INSN should be single_set for truth, but for convenience
   of insn categorization may be any JUMP or CALL insn.  */

int
if_test_bypass_p (rtx out_insn, rtx in_insn)
{
  rtx out_set, in_set;

  in_set = single_set (in_insn);
  if (! in_set)
    {
      gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
      return false;
    }

  if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
    return false;
  in_set = SET_SRC (in_set);

  out_set = single_set (out_insn);
  if (out_set)
    {
      if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
	  || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
	return false;
    }
  else
    {
      rtx out_pat;
      int i;

      out_pat = PATTERN (out_insn);
      gcc_assert (GET_CODE (out_pat) == PARALLEL);

      for (i = 0; i < XVECLEN (out_pat, 0); i++)
	{
	  rtx exp = XVECEXP (out_pat, 0, i);

	  if (GET_CODE (exp) == CLOBBER)
	    continue;

	  gcc_assert (GET_CODE (exp) == SET);

	  /* Check the destination of each SET in the PARALLEL; OUT_SET
	     is necessarily null in this branch.  */
	  if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
	      || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
	    return false;
	}
    }

  return true;
}
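/* Editorial example (hypothetical unit names, not from this file):

   (define_bypass 0 "my_alu" "my_branch" "if_test_bypass_p")

   grants the bypass only when the consumer uses the produced value in
   the IF_THEN_ELSE condition rather than in either arm.  */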
static bool
gate_handle_peephole2 (void)
{
  return (optimize > 0 && flag_peephole2);
}

static unsigned int
rest_of_handle_peephole2 (void)
{
#ifdef HAVE_peephole2
  peephole2_optimize ();
#endif
  return 0;
}

struct rtl_opt_pass pass_peephole2 =
{
 {
  RTL_PASS,
  "peephole2",                          /* name */
  gate_handle_peephole2,                /* gate */
  rest_of_handle_peephole2,             /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_PEEPHOLE2,                         /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_dump_func                        /* todo_flags_finish */
 }
};

static unsigned int
rest_of_handle_split_all_insns (void)
{
  split_all_insns ();
  return 0;
}

struct rtl_opt_pass pass_split_all_insns =
{
 {
  RTL_PASS,
  "split1",                             /* name */
  NULL,                                 /* gate */
  rest_of_handle_split_all_insns,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};

static unsigned int
rest_of_handle_split_after_reload (void)
{
  /* If optimizing, then go ahead and split insns now.  */
#ifndef STACK_REGS
  if (optimize > 0)
#endif
    split_all_insns ();
  return 0;
}

struct rtl_opt_pass pass_split_after_reload =
{
 {
  RTL_PASS,
  "split2",                             /* name */
  NULL,                                 /* gate */
  rest_of_handle_split_after_reload,    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};

static bool
gate_handle_split_before_regstack (void)
{
#if defined (HAVE_ATTR_length) && defined (STACK_REGS)
  /* If flow2 creates new instructions which need splitting
     and scheduling after reload is not done, they might not be
     split until final which doesn't allow splitting
     if HAVE_ATTR_length.  */
# ifdef INSN_SCHEDULING
  return (optimize && !flag_schedule_insns_after_reload);
# else
  return (optimize);
# endif
#else
  return 0;
#endif
}

static unsigned int
rest_of_handle_split_before_regstack (void)
{
  split_all_insns ();
  return 0;
}

struct rtl_opt_pass pass_split_before_regstack =
{
 {
  RTL_PASS,
  "split3",                             /* name */
  gate_handle_split_before_regstack,    /* gate */
  rest_of_handle_split_before_regstack, /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};

static bool
gate_handle_split_before_sched2 (void)
{
#ifdef INSN_SCHEDULING
  return optimize > 0 && flag_schedule_insns_after_reload;
#else
  return 0;
#endif
}

static unsigned int
rest_of_handle_split_before_sched2 (void)
{
#ifdef INSN_SCHEDULING
  split_all_insns ();
#endif
  return 0;
}

struct rtl_opt_pass pass_split_before_sched2 =
{
 {
  RTL_PASS,
  "split4",                             /* name */
  gate_handle_split_before_sched2,      /* gate */
  rest_of_handle_split_before_sched2,   /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_flow |
  TODO_dump_func                        /* todo_flags_finish */
 }
};

/* The placement of the splitting that we do for shorten_branches
   depends on whether regstack is used by the target or not.  */
static bool
gate_do_final_split (void)
{
#if defined (HAVE_ATTR_length) && !defined (STACK_REGS)
  return 1;
#else
  return 0;
#endif
}

struct rtl_opt_pass pass_split_for_shorten_branches =
{
 {
  RTL_PASS,
  "split5",                             /* name */
  gate_do_final_split,                  /* gate */
  split_all_insns_noflow,               /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_rtl_sharing /* todo_flags_finish */
 }
};